<|file_name|>window.py<|end_file_name|><|fim▁begin|># -*- mode: python; tab-width: 4; indent-tabs-mode: nil -*-
from gi.repository import Gtk
import os
import shutil
import gettext
from cloudsn.core import config, provider, account, indicator, keyring
from cloudsn import logger
import cloudsn.core.utils as coreutils
STOP_RESPONSE = 1
class MainWindow:
__default = None
def __init__ (self):
if MainWindow.__default:
raise MainWindow.__default
self.builder = None
self.window = None
self.dialog_only = False
self.pref_dialog = None
self.config = config.SettingsController.get_instance()
self.pm = provider.ProviderManager.get_instance()
self.am = account.AccountManager.get_instance()
self.im = indicator.IndicatorManager.get_instance()
self.km = keyring.KeyringManager.get_instance()
self.am.connect ("account-deleted", self.account_deleted_cb)
@staticmethod
def get_instance():
if not MainWindow.__default:
MainWindow.__default = MainWindow()
return MainWindow.__default
def get_main_account_selected (self):
selection = self.main_account_tree.get_selection()
if selection:
model, paths = selection.get_selected_rows()
for path in paths:
citer = self.main_store.get_iter(path)
account_name = self.main_store.get_value(citer, 1)
acc = self.am.get_account(account_name)
return acc, citer
return None, None
def __get_account_date(self, acc):
last_update = ''
dt = acc.get_last_update()
if dt:
last_update = dt.strftime("%Y-%m-%d %H:%M:%S")
return last_update
def select_provider_combo (self, providers_combo, name):
#Select the provider and disable item
i=0
for row in providers_combo.get_model():
if row[1] == name:
providers_combo.set_active (i)
break
i += 1
def load_window(self):
from cloudsn.core.controller import Controller
self.builder=Gtk.Builder()
self.builder.set_translation_domain("cloudsn")
self.builder.add_from_file(config.add_data_prefix("preferences.ui"))
self.builder.connect_signals(self)
self.window=self.builder.get_object("main_window")
self.window.connect ("delete-event", self.window_delete_event_cb)
self.window.set_icon(config.get_cloudsn_icon())
self.main_account_tree = self.builder.get_object("main_account_tree")
self.main_store = self.builder.get_object("account_store")
self.providers_combo = self.builder.get_object("providers_combo")
self.providers_store = self.builder.get_object("providers_store")
self.play_button = self.builder.get_object("tool_play")
self.read_button = self.builder.get_object("main_read_button")
#Populate accounts
for acc in self.am.get_accounts():
self.main_store.append([acc.get_icon(), acc.get_name(),
self.__get_account_date(acc), acc.get_active(),
acc.get_total_unread()])
#Populate providers
for prov in self.pm.get_providers():
self.providers_store.append([prov.get_icon(), prov.get_name()])
#Update the last check date
Controller.get_instance().connect ("account-checked",
self.__on_account_checked_cb)
Controller.get_instance().connect ("account-check-error",
self.__on_account_check_error_cb)
self.set_play_active (Controller.get_instance().get_active())
def run(self):
self.load_window()
self.window.show()
def set_play_active(self, active):
self.play_button.set_active(active)
if active:
self.play_button.set_stock_id(Gtk.STOCK_MEDIA_PAUSE)
self.play_button.set_tooltip_text(
_("Press to pause the checker daemon"))
else:
self.play_button.set_stock_id(Gtk.STOCK_MEDIA_PLAY)
self.play_button.set_tooltip_text(
_("Press to start the checker daemon"))
def preferences_action_activate_cb (self, widget, data=None):
self.pref_dialog = self.builder.get_object("preferences_dialog")
self.pref_dialog.set_transient_for(self.window)
self.pref_dialog.set_destroy_with_parent (True)
indicator_combo = self.builder.get_object("indicator_combo")
indicators_store = self.builder.get_object("indicators_store")
keyring_combo = self.builder.get_object("keyring_combo")
keyring_store = self.builder.get_object("keyring_store")
minutes=self.builder.get_object("minutes_spin")
max_not_spin=self.builder.get_object("max_not_spin")
startup_check = self.builder.get_object("startup_check")
enable_sounds_check = self.builder.get_object("enable_sounds_check")
minutes.set_value (float(self.config.get_prefs()["minutes"]))
max_not_spin.set_value (float(self.config.get_prefs()["max_notifications"]))
if os.path.exists(config.get_startup_file_path()):
startup_check.set_active(True)
else:
startup_check.set_active(False)
enable_sounds_check.set_active(coreutils.get_boolean(self.config.get_prefs()["enable_sounds"]))
#Populate indicator combo
i=0
indicator_name = self.config.get_prefs()["indicator"]
indicators_store.clear()
for indi in self.im.get_indicators():
indicators_store.append([indi.get_name()])
if indi.get_name() == indicator_name:
indicator_combo.set_active(i)
i+=1
i=0
keyring_id = self.config.get_prefs()["keyring"]
keyring_store.clear()
for k in self.km.get_managers():
keyring_store.append([k.get_name(), k.get_id()])
if k.get_id() == keyring_id:
keyring_combo.set_active(i)
i+=1
response = self.pref_dialog.run()
self.pref_dialog.hide()
self.config.set_pref ("minutes", minutes.get_value())
self.config.set_pref ("max_notifications", max_not_spin.get_value())
self.config.set_pref ("enable_sounds", enable_sounds_check.get_active())
iiter = indicator_combo.get_active_iter()
if iiter:
self.config.set_pref ("indicator", indicators_store.get_value(iiter,0))
iiter = keyring_combo.get_active_iter()
selected = keyring_store.get_value(iiter,1)
for m in self.km.get_managers():
logger.debug("selected %s, current %s" % (selected, m.get_id()))
if m.get_id() == selected:
self.km.set_manager(m)
break
self.config.set_pref ("keyring", selected)
#Sync the autostart state: copy the .desktop file into the autostart dir when enabled, remove it when disabled
if startup_check.get_active():
if not os.path.exists(config.get_startup_file_path()):
if not os.path.exists(config.get_startup_file_dir()):
os.makedirs(config.get_startup_file_dir())
shutil.copyfile(config.add_data_prefix("cloudsn.desktop"),
config.get_startup_file_path())
else:
if os.path.exists(config.get_startup_file_path()):
os.remove (config.get_startup_file_path())
self.config.save_prefs()
def about_action_activate_cb (self, widget, data=None):
about.show_about_dialog()
def quit_action_activate_cb (self, widget, data=None):
Gtk.main_quit()
def close_action_activate_cb (self, widget, data=None):
if self.dialog_only:
Gtk.main_quit()
else:
self.window.hide()
def main_delete_button_clicked_cb(self, widget, data=None):
acc, citer = self.get_main_account_selected()
if not acc:
return
msg = _('Are you sure you want to delete the account %s?') % acc.get_name()
dia = Gtk.MessageDialog(self.window,
Gtk.DialogFlags.MODAL | Gtk.DialogFlags.DESTROY_WITH_PARENT,
Gtk.MessageType.QUESTION,
Gtk.ButtonsType.YES_NO,
msg)
dia.show_all()
if dia.run() == Gtk.ResponseType.YES:
self.am.del_account(acc, True)
dia.hide()
def main_update_button_clicked_cb(self, widget, data=None):
from cloudsn.core.controller import Controller
acc, citer = self.get_main_account_selected()
if acc:
Controller.get_instance().update_account(acc)
def main_read_button_clicked_cb(self, widget, data=None):
acc, citer = self.get_main_account_selected()
if acc and acc.can_mark_read():
acc.mark_read()
self.__on_account_checked_cb(None, acc)
def main_account_tree_cursor_changed_cb(self, widget, data=None):
acc, citer = self.get_main_account_selected()
if acc and acc.can_mark_read():
self.read_button.set_sensitive(True)
else:
self.read_button.set_sensitive(False)
def tool_play_toggled_cb (self, widget, data=None):
from cloudsn.core.controller import Controller
self.set_play_active(widget.get_active())
Controller.get_instance().set_active(widget.get_active())
def account_deleted_cb(self, widget, acc):
selection = self.main_account_tree.get_selection()
if selection:
model, paths = selection.get_selected_rows()
for path in paths:
citer = self.main_store.get_iter(path)
self.main_store.remove(citer)
def window_delete_event_cb (self, widget, event, data=None):
if self.dialog_only:
Gtk.main_quit()
else:
self.window.hide()
def active_cell_toggled_cb(self, cell, path, data=None):
active = not self.main_store[path][3]
self.main_store[path][3] = active
account_name = self.main_store[path][1]
acc = self.am.get_account(account_name)
self.am.set_account_active(acc, active)
def new_action_activate_cb(self, widget, data=None):
self.new_dialog = self.builder.get_object("account_new_dialog")
account_name_entry = self.builder.get_object("account_name_entry")
self.provider_content = self.builder.get_object("provider_content")
self.activate_command_entry = self.builder.get_object("activate_command_entry")
self.provider_content.account = None
self.new_dialog.set_transient_for(self.window)
self.new_dialog.set_destroy_with_parent (True)
account_name_entry.set_text("")
account_name_entry.set_sensitive (True)
self.providers_combo.set_sensitive (True)
self.providers_combo.set_active(-1)
for c in self.provider_content.get_children():
if c:
self.provider_content.remove(c)
c.destroy()
end = False
while not end:
response = self.new_dialog.run()
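# Response id 0 is presumably the dialog's accept/OK button as defined
# in preferences.ui; any other response (cancel, Escape) ends the loop.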
if response == 0:
try:
if len(self.provider_content.get_children())==0:
raise Exception(_("You must select a provider and fill the data"))
acc_name = account_name_entry.get_text()
if acc_name == '':
raise Exception(_("You must fill the account name"))
custom_widget = self.provider_content.get_children()[0]
citer = self.providers_combo.get_active_iter()
provider_name = self.providers_store.get_value (citer, 1)
provider = self.pm.get_provider(provider_name)
acc = provider.set_account_data_from_widget(acc_name, custom_widget)
acc.set_activate_command (self.activate_command_entry.get_text())
self.am.add_account(acc)
self.am.save_account(acc)
self.main_store.append([acc.get_icon(),
acc.get_name(),self.__get_account_date(acc),
acc.get_active(), acc.get_total_unread()])
end = True
except Exception, e:
logger.error ('Error adding a new account: %s', e)
md = Gtk.MessageDialog(self.window,
Gtk.DialogFlags.DESTROY_WITH_PARENT, Gtk.MessageType.ERROR,
Gtk.ButtonsType.CLOSE,
_('Error adding a new account: ') + str(e))
md.run()
md.destroy()
else:
end = True
self.new_dialog.hide()
def edit_action_activate_cb(self, widget, data=None):
acc, citer = self.get_main_account_selected()
if not acc:
return
self.new_dialog = self.builder.get_object("account_new_dialog")
account_name_entry = self.builder.get_object("account_name_entry")
account_name_entry.set_text(acc.get_name())
#TODO: the account name cannot be modified at the moment
account_name_entry.set_sensitive (False)
self.provider_content = self.builder.get_object("provider_content")
self.activate_command_entry = self.builder.get_object("activate_command_entry")
self.provider_content.account = acc
self.new_dialog.set_transient_for(self.window)
self.new_dialog.set_destroy_with_parent (True)
#Select the provider and disable item
providers_combo = self.builder.get_object("providers_combo")
providers_combo.set_active(-1)
self.select_provider_combo (providers_combo, acc.get_provider().get_name())
providers_combo.set_sensitive (False)
end = False
while not end:
response = self.new_dialog.run()
if response == 0:
try:
acc_name = account_name_entry.get_text()
if acc_name == '':<|fim▁hole|> raise Exception(_("You must fill the account name"))
custom_widget = self.provider_content.get_children()[0]
acc = acc.get_provider().set_account_data_from_widget(acc_name, custom_widget, acc)
acc.set_activate_command (self.activate_command_entry.get_text())
self.am.save_account(acc)
end = True
except Exception, e:
logger.exception ('Error editing the account: %s', e)
md = Gtk.MessageDialog(self.window,
Gtk.DialogFlags.DESTROY_WITH_PARENT, Gtk.MessageType.ERROR,
Gtk.ButtonsType.CLOSE,
_('Error editing the account: ') + str(e))
md.run()
md.destroy()
else:
end = True
self.new_dialog.hide()
def update_all_action_activate_cb (self, widget, data=None):
from cloudsn.core.controller import Controller
Controller.get_instance().update_accounts()
def providers_combo_changed_cb(self, widget, data=None):
ch = self.provider_content.get_children()
for c in ch:
self.provider_content.remove(c)
c.destroy()
citer = self.providers_combo.get_active_iter()
if not citer:
return
provider_name = self.providers_store.get_value (citer, 1)
provider = self.pm.get_provider(provider_name)
if provider.get_import_error():
md = Gtk.MessageDialog(self.window,
Gtk.DialogFlags.DESTROY_WITH_PARENT, Gtk.MessageType.ERROR,
Gtk.ButtonsType.CLOSE,
_('Error loading the provider: ') + str(provider.get_import_error()))
md.run()
md.destroy()
return
box = provider.get_account_data_widget(self.provider_content.account)
self.provider_content.add(box)
if self.provider_content.account:
self.activate_command_entry.set_text(self.provider_content.account.get_activate_command())
box.show_all()
def __on_account_checked_cb(self, widget, acc):
for row in self.main_store:
if row[1] == acc.get_name():
row[0] = acc.get_icon()
row[2] = self.__get_account_date(acc)
row[4] = acc.get_total_unread()
def __on_account_check_error_cb(self, widget, acc):
for row in self.main_store:
if row[1] == acc.get_name():
row[0] = acc.get_icon()
row[2] = self.__get_account_date(acc)
row[4] = acc.get_total_unread()
def main ():
import cloudsn.cloudsn
import cloudsn.core.controller
cloudsn.cloudsn.setup_locale_and_gettext()
#account.AccountManager.get_instance().load_accounts()
cloudsn.core.controller.Controller.get_instance()
win = MainWindow.get_instance()
win.dialog_only = True
win.run()
Gtk.main()
if __name__ == "__main__":
main()<|fim▁end|> | |
<|file_name|>CheckForNewVersionAction.java<|end_file_name|><|fim▁begin|>/**
* This program and the accompanying materials
* are made available under the terms of the License
* which accompanies this distribution in the file LICENSE.txt
*/
package com.archimatetool.editor.actions;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.browser.IWebBrowser;
import org.eclipse.ui.browser.IWorkbenchBrowserSupport;
import com.archimatetool.editor.ArchiPlugin;
import com.archimatetool.editor.preferences.IPreferenceConstants;
import com.archimatetool.editor.utils.StringUtils;
/**
* Check for New Version Action
*
* @author Phillip Beauvoir
*/
public class CheckForNewVersionAction extends Action {
public CheckForNewVersionAction() {
super(Messages.CheckForNewVersionAction_0);
}
String getOnlineVersion(URL url) throws IOException {
URLConnection connection = url.openConnection();
connection.connect();
InputStream is = connection.getInputStream();
char[] buf = new char[32];
Reader r = new InputStreamReader(is, "UTF-8"); //$NON-NLS-1$
StringBuilder s = new StringBuilder();
while(true) {
int n = r.read(buf);
if(n < 0) {
break;
}
s.append(buf, 0, n);
}
is.close();
r.close();
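// Note: these close() calls are skipped if read() throws above; a
// try-with-resources block would be the safer modern form.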
return s.toString();
}
@Override
public void run() {
try {
String versionFile = ArchiPlugin.PREFERENCES.getString(IPreferenceConstants.UPDATE_URL);
if(!StringUtils.isSet(versionFile)) {
return;
}
URL url = new URL(versionFile);
String newVersion = getOnlineVersion(url);
// Get this app's main version number
String thisVersion = ArchiPlugin.INSTANCE.getVersion();
if(StringUtils.compareVersionNumbers(newVersion, thisVersion) > 0) {
String downloadURL = ArchiPlugin.PREFERENCES.getString(IPreferenceConstants.DOWNLOAD_URL);
// No download URL
if(!StringUtils.isSet(downloadURL)) {
MessageDialog.openInformation(null, Messages.CheckForNewVersionAction_1,
Messages.CheckForNewVersionAction_2 + " (" + newVersion + "). "); //$NON-NLS-1$ //$NON-NLS-2$
return;
}
// Does have download URL
boolean reply = MessageDialog.openQuestion(null, Messages.CheckForNewVersionAction_1,
Messages.CheckForNewVersionAction_2 + " (" + newVersion + "). " + //$NON-NLS-1$ //$NON-NLS-2$
Messages.CheckForNewVersionAction_3);
if(reply) {
IWorkbenchBrowserSupport support = PlatformUI.getWorkbench().getBrowserSupport();
IWebBrowser browser = support.getExternalBrowser();
if(browser != null) {
<|fim▁hole|> }
}
else {
MessageDialog.openInformation(null, Messages.CheckForNewVersionAction_1, Messages.CheckForNewVersionAction_4);
}
}
catch(MalformedURLException ex) {
ex.printStackTrace();
}
catch(IOException ex) {
ex.printStackTrace();
showErrorMessage(Messages.CheckForNewVersionAction_5 + " " + ex.getMessage()); //$NON-NLS-1$
return;
}
catch(PartInitException ex) {
ex.printStackTrace();
}
};
@Override
public boolean isEnabled() {
String versionFile = ArchiPlugin.PREFERENCES.getString(IPreferenceConstants.UPDATE_URL);
return StringUtils.isSet(versionFile);
}
private void showErrorMessage(String message) {
MessageDialog.openError(null, Messages.CheckForNewVersionAction_6, message);
}
}<|fim▁end|> | URL url2 = new URL(downloadURL);
browser.openURL(url2);
}
|
<|file_name|>apollo.js<|end_file_name|><|fim▁begin|>import React from "react";
import { Link } from "@curi/react-dom";
import {
TitledPlainSection,
HashSection,
Paragraph,
CodeBlock,
Note,
IJS
} from "../../components/guide/common";
let meta = {
title: "Apollo Integration"
};
let setupMeta = {
title: "Setup",
hash: "setup"
};
let looseMeta = {
title: "Loose Pairing",
hash: "loose-pairing"
};
let prefetchMeta = {
title: "Prefetching",
hash: "prefetch"
};
let tightMeta = {
title: "Tight Pairing",
hash: "tight-pairing",
children: [prefetchMeta]
};
let contents = [setupMeta, looseMeta, tightMeta];
function ApolloGuide() {
return (
<React.Fragment>
<TitledPlainSection title={meta.title}>
<Paragraph>
<a href="https://apollographql.com">Apollo</a> is a great solution for
managing an application's data using{" "}
<a href="http://graphql.org">GraphQL</a>.
</Paragraph>
<Paragraph>
There are a few different implementation strategies for integrating
Apollo and Curi based on how tightly you want them to be paired.
</Paragraph>
<Note>
<Paragraph>
This guide only covers integration between Curi and Apollo. If you
are not already familiar with how to use Apollo, you will want to
learn that first.
</Paragraph>
<Paragraph>
Also, this guide will only be referencing Apollo's React
implementation, but the principles are the same no matter how you
render your application.
</Paragraph>
</Note>
</TitledPlainSection>
<HashSection meta={setupMeta} tag="h2">
<Paragraph>
Apollo's React package provides an <IJS>ApolloProvider</IJS> component<|fim▁hole|> should be a descendant of the <IJS>ApolloProvider</IJS> because we
don't need to re-render the <IJS>ApolloProvider</IJS> for every new
response.
</Paragraph>
<CodeBlock lang="jsx">
{`import { ApolloProvider } from "react-apollo";
import { createRouterComponent } from "@curi/react-dom";
let Router = createRouterComponent(router);
ReactDOM.render((
<ApolloProvider client={client}>
<Router>
<App />
</Router>
</ApolloProvider>
), holder);`}
</CodeBlock>
</HashSection>
<HashSection meta={looseMeta} tag="h2">
<Paragraph>
Apollo and Curi don't actually have to know about each other. Curi can
create a response without doing any data fetching and let Apollo
handle that with its <IJS>Query</IJS> component.
</Paragraph>
<CodeBlock>
{`// routes.js
import Noun from "./pages/Noun";
// nothing Apollo related in here
let routes = prepareRoutes([
{
name: 'Noun',
path: 'noun/:word',
respond: () => {
return {
body: Noun
};
}
}
]);`}
</CodeBlock>
<Paragraph>
Any location data that a query needs can be taken from the response
object. The best way to access this is to read the current{" "}
<IJS>response</IJS> from the context. This can either be done in the
component or the response can be passed down from the root app.
</Paragraph>
<CodeBlock lang="jsx">
{`import { useResponse } from "@curi/react-dom";
function App() {
let { response } = useResponse();
let { body:Body } = response;
return <Body response={response} />;
}`}
</CodeBlock>
<Paragraph>
Because we pass the <IJS>response</IJS> to the route's <IJS>body</IJS>{" "}
component, we can pass a <IJS>Query</IJS> the response's location
params using <IJS>props.response.params</IJS>.
</Paragraph>
<CodeBlock lang="jsx">
{`// pages/Nouns.js
import { Query } from "react-apollo";
let GET_NOUN = gql\`
query noun(\$word: String!) {
noun(word: $word) {
word,
type,
definition
}
}
\`;
// use the "word" param from the response props
// to query the correct data
let Noun = ({ response }) => (
<Query
query={GET_NOUN}
variables={{ word: response.params.word }}
>
{({ loading, error, data }) => {
if (loading) {
return <Loading />;
}
// ...
return (
<article>
<h1>{data.noun.word}</h1>
<Paragraph>{data.noun.definition}</Paragraph>
</article>
)
}}
</Query>
);`}
</CodeBlock>
</HashSection>
<HashSection meta={tightMeta} tag="h2">
<Paragraph>
You can use your Apollo client instance to call queries in a route's{" "}
<IJS>resolve</IJS> function. <IJS>resolve</IJS> is expected to return
a Promise, which is exactly what <IJS>client.query</IJS> returns.
Tightly pairing Curi and Apollo mostly centers on using{" "}
<IJS>resolve</IJS> to return a <IJS>client.query</IJS> call. This will
delay navigation until after a route's GraphQL data has been loaded by
Apollo.
</Paragraph>
<Paragraph>
The <IJS>external</IJS> option can be used when creating the router to
make the Apollo client accessible from routes.
</Paragraph>
<CodeBlock>
{`import client from "./apollo";
let router = createRouter(browser, routes, {
external: { client }
});`}
</CodeBlock>
<CodeBlock>
{`import { EXAMPLE_QUERY } from "./queries";
let routes = prepareRoutes([
{
name: "Example",
path: "example/:id",
resolve({ params }, external) {
return external.client.query({
query: EXAMPLE_QUERY,
variables: { id: params.id }
});
}
}
]);`}
</CodeBlock>
<Paragraph>There are two strategies for doing this.</Paragraph>
<Paragraph>
The first approach is to avoid the <IJS>Query</IJS> altogether.
Instead, you can use a route's <IJS>response</IJS> property to attach
the data fetched by Apollo directly to a response through its{" "}
<IJS>data</IJS> property.
</Paragraph>
<Paragraph>
While we know at this point that the query has executed, we should
also check <IJS>error</IJS> in the <IJS>respond</IJS> function to
ensure that the query was executed successfully.
</Paragraph>
<CodeBlock>
{`// routes.js
import GET_VERB from "./queries";
import Verb from "./pages/Verb";
export default [
{
name: "Verb",
path: "verb/:word",
resolve({ params }, external) {
return external.client.query({
query: GET_VERB,
variables: { word: params.word }
});
},
respond({ error, resolved }) {
if (error) {
// handle failed queries
}
return {
body: Verb,
data: resolved.verb.data
}
}
}
];`}
</CodeBlock>
<Paragraph>
When rendering, you can access the query data through the{" "}
<IJS>response</IJS>'s <IJS>data</IJS> property.
</Paragraph>
<CodeBlock lang="jsx">
{`// pages/Verb.js
let Verb = ({ response }) => (
<article>
<h1>{response.data.verb.word}</h1>
<Paragraph>
{response.data.verb.definition}
</Paragraph>
</article>
)`}
</CodeBlock>
<Paragraph>
The second approach is to use the <IJS>resolve</IJS> function as a way
to cache the data, but also use <IJS>Query</IJS>. With this approach,
we do not have to attach the query data to the response; we are
relying on the fact that Apollo will execute and cache the results
prior to navigation.
</Paragraph>
<CodeBlock>
{`// routes.js
import { GET_VERB } from "./queries";
export default [
{
name: "Verb",
path: "verb/:word",
resolve({ params, external }) {
// load the data so it is cached by
// your Apollo client
return external.client.query({
query: GET_VERB,
variables: { word: params.word }
});
}
}
];`}
</CodeBlock>
<Paragraph>
The route's component will render a <IJS>Query</IJS> to also call the
query. Because the query has already been executed, Apollo will grab
the data from its cache instead of re-sending a request to your
server.
</Paragraph>
<CodeBlock lang="jsx">
{`// pages/Verb.js
import { GET_VERB } from "../queries";
let Verb = ({ response }) => (
<Query
query={GET_VERB}
variables={{ word: response.params.word }}
>
{({ loading, error, data }) => {
// ...
return (
<article>
<h1>{data.verb.word}</h1>
<Paragraph>
{data.verb.definition}
</Paragraph>
</article>
);
}}
</Query>
)`}
</CodeBlock>
<HashSection meta={prefetchMeta} tag="h3">
<Paragraph>
One additional benefit of adding queries to routes using{" "}
<IJS>resolve</IJS> is that you can prefetch data for a route.
</Paragraph>
<Paragraph>
The{" "}
<Link
name="Package"
params={{ package: "interactions", version: "v2" }}
hash="prefetch"
>
<IJS>prefetch</IJS>
</Link>{" "}
interaction lets you programmatically fetch the data for a route
prior to navigating to a location.
</Paragraph>
<CodeBlock>
{`// index.js
import { prefetch } from "@curi/router";
let routes = prepareRoutes([
{
name: "Example",
path: "example/:id",
resolve({ params }, external) {
return external.client.query({
query: GET_EXAMPLES,
variables: { id: params.id }
});
}
}
]);
let router = createRouter(browser, routes);
// this will call the GET_EXAMPLES query
// and Apollo will cache the results
let exampleRoute = router.route("Example");
prefetch(exampleRoute, { params: { id: 2 }});`}
</CodeBlock>
</HashSection>
</HashSection>
</React.Fragment>
);
}
export { ApolloGuide as component, contents };<|fim▁end|> | for accessing your Apollo client throughout the application. The{" "}
<IJS>Router</IJS> (or whatever you name the root Curi component) |
<|file_name|>strings.js<|end_file_name|><|fim▁begin|>define(
({
"sourceSetting": "搜索源设置",
"instruction": "添加并配置地理编码服务或要素图层为搜索源。这些指定的源决定了搜索框中的可搜索内容。",
"add": "添加搜索源",
"addGeocoder": "添加地理编码器",
"geocoder": "地理编码器",
"setLayerSource": "设置图层源",
"setGeocoderURL": "设置地理编码器 URL",
"searchableLayer": "要素图层",
"name": "名称",
"countryCode": "国家代码或区域代码",
"countryCodeEg": "例如 ",
"countryCodeHint": "将此值留空可搜索所有国家和地区",
"generalSetting": "常规设置",
"allPlaceholder": "用于搜索全部内容的占位符文本: ",
"showInfoWindowOnSelect": "显示已找到要素或位置的弹出窗口",
"searchInCurrentMapExtent": "仅在当前地图范围内搜索",
"zoomScale": "缩放比例",
"locatorUrl": "地理编码器 URL",
"locatorName": "地理编码器名称",
"locatorExample": "示例",
"locatorWarning": "不支持此版本的地理编码服务。该微件支持 10.0 及更高版本的地理编码服务。",
"locatorTips": "由于地理编码服务不支持建议功能,因此建议不可用。",
"layerSource": "图层源",
"searchLayerTips": "由于要素服务不支持分页功能,因此建议不可用。",
"placeholder": "占位符文本",
<|fim▁hole|> "maxResults": "最大结果数",
"setSearchFields": "设置搜索字段",
"set": "设置",
"fieldSearchable": "可搜索",
"fieldName": "名称",
"fieldAlias": "别名",
"ok": "确定",
"cancel": "取消",
"invalidUrlTip": "URL ${URL} 无效或不可访问。"
})
);<|fim▁end|> | "searchFields": "搜索字段",
"displayField": "显示字段",
"exactMatch": "完全匹配",
"maxSuggestions": "最大建议数",
|
<|file_name|>test_tap.py<|end_file_name|><|fim▁begin|># Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.names.tap}.
"""
from twisted.trial.unittest import TestCase
from twisted.python.usage import UsageError
from twisted.names.tap import Options, _buildResolvers
from twisted.names.dns import PORT
from twisted.names.secondary import SecondaryAuthorityService
from twisted.names.resolve import ResolverChain
from twisted.names.client import Resolver
class OptionsTests(TestCase):
"""
Tests for L{Options}, defining how command line arguments for the DNS server
are parsed.
"""
def test_malformedSecondary(self):
"""
If the value supplied for an I{--secondary} option does not provide a
server IP address, optional port number, and domain name,
L{Options.parseOptions} raises L{UsageError}.
<|fim▁hole|> self.assertRaises(
UsageError, options.parseOptions, ['--secondary', ''])
self.assertRaises(
UsageError, options.parseOptions, ['--secondary', '1.2.3.4'])
self.assertRaises(
UsageError, options.parseOptions, ['--secondary', '1.2.3.4:hello'])
self.assertRaises(
UsageError, options.parseOptions,
['--secondary', '1.2.3.4:hello/example.com'])
def test_secondary(self):
"""
An argument of the form C{"ip/domain"} is parsed by L{Options} for the
I{--secondary} option and added to its list of secondaries, using the
default DNS port number.
"""
options = Options()
options.parseOptions(['--secondary', '1.2.3.4/example.com'])
self.assertEqual(
[(('1.2.3.4', PORT), ['example.com'])], options.secondaries)
def test_secondaryExplicitPort(self):
"""
An argument of the form C{"ip:port/domain"} can be used to specify an
alternate port number for which to act as a secondary.
"""
options = Options()
options.parseOptions(['--secondary', '1.2.3.4:5353/example.com'])
self.assertEqual(
[(('1.2.3.4', 5353), ['example.com'])], options.secondaries)
def test_secondaryAuthorityServices(self):
"""
After parsing I{--secondary} options, L{Options} constructs a
L{SecondaryAuthorityService} instance for each configured secondary.
"""
options = Options()
options.parseOptions(['--secondary', '1.2.3.4:5353/example.com',
'--secondary', '1.2.3.5:5354/example.com'])
self.assertEqual(len(options.svcs), 2)
secondary = options.svcs[0]
self.assertIsInstance(options.svcs[0], SecondaryAuthorityService)
self.assertEqual(secondary.primary, '1.2.3.4')
self.assertEqual(secondary._port, 5353)
secondary = options.svcs[1]
self.assertIsInstance(options.svcs[1], SecondaryAuthorityService)
self.assertEqual(secondary.primary, '1.2.3.5')
self.assertEqual(secondary._port, 5354)
def test_recursiveConfiguration(self):
"""
Recursive DNS lookups, if enabled, should be a last-resort option.
Any other lookup method (cache, local lookup, etc.) should take
precedence over recursive lookups
"""
options = Options()
options.parseOptions(['--hosts-file', 'hosts.txt', '--recursive'])
ca, cl = _buildResolvers(options)
# Extra cleanup, necessary on POSIX because client.Resolver doesn't know
# when to stop parsing resolv.conf. See #NNN for improving this.
for x in cl:
if isinstance(x, ResolverChain):
recurser = x.resolvers[-1]
if isinstance(recurser, Resolver):
recurser._parseCall.cancel()
self.assertIsInstance(cl[-1], ResolverChain)<|fim▁end|> | """
options = Options()
|
<|file_name|>ber.py<|end_file_name|><|fim▁begin|>def bits_set(x):
bits = 0
for i in range(0,8):
if (x & (1<<i))>0:
bits += 1
return bits
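# Quick sanity check (illustrative, not from the original source):
# bits_set(0b1011) == 3 == bin(0b1011).count("1")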
def find_ber(sent, received):
assert(len(received)<=len(sent))
if len(received) < len(sent)/2:
print "frame detection error, more than half of the frames were lost!"
return 0.5
errors = 0<|fim▁hole|><|fim▁end|> | for i in range(0,len(received)):
errors += bits_set(sent[i] ^ received[i]) # ^ is xor
return float(errors)/float(8*len(received)) |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import create from './create.js';
import {InstanceDefaults} from './types.js';
import Options from './core/options.js';
const defaults: InstanceDefaults = {
options: new Options(),
handlers: [],
mutableDefaults: false,
};
const got = create(defaults);
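// Minimal usage sketch (assumes got v12+ ESM and a reachable URL):
//   const body = await got('https://example.com').text();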
export default got;
export {got};
export {default as Options} from './core/options.js';
export * from './core/options.js';
export * from './core/response.js';
export type {default as Request} from './core/index.js';
export * from './core/index.js';
export * from './core/errors.js';
export {Delays} from './core/timed-out.js';
export {default as calculateRetryDelay} from './core/calculate-retry-delay.js';<|fim▁hole|>export * from './types.js';
export {default as create} from './create.js';
export {default as parseLinkHeader} from './core/parse-link-header.js';<|fim▁end|> | export * from './as-promise/types.js'; |
<|file_name|>LiloConf.py<|end_file_name|><|fim▁begin|>#
#LiloConf.py
#
import sys, re, os
import logging
import GrubConf
class LiloImage(object):
def __init__(self, lines, path):
self.reset(lines, path)
def __repr__(self):
return ("title: %s\n"
" root: %s\n"
" kernel: %s\n"
" args: %s\n"
" initrd: %s\n" %(self.title, self.root, self.kernel,
self.args, self.initrd))
def reset(self, lines, path):
self._initrd = self._kernel = self._readonly = None
self._args = ""
self.title = ""
self.lines = []
self.path = path
self.root = ""
map(self.set_from_line, lines)
def set_from_line(self, line, replace = None):
(com, arg) = GrubConf.grub_exact_split(line, 2)
if self.commands.has_key(com):
if self.commands[com] is not None:
setattr(self, self.commands[com], re.sub('^"(.+)"$', r"\1", arg.strip()))
else:
logging.info("Ignored image directive %s" %(com,))
else:
logging.warning("Unknown image directive %s" %(com,))
# now put the line in the list of lines
if replace is None:
self.lines.append(line)
else:
self.lines.pop(replace)
self.lines.insert(replace, line)
def set_kernel(self, val):
self._kernel = (None, self.path + "/" + val)
def get_kernel(self):
return self._kernel
kernel = property(get_kernel, set_kernel)
def set_initrd(self, val):
self._initrd = (None, self.path + "/" + val)
def get_initrd(self):
return self._initrd
initrd = property(get_initrd, set_initrd)
def set_args(self, val):
self._args = val
def get_args(self):
args = self._args
if self.root:
args += " root=" + self.root
if self.readonly:
args += " ro"
return args
args = property(get_args, set_args)
def set_readonly(self, val):
self._readonly = 1
def get_readonly(self):
return self._readonly
readonly = property(get_readonly, set_readonly)
# set up command handlers
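# Maps lilo.conf directives to LiloImage attributes; a value of None means
# the directive is recognised but deliberately ignored.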
commands = { "label": "title",
"root": "root",
"rootnoverify": "root",
"image": "kernel",
"initrd": "initrd",
"append": "args",
"read-only": "readonly",
"chainloader": None,
"module": None}
class LiloConfigFile(object):
def __init__(self, fn = None):
self.filename = fn
self.images = []
self.timeout = -1
self._default = 0
if fn is not None:
self.parse()
def parse(self, buf = None):
if buf is None:
if self.filename is None:
raise ValueError, "No config file defined to parse!"
f = open(self.filename, 'r')
lines = f.readlines()
f.close()
else:
lines = buf.split("\n")
path = os.path.dirname(self.filename)
img = []
for l in lines:
l = l.strip()
# skip blank lines
if len(l) == 0:
continue
# skip comments
if l.startswith('#'):
continue
# new image
if l.startswith("image"):
if len(img) > 0:
self.add_image(LiloImage(img, path))
img = [l]
continue
if len(img) > 0:
img.append(l)
continue
(com, arg) = GrubConf.grub_exact_split(l, 2)
if self.commands.has_key(com):
if self.commands[com] is not None:
setattr(self, self.commands[com], arg.strip())
else:
logging.info("Ignored directive %s" %(com,))
else:
logging.warning("Unknown directive %s" %(com,))
if len(img) > 0:
self.add_image(LiloImage(img, path))
def add_image(self, image):
self.images.append(image)
def _get_default(self):
for i in range(0, len(self.images)):
if self.images[i].title == self._default:
return i
return 0
def _set_default(self, val):
self._default = val
default = property(_get_default, _set_default)
commands = { "default": "self.default",
"timeout": "self.timeout",
"prompt": None,<|fim▁hole|> }
if __name__ == "__main__":
if sys.argv < 2:
raise RuntimeError, "Need a grub.conf to read"
g = LiloConfigFile(sys.argv[1])
for i in g.images:
print i #, i.title, i.root, i.kernel, i.args, i.initrd
print g.default<|fim▁end|> | "relocatable": None, |
<|file_name|>radialActuationDiskSource.C<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------*\
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | Copyright (C) 2011-2014 OpenFOAM Foundation
\\/ M anipulation |
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
OpenFOAM is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
\*---------------------------------------------------------------------------*/
#include "radialActuationDiskSource.H"
#include "geometricOneField.H"
#include "addToRunTimeSelectionTable.H"
// * * * * * * * * * * * * * * Static Data Members * * * * * * * * * * * * * //
namespace Foam
{
namespace fv
{
defineTypeNameAndDebug(radialActuationDiskSource, 0);
addToRunTimeSelectionTable
(
option,
radialActuationDiskSource,
dictionary
);
}
}
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
Foam::fv::radialActuationDiskSource::radialActuationDiskSource
(
const word& name,
const word& modelType,
const dictionary& dict,
const fvMesh& mesh
)
:
actuationDiskSource(name, modelType, dict, mesh),
radialCoeffs_(coeffs_.lookup("coeffs"))
{
Info<< " - creating radial actuation disk zone: " << name_ << endl;
}
// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //
void Foam::fv::radialActuationDiskSource::addSup
(
fvMatrix<vector>& eqn,
const label fieldI
)
{
const scalargpuField& cellsV = mesh_.V().getField();
vectorgpuField& Usource = eqn.source();
const vectorgpuField& U = eqn.psi();
if (V_ > VSMALL)
{
addRadialActuationDiskAxialInertialResistance
(
Usource,
cells_,
cellsV,
geometricOneField(),<|fim▁hole|> );
}
}
void Foam::fv::radialActuationDiskSource::addSup
(
const volScalarField& rho,
fvMatrix<vector>& eqn,
const label fieldI
)
{
const scalargpuField& cellsV = mesh_.V();
vectorgpuField& Usource = eqn.source();
const vectorgpuField& U = eqn.psi();
if (V_ > VSMALL)
{
addRadialActuationDiskAxialInertialResistance
(
Usource,
cells_,
cellsV,
rho,
U
);
}
}
void Foam::fv::radialActuationDiskSource::writeData(Ostream& os) const
{
actuationDiskSource::writeData(os);
}
bool Foam::fv::radialActuationDiskSource::read(const dictionary& dict)
{
if (option::read(dict))
{
coeffs_.readIfPresent("diskDir", diskDir_);
coeffs_.readIfPresent("Cp", Cp_);
coeffs_.readIfPresent("Ct", Ct_);
coeffs_.readIfPresent("diskArea", diskArea_);
coeffs_.lookup("coeffs") >> radialCoeffs_;
return true;
}
else
{
return false;
}
}
// ************************************************************************* //<|fim▁end|> | U |
<|file_name|>kvmexit.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# kvmexit.py
#
# Display the exit_reason and its statistics for each vm exit
# for all vcpus of all virtual machines. For example:
# $./kvmexit.py
# PID TID KVM_EXIT_REASON COUNT
# 1273551 1273568 EXIT_REASON_MSR_WRITE 6
# 1274253 1274261 EXIT_REASON_EXTERNAL_INTERRUPT 1
# 1274253 1274261 EXIT_REASON_HLT 12
# ...
#
# Besides, we also allow users to specify one pid, tid(s), or one
# pid and its vcpu. See kvmexit_example.txt for more examples.
#
# @PID: each virtual machine's pid in the user space.
# @TID: the user space's thread of each vcpu of that virtual machine.
# @KVM_EXIT_REASON: the reason why the vm exits.
# @COUNT: the counts of the @KVM_EXIT_REASONS.
#
# REQUIRES: Linux 4.7+ (BPF_PROG_TYPE_TRACEPOINT support)
#
# Copyright (c) 2021 ByteDance Inc. All rights reserved.
#
# Author(s):
# Fei Li <[email protected]>
from __future__ import print_function
from time import sleep
from bcc import BPF
import argparse
import multiprocessing
import os
import subprocess
#
# Process Arguments
#
def valid_args_list(args):
args_list = args.split(",")
for arg in args_list:
try:
int(arg)
except:
raise argparse.ArgumentTypeError("must be valid integer")
return args_list
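# e.g. valid_args_list("395490,395491") -> ["395490", "395491"]; a
# non-numeric entry raises argparse.ArgumentTypeError.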
# arguments
examples = """examples:
./kvmexit # Display kvm_exit_reason and its statistics in real-time until Ctrl-C
./kvmexit 5 # Display in real-time after sleeping 5s
./kvmexit -p 3195281 # Collapse all tids for pid 3195281 with exit reasons sorted in descending order
./kvmexit -p 3195281 20 # Collapse all tids for pid 3195281 with exit reasons sorted in descending order, and display after sleeping 20s
./kvmexit -p 3195281 -v 0 # Display only vcpu0 for pid 3195281, descending sort by default
./kvmexit -p 3195281 -a # Display all tids for pid 3195281
./kvmexit -t 395490 # Display only for tid 395490 with exit reasons sorted in descending order
./kvmexit -t 395490 20 # Display only for tid 395490 with exit reasons sorted in descending order after sleeping 20s
./kvmexit -T '395490,395491' # Display for a union like {395490, 395491}
"""
parser = argparse.ArgumentParser(
description="Display kvm_exit_reason and its statistics at a timed interval",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=examples)
parser.add_argument("duration", nargs="?", default=99999999, type=int, help="show delta for next several seconds")
parser.add_argument("-p", "--pid", type=int, help="trace this PID only")
exgroup = parser.add_mutually_exclusive_group()
exgroup.add_argument("-t", "--tid", type=int, help="trace this TID only")
exgroup.add_argument("-T", "--tids", type=valid_args_list, help="trace a comma separated series of tids with no space in between")
exgroup.add_argument("-v", "--vcpu", type=int, help="trace this vcpu only")
exgroup.add_argument("-a", "--alltids", action="store_true", help="trace all tids for this pid")
args = parser.parse_args()
duration = int(args.duration)
#
# Setup BPF
#
# load BPF program
bpf_text = """
#include <linux/delay.h>
#define REASON_NUM 69
#define TGID_NUM 1024
struct exit_count {
u64 exit_ct[REASON_NUM];
};
BPF_PERCPU_ARRAY(init_value, struct exit_count, 1);
BPF_TABLE("percpu_hash", u64, struct exit_count, pcpu_kvm_stat, TGID_NUM);
struct cache_info {
u64 cache_pid_tgid;
struct exit_count cache_exit_ct;
};
BPF_PERCPU_ARRAY(pcpu_cache, struct cache_info, 1);
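// pcpu_cache holds the most recent (pid_tgid -> exit counters) entry per
// physical CPU, so consecutive exits from the same vCPU thread skip the
// percpu_hash lookup; it is written back to pcpu_kvm_stat on a cache miss.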
FUNC_ENTRY {
int cache_miss = 0;
int zero = 0;
u32 er = GET_ER;
if (er >= REASON_NUM) {
return 0;
}
u64 cur_pid_tgid = bpf_get_current_pid_tgid();
u32 tgid = cur_pid_tgid >> 32;
u32 pid = cur_pid_tgid;
if (THREAD_FILTER)
return 0;
struct exit_count *tmp_info = NULL, *initial = NULL;
struct cache_info *cache_p;
cache_p = pcpu_cache.lookup(&zero);
if (cache_p == NULL) {
return 0;
}
if (cache_p->cache_pid_tgid == cur_pid_tgid) {
//a. If the cur_pid_tgid hit this physical cpu consecutively, save it to pcpu_cache
tmp_info = &cache_p->cache_exit_ct;
} else {
//b. If another pid_tgid matches this pcpu for the last hit, OR it is the first time to hit this physical cpu.
cache_miss = 1;
// b.a Try to load the last cache struct if exists.
tmp_info = pcpu_kvm_stat.lookup(&cur_pid_tgid);
// b.b If it is the first time for the cur_pid_tgid to hit this pcpu, employ a
// per_cpu array to initialize pcpu_kvm_stat's exit_count with each exit reason's count is zero
if (tmp_info == NULL) {
initial = init_value.lookup(&zero);
if (initial == NULL) {
return 0;
}
pcpu_kvm_stat.update(&cur_pid_tgid, initial);
tmp_info = pcpu_kvm_stat.lookup(&cur_pid_tgid);
// To pass the verifier
if (tmp_info == NULL) {
return 0;
}
}
}
if (er < REASON_NUM) {
tmp_info->exit_ct[er]++;
if (cache_miss == 1) {
if (cache_p->cache_pid_tgid != 0) {
// b.*.a Let's save the last hit cache_info into kvm_stat.
pcpu_kvm_stat.update(&cache_p->cache_pid_tgid, &cache_p->cache_exit_ct);
}
// b.* As the cur_pid_tgid meets current pcpu_cache_array for the first time, save it.
cache_p->cache_pid_tgid = cur_pid_tgid;
bpf_probe_read(&cache_p->cache_exit_ct, sizeof(*tmp_info), tmp_info);
}
return 0;
}
return 0;
}
"""
# format output
exit_reasons = (
"EXCEPTION_NMI",
"EXTERNAL_INTERRUPT",
"TRIPLE_FAULT",
"INIT_SIGNAL",
"N/A",
"N/A",
"N/A",
"INTERRUPT_WINDOW",
"NMI_WINDOW",
"TASK_SWITCH",
"CPUID",
"N/A",
"HLT",
"INVD",
"INVLPG",
"RDPMC",
"RDTSC",
"N/A",
"VMCALL",
"VMCLEAR",
"VMLAUNCH",
"VMPTRLD",
"VMPTRST",
"VMREAD",
"VMRESUME",
"VMWRITE",
"VMOFF",
"VMON",
"CR_ACCESS",
"DR_ACCESS",
"IO_INSTRUCTION",
"MSR_READ",
"MSR_WRITE",
"INVALID_STATE",
"MSR_LOAD_FAIL",
"N/A",
"MWAIT_INSTRUCTION",
"MONITOR_TRAP_FLAG",
"N/A",
"MONITOR_INSTRUCTION",
"PAUSE_INSTRUCTION",
"MCE_DURING_VMENTRY",
"N/A",
"TPR_BELOW_THRESHOLD",
"APIC_ACCESS",
"EOI_INDUCED",
"GDTR_IDTR",
"LDTR_TR",
"EPT_VIOLATION",
"EPT_MISCONFIG",
"INVEPT",
"RDTSCP",
"PREEMPTION_TIMER",
"INVVPID",
"WBINVD",
"XSETBV",
"APIC_WRITE",
"RDRAND",
"INVPCID",
"VMFUNC",
"ENCLS",
"RDSEED",
"PML_FULL",
"XSAVES",
"XRSTORS",
"N/A",
"N/A",
"UMWAIT",
"TPAUSE"
)
#
# Do some checks
#
try:
# Currently, only adapte on intel architecture
cmd = "cat /proc/cpuinfo | grep vendor_id | head -n 1"
arch_info = subprocess.check_output(cmd, shell=True).strip()
if b"Intel" in arch_info:
pass
else:
raise Exception("Currently we only support Intel architecture, please do expansion if needs more.")
# Check if kvm module is loaded
if os.access("/dev/kvm", os.R_OK | os.W_OK):
pass
else:
raise Exception("Please insmod kvm module to use kvmexit tool.")
except Exception as e:
raise Exception("Failed to do precondition check, due to: %s." % e)
try:
if BPF.support_raw_tracepoint_in_module():
# Let's firstly try raw_tracepoint_in_module
func_entry = "RAW_TRACEPOINT_PROBE(kvm_exit)"
get_er = "ctx->args[0]"
else:
# If raw_tp_in_module is not supported, fall back to regular tp
func_entry = "TRACEPOINT_PROBE(kvm, kvm_exit)"
get_er = "args->exit_reason"
except Exception as e:
raise Exception("Failed to catch kvm exit reasons due to: %s" % e)
<|fim▁hole|>
def find_tid(tgt_dir, tgt_vcpu):
for tid in os.listdir(tgt_dir):
path = tgt_dir + "/" + tid + "/comm"
fp = open(path, "r")
comm = fp.read()
if (comm.find(tgt_vcpu) != -1):
return tid
return -1
# set process/thread filter
thread_context = ""
header_format = ""
need_collapse = not args.alltids
if args.tid is not None:
thread_context = "TID %s" % args.tid
thread_filter = 'pid != %s' % args.tid
elif args.tids is not None:
thread_context = "TIDS %s" % args.tids
thread_filter = "pid != " + " && pid != ".join(args.tids)
header_format = "TIDS "
elif args.pid is not None:
thread_context = "PID %s" % args.pid
thread_filter = 'tgid != %s' % args.pid
if args.vcpu is not None:
thread_context = "PID %s VCPU %s" % (args.pid, args.vcpu)
# transfer vcpu to tid
tgt_dir = '/proc/' + str(args.pid) + '/task'
tgt_vcpu = "CPU " + str(args.vcpu)
args.tid = find_tid(tgt_dir, tgt_vcpu)
if args.tid == -1:
raise Exception("There's no v%s for PID %d." % (tgt_vcpu, args.pid))
thread_filter = 'pid != %s' % args.tid
elif args.alltids:
thread_context = "PID %s and its all threads" % args.pid
header_format = "TID "
else:
thread_context = "all threads"
thread_filter = '0'
header_format = "PID TID "
bpf_text = bpf_text.replace('THREAD_FILTER', thread_filter)
# For kernel >= 5.0, use RAW_TRACEPOINT_MODULE for performance consideration
bpf_text = bpf_text.replace('FUNC_ENTRY', func_entry)
bpf_text = bpf_text.replace('GET_ER', get_er)
b = BPF(text=bpf_text)
# header
print("Display kvm exit reasons and statistics for %s" % thread_context, end="")
if duration < 99999999:
print(" after sleeping %d secs." % duration)
else:
print("... Hit Ctrl-C to end.")
try:
sleep(duration)
except KeyboardInterrupt:
print()
# Currently, sorting multiple tids in descending order is not supported.
if (args.pid or args.tid):
ct_reason = []
if args.pid:
tgid_exit = [0 for i in range(len(exit_reasons))]
# output
print("%s%-35s %s" % (header_format, "KVM_EXIT_REASON", "COUNT"))
pcpu_kvm_stat = b["pcpu_kvm_stat"]
pcpu_cache = b["pcpu_cache"]
for k, v in pcpu_kvm_stat.items():
tgid = k.value >> 32
pid = k.value & 0xffffffff
for i in range(0, len(exit_reasons)):
sum1 = 0
for inner_cpu in range(0, multiprocessing.cpu_count()):
cachePIDTGID = pcpu_cache[0][inner_cpu].cache_pid_tgid
# Take priority to check if it is in cache
if cachePIDTGID == k.value:
sum1 += pcpu_cache[0][inner_cpu].cache_exit_ct.exit_ct[i]
# If not in cache, find from kvm_stat
else:
sum1 += v[inner_cpu].exit_ct[i]
if sum1 == 0:
continue
if (args.pid and args.pid == tgid and need_collapse):
tgid_exit[i] += sum1
elif (args.tid and args.tid == pid):
ct_reason.append((sum1, i))
elif not need_collapse or args.tids:
print("%-8u %-35s %-8u" % (pid, exit_reasons[i], sum1))
else:
print("%-8u %-8u %-35s %-8u" % (tgid, pid, exit_reasons[i], sum1))
# Display only for the target tid in descending sort
if (args.tid and args.tid == pid):
ct_reason.sort(reverse=True)
for i in range(0, len(ct_reason)):
if ct_reason[i][0] == 0:
continue
print("%-35s %-8u" % (exit_reasons[ct_reason[i][1]], ct_reason[i][0]))
break
# Aggregate all tids' counts for this args.pid in descending sort
if args.pid and need_collapse:
for i in range(0, len(exit_reasons)):
ct_reason.append((tgid_exit[i], i))
ct_reason.sort(reverse=True)
for i in range(0, len(ct_reason)):
if ct_reason[i][0] == 0:
continue
print("%-35s %-8u" % (exit_reasons[ct_reason[i][1]], ct_reason[i][0]))<|fim▁end|> | |
<|file_name|>webpack.config.js<|end_file_name|><|fim▁begin|>var path = require('path');
module.exports = {
// entry: ['babel-polyfill', './src/main.js'],
entry: './src/main.js',
target: 'node',
output: {
filename: 'main.js',<|fim▁hole|> module: {
rules: [
{ test: /\.js$/,
exclude: /node_modules/,
use: {
loader: 'babel-loader',
options: {
presets: [
['env', {
targets: { node: '6.6.0' }
}]
]
}
}
}
]
},
externals: {
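// resolved at runtime with require('lodash') instead of being bundled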
'lodash' : 'commonjs lodash'
}
};<|fim▁end|> | path: path.resolve(__dirname, 'build')
}, |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#![allow(unused_variables)]
fn main() {
// Rust let bindings are immutable by default.
let z = 3;
// This will raise a compiler error:
// z += 2; //~ ERROR cannot assign twice to immutable variable `z`
// You must declare a variable mutable explicitly:
let mut x = 3;
// Similarly, references are immutable by default e.g.
// The following lines would raise a compiler error. Even though x is mutable, y is an
// immutable reference.<|fim▁hole|> // let y = &x;
// *y += 2; //~ ERROR cannot borrow `x` as mutable because it is also borrowed as immutable
let y = &mut x;
*y += 2; // Works
// Note that though y is now a mutable reference, y itself is still immutable e.g.
// let mut z = 5;
// y = &mut z; //~ ERROR re-assignment of immutable variable `y`
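// With non-lexical lifetimes (Rust 2018+), `x` may be borrowed again once
// `y` is no longer used (an added illustration, not in the original):
let w = &x;
let _ = *w + 2; // reading through the new shared reference is fine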
}<|fim▁end|> | |
<|file_name|>logger.js<|end_file_name|><|fim▁begin|>QUnit.module('pages > strategy > tabs > logger', function () {
const logger = KC3StrategyTabs.logger.definition;
QUnit.module('filters > logTypes', {
beforeEach() { this.subject = logger.filterFuncs.logTypes; },
}, function () {
QUnit.test('check type is set to visible', function (assert) {
logger.filterState.logTypes = { yellow: true, purple: false };
assert.ok(this.subject({ type: 'yellow' }));
assert.notOk(this.subject({ type: 'purple' }));
});
});
QUnit.module('filters > contexts', {
beforeEach() { this.subject = logger.filterFuncs.contexts; },
}, function () {
QUnit.test('check context is set to visible', function (assert) {
logger.filterState.contexts = { banana: true, potato: false };
assert.ok(this.subject({ context: 'banana' }));
assert.notOk(this.subject({ context: 'potato' }));
});
});
QUnit.module('filters > logSearch', {
beforeEach() { this.subject = logger.filterFuncs.logSearch; },
}, function () {
QUnit.test('search message', function (assert) {
logger.filterState.logSearch = 'small';
assert.equal(this.subject({ message: 'a big blue dog' }), false);
assert.equal(this.subject({ message: 'a small blue dog' }), true);
});
QUnit.test('search data', function (assert) {
logger.filterState.logSearch = 'banana';
assert.equal(this.subject({ data: ['red', 'blue'] }), false);
assert.equal(this.subject({ data: ['apple', 'orange', 'banana'] }), true);
});
QUnit.test('case-insensitive search', function (assert) {
logger.filterState.logSearch = 'tea';
assert.equal(this.subject({ message: 'Drinks', data: ['Coffee', 'TEA'] }), true);
});
});
QUnit.module('isDateSplit', {
beforeEach() { this.subject = logger.isDateSplit; },
}, function () {
QUnit.test('true if specified times are on different days', function (assert) {
const result = this.subject({ timestamp: new Date(2017, 1, 1).getTime() },
{ timestamp: new Date(2017, 1, 2).getTime() });
assert.equal(result, true);
});
QUnit.test('false if specified times are on the same day', function (assert) {
const result = this.subject({ timestamp: new Date(2017, 1, 1, 5) },
{ timestamp: new Date(2017, 1, 1, 20) });
assert.equal(result, false);
});
});
QUnit.module('createDateSeparator', {
beforeEach() { this.subject = logger.createDateSeparator; },
}, function () {
QUnit.test('success', function (assert) {
const entry = { timestamp: new Date().getTime() };
const result = this.subject(entry);
assert.deepEqual(result, {
type: 'dateSeparator',
timestamp: entry.timestamp,
});
});
});
QUnit.module('elementFactory > error > formatStack', {
beforeEach() { this.subject = logger.formatStack; },
}, function () {
QUnit.test('undefined stack', function (assert) {
const result = this.subject();
assert.equal(result, '');
});
QUnit.test('replace chrome extension id', function (assert) {
const stack = `at loadLogEntries (chrome-extension://hgnaklcechmjlpaeamgcnagnhpjhllne/pages/strategy/tabs/logger/logger.js:56:18)
at Object.execute (chrome-extension://hgnaklcechmjlpaeamgcnagnhpjhllne/pages/strategy/tabs/logger/logger.js:30:21)
at chrome-extension://hgnaklcechmjlpaeamgcnagnhpjhllne/library/objects/StrategyTab.js:80:21
at Object.success (chrome-extension://hgnaklcechmjlpaeamgcnagnhpjhllne/library/objects/StrategyTab.js:40:6)
at i (chrome-extension://hgnaklcechmjlpaeamgcnagnhpjhllne/assets/js/jquery.min.js:2:27151)
at Object.fireWith [as resolveWith] (chrome-extension://hgnaklcechmjlpaeamgcnagnhpjhllne/assets/js/jquery.min.js:2:27914)
at z (chrome-extension://hgnaklcechmjlpaeamgcnagnhpjhllne/assets/js/jquery.min.js:4:12059)
at XMLHttpRequest.<anonymous> (chrome-extension://hgnaklcechmjlpaeamgcnagnhpjhllne/assets/js/jquery.min.js:4:15619)`;
const result = this.subject(stack);
assert.equal(result, `at loadLogEntries (src/pages/strategy/tabs/logger/logger.js:56:18)
at Object.execute (src/pages/strategy/tabs/logger/logger.js:30:21)
at src/library/objects/StrategyTab.js:80:21
at Object.success (src/library/objects/StrategyTab.js:40:6)
at i (src/assets/js/jquery.min.js:2:27151)
at Object.fireWith [as resolveWith] (src/assets/js/jquery.min.js:2:27914)
at z (src/assets/js/jquery.min.js:4:12059)
at XMLHttpRequest.<anonymous> (src/assets/js/jquery.min.js:4:15619)`);
});
});
QUnit.module('getCallsite', {
beforeEach() { this.subject = logger.getCallsite; },
}, function () {
QUnit.test('named function', function (assert) {
const stack = `Error: message
at loadLogEntries (chrome-extension://hgnaklcechmjlpaeamgcnagnhpjhllne/pages/strategy/tabs/logger/logger.js:56:18)
at Object.execute (chrome-extension://hgnaklcechmjlpaeamgcnagnhpjhllne/pages/strategy/tabs/logger/logger.js:30:21)
at chrome-extension://hgnaklcechmjlpaeamgcnagnhpjhllne/library/objects/StrategyTab.js:80:21
at Object.success (chrome-extension://hgnaklcechmjlpaeamgcnagnhpjhllne/library/objects/StrategyTab.js:40:6)
at i (chrome-extension://hgnaklcechmjlpaeamgcnagnhpjhllne/assets/js/jquery.min.js:2:27151)
at Object.fireWith [as resolveWith] (chrome-extension://hgnaklcechmjlpaeamgcnagnhpjhllne/assets/js/jquery.min.js:2:27914)
at z (chrome-extension://hgnaklcechmjlpaeamgcnagnhpjhllne/assets/js/jquery.min.js:4:12059)
at XMLHttpRequest.<anonymous> (chrome-extension://hgnaklcechmjlpaeamgcnagnhpjhllne/assets/js/jquery.min.js:4:15619)`;
const result = this.subject(stack);
assert.deepEqual(result, {
short: 'logger.js:56',
full: 'src/pages/strategy/tabs/logger/logger.js:56:18',
});
});
QUnit.test('anonymous function', function (assert) {
const stack = `Error: gameScreenChg
at chrome-extension://hgnaklcechmjlpaeamgcnagnhpjhllne/library/modules/Service.js:471:12
at EventImpl.dispatchToListener (extensions::event_bindings:388:22)
at Event.publicClassPrototype.(anonymous function) [as dispatchToListener] (extensions::utils:149:26)
at EventImpl.dispatch_ (extensions::event_bindings:372:35)
at EventImpl.dispatch (extensions::event_bindings:394:17)
at Event.publicClassPrototype.(anonymous function) [as dispatch] (extensions::utils:149:26)
at messageListener (extensions::messaging:196:29)`;<|fim▁hole|> short: 'Service.js:471',
full: 'src/library/modules/Service.js:471:12',
});
});
});
});<|fim▁end|> |
const result = this.subject(stack);
assert.deepEqual(result, { |
<|file_name|>compute_group_group.cpp<|end_file_name|><|fim▁begin|>/* ----------------------------------------------------------------------
LAMMPS - Large-scale Atomic/Molecular Massively Parallel Simulator
http://lammps.sandia.gov, Sandia National Laboratories
Steve Plimpton, [email protected]
Copyright (2003) Sandia Corporation. Under the terms of Contract
DE-AC04-94AL85000 with Sandia Corporation, the U.S. Government retains
certain rights in this software. This software is distributed under
the GNU General Public License.
See the README file in the top-level LAMMPS directory.
------------------------------------------------------------------------- */
/* ----------------------------------------------------------------------
Contributing author: Naveen Michaud-Agrawal (Johns Hopkins U)
K-space terms added by Stan Moore (BYU)
------------------------------------------------------------------------- */
#include <mpi.h>
#include <string.h>
#include "compute_group_group.h"
#include "atom.h"
#include "update.h"
#include "force.h"
#include "pair.h"
#include "neighbor.h"
#include "neigh_request.h"
#include "neigh_list.h"
#include "group.h"
#include "kspace.h"
#include "error.h"
#include <math.h>
#include "comm.h"
#include "domain.h"
#include "math_const.h"
using namespace LAMMPS_NS;
using namespace MathConst;
#define SMALL 0.00001
enum{OFF,INTER,INTRA};
/* ---------------------------------------------------------------------- */
ComputeGroupGroup::ComputeGroupGroup(LAMMPS *lmp, int narg, char **arg) :
Compute(lmp, narg, arg),
group2(NULL)
{
if (narg < 4) error->all(FLERR,"Illegal compute group/group command");
scalar_flag = vector_flag = 1;
size_vector = 3;
extscalar = 1;
extvector = 1;
int n = strlen(arg[3]) + 1;
group2 = new char[n];
strcpy(group2,arg[3]);
jgroup = group->find(group2);
if (jgroup == -1)
error->all(FLERR,"Compute group/group group ID does not exist");
jgroupbit = group->bitmask[jgroup];
pairflag = 1;
kspaceflag = 0;
boundaryflag = 1;
molflag = OFF;
int iarg = 4;
while (iarg < narg) {
if (strcmp(arg[iarg],"pair") == 0) {
if (iarg+2 > narg)
error->all(FLERR,"Illegal compute group/group command");
if (strcmp(arg[iarg+1],"yes") == 0) pairflag = 1;
else if (strcmp(arg[iarg+1],"no") == 0) pairflag = 0;
else error->all(FLERR,"Illegal compute group/group command");
iarg += 2;
} else if (strcmp(arg[iarg],"kspace") == 0) {
if (iarg+2 > narg)
error->all(FLERR,"Illegal compute group/group command");
if (strcmp(arg[iarg+1],"yes") == 0) kspaceflag = 1;
else if (strcmp(arg[iarg+1],"no") == 0) kspaceflag = 0;
else error->all(FLERR,"Illegal compute group/group command");
iarg += 2;
} else if (strcmp(arg[iarg],"boundary") == 0) {
if (iarg+2 > narg)
error->all(FLERR,"Illegal compute group/group command");
if (strcmp(arg[iarg+1],"yes") == 0) boundaryflag = 1;
else if (strcmp(arg[iarg+1],"no") == 0) boundaryflag = 0;
else error->all(FLERR,"Illegal compute group/group command");
iarg += 2;
} else if (strcmp(arg[iarg],"molecule") == 0) {
if (iarg+2 > narg)
error->all(FLERR,"Illegal compute group/group command");
if (strcmp(arg[iarg+1],"off") == 0) molflag = OFF;
else if (strcmp(arg[iarg+1],"inter") == 0) molflag = INTER;
else if (strcmp(arg[iarg+1],"intra") == 0) molflag = INTRA;
else error->all(FLERR,"Illegal compute group/group command");
if (molflag != OFF && atom->molecule_flag == 0)
error->all(FLERR,"Compute group/group molecule requires molecule IDs");
iarg += 2;
} else error->all(FLERR,"Illegal compute group/group command");
}
vector = new double[3];
}
/* ---------------------------------------------------------------------- */
ComputeGroupGroup::~ComputeGroupGroup()
{
delete [] group2;
delete [] vector;
}
/* ---------------------------------------------------------------------- */
void ComputeGroupGroup::init()
{
// if non-hybrid, then error if single_enable = 0
// if hybrid, let hybrid determine if sub-style sets single_enable = 0
if (pairflag && force->pair == NULL)
error->all(FLERR,"No pair style defined for compute group/group");
if (force->pair_match("hybrid",0) == NULL && force->pair->single_enable == 0)
error->all(FLERR,"Pair style does not support compute group/group");
// error if Kspace style does not compute group/group interactions
if (kspaceflag && force->kspace == NULL)
error->all(FLERR,"No Kspace style defined for compute group/group");
if (kspaceflag && force->kspace->group_group_enable == 0)
error->all(FLERR,"Kspace style does not support compute group/group");
if (pairflag) {
pair = force->pair;
cutsq = force->pair->cutsq;
} else pair = NULL;
if (kspaceflag) kspace = force->kspace;
else kspace = NULL;
// compute Kspace correction terms
if (kspaceflag) {
kspace_correction();
if (fabs(e_correction) > SMALL && comm->me == 0) {
char str[128];
sprintf(str,"Both groups in compute group/group have a net charge; "
"the Kspace boundary correction to energy will be non-zero");
error->warning(FLERR,str);
}
}
// recheck that group 2 has not been deleted
jgroup = group->find(group2);
if (jgroup == -1)
error->all(FLERR,"Compute group/group group ID does not exist");
jgroupbit = group->bitmask[jgroup];
// need an occasional half neighbor list
if (pairflag) {
int irequest = neighbor->request(this,instance_me);
neighbor->requests[irequest]->pair = 0;
neighbor->requests[irequest]->compute = 1;
neighbor->requests[irequest]->occasional = 1;
}
}
/* ---------------------------------------------------------------------- */
void ComputeGroupGroup::init_list(int id, NeighList *ptr)
{
list = ptr;
}
/* ---------------------------------------------------------------------- */
double ComputeGroupGroup::compute_scalar()
{
invoked_scalar = invoked_vector = update->ntimestep;
scalar = 0.0;
vector[0] = vector[1] = vector[2] = 0.0;
if (pairflag) pair_contribution();
if (kspaceflag) kspace_contribution();
return scalar;
}
/* ---------------------------------------------------------------------- */
void ComputeGroupGroup::compute_vector()
{
invoked_scalar = invoked_vector = update->ntimestep;
scalar = 0.0;
vector[0] = vector[1] = vector[2] = 0.0;
if (pairflag) pair_contribution();
if (kspaceflag) kspace_contribution();
}
/* ---------------------------------------------------------------------- */
void ComputeGroupGroup::pair_contribution()
{
int i,j,ii,jj,inum,jnum,itype,jtype;
double xtmp,ytmp,ztmp,delx,dely,delz;
double rsq,eng,fpair,factor_coul,factor_lj;
int *ilist,*jlist,*numneigh,**firstneigh;
double **x = atom->x;
tagint *molecule = atom->molecule;
int *type = atom->type;
int *mask = atom->mask;
int nlocal = atom->nlocal;
double *special_coul = force->special_coul;
double *special_lj = force->special_lj;
int newton_pair = force->newton_pair;
// invoke half neighbor list (will copy or build if necessary)
neighbor->build_one(list);
inum = list->inum;
ilist = list->ilist;
numneigh = list->numneigh;
firstneigh = list->firstneigh;
// loop over neighbors of my atoms
// skip if I,J are not in 2 groups
double one[4];
one[0] = one[1] = one[2] = one[3] = 0.0;
for (ii = 0; ii < inum; ii++) {
i = ilist[ii];
// skip if atom I is not in either group
if (!(mask[i] & groupbit || mask[i] & jgroupbit)) continue;
xtmp = x[i][0];
ytmp = x[i][1];
ztmp = x[i][2];
itype = type[i];
jlist = firstneigh[i];
jnum = numneigh[i];
for (jj = 0; jj < jnum; jj++) {
j = jlist[jj];
factor_lj = special_lj[sbmask(j)];
factor_coul = special_coul[sbmask(j)];
j &= NEIGHMASK;
// skip if atom J is not in either group
if (!(mask[j] & groupbit || mask[j] & jgroupbit)) continue;
// skip if atoms I,J are only in the same group
int ij_flag = 0;
int ji_flag = 0;
if (mask[i] & groupbit && mask[j] & jgroupbit) ij_flag = 1;
if (mask[j] & groupbit && mask[i] & jgroupbit) ji_flag = 1;
if (!ij_flag && !ji_flag) continue;
// skip if molecule IDs of atoms I,J do not satisfy molflag setting
if (molflag != OFF) {
if (molflag == INTER) {
if (molecule[i] == molecule[j]) continue;
} else {
if (molecule[i] != molecule[j]) continue;
}
}
delx = xtmp - x[j][0];
dely = ytmp - x[j][1];
delz = ztmp - x[j][2];
rsq = delx*delx + dely*dely + delz*delz;
jtype = type[j];
if (rsq < cutsq[itype][jtype]) {
eng = pair->single(i,j,itype,jtype,rsq,factor_coul,factor_lj,fpair);
// energy only computed once so tally full amount
// force tally is jgroup acting on igroup
if (newton_pair || j < nlocal) {
one[0] += eng;
if (ij_flag) {
one[1] += delx*fpair;
one[2] += dely*fpair;
one[3] += delz*fpair;
}
if (ji_flag) {
one[1] -= delx*fpair;
one[2] -= dely*fpair;
one[3] -= delz*fpair;
}
// energy computed twice so tally half amount
// only tally force if I own igroup atom
} else {
one[0] += 0.5*eng;
if (ij_flag) {
one[1] += delx*fpair;
one[2] += dely*fpair;
one[3] += delz*fpair;
}
}
}
}
}
double all[4];
MPI_Allreduce(one,all,4,MPI_DOUBLE,MPI_SUM,world);
scalar += all[0];
vector[0] += all[1]; vector[1] += all[2]; vector[2] += all[3];
}
/* ---------------------------------------------------------------------- */
void ComputeGroupGroup::kspace_contribution()
{
double *vector_kspace = force->kspace->f2group;
force->kspace->compute_group_group(groupbit,jgroupbit,0);
scalar += 2.0*force->kspace->e2group;
vector[0] += vector_kspace[0];
vector[1] += vector_kspace[1];
vector[2] += vector_kspace[2];
// subtract extra A <--> A Kspace interaction so energy matches
// real-space style of compute group-group
// add extra Kspace term to energy
force->kspace->compute_group_group(groupbit,jgroupbit,1);
scalar -= force->kspace->e2group;
// self energy correction term
<|fim▁hole|> // k=0 boundary correction term
if (boundaryflag) {
double xprd = domain->xprd;
double yprd = domain->yprd;
double zprd = domain->zprd;
// adjustment of z dimension for 2d slab Ewald
// 3d Ewald just uses zprd since slab_volfactor = 1.0
double volume = xprd*yprd*zprd*force->kspace->slab_volfactor;
scalar -= e_correction/volume;
}
}
/* ---------------------------------------------------------------------- */
void ComputeGroupGroup::kspace_correction()
{
// total charge of groups A & B, needed for correction term
double qsqsum_group,qsum_A,qsum_B;
qsqsum_group = qsum_A = qsum_B = 0.0;
double *q = atom->q;
int *mask = atom->mask;
int groupbit_A = groupbit;
int groupbit_B = jgroupbit;
for (int i = 0; i < atom->nlocal; i++) {
if ((mask[i] & groupbit_A) && (mask[i] & groupbit_B))
qsqsum_group += q[i]*q[i];
if (mask[i] & groupbit_A) qsum_A += q[i];
if (mask[i] & groupbit_B) qsum_B += q[i];
}
double tmp;
MPI_Allreduce(&qsqsum_group,&tmp,1,MPI_DOUBLE,MPI_SUM,world);
qsqsum_group = tmp;
MPI_Allreduce(&qsum_A,&tmp,1,MPI_DOUBLE,MPI_SUM,world);
qsum_A = tmp;
MPI_Allreduce(&qsum_B,&tmp,1,MPI_DOUBLE,MPI_SUM,world);
qsum_B = tmp;
double g_ewald = force->kspace->g_ewald;
double scale = 1.0;
const double qscale = force->qqrd2e * scale;
// self-energy correction
e_self = qscale * g_ewald*qsqsum_group/MY_PIS;
e_correction = 2.0*qsum_A*qsum_B;
// subtract extra AA terms
qsum_A = qsum_B = 0.0;
for (int i = 0; i < atom->nlocal; i++) {
if (!((mask[i] & groupbit_A) && (mask[i] & groupbit_B)))
continue;
if (mask[i] & groupbit_A) qsum_A += q[i];
if (mask[i] & groupbit_B) qsum_B += q[i];
}
MPI_Allreduce(&qsum_A,&tmp,1,MPI_DOUBLE,MPI_SUM,world);
qsum_A = tmp;
MPI_Allreduce(&qsum_B,&tmp,1,MPI_DOUBLE,MPI_SUM,world);
qsum_B = tmp;
// k=0 energy correction term (still need to divide by volume above)
e_correction -= qsum_A*qsum_B;
e_correction *= qscale * MY_PI2 / (g_ewald*g_ewald);
}<|fim▁end|> | scalar -= e_self;
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict'
var PassThrough = require('stream').PassThrough
var statistics = require('vfile-statistics')
var fileSetPipeline = require('./file-set-pipeline')
module.exports = run
// Run the file set pipeline once.
// `callback` is invoked with a fatal error, or with a status code (`0` on
// success, `1` on failure).
function run(options, callback) {
var settings = {}
var stdin = new PassThrough()
var tree
var detectConfig
var hasConfig
var detectIgnore
var hasIgnore
try {
stdin = process.stdin
} catch (_) {
// Obscure bug in Node (seen on Windows).
// See: <https://github.com/nodejs/node/blob/f856234/lib/internal/process/stdio.js#L82>,
// <https://github.com/AtomLinter/linter-markdown/pull/85>.
}
if (!callback) {
throw new Error('Missing `callback`')
}
if (!options || !options.processor) {
return next(new Error('Missing `processor`'))
}
// Processor.
settings.processor = options.processor
// Path to run as.
settings.cwd = options.cwd || process.cwd()
// Input.
settings.files = options.files || []
settings.extensions = (options.extensions || []).map(extension)
settings.filePath = options.filePath || null
settings.streamIn = options.streamIn || stdin
// Output.
settings.streamOut = options.streamOut || process.stdout
settings.streamError = options.streamError || process.stderr
settings.alwaysStringify = options.alwaysStringify
settings.output = options.output
settings.out = options.out
// Null overwrites config settings, `undefined` does not.
if (settings.output === null || settings.output === undefined) {
settings.output = undefined
}
if (settings.output && settings.out) {
return next(new Error('Cannot accept both `output` and `out`'))
}
// Process phase management.
tree = options.tree || false
settings.treeIn = options.treeIn
settings.treeOut = options.treeOut
settings.inspect = options.inspect
if (settings.treeIn === null || settings.treeIn === undefined) {
settings.treeIn = tree
}
if (settings.treeOut === null || settings.treeOut === undefined) {
settings.treeOut = tree
}
// Configuration.
detectConfig = options.detectConfig
hasConfig = Boolean(options.rcName || options.packageField)
if (detectConfig && !hasConfig) {
return next(
new Error('Missing `rcName` or `packageField` with `detectConfig`')
)
}
settings.detectConfig =
detectConfig === null || detectConfig === undefined
? hasConfig
: detectConfig
settings.rcName = options.rcName || null
settings.rcPath = options.rcPath || null
settings.packageField = options.packageField || null
settings.settings = options.settings || {}
settings.configTransform = options.configTransform
settings.defaultConfig = options.defaultConfig
// Ignore.
detectIgnore = options.detectIgnore
hasIgnore = Boolean(options.ignoreName)
settings.detectIgnore =
detectIgnore === null || detectIgnore === undefined
? hasIgnore
: detectIgnore
settings.ignoreName = options.ignoreName || null
settings.ignorePath = options.ignorePath || null
settings.ignorePatterns = options.ignorePatterns || []
settings.silentlyIgnore = Boolean(options.silentlyIgnore)
if (detectIgnore && !hasIgnore) {
return next(new Error('Missing `ignoreName` with `detectIgnore`'))
}
// Plugins.
settings.pluginPrefix = options.pluginPrefix || null
settings.plugins = options.plugins || {}
// Reporting.
settings.reporter = options.reporter || null
settings.reporterOptions = options.reporterOptions || null
settings.color = options.color || false<|fim▁hole|>
// Process.
fileSetPipeline.run({files: options.files || []}, settings, next)
function next(error, context) {
var stats = statistics((context || {}).files)
var failed = Boolean(
settings.frail ? stats.fatal || stats.warn : stats.fatal
)
if (error) {
callback(error)
} else {
callback(null, failed ? 1 : 0, context)
}
}
}
function extension(ext) {
return ext.charAt(0) === '.' ? ext : '.' + ext
}<|fim▁end|> | settings.silent = options.silent || false
settings.quiet = options.quiet || false
settings.frail = options.frail || false |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""Functionality to query and extract information from aligned BAM files.
"""
import collections
import contextlib
import os
import itertools
import signal
import subprocess
import numpy
import pysam
import toolz as tz
from bcbio import utils
from bcbio.bam import ref
from bcbio.distributed import objectstore
from bcbio.distributed.transaction import file_transaction
from bcbio.log import logger
from bcbio.pipeline import config_utils
import bcbio.pipeline.datadict as dd
from bcbio.provenance import do
def is_paired(bam_file):
"""Determine if a BAM file has paired reads.
Works around issues with head closing the samtools pipe using signal trick from:
http://stackoverflow.com/a/12451083/252589
"""
bam_file = objectstore.cl_input(bam_file)
cmd = ("set -o pipefail; "
"sambamba view -h {bam_file} | head -50000 | "
"sambamba view -S -F paired /dev/stdin | head -1 | wc -l")
p = subprocess.Popen(cmd.format(**locals()), shell=True,
executable=do.find_bash(),
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
preexec_fn=lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL))
stdout, stderr = p.communicate()
    if p.returncode == 0 or (p.returncode == 141 and stderr.strip() == ""):
return int(stdout) > 0
else:
raise ValueError("Failed to check paired status of BAM file: %s" % str(stderr))
def index(in_bam, config, check_timestamp=True):
"""Index a BAM file, skipping if index present.
Centralizes BAM indexing providing ability to switch indexing approaches.
"""
    assert is_bam(in_bam), "%s is not a BAM file" % in_bam
index_file = "%s.bai" % in_bam
alt_index_file = "%s.bai" % os.path.splitext(in_bam)[0]
if check_timestamp:
bai_exists = utils.file_uptodate(index_file, in_bam) or utils.file_uptodate(alt_index_file, in_bam)
else:
bai_exists = utils.file_exists(index_file) or utils.file_exists(alt_index_file)
if not bai_exists:
# Remove old index files and re-run to prevent linking into tx directory
for fname in [index_file, alt_index_file]:
utils.remove_safe(fname)
sambamba = _get_sambamba(config)
samtools = config_utils.get_program("samtools", config)
num_cores = config["algorithm"].get("num_cores", 1)
with file_transaction(config, index_file) as tx_index_file:
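            # the transactional index path must end in ".bam.bai" so the matching BAM path can be derived below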
assert tx_index_file.find(".bam.bai") > 0
tx_bam_file = tx_index_file.replace(".bam.bai", ".bam")
utils.symlink_plus(in_bam, tx_bam_file)
if sambamba:
cmd = "{sambamba} index -t {num_cores} {tx_bam_file}"
else:
cmd = "{samtools} index {tx_bam_file}"
do.run(cmd.format(**locals()), "Index BAM file: %s" % os.path.basename(in_bam))
return index_file if utils.file_exists(index_file) else alt_index_file
def remove(in_bam):
"""
    Remove a BAM file and its index if they exist.
"""
if utils.file_exists(in_bam):
utils.remove_safe(in_bam)
if utils.file_exists(in_bam + ".bai"):
utils.remove_safe(in_bam + ".bai")
def idxstats(in_bam, data):
"""Return BAM index stats for the given file, using samtools idxstats.
"""
index(in_bam, data["config"])
AlignInfo = collections.namedtuple("AlignInfo", ["contig", "length", "aligned", "unaligned"])
samtools = config_utils.get_program("samtools", data["config"])
idxstats_out = subprocess.check_output([samtools, "idxstats", in_bam])
out = []
for line in idxstats_out.split("\n"):
if line.strip():
contig, length, aligned, unaligned = line.split("\t")
out.append(AlignInfo(contig, int(length), int(aligned), int(unaligned)))
return out
def get_downsample_pct(in_bam, target_counts, data):
"""Retrieve percentage of file to downsample to get to target counts.
"""
total = sum(x.aligned for x in idxstats(in_bam, data))
with contextlib.closing(pysam.Samfile(in_bam, "rb")) as work_bam:
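        # target counts are per read group: scale by the number of RGs in the BAM header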
n_rgs = max(1, len(work_bam.header.get("RG", [])))
rg_target = n_rgs * target_counts
if total > rg_target:
return float(rg_target) / float(total)
def get_aligned_reads(in_bam, data):
index(in_bam, data["config"])
bam_stats = idxstats(in_bam, data)
align = sum(x.aligned for x in bam_stats)
unaligned = sum(x.unaligned for x in bam_stats)
total = float(align + unaligned)
return 1.0 * align / total
def downsample(in_bam, data, target_counts, read_filter="", always_run=False,
work_dir=None):
"""Downsample a BAM file to the specified number of target counts.
"""
index(in_bam, data["config"])
ds_pct = get_downsample_pct(in_bam, target_counts, data)
if always_run and not ds_pct:
ds_pct = 1.0
if ds_pct:
out_file = "%s-downsample%s" % os.path.splitext(in_bam)
if work_dir:
out_file = os.path.join(work_dir, os.path.basename(out_file))
if not utils.file_exists(out_file):
with file_transaction(data, out_file) as tx_out_file:
sambamba = config_utils.get_program("sambamba", data["config"])
num_cores = dd.get_num_cores(data)
cmd = ("{sambamba} view -t {num_cores} {read_filter} -f bam -o {tx_out_file} "
"--subsample={ds_pct:.3} --subsampling-seed=42 {in_bam}")
do.run(cmd.format(**locals()), "Downsample BAM file: %s" % os.path.basename(in_bam))
return out_file
def check_header(in_bam, rgnames, ref_file, config):
"""Ensure passed in BAM header matches reference file and read groups names.
"""
_check_bam_contigs(in_bam, ref_file, config)
_check_sample(in_bam, rgnames)
def _check_sample(in_bam, rgnames):
"""Ensure input sample name matches expected run group names.
"""
with contextlib.closing(pysam.Samfile(in_bam, "rb")) as bamfile:
rg = bamfile.header.get("RG", [{}])
msgs = []
warnings = []
if len(rg) > 1:
warnings.append("Multiple read groups found in input BAM. Expect single RG per BAM.")
elif len(rg) == 0:
msgs.append("No read groups found in input BAM. Expect single RG per BAM.")
elif rg[0].get("SM") != rgnames["sample"]:
msgs.append("Read group sample name (SM) does not match configuration `description`: %s vs %s"
% (rg[0].get("SM"), rgnames["sample"]))
if len(msgs) > 0:
raise ValueError("Problems with pre-aligned input BAM file: %s\n" % (in_bam)
+ "\n".join(msgs) +
"\nSetting `bam_clean: picard` in the configuration can often fix this issue.")
if warnings:
print("*** Potential problems in input BAM compared to reference:\n%s\n" %
"\n".join(warnings))
def _check_bam_contigs(in_bam, ref_file, config):
"""Ensure a pre-aligned BAM file matches the expected reference genome.
"""
ref_contigs = [c.name for c in ref.file_contigs(ref_file, config)]
with contextlib.closing(pysam.Samfile(in_bam, "rb")) as bamfile:
bam_contigs = [c["SN"] for c in bamfile.header["SQ"]]
problems = []
warnings = []
for bc, rc in itertools.izip_longest(bam_contigs, ref_contigs):
if bc != rc:
if bc and rc:
problems.append("Reference mismatch. BAM: %s Reference: %s" % (bc, rc))
elif bc:
warnings.append("Extra BAM chromosomes: %s" % bc)
elif rc:
warnings.append("Extra reference chromosomes: %s" % rc)
if problems:
raise ValueError("Unexpected order, name or contig mismatches between input BAM and reference file:\n%s\n"
"Setting `bam_clean: picard` in the configuration can often fix this issue."
% "\n".join(problems))
if warnings:
print("*** Potential problems in input BAM compared to reference:\n%s\n" %
"\n".join(warnings))
def open_samfile(in_file):
if is_bam(in_file):
return pysam.Samfile(in_file, "rb")
elif is_sam(in_file):
return pysam.Samfile(in_file, "r")
else:
raise IOError("in_file must be either a BAM file or SAM file. Is the "
"extension .sam or .bam?")
def is_bam(in_file):
_, ext = os.path.splitext(in_file)
if ext == ".bam":
return True
else:
return False
def is_sam(in_file):
_, ext = os.path.splitext(in_file)
if ext == ".sam":
return True
else:
return False
def mapped(in_bam, config):
"""
return a bam file of only the mapped reads
"""
out_file = os.path.splitext(in_bam)[0] + ".mapped.bam"
if utils.file_exists(out_file):
return out_file
sambamba = _get_sambamba(config)
with file_transaction(config, out_file) as tx_out_file:
if sambamba:
cmd = ("{sambamba} view --format=bam -F 'not (unmapped or mate_is_unmapped)' "
"{in_bam} -o {tx_out_file}")
else:
samtools = config_utils.get_program("samtools", config)
cmd = "{samtools} view -b -F 4 {in_bam} -o {tx_out_file}"
do.run(cmd.format(**locals()),
"Filtering mapped reads to %s." % (tx_out_file))
return out_file
def count(in_bam, config=None):
"""
    Return the number of alignments in a BAM file.
"""
if not config:
config = {}
sambamba = _get_sambamba(config)
if sambamba:
cmd = ("{sambamba} view -c {in_bam}").format(**locals())
else:
samtools = config_utils.get_program("samtools", config)
cmd = ("{samtools} view -c {in_bam}").format(**locals())
out = subprocess.check_output(cmd, shell=True)
return int(out)
def sam_to_bam(in_sam, config):
if is_bam(in_sam):
return in_sam
assert is_sam(in_sam), "%s is not a SAM file" % in_sam
out_file = os.path.splitext(in_sam)[0] + ".bam"
if utils.file_exists(out_file):
return out_file
samtools = config_utils.get_program("samtools", config)
num_cores = config["algorithm"].get("num_cores", 1)
with file_transaction(config, out_file) as tx_out_file:
cmd = "{samtools} view -@ {num_cores} -h -S -b {in_sam} -o {tx_out_file}"
do.run(cmd.format(**locals()),
("Convert SAM to BAM (%s cores): %s to %s"
% (str(num_cores), in_sam, out_file)))
return out_file
def sam_to_bam_stream_cmd(config, named_pipe=None):
sambamba = config_utils.get_program("sambamba", config)
num_cores = config["algorithm"].get("num_cores", 1)
pipe = named_pipe if named_pipe else "/dev/stdin"
cmd = " {sambamba} view --format=bam -S -t {num_cores} {pipe} ".format(**locals())
return cmd
def bam_to_sam(in_file, config):
if is_sam(in_file):
return in_file
assert is_bam(in_file), "%s is not a BAM file" % in_file
out_file = os.path.splitext(in_file)[0] + ".sam"
if utils.file_exists(out_file):
return out_file
samtools = config_utils.get_program("samtools", config)
num_cores = config["algorithm"].get("num_cores", 1)
with file_transaction(config, out_file) as tx_out_file:
cmd = "{samtools} view -@ {num_cores} -h {in_file} -o {tx_out_file}"
do.run(cmd.format(**locals()),
("Convert BAM to SAM (%s cores): %s to %s"
% (str(num_cores), in_file, out_file)))
return out_file
def reheader(header, bam_file, config):
samtools = config_utils.get_program("samtools", config)
base, ext = os.path.splitext(bam_file)
out_file = base + ".reheadered" + ext
cmd = "{samtools} reheader {header} {bam_file} > {out_file}"
do.run(cmd.format(**locals()), "Reheadering %s." % bam_file)
return out_file
def merge(bamfiles, out_bam, config):
    assert all(map(is_bam, bamfiles)), ("Not all of the files to merge are BAM "
                                        "files: %s " % (bamfiles))
assert all(map(utils.file_exists, bamfiles)), ("Not all of the files to merge "
"exist: %s" % (bamfiles))
if len(bamfiles) == 1:
return bamfiles[0]
if os.path.exists(out_bam):
return out_bam
sambamba = _get_sambamba(config)
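    # force the samtools/bamtools path below: sambamba merge is intentionally bypassed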
sambamba = None
samtools = config_utils.get_program("samtools", config)
bamtools = config_utils.get_program("bamtools", config)
num_cores = config["algorithm"].get("num_cores", 1)
with file_transaction(config, out_bam) as tx_out_bam:
try:
if sambamba:
cmd = "{sambamba} merge -t {num_cores} {tx_out_bam} " + " ".join(bamfiles)
else:
cmd = "{samtools} merge -@ {num_cores} {tx_out_bam} " + " ".join(bamfiles)
do.run(cmd.format(**locals()), "Merge %s into %s." % (bamfiles, out_bam))
except subprocess.CalledProcessError:
files = " -in ".join(bamfiles)
cmd = "{bamtools} merge -in {files} -out {tx_out_bam}"
do.run(cmd.format(**locals()), "Error with other tools. Merge %s into %s with bamtools" %
(bamfiles, out_bam))
index(out_bam, config)
return out_bam
def sort(in_bam, config, order="coordinate"):
"""Sort a BAM file, skipping if already present.
"""
    assert is_bam(in_bam), "%s is not a BAM file" % in_bam
if bam_already_sorted(in_bam, config, order):
return in_bam
sort_stem = _get_sort_stem(in_bam, order)
sort_file = sort_stem + ".bam"
if not utils.file_exists(sort_file):
sambamba = _get_sambamba(config)
samtools = config_utils.get_program("samtools", config)
cores = config["algorithm"].get("num_cores", 1)
with file_transaction(config, sort_file) as tx_sort_file:
tx_sort_stem = os.path.splitext(tx_sort_file)[0]
tx_dir = utils.safe_makedir(os.path.dirname(tx_sort_file))
order_flag = "-n" if order == "queryname" else ""
resources = config_utils.get_resources("samtools", config)
mem = resources.get("memory", "2G")
samtools_cmd = ("{samtools} sort -@ {cores} -m {mem} {order_flag} "
"{in_bam} {tx_sort_stem}")
if sambamba:
if tz.get_in(["resources", "sambamba"], config):
sm_resources = config_utils.get_resources("sambamba", config)
mem = sm_resources.get("memory", "2G")
# sambamba uses total memory, not memory per core
mem = config_utils.adjust_memory(mem, cores, "increase").upper()
# Use samtools compatible natural sorting
# https://github.com/lomereiter/sambamba/issues/132
order_flag = "--natural-sort" if order == "queryname" else ""
cmd = ("{sambamba} sort -t {cores} -m {mem} {order_flag} "
"-o {tx_sort_file} --tmpdir={tx_dir} {in_bam}")
else:
cmd = samtools_cmd
# sambamba has intermittent multicore failures. Allow
# retries with single core
try:
do.run(cmd.format(**locals()),
"Sort BAM file (multi core, %s): %s to %s" %
(order, os.path.basename(in_bam),
os.path.basename(sort_file)))
except:
logger.exception("Multi-core sorting failed, reverting to single core")
resources = config_utils.get_resources("samtools", config)
mem = resources.get("memory", "2G")
cores = 1
order_flag = "-n" if order == "queryname" else ""
do.run(samtools_cmd.format(**locals()),
"Sort BAM file (single core, %s): %s to %s" %
(order, os.path.basename(in_bam),
os.path.basename(sort_file)))
return sort_file
def sort_cmd(config, tmp_dir, named_pipe=None, order="coordinate"):
""" Get a sort command, suitable for piping
"""
sambamba = _get_sambamba(config)
pipe = named_pipe if named_pipe else "/dev/stdin"
order_flag = "-n" if order == "queryname" else ""
resources = config_utils.get_resources("samtools", config)
num_cores = config["algorithm"].get("num_cores", 1)
mem = config_utils.adjust_memory(resources.get("memory", "2G"), 1, "decrease").upper()
cmd = ("{sambamba} sort -m {mem} --tmpdir {tmp_dir} -t {num_cores} {order_flag} -o /dev/stdout {pipe}")
return cmd.format(**locals())
def _get_sambamba(config):
try:
sambamba = config_utils.get_program("sambamba", config)
except config_utils.CmdNotFound:
sambamba = None
return sambamba
def bam_already_sorted(in_bam, config, order):
return order == _get_sort_order(in_bam, config)
def _get_sort_order(in_bam, config):
with open_samfile(in_bam) as bam_handle:
header = bam_handle.header
return utils.get_in(header, ("HD", "SO"), None)
def _get_sort_stem(in_bam, order):
SUFFIXES = {"coordinate": ".sorted", "queryname": ".nsorted"}
sort_base = os.path.splitext(in_bam)[0]
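    # strip any existing sort suffix so repeated sorts do not stack suffixes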
for suffix in SUFFIXES:
sort_base = sort_base.split(suffix)[0]
return sort_base + SUFFIXES[order]
def sample_name(in_bam):
"""Get sample name from BAM file.
"""
with contextlib.closing(pysam.AlignmentFile(in_bam, "rb", check_sq=False)) as in_pysam:
try:
if "RG" in in_pysam.header:
return in_pysam.header["RG"][0]["SM"]
except ValueError:
return None
def estimate_read_length(bam_file, nreads=1000):
"""
estimate median read length of a SAM/BAM file
"""<|fim▁hole|> return int(numpy.median(lengths))
def estimate_fragment_size(bam_file, nreads=1000):
"""
estimate median fragment size of a SAM/BAM file
"""
with open_samfile(bam_file) as bam_handle:
reads = tz.itertoolz.take(nreads, bam_handle)
lengths = [x.tlen for x in reads]
return int(numpy.median(lengths))
def filter_stream_cmd(bam_file, data, filter_flag):
"""
return a command to keep only alignments matching the filter flag
see https://github.com/lomereiter/sambamba/wiki/%5Bsambamba-view%5D-Filter-expression-syntax for examples
"""
sambamba = config_utils.get_program("sambamba", data["config"])
num_cores = dd.get_num_cores(data)
cmd = ('{sambamba} view -t {num_cores} -f bam -F "{filter_flag}" {bam_file}')
return cmd.format(**locals())
def filter_primary_stream_cmd(bam_file, data):
return filter_stream_cmd(bam_file, data, "not secondary_alignment")
def filter_primary(bam_file, data):
stem, ext = os.path.splitext(bam_file)
out_file = stem + ".primary" + ext
if utils.file_exists(out_file):
return out_file
    with file_transaction(data, out_file) as tx_out_file:
cmd = filter_primary_stream_cmd(bam_file, data)
cmd += "> {tx_out_file}"
do.run(cmd.format(**locals()), ("Filtering primary alignments in %s." %
os.path.basename(bam_file)))
return out_file<|fim▁end|> | with open_samfile(bam_file) as bam_handle:
reads = tz.itertoolz.take(nreads, bam_handle)
lengths = [len(x.seq) for x in reads] |
<|file_name|>special.py<|end_file_name|><|fim▁begin|>SIMPLE_SETTINGS = {
'OVERRIDE_BY_ENV': True
}<|fim▁hole|><|fim▁end|> |
MY_VAR = u'Some Value' |
<|file_name|>account_setup.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# Post-installation configuration helpers
# Copyright (C) 2015 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
"""Common code for scripting installation of a chart of accounts
into a company.
The function you probably want to use is setup_company_accounts()
"""
from datetime import date
import logging
from . import confutil
_logger = logging.getLogger(__name__)
def setup_company_accounts(cr, registry, uid, company, chart_template, code_digits=None, context=None):
"""This sets up accounts, fiscal year and periods for the given company.
company: A res.company object
chart_template: An account.chart.template object
code_digits: The number of digits (the default is usually 6)
context: e.g. {'lang': 'en_GB', 'tz': False, 'uid': openerp.SUPERUSER_ID}
A financial year is set up starting this year on 1st Jan and ending this year on 31st Dec.
"""
unconfigured_companies = unconfigured_company_ids(cr, registry, uid, context=context)
if company.id in unconfigured_companies:
setup_chart_of_accounts(cr, registry, uid,
company_id=company.id,
chart_template_id=chart_template.id,
code_digits=code_digits,
context=context,
)<|fim▁hole|> account_start = today.strftime('%Y-01-01')
account_end = today.strftime('%Y-12-31')
create_fiscal_year(cr, registry, uid,
company_id=company.id,
name=fy_name,
code=fy_code,
start_date=account_start,
end_date=account_end,
context=context,
)
confutil.set_account_settings(cr, registry, uid,
company=company,
changes={
'date_start': account_start,
'date_stop': account_end,
'period': 'month',
},
context=context,
)
def unconfigured_company_ids(cr, registry, uid, context=None):
"""Return list of ids of companies without a chart of accounts.
"""
account_installer = registry['account.installer']
return account_installer.get_unconfigured_cmp(cr, uid, context=context)
def setup_chart_of_accounts(cr, registry, uid, company_id, chart_template_id, code_digits=None, context=None):
chart_wizard = registry['wizard.multi.charts.accounts']
defaults = chart_wizard.default_get(cr, uid, ['bank_accounts_id', 'currency_id'], context=context)
bank_accounts_spec = defaults.pop('bank_accounts_id')
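    # wrap each default bank account in an OpenERP (0, _, values) one2many create command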
bank_accounts_id = [(0, False, i) for i in bank_accounts_spec]
data = defaults.copy()
data.update({
"chart_template_id": chart_template_id,
'company_id': company_id,
'bank_accounts_id': bank_accounts_id,
})
onchange = chart_wizard.onchange_chart_template_id(cr, uid, [], data['chart_template_id'], context=context)
data.update(onchange['value'])
if code_digits:
data.update({'code_digits': code_digits})
conf_id = chart_wizard.create(cr, uid, data, context=context)
chart_wizard.execute(cr, uid, [conf_id], context=context)
def create_fiscal_year(cr, registry, uid, company_id, name, code, start_date, end_date, context=None):
fy_model = registry['account.fiscalyear']
fy_data = fy_model.default_get(cr, uid, ['state', 'company_id'], context=context).copy()
fy_data.update({
'company_id': company_id,
'name': name,
'code': code,
'date_start': start_date,
'date_stop': end_date,
})
fy_id = fy_model.create(cr, uid, fy_data, context=context)
fy_model.create_period(cr, uid, [fy_id], context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|> |
today = date.today()
fy_name = today.strftime('%Y')
fy_code = 'FY' + fy_name |
<|file_name|>RandomNameColors.java<|end_file_name|><|fim▁begin|>package net.blay09.mods.bmc.chat;
import com.google.common.collect.Maps;
import net.minecraft.util.text.TextFormatting;
import java.util.Map;
import java.util.Random;
public class RandomNameColors {
private static final Random random = new Random();
private static final TextFormatting[] VALID_COLORS = new TextFormatting[] {
TextFormatting.DARK_BLUE,
TextFormatting.DARK_GREEN,
TextFormatting.DARK_AQUA,<|fim▁hole|> TextFormatting.GOLD,
TextFormatting.GRAY,
TextFormatting.BLUE,
TextFormatting.GREEN,
TextFormatting.AQUA,
TextFormatting.RED,
TextFormatting.LIGHT_PURPLE,
TextFormatting.YELLOW,
TextFormatting.WHITE
};
private static Map<String, TextFormatting> nameColorMap = Maps.newHashMap();
public static TextFormatting getRandomNameColor(String senderName) {
TextFormatting color = nameColorMap.get(senderName);
if(color == null) {
color = VALID_COLORS[random.nextInt(VALID_COLORS.length)];
nameColorMap.put(senderName, color);
}
return color;
}
}<|fim▁end|> | TextFormatting.DARK_RED,
TextFormatting.DARK_PURPLE, |
<|file_name|>eye.d.ts<|end_file_name|><|fim▁begin|>import * as React from 'react';
import { IconBaseProps } from 'react-icon-base';<|fim▁hole|><|fim▁end|> | declare class GoEye extends React.Component<IconBaseProps> { }
export = GoEye; |
<|file_name|>generics.rs<|end_file_name|><|fim▁begin|>#![recursion_limit = "128"]
#[macro_use]
extern crate generic_array;
use generic_array::typenum::consts::U4;
use std::fmt::Debug;
use std::ops::Add;
use generic_array::{GenericArray, ArrayLength};
use generic_array::sequence::*;
use generic_array::functional::*;
/// Example function using generics to pass N-length sequences and map them
pub fn generic_map<S>(s: S)
where<|fim▁hole|> S: FunctionalSequence<i32>, // `.map`
S::Item: Add<i32, Output = i32>, // `x + 1`
S: MappedGenericSequence<i32, i32>, // `i32` -> `i32`
MappedSequence<S, i32, i32>: Debug, // println!
{
let a = s.map(|x| x + 1);
println!("{:?}", a);
}
/// Complex example function using generics to pass N-length sequences, zip them, and then map that result.
///
/// If used with `GenericArray` specifically this isn't necessary
pub fn generic_sequence_zip_sum<A, B>(a: A, b: B) -> i32
where
A: FunctionalSequence<i32>, // `.zip`
B: FunctionalSequence<i32, Length = A::Length>, // `.zip`
A: MappedGenericSequence<i32, i32>, // `i32` -> `i32`
B: MappedGenericSequence<i32, i32, Mapped = MappedSequence<A, i32, i32>>, // `i32` -> `i32`, prove A and B can map to the same output
A::Item: Add<B::Item, Output = i32>, // `l + r`
MappedSequence<A, i32, i32>: MappedGenericSequence<i32, i32> + FunctionalSequence<i32>, // `.map`
SequenceItem<MappedSequence<A, i32, i32>>: Add<i32, Output=i32>, // `x + 1`
MappedSequence<MappedSequence<A, i32, i32>, i32, i32>: Debug, // `println!`
MappedSequence<MappedSequence<A, i32, i32>, i32, i32>: FunctionalSequence<i32>, // `.fold`
SequenceItem<MappedSequence<MappedSequence<A, i32, i32>, i32, i32>>: Add<i32, Output=i32> // `x + a`, note the order
{
let c = a.zip(b, |l, r| l + r).map(|x| x + 1);
println!("{:?}", c);
c.fold(0, |a, x| x + a)
}
/// Super-simple fixed-length i32 `GenericArray`s
pub fn generic_array_plain_zip_sum(a: GenericArray<i32, U4>, b: GenericArray<i32, U4>) -> i32 {
a.zip(b, |l, r| l + r).map(|x| x + 1).fold(0, |a, x| x + a)
}
pub fn generic_array_variable_length_zip_sum<N>(a: GenericArray<i32, N>, b: GenericArray<i32, N>) -> i32
where
N: ArrayLength<i32>,
{
a.zip(b, |l, r| l + r).map(|x| x + 1).fold(0, |a, x| x + a)
}
pub fn generic_array_same_type_variable_length_zip_sum<T, N>(a: GenericArray<T, N>, b: GenericArray<T, N>) -> i32
where
N: ArrayLength<T> + ArrayLength<<T as Add<T>>::Output>,
T: Add<T, Output=i32>,
{
a.zip(b, |l, r| l + r).map(|x| x + 1).fold(0, |a, x| x + a)
}
/// Complex example using fully generic `GenericArray`s with the same length.
///
/// It's mostly just the repeated `Add` traits, which would be present in other systems anyway.
pub fn generic_array_zip_sum<A, B, N: ArrayLength<A> + ArrayLength<B>>(a: GenericArray<A, N>, b: GenericArray<B, N>) -> i32
where
A: Add<B>,
N: ArrayLength<<A as Add<B>>::Output> +
ArrayLength<<<A as Add<B>>::Output as Add<i32>>::Output>,
<A as Add<B>>::Output: Add<i32>,
<<A as Add<B>>::Output as Add<i32>>::Output: Add<i32, Output=i32>,
{
a.zip(b, |l, r| l + r).map(|x| x + 1).fold(0, |a, x| x + a)
}
#[test]
fn test_generics() {
generic_map(arr![i32; 1, 2, 3, 4]);
assert_eq!(generic_sequence_zip_sum(arr![i32; 1, 2, 3, 4], arr![i32; 2, 3, 4, 5]), 28);
assert_eq!(generic_array_plain_zip_sum(arr![i32; 1, 2, 3, 4], arr![i32; 2, 3, 4, 5]), 28);
assert_eq!(generic_array_variable_length_zip_sum(arr![i32; 1, 2, 3, 4], arr![i32; 2, 3, 4, 5]), 28);
assert_eq!(generic_array_same_type_variable_length_zip_sum(arr![i32; 1, 2, 3, 4], arr![i32; 2, 3, 4, 5]), 28);
assert_eq!(generic_array_zip_sum(arr![i32; 1, 2, 3, 4], arr![i32; 2, 3, 4, 5]), 28);
}<|fim▁end|> | |
<|file_name|>doc.go<|end_file_name|><|fim▁begin|>// Package flagnet provides helpers for the standard package 'flag' for parsing<|fim▁hole|>package flagnet<|fim▁end|> | // host:port and host flags |
<|file_name|>lease.go<|end_file_name|><|fim▁begin|>// This is lease support for nclient4
package nclient4
import (
"fmt"
"net"
"time"
"github.com/insomniacslk/dhcp/dhcpv4"
)<|fim▁hole|> Offer *dhcpv4.DHCPv4
ACK *dhcpv4.DHCPv4
CreationTime time.Time
}
// Release send DHCPv4 release messsage to server, based on specified lease.
// release is sent as unicast per RFC2131, section 4.4.4.
// Note: some DHCP server requries of using assigned IP address as source IP,
// use nclient4.WithUnicast to create client for such case.
func (c *Client) Release(lease *Lease, modifiers ...dhcpv4.Modifier) error {
if lease == nil {
return fmt.Errorf("lease is nil")
}
req, err := dhcpv4.NewReleaseFromACK(lease.ACK, modifiers...)
if err != nil {
return fmt.Errorf("fail to create release request,%w", err)
}
_, err = c.conn.WriteTo(req.ToBytes(), &net.UDPAddr{IP: lease.ACK.Options.Get(dhcpv4.OptionServerIdentifier), Port: ServerPort})
if err == nil {
c.logger.PrintMessage("sent message:", req)
}
return err
}<|fim▁end|> |
// Lease contains a DHCPv4 lease after DORA.
// Note: Lease doesn't include the binding interface name.
type Lease struct { |
<|file_name|>axfs.py<|end_file_name|><|fim▁begin|>"""A parser for axfs file system images"""
from stat import *
import zlib
from . import *
from ..io import *
from ..util import *
AxfsHeader = Struct('AxfsHeader', [
('magic', Struct.STR % 4),
('signature', Struct.STR % 16),
('digest', Struct.STR % 40),
('blockSize', Struct.INT32),
('files', Struct.INT64),
('size', Struct.INT64),
('blocks', Struct.INT64),
('mmapSize', Struct.INT64),
('regions', Struct.STR % 144),
('...', 13),
], Struct.BIG_ENDIAN)
axfsHeaderMagic = b'\x48\xA0\xE4\xCD'
axfsHeaderSignature = b'Advanced XIP FS\0'
AxfsRegionDesc = Struct('AxfsRegionDesc', [
('offset', Struct.INT64),
('size', Struct.INT64),
('compressedSize', Struct.INT64),
('maxIndex', Struct.INT64),
('tableByteDepth', Struct.INT8),
('incore', Struct.INT8),
], Struct.BIG_ENDIAN)
axfsRegions = [
'strings',
'xip',
'byteAligned',
'compressed',
# tableRegions:
'nodeType',
'nodeIndex',
'cnodeOffset',
'cnodeIndex',
'banodeOffset',
'cblockOffset',
'fileSize',
'nameOffset',
'numEntries',
'modeIndex',
'arrayIndex',
'modes',
'uids',
'gids',
]
def isAxfs(file):
header = AxfsHeader.unpack(file)
return header and header.magic == axfsHeaderMagic and header.signature == axfsHeaderSignature
def readAxfs(file):
header = AxfsHeader.unpack(file)
if header.magic != axfsHeaderMagic or header.signature != axfsHeaderSignature:
raise Exception('Wrong magic')
regions = {}
tables = {}
for i, k in enumerate(axfsRegions):
region = AxfsRegionDesc.unpack(file, parse64be(header.regions[i*8:(i+1)*8]))
regions[k] = FilePart(file, region.offset, region.size)
if i >= 4:
regionData = regions[k].read()
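            # each table is stored as byte planes: byte j of entry i lives at offset j*maxIndex + i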
            tables[k] = [
                sum(ord(regionData[j * region.maxIndex + i:j * region.maxIndex + i + 1]) << (8 * j)
                    for j in range(region.tableByteDepth))
                for i in range(region.maxIndex)
            ]
def readInode(id, path=''):
size = tables['fileSize'][id]
nameOffset = tables['nameOffset'][id]
mode = tables['modes'][tables['modeIndex'][id]]
uid = tables['uids'][tables['modeIndex'][id]]
gid = tables['gids'][tables['modeIndex'][id]]
numEntries = tables['numEntries'][id]
arrayIndex = tables['arrayIndex'][id]
name = b''
regions['strings'].seek(nameOffset)
while b'\0' not in name:
name += regions['strings'].read(1024)
name = name.partition(b'\0')[0].decode('ascii')
path += name if id != 0 else ''
isDir = S_ISDIR(mode)
def generateChunks(arrayIndex=arrayIndex, numEntries=numEntries, size=size):
read = 0
for i in range(numEntries):
nodeType = tables['nodeType'][arrayIndex + i]
nodeIndex = tables['nodeIndex'][arrayIndex + i]
if nodeType == 0:
regions['xip'].seek(nodeIndex << 12)
contents = regions['xip'].read(4096)
elif nodeType == 1:
cnodeIndex = tables['cnodeIndex'][nodeIndex]
regions['compressed'].seek(tables['cblockOffset'][cnodeIndex])
contents = zlib.decompress(regions['compressed'].read(tables['cblockOffset'][cnodeIndex+1] - tables['cblockOffset'][cnodeIndex]))
elif nodeType == 2:
regions['byteAligned'].seek(tables['banodeOffset'][nodeIndex])
contents = regions['byteAligned'].read(size - read)
else:
raise Exception('Unknown type')<|fim▁hole|>
yield UnixFile(
path = path,
size = size if not isDir else 0,
mtime = 0,
mode = mode,
uid = uid,
gid = gid,
contents = ChunkedFile(generateChunks, size) if S_ISREG(mode) or S_ISLNK(mode) else None,
)
if isDir:
for i in range(numEntries):
for f in readInode(arrayIndex + i, path + '/'):
yield f
for f in readInode(0):
yield f<|fim▁end|> | yield contents
read += len(contents) |
<|file_name|>sampleWithBeforeAndAfter.js<|end_file_name|><|fim▁begin|>describe('Demo test with Mocha', function() {
describe('for testing purposes', function() {
before(function(client, done) {
client.globals.test_calls++;
done();
});
after(function(client, done) {
setTimeout(function() {<|fim▁hole|> client.globals.test_calls++;
done();
}, 100);
});
afterEach(function(client, done) {
setTimeout(function() {
client.globals.test_calls++;
done();
}, 100);
});
beforeEach(function(client, done) {
setTimeout(function() {
client.globals.test_calls++;
done();
}, 100);
});
it('demoTestAsyncOne', function(client) {
client.url('http://localhost');
});
it('demoTestAsyncTwo', function(client) {
client.end();
});
});
});<|fim▁end|> | |
<|file_name|>Meta_temporary_type.cpp<|end_file_name|><|fim▁begin|>#include "Meta_temporary_type.hpp"
#include "../colors.h"
#if COMPILER
#include "../compiler/Compiler.hpp"
#endif
namespace ls {
bool Meta_temporary_type::operator == (const Type* type) const {
return false;
}
int Meta_temporary_type::distance(const Type* type) const {
return -1;<|fim▁hole|>}
#if COMPILER
llvm::Type* Meta_temporary_type::llvm(Compiler& c) const {
return llvm::Type::getVoidTy(c.getContext());
}
#endif
std::string Meta_temporary_type::class_name() const {
return "";
}
Json Meta_temporary_type::json() const {
return type->json();
}
std::ostream& Meta_temporary_type::print(std::ostream& os) const {
os << C_GREY << "tmp(" << type << ")" << END_COLOR;
return os;
}
Type* Meta_temporary_type::clone() const {
return new Meta_temporary_type(type);
}
}<|fim▁end|> | |
<|file_name|>shootout-nbody.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::os;
static PI: f64 = 3.141592653589793;
static SOLAR_MASS: f64 = 4.0 * PI * PI;
static YEAR: f64 = 365.24;
static N_BODIES: uint = 5;
static BODIES: [Planet, ..N_BODIES] = [
// Sun
Planet {
x: [ 0.0, 0.0, 0.0 ],
v: [ 0.0, 0.0, 0.0 ],
mass: SOLAR_MASS,
},
// Jupiter
Planet {
x: [
4.84143144246472090e+00,
-1.16032004402742839e+00,
-1.03622044471123109e-01,
],
v: [
1.66007664274403694e-03 * YEAR,
7.69901118419740425e-03 * YEAR,
-6.90460016972063023e-05 * YEAR,
],
mass: 9.54791938424326609e-04 * SOLAR_MASS,
},
// Saturn
Planet {
x: [
8.34336671824457987e+00,
4.12479856412430479e+00,
-4.03523417114321381e-01,
],
v: [
-2.76742510726862411e-03 * YEAR,
4.99852801234917238e-03 * YEAR,
2.30417297573763929e-05 * YEAR,
],
mass: 2.85885980666130812e-04 * SOLAR_MASS,
},
// Uranus
Planet {
x: [
1.28943695621391310e+01,
-1.51111514016986312e+01,
-2.23307578892655734e-01,
],
v: [
2.96460137564761618e-03 * YEAR,
2.37847173959480950e-03 * YEAR,
-2.96589568540237556e-05 * YEAR,
],
mass: 4.36624404335156298e-05 * SOLAR_MASS,
},
// Neptune
Planet {
x: [
1.53796971148509165e+01,
-2.59193146099879641e+01,
1.79258772950371181e-01,
],
v: [
2.68067772490389322e-03 * YEAR,
1.62824170038242295e-03 * YEAR,
-9.51592254519715870e-05 * YEAR,
],
mass: 5.15138902046611451e-05 * SOLAR_MASS,
},
];
struct Planet {
x: [f64, ..3],
v: [f64, ..3],
mass: f64,
}
fn advance(bodies: &mut [Planet, ..N_BODIES], dt: f64, steps: i32) {
let mut d = [ 0.0, ..3 ];
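    // symplectic Euler step: accumulate pairwise gravitational velocity kicks, then drift positions by dt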
for _ in range(0, steps) {
for i in range(0u, N_BODIES) {
for j in range(i + 1, N_BODIES) {
d[0] = bodies[i].x[0] - bodies[j].x[0];
d[1] = bodies[i].x[1] - bodies[j].x[1];
d[2] = bodies[i].x[2] - bodies[j].x[2];
let d2 = d[0]*d[0] + d[1]*d[1] + d[2]*d[2];
let mag = dt / (d2 * d2.sqrt());
let a_mass = bodies[i].mass;
let b_mass = bodies[j].mass;
bodies[i].v[0] -= d[0] * b_mass * mag;
bodies[i].v[1] -= d[1] * b_mass * mag;
bodies[i].v[2] -= d[2] * b_mass * mag;
bodies[j].v[0] += d[0] * a_mass * mag;
bodies[j].v[1] += d[1] * a_mass * mag;
bodies[j].v[2] += d[2] * a_mass * mag;
}
}
for a in bodies.mut_iter() {
a.x[0] += dt * a.v[0];
a.x[1] += dt * a.v[1];
a.x[2] += dt * a.v[2];
}
}
}
fn energy(bodies: &[Planet, ..N_BODIES]) -> f64 {
let mut e = 0.0;
let mut d = [ 0.0, ..3 ];
for i in range(0u, N_BODIES) {
for k in range(0u, 3) {
e += bodies[i].mass * bodies[i].v[k] * bodies[i].v[k] / 2.0;
}
for j in range(i + 1, N_BODIES) {
for k in range(0u, 3) {
d[k] = bodies[i].x[k] - bodies[j].x[k];
}
let dist = (d[0]*d[0] + d[1]*d[1] + d[2]*d[2]).sqrt();
e -= bodies[i].mass * bodies[j].mass / dist;
}<|fim▁hole|> }
e
}
fn offset_momentum(bodies: &mut [Planet, ..N_BODIES]) {
for i in range(0u, N_BODIES) {
for k in range(0u, 3) {
bodies[0].v[k] -= bodies[i].v[k] * bodies[i].mass / SOLAR_MASS;
}
}
}
fn main() {
let args = os::args();
let args = if os::getenv("RUST_BENCH").is_some() {
vec!("".to_owned(), "1000".to_owned())
} else if args.len() <= 1u {
vec!("".to_owned(), "1000".to_owned())
} else {
args.move_iter().collect()
};
let n: i32 = from_str::<i32>(*args.get(1)).unwrap();
let mut bodies = BODIES;
offset_momentum(&mut bodies);
println!("{:.9f}", energy(&bodies) as f64);
advance(&mut bodies, 0.01, n);
println!("{:.9f}", energy(&bodies) as f64);
}<|fim▁end|> | |
<|file_name|>FTS3Placement.py<|end_file_name|><|fim▁begin|>from DIRAC import S_ERROR, S_OK, gLogger
from DIRAC.DataManagementSystem.private.FTSAbstractPlacement import FTSAbstractPlacement, FTSRoute
from DIRAC.ConfigurationSystem.Client.Helpers.Resources import getFTS3Servers
from DIRAC.ResourceStatusSystem.Client.ResourceStatus import ResourceStatus
import random
class FTS3Placement( FTSAbstractPlacement ):
"""
This class manages all the FTS strategies, routes and what not
"""
__serverPolicy = "Random"
__nextServerID = 0
__serverList = None
__maxAttempts = 0
def __init__( self, csPath = None, ftsHistoryViews = None ):
"""
Call the init of the parent, and initialize the list of FTS3 servers
"""
self.log = gLogger.getSubLogger( "FTS3Placement" )
super( FTS3Placement, self ).__init__( csPath = csPath, ftsHistoryViews = ftsHistoryViews )
srvList = getFTS3Servers()
if not srvList['OK']:
self.log.error( srvList['Message'] )
self.__serverList = srvList.get( 'Value', [] )
self.maxAttempts = len( self.__serverList )
self.rssClient = ResourceStatus()
def getReplicationTree( self, sourceSEs, targetSEs, size, strategy = None ):
""" For multiple source to multiple destination, find the optimal replication
strategy.
:param sourceSEs : list of source SE
:param targetSEs : list of destination SE
:param size : size of the File
:param strategy : which strategy to use
        :returns S_OK(dict) < route name : { dict with keys Ancestor, SourceSE, TargetSE, Strategy } >
For the time being, we are waiting for FTS3 to provide advisory mechanisms. So we just use
simple techniques
"""
# We will use a single random source
sourceSE = random.choice( sourceSEs )
tree = {}
for targetSE in targetSEs:
tree["%s#%s" % ( sourceSE, targetSE )] = { "Ancestor" : False, "SourceSE" : sourceSE,
"TargetSE" : targetSE, "Strategy" : "FTS3Simple" }
return S_OK( tree )
def refresh( self, ftsHistoryViews ):
"""
    Refresh the placement data: recalculate whatever is cached and
    fetch the latest configuration.
"""
return super( FTS3Placement, self ).refresh( ftsHistoryViews = ftsHistoryViews )
def __failoverServerPolicy(self, attempt = 0):
"""
    Always returns the server at a given position (normally the first one)
:param attempt: position of the server in the list
"""
if attempt >= len( self.__serverList ):
raise Exception( "FTS3Placement.__failoverServerPolicy: attempt to reach non existing server index" )
return self.__serverList[attempt]
def __sequenceServerPolicy( self ):
"""
    Every time this policy is called, return the next server in the list
"""
fts3server = self.__serverList[self.__nextServerID]
self.__nextServerID = ( self.__nextServerID + 1 ) % len( self.__serverList )
return fts3server
def __randomServerPolicy(self):
"""
return a random server from the list
"""
return random.choice( self.__serverList )
def __chooseFTS3Server( self ):
"""
Choose the appropriate FTS3 server depending on the policy
"""
fts3Server = None
attempt = 0
    # FIXME: need to get the real value from RSS
ftsServerStatus = True
while not fts3Server and attempt < self.maxAttempts:
if self.__serverPolicy == 'Random':
fts3Server = self.__randomServerPolicy()
elif self.__serverPolicy == 'Sequence':
fts3Server = self.__sequenceServerPolicy()
elif self.__serverPolicy == 'Failover':
fts3Server = self.__failoverServerPolicy( attempt = attempt )
else:
self.log.error( 'Unknown server policy %s. Using Random instead' % self.__serverPolicy )
fts3Server = self.__randomServerPolicy()
if not ftsServerStatus:
self.log.warn( 'FTS server %s is not in good shape. Choose another one' % fts3Server )
fts3Server = None
attempt += 1
<|fim▁hole|> # FIXME : I need to get the FTS server status from RSS
# ftsStatusFromRss = rss.ftsStatusOrSomethingLikeThat
if fts3Server:
return S_OK( fts3Server )
return S_ERROR ( "Could not find an FTS3 server (max attempt reached)" )
def findRoute( self, sourceSE, targetSE ):
""" Find the appropriate route from point A to B
:param sourceSE : source SE
:param targetSE : destination SE
:returns S_OK(FTSRoute)
"""
fts3server = self.__chooseFTS3Server()
if not fts3server['OK']:
return fts3server
fts3server = fts3server['Value']
route = FTSRoute( sourceSE, targetSE, fts3server )
return S_OK( route )
def isRouteValid( self, route ):
"""
FIXME: until RSS is ready, I check manually the status
In FTS3, all routes are valid a priori.
If a route was not valid for some reason, then FTS would know it
thanks to the blacklist sent by RSS, and would deal with it itself.
:param route : FTSRoute
:returns S_OK or S_ERROR(reason)
"""
rAccess = self.rssClient.getStorageElementStatus( route.sourceSE, "ReadAccess" )
self.log.debug( "se read %s %s" % ( route.sourceSE, rAccess ) )
if not rAccess["OK"]:
self.log.error( rAccess["Message"] )
return rAccess
if rAccess["Value"][route.sourceSE]["ReadAccess"] not in ( "Active", "Degraded" ):
return S_ERROR( "Source SE is not readable" )
wAccess = self.rssClient.getStorageElementStatus( route.targetSE, "WriteAccess" )
self.log.debug( "se write %s %s" % ( route.targetSE, wAccess ) )
if not wAccess["OK"]:
self.log.error( wAccess["Message"] )
return wAccess
if wAccess["Value"][route.targetSE]["WriteAccess"] not in ( "Active", "Degraded" ):
return S_ERROR( "Target SE is not writable" )
return S_OK()<|fim▁end|> | |
<|file_name|>test_load_user.py<|end_file_name|><|fim▁begin|>from unittest import TestCase
class TestLoadUser(TestCase):
def test_find_user(self):<|fim▁hole|> user = load_user('Neill', 'password')
self.assertIsNotNone(user)
        self.assertEqual(user.password, "password")
user = load_user("Tony")
self.assertIsNone(user)<|fim▁end|> | from backend import load_user
|
<|file_name|>cluster_health.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package command
import (
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"os"
"os/signal"
"time"
"github.com/codegangsta/cli"
"github.com/coreos/etcd/client"
"golang.org/x/net/context"
)
func NewClusterHealthCommand() cli.Command {
return cli.Command{
Name: "cluster-health",
Usage: "check the health of the etcd cluster",
ArgsUsage: " ",
Flags: []cli.Flag{
cli.BoolFlag{Name: "forever, f", Usage: "forever check the health every 10 second until CTRL+C"},
},
Action: handleClusterHealth,
}
}
func handleClusterHealth(c *cli.Context) {
forever := c.Bool("forever")
if forever {
sigch := make(chan os.Signal, 1)
signal.Notify(sigch, os.Interrupt)
go func() {
<-sigch
os.Exit(0)
}()
}
tr, err := getTransport(c)
if err != nil {
handleError(ExitServerError, err)
}
hc := http.Client{
Transport: tr,
}
cln := mustNewClientNoSync(c)
mi := client.NewMembersAPI(cln)
ms, err := mi.List(context.TODO())
if err != nil {
fmt.Println("cluster may be unhealthy: failed to list members")
handleError(ExitServerError, err)
}
for {
health := false
for _, m := range ms {
if len(m.ClientURLs) == 0 {
fmt.Printf("member %s is unreachable: no available published client urls\n", m.ID)
continue
}
checked := false
for _, url := range m.ClientURLs {
resp, err := hc.Get(url + "/health")
if err != nil {
fmt.Printf("failed to check the health of member %s on %s: %v\n", m.ID, url, err)
continue
}
result := struct{ Health string }{}
nresult := struct{ Health bool }{}
bytes, err := ioutil.ReadAll(resp.Body)
if err != nil {
fmt.Printf("failed to check the health of member %s on %s: %v\n", m.ID, url, err)
continue
}
resp.Body.Close()
err = json.Unmarshal(bytes, &result)
if err != nil {
err = json.Unmarshal(bytes, &nresult)
}
if err != nil {<|fim▁hole|>
checked = true
if result.Health == "true" || nresult.Health {
health = true
fmt.Printf("member %s is healthy: got healthy result from %s\n", m.ID, url)
} else {
fmt.Printf("member %s is unhealthy: got unhealthy result from %s\n", m.ID, url)
}
break
}
if !checked {
fmt.Printf("member %s is unreachable: %v are all unreachable\n", m.ID, m.ClientURLs)
}
}
if health {
fmt.Println("cluster is healthy")
} else {
fmt.Println("cluster is unhealthy")
}
if !forever {
if health {
os.Exit(ExitSuccess)
} else {
os.Exit(ExitClusterNotHealthy)
}
}
fmt.Printf("\nnext check after 10 second...\n\n")
time.Sleep(10 * time.Second)
}
}<|fim▁end|> | fmt.Printf("failed to check the health of member %s on %s: %v\n", m.ID, url, err)
continue
} |
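The double decode into result and nresult above exists because older etcd servers report the health flag as the string "true" while newer ones report a JSON boolean. A minimal Python sketch of the same fallback, assuming only the payload shapes visible in the Go code:

import json

def is_healthy(body):
    # Accept both {"health": "true"} and {"health": true} payloads.
    health = json.loads(body).get('health')
    return health is True or health == 'true'

assert is_healthy('{"health": "true"}')
assert is_healthy('{"health": true}')
assert not is_healthy('{"health": "false"}')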
<|file_name|>ChoQueListFragment.java<|end_file_name|><|fim▁begin|>package cn.honjow.leanc.ui.Fragment;
import cn.honjow.leanc.adapter.QuestionListAdapter;
import cn.honjow.leanc.ui.Activice.ChoQueActivity;
import cn.droidlover.xdroidmvp.base.SimpleRecAdapter;
import cn.honjow.leanc.model.QuestionItem;
import cn.honjow.leanc.ui.BaseLeancFragment;
import cn.droidlover.xrecyclerview.RecyclerItemCallback;
import cn.droidlover.xrecyclerview.XRecyclerView;
/**
* Created by honjow311 on 2017/5/17.
*/
public class ChoQueListFragment extends BaseLeancFragment {
QuestionListAdapter adapter;
@Override
public SimpleRecAdapter getAdapter() {
if (adapter == null) {
adapter = new QuestionListAdapter(context);
adapter.setRecItemClick(new RecyclerItemCallback<QuestionItem, QuestionListAdapter.ViewHolder>() {<|fim▁hole|> @Override
public void onItemClick(int position, QuestionItem model, int tag, QuestionListAdapter.ViewHolder holder) {
super.onItemClick(position, model, tag, holder);
switch (tag) {
case QuestionListAdapter.TAG_VIEW:
ChoQueActivity.launch(context, model);
break;
}
}
});
}
return adapter;
}
@Override
public void setLayoutManager(XRecyclerView recyclerView) {
recyclerView.verticalLayoutManager(context);
}
@Override
public String getType() {
return "1";
}
public static ChoQueListFragment newInstance() {
return new ChoQueListFragment();
}
}<|fim▁end|> | |
<|file_name|>ExampleCommands.java<|end_file_name|><|fim▁begin|>/*
* This file is part of Zinc, licensed under the MIT License (MIT).
*
* Copyright (c) 2015-2016, Jamie Mansfield <https://github.com/jamierocks>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package uk.jamierocks.zinc.example;
import com.google.common.collect.Lists;
import org.spongepowered.api.command.CommandResult;
import org.spongepowered.api.command.CommandSource;
import org.spongepowered.api.command.args.CommandArgs;
import org.spongepowered.api.text.Text;
import uk.jamierocks.zinc.Command;
import uk.jamierocks.zinc.TabComplete;
import java.util.List;
public class ExampleCommands {
@Command(name = "example")
public CommandResult exampleCommand(CommandSource source, CommandArgs args) {
source.sendMessage(Text.of("This is the base command."));
return CommandResult.success();
}
@Command(parent = "example",
name = "sub")
public CommandResult exampleSubCommand(CommandSource source, CommandArgs args) {
source.sendMessage(Text.of("This is a sub command."));
return CommandResult.success();
}
@TabComplete(name = "example")
public List<String> tabComplete(CommandSource source, String args) {
return Lists.newArrayList();<|fim▁hole|><|fim▁end|> | }
} |
<|file_name|>allocator.py<|end_file_name|><|fim▁begin|>__author__ = "Jon Dawson"
__copyright__ = "Copyright (C) 2012, Jonathan P Dawson"
__version__ = "0.1"
class Allocator:
"""Maintain a pool of registers, variables and arrays. Keep track of what they are used for."""
def __init__(self, reuse):
self.registers = []
self.all_registers = {}
self.memory_size_2 = 0
self.memory_size_4 = 0
self.reuse = reuse
self.memory_content_2 = {}
self.memory_content_4 = {}
def new_array(self, size, contents, element_size):
if element_size == 2:
reg = self.memory_size_2
self.memory_size_2 += int(size)
if contents is not None:
for location, value in enumerate(contents, reg):
self.memory_content_2[location] = value
return reg
elif element_size == 4:
reg = self.memory_size_4
self.memory_size_4 += int(size)
if contents is not None:
for location, value in enumerate(contents, reg):
self.memory_content_4[location] = value
return reg
def regsize(self, reg):
return self.all_registers[reg][1]
def new(self, size, name="temporary_register"):
assert type(size) == int
reg = 0
while reg in self.registers or (reg in self.all_registers and self.regsize(reg) != size):
reg += 1<|fim▁hole|> return reg
def free(self, register):
if register in self.registers and self.reuse:
self.registers.remove(register)<|fim▁end|> | self.registers.append(reg)
self.all_registers[reg] = (name, size) |
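A short usage sketch for the allocator, inferred from the method signatures above; none of these calls or names are part of the original file.

alloc = Allocator(reuse=True)

base = alloc.new_array(4, [1, 2, 3, 4], 2)   # four 2-byte words, pre-filled
assert alloc.memory_size_2 == 4
assert alloc.memory_content_2[base + 3] == 4

r0 = alloc.new(2, name="loop_counter")
r1 = alloc.new(4, name="accumulator")
assert alloc.regsize(r1) == 4

alloc.free(r0)                # with reuse=True the register returns to the pool
assert alloc.new(2) == r0     # a same-sized request gets it back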
<|file_name|>tag_follow_disagreement.py<|end_file_name|><|fim▁begin|>import sys
tagging_filepath = sys.argv[1]
following_filepath = sys.argv[2]
delim = '\t'
if len(sys.argv) > 3:
delim = sys.argv[3]
graph = {}
for line in open(tagging_filepath):
entry = line.rstrip().split(delim)
src = entry[0]
dst = entry[1]
if not src in graph: graph[src] = {}
graph[src][dst] = 0
for line in open(following_filepath):
entry = line.rstrip().split('\t')
src = entry[0]
dst = entry[1]
if src in graph and dst in graph[src]:
graph[src][dst] += 1
if dst in graph and src in graph[dst]:
graph[dst][src] += 2
w_dir = 0
wo_dir = 0<|fim▁hole|>for src in graph:
for dst in graph[src]:
val = graph[src][dst]
count += 1
if val in [1,3]:
w_dir += 1
if val in [1,2,3]:
wo_dir += 1
print "%s\t%s" % (w_dir/count, wo_dir/count)<|fim▁end|> | count = 0.0 |
<|file_name|>TextTest.java<|end_file_name|><|fim▁begin|>/*
* See LICENSE file in distribution for copyright and licensing information.
*/
package seph.lang;
<|fim▁hole|>
/**
* @author <a href="mailto:[email protected]">Ola Bini</a>
*/
public class TextTest {
@Test
public void is_a_seph_object() {
assertTrue("A Text should be a SephObject", new Text("foo") instanceof SephObject);
}
}// TextTest<|fim▁end|> | import org.junit.Test;
import static org.junit.Assert.*; |
<|file_name|>ScheduleRuleMinutely.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react'
import { ValueLine } from '@framework/Lines'
import { TypeContext } from '@framework/TypeContext'
import { ScheduleRuleMinutelyEntity } from '../Signum.Entities.Scheduler'
export default function ScheduleRuleMinutely(p : { ctx: TypeContext<ScheduleRuleMinutelyEntity> }){
const ctx4 = p.ctx.subCtx({ labelColumns: { sm: 2 } });
<|fim▁hole|> <div>
<ValueLine ctx={ctx4.subCtx(f => f.startingOn)} />
<ValueLine ctx={ctx4.subCtx(f => f.eachMinutes)} />
</div>
);
}<|fim▁end|> |
return (
|
<|file_name|>test_casda.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import pytest
import requests
import os
from astropy.coordinates import SkyCoord
import astropy.units as u
from astropy.table import Table, Column
from astropy.io.votable import parse
from astroquery import log
from astroquery.casda import Casda
try:
from unittest.mock import Mock, patch, MagicMock
except ImportError:
pytest.skip("Install mock for the casda tests.", allow_module_level=True)
DATA_FILES = {'CIRCLE': 'cone.xml', 'RANGE': 'box.xml', 'DATALINK': 'datalink.xml', 'RUN_JOB': 'run_job.xml',
'COMPLETED_JOB': 'completed_job.xml', 'DATALINK_NOACCESS': 'datalink_noaccess.xml'}
class MockResponse:
def __init__(self, content):
self.content = content
self.text = content.decode()
def raise_for_status(self):
return
first_job_pass = True
def get_mockreturn(self, method, url, data=None, timeout=10,
files=None, params=None, headers=None, **kwargs):
log.debug("get_mockreturn url:{} params:{} kwargs:{}".format(url, params, kwargs))
if kwargs and 'auth' in kwargs:
auth = kwargs['auth']
if auth and (auth[0] != 'user' or auth[1] != 'password'):
log.debug("Rejecting credentials")
return create_auth_failure_response()
if 'data/async' in str(url):
# Responses for an asynchronous SODA job
if str(url).endswith('data/async'):
self.first_job_pass = True
return create_soda_create_response('111-000-111-000')
elif str(url).endswith('/phase') and method == 'POST':
key = "RUN_JOB"
elif str(url).endswith('111-000-111-000') and method == 'GET':
key = "RUN_JOB" if self.first_job_pass else "COMPLETED_JOB"
self.first_job_pass = False
else:
raise ValueError("Unexpected SODA async {} call to url {}".format(method, url))
elif 'datalink' in str(url):
if 'cube-244' in str(url):
key = 'DATALINK'
else:
key = 'DATALINK_NOACCESS'
else:
key = params['POS'].split()[0] if params['POS'] else None
filename = data_path(DATA_FILES[key])
log.debug('providing ' + filename)
content = open(filename, 'rb').read()
return MockResponse(content)
def create_soda_create_response(jobid):
job_url = 'https://casda.csiro.au/casda_data_access/data/async/' + jobid
create_response_headers = [
['location', job_url]
]
create_response = Mock(spec=requests.Response)
create_response.configure_mock(status_code=303, message='OK', headers=create_response_headers, url=job_url)
return create_response
def create_auth_failure_response():
unauthenticated_headers = [
['WWW-Authenticate', 'Basic realm="ATNF OPAL Login"']
]
create_response = MagicMock(spec=requests.Response)
attrs = {'raise_for_status.side_effect': requests.exceptions.HTTPError()}
create_response.configure_mock(status_code=401, message='OK', headers=unauthenticated_headers, **attrs)
return create_response
@pytest.fixture
def patch_get(request):
mp = request.getfixturevalue("monkeypatch")
mp.setattr(requests.Session, 'request', get_mockreturn)
return mp
def data_path(filename):
data_dir = os.path.join(os.path.dirname(__file__), 'data')
return os.path.join(data_dir, filename)
def isclose(value1, value2, abs_tol=1e-09):
return abs(value1 - value2) < abs_tol
def test_query_region_text_radius(patch_get):
ra = 333.9092
dec = -45.8418
radius = 0.5
query_payload = Casda.query_region('22h15m38.2s -45d50m30.5s', radius=radius * u.deg, cache=False,
get_query_payload=True)
assert isinstance(query_payload, dict)
assert 'POS' in query_payload
assert query_payload['POS'].startswith('CIRCLE 333')
pos_parts = query_payload['POS'].split(' ')
assert pos_parts[0] == 'CIRCLE'
assert isclose(float(pos_parts[1]), ra, abs_tol=1e-4)
assert isclose(float(pos_parts[2]), dec, abs_tol=1e-4)
assert isclose(float(pos_parts[3]), radius)
assert len(pos_parts) == 4
responses = Casda.query_region('22h15m38.2s -45d50m30.5s', radius=0.5 * u.deg, cache=False)
assert isinstance(responses, Table)
assert len(responses) == 3
def test_query_region_radius(patch_get):
ra = 333.9092
dec = -45.8418
radius = 0.5
centre = SkyCoord(ra, dec, unit=('deg', 'deg'))
query_payload = Casda.query_region(centre, radius=radius * u.deg, cache=False, get_query_payload=True)
assert isinstance(query_payload, dict)
assert 'POS' in query_payload
assert query_payload['POS'].startswith('CIRCLE 333')
pos_parts = query_payload['POS'].split(' ')
assert pos_parts[0] == 'CIRCLE'
assert isclose(float(pos_parts[1]), ra, abs_tol=1e-5)
assert isclose(float(pos_parts[2]), dec, abs_tol=1e-5)
assert isclose(float(pos_parts[3]), radius)
assert len(pos_parts) == 4
responses = Casda.query_region(centre, radius=0.5 * u.deg, cache=False)<|fim▁hole|>def test_query_region_async_radius(patch_get):
ra = 333.9092
dec = -45.8418
radius = 0.5
centre = SkyCoord(ra, dec, unit=('deg', 'deg'))
query_payload = Casda.query_region_async(centre, radius=radius * u.deg, cache=False, get_query_payload=True)
assert isinstance(query_payload, dict)
assert 'POS' in query_payload
assert query_payload['POS'].startswith('CIRCLE 333')
pos_parts = query_payload['POS'].split(' ')
assert pos_parts[0] == 'CIRCLE'
assert isclose(float(pos_parts[1]), ra, abs_tol=1e-5)
assert isclose(float(pos_parts[2]), dec, abs_tol=1e-5)
assert isclose(float(pos_parts[3]), radius)
assert len(pos_parts) == 4
responses = Casda.query_region_async(centre, radius=0.5 * u.deg, cache=False)
assert isinstance(responses, MockResponse)
def test_query_region_box(patch_get):
ra = 333.9092
dec = -45.8418
width = 0.5
height = 0.2
centre = SkyCoord(ra, dec, unit=('deg', 'deg'))
query_payload = Casda.query_region(centre, width=width * u.deg, height=height * u.deg, cache=False,
get_query_payload=True)
assert isinstance(query_payload, dict)
assert 'POS' in query_payload
assert query_payload['POS'].startswith('RANGE 333')
pos_parts = query_payload['POS'].split(' ')
assert pos_parts[0] == 'RANGE'
assert isclose(float(pos_parts[1]), ra - width / 2, abs_tol=1e-5)
assert isclose(float(pos_parts[2]), ra + width / 2, abs_tol=1e-5)
assert isclose(float(pos_parts[3]), dec - height / 2, abs_tol=1e-5)
assert isclose(float(pos_parts[4]), dec + height / 2, abs_tol=1e-5)
assert len(pos_parts) == 5
responses = Casda.query_region(centre, width=width * u.deg, height=height * u.deg, cache=False)
assert isinstance(responses, Table)
assert len(responses) == 2
def test_query_region_async_box(patch_get):
ra = 333.9092
dec = -45.8418
width = 0.5
height = 0.2
centre = SkyCoord(ra, dec, unit=('deg', 'deg'))
query_payload = Casda.query_region_async(centre, width=width * u.deg, height=height * u.deg, cache=False,
get_query_payload=True)
assert isinstance(query_payload, dict)
assert 'POS' in query_payload
assert query_payload['POS'].startswith('RANGE 333')
pos_parts = query_payload['POS'].split(' ')
assert pos_parts[0] == 'RANGE'
assert isclose(float(pos_parts[1]), ra - width / 2, abs_tol=1e-5)
assert isclose(float(pos_parts[2]), ra + width / 2, abs_tol=1e-5)
assert isclose(float(pos_parts[3]), dec - height / 2, abs_tol=1e-5)
assert isclose(float(pos_parts[4]), dec + height / 2, abs_tol=1e-5)
assert len(pos_parts) == 5
responses = Casda.query_region_async(centre, width=width * u.deg, height=height * u.deg, cache=False)
assert isinstance(responses, MockResponse)
def test_filter_out_unreleased():
all_records = parse(data_path('partial_unreleased.xml'), verify='warn').get_first_table().to_table()
assert all_records[0]['obs_release_date'] == '2017-08-02T03:51:19.728Z'
assert all_records[1]['obs_release_date'] == '2218-01-02T16:51:00.728Z'
assert all_records[2]['obs_release_date'] == ''
assert len(all_records) == 3
# This should filter out the rows with either a future obs_release_date or no obs_release_date
filtered = Casda.filter_out_unreleased(all_records)
assert filtered[0]['obs_release_date'] == '2017-08-02T03:51:19.728Z'
assert filtered[0]['obs_publisher_did'] == 'cube-502'
assert len(filtered) == 1
def test_stage_data_unauthorised(patch_get):
table = Table()
with pytest.raises(ValueError) as excinfo:
Casda.stage_data(table)
assert "Credentials must be supplied" in str(excinfo.value)
def test_stage_data_empty(patch_get):
table = Table()
casda = Casda('user', 'password')
urls = casda.stage_data(table)
assert urls == []
def test_stage_data_invalid_credentials(patch_get):
prefix = 'https://somewhere/casda/datalink/links?'
access_urls = [prefix + 'cube-220']
table = Table([Column(data=access_urls, name='access_url')])
casda = Casda('user', 'notthepassword')
with pytest.raises(requests.exceptions.HTTPError) as excinfo:
casda.stage_data(table)
def test_stage_data_no_link(patch_get):
prefix = 'https://somewhere/casda/datalink/links?'
access_urls = [prefix + 'cube-240']
table = Table([Column(data=access_urls, name='access_url')])
casda = Casda('user', 'password')
casda.POLL_INTERVAL = 1
with pytest.raises(ValueError) as excinfo:
casda.stage_data(table)
assert "You do not have access to any of the requested data files." in str(excinfo.value)
def test_stage_data(patch_get):
prefix = 'https://somewhere/casda/datalink/links?'
access_urls = [prefix + 'cube-244']
table = Table([Column(data=access_urls, name='access_url')])
casda = Casda('user', 'password')
casda.POLL_INTERVAL = 1
urls = casda.stage_data(table, verbose=True)
assert urls == ['http://casda.csiro.au/download/web/111-000-111-000/askap_img.fits.checksum',
'http://casda.csiro.au/download/web/111-000-111-000/askap_img.fits']<|fim▁end|> | assert isinstance(responses, Table)
assert len(responses) == 3
|
<|file_name|>negative_sampling.py<|end_file_name|><|fim▁begin|><|fim▁hole|>import numpy
import six
from chainer import cuda
from chainer import function
from chainer.utils import type_check
class NegativeSamplingFunction(function.Function):
def __init__(self, sampler, sample_size):
self.sampler = sampler
self.sample_size = sample_size
def _make_samples(self, t):
if hasattr(self, 'samples'):
return self.samples # for testing
size = int(t.shape[0])
# first one is the positive, and others are sampled negatives
samples = self.sampler((size, self.sample_size + 1))
samples[:, 0] = t
self.samples = samples
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 3)
x_type, t_type, w_type = in_types
type_check.expect(
x_type.dtype == numpy.float32,
x_type.ndim == 2,
t_type.dtype == numpy.int32,
t_type.ndim == 1,
x_type.shape[0] == t_type.shape[0],
w_type.dtype == numpy.float32,
w_type.ndim == 2,
)
def forward_cpu(self, inputs):
x, t, W = inputs
self._make_samples(t)
loss = numpy.float32(0.0)
for i, (ix, k) in enumerate(six.moves.zip(x, self.samples)):
w = W[k]
f = w.dot(ix)
f[0] *= -1 # positive sample
loss += numpy.sum(numpy.logaddexp(f, 0))
return numpy.array(loss, numpy.float32),
def forward_gpu(self, inputs):
x, t, W = inputs
n_in = x.shape[1]
self._make_samples(t)
self.wx = cuda.elementwise(
'raw T W, raw T x, S k, int32 c, int32 m', 'T wx',
'''
T f = 0;
for (int j = 0; j < c; ++j) {
int x_ind[] = {(i / m), j};
int w_ind[] = {k, j};
f += x[x_ind] * W[w_ind];
}
wx = f;
''',
'negative_sampling_wx'
)(W, x, self.samples, n_in, self.sample_size + 1)
y = cuda.elementwise(
'T wx, int32 c, int32 m', 'T y',
'''
T f = wx;
if (i % m == 0) {
f = -f;
}
T loss;
if (f < 0) {
loss = __logf(1 + __expf(f));
} else {
loss = f + __logf(1 + __expf(-f));
}
y = loss;
''',
'negative_sampling_forward'
)(self.wx, n_in, self.sample_size + 1)
# TODO(okuta): merge elementwise
loss = cuda.cupy.sum(y)
return loss,
def backward_cpu(self, inputs, grads):
x, t, W = inputs
gloss, = grads
gx = numpy.zeros_like(x)
gW = numpy.zeros_like(W)
for i, (ix, k) in enumerate(six.moves.zip(x, self.samples)):
w = W[k]
f = w.dot(ix)
# g == -y * gloss / (1 + exp(yf))
f[0] *= -1
g = gloss / (1 + numpy.exp(-f))
g[0] *= -1
gx[i] = g.dot(w)
for ik, ig in six.moves.zip(k, g):
gW[ik] += ig * ix
return gx, None, gW
def backward_gpu(self, inputs, grads):
cupy = cuda.cupy
x, t, W = inputs
gloss, = grads
n_in = x.shape[1]
g = cuda.elementwise(
'T wx, raw T gloss, int32 m', 'T g',
'''
T y;
if (i % m == 0) {
y = 1;
} else {
y = -1;
}
g = -y * gloss[0] / (1.0f + __expf(wx * y));
''',
'negative_sampling_calculate_g'
)(self.wx, gloss, self.sample_size + 1)
gx = cupy.zeros_like(x)
cuda.elementwise(
'raw T g, raw T W, raw S k, int32 c, int32 m', 'T gx',
'''
int d = i / c;
T w = 0;
for (int j = 0; j < m; ++j) {
w += g[d * m + j] * W[k[d * m + j] * c + i % c];
}
gx = w;
''',
'negative_sampling_calculate_gx'
)(g, W, self.samples, n_in, self.sample_size + 1, gx)
gW = cupy.zeros_like(W)
cuda.elementwise(
'T g, raw T x, S k, int32 c, int32 m', 'raw T gW',
'''
T gi = g;
for (int j = 0; j < c; ++j) {
atomicAdd(&gW[k * c + j], gi * x[(i / m) * c + j]);
}
''',
'negative_sampling_calculate_gw'
)(g, x, self.samples, n_in, self.sample_size + 1, gW)
return gx, None, gW
def negative_sampling(x, t, W, sampler, sample_size):
"""Negative sampling loss function.
In natural language processing, especially language modeling, the vocabulary
size is very large.
Therefore, you need to spend a lot of time calculating the gradient of the
embedding matrix.
Instead, in negative sampling trick, you only need to calculate the
gradient for a few sampled negative examples.
The objective function is below:
.. math::
f(x, p) = \\log \\sigma(x^\\top w_p) + \\
k E_{i \\sim P(i)}[\\log \\sigma(- x^\\top w_i)],
where :math:`\\sigma(\\cdot)` is a sigmoid function, :math:`w_i` is the
weight vector for the word :math:`i`, and :math:`p` is a positive example.
It is approximated with :math:`k` examples :math:`N` sampled from
probability :math:`P(i)`, like this:
.. math::
f(x, p) \\approx \\log \\sigma(x^\\top w_p) + \\
\\sum_{n \\in N} \\log \\sigma(-x^\\top w_n).
Each sample of :math:`N` is drawn from the word distribution :math:`P(w)`.
This is calculated as :math:`P(w) = \\frac{1}{Z} c(w)^\\alpha`, where
:math:`c(w)` is the unigram count of the word :math:`w`, :math:`\\alpha` is
a hyper-parameter, and :math:`Z` is the normalization constant.
Args:
x (~chainer.Variable): Batch of input vectors.
t (~chainer.Variable): Vector of ground truth labels.
W (~chainer.Variable): Weight matrix.
sampler (~types.FunctionType): Sampling function. It takes a shape and
returns an integer array of the shape. Each element of this array
is a sample from the word distribution.
A :class:`~chainer.utils.WalkerAlias` object built with the power
distribution of word frequency is recommended.
sample_size (int): Number of samples.
See: `Distributed Representations of Words and Phrases and their\
Compositionality <http://arxiv.org/abs/1310.4546>`_
.. seealso:: :class:`~chainer.links.NegativeSampling`.
"""
return NegativeSamplingFunction(sampler, sample_size)(x, t, W)<|fim▁end|> | |
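A hedged usage sketch, following the docstring's recommendation of a chainer.utils.WalkerAlias sampler built from the powered unigram counts; the array shapes follow check_type_forward, and all concrete sizes here are made up for illustration.

import numpy as np
from chainer.utils import WalkerAlias

counts = np.array([10, 5, 3, 2, 1], dtype=np.float64)  # unigram counts c(w)
sampler = WalkerAlias(counts ** 0.75)                  # P(w) proportional to c(w)^0.75

batch, n_units, n_vocab = 4, 8, 5
x = np.random.randn(batch, n_units).astype(np.float32)       # input vectors
t = np.random.randint(n_vocab, size=batch).astype(np.int32)  # positive word ids
W = np.random.randn(n_vocab, n_units).astype(np.float32)     # output embeddings

loss = negative_sampling(x, t, W, sampler.sample, sample_size=2)
print(loss.data)  # scalar float32 loss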
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict';
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
// == Global variables start
// -- screen info
var _wh = getScreenSize();
var gDevWidth = 1920;
var gDevHeight = 1080;
var gTargetWidth = 960;
var gTargetHeight = 540;
var gDeviceWidth = Math.max(_wh.width, _wh.height);
var gDeviceHeight = Math.min(_wh.width, _wh.height);
var gGameWidth = gDeviceWidth;
var gGameHeight = gDeviceHeight;
var gGameOffsetX = 0;
var gGameOffsetY = 0;
if (gDeviceWidth / gDeviceHeight > 1.78) {<|fim▁hole|> gGameWidth = Math.round(gGameHeight * 1.777778);
gGameOffsetX = (gDeviceWidth - gGameWidth) / 2;
} else if (gDeviceWidth / gDeviceHeight < 1.77) {
gGameHeight = Math.round(gGameWidth / 1.777778);
gGameOffsetY = (gDeviceHeight - gGameHeight) / 2;
}
var gRatioTarget = gTargetWidth / gDevWidth;
var gRatioDevice = gGameWidth / gDevWidth;
// -- others
var gZeroColor = { r: 0, g: 0, b: 0 };
// == Global variables en
// Utils for sending message
var gUserPlan = -1;
var gLastSendingTime = 0;
var Utils = function () {
function Utils() {
_classCallCheck(this, Utils);
}
_createClass(Utils, null, [{
key: 'checkCanSendMessage',
value: function checkCanSendMessage() {
gUserPlan = -1;
if (typeof getUserPlan !== 'undefined' && typeof sendNormalMessage !== 'undefined') { // typeof guards avoid a ReferenceError when the host API is missing
gUserPlan = getUserPlan();
}
}
}, {
key: 'canSendMessage',
value: function canSendMessage() {
if (gUserPlan == -1) {
return;
}
var during = Date.now() - gLastSendingTime;
if (gUserPlan >= 0 && during > 60 * 60 * 1000) {
return true;
}
}
}, {
key: 'sendMessage',
value: function sendMessage(topMsg, msg, force) {
if (force || Utils.canSendMessage()) {
gLastSendingTime = Date.now();
if (force) {
console.log(sendUrgentMessage(topMsg, msg));
} else {
console.log(sendNormalMessage(topMsg, msg));
}
}
}
}, {
key: 'nearColor',
value: function nearColor(c, c1, c2) {
var d1 = Math.abs(c1.r - c.r) + Math.abs(c1.g - c.g) + Math.abs(c1.b - c.b);
var d2 = Math.abs(c2.r - c.r) + Math.abs(c2.g - c.g) + Math.abs(c2.b - c.b);
return d1 - d2;
}
}, {
key: 'mergeColor',
value: function mergeColor(c1, c2) {
return {
r: Math.round((c1.r + c2.r) / 2),
g: Math.round((c1.g + c2.g) / 2),
b: Math.round((c1.b + c2.b) / 2)
};
}
}, {
key: 'diffColor',
value: function diffColor(c, c1) {
return Math.abs(c1.r - c.r) + Math.abs(c1.g - c.g) + Math.abs(c1.b - c.b);
}
}, {
key: 'minMaxDiff',
value: function minMaxDiff(c) {
var max = Math.max(Math.max(c.r, c.g), c.b);
var min = Math.min(Math.min(c.r, c.g), c.b);
return max - min;
}
}, {
key: 'isSameColor',
value: function isSameColor(c1, c2) {
var d = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 25;
if (Math.abs(c1.r - c2.r) < d && Math.abs(c1.g - c2.g) < d && Math.abs(c1.b - c2.b) < d) {
return true;
}
return false;
}
}, {
key: 'targetToDevice',
value: function targetToDevice(xy) {
var r = gRatioDevice / gRatioTarget;
return { x: gGameOffsetX + xy.x * r, y: gGameOffsetY + xy.y * r };
}
}]);
return Utils;
}();
Utils.checkCanSendMessage();
var Rect = function () {
function Rect(x1, y1, x2, y2) {
_classCallCheck(this, Rect);
this.x1 = x1;
this.y1 = y1;
this.x2 = x2;
this.y2 = y2;
this.w = x2 - x1;
this.h = y2 - y1;
this.tx = this.x1 * gRatioTarget;
this.ty = this.y1 * gRatioTarget;
this.tw = (this.x2 - this.x1) * gRatioTarget;
this.th = (this.y2 - this.y1) * gRatioTarget;
}
_createClass(Rect, [{
key: 'crop',
value: function crop(img) {
return cropImage(img, this.tx, this.ty, this.tw, this.th);
}
}]);
return Rect;
}();
var Point = function () {
function Point(x, y) {
_classCallCheck(this, Point);
this.x = x;
this.y = y;
this.tx = this.x * gRatioTarget;
this.ty = this.y * gRatioTarget;
this.dx = gGameOffsetX + this.x * gRatioDevice;
this.dy = gGameOffsetY + this.y * gRatioDevice;
}
_createClass(Point, [{
key: 'tap',
value: function (_tap) {
function tap() {
return _tap.apply(this, arguments);
}
tap.toString = function () {
return _tap.toString();
};
return tap;
}(function () {
var times = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 1;
var delay = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
while (times > 0) {
if (delay > 0) {
sleep(delay);
}
tap(this.dx, this.dy, 20);
times--;
}
})
}, {
key: 'tapDown',
value: function (_tapDown) {
function tapDown() {
return _tapDown.apply(this, arguments);
}
tapDown.toString = function () {
return _tapDown.toString();
};
return tapDown;
}(function () {
tapDown(this.dx, this.dy, 20);
})
}, {
key: 'tapUp',
value: function (_tapUp) {
function tapUp() {
return _tapUp.apply(this, arguments);
}
tapUp.toString = function () {
return _tapUp.toString();
};
return tapUp;
}(function () {
tapUp(this.dx, this.dy, 20);
})
}, {
key: 'moveTo',
value: function (_moveTo) {
function moveTo() {
return _moveTo.apply(this, arguments);
}
moveTo.toString = function () {
return _moveTo.toString();
};
return moveTo;
}(function () {
moveTo(this.dx, this.dy, 20);
})
}]);
return Point;
}();
var FeaturePoint = function (_Point) {
_inherits(FeaturePoint, _Point);
// need: true => should exist, false => should not exist
function FeaturePoint(x, y, r, g, b, need) {
var diff = arguments.length > 6 && arguments[6] !== undefined ? arguments[6] : 25;
_classCallCheck(this, FeaturePoint);
var _this = _possibleConstructorReturn(this, (FeaturePoint.__proto__ || Object.getPrototypeOf(FeaturePoint)).call(this, x, y));
_this.r = r;
_this.g = g;
_this.b = b;
_this.d = diff;
_this.need = need;
return _this;
}
_createClass(FeaturePoint, [{
key: 'check',
value: function check(img) {
var c = getImageColor(img, this.tx, this.ty);
if (this.need && !Utils.isSameColor(c, this, this.d)) {
return false;
} else if (!this.need && Utils.isSameColor(c, this)) {
return false;
}
return true;
}
}, {
key: 'print',
value: function print(img) {
var c = getImageColor(img, this.tx, this.ty);
console.log('target', this.tx, this.ty, 'param', this.x + ', ' + this.y + ', ' + c.r + ', ' + c.g + ', ' + c.b + ', true');
}
}]);
return FeaturePoint;
}(Point);
var PageFeature = function () {
function PageFeature(name, featurPoints) {
_classCallCheck(this, PageFeature);
this.name = name || 'Unknown';
this.featurPoints = featurPoints || [];
}
_createClass(PageFeature, [{
key: 'check',
value: function check(img) {
for (var i = 0; i < this.featurPoints.length; i++) {
var _p = this.featurPoints[i];
if (!_p.check(img)) {
return false;
}
}
return true;
}
}, {
key: 'print',
value: function print(img) {
for (var i = 0; i < this.featurPoints.length; i++) {
var _p2 = this.featurPoints[i];
_p2.print(img);
}
}
}, {
key: 'tap',
value: function tap() {
var idx = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0;
this.featurPoints[idx].tap();
}
}]);
return PageFeature;
}();
var GameInfo = function GameInfo() {
_classCallCheck(this, GameInfo);
this.hpBarRect = new Rect(122, 30, 412, 51);
this.mpBarRect = new Rect(122, 58, 412, 72);
this.expBarRect = new Rect(16, 1070, 1904, 1072);
this.zeroRect = new Rect(0, 0, 1, 1);
this.mapRect = new Rect(384, 217, 1920, 937); // 1536, 720
this.regionTypeRect = new Rect(1710, 470, 1816, 498);
this.storeHpRect = new Rect(78, 274, 80 + 122, 276 + 122);
this.mapSelector = new Rect(56, 339, 350, 937); // h 112
this.moneyRect = new Rect(990, 40, 1150, 80);
this.centerRect = new Rect(600, 200, 1400, 800);
this.storeOther = new Point(510, 220);
this.store10 = new Point(670, 970);
this.store100 = new Point(900, 970);
this.store1000 = new Point(1100, 970);
this.storeMax = new Point(1300, 970);
this.storeHp = new Point(150, 330);
this.storeArrow = new Point(260, 560);
this.storeBuy = new Point(1600, 970);
this.storeBuy2 = new Point(1130, 882);
this.storeSelfOrder = new Point(200, 970);
this.storeBuyOrder = new Point(1500, 970);
this.storeBuyOrder2 = new Point(1750, 970);
this.storeSpecial = new Point(1140, 340);
this.getReward = new Point(1680, 320);
this.signAlliance = new Point(1820, 252);
this.itemBtns = [new Point(730, 960), new Point(840, 960), new Point(960, 960), new Point(1060, 960), new Point(1180, 960), new Point(1400, 960), new Point(1510, 960), new Point(1620, 960), new Point(1730, 960), new Point(1840, 960), new Point(1280, 960)];
this.unknownBtn = new Point(1100, 800);
this.mapBtn = new Point(1740, 300);
this.mapDetailBtn = new Point(700, 160);
this.mapController = new Point(290, 860);
this.mapControllerL = new Point(190, 860);
this.mapControllerR = new Point(390, 860);
this.mapControllerT = new Point(290, 760);
this.mapControllerB = new Point(290, 960);
this.mapMoveBtn = new Point(1588, 986);
this.mapFloorBtn = new Point(1120, 886);
this.storeMode = new PageFeature('storeMode', [new FeaturePoint(116, 862, 224, 155, 46, true, 32), new FeaturePoint(223, 862, 28, 45, 70, true, 32), new FeaturePoint(196, 946, 43, 33, 17, true, 32), new FeaturePoint(692, 710, 0, 0, 0, true, 32), new FeaturePoint(830, 710, 0, 0, 0, true, 32), new FeaturePoint(1487, 944, 25, 22, 16, true, 32)]);
this.menuOffEvent = new PageFeature('menuOffEvent', [new FeaturePoint(1850, 56, 173, 166, 147, true, 80), new FeaturePoint(1850, 66, 173, 166, 147, true, 80), new FeaturePoint(1860, 76, 173, 166, 147, true, 80), new FeaturePoint(1880, 42, 242, 30, 26, true, 30)]);
this.menuSign = new PageFeature('menuOpenSign', [new FeaturePoint(1652, 250, 242, 30, 26, true, 80)]);
this.menuMail = new PageFeature('menuOpenMail', [new FeaturePoint(1652, 466, 242, 30, 26, true, 80)]);
this.menuAlliance = new PageFeature('menuOpenAlliance', [new FeaturePoint(1418, 360, 242, 30, 26, true, 80)]);
this.menuOnBtn = new PageFeature('menuOn', [new FeaturePoint(1844, 56, 245, 245, 241, true, 30), new FeaturePoint(1844, 66, 128, 70, 56, true, 30), new FeaturePoint(1844, 76, 245, 220, 215, true, 30)]);
this.menuOffBtn = new PageFeature('menuOff', [new FeaturePoint(1850, 56, 173, 166, 147, true, 80), new FeaturePoint(1850, 66, 173, 166, 147, true, 80), new FeaturePoint(1860, 76, 173, 166, 147, true, 80)]);
this.autoPlayBtn = new PageFeature('autoPlayOff', [new FeaturePoint(1430, 768, 140, 154, 127, true, 60), new FeaturePoint(1476, 772, 140, 157, 130, true, 60)]);
this.killNumber = new PageFeature('killNumber', [new FeaturePoint(1678, 538, 65, 62, 45, true, 60), new FeaturePoint(1780, 554, 235, 83, 44, true, 40), new FeaturePoint(1810, 554, 220, 59, 39, true, 40), new FeaturePoint(1804, 532, 255, 186, 142, true, 40)]);
this.selfSkillBtn = new PageFeature('selfSkillOff', [new FeaturePoint(1594, 601, 141, 147, 137, true, 60), new FeaturePoint(1591, 624, 117, 128, 114, true, 60)]);
this.attackBtn = new PageFeature('attackOff', [new FeaturePoint(1634, 769, 165, 180, 170, true, 60)]);
this.disconnectBtn = new PageFeature('disconnect', [new FeaturePoint(840, 880, 34, 51, 79, true, 20), new FeaturePoint(1080, 880, 34, 51, 79, true, 20), new FeaturePoint(1170, 880, 31, 20, 14, true, 20), new FeaturePoint(1150, 916, 31, 24, 14, true, 20)]);
this.loginBtn = new PageFeature('login', [new FeaturePoint(335, 310, 236, 175, 110, true, 40), new FeaturePoint(430, 415, 161, 123, 78, true, 40), new FeaturePoint(140, 145, 60, 55, 55, true, 40), new FeaturePoint(280, 191, 140, 100, 90, true, 40)]);
this.enterBtn = new PageFeature('enter', [new FeaturePoint(1480, 990, 31, 47, 70, true, 20), new FeaturePoint(1750, 990, 31, 47, 70, true, 20), new FeaturePoint(1690, 990, 31, 47, 70, true, 20)]);
this.beAttacked = new PageFeature('beAttacked', [new FeaturePoint(1616, 744, 210, 90, 50, true, 45), new FeaturePoint(1676, 744, 210, 90, 50, true, 45), new FeaturePoint(1666, 756, 210, 90, 50, true, 45), new FeaturePoint(1624, 750, 210, 90, 50, true, 45), new FeaturePoint(1800, 818, 240, 160, 140, true, 30), new FeaturePoint(1634, 769, 165, 180, 170, false, 50)]);
this.storeExceed = new PageFeature('storeExceed', [new FeaturePoint(1102, 812, 33, 23, 0, true, 40)]);
};
var RoleState = function () {
function RoleState(gi) {
_classCallCheck(this, RoleState);
this.gi = gi;
this.lastHP = 0;
this.lastMP = 0;
this.hp = 0;
this.mp = 0;
this.exp = 0;
this.isDisconnect = false;
this.isLogin = false;
this.isEnter = false;
this.isMenuOn = false;
this.isMenuOff = false;
this.lastSafeRegion = false;
this.isSafeRegion = false;
this.isAutoPlay = false;
this.isAttacking = false;
this.isSelfSkill = false;
this.isAttacked = false;
this.hasKillNumber = false;
this.autoPlayOffCount = 5;
this.isPoison = false;
this.movingScore = 0.9;
this.isMovingCount = 0;
this.shouldTapMiddle = true; // whether to tap the middle of the screen or press back next time
}
_createClass(RoleState, [{
key: 'print',
value: function print() {
if (Math.abs(this.lastHP - this.hp) > 5 || Math.abs(this.lastMP - this.mp) > 5) {
console.log('HP: ' + this.hp + ', MP: ' + this.mp);
this.lastHP = this.hp;
this.lastMP = this.mp;
}
}
}]);
return RoleState;
}();
var LineageM = function () {
function LineageM(config) {
_classCallCheck(this, LineageM);
this.config = config || { conditions: [] };
this.gi = new GameInfo();
this.rState = new RoleState(this.gi);
this.localPath = getStoragePath() + '/scripts/com.r2studio.LineageM/images';
this._loop = false;
this._img = 0;
this.refreshScreen();
// load images
this.images = {
safeRegion: openImage(this.localPath + '/safeRegionType.png'),
normalRegion: openImage(this.localPath + '/normalRegionType.png'),
hpWater: openImage(this.localPath + '/hp.png'),
store: openImage(this.localPath + '/store.png'),
store2: openImage(this.localPath + '/store2.png'),
arrow: openImage(this.localPath + '/arrow.png'),
floor1: openImage(this.localPath + '/floor1.png'),
floor2: openImage(this.localPath + '/floor2.png')
};
// this.gi.menuOffEvent.print(this._img);
this.tmpExp = 0;
this.isRecordLocation = false;
}
_createClass(LineageM, [{
key: 'safeSleep',
value: function safeSleep(t) {
while (this._loop && t > 0) {
t -= 100;
sleep(100);
}
}
}, {
key: 'refreshScreen',
value: function refreshScreen() {
var startTime = Date.now();
var newImg = getScreenshotModify(gGameOffsetX, gGameOffsetY, gGameWidth, gGameHeight, gTargetWidth, gTargetHeight, 80);
if (this._img !== 0) {
if (this.config.grabMonster) {
var s = getIdentityScore(this._img, newImg);
if (this.rState.movingScore - s > 0.05) {
this.rState.isMovingCount++;
} else {
this.rState.isMovingCount = 0;
}
this.rState.movingScore = this.rState.movingScore * 0.95 + s * 0.05;
}
releaseImage(this._img);
this._img = 0;
}
this._img = newImg;
if (Date.now() - startTime < 120) {
sleep(120);
}
return this._img;
}
}, {
key: 'checkIsSystemPage',
value: function checkIsSystemPage() {
if (this.rState.isLogin) {
console.log('Logging in, waiting 2 seconds');
this.gi.loginBtn.tap();
this.safeSleep(2 * 1000);
return true;
}
if (this.rState.isEnter) {
console.log('Entering the game, waiting 10 seconds');
this.gi.enterBtn.tap();
this.safeSleep(10 * 1000);
return true;
}
if (this.rState.isDisconnect) {
console.log('Reconnecting, waiting 10 seconds');
this.gi.disconnectBtn.tap();
this.safeSleep(10 * 1000);
return true;
}
if (!this.rState.isMenuOn && !this.rState.isMenuOff) {
if (this.rState.shouldTapMiddle) {
console.log('Unknown state, tapping around, waiting 5 seconds');
this.gi.enterBtn.tap();
this.safeSleep(5 * 1000);
this.rState.shouldTapMiddle = false;
return true;
} else {
console.log('Unknown state, waiting 5 seconds');
keycode('BACK', 100);
this.safeSleep(5 * 1000);
this.rState.shouldTapMiddle = true;
return true;
}
}
return false;
}
}, {
key: 'checkBeAttacked',
value: function checkBeAttacked() {
if (this.config.beAttackedRandTeleport && this.gi.beAttacked.check(this._img)) {
var c = getImageColor(this._img, this.gi.zeroRect.tx, this.gi.zeroRect.ty);
if (c.r > (c.g + c.b) / 2) {
console.log('Warning! You are being attacked, using button 7');
this.gi.itemBtns[6].tap();
this.safeSleep(2000);
return true;
}
}
return false;
}
}, {
key: 'updateGlobalState',
value: function updateGlobalState() {
this.rState.isDisconnect = this.gi.disconnectBtn.check(this._img);
this.rState.isLogin = this.gi.loginBtn.check(this._img);
this.rState.isEnter = this.gi.enterBtn.check(this._img);
if (this.rState.isDisconnect || this.rState.isLogin || this.rState.isEnter) {
return;
}
this.rState.isMenuOn = this.gi.menuOnBtn.check(this._img);
this.rState.isMenuOff = this.gi.menuOffBtn.check(this._img);
// console.log(this.rState.isMenuOn, this.rState.isMenuOff);
if (!this.rState.isMenuOn && !this.rState.isMenuOff) {
return;
}
if (this.rState.isMenuOn) {
return;
}
this.rState.hp = this.getHpPercent();
if (this.rState.hp < 30 && this.rState.hp > 0.1) {
sleep(300);
this.refreshScreen();
this.rState.hp = this.getHpPercent();
}
this.rState.mp = this.getMpPercent();
// this.rState.exp = this.getExpPercent();
this.rState.isSafeRegion = this.isSafeRegionState();
this.rState.isAttacking = !this.gi.attackBtn.check(this._img);
this.rState.isSelfSkill = !this.gi.selfSkillBtn.check(this._img);
this.rState.hasKillNumber = this.gi.killNumber.check(this._img);
if (this.gi.autoPlayBtn.check(this._img)) {
this.rState.autoPlayOffCount++;
} else {
this.rState.autoPlayOffCount = 0;
}
if (this.rState.autoPlayOffCount > 4) {
this.rState.isAutoPlay = false;
} else {
this.rState.isAutoPlay = true;
}
this.rState.print();
}
}, {
key: 'checkCondiction',
value: function checkCondiction() {
for (var i = 0; i < this.config.conditions.length && this._loop; i++) {
var cd = this.config.conditions[i];
if (cd.useTime === undefined) {
cd.useTime = 0;
}
if (!cd.enabled) {
continue;
}
if (Date.now() - cd.useTime < cd.interval) {
continue;
}
var value = this.rState[cd.type];
if (value < 0.1) {
continue;
}
if (cd.type === 'exp') {
if (this.rState.exp !== this.tmpExp) {
this.gi.itemBtns[cd.btn].tap(1, 50);
console.log('Using button ' + (cd.btn + 1) + ', condition ' + cd.type + ' ' + (cd.op === 1 ? 'greater than' : 'less than') + ' ' + cd.value + ' (' + value + ')');
cd.useTime = Date.now();
break;
}
} else if (value * cd.op > cd.value * cd.op) {
if (cd.btn >= 0 && cd.btn < this.gi.itemBtns.length) {
if (cd.btn === 7 && this.rState.isSafeRegion && !this.rState.isAttacking) {
continue;
}
this.gi.itemBtns[cd.btn].tap(1, 50);
console.log('Using button ' + (cd.btn + 1) + ', condition ' + cd.type + ' ' + (cd.op === 1 ? 'greater than' : 'less than') + ' ' + cd.value + ' (' + value + ')');
cd.useTime = Date.now();
break;
}
}
}
}
}, {
key: 'start',
value: function start() {
this._loop = true;
var goBackTime = Date.now();
var useHomeTime = Date.now();
var poisonTime = Date.now();
var tmpTime = Date.now();
var noMonsterTime = Date.now();
var isBuy = false;
var receiveTime = 0;
while (this._loop) {
sleep(100);
this.refreshScreen();
if (this.checkBeAttacked()) {
this.sendDangerMessage('You are being attacked, used a teleport scroll');
continue;
}
this.updateGlobalState();
if (this.checkIsSystemPage()) {
continue;
}
if (this.rState.isMenuOn) {
console.log('Closing menu');
this.gi.menuOnBtn.tap();
this.safeSleep(500);
continue;
}
// go home (8th btn), rand teleport (7th btn)
if (this.rState.isSafeRegion && !this.rState.isAttacking) {
var isAttacking = true;
for (var i = 0; i < 2; i++) {
this.safeSleep(1000);
this.refreshScreen();
this.rState.isAttacking = !this.gi.attackBtn.check(this._img);
if (!this.rState.isAttacking) {
isAttacking = false;
break;
}
}
if (this.rState.isAutoPlay) {
console.log('Safe region, turning off auto attack', this.rState.autoPlayOffCount);
this.gi.autoPlayBtn.tap();
sleep(1000);
continue;
}
if (!isAttacking) {
if (!isBuy && this.config.autoBuyFirstSet) {
this.checkAndBuyItems();
isBuy = true;
} else if (this.config.inHomeUseBtn && Date.now() - useHomeTime > 4000) {
this.gi.itemBtns[6].tap();
useHomeTime = Date.now();
} else if (this.config.mapSelect > 0 && this.rState.hp > 40) {
console.log('Moving to map', this.config.mapSelect);
this.goToMapPage();
this.slideMapSelector(this.config.mapSelect);
}
}
} else {
if (this.rState.isAttacking) {
noMonsterTime = Date.now();
}
isBuy = false;
if (this.config.dangerousGoHome && this.rState.hp < 25 && this.rState.hp > 0.1) {
this.gi.itemBtns[7].tap(1, 100);
this.safeSleep(1000);
console.log('Danger, HP below 25%, using button 8');
this.sendDangerMessage('Danger, HP below 25%, going home');
continue;
}
if (!this.rState.isAutoPlay && this.config.autoAttack) {
console.log('Turning on auto attack');
this.gi.autoPlayBtn.tap();
this.rState.autoPlayOffCount = 0;
sleep(600);
continue;
}
if (this.config.autoUseAntidote && this.gi.isPoison && Date.now() - poisonTime > 1500) {
console.log('Poisoned, using antidote, button 6');
sleep(500);
this.gi.itemBtns[5].tap();
poisonTime = Date.now();
continue;
}
var cd = this.config.conditions[0];
if (this.config.grabMonster && this.rState.isAttacking && this.rState.isMovingCount > 0 && Date.now() - tmpTime > cd.interval) {
tmpTime = Date.now();
var value = this.rState[cd.type];
if (value > 0.1 && value * cd.op > cd.value * cd.op) {
this.gi.itemBtns[cd.btn].tap(1, 50);
console.log('尋找怪物, 使用按鈕 1');
this.gi.itemBtns[0].tap();
} else {
console.log('Looking for monsters, HP/MP condition not met');
}
continue;
}
if (this.config.autoTeleport && Date.now() - noMonsterTime > 6000) {
console.log('No monsters, using button 7');
noMonsterTime = Date.now();
this.gi.itemBtns[7 - 1].tap(2, 200);
continue;
}
}
// console.log('Check conditions');
this.checkCondiction();
if (this.config.autoReceiveReward && Date.now() - receiveTime > 300 * 1000) {
this.checkAndAutoGetReward();
receiveTime = Date.now();
}
this.sendMoneyInfo();
if (this.rState.lastSafeRegion != this.rState.isSafeRegion) {
this.rState.lastSafeRegion = this.rState.isSafeRegion;
if (this.rState.lastSafeRegion) {
console.log('Safe region');
}
}
if (this.rState.isSafeRegion) {
continue;
}
if (this.config.goBackInterval != 0 && !this.isRecordLocation) {
console.log('Recording current location');
this.goToMapPage();
this.recordCurrentLocation();
this.gi.menuOnBtn.tap();
this.isRecordLocation = true;
continue;
}
// go back to record location
if (this.config.goBackInterval != 0 && Date.now() - goBackTime > this.config.goBackInterval) {
console.log('Trying to walk back to the recorded point');
this.goToMapPage();
var diffXY = this.getDiffRecordLocation();
this.gi.menuOnBtn.tap();
sleep(1000);
console.log(JSON.stringify(diffXY));
if (diffXY !== undefined) {
this.goMap(-diffXY.x, -diffXY.y);
}
goBackTime = Date.now();
}
}
}
}, {
key: 'waitForChangeScreen',
value: function waitForChangeScreen() {
var score = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0.8;
var maxSleep = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 10000;
var oriImg = clone(this._img);
for (var i = 0; i < maxSleep / 500 && this._loop; i++) {
sleep(400);
this.refreshScreen();
var s = getIdentityScore(this._img, oriImg);
if (s < score) {
break;
}
}
releaseImage(oriImg);
}
}, {
key: 'goToMapPage',
value: function goToMapPage() {
this.gi.mapBtn.tap();
this.waitForChangeScreen();
this.gi.mapDetailBtn.tap();
this.waitForChangeScreen(0.8, 2000);
console.log('Map screen');
}
}, {
key: 'stop',
value: function stop() {
this._loop = false;
releaseImage(this._img);
for (var k in this.images) {
releaseImage(this.images[k]);
}
}
}, {
key: 'sendDangerMessage',
value: function sendDangerMessage(msg) {
console.log('Sending danger message...');
var centerImg = this.gi.centerRect.crop(this._img);
var rmi = resizeImage(centerImg, this.gi.centerRect.w / 2, this.gi.centerRect.h / 2);
var base64 = getBase64FromImage(rmi);
releaseImage(rmi);
releaseImage(centerImg);
Utils.sendMessage('Lineage M Danger', base64, true);
}
}, {
key: 'sendMoneyInfo',
value: function sendMoneyInfo() {
if (Utils.canSendMessage()) {
console.log('Sending money info...');
var moneyImg = this.gi.moneyRect.crop(this._img);
var rmi = resizeImage(moneyImg, this.gi.moneyRect.w / 2, this.gi.moneyRect.h / 2);
var base64 = getBase64FromImage(rmi);
releaseImage(rmi);
releaseImage(moneyImg);
Utils.sendMessage('Lineage M', base64);
}
}
}, {
key: 'checkAndBuyItems',
value: function checkAndBuyItems() {
var tryTimes = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 10;
console.log('Trying to buy items');sleep(500);
this.refreshScreen();
for (var i = 0; i < tryTimes && this._loop; i++) {
if (i == 4) {
console.log('Moving to the oasis, making sure a merchant is there, waiting 4 seconds');
this.goToMapPage();
this.slideMapSelector(41);
this.safeSleep(3000);
console.log('At the oasis, moving up a bit');
this.gi.mapController.tapDown();
this.safeSleep(1500);
this.gi.mapControllerT.moveTo();
this.safeSleep(1500);
this.gi.mapControllerT.tapUp();
this.safeSleep(2200);
this.refreshScreen();
console.log('Looking for a store');
var _storeType = this.findStore();
console.log('storeType', _storeType);
if (_storeType === 1) {
this.buyItems();
this.refreshScreen();
this.gi.itemBtns[7].tap();
this.safeSleep(2000);
break;
}
this.gi.itemBtns[7].tap();
this.safeSleep(2000);
}
var storeType = this.findStore();
if (storeType === 1) {
this.safeSleep(1000);
this.buyItems();
this.refreshScreen();
break;
} else if (storeType === 2) {
this.buyItems();
this.refreshScreen();
// this.gi.itemBtns[7].tap();
// this.safeSleep(4000);
// this.refreshScreen();
} else if (i < tryTimes - 1) {
console.log('Store not found, trying again');
this.gi.itemBtns[7].tap();
this.safeSleep(4000);
this.refreshScreen();
}
}
}
// 0 = no store, 1 = grocery store, 2 = others
}, {
key: 'findStore',
value: function findStore() {
var stores1 = findImages(this._img, this.images.store, 0.89, 4, true);
var stores2 = findImages(this._img, this.images.store2, 0.89, 4, true);
var stores = stores1.concat(stores2);
for (var k in stores) {
if (!this._loop) {
return false;
}
var blueCount = 0;
var sx = stores[k].x;
var sy = stores[k].y;
if (sx < 280 && sy < 144) {
continue;
}
if (sx > 790 && sy < 260) {
continue;
}
// check whether this is actually the store we want
for (var i = 0; i < 10; i++) {
if (sx + 10 >= gTargetWidth || sy + 67 + i >= gTargetHeight) {
break;
}
var color = getImageColor(this._img, sx + 10, sy + 67 + i);
if (color.b * 2 > color.g + color.r && color.b > color.r + 30) {
blueCount++;
}
}
if (blueCount < 4) {
continue;
}
var dXY = Utils.targetToDevice(stores[k]);
console.log('Might be a store, opening it to check');
tap(dXY.x + 5, dXY.y + 5, 50);
this.waitForChangeScreen(0.7, 7000);if (!this._loop) {
return false;
}
this.safeSleep(2000);
this.refreshScreen();
if (this.gi.storeMode.check(this._img)) {
var testHpImg = this.gi.storeHpRect.crop(this._img);
var results = findImages(testHpImg, this.images.hpWater, 0.88, 1);
releaseImage(testHpImg);
console.log('Is this the grocery store?', results.length > 0 ? results[0].score : 0);
if (results.length > 0 && results[0].score > 0.88) {
console.log('Found the grocery store (method 1)');
return 1;
} else {
// find method 2
var redCount = 0;
for (var y = 160; y < 176; y++) {
var _color = getImageColor(this._img, 70, y);
if (1.2 * _color.r > _color.g + _color.b) {
redCount++;
}
}
if (redCount > 10) {
console.log('Found the grocery store (method 2)');
return 1;
}
}
} else {
console.log('Not a store, trying the next one');
}
if (this.gi.menuOnBtn.check(this._img)) {
this.gi.menuOnBtn.tap();
}
this.safeSleep(2000);
continue;
}
return 0;
}
}, {
key: 'buyItems',
value: function buyItems() {
console.log('Buying the custom order list');
this.gi.storeSelfOrder.tap();
sleep(2000);if (!this._loop) {
return false;
}
this.gi.storeBuyOrder.tap();
sleep(2000);if (!this._loop) {
return false;
}
this.gi.storeBuyOrder2.tap();
sleep(2000);if (!this._loop) {
return false;
}
this.gi.storeBuy2.tap();
sleep(2000);if (!this._loop) {
return false;
}
console.log('Finished buying the custom order list');
this.gi.menuOnBtn.tap();
return true;
}
// utils
}, {
key: 'cropAndSave',
value: function cropAndSave(filename, rect) {
var img = rect.crop(this._img);
saveImage(img, this.localPath + '/lineageM/' + filename);
releaseImage(img);
}
// globalState 764 240 812 240
}, {
key: 'isSafeRegionState',
value: function isSafeRegionState() {
var bColor = 0;
var rColor = 0;
var gColor = 0; //gray
for (var x = 850; x < 900; x += 2) {
var color = getImageColor(this._img, x, 241);
if (color.b > color.g + color.r) {
// 18
bColor++;
continue;
}
if (color.r > color.g + color.b) {
// 20
rColor++;
continue;
}
if (color.r > 80 && color.g > 80 && color.b > 80) {
// 12
gColor++;
}
}
if (gColor > bColor || rColor > bColor) {
return false;
}
var greenColor = 0;
var orangeColor = 0;
for (var _x9 = 764; _x9 < 812; _x9++) {
var _color2 = getImageColor(this._img, _x9, 240);
if (_color2.b > 86 && _color2.b < 110 && _color2.r < 60 && _color2.g > 140 && _color2.g < 200) {
greenColor++;
}
if (_color2.b < 30 && _color2.r > 200 && _color2.g > 90 && _color2.g < 130) {
orangeColor++;
}
}
if (greenColor > 6 || orangeColor > 6) {
return false;
}
return true;
}
}, {
key: 'checkAndAutoGetReward',
value: function checkAndAutoGetReward() {
if (!this.gi.menuOffEvent.check(this._img)) {
return;
}
this.gi.menuOffEvent.tap();
this.waitForChangeScreen(0.95, 3000);
if (!this._loop) {
return;
}
if (this.gi.menuMail.check(this._img)) {
console.log('Auto-collecting rewards: mailbox');
this.gi.menuMail.tap();
this.waitForChangeScreen(0.9, 5000);
if (!this._loop) {
return;
}
this.gi.getReward.tap();this.safeSleep(1000);
this.gi.getReward.tap();this.safeSleep(1000);
this.gi.getReward.tap();this.safeSleep(1000);
this.gi.getReward.tap();this.safeSleep(1000);
this.gi.menuOnBtn.tap();
this.waitForChangeScreen(0.95, 5000);
}
if (this.gi.menuSign.check(this._img)) {
console.log('Auto-collecting rewards: daily sign-in');
this.gi.menuSign.tap();
this.waitForChangeScreen(0.95, 5000);
if (!this._loop) {
return;
}
this.gi.getReward.tap();this.safeSleep(500);
this.safeSleep(5000);
if (!this._loop) {
return;
}
this.gi.getReward.tap();this.safeSleep(500);
this.gi.menuOnBtn.tap();
this.waitForChangeScreen(0.95, 5000);
}
if (this.gi.menuAlliance.check(this._img)) {
console.log('Auto-collecting rewards: alliance');
this.gi.menuAlliance.tap();
this.waitForChangeScreen(0.9, 5000);
if (!this._loop) {
return;
}
this.gi.signAlliance.tap();
this.safeSleep(3000);
if (!this._loop) {
return;
}
this.gi.menuOnBtn.tap();
this.waitForChangeScreen(0.95, 5000);
}
}
// HP MP EXP
}, {
key: 'getHpPercent',
value: function getHpPercent() {
return this.getBarPercent(this.gi.hpBarRect, 70, 14, true);
}
}, {
key: 'getMpPercent',
value: function getMpPercent() {
return this.getBarPercent(this.gi.mpBarRect, 70, 70);
}
}, {
key: 'getExpPercent',
value: function getExpPercent() {
return this.getBarPercent(this.gi.expBarRect, 70, 70);
}
}, {
key: 'getBarPercent',
value: function getBarPercent(barRect, b1, b2) {
var poison = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : false;
var bar = cropImage(this._img, barRect.tx, barRect.ty, barRect.tw, barRect.th);
var y1 = barRect.th / 3;
var y2 = barRect.th / 3 * 2;
var fc = Utils.mergeColor(getImageColor(bar, 0, y1), getImageColor(bar, 0, y2));
var bright1 = 0;
var bright2 = 0;
for (var x = 0; x < barRect.tw; x += 1) {
var c = Utils.mergeColor(getImageColor(bar, x, y1), getImageColor(bar, x, y2));
var d = Utils.minMaxDiff(c);
if (d > b1) {
bright1++;
}
if (d > b2) {
bright2++;
}
}
releaseImage(bar);
if (fc.g > fc.r) {
if (poison) {
this.gi.isPoison = true;
}
return (bright2 / barRect.tw * 100).toFixed(0);
} else {
if (poison) {
this.gi.isPoison = false;
}
return (bright1 / barRect.tw * 100).toFixed(0);
}
}
// MAP
}, {
key: 'goMap',
value: function goMap(disX, disY) {
var max = 20000;
if (Math.abs(disX) < 30 && Math.abs(disY) < 30) {
return;
}
var timeL = 3000;var timeR = 3000;var timeT = 3000;var timeB = 3000;
if (disX >= 0 && disX > 30) {
timeR += Math.min(1600 * Math.abs(disX) / 10, max);
} else if (disX < 0 && disX < -30) {
timeL += Math.min(1600 * Math.abs(disX) / 10, max);
}
if (disY >= 0 && disY > 30) {
timeB += Math.min(1600 * Math.abs(disY) / 10, max);
} else if (disY < 0 && disY < -30) {
timeT += Math.min(1600 * Math.abs(disY) / 10, max);
}
var times = Math.ceil((timeL + timeR + timeT + timeB) / 24000);
console.log('left', timeL, 'right', timeR, 'up', timeT, 'down', timeB, times);
var tl = Math.ceil(timeL / times);
var tr = Math.ceil(timeR / times);
var tt = Math.ceil(timeT / times);
var tb = Math.ceil(timeB / times);
this.gi.mapController.tapDown();
for (var t = 0; t < times && this._loop; t++) {
if (timeL > 100) {
console.log('Moving left', tl);
this.gi.mapControllerL.moveTo();
this.gi.mapControllerL.moveTo();
this.safeSleep(tl);
timeL -= tl;
}
if (timeT > 100) {
console.log('Moving up', tt);
this.gi.mapControllerT.moveTo();
this.gi.mapControllerT.moveTo();
this.safeSleep(tt);
timeT -= tt;
}
if (timeR > 100) {
console.log('Moving right', tr);
this.gi.mapControllerR.moveTo();
this.gi.mapControllerR.moveTo();
this.safeSleep(tr);
timeR -= tr;
}
if (timeB > 100) {
console.log('Moving down', tb);
this.gi.mapControllerB.moveTo();
this.gi.mapControllerB.moveTo();
this.safeSleep(tb);
timeB -= tb;
}
}
this.gi.mapController.tapUp();
}
}, {
key: 'recordCurrentLocation',
value: function recordCurrentLocation() {
var p = new Point(768, 360);
var rect1 = new Rect(p.x - 120, p.y - 90, p.x - 30, p.y - 30); // left top
var rect2 = new Rect(p.x + 30, p.y - 90, p.x + 120, p.y - 30); // right top
var rect3 = new Rect(p.x - 120, p.y + 30, p.x - 30, p.y + 90); // left bottom
var rect4 = new Rect(p.x + 30, p.y + 30, p.x + 120, p.y + 90); // right bottom
var img1 = cropImage(this._img, rect1.tx, rect1.ty, rect1.tw, rect1.th);
var img2 = cropImage(this._img, rect2.tx, rect2.ty, rect2.tw, rect2.th);
var img3 = cropImage(this._img, rect3.tx, rect3.ty, rect3.tw, rect3.th);
var img4 = cropImage(this._img, rect4.tx, rect4.ty, rect4.tw, rect4.th);
saveImage(img1, this.localPath + '/mapRecord1.png');
saveImage(img2, this.localPath + '/mapRecord2.png');
saveImage(img3, this.localPath + '/mapRecord3.png');
saveImage(img4, this.localPath + '/mapRecord4.png');
releaseImage(img1);releaseImage(img2);releaseImage(img3);releaseImage(img4);
}
}, {
key: 'getDiffRecordLocation',
value: function getDiffRecordLocation() {
var result = undefined;
for (var i = 0; i < 3; i++) {
result = this.findDiffRecordLocation();
if (result !== undefined) {
break;
}
sleep(1000);
this.refreshScreen();
}
if (result === undefined) {
                console.log('Could not find the recorded location');
return { x: 0, y: 0 };
}
return result;
}
}, {
key: 'findDiffRecordLocation',
value: function findDiffRecordLocation() {
var p = new Point(768, 360);
var images = [openImage(this.localPath + '/mapRecord1.png'), openImage(this.localPath + '/mapRecord2.png'), openImage(this.localPath + '/mapRecord3.png'), openImage(this.localPath + '/mapRecord4.png')];
var findXYs = [];
for (var i = 0; i < images.length; i++) {
if (images[i] === 0) {
                    console.log('Unable to load the recorded map location');
return;
}
var xy = findImage(this._img, images[i]);
switch (i) {
case 0:
xy.x = p.x - xy.x / gRatioTarget - 120;
xy.y = p.y - xy.y / gRatioTarget - 90;
break;
case 1:
xy.x = p.x - xy.x / gRatioTarget + 30;
xy.y = p.y - xy.y / gRatioTarget - 90;
break;
case 2:
xy.x = p.x - xy.x / gRatioTarget - 120;
xy.y = p.y - xy.y / gRatioTarget + 30;
break;
case 3:
xy.x = p.x - xy.x / gRatioTarget + 30;
xy.y = p.y - xy.y / gRatioTarget + 30;
break;
}
findXYs.push(xy);
releaseImage(images[i]);
}
var finalXY = undefined;
for (var _i = 0; _i < findXYs.length; _i++) {
var count = 0;
for (var j = 0; j < findXYs.length; j++) {
if (Math.abs(findXYs[_i].x - findXYs[j].x) < 30 && Math.abs(findXYs[_i].y - findXYs[j].y) < 30) {
count++;
}
}
if (count > 1) {
finalXY = findXYs[_i];
}
}
if (finalXY !== undefined) {
// console.log(JSON.stringify(findXYs));
                console.log('Location offset x: ' + finalXY.x + ', y: ' + finalXY.y);
}
return finalXY;
}
}, {
key: 'slideMapSelector',
value: function slideMapSelector(nth) {
var itemHeight = 112 * gRatioDevice; // dev 1920 * 1080 => device item height
var sDCX = gGameOffsetX + (this.gi.mapSelector.x1 + this.gi.mapSelector.x2) / 2 * gRatioDevice;
var sDCY = gGameOffsetY + this.gi.mapSelector.y1 * gRatioDevice;
var itemsY = [sDCY + itemHeight * 0.5, sDCY + itemHeight * 1.5, sDCY + itemHeight * 2.5, sDCY + itemHeight * 3.5, sDCY + itemHeight * 4.5];
// move to top
var move2Top = function move2Top() {
for (var i = 0; i < 3; i++) {
tapDown(sDCX, itemsY[0], 10);
tapUp(sDCX, itemsY[4], 10);
sleep(1000);
}
};
var move4down = function move4down() {
tapDown(sDCX, itemsY[4], 20);
moveTo(sDCX, itemsY[4], 20);
moveTo(sDCX, itemsY[3], 20);
moveTo(sDCX, itemsY[2], 20);
moveTo(sDCX, itemsY[1], 20);
sleep(150);
moveTo(sDCX, itemsY[0], 20);
sleep(1500);
tapUp(sDCX, itemsY[0], 20);
};
move2Top();
sleep(500);
for (var i = 0; i < Math.floor((nth - 1) / 4) && this._loop; i++) {
move4down();
}
tap(sDCX, itemsY[(nth - 1) % 4], 20);
sleep(500);
this.refreshScreen();
this.gi.mapMoveBtn.tap();
// this.waitForChangeScreen(0.92, 5000);
// this.safeSleep(3000); if (!this._loop) { return; }
// this.refreshScreen();
// const floorXY1 = findImage(this._img, this.images.floor1);
// if (floorXY1.score > 0.8) {
// const dXY = Utils.targetToDevice(floorXY1);
// tap(dXY.x + 5, dXY.y + 5, 50);
// sleep(1000);
// this.gi.mapFloorBtn.tap();
// sleep(1000);
// return;
// }
// const floorXY2 = findImage(this._img, this.images.floor2);
// if (floorXY2.score > 0.8) {
// const dXY = Utils.targetToDevice(floorXY2);
// tap(dXY.x + 5, dXY.y + 5, 50);
// sleep(1000);
// this.gi.mapFloorBtn.tap();
// sleep(1000);
// return;
// }
}
}, {
key: 'getImageNumber',
value: function getImageNumber(img, numbers) {
var maxLength = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 8;
if (numbers.length != 10) {
                console.log('There should be exactly 10 digit images');
return 0;
}
var results = [];
for (var i = 0; i < 10; i++) {
var nImg = numbers[i];
if (nImg == 0) {
                    console.log('Digit image ' + i + ' does not exist');
return 0;
}
var rs = findImages(img, nImg, 0.95, maxLength, true);
for (var k in rs) {
rs[k].number = i;
results.push(rs[k]);
}
}
results.sort(function (a, b) {
return b.score - a.score;
});
results = results.slice(0, Math.min(maxLength, results.length));
results.sort(function (a, b) {
return a.x - b.x;
});
var numberSize = getImageSize(numbers[0]);
var nw = numberSize.width;
var imgSize = getImageSize(img);
var iw = imgSize.width;
            var px = 0;
            var numberStr = '';
            for (var _i2 in results) {
                var r = results[_i2];
                // Skip detections that overlap the digit just consumed.
                if (r.x > px) {
                    numberStr += r.number.toString();
                    // Advance past this digit's width before accepting the next match.
                    // Assumed intent: the original referenced an undefined `p` here and
                    // left the computed digit width `nw` unused.
                    px = r.x + nw - 2;
                }
            }
            console.log('Recognized number: ' + numberStr);
return numberStr;
}
}]);
return LineageM;
}();
var DefaultConfig = {
conditions: [
        // {type: 'hp', op: -1, value: 80, btn: 0, interval: 1000}, // if hp < 80%, press the button at index 0, e.g. Teleport
        // {type: 'mp', op: 1, value: 50, btn: 1, interval: 1000}, // if mp > 50%, press the button at index 1, e.g. Scroll of Return
        // {type: 'mp', op: -1, value: 80, btn: 2, interval: 2000}, // if mp < 80%, press the button at index 2, e.g. Greater Heal
        // {type: 'mp', op: -1, value: 70, btn: 4, interval: 2000}, // if mp < 70%, press the button at index 4, e.g. Soul Body
        // {type: 'mp', op: 1, value: 50, btn: 1, interval: 8000}, // if mp > 50%, press the button at index 1, e.g. Triple Arrow, Light Arrow, Fireball
],
    inHomeUseBtn: false, // if in the safe region, use the 3rd button, e.g. Teleport.
beAttackedRandTeleport: true,
    dangerousGoHome: true, // if hp < 25%, go home using the 8th button
autoAttack: false,
autoReceiveReward: false,
    autoUseAntidote: false, // drink an antidote when poisoned, using the 6th button
    goBackInterval: 0, // return to the origin location, checking the location every n minutes (0 disables)
autoBuyFirstSet: false, // 1 * 100, -1 => max
mapSelect: 0, // move to nth map in safe region
grabMonster: false,
autoTeleport: true
};
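// Hypothetical usage sketch: clone the defaults, tweak a few options, then
// start the script (the values below are illustrative only).
// var myConfig = JSON.parse(JSON.stringify(DefaultConfig));
// myConfig.autoAttack = true;
// myConfig.conditions.push({ type: 'hp', op: -1, value: 80, btn: 0, interval: 1000 });
// start(myConfig);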
var lm = undefined;
function testSpecialScreen() {
// for special screen
if (gDeviceWidth / gDeviceHeight > 1.78) {
var _blackX = 0;
var _img = getScreenshot();
for (var x = 0; x < gDeviceWidth; x++) {
var color = getImageColor(_img, x, gDeviceHeight - 1);
if (color.r === 0 && color.g === 0 && color.b === 0) {
_blackX++;
} else {
break;
}
}
releaseImage(_img);
_blackX++;
if (Math.abs(_blackX - gGameOffsetX) >= 2) {
gGameOffsetX = _blackX;
console.log("修正特殊螢幕位置", _blackX);
sleep(1000);
}
}
}
function start(config) {
    console.log('📢 Starting script 📢');
testSpecialScreen();
    console.log('Screen offset', gGameOffsetX, gGameWidth);
sleep(2000);
if (typeof config === 'string') {
config = JSON.parse(config);
}
if (lm !== undefined) {
        console.log('📢 Script is already running 📢');
return;
}
lm = new LineageM(config);
lm.start();
lm.stop();
lm = undefined;
    console.log('📢 Script stopped 📢');
}
function stop() {
if (lm == undefined) {
return;
}
lm._loop = false;
lm = undefined;
    console.log('📢 Stopping script 📢');
}
// start(DefaultConfig);
// lm = new LineageM(DefaultConfig);
// lm._loop = true;
// lm.checkAndBuyItems();
// console.log(lm.isSafeRegionState());
// lm.goToMapPage();
// lm.slideMapSelector(5);
// lm.buyItems();
// lm.checkAndAutoGetReward();
// for (var i= 0; i < 1; i++) {
// lm.refreshScreen();
// const a = lm.gi.attackBtn.check(lm._img);
// const b = lm.gi.killNumber.check(lm._img);
// // lm.gi.killNumber.print(lm._img);
// // console.log(b)
// const c = lm.gi.autoPlayBtn.check(lm._img);
// lm.gi.autoPlayBtn.print(lm._img);
// console.log('attack Off', a, 'has kn', b, 'autoOff', c);
// }
// lm.findStore();
// for (let i = 0; i < 5; i++) {
// const hp = lm.getHpPercent();
// // const mp = lm.getMpPercent();
// // const exp = lm.getExpPercent();
// lm.refreshScreen();
// console.log(hp);
// }
// lm.checkAndBuyItems(1);
// lm.goToMapPage();
// const hp = lm.getHpPercent();
// const mp = lm.getMpPercent();
// const exp = lm.getExpPercent();
// console.log(hp, mp, exp);
// lm.goToMapPage();
// lm._loop = true;
// lm.recordCurrentLocation();
// var xy = lm.getDiffRecordLocation();
// lm.gi.menuOnBtn.tap();
// sleep(1000);
// lm.goMap(-xy.x, -xy.y);
// lm.cropAndSave('safeRegionType.png', lm.gi.regionTypeRect);
// lm.updateGlobalState();
// lm.stop();<|fim▁end|> | |
<|file_name|>siphash.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2016 Andreas Auernhammer. All rights reserved.
// Use of this source code is governed by a license that can be
// found in the LICENSE file.
// Package siphash implements the SipHash-64 and SipHash-128
// pseudo-random-functions - with the recommended parameters:
// c = 2 and d = 4.
// SipHash computes a message authentication code (MAC) from a
// variable-length message and a 128 bit secret key. SipHash
// was designed to be efficient, even for short inputs, with
// performance comparable to non-cryptographic hash functions.
//
//
// Security
//
// SipHash cannot be used as a cryptographic hash function.
// Neither SipHash-64 nor SipHash-128 are strong collision
// resistant.
//
//
// Recommendations
//
// SipHash was designed to defend hash flooding DoS attacks.
// SipHash-64 can be used as hashing scheme within hash maps<|fim▁hole|>
import (
"encoding/binary"
"hash"
"strconv"
)
const (
// KeySize is the size of the SipHash secret key in bytes.
KeySize = 16
// BlockSize is the block size of SipHash in bytes.
BlockSize = 8
)
const (
c0 = 0x736f6d6570736575
c1 = 0x646f72616e646f6d
c2 = 0x6c7967656e657261
c3 = 0x7465646279746573
)
type KeySizeError uint
func (k KeySizeError) Error() string {
return "siphash: invalid key size " + strconv.Itoa(int(k))
}
// Sum64 returns the 64 bit authenticator for msg using a 128 bit secret key.
func Sum64(msg []byte, key *[KeySize]byte) uint64 {
k0 := binary.LittleEndian.Uint64(key[0:])
k1 := binary.LittleEndian.Uint64(key[8:])
var hVal [4]uint64
hVal[0] = k0 ^ c0
hVal[1] = k1 ^ c1
hVal[2] = k0 ^ c2
hVal[3] = k1 ^ c3
n := len(msg)
ctr := byte(n)
if n >= BlockSize {
n &= (^(BlockSize - 1))
core(&hVal, msg[:n])
msg = msg[n:]
}
var block [BlockSize]byte
copy(block[:], msg)
block[7] = ctr
return finalize64(&hVal, &block)
}
// New64 returns a hash.Hash64 computing the SipHash-64 checksum.
// This function returns a non-nil error if len(key) != 16.
func New64(key []byte) (hash.Hash64, error) {
if k := len(key); k != KeySize {
return nil, KeySizeError(k)
}
h := new(digest64)
h.key[0] = binary.LittleEndian.Uint64(key)
h.key[1] = binary.LittleEndian.Uint64(key[8:])
h.Reset()
return h, nil
}
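// Hypothetical usage sketch (not part of the original package; the key bytes
// are illustrative): one-shot hashing with Sum64 and incremental hashing with
// New64 produce the same 64 bit tag.
//
//	var key [KeySize]byte
//	copy(key[:], "0123456789abcdef")
//	tag := Sum64([]byte("hello"), &key)
//
//	h, _ := New64(key[:])
//	h.Write([]byte("hel"))
//	h.Write([]byte("lo"))
//	same := h.Sum64() == tag // true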
type digest64 struct {
hVal [4]uint64
key [2]uint64
block [BlockSize]byte
off int
ctr byte
}
func (d *digest64) BlockSize() int { return BlockSize }
func (d *digest64) Size() int { return 8 }
func (d *digest64) Reset() {
d.hVal[0] = d.key[0] ^ c0
d.hVal[1] = d.key[1] ^ c1
d.hVal[2] = d.key[0] ^ c2
d.hVal[3] = d.key[1] ^ c3
d.off = 0
d.ctr = 0
}
func (d *digest64) Write(p []byte) (n int, err error) {
n = len(p)
d.ctr += byte(n)
if d.off > 0 {
dif := BlockSize - d.off
if n < dif {
d.off += copy(d.block[d.off:], p)
return
}
copy(d.block[d.off:], p[:dif])
core(&(d.hVal), d.block[:])
p = p[dif:]
d.off = 0
}
if nn := len(p) &^ (BlockSize - 1); nn >= BlockSize {
core(&(d.hVal), p[:nn])
p = p[nn:]
}
if len(p) > 0 {
d.off = copy(d.block[:], p)
}
return n, nil
}
func (d *digest64) Sum64() uint64 {
hVal := d.hVal
block := d.block
for i := d.off; i < BlockSize-1; i++ {
block[i] = 0
}
block[7] = d.ctr
return finalize64(&hVal, &block)
}
func (d *digest64) Sum(sum []byte) []byte {
var out [8]byte
binary.LittleEndian.PutUint64(out[:], d.Sum64())
return append(sum, out[:]...)
}<|fim▁end|> | // or other key-value data structures.
// SipHash-128 can be used to compute a 128 bit authentication
// tag for messages.
package siphash // import "github.com/aead/siphash" |
<|file_name|>MasterStateType.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2012 Adam Roughton.
*
* This file is part of CrowdHammer.
*
* CrowdHammer is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* CrowdHammer is distributed in the hope that it will be useful,<|fim▁hole|> * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with CrowdHammer. If not, see <http://www.gnu.org/licenses/>.
*/
package com.adamroughton.crowdhammer.master.state;
public enum MasterStateType {
START,
SCENARIO_RUN_START,
SET_UP_PHASE,
TEST_PHASE_PREP,
TEST_PHASE_EXEC,
SHUT_DOWN,
ERROR
}<|fim▁end|> | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![warn(rust_2018_idioms)]
#[macro_use]
extern crate tracing;
use std::collections::hash_map::{Entry, HashMap};
use conduit::{box_error, Handler, HandlerResult, Method, RequestExt};
use route_recognizer::{Match, Params, Router};
#[derive(Default)]
pub struct RouteBuilder {
routers: HashMap<Method, Router<WrappedHandler>>,
}
#[derive(Clone, Copy)]
pub struct RoutePattern(&'static str);
impl RoutePattern {
pub fn pattern(&self) -> &str {
self.0
}
}
struct WrappedHandler {
pattern: RoutePattern,
handler: Box<dyn Handler>,
}
impl conduit::Handler for WrappedHandler {
fn call(&self, request: &mut dyn RequestExt) -> HandlerResult {
self.handler.call(request)
}
}
#[derive(Debug, thiserror::Error)]
pub enum RouterError {
#[error("Invalid method")]
UnknownMethod,
#[error("Path not found")]
PathNotFound,
}
impl RouteBuilder {
pub fn new() -> Self {
Self {
routers: HashMap::new(),
}
}
#[instrument(level = "trace", skip(self))]
fn recognize<'a>(
&'a self,
method: &Method,
path: &str,
) -> Result<Match<&WrappedHandler>, RouterError> {
match self.routers.get(method) {
Some(router) => router.recognize(path).or(Err(RouterError::PathNotFound)),
None => Err(RouterError::UnknownMethod),
}
}
#[instrument(level = "trace", skip(self, handler))]
pub fn map<H: Handler>(
&mut self,
method: Method,
pattern: &'static str,
handler: H,
) -> &mut Self {
{
let router = match self.routers.entry(method) {
Entry::Occupied(e) => e.into_mut(),
Entry::Vacant(e) => e.insert(Router::new()),
};
let wrapped_handler = WrappedHandler {
pattern: RoutePattern(pattern),
handler: Box::new(handler),
};
router.add(pattern, wrapped_handler);
}
self
}
pub fn get<H: Handler>(&mut self, pattern: &'static str, handler: H) -> &mut Self {
self.map(Method::GET, pattern, handler)
}
pub fn post<H: Handler>(&mut self, pattern: &'static str, handler: H) -> &mut Self {
self.map(Method::POST, pattern, handler)
}
pub fn put<H: Handler>(&mut self, pattern: &'static str, handler: H) -> &mut Self {
self.map(Method::PUT, pattern, handler)
}
pub fn delete<H: Handler>(&mut self, pattern: &'static str, handler: H) -> &mut Self {
self.map(Method::DELETE, pattern, handler)
}
pub fn head<H: Handler>(&mut self, pattern: &'static str, handler: H) -> &mut Self {
self.map(Method::HEAD, pattern, handler)
}
}
impl conduit::Handler for RouteBuilder {
#[instrument(level = "trace", skip(self, request))]
fn call(&self, request: &mut dyn RequestExt) -> HandlerResult {
let mut m = {
let method = request.method();
let path = request.path();
match self.recognize(&method, path) {
Ok(m) => m,
Err(e) => {
info!("{}", e);
return Err(box_error(e));
}
}
};<|fim▁hole|>
// We don't have `pub` access to the fields to destructure `Params`, so swap with an empty
// value to avoid an allocation.
let mut params = Params::new();
std::mem::swap(m.params_mut(), &mut params);
let pattern = m.handler().pattern;
debug!(pattern = pattern.0, "matching route handler found");
{
let extensions = request.mut_extensions();
extensions.insert(pattern);
extensions.insert(params);
}
let span = trace_span!("handler", pattern = pattern.0);
span.in_scope(|| m.handler().call(request))
}
}
pub trait RequestParams<'a> {
fn params(self) -> &'a Params;
}
impl<'a> RequestParams<'a> for &'a (dyn RequestExt + 'a) {
fn params(self) -> &'a Params {
self.extensions().get::<Params>().expect("Missing params")
}
}
#[cfg(test)]
mod tests {
use super::{RequestParams, RouteBuilder, RoutePattern};
use conduit::{Body, Handler, Method, Response, StatusCode};
use conduit_test::{MockRequest, ResponseExt};
lazy_static::lazy_static! {
static ref TRACING: () = {
tracing_subscriber::FmtSubscriber::builder()
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
.with_span_events(tracing_subscriber::fmt::format::FmtSpan::FULL)
.with_test_writer()
.init();
};
}
#[test]
fn basic_get() {
lazy_static::initialize(&TRACING);
let router = test_router();
let mut req = MockRequest::new(Method::GET, "/posts/1");
let res = router.call(&mut req).expect("No response");
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(*res.into_cow(), b"1, GET, /posts/:id"[..]);
}
#[test]
fn basic_post() {
lazy_static::initialize(&TRACING);
let router = test_router();
let mut req = MockRequest::new(Method::POST, "/posts/10");
let res = router.call(&mut req).expect("No response");
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(*res.into_cow(), b"10, POST, /posts/:id"[..]);
}
#[test]
fn path_not_found() {
lazy_static::initialize(&TRACING);
let router = test_router();
let mut req = MockRequest::new(Method::POST, "/nonexistent");
let err = router.call(&mut req).err().unwrap();
assert_eq!(err.to_string(), "Path not found");
}
#[test]
fn unknown_method() {
lazy_static::initialize(&TRACING);
let router = test_router();
let mut req = MockRequest::new(Method::DELETE, "/posts/1");
let err = router.call(&mut req).err().unwrap();
assert_eq!(err.to_string(), "Invalid method");
}
#[test]
fn catch_all() {
lazy_static::initialize(&TRACING);
let mut router = RouteBuilder::new();
router.get("/*", test_handler);
let mut req = MockRequest::new(Method::GET, "/foo");
let res = router.call(&mut req).expect("No response");
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(*res.into_cow(), b", GET, /*"[..]);
}
fn test_router() -> RouteBuilder {
let mut router = RouteBuilder::new();
router.post("/posts/:id", test_handler);
router.get("/posts/:id", test_handler);
router
}
fn test_handler(req: &mut dyn conduit::RequestExt) -> conduit::HttpResult {
let res = vec![
req.params().find("id").unwrap_or("").to_string(),
format!("{:?}", req.method()),
req.extensions()
.get::<RoutePattern>()
.unwrap()
.pattern()
.to_string(),
];
let bytes = res.join(", ").into_bytes();
Response::builder().body(Body::from_vec(bytes))
}
}<|fim▁end|> | |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# GromacsWrapper documentation build configuration file, created by
# sphinx-quickstart on Tue Jun 23 19:38:56 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# All configuration values have a default value; values that are commented out
# serve to show the default value.
import sys, os
# If your extensions are in another directory, add it here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.append(os.path.abspath('some/directory'))
# General configuration
# ---------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['.templates']
# The suffix of source filenames.
source_suffix = '.txt'
# The master toctree document.
master_doc = 'index'
# General substitutions.
project = u'GromacsWrapper'
copyright = u'2009-2018, The Authors of GromacsWrapper (see AUTHORS)'
# The default replacements for |version| and |release|, also used in various
# other places throughout the built documents.
#
# Dynamically calculate the version (uses versioneer)
packageversion = __import__('gromacs').__version__
# The short X.Y version.
version = '.'.join(packageversion.split('.')[:2])
# The full version, including alpha/beta/rc tags.
release = packageversion
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directories, that shouldn't be searched
# for source files.
exclude_trees = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Options for HTML output
# -----------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinxdoc'<|fim▁hole|>
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = "logos/GromacsWrapper_logo_200x200.png"
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = "logos/GromacsWrapper_logo_32x32.ico"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['.static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, the reST sources are included in the HTML build as _sources/<name>.
#html_copy_source = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'GromacsWrapperdoc'
# Options for LaTeX output
# ------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
latex_documents = [
('index', 'GromacsWrapper.tex', u'GromacsWrapper Documentation',
u'Oliver Beckstein', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
# Options for ext.intersphinx
# ---------------------------
# intersphinx: reference standard lib and RecSQL
# http://sphinx.pocoo.org/latest/ext/intersphinx.html
intersphinx_mapping = {'https://docs.python.org/': None,
'https://docs.scipy.org/doc/numpy/': None,
'https://docs.scipy.org/doc/scipy/reference/': None,
}
# Options for ext.autodoc
# -----------------------
# see http://sphinx.pocoo.org/ext/autodoc.html
# This value selects what content will be inserted into the main body of an autoclass directive.
# "class", "init", "both"
autoclass_content = "both"<|fim▁end|> | |
<|file_name|>poll.rs<|end_file_name|><|fim▁begin|>use crate::future::FutureExt;
use core::pin::Pin;
use futures_core::future::Future;
use futures_core::task::{Context, Poll};
/// A macro which returns the result of polling a future once within the<|fim▁hole|>/// It is also gated behind the `async-await` feature of this library, which is
/// activated by default.
///
/// If you need the result of polling a [`Stream`](crate::stream::Stream),
/// you can use this macro with the [`next`](crate::stream::StreamExt::next) method:
/// `poll!(stream.next())`.
#[macro_export]
macro_rules! poll {
($x:expr $(,)?) => {
$crate::__private::async_await::poll($x).await
};
}
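// Hypothetical usage sketch (not part of the original file; assumes an async
// context and the crate's `async-await` feature): poll a future exactly once
// without suspending until it completes.
//
// async fn probe<F: Future + Unpin>(mut fut: F) -> bool {
//     match poll!(&mut fut) {
//         Poll::Ready(_) => true,  // completed on the first poll
//         Poll::Pending => false,  // not ready yet
//     }
// }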
#[doc(hidden)]
pub fn poll<F: Future + Unpin>(future: F) -> PollOnce<F> {
PollOnce { future }
}
#[allow(missing_debug_implementations)]
#[doc(hidden)]
pub struct PollOnce<F: Future + Unpin> {
future: F,
}
impl<F: Future + Unpin> Future for PollOnce<F> {
type Output = Poll<F::Output>;
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
Poll::Ready(self.future.poll_unpin(cx))
}
}<|fim▁end|> | /// current `async` context.
///
/// This macro is only usable inside of `async` functions, closures, and blocks. |
<|file_name|>subscribe.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Simple script to test sending UTF8 text with the GrowlNotifier class
import logging
logging.basicConfig(level=logging.DEBUG)
from gntp.notifier import GrowlNotifier
import platform<|fim▁hole|><|fim▁end|> |
growl = GrowlNotifier(notifications=['Testing'],password='password',hostname='ayu')
growl.subscribe(platform.node(),platform.node(),12345) |
<|file_name|>pouchdb.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for Pouch 0.1
// Project: http://pouchdb.com
// Definitions by: Bill Sears <https://github.com/MrBigDog2U/>
// Definitions: https://github.com/borisyankov/DefinitelyTyped
//
//
interface IPouchDocument {
_id?:string;
_rev?:string;
_deleted?:boolean;
_attachments?:any;
} // interface IPouchDocument
interface PouchError {
status: number;
error: string;
reason: string;
}
interface IPouchAttachment {
content_type: string;
digest?: string;
stub?: boolean;
data?: any;
length?: number;
revpos?: number;
}
interface PouchInfoResponse {
db_name: string;
doc_count: number;
update_seq: string;
idb_attachement_format?: string;
sqlite_plugin?: boolean;
websql_encoding?: string;
}
interface PouchAjaxOptions {
cache?: boolean;
headers?: any[];
timeout?: number;
username?: string;
password?: string;
}
interface PouchGetOptions {
ajax?: PouchAjaxOptions
attachments?: boolean;
att_encoding_info?: boolean;
atts_since?: any[];
conflicts?: boolean;
deleted_conflicts?: boolean;
latest?: boolean;
local_seq?: boolean;
meta?: boolean;
open_revs?: any;
rev?: string;
revs?: boolean;
revs_info?: boolean;
startkey?: any;
endkey?: any;
keys?: any;
descending?: boolean;
include_docs?: boolean;
inclusive_end?: boolean;
skip?: number;
limit?: number;
}
interface PouchGetResponse {
_id: string;
_rev: string;
_deleted?: boolean;
_attachments?: any;
_conflicts?: any[];
_deleted_conflicts?: any[];
_local_seq?: number;
_revs_info?: any[];
_revisions?: any[];
}
interface PouchAllDocsOptions {
startkey?: string;
endkey?: string;
descending?: boolean;
include_docs?: boolean;
conflicts?: boolean;
skip?: number;
limit?: number;
keys?: any[];
inclusive_end?: boolean;
attachments?: boolean;
}
interface PouchAllDocsItem {
id: string;
key: string;
value: any;
doc?: IPouchDocument;
deleted?:boolean;
_deleted?:boolean;
}
interface PouchAllDocsResponse {
offset?: number;
total_rows: number;
rows: PouchAllDocsItem[];
}
<|fim▁hole|> docs: IPouchDocument[];
}
interface PouchUpdateOptions {
new_edits?: boolean;
}
interface PouchUpdateResponse {
ok: boolean;
id?: string;
rev?: string;
}
interface PouchFilter {
map: (doc: any) => void;
reduce?: (key: string,value: any) => any;
}
interface PouchQueryOptions {
complete?: any;
include_docs?: boolean;
error?: (err: PouchError) => void;
descending?: boolean;
reduce?: boolean;
startkey?: any;
endkey?: any;
inclusive_end?: boolean;
skip?: number;
limit?: number;
keys?: any[];
group?: boolean;
group_level?: number;
stale?: string;
}
interface PouchQueryResponse {
rows: any[];
}
interface GQLOptions {
select: string;
where?: string;
groupBy?: string;
pivot?: string;
label?: string;
}// interfacaGQLOptions
interface PouchAttachmentOptions {
decode?: boolean;
}
interface PouchCancellable {
cancel: () => void;
}
interface PouchChangesOptions {
onChange: (change: PouchChange) => void;
complete?: (err: PouchError,res: PouchChanges) => void;
seq?: number;
since?: number;
descending?: boolean;
filter?: PouchFilter;
continuous?: boolean;
include_docs?: boolean;
conflicts?: boolean;
}
interface PouchChange {
changes: any;
doc: PouchGetResponse;
id: string;
seq: number;
}
interface PouchChanges {
results: PouchChange[];
}
interface IPouchFuncs {
enable: (v: string) => void;
disable: () => void;
}
interface PouchRevsDiffOptions {
}
interface PouchReplicateOptions {
continuous?: boolean;
    onChange?: (change: any) => void;
filter?: any; // Can be either string or PouchFilter
complete?: (err: PouchError,res: PouchChanges) => void;
}
interface PouchReplicateResponse {
ok: boolean;
start_time: Date;
end_time: Date;
docs_read: number;
docs_written: number;
}
interface PouchReplicateObject {
cancel : ()=> void;
}
interface PouchReplicate {
/*
from : (url:string, opts:PouchReplicateOptions, callback: (err: PouchError, res: PouchReplicateResponse)=>void) => PouchCancellable;
from : (url:string, callback: (err: PouchError, res: PouchReplicateResponse)=>void) => PouchCancellable;
to : (url:string, opts:PouchReplicateOptions, callback: (err: PouchError, res: PouchReplicateResponse)=>void) => PouchCancellable;
to : (url:string, callback: (err: PouchError, res: PouchReplicateResponse)=>void) => PouchCancellable;
*/
from(url: string,opts: PouchReplicateOptions,callback: (err: PouchError,res: PouchReplicateResponse) => void): PouchCancellable;
from(url: string,callback: (err: PouchError,res: PouchReplicateResponse) => void): PouchCancellable;
to(dbName: string,opts: PouchReplicateOptions,callback: (err: PouchError,res: PouchReplicateResponse) => void): PouchCancellable;
to(dbName: string,callback: (err: PouchError,res: PouchReplicateResponse) => void): PouchCancellable;
}
interface PouchOptions {
name?: string;
auto_compaction?: boolean;
adapter?: string;
ajax?: PouchAjaxOptions;
}
interface PouchCreateIndexOptions {
fields: string[];
name?: string;
ddoc?: string;
type?: string;
}
interface PouchCreateIndexArg {
index: PouchCreateIndexOptions
}
interface PouchCreateIndexResponse {
id?:string;
name?:string;
result: string;
}
interface PouchIndexDefinition {
ddoc: string;
name: string;
type: string;
def: any;
}
interface PouchGetIndexResponse {
indexes: PouchIndexDefinition[];
}
interface PouchFindOptions {
selector: any;
fields?: string[];
sort?: any[];
limit?: number;
skip?: number;
}
interface PouchFindResponse {
docs: any[];
}
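// Hypothetical usage sketch for the index/find typings above (assumes a
// PouchDB build with the pouchdb-find plugin loaded; names are illustrative):
//
// db.createIndex({ index: { fields: ['age'] } })
//     .then(() => db.find({ selector: { age: { $gt: 21 } }, fields: ['_id'] }))
//     .then((res: PouchFindResponse) => console.log(res.docs.length));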
// Support AMD require
//declare module 'pouchdb' {
// var PouchDB: PouchDB;
//}
//
// emit is the function that the PouchFilter.map function should call in order to add a particular item to
// a filter view.
//
declare function emit(key: any,value?: any): any;
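// Hypothetical usage sketch: a temporary map function for db.query() that
// calls the ambient emit() declared above (the field name is illustrative):
//
// const byName: PouchFilter = {
//     map: (doc: any) => { if (doc.name) { emit(doc.name, null); } }
// };
// db.query(byName, { include_docs: true })
//     .then((res: PouchQueryResponse) => console.log(res.rows.length));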
// Support AMD require
interface IPouchDB {
//
new (name: string,opts: PouchOptions,callback: (err: PouchError,res: IPouchDB) => void): IPouchDB;
new (name: string,callback: (err: PouchError,res: IPouchDB) => void): IPouchDB;
constructor(name: string,callback: (err: PouchError,res: IPouchDB) => void);
destroy(name: string,callback: (err: PouchError) => void): void;
plugin(p: any): void;
replicate(source:any,dest:any,options?:any): Promise<any>;
debug: IPouchFuncs;
//
createIndex: (options: PouchCreateIndexArg) => Promise<PouchCreateIndexResponse>;
getIndexes: () => Promise<PouchGetIndexResponse>;
deleteIndex: (index: PouchIndexDefinition) => Promise<any>;
find: (request: PouchFindOptions) => Promise<PouchFindResponse>;
//
type: () => string;
id: () => string;
//close(callback: () => void): void;
close: () => Promise<any>;
//
info: () => Promise<PouchInfoResponse>;
// get: (id: string, opts?: PouchGetOptions) => Promise<PouchGetResponse>;
get: (id: string,opts?: PouchGetOptions) => Promise<PouchGetResponse>;
allDocs: (opts?: PouchAllDocsOptions) => Promise<PouchAllDocsResponse>;
bulkDocs: (req: any[],opts?: PouchUpdateOptions) => Promise<PouchUpdateResponse[]>;
post: (doc: any,opts?: PouchUpdateOptions) => Promise<PouchUpdateResponse>;
put: (doc: any,opts?: PouchUpdateOptions) => Promise<PouchUpdateResponse>;
remove: (doc: any,opts?: PouchUpdateOptions) => Promise<PouchUpdateResponse>;
gql: (q: GQLOptions,callback: (err: PouchError,res: any) => void) => void;
query: (fun: string | PouchFilter,opts?: PouchQueryOptions) => Promise<PouchQueryResponse>;
getAttachment: (docId: string,attachmentId: string,opts?: PouchAttachmentOptions) => Promise<any>;
putAttachment: (docId: string,attachmentId: string,docRev: string,attachment: any,type: string) => Promise<PouchUpdateResponse>;
removeAttachment: (docId: string,attachmentId: string,docRev: string) => Promise<PouchUpdateResponse>;
changes: (opts?: PouchChangesOptions) => PouchCancellable;
revsDiff: (req: any,opts?: PouchRevsDiffOptions) => Promise<any>;
}//
declare var PouchDB:IPouchDB;
declare module "pouchdb"{
export = PouchDB;
}<|fim▁end|> | interface PouchBulkDocsRequest {
|
<|file_name|>execute.py<|end_file_name|><|fim▁begin|># coding=utf-8
"""
Program start here.
To python soon.
"""
# @Author: Zackary BEAUGELIN <gysco>
# @Date: 2017-04-10T15:43:09+02:00<|fim▁hole|># @Email: [email protected]
# @Project: SSWD
# @Filename: execute.py
# @Last modified by: gysco
# @Last modified time: 2017-06-19T16:15:30+02:00
from statistics import (calcul_ic_empirique, calcul_ic_normal,
calcul_ic_triang_p, calcul_ic_triang_q, calcul_R2,
calcul_res, tirage)
from pandas import ExcelWriter
# from charts import draw_chart
from common import (affichage_options, calcul_col_res, calcul_lig_graph,
calcul_ref_pond, ecrire_data_co, ecrire_titre, edit_wb,
efface_feuil_inter, verif, write_feuil_inter)
from initialisation import initialise
from weighting import calcul_nbvar, calcul_ponderation, sort_collection
def lance_ihm():
"""Fait apparaitre la boite de dialogue SSWD."""
global frm_sswd
frm_sswd.Show()
def lance_apropos():
"""Fait apparaitre la boite A propos."""
global frm_apropos
frm_apropos.Show()
def lance(output, data_co, nom_colonne, isp, pcat, dist, B, a, n_optim,
conserv_inter, nb_taxo, val_pcat, liste_taxo, triang_ajust, seed,
graph):
"""
Module de lancement de la procedure SSWD.
Remarque : HC=Hazardous Concentration;
SSWD=Species Sensitivity Weighted Distribution;
WECP=Weighted Empirical Cumulative Probability
Principales etapes algorithmiques :
1. Calcul des ponderations associees a chaque resultat de test
ecotox (concentration), compte tenu des poids et des options
choisis par l'utilisateur et des proportions de donnees
existantes; calcul des probabilites empiriques cumulees
ponderees
2. Calcul des parametres mu, sig, _min, _max, mode, suivant les cas,
qui permettent l'estimationdes valeurs de best-estimates des
HCx% a partir des donnees ponderees
3. Tirages aleatoires (procedure de bootstrap) pour estimation de
l'intervalle de confiance associee a chaque HCx%
4. Affichage des resultats et representation graphique
___________________________________________________________________
Parametres principaux
@param data_co: tableaux des donnees exploites pour le calcul des
HC et genere par la procedure
attention : ce tableau contient des colonnes qui
ne sont pas affichees dans les feuilles
de calcul
ce tableau est affiche dans nom_feuille_pond pour
les calculs intermediaires et deux fois dans
nom_feuille_res pour l'affichage des graphiques
SSWD.
une fois triee en fonction des categories
taxonomiques et une fois dans l'ordre croissant des
concentrations nous appellerons data_co_feuil la
data_co telle qu'elle est affichees dans ces
feuilles
@param nom_colonne: nom des colonnes de data_co_feuil
@param isp: indice correspondant a la methode de traitement du
parametre espece 1=wted, 2=unwted,3=mean
@param pcat: poids accordes a chaque categorie taxonomique
@param nb_taxo: nombre de categories taxonomiques ou niveaux
trophiques
@param triang_ajust: option d'ajustement pour la loi triangulaire
si True ajustement sur les quantiles, sinon
sur les probabilites cumulees
___________________________________________________________________
// TODO: Ajouter le reste de la docummentation a la main.
"""
"""Debut de la procedure"""
# Application.ScreenUpdating = False
"""Valeurs specifique a la procedure SSWD"""
iproc = 1
as_identity = 2
ind_tax = 2 + as_identity
ind_data = 3 + as_identity
ind_pond = 4 + as_identity
ind_pcum = 5 + as_identity
tmp = 0
"""
Initialisation : definition des valeurs par defaut pour certains
parametres
modifiables par l'utilisateur averti
"""
(nom_feuille_pond, nom_feuille_stat, nom_feuille_res, nom_feuille_qemp,
nom_feuille_qnorm, nom_feuille_sort, nom_feuille_Ftriang,
nom_feuille_qtriang, pourcent, ind_hc, pcent, titre_graf, titre_axe,
titre_res, titre_data) = initialise()
"""
Test sur l'existence de feuilles de resultats et creation des feuilles
necessaires
"""
verif(nom_feuille_pond, nom_feuille_stat, nom_feuille_res,
nom_feuille_qemp, nom_feuille_qnorm, nom_feuille_sort,
nom_feuille_Ftriang, nom_feuille_qtriang, '', '', '')
"""
1. Calcul des ponderations et affichage resultats
dans nom_feuille_pond
"""
pond_lig = 0
pond_col = 0
calcul_ponderation(data_co, liste_taxo, pcat, isp, a, nb_taxo)
ecrire_data_co(data_co, nom_colonne, pond_lig, pond_col, nom_feuille_pond,
False, iproc)
nbdata = len(data_co)
"""2. Calcul nbvar et Tirages aleatoires"""
nbvar = calcul_nbvar(n_optim, data_co, pcat)
(pond_lig_deb, pond_lig_fin, pond_col_data, pond_col_pond,
pond_col_pcum, pond_col_data_act) = calcul_ref_pond(
pond_col, pond_lig, ind_data, ind_pond, ind_pcum, nbdata, tmp)
tirage(nom_feuille_stat, nbvar, B, nom_feuille_pond, pond_col_data,
pond_col_pond, seed)
"""
Remarque : le resultat des tirages est affiche dan@s nom_feuille_stat
L'affichage commence a la premiere ligne et a la premiere colonne;
la premiere ligne est une ligne de titre;
ceci n'est pour l'instant pas parametrable
"""
"""3. Calculs valeurs best-estimates et statistiques apres tirages"""
"""Definition indice lignes et colonnes"""
l1 = 1
"""
l1 ne peut être modifiee : c'est en fait une constante definie par la
procedure tirage
"""
l2 = B + l1 - 1
c1 = 0
"""c'est une constante definie par la sub tirage"""
c2 = c1 + nbvar - 1
lig_hc = 26
"""attention : il faut tenir compte de l'affichage des options"""
col_hc = 0
nbcol_vide = 1
lig_data = 1
writer = ExcelWriter(output)
"""
Calcul des indices des colonnes d'affichage des resultats dans
nom_feuille_res
"""
(col_deb, col_fin, col_data1, col_data2, col_tax, col_data, col_pcum,
col_data_le, col_pcum_le,
col_data_act, col_data_act_le, col_pcum_a) = calcul_col_res(
col_hc, nbcol_vide, pourcent, dist, ind_tax, ind_data, ind_pcum,
len(nom_colonne) + as_identity, tmp, tmp)
"""Calcul des indices des lignes pour les graphes de nom_feuille_res"""
lig_p, lig_qbe, lig_qbi, lig_qbs = calcul_lig_graph(lig_hc)
mup = [0] * 4
sigmap = [0] * 4
_min = [0] * 4
_max = [0] * 4
mode = [0] * 4
data_c = [0] * 4
R2_triang = 0
Pvalue_triang = 0
R2_norm = 0
Pvalue_norm = 0
"""initialisation de ligne_tot"""
i = 0
feuilles_res = ['_log_emp', '_log_nor', '_log_tri']
for x in dist:
if x is True:
"""
Ecriture de data_co_feuil triees par rapport aux categories
taxonomiques dans nom_feuille_res
"""
sort_collection(data_co, 2, 0)
ecrire_titre(titre_data[0], nom_feuille_res + feuilles_res[i],
lig_data - 1, col_data1)
ecrire_data_co(data_co, nom_colonne, lig_data, col_data1,
nom_feuille_res + feuilles_res[i], True, iproc)
"""
Ecriture de data_co_feuil triees par ordre croissant des
concentrations dans nom_feuille_res
"""
sort_collection(data_co, 7, 1)
ecrire_titre(titre_data[1], nom_feuille_res + feuilles_res[i],
lig_data - 1, col_data2)
ecrire_data_co(data_co, nom_colonne, lig_data, col_data2,
nom_feuille_res + feuilles_res[i], True, iproc)
i += 1
"""loi empirique"""
if dist[0] is True:
loi = 1
"""Calcul les valeurs correspondant a chaque tirage"""
calcul_ic_empirique(l1, c1, l2, c2, c1, pourcent, nom_feuille_stat,
nom_feuille_qemp, nom_feuille_sort, nbvar, a)
"""Calcul des valeurs best-estimates et affichage des resultats"""
mup[loi], sigmap[loi], _min[
loi], _max[loi], mode[loi], data_c[loi] = calcul_res(
l1, l2, ind_hc, pond_lig_deb, pond_col, pond_col_data,
pond_col_pcum, lig_hc, col_hc, nbvar, loi, titre_res, pcent,
pourcent, data_co, nom_colonne, nom_feuille_res + "_log_emp",
nom_feuille_qemp, nom_feuille_pond, '', 0, triang_ajust, iproc,
nbdata)
"""Graphes de SSWD"""
# draw_chart(writer, nom_feuille_res + "_log_emp", lig_p, lig_qbe,
# lig_qbi, lig_qbs, col_deb, col_fin, lig_data, col_tax,
# col_data, col_pcum, col_data_le, col_pcum_le, loi,
# titre_graf, 0, 0, nbdata, mup, sigmap, _min, _max, mode,
# titre_axe, val_pcat, liste_taxo, isp, tmp, tmp, iproc)
"""loi normale"""
if dist[1] is True:
loi = 2
calcul_ic_normal(l1, c1, l2, c2, c1, pourcent, nom_feuille_stat,
nom_feuille_qnorm)
mup[loi], sigmap[loi], _min[
loi], _max[loi], mode[loi], data_c[loi] = calcul_res(
l1, l2, ind_hc, pond_lig_deb, pond_col, pond_col_data,
pond_col_pcum, lig_hc, col_hc, nbvar, loi, titre_res, pcent,
pourcent, data_co, nom_colonne, nom_feuille_res + "_log_nor",
nom_feuille_qnorm, nom_feuille_pond, nom_feuille_stat, 0,
triang_ajust, iproc, nbdata)
R2_norm, Pvalue_norm = calcul_R2(data_co, loi, mup[loi], sigmap[loi],
_min[loi], _max[loi], mode[loi],
nbdata, data_c[loi])
# draw_chart(writer, nom_feuille_res + "_log_nor", lig_p, lig_qbe,
# lig_qbi, lig_qbs, col_deb, col_fin, lig_data, col_tax,
# col_data, col_pcum, col_data_le, col_pcum_le, loi,
# titre_graf, R2_norm, Pvalue_norm, nbdata, mup, sigmap,
# _min, _max, mode, titre_axe, val_pcat, liste_taxo, isp,
# tmp, tmp, iproc)
"""loi triangulaire"""
if dist[2] is True:
loi = 3
if triang_ajust is True:
c_min = calcul_ic_triang_q(
l1, c1, l2, c2, c1, nbvar, a, pourcent, nom_feuille_stat,
nom_feuille_sort, nom_feuille_Ftriang, nom_feuille_qtriang)
else:
c_min = calcul_ic_triang_p(
l1, c1, l2, c2, c1, nbvar, a, pourcent, nom_feuille_stat,
nom_feuille_sort, nom_feuille_Ftriang, nom_feuille_qtriang)
mup[loi], sigmap[loi], _min[
loi], _max[loi], mode[loi], data_c[loi] = calcul_res(
l1, l2, ind_hc, pond_lig_deb, pond_col, pond_col_data,
pond_col_pcum, lig_hc, col_hc, nbvar, loi, titre_res, pcent,
pourcent, data_co, nom_colonne, nom_feuille_res + "_log_tri",
nom_feuille_qtriang, nom_feuille_pond, nom_feuille_Ftriang,
c_min, triang_ajust, iproc, nbdata)
R2_triang, Pvalue_triang = calcul_R2(data_co, loi, mup[loi],
sigmap[loi], _min[loi], _max[loi],
mode[loi], nbdata, data_c[loi])
# draw_chart(writer, nom_feuille_res + "_log_tri", lig_p, lig_qbe,
# lig_qbi, lig_qbs, col_deb, col_fin, lig_data, col_tax,
# col_data, col_pcum, col_data_le, col_pcum_le, loi,
# titre_graf, R2_triang, Pvalue_triang, nbdata, mup, sigmap,
# _min, _max, mode, titre_axe, val_pcat, liste_taxo, isp,
# tmp, tmp, iproc)
# decaler_graph(nom_feuille_res)
affichage_options("details", isp, val_pcat, liste_taxo, B, 0, 0, 9, 0,
dist, nbvar, iproc, a, seed)
# cellule_gras()
if conserv_inter is False:
efface_feuil_inter(nom_feuille_pond, nom_feuille_stat,
nom_feuille_qemp, nom_feuille_qnorm,
nom_feuille_qtriang, nom_feuille_sort,
nom_feuille_Ftriang, '', '', '')
write_feuil_inter(writer)
writer.close()
edit_wb(output, lig_p, lig_qbe, lig_qbi, lig_qbs, col_deb, col_fin,
lig_data, col_tax, col_data, col_pcum, col_data_le, col_pcum_le,
titre_graf, R2_norm, Pvalue_norm, R2_triang, Pvalue_triang, nbdata,
mup, sigmap, _min, _max, mode, titre_axe, val_pcat, liste_taxo,
isp, tmp, iproc, graph)<|fim▁end|> | |
<|file_name|>netutil.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Copyright 2011 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Miscellaneous network utility code."""
from __future__ import absolute_import, division, print_function, with_statement
import errno
import os
import platform
import socket
import stat
from tornado.concurrent import dummy_executor, run_on_executor
from tornado.ioloop import IOLoop
from tornado.platform.auto import set_close_exec
from tornado.util import u, Configurable, errno_from_exception
try:
import ssl
except ImportError:
# ssl is not available on Google App Engine
ssl = None
if hasattr(ssl, 'match_hostname') and hasattr(ssl, 'CertificateError'): # python 3.2+
ssl_match_hostname = ssl.match_hostname
SSLCertificateError = ssl.CertificateError
elif ssl is None:
ssl_match_hostname = SSLCertificateError = None
else:
import backports.ssl_match_hostname
ssl_match_hostname = backports.ssl_match_hostname.match_hostname
SSLCertificateError = backports.ssl_match_hostname.CertificateError
# ThreadedResolver runs getaddrinfo on a thread. If the hostname is unicode,
# getaddrinfo attempts to import encodings.idna. If this is done at
# module-import time, the import lock is already held by the main thread,
# leading to deadlock. Avoid it by caching the idna encoder on the main
# thread now.
u('foo').encode('idna')
# These errnos indicate that a non-blocking operation must be retried
# at a later time. On most platforms they're the same value, but on
# some they differ.
_ERRNO_WOULDBLOCK = (errno.EWOULDBLOCK, errno.EAGAIN)
if hasattr(errno, "WSAEWOULDBLOCK"):
_ERRNO_WOULDBLOCK += (errno.WSAEWOULDBLOCK,)
def bind_sockets(port, address=None, family=socket.AF_UNSPEC, backlog=128, flags=None):
"""Creates listening sockets bound to the given port and address.
Returns a list of socket objects (multiple sockets are returned if
the given address maps to multiple IP addresses, which is most common
for mixed IPv4 and IPv6 use).
Address may be either an IP address or hostname. If it's a hostname,
the server will listen on all IP addresses associated with the
name. Address may be an empty string or None to listen on all
available interfaces. Family may be set to either `socket.AF_INET`
or `socket.AF_INET6` to restrict to IPv4 or IPv6 addresses, otherwise
both will be used if available.
The ``backlog`` argument has the same meaning as for
`socket.listen() <socket.socket.listen>`.
``flags`` is a bitmask of AI_* flags to `~socket.getaddrinfo`, like
``socket.AI_PASSIVE | socket.AI_NUMERICHOST``.
"""
sockets = []
if address == "":
address = None
if not socket.has_ipv6 and family == socket.AF_UNSPEC:
# Python can be compiled with --disable-ipv6, which causes
# operations on AF_INET6 sockets to fail, but does not
# automatically exclude those results from getaddrinfo
# results.
# http://bugs.python.org/issue16208
family = socket.AF_INET
if flags is None:
flags = socket.AI_PASSIVE
bound_port = None
for res in set(socket.getaddrinfo(address, port, family, socket.SOCK_STREAM,
0, flags)):
af, socktype, proto, canonname, sockaddr = res
if (platform.system() == 'Darwin' and address == 'localhost' and
af == socket.AF_INET6 and sockaddr[3] != 0):
# Mac OS X includes a link-local address fe80::1%lo0 in the
# getaddrinfo results for 'localhost'. However, the firewall
# doesn't understand that this is a local address and will
# prompt for access (often repeatedly, due to an apparent
# bug in its ability to remember granting access to an
# application). Skip these addresses.
continue
try:
sock = socket.socket(af, socktype, proto)
except socket.error as e:
if errno_from_exception(e) == errno.EAFNOSUPPORT:
continue
raise
set_close_exec(sock.fileno())
if os.name != 'nt':
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if af == socket.AF_INET6:
# On linux, ipv6 sockets accept ipv4 too by default,
# but this makes it impossible to bind to both
# 0.0.0.0 in ipv4 and :: in ipv6. On other systems,
# separate sockets *must* be used to listen for both ipv4
# and ipv6. For consistency, always disable ipv4 on our
# ipv6 sockets and use a separate ipv4 socket when needed.
#
# Python 2.x on windows doesn't have IPPROTO_IPV6.
if hasattr(socket, "IPPROTO_IPV6"):
sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
# automatic port allocation with port=None
# should bind on the same port on IPv4 and IPv6
host, requested_port = sockaddr[:2]
if requested_port == 0 and bound_port is not None:
sockaddr = tuple([host, bound_port] + list(sockaddr[2:]))
sock.setblocking(0)
sock.bind(sockaddr)
bound_port = sock.getsockname()[1]
sock.listen(backlog)
sockets.append(sock)
return sockets
if hasattr(socket, 'AF_UNIX'):
def bind_unix_socket(file, mode=0o600, backlog=128):
"""Creates a listening unix socket.
If a socket with the given name already exists, it will be deleted.
If any other file with that name exists, an exception will be
raised.
Returns a socket object (not a list of socket objects like
`bind_sockets`)
"""
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
set_close_exec(sock.fileno())
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.setblocking(0)
try:
st = os.stat(file)
except OSError as err:
if errno_from_exception(err) != errno.ENOENT:
raise
else:
if stat.S_ISSOCK(st.st_mode):
os.remove(file)
else:
raise ValueError("File %s exists and is not a socket", file)
sock.bind(file)
os.chmod(file, mode)
sock.listen(backlog)
return sock
def add_accept_handler(sock, callback, io_loop=None):
"""Adds an `.IOLoop` event handler to accept new connections on ``sock``.
When a connection is accepted, ``callback(connection, address)`` will
be run (``connection`` is a socket object, and ``address`` is the
address of the other end of the connection). Note that this signature
is different from the ``callback(fd, events)`` signature used for
`.IOLoop` handlers.
"""
if io_loop is None:
io_loop = IOLoop.current()
def accept_handler(fd, events):
while True:
try:
connection, address = sock.accept()
except socket.error as e:
# _ERRNO_WOULDBLOCK indicate we have accepted every
# connection that is available.
if errno_from_exception(e) in _ERRNO_WOULDBLOCK:
return
# ECONNABORTED indicates that there was a connection
# but it was closed while still in the accept queue.
# (observed on FreeBSD).
if errno_from_exception(e) == errno.ECONNABORTED:
continue
raise
callback(connection, address)
io_loop.add_handler(sock, accept_handler, IOLoop.READ)
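# Hypothetical usage sketch (not part of Tornado itself): pair bind_sockets()
# with add_accept_handler() to build a minimal accept loop; the handler below
# is illustrative only.
#
# def handle_connection(connection, address):
#     connection.sendall(b"hello\n")
#     connection.close()
#
# for _sock in bind_sockets(8888, address="127.0.0.1"):
#     add_accept_handler(_sock, handle_connection)
# IOLoop.current().start()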
def is_valid_ip(ip):
"""Returns true if the given string is a well-formed IP address.
Supports IPv4 and IPv6.
"""
if not ip or '\x00' in ip:
# getaddrinfo resolves empty strings to localhost, and truncates
# on zero bytes.
return False
try:
res = socket.getaddrinfo(ip, 0, socket.AF_UNSPEC,
socket.SOCK_STREAM,
0, socket.AI_NUMERICHOST)
return bool(res)
except socket.gaierror as e:
if e.args[0] == socket.EAI_NONAME:
return False
raise
return True
class Resolver(Configurable):
"""Configurable asynchronous DNS resolver interface.
By default, a blocking implementation is used (which simply calls
`socket.getaddrinfo`). An alternative implementation can be
chosen with the `Resolver.configure <.Configurable.configure>`
class method::
Resolver.configure('tornado.netutil.ThreadedResolver')
The implementations of this interface included with Tornado are
* `tornado.netutil.BlockingResolver`
* `tornado.netutil.ThreadedResolver`
* `tornado.netutil.OverrideResolver`
* `tornado.platform.twisted.TwistedResolver`
* `tornado.platform.caresresolver.CaresResolver`
"""
@classmethod
def configurable_base(cls):
return Resolver
@classmethod
def configurable_default(cls):
return BlockingResolver
def resolve(self, host, port, family=socket.AF_UNSPEC, callback=None):
"""Resolves an address.
The ``host`` argument is a string which may be a hostname or a
literal IP address.
Returns a `.Future` whose result is a list of (family,
address) pairs, where address is a tuple suitable to pass to
`socket.connect <socket.socket.connect>` (i.e. a ``(host,
port)`` pair for IPv4; additional fields may be present for
IPv6). If a ``callback`` is passed, it will be run with the
result as an argument when it is complete.
"""
raise NotImplementedError()
def close(self):
"""Closes the `Resolver`, freeing any resources used.
.. versionadded:: 3.1
"""
pass
class ExecutorResolver(Resolver):
"""Resolver implementation using a `concurrent.futures.Executor`.
Use this instead of `ThreadedResolver` when you require additional
control over the executor being used.
The executor will be shut down when the resolver is closed unless
``close_resolver=False``; use this if you want to reuse the same
executor elsewhere.
"""
def initialize(self, io_loop=None, executor=None, close_executor=True):
self.io_loop = io_loop or IOLoop.current()
if executor is not None:
self.executor = executor
self.close_executor = close_executor
else:
self.executor = dummy_executor
self.close_executor = False
def close(self):
if self.close_executor:
self.executor.shutdown()
self.executor = None
@run_on_executor
def resolve(self, host, port, family=socket.AF_UNSPEC):
# On Solaris, getaddrinfo fails if the given port is not found
# in /etc/services and no socket type is given, so we must pass
# one here. The socket type used here doesn't seem to actually
# matter (we discard the one we get back in the results),
# so the addresses we return should still be usable with SOCK_DGRAM.
addrinfo = socket.getaddrinfo(host, port, family, socket.SOCK_STREAM)
results = []
for family, socktype, proto, canonname, address in addrinfo:
results.append((family, address))
return results
class BlockingResolver(ExecutorResolver):
"""Default `Resolver` implementation, using `socket.getaddrinfo`.
The `.IOLoop` will be blocked during the resolution, although the
callback will not be run until the next `.IOLoop` iteration.
"""
def initialize(self, io_loop=None):
super(BlockingResolver, self).initialize(io_loop=io_loop)
<|fim▁hole|>
class ThreadedResolver(ExecutorResolver):
"""Multithreaded non-blocking `Resolver` implementation.
Requires the `concurrent.futures` package to be installed
(available in the standard library since Python 3.2,
installable with ``pip install futures`` in older versions).
The thread pool size can be configured with::
Resolver.configure('tornado.netutil.ThreadedResolver',
num_threads=10)
.. versionchanged:: 3.1
All ``ThreadedResolvers`` share a single thread pool, whose
size is set by the first one to be created.
"""
_threadpool = None
_threadpool_pid = None
def initialize(self, io_loop=None, num_threads=10):
threadpool = ThreadedResolver._create_threadpool(num_threads)
super(ThreadedResolver, self).initialize(
io_loop=io_loop, executor=threadpool, close_executor=False)
@classmethod
def _create_threadpool(cls, num_threads):
pid = os.getpid()
if cls._threadpool_pid != pid:
# Threads cannot survive after a fork, so if our pid isn't what it
# was when we created the pool then delete it.
cls._threadpool = None
if cls._threadpool is None:
from concurrent.futures import ThreadPoolExecutor
cls._threadpool = ThreadPoolExecutor(num_threads)
cls._threadpool_pid = pid
return cls._threadpool
class OverrideResolver(Resolver):
"""Wraps a resolver with a mapping of overrides.
This can be used to make local DNS changes (e.g. for testing)
without modifying system-wide settings.
The mapping can contain either host strings or host-port pairs.
"""
def initialize(self, resolver, mapping):
self.resolver = resolver
self.mapping = mapping
def close(self):
self.resolver.close()
def resolve(self, host, port, *args, **kwargs):
if (host, port) in self.mapping:
host, port = self.mapping[(host, port)]
elif host in self.mapping:
host = self.mapping[host]
return self.resolver.resolve(host, port, *args, **kwargs)
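# A hedged usage sketch (hostnames below are hypothetical). Overrides may be
# keyed by bare host or by (host, port) pair, matching resolve() above:
#
#   resolver = OverrideResolver(resolver=BlockingResolver(), mapping={
#       'example.com': '127.0.0.1',
#       ('api.example.com', 443): ('localhost', 8443),
#   })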
# These are the keyword arguments to ssl.wrap_socket that must be translated
# to their SSLContext equivalents (the other arguments are still passed
# to SSLContext.wrap_socket).
_SSL_CONTEXT_KEYWORDS = frozenset(['ssl_version', 'certfile', 'keyfile',
'cert_reqs', 'ca_certs', 'ciphers'])
def ssl_options_to_context(ssl_options):
"""Try to convert an ``ssl_options`` dictionary to an
`~ssl.SSLContext` object.
The ``ssl_options`` dictionary contains keywords to be passed to
`ssl.wrap_socket`. In Python 3.2+, `ssl.SSLContext` objects can
be used instead. This function converts the dict form to its
`~ssl.SSLContext` equivalent, and may be used when a component which
accepts both forms needs to upgrade to the `~ssl.SSLContext` version
to use features like SNI or NPN.
"""
if isinstance(ssl_options, dict):
assert all(k in _SSL_CONTEXT_KEYWORDS for k in ssl_options), ssl_options
if (not hasattr(ssl, 'SSLContext') or
isinstance(ssl_options, ssl.SSLContext)):
return ssl_options
context = ssl.SSLContext(
ssl_options.get('ssl_version', ssl.PROTOCOL_SSLv23))
if 'certfile' in ssl_options:
context.load_cert_chain(ssl_options['certfile'], ssl_options.get('keyfile', None))
if 'cert_reqs' in ssl_options:
context.verify_mode = ssl_options['cert_reqs']
if 'ca_certs' in ssl_options:
context.load_verify_locations(ssl_options['ca_certs'])
if 'ciphers' in ssl_options:
context.set_ciphers(ssl_options['ciphers'])
if hasattr(ssl, 'OP_NO_COMPRESSION'):
# Disable TLS compression to avoid CRIME and related attacks.
# This constant wasn't added until python 3.3.
context.options |= ssl.OP_NO_COMPRESSION
return context
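# Illustrative conversion (filenames are hypothetical): a legacy dict such as
#   {'certfile': 'server.crt', 'keyfile': 'server.key',
#    'cert_reqs': ssl.CERT_REQUIRED, 'ca_certs': 'ca.pem'}
# yields an SSLContext with the certificate chain loaded, verify_mode set,
# and the CA bundle installed, per the branches above.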
def ssl_wrap_socket(socket, ssl_options, server_hostname=None, **kwargs):
"""Returns an ``ssl.SSLSocket`` wrapping the given socket.
``ssl_options`` may be either a dictionary (as accepted by
`ssl_options_to_context`) or an `ssl.SSLContext` object.
Additional keyword arguments are passed to ``wrap_socket``
(either the `~ssl.SSLContext` method or the `ssl` module function
as appropriate).
"""
context = ssl_options_to_context(ssl_options)
if hasattr(ssl, 'SSLContext') and isinstance(context, ssl.SSLContext):
if server_hostname is not None and getattr(ssl, 'HAS_SNI'):
# Python doesn't have server-side SNI support so we can't
# really unittest this, but it can be manually tested with
# python3.2 -m tornado.httpclient https://sni.velox.ch
return context.wrap_socket(socket, server_hostname=server_hostname,
**kwargs)
else:
return context.wrap_socket(socket, **kwargs)
else:
return ssl.wrap_socket(socket, **dict(context, **kwargs))<|fim▁end|> | |
<|file_name|>perf_monitor.cc<|end_file_name|><|fim▁begin|>//--------------------------------------------------------------------------
// Copyright (C) 2014-2016 Cisco and/or its affiliates. All rights reserved.
// Copyright (C) 2002-2013 Sourcefire, Inc.
//
// This program is free software; you can redistribute it and/or modify it
// under the terms of the GNU General Public License Version 2 as published
// by the Free Software Foundation. You may not use, modify or distribute
// this program under any other version of the GNU General Public License.<|fim▁hole|>// WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// General Public License for more details.
//
// You should have received a copy of the GNU General Public License along
// with this program; if not, write to the Free Software Foundation, Inc.,
// 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
//--------------------------------------------------------------------------
/*
** Marc Norton <[email protected]>
** Dan Roelker <[email protected]>
**
** NOTES
** 6.4.02 - Initial Source Code. Norton/Roelker
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <stdlib.h>
#include <ctype.h>
#include <errno.h>
#include <unistd.h>
#include <string>
#include "perf_monitor.h"
#include "perf_module.h"
#include "main/analyzer.h"
#include "main/snort_config.h"
#include "main/snort_types.h"
#include "main/snort_debug.h"
#include "parser/parser.h"
#include "packet_io/sfdaq.h"
#include "profiler/profiler.h"
#include "framework/inspector.h"
#include "utils/stats.h"
#include "utils/util.h"
#include "base_tracker.h"
#include "cpu_tracker.h"
#include "flow_tracker.h"
#include "flow_ip_tracker.h"
#ifdef UNIT_TEST
#include "catch/catch.hpp"
#endif
THREAD_LOCAL SimpleStats pmstats;
THREAD_LOCAL ProfileStats perfmonStats;
THREAD_LOCAL bool perfmon_rotate_perf_file = false;
static PerfConfig config;
PerfConfig* perfmon_config = &config; //FIXIT-M remove this after flowip can be decoupled.
THREAD_LOCAL std::vector<PerfTracker*>* trackers;
static bool ready_to_process(Packet* p);
//-------------------------------------------------------------------------
// class stuff
//-------------------------------------------------------------------------
class PerfMonitor : public Inspector
{
public:
PerfMonitor(PerfMonModule*);
bool configure(SnortConfig*) override;
void show(SnortConfig*) override;
void eval(Packet*) override;
void tinit() override;
void tterm() override;
};
static THREAD_LOCAL PerfMonitor* this_perf_monitor;
PerfMonitor::PerfMonitor(PerfMonModule* mod)
{
mod->get_config(config);
perfmon_config = &config;
}
void PerfMonitor::show(SnortConfig*)
{
LogMessage("PerfMonitor config:\n");
LogMessage(" Sample Time: %d seconds\n", config.sample_interval);
LogMessage(" Packet Count: %d\n", config.pkt_cnt);
LogMessage(" Max File Size: " STDu64 "\n", config.max_file_size);
LogMessage(" Summary Mode: %s\n",
config.perf_flags & PERF_SUMMARY ? "ACTIVE" : "INACTIVE");
LogMessage(" Base Stats: %s\n",
config.perf_flags & PERF_BASE ? "ACTIVE" : "INACTIVE");
LogMessage(" Flow Stats: %s\n",
config.perf_flags & PERF_FLOW ? "ACTIVE" : "INACTIVE");
if (config.perf_flags & PERF_FLOW)
{
LogMessage(" Max Flow Port: %u\n", config.flow_max_port_to_track);
}
LogMessage(" Event Stats: %s\n",
config.perf_flags & PERF_EVENT ? "ACTIVE" : "INACTIVE");
LogMessage(" Flow IP Stats: %s\n",
config.perf_flags & PERF_FLOWIP ? "ACTIVE" : "INACTIVE");
if (config.perf_flags & PERF_FLOWIP)
{
LogMessage(" Flow IP Memcap: %u\n", config.flowip_memcap);
}
LogMessage(" CPU Stats: %s\n",
config.perf_flags & PERF_CPU ? "ACTIVE" : "INACTIVE");
switch(config.output)
{
case PERF_CONSOLE:
LogMessage(" Output Location: console\n");
break;
case PERF_FILE:
LogMessage(" Output Location: file\n");
break;
}
switch(config.format)
{
case PERF_TEXT:
LogMessage(" Output Format: text\n");
break;
case PERF_CSV:
LogMessage(" Output Format: csv\n");
break;
#ifdef UNIT_TEST
case PERF_MOCK:
break;
#endif
}
}
// FIXIT-L perfmonitor should be logging to one file and writing record type and
// version fields immediately after timestamp like
// seconds, usec, type, version#, data1, data2, ...
bool PerfMonitor::configure(SnortConfig*)
{
return true;
}
void PerfMonitor::tinit()
{
trackers = new std::vector<PerfTracker*>();
if (config.perf_flags & PERF_BASE)
trackers->push_back(new BaseTracker(&config));
if (config.perf_flags & PERF_FLOW)
trackers->push_back(new FlowTracker(&config));
if (config.perf_flags & PERF_FLOWIP)
trackers->push_back(perf_flow_ip = new FlowIPTracker(&config));
if (config.perf_flags & PERF_CPU )
trackers->push_back(new CPUTracker(&config));
for (auto& tracker : *trackers)
tracker->open(true);
for (auto& tracker : *trackers)
tracker->reset();
this_perf_monitor = this;
}
void PerfMonitor::tterm()
{
perf_flow_ip = nullptr;
while (!trackers->empty())
{
auto back = trackers->back();
if ( config.perf_flags & PERF_SUMMARY )
back->process(true);
back->close();
delete back;
trackers->pop_back();
}
delete trackers;
}
void PerfMonitor::eval(Packet* p)
{
Profile profile(perfmonStats);
if (IsSetRotatePerfFileFlag())
{
for (auto& tracker : *trackers)
tracker->rotate();
ClearRotatePerfFileFlag();
}
if (p)
{
for (auto& tracker : *trackers)
{
tracker->update(p);
tracker->update_time(p->pkth->ts.tv_sec);
}
}
if ( (!p || !p->is_rebuilt()) && !(config.perf_flags & PERF_SUMMARY) )
{
if (ready_to_process(p))
{
for (auto& tracker : *trackers)
{
tracker->process(false);
tracker->auto_rotate();
}
}
}
if (p)
++pmstats.total_packets;
}
//FIXIT-M uncouple from Snort class when framework permits
void perf_monitor_idle_process()
{
if ( this_perf_monitor )
this_perf_monitor->eval(nullptr);
}
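// ready_to_process() below gates periodic reporting: it returns true only
// once at least config.pkt_cnt packets have been counted AND
// config.sample_interval seconds have elapsed since the last report, after
// which both counters reset. A null packet (idle processing) advances the
// clock with time() instead of a packet timestamp.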
static bool ready_to_process(Packet* p)
{
static THREAD_LOCAL time_t sample_time = 0;
static THREAD_LOCAL time_t cur_time;
static THREAD_LOCAL uint64_t cnt = 0;
if (p)
{
cnt++;
cur_time = p->pkth->ts.tv_sec;
}
else
cur_time = time(nullptr);
if (!sample_time)
sample_time = cur_time;
if ( cnt >= config.pkt_cnt )
{
if ((cur_time - sample_time) >= config.sample_interval)
{
cnt = 0;
sample_time = cur_time;
return true;
}
}
return false;
}
//-------------------------------------------------------------------------
// api stuff
//-------------------------------------------------------------------------
static Module* mod_ctor()
{ return new PerfMonModule; }
static void mod_dtor(Module* m)
{ delete m; }
static Inspector* pm_ctor(Module* m)
{
static THREAD_LOCAL unsigned s_init = true;
if ( !s_init )
return nullptr;
return new PerfMonitor((PerfMonModule*)m);
}
static void pm_dtor(Inspector* p)
{
delete p;
}
static const InspectApi pm_api =
{
{
PT_INSPECTOR,
sizeof(InspectApi),
INSAPI_VERSION,
0,
API_RESERVED,
API_OPTIONS,
PERF_NAME,
PERF_HELP,
mod_ctor,
mod_dtor
},
IT_PROBE,
(uint16_t)PktType::ANY,
nullptr, // buffers
nullptr, // service
nullptr, // pinit
nullptr, // pterm
nullptr, // tinit
nullptr, // tterm
pm_ctor,
pm_dtor,
nullptr, // ssn
nullptr // reset
};
const BaseApi* nin_perf_monitor = &pm_api.base;
#ifdef UNIT_TEST
TEST_CASE("Process timing logic", "[perfmon]")
{
Packet p;
DAQ_PktHdr_t pkth;
p.pkth = &pkth;
config.pkt_cnt = 0;
config.sample_interval = 0;
pkth.ts.tv_sec = 0;
REQUIRE(ready_to_process(&p) == true);
pkth.ts.tv_sec = 1;
REQUIRE(ready_to_process(&p) == true);
config.pkt_cnt = 2;
config.sample_interval = 0;
pkth.ts.tv_sec = 2;
REQUIRE(ready_to_process(&p) == false);
pkth.ts.tv_sec = 3;
REQUIRE(ready_to_process(&p) == true);
config.pkt_cnt = 0;
config.sample_interval = 2;
pkth.ts.tv_sec = 4;
REQUIRE(ready_to_process(&p) == false);
pkth.ts.tv_sec = 8;
REQUIRE(ready_to_process(&p) == true);
pkth.ts.tv_sec = 10;
REQUIRE(ready_to_process(&p) == true);
config.pkt_cnt = 5;
config.sample_interval = 4;
pkth.ts.tv_sec = 11;
REQUIRE(ready_to_process(&p) == false);
pkth.ts.tv_sec = 14;
REQUIRE(ready_to_process(&p) == false);
REQUIRE(ready_to_process(&p) == false);
REQUIRE(ready_to_process(&p) == false);
REQUIRE(ready_to_process(&p) == true);
}
#endif<|fim▁end|> | //
// This program is distributed in the hope that it will be useful, but |
<|file_name|>runtests.py<|end_file_name|><|fim▁begin|>import unittest
import os
from ui import main
print os.getcwd()
class TestMain(unittest.TestCase):
def setUp(self):
self.m = main.MainWindow()
def test_mainWindow(self):
assert(self.m)
def test_dataframe(self):
import numpy
#Random 25x4 Numpy Matrix
self.m.render_dataframe(numpy.random.rand(25,4) ,name='devel',rownames=xrange(0,25))
assert(self.m.active_robject)
assert(self.m.active_robject.columns)
assert(self.m.active_robject.column_data)<|fim▁hole|>
def test_imports(self):
datasets = ['iris','Nile','morley','freeny','sleep','mtcars']
for a in datasets:
main.rsession.r('%s=%s' % (a,a))
self.m.sync_with_r()
assert(a in self.m.robjects)
unittest.main()<|fim▁end|> | |
<|file_name|>pyunit_iris_nfoldsRF.py<|end_file_name|><|fim▁begin|>import sys
sys.path.insert(1, "../../../")
import h2o
def iris_nfolds(ip,port):
iris = h2o.import_file(path=h2o.locate("smalldata/iris/iris.csv"))
<|fim▁hole|> try:
h2o.random_forest(y=iris[4], x=iris[0:4], validation_y=iris[4], validation_x=iris[0:4], ntrees=50, nfolds=5)
assert True
except EnvironmentError:
assert False, "expected an error"
if __name__ == "__main__":
h2o.run_test(sys.argv, iris_nfolds)<|fim▁end|> | model = h2o.random_forest(y=iris[4], x=iris[0:4], ntrees=50, nfolds=5)
model.show()
# Can specify both nfolds >= 2 and validation = H2OParsedData at once |
<|file_name|>struct_h5_t_l_1_1adapt_3_01_t[_n]_4.js<|end_file_name|><|fim▁begin|>var struct_h5_t_l_1_1adapt_3_01_t[_n]_4 =
[
[ "allocate_return", "struct_h5_t_l_1_1adapt_3_01_t[_n]_4.html#a0b70e9265935053f7cd15dd9ae47b5e9", null ],
[ "const_data_return", "struct_h5_t_l_1_1adapt_3_01_t[_n]_4.html#aa26ab555a2c6ae40181e9212b292e3df", null ],
[ "data_return", "struct_h5_t_l_1_1adapt_3_01_t[_n]_4.html#a255473fbedaa64738ad99c1b2b4e9f33", null ],
[ "data_t", "struct_h5_t_l_1_1adapt_3_01_t[_n]_4.html#af9933a1521aecd615759226aab22fc60", null ],
[ "dtype_return", "struct_h5_t_l_1_1adapt_3_01_t[_n]_4.html#a796cb13cf8219e0bef087b366e33be18", null ]<|fim▁hole|><|fim▁end|> | ]; |
<|file_name|>drop.js<|end_file_name|><|fim▁begin|>import { Observable } from './observable'<|fim▁hole|> return Observable(add => {
let dropped = 0
return source.subscribe((val, name) => {
if (dropped++ >= count) add(val, name)
})
})
}<|fim▁end|> |
export default function drop(count, source) { |
<|file_name|>plot_from_stats.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
from __future__ import division
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
#from matplotlib.backends.backend_pdf import PdfPages
import sys
def stats_file_as_matrix(file_name):
with open(file_name, 'r') as f:
return [ map(float,line.strip().split(' ')) for line in f ]
#pdfTitle = 'results.pdf'
#pp = PdfPages(pdfTitle)
titles = ["Bitrate", "Delay", "Jitter", "Packet loss"]
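# Each stats file is assumed to hold one space-separated record per line: a
# leading timestamp column followed by the four metrics named above (the
# loop below reads metric i from column i+1 and plots it against row index).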
for f in sys.argv[1:]:
print("Starting work on "+f+", converting stats to matrix!")
mat = stats_file_as_matrix(f)
x = range(len(mat))
#define the figure size and grid layout properties
figsize = (10, 8)
cols = 2
rows = 2
gs = gridspec.GridSpec( rows, cols)
fig = plt.figure(num=1, figsize=figsize)
fig.suptitle(f)
ax = []
for i in range(4):
y = map(lambda r:r[i+1],mat)
row = (i // cols)
col = i % cols<|fim▁hole|> ax[-1].set_xlabel('Time [ms]')
ax[-1].plot(x, y, 'o', ls='-', ms=4)
print("Finished with "+f+", creating JPG!")
#pp.savefig(fig)
plt.savefig(f+'.jpg')
plt.clf()
#pp.close()<|fim▁end|> | ax.append(fig.add_subplot(gs[row, col]))
ax[-1].set_title(titles[i]) |
<|file_name|>primitive_implementation_formatter.hpp<|end_file_name|><|fim▁begin|>/* -*- mode: c++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*
* Copyright (C) 2012-2015 Marco Craveiro <[email protected]>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*
*/
#ifndef MASD_DOGEN_GENERATION_CPP_TYPES_FORMATTERS_SERIALIZATION_PRIMITIVE_IMPLEMENTATION_FORMATTER_HPP
#define MASD_DOGEN_GENERATION_CPP_TYPES_FORMATTERS_SERIALIZATION_PRIMITIVE_IMPLEMENTATION_FORMATTER_HPP
#if defined(_MSC_VER) && (_MSC_VER >= 1200)
#pragma once
#endif
#include <algorithm>
#include "masd.dogen.generation.cpp/types/formatters/artefact_formatter_interface.hpp"
namespace masd::dogen::generation::cpp::formatters::serialization {
class primitive_implementation_formatter final : public artefact_formatter_interface {
public:
static std::string static_id();
public:
std::string id() const override;
annotations::archetype_location archetype_location() const override;
const coding::meta_model::name& meta_name() const override;
std::string family() const override;
public:
std::list<std::string> inclusion_dependencies(
const formattables::dependencies_builder_factory& f,
const coding::meta_model::element& e) const override;
inclusion_support_types inclusion_support_type() const override;
boost::filesystem::path inclusion_path(
const formattables::locator& l,
const coding::meta_model::name& n) const override;
<|fim▁hole|> const coding::meta_model::name& n) const override;
public:
extraction::meta_model::artefact format(const context& ctx,
const coding::meta_model::element& e) const override;
};
}
#endif<|fim▁end|> | boost::filesystem::path full_path(
const formattables::locator& l, |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>'''
Rigidity is a simple wrapper to the built-in csv module that allows for
validation and correction of data being read/written from/to CSV files.
This module allows you to easily construct validation and correction
rulesets to be applied automatically while preserving the csv interface.
This allows you to easily upgrade old software to use new, strict rules.
'''
import rigidity.errors
import rigidity.rules as rules
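# A minimal usage sketch, kept as a comment (`SomeRule` is hypothetical; the
# concrete rule classes live in rigidity.rules):
#
#   import csv
#   reader = Rigidity(csv.reader(open('data.csv')),
#                     rules=[[rules.SomeRule()], []])
#   for row in reader:  # rows are validated/corrected on the fly
#       ...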
class Rigidity():
'''
    A wrapper for CSV readers and writers that applies validation and
    correction rules to rows as they are read or written.
'''
csvobj = None # Declare here to prevent getattr/setattr recursion
#: Do not display output at all.
DISPLAY_NONE = 0
#: Display simple warnings when ValueError is raised by a rule.
DISPLAY_SIMPLE = 1
def __init__(self, csvobj, rules=[], display=DISPLAY_NONE):
'''
        :param csvobj: a Reader or Writer object from the csv module;
any calls to this object's methods will be wrapped to perform
the specified rigidity checks.
        :param rules=[]: a two-dimensional list of rules to apply to
          columns moving in/out of `csvobj`. The index of each entry
          in this list is the index of the CSV column its rules apply
          to.
        :param int display: verbosity level; when a rule raises an
          error, display the offending row and the column that caused
          it.
'''
self.csvobj = csvobj
self.rules = rules
self.display = display
if isinstance(rules, dict):
self.keys = rules.keys()
else:
self.keys = range(0, len(rules))
# Wrapper methods for the `csv` interface
def writeheader(self):
'''
Plain pass-through to the given CSV object. It is assumed that
header information is already valid when the CSV object is
constructed.
'''
self.csvobj.writeheader()
def writerow(self, row):
'''
Validate and correct the data provided in `row` and raise an
exception if the validation or correction fails. Then, write the
row to the CSV file.
'''
try:
self.csvobj.writerow(self.validate_write(row))
except rigidity.errors.DropRow:
return
def writerows(self, rows):
'''
Validate and correct the data provided in every row and raise an
exception if the validation or correction fails.
.. note::
Behavior in the case that the data is invalid and cannot be
repaired is undefined. For example, the implementation may
choose to write all valid rows up until the error, or it may
choose to only conduct the write operation after all rows have
been verified. Do not depend on the presence or absence of any
of the rows in `rows` in the event that an exception occurs.
'''
for row in rows:
self.writerow(row)
# New methods, not part of the `csv` interface
def validate(self, row):
'''
.. warning::
This method is deprecated and will be removed in a future
release; it is included only to support old code. It will
not produce consistent results with bi-directional rules.
You should use :meth:`validate_read` or
:meth:`validate_write` instead.
Validate that the row conforms with the specified rules,
correcting invalid rows where the rule is able to do so.
If the row is valid or can be made valid through corrections,
this method will return a row that can be written to the CSV
file. If the row is invalid and cannot be corrected, then this
method will raise an exception.
:param row: a row object that can be passed to a CSVWriter's
writerow() method.
'''
# Ensure mutability - I'm looking at you, tuples!
if not isinstance(row, (list, dict)):
row = list(row)
# Iterate through all keys, updating the data
for key in self.keys:
value = row[key]
for rule in self.rules[key]:
if hasattr(rule, 'apply'):
value = rule.apply(value)
else:
                    value = rule.read(value)
row[key] = value
# Return the updated data
return row
def validate_write(self, row):
'''
Validate that the row conforms with the specified rules,
correcting invalid rows where the rule is able to do so.
If the row is valid or can be made valid through corrections,
this method will return a row that can be written to the CSV
file. If the row is invalid and cannot be corrected, then this
method will raise an exception.
:param row: a row object that can be passed to a CSVWriter's
          writerow() method.
'''
# Ensure mutability - I'm looking at you, tuples!
if not isinstance(row, (list, dict)):
row = list(row)
# Iterate through all keys, updating the data
for key in self.keys:
value = row[key]
for rule in self.rules[key]:
try:
value = rule.write(value)
except ValueError as err:
if self.display == self.DISPLAY_SIMPLE:
print('Invalid data encountered in column %s:' % key)
print(' -', row)
print(' - Error raised by rule:', rule)
print('')
raise err
row[key] = value
# Return the updated data
return row
def validate_read(self, row):
'''
Validate that the row conforms with the specified rules,
correcting invalid rows where the rule is able to do so.
If the row is valid or can be made valid through corrections,
this method will return a row that can be written to the CSV
file. If the row is invalid and cannot be corrected, then this
method will raise an exception.
:param row: a row object that can be returned from CSVReader's
          __next__() method.
'''
# Ensure mutability - I'm looking at you, tuples!
if not isinstance(row, (list, dict)):
row = list(row)
# Iterate through all keys, updating the data
for key in self.keys:
value = row[key]
for rule in self.rules[key]:
try:
value = rule.read(value)
except ValueError as err:
if self.display == self.DISPLAY_SIMPLE:
print('Invalid data encountered in column %s:' % key)
print(' -', row)
print(' - Error raised by rule:', rule)
print('')
raise err
except IndexError as err:
if self.display == self.DISPLAY_SIMPLE:
print('IndexError raised in column %s:' % key)
print(' -', row)
print(' - Error raised by rule:', rule)
print('')
raise err
row[key] = value
# Return the updated data
return row
def skip(self):
'''
Return a row, skipping validation. This is useful when you want
to skip validation of header information.
'''
return next(self.csvobj)
def __iter__(self):
for row in iter(self.csvobj):
try:
yield self.validate_read(row)
except rigidity.errors.DropRow:
continue
def __next__(self):
'''
Call the __next__() method on the given CSV object, validate and
repair the row it returns, raise an exception if the row cannot
be repaired, and then return the row.
'''
try:
return self.validate_read(next(self.csvobj))
except rigidity.errors.DropRow:<|fim▁hole|>
def __getattr__(self, name):
if hasattr(self.csvobj, name):
return getattr(self.csvobj, name)
else:
return super().__getattr__(self, name)
def __setattr__(self, name, value):
if hasattr(self.csvobj, name):
return setattr(self.csvobj, name, value)
super().__setattr__(name, value)
def __delattr__(self, name):
if hasattr(self.csvobj, name):
return delattr(self.csvobj, name)
return super().__delattr__(name)<|fim▁end|> | return next(self) |
<|file_name|>moviescanner.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# === This file is part of RateItSeven ===
#
# Copyright 2015, Paolo de Vathaire <[email protected]>
#
# RateItSeven is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# RateItSeven is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.<|fim▁hole|>#
import guessit
from rateItSeven.scan.legacy.filescanner import FileScanner
from rateItSeven.scan.legacy.containers.movieguess import MovieGuess
class MovieScanner(object):
"""
Scan file system directories for video files
    Wrap the guessed info for each video file in a MovieGuess
"""
def __init__(self, dir_paths: list):
self.dir_paths = dir_paths
def list_movies(self):
return self.list_videos_in_types(["movie"])
def list_episodes(self):
return self.list_videos_in_types(["episode"])
def list_videos_in_types(self, video_types):
file_scanner = FileScanner(self.dir_paths)
for abs_path in file_scanner.absolute_file_paths():
guess = MovieGuess(guessit.guessit(abs_path), abs_path)
if guess.is_video_in_types(video_types):
yield guess<|fim▁end|> | #
# You should have received a copy of the GNU General Public License
# along with RateItSeven. If not, see <http://www.gnu.org/licenses/>. |
<|file_name|>test_recognize_digits_conv.py<|end_file_name|><|fim▁begin|>import paddle.v2 as paddle
import paddle.v2.framework.layers as layers
import paddle.v2.framework.nets as nets
import paddle.v2.framework.core as core
import paddle.v2.framework.optimizer as optimizer
from paddle.v2.framework.framework import Program, g_program
from paddle.v2.framework.executor import Executor
import numpy as np
init_program = Program()
program = Program()
images = layers.data(
name='pixel',
shape=[1, 28, 28],
data_type='float32',
program=program,
init_program=init_program)
label = layers.data(
name='label',
shape=[1],
data_type='int64',
program=program,
init_program=init_program)
conv_pool_1 = nets.simple_img_conv_pool(
input=images,
filter_size=5,
num_filters=20,
pool_size=2,
pool_stride=2,
act="relu",
program=program,
init_program=init_program)
conv_pool_2 = nets.simple_img_conv_pool(
input=conv_pool_1,
filter_size=5,
num_filters=50,
pool_size=2,
pool_stride=2,
act="relu",
program=program,
init_program=init_program)
predict = layers.fc(input=conv_pool_2,
size=10,
act="softmax",
program=program,
init_program=init_program)
cost = layers.cross_entropy(
input=predict, label=label, program=program, init_program=init_program)
avg_cost = layers.mean(x=cost, program=program)
accuracy = layers.accuracy(
input=predict, label=label, program=program, init_program=init_program)
sgd_optimizer = optimizer.SGDOptimizer(learning_rate=0.001)
opts = sgd_optimizer.minimize(avg_cost)
BATCH_SIZE = 50
PASS_NUM = 3
train_reader = paddle.batch(
paddle.reader.shuffle(
paddle.dataset.mnist.train(), buf_size=500),
batch_size=BATCH_SIZE)
place = core.CPUPlace()
exe = Executor(place)
exe.run(init_program, feed={}, fetch_list=[])
for pass_id in range(PASS_NUM):
count = 0
for data in train_reader():
img_data = np.array(map(lambda x: x[0].reshape([1, 28, 28]),
data)).astype("float32")
y_data = np.array(map(lambda x: x[1], data)).astype("int64")
y_data = y_data.reshape([BATCH_SIZE, 1])
tensor_img = core.LoDTensor()
tensor_y = core.LoDTensor()
tensor_img.set(img_data, place)
tensor_y.set(y_data, place)
<|fim▁hole|> feed={"pixel": tensor_img,
"label": tensor_y},
fetch_list=[avg_cost, accuracy])
loss = np.array(outs[0])
acc = np.array(outs[1])
if loss < 10.0 and acc > 0.9:
# if avg cost less than 10.0 and accuracy is larger than 0.9, we think our code is good.
exit(0)
exit(1)<|fim▁end|> | outs = exe.run(program, |
<|file_name|>range.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::CharacterDataBinding::CharacterDataMethods;
use crate::dom::bindings::codegen::Bindings::NodeBinding::NodeConstants;
use crate::dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use crate::dom::bindings::codegen::Bindings::NodeListBinding::NodeListMethods;
use crate::dom::bindings::codegen::Bindings::RangeBinding::RangeMethods;
use crate::dom::bindings::codegen::Bindings::RangeBinding::{self, RangeConstants};
use crate::dom::bindings::codegen::Bindings::TextBinding::TextMethods;
use crate::dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;
use crate::dom::bindings::error::{Error, ErrorResult, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::inheritance::{CharacterDataTypeId, NodeTypeId};
use crate::dom::bindings::reflector::{reflect_dom_object, Reflector};
use crate::dom::bindings::root::{Dom, DomRoot, MutDom};
use crate::dom::bindings::str::DOMString;
use crate::dom::bindings::trace::JSTraceable;
use crate::dom::bindings::weakref::{WeakRef, WeakRefVec};
use crate::dom::characterdata::CharacterData;
use crate::dom::document::Document;
use crate::dom::documentfragment::DocumentFragment;
use crate::dom::element::Element;
use crate::dom::htmlscriptelement::HTMLScriptElement;
use crate::dom::node::{Node, ShadowIncluding, UnbindContext};
use crate::dom::text::Text;
use crate::dom::window::Window;
use dom_struct::dom_struct;
use js::jsapi::JSTracer;
use malloc_size_of::{MallocSizeOf, MallocSizeOfOps};
use std::cell::{Cell, UnsafeCell};
use std::cmp::{Ord, Ordering, PartialEq, PartialOrd};
#[dom_struct]
pub struct Range {
reflector_: Reflector,
start: BoundaryPoint,
end: BoundaryPoint,
}
impl Range {
fn new_inherited(
start_container: &Node,
start_offset: u32,
end_container: &Node,
end_offset: u32,
) -> Range {
Range {
reflector_: Reflector::new(),
start: BoundaryPoint::new(start_container, start_offset),
end: BoundaryPoint::new(end_container, end_offset),
}
}
pub fn new_with_doc(document: &Document) -> DomRoot<Range> {
let root = document.upcast();
Range::new(document, root, 0, root, 0)
}
pub fn new(
document: &Document,
start_container: &Node,
start_offset: u32,
end_container: &Node,
end_offset: u32,
) -> DomRoot<Range> {
let range = reflect_dom_object(
Box::new(Range::new_inherited(
start_container,
start_offset,
end_container,
end_offset,
)),
document.window(),
RangeBinding::Wrap,
);
start_container.ranges().push(WeakRef::new(&range));
if start_container != end_container {
end_container.ranges().push(WeakRef::new(&range));
}
range
}
// https://dom.spec.whatwg.org/#dom-range
pub fn Constructor(window: &Window) -> Fallible<DomRoot<Range>> {
let document = window.Document();
Ok(Range::new_with_doc(&document))
}
// https://dom.spec.whatwg.org/#contained
fn contains(&self, node: &Node) -> bool {
match (
bp_position(node, 0, &self.StartContainer(), self.StartOffset()),
bp_position(node, node.len(), &self.EndContainer(), self.EndOffset()),
) {
(Some(Ordering::Greater), Some(Ordering::Less)) => true,
_ => false,
}
}
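    // `contains` holds when the node's start-before point falls after the
    // range's start and its end-after point falls before the range's end,
    // i.e. the node lies entirely within the range.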
// https://dom.spec.whatwg.org/#partially-contained
fn partially_contains(&self, node: &Node) -> bool {
self.StartContainer()
.inclusive_ancestors(ShadowIncluding::No)
.any(|n| &*n == node) !=
self.EndContainer()
.inclusive_ancestors(ShadowIncluding::No)
.any(|n| &*n == node)
}
// https://dom.spec.whatwg.org/#concept-range-clone
fn contained_children(
&self,
) -> Fallible<(
Option<DomRoot<Node>>,
Option<DomRoot<Node>>,
Vec<DomRoot<Node>>,
)> {
let start_node = self.StartContainer();
let end_node = self.EndContainer();
// Steps 5-6.
let common_ancestor = self.CommonAncestorContainer();
let first_contained_child = if start_node.is_inclusive_ancestor_of(&end_node) {
// Step 7.
None
} else {
// Step 8.
common_ancestor
.children()
.find(|node| Range::partially_contains(self, node))
};
let last_contained_child = if end_node.is_inclusive_ancestor_of(&start_node) {
// Step 9.
None
} else {
// Step 10.
common_ancestor
.rev_children()
.find(|node| Range::partially_contains(self, node))
};
// Step 11.
let contained_children: Vec<DomRoot<Node>> = common_ancestor
.children()
.filter(|n| self.contains(n))
.collect();
// Step 12.
if contained_children.iter().any(|n| n.is_doctype()) {
return Err(Error::HierarchyRequest);
}
Ok((
first_contained_child,
last_contained_child,
contained_children,
))
}
// https://dom.spec.whatwg.org/#concept-range-bp-set
fn set_start(&self, node: &Node, offset: u32) {
if &self.start.node != node {
if self.start.node == self.end.node {
node.ranges().push(WeakRef::new(&self));
} else if &self.end.node == node {
self.StartContainer().ranges().remove(self);
} else {
node.ranges()
.push(self.StartContainer().ranges().remove(self));
}
}
self.start.set(node, offset);
}
// https://dom.spec.whatwg.org/#concept-range-bp-set
fn set_end(&self, node: &Node, offset: u32) {
if &self.end.node != node {
if self.end.node == self.start.node {
node.ranges().push(WeakRef::new(&self));
} else if &self.start.node == node {
self.EndContainer().ranges().remove(self);
} else {
node.ranges()
.push(self.EndContainer().ranges().remove(self));
}
}
self.end.set(node, offset);
}
// https://dom.spec.whatwg.org/#dom-range-comparepointnode-offset
fn compare_point(&self, node: &Node, offset: u32) -> Fallible<Ordering> {
let start_node = self.StartContainer();
let start_node_root = start_node
.inclusive_ancestors(ShadowIncluding::No)
.last()
.unwrap();
let node_root = node
.inclusive_ancestors(ShadowIncluding::No)
.last()
.unwrap();
if start_node_root != node_root {
// Step 1.
return Err(Error::WrongDocument);
}
if node.is_doctype() {
// Step 2.
return Err(Error::InvalidNodeType);
}
if offset > node.len() {
// Step 3.
return Err(Error::IndexSize);
}
if let Ordering::Less = bp_position(node, offset, &start_node, self.StartOffset()).unwrap()
{
// Step 4.
return Ok(Ordering::Less);
}
if let Ordering::Greater =
bp_position(node, offset, &self.EndContainer(), self.EndOffset()).unwrap()
{
// Step 5.
return Ok(Ordering::Greater);
}
// Step 6.
Ok(Ordering::Equal)
}
}
impl RangeMethods for Range {
// https://dom.spec.whatwg.org/#dom-range-startcontainer
fn StartContainer(&self) -> DomRoot<Node> {
self.start.node.get()
}
// https://dom.spec.whatwg.org/#dom-range-startoffset
fn StartOffset(&self) -> u32 {
self.start.offset.get()
}
// https://dom.spec.whatwg.org/#dom-range-endcontainer
fn EndContainer(&self) -> DomRoot<Node> {
self.end.node.get()<|fim▁hole|> // https://dom.spec.whatwg.org/#dom-range-endoffset
fn EndOffset(&self) -> u32 {
self.end.offset.get()
}
// https://dom.spec.whatwg.org/#dom-range-collapsed
fn Collapsed(&self) -> bool {
self.start == self.end
}
// https://dom.spec.whatwg.org/#dom-range-commonancestorcontainer
fn CommonAncestorContainer(&self) -> DomRoot<Node> {
let end_container = self.EndContainer();
// Step 1.
for container in self
.StartContainer()
.inclusive_ancestors(ShadowIncluding::No)
{
// Step 2.
if container.is_inclusive_ancestor_of(&end_container) {
// Step 3.
return container;
}
}
unreachable!();
}
// https://dom.spec.whatwg.org/#dom-range-setstart
fn SetStart(&self, node: &Node, offset: u32) -> ErrorResult {
if node.is_doctype() {
// Step 1.
Err(Error::InvalidNodeType)
} else if offset > node.len() {
// Step 2.
Err(Error::IndexSize)
} else {
// Step 3.
self.set_start(node, offset);
if !(self.start <= self.end) {
// Step 4.
self.set_end(node, offset);
}
Ok(())
}
}
// https://dom.spec.whatwg.org/#dom-range-setend
fn SetEnd(&self, node: &Node, offset: u32) -> ErrorResult {
if node.is_doctype() {
// Step 1.
Err(Error::InvalidNodeType)
} else if offset > node.len() {
// Step 2.
Err(Error::IndexSize)
} else {
// Step 3.
self.set_end(node, offset);
if !(self.end >= self.start) {
// Step 4.
self.set_start(node, offset);
}
Ok(())
}
}
// https://dom.spec.whatwg.org/#dom-range-setstartbefore
fn SetStartBefore(&self, node: &Node) -> ErrorResult {
let parent = node.GetParentNode().ok_or(Error::InvalidNodeType)?;
self.SetStart(&parent, node.index())
}
// https://dom.spec.whatwg.org/#dom-range-setstartafter
fn SetStartAfter(&self, node: &Node) -> ErrorResult {
let parent = node.GetParentNode().ok_or(Error::InvalidNodeType)?;
self.SetStart(&parent, node.index() + 1)
}
// https://dom.spec.whatwg.org/#dom-range-setendbefore
fn SetEndBefore(&self, node: &Node) -> ErrorResult {
let parent = node.GetParentNode().ok_or(Error::InvalidNodeType)?;
self.SetEnd(&parent, node.index())
}
// https://dom.spec.whatwg.org/#dom-range-setendafter
fn SetEndAfter(&self, node: &Node) -> ErrorResult {
let parent = node.GetParentNode().ok_or(Error::InvalidNodeType)?;
self.SetEnd(&parent, node.index() + 1)
}
// https://dom.spec.whatwg.org/#dom-range-collapse
fn Collapse(&self, to_start: bool) {
if to_start {
self.set_end(&self.StartContainer(), self.StartOffset());
} else {
self.set_start(&self.EndContainer(), self.EndOffset());
}
}
// https://dom.spec.whatwg.org/#dom-range-selectnode
fn SelectNode(&self, node: &Node) -> ErrorResult {
// Steps 1, 2.
let parent = node.GetParentNode().ok_or(Error::InvalidNodeType)?;
// Step 3.
let index = node.index();
// Step 4.
self.set_start(&parent, index);
// Step 5.
self.set_end(&parent, index + 1);
Ok(())
}
// https://dom.spec.whatwg.org/#dom-range-selectnodecontents
fn SelectNodeContents(&self, node: &Node) -> ErrorResult {
if node.is_doctype() {
// Step 1.
return Err(Error::InvalidNodeType);
}
// Step 2.
let length = node.len();
// Step 3.
self.set_start(node, 0);
// Step 4.
self.set_end(node, length);
Ok(())
}
// https://dom.spec.whatwg.org/#dom-range-compareboundarypoints
fn CompareBoundaryPoints(&self, how: u16, other: &Range) -> Fallible<i16> {
if how > RangeConstants::END_TO_START {
// Step 1.
return Err(Error::NotSupported);
}
let this_root = self
.StartContainer()
.inclusive_ancestors(ShadowIncluding::No)
.last()
.unwrap();
let other_root = other
.StartContainer()
.inclusive_ancestors(ShadowIncluding::No)
.last()
.unwrap();
if this_root != other_root {
// Step 2.
return Err(Error::WrongDocument);
}
// Step 3.
let (this_point, other_point) = match how {
RangeConstants::START_TO_START => (&self.start, &other.start),
RangeConstants::START_TO_END => (&self.end, &other.start),
RangeConstants::END_TO_END => (&self.end, &other.end),
RangeConstants::END_TO_START => (&self.start, &other.end),
_ => unreachable!(),
};
// step 4.
match this_point.partial_cmp(other_point).unwrap() {
Ordering::Less => Ok(-1),
Ordering::Equal => Ok(0),
Ordering::Greater => Ok(1),
}
}
// https://dom.spec.whatwg.org/#dom-range-clonerange
fn CloneRange(&self) -> DomRoot<Range> {
let start_node = self.StartContainer();
let owner_doc = start_node.owner_doc();
Range::new(
&owner_doc,
&start_node,
self.StartOffset(),
&self.EndContainer(),
self.EndOffset(),
)
}
// https://dom.spec.whatwg.org/#dom-range-ispointinrange
fn IsPointInRange(&self, node: &Node, offset: u32) -> Fallible<bool> {
match self.compare_point(node, offset) {
Ok(Ordering::Less) => Ok(false),
Ok(Ordering::Equal) => Ok(true),
Ok(Ordering::Greater) => Ok(false),
Err(Error::WrongDocument) => {
// Step 2.
Ok(false)
},
Err(error) => Err(error),
}
}
// https://dom.spec.whatwg.org/#dom-range-comparepoint
fn ComparePoint(&self, node: &Node, offset: u32) -> Fallible<i16> {
self.compare_point(node, offset).map(|order| match order {
Ordering::Less => -1,
Ordering::Equal => 0,
Ordering::Greater => 1,
})
}
// https://dom.spec.whatwg.org/#dom-range-intersectsnode
fn IntersectsNode(&self, node: &Node) -> bool {
let start_node = self.StartContainer();
let start_node_root = self
.StartContainer()
.inclusive_ancestors(ShadowIncluding::No)
.last()
.unwrap();
let node_root = node
.inclusive_ancestors(ShadowIncluding::No)
.last()
.unwrap();
if start_node_root != node_root {
// Step 1.
return false;
}
let parent = match node.GetParentNode() {
Some(parent) => parent,
None => {
// Step 3.
return true;
},
};
// Step 4.
let offset = node.index();
// Step 5.
Ordering::Greater ==
bp_position(&parent, offset + 1, &start_node, self.StartOffset()).unwrap() &&
Ordering::Less ==
bp_position(&parent, offset, &self.EndContainer(), self.EndOffset()).unwrap()
}
// https://dom.spec.whatwg.org/#dom-range-clonecontents
// https://dom.spec.whatwg.org/#concept-range-clone
fn CloneContents(&self) -> Fallible<DomRoot<DocumentFragment>> {
// Step 3.
let start_node = self.StartContainer();
let start_offset = self.StartOffset();
let end_node = self.EndContainer();
let end_offset = self.EndOffset();
// Step 1.
let fragment = DocumentFragment::new(&start_node.owner_doc());
// Step 2.
if self.start == self.end {
return Ok(fragment);
}
if end_node == start_node {
if let Some(cdata) = start_node.downcast::<CharacterData>() {
// Steps 4.1-2.
let data = cdata
.SubstringData(start_offset, end_offset - start_offset)
.unwrap();
let clone = cdata.clone_with_data(data, &start_node.owner_doc());
// Step 4.3.
fragment.upcast::<Node>().AppendChild(&clone)?;
// Step 4.4
return Ok(fragment);
}
}
// Steps 5-12.
let (first_contained_child, last_contained_child, contained_children) =
self.contained_children()?;
if let Some(child) = first_contained_child {
// Step 13.
if let Some(cdata) = child.downcast::<CharacterData>() {
assert!(child == start_node);
// Steps 13.1-2.
let data = cdata
.SubstringData(start_offset, start_node.len() - start_offset)
.unwrap();
let clone = cdata.clone_with_data(data, &start_node.owner_doc());
// Step 13.3.
fragment.upcast::<Node>().AppendChild(&clone)?;
} else {
// Step 14.1.
let clone = child.CloneNode(/* deep */ false)?;
// Step 14.2.
fragment.upcast::<Node>().AppendChild(&clone)?;
// Step 14.3.
let subrange = Range::new(
&clone.owner_doc(),
&start_node,
start_offset,
&child,
child.len(),
);
// Step 14.4.
let subfragment = subrange.CloneContents()?;
// Step 14.5.
clone.AppendChild(subfragment.upcast())?;
}
}
// Step 15.
for child in contained_children {
// Step 15.1.
let clone = child.CloneNode(/* deep */ true)?;
// Step 15.2.
fragment.upcast::<Node>().AppendChild(&clone)?;
}
if let Some(child) = last_contained_child {
// Step 16.
if let Some(cdata) = child.downcast::<CharacterData>() {
assert!(child == end_node);
// Steps 16.1-2.
let data = cdata.SubstringData(0, end_offset).unwrap();
let clone = cdata.clone_with_data(data, &start_node.owner_doc());
// Step 16.3.
fragment.upcast::<Node>().AppendChild(&clone)?;
} else {
// Step 17.1.
let clone = child.CloneNode(/* deep */ false)?;
// Step 17.2.
fragment.upcast::<Node>().AppendChild(&clone)?;
// Step 17.3.
let subrange = Range::new(&clone.owner_doc(), &child, 0, &end_node, end_offset);
// Step 17.4.
let subfragment = subrange.CloneContents()?;
// Step 17.5.
clone.AppendChild(subfragment.upcast())?;
}
}
// Step 18.
Ok(fragment)
}
// https://dom.spec.whatwg.org/#dom-range-extractcontents
// https://dom.spec.whatwg.org/#concept-range-extract
fn ExtractContents(&self) -> Fallible<DomRoot<DocumentFragment>> {
// Step 3.
let start_node = self.StartContainer();
let start_offset = self.StartOffset();
let end_node = self.EndContainer();
let end_offset = self.EndOffset();
// Step 1.
let fragment = DocumentFragment::new(&start_node.owner_doc());
// Step 2.
if self.Collapsed() {
return Ok(fragment);
}
if end_node == start_node {
if let Some(end_data) = end_node.downcast::<CharacterData>() {
// Step 4.1.
let clone = end_node.CloneNode(/* deep */ true)?;
// Step 4.2.
let text = end_data.SubstringData(start_offset, end_offset - start_offset);
clone
.downcast::<CharacterData>()
.unwrap()
.SetData(text.unwrap());
// Step 4.3.
fragment.upcast::<Node>().AppendChild(&clone)?;
// Step 4.4.
end_data.ReplaceData(start_offset, end_offset - start_offset, DOMString::new())?;
// Step 4.5.
return Ok(fragment);
}
}
// Steps 5-12.
let (first_contained_child, last_contained_child, contained_children) =
self.contained_children()?;
let (new_node, new_offset) = if start_node.is_inclusive_ancestor_of(&end_node) {
// Step 13.
(DomRoot::from_ref(&*start_node), start_offset)
} else {
// Step 14.1-2.
let reference_node = start_node
.ancestors()
.take_while(|n| !n.is_inclusive_ancestor_of(&end_node))
.last()
.unwrap_or(DomRoot::from_ref(&start_node));
// Step 14.3.
(
reference_node.GetParentNode().unwrap(),
reference_node.index() + 1,
)
};
if let Some(child) = first_contained_child {
if let Some(start_data) = child.downcast::<CharacterData>() {
assert!(child == start_node);
// Step 15.1.
let clone = start_node.CloneNode(/* deep */ true)?;
// Step 15.2.
let text = start_data.SubstringData(start_offset, start_node.len() - start_offset);
clone
.downcast::<CharacterData>()
.unwrap()
.SetData(text.unwrap());
// Step 15.3.
fragment.upcast::<Node>().AppendChild(&clone)?;
// Step 15.4.
start_data.ReplaceData(
start_offset,
start_node.len() - start_offset,
DOMString::new(),
)?;
} else {
// Step 16.1.
let clone = child.CloneNode(/* deep */ false)?;
// Step 16.2.
fragment.upcast::<Node>().AppendChild(&clone)?;
// Step 16.3.
let subrange = Range::new(
&clone.owner_doc(),
&start_node,
start_offset,
&child,
child.len(),
);
// Step 16.4.
let subfragment = subrange.ExtractContents()?;
// Step 16.5.
clone.AppendChild(subfragment.upcast())?;
}
}
// Step 17.
for child in contained_children {
fragment.upcast::<Node>().AppendChild(&child)?;
}
if let Some(child) = last_contained_child {
if let Some(end_data) = child.downcast::<CharacterData>() {
assert!(child == end_node);
// Step 18.1.
let clone = end_node.CloneNode(/* deep */ true)?;
// Step 18.2.
let text = end_data.SubstringData(0, end_offset);
clone
.downcast::<CharacterData>()
.unwrap()
.SetData(text.unwrap());
// Step 18.3.
fragment.upcast::<Node>().AppendChild(&clone)?;
// Step 18.4.
end_data.ReplaceData(0, end_offset, DOMString::new())?;
} else {
// Step 19.1.
let clone = child.CloneNode(/* deep */ false)?;
// Step 19.2.
fragment.upcast::<Node>().AppendChild(&clone)?;
// Step 19.3.
let subrange = Range::new(&clone.owner_doc(), &child, 0, &end_node, end_offset);
// Step 19.4.
let subfragment = subrange.ExtractContents()?;
// Step 19.5.
clone.AppendChild(subfragment.upcast())?;
}
}
// Step 20.
self.SetStart(&new_node, new_offset)?;
self.SetEnd(&new_node, new_offset)?;
// Step 21.
Ok(fragment)
}
// https://dom.spec.whatwg.org/#dom-range-detach
fn Detach(&self) {
// This method intentionally left blank.
}
// https://dom.spec.whatwg.org/#dom-range-insertnode
// https://dom.spec.whatwg.org/#concept-range-insert
fn InsertNode(&self, node: &Node) -> ErrorResult {
let start_node = self.StartContainer();
let start_offset = self.StartOffset();
// Step 1.
if &*start_node == node {
return Err(Error::HierarchyRequest);
}
match start_node.type_id() {
// Handled under step 2.
NodeTypeId::CharacterData(CharacterDataTypeId::Text(_)) => (),
NodeTypeId::CharacterData(_) => return Err(Error::HierarchyRequest),
_ => (),
}
// Step 2.
let (reference_node, parent) = match start_node.type_id() {
NodeTypeId::CharacterData(CharacterDataTypeId::Text(_)) => {
// Step 3.
let parent = match start_node.GetParentNode() {
Some(parent) => parent,
// Step 1.
None => return Err(Error::HierarchyRequest),
};
// Step 5.
(Some(DomRoot::from_ref(&*start_node)), parent)
},
_ => {
// Steps 4-5.
let child = start_node.ChildNodes().Item(start_offset);
(child, DomRoot::from_ref(&*start_node))
},
};
// Step 6.
Node::ensure_pre_insertion_validity(node, &parent, reference_node.as_deref())?;
// Step 7.
let split_text;
let reference_node = match start_node.downcast::<Text>() {
Some(text) => {
split_text = text.SplitText(start_offset)?;
let new_reference = DomRoot::upcast::<Node>(split_text);
assert!(new_reference.GetParentNode().as_deref() == Some(&parent));
Some(new_reference)
},
_ => reference_node,
};
// Step 8.
let reference_node = if Some(node) == reference_node.as_deref() {
node.GetNextSibling()
} else {
reference_node
};
// Step 9.
node.remove_self();
// Step 10.
let new_offset = reference_node
.as_ref()
.map_or(parent.len(), |node| node.index());
// Step 11
let new_offset = new_offset +
if let NodeTypeId::DocumentFragment(_) = node.type_id() {
node.len()
} else {
1
};
// Step 12.
Node::pre_insert(node, &parent, reference_node.as_deref())?;
// Step 13.
if self.Collapsed() {
self.set_end(&parent, new_offset);
}
Ok(())
}
// https://dom.spec.whatwg.org/#dom-range-deletecontents
fn DeleteContents(&self) -> ErrorResult {
// Step 1.
if self.Collapsed() {
return Ok(());
}
// Step 2.
let start_node = self.StartContainer();
let end_node = self.EndContainer();
let start_offset = self.StartOffset();
let end_offset = self.EndOffset();
// Step 3.
if start_node == end_node {
if let Some(text) = start_node.downcast::<CharacterData>() {
return text.ReplaceData(start_offset, end_offset - start_offset, DOMString::new());
}
}
// Step 4.
rooted_vec!(let mut contained_children);
let ancestor = self.CommonAncestorContainer();
let mut iter = start_node.following_nodes(&ancestor);
let mut next = iter.next();
while let Some(child) = next {
if self.contains(&child) {
contained_children.push(Dom::from_ref(&*child));
next = iter.next_skipping_children();
} else {
next = iter.next();
}
}
let (new_node, new_offset) = if start_node.is_inclusive_ancestor_of(&end_node) {
// Step 5.
(DomRoot::from_ref(&*start_node), start_offset)
} else {
// Step 6.
fn compute_reference(start_node: &Node, end_node: &Node) -> (DomRoot<Node>, u32) {
let mut reference_node = DomRoot::from_ref(start_node);
while let Some(parent) = reference_node.GetParentNode() {
if parent.is_inclusive_ancestor_of(end_node) {
return (parent, reference_node.index() + 1);
}
reference_node = parent;
}
unreachable!()
}
compute_reference(&start_node, &end_node)
};
// Step 7.
if let Some(text) = start_node.downcast::<CharacterData>() {
text.ReplaceData(
start_offset,
start_node.len() - start_offset,
DOMString::new(),
)
.unwrap();
}
// Step 8.
for child in &*contained_children {
child.remove_self();
}
// Step 9.
if let Some(text) = end_node.downcast::<CharacterData>() {
text.ReplaceData(0, end_offset, DOMString::new()).unwrap();
}
// Step 10.
self.SetStart(&new_node, new_offset).unwrap();
self.SetEnd(&new_node, new_offset).unwrap();
Ok(())
}
// https://dom.spec.whatwg.org/#dom-range-surroundcontents
fn SurroundContents(&self, new_parent: &Node) -> ErrorResult {
// Step 1.
let start = self.StartContainer();
let end = self.EndContainer();
if start
.inclusive_ancestors(ShadowIncluding::No)
.any(|n| !n.is_inclusive_ancestor_of(&end) && !n.is::<Text>()) ||
end.inclusive_ancestors(ShadowIncluding::No)
.any(|n| !n.is_inclusive_ancestor_of(&start) && !n.is::<Text>())
{
return Err(Error::InvalidState);
}
// Step 2.
match new_parent.type_id() {
NodeTypeId::Document(_) |
NodeTypeId::DocumentType |
NodeTypeId::DocumentFragment(_) => {
return Err(Error::InvalidNodeType);
},
_ => (),
}
// Step 3.
let fragment = self.ExtractContents()?;
// Step 4.
Node::replace_all(None, new_parent);
// Step 5.
self.InsertNode(new_parent)?;
// Step 6.
new_parent.AppendChild(fragment.upcast())?;
// Step 7.
self.SelectNode(new_parent)
}
// https://dom.spec.whatwg.org/#dom-range-stringifier
fn Stringifier(&self) -> DOMString {
let start_node = self.StartContainer();
let end_node = self.EndContainer();
// Step 1.
let mut s = DOMString::new();
if let Some(text_node) = start_node.downcast::<Text>() {
let char_data = text_node.upcast::<CharacterData>();
// Step 2.
if start_node == end_node {
return char_data
.SubstringData(self.StartOffset(), self.EndOffset() - self.StartOffset())
.unwrap();
}
// Step 3.
s.push_str(
&*char_data
.SubstringData(self.StartOffset(), char_data.Length() - self.StartOffset())
.unwrap(),
);
}
// Step 4.
let ancestor = self.CommonAncestorContainer();
let mut iter = start_node
.following_nodes(&ancestor)
.filter_map(DomRoot::downcast::<Text>);
while let Some(child) = iter.next() {
if self.contains(child.upcast()) {
s.push_str(&*child.upcast::<CharacterData>().Data());
}
}
// Step 5.
if let Some(text_node) = end_node.downcast::<Text>() {
let char_data = text_node.upcast::<CharacterData>();
s.push_str(&*char_data.SubstringData(0, self.EndOffset()).unwrap());
}
// Step 6.
s
}
// https://dvcs.w3.org/hg/innerhtml/raw-file/tip/index.html#extensions-to-the-range-interface
fn CreateContextualFragment(&self, fragment: DOMString) -> Fallible<DomRoot<DocumentFragment>> {
// Step 1.
let node = self.StartContainer();
let owner_doc = node.owner_doc();
let element = match node.type_id() {
NodeTypeId::Document(_) | NodeTypeId::DocumentFragment(_) => None,
NodeTypeId::Element(_) => Some(DomRoot::downcast::<Element>(node).unwrap()),
NodeTypeId::CharacterData(CharacterDataTypeId::Comment) |
NodeTypeId::CharacterData(CharacterDataTypeId::Text(_)) => node.GetParentElement(),
NodeTypeId::CharacterData(CharacterDataTypeId::ProcessingInstruction) |
NodeTypeId::DocumentType => unreachable!(),
};
// Step 2.
let element = Element::fragment_parsing_context(&owner_doc, element.as_deref());
// Step 3.
let fragment_node = element.parse_fragment(fragment)?;
// Step 4.
for node in fragment_node
.upcast::<Node>()
.traverse_preorder(ShadowIncluding::No)
{
if let Some(script) = node.downcast::<HTMLScriptElement>() {
script.set_already_started(false);
script.set_parser_inserted(false);
}
}
// Step 5.
Ok(fragment_node)
}
}
#[derive(DenyPublicFields, JSTraceable, MallocSizeOf)]
#[must_root]
pub struct BoundaryPoint {
node: MutDom<Node>,
offset: Cell<u32>,
}
impl BoundaryPoint {
fn new(node: &Node, offset: u32) -> BoundaryPoint {
debug_assert!(!node.is_doctype());
debug_assert!(offset <= node.len());
BoundaryPoint {
node: MutDom::new(node),
offset: Cell::new(offset),
}
}
pub fn set(&self, node: &Node, offset: u32) {
self.node.set(node);
self.set_offset(offset);
}
pub fn set_offset(&self, offset: u32) {
self.offset.set(offset);
}
}
#[allow(unrooted_must_root)]
impl PartialOrd for BoundaryPoint {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
bp_position(
&self.node.get(),
self.offset.get(),
&other.node.get(),
other.offset.get(),
)
}
}
#[allow(unrooted_must_root)]
impl PartialEq for BoundaryPoint {
fn eq(&self, other: &Self) -> bool {
self.node.get() == other.node.get() && self.offset.get() == other.offset.get()
}
}
// https://dom.spec.whatwg.org/#concept-range-bp-position
fn bp_position(a_node: &Node, a_offset: u32, b_node: &Node, b_offset: u32) -> Option<Ordering> {
if a_node as *const Node == b_node as *const Node {
// Step 1.
return Some(a_offset.cmp(&b_offset));
}
let position = b_node.CompareDocumentPosition(a_node);
if position & NodeConstants::DOCUMENT_POSITION_DISCONNECTED != 0 {
// No order is defined for nodes not in the same tree.
None
} else if position & NodeConstants::DOCUMENT_POSITION_FOLLOWING != 0 {
// Step 2.
match bp_position(b_node, b_offset, a_node, a_offset).unwrap() {
Ordering::Less => Some(Ordering::Greater),
Ordering::Greater => Some(Ordering::Less),
Ordering::Equal => unreachable!(),
}
} else if position & NodeConstants::DOCUMENT_POSITION_CONTAINS != 0 {
// Step 3-1, 3-2.
let mut b_ancestors = b_node.inclusive_ancestors(ShadowIncluding::No);
let child = b_ancestors
.find(|child| &*child.GetParentNode().unwrap() == a_node)
.unwrap();
// Step 3-3.
if child.index() < a_offset {
Some(Ordering::Greater)
} else {
// Step 4.
Some(Ordering::Less)
}
} else {
// Step 4.
Some(Ordering::Less)
}
}
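/// Weak references to every `Range` whose start or end boundary point lives
/// in a given node. The methods below patch those boundary points when the
/// node is mutated (insertion, removal, text splitting, normalization, or
/// character-data replacement).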
pub struct WeakRangeVec {
cell: UnsafeCell<WeakRefVec<Range>>,
}
#[allow(unsafe_code)]
impl WeakRangeVec {
/// Create a new vector of weak references.
pub fn new() -> Self {
WeakRangeVec {
cell: UnsafeCell::new(WeakRefVec::new()),
}
}
/// Whether that vector of ranges is empty.
pub fn is_empty(&self) -> bool {
unsafe { (*self.cell.get()).is_empty() }
}
/// Used for steps 2.1-2. when inserting a node.
/// <https://dom.spec.whatwg.org/#concept-node-insert>
pub fn increase_above(&self, node: &Node, offset: u32, delta: u32) {
self.map_offset_above(node, offset, |offset| offset + delta);
}
/// Used for steps 4-5. when removing a node.
/// <https://dom.spec.whatwg.org/#concept-node-remove>
pub fn decrease_above(&self, node: &Node, offset: u32, delta: u32) {
self.map_offset_above(node, offset, |offset| offset - delta);
}
/// Used for steps 2-3. when removing a node.
/// <https://dom.spec.whatwg.org/#concept-node-remove>
pub fn drain_to_parent(&self, context: &UnbindContext, child: &Node) {
if self.is_empty() {
return;
}
let offset = context.index();
let parent = context.parent;
unsafe {
let ranges = &mut *self.cell.get();
ranges.update(|entry| {
let range = entry.root().unwrap();
if &range.start.node == parent || &range.end.node == parent {
entry.remove();
}
if &range.start.node == child {
range.start.set(context.parent, offset);
}
if &range.end.node == child {
range.end.set(context.parent, offset);
}
});
(*context.parent.ranges().cell.get()).extend(ranges.drain(..));
}
}
/// Used for steps 7.1-2. when normalizing a node.
/// <https://dom.spec.whatwg.org/#dom-node-normalize>
pub fn drain_to_preceding_text_sibling(&self, node: &Node, sibling: &Node, length: u32) {
if self.is_empty() {
return;
}
unsafe {
let ranges = &mut *self.cell.get();
ranges.update(|entry| {
let range = entry.root().unwrap();
if &range.start.node == sibling || &range.end.node == sibling {
entry.remove();
}
if &range.start.node == node {
range.start.set(sibling, range.StartOffset() + length);
}
if &range.end.node == node {
range.end.set(sibling, range.EndOffset() + length);
}
});
(*sibling.ranges().cell.get()).extend(ranges.drain(..));
}
}
/// Used for steps 7.3-4. when normalizing a node.
/// <https://dom.spec.whatwg.org/#dom-node-normalize>
pub fn move_to_text_child_at(&self, node: &Node, offset: u32, child: &Node, new_offset: u32) {
unsafe {
let child_ranges = &mut *child.ranges().cell.get();
(*self.cell.get()).update(|entry| {
let range = entry.root().unwrap();
let node_is_start = &range.start.node == node;
let node_is_end = &range.end.node == node;
let move_start = node_is_start && range.StartOffset() == offset;
let move_end = node_is_end && range.EndOffset() == offset;
let remove_from_node = move_start && move_end ||
move_start && !node_is_end ||
move_end && !node_is_start;
let already_in_child = &range.start.node == child || &range.end.node == child;
let push_to_child = !already_in_child && (move_start || move_end);
if remove_from_node {
let ref_ = entry.remove();
if push_to_child {
child_ranges.push(ref_);
}
} else if push_to_child {
child_ranges.push(WeakRef::new(&range));
}
if move_start {
range.start.set(child, new_offset);
}
if move_end {
range.end.set(child, new_offset);
}
});
}
}
/// Used for steps 8-11. when replacing character data.
/// <https://dom.spec.whatwg.org/#concept-cd-replace>
pub fn replace_code_units(
&self,
node: &Node,
offset: u32,
removed_code_units: u32,
added_code_units: u32,
) {
self.map_offset_above(node, offset, |range_offset| {
if range_offset <= offset + removed_code_units {
offset
} else {
range_offset + added_code_units - removed_code_units
}
});
}
/// Used for steps 7.2-3. when splitting a text node.
/// <https://dom.spec.whatwg.org/#concept-text-split>
pub fn move_to_following_text_sibling_above(&self, node: &Node, offset: u32, sibling: &Node) {
unsafe {
let sibling_ranges = &mut *sibling.ranges().cell.get();
(*self.cell.get()).update(|entry| {
let range = entry.root().unwrap();
let start_offset = range.StartOffset();
let end_offset = range.EndOffset();
let node_is_start = &range.start.node == node;
let node_is_end = &range.end.node == node;
let move_start = node_is_start && start_offset > offset;
let move_end = node_is_end && end_offset > offset;
let remove_from_node = move_start && move_end ||
move_start && !node_is_end ||
move_end && !node_is_start;
let already_in_sibling = &range.start.node == sibling || &range.end.node == sibling;
let push_to_sibling = !already_in_sibling && (move_start || move_end);
if remove_from_node {
let ref_ = entry.remove();
if push_to_sibling {
sibling_ranges.push(ref_);
}
} else if push_to_sibling {
sibling_ranges.push(WeakRef::new(&range));
}
if move_start {
range.start.set(sibling, start_offset - offset);
}
if move_end {
range.end.set(sibling, end_offset - offset);
}
});
}
}
/// Used for steps 7.4-5. when splitting a text node.
/// <https://dom.spec.whatwg.org/#concept-text-split>
pub fn increment_at(&self, node: &Node, offset: u32) {
unsafe {
(*self.cell.get()).update(|entry| {
let range = entry.root().unwrap();
if &range.start.node == node && offset == range.StartOffset() {
range.start.set_offset(offset + 1);
}
if &range.end.node == node && offset == range.EndOffset() {
range.end.set_offset(offset + 1);
}
});
}
}
fn map_offset_above<F: FnMut(u32) -> u32>(&self, node: &Node, offset: u32, mut f: F) {
unsafe {
(*self.cell.get()).update(|entry| {
let range = entry.root().unwrap();
let start_offset = range.StartOffset();
if &range.start.node == node && start_offset > offset {
range.start.set_offset(f(start_offset));
}
let end_offset = range.EndOffset();
if &range.end.node == node && end_offset > offset {
range.end.set_offset(f(end_offset));
}
});
}
}
fn push(&self, ref_: WeakRef<Range>) {
unsafe {
(*self.cell.get()).push(ref_);
}
}
fn remove(&self, range: &Range) -> WeakRef<Range> {
unsafe {
let ranges = &mut *self.cell.get();
let position = ranges.iter().position(|ref_| ref_ == range).unwrap();
ranges.swap_remove(position)
}
}
}
#[allow(unsafe_code)]
impl MallocSizeOf for WeakRangeVec {
fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
unsafe { (*self.cell.get()).size_of(ops) }
}
}
#[allow(unsafe_code)]
unsafe impl JSTraceable for WeakRangeVec {
unsafe fn trace(&self, _: *mut JSTracer) {
(*self.cell.get()).retain_alive()
}
}
<|file_name|>inject-5.py<|end_file_name|>
# 'noise', 'signal' and 'data' are assumed to be the TimeSeries built in the
# earlier steps of this example.
from gwpy.plot import Plot
plot = Plot(noise, signal, data, separate=True, sharex=True, sharey=True)
plot.gca().set_epoch(0)
plot.show()
<|file_name|>loginTests.js<|end_file_name|>
// ----------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// ----------------------------------------------------------------------------
/// <reference path="../../../ZumoE2ETestAppJs/ZumoE2ETestAppJs/js/MobileServices.js" />
/// <reference path="/LiveSDKHTML/js/wl.js" />
/// <reference path="../testFramework.js" />
function defineLoginTestsNamespace() {
var tests = [];
var i;
var TABLE_PERMISSION_PUBLIC = 1;
var TABLE_PERMISSION_APPLICATION = 2;
var TABLE_PERMISSION_USER = 3;
var TABLE_PERMISSION_ADMIN = 4;
var TABLE_NAME_PUBLIC = 'public';
var TABLE_NAME_APPLICATION = 'application';
var TABLE_NAME_AUTHENTICATED = 'authenticated';
var TABLE_NAME_ADMIN = 'admin';
var tables = [
{ name: TABLE_NAME_PUBLIC, permission: TABLE_PERMISSION_PUBLIC },
{ name: TABLE_NAME_APPLICATION, permission: TABLE_PERMISSION_APPLICATION },
{ name: TABLE_NAME_AUTHENTICATED, permission: TABLE_PERMISSION_USER },
{ name: TABLE_NAME_ADMIN, permission: TABLE_PERMISSION_ADMIN }];
var supportRecycledToken = {
facebook: true,
google: false, // Known bug - Drop login via Google token until Google client flow is reintroduced
twitter: false,
microsoftaccount: false
};
tests.push(createLogoutTest());
var index, table;
for (index = 0; index < tables.length; index++) {
table = tables[index];
tests.push(createCRUDTest(table.name, null, table.permission, false));
}
var indexOfTestsWithAuthentication = 0;
var lastUserIdentityObject = null;
var providers = ['facebook', 'google', 'twitter', 'microsoftaccount'];
for (i = 0; i < providers.length; i++) {
var provider = providers[i];
tests.push(createLogoutTest());
tests.push(createLoginTest(provider));
for (index = 0; index < tables.length; index++) {
table = tables[index];
if (table.permission !== TABLE_PERMISSION_PUBLIC) {
tests.push(createCRUDTest(table.name, provider, table.permission, true));
}
}
if (supportRecycledToken[provider]) {
tests.push(createLogoutTest());
tests.push(createClientSideLoginTest(provider));
tests.push(createCRUDTest(TABLE_NAME_AUTHENTICATED, provider, TABLE_PERMISSION_USER, true));
}
}
if (!testPlatform.IsHTMLApplication) {
        // In browser, the default is single sign-on and the Live SDK is not supported
tests.push(createLogoutTest());
tests.push(createLiveSDKLoginTest());
tests.push(createCRUDTest(TABLE_NAME_AUTHENTICATED, 'microsoftaccount', TABLE_PERMISSION_USER, true));
providers.forEach(function (provider) {
if (provider === 'microsoftaccount') {
// Known issue - SSO for MS account does not work in application which also uses the Live SDK
} else {
tests.push(createLogoutTest());
tests.push(createLoginTest(provider, true));
tests.push(createCRUDTest(TABLE_NAME_AUTHENTICATED, provider, TABLE_PERMISSION_USER, true));
}
});
}
for (var i = indexOfTestsWithAuthentication; i < tests.length; i++) {
tests[i].canRunUnattended = false;
}
function createLiveSDKLoginTest() {
var liveSDKInitialized = false;
return new zumo.Test('Login via token with the Live SDK', function (test, done) {
/// <param name="test" type="zumo.Test">The test associated with this execution.</param>
var client = zumo.getClient();
if (!liveSDKInitialized) {
WL.init({ redirect_uri: client.applicationUrl });
liveSDKInitialized = true;
test.addLog('Initialized the WL object');
}
WL.login({ scope: 'wl.basic' }).then(function (wlLoginResult) {
test.addLog('Logged in via Live SDK: ', wlLoginResult);
WL.api({ path: 'me', method: 'GET' }).then(function (wlMeResult) {
test.addLog('My information: ', wlMeResult);
var token = { authenticationToken: wlLoginResult.session.authentication_token };
client.login('microsoftaccount', token).done(function (user) {
test.addLog('Logged in as ', user);
done(true);
}, function (err) {
test.addLog('Error logging into the mobile service: ', err);
done(false);
});
}, function (err) {
test.addLog('Error calling WL.api: ', err);
done(false);
});
}, function (err) {
test.addLog('Error logging in via Live SDK: ', err);
done(false);
});
});
}
function createClientSideLoginTest(provider) {
/// <param name="provider" type="String" mayBeNull="true">The name of the authentication provider for
/// the client. Currently only 'facebook' and 'google' are supported for this test.</param>
return new zumo.Test('Login via token for ' + provider, function (test, done) {
/// <param name="test" type="zumo.Test">The test associated with this execution.</param>
var client = zumo.getClient();
var lastIdentity = lastUserIdentityObject;
if (!lastIdentity) {
test.addLog('Last identity object is null. Cannot run this test.');
done(false);
} else {
var token = {};
if (provider === 'facebook' || provider === 'google') {
token.access_token = lastIdentity[provider].accessToken;
client.login(provider, token).done(function (user) {
test.addLog('Logged in as ', user);
done(true);
}, function (err) {
test.addLog('Error on login: ', err);
done(false);
});
} else {
test.addLog('Client-side login for ' + provider + ' is not implemented or not supported.');
done(false);
}
}
});
}
function createCRUDTest(tableName, provider, tablePermission, userIsAuthenticated) {
/// <param name="tableName" type="String">The name of the table to attempt the CRUD operations for.</param>
/// <param name="provider" type="String" mayBeNull="true">The name of the authentication provider for
/// the client.</param>
/// <param name="tablePermission" type="Number" mayBeNull="false">The permission required to access the
/// table. One of the constants defined in the scope.</param>
/// <param name="userIsAuthenticated" type="Boolean" mayBeNull="false">The name of the table to attempt
/// the CRUD operations for.</param>
/// <return type="zumo.Test"/>
var testName = 'CRUD, ' + (userIsAuthenticated ? ('auth by ' + provider) : 'unauthenticated');
testName = testName + ', table with ';
testName = testName + ['public', 'application', 'user', 'admin'][tablePermission - 1];
testName = testName + ' permission.';
return new zumo.Test(testName, function (test, done) {
/// <param name="test" type="zumo.Test">The test associated with this execution.</param>
var crudShouldWork = tablePermission === TABLE_PERMISSION_PUBLIC ||
tablePermission === TABLE_PERMISSION_APPLICATION ||
(tablePermission === TABLE_PERMISSION_USER && userIsAuthenticated);
var client = zumo.getClient();
var table = client.getTable(tableName);
var currentUser = client.currentUser;
var item = { name: 'hello' };
var insertedItem;
var validateCRUDResult = function (operation, error) {
var result = false;
if (crudShouldWork) {
if (error) {
test.addLog(operation + ' should have succeeded, but got error: ', error);
} else {
test.addLog(operation + ' succeeded as expected.');
result = true;
}
} else {
if (error) {
var xhr = error.request;
if (xhr) {
var isInternetExplorer10 = testPlatform.IsHTMLApplication && window.ActiveXObject && window.navigator.userAgent.toLowerCase().match(/msie ([\d.]+)/)[1] == "10.0";
// IE 10 has a bug in which it doesn't set the status code correctly - https://connect.microsoft.com/IE/feedback/details/785990
// so we cannot validate the status code if this is the case.
if (isInternetExplorer10) {
result = true;
} else {
if (xhr.status == 401) {
test.addLog('Got expected response code (401) for ', operation);
result = true;
} else {
zumo.util.traceResponse(test, xhr);
test.addLog('Error, incorrect response.');
}
}
} else {
test.addLog('Error, error object does not have a \'request\' (for the XMLHttpRequest object) property.');
}
} else {
test.addLog(operation + ' should not have succeeded, but the success callback was called.');
}
}
if (!result) {
done(false);
}
return result;
}
// The last of the callbacks, which will call 'done(true);' if validation succeeds.
// called by readCallback
function deleteCallback(error) {
if (validateCRUDResult('delete', error)) {
test.addLog('Validation succeeded for all operations');
done(true);
}
}
// called by updateCallback
function readCallback(error) {
if (validateCRUDResult('read', error)) {
//table.del({ id: insertedItem.id || 1 }).done(function () { deleteCallback(); }, function (err) { deleteCallback(err); });
table.del({ id: insertedItem.id }).done(function () { deleteCallback(); }, function (err) { deleteCallback(err); });
}
}
// called by insertCallback
function updateCallback(error) {
if (validateCRUDResult('update', error)) {
item.id = insertedItem.id || 1;
table.where({ id: item.id }).read().done(function (items) {
test.addLog('Read items: ', items);
if (items.length !== 1) {
test.addLog('Error, query should have returned exactly one item');
done(false);
} else {
var retrievedItem = items[0];
var usersFeatureEnabled = retrievedItem.UsersEnabled;
if (retrievedItem.Identities) {
lastUserIdentityObject = JSON.parse(items[0].Identities);
test.addLog('Identities object: ', lastUserIdentityObject);
var providerName = provider;
if (providerName.toLowerCase() === 'microsoftaccount') {
providerName = 'microsoft';
}
var providerIdentity = lastUserIdentityObject[providerName];
if (!providerIdentity) {
test.addLog('Error, cannot fetch the identity for provider ', providerName);
done(false);
return;
                            }
                            if (usersFeatureEnabled) {
var userName = providerIdentity.name || providerIdentity.screen_name;
if (userName) {
test.addLog('Found user name: ', userName);
} else {
test.addLog('Could not find user name!');
done(false);
return;
}
}
}
readCallback();
}
}, function (err) {
readCallback(err);
});
}
}
// called by the callback for insert.
function insertCallback(error) {
if (validateCRUDResult('insert', error)) {
if (tablePermission === TABLE_PERMISSION_PUBLIC) {
// No need for app key anymore
client = new WindowsAzure.MobileServiceClient(client.applicationUrl);
table = client.getTable(tableName);
}
item.id = insertedItem.id || 1;
item.text = 'world';
table.update(item).done(function (newItem) {
test.addLog('Updated item: ', newItem);
updateCallback();
}, function (err) {
updateCallback(err);
});
}
}
table.insert(item).done(function (newItem) {
insertedItem = newItem;
test.addLog('Inserted item: ', newItem);
if (tablePermission === TABLE_PERMISSION_USER) {
var currentUser = client.currentUser.userId;
if (currentUser === newItem.userId) {
test.addLog('User id correctly added by the server script');
} else {
test.addLog('Error, user id not set by the server script');
done(false);
return;
}
}
insertCallback();
}, function (err) {
insertedItem = item;
insertedItem.id = item.id || 1;
insertCallback(err);
});
});
}
function createLoginTest(provider, useSingleSignOn) {
/// <param name="provider" type="String">The authentication provider to use.</param>
/// <param name="useSingleSignOn" type="Boolean">Whether to use the single sign-on parameter for login.</param>
/// <return type="zumo.Test" />
return new zumo.Test('Login with ' + provider + (useSingleSignOn ? ' (using single sign-on)' : ''), function (test, done) {
/// <param name="test" type="zumo.Test">The test associated with this execution.</param>
var client = zumo.getClient();
var successFunction = function (user) {
test.addLog('Logged in: ', user);
done(true);
};
var errorFunction = function (err) {
test.addLog('Error during login: ', err);
done(false);
};
if (useSingleSignOn) {
client.login(provider, true).done(successFunction, errorFunction);
} else {
client.login(provider).done(successFunction, errorFunction);
}
});
}
function createLogoutTest() {
return new zumo.Test('Log out', function (test, done) {
var client = zumo.getClient();
client.logout();
test.addLog('Logged out');
done(true);
});
}
return {
name: 'Login',
tests: tests
};
}
zumo.tests.login = defineLoginTestsNamespace();
<|file_name|>test_defaultdict.py<|end_file_name|>
"""Unit tests for collections.defaultdict."""
import os
import copy
import pickle
import tempfile
import unittest
from collections import defaultdict
def foobar():
return list
class TestDefaultDict(unittest.TestCase):
def test_basic(self):
d1 = defaultdict()
self.assertEqual(d1.default_factory, None)
d1.default_factory = list
d1[12].append(42)
self.assertEqual(d1, {12: [42]})
d1[12].append(24)
self.assertEqual(d1, {12: [42, 24]})
d1[13]
d1[14]
self.assertEqual(d1, {12: [42, 24], 13: [], 14: []})
self.assertTrue(d1[12] is not d1[13] is not d1[14])
d2 = defaultdict(list, foo=1, bar=2)
self.assertEqual(d2.default_factory, list)
self.assertEqual(d2, {"foo": 1, "bar": 2})
self.assertEqual(d2["foo"], 1)
self.assertEqual(d2["bar"], 2)
self.assertEqual(d2[42], [])
self.assertIn("foo", d2)
self.assertIn("foo", d2.keys())
self.assertIn("bar", d2)
self.assertIn("bar", d2.keys())
self.assertIn(42, d2)
self.assertIn(42, d2.keys())
self.assertNotIn(12, d2)
self.assertNotIn(12, d2.keys())
d2.default_factory = None
self.assertEqual(d2.default_factory, None)
try:
d2[15]
except KeyError as err:
self.assertEqual(err.args, (15,))
else:
self.fail("d2[15] didn't raise KeyError")
self.assertRaises(TypeError, defaultdict, 1)
def test_missing(self):
d1 = defaultdict()
self.assertRaises(KeyError, d1.__missing__, 42)
d1.default_factory = list
self.assertEqual(d1.__missing__(42), [])
def test_repr(self):
d1 = defaultdict()
self.assertEqual(d1.default_factory, None)
self.assertEqual(repr(d1), "defaultdict(None, {})")
self.assertEqual(eval(repr(d1)), d1)
d1[11] = 41
self.assertEqual(repr(d1), "defaultdict(None, {11: 41})")
d2 = defaultdict(int)
self.assertEqual(d2.default_factory, int)
d2[12] = 42
self.assertEqual(repr(d2), "defaultdict(<class 'int'>, {12: 42})")
def foo(): return 43
d3 = defaultdict(foo)
self.assertTrue(d3.default_factory is foo)
d3[13]
self.assertEqual(repr(d3), "defaultdict(%s, {13: 43})" % repr(foo))
def test_print(self):
d1 = defaultdict()
def foo(): return 42
d2 = defaultdict(foo, {1: 2})
# NOTE: We can't use tempfile.[Named]TemporaryFile since this
# code must exercise the tp_print C code, which only gets
# invoked for *real* files.
tfn = tempfile.mktemp()
try:
f = open(tfn, "w+")
try:
print(d1, file=f)
print(d2, file=f)
f.seek(0)
self.assertEqual(f.readline(), repr(d1) + "\n")
self.assertEqual(f.readline(), repr(d2) + "\n")
finally:
f.close()
finally:
os.remove(tfn)
def test_copy(self):
d1 = defaultdict()
d2 = d1.copy()
self.assertEqual(type(d2), defaultdict)
self.assertEqual(d2.default_factory, None)
self.assertEqual(d2, {})
d1.default_factory = list
d3 = d1.copy()
self.assertEqual(type(d3), defaultdict)
self.assertEqual(d3.default_factory, list)
self.assertEqual(d3, {})
d1[42]
d4 = d1.copy()
self.assertEqual(type(d4), defaultdict)
self.assertEqual(d4.default_factory, list)
self.assertEqual(d4, {42: []})
d4[12]
self.assertEqual(d4, {42: [], 12: []})
# Issue 6637: Copy fails for empty default dict
d = defaultdict()
d['a'] = 42
e = d.copy()
self.assertEqual(e['a'], 42)
def test_shallow_copy(self):
d1 = defaultdict(foobar, {1: 1})
d2 = copy.copy(d1)
self.assertEqual(d2.default_factory, foobar)
self.assertEqual(d2, d1)
d1.default_factory = list
d2 = copy.copy(d1)
self.assertEqual(d2.default_factory, list)
self.assertEqual(d2, d1)
def test_deep_copy(self):
d1 = defaultdict(foobar, {1: [1]})
d2 = copy.deepcopy(d1)
self.assertEqual(d2.default_factory, foobar)
self.assertEqual(d2, d1)
self.assertTrue(d1[1] is not d2[1])
d1.default_factory = list
d2 = copy.deepcopy(d1)
self.assertEqual(d2.default_factory, list)
self.assertEqual(d2, d1)
def test_keyerror_without_factory(self):
d1 = defaultdict()
try:
d1[(1,)]
except KeyError as err:
self.assertEqual(err.args[0], (1,))
else:
self.fail("expected KeyError")
def test_recursive_repr(self):
# Issue2045: stack overflow when default_factory is a bound method
class sub(defaultdict):
def __init__(self):
self.default_factory = self._factory
def _factory(self):
return []
d = sub()
self.assertRegex(repr(d),
r"defaultdict\(<bound method .*sub\._factory "
r"of defaultdict\(\.\.\., \{\}\)>, \{\}\)")
# NOTE: printing a subclass of a builtin type does not call its
# tp_print slot. So this part is essentially the same test as above.
tfn = tempfile.mktemp()
try:
f = open(tfn, "w+")
try:
print(d, file=f)
finally:
f.close()
finally:
            os.remove(tfn)
    def test_callable_arg(self):
        self.assertRaises(TypeError, defaultdict, {})
def test_pickling(self):
d = defaultdict(int)
d[1]
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
s = pickle.dumps(d, proto)
o = pickle.loads(s)
self.assertEqual(d, o)
if __name__ == "__main__":
    unittest.main()
<|file_name|>__init__.py<|end_file_name|>
#!env/python3
# coding: utf-8
from core.managers.containers import *
from core.managers.imports import *
from core.managers.analysis_manager import AnalysisManager
from core.managers.annotation_manager import AnnotationManager
from core.managers.file_manager import FileManager
from core.managers.filter_manager import FilterEngine
from core.managers.job_manager import JobManager
from core.managers.pipeline_manager import PipelineManager
from core.managers.project_manager import ProjectManager
from core.managers.sample_manager import SampleManager
from core.managers.user_manager import UserManager
from core.managers.search_manager import SearchManager
from core.managers.event_manager import EventManager
from core.managers.subject_manager import SubjectManager
from core.managers.admin_manager import AdminManager
from core.managers.phenotype_manager import PhenotypeManager
from core.managers.panel_manager import PanelManager |
<|file_name|>anyword-hint.js<|end_file_name|>
(function() {
"use strict";
var WORD = /[\w$]+/, RANGE = 500;
CodeMirror.registerHelper("hint", "anyword", function(editor, options) {
var word = options && options.word || WORD;
var range = options && options.range || RANGE;
var cur = editor.getCursor(), curLine = editor.getLine(cur.line);
var start = cur.ch, end = start;
while (end < curLine.length && word.test(curLine.charAt(end))) ++end;
while (start && word.test(curLine.charAt(start - 1))) --start;
var curWord = start != end && curLine.slice(start, end);
var list = [], seen = {};
var re = new RegExp(word.source, "g");
for (var dir = -1; dir <= 1; dir += 2) {
      // use a distinct name for the line bound so the character-position "end"
      // computed above survives for the returned "to" position
      var line = cur.line, endLine = Math.min(Math.max(line + dir * range, editor.firstLine()), editor.lastLine()) + dir;
      for (; line != endLine; line += dir) {
var text = editor.getLine(line), m;
      while (m = re.exec(text)) {
        if (line == cur.line && m[0] === curWord) continue;
if ((!curWord || m[0].lastIndexOf(curWord, 0) == 0) && !Object.prototype.hasOwnProperty.call(seen, m[0])) {
seen[m[0]] = true;
list.push(m[0]);
}
}
}
}
return {list: list, from: CodeMirror.Pos(cur.line, start), to: CodeMirror.Pos(cur.line, end)};
});
})();
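// Usage sketch (illustrative, not part of the original addon): the
// registerHelper() call above exposes this completer as CodeMirror.hint.anyword,
// so with the show-hint addon loaded and an existing editor instance you can
// wire it to a key, e.g.:
//
//   editor.setOption("extraKeys", {
//     "Ctrl-Space": function(cm) { cm.showHint({hint: CodeMirror.hint.anyword}); }
//   });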
<|file_name|>__init__.py<|end_file_name|>
# Purpose: dxf engine for R2007/AC1021
# Created: 12.03.2011
# Copyright (C) , Manfred Moitzi
# License: MIT License
from __future__ import unicode_literals
__author__ = "mozman <[email protected]>"
from .headervars import VARMAP
from ..ac1018 import AC1018Factory
class AC1021Factory(AC1018Factory):
    HEADERVARS = dict(VARMAP)
<|file_name|>plot.py<|end_file_name|>
"""Plotting module for Sympy.
A plot is represented by the ``Plot`` class that contains a reference to the
backend and a list of the data series to be plotted. The data series are
instances of classes meant to simplify getting points and meshes from sympy
expressions. ``plot_backends`` is a dictionary with all the backends.
This module gives only the essential. For all the fancy stuff use directly
the backend. You can get the backend wrapper for every plot from the
``_backend`` attribute. Moreover the data series classes have various useful
methods like ``get_points``, ``get_segments``, ``get_meshes``, etc, that may
be useful if you wish to use another plotting library.
Especially if you need publication ready graphs and this module is not enough
for you - just get the ``_backend`` attribute and add whatever you want
directly to it. In the case of matplotlib (the common way to graph data in
python) just copy ``_backend.fig`` which is the figure and ``_backend.ax``
which is the axis and work on them as you would on any other matplotlib object.
Simplicity of code takes much greater importance than performance. Don't use it
if you care at all about performance. A new backend instance is initialized
every time you call ``show()`` and the old one is left to the garbage collector.
"""
from inspect import getargspec
from itertools import chain
from sympy import sympify, Expr, Tuple, Dummy
from sympy.external import import_module
from sympy.core.compatibility import set_union
from sympy.utilities.decorator import doctest_depends_on
import warnings
from experimental_lambdify import (vectorized_lambdify, lambdify)
# N.B.
# When changing the minimum module version for matplotlib, please change
# the same in the `SymPyDocTestFinder`` in `sympy/utilities/runtests.py`
# Backend specific imports - textplot
from sympy.plotting.textplot import textplot
# Global variable
# Set to False when running tests / doctests so that the plots don't show.
_show = True
def unset_show():
global _show
_show = False
##############################################################################
# The public interface
##############################################################################
class Plot(object):
"""The central class of the plotting module.
For interactive work the function ``plot`` is better suited.
This class permits the plotting of sympy expressions using numerous
backends (matplotlib, textplot, the old pyglet module for sympy, Google
charts api, etc).
The figure can contain an arbitrary number of plots of sympy expressions,
lists of coordinates of points, etc. Plot has a private attribute _series that
contains all data series to be plotted (expressions for lines or surfaces,
lists of points, etc (all subclasses of BaseSeries)). Those data series are
instances of classes not imported by ``from sympy import *``.
The customization of the figure is on two levels. Global options that
concern the figure as a whole (eg title, xlabel, scale, etc) and
per-data series options (eg name) and aesthetics (eg. color, point shape,
line type, etc.).
The difference between options and aesthetics is that an aesthetic can be
a function of the coordinates (or parameters in a parametric plot). The
supported values for an aesthetic are:
- None (the backend uses default values)
- a constant
- a function of one variable (the first coordinate or parameter)
- a function of two variables (the first and second coordinate or
parameters)
- a function of three variables (only in nonparametric 3D plots)
Their implementation depends on the backend so they may not work in some
backends.
If the plot is parametric and the arity of the aesthetic function permits
it the aesthetic is calculated over parameters and not over coordinates.
If the arity does not permit calculation over parameters the calculation is
done over coordinates.
Only cartesian coordinates are supported for the moment, but you can use
the parametric plots to plot in polar, spherical and cylindrical
coordinates.
The arguments for the constructor Plot must be subclasses of BaseSeries.
Any global option can be specified as a keyword argument.
The global options for a figure are:
- title : str
- xlabel : str
- ylabel : str
- legend : bool
- xscale : {'linear', 'log'}
- yscale : {'linear', 'log'}
- axis : bool
- axis_center : tuple of two floats or {'center', 'auto'}
- xlim : tuple of two floats
- ylim : tuple of two floats
- aspect_ratio : tuple of two floats or {'auto'}
- autoscale : bool
- margin : float in [0, 1]
The per data series options and aesthetics are:
There are none in the base series. See below for options for subclasses.
Some data series support additional aesthetics or options:
ListSeries, LineOver1DRangeSeries, Parametric2DLineSeries,
Parametric3DLineSeries support the following:
Aesthetics:
- line_color : function which returns a float.
options:
- label : str
- steps : bool
- integers_only : bool
SurfaceOver2DRangeSeries, ParametricSurfaceSeries support the following:
aesthetics:
- surface_color : function which returns a float.
"""
def __init__(self, *args, **kwargs):
super(Plot, self).__init__()
# Options for the graph as a whole.
# The possible values for each option are described in the docstring of
# Plot. They are based purely on convention, no checking is done.
self.title = None
self.xlabel = None
self.ylabel = None
self.aspect_ratio = 'auto'
self.xlim = None
self.ylim = None
self.axis_center = 'auto'
self.axis = True
self.xscale = 'linear'
self.yscale = 'linear'
self.legend = False
self.autoscale = True
self.margin = 0
# Contains the data objects to be plotted. The backend should be smart
# enough to iterate over this list.
self._series = []
self._series.extend(args)
# The backend type. On every show() a new backend instance is created
# in self._backend which is tightly coupled to the Plot instance
# (thanks to the parent attribute of the backend).
self.backend = DefaultBackend
# The keyword arguments should only contain options for the plot.
for key, val in kwargs.iteritems():
if hasattr(self, key):
setattr(self, key, val)
def show(self):
# TODO move this to the backend (also for save)
if hasattr(self, '_backend'):
self._backend.close()
self._backend = self.backend(self)
self._backend.show()
def save(self, path):
if hasattr(self, '_backend'):
self._backend.close()
self._backend = self.backend(self)
self._backend.save(path)
def __str__(self):
series_strs = [('[%d]: ' % i) + str(s)
for i, s in enumerate(self._series)]
return 'Plot object containing:\n' + '\n'.join(series_strs)
def __getitem__(self, index):
return self._series[index]
def __setitem__(self, index, *args):
if len(args) == 1 and isinstance(args[0], BaseSeries):
            self._series[index] = args[0]
def __delitem__(self, index):
del self._series[index]
def append(self, *args):
"""Adds one more graph to the figure."""
if len(args) == 1 and isinstance(args[0], BaseSeries):
self._series.append(*args)
        else:
            # there is no generic ``Series`` class to construct here; only
            # ready-made BaseSeries instances can be appended
            raise TypeError('Plot.append requires a single BaseSeries instance')
def extend(self, arg):
"""Adds the series from another plot or a list of series."""
if isinstance(arg, Plot):
self._series.extend(arg._series)
else:
self._series.extend(arg)
##############################################################################
# Data Series
##############################################################################
#TODO more general way to calculate aesthetics (see get_color_array)
### The base class for all series
class BaseSeries(object):
"""Base class for the data objects containing stuff to be plotted.
The backend should check if it supports the data series that it's given.
(eg TextBackend supports only LineOver1DRange).
It's the backend responsibility to know how to use the class of
data series that it's given.
Some data series classes are grouped (using a class attribute like is_2Dline)
according to the api they present (based only on convention). The backend is
not obliged to use that api (eg. The LineOver1DRange belongs to the
is_2Dline group and presents the get_points method, but the
TextBackend does not use the get_points method).
"""
# Some flags follow. The rationale for using flags instead of checking base
# classes is that setting multiple flags is simpler than multiple
# inheritance.
is_2Dline = False
# Some of the backends expect:
# - get_points returning 1D np.arrays list_x, list_y
# - get_segments returning np.array (done in Line2DBaseSeries)
# - get_color_array returning 1D np.array (done in Line2DBaseSeries)
# with the colors calculated at the points from get_points
is_3Dline = False
# Some of the backends expect:
# - get_points returning 1D np.arrays list_x, list_y, list_y
# - get_segments returning np.array (done in Line2DBaseSeries)
# - get_color_array returning 1D np.array (done in Line2DBaseSeries)
# with the colors calculated at the points from get_points
is_3Dsurface = False
# Some of the backends expect:
# - get_meshes returning mesh_x, mesh_y, mesh_z (2D np.arrays)
# - get_points an alias for get_meshes
is_contour = False
# Some of the backends expect:
# - get_meshes returning mesh_x, mesh_y, mesh_z (2D np.arrays)
# - get_points an alias for get_meshes
is_implicit = False
# Some of the backends expect:
# - get_meshes returning mesh_x (1D array), mesh_y(1D array,
# mesh_z (2D np.arrays)
# - get_points an alias for get_meshes
#Different from is_contour as the colormap in backend will be
#different
is_parametric = False
# The calculation of aesthetics expects:
# - get_parameter_points returning one or two np.arrays (1D or 2D)
# used for calculation aesthetics
def __init__(self):
super(BaseSeries, self).__init__()
@property
def is_3D(self):
flags3D = [
self.is_3Dline,
self.is_3Dsurface
]
return any(flags3D)
@property
def is_line(self):
flagslines = [
self.is_2Dline,
self.is_3Dline
]
return any(flagslines)
### 2D lines
class Line2DBaseSeries(BaseSeries):
"""A base class for 2D lines.
- adding the label, steps and only_integers options
- making is_2Dline true
- defining get_segments and get_color_array
"""
is_2Dline = True
_dim = 2
def __init__(self):
super(Line2DBaseSeries, self).__init__()
self.label = None
self.steps = False
self.only_integers = False
self.line_color = None
def get_segments(self):
np = import_module('numpy')
points = self.get_points()
if self.steps is True:
x = np.array((points[0], points[0])).T.flatten()[1:]
y = np.array((points[1], points[1])).T.flatten()[:-1]
points = (x, y)
points = np.ma.array(points).T.reshape(-1, 1, self._dim)
return np.ma.concatenate([points[:-1], points[1:]], axis=1)
def get_color_array(self):
np = import_module('numpy')
c = self.line_color
if hasattr(c, '__call__'):
f = np.vectorize(c)
arity = len(getargspec(c)[0])
if arity == 1 and self.is_parametric:
x = self.get_parameter_points()
return f(centers_of_segments(x))
else:
variables = map(centers_of_segments, self.get_points())
if arity == 1:
return f(variables[0])
elif arity == 2:
return f(*variables[:2])
else: # only if the line is 3D (otherwise raises an error)
return f(*variables)
else:
return c*np.ones(self.nb_of_points)
class List2DSeries(Line2DBaseSeries):
"""Representation for a line consisting of list of points."""
def __init__(self, list_x, list_y):
np = import_module('numpy')
super(List2DSeries, self).__init__()
self.list_x = np.array(list_x)
self.list_y = np.array(list_y)
self.label = 'list'
def __str__(self):
return 'list plot'
def get_points(self):
return (self.list_x, self.list_y)
class LineOver1DRangeSeries(Line2DBaseSeries):
"""Representation for a line consisting of a sympy expression over a range."""
def __init__(self, expr, var_start_end, **kwargs):
super(LineOver1DRangeSeries, self).__init__()
self.expr = sympify(expr)
self.label = str(self.expr)
self.var = sympify(var_start_end[0])
self.start = float(var_start_end[1])
self.end = float(var_start_end[2])
self.nb_of_points = kwargs.get('nb_of_points', 300)
self.adaptive = kwargs.get('adaptive', True)
self.depth = kwargs.get('depth', 12)
self.line_color = kwargs.get('line_color', None)
def __str__(self):
return 'cartesian line: %s for %s over %s' % (
str(self.expr), str(self.var), str((self.start, self.end)))
def get_segments(self):
"""
Adaptively gets segments for plotting.
The adaptive sampling is done by recursively checking if three
points are almost collinear. If they are not collinear, then more
points are added between those points.
References
==========
[1] Adaptive polygonal approximation of parametric curves,
Luiz Henrique de Figueiredo.
"""
if self.only_integers or not self.adaptive:
return super(LineOver1DRangeSeries, self).get_segments()
else:
f = lambdify([self.var], self.expr)
list_segments = []
def sample(p, q, depth):
""" Samples recursively if three points are almost collinear.
For depth < 6, points are added irrespective of whether they
satisfy the collinearity condition or not. The maximum depth
allowed is 12.
"""
np = import_module('numpy')
#Randomly sample to avoid aliasing.
random = 0.45 + np.random.rand() * 0.1
xnew = p[0] + random * (q[0] - p[0])
ynew = f(xnew)
new_point = np.array([xnew, ynew])
#Maximum depth
if depth > self.depth:
list_segments.append([p, q])
#Sample irrespective of whether the line is flat till the
#depth of 6. We are not using linspace to avoid aliasing.
elif depth < 6:
sample(p, new_point, depth + 1)
sample(new_point, q, depth + 1)
#Sample ten points if complex values are encountered
#at both ends. If there is a real value in between, then
#sample those points further.
elif p[1] is None and q[1] is None:
xarray = np.linspace(p[0], q[0], 10)
yarray = map(f, xarray)
if any(y is not None for y in yarray):
                    for i in range(len(yarray) - 1):
if yarray[i] is not None or yarray[i + 1] is not None:
sample([xarray[i], yarray[i]],
[xarray[i + 1], yarray[i + 1]], depth + 1)
#Sample further if one of the end points in None( i.e. a complex
#value) or the three points are not almost collinear.
elif (p[1] is None or q[1] is None or new_point[1] is None
or not flat(p, new_point, q)):
sample(p, new_point, depth + 1)
sample(new_point, q, depth + 1)
else:
list_segments.append([p, q])
f_start = f(self.start)
f_end = f(self.end)
sample([self.start, f_start], [self.end, f_end], 0)
return list_segments
def get_points(self):
np = import_module('numpy')
if self.only_integers is True:
list_x = np.linspace(int(self.start), int(self.end),
num=int(self.end) - int(self.start) + 1)
else:
list_x = np.linspace(self.start, self.end, num=self.nb_of_points)
f = vectorized_lambdify([self.var], self.expr)
list_y = f(list_x)
return (list_x, list_y)
class Parametric2DLineSeries(Line2DBaseSeries):
"""Representation for a line consisting of two parametric sympy expressions
over a range."""
is_parametric = True
def __init__(self, expr_x, expr_y, var_start_end, **kwargs):
super(Parametric2DLineSeries, self).__init__()
self.expr_x = sympify(expr_x)
self.expr_y = sympify(expr_y)
self.label = "(%s, %s)" % (str(self.expr_x), str(self.expr_y))
self.var = sympify(var_start_end[0])
self.start = float(var_start_end[1])
self.end = float(var_start_end[2])
self.nb_of_points = kwargs.get('nb_of_points', 300)
self.adaptive = kwargs.get('adaptive', True)
self.depth = kwargs.get('depth', 12)
self.line_color = kwargs.get('line_color', None)
def __str__(self):
return 'parametric cartesian line: (%s, %s) for %s over %s' % (
str(self.expr_x), str(self.expr_y), str(self.var),
str((self.start, self.end)))
def get_parameter_points(self):
np = import_module('numpy')
return np.linspace(self.start, self.end, num=self.nb_of_points)
def get_points(self):
param = self.get_parameter_points()
fx = vectorized_lambdify([self.var], self.expr_x)
fy = vectorized_lambdify([self.var], self.expr_y)
list_x = fx(param)
list_y = fy(param)
return (list_x, list_y)
def get_segments(self):
"""
Adaptively gets segments for plotting.
The adaptive sampling is done by recursively checking if three
points are almost collinear. If they are not collinear, then more
points are added between those points.
References
==========
[1] Adaptive polygonal approximation of parametric curves,
Luiz Henrique de Figueiredo.
"""
if not self.adaptive:
return super(Parametric2DLineSeries, self).get_segments()
f_x = lambdify([self.var], self.expr_x)
f_y = lambdify([self.var], self.expr_y)
list_segments = []
def sample(param_p, param_q, p, q, depth):
""" Samples recursively if three points are almost collinear.
For depth < 6, points are added irrespective of whether they
satisfy the collinearity condition or not. The maximum depth
allowed is 12.
"""
#Randomly sample to avoid aliasing.
np = import_module('numpy')
random = 0.45 + np.random.rand() * 0.1
param_new = param_p + random * (param_q - param_p)
xnew = f_x(param_new)
ynew = f_y(param_new)
new_point = np.array([xnew, ynew])
#Maximum depth
if depth > self.depth:
list_segments.append([p, q])
#Sample irrespective of whether the line is flat till the
#depth of 6. We are not using linspace to avoid aliasing.
elif depth < 6:
sample(param_p, param_new, p, new_point, depth + 1)
sample(param_new, param_q, new_point, q, depth + 1)
#Sample ten points if complex values are encountered
#at both ends. If there is a real value in between, then
#sample those points further.
            elif ((p[0] is None and q[0] is None) or
                    (p[1] is None and q[1] is None)):
param_array = np.linspace(param_p, param_q, 10)
x_array = map(f_x, param_array)
y_array = map(f_y, param_array)
if any(x is not None and y is not None
for x, y in zip(x_array, y_array)):
                    for i in range(len(y_array) - 1):
if ((x_array[i] is not None and y_array[i] is not None) or
(x_array[i + 1] is not None and y_array[i + 1] is not None)):
point_a = [x_array[i], y_array[i]]
point_b = [x_array[i + 1], y_array[i + 1]]
                        sample(param_array[i], param_array[i + 1], point_a,
                               point_b, depth + 1)
#Sample further if one of the end points in None( ie a complex
#value) or the three points are not almost collinear.
elif (p[0] is None or p[1] is None
or q[1] is None or q[0] is None
or not flat(p, new_point, q)):
sample(param_p, param_new, p, new_point, depth + 1)
sample(param_new, param_q, new_point, q, depth + 1)
else:
list_segments.append([p, q])
f_start_x = f_x(self.start)
f_start_y = f_y(self.start)
start = [f_start_x, f_start_y]
f_end_x = f_x(self.end)
f_end_y = f_y(self.end)
end = [f_end_x, f_end_y]
sample(self.start, self.end, start, end, 0)
return list_segments
### 3D lines
class Line3DBaseSeries(Line2DBaseSeries):
"""A base class for 3D lines.
Most of the stuff is derived from Line2DBaseSeries."""
is_2Dline = False
is_3Dline = True
_dim = 3
def __init__(self):
super(Line3DBaseSeries, self).__init__()
class Parametric3DLineSeries(Line3DBaseSeries):
"""Representation for a 3D line consisting of two parametric sympy
expressions and a range."""
def __init__(self, expr_x, expr_y, expr_z, var_start_end, **kwargs):
super(Parametric3DLineSeries, self).__init__()
self.expr_x = sympify(expr_x)
self.expr_y = sympify(expr_y)
self.expr_z = sympify(expr_z)
        self.label = "(%s, %s, %s)" % (str(self.expr_x), str(self.expr_y), str(self.expr_z))
self.var = sympify(var_start_end[0])
self.start = float(var_start_end[1])
self.end = float(var_start_end[2])
self.nb_of_points = kwargs.get('nb_of_points', 300)
self.line_color = kwargs.get('line_color', None)
def __str__(self):
return '3D parametric cartesian line: (%s, %s, %s) for %s over %s' % (
str(self.expr_x), str(self.expr_y), str(self.expr_z),
str(self.var), str((self.start, self.end)))
def get_parameter_points(self):
np = import_module('numpy')
return np.linspace(self.start, self.end, num=self.nb_of_points)
def get_points(self):
param = self.get_parameter_points()
fx = vectorized_lambdify([self.var], self.expr_x)
fy = vectorized_lambdify([self.var], self.expr_y)
fz = vectorized_lambdify([self.var], self.expr_z)
list_x = fx(param)
list_y = fy(param)
list_z = fz(param)
return (list_x, list_y, list_z)
### Surfaces
class SurfaceBaseSeries(BaseSeries):
"""A base class for 3D surfaces."""
is_3Dsurface = True
def __init__(self):
super(SurfaceBaseSeries, self).__init__()
self.surface_color = None
def get_color_array(self):
np = import_module('numpy')
c = self.surface_color
if callable(c):
f = np.vectorize(c)
arity = len(getargspec(c)[0])
if self.is_parametric:
variables = map(centers_of_faces, self.get_parameter_meshes())
if arity == 1:
return f(variables[0])
elif arity == 2:
return f(*variables)
variables = map(centers_of_faces, self.get_meshes())
if arity == 1:
return f(variables[0])
elif arity == 2:
return f(*variables[:2])
else:
return f(*variables)
else:
return c*np.ones(self.nb_of_points)
class SurfaceOver2DRangeSeries(SurfaceBaseSeries):
"""Representation for a 3D surface consisting of a sympy expression and 2D
range."""
def __init__(self, expr, var_start_end_x, var_start_end_y, **kwargs):
super(SurfaceOver2DRangeSeries, self).__init__()
self.expr = sympify(expr)
self.var_x = sympify(var_start_end_x[0])
self.start_x = float(var_start_end_x[1])
self.end_x = float(var_start_end_x[2])
self.var_y = sympify(var_start_end_y[0])
self.start_y = float(var_start_end_y[1])
self.end_y = float(var_start_end_y[2])
self.nb_of_points_x = kwargs.get('nb_of_points_x', 50)
self.nb_of_points_y = kwargs.get('nb_of_points_y', 50)
self.surface_color = kwargs.get('surface_color', None)
def __str__(self):
return ('cartesian surface: %s for'
' %s over %s and %s over %s') % (
str(self.expr),
str(self.var_x),
str((self.start_x, self.end_x)),
str(self.var_y),
str((self.start_y, self.end_y)))
def get_meshes(self):
np = import_module('numpy')
mesh_x, mesh_y = np.meshgrid(np.linspace(self.start_x, self.end_x,
num=self.nb_of_points_x),
np.linspace(self.start_y, self.end_y,
num=self.nb_of_points_y))
f = vectorized_lambdify((self.var_x, self.var_y), self.expr)
return (mesh_x, mesh_y, f(mesh_x, mesh_y))
class ParametricSurfaceSeries(SurfaceBaseSeries):
"""Representation for a 3D surface consisting of three parametric sympy
expressions and a range."""
is_parametric = True
def __init__(
self, expr_x, expr_y, expr_z, var_start_end_u, var_start_end_v,
**kwargs):
super(ParametricSurfaceSeries, self).__init__()
self.expr_x = sympify(expr_x)
self.expr_y = sympify(expr_y)
self.expr_z = sympify(expr_z)
self.var_u = sympify(var_start_end_u[0])
self.start_u = float(var_start_end_u[1])
self.end_u = float(var_start_end_u[2])
self.var_v = sympify(var_start_end_v[0])
self.start_v = float(var_start_end_v[1])
self.end_v = float(var_start_end_v[2])
self.nb_of_points_u = kwargs.get('nb_of_points_u', 50)
self.nb_of_points_v = kwargs.get('nb_of_points_v', 50)
self.surface_color = kwargs.get('surface_color', None)
def __str__(self):
return ('parametric cartesian surface: (%s, %s, %s) for'
' %s over %s and %s over %s') % (
str(self.expr_x),
str(self.expr_y),
str(self.expr_z),
str(self.var_u),
str((self.start_u, self.end_u)),
str(self.var_v),
str((self.start_v, self.end_v)))
def get_parameter_meshes(self):
np = import_module('numpy')
return np.meshgrid(np.linspace(self.start_u, self.end_u,
num=self.nb_of_points_u),
np.linspace(self.start_v, self.end_v,
num=self.nb_of_points_v))
def get_meshes(self):
mesh_u, mesh_v = self.get_parameter_meshes()
fx = vectorized_lambdify((self.var_u, self.var_v), self.expr_x)
fy = vectorized_lambdify((self.var_u, self.var_v), self.expr_y)
fz = vectorized_lambdify((self.var_u, self.var_v), self.expr_z)
return (fx(mesh_u, mesh_v), fy(mesh_u, mesh_v), fz(mesh_u, mesh_v))
### Contours
class ContourSeries(BaseSeries):
"""Representation for a contour plot."""
#The code is mostly repetition of SurfaceOver2DRange.
#XXX: Presently not used in any of those functions.
#XXX: Add contour plot and use this seties.
is_contour = True
def __init__(self, expr, var_start_end_x, var_start_end_y):
super(ContourSeries, self).__init__()
self.nb_of_points_x = 50
self.nb_of_points_y = 50
self.expr = sympify(expr)
self.var_x = sympify(var_start_end_x[0])
self.start_x = float(var_start_end_x[1])
self.end_x = float(var_start_end_x[2])
self.var_y = sympify(var_start_end_y[0])
self.start_y = float(var_start_end_y[1])
self.end_y = float(var_start_end_y[2])
self.get_points = self.get_meshes
def __str__(self):
return ('contour: %s for '
'%s over %s and %s over %s') % (
str(self.expr),
str(self.var_x),
str((self.start_x, self.end_x)),
str(self.var_y),
str((self.start_y, self.end_y)))
def get_meshes(self):
np = import_module('numpy')
mesh_x, mesh_y = np.meshgrid(np.linspace(self.start_x, self.end_x,
num=self.nb_of_points_x),
np.linspace(self.start_y, self.end_y,
num=self.nb_of_points_y))
f = vectorized_lambdify((self.var_x, self.var_y), self.expr)
return (mesh_x, mesh_y, f(mesh_x, mesh_y))
##############################################################################
# Backends
##############################################################################
class BaseBackend(object):
def __init__(self, parent):
super(BaseBackend, self).__init__()
self.parent = parent
## don't have to check for the success of importing matplotlib in each case;
## we will only be using this backend if we can successfully import matplotlib
class MatplotlibBackend(BaseBackend):
def __init__(self, parent):
super(MatplotlibBackend, self).__init__(parent)
are_3D = [s.is_3D for s in self.parent._series]
self.matplotlib = import_module('matplotlib',
__import__kwargs={'fromlist': ['pyplot', 'cm', 'collections']},
min_module_version='1.1.0', catch=(RuntimeError,))
self.plt = self.matplotlib.pyplot
self.cm = self.matplotlib.cm
self.LineCollection = self.matplotlib.collections.LineCollection
if any(are_3D) and not all(are_3D):
raise ValueError('The matplotlib backend can not mix 2D and 3D.')
elif not any(are_3D):
self.fig = self.plt.figure()
self.ax = self.fig.add_subplot(111)
self.ax.spines['left'].set_position('zero')
self.ax.spines['right'].set_color('none')
self.ax.spines['bottom'].set_position('zero')
self.ax.spines['top'].set_color('none')
self.ax.spines['left'].set_smart_bounds(True)
self.ax.spines['bottom'].set_smart_bounds(True)
self.ax.xaxis.set_ticks_position('bottom')
self.ax.yaxis.set_ticks_position('left')
elif all(are_3D):
## mpl_toolkits.mplot3d is necessary for
## projection='3d'
mpl_toolkits = import_module('mpl_toolkits',
__import__kwargs={'fromlist': ['mplot3d']})
self.fig = self.plt.figure()
self.ax = self.fig.add_subplot(111, projection='3d')
def process_series(self):
parent = self.parent
for s in self.parent._series:
# Create the collections
if s.is_2Dline:
collection = self.LineCollection(s.get_segments())
self.ax.add_collection(collection)
elif s.is_contour:
self.ax.contour(*s.get_meshes())
elif s.is_3Dline:
# TODO too complicated, I blame matplotlib
mpl_toolkits = import_module('mpl_toolkits',
__import__kwargs={'fromlist': ['mplot3d']})
art3d = mpl_toolkits.mplot3d.art3d
collection = art3d.Line3DCollection(s.get_segments())
self.ax.add_collection(collection)
x, y, z = s.get_points()
self.ax.set_xlim((min(x), max(x)))
self.ax.set_ylim((min(y), max(y)))
self.ax.set_zlim((min(z), max(z)))
elif s.is_3Dsurface:
x, y, z = s.get_meshes()
collection = self.ax.plot_surface(x, y, z, cmap=self.cm.jet,
rstride=1, cstride=1,
linewidth=0.1)
elif s.is_implicit:
#Smart bounds have to be set to False for implicit plots.
self.ax.spines['left'].set_smart_bounds(False)
self.ax.spines['bottom'].set_smart_bounds(False)
points = s.get_raster()
if len(points) == 2:
#interval math plotting
x, y = _matplotlib_list(points[0])
self.ax.fill(x, y, facecolor='b', edgecolor='None' )
else:
# use contourf or contour depending on whether it is
# an inequality or equality.
#XXX: ``contour`` plots multiple lines. Should be fixed.
ListedColormap = self.matplotlib.colors.ListedColormap
colormap = ListedColormap(["white", "blue"])
xarray, yarray, zarray, plot_type = points
if plot_type == 'contour':
self.ax.contour(xarray, yarray, zarray,
contours=(0, 0), fill=False, cmap=colormap)
else:
self.ax.contourf(xarray, yarray, zarray, cmap=colormap)
else:
raise ValueError('The matplotlib backend supports only '
'is_2Dline, is_3Dline, is_3Dsurface and '
'is_contour objects.')
# Customise the collections with the corresponding per-series
# options.
if hasattr(s, 'label'):
collection.set_label(s.label)
if s.is_line and s.line_color:
if isinstance(s.line_color, (float, int)) or callable(s.line_color):
color_array = s.get_color_array()
collection.set_array(color_array)
else:
collection.set_color(s.line_color)
if s.is_3Dsurface and s.surface_color:
if self.matplotlib.__version__ < "1.2.0": # TODO in the distant future remove this check
warnings.warn('The version of matplotlib is too old to use surface coloring.')
elif isinstance(s.surface_color, (float, int)) or callable(s.surface_color):
color_array = s.get_color_array()
color_array = color_array.reshape(color_array.size)
collection.set_array(color_array)
else:
collection.set_color(s.surface_color)
# Set global options.
# TODO The 3D stuff
# XXX The order of those is important.
mpl_toolkits = import_module('mpl_toolkits',
__import__kwargs={'fromlist': ['mplot3d']})
Axes3D = mpl_toolkits.mplot3d.Axes3D
if parent.xscale and not isinstance(self.ax, Axes3D):
self.ax.set_xscale(parent.xscale)
if parent.yscale and not isinstance(self.ax, Axes3D):
self.ax.set_yscale(parent.yscale)
if parent.xlim:
self.ax.set_xlim(parent.xlim)
if parent.ylim:
self.ax.set_ylim(parent.ylim)
if not isinstance(self.ax, Axes3D) or self.matplotlib.__version__ >= '1.2.0': # XXX in the distant future remove this check
self.ax.set_autoscale_on(parent.autoscale)
if parent.axis_center:
val = parent.axis_center
if isinstance(self.ax, Axes3D):
pass
elif val == 'center':
self.ax.spines['left'].set_position('center')
self.ax.spines['bottom'].set_position('center')
elif val == 'auto':
xl, xh = self.ax.get_xlim()
yl, yh = self.ax.get_ylim()
pos_left = ('data', 0) if xl*xh <= 0 else 'center'
pos_bottom = ('data', 0) if yl*yh <= 0 else 'center'
self.ax.spines['left'].set_position(pos_left)
self.ax.spines['bottom'].set_position(pos_bottom)
else:
self.ax.spines['left'].set_position(('data', val[0]))
self.ax.spines['bottom'].set_position(('data', val[1]))
if not parent.axis:
self.ax.set_axis_off()
if parent.legend:
self.ax.legend()
self.ax.legend_.set_visible(parent.legend)
if parent.margin:
self.ax.set_xmargin(parent.margin)
self.ax.set_ymargin(parent.margin)
if parent.title:
self.ax.set_title(parent.title)
if parent.xlabel:
self.ax.set_xlabel(parent.xlabel, position=(1, 0))
if parent.ylabel:
self.ax.set_ylabel(parent.ylabel, position=(0, 1))
def show(self):
self.process_series()
#TODO after fixing https://github.com/ipython/ipython/issues/1255
# you can uncomment the next line and remove the pyplot.show() call
#self.fig.show()
if _show:
self.plt.show()
def save(self, path):
self.process_series()
self.fig.savefig(path)
def close(self):
self.plt.close(self.fig)
class TextBackend(BaseBackend):
def __init__(self, parent):
super(TextBackend, self).__init__(parent)
def show(self):
if len(self.parent._series) != 1:
raise ValueError(
'The TextBackend supports only one graph per Plot.')
elif not isinstance(self.parent._series[0], LineOver1DRangeSeries):
raise ValueError(
'The TextBackend supports only expressions over a 1D range')
else:
ser = self.parent._series[0]
textplot(ser.expr, ser.start, ser.end)
def close(self):
pass
class DefaultBackend(BaseBackend):
def __new__(cls, parent):
matplotlib = import_module('matplotlib', min_module_version='1.1.0', catch=(RuntimeError,))
if matplotlib:
return MatplotlibBackend(parent)
else:
return TextBackend(parent)
plot_backends = {
'matplotlib': MatplotlibBackend,
'text': TextBackend,
'default': DefaultBackend
}
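# A backend can also be forced per Plot instance (a sketch; ``show()``
# instantiates whatever class is stored in the ``backend`` attribute):
#
#     p = plot(x**2, show=False)
#     p.backend = plot_backends['text']
#     p.show()   # renders with textplot instead of matplotlib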
##############################################################################
# Finding the centers of line segments or mesh faces
##############################################################################
def centers_of_segments(array):
np = import_module('numpy')
return np.average(np.vstack((array[:-1], array[1:])), 0)
def centers_of_faces(array):
np = import_module('numpy')
return np.average(np.dstack((array[:-1, :-1],
array[1:, :-1],
array[:-1, 1: ],
array[:-1, :-1],
)), 2)
def flat(x, y, z, eps=1e-3):
"""Checks whether three points are almost collinear"""
np = import_module('numpy')
vector_a = x - y
vector_b = z - y
dot_product = np.dot(vector_a, vector_b)
vector_a_norm = np.linalg.norm(vector_a)
vector_b_norm = np.linalg.norm(vector_b)
cos_theta = dot_product / (vector_a_norm * vector_b_norm)
return abs(cos_theta + 1) < eps
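# Quick sanity check for ``flat`` (illustrative): for three collinear points
# the two difference vectors around the middle point are opposite, so
# cos(theta) == -1 and ``flat`` returns True; moving the middle point off the
# line breaks this.
#
#     import numpy as np
#     flat(np.array([0., 0.]), np.array([1., 1.]), np.array([2., 2.]))    # True
#     flat(np.array([0., 0.]), np.array([1., 0.5]), np.array([2., 2.]))   # False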
def _matplotlib_list(interval_list):
"""
Returns lists for matplotlib ``fill`` command from a list of bounding
rectangular intervals
"""
xlist = []
ylist = []
if len(interval_list):
for intervals in interval_list:
intervalx = intervals[0]
intervaly = intervals[1]
xlist.extend([intervalx.start, intervalx.start,
intervalx.end, intervalx.end, None])
ylist.extend([intervaly.start, intervaly.end,
intervaly.end, intervaly.start, None])
else:
#XXX Ugly hack. Matplotlib does not accept empty lists for ``fill``
xlist.extend([None, None, None, None])
ylist.extend([None, None, None, None])
return xlist, ylist
####New API for plotting module ####
# TODO: Add color arrays for plots.
# TODO: Add more plotting options for 3d plots.
# TODO: Adaptive sampling for 3D plots.
@doctest_depends_on(modules=('numpy', 'matplotlib',))
def plot(*args, **kwargs):
"""
Plots a function of a single variable.
The plotting uses an adaptive algorithm which samples recursively to
accurately plot the function. The adaptive algorithm uses a random point near
the midpoint of two points that has to be further sampled. Hence the same
plots can appear slightly different.
Usage
=====
Single Plot
``plot(expr, range, **kwargs)``
If the range is not specified, then a default range of (-10, 10) is used.
Multiple plots with same range.
``plot(expr1, expr2, ..., range, **kwargs)``
If the range is not specified, then a default range of (-10, 10) is used.
Multiple plots with different ranges.
``plot((expr1, range), (expr2, range), ..., **kwargs)``
Range has to be specified for every expression.
Default range may change in the future if a more advanced default range
detection algorithm is implemented.
Arguments
=========
``expr`` : Expression representing the function of single variable
``range``: (x, 0, 5), A 3-tuple denoting the range of the free variable.
Keyword Arguments
=================
Arguments for ``LineOver1DRangeSeries`` class:
``adaptive``: Boolean. The default value is set to True. Set adaptive to False and
specify ``nb_of_points`` if uniform sampling is required.
``depth``: int Recursion depth of the adaptive algorithm. A depth of value ``n``
samples a maximum of `2^{n}` points.
``nb_of_points``: int. Used when the ``adaptive`` is set to False. The function
is uniformly sampled at ``nb_of_points`` number of points.
Aesthetics options:
``line_color``: float. Specifies the color for the plot.
See ``Plot`` to see how to set color for the plots.
If there are multiple plots, then the same series arguments are applied to
all the plots. If you want to set these options separately, you can index
the ``Plot`` object returned and set it.
Arguments for ``Plot`` class:
``title`` : str. Title of the plot. It is set to the latex representation of
the expression, if the plot has only one expression.
``xlabel`` : str. Label for the x - axis.
``ylabel`` : str. Label for the y - axis.
``xscale``: {'linear', 'log'} Sets the scaling of the x - axis.
``yscale``: {'linear', 'log'} Sets the scaling of the y - axis.
``axis_center``: tuple of two floats denoting the coordinates of the center or
{'center', 'auto'}
``xlim`` : tuple of two floats, denoting the x - axis limits.
``ylim`` : tuple of two floats, denoting the y - axis limits.
Examples
========
>>> from sympy import symbols
>>> from sympy.plotting import plot
>>> x = symbols('x')
Single Plot
>>> plot(x**2, (x, -5, 5))
Plot object containing:
[0]: cartesian line: x**2 for x over (-5.0, 5.0)
Multiple plots with single range.
>>> plot(x, x**2, x**3, (x, -5, 5))
Plot object containing:
[0]: cartesian line: x for x over (-5.0, 5.0)
[1]: cartesian line: x**2 for x over (-5.0, 5.0)
[2]: cartesian line: x**3 for x over (-5.0, 5.0)
Multiple plots with different ranges.
>>> plot((x**2, (x, -6, 6)), (x, (x, -5, 5)))
Plot object containing:
[0]: cartesian line: x**2 for x over (-6.0, 6.0)
[1]: cartesian line: x for x over (-5.0, 5.0)
No adaptive sampling.
>>> plot(x**2, adaptive=False, nb_of_points=400)
Plot object containing:
[0]: cartesian line: x**2 for x over (-10.0, 10.0)
See Also
========
Plot, LineOver1DRangeSeries.
"""
args = list(map(sympify, args))
show = kwargs.pop('show', True)
plot_expr = check_arguments(args, 1, 1)
series = [LineOver1DRangeSeries(*arg, **kwargs) for arg in plot_expr]
plots = Plot(*series, **kwargs)
if show:
plots.show()
return plots
@doctest_depends_on(modules=('numpy', 'matplotlib',))
def plot_parametric(*args, **kwargs):
"""
Plots a 2D parametric plot.
The plotting uses an adaptive algorithm which samples recursively to
accurately plot the function. The adaptive algorithm uses a random point near
the midpoint of two points that has to be further sampled. Hence the same
plots can appear slightly different.
Usage
=====
Single plot.
``plot_parametric(expr_x, expr_y, range, **kwargs)``
If the range is not specified, then a default range of (-10, 10) is used.
Multiple plots with same range.
``plot_parametric((expr1_x, expr1_y), (expr2_x, expr2_y), range, **kwargs)``
If the range is not specified, then a default range of (-10, 10) is used.
Multiple plots with different ranges.
``plot_parametric((expr_x, expr_y, range), ..., **kwargs)``
Range has to be specified for every expression.
Default range may change in the future if a more advanced default range
detection algorithm is implemented.
Arguments
=========
``expr_x`` : Expression representing the function along x.
``expr_y`` : Expression representing the function along y.
``range``: (u, 0, 5), A 3-tuple denoting the range of the parameter
variable.
Keyword Arguments
=================
Arguments for ``Parametric2DLineSeries`` class:
``adaptive``: Boolean. The default value is set to True. Set adaptive to
False and specify ``nb_of_points`` if uniform sampling is required.
``depth``: int Recursion depth of the adaptive algorithm. A depth of
value ``n`` samples a maximum of `2^{n}` points.
``nb_of_points``: int. Used when the ``adaptive`` is set to False. The
function is uniformly sampled at ``nb_of_points`` number of points.
Aesthetics
----------
``line_color``: function which returns a float. Specifies the color for the
plot. See ``sympy.plotting.Plot`` for more details.
If there are multiple plots, then the same Series arguments are applied to
all the plots. If you want to set these options separately, you can index
the returned ``Plot`` object and set it.
Arguments for ``Plot`` class:
``xlabel`` : str. Label for the x - axis.
``ylabel`` : str. Label for the y - axis.
``xscale``: {'linear', 'log'} Sets the scaling of the x - axis.
``yscale``: {'linear', 'log'} Sets the scaling of the y - axis.
``axis_center``: tuple of two floats denoting the coordinates of the center
or {'center', 'auto'}
``xlim`` : tuple of two floats, denoting the x - axis limits.
``ylim`` : tuple of two floats, denoting the y - axis limits.
Examples
========
>>> from sympy import symbols, cos, sin
>>> from sympy.plotting import plot_parametric
>>> u = symbols('u')
Single Parametric plot
>>> plot_parametric(cos(u), sin(u), (u, -5, 5))
Plot object containing:
[0]: parametric cartesian line: (cos(u), sin(u)) for u over (-5.0, 5.0)
Multiple parametric plots with single range.
>>> plot_parametric((cos(u), sin(u)), (u, cos(u)))
Plot object containing:
[0]: parametric cartesian line: (cos(u), sin(u)) for u over (-10.0, 10.0)
[1]: parametric cartesian line: (u, cos(u)) for u over (-10.0, 10.0)
Multiple parametric plots.
>>> plot_parametric((cos(u), sin(u), (u, -5, 5)),
... (cos(u), u, (u, -5, 5)))
Plot object containing:
[0]: parametric cartesian line: (cos(u), sin(u)) for u over (-5.0, 5.0)
[1]: parametric cartesian line: (cos(u), u) for u over (-5.0, 5.0)
See Also
========
Plot, Parametric2DLineSeries
"""
args = list(map(sympify, args))
show = kwargs.pop('show', True)
plot_expr = check_arguments(args, 2, 1)
series = [Parametric2DLineSeries(*arg) for arg in plot_expr]
plots = Plot(*series, **kwargs)
if show:
plots.show()
return plots
@doctest_depends_on(modules=('numpy', 'matplotlib',))
def plot3d_parametric_line(*args, **kwargs):
"""
Plots a 3D parametric line plot.
Usage
=====
Single plot:
``plot3d_parametric_line(expr_x, expr_y, expr_z, range, **kwargs)``
If the range is not specified, then a default range of (-10, 10) is used.
Multiple plots.
``plot3d_parametric_line((expr_x, expr_y, expr_z, range), ..., **kwargs)``
Ranges have to be specified for every expression.
Default range may change in the future if a more advanced default range
detection algorithm is implemented.
Arguments
=========
``expr_x`` : Expression representing the function along x.
``expr_y`` : Expression representing the function along y.
``expr_z`` : Expression representing the function along z.
``range``: ``(u, 0, 5)``, A 3-tuple denoting the range of the parameter
variable.
Keyword Arguments
=================
Arguments for ``Parametric3DLineSeries`` class.
``nb_of_points``: The range is uniformly sampled at ``nb_of_points``
number of points.
Aesthetics:
``line_color``: function which returns a float. Specifies the color for the
plot. See ``sympy.plotting.Plot`` for more details.
If there are multiple plots, then the same series arguments are applied to
all the plots. If you want to set these options separately, you can index
the returned ``Plot`` object and set it.
Arguments for ``Plot`` class.
``title`` : str. Title of the plot.
Examples
========
>>> from sympy import symbols, cos, sin
>>> from sympy.plotting import plot3d_parametric_line
>>> u = symbols('u')
Single plot.
>>> plot3d_parametric_line(cos(u), sin(u), u, (u, -5, 5))
Plot object containing:
[0]: 3D parametric cartesian line: (cos(u), sin(u), u) for u over (-5.0, 5.0)
Multiple plots.
>>> plot3d_parametric_line((cos(u), sin(u), u, (u, -5, 5)),
... (sin(u), u**2, u, (u, -5, 5)))
Plot object containing:
[0]: 3D parametric cartesian line: (cos(u), sin(u), u) for u over (-5.0, 5.0)
[1]: 3D parametric cartesian line: (sin(u), u**2, u) for u over (-5.0, 5.0)
See Also
========
Plot, Parametric3DLineSeries
"""
args = list(map(sympify, args))
show = kwargs.pop('show', True)
plot_expr = check_arguments(args, 3, 1)
series = [Parametric3DLineSeries(*arg) for arg in plot_expr]
plots = Plot(*series, **kwargs)
if show:<|fim▁hole|>
@doctest_depends_on(modules=('numpy', 'matplotlib',))
def plot3d(*args, **kwargs):
"""
Plots a 3D surface plot.
Usage
=====
Single plot
``plot3d(expr, range_x, range_y, **kwargs)``
If the ranges are not specified, then a default range of (-10, 10) is used.
Multiple plots with the same range.
``plot3d(expr1, expr2, range_x, range_y, **kwargs)``
If the ranges are not specified, then a default range of (-10, 10) is used.
Multiple plots with different ranges.
``plot3d((expr1, range_x, range_y), (expr2, range_x, range_y), ..., **kwargs)``
Ranges have to be specified for every expression.
Default range may change in the future if a more advanced default range
detection algorithm is implemented.
Arguments
=========
``expr`` : Expression representing the function along x.
``range_x``: (x, 0, 5), A 3-tuple denoting the range of the x
variable.
``range_y``: (y, 0, 5), A 3-tuple denoting the range of the y
variable.
Keyword Arguments
=================
Arguments for ``SurfaceOver2DRangeSeries`` class:
``nb_of_points_x``: int. The x range is sampled uniformly at
``nb_of_points_x`` points.
``nb_of_points_y``: int. The y range is sampled uniformly at
``nb_of_points_y`` points.
Aesthetics:
``surface_color``: Function which returns a float. Specifies the color for
the surface of the plot. See ``sympy.plotting.Plot`` for more details.
If there are multiple plots, then the same series arguments are applied to
all the plots. If you want to set these options separately, you can index
the returned ``Plot`` object and set it.
Arguments for ``Plot`` class:
``title`` : str. Title of the plot.
Examples
========
>>> from sympy import symbols
>>> from sympy.plotting import plot3d
>>> x, y = symbols('x y')
Single plot
>>> plot3d(x*y, (x, -5, 5), (y, -5, 5))
Plot object containing:
[0]: cartesian surface: x*y for x over (-5.0, 5.0) and y over (-5.0, 5.0)
Multiple plots with same range
>>> plot3d(x*y, -x*y, (x, -5, 5), (y, -5, 5))
Plot object containing:
[0]: cartesian surface: x*y for x over (-5.0, 5.0) and y over (-5.0, 5.0)
[1]: cartesian surface: -x*y for x over (-5.0, 5.0) and y over (-5.0, 5.0)
Multiple plots with different ranges.
>>> plot3d((x**2 + y**2, (x, -5, 5), (y, -5, 5)),
... (x*y, (x, -3, 3), (y, -3, 3)))
Plot object containing:
[0]: cartesian surface: x**2 + y**2 for x over (-5.0, 5.0) and y over (-5.0, 5.0)
[1]: cartesian surface: x*y for x over (-3.0, 3.0) and y over (-3.0, 3.0)
See Also
========
Plot, SurfaceOver2DRangeSeries
"""
args = list(map(sympify, args))
show = kwargs.pop('show', True)
plot_expr = check_arguments(args, 1, 2)
series = [SurfaceOver2DRangeSeries(*arg) for arg in plot_expr]
plots = Plot(*series, **kwargs)
if show:
plots.show()
return plots
@doctest_depends_on(modules=('numpy', 'matplotlib',))
def plot3d_parametric_surface(*args, **kwargs):
"""
Plots a 3D parametric surface plot.
Usage
=====
Single plot.
``plot3d_parametric_surface(expr_x, expr_y, expr_z, range_u, range_v, **kwargs)``
If the ranges are not specified, then a default range of (-10, 10) is used.
Multiple plots.
``plot3d_parametric_surface((expr_x, expr_y, expr_z, range_u, range_v), ..., **kwargs)``
Ranges have to be specified for every expression.
Default range may change in the future if a more advanced default range
detection algorithm is implemented.
Arguments
=========
``expr_x``: Expression representing the function along ``x``.
``expr_y``: Expression representing the function along ``y``.
``expr_z``: Expression representing the function along ``z``.
``range_u``: ``(u, 0, 5)``, A 3-tuple denoting the range of the ``u``
variable.
``range_v``: ``(v, 0, 5)``, A 3-tuple denoting the range of the ``v``
variable.
Keyword Arguments
=================
Arguments for ``ParametricSurfaceSeries`` class:
``nb_of_points_u``: int. The ``u`` range is sampled uniformly at
``nb_of_points_u`` points.
``nb_of_points_v``: int. The ``v`` range is sampled uniformly at
``nb_of_points_v`` points.
Aesthetics:
``surface_color``: Function which returns a float. Specifies the color for
the surface of the plot. See ``sympy.plotting.Plot`` for more details.
If there are multiple plots, then the same series arguments are applied for
all the plots. If you want to set these options separately, you can index
the returned ``Plot`` object and set it.
Arguments for ``Plot`` class:
``title`` : str. Title of the plot.
Examples
========
>>> from sympy import symbols, cos, sin
>>> from sympy.plotting import plot3d_parametric_surface
>>> u, v = symbols('u v')
Single plot.
>>> plot3d_parametric_surface(cos(u + v), sin(u - v), u - v,
... (u, -5, 5), (v, -5, 5))
Plot object containing:
[0]: parametric cartesian surface: (cos(u + v), sin(u - v), u - v) for u over (-5.0, 5.0) and v over (-5.0, 5.0)
See Also
========
Plot, ParametricSurfaceSeries
"""
args = list(map(sympify, args))
show = kwargs.pop('show', True)
plot_expr = check_arguments(args, 3, 2)
series = [ParametricSurfaceSeries(*arg) for arg in plot_expr]
plots = Plot(*series, **kwargs)
if show:
plots.show()
return plots
def check_arguments(args, expr_len, nb_of_free_symbols):
"""
Checks the arguments and converts into tuples of the
form (exprs, ranges)
>>> from sympy import plot, cos, sin, symbols
>>> from sympy.plotting.plot import check_arguments
>>> x,y,u,v = symbols('x y u v')
>>> check_arguments([cos(x), sin(x)], 2, 1)
[(cos(x), sin(x), (x, -10, 10))]
>>> check_arguments([x, x**2], 1, 1)
[(x, (x, -10, 10)), (x**2, (x, -10, 10))]
"""
if expr_len > 1 and isinstance(args[0], Expr):
# Multiple expressions same range.
# The arguments are tuples when the expression length is
# greater than 1.
assert len(args) >= expr_len
for i in range(len(args)):
if isinstance(args[i], Tuple):
break
else:
i = len(args) + 1
exprs = Tuple(*args[:i])
free_symbols = list(set_union(*[e.free_symbols for e in exprs]))
if len(args) == expr_len + nb_of_free_symbols:
#Ranges given
plots = [exprs + Tuple(*args[expr_len:])]
else:
default_range = Tuple(-10, 10)
ranges = []
for symbol in free_symbols:
ranges.append(Tuple(symbol) + default_range)
for i in range(len(free_symbols) - nb_of_free_symbols):
ranges.append(Tuple(Dummy()) + default_range)
plots = [exprs + Tuple(*ranges)]
return plots
if isinstance(args[0], Expr) or (isinstance(args[0], Tuple) and
len(args[0]) == expr_len and
expr_len != 3):
# Cannot handle expressions with number of expression = 3. It is
# not possible to differentiate between expressions and ranges.
#Series of plots with same range
for i in range(len(args)):
if isinstance(args[i], Tuple) and len(args[i]) != expr_len:
break
if not isinstance(args[i], Tuple):
args[i] = Tuple(args[i])
else:
i = len(args) + 1
exprs = args[:i]
assert all(isinstance(e, Expr) for expr in exprs for e in expr)
free_symbols = list(set_union(*[e.free_symbols for expr in exprs
for e in expr]))
if len(free_symbols) > nb_of_free_symbols:
raise ValueError("The number of free_symbols in the expression "
"is greater than %d" % nb_of_free_symbols)
if len(args) == i + nb_of_free_symbols and isinstance(args[i], Tuple):
ranges = Tuple(*[range_expr for range_expr in args[
i:i + nb_of_free_symbols]])
plots = [expr + ranges for expr in exprs]
return plots
else:
#Use default ranges.
default_range = Tuple(-10, 10)
ranges = []
for symbol in free_symbols:
ranges.append(Tuple(symbol) + default_range)
for i in range(len(free_symbols) - nb_of_free_symbols):
ranges.append(Tuple(Dummy()) + default_range)
ranges = Tuple(*ranges)
plots = [expr + ranges for expr in exprs]
return plots
elif isinstance(args[0], Tuple) and len(args[0]) == expr_len + nb_of_free_symbols:
#Multiple plots with different ranges.
for arg in args:
for i in range(expr_len):
if not isinstance(arg[i], Expr):
raise ValueError("Expected an expression, given %s" %
str(arg[i]))
for i in range(nb_of_free_symbols):
if not len(arg[i + expr_len]) == 3:
raise ValueError("The ranges should be a tuple of "
"length 3, got %s" % str(arg[i + expr_len]))
return args<|fim▁end|> | plots.show()
return plots |
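A quick cross-check of how check_arguments normalizes the call shapes accepted by the plot functions above (a sketch; it assumes sympy.plotting.plot is importable, as in the doctest inside check_arguments):

from sympy import sympify, symbols
from sympy.plotting.plot import check_arguments

x = symbols('x')
# plot() sympifies its arguments before calling check_arguments, so plain
# tuples become sympy Tuple objects first:
args = [sympify(a) for a in [x**2, (x, -5, 5)]]
check_arguments(args, 1, 1)        # -> [(x**2, (x, -5, 5))]
check_arguments([x**2], 1, 1)      # -> [(x**2, (x, -10, 10))] (default range)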
<|file_name|>lazy.js<|end_file_name|><|fim▁begin|><|fim▁hole|>
export { view }<|fim▁end|> | import view from './views/home' |
<|file_name|>helpers_tests.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2.7
# coding=utf-8
# Author: Dustyn Gibson <[email protected]>
# URL: http://github.com/SickRage/SickRage
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
"""
Test sickbeard.helpers
Methods:
fixGlob
indentXML
remove_non_release_groups
isMediaFile
isRarFile
isBeingWritten
remove_file_failed
makeDir
searchIndexerForShowID
listMediaFiles
copyFile
moveFile
link
hardlinkFile
symlink
moveAndSymlinkFile
make_dirs
rename_ep_file
delete_empty_folders
fileBitFilter
chmodAsParent
fixSetGroupID
is_anime_in_show_list
update_anime_support
get_absolute_number_from_season_and_episode
get_all_episodes_from_absolute_number
sanitizeSceneName
arithmeticEval
create_https_certificates
backupVersionedFile
restoreVersionedFile
md5_for_file
get_lan_ip
check_url
anon_url
encrypt
decrypt
full_sanitizeSceneName
_check_against_names
get_show
is_hidden_folder
real_path
validateShow
set_up_anidb_connection
makeZip
extractZip
backupConfigZip
restoreConfigZip
mapIndexersToShow
touchFile
_getTempDir
_setUpSession
getURL
download_file
get_size
generateApiKey
remove_article
generateCookieSecret
verify_freespace
pretty_time_delta
isFileLocked
getDiskSpaceUsage
"""
import os.path
import sys
import unittest
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from sickbeard.helpers import remove_non_release_groups
TEST_RESULT = 'Show.Name.S01E01.HDTV.x264-RLSGROUP'
TEST_CASES = {
'removewords': [
TEST_RESULT,
'Show.Name.S01E01.HDTV.x264-RLSGROUP[cttv]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP.RiPSaLoT',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[GloDLS]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[EtHD]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP-20-40',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[NO-RAR] - [ www.torrentday.com ]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[rarbg]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[Seedbox]',
'{ www.SceneTime.com } - Show.Name.S01E01.HDTV.x264-RLSGROUP',
'].[www.tensiontorrent.com] - Show.Name.S01E01.HDTV.x264-RLSGROUP',
'[ www.TorrentDay.com ] - Show.Name.S01E01.HDTV.x264-RLSGROUP',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[silv4]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[AndroidTwoU]',
'[www.newpct1.com]Show.Name.S01E01.HDTV.x264-RLSGROUP',
'Show.Name.S01E01.HDTV.x264-RLSGROUP-NZBGEEK',
'.www.Cpasbien.pwShow.Name.S01E01.HDTV.x264-RLSGROUP',
'Show.Name.S01E01.HDTV.x264-RLSGROUP [1044]',
'[ www.Cpasbien.pw ] Show.Name.S01E01.HDTV.x264-RLSGROUP',
'Show.Name.S01E01.HDTV.x264-RLSGROUP.[BT]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[vtv]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP.[www.usabit.com]',
'[www.Cpasbien.com] Show.Name.S01E01.HDTV.x264-RLSGROUP',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[ettv]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[rartv]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP-Siklopentan',
'Show.Name.S01E01.HDTV.x264-RLSGROUP-RP',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[PublicHD]',
'[www.Cpasbien.pe] Show.Name.S01E01.HDTV.x264-RLSGROUP',
'Show.Name.S01E01.HDTV.x264-RLSGROUP[eztv]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP-[SpastikusTV]',
'].[ www.tensiontorrent.com ] - Show.Name.S01E01.HDTV.x264-RLSGROUP',
'[ www.Cpasbien.com ] Show.Name.S01E01.HDTV.x264-RLSGROUP',
'Show.Name.S01E01.HDTV.x264-RLSGROUP- { www.SceneTime.com }',
'Show.Name.S01E01.HDTV.x264-RLSGROUP- [ www.torrentday.com ]',
'Show.Name.S01E01.HDTV.x264-RLSGROUP.Renc'
]
}
class HelpersTests(unittest.TestCase):
"""
Test using test generator
"""
def __init__(self, *args, **kwargs):
"""
Initialize test
"""
super(HelpersTests, self).__init__(*args, **kwargs)
def test_generator(test_strings):
"""
Generate tests from test strings
:param test_strings: to generate tests from
:return: test
"""
def _test(self):
"""
Generate tests
:param self:
:return: test to run
"""
for test_string in test_strings:
self.assertEqual(remove_non_release_groups(test_string), TEST_RESULT)
return _test
class HelpersZipTests(unittest.TestCase):
"""
Test zip methods
"""
@unittest.skip('Not yet implemented')
def test_make_zip(self):
"""
Test makeZip
"""
pass
@unittest.skip('Not yet implemented')
def test_extract_zip(self):
"""
Test extractZip
"""
pass
@unittest.skip('Not yet implemented')
def test_backup_config_zip(self):
"""
Test backupConfigZip
"""
pass
@unittest.skip('Not yet implemented')
def test_restore_config_zip(self):
"""
Test restoreConfigZip
"""
pass
@unittest.skip('Not yet implemented')
def test_is_rar_file(self):
"""
Test isRarFile
"""
pass
class HelpersDirectoryTests(unittest.TestCase):
"""
Test directory methods
"""
@unittest.skip('Not yet implemented')
def test_make_dirs(self):
"""
Test make_dirs
"""
pass
@unittest.skip('Not yet implemented')
def test_delete_empty_folders(self):
"""
Test delete_empty_folders
"""
pass
@unittest.skip('Not yet implemented')
def test_make_dir(self):
"""
Test makeDir
"""
pass
@unittest.skip('Not yet implemented')
def test_get_temp_dir(self):
"""
Test _getTempDir
"""
pass
@unittest.skip('Not yet implemented')
def test_is_hidden_folder(self):
"""
Test is_hidden_folder
"""
pass
@unittest.skip('Not yet implemented')
def test_real_path(self):
"""
Test real_path
"""
pass
class HelpersFileTests(unittest.TestCase):
"""
Test file helpers
"""
@unittest.skip('Not yet implemented')
def test_is_media_file(self):
"""
Test isMediaFile
"""
pass
@unittest.skip('Not yet implemented')
def test_is_file_locked(self):
"""
Test isFileLocked
"""
pass
@unittest.skip('Not yet implemented')
def test_is_being_written(self):
"""
Test isBeingWritten
"""
pass
@unittest.skip('Not yet implemented')
def test_remove_file_failed(self):
"""
Test remove_file_failed
"""
pass
@unittest.skip('Not yet implemented')
def test_list_media_files(self):
"""
Test listMediaFiles
"""
pass
@unittest.skip('Not yet implemented')
def test_copy_file(self):
"""
Test copyFile
"""
pass
@unittest.skip('Not yet implemented')
def test_move_file(self):
"""
Test moveFile
"""
pass
@unittest.skip('Not yet implemented')
def test_rename_ep_file(self):
"""
Test rename_ep_file
"""
pass
@unittest.skip('Not yet implemented')
def test_file_bit_filter(self):
"""
Test fileBitFilter
"""
pass
@unittest.skip('Not yet implemented')
def test_chmod_as_parent(self):
"""
Test chmodAsParent
"""
pass
@unittest.skip('Not yet implemented')
def test_backup_versioned_file(self):
"""
Test backupVersionedFile
"""
pass
@unittest.skip('Not yet implemented')
def test_restore_versioned_file(self):
"""
Test restoreVersionedFile
"""
pass
@unittest.skip('Not yet implemented')
def test_verify_free_space(self):
"""
Test verify_freespace
"""
pass
@unittest.skip('Not yet implemented')
def test_get_disk_space_usage(self):
"""
Test getDiskSpaceUsage
"""
pass
@unittest.skip('Not yet implemented')
def test_download_file(self):
"""
Test download_file
"""
pass
@unittest.skip('Not yet implemented')
def test_get_size(self):
"""
Test get_size
"""
pass
@unittest.skip('Not yet implemented')
def test_md5_for_file(self):
"""
Test md5_for_file
"""
pass
@unittest.skip('Not yet implemented')
def test_touch_file(self):
"""
Test touchFile
"""
pass
class HelpersFileLinksTests(unittest.TestCase):
"""
Test sym and hard links
"""
@unittest.skip('Not yet implemented')
def test_link(self):
"""
Test link
"""
pass
@unittest.skip('Not yet implemented')
def test_hardlink_file(self):
"""
Test hardlinkFile
"""
pass
@unittest.skip('Not yet implemented')
def test_symlink(self):
"""
Test symlink
"""
pass
@unittest.skip('Not yet implemented')
def test_move_and_symlink_file(self):
"""
Test moveAndSymlinkFile
"""
pass
class HelpersEncryptionTests(unittest.TestCase):
"""
Test encryption and decryption
"""
@unittest.skip('Not yet implemented')
def test_create_https_certificates(self):
"""
Test create_https_certificates
"""
pass
@unittest.skip('Not yet implemented')
def test_encrypt(self):
"""
Test encrypt
"""
pass
@unittest.skip('Not yet implemented')
def test_decrypt(self):
"""
Test decrypt
"""
pass
@unittest.skip('Not yet implemented')
def test_generate_cookie_secret(self):
"""
Test generateCookieSecret
"""
pass
class HelpersShowTests(unittest.TestCase):
"""
Test show methods
"""
@unittest.skip('Not yet implemented')
def test_search_indexer_for_show_id(self):
"""
Test searchIndexerForShowID
"""
pass
@unittest.skip('Not yet implemented')
def test_is_anime_in_show_list(self):
"""
Test is_anime_in_show_list
"""
pass
@unittest.skip('Not yet implemented')
def test_check_against_names(self):
"""
Test _check_against_names
"""
pass
@unittest.skip('Not yet implemented')
def test_get_show(self):
"""
Test get_show
"""
pass
@unittest.skip('Not yet implemented')
def test_validate_show(self):
"""
Test validateShow
"""
pass
@unittest.skip('Not yet implemented')
def test_map_indexers_to_show(self):
"""
Test mapIndexersToShow
"""
pass
@unittest.skip('Not yet implemented')
def test_get_abs_no_from_s_and_e(self):
"""
Test get_absolute_number_from_season_and_episode
"""
pass
@unittest.skip('Not yet implemented')
def test_get_all_eps_from_abs_no(self):
"""
Test get_all_episodes_from_absolute_number
"""
pass
class HelpersConnectionTests(unittest.TestCase):
"""
Test connections
"""
@unittest.skip('Not yet implemented')
def test_get_lan_ip(self):
"""
Test get_lan_ip
"""
pass
@unittest.skip('Not yet implemented')
def test_check_url(self):
"""
Test check_url
"""
pass
@unittest.skip('Not yet implemented')
def test_anon_url(self):
"""
Test anon_url
"""
pass
@unittest.skip('Not yet implemented')
def test_set_up_anidb_connection(self):
"""
Test set_up_anidb_connection
"""
pass<|fim▁hole|> Test _setUpSession
"""
pass
@unittest.skip('Not yet implemented')
def test_get_url(self):
"""
Test getURL
"""
pass
@unittest.skip('Not yet implemented')
def test_generate_api_key(self):
"""
Test generateApiKey
"""
pass
class HelpersMiscTests(unittest.TestCase):
"""
Test misc helper methods
"""
@unittest.skip('Not yet implemented')
def test_fix_glob(self):
"""
Test fixGlob
"""
pass
@unittest.skip('Not yet implemented')
def test_indent_xml(self):
"""
Test indentXML
"""
pass
@unittest.skip('Not yet implemented')
def test_remove_non_release_groups(self):
"""
Test remove_non_release_groups
"""
pass
@unittest.skip('Not yet implemented')
def test_fix_set_group_id(self):
"""
Test fixSetGroupID
"""
pass
@unittest.skip('Not yet implemented')
def test_update_anime_support(self):
"""
Test update_anime_support
"""
pass
@unittest.skip('Not yet implemented')
def test_sanitize_scene_name(self):
"""
Test sanitizeSceneName
"""
pass
@unittest.skip('Not yet implemented')
def test_arithmetic_eval(self):
"""
Test arithmeticEval
"""
pass
@unittest.skip('Not yet implemented')
def test_full_sanitize_scene_name(self):
"""
Test full_sanitizeSceneName
"""
pass
@unittest.skip('Not yet implemented')
def test_remove_article(self):
"""
Test remove_article
"""
pass
@unittest.skip('Not yet implemented')
def test_pretty_time_delta(self):
"""
Test pretty_time_delta
"""
pass
if __name__ == '__main__':
print "=================="
print "STARTING - Helpers TESTS"
print "=================="
print "######################################################################"
for name, test_data in TEST_CASES.items():
test_name = 'test_%s' % name
test = test_generator(test_data)
setattr(HelpersTests, test_name, test)
SUITE = unittest.TestLoader().loadTestsFromTestCase(HelpersTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
SUITE = unittest.TestLoader().loadTestsFromTestCase(HelpersConnectionTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
SUITE = unittest.TestLoader().loadTestsFromTestCase(HelpersDirectoryTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
SUITE = unittest.TestLoader().loadTestsFromTestCase(HelpersEncryptionTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
SUITE = unittest.TestLoader().loadTestsFromTestCase(HelpersFileLinksTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
SUITE = unittest.TestLoader().loadTestsFromTestCase(HelpersFileTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
SUITE = unittest.TestLoader().loadTestsFromTestCase(HelpersMiscTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
SUITE = unittest.TestLoader().loadTestsFromTestCase(HelpersShowTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
SUITE = unittest.TestLoader().loadTestsFromTestCase(HelpersZipTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)<|fim▁end|> |
@unittest.skip('Not yet implemented')
def test_set_up_session(self):
""" |
<|file_name|>_cell.py<|end_file_name|><|fim▁begin|>from .variables import *
<|fim▁hole|> # cells must stand on own line
if node.parent.cls not in ("Assign", "Assigns"):
node.auxiliary("cell")
return "{", ",", "}"
def Assign(node):
if node.name == 'varargin':
out = "%(0)s = va_arg(varargin, " + node[0].type + ") ;"
else:
out = "%(0)s.clear() ;"
# append to cell, one by one
for elem in node[1]:
out = out + "\n%(0)s.push_back(" + str(elem) + ") ;"
return out<|fim▁end|> | def Cell(node):
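The %(0)s tokens in the templates above are placeholders that the translator fills with the rendered child nodes; a rough stand-in for that substitution step (the real engine's mechanics are an assumption here, not the project's actual code):

template = "%(0)s.clear() ;\n%(0)s.push_back(a) ;\n%(0)s.push_back(b) ;"
print(template % {'0': 'mycell'})
# mycell.clear() ;
# mycell.push_back(a) ;
# mycell.push_back(b) ;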
|
<|file_name|>SHA256DigestTest.java<|end_file_name|><|fim▁begin|>package org.spongycastle.crypto.test;
import org.spongycastle.crypto.Digest;
import org.spongycastle.crypto.digests.SHA256Digest;
/**
* standard vector test for SHA-256 from FIPS Draft 180-2.
*
* Note, the first two vectors are _not_ from the draft, the last three are.
*/
public class SHA256DigestTest
extends DigestTest
{
private static String[] messages =
{<|fim▁hole|> "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq"
};
private static String[] digests =
{
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
"ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb",
"ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad",
"248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1"
};
// 1 million 'a'
static private String million_a_digest = "cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0";
SHA256DigestTest()
{
super(new SHA256Digest(), messages, digests);
}
public void performTest()
{
super.performTest();
millionATest(million_a_digest);
}
protected Digest cloneDigest(Digest digest)
{
return new SHA256Digest((SHA256Digest)digest);
}
protected Digest cloneDigest(byte[] encodedState)
{
return new SHA256Digest(encodedState);
}
public static void main(
String[] args)
{
runTest(new SHA256DigestTest());
}
}<|fim▁end|> | "",
"a",
"abc", |
<|file_name|>error.go<|end_file_name|><|fim▁begin|>package libnetwork
import (
"fmt"
)
// ErrNoSuchNetwork is returned when a network query finds no result<|fim▁hole|> return fmt.Sprintf("network %s not found", string(nsn))
}
// BadRequest denotes the type of this error
func (nsn ErrNoSuchNetwork) BadRequest() {}
// ErrNoSuchEndpoint is returned when a endpoint query finds no result
type ErrNoSuchEndpoint string
func (nse ErrNoSuchEndpoint) Error() string {
return fmt.Sprintf("endpoint %s not found", string(nse))
}
// BadRequest denotes the type of this error
func (nse ErrNoSuchEndpoint) BadRequest() {}
// ErrInvalidNetworkDriver is returned if an invalid driver
// name is passed.
type ErrInvalidNetworkDriver string
func (ind ErrInvalidNetworkDriver) Error() string {
return fmt.Sprintf("invalid driver bound to network: %s", string(ind))
}
// BadRequest denotes the type of this error
func (ind ErrInvalidNetworkDriver) BadRequest() {}
// ErrInvalidJoin is returned if a join is attempted on an endpoint
// which already has a container joined.
type ErrInvalidJoin struct{}
func (ij ErrInvalidJoin) Error() string {
return "a container has already joined the endpoint"
}
// BadRequest denotes the type of this error
func (ij ErrInvalidJoin) BadRequest() {}
// ErrNoContainer is returned when the endpoint has no container
// attached to it.
type ErrNoContainer struct{}
func (nc ErrNoContainer) Error() string {
return "a container has already joined the endpoint"
}
// Maskable denotes the type of this error
func (nc ErrNoContainer) Maskable() {}
// ErrInvalidID is returned when a query-by-id method is being invoked
// with an empty id parameter
type ErrInvalidID string
func (ii ErrInvalidID) Error() string {
return fmt.Sprintf("invalid id: %s", string(ii))
}
// BadRequest denotes the type of this error
func (ii ErrInvalidID) BadRequest() {}
// ErrInvalidName is returned when a query-by-name or resource create method is
// invoked with an empty name parameter
type ErrInvalidName string
func (in ErrInvalidName) Error() string {
return fmt.Sprintf("invalid name: %s", string(in))
}
// BadRequest denotes the type of this error
func (in ErrInvalidName) BadRequest() {}
// ErrInvalidConfigFile type is returned when an invalid LibNetwork config file is detected
type ErrInvalidConfigFile string
func (cf ErrInvalidConfigFile) Error() string {
return fmt.Sprintf("Invalid Config file %q", string(cf))
}
// NetworkTypeError type is returned when the network type string is not
// known to libnetwork.
type NetworkTypeError string
func (nt NetworkTypeError) Error() string {
return fmt.Sprintf("unknown driver %q", string(nt))
}
// NotFound denotes the type of this error
func (nt NetworkTypeError) NotFound() {}
// NetworkNameError is returned when a network with the same name already exists.
type NetworkNameError string
func (nnr NetworkNameError) Error() string {
return fmt.Sprintf("network with name %s already exists", string(nnr))
}
// Forbidden denotes the type of this error
func (nnr NetworkNameError) Forbidden() {}
// UnknownNetworkError is returned when libnetwork could not find in it's database
// a network with the same name and id.
type UnknownNetworkError struct {
name string
id string
}
func (une *UnknownNetworkError) Error() string {
return fmt.Sprintf("unknown network %s id %s", une.name, une.id)
}
// NotFound denotes the type of this error
func (une *UnknownNetworkError) NotFound() {}
// ActiveEndpointsError is returned when a network is deleted which has active
// endpoints in it.
type ActiveEndpointsError struct {
name string
id string
}
func (aee *ActiveEndpointsError) Error() string {
return fmt.Sprintf("network with name %s id %s has active endpoints", aee.name, aee.id)
}
// Forbidden denotes the type of this error
func (aee *ActiveEndpointsError) Forbidden() {}
// UnknownEndpointError is returned when libnetwork could not find in it's database
// an endpoint with the same name and id.
type UnknownEndpointError struct {
name string
id string
}
func (uee *UnknownEndpointError) Error() string {
return fmt.Sprintf("unknown endpoint %s id %s", uee.name, uee.id)
}
// NotFound denotes the type of this error
func (uee *UnknownEndpointError) NotFound() {}
// ActiveContainerError is returned when an endpoint is deleted which has active
// containers attached to it.
type ActiveContainerError struct {
name string
id string
}
func (ace *ActiveContainerError) Error() string {
return fmt.Sprintf("endpoint with name %s id %s has active containers", ace.name, ace.id)
}
// Forbidden denotes the type of this error
func (ace *ActiveContainerError) Forbidden() {}
// InvalidContainerIDError is returned when an invalid container id is passed
// in Join/Leave
type InvalidContainerIDError string
func (id InvalidContainerIDError) Error() string {
return fmt.Sprintf("invalid container id %s", string(id))
}
// BadRequest denotes the type of this error
func (id InvalidContainerIDError) BadRequest() {}<|fim▁end|> | type ErrNoSuchNetwork string
func (nsn ErrNoSuchNetwork) Error() string { |
<|file_name|>big.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | // TODO: make it big enough to slow down tsc
export const a: number = 2350; |
<|file_name|>select_feature.py<|end_file_name|><|fim▁begin|>__author__ = 'LiGe'
#encoding:utf-8
import networkx as nx
import matplotlib.pyplot as plot
from file_to_graph import file_to_mat
def build_graph(mat):
G = nx.DiGraph()  # create an empty directed graph
for i in range(0,mat.shape[0]):
G.add_node(i)  # create the nodes
for i in range(0,mat.shape[0]):
for j in range(0,mat.shape[1]):
if mat[i,j]==1:
G.add_edge(i, j)  # add a directed edge
#print nx.in_degree(G,0)
#print nx.out_degree(G)
#print nx.degree(G)
print nx.clustering(G.to_undirected())
print G.in_degree(1)
#nx.convert_to_undirected(G)
#nx.convert_to_undirected()
print nx.betweenness_centrality(G)
print nx.closeness_centrality(G)
#print nx.diameter(G)
print nx.average_shortest_path_length(G)
# print nx.average_clustering(G)
sub_graph= nx.strongly_connected_component_subgraphs(G)
for line in sub_graph:
print nx.degree(line)
#pos =nx.circular_layout(G)
#plot.title('the original graph with pos')
#nx.draw(G,pos,with_label=True,node_size=300)
#plot.show()
nx.draw(line, with_label=True)
plot.show()
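def _demo_metrics():
    # Editor's sketch (not in the original script): the same metric calls on a
    # tiny hand-built digraph, so the expected values are easy to check by hand.
    G = nx.DiGraph()
    G.add_edges_from([(0, 1), (1, 2), (2, 0), (2, 3)])
    print G.in_degree(1)                    # 1 (only the edge 0 -> 1 enters node 1)
    print nx.clustering(G.to_undirected())  # {0: 1.0, 1: 1.0, 2: 0.333..., 3: 0.0}
    # the cycle 0 -> 1 -> 2 -> 0 is one strongly connected component, {3} another
    for comp in nx.strongly_connected_components(G):
        print sorted(comp)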
if __name__=='__main__':
<|fim▁hole|> mat=file_to_mat(file)
build_graph(mat)<|fim▁end|> | file='benapi_renew/mmc.exe.txt'
|
<|file_name|>gridSearch.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = "Ponzoni, Nelson"
__copyright__ = "Copyright 2015"
__credits__ = ["Ponzoni Nelson"]
__maintainer__ = "Ponzoni Nelson"
__contact__ = "[email protected]"
__email__ = "[email protected]"
__license__ = "GPL"
__version__ = "1.0.0"
__status__ = "Production"
"""
GRID search
"""
from collections.abc import Mapping
from functools import partial, reduce
import operator
from itertools import product
import numpy as np
class ParameterGrid(object):
"""Grid of parameters with a discrete number of values for each.
Can be used to iterate over parameter value combinations with the
Python built-in function iter.
Read more in the :ref:`User Guide <grid_search>`.
Parameters
----------
param_grid : dict of string to sequence, or sequence of such
The parameter grid to explore, as a dictionary mapping estimator
parameters to sequences of allowed values.
An empty dict signifies default parameters.
A sequence of dicts signifies a sequence of grids to search, and is
useful to avoid exploring parameter combinations that make no sense
or have no effect. See the examples below.
Examples
--------
>>> from sklearn.grid_search import ParameterGrid
>>> param_grid = {'a': [1, 2], 'b': [True, False]}
>>> list(ParameterGrid(param_grid)) == (
... [{'a': 1, 'b': True}, {'a': 1, 'b': False},
... {'a': 2, 'b': True}, {'a': 2, 'b': False}])
True
>>> grid = [{'kernel': ['linear']}, {'kernel': ['rbf'], 'gamma': [1, 10]}]
>>> list(ParameterGrid(grid)) == [{'kernel': 'linear'},<|fim▁hole|> True
>>> ParameterGrid(grid)[1] == {'kernel': 'rbf', 'gamma': 1}
True
See also
--------
:class:`GridSearchCV`:
uses ``ParameterGrid`` to perform a full parallelized parameter search.
"""
def __init__(self, param_grid):
if isinstance(param_grid, Mapping):
# wrap dictionary in a singleton list to support either dict
# or list of dicts
param_grid = [param_grid]
self.param_grid = param_grid
def __iter__(self):
"""Iterate over the points in the grid.
Returns
-------
params : iterator over dict of string to any
Yields dictionaries mapping each estimator parameter to one of its
allowed values.
"""
for p in self.param_grid:
# Always sort the keys of a dictionary, for reproducibility
items = sorted(p.items())
if not items:
yield {}
else:
keys, values = zip(*items)
for v in product(*values):
params = dict(zip(keys, v))
yield params
def __len__(self):
"""Number of points on the grid."""
# Product function that can handle iterables (np.product can't).
product = partial(reduce, operator.mul)
return sum(product(len(v) for v in p.values()) if p else 1
for p in self.param_grid)
def __getitem__(self, ind):
"""Get the parameters that would be ``ind``th in iteration
Parameters
----------
ind : int
The iteration index
Returns
-------
params : dict of string to any
Equal to list(self)[ind]
"""
# This is used to make discrete sampling without replacement memory
# efficient.
for sub_grid in self.param_grid:
# XXX: could memoize information used here
if not sub_grid:
if ind == 0:
return {}
else:
ind -= 1
continue
# Reverse so most frequent cycling parameter comes first
keys, values_lists = zip(*sorted(sub_grid.items())[::-1])
sizes = [len(v_list) for v_list in values_lists]
total = np.prod(sizes)
if ind >= total:
# Try the next grid
ind -= total
else:
out = {}
for key, v_list, n in zip(keys, values_lists, sizes):
ind, offset = divmod(ind, n)
out[key] = v_list[offset]
return out
raise IndexError('ParameterGrid index out of range')
if __name__ == '__main__':
param_grid = {'a': [1, 2], 'b': [True, False]}
a = ParameterGrid(param_grid)
print(list(a))
print(len(a))
print(a[1])
print(a)<|fim▁end|> | ... {'kernel': 'rbf', 'gamma': 1},
... {'kernel': 'rbf', 'gamma': 10}] |
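Indexing the grid — the memory-efficient path noted in __getitem__'s comments — agrees with full iteration:

grid = ParameterGrid([{'kernel': ['linear']},
                      {'kernel': ['rbf'], 'gamma': [1, 10]}])
assert len(grid) == 3
assert grid[0] == {'kernel': 'linear'}
# __getitem__ matches list(grid) element-for-element without materializing it
assert [grid[i] for i in range(len(grid))] == list(grid)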
<|file_name|>audio-encoders.hpp<|end_file_name|><|fim▁begin|>#pragma once
#include <obs.hpp>
#include <map>
<|fim▁hole|>int FindClosestAvailableAACBitrate(int bitrate);<|fim▁end|> | const std::map<int, const char*> &GetAACEncoderBitrateMap();
const char *GetAACEncoderForBitrate(int bitrate); |
<|file_name|>alternating_color_fades.py<|end_file_name|><|fim▁begin|>import time, logging
from artnet import dmx, fixtures, rig
from artnet.dmx import fades
log = logging.getLogger(__name__)
# set up test fixtures
r = rig.get_default_rig()
g = r.groups['all']
def all_red():
"""
Create an all-red frame.
"""
g.setColor('#ff0000')
g.setIntensity(255)
return g.getFrame()
def all_blue():
"""
Create an all-blue frame.
"""
g.setColor('#0000ff')
g.setIntensity(255)
return g.getFrame()
def main(config, controller=None):
log.info("Running script %s" % __name__)
# global g
# g = get_default_fixture_group(config)
q = controller or dmx.Controller(config.get('base', 'address'), bpm=60, nodaemon=True, runout=True)
q.add(fades.create_multifade([<|fim▁hole|> ] * 3, secs=5.0))
if not controller:
q.start()<|fim▁end|> | all_red(),
all_blue(), |
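Extending the fade cycle is just a longer frame list; a sketch reusing this script's helpers (create_multifade's interpolation behavior is assumed from its use above):

def all_green():
    """
    Create an all-green frame.
    """
    g.setColor('#00ff00')
    g.setIntensity(255)
    return g.getFrame()

# red -> green -> blue, cycled twice, five seconds per fade step
frames = [all_red(), all_green(), all_blue()] * 2
q.add(fades.create_multifade(frames, secs=5.0))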
<|file_name|>Blocker.java<|end_file_name|><|fim▁begin|>/**
* Bukkit plugin which moves the mobs closer to the players.
* Copyright (C) 2016 Jakub "Co0sh" Sapalski
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package pl.betoncraft.hordes;
import java.util.Random;
import org.bukkit.Bukkit;
import org.bukkit.attribute.Attribute;
import org.bukkit.attribute.AttributeInstance;
import org.bukkit.entity.LivingEntity;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.entity.CreatureSpawnEvent;
/**
* Blocks the mobs from spawning in unwanted places.
*
* @author Jakub Sapalski
*/
public class Blocker implements Listener {
private Hordes plugin;
private Random rand = new Random();
/**
* Starts the blocker.
*
* @param plugin
* instance of the plugin
*/
public Blocker(Hordes plugin) {
this.plugin = plugin;
Bukkit.getPluginManager().registerEvents(this, plugin);
}
@EventHandler
public void onSpawn(CreatureSpawnEvent event) {
LivingEntity e = event.getEntity();
WorldSettings set = plugin.getWorlds().get(event.getEntity().getWorld().getName());
if (set == null) {<|fim▁hole|> return;
}
if (!set.shouldExist(e)) {
event.setCancelled(true);
} else if (rand.nextDouble() > set.getRatio(e.getType())) {
event.setCancelled(true);
} else {
AttributeInstance maxHealth = e.getAttribute(Attribute.GENERIC_MAX_HEALTH);
maxHealth.setBaseValue(maxHealth.getBaseValue() * set.getHealth(e.getType()));
e.setHealth(e.getMaxHealth());
}
}
}<|fim▁end|> | return;
}
if (!set.getEntities().contains(e.getType())) { |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 Dmytro Milinevskyi <[email protected]>
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,<|fim▁hole|>// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use record::Record;
/// Type of the formatting function.
///
/// Returns a formatted log record.
pub type Formatter = Box<Fn(&Record) -> String + Send + Sync>;
/// Default log formatter.
pub mod default;<|fim▁end|> | |
<|file_name|>FirePeer.js<|end_file_name|><|fim▁begin|>import 'webrtc-adapter/out/adapter.js';
import EventEmitter from 'events';
var configuration = {
iceServers: [
{urls: "stun:stun.l.google.com:19302"},
{urls: "turn:numb.viagenie.ca", credential: "w0kkaw0kka", username: "paul.sachs%40influitive.com"}
]
};
export default class FirePeer extends EventEmitter {
constructor(firebaseRef, userId, isMuted, isVideoMuted){
super();
this.firebaseRef = firebaseRef;
this.userRef = firebaseRef.child(userId);
this.userId = userId;
this.eventHandlers = {};
this.options = { audio: true, video: true };
this.connections = {};
// Stores mediastreams with the key being the id of the target peer
this.mediaStreams = {};
this.isMutedRef = this.userRef.child("isMuted");
this.isVideoMutedRef = this.userRef.child("isVideoMuted");
this.offersRef = this.userRef.child("offers");
this.answersRef = this.userRef.child("answers");
this.userRef.onDisconnect().remove();
this.isMuted = isMuted;
this.isVideoMuted = isVideoMuted;
this.isMutedRef.set(this.isMuted);
this.isVideoMutedRef.set(this.isVideoMuted);
this.offersRef.on("child_added", (snapshot) => {
const data = snapshot.val();
const incomingPeerId = snapshot.key();
this.acceptOffer(incomingPeerId, data).then(()=>{
// Delete the offer once accepted.
this.offersRef.child(incomingPeerId).set(null);
});
});
this.answersRef.on("child_added", (snapshot) => {
const data = snapshot.val();
const incomingPeerId = snapshot.key();
this.handleAnswer(incomingPeerId, data).then(()=>{
// Delete the offer once accepted.
this.answersRef.child(incomingPeerId).set(null);
});
});
this.firebaseRef.on("child_removed", (snapshot) => {
const peerId = snapshot.key();
if (this.userId == peerId) {
this.handleDisconnect();
}
});
this.isMutedRef.on("value", this.handleIsMuted);
this.isVideoMutedRef.on("value", this.handleIsVideoMuted);
}
connect = (peerId) => {
if (this.connections[peerId] && this.connections[peerId].signalingState != 'closed') {
console.log('Could send offer, already have connection');
return;
}
const connection = new RTCPeerConnection(configuration);
this.connections[peerId] = connection;
connection.onaddstream = this.handleAddStream; // attach to the RTCPeerConnection, not to FirePeer itself
// place an offer on the room.
const media = this.getPeerMedia();
return media.then((mediaStream)=> {
connection.addStream(mediaStream);
this.emit('stream_added', { stream: mediaStream, isSelf: true});
this.mediaStreams[peerId] = mediaStream;
return connection.createOffer();
}).then((desc)=> {
return connection.setLocalDescription(desc);
}).then(() => {
const desc = connection.localDescription;
this.firebaseRef.child(peerId).child("offers").child(this.userId)
.set(JSON.stringify(desc.sdp));
this.emit('sent_offer', peerId, desc);
}).catch(this.handleError);
};
disconnectFrom = (peerId) => {
if(this.connections[peerId]) {
this.connections[peerId].close();
}
};
mute = (mute) => {
this.isMutedRef.set(mute);
};
muteVideo = (mute) => {
this.isVideoMutedRef.set(mute);
};
disconnect = () => {
this.userRef.remove();
};
// Private:
handleDisconnect = () => {
for (let key of Object.keys(this.connections)) {
this.connections[key].close();
}
}
getPeerMedia = () => {
const media = navigator.mediaDevices.getUserMedia(
{ audio: !this.isMuted, video: !this.isVideoMuted}
);
return media;
};
handleAddStream = (event) => {
this.emit('stream_added', { stream: event.stream, isSelf: false});
};
acceptOffer = (peerId, offer) => {
if (this.connections[peerId] && this.connections[peerId].signalingState != 'closed') {
console.log('Could not accept offer, already have connection');
return;
}
const connection = new RTCPeerConnection(configuration);
this.connections[peerId] = connection;
// place an offer on the room.
const media = this.getPeerMedia();
const remote_descr = new RTCSessionDescription();
remote_descr.type = "offer";
remote_descr.sdp = JSON.parse(offer);
return media.then((mediaStream)=> {
connection.addStream(mediaStream);
this.emit('stream_added', { stream: mediaStream, isSelf: true});
this.mediaStreams[peerId] = mediaStream;
return connection.setRemoteDescription(remote_descr);
}).then(()=> {
return connection.createAnswer();
}).then((answer) => {
return connection.setLocalDescription(answer);
}).then(()=> {
const answer = connection.localDescription;
this.firebaseRef.child(peerId).child("answers").child(this.userId)
.set(JSON.stringify(answer.sdp));
this.emit('accepted_offer', peerId, answer);
}).catch(this.handleError);<|fim▁hole|> remote_descr.type = "answer";
remote_descr.sdp = JSON.parse(answer);
return this.connections[peerId].setRemoteDescription(remote_descr).then(() => {
this.emit('handled_answer', peerId, answer);
}).catch(this.handleError);
};
handleError = (error) => {
console.error("FirePeer: ");
console.error(error);
};
handleIsMuted = (snapshot) => {
this.isMuted = snapshot.val();
for (const peerId of Object.keys(this.mediaStreams)) {
const stream = this.mediaStreams[peerId];
const audioTracks = stream.getAudioTracks();
for (const audioTrack of audioTracks) {
audioTrack.enabled = !this.isMuted;
}
}
this.emit("muted", this.isMuted);
};
handleIsVideoMuted = (snapshot) => {
this.isVideoMuted = snapshot.val();
for (const peerId of Object.keys(this.mediaStreams)) {
const stream = this.mediaStreams[peerId];
const videoTracks = stream.getVideoTracks();
for (const videoTrack of videoTracks) {
videoTrack.enabled = !this.isVideoMuted;
}
}
this.emit("video_muted", this.isVideoMuted);
};
logInfo = () => {
for (const peerId of Object.keys(this.mediaStreams)) {
const stream = this.mediaStreams[peerId];
console.log(peerId);
console.log("----");
console.log("stream:");
console.log(stream);
console.log("audioTracks:");
console.log(stream.getAudioTracks());
console.log("videoTracks:");
console.log(stream.getVideoTracks());
console.log("----");
}
}
}<|fim▁end|> | };
handleAnswer = (peerId, answer) => {
const remote_descr = new RTCSessionDescription(); |
<|file_name|>CompanyType.java<|end_file_name|><|fim▁begin|>//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2015.03.18 at 03:48:09 PM CET
//
package ch.fd.invoice440.request;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for companyType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="companyType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="companyname" type="{http://www.forum-datenaustausch.ch/invoice}stringType1_35"/>
* <element name="department" type="{http://www.forum-datenaustausch.ch/invoice}stringType1_35" minOccurs="0"/>
* <element name="subaddressing" type="{http://www.forum-datenaustausch.ch/invoice}stringType1_35" minOccurs="0"/>
* <element name="postal" type="{http://www.forum-datenaustausch.ch/invoice}postalAddressType"/>
* <element name="telecom" type="{http://www.forum-datenaustausch.ch/invoice}telecomAddressType" minOccurs="0"/>
* <element name="online" type="{http://www.forum-datenaustausch.ch/invoice}onlineAddressType" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "companyType", propOrder = {
"companyname",
"department",
"subaddressing",
"postal",
"telecom",
"online"
})
public class CompanyType {
@XmlElement(required = true)
protected String companyname;
protected String department;
protected String subaddressing;
@XmlElement(required = true)
protected PostalAddressType postal;
protected TelecomAddressType telecom;
protected OnlineAddressType online;
/**
<|fim▁hole|> * Gets the value of the companyname property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getCompanyname() {
return companyname;
}
/**
* Sets the value of the companyname property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setCompanyname(String value) {
this.companyname = value;
}
/**
* Gets the value of the department property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getDepartment() {
return department;
}
/**
* Sets the value of the department property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setDepartment(String value) {
this.department = value;
}
/**
* Gets the value of the subaddressing property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getSubaddressing() {
return subaddressing;
}
/**
* Sets the value of the subaddressing property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setSubaddressing(String value) {
this.subaddressing = value;
}
/**
* Gets the value of the postal property.
*
* @return
* possible object is
* {@link PostalAddressType }
*
*/
public PostalAddressType getPostal() {
return postal;
}
/**
* Sets the value of the postal property.
*
* @param value
* allowed object is
* {@link PostalAddressType }
*
*/
public void setPostal(PostalAddressType value) {
this.postal = value;
}
/**
* Gets the value of the telecom property.
*
* @return
* possible object is
* {@link TelecomAddressType }
*
*/
public TelecomAddressType getTelecom() {
return telecom;
}
/**
* Sets the value of the telecom property.
*
* @param value
* allowed object is
* {@link TelecomAddressType }
*
*/
public void setTelecom(TelecomAddressType value) {
this.telecom = value;
}
/**
* Gets the value of the online property.
*
* @return
* possible object is
* {@link OnlineAddressType }
*
*/
public OnlineAddressType getOnline() {
return online;
}
/**
* Sets the value of the online property.
*
* @param value
* allowed object is
* {@link OnlineAddressType }
*
*/
public void setOnline(OnlineAddressType value) {
this.online = value;
}
}<|fim▁end|> | |
<|file_name|>HgCopyDialog.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2010 - 2015 Detlev Offenbach <[email protected]>
#
"""
Module implementing a dialog to enter the data for a copy or rename operation.
"""
from __future__ import unicode_literals
import os.path
from PyQt5.QtCore import pyqtSlot
from PyQt5.QtWidgets import QDialog, QDialogButtonBox
from E5Gui.E5PathPicker import E5PathPickerModes
from .Ui_HgCopyDialog import Ui_HgCopyDialog
class HgCopyDialog(QDialog, Ui_HgCopyDialog):
"""
Class implementing a dialog to enter the data for a copy or rename
operation.
"""
def __init__(self, source, parent=None, move=False):
"""
Constructor
@param source name of the source file/directory (string)
@param parent parent widget (QWidget)
@param move flag indicating a move operation (boolean)
"""
super(HgCopyDialog, self).__init__(parent)
self.setupUi(self)
self.source = source
if os.path.isdir(self.source):
self.targetPicker.setMode(E5PathPickerModes.DirectoryMode)
else:
self.targetPicker.setMode(E5PathPickerModes.SaveFileMode)
if move:
self.setWindowTitle(self.tr('Mercurial Move'))
else:
self.forceCheckBox.setEnabled(False)
self.sourceEdit.setText(source)
self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(False)
msh = self.minimumSizeHint()
self.resize(max(self.width(), msh.width()), msh.height())
def getData(self):
"""
Public method to retrieve the copy data.
@return the target name (string) and a flag indicating
the operation should be enforced (boolean)
"""
target = self.targetPicker.text()<|fim▁hole|> if not os.path.isabs(target):
sourceDir = os.path.dirname(self.sourceEdit.text())
target = os.path.join(sourceDir, target)
return target, self.forceCheckBox.isChecked()
@pyqtSlot(str)
def on_targetPicker_textChanged(self, txt):
"""
Private slot to handle changes of the target.
@param txt contents of the target edit (string)
"""
self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(
os.path.isabs(txt) or os.path.dirname(txt) == "")<|fim▁end|> | |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use redox::get_slice::GetSlice;
use redox::ops::DerefMut;
use redox::string::*;
use redox::vec::Vec;
use redox::boxed::Box;
use redox::fs::*;
use redox::io::*;
use redox::env::*;
use redox::time::Duration;
use redox::to_num::*;
/* Magic Macros { */
static mut application: *mut Application<'static> = 0 as *mut Application;
/// Execute a command
macro_rules! exec {
($cmd:expr) => ({
unsafe {
(*application).on_command(&$cmd.to_string());
}
})
}
/* } Magic Macros */
/// Structure which represents a Terminal's command.
/// This command structure contains a name, and the code which runs the functionality associated with it, taking zero, one or several arguments.
/// # Example
/// ```
/// let my_command = Command {
/// name: "my_command",
/// main: Box::new(|args: &Vec<String>| {
/// println!("Say 'hello' to my command! :-D");
/// }),
/// };
/// ```
pub struct Command<'a> {
pub name: &'a str,
pub main: Box<Fn(&Vec<String>)>,
}
impl<'a> Command<'a> {
/// Return the vector of the commands
// TODO: Use a more efficient collection instead
pub fn vec() -> Vec<Self> {
let mut commands: Vec<Self> = Vec::new();
commands.push(Command {
name: "cat",
main: Box::new(|args: &Vec<String>| {
let path = {
match args.get(1) {
Some(arg) => arg.clone(),
None => String::new(),
}
};
if let Some(mut file) = File::open(&path) {
let mut string = String::new();
match file.read_to_string(&mut string) {
Some(_) => println!("{}", string),
None => println!("Failed to read: {}", path),
}
} else {
println!("Failed to open file: {}", path);
}
}),
});
commands.push(Command {
name: "cd",
main: Box::new(|args: &Vec<String>| {
match args.get(1) {
Some(path) => {
if !change_cwd(&path) {
println!("Bad path: {}", path);
}
}
None => println!("No path given")
}
}),
});
commands.push(Command {
name: "echo",
main: Box::new(|args: &Vec<String>| {
let echo = args.iter()
.skip(1)
.fold(String::new(), |string, arg| string + " " + arg);
println!("{}", echo.trim());
}),
});
commands.push(Command {
name: "else",
main: Box::new(|_: &Vec<String>| {}),
});
commands.push(Command {
name: "exec",
main: Box::new(|args: &Vec<String>| {
if let Some(arg) = args.get(1) {
let mut args_str: Vec<&str> = Vec::new();
for arg in args.get_slice(Some(2), None) {
args_str.push(arg);
}
File::exec(arg, &args_str);
}
}),
});
commands.push(Command {
name: "exit",
main: Box::new(|_: &Vec<String>| {}),
});
commands.push(Command {
name: "fi",
main: Box::new(|_: &Vec<String>| {}),
});
commands.push(Command {
name: "if",
main: Box::new(|_: &Vec<String>| {}),
});
commands.push(Command {
name: "ls",
main: Box::new(|args: &Vec<String>| {
let path = {
match args.get(1) {
Some(arg) => arg.clone(),
None => String::new(),
}
};
if let Some(dir) = read_dir(&path) {
for entry in dir {
println!("{}", entry.path());
}
} else {
println!("Failed to open directory: {}", path);
}
}),
});
commands.push(Command {
name: "mkdir",
main: Box::new(|args: &Vec<String>| {
match args.get(1) {
Some(dir_name) => if DirEntry::create(dir_name).is_none() {
println!("Failed to create {}", dir_name);
},
None => println!("No name provided")
}
}),
});
commands.push(Command {
name: "pwd",
main: Box::new(|_: &Vec<String>| {
if let Some(file) = File::open("") {
if let Some(path) = file.path() {
println!("{}", path);
} else {
println!("Could not get the path");
}
} else {
println!("Could not open the working directory");
}
}),
});
commands.push(Command {
name: "read",
main: Box::new(|_: &Vec<String>| {}),
});
commands.push(Command {
name: "run",
main: Box::new(|args: &Vec<String>| {
if let Some(path) = args.get(1) {
let mut commands = String::new();
if let Some(mut file) = File::open(path) {
file.read_to_string(&mut commands);
}
for command in commands.split('\n') {
exec!(command);
}
}
}),
});
commands.push(Command {
name: "sleep",
main: Box::new(|args: &Vec<String>| {
let secs = {
match args.get(1) {
Some(arg) => arg.to_num() as i64,
None => 0,
}
};
let nanos = {
match args.get(2) {
Some(arg) => arg.to_num() as i32,
None => 0,
}
};
println!("Sleep: {} {}", secs, nanos);
let remaining = Duration::new(secs, nanos).sleep();
println!("Remaining: {} {}", remaining.secs, remaining.nanos);
}),
});
commands.push(Command {
name: "send",
main: Box::new(|args: &Vec<String>| {
if args.len() < 3 {
println!("Error: incorrect arguments");
println!("Usage: send [url] [data]");
return;
}
let path = {
match args.get(1) {
Some(arg) => arg.clone(),
None => String::new(),
}
};
if let Some(mut file) = File::open(&path) {
println!("URL: {:?}", file.path());
let string: String = args.iter()
.skip(2)
.fold(String::new(), |s, arg| s + " " + arg)
+ "\r\n\r\n";
match file.write(string.trim_left().as_bytes()) {
Some(size) => println!("Wrote {} bytes", size),
None => println!("Failed to write"),
}
let mut string = String::new();
match file.read_to_string(&mut string) {
Some(_) => println!("{}", string),
None => println!("Failed to read"),
}
}
}),
});
// Simple command to create a file in the current directory.
// The file is named after the first argument of the command.
// If the command has no arguments, no file is created.
commands.push(Command {
name: "touch",
main: Box::new(|args: &Vec<String>| {
match args.get(1) {
Some(file_name) => if File::create(file_name).is_none() {
println!("Failed to create: {}", file_name);
},
None => println!("No name provided")
}
}),
});
commands.push(Command {
name: "url_hex",
main: Box::new(|args: &Vec<String>| {
let path = {
match args.get(1) {
Some(arg) => arg.clone(),
None => String::new(),
}
};
if let Some(mut file) = File::open(&path) {
let mut vec: Vec<u8> = Vec::new();
match file.read_to_end(&mut vec) {
Some(_) => {<|fim▁hole|> println!("{}", line);
}
None => println!("Failed to read"),
}
}
}),
});
commands.push(Command {
name: "wget",
main: Box::new(|args: &Vec<String>| {
if let Some(host) = args.get(1) {
if let Some(req) = args.get(2) {
if let Some(mut con) = File::open(&("tcp://".to_string() + host)) {
con.write(("GET ".to_string() + req + " HTTP/1.1").as_bytes());
let mut res = Vec::new();
con.read_to_end(&mut res);
if let Some(mut file) = File::open(&req) {
file.write(&res);
}
}
} else {
println!("No request given");
}
} else {
println!("No url given");
}
}),
});
let command_list = commands.iter().fold(String::new(), |l , c| l + " " + c.name);
commands.push(Command {
name: "help",
main: Box::new(move |_: &Vec<String>| {
println!("Commands:{}", command_list);
}),
});
commands
}
}
/// A (env) variable
pub struct Variable {
pub name: String,
pub value: String,
}
pub struct Mode {
value: bool,
}
/// An application
pub struct Application<'a> {
commands: Vec<Command<'a>>,
variables: Vec<Variable>,
modes: Vec<Mode>,
}
impl<'a> Application<'a> {
/// Create a new empty application
pub fn new() -> Self {
return Application {
commands: Command::vec(),
variables: Vec::new(),
modes: Vec::new(),
};
}
fn on_command(&mut self, command_string: &str) {
//Comment
if command_string.starts_with('#') {
return;
}
//Show variables
if command_string == "$" {
for variable in self.variables.iter() {
println!("{}={}", variable.name, variable.value);
}
return;
}
//Explode into arguments, replace variables
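        // e.g. after `FOO=bar`, the input `echo $FOO` expands to ["echo", "bar"];
        // a reference to an unset variable expands to nothing.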
let mut args: Vec<String> = Vec::<String>::new();
for arg in command_string.split(' ') {
if !arg.is_empty() {
if arg.starts_with('$') {
let name = arg[1 .. arg.len()].to_string();
for variable in self.variables.iter() {
if variable.name == name {
args.push(variable.value.clone());
break;
}
}
} else {
args.push(arg.to_string());
}
}
}
//Execute commands
if let Some(cmd) = args.get(0) {
if cmd == "if" {
let mut value = false;
if let Some(left) = args.get(1) {
if let Some(cmp) = args.get(2) {
if let Some(right) = args.get(3) {
if cmp == "==" {
value = *left == *right;
} else if cmp == "!=" {
value = *left != *right;
} else if cmp == ">" {
value = left.to_num_signed() > right.to_num_signed();
} else if cmp == ">=" {
value = left.to_num_signed() >= right.to_num_signed();
} else if cmp == "<" {
value = left.to_num_signed() < right.to_num_signed();
} else if cmp == "<=" {
value = left.to_num_signed() <= right.to_num_signed();
} else {
println!("Unknown comparison: {}", cmp);
}
} else {
println!("No right hand side");
}
} else {
println!("No comparison operator");
}
} else {
println!("No left hand side");
}
self.modes.insert(0, Mode { value: value });
return;
}
if cmd == "else" {
let mut syntax_error = false;
match self.modes.get_mut(0) {
Some(mode) => mode.value = !mode.value,
None => syntax_error = true,
}
if syntax_error {
println!("Syntax error: else found with no previous if");
}
return;
}
if cmd == "fi" {
let mut syntax_error = false;
if !self.modes.is_empty() {
self.modes.remove(0);
} else {
syntax_error = true;
}
if syntax_error {
println!("Syntax error: fi found with no previous if");
}
return;
}
for mode in self.modes.iter() {
if !mode.value {
return;
}
}
if cmd == "read" {
for i in 1..args.len() {
if let Some(arg_original) = args.get(i) {
let arg = arg_original.trim();
print!("{}=", arg);
if let Some(value_original) = readln!() {
let value = value_original.trim();
self.set_var(arg, value);
}
}
}
}
//Set variables
if let Some(i) = cmd.find('=') {
let name = cmd[0 .. i].trim();
let mut value = cmd[i + 1 .. cmd.len()].trim().to_string();
for i in 1..args.len() {
if let Some(arg) = args.get(i) {
value = value + " " + &arg;
}
}
self.set_var(name, &value);
return;
}
//Commands
for command in self.commands.iter() {
if &command.name == cmd {
(*command.main)(&args);
return;
}
}
println!("Unknown command: '{}'", cmd);
}
}
pub fn set_var(&mut self, name: &str, value: &str){
if name.is_empty() {
return;
}
if value.is_empty() {
let mut remove = -1;
for i in 0..self.variables.len() {
match self.variables.get(i) {
Some(variable) => if variable.name == name {
remove = i as isize;
break;
},
None => break,
}
}
if remove >= 0 {
self.variables.remove(remove as usize);
}
} else {
for variable in self.variables.iter_mut() {
if variable.name == name {
variable.value = value.to_string();
return;
}
}
self.variables.push(Variable {
name: name.to_string(),
value: value.to_string(),
});
}
}
/// Method to return the current directory
/// If the current directory cannot be found, a default string ("?") will be returned
pub fn get_current_directory(&mut self) -> String {
// Return the current path
File::open("")
.and_then(|file| file.path())
.unwrap_or("?".to_string())
}
/// Run the application
pub fn main(&mut self) {
println!("Type help for a command list");
if let Some(arg) = args().get(1) {
let command = "run ".to_string() + arg;
println!("user@redox:{}# {}", self.get_current_directory(), command);
self.on_command(&command);
}
loop {
for mode in self.modes.iter().rev() {
if mode.value {
print!("+ ");
} else {
print!("- ");
}
}
print!("user@redox:{}# ", self.get_current_directory());
if let Some(command_original) = readln!() {
let command = command_original.trim();
if command == "exit" {
println!("Exit temporarily blocked (due to using terminal as init)")
//break;
} else if !command.is_empty() {
self.on_command(&command);
}
} else {
println!("Failed to read from stdin");
}
}
}
}
pub fn main() {
unsafe {
let mut app = Box::new(Application::new());
application = app.deref_mut();
app.main();
}
}<|fim▁end|> | let mut line = "HEX:".to_string();
for byte in vec.iter() {
line = line + " " + &format!("{:X}", *byte);
} |
<|file_name|>health.py<|end_file_name|><|fim▁begin|>import gc
import os
import signal
from datetime import datetime
from errbot import BotPlugin, botcmd, arg_botcmd
from errbot.plugin_manager import global_restart
from errbot.utils import format_timedelta
class Health(BotPlugin):
@botcmd(template='status')
def status(self, mess, args):
""" If I am alive I should be able to respond to this one
"""
plugins_statuses = self.status_plugins(mess, args)
loads = self.status_load(mess, args)
gc = self.status_gc(mess, args)
return {'plugins_statuses': plugins_statuses['plugins_statuses'],
'loads': loads['loads'],
'gc': gc['gc']}
@botcmd(template='status_load')
def status_load(self, mess, args):
""" shows the load status
"""
try:
from posix import getloadavg
loads = getloadavg()
except Exception:
loads = None
return {'loads': loads}
@botcmd(template='status_gc')
def status_gc(self, mess, args):
""" shows the garbage collection details
"""
return {'gc': gc.get_count()}
@botcmd(template='status_plugins')
def status_plugins(self, mess, args):
""" shows the plugin status
"""
pm = self._bot.plugin_manager
all_blacklisted = pm.get_blacklisted_plugin()
all_loaded = pm.get_all_active_plugin_names()
all_attempted = sorted([p.name for p in pm.all_candidates])
plugins_statuses = []
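        # Status codes consumed by the status_plugins template:
        #   BA = blacklisted but active, BD = blacklisted and deactivated,
        #   A = active, C = needs configuration (a template is defined but no
        #   configuration is set), D = deactivated.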
for name in all_attempted:
if name in all_blacklisted:
if name in all_loaded:
plugins_statuses.append(('BA', name))
else:
plugins_statuses.append(('BD', name))
elif name in all_loaded:
plugins_statuses.append(('A', name))
elif pm.get_plugin_obj_by_name(name) is not None \
and pm.get_plugin_obj_by_name(name).get_configuration_template() is not None \
and pm.get_plugin_configuration(name) is None:<|fim▁hole|> else:
plugins_statuses.append(('D', name))
return {'plugins_statuses': plugins_statuses}
@botcmd
def uptime(self, mess, args):
""" Return the uptime of the bot
"""
return "I've been up for %s %s (since %s)" % (args, format_timedelta(datetime.now() - self._bot.startup_time),
self._bot.startup_time.strftime('%A, %b %d at %H:%M'))
# noinspection PyUnusedLocal
@botcmd(admin_only=True)
def restart(self, mess, args):
""" Restart the bot. """
self.send(mess.frm, "Deactivating all the plugins...")
self._bot.plugin_manager.deactivate_all_plugins()
self.send(mess.frm, "Restarting")
self._bot.shutdown()
global_restart()
return "I'm restarting..."
# noinspection PyUnusedLocal
@arg_botcmd('--confirm', dest="confirmed", action="store_true",
help="confirm you want to shut down", admin_only=True)
@arg_botcmd('--kill', dest="kill", action="store_true",
help="kill the bot instantly, don't shut down gracefully", admin_only=True)
def shutdown(self, mess, confirmed, kill):
"""
Shutdown the bot.
Useful when things are going crazy and you don't have access to the machine.
"""
if not confirmed:
yield "Please provide `--confirm` to confirm you really want me to shut down."
return
if kill:
yield "Killing myself right now!"
os.kill(os.getpid(), signal.SIGKILL)
else:
yield "Roger that. I am shutting down."
os.kill(os.getpid(), signal.SIGINT)<|fim▁end|> | plugins_statuses.append(('C', name)) |
<|file_name|>623 - 500!.cpp<|end_file_name|><|fim▁begin|>#include<iostream>
#include<cstdio>
#include<cstring>
#include<cmath>
#include<vector>
#include<algorithm>
#define REP(i,a,b) for(int i=a;i<=b;++i)
#define FOR(i,a,b) for(int i=a;i<b;++i)
#define uREP(i,a,b) for(int i=a;i>=b;--i)
#define ECH(i,x) for(__typeof(x.begin()) i=x.begin();i!=x.end();++i)
#define CPY(a,b) memcpy(a,b,sizeof(a))
#define CLR(a,b) memset(a,b,sizeof(a))
#pragma GCC optimize("O2")
//#pragma comment(linker,"/STACK:36777216")
#define endl '\n'
#define sf scanf
#define pf printf
#define maxn 3000
using namespace std;
struct bign{
int len,v[maxn+2];<|fim▁hole|> REP(i,1,len)v[i]*=b;
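// Carry propagation: REP re-reads len on every iteration, so when a carry
// spills into a new most-significant digit the loop extends itself until
// the number is fully normalized again.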
REP(i,1,len){
v[i+1]+=v[i]/10;
v[i]%=10;
if(v[i+1])len=max(len,i+1);
}
return *this;
}
};
int main(){
//freopen("input.txt","r",stdin);
int n;
while(~sf("%d",&n)){
bign ans;
ans.v[1]=ans.len=1;
REP(i,1,n)ans=ans*i;
pf("%d!\n",n);
ans.print();pf("\n");
}
return 0;
}<|fim▁end|> | bign(){len=0,CLR(v,0);}
void print(){uREP(i,len,1)pf("%d",v[i]);}
bign operator*(int b){ |
<|file_name|>Util.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.external.classad;
import java.nio.charset.StandardCharsets;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Random;
import java.util.TimeZone;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.asterix.om.base.AMutableInt32;
public class Util {
// convert escapes in-place
// the string can only shrink while converting escapes so we can safely convert in-place.
private static final Pattern OCTAL = Pattern.compile("\\\\([0-3][0-7]{0,2})");
public static boolean convertEscapes(AMutableCharArrayString text) {
boolean validStr = true;
if (text.getLength() == 0) {
return true;
}
int dest = 0;
boolean hasOctal = false;
for (int source = 0; source < text.getLength(); ++source) {
char ch = text.charAt(source);
// scan for escapes, a terminating slash cannot be an escape
if (ch == '\\' && source < text.getLength() - 1) {
++source; // skip the \ character
ch = text.charAt(source);
// The escape part should be re-validated
switch (ch) {
case 'b':
ch = '\b';
break;
case 'f':
ch = '\f';
break;
case 'n':
ch = '\n';
break;
case 'r':
ch = '\r';
break;
case 't':
ch = '\t';
break;
case '\\':
ch = '\\';
break;
default:
if (Lexer.isodigit(ch)) {
hasOctal = true;
++dest;
}
break;
}
}
if (dest == source) {
// no need to assign ch to text when we haven't seen any escapes yet.
// text[dest] = ch;
++dest;
} else {
try {
text.erase(dest);
text.setChar(dest, ch);
++dest;
--source;
} catch (Throwable th) {
th.printStackTrace();
}
}
}
if (dest < text.getLength()) {
text.erase(dest);
text.setLength(dest);
}
        // silly, but to fulfill the original contract for this function
// we need to remove the last character in the string if it is a '\0'
// (earlier logic guaranteed that a '\0' can ONLY be the last character)
if (text.getLength() > 0 && (text.charAt(text.getLength() - 1) == '\0')) {
text.erase(text.getLength() - 1);
}
if (hasOctal) {
Matcher m = OCTAL.matcher(text.toString());
StringBuffer out = new StringBuffer();
while (m.find()) {
int octet = Integer.parseInt(m.group(1), 8);
if (octet == 0 || octet > 255) {
return false;
}
m.appendReplacement(out, String.valueOf((char) octet));
}<|fim▁hole|> return validStr;
}
public static Random initialized = new Random((new Date()).getTime());
public static int getRandomInteger() {
return initialized.nextInt();
}
public static double getRandomReal() {
return initialized.nextDouble();
}
public static int timezoneOffset(ClassAdTime clock) {
return clock.getOffset();
}
public static void getLocalTime(ClassAdTime now, ClassAdTime localtm) {
localtm.setValue(Calendar.getInstance(), now);
localtm.isAbsolute(true);
}
public static void absTimeToString(ClassAdTime atime, AMutableCharArrayString buffer) {
DateFormat formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
//"yyyy-MM-dd'T'HH:mm:ss"
//2004-01-01T00:00:00+11:00
formatter.setTimeZone(TimeZone.getTimeZone("GMT"));
buffer.appendString(formatter.format(atime.getCalendar().getTime()));
buffer.appendString(
(atime.getOffset() >= 0 ? "+" : "-") + String.format("%02d", (Math.abs(atime.getOffset()) / 3600000))
+ ":" + String.format("%02d", ((Math.abs(atime.getOffset() / 60) % 60))));
}
public static void relTimeToString(long rsecs, AMutableCharArrayString buffer) {
double fractional_seconds;
int days, hrs, mins;
double secs;
if (rsecs < 0) {
buffer.appendChar('-');
rsecs = -rsecs;
}
fractional_seconds = rsecs % 1000;
days = (int) (rsecs / 1000);
hrs = days % 86400;
mins = hrs % 3600;
secs = (mins % 60) + (fractional_seconds / 1000.0);
days = days / 86400;
hrs = hrs / 3600;
mins = mins / 60;
if (days != 0) {
if (fractional_seconds == 0) {
buffer.appendString(String.format("%d+%02d:%02d:%02d", days, hrs, mins, (int) secs));
} else {
buffer.appendString(String.format("%d+%02d:%02d:%g", days, hrs, mins, secs));
}
} else if (hrs != 0) {
if (fractional_seconds == 0) {
buffer.appendString(String.format("%02d:%02d:%02d", hrs, mins, (int) secs));
} else {
buffer.appendString(String.format("%02d:%02d:%02g", hrs, mins, secs));
}
} else if (mins != 0) {
if (fractional_seconds == 0) {
buffer.appendString(String.format("%02d:%02d", mins, (int) secs));
} else {
buffer.appendString(String.format("%02d:%02g", mins, secs));
}
return;
} else {
if (fractional_seconds == 0) {
buffer.appendString(String.format("%02d", (int) secs));
} else {
buffer.appendString(String.format("%02g", secs));
}
}
}
public static void dayNumbers(int year, int month, int day, AMutableInt32 weekday, AMutableInt32 yearday) {
int fixed = fixedFromGregorian(year, month, day);
int jan1_fixed = fixedFromGregorian(year, 1, 1);
weekday.setValue(fixed % 7);
yearday.setValue(fixed - jan1_fixed);
return;
}
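    // fixedFromGregorian below computes the "fixed day" (Rata Die) number: days
    // elapsed since the proleptic Gregorian 0001-01-01, which is day 1. The
    // month_adjustment term corrects the (367 * month - 362) / 12 approximation
    // for the short month of February.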
public static int fixedFromGregorian(int year, int month, int day) {
int fixed;
int month_adjustment;
if (month <= 2) {
month_adjustment = 0;
} else if (isLeapYear(year)) {
month_adjustment = -1;
} else {
month_adjustment = -2;
}
fixed = 365 * (year - 1) + ((year - 1) / 4) - ((year - 1) / 100) + ((year - 1) / 400)
+ ((367 * month - 362) / 12) + month_adjustment + day;
return fixed;
}
public static boolean isLeapYear(int year) {
int mod4;
int mod400;
boolean leap_year;
mod4 = year % 4;
mod400 = year % 400;
if (mod4 == 0 && mod400 != 100 && mod400 != 200 && mod400 != 300) {
leap_year = true;
} else {
leap_year = false;
}
return leap_year;
}
public static int isInf(double x) {
if (Double.isInfinite(x)) {
return (x < 0.0) ? (-1) : 1;
}
return 0;
}
public static boolean isNan(double x) {
return Double.isNaN(x);
}
}<|fim▁end|> | m.appendTail(out);
text.setValue(new String(out.toString().getBytes(StandardCharsets.ISO_8859_1), StandardCharsets.UTF_8));
}
|
<|file_name|>table.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! CSS table formatting contexts.
#![deny(unsafe_code)]
use app_units::Au;
use block::{BlockFlow, CandidateBSizeIterator, ISizeAndMarginsComputer};
use block::{ISizeConstraintInput, ISizeConstraintSolution};
use context::LayoutContext;
use display_list_builder::{BlockFlowDisplayListBuilding, BorderPaintingMode};
use display_list_builder::{DisplayListBuildState, StackingContextCollectionFlags, StackingContextCollectionState};
use euclid::Point2D;
use flow;
use flow::{BaseFlow, EarlyAbsolutePositionInfo, Flow, FlowClass, ImmutableFlowUtils, OpaqueFlow};
use flow_list::MutFlowListIterator;
use fragment::{Fragment, FragmentBorderBoxIterator, Overflow};
use gfx_traits::print_tree::PrintTree;
use layout_debug;
use model::{IntrinsicISizes, IntrinsicISizesContribution, MaybeAuto};
use std::cmp;
use std::fmt;
use style::computed_values::{border_collapse, border_spacing, table_layout};
use style::context::SharedStyleContext;
use style::logical_geometry::LogicalSize;
use style::properties::ComputedValues;
use style::servo::restyle_damage::ServoRestyleDamage;
use style::values::CSSFloat;
use style::values::computed::LengthOrPercentageOrAuto;
use table_row::{self, CellIntrinsicInlineSize, CollapsedBorder, CollapsedBorderProvenance};
use table_row::TableRowFlow;
use table_wrapper::TableLayout;
#[allow(unsafe_code)]
unsafe impl ::flow::HasBaseFlow for TableFlow {}
/// A table flow corresponded to the table's internal table fragment under a table wrapper flow.
/// The properties `position`, `float`, and `margin-*` are used on the table wrapper fragment,
/// not table fragment per CSS 2.1 § 10.5.
#[derive(Serialize)]
#[repr(C)]
pub struct TableFlow {
pub block_flow: BlockFlow,
/// Information about the intrinsic inline-sizes of each column, computed bottom-up during
/// intrinsic inline-size bubbling.
pub column_intrinsic_inline_sizes: Vec<ColumnIntrinsicInlineSize>,
/// Information about the actual inline sizes of each column, computed top-down during actual
/// inline-size bubbling.
pub column_computed_inline_sizes: Vec<ColumnComputedInlineSize>,
/// The final width of the borders in the inline direction for each cell, computed by the
/// entire table and pushed down into each row during inline size computation.
pub collapsed_inline_direction_border_widths_for_table: Vec<Au>,
/// The final width of the borders in the block direction for each cell, computed by the
/// entire table and pushed down into each row during inline size computation.
pub collapsed_block_direction_border_widths_for_table: Vec<Au>,
/// Table-layout property
pub table_layout: TableLayout,
}
impl TableFlow {
pub fn from_fragment(fragment: Fragment) -> TableFlow {
let mut block_flow = BlockFlow::from_fragment(fragment);
let table_layout =
if block_flow.fragment().style().get_table().table_layout == table_layout::T::Fixed {
TableLayout::Fixed
} else {
TableLayout::Auto
};
TableFlow {
block_flow: block_flow,
column_intrinsic_inline_sizes: Vec::new(),
column_computed_inline_sizes: Vec::new(),
collapsed_inline_direction_border_widths_for_table: Vec::new(),
collapsed_block_direction_border_widths_for_table: Vec::new(),
table_layout: table_layout
}
}
    /// Update the corresponding value of `parent_inline_sizes` if a value of
    /// `child_cell_inline_sizes` has a larger value than one of
    /// `parent_inline_sizes`. Returns the minimum and preferred inline sizes.
fn update_automatic_column_inline_sizes(
parent_inline_sizes: &mut Vec<ColumnIntrinsicInlineSize>,
child_cell_inline_sizes: &[CellIntrinsicInlineSize],
surrounding_size: Au)
-> IntrinsicISizes {
let mut total_inline_sizes = IntrinsicISizes {
minimum_inline_size: surrounding_size,
preferred_inline_size: surrounding_size,
};
let mut column_index = 0;
let mut incoming_rowspan = vec![];
for child_cell_inline_size in child_cell_inline_sizes {
// Skip any column occupied by a cell from a previous row.
while column_index < incoming_rowspan.len() && incoming_rowspan[column_index] != 1 {
if incoming_rowspan[column_index] > 1 {
incoming_rowspan[column_index] -= 1;
}
column_index += 1;
}
for _ in 0..child_cell_inline_size.column_span {
if column_index < parent_inline_sizes.len() {
// We already have some intrinsic size information for this column. Merge it in
// according to the rules specified in INTRINSIC § 4.
let parent_sizes = &mut parent_inline_sizes[column_index];
if child_cell_inline_size.column_span > 1 {
// TODO(pcwalton): Perform the recursive algorithm specified in INTRINSIC §
// 4. For now we make this column contribute no width.
} else {
let column_size = &child_cell_inline_size.column_size;
*parent_sizes = ColumnIntrinsicInlineSize {
minimum_length: cmp::max(parent_sizes.minimum_length,
column_size.minimum_length),
percentage: parent_sizes.greatest_percentage(column_size),
preferred: cmp::max(parent_sizes.preferred, column_size.preferred),
constrained: parent_sizes.constrained || column_size.constrained,
}
}
} else {
// We discovered a new column. Initialize its data.
debug_assert!(column_index == parent_inline_sizes.len());
if child_cell_inline_size.column_span > 1 {
// TODO(pcwalton): Perform the recursive algorithm specified in INTRINSIC §
// 4. For now we make this column contribute no width.
parent_inline_sizes.push(ColumnIntrinsicInlineSize::new())
} else {
parent_inline_sizes.push(child_cell_inline_size.column_size)
}
}
total_inline_sizes.minimum_inline_size +=
parent_inline_sizes[column_index].minimum_length;
total_inline_sizes.preferred_inline_size +=
parent_inline_sizes[column_index].preferred;
// If this cell spans later rows, record its rowspan.
if child_cell_inline_size.row_span > 1 {
if incoming_rowspan.len() < column_index + 1 {
incoming_rowspan.resize(column_index + 1, 0);
}
incoming_rowspan[column_index] = child_cell_inline_size.row_span;
}
column_index += 1
}
}
total_inline_sizes
}
/// Updates the minimum and preferred inline-size calculation for a single row. This is
/// factored out into a separate function because we process children of rowgroups too.
fn update_column_inline_sizes_for_row(row: &TableRowFlow,
column_inline_sizes: &mut Vec<ColumnIntrinsicInlineSize>,
computation: &mut IntrinsicISizesContribution,
first_row: bool,
table_layout: TableLayout,
surrounding_inline_size: Au) {
// Read column inline-sizes from the table-row, and assign inline-size=0 for the columns
// not defined in the column group.
//
// FIXME: Need to read inline-sizes from either table-header-group OR the first table-row.
match table_layout {
TableLayout::Fixed => {
// Fixed table layout only looks at the first row.
//
// FIXME(pcwalton): This is really inefficient. We should stop after the first row!
if first_row {
for cell_inline_size in &row.cell_intrinsic_inline_sizes {
column_inline_sizes.push(cell_inline_size.column_size);
}
}
}
TableLayout::Auto => {
computation.union_block(&TableFlow::update_automatic_column_inline_sizes(
column_inline_sizes,
&row.cell_intrinsic_inline_sizes,
surrounding_inline_size))
}
}
}
/// Returns the effective spacing per cell, taking the value of `border-collapse` into account.
pub fn spacing(&self) -> border_spacing::T {
let style = self.block_flow.fragment.style();
match style.get_inheritedtable().border_collapse {
border_collapse::T::Separate => style.get_inheritedtable().border_spacing,
border_collapse::T::Collapse => border_spacing::T::zero(),
}
}
pub fn total_horizontal_spacing(&self) -> Au {
let num_columns = self.column_intrinsic_inline_sizes.len();
if num_columns == 0 {
return Au(0);
}
self.spacing().horizontal() * (num_columns as i32 + 1)
}
}
impl Flow for TableFlow {
fn class(&self) -> FlowClass {
FlowClass::Table
}
fn as_mut_table(&mut self) -> &mut TableFlow {
self
}
fn as_table(&self) -> &TableFlow {
self
}
fn as_mut_block(&mut self) -> &mut BlockFlow {
&mut self.block_flow
}
fn as_block(&self) -> &BlockFlow {
&self.block_flow
}
fn mark_as_root(&mut self) {
self.block_flow.mark_as_root();
}
/// The specified column inline-sizes are set from column group and the first row for the fixed
/// table layout calculation.
/// The maximum min/pref inline-sizes of each column are set from the rows for the automatic
/// table layout calculation.
fn bubble_inline_sizes(&mut self) {
let _scope = layout_debug_scope!("table::bubble_inline_sizes {:x}",
self.block_flow.base.debug_id());
// Get column inline sizes from colgroups
for kid in self.block_flow.base.child_iter_mut().filter(|kid| kid.is_table_colgroup()) {
for specified_inline_size in &kid.as_mut_table_colgroup().inline_sizes {
self.column_intrinsic_inline_sizes.push(ColumnIntrinsicInlineSize {
minimum_length: match *specified_inline_size {
LengthOrPercentageOrAuto::Auto |
LengthOrPercentageOrAuto::Calc(_) |
LengthOrPercentageOrAuto::Percentage(_) => Au(0),
LengthOrPercentageOrAuto::Length(length) => Au::from(length),
},
percentage: match *specified_inline_size {
LengthOrPercentageOrAuto::Auto |
LengthOrPercentageOrAuto::Calc(_) |
LengthOrPercentageOrAuto::Length(_) => 0.0,
LengthOrPercentageOrAuto::Percentage(percentage) => percentage.0,
},
preferred: Au(0),
constrained: false,
})
}
}
self.collapsed_inline_direction_border_widths_for_table = Vec::new();
self.collapsed_block_direction_border_widths_for_table = vec![Au(0)];
let collapsing_borders = self.block_flow
.fragment
.style
.get_inheritedtable()
.border_collapse == border_collapse::T::Collapse;
let table_inline_collapsed_borders = if collapsing_borders {
Some(TableInlineCollapsedBorders {
start: CollapsedBorder::inline_start(&*self.block_flow.fragment.style,
CollapsedBorderProvenance::FromTable),
end: CollapsedBorder::inline_end(&*self.block_flow.fragment.style,
CollapsedBorderProvenance::FromTable),
})
} else {
None
};
let mut computation = IntrinsicISizesContribution::new();
let mut previous_collapsed_block_end_borders =
PreviousBlockCollapsedBorders::FromTable(CollapsedBorder::block_start(
&*self.block_flow.fragment.style,
CollapsedBorderProvenance::FromTable));
let mut first_row = true;
let (border_padding, _) = self.block_flow.fragment.surrounding_intrinsic_inline_size();
{
let mut iterator = TableRowIterator::new(&mut self.block_flow.base).peekable();
while let Some(row) = iterator.next() {
TableFlow::update_column_inline_sizes_for_row(
row,
&mut self.column_intrinsic_inline_sizes,
&mut computation,
first_row,
self.table_layout,
border_padding);
if collapsing_borders {
let next_index_and_sibling = iterator.peek();
let next_collapsed_borders_in_block_direction =
match next_index_and_sibling {
Some(next_sibling) => {
NextBlockCollapsedBorders::FromNextRow(
&next_sibling.as_table_row()
.preliminary_collapsed_borders
.block_start)
}
None => {
NextBlockCollapsedBorders::FromTable(
CollapsedBorder::block_end(&*self.block_flow.fragment.style,
CollapsedBorderProvenance::FromTable))
}
};
perform_border_collapse_for_row(row,
table_inline_collapsed_borders.as_ref().unwrap(),
previous_collapsed_block_end_borders,
next_collapsed_borders_in_block_direction,
&mut self.collapsed_inline_direction_border_widths_for_table,
&mut self.collapsed_block_direction_border_widths_for_table);
previous_collapsed_block_end_borders =
PreviousBlockCollapsedBorders::FromPreviousRow(
row.final_collapsed_borders.block_end.clone());
}
first_row = false
};
}
let total_horizontal_spacing = self.total_horizontal_spacing();
let mut style_specified_intrinsic_inline_size =
self.block_flow
.fragment
.style_specified_intrinsic_inline_size()
.finish();
style_specified_intrinsic_inline_size.minimum_inline_size -= total_horizontal_spacing;
style_specified_intrinsic_inline_size.preferred_inline_size -= total_horizontal_spacing;
computation.union_block(&style_specified_intrinsic_inline_size);
computation.surrounding_size += total_horizontal_spacing;
self.block_flow.base.intrinsic_inline_sizes = computation.finish()
}
/// Recursively (top-down) determines the actual inline-size of child contexts and fragments.
/// When called on this context, the context has had its inline-size set by the parent context.
fn assign_inline_sizes(&mut self, layout_context: &LayoutContext) {
let _scope = layout_debug_scope!("table::assign_inline_sizes {:x}",
self.block_flow.base.debug_id());
debug!("assign_inline_sizes({}): assigning inline_size for flow", "table");
let shared_context = layout_context.shared_context();
// The position was set to the containing block by the flow's parent.
// FIXME: The code for distributing column widths should really be placed under table_wrapper.rs.
let containing_block_inline_size = self.block_flow.base.block_container_inline_size;
let mut constrained_column_inline_sizes_indices = vec![];
let mut unspecified_inline_sizes_indices = vec![];
for (idx, column_inline_size) in self.column_intrinsic_inline_sizes.iter().enumerate() {
if column_inline_size.constrained {
constrained_column_inline_sizes_indices.push(idx);
} else if column_inline_size.percentage == 0.0 {
unspecified_inline_sizes_indices.push(idx);
}
}
let inline_size_computer = InternalTable;
inline_size_computer.compute_used_inline_size(&mut self.block_flow,
shared_context,
containing_block_inline_size);
let inline_start_content_edge = self.block_flow.fragment.border_padding.inline_start;
let inline_end_content_edge = self.block_flow.fragment.border_padding.inline_end;
let padding_and_borders = self.block_flow.fragment.border_padding.inline_start_end();
let spacing_per_cell = self.spacing();
let total_horizontal_spacing = self.total_horizontal_spacing();
let content_inline_size = self.block_flow.fragment.border_box.size.inline -
padding_and_borders - total_horizontal_spacing;
let mut remaining_inline_size = content_inline_size;
match self.table_layout {
TableLayout::Fixed => {
self.column_computed_inline_sizes.clear();
// https://drafts.csswg.org/css2/tables.html#fixed-table-layout
for column_inline_size in &self.column_intrinsic_inline_sizes {
if column_inline_size.constrained {
self.column_computed_inline_sizes.push(ColumnComputedInlineSize {
size: column_inline_size.minimum_length,
});
remaining_inline_size -= column_inline_size.minimum_length;
} else if column_inline_size.percentage != 0.0 {
let size = remaining_inline_size.scale_by(column_inline_size.percentage);
self.column_computed_inline_sizes.push(ColumnComputedInlineSize {
size: size,
});
remaining_inline_size -= size;
} else {
// Set the size to 0 now, distribute the remaining widths later
self.column_computed_inline_sizes.push(ColumnComputedInlineSize {
size: Au(0),
});
}
}
// Distribute remaining content inline size
if unspecified_inline_sizes_indices.len() > 0 {
for &index in &unspecified_inline_sizes_indices {
self.column_computed_inline_sizes[index].size =
remaining_inline_size.scale_by(1.0 / unspecified_inline_sizes_indices.len() as f32);
}
} else {
let total_minimum_size = self.column_intrinsic_inline_sizes
.iter()
.filter(|size| size.constrained)
.map(|size| size.minimum_length.0 as f32)
.sum::<f32>();
for &index in &constrained_column_inline_sizes_indices {
self.column_computed_inline_sizes[index].size +=
remaining_inline_size.scale_by(
self.column_computed_inline_sizes[index].size.0 as f32 / total_minimum_size);
}
}
}
_ => {
// The table wrapper already computed the inline-sizes and propagated them down
// to us.
}
}
let column_computed_inline_sizes = &self.column_computed_inline_sizes;
let collapsed_inline_direction_border_widths_for_table =
&self.collapsed_inline_direction_border_widths_for_table;
let mut collapsed_block_direction_border_widths_for_table =
self.collapsed_block_direction_border_widths_for_table.iter().peekable();
let mut incoming_rowspan = vec![];
self.block_flow.propagate_assigned_inline_size_to_children(shared_context,
inline_start_content_edge,
inline_end_content_edge,
content_inline_size,
|child_flow,
_child_index,
_content_inline_size,
writing_mode,
_inline_start_margin_edge,
_inline_end_margin_edge| {
table_row::propagate_column_inline_sizes_to_child(
child_flow,
writing_mode,
column_computed_inline_sizes,
&spacing_per_cell,
&mut incoming_rowspan);
if child_flow.is_table_row() {
let child_table_row = child_flow.as_mut_table_row();
child_table_row.populate_collapsed_border_spacing(
collapsed_inline_direction_border_widths_for_table,
&mut collapsed_block_direction_border_widths_for_table);
} else if child_flow.is_table_rowgroup() {
let child_table_rowgroup = child_flow.as_mut_table_rowgroup();
child_table_rowgroup.populate_collapsed_border_spacing(
collapsed_inline_direction_border_widths_for_table,
&mut collapsed_block_direction_border_widths_for_table);
}
});
}
fn assign_block_size(&mut self, _: &LayoutContext) {
debug!("assign_block_size: assigning block_size for table");
let vertical_spacing = self.spacing().vertical();
self.block_flow.assign_block_size_for_table_like_flow(vertical_spacing)
}
fn compute_stacking_relative_position(&mut self, layout_context: &LayoutContext) {
self.block_flow.compute_stacking_relative_position(layout_context)
}
fn generated_containing_block_size(&self, flow: OpaqueFlow) -> LogicalSize<Au> {
self.block_flow.generated_containing_block_size(flow)
}
fn update_late_computed_inline_position_if_necessary(&mut self, inline_position: Au) {
self.block_flow.update_late_computed_inline_position_if_necessary(inline_position)
}
fn update_late_computed_block_position_if_necessary(&mut self, block_position: Au) {
self.block_flow.update_late_computed_block_position_if_necessary(block_position)
}
fn build_display_list(&mut self, state: &mut DisplayListBuildState) {
let border_painting_mode = match self.block_flow
.fragment
.style
.get_inheritedtable()
.border_collapse {
border_collapse::T::Separate => BorderPaintingMode::Separate,
border_collapse::T::Collapse => BorderPaintingMode::Hidden,
};
self.block_flow.build_display_list_for_block(state, border_painting_mode);
}
fn collect_stacking_contexts(&mut self, state: &mut StackingContextCollectionState) {
// Stacking contexts are collected by the table wrapper.
self.block_flow.collect_stacking_contexts_for_block(state,
StackingContextCollectionFlags::NEVER_CREATES_STACKING_CONTEXT);
}
fn repair_style(&mut self, new_style: &::ServoArc<ComputedValues>) {
self.block_flow.repair_style(new_style)
}
fn compute_overflow(&self) -> Overflow {
self.block_flow.compute_overflow()
}
fn iterate_through_fragment_border_boxes(&self,
iterator: &mut FragmentBorderBoxIterator,
level: i32,
stacking_context_position: &Point2D<Au>) {
self.block_flow.iterate_through_fragment_border_boxes(iterator, level, stacking_context_position)
}
fn mutate_fragments(&mut self, mutator: &mut FnMut(&mut Fragment)) {
self.block_flow.mutate_fragments(mutator)
}
fn print_extra_flow_children(&self, print_tree: &mut PrintTree) {
self.block_flow.print_extra_flow_children(print_tree);
}
}
impl fmt::Debug for TableFlow {
/// Outputs a debugging string describing this table flow.
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "TableFlow: {:?}", self.block_flow)
}
}
/// Table, TableRowGroup, TableRow, TableCell types.
/// Their inline-sizes are calculated in the same way and do not have margins.
pub struct InternalTable;
impl ISizeAndMarginsComputer for InternalTable {
/// Compute the used value of inline-size, taking care of min-inline-size and max-inline-size.
///
/// CSS Section 10.4: Minimum and Maximum inline-sizes
fn compute_used_inline_size(
&self,
block: &mut BlockFlow,
shared_context: &SharedStyleContext,
parent_flow_inline_size: Au
) {
let mut input = self.compute_inline_size_constraint_inputs(block,
parent_flow_inline_size,
shared_context);
// Tables are always at least as wide as their minimum inline size.
let minimum_inline_size =
block.base.intrinsic_inline_sizes.minimum_inline_size -
block.fragment.border_padding.inline_start_end();
input.available_inline_size = cmp::max(input.available_inline_size, minimum_inline_size);
let solution = self.solve_inline_size_constraints(block, &input);
self.set_inline_size_constraint_solutions(block, solution);
}
/// Solve the inline-size and margins constraints for this block flow.
fn solve_inline_size_constraints(&self, _: &mut BlockFlow, input: &ISizeConstraintInput)
-> ISizeConstraintSolution {
ISizeConstraintSolution::new(input.available_inline_size, Au(0), Au(0))
}
}
/// Information about the intrinsic inline sizes of columns within a table.
///
/// During table inline-size bubbling, we might need to store both a percentage constraint and a
/// specific width constraint. For instance, one cell might say that it wants to be 100 pixels wide
/// in the inline direction and another cell might say that it wants to take up 20% of the inline-
/// size of the table. Now because we bubble up these constraints during the bubble-inline-sizes
/// phase of layout, we don't know yet how wide the table is ultimately going to be in the inline
/// direction. As we need to pick the maximum width of all cells for a column (in this case, the
/// maximum of 100 pixels and 20% of the table), the preceding constraint means that we must
/// potentially store both a specified width *and* a specified percentage, so that the inline-size
/// assignment phase of layout will know which one to pick.
#[derive(Clone, Copy, Debug, Serialize)]
pub struct ColumnIntrinsicInlineSize {
/// The preferred intrinsic inline size.
pub preferred: Au,
/// The largest specified size of this column as a length.
pub minimum_length: Au,
/// The largest specified size of this column as a percentage (`width` property).
pub percentage: CSSFloat,
/// Whether the column inline size is *constrained* per INTRINSIC § 4.1.
pub constrained: bool,
}
impl ColumnIntrinsicInlineSize {
/// Returns a newly-initialized `ColumnIntrinsicInlineSize` with all fields blank.
pub fn new() -> ColumnIntrinsicInlineSize {
ColumnIntrinsicInlineSize {
preferred: Au(0),
minimum_length: Au(0),
percentage: 0.0,
constrained: false,
}
}
/// Returns the higher of the two percentages specified in `self` and `other`.
pub fn greatest_percentage(&self, other: &ColumnIntrinsicInlineSize) -> CSSFloat {
if self.percentage > other.percentage {
self.percentage
} else {
other.percentage
}
}
}
/// The actual inline size for each column.
///
/// TODO(pcwalton): There will probably be some `border-collapse`-related info in here too
/// eventually.
#[derive(Clone, Copy, Debug, Serialize)]
pub struct ColumnComputedInlineSize {
/// The computed size of this inline column.
pub size: Au,
}
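/// Small helpers for growing a vector while indexing into it: `push_or_set`
/// overwrites an existing slot or appends when `index` is one past the end;
/// `get_mut_or_push` appends `zero` in that case and returns a mutable
/// reference. Used to build per-column border-width tables whose length is
/// discovered as rows are processed.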
pub trait VecExt<T> {
fn push_or_set(&mut self, index: usize, value: T) -> &mut T;
fn get_mut_or_push(&mut self, index: usize, zero: T) -> &mut T;
}
impl<T> VecExt<T> for Vec<T> {
fn push_or_set(&mut self, index: usize, value: T) -> &mut T {
if index < self.len() {
self[index] = value
} else {
debug_assert!(index == self.len());
self.push(value)
}
&mut self[index]
}
fn get_mut_or_push(&mut self, index: usize, zero: T) -> &mut T {
if index >= self.len() {
debug_assert!(index == self.len());
self.push(zero)
}
&mut self[index]
}
}
/// Updates the border styles in the block direction for a single row. This function should
/// only be called if border collapsing is on. It is factored out into a separate function
/// because we process children of rowgroups too.
fn perform_border_collapse_for_row(child_table_row: &mut TableRowFlow,
table_inline_borders: &TableInlineCollapsedBorders,
previous_block_borders: PreviousBlockCollapsedBorders,
next_block_borders: NextBlockCollapsedBorders,
inline_spacing: &mut Vec<Au>,
block_spacing: &mut Vec<Au>) {
// TODO mbrubeck: Take rowspan and colspan into account.
let number_of_borders_inline_direction = child_table_row.preliminary_collapsed_borders.inline.len();
// Compute interior inline borders.
for (i, this_inline_border) in child_table_row.preliminary_collapsed_borders
.inline
.iter_mut()
.enumerate() {
child_table_row.final_collapsed_borders.inline.push_or_set(i, *this_inline_border);
if i == 0 {
child_table_row.final_collapsed_borders.inline[i].combine(&table_inline_borders.start);
} else if i + 1 == number_of_borders_inline_direction {
child_table_row.final_collapsed_borders.inline[i].combine(&table_inline_borders.end);
}
let inline_spacing = inline_spacing.get_mut_or_push(i, Au(0));
*inline_spacing = cmp::max(*inline_spacing, child_table_row.final_collapsed_borders.inline[i].width)
}
// Compute block-start borders.
let block_start_borders = &mut child_table_row.final_collapsed_borders.block_start;
*block_start_borders = child_table_row.preliminary_collapsed_borders.block_start.clone();
for (i, this_border) in block_start_borders.iter_mut().enumerate() {
match previous_block_borders {
PreviousBlockCollapsedBorders::FromPreviousRow(ref previous_block_borders) => {
if previous_block_borders.len() > i {
this_border.combine(&previous_block_borders[i]);
}
}
PreviousBlockCollapsedBorders::FromTable(table_border) => {
this_border.combine(&table_border);
}
}
}
// Compute block-end borders.
let next_block = &mut child_table_row.final_collapsed_borders.block_end;
block_spacing.push(Au(0));
let block_spacing = block_spacing.last_mut().unwrap();
for (i, this_block_border) in child_table_row.preliminary_collapsed_borders
.block_end
.iter()
.enumerate() {
let next_block = next_block.push_or_set(i, *this_block_border);
match next_block_borders {
NextBlockCollapsedBorders::FromNextRow(next_block_borders) => {
if next_block_borders.len() > i {
next_block.combine(&next_block_borders[i])
}
}
NextBlockCollapsedBorders::FromTable(ref next_block_borders) => {
next_block.combine(next_block_borders);
}
}
*block_spacing = cmp::max(*block_spacing, next_block.width)
}
}
/// Encapsulates functionality shared among all table-like flows: for now, tables and table
/// rowgroups.
pub trait TableLikeFlow {
/// Lays out the rows of a table.
fn assign_block_size_for_table_like_flow(&mut self, block_direction_spacing: Au);
}
impl TableLikeFlow for BlockFlow {
fn assign_block_size_for_table_like_flow(&mut self, block_direction_spacing: Au) {
debug_assert!(self.fragment.style.get_inheritedtable().border_collapse ==
border_collapse::T::Separate || block_direction_spacing == Au(0));
if self.base.restyle_damage.contains(ServoRestyleDamage::REFLOW) {
// Our current border-box position.
let block_start_border_padding = self.fragment.border_padding.block_start;
let mut current_block_offset = block_start_border_padding;
let mut has_rows = false;
// At this point, `current_block_offset` is at the content edge of our box. Now iterate
// over children.
for kid in self.base.child_iter_mut() {
// Account for spacing or collapsed borders.
if kid.is_table_row() {
has_rows = true;
let child_table_row = kid.as_table_row();
current_block_offset = current_block_offset +
match self.fragment.style.get_inheritedtable().border_collapse {
border_collapse::T::Separate => block_direction_spacing,
border_collapse::T::Collapse => {
child_table_row.collapsed_border_spacing.block_start
}
}
}
// At this point, `current_block_offset` is at the border edge of the child.
flow::mut_base(kid).position.start.b = current_block_offset;
// Move past the child's border box. Do not use the `translate_including_floats`
// function here because the child has already translated floats past its border
// box.
let kid_base = flow::mut_base(kid);
current_block_offset = current_block_offset + kid_base.position.size.block;
}
// Compute any explicitly-specified block size.
// Can't use `for` because we assign to
// `candidate_block_size_iterator.candidate_value`.
let mut block_size = current_block_offset - block_start_border_padding;
let mut candidate_block_size_iterator = CandidateBSizeIterator::new(
&self.fragment,
self.base.block_container_explicit_block_size);
while let Some(candidate_block_size) = candidate_block_size_iterator.next() {
candidate_block_size_iterator.candidate_value =
match candidate_block_size {
MaybeAuto::Auto => block_size,
MaybeAuto::Specified(value) => value
};
}
// Adjust `current_block_offset` as necessary to account for the explicitly-specified
// block-size.
block_size = candidate_block_size_iterator.candidate_value;
let delta = block_size - (current_block_offset - block_start_border_padding);
current_block_offset = current_block_offset + delta;
// Take border, padding, and spacing into account.
let block_end_offset = self.fragment.border_padding.block_end +
if has_rows { block_direction_spacing } else { Au(0) };
current_block_offset = current_block_offset + block_end_offset;
// Now that `current_block_offset` is at the block-end of the border box, compute the
// final border box position.
self.fragment.border_box.size.block = current_block_offset;
self.fragment.border_box.start.b = Au(0);
self.base.position.size.block = current_block_offset;
// Write in the size of the relative containing block for children. (This information
// is also needed to handle RTL.)
for kid in self.base.child_iter_mut() {
flow::mut_base(kid).early_absolute_position_info = EarlyAbsolutePositionInfo {
relative_containing_block_size: self.fragment.content_box().size,
relative_containing_block_mode: self.fragment.style().writing_mode,
};
}
}
self.base.restyle_damage.remove(ServoRestyleDamage::REFLOW_OUT_OF_FLOW | ServoRestyleDamage::REFLOW);
}
}
/// Inline collapsed borders for the table itself.
#[derive(Debug)]
struct TableInlineCollapsedBorders {
/// The table border at the start of the inline direction.
start: CollapsedBorder,
/// The table border at the end of the inline direction.
end: CollapsedBorder,
}
enum PreviousBlockCollapsedBorders {
FromPreviousRow(Vec<CollapsedBorder>),
FromTable(CollapsedBorder),
}
enum NextBlockCollapsedBorders<'a> {
FromNextRow(&'a [CollapsedBorder]),
FromTable(CollapsedBorder),
}
/// Iterator over all the rows of a table
struct TableRowIterator<'a> {
kids: MutFlowListIterator<'a>,
grandkids: Option<MutFlowListIterator<'a>>,
}
impl<'a> TableRowIterator<'a> {
fn new(base: &'a mut BaseFlow) -> Self {
TableRowIterator {
kids: base.child_iter_mut(),
grandkids: None,
}
}
}
impl<'a> Iterator for TableRowIterator<'a> {
type Item = &'a mut TableRowFlow;
#[inline]
fn next(&mut self) -> Option<Self::Item> {
// If we're inside a rowgroup, iterate through the rowgroup's children.
if let Some(ref mut grandkids) = self.grandkids {
if let Some(grandkid) = grandkids.next() {
return Some(grandkid.as_mut_table_row())
}
}
// Otherwise, iterate through the table's children.
self.grandkids = None;
match self.kids.next() {
Some(kid) => {
if kid.is_table_rowgroup() {
self.grandkids = Some(flow::mut_base(kid).child_iter_mut());<|fim▁hole|> } else if kid.is_table_row() {
Some(kid.as_mut_table_row())
} else {
self.next() // Skip children that are not rows or rowgroups
}
}
None => None
}
}
}<|fim▁end|> | self.next() |
<|file_name|>JSDistance.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# '''
# Author: Eachen Kuang
# Date: 2017.10.20
# Goal: Convert a file into a dictionary
# Other:
# '''
import string
from math import log
import numpy as np
def KLD(p,q):
p,q=zip(*filter(lambda (x,y): x!=0 or y!=0, zip(p,q))) # drop entries where both probabilities are 0
p=p+np.spacing(1)
q=q+np.spacing(1)
print p, q
return sum([_p * log(_p/_q,2) for (_p,_q) in zip(p,q)])
# p=np.ones(5)/5.0
# q=[0,0,0.5,0.2,0.3]
# print KLD(p,q)
def JSD_core(p, q):
p, q = zip(*filter(lambda (x, y): x != 0 or y != 0, zip(p, q))) # drop entries where both probabilities are 0
M = [0.5 * (_p + _q) for _p, _q in zip(p, q)]
p = p + np.spacing(1)
q = q + np.spacing(1)<|fim▁hole|>
reg = lambda x: [x.count(i) for i in string.lowercase] # frequency distribution over lowercase letters
rate = lambda y: [round(i * 1.0 / sum(reg(y)), 4) for i in reg(y)] # probability distribution
s1 = 'KuangYichen1raf'
s2 = 'YichenKuang2'
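# Note: with base-2 logs, JSD_core is symmetric and bounded in [0, 1]; identical
# inputs give exactly 0.0 (e.g. JSD_core(rate(s1), rate(s1)) == 0.0).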
print JSD_core(rate(s1), rate(s2))<|fim▁end|> | M = M + np.spacing(1)
# print p,q,M
return 0.5 * KLD(p, M) + 0.5 * KLD(q, M)
|
<|file_name|>costFunctionChecker.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
@file costFunctionChecker.py
@author Michael Behrisch
@author Daniel Krajzewicz
@author Jakob Erdmann
@date 2009-08-31
@version $Id: costFunctionChecker.py 13811 2013-05-01 20:31:43Z behrisch $
Run duarouter repeatedly and simulate weight changes via a cost function.
SUMO, Simulation of Urban MObility; see http://sumo.sourceforge.net/
Copyright (C) 2009-2013 DLR (http://www.dlr.de/) and contributors
All rights reserved
"""
import os, sys, subprocess, types
from datetime import datetime
from optparse import OptionParser
from xml.sax import make_parser, handler
def call(command, log):<|fim▁hole|> log.flush()
retCode = subprocess.call(command, stdout=log, stderr=log)
if retCode != 0:
print >> sys.stderr, "Execution of %s failed. Look into %s for details." % (command, log.name)
sys.exit(retCode)
def writeRouteConf(step, options, file, output):
fd = open("iteration_" + str(step) + ".duarcfg", "w")
print >> fd, """<configuration>
<input>
<net-file value="%s"/>""" % options.net
if step==0:
if options.flows:
print >> fd, ' <flow-definition value="%s"/>' % file
else:
print >> fd, ' <trip-defs value="%s"/>' % file
else:
print >> fd, ' <alternatives value="%s"/>' % file
print >> fd, ' <weights value="dump_%s_%s.xml"/>' % (step-1, options.aggregation)
print >> fd, """ </input>
<output>
<output-file value="%s"/>
<exit-times value="True"/>
</output>""" % output
print >> fd, """ <processing>
<continue-on-unbuild value="%s"/>
<expand-weights value="True"/>
<gBeta value="%s"/>
<gA value="%s"/>
</processing>""" % (options.continueOnUnbuild, options.gBeta, options.gA)
print >> fd, ' <random_number><abs-rand value="%s"/></random_number>' % options.absrand
print >> fd, ' <time><begin value="%s"/>' % options.begin,
if options.end:
print >> fd, '<end value="%s"/>' % options.end,
print >> fd, """</time>
<report>
<verbose value="%s"/>
<suppress-warnings value="%s"/>
</report>
</configuration>""" % (options.verbose, options.noWarnings)
fd.close()
class RouteReader(handler.ContentHandler):
def __init__(self):
self._edgeWeights = {}
self._maxDepart = 0
def startElement(self, name, attrs):
if name == 'route':
for edge in attrs['edges'].split():
if not edge in self._edgeWeights:
self._edgeWeights[edge] = 0
self._edgeWeights[edge] += 1
elif name == 'vehicle':
if float(attrs['depart']) > self._maxDepart:
self._maxDepart = float(attrs['depart'])
def getWeight(self, edge):
return self._edgeWeights.get(edge, 0)
def getMaxDepart(self):
return self._maxDepart
class NetReader(handler.ContentHandler):
def __init__(self):
self._edges = []
def startElement(self, name, attrs):
if name == 'edge':
if not attrs.has_key('function') or attrs['function'] == 'normal':
self._edges.append(attrs['id'])
def getEdges(self):
return self._edges
def identity(edge, weight):
return weight
def generateWeights(step, options, edges, weights, costFunction):
fd = open("dump_%s_%s.xml" % (step, options.aggregation), "w")
print >> fd, '<?xml version="1.0"?>\n<netstats>'
    for time in range(0, int(weights.getMaxDepart()+1), options.aggregation):
print >> fd, ' <interval begin="%s" end="%s" id="dump_%s">' % (time, time + options.aggregation, options.aggregation)
for edge in edges:
cost = costFunction(edge, weights.getWeight(edge))
if cost != None:
print >> fd, ' <edge id="%s" traveltime="%s"/>' % (edge, cost)
print >> fd, ' </interval>'
print >> fd, '</netstats>'
fd.close()
optParser = OptionParser()
optParser.add_option("-v", "--verbose", action="store_true", dest="verbose",
default=False, help="tell me what you are doing")
optParser.add_option("-C", "--continue-on-unbuild", action="store_true", dest="continueOnUnbuild",
default=False, help="continues on unbuild routes")
optParser.add_option("-w", "--disable-warnings", action="store_true", dest="noWarnings",
default=False, help="disables warnings")
optParser.add_option("-n", "--net-file", dest="net",
help="SUMO network (mandatory)", metavar="FILE")
optParser.add_option("-t", "--trips", dest="trips",
help="trips in step 0 (this or flows is mandatory)", metavar="FILE")
optParser.add_option("-F", "--flows",
help="flows in step 0 (this or trips is mandatory)", metavar="FILE")
optParser.add_option("-+", "--additional", dest="additional",
default="", help="Additional files")
optParser.add_option("-b", "--begin", dest="begin",
type="int", default=0, help="Set simulation/routing begin [default: %default]")
optParser.add_option("-e", "--end", dest="end",
type="int", help="Set simulation/routing end [default: %default]")
optParser.add_option("-R", "--route-steps", dest="routeSteps",
type="int", default=200, help="Set simulation route steps [default: %default]")
optParser.add_option("-a", "--aggregation", dest="aggregation",
type="int", default=900, help="Set main weights aggregation period [default: %default]")
optParser.add_option("-A", "--gA", dest="gA",
type="float", default=.5, help="Sets Gawron's Alpha [default: %default]")
optParser.add_option("-B", "--gBeta", dest="gBeta",
type="float", default=.9, help="Sets Gawron's Beta [default: %default]")
optParser.add_option("-f", "--first-step", dest="firstStep",
type="int", default=0, help="First DUA step [default: %default]")
optParser.add_option("-l", "--last-step", dest="lastStep",
type="int", default=50, help="Last DUA step [default: %default]")
optParser.add_option("-p", "--path", dest="path",
default=os.environ.get("SUMO_BINDIR", ""), help="Path to binaries [default: %default]")
optParser.add_option("-y", "--absrand", dest="absrand", action="store_true",
default=False, help="use current time to generate random number")
optParser.add_option("-c", "--cost-function", dest="costfunc",
default="identity", help="(python) function to use as cost function")
(options, args) = optParser.parse_args()
if not options.net or not (options.trips or options.flows):
optParser.error("At least --net-file and --trips or --flows have to be given!")
duaBinary = os.environ.get("DUAROUTER_BINARY", os.path.join(options.path, "duarouter"))
log = open("dua-log.txt", "w+")
parser = make_parser()
reader = NetReader()
parser.setContentHandler(reader)
parser.parse(options.net)
edges = reader.getEdges()
if "." in options.costfunc:
idx = options.costfunc.rfind(".")
module = options.costfunc[:idx]
func = options.costfunc[idx+1:]
exec("from %s import %s as costFunction" % (module, func))
else:
exec("costFunction = %s" % options.costfunc)
if options.flows:
tripFiles = options.flows.split(",")
else:
tripFiles = options.trips.split(",")
starttime = datetime.now()
for step in range(options.firstStep, options.lastStep):
btimeA = datetime.now()
print "> Executing step " + str(step)
# router
files = []
for tripFile in tripFiles:
file = tripFile
tripFile = os.path.basename(tripFile)
if step>0:
file = tripFile[:tripFile.find(".")] + "_%s.rou.alt.xml" % (step-1)
output = tripFile[:tripFile.find(".")] + "_%s.rou.xml" % step
print ">> Running router with " + file
btime = datetime.now()
print ">>> Begin time: %s" % btime
writeRouteConf(step, options, file, output)
retCode = call([duaBinary, "-c", "iteration_%s.duarcfg" % step], log)
etime = datetime.now()
print ">>> End time: %s" % etime
print ">>> Duration: %s" % (etime-btime)
print "<<"
files.append(output)
# generating weights file
print ">> Generating weights"
reader = RouteReader()
parser.setContentHandler(reader)
for f in files:
parser.parse(f)
generateWeights(step, options, edges, reader, costFunction)
print "<<"
print "< Step %s ended (duration: %s)" % (step, datetime.now() - btimeA)
print "------------------\n"
sys.stdout.flush()
print "dua-iterate ended (duration: %s)" % (datetime.now() - starttime)
log.close()<|fim▁end|> | if not isinstance(command, types.StringTypes):
command = [str(c) for c in command]
print >> log, "-" * 79
print >> log, command |
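The script resolves the --cost-function option with exec(); below is a sketch of the same dotted-path lookup done with importlib instead (module and function names are hypothetical), which avoids evaluating option strings as code:

import importlib

def load_cost_function(spec, default_ns=None):
    # Resolve "package.module.func" to a callable; bare names are looked
    # up in default_ns (falling back to this module's globals).
    if "." in spec:
        module_name, func_name = spec.rsplit(".", 1)
        return getattr(importlib.import_module(module_name), func_name)
    return (default_ns or globals())[spec]

# costFunction = load_cost_function("mycosts.quadratic")  # hypothetical module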
<|file_name|>x86.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use back::target_strs;
use driver::config::cfg_os_to_meta_os;
use metadata::loader::meta_section_name;
use syntax::abi;
pub fn get_target_strs(target_triple: StrBuf, target_os: abi::Os)
-> target_strs::t {
return target_strs::t {
module_asm: "".to_strbuf(),
meta_sect_name:
meta_section_name(cfg_os_to_meta_os(target_os)).to_strbuf(),
data_layout: match target_os {
abi::OsMacos => {
"e-p:32:32:32-i1:8:8-i8:8:8-i16:16:16\
-i32:32:32-i64:32:64\
-f32:32:32-f64:32:64-v64:64:64\
-v128:128:128-a0:0:64-f80:128:128\
-n8:16:32".to_strbuf()
}
abi::OsWin32 => {
"e-p:32:32-f64:64:64-i64:64:64-f80:32:32-n8:16:32".to_strbuf()
}
abi::OsLinux => {
"e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32".to_strbuf()
}
abi::OsAndroid => {
"e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32".to_strbuf()
}
<|fim▁hole|> }
},
target_triple: target_triple,
cc_args: vec!("-m32".to_strbuf()),
};
}<|fim▁end|> | abi::OsFreebsd => {
"e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32".to_strbuf() |
<|file_name|>FlumeAvroManager.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.flume.appender;
import java.util.Properties;
import org.apache.flume.Event;
import org.apache.flume.api.RpcClient;
import org.apache.flume.api.RpcClientFactory;
import org.apache.logging.log4j.core.appender.AppenderLoggingException;
import org.apache.logging.log4j.core.appender.ManagerFactory;
/**
* Manager for FlumeAvroAppenders.
*/
public class FlumeAvroManager extends AbstractFlumeManager {
private static final int MAX_RECONNECTS = 3;
private static final int MINIMUM_TIMEOUT = 1000;
private static AvroManagerFactory factory = new AvroManagerFactory();
private final Agent[] agents;
private final int batchSize;
private final int retries;
private final int connectTimeout;
private final int requestTimeout;
private final int current = 0;
private RpcClient rpcClient = null;
/**
* Constructor
* @param name The unique name of this manager.
* @param agents An array of Agents.
* @param batchSize The number of events to include in a batch.
* @param retries The number of times to retry connecting before giving up.
* @param connectTimeout The connection timeout in ms.
* @param requestTimeout The request timeout in ms.
*
*/
protected FlumeAvroManager(final String name, final String shortName, final Agent[] agents, final int batchSize,
final int retries, final int connectTimeout, final int requestTimeout) {
super(name);
this.agents = agents;
this.batchSize = batchSize;
this.retries = retries;
this.connectTimeout = connectTimeout;
this.requestTimeout = requestTimeout;
this.rpcClient = connect(agents, retries, connectTimeout, requestTimeout);
}
/**
* Returns a FlumeAvroManager.
* @param name The name of the manager.
* @param agents The agents to use.
* @param batchSize The number of events to include in a batch.
* @param retries The number of times to retry connecting before giving up.
* @param connectTimeout The connection timeout in ms.
* @param requestTimeout The request timeout in ms.
* @return A FlumeAvroManager.
*/
public static FlumeAvroManager getManager(final String name, final Agent[] agents, int batchSize,
final int retries, final int connectTimeout, final int requestTimeout) {
if (agents == null || agents.length == 0) {
throw new IllegalArgumentException("At least one agent is required");
}
if (batchSize <= 0) {
batchSize = 1;
}
final StringBuilder sb = new StringBuilder("FlumeAvro[");
boolean first = true;
for (final Agent agent : agents) {
if (!first) {
sb.append(",");
}
sb.append(agent.getHost()).append(":").append(agent.getPort());
first = false;
}
sb.append("]");
return getManager(sb.toString(), factory,
new FactoryData(name, agents, batchSize, retries, connectTimeout, requestTimeout));
}
/**
* Returns the agents.
* @return The agent array.
*/
public Agent[] getAgents() {
return agents;
}
/**
* Returns the index of the current agent.
* @return The index for the current agent.
*/
public int getCurrent() {
return current;
}
public int getRetries() {
return retries;
}
public int getConnectTimeout() {
return connectTimeout;
}
public int getRequestTimeout() {
return requestTimeout;
}
public int getBatchSize() {
return batchSize;
}
public synchronized void send(final BatchEvent events) {
if (rpcClient == null) {
rpcClient = connect(agents, retries, connectTimeout, requestTimeout);
}
if (rpcClient != null) {
try {
LOGGER.trace("Sending batch of {} events", events.getEvents().size());
rpcClient.appendBatch(events.getEvents());
} catch (final Exception ex) {
rpcClient.close();
rpcClient = null;
final String msg = "Unable to write to " + getName() + " at " + agents[current].getHost() + ":" +
agents[current].getPort();
LOGGER.warn(msg, ex);
throw new AppenderLoggingException("No Flume agents are available");
}
} else {
final String msg = "Unable to write to " + getName() + " at " + agents[current].getHost() + ":" +
agents[current].getPort();
LOGGER.warn(msg);
throw new AppenderLoggingException("No Flume agents are available");
}
}
@Override
public synchronized void send(final Event event) {
if (rpcClient == null) {
rpcClient = connect(agents, retries, connectTimeout, requestTimeout);
}
if (rpcClient != null) {
try {
rpcClient.append(event);
} catch (final Exception ex) {
rpcClient.close();
rpcClient = null;
final String msg = "Unable to write to " + getName() + " at " + agents[current].getHost() + ":" +
agents[current].getPort();
LOGGER.warn(msg, ex);
throw new AppenderLoggingException("No Flume agents are available");
}
} else {
final String msg = "Unable to write to " + getName() + " at " + agents[current].getHost() + ":" +
agents[current].getPort();
LOGGER.warn(msg);
throw new AppenderLoggingException("No Flume agents are available");
}
}
/**
* There is a very good chance that this will always return the first agent even if it isn't available.
* @param agents The list of agents to choose from
* @return The FlumeEventAvroServer.
*/
private RpcClient connect(final Agent[] agents, int retries, final int connectTimeout, final int requestTimeout) {
try {
final Properties props = new Properties();
props.put("client.type", agents.length > 1 ? "default_failover" : "default");
int count = 1;
final StringBuilder sb = new StringBuilder();
for (final Agent agent : agents) {
if (sb.length() > 0) {
sb.append(" ");
}
final String hostName = "host" + count++;<|fim▁hole|> sb.append(hostName);
}
props.put("hosts", sb.toString());
if (batchSize > 0) {
props.put("batch-size", Integer.toString(batchSize));
}
if (retries > 1) {
if (retries > MAX_RECONNECTS) {
retries = MAX_RECONNECTS;
}
props.put("max-attempts", Integer.toString(retries * agents.length));
}
if (requestTimeout >= MINIMUM_TIMEOUT) {
props.put("request-timeout", Integer.toString(requestTimeout));
}
if (connectTimeout >= MINIMUM_TIMEOUT) {
props.put("connect-timeout", Integer.toString(connectTimeout));
}
return RpcClientFactory.getInstance(props);
} catch (final Exception ex) {
LOGGER.error("Unable to create Flume RPCClient: {}", ex.getMessage());
return null;
}
}
@Override
protected void releaseSub() {
if (rpcClient != null) {
try {
rpcClient.close();
} catch (final Exception ex) {
LOGGER.error("Attempt to close RPC client failed", ex);
}
}
rpcClient = null;
}
/**
* Factory data.
*/
private static class FactoryData {
private final String name;
private final Agent[] agents;
private final int batchSize;
private final int retries;
private final int conntectTimeout;
private final int requestTimeout;
/**
* Constructor.
* @param name The name of the Appender.
* @param agents The agents.
* @param batchSize The number of events to include in a batch.
*/
public FactoryData(final String name, final Agent[] agents, final int batchSize, final int retries,
final int connectTimeout, final int requestTimeout) {
this.name = name;
this.agents = agents;
this.batchSize = batchSize;
this.retries = retries;
this.conntectTimeout = connectTimeout;
this.requestTimeout = requestTimeout;
}
}
/**
* Avro Manager Factory.
*/
private static class AvroManagerFactory implements ManagerFactory<FlumeAvroManager, FactoryData> {
/**
* Create the FlumeAvroManager.
* @param name The name of the entity to manage.
* @param data The data required to create the entity.
* @return The FlumeAvroManager.
*/
@Override
public FlumeAvroManager createManager(final String name, final FactoryData data) {
try {
return new FlumeAvroManager(name, data.name, data.agents, data.batchSize, data.retries,
data.conntectTimeout, data.requestTimeout);
} catch (final Exception ex) {
LOGGER.error("Could not create FlumeAvroManager", ex);
}
return null;
}
}
}<|fim▁end|> | props.put("hosts." + hostName, agent.getHost() + ":" + agent.getPort()); |
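connect() above encodes the agent list into the property map understood by Flume's RpcClientFactory (client.type, hosts, and one hosts.hostN entry per agent). A small Python sketch of that assembly, using made-up host data:

def flume_props(agents, batch_size=0):
    # agents: list of (host, port) tuples
    props = {"client.type": "default_failover" if len(agents) > 1 else "default"}
    names = []
    for i, (host, port) in enumerate(agents, start=1):
        name = "host%d" % i
        props["hosts." + name] = "%s:%d" % (host, port)
        names.append(name)
    props["hosts"] = " ".join(names)
    if batch_size > 0:
        props["batch-size"] = str(batch_size)
    return props

print(flume_props([("10.0.0.1", 41414), ("10.0.0.2", 41414)], batch_size=100))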
<|file_name|>_gdb.py<|end_file_name|><|fim▁begin|>################################################################################
# Name : GDB Wrapper
# Author : Jesse Schwartzentruber & Tyson Smith
#
# Copyright 2014 BlackBerry Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
import distutils.spawn
import os
import platform
import re
import signal
import tempfile
import time
from . import _common
CLOSE_FDS = True
if platform.system() in ["Linux", "Darwin"]:
TOOL_GDB = distutils.spawn.find_executable('gdb', os.pathsep.join([os.environ['PATH'], _common.PATH_DBG]))
if platform.system() == "Linux":
TOOL_GDB_NTO = os.path.join(_common.PATH_DBG, "linux_x64-gdb-ntoarm")
TOOL_KDSRV = os.path.join(_common.PATH_DBG, "linux_x64-kdserver")
else:
TOOL_GDB_NTO = None
TOOL_KDSRV = None
elif platform.system() == "QNX":
TOOL_GDB = {"x86": os.path.join(_common.PATH_DBG, "ntox86-gdb"),
"armle": os.path.join(_common.PATH_DBG, "ntoarm-gdb"),
}[platform.processor()]
TOOL_GDB_NTO = TOOL_GDB
TOOL_KDSRV = None
assert os.access(TOOL_GDB, os.X_OK), "%s is not executable" % TOOL_GDB
elif platform.system() == "Windows":
TOOL_GDB = distutils.spawn.find_executable('gdb.exe', os.pathsep.join([os.environ['PATH'], _common.PATH_DBG]))
TOOL_GDB_NTO = os.path.join(_common.PATH_DBG, "gdb-ntoarm.exe")
TOOL_KDSRV = os.path.join(_common.PATH_DBG, "kdserver.exe")
CLOSE_FDS = False
GDB_CMDS = os.path.join(os.path.abspath(os.path.dirname(__file__)), "cmds.gdb")
# child sometimes doesn't die on SIGTERM in QNX
# wait this length of time before sending another SIGTERM, and finally SIGKILL
SLAY_TIMEOUT = 10
def _send_signal(signal, *args):
for pid in args:
if pid:
os.kill(pid, signal)
break
def _trim_disassembly(stdout):
if not stdout:
return stdout
start_loc = stdout.find("Dump of assembler code")
end_loc = stdout.find("End of assembler dump.", start_loc)
if start_loc == -1 or end_loc == -1:
return "%s\nError trimming assembler dump. start_loc = %d, end_loc = %d" % (stdout,
start_loc,
end_loc)
try:
a, b = stdout[start_loc:end_loc].split("\n=>")
except ValueError:
return "%s\nError trimming assembler dump. Could not find '=>'" % (stdout)
a = a.splitlines()
start_loc += len(a.pop(0))
return "%s\n%s\n=>%s\n%s" % (stdout[:start_loc],
"\n".join(a[-15:]),
"\n".join(b.splitlines()[:15]),
stdout[end_loc:])
def _gdb_cmd(target_exe, solib_search=None, run=True):
return [TOOL_GDB, "-nx", "-x", GDB_CMDS] + \
[i for sl in [("-ex", x) for x in
_gdb_cmd_gen(run=run, target=target_exe, solib_search=solib_search)] for i in sl] + \
["-return-child-result", "-batch", "--args"]
def run_with_gdb(target_cmd, symbols=None, solib_search=None, env=None, callback=None,
callback_args=None, timeout=_common.DEFAULT_TIMEOUT, memory_limit=None,
idle_limit=None):
"""
This function is similar to the :func:`run` function above,
except the target is executed under control of the GNU Debugger.
Symbols may be specified manually, otherwise they are expected
to be findable by GDB (usually included in the target itself).
:func:`run_with_gdb` returns a :class:`~alf.FuzzResult` instance.
If no crash was detected, the :attr:`~alf.FuzzResult.classification`
member of the :class:`~alf.FuzzResult` will be
:data:`~alf.debug.NOT_AN_EXCEPTION`.
Classifications: :data:`~alf.debug.NOT_AN_EXCEPTION`,
:data:`~alf.debug.TIMEOUT`, :data:`~alf.debug.UNKNOWN`.
Availability: Unix, Windows.
"""
classification = None
cpid = None
if platform.system() == "Windows":
_common._set_gflags(target_cmd[0])
if platform.system() == "QNX":
if not os.path.isfile("libc.so.3"):
if not os.path.isfile("/root/symbols/x86/lib/libc.so.3.sym"):
raise RuntimeError("Cannot find /root/symbols/x86/lib/libc.so.3.sym")
os.symlink("/root/symbols/x86/lib/libc.so.3.sym", "libc.so.3")
fd, temp_fn = tempfile.mkstemp(prefix="gdb", suffix=".log", dir=".")
os.close(fd)
nul = open(os.devnull, "w+")
try:
with open(temp_fn, "w+") as f:
if env is None:
env = dict(os.environ)
env["LIBC_FATAL_STDERR_"] = "1"
p = _common.subprocess.Popen(_gdb_cmd(target_cmd[0], solib_search) + target_cmd,
close_fds=CLOSE_FDS, stdout=f, stderr=f, stdin=nul,
creationflags=_common.POPEN_FLAGS, env=env)
try:
with open(temp_fn) as fr:
while p.poll() is None:
line = fr.readline()
m = re.match(r"^\*\s+1\s+Thread\s+\w+\s+\(LWP\s+(?P<pid>[0-9]+)\)", line)
if m is None:
m = re.match(r"^\*\s+1\s+(pid|process|Thread)\s+(?P<pid>[0-9]+)", line)
if m:
cpid = int(m.group("pid"))
break
cb_res = _common._call_callback(callback, callback_args, p.pid)
if cb_res == _common.CB_ERROR:
raise RuntimeError("callback() returned error")
target_mon = _common.TargetMonitor(cpid, idle_limit=idle_limit,
memory_limit=memory_limit, time_limit=timeout)
while p.poll() is None:
if target_mon.check_memory():
classification = _common.EXCESS_MEMORY_USAGE
break
if target_mon.check_idle():
break
if target_mon.check_timeout():
classification = _common.TIMEOUT
break
time.sleep(0.01)
finally:
while p.poll() is None:
try:
if platform.system() == "QNX":
attempt = -1
sigs = [signal.SIGTERM, signal.SIGKILL]
while p.poll() is None:
attempt += 1
assert attempt < len(sigs), "Failed to kill child process"
_send_signal(sigs[attempt], cpid, p.pid)
kill_time = _common.prof_timer()
while _common.prof_timer() - kill_time < SLAY_TIMEOUT:
if p.poll() is not None:
break
time.sleep(0.25)
elif platform.system() == "Windows":
_send_signal(signal.CTRL_BREAK_EVENT, cpid, p.pid)
else:
_send_signal(signal.SIGTERM, cpid, p.pid)
except OSError:
pass
exit_code = p.wait()
f.seek(0, os.SEEK_SET)
stdout = f.read()
finally:
_common.delete(temp_fn)
nul.close()
m = re.search(r"Traceback \(\D+\):.+Python command:", stdout, re.DOTALL)
if m:
tb = m.group(0)
tb = tb[:tb.rfind("\n")]
if not tb.endswith("No threads running"):
raise RuntimeError("GDB Python Failure\n\n%s" % tb)
else:
return _common.FuzzResult(_common.NOT_AN_EXCEPTION, stdout)
backtrace, debug_classification = _process_gdb_output(stdout)
if cb_res == _common.CB_HANG:
classification = _common.TIMEOUT
elif classification is None:
if cb_res == _common.CB_FAIL:
classification = _common.UNKNOWN
else:
classification = debug_classification
stdout = _trim_disassembly(stdout)
stdout = _common._limit_output_length(stdout)
return _common.FuzzResult(classification, stdout, backtrace, exit_code)
def _symbolize(target, output, tool, exp_opt):
fd, tmp_log = tempfile.mkstemp(prefix="%s_log" % tool, suffix=".txt", dir=".")
try:
os.write(fd, output)
finally:
os.close(fd)
try:
result = _common.run([TOOL_GDB, "-batch", "-nx",
"-ex", "set python print-stack full",
"-ex", "py import exploitable",
"-ex", "exploitable -m %s %s" % (exp_opt, tmp_log),
"-ex", "quit", target], timeout=180)
finally:
_common.delete(tmp_log)
if result.classification == _common.TIMEOUT:
raise RuntimeError("Timed out while processing %s output:\n%s" % (tool, output))
result.backtrace, result.classification = _process_gdb_output(result.text)
result.text = _common._limit_output_length(result.text)
if result.classification == _common.NOT_AN_EXCEPTION:
raise RuntimeError("Failed to process %s output:\n%s" % (tool, output))
return result
def symbolize_valgrind(target, valgrind_output):
"""
Creates a :class:`~alf.FuzzResult` with classification by analyzing the log
generated by Valgrind/Memcheck.
"""
return _symbolize(target, valgrind_output, "valgrind", "-vg")
def symbolize_asan(target, asan_output):
"""
Creates a :class:`~alf.FuzzResult` with classification by analyzing the log
generated by AddressSanitizer.
The result.text includes asan_output, but symbolized if possible.
"""
return _symbolize(target, asan_output, "asan", "-a")
def _gdb_core_debug(symbols, ucore=None, kcore=None, remote=None, solib_search=None):
assert TOOL_GDB_NTO, "GDB targetting NTO not available for this platform"
if kcore:
assert TOOL_KDSRV, "kdserver not available for this platform"
assert len([x for x in [ucore, kcore, remote] if x is not None]) == 1, "Must specify exactly one core file"
with tempfile.TemporaryFile() as f:
gdb_cmd = [TOOL_GDB_NTO, "-nx", "-x", GDB_CMDS, symbols]
if ucore is not None:
gdb_cmd.append(ucore)
gdb = _common.subprocess.Popen(gdb_cmd, stdout=f, stderr=f, stdin=_common.subprocess.PIPE)
if kcore is not None:
gdb.stdin.write("target remote |%s %s\n" % (TOOL_KDSRV, kcore.replace("\\", "\\\\")))
elif remote is not None:
gdb.stdin.write("target remote %s\n" % remote)
core = ucore or kcore
for c in _gdb_cmd_gen(core=core, solib_search=solib_search, detach=not core):
gdb.stdin.write("%s\n" % c)
gdb_wait_st = _common.prof_timer()
while gdb.poll() is None and (_common.prof_timer() - gdb_wait_st) < 20:
time.sleep(0.1)
if gdb.poll() is None:
gdb.terminate()
gdb.wait()
f.seek(0)
gdb_out = f.read()
trim = gdb_out.find(r'$1 = "TRIM"')
if trim != -1:
gdb_out = "\n".join([l for l in gdb_out[:trim].splitlines()[:-1] if not l.startswith("#0")] +
gdb_out[trim:].splitlines()[1:] + [""])
bt, cls = _process_gdb_output(gdb_out)
gdb_out = _trim_disassembly(gdb_out)
return _common.FuzzResult(cls, gdb_out, bt)
def _gdb_cmd_gen(core=False, run=False, use_rcheck=False,
solib_search=None, target=None, detach=False, follow_child=False):
# static cmds, sequence definitions, or conditional cmds (if, while, etc.) must go in cmds.gdb
if follow_child:
yield "set follow-fork-mode child"
if run and use_rcheck:
yield "set environment LD_PRELOAD librcheck.so"
# Suppress prints from librcheck
yield "set environment MALLOC_FILE /dev/null"
# memory tracing on start. If memory tracing is disabled, errors can't report allocation/deallocation backtraces for memory chunk involved in error condition.
yield "set environment MALLOC_START_TRACING 0"
# Start control thread, and allows the IDE to send commands to the application (can't use if process forks).
yield "set environment MALLOC_CTHREAD 0"
# Check for out of bounds errors on every allocation/deallocation.
yield "set environment MALLOC_CKBOUNDS 0"
# Check strings and memory functions for errors.
yield "set environment MALLOC_CKACCESS 0"
# Check free and alloc functions for errors.
yield "set environment MALLOC_CKALLOC 0"
# Set error action behavior, 1-abort, 2 - exit (no core), 3 - dump core
yield "set environment MALLOC_ACTION 0"
# Enable dumping leaks on exit
yield "set environment MALLOC_DUMP_LEAKS 0" # TODO: This causes a trace back when mem leaks are caught
# Set to 0 to disable optimization. The default is 32
yield "set environment MALLOC_USE_CACHE 0"
# Check the allocator chain integrity on every allocation/deallocation (very expensive).
yield "set environment MALLOC_CKCHAIN 0"
if solib_search:
yield "set solib-search-path %s" % solib_search
if core:
# put in a trim marker, because GDB runs "backtrace 1 full" when loading a core file
yield "print \"TRIM\""
yield "info program"
yield "monitor kprintf"
elif run:
yield "set environment ASAN_OPTIONS abort_on_error=1 handle_segv=0 strict_memcmp=0 alloc_dealloc_mismatch=0 check_malloc_usable_size=0"
yield "start"
# need the pid to be able to kill it
yield "info threads"
# continue running
yield "continue"
yield "symbol-file"
if target is None:
raise RuntimeError("Missing target")
yield "symbol-file %s" % target
yield "sharedlibrary"
yield "info proc mappings" # Linux only?
yield "info meminfo" # QNX, does it work on core files?
yield "info threads"
# try to load symbols for any shared libs that were dynamically loaded
yield "shared"
# print library info so we know if symbols are missing
yield "info sharedlibrary"
yield "backtrace full"
yield "exploitable -m"
yield "info locals"
yield "info registers"
yield "disassemble"
if detach:
yield "detach"
if platform.system() == "Windows":
if core:
yield "quit $_exitcode"
else:
yield "init-if-undefined $_exitcode = -1"
# this doesn't work in the hang case
#yield "while $_exitcode == -1"
#yield "continue"
#yield "end"
yield "quit $_exitcode"
else:
yield "quit_with_code"
_RE_GDB_OUTPUT = re.compile(r"""(?x) # verbose
^(It\ stopped\ with|Program\ received)\ signal
\ (?P<signame>SIG[A-Z]+), |
^Program\ terminated\ with\ signal
\ (?P<signum>[0-9]+), |
^\s+(?P<mapstart>0x[A-Fa-f0-9]+)\s+
(?P<mapend>0x[A-Fa-f0-9]+)\s+
(?P<mapsize>0x[A-Fa-f0-9]+)\s+
(?P<mapoffset>0x[A-Fa-f0-9]+)\s+
(?P<mapimage>.*)$ |
^\#[0-9]+\s+(?P<addr1>0x[A-Fa-f0-9]+)?\s*
<(?P<image1>[A-Za-z0-9_\.-]+)!
(?P<symbol1>[A-Za-z0-9_:]+)(\([^\+]+\))?\+?
(?P<offset1>[0-9]+)?>\s+\( |
^\#[0-9]+\s+(?P<addr2>0x[A-Fa-f0-9]+)\s+\(\) |
^\#[0-9]+\s+(?P<addr3>0x[A-Fa-f0-9]+)?(\s+in)?\s+
(?P<symbol3>[A-Za-z0-9_:?]+)\s+\(.*?\)\s+
(from|at)\s+(?P<image3>[A-Za-z0-9_\./-]+):?
(?P<offset3>[0-9]+)?$ |
^\#[0-9]+\s+(?P<addr4>0x[A-Fa-f0-9]+)?(\s+in)?\s+
(?P<symbol4>[A-Za-z0-9_:?]+)""", re.MULTILINE)
def _process_gdb_output(stdout):
# try parsing for CERT exploitable output first
backtrace, classification, _ = _common.process_exploitable_output(stdout)
if classification != _common.NOT_AN_EXCEPTION or backtrace:
return (backtrace, classification)
# CERT exploitable failed...
classification = _common.NOT_AN_EXCEPTION
backtrace = []
maps = {}
for m in _RE_GDB_OUTPUT.finditer(stdout):
sig = None
if m.group("signame"):
sig = m.group("signame")
elif m.group("signum"):
sig = int(m.group("signum"))
elif m.group("symbol1"):
addr = m.group("addr1")
image = m.group("image1")
symbol = m.group("symbol1")
offset = m.group("offset1")
elif m.group("addr2"):
addr = m.group("addr2")
image = symbol = offset = None
elif m.group("symbol3"):
addr = m.group("addr3")
image = m.group("image3")
symbol = m.group("symbol3")
offset = m.group("offset3")
if symbol == "??":
symbol = "Unknown"
if image:
image = os.path.basename(image)
elif m.group("symbol4"):
addr = m.group("addr4")
symbol = m.group("symbol4")
image = offset = None<|fim▁hole|> continue
if sig is not None:
if sig in [8, "SIGFPE"]:
classification = _common.PROBABLY_NOT_EXPLOITABLE
elif sig not in [2, "SIGINT"]:
classification = _common.UNKNOWN
else:
if addr is not None:
addr = int(addr, 16)
if offset is not None:
offset = int(offset)
backtrace.append((addr, image, symbol, offset))
real_bt = []
for (addr, image, symbol, offset) in backtrace:
if addr is not None:
# try to find a map matching this address
for (m_start, m_end), m_image in maps.items():
if (addr >= m_start) and (addr < m_end):
rel_addr = addr - m_start
#log.debug("got rel_addr of %s+0x%08X for symbol %s", m_image, rel_addr, symbol)
if image is None:
image = os.path.basename(m_image)
if offset is None:
offset = rel_addr
break
real_bt.append(_common.LSO((image, symbol, offset)))
return (real_bt, classification)<|fim▁end|> | elif m.group("mapstart"):
maps[(int(m.group("mapstart"), 16), int(m.group("mapend"), 16))] = m.group("mapimage") |
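When a backtrace frame carries an absolute address but no image name, _process_gdb_output falls back on the collected memory maps to recover the image and a relative offset. A self-contained sketch of that lookup:

def resolve(addr, maps):
    # maps: {(start, end): image_path}, as built from "info proc mappings"
    for (start, end), image in maps.items():
        if start <= addr < end:
            return image, addr - start
    return None, None

maps = {(0x7F0000000000, 0x7F0000100000): "libfoo.so"}  # fake mapping
print(resolve(0x7F0000000ABC, maps))  # ('libfoo.so', 2748)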
<|file_name|>event-target.js<|end_file_name|><|fim▁begin|>'use strict';
/**
* Class representing an event.
*
* @private
*/
class Event {
/**
* Create a new `Event`.
*
* @param {String} type The name of the event
* @param {Object} target A reference to the target to which the event was dispatched
*/
constructor(type, target) {
this.target = target;
this.type = type;
}
}
/**
* Class representing a message event.
*
* @extends Event
* @private
*/
class MessageEvent extends Event {
/**
* Create a new `MessageEvent`.
*
* @param {(String|Buffer|ArrayBuffer|Buffer[])} data The received data
* @param {WebSocket} target A reference to the target to which the event was dispatched
*/
constructor(data, target) {
super('message', target);
this.data = data;
}
}
/**
* Class representing a close event.
*
* @extends Event
* @private
*/
class CloseEvent extends Event {
/**
* Create a new `CloseEvent`.
*
* @param {Number} code The status code explaining why the connection is being closed
* @param {String} reason A human-readable string explaining why the connection is closing
* @param {WebSocket} target A reference to the target to which the event was dispatched
*/
constructor(code, reason, target) {
super('close', target);
this.wasClean = target._closeFrameReceived && target._closeFrameSent;
this.reason = reason;
this.code = code;
}
}
/**
* Class representing an open event.
*
* @extends Event
* @private
*/
class OpenEvent extends Event {
/**
* Create a new `OpenEvent`.
*
* @param {WebSocket} target A reference to the target to which the event was dispatched
*/
constructor(target) {
super('open', target);
}
}
/**
* Class representing an error event.
*
* @extends Event
* @private
*/
class ErrorEvent extends Event {
/**
* Create a new `ErrorEvent`.
*
* @param {Object} error The error that generated this event
* @param {WebSocket} target A reference to the target to which the event was dispatched
*/
constructor(error, target) {
super('error', target);
this.message = error.message;
this.error = error;
}
}
/**
* This provides methods for emulating the `EventTarget` interface. It's not
* meant to be used directly.
*
* @mixin
*/
const EventTarget = {
/**
* Register an event listener.
*
* @param {String} method A string representing the event type to listen for
* @param {Function} listener The listener to add
* @public
*/
addEventListener(method, listener) {
if (typeof listener !== 'function') return;
function onMessage(data) {
listener.call(this, new MessageEvent(data, this));
}
function onClose(code, message) {
listener.call(this, new CloseEvent(code, message, this));
}
function onError(error) {
listener.call(this, new ErrorEvent(error, this));
}
function onOpen() {
listener.call(this, new OpenEvent(this));
}
if (method === 'message') {
onMessage._listener = listener;
this.on(method, onMessage);
} else if (method === 'close') {
onClose._listener = listener;
this.on(method, onClose);
} else if (method === 'error') {
onError._listener = listener;
this.on(method, onError);
} else if (method === 'open') {
onOpen._listener = listener;
this.on(method, onOpen);
} else {
this.on(method, listener);
}<|fim▁hole|> * Remove an event listener.
*
* @param {String} method A string representing the event type to remove
* @param {Function} listener The listener to remove
* @public
*/
removeEventListener(method, listener) {
const listeners = this.listeners(method);
for (var i = 0; i < listeners.length; i++) {
if (listeners[i] === listener || listeners[i]._listener === listener) {
this.removeListener(method, listeners[i]);
}
}
}
};
module.exports = EventTarget;<|fim▁end|> | },
/** |
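addEventListener above adapts DOM-style listeners onto Node's EventEmitter by wrapping each listener and tagging the wrapper with _listener, so removeEventListener can later match either the wrapper or the original. The same tag-the-wrapper idea as a Python sketch:

class Event:
    def __init__(self, type_, data):
        self.type = type_
        self.data = data

class Emitter:
    def __init__(self):
        self._listeners = {}  # event name -> list of wrapper callables

    def add(self, name, listener):
        def wrapper(*args):
            listener(Event(name, args))  # adapt raw args into an event object
        wrapper._listener = listener     # remember the original for removal
        self._listeners.setdefault(name, []).append(wrapper)

    def remove(self, name, listener):
        self._listeners[name] = [
            w for w in self._listeners.get(name, [])
            if w is not listener and getattr(w, "_listener", None) is not listener
        ]

em = Emitter()
cb = lambda ev: print(ev.type, ev.data)
em.add("message", cb)
em.remove("message", cb)  # matches via the _listener tag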
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use blog::Post;<|fim▁hole|>
post.add_text("I ate a salad for lunch today");
assert_eq!("", post.content());
post.request_review();
assert_eq!("", post.content());
post.approve();
assert_eq!("I ate a salad for lunch today", post.content());
}<|fim▁end|> |
fn main() {
let mut post = Post::new(); |
<|file_name|>router.ts<|end_file_name|><|fim▁begin|>import * as Router from "universal-router";
import routes from "~/routes";
export default new Router(routes, {
resolveRoute(context, params) {
let { route } = context;
// if (typeof route.load === 'function') {
// return route.load().then(action => action.default(context, params));
// }
// getInitialProps runs only once, for the initial props load, and never again
if (typeof route.getInitialProps === "function" && !route.props) {<|fim▁hole|> // make sure that when server-side props exist, getInitialProps is not invoked again from the client side
let props =
context.state && context.state.route && context.state.route[route.path];
let state = objectWithoutKey(context.state, "route");
const serverProps = { props, state };
return route.getInitialProps(serverProps).then(props => {
route.props = context.props = props || {};
return route.action(context, params);
});
}
if (typeof route.action === "function") {
context.props = route.props;
return route.action(context, params);
}
return null;
}
});
const objectWithoutKey = (object, key) => {
return Object.keys(object).reduce((result, propName) => {
if (propName !== key) {
result[propName] = object[propName];
}
return result;
}, {});
};<|fim▁end|> | //return new Promise((resolve, reject) => { |
<|file_name|>example_automl_nl_text_sentiment.py<|end_file_name|><|fim▁begin|>#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example Airflow DAG that uses Google AutoML services.
"""
import os
from datetime import datetime
from airflow import models
from airflow.providers.google.cloud.hooks.automl import CloudAutoMLHook
from airflow.providers.google.cloud.operators.automl import (
AutoMLCreateDatasetOperator,
AutoMLDeleteDatasetOperator,
AutoMLDeleteModelOperator,
AutoMLImportDataOperator,
AutoMLTrainModelOperator,
)
GCP_PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "your-project-id")
GCP_AUTOML_LOCATION = os.environ.get("GCP_AUTOML_LOCATION", "us-central1")
GCP_AUTOML_SENTIMENT_BUCKET = os.environ.get("GCP_AUTOML_SENTIMENT_BUCKET", "gs://INVALID BUCKET NAME")
# Example values
DATASET_ID = ""
# Example model
MODEL = {
"display_name": "auto_model_1",
"dataset_id": DATASET_ID,
"text_sentiment_model_metadata": {},
}
# Example dataset
DATASET = {
"display_name": "test_text_sentiment_dataset",
"text_sentiment_dataset_metadata": {"sentiment_max": 10},
}
IMPORT_INPUT_CONFIG = {"gcs_source": {"input_uris": [GCP_AUTOML_SENTIMENT_BUCKET]}}
extract_object_id = CloudAutoMLHook.extract_object_id
# Example DAG for AutoML Natural Language Text Sentiment
with models.DAG(
"example_automl_text_sentiment",
schedule_interval=None, # Override to match your needs
start_date=datetime(2021, 1, 1),
catchup=False,<|fim▁hole|> task_id="create_dataset_task", dataset=DATASET, location=GCP_AUTOML_LOCATION
)
dataset_id = create_dataset_task.output['dataset_id']
import_dataset_task = AutoMLImportDataOperator(
task_id="import_dataset_task",
dataset_id=dataset_id,
location=GCP_AUTOML_LOCATION,
input_config=IMPORT_INPUT_CONFIG,
)
MODEL["dataset_id"] = dataset_id
create_model = AutoMLTrainModelOperator(task_id="create_model", model=MODEL, location=GCP_AUTOML_LOCATION)
model_id = create_model.output['model_id']
delete_model_task = AutoMLDeleteModelOperator(
task_id="delete_model_task",
model_id=model_id,
location=GCP_AUTOML_LOCATION,
project_id=GCP_PROJECT_ID,
)
delete_datasets_task = AutoMLDeleteDatasetOperator(
task_id="delete_datasets_task",
dataset_id=dataset_id,
location=GCP_AUTOML_LOCATION,
project_id=GCP_PROJECT_ID,
)
import_dataset_task >> create_model
delete_model_task >> delete_datasets_task
# Task dependencies created via `XComArgs`:
# create_dataset_task >> import_dataset_task
# create_dataset_task >> create_model
# create_model >> delete_model_task
# create_dataset_task >> delete_datasets_task<|fim▁end|> | user_defined_macros={"extract_object_id": extract_object_id},
tags=['example'],
) as example_dag:
create_dataset_task = AutoMLCreateDatasetOperator( |
<|file_name|>powersource.py<|end_file_name|><|fim▁begin|>from BinPy.Gates import *
class PowerSource:
"""
Models a Power Source from which various connectors can tap by connecting to it.
taps: The list of all connectors connected to this power source.
connect(): Takes in one or more connectors as input and connects them to the power source.
disconnect(): Takes in one or more connectors as input and disconnects them from the power source.
"""<|fim▁hole|> self.taps = []
def connect(self, *connectors):
"""Takes in one or more connectors as an input and taps to the power source."""
for connector in connectors:
if not isinstance(connector, Connector):
raise Exception("Error: Input given is not a connector")
else:
if len(connector.connections['output']) != 0:
raise Exception(
"ERROR: The connector is already an output of some other object")
self.taps.append(connector)
connector.state = 1
connector.tap(self, 'output')
connector.trigger()
def disconnect(self, *connectors):
"""
Takes in one or more connectors as an input and disconnects them from the power source.
A floating connector has a value of None.
A message is printed if a specified connector is not already tapping from this source.
"""
for connector in connectors:
if isinstance(connector, Connector):
try:
self.taps.remove(connector)
connector.state = None
connector.connections['output'].remove(self)
connector.trigger()
except:
print (
"The specified connector is not tapped to this power source")
else:
raise Exception("Error: Input given is not a connector")<|fim▁end|> |
def __init__(self): |
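A usage sketch, assuming BinPy's Connector behaves as the docstrings above describe (a floating connector reads None):

from BinPy.Gates import Connector

psu = PowerSource()
a = Connector()
psu.connect(a)      # a.state is now 1 and a is listed in psu.taps
psu.disconnect(a)   # a.state is back to None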
<|file_name|>wifi.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from datetime import datetime, date
from optionaldict import optionaldict
from wechatpy.client.api.base import BaseWeChatAPI
class WeChatWiFi(BaseWeChatAPI):
API_BASE_URL = 'https://api.weixin.qq.com/bizwifi/'
def list_shops(self, page_index=1, page_size=20):
"""
Fetch the shop list.
For details see
http://mp.weixin.qq.com/wiki/15/bcfb5d4578ea818b89913472cf2bbf8f.html
:param page_index: optional, page index, defaults to 1
:param page_size: optional, number of items per page, default 20, maximum 20
:return: the JSON payload returned by the API
"""
res = self._post(
'shop/list',
data={
'pageindex': page_index,
'pagesize': page_size,
}
)
return res['data']
def add_device(self, shop_id, ssid, password, bssid):
"""
Add a device.
For details see
http://mp.weixin.qq.com/wiki/10/6232005bdc497f7cf8e19d4e843c70d2.html
:param shop_id: shop ID
:param ssid: SSID of the wireless device. For unverified official accounts the SSID
must start with "WX" (uppercase letters); verified accounts and third-party platforms
have no such restriction. No SSID may contain Chinese characters.
:param password: password of the wireless device, more than 8 characters, no Chinese characters
:param bssid: wireless MAC address of the device, colon-separated, 17 characters long, letters in lowercase
:return: the JSON payload returned by the API
"""
return self._post(
'device/add',
data={
'shop_id': shop_id,
'ssid': ssid,
'password': password,
'bssid': bssid,
}
)
def list_devices(self, shop_id=None, page_index=1, page_size=20):
"""
List devices.
For details see
http://mp.weixin.qq.com/wiki/10/6232005bdc497f7cf8e19d4e843c70d2.html
:param shop_id: optional, shop ID
:param page_index: optional, page index, defaults to 1
:param page_size: optional, number of items per page, default 20, maximum 20
:return: the JSON payload returned by the API
"""
data = optionaldict(
shop_id=shop_id,
pageindex=page_index,
pagesize=page_size
)
res = self._post('device/list', data=data)
return res['data']
def delete_device(self, bssid):
"""
Delete a device.
For details see
http://mp.weixin.qq.com/wiki/10/6232005bdc497f7cf8e19d4e843c70d2.html
:param bssid: wireless MAC address of the device, colon-separated, 17 characters long, letters in lowercase
:return: the JSON payload returned by the API
"""
return self._post('device/delete', data={'bssid': bssid})
def get_qrcode_url(self, shop_id, img_id):
"""
Get the URL of the promotional QR code image.
For details see
http://mp.weixin.qq.com/wiki/7/fcd0378ef00617fc276be2b3baa80973.html
:param shop_id: shop ID
:param img_id: material style number: 0 - bare QR code, for freely designed promotional material;
1 - table sticker (QR code), 100mm x 100mm (width x height), ready to be posted as-is
:return: URL of the QR code image
"""
res = self._post(
'qrcode/get',
data={
'shop_id': shop_id,
'img_id': img_id,
}
)
return res['data']['qrcode_url']
def set_homepage(self, shop_id, template_id, url=None):
"""
Set the merchant homepage.
For details see
http://mp.weixin.qq.com/wiki/6/2732f3cf83947e0e4971aa8797ee9d6a.html
:param shop_id: shop ID
:param template_id: template ID, 0 - default template, 1 - custom URL
:param url: custom link, required when template_id is 1
:return: the JSON payload returned by the API
"""
data = {
'shop_id': shop_id,
'template_id': template_id,
}
if url:
data['struct'] = {'url': url}
return self._post('homepage/set', data=data)
def get_homepage(self, shop_id):
"""
Get the merchant homepage.
For details see
http://mp.weixin.qq.com/wiki/6/2732f3cf83947e0e4971aa8797ee9d6a.html<|fim▁hole|> """
res = self._post('homepage/get', data={'shop_id': shop_id})
return res['data']
def list_statistics(self, begin_date, end_date, shop_id=-1):
"""
Wi-Fi usage statistics.
For details see
http://mp.weixin.qq.com/wiki/8/dfa2b756b66fca5d9b1211bc18812698.html
:param begin_date: start date; the time span may be at most 30 days
:param end_date: end date timestamp; the time span may be at most 30 days
:param shop_id: optional, shop ID to filter by; -1 for the aggregate statistics
:return: the JSON payload returned by the API
"""
if isinstance(begin_date, (datetime, date)):
begin_date = begin_date.strftime('%Y-%m-%d')
if isinstance(end_date, (datetime, date)):
end_date = end_date.strftime('%Y-%m-%d')
res = self._post(
'statistics/list',
data={
'begin_date': begin_date,
'end_date': end_date,
'shop_id': shop_id
}
)
return res['data']<|fim▁end|> |
:param shop_id: shop ID
:return: the JSON payload returned by the API
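A usage sketch, assuming this class is mounted on a wechatpy client as client.wifi (as wechatpy does for its API modules) and that real credentials replace the placeholders:

from wechatpy import WeChatClient

client = WeChatClient('appid', 'secret')
shops = client.wifi.list_shops(page_index=1, page_size=20)
# The exact shape of `shops` follows the bizwifi API's JSON payload;
# the 'records' field name is taken from the WeChat docs, not verified here.
for record in shops.get('records', []):
    print(record)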
<|file_name|>angular-multi-select-tree-0.0.1.min.js<|end_file_name|><|fim▁begin|>!
function() {
"use strict";
angular.module("multiselect-searchtree", [])
} (),
function() {
"use strict";
var a = angular.module("multiselect-searchtree");
a.controller("multiSelectTreeCtrl", ["$scope", "$document",
function(a, b) {
function d() {
e(),
a.$apply()
}
function e() {
a.showTree = !1,
c && (c.isActive = !1, c = void 0),
b.off("click", d)
}
function f(b) {
for (var c = 0,
d = b.length; c < d; c++) g(b[c]) || b[c].selected !== !0 ? g(b[c]) && b[c].selected === !1 && (b[c].selected = !0) : a.selectedItems.push(b[c]),
b[c] && b[c].children && f(b[c].children)
}
function g(b) {
var c = !1;
if (a.selectedItems){
for (var d = 0; d < a.selectedItems.length; d++){
if (a.selectedItems[d].name === b.name) {
c = !0;
break
}
}
}
return c
}
var c;
a.showTree = !1,
a.selectedItems = [],
a.multiSelect = a.multiSelect || !1,
a.onActiveItem = function(a) {
c !== a && (c && (c.isActive = !1), c = a, c.isActive = !0)
},
a.refreshOutputModel = function() {
a.outputModel = [];
var tmp = angular.copy(a.selectedItems);
angular.forEach(tmp,function(v){
v.children.length === 0 && v.selected && a.outputModel.push(v.name)
})
},
a.refreshSelectedItems = function() {
a.selectedItems = [],
a.inputModel && f(a.inputModel)
},
a.deselectItem = function(b, c) {
c.stopPropagation(),
a.selectedItems.splice(a.selectedItems.indexOf(b), 1),
b.selected = !1,
this.refreshOutputModel()
},
a.onControlClicked = function(c) {
c.stopPropagation(),
a.showTree = !a.showTree,
a.showTree && b.on("click", d)
},
a.onFilterClicked = function(a) {
a.stopPropagation()
},
a.clearFilter = function(b) {
b.stopPropagation(),
a.filterKeyword = ""
},
a.canSelectItem = function(b) {
return a.callback({
item: b,
selectedItems: a.selectedItems
})
},
a.itemSelected = function(b) {
function f(c) {
c.forEach(function(c) {
b.selected && !c.selected ? a.selectedItems.push(c) : !b.selected && c.selected && a.selectedItems.splice(a.selectedItems.indexOf(c), 1),
c.selected = b.selected,
c.children && c.children.length > 0 && f(c.children)
})
}
if (! (a.useCallback && a.canSelectItem(b) === !1 || a.selectOnlyLeafs && b.children && b.children.length > 0)) {
if (a.multiSelect) {
var d = a.selectedItems.indexOf(b);
if(g(b)){
b.selected = !1, a.selectedItems.splice(d, 1)
}else{
b.selected = !0
a.selectedItems.push(b)
}
b.children && b.children.length > 0 && f(b.children)
} else {
e();
for (var c = 0; c < a.selectedItems.length; c++) a.selectedItems[c].selected = !1;
b.selected = !0,
a.selectedItems = [],
a.selectedItems.push(b)
}
this.refreshOutputModel()
}
},
a.onSelectAll = function(a, b) {
function c(a) {
a.selected = !0,
a.children.length > 0 && a.children.forEach(function(a) {
c(a)
})
}
a.forEach(function(a) {
c(a)
}),
this.refreshSelectedItems(),
this.refreshOutputModel()
},
a.onClearAll = function(a, b) {
function c(a) {
a.selected = !1,
a.children.length > 0 && a.children.forEach(function(a) {
c(a)
})
}
a.forEach(function(a) {
c(a)
}),
this.refreshSelectedItems(),
this.refreshOutputModel()
}
}]),
a.directive("multiselectSearchtree",
function() {
return {
restrict: "E",<|fim▁hole|> inputModel: "=",
outputModel: "=?",
multiSelect: "=?",
selectOnlyLeafs: "=?",
callback: "&",
defaultLabel: "@",
extraButtons: "=?",
directSearch: "=?"
},
link: function(a, b, c) {
function d(a, b) {
for (var c = e(a, []), d = 0, f = c.length; d < f; d++) if (c[d].name.toLowerCase().indexOf(b.toLowerCase()) !== -1) return ! 1;
return ! 0
}
function e(a, b) {
for (var c = 0; c < a.children.length; c++) b.push(a.children[c]),
e(a.children[c], b);
return b
}
function f(a) {
a.isFiltered = !1,
void 0 != a.p && f(a.p)
}
function g(b, c) {
b.name.toLowerCase().indexOf(a.filterKeyword.toLowerCase()) !== -1 ? f(b) : b.isFiltered = !0,
b.children.length > 0 && angular.forEach(b.children,
function(a) {
a.p = b,
g(a)
})
}
function h(b) {
b.name.toLowerCase().indexOf(a.filterKeyword.toLowerCase()) !== -1 ? b.isFiltered = !1 : d(b, a.filterKeyword) ? b.isFiltered = !0 : b.isFiltered = !1
}
c.callback && (a.useCallback = !0),
a.extraButtons && (a.clearSearchIconStyle = {
right: "210px"
}),
a.$watch("inputModel",
function(b) {
b && (a.refreshSelectedItems(), a.refreshOutputModel())
}),
a.$watch("filterKeyword",
function() {
void 0 !== a.filterKeyword && (a.directSearch ? angular.forEach(a.inputModel,
function(a) {
g(a)
}) : angular.forEach(a.inputModel,
function(a) {
h(a)
}))
})
},
controller: "multiSelectTreeCtrl"
}
})
} (),
function() {
"use strict";
var a = angular.module("multiselect-searchtree");
a.controller("treeItemCtrl", ["$scope",
function(a) {
a.item.isExpanded !== !1 && a.item.isExpanded !== !0 && (a.item.isExpanded = !1),
a.showExpand = function(a) {
return a.children && a.children.length > 0
},
a.onExpandClicked = function(a, b) {
b.stopPropagation(),
a.isExpanded = !a.isExpanded
},
a.clickSelectItem = function(b, c) {
c.stopPropagation(),
a.itemSelected && a.itemSelected({
item: b
})
},
a.subItemSelected = function(b, c) {
a.itemSelected && a.itemSelected({
item: b
})
},
a.activeSubItem = function(b, c) {
a.onActiveItem && a.onActiveItem({
item: b
})
},
a.onMouseOver = function(b, c) {
c.stopPropagation(),
a.onActiveItem && a.onActiveItem({
item: b
})
},
a.showCheckbox = function() {
return !! a.multiSelect && (!a.selectOnlyLeafs && (a.useCallback ? a.canSelectItem(a.item) : void 0))
}
}]),
a.directive("treeItem", ["$compile",
function(a) {
return {
restrict: "E",
templateUrl: "src/tree-item.tpl.html",
scope: {
item: "=",
itemSelected: "&",
onActiveItem: "&",
multiSelect: "=?",
selectOnlyLeafs: "=?",
isActive: "=",
useCallback: "=",
canSelectItem: "="
},
controller: "treeItemCtrl",
compile: function(b, c, d) {
angular.isFunction(d) && (d = {
post: d
});
var f, e = b.contents().remove();
return {
pre: d && d.pre ? d.pre: null,
post: function(b, c, g) {
f || (f = a(e)),
f(b,
function(a) {
c.append(a)
}),
d && d.post && d.post.apply(null, arguments)
}
}
}
}
}])
} ();<|fim▁end|> | templateUrl: "src/multiselect-searchtree.tpl.html",
scope: { |
<|file_name|>blockRenderMap.js<|end_file_name|><|fim▁begin|>import { Map } from 'immutable';
const BLOCK_MAP = Map({
'header-one': {
element: 'h1',
},
'header-two': {
element: 'h2',
},
'header-three': {
element: 'h3',
},
'header-four': {
element: 'h4',
},
'header-five': {
element: 'h5',
},
'header-six': {
element: 'h6',
},
blockquote: {
element: 'blockquote',
},
'code-block': {
element: 'pre',
},
atomic: {
element: 'figure',
},
'unordered-list-item': {
element: 'li',
},
'ordered-list-item': {
element: 'li',
},<|fim▁hole|>
export default BLOCK_MAP;<|fim▁end|> | unstyled: {
element: 'div',
},
}); |
<|file_name|>htmlmapelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::HTMLMapElementBinding;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::root::DomRoot;
use crate::dom::document::Document;
use crate::dom::htmlareaelement::HTMLAreaElement;
use crate::dom::htmlelement::HTMLElement;
use crate::dom::node::Node;
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
#[dom_struct]
pub struct HTMLMapElement {
htmlelement: HTMLElement,
}
impl HTMLMapElement {
fn new_inherited(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> HTMLMapElement {
HTMLMapElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
}
}
#[allow(unrooted_must_root)]
pub fn new(<|fim▁hole|> ) -> DomRoot<HTMLMapElement> {
Node::reflect_node(
Box::new(HTMLMapElement::new_inherited(local_name, prefix, document)),
document,
HTMLMapElementBinding::Wrap,
)
}
pub fn get_area_elements(&self) -> Vec<DomRoot<HTMLAreaElement>> {
self.upcast::<Node>()
.traverse_preorder()
.filter_map(DomRoot::downcast::<HTMLAreaElement>)
.collect()
}
}<|fim▁end|> | local_name: LocalName,
prefix: Option<Prefix>,
document: &Document, |