prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>utils.rs<|end_file_name|><|fim▁begin|>use atom::Atom;
pub fn replace_until_change<T, F>(atom: &Atom<Box<T>>, new_value: T, f: F)
where F: Fn(Box<T>, Box<T>) -> Box<T>
{
let mut new_value = Box::new(new_value);
loop {
let value = atom.take();
let result;
if let Some(actual_value) = value {
result = f(actual_value, new_value);
} else {
result = new_value;
}
if let Some(nv) = atom.swap(result) {
new_value = nv;
} else {
break;
}<|fim▁hole|><|fim▁end|>
|
}
}
|
<|file_name|>python_create_binary.py<|end_file_name|><|fim▁begin|># Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from dataclasses import dataclass
from typing import Optional
from pants.backend.python.rules.pex import Pex
from pants.backend.python.rules.pex_from_target_closure import CreatePexFromTargetClosure
from pants.backend.python.rules.targets import EntryPoint, PythonBinarySources
from pants.backend.python.rules.targets import targets as python_targets
from pants.backend.python.targets.python_binary import PythonBinary
from pants.build_graph.address import Address
from pants.engine.addressable import Addresses
from pants.engine.legacy.structs import PythonBinaryAdaptor
from pants.engine.parser import HydratedStruct
from pants.engine.rules import UnionRule, rule
from pants.engine.selectors import Get
from pants.engine.target import SourcesRequest, SourcesResult, Target, hydrated_struct_to_target
from pants.rules.core.binary import BinaryTarget, CreatedBinary
from pants.rules.core.strip_source_roots import SourceRootStrippedSources, StripSnapshotRequest
# TODO: consider replacing this with sugar like `SelectFields(EntryPoint, PythonBinarySources)` so
# that the rule would request that instead of this dataclass. Note that this syntax must support
# both optional_fields (see the below TODO) and opt-out `SentinelField`s
# (see https://github.com/pantsbuild/pants/pull/9316#issuecomment-600152573).
@dataclass(frozen=True)
class PythonBinaryFields:
address: Address
sources: PythonBinarySources
entry_point: EntryPoint
# TODO: consume the other PythonBinary fields like `ZipSafe`. Consider making those fields
# optional. We _need_ PythonBinarySources and EntryPoint to work properly. If your target
# type also has ZipSafe, AlwaysWriteCache, etc, then we can do some additional things as an
# extra bonus. Consider adding `Target.maybe_get()` to facilitate this.
@staticmethod
def is_valid_target(tgt: Target) -> bool:
return tgt.has_fields([EntryPoint, PythonBinarySources])
@classmethod
def create(cls, tgt: Target) -> "PythonBinaryFields":
return cls(
tgt.address, sources=tgt.get(PythonBinarySources), entry_point=tgt.get(EntryPoint)
)
@rule
async def convert_python_binary_target(adaptor: PythonBinaryAdaptor) -> PythonBinaryFields:
hydrated_struct = await Get[HydratedStruct](Address, adaptor.address)
tgt = hydrated_struct_to_target(hydrated_struct, target_types=python_targets())
return PythonBinaryFields.create(tgt)
@rule
async def create_python_binary(fields: PythonBinaryFields) -> CreatedBinary:
entry_point: Optional[str]
if fields.entry_point.value is not None:
entry_point = fields.entry_point.value
else:<|fim▁hole|> # TODO: rework determine_source_files.py to work with the Target API. It should take the
# Sources AsyncField as input, rather than TargetAdaptor.
sources_result = await Get[SourcesResult](SourcesRequest, fields.sources.request)
stripped_sources = await Get[SourceRootStrippedSources](
StripSnapshotRequest(sources_result.snapshot)
)
source_files = stripped_sources.snapshot.files
# NB: `PythonBinarySources` enforces that we have 0-1 sources.
if len(source_files) == 1:
module_name = source_files[0]
entry_point = PythonBinary.translate_source_path_to_py_module_specifier(module_name)
else:
entry_point = None
request = CreatePexFromTargetClosure(
addresses=Addresses([fields.address]),
entry_point=entry_point,
output_filename=f"{fields.address.target_name}.pex",
)
pex = await Get[Pex](CreatePexFromTargetClosure, request)
return CreatedBinary(digest=pex.directory_digest, binary_name=pex.output_filename)
def rules():
return [
UnionRule(BinaryTarget, PythonBinaryAdaptor),
convert_python_binary_target,
create_python_binary,
]<|fim▁end|>
| |
<|file_name|>02-methods-continued.go<|end_file_name|><|fim▁begin|>package main
import (
"fmt"
"math"
)
type MyFloat float64
func (f MyFloat) Abs() float64 {
if f < 0 {
return float64(-f)
}
return float64(f)<|fim▁hole|> f := MyFloat(-math.Sqrt2)
fmt.Println(f.Abs())
}<|fim▁end|>
|
}
func main() {
|
<|file_name|>playlister.py<|end_file_name|><|fim▁begin|>import os
import cPickle as pkl
from collections import namedtuple
import requests
from bs4 import BeautifulSoup
Song = namedtuple('Song', ['title', 'artist', 'album', 'length'])
class Playlist(object):
def __init__(self, title, url):
self.title = title
self.file_name = title.lower().replace(' ', '-') + '.pkl'
self.url = url
if os.path.isfile(self.file_name):
self.load_from_pickle()
else:
self.songs = []
def load_from_pickle(self):<|fim▁hole|>
def download_data(self):
url = self.url
resp = requests.get(url)
soup = BeautifulSoup(resp.text)
for song_elem in (soup.find(class_='songs')
.find_all(class_='media-body')):
title = song_elem.h4.text
ps = song_elem.find_all('p')
artist, album = ps[0].text.split(u' \xb7 ')
length = ps[1].text
song = Song(title, artist, album, length)
self.songs.append(song)
with open(self.file_name, 'wb') as out:
pkl.dump(self.songs, out)
ambient_bass = Playlist(
'ambient bass',
'http://www.playlister.io/items/playlist/1472493/ambient-bass/#')
beats = Playlist(
'Blissed-Out Beats',
'http://www.playlister.io/items/playlist/1682151/')
liquid = Playlist(
'Liquid Dubstep',
'http://www.playlister.io/items/playlist/1404323/')
liquid.download_data()<|fim▁end|>
|
with open(self.file_name, 'rb') as in_file:
self.songs = pkl.load(in_file)
|
<|file_name|>Binary Tree Level Order Traversal.py<|end_file_name|><|fim▁begin|>class Solution(object):
def __init__(self):
self.l=[]
def helper(self,root,level):
if not root:
return None
else:
if level<len(self.l):
self.l[level].append(root.val)
else:
self.l.append([root.val])
self.helper(root.left,level+1)
self.helper(root.right,level+1)
return self.l<|fim▁hole|> """
if not root:
return []
return self.helper(root,0)<|fim▁end|>
|
def levelOrder(self, root):
"""
:type root: TreeNode
:rtype: List[List[int]]
|
<|file_name|>bktptcmd.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
import side_effect
def useless_function(first, second):
print("I have the wrong number of arguments.")
<|fim▁hole|> se_value = extra_args.GetValueForKey("side_effect")
se_string = se_value.GetStringValue(100)
side_effect.fancy = se_string
def a_third_function(frame, bp_loc, extra_args, dict):
se_value = extra_args.GetValueForKey("side_effect")
se_string = se_value.GetStringValue(100)
side_effect.fancier = se_string
def empty_extra_args(frame, bp_loc, extra_args, dict):
if extra_args.IsValid():
side_effect.not_so_fancy = "Extra args should not be valid"
side_effect.not_so_fancy = "Not so fancy"<|fim▁end|>
|
def function(frame, bp_loc, dict):
side_effect.bktptcmd = "function was here"
def another_function(frame, bp_loc, extra_args, dict):
|
<|file_name|>FontRuntime.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3
import tkinter
import PIL.Image
import PIL.ImageTk
from tkinter.ttk import Progressbar as pbar
from PyFont import Font, SVG
class TkFont():
CHARY = 200
CHARX = 50
LINEY = CHARY / 2
MAIN_COLOR = '#FFFFFF'
def set_label(self):
tmp = self.words[-1].export_png_to_str()
photoImg = PIL.Image.open(tmp)
w, h = photoImg.size
nh = self.CHARY
coef = nh / h
nw = int(w * coef)
self.tmpx[-1] = int(self.words[-1].colors[SVG.SVG.LINK_COLOR_RIGHT].x *
coef) + self.CHARX
photoImg = photoImg.resize((nw, nh))#, PIL.Image.ANTIALIAS)
pix = photoImg.load()
found = False
miny = 0
for y in range(nh):
for x in range(nw):
if pix[x, y] != (0, 0, 0, 0):
miny = y
found = True
break
if found:
break
found = False
maxy = 0
for y in range(nh):
for x in range(nw):
if pix[x, nh - y - 1] != (0, 0, 0, 0):
maxy = nh - y - 1
found = True
break
if found:
break
if found:
photoImg = photoImg.crop((0, miny, nw, maxy))
photo = PIL.ImageTk.PhotoImage(photoImg)
self.labels[-1].place_forget()
self.labels[-1] = tkinter.Label(self.win, image=photo)
self.labels[-1].config(background=self.MAIN_COLOR)
self.labels[-1].image = photo
self.labels[-1].place(x = self.x[-1], y = self.y + miny)
def __init__(self, win, font, gui):
self.win = win
self.gui = gui
self.font = font
self.string = ""
self.words = []
self.labels = []
self.y = 0
self.x = [0]
self.tmpx = [0]
def backspace(self):
if not self.string:
return
if self.string[-1] == "\n":
self.tmpx = self.tmpx[:-1]
self.x = self.x[:-1]
self.y -= self.LINEY
elif self.string[-1] == " ":
self.tmpx = self.tmpx[:-1]
self.x[-1] -= self.tmpx[-1]
else:
self.words[-1].backspace()
self.set_label()
if self.string[-2:-1] in ["\n", " ", ""]:
self.words[-1].backspace()
self.words = self.words[:-1]
self.labels[-1].place_forget()
self.labels = self.labels[:-1]
self.string = self.string[:-1]
def ret(self):
self.y += self.LINEY
self.x += [0]
self.tmpx += [0]
self.string += "\n"
def space(self):
self.x[-1] += self.tmpx[-1]
self.tmpx += [0]
self.string += " "
def handle_char(self, c):
c = c.lower()
if c == "\b":
self.backspace()
elif c == "\r":
self.ret()
elif c == " ":
self.space()
elif c in self.font.chars:
svg = self.font.chr2svg(c)
if self.string[-1:] in ["\n", " ", ""]:
self.words += [svg]
self.labels += [tkinter.Label(self.win)]
else:
self.words[-1].link_with(svg)
self.set_label()
self.string += c
def theend(self):
if self.words:
svg = self.font.generate_svg("")
word = False
for c in self.string:
if c == " ":
word = False
svg.link_with(self.font.chr2svg(" "))
elif c == "\n":
word = False
svg.newline()
elif not word:
word = True
svg.link_with(self.words[0])
self.words = self.words[1:]
# bar.value += 100 / barlen
self.gui.the_end(svg)
self.win.destroy()
def export(self):<|fim▁hole|> svg = self.font.generate_svg("")
word = False
for c in self.string:
if c == " ":
word = False
svg.link_with(self.font.chr2svg(" "))
elif c == "\n":
word = False
svg.newline()
elif not word:
word = True
svg.link_with(self.words[0])
self.words = self.words[1:]
self.gui.the_end(svg)
def get_svg(self):
if self.words:
svg = self.font.generate_svg("")
word = False
for c in self.string:
if c == " ":
word = False
svg.link_with(self.font.chr2svg(" "))
elif c == "\n":
word = False
svg.newline()
elif not word:
word = True
svg.link_with(self.words[0])
self.words = self.words[1:]
return svg
return None<|fim▁end|>
|
if self.words:
|
<|file_name|>GUI.java<|end_file_name|><|fim▁begin|>package gui;
import java.awt.Font;
import java.awt.event.ActionListener;
import java.awt.event.ActionEvent;
import java.awt.image.BufferedImage;
import java.awt.Toolkit;
import javax.imageio.ImageIO;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JTextArea;
import javax.swing.SwingConstants;
import javax.swing.JTextField;
import javax.swing.JButton;
import javax.swing.JSpinner;
import javax.swing.SpinnerNumberModel;
import javax.swing.JComboBox;
import javax.swing.JScrollPane;
import javax.swing.ScrollPaneConstants;
import javax.swing.DefaultComboBoxModel;
import javax.swing.UIManager;
import app.Applikation;
import javax.swing.DropMode;
import parsers.*;
import java.io.IOException;
import java.util.Random;
public class GUI {
private JFrame frmImageDownloader;
private JTextField txtInsertTagHere;
private JTextArea textArea;
private JSpinner PageSpinner;
private JComboBox<Object> comboBox;
private ImageParser parser;
private JButton Parsebtn;
private JTextField DelayField;
private BufferedImage bgImage;
private JLabel lblNewLabel;
private JLabel lblNewLabel_1;
private JLabel lblTag;
private final String FOURCHAN_THREAD = "4chan-Thread - http://boards.4chan.org/x/res/123123";<|fim▁hole|>
private final String INFINITYCHAN_THREAD = "Infinity Chan Thread - https://8chan.co/BOARD/res/THREADNR.html";
private final String INFINITYCHAN_BOARD = "Infinity Chan Board - https://8chan.co/BOARDNR/";
private final String PAHEAL = "http://rule34.paheal.net/";
private final String XXX = "http://rule34.xxx/";
private final String GELBOORU = "http://gelbooru.com/";
private final String R34HENTAI = "http://rule34hentai.net/";
private final String TUMBLR = "Tumblr Artist - http://XxXxXxX.tumblr.com";
private final String IMGUR = "Imgur-Album - http://imgur.com/a/xXxXx";
private final String GE_HENTAI_SINGLE = "http://g.e-hentai.org/ - Single Album Page";
private final String GE_HENTAI_MORE = "http://g.e-hentai.org/ - >=1 Pages";
private final String ARCHIVE_MOE_THREAD = "archive.moe/fgts.jp (Thread) - https://archive.moe/BOARD/thread/THREADNR/";
private final String ARCHIVE_MOE_BOARD = "archive-moe (Board) - https://archive.moe/BOARD/";
private final String FGTS_JP_BOARD = "fgts.jp (Board) - http://fgts.jp/BOARD/";
private boolean parsing;
public GUI() {
System.setProperty("http.agent", "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:35.0) Gecko/20100101 Firefox/35.0");
try {
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
} catch (Exception e) {
e.printStackTrace();
}
frmImageDownloader = new JFrame();
frmImageDownloader.setIconImage(Toolkit.getDefaultToolkit()
.getImage(this.getClass().getResource("/images/icon.png")));
frmImageDownloader.getContentPane().setBackground(
UIManager.getColor("Button.background"));
frmImageDownloader.setResizable(false);
frmImageDownloader.setTitle("Image Downloader v1.5.1");
frmImageDownloader.setBounds(100, 100, 761, 558);
frmImageDownloader.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
frmImageDownloader.getContentPane().setLayout(null);
JLabel lblMadeByLars = new JLabel("@ berserkingyadis");
lblMadeByLars.setFont(new Font("Dialog", Font.PLAIN, 12));
lblMadeByLars.setBounds(14, 468, 157, 40);
frmImageDownloader.getContentPane().add(lblMadeByLars);
JScrollPane scrollPane = new JScrollPane();
scrollPane
.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS);
scrollPane.setBounds(205, 185, 542, 331);
scrollPane.setAutoscrolls(true);
frmImageDownloader.getContentPane().add(scrollPane);
try {
bgImage = ImageIO.read(Applikation.class.getResource("/images/bg" + (new Random().nextInt(4) + 1) + ".png"));
} catch (IOException e) {
appendLog("Could not get Background Image", true);
}
textArea = new BackgroundArea(bgImage);
textArea.setEditable(false);
textArea.setDropMode(DropMode.INSERT);
textArea.setColumns(2);
textArea.setFont(new Font("Monospaced", Font.PLAIN, 11));
scrollPane.setViewportView(textArea);
textArea.setLineWrap(true);
textArea.setWrapStyleWord(true);
txtInsertTagHere = new JTextField();
txtInsertTagHere.setText("insert tag here");
txtInsertTagHere.setFont(new Font("Dialog", Font.PLAIN, 14));
txtInsertTagHere.setBounds(14, 208, 181, 30);
frmImageDownloader.getContentPane().add(txtInsertTagHere);
txtInsertTagHere.setColumns(10);
lblTag = new JLabel("Tag:");
lblTag.setFont(new Font("Dialog", Font.BOLD, 12));
lblTag.setBounds(12, 183, 175, 24);
frmImageDownloader.getContentPane().add(lblTag);
Parsebtn = new JButton("start parsing");
Parsebtn.setBounds(12, 372, 181, 85);
frmImageDownloader.getContentPane().add(Parsebtn);
PageSpinner = new JSpinner();
PageSpinner.setModel(new SpinnerNumberModel(new Integer(1),
new Integer(1), null, new Integer(1)));
PageSpinner.setBounds(155, 249, 40, 30);
frmImageDownloader.getContentPane().add(PageSpinner);
lblNewLabel = new JLabel("Pages to parse:");
lblNewLabel.setFont(new Font("Dialog", Font.BOLD, 11));
lblNewLabel.setBounds(12, 249, 123, 30);
frmImageDownloader.getContentPane().add(lblNewLabel);
lblNewLabel_1 = new JLabel("1 Page = 50-60 Pictures");
lblNewLabel_1.setFont(new Font("Dialog", Font.PLAIN, 12));
lblNewLabel_1.setBounds(12, 290, 175, 30);
frmImageDownloader.getContentPane().add(lblNewLabel_1);
comboBox = new JComboBox<Object>();
comboBox.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
switch (comboBox.getSelectedItem().toString()) {
case GE_HENTAI_SINGLE:
Parsebtn.setEnabled(true);
Parsebtn.setFont(new Font("Dialog", Font.BOLD, 12));
Parsebtn.setText("start parsing");
DelayField.setText("100");
lblNewLabel_1.setEnabled(true);
PageSpinner.setEnabled(false);
lblTag.setText("Page URL:");
txtInsertTagHere.setText("insert page URL here");
lblNewLabel.setEnabled(false);
break;
case GE_HENTAI_MORE:
Parsebtn.setEnabled(true);
Parsebtn.setFont(new Font("Dialog", Font.BOLD, 12));
Parsebtn.setText("start parsing");
DelayField.setText("100");
lblNewLabel.setEnabled(true);
lblNewLabel_1.setEnabled(true);
PageSpinner.setEnabled(true);
lblTag.setText("Album URL:");
txtInsertTagHere.setText("insert album URL here");
break;
case PAHEAL:
case GELBOORU:
case R34HENTAI:
case XXX:
Parsebtn.setEnabled(true);
Parsebtn.setFont(new Font("Dialog", Font.BOLD, 12));
Parsebtn.setText("start parsing");
DelayField.setText("100");
lblNewLabel.setEnabled(true);
lblNewLabel_1.setEnabled(true);
PageSpinner.setEnabled(true);
lblTag.setText("Tag:");
txtInsertTagHere.setText("insert tag here");
lblNewLabel.setText("Pages to parse:");
break;
case FOURCHAN_THREAD:
case INFINITYCHAN_THREAD:
case ARCHIVE_MOE_THREAD:
Parsebtn.setEnabled(true);
Parsebtn.setFont(new Font("Dialog", Font.BOLD, 12));
Parsebtn.setText("start parsing");
DelayField.setText("10");
lblNewLabel.setText("Pages to parse:");
lblNewLabel.setEnabled(false);
lblNewLabel_1.setEnabled(false);
PageSpinner.setEnabled(false);
lblTag.setText("Thread URL:");
txtInsertTagHere.setText("insert link here");
break;
case FOURCHAN_BOARD:
case INFINITYCHAN_BOARD:
Parsebtn.setEnabled(true);
Parsebtn.setFont(new Font("Dialog", Font.BOLD, 12));
Parsebtn.setText("start parsing");
DelayField.setText("10");
lblNewLabel.setEnabled(true);
lblNewLabel.setText("Threads to parse:");
lblNewLabel_1.setEnabled(false);
PageSpinner.setEnabled(true);
lblTag.setText("Board(eg: v or e):");
txtInsertTagHere.setText("insert Board Letter here");
break;
case ARCHIVE_MOE_BOARD:
case FGTS_JP_BOARD:
Parsebtn.setEnabled(true);
Parsebtn.setFont(new Font("Dialog", Font.BOLD, 12));
Parsebtn.setText("start parsing");
DelayField.setText("10");
lblNewLabel.setEnabled(true);
lblNewLabel.setText("Sites to parse(1 Site = 100 Threads):");
lblNewLabel_1.setEnabled(false);
PageSpinner.setEnabled(true);
lblTag.setText("Board(eg: v or e):");
txtInsertTagHere.setText("insert Board Letter here");
break;
case TUMBLR:
Parsebtn.setEnabled(true);
Parsebtn.setFont(new Font("Dialog", Font.BOLD, 12));
Parsebtn.setText("start parsing");
DelayField.setText("100");
lblNewLabel.setEnabled(true);
lblNewLabel_1.setEnabled(true);
PageSpinner.setEnabled(true);
lblTag.setText("Artist name:");
txtInsertTagHere.setText("insert artist name here");
lblNewLabel.setText("Pages to parse:");
break;
case IMGUR:
lblNewLabel.setEnabled(false);
lblNewLabel_1.setEnabled(false);
PageSpinner.setEnabled(false);
DelayField.setText("100");
lblTag.setText("Album Letters: (eg: \"Bl1QP\")");
txtInsertTagHere.setText("insert the letters here");
Parsebtn.setText("start parsing");
Parsebtn.setFont(new Font("Dialog", Font.BOLD, 12));
Parsebtn.setEnabled(true);
break;
}
}
});
comboBox.setModel(new DefaultComboBoxModel<Object>(
new String[]{
PAHEAL,
XXX,
R34HENTAI,
GELBOORU,
GE_HENTAI_SINGLE,
GE_HENTAI_MORE,
FOURCHAN_THREAD,
FOURCHAN_BOARD,
//ARCHIVE_MOE_THREAD,
//ARCHIVE_MOE_BOARD,
//FGTS_JP_BOARD,
INFINITYCHAN_THREAD,
INFINITYCHAN_BOARD,
TUMBLR,
IMGUR
}));
comboBox.setBounds(434, 35, 313, 24);
comboBox.setFont(new Font("Dialog", Font.PLAIN, 11));
frmImageDownloader.getContentPane().add(comboBox);
JLabel lblNewLabel_2 = new JLabel("choose the Site to parse:");
lblNewLabel_2.setFont(new Font("Dialog", Font.BOLD, 12));
lblNewLabel_2.setBounds(434, 5, 268, 30);
frmImageDownloader.getContentPane().add(lblNewLabel_2);
JLabel lblNewLabel_3 = new JLabel(
"<html>\nWelcome to the Image Downloader. <br>\nYour can enter the Tag of the images you are looking for below. <br>\nImgur is not supported yet. <br>\nWhen you click parse a folder with the name of the tag will be <br>\ncreated and the images will be downloaded into it. <br><br>\nI strongly advise you to not set the delay under 100 ms, <br>\nif you abuse this you will get banned from the site <br><br>\n\nhave fun :)\n\n\n</html>");
lblNewLabel_3.setVerticalAlignment(SwingConstants.TOP);
lblNewLabel_3.setFont(new Font("Dialog", Font.PLAIN, 12));
lblNewLabel_3.setBounds(12, 12, 404, 195);
frmImageDownloader.getContentPane().add(lblNewLabel_3);
JLabel lblDelay = new JLabel("Delay(ms):");
lblDelay.setFont(new Font("Dialog", Font.BOLD, 12));
lblDelay.setBounds(12, 331, 85, 30);
frmImageDownloader.getContentPane().add(lblDelay);
DelayField = new JTextField();
DelayField.setText("100");
DelayField.setBounds(140, 331, 55, 30);
frmImageDownloader.getContentPane().add(DelayField);
DelayField.setColumns(10);
JLabel lblPahealSearchFor = new JLabel(
"<html>\r\npaheal: Search for Characters or Artists\r\n\t\t eg: Tifa Lockhart, Fugtrup<br>\r\nrule34.xxx: Search for things\r\n\t\teg: long hair, hand on hip<br>\r\ngelbooru: Search for what you want<br>\r\n4chan/8chan: paste the Thread URL/Boardletter in the\r\n\t\tTextfield <br>\r\ntumblr: enter the artist's name<br>imgur: enter the album id</html>");
lblPahealSearchFor.setVerticalAlignment(SwingConstants.TOP);
lblPahealSearchFor.setFont(new Font("Dialog", Font.PLAIN, 11));
lblPahealSearchFor.setBounds(434, 71, 313, 109);
frmImageDownloader.getContentPane().add(lblPahealSearchFor);
Parsebtn.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent arg0) {
if (parsing) {
parser.diePlease();
parser = null;
} else {
GUI g = getGUI();
textArea.setText("");
frmImageDownloader
.setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
String tag = txtInsertTagHere.getText().replace(' ', '_');
int pages = (int) PageSpinner.getValue();
String site = comboBox.getSelectedItem().toString();
int delay = Integer.parseInt(DelayField.getText());
switch (site) {
case PAHEAL:
parser = new PahealParser();
parser.setup(tag, pages, g, delay);
break;
case XXX:
parser = new XXXParser();
parser.setup(tag, pages, g, delay);
break;
case R34HENTAI:
parser = new R34HentaiParser();
parser.setup(tag, pages, g, delay);
break;
case GELBOORU:
parser = new GelBooruParser();
parser.setup(tag, pages, g, delay);
break;
case FOURCHAN_THREAD:
parser = new FourChanParser(tag, delay, g, 0);
break;
case FOURCHAN_BOARD:
parser = new FourChanParser(tag, delay, g, 1, pages);
break;
case INFINITYCHAN_THREAD:
parser = new InfinityChanParser(tag, delay, g, 0);
break;
case INFINITYCHAN_BOARD:
parser = new InfinityChanParser(tag, delay, g, 1, pages);
break;
case ARCHIVE_MOE_THREAD:
parser = new ArchiveMoeParser(tag, delay, g, 0);
break;
case ARCHIVE_MOE_BOARD:
parser = new ArchiveMoeParser(tag, delay, g, 1, pages);
break;
case FGTS_JP_BOARD:
parser = new ArchiveMoeParser(tag, delay, g, 2, pages);
break;
case TUMBLR:
parser = new TumblrParser();
parser.setup(tag, pages, g, delay);
break;
case IMGUR:
parser = new ImgurParser(tag, delay, g);
break;
case GE_HENTAI_SINGLE:
parser = new GEHentaiParser(tag, delay, g, false, pages);
break;
case GE_HENTAI_MORE:
parser = new GEHentaiParser(tag, delay, g, true, pages);
}
parser.start();
parsing = true;
Parsebtn.setText("stop parsing");
}
}
});
frmImageDownloader.setVisible(true);
parsing = false;
}
public void appendLog(String txt, boolean brk) {
textArea.append(txt);
if (brk)
textArea.append("\n");
}
private GUI getGUI() {
return this;
}
public void reportback() {
Parsebtn.setEnabled(true);
frmImageDownloader.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
Parsebtn.setText("start parsing");
parsing = false;
}
}<|fim▁end|>
|
private final String FOURCHAN_BOARD = "4chan-Board - http://boards.4chan.org/x/catalog";
|
<|file_name|>reducer.js<|end_file_name|><|fim▁begin|>import { key, PLAY, PAUSE, MUTE, UNMUTE, UPDATE_VOLUME, UPDATE_TIME, SET_SONG, SET_TIME, updateTime } from './actions'
import { store } from '../store'
let audio = new Audio()
audio.addEventListener('timeupdate', event => store.dispatch(updateTime(event)))
const initialState = {
isPlaying: false,
muted: false,
volume: audio.volume,
src: '',
currentTime: '',
duration: 0.0,
completed: 0.0
}
export const selectors = {
audio: state => state[key].audio
}
export default function reducer (state = initialState, { type, payload }) {
switch (type) {
case PLAY:
{
audio.play()
return { ...state, isPlaying: !state.isPlaying }
}
case PAUSE:<|fim▁hole|> }
case MUTE:
{
audio.muted = true
return { ...state, muted: true }
}
case UNMUTE:
{
audio.muted = false
return { ...state, muted: false }
}
case UPDATE_TIME:
{
const { currentTime, duration, completed } = payload
return { ...state, currentTime, duration, completed }
}
case UPDATE_VOLUME:
{
audio.volume = payload
return { ...state, volume: payload }
}
case SET_TIME:
{
const newCurrentTime = state.currentTime * parseFloat(payload) / 100
audio.currentTime = newCurrentTime
return { ...state, currentTime: newCurrentTime }
}
case SET_SONG:
{
audio.src = payload
return { ...state, src: payload }
}
default:
return state
}
}<|fim▁end|>
|
{
audio.pause()
return { ...state, isPlaying: !state.isPlaying }
|
<|file_name|>BestTimetoBuyandSellStock_121.java<|end_file_name|><|fim▁begin|>/**
* hujiawei - 15/3/21.
* <p/>
* 贪心
* <p/>
* https://leetcode.com/problems/best-time-to-buy-and-sell-stock/
*/
public class BestTimetoBuyandSellStock_121 {<|fim▁hole|>
public static void main(String[] args) {
System.out.println(new BestTimetoBuyandSellStock_121().maxProfit(new int[]{2, 5, 3, 8, 1, 10}));
}
public int maxProfit(int[] prices) {
if (prices.length <= 1) return 0;
int max = 0, low = prices[0];
for (int i = 1; i < prices.length; i++) {
if (prices[i] < low) low = prices[i];
else if (prices[i] - low > max) max = prices[i] - low;
}
return max;
}
}<|fim▁end|>
| |
<|file_name|>p10.py<|end_file_name|><|fim▁begin|>import pandas as pd
import os
import time
from datetime import datetime
import re
from time import mktime
import matplotlib
import matplotlib.pyplot as plt
from matplotlib import style
style.use("dark_background")
# path = "X:/Backups/intraQuarter" # for Windows with X files :)
# if git clone'ed then use relative path,
# assuming you extracted the downloaded zip into this project's folder:
path = "intraQuarter"
def Key_Stats(gather="Total Debt/Equity (mrq)"):
statspath = path+'/_KeyStats'
stock_list = [x[0] for x in os.walk(statspath)]
df = pd.DataFrame(<|fim▁hole|> 'Unix',
'Ticker',
'DE Ratio',
'Price',
'stock_p_change',
'SP500',
'sp500_p_change',
'Difference',
'Status'
]
)
sp500_df = pd.DataFrame.from_csv("YAHOO-INDEX_GSPC.csv")
ticker_list = []
for each_dir in stock_list[1:25]:
each_file = os.listdir(each_dir)
# ticker = each_dir.split("\\")[1] # Windows only
# ticker = each_dir.split("/")[1] # this didn't work so do this:
ticker = os.path.basename(os.path.normpath(each_dir))
# print(ticker) # uncomment to verify
ticker_list.append(ticker)
starting_stock_value = False
starting_sp500_value = False
if len(each_file) > 0:
for file in each_file:
date_stamp = datetime.strptime(file, '%Y%m%d%H%M%S.html')
unix_time = time.mktime(date_stamp.timetuple())
full_file_path = each_dir + '/' + file
source = open(full_file_path,'r').read()
try:
try:
value = float(source.split(gather+':</td><td class="yfnc_tabledata1">')[1].split('</td>')[0])
except:
value = float(source.split(gather+':</td>\n<td class="yfnc_tabledata1">')[1].split('</td>')[0])
try:
sp500_date = datetime.fromtimestamp(unix_time).strftime('%Y-%m-%d')
row = sp500_df[(sp500_df.index == sp500_date)]
sp500_value = float(row['Adjusted Close'])
except:
sp500_date = datetime.fromtimestamp(unix_time-259200).strftime('%Y-%m-%d')
row = sp500_df[(sp500_df.index == sp500_date)]
sp500_value = float(row['Adjusted Close'])
try:
stock_price = float(source.split('</small><big><b>')[1].split('</b></big>')[0])
except:
try:
stock_price = (source.split('</small><big><b>')[1].split('</b></big>')[0])
#print(stock_price)
stock_price = re.search(r'(\d{1,8}\.\d{1,8})', stock_price)
stock_price = float(stock_price.group(1))
#print(stock_price)
except:
try:
stock_price = (source.split('<span class="time_rtq_ticker">')[1].split('</span>')[0])
#print(stock_price)
stock_price = re.search(r'(\d{1,8}\.\d{1,8})', stock_price)
stock_price = float(stock_price.group(1))
#print(stock_price)
except:
print('wtf stock price lol',ticker,file, value)
time.sleep(5)
if not starting_stock_value:
starting_stock_value = stock_price
if not starting_sp500_value:
starting_sp500_value = sp500_value
stock_p_change = ((stock_price - starting_stock_value) / starting_stock_value) * 100
sp500_p_change = ((sp500_value - starting_sp500_value) / starting_sp500_value) * 100
location = len(df['Date'])
difference = stock_p_change-sp500_p_change
if difference > 0:
status = "outperform"
else:
status = "underperform"
df = df.append({'Date':date_stamp,
'Unix':unix_time,
'Ticker':ticker,
'DE Ratio':value,
'Price':stock_price,
'stock_p_change':stock_p_change,
'SP500':sp500_value,
'sp500_p_change':sp500_p_change,
############################
'Difference':difference,
'Status':status},
ignore_index=True)
except Exception as e:
pass
#print(ticker,e,file, value)
#print(ticker_list)
#print(df)
for each_ticker in ticker_list:
try:
plot_df = df[(df['Ticker'] == each_ticker)]
plot_df = plot_df.set_index(['Date'])
if plot_df['Status'][-1] == 'underperform':
color = 'r'
else:
color = 'g'
plot_df['Difference'].plot(label=each_ticker, color=color)
plt.legend()
except Exception as e:
print(str(e))
plt.show()
save = gather.replace(' ','').replace(')','').replace('(','').replace('/','')+str('.csv')
print(save)
df.to_csv(save)
Key_Stats()<|fim▁end|>
|
columns = [
'Date',
|
<|file_name|>email.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, unicode_literals
import email
import logging
from email.utils import formataddr
from collections import defaultdict
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.db.models import Q
from django.template.loader import render_to_string
from django.utils import six, timezone
from django.utils.datastructures import MultiValueDict
from django.utils.six.moves.urllib.parse import urljoin
from djblets.mail.message import EmailMessage as DjbletsEmailMessage
from djblets.siteconfig.models import SiteConfiguration
from djblets.auth.signals import user_registered
from reviewboard.accounts.models import ReviewRequestVisit
from reviewboard.admin.server import get_server_url
from reviewboard.reviews.models import Group, ReviewRequest, Review
from reviewboard.reviews.signals import (review_request_published,
review_published, reply_published,
review_request_closed)
from reviewboard.reviews.views import build_diff_comment_fragments
# A mapping of signals to EmailHooks.
_hooks = defaultdict(set)
def _ensure_unicode(text):
"""Return a unicode object for the given text.
Args:
text (bytes or unicode):
The text to decode.
Returns:
unicode: The decoded text.
"""
if isinstance(text, bytes):
text = text.decode('utf-8')
return text
def register_email_hook(signal, handler):
"""Register an e-mail hook.
Args:
signal (django.dispatch.Signal):
The signal that will trigger the e-mail to be sent. This is one of
:py:data:`~reviewboard.reviews.signals.review_request_published`,
:py:data:`~reviewboard.reviews.signals.review_request_closed`,
:py:data:`~reviewboard.reviews.signals.review_published`, or
:py:data:`~reviewboard.reviews.signals.reply_published`.
handler (reviewboard.extensions.hooks.EmailHook):
The ``EmailHook`` that will be triggered when an e-mail of the
chosen type is about to be sent.
"""
assert signal in (review_request_published, review_request_closed,
review_published, reply_published), (
'Invalid signal %r' % signal)
_hooks[signal].add(handler)
def unregister_email_hook(signal, handler):
"""Unregister an e-mail hook.
Args:
signal (django.dispatch.Signal):
The signal that will trigger the e-mail to be sent. This is one of
:py:data:`~reviewboard.reviews.signals.review_request_published`,
:py:data:`~reviewboard.reviews.signals.review_request_closed`,
:py:data:`~reviewboard.reviews.signals.review_published`, or
:py:data:`~reviewboard.reviews.signals.reply_published`.
handler (reviewboard.extensions.hooks.EmailHook):
The ``EmailHook`` that will be triggered when an e-mail of the
chosen type is about to be sent.
"""
assert signal in (review_request_published, review_request_closed,
review_published, reply_published), (
'Invalid signal %r' % signal)
_hooks[signal].discard(handler)
def review_request_closed_cb(sender, user, review_request, type, **kwargs):
"""Send e-mail when a review request is closed.
Listens to the
:py:data:`~reviewboard.reviews.signals.review_request_closed` signal and
sends an e-mail if this type of notification is enabled (through the
``mail_send_review_close_mail`` site configuration setting).
"""
siteconfig = SiteConfiguration.objects.get_current()
if siteconfig.get('mail_send_review_close_mail'):
mail_review_request(review_request, user, close_type=type)
def review_request_published_cb(sender, user, review_request, trivial,
changedesc, **kwargs):
"""Send e-mail when a review request is published.
Listens to the
:py:data:`~reviewboard.reviews.signals.review_request_published` signal and
sends an e-mail if this type of notification is enabled through the
``mail_send_review_mail`` site configuration setting).
"""
siteconfig = SiteConfiguration.objects.get_current()
if siteconfig.get('mail_send_review_mail') and not trivial:
mail_review_request(review_request, user, changedesc)
def review_published_cb(sender, user, review, to_submitter_only, **kwargs):
"""Send e-mail when a review is published.
Listens to the :py:data:`~reviewboard.reviews.signals.review_published`
signal and sends e-mail if this type of notification is enabled through the
``mail_send_review_mail`` site configuration setting).
"""
siteconfig = SiteConfiguration.objects.get_current()
if siteconfig.get('mail_send_review_mail'):
mail_review(review, user, to_submitter_only)
def reply_published_cb(sender, user, reply, trivial, **kwargs):
"""Send e-mail when a review reply is published.
Listens to the :py:data:`~reviewboard.reviews.signals.reply_published`
signal and sends an e-mail if this type of notification is enabled (through
``mail_send_review_mail`` site configuration).
"""
siteconfig = SiteConfiguration.objects.get_current()
if siteconfig.get('mail_send_review_mail') and not trivial:
mail_reply(reply, user)
def user_registered_cb(user, **kwargs):
"""Send e-mail when a user is registered.
Listens for new user registrations and sends a new user registration
e-mail to administrators, if this type of notification is enabled (through
``mail_send_new_user_mail`` site configuration).
"""
siteconfig = SiteConfiguration.objects.get_current()
if siteconfig.get('mail_send_new_user_mail'):
mail_new_user(user)
def connect_signals():
"""Connect e-mail callbacks to signals."""
review_request_published.connect(review_request_published_cb,
sender=ReviewRequest)
review_published.connect(review_published_cb, sender=Review)
reply_published.connect(reply_published_cb, sender=Review)
review_request_closed.connect(review_request_closed_cb,
sender=ReviewRequest)
user_registered.connect(user_registered_cb)
def build_email_address(fullname, email):
"""Build an e-mail address for the name and e-mail address.
Args:
fullname (unicode):
The full name associated with the e-mail address (or ``None``).
email (unicode):
The e-mail address.
Returns:
unicode: A properly formatted e-mail address.
"""
return formataddr((fullname, email))
def get_email_address_for_user(user):
"""Build an e-mail address for the given user.
Args:
user (django.contrib.auth.models.User):
The user.
Returns:
unicode: A properly formatted e-mail address for the user.
"""
return build_email_address(user.get_full_name(), user.email)
def get_email_addresses_for_group(group, review_request_id=None):
"""Build a list of e-mail addresses for the group.
Args:
group (reviewboard.reviews.models.Group):
The review group to build the e-mail addresses for.
Returns:
list: A list of properly formatted e-mail addresses for all users in
the review group.
"""
addresses = []
if group.mailing_list:
if ',' not in group.mailing_list:
# The mailing list field has only one e-mail address in it,
# so we can just use that and the group's display name.
addresses = [build_email_address(group.display_name,
group.mailing_list)]
else:
# The mailing list field has multiple e-mail addresses in it.
# We don't know which one should have the group's display name
# attached to it, so just return their custom list as-is.
addresses = group.mailing_list.split(',')
if not (group.mailing_list and group.email_list_only):
users_q = Q(is_active=True)
local_site = group.local_site
if local_site:
users_q = users_q & (Q(local_site=local_site) |
Q(local_site_admins=local_site))
users = group.users.filter(users_q).select_related('profile')
if review_request_id:
users = users.extra(select={
'visibility': """
SELECT accounts_reviewrequestvisit.visibility
FROM accounts_reviewrequestvisit
WHERE accounts_reviewrequestvisit.review_request_id =
%s
AND accounts_reviewrequestvisit.user_id =
reviews_group_users.user_id
""" % review_request_id
})
addresses.extend([
get_email_address_for_user(u)
for u in users
if (u.should_send_email() and
(not review_request_id or
u.visibility != ReviewRequestVisit.MUTED))
])
return addresses
class EmailMessage(DjbletsEmailMessage):
"""The Review Board EmailMessage subclass.
This class only differs from Djblets'
:py:class:`EmailMessage <djblets.email.message.EmailMessage>`
by using the site configuration to generate some e-mail settings.
"""
def __init__(self, subject, text_body, html_body, from_email, sender,
to, cc=None, in_reply_to=None, headers=None):
siteconfig = SiteConfiguration.objects.get_current()
auto_generated = siteconfig.get('mail_enable_autogenerated_header')
super(EmailMessage, self).__init__(
subject=subject,
text_body=text_body,
html_body=html_body,
from_email=from_email,
to=to,
cc=cc,
sender=sender,
in_reply_to=in_reply_to,
headers=headers,
auto_generated=auto_generated,
prevent_auto_responses=True)
def build_recipients(user, review_request, extra_recipients=None,
limit_recipients_to=None):
"""Build the recipient sets for an e-mail.
By default, the user sending the e-mail, the review request submitter (if
they are active), all active reviewers, and all active members of review
groups will be recipients of the e-mail.
If the ``limit_recipients_to`` parameter is provided, the given ``user``
and the review request submitter (if active) will still be recipients of
the e-mail, but all reviewers and members of review groups will not.
Instead, the recipients given in ``limit_recipients_to`` will be used.
Args:
user (django.contrib.auth.models.User):
The user sending the e-mail.
review_request (reviewboard.reviews.models.ReviewRequest):
The review request the e-mail corresponds to.
extra_recipients (list):
An optional list of extra recipients as
:py:class:`Users <django.contrib.auth.models.User>` and
:py:class:`Groups <reviewboard.reviews.models.Group>` that will
receive the e-mail.
limit_recipients_to (list):
An optional list of recipients as
:py:class:`Users <django.contrib.auth.models.User>` and
:py:class:`Groups <reviewboard.reviews.models.Group>` who will
receive the e-mail in place of the normal recipients.
Returns:
tuple: A 2-tuple of the To field and the CC field, as sets of
:py:class:`Users <django.contrib.auth.models.User>` and
:py:class:`Groups <reviewboard.reviews.models.Group>`.
"""
recipients = set()
to_field = set()
local_site = review_request.local_site_id
submitter = review_request.submitter
target_people = review_request.target_people.filter(is_active=True).extra(
select={
'visibility': """
SELECT accounts_reviewrequestvisit.visibility
FROM accounts_reviewrequestvisit
WHERE accounts_reviewrequestvisit.review_request_id =
reviews_reviewrequest_target_people.reviewrequest_id
AND accounts_reviewrequestvisit.user_id =
reviews_reviewrequest_target_people.user_id
"""
})
starred_users = User.objects.filter(
is_active=True,
profile__starred_review_requests=review_request,
profile__should_send_email=True)
local_site_q = Q()
if local_site:
# Filter out users who are on the reviewer list in some form or have
# starred the review request but are no longer part of the LocalSite.
local_site_q = (Q(local_site=local_site) |
Q(local_site_admins=local_site))
target_people = target_people.filter(local_site_q)
starred_users = starred_users.filter(local_site_q)
if not extra_recipients:
extra_recipients = User.objects.none()
if user.should_send_email():
recipients.add(user)
if submitter.is_active and submitter.should_send_email():
recipients.add(submitter)
recipients.update(starred_users)
def _filter_recipients(to_filter):
"""Filter the given recipients.
All groups will be added to the resulting recipients. Only users with a
matching local site will be added to the resulting recipients.
Args:
to_filter (list):
A list of recipients as
:py:class:`Users <django.contrib.auth.models.User>` and
:py:class:`Groups <reviewboard.reviews.models.Group>`.
"""
pks = set()
for recipient in to_filter:
if isinstance(recipient, User):
pks.add(recipient.pk)
elif isinstance(recipient, Group):
recipients.add(recipient)
else:
logging.error(
'Unexpected e-mail recipient %r; expected '
'django.contrib.auth.models.User or '
'reviewboard.reviews.models.Group.',
recipient)
if pks:
filtered_users = User.objects.filter(
Q(is_active=True, pk__in=pks),
local_site_q)
recipients.update(
recipient
for recipient in filtered_users.select_related('Profile')
if recipient.should_send_email()
)
if limit_recipients_to is not None:
_filter_recipients(limit_recipients_to)
else:
_filter_recipients(extra_recipients)
target_people = target_people.filter(is_active=True)
to_field.update(
recipient
for recipient in target_people.select_related('Profile')
if (recipient.should_send_email() and
recipient.visibility != ReviewRequestVisit.MUTED)
)
recipients.update(to_field)
recipients.update(review_request.target_groups.all())
if not user.should_send_own_updates():
recipients.discard(user)
to_field.discard(user)
if to_field:
cc_field = recipients.symmetric_difference(to_field)
else:
to_field = recipients
cc_field = set()
return to_field, cc_field
def recipients_to_addresses(recipients, review_request_id=None):
"""Return the set of e-mail addresses for the recipients.
Args:
recipients (list):
A list of :py:class:`Users <django.contrib.auth.models.User>` and
:py:class:`Groups <reviewboard.reviews.models.Group>`.
Returns:
set: The e-mail addresses for all recipients.
"""
addresses = set()
for recipient in recipients:
assert isinstance(recipient, User) or isinstance(recipient, Group)
if isinstance(recipient, User):
addresses.add(get_email_address_for_user(recipient))
else:
addresses.update(get_email_addresses_for_group(recipient,
review_request_id))
return addresses
def send_review_mail(user, review_request, subject, in_reply_to,
to_field, cc_field, text_template_name,
html_template_name, context=None, extra_headers=None):
"""Format and send an e-mail out.
Args:
user (django.contrib.auth.models.User):
The user who is sending the e-mail.
review_request (reviewboard.reviews.models.ReviewRequest):
The review request that the e-mail is about.
subject (unicode):
The subject of the e-mail address.
in_reply_to (unicode):
The e-mail message ID for threading.
to_field (list):
The recipients to send the e-mail to. This should be a list of
:py:class:`Users <django.contrib.auth.models.User>` and
:py:class:`Groups <reviewboard.reviews.models.Group>`.
cc_field (list):
The addresses to be CC'ed on the e-mail. This should be a list of
:py:class:`Users <django.contrib.auth.models.User>` and
:py:class:`Groups <reviewboard.reviews.models.Group>`.
text_template_name (unicode):
The name for the text e-mail template.
html_template_name (unicode):
The name for the HTML e-mail template.
context (dict):
Optional extra context to provide to the template.
extra_headers (dict):
Either a dict or
:py:class:`~django.utils.datastructures.MultiValueDict` providing
additional headers to send with the e-mail.
Returns:
unicode: The resulting e-mail message ID.
"""
current_site = Site.objects.get_current()
local_site = review_request.local_site
from_email = get_email_address_for_user(user)
to_field = recipients_to_addresses(to_field, review_request.id)
cc_field = recipients_to_addresses(cc_field, review_request.id) - to_field
if not user.should_send_own_updates():
to_field.discard(get_email_address_for_user(user))
if not to_field and not cc_field:
# Nothing to send.
return
siteconfig = current_site.config.get()
domain_method = siteconfig.get("site_domain_method")
if not context:
context = {}
context['user'] = user
context['domain'] = current_site.domain
context['domain_method'] = domain_method
context['review_request'] = review_request
if review_request.local_site:
context['local_site_name'] = review_request.local_site.name
text_body = render_to_string(text_template_name, context)
html_body = render_to_string(html_template_name, context)
base_url = get_server_url(local_site=local_site)
headers = MultiValueDict({
'X-ReviewBoard-URL': [base_url],
'X-ReviewRequest-URL': [urljoin(base_url,
review_request.get_absolute_url())],
'X-ReviewGroup': [', '.join(group.name for group in
review_request.target_groups.all())],
})
if extra_headers:
if not isinstance(extra_headers, MultiValueDict):
extra_headers = MultiValueDict(
(key, [value])
for (key, value) in six.iteritems(extra_headers)
)
headers.update(extra_headers)
if review_request.repository:
headers['X-ReviewRequest-Repository'] = review_request.repository.name
latest_diffset = review_request.get_latest_diffset()
if latest_diffset:
modified_files = set()
for filediff in latest_diffset.files.all():
if filediff.deleted or filediff.copied or filediff.moved:
modified_files.add(filediff.source_file)
if filediff.is_new or filediff.copied or filediff.moved:
modified_files.add(filediff.dest_file)
for filename in modified_files:
headers.appendlist('X-ReviewBoard-Diff-For', filename)
sender = None
if settings.DEFAULT_FROM_EMAIL:
sender = build_email_address(user.get_full_name(),
settings.DEFAULT_FROM_EMAIL)
if sender == from_email:
# RFC 2822 states that we should only include Sender if the
# two are not equal.
sender = None
message = EmailMessage(subject.strip(),
text_body.encode('utf-8'),
html_body.encode('utf-8'),
from_email, sender,
list(to_field), list(cc_field),
in_reply_to, headers)
try:
message.send()
except Exception:
logging.exception("Error sending e-mail notification with subject "
"'%s' on behalf of '%s' to '%s'",
subject.strip(),
from_email,
','.join(list(to_field) + list(cc_field)))
return message.message_id
def mail_review_request(review_request, user, changedesc=None,
close_type=None):
"""Send an e-mail representing the supplied review request.
Args:
review_request (reviewboard.reviews.models.ReviewRequest):
The review request to send an e-mail about.
user (django.contrib.auth.models.User):
The user who triggered the e-mail (i.e., they published or closed
the review request).
changedesc (reviewboard.changedescs.models.ChangeDescription):
An optional change description showing what has changed in the
review request, possibly with explanatory text from the submitter.
This is created when saving a draft on a public review request and
will be ``None`` when publishing initially. This is used by the
template to add contextual (updated) flags to inform people what
has changed.
close_type (unicode):
How the review request was closed or ``None`` if it was published.
If this is not ``None`` it must be one of
:py:attr:`~reviewboard.reviews.models.ReviewRequest.SUBMITTED` or
:py:attr:`~reviewboard.reviews.models.ReviewRequest.DISCARDED`.
"""
# If the review request is not yet public or has been discarded, don't send
# any mail. Relax the "discarded" rule when e-mails are sent on closing
# review requests
if (not review_request.public or
(not close_type and review_request.status == 'D')):
return
summary = _ensure_unicode(review_request.summary)
subject = "Review Request %d: %s" % (review_request.display_id,
summary)
reply_message_id = None
if review_request.email_message_id:
# Fancy quoted "replies"
subject = "Re: " + subject
reply_message_id = review_request.email_message_id
extra_recipients = review_request.participants
else:
extra_recipients = None
extra_context = {}
if close_type:
changedesc = review_request.changedescs.filter(public=True).latest()
limit_recipients_to = None
if changedesc:
extra_context['change_text'] = changedesc.text
extra_context['changes'] = changedesc.fields_changed
fields_changed = changedesc.fields_changed
changed_field_names = set(fields_changed.keys())
if (changed_field_names and
changed_field_names.issubset(['target_people', 'target_groups'])):
# If the only changes are to the target reviewers, try to send a
# much more targeted e-mail (rather than sending it out to
# everyone, only send it to new people).
limit_recipients_to = set()
if 'target_people' in changed_field_names:
user_pks = [
item[2]
for item in fields_changed['target_people']['added']
]
limit_recipients_to.update(User.objects.filter(
pk__in=user_pks))
if 'target_groups' in changed_field_names:
group_pks = [
item[2]
for item in fields_changed['target_groups']['added']
]
limit_recipients_to.update(Group.objects.filter(
pk__in=group_pks))
submitter = review_request.submitter
to_field, cc_field = build_recipients(submitter, review_request,
extra_recipients,
limit_recipients_to)
extra_filter_kwargs = {}
if close_type:
signal = review_request_closed
extra_filter_kwargs['close_type'] = close_type
else:
signal = review_request_published
to_field, cc_field = filter_email_recipients_from_hooks(
to_field, cc_field, signal, review_request=review_request, user=user,
**extra_filter_kwargs)
review_request.time_emailed = timezone.now()
review_request.email_message_id = \
send_review_mail(review_request.submitter, review_request, subject,
reply_message_id, to_field, cc_field,
'notifications/review_request_email.txt',
'notifications/review_request_email.html',
extra_context)
review_request.save()
def mail_review(review, user, to_submitter_only):
"""Send an e-mail representing the supplied review.
Args:
review (reviewboard.reviews.models.Review):
The review to send an e-mail about.
to_submitter_only (bool):
Determines if the review is to the submitter only or not.
"""
review_request = review.review_request
if not review_request.public:
return
review.ordered_comments = \
review.comments.order_by('filediff', 'first_line')
extra_context = {
'user': review.user,
'review': review,
}
extra_headers = {}
if review.ship_it:
extra_headers['X-ReviewBoard-ShipIt'] = '1'
if review.ship_it_only:
extra_headers['X-ReviewBoard-ShipIt-Only'] = '1'
has_error, extra_context['comment_entries'] = \
build_diff_comment_fragments(
review.ordered_comments, extra_context,
"notifications/email_diff_comment_fragment.html")
<|fim▁hole|> if to_submitter_only:
limit_to = set([review_request.submitter, review.user])
to_field, cc_field = build_recipients(reviewer, review_request,
limit_recipients_to=limit_to)
to_field, cc_field = filter_email_recipients_from_hooks(
to_field, cc_field, review_published, review=review, user=user,
review_request=review_request)
summary = _ensure_unicode(review_request.summary)
review.email_message_id = send_review_mail(
reviewer,
review_request,
('Re: Review Request %d: %s'
% (review_request.display_id, summary)),
review_request.email_message_id,
to_field,
cc_field,
'notifications/review_email.txt',
'notifications/review_email.html',
extra_context,
extra_headers=extra_headers)
review.time_emailed = timezone.now()
review.save()
def mail_reply(reply, user):
"""Send an e-mail representing the supplied reply to a review.
Args:
reply (reviewboard.reviews.models.Review):
The review reply to send an e-mail about.
"""
review = reply.base_reply_to
review_request = review.review_request
if not review_request.public:
return
extra_context = {
'user': reply.user,
'review': review,
'reply': reply,
}
has_error, extra_context['comment_entries'] = \
build_diff_comment_fragments(
reply.comments.order_by('filediff', 'first_line'),
extra_context,
"notifications/email_diff_comment_fragment.html")
reviewer = reply.user
to_field, cc_field = build_recipients(reviewer, review_request,
review_request.participants)
to_field, cc_field = filter_email_recipients_from_hooks(
to_field, cc_field, reply_published, reply=reply, user=user,
review=review, review_request=review_request)
summary = _ensure_unicode(review_request.summary)
reply.email_message_id = send_review_mail(
user,
review_request,
('Re: Review Request %d: %s'
% (review_request.display_id, summary)),
review.email_message_id,
to_field,
cc_field,
'notifications/reply_email.txt',
'notifications/reply_email.html',
extra_context)
reply.time_emailed = timezone.now()
reply.save()
def mail_new_user(user):
"""Send an e-mail to administrators for newly registered users.
Args:
user (django.contrib.auth.models.User):
The user to send an e-mail about.
"""
current_site = Site.objects.get_current()
siteconfig = current_site.config.get_current()
domain_method = siteconfig.get("site_domain_method")
subject = "New Review Board user registration for %s" % user.username
from_email = get_email_address_for_user(user)
context = {
'domain': current_site.domain,
'domain_method': domain_method,
'user': user,
'user_url': reverse('admin:auth_user_change', args=(user.id,))
}
text_message = render_to_string('notifications/new_user_email.txt',
context)
html_message = render_to_string('notifications/new_user_email.html',
context)
message = EmailMessage(subject.strip(), text_message, html_message,
settings.SERVER_EMAIL, settings.SERVER_EMAIL,
[build_email_address(*a)
for a in settings.ADMINS], None, None)
try:
message.send()
except Exception as e:
logging.error("Error sending e-mail notification with subject '%s' on "
"behalf of '%s' to admin: %s",
subject.strip(), from_email, e, exc_info=1)
def filter_email_recipients_from_hooks(to_field, cc_field, signal, **kwargs):
"""Filter the e-mail recipients through configured e-mail hooks.
Args:
to_field (set):
The original To field of the e-mail, as a set of
:py:class:`Users <django.contrib.auth.models.User>` and
:py:class:`Groups <reviewboard.reviews.models.Group>`.
cc_field (set):
The original CC field of the e-mail, as a set of
:py:class:`Users <django.contrib.auth.models.User>` and
:py:class:`Groups <reviewboard.reviews.models.Group>`.
signal (django.dispatch.Signal):
The signal that triggered the e-mail.
**kwargs (dict):
Extra keyword arguments to pass to the e-mail hook.
Returns:
tuple: A 2-tuple of the To field and the CC field, as sets
of :py:class:`Users <django.contrib.auth.models.User>` and
:py:class:`Groups <reviewboard.reviews.models.Group>`.
"""
if signal in _hooks:
for hook in _hooks[signal]:
to_field = hook.get_to_field(to_field, **kwargs)
cc_field = hook.get_cc_field(cc_field, **kwargs)
return to_field, cc_field
# Fixes bug #3613
_old_header_init = email.header.Header.__init__
def _unified_header_init(self, *args, **kwargs):
kwargs['continuation_ws'] = b' '
_old_header_init(self, *args, **kwargs)
email.header.Header.__init__ = _unified_header_init<|fim▁end|>
|
reviewer = review.user
limit_to=None
|
<|file_name|>allplot.py<|end_file_name|><|fim▁begin|>import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as mcolors
from matplotlib.colors import Normalize
class MidpointNormalize(Normalize):
def __init__(self, vmin=None, vmax=None, midpoint=None, clip=False):
self.midpoint = midpoint<|fim▁hole|>
def __call__(self, value, clip=None):
# I'm ignoring masked values and all kinds of edge cases to make a
# simple example...
x, y = [self.vmin, self.midpoint, self.vmax], [0, 0.5, 1]
return np.ma.masked_array(np.interp(value, x, y))
def make_colormap(seq):
seq = [(None,) * 3, 0.0] + list(seq) + [1.0, (None,) * 3]
cdict = {'red': [], 'green': [], 'blue': []}
for i, item in enumerate(seq):
if isinstance(item, float):
r1, g1, b1 = seq[i - 1]
r2, g2, b2 = seq[i + 1]
cdict['red'].append([item, r1, r2])
cdict['green'].append([item, g1, g2])
cdict['blue'].append([item, b1, b2])
return mcolors.LinearSegmentedColormap('CustomMap', cdict)
c = mcolors.ColorConverter().to_rgb
phimap = make_colormap([c('white'), c('tomato'), 0.33, c(
'tomato'), c('deepskyblue'), 0.66, c('deepskyblue'), c('white')])
yinver = np.load('finalLMmilne2.npy')
# PLOT
titulos = ['B', 'thetaB', 'phiB', 'vlos',
'eta0', 'a', 'ddop', 'S_0', 'S_1', 'chi2']
# plt.figure(1, figsize(18,9))
for i in range(9):
plt.subplot(3, 3, i + 1)
plt.imshow(yinver[:, :, i], cmap='cubehelix', origin='lower')
if i == 2:
plt.imshow(yinver[:, :, i], cmap=phimap, origin='lower')
if i == 3:
norm = MidpointNormalize(midpoint=0)
plt.imshow(yinver[:, :, i], norm=norm,
cmap=plt.cm.seismic, origin='lower')
plt.title(titulos[i])
plt.colorbar()
plt.tight_layout()
plt.figure(2)
plt.subplot(2, 1, 1)
plt.imshow(yinver[:, :, 9], cmap='cubehelix', origin='lower', vmax=0.01)
plt.colorbar()
plt.subplot(2, 1, 2)
plt.imshow(yinver[:, :, 10], cmap='cubehelix', origin='lower')
plt.colorbar()
plt.show()<|fim▁end|>
|
Normalize.__init__(self, vmin, vmax, clip)
|
<|file_name|>object_inspect.rs<|end_file_name|><|fim▁begin|>use command::{Command, Query};
use command::Command::ObjectInspect;
use std::collections::HashMap;
use command_query::CommandQuery;
use queryable::Queryable;
use command_line::CommandLine;
use commandable::Commandable;
use extendable::Extendable;
use request_cancellable::RequestCancellable;
use request_timeoutable::RequestTimeoutable;
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct ObjectInspectCommand {
command: Command,
arguments: HashMap<String, String>,
}
impl Default for ObjectInspectCommand {
fn default() -> ObjectInspectCommand {
ObjectInspectCommand {
command: ObjectInspect,
arguments: HashMap::new(),
}<|fim▁hole|> pub fn new() -> ObjectInspectCommand {
ObjectInspectCommand::default()
}
pub fn name(mut self, name: String) -> ObjectInspectCommand {
self.arguments.insert("name".to_string(), name.to_owned());
self
}
pub fn build(self) -> (Command, Query) {
let mut query: Query = vec![];
for (key, value) in &self.arguments {
query.push((key.to_owned(), value.to_owned()));
}
(ObjectInspect, query)
}
}
impl Queryable for ObjectInspectCommand {
fn to_query(self) -> String {
let (command, query) = self.build();
let mut command = CommandQuery::new(command, query);
command.encode()
}
}
impl Commandable for ObjectInspectCommand {
fn to_command(self) -> String {
let (command, query) = self.build();
let mut command = CommandLine::new(command, query);
command.encode()
}
}
extendable!(ObjectInspectCommand);
request_cancellable!(ObjectInspectCommand);
request_timeoutable!(ObjectInspectCommand);
#[cfg(test)]
mod test {
use super::*;
use command::Query;
use command::Command::ObjectInspect;
use std::collections::HashMap;
use queryable::Queryable;
use commandable::Commandable;
use extendable::Extendable;
#[test]
fn test_new() {
let object_inspect = ObjectInspectCommand::new();
let expected = ObjectInspectCommand {
command: ObjectInspect,
arguments: HashMap::new(),
};
assert_eq!(expected, object_inspect);
}
#[test]
fn test_name() {
let actual = ObjectInspectCommand::new().name("object".to_string()).build();
let expected_query: Query = vec![("name".to_string(), "object".to_string())];
let expected = (ObjectInspect, expected_query);
assert_eq!(expected, actual);
}
#[test]
fn test_build() {
let actual = ObjectInspectCommand::new().name("object".to_string()).build();
let expected_query: Query = vec![("name".to_string(), "object".to_string())];
let expected = (ObjectInspect, expected_query);
assert_eq!(expected, actual);
}
#[test]
fn test_queryable() {
let query = ObjectInspectCommand::new().name("object".to_string()).to_query();
let url_encoded = "/d/object_inspect?name=object";
assert_eq!(url_encoded.to_string(), query);
}
#[test]
fn test_commandable() {
let query = ObjectInspectCommand::new().name("object".to_string()).to_command();
let cli_encoded = "object_inspect --name object";
assert_eq!(cli_encoded.to_string(), query);
}
#[test]
fn test_extendable() {
let mut arg: HashMap<String, String> = HashMap::new();
arg.insert("user".to_string(), "defined".to_string());
let expected = ObjectInspectCommand {
command: ObjectInspect,
arguments: arg.to_owned(),
};
let query = ObjectInspectCommand::new();
unsafe {
let extended = query.set_arguments(arg.to_owned());
assert_eq!(expected, extended);
}
}
}<|fim▁end|>
|
}
}
impl ObjectInspectCommand {
|
<|file_name|>cruiz_04.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
number = 0
for a in range(999,99,-1):
for b in range(999,99,-1):
pal=a*b
if (str(pal) == str(pal)[::-1]):
if (pal > number):<|fim▁hole|> number = pal
break
print(number)<|fim▁end|>
| |
<|file_name|>backbone.syphon.keysplitter.js<|end_file_name|><|fim▁begin|>// Backbone.Syphon.KeySplitter
// ---------------------------
// This function is used to split DOM element keys in to an array
// of parts, which are then used to create a nested result structure.
// returning `["foo", "bar"]` results in `{foo: { bar: "value" }}`.
//
// Override this method to use a custom key splitter, such as:
// `<input name="foo.bar.baz">`, `return key.split(".")`
Backbone.Syphon.KeySplitter = function(key){
var matches = key.match(/[^\[\]]+/g);
if (key.indexOf("[]") === key.length - 2){<|fim▁hole|> lastKey = matches.pop();
matches.push([lastKey]);
}
return matches;
}<|fim▁end|>
| |
<|file_name|>Email.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2013-2014 Richard M. Hightower
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* __________ _____ __ .__
* \______ \ ____ ____ ____ /\ / \ _____ | | _|__| ____ ____
* | | _// _ \ / _ \ / \ \/ / \ / \\__ \ | |/ / |/ \ / ___\
* | | ( <_> | <_> ) | \ /\ / Y \/ __ \| <| | | \/ /_/ >
* |______ /\____/ \____/|___| / \/ \____|__ (____ /__|_ \__|___| /\___ /
* \/ \/ \/ \/ \/ \//_____/
* ____. ___________ _____ ______________.___.
* | |____ ___ _______ \_ _____/ / _ \ / _____/\__ | |
* | \__ \\ \/ /\__ \ | __)_ / /_\ \ \_____ \ / | |
* /\__| |/ __ \\ / / __ \_ | \/ | \/ \ \____ |
* \________(____ /\_/ (____ / /_______ /\____|__ /_______ / / ______|
* \/ \/ \/ \/ \/ \/
*/
package org.boon.validation.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
@Retention ( RetentionPolicy.RUNTIME )
@Target ( { ElementType.METHOD, ElementType.TYPE, ElementType.FIELD } )
public @interface Email {
<|fim▁hole|>
String summaryMessage() default "";
}<|fim▁end|>
|
String detailMessage() default "";
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from .api import API, ApiMethods
from .config import Config
<|fim▁hole|><|fim▁end|>
|
__all__ = ["API", "ApiMethods", "Config"]
|
<|file_name|>ntp-sync.py<|end_file_name|><|fim▁begin|>import math
import numpy
import pyaudio
import time
import ntplib
def sine(frequency, length, rate):
length = int(length * rate)
factor = float(frequency) * (math.pi * 2) / rate
return numpy.sin(numpy.arange(length) * factor)
chunks = []
chunks.append(sine(440, 1, 44100))
chunk = numpy.concatenate(chunks) * 0.25
p = pyaudio.PyAudio()
stream = p.open(format=pyaudio.paFloat32, channels=1, rate=44100, output=1)
last = 0
print("[ntp-sync] getting clock")
c = ntplib.NTPClient()
response = c.request('pool.ntp.org', version=3)
print("[ntp-sync] clock offset %s" % response.offset)
while True:
curtime = int(math.floor(time.time() + response.offset))
if (curtime % 5) == 0 and curtime > last:<|fim▁hole|> stream.write(chunk.astype(numpy.float32).tostring())
stream.close()
p.terminate()<|fim▁end|>
|
print curtime
print("beep")
last = curtime
|
<|file_name|>notebook_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import mock
from tests import base
from girder.models.model_base import ValidationException
def setUpModule():
base.enabledPlugins.append('ythub')
base.startServer()
def tearDownModule():
base.stopServer()
class FakeAsyncResult(object):
def __init__(self):
self.task_id = 'fake_id'
def get(self):
return dict(
nodeId='123456',
volumeId='blah_volume',
serviceId='tmp-blah',
urlPath='?token=foo'
)
class FakeAsyncResult2(object):
def __init__(self):
self.task_id = 'fake_id'
def get(self):
return dict(
nodeId='654321',
volumeId='foobar_volume',
serviceId='tmp-foobar',
urlPath='?token=blah'
)
class FakeAsyncResult3(object):
def __init__(self):
self.task_id = 'fake_id'
def get(self):
return dict(
nodeId='162534',
volumeId='foobaz_volume',
serviceId='tmp-foobaz',
urlPath='?token=ragl'
)
class NotebookTestCase(base.TestCase):
def _getUser(self, userDict):
try:
user = self.model('user').createUser(**userDict)
except ValidationException:
resp = self.request(
path='/user/authentication', method='GET',
basicAuth='{login}:{password}'.format(**userDict))
self.assertStatusOk(resp)<|fim▁hole|> return user
def setUp(self):
global PluginSettings
from girder.plugins.ythub.constants import PluginSettings
self.model('setting').set(
PluginSettings.TMPNB_URL, "http://tmpnb.null")
users = ({
'email': '[email protected]',
'login': 'admin',
'firstName': 'Root',
'lastName': 'van Klompf',
'password': 'secret'
}, {
'email': '[email protected]',
'login': 'joeregular',
'firstName': 'Joe',
'lastName': 'Regular',
'password': 'secret'
})
self.admin, self.user = [self._getUser(user) for user in users]
def testNotebooks(self):
# Grab the default user folders
resp = self.request(
path='/folder', method='GET', user=self.user, params={
'parentType': 'user',
'parentId': self.user['_id'],
'sort': 'name',
'sortdir': 1
})
privateFolder = resp.json[0]
publicFolder = resp.json[1]
example_frontend = {
'imageName': 'xarthisius/ythub',
'command': './perform_magic',
'memLimit': '2048m',
'port': 12345,
'user': 'user',
'targetMount': '/blah',
'urlPath': '?token={token}',
'description': 'foo',
'cpuShares': None,
'public': True,
}
# Actually create a new frontend (private)
resp = self.request(
path='/frontend', method='POST', params=example_frontend,
user=self.admin)
self.assertStatus(resp, 200)
frontend = resp.json
with mock.patch('celery.Celery') as celeryMock:
with mock.patch('urllib.request.urlopen') as urllibMock:
instance = celeryMock.return_value
instance.send_task.side_effect = [
FakeAsyncResult(), FakeAsyncResult(),
FakeAsyncResult2(), FakeAsyncResult2(),
FakeAsyncResult3(), FakeAsyncResult3(),
FakeAsyncResult(), FakeAsyncResult()
]
req = urllibMock.return_value
req.fetch.return_value = {}
params = {
'frontendId': str(frontend['_id']),
'folderId': str(privateFolder['_id'])
}
resp = self.request(
'/notebook', method='POST',
user=self.user, params=params)
self.assertStatus(resp, 200)
notebook = resp.json
self.assertEqual(notebook['serviceInfo']['nodeId'], '123456')
self.assertEqual(notebook['serviceInfo']['volumeId'], 'blah_volume')
self.assertEqual(notebook['serviceInfo']['serviceId'], 'tmp-blah')
self.assertEqual(notebook['url'], 'http://tmp-blah.tmpnb.null/?token=foo')
self.assertEqual(notebook['frontendId'], str(frontend['_id']))
self.assertEqual(notebook['folderId'], str(privateFolder['_id']))
self.assertEqual(notebook['creatorId'], str(self.user['_id']))
with mock.patch('celery.Celery') as celeryMock:
with mock.patch('urllib.request.urlopen') as urllibMock:
params = {
'frontendId': str(frontend['_id']),
'folderId': str(privateFolder['_id'])
}
# Return exisiting
resp = self.request(
path='/notebook', method='POST', user=self.user,
params=params)
self.assertStatus(resp, 200)
self.assertEqual(resp.json['_id'], notebook['_id'])
# Create 2nd user's nb
params['folderId'] = str(publicFolder['_id'])
resp = self.request(
path='/notebook', method='POST', user=self.user,
params=params)
self.assertStatus(resp, 200)
other_notebook = resp.json
# Create admin nb
params['folderId'] = str(publicFolder['_id'])
resp = self.request(
path='/notebook', method='POST', user=self.admin,
params=params)
self.assertStatus(resp, 200)
admin_notebook = resp.json
# By default user can list only his/her notebooks
resp = self.request(
path='/notebook', method='GET', user=self.user)
self.assertStatus(resp, 200)
self.assertEqual([_['_id'] for _ in resp.json],
[other_notebook['_id'], notebook['_id']])
# Filter by folder
resp = self.request(
path='/notebook', method='GET', user=self.admin,
params={'folderId': publicFolder['_id']})
self.assertStatus(resp, 200)
self.assertEqual([_['_id'] for _ in resp.json],
[admin_notebook['_id'], other_notebook['_id']])
# Filter by folder and user
resp = self.request(
path='/notebook', method='GET', user=self.admin,
params={'folderId': publicFolder['_id'],
'userId': self.user['_id']})
self.assertStatus(resp, 200)
self.assertEqual(resp.json[0]['_id'], other_notebook['_id'])
# Get notebook by Id
resp = self.request(
path='/notebook/{_id}'.format(**notebook), method='GET')
self.assertStatus(resp, 401)
resp = self.request(
path='/notebook/{_id}'.format(**admin_notebook), method='GET',
user=self.user)
self.assertStatus(resp, 403)
resp = self.request(
path='/notebook/{_id}'.format(**notebook), method='GET',
user=self.admin)
self.assertStatus(resp, 200)
self.assertEqual(resp.json['_id'], notebook['_id'])
with mock.patch('celery.Celery') as celeryMock:
resp = self.request(
path='/notebook/{_id}'.format(**admin_notebook),
method='DELETE', user=self.user)
self.assertStatus(resp, 403)
resp = self.request(
path='/notebook/{_id}'.format(**notebook), method='DELETE',
user=self.admin)
self.assertStatus(resp, 200)
# Check if notebook is gone
resp = self.request(
path='/notebook/{_id}'.format(**notebook), method='GET',
user=self.admin)
self.assertStatus(resp, 400)
def tearDown(self):
self.model('user').remove(self.user)
self.model('user').remove(self.admin)<|fim▁end|>
|
user = resp.json['user']
|
<|file_name|>bitcoin_pt_BR.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="pt_BR" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About SwansonCoin</source>
<translation>Sobre o SwansonCoin</translation>
</message>
<message>
<location line="+39"/>
<source><b>SwansonCoin</b> version</source>
<translation>Versão do <b>SwansonCoin</b></translation>
</message>
<message>
<location line="+57"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation>⏎
Este é um software experimental.⏎
⏎
Distribuido sob a licença de software MIT/X11, veja o arquivo anexo COPYING ou http://www.opensource.org/licenses/mit-license.php.⏎
⏎
Este produto inclui software desenvolvido pelo Projeto OpenSSL para uso no OpenSSL Toolkit (http://www.openssl.org/), software de criptografia escrito por Eric Young ([email protected]) e sofware UPnP escrito por Thomas Bernard.</translation>
</message>
<message>
<location filename="../aboutdialog.cpp" line="+14"/>
<source>Copyright</source>
<translation>Copyright</translation>
</message>
<message>
<location line="+0"/>
<source>The SwansonCoin developers</source>
<translation>Desenvolvedores do SwansonCoin</translation>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>Catálogo de endereços</translation>
</message>
<message>
<location line="+19"/>
<source>Double-click to edit address or label</source>
<translation>Clique duas vezes para editar o endereço ou o etiqueta</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Criar um novo endereço</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Copie o endereço selecionado para a área de transferência do sistema</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation>&Novo endereço</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+63"/>
<source>These are your SwansonCoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation>Estes são os seus endereços SwansonCoin para receber pagamentos. Você pode querer enviar um endereço diferente para cada remetente, para acompanhar quem está pagando.</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>&Copy Address</source>
<translation>&Copiar Endereço</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation>Mostrar &QR Code</translation>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a SwansonCoin address</source>
<translation>Assine uma mensagem para provar que você é dono de um endereço SwansonCoin</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>&Assinar Mensagem</translation>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation>Excluir os endereços selecionados da lista</translation>
</message>
<message>
<location line="+27"/>
<source>Export the data in the current tab to a file</source>
<translation>Exportar os dados na aba atual para um arquivo</translation>
</message>
<message>
<location line="+3"/>
<source>&Export</source>
<translation>&Exportar</translation>
</message>
<message>
<location line="-44"/>
<source>Verify a message to ensure it was signed with a specified SwansonCoin address</source>
<translation>Verificar mensagem para se assegurar que ela foi assinada pelo dono de um endereço SwansonCoin específico.</translation>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation>&Verificar Mensagem</translation>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Excluir</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="-5"/>
<source>These are your SwansonCoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>Estes são os seus endereços SwansonCoin para receber pagamentos. Você pode querer enviar um endereço diferente para cada remetente, para acompanhar quem está pagando.</translation>
</message>
<message>
<location line="+13"/>
<source>Copy &Label</source>
<translation>Copiar &Etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation>&Editar</translation>
</message>
<message>
<location line="+1"/>
<source>Send &Coins</source>
<translation>Enviar bit&coins</translation>
</message>
<message>
<location line="+260"/>
<source>Export Address Book Data</source>
<translation>Exportar Catálogo de Endereços</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Arquivo separado por vírgulas (*. csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation>Erro ao exportar</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Não foi possível gravar no arquivo %1.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Rótulo</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Endereço</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(Sem rótulo)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation>Janela da Frase de Segurança</translation>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Digite a frase de segurança</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Nova frase de segurança</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Repita a nova frase de segurança</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+33"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Digite a nova frase de seguraça da sua carteira. <br/> Por favor, use uma frase de <b>10 ou mais caracteres aleatórios,</b> ou <b>oito ou mais palavras.</b></translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Criptografar carteira</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Esta operação precisa de sua frase de segurança para desbloquear a carteira.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Desbloquear carteira</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Esta operação precisa de sua frase de segurança para descriptografar a carteira.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Descriptografar carteira</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Alterar frase de segurança</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Digite a frase de segurança antiga e nova para a carteira.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Confirmar criptografia da carteira</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR SWANSONCOINS</b>!</source>
<translation>Aviso: Se você criptografar sua carteira e perder sua senha, você vai <b>perder todos os seus SWANSONCOINS!</b></translation>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Tem certeza de que deseja criptografar sua carteira?</translation>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>IMPORTANTE: Qualquer backup prévio que você tenha feito do seu arquivo wallet deve ser substituído pelo novo e encriptado arquivo wallet gerado. Por razões de segurança, qualquer backup do arquivo wallet não criptografado se tornará inútil assim que você começar a usar uma nova carteira criptografada.</translation>
</message>
<message>
<location line="+100"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation>Cuidado: A tecla Caps Lock está ligada!</translation>
</message>
<message>
<location line="-130"/>
<location line="+58"/>
<source>Wallet encrypted</source>
<translation>Carteira criptografada</translation>
</message>
<message>
<location line="-56"/>
<source>SwansonCoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your swansoncoins from being stolen by malware infecting your computer.</source>
<translation>O SwansonCoin irá fechar agora para finalizar o processo de encriptação. Lembre-se de que encriptar sua carteira não protege totalmente suas swansoncoins de serem roubadas por malwares que tenham infectado o seu computador.</translation>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+42"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>A criptografia da carteira falhou</translation>
</message>
<message>
<location line="-54"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>A criptografia da carteira falhou devido a um erro interno. Sua carteira não estava criptografada.</translation>
</message>
<message>
<location line="+7"/>
<location line="+48"/>
<source>The supplied passphrases do not match.</source>
<translation>A frase de segurança fornecida não confere.</translation>
</message>
<message>
<location line="-37"/>
<source>Wallet unlock failed</source>
<translation>A abertura da carteira falhou</translation>
</message>
<message>
<location line="+1"/>
<location line="+11"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>A frase de segurança digitada para a descriptografia da carteira estava incorreta.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>A descriptografia da carteira falhou</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>A frase de segurança da carteira foi alterada com êxito.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+233"/>
<source>Sign &message...</source>
<translation>&Assinar Mensagem...</translation>
</message>
<message>
<location line="+280"/>
<source>Synchronizing with network...</source>
<translation>Sincronizando com a rede...</translation>
</message>
<message>
<location line="-349"/>
<source>&Overview</source>
<translation>&Visão geral</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Mostrar visão geral da carteira</translation>
</message>
<message>
<location line="+20"/>
<source>&Transactions</source>
<translation>&Transações</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Navegar pelo histórico de transações</translation>
</message>
<message>
<location line="+7"/>
<source>Edit the list of stored addresses and labels</source>
<translation>Editar a lista de endereços e rótulos</translation>
</message>
<message>
<location line="-14"/>
<source>Show the list of addresses for receiving payments</source>
<translation>Mostrar a lista de endereços para receber pagamentos</translation>
</message>
<message>
<location line="+31"/>
<source>E&xit</source>
<translation>S&air</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Sair da aplicação</translation>
</message>
<message>
<location line="+4"/>
<source>Show information about SwansonCoin</source>
<translation>Mostrar informação sobre SwansonCoin</translation>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>Sobre &Qt</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Mostrar informações sobre o Qt</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Opções...</translation>
</message>
<message>
<location line="+6"/>
<source>&Encrypt Wallet...</source>
<translation>&Criptografar Carteira...</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>&Backup Carteira...</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>&Mudar frase de segurança...</translation>
</message>
<message>
<location line="+285"/>
<source>Importing blocks from disk...</source>
<translation>Importando blocos do disco...</translation>
</message>
<message>
<location line="+3"/>
<source>Reindexing blocks on disk...</source>
<translation>Reindexando blocos no disco...</translation>
</message>
<message>
<location line="-347"/>
<source>Send coins to a SwansonCoin address</source>
<translation>Enviar moedas para um endereço swansoncoin</translation>
</message>
<message>
<location line="+49"/>
<source>Modify configuration options for SwansonCoin</source>
<translation>Modificar opções de configuração para swansoncoin</translation>
</message>
<message>
<location line="+9"/>
<source>Backup wallet to another location</source>
<translation>Fazer cópia de segurança da carteira para uma outra localização</translation>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Mudar a frase de segurança utilizada na criptografia da carteira</translation>
</message>
<message>
<location line="+6"/>
<source>&Debug window</source>
<translation>Janela de &Depuração</translation>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation>Abrir console de depuração e diagnóstico</translation>
</message>
<message>
<location line="-4"/>
<source>&Verify message...</source>
<translation>&Verificar mensagem...</translation>
</message>
<message>
<location line="-165"/>
<location line="+530"/>
<source>SwansonCoin</source>
<translation>SwansonCoin</translation>
</message>
<message>
<location line="-530"/>
<source>Wallet</source>
<translation>Carteira</translation>
</message>
<message>
<location line="+101"/>
<source>&Send</source>
<translation>&Enviar</translation>
</message>
<message>
<location line="+7"/>
<source>&Receive</source>
<translation>&Receber</translation>
</message>
<message>
<location line="+14"/>
<source>&Addresses</source>
<translation>&Endereços</translation>
</message>
<message>
<location line="+22"/>
<source>&About SwansonCoin</source>
<translation>&Sobre o SwansonCoin</translation>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation>&Exibir/Ocultar</translation>
</message>
<message>
<location line="+1"/>
<source>Show or hide the main Window</source>
<translation>Mostrar ou esconder a Janela Principal.</translation>
</message>
<message>
<location line="+3"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Criptografar as chaves privadas que pertencem à sua carteira</translation>
</message>
<message>
<location line="+7"/>
<source>Sign messages with your SwansonCoin addresses to prove you own them</source>
<translation>Assine mensagems com seus endereços SwansonCoin para provar que você é dono deles</translation>
</message>
<message>
<location line="+2"/>
<source>Verify messages to ensure they were signed with specified SwansonCoin addresses</source>
<translation>Verificar mensagens para se assegurar que elas foram assinadas pelo dono de Endereços SwansonCoin específicos</translation>
</message>
<message>
<location line="+28"/>
<source>&File</source>
<translation>&Arquivo</translation>
</message>
<message>
<location line="+7"/>
<source>&Settings</source>
<translation>&Configurações</translation>
</message>
<message>
<location line="+6"/>
<source>&Help</source>
<translation>&Ajuda</translation>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation>Barra de ferramentas</translation>
</message>
<message>
<location line="+17"/>
<location line="+10"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
<message>
<location line="+47"/>
<source>SwansonCoin client</source>
<translation>Cliente SwansonCoin</translation>
</message>
<message numerus="yes">
<location line="+141"/>
<source>%n active connection(s) to SwansonCoin network</source>
<translation><numerusform>%n conexão ativa na rede SwansonCoin</numerusform><numerusform>%n conexões ativas na rede SwansonCoin</numerusform></translation>
</message>
<message>
<location line="+22"/>
<source>No block source available...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Processed %1 of %2 (estimated) blocks of transaction history.</source>
<translation>Processado %1 de %2 blocos (estimado) de histórico de transações.</translation>
</message>
<message>
<location line="+4"/>
<source>Processed %1 blocks of transaction history.</source>
<translation>Processado %1 blocos do histórico de transações.</translation>
</message>
<message numerus="yes">
<location line="+20"/>
<source>%n hour(s)</source>
<translation><numerusform>%n hora</numerusform><numerusform>%n horas</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation><numerusform>%n dia</numerusform><numerusform>%n dias</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n week(s)</source>
<translation><numerusform>%n semana</numerusform><numerusform>%n semanas</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>%1 behind</source>
<translation>%1 atrás</translation>
</message>
<message>
<location line="+14"/>
<source>Last received block was generated %1 ago.</source>
<translation>Último bloco recebido foi gerado %1 atrás.</translation>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
<translation>Transações após isso ainda não estão visíveis.</translation>
</message>
<message>
<location line="+22"/>
<source>Error</source>
<translation>Erro</translation>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation>Cuidado</translation>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation>Informação</translation>
</message>
<message>
<location line="+70"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation>A transação está acima do tamanho limite. Você ainda enviar ela com uma taxa de %1, que vai para os nós processam sua transação e ajuda a manter a rede. Você quer pagar a taxa?</translation>
</message>
<message>
<location line="-140"/>
<source>Up to date</source>
<translation>Atualizado</translation>
</message>
<message>
<location line="+31"/>
<source>Catching up...</source>
<translation>Recuperando o atraso ...</translation>
</message>
<message>
<location line="+113"/>
<source>Confirm transaction fee</source>
<translation>Confirmar taxa de transação</translation>
</message>
<message>
<location line="+8"/>
<source>Sent transaction</source>
<translation>Transação enviada</translation>
</message>
<message>
<location line="+0"/>
<source>Incoming transaction</source>
<translation>Transação recebida</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Data: %1
Quantidade: %2
Tipo: %3
Endereço: %4</translation>
</message>
<message>
<location line="+33"/>
<location line="+23"/>
<source>URI handling</source>
<translation>Manipulação de URI</translation>
</message>
<message>
<location line="-23"/>
<location line="+23"/>
<source>URI can not be parsed! This can be caused by an invalid SwansonCoin address or malformed URI parameters.</source>
<translation>URI não pode ser decodificado! Isso pode ter sido causado por um endereço SwansonCoin inválido ou por parâmetros URI malformados.</translation>
</message>
<message>
<location line="+17"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Carteira está <b>criptografada</b> e atualmente <b>desbloqueada</b></translation>
</message>
<message>
<location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Carteira está <b>criptografada</b> e atualmente <b>bloqueada</b></translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="+111"/>
<source>A fatal error occurred. SwansonCoin can no longer continue safely and will quit.</source>
<translation>Um erro fatal ocorreu. SwansonCoin não pode continuar em segurança e irá fechar.</translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+104"/>
<source>Network Alert</source>
<translation>Alerta da Rede</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Editar Endereço</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Etiqueta</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation>A etiqueta associada a esse endereço do catálogo</translation>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Endereço</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation>O endereço associado à essa entrada do seu catálogo de endereços. Isso só pode ser modificado para endereço de envio.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+21"/>
<source>New receiving address</source>
<translation>Novo endereço de recebimento</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Novo endereço de envio</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Editar endereço de recebimento</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Editar endereço de envio</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>O endereço digitado "%1" já se encontra no catálogo de endereços.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid SwansonCoin address.</source>
<translation>O endereço digitado "%1" não é um endereço SwansonCoin válido.</translation>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Não foi possível destravar a carteira.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>A geração de nova chave falhou.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+424"/>
<location line="+12"/>
<source>SwansonCoin-Qt</source>
<translation>SwansonCoin-Qt</translation>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation>versão</translation>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation>Uso:</translation>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation>opções da linha de comando</translation>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation>opções da UI</translation>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>Escolher língua, por exemplo "de_DE" (padrão: localização do sistema)</translation>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation>Inicializar minimizado</translation>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation>Mostrar tela inicial ao ligar (padrão: 1)</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Opções</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation>Principal</translation>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation>Pagar taxa de &transação</translation>
</message>
<message>
<location line="+31"/>
<source>Automatically start SwansonCoin after logging in to the system.</source>
<translation>Iniciar SwansonCoin automaticamente após se logar no sistema.</translation>
</message>
<message>
<location line="+3"/>
<source>&Start SwansonCoin on system login</source>
<translation>Iniciar SwansonCoin no login do sistema</translation>
</message>
<message>
<location line="+35"/>
<source>Reset all client options to default.</source>
<translation>Redefinir todas as opções do cliente para opções padrão.</translation>
</message>
<message>
<location line="+3"/>
<source>&Reset Options</source>
<translation>&Redefinir opções</translation>
</message>
<message>
<location line="+13"/>
<source>&Network</source>
<translation>Rede</translation>
</message>
<message>
<location line="+6"/>
<source>Automatically open the SwansonCoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Abrir as portas do cliente SwansonCoin automaticamente no roteador. Isto só funcionará se seu roteador suportar UPnP e esta função estiver habilitada.</translation>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation>Mapear porta usando &UPnP</translation>
</message>
<message>
<location line="+7"/>
<source>Connect to the SwansonCoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation>Conectar à rede SwansonCoin através de um proxy SOCKS (ex. quando estiver usando através do Tor)</translation>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation>&Conectar através de um proxy SOCKS:</translation>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation>&IP do proxy:</translation>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation>Endereço &IP do proxy (ex. 127.0.0.1)</translation>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation>&Porta:</translation>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Porta do serviço de proxy (ex. 9050)</translation>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation>&Versão do SOCKS:</translation>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation>Versão do proxy SOCKS (ex. 5)</translation>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation>&Janela</translation>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Mostrar apenas um ícone na bandeja ao minimizar a janela.</translation>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Minimizar para a bandeja em vez da barra de tarefas.</translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>Minimizar em vez de sair do aplicativo quando a janela for fechada. Quando esta opção é escolhida, o aplicativo só será fechado selecionando Sair no menu Arquivo.</translation>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation>M&inimizar ao sair</translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation>&Mostrar</translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation>&Língua da interface com usuário:</translation>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting SwansonCoin.</source>
<translation>A língua da interface com usuário pode ser escolhida aqui. Esta configuração só surtirá efeito após reiniciar o SwansonCoin.</translation>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>&Unidade usada para mostrar quantidades:</translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Escolha a unidade padrão de subdivisão para interface mostrar quando enviar swansoncoins.</translation>
</message>
<message>
<location line="+9"/>
<source>Whether to show SwansonCoin addresses in the transaction list or not.</source>
<translation>Mostrar ou não endereços SwansonCoin na lista de transações.</translation>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation>Mostrar en&dereços na lista de transações</translation>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation>&OK</translation>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation>&Cancelar</translation>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation>&Aplicar</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+53"/>
<source>default</source>
<translation>padrão</translation>
</message>
<message>
<location line="+130"/>
<source>Confirm options reset</source>
<translation>Confirmar redefinição de opções</translation>
</message>
<message>
<location line="+1"/>
<source>Some settings may require a client restart to take effect.</source>
<translation>Algumas configurações requerem reinicialização para surtirem efeito.</translation>
</message>
<message>
<location line="+0"/>
<source>Do you want to proceed?</source>
<translation>Você quer continuar?</translation>
</message>
<message>
<location line="+42"/>
<location line="+9"/>
<source>Warning</source>
<translation>Cuidado</translation>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting SwansonCoin.</source>
<translation>Esta configuração surtirá efeito após reinicializar o aplicativo SwansonCoin</translation>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation>O endereço proxy fornecido é inválido.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Formulário</translation>
</message>
<message>
<location line="+50"/>
<location line="+166"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the SwansonCoin network after a connection is established, but this process has not completed yet.</source>
<translation>A informação mostrada pode estar desatualizada. Sua carteira sincroniza automaticamente com a rede SwansonCoin depois que a conexão é estabelecida, mas este processo pode não estar completo ainda.</translation>
</message>
<message>
<location line="-124"/>
<source>Balance:</source>
<translation>Saldo:</translation>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation>Não confirmadas:</translation>
</message>
<message>
<location line="-78"/>
<source>Wallet</source>
<translation>Carteira</translation>
</message>
<message>
<location line="+107"/>
<source>Immature:</source>
<translation>Imaturo:</translation>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation>Saldo minerado que ainda não maturou</translation>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Transações recentes</b></translation>
</message>
<message>
<location line="-101"/>
<source>Your current balance</source>
<translation>Seu saldo atual</translation>
</message>
<message>
<location line="+29"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation>Total de transações ainda não confirmadas, e que ainda não contam no saldo atual</translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="+116"/>
<location line="+1"/>
<source>out of sync</source>
<translation>fora de sincronia</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+107"/>
<source>Cannot start swansoncoin: click-to-pay handler</source>
<translation>Não foi possível iniciar swansoncoin: manipulador clique-para-pagar</translation>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation>Janela do código QR</translation>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation>Requisitar Pagamento</translation>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation>Quantia:</translation>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation>Etiqueta:</translation>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation>Mensagem:</translation>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation>&Salvar como...</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
        <translation>Erro ao codificar o URI em código QR.</translation>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation>A quantidade digitada é inválida, favor verificar.</translation>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>URI resultante muito longa. Tente reduzir o texto do rótulo ou da mensagem.</translation>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation>Salvar código QR</translation>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation>Imagens PNG (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation>Nome do cliente</translation>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+339"/>
<source>N/A</source>
<translation>N/A</translation>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation>Versão do cliente</translation>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation>&Informação</translation>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation>Usando OpenSSL versão</translation>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation>Horário de inicialização</translation>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation>Rede</translation>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation>Número de conexões</translation>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation>Na rede de teste</translation>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation>Corrente de blocos</translation>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation>Quantidade atual de blocos</translation>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation>Total estimado de blocos</translation>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation>Horário do último bloco</translation>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation>&Abrir</translation>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation>Opções da linha de comando</translation>
</message>
<message>
<location line="+7"/>
<source>Show the SwansonCoin-Qt help message to get a list with possible SwansonCoin command-line options.</source>
<translation>Mostrar mensagem de ajuda do SwansonCoin-Qt para obter uma lista com possíveis opções da linha de comando do SwansonCoin.</translation>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation>&Mostrar</translation>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation>&Console</translation>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation>Data do 'build'</translation>
</message>
<message>
<location line="-104"/>
<source>SwansonCoin - Debug window</source>
<translation>SwansonCoin - Janela de Depuração</translation>
</message>
<message>
<location line="+25"/>
<source>SwansonCoin Core</source>
<translation>Núcleo SwansonCoin</translation>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation>Arquivo de log de Depuração</translation>
</message>
<message>
<location line="+7"/>
<source>Open the SwansonCoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation>Abrir o arquivo de log de depuração do SwansonCoin do diretório atual de dados. Isso pode levar alguns segundos para arquivos de log grandes.</translation>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation>Limpar console</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-30"/>
<source>Welcome to the SwansonCoin RPC console.</source>
<translation>Bem-vindo ao console SwansonCoin RPC.</translation>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>Use as setas para cima e para baixo para navegar pelo histórico, e <b>Ctrl-L</b> para limpar a tela.</translation>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Digite <b>help</b> para uma visão geral dos comandos disponíveis.</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+124"/><|fim▁hole|> <location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Enviar dinheiro</translation>
</message>
<message>
<location line="+50"/>
<source>Send to multiple recipients at once</source>
<translation>Enviar para vários destinatários de uma só vez</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation>Adicionar destinatário</translation>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation>Remover todos os campos da transação</translation>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation>Limpar Tudo</translation>
</message>
<message>
<location line="+22"/>
<source>Balance:</source>
<translation>Saldo:</translation>
</message>
<message>
<location line="+10"/>
<source>123.456 BTC</source>
<translation>123.456 BTC</translation>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Confirmar o envio</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>Enviar</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-59"/>
<source><b>%1</b> to %2 (%3)</source>
<translation><b>%1</b> para %2 (%3)</translation>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>Confirmar envio de dinheiro</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation>Você tem certeza que deseja enviar %1?</translation>
</message>
<message>
<location line="+0"/>
<source> and </source>
        <translation> e </translation>
</message>
<message>
<location line="+23"/>
<source>The recipient address is not valid, please recheck.</source>
<translation>O endereço do destinatário não é válido, favor verificar.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>A quantidade a ser paga precisa ser maior que 0.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation>A quantidade excede seu saldo.</translation>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>O total excede seu saldo quando uma taxa de transação de %1 é incluída.</translation>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>Endereço duplicado: pode-se enviar para cada endereço apenas uma vez por transação.</translation>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed!</source>
<translation>Erro: Criação da transação falhou!</translation>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Erro: A transação foi rejeitada. Isso pode acontecer se alguns dos swansoncoins de sua carteira já haviam sido gastos, por exemplo se você usou uma cópia do arquivo wallet.dat e alguns swansoncoins foram gastos na cópia mas não foram marcados como gastos aqui.</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation>Formulário</translation>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>Q&uantidade:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>Pagar &Para:</translation>
</message>
<message>
<location line="+34"/>
<source>The address to send the payment to (e.g. RC74svrUSLCmjPGQrc4sYvAxzse7tpA7hE)</source>
<translation>O endereço para onde enviar o pagamento (ex. RC74svrUSLCmjPGQrc4sYvAxzse7tpA7hE)</translation>
</message>
<message>
<location line="+60"/>
<location filename="../sendcoinsentry.cpp" line="+26"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Digite uma etiqueta para este endereço para adicioná-lo ao catálogo de endereços</translation>
</message>
<message>
<location line="-78"/>
<source>&Label:</source>
<translation>&Etiqueta:</translation>
</message>
<message>
<location line="+28"/>
<source>Choose address from address book</source>
<translation>Escolha um endereço do seu catálogo</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Colar o endereço da área de transferência</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation>Remover este destinatário</translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a SwansonCoin address (e.g. RC74svrUSLCmjPGQrc4sYvAxzse7tpA7hE)</source>
<translation>Digite um endereço SwansonCoin (exemplo: RC74svrUSLCmjPGQrc4sYvAxzse7tpA7hE)</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation>Assinaturas - Assinar / Verificar uma mensagem</translation>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation>&Assinar Mensagem</translation>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>Você pode assinar mensagens com seus endereços para provar que você é o dono deles. Seja cuidadoso para não assinar algo vago, pois ataques de pishing podem tentar te enganar para dar sua assinatura de identidade para eles. Apenas assine afirmações completamente detalhadas com as quais você concorda.</translation>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. RC74svrUSLCmjPGQrc4sYvAxzse7tpA7hE)</source>
<translation>Endereço a ser usado para assinar a mensagem (e.g. RC74svrUSLCmjPGQrc4sYvAxzse7tpA7hE)</translation>
</message>
<message>
<location line="+10"/>
<location line="+213"/>
<source>Choose an address from the address book</source>
<translation>Escolha um endereço do catálogo</translation>
</message>
<message>
<location line="-203"/>
<location line="+213"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-203"/>
<source>Paste address from clipboard</source>
<translation>Colar o endereço da área de transferência</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation>Entre a mensagem que você quer assinar aqui</translation>
</message>
<message>
<location line="+7"/>
<source>Signature</source>
<translation>Assinatura</translation>
</message>
<message>
<location line="+27"/>
<source>Copy the current signature to the system clipboard</source>
<translation>Copiar a assinatura para a área de transferência do sistema</translation>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this SwansonCoin address</source>
<translation>Assinar mensagem para provar que você é dono deste endereço SwansonCoin</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>Assinar &Mensagem</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation>Limpar todos os campos de assinatura da mensagem</translation>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation>Limpar Tudo</translation>
</message>
<message>
<location line="-87"/>
<source>&Verify Message</source>
<translation>&Verificar Mensagem</translation>
</message>
<message>
<location line="+6"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation>Forneça o endereço da assinatura, a mensagem (se assegure que você copiou quebras de linha, espaços, tabs, etc. exatamente) e a assinatura abaixo para verificar a mensagem. Cuidado para não ler mais na assinatura do que está escrito na mensagem propriamente, para evitar ser vítima de uma ataque do tipo "man-in-the-middle".</translation>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. RC74svrUSLCmjPGQrc4sYvAxzse7tpA7hE)</source>
<translation>O endereço usado para assinar a mensagem (ex. RC74svrUSLCmjPGQrc4sYvAxzse7tpA7hE)</translation>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified SwansonCoin address</source>
<translation>Verificar mensagem para se assegurar que ela foi assinada pelo dono de um endereço SwansonCoin específico.</translation>
</message>
<message>
<location line="+3"/>
<source>Verify &Message</source>
        <translation>Verificar &amp;Mensagem</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all verify message fields</source>
        <translation>Limpar todos os campos de verificação da mensagem</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a SwansonCoin address (e.g. RC74svrUSLCmjPGQrc4sYvAxzse7tpA7hE)</source>
<translation>Digite um endereço SwansonCoin (exemplo: RC74svrUSLCmjPGQrc4sYvAxzse7tpA7hE)</translation>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation>Clique em "Assinar Mensagem" para gerar a assinatura</translation>
</message>
<message>
<location line="+3"/>
<source>Enter SwansonCoin signature</source>
<translation>Entre com a assinatura SwansonCoin</translation>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation>O endereço fornecido é inválido.</translation>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation>Por favor, verifique o endereço e tente novamente.</translation>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation>O endereço fornecido não se refere a uma chave.</translation>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation>Destravamento da Carteira foi cancelado.</translation>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation>A chave privada para o endereço fornecido não está disponível.</translation>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation>Assinatura da mensagem falhou.</translation>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation>Mensagem assinada.</translation>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation>A assinatura não pode ser decodificada.</translation>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation>Por favor, verifique a assinatura e tente novamente.</translation>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation>A assinatura não corresponde ao "resumo da mensagem".</translation>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation>Verificação da mensagem falhou.</translation>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation>Mensagem verificada.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="+22"/>
<source>The SwansonCoin developers</source>
<translation>Desenvolvedores do SwansonCoin</translation>
</message>
<message>
<location line="+1"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+20"/>
<source>Open until %1</source>
<translation>Aberto até %1</translation>
</message>
<message>
<location line="+6"/>
<source>%1/offline</source>
<translation>%1/offline</translation>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/não confirmadas</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 confirmações</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation>Status</translation>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
        <translation><numerusform>, difundida através de %n nó</numerusform><numerusform>, difundida através de %n nós</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation>Fonte</translation>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation>Gerados</translation>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation>De</translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation>Para</translation>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation>seu próprio endereço</translation>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation>etiqueta</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation>Crédito</translation>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation><numerusform>matura em mais %n bloco</numerusform><numerusform>matura em mais %n blocos</numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation>não aceito</translation>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation>Débito</translation>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation>Taxa de transação</translation>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation>Valor líquido</translation>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation>Mensagem</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation>Comentário</translation>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation>ID da transação</translation>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>SwansonCoins gerados precisam maturar por 120 blocos antes de serem gastos. Quando você gera este bloco, ele é difundido na rede para ser adicionado ao blockchain. Se ele falhar ao ser acrescentado no blockchain, seu estado mudará para "não aceito" e não poderá ser gasto. Isso pode ocasionamente acontecer se outro nó gerou um bloco poucos segundos antes do seu.</translation>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation>Informação de depuração</translation>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation>Transação</translation>
</message>
<message>
<location line="+3"/>
<source>Inputs</source>
<translation>Entradas</translation>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>Quantidade</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation>verdadeiro</translation>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation>falso</translation>
</message>
<message>
<location line="-209"/>
<source>, has not been successfully broadcast yet</source>
<translation>, ainda não foi propagada na rede com sucesso.</translation>
</message>
<message numerus="yes">
<location line="-35"/>
<source>Open for %n more block(s)</source>
<translation><numerusform>Abrir para mais %n bloco</numerusform><numerusform>Abrir para mais %n blocos</numerusform></translation>
</message>
<message>
<location line="+70"/>
<source>unknown</source>
<translation>desconhecido</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Detalhes da transação</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Este painel mostra uma descrição detalhada da transação</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+225"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Tipo</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Endereço</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Quantidade</translation>
</message>
<message numerus="yes">
<location line="+57"/>
<source>Open for %n more block(s)</source>
<translation><numerusform>Abrir para mais %n bloco</numerusform><numerusform>Abrir para mais %n blocos</numerusform></translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation>Aberto até %1</translation>
</message>
<message>
<location line="+3"/>
<source>Offline (%1 confirmations)</source>
<translation>Offline (%1 confirmações)</translation>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
    <translation>Não confirmado (%1 de %2 confirmações)</translation>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Confirmado (%1 confirmações)</translation>
</message>
<message numerus="yes">
<location line="+8"/>
<source>Mined balance will be available when it matures in %n more block(s)</source>
<translation><numerusform>Saldo minerado vai estar disponível quando ele maturar em mais %n bloco</numerusform><numerusform>Saldo minerado vai estar disponível quando ele maturar em mais %n blocos</numerusform></translation>
</message>
<message>
<location line="+5"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Este bloco não foi recebido por nenhum outro participante da rede e provavelmente não será aceito!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>Gerado mas não aceito</translation>
</message>
<message>
<location line="+43"/>
<source>Received with</source>
<translation>Recebido por</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>Recebido de</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Enviado para</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Pagamento para você mesmo</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Minerado</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(n/a)</translation>
</message>
<message>
<location line="+199"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Status da transação. Passe o mouse sobre este campo para mostrar o número de confirmações.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Data e hora em que a transação foi recebida.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Tipo de transação.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>Endereço de destino da transação.</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Quantidade debitada ou creditada ao saldo.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+52"/>
<location line="+16"/>
<source>All</source>
<translation>Todos</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Hoje</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>Esta semana</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Este mês</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Mês passado</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Este ano</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Intervalo...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Recebido por</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Enviado para</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>Para você mesmo</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Minerado</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Outro</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>Procure um endereço ou etiqueta</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Quantidade mínima</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>Copiar endereço</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Copiar etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Copiar quantia</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation>Copiar ID da transação</translation>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>Editar etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation>Mostrar detalhes da transação</translation>
</message>
<message>
<location line="+139"/>
<source>Export Transaction Data</source>
<translation>Exportar Dados das Transações</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
    <translation>Arquivo separado por vírgulas (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>Confirmado</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Tipo</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Endereço</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Quantidade</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation>Erro ao exportar</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Não foi possível gravar no arquivo %1.</translation>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>Intervalo: </translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
    <translation>até</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+193"/>
<source>Send Coins</source>
    <translation>Enviar Moedas</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="+42"/>
<source>&Export</source>
<translation>&Exportar</translation>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation>Exportar os dados na aba atual para um arquivo</translation>
</message>
<message>
<location line="+193"/>
<source>Backup Wallet</source>
<translation>Fazer cópia de segurança da Carteira</translation>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation>Dados da Carteira (*.dat)</translation>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation>Cópia de segurança Falhou</translation>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation>Houve um erro ao tentar salvar os dados da carteira para uma nova localização.</translation>
</message>
<message>
<location line="+4"/>
<source>Backup Successful</source>
<translation>Backup feito com sucesso</translation>
</message>
<message>
<location line="+0"/>
<source>The wallet data was successfully saved to the new location.</source>
    <translation>Os dados da carteira foram salvos com sucesso na nova localização.</translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+94"/>
<source>SwansonCoin version</source>
<translation>Versão do SwansonCoin</translation>
</message>
<message>
<location line="+102"/>
<source>Usage:</source>
<translation>Uso:</translation>
</message>
<message>
<location line="-29"/>
<source>Send command to -server or swansoncoind</source>
<translation>Enviar comando para -server ou swansoncoind</translation>
</message>
<message>
<location line="-23"/>
<source>List commands</source>
<translation>Lista de comandos</translation>
</message>
<message>
<location line="-12"/>
<source>Get help for a command</source>
<translation>Obtenha ajuda sobre um comando</translation>
</message>
<message>
<location line="+24"/>
<source>Options:</source>
<translation>Opções:</translation>
</message>
<message>
<location line="+24"/>
<source>Specify configuration file (default: swansoncoin.conf)</source>
<translation>Especifique um arquivo de configurações (padrão: swansoncoin.conf)</translation>
</message>
<message>
<location line="+3"/>
<source>Specify pid file (default: swansoncoind.pid)</source>
<translation>Especifique um arquivo de pid (padrão: swansoncoind.pid)</translation>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation>Especificar diretório de dados</translation>
</message>
<message>
<location line="-9"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation>Definir o tamanho do cache do banco de dados em megabytes (padrão: 25)</translation>
</message>
<message>
<location line="-28"/>
<source>Listen for connections on <port> (default: 9333 or testnet: 19333)</source>
<translation>Procurar por conexões em <port> (padrão: 9333 ou testnet:19333)</translation>
</message>
<message>
<location line="+5"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>Manter no máximo <n> conexões aos peers (padrão: 125)</translation>
</message>
<message>
<location line="-48"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Conectar a um nó para receber endereços de participantes, e desconectar.</translation>
</message>
<message>
<location line="+82"/>
<source>Specify your own public address</source>
<translation>Especificar seu próprio endereço público</translation>
</message>
<message>
<location line="+3"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>Limite para desconectar peers mal comportados (padrão: 100)</translation>
</message>
<message>
<location line="-134"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation>Número de segundos para impedir que peers mal comportados reconectem (padrão: 86400)</translation>
</message>
<message>
<location line="-29"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation>Um erro ocorreu ao configurar a porta RPC %u para escuta em IPv4: %s</translation>
</message>
<message>
<location line="+27"/>
<source>Listen for JSON-RPC connections on <port> (default: 9332 or testnet: 19332)</source>
    <translation>Escutar conexões JSON-RPC na porta <port> (padrão: 9332 ou testnet: 19332)</translation>
</message>
<message>
<location line="+37"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>Aceitar linha de comando e comandos JSON-RPC</translation>
</message>
<message>
<location line="+76"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>Rodar em segundo plano como serviço e aceitar comandos</translation>
</message>
<message>
<location line="+37"/>
<source>Use the test network</source>
<translation>Usar rede de teste</translation>
</message>
<message>
<location line="-112"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>Aceitar conexões externas (padrão: 1 se opções -proxy ou -connect não estiverem presentes)</translation>
</message>
<message>
<location line="-80"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=swansoncoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "SwansonCoin Alert" [email protected]
</source>
<translation>%s, você deve especificar uma senha rpcpassword no arquivo de configuração:⏎
%s⏎
É recomendado que você use a seguinte senha aleatória:⏎
rpcuser=swansoncoinrpc⏎
rpcpassword=%s⏎
(você não precisa lembrar esta senha)⏎
O nome de usuário e a senha NÃO PODEM ser os mesmos.⏎
Se o arquivo não existir, crie um com permissão de leitura apenas para o dono.⏎
É recomendado também definir um alertnotify para que você seja notificado de problemas;⏎
por exemplo: alertnotify=echo %%s | mail -s "SwansonCoin Alert" [email protected]⏎
</translation>
</message>
<message>
<location line="+17"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation>Um erro ocorreu ao configurar a porta RPC %u para escuta em IPv6, voltando ao IPv4: %s</translation>
</message>
<message>
<location line="+3"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>Vincular ao endereço fornecido e sempre escutar nele. Use a notação [host]:port para IPv6</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot obtain a lock on data directory %s. SwansonCoin is probably already running.</source>
    <translation>Não foi possível obter acesso exclusivo ao diretório de dados %s. O SwansonCoin provavelmente já está rodando.</translation>
</message>
<message>
<location line="+3"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Erro: A transação foi rejeitada. Isso pode acontecer se alguns dos swansoncoins de sua carteira já haviam sido gastos, por exemplo se você usou uma cópia do arquivo wallet.dat e alguns swansoncoins foram gastos na cópia mas não foram marcados como gastos aqui.</translation>
</message>
<message>
<location line="+4"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
    <translation>Erro: Esta transação requer uma taxa de transação de pelo menos %s, por causa de sua quantidade, complexidade ou uso de dinheiro recebido recentemente.</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation>Executar comando quando um alerta relevante for recebido (%s no comando será substituído pela mensagem)</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>Executar comando quando uma transação da carteira mudar (%s no comando será substituído por TxID)</translation>
</message>
<message>
<location line="+11"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation>Determinar tamanho máximo de transações de alta-prioridade/baixa-taxa em bytes (padrão: 27000)</translation>
</message>
<message>
<location line="+6"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation>Este pode ser um build de teste pré-lançamento - use por sua conta e risco - não use para mineração ou aplicações de comércio.</translation>
</message>
<message>
<location line="+5"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>Cuidado: valor de -paytxfee escolhido é muito alto! Este é o valor da taxa de transação que você irá pagar se enviar a transação.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation>Cuidado: Transações mostradas podem não estar corretas! Você pode precisar atualizar, ou outros nós podem precisar atualizar o cliente.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong SwansonCoin will not work properly.</source>
    <translation>Cuidado: Por favor, verifique que a data e hora do seu computador estão corretas! Se o seu relógio estiver errado, o SwansonCoin não irá funcionar corretamente.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
    <translation>Cuidado: erro ao ler arquivo wallet.dat! Todas as chaves foram lidas corretamente, mas dados de transações e do catálogo de endereços podem estar faltando ou estar incorretos.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
    <translation>Aviso: wallet.dat corrompido, dados recuperados! Arquivo wallet.dat original salvo como wallet.{timestamp}.bak em %s; se seu saldo ou transações estiverem incorretos, você deve restaurar o backup.</translation>
</message>
<message>
<location line="+14"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation>Tentar recuperar chaves privadas de um arquivo wallet.dat corrompido</translation>
</message>
<message>
<location line="+2"/>
<source>Block creation options:</source>
<translation>Opções de criação de blocos:</translation>
</message>
<message>
<location line="+5"/>
<source>Connect only to the specified node(s)</source>
<translation>Conectar apenas a nó(s) específico(s)</translation>
</message>
<message>
<location line="+3"/>
<source>Corrupted block database detected</source>
<translation>Detectado Banco de dados de blocos corrompido</translation>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation>Descobrir os próprios endereços IP (padrão: 1 quando no modo listening e opção -externalip não estiver presente)</translation>
</message>
<message>
<location line="+1"/>
<source>Do you want to rebuild the block database now?</source>
<translation>Você quer reconstruir o banco de dados de blocos agora?</translation>
</message>
<message>
<location line="+2"/>
<source>Error initializing block database</source>
<translation>Erro ao inicializar banco de dados de blocos</translation>
</message>
<message>
<location line="+1"/>
<source>Error initializing wallet database environment %s!</source>
<translation>Erro ao inicializar ambiente de banco de dados de carteira %s!</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading block database</source>
<translation>Erro ao carregar banco de dados de blocos</translation>
</message>
<message>
<location line="+4"/>
<source>Error opening block database</source>
<translation>Erro ao abrir banco de dados de blocos</translation>
</message>
<message>
<location line="+2"/>
<source>Error: Disk space is low!</source>
<translation>Erro: Espaço em disco insuficiente!</translation>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation>Erro: Carteira travada, impossível criar transação!</translation>
</message>
<message>
<location line="+1"/>
<source>Error: system error: </source>
    <translation>Erro: erro de sistema: </translation>
</message>
<message>
<location line="+1"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>Falha ao escutar em qualquer porta. Use -listen=0 se você quiser isso.</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block info</source>
<translation>Falha ao ler informação de bloco</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block</source>
<translation>Falha ao ler bloco</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to sync block index</source>
<translation>Falha ao sincronizar índice de blocos</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block index</source>
<translation>Falha ao escrever índice de blocos</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block info</source>
<translation>Falha ao escrever informações de bloco</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block</source>
<translation>Falha ao escrever bloco</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write file info</source>
    <translation>Falha ao escrever informações de arquivo</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write to coin database</source>
<translation>Falha ao escrever banco de dados de moedas</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write transaction index</source>
<translation>Falha ao escrever índice de transações</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write undo data</source>
<translation>Falha ao escrever dados para desfazer ações</translation>
</message>
<message>
<location line="+2"/>
<source>Find peers using DNS lookup (default: 1 unless -connect)</source>
<translation>Procurar pares usando consulta de DNS (padrão: 1 a menos que a opção -connect esteja presente)</translation>
</message>
<message>
<location line="+1"/>
<source>Generate coins (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation>Quantos blocos checar ao inicializar (padrão: 288, 0 = todos)</translation>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-4, default: 3)</source>
<translation>Quão minuciosa é a verificação dos blocos (0-4, padrão: 3)</translation>
</message>
<message>
<location line="+19"/>
<source>Not enough file descriptors available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation>Reconstruir índice de blockchain a partir dos arquivos atuais blk000??.dat</translation>
</message>
<message>
<location line="+16"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
    <translation>Defina o número de threads para servir chamadas RPC (padrão: 4)</translation>
</message>
<message>
<location line="+26"/>
<source>Verifying blocks...</source>
<translation>Verificando blocos...</translation>
</message>
<message>
<location line="+1"/>
<source>Verifying wallet...</source>
<translation>Verificando carteira...</translation>
</message>
<message>
<location line="-69"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation>Importar blocos de um arquivo externo blk000??.dat</translation>
</message>
<message>
<location line="-76"/>
<source>Set the number of script verification threads (up to 16, 0 = auto, <0 = leave that many cores free, default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Information</source>
<translation>Informação</translation>
</message>
<message>
<location line="+3"/>
<source>Invalid -tor address: '%s'</source>
<translation>Endereço -tor inválido: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Maintain a full transaction index (default: 0)</source>
<translation>Manter índice completo de transações (padrão: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation>Buffer máximo de recebimento por conexão, <n>*1000 bytes (padrão: 5000)</translation>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation>Buffer máximo de envio por conexão, <n>*1000 bytes (padrão: 1000)</translation>
</message>
<message>
<location line="+2"/>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation>Apenas aceitar cadeia de blocos correspondente a marcas de verificação internas (padrão: 1)</translation>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation>Apenas conectar em nós na rede <net> (IPv4, IPv6, ou Tor)</translation>
</message>
<message>
<location line="+2"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation>Mostrar informações extras de depuração. Implica em outras opções -debug*</translation>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation>Mostrar informações extras de depuração da rede</translation>
</message>
<message>
<location line="+2"/>
<source>Prepend debug output with timestamp</source>
<translation>Pré anexar a saída de debug com estampa de tempo</translation>
</message>
<message>
<location line="+5"/>
<source>SSL options: (see the SwansonCoin Wiki for SSL setup instructions)</source>
<translation>Opções SSL: (veja a Wiki do SwansonCoin para instruções de configuração SSL)</translation>
</message>
<message>
<location line="+1"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation>Escolher versão do proxy socks a ser usada (4-5, padrão: 5)</translation>
</message>
<message>
<location line="+3"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Mandar informação de trace/debug para o console em vez de para o arquivo debug.log</translation>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation>Mandar informação de trace/debug para o debugger</translation>
</message>
<message>
<location line="+5"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation>Determinar tamanho máximo de bloco em bytes (padrão: 250000)</translation>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation>Determinar tamanho mínimo de bloco em bytes (padrão: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>Encolher arquivo debug.log ao iniciar o cliente (padrão 1 se opção -debug não estiver presente)</translation>
</message>
<message>
<location line="+1"/>
<source>Signing transaction failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation>Especifique o tempo limite (timeout) da conexão em milissegundos (padrão: 5000) </translation>
</message>
<message>
<location line="+4"/>
<source>System error: </source>
<translation>Erro de sistema:</translation>
</message>
<message>
<location line="+4"/>
<source>Transaction amount too small</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction amounts must be positive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction too large</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation>Usar UPnP para mapear porta de escuta (padrão: 0)</translation>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>Usar UPnP para mapear porta de escuta (padrão: 1 quando estiver escutando)</translation>
</message>
<message>
<location line="+1"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation>Usar proxy para alcançar serviços escondidos (padrão: mesmo que -proxy)</translation>
</message>
<message>
<location line="+2"/>
<source>Username for JSON-RPC connections</source>
<translation>Nome de usuário para conexões JSON-RPC</translation>
</message>
<message>
<location line="+4"/>
<source>Warning</source>
<translation>Cuidado</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>Cuidado: Esta versão está obsoleta, atualização exigida!</translation>
</message>
<message>
<location line="+1"/>
<source>You need to rebuild the databases using -reindex to change -txindex</source>
<translation>Você precisa reconstruir os bancos de dados usando -reindex para mudar -txindex</translation>
</message>
<message>
<location line="+1"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation>wallet.dat corrompido, recuperação falhou</translation>
</message>
<message>
<location line="-50"/>
<source>Password for JSON-RPC connections</source>
<translation>Senha para conexões JSON-RPC</translation>
</message>
<message>
<location line="-67"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>Permitir conexões JSON-RPC de endereços IP específicos</translation>
</message>
<message>
<location line="+76"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>Enviar comando para nó rodando em <ip> (padrão: 127.0.0.1)</translation>
</message>
<message>
<location line="-120"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Executar comando quando o melhor bloco mudar (%s no comando será substituído pelo hash do bloco)</translation>
</message>
<message>
<location line="+147"/>
<source>Upgrade wallet to latest format</source>
<translation>Atualizar carteira para o formato mais recente</translation>
</message>
<message>
<location line="-21"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>Determinar tamanho do pool de endereços para <n> (padrão: 100)</translation>
</message>
<message>
<location line="-12"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Re-escanear blocos procurando por transações perdidas da carteira</translation>
</message>
<message>
<location line="+35"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Usar OpenSSL (https) para conexões JSON-RPC</translation>
</message>
<message>
<location line="-26"/>
<source>Server certificate file (default: server.cert)</source>
<translation>Arquivo de certificado do servidor (padrão: server.cert)</translation>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation>Chave privada do servidor (padrão: server.pem)</translation>
</message>
<message>
<location line="-151"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation>Algoritmos de criptografia aceitos (padrão: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation>
</message>
<message>
<location line="+165"/>
<source>This help message</source>
<translation>Esta mensagem de ajuda</translation>
</message>
<message>
<location line="+6"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation>Impossível vincular a %s neste computador (bind retornou erro %d, %s)</translation>
</message>
<message>
<location line="-91"/>
<source>Connect through socks proxy</source>
<translation>Conectar através de um proxy socks</translation>
</message>
<message>
<location line="-10"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Permitir consultas DNS para -addnode, -seednode e -connect</translation>
</message>
<message>
<location line="+55"/>
<source>Loading addresses...</source>
<translation>Carregando endereços...</translation>
</message>
<message>
<location line="-35"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Erro ao carregar wallet.dat: Carteira corrompida</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet requires newer version of SwansonCoin</source>
<translation>Erro ao carregar wallet.dat: Carteira requer uma versão mais nova do SwansonCoin</translation>
</message>
<message>
<location line="+93"/>
<source>Wallet needed to be rewritten: restart SwansonCoin to complete</source>
<translation>A Carteira precisou ser reescrita: reinicie o SwansonCoin para completar</translation>
</message>
<message>
<location line="-95"/>
<source>Error loading wallet.dat</source>
<translation>Erro ao carregar wallet.dat</translation>
</message>
<message>
<location line="+28"/>
<source>Invalid -proxy address: '%s'</source>
<translation>Endereço -proxy inválido: '%s'</translation>
</message>
<message>
<location line="+56"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>Rede desconhecida especificada em -onlynet: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation>Versão desconhecida do proxy -socks requisitada: %i</translation>
</message>
<message>
<location line="-96"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation>Impossível encontrar o endereço -bind: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>Impossível encontrar endereço -externalip: '%s'</translation>
</message>
<message>
<location line="+44"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Quantidade inválida para -paytxfee=<quantidade>: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount</source>
<translation>Quantidade inválida</translation>
</message>
<message>
<location line="-6"/>
<source>Insufficient funds</source>
<translation>Saldo insuficiente</translation>
</message>
<message>
<location line="+10"/>
<source>Loading block index...</source>
<translation>Carregando índice de blocos...</translation>
</message>
<message>
<location line="-57"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Adicionar um nó com o qual se conectar e tentar manter a conexão ativa</translation>
</message>
<message>
<location line="-25"/>
<source>Unable to bind to %s on this computer. SwansonCoin is probably already running.</source>
<translation>Impossível vincular a %s neste computador. O SwansonCoin provavelmente já está rodando.</translation>
</message>
<message>
<location line="+64"/>
<source>Fee per KB to add to transactions you send</source>
<translation>Taxa por KB a ser acrescida nas transações que você enviar</translation>
</message>
<message>
<location line="+19"/>
<source>Loading wallet...</source>
<translation>Carregando carteira...</translation>
</message>
<message>
<location line="-52"/>
<source>Cannot downgrade wallet</source>
<translation>Não é possível fazer downgrade da carteira</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot write default address</source>
<translation>Não foi possível escrever no endereço padrão</translation>
</message>
<message>
<location line="+64"/>
<source>Rescanning...</source>
<translation>Re-escaneando...</translation>
</message>
<message>
<location line="-57"/>
<source>Done loading</source>
<translation>Carregamento terminado</translation>
</message>
<message>
<location line="+82"/>
<source>To use the %s option</source>
<translation>Para usar a opção %s</translation>
</message>
<message>
<location line="-74"/>
<source>Error</source>
<translation>Erro</translation>
</message>
<message>
<location line="-31"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation>Você precisa especificar rpcpassword=<senha> no arquivo de configurações:⏎
%s⏎
Se o arquivo não existir, crie um com permissão de leitura apenas pelo dono</translation>
</message>
</context>
</TS><|fim▁end|>
|
<location line="+5"/>
|
<|file_name|>ec2.py<|end_file_name|><|fim▁begin|># This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Michael DeHaan <[email protected]>, 2012-2013
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import re
from time import sleep
from ansible.module_utils.cloud import CloudRetry
try:
import boto
import boto.ec2 #boto does weird import stuff
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
try:
import boto3
import botocore
HAS_BOTO3 = True
except:
HAS_BOTO3 = False
try:
from distutils.version import LooseVersion
HAS_LOOSE_VERSION = True
except:
HAS_LOOSE_VERSION = False
from ansible.module_utils.six import string_types, binary_type, text_type
class AnsibleAWSError(Exception):
    """Base exception type raised by these AWS utility helpers."""
    pass
def _botocore_exception_maybe():
    """Return botocore's ClientError class when boto3 is installed.

    When boto3/botocore is absent, return ``type(None)`` instead so the
    result can still be used safely in ``except`` clauses.
    """
    if not HAS_BOTO3:
        return type(None)
    return botocore.exceptions.ClientError
class AWSRetry(CloudRetry):
    """CloudRetry policy for AWS: decides which API error codes are retried."""
    base_class = _botocore_exception_maybe()

    @staticmethod
    def status_code_from_exception(error):
        """Extract the AWS error code string from a botocore ClientError."""
        return error.response['Error']['Code']

    @staticmethod
    def found(response_code):
        """Return True when ``response_code`` represents a retryable failure.

        The retry list is based on this API Reference:
        http://docs.aws.amazon.com/AWSEC2/latest/APIReference/errors-overview.html
        """
        retry_on = [
            'RequestLimitExceeded', 'Unavailable', 'ServiceUnavailable',
            'InternalFailure', 'InternalError'
        ]
        # Escape the dot so only genuine '<Resource>.NotFound' codes match;
        # the previous pattern's bare '.' matched any character, so codes
        # such as 'SomethingXNotFound' were retried unintentionally.
        not_found = re.compile(r'^\w+\.NotFound')
        if response_code in retry_on or not_found.search(response_code):
            return True
        else:
            return False
def boto3_conn(module, conn_type=None, resource=None, region=None, endpoint=None, **params):
    """Build a boto3 client/resource via _boto3_conn, failing the Ansible
    module (instead of raising) when conn_type is invalid."""
    try:
        conn = _boto3_conn(conn_type=conn_type, resource=resource, region=region, endpoint=endpoint, **params)
    except ValueError:
        module.fail_json(msg='There is an issue in the code of the module. You must specify either both, resource or client to the conn_type parameter in the boto3_conn function call')
    else:
        return conn
def _boto3_conn(conn_type=None, resource=None, region=None, endpoint=None, **params):
    """Create boto3 connection objects.

    conn_type must be 'client', 'resource' or 'both'; raises ValueError
    otherwise. Returns a resource, a client, or a (client, resource) tuple.
    """
    profile = params.pop('profile_name', None)

    if conn_type not in ['both', 'resource', 'client']:
        raise ValueError('There is an issue in the calling code. You '
                         'must specify either both, resource, or client to '
                         'the conn_type parameter in the boto3_conn function '
                         'call')

    # Build the session once instead of once per requested object.
    session = boto3.session.Session(profile_name=profile)
    if conn_type == 'resource':
        return session.resource(resource, region_name=region, endpoint_url=endpoint, **params)
    elif conn_type == 'client':
        return session.client(resource, region_name=region, endpoint_url=endpoint, **params)
    else:
        # 'both': note the client must be built before `resource` (the
        # parameter) is rebound to the resource object.
        client = session.client(resource, region_name=region, endpoint_url=endpoint, **params)
        resource = session.resource(resource, region_name=region, endpoint_url=endpoint, **params)
        return client, resource
# Alias exposing _boto3_conn under a public, inventory-oriented name.
boto3_inventory_conn = _boto3_conn
def aws_common_argument_spec():
    """Return the argument-spec entries shared by every AWS module."""
    return {
        'ec2_url': dict(),
        'aws_secret_key': dict(aliases=['ec2_secret_key', 'secret_key'], no_log=True),
        'aws_access_key': dict(aliases=['ec2_access_key', 'access_key']),
        'validate_certs': dict(default=True, type='bool'),
        'security_token': dict(aliases=['access_token'], no_log=True),
        'profile': dict(),
    }
def ec2_argument_spec():
    """Return the common AWS argument spec extended with the EC2 'region' option."""
    spec = aws_common_argument_spec()
    spec['region'] = dict(aliases=['aws_region', 'ec2_region'])
    return spec
def get_aws_connection_info(module, boto3=False):
    """Resolve AWS connection settings for an Ansible module.

    Precedence per setting: module parameter first, then environment
    variables (AWS_* before the legacy EC2_* names), then boto/botocore
    configuration for the region.

    Returns a ``(region, ec2_url, boto_params)`` tuple where boto_params
    is a kwargs dict shaped for boto3 when ``boto3=True`` and for boto2
    otherwise.
    """
    # Check module args for credentials, then check environment vars
    # access_key
    ec2_url = module.params.get('ec2_url')
    access_key = module.params.get('aws_access_key')
    secret_key = module.params.get('aws_secret_key')
    security_token = module.params.get('security_token')
    region = module.params.get('region')
    profile_name = module.params.get('profile')
    validate_certs = module.params.get('validate_certs')
    if not ec2_url:
        if 'AWS_URL' in os.environ:
            ec2_url = os.environ['AWS_URL']
        elif 'EC2_URL' in os.environ:
            ec2_url = os.environ['EC2_URL']
    if not access_key:
        if 'AWS_ACCESS_KEY_ID' in os.environ:
            access_key = os.environ['AWS_ACCESS_KEY_ID']
        elif 'AWS_ACCESS_KEY' in os.environ:
            access_key = os.environ['AWS_ACCESS_KEY']
        elif 'EC2_ACCESS_KEY' in os.environ:
            access_key = os.environ['EC2_ACCESS_KEY']
        else:
            # in case access_key came in as empty string
            access_key = None
    if not secret_key:
        if 'AWS_SECRET_ACCESS_KEY' in os.environ:
            secret_key = os.environ['AWS_SECRET_ACCESS_KEY']
        elif 'AWS_SECRET_KEY' in os.environ:
            secret_key = os.environ['AWS_SECRET_KEY']
        elif 'EC2_SECRET_KEY' in os.environ:
            secret_key = os.environ['EC2_SECRET_KEY']
        else:
            # in case secret_key came in as empty string
            secret_key = None
    if not region:
        if 'AWS_REGION' in os.environ:
            region = os.environ['AWS_REGION']
        elif 'AWS_DEFAULT_REGION' in os.environ:
            region = os.environ['AWS_DEFAULT_REGION']
        elif 'EC2_REGION' in os.environ:
            region = os.environ['EC2_REGION']
        else:
            # No region anywhere in params/env: fall back to boto(3) config.
            if not boto3:
                # boto.config.get returns None if config not found
                region = boto.config.get('Boto', 'aws_region')
                if not region:
                    region = boto.config.get('Boto', 'ec2_region')
            elif HAS_BOTO3:
                # here we don't need to make an additional call, will default to 'us-east-1' if the below evaluates to None.
                region = botocore.session.get_session().get_config_variable('region')
            else:
                module.fail_json(msg="Boto3 is required for this module. Please install boto3 and try again")
    if not security_token:
        if 'AWS_SECURITY_TOKEN' in os.environ:
            security_token = os.environ['AWS_SECURITY_TOKEN']
        elif 'AWS_SESSION_TOKEN' in os.environ:
            security_token = os.environ['AWS_SESSION_TOKEN']
        elif 'EC2_SECURITY_TOKEN' in os.environ:
            security_token = os.environ['EC2_SECURITY_TOKEN']
        else:
            # in case security_token came in as empty string
            security_token = None
    if HAS_BOTO3 and boto3:
        # boto3 kwargs: session token and cert validation use boto3 names.
        boto_params = dict(aws_access_key_id=access_key,
                           aws_secret_access_key=secret_key,
                           aws_session_token=security_token)
        boto_params['verify'] = validate_certs
        if profile_name:
            boto_params['profile_name'] = profile_name
    else:
        # boto2 kwargs: equivalent settings under boto2 parameter names.
        boto_params = dict(aws_access_key_id=access_key,
                           aws_secret_access_key=secret_key,
                           security_token=security_token)
        # only set profile_name if passed as an argument
        if profile_name:
            boto_params['profile_name'] = profile_name
        boto_params['validate_certs'] = validate_certs
    # Normalize any byte-string values to text (py2/py3 compatibility).
    for param, value in boto_params.items():
        if isinstance(value, binary_type):
            boto_params[param] = text_type(value, 'utf-8', 'strict')
    return region, ec2_url, boto_params
def get_ec2_creds(module):
    """Compatibility helper for old modules that don't/can't yet use the
    ec2_connect method.

    Returns ``(ec2_url, access_key, secret_key, region)``.
    """
    region, ec2_url, params = get_aws_connection_info(module)
    access_key = params['aws_access_key_id']
    secret_key = params['aws_secret_access_key']
    return ec2_url, access_key, secret_key, region
def boto_fix_security_token_in_profile(conn, profile_name):
    """Monkey patch for boto issue boto/boto#2100: copy the profile's
    aws_security_token from boto config onto the connection's provider."""
    section = 'profile ' + profile_name
    if boto.config.has_option(section, 'aws_security_token'):
        token = boto.config.get(section, 'aws_security_token')
        conn.provider.set_security_token(token)
    return conn
def connect_to_aws(aws_module, region, **params):
    """Connect a boto service module (e.g. boto.ec2) to a region, raising
    AnsibleAWSError with a diagnostic message when the connection fails."""
    conn = aws_module.connect_to_region(region, **params)
    if not conn:
        # Distinguish "region unknown to this boto" from other failures.
        known_regions = [r.name for r in aws_module.regions()]
        if region in known_regions:
            raise AnsibleAWSError("Unknown problem connecting to region %s for aws module %s." % (region, aws_module.__name__))
        raise AnsibleAWSError("Region %s does not seem to be available for aws module %s. If the region definitely exists, you may need to upgrade boto or extend with endpoints_path" % (region, aws_module.__name__))
    profile = params.get('profile_name')
    if profile:
        conn = boto_fix_security_token_in_profile(conn, profile)
    return conn
def ec2_connect(module):
    """Return a boto EC2 connection built from the module's AWS parameters."""
    region, ec2_url, boto_params = get_aws_connection_info(module)

    def _attempt(factory, *args):
        # Translate connection/auth failures into a module failure.
        try:
            return factory(*args, **boto_params)
        except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
            module.fail_json(msg=str(e))

    if region:
        # Region specified: connect to its endpoint.
        ec2 = _attempt(connect_to_aws, boto.ec2, region)
    elif ec2_url:
        # No region: fall back to the legacy explicit-endpoint method.
        ec2 = _attempt(boto.connect_ec2_endpoint, ec2_url)
    else:
        module.fail_json(msg="Either region or ec2_url must be specified")
    return ec2
def paging(pause=0, marker_property='marker'):
    """Decorator adding marker-based pagination to boto retrieval functions.

    The attribute name holding the continuation marker is configurable
    because not all boto result sets use the same name. Older boto
    versions that reject the ``marker`` keyword fall back to a single
    unpaged call.
    """
    def wrapper(f):
        def page(*args, **kwargs):
            collected = []
            marker = None
            while True:
                try:
                    batch = f(*args, marker=marker, **kwargs)
                    marker = getattr(batch, marker_property)
                    collected.extend(batch)
                except TypeError:
                    # Older version of boto do not allow for marker param,
                    # just run normally.
                    collected = f(*args, **kwargs)
                    break
                if not marker:
                    break
                if pause:
                    sleep(pause)
            return collected
        return page
    return wrapper
<|fim▁hole|>def camel_dict_to_snake_dict(camel_dict):
def camel_to_snake(name):
import re
first_cap_re = re.compile('(.)([A-Z][a-z]+)')
all_cap_re = re.compile('([a-z0-9])([A-Z])')
s1 = first_cap_re.sub(r'\1_\2', name)
return all_cap_re.sub(r'\1_\2', s1).lower()
def value_is_list(camel_list):
checked_list = []
for item in camel_list:
if isinstance(item, dict):
checked_list.append(camel_dict_to_snake_dict(item))
elif isinstance(item, list):
checked_list.append(value_is_list(item))
else:
checked_list.append(item)
return checked_list
snake_dict = {}
for k, v in camel_dict.items():
if isinstance(v, dict):
snake_dict[camel_to_snake(k)] = camel_dict_to_snake_dict(v)
elif isinstance(v, list):
snake_dict[camel_to_snake(k)] = value_is_list(v)
else:
snake_dict[camel_to_snake(k)] = v
return snake_dict
def snake_dict_to_camel_dict(snake_dict):
    """Recursively convert snake_case dict keys into camelCase keys.

    Dicts and lists are rebuilt recursively; scalar values (and None)
    are returned unchanged.
    """
    def to_camel(word):
        parts = word.split('_')
        # Empty segments (double underscores) become a literal underscore.
        tail = ''.join(p.capitalize() or '_' for p in parts[1:])
        return parts[0] + tail

    def convert(node):
        if node is None:
            return
        rebuilt = type(node)()
        if isinstance(node, dict):
            for key, value in node.items():
                rebuilt[to_camel(key)] = convert(value)
        elif isinstance(node, list):
            for item in node:
                rebuilt.append(convert(item))
        else:
            return node
        return rebuilt

    return convert(snake_dict)
def ansible_dict_to_boto3_filter_list(filters_dict):
    """Convert an Ansible dict of filters into the list-of-dicts form boto3 expects.

    Args:
        filters_dict (dict): mapping of filter name -> value, where the value
            may be a single string or already a list of values.

    Returns:
        list: entries of the form ``{'Name': name, 'Values': [value, ...]}``.
    """
    filters_list = []
    for name, value in filters_dict.items():
        # A bare string is wrapped; anything else is assumed to be a list.
        values = [value] if isinstance(value, string_types) else value
        filters_list.append({'Name': name, 'Values': values})
    return filters_list
def boto3_tag_list_to_ansible_dict(tags_list):
    """Convert a boto3-style list of tag dicts into a flat key->value dict.

    Accepts both 'Key'/'Value' and lowercase 'key'/'value' entry styles
    (lowercase wins when both are present in one entry).

    Basic Usage:
        >>> boto3_tag_list_to_ansible_dict([{'Key': 'MyTagKey', 'Value': 'MyTagValue'}])
        {'MyTagKey': 'MyTagValue'}

    Returns:
        dict: mapping of tag key -> tag value.
    """
    result = {}
    for entry in tags_list:
        for key_name, value_name in (('key', 'value'), ('Key', 'Value')):
            if key_name in entry:
                result[entry[key_name]] = entry[value_name]
                break
    return result
def ansible_dict_to_boto3_tag_list(tags_dict):
    """Convert a flat key->value dict into a boto3-style list of tag dicts.

    Basic Usage:
        >>> ansible_dict_to_boto3_tag_list({'MyTagKey': 'MyTagValue'})
        [{'Key': 'MyTagKey', 'Value': 'MyTagValue'}]

    Returns:
        list: entries of the form ``{'Key': key, 'Value': value}``.
    """
    return [{'Key': key, 'Value': value} for key, value in tags_dict.items()]
def get_ec2_security_group_ids_from_names(sec_group_list, ec2_connection, vpc_id=None, boto3=True):
    """ Return list of security group IDs from security group names. Note that security group names are not unique
    across VPCs. If a name exists across multiple VPCs and no VPC ID is supplied, all matching IDs will be returned. This
    will probably lead to a boto exception if you attempt to assign both IDs to a resource so ensure you wrap the call in
    a try block
    """
    # boto3 returns security groups as dicts; boto2 returns objects with
    # attributes — these two helpers hide that difference.
    def get_sg_name(sg, boto3):
        if boto3:
            return sg['GroupName']
        else:
            return sg.name
    def get_sg_id(sg, boto3):
        if boto3:
            return sg['GroupId']
        else:
            return sg.id
    sec_group_id_list = []
    # Allow a single name to be passed as a bare string.
    if isinstance(sec_group_list, string_types):
        sec_group_list = [sec_group_list]
    # Get all security groups
    if boto3:
        if vpc_id:
            filters = [
                {
                    'Name': 'vpc-id',
                    'Values': [
                        vpc_id,
                    ]
                }
            ]
            all_sec_groups = ec2_connection.describe_security_groups(Filters=filters)['SecurityGroups']
        else:
            all_sec_groups = ec2_connection.describe_security_groups()['SecurityGroups']
    else:
        if vpc_id:
            filters = { 'vpc-id': vpc_id }
            all_sec_groups = ec2_connection.get_all_security_groups(filters=filters)
        else:
            all_sec_groups = ec2_connection.get_all_security_groups()
    # Names that do not match any existing group.
    unmatched = set(sec_group_list).difference(str(get_sg_name(all_sg, boto3)) for all_sg in all_sec_groups)
    sec_group_name_list = list(set(sec_group_list) - set(unmatched))
    if len(unmatched) > 0:
        # If we have unmatched names that look like an ID, assume they are
        import re
        sec_group_id_list[:] = [sg for sg in unmatched if re.match('sg-[a-fA-F0-9]+$', sg)]
        still_unmatched = [sg for sg in unmatched if not re.match('sg-[a-fA-F0-9]+$', sg)]
        if len(still_unmatched) > 0:
            raise ValueError("The following group names are not valid: %s" % ', '.join(still_unmatched))
    # Resolve the matched names to their group IDs.
    sec_group_id_list += [ str(get_sg_id(all_sg, boto3)) for all_sg in all_sec_groups if str(get_sg_name(all_sg, boto3)) in sec_group_name_list ]
    return sec_group_id_list
def sort_json_policy_dict(policy_dict):
    """Return a copy of an IAM JSON policy dict with every list sorted.

    Two policies that differ only in list ordering then compare equal.
    Note that elements sort by their own values (so strings sort
    lexicographically), and dict elements sort by their sorted
    (key, value) pairs since Python 3 cannot compare dicts with ``<``.

    Args:
        policy_dict (dict): dict representing an IAM JSON policy.

    Returns:
        dict: a copy of the policy with all nested lists sorted.
    """
    def sort_list(items):
        converted = []
        for element in items:
            if isinstance(element, dict):
                converted.append(sort_json_policy_dict(element))
            elif isinstance(element, list):
                converted.append(sort_list(element))
            else:
                converted.append(element)
        converted.sort(key=lambda x: sorted(x.items()) if isinstance(x, dict) else x)
        return converted

    result = {}
    for key, value in policy_dict.items():
        if isinstance(value, dict):
            result[key] = sort_json_policy_dict(value)
        elif isinstance(value, list):
            result[key] = sort_list(value)
        else:
            result[key] = value
    return result
def map_complex_type(complex_type, type_map):
    """Cast the elements of a nested structure according to ``type_map``.

    Example of usage:

        DEPLOYMENT_CONFIGURATION_TYPE_MAP = {
            'maximum_percent': 'int',
            'minimum_healthy_percent': 'int'
        }
        deployment_configuration = map_complex_type(module.params['deployment_configuration'],
                                                    DEPLOYMENT_CONFIGURATION_TYPE_MAP)

    This ensures all keys within the root element are casted and valid integers.

    NOTE(review): behavior quirks preserved from the original — a dict with
    any key missing from type_map is returned untouched, and a scalar with a
    falsy type_map yields an empty instance of its own type.
    """
    if complex_type is None:
        return
    mapped = type(complex_type)()
    if isinstance(complex_type, dict):
        for key in complex_type:
            if key not in type_map:
                # Unmapped key: hand the structure back unchanged.
                return complex_type
            target = type_map[key]
            if isinstance(target, list):
                mapped[key] = map_complex_type(complex_type[key], target[0])
            else:
                mapped[key] = map_complex_type(complex_type[key], target)
    elif isinstance(complex_type, list):
        for element in complex_type:
            mapped.append(map_complex_type(element, type_map))
    elif type_map:
        # Resolve the builtin named by ``type_map`` (e.g. 'int') and cast.
        return globals()['__builtins__'][type_map](complex_type)
    return mapped
| |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>// Dependencies
var UFirst = require("ucfirst");
/**
* CrossStyle
* Returns an array of cross-browser CSS properties for given input.
*
* @name CrossStyle
* @function
* @param {String} input The CSS property (e.g. `"transform"` or `"transformOrigin"`).
* @return {Array} An array of strings representing the cross-browser CSS properties for the given input.
*/
function CrossStyle(input) {
    var uInput = UFirst(input);
    // Vendor-prefixed variants first, unprefixed property last.
    // Restores the "moz" entry that had been displaced out of the array.
    return [
        "webkit" + uInput
      , "moz" + uInput
      , "ms" + uInput
      , "o" + uInput
      , input
    ];
}

module.exports = CrossStyle;
<|file_name|>backup1.py<|end_file_name|><|fim▁begin|>import cookielib
import urllib2
import urllib
import json
import time
#Default Settings for a system to keep cookies, please add it before testing
# Module-level setup: build one shared CookieJar and install a cookie-aware
# opener globally, so every urllib2 request in this module shares cookies.
cj = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
urllib2.install_opener(opener)
# Let the single cookie system override the current openurl
#Cookies Info
def INITCOOKIES():
    """Re-initialize cookie handling with a fresh, empty CookieJar."""
    cj = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    # Install the opener globally; without this call the freshly built
    # opener was discarded and the function had no effect (this mirrors
    # the module-level setup above).
    urllib2.install_opener(opener)
def COOKIE_TO_FILE(filename):
    """Fetch http://www.baidu.com and save the cookies it sets to
    `filename` in Mozilla cookie-file format."""
    jar = cookielib.MozillaCookieJar(filename)
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar))
    opener.open("http://www.baidu.com")
    jar.save(ignore_discard=True, ignore_expires=True)
def FILE_TO_COOKIE(filename):
cookie = cookielib.MozillaCookieJar()
#Initiallize the Cookies environment
cookie.load(filename, ignore_discard=True, ignore_expires=True)
req = urllib2.Request("http://www.baidu.com")
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie))
#opener are similar to urlopen and have the equal functionalities
response = opener.open(req)
print response.read()
#Helper Functions
def get_current_time():
    """Return the local time formatted as 'YYYY-MM-DD HH:MM:SS'."""
    now = time.localtime()
    return time.strftime("%F %T", now)
def title_exporter(dictionary):
    # Recursively collect the key names ("titles") of a nested structure:
    # each dict level contributes its keys() plus the titles of any dict
    # values and of the first element of list values (when it is a dict).
    my_dict_titles = []
    try:
        my_dict_titles.append(dictionary.keys())
        for key in dictionary:
            if isinstance(dictionary[key], dict):
                my_dict_titles.append(title_exporter(dictionary[key]))
            if isinstance(dictionary[key], list):
                if len(dictionary[key]):
                    if isinstance(dictionary[key][0],dict):
                        my_dict_titles.append(title_exporter(dictionary[key][0]))
    except:
        # Bare except: reached when `dictionary` has no .keys() (e.g. a list).
        # NOTE(review): there is no emptiness check here — an empty list
        # would raise IndexError on dictionary[0]; confirm callers never
        # pass one.
        if isinstance(dictionary, list):
            my_dict_titles.append(title_exporter(dictionary[0]))
    #print my_dict_titles
    return my_dict_titles
#Major Functions
def POST(url, data, header_type = "application/json", encodejson = True):
if encodejson:
data = json.dumps(data)
req = urllib2.Request(url, data)
else:
data = urllib.urlencode(data)
req = urllib2.Request(url, data)
req.add_header('Content-Type', header_type)
source_code = ErrorOut(req)
try:
print json.loads(source_code)['message']
except:
print "Faulty Data Structure!"
#print source_code
return source_code
def GET(url,data = "", header_type = "application/json",encodejson = False):
if encodejson:
data = json.dumps(data)
else:
data = urllib.urlencode(data)
geturl = url + "?" + data
req = urllib2.Request(geturl)
req.add_header('Content-Type', header_type)<|fim▁hole|> print json.loads(source_code)['message']
except:
print "Faulty Data Structure!"
#print source_code
return source_code
def DELETE(url,data = "", header_type = "application/json",encodejson = True):
#Without Functionality test
if encodejson:
data = json.dumps(data)
else:
data = urllib.urlencode(data)
req = urllib2.Request(url, data)
req.add_header('Content-Type', header_type)
req.get_method = lambda: 'DELETE'
source_code = ErrorOut(req)
try:
print json.loads(source_code)['message']
except:
print "Faulty Data Structure!"
return source_code
def PUT(url,data = "", header_type = "application/json",encodejson = True):
if encodejson:
data = json.dumps(data)
else:
data = urllib.urlencode(data)
req = urllib2.Request(url, data)
req.add_header('Content-Type', header_type)
req.get_method = lambda: 'PUT'
resp = ErrorOut(req)
source_code = ErrorOut(req)
try:
print json.loads(source_code)['message']
except:
print "Faulty Data Structure!"
return source_code
def ErrorOut(req):
#Later for using in the test
try:
resp = urllib2.urlopen(req)
#print "Passed Basic Access!"
return resp.read()
except urllib2.URLError, e:
if hasattr(e,"code"):
print e.code
if hasattr(e,"reason"):
print e.reason
else:
print "OK"
return None
class News:
def __init__(self):
self.url_manager = []
self.iQuickerUrl = "http://testwww.iquicker.com.cn/iquicker_web/login"
self.newsurl = "http://testwww.iquicker.com.cn/iquicker_web/newstype/datas"
self.all_news = "http://testwww.iquicker.com.cn/iquicker_web/news/datas"
self.news_id = "http://testwww.iquicker.com.cn/iquicker_web/news/data/num"
self.news_root = "http://testwww.iquicker.com.cn/iquicker_web/news/data/"
self.id_info = ""
self.data_manager = []
self.my_dict = {"username":"15611765076","password":"MTIzNDU2Nzg=","rememberMe":True,"org":"ff808081557080a6015575e3d9300330"}
self.all_news_data = {"pageNo" : 1, "pageSize" : 20, "sortInfo" : "DESC_isUp_isUpTime_publishTime"}
def Login_to_system(self):
print "in Login System..."
POST(self.iQuickerUrl, self.my_dict)
def get_news_type(self):
print "in News Type..."
GET(self.newsurl)
def get_news_data(self):
print "in news Data..."
Dict = GET(self.all_news, self.all_news_data)
Dict = json.loads(Dict)
#print Dict
Dict = Dict['data']['list'][0]
self.id_info = Dict['id']
def get_news_id(self):
print "in news id..."
GET(self.news_root + str(self.id_info))
'''
My_news = News()
My_news.Login_to_system()
My_news.get_news_data()
My_news.get_news_id()
My_news.get_news_type()
'''
'''
iQuickerUrl = "http://testwww.iquicker.com.cn/iquicker_web/login"
my_dict = {"username":"15611765076","password":"MTIzNDU2Nzg=","rememberMe":True,"org":"ff808081557080a6015575e3d9300330"}
POST(iQuickerUrl, my_dict)
newsurl = "http://testwww.iquicker.com.cn/iquicker_web/newstype/datas"
GET(newsurl)
all_news = "http://testwww.iquicker.com.cn/iquicker_web/news/datas"
all_news_data = {"pageNo" : 1, "pageSize" : 20, "sortInfo" : "DESC_isUp_isUpTime_publishTime"}
news_data_dict = GET(all_news, all_news_data)
news_id = "http://testwww.iquicker.com.cn/iquicker_web/news/data/num"
GET(news_id)
news_data_dict = json.loads(news_data_dict)
#print news_data_dict
news_data_dict = news_data_dict['data']['list'][0]
news_data_dict = news_data_dict['id']
news1 = "http://testwww.iquicker.com.cn/iquicker_web/news/data/"+str(news_data_dict)
GET(news1)
'''
class Tasks:
    """Smoke tester for the iQuicker "tasks" REST endpoints.

    Each check method calls one endpoint, validates the JSON envelope
    against an expected key template via determine_error(), and bumps
    self.times.  Failures are accumulated (never raised) and reported by
    show_off_all_data().
    """
    def __init__(self):
        # Endpoint URLs of the test deployment.
        self.iQuickerUrl = "http://testwww.iquicker.com.cn/iquicker_web/login"
        self.personal_info = "http://testwww.iquicker.com.cn/iquicker_web/login/user"
        self.get_user_data = "http://testwww.iquicker.com.cn/iquicker_web/mobile/ad_books"
        self.task_info = "http://testwww.iquicker.com.cn/iquicker_web/task/tasks/"
        self.discuss_on_tasks = "http://testwww.iquicker.com.cn/iquicker_web/discuss/data"
        self.discuss_list = "http://testwww.iquicker.com.cn/iquicker_web/discusslist/data/"
        # Check name -> numeric id used in the failure report.
        self.function_name = {"login" : 1, "Get Personal Data" : 2, "Get Name List" : 3, "Get Unfinished" : 4, "Get Finished" : 5,
                              "Label Finished" : 6, "Label UnFinished" : 7, "Post Tasks" : 8, "Modify Task" : 9, "Delete Task" : 10,
                              "Comment on Task" : 11, "Discuss List" : 12}
        self.basic_struct = ['status', 'message', 'data', 'success']
        self.error_count = []         # ids of failed checks
        self.url_list = []            # URLs of failed checks
        self.port_type_warning = []   # ids of endpoints lacking a 'success' flag
        self.times = 0                # number of checks run
        self.id_book = []             # user uuids from the address book
        self.name_book = []           # user names from the address book
        self.Task_id = []             # ids of unfinished tasks
        self.Finished_Task_id = []    # ids of finished tasks
        self.my_dict = {"username":"15611765076","password":"MTIzNDU2Nzg=","rememberMe":True,"org":"ff808081557080a6015575e3d9300330"}
    def login(self):
        """POST the canned credentials and validate the login envelope."""
        print "in Login System..."
        template = [[u'status', u'message', u'data', u'success'], [[u'orgs', u'initialised']]]
        self.determine_error(POST(self.iQuickerUrl, self.my_dict), "login",template, self.iQuickerUrl)
        self.times += 1
    def get_personal_data(self):
        """GET the logged-in user's profile and validate its key layout."""
        print "Fetching personal info..."
        template = [[u'status', u'success', u'orgName', u'orgLogoWhite', u'orgLogoColour',
                     u'orgInnerEmailStatus', u'theme', u'orgCode', u'message', u'data'],
                    [[u'hometown', u'idcard', u'bankCard', u'telephone', u'statusReason',
                      u'sex', u'pinyinPrefix', u'id', u'innerEmail', u'img', u'innerEmailContact',
                      u'joindate', u'department', u'shortname', u'type', u'email', u'status', u'fax',
                      u'isTrialAccount', u'pinyin', u'qualifications', u'birthday', u'address', u'org',
                      u'createTime', u'itcode', u'name', u'mobile', u'prefixId', u'sn', u'signature',
                      u'position', u'joinTime', u'enname'],
                     [[u'org', u'subDept', u'id', u'deptManager', u'parDept', u'flag2', u'shortname',
                       u'status', u'usable', u'flag', u'zfield9', u'zfield8', u'zfield5', u'zfield4',
                       u'zfield7', u'zfield6', u'zfield1', u'zfield3', u'zfield2', u'name', u'zfield10',
                       u'prefixId', u'sn', u'root']]]]
        self.determine_error(GET(self.personal_info), "Get Personal Data",template, self.personal_info)
        self.times += 1
    def get_name_list(self):
        """GET the address book; cache each entry's uuid and name."""
        print "Fetching Namelist..."
        user_data = GET(self.get_user_data)
        template = [[[u'tel', u'uuid', u'mobile', u'piny', u'position', u'deptname', u'id', u'name']]]
        self.determine_error(user_data, "Get Name List", template, self.get_user_data)
        user_data = json.loads(user_data)
        for i in range(len(user_data)):
            self.id_book.append(user_data[i]['uuid'])
            self.name_book.append(user_data[i]['name'])
        self.times += 1
    def get_unfinished(self):
        """GET page 1 of unfinished tasks; refresh self.Task_id."""
        print "Fetching Unfinished task id..."
        self.Task_id = []
        Data_tasks = {"isOver" : False, "page" : 1, "pageSize" : 20, "sortType" : 1, "type" : 0}
        Task_info = GET(self.task_info,Data_tasks)
        template = [[u'status', u'message', u'data', u'success'],
                    [[u'sort', u'last', u'size', u'number', u'content', u'totalPages', u'first',
                      u'totalElements', u'numberOfElements'],
                     [[u'endDate', u'overUserId', u'id', u'publishScopeName', u'subject', u'write',
                       u'overStatus', u'createDate', u'detail', u'priority', u'participants', u'publishScope',
                       u'shared', u'principals', u'overDate', u'readRight', u'createUser', u'org',
                       u'labelObjectList', u'createName', u'shareUserIds', u'writeRight', u'attList'],
                      [[u'id', u'name']], [[u'id', u'name']]]]]
        self.determine_error(Task_info, "Get Unfinished", template, self.task_info)
        Task_info = json.loads(Task_info)
        Task_info = Task_info['data']['content']
        for i in range(len(Task_info)):
            self.Task_id.append(Task_info[i]['id'])
        self.times += 1
    def get_finished(self):
        """GET page 1 of finished tasks; refresh self.Finished_Task_id."""
        print "Fetching finished task id..."
        self.Finished_Task_id = []
        Data_tasks = {"isOver" : True, "page" : 1, "pageSize" : 20, "sortType" : 1, "type" : 0}
        Task_info = GET(self.task_info,Data_tasks)
        template = [[u'status', u'message', u'data', u'success'],
                    [[u'sort', u'last', u'size', u'number', u'content', u'totalPages', u'first',
                      u'totalElements', u'numberOfElements'],
                     [[u'endDate', u'overUserId', u'id', u'publishScopeName', u'subject', u'write',
                       u'overStatus', u'createDate', u'detail', u'priority', u'participants', u'publishScope',
                       u'shared', u'principals', u'overDate', u'readRight', u'createUser', u'org',
                       u'labelObjectList', u'createName', u'shareUserIds', u'writeRight', u'attList'],
                      [[u'id', u'name']], [[u'id', u'name']]]]]
        # NOTE(review): title_exporter is defined elsewhere in this file; it
        # is re-invoked here apparently just for its side effect - confirm.
        result = self.determine_error(Task_info, "Get Finished", template, self.task_info)
        title_exporter(json.loads(result))
        Task_info = json.loads(Task_info)
        Task_info = Task_info['data']['content']
        for i in range(len(Task_info)):
            self.Finished_Task_id.append(Task_info[i]['id'])
        self.times += 1
    def Label_finished(self):
        """PUT the newest unfinished task to 'completed'.

        Assumes at least one unfinished task exists (IndexError otherwise).
        """
        print "Label Unfinished Task->Finished"
        self.get_unfinished()
        Label_url = self.task_info + str(self.Task_id[-1]) + "/completion"
        template = [[u'status', u'message', u'success']]
        self.determine_error(PUT(Label_url), "Label Finished", template, Label_url)
        self.times += 1
    def Label_unfinished(self):
        """PUT the newest finished task back to 'incomplete'."""
        print "Label Finished Task->Unfinished"
        self.get_finished()
        #print self.Finished_Task_id
        Label_url = self.task_info + str(self.Finished_Task_id[-1]) + "/incompletion"
        template = [[u'status', u'message', u'success']]
        self.determine_error(PUT(Label_url), "Label UnFinished", template, Label_url)
        self.times += 1
    def post_task(self):
        """POST a new canned task assigned to the first address-book user."""
        print "Posting new task now..."
        Post_kernel = {"subject" : "RobotSend", "principals" : [{"id": self.id_book[0] , "name": self.name_book[0]}], "participants":[{"id": self.id_book[0] , "name": self.name_book[0]}], "endDate" : "2017-06-24T16:00:00.000Z", "priority" : 3, "detail" : "This is a Test Message" , "shared" : False , "publishScope" : ["company"], "publishScopeName" : ["/u5168/u516C/u53F8"]}
        template = [[u'status', u'message', u'data', u'success'],
                    [[u'endDate', u'overUserId', u'id', u'publishScopeName',
                      u'subject', u'write', u'overStatus', u'createDate',
                      u'detail', u'priority', u'participants', u'publishScope',
                      u'shared', u'principals', u'overDate', u'readRight', u'createUser',
                      u'org', u'labelObjectList', u'createName', u'shareUserIds', u'writeRight',
                      u'attList'], [[u'id', u'name']], [[u'id', u'name']]]]
        self.determine_error(POST(self.task_info,Post_kernel), "Post Tasks", template, self.task_info)
        self.times += 1
    def modify_task(self):
        """POST an update to the newest unfinished task."""
        print "Modifying new task now..."
        self.get_unfinished()
        Post_kernel = {"id": self.Task_id[-1], "subject" : "RobotSendModify", "principals" : [{"id": self.id_book[0] , "name": self.name_book[0]}], "participants":[{"id": self.id_book[0] , "name": self.name_book[0]}], "endDate" : "2017-06-24T16:00:00.000Z", "priority" : 3, "detail" : "This is a Modified Message" , "shared" : False ,"attList": None, "publishScope" : ["company"], "publishScopeName" : ["/u5168/u516C/u53F8"]}
        template = [[u'status', u'message', u'data', u'success'],
                    [[u'endDate', u'overUserId', u'id', u'publishScopeName',
                      u'subject', u'write', u'overStatus', u'createDate', u'detail',
                      u'priority', u'participants', u'publishScope', u'shared',
                      u'principals', u'overDate', u'readRight', u'createUser',
                      u'org', u'labelObjectList', u'createName', u'shareUserIds',
                      u'writeRight', u'attList'], [[u'id', u'name']], [[u'id', u'name']]]]
        self.determine_error(POST(self.task_info,Post_kernel), "Modify Task", template, self.task_info)
        self.times += 1
    def delete_task(self):
        """DELETE the newest unfinished task."""
        print "deleting task now..."
        self.get_unfinished()
        Delete_url = self.task_info + str(self.Task_id[-1])
        template = [[u'status', u'message', u'success']]
        self.determine_error(DELETE(Delete_url), "Delete Task", template, Delete_url)
        self.times += 1
    def commment_on_tasks(self):
        """POST a comment onto the newest unfinished task."""
        print "Posting discuss on the Tasks..."
        self.get_unfinished()
        kernel = {"discussType":"task","masterId":self.Task_id[-1],"discussedId":"","discussedUserId":"","discussedUserName":"","content":"Looks nice!","isFile":"N","publishTime":get_current_time(),"relay":0,"attList":[],"userIdNames":""}
        template = [[u'status', u'message', u'data', u'success'],
                    [[u'storePeopleId', u'userId', u'relayTimes', u'id', u'publishScopeName',
                      u'write', u'createDate', u'goodPeopleId', u'content', u'publishScope',
                      u'type', u'discussList', u'companyName', u'deleted', u'readRight', u'createUser',
                      u'org', u'userName', u'shareUserIds', u'publishTime', u'writeRight', u'attList']]]
        self.determine_error(POST(self.discuss_on_tasks, kernel), "Comment on Task", template, self.discuss_on_tasks)
        self.times += 1
    def get_disscuss_list(self):
        """GET the discussion thread of the newest unfinished task."""
        print "Get all Discuss data for a task..."
        self.get_unfinished()
        url = self.discuss_list + self.Task_id[-1]
        template = [[u'status', u'message', u'data', u'success'],
                    [[u'discussList', u'createDate', u'shareUserIds', u'write',
                      u'masterId', u'readRight', u'writeRight', u'createUser',
                      u'org', u'id', u'attList'],
                     [[u'userName', u'content', u'discussedUserId', u'userIdNames',
                       u'relay', u'createDate', u'isFile', u'shareUserIds',
                       u'publishScope', u'userId', u'publishTime', u'discussedId',
                       u'write', u'masterId', u'readRight', u'writeRight', u'createUser',
                       u'org', u'discussedUserName', u'id', u'attList']]]]
        self.determine_error(GET(url), "Discuss List", template, url)
        self.times += 1
    def determine_error(self,data, name, template=[], url=""):
        """Record check `name` as failed when `data` is missing, reports
        success=False, or its key layout differs from `template`; returns
        `data` unchanged.

        NOTE(review): `template=[]` is a shared mutable default - harmless
        here since it is never mutated, but worth confirming.
        """
        result = True
        try:
            result = json.loads(data)['success']
        except:
            print "There is no success options"
            self.port_type_warning.append(self.function_name[name])
        if (data == None or not result or template != title_exporter(json.loads(data))):
            self.error_count.append(self.function_name[name])
            self.url_list.append(url)
        return data
    def show_off_all_data(self):
        """Print the run summary: counts, failed check ids and their URLs."""
        print "................................................"
        print "Function runs: " + str(self.times) + " times"
        print "Error Counts: " + str(len(self.error_count)) + " times"
        print "Failure in: " + str(self.error_count)
        print "Not supported port: " + str(self.port_type_warning)
        print "Failure Url:"
        for url in self.url_list:
            print url
        print "Please check the dictionary for more information"
# Smoke-run: exercise every Tasks endpoint once, then print the summary.
My_task = Tasks()
# BUG FIX: this line used to read "My_task.login() == None" - the comparison
# result was computed and silently discarded; only the call is wanted.
My_task.login()
My_task.get_personal_data()
My_task.get_name_list()
My_task.post_task()
My_task.commment_on_tasks()
My_task.modify_task()
My_task.Label_finished()
My_task.Label_unfinished()
My_task.delete_task()
My_task.get_disscuss_list()
My_task.show_off_all_data()
'''
#Login the system
iQuickerUrl = "http://testwww.iquicker.com.cn/iquicker_web/login"
my_dict = {"username":"15611765076","password":"MTIzNDU2Nzg=","rememberMe":True,"org":"ff808081557080a6015575e3d9300330"}
POST(iQuickerUrl, my_dict)
#personal_info
get_personal = "http://testwww.iquicker.com.cn/iquicker_web/login/user"
GET(get_personal)
#Get_user_data
Get_user_data = "http://testwww.iquicker.com.cn/iquicker_web/mobile/ad_books"
user_data = GET(Get_user_data)
user_data = json.loads(user_data)
id_book = []
name_book = []
for i in range(len(user_data)):
id_book.append(user_data[i]['uuid'])
name_book.append(user_data[i]['name'])
#Get Unfinished Task List
Tasks_url = "http://testwww.iquicker.com.cn/iquicker_web/task/tasks"
Data_tasks = {"isOver" : False, "page" : 1, "pageSize" : 20, "sortType" : 1, "type" : 0}
Task_info = GET(Tasks_url,Data_tasks)
Task_id = []
Task_info = json.loads(Task_info)
Task_info = Task_info['data']['content']
for i in range(len(Task_info)):
Task_id.append(Task_info[i]['id'])
#Get Finished Task List
Tasks_url = "http://testwww.iquicker.com.cn/iquicker_web/task/tasks"
Data_tasks = {"isOver" : True, "page" : 1, "pageSize" : 20, "sortType" : 1, "type" : 0}
Task_info = GET(Tasks_url,Data_tasks)
Finished_Task_id = []
Task_info = json.loads(Task_info)
Task_info = Task_info['data']['content']
for i in range(len(Task_info)):
Finished_Task_id.append(Task_info[i]['id'])
#Start Posting Tasks!
Post_task_url = "http://testwww.iquicker.com.cn/iquicker_web/task/tasks"
Post_kernel = {"subject" : "RobotSend", "principals" : [{"id": id_book[0] , "name": name_book[0]}], "participants":[{"id": id_book[0] , "name": name_book[0]}], "endDate" : "2017-06-24T16:00:00.000Z", "priority" : 3, "detail" : "This is a Test Message" , "shared" : False , "publishScope" : ["company"], "publishScopeName" : ["/u5168/u516C/u53F8"]}
POST(Post_task_url,Post_kernel)
#Start Modifying Tasks!
Post_task_url = "http://testwww.iquicker.com.cn/iquicker_web/task/tasks"
Post_kernel = {"id": Task_id[-1], "subject" : "RobotSendModify", "principals" : [{"id": id_book[0] , "name": name_book[0]}], "participants":[{"id": id_book[0] , "name": name_book[0]}], "endDate" : "2017-06-24T16:00:00.000Z", "priority" : 3, "detail" : "This is a Modified Message" , "shared" : False ,"attList": None, "publishScope" : ["company"], "publishScopeName" : ["/u5168/u516C/u53F8"]}
POST(Post_task_url,Post_kernel)
#Label_finished
Label_url = "http://testwww.iquicker.com.cn/iquicker_web/task/tasks/" + str(Task_id[-1]) + "/completion"
PUT(Label_url)
#Label_Unfinished
UnLabel_url = "http://testwww.iquicker.com.cn/iquicker_web/task/tasks/" + str(Finished_Task_id[-1]) + "/incompletion"
PUT(UnLabel_url)
#Start Delete the Task!
Delete_url = "http://testwww.iquicker.com.cn/iquicker_web/task/tasks/" + str(Task_id[-1])
#go_To_TASK
DELETE(Delete_url)
'''
#for item in cj:
# print 'Name = '+item.name
# print 'Value = '+item.value<|fim▁end|>
|
source_code = ErrorOut(req)
try:
|
<|file_name|>ipc_lista2.16.py<|end_file_name|><|fim▁begin|>#EQUIPE 2
#Nahan Trindade Passos - 1615310021
#Ana Beatriz Frota - 1615310027
#
#
#
#
#
#
import math
print("Digite os termos da equacao ax2+bx+c")
a = float(input("Digite o valor de A:\n"))
if(a==0):
print("Nao e uma equacao de segundo grau")
else:
b = float(input("Valor de B:\n"))
c = float(input("Valor de C:\n"))
delta = (math.pow(b,2) - (4*a*c))
if(delta<0):
print("A equacao nao possui raizes reais")
elif(delta == 0):
raiz = ((-1)*b + math.sqrt(delta))/(2*a)
print("A equacao possui apenas uma raiz",raiz)
else:
raiz1 = ((-1)*b + math.sqrt(delta))/(2*a)
raiz2 = ((-1)*b - math.sqrt(delta))/(2*a)<|fim▁hole|> print("A equacao possui duas raizes")
print("Primeira raiz:",raiz1)
print("Segunda raiz:",raiz2)<|fim▁end|>
| |
<|file_name|>test_plugins.py<|end_file_name|><|fim▁begin|>import types
import unittest
from collections import namedtuple
import mock
from plugins.systems.config_container_crawler import ConfigContainerCrawler
from plugins.systems.config_host_crawler import ConfigHostCrawler
from plugins.systems.connection_container_crawler import ConnectionContainerCrawler
from plugins.systems.connection_host_crawler import ConnectionHostCrawler
from plugins.systems.connection_vm_crawler import ConnectionVmCrawler
from plugins.systems.cpu_container_crawler import CpuContainerCrawler
from plugins.systems.cpu_host_crawler import CpuHostCrawler
from plugins.systems.disk_container_crawler import DiskContainerCrawler
from plugins.systems.disk_host_crawler import DiskHostCrawler
from plugins.systems.dockerhistory_container_crawler import DockerhistoryContainerCrawler
from plugins.systems.dockerinspect_container_crawler import DockerinspectContainerCrawler
from plugins.systems.dockerps_host_crawler import DockerpsHostCrawler
from plugins.systems.file_container_crawler import FileContainerCrawler
from plugins.systems.file_host_crawler import FileHostCrawler
from plugins.systems.interface_container_crawler import InterfaceContainerCrawler
from plugins.systems.interface_host_crawler import InterfaceHostCrawler
from plugins.systems.interface_vm_crawler import InterfaceVmCrawler
from plugins.systems.load_container_crawler import LoadContainerCrawler
from plugins.systems.load_host_crawler import LoadHostCrawler
from plugins.systems.memory_container_crawler import MemoryContainerCrawler
from plugins.systems.memory_host_crawler import MemoryHostCrawler
from plugins.systems.memory_vm_crawler import MemoryVmCrawler
from plugins.systems.metric_container_crawler import MetricContainerCrawler
from plugins.systems.metric_host_crawler import MetricHostCrawler
from plugins.systems.metric_vm_crawler import MetricVmCrawler
from plugins.systems.os_container_crawler import OSContainerCrawler
from plugins.systems.os_host_crawler import OSHostCrawler
from plugins.systems.os_vm_crawler import os_vm_crawler
from plugins.systems.package_container_crawler import PackageContainerCrawler
from plugins.systems.package_host_crawler import PackageHostCrawler
from plugins.systems.process_container_crawler import ProcessContainerCrawler
from plugins.systems.process_host_crawler import ProcessHostCrawler
from plugins.systems.process_vm_crawler import process_vm_crawler
from container import Container
from utils.crawler_exceptions import CrawlError
from utils.features import (
OSFeature,
ConfigFeature,
DiskFeature,
PackageFeature,
MemoryFeature,
CpuFeature,
InterfaceFeature,
LoadFeature,
DockerPSFeature)
# for OUTVM psvmi
class DummyContainer(Container):
    """Minimal Container stub: fixed pid '1234' and per-node cgroup paths."""
    def __init__(self, long_id):
        self.pid = '1234'
        self.long_id = long_id
    def get_memory_cgroup_path(self, node):
        # Fake cgroup path; the tests only check string composition.
        return '/cgroup/%s' % node
    def get_cpu_cgroup_path(self, node):
        return '/cgroup/%s' % node
# for OUTVM psvmi
psvmi_sysinfo = namedtuple('psvmi_sysinfo',
'''boottime ipaddr osdistro osname osplatform osrelease
ostype osversion memory_used memory_buffered
memory_cached memory_free''')
psvmi_memory = namedtuple(
'psvmi_memory',
'memory_used memory_buffered memory_cached memory_free')
psvmi_interface = namedtuple(
'psvmi_interface',
'ifname bytes_sent bytes_recv packets_sent packets_recv errout errin')
os_stat = namedtuple(
'os_stat',
'''st_mode st_gid st_uid st_atime st_ctime st_mtime st_size''')
def mocked_os_walk(root_dir):
    # Fake os.walk(): yields '/' containing one subdir and three files, then
    # (conditionally) the subdir's own listing.
    files = ['file1', 'file2', 'file3']
    dirs = ['dir']
    yield ('/', dirs, files)
    # simulate the os_walk behavior (if a dir is deleted, we don't walk it)
    # NOTE(review): `dirs` initially holds 'dir', not '/dir'; this check can
    # only pass if the consumer mutates the yielded list in place (os.walk
    # contract) and rewrites entries to full paths - confirm against the
    # crawler implementation.
    if '/dir' in dirs:
        files = ['file4']
        dirs = []
        yield ('/dir', dirs, files)
def mocked_os_walk_for_avoidsetns(root_dir):
    # Same as mocked_os_walk, but rooted at the fake container rootfs
    # '/1/2/3' (used by the avoid_setns tests).
    files = ['file1', 'file2', 'file3']
    dirs = ['dir']
    yield ('/1/2/3', dirs, files)
    # simulate the os_walk behavior (if a dir is deleted, we don't walk it)
    # NOTE(review): as in mocked_os_walk, this condition relies on the
    # consumer mutating `dirs` in place - confirm.
    if '/1/2/3/dir' in dirs:
        files = ['file4']
        dirs = []
        yield ('/dir', dirs, files)
# XXX can't do self.count = for some reason
mcount = 0  # module-level call counter shared with MockedMemCgroupFile.next()
class MockedMemCgroupFile(mock.Mock):
    """Fake memory cgroup stat file.

    readline() always returns '2'; iterating yields two stat lines and then
    stops (Python 2 iterator protocol via next()).
    """
    def __init__(self):
        # Intentionally skip mock.Mock.__init__.
        pass
    def readline(self):
        return '2'
    def __iter__(self):
        return self
    def next(self):
        global mcount
        mcount += 1
        if mcount == 1:
            return 'total_cache 100'
        if mcount == 2:
            return 'total_active_file 200'
        else:
            raise StopIteration()
# XXX can't do self.count = for some reason
ccount = 0   # module-level counter for next() calls
ccount2 = 0  # module-level counter for readline() calls
class MockedCpuCgroupFile(mock.Mock):
    """Fake cpuacct cgroup file.

    The first readline() returns '1e7', subsequent ones '2e7'; iterating
    yields 'system 20' then 'user 20' (Python 2 iterator protocol).
    """
    def __init__(self):
        # Intentionally skip mock.Mock.__init__.
        pass
    def readline(self):
        global ccount2
        ccount2 += 1
        if ccount2 == 1:
            return '1e7'
        else:
            return '2e7'
    def __iter__(self):
        return self
    def next(self):
        global ccount
        ccount += 1
        if ccount == 1:
            return 'system 20'
        if ccount == 2:
            return 'user 20'
        else:
            raise StopIteration()
class MockedFile(mock.Mock):
    """Fake file object whose read() always returns the string 'content'."""
    def __init__(self):
        # Intentionally skip mock.Mock.__init__.
        pass
    def read(self):
        return 'content'
def mocked_codecs_open(filename, mode, encoding, errors):
    """Stand-in for codecs.open(): a context manager yielding a MockedFile."""
    ctx = mock.Mock()
    ctx.__enter__ = mock.Mock(return_value=MockedFile())
    ctx.__exit__ = mock.Mock(return_value=False)
    return ctx
def mocked_cpu_cgroup_open(filename, mode):
m = mock.Mock()
m.__enter__ = mock.Mock(return_value=MockedCpuCgroupFile())
m.__exit__ = mock.Mock(return_value=False)
print filename
return m
def mocked_memory_cgroup_open(filename, mode):
m = mock.Mock()
m.__enter__ = mock.Mock(return_value=MockedMemCgroupFile())
m.__exit__ = mock.Mock(return_value=False)
print filename
return m
# Stand-ins for the tuple types returned by the psutil APIs the crawlers use.
partition = namedtuple('partition', 'device fstype mountpoint opts')
pdiskusage = namedtuple('pdiskusage', 'percent total')
meminfo = namedtuple('meminfo', 'rss vms')
ioinfo = namedtuple('ioinfo', 'read_bytes write_bytes')
psutils_memory = namedtuple('psutils_memory', 'used free buffers cached')
psutils_cpu = namedtuple(
    'psutils_cpu',
    'idle nice user iowait system irq steal')
psutils_net = namedtuple(
    'psutils_net',
    'bytes_sent bytes_recv packets_sent packets_recv errout errin')
def mocked_disk_partitions(all):
    """Fake psutil.disk_partitions(): two fixed mounts, /a and /b."""
    mounts = [('/dev/a', '/a'), ('/dev/b', '/b')]
    return [partition(dev, 'type', mnt, 'opts') for dev, mnt in mounts]
class Connection():
    """Fake psutil connection: fixed local/remote addresses, Established."""
    def __init__(self):
        self.laddr = ['1.1.1.1', '22']
        self.raddr = ['2.2.2.2', '22']
        self.status = 'Established'
class Process():
    """Fake psutil process: fixed attributes and canned metric values."""
    def __init__(self, name):
        self.name = name
        self.cmdline = ['cmd']
        self.pid = 123
        self.status = 'Running'
        self.cwd = '/bin'
        self.ppid = 1
        self.create_time = 1000
    def num_threads(self):
        return 1
    def username(self):
        return 'don quijote'
    def get_open_files(self):
        return []
    def get_connections(self):
        # One canned established connection.
        return [Connection()]
    def get_memory_info(self):
        return meminfo(10, 20)
    def get_io_counters(self):
        return ioinfo(10, 20)
    def get_cpu_percent(self, interval):
        return 30
    def get_memory_percent(self):
        return 30
STAT_DIR_MODE = 16749
def mocked_os_lstat(path):
print path
if path == '/':
return os_stat(STAT_DIR_MODE, 2, 3, 4, 5, 6, 7)
elif path == '/file1':
return os_stat(1, 2, 3, 4, 5, 6, 7)
elif path == '/file2':
return os_stat(1, 2, 3, 4, 5, 6, 7)
elif path == '/file3':
return os_stat(1, 2, 3, 4, 5, 6, 7)
elif path == '/dir':
return os_stat(STAT_DIR_MODE, 2, 3, 4, 5, 6, 7)
else:
return os_stat(1, 2, 3, 4, 5, 6, 7)
def mocked_run_as_another_namespace(pid, ns, function, *args, **kwargs):
    """Test double for run_as_another_namespace: execute `function` directly
    in the current namespace, materializing generator results into lists."""
    outcome = function(*args)
    # if res is a generator (i.e. function uses yield)
    if isinstance(outcome, types.GeneratorType):
        return list(outcome)
    return outcome
def throw_os_error(*args, **kvargs):
    # mock side_effect helper: make any patched call fail with OSError.
    raise OSError()
class PluginTests(unittest.TestCase):
    def setUp(self):
        # No shared fixtures; each test patches exactly what it needs.
        pass
    def tearDown(self):
        # Nothing to clean up.
        pass
    def test_init(self, *args):
        # Placeholder; real coverage is in the crawler tests below.
        pass
    @mock.patch('utils.os_utils.time.time',
                side_effect=lambda: 1001)
    @mock.patch('utils.os_utils.platform.platform',
                side_effect=lambda: 'platform')
    @mock.patch('utils.os_utils.utils.misc.get_host_ip4_addresses',
                side_effect=lambda: ['1.1.1.1'])
    @mock.patch('utils.os_utils.psutil.boot_time',
                side_effect=lambda: 1000)
    @mock.patch('utils.os_utils.platform.system',
                side_effect=lambda: 'linux')
    @mock.patch('utils.os_utils.platform.machine',
                side_effect=lambda: 'machine')
    @mock.patch(
        'utils.os_utils.osinfo.get_osinfo',
        side_effect=lambda mount_point=None: {
            'os': 'os',
            'version': 'os_version'})
    def test_os_host_cawler_plugin(self, *args):
        """OSHostCrawler.crawl() yields one ('linux', OSFeature, 'os') tuple
        built from the patched platform/psutil values.

        NOTE(review): "cawler" is a typo for "crawler"; renaming would change
        the discovered test name, so it is only flagged here.
        """
        fc = OSHostCrawler()
        for os in fc.crawl():
            print os
            assert os == (
                'linux',
                OSFeature(
                    boottime=1000,
                    uptime=1,
                    ipaddr=['1.1.1.1'],
                    os='os',
                    os_version='os_version',
                    os_kernel='platform',
                    architecture='machine'),
                'os')
        for i, arg in enumerate(args):
            if i > 0:  # time.time is called more than once
                continue
            assert arg.call_count == 1
    @mock.patch('utils.os_utils.platform.system',
                side_effect=throw_os_error)
    def test_os_host_crawler_plugin_failure(self, *args):
        """An OSError from platform.system() propagates out of crawl()."""
        fc = OSHostCrawler()
        with self.assertRaises(OSError):
            for os in fc.crawl():
                pass
    @mock.patch(
        'utils.os_utils.osinfo.get_osinfo',
        side_effect=lambda mount_point=None: {
            'os': 'os',
            'version': 'os_version'})
    def test_os_host_crawler_plugin_mountpoint_mode(self, *args):
        """Crawling a mount point (root_dir) yields 'unsupported'/'unknown'
        placeholders for the live-system fields."""
        fc = OSHostCrawler()
        for os in fc.crawl(root_dir='/a'):
            print os
            assert os == (
                'linux',
                OSFeature(
                    boottime='unsupported',
                    uptime='unsupported',
                    ipaddr='0.0.0.0',
                    os='os',
                    os_version='os_version',
                    os_kernel='unknown',
                    architecture='unknown'),
                'os')
        for i, arg in enumerate(args):
            assert arg.call_count == 1
    @mock.patch('utils.os_utils.osinfo.get_osinfo',
                side_effect=throw_os_error)
    def test_os_host_crawler_plugin_mountpoint_mode_failure(self, *args):
        """An OSError from get_osinfo() propagates in mount-point mode."""
        fc = OSHostCrawler()
        with self.assertRaises(OSError):
            for os in fc.crawl(root_dir='/a'):
                pass
    @mock.patch('utils.os_utils.time.time',
                side_effect=lambda: 1001)
    @mock.patch('utils.os_utils.platform.platform',
                side_effect=lambda: 'platform')
    @mock.patch('utils.os_utils.utils.misc.get_host_ip4_addresses',
                side_effect=lambda: ['1.1.1.1'])
    @mock.patch('utils.os_utils.psutil.boot_time',
                side_effect=lambda: 1000)
    @mock.patch('utils.os_utils.platform.system',
                side_effect=lambda: 'linux')
    @mock.patch('utils.os_utils.platform.machine',
                side_effect=lambda: 'machine')
    @mock.patch(
        ("plugins.systems.os_container_crawler."
         "run_as_another_namespace"),
        side_effect=mocked_run_as_another_namespace)
    @mock.patch(
        ("plugins.systems.os_container_crawler."
         "utils.dockerutils.exec_dockerinspect"),
        side_effect=lambda long_id: {'State': {'Pid': 123}})
    @mock.patch(
        'utils.os_utils.osinfo.get_osinfo',
        side_effect=lambda mount_point=None: {
            'os': 'os',
            'version': 'os_version'})
    def test_os_container_crawler_plugin(self, *args):
        """OSContainerCrawler.crawl() inside the (mocked) container namespace
        yields the same OSFeature as the host crawler."""
        fc = OSContainerCrawler()
        for os in fc.crawl(container_id=123):
            print os
            assert os == (
                'linux',
                OSFeature(
                    boottime=1000,
                    uptime=1,
                    ipaddr=['1.1.1.1'],
                    os='os',
                    os_version='os_version',
                    os_kernel='platform',
                    architecture='machine'),
                'os')
        for i, arg in enumerate(args):
            if i > 0:  # time.time is called more than once
                continue
            assert arg.call_count == 1
    @mock.patch(
        ("plugins.systems.os_container_crawler."
         "utils.dockerutils.exec_dockerinspect"),
        side_effect=lambda long_id: {'State': {'Pid': 123}})
    @mock.patch(
        ("plugins.systems.os_container_crawler.utils.dockerutils."
         "get_docker_container_rootfs_path"),
        side_effect=lambda long_id: '/a/b/c')
    @mock.patch(
        'utils.os_utils.osinfo.get_osinfo',
        side_effect=lambda mount_point=None: {
            'os': 'os',
            'version': 'os_version'})
    def test_os_container_crawler_plugin_avoidsetns(self, *args):
        """avoid_setns=True must read the container rootfs path instead of
        entering the namespace; live-system fields become placeholders.
        Patches stack bottom-up, so args[0] is get_osinfo."""
        fc = OSContainerCrawler()
        for os in fc.crawl(container_id=123, avoid_setns=True):
            print os
            assert os == (
                'linux',
                OSFeature(
                    boottime='unsupported',
                    uptime='unsupported',
                    ipaddr='0.0.0.0',
                    os='os',
                    os_version='os_version',
                    os_kernel='unknown',
                    architecture='unknown'),
                'os')
        for i, arg in enumerate(args):
            print i, arg
            if i == 0:
                # get_osinfo()
                assert arg.call_count == 1
                arg.assert_called_with(mount_point='/a/b/c')
            elif i == 1:
                # get_docker_container_rootfs_path
                assert arg.call_count == 1
                arg.assert_called_with(123)
            else:
                # exec_dockerinspect
                assert arg.call_count == 1
                arg.assert_called_with(123)
    @mock.patch(
        ("plugins.systems.os_container_crawler."
         "utils.dockerutils.exec_dockerinspect"),
        side_effect=lambda long_id: {'State': {'Pid': 123}})
    @mock.patch(
        ("plugins.systems.os_container_crawler.utils.dockerutils."
         "get_docker_container_rootfs_path"),
        side_effect=throw_os_error)
    def test_os_container_crawler_plugin_avoidsetns_failure(self, *args):
        """An OSError while resolving the rootfs path propagates out."""
        fc = OSContainerCrawler()
        with self.assertRaises(OSError):
            for os in fc.crawl(container_id=123, avoid_setns=True):
                pass
    @mock.patch('plugins.systems.os_vm_crawler.psvmi.context_init',
                side_effect=lambda dn1, dn2, kv, d, a: 1000)
    @mock.patch('plugins.systems.os_vm_crawler.psvmi.system_info',
                side_effect=lambda vmc: psvmi_sysinfo(1000,
                                                      '1.1.1.1',
                                                      'osdistro',
                                                      'osname',
                                                      'osplatform',
                                                      'osrelease',
                                                      'ostype',
                                                      'osversion',
                                                      1000000,
                                                      100000,
                                                      100000,
                                                      100000))
    @mock.patch('plugins.systems.os_vm_crawler.psvmi')
    def test_os_vm_crawler_plugin_without_vm(self, *args):
        """os_vm_crawler maps the fake psvmi sysinfo into one
        ('ostype', OSFeature, 'os') tuple without needing a real VM."""
        fc = os_vm_crawler()
        for os in fc.crawl(vm_desc=('dn', '2.6', 'ubuntu', 'x86')):
            assert os == (
                'ostype',
                OSFeature(
                    boottime=1000,
                    uptime='unknown',
                    ipaddr='1.1.1.1',
                    os='ostype',
                    os_version='osversion',
                    os_kernel='osrelease',
                    architecture='osplatform'),
                'os')
            pass
        assert args[1].call_count == 1
    @mock.patch('utils.file_utils.os.path.isdir',
                side_effect=lambda p: True)
    @mock.patch('utils.file_utils.os.walk',
                side_effect=mocked_os_walk)
    @mock.patch('utils.file_utils.os.lstat',
                side_effect=mocked_os_lstat)
    def test_file_host_crawler(self, *args):
        """FileHostCrawler walks the fake tree ('/', files, '/dir', file4)
        and emits one FileFeature per entry; 6 lstat calls expected."""
        fc = FileHostCrawler()
        for (k, f, fname) in fc.crawl():
            print f
            assert fname == "file"
            assert f.mode in [1, STAT_DIR_MODE] and f.gid == 2 and f.uid == 3
            assert f.atime == 4 and f.ctime == 5
            assert f.mtime == 6 and f.size == 7
            assert f.name in ['', 'dir', 'file1', 'file2', 'file3', 'file4']
            assert f.path in ['/', '/file1', '/file2', '/file3',
                              '/dir', '/dir/file4']
            assert f.type in ['file', 'dir']
            assert f.linksto is None
        assert args[0].call_count == 6
        assert args[1].call_count == 1  # oswalk
        args[1].assert_called_with('/')
        assert args[2].call_count == 2  # isdir
        args[2].assert_called_with('/')
    @mock.patch('utils.file_utils.os.path.isdir',
                side_effect=lambda p: True)
    @mock.patch('utils.file_utils.os.walk',
                side_effect=mocked_os_walk)
    @mock.patch('utils.file_utils.os.lstat',
                side_effect=mocked_os_lstat)
    def test_file_host_crawler_with_exclude_dirs(self, *args):
        """exclude_dirs=['dir'] must prune '/dir' and its contents from the
        walk; only 4 lstat calls expected."""
        fc = FileHostCrawler()
        for (k, f, fname) in fc.crawl(exclude_dirs=['dir']):
            print f
            assert fname == "file"
            assert f.mode in [1, STAT_DIR_MODE] and f.gid == 2 and f.uid == 3
            assert f.atime == 4 and f.ctime == 5
            assert f.mtime == 6 and f.size == 7
            assert f.name in ['', 'file1', 'file2', 'file3', 'file4']
            assert f.path in ['/', '/file1', '/file2', '/file3']
            assert f.path not in ['/dir', '/dir/file4']
            assert f.type in ['file', 'dir']
            assert f.linksto is None
        assert args[0].call_count == 4
        assert args[1].call_count == 1  # oswalk
        args[1].assert_called_with('/')
        assert args[2].call_count == 2  # isdir
        args[2].assert_called_with('/')
    @mock.patch('utils.file_utils.os.path.isdir',
                side_effect=lambda p: True)
    @mock.patch('utils.file_utils.os.walk',
                side_effect=throw_os_error)
    @mock.patch('utils.file_utils.os.lstat',
                side_effect=mocked_os_lstat)
    def test_file_host_crawler_failure(self, *args):
        """An OSError from os.walk() propagates out of crawl()."""
        fc = FileHostCrawler()
        with self.assertRaises(OSError):
            for (k, f, fname) in fc.crawl(root_dir='/a/b/c'):
                pass
    @mock.patch(
        ("plugins.systems.file_container_crawler."
         "utils.dockerutils.exec_dockerinspect"),
        side_effect=lambda long_id: {'State': {'Pid': 123}})
    @mock.patch(
        ("plugins.systems.file_container_crawler."
         "run_as_another_namespace"),
        side_effect=mocked_run_as_another_namespace)
    @mock.patch('utils.file_utils.os.path.isdir',
                side_effect=lambda p: True)
    @mock.patch('utils.file_utils.os.walk',
                side_effect=mocked_os_walk)
    @mock.patch('utils.file_utils.os.lstat',
                side_effect=mocked_os_lstat)
    def test_file_container_crawler(self, *args):
        """FileContainerCrawler, run inside the (mocked) container namespace,
        produces the same FileFeatures as the host crawler."""
        fc = FileContainerCrawler()
        for (k, f, fname) in fc.crawl(root_dir='/'):
            assert fname == "file"
            assert f.mode in [1, STAT_DIR_MODE] and f.gid == 2 and f.uid == 3
            assert f.atime == 4 and f.ctime == 5
            assert f.mtime == 6 and f.size == 7
            assert f.name in ['', 'dir', 'file1', 'file2', 'file3', 'file4']
            assert f.path in ['/', '/file1', '/file2', '/file3',
                              '/dir', '/dir/file4']
            assert f.type in ['file', 'dir']
            assert f.linksto is None
        assert args[0].call_count == 6
        assert args[1].call_count == 1  # oswalk
        args[1].assert_called_with('/')
        assert args[2].call_count == 2  # isdir
        args[2].assert_called_with('/')
    @mock.patch(
        ("plugins.systems.file_container_crawler."
         "utils.dockerutils.exec_dockerinspect"),
        side_effect=lambda long_id: {'State': {'Pid': 123}})
    @mock.patch('utils.file_utils.os.walk',
                side_effect=throw_os_error)
    @mock.patch(
        ("plugins.systems.file_container_crawler."
         "run_as_another_namespace"),
        side_effect=mocked_run_as_another_namespace)
    @mock.patch('utils.file_utils.os.path.isdir',
                side_effect=lambda p: True)
    @mock.patch('utils.file_utils.os.lstat',
                side_effect=mocked_os_lstat)
    def test_file_container_crawler_failure(self, *args):
        """An OSError from os.walk() propagates out of the container crawl."""
        fc = FileContainerCrawler()
        with self.assertRaises(OSError):
            for (k, f, fname) in fc.crawl(root_dir='/a/b/c'):
                pass
@mock.patch(
("plugins.systems.file_container_crawler."
"utils.dockerutils.exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
@mock.patch(
("plugins.systems.file_container_crawler.utils.dockerutils."
"get_docker_container_rootfs_path"),
side_effect=lambda long_id: '/1/2/3')
@mock.patch('utils.file_utils.os.path.isdir',
side_effect=lambda p: True)
@mock.patch('utils.file_utils.os.walk',
side_effect=mocked_os_walk_for_avoidsetns)
@mock.patch('utils.file_utils.os.lstat',
side_effect=mocked_os_lstat)
def test_file_container_crawler_avoidsetns(self, *args):
fc = FileContainerCrawler()
for (k, f, fname) in fc.crawl(root_dir='/', avoid_setns=True):
print f
assert fname == "file"
assert f.mode in [1, STAT_DIR_MODE] and f.gid == 2 and f.uid == 3
assert f.atime == 4 and f.ctime == 5
assert f.mtime == 6 and f.size == 7
assert f.name in ['', 'dir', 'file1', 'file2', 'file3', 'file4']
assert f.path in ['/', '/file1', '/file2', '/file3',
'/dir', '/dir/file4']
assert f.type in ['file', 'dir']
assert f.linksto is None
assert args[0].call_count == 6
assert args[1].call_count == 1 # oswalk
args[1].assert_called_with('/1/2/3')
assert args[2].call_count == 2 # isdir
args[2].assert_called_with('/1/2/3')
    @mock.patch(
        ("plugins.systems.file_container_crawler."
         "utils.dockerutils.exec_dockerinspect"),
        side_effect=lambda long_id: {'State': {'Pid': 123}})
    @mock.patch(
        ("plugins.systems.file_container_crawler."
         "run_as_another_namespace"),
        side_effect=mocked_run_as_another_namespace)
    @mock.patch('utils.file_utils.os.path.isdir',
                side_effect=lambda p: True)
    @mock.patch('utils.file_utils.os.walk',
                side_effect=mocked_os_walk)
    @mock.patch('utils.file_utils.os.lstat',
                side_effect=mocked_os_lstat)
    def test_file_container_crawler_with_exclude_dirs(self, *args):
        """exclude_dirs=['dir'] must drop '/dir' and everything under it."""
        fc = FileContainerCrawler()
        for (k, f, fname) in fc.crawl(root_dir='/',
                                      exclude_dirs=['dir']):
            assert fname == "file"
            assert f.mode in [1, STAT_DIR_MODE] and f.gid == 2 and f.uid == 3
            assert f.atime == 4 and f.ctime == 5
            assert f.mtime == 6 and f.size == 7
            assert f.name in ['', 'file1', 'file2', 'file3', 'file4']
            assert f.path in ['/', '/file1', '/file2', '/file3']
            assert f.path not in ['/dir', '/dir/file4']
            assert f.type in ['file', 'dir']
            assert f.linksto is None
        # Only 4 entries survive the exclusion -> 4 lstat calls.
        assert args[0].call_count == 4
        assert args[1].call_count == 1  # oswalk
        args[1].assert_called_with('/')
        assert args[2].call_count == 2  # isdir
        args[2].assert_called_with('/')
    @mock.patch(
        ("plugins.systems.file_container_crawler."
         "utils.dockerutils.exec_dockerinspect"),
        side_effect=lambda long_id: {'State': {'Pid': 123}})
    @mock.patch(
        ("plugins.systems.file_container_crawler.utils.dockerutils."
         "get_docker_container_rootfs_path"),
        side_effect=lambda long_id: '/1/2/3')
    @mock.patch('utils.file_utils.os.path.isdir',
                side_effect=lambda p: True)
    @mock.patch('utils.file_utils.os.walk',
                side_effect=mocked_os_walk_for_avoidsetns)
    @mock.patch('utils.file_utils.os.lstat',
                side_effect=mocked_os_lstat)
    def test_file_container_crawler_avoidsetns_with_exclude_dirs(
            self,
            *
            args):
        """Same exclusion check as above, but crawling via the rootfs path."""
        fc = FileContainerCrawler()
        for (k, f, fname) in fc.crawl(root_dir='/',
                                      avoid_setns=True,
                                      exclude_dirs=['/dir']):
            assert fname == "file"
            assert f.mode in [1, STAT_DIR_MODE] and f.gid == 2 and f.uid == 3
            assert f.atime == 4 and f.ctime == 5
            assert f.mtime == 6 and f.size == 7
            assert f.name in ['', 'file1', 'file2', 'file3', 'file4']
            assert f.path in ['/', '/file1', '/file2', '/file3']
            assert f.path not in ['/dir', '/dir/file4']
            assert f.type in ['file', 'dir']
            assert f.linksto is None
        assert args[0].call_count == 4
        assert args[1].call_count == 1  # oswalk
        args[1].assert_called_with('/1/2/3')
        assert args[2].call_count == 2  # isdir
        args[2].assert_called_with('/1/2/3')
    @mock.patch('utils.config_utils.os.path.isdir',
                side_effect=lambda p: True)
    @mock.patch('utils.config_utils.os.path.exists',
                side_effect=lambda p: True)
    @mock.patch('utils.config_utils.os.lstat',
                side_effect=mocked_os_lstat)
    @mock.patch('utils.config_utils.codecs.open',
                side_effect=mocked_codecs_open)
    def test_config_host_crawler(self, *args):
        """Known config files only (no discovery) must yield one ConfigFeature."""
        fc = ConfigHostCrawler()
        for (k, f, fname) in fc.crawl(known_config_files=['/etc/file1'],
                                      discover_config_files=False):
            assert fname == "config"
            assert f == ConfigFeature(name='file1', content='content',
                                      path='/etc/file1')
        assert args[0].call_count == 1  # lstat
@mock.patch('utils.config_utils.os.path.isdir',
side_effect=lambda p: True)
@mock.patch('utils.config_utils.os.walk',
side_effect=lambda p: [
('/', [], ['file1', 'file2', 'file3.conf'])])
@mock.patch('utils.config_utils.os.path.exists',
side_effect=lambda p: True)
@mock.patch('utils.config_utils.os.path.isfile',
side_effect=lambda p: True)
@mock.patch('utils.config_utils.os.path.getsize',
side_effect=lambda p: 1000)
@mock.patch('utils.config_utils.os.lstat',
side_effect=mocked_os_lstat)
@mock.patch('utils.config_utils.codecs.open',
side_effect=mocked_codecs_open)
def test_config_host_crawler_with_discover(self, *args):
fc = ConfigHostCrawler()
configs = fc.crawl(known_config_files=['/etc/file1'],
discover_config_files=True)
print configs
assert set(configs) == set([('/file3.conf',
ConfigFeature(name='file3.conf',
content='content',
path='/file3.conf'),
'config'),
('/etc/file1',
ConfigFeature(name='file1',
content='content',
path='/etc/file1'),
'config')])
    @mock.patch(
        ("plugins.systems.config_container_crawler."
         "utils.dockerutils.exec_dockerinspect"),
        side_effect=lambda long_id: {'State': {'Pid': 123}})
    @mock.patch(
        'plugins.systems.config_container_crawler.run_as_another_namespace',
        side_effect=mocked_run_as_another_namespace)
    @mock.patch('utils.config_utils.os.path.isdir',
                side_effect=lambda p: True)
    @mock.patch('utils.config_utils.os.path.exists',
                side_effect=lambda p: True)
    @mock.patch('utils.config_utils.os.lstat',
                side_effect=mocked_os_lstat)
    @mock.patch('utils.config_utils.codecs.open',
                side_effect=mocked_codecs_open)
    def test_config_container_crawler(self, *args):
        """Container config crawl (setns path) over known files only."""
        fc = ConfigContainerCrawler()
        for (k, f, fname) in fc.crawl(known_config_files=['/etc/file1'],
                                      discover_config_files=False):
            assert fname == "config"
            assert f == ConfigFeature(name='file1', content='content',
                                      path='/etc/file1')
        assert args[0].call_count == 1  # codecs open
    @mock.patch('utils.config_utils.codecs.open',
                side_effect=mocked_codecs_open)
    @mock.patch('utils.config_utils.os.lstat',
                side_effect=mocked_os_lstat)
    @mock.patch(
        ("plugins.systems.config_container_crawler."
         "utils.dockerutils.exec_dockerinspect"),
        side_effect=lambda long_id: {'State': {'Pid': 123}})
    @mock.patch(
        'plugins.systems.config_container_crawler.run_as_another_namespace',
        side_effect=mocked_run_as_another_namespace)
    @mock.patch('utils.config_utils.os.path.isdir',
                side_effect=lambda p: True)
    @mock.patch('utils.config_utils.os.walk',
                side_effect=lambda p: [
                    ('/', [], ['file1', 'file2', 'file3.conf'])])
    @mock.patch('utils.config_utils.os.path.exists',
                side_effect=lambda p: True)
    @mock.patch('utils.config_utils.os.path.isfile',
                side_effect=lambda p: True)
    @mock.patch('utils.config_utils.os.path.getsize',
                side_effect=lambda p: 1000)
    def test_config_container_crawler_discover(self, *args):
        """Container config crawl with discovery: known file + '*.conf' match."""
        fc = ConfigContainerCrawler()
        configs = fc.crawl(known_config_files=['/etc/file1'],
                           discover_config_files=True)
        assert set(configs) == set([('/file3.conf',
                                     ConfigFeature(name='file3.conf',
                                                   content='content',
                                                   path='/file3.conf'),
                                     'config'),
                                    ('/etc/file1',
                                     ConfigFeature(name='file1',
                                                   content='content',
                                                   path='/etc/file1'),
                                     'config')])
@mock.patch(
("plugins.systems.config_container_crawler."
"run_as_another_namespace"),
side_effect=mocked_run_as_another_namespace)
@mock.patch(
("plugins.systems.config_container_crawler."
"utils.dockerutils.exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
@mock.patch(
("plugins.systems.config_container_crawler.utils.dockerutils."
"get_docker_container_rootfs_path"),
side_effect=lambda long_id: '/1/2/3')
@mock.patch('utils.config_utils.os.path.isdir',
side_effect=lambda p: True)
@mock.patch('utils.config_utils.os.path.exists',
side_effect=lambda p: True)<|fim▁hole|> def test_config_container_crawler_avoidsetns(self, *args):
fc = ConfigContainerCrawler()
for (k, f, fname) in fc.crawl(known_config_files=['/etc/file1'],
discover_config_files=False,
avoid_setns=True):
assert fname == "config"
assert f == ConfigFeature(name='file1', content='content',
path='/etc/file1')
assert args[0].call_count == 1 # lstat
    @mock.patch(
        ("plugins.systems.config_container_crawler."
         "run_as_another_namespace"),
        side_effect=mocked_run_as_another_namespace)
    @mock.patch(
        ("plugins.systems.config_container_crawler."
         "utils.dockerutils.exec_dockerinspect"),
        side_effect=lambda long_id: {'State': {'Pid': 123}})
    @mock.patch(
        ("plugins.systems.config_container_crawler.utils.dockerutils."
         "get_docker_container_rootfs_path"),
        side_effect=lambda long_id: '/1/2/3')
    @mock.patch('utils.config_utils.os.path.isdir',
                side_effect=lambda p: True)
    @mock.patch('utils.config_utils.os.walk',
                side_effect=lambda p: [
                    ('/', [], ['file1', 'file2', 'file3.conf'])])
    @mock.patch('utils.config_utils.os.path.exists',
                side_effect=lambda p: True)
    @mock.patch('utils.config_utils.os.path.isfile',
                side_effect=lambda p: True)
    @mock.patch('utils.config_utils.os.path.getsize',
                side_effect=lambda p: 1000)
    @mock.patch('utils.config_utils.os.lstat',
                side_effect=mocked_os_lstat)
    @mock.patch('utils.config_utils.codecs.open',
                side_effect=mocked_codecs_open)
    def test_config_container_crawler_avoidsetns_discover(self, *args):
        """Discovery through the container rootfs (avoid_setns=True).

        Paths in the result are container-relative even though the crawl
        happens through the host-side rootfs mount ('/1/2/3').
        """
        fc = ConfigContainerCrawler()
        configs = fc.crawl(known_config_files=['/etc/file1'],
                           avoid_setns=True,
                           discover_config_files=True)
        assert set(configs) == set([('/file3.conf',
                                     ConfigFeature(name='file3.conf',
                                                   content='content',
                                                   path='/file3.conf'),
                                     'config'),
                                    ('/etc/file1',
                                     ConfigFeature(name='file1',
                                                   content='content',
                                                   path='/etc/file1'),
                                     'config')])
    @mock.patch(
        'utils.package_utils.osinfo.get_osinfo',
        side_effect=lambda mount_point=None: {
            'os': 'ubuntu',
            'version': '123'})
    @mock.patch('utils.package_utils.os.path.exists',
                side_effect=lambda p: True)
    @mock.patch('utils.package_utils.get_dpkg_packages',
                side_effect=lambda a, b, c: [('pkg1',
                                              PackageFeature(None, 'pkg1',
                                                             123, 'v1',
                                                             'x86'))])
    def test_package_host_crawler_dpkg(self, *args):
        """On an Ubuntu host the crawler must use the dpkg backend."""
        fc = PackageHostCrawler()
        for (k, f, fname) in fc.crawl():
            assert fname == "package"
            assert f == PackageFeature(
                installed=None,
                pkgname='pkg1',
                pkgsize=123,
                pkgversion='v1',
                pkgarchitecture='x86')
        assert args[0].call_count == 1
        # args[0] is get_dpkg_packages (bottom-most decorator).
        args[0].assert_called_with('/', 'var/lib/dpkg', 0)
    @mock.patch(
        'utils.package_utils.osinfo.get_osinfo',
        side_effect=lambda mount_point=None: {
            'os': 'ubuntu',
            'version': '123'})
    @mock.patch('utils.package_utils.os.path.exists',
                side_effect=lambda p: True)
    @mock.patch('utils.package_utils.get_dpkg_packages',
                side_effect=throw_os_error)
    def test_package_host_crawler_dpkg_failure(self, *args):
        """A dpkg-backend failure on the host is surfaced as CrawlError."""
        fc = PackageHostCrawler()
        with self.assertRaises(CrawlError):
            for (k, f, fname) in fc.crawl():
                pass
        assert args[0].call_count == 1
        args[0].assert_called_with('/', 'var/lib/dpkg', 0)
    @mock.patch(
        'utils.package_utils.osinfo.get_osinfo',
        side_effect=lambda mount_point=None: {
            'os': 'redhat',
            'version': '123'})
    @mock.patch('utils.package_utils.os.path.exists',
                side_effect=lambda p: True)
    @mock.patch('utils.package_utils.get_rpm_packages',
                side_effect=lambda a, b, c, d: [('pkg1',
                                                 PackageFeature(None, 'pkg1',
                                                                123, 'v1',
                                                                'x86'))])
    def test_package_host_crawler_rpm(self, *args):
        """On a Red Hat host the crawler must use the rpm backend."""
        fc = PackageHostCrawler()
        for (k, f, fname) in fc.crawl():
            assert fname == "package"
            assert f == PackageFeature(
                installed=None,
                pkgname='pkg1',
                pkgsize=123,
                pkgversion='v1',
                pkgarchitecture='x86')
        assert args[0].call_count == 1
        args[0].assert_called_with('/', 'var/lib/rpm', 0, False)
    @mock.patch(
        ("plugins.systems.package_container_crawler."
         "exec_dockerinspect"),
        side_effect=lambda long_id: {'State': {'Pid': 123}})
    @mock.patch(
        'utils.package_utils.osinfo.get_osinfo',
        side_effect=lambda mount_point=None: {
            'os': 'ubuntu',
            'version': '123'})
    @mock.patch(
        'plugins.systems.package_container_crawler.run_as_another_namespace',
        side_effect=mocked_run_as_another_namespace)
    @mock.patch('utils.package_utils.os.path.exists',
                side_effect=lambda p: True)
    @mock.patch('utils.package_utils.get_dpkg_packages',
                side_effect=lambda a, b, c: [('pkg1',
                                              PackageFeature(None, 'pkg1',
                                                             123, 'v1',
                                                             'x86'))])
    def test_package_container_crawler_dpkg(self, *args):
        """Container package crawl on Ubuntu uses dpkg via setns."""
        fc = PackageContainerCrawler()
        for (k, f, fname) in fc.crawl():
            assert fname == "package"
            assert f == PackageFeature(
                installed=None,
                pkgname='pkg1',
                pkgsize=123,
                pkgversion='v1',
                pkgarchitecture='x86')
        assert args[0].call_count == 1
        args[0].assert_called_with('/', 'var/lib/dpkg', 0)
    @mock.patch(
        ("plugins.systems.package_container_crawler."
         "exec_dockerinspect"),
        side_effect=lambda long_id: {'State': {'Pid': 123}})
    @mock.patch(
        'plugins.systems.package_container_crawler.run_as_another_namespace',
        side_effect=mocked_run_as_another_namespace)
    @mock.patch(
        ("plugins.systems.package_container_crawler."
         "get_docker_container_rootfs_path"),
        side_effect=lambda long_id: '/a/b/c')
    @mock.patch(
        'utils.package_utils.osinfo.get_osinfo',
        side_effect=lambda mount_point=None: {
            'os': 'ubuntu',
            'version': '123'})
    @mock.patch('utils.package_utils.os.path.exists',
                side_effect=lambda p: True if 'dpkg' in p else False)
    @mock.patch('utils.package_utils.get_dpkg_packages',
                side_effect=throw_os_error)
    def test_package_container_crawler_dpkg_failure(self, *args):
        """A setns-mode dpkg failure triggers a rootfs-based retry, then CrawlError."""
        fc = PackageContainerCrawler()
        with self.assertRaises(CrawlError):
            for (k, f, fname) in fc.crawl():
                pass
        # get_dpkg_packages is called a second time after the first failure.
        # first time is OUTCONTAINER mode with setns
        # second time is OUTCONTAINER mode with avoid_setns
        assert args[0].call_count == 2
        args[0].assert_called_with('/a/b/c', 'var/lib/dpkg', 0)
        args[2].assert_called_with(mount_point='/a/b/c')  # get_osinfo()
    @mock.patch(
        ("plugins.systems.package_container_crawler."
         "exec_dockerinspect"),
        side_effect=lambda long_id: {'State': {'Pid': 123}})
    @mock.patch(
        'plugins.systems.package_container_crawler.run_as_another_namespace',
        side_effect=mocked_run_as_another_namespace)
    @mock.patch(
        ("plugins.systems.package_container_crawler."
         "get_docker_container_rootfs_path"),
        side_effect=lambda long_id: '/a/b/c')
    @mock.patch(
        'utils.package_utils.osinfo.get_osinfo',
        side_effect=lambda mount_point=None: {
            'os': 'redhat',
            'version': '123'})
    @mock.patch('utils.package_utils.os.path.exists',
                side_effect=lambda p: True if 'rpm' in p else False)
    @mock.patch('utils.package_utils.get_rpm_packages',
                side_effect=throw_os_error)
    def test_package_container_crawler_rpm_failure(self, *args):
        """A setns-mode rpm failure triggers a rootfs-based retry, then CrawlError."""
        fc = PackageContainerCrawler()
        with self.assertRaises(CrawlError):
            for (k, f, fname) in fc.crawl():
                pass
        # get_rpm_packages is called a second time after the first failure.
        # first time is OUTCONTAINER mode with setns
        # second time is OUTCONTAINER mode with avoid_setns
        assert args[0].call_count == 2
        args[0].assert_called_with('/a/b/c', 'var/lib/rpm', 0, True)
        args[2].assert_called_with(mount_point='/a/b/c')  # get_osinfo()
    @mock.patch(
        ("plugins.systems.package_container_crawler."
         "exec_dockerinspect"),
        side_effect=lambda long_id: {'State': {'Pid': 123}})
    @mock.patch(
        ("plugins.systems.package_container_crawler."
         "get_docker_container_rootfs_path"),
        side_effect=lambda long_id: '/a/b/c')
    @mock.patch(
        'utils.package_utils.osinfo.get_osinfo',
        side_effect=lambda mount_point=None: {
            'os': 'ubuntu',
            'version': '123'})
    @mock.patch('utils.package_utils.os.path.exists',
                side_effect=lambda p: True)
    @mock.patch('utils.package_utils.get_dpkg_packages',
                side_effect=lambda a, b, c: [('pkg1',
                                              PackageFeature(None, 'pkg1',
                                                             123, 'v1',
                                                             'x86'))])
    def test_package_container_crawler_avoidsetns(self, *args):
        """avoid_setns=True crawls packages through the container rootfs."""
        fc = PackageContainerCrawler()
        for (k, f, fname) in fc.crawl(avoid_setns=True):
            assert fname == "package"
            assert f == PackageFeature(
                installed=None,
                pkgname='pkg1',
                pkgsize=123,
                pkgversion='v1',
                pkgarchitecture='x86')
        assert args[0].call_count == 1
@mock.patch('plugins.systems.process_host_crawler.psutil.process_iter',
side_effect=lambda: [Process('init')])
def test_process_host_crawler(self, *args):
fc = ProcessHostCrawler()
for (k, f, fname) in fc.crawl():
print f
assert fname == "process"
assert f.pname == 'init'
assert f.cmd == 'cmd'
assert f.pid == 123
assert args[0].call_count == 1
@mock.patch(
("plugins.systems.process_container_crawler.utils.dockerutils."
"exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
@mock.patch(
'plugins.systems.process_container_crawler.psutil.process_iter',
side_effect=lambda: [Process('init')])
@mock.patch(
'plugins.systems.process_container_crawler.run_as_another_namespace',
side_effect=mocked_run_as_another_namespace)
def test_process_container_crawler(self, *args):
fc = ProcessContainerCrawler()
for (k, f, fname) in fc.crawl('123'):
print f
assert fname == "process"
assert f.pname == 'init'
assert f.cmd == 'cmd'
assert f.pid == 123
assert args[0].call_count == 1
@mock.patch('plugins.systems.process_vm_crawler.psvmi.context_init',
side_effect=lambda dn1, dn2, kv, d, a: 1000)
@mock.patch('plugins.systems.process_vm_crawler.psvmi.process_iter',
side_effect=lambda vmc: [Process('init')])
@mock.patch('plugins.systems.process_vm_crawler.psvmi')
def test_process_vm_crawler(self, *args):
fc = process_vm_crawler()
for (k, f, fname) in fc.crawl(vm_desc=('dn', '2.6', 'ubuntu', 'x86')):
print f
assert fname == "process"
assert f.pname == 'init'
assert f.cmd == 'cmd'
assert f.pid == 123
assert args[1].call_count == 1 # process_iter
    @mock.patch('utils.disk_utils.psutil.disk_partitions',
                side_effect=mocked_disk_partitions)
    @mock.patch('utils.disk_utils.psutil.disk_usage',
                side_effect=lambda x: pdiskusage(10, 100))
    def test_crawl_disk_partitions_invm_mode(self, *args):
        """Host disk crawl reports one DiskFeature per mocked partition."""
        fc = DiskHostCrawler()
        disks = fc.crawl()
        # 10 used out of 100 -> 90% free.
        assert set(disks) == set([('/a',
                                   DiskFeature(partitionname='/dev/a',
                                               freepct=90.0,
                                               fstype='type',
                                               mountpt='/a',
                                               mountopts='opts',
                                               partitionsize=100),
                                   'disk'),
                                  ('/b',
                                   DiskFeature(partitionname='/dev/b',
                                               freepct=90.0,
                                               fstype='type',
                                               mountpt='/b',
                                               mountopts='opts',
                                               partitionsize=100),
                                   'disk')])
    @mock.patch(
        'plugins.systems.disk_container_crawler.run_as_another_namespace',
        side_effect=mocked_run_as_another_namespace)
    @mock.patch('utils.disk_utils.psutil.disk_partitions',
                side_effect=mocked_disk_partitions)
    @mock.patch('utils.disk_utils.psutil.disk_usage',
                side_effect=lambda x: pdiskusage(10, 100))
    @mock.patch(
        ("plugins.systems.disk_container_crawler.utils.dockerutils."
         "exec_dockerinspect"),
        side_effect=lambda long_id: {'State': {'Pid': 123}})
    def test_crawl_disk_partitions_outcontainer_mode(self, *args):
        """Container disk crawl (setns path) reports the same features."""
        fc = DiskContainerCrawler()
        disks = fc.crawl('123')
        assert set(disks) == set([('/a',
                                   DiskFeature(partitionname='/dev/a',
                                               freepct=90.0,
                                               fstype='type',
                                               mountpt='/a',
                                               mountopts='opts',
                                               partitionsize=100),
                                   'disk'),
                                  ('/b',
                                   DiskFeature(partitionname='/dev/b',
                                               freepct=90.0,
                                               fstype='type',
                                               mountpt='/b',
                                               mountopts='opts',
                                               partitionsize=100),
                                   'disk')])
    @mock.patch('utils.metric_utils.psutil.process_iter',
                side_effect=lambda: [Process('init')])
    def test_crawl_metrics_invm_mode(self, *args):
        """Host metric crawl exposes the mocked per-process metrics."""
        fc = MetricHostCrawler()
        for (k, f, t) in fc.crawl():
            assert f.cpupct == 30.0
            assert f.mempct == 30.0
            assert f.pname == 'init'
            assert f.pid == 123
            assert f.rss == 10
            assert f.status == 'Running'
            assert f.vms == 20
            assert f.read == 10
            assert f.write == 20
        assert args[0].call_count == 1
    @mock.patch('utils.metric_utils.psutil.process_iter',
                side_effect=lambda: [Process('init')])
    @mock.patch('utils.metric_utils.round',
                side_effect=throw_os_error)
    def test_crawl_metrics_invm_mode_failure(self, *args):
        """An OSError inside metric computation must propagate out."""
        with self.assertRaises(OSError):
            fc = MetricHostCrawler()
            for ff in fc.crawl():
                pass
        assert args[0].call_count == 1
    @mock.patch('utils.metric_utils.psutil.process_iter',
                side_effect=lambda: [Process('init')])
    @mock.patch(
        'plugins.systems.metric_container_crawler.run_as_another_namespace',
        side_effect=mocked_run_as_another_namespace)
    @mock.patch(
        ("plugins.systems.disk_container_crawler.utils.dockerutils."
         "exec_dockerinspect"),
        side_effect=lambda long_id: {'State': {'Pid': 123}})
    def test_crawl_metrics_outcontainer_mode(self, *args):
        """Container metric crawl (setns path) exposes the same metrics."""
        fc = MetricContainerCrawler()
        for (k, f, t) in fc.crawl('123'):
            assert f.cpupct == 30.0
            assert f.mempct == 30.0
            assert f.pname == 'init'
            assert f.pid == 123
            assert f.rss == 10
            assert f.status == 'Running'
            assert f.vms == 20
            assert f.read == 10
            assert f.write == 20
        assert args[0].call_count == 1
    @mock.patch('plugins.systems.metric_vm_crawler.psvmi.context_init',
                side_effect=lambda dn1, dn2, kv, d, a: 1000)
    @mock.patch('plugins.systems.metric_vm_crawler.psvmi.process_iter',
                side_effect=lambda vmc: [Process('init')])
    @mock.patch(
        ("plugins.systems.metric_vm_crawler."
         "MetricVmCrawler._crawl_metrics_cpu_percent"),
        side_effect=lambda proc: 30.0)
    @mock.patch('plugins.systems.metric_vm_crawler.psvmi')
    def test_crawl_metrics_vm_mode(self, *args):
        """VM (psvmi-based) metric crawl exposes the mocked metrics."""
        fc = MetricVmCrawler()
        for (k, f, t) in fc.crawl(vm_desc=('dn', '2.6', 'ubuntu', 'x86')):
            assert f.cpupct == 30.0
            assert f.mempct == 30.0
            assert f.pname == 'init'
            assert f.pid == 123
            assert f.rss == 10
            assert f.status == 'Running'
            assert f.vms == 20
            assert f.read == 10
            assert f.write == 20
        # NOTE(review): decorators bind bottom-up, so args[1] is the patched
        # _crawl_metrics_cpu_percent (process_iter is args[2]); the original
        # "# process_iter" label looked copy-pasted -- confirm intent.
        assert args[1].call_count == 1  # process_iter
    @mock.patch('utils.connection_utils.psutil.process_iter',
                side_effect=lambda: [Process('init')])
    def test_crawl_connections_invm_mode(self, *args):
        """Host connection crawl reports the mocked socket endpoints."""
        fc = ConnectionHostCrawler()
        for (k, f, t) in fc.crawl():
            assert f.localipaddr == '1.1.1.1'
            assert f.remoteipaddr == '2.2.2.2'
            assert f.localport == '22'
            assert f.remoteport == '22'
        assert args[0].call_count == 1
    @mock.patch('utils.connection_utils.psutil.process_iter',
                side_effect=lambda: [Process('init')])
    @mock.patch(
        'plugins.systems.connection_container_crawler.run_as_another_namespace',
        side_effect=mocked_run_as_another_namespace)
    @mock.patch(
        ("plugins.systems.connection_container_crawler.utils.dockerutils."
         "exec_dockerinspect"),
        side_effect=lambda long_id: {'State': {'Pid': 123}})
    def test_crawl_connections_outcontainer_mode(self, *args):
        """Container connection crawl (setns path) reports the same endpoints."""
        fc = ConnectionContainerCrawler()
        for (k, f, t) in fc.crawl('123'):
            assert f.localipaddr == '1.1.1.1'
            assert f.remoteipaddr == '2.2.2.2'
            assert f.localport == '22'
            assert f.remoteport == '22'
        assert args[0].call_count == 1
    @mock.patch('plugins.systems.connection_vm_crawler.psvmi.context_init',
                side_effect=lambda dn1, dn2, kv, d, a: 1000)
    @mock.patch('plugins.systems.connection_vm_crawler.psvmi.process_iter',
                side_effect=lambda vmc: [Process('init')])
    @mock.patch('plugins.systems.connection_vm_crawler.psvmi')
    def test_crawl_connections_outvm_mode(self, *args):
        """VM (psvmi-based) connection crawl reports the same endpoints."""
        fc = ConnectionVmCrawler()
        for (k, f, t) in fc.crawl(vm_desc=('dn', '2.6', 'ubuntu', 'x86')):
            assert f.localipaddr == '1.1.1.1'
            assert f.remoteipaddr == '2.2.2.2'
            assert f.localport == '22'
            assert f.remoteport == '22'
        assert args[1].call_count == 1
    @mock.patch('plugins.systems.memory_host_crawler.psutil.virtual_memory',
                side_effect=lambda: psutils_memory(2, 2, 3, 4))
    def test_crawl_memory_invm_mode(self, *args):
        """Host memory crawl maps psutil.virtual_memory into MemoryFeature."""
        fc = MemoryHostCrawler()
        for (k, f, t) in fc.crawl():
            assert f == MemoryFeature(
                memory_used=2,
                memory_buffered=3,
                memory_cached=4,
                memory_free=2,
                memory_util_percentage=50)
        assert args[0].call_count == 1
    @mock.patch('plugins.systems.memory_host_crawler.psutil.virtual_memory',
                side_effect=throw_os_error)
    def test_crawl_memory_invm_mode_failure(self, *args):
        """A psutil failure during host memory crawl must propagate."""
        fc = MemoryHostCrawler()
        with self.assertRaises(OSError):
            for (k, f, t) in fc.crawl():
                pass
        assert args[0].call_count == 1
    @mock.patch('plugins.systems.memory_vm_crawler.psvmi.context_init',
                side_effect=lambda dn1, dn2, kv, d, a: 1000)
    @mock.patch('plugins.systems.memory_vm_crawler.psvmi.system_memory_info',
                side_effect=lambda vmc: psvmi_memory(10, 20, 30, 40))
    @mock.patch('plugins.systems.memory_vm_crawler.psvmi')
    def test_crawl_memory_outvm_mode(self, *args):
        """VM memory crawl maps psvmi.system_memory_info into MemoryFeature."""
        fc = MemoryVmCrawler()
        for (k, f, t) in fc.crawl(vm_desc=('dn', '2.6', 'ubuntu', 'x86')):
            assert f == MemoryFeature(
                memory_used=10,
                memory_buffered=20,
                memory_cached=30,
                memory_free=40,
                memory_util_percentage=20)
        assert args[1].call_count == 1
    @mock.patch(
        'plugins.systems.memory_container_crawler.psutil.virtual_memory',
        side_effect=lambda: psutils_memory(
            10,
            10,
            3,
            10))
    @mock.patch('plugins.systems.memory_container_crawler.open',
                side_effect=mocked_memory_cgroup_open)
    @mock.patch('plugins.systems.memory_container_crawler.DockerContainer',
                side_effect=lambda container_id: DummyContainer(container_id))
    def test_crawl_memory_outcontainer_mode(self, *args):
        """Container memory crawl reads the mocked cgroup memory files."""
        fc = MemoryContainerCrawler()
        for (k, f, t) in fc.crawl('123'):
            assert f == MemoryFeature(
                memory_used=2,
                memory_buffered=200,
                memory_cached=100,
                memory_free=0,
                memory_util_percentage=100)
        assert args[1].call_count == 3  # 3 cgroup files
    @mock.patch(
        'plugins.systems.memory_container_crawler.psutil.virtual_memory',
        side_effect=lambda: psutils_memory(
            10,
            10,
            3,
            10))
    @mock.patch('plugins.systems.memory_container_crawler.open',
                side_effect=throw_os_error)
    @mock.patch('plugins.systems.memory_container_crawler.DockerContainer',
                side_effect=lambda container_id: DummyContainer(container_id))
    def test_crawl_memory_outcontainer_mode_failure(self, *args):
        """A cgroup-file open failure must propagate out of the crawl."""
        fc = MemoryContainerCrawler()
        with self.assertRaises(OSError):
            for (k, f, t) in fc.crawl('123'):
                pass
        assert args[1].call_count == 1  # 1 cgroup files
    @mock.patch(
        'plugins.systems.cpu_host_crawler.psutil.cpu_times_percent',
        side_effect=lambda percpu: [
            psutils_cpu(
                10,
                20,
                30,
                40,
                50,
                60,
                70)])
    def test_crawl_cpu_invm_mode(self, *args):
        """Host CPU crawl maps psutil.cpu_times_percent into CpuFeature."""
        fc = CpuHostCrawler()
        for (k, f, t) in fc.crawl():
            assert f == CpuFeature(
                cpu_idle=10,
                cpu_nice=20,
                cpu_user=30,
                cpu_wait=40,
                cpu_system=50,
                cpu_interrupt=60,
                cpu_steal=70,
                cpu_util=90)
        assert args[0].call_count == 1
    @mock.patch('plugins.systems.cpu_host_crawler.psutil.cpu_times_percent',
                side_effect=throw_os_error)
    def test_crawl_cpu_invm_mode_failure(self, *args):
        """A psutil failure during host CPU crawl must propagate."""
        fc = CpuHostCrawler()
        with self.assertRaises(OSError):
            for (k, f, t) in fc.crawl():
                pass
        assert args[0].call_count == 1
    @mock.patch(
        'plugins.systems.cpu_container_crawler.psutil.cpu_times_percent',
        side_effect=lambda percpu: [
            psutils_cpu(
                10,
                20,
                30,
                40,
                50,
                60,
                70)])
    @mock.patch('plugins.systems.cpu_container_crawler.time.sleep')
    @mock.patch('plugins.systems.cpu_container_crawler.open',
                side_effect=mocked_cpu_cgroup_open)
    @mock.patch('plugins.systems.cpu_container_crawler.DockerContainer',
                side_effect=lambda container_id: DummyContainer(container_id))
    def test_crawl_cpu_outcontainer_mode(self, *args):
        """Container CPU crawl derives usage from the mocked cpu cgroup files."""
        fc = CpuContainerCrawler()
        for (k, f, t) in fc.crawl('123'):
            assert f == CpuFeature(
                cpu_idle=90.0,
                cpu_nice=20,
                cpu_user=5.0,
                cpu_wait=40,
                cpu_system=5.0,
                cpu_interrupt=60,
                cpu_steal=70,
                cpu_util=10.0)
        assert args[1].call_count == 3  # open for 3 cgroup files
    @mock.patch(
        'plugins.systems.cpu_container_crawler.psutil.cpu_times_percent',
        side_effect=lambda percpu: [
            psutils_cpu(
                10,
                20,
                30,
                40,
                50,
                60,
                70)])
    @mock.patch('plugins.systems.cpu_container_crawler.time.sleep')
    @mock.patch('plugins.systems.cpu_container_crawler.open',
                side_effect=throw_os_error)
    @mock.patch('plugins.systems.cpu_container_crawler.DockerContainer',
                side_effect=lambda container_id: DummyContainer(container_id))
    def test_crawl_cpu_outcontainer_mode_failure(self, *args):
        """A cgroup-file open failure must propagate out of the CPU crawl."""
        fc = CpuContainerCrawler()
        with self.assertRaises(OSError):
            for (k, f, t) in fc.crawl('123'):
                pass
        assert args[0].call_count == 1
    @mock.patch(
        'plugins.systems.interface_host_crawler.psutil.net_io_counters',
        side_effect=lambda pernic: {'interface1-unit-tests':
                                    psutils_net(
                                        10,
                                        20,
                                        30,
                                        40,
                                        50,
                                        60)})
    def test_crawl_interface_invm_mode(self, *args):
        """Interface counters are deltas, so the first two crawls report zeros."""
        fc = InterfaceHostCrawler()
        for (k, f, t) in fc.crawl():
            assert f == InterfaceFeature(
                if_octets_tx=0,
                if_octets_rx=0,
                if_packets_tx=0,
                if_packets_rx=0,
                if_errors_tx=0,
                if_errors_rx=0)
        # Second crawl: raw counters unchanged -> deltas still zero.
        for (k, f, t) in fc.crawl():
            assert f == InterfaceFeature(
                if_octets_tx=0,
                if_octets_rx=0,
                if_packets_tx=0,
                if_packets_rx=0,
                if_errors_tx=0,
                if_errors_rx=0)
        assert args[0].call_count == 2
    @mock.patch(
        'plugins.systems.interface_host_crawler.psutil.net_io_counters',
        side_effect=throw_os_error)
    def test_crawl_interface_invm_mode_failure(self, *args):
        """psutil failures must propagate on every crawl attempt."""
        fc = InterfaceHostCrawler()
        with self.assertRaises(OSError):
            for (k, f, t) in fc.crawl():
                pass
        # Each crawl in crawlutils.py instantiates a FeaturesCrawler object
        with self.assertRaises(OSError):
            for (k, f, t) in fc.crawl():
                pass
        assert args[0].call_count == 2
    @mock.patch('plugins.systems.interface_container_crawler.DockerContainer',
                side_effect=lambda container_id: DummyContainer(container_id))
    @mock.patch(
        'plugins.systems.interface_container_crawler.run_as_another_namespace',
        side_effect=mocked_run_as_another_namespace)
    @mock.patch(
        'plugins.systems.interface_container_crawler.psutil.net_io_counters',
        side_effect=lambda pernic: {'eth0':
                                    psutils_net(
                                        10,
                                        20,
                                        30,
                                        40,
                                        50,
                                        60)})
    def test_crawl_interface_outcontainer_mode(self, *args):
        """Container interface crawl: delta counters, zeros on repeat crawls."""
        fc = InterfaceContainerCrawler()
        for (k, f, t) in fc.crawl('123'):
            assert f == InterfaceFeature(
                if_octets_tx=0,
                if_octets_rx=0,
                if_packets_tx=0,
                if_packets_rx=0,
                if_errors_tx=0,
                if_errors_rx=0)
        for (k, f, t) in fc.crawl('123'):
            assert f == InterfaceFeature(
                if_octets_tx=0,
                if_octets_rx=0,
                if_packets_tx=0,
                if_packets_rx=0,
                if_errors_tx=0,
                if_errors_rx=0)
        assert args[0].call_count == 2
        assert args[1].call_count == 2
    @mock.patch('plugins.systems.interface_vm_crawler.psvmi.context_init',
                side_effect=lambda dn1, dn2, kv, d, a: 1000)
    @mock.patch('plugins.systems.interface_vm_crawler.psvmi.interface_iter',
                side_effect=lambda vmc: [psvmi_interface(
                    'eth1', 10, 20, 30, 40, 50, 60)])
    @mock.patch('plugins.systems.interface_vm_crawler.psvmi')
    def test_crawl_interface_outvm_mode(self, *args):
        """VM interface crawl: delta counters, zeros on repeat crawls."""
        fc = InterfaceVmCrawler()
        for (k, f, t) in fc.crawl(vm_desc=('dn', '2.6', 'ubuntu', 'x86')):
            assert f == InterfaceFeature(
                if_octets_tx=0,
                if_octets_rx=0,
                if_packets_tx=0,
                if_packets_rx=0,
                if_errors_tx=0,
                if_errors_rx=0)
        for (k, f, t) in fc.crawl(vm_desc=('dn', '2.6', 'ubuntu', 'x86')):
            assert f == InterfaceFeature(
                if_octets_tx=0,
                if_octets_rx=0,
                if_packets_tx=0,
                if_packets_rx=0,
                if_errors_tx=0,
                if_errors_rx=0)
        assert args[1].call_count == 2
        assert args[2].call_count == 2
    @mock.patch('plugins.systems.load_host_crawler.os.getloadavg',
                side_effect=lambda: [1, 2, 3])
    def test_crawl_load_invm_mode(self, *args):
        """Host load crawl maps os.getloadavg into LoadFeature."""
        fc = LoadHostCrawler()
        for (k, f, t) in fc.crawl():
            # NOTE(review): the stub returns [1, 2, 3] but longterm is
            # asserted as 2 -- confirm whether the crawler intentionally
            # reuses the midterm value or this mirrors a known bug.
            assert f == LoadFeature(shortterm=1, midterm=2, longterm=2)
        assert args[0].call_count == 1
    @mock.patch('plugins.systems.load_host_crawler.os.getloadavg',
                side_effect=throw_os_error)
    def test_crawl_load_invm_mode_failure(self, *args):
        """An os.getloadavg failure must propagate out of the crawl."""
        fc = LoadHostCrawler()
        with self.assertRaises(OSError):
            for (k, f, t) in fc.crawl():
                pass
        assert args[0].call_count == 1
    @mock.patch(
        'plugins.systems.load_container_crawler.run_as_another_namespace',
        side_effect=mocked_run_as_another_namespace)
    @mock.patch('plugins.systems.load_container_crawler.os.getloadavg',
                side_effect=lambda: [1, 2, 3])
    @mock.patch('plugins.systems.load_container_crawler.DockerContainer',
                side_effect=lambda container_id: DummyContainer(container_id))
    def test_crawl_load_outcontainer_mode(self, *args):
        """Container load crawl (setns path) reports the same LoadFeature."""
        fc = LoadContainerCrawler()
        for (k, f, t) in fc.crawl('123'):
            assert f == LoadFeature(shortterm=1, midterm=2, longterm=2)
        assert args[1].call_count == 1
        assert args[2].call_count == 1
    @mock.patch('plugins.systems.dockerps_host_crawler.exec_dockerps',
                side_effect=lambda: [{'State': {'Running': True},
                                      'Image': 'reg/image:latest',
                                      'Config': {'Cmd': 'command'},
                                      'Name': 'name',
                                      'Id': 'id'}])
    def test_crawl_dockerps_invm_mode(self, *args):
        """docker-ps crawl maps the inspect dict into DockerPSFeature.

        Created and Ports are absent from the mocked dict, so the feature
        falls back to 0 and [] for them.
        """
        fc = DockerpsHostCrawler()
        for (k, f, t) in fc.crawl():
            assert f == DockerPSFeature(
                Status=True,
                Created=0,
                Image='reg/image:latest',
                Ports=[],
                Command='command',
                Names='name',
                Id='id')
        assert args[0].call_count == 1
    @mock.patch('plugins.systems.dockerps_host_crawler.exec_dockerps',
                side_effect=throw_os_error)
    def test_crawl_dockerps_invm_mode_failure(self, *args):
        """An exec_dockerps failure must propagate out of the crawl."""
        fc = DockerpsHostCrawler()
        with self.assertRaises(OSError):
            for (k, f, t) in fc.crawl():
                pass
        assert args[0].call_count == 1
    @mock.patch('plugins.systems.dockerhistory_container_crawler.exec_docker_history',
                side_effect=lambda long_id: [
                    {'Id': 'image1', 'random': 'abc'},
                    {'Id': 'image2', 'random': 'abc'}])
    def test_crawl_dockerhistory_outcontainer_mode(self, *args):
        """docker-history crawl wraps the image list under a 'history' key."""
        fc = DockerhistoryContainerCrawler()
        for (k, f, t) in fc.crawl('123'):
            assert f == {'history': [{'Id': 'image1', 'random': 'abc'},
                                     {'Id': 'image2', 'random': 'abc'}]}
        assert args[0].call_count == 1
    @mock.patch(
        'plugins.systems.dockerhistory_container_crawler.exec_docker_history',
        side_effect=throw_os_error)
    def test_crawl_dockerhistory_outcontainer_mode_failure(self, *args):
        """An exec_docker_history failure must propagate out of the crawl."""
        fc = DockerhistoryContainerCrawler()
        with self.assertRaises(OSError):
            for (k, f, t) in fc.crawl('123'):
                pass
        assert args[0].call_count == 1
    @mock.patch(
        'plugins.systems.dockerinspect_container_crawler.exec_dockerinspect',
        side_effect=lambda long_id: {
            'Id': 'image1',
            'random': 'abc'})
    def test_crawl_dockerinspect_outcontainer_mode(self, *args):
        """docker-inspect crawl passes the inspect dict through unchanged."""
        fc = DockerinspectContainerCrawler()
        for (k, f, t) in fc.crawl('123'):
            assert f == {'Id': 'image1', 'random': 'abc'}
        assert args[0].call_count == 1
    @mock.patch(
        'plugins.systems.dockerinspect_container_crawler.exec_dockerinspect',
        side_effect=throw_os_error)
    def test_crawl_dockerinspect_outcontainer_mode_failure(self, *args):
        """An exec_dockerinspect failure must propagate out of the crawl."""
        fc = DockerinspectContainerCrawler()
        with self.assertRaises(OSError):
            for (k, f, t) in fc.crawl('123'):
                pass
        assert args[0].call_count == 1
|
@mock.patch('utils.config_utils.os.lstat',
side_effect=mocked_os_lstat)
@mock.patch('utils.config_utils.codecs.open',
side_effect=mocked_codecs_open)
|
<|file_name|>sequencer.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import six
from gbpservice.nfp.core import log as nfp_logging
LOG = nfp_logging.getLogger(__name__)<|fim▁hole|>
class SequencerEmpty(Exception):
    """Raised by Sequencer.run() when no event is waiting to be scheduled."""
    pass
class SequencerBusy(Exception):
    """Raised by Sequencer.run() while a previous event is still in progress."""
    pass
"""Sequences the events. """
class EventSequencer(object):
    """Serializes related events.

    Events sharing the same key are executed one at a time, in FIFO
    order: only one event per key may be "scheduled" (in progress);
    the rest wait in that key's queue until release() is called.
    """

    class Sequencer(object):
        """Per-key FIFO of events with a single in-progress slot."""

        def __init__(self):
            # Events not scheduled are queued
            self._waitq = deque()
            # Currently scheduled event
            self._scheduled = None

        def _is_busy(self):
            # An event is already in progress for this key.
            if self._scheduled:
                raise SequencerBusy

        def _is_empty(self):
            # Nothing is waiting to be scheduled.
            if not len(self._waitq):
                raise SequencerEmpty

        def sequence(self, event):
            # Queue at the tail; run() pops from the head (FIFO).
            self._waitq.append(event)

        def run(self):
            """Run to get event to be scheduled.

            If sequencer is busy - i.e, an event is already
            scheduled and in progress raises busy except.
            If sequencer is empty - i.e, no event in sequencer
            raises empty except.
            """
            self._is_busy()
            self._is_empty()
            # Pop the first element in the queue - FIFO
            self._scheduled = self._waitq.popleft()
            return self._scheduled

        def is_scheduled(self, event):
            # True if `event` is the one currently in progress.
            # NOTE(review): deliberately returns True when nothing is
            # scheduled at all, so release() is then a no-op unlock.
            if self._scheduled:
                return self._scheduled.desc.uuid == event.desc.uuid and (
                    self._scheduled.id == event.id)
            return True

        def release(self):
            # Free the in-progress slot so run() can schedule the next event.
            self._scheduled = None

        def pop(self):
            # Drain and return every waiting event, clearing the queue.
            self.release()
            events = list(self._waitq)
            self._waitq.clear()
            return events

    def __init__(self):
        # Sequence of related events
        # {key: sequencer()}
        self._sequencer = {}

    def sequence(self, key, event):
        """Queue `event` under `key`, creating the key's sequencer lazily."""
        try:
            self._sequencer[key].sequence(event)
        except KeyError:
            # First event for this key.
            self._sequencer[key] = self.Sequencer()
            self._sequencer[key].sequence(event)
        message = "Sequenced event - %s" % (event.identify())
        LOG.debug(message)

    def run(self):
        """Collect one schedulable event per idle key; prune empty keys."""
        events = []
        # Loop over copy and delete from original
        sequencers = dict(self._sequencer)
        for key, sequencer in six.iteritems(sequencers):
            try:
                event = sequencer.run()
                if event:
                    message = "Desequenced event - %s" % (
                        event.identify())
                    LOG.debug(message)
                    event.sequence = False
                    events.append(event)
            except SequencerBusy as exc:
                # Key already has an event in progress; try again later.
                pass
            except SequencerEmpty as exc:
                exc = exc
                message = "Sequencer empty"
                LOG.debug(message)
                # Nothing queued and nothing running: drop the key.
                del self._sequencer[key]
        return events

    def pop(self):
        """Drain every key's queue and return all waiting events."""
        events = []
        sequencers = dict(self._sequencer)
        for key, sequencer in six.iteritems(sequencers):
            events += sequencer.pop()
        return events

    def release(self, key, event):
        """Unlock `key` if `event` is (or matches) its in-progress event."""
        try:
            message = "(event - %s) checking to release" % (event.identify())
            LOG.debug(message)
            if self._sequencer[key].is_scheduled(event):
                message = "(event - %s) Releasing sequencer" % (
                    event.identify())
                LOG.debug(message)
                self._sequencer[key].release()
        except KeyError:
            # Unknown key: nothing to release.
            return
|
deque = collections.deque
|
<|file_name|>req_handler_unary.rs<|end_file_name|><|fim▁begin|>use crate::error;
use crate::result;
use crate::server::req_handler::ServerRequestStreamHandler;
use crate::server::req_handler::ServerRequestUnaryHandler;
use httpbis::IncreaseInWindow;
use std::marker;
// Adapts a unary request handler (`H`) so it can be driven by the
// streaming request machinery: buffers the single expected message
// until end-of-stream, then hands it to the unary handler.
pub(crate) struct RequestHandlerUnaryToStream<M, H>
where
    H: ServerRequestUnaryHandler<M>,
    M: Send + 'static,
{
    // Flow-control handle used to replenish the HTTP/2 input window.
    pub(crate) increase_in_window: IncreaseInWindow,
    // Wrapped unary handler, invoked once at end-of-stream.
    pub(crate) handler: H,
    // The single buffered message, if it has already arrived.
    pub(crate) message: Option<M>,
    pub(crate) _marker: marker::PhantomData<M>,
}
// Stream-handler adapter for unary calls: accepts exactly one message,
// then delivers it to the wrapped unary handler at end-of-stream.
impl<M, H> ServerRequestStreamHandler<M> for RequestHandlerUnaryToStream<M, H>
where
    H: ServerRequestUnaryHandler<M>,
    M: Send + 'static,
{
    fn grpc_message(&mut self, message: M, frame_size: usize) -> result::Result<()> {
        // Account for the consumed frame and replenish the flow-control window.
        self.increase_in_window.data_frame_processed(frame_size);
        self.increase_in_window.increase_window_auto()?;
        // A unary call must carry exactly one message.
        // (idiom: `is_some()` instead of `if let Some(_) = ...`)
        if self.message.is_some() {
            return Err(error::Error::Other("more than one message in a stream"));
        }
        self.message = Some(message);
        Ok(())
    }

    fn end_stream(&mut self) -> result::Result<()> {
        // Hand the buffered message to the unary handler; a stream that
        // ends without one is an error.
        match self.message.take() {
            Some(message) => self.handler.grpc_message(message),
            None => Err(error::Error::Other("no message, end of stream")),
        }
    }

    fn buffer_processed(&mut self, buffered: usize) -> result::Result<()> {
        // TODO: overflow check
        self.increase_in_window
            .increase_window_auto_above(buffered as u32)?;
        Ok(())
    }
}
|
<|file_name|>ExampleOne.cpp<|end_file_name|><|fim▁begin|>#include "ExampleOne.h"
// Constructor: forwards the module name to the SamClass base; no other setup.
ExampleOneWrite::ExampleOneWrite(std::string name): SamClass(name)
{
}
// One-time setup: reset the counter, register the two output ports
// ("W1", "W2") and start the module loop.
void ExampleOneWrite::SamInit(void)
{
    myint=0;
    newPort(&myfirst2, "W2"); // add new port
    newPort(&myfirst, "W1");
    StartModule();
    puts("started writer");
}
// Per-iteration work: publish the incrementing counter on W1 and the
// (already incremented) counter + 5 on W2.
void ExampleOneWrite::SamIter(void)
{
    Bottle& B = myfirst.prepare(); // prepare the bottle/port
    B.clear();
    B.addInt(myint++);
    myfirst.write(); // add stuff then send
    Bottle& C = myfirst2.prepare();
    C.clear();
    C.addInt(myint+5);
    myfirst2.write();
    puts("running writer");
}
////////////////////////////////////////////////////////////////////////////////
// Constructor: forwards the module name to the SamClass base; no other setup.
ExampleTwoRead::ExampleTwoRead(std::string name): SamClass(name)
{
}
// One-time setup: reset the counter, register the input port "R1" and
// start the module loop.
void ExampleTwoRead::SamInit(void)
{
    myint=0;
    newPort(&myfirst, "R1");
    StartModule();
    puts("started reader");
}
// Per-iteration work: non-blocking read from R1 and print the message
// if one was available.
void ExampleTwoRead::SamIter(void)
{
    puts("running reader");
    Bottle *input = myfirst.read(false); // get in the input from the port, if
    // you want it to wait use true, else use false
    if(input!=NULL) // check theres data
    {
        puts("got a msg");
        // NOTE(review): relies on Bottle::toString() converting implicitly
        // to const char* for puts(); if it returns std::string in the YARP
        // version in use, .c_str() is required - confirm.
        puts(input->toString());
    }
    else
        puts("didn't get a msg");
}
////////////////////////////////////////////////////////////////////////////////
ExampleThreeSendClass::ExampleThreeSendClass(std::string name): SamClass(name)
<|fim▁hole|>void ExampleThreeSendClass::SamInit(void)
{
//name = "/SClass";
myint=0;
newPort(&myfirst, "Out");
StartModule();
}
// Per-iteration work: publish a DataForm struct whose fields are the
// counter, counter+5 and counter+10, via the binary-portable wrapper.
void ExampleThreeSendClass::SamIter(void)
{
    myint++;
    BinPortable<DataForm> &MyData = myfirst.prepare(); // prepare data/port
    MyData.content().x=myint;
    MyData.content().y=myint+5;
    MyData.content().p=myint+10;
    myfirst.write(); // add stuff and write
}
////////////////////////////////////////////////////////////////////////////////
// a Interupt port, when data hits this port it'll do whatever is onread, be
// carefull, fast firing interupts can cause big problems as in normal code
// Interrupt-style callback: fired by YARP whenever a DataForm message
// arrives on the port; prints the three payload fields.
void DataPort ::onRead(BinPortable<DataForm>& b)
{
    printf("X %i Y %i P %i \n",b.content().x,b.content().y,b.content().p);
}
// Constructor: forwards the module name to the SamClass base; no other setup.
ExampleFourReciveClassInterupt::ExampleFourReciveClassInterupt(std::string name): SamClass(name)
{
}
// One-time setup: register the input port "In" and enable its onRead
// callback so messages are handled as they arrive.
void ExampleFourReciveClassInterupt::SamInit(void)
{
    myint=0;
    //name="/RClass";
    newPort(&myfirst, "In");
    myfirst.useCallback(); // this tells it to use the onread method
    StartModule();
}
// Intentionally empty: all work happens in DataPort::onRead via the
// port callback registered in SamInit.
void ExampleFourReciveClassInterupt::SamIter(void)
{
}
|
{
}
|
<|file_name|>xmlformatter.py<|end_file_name|><|fim▁begin|>"""
Format and compress XML documents
"""
import getopt
import re
import sys
import xml.parsers.expat
__version__ = "0.2.4"

# Default configuration for Formatter; each value can be overridden
# per instance via the Formatter() constructor.
DEFAULT_BLANKS = False           # keep (collapsed) blank lines in output
DEFAULT_COMPRESS = False         # minify instead of pretty-print
DEFAULT_SELFCLOSE = False        # emit <tag/> for empty elements
DEFAULT_CORRECT = True           # normalize whitespace in text nodes
DEFAULT_INDENT = 2               # indent units per nesting level
DEFAULT_INDENT_CHAR = " "        # character used for indentation
DEFAULT_INLINE = True            # indent element content inside mixed content
DEFAULT_ENCODING_INPUT = None    # None: let expat detect the input encoding
DEFAULT_ENCODING_OUTPUT = None   # None: fall back to internal/input encoding
DEFAULT_EOF_NEWLINE = False      # force a trailing newline at EOF
class Formatter:
    """Formats or compresses XML documents.

    Parses input with expat into a TokenList, then serializes the
    tokens back out according to the configured options.
    """

    # Use internal encoding:
    encoding_internal = None

    def __init__(
        self,
        indent=DEFAULT_INDENT,
        preserve=[],
        blanks=DEFAULT_BLANKS,
        compress=DEFAULT_COMPRESS,
        selfclose=DEFAULT_SELFCLOSE,
        indent_char=DEFAULT_INDENT_CHAR,
        encoding_input=DEFAULT_ENCODING_INPUT,
        encoding_output=DEFAULT_ENCODING_OUTPUT,
        inline=DEFAULT_INLINE,
        correct=DEFAULT_CORRECT,
        eof_newline=DEFAULT_EOF_NEWLINE,
    ):
        # NOTE(review): mutable default `preserve=[]` is shared across
        # instances; safe only as long as callers never mutate it.
        # Minify the XML document:
        self.compress = compress
        # Use self-closing tags
        self.selfclose = selfclose
        # Correct text nodes
        self.correct = correct
        # Decode the XML document:
        self.encoding_input = self.enc_normalize(encoding_input)
        # Encode ouput by:
        self.encoding_output = self.enc_normalize(encoding_output)
        # Insert indent = indent*level*indent_char:
        self.indent = int(indent)
        # Indent by char:
        self.indent_char = indent_char
        # Format inline objects:
        self.inline = inline
        # Don't compress this elements and their descendants:
        self.preserve = preserve
        # Preserve blanks lines (collapse multiple into one)
        self.blanks = blanks
        # Always add a newline character at EOF
        self.eof_newline = eof_newline

    @property
    def encoding_effective(self, enc=None):
        # Output encoding priority: explicit output > XML declaration >
        # explicit input > UTF-8.
        # NOTE(review): the `enc` parameter is unreachable through
        # property access and appears to be dead.
        if self.encoding_output:
            return self.encoding_output
        elif self.encoding_internal:
            return self.encoding_internal
        elif self.encoding_input:
            return self.encoding_input
        else:
            return "UTF-8"

    def enc_normalize(self, string):
        """ Format an Encoding identifier to upper case. """
        if isinstance(string, str):
            return string.upper()
        return None

    def enc_encode(self, strg):
        """ Encode a formatted XML document in target encoding. """
        if sys.version_info > (3, 0):
            return strg.encode(self.encoding_effective)  # v3
        return strg.decode("utf-8").encode(self.encoding_effective)  # v2

    def enc_output(self, path, strg):
        """ Output according to encoding: to `path` if given, else stdout. """
        fh = sys.stdout
        if strg is not None:
            if path is not None:
                open(path, "w+b").write(strg)
            elif sys.version_info > (3, 0):
                fh.buffer.write(strg)
            else:
                fh.write(strg)

    def format_string(self, xmldoc=""):
        """ Format a XML document given by xmldoc """
        token_list = Formatter.TokenList(self)
        token_list.parser.Parse(xmldoc)
        return self.enc_encode(str(token_list))

    def format_file(self, file):
        """ Format a XML document given by path name """
        fh = open(file, "rb")
        token_list = Formatter.TokenList(self)
        token_list.parser.ParseFile(fh)
        fh.close()
        return self.enc_encode(str(token_list))
    class TokenList:
        """Ordered list of parse tokens plus the whitespace/indent logic.

        Expat handlers append one Token per parser event; str() then
        runs three passes (configure, pre_operate, post_operate) and
        concatenates the serialized tokens.
        """

        # Being in a cdata section:
        cdata_section = False
        # Lock deletion of leading whitespace:
        desc_mixed_level = None
        # Lock indenting:
        indent_level = None
        # Reference the Formatter:
        formatter = None
        # Count levels:
        level_counter = 0
        # Lock deletion of whitespaces:
        preserve_level = None

        def __init__(self, formatter):
            # Keep tokens in a list:
            self._list = []
            self.formatter = formatter
            self.parser = xml.parsers.expat.ParserCreate(
                encoding=self.formatter.encoding_input
            )
            self.parser.specified_attributes = 1
            self.parser.buffer_text = True
            # Register one handler per expat event; each handler appends
            # the matching Token subclass (same name as the event).
            for pattern in [
                "XmlDecl%s",
                "ElementDecl%s",
                "AttlistDecl%s",
                "EntityDecl%s",
                "StartElement%s",
                "EndElement%s",
                "ProcessingInstruction%s",
                "CharacterData%s",
                "Comment%s",
                "Default%s",
                "StartDoctypeDecl%s",
                "EndDoctypeDecl%s",
                "StartCdataSection%s",
                "EndCdataSection%s",
                "NotationDecl%s",
            ]:
                setattr(
                    self.parser, pattern % "Handler", self.xml_handler(pattern % "")
                )

        def __iter__(self):
            return iter(self._list)

        def __len__(self):
            return len(self._list)

        def __getitem__(self, pos):
            if 0 <= pos < len(self._list):
                return self._list[pos]
            else:
                raise IndexError

        def __setitem__(self, pos, value):
            if 0 <= pos < len(self._list):
                self._list[pos] = value
            else:
                raise IndexError

        def __str__(self):
            """ Returns the formatted XML document in UTF-8. """
            for step in ["configure", "pre_operate", "post_operate"]:
                for tk in iter(self):
                    getattr(tk, step)()
            result = ""
            for tk in iter(self):
                result += str(tk)
            if self.formatter.eof_newline and not result.endswith("\n"):
                result += "\n"
            return result

        def append(self, tk):
            """ Add token to tokenlist. """
            tk.pos = len(self._list)
            self._list.append(tk)

        def level_increment(self):
            """ Increment level counter. """
            self.level_counter += 1

        def level_decrement(self):
            """ Decrement level counter. """
            self.level_counter -= 1

        def token_descendant_mixed(self, tk):
            """ Mark descendants of mixed content. """
            if tk.name == "StartElement":
                # Mark every descendant:
                if tk.content_model in [2, 3] and self.desc_mixed_level is None:
                    self.desc_mixed_level = tk.level
                    return False
                return self.desc_mixed_level is not None
            elif tk.name == "EndElement":
                # Stop marking every descendant:
                if tk.level is self.desc_mixed_level:
                    self.desc_mixed_level = None
                elif self.desc_mixed_level is not None:
                    return True
                return False
            elif self.desc_mixed_level is None:
                return False
            return self.desc_mixed_level >= tk.level - 1

        def sequence(self, tk, scheme=None):
            """Returns sublist of token list.

            None: next to last
            EndElement: first to previous"""
            if scheme == "EndElement" or (scheme is None and tk.end):
                return reversed(self._list[: tk.pos])
            return self._list[(tk.pos + 1) :]

        def token_indent(self, tk):
            # Inline mode uses its own, stricter rule.
            if self.formatter.inline:
                return self.token_indent_inline(tk)
            """ Indent outside of text of mixed content. """
            if tk.name == "StartElement":
                # Block indenting for descendants of text and mixed content:
                if tk.content_model in [2, 3] and self.indent_level is None:
                    self.indent_level = tk.level
                elif self.indent_level is not None:
                    return False
                return True
            elif tk.name == "EndElement":
                # Unblock indenting for descendants of text and mixed content:
                if tk.level == self.indent_level:
                    self.indent_level = None
                elif self.indent_level is None:
                    return True
                return False
            return self.indent_level is None

        def token_indent_inline(self, tk):
            """ Indent every element content - no matter enclosed by text or mixed content. """
            for itk in iter(self.sequence(tk, "EndElement")):
                if itk.level < tk.level and itk.name == "StartElement":
                    if itk.content_model == 1:
                        return True
                    return False
                if (
                    itk.level == tk.level
                    and tk.name == "EndElement"
                    and itk.name == "StartElement"
                ):
                    if itk.content_model == 1:
                        return True
                    return False
            return True

        def token_model(self, tk):
            """Returns code for content model.

            0: empty
            1: element
            2: text
            3: mixed"""
            eflag = tflag = 0
            for itk in iter(self.sequence(tk)):
                # Element boundary found:
                if itk.level <= tk.level:
                    break
                # Direct child found:
                elif (itk.level - 1) == tk.level:
                    if itk.start:
                        eflag = 1
                    elif itk.not_empty:
                        tflag = 2
            return eflag + tflag

        def token_preserve(self, tk):
            """Preserve every descendant of a preserved element.

            0: not locked
            1: just (un)locked
            2: locked"""
            # Lock preserving for StartElements:
            if tk.name == "StartElement":
                if self.preserve_level is not None:
                    return 2
                if tk.arg[0] in self.formatter.preserve:
                    self.preserve_level = tk.level
                    return 1
                return 0
            # Unlock preserving for EndElements:
            elif tk.name == "EndElement":
                if (
                    tk.arg[0] in self.formatter.preserve
                    and tk.level == self.preserve_level
                ):
                    self.preserve_level = None
                    return 1
                elif self.preserve_level is None:
                    return 0
                return 2
            return self.preserve_level is not None

        def whitespace_append_trailing(self, tk):
            """ Add a trailing whitespace to previous character data. """
            if self.formatter.correct and tk.leading and tk.not_empty:
                self.whitespace_append(tk, "EndElement", "StartElement", True)

        def whitespace_append_leading(self, tk):
            """ Add a leading whitespace to previous character data. """
            if self.formatter.correct and tk.trailing and tk.not_empty:
                self.whitespace_append(tk)

        def whitespace_append(
            self, tk, start="StartElement", stop="EndElement", direct=False
        ):
            """ Add a whitespace to token list. """
            for itk in self.sequence(tk, start):
                if (
                    itk.empty
                    or (itk.name == stop and itk.descendant_mixed is False)
                    or (itk.name == start and abs(tk - itk) == 1)
                ):
                    break
                elif itk.not_empty or (itk.name == start and itk.descendant_mixed):
                    self.insert_empty(itk, direct)
                    break

        def whitespace_delete_leading(self, tk):
            """ Returns True, if no next token or all empty (up to next end element). """
            if (
                self.formatter.correct
                and tk.leading
                and not tk.preserve
                and not tk.cdata_section
            ):
                for itk in self.sequence(tk, "EndElement"):
                    if itk.trailing:
                        return True
                    elif itk.name in ["EndElement", "CharacterData", "EndCdataSection"]:
                        return False
                return True
            return False

        def whitespace_delete_trailing(self, tk):
            """Returns True, if no next token or all empty (up to next end element)."""
            if (
                self.formatter.correct
                and tk.trailing
                and not tk.preserve
                and not tk.cdata_section
            ):
                for itk in self.sequence(tk, "StartElement"):
                    if itk.end:
                        return True
                    elif (
                        itk.name in ["StartElement", "StartCdataSection"]
                        or itk.not_empty
                    ):
                        return False
                return True
            return False

        def insert_empty(self, tk, before=True):
            """ Insert an Empty Token into token list - before or after tk. """
            if not (0 < tk.pos < (len(self) - 1)):
                return False
            ptk = self[tk.pos - 1]
            ntk = self.formatter.CharacterData(self, [" "])
            ntk.level = max(ptk.level, tk.level)
            ntk.descendant_mixed = tk.descendant_mixed
            ntk.preserve = ptk.preserve * tk.preserve
            ntk.cdata_section = ptk.cdata_section or tk.cdata_section
            if before:
                self._list.insert(tk.pos + 1, ntk)
            else:
                self._list.insert(tk.pos, ntk)
            # Re-number positions from the insertion point onwards.
            for i in range((tk.pos - 1), len(self._list)):
                self._list[i].pos = i

        def xml_handler(self, key):
            """ Returns lambda function which adds token to token list. """
            return lambda *arg: self.append(getattr(self.formatter, key)(self, arg))
    class Token(object):
        """Base class for all parse tokens.

        Subclasses (one per expat event) override __unicode__ to emit
        their serialized form.  NOTE(review): subclasses conventionally
        shadow the builtin `str` as a local accumulator variable.
        """

        def __init__(self, tklist, arg):
            # Reference Token List:
            self.list = tklist
            # Token data (expat handler arguments):
            self.arg = list(arg)
            # Token is placed in a CDATA section:
            self.cdata_section = False
            # Token has content model:
            self.content_model = None
            # Remove trailing whitespaces:
            self.delete_trailing = False
            # Remove leading whitespaces:
            self.delete_leading = False
            # Token is descendant of text or mixed content element:
            self.descendant_mixed = False
            # Reference to formatter:
            self.formatter = tklist.formatter
            # Insert indenting white spaces:
            self.indent = False
            # N-th generation of roots descendants:
            self.level = self.list.level_counter
            # Token class:
            self.name = self.__class__.__name__
            # Preserve white spaces within enclosed tokens:
            self.preserve = False
            # Position in token list:
            self.pos = None

        def __sub__(self, other):
            # Distance between two tokens in the list.
            return self.pos - other.pos

        def __unicode__(self):
            return ""

        # Workaround, see http://lucumr.pocoo.org/2011/1/22/forwards-compatible-python/:
        if sys.version_info > (3, 0):
            __str__ = lambda x: x.__unicode__()
        else:
            __str__ = lambda x: unicode(x).encode("utf-8")

        @property
        def end(self):
            return self.name == "EndElement"

        @property
        def empty(self):
            # Character data consisting only of whitespace.
            return self.name == "CharacterData" and re.match(
                r"^[\t\s\n]*$", self.arg[0]
            )

        @property
        def leading(self):
            # Character data starting with whitespace.
            return self.name == "CharacterData" and re.search(
                r"^[\t\s\n]+", self.arg[0]
            )

        @property
        def not_empty(self):
            # Character data with real (non-whitespace) content,
            # outside any CDATA section.
            return (
                self.name == "CharacterData"
                and not self.cdata_section
                and not re.match(r"^[\t\s\n]+$", self.arg[0])
            )

        @property
        def trailing(self):
            # Character data ending with whitespace.
            return self.name == "CharacterData" and re.search(
                r"[\t\s\n]+$", self.arg[0]
            )

        @property
        def start(self):
            return self.name == "StartElement"

        @property
        def correct(self):
            return self.formatter.correct

        def attribute(self, key, value):
            # Serialize one attribute; empty value still yields key="".
            if key and value:
                return ' %s="%s"' % (key, value)
            elif key:
                return ' %s=""' % (key)
            return ""

        def indent_insert(self):
            """ Indent token. """
            # Child of root and no empty node
            if (
                self.level > 0 and not (self.end and self.list[self.pos - 1].start)
            ) or (  # not empty node:
                self.end and not self.list[self.pos - 1].start
            ):
                return self.indent_create(self.level)
            return ""

        def indent_create(self, times=1):
            """ Returns indent string. """
            if not self.formatter.compress and self.formatter.indent:
                return "\n%s" % (
                    (times * self.formatter.indent) * self.formatter.indent_char
                )
            return ""

        def identifier(self, systemid, publicid):
            # TODO add base parameter:
            if publicid and systemid:
                return ' PUBLIC "%s" "%s"' % (publicid, systemid)
            elif publicid:
                return ' PUBLIC "%s"' % publicid
            elif systemid:
                return ' SYSTEM "%s"' % systemid
            return ""

        def configure(self):
            """ Set token properties. """
            self.descendant_mixed = self.list.token_descendant_mixed(self)
            self.preserve = self.list.token_preserve(self)
            self.cdata_section = self.list.cdata_section

        def pre_operate(self):
            pass

        def post_operate(self):
            pass
    class AttlistDecl(Token):
        """Serializes an <!ATTLIST ...> declaration from the DTD."""

        def __unicode__(self):
            str = self.indent_create()
            str += "<!ATTLIST %s %s" % (self.arg[0], self.arg[1])
            if self.arg[2] is not None:
                str += " %s" % self.arg[2]
            # arg[3] = default value, arg[4] = required flag.
            if self.arg[4] and not self.arg[3]:
                str += " #REQUIRED"
            elif self.arg[3] and self.arg[4]:
                str += " #FIXED"
            elif not self.arg[4] and not self.arg[3]:
                str += " #IMPLIED"
            if self.arg[3]:
                str += ' "%s"' % self.arg[3]
            str += ">"
            return str
    class CharacterData(Token):
        """Text node: whitespace correction, blank handling, escaping."""

        def __unicode__(self):
            str = self.arg[0]
            if not self.preserve and not self.cdata_section:
                # remove empty tokens always in element content!
                if self.empty and not self.descendant_mixed:
                    if self.formatter.blanks and not self.formatter.compress and re.match(r"\s*\n\s*\n\s*", str):
                        # Collapse multiple blank lines into a single one.
                        str = "\n"
                    else:
                        str = ""
                else:
                    if self.correct:
                        # Normalize newlines/tabs and runs of whitespace.
                        str = re.sub(r"\r\n", "\n", str)
                        str = re.sub(r"\r|\n|\t", " ", str)
                        str = re.sub(r"\s+", " ", str)
                    if self.delete_leading:
                        str = re.sub(r"^\s", "", str)
                    if self.delete_trailing:
                        str = re.sub(r"\s$", "", str)
            if not self.cdata_section:
                # Escape markup characters outside CDATA.
                str = re.sub(r"&", "&amp;", str)
                str = re.sub(r"<", "&lt;", str)
            return str

        def pre_operate(self):
            self.list.whitespace_append_trailing(self)
            self.list.whitespace_append_leading(self)

        def post_operate(self):
            self.delete_leading = self.list.whitespace_delete_leading(self)
            self.delete_trailing = self.list.whitespace_delete_trailing(self)
    class Comment(Token):
        """Serializes an XML comment, trimming runs of newlines at its edges."""

        def __unicode__(self):
            str = ""
            if self.preserve in [0, 1] and self.indent:
                str += self.indent_insert()
            str += "<!--%s-->" % re.sub(
                r"^[\r\n]+$", "\n", re.sub(r"^[\r\n]+", "\n", self.arg[0])
            )
            return str

        def configure(self):
            super(Formatter.Comment, self).configure()
            self.indent = self.list.token_indent(self)
    class Default(Token):
        """Catch-all expat event; emits nothing (inherits empty __unicode__)."""
        pass
    class EndCdataSection(Token):
        """Closes a CDATA section and clears the list's cdata flag."""

        def __unicode__(self):
            return "]]>"

        def configure(self):
            self.list.cdata_section = False
    class ElementDecl(Token):
        """Serializes an <!ELEMENT ...> declaration from the DTD."""

        def __unicode__(self):
            str = self.indent_create()
            str += "<!ELEMENT %s%s>" % (self.arg[0], self.evaluate_model(self.arg[1]))
            return str

        def evaluate_model(self, model, modelStr="", concatStr=""):
            # Recursively renders an expat content-model tuple
            # (type, quantifier, name, children) back into DTD syntax.
            childSeq = []
            mixed = model[0] == xml.parsers.expat.model.XML_CTYPE_MIXED
            hasChilds = len(model[3]) or mixed
            if model[0] == xml.parsers.expat.model.XML_CTYPE_EMPTY:  # 1
                modelStr += " EMPTY"
            elif model[0] == xml.parsers.expat.model.XML_CTYPE_ANY:  # 2
                modelStr += " ANY"
            elif model[0] == xml.parsers.expat.model.XML_CTYPE_NAME:  # 4
                modelStr = "%s" % model[2]  # new start
            elif model[0] in (
                xml.parsers.expat.model.XML_CTYPE_CHOICE,
                xml.parsers.expat.model.XML_CTYPE_MIXED,
            ):  # 5
                concatStr = "|"
            elif model[0] == xml.parsers.expat.model.XML_CTYPE_SEQ:  # 6
                concatStr = ","
            if hasChilds:
                modelStr += " ("
            if mixed:
                childSeq.append("#PCDATA")
            for child in model[3]:
                childSeq.append(self.evaluate_model(child))
            modelStr += concatStr.join(childSeq)
            if hasChilds:
                modelStr += ")"
            # Append the quantifier (?, +, * or nothing).
            modelStr += {
                xml.parsers.expat.model.XML_CQUANT_NONE: "",
                xml.parsers.expat.model.XML_CQUANT_OPT: "?",
                xml.parsers.expat.model.XML_CQUANT_PLUS: "+",
                xml.parsers.expat.model.XML_CQUANT_REP: "*",
            }[model[1]]
            return modelStr
    class EndDoctypeDecl(Token):
        """Closes a DOCTYPE declaration, including an internal subset if any."""

        def __unicode__(self):
            str = ""
            # Only close the internal subset bracket if one was opened.
            if self.list[self.pos - 1].name != "StartDoctypeDecl":
                str += self.indent_create(0)
                str += "]"
            str += ">"
            str += self.indent_create(0)
            return str
    class EndElement(Token):
        """Serializes a closing tag; suppressed for self-closed empty elements."""

        def __init__(self, list, arg):
            # Closing tag: step back up one nesting level first.
            list.level_decrement()
            super(Formatter.EndElement, self).__init__(list, arg)

        def __unicode__(self):
            str = ""
            # Don't close empty nodes on compression mode:
            if (
                not (self.formatter.compress or self.formatter.selfclose)
                or self.list[self.pos - 1].name != "StartElement"
            ):
                if self.preserve in [0] and self.indent:
                    str += self.indent_insert()
                str += "</%s>" % self.arg[0]
            return str

        def configure(self):
            self.descendant_mixed = self.list.token_descendant_mixed(self)
            self.preserve = self.list.token_preserve(self)
            self.indent = self.list.token_indent(self)
    class EntityDecl(Token):
        """Serializes an <!ENTITY ...> declaration from the DTD."""

        def __unicode__(self):
            str = self.indent_create()
            str += "<!ENTITY "
            # arg[1]: parameter entity flag.
            if self.arg[1]:
                str += "% "
            str += "%s " % self.arg[0]
            if self.arg[2]:
                # Internal entity with literal value.
                str += '"%s"' % self.arg[2]
            else:
                # External entity with SYSTEM/PUBLIC identifier.
                str += "%s " % self.identifier(self.arg[4], self.arg[5])
                if self.arg[6]:
                    str += "NDATA %s" % self.arg[6]
            str += ">"
            return str
    class NotationDecl(Token):
        """Serializes a <!NOTATION ...> declaration from the DTD."""

        def __unicode__(self):
            str = self.indent_create()
            str += "<!NOTATION %s%s>" % (
                self.arg[0],
                self.identifier(self.arg[2], self.arg[3]),
            )
            return str
    class ProcessingInstruction(Token):
        """Serializes a <?target data?> processing instruction."""

        def __unicode__(self):
            str = ""
            if self.preserve in [0, 1] and self.indent:
                str += self.indent_insert()
            str += "<?%s %s?>" % (self.arg[0], self.arg[1])
            return str

        def configure(self):
            super(Formatter.ProcessingInstruction, self).configure()
            self.indent = self.list.token_indent(self)
    class StartCdataSection(Token):
        """Opens a CDATA section and sets the list's cdata flag."""

        def __unicode__(self):
            return "<![CDATA["

        def configure(self):
            self.list.cdata_section = True
    class StartDoctypeDecl(Token):
        """Opens a DOCTYPE declaration; arg[3] flags an internal subset."""

        def __unicode__(self):
            str = "<!DOCTYPE %s" % (self.arg[0])
            if self.arg[1]:
                str += self.identifier(self.arg[1], self.arg[2])
            if self.arg[3]:
                str += " ["
            return str
    class StartElement(Token):
        """Serializes an opening tag with sorted attributes; may self-close."""

        def __init__(self, list, arg):
            super(Formatter.StartElement, self).__init__(list, arg)
            # Opening tag: descend one nesting level for what follows.
            self.list.level_increment()

        def __unicode__(self):
            str = ""
            if self.preserve in [0, 1] and self.indent:
                str += self.indent_insert()
            str += "<%s" % self.arg[0]
            # Attributes sorted by name for stable output.
            for attr in sorted(self.arg[1].keys()):
                str += self.attribute(attr, self.arg[1][attr])
            # Immediately-closed element: emit <tag/> in compress/selfclose mode.
            if self.list[self.pos + 1].end and (self.formatter.compress or self.formatter.selfclose):
                str += "/>"
            else:
                str += ">"
            return str

        def configure(self):
            self.content_model = self.list.token_model(self)
            self.descendant_mixed = self.list.token_descendant_mixed(self)
            self.preserve = self.list.token_preserve(self)
            self.indent = self.list.token_indent(self)
    class XmlDecl(Token):
        """Serializes the <?xml ...?> declaration and records its encoding."""

        def __init__(self, list, arg):
            super(Formatter.XmlDecl, self).__init__(list, arg)
            if len(self.arg) > 1:
                # Remember the document's declared encoding; it feeds
                # into Formatter.encoding_effective.
                self.formatter.encoding_internal = self.arg[1]

        def __unicode__(self):
            str = "<?xml%s%s" % (
                self.attribute("version", self.arg[0]),
                self.attribute("encoding", self.formatter.encoding_effective),
            )
            # arg[2]: standalone flag (-1 when unspecified).
            if self.arg[2] > -1:
                str += self.attribute("standalone", "yes")
            str += "?>\n"
            return str
def cli_usage(msg=""):
    """ Output usage for command line tool and exit with status 2. """
    sys.stderr.write(msg + "\n")
    sys.stderr.write(
        'Usage: xmlformat [--preserve "pre,literal"] [--blanks]\
 [--compress] [--selfclose] [--indent num] [--indent-char char]\
 [--outfile file] [--encoding enc] [--outencoding enc]\
 [--disable-inlineformatting] [--overwrite] [--disable-correction]\
 [--eof-newline]\
 [--help] <--infile file | file | - >\n'
    )
    sys.exit(2)
def cli():
    """Launch xmlformatter from the command line.

    Parses options, formats the given file (or stdin when "-" is
    passed), and writes the result to stdout, --outfile, or back over
    the input file when --overwrite is set.  Exits with status 2 via
    cli_usage() on any error.
    """
    res = None
    indent = DEFAULT_INDENT
    indent_char = DEFAULT_INDENT_CHAR
    outfile = None
    overwrite = False
    preserve = []
    blanks = False
    compress = DEFAULT_COMPRESS
    selfclose = DEFAULT_SELFCLOSE
    infile = None
    encoding = DEFAULT_ENCODING_INPUT
    outencoding = DEFAULT_ENCODING_OUTPUT
    inline = DEFAULT_INLINE
    correct = DEFAULT_CORRECT
    eof_newline = DEFAULT_EOF_NEWLINE
    try:
        opts, args = getopt.getopt(
            sys.argv[1:],
            "",
            [
                "compress",
                "selfclose",
                "disable-correction",
                "disable-inlineformatting",
                "encoding=",
                "help",
                "infile=",
                "indent=",
                "indent-char=",
                "outfile=",
                "outencoding=",
                "overwrite",
                "preserve=",
                "blanks",
                "eof-newline"
            ],
        )
    except getopt.GetoptError as err:
        cli_usage(str(err))
    for key, value in opts:
        if key in ["--indent"]:
            indent = value
        elif key in ["--preserve"]:
            # Accept comma- or space-separated element names.
            preserve = value.replace(",", " ").split()
        elif key in ["--blanks"]:
            blanks = True
        elif key in ["--help"]:
            cli_usage()
        elif key in ["--compress"]:
            compress = True
        elif key in ["--selfclose"]:
            selfclose = True
        elif key in ["--outfile"]:
            outfile = value
        elif key in ["--infile"]:
            infile = value
        elif key in ["--encoding"]:
            encoding = value
        elif key in ["--outencoding"]:
            outencoding = value
        elif key in ["--indent-char"]:
            indent_char = value
        elif key in ["--disable-inlineformatting"]:
            inline = False
        elif key in ["--disable-correction"]:
            correct = False
        elif key in ["--overwrite"]:
            overwrite = True
        elif key in ["--eof-newline"]:
            eof_newline = True
    try:
        formatter = Formatter(
            indent=indent,
            preserve=preserve,
            blanks=blanks,
            compress=compress,
            selfclose=selfclose,
            encoding_input=encoding,
            encoding_output=outencoding,
            indent_char=indent_char,
            inline=inline,
            correct=correct,
            eof_newline=eof_newline,
        )
        input_file = None
        if infile:
            input_file = infile
            res = formatter.format_file(input_file)
        elif len(args) > 0:
            if args[0] == "-":
                # Read the whole document from stdin.
                res = formatter.format_string("".join(sys.stdin.readlines()))
            else:
                input_file = args[0]
                res = formatter.format_file(input_file)
    except xml.parsers.expat.ExpatError as err:
        cli_usage("XML error: %s" % err)
    except IOError as err:
        cli_usage("IO error: %s" % err)
    except Exception as err:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; report real errors (typo "Unkonwn" fixed).
        cli_usage("Unknown error: %s" % err)
    if overwrite:
        formatter.enc_output(input_file, res)
    else:
        formatter.enc_output(outfile, res)
| |
<|file_name|>htmltablecellelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::RGBA;
use dom::bindings::codegen::Bindings::HTMLTableCellElementBinding::HTMLTableCellElementMethods;
use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::LayoutJS;
use dom::bindings::str::DOMString;
use dom::document::Document;
use dom::element::{Element, RawLayoutElementHelpers};
use dom::htmlelement::HTMLElement;
use dom::htmltablerowelement::HTMLTableRowElement;
use dom::node::Node;
use dom::virtualmethods::VirtualMethods;
use html5ever_atoms::LocalName;
use style::attr::{AttrValue, LengthOrPercentageOrAuto};
<|fim▁hole|>
// DOM node backing the HTMLTableCellElement interface (shared by the
// `<td>` and `<th>` elements).
#[dom_struct]
pub struct HTMLTableCellElement {
    htmlelement: HTMLElement,
}
impl HTMLTableCellElement {
    // Creates the element state for a `<td>`/`<th>` with the given tag
    // name, namespace prefix and owner document.
    pub fn new_inherited(tag_name: LocalName,
                         prefix: Option<DOMString>,
                         document: &Document)
                         -> HTMLTableCellElement {
        HTMLTableCellElement {
            htmlelement: HTMLElement::new_inherited(tag_name, prefix, document),
        }
    }

    // Accessor for the inherited HTMLElement part.
    #[inline]
    pub fn htmlelement(&self) -> &HTMLElement {
        &self.htmlelement
    }
}
// IDL-generated interface methods; attribute getters/setters are
// produced by Servo's make_* macros from the reflected content attributes.
impl HTMLTableCellElementMethods for HTMLTableCellElement {
    // https://html.spec.whatwg.org/multipage/#dom-tdth-colspan
    make_uint_getter!(ColSpan, "colspan", DEFAULT_COLSPAN);
    // https://html.spec.whatwg.org/multipage/#dom-tdth-colspan
    make_uint_setter!(SetColSpan, "colspan", DEFAULT_COLSPAN);
    // https://html.spec.whatwg.org/multipage/#dom-tdth-bgcolor
    make_getter!(BgColor, "bgcolor");
    // https://html.spec.whatwg.org/multipage/#dom-tdth-bgcolor
    make_legacy_color_setter!(SetBgColor, "bgcolor");
    // https://html.spec.whatwg.org/multipage/#dom-tdth-width
    make_getter!(Width, "width");
    // https://html.spec.whatwg.org/multipage/#dom-tdth-width
    make_nonzero_dimension_setter!(SetWidth, "width");

    // https://html.spec.whatwg.org/multipage/#dom-tdth-cellindex
    fn CellIndex(&self) -> i32 {
        let self_node = self.upcast::<Node>();
        // Per spec: -1 unless the parent is a table row.
        let parent_children = match self_node.GetParentNode() {
            Some(ref parent_node) if parent_node.is::<HTMLTableRowElement>() => {
                parent_node.children()
            },
            _ => return -1,
        };
        // Index among the row's cell children only.
        parent_children.filter(|c| c.is::<HTMLTableCellElement>())
                       .position(|c| &*c == self_node)
                       .map_or(-1, |p| p as i32)
    }
}
/// Accessors for the cell's presentational attributes, callable from the
/// layout thread (see the `LayoutJS` impl below).
pub trait HTMLTableCellElementLayoutHelpers {
    fn get_background_color(&self) -> Option<RGBA>;
    fn get_colspan(&self) -> Option<u32>;
    fn get_width(&self) -> LengthOrPercentageOrAuto;
}
#[allow(unsafe_code)]
impl HTMLTableCellElementLayoutHelpers for LayoutJS<HTMLTableCellElement> {
    fn get_background_color(&self) -> Option<RGBA> {
        // SAFETY: `unsafe_get` bypasses GC rooting; presumably sound because
        // these helpers only run on the layout thread while the DOM is not
        // mutating — NOTE(review): confirm against the LayoutJS contract.
        unsafe {
            (&*self.upcast::<Element>().unsafe_get())
                .get_attr_for_layout(&ns!(), &local_name!("bgcolor"))
                .and_then(AttrValue::as_color)
                .cloned()
        }
    }
    fn get_colspan(&self) -> Option<u32> {
        // SAFETY: same layout-thread-only access as get_background_color.
        unsafe {
            (&*self.upcast::<Element>().unsafe_get())
                .get_attr_for_layout(&ns!(), &local_name!("colspan"))
                .map(AttrValue::as_uint)
        }
    }
    fn get_width(&self) -> LengthOrPercentageOrAuto {
        // SAFETY: same layout-thread-only access as get_background_color.
        unsafe {
            (&*self.upcast::<Element>().unsafe_get())
                .get_attr_for_layout(&ns!(), &local_name!("width"))
                .map(AttrValue::as_dimension)
                .cloned()
                // A missing width attribute means automatic sizing.
                .unwrap_or(LengthOrPercentageOrAuto::Auto)
        }
    }
}
impl VirtualMethods for HTMLTableCellElement {
    fn super_type(&self) -> Option<&VirtualMethods> {
        Some(self.upcast::<HTMLElement>() as &VirtualMethods)
    }
    /// Parses the presentational attributes this element understands
    /// (colspan, bgcolor, width); everything else is delegated to the
    /// HTMLElement implementation via super_type().
    fn parse_plain_attribute(&self, local_name: &LocalName, value: DOMString) -> AttrValue {
        match *local_name {
            local_name!("colspan") => AttrValue::from_u32(value.into(), DEFAULT_COLSPAN),
            local_name!("bgcolor") => AttrValue::from_legacy_color(value.into()),
            local_name!("width") => AttrValue::from_nonzero_dimension(value.into()),
            _ => self.super_type().unwrap().parse_plain_attribute(local_name, value),
        }
    }
}
|
const DEFAULT_COLSPAN: u32 = 1;
|
<|file_name|>template_locals.ts<|end_file_name|><|fim▁begin|>import { argv } from 'yargs';
import * as CONFIG from '../../config';
/**
* Returns the project configuration (consisting of the base configuration provided by seed.config.ts and the additional
* project specific overrides as defined in project.config.ts)
*/<|fim▁hole|> const configEnv = CONFIG.getPluginConfig('environment-config')[configEnvName];
if (!configEnv) {
throw new Error('Invalid configuration name');
}
const config = {
ENV_CONFIG: JSON.stringify(configEnv)
};
return Object.assign(config, CONFIG);
}<|fim▁end|>
|
export function templateLocals() {
const configEnvName = argv['config-env'] || 'dev';
|
<|file_name|>event.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
from eventlet import hubs
from eventlet.support import greenlets as greenlet
__all__ = ['Event']
class NOT_USED:
    # Sentinel type: lets Event distinguish "no result yet" from a
    # legitimate result of None stored in Event._result.
    def __repr__(self):
        return 'NOT_USED'

# Replace the class with its single instance; the module-level name now
# refers to the sentinel value itself, so `is NOT_USED` checks work.
NOT_USED = NOT_USED()
class Event(object):
"""An abstraction where an arbitrary number of coroutines
can wait for one event from another.
Events are similar to a Queue that can only hold one item, but differ
in two important ways:
1. calling :meth:`send` never unschedules the current greenthread
2. :meth:`send` can only be called once; create a new event to send again.
They are good for communicating results between coroutines, and
are the basis for how
:meth:`GreenThread.wait() <eventlet.greenthread.GreenThread.wait>`
is implemented.
>>> from eventlet import event
>>> import eventlet
>>> evt = event.Event()
>>> def baz(b):
... evt.send(b + 1)
...
>>> _ = eventlet.spawn_n(baz, 3)
>>> evt.wait()
4
"""
_result = None
_exc = None
def __init__(self):
self._waiters = set()
self.reset()
def __str__(self):
params = (self.__class__.__name__, hex(id(self)),
self._result, self._exc, len(self._waiters))
return '<%s at %s result=%r _exc=%r _waiters[%d]>' % params
    def reset(self):
        # this is kind of a misfeature and doesn't work perfectly well,
        # it's better to create a new event rather than reset an old one
        # removing documentation so that we don't get new use cases for it
        # NOTE: the assertion reads inverted on purpose — a *fresh* event
        # already has _result == NOT_USED, and re-resetting one is the
        # misuse being guarded against. __init__ passes because at that
        # point _result is still the class-level None.
        assert self._result is not NOT_USED, 'Trying to re-reset() a fresh event.'
        self._result = NOT_USED
        self._exc = None
def ready(self):
""" Return true if the :meth:`wait` call will return immediately.
Used to avoid waiting for things that might take a while to time out.
For example, you can put a bunch of events into a list, and then visit
them all repeatedly, calling :meth:`ready` until one returns ``True``,
and then you can :meth:`wait` on that one."""
return self._result is not NOT_USED<|fim▁hole|> def has_result(self):
return self._result is not NOT_USED and self._exc is None
    def poll(self, notready=None):
        # Non-blocking variant of wait(): if the event already fired,
        # return its result (or re-raise the stored exception, as wait()
        # does); otherwise return `notready` immediately.
        if self.ready():
            return self.wait()
        return notready
    # QQQ make it return tuple (type, value, tb) instead of raising
    # because
    # 1) "poll" does not imply raising
    # 2) it's better not to screw up caller's sys.exc_info() by default
    # (e.g. if caller wants to calls the function in except or finally)
    def poll_exception(self, notready=None):
        # Like poll(), but only consults the event when it fired with an
        # exception; wait() then re-raises it in the caller.
        if self.has_exception():
            return self.wait()
        return notready
    def poll_result(self, notready=None):
        # Like poll(), but only returns a value when the event fired
        # successfully (result set, no exception); otherwise `notready`.
        if self.has_result():
            return self.wait()
        return notready
def wait(self):
"""Wait until another coroutine calls :meth:`send`.
Returns the value the other coroutine passed to
:meth:`send`.
>>> from eventlet import event
>>> import eventlet
>>> evt = event.Event()
>>> def wait_on():
... retval = evt.wait()
... print("waited for {0}".format(retval))
>>> _ = eventlet.spawn(wait_on)
>>> evt.send('result')
>>> eventlet.sleep(0)
waited for result
Returns immediately if the event has already
occured.
>>> evt.wait()
'result'
"""
current = greenlet.getcurrent()
if self._result is NOT_USED:
self._waiters.add(current)
try:
return hubs.get_hub().switch()
finally:
self._waiters.discard(current)
if self._exc is not None:
current.throw(*self._exc)
return self._result
def send(self, result=None, exc=None):
"""Makes arrangements for the waiters to be woken with the
result and then returns immediately to the parent.
>>> from eventlet import event
>>> import eventlet
>>> evt = event.Event()
>>> def waiter():
... print('about to wait')
... result = evt.wait()
... print('waited for {0}'.format(result))
>>> _ = eventlet.spawn(waiter)
>>> eventlet.sleep(0)
about to wait
>>> evt.send('a')
>>> eventlet.sleep(0)
waited for a
It is an error to call :meth:`send` multiple times on the same event.
>>> evt.send('whoops')
Traceback (most recent call last):
...
AssertionError: Trying to re-send() an already-triggered event.
Use :meth:`reset` between :meth:`send` s to reuse an event object.
"""
assert self._result is NOT_USED, 'Trying to re-send() an already-triggered event.'
self._result = result
if exc is not None and not isinstance(exc, tuple):
exc = (exc, )
self._exc = exc
hub = hubs.get_hub()
for waiter in self._waiters:
hub.schedule_call_global(
0, self._do_send, self._result, self._exc, waiter)
def _do_send(self, result, exc, waiter):
if waiter in self._waiters:
if exc is None:
waiter.switch(result)
else:
waiter.throw(*exc)
def send_exception(self, *args):
"""Same as :meth:`send`, but sends an exception to waiters.
The arguments to send_exception are the same as the arguments
to ``raise``. If a single exception object is passed in, it
will be re-raised when :meth:`wait` is called, generating a
new stacktrace.
>>> from eventlet import event
>>> evt = event.Event()
>>> evt.send_exception(RuntimeError())
>>> evt.wait()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "eventlet/event.py", line 120, in wait
current.throw(*self._exc)
RuntimeError
If it's important to preserve the entire original stack trace,
you must pass in the entire :func:`sys.exc_info` tuple.
>>> import sys
>>> evt = event.Event()
>>> try:
... raise RuntimeError()
... except RuntimeError:
... evt.send_exception(*sys.exc_info())
...
>>> evt.wait()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "eventlet/event.py", line 120, in wait
current.throw(*self._exc)
File "<stdin>", line 2, in <module>
RuntimeError
Note that doing so stores a traceback object directly on the
Event object, which may cause reference cycles. See the
:func:`sys.exc_info` documentation.
"""
# the arguments and the same as for greenlet.throw
return self.send(None, args)<|fim▁end|>
|
def has_exception(self):
return self._exc is not None
|
<|file_name|>command_modal.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2020 DDN. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
use crate::{
components::{font_awesome::*, modal},
dependency_tree::{build_direct_dag, traverse_graph, DependencyDAG, Deps, Rich},
extensions::{MergeAttrs as _, NodeExt as _, RequestExt as _},
generated::css_classes::C,
key_codes, sleep_with_handle, GMsg,
};
use futures::channel::oneshot;
use iml_wire_types::{ApiList, AvailableTransition, Command, EndpointName, Job, Step};
use regex::{Captures, Regex};
use seed::{prelude::*, *};
use serde::de::DeserializeOwned;
use std::{
collections::{HashMap, HashSet},
fmt::{self, Display},
sync::Arc,
time::Duration,
};
/// The component polls `/api/(command|job|step)/` endpoint and this constant defines how often it does.
const POLL_INTERVAL: Duration = Duration::from_millis(1000);
type Job0 = Job<Option<serde_json::Value>>;
type RichCommand = Rich<i32, Arc<Command>>;
type RichJob = Rich<i32, Arc<Job0>>;
type RichStep = Rich<i32, Arc<Step>>;
type JobsGraph = DependencyDAG<i32, RichJob>;
/// Newtype over a command id, so it cannot be confused with job/step ids.
#[derive(Copy, Clone, Hash, PartialEq, Eq, Ord, PartialOrd, Debug)]
pub struct CmdId(i32);
/// Newtype over a job id.
#[derive(Copy, Clone, Hash, PartialEq, Eq, Ord, PartialOrd, Debug)]
pub struct JobId(i32);
/// An id tagged with the kind of entity it refers to; used as the key of
/// the expand/collapse selection in the command modal tree.
#[derive(Copy, Clone, Hash, PartialEq, Eq, Debug)]
pub enum TypedId {
    Command(i32),
    Job(i32),
    Step(i32),
}
/// Target state for a job state-transition request (e.g. "cancelled").
#[derive(Clone, Debug)]
struct TransitionState(String);
/// The set of tree nodes the user currently has expanded.
#[derive(Clone, Eq, PartialEq, Debug, Default)]
pub struct Select(HashSet<TypedId>);
impl Display for Select {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?}", self)
}
}
impl Select {
    /// Partitions the selection into (command ids, job ids, step ids);
    /// each vector comes back sorted ascending and de-duplicated.
    fn split(&self) -> (Vec<i32>, Vec<i32>, Vec<i32>) {
        // Insert keeping the vector sorted; binary_search's Ok arm also
        // makes the insert a no-op for duplicates.
        fn insert_in_sorted(ids: &mut Vec<i32>, id: i32) {
            match ids.binary_search(&id) {
                Ok(_) => {}
                Err(pos) => ids.insert(pos, id),
            }
        }
        let mut cmd_ids = Vec::new();
        let mut job_ids = Vec::new();
        let mut step_ids = Vec::new();
        for t in &self.0 {
            match t {
                TypedId::Command(c) => insert_in_sorted(&mut cmd_ids, *c),
                TypedId::Job(j) => insert_in_sorted(&mut job_ids, *j),
                TypedId::Step(s) => insert_in_sorted(&mut step_ids, *s),
            }
        }
        (cmd_ids, job_ids, step_ids)
    }
    fn contains(&self, id: TypedId) -> bool {
        self.0.contains(&id)
    }
    /// Toggles `id` in the selection. Returns `true` when the click left
    /// the item selected (just expanded — its children may need fetching),
    /// `false` when the click deselected it (`remove` returned true).
    fn perform_click(&mut self, id: TypedId) -> bool {
        if self.0.contains(&id) {
            !self.0.remove(&id)
        } else {
            self.0.insert(id)
        }
    }
}
#[derive(Clone, Debug)]
pub struct Context<'a> {
pub steps_view: &'a HashMap<JobId, Vec<Arc<RichStep>>>,
pub select: &'a Select,
pub cancelling_jobs: &'a HashSet<i32>,
}
#[derive(Clone, Debug)]
pub enum Input {
Commands(Vec<Arc<Command>>),
Ids(Vec<i32>),
}
#[derive(Default, Debug)]
pub struct Model {
pub tree_cancel: Option<oneshot::Sender<()>>,
pub commands: HashMap<i32, Arc<RichCommand>>,
pub commands_view: Vec<Arc<RichCommand>>,
pub jobs: HashMap<i32, Arc<RichJob>>,
pub jobs_graphs: HashMap<CmdId, JobsGraph>,
pub steps: HashMap<i32, Arc<RichStep>>,
pub steps_view: HashMap<JobId, Vec<Arc<RichStep>>>,
pub select: Select,
pub cancelling_jobs: HashSet<i32>,
pub modal: modal::Model,
}
#[derive(Clone, Debug)]
pub enum Msg {
Modal(modal::Msg),
FireCommands(Input),
FetchTree,
FetchedCommands(Box<fetch::ResponseDataResult<ApiList<Command>>>),
FetchedJobs(Box<fetch::ResponseDataResult<ApiList<Job0>>>),
FetchedSteps(Box<fetch::ResponseDataResult<ApiList<Step>>>),
Click(TypedId),
CancelJob(i32),
CancelledJob(i32, Box<fetch::ResponseDataResult<Job0>>),
Noop,
}
pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {
match msg {
Msg::Modal(msg) => {
if msg == modal::Msg::Close {
model.clear();
}
modal::update(msg, &mut model.modal, &mut orders.proxy(Msg::Modal));
}
Msg::FireCommands(cmds) => {
model.select = Select(HashSet::new());
model.modal.open = true;
match cmds {
Input::Commands(cmds) => {
// use the (little) optimization:
// if we already have the commands and they all finished, we don't need to poll them anymore
model.update_commands(cmds);
if !is_all_commands_finished(&model.commands) {
orders.send_msg(Msg::FetchTree);
}
}
Input::Ids(ids) => {
// we have ids only, so we need to populate the vector first
orders.perform_cmd(fetch_the_batch(ids, |x| Msg::FetchedCommands(Box::new(x))));
}
}
}
Msg::FetchTree => {
model.tree_cancel = None;
if !is_all_commands_finished(&model.commands) {
schedule_fetch_tree(model, orders);
}
}
Msg::FetchedCommands(commands_data_result) => {
match *commands_data_result {
Ok(api_list) => {
model.update_commands(api_list.objects.into_iter().map(Arc::new).collect());
}
Err(e) => {
error!(format!("Failed to fetch commands {:#?}", e));
orders.skip();
}
}
if !is_all_commands_finished(&model.commands) {
let (cancel, fut) = sleep_with_handle(POLL_INTERVAL, Msg::FetchTree, Msg::Noop);
model.tree_cancel = Some(cancel);
orders.perform_cmd(fut);
}
}
Msg::FetchedJobs(jobs_data_result) => match *jobs_data_result {
Ok(api_list) => {
model.update_jobs(api_list.objects.into_iter().map(Arc::new).collect());
}
Err(e) => {
error!(format!("Failed to fetch jobs {:#?}", e));
orders.skip();
}
},
Msg::FetchedSteps(steps_data_result) => match *steps_data_result {
Ok(api_list) => {
model.update_steps(api_list.objects.into_iter().map(Arc::new).collect());
}
Err(e) => {
error!(format!("Failed to fetch steps {:#?}", e));
orders.skip();
}
},
Msg::Click(the_id) => {
let do_fetch = model.select.perform_click(the_id);
if do_fetch {
schedule_fetch_tree(model, orders);
}
}
Msg::CancelJob(job_id) => {
if let Some(job) = model.jobs.get(&job_id) {
if let Some(ct) = find_cancel_transition(job) {
if model.cancelling_jobs.insert(job_id) {
let fut = apply_job_transition(job_id, TransitionState(ct.state.clone()));
orders.skip().perform_cmd(fut);
}
}
}
}
Msg::CancelledJob(job_id, job_result) => {
model.cancelling_jobs.remove(&job_id);
if let Err(e) = *job_result {
error!(format!("Failed to cancel job {}: {:#?}", job_id, e));
orders.skip();
}
}
Msg::Noop => {}
}
}
fn schedule_fetch_tree(model: &mut Model, orders: &mut impl Orders<Msg, GMsg>) {
let (cmd_ids, job_ids, _) = &model.select.split();
// grab all the dependencies for the chosen items, except those that already loaded and completed
let load_cmd_ids = extract_sorted_keys(&model.commands)
.into_iter()
.filter(|c| to_load_cmd(model, *c))
.collect::<Vec<i32>>();
let load_job_ids = cmd_ids
.iter()
.filter(|c| model.commands.contains_key(c))
.flat_map(|c| model.commands[c].deps())
.filter(|j| to_load_job(model, **j))
.copied()
.collect::<Vec<i32>>();
let load_step_ids = job_ids
.iter()
.filter(|j| model.jobs.contains_key(j))
.flat_map(|j| model.jobs[j].deps())
.filter(|s| to_load_step(model, **s))
.copied()
.collect::<Vec<i32>>();
orders.skip();
if !load_cmd_ids.is_empty() {
orders.perform_cmd(fetch_the_batch(load_cmd_ids, |x| Msg::FetchedCommands(Box::new(x))));
}
if !load_job_ids.is_empty() {
orders.perform_cmd(fetch_the_batch(load_job_ids, |x| Msg::FetchedJobs(Box::new(x))));
}
if !load_step_ids.is_empty() {
orders.perform_cmd(fetch_the_batch(load_step_ids, |x| Msg::FetchedSteps(Box::new(x))));
}
}
pub(crate) fn view(model: &Model) -> Node<Msg> {
if !model.modal.open {
empty![]
} else {
modal::bg_view(
true,
Msg::Modal,
modal::content_view(
Msg::Modal,
if model.commands_view.is_empty() {
vec![
modal::title_view(Msg::Modal, span!["Loading Command"]),
div![
class![C.my_12, C.text_center, C.text_gray_500],
font_awesome(class![C.w_12, C.h_12, C.inline, C.pulse], "spinner")
],
modal::footer_view(vec![close_button()]).merge_attrs(class![C.pt_8]),
]
} else {
vec![
modal::title_view(Msg::Modal, plain!["Commands"]),
div![
class![C.py_8],
model.commands_view.iter().map(|x| { command_item_view(model, x) })
],
modal::footer_view(vec![close_button()]).merge_attrs(class![C.pt_8]),
]
},
),
)
.with_listener(keyboard_ev(Ev::KeyDown, move |ev| match ev.key_code() {
key_codes::ESC => Msg::Modal(modal::Msg::Close),
_ => Msg::Noop,
}))
.merge_attrs(class![C.text_black])
}
}
fn command_item_view(model: &Model, x: &RichCommand) -> Node<Msg> {
let is_open = model.select.contains(TypedId::Command(x.id));
// all commands will be marked complete when they finished, it's the absence of other states that makes them successful
let border = if !is_open {
C.border_transparent
} else if x.errored {
C.border_red_500
} else if x.cancelled {
C.border_gray_500
} else if x.complete {
C.border_green_500
} else {
C.border_transparent
};
let open_icon = if is_open {
"chevron-circle-up"
} else {
"chevron-circle-down"
};
let job_tree = job_tree_view(model, CmdId(x.id));
div![
class![C.border_b, C.last__border_b_0],
div![
class![
border,
C.border_l_2,
C.px_2
C.py_5,
C.text_gray_700,
],
header![
class![
C.flex,
C.justify_between,
C.items_center,
C.cursor_pointer,
C.select_none,
C.py_5
],
simple_ev(Ev::Click, Msg::Click(TypedId::Command(x.id))),
span![class![C.font_thin, C.text_xl], cmd_status_icon(x), &x.message],
font_awesome(
class![C.w_4, C.h_4, C.inline, C.text_gray_700, C.text_blue_500],
open_icon
),
],
ul![
class![C.pl_8, C.hidden => !is_open],
li![class![C.pb_2], "Started at: ", x.created_at],
li![class![C.pb_2], "Status: ", status_text(x)],
li![job_tree],
]
]
]
}
pub fn job_tree_view(model: &Model, parent_cid: CmdId) -> Node<Msg> {
if !model.jobs_graphs.contains_key(&parent_cid) || model.jobs_graphs[&parent_cid].is_empty() {
div![
class![C.my_8, C.text_center, C.text_gray_500],
font_awesome(class![C.w_8, C.h_8, C.inline, C.pulse], "spinner"),
]
} else {
let mut ctx = Context {
steps_view: &model.steps_view,
select: &model.select,
cancelling_jobs: &model.cancelling_jobs,
};
let dag_nodes = traverse_graph(
&model.jobs_graphs[&parent_cid],
&job_item_view,
&job_item_combine,
&mut ctx,
);
div![
class![C.font_ordinary, C.text_gray_700],
h4![class![C.text_lg, C.font_medium], "Jobs"],
div![class![C.p_1, C.pb_2, C.mb_1, C.overflow_auto], div![dag_nodes]],
]
}
}
fn job_item_view(job: Arc<RichJob>, is_new: bool, ctx: &mut Context) -> Node<Msg> {
let icon = job_status_icon(job.as_ref());
if !is_new {
empty![]
} else if job.steps.is_empty() {
span![span![class![C.mr_1], icon], span![job.description]]
} else {
let cancelling = ctx.cancelling_jobs.contains(&job.id);
let cancel_btn = job_item_cancel_button(&job, cancelling);
let is_open = ctx.select.contains(TypedId::Job(job.id));
let def_vec = Vec::new();
let steps = ctx.steps_view.get(&JobId(job.id)).unwrap_or(&def_vec);
div![
a![
span![class![C.mr_1], icon],
span![class![C.cursor_pointer, C.underline], job.description],
simple_ev(Ev::Click, Msg::Click(TypedId::Job(job.id))),
],
cancel_btn,
step_list_view(steps, ctx.select, is_open),
]
}
}
fn job_item_combine(parent: Node<Msg>, acc: Vec<Node<Msg>>, _ctx: &mut Context) -> Node<Msg> {
if !parent.is_empty() {
// all the dependencies are shifted with the indent
let acc_plus = acc.into_iter().map(|a| a.merge_attrs(class![C.ml_3, C.mt_1]));
div![parent, acc_plus]
} else {
empty![]
}
}
fn job_item_cancel_button(job: &Arc<RichJob>, cancelling: bool) -> Node<Msg> {
if let Some(trans) = find_cancel_transition(job) {
let cancel_btn: Node<Msg> = div![
class![C.inline, C.px_1, C.rounded_lg, C.text_white, C.cursor_pointer],
trans.label,
];
if !cancelling {
cancel_btn
.merge_attrs(class![C.bg_blue_500, C.hover__bg_blue_400])
.with_listener(simple_ev(Ev::Click, Msg::CancelJob(job.id)))
} else {
// ongoing action will render gray button without the handler
cancel_btn.merge_attrs(class![C.bg_gray_500, C.hover__bg_gray_400])
}
} else {
empty![]
}
}
fn step_list_view(steps: &[Arc<RichStep>], select: &Select, is_open: bool) -> Node<Msg> {
if !is_open {
empty![]
} else if steps.is_empty() {
div![
class![C.my_8, C.text_center, C.text_gray_500],
font_awesome(class![C.w_8, C.h_8, C.inline, C.pulse], "spinner"),
]
} else {
div![ul![
class![C.p_1, C.pb_2, C.mb_1, C.overflow_auto],
steps.iter().map(|x| {
let is_open = select.contains(TypedId::Step(x.id));
li![step_item_view(x, is_open)]
})
]]
}
}
fn step_item_view(step: &RichStep, is_open: bool) -> Vec<Node<Msg>> {
let icon = step_status_icon(step);
let item_caption = div![
class![C.flex],
div![
attrs![At::Style => "flex: 0 0 1em"],
class![C.mx_2, C.cursor_pointer],
icon,
simple_ev(Ev::Click, Msg::Click(TypedId::Step(step.id))),
],
div![
class![C.flex_grow, C.cursor_pointer, C.underline],
step.class_name,
simple_ev(Ev::Click, Msg::Click(TypedId::Step(step.id))),
],
];
let item_body = if !is_open {
empty![]
} else {
// note, we cannot just use the Debug instance for step.args,
// because the keys traversal order changes every time the HashMap is created
let mut arg_keys = step.args.keys().collect::<Vec<&String>>();
arg_keys.sort();
let mut args: Vec<Node<Msg>> = Vec::with_capacity(step.args.len());
for k in arg_keys {
args.push(span![class![C.text_blue_300], &format!("{}: ", k)]);
args.push(plain![format!(
"{}\n",
step.args.get(k).unwrap_or(&serde_json::value::Value::Null)
)]);
}
let pre_class = class![
C.p_2, C.m_2
C.leading_tight,
C.text_gray_100,
C.bg_gray_900,
C.overflow_x_hidden,
C.whitespace_pre_line,
C.break_all,
];
let caption_class = class![C.text_lg, C.font_medium];
// show logs and backtrace if the step has failed
let backtrace_view = if step.state == "failed" && !step.backtrace.is_empty() {
vec![h4![&caption_class, "Backtrace"], pre![&pre_class, step.backtrace]]
} else {
vec![]
};
let log_view = if step.state == "failed" && !step.log.is_empty() {
vec![h4![&caption_class, "Logs"], pre![&pre_class, step.log]]
} else {
vec![]
};
let console_view = if !step.console.is_empty() {
vec![h4![&caption_class, "Console output"], pre![&pre_class, step.console]]
} else {
vec![]
};
div![
class![C.flex],
div![
attrs![At::Style => "flex: 0 0 1em"],
class![C.border_r_2, C.border_gray_300, C.hover__border_gray_600],
simple_ev(Ev::Click, Msg::Click(TypedId::Step(step.id))),
],
div![attrs![At::Style => "flex: 0 0 1em"]],
div![
class![C.float_right, C.flex_grow],
h4![&caption_class, "Arguments"],
pre![&pre_class, args],
backtrace_view,
log_view,
console_view,
]
]
};
vec![item_caption, item_body]
}
/// Human-readable status label for a command, using the same precedence
/// as the rest of this module: cancelled > errored > complete > running.
fn status_text(cmd: &RichCommand) -> &'static str {
    match (cmd.cancelled, cmd.errored, cmd.complete) {
        (true, _, _) => "Cancelled",
        (false, true, _) => "Errored",
        (false, false, true) => "Complete",
        (false, false, false) => "Running",
    }
}
/// Icon for a command; precedence: cancelled > errored > complete > running.
fn cmd_status_icon<T>(cmd: &RichCommand) -> Node<T> {
    let awesome_class = class![C.w_4, C.h_4, C.inline, C.mr_4];
    if cmd.cancelled {
        font_awesome(awesome_class, "ban").merge_attrs(class![C.text_gray_500])
    } else if cmd.errored {
        font_awesome(awesome_class, "bell").merge_attrs(class![C.text_red_500])
    } else if cmd.complete {
        font_awesome(awesome_class, "check").merge_attrs(class![C.text_green_500])
    } else {
        // Still running — animated (pulsing) spinner.
        font_awesome(awesome_class, "spinner").merge_attrs(class![C.text_gray_500, C.pulse])
    }
}
/// Icon for a job; unlike commands, a job's completion is signalled by
/// its `state` string rather than a boolean flag.
fn job_status_icon<T>(job: &RichJob) -> Node<T> {
    let awesome_style = class![C.fill_current, C.w_4, C.h_4, C.inline];
    if job.cancelled {
        font_awesome(awesome_style, "ban").merge_attrs(class![C.text_red_500])
    } else if job.errored {
        font_awesome(awesome_style, "exclamation").merge_attrs(class![C.text_red_500])
    } else if job.state == "complete" {
        font_awesome(awesome_style, "check").merge_attrs(class![C.text_green_500])
    } else {
        font_awesome(awesome_style, "spinner").merge_attrs(class![C.text_gray_500, C.pulse])
    }
}
/// Icon for a step, keyed purely off its `state` string.
fn step_status_icon<T>(step: &RichStep) -> Node<T> {
    let awesome_style = class![C.fill_current, C.w_4, C.h_4, C.inline];
    match &step.state[..] {
        "cancelled" => font_awesome(awesome_style, "ban").merge_attrs(class![C.text_red_500]),
        "failed" => font_awesome(awesome_style, "exclamation").merge_attrs(class![C.text_red_500]),
        "success" => font_awesome(awesome_style, "check").merge_attrs(class![C.text_green_500]),
        _ /* "incomplete" */ => font_awesome(awesome_style, "spinner").merge_attrs(class![C.text_gray_500, C.pulse]),
    }
}
fn close_button() -> Node<Msg> {
button![
class![
C.bg_transparent,
C.py_2,
C.px_4,
C.rounded_full,
C.text_blue_500,
C.hover__bg_gray_100,
C.hover__text_blue_400,
],
simple_ev(Ev::Click, modal::Msg::Close),
"Close",
]
.map_msg(Msg::Modal)
}
async fn fetch_the_batch<T, F, U>(ids: Vec<i32>, data_to_msg: F) -> Result<U, U>
where
T: DeserializeOwned + EndpointName + 'static,
F: FnOnce(ResponseDataResult<ApiList<T>>) -> U,
U: 'static,
{
// e.g. GET /api/something/?id__in=1&id__in=2&id__in=11&limit=0
let mut ids: Vec<_> = ids.into_iter().map(|x| ("id__in", x)).collect();
ids.push(("limit", 0));
match Request::api_query(T::endpoint_name(), &ids) {
Ok(req) => req.fetch_json_data(data_to_msg).await,
Err(_) => {
// we always can url encode a vector of i32-s
unreachable!("Cannot encode request for {} with params {:?}", T::endpoint_name(), ids)
}
}
}
async fn apply_job_transition(job_id: i32, transition_state: TransitionState) -> Result<Msg, Msg> {
let json = serde_json::json!({
"id": job_id,
"state": transition_state.0,
});
let req = Request::api_item(Job0::endpoint_name(), job_id)
.with_auth()
.method(fetch::Method::Put)
.send_json(&json);
req.fetch_json_data(|x| Msg::CancelledJob(job_id, Box::new(x))).await
}
/// Parses an API resource URI like `/api/job/39/` and returns the numeric
/// id, but only when the resource segment matches `T`'s endpoint name.
fn extract_uri_id<T: EndpointName>(input: &str) -> Option<i32> {
    lazy_static::lazy_static! {
        // Compiled once per process; captures (resource name, id).
        static ref RE: Regex = Regex::new(r"/api/(\w+)/(\d+)/").unwrap();
    }
    RE.captures(input).and_then(|cap: Captures| {
        let s = cap.get(1).unwrap().as_str();
        let t = cap.get(2).unwrap().as_str();
        if s == T::endpoint_name() {
            // \d+ guarantees digits, but the number could still overflow
            // i32 — ok() maps that case to None.
            t.parse::<i32>().ok()
        } else {
            None
        }
    })
}
/// Returns `true` when command `cmd_id` still needs to be (re)fetched:
/// either it is not cached in `model.commands` yet, or it has not completed.
fn to_load_cmd(model: &Model, cmd_id: i32) -> bool {
    // map_or replaces the map(..).unwrap_or(..) chain (clippy::map_unwrap_or);
    // an unknown id defaults to `true` so the command gets loaded.
    model.commands.get(&cmd_id).map_or(true, |c| !c.complete)
}
/// Returns `true` when job `job_id` still needs to be (re)fetched.
fn to_load_job(model: &Model, job_id: i32) -> bool {
    // job.state can be "pending", "tasked" or "complete"
    // if a job is errored or cancelled, it is also complete
    // map_or replaces the map(..).unwrap_or(..) chain (clippy::map_unwrap_or).
    model.jobs.get(&job_id).map_or(true, |j| j.state != "complete")
}
/// Returns `true` when step `step_id` still needs to be (re)fetched.
fn to_load_step(model: &Model, step_id: i32) -> bool {
    // step.state can be "success", "failed" or "incomplete"; only the
    // incomplete (or unknown) ones warrant another fetch.
    // map_or replaces the map(..).unwrap_or(..) chain (clippy::map_unwrap_or).
    model.steps.get(&step_id).map_or(true, |s| s.state == "incomplete")
}
/// True when every known command is complete — polling can stop then.
/// Vacuously true for an empty map.
fn is_all_commands_finished(cmds: &HashMap<i32, Arc<RichCommand>>) -> bool {
    cmds.values().all(|c| c.complete)
}
/// The transition that would cancel this job, if its current state still
/// offers one among `available_transitions`.
fn find_cancel_transition(job: &Arc<RichJob>) -> Option<&AvailableTransition> {
    job.available_transitions.iter().find(|at| at.state == "cancelled")
}
/// (command id, sorted ids of the jobs it spawned), parsed from job URIs.
fn extract_children_from_cmd(cmd: &Arc<Command>) -> (i32, Vec<i32>) {
    let mut deps = cmd
        .jobs
        .iter()
        .filter_map(|s| extract_uri_id::<Job0>(s))
        .collect::<Vec<i32>>();
    deps.sort();
    (cmd.id, deps)
}
/// (job id, sorted ids of its steps), parsed from step URIs.
fn extract_children_from_job(job: &Arc<Job0>) -> (i32, Vec<i32>) {
    let mut deps = job
        .steps
        .iter()
        .filter_map(|s| extract_uri_id::<Step>(s))
        .collect::<Vec<i32>>();
    deps.sort();
    (job.id, deps)
}
/// (step id, empty child list) — keeps the signature uniform for
/// convert_to_rich_hashmap.
fn extract_children_from_step(step: &Arc<Step>) -> (i32, Vec<i32>) {
    (step.id, Vec::new()) // steps have no descendants
}
/// (job id, ids of the jobs it waits for), with the dependencies put into
/// a deterministic display order.
fn extract_wait_fors_from_job(job: &Job0, jobs: &HashMap<i32, Arc<RichJob>>) -> (i32, Vec<i32>) {
    // Extract the interdependencies between jobs.
    // See [command_modal::tests::test_jobs_ordering]
    let mut deps = job
        .wait_for
        .iter()
        .filter_map(|s| extract_uri_id::<Job0>(s))
        .collect::<Vec<i32>>();
    // Sort key: jobs with MORE children first (hence the negated dep
    // count), ties broken alphabetically by description, then by id.
    // Ids missing from `jobs` sort with a neutral (0, "", id) key.
    deps.sort_by(|i1, i2| {
        let t1 = jobs
            .get(i1)
            .map(|arj| (-(arj.deps.len() as i32), &arj.description[..], arj.id))
            .unwrap_or((0, "", *i1));
        let t2 = jobs
            .get(i2)
            .map(|arj| (-(arj.deps.len() as i32), &arj.description[..], arj.id))
            .unwrap_or((0, "", *i2));
        t1.cmp(&t2)
    });
    (job.id, deps)
}
/// All keys of `hm`, returned as a fresh vector in ascending order.
fn extract_sorted_keys<T>(hm: &HashMap<i32, T>) -> Vec<i32> {
    let mut keys: Vec<i32> = hm.keys().copied().collect();
    // Unstable sort is fine for plain i32 keys (total order, no ties to keep stable).
    keys.sort_unstable();
    keys
}
/// Clones the values of `hm` into a vector ordered by ascending key.
fn convert_to_sorted_vec<T>(hm: &HashMap<i32, Arc<T>>) -> Vec<Arc<T>> {
    let mut ids: Vec<i32> = hm.keys().copied().collect();
    ids.sort();
    // Arc::clone is a cheap refcount bump — no deep copy of T happens here.
    ids.into_iter().map(|k| Arc::clone(&hm[&k])).collect()
}
/// Wraps each `T` in a `Rich` record (its id plus the child ids computed
/// by `extract`) and indexes the results by id.
fn convert_to_rich_hashmap<T>(ts: Vec<T>, extract: impl Fn(&T) -> (i32, Vec<i32>)) -> HashMap<i32, Arc<Rich<i32, T>>> {
    ts.into_iter()
        .map(|t| {
            let (id, deps) = extract(&t);
            (id, Arc::new(Rich { id, deps, inner: t }))
        })
        .collect()
}
/// NOTE: the slices must be sorted
///
/// Checks that every element of `part` appears in `all`, scanning `all`
/// strictly left-to-right.
pub fn is_subset<T: PartialEq>(part: &[T], all: &[T]) -> bool {
    // A forward-only window over `all`: each needle is searched from the
    // position of the previous match (inclusive, so a repeated needle may
    // re-match the same element — matching the original cursor behavior).
    let mut window = all;
    for needle in part {
        match window.iter().position(|x| x == needle) {
            Some(i) => window = &window[i..],
            None => return false,
        }
    }
    true
}
impl Model {
fn clear(&mut self) {
self.tree_cancel = None;
self.commands.clear();
self.commands_view.clear();
self.jobs.clear();
self.jobs_graphs.clear();
self.steps.clear();
self.steps_view.clear();
self.select = Default::default();
self.cancelling_jobs.clear();
}
fn update_commands(&mut self, cmds: Vec<Arc<Command>>) {
let commands = convert_to_rich_hashmap(cmds, extract_children_from_cmd);
self.commands.extend(commands);
let tuple = self.check_back_consistency();
self.refresh_view(tuple);
}
fn update_jobs(&mut self, jobs: Vec<Arc<Job0>>) {
let jobs = convert_to_rich_hashmap(jobs, extract_children_from_job);
self.jobs.extend(jobs);
let tuple = self.check_back_consistency();
self.refresh_view(tuple);
}
fn update_steps(&mut self, steps: Vec<Arc<Step>>) {
let steps = convert_to_rich_hashmap(steps, extract_children_from_step);
self.steps.extend(steps);
let tuple = self.check_back_consistency();
self.refresh_view(tuple);
}
/// We perform the consistency check of the current collections
/// `self.commands`, `self.jobs` and `self.steps`.
/// The selection, if it is non-empty, places additional constraints.
fn check_back_consistency(&self) -> (bool, bool, bool) {
let mut ok = [false; 3];
// check between layers
let cmd_ids = extract_sorted_keys(&self.commands);
let job_ids = extract_sorted_keys(&self.jobs);
let step_ids = extract_sorted_keys(&self.steps);
ok[0] = !self.commands.is_empty();
ok[1] = job_ids
.iter()
.all(|j| self.commands.values().any(|cmd| cmd.deps().contains(j)));
ok[2] = step_ids
.iter()
.all(|s| self.jobs.values().any(|job| job.deps().contains(s)));
// the additional constraints from the selection
let (cs, js, ss) = self.select.split();
ok[0] = ok[0] && is_subset(&cs, &cmd_ids);
ok[1] = ok[1] && is_subset(&js, &job_ids);
ok[2] = ok[2] && is_subset(&ss, &step_ids);
// make ensure the consistency levels are ordered
if ok[0] && ok[1] && ok[2] {
(true, true, true)
} else if ok[0] && ok[1] {
(true, true, false)
} else if ok[0] {
(true, false, false)
} else {
(false, false, false)
}
}
fn refresh_view(&mut self, layers: (bool, bool, bool)) {
let (cmds_ok, jobs_ok, steps_ok) = layers;
if cmds_ok {
self.commands_view = convert_to_sorted_vec(&self.commands);
}
if jobs_ok {
let mut jobs_graphs = HashMap::new();
for (c, cmd) in &self.commands {
if cmd.deps().iter().all(|j| self.jobs.contains_key(j)) {
let extract_fun = |job: &Arc<Job0>| extract_wait_fors_from_job(job, &self.jobs);
let jobs_graph_data = cmd
.deps()
.iter()
.map(|k| RichJob::new(Arc::clone(&self.jobs[k].inner), extract_fun))
.collect::<Vec<RichJob>>();
let graph = build_direct_dag(&jobs_graph_data);
jobs_graphs.insert(CmdId(*c), graph);
}
}
self.jobs_graphs = jobs_graphs;
}
if steps_ok {
let mut steps_view = HashMap::new();
for (j, job) in &self.jobs {
let steps = &self.steps;
if job.deps().iter().all(|s| steps.contains_key(s)) {
let steps = job
.deps()
.iter()
.map(|s| Arc::clone(&steps[s]))
.collect::<Vec<Arc<RichStep>>>();
steps_view.insert(JobId(*j), steps);
}
}
self.steps_view = steps_view;
}
}
}
#[cfg(test)]
mod tests {<|fim▁hole|> use rand_xoshiro::Xoroshiro64Star;
use std::fmt::Debug;
use std::hash::Hash;
use std::iter;
#[derive(Default, Clone, Debug)]
struct Db {
all_cmds: Vec<Arc<Command>>,
all_jobs: Vec<Arc<Job0>>,
all_steps: Vec<Arc<Step>>,
}
impl Db {
fn select_cmds(&self, is: &[i32]) -> Vec<Arc<Command>> {
self.all_cmds
.iter()
.filter(|x| is.contains(&x.id))
.map(|x| Arc::clone(x))
.collect()
}
fn select_jobs(&self, is: &[i32]) -> Vec<Arc<Job0>> {
self.all_jobs
.iter()
.filter(|x| is.contains(&x.id))
.map(|x| Arc::clone(x))
.collect()
}
fn select_steps(&self, is: &[i32]) -> Vec<Arc<Step>> {
self.all_steps
.iter()
.filter(|x| is.contains(&x.id))
.map(|x| Arc::clone(x))
.collect()
}
}
#[derive(Debug, Clone)]
struct Context0 {
level: usize,
}
#[test]
fn test_parse_job() {
assert_eq!(extract_uri_id::<Job0>("/api/job/39/"), Some(39));
assert_eq!(extract_uri_id::<Step>("/api/step/123/"), Some(123));
assert_eq!(extract_uri_id::<Command>("/api/command/12/"), Some(12));
assert_eq!(extract_uri_id::<Command>("/api/xxx/1/"), None);
}
#[test]
fn test_is_subset() {
let all = vec![1, 2, 3, 4, 5];
assert_eq!(is_subset(&vec![1, 2, 3], &all), true);
assert_eq!(is_subset(&vec![1, 3, 5], &all), true);
assert_eq!(is_subset(&vec![], &all), true);
assert_eq!(is_subset(&all, &all), true);
assert_eq!(is_subset(&vec![1, 6], &all), false);
// if not sorted, the correctness is not guaranteed
assert_eq!(is_subset(&vec![5, 1], &all), false);
}
#[test]
fn test_selection_split() {
let select = Select(
vec![
TypedId::Command(1),
TypedId::Command(1),
TypedId::Command(2),
TypedId::Job(13),
TypedId::Job(12),
TypedId::Job(11),
TypedId::Step(23),
TypedId::Step(22),
TypedId::Step(21),
]
.into_iter()
.collect(),
);
let (cs, js, ss) = select.split();
assert_eq!(cs, vec![1, 2]);
assert_eq!(js, vec![11, 12, 13]);
assert_eq!(ss, vec![21, 22, 23]);
}
#[test]
fn test_jobs_ordering() {
// The jobs' dependencies (vector of i32) are sorted in special order so that the
// jobs with more dependencies come first. If the number of dependencies are equal,
// they are sorted by alphabetical order by the description.
// If the description is the same, then the jobs are ordered by id.
// The sort order of the jobs themself does NOT matter.
let mut rng = Xoroshiro64Star::seed_from_u64(555);
let db = build_db_2();
let commands = db.select_cmds(&[109]);
let mut jobs = db.select_jobs(&extract_ids::<Job0>(&commands[0].jobs));
for i in (1..jobs.len()).rev() {
let j = (rng.next_u64() as usize) % (i + 1);
jobs.swap(i, j);
}
let mut model = Model::default();
model.jobs = convert_to_rich_hashmap(jobs, extract_children_from_job);
model.commands = convert_to_rich_hashmap(commands, extract_children_from_cmd);
// here the [command_modal::extract_wait_fors_from_job] function plays
model.refresh_view((false, true, false));
let dag = &model.jobs_graphs[&CmdId(109)];
let mut ctx = Context0 { level: 0 };
let result = traverse_graph(dag, &rich_job_to_string, &rich_job_combine_strings, &mut ctx).join("");
assert_eq!(result, WELL_ORDERED_TREE);
}
/// There could the the problem, when `schedule_fetch_tree` made the api requests
/// in one order, but during delays or something the responses may come in different
/// order. In all such cases, however, model should remain consistent,
/// and the test checks this.
#[test]
fn test_async_handlers_consistency() {
let mut rng = Xoroshiro64Star::seed_from_u64(555);
let db = build_db_1();
let mut model = Model::default();
let cmd_ids = db.all_cmds.iter().map(|x| x.id).collect::<Vec<_>>();
let selects = generate_random_selects(&db, &mut rng, 200);
for select in selects {
let sel_cmd_ids = if select.0.is_empty() {
cmd_ids.clone()
} else {
let (cs, _, _) = select.split();
cs
};
let (c, j, s) = prepare_subset(&db, &sel_cmd_ids);
model.clear();
model.select = select.clone();
model.update_commands(c.clone());
model.update_jobs(j.clone());
model.update_steps(s.clone());
let expected_cmd = to_str_vec(std::mem::replace(&mut model.commands_view, Vec::new()));
let expected_jobs = to_str_hm(std::mem::replace(&mut model.jobs_graphs, HashMap::new()));
let expected_steps = to_str_hm(std::mem::replace(&mut model.steps_view, HashMap::new()));
let permutations = vec![[1, 3, 2], [2, 1, 3], [2, 3, 1], [3, 1, 2], [3, 2, 1]];
for permutation in permutations {
model.clear();
model.select = select.clone();
// we simulate, that FetchCommands, FetchJobs and FetchSteps come in arbitrary order
for p in &permutation {
match p {
1 => model.update_commands(c.clone()),
2 => model.update_jobs(j.clone()),
3 => model.update_steps(s.clone()),
_ => unreachable!(),
}
}
let actual_cmd = to_str_vec(std::mem::replace(&mut model.commands_view, Vec::new()));
let actual_jobs = to_str_hm(std::mem::replace(&mut model.jobs_graphs, HashMap::new()));
let actual_steps = to_str_hm(std::mem::replace(&mut model.steps_view, HashMap::new()));
assert_eq!(actual_cmd, expected_cmd);
assert_eq!(actual_jobs, expected_jobs);
assert_eq!(actual_steps, expected_steps);
}
}
}
fn to_str_vec<V: Debug>(vec: Vec<V>) -> Vec<String> {
vec.into_iter().map(|x| format!("{:?}", x)).collect()
}
fn to_str_hm<K: Hash + Eq, V: Debug>(hm: HashMap<K, V>) -> HashMap<K, String> {
hm.into_iter().map(|(k, v)| (k, format!("{:?}", v))).collect()
}
fn make_command(id: i32, jobs: &[i32], msg: &str) -> Arc<Command> {
Arc::new(Command {
cancelled: false,
complete: false,
created_at: "2020-03-16T07:22:34.491600".to_string(),
errored: false,
id,
jobs: jobs.iter().map(|x| format!("/api/job/{}/", x)).collect(),
logs: "".to_string(),
message: msg.to_string(),
resource_uri: format!("/api/command/{}/", id),
})
}
fn make_job(id: i32, cmd_id: CmdId, steps: &[i32], wait_for: &[i32], descr: &str) -> Arc<Job0> {
Arc::new(Job0 {
available_transitions: vec![],
cancelled: false,
class_name: "".to_string(),
commands: iter::once(cmd_id)
.map(|CmdId(id)| format!("/api/command/{}/", id))
.collect(),
created_at: "2020-03-16T07:22:34.491600".to_string(),
description: descr.to_string(),
errored: false,
id,
modified_at: "".to_string(),
read_locks: vec![],
resource_uri: format!("/api/job/{}/", id),
state: "complete".to_string(),
step_results: Default::default(),
steps: steps.iter().map(|x| format!("/api/step/{}/", x)).collect(),
wait_for: wait_for.iter().map(|x| format!("/api/job/{}/", x)).collect(),
write_locks: vec![],
})
}
fn make_step(id: i32, class_name: &str) -> Arc<Step> {
Arc::new(Step {
args: Default::default(),
backtrace: "".to_string(),
class_name: class_name.to_string(),
console: "".to_string(),
created_at: "2020-03-16T07:22:34.491600".to_string(),
description: "".to_string(),
id,
log: "".to_string(),
modified_at: "2020-03-16T07:22:34.491600".to_string(),
resource_uri: format!("/api/step/{}/", id),
result: None,
state: "incomplete".to_string(),
step_count: 0,
step_index: 0,
})
}
    /// Fixture: four commands with two jobs each. Jobs 14-17 deliberately reuse
    /// the descriptions of jobs 10-13, and several steps are shared across
    /// commands, exercising the ordering tie-breakers and shared-child paths.
    fn build_db_1() -> Db {
        let all_cmds = vec![
            make_command(1, &[10, 11], "One"),
            make_command(2, &[12, 13], "Two"),
            make_command(3, &[14, 15], "Three"),
            make_command(4, &[16, 17], "Four"),
        ];
        let all_jobs = vec![
            make_job(10, CmdId(1), &[20, 21], &[11], "Ten"),
            make_job(11, CmdId(1), &[21, 26], &[], "Eleven"),
            make_job(12, CmdId(2), &[22, 23], &[13], "Twelve"),
            make_job(13, CmdId(2), &[23, 28], &[], "Thirteen"),
            // NOTE(review): job 14 references step 15, which has no fixture below
            // (steps run 20-29) — presumably intentional to exercise the
            // missing-dependency path in refresh_view; confirm.
            make_job(14, CmdId(3), &[24, 15], &[], "Ten"),
            make_job(15, CmdId(3), &[25, 20], &[], "Eleven"),
            make_job(16, CmdId(4), &[26, 27], &[], "Twelve"),
            make_job(17, CmdId(4), &[27, 22], &[], "Thirteen"),
        ];
        let all_steps = vec![
            make_step(20, "Twenty and zero"),
            make_step(21, "Twenty and one"),
            make_step(22, "Twenty and two"),
            make_step(23, "Twenty and three"),
            make_step(24, "Twenty and four"),
            make_step(25, "Twenty and five"),
            make_step(26, "Twenty and six"),
            make_step(27, "Twenty and seven"),
            make_step(28, "Twenty and eight"),
            make_step(29, "Twenty and nine"),
        ];
        Db {
            all_cmds,
            all_jobs,
            all_steps,
        }
    }
    /// Fixture: one command with a three-tier job dependency shape — four
    /// independent stop-target jobs, a fan-in job (244) that waits for them,
    /// a fan-out of jobs waiting on 244, and a final job (253) waiting on all.
    /// Used by `test_jobs_ordering` to check the DAG rendering order.
    fn build_db_2() -> Db {
        let all_cmds = vec![make_command(
            109,
            &[240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253],
            "Stop file system fs",
        )];
        let all_jobs = vec![
            make_job(240, CmdId(109), &[], &[], "Stop target fs-OST0008"),
            make_job(241, CmdId(109), &[], &[], "Stop target fs-OST0007"),
            make_job(242, CmdId(109), &[], &[], "Stop target fs-OST0003"),
            make_job(243, CmdId(109), &[], &[], "Stop target fs-OST0000"),
            make_job(
                244,
                CmdId(109),
                &[],
                &[240, 241, 242, 243],
                "Make file system fs unavailable",
            ),
            make_job(245, CmdId(109), &[], &[244], "Stop target fs-OST0005"),
            make_job(246, CmdId(109), &[], &[244], "Stop target fs-MDT0000"),
            make_job(247, CmdId(109), &[], &[244], "Stop target fs-OST0004"),
            make_job(248, CmdId(109), &[], &[244], "Stop target fs-OST0002"),
            make_job(249, CmdId(109), &[], &[244], "Stop target fs-OST0001"),
            make_job(250, CmdId(109), &[], &[244], "Stop target fs-OST0006"),
            make_job(251, CmdId(109), &[], &[244], "Stop target fs-MDT0001"),
            make_job(252, CmdId(109), &[], &[244], "Stop target fs-OST0009"),
            make_job(
                253,
                CmdId(109),
                &[],
                &[240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252],
                "Stop file system fs",
            ),
        ];
        let all_steps = vec![];
        Db {
            all_cmds,
            all_jobs,
            all_steps,
        }
    }
fn prepare_subset(db: &Db, cmd_ids: &[i32]) -> (Vec<Arc<Command>>, Vec<Arc<Job0>>, Vec<Arc<Step>>) {
let cmds = db.select_cmds(&cmd_ids);
let c_ids = cmds
.iter()
.map(|x| extract_ids::<Job0>(&x.jobs))
.flatten()
.collect::<Vec<i32>>();
let jobs = db.select_jobs(&c_ids);
let j_ids = jobs
.iter()
.map(|x| extract_ids::<Step>(&x.steps))
.flatten()
.collect::<Vec<i32>>();
let steps = db.select_steps(&j_ids);
let cmds = db.all_cmds.clone(); // use all roots
(cmds, jobs, steps)
}
fn generate_random_selects<R: RngCore>(db: &Db, rng: &mut R, n: i32) -> Vec<Select> {
let cmd_ids = db.all_cmds.iter().map(|x| x.id).collect::<Vec<_>>();
let job_ids = db.all_jobs.iter().map(|x| x.id).collect::<Vec<_>>();
let step_ids = db.all_steps.iter().map(|x| x.id).collect::<Vec<_>>();
fn sample<R: RngCore>(rng: &mut R, ids: &[i32], m: usize) -> Vec<i32> {
let mut hs = HashSet::with_capacity(m);
let n = ids.len();
if n < m {
panic!("Must be m <= ids.len()")
}
for _ in 0..m {
loop {
let id = ids[rng.next_u32() as usize % n];
if hs.insert(id) {
break;
}
}
}
let mut sam = hs.into_iter().collect::<Vec<_>>();
sam.sort();
sam
}
(0..n)
.into_iter()
.map(|_| {
let nc = (rng.next_u32() % 2 + 1) as usize;
let nj = (rng.next_u32() % 4 + 1) as usize;
let ns = (rng.next_u32() % 4 + 1) as usize;
let sel_cmd_ids = sample(rng, &cmd_ids, nc);
let sel_job_ids = sample(rng, &job_ids, nj);
let sel_step_ids = sample(rng, &step_ids, ns);
let result = sel_cmd_ids
.into_iter()
.map(|id| TypedId::Command(id))
.chain(sel_job_ids.into_iter().map(|id| TypedId::Job(id)))
.chain(sel_step_ids.into_iter().map(|id| TypedId::Step(id)))
.collect::<HashSet<_>>();
Select(result)
})
.collect()
}
fn extract_ids<T: EndpointName>(uris: &[String]) -> Vec<i32> {
// uris is the slice of strings like ["/api/step/123/", .. , "/api/step/234/"]
uris.iter().filter_map(|s| extract_uri_id::<T>(s)).collect()
}
fn rich_job_to_string(node: Arc<RichJob>, is_new: bool, ctx: &mut Context0) -> String {
ctx.level += 1;
if is_new {
format!("{}: {}\n", node.id, node.description)
} else {
String::new()
}
}
fn rich_job_combine_strings(node: String, nodes: Vec<String>, ctx: &mut Context0) -> String {
if ctx.level > 0 {
ctx.level -= 1;
}
let space = if ctx.level > 0 { " " } else { "" };
let mut result = String::with_capacity(100);
for line in node.lines() {
result.push_str(space);
result.push_str(line);
result.push('\n');
}
for n in nodes.iter() {
for line in n.lines() {
result.push_str(space);
result.push_str(line);
result.push('\n');
}
}
result
}
const WELL_ORDERED_TREE: &'static str = r#"253: Stop file system fs
244: Make file system fs unavailable
243: Stop target fs-OST0000
242: Stop target fs-OST0003
241: Stop target fs-OST0007
240: Stop target fs-OST0008
246: Stop target fs-MDT0000
251: Stop target fs-MDT0001
249: Stop target fs-OST0001
248: Stop target fs-OST0002
247: Stop target fs-OST0004
245: Stop target fs-OST0005
250: Stop target fs-OST0006
252: Stop target fs-OST0009
"#;
// test_view is here https://gist.github.com/nlinker/9cbd9092986180531a841f9e610ef53a
}<|fim▁end|>
|
use super::*;
use rand_core::{RngCore, SeedableRng};
|
<|file_name|>fake_data_transfer_manager.cc<|end_file_name|><|fim▁begin|>// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/webshare/win/fake_data_transfer_manager.h"
#include <wrl/module.h>
#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/win/core_winrt_util.h"
#include "base/win/scoped_hstring.h"
#include "base/win/vector.h"
#include "base/win/windows_version.h"
#include "testing/gtest/include/gtest/gtest.h"
using ABI::Windows::ApplicationModel::DataTransfer::DataPackage;
using ABI::Windows::ApplicationModel::DataTransfer::DataPackageOperation;
using ABI::Windows::ApplicationModel::DataTransfer::DataRequestedEventArgs;
using ABI::Windows::ApplicationModel::DataTransfer::DataTransferManager;
using ABI::Windows::ApplicationModel::DataTransfer::IDataPackage;
using ABI::Windows::ApplicationModel::DataTransfer::IDataPackage2;
using ABI::Windows::ApplicationModel::DataTransfer::IDataPackagePropertySet;
using ABI::Windows::ApplicationModel::DataTransfer::IDataPackagePropertySet3;
using ABI::Windows::ApplicationModel::DataTransfer::IDataPackageView;
using ABI::Windows::ApplicationModel::DataTransfer::IDataProviderHandler;
using ABI::Windows::ApplicationModel::DataTransfer::IDataRequest;
using ABI::Windows::ApplicationModel::DataTransfer::IDataRequestDeferral;
using ABI::Windows::ApplicationModel::DataTransfer::IDataRequestedEventArgs;
using ABI::Windows::ApplicationModel::DataTransfer::IDataTransferManager;
using ABI::Windows::ApplicationModel::DataTransfer::OperationCompletedEventArgs;
using ABI::Windows::ApplicationModel::DataTransfer::
TargetApplicationChosenEventArgs;
using ABI::Windows::Foundation::DateTime;
using ABI::Windows::Foundation::ITypedEventHandler;
using ABI::Windows::Foundation::IUriRuntimeClass;
using ABI::Windows::Foundation::Collections::IIterable;
using ABI::Windows::Foundation::Collections::IIterator;
using ABI::Windows::Foundation::Collections::IMap;
using ABI::Windows::Foundation::Collections::IVector;
using ABI::Windows::Storage::IStorageFile;
using ABI::Windows::Storage::IStorageItem;
using ABI::Windows::Storage::Streams::IRandomAccessStreamReference;
using ABI::Windows::Storage::Streams::RandomAccessStreamReference;
using Microsoft::WRL::ActivationFactory;
using Microsoft::WRL::ComPtr;
using Microsoft::WRL::Make;
using Microsoft::WRL::RuntimeClass;
using Microsoft::WRL::RuntimeClassFlags;
using Microsoft::WRL::WinRtClassicComMix;
namespace ABI {
namespace Windows {
namespace Foundation {
namespace Collections {
// Define template specializations for the types used.
template <>
struct __declspec(uuid("AF82EEF9-F786-475D-A3EB-929AEB6F0689"))
IObservableVector<HSTRING> : IObservableVector_impl<HSTRING> {};
template <>
struct __declspec(uuid("1ED11184-03B9-4911-875C-9682969C732A"))
VectorChangedEventHandler<HSTRING>
: VectorChangedEventHandler_impl<HSTRING> {};
} // namespace Collections
} // namespace Foundation
} // namespace Windows
} // namespace ABI
namespace webshare {
namespace {
class FakeDataPackagePropertySet final
: public RuntimeClass<RuntimeClassFlags<WinRtClassicComMix>,
IDataPackagePropertySet,
IDataPackagePropertySet3> {
public:
FakeDataPackagePropertySet(
FakeDataTransferManager::DataRequestedContent& data_requested_content)
: data_requested_content_(data_requested_content) {}
FakeDataPackagePropertySet(const FakeDataPackagePropertySet&) = delete;
FakeDataPackagePropertySet& operator=(const FakeDataPackagePropertySet&) =
delete;
~FakeDataPackagePropertySet() final {
// Though it is technically legal for consuming code to hold on to the
// FileTypes past the lifetime of the DataPackagePropertySet, there is
// no good reason to do so, so any lingering references presumably point
// to a coding error.
if (file_types_)
EXPECT_EQ(0u, file_types_.Reset());
}
// IDataPackagePropertySet
IFACEMETHODIMP get_ApplicationListingUri(IUriRuntimeClass** value) final {
NOTREACHED();
return E_NOTIMPL;
}
IFACEMETHODIMP get_ApplicationName(HSTRING* value) final {
NOTREACHED();
return E_NOTIMPL;
}
IFACEMETHODIMP get_Description(HSTRING* value) final {
NOTREACHED();
return E_NOTIMPL;
}
IFACEMETHODIMP get_FileTypes(IVector<HSTRING>** value) final {
if (!file_types_)
file_types_ = Make<base::win::Vector<HSTRING>>();
auto hr = file_types_->QueryInterface(IID_PPV_ARGS(value));
EXPECT_HRESULT_SUCCEEDED(hr);
return hr;
}
IFACEMETHODIMP get_Thumbnail(IRandomAccessStreamReference** value) final {
NOTREACHED();
return E_NOTIMPL;
}
IFACEMETHODIMP get_Title(HSTRING* value) final {
NOTREACHED();
return E_NOTIMPL;
}
IFACEMETHODIMP put_ApplicationListingUri(IUriRuntimeClass* value) final {
return S_OK;
}
IFACEMETHODIMP put_ApplicationName(HSTRING value) final { return S_OK; }
IFACEMETHODIMP put_Description(HSTRING value) final { return S_OK; }
IFACEMETHODIMP put_Thumbnail(IRandomAccessStreamReference* value) final {
return S_OK;
}
IFACEMETHODIMP put_Title(HSTRING value) final {
base::win::ScopedHString wrapped_value(value);
data_requested_content_.title = wrapped_value.GetAsUTF8();
return S_OK;
}
// IDataPackagePropertySet3
IFACEMETHODIMP get_EnterpriseId(HSTRING* value) final {
NOTREACHED();
return E_NOTIMPL;
}
IFACEMETHODIMP put_EnterpriseId(HSTRING value) final { return S_OK; }
private:
FakeDataTransferManager::DataRequestedContent& data_requested_content_;
ComPtr<base::win::Vector<HSTRING>> file_types_;
};
class FakeDataPackage final
: public RuntimeClass<RuntimeClassFlags<WinRtClassicComMix>,
IDataPackage,
IDataPackage2> {
public:
FakeDataPackage(
FakeDataTransferManager::DataRequestedContent& data_requested_content)
: data_requested_content_(data_requested_content) {}
FakeDataPackage(const FakeDataPackage&) = delete;
FakeDataPackage& operator=(const FakeDataPackage&) = delete;
~FakeDataPackage() final {
// Though it is technically legal for consuming code to hold on to the
// DataPackagePropertySet past the lifetime of the DataPackage, there is
// no good reason to do so, so any lingering references presumably point
// to a coding error.
if (properties_)
EXPECT_EQ(0u, properties_.Reset());
}
// IDataPackage
IFACEMETHODIMP add_Destroyed(
ITypedEventHandler<DataPackage*, IInspectable*>* handler,
EventRegistrationToken* token) final {
NOTREACHED();
return E_NOTIMPL;
}
IFACEMETHODIMP add_OperationCompleted(
ITypedEventHandler<DataPackage*, OperationCompletedEventArgs*>* handler,
EventRegistrationToken* token) final {
NOTREACHED();
return E_NOTIMPL;
}
IFACEMETHODIMP GetView(IDataPackageView** result) final {
NOTREACHED();
return E_NOTIMPL;
}
IFACEMETHODIMP get_Properties(IDataPackagePropertySet** value) final {
if (!properties_)
properties_ = Make<FakeDataPackagePropertySet>(data_requested_content_);
auto hr = properties_->QueryInterface(IID_PPV_ARGS(value));
EXPECT_HRESULT_SUCCEEDED(hr);
return hr;
}
IFACEMETHODIMP get_RequestedOperation(DataPackageOperation* value) final {
NOTREACHED();
return E_NOTIMPL;
}
IFACEMETHODIMP get_ResourceMap(
IMap<HSTRING, RandomAccessStreamReference*>** value) final {
NOTREACHED();
return E_NOTIMPL;
}
IFACEMETHODIMP put_RequestedOperation(DataPackageOperation value) final {
return S_OK;
}
IFACEMETHODIMP remove_Destroyed(EventRegistrationToken token) final {
NOTREACHED();
return E_NOTIMPL;
}
IFACEMETHODIMP remove_OperationCompleted(EventRegistrationToken token) final {
NOTREACHED();
return E_NOTIMPL;
}
IFACEMETHODIMP SetBitmap(IRandomAccessStreamReference* value) final {
return S_OK;
}
IFACEMETHODIMP SetData(HSTRING formatId, IInspectable* value) final {
return S_OK;
}
IFACEMETHODIMP SetDataProvider(HSTRING formatId,
IDataProviderHandler* delayRenderer) final {
return S_OK;
}
IFACEMETHODIMP SetHtmlFormat(HSTRING value) final { return S_OK; }
IFACEMETHODIMP SetRtf(HSTRING value) final { return S_OK; }
IFACEMETHODIMP SetText(HSTRING value) final {
base::win::ScopedHString wrapped_value(value);
data_requested_content_.text = wrapped_value.GetAsUTF8();
return S_OK;
}
IFACEMETHODIMP SetStorageItems(IIterable<IStorageItem*>* value,
boolean readOnly) final {
EXPECT_TRUE(readOnly);
return SetStorageItemsReadOnly(value);
}
IFACEMETHODIMP SetStorageItemsReadOnly(
IIterable<IStorageItem*>* value) final {
ComPtr<IIterator<IStorageItem*>> iterator;
HRESULT hr = value->First(&iterator);
if (FAILED(hr))
return hr;
boolean has_current;
hr = iterator->get_HasCurrent(&has_current);
if (FAILED(hr))
return hr;
while (has_current == TRUE) {
ComPtr<IStorageItem> storage_item;
hr = iterator->get_Current(&storage_item);
if (FAILED(hr))
return hr;
HSTRING name;
hr = storage_item->get_Name(&name);
base::win::ScopedHString wrapped_name(name);
if (FAILED(hr))
return hr;
ComPtr<IStorageFile> storage_file;
hr = storage_item.As(&storage_file);
if (FAILED(hr))
return hr;
FakeDataTransferManager::DataRequestedFile file;
file.name = wrapped_name.GetAsUTF8();
file.file = storage_file;
data_requested_content_.files.push_back(std::move(file));
hr = iterator->MoveNext(&has_current);
if (FAILED(hr))
return hr;
}
return S_OK;
}
IFACEMETHODIMP SetUri(IUriRuntimeClass* value) final { return S_OK; }
// IDataPackage2
IFACEMETHODIMP SetApplicationLink(IUriRuntimeClass* value) final {
return S_OK;
}
IFACEMETHODIMP SetWebLink(IUriRuntimeClass* value) final {
HSTRING raw_uri;
value->get_RawUri(&raw_uri);
base::win::ScopedHString wrapped_value(raw_uri);
data_requested_content_.uri = wrapped_value.GetAsUTF8();
return S_OK;
}
private:
FakeDataTransferManager::DataRequestedContent& data_requested_content_;
ComPtr<IDataPackagePropertySet> properties_;
};
// Fake IDataRequest that records the content placed in its data package and
// supports the optional deferral flow used by asynchronous share sources.
class FakeDataRequest final
    : public RuntimeClass<RuntimeClassFlags<WinRtClassicComMix>, IDataRequest> {
 public:
  // Deferral handle returned by GetDeferral(); completing it fires the
  // post-data-requested callback.
  struct FakeDataRequestDeferral final
      : public RuntimeClass<RuntimeClassFlags<WinRtClassicComMix>,
                            IDataRequestDeferral> {
   public:
    explicit FakeDataRequestDeferral(FakeDataRequest* data_request)
        : data_request_(data_request) {}
    FakeDataRequestDeferral(const FakeDataRequestDeferral&) = delete;
    FakeDataRequestDeferral& operator=(const FakeDataRequestDeferral&) = delete;
    // IDataRequestDeferral
    IFACEMETHODIMP Complete() final {
      data_request_->RunPostDataRequestedCallbackImpl();
      return S_OK;
    }
   private:
    ComPtr<FakeDataRequest> data_request_;
  };
  FakeDataRequest(FakeDataTransferManager::PostDataRequestedCallback
                      post_data_requested_callback)
      : post_data_requested_callback_(post_data_requested_callback) {}
  FakeDataRequest(const FakeDataRequest&) = delete;
  FakeDataRequest& operator=(const FakeDataRequest&) = delete;
  ~FakeDataRequest() final = default;
  // IDataRequest
  IFACEMETHODIMP FailWithDisplayText(HSTRING value) final {
    NOTREACHED();
    return E_NOTIMPL;
  }
  IFACEMETHODIMP get_Data(IDataPackage** value) final {
    // Lazily create the package so all writes funnel into
    // |data_requested_content_|.
    if (!data_package_)
      data_package_ = Make<FakeDataPackage>(data_requested_content_);
    auto hr = data_package_->QueryInterface(IID_PPV_ARGS(value));
    EXPECT_HRESULT_SUCCEEDED(hr);
    return hr;
  }
  IFACEMETHODIMP
  get_Deadline(DateTime* value) final {
    NOTREACHED();
    return E_NOTIMPL;
  }
  IFACEMETHODIMP GetDeferral(IDataRequestDeferral** value) final {
    if (!data_request_deferral_)
      data_request_deferral_ = Make<FakeDataRequestDeferral>(this);
    auto hr = data_request_deferral_->QueryInterface(IID_PPV_ARGS(value));
    EXPECT_HRESULT_SUCCEEDED(hr);
    return hr;
  }
  IFACEMETHODIMP put_Data(IDataPackage* value) final {
    data_package_ = value;
    return S_OK;
  }
  void RunPostDataRequestedCallback() {
    // If there is not a deferral trigger the callback right away, otherwise it
    // will be triggered when the deferral is complete
    if (!data_request_deferral_)
      RunPostDataRequestedCallbackImpl();
  }
 private:
  void RunPostDataRequestedCallbackImpl() {
    post_data_requested_callback_.Run(data_requested_content_);
  }
  ComPtr<IDataPackage> data_package_;
  ComPtr<FakeDataRequestDeferral> data_request_deferral_;
  FakeDataTransferManager::DataRequestedContent data_requested_content_;
  FakeDataTransferManager::PostDataRequestedCallback
      post_data_requested_callback_;
};
// Fake IDataRequestedEventArgs that lazily creates the FakeDataRequest handed
// to the registered DataRequested event handler.
class FakeDataRequestedEventArgs final
    : public RuntimeClass<RuntimeClassFlags<WinRtClassicComMix>,
                          IDataRequestedEventArgs> {
 public:
  FakeDataRequestedEventArgs(FakeDataTransferManager::PostDataRequestedCallback
                                 post_data_requested_callback)
      : post_data_requested_callback_(post_data_requested_callback) {}
  FakeDataRequestedEventArgs(const FakeDataRequestedEventArgs&) = delete;
  FakeDataRequestedEventArgs& operator=(const FakeDataRequestedEventArgs&) =
      delete;
  ~FakeDataRequestedEventArgs() final = default;
  // IDataRequestedEventArgs
  IFACEMETHODIMP get_Request(IDataRequest** value) final {
    // Created on first access so the callback wiring happens exactly once.
    if (!data_request_)
      data_request_ = Make<FakeDataRequest>(post_data_requested_callback_);
    auto hr = data_request_->QueryInterface(IID_PPV_ARGS(value));
    EXPECT_HRESULT_SUCCEEDED(hr);
    return hr;
  }
  // Forwards to the request (if one was ever created) so the callback fires
  // after the handler finishes, honoring any pending deferral.
  void RunPostDataRequestedCallback() {
    if (data_request_)
      data_request_->RunPostDataRequestedCallback();
  }
 private:
  ComPtr<FakeDataRequest> data_request_;
  FakeDataTransferManager::PostDataRequestedCallback
      post_data_requested_callback_;
};
} // namespace
// static
// The DataTransferManager WinRT APIs faked here exist only on Windows 10+.
bool FakeDataTransferManager::IsSupportedEnvironment() {
  return base::win::GetVersion() >= base::win::Version::WIN10;
}
FakeDataTransferManager::FakeDataTransferManager() {
  // Default to a no-op so tests that never inspect the shared data still run.
  post_data_requested_callback_ = base::DoNothing();
}
FakeDataTransferManager::~FakeDataTransferManager() = default;
FakeDataTransferManager::DataRequestedFile::DataRequestedFile() = default;
FakeDataTransferManager::DataRequestedFile::DataRequestedFile(
FakeDataTransferManager::DataRequestedFile&&) = default;
FakeDataTransferManager::DataRequestedFile::~DataRequestedFile() = default;
FakeDataTransferManager::DataRequestedContent::DataRequestedContent() = default;
FakeDataTransferManager::DataRequestedContent::~DataRequestedContent() =
default;
IFACEMETHODIMP
FakeDataTransferManager::add_DataRequested(
    ITypedEventHandler<DataTransferManager*, DataRequestedEventArgs*>*
        event_handler,
    EventRegistrationToken* event_cookie) {
  // Track the handler under a monotonically increasing token so it can later
  // be unregistered via remove_DataRequested.
  DataRequestedHandlerEntry entry;
  entry.event_handler = event_handler;
  entry.token_value = ++latest_token_value_;
  data_requested_event_handlers_.push_back(std::move(entry));
  event_cookie->value = latest_token_value_;
  return S_OK;
}
IFACEMETHODIMP
FakeDataTransferManager::remove_DataRequested(
    EventRegistrationToken event_cookie) {
  // Locate the handler registered under this token and drop it.
  for (auto it = data_requested_event_handlers_.begin();
       it != data_requested_event_handlers_.end(); ++it) {
    if (it->token_value == event_cookie.value) {
      data_requested_event_handlers_.erase(it);
      return S_OK;
    }
  }
  ADD_FAILURE() << "remove_DataRequested called for untracked token";
  return E_FAIL;
}
IFACEMETHODIMP FakeDataTransferManager::add_TargetApplicationChosen(
    ITypedEventHandler<DataTransferManager*, TargetApplicationChosenEventArgs*>*
        eventHandler,
    EventRegistrationToken* event_cookie) {
  // Not exercised by current tests; fail loudly if something starts using it.
  NOTREACHED();
  return E_NOTIMPL;
}
IFACEMETHODIMP
FakeDataTransferManager::remove_TargetApplicationChosen(
    EventRegistrationToken event_cookie) {
  // Not exercised by current tests; fail loudly if something starts using it.
  NOTREACHED();
  return E_NOTIMPL;
}
base::OnceClosure FakeDataTransferManager::GetDataRequestedInvoker() {
if (data_requested_event_handlers_.empty()) {
ADD_FAILURE()<|fim▁hole|>
// Though multiple handlers may be registered for this event, only the
// latest is invoked by the OS and then the event is considered handled.
auto handler = data_requested_event_handlers_.back().event_handler;
ComPtr<FakeDataTransferManager> self = this;
return base::BindOnce(
[](ComPtr<FakeDataTransferManager> self,
ComPtr<ITypedEventHandler<DataTransferManager*,
DataRequestedEventArgs*>> handler) {
auto event_args = Make<FakeDataRequestedEventArgs>(
self->post_data_requested_callback_);
handler->Invoke(self.Get(), event_args.Get());
event_args->RunPostDataRequestedCallback();
},
self, handler);
}
// Returns true if at least one DataRequested handler is currently registered.
bool FakeDataTransferManager::HasDataRequestedListener() {
  return !data_requested_event_handlers_.empty();
}
void FakeDataTransferManager::SetPostDataRequestedCallback(
    PostDataRequestedCallback post_data_requested_callback) {
  // Invoked (via FakeDataRequest) after a DataRequested handler has populated
  // the data package, exposing the captured content to the test.
  post_data_requested_callback_ = std::move(post_data_requested_callback);
}
FakeDataTransferManager::DataRequestedHandlerEntry::
DataRequestedHandlerEntry() = default;
FakeDataTransferManager::DataRequestedHandlerEntry::DataRequestedHandlerEntry(
DataRequestedHandlerEntry const& other) = default;
FakeDataTransferManager::DataRequestedHandlerEntry::
~DataRequestedHandlerEntry() {
// Check that the event handler has not been over-freed.
//
// An explicit call to Reset() will cause an Access Violation exception if the
// reference count is already at 0. Though the underling ComPtr code does a
// similar check on destruction of the ComPtr, it does not throw an exception
// in that case, so we have to call Reset() to have the failure exposed to us.
//
// We cannot assume that this particular ComPtr is the last reference to the
// event handler, so do not check to see if the value returned by Reset() is
// 0.
event_handler.Reset();
}
} // namespace webshare<|fim▁end|>
|
<< "GetDataRequestedInvoker called with no event handler registered";
return base::DoNothing();
}
|
<|file_name|>test_govt.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"Fully test this module's functionality through the use of fixtures."
from megacosm.generators import Govt, Country, City
import unittest2 as unittest
import fakeredis
import fixtures
from config import TestConfiguration
class TestGovt(unittest.TestCase):
    def setUp(self):
        """Build a fake redis populated with every fixture Govt generation reads."""
        self.redis = fakeredis.FakeRedis()
        fixtures.govt.import_fixtures(self)
        fixtures.city.import_fixtures(self)
        fixtures.region.import_fixtures(self)
        fixtures.country.import_fixtures(self)
        fixtures.organization.import_fixtures(self)
        fixtures.business.import_fixtures(self)
        fixtures.leader.import_fixtures(self)
        fixtures.npc.import_fixtures(self)
        fixtures.motivation.import_fixtures(self)
        fixtures.phobia.import_fixtures(self)
        # Leader/NPC generation needs at least one race entry to draw from.
        self.redis.lpush('npc_race','gnome')
    def tearDown(self):
        # Wipe the fake redis so state cannot leak between tests.
        self.redis.flushall()
    def test_random_govt(self):
        """A Govt built with no params draws its fields from the fixtures."""
        govt = Govt(self.redis)
        self.assertEqual('far longer than should be allowed', govt.age['name'])
    def test_static_body(self):
        """Passing a prebuilt Country as 'body' uses it verbatim."""
        country=Country(self.redis)
        govt = Govt(self.redis,{'body':country})
        self.assertIn('Central Afkil', str(govt.body))
        self.assertEqual(type(govt.body), Country)
    def test_static_body_country(self):
        """kind='country' generates a Country body from fixtures."""
        govt = Govt(self.redis,{'kind':'country'})
        self.assertIn('Central Afkil', str(govt.body))
        self.assertEqual(type(govt.body), Country)
    def test_str(self):
        """str(govt) includes the government-type description."""
        govt = Govt(self.redis,{'kind':'country'})
        self.assertIn('absolute monarchy', str(govt))
    def test_static_body_tacos(self):
        """An unsupported kind falls back to a country body."""
        govt = Govt(self.redis,{'kind':'tacos'})
        self.assertIn('Central Afkil', str(govt.body))
        self.assertEqual(type(govt.body), Country)
<|fim▁hole|> """ """
self.redis.lpush('govt_kind', 'city')
self.redis.hset('govtcity_govttype_description', 'councilmanager', '{ "name":"council/manager", "description":"things are run by a council, which selects a manager for administrative tasks"}')
self.redis.lpush('govtcity_govttype', 'councilmanager')
govt = Govt(self.redis,{'kind':'city'})
self.assertIn('Alta DeAllentle Gate', str(govt.body))
self.assertEqual(type(govt.body), City)<|fim▁end|>
|
def test_static_body_city(self):
|
<|file_name|>equipmentscheduleitem.py<|end_file_name|><|fim▁begin|>from indivo.lib import iso8601
from indivo.models import EquipmentScheduleItem
XML = 'xml'
DOM = 'dom'
class IDP_EquipmentScheduleItem:
def post_data(self, name=None,
name_type=None,
name_value=None,
name_abbrev=None,
scheduledBy=None,
dateScheduled=None,
dateStart=None,
dateEnd=None,
recurrenceRule_frequency=None,
recurrenceRule_frequency_type=None,
recurrenceRule_frequency_value=None,
recurrenceRule_frequency_abbrev=None,
recurrenceRule_interval=None,
recurrenceRule_interval_type=None,
recurrenceRule_interval_value=None,
recurrenceRule_interval_abbrev=None,
recurrenceRule_dateUntil=None,
recurrenceRule_count=None,
instructions=None):
"""
SZ: More error checking needs to be performed in this method
"""
try:
if dateScheduled:
"""
Elliot: 3/4 changed parse_utc_date to parse_date to handle XML:datetime
"""
dateScheduled = iso8601.parse_date(dateScheduled)
if dateStart:
"""
Elliot: 3/4 changed parse_utc_date to parse_date to handle XML:datetime
"""
dateStart = iso8601.parse_date(dateStart)
if dateEnd:
"""
Elliot: 3/4 changed parse_utc_date to parse_date to handle XML:datetime
"""
dateEnd = iso8601.parse_date(dateEnd)
if recurrenceRule_dateUntil:
"""
Elliot: 3/4 changed parse_utc_date to parse_date to handle XML:datetime
"""
recurrenceRule_dateUntil = iso8601.parse_date(recurrenceRule_dateUntil)
equipmentscheduleitem_obj = EquipmentScheduleItem.objects.create(
name=name,
name_type=name_type,
name_value=name_value,
name_abbrev=name_abbrev,<|fim▁hole|> date_scheduled=dateScheduled,
date_start=dateStart,
date_end=dateEnd,
recurrencerule_frequency=recurrenceRule_frequency,
recurrencerule_frequency_type=recurrenceRule_frequency_type,
recurrencerule_frequency_value=recurrenceRule_frequency_value,
recurrencerule_frequency_abbrev=recurrenceRule_frequency_abbrev,
recurrencerule_interval=recurrenceRule_interval,
recurrencerule_interval_type=recurrenceRule_interval_type,
recurrencerule_interval_value=recurrenceRule_interval_value,
recurrencerule_interval_abbrev=recurrenceRule_interval_abbrev,
recurrencerule_dateuntil=recurrenceRule_dateUntil,
recurrencerule_count=recurrenceRule_count,
instructions=instructions)
return equipmentscheduleitem_obj
except Exception, e:
print "Error: " + str(e)
raise ValueError("problem processing equipmentscheduleitem report " + str(e))<|fim▁end|>
|
scheduled_by=scheduledBy,
|
<|file_name|>BookMetadataTable.tsx<|end_file_name|><|fim▁begin|>import * as React from "react";
import ReactTable from "react-table";
import * as mobxReact from "mobx-react";
import { StringListCheckbox } from "../../react_components/stringListCheckbox";
import { Label } from "../../react_components/l10nComponents";
import { Link } from "../../react_components/link";
import "./BookMetadataTable.less";
import SubjectChooser from "./SubjectChooser";
import A11yLevelChooser from "./A11yLevelChooser";
import TextField from "@material-ui/core/TextField";
interface IProps {
// We don't know or care what the top level elements are to this. We will show a row for each
// of the top level entries that we find.
// However the "value" of each entry must itself be an object of type {type:___, value:___}.
// I don't know if it is possible to express that in Typescript and it doesn't seem worth a lot of effort.
metadata: any;
translatedControlStrings: any;
}
// The BookMetadataTable shows some elements of https://docs.google.com/document/d/e/2PACX-1vREQ7fUXgSE7lGMl9OJkneddkWffO4sDnMG5Vn-IleK35fJSFqnC-6ulK1Ss3eoETCHeLn0wPvcxJOf/pub
// BookMetadataTable should not be marked with @mobxReact.observer because this causes problems tabbing
// between TextField elements: re-rendering the entire table resets the tab position to the beginning, so
// editing a text field and trying to tab to the next field would send the user to the initial (picture?)
// field instead of the next field. See https://issues.bloomlibrary.org/youtrack/issue/BL-9022.
export default class BookMetadataTable extends React.Component<IProps> {
constructor(props) {
super(props);
}
public componentDidMount() {}
public render() {
return (
<div>
<ReactTable
className="bookMetadataTable"
loading={false}
NoDataComponent={() => (
<div className="loading">Loading...</div>
)}
showPagination={false}
minRows={1} //don't add extra blank rows
data={Object.keys(this.props.metadata).map(key => {
return {
key,
value: this.props.metadata[key].value,
type: this.props.metadata[key].type,
translatedLabel: this.props.metadata[key]
.translatedLabel,
helpurl: this.props.metadata[key].helpurl
};
})}
columns={[
{
// there is no automatic way to compute this (https://github.com/react-tools/react-table/issues/94);
// need to keep it large enough for localization
width: 150,
accessor: "key",
className: "label",
Cell: (cellInfo: any) => {
return (
<>
<Label
l10nKey={
"BookMetadata." + cellInfo.value
}
alreadyLocalized={true}
>
{cellInfo.original.translatedLabel}
</Label>
{cellInfo.original.helpurl &&
cellInfo.original.helpurl.length > 0 ? (<|fim▁hole|> href={cellInfo.original.helpurl}
>
What's this?
</Link>
) : (
""
)}
</>
);
}
},
{
className: "value",
Cell: (cellInfo: any) => {
const f = cellInfo.original;
//console.log(JSON.stringify(f));
switch (f.type) {
case "image":
return <img src={f.value} />;
case "readOnlyText":
// We need to wrap in a div (or something) so we can put in a margin to replace the removed padding of rt-dt
// See stylesheet for more info.
return <div>{f.value}</div>;
case "editableText":
case "bigEditableText":
return (
<TextField
defaultValue={f.value}
margin="dense"
variant="outlined"
fullWidth={
f.type == "bigEditableText"
}
multiline={
f.type == "bigEditableText"
}
onBlur={(
event: React.FocusEvent<
HTMLTextAreaElement
>
) => {
this.props.metadata[
f.key
].value =
event.currentTarget.value;
}}
/>
);
case "subjects":
return (
<SubjectChooser
subjects={
this.props.metadata.subjects
}
/>
);
case "hazards":
return this.makeHazardControls();
case "a11yLevel":
return (
<A11yLevelChooser
a11yLevel={
this.props.metadata
.a11yLevel
}
/>
);
case "a11yFeatures":
return this.makeA11yFeaturesControls();
default:
return "??" + f.type;
}
}
}
]}
/>
</div>
);
}
// <mobxReact.Observer> means mobx will automatically track which observables this component uses
// in its render attribute function, and then re-render when they change. The "observable" here is
// the metadata.hazards prop, and it's observable because metadata is marked as such back where it
// is created in our parent component.
private makeHazardControls() {
return (
<mobxReact.Observer
render={() => (
<div className="checkbox-list">
{/* from https://www.w3.org/wiki/WebSchemas/Accessibility*/}
{/* "Sound Hazard" is too hard to explain (BL-6947) */}
{["flashingHazard", "motionSimulationHazard"].map(
hazardName => {
return (
<StringListCheckbox
key={hazardName}
l10nKey={"BookMetadata." + hazardName}
alreadyLocalized={true}
list={this.props.metadata.hazards.value}
itemName={hazardName}
tristateItemOffName={
"no" +
this.capitalizeFirstChar(hazardName)
}
onChange={list =>
(this.props.metadata.hazards.value = list)
}
label={
this.props.translatedControlStrings[
hazardName
]
}
/>
);
}
)}
</div>
)}
/>
);
}
private capitalizeFirstChar(hazardName: string): string {
let uc = hazardName[0].toUpperCase();
return uc + hazardName.substr(1, hazardName.length - 1);
}
// <mobxReact.Observer> means mobx will automatically track which observables this component uses
// in its render attribute function, and then re-render when they change. The "observable" here is
// the metadata.a11yFeatures prop, and it's observable because metadata is marked as such back where
// it is created in our parent component.
private makeA11yFeaturesControls() {
return (
<mobxReact.Observer
render={() => (
<div className="checkbox-list">
{/* from https://www.w3.org/wiki/WebSchemas/Accessibility*/}
{["alternativeText", "signLanguage"].map(
featureName => {
return (
<StringListCheckbox
key={featureName}
l10nKey={"BookMetadata." + featureName}
alreadyLocalized={true}
list={
this.props.metadata.a11yFeatures
.value
}
itemName={featureName}
onChange={list =>
(this.props.metadata.a11yFeatures.value = list)
}
label={
this.props.translatedControlStrings[
featureName
]
}
/>
);
}
)}
</div>
)}
/>
);
}
}<|fim▁end|>
|
<Link
className="whatsthis"
l10nKey="Common.WhatsThis"
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![feature(old_path)]
extern crate sdl2;
#[test]
fn audio_spec_wav() {
let wav = sdl2::audio::AudioSpecWAV::load_wav(&Path::new("./tests/sine.wav")).unwrap();<|fim▁hole|> assert_eq!(wav.freq, 22050);
assert_eq!(wav.format, sdl2::audio::AUDIOS16LSB);
assert_eq!(wav.channels, 1);
let buffer = wav.get_buffer();
assert_eq!(buffer.len(), 4410);
}<|fim▁end|>
| |
<|file_name|>measurables.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-"""
# Temperature conversion constants
KELVIN_OFFSET = 273.15
FAHRENHEIT_OFFSET = 32.0
FAHRENHEIT_DEGREE_SCALE = 1.8
# Wind speed conversion constants
MILES_PER_HOUR_FOR_ONE_METER_PER_SEC = 2.23694
KM_PER_HOUR_FOR_ONE_METER_PER_SEC = 3.6
KNOTS_FOR_ONE_METER_PER_SEC = 1.94384
# Barometric conversion constants
HPA_FOR_ONE_INHG = 33.8639
# Visibility distance conversion constants
MILE_FOR_ONE_METER = 0.000621371
KMS_FOR_ONE_METER = .001
# Decimal precision
ROUNDED_TO = 2
def kelvin_dict_to(d, target_temperature_unit):
"""
Converts all the values in a dict from Kelvin temperatures to the
specified temperature format.
:param d: the dictionary containing Kelvin temperature values
:type d: dict
:param target_temperature_unit: the target temperature unit, may be:
'celsius' or 'fahrenheit'
:type target_temperature_unit: str
:returns: a dict with the same keys as the input dict and converted
temperature values as values<|fim▁hole|>
"""
if target_temperature_unit == 'kelvin':
return d
elif target_temperature_unit == 'celsius':
return {key: kelvin_to_celsius(d[key]) for key in d}
elif target_temperature_unit == 'fahrenheit':
return {key: kelvin_to_fahrenheit(d[key]) for key in d}
else:
raise ValueError("Invalid value for target temperature conversion \
unit")
def kelvin_to_celsius(kelvintemp):
"""
Converts a numeric temperature from Kelvin degrees to Celsius degrees
:param kelvintemp: the Kelvin temperature
:type kelvintemp: int/long/float
:returns: the float Celsius temperature
:raises: *TypeError* when bad argument types are provided
"""
if kelvintemp < 0:
raise ValueError(__name__ +
": negative temperature values not allowed")
celsiustemp = kelvintemp - KELVIN_OFFSET
return float("{0:.2f}".format(celsiustemp))
def kelvin_to_fahrenheit(kelvintemp):
"""
Converts a numeric temperature from Kelvin degrees to Fahrenheit degrees
:param kelvintemp: the Kelvin temperature
:type kelvintemp: int/long/float
:returns: the float Fahrenheit temperature
:raises: *TypeError* when bad argument types are provided
"""
if kelvintemp < 0:
raise ValueError(__name__ +
": negative temperature values not allowed")
fahrenheittemp = (kelvintemp - KELVIN_OFFSET) * \
FAHRENHEIT_DEGREE_SCALE + FAHRENHEIT_OFFSET
return float("{0:.2f}".format(fahrenheittemp))
def metric_wind_dict_to_imperial(d):
"""
Converts all the wind values in a dict from meters/sec (metric measurement
system) to miles/hour (imperial measurement system)
.
:param d: the dictionary containing metric values
:type d: dict
:returns: a dict with the same keys as the input dict and values converted
to miles/hour
"""
result = {}
for key, value in d.items():
if key != 'deg': # do not convert wind degree
result[key] = value * MILES_PER_HOUR_FOR_ONE_METER_PER_SEC
else:
result[key] = value
return result
def metric_wind_dict_to_km_h(d):
"""
Converts all the wind values in a dict from meters/sec
to km/hour.
:param d: the dictionary containing metric values
:type d: dict
:returns: a dict with the same keys as the input dict and values converted
to km/hour
"""
result = {}
for key, value in d.items():
if key != 'deg': # do not convert wind degree
result[key] = value * KM_PER_HOUR_FOR_ONE_METER_PER_SEC
else:
result[key] = value
return result
def metric_wind_dict_to_knots(d):
"""
Converts all the wind values in a dict from meters/sec
to knots
:param d: the dictionary containing metric values
:type d: dict
:returns: a dict with the same keys as the input dict and values converted
to km/hour
"""
result = {}
for key, value in d.items():
if key != 'deg': # do not convert wind degree
result[key] = value * KNOTS_FOR_ONE_METER_PER_SEC
else:
result[key] = value
return result
def metric_wind_dict_to_beaufort(d):
"""
Converts all the wind values in a dict from meters/sec
to the corresponding Beaufort scale level (which is not an exact number but rather
represents a range of wind speeds - see: https://en.wikipedia.org/wiki/Beaufort_scale).
Conversion table: https://www.windfinder.com/wind/windspeed.htm
:param d: the dictionary containing metric values
:type d: dict
:returns: a dict with the same keys as the input dict and values converted
to Beaufort level
"""
result = {}
for key, value in d.items():
if key != 'deg': # do not convert wind degree
if value <= 0.2:
bf = 0
elif 0.2 < value <= 1.5:
bf = 1
elif 1.5 < value <= 3.3:
bf = 2
elif 3.3 < value <= 5.4:
bf = 3
elif 5.4 < value <= 7.9:
bf = 4
elif 7.9 < value <= 10.7:
bf = 5
elif 10.7 < value <= 13.8:
bf = 6
elif 13.8 < value <= 17.1:
bf = 7
elif 17.1 < value <= 20.7:
bf = 8
elif 20.7 < value <= 24.4:
bf = 9
elif 24.4 < value <= 28.4:
bf = 10
elif 28.4 < value <= 32.6:
bf = 11
else:
bf = 12
result[key] = bf
else:
result[key] = value
return result
def metric_pressure_dict_to_inhg(d):
"""
Converts all barometric pressure values in a dict to "inches of mercury."
:param d: the dictionary containing metric values
:type d: dict
:returns: a dict with the same keys as the input dict and values converted
to "Hg or inHg (inches of mercury)
Note what OWM says about pressure: "Atmospheric pressure [is given in hPa]
(on the sea level, if there is no sea_level or grnd_level data)"
"""
result = dict()
for key, value in d.items():
if value is None:
continue
result[key] = round((value / HPA_FOR_ONE_INHG), ROUNDED_TO)
return result
def visibility_distance_to(v, target_visibility_unit='kilometers'):
"""
Converts visibility distance (in meters) to kilometers or miles
Defaults to kilometer conversion
:param distance: the value of visibility_distance
:type distance: int
:param target_visibility_unit: the unit of conversion
:type target_visibility_unit: str
:returns: a converted value for visibility_distance (float)
"""
if v is None:
return v
if target_visibility_unit == 'kilometers':
const = KMS_FOR_ONE_METER
elif target_visibility_unit == 'miles':
const = MILE_FOR_ONE_METER
else:
raise ValueError('Invalid value for target visibility distance unit')
return round(v * const, ROUNDED_TO)<|fim▁end|>
|
:raises: *ValueError* when unknown target temperature units are provided
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from time import time
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_delete
from django.dispatch import receiver
<|fim▁hole|>def upload_path(instance, filename):
return 'uploads/user_{0}/{1}_{2}'.format(
instance.owner.id,
str(time()).replace('.', '_'),
filename
)
class UserProfile(models.Model):
user = models.OneToOneField(User)
photo = models.TextField()
class Images(models.Model):
owner = models.ForeignKey(User)
image = models.ImageField(upload_to=upload_path)
image_file_name = models.CharField(max_length=100, null=True)
date_created = models.DateTimeField(
auto_now_add=True, verbose_name='created')
# Function to delete from the file storage
@receiver(post_delete, sender=Images)
def delete_from_file_system(sender, instance, **kwargs):
image_path = instance.image.path
# split the image part
filepath, ext = os.path.splitext(image_path)
# create the filtered image path
new_filepath = filepath + "filtered" + ext
# delete from file directory
if os.path.exists(image_path):
# delete image
os.remove(image_path)
if os.path.exists(new_filepath):
# delete filtered image
os.remove(new_filepath)<|fim▁end|>
|
import os
|
<|file_name|>ThirdPartyMeasurementSettings.java<|end_file_name|><|fim▁begin|>// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.api.ads.admanager.jaxws.v202108;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
/**
*
* Contains third party auto-pixeling settings for cross-sell Partners.
*
*
* <p>Java class for ThirdPartyMeasurementSettings complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="ThirdPartyMeasurementSettings">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="viewabilityPartner" type="{https://www.google.com/apis/ads/publisher/v202108}ThirdPartyViewabilityIntegrationPartner" minOccurs="0"/>
* <element name="viewabilityClientId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="viewabilityReportingId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="publisherViewabilityPartner" type="{https://www.google.com/apis/ads/publisher/v202108}ThirdPartyViewabilityIntegrationPartner" minOccurs="0"/>
* <element name="publisherViewabilityClientId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="publisherViewabilityReportingId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="brandLiftPartner" type="{https://www.google.com/apis/ads/publisher/v202108}ThirdPartyBrandLiftIntegrationPartner" minOccurs="0"/>
* <element name="brandLiftClientId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="brandLiftReportingId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="reachPartner" type="{https://www.google.com/apis/ads/publisher/v202108}ThirdPartyReachIntegrationPartner" minOccurs="0"/>
* <element name="reachClientId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="reachReportingId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="publisherReachPartner" type="{https://www.google.com/apis/ads/publisher/v202108}ThirdPartyReachIntegrationPartner" minOccurs="0"/>
* <element name="publisherReachClientId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="publisherReachReportingId" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ThirdPartyMeasurementSettings", propOrder = {
"viewabilityPartner",
"viewabilityClientId",
"viewabilityReportingId",
"publisherViewabilityPartner",
"publisherViewabilityClientId",
"publisherViewabilityReportingId",
"brandLiftPartner",
"brandLiftClientId",
"brandLiftReportingId",
"reachPartner",
"reachClientId",
"reachReportingId",
"publisherReachPartner",
"publisherReachClientId",
"publisherReachReportingId"
})
public class ThirdPartyMeasurementSettings {
@XmlSchemaType(name = "string")
protected ThirdPartyViewabilityIntegrationPartner viewabilityPartner;
protected String viewabilityClientId;
protected String viewabilityReportingId;
@XmlSchemaType(name = "string")
protected ThirdPartyViewabilityIntegrationPartner publisherViewabilityPartner;
protected String publisherViewabilityClientId;
protected String publisherViewabilityReportingId;
@XmlSchemaType(name = "string")
protected ThirdPartyBrandLiftIntegrationPartner brandLiftPartner;
protected String brandLiftClientId;
protected String brandLiftReportingId;
@XmlSchemaType(name = "string")
protected ThirdPartyReachIntegrationPartner reachPartner;
protected String reachClientId;
protected String reachReportingId;
@XmlSchemaType(name = "string")
protected ThirdPartyReachIntegrationPartner publisherReachPartner;
protected String publisherReachClientId;
protected String publisherReachReportingId;
/**
* Gets the value of the viewabilityPartner property.
*
* @return
* possible object is
* {@link ThirdPartyViewabilityIntegrationPartner }
*
*/
public ThirdPartyViewabilityIntegrationPartner getViewabilityPartner() {
return viewabilityPartner;
}
/**
* Sets the value of the viewabilityPartner property.
*
* @param value
* allowed object is
* {@link ThirdPartyViewabilityIntegrationPartner }
*
*/
public void setViewabilityPartner(ThirdPartyViewabilityIntegrationPartner value) {
this.viewabilityPartner = value;
}
/**
* Gets the value of the viewabilityClientId property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getViewabilityClientId() {
return viewabilityClientId;
}
/**
* Sets the value of the viewabilityClientId property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setViewabilityClientId(String value) {
this.viewabilityClientId = value;
}
/**
* Gets the value of the viewabilityReportingId property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getViewabilityReportingId() {
return viewabilityReportingId;
}
/**
* Sets the value of the viewabilityReportingId property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setViewabilityReportingId(String value) {
this.viewabilityReportingId = value;
}
/**
* Gets the value of the publisherViewabilityPartner property.
*
* @return
* possible object is
* {@link ThirdPartyViewabilityIntegrationPartner }
*
*/
public ThirdPartyViewabilityIntegrationPartner getPublisherViewabilityPartner() {
return publisherViewabilityPartner;
}
/**
* Sets the value of the publisherViewabilityPartner property.
*
* @param value
* allowed object is
* {@link ThirdPartyViewabilityIntegrationPartner }
*
*/
public void setPublisherViewabilityPartner(ThirdPartyViewabilityIntegrationPartner value) {
this.publisherViewabilityPartner = value;
}
/**
* Gets the value of the publisherViewabilityClientId property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getPublisherViewabilityClientId() {
return publisherViewabilityClientId;
}
/**
* Sets the value of the publisherViewabilityClientId property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setPublisherViewabilityClientId(String value) {
this.publisherViewabilityClientId = value;
}
/**
* Gets the value of the publisherViewabilityReportingId property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getPublisherViewabilityReportingId() {
return publisherViewabilityReportingId;
}
/**
* Sets the value of the publisherViewabilityReportingId property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setPublisherViewabilityReportingId(String value) {
this.publisherViewabilityReportingId = value;
}
/**
* Gets the value of the brandLiftPartner property.
*
* @return
* possible object is
* {@link ThirdPartyBrandLiftIntegrationPartner }
*
*/
public ThirdPartyBrandLiftIntegrationPartner getBrandLiftPartner() {
return brandLiftPartner;
}
/**
* Sets the value of the brandLiftPartner property.
*
* @param value
* allowed object is
* {@link ThirdPartyBrandLiftIntegrationPartner }
*
*/
public void setBrandLiftPartner(ThirdPartyBrandLiftIntegrationPartner value) {
this.brandLiftPartner = value;
}
/**
* Gets the value of the brandLiftClientId property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getBrandLiftClientId() {
return brandLiftClientId;
}
/**
* Sets the value of the brandLiftClientId property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setBrandLiftClientId(String value) {
this.brandLiftClientId = value;
}
/**
* Gets the value of the brandLiftReportingId property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getBrandLiftReportingId() {
return brandLiftReportingId;
}
/**
* Sets the value of the brandLiftReportingId property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setBrandLiftReportingId(String value) {
this.brandLiftReportingId = value;
}
/**
* Gets the value of the reachPartner property.
*
* @return
* possible object is
* {@link ThirdPartyReachIntegrationPartner }
*
*/
public ThirdPartyReachIntegrationPartner getReachPartner() {
return reachPartner;
}
/**
* Sets the value of the reachPartner property.
*
* @param value
* allowed object is
* {@link ThirdPartyReachIntegrationPartner }
*
*/
public void setReachPartner(ThirdPartyReachIntegrationPartner value) {
this.reachPartner = value;
}
/**
* Gets the value of the reachClientId property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getReachClientId() {
return reachClientId;
}
/**
* Sets the value of the reachClientId property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setReachClientId(String value) {
this.reachClientId = value;
}
/**
* Gets the value of the reachReportingId property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getReachReportingId() {
return reachReportingId;
}
/**
* Sets the value of the reachReportingId property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setReachReportingId(String value) {
this.reachReportingId = value;
}
/**
* Gets the value of the publisherReachPartner property.
*
* @return
* possible object is
* {@link ThirdPartyReachIntegrationPartner }
*
*/
public ThirdPartyReachIntegrationPartner getPublisherReachPartner() {
return publisherReachPartner;
}
/**
* Sets the value of the publisherReachPartner property.
*
* @param value
* allowed object is
* {@link ThirdPartyReachIntegrationPartner }
*
*/
public void setPublisherReachPartner(ThirdPartyReachIntegrationPartner value) {
this.publisherReachPartner = value;
}
/**
* Gets the value of the publisherReachClientId property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getPublisherReachClientId() {<|fim▁hole|> return publisherReachClientId;
}
/**
* Sets the value of the publisherReachClientId property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setPublisherReachClientId(String value) {
this.publisherReachClientId = value;
}
/**
* Gets the value of the publisherReachReportingId property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getPublisherReachReportingId() {
return publisherReachReportingId;
}
/**
* Sets the value of the publisherReachReportingId property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setPublisherReachReportingId(String value) {
this.publisherReachReportingId = value;
}
}<|fim▁end|>
| |
<|file_name|>OAuth2TokenValidationMessageContextTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.oauth2.validators;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import org.wso2.carbon.identity.oauth2.dto.OAuth2TokenValidationRequestDTO;
import org.wso2.carbon.identity.oauth2.dto.OAuth2TokenValidationResponseDTO;
import static org.testng.Assert.assertEquals;
public class OAuth2TokenValidationMessageContextTest {
private OAuth2TokenValidationMessageContext oAuth2TokenValidationMessageContext;
private OAuth2TokenValidationRequestDTO requestDTO;
private OAuth2TokenValidationResponseDTO responseDTO;
@BeforeMethod
public void setUp() throws Exception {
requestDTO = new OAuth2TokenValidationRequestDTO();
responseDTO = new OAuth2TokenValidationResponseDTO();
oAuth2TokenValidationMessageContext = new OAuth2TokenValidationMessageContext(requestDTO, responseDTO);
}
@Test
public void testGetRequestDTO() throws Exception {
assertEquals(oAuth2TokenValidationMessageContext.getRequestDTO(), requestDTO);
}
@Test
public void testGetResponseDTO() throws Exception {
assertEquals(oAuth2TokenValidationMessageContext.getResponseDTO(), responseDTO);
}
@Test
public void testAddProperty() throws Exception {<|fim▁hole|>
@Test
public void testGetProperty() throws Exception {
oAuth2TokenValidationMessageContext.addProperty("testProperty", "testValue");
assertEquals(oAuth2TokenValidationMessageContext.getProperty("testProperty"), "testValue");
}
}<|fim▁end|>
|
oAuth2TokenValidationMessageContext.addProperty("testProperty", "testValue");
assertEquals(oAuth2TokenValidationMessageContext.getProperty("testProperty"), "testValue");
}
|
<|file_name|>dynamictexture.js<|end_file_name|><|fim▁begin|>var THREEx = THREEx || {}
//////////////////////////////////////////////////////////////////////////////////
// Constructor //
//////////////////////////////////////////////////////////////////////////////////
/**
* create a dynamic texture with a underlying canvas
*
* @param {Number} width width of the canvas
* @param {Number} height height of the canvas
*/
THREEx.DynamicTexture = function(width, height){
var canvas = document.createElement( 'canvas' )
canvas.width = width
canvas.height = height
this.canvas = canvas
var context = canvas.getContext( '2d' )
this.context = context
var texture = new THREE.Texture(canvas)
this.texture = texture
}
//////////////////////////////////////////////////////////////////////////////////
// methods //
//////////////////////////////////////////////////////////////////////////////////
/**
* clear the canvas
*
* @param {String*} fillStyle the fillStyle to clear with, if not provided, fallback on .clearRect
* @return {THREEx.DynamicTexture} the object itself, for chained texture
*/
THREEx.DynamicTexture.prototype.clear = function(fillStyle){
// depends on fillStyle
if( fillStyle !== undefined ){
this.context.fillStyle = fillStyle
this.context.fillRect(0,0,this.canvas.width, this.canvas.height)
}else{
this.context.clearRect(0,0,this.canvas.width, this.canvas.height)
}
// make the texture as .needsUpdate
this.texture.needsUpdate = true;
// for chained API
return this;
}
/**
* draw text
*
* @param {String} text the text to display
* @param {Number|undefined} x if provided, it is the x where to draw, if not, the text is centered
* @param {Number} y the y where to draw the text
* @param {String*} fillStyle the fillStyle to clear with, if not provided, fallback on .clearRect
* @param {String*} contextFont the font to use
* @return {THREEx.DynamicTexture} the object itself, for chained texture
*/
THREEx.DynamicTexture.prototype.drawText = function(text, x, y, fillStyle, contextFont){
// set font if needed
if( contextFont !== undefined ) this.context.font = contextFont;
// if x isnt provided
if( x === undefined || x === null ){
var textSize = this.context.measureText(text);
x = (this.canvas.width - textSize.width) / 2;
}
// actually draw the text
this.context.fillStyle = fillStyle;
this.context.fillText(text, x, y);
// make the texture as .needsUpdate
this.texture.needsUpdate = true;
// for chained API
return this;
};
THREEx.DynamicTexture.prototype.drawTextCooked = function(text, options){
var context = this.context
var canvas = this.canvas
options = options || {}
var params = {
margin : options.margin !== undefined ? options.margin : 0.1,
lineHeight : options.lineHeight !== undefined ? options.lineHeight : 0.1,
align : options.align !== undefined ? options.align : 'left',
fillStyle : options.fillStyle !== undefined ? options.fillStyle : 'black',
}
context.save()
context.fillStyle = params.fillStyle;
var y = (params.lineHeight + params.margin)*canvas.height
while(text.length > 0 ){
// compute the text for specifically this line<|fim▁hole|>
// compute x based on params.align
var textSize = context.measureText(maxText);
if( params.align === 'left' ){
var x = params.margin*canvas.width
}else if( params.align === 'right' ){
var x = (1-params.margin)*canvas.width - textSize.width
}else if( params.align === 'center' ){
var x = (canvas.width - textSize.width) / 2;
}else console.assert( false )
// actually draw the text at the proper position
this.context.fillText(maxText, x, y);
// goto the next line
y += params.lineHeight*canvas.height
}
context.restore()
// make the texture as .needsUpdate
this.texture.needsUpdate = true;
// for chained API
return this;
function computeMaxTextLength(text){
var maxText = ''
var maxWidth = (1-params.margin*2)*canvas.width
while( maxText.length !== text.length ){
var textSize = context.measureText(maxText);
if( textSize.width > maxWidth ) break;
maxText += text.substr(maxText.length, 1)
}
return maxText
}
}
/**
 * execute the drawImage on the internal context
 * the arguments are the same the official context2d.drawImage
 * (image, dx, dy [, dWidth, dHeight] or the 9-argument source-rect form)
 *
 * @return {THREEx.DynamicTexture} the object itself, for chained calls
 */
THREEx.DynamicTexture.prototype.drawImage = function(/* same params as context2d.drawImage */){
	// call the drawImage - forward all arguments untouched to the 2d context
	this.context.drawImage.apply(this.context, arguments)
	// make the texture as .needsUpdate so the GPU copy is refreshed
	this.texture.needsUpdate = true;
	// for chained API
	return this;
}
|
var maxText = computeMaxTextLength(text)
// update the remaining text
text = text.substr(maxText.length)
|
<|file_name|>stream.rs<|end_file_name|><|fim▁begin|>use unsafe_cell::UnsafeRefCell;
use error::{ErrCode, eof};
use core::{IoContext, AsIoContext};
use async::Handler;
use streams::Stream;
use buffers::StreamBuf;
use ssl::*;
use ssl::ffi::*;
use std::io;
use std::ptr;
use std::sync::Mutex;
use libc::{c_void, c_int, size_t};
use openssl_sys::*;
use openssl::types::OpenSslTypeRef;
lazy_static! {
static ref ACCEPT_MUTEX: Mutex<()> = Mutex::new(());
}
/// Server-side handshake step; `data`/`len` are unused (signature matches
/// the common `perform` op shape).  The global mutex serializes accepts —
/// NOTE(review): presumably a workaround for non-thread-safe state in
/// OpenSSL's accept path; confirm it is still required.
fn do_accept(ssl: *mut SSL, _: *mut c_void, _: size_t) -> c_int {
    let _lock = ACCEPT_MUTEX.lock();
    unsafe { SSL_accept(ssl) }
}
/// Client-side handshake step; `data`/`len` are unused placeholders so the
/// signature matches the `perform` op shape.
fn do_connect(ssl: *mut SSL, _: *mut c_void, _: size_t) -> c_int {
    unsafe { SSL_connect(ssl) }
}
/// Reads decrypted bytes into `data`.  SSL_read takes a c_int length, so
/// the assert guards against silent truncation of very large buffers.
fn do_read(ssl: *mut SSL, data: *mut c_void, len: size_t) -> c_int {
    assert!(len <= i32::max_value() as size_t);
    unsafe { SSL_read(ssl, data, len as c_int) }
}
/// Encrypts and queues `len` bytes from `data`.  Same c_int-length guard as
/// `do_read`.
fn do_write(ssl: *mut SSL, data: *mut c_void, len: size_t) -> c_int {
    assert!(len <= i32::max_value() as size_t);
    unsafe { SSL_write(ssl, data, len as c_int) }
}
/// Initiates a TLS close_notify.  SSL_shutdown returns 0 when our notify was
/// sent but the peer's has not yet arrived, so it is called a second time to
/// try to complete the bidirectional shutdown.
fn do_shutdown(ssl: *mut SSL, _: *mut c_void, _: size_t) -> c_int {
    let res = unsafe { SSL_shutdown(ssl) };
    if res == 0 {
        unsafe { SSL_shutdown(ssl) }
    } else {
        res
    }
}
/// What the caller must do next after driving an SSL operation through
/// `Engine::perform`.
enum Want {
    /// Feed more ciphertext into the engine, then retry the operation.
    InputAndRetry = -2,
    /// Flush pending output to the transport, then retry the operation.
    OutputAndRetry = -1,
    /// Operation finished; no transport transfer required.
    Nothing = 0,
    /// Operation finished but produced output that must be flushed.
    Output = 1,
}
/// Owner of an OpenSSL `SSL` handle plus the external half of a BIO pair.
/// All network bytes flow through `ext_bio`, so the surrounding stream code
/// stays in control of the actual transport I/O.
struct Engine {
    ssl: *mut SSL,
    // External BIO: ciphertext read from here must go to the peer; peer
    // bytes are written here to feed OpenSSL.
    ext_bio: *mut BIO,
    // Invoked from the C verify callback; defaults to rejecting every peer.
    verify_callback: Box<Fn(bool, &SslVerifyContext) -> bool>,
}
impl Engine {
    /// Creates a fresh SSL object wired to an in-memory BIO pair (4 KiB each
    /// way).  The internal BIO is handed to OpenSSL; the external one is kept
    /// for `get_output`/`put_input`.
    pub fn new(ctx: &SslContext) -> Result<Engine> {
        let ssl = unsafe { SSL_new(ctx.as_ptr()) };
        if ssl.is_null() {
            return Err(Error::last_ssl_error());
        }
        // Partial/moving-buffer writes match the incremental way `perform`
        // drives I/O; RELEASE_BUFFERS trims idle memory use.
        unsafe {
            SSL_set_mode(ssl, SSL_MODE_ENABLE_PARTIAL_WRITE);
            SSL_set_mode(ssl, SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER);
            SSL_set_mode(ssl, SSL_MODE_RELEASE_BUFFERS);
        }
        let mut ext_bio = ptr::null_mut();
        let mut int_bio = ptr::null_mut();
        // NOTE(review): BIO_new_bio_pair's return value is not checked — on
        // allocation failure the null BIOs would be used below; confirm.
        unsafe {
            BIO_new_bio_pair(&mut int_bio, 4096, &mut ext_bio, 4096);
            SSL_set_bio(ssl, int_bio, int_bio);
        }
        Ok(Engine {
            ssl: ssl,
            ext_bio: ext_bio,
            verify_callback: Box::new(|_,_| false),
        })
    }
    /// Sets the peer-verification mode, keeping whatever verify callback is
    /// currently installed on the SSL object.
    pub fn set_verify_mode(&self, mode: SslVerifyMode) -> Result<()> {
        unsafe { SSL_set_verify(self.ssl, mode.bits(), SSL_get_verify_callback(self.ssl)) };
        Ok(())
    }
    /// Limits the depth of the certificate chain that will be verified.
    pub fn set_verify_depth(&self, depth: i32) -> Result<()> {
        unsafe { SSL_set_verify_depth(self.ssl, depth as c_int) };
        Ok(())
    }
    /// C trampoline installed via SSL_set_verify: recovers the `Engine` from
    /// the SSL app-data pointer and forwards to the boxed Rust closure.
    /// Returns 0 (= reject) whenever the context or app data is unavailable.
    extern "C" fn verify_callback(preverified: c_int, ctx: *mut X509_STORE_CTX) -> c_int {
        if !ctx.is_null() {
            unsafe {
                let ssl = X509_STORE_CTX_get_ex_data(ctx, SSL_get_ex_data_X509_STORE_CTX_idx()) as *mut SSL;
                if !ssl.is_null() {
                    let this = &*(SSL_get_app_data(ssl) as *const Self);
                    return (*this.verify_callback)(preverified != 0, SslVerifyContext::from_ptr(ctx)) as c_int;
                }
            }
        }
        0
    }
    /// Installs a Rust verify callback.
    /// NOTE(review): the raw `self` pointer is stored in SSL app data — if
    /// the Engine value is moved afterwards the pointer dangles; confirm the
    /// surrounding code pins the Engine for the SSL object's lifetime.
    pub fn set_verify_callback<F>(&mut self, callback: F) -> Result<()>
        where F: Fn(bool, &SslVerifyContext) -> bool + 'static
    {
        let user_data = self as *mut Self;
        self.verify_callback = Box::new(callback);
        unsafe {
            SSL_set_app_data(self.ssl, user_data as *mut c_void);
            SSL_set_verify(self.ssl, SSL_get_verify_mode(self.ssl), Some(Self::verify_callback));
        }
        Ok(())
    }
    /// Runs one SSL operation and classifies the outcome into a `Want` plus a
    /// result.  The branch ORDER is significant: hard errors first, then
    /// WANT_WRITE, then "produced output" (detected by comparing the external
    /// BIO's pending byte count before/after), then WANT_READ, then a peer
    /// close_notify, and finally plain success.
    fn perform(&self, op: fn(*mut SSL, *mut c_void, size_t) -> c_int, data: *mut c_void, len: size_t) -> (Want, Result<usize>) {
        let pending_output_before = unsafe { BIO_ctrl_pending(self.ext_bio) };
        // Clear OpenSSL's thread-local error queue so SSL_get_error reflects
        // only this call.
        clear_error();
        let res = op(self.ssl, data, len);
        let err = unsafe { SSL_get_error(self.ssl, res) };
        let pending_output_after = unsafe { BIO_ctrl_pending(self.ext_bio) };
        if err == SSL_ERROR_SSL {
            (Want::Nothing, Err(Error::last_ssl_error()))
        }
        else if err == SSL_ERROR_SYSCALL {
            (Want::Nothing, Err(Error::last_sys_error()))
        }
        else if err == SSL_ERROR_WANT_WRITE {
            (Want::OutputAndRetry, Ok(res as usize))
        }
        else if pending_output_after > pending_output_before {
            // Output was generated; retry only if the operation itself did
            // not complete (res <= 0).
            (if res > 0 { Want::Output } else { Want::OutputAndRetry }, Ok(res as usize))
        }
        else if err == SSL_ERROR_WANT_READ {
            (Want::InputAndRetry, Ok(res as usize))
        }
        else if unsafe { SSL_get_shutdown(self.ssl) } & SSL_RECEIVED_SHUTDOWN != 0 {
            // Peer sent close_notify: surface as end-of-file.
            (Want::Nothing, Err(eof().into()))
        }
        else {
            (Want::Nothing, Ok(res as usize))
        }
    }
    /// Drives one handshake step for the given role.
    pub fn handshake(&self, mode: Handshake) -> (Want, Result<usize>) {
        self.perform(match mode {
            Handshake::Client => do_connect,
            Handshake::Server => do_accept,
        }, ptr::null_mut(), 0)
    }
    /// Drives one close_notify step.
    pub fn shutdown(&self) -> (Want, Result<usize>) {
        self.perform(do_shutdown, ptr::null_mut(), 0)
    }
    /// Decrypts into `buf`; an empty buffer short-circuits to Ok(0).
    pub fn read(&self, buf: &mut [u8]) -> (Want, Result<usize>) {
        if buf.len() == 0 {
            (Want::Nothing, Ok(0))
        } else {
            self.perform(do_read, buf.as_ptr() as *mut c_void, buf.len())
        }
    }
    /// Encrypts `buf` into the engine; an empty buffer short-circuits to Ok(0).
    pub fn write(&self, buf: &[u8]) -> (Want, Result<usize>) {
        if buf.len() == 0 {
            (Want::Nothing, Ok(0))
        } else {
            self.perform(do_write, buf.as_ptr() as *const _ as *mut c_void, buf.len())
        }
    }
    /// Moves up to 4 KiB of pending ciphertext from the external BIO into
    /// `sbuf` for the caller to send.
    /// NOTE(review): a negative BIO_read return would wrap via `len as usize`
    /// in the commit below; confirm BIO_read cannot fail here.
    pub fn get_output(&self, sbuf: &mut StreamBuf) {
        let len = {
            let buf = sbuf.prepare(4096).unwrap();
            unsafe { BIO_read(self.ext_bio, buf.as_ptr() as *mut c_void, buf.len() as c_int) }
        };
        sbuf.commit(len as usize);
    }
    /// Feeds received ciphertext from `sbuf` into the external BIO and
    /// consumes however many bytes the BIO accepted.
    pub fn put_input(&self, sbuf: &mut StreamBuf) {
        let len = {
            let buf = sbuf.as_slice();
            unsafe { BIO_write(self.ext_bio, buf.as_ptr() as *const c_void, buf.len() as c_int) }
        };
        sbuf.consume(len as usize);
    }
    /// NOTE(review): queries BIO_wpending but discards the result — appears
    /// to be an unfinished port of Asio's map_error_code; confirm intent.
    pub fn map_error_code(&self) {
        unsafe { BIO_wpending(self.ext_bio) };
    }
}
impl Drop for Engine {
    /// Frees the external BIO chain and the SSL object.  The internal BIO of
    /// the pair is owned by the SSL object (via SSL_set_bio) and is released
    /// by SSL_free.
    fn drop(&mut self) {
        unsafe {
            BIO_free_all(self.ext_bio);
            SSL_free(self.ssl);
        }
    }
}
/// Completion-handler glue for async SSL operations: bundles the shared
/// stream state, the underlying transport, the SSL op to retry, and the
/// user's final handler.  Currently only referenced by the commented-out
/// async implementation below.
struct SslHandler<S, O, F> {
    imp: UnsafeRefCell<SslStreamImpl>,
    next_layer: UnsafeRefCell<S>,
    op: O,
    handler: F,
}
// impl<S, O, F> Handler<usize, io::Error> for SslHandler<S, O, F>
// where S: Stream<io::Error>,
// O: FnMut(&Engine) -> (Want, Result<usize>) + Send + 'static,
// F: Handler<usize, io::Error>,
// {
// type Output = F::Output;
// fn callback(self, io: &IoService, res: ::std::result::Result<usize, io::Error>) {
// // let SslHandler { mut imp, next_layer, mut op, handler } = self;
// // let mut imp = unsafe { imp.as_mut() };
// // let imp_clone = UnsafeRefCell::new(imp);
// // let next_layer = unsafe { next_layer.as_ref() };
// // match res {
// // Ok(start) if start == 1 => {
// // loop {
// // match op(&imp.engine) {
// // (Want::InputAndRetry, _) => {
// // if imp.input_buf.len() != 0 {
// // imp.engine.put_input(&mut imp.input_buf);
// // } else {
// // match imp.input_buf.prepare(4096) {
// // Ok(buf) => {
// // let handler = SslHandler {
// // imp: imp_clone,
// // next_layer: UnsafeRefCell::new(next_layer),
// // op: op,
// // handler: handler,
// // };
// // next_layer.async_read_some(buf, handler);
// // }
// // Err(err) => handler.callback(io, Err(err.into())),
// // }
// // return;
// // }
// // },
// // (Want::OutputAndRetry, _) | (Want::Output, _) => {
// // let handler = SslHandler {
// // imp: imp_clone,
// // next_layer: UnsafeRefCell::new(next_layer),
// // op: op,
// // handler: handler,
// // };
// // let len = imp.output_buf.len();
// // async_write_until(next_layer, &mut imp.output_buf, len, handler);
// // return;
// // },
// // _ => {
// // if start > 0 {
// // }
// // },
// // }
// // }
// // },
// // Ok(_) => {
// // },
// // Err(err) => return handler.callback(io, Err(err.into())),
// // }
// }
// fn wrap<G>(self, callback: G) -> Callback
// where G: FnOnce(&IoService, ErrCode, Self) + Send + 'static,
// {
// let SslHandler { imp, next_layer, op, handler } = self;
// handler.wrap(move |io, ec, handler| {
// callback(io, ec, SslHandler {
// imp: imp,
// next_layer: next_layer,
// op: op,
// handler: handler,
// })
// })
// }
// type AsyncResult = F::AsyncResult;
// fn async_result(&self) -> Self::AsyncResult {
// self.handler.async_result()
// }
// }
/// Shared state for an SSL stream: the OpenSSL engine plus the ciphertext
/// staging buffers (input = bytes received from the peer, output = bytes to
/// be sent to the peer).
struct SslStreamImpl {
    engine: Engine,
    input_buf: StreamBuf,
    output_buf: StreamBuf,
}
impl SslStreamImpl {
    /// Builds the engine and empty staging buffers for a new stream.
    fn new(ctx: &SslContext) -> Result<SslStreamImpl> {
        Ok(SslStreamImpl {
            engine: try!(Engine::new(ctx)),
            input_buf: StreamBuf::new(),
            output_buf: StreamBuf::new(),
        })
    }
    /// Synchronous drive loop: repeatedly runs `op` on the engine and
    /// transfers ciphertext over `next_layer` until the op completes.
    /// NOTE(review): the loop body is commented out, so this currently
    /// always returns Ok(0) — the sync path is effectively a stub.
    fn io_mut<S, F>(&mut self, next_layer: &S, mut op: F) -> Result<usize>
        where S: Stream<io::Error>,
              F: FnMut(&Engine) -> (Want, Result<usize>),
    {
        // loop {
        //     match op(&self.engine) {
        //         (Want::InputAndRetry, _) => {
        //             if self.input_buf.len() == 0 {
        //                 let len = try!(next_layer.read_some(try!(self.input_buf.prepare(4096))));
        //                 self.input_buf.commit(len);
        //             }
        //             self.engine.put_input(&mut self.input_buf);
        //         },
        //         (Want::OutputAndRetry, _) => {
        //             self.engine.get_output(&mut self.output_buf);
        //             let len = self.output_buf.len();
        //             if len > 0 {
        //                 try!(write_until(next_layer, &mut self.output_buf, len));
        //             }
        //         },
        //         (Want::Output, res) => {
        //             self.engine.get_output(&mut self.output_buf);
        //             let len = self.output_buf.len();
        //             try!(write_until(next_layer, &mut self.output_buf, len));
        //             return res;
        //         },
        //         (_, res) => if let Ok(len) = res {
        //             return Ok(len);
        //         },
        //     }
        // }
        return Ok(0)
    }
    /// Shared-reference wrapper over `io_mut`.
    /// NOTE(review): casts &self to &mut Self — mutable aliasing through a
    /// shared reference is UB if two callers overlap; presumably relies on
    /// external serialization. Confirm before keeping this pattern.
    fn io<S, O>(&self, next_layer: &S, op: O) -> Result<usize>
        where S: Stream<io::Error>,
              O: FnMut(&Engine) -> (Want, Result<usize>),
    {
        unsafe { &mut *(self as *const _ as *mut Self) }.io_mut(next_layer, op)
    }
}
// SAFETY: NOTE(review) — SslStreamImpl holds raw OpenSSL pointers; these
// impls assert cross-thread use is sound. Presumably upheld by higher-level
// serialization of stream operations; confirm, since OpenSSL objects are not
// inherently thread-safe.
unsafe impl Send for SslStreamImpl {
}
unsafe impl Sync for SslStreamImpl {
}
/// TLS stream layered over an arbitrary transport `S`.  Keeps a clone of the
/// originating context alive for as long as the stream exists.
pub struct SslStream<S> {
    soc: S,
    core: SslStreamImpl,
    _ctx: SslContext,
}
impl<S: Stream<io::Error>> SslStream<S> {
pub fn new(soc: S, ctx: &SslContext) -> Result<SslStream<S>> {
let core = try!(SslStreamImpl::new(ctx));
Ok(SslStream {
soc: soc,
core: core,
_ctx: ctx.clone(),
})
}
pub fn async_handshake(&self) {
}
pub fn async_shutdown(&self) {
}
    /// Runs the TLS handshake for the given role (client/server) to
    /// completion over the underlying transport, discarding the byte count
    /// from the drive loop.
    pub fn handshake(&self, mode: Handshake) -> Result<()> {
        match self.core.io(&self.soc, move |eng| eng.handshake(mode)) {
            Ok(_) => Ok(()),
            Err(err) => Err(err.into()),
        }
    }
    /// Borrow of the wrapped transport (e.g. the TCP socket underneath TLS).
    pub fn next_layer(&self) -> &S {
        &self.soc
    }
pub fn set_verify_callback<F>(&mut self, callback: F) -> Result<()>
where F: Fn(bool, &SslVerifyContext) -> bool + 'static
{<|fim▁hole|>
    /// Limits the certificate-chain depth accepted during verification.
    pub fn set_verify_depth(&self, depth: i32) -> Result<()> {
        self.core.engine.set_verify_depth(depth)
    }
    /// Sets the peer-verification mode on the underlying engine.
    pub fn set_verify_mode(&self, mode: SslVerifyMode) -> Result<()> {
        self.core.engine.set_verify_mode(mode)
    }
    /// Performs the TLS close_notify exchange over the underlying transport,
    /// discarding the byte count from the drive loop.
    pub fn shutdown(&mut self) -> Result<()> {
        match self.core.io(&self.soc, |eng| eng.shutdown()) {
            Ok(_) => Ok(()),
            Err(err) => Err(err.into()),
        }
    }
}
unsafe impl<S: Stream<io::Error>> AsIoContext for SslStream<S> {
    /// Delegates to the wrapped transport's I/O context.
    fn as_ctx(&self) -> &IoContext {
        self.soc.as_ctx()
    }
}
// impl<S: Stream<io::Error>> Stream<Error> for SslStream<S> {
// fn async_read_some<F: Handler<usize, Error>>(&self, buf: &mut [u8], handler: F) -> F::Output {
// handler.async_result().get(self.io_service())
// }
// fn async_write_some<F: Handler<usize, Error>>(&self, buf: &[u8], handler: F) -> F::Output {
// handler.async_result().get(self.io_service())
// }
// fn read_some(&self, buf: &mut [u8]) -> Result<usize> {
// self.core.io(&self.soc, |eng| eng.read(buf))
// }
// fn write_some(&self, buf: &[u8]) -> Result<usize> {
// self.core.io(&self.soc, |eng| eng.write(buf))
// }
// }<|fim▁end|>
|
self.core.engine.set_verify_callback(callback)
}
|
<|file_name|>uint64.js<|end_file_name|><|fim▁begin|>export const plus = (f, l) => {
let next = {};
if (typeof l === 'number') {
next.low = l;
next.high = 0;
} else if (typeof l === 'object') {
if (l.high && l.low && l.unsigned) {
next = l;
} else {
throw new Error('Not a uint64 data');
}
}
return {
high: f.high + next.high,
low: f.low + next.low,
unsigned: true
};
};
export const generateKeyString = (uint64Object) => {
if (typeof uint64Object === 'number') {
return uint64Object.toString();
}
if (typeof uint64Object === 'object' && typeof uint64Object.high === 'number') {<|fim▁hole|> }
return Symbol(uint64Object).toString();
};<|fim▁end|>
|
return `${uint64Object.high}${uint64Object.low}`;
|
<|file_name|>mongo08.js<|end_file_name|><|fim▁begin|>db = connect(mserver);<|fim▁hole|>db.schedev.ensureIndex({expired : 1},{expireAfterSeconds : 120});<|fim▁end|>
|
db = db.getSiblingDB('kynetx');
db.schedev.ensureIndex({cron_id : 1});
db.schedev.ensureIndex({ken : 1});
|
<|file_name|>net.go<|end_file_name|><|fim▁begin|>// Copyright 2014 Google Inc. All rights reserved.
// Use of this source code is governed by the Apache 2.0
// license that can be found in the LICENSE file.
package internal
// This file implements a network dialer that limits the number of concurrent connections.
// It is only used for API calls.
import (
"log"
"net"
"runtime"
"sync"
"time"
)
var limitSem = make(chan int, 100) // TODO(dsymonds): Use environment variable.
// limitRelease returns one slot to limitSem without blocking; it is the
// counterpart of the acquire in limitDial. The default branch guards
// against a double release, which would indicate a bug elsewhere.
func limitRelease() {
	// non-blocking
	select {
	case <-limitSem:
	default:
		// This should not normally happen.
		log.Print("appengine: unbalanced limitSem release!")
	}
}
func limitDial(network, addr string) (net.Conn, error) {
limitSem <- 1
// Dial with a timeout in case the API host is MIA.
// The connection should normally be very fast.
conn, err := net.DialTimeout(network, addr, 500*time.Millisecond)
if err != nil {
limitRelease()
return nil, err<|fim▁hole|>}
// limitConn wraps a net.Conn so that closing it releases the connection
// slot exactly once (enforced by the sync.Once), whether Close is called
// explicitly or via the finalizer set in limitDial.
type limitConn struct {
	close sync.Once
	net.Conn
}
// Close closes the wrapped connection and releases the semaphore slot.
// The Once guarantees at-most-once release even under repeated Close calls,
// and the finalizer is cleared so it cannot fire after an explicit Close.
func (lc *limitConn) Close() error {
	defer lc.close.Do(func() {
		limitRelease()
		runtime.SetFinalizer(lc, nil)
	})
	return lc.Conn.Close()
}
|
}
lc := &limitConn{Conn: conn}
runtime.SetFinalizer(lc, (*limitConn).Close) // shouldn't usually be required
return lc, nil
|
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>#coding=utf-8
import os
# Basic settings
# requests settings
TIMEOUT = 5
VERIFY = False
# directories might be used
LOCATIONS = {
'log': 'log',
'data': 'data',
}<|fim▁hole|># stderr is redirected to this file
ERR_LOG_FILE = os.path.join(LOCATIONS['log'], 'err.log')
# log in this file
LOGGING_FILE = os.path.join(LOCATIONS['log'], 'requests.log')
STATION_NAME_FILE = os.path.join(LOCATIONS['data'], 'station_name.js')
CAPTCHA_FILE = os.path.join(LOCATIONS['data'], 'captcha.png')
CRYPTO_JS = os.path.join(LOCATIONS['data'], 'crypto.js')
CRYPTO_SCRIPT = os.path.join(LOCATIONS['data'], 'do_crypto.js')
# Query settings
QUERY_INTERVAL = 1
QUERY_ARGS_NS = 'leftTicketDTO'
TRAIN_DATA_JSON_KEY = 'queryLeftNewDTO'
LOGIN_NS = 'loginUserDTO'
USER_NS = 'userDTO'
PURPOSE_CODES = {'学生': '0X00', '普通': 'ADULT'}
PURPOSE_ID = {'0X00': 3, '学生': 3, 'ADULT': 1, '普通': 1}
SEAT_CODES = {
'商务座': 'swz',
'特等座': 'tz',
'一等座': 'zy',
'二等座': 'ze',
'高级软卧': 'gr',
'软卧': 'rw',
'硬卧': 'yw',
'软座': 'rz',
'硬座': 'yz',
'无座': 'wz',
'其他': 'qt',
}
SEAT_ID = {
'SWZ': '9',
'TZ': 'P',
'ZY': 'M',
'ZE': 'O',
'GR': '6',
'RW': '4',
'YW': '3',
'RZ': '2',
'YZ': '1',
'WZ': 'WZ',
'QT': '',
}
URL_BASE = 'https://kyfw.12306.cn/'
URLS = {
'entry': URL_BASE + 'otn/',
'station_name': URL_BASE + 'otn/resources/js/framework/station_name.js?station_version=1.8260',
'query': URL_BASE + 'otn/leftTicket/queryT',
'query_log': URL_BASE + 'otn/leftTicket/log',
'login_captcha': URL_BASE + 'otn/passcodeNew/getPassCodeNew?module=login&rand=sjrand',
'order_captcha': URL_BASE + 'otn/passcodeNew/getPassCodeNew?module=passenger&rand=randp',
'check_captcha': URL_BASE + 'otn/passcodeNew/checkRandCodeAnsyn',
'login_token': URL_BASE + 'otn/login/init',
'order_init_token': URL_BASE + 'otn/leftTicket/init',
'login': URL_BASE + 'otn/login/loginAysnSuggest',
'check_login': URL_BASE + 'otn/login/checkUser',
'passengers': URL_BASE + 'otn/confirmPassenger/getPassengerDTOs',
'order_init_submit': URL_BASE + 'otn/leftTicket/submitOrderRequest',
'order_confirm': URL_BASE + 'otn/confirmPassenger/initDc',
'order_check': URL_BASE + 'otn/confirmPassenger/checkOrderInfo',
}
# 3rd party tools settings
# Setup for settings
import socket
if socket.gethostname() in ['duankq-ThinkPad-X201', ]:
DEBUG = True
else:
DEBUG = False
import os
for loc in LOCATIONS.values():
if not os.path.isdir(loc):
os.mkdir(loc)
for (k, v) in SEAT_CODES.iteritems():
SEAT_ID[k] = SEAT_ID[v.upper()]
SEAT_ID[v] = SEAT_ID[v.upper()]<|fim▁end|>
| |
<|file_name|>ArgumentDeclaration.ts<|end_file_name|><|fim▁begin|>// Copyright (C) 2015, 2017 Simon Mika <[email protected]>
//
// This file is part of SysPL.
//
// SysPL is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// SysPL is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with SysPL. If not, see <http://www.gnu.org/licenses/>.<|fim▁hole|>import * as Tokens from "../Tokens"
import * as Type from "./Type"
import * as SyntaxTree from "../SyntaxTree"
// Parses one argument declaration: "x", "x: Type", or the syntactic sugar
// forms ".argument" / "=argument" whose types are resolved later.
// Returns undefined when the next token starts none of these forms.
export function parse(source: Source): SyntaxTree.ArgumentDeclaration | undefined {
	let result: SyntaxTree.ArgumentDeclaration | undefined
	if (source.peek()!.isIdentifier()) {
		//
		// handles cases "x" and "x: Type"
		//
		const symbol = (source.fetch() as Tokens.Identifier).name
		const type = Type.tryParse(source)
		result = new SyntaxTree.ArgumentDeclaration(symbol, type, source.mark())
	} else if (source.peek()!.isOperator("=") || source.peek()!.isSeparator(".")) {
		//
		// Handles syntactic sugar cases ".argument" and "=argument"
		// The type of the argument will have to be resolved later
		//
		source.fetch() // consume "=" or "."
		// NOTE(review): the following token is cast to Tokens.Identifier
		// without a check — confirm the scanner guarantees an identifier
		// follows "=" / "." here.
		result = new SyntaxTree.ArgumentDeclaration((source.fetch() as Tokens.Identifier).name, undefined, source.mark())
	}
	return result
}
export function parseAll(source: Source): SyntaxTree.ArgumentDeclaration[] {
const result: SyntaxTree.ArgumentDeclaration[] = []
if (source.peek()!.isSeparator("(")) {
do {
source.fetch() // consume: ( or ,
result.push(parse(source.clone())!)
} while (source.peek()!.isSeparator(","))
if (!source.fetch()!.isSeparator(")"))
source.raise("Expected \")\"")
}
return result
}<|fim▁end|>
|
//
import { Source } from "./Source"
|
<|file_name|>xboxdrv_g_controller.cpp<|end_file_name|><|fim▁begin|>/*
** Xbox360 USB Gamepad Userspace Driver
** Copyright (C) 2011 Ingo Ruhnke <[email protected]>
**
** This program is free software: you can redistribute it and/or modify
** it under the terms of the GNU General Public License as published by
** the Free Software Foundation, either version 3 of the License, or
** (at your option) any later version.
**
** This program is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
** GNU General Public License for more details.
**
** You should have received a copy of the GNU General Public License
** along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "xboxdrv_g_controller.hpp"
#include "controller.hpp"
#include "controller_slot.hpp"
#include "controller_thread.hpp"
#include "uinput_message_processor.hpp"
#include "log.hpp"
#define XBOXDRV_CONTROLLER_ERROR xboxdrv_controller_error_quark()
#define XBOXDRV_CONTROLLER_ERROR_FAILED 0
/** GError domain quark for controller D-Bus methods; the static string is
    interned once and reused on subsequent calls. */
GQuark
xboxdrv_controller_error_quark()
{
  return g_quark_from_static_string("xboxdrv-controller-error-quark");
}
/* will create xboxdrv_g_controller_get_type and set xboxdrv_g_controller_parent_class */
G_DEFINE_TYPE(XboxdrvGController, xboxdrv_g_controller, G_TYPE_OBJECT)
/** GObject constructor override; adds no behavior, only chains up. */
static GObject*
xboxdrv_g_controller_constructor(GType                  gtype,
                                 guint                  n_properties,
                                 GObjectConstructParam* properties)
{
  // Always chain up to the parent constructor
  GObjectClass* parent_class = G_OBJECT_CLASS(xboxdrv_g_controller_parent_class);
  return parent_class->constructor(gtype, n_properties, properties);
}
/** Class initializer: installs the (pass-through) constructor override. */
static void
xboxdrv_g_controller_class_init(XboxdrvGControllerClass* klass)
{
  GObjectClass* gobject_class = G_OBJECT_CLASS(klass);
  gobject_class->constructor = xboxdrv_g_controller_constructor;
}
/** Instance initializer: no controller slot attached yet. */
static void
xboxdrv_g_controller_init(XboxdrvGController* self)
{
  self->controller = NULL;
}
/** Creates a GObject wrapper bound to the given controller slot.
    The slot pointer is stored as-is; the caller keeps ownership. */
XboxdrvGController*
xboxdrv_g_controller_new(ControllerSlot* controller)
{
  XboxdrvGController* self = static_cast<XboxdrvGController*>(g_object_new(XBOXDRV_TYPE_G_CONTROLLER, NULL));
  self->controller = controller;
  return self;
}
/** D-Bus method: set the controller's LED state.
 *
 *  @param self    wrapper holding the target controller slot
 *  @param status  LED pattern/status code forwarded to the controller
 *  @param error   set (controller-error domain) when no controller is attached
 *  @return TRUE on success, FALSE with @error set otherwise
 */
gboolean
xboxdrv_g_controller_set_led(XboxdrvGController* self, int status, GError** error)
{
  log_info("D-Bus: xboxdrv_g_controller_set_led(" << self << ", " << status << ")");

  if (self->controller &&
      self->controller->get_controller())
  {
    self->controller->get_controller()->set_led(status);
    return TRUE;
  }
  else
  {
    // fixed typo in the user-visible D-Bus error message ("could't")
    g_set_error(error, XBOXDRV_CONTROLLER_ERROR, XBOXDRV_CONTROLLER_ERROR_FAILED,
                "couldn't access controller");
    return FALSE;
  }
}
/** D-Bus method: set the controller's rumble strength.
 *
 *  @param self    wrapper holding the target controller slot
 *  @param strong  strength of the strong (low-frequency) motor
 *  @param weak    strength of the weak (high-frequency) motor
 *  @param error   set (controller-error domain) when no controller is attached
 *  @return TRUE on success, FALSE with @error set otherwise
 */
gboolean
xboxdrv_g_controller_set_rumble(XboxdrvGController* self, int strong, int weak, GError** error)
{
  log_info("D-Bus: xboxdrv_g_controller_set_rumble(" << self << ", " << strong << ", " << weak << ")");

  if (self->controller &&
      self->controller->get_controller())
  {
    self->controller->get_controller()->set_rumble(strong, weak);
    return TRUE;
  }
  else
  {
    // fixed typo in the user-visible D-Bus error message ("could't")
    g_set_error(error, XBOXDRV_CONTROLLER_ERROR, XBOXDRV_CONTROLLER_ERROR_FAILED,
                "couldn't access controller");
    return FALSE;
  }
}
gboolean
xboxdrv_g_controller_set_config(XboxdrvGController* self, int config_num, GError** error)
{
log_info("D-Bus: xboxdrv_g_controller_set_config(" << self << ", " << config_num << ")");
if (self->controller &&
self->controller->get_thread() &&
self->controller->get_thread()->get_controller())
{
MessageProcessor* gen_msg_proc = self->controller->get_thread()->get_message_proc();
UInputMessageProcessor* msg_proc = dynamic_cast<UInputMessageProcessor*>(gen_msg_proc);
try
{
msg_proc->set_config(config_num);
return TRUE;
}
catch(const std::exception& err)
{
g_set_error(error, XBOXDRV_CONTROLLER_ERROR, XBOXDRV_CONTROLLER_ERROR_FAILED,
"%s", err.what());
return FALSE;
}
}
else
{
g_set_error(error, XBOXDRV_CONTROLLER_ERROR, XBOXDRV_CONTROLLER_ERROR_FAILED,
"could't access controller");
return FALSE;
}<|fim▁hole|><|fim▁end|>
|
}
/* EOF */
|
<|file_name|>rbbirb.cpp<|end_file_name|><|fim▁begin|>// © 2016 and later: Unicode, Inc. and others.
// License & terms of use: http://www.unicode.org/copyright.html
//
// file: rbbirb.cpp
//
// Copyright (C) 2002-2011, International Business Machines Corporation and others.
// All Rights Reserved.
//
// This file contains the RBBIRuleBuilder class implementation. This is the main class for
// building (compiling) break rules into the tables required by the runtime
// RBBI engine.
//
#include "unicode/utypes.h"
#if !UCONFIG_NO_BREAK_ITERATION
#include "unicode/brkiter.h"
#include "unicode/rbbi.h"
#include "unicode/ubrk.h"
#include "unicode/unistr.h"
#include "unicode/uniset.h"
#include "unicode/uchar.h"
#include "unicode/uchriter.h"
#include "unicode/parsepos.h"
#include "unicode/parseerr.h"
#include "cmemory.h"
#include "cstring.h"
#include "rbbirb.h"
#include "rbbinode.h"
#include "rbbiscan.h"
#include "rbbisetb.h"
#include "rbbitblb.h"
#include "rbbidata.h"
#include "uassert.h"
U_NAMESPACE_BEGIN
//----------------------------------------------------------------------------------------
//
// Constructor.
//
//----------------------------------------------------------------------------------------
// Constructor: records the rule source, zeroes all owned pointers so the
// destructor is safe on any partially-constructed instance, then allocates
// the scanner, set builder and status vectors. On entry failure (or a
// pre-set error in `status`) it returns early with everything NULL.
RBBIRuleBuilder::RBBIRuleBuilder(const UnicodeString   &rules,
                                       UParseError     *parseErr,
                                       UErrorCode      &status)
 : fRules(rules), fStrippedRules(rules)
{
    fStatus = &status; // status is checked below
    fParseError = parseErr;
    fDebugEnv   = NULL;
#ifdef RBBI_DEBUG
    fDebugEnv   = getenv("U_RBBIDEBUG");
#endif
    fForwardTree        = NULL;
    fReverseTree        = NULL;
    fSafeFwdTree        = NULL;
    fSafeRevTree        = NULL;
    fDefaultTree        = &fForwardTree;
    fForwardTable       = NULL;
    fRuleStatusVals     = NULL;
    fChainRules         = FALSE;
    fLBCMNoChain        = FALSE;
    fLookAheadHardBreak = FALSE;
    fUSetNodes          = NULL;
    fRuleStatusVals     = NULL;
    fScanner            = NULL;
    fSetBuilder         = NULL;
    if (parseErr) {
        uprv_memset(parseErr, 0, sizeof(UParseError));
    }

    if (U_FAILURE(status)) {
        return;
    }

    fUSetNodes          = new UVector(status); // bcos status gets overwritten here
    fRuleStatusVals     = new UVector(status);
    fScanner            = new RBBIRuleScanner(this);
    fSetBuilder         = new RBBISetBuilder(this);
    if (U_FAILURE(status)) {
        return;
    }

    // new() does not throw in this codebase; detect allocation failure by
    // checking the pointers and report it through the status code.
    if(fSetBuilder == 0 || fScanner == 0 || fUSetNodes == 0 || fRuleStatusVals == 0) {
        status = U_MEMORY_ALLOCATION_ERROR;
    }
}
//----------------------------------------------------------------------------------------
//
// Destructor
//
//----------------------------------------------------------------------------------------
// Destructor. The RBBINodes stored in fUSetNodes are owned here, so they are
// deleted individually (the vector stores bare pointers) before the vector
// itself; the remaining members are simple owned pointers.
RBBIRuleBuilder::~RBBIRuleBuilder() {

    int        i;
    for (i=0; ; i++) {
        // elementAt returns NULL past the end — used as the loop terminator.
        RBBINode *n = (RBBINode *)fUSetNodes->elementAt(i);
        if (n==NULL) {
            break;
        }
        delete n;
    }
    delete fUSetNodes;
    delete fSetBuilder;
    delete fForwardTable;
    delete fForwardTree;
    delete fReverseTree;
    delete fSafeFwdTree;
    delete fSafeRevTree;
    delete fScanner;
    delete fRuleStatusVals;
}
//----------------------------------------------------------------------------------------
//
// flattenData() - Collect up the compiled RBBI rule data and put it into
// the format for saving in ICU data files,
// which is also the format needed by the RBBI runtime engine.
//
//----------------------------------------------------------------------------------------
// Round i up to the next multiple of 8; used to pad data-section sizes for
// memory alignment in the flattened rule image.
static int32_t align8(int32_t i) {
    return (i + 7) & ~7;
}
RBBIDataHeader *RBBIRuleBuilder::flattenData() {
int32_t i;
if (U_FAILURE(*fStatus)) {
return NULL;<|fim▁hole|> fStrippedRules = fScanner->stripRules(fStrippedRules);
// Calculate the size of each section in the data.
// Sizes here are padded up to a multiple of 8 for better memory alignment.
// Sections sizes actually stored in the header are for the actual data
// without the padding.
//
int32_t headerSize = align8(sizeof(RBBIDataHeader));
int32_t forwardTableSize = align8(fForwardTable->getTableSize());
int32_t reverseTableSize = align8(fForwardTable->getSafeTableSize());
int32_t trieSize = align8(fSetBuilder->getTrieSize());
int32_t statusTableSize = align8(fRuleStatusVals->size() * sizeof(int32_t));
int32_t rulesSize = align8((fStrippedRules.length()+1) * sizeof(UChar));
int32_t totalSize = headerSize
+ forwardTableSize
+ reverseTableSize
+ statusTableSize + trieSize + rulesSize;
RBBIDataHeader *data = (RBBIDataHeader *)uprv_malloc(totalSize);
if (data == NULL) {
*fStatus = U_MEMORY_ALLOCATION_ERROR;
return NULL;
}
uprv_memset(data, 0, totalSize);
data->fMagic = 0xb1a0;
data->fFormatVersion[0] = RBBI_DATA_FORMAT_VERSION[0];
data->fFormatVersion[1] = RBBI_DATA_FORMAT_VERSION[1];
data->fFormatVersion[2] = RBBI_DATA_FORMAT_VERSION[2];
data->fFormatVersion[3] = RBBI_DATA_FORMAT_VERSION[3];
data->fLength = totalSize;
data->fCatCount = fSetBuilder->getNumCharCategories();
data->fFTable = headerSize;
data->fFTableLen = forwardTableSize;
data->fRTable = data->fFTable + data->fFTableLen;
data->fRTableLen = reverseTableSize;
data->fTrie = data->fRTable + data->fRTableLen;
data->fTrieLen = fSetBuilder->getTrieSize();
data->fStatusTable = data->fTrie + trieSize;
data->fStatusTableLen= statusTableSize;
data->fRuleSource = data->fStatusTable + statusTableSize;
data->fRuleSourceLen = fStrippedRules.length() * sizeof(UChar);
uprv_memset(data->fReserved, 0, sizeof(data->fReserved));
fForwardTable->exportTable((uint8_t *)data + data->fFTable);
fForwardTable->exportSafeTable((uint8_t *)data + data->fRTable);
fSetBuilder->serializeTrie ((uint8_t *)data + data->fTrie);
int32_t *ruleStatusTable = (int32_t *)((uint8_t *)data + data->fStatusTable);
for (i=0; i<fRuleStatusVals->size(); i++) {
ruleStatusTable[i] = fRuleStatusVals->elementAti(i);
}
fStrippedRules.extract((UChar *)((uint8_t *)data+data->fRuleSource), rulesSize/2+1, *fStatus);
return data;
}
//----------------------------------------------------------------------------------------
//
// createRuleBasedBreakIterator construct from source rules that are passed in
// in a UnicodeString
//
//----------------------------------------------------------------------------------------
// Compiles `rules` into the runtime table format and wraps the result in a
// RuleBasedBreakIterator. Ownership of the flattened data image passes to
// the iterator on success. Returns NULL with `status` set on any failure.
BreakIterator *
RBBIRuleBuilder::createRuleBasedBreakIterator( const UnicodeString    &rules,
                    UParseError      *parseError,
                    UErrorCode       &status)
{
    //
    // Read the input rules, generate a parse tree, symbol table,
    // and list of all Unicode Sets referenced by the rules.
    //
    RBBIRuleBuilder  builder(rules, parseError, status);
    if (U_FAILURE(status)) { // status checked here bcos build below doesn't
        return NULL;
    }

    RBBIDataHeader *data = builder.build(status);

    if (U_FAILURE(status)) {
        return nullptr;
    }

    //
    // Create a break iterator from the compiled rules.
    //     (Identical to creation from stored pre-compiled rules)
    //
    // status is checked after init in construction.
    RuleBasedBreakIterator *This = new RuleBasedBreakIterator(data, status);
    if (U_FAILURE(status)) {
        delete This;
        This = NULL;
    }
    else if(This == NULL) { // test for NULL
        status = U_MEMORY_ALLOCATION_ERROR;
    }
    return This;
}
// Drives the full compilation pipeline: parse the rules, build the
// character-category ranges and TRIE, construct and optimize the forward
// DFA plus the safe-reverse table, and flatten everything into the
// run-time memory image. Caller owns the returned buffer (uprv_malloc'd);
// returns nullptr with `status` set on failure.
RBBIDataHeader *RBBIRuleBuilder::build(UErrorCode &status) {
    if (U_FAILURE(status)) {
        return nullptr;
    }

    fScanner->parse();
    if (U_FAILURE(status)) {
        return nullptr;
    }

    //
    // UnicodeSet processing.
    //    Munge the Unicode Sets to create a set of character categories.
    //    Generate the mapping tables (TRIE) from input code points to
    //    the character categories.
    //
    fSetBuilder->buildRanges();

    //
    //   Generate the DFA state transition table.
    //
    fForwardTable = new RBBITableBuilder(this, &fForwardTree, status);
    if (fForwardTable == nullptr) {
        status = U_MEMORY_ALLOCATION_ERROR;
        return nullptr;
    }

    fForwardTable->buildForwardTable();
    // Merge duplicate classes/states before building the safe table so the
    // reverse table is derived from the minimized forward table.
    optimizeTables();
    fForwardTable->buildSafeReverseTable(status);


#ifdef RBBI_DEBUG
    if (fDebugEnv && uprv_strstr(fDebugEnv, "states")) {
        fForwardTable->printStates();
        fForwardTable->printRuleStatusTable();
        fForwardTable->printReverseTable();
    }
#endif

    //    Generate the mapping tables (TRIE) from input code points to
    //    the character categories.
    //
    fSetBuilder->buildTrie();

    //
    //   Package up the compiled data into a memory image
    //      in the run-time format.
    //
    RBBIDataHeader *data = flattenData(); // returns NULL if error
    if (U_FAILURE(status)) {
        return nullptr;
    }
    return data;
}
// Shrinks the compiled tables: repeatedly merges pairs of character classes
// with identical transition columns (updating the set builder to match),
// then removes duplicate DFA states.
void RBBIRuleBuilder::optimizeTables() {

    // Begin looking for duplicates with char class 3.
    // Classes 0, 1 and 2 are special; they are unused, {bof} and {eof} respectively,
    // and should not have other categories merged into them.
    IntPair duplPair = {3, 0};
    while (fForwardTable->findDuplCharClassFrom(&duplPair)) {
        fSetBuilder->mergeCategories(duplPair);
        fForwardTable->removeColumn(duplPair.second);
    }

    fForwardTable->removeDuplicateStates();
}
#endif /* #if !UCONFIG_NO_BREAK_ITERATION */<|fim▁end|>
|
}
// Remove whitespace from the rules to make it smaller.
// The rule parser has already removed comments.
|
<|file_name|>VirtualScreen.py<|end_file_name|><|fim▁begin|>import math
class VirtualScreen:  # the screen plane is normal to the Leap Motion's Z axis
    """Virtual interaction screen placed in front of a Leap Motion controller.

    All coordinates are millimetres in the Leap Motion axis system.  The
    screen is an axis-aligned rectangle divided into a 2x3 grid of zones,
    numbered 1..6 from the top-left corner, row by row.
    """

    def __init__(self, Xoffset=0, Yoffset=50, Zoffset=-50, Zlimit=220, length=350, height=300):
        # (Xoffset, Yoffset, Zoffset): position of the middle of the screen's
        # bottom edge relative to the Leap Motion origin (mm).
        # Fix: Xoffset/Yoffset were previously never stored, which made
        # self.Center (below) and distanceFromScreen() raise AttributeError.
        self.Xoffset = Xoffset
        self.Yoffset = Yoffset
        self.Zoffset = Zoffset
        self.Zlimit = Zlimit  # depth of the active zone in front of the screen
        self.length = length
        self.height = height
        self.UpperLeftCorner = [Xoffset - length / float(2), Yoffset + height]
        self.Center = [self.Xoffset, self.Yoffset + 0.5 * self.height, Zoffset + 0.5 * Zlimit]
        # Upper-left corner of each of the 6 zones (2 rows x 3 columns).
        self.zoneUpperLeftCornerArray = []
        self.zoneHeight = height / float(2)
        self.zoneLength = length / float(3)
        for i in range(0, 2):
            for j in range(0, 3):
                self.zoneUpperLeftCornerArray.append(
                    [self.UpperLeftCorner[0] + self.zoneLength * j,
                     self.UpperLeftCorner[1] - self.zoneHeight * i])

    def distanceFromScreen(self, position):
        """Euclidean distance (mm) from a 3D point to the active box; 0 inside."""
        dX = max(max(position[0] - (self.Xoffset + self.length / float(2)), 0),
                 max(self.Xoffset - self.length / float(2) - position[0], 0))
        dY = max(max(position[1] - (self.Yoffset + self.height), 0),
                 max(self.Yoffset - position[1], 0))
        dZ = max(max(self.Zoffset - position[2], 0),
                 max(position[2] - (self.Zlimit + self.Zoffset), 0))
        return math.sqrt(dX ** 2 + dY ** 2 + dZ ** 2)

    def isFacingTheScreen(self, position):
        """Return True if a 3D point [x, y, z] (mm, Leap axes) lies inside the active box."""
        isXvalid = (position[0] <= self.Xoffset + self.length / float(2)) and \
                   (position[0] >= self.Xoffset - self.length / float(2))
        isYvalid = (position[1] <= self.Yoffset + self.height) and (position[1] >= self.Yoffset)
        isZvalid = (position[2] >= self.Zoffset) and (position[2] <= self.Zlimit + self.Zoffset)
        return isXvalid and isYvalid and isZvalid

    def getScreenZonePointedAt(self, position, direction):
        """Project a ray (position, direction) onto the screen plane.

        Returns the zone number (1..6) hit, or -1 when the position is not
        inside the active box (or the projected point misses all zones).
        """
        if not self.isFacingTheScreen(position):
            return -1
        else:
            # Intersect the ray with the plane z == Zoffset.
            lambdaIntersection = (self.Zoffset - position[2]) / direction[2]
            xIntersection = position[0] + lambdaIntersection * direction[0]
            yIntersection = position[1] + lambdaIntersection * direction[1]
            intersection = [xIntersection, yIntersection]
            return self.getScreenZoneFromPointOnScreen(intersection)

    def getScreenZoneFromPointOnScreen(self, onScreenPosition):
        """Map a 2D on-screen point [x, y] to its zone number (1..6); -1 if outside."""
        for index, corner in enumerate(self.zoneUpperLeftCornerArray):
            if (onScreenPosition[0] >= corner[0]
                    and onScreenPosition[0] < corner[0] + self.zoneLength
                    and onScreenPosition[1] <= corner[1]
                    and onScreenPosition[1] >= corner[1] - self.zoneHeight):
                return index + 1
return -1<|fim▁end|>
|
self.Xoffset = Xoffset; # position du milieu du bord bas de l'ecran par rapport au centre du Leap
self.Yoffset = Yoffset; # position du milieu du bord bas de l'ecran par rapport au centre du Leap
|
<|file_name|>A.py<|end_file_name|><|fim▁begin|><|fim▁hole|>if T%2==0:
print T/2
else:
print ((T-1)/2)-T<|fim▁end|>
|
T = input()
|
<|file_name|>traffic-control-layer.cc<|end_file_name|><|fim▁begin|>/* -*- Mode:C++; c-file-style:"gnu"; indent-tabs-mode:nil; -*- */
/*
* Copyright (c) 2015 Natale Patriciello <[email protected]>
* 2016 Stefano Avallone <[email protected]>
*
* This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation;
 *
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "traffic-control-layer.h"
#include "ns3/log.h"
#include "ns3/object-vector.h"
#include "ns3/packet.h"
#include "ns3/queue-disc.h"
namespace ns3 {
NS_LOG_COMPONENT_DEFINE ("TrafficControlLayer");
NS_OBJECT_ENSURE_REGISTERED (TrafficControlLayer);
// Registers the TypeId and exposes the per-device root queue discs as an
// ObjectVector attribute ("RootQueueDiscList"), indexed by device index.
TypeId
TrafficControlLayer::GetTypeId (void)
{
  static TypeId tid = TypeId ("ns3::TrafficControlLayer")
    .SetParent<Object> ()
    .SetGroupName ("TrafficControl")
    .AddConstructor<TrafficControlLayer> ()
    .AddAttribute ("RootQueueDiscList", "The list of root queue discs associated to this Traffic Control layer.",
                   ObjectVectorValue (),
                   MakeObjectVectorAccessor (&TrafficControlLayer::m_rootQueueDiscs),
                   MakeObjectVectorChecker<QueueDisc> ())
  ;
  return tid;
}
// Returns the most-derived TypeId of this instance (Object model requirement).
TypeId
TrafficControlLayer::GetInstanceTypeId (void) const
{
  return GetTypeId ();
}
// Default constructor; all per-device state is filled in later by
// SetupDevice () / SetRootQueueDiscOnDevice ().
TrafficControlLayer::TrafficControlLayer ()
  : Object ()
{
  NS_LOG_FUNCTION_NOARGS ();
}
// Releases references to the node, queue discs and handlers so reference
// cycles are broken at simulation teardown; chains up to Object::DoDispose.
void
TrafficControlLayer::DoDispose (void)
{
  NS_LOG_FUNCTION (this);
  m_node = 0;
  m_rootQueueDiscs.clear ();
  m_handlers.clear ();
  m_netDeviceQueueToQueueDiscMap.clear ();
  Object::DoDispose ();
}
// For every device that has a root queue disc installed: initialize the
// device, mark the queue-disc-installed flag on its queue interface, and wire
// the per-tx-queue wake callbacks.  WAKE_ROOT runs the root disc for every
// device queue; WAKE_CHILD requires one child queue disc per device queue and
// runs the matching child.  The queue disc(s) are stored per tx queue in
// m_netDeviceQueueToQueueDiscMap so Send () can pick the right one.
void
TrafficControlLayer::DoInitialize (void)
{
  NS_LOG_FUNCTION (this);
  for (uint32_t j = 0; j < m_rootQueueDiscs.size (); j++)
    {
      if (m_rootQueueDiscs[j])
        {
          Ptr<NetDevice> device = m_node->GetDevice (j);
          // NetDevices supporting flow control can set the number of device transmission
          // queues through the NetDevice queue interface during initialization. Thus,
          // ensure that the device has completed initialization
          device->Initialize ();
          std::map<Ptr<NetDevice>, NetDeviceInfo>::iterator qdMap = m_netDeviceQueueToQueueDiscMap.find (device);
          NS_ASSERT (qdMap != m_netDeviceQueueToQueueDiscMap.end ());
          Ptr<NetDeviceQueueInterface> devQueueIface = qdMap->second.first;
          NS_ASSERT (devQueueIface);
          devQueueIface->SetQueueDiscInstalled (true);
          // set the wake callbacks on netdevice queues
          if (m_rootQueueDiscs[j]->GetWakeMode () == QueueDisc::WAKE_ROOT)
            {
              for (uint32_t i = 0; i < devQueueIface->GetTxQueuesN (); i++)
                {
                  devQueueIface->GetTxQueue (i)->SetWakeCallback (MakeCallback (&QueueDisc::Run, m_rootQueueDiscs[j]));
                  qdMap->second.second.push_back (m_rootQueueDiscs[j]);
                }
            }
          else if (m_rootQueueDiscs[j]->GetWakeMode () == QueueDisc::WAKE_CHILD)
            {
              NS_ASSERT_MSG (m_rootQueueDiscs[j]->GetNQueueDiscClasses () == devQueueIface->GetTxQueuesN (),
                             "The number of child queue discs does not match the number of netdevice queues");
              for (uint32_t i = 0; i < devQueueIface->GetTxQueuesN (); i++)
                {
                  devQueueIface->GetTxQueue (i)->SetWakeCallback (MakeCallback (&QueueDisc::Run,
                                                                  m_rootQueueDiscs[j]->GetQueueDiscClass (i)->GetQueueDisc ()));
                  qdMap->second.second.push_back (m_rootQueueDiscs[j]->GetQueueDiscClass (i)->GetQueueDisc ());
                }
            }
          // initialize the queue disc
          m_rootQueueDiscs[j]->Initialize ();
        }
    }
  Object::DoInitialize ();
}
// Aggregates a NetDeviceQueueInterface to the device (once per device, even
// on dual-stack nodes) and records it in the device -> (queue interface,
// queue disc vector) map used by DoInitialize () and Send ().
void
TrafficControlLayer::SetupDevice (Ptr<NetDevice> device)
{
  NS_LOG_FUNCTION (this << device);
  // ensure this setup is done just once (in case of dual stack nodes)
  if (device->GetObject<NetDeviceQueueInterface> ())
    {
      NS_LOG_DEBUG ("The setup for this device has been already done.");
      return;
    }
  // create a NetDeviceQueueInterface object and aggregate it to the device
  Ptr<NetDeviceQueueInterface> devQueueIface = CreateObject<NetDeviceQueueInterface> ();
  device->AggregateObject (devQueueIface);
  // store a pointer to the created queue interface
  NS_ASSERT_MSG (m_netDeviceQueueToQueueDiscMap.find (device) == m_netDeviceQueueToQueueDiscMap.end (),
                 "This is a bug: SetupDevice should be called only once per device");
  m_netDeviceQueueToQueueDiscMap[device] = NetDeviceInfo (devQueueIface, QueueDiscVector ());
}
// Registers an upstream protocol handler.  A zero protocolType or a null
// device acts as a wildcard when Receive () matches incoming packets.
// Handlers are always registered as non-promiscuous here.
void
TrafficControlLayer::RegisterProtocolHandler (Node::ProtocolHandler handler,
                                              uint16_t protocolType, Ptr<NetDevice> device)
{
  NS_LOG_FUNCTION (this << protocolType << device);
  struct ProtocolHandlerEntry entry;
  entry.handler = handler;
  entry.protocol = protocolType;
  entry.device = device;
  entry.promiscuous = false;
  m_handlers.push_back (entry);
  NS_LOG_DEBUG ("Handler for NetDevice: " << device << " registered for protocol " <<
                protocolType << ".");
}
// Installs qDisc as the root queue disc of the given device.  The vector of
// root queue discs is indexed by the device's index on the node and is grown
// on demand; installing over an existing root queue disc is a fatal error.
void
TrafficControlLayer::SetRootQueueDiscOnDevice (Ptr<NetDevice> device, Ptr<QueueDisc> qDisc)
{
  NS_LOG_FUNCTION (this << device);
  uint32_t index = GetDeviceIndex (device);
  NS_ASSERT_MSG (index < m_node->GetNDevices (), "The provided device does not belong to"
                 << " the node which this TrafficControlLayer object is aggregated to" );
  if (index >= m_rootQueueDiscs.size ())
    {
      m_rootQueueDiscs.resize (index+1);
    }
  NS_ASSERT_MSG (m_rootQueueDiscs[index] == 0, "Cannot install a root queue disc on a "
                 << "device already having one. Delete the existing queue disc first.");
  m_rootQueueDiscs[index] = qDisc;
}
// Returns the root queue disc installed on the device, or a null pointer if
// none is installed (the vector is grown as a side effect so the index is
// always addressable).
Ptr<QueueDisc>
TrafficControlLayer::GetRootQueueDiscOnDevice (Ptr<NetDevice> device)
{
  NS_LOG_FUNCTION (this << device);
  uint32_t index = GetDeviceIndex (device);
  NS_ASSERT_MSG (index < m_node->GetNDevices (), "The provided device does not belong to"
                 << " the node which this TrafficControlLayer object is aggregated to" );
  if (index >= m_rootQueueDiscs.size ())
    {
      m_rootQueueDiscs.resize (index+1);
    }
  return m_rootQueueDiscs[index];
}
// Removes the root queue disc from the device; asserts that one is actually
// installed.  Only the reference is dropped here.
void
TrafficControlLayer::DeleteRootQueueDiscOnDevice (Ptr<NetDevice> device)
{
  NS_LOG_FUNCTION (this << device);
  uint32_t index = GetDeviceIndex (device);
  NS_ASSERT_MSG (index < m_node->GetNDevices (), "The provided device does not belong to"
                 << " the node which this TrafficControlLayer object is aggregated to" );
  NS_ASSERT_MSG (index < m_rootQueueDiscs.size () && m_rootQueueDiscs[index] != 0, "No root queue disc"
                 << " installed on device " << device);
  // remove the root queue disc
  m_rootQueueDiscs[index] = 0;
}
// Stores the node this layer belongs to (normally invoked from
// NotifyNewAggregate when the object is aggregated to a Node).
void
TrafficControlLayer::SetNode (Ptr<Node> node)
{
  NS_LOG_FUNCTION (this << node);
  m_node = node;
}
// Called by the aggregation machinery; picks up the Node pointer the first
// time this object is aggregated to a Node.
void
TrafficControlLayer::NotifyNewAggregate ()
{
  NS_LOG_FUNCTION (this);
  if (m_node == 0)
    {
      Ptr<Node> node = this->GetObject<Node> ();
      //verify that it's a valid node and that
      //the node was not set before
      if (node != 0)
        {
          this->SetNode (node);
        }
    }
  Object::NotifyNewAggregate ();
}
// Linear scan for the device's index on the node.  Note: returns
// m_node->GetNDevices () when the device is not found; callers assert on
// that value.
uint32_t
TrafficControlLayer::GetDeviceIndex (Ptr<NetDevice> device)
{
  NS_LOG_FUNCTION (this << device);
  uint32_t i;
  for (i = 0; i < m_node->GetNDevices () && device != m_node->GetDevice (i); i++);
  return i;
}
// Entry point for packets received from a NetDevice.  Dispatches the packet
// to every registered handler whose (device, protocol) filter matches; a
// zero protocol or null device in an entry acts as a wildcard.  It is fatal
// for a packet to match no handler at all.
void
TrafficControlLayer::Receive (Ptr<NetDevice> device, Ptr<const Packet> p,
                              uint16_t protocol, const Address &from, const Address &to,
                              NetDevice::PacketType packetType)
{
  NS_LOG_FUNCTION (this << device << p << protocol << from << to << packetType);
  bool found = false;
  for (ProtocolHandlerList::iterator i = m_handlers.begin ();
       i != m_handlers.end (); i++)
    {
      if (i->device == 0
          || (i->device != 0 && i->device == device))
        {
          if (i->protocol == 0
              || i->protocol == protocol)
            {
              NS_LOG_DEBUG ("Found handler for packet " << p << ", protocol " <<
                            protocol << " and NetDevice " << device <<
                            ". Send packet up");
              i->handler (device, p, protocol, from, to, packetType);
              found = true;
            }
        }
    }
  if (! found)
    {
      // Fix: the message previously streamed the packet pointer `p` where it
      // claimed to report the protocol number.
      NS_FATAL_ERROR ("Handler for protocol " << protocol << " and device " << device <<
                      " not found. It isn't forwarded up; it dies here.");
    }
}
// Entry point for packets to be transmitted on a NetDevice.  The device
// queue selected for the item determines the path: with no queue disc
// attached, the header is added and the packet goes straight to the device
// (silently dropped if the selected tx queue is stopped); otherwise the item
// is enqueued in the queue disc associated with that tx queue and the disc
// is run to dequeue as much as the device accepts.
void
TrafficControlLayer::Send (Ptr<NetDevice> device, Ptr<QueueDiscItem> item)
{
  NS_LOG_FUNCTION (this << device << item);
  NS_LOG_DEBUG ("Send packet to device " << device << " protocol number " <<
                item->GetProtocol ());
  std::map<Ptr<NetDevice>, NetDeviceInfo>::iterator qdMap = m_netDeviceQueueToQueueDiscMap.find (device);
  NS_ASSERT (qdMap != m_netDeviceQueueToQueueDiscMap.end ());
  Ptr<NetDeviceQueueInterface> devQueueIface = qdMap->second.first;
  NS_ASSERT (devQueueIface);
  // determine the transmission queue of the device where the packet will be enqueued
  uint8_t txq = devQueueIface->GetSelectedQueue (item);
  NS_ASSERT (txq < devQueueIface->GetTxQueuesN ());
  if (qdMap->second.second.empty ())
    {
      // The device has no attached queue disc, thus add the header to the packet and
      // send it directly to the device if the selected queue is not stopped
      if (!devQueueIface->GetTxQueue (txq)->IsStopped ())
        {
          item->AddHeader ();
          device->Send (item->GetPacket (), item->GetAddress (), item->GetProtocol ());
        }
    }
  else
    {
      // Enqueue the packet in the queue disc associated with the netdevice queue
      // selected for the packet and try to dequeue packets from such queue disc
      item->SetTxQueueIndex (txq);
      Ptr<QueueDisc> qDisc = qdMap->second.second[txq];
      NS_ASSERT (qDisc);
      qDisc->Enqueue (item);
      qDisc->Run ();
    }
}
} // namespace ns3<|fim▁end|>
|
* published by the Free Software Foundation;
|
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|># Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
interactiveshell = pytest.importorskip("IPython.terminal.interactiveshell")
tools = pytest.importorskip("IPython.testing.tools")
@pytest.fixture(scope="session")
def ipython():
    """Session-scoped terminal IPython shell configured for simple prompts."""
    cfg = tools.default_config()
    cfg.TerminalInteractiveShell.simple_prompt = True
    return interactiveshell.TerminalInteractiveShell.instance(config=cfg)
@pytest.fixture(autouse=True)
def ipython_interactive(ipython):
    """Activate IPython's builtin hooks
    for the duration of the test scope.

    Applied automatically to every test (autouse); yields the session shell
    with its builtin_trap context active so display hooks behave as in a
    live interactive session.
    """
    with ipython.builtin_trap:
        yield ipython
|
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
|
<|file_name|>Rc.py<|end_file_name|><|fim▁begin|>from Components.Pixmap import MovingPixmap, MultiPixmap
from Tools.Directories import resolveFilename, SCOPE_SKIN
from xml.etree.ElementTree import ElementTree
from Components.config import config, ConfigInteger
from Components.RcModel import rc_model
from boxbranding import getBoxType
config.misc.rcused = ConfigInteger(default=1)
class Rc:
    def __init__(self):
        """Set up the remote-control preview widgets and selection arrows.

        Intended as a mixin for a Screen: relies on dict-style widget access
        (self['name']) and on self.onShown being provided by the host class.
        """
        self['rc'] = MultiPixmap()
        self['arrowdown'] = MovingPixmap()
        self['arrowdown2'] = MovingPixmap()
        self['arrowup'] = MovingPixmap()
        self['arrowup2'] = MovingPixmap()
        # NOTE(review): config.misc.rcused is already created at module level;
        # re-creating it here looks redundant -- confirm before removing.
        config.misc.rcused = ConfigInteger(default=1)
        self.isDefaultRc = rc_model.rcIsDefault()
        self.rcheight = 500
        self.rcheighthalf = 250
        # Each entry: (y threshold, candidate arrow pixmaps, (x, y) draw offset).
        self.selectpics = []
        self.selectpics.append((self.rcheighthalf, ['arrowdown', 'arrowdown2'], (-18, -70)))
        self.selectpics.append((self.rcheight, ['arrowup', 'arrowup2'], (-18, 0)))
        self.readPositions()
        self.clearSelectedKeys()
        self.onShown.append(self.initRc)
    def initRc(self):
        """Choose which remote-control image to display once shown."""
        if getBoxType() in ('uniboxhd1', 'uniboxhd2', 'uniboxhd3', 'sezam5000hd', 'mbtwin', 'beyonwizt3'):
            self['rc'].setPixmapNum(config.misc.rcused.value)
        elif self.isDefaultRc:
            self['rc'].setPixmapNum(config.misc.rcused.value)
        else:
            # Non-default remotes ship a single image at index 0.
            self['rc'].setPixmapNum(0)
def readPositions(self):
if self.isDefaultRc:
target = resolveFilename(SCOPE_SKIN, 'rcpositions.xml')
else:
target = rc_model.getRcLocation() + 'rcpositions.xml'
tree = ElementTree(file=target)
rcs = tree.getroot()
self.rcs = {}
for rc in rcs:
id = int(rc.attrib['id'])
self.rcs[id] = {}
for key in rc:
name = key.attrib['name']
pos = key.attrib['pos'].split(',')
self.rcs[id][name] = (int(pos[0]), int(pos[1]))
def getSelectPic(self, pos):
for selectPic in self.selectpics:
if pos[1] <= selectPic[0]:
return (selectPic[1], selectPic[2])
return None
    def hideRc(self):
        """Hide the RC image together with any visible selection arrows."""
        self['rc'].hide()
        self.hideSelectPics()
    def showRc(self):
        """Show the RC image (selection arrows are managed separately)."""
        self['rc'].show()
    def selectKey(self, key):
        """Point a free selection arrow at the named key on the RC image.

        Looks the key's (x, y) position up in the positions table for the
        active remote layout and moves the first arrow pixmap that is not
        already in use onto it.  Silently does nothing for unknown keys or
        when all arrows of the matching half are taken.
        """
        if self.isDefaultRc:
            rc = self.rcs[config.misc.rcused.value]
        else:
            # Non-default remotes prefer layout id 2 when present.
            try:
                rc = self.rcs[2]
            except:
                rc = self.rcs[config.misc.rcused.value]
        if rc.has_key(key):
            rcpos = self['rc'].getPosition()
            pos = rc[key]
            selectPics = self.getSelectPic(pos)
            selectPic = None
            # Pick the first arrow of the matching half that is still free.
            for x in selectPics[0]:
                if x not in self.selectedKeys:
                    selectPic = x
                    break
            if selectPic is not None:
                print 'selectPic:', selectPic
                self[selectPic].moveTo(rcpos[0] + pos[0] + selectPics[1][0], rcpos[1] + pos[1] + selectPics[1][1], 1)
                self[selectPic].startMoving()
                self[selectPic].show()
                self.selectedKeys.append(selectPic)
            return
    def clearSelectedKeys(self):
        """Reset the key-selection state: show the RC, free all arrows."""
        self.showRc()
        self.selectedKeys = []
        self.hideSelectPics()
<|fim▁hole|> for selectPic in self.selectpics:
for pic in selectPic[1]:
self[pic].hide()<|fim▁end|>
|
def hideSelectPics(self):
|
<|file_name|>trivial_client.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
import time
import argparse
import grpc
from jaeger_client import Config
from grpc_opentracing import open_tracing_client_interceptor
from grpc_opentracing.grpcext import intercept_channel
import command_line_pb2
def run():
    """Demo client: send one traced Echo RPC to localhost:50051.

    Builds a Jaeger tracer (constant sampler, records every span), wraps an
    insecure gRPC channel with the OpenTracing client interceptor, performs
    a single Echo call and prints the reply.  The sleeps around
    tracer.close() give the async reporter time to flush buffered spans
    before the process exits -- keep the ordering.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--log_payloads',
        action='store_true',
        help='log request/response objects to open-tracing spans')
    args = parser.parse_args()
    config = Config(
        config={
            'sampler': {
                'type': 'const',
                'param': 1,
            },
            'logging': True,
        },
        service_name='trivial-client')
    tracer = config.initialize_tracer()
    tracer_interceptor = open_tracing_client_interceptor(
        tracer, log_payloads=args.log_payloads)
    channel = grpc.insecure_channel('localhost:50051')
    channel = intercept_channel(channel, tracer_interceptor)
    stub = command_line_pb2.CommandLineStub(channel)
    response = stub.Echo(command_line_pb2.CommandRequest(text='Hello, hello'))
    print(response.text)
    time.sleep(2)
    tracer.close()
    time.sleep(2)
if __name__ == '__main__':<|fim▁hole|><|fim▁end|>
|
run()
|
<|file_name|>masternodemanager.cpp<|end_file_name|><|fim▁begin|>#include "masternodemanager.h"
#include "ui_masternodemanager.h"
#include "addeditadrenalinenode.h"
#include "adrenalinenodeconfigdialog.h"
#include "sync.h"
#include "clientmodel.h"
#include "walletmodel.h"
#include "activemasternode.h"
#include "masternodeconfig.h"
#include "masternodeman.h"
#include "masternode.h"
#include "walletdb.h"
#include "wallet.h"
#include "init.h"
#include "rpcserver.h"
#include <boost/lexical_cast.hpp>
#include <fstream>
using namespace json_spirit;
using namespace std;
#include <QAbstractItemDelegate>
#include <QPainter>
#include <QTimer>
#include <QDebug>
#include <QScrollArea>
#include <QScroller>
#include <QDateTime>
#include <QApplication>
#include <QClipboard>
#include <QMessageBox>
#include <QThread>
#include <QtConcurrent/QtConcurrent>
#include <QScrollBar>
// Builds the masternode manager tab: disables the edit/start buttons until a
// row is selected, sizes the table columns to their contents, and arms a
// 1-second timer that refreshes the network masternode list (skipped while
// an address reindex is running).
MasternodeManager::MasternodeManager(QWidget *parent) :
    QWidget(parent),
    ui(new Ui::MasternodeManager),
    clientModel(0),
    walletModel(0)
{
    ui->setupUi(this);
    ui->editButton->setEnabled(false);
    ui->startButton->setEnabled(false);
    ui->tableWidget->horizontalHeader()->setSectionResizeMode(QHeaderView::ResizeToContents);
    ui->tableWidget_2->horizontalHeader()->setSectionResizeMode(QHeaderView::ResizeToContents);
    ui->tableWidget_3->horizontalHeader()->setSectionResizeMode(QHeaderView::ResizeToContents);
    timer = new QTimer(this);
    connect(timer, SIGNAL(timeout()), this, SLOT(updateNodeList()));
    if(!GetBoolArg("-reindexaddr", false))
        timer->start(1000);
    fFilterUpdated = true;
    nTimeFilterUpdated = GetTime();
    updateNodeList();
}
// Releases the generated UI; child QObjects (timer, widgets) are deleted by
// Qt's parent-child ownership.
MasternodeManager::~MasternodeManager()
{
    delete ui;
}
// Enables the edit/start buttons once the user selects a row in the "my
// masternodes" table (they start disabled in the constructor).
void MasternodeManager::on_tableWidget_2_itemSelectionChanged()
{
    if(ui->tableWidget_2->selectedItems().count() > 0)
    {
        ui->editButton->setEnabled(true);
        ui->startButton->setEnabled(true);
    }
}
// Inserts or updates one row of the "my masternodes" table, keyed by alias.
// Only alias/address/donation/status are displayed; privkey, txHash and
// txIndex are accepted but not shown.  The QTableWidget takes ownership of
// the items passed to setItem().
void MasternodeManager::updateAdrenalineNode(QString alias, QString addr, QString privkey, QString txHash, QString txIndex, QString donationAddress, QString donationPercentage, QString status)
{
    LOCK(cs_adrenaline);
    bool bFound = false;
    int nodeRow = 0;
    // Look for an existing row with this alias (column 0).
    for(int i=0; i < ui->tableWidget_2->rowCount(); i++)
    {
        if(ui->tableWidget_2->item(i, 0)->text() == alias)
        {
            bFound = true;
            nodeRow = i;
            break;
        }
    }
    if(nodeRow == 0 && !bFound)
        ui->tableWidget_2->insertRow(0);
    QTableWidgetItem *aliasItem = new QTableWidgetItem(alias);
    QTableWidgetItem *addrItem = new QTableWidgetItem(addr);
    QTableWidgetItem *donationAddressItem = new QTableWidgetItem(donationAddress);
    QTableWidgetItem *donationPercentageItem = new QTableWidgetItem(donationPercentage);
    QTableWidgetItem *statusItem = new QTableWidgetItem(status);
    // NOTE(review): percentage is placed in column 2 and address in column 3;
    // confirm this matches the .ui header order.
    ui->tableWidget_2->setItem(nodeRow, 0, aliasItem);
    ui->tableWidget_2->setItem(nodeRow, 1, addrItem);
    ui->tableWidget_2->setItem(nodeRow, 2, donationPercentageItem);
    ui->tableWidget_2->setItem(nodeRow, 3, donationAddressItem);
    ui->tableWidget_2->setItem(nodeRow, 4, statusItem);
}
// Format a duration in seconds as "Dd HHh:MMm:SSs", omitting the day and
// hour components while they are zero.  (Restores the opening brace and the
// `QString res;` declaration that were lost in the FIM hole -- without them
// the function body referenced `res` before any declaration.)
static QString seconds_to_DHMS(quint32 duration)
{
    QString res;
    int seconds = (int) (duration % 60);
    duration /= 60;
    int minutes = (int) (duration % 60);
    duration /= 60;
    int hours = (int) (duration % 24);
    int days = (int) (duration / 24);
    if((hours == 0)&&(days == 0))
        return res.sprintf("%02dm:%02ds", minutes, seconds);
    if (days == 0)
        return res.sprintf("%02dh:%02dm:%02ds", hours, minutes, seconds);
    return res.sprintf("%dd %02dh:%02dm:%02ds", days, hours, minutes, seconds);
}
// Rebuilds the network-wide masternode list (runs on a worker thread via
// QtConcurrent, see updateNodeList).  Two identical tables are used as a
// double buffer: the hidden one is repopulated, then visibility is swapped
// and the scroll position carried over, so the user never sees a half-built
// list.  Each row: Address, Rank, Active, Active Seconds, Last Seen, PubKey.
void MasternodeManager::updateListConc() {
    if (ui->tableWidget->isVisible())
    {
        // tableWidget is on screen -> rebuild tableWidget_3 off screen.
        ui->tableWidget_3->clearContents();
        ui->tableWidget_3->setRowCount(0);
        std::vector<CMasternode> vMasternodes = mnodeman.GetFullMasternodeVector();
        ui->tableWidget_3->horizontalHeader()->setSortIndicator(ui->tableWidget->horizontalHeader()->sortIndicatorSection() ,ui->tableWidget->horizontalHeader()->sortIndicatorOrder());
        BOOST_FOREACH(CMasternode& mn, vMasternodes)
        {
            int mnRow = 0;
            ui->tableWidget_3->insertRow(0);
            // populate list
            // Address, Rank, Active, Active Seconds, Last Seen, Pub Key
            QTableWidgetItem *activeItem = new QTableWidgetItem(QString::number(mn.IsEnabled()));
            QTableWidgetItem *addressItem = new QTableWidgetItem(QString::fromStdString(mn.addr.ToString()));
            QString Rank = QString::number(mnodeman.GetMasternodeRank(mn.vin, pindexBest->nHeight));
            QTableWidgetItem *rankItem = new QTableWidgetItem(Rank.rightJustified(2, '0', false));
            QTableWidgetItem *activeSecondsItem = new QTableWidgetItem(seconds_to_DHMS((qint64)(mn.lastTimeSeen - mn.sigTime)));
            QTableWidgetItem *lastSeenItem = new QTableWidgetItem(QString::fromStdString(DateTimeStrFormat(mn.lastTimeSeen)));
            CScript pubkey;
            pubkey =GetScriptForDestination(mn.pubkey.GetID());
            CTxDestination address1;
            ExtractDestination(pubkey, address1);
            CTransfercoinAddress address2(address1);
            QTableWidgetItem *pubkeyItem = new QTableWidgetItem(QString::fromStdString(address2.ToString()));
            ui->tableWidget_3->setItem(mnRow, 0, addressItem);
            ui->tableWidget_3->setItem(mnRow, 1, rankItem);
            ui->tableWidget_3->setItem(mnRow, 2, activeItem);
            ui->tableWidget_3->setItem(mnRow, 3, activeSecondsItem);
            ui->tableWidget_3->setItem(mnRow, 4, lastSeenItem);
            ui->tableWidget_3->setItem(mnRow, 5, pubkeyItem);
        }
        ui->countLabel->setText(QString::number(ui->tableWidget_3->rowCount()));
        on_UpdateButton_clicked();
        // Swap the double-buffered tables and preserve the scroll position.
        ui->tableWidget->setVisible(0);
        ui->tableWidget_3->setVisible(1);
        ui->tableWidget_3->verticalScrollBar()->setSliderPosition(ui->tableWidget->verticalScrollBar()->sliderPosition());
    }
    else
    {
        // tableWidget_3 is on screen -> rebuild tableWidget off screen.
        ui->tableWidget->clearContents();
        ui->tableWidget->setRowCount(0);
        std::vector<CMasternode> vMasternodes = mnodeman.GetFullMasternodeVector();
        ui->tableWidget->horizontalHeader()->setSortIndicator(ui->tableWidget_3->horizontalHeader()->sortIndicatorSection() ,ui->tableWidget_3->horizontalHeader()->sortIndicatorOrder());
        BOOST_FOREACH(CMasternode& mn, vMasternodes)
        {
            int mnRow = 0;
            ui->tableWidget->insertRow(0);
            // populate list
            // Address, Rank, Active, Active Seconds, Last Seen, Pub Key
            QTableWidgetItem *activeItem = new QTableWidgetItem(QString::number(mn.IsEnabled()));
            QTableWidgetItem *addressItem = new QTableWidgetItem(QString::fromStdString(mn.addr.ToString()));
            QString Rank = QString::number(mnodeman.GetMasternodeRank(mn.vin, pindexBest->nHeight));
            QTableWidgetItem *rankItem = new QTableWidgetItem(Rank.rightJustified(2, '0', false));
            QTableWidgetItem *activeSecondsItem = new QTableWidgetItem(seconds_to_DHMS((qint64)(mn.lastTimeSeen - mn.sigTime)));
            QTableWidgetItem *lastSeenItem = new QTableWidgetItem(QString::fromStdString(DateTimeStrFormat(mn.lastTimeSeen)));
            CScript pubkey;
            pubkey =GetScriptForDestination(mn.pubkey.GetID());
            CTxDestination address1;
            ExtractDestination(pubkey, address1);
            CTransfercoinAddress address2(address1);
            QTableWidgetItem *pubkeyItem = new QTableWidgetItem(QString::fromStdString(address2.ToString()));
            ui->tableWidget->setItem(mnRow, 0, addressItem);
            ui->tableWidget->setItem(mnRow, 1, rankItem);
            ui->tableWidget->setItem(mnRow, 2, activeItem);
            ui->tableWidget->setItem(mnRow, 3, activeSecondsItem);
            ui->tableWidget->setItem(mnRow, 4, lastSeenItem);
            ui->tableWidget->setItem(mnRow, 5, pubkeyItem);
        }
        ui->countLabel->setText(QString::number(ui->tableWidget->rowCount()));
        on_UpdateButton_clicked();
        ui->tableWidget_3->setVisible(0);
        ui->tableWidget->setVisible(1);
        ui->tableWidget->verticalScrollBar()->setSliderPosition(ui->tableWidget_3->verticalScrollBar()->sliderPosition());
    }
}
// Timer slot (fires every second): throttles list refreshes and, when due,
// kicks off updateListConc() on a background thread.  The refresh interval
// is MASTERNODELIST_UPDATE_SECONDS normally, or a cooldown after a filter
// change.  Fix: the timestamp was assigned twice in a row; one redundant
// `nTimeListUpdated = GetTime();` removed.
void MasternodeManager::updateNodeList()
{
    TRY_LOCK(cs_masternodes, lockMasternodes);
    if(!lockMasternodes)
        return;
    static int64_t nTimeListUpdated = GetTime();
    // to prevent high cpu usage update only once in MASTERNODELIST_UPDATE_SECONDS seconds
    // or MASTERNODELIST_FILTER_COOLDOWN_SECONDS seconds after filter was last changed
    int64_t nSecondsToWait = fFilterUpdated ? nTimeFilterUpdated - GetTime() + MASTERNODELIST_FILTER_COOLDOWN_SECONDS : nTimeListUpdated - GetTime() + MASTERNODELIST_UPDATE_SECONDS;
    if (fFilterUpdated) ui->countLabel->setText(QString::fromStdString(strprintf("Please wait... %d", nSecondsToWait)));
    if (nSecondsToWait > 0) return;
    nTimeListUpdated = GetTime();
    fFilterUpdated = false;
    // Only start a new background refresh once the previous one finished.
    if (f1.isFinished())
        f1 = QtConcurrent::run(this,&MasternodeManager::updateListConc);
}
// Stores the client model pointer.  The conditional body is intentionally
// empty: no signals are currently wired when a model is supplied.
void MasternodeManager::setClientModel(ClientModel *model)
{
    this->clientModel = model;
    if(model)
    {
    }
}
// Stores the wallet model pointer.  As with setClientModel, the conditional
// body is intentionally empty (no options-model hookup yet).
void MasternodeManager::setWalletModel(WalletModel *model)
{
    this->walletModel = model;
    if(model && model->getOptionsModel())
    {
    }
}
// Opens the "add/edit masternode" dialog modally.  Fix: the dialog was
// previously heap-allocated with `new` and never deleted, leaking one
// QDialog per click; a stack-allocated modal dialog is the standard Qt idiom.
void MasternodeManager::on_createButton_clicked()
{
    AddEditAdrenalineNode aenode;
    aenode.exec();
}
// Starts the masternode selected in the "my masternodes" table: finds the
// matching masternode.conf entry by alias, attempts to register it on the
// network, re-locks the wallet, and reports the outcome in a message box.
void MasternodeManager::on_startButton_clicked()
{
    // start the node
    QItemSelectionModel* selectionModel = ui->tableWidget_2->selectionModel();
    QModelIndexList selected = selectionModel->selectedRows();
    if(selected.count() == 0)
        return;
    QModelIndex index = selected.at(0);
    int r = index.row();
    std::string sAlias = ui->tableWidget_2->item(r, 0)->text().toStdString();
    // NOTE(review): the locked-wallet branch is empty -- registration will be
    // attempted (and fail to sign) while the wallet is locked; confirm intent.
    if(pwalletMain->IsLocked()) {
    }
    std::string statusObj;
    statusObj += "<center>Alias: " + sAlias;
    BOOST_FOREACH(CMasternodeConfig::CMasternodeEntry mne, masternodeConfig.getEntries()) {
        if(mne.getAlias() == sAlias) {
            std::string errorMessage;
            std::string strDonateAddress = mne.getDonationAddress();
            std::string strDonationPercentage = mne.getDonationPercentage();
            bool result = activeMasternode.Register(mne.getIp(), mne.getPrivKey(), mne.getTxHash(), mne.getOutputIndex(), strDonateAddress, strDonationPercentage, errorMessage);
            if(result) {
                statusObj += "<br>Successfully started masternode." ;
            } else {
                statusObj += "<br>Failed to start masternode.<br>Error: " + errorMessage;
            }
            break;
        }
    }
    statusObj += "</center>";
    pwalletMain->Lock();
    QMessageBox msg;
    msg.setText(QString::fromStdString(statusObj));
    msg.exec();
}
// Attempts to register every masternode listed in masternode.conf, counting
// successes and failures, then re-locks the wallet and shows a summary
// (including per-node error messages for failures).
void MasternodeManager::on_startAllButton_clicked()
{
    // NOTE(review): empty locked-wallet branch, same caveat as the
    // single-node start handler.
    if(pwalletMain->IsLocked()) {
    }
    std::vector<CMasternodeConfig::CMasternodeEntry> mnEntries;
    int total = 0;
    int successful = 0;
    int fail = 0;
    std::string statusObj;
    BOOST_FOREACH(CMasternodeConfig::CMasternodeEntry mne, masternodeConfig.getEntries()) {
        total++;
        std::string errorMessage;
        std::string strDonateAddress = mne.getDonationAddress();
        std::string strDonationPercentage = mne.getDonationPercentage();
        bool result = activeMasternode.Register(mne.getIp(), mne.getPrivKey(), mne.getTxHash(), mne.getOutputIndex(), strDonateAddress, strDonationPercentage, errorMessage);
        if(result) {
            successful++;
        } else {
            fail++;
            statusObj += "\nFailed to start " + mne.getAlias() + ". Error: " + errorMessage;
        }
    }
    pwalletMain->Lock();
    std::string returnObj;
    returnObj = "Successfully started " + boost::lexical_cast<std::string>(successful) + " masternodes, failed to start " +
            boost::lexical_cast<std::string>(fail) + ", total " + boost::lexical_cast<std::string>(total);
    if (fail > 0)
        returnObj += statusObj;
    QMessageBox msg;
    msg.setText(QString::fromStdString(returnObj));
    msg.exec();
}
// Refreshes the status column of the "my masternodes" table from the current
// network view: each configured entry is first marked "Not in the masternode
// list." and then overwritten with "Masternode is Running." if its IP is
// found among the known masternodes.
void MasternodeManager::on_UpdateButton_clicked()
{
    BOOST_FOREACH(CMasternodeConfig::CMasternodeEntry mne, masternodeConfig.getEntries()) {
        std::string errorMessage;
        std::string strDonateAddress = mne.getDonationAddress();
        std::string strDonationPercentage = mne.getDonationPercentage();
        std::vector<CMasternode> vMasternodes = mnodeman.GetFullMasternodeVector();
        // NOTE(review): errorMessage is never written here, so this condition
        // is always true and the else branch is dead -- confirm before removal.
        if (errorMessage == ""){
            updateAdrenalineNode(QString::fromStdString(mne.getAlias()), QString::fromStdString(mne.getIp()), QString::fromStdString(mne.getPrivKey()), QString::fromStdString(mne.getTxHash()),
                QString::fromStdString(mne.getOutputIndex()), QString::fromStdString(strDonateAddress), QString::fromStdString(strDonationPercentage), QString::fromStdString("Not in the masternode list."));
        }
        else {
            updateAdrenalineNode(QString::fromStdString(mne.getAlias()), QString::fromStdString(mne.getIp()), QString::fromStdString(mne.getPrivKey()), QString::fromStdString(mne.getTxHash()),
                QString::fromStdString(mne.getOutputIndex()), QString::fromStdString(strDonateAddress), QString::fromStdString(strDonationPercentage), QString::fromStdString(errorMessage));
        }
        BOOST_FOREACH(CMasternode& mn, vMasternodes) {
            if (mn.addr.ToString().c_str() == mne.getIp()){
                updateAdrenalineNode(QString::fromStdString(mne.getAlias()), QString::fromStdString(mne.getIp()), QString::fromStdString(mne.getPrivKey()), QString::fromStdString(mne.getTxHash()),
                    QString::fromStdString(mne.getOutputIndex()), QString::fromStdString(strDonateAddress), QString::fromStdString(strDonationPercentage), QString::fromStdString("Masternode is Running."));
            }
        }
    }
}
|
{
QString res;
|
<|file_name|>test_utils.py<|end_file_name|><|fim▁begin|>""" Tests for utils. """
import collections
from datetime import datetime, timedelta
from pytz import UTC
from django.test import TestCase
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase, SharedModuleStoreTestCase
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from xmodule.modulestore.django import modulestore
from xmodule.partitions.partitions import UserPartition, Group
from openedx.core.djangoapps.site_configuration.tests.test_util import with_site_configuration_context
from contentstore import utils
from contentstore.tests.utils import CourseTestCase
class LMSLinksTestCase(TestCase):
    """ Tests for LMS links.

    Methods are named with a trailing ``_test`` rather than a leading
    ``test_`` -- presumably collected by the project's nose-style runner;
    confirm before renaming.
    """
    def lms_link_test(self):
        """ Tests get_lms_link_for_item. """
        course_key = SlashSeparatedCourseKey('mitX', '101', 'test')
        location = course_key.make_usage_key('vertical', 'contacting_us')
        link = utils.get_lms_link_for_item(location, False)
        self.assertEquals(link, "//localhost:8000/courses/mitX/101/test/jump_to/i4x://mitX/101/vertical/contacting_us")
        # test preview
        link = utils.get_lms_link_for_item(location, True)
        self.assertEquals(
            link,
            "//preview.localhost/courses/mitX/101/test/jump_to/i4x://mitX/101/vertical/contacting_us"
        )
        # now test with the course' location
        location = course_key.make_usage_key('course', 'test')
        link = utils.get_lms_link_for_item(location)
        self.assertEquals(link, "//localhost:8000/courses/mitX/101/test/jump_to/i4x://mitX/101/course/test")
    def lms_link_for_certificate_web_view_test(self):
        """ Tests get_lms_link_for_certificate_web_view. """
        course_key = SlashSeparatedCourseKey('mitX', '101', 'test')
        dummy_user = ModuleStoreEnum.UserID.test
        mode = 'professional'
        self.assertEquals(
            utils.get_lms_link_for_certificate_web_view(dummy_user, course_key, mode),
            "//localhost:8000/certificates/user/{user_id}/course/{course_key}?preview={mode}".format(
                user_id=dummy_user,
                course_key=course_key,
                mode=mode
            )
        )
        # A site configuration with a matching course_org_filter overrides
        # the LMS base used to build the certificate link.
        with with_site_configuration_context(configuration={"course_org_filter": "mitX", "LMS_BASE": "dummyhost:8000"}):
            self.assertEquals(
                utils.get_lms_link_for_certificate_web_view(dummy_user, course_key, mode),
                "//dummyhost:8000/certificates/user/{user_id}/course/{course_key}?preview={mode}".format(
                    user_id=dummy_user,
                    course_key=course_key,
                    mode=mode
                )
            )
class ExtraPanelTabTestCase(TestCase):
""" Tests adding and removing extra course tabs. """
def get_tab_type_dicts(self, tab_types):
""" Returns an array of tab dictionaries. """
if tab_types:
return [{'tab_type': tab_type} for tab_type in tab_types.split(',')]
else:
return []
def get_course_with_tabs(self, tabs=None):
""" Returns a mock course object with a tabs attribute. """
if tabs is None:
tabs = []
course = collections.namedtuple('MockCourse', ['tabs'])
if isinstance(tabs, basestring):
course.tabs = self.get_tab_type_dicts(tabs)
else:
course.tabs = tabs
return course
class XBlockVisibilityTestCase(SharedModuleStoreTestCase):
"""Tests for xblock visibility for students."""
@classmethod
def setUpClass(cls):
super(XBlockVisibilityTestCase, cls).setUpClass()
cls.dummy_user = ModuleStoreEnum.UserID.test
cls.past = datetime(1970, 1, 1, tzinfo=UTC)
cls.future = datetime.now(UTC) + timedelta(days=1)
cls.course = CourseFactory.create()
def test_private_unreleased_xblock(self):
"""Verifies that a private unreleased xblock is not visible"""
self._test_visible_to_students(False, 'private_unreleased', self.future)
def test_private_released_xblock(self):
"""Verifies that a private released xblock is not visible"""
self._test_visible_to_students(False, 'private_released', self.past)
def test_public_unreleased_xblock(self):
"""Verifies that a public (published) unreleased xblock is not visible"""
self._test_visible_to_students(False, 'public_unreleased', self.future, publish=True)
def test_public_released_xblock(self):
"""Verifies that public (published) released xblock is visible if staff lock is not enabled."""
self._test_visible_to_students(True, 'public_released', self.past, publish=True)
def test_private_no_start_xblock(self):
"""Verifies that a private xblock with no start date is not visible"""
self._test_visible_to_students(False, 'private_no_start', None)
def test_public_no_start_xblock(self):
"""Verifies that a public (published) xblock with no start date is visible unless staff lock is enabled"""
self._test_visible_to_students(True, 'public_no_start', None, publish=True)
def test_draft_released_xblock(self):
"""Verifies that a xblock with an unreleased draft and a released published version is visible"""
vertical = self._create_xblock_with_start_date('draft_released', self.past, publish=True)
# Create an unreleased draft version of the xblock
vertical.start = self.future
modulestore().update_item(vertical, self.dummy_user)
self.assertTrue(utils.is_currently_visible_to_students(vertical))
def _test_visible_to_students(self, expected_visible_without_lock, name, start_date, publish=False):
"""
Helper method that checks that is_xblock_visible_to_students returns the correct value both
with and without visible_to_staff_only set.
"""
no_staff_lock = self._create_xblock_with_start_date(name, start_date, publish, visible_to_staff_only=False)
self.assertEqual(expected_visible_without_lock, utils.is_currently_visible_to_students(no_staff_lock))
# any xblock with visible_to_staff_only set to True should not be visible to students.
staff_lock = self._create_xblock_with_start_date(
name + "_locked", start_date, publish, visible_to_staff_only=True
)
self.assertFalse(utils.is_currently_visible_to_students(staff_lock))
def _create_xblock_with_start_date(self, name, start_date, publish=False, visible_to_staff_only=False):
"""Helper to create an xblock with a start date, optionally publishing it"""
vertical = modulestore().create_item(
self.dummy_user, self.course.location.course_key, 'vertical', name,
fields={'start': start_date, 'visible_to_staff_only': visible_to_staff_only}
)
if publish:
modulestore().publish(vertical.location, self.dummy_user)
return vertical
class ReleaseDateSourceTest(CourseTestCase):
"""Tests for finding the source of an xblock's release date."""
def setUp(self):
super(ReleaseDateSourceTest, self).setUp()
self.chapter = ItemFactory.create(category='chapter', parent_location=self.course.location)
self.sequential = ItemFactory.create(category='sequential', parent_location=self.chapter.location)
self.vertical = ItemFactory.create(category='vertical', parent_location=self.sequential.location)
# Read again so that children lists are accurate
self.chapter = self.store.get_item(self.chapter.location)
self.sequential = self.store.get_item(self.sequential.location)
self.vertical = self.store.get_item(self.vertical.location)
self.date_one = datetime(1980, 1, 1, tzinfo=UTC)
self.date_two = datetime(2020, 1, 1, tzinfo=UTC)
def _update_release_dates(self, chapter_start, sequential_start, vertical_start):
"""Sets the release dates of the chapter, sequential, and vertical"""
self.chapter.start = chapter_start
self.chapter = self.store.update_item(self.chapter, ModuleStoreEnum.UserID.test)
self.sequential.start = sequential_start
self.sequential = self.store.update_item(self.sequential, ModuleStoreEnum.UserID.test)
self.vertical.start = vertical_start
self.vertical = self.store.update_item(self.vertical, ModuleStoreEnum.UserID.test)
def _verify_release_date_source(self, item, expected_source):
"""Helper to verify that the release date source of a given item matches the expected source"""
source = utils.find_release_date_source(item)
self.assertEqual(source.location, expected_source.location)
self.assertEqual(source.start, expected_source.start)
def test_chapter_source_for_vertical(self):
"""Tests a vertical's release date being set by its chapter"""
self._update_release_dates(self.date_one, self.date_one, self.date_one)
self._verify_release_date_source(self.vertical, self.chapter)
def test_sequential_source_for_vertical(self):
"""Tests a vertical's release date being set by its sequential"""
self._update_release_dates(self.date_one, self.date_two, self.date_two)
self._verify_release_date_source(self.vertical, self.sequential)
def test_chapter_source_for_sequential(self):
"""Tests a sequential's release date being set by its chapter"""
self._update_release_dates(self.date_one, self.date_one, self.date_one)<|fim▁hole|>
def test_sequential_source_for_sequential(self):
"""Tests a sequential's release date being set by itself"""
self._update_release_dates(self.date_one, self.date_two, self.date_two)
self._verify_release_date_source(self.sequential, self.sequential)
class StaffLockTest(CourseTestCase):
"""Base class for testing staff lock functions."""
def setUp(self):
super(StaffLockTest, self).setUp()
self.chapter = ItemFactory.create(category='chapter', parent_location=self.course.location)
self.sequential = ItemFactory.create(category='sequential', parent_location=self.chapter.location)
self.vertical = ItemFactory.create(category='vertical', parent_location=self.sequential.location)
self.orphan = ItemFactory.create(category='vertical', parent_location=self.sequential.location)
# Read again so that children lists are accurate
self.chapter = self.store.get_item(self.chapter.location)
self.sequential = self.store.get_item(self.sequential.location)
self.vertical = self.store.get_item(self.vertical.location)
# Orphan the orphaned xblock
self.sequential.children = [self.vertical.location]
self.sequential = self.store.update_item(self.sequential, ModuleStoreEnum.UserID.test)
def _set_staff_lock(self, xblock, is_locked):
"""If is_locked is True, xblock is staff locked. Otherwise, the xblock staff lock field is removed."""
field = xblock.fields['visible_to_staff_only']
if is_locked:
field.write_to(xblock, True)
else:
field.delete_from(xblock)
return self.store.update_item(xblock, ModuleStoreEnum.UserID.test)
def _update_staff_locks(self, chapter_locked, sequential_locked, vertical_locked):
"""
Sets the staff lock on the chapter, sequential, and vertical
If the corresponding argument is False, then the field is deleted from the xblock
"""
self.chapter = self._set_staff_lock(self.chapter, chapter_locked)
self.sequential = self._set_staff_lock(self.sequential, sequential_locked)
self.vertical = self._set_staff_lock(self.vertical, vertical_locked)
class StaffLockSourceTest(StaffLockTest):
"""Tests for finding the source of an xblock's staff lock."""
def _verify_staff_lock_source(self, item, expected_source):
"""Helper to verify that the staff lock source of a given item matches the expected source"""
source = utils.find_staff_lock_source(item)
self.assertEqual(source.location, expected_source.location)
self.assertTrue(source.visible_to_staff_only)
def test_chapter_source_for_vertical(self):
"""Tests a vertical's staff lock being set by its chapter"""
self._update_staff_locks(True, False, False)
self._verify_staff_lock_source(self.vertical, self.chapter)
def test_sequential_source_for_vertical(self):
"""Tests a vertical's staff lock being set by its sequential"""
self._update_staff_locks(True, True, False)
self._verify_staff_lock_source(self.vertical, self.sequential)
self._update_staff_locks(False, True, False)
self._verify_staff_lock_source(self.vertical, self.sequential)
def test_vertical_source_for_vertical(self):
"""Tests a vertical's staff lock being set by itself"""
self._update_staff_locks(True, True, True)
self._verify_staff_lock_source(self.vertical, self.vertical)
self._update_staff_locks(False, True, True)
self._verify_staff_lock_source(self.vertical, self.vertical)
self._update_staff_locks(False, False, True)
self._verify_staff_lock_source(self.vertical, self.vertical)
def test_orphan_has_no_source(self):
"""Tests that a orphaned xblock has no staff lock source"""
self.assertIsNone(utils.find_staff_lock_source(self.orphan))
def test_no_source_for_vertical(self):
"""Tests a vertical with no staff lock set anywhere"""
self._update_staff_locks(False, False, False)
self.assertIsNone(utils.find_staff_lock_source(self.vertical))
class InheritedStaffLockTest(StaffLockTest):
"""Tests for determining if an xblock inherits a staff lock."""
def test_no_inheritance(self):
"""Tests that a locked or unlocked vertical with no locked ancestors does not have an inherited lock"""
self._update_staff_locks(False, False, False)
self.assertFalse(utils.ancestor_has_staff_lock(self.vertical))
self._update_staff_locks(False, False, True)
self.assertFalse(utils.ancestor_has_staff_lock(self.vertical))
def test_inheritance_in_locked_section(self):
"""Tests that a locked or unlocked vertical in a locked section has an inherited lock"""
self._update_staff_locks(True, False, False)
self.assertTrue(utils.ancestor_has_staff_lock(self.vertical))
self._update_staff_locks(True, False, True)
self.assertTrue(utils.ancestor_has_staff_lock(self.vertical))
def test_inheritance_in_locked_subsection(self):
"""Tests that a locked or unlocked vertical in a locked subsection has an inherited lock"""
self._update_staff_locks(False, True, False)
self.assertTrue(utils.ancestor_has_staff_lock(self.vertical))
self._update_staff_locks(False, True, True)
self.assertTrue(utils.ancestor_has_staff_lock(self.vertical))
def test_no_inheritance_for_orphan(self):
"""Tests that an orphaned xblock does not inherit staff lock"""
self.assertFalse(utils.ancestor_has_staff_lock(self.orphan))
class GroupVisibilityTest(CourseTestCase):
"""
Test content group access rules.
"""
def setUp(self):
super(GroupVisibilityTest, self).setUp()
chapter = ItemFactory.create(category='chapter', parent_location=self.course.location)
sequential = ItemFactory.create(category='sequential', parent_location=chapter.location)
vertical = ItemFactory.create(category='vertical', parent_location=sequential.location)
html = ItemFactory.create(category='html', parent_location=vertical.location)
problem = ItemFactory.create(
category='problem', parent_location=vertical.location, data="<problem></problem>"
)
self.sequential = self.store.get_item(sequential.location)
self.vertical = self.store.get_item(vertical.location)
self.html = self.store.get_item(html.location)
self.problem = self.store.get_item(problem.location)
# Add partitions to the course
self.course.user_partitions = [
UserPartition(
id=0,
name="Partition 0",
description="Partition 0",
scheme=UserPartition.get_scheme("random"),
groups=[
Group(id=0, name="Group A"),
Group(id=1, name="Group B"),
],
),
UserPartition(
id=1,
name="Partition 1",
description="Partition 1",
scheme=UserPartition.get_scheme("random"),
groups=[
Group(id=0, name="Group C"),
Group(id=1, name="Group D"),
],
),
UserPartition(
id=2,
name="Partition 2",
description="Partition 2",
scheme=UserPartition.get_scheme("random"),
groups=[
Group(id=0, name="Group E"),
Group(id=1, name="Group F"),
Group(id=2, name="Group G"),
Group(id=3, name="Group H"),
],
),
]
self.course = self.store.update_item(self.course, ModuleStoreEnum.UserID.test)
def set_group_access(self, xblock, value):
""" Sets group_access to specified value and calls update_item to persist the change. """
xblock.group_access = value
self.store.update_item(xblock, self.user.id)
def test_no_visibility_set(self):
""" Tests when group_access has not been set on anything. """
def verify_all_components_visible_to_all(): # pylint: disable=invalid-name
""" Verifies when group_access has not been set on anything. """
for item in (self.sequential, self.vertical, self.html, self.problem):
self.assertFalse(utils.has_children_visible_to_specific_content_groups(item))
self.assertFalse(utils.is_visible_to_specific_content_groups(item))
verify_all_components_visible_to_all()
# Test with group_access set to Falsey values.
self.set_group_access(self.vertical, {1: []})
self.set_group_access(self.html, {2: None})
verify_all_components_visible_to_all()
def test_sequential_and_problem_have_group_access(self):
""" Tests when group_access is set on a few different components. """
self.set_group_access(self.sequential, {1: [0]})
# This is a no-op.
self.set_group_access(self.vertical, {1: []})
self.set_group_access(self.problem, {2: [3, 4]})
# Note that "has_children_visible_to_specific_content_groups" only checks immediate children.
self.assertFalse(utils.has_children_visible_to_specific_content_groups(self.sequential))
self.assertTrue(utils.has_children_visible_to_specific_content_groups(self.vertical))
self.assertFalse(utils.has_children_visible_to_specific_content_groups(self.html))
self.assertFalse(utils.has_children_visible_to_specific_content_groups(self.problem))
self.assertTrue(utils.is_visible_to_specific_content_groups(self.sequential))
self.assertFalse(utils.is_visible_to_specific_content_groups(self.vertical))
self.assertFalse(utils.is_visible_to_specific_content_groups(self.html))
self.assertTrue(utils.is_visible_to_specific_content_groups(self.problem))
class GetUserPartitionInfoTest(ModuleStoreTestCase):
"""
Tests for utility function that retrieves user partition info
and formats it for consumption by the editing UI.
"""
def setUp(self):
"""Create a dummy course. """
super(GetUserPartitionInfoTest, self).setUp()
self.course = CourseFactory()
self.block = ItemFactory.create(category="problem", parent_location=self.course.location) # pylint: disable=no-member
# Set up some default partitions
self._set_partitions([
UserPartition(
id=0,
name="Cohort user partition",
scheme=UserPartition.get_scheme("cohort"),
description="Cohorted user partition",
groups=[
Group(id=0, name="Group A"),
Group(id=1, name="Group B"),
],
),
UserPartition(
id=1,
name="Random user partition",
scheme=UserPartition.get_scheme("random"),
description="Random user partition",
groups=[
Group(id=0, name="Group C"),
],
),
])
def test_retrieves_partition_info_with_selected_groups(self):
# Initially, no group access is set on the block, so no groups should
# be marked as selected.
expected = [
{
"id": 0,
"name": u"Cohort user partition",
"scheme": u"cohort",
"groups": [
{
"id": 0,
"name": u"Group A",
"selected": False,
"deleted": False,
},
{
"id": 1,
"name": u"Group B",
"selected": False,
"deleted": False,
},
]
},
{
"id": 1,
"name": u"Random user partition",
"scheme": u"random",
"groups": [
{
"id": 0,
"name": u"Group C",
"selected": False,
"deleted": False,
},
]
}
]
self.assertEqual(self._get_partition_info(schemes=["cohort", "random"]), expected)
# Update group access and expect that now one group is marked as selected.
self._set_group_access({0: [1]})
expected[0]["groups"][1]["selected"] = True
self.assertEqual(self._get_partition_info(schemes=["cohort", "random"]), expected)
def test_deleted_groups(self):
# Select a group that is not defined in the partition
self._set_group_access({0: [3]})
# Expect that the group appears as selected but is marked as deleted
partitions = self._get_partition_info()
groups = partitions[0]["groups"]
self.assertEqual(len(groups), 3)
self.assertEqual(groups[2], {
"id": 3,
"name": "Deleted Group",
"selected": True,
"deleted": True
})
def test_filter_by_partition_scheme(self):
partitions = self._get_partition_info(schemes=["random"])
self.assertEqual(len(partitions), 1)
self.assertEqual(partitions[0]["scheme"], "random")
def test_exclude_inactive_partitions(self):
# Include an inactive verification scheme
self._set_partitions([
UserPartition(
id=0,
name="Cohort user partition",
scheme=UserPartition.get_scheme("cohort"),
description="Cohorted user partition",
groups=[
Group(id=0, name="Group A"),
Group(id=1, name="Group B"),
],
),
UserPartition(
id=1,
name="Verification user partition",
scheme=UserPartition.get_scheme("verification"),
description="Verification user partition",
groups=[
Group(id=0, name="Group C"),
],
active=False,
),
])
# Expect that the inactive scheme is excluded from the results
partitions = self._get_partition_info(schemes=["cohort", "verification"])
self.assertEqual(len(partitions), 1)
self.assertEqual(partitions[0]["scheme"], "cohort")
def test_exclude_partitions_with_no_groups(self):
# The cohort partition has no groups defined
self._set_partitions([
UserPartition(
id=0,
name="Cohort user partition",
scheme=UserPartition.get_scheme("cohort"),
description="Cohorted user partition",
groups=[],
),
UserPartition(
id=1,
name="Verification user partition",
scheme=UserPartition.get_scheme("verification"),
description="Verification user partition",
groups=[
Group(id=0, name="Group C"),
],
),
])
# Expect that the partition with no groups is excluded from the results
partitions = self._get_partition_info(schemes=["cohort", "verification"])
self.assertEqual(len(partitions), 1)
self.assertEqual(partitions[0]["scheme"], "verification")
def _set_partitions(self, partitions):
"""Set the user partitions of the course descriptor. """
self.course.user_partitions = partitions
self.course = self.store.update_item(self.course, ModuleStoreEnum.UserID.test)
def _set_group_access(self, group_access):
"""Set group access of the block. """
self.block.group_access = group_access
self.block = self.store.update_item(self.block, ModuleStoreEnum.UserID.test)
def _get_partition_info(self, schemes=None):
"""Retrieve partition info and selected groups. """
return utils.get_user_partition_info(self.block, schemes=schemes)<|fim▁end|>
|
self._verify_release_date_source(self.sequential, self.chapter)
|
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main
import (
"errors"
"fmt"
"github.com/hyperledger/fabric/core/chaincode/shim"
)
// SampleChaincode struct required to implement the shim.Chaincode interface
type SampleChaincode struct {
}
// Init method is called when the chaincode is first deployed onto the blockchain network
func (t *SampleChaincode) Init(stub shim.ChaincodeStubInterface, function string, args []string) ([]byte, error) {
if len(args) != 1 {
return nil, errors.New("Incorrect number of arguments. Expecting 1")
}
err := stub.PutState("hello_world", []byte(args[0]))
if err != nil {
return nil, err
}
return nil, nil
}
// Query method is invoked whenever any read/get/query operation needs to be performed on the blockchain state.
func (t *SampleChaincode) Query(stub shim.ChaincodeStubInterface, function string, args []string) ([]byte, error) {
fmt.Println("query is running " + function)
// Handle different functions
if function == "read" { //read a variable
return t.read(stub, args)
}
fmt.Println("query did not find func: " + function)
return nil, errors.New("Received unknown function query")
}
// Invoke method is invoked whenever the state of the blockchain is to be modified.
func (t *SampleChaincode) Invoke(stub shim.ChaincodeStubInterface, function string, args []string) ([]byte, error) {
fmt.Println("invoke is running " + function)
// Handle different functions
if function == "init" {
return t.Init(stub, "init", args)
} else if function == "write" {
return t.write(stub, args)
}
fmt.Println("invoke did not find func: " + function)
return nil, errors.New("Received unknown function invocation")
}
func (t *SampleChaincode) read(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {
var name, jsonResp string
var err error
if len(args) != 1 {
return nil, errors.New("Incorrect number of arguments. Expecting name of the var to query")
}
name = args[0]
valAsbytes, err := stub.GetState(name)
if err != nil {
jsonResp = "{\"Error\":\"Failed to get state for " + name + "\"}"
return nil, errors.New(jsonResp)
}
return valAsbytes, nil
}
func (t *SampleChaincode) write(stub shim.ChaincodeStubInterface, args []string) ([]byte, error) {
var name, value string
var err error
fmt.Println("running write()")
if len(args) != 2 {
return nil, errors.New("Incorrect number of arguments. Expecting 2. name of the variable and value to set")
}
name = args[0] //rename for fun
value = args[1]
err = stub.PutState(name, []byte(value)) //write the variable into the chaincode state
if err != nil {
return nil, err
}
return nil, nil
}
func main() {
err := shim.Start(new(SampleChaincode))<|fim▁hole|> } else {
fmt.Println("SampleChaincode successfully started")
}
}<|fim▁end|>
|
if err != nil {
fmt.Println("Could not start SampleChaincode")
|
<|file_name|>test_poisson_order.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
from __future__ import print_function
import warnings
import numpy
import pytest
import sympy
from dolfin import (
MPI,
Constant,
DirichletBC,
Expression,
FunctionSpace,
UnitSquareMesh,
errornorm,
pi,
triangle,
)
import helpers
import matplotlib.pyplot as plt
from maelstrom import heat
MAX_DEGREE = 5
def problem_sinsin():
"""cosine example.
"""
def mesh_generator(n):
return UnitSquareMesh(n, n, "left/right")
x = sympy.DeferredVector("x")
# Choose the solution such that the boundary conditions are fulfilled
# exactly. Also, multiply with x**2 to make sure that the right-hand side
# doesn't contain the term 1/x. Although it looks like a singularity at
# x=0, this terms is esentially harmless since the volume element 2*pi*x is
# used throughout the code, canceling out with the 1/x. However, Dolfin has
# problems with this, cf.
# <https://bitbucket.org/fenics-project/dolfin/issues/831/some-problems-with-quadrature-expressions>.
solution = {
"value": x[0] ** 2 * sympy.sin(pi * x[0]) * sympy.sin(pi * x[1]),
"degree": MAX_DEGREE,
}
# Produce a matching right-hand side.
phi = solution["value"]
kappa = 2.0
rho = 3.0
cp = 5.0
conv = [1.0, 2.0]
rhs_sympy = sympy.simplify(
-1.0 / x[0] * sympy.diff(kappa * x[0] * sympy.diff(phi, x[0]), x[0])
- 1.0 / x[0] * sympy.diff(kappa * x[0] * sympy.diff(phi, x[1]), x[1])
+ rho * cp * conv[0] * sympy.diff(phi, x[0])
+ rho * cp * conv[1] * sympy.diff(phi, x[1])
)
rhs = {
"value": Expression(helpers.ccode(rhs_sympy), degree=MAX_DEGREE),
"degree": MAX_DEGREE,
}
return mesh_generator, solution, rhs, triangle, kappa, rho, cp, Constant(conv)
@pytest.mark.parametrize("problem", [problem_sinsin])
@pytest.mark.parametrize("stabilization", [None, "supg"])
def test_order(problem, stabilization):
"""Assert the correct discretization order.
"""
mesh_sizes = [16, 32, 64]
errors, hmax = _compute_errors(problem, mesh_sizes, stabilization)
# Compute the numerical order of convergence.
order = helpers.compute_numerical_order_of_convergence(hmax, errors)
# The test is considered passed if the numerical order of convergence
# matches the expected order in at least the first step in the coarsest
# spatial discretization, and is not getting worse as the spatial
# discretizations are refining.
tol = 0.1
expected_order = 2.0
assert (order > expected_order - tol).all()
return
def _compute_errors(problem, mesh_sizes, stabilization):
mesh_generator, solution, f, cell_type, kappa, rho, cp, conv = problem()
if solution["degree"] > MAX_DEGREE:
warnings.warn(
"Expression degree ({}) > maximum degree ({}). Truncating.".format(
solution["degree"], MAX_DEGREE
)
)
degree = MAX_DEGREE<|fim▁hole|> else:
degree = solution["degree"]
sol = Expression(
helpers.ccode(solution["value"]), t=0.0, degree=degree, cell=cell_type
)
errors = numpy.empty(len(mesh_sizes))
hmax = numpy.empty(len(mesh_sizes))
for k, mesh_size in enumerate(mesh_sizes):
mesh = mesh_generator(mesh_size)
hmax[k] = MPI.max(MPI.comm_world, mesh.hmax())
Q = FunctionSpace(mesh, "CG", 1)
prob = heat.Heat(
Q,
kappa=kappa,
rho=rho,
cp=cp,
convection=conv,
source=f["value"],
dirichlet_bcs=[DirichletBC(Q, 0.0, "on_boundary")],
stabilization=stabilization,
)
phi_approx = prob.solve_stationary()
errors[k] = errornorm(sol, phi_approx)
return errors, hmax
def _show_order_info(problem, mesh_sizes, stabilization):
"""Performs consistency check for the given problem/method combination and
show some information about it. Useful for debugging.
"""
errors, hmax = _compute_errors(problem, mesh_sizes, stabilization)
order = helpers.compute_numerical_order_of_convergence(hmax, errors)
# Print the data
print()
print("hmax ||u - u_h|| conv. order")
print("{:e} {:e}".format(hmax[0], errors[0]))
for j in range(len(errors) - 1):
print(32 * " " + "{:2.5f}".format(order[j]))
print("{:e} {:e}".format(hmax[j + 1], errors[j + 1]))
# Plot the actual data.
plt.loglog(hmax, errors, "-o")
# Compare with order curves.
plt.autoscale(False)
e0 = errors[0]
for order in range(4):
plt.loglog(
[hmax[0], hmax[-1]], [e0, e0 * (hmax[-1] / hmax[0]) ** order], color="0.7"
)
plt.xlabel("hmax")
plt.ylabel("||u-u_h||")
plt.show()
return
if __name__ == "__main__":
# mesh_sizes_ = [16, 32, 64, 128]
# _show_order_info(problem_sinsin, mesh_sizes_, None)
test_order(problem_sinsin, "supg")<|fim▁end|>
| |
<|file_name|>SPRITE_OVERLAP.py<|end_file_name|><|fim▁begin|># !/usr/bin/env python
"""Testing a sprite.
The ball should bounce off the sides of the window. You may resize the
window.
This test should just run without failing.
"""
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
import os
import unittest
from pyglet.gl import glClear
import pyglet.window
import pyglet.window.event
from pyglet import clock
from scene2d import Sprite, Image2d, FlatView
from scene2d.image import TintEffect
from scene2d.camera import FlatCamera
ball_png = os.path.join(os.path.dirname(__file__), 'ball.png')
class BouncySprite(Sprite):
def update(self):
# move, check bounds
p = self.properties
self.x += p['dx']
self.y += p['dy']
if self.left < 0:
self.left = 0
p['dx'] = -p['dx']
elif self.right > 320:
self.right = 320
p['dx'] = -p['dx']
if self.bottom < 0:
self.bottom = 0
p['dy'] = -p['dy']
elif self.top > 320:
self.top = 320
p['dy'] = -p['dy']
class SpriteOverlapTest(unittest.TestCase):
def test_sprite(self):
w = pyglet.window.Window(width=320, height=320)
image = Image2d.load(ball_png)
ball1 = BouncySprite(0, 0, 64, 64, image, properties=dict(dx=10, dy=5))
ball2 = BouncySprite(288, 0, 64, 64, image,
properties=dict(dx=-10, dy=5))
view = FlatView(0, 0, 320, 320, sprites=[ball1, ball2])
view.fx, view.fy = 160, 160
clock.set_fps_limit(60)
e = TintEffect((.5, 1, .5, 1))
while not w.has_exit:
clock.tick()
w.dispatch_events()
ball1.update()
ball2.update()
if ball1.overlaps(ball2):
if 'overlap' not in ball2.properties:<|fim▁hole|> ball2.add_effect(e)
elif 'overlap' in ball2.properties:
ball2.remove_effect(e)
del ball2.properties['overlap']
view.clear()
view.draw()
w.flip()
w.close()
unittest.main()<|fim▁end|>
|
ball2.properties['overlap'] = e
|
<|file_name|>atomic_386.go<|end_file_name|><|fim▁begin|>// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package runtime
<|fim▁hole|>
// The calls to nop are to keep these functions from being inlined.
// If they are inlined we have no guarantee that later rewrites of the
// code by optimizers will preserve the relative order of memory accesses.
//go:nosplit
func atomicload(ptr *uint32) uint32 {
nop()
return *ptr
}
//go:nosplit
func atomicloadp(ptr unsafe.Pointer) unsafe.Pointer {
nop()
return *(*unsafe.Pointer)(ptr)
}
//go:nosplit
func xadd64(ptr *uint64, delta int64) uint64 {
for {
old := *ptr
if cas64(ptr, old, old+uint64(delta)) {
return old + uint64(delta)
}
}
}
//go:nosplit
func xchg64(ptr *uint64, new uint64) uint64 {
for {
old := *ptr
if cas64(ptr, old, new) {
return old
}
}
}
//go:noescape
func xadd(ptr *uint32, delta int32) uint32
//go:noescape
func xchg(ptr *uint32, new uint32) uint32
// NO go:noescape annotation; see atomic_pointer.go.
func xchgp1(ptr unsafe.Pointer, new unsafe.Pointer) unsafe.Pointer
//go:noescape
func xchguintptr(ptr *uintptr, new uintptr) uintptr
//go:noescape
func atomicload64(ptr *uint64) uint64
//go:noescape
func atomicor8(ptr *uint8, val uint8)
//go:noescape
func cas64(ptr *uint64, old, new uint64) bool
//go:noescape
func atomicstore(ptr *uint32, val uint32)
//go:noescape
func atomicstore64(ptr *uint64, val uint64)
// NO go:noescape annotation; see atomic_pointer.go.
func atomicstorep1(ptr unsafe.Pointer, val unsafe.Pointer)<|fim▁end|>
|
import "unsafe"
|
<|file_name|>submit.py<|end_file_name|><|fim▁begin|># Author: Sungchul Choi, sc82.choi at gachon.ac.kr
# Version: 0.1
# Description
# 가천대학교 프로그래밍 입문 시간에 활용되는 "숙제 자동 채점 프로그램"의 Client 프로그램입니다.
#
# HUMAN KNOWLEDGE BELONGS TO THE WORLD. -- From the movie "Antitrust"
# Copyright (C) 2015 TeamLab@Gachon University
import argparse
import pickle
import os
import types
import requests
import json
from importlib.machinery import SourceFileLoader
import unittest
TOKEN_PICKLE_FILE_NAME = "access_token"
HOST = "theteamlab.io"
ASSIGNMENT_NAME = "product_mix_problem.py"
def getArgumentsParser(argv=None):
parser = argparse.ArgumentParser(
prog='An program for autograder of your assignement. Coded by TeamLab@Gachon University ',)
parser.add_argument("-get", help="Write your assignment name that you want to download")
parser.add_argument("-submit", help="Write your assignment name that you want to submit")
argumentValue = parser.parse_args(argv)
if not (argumentValue.get or argumentValue.submit):
parser.error('One of -submit or -get must be given')
return argumentValue;
def printInformationMessage(actionType, assignmentName):
if (actionType == "get"):
message = "== Getting templates | "
else:
message = "== Submmting solutions | "
print (message + assignmentName)
# Get JWT token to access REST API
def getToken():
if os.path.isfile(TOKEN_PICKLE_FILE_NAME):
try:
with open(TOKEN_PICKLE_FILE_NAME, 'rb') as accesstoken:
token_file = pickle.load(accesstoken)
return token_file['token'], token_file['username']
except EOFError:
print ("Existing access_token is NOT validated")
return None, None
else:
return None,None
def getLoginInformation():
login_id = input("Login ID: ")
login_password = input("Password :")
return [login_id, login_password]
def getAccessTokenFromServer(username, login_password):
headers = {'Content-type': 'application/json'}
payload = {"password":login_password, "username":username}
access_token_jwt = requests.post("http://"+HOST+"/api-token-auth/", json=payload, headers=headers)
if (access_token_jwt.ok) : return access_token_jwt.text
else: return None
def makeAccessTokenPickle(access_token, username):
pickle_file_Name = "access_token"
pcikleObject = open(pickle_file_Name,'wb')<|fim▁hole|> username_json = {'username' : username}
toekn_json = {'token' : access_token}
data =json.loads(json.dumps(toekn_json , ensure_ascii=False))
data.update(username_json)
pickle.dump(data, pcikleObject)
return pickle
def checkTokenReplacement(username):
replacment = 'a'
while replacment.lower() not in ['t','yes','y','true', 'n','no','f','false']:
message = ("Use token from last successful submission (%s)? (Y/n): " % username)
replacment = input(message)
if replacment.lower() in ['t','yes','y','true']:
return True
elif replacment.lower() in ['n','no','f','false']:
return False
else:
print ("Wrong Input")
return True
def getFileContents(fileName):
with open (fileName, "r", encoding="utf8") as contens_file:
contens = contens_file.read()
return contens
def getAssignmentTemplateFileFromServer(access_token, assignment_name):
payload = {
"assignment_name" : assignment_name,
}
accesstoken_dict = json.loads(access_token)
headers = {'Authorization': 'JWT ' + accesstoken_dict['token']}
result = requests.post("http://"+HOST+"/autograder/assignments/%s/submissionready" % assignment_name, json=payload, headers=headers)
return result
def submitAssignmentFileToServer(access_token, assignment_file_name):
assignment_contents = getFileContents(assignment_file_name)
[basename, ext] = assignment_file_name.split(".")
payload = {
"template_file_name" : assignment_file_name,
"template_file_contents" : assignment_contents,
}
accesstoken_dict = json.loads(access_token)
headers = {'Authorization': 'JWT ' + accesstoken_dict['token']}
result = requests.post("http://"+HOST+"/autograder/assignments/%s/submission" % basename, json=payload, headers=headers)
#TODO Add exception handling
return result
def makeTemplateFile(result_text):
try:
data = json.loads(result_text, strict=False)
with open(data['template_file_name'], 'w') as f:
f.write(data['template_file_contents'])
print ("%s file is created for your %s assignment" % (data['template_file_name'], data['assignment_name']))
return True
except IOError:
print ("Unavailable making the template file: %s" % data['template_file_name'])
return False
except:
return False
def removeExpiredAccessKey():
if os.path.isfile(TOKEN_PICKLE_FILE_NAME):
os.remove(TOKEN_PICKLE_FILE_NAME)
else: ## Show an error ##
print("Error: %s file not found" % TOKEN_PICKLE_FILE_NAME)
def printTestResults(text):
json_data = json.loads(text)
a = "-"*20; b = "-"*10; c = "-"*20
print ( '%20s | %10s | %20s' % (a,b,c) )
print ( '%20s | %10s | %20s' % ("Function Name","Passed?","Feedback") )
print ( '%20s | %10s | %20s' % (a,b,c) )
for result in json_data:
if result['test_result'] == ('S'):
passed = 'PASS'
feedback = 'Good Job'
else:
passed = 'Not Yet'
if result['test_result'] == ('E'):
feedback = 'Check Your Logic'
if result['test_result'] == ('F'):
feedback = 'Check Your Grammar'
print ( '%20s | %10s | %20s' % (result['assignment_detail'],passed,feedback ) )
print ( '%20s | %10s | %20s' % (a,b,c) )
def main():
# Check Argument
# To download an assignment template file : -get <ASSIGNMENT_NAME>
# To submit an assignment template file : -submit <ASSIGNMENT_NAME>
# [actionType, assignment_name] = checkArguements(argumentValue)
actionType = "submit"
assignment_name = ASSIGNMENT_NAME
# Check User Login Information
printInformationMessage(actionType, assignment_name)
# Check Your Access Token
[access_token, username] = getToken()
# Get New Access Token
if access_token == None:
while (access_token == None):
[username, login_password] = getLoginInformation()
access_token = getAccessTokenFromServer(username, login_password)
if (access_token == None): print ("Wrong User ID or password. Please, input again.")
else:
answer = checkTokenReplacement(username)
if (answer == False):
access_token = None
while (access_token == None):
[username, login_password] = getLoginInformation()
access_token = getAccessTokenFromServer(username, login_password)
if (access_token == None): print ("Wrong User ID or password. Please, input again.")
# Make access pickle before end of program
makeAccessTokenPickle(access_token, username)
if (actionType == "get"):
result = getAssignmentTemplateFileFromServer(access_token, assignment_name)
if (result.status_code == 200):
is_file_created = makeTemplateFile(result.text)
if (is_file_created == True):
print ("Thank you for using the program. Enjoy Your Assignment - From TeamLab")
elif (result.status_code == 403):
print (result.text)
removeExpiredAccessKey()
print ("Your expired access key removed. Please, try again")
elif (result.status_code == 500):
print (result.text)
print ("Unexpected error exists. Please contact [email protected] ")
elif (actionType == "submit"):
result = submitAssignmentFileToServer(access_token, assignment_name)
if (result.status_code == 200):
printTestResults(result.text)
# Make access pickle before end of program
elif (result.status_code == 403):
print (result.text)
removeExpiredAccessKey()
print ("Your expired access key removed. Please, try again")
elif (result.status_code == 500):
print ("Unexpected error exists. Your code does not seem to work. Please, Run your code. \n python {0} ".format(ASSIGNMENT_NAME) )
if __name__ == "__main__":
main()<|fim▁end|>
| |
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>from django import forms
METHOD_CHOICES = (
("GET", "GET"),
("POST", "POST")
)
<|fim▁hole|> method = forms.ChoiceField(choices=METHOD_CHOICES, initial="GET")
path = forms.CharField(initial="/api/myself/")
data = forms.CharField(widget=forms.Textarea, required=False)<|fim▁end|>
|
class ApiCallForm(forms.Form):
|
<|file_name|>const.go<|end_file_name|><|fim▁begin|>// Copyright 2019 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package plugin
// Kind presents the kind of plugin.
type Kind uint8
<|fim▁hole|> // Audit indicates it is a Audit plugin.
Audit Kind = 1 + iota
// Authentication indicate it is a Authentication plugin.
Authentication
// Schema indicate a plugin that can change TiDB schema.
Schema
// Daemon indicate a plugin that can run as daemon task.
Daemon
)
func (k Kind) String() (str string) {
switch k {
case Audit:
str = "Audit"
case Authentication:
str = "Authentication"
case Schema:
str = "Schema"
case Daemon:
str = "Daemon"
}
return
}
// State present the state of plugin.
type State uint8
const (
// Uninitialized indicates plugin is uninitialized.
Uninitialized State = iota
// Ready indicates plugin is ready to work.
Ready
// Dying indicates plugin will be close soon.
Dying
// Disable indicate plugin is disabled.
Disable
)
func (s State) String() (str string) {
switch s {
case Uninitialized:
str = "Uninitialized"
case Ready:
str = "Ready"
case Dying:
str = "Dying"
case Disable:
str = "Disable"
}
return
}<|fim▁end|>
|
const (
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>Transactional workflow control for Django models.
"""<|fim▁end|>
|
"""
|
<|file_name|>account_config.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2012 Andrea Cometa.
# Email: [email protected]
# Web site: http://www.andreacometa.it
# Copyright (C) 2012 Agile Business Group sagl (<http://www.agilebg.com>)
# Copyright (C) 2012 Domsense srl (<http://www.domsense.com>)
# Copyright (C) 2012 Associazione OpenERP Italia
# (<http://www.odoo-italia.org>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class AccountConfigSettings(models.TransientModel):
_inherit = 'account.config.settings'
due_cost_service_id = fields.Many2one(
related='company_id.due_cost_service_id',
help='Default Service for RiBa Due Cost (collection fees) on invoice',
domain=[('type', '=', 'service')])
def default_get(self, cr, uid, fields, context=None):
res = super(AccountConfigSettings, self).default_get(
cr, uid, fields, context)
if res:
user = self.pool['res.users'].browse(cr, uid, uid, context)
res['due_cost_service_id'] = user.company_id.due_cost_service_id.id
return res
class ResCompany(models.Model):
<|fim▁hole|><|fim▁end|>
|
_inherit = 'res.company'
due_cost_service_id = fields.Many2one('product.product')
|
<|file_name|>drivers.py<|end_file_name|><|fim▁begin|># coding=utf-8
<|fim▁hole|>import urllib
import requests
class Driver(object):
def __init__(self):
self.driver_type = self.__class__.__name__
# Get credentials from conf files for CMDB
pass
def get_driver_type(self):
return self.driver_type
def get_ci(self, ci):
pass
def set_ci(self, ci):
pass
class Itop(Driver):
def get_ci(self, ci):
print("Get from itop")
return True
def set_ci(self, ci):
username = config.alexandria.conf_file.get_driver_parameters("itop", "loginItop")
password = config.alexandria.conf_file.get_driver_parameters("itop", "passwordItop")
config.logger.debug("login : {}, password : {}".format(
username,
password
)
)
# Craft request body and header
urlbase = config.alexandria.conf_file.get_driver_parameters("itop", "endpoint")
request = '{"operation":"core/create","comment":"Synchronization from Alexandria","class":"Server","output_fields":"id,name,ram", "fields":{"org_id": "3","name":"' + ci.data["Name"] + '","ram":"' + format((ci.data["MemorySummary"])["TotalSystemMemoryGiB"]) + '","serialnumber":"' + ci.data["SerialNumber"] + '"}}'
urlparam = {'version' : '1.0',
'auth_user' : username,
'auth_pwd' : password,
'json_data' : request
}
#header = {'Content-type': 'application/json'}
url = urlbase + '?' + urllib.urlencode(urlparam)
config.logger.debug(url)
#=======================================================================
# answer = requests.post(url,
# headers=header,
# verify="False"
# )
#=======================================================================
answer = requests.post(url,
auth=(username,password)
)
config.logger.debug(answer.status_code)
config.logger.debug(answer.text)
class Redfish(Driver):
def get_ci(self,ci):
print("Get from redfish")
import redfish
print(ci.ip_mgmt + " - " + ci.login + " - " + ci.password)
#remote_mgmt = redfish.connect(ci.ip_mgmt, ci.login, ci.password, verify_cert=False)
remote_mgmt = redfish.connect(ci.ip_mgmt, ci.login, ci.password, simulator=True, enforceSSL=False)
ci.ci_type = remote_mgmt.Systems.systems_list[0].get_parameter("@odata.type")
ci.data = remote_mgmt.Systems.systems_list[0].get_parameters()
#print("Redfish API version : {} \n".format(remote_mgmt.get_api_version()))
return True
def set_ci(self, ci):
print "Push to Redfish"
return True
class Ironic(Driver):
pass
class Mondorescue(Driver):
pass
class Fakecmdb(Driver):
def set_ci(self, ci):
# Determine ci type so we can do the proper action.
pp = pprint.PrettyPrinter(indent=4)
if ci.ci_type == "Manager":
print("We are in Fakecmdb driver !")
pp.pprint(ci.data)
# Simply write a json file with ci.data content.
with open("Fakecmdb.json", "w") as jsonfile:
json.dump(ci.data, jsonfile, indent=4)
jsonfile.close()
#
#=======================================================================
class Fakeprovider(Driver):
def get_ci(self, ci):
# Simulate a driver that will provide Manager data.
# TODO a connect method must be implemented
# Assuming the connection is ok.
# Now create a copy of manager model from reference model.
#ci.ci_type = "Manager"
#ci.data = config.alexandria.model.get_model("Manager")
# Update the structure with data
# TODO : think to encapsulate to not edit ci.data directly.
# This could be also a way to check source of truth.
# If data provided by our driver is not the source of truth
# then discard it.
#ci.data["ManagerType"] = "BMC"
#ci.data["Model"] = "Néné Manager"
#ci.data["FirmwareVersion"] = "1.00"
#if ci.data is config.alexandria.model.Manager:
# print "identical"
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(ci.ci_type)
class DriverCollection(list):
pass<|fim▁end|>
|
import pprint
import config
import json
|
<|file_name|>features.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package features
import (
"fmt"
"sort"
"strconv"
"strings"
utilfeature "k8s.io/apiserver/pkg/util/feature"
"k8s.io/kubernetes/pkg/util/version"
)
const (
// HighAvailability is alpha in v1.9
HighAvailability = "HighAvailability"
// CoreDNS is alpha in v1.9
CoreDNS = "CoreDNS"
// SelfHosting is beta in v1.8
SelfHosting = "SelfHosting"
// StoreCertsInSecrets is alpha in v1.8
StoreCertsInSecrets = "StoreCertsInSecrets"
// SupportIPVSProxyMode is alpha in v1.8
SupportIPVSProxyMode = "SupportIPVSProxyMode"
)<|fim▁hole|>var v190 = version.MustParseSemantic("v1.9.0-alpha.1")
// InitFeatureGates are the default feature gates for the init command
var InitFeatureGates = FeatureList{
SelfHosting: {FeatureSpec: utilfeature.FeatureSpec{Default: false, PreRelease: utilfeature.Beta}},
StoreCertsInSecrets: {FeatureSpec: utilfeature.FeatureSpec{Default: false, PreRelease: utilfeature.Alpha}},
HighAvailability: {FeatureSpec: utilfeature.FeatureSpec{Default: false, PreRelease: utilfeature.Alpha}, MinimumVersion: v190},
SupportIPVSProxyMode: {FeatureSpec: utilfeature.FeatureSpec{Default: false, PreRelease: utilfeature.Alpha}, MinimumVersion: v190},
CoreDNS: {FeatureSpec: utilfeature.FeatureSpec{Default: false, PreRelease: utilfeature.Alpha}, MinimumVersion: v190},
}
// Feature represents a feature being gated
type Feature struct {
utilfeature.FeatureSpec
MinimumVersion *version.Version
}
// FeatureList represents a list of feature gates
type FeatureList map[string]Feature
// ValidateVersion ensures that a feature gate list is compatible with the chosen kubernetes version
func ValidateVersion(allFeatures FeatureList, requestedFeatures map[string]bool, requestedVersion string) error {
if requestedVersion == "" {
return nil
}
parsedExpVersion, err := version.ParseSemantic(requestedVersion)
if err != nil {
return fmt.Errorf("Error parsing version %s: %v", requestedVersion, err)
}
for k := range requestedFeatures {
if minVersion := allFeatures[k].MinimumVersion; minVersion != nil {
if !parsedExpVersion.AtLeast(minVersion) {
return fmt.Errorf(
"the requested kubernetes version (%s) is incompatible with the %s feature gate, which needs %s as a minimum",
requestedVersion, k, minVersion)
}
}
}
return nil
}
// Enabled indicates whether a feature name has been enabled
func Enabled(featureList map[string]bool, featureName string) bool {
return featureList[string(featureName)]
}
// Supports indicates whether a feature name is supported on the given
// feature set
func Supports(featureList FeatureList, featureName string) bool {
for k := range featureList {
if featureName == string(k) {
return true
}
}
return false
}
// Keys returns a slice of feature names for a given feature set
func Keys(featureList FeatureList) []string {
var list []string
for k := range featureList {
list = append(list, string(k))
}
return list
}
// KnownFeatures returns a slice of strings describing the FeatureList features.
func KnownFeatures(f *FeatureList) []string {
var known []string
for k, v := range *f {
pre := ""
if v.PreRelease != utilfeature.GA {
pre = fmt.Sprintf("%s - ", v.PreRelease)
}
known = append(known, fmt.Sprintf("%s=true|false (%sdefault=%t)", k, pre, v.Default))
}
sort.Strings(known)
return known
}
// NewFeatureGate parse a string of the form "key1=value1,key2=value2,..." into a
// map[string]bool of known keys or returns an error.
func NewFeatureGate(f *FeatureList, value string) (map[string]bool, error) {
featureGate := map[string]bool{}
for _, s := range strings.Split(value, ",") {
if len(s) == 0 {
continue
}
arr := strings.SplitN(s, "=", 2)
if len(arr) != 2 {
return nil, fmt.Errorf("missing bool value for feature-gate key:%s", s)
}
k := strings.TrimSpace(arr[0])
v := strings.TrimSpace(arr[1])
if !Supports(*f, k) {
return nil, fmt.Errorf("unrecognized feature-gate key: %s", k)
}
boolValue, err := strconv.ParseBool(v)
if err != nil {
return nil, fmt.Errorf("invalid value %v for feature-gate key: %s, use true|false instead", v, k)
}
featureGate[k] = boolValue
}
return featureGate, nil
}<|fim▁end|>
| |
<|file_name|>calibrateCamera2.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# @Time : 2017/7/27 18:04
# @Author : play4fun
# @File : calibrateCamera2.py
# @Software: PyCharm
"""
calibrateCamera2.py:
"""
import cv2
import numpy as np
def draw_axis(img, charuco_corners, charuco_ids, board):
vecs = np.load("./calib.npz") # I already calibrated the camera
mtx, dist, _, _ = [vecs[i] for i in ('mtx', 'dist', 'rvecs', 'tvecs')]
ret, rvec, tvec = cv2.aruco.estimatePoseCharucoBoard(
charuco_corners, charuco_ids, board, mtx, dist)
if ret is not None and ret is True:
cv2.aruco.drawAxis(img, mtx, dist, rvec, tvec, 0.1)
def get_image(camera):
ret, img = camera.read()
return img
<|fim▁hole|> ret = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
return ret
def main():
camera = cv2.VideoCapture(0)
img = get_image(camera)
while True:
cv2.imshow('calibration', img)
cv2.waitKey(10)
img = get_image(camera)
gray = make_grayscale(img)
corners, ids, rejected = cv2.aruco.detectMarkers(gray, aruco_dict,
corners, ids)
cv2.aruco.drawDetectedMarkers(img, corners, ids)
if ids is not None and corners is not None \
and len(ids) > 0 and len(ids) == len(corners):
diamond_corners, diamond_ids = \
cv2.aruco.detectCharucoDiamond(img, corners, ids,
0.05 / 0.03, cameraMatrix=mtx,
distCoeffs=dist)
cv2.aruco.drawDetectedDiamonds(img, diamond_corners, diamond_ids)
'''if diamond_ids is not None and len(diamond_ids) >= 4:
break'''
board = cv2.aruco.CharucoBoard_create(9, 6, 0.05, 0.03,
aruco_dict)
if diamond_corners is not None and diamond_ids is not None \
and len(diamond_corners) == len(diamond_ids):
count, char_corners, char_ids = \
cv2.aruco.interpolateCornersCharuco(diamond_corners,
diamond_ids, gray,
board)
if count >= 3:
draw_axis(img, char_corners, char_ids, board)
if __name__ == '__main__':
main()<|fim▁end|>
|
def make_grayscale(img):
|
<|file_name|>sql.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from "@angular/core";
import { SQLComponent } from "./sql.component";
import { sqlRouting } from "./sql.routes";
@NgModule({
imports: [ sqlRouting ],
declarations: [ SQLComponent ]
})
<|fim▁hole|><|fim▁end|>
|
export class SQLModule { };
|
<|file_name|>flat-mesh-intersection.js<|end_file_name|><|fim▁begin|>var _ = require('lodash');
var doFacesIntersect = require('./face-intersection').doFacesIntersect;
function flatMeshIntersects(mesh){<|fim▁hole|> });
for (var j = i + 1; j < numFaces; ++j) {
var secondFace = _.map(mesh.faces[j].vertices, function(vertexIndex){
return mesh.vertices[vertexIndex];
});
if (doFacesIntersect(firstFace, secondFace)) {
return true;
}
}
}
return false;
}
exports.flatMeshIntersects = flatMeshIntersects;<|fim▁end|>
|
var numFaces = mesh.faces.length;
for (var i = 0; i < numFaces; ++i) {
var firstFace = _.map(mesh.faces[i].vertices, function(vertexIndex){
return mesh.vertices[vertexIndex];
|
<|file_name|>gssapi.py<|end_file_name|><|fim▁begin|>#
# (C) Copyright 2008 Jelmer Vernooij <[email protected]>
# (C) Copyright 2011 Jacek Konieczny <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License Version
# 2.1 as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
"""GSSAPI authentication mechanism for PyXMPP SASL implementation.
Normative reference:
- `RFC 4752 <http://www.ietf.org/rfc/rfc4752.txt>`__
"""
__docformat__ = "restructuredtext en"
import base64
import kerberos
import logging
from .core import ClientAuthenticator, Response, Success
from .core import sasl_mechanism
logger = logging.getLogger("pyxmpp2.sasl.gssapi")
@sasl_mechanism("GSSAPI", 75)
class GSSAPIClientAuthenticator(ClientAuthenticator):
"""Provides client-side GSSAPI SASL (Kerberos 5) authentication."""<|fim▁hole|> self.username = None
self._gss = None
self.step = None
self.authzid = None
def start(self, username, authzid):
self.username = username
self.authzid = authzid
_unused, self._gss = kerberos.authGSSClientInit(authzid or
"{0}@{1}".format("xmpp",
self.password_manager.get_serv_host()))
self.step = 0
return self.challenge("")
def challenge(self, challenge):
if self.step == 0:
ret = kerberos.authGSSClientStep(self._gss,
base64.b64encode(challenge))
if ret != kerberos.AUTH_GSS_CONTINUE:
self.step = 1
elif self.step == 1:
ret = kerberos.authGSSClientUnwrap(self._gss,
base64.b64encode(challenge))
response = kerberos.authGSSClientResponse(self._gss)
ret = kerberos.authGSSClientWrap(self._gss, response, self.username)
response = kerberos.authGSSClientResponse(self._gss)
if response is None:
return Response("")
else:
return Response(base64.b64decode(response))
def finish(self, data):
self.username = kerberos.authGSSClientUserName(self._gss)
logger.debug("Authenticated as {0!r}".format(
kerberos.authGSSClientUserName(self._gss)))
return Success(self.username, None, self.authzid)
# vi: sts=4 et sw=4<|fim▁end|>
|
def __init__(self, password_manager):
ClientAuthenticator.__init__(self, password_manager)
self.password_manager = password_manager
|
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from django.core.management import execute_manager
try:
import settings # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in"
" the directory containing %r. It appears you've customized "
"things.\nYou'll have to run django-admin.py, passing it your"
" settings module.\n(If the file settings.py does indeed exist,"
" it's causing an ImportError somehow.)\n" % __file__)
sys.exit(1)
if __name__ == "__main__":<|fim▁hole|><|fim▁end|>
|
execute_manager(settings)
|
<|file_name|>nm.go<|end_file_name|><|fim▁begin|>// Copyright 2016 syzkaller project authors. All rights reserved.
// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.
package symbolizer
import (
"bufio"
"bytes"
"os/exec"
"strconv"
)
type Symbol struct {
Addr uint64
Size int
}
// ReadSymbols returns list of text symbols in the binary bin.
func ReadSymbols(bin string) (map[string][]Symbol, error) {
cmd := exec.Command("nm", "-nS", bin)
stdout, err := cmd.StdoutPipe()
if err != nil {
return nil, err
}
defer stdout.Close()
if err := cmd.Start(); err != nil {
return nil, err
}
defer cmd.Wait()
symbols := make(map[string][]Symbol)
s := bufio.NewScanner(stdout)
text := [][]byte{[]byte(" t "), []byte(" T ")}
for s.Scan() {
// A line looks as: "ffffffff8104db90 0000000000000059 t snb_uncore_msr_enable_box"
ln := s.Bytes()
if bytes.Index(ln, text[0]) == -1 && bytes.Index(ln, text[1]) == -1 {
continue
}
sp1 := bytes.IndexByte(ln, ' ')
if sp1 == -1 {
continue
}
sp2 := bytes.IndexByte(ln[sp1+1:], ' ')<|fim▁hole|> if !bytes.HasPrefix(ln[sp2:], text[0]) && !bytes.HasPrefix(ln[sp2:], text[1]) {
continue
}
addr, err := strconv.ParseUint(string(ln[:sp1]), 16, 64)
if err != nil {
continue
}
size, err := strconv.ParseUint(string(ln[sp1+1:sp2]), 16, 64)
if err != nil {
continue
}
name := string(ln[sp2+len(text[0]):])
// Note: sizes reported by kernel do not match nm.
// Kernel probably subtracts address of this symbol from address of the next symbol.
// We could do the same, but for now we just round up size to 16.
symbols[name] = append(symbols[name], Symbol{addr, int(size+15) / 16 * 16})
}
if err := s.Err(); err != nil {
return nil, err
}
return symbols, nil
}<|fim▁end|>
|
if sp2 == -1 {
continue
}
sp2 += sp1 + 1
|
<|file_name|>extract_tokens.rs<|end_file_name|><|fim▁begin|>use super::{ExtractedLexicalGrammar, ExtractedSyntaxGrammar, InternedGrammar};
use crate::generate::grammars::{ExternalToken, Variable, VariableType};
use crate::generate::rules::{MetadataParams, Rule, Symbol, SymbolType};
use anyhow::{anyhow, Result};
use std::collections::HashMap;
use std::mem;
pub(super) fn extract_tokens(
mut grammar: InternedGrammar,
) -> Result<(ExtractedSyntaxGrammar, ExtractedLexicalGrammar)> {
let mut extractor = TokenExtractor {
current_variable_name: String::new(),
current_variable_token_count: 0,
extracted_variables: Vec::new(),
extracted_usage_counts: Vec::new(),
};
for mut variable in grammar.variables.iter_mut() {
extractor.extract_tokens_in_variable(&mut variable);
}
for mut variable in grammar.external_tokens.iter_mut() {
extractor.extract_tokens_in_variable(&mut variable);
}
let mut lexical_variables = Vec::with_capacity(extractor.extracted_variables.len());
for variable in extractor.extracted_variables {
lexical_variables.push(Variable {
name: variable.name,
kind: variable.kind,
rule: variable.rule,
});
}
// If a variable's entire rule was extracted as a token and that token didn't
// appear within any other rule, then remove that variable from the syntax
// grammar, giving its name to the token in the lexical grammar. Any symbols
// that pointed to that variable will need to be updated to point to the
// variable in the lexical grammar. Symbols that pointed to later variables
// will need to have their indices decremented.
let mut variables = Vec::new();
let mut symbol_replacer = SymbolReplacer {
replacements: HashMap::new(),
};
for (i, variable) in grammar.variables.into_iter().enumerate() {
if let Rule::Symbol(Symbol {
kind: SymbolType::Terminal,
index,
}) = variable.rule
{
if i > 0 && extractor.extracted_usage_counts[index] == 1 {
let mut lexical_variable = &mut lexical_variables[index];
lexical_variable.kind = variable.kind;
lexical_variable.name = variable.name;
symbol_replacer.replacements.insert(i, index);
continue;
}
}
variables.push(variable);
}
for variable in variables.iter_mut() {
variable.rule = symbol_replacer.replace_symbols_in_rule(&variable.rule);
}
let expected_conflicts = grammar
.expected_conflicts
.into_iter()
.map(|conflict| {
let mut result: Vec<_> = conflict
.iter()
.map(|symbol| symbol_replacer.replace_symbol(*symbol))
.collect();
result.sort_unstable();
result.dedup();
result
})
.collect();
let supertype_symbols = grammar
.supertype_symbols
.into_iter()
.map(|symbol| symbol_replacer.replace_symbol(symbol))
.collect();
let variables_to_inline = grammar
.variables_to_inline
.into_iter()
.map(|symbol| symbol_replacer.replace_symbol(symbol))
.collect();
let mut separators = Vec::new();
let mut extra_symbols = Vec::new();
for rule in grammar.extra_symbols {
if let Rule::Symbol(symbol) = rule {
extra_symbols.push(symbol_replacer.replace_symbol(symbol));
} else {
if let Some(index) = lexical_variables.iter().position(|v| v.rule == rule) {
extra_symbols.push(Symbol::terminal(index));
} else {
separators.push(rule);
}
}
}
let mut external_tokens = Vec::new();
for external_token in grammar.external_tokens {
let rule = symbol_replacer.replace_symbols_in_rule(&external_token.rule);
if let Rule::Symbol(symbol) = rule {
if symbol.is_non_terminal() {
return Err(anyhow!(
"Rule '{}' cannot be used as both an external token and a non-terminal rule",
&variables[symbol.index].name,
));
}
if symbol.is_external() {
external_tokens.push(ExternalToken {
name: external_token.name,
kind: external_token.kind,
corresponding_internal_token: None,
})
} else {
external_tokens.push(ExternalToken {
name: lexical_variables[symbol.index].name.clone(),
kind: external_token.kind,
corresponding_internal_token: Some(symbol),
})
}
} else {
return Err(anyhow!(
"Non-symbol rules cannot be used as external tokens"
));
}
}
let mut word_token = None;
if let Some(token) = grammar.word_token {
let token = symbol_replacer.replace_symbol(token);
if token.is_non_terminal() {
return Err(anyhow!(
"Non-terminal symbol '{}' cannot be used as the word token",
&variables[token.index].name
));
}
word_token = Some(token);
}
Ok((
ExtractedSyntaxGrammar {
variables,
expected_conflicts,
extra_symbols,
variables_to_inline,
supertype_symbols,
external_tokens,
word_token,
precedence_orderings: grammar.precedence_orderings,
},
ExtractedLexicalGrammar {
variables: lexical_variables,
separators,
},
))
}
struct TokenExtractor {
current_variable_name: String,
current_variable_token_count: usize,
extracted_variables: Vec<Variable>,
extracted_usage_counts: Vec<usize>,
}
struct SymbolReplacer {
replacements: HashMap<usize, usize>,
}
impl TokenExtractor {
fn extract_tokens_in_variable(&mut self, variable: &mut Variable) {
self.current_variable_name.clear();
self.current_variable_name.push_str(&variable.name);
self.current_variable_token_count = 0;
let mut rule = Rule::Blank;
mem::swap(&mut rule, &mut variable.rule);
variable.rule = self.extract_tokens_in_rule(&rule);
}
fn extract_tokens_in_rule(&mut self, input: &Rule) -> Rule {
match input {
Rule::String(name) => self.extract_token(input, Some(name)).into(),
Rule::Pattern(..) => self.extract_token(input, None).into(),
Rule::Metadata { params, rule } => {
if params.is_token {
let mut params = params.clone();
params.is_token = false;
let mut string_value = None;
if let Rule::String(value) = rule.as_ref() {
string_value = Some(value);
}
let rule_to_extract = if params == MetadataParams::default() {
rule.as_ref()
} else {
input
};
self.extract_token(rule_to_extract, string_value).into()
} else {
Rule::Metadata {
params: params.clone(),
rule: Box::new(self.extract_tokens_in_rule((&rule).clone())),
}
}
}
Rule::Repeat(content) => Rule::Repeat(Box::new(self.extract_tokens_in_rule(content))),
Rule::Seq(elements) => Rule::Seq(
elements
.iter()
.map(|e| self.extract_tokens_in_rule(e))
.collect(),
),
Rule::Choice(elements) => Rule::Choice(
elements
.iter()
.map(|e| self.extract_tokens_in_rule(e))
.collect(),
),
_ => input.clone(),
}
}
fn extract_token(&mut self, rule: &Rule, string_value: Option<&String>) -> Symbol {
for (i, variable) in self.extracted_variables.iter_mut().enumerate() {
if variable.rule == *rule {
self.extracted_usage_counts[i] += 1;
return Symbol::terminal(i);
}
}
let index = self.extracted_variables.len();
let variable = if let Some(string_value) = string_value {
Variable {
name: string_value.clone(),
kind: VariableType::Anonymous,
rule: rule.clone(),
}
} else {
self.current_variable_token_count += 1;
Variable {
name: format!(
"{}_token{}",
&self.current_variable_name, self.current_variable_token_count
),
kind: VariableType::Auxiliary,
rule: rule.clone(),
}
};
self.extracted_variables.push(variable);
self.extracted_usage_counts.push(1);
Symbol::terminal(index)
}
}
// Rewrites symbol references after token extraction: non-terminals that were
// moved into the lexical grammar become terminals, and surviving
// non-terminals have their indices compacted.
impl SymbolReplacer {
    // Recursively applies `replace_symbol` to every symbol in `rule`,
    // leaving the rule's structure otherwise unchanged.
    fn replace_symbols_in_rule(&mut self, rule: &Rule) -> Rule {
        match rule {
            Rule::Symbol(symbol) => self.replace_symbol(*symbol).into(),
            Rule::Choice(elements) => Rule::Choice(
                elements
                    .iter()
                    .map(|e| self.replace_symbols_in_rule(e))
                    .collect(),
            ),
            Rule::Seq(elements) => Rule::Seq(
                elements
                    .iter()
                    .map(|e| self.replace_symbols_in_rule(e))
                    .collect(),
            ),
            Rule::Repeat(content) => Rule::Repeat(Box::new(self.replace_symbols_in_rule(content))),
            Rule::Metadata { rule, params } => Rule::Metadata {
                params: params.clone(),
                rule: Box::new(self.replace_symbols_in_rule(rule)),
            },
            // Leaf rules contain no symbols to replace.
            _ => rule.clone(),
        }
    }
    // Maps a pre-extraction symbol to its post-extraction equivalent.
    fn replace_symbol(&self, symbol: Symbol) -> Symbol {
        // Terminals and externals are unaffected by the extraction.
        if !symbol.is_non_terminal() {
            return symbol;
        }
        // This non-terminal was moved wholesale into the lexical grammar.
        if let Some(replacement) = self.replacements.get(&symbol.index) {
            return Symbol::terminal(*replacement);
        }
        // Every removed variable that preceded this one shifts its index
        // down by one.
        let mut adjusted_index = symbol.index;
        for (replaced_index, _) in self.replacements.iter() {
            if *replaced_index < symbol.index {
                adjusted_index -= 1;
            }
        }
        return Symbol::non_terminal(adjusted_index);
    }
}
#[cfg(test)]
mod test {
use super::*;
use crate::generate::grammars::VariableType;
#[test]
fn test_extraction() {
let (syntax_grammar, lexical_grammar) = extract_tokens(build_grammar(vec![
Variable::named(
"rule_0",
Rule::repeat(Rule::seq(vec![
Rule::string("a"),
Rule::pattern("b"),
Rule::choice(vec![
Rule::non_terminal(1),
Rule::non_terminal(2),
Rule::token(Rule::repeat(Rule::choice(vec![
Rule::string("c"),
Rule::string("d"),
]))),
]),
])),
),
Variable::named("rule_1", Rule::pattern("e")),
Variable::named("rule_2", Rule::pattern("b")),
Variable::named(
"rule_3",
Rule::seq(vec![Rule::non_terminal(2), Rule::Blank]),
),
]))
.unwrap();
assert_eq!(
syntax_grammar.variables,
vec![
Variable::named(
"rule_0",
Rule::repeat(Rule::seq(vec![
// The string "a" was replaced by a symbol referencing the lexical grammar
Rule::terminal(0),
// The pattern "b" was replaced by a symbol referencing the lexical grammar
Rule::terminal(1),<|fim▁hole|> // the lexical grammar.
Rule::terminal(3),
// The symbol referencing `rule_2` had its index decremented because
// `rule_1` was moved to the lexical grammar.
Rule::non_terminal(1),
// The rule wrapped in `token` was replaced by a symbol referencing
// the lexical grammar.
Rule::terminal(2),
])
]))
),
// The pattern "e" was only used in once place: as the definition of `rule_1`,
// so that rule was moved to the lexical grammar. The pattern "b" appeared in
// two places, so it was not moved into the lexical grammar.
Variable::named("rule_2", Rule::terminal(1)),
Variable::named(
"rule_3",
Rule::seq(vec![Rule::non_terminal(1), Rule::Blank,])
),
]
);
assert_eq!(
lexical_grammar.variables,
vec![
Variable::anonymous("a", Rule::string("a")),
Variable::auxiliary("rule_0_token1", Rule::pattern("b")),
Variable::auxiliary(
"rule_0_token2",
Rule::repeat(Rule::choice(vec![Rule::string("c"), Rule::string("d"),]))
),
Variable::named("rule_1", Rule::pattern("e")),
]
);
}
#[test]
fn test_start_rule_is_token() {
let (syntax_grammar, lexical_grammar) =
extract_tokens(build_grammar(vec![Variable::named(
"rule_0",
Rule::string("hello"),
)]))
.unwrap();
assert_eq!(
syntax_grammar.variables,
vec![Variable::named("rule_0", Rule::terminal(0)),]
);
assert_eq!(
lexical_grammar.variables,
vec![Variable::anonymous("hello", Rule::string("hello")),]
)
}
#[test]
fn test_extracting_extra_symbols() {
let mut grammar = build_grammar(vec![
Variable::named("rule_0", Rule::string("x")),
Variable::named("comment", Rule::pattern("//.*")),
]);
grammar.extra_symbols = vec![Rule::string(" "), Rule::non_terminal(1)];
let (syntax_grammar, lexical_grammar) = extract_tokens(grammar).unwrap();
assert_eq!(syntax_grammar.extra_symbols, vec![Symbol::terminal(1),]);
assert_eq!(lexical_grammar.separators, vec![Rule::string(" "),]);
}
#[test]
fn test_extract_externals() {
let mut grammar = build_grammar(vec![
Variable::named(
"rule_0",
Rule::seq(vec![
Rule::external(0),
Rule::string("a"),
Rule::non_terminal(1),
Rule::non_terminal(2),
]),
),
Variable::named("rule_1", Rule::string("b")),
Variable::named("rule_2", Rule::string("c")),
]);
grammar.external_tokens = vec![
Variable::named("external_0", Rule::external(0)),
Variable::anonymous("a", Rule::string("a")),
Variable::named("rule_2", Rule::non_terminal(2)),
];
let (syntax_grammar, _) = extract_tokens(grammar).unwrap();
assert_eq!(
syntax_grammar.external_tokens,
vec![
ExternalToken {
name: "external_0".to_string(),
kind: VariableType::Named,
corresponding_internal_token: None,
},
ExternalToken {
name: "a".to_string(),
kind: VariableType::Anonymous,
corresponding_internal_token: Some(Symbol::terminal(0)),
},
ExternalToken {
name: "rule_2".to_string(),
kind: VariableType::Named,
corresponding_internal_token: Some(Symbol::terminal(2)),
},
]
);
}
#[test]
fn test_error_on_external_with_same_name_as_non_terminal() {
let mut grammar = build_grammar(vec![
Variable::named(
"rule_0",
Rule::seq(vec![Rule::non_terminal(1), Rule::non_terminal(2)]),
),
Variable::named(
"rule_1",
Rule::seq(vec![Rule::non_terminal(2), Rule::non_terminal(2)]),
),
Variable::named("rule_2", Rule::string("a")),
]);
grammar.external_tokens = vec![Variable::named("rule_1", Rule::non_terminal(1))];
match extract_tokens(grammar) {
Err(e) => {
assert_eq!(e.to_string(), "Rule 'rule_1' cannot be used as both an external token and a non-terminal rule");
}
_ => {
panic!("Expected an error but got no error");
}
}
}
    // Test helper: wraps a variable list in an otherwise-default grammar.
    fn build_grammar(variables: Vec<Variable>) -> InternedGrammar {
        InternedGrammar {
            variables,
            ..Default::default()
        }
    }
}<|fim▁end|>
|
Rule::choice(vec![
// The symbol referencing `rule_1` was replaced by a symbol referencing
|
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
export * from './privacy.component';
|
<|file_name|>tnetstring.py<|end_file_name|><|fim▁begin|>"""
tnetstring: data serialization using typed netstrings
======================================================
This is a custom Python 3 implementation of tnetstrings.
Compared to other implementations, the main difference
is that this implementation supports a custom unicode datatype.
An ordinary tnetstring is a blob of data prefixed with its length and postfixed
with its type. Here are some examples:
>>> tnetstring.dumps("hello world")
11:hello world,
>>> tnetstring.dumps(12345)
5:12345#
>>> tnetstring.dumps([12345, True, 0])
19:5:12345#4:true!1:0#]
This module gives you the following functions:
:dump: dump an object as a tnetstring to a file
:dumps: dump an object as a tnetstring to a string
:load: load a tnetstring-encoded object from a file
:loads: load a tnetstring-encoded object from a string
Note that since parsing a tnetstring requires reading all the data into memory
at once, there's no efficiency gain from using the file-based versions of these
functions. They're only here so you can use load() to read precisely one
item from a file or socket without consuming any extra data.
The tnetstrings specification explicitly states that strings are binary blobs
and forbids the use of unicode at the protocol level.
**This implementation decodes dictionary keys as surrogate-escaped ASCII**,
all other strings are returned as plain bytes.
:Copyright: (c) 2012-2013 by Ryan Kelly <[email protected]>.
:Copyright: (c) 2014 by Carlo Pires <[email protected]>.
:Copyright: (c) 2016 by Maximilian Hils <[email protected]>.
:License: MIT
"""
import collections
import six
from typing import io, Union, Tuple # noqa
TSerializable = Union[None, bool, int, float, bytes, list, tuple, dict]
def dumps(value):
    # type: (TSerializable) -> bytes
    """
    This function dumps a python object as a tnetstring.
    """
    # Output fragments are generated back-to-front into a deque and joined
    # exactly once at the end, which is measurably faster than building
    # intermediate strings for nested structures.
    fragments = collections.deque()
    _rdumpq(fragments, 0, value)
    return b''.join(fragments)
def dump(value, file_handle):
# type: (TSerializable, io.BinaryIO) -> None
"""
This function dumps a python object as a tnetstring and
writes it to the given file.
"""<|fim▁hole|> file_handle.write(dumps(value))
def _rdumpq(q, size, value):
    # type: (collections.deque, int, TSerializable) -> int
    """
    Dump value as a tnetstring, to a deque instance, last chunks first.
    This function generates the tnetstring representation of the given value,
    pushing chunks of the output onto the given deque instance. It pushes
    the last chunk first, then recursively generates more chunks.
    When passed in the current size of the string in the queue, it will return
    the new size of the string in the queue.
    Operating last-chunk-first makes it easy to calculate the size written
    for recursive structures without having to build their representation as
    a string. This is measurably faster than generating the intermediate
    strings, especially on deeply nested structures.
    """
    write = q.appendleft
    # NOTE: the identity checks for None/True/False must come before the
    # integer branch, because bool is a subclass of int in Python and True
    # would otherwise serialize as the integer 1.
    if value is None:
        write(b'0:~')
        return size + 3
    elif value is True:
        write(b'4:true!')
        return size + 7
    elif value is False:
        write(b'5:false!')
        return size + 8
    elif isinstance(value, six.integer_types):
        data = str(value).encode()
        ldata = len(data)
        span = str(ldata).encode()
        write(b'%s:%s#' % (span, data))
        return size + 2 + len(span) + ldata
    elif isinstance(value, float):
        # Use repr() for float rather than str().
        # It round-trips more accurately.
        # Probably unnecessary in later python versions that
        # use David Gay's ftoa routines.
        data = repr(value).encode()
        ldata = len(data)
        span = str(ldata).encode()
        write(b'%s:%s^' % (span, data))
        return size + 2 + len(span) + ldata
    elif isinstance(value, bytes):
        # Bytes payloads are written in chunks (type tag, data, colon,
        # length) in reverse order, since we append to the left.
        data = value
        ldata = len(data)
        span = str(ldata).encode()
        write(b',')
        write(data)
        write(b':')
        write(span)
        return size + 2 + len(span) + ldata
    elif isinstance(value, six.text_type):
        # Custom extension: unicode strings use the ';' type tag (the
        # tnetstring spec itself only defines binary strings).
        data = value.encode("utf8")
        ldata = len(data)
        span = str(ldata).encode()
        write(b';')
        write(data)
        write(b':')
        write(span)
        return size + 2 + len(span) + ldata
    elif isinstance(value, (list, tuple)):
        # Write the closing tag first, recurse over items in reverse, then
        # prepend the total payload length.
        write(b']')
        init_size = size = size + 1
        for item in reversed(value):
            size = _rdumpq(q, size, item)
        span = str(size - init_size).encode()
        write(b':')
        write(span)
        return size + 1 + len(span)
    elif isinstance(value, dict):
        # For each pair, the value is pushed before the key so that the key
        # ends up first in the final (left-to-right) output.
        write(b'}')
        init_size = size = size + 1
        for (k, v) in value.items():
            size = _rdumpq(q, size, v)
            size = _rdumpq(q, size, k)
        span = str(size - init_size).encode()
        write(b':')
        write(span)
        return size + 1 + len(span)
    else:
        raise ValueError("unserializable object: {} ({})".format(value, type(value)))
def loads(string):
    # type: (bytes) -> TSerializable
    """
    This function parses a tnetstring into a python object.
    """
    # Delegate to pop() and discard any trailing, unparsed bytes.
    return pop(string)[0]
def load(file_handle):
    # type: (io.BinaryIO) -> TSerializable
    """load(file) -> object
    This function reads a tnetstring from a file and parses it into a
    python object. The file must support the read() method, and this
    function promises not to read more data than necessary.
    """
    # Read the length prefix one char at a time.
    # Note that the netstring spec explicitly forbids padding zeros.
    c = file_handle.read(1)
    data_length = b""
    while c.isdigit():
        data_length += c
        # Cap the prefix at 9 digits to avoid absurd allocations.
        if len(data_length) > 9:
            raise ValueError("not a tnetstring: absurdly large length prefix")
        c = file_handle.read(1)
    if c != b":":
        raise ValueError("not a tnetstring: missing or invalid length prefix")
    # Read exactly the declared payload plus the one-byte type tag.
    data = file_handle.read(int(data_length))
    data_type = file_handle.read(1)[0]
    return parse(data_type, data)
def parse(data_type, data):
    # type: (int, bytes) -> TSerializable
    """Decode a single tnetstring payload given its type-tag byte value."""
    # On Python 2 indexing bytes yields a str, so normalize to an int.
    if six.PY2:
        data_type = ord(data_type)
    if data_type == ord(b','):
        # Binary string: returned as raw bytes.
        return data
    if data_type == ord(b';'):
        # Custom unicode extension (see module docstring).
        return data.decode("utf8")
    if data_type == ord(b'#'):
        try:
            if six.PY2:
                return long(data)
            return int(data)
        except ValueError:
            raise ValueError("not a tnetstring: invalid integer literal: {}".format(data))
    if data_type == ord(b'^'):
        try:
            return float(data)
        except ValueError:
            raise ValueError("not a tnetstring: invalid float literal: {}".format(data))
    if data_type == ord(b'!'):
        if data == b'true':
            return True
        elif data == b'false':
            return False
        else:
            raise ValueError("not a tnetstring: invalid boolean literal: {}".format(data))
    if data_type == ord(b'~'):
        # Null must carry an empty payload.
        if data:
            raise ValueError("not a tnetstring: invalid null literal")
        return None
    if data_type == ord(b']'):
        # List: repeatedly pop nested items until the payload is consumed.
        l = []
        while data:
            item, data = pop(data)
            l.append(item)
        return l
    if data_type == ord(b'}'):
        # Dict: payload alternates key, value, key, value, ...
        d = {}
        while data:
            key, data = pop(data)
            val, data = pop(data)
            d[key] = val
        return d
    raise ValueError("unknown type tag: {}".format(data_type))
def pop(data):
    # type: (bytes) -> Tuple[TSerializable, bytes]
    """
    This function parses a tnetstring into a python object.
    It returns a tuple giving the parsed object and a string
    containing any unparsed data from the end of the string.
    """
    # Parse out data length, type and remaining string.
    try:
        length, data = data.split(b':', 1)
        length = int(length)
    except ValueError:
        raise ValueError("not a tnetstring: missing or invalid length prefix: {}".format(data))
    try:
        # data[length] is the one-byte type tag; everything after it is
        # left for the caller to parse.
        data, data_type, remain = data[:length], data[length], data[length + 1:]
    except IndexError:
        # This fires if len(data) < dlen, meaning we don't need
        # to further validate that data is the right length.
        raise ValueError("not a tnetstring: invalid length prefix: {}".format(length))
    # Parse the data based on the type tag.
    return parse(data_type, data), remain
__all__ = ["dump", "dumps", "load", "loads", "pop"]<|fim▁end|>
| |
<|file_name|>DataInputStream.java<|end_file_name|><|fim▁begin|>/*
* This file is part of the LibreOffice project.
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
* This file incorporates work covered by the following license notice:
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.apache.org/licenses/LICENSE-2.0 .
*/
package mod._streams.uno;
import com.sun.star.io.XActiveDataSink;
import com.sun.star.io.XActiveDataSource;
import com.sun.star.io.XDataOutputStream;
import com.sun.star.io.XInputStream;
import com.sun.star.io.XOutputStream;
import com.sun.star.lang.XMultiServiceFactory;
import com.sun.star.uno.UnoRuntime;
import com.sun.star.uno.XInterface;
import java.io.PrintWriter;
import java.util.ArrayList;
import lib.TestCase;
import lib.TestEnvironment;
import lib.TestParameters;
/**
* Test for object which is represented by service
* <code>com.sun.star.io.DataInputStream</code>.
* <ul>
* <li> <code>com::sun::star::io::XInputStream</code></li>
* <li> <code>com::sun::star::io::XDataInputStream</code></li>
* <li> <code>com::sun::star::io::XConnectable</code></li>
* <li> <code>com::sun::star::io::XActiveDataSink</code></li>
* </ul>
* @see com.sun.star.io.DataInputStream
* @see com.sun.star.io.XInputStream
* @see com.sun.star.io.XDataInputStream
* @see com.sun.star.io.XConnectable
* @see com.sun.star.io.XActiveDataSink
* @see ifc.io._XInputStream
* @see ifc.io._XDataInputStream
* @see ifc.io._XConnectable
* @see ifc.io._XActiveDataSink
*/
public class DataInputStream extends TestCase {
/**
* Creates a TestEnvironment for the interfaces to be tested.
* Creates <code>com.sun.star.io.DataInputStream</code> object,
* connects it to <code>com.sun.star.io.DataOutputStream</code>
* through <code>com.sun.star.io.Pipe</code>. All of possible data
* types are written into <code>DataOutputStream</code>.
* Object relations created :
* <ul>
* <li> <code>'StreamData'</code> for
* {@link ifc.io._XDataInputStream}(the data that should be written into
* the stream) </li>
* <li> <code>'ByteData'</code> for
* {@link ifc.io._XInputStream}(the data that should be written into
* the stream) </li>
* <li> <code>'StreamWriter'</code> for
* {@link ifc.io._XDataInputStream}
* {@link ifc.io._XInputStream}(a stream to write data to) </li>
* <li> <code>'Connectable'</code> for
* {@link ifc.io._XConnectable}(another object that can be connected) </li>
* <li> <code>'InputStream'</code> for
* {@link ifc.io._XActiveDataSink}(an input stream to set and get) </li>
* </ul>
*/
@Override
protected TestEnvironment createTestEnvironment(TestParameters Param, PrintWriter log) throws Exception {
Object oInterface = null;
XMultiServiceFactory xMSF = Param.getMSF();
oInterface = xMSF.createInstance("com.sun.star.io.DataInputStream");
XInterface oObj = (XInterface) oInterface;
// creating and connecting DataOutputStream to the
// DataInputStream created through the Pipe
XActiveDataSink xDataSink = UnoRuntime.queryInterface(XActiveDataSink.class, oObj);
XInterface oPipe = (XInterface)
xMSF.createInstance("com.sun.star.io.Pipe");
XInputStream xPipeInput = UnoRuntime.queryInterface(XInputStream.class, oPipe);
XOutputStream xPipeOutput = UnoRuntime.queryInterface(XOutputStream.class, oPipe);
XInterface oDataOutput = (XInterface)
xMSF.createInstance("com.sun.star.io.DataOutputStream");
XDataOutputStream xDataOutput = UnoRuntime.queryInterface(XDataOutputStream.class, oDataOutput) ;
XActiveDataSource xDataSource = UnoRuntime.queryInterface(XActiveDataSource.class, oDataOutput) ;
xDataSource.setOutputStream(xPipeOutput) ;
xDataSink.setInputStream(xPipeInput) ;
// all data types for writing to an XDataInputStream
ArrayList<Object> data = new ArrayList<Object>() ;
data.add(Boolean.TRUE) ;
data.add(Byte.valueOf((byte)123)) ;
data.add(new Character((char)1234)) ;
data.add(Short.valueOf((short)1234)) ;
data.add(Integer.valueOf(123456)) ;
data.add(new Float(1.234)) ;
data.add(new Double(1.23456)) ;
data.add("DataInputStream") ;
// information for writing to the pipe
byte[] byteData = new byte[] {
1, 2, 3, 4, 5, 6, 7, 8 } ;
// creating a connectable object for XConnectable interface
XInterface xConnect = (XInterface)xMSF.createInstance(
"com.sun.star.io.DataInputStream") ;
<|fim▁hole|> "com.sun.star.io.Pipe" );
log.println("creating a new environment for object");
TestEnvironment tEnv = new TestEnvironment( oObj );
// adding sequence of data that must be read
// by XDataInputStream interface methods
tEnv.addObjRelation("StreamData", data) ;
// add a writer
tEnv.addObjRelation("StreamWriter", xDataOutput);
// add a connectable
tEnv.addObjRelation("Connectable", xConnect);
// add an inputStream
tEnv.addObjRelation("InputStream", oDataInput);
tEnv.addObjRelation("ByteData", byteData);
return tEnv;
} // finish method getTestEnvironment
}<|fim▁end|>
|
// creating an input stream to set in XActiveDataSink
XInterface oDataInput = (XInterface) xMSF.createInstance(
|
<|file_name|>test_noninstantiable.py<|end_file_name|><|fim▁begin|>import warnings
from apification.utils import Noninstantiable, NoninstantiableMeta
def test_noninstantiable():
    """Instantiating Noninstantiable must raise TypeError and yield no object."""
    caught = None
    obj = None
    try:
        obj = Noninstantiable()
    except TypeError as exc:
        # Re-bind to a separate name: Python 3 deletes the `except ... as`
        # target when the handler exits, so asserting on it afterwards would
        # raise NameError instead of testing the intended behaviour.
        caught = exc
    assert obj is None
    assert isinstance(caught, TypeError)
def test_noninstantiable_keyword_self_check_invalid():
    # Declaring `self` as the first argument of a method on a Noninstantiable
    # subclass should emit exactly one SyntaxWarning, but still create the class.
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        class C(Noninstantiable):
            def func(self, a, b=1):
                pass
    assert len(w) == 1
    assert issubclass(w[0].category, SyntaxWarning)
    assert issubclass(C, Noninstantiable)
def test_noninstantiable_keyword_self_check_valid():
    # `self` in a non-first argument position must not trigger any warning.
    C = None
    with warnings.catch_warnings(record=True) as w:
        class C(Noninstantiable):
            def func(cls, self, b=1):  # self as non first argument is allowed for whatever reasons
                pass
    assert not w
    assert issubclass(C, Noninstantiable)
def test_noninstantiable_classmethods():
    # Methods on a Noninstantiable receive the class itself as first argument.
    class A(Noninstantiable):
        def a(cls):
            return cls
    assert A.a() is A
def test_noninstantiable_keyword_self_check_suppression():
    # Setting _allow_self_as_first_arg suppresses the SyntaxWarning check.
    C = None
    class C(Noninstantiable):
        _allow_self_as_first_arg = True
        def func(self, a, b=1):
            pass
    assert issubclass(C, Noninstantiable)
def test_noninstantable_inheritance():
class A(object):
_allow_self_as_first_arg = True
e = None
try:
class B(A):<|fim▁hole|> pass
assert e is None<|fim▁end|>
|
__metaclass__ = NoninstantiableMeta
def a(self):
pass
except TypeError as e:
|
<|file_name|>inputgroup.tsx<|end_file_name|><|fim▁begin|>import React from 'react';
import { LabIcon } from '../icon';
import { classes } from '../utils';
/**
* InputGroup component properties
*/
export interface IInputGroupProps
extends React.InputHTMLAttributes<HTMLInputElement> {
/**
* Right icon adornment
*/
rightIcon?: string | LabIcon;
}
/**
* InputGroup component
*
* @param props Component properties
* @returns Component<|fim▁hole|> const { className, rightIcon, ...others } = props;
return (
<div className={classes('jp-InputGroup', className)}>
<input {...others}></input>
{rightIcon && (
<span className="jp-InputGroupAction">
{typeof rightIcon === 'string' ? (
<LabIcon.resolveReact
icon={rightIcon}
elementPosition="center"
tag="span"
/>
) : (
<rightIcon.react elementPosition="center" tag="span" />
)}
</span>
)}
</div>
);
}<|fim▁end|>
|
*/
export function InputGroup(props: IInputGroupProps): JSX.Element {
|
<|file_name|>spineless.js<|end_file_name|><|fim▁begin|>(function() {
var Application, PubSub, Request, Router, Spineless;
Application = (function() {
function Application() {
return {
controllers: {},
helpers: {
_default: function(locals) {
return $.extend(true, {}, locals);
}
}<|fim▁hole|> };
}
return Application;
})();
Request = (function() {
function Request(controller, action, params) {
var p;
p = params != null ? params : {};
return {
controller: $(".controller[data-controller=" + controller + "]"),
view: $(".controller[data-controller=" + controller + "] .view[data-action=" + action + "]"),
params: $.extend(true, p, {
controller: controller,
action: action
})
};
}
return Request;
})();
Router = (function() {
function Router() {
return {
parseRoute: function(route) {
var hsh, str;
str = route + "";
hsh = $.extend(true, {}, {
controller: 'application',
action: 'index'
});
while (str.charAt(0) === '/') {
str = str.substr(1);
}
if (str.length > 0) {
$.each(str.split('/'), function(i, e) {
if (i === 0) {
hsh.controller = e;
}
if (i === 1) {
return hsh.action = e;
}
});
}
return hsh;
},
route: function(element) {
var route, url;
url = element.attr('href') || $(element).attr('data-href');
route = this.parseRoute(url);
return this.get(route.controller, route.action, route.params);
}
};
}
return Router;
})();
PubSub = (function() {
function PubSub() {
var o;
o = $({});
return {
subscribe: function() {
return o.on.apply(o, arguments);
},
unsubscribe: function() {
return o.off.apply(o, arguments);
},
publish: function() {
return o.trigger.apply(o, arguments);
}
};
}
return PubSub;
})();
Spineless = (function() {
function Spineless(options) {
var controllerActionAvailable, get, init, parseLocals, postRender, prepareRender, render, renderTemplate, root, templates;
root = this;
templates = function(method, locals) {
if (root.app.helpers.hasOwnProperty(method)) {
return root.app.helpers[method].apply(root.app, [locals]);
} else {
return root.app.helpers._default(locals);
}
};
parseLocals = function(view) {
var locals;
locals = $(view).attr('data-locals');
if (locals != null) {
return $.parseJSON(locals);
} else {
return {};
}
};
prepareRender = function() {
if (root.request.controller && root.request.view) {
$('.view.active').removeClass('active');
$('.controller.active').removeClass('active');
root.request.view.addClass('active');
root.request.controller.addClass("active");
return root.request.view.find("*[data-template]");
}
return [];
};
renderTemplate = function(view) {
var locals, name, template;
name = $(view).attr('data-template');
if (name != null) {
locals = parseLocals($(view));
template = $('.templates *[data-template-name=' + name + ']').html();
return view.html($.mustache(template, templates(name, locals)));
}
};
render = function(elements) {
return $.each(elements, function(i, e) {
return renderTemplate($(e));
});
};
controllerActionAvailable = function() {
return root.app.controllers.hasOwnProperty(root.request.params.controller) && root.app.controllers[root.request.params.controller].hasOwnProperty(root.request.params.action);
};
postRender = function() {
$('body').attr('data-controller', root.request.params.controller);
$('body').attr('data-action', root.request.params.action);
$('body').addClass('rendered');
return root.app.publish("afterRender", root.app);
};
get = function(controller, action, params) {
var itemsToRender;
root.request = new Request(controller, action, params);
root.app.request = root.request;
$('body').removeClass('rendered');
$('html,body').animate({
scrollTop: 0
}, 1);
itemsToRender = prepareRender();
if (controllerActionAvailable()) {
root.app.controllers[root.request.params.controller][root.request.params.action].apply(root.app, [itemsToRender, root.request]);
} else {
render(itemsToRender);
}
return postRender();
};
init = function(options) {
$(document).on('click', '.route', function(event) {
event.preventDefault();
return root.app.route($(this));
});
return $.extend(true, root.app, options);
};
this.app = new Application();
$.extend(true, this.app, new Router());
$.extend(true, this.app, new PubSub());
this.app.get = get;
this.app.render = render;
init(options);
return this.app;
}
return Spineless;
})();
$.spineless = function(options) {
return new Spineless(options);
};
}).call(this);<|fim▁end|>
| |
<|file_name|>cols.rs<|end_file_name|><|fim▁begin|>use transposed::{Cols, ColsMut};
use {Col, ColMut};
// Iterating a transposed matrix's rows from the back yields its columns
// from the back; each transposed row is re-wrapped as a `Col`.
impl<'a, T> DoubleEndedIterator for Cols<'a, T> {
    fn next_back(&mut self) -> Option<Col<'a, T>> {
        self.0.next_back().map(|r| Col(r.0))
    }
}
// Forward iteration over columns, delegating to the underlying transposed
// row iterator (including its exact size hint).
impl<'a, T> Iterator for Cols<'a, T> {
    type Item = Col<'a, T>;
    fn next(&mut self) -> Option<Col<'a, T>> {
        self.0.next().map(|r| Col(r.0))
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.0.size_hint()
    }
}
// Mutable variant: unwraps the transposed mutable row and re-wraps it as a
// `ColMut` around a `Col`.
impl<'a, T> DoubleEndedIterator for ColsMut<'a, T> {
    fn next_back(&mut self) -> Option<ColMut<'a, T>> {
        self.0.next_back().map(|r| ColMut(Col((r.0).0)))
    }
}
impl<'a, T> Iterator for ColsMut<'a, T> {
type Item = ColMut<'a, T>;
<|fim▁hole|> fn size_hint(&self) -> (usize, Option<usize>) {
self.0.size_hint()
}
}<|fim▁end|>
|
fn next(&mut self) -> Option<ColMut<'a, T>> {
self.0.next().map(|r| ColMut(Col((r.0).0)))
}
|
<|file_name|>standard.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2014 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
import datetime
import re
import unicodedata
from decimal import Decimal, InvalidOperation
from itertools import islice
from collections import Iterator
from dateutil.parser import parse as parse_date
from weboob.capabilities.base import empty
from weboob.tools.compat import basestring
from weboob.exceptions import ParseError
from weboob.browser.url import URL
from weboob.tools.log import getLogger, DEBUG_FILTERS
class NoDefault(object):
    """Sentinel type marking the absence of a filter default value.

    Used instead of ``None`` so that ``None`` itself remains a usable default.
    """
    def __repr__(self):
        return 'NO_DEFAULT'


# Shared sentinel instance; compared with ``is`` in _Filter.default_or_raise().
_NO_DEFAULT = NoDefault()
__all__ = ['FilterError', 'ColumnNotFound', 'RegexpError', 'ItemNotFound',
'Filter', 'Base', 'Env', 'TableCell', 'RawText',
'CleanText', 'Lower', 'CleanDecimal', 'Field', 'Regexp', 'Map',
'DateTime', 'Date', 'Time', 'DateGuesser', 'Duration',
'MultiFilter', 'CombineDate', 'Format', 'Join', 'Type',
'BrowserURL', 'Async', 'AsyncLoad']
class FilterError(ParseError):
    """Base class for all errors raised while applying a filter."""
    pass


class ColumnNotFound(FilterError):
    """Raised by TableCell when none of the requested column names exist."""
    pass


class RegexpError(FilterError):
    """Raised by Regexp when the pattern does not match."""
    pass


class ItemNotFound(FilterError):
    """Raised when a looked-up item/key is missing (e.g. by Map)."""
    pass
class _Filter(object):
    """Abstract base of all filters.

    Tracks a per-instance creation counter (used elsewhere to order declared
    fields) and an optional default value returned instead of raising.
    """
    # Global counter incremented on every filter instantiation.
    _creation_counter = 0

    def __init__(self, default=_NO_DEFAULT):
        # _key/_obj are filled in by the owning element for debug output.
        self._key = None
        self._obj = None
        self.default = default
        self._creation_counter = _Filter._creation_counter
        _Filter._creation_counter += 1

    def __or__(self, o):
        # `f | default` sets the filter's fallback value.
        self.default = o
        return self

    def __and__(self, o):
        # `f & OtherFilter` chains filters: self becomes o's selector.
        # A bare class is instantiated first.
        if isinstance(o, type) and issubclass(o, _Filter):
            o = o()
        o.selector = self
        return o

    def default_or_raise(self, exception):
        # Return the configured default, or raise when none was given.
        if self.default is not _NO_DEFAULT:
            return self.default
        else:
            raise exception

    def __str__(self):
        return self.__class__.__name__
def debug(*args):
    """
    A decorator function to provide some debug information
    in Filters.
    It prints by default the name of the Filter and the input value.
    """
    # NOTE: "wraper" (sic) is the historical name; kept for compatibility.
    def wraper(function):
        def print_debug(self, value):
            logger = getLogger('b2filters')
            result = ''
            outputvalue = value
            if isinstance(value, list):
                # Lists (typically lxml element lists) are pretty-printed by
                # serializing each element.
                from lxml import etree
                outputvalue = ''
                first = True
                for element in value:
                    if first:
                        first = False
                    else:
                        outputvalue += ', '
                    if isinstance(element, etree.ElementBase):
                        # NOTE(review): uses the Python 2 `unicode` builtin;
                        # would need adapting for Python 3.
                        outputvalue += "%s" % etree.tostring(element, encoding=unicode)
                    else:
                        outputvalue += "%r" % element
            # Prefix the log line with the owning object's id and field name.
            if self._obj is not None:
                result += "%s" % self._obj._random_id
            if self._key is not None:
                result += ".%s" % self._key
            name = str(self)
            result += " %s(%r" % (name, outputvalue)
            # Append the filter's public, non-default constructor arguments.
            for arg in self.__dict__:
                if arg.startswith('_') or arg == u"selector":
                    continue
                if arg == u'default' and getattr(self, arg) == _NO_DEFAULT:
                    continue
                result += ", %s=%r" % (arg, getattr(self, arg))
            result += u')'
            logger.log(DEBUG_FILTERS, result)
            # Finally run the wrapped filter itself.
            res = function(self, value)
            return res
        return print_debug
    return wraper
class Filter(_Filter):
    """
    Class used to filter on a HTML element given as call parameter to return
    matching elements.
    Filters can be chained, so the parameter supplied to constructor can be
    either a xpath selector string, or an other filter called before.
    >>> from lxml.html import etree
    >>> f = CleanDecimal(CleanText('//p'), replace_dots=True)
    >>> f(etree.fromstring('<html><body><p>blah: <span>229,90</span></p></body></html>'))
    Decimal('229.90')
    """
    def __init__(self, selector=None, default=_NO_DEFAULT):
        super(Filter, self).__init__(default=default)
        self.selector = selector

    @classmethod
    def select(cls, selector, item, obj=None, key=None):
        # The selector may be an xpath string, a nested filter, any callable,
        # or a plain literal value returned as-is.
        if isinstance(selector, basestring):
            return item.xpath(selector)
        elif isinstance(selector, _Filter):
            # Propagate debug context to the nested filter before calling it.
            selector._key = key
            selector._obj = obj
            return selector(item)
        elif callable(selector):
            return selector(item)
        else:
            return selector

    def __call__(self, item):
        # Resolve the selector against the item, then apply this filter.
        return self.filter(self.select(self.selector, item, key=self._key, obj=self._obj))

    @debug()
    def filter(self, value):
        """
        This method have to be overrided by children classes.
        """
        raise NotImplementedError()
class _Selector(Filter):
    """Pass-through filter that only errors when nothing was selected."""
    def filter(self, elements):
        if elements is not None:
            return elements
        else:
            return self.default_or_raise(ParseError('Element %r not found' % self.selector))
class AsyncLoad(Filter):
    """Starts an asynchronous fetch of the URL selected on the item.

    The returned loader is later consumed by the Async filter.
    """
    def __call__(self, item):
        link = self.select(self.selector, item, key=self._key, obj=self._obj)
        return item.page.browser.async_open(link)
class Async(_Filter):
    """Applies a filter to the document loaded by a named AsyncLoad loader."""
    def __init__(self, name, selector=None):
        super(Async, self).__init__()
        self.selector = selector
        self.name = name

    def __and__(self, o):
        # Unlike _Filter.__and__, chaining sets *this* filter's selector, so
        # the chained filter runs against the async page's document.
        if isinstance(o, type) and issubclass(o, _Filter):
            o = o()
        self.selector = o
        return self

    def __call__(self, item):
        # Block on the named loader; its URL must match a page handler.
        result = item.loaders[self.name].result()
        assert result.page is not None, 'The loaded url %s hasn\'t been matched by an URL object' % result.url
        return self.selector(result.page.doc)
class Base(Filter):
    """
    Change the base element used in filters.
    >>> Base(Env('header'), CleanText('./h1')) # doctest: +SKIP
    """
    def __call__(self, item):
        # First resolve the new base element, then run the inner selector
        # relative to it.
        base = self.select(self.base, item, obj=self._obj, key=self._key)
        return self.selector(base)

    def __init__(self, base, selector=None, default=_NO_DEFAULT):
        super(Base, self).__init__(selector, default)
        self.base = base
class Env(_Filter):
    """
    Filter to get environment value of the item.

    It is used for example to get page parameters, or when there is a parse()
    method on ItemElement.
    """

    def __init__(self, name, default=_NO_DEFAULT):
        super(Env, self).__init__(default)
        self.name = name  # key looked up in the item's env dict

    def __call__(self, item):
        try:
            return item.env[self.name]
        except KeyError:
            # Unknown variable: use the default, or raise ParseError.
            return self.default_or_raise(ParseError('Environment variable %s not found' % self.name))
class TableCell(_Filter):
    """
    Used with TableElement, it gets the cell value from its name.

    For example:

    >>> from weboob.capabilities.bank import Transaction
    >>> from weboob.browser.elements import TableElement, ItemElement
    >>> class table(TableElement):
    ...     head_xpath = '//table/thead/th'
    ...     item_xpath = '//table/tbody/tr'
    ...     col_date = u'Date'
    ...     col_label = [u'Name', u'Label']
    ...     class item(ItemElement):
    ...         klass = Transaction
    ...         obj_date = Date(TableCell('date'))
    ...         obj_label = CleanText(TableCell('label'))
    ...
    """

    def __init__(self, *names, **kwargs):
        # `names` are column identifiers tried in order; the first one
        # that resolves to a column index wins.
        super(TableCell, self).__init__(**kwargs)
        self.names = names

    def __call__(self, item):
        for name in self.names:
            idx = item.parent.get_colnum(name)
            if idx is not None:
                # XPath positions are 1-based, hence the +1.
                return item.xpath('./td[%s]' % (idx + 1))
        return self.default_or_raise(ColumnNotFound('Unable to find column %s' % ' or '.join(self.names)))
class RawText(Filter):
    """Return the raw text node of an element (or of several elements)."""

    @debug()
    def filter(self, el):
        # A sequence of elements is flattened to one space-separated string.
        if isinstance(el, (tuple, list)):
            return u' '.join(self.filter(sub) for sub in el)
        # No text node at all: fall back to the configured default.
        if el.text is None:
            return self.default
        return unicode(el.text)
class CleanText(Filter):
    """
    Get a cleaned text from an element.

    It first replaces all tabs and multiple spaces
    (including newlines if ``newlines`` is True)
    to one space and strips the result string.
    The result is coerced into unicode, and optionally normalized
    according to the ``normalize`` argument.

    Then it replaces all symbols given in the ``symbols`` argument.

    >>> CleanText().filter('coucou ')
    u'coucou'
    >>> CleanText().filter(u'coucou\xa0coucou')
    u'coucou coucou'
    >>> CleanText(newlines=True).filter(u'coucou\\r\\n coucou ')
    u'coucou coucou'
    >>> CleanText(newlines=False).filter(u'coucou\\r\\n coucou ')
    u'coucou\\ncoucou'
    """

    def __init__(self, selector=None, symbols='', replace=[], children=True, newlines=True, normalize='NFC', **kwargs):
        # NOTE(review): `replace=[]` is a shared mutable default; it is
        # never mutated here, but callers should not append to it in place.
        super(CleanText, self).__init__(selector, **kwargs)
        self.symbols = symbols      # characters stripped out entirely
        self.toreplace = replace    # list of (before, after) pairs
        self.children = children    # include text of child elements
        self.newlines = newlines    # collapse newlines into spaces
        self.normalize = normalize  # unicodedata form, or falsy to disable

    @debug()
    def filter(self, txt):
        if isinstance(txt, (tuple, list)):
            txt = u' '.join([self.clean(item, children=self.children) for item in txt])

        txt = self.clean(txt, self.children, self.newlines, self.normalize)
        txt = self.remove(txt, self.symbols)
        txt = self.replace(txt, self.toreplace)
        # ensure it didn't become str by mistake
        return unicode(txt)

    @classmethod
    def clean(cls, txt, children=True, newlines=True, normalize='NFC'):
        if not isinstance(txt, basestring):
            # lxml element: gather its text nodes (children's too if asked).
            if children:
                txt = [t.strip() for t in txt.itertext()]
            else:
                txt = [txt.text.strip()]
            txt = u' '.join(txt)  # 'foo bar'
        if newlines:
            txt = re.compile(u'\s+', flags=re.UNICODE).sub(u' ', txt)  # 'foo bar'
        else:
            # normalize newlines and clean what is inside
            txt = '\n'.join([cls.clean(l) for l in txt.splitlines()])
        txt = txt.strip()
        # lxml under Python 2 returns str instead of unicode if it is pure ASCII
        txt = unicode(txt)
        # normalize to a standard Unicode form
        if normalize:
            txt = unicodedata.normalize(normalize, txt)
        return txt

    @classmethod
    def remove(cls, txt, symbols):
        # Drop every character listed in `symbols`, then strip the edges.
        for symbol in symbols:
            txt = txt.replace(symbol, '')
        return txt.strip()

    @classmethod
    def replace(cls, txt, replace):
        # Apply the (before, after) substitutions in order.
        for before, after in replace:
            txt = txt.replace(before, after)
        return txt
class Lower(CleanText):
    """CleanText variant that also lowercases the cleaned string."""

    @debug()
    def filter(self, txt):
        return super(Lower, self).filter(txt).lower()
class CleanDecimal(CleanText):
    """
    Get a cleaned Decimal value from an element.

    replace_dots is False by default. A dot is interpreted as a decimal separator.

    If replace_dots is set to True, we remove all the dots. The ',' is used as decimal
    separator (often useful for French values)

    If replace_dots is a tuple, the first element will be used as the thousands separator,
    and the second as the decimal separator.

    See http://en.wikipedia.org/wiki/Thousands_separator#Examples_of_use

    For example, for the UK style (as in 1,234,567.89):

    >>> CleanDecimal('./td[1]', replace_dots=(',', '.')) # doctest: +SKIP
    """

    def __init__(self, selector=None, replace_dots=False, sign=None, default=_NO_DEFAULT):
        # `sign` is an optional callable receiving the original cleaned
        # text and returning a multiplier (typically +1 or -1).
        super(CleanDecimal, self).__init__(selector, default=default)
        self.replace_dots = replace_dots
        self.sign = sign

    @debug()
    def filter(self, text):
        if empty(text):
            return self.default_or_raise(ParseError('Unable to parse %r' % text))
        # Keep the pre-normalization text for the sign() callback.
        original_text = text = super(CleanDecimal, self).filter(text)
        if self.replace_dots:
            if type(self.replace_dots) is tuple:
                thousands_sep, decimal_sep = self.replace_dots
            else:
                # replace_dots=True: French convention ('.' thousands, ',' decimal).
                thousands_sep, decimal_sep = '.', ','
            text = text.replace(thousands_sep, '').replace(decimal_sep, '.')
        try:
            # Strip everything except digits, '-' and '.' before parsing.
            v = Decimal(re.sub(r'[^\d\-\.]', '', text))
            if self.sign:
                v *= self.sign(original_text)
            return v
        except InvalidOperation as e:
            return self.default_or_raise(e)
class Slugify(Filter):
    """Turn a label into a lowercase, dash-separated slug."""

    @debug()
    def filter(self, label):
        # Lowercase, replace every non-alphanumeric character with a space,
        # trim, then join the remaining words with dashes.
        cleaned = re.sub(r'[^A-Za-z0-9]', ' ', label.lower()).strip()
        return re.sub(r'\s+', '-', cleaned)
class Type(Filter):
    """
    Get a cleaned value of any type from an element text.
    The type_func can be any callable (class, function, etc.).
    By default an empty string will not be parsed but it can be changed
    by specifying minlen=False. Otherwise, a minimal length can be specified.

    >>> Type(CleanText('./td[1]'), type=int) # doctest: +SKIP

    >>> Type(type=int).filter('42')
    42
    >>> Type(type=int, default='NaN').filter('')
    'NaN'
    >>> Type(type=str, minlen=False, default='a').filter('')
    ''
    >>> Type(type=str, minlen=0, default='a').filter('')
    'a'
    """

    def __init__(self, selector=None, type=None, minlen=0, default=_NO_DEFAULT):
        super(Type, self).__init__(selector, default=default)
        self.type_func = type  # callable used to coerce the text
        self.minlen = minlen   # minimum accepted length, or False to disable

    @debug()
    def filter(self, txt):
        if empty(txt):
            return self.default_or_raise(ParseError('Unable to parse %r' % txt))
        # Reject too-short input unless the length check is disabled.
        if self.minlen is not False and len(txt) <= self.minlen:
            return self.default_or_raise(ParseError('Unable to parse %r' % txt))
        try:
            return self.type_func(txt)
        except ValueError as e:
            # NOTE(review): a TypeError raised by type_func is not caught
            # and will propagate to the caller — confirm this is intended.
            return self.default_or_raise(ParseError('Unable to parse %r: %s' % (txt, e)))
class Field(_Filter):
    """
    Get the attribute of object.

    Resolves the value of another obj_<name> selector defined on the same
    ItemElement, letting one field definition reuse another one's value.
    """

    def __init__(self, name):
        super(Field, self).__init__()
        self.name = name

    def __call__(self, item):
        # Delegate to the item's own obj_<name> selector.
        return item.use_selector(getattr(item, 'obj_%s' % self.name), key=self._key)
# Based on nth from https://docs.python.org/2/library/itertools.html
def nth(iterable, n, default=None):
    "Returns the nth item or a default value, n can be negative, or '*' for all"
    if n == '*':
        # Special marker: hand back the whole iterable untouched.
        return iterable
    items = iterable
    index = n
    if index < 0:
        # Negative index: count from the end of a materialized copy.
        items = reversed(list(items))
        index = -index - 1
    # Skip `index` items and return the next one, or the default.
    return next(islice(items, index, None), default)
def ordinal(n):
    "To have some readable debug information: '*' => all, 0 => 1st, 1 => 2nd..."
    if n == '*':
        return 'all'
    # Shift the zero-based position to a one-based ordinal (mirrored for
    # negative positions, which count from the end).
    n = n - 1 if n < 0 else n + 1
    i = abs(n)
    # English ordinal suffixes: 11/12/13 always take 'th'; otherwise the
    # last digit decides (1 => st, 2 => nd, 3 => rd, others => th).
    # The previous `['st', 'nd', 'rd'][i]` table produced "21th", "22th"...
    if 10 <= i % 100 <= 13:
        suffix = 'th'
    else:
        suffix = {1: 'st', 2: 'nd', 3: 'rd'}.get(i % 10, 'th')
    return str(n) + suffix
class Regexp(Filter):
r"""
Apply a regex.
>>> from lxml.html import etree
>>> doc = etree.fromstring('<html><body><p>Date: <span>13/08/1988</span></p></body></html>')
>>> Regexp(CleanText('//p'), r'Date: (\d+)/(\d+)/(\d+)', '\\3-\\2-\\1')(doc)
u'1988-08-13'
>>> (Regexp(CleanText('//body'), r'(\d+)', nth=1))(doc)
u'08'
>>> (Regexp(CleanText('//body'), r'(\d+)', nth=-1))(doc)
u'1988'
>>> (Regexp(CleanText('//body'), r'(\d+)', template='[\\1]', nth='*'))(doc)
[u'[13]', u'[08]', u'[1988]']
"""
def __init__(self, selector=None, pattern=None, template=None, nth=0, flags=0, default=_NO_DEFAULT):
super(Regexp, self).__init__(selector, default=default)
assert pattern is not None
self.pattern = pattern
self._regex = re.compile(pattern, flags)
self.template = template
self.nth = nth
def expand(self, m):
if self.template is None:<|fim▁hole|> return self.template(m) if callable(self.template) else m.expand(self.template)
    @debug()
    def filter(self, txt):
        """Run the regexp against txt and expand the requested match(es)."""
        if isinstance(txt, (tuple, list)):
            # NOTE(review): itertext() is an lxml element method, but here
            # `txt` is a tuple/list — this branch seems to expect a sequence
            # of elements rather than strings; confirm against callers.
            txt = u' '.join([t.strip() for t in txt.itertext()])

        # nth == 0 is the common fast path via search(); otherwise pick
        # the nth match (possibly negative, or '*' for all) from finditer().
        m = self._regex.search(txt) if self.nth == 0 else \
            nth(self._regex.finditer(txt), self.nth)
        if not m:
            msg = 'Unable to find %s %s in %r' % (ordinal(self.nth), self.pattern, txt)
            return self.default_or_raise(RegexpError(msg))
        if isinstance(m, Iterator):
            # nth == '*' yielded an iterator: expand every match.
            return map(self.expand, m)
        return self.expand(m)
class Map(Filter):
    """Map the selected value to another value through a lookup dict.

    Raises ItemNotFound (or returns the default) when the selected value
    is not a key of `map_dict`.
    """

    def __init__(self, selector, map_dict, default=_NO_DEFAULT):
        super(Map, self).__init__(selector, default=default)
        self.map_dict = map_dict

    @debug()
    def filter(self, txt):
        try:
            return self.map_dict[txt]
        except KeyError:
            return self.default_or_raise(ItemNotFound('Unable to handle %r on %r' % (txt, self.map_dict)))
class DateTime(Filter):
    """Parse the selected text into a datetime via dateutil's parser.

    :param dayfirst: interpret ambiguous dates as day-first (European).
    :param translations: optional iterable of (compiled_regex, replacement)
        applied to the text before parsing (e.g. to translate month names).
    """

    def __init__(self, selector=None, default=_NO_DEFAULT, dayfirst=False, translations=None):
        super(DateTime, self).__init__(selector, default=default)
        self.dayfirst = dayfirst
        self.translations = translations

    @debug()
    def filter(self, txt):
        # NOTE(review): `txt == ''` is redundant if empty() already treats
        # the empty string as empty — confirm against the empty() helper.
        if empty(txt) or txt == '':
            return self.default_or_raise(ParseError('Unable to parse %r' % txt))
        try:
            if self.translations:
                for search, repl in self.translations:
                    txt = search.sub(repl, txt)
            return parse_date(txt, dayfirst=self.dayfirst)
        except (ValueError, TypeError) as e:
            return self.default_or_raise(ParseError('Unable to parse %r: %s' % (txt, e)))
class Date(DateTime):
    """DateTime variant that returns a date instead of a datetime."""

    def __init__(self, selector=None, default=_NO_DEFAULT, dayfirst=False, translations=None):
        super(Date, self).__init__(selector, default=default, dayfirst=dayfirst, translations=translations)

    @debug()
    def filter(self, txt):
        parsed = super(Date, self).filter(txt)
        # A default value may not be a datetime at all; only call .date()
        # when the parsed object actually provides it.
        if hasattr(parsed, 'date'):
            return parsed.date()
        return parsed
class DateGuesser(Filter):
    """Guess a full date from a partial day/month value.

    The heavy lifting is delegated to a date_guesser object whose
    guess_date(day, month) method picks the appropriate year.
    """

    def __init__(self, selector, date_guesser, **kwargs):
        super(DateGuesser, self).__init__(selector)
        self.date_guesser = date_guesser
        self.kwargs = kwargs  # extra arguments forwarded to guess_date()

    def __call__(self, item):
        values = self.select(self.selector, item, obj=self._obj, key=self._key)
        date_guesser = self.date_guesser
        # In case Env() is used to give date_guesser.
        if isinstance(date_guesser, _Filter):
            date_guesser = self.select(date_guesser, item, obj=self._obj, key=self._key)

        if isinstance(values, basestring):
            # Accept "dd/mm" or "dd-mm" strings.
            values = re.split('[/-]', values)

        if len(values) == 2:
            day, month = map(int, values)
        else:
            raise ParseError('Unable to take (day, month) tuple from %r' % values)
        return date_guesser.guess_date(day, month, **self.kwargs)
class Time(Filter):
    """Parse a time-of-day out of the selected text.

    Subclasses may override `klass`, `_regexp` and `kwargs` to build other
    time-like objects (see Duration).
    """

    klass = datetime.time
    # hh:mm with an optional :ss part; the ':' between hh and mm is optional.
    _regexp = re.compile(r'(?P<hh>\d+):?(?P<mm>\d+)(:(?P<ss>\d+))?')
    # Maps klass constructor keywords to regexp group names.
    kwargs = {'hour': 'hh', 'minute': 'mm', 'second': 'ss'}

    def __init__(self, selector=None, default=_NO_DEFAULT):
        super(Time, self).__init__(selector, default=default)

    @debug()
    def filter(self, txt):
        m = self._regexp.search(txt)
        if m:
            kwargs = {}
            for key, index in self.kwargs.iteritems():
                # Missing optional groups (e.g. seconds) default to 0.
                kwargs[key] = int(m.groupdict()[index] or 0)
            return self.klass(**kwargs)
        return self.default_or_raise(ParseError('Unable to find time in %r' % txt))
class Duration(Time):
    """Parse a "[hh:]mm:ss" duration (':' or ';' separated) as a timedelta.

    Reuses Time.filter(), which reads the `_regexp`, `kwargs` and `klass`
    class attributes.
    """

    klass = datetime.timedelta
    # Fix: Time.filter() looks up `self._regexp`, so the override must be
    # named `_regexp`. The previous attribute was named `regexp` and was
    # therefore never used — durations were matched with Time's hh:mm
    # pattern and built with mismatched groups.
    _regexp = re.compile(r'((?P<hh>\d+)[:;])?(?P<mm>\d+)[;:](?P<ss>\d+)')
    kwargs = {'hours': 'hh', 'minutes': 'mm', 'seconds': 'ss'}
class MultiFilter(Filter):
    """Base class for filters combining the values of several selectors.

    Each positional argument is a selector; their resolved values are
    passed as a tuple to filter().
    """

    def __init__(self, *args, **kwargs):
        default = kwargs.pop('default', _NO_DEFAULT)
        super(MultiFilter, self).__init__(args, default)

    def __call__(self, item):
        values = [self.select(selector, item, obj=self._obj, key=self._key) for selector in self.selector]
        return self.filter(tuple(values))

    def filter(self, values):
        """Combine the tuple of selected values; overridden by subclasses."""
        raise NotImplementedError()
class CombineDate(MultiFilter):
    """Combine a date selector and a time selector into one datetime."""

    def __init__(self, date, time):
        super(CombineDate, self).__init__(date, time)

    @debug()
    def filter(self, values):
        day, clock = values
        return datetime.datetime.combine(day, clock)
class Format(MultiFilter):
    """Apply a %-style format string to the values of several selectors."""

    def __init__(self, fmt, *args):
        super(Format, self).__init__(*args)
        self.fmt = fmt  # %-style pattern with one placeholder per selector

    @debug()
    def filter(self, values):
        # `values` is a tuple, consumed positionally by %-formatting.
        return self.fmt % values
class BrowserURL(MultiFilter):
    """Build an absolute URL from a named URL pattern of the browser.

    Keyword arguments are selectors; their values fill the pattern's
    placeholders of the same name.
    """

    def __init__(self, url_name, **kwargs):
        # NOTE(review): relies on kwargs.values() and kwargs.keys() walking
        # in the same order, which holds for a single dict instance.
        super(BrowserURL, self).__init__(*kwargs.values())
        self.url_name = url_name
        self.keys = kwargs.keys()

    def __call__(self, item):
        values = super(BrowserURL, self).__call__(item)
        url = getattr(item.page.browser, self.url_name)
        assert isinstance(url, URL), "%s.%s must be an URL object" % (type(item.page.browser).__name__, self.url_name)
        return url.build(**dict(zip(self.keys, values)))

    @debug()
    def filter(self, values):
        # Values pass through unchanged; the URL is built in __call__.
        return values
class Join(Filter):
    """Concatenate the text of each selected element through a pattern.

    :param pattern: %-style format string applied to each cleaned element.
    :param textCleaner: class whose clean() classmethod cleans each element
        (CleanText by default).
    """

    def __init__(self, pattern, selector=None, textCleaner=CleanText):
        super(Join, self).__init__(selector)
        self.pattern = pattern
        self.textCleaner = textCleaner

    @debug()
    def filter(self, el):
        # Build the result with a single join instead of repeated string
        # concatenation, which is quadratic on long element lists.
        return u''.join(self.pattern % self.textCleaner.clean(li) for li in el)
def test_CleanText():
    # This test works poorly under a doctest, or would be hard to read
    assert CleanText().filter(u' coucou \n\théhé') == u'coucou héhé'
    assert CleanText().filter('coucou\xa0coucou') == CleanText().filter(u'coucou\xa0coucou') == u'coucou coucou'

    # Unicode normalization
    assert CleanText().filter(u'Éçã') == u'Éçã'
    # NFKC folds compatibility characters such as the horizontal ellipsis.
    assert CleanText(normalize='NFKC').filter(u'…') == u'...'
    assert CleanText().filter(u'…') == u'…'
    # Diacritical mark (dakuten)
    assert CleanText().filter(u'\u3053\u3099') == u'\u3054'
    assert CleanText(normalize='NFD').filter(u'\u3053\u3099') == u'\u3053\u3099'
    assert CleanText(normalize='NFD').filter(u'\u3054') == u'\u3053\u3099'
    assert CleanText(normalize=False).filter(u'\u3053\u3099') == u'\u3053\u3099'
|
return next(g for g in m.groups() if g is not None)
|
<|file_name|>robotsparser.py<|end_file_name|><|fim▁begin|>import requests
import logging
from fetcher import fetch
from os.path import join
from urlobj import URLObj
from urllib.parse import urljoin, urlsplit, urlunsplit<|fim▁hole|> def __init__(self, domain):
self.domain = domain
# Check if the file even exists first.
    def exists(self):
        """Return True if the domain serves a fetchable robots.txt."""
        resp = fetch(URLObj(join(self.domain, 'robots.txt')))
        return (resp is not None) and (resp.status_code == requests.codes.ok)
# Actually parse the file.
def parse(self):
logging.info("Parsing robots.txt")
blackpaths = []
resp = fetch(URLObj(join(self.domain, 'robots.txt')))
for line in resp.text.split('\n'):
line = line.strip()
if line.startswith('#'):
continue
elif line is None:
continue
elif line.startswith('Disallow'):
badpath = line.split(':')[1].strip().strip('/')
blackpaths.append(badpath)
return [join(self.domain, b) for b in blackpaths]<|fim▁end|>
|
class RobotsParser:
|
<|file_name|>date_ex1.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
// Create a Date object holding the current date/time and print it.
var today = new Date();
console.log(today);
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># For building youtube_downloader on windows
from distutils.core import setup
import py2exe
# Define where you want youtube_downloader to be built to below
build_dir =
data_files = [('',['settings.ini',
'LICENSE',<|fim▁hole|> ('sessions',[])]
options = {'py2exe': {
'dist_dir': build_dir}}
setup(
windows=['youtube_downloader.py'],
data_files=data_files,
options=options)<|fim▁end|>
|
'README.md']),
|
<|file_name|>hy-AM.js<|end_file_name|><|fim▁begin|>(function (global, factory) {
if (typeof define === "function" && define.amd) {
define('element/locale/hy-AM', ['module', 'exports'], factory);
} else if (typeof exports !== "undefined") {
factory(module, exports);
} else {
var mod = {
exports: {}
};
factory(mod, mod.exports);
global.ELEMENT.lang = global.ELEMENT.lang || {};
global.ELEMENT.lang.hyAM = mod.exports;
}
})(this, function (module, exports) {
'use strict';
exports.__esModule = true;
exports.default = {
el: {
colorpicker: {
confirm: 'Լաւ',
clear: 'Մաքրել'
},
datepicker: {
now: 'Հիմա',
today: 'Այսօր',
cancel: 'Չեղարկել',
clear: 'Մաքրել',
confirm: 'Լաւ',
selectDate: 'Ընտրեք ամսաթիւը',
selectTime: 'Ընտրեք ժամանակը',
startDate: 'Սկզբ. ամսաթիւը',
startTime: 'Սկզբ. ժամանակը',
endDate: 'Վերջ. ամսաթիվը',
endTime: 'Վերջ. ժամանակը',
prevYear: 'Նախորդ տարի',
nextYear: 'Յաջորդ տարի',
prevMonth: 'Նախորդ ամիս',
nextMonth: 'Յաջորդ ամիս',
year: 'Տարի',
month1: 'Յունուար',
month2: 'Փետրուար',
month3: 'Մարտ',
month4: 'Ապրիլ',
month5: 'Մայիս',
month6: 'Յունիս',
month7: 'Յուլիս',
month8: 'Օգոստոս',
month9: 'Սեպտեմբեր',
month10: 'Յոկտեմբեր',
month11: 'Նոյեմբեր',
month12: 'Դեկտեմբեր',
week: 'Շաբաթ',
weeks: {
sun: 'Կիր',
mon: 'Երկ',
tue: 'Եր',
wed: 'Չոր',
thu: 'Հինգ',
fri: 'Ուրբ',
sat: 'Շաբ'
},
months: {
jan: 'Յունվ',
feb: 'Փետ',
mar: 'Մար',
apr: 'Ապր',
may: 'Մայ',
jun: 'Յուն',
jul: 'Յուլ',
aug: 'Օգ',
sep: 'Սեպտ',
oct: 'Յոկ',
nov: 'Նոյ',
dec: 'Դեկ'
}
},
select: {
loading: 'Բեռնում',
noMatch: 'Համապատասխան տուեալներ չկան',
noData: 'Տվյալներ չկան',
placeholder: 'Ընտրել'
},
cascader: {
noMatch: 'Համապատասխան տուեալներ չկան',
loading: 'Բեռնում',
placeholder: 'Ընտրել'
},
pagination: {<|fim▁hole|> pagesize: ' էջում',
total: 'Ընդամենը {total}',
pageClassifier: ''
},
messagebox: {
title: 'Հաղորդագրութիւն',
confirm: 'Լաւ',
cancel: 'Չեղարկել',
error: 'Անվաւեր տուեալների մուտք'
},
upload: {
deleteTip: 'Սեղմեք [Ջնջել] ջնջելու համար',
delete: 'Ջնջել',
preview: 'Նախադիտում',
continue: 'Շարունակել'
},
table: {
emptyText: 'Տուեալներ չկան',
confirmFilter: 'Յաստատել',
resetFilter: 'Վերագործարկել',
clearFilter: 'Բոլորը',
sumText: 'Գումարը'
},
tree: {
emptyText: 'Տուեալներ չկան'
},
transfer: {
noMatch: 'Համապատասխան տուեալներ չկան',
noData: 'Տուեալներ չկան',
titles: ['Ցուցակ 1', 'Ցուցակ 2'],
filterPlaceholder: 'Մուտքագրեք բանալի բառ',
noCheckedFormat: '{total} միաւոր',
hasCheckedFormat: '{checked}/{total} ընտրուած է'
}
}
};
module.exports = exports['default'];
});<|fim▁end|>
|
goto: 'Անցնել',
|
<|file_name|>motor_ina219.py<|end_file_name|><|fim▁begin|># Copyright 2020 Makani Technologies LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Motor INA219 hardware monitor configuration."""
from makani.avionics.firmware.drivers import ina219_types
from makani.avionics.firmware.serial import motor_serial_params as rev
# Baseline INA219 monitor configuration; per-rail entries override fields.
ina219_default = {
    'name': '',
    'address': 0x0,
    'shunt_resistor': 0.01,  # shunt value; presumably ohms — TODO confirm
    'bus_voltage': ina219_types.kIna219BusVoltage16V,
    'range': ina219_types.kIna219Range40mv,
    'bus_adc': ina219_types.kIna219Adc128Samples,
    'shunt_adc': ina219_types.kIna219Adc128Samples,
    'mode': ina219_types.kIna219ModeShuntAndBusContinuous,
    'current_max': -1,
    'voltage_limits_percent': [95, 105],
}

# Bus-voltage / shunt-range presets derived from the default configuration.
ina219_16v_40mv = dict(ina219_default, **{
    'bus_voltage': ina219_types.kIna219BusVoltage16V,
    'range': ina219_types.kIna219Range40mv,
})

ina219_16v_80mv = dict(ina219_default, **{
    'bus_voltage': ina219_types.kIna219BusVoltage16V,
    'range': ina219_types.kIna219Range80mv,
})

ina219_32v_40mv = dict(ina219_default, **{
    'bus_voltage': ina219_types.kIna219BusVoltage32V,
    'range': ina219_types.kIna219Range40mv,
})
ina219_32v_160mv = dict(ina219_default, **{
'bus_voltage': ina219_types.kIna219BusVoltage32V,
'range': ina219_types.kIna219Range160mv,
})<|fim▁hole|> dict(ina219_32v_40mv, name='12v', address=0x40, shunt_resistor=0.012),
dict(ina219_16v_40mv, name='1v2', address=0x42, shunt_resistor=0.02),
dict(ina219_16v_40mv, name='3v3', address=0x45, shunt_resistor=0.02),
]
# GIN A2 boards share the A1 monitor population.
gin_a2 = gin_a1

# GIN A3 (and later) boards use different addresses and shunt values.
gin_a3 = [
    dict(ina219_32v_160mv, name='12v', address=0x41, shunt_resistor=0.05),
    dict(ina219_16v_80mv, name='1v2', address=0x42, shunt_resistor=0.05),
    dict(ina219_16v_80mv, name='3v3', address=0x45, shunt_resistor=0.05),
]

# Map each motor hardware revision to its list of INA219 monitors.
ina219_config = (rev.MotorHardware, {
    rev.MotorHardware.GIN_A1: gin_a1,
    rev.MotorHardware.GIN_A2: gin_a2,
    rev.MotorHardware.GIN_A3: gin_a3,
    rev.MotorHardware.GIN_A4_CLK16: gin_a3,
    rev.MotorHardware.GIN_A4_CLK8: gin_a3,
    rev.MotorHardware.OZONE_A1: gin_a3,
})
|
gin_a1 = [
|
<|file_name|>hud.py<|end_file_name|><|fim▁begin|>## INFO ########################################################################
## ##
## plastey ##
## ======= ##
## ##
## Oculus Rift + Leap Motion + Python 3 + C + Blender + Arch Linux ##
## Version: 0.2.0.980 (20150510) ##
## File: hud.py ##
## ##
## For more information about the project, visit ##
## <http://plastey.kibu.hu>. ##
## Copyright (C) 2015 Peter Varo, Kitchen Budapest ##
## ##
## This program is free software: you can redistribute it and/or modify it ##
## under the terms of the GNU General Public License as published by the ##
## Free Software Foundation, either version 3 of the License, or ##
## (at your option) any later version. ##
## ##
## This program is distributed in the hope that it will be useful, but ##
## WITHOUT ANY WARRANTY; without even the implied warranty of ##
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. ##
## See the GNU General Public License for more details. ##<|fim▁hole|>## ##
## You should have received a copy of the GNU General Public License ##
## along with this program, most likely a file in the root directory, ##
## called 'LICENSE'. If not, see <http://www.gnu.org/licenses>. ##
## ##
######################################################################## INFO ##
# Import python modules
from collections import deque
#------------------------------------------------------------------------------#
class Text:
    """Scrolling HUD text: newest message on the first line, older ones below.

    Messages expire one by one once `interval` (in the units returned by
    `time_getter`) has elapsed since the last display update.
    """

    def __init__(self, text_first_object,
                       text_other_object,
                       time_getter,
                       interval):
        """
        :param text_first_object: display object for the newest message
                                  (written through its `.text` attribute)
        :param text_other_object: display object for all older messages
        :param time_getter:       zero-argument callable returning the
                                  current time
        :param interval:          how long a message stays before expiring
        """
        self._text_first = text_first_object
        self._text_other = text_other_object
        self._get_time = time_getter
        self._interval = interval
        self._last_time = time_getter()
        self._messages = deque()
        # True while the queue is known to be empty; used by update() to
        # clear the display exactly once after the last message expires.
        self._still_empty = True

    def _update(self):
        # Write the changed and constructed messages to display.
        messages = iter(self._messages)
        try:
            self._text_first.text = next(messages)
            self._text_other.text = '\n'.join(messages)
        except StopIteration:
            # No messages at all: blank both display objects.
            self._text_first.text = self._text_other.text = ''
        # Update timer.
        self._last_time = self._get_time()

    def clear(self):
        """Drop all messages and blank the display."""
        self._messages = deque()
        self._still_empty = True
        self._update()

    def update(self):
        """Expire the oldest message if `interval` has passed; call often."""
        if len(self._messages):
            # If the interval passed, remove the oldest item and redraw.
            if (self._last_time + self._interval) <= self._get_time():
                self._messages.pop()
                self._update()
        elif not self._still_empty:
            # The queue just became empty: clear the display once.
            # (Fix: write() now resets the flag, making this reachable;
            # previously _still_empty was never cleared anywhere.)
            self._still_empty = True
            self._update()

    def write(self, message):
        """Prepend a new message and redraw immediately."""
        self._messages.appendleft(message)
        # Mark the queue as non-empty so update() can detect the later
        # transition back to empty (bug fix: flag was never cleared).
        self._still_empty = False
        self._update()
| |
<|file_name|>MapsIntent.java<|end_file_name|><|fim▁begin|>package droidkit.app;
import android.content.Intent;
import android.net.Uri;
import android.support.annotation.NonNull;
import java.util.Locale;<|fim▁hole|>
/**
* @author Daniel Serdyukov
*/
public final class MapsIntent {
private static final String MAPS_URL = "https://maps.google.com/maps";
private MapsIntent() {
}
@NonNull
public static Intent openMaps() {
return new Intent(Intent.ACTION_VIEW, Uri.parse(MAPS_URL));
}
@NonNull
public static Intent openMaps(double lat, double lng) {
return new Intent(Intent.ACTION_VIEW, Uri.parse(String.format(Locale.US, MAPS_URL + "?q=%f,%f", lat, lng)));
}
@NonNull
public static Intent route(double lat, double lng) {
return new Intent(Intent.ACTION_VIEW, Uri.parse(String.format(Locale.US, MAPS_URL + "?daddr=%f,%f", lat, lng)));
}
@NonNull
public static Intent route(double fromLat, double fromLng, double toLat, double toLng) {
return new Intent(Intent.ACTION_VIEW, Uri.parse(String.format(Locale.US, MAPS_URL +
"?saddr=%f,%f&daddr=%f,%f", fromLat, fromLng, toLat, toLng)));
}
@NonNull
public static Intent search(@NonNull String query) {
return new Intent(Intent.ACTION_VIEW, Uri.parse(MAPS_URL + "?q=" + query));
}
}<|fim▁end|>
| |
<|file_name|>subscription_manifest.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Andrew Kofink <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: subscription_manifest
version_added: 1.0.0
short_description: Manage Subscription Manifests
description:
- Upload, refresh and delete Subscription Manifests
author: "Andrew Kofink (@akofink)"
options:
manifest_path:
description:
- Path to the manifest zip file
- This parameter will be ignored if I(state=absent) or I(state=refreshed)
type: path
state:
description:
- The state of the manifest
default: present
choices:
- absent
- present
- refreshed
type: str
repository_url:
description:
- URL to retrieve content from
aliases: [ redhat_repository_url ]
type: str
extends_documentation_fragment:
- theforeman.foreman.foreman
- theforeman.foreman.foreman.organization
'''
EXAMPLES = '''
- name: "Upload the RHEL developer edition manifest"
theforeman.foreman.subscription_manifest:
username: "admin"
password: "changeme"
server_url: "https://foreman.example.com"
organization: "Default Organization"
state: present
manifest_path: "/tmp/manifest.zip"
'''
RETURN = ''' # '''
from ansible_collections.theforeman.foreman.plugins.module_utils.foreman_helper import KatelloEntityAnsibleModule
def main():
module = KatelloEntityAnsibleModule(
argument_spec=dict(
manifest_path=dict(type='path'),
state=dict(default='present', choices=['absent', 'present', 'refreshed']),
repository_url=dict(aliases=['redhat_repository_url']),
),
foreman_spec=dict(
organization=dict(type='entity', required=True, thin=False),
),
required_if=[
['state', 'present', ['manifest_path']],
],
supports_check_mode=False,
)
module.task_timeout = 5 * 60
with module.api_connection():
organization = module.lookup_entity('organization')
scope = module.scope_for('organization')
try:
existing_manifest = organization['owner_details']['upstreamConsumer']
except KeyError:
existing_manifest = None
if module.state == 'present':
if 'repository_url' in module.foreman_params:
payload = {'redhat_repository_url': module.foreman_params['repository_url']}
org_spec = dict(id=dict(), redhat_repository_url=dict())
organization = module.ensure_entity('organizations', payload, organization, state='present', foreman_spec=org_spec)
try:
with open(module.foreman_params['manifest_path'], 'rb') as manifest_file:
files = {'content': (module.foreman_params['manifest_path'], manifest_file, 'application/zip')}
params = {}
if 'repository_url' in module.foreman_params:
params['repository_url'] = module.foreman_params['repository_url']
params.update(scope)
result = module.resource_action('subscriptions', 'upload', params, files=files, record_change=False, ignore_task_errors=True)
for error in result['humanized']['errors']:
if "same as existing data" in error:
# Nothing changed, but everything ok
break
if "older than existing data" in error:
module.fail_json(msg="Manifest is older than existing data.")
else:
module.fail_json(msg="Upload of the manifest failed: %s" % error)
else:
module.set_changed()
except IOError as e:
module.fail_json(msg="Unable to read the manifest file: %s" % e)
elif module.desired_absent and existing_manifest:
module.resource_action('subscriptions', 'delete_manifest', scope)<|fim▁hole|> elif module.state == 'refreshed':
if existing_manifest:
module.resource_action('subscriptions', 'refresh_manifest', scope)
else:
module.fail_json(msg="No manifest found to refresh.")
if __name__ == '__main__':
main()<|fim▁end|>
| |
<|file_name|>amp-a4a.js<|end_file_name|><|fim▁begin|>/**
* Copyright 2016 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {Services} from '../../../src/services';
import {SignatureVerifier, VerificationStatus} from './signature-verifier';
import {
is3pThrottled,
getAmpAdRenderOutsideViewport,
incrementLoadingAds,
} from '../../amp-ad/0.1/concurrent-load';
import {createElementWithAttributes} from '../../../src/dom';
import {cancellation, isCancellation} from '../../../src/error';
import {
installFriendlyIframeEmbed,
setFriendlyIframeEmbedVisible,
} from '../../../src/friendly-iframe-embed';
import {isLayoutSizeDefined, Layout} from '../../../src/layout';
import {isAdPositionAllowed} from '../../../src/ad-helper';
import {dev, user, duplicateErrorIfNecessary} from '../../../src/log';
import {dict} from '../../../src/utils/object';
import {getMode} from '../../../src/mode';
import {isArray, isObject, isEnumValue} from '../../../src/types';
import {utf8Decode} from '../../../src/utils/bytes';
import {getBinaryType, isExperimentOn} from '../../../src/experiments';
import {setStyle} from '../../../src/style';
import {
assertHttpsUrl,
isSecureUrl,
tryDecodeUriComponent,
} from '../../../src/url';
import {parseJson} from '../../../src/json';
import {handleClick} from '../../../ads/alp/handler';
import {
getDefaultBootstrapBaseUrl,
generateSentinel,
} from '../../../src/3p-frame';
import {
installUrlReplacementsForEmbed,
} from '../../../src/service/url-replacements-impl';
import {A4AVariableSource} from './a4a-variable-source';
// TODO(tdrl): Temporary. Remove when we migrate to using amp-analytics.
import {getTimingDataAsync} from '../../../src/service/variable-source';
import {getContextMetadata} from '../../../src/iframe-attributes';
import {getBinaryTypeNumericalCode} from '../../../ads/google/a4a/utils';
import {signingServerURLs} from '../../../ads/_a4a-config';
import {triggerAnalyticsEvent} from '../../../src/analytics';
import {insertAnalyticsElement} from '../../../src/extension-analytics';
// Accepted opening tags for the amp-ad-metadata <script> embedded in AMP
// creatives; used when locating creative metadata in the response body.
/** @type {Array<string>} */
const METADATA_STRINGS = [
'<script amp-ad-metadata type=application/json>',
'<script type="application/json" amp-ad-metadata>',
'<script type=application/json amp-ad-metadata>'];
// TODO(tdrl): Temporary, while we're verifying whether SafeFrame is an
// acceptable solution to the 'Safari on iOS doesn't fetch iframe src from
// cache' issue. See https://github.com/ampproject/amphtml/issues/5614
/** @type {string} */
export const DEFAULT_SAFEFRAME_VERSION = '1-0-14';
// Response header carrying the creative's intended size ("WxH").
/** @const {string} */
export const CREATIVE_SIZE_HEADER = 'X-CreativeSize';
// Response header selecting the non-AMP rendering method (see XORIGIN_MODE).
/** @type {string} @visibleForTesting */
export const RENDERING_TYPE_HEADER = 'X-AmpAdRender';
/** @type {string} @visibleForTesting */
export const SAFEFRAME_VERSION_HEADER = 'X-AmpSafeFrameVersion';
// Response header with comma-separated key=value experiment features; parsed
// by populatePostAdResponseExperimentFeatures_.
/** @type {string} @visibleForTesting */
export const EXPERIMENT_FEATURE_HEADER_NAME = 'amp-ff-exps';
/** @type {string} */
const TAG = 'amp-a4a';
// Sentinel rejection reason used to short-circuit the ad promise chain when
// the server returns no creative (204 / empty body).
/** @type {string} */
const NO_CONTENT_RESPONSE = 'NO-CONTENT-RESPONSE';
/** @enum {string} */
export const XORIGIN_MODE = {
CLIENT_CACHE: 'client_cache',
SAFEFRAME: 'safeframe',
NAMEFRAME: 'nameframe',
};
// Attributes applied to every ad iframe this extension creates.
/** @type {!Object} @private */
const SHARED_IFRAME_PROPERTIES = dict({
'frameborder': '0',
'allowfullscreen': '',
'allowtransparency': '',
'scrolling': 'no',
'marginwidth': '0',
'marginheight': '0',
});
/** @typedef {{width: number, height: number}} */
export let SizeInfoDef;
/** @typedef {{
minifiedCreative: string,
customElementExtensions: !Array<string>,
customStylesheets: !Array<{href: string}>,
images: (Array<string>|undefined),
}} */
let CreativeMetaDataDef;
/** @private */
export const LIFECYCLE_STAGES = {
// Note: Use strings as values here, rather than numbers, so that "0" does
// not test as `false` later.
// NOTE(review): some numeric values (e.g. '4', '28') are absent — presumably
// retired stages; do not renumber existing entries.
adSlotCleared: '-1',
urlBuilt: '1',
adRequestStart: '2',
adRequestEnd: '3',
adResponseValidateStart: '5',
renderFriendlyStart: '6', // TODO(dvoytenko): this signal and similar are actually "embed-create", not "render-start".
renderCrossDomainStart: '7',
renderFriendlyEnd: '8',
renderCrossDomainEnd: '9',
preAdThrottle: '10',
renderSafeFrameStart: '11',
throttled3p: '12',
adResponseValidateEnd: '13',
xDomIframeLoaded: '14',
friendlyIframeLoaded: '15',
adSlotCollapsed: '16',
adSlotUnhidden: '17',
layoutAdPromiseDelay: '18',
signatureVerifySuccess: '19',
networkError: '20',
friendlyIframeIniLoad: '21',
visHalf: '22',
visHalfIniLoad: '23',
firstVisible: '24',
visLoadAndOneSec: '25',
iniLoad: '26',
resumeCallback: '27',
visIniLoad: '29',
upgradeDelay: '30',
// TODO(warrengm): This should replace xDomIframeLoaded once delayed fetch
// is fully deprecated. A new lifecycle stage, crossDomainIframeLoaded, was
// introduced since xDomIframeLoaded is handled in AmpAdXOriginIframeHandler
// outside A4A.
crossDomainIframeLoaded: '31',
};
/**
 * Name of A4A lifecycle triggers.
 * @enum {string}
 */
export const AnalyticsTrigger = {
AD_REQUEST_START: 'ad-request-start',
AD_RESPONSE_END: 'ad-response-end',
AD_RENDER_START: 'ad-render-start',
AD_RENDER_END: 'ad-render-end',
AD_IFRAME_LOADED: 'ad-iframe-loaded',
};
/**
 * Maps the names of lifecycle events to analytics triggers.
 * @const {!Object<string, !AnalyticsTrigger>}
 */
const LIFECYCLE_STAGE_TO_ANALYTICS_TRIGGER = {
'adRequestStart': AnalyticsTrigger.AD_REQUEST_START,
'adRequestEnd': AnalyticsTrigger.AD_RESPONSE_END,
'renderFriendlyStart': AnalyticsTrigger.AD_RENDER_START,
'renderCrossDomainStart': AnalyticsTrigger.AD_RENDER_START,
'renderSafeFrameStart': AnalyticsTrigger.AD_RENDER_START,
'renderFriendlyEnd': AnalyticsTrigger.AD_RENDER_END,
'renderCrossDomainEnd': AnalyticsTrigger.AD_RENDER_END,
'friendlyIframeIniLoad': AnalyticsTrigger.AD_IFRAME_LOADED,
'crossDomainIframeLoaded': AnalyticsTrigger.AD_IFRAME_LOADED,
};
/**
 * Wraps `fn` so that any exception it throws is routed to the optional
 * `onError` handler instead of propagating to the caller. If no handler is
 * provided, or the handler itself throws, the error is swallowed and the
 * wrapper returns undefined.
 * @param {!Function} fn function to protect
 * @param {T=} inThis An optional object bound as `this` when invoking `fn`
 *     (and `onError`). When omitted, undefined is bound instead.
 * @param {function(this:T, !Error, ...*):?=} onError invoked with the thrown
 *     error followed by the arguments of the original call; its return value
 *     becomes the wrapper's return value.
 * @return {!Function} protected function
 * @template T
 * @visibleForTesting
 */
export function protectFunctionWrapper(
    fn, inThis = undefined, onError = undefined) {
  return function() {
    const callArgs = Array.prototype.slice.call(arguments);
    try {
      return fn.apply(inThis, callArgs);
    } catch (err) {
      if (!onError) {
        // No handler: swallow the error and yield undefined.
        return undefined;
      }
      try {
        // Hand the handler the error first, then the original arguments.
        // (Spread is disallowed by the project linter, hence concat.)
        return onError.apply(inThis, [err].concat(callArgs));
      } catch (ignored) {
        // The error handler itself threw; swallow and yield undefined.
        return undefined;
      }
    }
  };
}
export class AmpA4A extends AMP.BaseElement {
// TODO: Add more error handling throughout code.
// TODO: Handle creatives that do not fill.
/**
 * Sets up all per-slot state; no network or DOM work happens here beyond
 * reading static element attributes.
 * @param {!Element} element
 */
constructor(element) {
super(element);
// Both handler classes are registered on the AMP global by the amp-ad
// extension; fail fast if A4A is used without them.
dev().assert(AMP.AmpAdUIHandler);
dev().assert(AMP.AmpAdXOriginIframeHandler);
/** @private {?Promise<undefined>} */
this.keysetPromise_ = null;
/** @private {?Promise<?CreativeMetaDataDef>} */
this.adPromise_ = null;
/**
 * @private {number} unique ID of the currently executing promise to allow
 * for cancellation.
 */
this.promiseId_ = 0;
/** @private {?string} */
this.adUrl_ = null;
/** @private {?../../../src/friendly-iframe-embed.FriendlyIframeEmbed} */
this.friendlyIframeEmbed_ = null;
/** {?AMP.AmpAdUIHandler} */
this.uiHandler = null;
/** @private {?AMP.AmpAdXOriginIframeHandler} */
this.xOriginIframeHandler_ = null;
/** @private {boolean} whether creative has been verified as AMP */
this.isVerifiedAmpCreative_ = false;
// Raw creative bytes, retained only for SafeFrame/NameFrame rendering.
/** @private {?ArrayBuffer} */
this.creativeBody_ = null;
/**
 * Initialize this with the slot width/height attributes, and override
 * later with what the network implementation returns via extractSize.
 * Note: Either value may be 'auto' (i.e., non-numeric).
 *
 * @private {?({width, height}|../../../src/layout-rect.LayoutRectDef)}
 */
this.creativeSize_ = null;
/** @private {?../../../src/layout-rect.LayoutRectDef} */
this.originalSlotSize_ = null;
/**
 * Note(keithwrightbos) - ensure the default here is null so that ios
 * uses safeframe when response header is not specified.
 * @private {?XORIGIN_MODE}
 */
this.experimentalNonAmpCreativeRenderMethod_ =
this.getNonAmpCreativeRenderingMethod();
/**
 * Gets a notion of current time, in ms. The value is not necessarily
 * absolute, so should be used only for computing deltas. When available,
 * the performance system will be used; otherwise Date.now() will be
 * returned.
 *
 * @const {function():number}
 */
this.getNow_ = (this.win.performance && this.win.performance.now) ?
this.win.performance.now.bind(this.win.performance) : Date.now;
/**
 * Protected version of emitLifecycleEvent that ensures error does not
 * cause promise chain to reject.
 * @private {function(string, !Object=)}
 */
this.protectedEmitLifecycleEvent_ = protectFunctionWrapper(
this.emitLifecycleEvent, this,
(err, varArgs) => {
dev().error(TAG, this.element.getAttribute('type'),
'Error on emitLifecycleEvent', err, varArgs) ;
});
// Sentinel identifying frames/messages belonging to this slot (see
// src/3p-frame generateSentinel).
/** @const {string} */
this.sentinel = generateSentinel(window);
/**
 * Used to indicate whether this slot should be collapsed or not. Marked
 * true if the ad response has status 204, is null, or has a null
 * arrayBuffer.
 * @private {boolean}
 */
this.isCollapsed_ = false;
/**
 * Frame in which the creative renders (friendly if validated AMP, xdomain
 * otherwise).
 * {?HTMLIframeElement}
 */
this.iframe = null;
/**
 * TODO(keithwrightbos) - remove once resume behavior is verified.
 * {boolean} whether most recent ad request was generated as part
 * of resume callback.
 */
this.fromResumeCallback = false;
/** @protected {string} */
this.safeframeVersion = DEFAULT_SAFEFRAME_VERSION;
/**
 * @protected {boolean} Indicates whether the ad is currently in the
 * process of being refreshed.
 */
this.isRefreshing = false;
/** @protected {boolean} */
this.isRelayoutNeededFlag = false;
/**
 * Used as a signal in some of the CSI pings.
 * @private @const {string}
 */
this.releaseType_ = getBinaryTypeNumericalCode(getBinaryType(this.win)) ||
'-1';
/**
 * Mapping of feature name to value extracted from ad response header
 * amp-ff-exps with comma separated pairs of '=' separated key/value.
 * @type {!Object<string,string>}
 */
this.postAdResponseExperimentFeatures = {};
/**
 * The configuration for amp-analytics. If null, no amp-analytics element
 * will be inserted and no analytics events will be fired.
 * This will be initialized inside of buildCallback.
 * @private {?JsonObject}
 */
this.a4aAnalyticsConfig_ = null;
/**
 * The amp-analytics element that for this impl's analytics config. It will
 * be null before buildCallback() executes or if the impl does not provide
 * an analytice config.
 * @private {?Element}
 */
this.a4aAnalyticsElement_ = null;
}
/** @override */
getPriority() {
// Priority used for scheduling preload and layout callback. Because
// AMP creatives will be injected as part of the promise chain created
// within onLayoutMeasure, this is only relevant to non-AMP creatives
// therefore we want this to match the 3p priority.
const isPWA = !this.element.getAmpDoc().isSingleDoc();
// give the ad higher priority if it is inside a PWA
return isPWA ? 1 : 2;
}
/** @override */
isLayoutSupported(layout) {
// Any size-defined layout (fixed, responsive, etc.) can host an ad slot.
return isLayoutSizeDefined(layout);
}
/** @override */
isRelayoutNeeded() {
// Set to true during refresh() so the runtime re-invokes layoutCallback.
return this.isRelayoutNeededFlag;
}
/**
* @return {!Promise<boolean>} promise blocked on ad promise whose result is
* whether creative returned is validated as AMP.
*/
isVerifiedAmpCreativePromise() {
return this.adPromise_.then(() => this.isVerifiedAmpCreative_);
}
/** @override */
buildCallback() {
// Seed creative size from the slot attributes; extractSize may override.
this.creativeSize_ = {
width: this.element.getAttribute('width'),
height: this.element.getAttribute('height'),
};
const upgradeDelayMs = Math.round(this.getResource().getUpgradeDelayMs());
dev().info(TAG,
`upgradeDelay ${this.element.getAttribute('type')}: ${upgradeDelayMs}`);
this.handleLifecycleStage_('upgradeDelay', {
'forced_delta': upgradeDelayMs,
});
this.uiHandler = new AMP.AmpAdUIHandler(this);
// Warm up signature verification keys once the page is first visible so
// they are ready by the time the ad response arrives.
const verifier = signatureVerifierFor(this.win);
this.keysetPromise_ =
Services.viewerForDoc(this.getAmpDoc()).whenFirstVisible().then(() => {
this.getSigningServiceNames().forEach(signingServiceName => {
verifier.loadKeyset(signingServiceName);
});
});
this.a4aAnalyticsConfig_ = this.getA4aAnalyticsConfig();
if (this.a4aAnalyticsConfig_) {
// TODO(warrengm): Consider having page-level singletons for networks that
// use the same config for all ads.
this.a4aAnalyticsElement_ = insertAnalyticsElement(
this.element, this.a4aAnalyticsConfig_, true /* loadAnalytics */);
}
}
/** @override */
renderOutsideViewport() {
// Ensure non-verified AMP creatives are throttled.
if (!this.isVerifiedAmpCreative_ && is3pThrottled(this.win)) {
this.handleLifecycleStage_('throttled3p');
return false;
}
// Otherwise the ad is good to go.
const elementCheck = getAmpAdRenderOutsideViewport(this.element);
return elementCheck !== null ?
elementCheck : super.renderOutsideViewport();
}
/**
 * To be overridden by network specific implementation indicating if element
 * (and environment generally) are valid for sending XHR queries.
 * @return {boolean} whether element is valid and ad request should be
 *    sent. If false, no ad request is sent and slot will be collapsed if
 *    possible.
 */
isValidElement() {
// Default: every element is considered valid; networks override.
return true;
}
/**
 * @return {boolean} whether ad request should be delayed until
 * renderOutsideViewport is met.
 */
delayAdRequestEnabled() {
// Default: request immediately; networks override to opt in to delay.
return false;
}
/**
 * Returns preconnect urls for A4A. Ad network should overwrite in their
 * Fast Fetch implementation and return an array of urls for the runtime to
 * preconnect to.
 * @return {!Array<string>}
 */
getPreconnectUrls() {
// Default: no additional origins to preconnect.
return [];
}
/**
 * Returns prefetch urls for A4A. Ad network should overwrite in their
 * Fast Fetch implementation and return an array of urls for the runtime to
 * prefetch.
 * @return {!Array<string>}
 */
getPrefetchUrls() {
// Default: nothing to prefetch.
return [];
}
/**
* Returns true if this element was loaded from an amp-ad element. For use by
* network-specific implementations that don't want to allow themselves to be
* embedded directly into a page.
* @return {boolean}
*/
isAmpAdElement() {
return this.element.tagName == 'AMP-AD' ||
this.element.tagName == 'AMP-EMBED';
}
/**
 * Prefetches and preconnects URLs related to the ad using adPreconnect
 * registration which assumes ad request domain used for 3p is applicable.
 * @param {boolean=} unusedOnLayout
 * @override
 */
preconnectCallback(unusedOnLayout) {
// Warm both fallback render paths (SafeFrame and NameFrame bootstrap).
this.preconnect.preload(this.getSafeframePath_());
this.preconnect.preload(getDefaultBootstrapBaseUrl(this.win, 'nameframe'));
const preconnect = this.getPreconnectUrls();
// NOTE(keithwrightbos): using onLayout to indicate if preconnect should be
// given preferential treatment. Currently this would be false when
// relevant (i.e. want to preconnect on or before onLayoutMeasure) which
// causes preconnect to delay for 1 sec (see custom-element#preconnect)
// therefore hard coding to true.
// NOTE(keithwrightbos): Does not take isValidElement into account so could
// preconnect unnecessarily, however it is assumed that isValidElement
// matches amp-ad loader predicate such that A4A impl does not load.
if (preconnect) {
preconnect.forEach(p => {
this.preconnect.url(p, true);
});
}
}
/** @override */
resumeCallback() {
// FIE that was not destroyed on unlayoutCallback does not require a new
// ad request.
if (this.friendlyIframeEmbed_) {
return;
}
this.handleLifecycleStage_('resumeCallback');
this.fromResumeCallback = true;
// If layout of page has not changed, onLayoutMeasure will not be called
// so do so explicitly.
const resource = this.getResource();
if (resource.hasBeenMeasured() && !resource.isMeasureRequested()) {
this.onLayoutMeasure();
}
}
/**
* @return {!../../../src/service/resource.Resource}
* @visibileForTesting
*/
getResource() {
return this.element.getResources().getResourceForElement(this.element);
}
/**
* @return {boolean} whether adPromise was initialized (indicator of
* element validity).
* @protected
*/
hasAdPromise() {
return !!this.adPromise_;
}
/**
 * @return {boolean} whether environment/element should initialize ad request
 *    promise chain.
 * @private
 */
shouldInitializePromiseChain_() {
// Zero-sized (non-fluid) slots cannot render an ad; skip the request.
const slotRect = this.getIntersectionElementLayoutBox();
if (this.getLayout() != Layout.FLUID &&
(slotRect.height == 0 || slotRect.width == 0)) {
dev().fine(
TAG, 'onLayoutMeasure canceled due height/width 0', this.element);
return false;
}
if (!isAdPositionAllowed(this.element, this.win)) {
user().warn(TAG, `<${this.element.tagName}> is not allowed to be ` +
`placed in elements with position:fixed: ${this.element}`);
return false;
}
// OnLayoutMeasure can be called when page is in prerender so delay until
// visible. Assume that it is ok to call isValidElement as it should
// only being looking at window, immutable properties (i.e. location) and
// its element ancestry.
if (!this.isValidElement()) {
// TODO(kjwright): collapse?
user().warn(TAG, this.element.getAttribute('type'),
'Amp ad element ignored as invalid', this.element);
return false;
}
return true;
}
/** @override */
onLayoutMeasure() {
// Starts (or no-ops if already started) the ad request promise chain.
this.initiateAdRequest();
}
/**
 * This is the entry point into the ad promise chain.
 *
 * Calling this function will initiate the following sequence of events: ad
 * url construction, ad request issuance, creative verification, and metadata
 * parsing.
 *
 * @protected
 */
initiateAdRequest() {
// Forward the measure notification to a live x-origin iframe handler.
if (this.xOriginIframeHandler_) {
this.xOriginIframeHandler_.onLayoutMeasure();
}
// Idempotent: a chain already in flight (or an invalid slot) is left alone.
if (this.adPromise_ || !this.shouldInitializePromiseChain_()) {
return;
}
// If in localDev `type=fake` Ad specifies `force3p`, it will be forced
// to go via 3p.
if (getMode().localDev &&
this.element.getAttribute('type') == 'fake' &&
this.element.getAttribute('force3p') == 'true') {
this.adUrl_ = this.getAdUrl();
this.adPromise_ = Promise.resolve();
return;
}
// Increment unique promise ID so that if its value changes within the
// promise chain due to cancel from unlayout, the promise will be rejected.
++this.promiseId_;
// Shorthand for: reject promise if current promise chain is out of date.
const checkStillCurrent = this.verifyStillCurrent();
// Return value from this chain: True iff rendering was "successful"
// (i.e., shouldn't try to render later via iframe); false iff should
// try to render later in iframe.
// Cases to handle in this chain:
//   - Everything ok  =&gt; Render; return true
//   - Empty network response returned =&gt; Don't render; return true
//   - Can't parse creative out of response =&gt; Don't render; return false
//   - Can parse, but creative is empty =&gt; Don't render; return true
//   - Validation fails =&gt; return false
//   - Rendering fails =&gt; return false
//   - Chain cancelled =&gt; don't return; drop error
//   - Uncaught error otherwise =&gt; don't return; percolate error up
this.adPromise_ = Services.viewerForDoc(this.getAmpDoc()).whenFirstVisible()
.then(() => {
checkStillCurrent();
// See if experiment that delays request until slot is within
// renderOutsideViewport. Within render outside viewport will not
// resolve if already within viewport thus the check for already
// meeting the definition as opposed to waiting on the promise.
if (this.delayAdRequestEnabled() &&
!this.getResource().renderOutsideViewport()) {
return this.getResource().whenWithinRenderOutsideViewport();
}
})
// This block returns the ad URL, if one is available.
/** @return {!Promise<?string>} */
.then(() => {
checkStillCurrent();
return /** @type {!Promise<?string>} */(
this.getAdUrl(this.tryExecuteRealTimeConfig_()));
})
// This block returns the (possibly empty) response to the XHR request.
/** @return {!Promise<?Response>} */
.then(adUrl => {
checkStillCurrent();
this.adUrl_ = adUrl;
this.handleLifecycleStage_('urlBuilt');
return adUrl && this.sendXhrRequest(adUrl);
})
// The following block returns either the response (as a {bytes, headers}
// object), or null if no response is available / response is empty.
/** @return {?Promise<?{bytes: !ArrayBuffer, headers: !Headers}>} */
.then(fetchResponse => {
checkStillCurrent();
this.handleLifecycleStage_('adRequestEnd');
// If the response is null, we want to return null so that
// unlayoutCallback will attempt to render via x-domain iframe,
// assuming ad url or creative exist.
if (!fetchResponse) {
return null;
}
if (fetchResponse.headers && fetchResponse.headers.has(
EXPERIMENT_FEATURE_HEADER_NAME)) {
this.populatePostAdResponseExperimentFeatures_(
fetchResponse.headers.get(EXPERIMENT_FEATURE_HEADER_NAME));
}
if (getMode().localDev && this.win.location &&
this.win.location.search) {
// Allow for setting experiment features via query param which
// will potentially override values returned in response.
const match = /(?:\?|&)a4a_feat_exp=([^&]+)/.exec(
this.win.location.search);
if (match && match[1]) {
dev().info(TAG, `Using debug exp features: ${match[1]}`);
this.populatePostAdResponseExperimentFeatures_(
tryDecodeUriComponent(match[1]));
}
}
// If the response has response code 204, or arrayBuffer is null,
// collapse it.
if (!fetchResponse.arrayBuffer || fetchResponse.status == 204) {
this.forceCollapse();
return Promise.reject(NO_CONTENT_RESPONSE);
}
// TODO(tdrl): Temporary, while we're verifying whether SafeFrame is
// an acceptable solution to the 'Safari on iOS doesn't fetch
// iframe src from cache' issue.  See
// https://github.com/ampproject/amphtml/issues/5614
const method = this.getNonAmpCreativeRenderingMethod(
fetchResponse.headers.get(RENDERING_TYPE_HEADER));
this.experimentalNonAmpCreativeRenderMethod_ = method;
// Adopt a server-specified SafeFrame version and warm its bootstrap.
const safeframeVersionHeader =
fetchResponse.headers.get(SAFEFRAME_VERSION_HEADER);
if (/^[0-9-]+$/.test(safeframeVersionHeader) &&
safeframeVersionHeader != DEFAULT_SAFEFRAME_VERSION) {
this.safeframeVersion = safeframeVersionHeader;
this.preconnect.preload(this.getSafeframePath_());
}
// Note: Resolving a .then inside a .then because we need to capture
// two fields of fetchResponse, one of which is, itself, a promise,
// and one of which isn't. If we just return
// fetchResponse.arrayBuffer(), the next step in the chain will
// resolve it to a concrete value, but we'll lose track of
// fetchResponse.headers.
return fetchResponse.arrayBuffer().then(bytes => {
if (bytes.byteLength == 0) {
// The server returned no content. Instead of displaying a blank
// rectangle, we collapse the slot instead.
this.forceCollapse();
return Promise.reject(NO_CONTENT_RESPONSE);
}
return {
bytes,
headers: fetchResponse.headers,
};
});
})
// This block returns the ad creative if it exists and validates as AMP;
// null otherwise.
/** @return {!Promise<?ArrayBuffer>} */
.then(responseParts => {
checkStillCurrent();
// Keep a handle to the creative body so that we can render into
// SafeFrame or NameFrame later, if necessary. TODO(tdrl): Temporary,
// while we
// assess whether this is the right solution to the Safari+iOS iframe
// src cache issue. If we decide to keep a SafeFrame-like solution,
// we should restructure the promise chain to pass this info along
// more cleanly, without use of an object variable outside the chain.
if (!responseParts) {
return Promise.resolve();
}
const {bytes, headers} = responseParts;
const size = this.extractSize(responseParts.headers);
this.creativeSize_ = size || this.creativeSize_;
if (this.experimentalNonAmpCreativeRenderMethod_ !=
XORIGIN_MODE.CLIENT_CACHE &&
bytes) {
this.creativeBody_ = bytes;
}
this.handleLifecycleStage_('adResponseValidateStart');
return this.keysetPromise_
.then(() => signatureVerifierFor(this.win)
.verify(bytes, headers, (eventName, extraVariables) => {
this.handleLifecycleStage_(
eventName, extraVariables);
}))
.then(status => {
if (getMode().localDev &&
this.element.getAttribute('type') == 'fake') {
// do not verify signature for fake type ad
status = VerificationStatus.OK;
}
this.handleLifecycleStage_('adResponseValidateEnd', {
'signatureValidationResult': status,
'releaseType': this.releaseType_,
});
switch (status) {
case VerificationStatus.OK:
return bytes;
case VerificationStatus.UNVERIFIED:
return null;
case VerificationStatus.CRYPTO_UNAVAILABLE:
return this.shouldPreferentialRenderWithoutCrypto() ?
bytes : null;
// TODO(@taymonbeal, #9274): differentiate between these
case VerificationStatus.ERROR_KEY_NOT_FOUND:
case VerificationStatus.ERROR_SIGNATURE_MISMATCH:
user().error(
TAG, this.element.getAttribute('type'),
'Signature verification failed');
return null;
}
});
})
.then(creative => {
checkStillCurrent();
// Need to know if creative was verified as part of render outside
// viewport but cannot wait on promise.  Sadly, need a state a
// variable.
this.isVerifiedAmpCreative_ = !!creative;
return creative && utf8Decode(creative);
})
// This block returns CreativeMetaDataDef iff the creative was verified
// as AMP and could be properly parsed for friendly iframe render.
/** @return {?CreativeMetaDataDef} */
.then(creativeDecoded => {
checkStillCurrent();
// Note: It's critical that #getAmpAdMetadata_ be called
// on precisely the same creative that was validated
// via #validateAdResponse_.  See GitHub issue
// https://github.com/ampproject/amphtml/issues/4187
let creativeMetaDataDef;
if (!creativeDecoded ||
!(creativeMetaDataDef = this.getAmpAdMetadata_(creativeDecoded))) {
return null;
}
// Update priority.
this.updatePriority(0);
// Load any extensions; do not wait on their promises as this
// is just to prefetch.
const extensions = Services.extensionsFor(this.win);
creativeMetaDataDef.customElementExtensions.forEach(
extensionId => extensions.preloadExtension(extensionId));
// Preload any fonts.
(creativeMetaDataDef.customStylesheets || []).forEach(font =>
this.preconnect.preload(font.href));
// Preload any AMP images.
(creativeMetaDataDef.images || []).forEach(image =>
isSecureUrl(image) && this.preconnect.preload(image));
return creativeMetaDataDef;
})
.catch(error => {
if (error == NO_CONTENT_RESPONSE) {
// Empty metadata keeps the slot in the "AMP creative" path while
// rendering nothing (slot was already collapsed by forceCollapse).
return {
minifiedCreative: '',
customElementExtensions: [],
customStylesheets: [],
};
}
// If error in chain occurs, report it and return null so that
// layoutCallback can render via cross domain iframe assuming ad
// url or creative exist.
this.promiseErrorHandler_(error);
return null;
});
}
/**
* Populates object mapping of feature to value used for post ad response
* behavior experimentation. Assumes comma separated, = delimited key/value
* pairs. If key appears more than once, last value wins.
* @param {string} input
* @private
*/
populatePostAdResponseExperimentFeatures_(input) {
input.split(',').forEach(line => {
if (!line) {
return;
}
const parts = line.split('=');
if (parts.length != 2 || !parts[0]) {
dev().warn(TAG, `invalid experiment feature ${line}`);
return;
}
this.postAdResponseExperimentFeatures[parts[0]] = parts[1];
});
}
/**
* Refreshes ad slot by fetching a new creative and rendering it. This leaves
* the current creative displayed until the next one is ready.
*
* @param {function()} refreshEndCallback When called, this function will
* restart the refresh cycle.
* @return {Promise} A promise that resolves when all asynchronous portions of
* the refresh function complete. This is particularly handy for testing.
*/
refresh(refreshEndCallback) {
dev().assert(!this.isRefreshing);
this.isRefreshing = true;<|fim▁hole|> this.initiateAdRequest();
dev().assert(this.adPromise_);
const promiseId = this.promiseId_;
return this.adPromise_.then(() => {
if (!this.isRefreshing || promiseId != this.promiseId_) {
// If this refresh cycle was canceled, such as in a no-content
// response case, keep showing the old creative.
refreshEndCallback();
return;
}
return this.mutateElement(() => {
this.togglePlaceholder(true);
// This delay provides a 1 second buffer where the ad loader is
// displayed in between the creatives.
return Services.timerFor(this.win).promise(1000).then(() => {
this.isRelayoutNeededFlag = true;
this.getResource().layoutCanceled();
Services.resourcesForDoc(this.getAmpDoc())
./*OK*/requireLayout(this.element);
});
});
});
}
/**
 * Handles uncaught errors within promise flow.
 * @param {*} error
 * @param {boolean=} opt_ignoreStack
 * @private
 */
promiseErrorHandler_(error, opt_ignoreStack) {
if (isCancellation(error)) {
// Rethrow if cancellation.
throw error;
}
// Normalize to an Error instance we are allowed to mutate.
if (error && error.message) {
error = duplicateErrorIfNecessary(/** @type {!Error} */(error));
} else {
error = new Error('unknown error ' + error);
}
if (opt_ignoreStack) {
error.ignoreStack = opt_ignoreStack;
}
// Add `type` to the message. Ensure to preserve the original stack.
const type = this.element.getAttribute('type') || 'notype';
if (error.message.indexOf(`${TAG}: ${type}:`) != 0) {
error.message = `${TAG}: ${type}: ${error.message}`;
}
// Additional arguments.
assignAdUrlToError(/** @type {!Error} */(error), this.adUrl_);
if (getMode().development || getMode().localDev || getMode().log) {
user().error(TAG, error);
} else {
user().warn(TAG, error);
// Report with 1% sampling as an expected dev error.
if (Math.random() < 0.01) {
dev().expectedError(TAG, error);
}
}
}
/** @override */
layoutCallback() {
if (this.isRefreshing) {
this.destroyFrame(true);
}
return this.attemptToRenderCreative();
}
/**
 * Attemps to render the returned creative following the resolution of the
 * adPromise.
 *
 * @return {!Promise<boolean>|!Promise<undefined>} A promise that resolves
 *   when the rendering attempt has finished.
 * @protected
 */
attemptToRenderCreative() {
// Promise may be null if element was determined to be invalid for A4A.
if (!this.adPromise_) {
if (this.shouldInitializePromiseChain_()) {
dev().error(TAG, 'Null promise in layoutCallback');
}
return Promise.resolve();
}
// There's no real throttling with A4A, but this is the signal that is
// most comparable with the layout callback for 3p ads.
this.handleLifecycleStage_('preAdThrottle');
const layoutCallbackStart = this.getNow_();
const checkStillCurrent = this.verifyStillCurrent();
// Promise chain will have determined if creative is valid AMP.
return this.adPromise_.then(creativeMetaData => {
checkStillCurrent();
const delta = this.getNow_() - layoutCallbackStart;
this.handleLifecycleStage_('layoutAdPromiseDelay', {
layoutAdPromiseDelay: Math.round(delta),
isAmpCreative: !!creativeMetaData,
});
if (this.isCollapsed_) {
// Slot was collapsed (no-content response); nothing to render.
return Promise.resolve();
}
// If this.iframe already exists, and we're not currently in the middle
// of refreshing, bail out here. This should only happen in
// testing context, not in production.
if (this.iframe && !this.isRefreshing) {
this.handleLifecycleStage_('iframeAlreadyExists');
return Promise.resolve();
}
if (!creativeMetaData) {
// Non-AMP creative case, will verify ad url existence.
return this.renderNonAmpCreative_();
}
// Must be an AMP creative.
return this.renderAmpCreative_(creativeMetaData)
.catch(err => {
checkStillCurrent();
// Failed to render via AMP creative path so fallback to non-AMP
// rendering within cross domain iframe.
user().error(TAG, this.element.getAttribute('type'),
'Error injecting creative in friendly frame', err);
this.promiseErrorHandler_(err);
return this.renderNonAmpCreative_();
});
}).catch(error => {
this.promiseErrorHandler_(error);
throw cancellation();
});
}
/** @override **/
attemptChangeSize(newHeight, newWidth) {
// Store original size of slot in order to allow re-expansion on
// unlayoutCallback so that it is reverted to original size in case
// of resumeCallback.
this.originalSlotSize_ = this.originalSlotSize_ || this.getLayoutBox();
// Swallow rejection: a denied resize is not an error for callers.
return super.attemptChangeSize(newHeight, newWidth).catch(() => {});
}
/** @override */
unlayoutCallback() {
this.tearDownSlot();
// Returning true tells the runtime the element released its resources.
return true;
}
/**
 * Attempts to tear down and set all state variables to initial conditions.
 * @protected
 */
tearDownSlot() {
// Increment promiseId to cause any pending promise to cancel.
this.promiseId_++;
this.handleLifecycleStage_('adSlotCleared');
this.uiHandler.applyUnlayoutUI();
// Revert any resize performed during this pageview (best-effort).
if (this.originalSlotSize_) {
super.attemptChangeSize(
this.originalSlotSize_.height, this.originalSlotSize_.width)
.then(() => {
this.originalSlotSize_ = null;
})
.catch(err => {
// TODO(keithwrightbos): if we are unable to revert size, on next
// trigger of promise chain the ad request may fail due to invalid
// slot size. Determine how to handle this case.
dev().warn(TAG, 'unable to revert to original size', err);
});
}
this.isCollapsed_ = false;
// Remove rendering frame, if it exists.
this.destroyFrame();
// Reset all per-request state so the next pageview starts clean.
this.adPromise_ = null;
this.adUrl_ = null;
this.creativeBody_ = null;
this.isVerifiedAmpCreative_ = false;
this.fromResumeCallback = false;
this.experimentalNonAmpCreativeRenderMethod_ =
this.getNonAmpCreativeRenderingMethod();
this.postAdResponseExperimentFeatures = {};
}
/**
* Attempts to remove the current frame and free any associated resources.
* This function will no-op if this ad slot is currently in the process of
* being refreshed.
*
* @param {boolean=} force Forces the removal of the frame, even if
* this.isRefreshing is true.
* @protected
*/
destroyFrame(force = false) {
if (!force && this.isRefreshing) {
return;
}
if (this.iframe && this.iframe.parentElement) {
this.iframe.parentElement.removeChild(this.iframe);
this.iframe = null;
}
if (this.xOriginIframeHandler_) {
this.xOriginIframeHandler_.freeXOriginIframe();
this.xOriginIframeHandler_ = null;
}
// Allow embed to release its resources.
if (this.friendlyIframeEmbed_) {
this.friendlyIframeEmbed_.destroy();
this.friendlyIframeEmbed_ = null;
}
}
/** @override */
viewportCallback(inViewport) {
if (this.friendlyIframeEmbed_) {
setFriendlyIframeEmbedVisible(this.friendlyIframeEmbed_, inViewport);
}
if (this.xOriginIframeHandler_) {
this.xOriginIframeHandler_.viewportCallback(inViewport);
}
}
/** @override */
createPlaceholderCallback() {
// Delegate placeholder creation to the shared amp-ad UI handler.
return this.uiHandler.createPlaceholder();
}
/**
 * Gets the Ad URL to send an XHR Request to.  To be implemented
 * by network.
 * @param {Promise<!Array<rtcResponseDef>>=} opt_rtcResponsesPromise
 * @return {!Promise<string>|string}
 */
getAdUrl(opt_rtcResponsesPromise) {
// Abstract: every network implementation must override this.
throw new Error('getAdUrl not implemented!');
}
/**
 * Resets ad url state to null, used to prevent frame get fallback if error
 * is thrown after url construction but prior to layoutCallback.
 */
resetAdUrl() {
// With adUrl_ null, the x-domain iframe fallback path has nothing to load.
this.adUrl_ = null;
}
/**
* @return {!function()} function that when called will verify if current
* ad retrieval is current (meaning unlayoutCallback was not executed).
* If not, will throw cancellation exception;
* @throws {Error}
*/
verifyStillCurrent() {
const promiseId = this.promiseId_;
return () => {
if (promiseId != this.promiseId_) {
throw cancellation();
}
};
}
/**
* Determine the desired size of the creative based on the HTTP response
* headers. Must be less than or equal to the original size of the ad slot
* along each dimension. May be overridden by network.
*
* @param {!../../../src/service/xhr-impl.FetchResponseHeaders} responseHeaders
* @return {?SizeInfoDef}
*/
extractSize(responseHeaders) {
const headerValue = responseHeaders.get(CREATIVE_SIZE_HEADER);
if (!headerValue) {
return null;
}
const match = /^([0-9]+)x([0-9]+)$/.exec(headerValue);
if (!match) {
// TODO(@taymonbeal, #9274): replace this with real error reporting
user().error(TAG, `Invalid size header: ${headerValue}`);
return null;
}
return /** @type {?SizeInfoDef} */ (
{width: Number(match[1]), height: Number(match[2])});
}
  /**
   * Forces the UI Handler to collapse this slot.
   * @visibleForTesting
   */
  forceCollapse() {
    if (this.isRefreshing) {
      // If, for whatever reason, the new creative would collapse this slot,
      // stick with the old creative until the next refresh cycle.
      this.isRefreshing = false;
      return;
    }
    dev().assert(this.uiHandler);
    // Store original size to allow for reverting on unlayoutCallback so that
    // subsequent pageview allows for ad request.
    this.originalSlotSize_ = this.originalSlotSize_ || this.getLayoutBox();
    this.uiHandler.applyNoContentUI();
    // Record collapsed state for later render/restore logic.
    this.isCollapsed_ = true;
  }
/**
* Callback executed when creative has successfully rendered within the
* publisher page but prior to load (or ini-load for friendly frame AMP
* creative render). To be overridden by network implementations as needed.
*
* @param {?CreativeMetaDataDef} creativeMetaData metadata if AMP creative,
* null otherwise.
*/
onCreativeRender(creativeMetaData) {
const lifecycleStage =
creativeMetaData ? 'renderFriendlyEnd' : 'renderCrossDomainEnd';
this.handleLifecycleStage_(lifecycleStage);
}
  /**
   * Hook invoked with the just-created cross-domain iframe.  The default
   * implementation only logs; no state is modified here.
   * @param {!Element} iframe that was just created. To be overridden for
   *    testing.
   * @visibleForTesting
   */
  onCrossDomainIframeCreated(iframe) {
    dev().info(TAG, this.element.getAttribute('type'),
        `onCrossDomainIframeCreated ${iframe}`);
  }
  /**
   * Send ad request, extract the creative and signature from the response.
   * Resolves with null (rather than rejecting) when the network request
   * fails, so callers can fall back to a frame GET.
   * @param {string} adUrl Request URL to send XHR to.
   * @return {!Promise<?../../../src/service/xhr-impl.FetchResponse>}
   * @protected
   */
  sendXhrRequest(adUrl) {
    this.handleLifecycleStage_('adRequestStart');
    // CORS fetch with credentials so the ad server sees publisher cookies.
    const xhrInit = {
      mode: 'cors',
      method: 'GET',
      credentials: 'include',
    };
    return Services.xhrFor(this.win)
        .fetch(adUrl, xhrInit)
        .catch(error => {
          // If an error occurs, let the ad be rendered via iframe after delay.
          // TODO(taymonbeal): Figure out a more sophisticated test for deciding
          // whether to retry with an iframe after an ad request failure or just
          // give up and render the fallback content (or collapse the ad slot).
          this.handleLifecycleStage_('networkError');
          // Give the network implementation a chance to adjust or disable
          // the fallback frame GET.
          const networkFailureHandlerResult =
              this.onNetworkFailure(error, this.adUrl_);
          dev().assert(!!networkFailureHandlerResult);
          if (networkFailureHandlerResult.frameGetDisabled) {
            // Reset adUrl to null which will cause layoutCallback to not
            // fetch via frame GET.
            dev().info(
                TAG, 'frame get disabled as part of network failure handler');
            this.resetAdUrl();
          } else {
            this.adUrl_ = networkFailureHandlerResult.adUrl || this.adUrl_;
          }
          return null;
        });
  }
  /**
   * Called on network failure sending XHR CORS ad request allowing for
   * modification of ad url and prevent frame GET request on layoutCallback.
   * By default, GET frame request will be executed with same ad URL as used
   * for XHR CORS request.  Networks override this to customize failure
   * handling.
   * @param {*} unusedError from network failure
   * @param {string} unusedAdUrl used for network request
   * @return {!{adUrl: (string|undefined), frameGetDisabled: (boolean|undefined)}}
   */
  onNetworkFailure(unusedError, unusedAdUrl) {
    // Empty result: caller keeps the original ad URL and frame GET enabled.
    return {};
  }
/**
* To be overridden by network specific implementation indicating which
* signing service(s) is to be used.
* @return {!Array<string>} A list of signing services.
*/
getSigningServiceNames() {
return getMode().localDev ? ['google', 'google-dev'] : ['google'];
}
  /**
   * Render non-AMP creative within cross domain iframe.
   * @return {Promise<boolean>} Whether the creative was successfully rendered.
   * @private
   */
  renderNonAmpCreative_() {
    if (this.element.getAttribute('disable3pfallback') == 'true') {
      user().warn(TAG, this.element.getAttribute('type'),
          'fallback to 3p disabled');
      return Promise.resolve(false);
    }
    // Surface the fallback as a (stackless) error for reporting purposes.
    this.promiseErrorHandler_(
        new Error('fallback to 3p'),
        /* ignoreStack */ true);
    // Haven't rendered yet, so try rendering via one of our
    // cross-domain iframe solutions.
    const method = this.experimentalNonAmpCreativeRenderMethod_;
    let renderPromise = Promise.resolve(false);
    if ((method == XORIGIN_MODE.SAFEFRAME ||
         method == XORIGIN_MODE.NAMEFRAME) &&
        this.creativeBody_) {
      renderPromise = this.renderViaNameAttrOfXOriginIframe_(
          this.creativeBody_);
      this.creativeBody_ = null; // Free resources.
    } else if (this.adUrl_) {
      assertHttpsUrl(this.adUrl_, this.element);
      renderPromise = this.renderViaCachedContentIframe_(this.adUrl_);
    } else {
      // Ad URL may not exist if buildAdUrl throws error or returns empty.
      // If error occurred, it would have already been reported but let's
      // report to user in case of empty.
      user().warn(TAG, this.element.getAttribute('type'),
          'No creative or URL available -- A4A can\'t render any ad');
    }
    incrementLoadingAds(this.win, renderPromise);
    return renderPromise.then(
        result => {
          this.handleLifecycleStage_('crossDomainIframeLoaded');
          // Pass on the result to the next value in the promise chain.
          return result;
        });
  }
  /**
   * Render a validated AMP creative directly in the parent page.
   * @param {!CreativeMetaDataDef} creativeMetaData Metadata required to render
   *     AMP creative.
   * @return {!Promise} Whether the creative was successfully rendered.
   * @private
   */
  renderAmpCreative_(creativeMetaData) {
    dev().assert(creativeMetaData.minifiedCreative,
        'missing minified creative');
    dev().assert(!!this.element.ownerDocument, 'missing owner document?!');
    this.handleLifecycleStage_('renderFriendlyStart');
    // Create and setup friendly iframe.
    this.iframe = /** @type {!HTMLIFrameElement} */(
        createElementWithAttributes(
            /** @type {!Document} */(this.element.ownerDocument), 'iframe',
            dict({
              // NOTE: It is possible for either width or height to be 'auto',
              // a non-numeric value.
              'height': this.creativeSize_.height,
              'width': this.creativeSize_.width,
              'frameborder': '0',
              'allowfullscreen': '',
              'allowtransparency': '',
              'scrolling': 'no',
            })));
    this.applyFillContent(this.iframe);
    // Collect stylesheet hrefs from the creative metadata; these are passed
    // to the embed as fonts to preload.
    const fontsArray = [];
    if (creativeMetaData.customStylesheets) {
      creativeMetaData.customStylesheets.forEach(s => {
        const href = s['href'];
        if (href) {
          fontsArray.push(href);
        }
      });
    }
    // Guard against the slot being unlaid out while async work is pending.
    const checkStillCurrent = this.verifyStillCurrent();
    return installFriendlyIframeEmbed(
        this.iframe, this.element, {
          host: this.element,
          url: this.adUrl_,
          html: creativeMetaData.minifiedCreative,
          extensionIds: creativeMetaData.customElementExtensions || [],
          fonts: fontsArray,
        }, embedWin => {
          installUrlReplacementsForEmbed(this.getAmpDoc(), embedWin,
              new A4AVariableSource(this.getAmpDoc(), embedWin));
        }).then(friendlyIframeEmbed => {
          checkStillCurrent();
          this.friendlyIframeEmbed_ = friendlyIframeEmbed;
          setFriendlyIframeEmbedVisible(
              friendlyIframeEmbed, this.isInViewport());
          // Ensure visibility hidden has been removed (set by boilerplate).
          const frameDoc = friendlyIframeEmbed.iframe.contentDocument ||
              friendlyIframeEmbed.win.document;
          setStyle(frameDoc.body, 'visibility', 'visible');
          // Bubble phase click handlers on the ad.
          this.registerAlpHandler_(friendlyIframeEmbed.win);
          // Capture timing info for friendly iframe load completion.
          getTimingDataAsync(
              friendlyIframeEmbed.win,
              'navigationStart', 'loadEventEnd').then(delta => {
                checkStillCurrent();
                this.handleLifecycleStage_('friendlyIframeLoaded', {
                  'navStartToLoadEndDelta.AD_SLOT_ID': Math.round(delta),
                });
              }).catch(err => {
                dev().error(TAG, this.element.getAttribute('type'),
                    'getTimingDataAsync for renderFriendlyEnd failed: ', err);
              });
          protectFunctionWrapper(this.onCreativeRender, this, err => {
            dev().error(TAG, this.element.getAttribute('type'),
                'Error executing onCreativeRender', err);
          })(creativeMetaData);
          // It's enough to wait for "ini-load" signal because in a FIE case
          // we know that the embed no longer consumes significant resources
          // after the initial load.
          return friendlyIframeEmbed.whenIniLoaded();
        }).then(() => {
          checkStillCurrent();
          // Capture ini-load ping.
          this.handleLifecycleStage_('friendlyIframeIniLoad');
        });
  }
  /**
   * Shared functionality for cross-domain iframe-based rendering methods.
   * Creates the iframe, hands it to the x-origin frame handler, and fires
   * the onCreativeRender hook (with null metadata) before the frame loads.
   * @param {!JsonObject<string, string>} attributes The attributes of the iframe.
   * @return {!Promise} awaiting load event for ad frame
   * @private
   */
  iframeRenderHelper_(attributes) {
    // Caller-supplied attributes win except for the slot dimensions.
    const mergedAttributes = Object.assign(attributes, dict({
      'height': this.creativeSize_.height,
      'width': this.creativeSize_.width,
    }));
    if (this.sentinel) {
      mergedAttributes['data-amp-3p-sentinel'] = this.sentinel;
    }
    this.iframe = createElementWithAttributes(
        /** @type {!Document} */ (this.element.ownerDocument),
        'iframe', /** @type {!JsonObject} */ (
            Object.assign(mergedAttributes, SHARED_IFRAME_PROPERTIES)));
    // TODO(keithwrightbos): noContentCallback?
    this.xOriginIframeHandler_ = new AMP.AmpAdXOriginIframeHandler(this);
    // Iframe is appended to element as part of xorigin frame handler init.
    // Execute onCreativeRender after init to ensure it can get reference
    // to frame but prior to load to allow for earlier access.
    const frameLoadPromise =
        this.xOriginIframeHandler_.init(this.iframe, /* opt_isA4A */ true);
    protectFunctionWrapper(this.onCreativeRender, this, err => {
      dev().error(TAG, this.element.getAttribute('type'),
          'Error executing onCreativeRender', err);
    })(null);
    return frameLoadPromise;
  }
  /**
   * Creates iframe whose src matches that of the ad URL. The response should
   * have been cached causing the browser to render without callout. However,
   * it is possible for cache miss to occur which can be detected server-side
   * by missing ORIGIN header.
   *
   * Note: As of 2016-10-18, the fill-from-cache assumption appears to fail on
   * Safari-on-iOS, which issues a fresh network request, even though the
   * content is already in cache.
   *
   * @param {string} adUrl Ad request URL, as sent to #sendXhrRequest (i.e.,
   *    before any modifications that XHR module does to it.)
   * @return {!Promise} awaiting ad completed insertion.
   * @private
   */
  renderViaCachedContentIframe_(adUrl) {
    this.handleLifecycleStage_('renderCrossDomainStart', {
      'isAmpCreative': this.isVerifiedAmpCreative_,
      'releaseType': this.releaseType_,
    });
    // The name attribute carries the serialized 3p context for the frame.
    return this.iframeRenderHelper_(dict({
      'src': Services.xhrFor(this.win).getCorsUrl(this.win, adUrl),
      'name': JSON.stringify(
          getContextMetadata(this.win, this.element, this.sentinel)),
    }));
  }
  /**
   * Render the creative via some "cross domain iframe that accepts the creative
   * in the name attribute". This could be SafeFrame or the AMP-native
   * NameFrame.
   *
   * @param {!ArrayBuffer} creativeBody
   * @return {!Promise} awaiting load event for ad frame
   * @private
   */
  renderViaNameAttrOfXOriginIframe_(creativeBody) {
    /** @type {string} */
    const method = this.experimentalNonAmpCreativeRenderMethod_;
    dev().assert(method == XORIGIN_MODE.SAFEFRAME ||
        method == XORIGIN_MODE.NAMEFRAME,
        'Unrecognized A4A cross-domain rendering mode: %s', method);
    this.handleLifecycleStage_('renderSafeFrameStart', {
      'isAmpCreative': this.isVerifiedAmpCreative_,
      'releaseType': this.releaseType_,
    });
    const checkStillCurrent = this.verifyStillCurrent();
    // The creative body arrives as an ArrayBuffer; decode it to a string
    // before embedding it into the frame's name attribute.
    return utf8Decode(creativeBody).then(creative => {
      checkStillCurrent();
      let srcPath;
      let name = '';
      switch (method) {
        case XORIGIN_MODE.SAFEFRAME:
          srcPath = this.getSafeframePath_() + '?n=0';
          break;
        case XORIGIN_MODE.NAMEFRAME:
          srcPath = getDefaultBootstrapBaseUrl(this.win, 'nameframe');
          // Name will be set for real below in nameframe case.
          break;
        default:
          // Shouldn't be able to get here, but... Because of the assert, above,
          // we can only get here in non-dev mode, so give user feedback.
          user().error('A4A', 'A4A received unrecognized cross-domain name'
              + ' attribute iframe rendering mode request: %s. Unable to'
              + ' render a creative for'
              + ' slot %s.', method, this.element.getAttribute('id'));
          return Promise.reject('Unrecognized rendering mode request');
      }
      // TODO(bradfrizzell): change name of function and var
      let contextMetadata = getContextMetadata(
          this.win, this.element, this.sentinel,
          this.getAdditionalContextMetadata());
      // TODO(bradfrizzell) Clean up name assigning.
      if (method == XORIGIN_MODE.NAMEFRAME) {
        contextMetadata['creative'] = creative;
        name = JSON.stringify(contextMetadata);
      } else if (method == XORIGIN_MODE.SAFEFRAME) {
        // SafeFrame name format: "<version>;<length>;<creative><context>".
        contextMetadata = JSON.stringify(contextMetadata);
        name = `${this.safeframeVersion};${creative.length};${creative}` +
            `${contextMetadata}`;
      }
      return this.iframeRenderHelper_(dict({'src': srcPath, 'name': name}));
    });
  }
  /**
   * Extracts and validates the AMP creative metadata blob embedded in the
   * creative between a METADATA_STRINGS marker and the final closing
   * script tag.
   * Throws {@code SyntaxError} if the metadata block delimiters are missing
   * or corrupted or if the metadata content doesn't parse as JSON.
   * @param {string} creative Serialized creative from which metadata is
   *    extracted.
   * @return {?CreativeMetaDataDef} Object result of parsing JSON data blob inside
   *     the metadata markers on the ad text, or null if no metadata markers are
   *     found.
   * @private
   * TODO(keithwrightbos@): report error cases
   */
  getAmpAdMetadata_(creative) {
    // Find whichever known metadata marker occurs last in the creative.
    let metadataStart = -1;
    let metadataString;
    for (let i = 0; i < METADATA_STRINGS.length; i++) {
      metadataString = METADATA_STRINGS[i];
      metadataStart = creative.lastIndexOf(metadataString);
      if (metadataStart >= 0) {
        break;
      }
    }
    if (metadataStart < 0) {
      // Couldn't find a metadata blob.
      dev().warn(TAG, this.element.getAttribute('type'),
          'Could not locate start index for amp meta data in: %s', creative);
      return null;
    }
    const metadataEnd = creative.lastIndexOf('</script>');
    if (metadataEnd < 0) {
      // Couldn't find a metadata blob.
      dev().warn(TAG, this.element.getAttribute('type'),
          'Could not locate closing script tag for amp meta data in: %s',
          creative);
      return null;
    }
    try {
      const metaDataObj = parseJson(
          creative.slice(metadataStart + metadataString.length, metadataEnd));
      // The runtime offsets must be a [start, end] pair of numbers.
      const ampRuntimeUtf16CharOffsets =
          metaDataObj['ampRuntimeUtf16CharOffsets'];
      if (!isArray(ampRuntimeUtf16CharOffsets) ||
          ampRuntimeUtf16CharOffsets.length != 2 ||
          typeof ampRuntimeUtf16CharOffsets[0] !== 'number' ||
          typeof ampRuntimeUtf16CharOffsets[1] !== 'number') {
        throw new Error('Invalid runtime offsets');
      }
      const metaData = {};
      if (metaDataObj['customElementExtensions']) {
        metaData.customElementExtensions =
            metaDataObj['customElementExtensions'];
        if (!isArray(metaData.customElementExtensions)) {
          throw new Error(
              'Invalid extensions', metaData.customElementExtensions);
        }
      } else {
        metaData.customElementExtensions = [];
      }
      if (metaDataObj['customStylesheets']) {
        // Expect array of objects with at least one key being 'href' whose
        // value is URL.
        metaData.customStylesheets = metaDataObj['customStylesheets'];
        const errorMsg = 'Invalid custom stylesheets';
        if (!isArray(metaData.customStylesheets)) {
          throw new Error(errorMsg);
        }
        metaData.customStylesheets.forEach(stylesheet => {
          if (!isObject(stylesheet) || !stylesheet['href'] ||
              typeof stylesheet['href'] !== 'string' ||
              !isSecureUrl(stylesheet['href'])) {
            throw new Error(errorMsg);
          }
        });
      }
      if (isArray(metaDataObj['images'])) {
        // Load maximum of 5 images.
        metaData.images = metaDataObj['images'].splice(0, 5);
      }
      // TODO(keithwrightbos): OK to assume ampRuntimeUtf16CharOffsets is before
      // metadata as its in the head?
      // Reassemble the creative without the runtime span and without the
      // metadata block itself.
      metaData.minifiedCreative =
          creative.slice(0, ampRuntimeUtf16CharOffsets[0]) +
          creative.slice(ampRuntimeUtf16CharOffsets[1], metadataStart) +
          creative.slice(metadataEnd + '</script>'.length);
      return metaData;
    } catch (err) {
      dev().warn(
          TAG, this.element.getAttribute('type'), 'Invalid amp metadata: %s',
          creative.slice(metadataStart + metadataString.length, metadataEnd));
      return null;
    }
  }
/**
* Registers a click handler for "A2A" (AMP-to-AMP navigation where the AMP
* viewer navigates to an AMP destination on our behalf.
* @param {!Window} iframeWin
*/
registerAlpHandler_(iframeWin) {
if (!isExperimentOn(this.win, 'alp-for-a4a')) {
return;
}
iframeWin.document.documentElement.addEventListener('click', event => {
handleClick(event, url => {
Services.viewerForDoc(this.getAmpDoc()).navigateTo(url, 'a4a');
});
});
}
/**
* @return {string} full url to safeframe implementation.
* @private
*/
getSafeframePath_() {
return 'https://tpc.googlesyndication.com/safeframe/' +
`${this.safeframeVersion}/html/container.html`;
}
  /**
   * Receive collapse notifications and record lifecycle events for them.
   *
   * @param {!AmpElement} unusedElement
   * @override
   */
  collapsedCallback(unusedElement) {
    this.handleLifecycleStage_('adSlotCollapsed');
  }
  /**
   * Handles a lifecycle event by triggering the corresponding analytics event
   * (if such an event exists) and by forwarding the event to the impl-specific
   * handler in #emitLifecycleEvent.
   * @param {string} eventName
   * @param {!Object<string, string>=} opt_vars
   * @private
   */
  handleLifecycleStage_(eventName, opt_vars) {
    // Analytics first, then the (error-protected) network-specific hook.
    this.maybeTriggerAnalyticsEvent_(eventName);
    this.protectedEmitLifecycleEvent_(eventName, opt_vars);
  }
/**
* Checks if the given lifecycle event has a corresponding amp-analytics event
* and fires the analytics trigger if so.
* @param {string} lifecycleStage
* @private
*/
maybeTriggerAnalyticsEvent_(lifecycleStage) {
if (!this.a4aAnalyticsConfig_) {
// No config exists that will listen to this event.
return;
}
const analyticsEvent =
LIFECYCLE_STAGE_TO_ANALYTICS_TRIGGER[lifecycleStage];
if (!analyticsEvent) {
// No analytics event is defined for this lifecycle stage.
return;
}
const analyticsVars = Object.assign(
{'time': Math.round(this.getNow_())},
this.getA4aAnalyticsVars(analyticsEvent));
triggerAnalyticsEvent(this.element, analyticsEvent, analyticsVars);
}
  /**
   * Returns variables to be included on an analytics event. This can be
   * overridden by specific network implementations.
   * Note that this function is called for each time an analytics event is
   * fired.
   * @param {string} unusedAnalyticsEvent The name of the analytics event.
   * @return {!Object<string, string>}
   */
  getA4aAnalyticsVars(unusedAnalyticsEvent) { return {}; } // Default: no extra vars.
  /**
   * Returns network-specific config for amp-analytics. It should be overridden
   * with network-specific configurations.
   * This function may return null. If so, no amp-analytics element will be
   * added to this A4A element and no A4A triggers will be fired.
   * @return {?JsonObject}
   */
  getA4aAnalyticsConfig() { return null; } // Default: analytics disabled.
  /**
   * To be overriden by network specific implementation.
   * This function will be called for each lifecycle event as specified in the
   * LIFECYCLE_STAGES enum declaration. It may additionally pass extra
   * variables of the form { name: val }. It is up to the subclass what to
   * do with those variables.
   *
   * @param {string} unusedEventName
   * @param {!Object<string, string|number>=} opt_extraVariables
   */
  emitLifecycleEvent(unusedEventName, opt_extraVariables) {} // Intentional no-op.
/**
* Attempts to execute Real Time Config, if the ad network has enabled it.
* If it is not supported by the network, but the publisher has included
* the rtc-config attribute on the amp-ad element, warn.
* @return {Promise<!Array<!rtcResponseDef>>|undefined}
*/
tryExecuteRealTimeConfig_() {
if (!!AMP.maybeExecuteRealTimeConfig) {
try {
return AMP.maybeExecuteRealTimeConfig(
this, this.getCustomRealTimeConfigMacros_());
} catch (err) {
user().error(TAG, 'Could not perform Real Time Config.', err);
}
} else if (this.element.getAttribute('rtc-config')) {
user().error(TAG, 'RTC not supported for ad network ' +
`${this.element.getAttribute('type')}`);
}
}
  /**
   * To be overriden by network impl. Should return a mapping of macro keys
   * to values for substitution in publisher-specified URLs for RTC.
   * @return {?Object<string,
   *   !../../../src/service/variable-source.SyncResolverDef>}
   */
  getCustomRealTimeConfigMacros_() {
    // Default: no custom macros are exposed to publisher RTC URLs.
    return null;
  }
  /**
   * Whether preferential render should still be utilized if web crypto is
   * unavailable, and crypto signature header is present.
   * @return {boolean}
   */
  shouldPreferentialRenderWithoutCrypto() {
    // Default: require working web crypto for preferential rendering.
    return false;
  }
/**
* @param {string=} headerValue Method as given in header.
*/
getNonAmpCreativeRenderingMethod(headerValue) {
if (headerValue) {
if (!isEnumValue(XORIGIN_MODE, headerValue)) {
dev().error(
'AMP-A4A', `cross-origin render mode header ${headerValue}`);
} else {
return headerValue;
}
}
return Services.platformFor(this.win).isIos() ?
XORIGIN_MODE.SAFEFRAME : null;
}
  /**
   * Returns base object that will be written to cross-domain iframe name
   * attribute.  Networks override this to add extra context fields.
   * @return {!JsonObject}
   */
  getAdditionalContextMetadata() {
    // Default: no additional context.
    return /** @type {!JsonObject} */ ({});
  }
}
/**
 * Attaches the query string portion of the ad url to the error's `args`
 * so it is reported alongside the error.  No-op when the URL has no query
 * string or when the error already carries an 'au' value.
 * @param {!Error} error
 * @param {string} adUrl
 */
export function assignAdUrlToError(error, adUrl) {
  // Skip if there is no URL or a URL was already recorded on this error.
  if (!adUrl || (error.args && error.args['au'])) {
    return;
  }
  const adQueryIdx = adUrl.indexOf('?');
  if (adQueryIdx == -1) {
    return;
  }
  // Cap the recorded query string at 250 characters to bound report size.
  (error.args || (error.args = {}))['au'] =
      adUrl.substring(adQueryIdx + 1, adQueryIdx + 251);
}
/**
 * Returns the signature verifier for the given window. Lazily creates it if it
 * doesn't already exist.
 *
 * This ensures that only one signature verifier exists per window, which allows
 * multiple Fast Fetch ad slots on a page (even ones from different ad networks)
 * to share the same cached public keys.
 *
 * @param {!Window} win
 * @return {!SignatureVerifier}
 * @visibleForTesting
 */
export function signatureVerifierFor(win) {
  const propertyName = 'AMP_FAST_FETCH_SIGNATURE_VERIFIER_';
  if (!win[propertyName]) {
    // First request on this window: create and cache the verifier.
    win[propertyName] = new SignatureVerifier(win, signingServerURLs);
  }
  return win[propertyName];
}
|
this.tearDownSlot();
|
<|file_name|>problem_006.py<|end_file_name|><|fim▁begin|>""" Copyright 2012, July 31
Written by Pattarapol (Cheer) Iamngamsup
E-mail: [email protected]
Sum square difference
Problem 6
The sum of the squares of the first ten natural numbers is,
1^2 + 2^2 + ... + 10^2 = 385
The square of the sum of the first ten natural numbers is,
(1 + 2 + ... + 10)^2 = 55^2 = 3025
Hence the difference between the sum of the squares of
the first ten natural numbers and the square of the sum is
3025 385 = 2640.
Find the difference between the sum of the squares of
the first one hundred natural numbers and the square of the sum.
"""
#################################################
# Importing libraries & modules
import datetime
#################################################
# Global variables
#################################################
# Functions
#################################################
# Classes
#################################################
# Main function
<|fim▁hole|> sumOfSquare += i*i
print( 'answer = {0}'.format( squareOfSum - sumOfSquare ) )
#################################################
# Main execution
if __name__ == '__main__':
# get starting date time
startingDateTime = datetime.datetime.utcnow()
print( 'startingDateTime = {0} UTC'.format( startingDateTime ) )
# call main function
main()
# get ending date time
endingdateTime = datetime.datetime.utcnow()
print( 'endingdateTime = {0} UTC'.format( endingdateTime ) )
# compute delta date time
deltaDateTime = endingdateTime - startingDateTime
print( 'deltaDateTime = {0}'.format( deltaDateTime ) )<|fim▁end|>
|
def main():
squareOfSum = ( ( ( 1+100 ) * 100 ) / 2)**2
sumOfSquare = 0
for i in range( 1, 101 ):
|
<|file_name|>20.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
export { ChemistryReference20 as default } from "../../";
|
<|file_name|>marker.go<|end_file_name|><|fim▁begin|>package marker
import (
"fmt"
"image"
"image/color"
"image/draw"
"image/png"
"log"
"os"
"strconv"
)<|fim▁hole|> Code int
Size int
Division int
BlockSize int
matrix []int
Marker *image.NRGBA
Name string
}
// New returns a Marker with the given parameters.
// Using these parameters, it generates the visual representation of the marker
func New(code, division, blocksize int, name string, hasBorder bool) *Marker {
if division < 3 || division > 8 {
log.Fatal("The value of division must range from 3 to 8.")
}
if blocksize < 16 || blocksize > 32 {
log.Fatal("The value of blocksize must range from 16 to 32")
}
size := blocksize * (division + 4)
matrix := make([]int, division*division)
matrix[0] = 1
matrix[division-1] = -1
matrix[division*(division-1)] = -1
matrix[division*division-1] = -1
binary := reverse(strconv.FormatInt(int64(code), 2))
for i, j := 1, 0; i < len(matrix) && j < len(binary); i++ {
if matrix[i] != -1 {
matrix[i] = int(binary[j]) - 48
j++
}
}
m := image.NewNRGBA(image.Rect(0, 0, size, size))
if hasBorder {
draw.Draw(m, m.Bounds(), image.Black, image.ZP, draw.Src)
} else {
draw.Draw(m, m.Bounds(), image.Transparent, image.ZP, draw.Src)
}
fid := &Marker{
code,
size,
division,
blocksize,
matrix,
m,
name}
fid.draw(hasBorder)
return fid
}
// Save saves the fiducial marker into a PNG image.
// If no name is specified, 'code-<code>.png' will be used as the filename.
func (m *Marker) Save() error {
if m.Name == "" {
m.Name = fmt.Sprintf("code-%d.png", m.Code)
}
f, err := os.Create(m.Name)
if err != nil {
return err
}
defer f.Close()
if err = png.Encode(f, m.Marker); err != nil {
return err
}
return nil
}
func (m *Marker) draw(hasBorder bool) {
codeX := m.BlockSize * 2
codeY := m.BlockSize * 2
whiteBlock := image.Rect(m.BlockSize, m.BlockSize, m.Size-m.BlockSize, m.Size-m.BlockSize)
blackBlock := image.Rect(m.BlockSize*2, m.BlockSize*2, m.Size-m.BlockSize*2, m.Size-m.BlockSize*2)
draw.Draw(m.Marker, whiteBlock, image.White, image.ZP, draw.Src)
draw.Draw(m.Marker, blackBlock, image.Black, image.ZP, draw.Src)
for i, r := 0, 0; i < len(m.matrix); i++ {
if m.matrix[i] == 1 {
m.set(codeX, codeY, codeX+m.BlockSize, codeY+m.BlockSize, color.NRGBA{255, 255, 255, 255})
}
if r == m.Division-1 {
r = 0
codeY = codeY + m.BlockSize
codeX = m.BlockSize * 2
continue
}
codeX = codeX + m.BlockSize
r = r + 1
}
}
func (m *Marker) set(x1, y1, x2, y2 int, color color.Color) {
for x := x1; x < x2; x++ {
for y := y1; y < y2; y++ {
m.Marker.Set(x, y, color)
}
}
}<|fim▁end|>
|
// Marker represents the structure of a fiducial marker
type Marker struct {
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|># from . import *<|fim▁end|>
| |
<|file_name|>xml.py<|end_file_name|><|fim▁begin|>#
# Copyright (C) 2011 - 2015 Satoru SATOH <ssato @ redhat.com>
# License: MIT
#
# Some XML modules may be missing and Base.{load,dumps}_impl are not overriden:
# pylint: disable=import-error
"""XML files parser backend, should be available always.
.. versionchanged:: 0.1.0
Added XML dump support.
- Format to support: XML, e.g. http://www.w3.org/TR/xml11/
- Requirements: one of the followings
- lxml2.etree if available
- xml.etree.ElementTree in standard lib if python >= 2.5
- elementtree.ElementTree (otherwise)
- Limitations:
- '<prefix>attrs', '<prefix>text' and '<prefix>children' are used as special
    parameter to keep XML structure of original data. You have to customize
<prefix> (default: '@') if any config parameters conflict with some of
them.
  - Some data or structures of the original XML file may be lost when it is
    written back to XML; XML file - (anyconfig.load) -> config -
    (anyconfig.dump) -> XML file
- XML specific features (namespace, etc.) may not be processed correctly.
- Special Options: None supported
"""
from __future__ import absolute_import
from io import BytesIO
import sys
import anyconfig.backend.base
import anyconfig.compat
try:
# First, try lxml which is compatible with elementtree and looks faster a
# lot. See also: http://getpython3.com/diveintopython3/xml.html
from lxml2 import etree as ET
except ImportError:
try:
import xml.etree.ElementTree as ET
except ImportError:
import elementtree.ElementTree as ET
_PARAM_PREFIX = "@"
# It seems that ET.ElementTree.write() cannot process a parameter
# 'xml_declaration' in older python < 2.7:
_IS_OLDER_PYTHON = sys.version_info[0] < 3 and sys.version_info[1] < 7
def etree_to_container(root, cls, pprefix=_PARAM_PREFIX):
    """
    Convert XML ElementTree to a collection of container objects.

    The result wraps everything under ``root.tag``; attributes, stripped
    text and child nodes are stored under the special ``<pprefix>attrs``,
    ``<pprefix>text`` and ``<pprefix>children`` keys respectively.

    :param root: etree root object or None
    :param cls: Container class
    :param pprefix: Special parameter name prefix
    :return: ``cls`` instance representing ``root`` (empty when root is None)
    """
    (attrs, text, children) = [pprefix + x for x in ("attrs", "text",
                                                     "children")]
    tree = cls()
    if root is None:
        return tree
    tree[root.tag] = cls()
    if root.attrib:
        tree[root.tag][attrs] = cls(anyconfig.compat.iteritems(root.attrib))
    # Empty or whitespace-only text is dropped.
    if root.text and root.text.strip():
        tree[root.tag][text] = root.text.strip()
    if len(root):  # It has children.
        # Note: Configuration item cannot have both attributes and values
        # (list) at the same time in current implementation:
        tree[root.tag][children] = [etree_to_container(c, cls, pprefix)
                                    for c in root]
    return tree
def container_to_etree(obj, cls, parent=None, pprefix=_PARAM_PREFIX):
    """
    Convert a container object to XML ElementTree.

    :param obj: Container instance to convert to
    :param cls: Container class
    :param parent: XML ElementTree parent node object or None
    :param pprefix: Special parameter name prefix
    :return: ET.ElementTree for the first non-special key encountered, or
        None when ``obj`` is not dict-like.

    NOTE(review): conversion returns as soon as the first element-producing
    key is processed, so this assumes a single root key at each call —
    confirm against callers (dump_to_string passes a one-root mapping).
    """
    if not isinstance(obj, (cls, dict)):
        return  # All attributes and text should be set already.

    (attrs, text, children) = [pprefix + x for x in ("attrs", "text",
                                                     "children")]
    for key, val in anyconfig.compat.iteritems(obj):
        if key == attrs:
            # Attribute mapping: applied directly onto the parent element.
            for attr, aval in anyconfig.compat.iteritems(val):
                parent.set(attr, aval)
        elif key == text:
            parent.text = val
        elif key == children:
            for child in val:  # child should be a dict-like object.
                for ckey, cval in anyconfig.compat.iteritems(child):
                    celem = ET.Element(ckey)
                    container_to_etree(cval, cls, celem, pprefix)
                    parent.append(celem)
        else:
            # Regular key: becomes a new element, recursed into.
            elem = ET.Element(key)
            container_to_etree(val, cls, elem, pprefix)
            return ET.ElementTree(elem)
def etree_write(tree, stream):
    """
    Serialize the XML ElementTree ``tree`` into ``stream``.

    :param tree: XML ElementTree object
    :param stream: File or file-like object that can be written to
    """
    options = {"encoding": "UTF-8"}
    if not _IS_OLDER_PYTHON:
        # Older python (< 2.7) ElementTree.write() rejects this keyword.
        options["xml_declaration"] = True
    tree.write(stream, **options)
class Parser(anyconfig.backend.base.D2Parser):
    """
    Parser for XML files.

    Loading builds a nested container via :func:`etree_to_container`;
    dumping reverses it via :func:`container_to_etree`.
    """
    _type = "xml"
    _extensions = ["xml"]
    # Binary modes: ElementTree reads/writes encoded bytes.
    _open_flags = ('rb', 'wb')

    def load_from_string(self, content, **kwargs):
        """
        Load config from XML snippet (a string `content`).

        :param content: XML snippet (a string)
        :param kwargs: optional keyword parameters (currently ignored)
        :return: self.container object holding config parameters
        """
        root = ET.ElementTree(ET.fromstring(content)).getroot()
        return etree_to_container(root, self.container)

    def load_from_path(self, filepath, **kwargs):
        """
        :param filepath: XML file path
        :param kwargs: optional keyword parameters (currently ignored)
        :return: self.container object holding config parameters
        """
        root = ET.parse(filepath).getroot()
        return etree_to_container(root, self.container)

    def load_from_stream(self, stream, **kwargs):
        """
        :param stream: XML file or file-like object
        :param kwargs: optional keyword parameters (currently ignored)
        :return: self.container object holding config parameters

        ET.parse accepts file-like objects as well as paths, so this simply
        delegates to :meth:`load_from_path`.
        """
        return self.load_from_path(stream, **kwargs)

    def dump_to_string(self, cnf, **kwargs):
        """
        :param cnf: Configuration data to dump :: self.container
        :param kwargs: optional keyword parameters (currently ignored)
        :return: string represents the configuration
        """
        tree = container_to_etree(cnf, self.container)
        buf = BytesIO()
        etree_write(tree, buf)
        return buf.getvalue()

    def dump_to_stream(self, cnf, stream, **kwargs):
        """
        :param cnf: Configuration data to dump :: self.container
        :param stream: Config file or file like object write to
        :param kwargs: optional keyword parameters (currently ignored)
        """
        tree = container_to_etree(cnf, self.container)
        etree_write(tree, stream)
# vim:sw=4:ts=4:et:<|fim▁end|>
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.