prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>cancel_all.py<|end_file_name|><|fim▁begin|>import bot
import config
if __name__ == '__main__':
bot.init(config)<|fim▁hole|><|fim▁end|>
|
bot.cancel_all()
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#![feature(custom_derive, plugin)]
#![plugin(serde_macros)]
extern crate mui;
use std::cell::Cell;
use mui::prelude::*;
fn main() {
let mut app = mui::App::new("Mui Widgets Demo!");
// primitives
let rect = mui::Rect::new("r1", true);
rect.set_zpos(3);
app.add_widget(&rect);
let count = Cell::new(0);
let count2 = Cell::new(0);
// sa - stand alone
let mut sa_button = mui::Button::new("b1");<|fim▁hole|>
//let cl = sa_button.clone();
sa_button.set_on_click_fn(Box::new(move |this| {
this.set_text("Told you!");
}));
app.add_widget(&sa_button);
let mut button = mui::Button::new("b2");
button.set_text("Click me");
button.set_on_click_fn(Box::new(move |this| {
count.set(count.get()+1);
this.set_text(format!("Clicked {}", count.get()));
}));
let mut button2 = mui::Button::new("b3");
button2.set_text("Click me");
button2.set_on_click_fn(Box::new(move |this| {
count2.set(count2.get()+1);
this.set_text(format!("Clicked {}", count2.get()));
}));
let mut layout = mui::BoxLayout::new("l1");
layout.set_xy(300.0, 40.0);
layout.set_zpos(5);
layout.add_widget(&button);
layout.add_widget(&button2);
app.add_widget(&layout);
let sa_textbox = mui::TextBox::new("textbox");
sa_textbox.set_xy(600.0, 200.0);
app.add_widget(&sa_textbox);
app.run();
}<|fim▁end|>
|
sa_button.set_text("I'm a button");
sa_button.set_xy(700.0, 300.0);
|
<|file_name|>light.py<|end_file_name|><|fim▁begin|>"""Support for ISY994 lights."""
from typing import Callable, Dict
from pyisy.constants import ISY_VALUE_UNKNOWN
from homeassistant.components.light import (
DOMAIN as LIGHT,
SUPPORT_BRIGHTNESS,
LightEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import HomeAssistantType
from .const import (
_LOGGER,
CONF_RESTORE_LIGHT_STATE,
DOMAIN as ISY994_DOMAIN,
ISY994_NODES,
)
from .entity import ISYNodeEntity
from .helpers import migrate_old_unique_ids
from .services import async_setup_device_services, async_setup_light_services
ATTR_LAST_BRIGHTNESS = "last_brightness"
async def async_setup_entry(
hass: HomeAssistantType,
entry: ConfigEntry,
async_add_entities: Callable[[list], None],
) -> bool:
"""Set up the ISY994 light platform."""
hass_isy_data = hass.data[ISY994_DOMAIN][entry.entry_id]
isy_options = entry.options
restore_light_state = isy_options.get(CONF_RESTORE_LIGHT_STATE, False)
devices = []
for node in hass_isy_data[ISY994_NODES][LIGHT]:
devices.append(ISYLightEntity(node, restore_light_state))
await migrate_old_unique_ids(hass, LIGHT, devices)
async_add_entities(devices)
async_setup_device_services(hass)<|fim▁hole|>class ISYLightEntity(ISYNodeEntity, LightEntity, RestoreEntity):
"""Representation of an ISY994 light device."""
def __init__(self, node, restore_light_state) -> None:
"""Initialize the ISY994 light device."""
super().__init__(node)
self._last_brightness = None
self._restore_light_state = restore_light_state
@property
def is_on(self) -> bool:
"""Get whether the ISY994 light is on."""
if self._node.status == ISY_VALUE_UNKNOWN:
return False
return int(self._node.status) != 0
@property
def brightness(self) -> float:
"""Get the brightness of the ISY994 light."""
if self._node.status == ISY_VALUE_UNKNOWN:
return None
return int(self._node.status)
def turn_off(self, **kwargs) -> None:
"""Send the turn off command to the ISY994 light device."""
self._last_brightness = self.brightness
if not self._node.turn_off():
_LOGGER.debug("Unable to turn off light")
def on_update(self, event: object) -> None:
"""Save brightness in the update event from the ISY994 Node."""
if self._node.status not in (0, ISY_VALUE_UNKNOWN):
self._last_brightness = self._node.status
super().on_update(event)
# pylint: disable=arguments-differ
def turn_on(self, brightness=None, **kwargs) -> None:
"""Send the turn on command to the ISY994 light device."""
if self._restore_light_state and brightness is None and self._last_brightness:
brightness = self._last_brightness
if not self._node.turn_on(val=brightness):
_LOGGER.debug("Unable to turn on light")
@property
def device_state_attributes(self) -> Dict:
"""Return the light attributes."""
attribs = super().device_state_attributes
attribs[ATTR_LAST_BRIGHTNESS] = self._last_brightness
return attribs
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_BRIGHTNESS
async def async_added_to_hass(self) -> None:
"""Restore last_brightness on restart."""
await super().async_added_to_hass()
self._last_brightness = self.brightness or 255
last_state = await self.async_get_last_state()
if not last_state:
return
if (
ATTR_LAST_BRIGHTNESS in last_state.attributes
and last_state.attributes[ATTR_LAST_BRIGHTNESS]
):
self._last_brightness = last_state.attributes[ATTR_LAST_BRIGHTNESS]
def set_on_level(self, value):
"""Set the ON Level for a device."""
self._node.set_on_level(value)
def set_ramp_rate(self, value):
"""Set the Ramp Rate for a device."""
self._node.set_ramp_rate(value)<|fim▁end|>
|
async_setup_light_services(hass)
|
<|file_name|>XMLServlet.java<|end_file_name|><|fim▁begin|>package org.oguz.servlet;
import java.io.IOException;
import java.io.PrintWriter;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebInitParam;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
public class XMLServlet extends HttpServlet
{
/**
*
*/
private static final long serialVersionUID = 1L;
/**
* @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
*/
protected void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException
{
response.setContentType("text/html");
PrintWriter out = response.getWriter();
String userName = request.getParameter("username");
String fullName = request.getParameter("fullname");
String profession = request.getParameter("profession");
// HttpSession session =request.getSession();
ServletContext context = request.getServletContext();
if (userName != "" && userName != null)
{
// session.setAttribute("savedUser",userName);
context.setAttribute("savedUser", userName);
out.println("<p>Hello context parameter " + (String)context.getAttribute("savedUser") +
" from GET method</p>");
}
else
{
out.println("<p>Hello default user " +
this.getServletConfig().getInitParameter("username") + " from GET method</p>");
}
if (fullName != "" && fullName != null)
{
// session.setAttribute("savedFull", fullName);
context.setAttribute("savedFull", fullName);
out.println("<p> your full name is: " + (String)context.getAttribute("savedFull") +
"</p>");
}
else
{
out.println("<p>Hello default fullname " +
this.getServletConfig().getInitParameter("fullname") + " from GET method</p>");
}
}
protected void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException
{
response.setContentType("text/html");
PrintWriter out = response.getWriter();
String userName = request.getParameter("username");
String fullName = request.getParameter("fullname");
String profession = request.getParameter("profession");
// String location = request.getParameter("location");
String[] location = request.getParameterValues("location");
out.println("<p>Hello " + userName + " from POST method in XMLSERVLET response</p>");
out.println("<p> your full name is: " + fullName + "</p>");
out.println("<p>your profession is: " + profession + "</p>");
for (int i = 0; i < location.length; i++)<|fim▁hole|> {
out.println("<p>your location is: " + location[i].toUpperCase() + "</p>");
}
}
}<|fim▁end|>
| |
<|file_name|>filter_test.go<|end_file_name|><|fim▁begin|>package consul
import (
"reflect"
"testing"
"github.com/socketplane/socketplane/Godeps/_workspace/src/github.com/hashicorp/consul/acl"
"github.com/socketplane/socketplane/Godeps/_workspace/src/github.com/hashicorp/consul/consul/structs"
)
func TestFilterDirEnt(t *testing.T) {
policy, _ := acl.Parse(testFilterRules)
aclR, _ := acl.New(acl.DenyAll(), policy)
type tcase struct {
in []string
out []string
}
cases := []tcase{
tcase{
in: []string{"foo/test", "foo/priv/nope", "foo/other", "zoo"},
out: []string{"foo/test", "foo/other"},
},
tcase{
in: []string{"abe", "lincoln"},
out: nil,
},
tcase{
in: []string{"abe", "foo/1", "foo/2", "foo/3", "nope"},
out: []string{"foo/1", "foo/2", "foo/3"},
},
}
for _, tc := range cases {
ents := structs.DirEntries{}
for _, in := range tc.in {
ents = append(ents, &structs.DirEntry{Key: in})
}
ents = FilterDirEnt(aclR, ents)
var outL []string
for _, e := range ents {
outL = append(outL, e.Key)
}
if !reflect.DeepEqual(outL, tc.out) {
t.Fatalf("bad: %#v %#v", outL, tc.out)
}
}
}
func TestKeys(t *testing.T) {
policy, _ := acl.Parse(testFilterRules)
aclR, _ := acl.New(acl.DenyAll(), policy)
type tcase struct {
in []string
out []string
}
cases := []tcase{
tcase{
in: []string{"foo/test", "foo/priv/nope", "foo/other", "zoo"},
out: []string{"foo/test", "foo/other"},
},
tcase{
in: []string{"abe", "lincoln"},
out: []string{},
},
tcase{<|fim▁hole|>
for _, tc := range cases {
out := FilterKeys(aclR, tc.in)
if !reflect.DeepEqual(out, tc.out) {
t.Fatalf("bad: %#v %#v", out, tc.out)
}
}
}
var testFilterRules = `
key "" {
policy = "deny"
}
key "foo/" {
policy = "read"
}
key "foo/priv/" {
policy = "deny"
}
key "zip/" {
policy = "read"
}
`<|fim▁end|>
|
in: []string{"abe", "foo/1", "foo/2", "foo/3", "nope"},
out: []string{"foo/1", "foo/2", "foo/3"},
},
}
|
<|file_name|>ExtractArchive.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import with_statement
import os
import sys
import traceback
# monkey patch bug in python 2.6 and lower
# http://bugs.python.org/issue6122 , http://bugs.python.org/issue1236 , http://bugs.python.org/issue1731717
if sys.version_info < (2, 7) and os.name != "nt":
import errno
import subprocess
def _eintr_retry_call(func, *args):
while True:
try:
return func(*args)
except OSError, e:
if e.errno == errno.EINTR:
continue
raise
#: Unsued timeout option for older python version
def wait(self, timeout=0):
"""
Wait for child process to terminate. Returns returncode
attribute.
"""
if self.returncode is None:
try:
pid, sts = _eintr_retry_call(os.waitpid, self.pid, 0)
except OSError, e:
if e.errno != errno.ECHILD:
raise
#: This happens if SIGCLD is set to be ignored or waiting
#: For child processes has otherwise been disabled for our
#: process. This child is dead, we can't get the status.
sts = 0
self._handle_exitstatus(sts)
return self.returncode
subprocess.Popen.wait = wait
try:
import send2trash
except ImportError:
pass
from module.plugins.internal.Addon import Addon, Expose, threaded
from module.plugins.internal.Plugin import exists, replace_patterns
from module.plugins.internal.Extractor import ArchiveError, CRCError, PasswordError
from module.utils import fs_encode, save_join as fs_join, uniqify
class ArchiveQueue(object):
def __init__(self, plugin, storage):
self.plugin = plugin
self.storage = storage
def get(self):
try:
return [int(pid) for pid in self.plugin.retrieve("ExtractArchive:%s" % self.storage, "").decode('base64').split()]
except Exception:
return []
def set(self, value):
if isinstance(value, list):
item = str(value)[1:-1].replace(' ', '').replace(',', ' ')
else:
item = str(value).strip()
return self.plugin.store("ExtractArchive:%s" % self.storage, item.encode('base64')[:-1])
def delete(self):
return self.plugin.delete("ExtractArchive:%s" % self.storage)
def add(self, item):
queue = self.get()
if item not in queue:
return self.set(queue + [item])
else:
return True
def remove(self, item):
queue = self.get()
try:
queue.remove(item)
except ValueError:
pass
if queue is []:
return self.delete()
return self.set(queue)
class ExtractArchive(Addon):
__name__ = "ExtractArchive"
__type__ = "hook"
__version__ = "1.50"
__status__ = "testing"
__config__ = [("activated" , "bool" , "Activated" , True ),
("fullpath" , "bool" , "Extract with full paths" , True ),
("overwrite" , "bool" , "Overwrite files" , False ),
("keepbroken" , "bool" , "Try to extract broken archives" , False ),
("repair" , "bool" , "Repair broken archives (RAR required)" , False ),
("test" , "bool" , "Test archive before extracting" , False ),
("usepasswordfile", "bool" , "Use password file" , True ),
("passwordfile" , "file" , "Password file" , "passwords.txt" ),
("delete" , "bool" , "Delete archive after extraction" , True ),
("deltotrash" , "bool" , "Move to trash (recycle bin) instead delete", True ),
("subfolder" , "bool" , "Create subfolder for each package" , False ),
("destination" , "folder" , "Extract files to folder" , "" ),
("extensions" , "str" , "Extract archives ending with extension" , "7z,bz2,bzip2,gz,gzip,lha,lzh,lzma,rar,tar,taz,tbz,tbz2,tgz,xar,xz,z,zip"),
("excludefiles" , "str" , "Don't extract the following files" , "*.nfo,*.DS_Store,index.dat,thumb.db" ),
("recursive" , "bool" , "Extract archives in archives" , True ),
("waitall" , "bool" , "Run after all downloads was processed" , False ),
("renice" , "int" , "CPU priority" , 0 )]
__description__ = """Extract different kind of archives"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "[email protected]"),
("Immenz" , "[email protected]" )]
NAME_REPLACEMENTS = [(r'\.part\d+\.rar$', ".part.rar")]
def init(self):
self.event_map = {'allDownloadsProcessed': "all_downloads_processed",
'packageDeleted' : "package_deleted" }
self.queue = ArchiveQueue(self, "Queue")
self.failed = ArchiveQueue(self, "Failed")
self.interval = 60
self.extracting = False
self.last_package = False
self.extractors = []
self.passwords = []
self.repair = False
def activate(self):
for p in ("UnRar", "SevenZip", "UnZip"):
try:
module = self.pyload.pluginManager.loadModule("internal", p)
klass = getattr(module, p)
if klass.find():
self.extractors.append(klass)
if klass.REPAIR:
self.repair = self.get_config('repair')
except OSError, e:
if e.errno == 2:
self.log_warning(_("No %s installed") % p)
else:
self.log_warning(_("Could not activate: %s") % p, e)
if self.pyload.debug:
traceback.print_exc()
except Exception, e:
self.log_warning(_("Could not activate: %s") % p, e)
if self.pyload.debug:
traceback.print_exc()
if self.extractors:
self.log_debug(*["Found %s %s" % (Extractor.__name__, Extractor.VERSION) for Extractor in self.extractors])
self.extract_queued() #: Resume unfinished extractions
else:
self.log_info(_("No Extract plugins activated"))
@threaded
def extract_queued(self, thread):
if self.extracting: #@NOTE: doing the check here for safty (called by coreReady)
return
self.extracting = True
packages = self.queue.get()
while packages:
if self.last_package: #: Called from allDownloadsProcessed
self.last_package = False
if self.extract(packages, thread): #@NOTE: check only if all gone fine, no failed reporting for now
self.manager.dispatchEvent("all_archives_extracted")
self.manager.dispatchEvent("all_archives_processed")
else:
if self.extract(packages, thread): #@NOTE: check only if all gone fine, no failed reporting for now
pass
packages = self.queue.get() #: Check for packages added during extraction
self.extracting = False
#: Deprecated method, use `extract_package` instead
@Expose
def extractPackage(self, *args, **kwargs):
"""
See `extract_package`
"""
return self.extract_package(*args, **kwargs)
@Expose
def extract_package(self, *ids):
"""
Extract packages with given id
"""
for id in ids:
self.queue.add(id)
if not self.get_config('waitall') and not self.extracting:
self.extract_queued()
def package_deleted(self, pid):
self.queue.remove(pid)
def package_finished(self, pypack):
self.queue.add(pypack.id)
if not self.get_config('waitall') and not self.extracting:
self.extract_queued()
def all_downloads_processed(self):
self.last_package = True
if self.get_config('waitall') and not self.extracting:
self.extract_queued()
@Expose
def extract(self, ids, thread=None): #@TODO: Use pypack, not pid to improve method usability
if not ids:
return False
processed = []
extracted = []
failed = []
toList = lambda string: string.replace(' ', '').replace(',', '|').replace(';', '|').split('|')
destination = self.get_config('destination')
subfolder = self.get_config('subfolder')
fullpath = self.get_config('fullpath')
overwrite = self.get_config('overwrite')
renice = self.get_config('renice')
recursive = self.get_config('recursive')
delete = self.get_config('delete')
keepbroken = self.get_config('keepbroken')
extensions = [x.lstrip('.').lower() for x in toList(self.get_config('extensions'))]
excludefiles = toList(self.get_config('excludefiles'))
if extensions:
self.log_debug("Use for extensions: %s" % "|.".join(extensions))
#: Reload from txt file
self.reload_passwords()
download_folder = self.pyload.config.get("general", "download_folder")
#: Iterate packages -> extractors -> targets
for pid in ids:
pypack = self.pyload.files.getPackage(pid)
if not pypack:
self.queue.remove(pid)
continue
self.log_info(_("Check package: %s") % pypack.name)
#: Determine output folder
out = fs_join(download_folder, pypack.folder, destination, "") #: Force trailing slash
if subfolder:
out = fs_join(out, pypack.folder)
if not exists(out):
os.makedirs(out)
matched = False
success = True
files_ids = dict((pylink['name'], ((fs_join(download_folder, pypack.folder, pylink['name'])), pylink['id'], out)) for pylink \
in sorted(pypack.getChildren().values(), key=lambda k: k['name'])).values() #: Remove duplicates
#: Check as long there are unseen files
while files_ids:
new_files_ids = []
if extensions:
files_ids = [(fname, fid, fout) for fname, fid, fout in files_ids \
if filter(lambda ext: fname.lower().endswith(ext), extensions)]
for Extractor in self.extractors:
targets = Extractor.get_targets(files_ids)
if targets:
self.log_debug("Targets for %s: %s" % (Extractor.__name__, targets))
matched = True
for fname, fid, fout in targets:
name = os.path.basename(fname)
if not exists(fname):
self.log_debug(name, "File not found")
continue
self.log_info(name, _("Extract to: %s") % fout)
try:
pyfile = self.pyload.files.getFile(fid)
archive = Extractor(self,
fname,
fout,
fullpath,
overwrite,
excludefiles,
renice,
delete,
keepbroken,
fid)
thread.addActive(pyfile)
archive.init()
try:
new_files = self._extract(pyfile, archive, pypack.password)
finally:
pyfile.setProgress(100)
thread.finishFile(pyfile)
except Exception, e:
self.log_error(name, e)
success = False
continue
#: Remove processed file and related multiparts from list
files_ids = [(fname, fid, fout) for fname, fid, fout in files_ids \
if fname not in archive.get_delete_files()]
self.log_debug("Extracted files: %s" % new_files)
for file in new_files:
self.set_permissions(file)
for filename in new_files:
file = fs_encode(fs_join(os.path.dirname(archive.filename), filename))
if not exists(file):
self.log_debug("New file %s does not exists" % filename)
continue
if recursive and os.path.isfile(file):
new_files_ids.append((filename, fid, os.path.dirname(filename))) #: Append as new target
self.manager.dispatchEvent("archive_extracted", pyfile, archive)
files_ids = new_files_ids #: Also check extracted files
if matched:
if success:
extracted.append(pid)
self.manager.dispatchEvent("package_extracted", pypack)
else:
failed.append(pid)
self.manager.dispatchEvent("package_extract_failed", pypack)
self.failed.add(pid)
else:
self.log_info(_("No files found to extract"))
if not matched or not success and subfolder:
try:
os.rmdir(out)
except OSError:
pass
self.queue.remove(pid)
return True if not failed else False
def _extract(self, pyfile, archive, password):
name = os.path.basename(archive.filename)
pyfile.setStatus("processing")
encrypted = False
try:
self.log_debug("Password: %s" % (password or "None provided"))
passwords = uniqify([password] + self.get_passwords(False)) if self.get_config('usepasswordfile') else [password]
for pw in passwords:
try:
if self.get_config('test') or self.repair:
pyfile.setCustomStatus(_("archive testing"))
if pw:
self.log_debug("Testing with password: %s" % pw)
pyfile.setProgress(0)
archive.verify(pw)
pyfile.setProgress(100)
else:
archive.check(pw)
self.add_password(pw)
break
except PasswordError:
if not encrypted:
self.log_info(name, _("Password protected"))
encrypted = True
except CRCError, e:
self.log_debug(name, e)
self.log_info(name, _("CRC Error"))
if self.repair:
self.log_warning(name, _("Repairing..."))
<|fim▁hole|> pyfile.setCustomStatus(_("archive repairing"))
pyfile.setProgress(0)
repaired = archive.repair()
pyfile.setProgress(100)
if not repaired and not self.get_config('keepbroken'):
raise CRCError("Archive damaged")
self.add_password(pw)
break
raise CRCError("Archive damaged")
except ArchiveError, e:
raise ArchiveError(e)
pyfile.setCustomStatus(_("extracting"))
pyfile.setProgress(0)
if not encrypted or not self.get_config('usepasswordfile'):
self.log_debug("Extracting using password: %s" % (password or "None"))
archive.extract(password)
else:
for pw in filter(None, uniqify([password] + self.get_passwords(False))):
try:
self.log_debug("Extracting using password: %s" % pw)
archive.extract(pw)
self.add_password(pw)
break
except PasswordError:
self.log_debug("Password was wrong")
else:
raise PasswordError
pyfile.setProgress(100)
pyfile.setStatus("processing")
delfiles = archive.get_delete_files()
self.log_debug("Would delete: " + ", ".join(delfiles))
if self.get_config('delete'):
self.log_info(_("Deleting %s files") % len(delfiles))
deltotrash = self.get_config('deltotrash')
for f in delfiles:
file = fs_encode(f)
if not exists(file):
continue
if not deltotrash:
os.remove(file)
else:
try:
send2trash.send2trash(file)
except NameError:
self.log_warning(_("Unable to move %s to trash") % os.path.basename(f),
_("Send2Trash lib not found"))
except Exception, e:
self.log_warning(_("Unable to move %s to trash") % os.path.basename(f),
e.message)
else:
self.log_info(_("Moved %s to trash") % os.path.basename(f))
self.log_info(name, _("Extracting finished"))
extracted_files = archive.files or archive.list()
return extracted_files
except PasswordError:
self.log_error(name, _("Wrong password" if password else "No password found"))
except CRCError, e:
self.log_error(name, _("CRC mismatch"), e)
except ArchiveError, e:
self.log_error(name, _("Archive error"), e)
except Exception, e:
self.log_error(name, _("Unknown error"), e)
if self.pyload.debug:
traceback.print_exc()
self.manager.dispatchEvent("archive_extract_failed", pyfile, archive)
raise Exception(_("Extract failed"))
#: Deprecated method, use `get_passwords` instead
@Expose
def getPasswords(self, *args, **kwargs):
"""
See `get_passwords`
"""
return self.get_passwords(*args, **kwargs)
@Expose
def get_passwords(self, reload=True):
"""
List of saved passwords
"""
if reload:
self.reload_passwords()
return self.passwords
def reload_passwords(self):
try:
passwords = []
file = fs_encode(self.get_config('passwordfile'))
with open(file) as f:
for pw in f.read().splitlines():
passwords.append(pw)
except IOError, e:
self.log_error(e)
else:
self.passwords = passwords
#: Deprecated method, use `add_password` instead
@Expose
def addPassword(self, *args, **kwargs):
"""
See `add_password`
"""
return self.add_password(*args, **kwargs)
@Expose
def add_password(self, password):
"""
Adds a password to saved list
"""
try:
self.passwords = uniqify([password] + self.passwords)
file = fs_encode(self.get_config('passwordfile'))
with open(file, "wb") as f:
for pw in self.passwords:
f.write(pw + '\n')
except IOError, e:
self.log_error(e)<|fim▁end|>
| |
<|file_name|>puppet.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: puppet
short_description: Runs puppet
description:
- Runs I(puppet) agent or apply in a reliable manner
version_added: "2.0"
options:
timeout:
description:
- How long to wait for I(puppet) to finish.
required: false
default: 30m
puppetmaster:
description:
- The hostname of the puppetmaster to contact.
required: false
default: None
modulepath:
description:
- Path to an alternate location for puppet modules
required: false
default: None
version_added: "2.4"
manifest:
description:
- Path to the manifest file to run puppet apply on.
required: false
default: None
facts:
description:
- A dict of values to pass in as persistent external facter facts
required: false
default: None
facter_basename:
description:
- Basename of the facter output file
required: false
default: ansible
environment:
description:
- Puppet environment to be used.
required: false
default: None
logdest:
description:
- Where the puppet logs should go, if puppet apply is being used
required: false
default: stdout
choices: [ 'stdout', 'syslog' ]
version_added: "2.1"
certname:
description:
- The name to use when handling certificates.
required: false
default: None
version_added: "2.1"
tags:
description:
- A comma-separated list of puppet tags to be used.
required: false
default: None
version_added: "2.1"
execute:
description:
- Execute a specific piece of Puppet code. It has no effect with
a puppetmaster.
required: false
default: None
version_added: "2.1"
requirements: [ puppet ]
author: "Monty Taylor (@emonty)"
'''
EXAMPLES = '''
# Run puppet agent and fail if anything goes wrong
- puppet
# Run puppet and timeout in 5 minutes
- puppet:
timeout: 5m
# Run puppet using a different environment
- puppet:
environment: testing
# Run puppet using a specific certname
- puppet:
certname: agent01.example.com
# Run puppet using a specific piece of Puppet code. Has no effect with a
# puppetmaster.
- puppet:
execute: 'include ::mymodule'
# Run puppet using a specific tags
- puppet:
tags: update,nginx
'''
import os
import pipes
import stat
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
# Let snippet from module_utils/basic.py return a proper error in this case
pass
def _get_facter_dir():
if os.getuid() == 0:
return '/etc/facter/facts.d'
else:
return os.path.expanduser('~/.facter/facts.d')
def _write_structured_data(basedir, basename, data):
if not os.path.exists(basedir):
os.makedirs(basedir)
file_path = os.path.join(basedir, "{0}.json".format(basename))
# This is more complex than you might normally expect because we want to
# open the file with only u+rw set. Also, we use the stat constants
# because ansible still supports python 2.4 and the octal syntax changed
out_file = os.fdopen(
os.open(
file_path, os.O_CREAT | os.O_WRONLY,
stat.S_IRUSR | stat.S_IWUSR), 'w')
out_file.write(json.dumps(data).encode('utf8'))
out_file.close()
def main():
module = AnsibleModule(
argument_spec=dict(
timeout=dict(default="30m"),
puppetmaster=dict(required=False, default=None),
modulepath=dict(required=False, default=None),
manifest=dict(required=False, default=None),
logdest=dict(
required=False, default='stdout',
choices=['stdout', 'syslog']),
show_diff=dict(
# internal code to work with --diff, do not use
default=False, aliases=['show-diff'], type='bool'),
facts=dict(default=None),
facter_basename=dict(default='ansible'),
environment=dict(required=False, default=None),
certname=dict(required=False, default=None),
tags=dict(required=False, default=None, type='list'),
execute=dict(required=False, default=None),
),
supports_check_mode=True,
mutually_exclusive=[
('puppetmaster', 'manifest'),
('puppetmaster', 'manifest', 'execute'),
('puppetmaster', 'modulepath')
],
)
p = module.params
global PUPPET_CMD
PUPPET_CMD = module.get_bin_path("puppet", False, ['/opt/puppetlabs/bin'])
if not PUPPET_CMD:
module.fail_json(
msg="Could not find puppet. Please ensure it is installed.")
global TIMEOUT_CMD
TIMEOUT_CMD = module.get_bin_path("timeout", False)
if p['manifest']:
if not os.path.exists(p['manifest']):
module.fail_json(
msg="Manifest file %(manifest)s not found." % dict(
manifest=p['manifest']))
# Check if puppet is disabled here
if not p['manifest']:
rc, stdout, stderr = module.run_command(
PUPPET_CMD + " config print agent_disabled_lockfile")
if os.path.exists(stdout.strip()):
module.fail_json(
msg="Puppet agent is administratively disabled.",
disabled=True)
elif rc != 0:
module.fail_json(
msg="Puppet agent state could not be determined.")
if module.params['facts'] and not module.check_mode:
_write_structured_data(
_get_facter_dir(),
module.params['facter_basename'],
module.params['facts'])
if TIMEOUT_CMD:
base_cmd = "%(timeout_cmd)s -s 9 %(timeout)s %(puppet_cmd)s" % dict(
timeout_cmd=TIMEOUT_CMD,
timeout=pipes.quote(p['timeout']),
puppet_cmd=PUPPET_CMD)
else:
base_cmd = PUPPET_CMD
if not p['manifest']:
cmd = ("%(base_cmd)s agent --onetime"
" --ignorecache --no-daemonize --no-usecacheonfailure --no-splay"
" --detailed-exitcodes --verbose --color 0") % dict(
base_cmd=base_cmd,
)
if p['puppetmaster']:
cmd += " --server %s" % pipes.quote(p['puppetmaster'])
if p['show_diff']:
cmd += " --show_diff"
if p['environment']:
cmd += " --environment '%s'" % p['environment']
if p['tags']:
cmd += " --tags '%s'" % ','.join(p['tags'])
if p['certname']:
cmd += " --certname='%s'" % p['certname']<|fim▁hole|> else:
cmd += " --no-noop"
else:
cmd = "%s apply --detailed-exitcodes " % base_cmd
if p['logdest'] == 'syslog':
cmd += "--logdest syslog "
if p['modulepath']:
cmd += "--modulepath='%s'" % p['modulepath']
if p['environment']:
cmd += "--environment '%s' " % p['environment']
if p['certname']:
cmd += " --certname='%s'" % p['certname']
if p['execute']:
cmd += " --execute '%s'" % p['execute']
if p['tags']:
cmd += " --tags '%s'" % ','.join(p['tags'])
if module.check_mode:
cmd += "--noop "
else:
cmd += "--no-noop "
cmd += pipes.quote(p['manifest'])
rc, stdout, stderr = module.run_command(cmd)
if rc == 0:
# success
module.exit_json(rc=rc, changed=False, stdout=stdout, stderr=stderr)
elif rc == 1:
# rc==1 could be because it's disabled
# rc==1 could also mean there was a compilation failure
disabled = "administratively disabled" in stdout
if disabled:
msg = "puppet is disabled"
else:
msg = "puppet did not run"
module.exit_json(
rc=rc, disabled=disabled, msg=msg,
error=True, stdout=stdout, stderr=stderr)
elif rc == 2:
# success with changes
module.exit_json(rc=0, changed=True, stdout=stdout, stderr=stderr)
elif rc == 124:
# timeout
module.exit_json(
rc=rc, msg="%s timed out" % cmd, stdout=stdout, stderr=stderr)
else:
# failure
module.fail_json(
rc=rc, msg="%s failed with return code: %d" % (cmd, rc),
stdout=stdout, stderr=stderr)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()<|fim▁end|>
|
if module.check_mode:
cmd += " --noop"
|
<|file_name|>ValidationMessage.java<|end_file_name|><|fim▁begin|>package uk.gov.prototype.vitruvius.parser.validator;
import java.util.List;
public class ValidationMessage {
private String message;
private ValidationType type;
public ValidationMessage() {
}
public ValidationMessage(String message, ValidationType type) {
this.message = message;
this.type = type;
}
public String getMessage() {
return message;
}
public ValidationType getType() {
return type;
}
@Override
public String toString() {
return "ValidationMessage{" +
"message='" + message + '\'' +
", type=" + type +
'}';
}
public enum ValidationType {<|fim▁hole|> }
public static ValidationMessage createErrorMessage(String message) {
return new ValidationMessage(message, ValidationType.ERROR);
}
public static ValidationMessage createWarning(String message) {
return new ValidationMessage(message, ValidationType.WARNING);
}
public static boolean hasErrors(List<ValidationMessage> messages) {
for (ValidationMessage validationMessage : messages) {
if (validationMessage.getType() == ValidationType.ERROR) {
return true;
}
}
return false;
}
}<|fim▁end|>
|
ERROR,
WARNING
|
<|file_name|>app.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from '@angular/core';
import { FormsModule } from '@angular/forms';
import { BrowserModule } from '@angular/platform-browser';
import { AppRoutingModule } from './app-routing.module';
import { AppComponent } from './app.component';
import { NgbModule } from '@ng-bootstrap/ng-bootstrap';
import { ChatWindowComponent } from './chat-window/chat-window.component';
<|fim▁hole|> ],
imports: [
BrowserModule,
AppRoutingModule,
FormsModule,
NgbModule
],
providers: [],
bootstrap: [AppComponent]
})
export class AppModule { }<|fim▁end|>
|
@NgModule({
declarations: [
AppComponent,
ChatWindowComponent
|
<|file_name|>comedycentral.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import re
from .mtv import MTVServicesInfoExtractor
from ..utils import (
compat_str,
compat_urllib_parse,
ExtractorError,
float_or_none,
unified_strdate,
)
class ComedyCentralIE(MTVServicesInfoExtractor):
_VALID_URL = r'''(?x)https?://(?:www\.)?cc\.com/
(video-clips|episodes|cc-studios|video-collections|full-episodes)
/(?P<title>.*)'''
_FEED_URL = 'http://comedycentral.com/feeds/mrss/'
_TEST = {
'url': 'http://www.cc.com/video-clips/kllhuv/stand-up-greg-fitzsimmons--uncensored---too-good-of-a-mother',
'md5': 'c4f48e9eda1b16dd10add0744344b6d8',
'info_dict': {
'id': 'cef0cbb3-e776-4bc9-b62e-8016deccb354',
'ext': 'mp4',
'title': 'CC:Stand-Up|Greg Fitzsimmons: Life on Stage|Uncensored - Too Good of a Mother',
'description': 'After a certain point, breastfeeding becomes c**kblocking.',
},
}
class ComedyCentralShowsIE(MTVServicesInfoExtractor):
IE_DESC = 'The Daily Show / The Colbert Report'
# urls can be abbreviations like :thedailyshow or :colbert
# urls for episodes like:
# or urls for clips like: http://www.thedailyshow.com/watch/mon-december-10-2012/any-given-gun-day
# or: http://www.colbertnation.com/the-colbert-report-videos/421667/november-29-2012/moon-shattering-news
# or: http://www.colbertnation.com/the-colbert-report-collections/422008/festival-of-lights/79524
_VALID_URL = r'''(?x)^(:(?P<shortname>tds|thedailyshow|cr|colbert|colbertnation|colbertreport)
|https?://(:www\.)?
(?P<showname>thedailyshow|thecolbertreport)\.(?:cc\.)?com/
((?:full-)?episodes/(?:[0-9a-z]{6}/)?(?P<episode>.*)|
(?P<clip>
(?:(?:guests/[^/]+|videos|video-playlists|special-editions|news-team/[^/]+)/[^/]+/(?P<videotitle>[^/?#]+))
|(the-colbert-report-(videos|collections)/(?P<clipID>[0-9]+)/[^/]*/(?P<cntitle>.*?))
|(watch/(?P<date>[^/]*)/(?P<tdstitle>.*))
)|
(?P<interview>
extended-interviews/(?P<interID>[0-9a-z]+)/(?:playlist_tds_extended_)?(?P<interview_title>.*?)(/.*?)?)))
(?:[?#].*|$)'''
_TESTS = [{
'url': 'http://thedailyshow.cc.com/watch/thu-december-13-2012/kristen-stewart',
'md5': '4e2f5cb088a83cd8cdb7756132f9739d',
'info_dict': {
'id': 'ab9ab3e7-5a98-4dbe-8b21-551dc0523d55',
'ext': 'mp4',
'upload_date': '20121213',
'description': 'Kristen Stewart learns to let loose in "On the Road."',
'uploader': 'thedailyshow',
'title': 'thedailyshow kristen-stewart part 1',
}
}, {
'url': 'http://thedailyshow.cc.com/extended-interviews/xm3fnq/andrew-napolitano-extended-interview',
'only_matching': True,
}, {
'url': 'http://thecolbertreport.cc.com/videos/29w6fx/-realhumanpraise-for-fox-news',
'only_matching': True,
}, {
'url': 'http://thecolbertreport.cc.com/videos/gh6urb/neil-degrasse-tyson-pt--1?xrs=eml_col_031114',
'only_matching': True,
}, {
'url': 'http://thedailyshow.cc.com/guests/michael-lewis/3efna8/exclusive---michael-lewis-extended-interview-pt--3',
'only_matching': True,
}, {
'url': 'http://thedailyshow.cc.com/episodes/sy7yv0/april-8--2014---denis-leary',
'only_matching': True,
}, {
'url': 'http://thecolbertreport.cc.com/episodes/8ase07/april-8--2014---jane-goodall',
'only_matching': True,
}, {
'url': 'http://thedailyshow.cc.com/video-playlists/npde3s/the-daily-show-19088-highlights',
'only_matching': True,
}, {
'url': 'http://thedailyshow.cc.com/special-editions/2l8fdb/special-edition---a-look-back-at-food',
'only_matching': True,
}, {
'url': 'http://thedailyshow.cc.com/news-team/michael-che/7wnfel/we-need-to-talk-about-israel',
'only_matching': True,
}]
_available_formats = ['3500', '2200', '1700', '1200', '750', '400']
_video_extensions = {
'3500': 'mp4',
'2200': 'mp4',
'1700': 'mp4',
'1200': 'mp4',
'750': 'mp4',
'400': 'mp4',
}
_video_dimensions = {
'3500': (1280, 720),
'2200': (960, 540),
'1700': (768, 432),
'1200': (640, 360),
'750': (512, 288),
'400': (384, 216),
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
if mobj.group('shortname'):
if mobj.group('shortname') in ('tds', 'thedailyshow'):
url = 'http://thedailyshow.cc.com/full-episodes/'
else:
url = 'http://thecolbertreport.cc.com/full-episodes/'
mobj = re.match(self._VALID_URL, url, re.VERBOSE)
assert mobj is not None
if mobj.group('clip'):
if mobj.group('videotitle'):
epTitle = mobj.group('videotitle')
elif mobj.group('showname') == 'thedailyshow':
epTitle = mobj.group('tdstitle')
else:
epTitle = mobj.group('cntitle')<|fim▁hole|> elif mobj.group('interview'):
epTitle = mobj.group('interview_title')
dlNewest = False
else:
dlNewest = not mobj.group('episode')
if dlNewest:
epTitle = mobj.group('showname')
else:
epTitle = mobj.group('episode')
show_name = mobj.group('showname')
webpage, htmlHandle = self._download_webpage_handle(url, epTitle)
if dlNewest:
url = htmlHandle.geturl()
mobj = re.match(self._VALID_URL, url, re.VERBOSE)
if mobj is None:
raise ExtractorError('Invalid redirected URL: ' + url)
if mobj.group('episode') == '':
raise ExtractorError('Redirected URL is still not specific: ' + url)
epTitle = (mobj.group('episode') or mobj.group('videotitle')).rpartition('/')[-1]
mMovieParams = re.findall('(?:<param name="movie" value="|var url = ")(http://media.mtvnservices.com/([^"]*(?:episode|video).*?:.*?))"', webpage)
if len(mMovieParams) == 0:
# The Colbert Report embeds the information in a without
# a URL prefix; so extract the alternate reference
# and then add the URL prefix manually.
altMovieParams = re.findall('data-mgid="([^"]*(?:episode|video|playlist).*?:.*?)"', webpage)
if len(altMovieParams) == 0:
raise ExtractorError('unable to find Flash URL in webpage ' + url)
else:
mMovieParams = [("http://media.mtvnservices.com/" + altMovieParams[0], altMovieParams[0])]
uri = mMovieParams[0][1]
# Correct cc.com in uri
uri = re.sub(r'(episode:[^.]+)(\.cc)?\.com', r'\1.cc.com', uri)
index_url = 'http://%s.cc.com/feeds/mrss?%s' % (show_name, compat_urllib_parse.urlencode({'uri': uri}))
idoc = self._download_xml(
index_url, epTitle,
'Downloading show index', 'Unable to download episode index')
title = idoc.find('./channel/title').text
description = idoc.find('./channel/description').text
entries = []
item_els = idoc.findall('.//item')
for part_num, itemEl in enumerate(item_els):
upload_date = unified_strdate(itemEl.findall('./pubDate')[0].text)
thumbnail = itemEl.find('.//{http://search.yahoo.com/mrss/}thumbnail').attrib.get('url')
content = itemEl.find('.//{http://search.yahoo.com/mrss/}content')
duration = float_or_none(content.attrib.get('duration'))
mediagen_url = content.attrib['url']
guid = itemEl.find('./guid').text.rpartition(':')[-1]
cdoc = self._download_xml(
mediagen_url, epTitle,
'Downloading configuration for segment %d / %d' % (part_num + 1, len(item_els)))
turls = []
for rendition in cdoc.findall('.//rendition'):
finfo = (rendition.attrib['bitrate'], rendition.findall('./src')[0].text)
turls.append(finfo)
formats = []
for format, rtmp_video_url in turls:
w, h = self._video_dimensions.get(format, (None, None))
formats.append({
'format_id': 'vhttp-%s' % format,
'url': self._transform_rtmp_url(rtmp_video_url),
'ext': self._video_extensions.get(format, 'mp4'),
'height': h,
'width': w,
})
formats.append({
'format_id': 'rtmp-%s' % format,
'url': rtmp_video_url.replace('viacomccstrm', 'viacommtvstrm'),
'ext': self._video_extensions.get(format, 'mp4'),
'height': h,
'width': w,
})
self._sort_formats(formats)
virtual_id = show_name + ' ' + epTitle + ' part ' + compat_str(part_num + 1)
entries.append({
'id': guid,
'title': virtual_id,
'formats': formats,
'uploader': show_name,
'upload_date': upload_date,
'duration': duration,
'thumbnail': thumbnail,
'description': description,
})
return {
'_type': 'playlist',
'entries': entries,
'title': show_name + ' ' + title,
'description': description,
}<|fim▁end|>
|
dlNewest = False
|
<|file_name|>authtest.py<|end_file_name|><|fim▁begin|>from com.googlecode.fascinator.common import JsonSimple
class AuthtestData:
def __init__(self):
pass
def __activate__(self, context):
request = context["request"]
response = context["response"]
writer = response.getPrintWriter("text/javascript; charset=UTF-8")
result = JsonSimple()
## Look for the JSONP callback to use
jsonpCallback = request.getParameter("callback")
if jsonpCallback is None:
jsonpCallback = request.getParameter("jsonp_callback")
if jsonpCallback is None:
response.setStatus(403)
writer.println("Error: This interface only responds to JSONP")
writer.close()
return
if context["page"].authentication.is_logged_in():
result.getJsonObject().put("isAuthenticated", "true")<|fim▁hole|> else:
result.getJsonObject().put("isAuthenticated", "false")
writer.println(jsonpCallback + "(" + result.toString() + ")")
writer.close()<|fim▁end|>
| |
<|file_name|>typedoc.js<|end_file_name|><|fim▁begin|>module.exports = {
out: "./docs/",
readme: "README.md",
name: "Persian Tools",<|fim▁hole|> excludeExternals: true,
includeVersion: true,
excludePrivate: false,
};<|fim▁end|>
|
includes: "./src",
entryPoints: ["./src/index.ts"],
exclude: ["**/test/**/*", "**/*.js", "**/dist/**/*", "**/src/dummy/**"],
|
<|file_name|>pageview.go<|end_file_name|><|fim▁begin|>package controller
import (
"beam/cmd/segments/app"
"beam/model"
"time"
"github.com/goadesign/goa"
)
// PageviewController implements the event resource.
type PageviewController struct {
*goa.Controller
PageviewStorage model.PageviewStorage
}
// NewPageviewController creates a pageview controller.
func NewPageviewController(service *goa.Service, ps model.PageviewStorage) *PageviewController {
return &PageviewController{
Controller: service.NewController("PageviewController"),
PageviewStorage: ps,
}
}
// Count runs the count action.
func (c *PageviewController) Count(ctx *app.CountPageviewsContext) error {
o, err := aggregateOptionsFromPageviewOptions(ctx.Payload)
if err != nil {
return err
}
o.Action = ctx.Action
crc, ok, err := c.PageviewStorage.Count(o)
if err != nil {
return err
}
if !ok {<|fim▁hole|> cr := model.CountRow{
Tags: make(map[string]string),
Count: 0,
}
crc = model.CountRowCollection{}
crc = append(crc, cr)
}
acrc := CountRowCollection(crc).ToMediaType()
return ctx.OK(acrc)
}
// Sum runs the sum action.
func (c *PageviewController) Sum(ctx *app.SumPageviewsContext) error {
o, err := aggregateOptionsFromPageviewOptions(ctx.Payload)
if err != nil {
return err
}
o.Action = ctx.Action
src, ok, err := c.PageviewStorage.Sum(o)
if err != nil {
return err
}
if !ok {
sr := model.SumRow{
Tags: make(map[string]string),
Sum: 0,
}
src = model.SumRowCollection{}
src = append(src, sr)
}
asrc := SumRowCollection(src).ToMediaType()
return ctx.OK(asrc)
}
// Avg runs the avg action.
func (c *PageviewController) Avg(ctx *app.AvgPageviewsContext) error {
o, err := aggregateOptionsFromPageviewOptions(ctx.Payload)
if err != nil {
return err
}
o.Action = ctx.Action
src, ok, err := c.PageviewStorage.Avg(o)
if err != nil {
return err
}
if !ok {
sr := model.AvgRow{
Tags: make(map[string]string),
Avg: 0,
}
src = model.AvgRowCollection{}
src = append(src, sr)
}
asrc := AvgRowCollection(src).ToMediaType()
return ctx.OK(asrc)
}
// Unique runs the cardinality count action.
func (c *PageviewController) Unique(ctx *app.UniquePageviewsContext) error {
o, err := aggregateOptionsFromPageviewOptions(ctx.Payload)
if err != nil {
return err
}
o.Action = ctx.Action
src, ok, err := c.PageviewStorage.Unique(o, ctx.Item)
if err != nil {
return err
}
if !ok {
sr := model.CountRow{
Tags: make(map[string]string),
Count: 0,
}
src = model.CountRowCollection{}
src = append(src, sr)
}
asrc := CountRowCollection(src).ToMediaType()
return ctx.OK(asrc)
}
// List runs the list action.
func (c *PageviewController) List(ctx *app.ListPageviewsContext) error {
aggOptions, err := aggregateOptionsFromPageviewOptions(ctx.Payload.Conditions)
if err != nil {
return err
}
o := model.ListPageviewsOptions{
AggregateOptions: aggOptions,
SelectFields: ctx.Payload.SelectFields,
LoadTimespent: ctx.Payload.LoadTimespent,
}
prc, err := c.PageviewStorage.List(o)
if err != nil {
return err
}
mt, err := PageviewRowCollection(prc).ToMediaType()
if err != nil {
return err
}
return ctx.OK(mt)
}
// Categories runs the categories action.
func (c *PageviewController) Categories(ctx *app.CategoriesPageviewsContext) error {
categories, err := c.PageviewStorage.Categories()
if err != nil {
return err
}
return ctx.OK(categories)
}
// Actions runs the action action. :)
func (c *PageviewController) Actions(ctx *app.ActionsPageviewsContext) error {
actions, err := c.PageviewStorage.Actions(ctx.Category)
if err != nil {
return err
}
return ctx.OK(actions)
}
// aggregateOptionsFromPageviewOptions converts payload data to AggregateOptions.
func aggregateOptionsFromPageviewOptions(payload *app.PageviewOptionsPayload) (model.AggregateOptions, error) {
var o model.AggregateOptions
for _, val := range payload.FilterBy {
fb := &model.FilterBy{
Tag: val.Tag,
Values: val.Values,
Inverse: false,
}
if val.Inverse != nil {
fb.Inverse = *val.Inverse
}
o.FilterBy = append(o.FilterBy, fb)
}
o.GroupBy = payload.GroupBy
if payload.TimeAfter != nil {
o.TimeAfter = *payload.TimeAfter
}
if payload.TimeBefore != nil {
o.TimeBefore = *payload.TimeBefore
}
if payload.TimeHistogram != nil {
o.TimeHistogram = &model.TimeHistogram{
Interval: payload.TimeHistogram.Interval,
}
if payload.TimeHistogram.TimeZone != nil {
location, err := time.LoadLocation(*payload.TimeHistogram.TimeZone)
if err != nil {
return o, err
}
o.TimeHistogram.TimeZone = location
}
}
if payload.CountHistogram != nil {
o.CountHistogram = &model.CountHistogram{
Field: payload.CountHistogram.Field,
Interval: payload.CountHistogram.Interval,
}
}
return o, nil
}<|fim▁end|>
| |
<|file_name|>parsers.js<|end_file_name|><|fim▁begin|>import type {ResponseType} from "./base.type";
function parseJSON(response: ResponseType): Object {
return response.json();<|fim▁hole|>
export {parseJSON};<|fim▁end|>
|
}
|
<|file_name|>FilmActor.py<|end_file_name|><|fim▁begin|>import unreal_engine as ue
import json
class FilmActor:
def begin_play(self):<|fim▁hole|>
def getjson(self):
ue.log("@@@@video getting json:")
loc = self.uobject.get_actor_location()
rot = self.uobject.get_actor_forward()
data = {
"x":loc.x,"y":loc.y,"z":loc.z,
"rx":rot.x, "ry":rot.y, "rz": rot.z
}
return json.dumps(data)
def addtoworld(self):
ue.log("@@@@video add to world")
return ""
def setjson(self,js):
ue.log("@@@@video setting json:")
data = json.loads(js)
loc = self.uobject.get_actor_location()
loc.x = data["x"]
loc.y = data["y"]
loc.z = data["z"]
self.uobject.set_actor_location(loc)
rot = self.uobject.get_actor_forward()
return True
def tick(self, delta_time):
pass<|fim▁end|>
|
self.pawn = self.uobject.get_owner()
|
<|file_name|>size_bmp32.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
"""
This script checks HighGUI's cvGetCaptureProperty functionality for correct return
of the frame width and height of an .avi file containing uncompressed 32bit Bitmap frames.
"""
# name if this test and it's requirements
TESTNAME = "size_bmp32"
REQUIRED = []<|fim▁hole|>import sys
import works
import size_test
# check requirements and delete old flag file, if it exists
if not works.check_files(REQUIRED,TESTNAME):
sys.exit(77)
# name of file we check here
FILENAME='bmp32.avi'
# run check routine
result=size_test.size_ok(FILENAME)
# create flag file for following tests
works.set_file(TESTNAME)
# return result of test routine
sys.exit(result)<|fim▁end|>
|
# needed for sys.exit(int), .works file handling and check routine
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Django
from django.views.generic import View, TemplateView
from django.shortcuts import redirect
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404
# CRMA
from .models import Subscription, EmailScheduler
from .models import cancel_subscription
from .utils import decode_id
class UnsubscribeCompleted(TemplateView):
template_name = 'crma/unsubscribe_completed.html'
class UnsubscribeView(View):
success_url = 'crma_unsubscribe_completed'
def get(self, request, *args, **kwargs):
key = kwargs['key'].lower()
subs_info = get_object_or_404(Subscription, unsubscribe_key=key)
cancel_subscription(subs_info)
return redirect(reverse(self.success_url))
class ViewWebMail(View):
template_name = 'crma/mail_viewer.html'<|fim▁hole|> # Read the schedule item
scheduler_id = decode_id(kwargs['scheduler_id'])
token = kwargs['scheduler_token']
item = get_object_or_404(EmailScheduler, id=scheduler_id)
return item.render(request, token=token, template=self.template_name)<|fim▁end|>
|
def get(self, request, *args, **kwargs):
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
name = "neurogenesis"
|
<|file_name|>chroot.py<|end_file_name|><|fim▁begin|>import json
from unit.http import TestHTTP
from unit.option import option
http = TestHTTP()
<|fim▁hole|> url='/config',
sock_type='unix',
addr=option.temp_dir + '/control.unit.sock',
body=json.dumps(
{
"listeners": {"*:7080": {"pass": "routes"}},
"routes": [
{
"action": {
"share": option.temp_dir,
"chroot": option.temp_dir,
}
}
],
}
),
)
if 'success' in resp['body']:
available['features']['chroot'] = True<|fim▁end|>
|
def check_chroot():
available = option.available
resp = http.put(
|
<|file_name|>filtersSpec.js<|end_file_name|><|fim▁begin|>//filters.js eventsApp.filter('durations'
'use strict';
describe('durations', function(){
beforeEach(module("eventsApp"));
it('should return "Half Hour" when given a 1', inject(function(durationFilter){
expect(durationFilter(1)).toEqual('Half Hour');
}))
it('should return "1 Hour" when given a 2', inject(function(durationFilter){
expect(durationFilter(2)).toEqual('1 Hour');
}))
it('should return "Half Day" when given a 3', inject(function(durationFilter){
expect(durationFilter(3)).toEqual('Half Day');<|fim▁hole|> expect(durationFilter(4)).toEqual('Full Day');
}))
})<|fim▁end|>
|
}))
it('should return " Hour" when given a 4', inject(function(durationFilter){
|
<|file_name|>symbolDeclarationEmit8.js<|end_file_name|><|fim▁begin|>//// [symbolDeclarationEmit8.ts]
var obj = {
[Symbol.isConcatSpreadable]: 0
}
//// [symbolDeclarationEmit8.js]
var obj = {
[Symbol.isConcatSpreadable]: 0
};
//// [symbolDeclarationEmit8.d.ts]
<|fim▁hole|> [Symbol.isConcatSpreadable]: number;
};<|fim▁end|>
|
declare var obj: {
|
<|file_name|>cisco_xr_ssh.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
from __future__ import unicode_literals
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class CiscoXrSSH(CiscoSSHConnection):
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self.set_base_prompt()
self.disable_paging()
self.set_terminal_width(command='terminal width 511')
def send_config_set(self, config_commands=None, exit_config_mode=True, **kwargs):
"""IOS-XR requires you not exit from configuration mode."""
return super(CiscoXrSSH, self).send_config_set(config_commands=config_commands,
exit_config_mode=False, **kwargs)
def commit(self, confirm=False, confirm_delay=None, comment='', label='', delay_factor=1):
"""
Commit the candidate configuration.
default (no options):
command_string = commit
confirm and confirm_delay:
command_string = commit confirmed <confirm_delay>
label (which is a label name):
command_string = commit label <label>
comment:
command_string = commit comment <comment>
supported combinations
label and confirm:
command_string = commit label <label> confirmed <confirm_delay>
label and comment:
command_string = commit label <label> comment <comment>
All other combinations will result in an exception.
failed commit message:
% Failed to commit one or more configuration items during a pseudo-atomic operation. All
changes made have been reverted. Please issue 'show configuration failed [inheritance]'
from this session to view the errors
message XR shows if other commits occurred:
One or more commits have occurred from other configuration sessions since this session
started or since the last commit was made from this session. You can use the 'show
configuration commit changes' command to browse the changes.
Exit of configuration mode with pending changes will cause the changes to be discarded and
an exception to be generated.
"""
delay_factor = self.select_delay_factor(delay_factor)
if confirm and not confirm_delay:
raise ValueError("Invalid arguments supplied to XR commit")
if confirm_delay and not confirm:
raise ValueError("Invalid arguments supplied to XR commit")
if comment and confirm:
raise ValueError("Invalid arguments supplied to XR commit")
# wrap the comment in quotes
if comment:
if '"' in comment:
raise ValueError("Invalid comment contains double quote")
comment = '"{0}"'.format(comment)
label = str(label)
error_marker = 'Failed to'
alt_error_marker = 'One or more commits have occurred from other'
# Select proper command string based on arguments provided
if label:
if comment:
command_string = 'commit label {0} comment {1}'.format(label, comment)
elif confirm:<|fim▁hole|> command_string = 'commit label {0} confirmed {1}'.format(label, str(confirm_delay))
else:
command_string = 'commit label {0}'.format(label)
elif confirm:
command_string = 'commit confirmed {0}'.format(str(confirm_delay))
elif comment:
command_string = 'commit comment {0}'.format(comment)
else:
command_string = 'commit'
# Enter config mode (if necessary)
output = self.config_mode()
output += self.send_command_expect(command_string, strip_prompt=False, strip_command=False,
delay_factor=delay_factor)
if error_marker in output:
raise ValueError("Commit failed with the following errors:\n\n{0}".format(output))
if alt_error_marker in output:
# Other commits occurred, don't proceed with commit
output += self.send_command_timing("no", strip_prompt=False, strip_command=False,
delay_factor=delay_factor)
raise ValueError("Commit failed with the following errors:\n\n{0}".format(output))
return output
def exit_config_mode(self, exit_config='end'):
"""Exit configuration mode."""
output = ''
if self.check_config_mode():
output = self.send_command_timing(exit_config, strip_prompt=False, strip_command=False)
if "Uncommitted changes found" in output:
output += self.send_command_timing('no\n', strip_prompt=False, strip_command=False)
if self.check_config_mode():
raise ValueError("Failed to exit configuration mode")
return output
@staticmethod
def normalize_linefeeds(a_string):
"""Convert '\r\n','\r\r\n', '\n\r', or '\r' to '\n."""
newline = re.compile(r'(\r\r\n|\r\n|\n\r|\r)')
return newline.sub('\n', a_string)<|fim▁end|>
| |
<|file_name|>gate_setting_sun.cpp<|end_file_name|><|fim▁begin|>/*
Dungeon : Gate of the Setting Sun 90 Heroic
Instance General Script<|fim▁hole|>
#include "gate_setting_sun.h"
#include "ScriptMgr.h"
#include "ScriptedCreature.h"
#include "Vehicle.h"
enum spells
{
SPELL_MANTID_MUNITION_EXPLOSION = 107153,
SPELL_EXPLOSE_GATE = 115456,
SPELL_BOMB_CAST_VISUAL = 106729,
SPELL_BOMB_AURA = 106875,
};
class mob_serpent_spine_defender : public CreatureScript
{
public:
mob_serpent_spine_defender() : CreatureScript("mob_serpent_spine_defender") { }
struct mob_serpent_spine_defenderAI : public ScriptedAI
{
mob_serpent_spine_defenderAI(Creature* creature) : ScriptedAI(creature) {}
uint32 attackTimer;
void Reset()
{
attackTimer = urand(1000, 5000);
}
void DamageDealt(Unit* /*target*/, uint32& damage, DamageEffectType /*damageType*/)
{
damage = 0;
}
void UpdateAI(uint32 diff)
{
if (!me->IsInCombat())
{
if (attackTimer <= diff)
{
if (Unit* target = me->SelectNearestTarget(5.0f))
if (!target->IsFriendlyTo(me))
AttackStart(target);
}
else
attackTimer -= diff;
}
DoMeleeAttackIfReady();
}
};
CreatureAI* GetAI(Creature* creature) const
{
return new mob_serpent_spine_defenderAI(creature);
}
};
class npc_krikthik_bombarder : public CreatureScript
{
public:
npc_krikthik_bombarder() : CreatureScript("npc_krikthik_bombarder") { }
struct npc_krikthik_bombarderAI : public ScriptedAI
{
npc_krikthik_bombarderAI(Creature* creature) : ScriptedAI(creature)
{
pInstance = creature->GetInstanceScript();
}
InstanceScript* pInstance;
uint32 bombTimer;
void Reset()
{
me->GetMotionMaster()->MoveRandom(5.0f);
bombTimer = urand(1000, 7500);
}
// Called when spell hits a target
void SpellHitTarget(Unit* target, SpellInfo const* /*spell*/)
{
if (target->GetEntry() == NPC_BOMB_STALKER)
me->AddAura(SPELL_BOMB_AURA, target);
}
void UpdateAI(uint32 diff)
{
if (bombTimer <= diff)
{
if (Unit* stalker = pInstance->instance->GetCreature(pInstance->GetData64(DATA_RANDOM_BOMB_STALKER)))
if (!stalker->HasAura(SPELL_BOMB_AURA))
me->CastSpell(stalker, SPELL_BOMB_CAST_VISUAL, true);
bombTimer = urand(1000, 5000);
}
else bombTimer -= diff;
}
};
CreatureAI* GetAI(Creature* creature) const
{
return new npc_krikthik_bombarderAI (creature);
}
};
//8359
class AreaTrigger_at_first_door : public AreaTriggerScript
{
public:
AreaTrigger_at_first_door() : AreaTriggerScript("at_first_door") {}
bool OnTrigger(Player* player, AreaTriggerEntry const* /*trigger*/)
{
if (player->GetInstanceScript())
player->GetInstanceScript()->SetData(DATA_OPEN_FIRST_DOOR, DONE);
return false;
}
};
class go_setting_sun_brasier : public GameObjectScript
{
public:
go_setting_sun_brasier() : GameObjectScript("go_setting_sun_brasier") { }
bool OnGossipHello(Player* player, GameObject* /*go*/)
{
if (player->GetInstanceScript())
player->GetInstanceScript()->SetData(DATA_BRASIER_CLICKED, DONE);
return false;
}
};
class go_setting_sun_temp_portal : public GameObjectScript
{
public:
go_setting_sun_temp_portal() : GameObjectScript("go_setting_sun_temp_portal") { }
bool OnGossipHello(Player* player, GameObject* go)
{
switch (go->GetEntry())
{
case 400001:
player->NearTeleportTo(1078.96f, 2305.48f, 381.55f, 0.01f);
break;
case 400002:
if (go->GetPositionZ() < 400.0f)
player->NearTeleportTo(go->GetPositionX(), go->GetPositionY(), 431.0f, go->GetOrientation());
else
player->NearTeleportTo(go->GetPositionX(), go->GetPositionY(), 388.5f, go->GetOrientation());
break;
}
return false;
}
};
class vehicle_artillery_to_wall : public VehicleScript
{
public:
vehicle_artillery_to_wall() : VehicleScript("vehicle_artillery_to_wall") {}
void OnAddPassenger(Vehicle* veh, Unit* /*passenger*/, int8 /*seatId*/)
{
if (veh->GetBase())
if (veh->GetBase()->ToCreature())
if (veh->GetBase()->ToCreature()->AI())
veh->GetBase()->ToCreature()->AI()->DoAction(0);
}
struct vehicle_artillery_to_wallAI : public ScriptedAI
{
vehicle_artillery_to_wallAI(Creature* creature) : ScriptedAI(creature)
{}
uint32 launchEventTimer;
void Reset()
{
launchEventTimer = 0;
}
void DoAction(int32 action)
{
launchEventTimer = 2500;
}
void UpdateAI(uint32 diff)
{
if (!launchEventTimer)
return;
if (launchEventTimer <= diff)
{
if (me->GetVehicleKit())
{
if (Unit* passenger = me->GetVehicleKit()->GetPassenger(0))
{
passenger->ExitVehicle();
passenger->GetMotionMaster()->MoveJump(1100.90f, 2304.58f, 381.23f, 30.0f, 50.0f);
}
}
launchEventTimer = 0;
}
else launchEventTimer -= diff;
}
};
CreatureAI* GetAI(Creature* creature) const
{
return new vehicle_artillery_to_wallAI(creature);
}
};
void AddSC_gate_setting_sun()
{
new mob_serpent_spine_defender();
new npc_krikthik_bombarder();
new AreaTrigger_at_first_door();
new go_setting_sun_brasier();
new go_setting_sun_temp_portal();
new vehicle_artillery_to_wall();
}<|fim▁end|>
|
*/
|
<|file_name|>vengine_cpy.py<|end_file_name|><|fim▁begin|>#
# DEPRECATED: implementation for ffi.verify()
#
import sys, imp
from . import model
from .error import VerificationError
<|fim▁hole|> _gen_python_module = True
def __init__(self, verifier):
self.verifier = verifier
self.ffi = verifier.ffi
self._struct_pending_verification = {}
self._types_of_builtin_functions = {}
def patch_extension_kwds(self, kwds):
pass
def find_module(self, module_name, path, so_suffixes):
try:
f, filename, descr = imp.find_module(module_name, path)
except ImportError:
return None
if f is not None:
f.close()
# Note that after a setuptools installation, there are both .py
# and .so files with the same basename. The code here relies on
# imp.find_module() locating the .so in priority.
if descr[0] not in so_suffixes:
return None
return filename
def collect_types(self):
self._typesdict = {}
self._generate("collecttype")
def _prnt(self, what=''):
self._f.write(what + '\n')
def _gettypenum(self, type):
# a KeyError here is a bug. please report it! :-)
return self._typesdict[type]
def _do_collect_type(self, tp):
if ((not isinstance(tp, model.PrimitiveType)
or tp.name == 'long double')
and tp not in self._typesdict):
num = len(self._typesdict)
self._typesdict[tp] = num
def write_source_to_f(self):
self.collect_types()
#
# The new module will have a _cffi_setup() function that receives
# objects from the ffi world, and that calls some setup code in
# the module. This setup code is split in several independent
# functions, e.g. one per constant. The functions are "chained"
# by ending in a tail call to each other.
#
# This is further split in two chained lists, depending on if we
# can do it at import-time or if we must wait for _cffi_setup() to
# provide us with the <ctype> objects. This is needed because we
# need the values of the enum constants in order to build the
# <ctype 'enum'> that we may have to pass to _cffi_setup().
#
# The following two 'chained_list_constants' items contains
# the head of these two chained lists, as a string that gives the
# call to do, if any.
self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)']
#
prnt = self._prnt
# first paste some standard set of lines that are mostly '#define'
prnt(cffimod_header)
prnt()
# then paste the C source given by the user, verbatim.
prnt(self.verifier.preamble)
prnt()
#
# call generate_cpy_xxx_decl(), for every xxx found from
# ffi._parser._declarations. This generates all the functions.
self._generate("decl")
#
# implement the function _cffi_setup_custom() as calling the
# head of the chained list.
self._generate_setup_custom()
prnt()
#
# produce the method table, including the entries for the
# generated Python->C function wrappers, which are done
# by generate_cpy_function_method().
prnt('static PyMethodDef _cffi_methods[] = {')
self._generate("method")
prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},')
prnt(' {NULL, NULL, 0, NULL} /* Sentinel */')
prnt('};')
prnt()
#
# standard init.
modname = self.verifier.get_module_name()
constants = self._chained_list_constants[False]
prnt('#if PY_MAJOR_VERSION >= 3')
prnt()
prnt('static struct PyModuleDef _cffi_module_def = {')
prnt(' PyModuleDef_HEAD_INIT,')
prnt(' "%s",' % modname)
prnt(' NULL,')
prnt(' -1,')
prnt(' _cffi_methods,')
prnt(' NULL, NULL, NULL, NULL')
prnt('};')
prnt()
prnt('PyMODINIT_FUNC')
prnt('PyInit_%s(void)' % modname)
prnt('{')
prnt(' PyObject *lib;')
prnt(' lib = PyModule_Create(&_cffi_module_def);')
prnt(' if (lib == NULL)')
prnt(' return NULL;')
prnt(' if (%s < 0 || _cffi_init() < 0) {' % (constants,))
prnt(' Py_DECREF(lib);')
prnt(' return NULL;')
prnt(' }')
prnt(' return lib;')
prnt('}')
prnt()
prnt('#else')
prnt()
prnt('PyMODINIT_FUNC')
prnt('init%s(void)' % modname)
prnt('{')
prnt(' PyObject *lib;')
prnt(' lib = Py_InitModule("%s", _cffi_methods);' % modname)
prnt(' if (lib == NULL)')
prnt(' return;')
prnt(' if (%s < 0 || _cffi_init() < 0)' % (constants,))
prnt(' return;')
prnt(' return;')
prnt('}')
prnt()
prnt('#endif')
def load_library(self, flags=None):
    """Import the freshly compiled extension module and wrap it.

    *flags* are optional dlopen() flags applied via sys.setdlopenflags()
    for the duration of the import.  Returns the FFILibrary instance;
    raises VerificationError if the module cannot be imported.
    """
    # XXX review all usages of 'self' here!
    # import it as a new extension module
    imp.acquire_lock()
    try:
        if hasattr(sys, "getdlopenflags"):
            previous_flags = sys.getdlopenflags()
        try:
            if hasattr(sys, "setdlopenflags") and flags is not None:
                sys.setdlopenflags(flags)
            module = imp.load_dynamic(self.verifier.get_module_name(),
                                      self.verifier.modulefilename)
        except ImportError as e:
            error = "importing %r: %s" % (self.verifier.modulefilename, e)
            raise VerificationError(error)
        finally:
            # always restore the process-wide dlopen flags
            if hasattr(sys, "setdlopenflags"):
                sys.setdlopenflags(previous_flags)
    finally:
        imp.release_lock()
    #
    # call loading_cpy_struct() to get the struct layout inferred by
    # the C compiler
    self._load(module, 'loading')
    #
    # the C code will need the <ctype> objects. Collect them in
    # order in a list.
    revmapping = dict([(value, key)
                       for (key, value) in self._typesdict.items()])
    lst = [revmapping[i] for i in range(len(revmapping))]
    lst = list(map(self.ffi._get_cached_btype, lst))
    #
    # build the FFILibrary class and instance and call _cffi_setup().
    # this will set up some fields like '_cffi_types', and only then
    # it will invoke the chained list of functions that will really
    # build (notably) the constant objects, as <cdata> if they are
    # pointers, and store them as attributes on the 'library' object.
    class FFILibrary(object):
        _cffi_python_module = module
        _cffi_ffi = self.ffi
        _cffi_dir = []
        def __dir__(self):
            return FFILibrary._cffi_dir + list(self.__dict__)
    library = FFILibrary()
    if module._cffi_setup(lst, VerificationError, library):
        import warnings
        warnings.warn("reimporting %r might overwrite older definitions"
                      % (self.verifier.get_module_name()))
    #
    # finally, call the loaded_cpy_xxx() functions. This will perform
    # the final adjustments, like copying the Python->C wrapper
    # functions from the module to the 'library' object, and setting
    # up the FFILibrary class with properties for the global C variables.
    self._load(module, 'loaded', library=library)
    module._cffi_original_ffi = self.ffi
    module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions
    return library
def _get_declarations(self):
    """Return the parser's declarations as a sorted (name, type) list.

    The qualifier element of each stored (type, qual) pair is dropped;
    sorting by name keeps the code-generation order deterministic.
    """
    pairs = []
    for decl_name, (decl_tp, _qual) in self.ffi._parser._declarations.items():
        pairs.append((decl_name, decl_tp))
    return sorted(pairs)
def _generate(self, step_name):
    """Dispatch every declaration to _generate_cpy_<kind>_<step_name>.

    Raises VerificationError for declaration kinds that have no
    generator method for this step.
    """
    for name, tp in self._get_declarations():
        kind, realname = name.split(' ', 1)
        try:
            method = getattr(self, '_generate_cpy_%s_%s' % (kind,
                                                            step_name))
        except AttributeError:
            raise VerificationError(
                "not implemented in verify(): %r" % name)
        try:
            method(tp, realname)
        except Exception as e:
            # tag the exception with the declaration it came from
            model.attach_exception_info(e, name)
            raise

def _load(self, module, step_name, **kwds):
    """Dispatch every declaration to _<step_name>_cpy_<kind>(tp, name, module)."""
    for name, tp in self._get_declarations():
        kind, realname = name.split(' ', 1)
        method = getattr(self, '_%s_cpy_%s' % (step_name, kind))
        try:
            method(tp, realname, module, **kwds)
        except Exception as e:
            model.attach_exception_info(e, name)
            raise
def _generate_nothing(self, tp, name):
    # Placeholder generator for declaration kinds that emit no C code.
    pass

def _loaded_noop(self, tp, name, module, **kwds):
    # Placeholder loader for declaration kinds needing no load-time work.
    pass
# ----------

def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode):
    """Emit C code converting PyObject *fromvar* to C variable *tovar*.

    *errcode* is the C statement executed on conversion failure
    (e.g. 'return NULL').
    """
    extraarg = ''
    if isinstance(tp, model.PrimitiveType):
        if tp.is_integer_type() and tp.name != '_Bool':
            # generic integer conversion, parameterized by the C type name
            converter = '_cffi_to_c_int'
            extraarg = ', %s' % tp.name
        else:
            converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''),
                                               tp.name.replace(' ', '_'))
        errvalue = '-1'
    #
    elif isinstance(tp, model.PointerType):
        self._convert_funcarg_to_c_ptr_or_array(tp, fromvar,
                                                tovar, errcode)
        return
    #
    elif isinstance(tp, (model.StructOrUnion, model.EnumType)):
        # a struct (not a struct pointer) as a function argument
        self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)'
                   % (tovar, self._gettypenum(tp), fromvar))
        self._prnt(' %s;' % errcode)
        return
    #
    elif isinstance(tp, model.FunctionPtrType):
        converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('')
        extraarg = ', _cffi_type(%d)' % self._gettypenum(tp)
        errvalue = 'NULL'
    #
    else:
        raise NotImplementedError(tp)
    #
    self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg))
    # errvalue can also be a legitimate result, so the generated code
    # additionally checks PyErr_Occurred().
    self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % (
        tovar, tp.get_c_name(''), errvalue))
    self._prnt(' %s;' % errcode)
def _extra_local_variables(self, tp, localvars, freelines):
    """Collect extra C locals and cleanup lines needed by pointer args."""
    if isinstance(tp, model.PointerType):
        localvars.add('Py_ssize_t datasize')
        localvars.add('struct _cffi_freeme_s *large_args_free = NULL')
        freelines.add('if (large_args_free != NULL)'
                      ' _cffi_free_array_arguments(large_args_free);')

def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode):
    """Emit C code converting a Python object into a pointer/array arg.

    Small buffers (<= 640 bytes) are alloca()'d; larger ones are
    heap-allocated by _cffi_convert_array_argument() and recorded in
    'large_args_free' for release after the call.
    """
    self._prnt(' datasize = _cffi_prepare_pointer_call_argument(')
    self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % (
        self._gettypenum(tp), fromvar, tovar))
    self._prnt(' if (datasize != 0) {')
    self._prnt(' %s = ((size_t)datasize) <= 640 ? '
               'alloca((size_t)datasize) : NULL;' % (tovar,))
    self._prnt(' if (_cffi_convert_array_argument(_cffi_type(%d), %s, '
               '(char **)&%s,' % (self._gettypenum(tp), fromvar, tovar))
    self._prnt(' datasize, &large_args_free) < 0)')
    self._prnt(' %s;' % errcode)
    self._prnt(' }')
def _convert_expr_from_c(self, tp, var, context):
    """Return a C expression converting C value *var* back to PyObject*.

    *context* is only used to build error messages (e.g. 'result of f').
    Raises TypeError for opaque struct/union values.
    """
    if isinstance(tp, model.PrimitiveType):
        if tp.is_integer_type() and tp.name != '_Bool':
            return '_cffi_from_c_int(%s, %s)' % (var, tp.name)
        elif tp.name != 'long double':
            return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var)
        else:
            # 'long double' has no dedicated converter; go through deref
            return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % (
                var, self._gettypenum(tp))
    elif isinstance(tp, (model.PointerType, model.FunctionPtrType)):
        return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
            var, self._gettypenum(tp))
    elif isinstance(tp, model.ArrayType):
        # arrays decay to a pointer to their item type
        return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
            var, self._gettypenum(model.PointerType(tp.item)))
    elif isinstance(tp, model.StructOrUnion):
        if tp.fldnames is None:
            raise TypeError("'%s' is used as %s, but is opaque" % (
                tp._get_c_name(), context))
        return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % (
            var, self._gettypenum(tp))
    elif isinstance(tp, model.EnumType):
        return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % (
            var, self._gettypenum(tp))
    else:
        raise NotImplementedError(tp)
# ----------
# typedefs: generates no code so far

_generate_cpy_typedef_collecttype = _generate_nothing
_generate_cpy_typedef_decl = _generate_nothing
_generate_cpy_typedef_method = _generate_nothing
_loading_cpy_typedef = _loaded_noop
_loaded_cpy_typedef = _loaded_noop

# ----------
# function declarations

def _generate_cpy_function_collecttype(self, tp, name):
    """Record the <ctype> objects that the wrapper for *tp* will need."""
    assert isinstance(tp, model.FunctionPtrType)
    if tp.ellipsis:
        self._do_collect_type(tp)
    else:
        # don't call _do_collect_type(tp) in this common case,
        # otherwise test_autofilled_struct_as_argument fails
        for type in tp.args:
            self._do_collect_type(type)
        self._do_collect_type(tp.result)
def _generate_cpy_function_decl(self, tp, name):
    """Emit the CPython wrapper '_cffi_f_<name>' for C function *name*.

    Variadic functions are instead emitted as constant function
    pointers (no wrapper).  The wrapper converts the Python arguments,
    calls the C function with the GIL released, and converts the
    result back.
    """
    assert isinstance(tp, model.FunctionPtrType)
    if tp.ellipsis:
        # cannot support vararg functions better than this: check for its
        # exact type (including the fixed arguments), and build it as a
        # constant function pointer (no CPython wrapper)
        self._generate_cpy_const(False, name, tp)
        return
    prnt = self._prnt
    numargs = len(tp.args)
    # the argument-parsing strategy depends on the arity (see the
    # matching METH_NOARGS/METH_O/METH_VARARGS flags in the method table)
    if numargs == 0:
        argname = 'noarg'
    elif numargs == 1:
        argname = 'arg0'
    else:
        argname = 'args'
    prnt('static PyObject *')
    prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname))
    prnt('{')
    #
    context = 'argument of %s' % name
    for i, type in enumerate(tp.args):
        prnt(' %s;' % type.get_c_name(' x%d' % i, context))
    #
    localvars = set()
    freelines = set()
    for type in tp.args:
        self._extra_local_variables(type, localvars, freelines)
    for decl in sorted(localvars):
        prnt(' %s;' % (decl,))
    #
    if not isinstance(tp.result, model.VoidType):
        result_code = 'result = '
        context = 'result of %s' % name
        prnt(' %s;' % tp.result.get_c_name(' result', context))
        prnt(' PyObject *pyresult;')
    else:
        result_code = ''
    #
    if len(tp.args) > 1:
        rng = range(len(tp.args))
        for i in rng:
            prnt(' PyObject *arg%d;' % i)
        prnt()
        prnt(' if (!PyArg_ParseTuple(args, "%s:%s", %s))' % (
            'O' * numargs, name, ', '.join(['&arg%d' % i for i in rng])))
        prnt(' return NULL;')
        prnt()
    #
    for i, type in enumerate(tp.args):
        self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i,
                                   'return NULL')
        prnt()
    #
    # release the GIL and preserve errno around the actual C call
    prnt(' Py_BEGIN_ALLOW_THREADS')
    prnt(' _cffi_restore_errno();')
    prnt(' { %s%s(%s); }' % (
        result_code, name,
        ', '.join(['x%d' % i for i in range(len(tp.args))])))
    prnt(' _cffi_save_errno();')
    prnt(' Py_END_ALLOW_THREADS')
    prnt()
    #
    prnt(' (void)self; /* unused */')
    if numargs == 0:
        prnt(' (void)noarg; /* unused */')
    if result_code:
        prnt(' pyresult = %s;' %
             self._convert_expr_from_c(tp.result, 'result', 'result type'))
        # free any heap-allocated large argument buffers before returning
        for freeline in freelines:
            prnt(' ' + freeline)
        prnt(' return pyresult;')
    else:
        for freeline in freelines:
            prnt(' ' + freeline)
        prnt(' Py_INCREF(Py_None);')
        prnt(' return Py_None;')
    prnt('}')
    prnt()
def _generate_cpy_function_method(self, tp, name):
    """Emit the PyMethodDef entry for the generated wrapper of *name*."""
    if tp.ellipsis:
        # variadic functions have no CPython wrapper to register
        return
    arity = len(tp.args)
    if arity == 0:
        flag = 'METH_NOARGS'
    elif arity == 1:
        flag = 'METH_O'
    else:
        flag = 'METH_VARARGS'
    self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, flag))
_loading_cpy_function = _loaded_noop

def _loaded_cpy_function(self, tp, name, module, library):
    """Copy the compiled wrapper from *module* onto *library* as *name*."""
    if tp.ellipsis:
        return
    func = getattr(module, name)
    setattr(library, name, func)
    # remember the declared C type of the builtin function (used later,
    # e.g. by ffi.typeof() on the wrapper)
    self._types_of_builtin_functions[func] = tp
# ----------
# named structs

_generate_cpy_struct_collecttype = _generate_nothing

def _generate_cpy_struct_decl(self, tp, name):
    """Emit layout-check helpers for 'struct <name>'."""
    assert name == tp.name
    self._generate_struct_or_union_decl(tp, 'struct', name)

def _generate_cpy_struct_method(self, tp, name):
    # method-table entry for the layout function of 'struct <name>'
    self._generate_struct_or_union_method(tp, 'struct', name)

def _loading_cpy_struct(self, tp, name, module):
    self._loading_struct_or_union(tp, 'struct', name, module)

def _loaded_cpy_struct(self, tp, name, module, **kwds):
    self._loaded_struct_or_union(tp)

_generate_cpy_union_collecttype = _generate_nothing

def _generate_cpy_union_decl(self, tp, name):
    """Emit layout-check helpers for 'union <name>'."""
    assert name == tp.name
    self._generate_struct_or_union_decl(tp, 'union', name)

def _generate_cpy_union_method(self, tp, name):
    self._generate_struct_or_union_method(tp, 'union', name)

def _loading_cpy_union(self, tp, name, module):
    self._loading_struct_or_union(tp, 'union', name, module)

def _loaded_cpy_union(self, tp, name, module, **kwds):
    self._loaded_struct_or_union(tp)
def _generate_struct_or_union_decl(self, tp, prefix, name):
    """Emit the compile-time check and runtime layout functions for *tp*.

    The check function provokes compiler warnings/errors when the
    cdef'ed field types disagree with the real ones; the layout
    function returns sizes/offsets for Python-side verification.
    """
    if tp.fldnames is None:
        return # nothing to do with opaque structs
    checkfuncname = '_cffi_check_%s_%s' % (prefix, name)
    layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
    cname = ('%s %s' % (prefix, name)).strip()
    #
    prnt = self._prnt
    prnt('static void %s(%s *p)' % (checkfuncname, cname))
    prnt('{')
    prnt(' /* only to generate compile-time warnings or errors */')
    prnt(' (void)p;')
    for fname, ftype, fbitsize, fqual in tp.enumfields():
        if (isinstance(ftype, model.PrimitiveType)
                and ftype.is_integer_type()) or fbitsize >= 0:
            # accept all integers, but complain on float or double
            prnt(' (void)((p->%s) << 1);' % fname)
        else:
            # only accept exactly the type declared.
            try:
                prnt(' { %s = &p->%s; (void)tmp; }' % (
                    ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
                    fname))
            except VerificationError as e:
                prnt(' /* %s */' % str(e)) # cannot verify it, ignore
    prnt('}')
    prnt('static PyObject *')
    prnt('%s(PyObject *self, PyObject *noarg)' % (layoutfuncname,))
    prnt('{')
    # the char-then-struct trick yields the struct's alignment
    prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname)
    prnt(' static Py_ssize_t nums[] = {')
    prnt(' sizeof(%s),' % cname)
    prnt(' offsetof(struct _cffi_aligncheck, y),')
    for fname, ftype, fbitsize, fqual in tp.enumfields():
        if fbitsize >= 0:
            continue # xxx ignore fbitsize for now
        prnt(' offsetof(%s, %s),' % (cname, fname))
        if isinstance(ftype, model.ArrayType) and ftype.length is None:
            prnt(' 0, /* %s */' % ftype._get_c_name())
        else:
            prnt(' sizeof(((%s *)0)->%s),' % (cname, fname))
    prnt(' -1')
    prnt(' };')
    prnt(' (void)self; /* unused */')
    prnt(' (void)noarg; /* unused */')
    prnt(' return _cffi_get_struct_layout(nums);')
    prnt(' /* the next line is not executed, but compiled */')
    prnt(' %s(0);' % (checkfuncname,))
    prnt('}')
    prnt()
def _generate_struct_or_union_method(self, tp, prefix, name):
    """Add the layout function of *tp* to the module's method table."""
    if tp.fldnames is None:
        # opaque struct/union: no layout function was generated
        return
    layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
    entry = ' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname,
                                                 layoutfuncname)
    self._prnt(entry)
def _loading_struct_or_union(self, tp, prefix, name, module):
    """Call the generated layout function and record/apply the layout."""
    if tp.fldnames is None:
        return # nothing to do with opaque structs
    layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
    #
    function = getattr(module, layoutfuncname)
    layout = function()
    if isinstance(tp, model.StructOrUnion) and tp.partial:
        # use the function()'s sizes and offsets to guide the
        # layout of the struct
        totalsize = layout[0]
        totalalignment = layout[1]
        fieldofs = layout[2::2]
        fieldsize = layout[3::2]
        tp.force_flatten()
        assert len(fieldofs) == len(fieldsize) == len(tp.fldnames)
        tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment
    else:
        # non-partial: verification against the cdef'ed layout happens
        # later, in _loaded_struct_or_union()
        cname = ('%s %s' % (prefix, name)).strip()
        self._struct_pending_verification[tp] = layout, cname

def _loaded_struct_or_union(self, tp):
    """Check the C compiler's layout against the cdef-computed one."""
    if tp.fldnames is None:
        return # nothing to do with opaque structs
    self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered
    if tp in self._struct_pending_verification:
        # check that the layout sizes and offsets match the real ones
        def check(realvalue, expectedvalue, msg):
            if realvalue != expectedvalue:
                raise VerificationError(
                    "%s (we have %d, but C compiler says %d)"
                    % (msg, expectedvalue, realvalue))
        ffi = self.ffi
        BStruct = ffi._get_cached_btype(tp)
        layout, cname = self._struct_pending_verification.pop(tp)
        check(layout[0], ffi.sizeof(BStruct), "wrong total size")
        check(layout[1], ffi.alignof(BStruct), "wrong total alignment")
        i = 2
        for fname, ftype, fbitsize, fqual in tp.enumfields():
            if fbitsize >= 0:
                continue # xxx ignore fbitsize for now
            check(layout[i], ffi.offsetof(BStruct, fname),
                  "wrong offset for field %r" % (fname,))
            if layout[i+1] != 0:
                BField = ffi._get_cached_btype(ftype)
                check(layout[i+1], ffi.sizeof(BField),
                      "wrong size for field %r" % (fname,))
            i += 2
        assert i == len(layout)
# ----------
# 'anonymous' declarations. These are produced for anonymous structs
# or unions; the 'name' is obtained by a typedef.

_generate_cpy_anonymous_collecttype = _generate_nothing

def _generate_cpy_anonymous_decl(self, tp, name):
    # anonymous enums are handled like named ones, just with no prefix
    if isinstance(tp, model.EnumType):
        self._generate_cpy_enum_decl(tp, name, '')
    else:
        self._generate_struct_or_union_decl(tp, '', name)

def _generate_cpy_anonymous_method(self, tp, name):
    if not isinstance(tp, model.EnumType):
        self._generate_struct_or_union_method(tp, '', name)

def _loading_cpy_anonymous(self, tp, name, module):
    if isinstance(tp, model.EnumType):
        self._loading_cpy_enum(tp, name, module)
    else:
        self._loading_struct_or_union(tp, '', name, module)

def _loaded_cpy_anonymous(self, tp, name, module, **kwds):
    if isinstance(tp, model.EnumType):
        self._loaded_cpy_enum(tp, name, module, **kwds)
    else:
        self._loaded_struct_or_union(tp)
# ----------
# constants, likely declared with '#define'

def _generate_cpy_const(self, is_int, name, tp=None, category='const',
                        vartp=None, delayed=True, size_too=False,
                        check_value=None):
    """Emit a C initializer '_cffi_<category>_<name>(lib)' that stores
    the constant/variable as an attribute on the library object.

    The generated function returns by tail-calling the previous head
    of a chained list of initializers (self._chained_list_constants,
    indexed by *delayed*), so calling the final head runs them all.
    """
    prnt = self._prnt
    funcname = '_cffi_%s_%s' % (category, name)
    prnt('static int %s(PyObject *lib)' % funcname)
    prnt('{')
    prnt(' PyObject *o;')
    prnt(' int res;')
    if not is_int:
        prnt(' %s;' % (vartp or tp).get_c_name(' i', name))
    else:
        assert category == 'const'
    #
    if check_value is not None:
        self._check_int_constant_value(name, check_value)
    #
    if not is_int:
        if category == 'var':
            # for variables, expose a pointer to the C global
            realexpr = '&' + name
        else:
            realexpr = name
        prnt(' i = (%s);' % (realexpr,))
        prnt(' o = %s;' % (self._convert_expr_from_c(tp, 'i',
                                                     'variable type'),))
        assert delayed
    else:
        prnt(' o = _cffi_from_c_int_const(%s);' % name)
    prnt(' if (o == NULL)')
    prnt(' return -1;')
    if size_too:
        # wrap into a 2-tuple (value, sizeof) for '...'-length arrays
        prnt(' {')
        prnt(' PyObject *o1 = o;')
        prnt(' o = Py_BuildValue("On", o1, (Py_ssize_t)sizeof(%s));'
             % (name,))
        prnt(' Py_DECREF(o1);')
        prnt(' if (o == NULL)')
        prnt(' return -1;')
        prnt(' }')
    prnt(' res = PyObject_SetAttrString(lib, "%s", o);' % name)
    prnt(' Py_DECREF(o);')
    prnt(' if (res < 0)')
    prnt(' return -1;')
    prnt(' return %s;' % self._chained_list_constants[delayed])
    # this function becomes the new head of the chained list
    self._chained_list_constants[delayed] = funcname + '(lib)'
    prnt('}')
    prnt()

def _generate_cpy_constant_collecttype(self, tp, name):
    # only non-integer constants need their <ctype> collected
    is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
    if not is_int:
        self._do_collect_type(tp)

def _generate_cpy_constant_decl(self, tp, name):
    is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
    self._generate_cpy_const(is_int, name, tp)

_generate_cpy_constant_method = _generate_nothing
_loading_cpy_constant = _loaded_noop
_loaded_cpy_constant = _loaded_noop
# ----------
# enums

def _check_int_constant_value(self, name, value, err_prefix=''):
    """Emit a C check that *name* really has the integer value *value*.

    On mismatch the generated code formats the actual value into a
    VerificationError message (prefixed by *err_prefix*) and returns -1.
    """
    prnt = self._prnt
    # branch on the sign so the comparison uses the right C type width
    if value <= 0:
        prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % (
            name, name, value))
    else:
        prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % (
            name, name, value))
    prnt(' char buf[64];')
    prnt(' if ((%s) <= 0)' % name)
    prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % name)
    prnt(' else')
    prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' %
         name)
    prnt(' PyErr_Format(_cffi_VerificationError,')
    prnt(' "%s%s has the real value %s, not %s",')
    prnt(' "%s", "%s", buf, "%d");' % (
        err_prefix, name, value))
    prnt(' return -1;')
    prnt(' }')
def _enum_funcname(self, prefix, name):
    """Return a C-safe initializer name for enum *name*.

    Dollar signs (used internally for anonymous enums, e.g. "$enum_$1")
    are not valid in C identifiers and are rewritten to "___D_".
    """
    # "$enum_$1" => "___D_enum____D_1"
    sanitized = name.replace('$', '___D_')
    return '_cffi_e_%s_%s' % (prefix, sanitized)
def _generate_cpy_enum_decl(self, tp, name, prefix='enum'):
    """Emit value checks for enum *name*, or constants if it is partial."""
    if tp.partial:
        # partial enums: values unknown at cdef time; load every
        # enumerator as an ordinary, non-delayed integer constant
        for enumerator in tp.enumerators:
            self._generate_cpy_const(True, enumerator, delayed=False)
        return
    #
    funcname = self._enum_funcname(prefix, name)
    prnt = self._prnt
    prnt('static int %s(PyObject *lib)' % funcname)
    prnt('{')
    for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
        self._check_int_constant_value(enumerator, enumvalue,
                                       "enum %s: " % name)
    prnt(' return %s;' % self._chained_list_constants[True])
    # this checker becomes the new head of the delayed chained list
    self._chained_list_constants[True] = funcname + '(lib)'
    prnt('}')
    prnt()

_generate_cpy_enum_collecttype = _generate_nothing
_generate_cpy_enum_method = _generate_nothing

def _loading_cpy_enum(self, tp, name, module):
    """For partial enums, read the real enumerator values from *module*."""
    if tp.partial:
        enumvalues = [getattr(module, enumerator)
                      for enumerator in tp.enumerators]
        tp.enumvalues = tuple(enumvalues)
        tp.partial_resolved = True

def _loaded_cpy_enum(self, tp, name, module, library):
    # expose every enumerator as a plain attribute on the library
    for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
        setattr(library, enumerator, enumvalue)
# ----------
# macros: for now only for integers

def _generate_cpy_macro_decl(self, tp, name):
    """Emit an integer constant for macro *name*.

    *tp* is either the literal string '...' (value unknown, no check)
    or the integer value to verify against.
    """
    if tp == '...':
        check_value = None
    else:
        check_value = tp # an integer
    self._generate_cpy_const(True, name, check_value=check_value)

_generate_cpy_macro_collecttype = _generate_nothing
_generate_cpy_macro_method = _generate_nothing
_loading_cpy_macro = _loaded_noop
_loaded_cpy_macro = _loaded_noop
# ----------
# global variables

def _generate_cpy_variable_collecttype(self, tp, name):
    # variables are accessed through a pointer to them
    if isinstance(tp, model.ArrayType):
        tp_ptr = model.PointerType(tp.item)
    else:
        tp_ptr = model.PointerType(tp)
    self._do_collect_type(tp_ptr)

def _generate_cpy_variable_decl(self, tp, name):
    """Emit the initializer exposing global variable *name*."""
    if isinstance(tp, model.ArrayType):
        tp_ptr = model.PointerType(tp.item)
        # '...'-length arrays also report sizeof() at runtime
        self._generate_cpy_const(False, name, tp, vartp=tp_ptr,
                                 size_too = (tp.length == '...'))
    else:
        tp_ptr = model.PointerType(tp)
        self._generate_cpy_const(False, name, tp_ptr, category='var')

_generate_cpy_variable_method = _generate_nothing
_loading_cpy_variable = _loaded_noop

def _loaded_cpy_variable(self, tp, name, module, library):
    """Fix up global variable *name* on the freshly built *library*.

    Arrays stay plain attributes (assignment is forbidden anyway);
    scalars are replaced by a class property reading/writing ptr[0].
    """
    value = getattr(library, name)
    if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the
                                        # sense that "a=..." is forbidden
        if tp.length == '...':
            # the generated code returned a (pointer, sizeof) 2-tuple
            assert isinstance(value, tuple)
            (value, size) = value
            BItemType = self.ffi._get_cached_btype(tp.item)
            length, rest = divmod(size, self.ffi.sizeof(BItemType))
            if rest != 0:
                raise VerificationError(
                    "bad size: %r does not seem to be an array of %s" %
                    (name, tp.item))
            tp = tp.resolve_length(length)
        # 'value' is a <cdata 'type *'> which we have to replace with
        # a <cdata 'type[N]'> if the N is actually known
        if tp.length is not None:
            BArray = self.ffi._get_cached_btype(tp)
            value = self.ffi.cast(BArray, value)
            setattr(library, name, value)
        return
    # remove ptr=<cdata 'int *'> from the library instance, and replace
    # it by a property on the class, which reads/writes into ptr[0].
    ptr = value
    delattr(library, name)
    def getter(library):
        return ptr[0]
    def setter(library, value):
        ptr[0] = value
    setattr(type(library), name, property(getter, setter))
    type(library)._cffi_dir.append(name)
# ----------

def _generate_setup_custom(self):
    """Emit _cffi_setup_custom(), which runs the delayed initializers."""
    emit = self._prnt
    emit('static int _cffi_setup_custom(PyObject *lib)')
    emit('{')
    # tail-call the current head of the delayed (index True) chain
    emit(' return %s;' % self._chained_list_constants[True])
    emit('}')
cffimod_header = r'''
#include <Python.h>
#include <stddef.h>
/* this block of #ifs should be kept exactly identical between
c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
and cffi/_cffi_include.h */
#if defined(_MSC_VER)
# include <malloc.h> /* for alloca() */
# if _MSC_VER < 1600 /* MSVC < 2010 */
typedef __int8 int8_t;
typedef __int16 int16_t;
typedef __int32 int32_t;
typedef __int64 int64_t;
typedef unsigned __int8 uint8_t;
typedef unsigned __int16 uint16_t;
typedef unsigned __int32 uint32_t;
typedef unsigned __int64 uint64_t;
typedef __int8 int_least8_t;
typedef __int16 int_least16_t;
typedef __int32 int_least32_t;
typedef __int64 int_least64_t;
typedef unsigned __int8 uint_least8_t;
typedef unsigned __int16 uint_least16_t;
typedef unsigned __int32 uint_least32_t;
typedef unsigned __int64 uint_least64_t;
typedef __int8 int_fast8_t;
typedef __int16 int_fast16_t;
typedef __int32 int_fast32_t;
typedef __int64 int_fast64_t;
typedef unsigned __int8 uint_fast8_t;
typedef unsigned __int16 uint_fast16_t;
typedef unsigned __int32 uint_fast32_t;
typedef unsigned __int64 uint_fast64_t;
typedef __int64 intmax_t;
typedef unsigned __int64 uintmax_t;
# else
# include <stdint.h>
# endif
# if _MSC_VER < 1800 /* MSVC < 2013 */
# ifndef __cplusplus
typedef unsigned char _Bool;
# endif
# endif
#else
# include <stdint.h>
# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
# include <alloca.h>
# endif
#endif
#if PY_MAJOR_VERSION < 3
# undef PyCapsule_CheckExact
# undef PyCapsule_GetPointer
# define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule))
# define PyCapsule_GetPointer(capsule, name) \
(PyCObject_AsVoidPtr(capsule))
#endif
#if PY_MAJOR_VERSION >= 3
# define PyInt_FromLong PyLong_FromLong
#endif
#define _cffi_from_c_double PyFloat_FromDouble
#define _cffi_from_c_float PyFloat_FromDouble
#define _cffi_from_c_long PyInt_FromLong
#define _cffi_from_c_ulong PyLong_FromUnsignedLong
#define _cffi_from_c_longlong PyLong_FromLongLong
#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
#define _cffi_from_c__Bool PyBool_FromLong
#define _cffi_to_c_double PyFloat_AsDouble
#define _cffi_to_c_float PyFloat_AsDouble
#define _cffi_from_c_int_const(x) \
(((x) > 0) ? \
((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ? \
PyInt_FromLong((long)(x)) : \
PyLong_FromUnsignedLongLong((unsigned long long)(x)) : \
((long long)(x) >= (long long)LONG_MIN) ? \
PyInt_FromLong((long)(x)) : \
PyLong_FromLongLong((long long)(x)))
#define _cffi_from_c_int(x, type) \
(((type)-1) > 0 ? /* unsigned */ \
(sizeof(type) < sizeof(long) ? \
PyInt_FromLong((long)x) : \
sizeof(type) == sizeof(long) ? \
PyLong_FromUnsignedLong((unsigned long)x) : \
PyLong_FromUnsignedLongLong((unsigned long long)x)) : \
(sizeof(type) <= sizeof(long) ? \
PyInt_FromLong((long)x) : \
PyLong_FromLongLong((long long)x)))
#define _cffi_to_c_int(o, type) \
((type)( \
sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \
: (type)_cffi_to_c_i8(o)) : \
sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \
: (type)_cffi_to_c_i16(o)) : \
sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \
: (type)_cffi_to_c_i32(o)) : \
sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \
: (type)_cffi_to_c_i64(o)) : \
(Py_FatalError("unsupported size for type " #type), (type)0)))
#define _cffi_to_c_i8 \
((int(*)(PyObject *))_cffi_exports[1])
#define _cffi_to_c_u8 \
((int(*)(PyObject *))_cffi_exports[2])
#define _cffi_to_c_i16 \
((int(*)(PyObject *))_cffi_exports[3])
#define _cffi_to_c_u16 \
((int(*)(PyObject *))_cffi_exports[4])
#define _cffi_to_c_i32 \
((int(*)(PyObject *))_cffi_exports[5])
#define _cffi_to_c_u32 \
((unsigned int(*)(PyObject *))_cffi_exports[6])
#define _cffi_to_c_i64 \
((long long(*)(PyObject *))_cffi_exports[7])
#define _cffi_to_c_u64 \
((unsigned long long(*)(PyObject *))_cffi_exports[8])
#define _cffi_to_c_char \
((int(*)(PyObject *))_cffi_exports[9])
#define _cffi_from_c_pointer \
((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10])
#define _cffi_to_c_pointer \
((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11])
#define _cffi_get_struct_layout \
((PyObject *(*)(Py_ssize_t[]))_cffi_exports[12])
#define _cffi_restore_errno \
((void(*)(void))_cffi_exports[13])
#define _cffi_save_errno \
((void(*)(void))_cffi_exports[14])
#define _cffi_from_c_char \
((PyObject *(*)(char))_cffi_exports[15])
#define _cffi_from_c_deref \
((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16])
#define _cffi_to_c \
((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17])
#define _cffi_from_c_struct \
((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18])
#define _cffi_to_c_wchar_t \
((wchar_t(*)(PyObject *))_cffi_exports[19])
#define _cffi_from_c_wchar_t \
((PyObject *(*)(wchar_t))_cffi_exports[20])
#define _cffi_to_c_long_double \
((long double(*)(PyObject *))_cffi_exports[21])
#define _cffi_to_c__Bool \
((_Bool(*)(PyObject *))_cffi_exports[22])
#define _cffi_prepare_pointer_call_argument \
((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23])
#define _cffi_convert_array_from_object \
((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24])
#define _CFFI_NUM_EXPORTS 25
typedef struct _ctypedescr CTypeDescrObject;
static void *_cffi_exports[_CFFI_NUM_EXPORTS];
static PyObject *_cffi_types, *_cffi_VerificationError;
static int _cffi_setup_custom(PyObject *lib); /* forward */
static PyObject *_cffi_setup(PyObject *self, PyObject *args)
{
PyObject *library;
int was_alive = (_cffi_types != NULL);
(void)self; /* unused */
if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError,
&library))
return NULL;
Py_INCREF(_cffi_types);
Py_INCREF(_cffi_VerificationError);
if (_cffi_setup_custom(library) < 0)
return NULL;
return PyBool_FromLong(was_alive);
}
union _cffi_union_alignment_u {
unsigned char m_char;
unsigned short m_short;
unsigned int m_int;
unsigned long m_long;
unsigned long long m_longlong;
float m_float;
double m_double;
long double m_longdouble;
};
struct _cffi_freeme_s {
struct _cffi_freeme_s *next;
union _cffi_union_alignment_u alignment;
};
#ifdef __GNUC__
__attribute__((unused))
#endif
static int _cffi_convert_array_argument(CTypeDescrObject *ctptr, PyObject *arg,
char **output_data, Py_ssize_t datasize,
struct _cffi_freeme_s **freeme)
{
char *p;
if (datasize < 0)
return -1;
p = *output_data;
if (p == NULL) {
struct _cffi_freeme_s *fp = (struct _cffi_freeme_s *)PyObject_Malloc(
offsetof(struct _cffi_freeme_s, alignment) + (size_t)datasize);
if (fp == NULL)
return -1;
fp->next = *freeme;
*freeme = fp;
p = *output_data = (char *)&fp->alignment;
}
memset((void *)p, 0, (size_t)datasize);
return _cffi_convert_array_from_object(p, ctptr, arg);
}
#ifdef __GNUC__
__attribute__((unused))
#endif
static void _cffi_free_array_arguments(struct _cffi_freeme_s *freeme)
{
do {
void *p = (void *)freeme;
freeme = freeme->next;
PyObject_Free(p);
} while (freeme != NULL);
}
static int _cffi_init(void)
{
PyObject *module, *c_api_object = NULL;
module = PyImport_ImportModule("_cffi_backend");
if (module == NULL)
goto failure;
c_api_object = PyObject_GetAttrString(module, "_C_API");
if (c_api_object == NULL)
goto failure;
if (!PyCapsule_CheckExact(c_api_object)) {
PyErr_SetNone(PyExc_ImportError);
goto failure;
}
memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"),
_CFFI_NUM_EXPORTS * sizeof(void *));
Py_DECREF(module);
Py_DECREF(c_api_object);
return 0;
failure:
Py_XDECREF(module);
Py_XDECREF(c_api_object);
return -1;
}
#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num))
/**********/
'''
|
class VCPythonEngine(object):
_class_key = 'x'
|
# ---- rbfTool.py ----
"""
Maya tool for building RBF (radial basis function) pose-driver networks.
"""
from functools import partial
from maya import OpenMaya
from maya import OpenMayaUI
from maya import cmds
from PySide import QtCore
from PySide import QtGui
from shiboken import wrapInstance
from shiboken import getCppPointer
class RbfSettings(object):
    """
    Class for storing rbf network creation options.
    """

    # Option flags, all off by default; the UI layer flips them on.
    _OPTION_FLAGS = ('connectMatrix', 'connectRgbValues',
                     'connectAlphaValues', 'useAttributeAlias',
                     'visualizeFalloff')

    def __init__(self):
        for flag in self._OPTION_FLAGS:
            setattr(self, flag, False)
class RbfManager(object):
    """
    Pose driver mixing contribution of various elements in n spaces.
    """

    def __init__(self):
        # True when the jsRadial plugin loaded successfully.  Bug fix:
        # initPlugins() used to return None on every path, so the
        # 'self.pluginState is False' guard below was dead code.
        self.pluginState = self.initPlugins()

    def createNetwork(self, inputRbfSettings):
        """Create an rbf network driven by *inputRbfSettings* (RbfSettings)."""
        if self.pluginState is False:
            return

    def vizualizeSigma(self):
        # TODO: draw the falloff (sigma) of each pose in the viewport.
        pass

    def createSigmaShader(self):
        # TODO: build the shader used to visualize the falloff spheres.
        pass

    def initPlugins(self):
        """Load the jsRadial plugin; return True on success, False otherwise."""
        try:
            #you dont seem to use the class elements nor Api related encapsulation
            #of pymel so basically you can stick to maya python commands?
            cmds.loadPlugin('jsRadial.mll')
        except Exception:
            # cmds.error raises inside Maya; also return False so callers
            # that swallow the error still see the failed state.
            cmds.error('ERROR: jsRadial.mll not loaded.')
            return False
        return True
class RbfOptionsWidget(QtGui.QWidget):
    """Checkbox panel exposing the rbf network creation options."""

    def __init__(self, parent=None):
        super(RbfOptionsWidget, self).__init__(parent)
        self.setupUI()

    def setupUI(self):
        """Build the option checkboxes and stack them vertically."""
        self.connectMatrixCheckBox = QtGui.QCheckBox('Connect Matrix')
        self.connectRgbCheckBox = QtGui.QCheckBox('Connect RGB Values from Material')
        self.connectAlphaCheckBox = QtGui.QCheckBox('Connect Alpha Values from Material')
        self.useAliasCheckBox = QtGui.QCheckBox('Use Aliases for Targets on RBF Node')
        sphereLabel = 'Create Spheres to Visualize Falloff (most accurate for Gaussian)'
        self.createSphereCheckBox = QtGui.QCheckBox(sphereLabel)
        self.mainLayout = QtGui.QVBoxLayout()
        self.mainLayout.setContentsMargins(5, 5, 5, 5)
        allBoxes = (self.connectMatrixCheckBox,
                    self.connectRgbCheckBox,
                    self.connectAlphaCheckBox,
                    self.useAliasCheckBox,
                    self.createSphereCheckBox)
        for checkBox in allBoxes:
            # every option defaults to enabled
            checkBox.setChecked(True)
            self.mainLayout.addWidget(checkBox)
        self.setLayout(self.mainLayout)
class RbfListWidget(QtGui.QWidget):
    """Two stacked list views: the pose list above the target list."""

    def __init__(self, parent=None):
        super(RbfListWidget, self).__init__(parent)
        self.setupUI()

    def setupUI(self):
        """Create both list views and lay them out vertically."""
        self.poseListWidget = QtGui.QListView()
        self.targetListWidget = QtGui.QListView()
        self.poselistLayout = QtGui.QVBoxLayout()
        # keep the pose list short and the target list tall
        self.poseListWidget.setMaximumHeight(20)
        self.poseListWidget.setMinimumWidth(190)
        self.targetListWidget.setMinimumHeight(260)
        self.poselistLayout.setContentsMargins(0, 0, 0, 0)
        self.poselistLayout.setSpacing(14)
        for view in (self.poseListWidget, self.targetListWidget):
            self.poselistLayout.addWidget(view)
        self.setLayout(self.poselistLayout)
class RbfDataIoWidget(QtGui.QWidget):
    """Button grid for adding/removing poses and targets."""

    def __init__(self, parent=None):
        super(RbfDataIoWidget, self).__init__(parent)
        self.setupUI()

    def setupUI(self):
        """Create the four add/remove buttons and wire their signals."""
        self.anchorWidget = QtGui.QWidget()
        self.addPoseButton = QtGui.QPushButton('Add Pose')
        self.removePoseButton = QtGui.QPushButton('Remove Pose')
        self.addTargetButton = QtGui.QPushButton('Add Target')
        self.removeTargetButton = QtGui.QPushButton('Remove Target')
        self.ioLayout = QtGui.QGridLayout()
        self.mainLayout = QtGui.QVBoxLayout()
        buttonColumnWidth = 78
        self.ioLayout.setContentsMargins(0, 0, 0, 0)
        self.ioLayout.setColumnMinimumWidth(0, buttonColumnWidth)
        self.ioLayout.setColumnMinimumWidth(1, buttonColumnWidth)
        self.ioLayout.setSpacing(10)
        self.mainLayout.setContentsMargins(0, 0, 0, 0)
        # grid: remove buttons in column 0, add buttons in column 1
        self.ioLayout.addWidget(self.removePoseButton, 0, 0)
        self.ioLayout.addWidget(self.addPoseButton, 0, 1)
        self.ioLayout.addWidget(self.removeTargetButton, 1, 0)
        self.ioLayout.addWidget(self.addTargetButton, 1, 1)
        self.mainLayout.addWidget(self.anchorWidget)
        self.mainLayout.addStretch()
        self.anchorWidget.setLayout(self.ioLayout)
        self.setLayout(self.mainLayout)
        # hook every button up to its (currently stub) handler
        self.addPoseButton.clicked.connect(self._addPose)
        self.removePoseButton.clicked.connect(self._removePose)
        self.addTargetButton.clicked.connect(self._addTargets)
        self.removeTargetButton.clicked.connect(self._removeTargets)

    def _addPose(self):
        # TODO: capture the current pose into the pose list.
        pass

    def _addTargets(self):
        # TODO: add the selected nodes as targets.
        pass

    def _removeTargets(self):
        # TODO: remove the selected targets.
        pass

    def _removePose(self):
        # TODO: remove the selected pose.
        pass
class RbfHeaderWidget(QtGui.QWidget):
def __init__(self, parent=None):
super(RbfHeaderWidget, self).__init__(parent)
self.setupUI()
def setupUI(self):
#create widget
self.headerLabel = QtGui.QLabel('RBF Network Builder')
self.creditLabel = QtGui.QLabel('by James Sumner III')
self.websiteLabel = QtGui.QLabel('www.jamessumneriii.com')
#Create layout
self.headerLayout = QtGui.QVBoxLayout()
#Set properties
self.headerLabel.setStyleSheet('font-size: 16pt' )
self.creditLabel.setStyleSheet('color: rgb(140,140,140)')
self.websiteLabel.setStyleSheet('color: rgb(140,140,140); link-decoration: none;')
#Assign widget to layouts
self.headerLayout.addWidget(self.headerLabel)
self.headerLayout.addWidget(self.creditLabel)
self.headerLayout.addWidget(self.websiteLabel)
#set the main layout for this UI part
self.setLayout(self.headerLayout)
class RbfManagerTool(QtGui.QDialog):
"""
General UI used to create and maintain pose drivers.
"""
def __init__(self, parent=None):
super(RbfManagerTool, self).__init__(parent=parent)
#Parent widget under Maya main window
self.setParent(parent)
self.setWindowFlags(QtCore.Qt.Window)
self.toolName = 'RBF Tool'
self.pose = []
self.targets = []
self.setupUI()
def setupUI(self):
#cmds.undoInfo(openChunk=True) will bundle a list of commands
#which will modify the Dag or the dg hence the separation in the
#API into 2 classes MDAGModifier / MDGModifier.
#not sure about its usefulness for UI?
#create widget
self.tabWidget = QtGui.QTabWidget()
self.headerWidget = RbfHeaderWidget()
self.createTab = self._buildCreateTab()
#Create layout
self.mainLayout = QtGui.QVBoxLayout()
#Set properties
self.setWindowTitle(self.toolName)
self.mainLayout.setContentsMargins(10, 10, 10, 10)
#Assign widget to layouts
self.tabWidget.addTab(self.createTab, 'Create')
#self.tabWidget.addTab(self.editTab, 'Edit')
self.mainLayout.addWidget(self.headerWidget)
self.mainLayout.addWidget(self.tabWidget)
self.setLayout(self.mainLayout)
def _buildCreateTab(self):
#create widget
self.createTabWidget = QtGui.QWidget()
self.createTabAnchor = QtGui.QWidget()
self.ioWidget = RbfDataIoWidget()
self.poseListWidget = RbfListWidget()
<|fim▁hole|> #Create layout
self.createTabLayout = QtGui.QHBoxLayout()
self.createTabOptionLayout = QtGui.QVBoxLayout()
#Set properties
self.createTabLayout.setContentsMargins(5, 5, 5, 5)
self.createTabOptionLayout.setContentsMargins(0, 0, 0, 0)
#Assign widget to layouts
self.createTabOptionLayout.addWidget(self.createTabAnchor)
self.createTabOptionLayout.addWidget(self.optionsWidget)
self.createTabLayout.addWidget(self.ioWidget)
self.createTabLayout.addWidget(self.poseListWidget)
self.createTabWidget.setLayout(self.createTabOptionLayout)
self.createTabAnchor.setLayout(self.createTabLayout)
return self.createTabWidget
def DeleteWindowInstances(mayaMainWindow):
"""
Close tool by type.
"""
checkWidget = RbfManagerTool()
#Check if window exists
for child in mayaMainWindow.children():
if not isinstance(child, QtGui.QWidget):
continue
#delete previous UI instance (isinstance was giving weird result)
if child.__class__.__name__ == checkWidget.__class__.__name__:
child.deleteLater()
child.parent = None
checkWidget = None
def Run():
mayaMainWindowPtr = OpenMayaUI.MQtUtil.mainWindow()
mayaMainWindow = wrapInstance(long(mayaMainWindowPtr), QtGui.QWidget)
DeleteWindowInstances(mayaMainWindow)
tool = RbfManagerTool(parent=mayaMainWindow)
tool.show()
return tool<|fim▁end|>
|
self.optionsWidget = RbfOptionsWidget()
|
<|file_name|>update-translations.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Copyright (c) 2014 Wladimir J. van der Laan
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Run this script from the root of the repository to update all translations from
transifex.
It will do the following automatically:
- fetch all translations using the tx tool
- post-process them into valid and committable format
- remove invalid control characters
- remove location tags (makes diffs less noisy)
TODO:
- auto-add new translations to the build system according to the translation process
'''
from __future__ import division, print_function
import subprocess
import re
import sys
import os
import io
import xml.etree.ElementTree as ET
# Name of transifex tool
TX = 'tx'
# Name of source language file
SOURCE_LANG = 'barnacoin_en.ts'
# Directory with locale files
LOCALE_DIR = 'src/qt/locale'
def check_at_repository_root():
if not os.path.exists('.git'):
print('No .git directory found')
print('Execute this script at the root of the repository', file=sys.stderr)
exit(1)
def fetch_all_translations():
if subprocess.call([TX, 'pull', '-f']):
print('Error while fetching translations', file=sys.stderr)
exit(1)
def find_format_specifiers(s):
'''Find all format specifiers in a string.'''
pos = 0
specifiers = []
while True:
percent = s.find('%', pos)
if percent < 0:
break
specifiers.append(s[percent+1])
pos = percent+2
return specifiers
def split_format_specifiers(specifiers):
'''Split format specifiers between numeric (Qt) and others (strprintf)'''
numeric = []
other = []
for s in specifiers:
if s in {'1','2','3','4','5','6','7','8','9'}:
numeric.append(s)
else:
other.append(s)
# numeric (Qt) can be present in any order, others (strprintf) must be in specified order
return set(numeric),other
def sanitize_string(s):
'''Sanitize string for printing'''
return s.replace('\n',' ')
def check_format_specifiers(source, translation, errors):
source_f = split_format_specifiers(find_format_specifiers(source))
# assert that no source messages contain both Qt and strprintf format specifiers
# if this fails, go change the source as this is hacky and confusing!
assert(not(source_f[0] and source_f[1]))
try:
translation_f = split_format_specifiers(find_format_specifiers(translation))
except IndexError:
errors.append("Parse error in translation '%s'" % sanitize_string(translation))
return False
else:
if source_f != translation_f:
errors.append("Mismatch between '%s' and '%s'" % (sanitize_string(source), sanitize_string(translation)))
return False
return True
def all_ts_files(suffix=''):
for filename in os.listdir(LOCALE_DIR):
# process only language files, and do not process source language
if not filename.endswith('.ts'+suffix) or filename == SOURCE_LANG+suffix:
continue
if suffix: # remove provided suffix
filename = filename[0:-len(suffix)]
filepath = os.path.join(LOCALE_DIR, filename)
yield(filename, filepath)
FIX_RE = re.compile(b'[\x00-\x09\x0b\x0c\x0e-\x1f]')
def remove_invalid_characters(s):
'''Remove invalid characters from translation string'''
return FIX_RE.sub(b'', s)
# Override cdata escape function to make our output match Qt's (optional, just for cleaner diffs for
# comparison, disable by default)
_orig_escape_cdata = None
def escape_cdata(text):
text = _orig_escape_cdata(text)
text = text.replace("'", ''')
text = text.replace('"', '"')
return text
def postprocess_translations(reduce_diff_hacks=False):
print('Checking and postprocessing...')
if reduce_diff_hacks:
global _orig_escape_cdata
_orig_escape_cdata = ET._escape_cdata
ET._escape_cdata = escape_cdata
for (filename,filepath) in all_ts_files():
os.rename(filepath, filepath+'.orig')
have_errors = False<|fim▁hole|> data = f.read()
# remove control characters; this must be done over the entire file otherwise the XML parser will fail
data = remove_invalid_characters(data)
tree = ET.parse(io.BytesIO(data), parser=parser)
# iterate over all messages in file
root = tree.getroot()
for context in root.findall('context'):
for message in context.findall('message'):
numerus = message.get('numerus') == 'yes'
source = message.find('source').text
translation_node = message.find('translation')
# pick all numerusforms
if numerus:
translations = [i.text for i in translation_node.findall('numerusform')]
else:
translations = [translation_node.text]
for translation in translations:
if translation is None:
continue
errors = []
valid = check_format_specifiers(source, translation, errors)
for error in errors:
print('%s: %s' % (filename, error))
if not valid: # set type to unfinished and clear string if invalid
translation_node.clear()
translation_node.set('type', 'unfinished')
have_errors = True
# Remove location tags
for location in message.findall('location'):
message.remove(location)
# Remove entire message if it is an unfinished translation
if translation_node.get('type') == 'unfinished':
context.remove(message)
# write fixed-up tree
# if diff reduction requested, replace some XML to 'sanitize' to qt formatting
if reduce_diff_hacks:
out = io.BytesIO()
tree.write(out, encoding='utf-8')
out = out.getvalue()
out = out.replace(b' />', b'/>')
with open(filepath, 'wb') as f:
f.write(out)
else:
tree.write(filepath, encoding='utf-8')
return have_errors
if __name__ == '__main__':
check_at_repository_root()
fetch_all_translations()
postprocess_translations()<|fim▁end|>
|
for (filename,filepath) in all_ts_files('.orig'):
# pre-fixups to cope with transifex output
parser = ET.XMLParser(encoding='utf-8') # need to override encoding because 'utf8' is not understood only 'utf-8'
with open(filepath + '.orig', 'rb') as f:
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*-coding:utf-8-*-
<|fim▁hole|>__all__ = ['database', 'guide', 'gff', 'uniprot']<|fim▁end|>
| |
<|file_name|>F9.py<|end_file_name|><|fim▁begin|>import fechbase
class Records(fechbase.RecordsBase):
def __init__(self):
fechbase.RecordsBase.__init__(self)
self.fields = [
{'name': 'FORM TYPE', 'number': '1'},
{'name': 'FILER COMMITTEE ID NUMBER', 'number': '2'},
{'name': 'ENTITY TYPE', 'number': '3'},
{'name': 'ORGANIZATION NAME', 'number': '4'},
{'name': 'INDIVIDUAL LAST NAME', 'number': '5'},
{'name': 'INDIVIDUAL FIRST NAME', 'number': '6'},
{'name': 'INDIVIDUAL MIDDLE NAME', 'number': '7'},
{'name': 'INDIVIDUAL PREFIX', 'number': '8'},
{'name': 'INDIVIDUAL SUFFIX', 'number': '9'},
{'name': 'CHANGE OF ADDRESS', 'number': '10'},
{'name': 'STREET 1', 'number': '11'},
{'name': 'STREET 2', 'number': '12'},
{'name': 'CITY', 'number': '13'},
{'name': 'STATE', 'number': '14'},
{'name': 'ZIP', 'number': '15'},
{'name': 'INDIVIDUAL EMPLOYER', 'number': '16'},
{'name': 'INDIVIDUAL OCCUPATION', 'number': '17'},
{'name': 'COVERAGE FROM DATE', 'number': '18'},
{'name': 'COVERAGE THROUGH DATE', 'number': '19'},
{'name': 'DATE OF PUBLIC DISTRIBUTION', 'number': '20'},
{'name': 'COMMUNICATION TITLE', 'number': '21'},
{'name': 'FILER CODE', 'number': '22'},
{'name': 'FILER CODE DESCRIPTION', 'number': '23'},
{'name': 'SEGREGATED BANK ACCOUNT', 'number': '24'},
{'name': 'CUSTODIAN LAST NAME', 'number': '25'},
{'name': 'CUSTODIAN FIRST NAME', 'number': '26'},
{'name': 'CUSTODIAN MIDDLE NAME', 'number': '27'},
{'name': 'CUSTODIAN PREFIX', 'number': '28'},<|fim▁hole|> {'name': 'CUSTODIAN STREET 1', 'number': '30'},
{'name': 'CUSTODIAN STREET 2', 'number': '31'},
{'name': 'CUSTODIAN CITY', 'number': '32'},
{'name': 'CUSTODIAN STATE', 'number': '33'},
{'name': 'CUSTODIAN ZIP', 'number': '34'},
{'name': 'CUSTODIAN EMPLOYER', 'number': '35'},
{'name': 'CUSTODIAN OCCUPATION', 'number': '36'},
{'name': 'TOTAL DONATIONS THIS STATEMENT', 'number': '37-9.'},
{'name': 'TOTAL DISB./OBLIG. THIS STATEMENT', 'number': '38-10.'},
{'name': 'PERSON COMPLETING LAST NAME', 'number': '39'},
{'name': 'PERSON COMPLETING FIRST NAME', 'number': '40'},
{'name': 'PERSON COMPLETING MIDDLE NAME', 'number': '41'},
{'name': 'PERSON COMPLETING PREFIX', 'number': '42'},
{'name': 'PERSON COMPLETING SUFFIX', 'number': '43'},
{'name': 'DATE SIGNED', 'number': '44'},
]
self.fields_names = self.hash_names(self.fields)<|fim▁end|>
|
{'name': 'CUSTODIAN SUFFIX', 'number': '29'},
|
<|file_name|>renderer-texture.rs<|end_file_name|><|fim▁begin|>extern crate sdl2;
use sdl2::pixels::PixelFormatEnum;
use sdl2::rect::Rect;
use sdl2::event::Event;
use sdl2::keyboard::Keycode;
pub fn main() {
let sdl_context = sdl2::init().unwrap();
let video_subsystem = sdl_context.video().unwrap();
let window = video_subsystem.window("rust-sdl2 demo: Video", 800, 600)
.position_centered()
.opengl()
.build()
.unwrap();
let mut renderer = window.renderer().build().unwrap();<|fim▁hole|>
let mut texture = renderer.create_texture_streaming(
PixelFormatEnum::RGB24, 256, 256).unwrap();
// Create a red-green gradient
texture.with_lock(None, |buffer: &mut [u8], pitch: usize| {
for y in 0..256 {
for x in 0..256 {
let offset = y*pitch + x*3;
buffer[offset + 0] = x as u8;
buffer[offset + 1] = y as u8;
buffer[offset + 2] = 0;
}
}
}).unwrap();
renderer.clear();
renderer.copy(&texture, None, Some(Rect::new(100, 100, 256, 256))).unwrap();
renderer.copy_ex(&texture, None,
Some(Rect::new(450, 100, 256, 256)), 30.0, None, false, false).unwrap();
renderer.present();
let mut event_pump = sdl_context.event_pump().unwrap();
'running: loop {
for event in event_pump.poll_iter() {
match event {
Event::Quit {..}
| Event::KeyDown { keycode: Some(Keycode::Escape), .. } => {
break 'running
},
_ => {}
}
}
// The rest of the game loop goes here...
}
}<|fim▁end|>
| |
<|file_name|>test_carrot.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit Tests for remote procedure calls using carrot<|fim▁hole|>from nova.rpc import impl_carrot
from nova.tests.rpc import common
LOG = logging.getLogger('nova.tests.rpc')
class RpcCarrotTestCase(common._BaseRpcTestCase):
def setUp(self):
self.rpc = impl_carrot
super(RpcCarrotTestCase, self).setUp()
def tearDown(self):
super(RpcCarrotTestCase, self).tearDown()
def test_connectionpool_single(self):
"""Test that ConnectionPool recycles a single connection."""
conn1 = self.rpc.ConnectionPool.get()
self.rpc.ConnectionPool.put(conn1)
conn2 = self.rpc.ConnectionPool.get()
self.rpc.ConnectionPool.put(conn2)
self.assertEqual(conn1, conn2)<|fim▁end|>
|
"""
from nova import log as logging
|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf import settings
from django.conf.urls import patterns, url
from haystack.views import SearchView
from elections.forms import ElectionForm
from elections.views import ElectionsSearchByTagView, HomeView, ElectionDetailView,\
CandidateDetailView, SoulMateDetailView, FaceToFaceView, AreaDetailView, \
CandidateFlatPageDetailView, ElectionRankingView, QuestionsPerCandidateView
from sitemaps import *
from django.views.decorators.cache import cache_page
from elections.preguntales_views import MessageDetailView, ElectionAskCreateView, AnswerWebHook
media_root = getattr(settings, 'MEDIA_ROOT', '/')
new_answer_endpoint = r"^new_answer/%s/?$" % (settings.NEW_ANSWER_ENDPOINT)
sitemaps = {
'elections': ElectionsSitemap,
'candidates': CandidatesSitemap,
}
urlpatterns = patterns('',
url(new_answer_endpoint,AnswerWebHook.as_view(), name='new_answer_endpoint' ),
url(r'^/?$', cache_page(60 * settings.CACHE_MINUTES)(HomeView.as_view(template_name='elections/home.html')), name='home'),
url(r'^buscar/?$', SearchView(template='search.html',
form_class=ElectionForm), name='search'),
url(r'^busqueda_tags/?$', ElectionsSearchByTagView.as_view(), name='tags_search'),
url(r'^election/(?P<slug>[-\w]+)/?$',
cache_page(60 * settings.CACHE_MINUTES)(ElectionDetailView.as_view(template_name='elections/election_detail.html')),
name='election_view'),
url(r'^election/(?P<slug>[-\w]+)/questionary/?$',
cache_page(60 * settings.CACHE_MINUTES)(ElectionDetailView.as_view(template_name='elections/election_questionary.html')),
name='questionary_detail_view'),
#compare two candidates
url(r'^election/(?P<slug>[-\w]+)/face-to-face/(?P<slug_candidate_one>[-\w]+)/(?P<slug_candidate_two>[-\w]+)/?$',
cache_page(60 * settings.CACHE_MINUTES)(FaceToFaceView.as_view(template_name='elections/compare_candidates.html')),
name='face_to_face_two_candidates_detail_view'),
#one candidate for compare
url(r'^election/(?P<slug>[-\w]+)/face-to-face/(?P<slug_candidate_one>[-\w]+)/?$',
cache_page(60 * settings.CACHE_MINUTES)(ElectionDetailView.as_view(template_name='elections/compare_candidates.html')),
name='face_to_face_one_candidate_detail_view'),
#no one candidate
url(r'^election/(?P<slug>[-\w]+)/face-to-face/?$',
cache_page(60 * settings.CACHE_MINUTES)(ElectionDetailView.as_view(template_name='elections/compare_candidates.html')),
name='face_to_face_no_candidate_detail_view'),
#soulmate
url(r'^election/(?P<slug>[-\w]+)/soul-mate/?$',
SoulMateDetailView.as_view(template_name='elections/soulmate_candidate.html'),
name='soul_mate_detail_view'),
# Preguntales
url(r'^election/(?P<election_slug>[-\w]+)/messages/(?P<pk>\d+)/?$',
MessageDetailView.as_view(template_name='elections/message_detail.html'),
name='message_detail'),
#ranking
url(r'^election/(?P<slug>[-\w]+)/ranking/?$',
cache_page(60 * settings.CACHE_MINUTES)(ElectionRankingView.as_view(template_name='elections/ranking_candidates.html')),
name='ranking_view'),
url(r'^election/(?P<election_slug>[-\w]+)/(?P<slug>[-\w]+)/questions?$',
QuestionsPerCandidateView.as_view(template_name='elections/questions_per_candidate.html'),
name='questions_per_candidate'
),
#ask
url(r'^election/(?P<slug>[-\w]+)/ask/?$',
ElectionAskCreateView.as_view(template_name='elections/ask_candidate.html'),
name='ask_detail_view'),
url(r'^election/(?P<election_slug>[-\w]+)/(?P<slug>[-\w]+)/?$',
cache_page(60 * settings.CACHE_MINUTES)(CandidateDetailView.as_view(template_name='elections/candidate_detail.html')),
name='candidate_detail_view'
),
# End Preguntales
url(r'^election/(?P<election_slug>[-\w]+)/(?P<slug>[-\w]+)/(?P<url>[-\w]+)/?$',
cache_page(60 * settings.CACHE_MINUTES)(CandidateFlatPageDetailView.as_view()),
name='candidate_flatpage'
),
url(r'^election/(?P<slug>[-\w]+)/extra_info.html$',
ElectionDetailView.as_view(template_name='elections/extra_info.html'),
name='election_extra_info'),
url(r'^area/(?P<slug>[-\w]+)/?$',<|fim▁hole|> name='area'),
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
)
urlpatterns += patterns('',
url(r'^cache/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': media_root})
)<|fim▁end|>
|
AreaDetailView.as_view(template_name='elections/area.html'),
|
<|file_name|>17b.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import sys
def main(args):
#cnt = 2017
cnt = 50000000
n = 3
n = 345
size = 0
buf = [0]
pos = 0
at_1 = None
for i in range(cnt):
pos = (pos + n) % (i+1)
if pos == 0:
at_1 = i+1
pos += 1
<|fim▁hole|><|fim▁end|>
|
print(at_1)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>#[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::BRR {
#[doc = r" Modifies the contents of the register"]
#[inline(always)]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline(always)]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline(always)]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);<|fim▁hole|> }
#[doc = r" Writes the reset value to the register"]
#[inline(always)]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = r" Value of the field"]
pub struct DIV_MANTISSAR {
bits: u16,
}
impl DIV_MANTISSAR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u16 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct DIV_FRACTIONR {
bits: u8,
}
impl DIV_FRACTIONR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Proxy"]
pub struct _DIV_MANTISSAW<'a> {
w: &'a mut W,
}
impl<'a> _DIV_MANTISSAW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u16) -> &'a mut W {
const MASK: u16 = 4095;
const OFFSET: u8 = 4;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _DIV_FRACTIONW<'a> {
w: &'a mut W,
}
impl<'a> _DIV_FRACTIONW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 15;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bits 4:15 - mantissa of USARTDIV"]
#[inline(always)]
pub fn div_mantissa(&self) -> DIV_MANTISSAR {
let bits = {
const MASK: u16 = 4095;
const OFFSET: u8 = 4;
((self.bits >> OFFSET) & MASK as u32) as u16
};
DIV_MANTISSAR { bits }
}
#[doc = "Bits 0:3 - fraction of USARTDIV"]
#[inline(always)]
pub fn div_fraction(&self) -> DIV_FRACTIONR {
let bits = {
const MASK: u8 = 15;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) as u8
};
DIV_FRACTIONR { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline(always)]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bits 4:15 - mantissa of USARTDIV"]
#[inline(always)]
pub fn div_mantissa(&mut self) -> _DIV_MANTISSAW {
_DIV_MANTISSAW { w: self }
}
#[doc = "Bits 0:3 - fraction of USARTDIV"]
#[inline(always)]
pub fn div_fraction(&mut self) -> _DIV_FRACTIONW {
_DIV_FRACTIONW { w: self }
}
}<|fim▁end|>
| |
<|file_name|>NedGenerator.py<|end_file_name|><|fim▁begin|>__author__ = 'sianwahl'
from string import Template
class NedGenerator:
def __init__(self, number_of_channels):
self.number_of_channels = number_of_channels
def generate(self):
return self._generate_tuplefeeder_ned(), self._generate_m2etis_ned()
def _generate_tuplefeeder_ned(self):
template = """
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see http://www.gnu.org/licenses/.
//
package m2etis.applications.TupleFeeder;
import oversim.common.BaseApp;
import oversim.common.ITier;
simple TupleFeeder extends BaseApp
{
parameters:
@class(TupleFeeder);
int largestKey; // largest key we can pick
int numSubs;
int numPubs;
int numPubSubs;
int numRend;
int channelCount;
double stopAvg;
int waitForSubscribe @unit(s);
int waitForPublish @unit(s);
$channel_specific_parameters
}
module TupleFeederModules like ITier
{
parameters:
@display("i=block/segm");
gates:
input from_lowerTier; // gate from the lower tier
input from_upperTier; // gate from the upper tier
output to_lowerTier; // gate to the lower tier
output to_upperTier; // gate to the upper tier
input trace_in; // gate for trace file commands
input udpIn;
output udpOut;
input tcpIn;
output tcpOut;<|fim▁hole|> connections allowunconnected:
from_lowerTier --> tupleFeeder.from_lowerTier;
to_lowerTier <-- tupleFeeder.to_lowerTier;
trace_in --> tupleFeeder.trace_in;
udpIn --> tupleFeeder.udpIn;
udpOut <-- tupleFeeder.udpOut;
}
"""
channel_specific_parameters = ""
for i in range(0, self.number_of_channels):
channel_specific_parameters += "int numToSend_" + str(i) + ";\n\t"
channel_specific_parameters += "int burstAmount_" + str(i) + ";\n\t"
channel_specific_parameters += "int burstFrequency_" + str(i) + " @unit(s);\n\t"
channel_specific_parameters += "int burstDuration_" + str(i) + " @unit(s);\n\t"
channel_specific_parameters += "int chanceToUnsubscribe_" + str(i) + ";\n\t"
channel_specific_parameters += "int timeToUnsubscribe_" + str(i) + " @unit(s);\n\t"
channel_specific_parameters += "int timeToSubscribe_" + str(i) + " @unit(s);\n\t"
channel_specific_parameters += "int dropChance_" + str(i) + ";\n\t"
channel_specific_parameters += "bool compensateDrop_" + str(i) + ";\n\t"
channel_specific_parameters += "double fluctuation_" + str(i) + ";\n\t"
template_prepared = Template(template)
result = template_prepared.substitute(
channel_specific_parameters=channel_specific_parameters
)
return result
def _generate_m2etis_ned(self):
template = """
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see http://www.gnu.org/licenses/.
//
package m2etis.middleware;
import oversim.common.BaseApp;
import oversim.common.ITier;
//
// TODO auto-generated type
//
simple M2etisAdapter extends BaseApp
{
parameters:
@class(M2etisAdapter);
$disable_overlays
int packetSize @unit(B);
int queueSize @unit(B);
int channelCount;
int downstream @unit(bps);
int upstream @unit(bps);
int headerSize @unit(B);
int startRoot;
int endRoot;
int rendezvousNode;
double stopAvg;
double simulationResolution @unit(s);
bool queueDisabled;
}
module M2etisPubSub like ITier
{
gates:
input udpIn; // gate from the UDP layer
output udpOut; // gate to the UDP layer
input from_lowerTier; // gate from the lower tier
input from_upperTier; // gate from the upper tier
output to_lowerTier; // gate to the lower tier
output to_upperTier; // gate to the upper tier
input trace_in; // gate for trace file commands
input tcpIn; // gate from the TCP layer
output tcpOut; // gate to the TCP layer
submodules:
m2etis: M2etisAdapter;
connections allowunconnected:
from_lowerTier --> m2etis.from_lowerTier;
to_lowerTier <-- m2etis.to_lowerTier;
from_upperTier --> m2etis.from_upperTier;
to_upperTier <-- m2etis.to_upperTier;
udpIn --> m2etis.udpIn;
udpOut <-- m2etis.udpOut;
}
"""
disable_overlays = ""
for i in range(0, self.number_of_channels):
disable_overlays += "bool disableOverlay_" + str(i) + ";\n\t"
template_prepared = Template(template)
result = template_prepared.substitute(
disable_overlays=disable_overlays
)
return result<|fim▁end|>
|
submodules:
tupleFeeder: TupleFeeder;
|
<|file_name|>run_electrum_creditbit_server.py<|end_file_name|><|fim▁begin|><|fim▁hole|># it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
import argparse
import ConfigParser
import logging
import socket
import sys
import time
import threading
import json
import os
import imp
if os.path.dirname(os.path.realpath(__file__)) == os.getcwd():
imp.load_module('electrumcreditbitserver', *imp.find_module('src'))
from electrumcreditbitserver import storage, networks, utils
from electrumcreditbitserver.processor import Dispatcher, print_log
from electrumcreditbitserver.server_processor import ServerProcessor
from electrumcreditbitserver.blockchain_processor import BlockchainProcessor
from electrumcreditbitserver.stratum_tcp import TcpServer
from electrumcreditbitserver.stratum_http import HttpServer
logging.basicConfig()
if sys.maxsize <= 2**32:
print "Warning: it looks like you are using a 32bit system. You may experience crashes caused by mmap"
if os.getuid() == 0:
print "Do not run this program as root!"
print "Run the install script to create a non-privileged user."
sys.exit()
def attempt_read_config(config, filename):
try:
with open(filename, 'r') as f:
config.readfp(f)
except IOError:
pass
def load_banner(config):
try:
with open(config.get('server', 'banner_file'), 'r') as f:
config.set('server', 'banner', f.read())
except IOError:
pass
def setup_network_params(config):
type = config.get('network', 'type')
params = networks.params.get(type)
utils.PUBKEY_ADDRESS = int(params.get('pubkey_address'))
utils.SCRIPT_ADDRESS = int(params.get('script_address'))
storage.GENESIS_HASH = params.get('genesis_hash')
if config.has_option('network', 'pubkey_address'):
utils.PUBKEY_ADDRESS = config.getint('network', 'pubkey_address')
if config.has_option('network', 'script_address'):
utils.SCRIPT_ADDRESS = config.getint('network', 'script_address')
if config.has_option('network', 'genesis_hash'):
storage.GENESIS_HASH = config.get('network', 'genesis_hash')
def create_config(filename=None):
    """Build the server configuration.

    Starts from hard-coded defaults, then overlays the config file.  When
    *filename* is None, '/etc/electrum-creditbit.conf' is tried first, then
    './electrum-creditbit.conf'; if neither exists the process exits with
    status 1.  The banner file is loaded last.  Returns the ConfigParser.
    """
    config = ConfigParser.ConfigParser()
    # set some defaults, which will be overwritten by the config file
    config.add_section('server')
    config.set('server', 'banner', 'Welcome to Creditbit Electrum!')
    config.set('server', 'banner_file', '/etc/electrum-creditbit.banner')
    config.set('server', 'host', 'localhost')
    config.set('server', 'electrum_rpc_port', '8002')
    config.set('server', 'report_host', '')
    config.set('server', 'stratum_tcp_port', '50001')
    config.set('server', 'stratum_http_port', '8081')
    config.set('server', 'stratum_tcp_ssl_port', '50002')
    config.set('server', 'stratum_http_ssl_port', '8082')
    # report_* ports are what gets advertised to peers (may differ behind NAT)
    config.set('server', 'report_stratum_tcp_port', '50001')
    config.set('server', 'report_stratum_http_port', '8081')
    config.set('server', 'report_stratum_tcp_ssl_port', '50002')
    config.set('server', 'report_stratum_http_ssl_port', '8082')
    config.set('server', 'ssl_certfile', '')
    config.set('server', 'ssl_keyfile', '')
    config.set('server', 'irc', 'no')
    config.set('server', 'irc_nick', '')
    config.set('server', 'coin', 'creditbit')
    config.set('server', 'logfile', '/var/log/electrum-creditbit.log')
    config.set('server', 'donation_address', '')
    config.set('server', 'max_subscriptions', '10000')

    config.add_section('leveldb')
    config.set('leveldb', 'path', '/dev/shm/electrum-creditbit_db')
    config.set('leveldb', 'pruning_limit', '100')
    # cache sizes are stored as strings (ConfigParser values are text)
    config.set('leveldb', 'utxo_cache', str(64*1024*1024))
    config.set('leveldb', 'hist_cache', str(128*1024*1024))
    config.set('leveldb', 'addr_cache', str(16*1024*1024))
    config.set('leveldb', 'profiler', 'no')

    # set network parameters
    config.add_section('network')
    config.set('network', 'type', 'creditbit_main')

    # try to find the config file in the default paths
    if not filename:
        for path in ('/etc/', ''):
            filename = path + 'electrum-creditbit.conf'
            if os.path.isfile(filename):
                break

    if not os.path.isfile(filename):
        print 'could not find electrum configuration file "%s"' % filename
        sys.exit(1)

    attempt_read_config(config, filename)

    load_banner(config)

    return config
def run_rpc_command(params, electrum_rpc_port):
    """Invoke one command on the local server's XML-RPC control port.

    params[0] is the method name, the remaining entries its arguments.
    'sessions' gets a tabular rendering; every other reply is printed as
    pretty JSON.  Raises socket.error when no server is listening.
    """
    cmd = params[0]
    import xmlrpclib
    server = xmlrpclib.ServerProxy('http://localhost:%d' % electrum_rpc_port)
    func = getattr(server, cmd)
    r = func(*params[1:])
    if cmd == 'sessions':
        now = time.time()
        print 'type address sub version time'
        for item in r:
            print '%4s %21s %3s %7s %.2f' % (item.get('name'),
                                             item.get('address'),
                                             item.get('subscriptions'),
                                             item.get('version'),
                                             (now - item.get('time')),
                                             )
    else:
        print json.dumps(r, indent=4, sort_keys=True)
def cmd_banner_update():
    """RPC handler: re-read the banner file without restarting the server."""
    load_banner(dispatcher.shared.config)
    return True
def cmd_getinfo():
    """RPC handler: summary counters for the running server."""
    return {
        'blocks': chain_proc.storage.height,
        'peers': len(server_proc.peers),
        'sessions': len(dispatcher.request_dispatcher.get_sessions()),
        'watched': len(chain_proc.watched_addresses),
        'cached': len(chain_proc.history_cache),
    }
def cmd_sessions():
    """RPC handler: per-session details for every connected client."""
    return map(lambda s: {"time": s.time,
                          "name": s.name,
                          "address": s.address,
                          "version": s.version,
                          "subscriptions": len(s.subscriptions)},
               dispatcher.request_dispatcher.get_sessions())
def cmd_numsessions():
    """RPC handler: number of currently connected client sessions."""
    return len(dispatcher.request_dispatcher.get_sessions())
def cmd_peers():
    """RPC handler: identifiers of the known peer servers."""
    return server_proc.peers.keys()
def cmd_numpeers():
    """RPC handler: number of known peer servers."""
    return len(server_proc.peers)
def cmd_debug(s):
    """RPC handler: evaluate expression *s* in-process and return str() of it.

    SECURITY: `eval` on the supplied string is deliberate here (debug hook),
    but it executes arbitrary code -- the RPC port must stay bound to
    localhost only.  Returns None when *s* is empty.
    """
    import traceback
    from guppy import hpy;  # third-party heap profiler
    hp = hpy()  # made available to the evaluated expression for heap dumps
    if s:
        try:
            result = str(eval(s))
        except:
            # Deliberately broad: any failure of the debug expression is
            # reported back as a one-line summary instead of propagating.
            err_lines = traceback.format_exc().splitlines()
            result = '%s | %s' % (err_lines[-3], err_lines[-1])
        return result
def get_port(config, name):
    """Return the integer value of option *name* in the [server] section.

    Returns None when the section or option is missing or the value is not
    a valid integer; callers treat a None port as "transport disabled".
    """
    try:
        return config.getint('server', name)
    except (ConfigParser.Error, ValueError):
        # Narrowed from a bare `except:` so genuine bugs (AttributeError,
        # KeyboardInterrupt, ...) are no longer silently swallowed.
        return None
# global
# Process-wide singletons: populated by start_server() and read by the
# cmd_* RPC handlers and stop_server().
shared = None       # shared state / stop flag owned by the Dispatcher
chain_proc = None   # BlockchainProcessor instance
server_proc = None  # ServerProcessor instance
dispatcher = None   # Dispatcher hub routing requests to processors
transports = []     # TCP/HTTP (plain and SSL) servers started below
def start_server(config):
    """Create the dispatcher, processors and transports, and start them.

    Populates the module-level globals (shared, chain_proc, server_proc,
    dispatcher, transports) and installs handlers so SIGTERM/SIGHUP/SIGQUIT
    request a clean shutdown via shared.stop().
    """
    global shared, chain_proc, server_proc, dispatcher

    logfile = config.get('server', 'logfile')
    utils.init_logger(logfile)
    host = config.get('server', 'host')
    stratum_tcp_port = get_port(config, 'stratum_tcp_port')
    stratum_http_port = get_port(config, 'stratum_http_port')
    stratum_tcp_ssl_port = get_port(config, 'stratum_tcp_ssl_port')
    stratum_http_ssl_port = get_port(config, 'stratum_http_ssl_port')
    ssl_certfile = config.get('server', 'ssl_certfile')
    ssl_keyfile = config.get('server', 'ssl_keyfile')

    setup_network_params(config)

    # Bug fix: this used `ssl_certfile is ''`, an *identity* comparison that
    # only works by accident of CPython small-string interning.  Compare by
    # value instead: without both a certificate and a key, disable SSL.
    if not ssl_certfile or not ssl_keyfile:
        stratum_tcp_ssl_port = None
        stratum_http_ssl_port = None

    print_log("Starting Electrum server on", host)

    # Create hub
    dispatcher = Dispatcher(config)
    shared = dispatcher.shared

    # handle termination signals
    import signal
    def handler(signum = None, frame = None):
        print_log('Signal handler called with signal', signum)
        shared.stop()
    for sig in [signal.SIGTERM, signal.SIGHUP, signal.SIGQUIT]:
        signal.signal(sig, handler)

    # Create and register processors
    chain_proc = BlockchainProcessor(config, shared)
    dispatcher.register('blockchain', chain_proc)

    server_proc = ServerProcessor(config, shared)
    dispatcher.register('server', server_proc)

    # Create various transports we need; each runs in its own thread.
    if stratum_tcp_port:
        tcp_server = TcpServer(dispatcher, host, stratum_tcp_port, False, None, None)
        transports.append(tcp_server)

    if stratum_tcp_ssl_port:
        tcp_server = TcpServer(dispatcher, host, stratum_tcp_ssl_port, True, ssl_certfile, ssl_keyfile)
        transports.append(tcp_server)

    if stratum_http_port:
        http_server = HttpServer(dispatcher, host, stratum_http_port, False, None, None)
        transports.append(http_server)

    if stratum_http_ssl_port:
        http_server = HttpServer(dispatcher, host, stratum_http_ssl_port, True, ssl_certfile, ssl_keyfile)
        transports.append(http_server)

    for server in transports:
        server.start()
def stop_server():
    """Signal shutdown and wait for the processor threads to finish."""
    shared.stop()
    server_proc.join()
    chain_proc.join()
    print_log("Electrum Server stopped")
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--conf', metavar='path', default=None, help='specify a configuration file')
    parser.add_argument('command', nargs='*', default=[], help='send a command to the server')
    args = parser.parse_args()
    config = create_config(args.conf)
    electrum_rpc_port = get_port(config, 'electrum_rpc_port')

    # With a command argument we act as an RPC *client* against a server
    # that is already running, then exit.
    if len(args.command) >= 1:
        try:
            run_rpc_command(args.command, electrum_rpc_port)
        except socket.error:
            print "server not running"
            sys.exit(1)
        sys.exit(0)

    # No command given: probe the control port to avoid starting twice.
    try:
        run_rpc_command(['getpid'], electrum_rpc_port)
        is_running = True
    except socket.error:
        is_running = False

    if is_running:
        print "server already running"
        sys.exit(1)

    start_server(config)

    # Local XML-RPC control interface, consumed by the client mode above.
    from SimpleXMLRPCServer import SimpleXMLRPCServer
    server = SimpleXMLRPCServer(('localhost', electrum_rpc_port), allow_none=True, logRequests=False)
    server.register_function(lambda: os.getpid(), 'getpid')
    server.register_function(shared.stop, 'stop')
    server.register_function(cmd_getinfo, 'getinfo')
    server.register_function(cmd_sessions, 'sessions')
    server.register_function(cmd_numsessions, 'numsessions')
    server.register_function(cmd_peers, 'peers')
    server.register_function(cmd_numpeers, 'numpeers')
    server.register_function(cmd_debug, 'debug')
    server.register_function(cmd_banner_update, 'banner_update')
    # 1s socket timeout lets the loop poll the stop flag regularly.
    server.socket.settimeout(1)
    while not shared.stopped():
        try:
            server.handle_request()
        except socket.timeout:
            continue
        except:
            # NOTE(review): bare except -- any other error triggers a clean
            # shutdown but also swallows the traceback; presumably intended
            # as a last-resort stop, worth confirming.
            stop_server()
|
#!/usr/bin/env python
# Copyright(C) 2012 thomasv@gitorious
# This program is free software: you can redistribute it and/or modify
|
<|file_name|>resolve_lifetime.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
* Name resolution for lifetimes.
*
* Name resolution for lifetimes follows MUCH simpler rules than the
* full resolve. For example, lifetime names are never exported or
* used between functions, and they operate in a purely top-down
* way. Therefore we break lifetime name resolution into a separate pass.
*/
use driver::session::Session;
use middle::subst;
use syntax::ast;
use syntax::codemap::Span;
use syntax::owned_slice::OwnedSlice;
use syntax::parse::token::special_idents;
use syntax::parse::token;
use syntax::print::pprust::{lifetime_to_string};
use syntax::visit;
use syntax::visit::Visitor;
use util::nodemap::NodeMap;
// Resolution result for one lifetime reference: which kind of region binder
// it names, plus the indices needed to reconstruct the region later.
#[deriving(Clone, PartialEq, Eq, Hash, Encodable, Decodable, Show)]
pub enum DefRegion {
    // The built-in 'static lifetime.
    DefStaticRegion,
    // Early-bound: part of the item's substitutions (space + index).
    DefEarlyBoundRegion(/* space */ subst::ParamSpace,
                        /* index */ uint,
                        /* lifetime decl */ ast::NodeId),
    // Late-bound: resolved by name against a binder, with de Bruijn depth.
    DefLateBoundRegion(/* binder_id */ ast::NodeId,
                       /* depth */ uint,
                       /* lifetime decl */ ast::NodeId),
    // Free region scoped to an enclosing block.
    DefFreeRegion(/* block scope */ ast::NodeId,
                  /* lifetime decl */ ast::NodeId),
}

// maps the id of each lifetime reference to the lifetime decl
// that it corresponds to
pub type NamedRegionMap = NodeMap<DefRegion>;
// Returns an instance of some type that implements std::fmt::Show;
// used only for debug-logging lifetime names.
fn lifetime_show(lt_name: &ast::Name) -> token::InternedString {
    token::get_name(*lt_name)
}
// Walker state: the session (for errors), the output map being built, and
// the current position in the lexical scope chain.
struct LifetimeContext<'a> {
    sess: &'a Session,
    named_region_map: &'a mut NamedRegionMap,
    scope: Scope<'a>
}
// Linked list of lexical scopes, threaded through stack frames of the walk.
enum ScopeChain<'a> {
    /// EarlyScope(i, ['a, 'b, ...], s) extends s with early-bound
    /// lifetimes, assigning indexes 'a => i, 'b => i+1, ... etc.
    EarlyScope(subst::ParamSpace, &'a Vec<ast::LifetimeDef>, Scope<'a>),
    /// LateScope(binder_id, ['a, 'b, ...], s) extends s with late-bound
    /// lifetimes introduced by the declaration binder_id.
    LateScope(ast::NodeId, &'a Vec<ast::LifetimeDef>, Scope<'a>),
    /// lifetimes introduced by items within a code block are scoped
    /// to that block.
    BlockScope(ast::NodeId, Scope<'a>),
    RootScope
}

type Scope<'a> = &'a ScopeChain<'a>;

// Shared terminator of every scope chain.
static ROOT_SCOPE: ScopeChain<'static> = RootScope;
// Entry point: walk the whole crate and produce the lifetime-reference ->
// region-definition map.  Aborts compilation if any reference failed to
// resolve (errors were reported during the walk).
pub fn krate(sess: &Session, krate: &ast::Crate) -> NamedRegionMap {
    let mut named_region_map = NodeMap::new();
    visit::walk_crate(&mut LifetimeContext {
        sess: sess,
        named_region_map: &mut named_region_map,
        scope: &ROOT_SCOPE
    }, krate);
    sess.abort_if_errors();
    named_region_map
}
impl<'a, 'v> Visitor<'v> for LifetimeContext<'a> {
    // Items either reset to the root scope (fns handle their own lifetimes
    // in visit_fn; mods/statics bind none) or push an early-bound scope for
    // their generic lifetime parameters.
    fn visit_item(&mut self, item: &ast::Item) {
        let lifetimes = match item.node {
            ast::ItemFn(..) | // fn lifetimes get added in visit_fn below
            ast::ItemMod(..) |
            ast::ItemMac(..) |
            ast::ItemForeignMod(..) |
            ast::ItemStatic(..) => {
                self.with(|_, f| f(RootScope), |v| visit::walk_item(v, item));
                return;
            }
            ast::ItemTy(_, ref generics) |
            ast::ItemEnum(_, ref generics) |
            ast::ItemStruct(_, ref generics) |
            ast::ItemTrait(ref generics, _, _, _) => {
                self.with(|scope, f| {
                    f(EarlyScope(subst::TypeSpace,
                                 &generics.lifetimes,
                                 scope))
                }, |v| v.check_lifetime_defs(&generics.lifetimes));
                &generics.lifetimes
            }
            ast::ItemImpl(ref generics, _, _, _) => {
                self.with(|scope, f| {
                    f(EarlyScope(subst::TypeSpace,
                                 &generics.lifetimes,
                                 scope))
                }, |v| v.check_lifetime_defs(&generics.lifetimes));
                &generics.lifetimes
            }
        };
        // Item generics start a fresh chain rooted at ROOT_SCOPE: lifetimes
        // never leak across item boundaries.
        self.with(|_, f| f(EarlyScope(subst::TypeSpace, lifetimes, &ROOT_SCOPE)), |v| {
            debug!("entering scope {:?}", v.scope);
            v.check_lifetime_defs(lifetimes);
            visit::walk_item(v, item);
            debug!("exiting scope {:?}", v.scope);
        });
    }

    // fn items and methods must split early vs late bound lifetimes
    // (visit_fn_decl); closure bodies bind nothing new here.
    fn visit_fn(&mut self, fk: visit::FnKind<'v>, fd: &'v ast::FnDecl,
                b: &'v ast::Block, s: Span, n: ast::NodeId) {
        match fk {
            visit::FkItemFn(_, generics, _, _) |
            visit::FkMethod(_, generics, _) => {
                self.visit_fn_decl(n, generics, |v| visit::walk_fn(v, fk, fd, b, s))
            }
            visit::FkFnBlock(..) => {
                visit::walk_fn(self, fk, fd, b, s)
            }
        }
    }

    // Closure / proc / bare-fn *types* introduce their own late-bound
    // lifetimes, scoped to the type node itself.
    fn visit_ty(&mut self, ty: &ast::Ty) {
        let lifetimes = match ty.node {
            ast::TyClosure(ref c) | ast::TyProc(ref c) => &c.lifetimes,
            ast::TyBareFn(ref c) => &c.lifetimes,
            _ => return visit::walk_ty(self, ty)
        };

        self.with(|scope, f| f(LateScope(ty.id, lifetimes, scope)), |v| {
            v.check_lifetime_defs(lifetimes);
            debug!("pushing fn scope id={} due to type", ty.id);
            visit::walk_ty(v, ty);
            debug!("popping fn scope id={} due to type", ty.id);
        });
    }

    // Trait method signatures without bodies get the same early/late split.
    fn visit_ty_method(&mut self, m: &ast::TypeMethod) {
        self.visit_fn_decl(m.id, &m.generics, |v| visit::walk_ty_method(v, m))
    }

    // Items nested inside a block make their lifetimes *free*, scoped to
    // the block; BlockScope records which block.
    fn visit_block(&mut self, b: &ast::Block) {
        debug!("pushing block scope {}", b.id);
        self.with(|scope, f| f(BlockScope(b.id, scope)), |v| visit::walk_block(v, b));
        debug!("popping block scope {}", b.id);
    }

    // 'static resolves immediately; any other name walks the scope chain.
    fn visit_lifetime_ref(&mut self, lifetime_ref: &ast::Lifetime) {
        if lifetime_ref.name == special_idents::static_lifetime.name {
            self.insert_lifetime(lifetime_ref, DefStaticRegion);
            return;
        }
        self.resolve_lifetime_ref(lifetime_ref);
    }

    // Visit bounds and defaults of type parameters plus where-clause
    // predicates, so lifetime refs inside them get resolved.
    fn visit_generics(&mut self, generics: &ast::Generics) {
        for ty_param in generics.ty_params.iter() {
            self.visit_ty_param_bounds(&ty_param.bounds);
            match ty_param.default {
                Some(ref ty) => self.visit_ty(&**ty),
                None => {}
            }
        }
        for predicate in generics.where_clause.predicates.iter() {
            self.visit_ident(predicate.span, predicate.ident);
            self.visit_ty_param_bounds(&predicate.bounds);
        }
    }
}
impl<'a> LifetimeContext<'a> {
    // Run `f` with a fresh context whose scope chain has been extended by
    // `wrap_scope`; the new scope lives only for the duration of the call.
    fn with(&mut self, wrap_scope: |Scope, |ScopeChain||, f: |&mut LifetimeContext|) {
        let LifetimeContext { sess, ref mut named_region_map, scope} = *self;
        wrap_scope(scope, |scope1| f(&mut LifetimeContext {
            sess: sess,
            named_region_map: *named_region_map,
            scope: &scope1
        }))
    }

    // Resolve lifetime references appearing inside type-parameter bounds.
    fn visit_ty_param_bounds(&mut self,
                             bounds: &OwnedSlice<ast::TyParamBound>) {
        for bound in bounds.iter() {
            match *bound {
                ast::TraitTyParamBound(ref trait_ref) => {
                    self.visit_trait_ref(trait_ref);
                }
                ast::UnboxedFnTyParamBound(ref fn_decl) => {
                    self.visit_unboxed_fn_ty_param_bound(&**fn_decl);
                }
                ast::RegionTyParamBound(ref lifetime) => {
                    self.visit_lifetime_ref(lifetime);
                }
            }
        }
    }

    // A trait reference (e.g. `for<'a> Trait<'a>`) binds its own
    // late-bound lifetimes around its path.
    fn visit_trait_ref(&mut self, trait_ref: &ast::TraitRef) {
        self.with(|scope, f| {
            f(LateScope(trait_ref.ref_id, &trait_ref.lifetimes, scope))
        }, |v| {
            v.check_lifetime_defs(&trait_ref.lifetimes);
            for lifetime in trait_ref.lifetimes.iter() {
                v.visit_lifetime_decl(lifetime);
            }
            v.visit_path(&trait_ref.path, trait_ref.ref_id);
        })
    }

    // Unboxed-closure bounds likewise bind late around their fn signature.
    fn visit_unboxed_fn_ty_param_bound(&mut self,
                                       bound: &ast::UnboxedFnBound) {
        self.with(|scope, f| {
            f(LateScope(bound.ref_id, &bound.lifetimes, scope))
        }, |v| {
            for argument in bound.decl.inputs.iter() {
                v.visit_ty(&*argument.ty);
            }
            v.visit_ty(&*bound.decl.output);
        })
    }

    /// Visits self by adding a scope and handling recursive walk over the contents with `walk`.
    fn visit_fn_decl(&mut self,
                     n: ast::NodeId,
                     generics: &ast::Generics,
                     walk: |&mut LifetimeContext|) {
        /*!
         * Handles visiting fns and methods. These are a bit
         * complicated because we must distinguish early- vs late-bound
         * lifetime parameters. We do this by checking which lifetimes
         * appear within type bounds; those are early bound lifetimes,
         * and the rest are late bound.
         *
         * For example:
         *
         *    fn foo<'a,'b,'c,T:Trait<'b>>(...)
         *
         * Here `'a` and `'c` are late bound but `'b` is early
         * bound. Note that early- and late-bound lifetimes may be
         * interspersed together.
         *
         * If early bound lifetimes are present, we separate them into
         * their own list (and likewise for late bound). They will be
         * numbered sequentially, starting from the lowest index that
         * is already in scope (for a fn item, that will be 0, but for
         * a method it might not be). Late bound lifetimes are
         * resolved by name and associated with a binder id (`n`), so
         * the ordering is not important there.
         */
        let referenced_idents = early_bound_lifetime_names(generics);
        debug!("pushing fn scope id={} due to fn item/method\
               referenced_idents={:?}",
               n,
               referenced_idents.iter().map(lifetime_show).collect::<Vec<token::InternedString>>());
        let lifetimes = &generics.lifetimes;
        if referenced_idents.is_empty() {
            // All lifetimes are late bound: a single LateScope suffices.
            self.with(|scope, f| f(LateScope(n, lifetimes, scope)), |v| {
                v.check_lifetime_defs(lifetimes);
                walk(v);
            });
        } else {
            // Split into an EarlyScope (indexed) wrapping a LateScope
            // (by-name), preserving the early lifetimes' relative order.
            let (early, late) = lifetimes.clone().partition(
                |l| referenced_idents.iter().any(|&i| i == l.lifetime.name));

            self.with(|scope, f| f(EarlyScope(subst::FnSpace, &early, scope)), |v| {
                v.with(|scope1, f| f(LateScope(n, &late, scope1)), |v| {
                    v.check_lifetime_defs(lifetimes);
                    walk(v);
                });
            });
        }
        debug!("popping fn scope id={} due to fn item/method", n);
    }

    fn resolve_lifetime_ref(&mut self, lifetime_ref: &ast::Lifetime) {
        // Walk up the scope chain, tracking the number of fn scopes
        // that we pass through, until we find a lifetime with the
        // given name or we run out of scopes. If we encounter a code
        // block, then the lifetime is not bound but free, so switch
        // over to `resolve_free_lifetime_ref()` to complete the
        // search.
        let mut depth = 0;
        let mut scope = self.scope;
        loop {
            match *scope {
                BlockScope(id, s) => {
                    return self.resolve_free_lifetime_ref(id, lifetime_ref, s);
                }

                RootScope => {
                    break;
                }

                EarlyScope(space, lifetimes, s) => {
                    match search_lifetimes(lifetimes, lifetime_ref) {
                        Some((index, decl_id)) => {
                            let def = DefEarlyBoundRegion(space, index, decl_id);
                            self.insert_lifetime(lifetime_ref, def);
                            return;
                        }
                        None => {
                            depth += 1;
                            scope = s;
                        }
                    }
                }

                LateScope(binder_id, lifetimes, s) => {
                    match search_lifetimes(lifetimes, lifetime_ref) {
                        Some((_index, decl_id)) => {
                            let def = DefLateBoundRegion(binder_id, depth, decl_id);
                            self.insert_lifetime(lifetime_ref, def);
                            return;
                        }

                        None => {
                            depth += 1;
                            scope = s;
                        }
                    }
                }
            }
        }

        self.unresolved_lifetime_ref(lifetime_ref);
    }

    fn resolve_free_lifetime_ref(&mut self,
                                 scope_id: ast::NodeId,
                                 lifetime_ref: &ast::Lifetime,
                                 scope: Scope) {
        // Walk up the scope chain, tracking the outermost free scope,
        // until we encounter a scope that contains the named lifetime
        // or we run out of scopes.
        let mut scope_id = scope_id;
        let mut scope = scope;
        let mut search_result = None;
        loop {
            match *scope {
                BlockScope(id, s) => {
                    scope_id = id;
                    scope = s;
                }

                RootScope => {
                    break;
                }

                EarlyScope(_, lifetimes, s) |
                LateScope(_, lifetimes, s) => {
                    search_result = search_lifetimes(lifetimes, lifetime_ref);
                    if search_result.is_some() {
                        break;
                    }
                    scope = s;
                }
            }
        }

        match search_result {
            Some((_depth, decl_id)) => {
                // Free region: anchored to the outermost block seen.
                let def = DefFreeRegion(scope_id, decl_id);
                self.insert_lifetime(lifetime_ref, def);
            }

            None => {
                self.unresolved_lifetime_ref(lifetime_ref);
            }
        }
    }

    // Report a lifetime name that matched no scope in the chain.
    fn unresolved_lifetime_ref(&self, lifetime_ref: &ast::Lifetime) {
        self.sess.span_err(
            lifetime_ref.span,
            format!("use of undeclared lifetime name `{}`",
                    token::get_name(lifetime_ref.name)).as_slice());
    }

    // Validate one binder's lifetime declarations: no reserved names
    // ('static), no duplicates, and all bounds resolvable.
    fn check_lifetime_defs(&mut self, lifetimes: &Vec<ast::LifetimeDef>) {
        for i in range(0, lifetimes.len()) {
            let lifetime_i = lifetimes.get(i);

            let special_idents = [special_idents::static_lifetime];
            for lifetime in lifetimes.iter() {
                if special_idents.iter().any(|&i| i.name == lifetime.lifetime.name) {
                    self.sess.span_err(
                        lifetime.lifetime.span,
                        format!("illegal lifetime parameter name: `{}`",
                                token::get_name(lifetime.lifetime.name))
                            .as_slice());
                }
            }

            for j in range(i + 1, lifetimes.len()) {
                let lifetime_j = lifetimes.get(j);

                if lifetime_i.lifetime.name == lifetime_j.lifetime.name {
                    self.sess.span_err(
                        lifetime_j.lifetime.span,
                        format!("lifetime name `{}` declared twice in \
                                the same scope",
                                token::get_name(lifetime_j.lifetime.name))
                            .as_slice());
                }
            }

            for bound in lifetime_i.bounds.iter() {
                self.resolve_lifetime_ref(bound);
            }
        }
    }

    // Record the resolution of one lifetime reference in the output map.
    fn insert_lifetime(&mut self,
                       lifetime_ref: &ast::Lifetime,
                       def: DefRegion) {
        if lifetime_ref.id == ast::DUMMY_NODE_ID {
            self.sess.span_bug(lifetime_ref.span,
                               "lifetime reference not renumbered, \
                               probably a bug in syntax::fold");
        }

        debug!("lifetime_ref={} id={} resolved to {:?}",
                lifetime_to_string(lifetime_ref),
                lifetime_ref.id,
                def);
        self.named_region_map.insert(lifetime_ref.id, def);
    }
}
// Linear scan of one binder's declared lifetimes for a name matching
// `lifetime_ref`; returns (index within the list, NodeId of the decl).
fn search_lifetimes(lifetimes: &Vec<ast::LifetimeDef>,
                    lifetime_ref: &ast::Lifetime)
                    -> Option<(uint, ast::NodeId)> {
    for (i, lifetime_decl) in lifetimes.iter().enumerate() {
        if lifetime_decl.lifetime.name == lifetime_ref.name {
            return Some((i, lifetime_decl.lifetime.id));
        }
    }
    return None;
}
///////////////////////////////////////////////////////////////////////////
// Public helper: clone just the early-bound subset of an item's lifetime
// parameters, preserving declaration order.
pub fn early_bound_lifetimes<'a>(generics: &'a ast::Generics) -> Vec<ast::LifetimeDef> {
    let referenced_idents = early_bound_lifetime_names(generics);
    if referenced_idents.is_empty() {
        return Vec::new();
    }

    generics.lifetimes.iter()
        .filter(|l| referenced_idents.iter().any(|&i| i == l.lifetime.name))
        .map(|l| (*l).clone())
        .collect()
}
fn early_bound_lifetime_names(generics: &ast::Generics) -> Vec<ast::Name> {
    /*!
     * Given a set of generic declarations, returns a list of names
     * containing all early bound lifetime names for those
     * generics. (In fact, this list may also contain other names.)
     */

    // Create two lists, dividing the lifetimes into early/late bound.
    // Initially, all of them are considered late, but we will move
    // things from late into early as we go if we find references to
    // them.
    let mut early_bound = Vec::new();
    let mut late_bound = generics.lifetimes.iter()
        .map(|l| l.lifetime.name)
        .collect();

    // Any lifetime that appears in a type bound is early.
    {
        let mut collector =
            FreeLifetimeCollector { early_bound: &mut early_bound,
                                    late_bound: &mut late_bound };
        for ty_param in generics.ty_params.iter() {
            visit::walk_ty_param_bounds(&mut collector, &ty_param.bounds);
        }
        for predicate in generics.where_clause.predicates.iter() {
            visit::walk_ty_param_bounds(&mut collector, &predicate.bounds);
        }
    }

    // Any lifetime that either has a bound or is referenced by a
    // bound is early.
    for lifetime_def in generics.lifetimes.iter() {
        if !lifetime_def.bounds.is_empty() {
            shuffle(&mut early_bound, &mut late_bound,
                    lifetime_def.lifetime.name);
            for bound in lifetime_def.bounds.iter() {
                shuffle(&mut early_bound, &mut late_bound,
                        bound.name);
            }
        }
    }
    return early_bound;

    // Visitor that promotes every lifetime it sees from late to early.
    struct FreeLifetimeCollector<'a> {
        early_bound: &'a mut Vec<ast::Name>,
        late_bound: &'a mut Vec<ast::Name>,
    }

    impl<'a, 'v> Visitor<'v> for FreeLifetimeCollector<'a> {
        fn visit_lifetime_ref(&mut self, lifetime_ref: &ast::Lifetime) {
            shuffle(self.early_bound, self.late_bound,
                    lifetime_ref.name);
        }
    }

    // Move `name` from the late list to the early list, if present.
    fn shuffle(early_bound: &mut Vec<ast::Name>,
               late_bound: &mut Vec<ast::Name>,
               name: ast::Name) {
        match late_bound.iter().position(|n| *n == name) {
            Some(index) => {
                late_bound.swap_remove(index);
                early_bound.push(name);
            }
            None => { }
        }
    }
}
| |
<|file_name|>test_notifications.py<|end_file_name|><|fim▁begin|>"""Integration test for Notifications."""
import github3
from .helper import IntegrationHelper
class TestThread(IntegrationHelper):
    """Integration tests for notification Thread objects."""

    def test_subscription(self):
        """Show that a user can retrieve notifications for a repository.

        Replays the recorded "subscription" cassette; no live network
        access is performed.
        """
        self.token_login()
        cassette_name = self.cassette_name("subscription")
        with self.recorder.use_cassette(cassette_name):
            repository = self.gh.repository('sigmavirus24', 'github3.py')
            threads = list(repository.notifications(all=True))
            assert len(threads) > 0
            thread = threads[0]
            assert isinstance(thread, github3.notifications.Thread)
            assert isinstance(thread.subscription(),
                              github3.notifications.Subscription)
class TestSubscription(IntegrationHelper):
    """Integration tests for notification Subscription objects."""

    def test_set(self):
        """Show that a user can successfully set a subscription.

        Replays the recorded "set" cassette; `set()` returns None on
        success per the github3.py API.
        """
        self.token_login()
        cassette_name = self.cassette_name("set")
        with self.recorder.use_cassette(cassette_name):
            repository = self.gh.repository('sigmavirus24', 'github3.py')
            threads = list(repository.notifications(all='true'))
            assert len(threads) > 0
            subscription = threads[0].subscription()
            assert subscription.set(True, False) is None
            assert isinstance(subscription, github3.notifications.Subscription)
<|file_name|>demo_tokenizer_roberta.py<|end_file_name|><|fim▁begin|>from transformers import RobertaTokenizerFast
import scattertext as st
tokenizer_fast = RobertaTokenizerFast.from_pretrained(
"roberta-base", add_prefix_space=True)
tokenizer = st.RobertaTokenizerWrapper(tokenizer_fast)
df = st.SampleCorpora.ConventionData2012.get_data().assign(
parse = lambda df: df.text.apply(tokenizer.tokenize)
)
<|fim▁hole|> category_col='party',
parsed_col='parse',
feat_and_offset_getter=st.TokenFeatAndOffsetGetter()
).build()
# Remove words occur less than 5 times
corpus = corpus.remove_infrequent_words(5, non_text=True)
plot_df = corpus.get_metadata_freq_df('').assign(
Y=lambda df: df.democrat,
X=lambda df: df.republican,
Ypos=lambda df: st.Scalers.dense_rank(df.Y),
Xpos=lambda df: st.Scalers.dense_rank(df.X),
SuppressDisplay=False,
ColorScore=lambda df: st.Scalers.scale_center_zero(df.Ypos - df.Xpos),
)
html = st.dataframe_scattertext(
corpus,
plot_df=plot_df,
category='democrat',
category_name='Democratic',
not_category_name='Republican',
width_in_pixels=1000,
suppress_text_column='Display',
metadata=corpus.get_df()['speaker'],
use_non_text_features=True,
ignore_categories=False,
use_offsets=True,
unified_context=False,
color_score_column='ColorScore',
left_list_column='ColorScore',
y_label='Democarats',
x_label='Republicans',
header_names={'upper': 'Top Democratic', 'lower': 'Top Republican', 'right': 'Most Frequent'},
subword_encoding='RoBERTa'
)
fn = 'roberta_sentence_piece.html'
with open(fn, 'w') as of:
of.write(html)
print("Open ./" + fn + ' in Chrome.')<|fim▁end|>
|
corpus = st.OffsetCorpusFactory(
df,
|
<|file_name|>sidebar.client.controller.js<|end_file_name|><|fim▁begin|>'use strict';
angular.module('core').controller('SidebarController', ['$scope', 'Authentication',<|fim▁hole|> $scope.authentication = Authentication;
}
]);<|fim▁end|>
|
function($scope, Authentication) {
|
<|file_name|>Trial6_pySerial_Mod.py<|end_file_name|><|fim▁begin|>import serial
port = "COM5"
baud = 19200
try:
ser = serial.Serial(port, baud, timeout=1)
ser.isOpen() # try to open port, if possible print message and proceed with 'while True:'
<|fim▁hole|>
except IOError: # if port is already opened, close it and open it again and print message
ser.close()
ser.open()
print ("port was already open, was closed and opened again!")
def main():
while True:
cmd = raw_input("Enter command or 'exit':")
# for Python 2
# cmd = input("Enter command or 'exit':")
# for Python 3
if cmd == 'exit':
ser.close()
exit()
else:
ser.write(cmd.encode('ascii'))
# out = ser.read()
# print('Receiving...'+out)
if __name__ == "__main__":
main()<|fim▁end|>
|
print ("port is opened!")
|
<|file_name|>SplineSeries.js<|end_file_name|><|fim▁begin|>/* *
* (c) 2010-2019 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
import H from './Globals.js';
import './Utilities.js';
import './Options.js';
import './Series.js';
var pick = H.pick,
seriesType = H.seriesType;
/**
* Spline series type.
*
* @private
* @class
* @name Highcharts.seriesTypes.spline
*
* @augments Highcarts.Series
*/
seriesType(
'spline',
'line',
/**
* A spline series is a special type of line series, where the segments
* between the data points are smoothed.
*
* @sample {highcharts} highcharts/demo/spline-irregular-time/
* Spline chart
* @sample {highstock} stock/demo/spline/
* Spline chart
*
* @extends plotOptions.series
* @excluding step
* @product highcharts highstock
* @optionparent plotOptions.spline
*/
{
},
/** @lends seriesTypes.spline.prototype */ {
/**
* Get the spline segment from a given point's previous neighbour to the
* given point.
*
* @private
* @function Highcharts.seriesTypes.spline#getPointSpline
*
* @param {Array<Highcharts.Point>}
*
* @param {Highcharts.Point} point
*
* @param {number} i
*
* @return {Highcharts.SVGPathArray}
*/
getPointSpline: function (points, point, i) {
var
// 1 means control points midway between points, 2 means 1/3
// from the point, 3 is 1/4 etc
smoothing = 1.5,
denom = smoothing + 1,
plotX = point.plotX,
plotY = point.plotY,
lastPoint = points[i - 1],
nextPoint = points[i + 1],<|fim▁hole|> rightContX,
rightContY,
ret;
function doCurve(otherPoint) {
return otherPoint &&
!otherPoint.isNull &&
otherPoint.doCurve !== false &&
!point.isCliff; // #6387, area splines next to null
}
// Find control points
if (doCurve(lastPoint) && doCurve(nextPoint)) {
var lastX = lastPoint.plotX,
lastY = lastPoint.plotY,
nextX = nextPoint.plotX,
nextY = nextPoint.plotY,
correction = 0;
leftContX = (smoothing * plotX + lastX) / denom;
leftContY = (smoothing * plotY + lastY) / denom;
rightContX = (smoothing * plotX + nextX) / denom;
rightContY = (smoothing * plotY + nextY) / denom;
// Have the two control points make a straight line through main
// point
if (rightContX !== leftContX) { // #5016, division by zero
correction = (
((rightContY - leftContY) * (rightContX - plotX)) /
(rightContX - leftContX) + plotY - rightContY
);
}
leftContY += correction;
rightContY += correction;
// to prevent false extremes, check that control points are
// between neighbouring points' y values
if (leftContY > lastY && leftContY > plotY) {
leftContY = Math.max(lastY, plotY);
// mirror of left control point
rightContY = 2 * plotY - leftContY;
} else if (leftContY < lastY && leftContY < plotY) {
leftContY = Math.min(lastY, plotY);
rightContY = 2 * plotY - leftContY;
}
if (rightContY > nextY && rightContY > plotY) {
rightContY = Math.max(nextY, plotY);
leftContY = 2 * plotY - rightContY;
} else if (rightContY < nextY && rightContY < plotY) {
rightContY = Math.min(nextY, plotY);
leftContY = 2 * plotY - rightContY;
}
// record for drawing in next point
point.rightContX = rightContX;
point.rightContY = rightContY;
}
// Visualize control points for debugging
/*
if (leftContX) {
this.chart.renderer.circle(
leftContX + this.chart.plotLeft,
leftContY + this.chart.plotTop,
2
)
.attr({
stroke: 'red',
'stroke-width': 2,
fill: 'none',
zIndex: 9
})
.add();
this.chart.renderer.path(['M', leftContX + this.chart.plotLeft,
leftContY + this.chart.plotTop,
'L', plotX + this.chart.plotLeft, plotY + this.chart.plotTop])
.attr({
stroke: 'red',
'stroke-width': 2,
zIndex: 9
})
.add();
}
if (rightContX) {
this.chart.renderer.circle(
rightContX + this.chart.plotLeft,
rightContY + this.chart.plotTop,
2
)
.attr({
stroke: 'green',
'stroke-width': 2,
fill: 'none',
zIndex: 9
})
.add();
this.chart.renderer.path(['M', rightContX + this.chart.plotLeft,
rightContY + this.chart.plotTop,
'L', plotX + this.chart.plotLeft, plotY + this.chart.plotTop])
.attr({
stroke: 'green',
'stroke-width': 2,
zIndex: 9
})
.add();
}
// */
ret = [
'C',
pick(lastPoint.rightContX, lastPoint.plotX),
pick(lastPoint.rightContY, lastPoint.plotY),
pick(leftContX, plotX),
pick(leftContY, plotY),
plotX,
plotY
];
// reset for updating series later
lastPoint.rightContX = lastPoint.rightContY = null;
return ret;
}
}
);
/**
* A `spline` series. If the [type](#series.spline.type) option is
* not specified, it is inherited from [chart.type](#chart.type).
*
* @extends series,plotOptions.spline
* @excluding dataParser, dataURL, step
* @product highcharts highstock
* @apioption series.spline
*/
/**
* An array of data points for the series. For the `spline` series type,
* points can be given in the following ways:
*
* 1. An array of numerical values. In this case, the numerical values will be
* interpreted as `y` options. The `x` values will be automatically
* calculated, either starting at 0 and incremented by 1, or from
* `pointStart` and `pointInterval` given in the series options. If the axis
* has categories, these will be used. Example:
* ```js
* data: [0, 5, 3, 5]
* ```
*
* 2. An array of arrays with 2 values. In this case, the values correspond to
* `x,y`. If the first value is a string, it is applied as the name of the
* point, and the `x` value is inferred.
* ```js
* data: [
* [0, 9],
* [1, 2],
* [2, 8]
* ]
* ```
*
* 3. An array of objects with named values. The following snippet shows only a
* few settings, see the complete options set below. If the total number of
* data points exceeds the series'
* [turboThreshold](#series.spline.turboThreshold), this option is not
* available.
* ```js
* data: [{
* x: 1,
* y: 9,
* name: "Point2",
* color: "#00FF00"
* }, {
* x: 1,
* y: 0,
* name: "Point1",
* color: "#FF00FF"
* }]
* ```
*
* @sample {highcharts} highcharts/chart/reflow-true/
* Numerical values
* @sample {highcharts} highcharts/series/data-array-of-arrays/
* Arrays of numeric x and y
* @sample {highcharts} highcharts/series/data-array-of-arrays-datetime/
* Arrays of datetime x and y
* @sample {highcharts} highcharts/series/data-array-of-name-value/
* Arrays of point.name and y
* @sample {highcharts} highcharts/series/data-array-of-objects/
* Config objects
*
* @type {Array<number|Array<(number|string),number>|*>}
* @extends series.line.data
* @product highcharts highstock
* @apioption series.spline.data
*/<|fim▁end|>
|
leftContX,
leftContY,
|
<|file_name|>HomeController.java<|end_file_name|><|fim▁begin|>package es.ucm.fdi.iw.controller;
import java.security.Principal;
import java.util.ArrayList;
import java.util.List;
import javax.persistence.EntityManager;
import javax.servlet.http.HttpSession;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.web.servletapi.SecurityContextHolderAwareRequestWrapper;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import com.google.gson.Gson;
import es.ucm.fdi.iw.model.Intercambio;
import es.ucm.fdi.iw.model.Usuario;
import es.ucm.fdi.iw.model.UsuarioJSON;
@Controller
@RequestMapping("home")
public class HomeController {
private static Logger log = Logger.getLogger(HomeController.class);
@Autowired
private EntityManager entityManager;
// Incluimos ${prefix} en todas las páginas
@ModelAttribute
public void addAttributes(Model m) {
m.addAttribute("prefix", "../static/");
m.addAttribute("prefijo", "../");
}
@GetMapping({ "", "/" })
public String root(Model model, Principal principal, HttpSession session,
SecurityContextHolderAwareRequestWrapper request) {
añadirCSSyJSAlModelo(model);
Usuario usuarioActual = (Usuario) entityManager.createNamedQuery("userByUserField")
.setParameter("userParam", principal.getName()).getSingleResult();
if (principal != null && session.getAttribute("user") == null) {
try {
if (!usuarioActual.isActivo()){
throw new Exception();
}
session.setAttribute("user", usuarioActual);
} catch (Exception e) {
log.info("No such user: " + principal.getName());
return "redirect:index";
}
}
@SuppressWarnings("unchecked")
ArrayList<Usuario> usuarios = (ArrayList<Usuario>) entityManager.createNamedQuery("getActiveUsers")
.setParameter("roleParam", "USER").setParameter("activeParam", true)
.setParameter("actual", principal.getName()).getResultList();
Gson gson = new Gson();
String json = "{";
json +="\"usuarios\":[";
for(Usuario u : usuarios)<|fim▁hole|> if(usuarios.indexOf(u) != usuarios.size()- 1)
{
json+= ',';
}
}
json += "]}";
model.addAttribute("usuariosJSON",json);
model.addAttribute("usuarios", usuarios);
if (request.isUserInRole("ROLE_ADMIN"))
return "redirect:admin";
//Enviamos al modelo el usuarioActual (en JSON y normal)
añadirUsuarioActualJSON(model, usuarioActual);
model.addAttribute("usuarioActual",usuarioActual);
mensajesPendientes(model,usuarioActual);
return "home";
}
private void añadirUsuarioActualJSON(Model model, Usuario usuarioActual)
{
UsuarioJSON usuarioActualJSON = new UsuarioJSON(usuarioActual);
Gson gson = new Gson();
String jsonAux = gson.toJson(usuarioActualJSON);
model.addAttribute("usuarioActualJSON", jsonAux);
}
@SuppressWarnings("unchecked")
private void mensajesPendientes(Model model, Usuario usuarioActual)
{
List<Intercambio> intercambios = entityManager.createNamedQuery("allIntercambiosUsuarioPendiente")
.setParameter("estado", "Pendiente")
.setParameter("user", usuarioActual)
.getResultList();
model.addAttribute("numeroDeMensajes",intercambios.size());
}
public static void añadirCSSyJSAlModelo(Model model) {
List<String> listaCSS = new ArrayList<String>();
listaCSS.add("styleHome.css");
listaCSS.add("popup.css");
listaCSS.add("star-rating.min.css");
List<String> listaJS = new ArrayList<String>();
listaJS.add("jquery-3.1.1.min.js");
listaJS.add("jquery-ui-1.12.1/jquery-ui.min.js");
listaJS.add("bootstrap.min.js");
listaJS.add("star-rating.min.js");
listaJS.add("home.js");
model.addAttribute("pageExtraCSS", listaCSS);
model.addAttribute("pageExtraScripts", listaJS);
}
}<|fim▁end|>
|
{
UsuarioJSON usuarioJSON = new UsuarioJSON(u);
json += gson.toJson(usuarioJSON);
|
<|file_name|>validate.js<|end_file_name|><|fim▁begin|>"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
let prettyPrintTypes = function (types) {
const addArticle = (str) => {
let vowels = ['a', 'e', 'i', 'o', 'u'];
if (vowels.indexOf(str[0]) !== -1) {
return 'an ' + str;
}
return 'a ' + str;
};
return types.map(addArticle).join(' or ');
};
let isArrayOfNotation = function (typeDefinition) {
return /array of /.test(typeDefinition);
};
let extractTypeFromArrayOfNotation = function (typeDefinition) {
// The notation is e.g. 'array of string'
return typeDefinition.split(' of ')[1];
};
let isValidTypeDefinition = (typeStr) => {
if (isArrayOfNotation(typeStr)) {
return isValidTypeDefinition(extractTypeFromArrayOfNotation(typeStr));
}
return [
'string',
'number',
'boolean',
'array',
'object',
'buffer',
'null',
'undefined',
'function'
].some(function (validType) {
return validType === typeStr;
});
};
const detectType = function (value) {
if (value === null) {
return 'null';
}
if (Array.isArray(value)) {
return 'array';<|fim▁hole|> }
return typeof value;
};
const onlyUniqueValuesInArrayFilter = function (value, index, self) {
return self.indexOf(value) === index;
};
let detectTypeDeep = function (value) {
let type = detectType(value);
let typesInArray;
if (type === 'array') {
typesInArray = value
.map((element) => {
return detectType(element);
})
.filter(onlyUniqueValuesInArrayFilter);
type += ' of ' + typesInArray.join(', ');
}
return type;
};
let validateArray = function (argumentValue, typeToCheck) {
let allowedTypeInArray = extractTypeFromArrayOfNotation(typeToCheck);
if (detectType(argumentValue) !== 'array') {
return false;
}
return argumentValue.every(function (element) {
return detectType(element) === allowedTypeInArray;
});
};
function validateArgument(methodName, argumentName, argumentValue, argumentMustBe) {
let isOneOfAllowedTypes = argumentMustBe.some(function (type) {
if (!isValidTypeDefinition(type)) {
throw new Error('Unknown type "' + type + '"');
}
if (isArrayOfNotation(type)) {
return validateArray(argumentValue, type);
}
return type === detectType(argumentValue);
});
if (!isOneOfAllowedTypes) {
throw new Error('Argument "' + argumentName + '" passed to ' + methodName + ' must be '
+ prettyPrintTypes(argumentMustBe) + '. Received ' + detectTypeDeep(argumentValue));
}
return false;
}
exports.validateArgument = validateArgument;
;
function validateOptions(methodName, optionsObjName, obj, allowedOptions) {
if (obj !== undefined) {
validateArgument(methodName, optionsObjName, obj, ['object']);
Object.keys(obj).forEach(function (key) {
let argName = optionsObjName + '.' + key;
if (allowedOptions.hasOwnProperty(key)) {
validateArgument(methodName, argName, obj[key], allowedOptions[key]);
}
else {
throw new Error('Unknown argument "' + argName + '" passed to ' + methodName);
}
});
}
}
exports.validateOptions = validateOptions;
;
//# sourceMappingURL=validate.js.map<|fim▁end|>
|
}
if (Buffer.isBuffer(value)) {
return 'buffer';
|
<|file_name|>test_del_contact.py<|end_file_name|><|fim▁begin|>from model.info_contact import Infos
import random
def test_delete_some_contact(app, db, check_ui):
if app.contact.count() == 0:
app.contact.create(Infos(firstname="AAAAA"))<|fim▁hole|> old_contacts.remove(contact)
assert old_contacts == new_contacts
if check_ui:
assert sorted(map(app.contact.clean, new_contacts), key=Infos.id_or_max) == sorted(app.contact.get_contact_list(), key=Infos.id_or_max)<|fim▁end|>
|
old_contacts = db.get_contact_list()
contact = random.choice(old_contacts)
app.contact.delete_contact_by_id(contact.id)
new_contacts = db.get_contact_list()
|
<|file_name|>core.py<|end_file_name|><|fim▁begin|>import os
import sys
import codecs
from contextlib import contextmanager
from itertools import repeat
from functools import update_wrapper
from .types import convert_type, IntRange, BOOL
from .utils import make_str, make_default_short_help, echo
from .exceptions import ClickException, UsageError, BadParameter, Abort, \
MissingParameter
from .termui import prompt, confirm
from .formatting import HelpFormatter, join_options
from .parser import OptionParser, split_opt
from .globals import push_context, pop_context
from ._compat import PY2, isidentifier, iteritems, _check_for_unicode_literals
_missing = object()
SUBCOMMAND_METAVAR = 'COMMAND [ARGS]...'
SUBCOMMANDS_METAVAR = 'COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]...'
def _bashcomplete(cmd, prog_name, complete_var=None):
"""Internal handler for the bash completion support."""
if complete_var is None:
complete_var = '_%s_COMPLETE' % (prog_name.replace('-', '_')).upper()
complete_instr = os.environ.get(complete_var)
if not complete_instr:
return
from ._bashcomplete import bashcomplete
if bashcomplete(cmd, prog_name, complete_var, complete_instr):
sys.exit(1)
def batch(iterable, batch_size):
return list(zip(*repeat(iter(iterable), batch_size)))
def invoke_param_callback(callback, ctx, param, value):
code = getattr(callback, '__code__', None)
args = getattr(code, 'co_argcount', 3)
if args < 3:
# This will become a warning in Click 3.0:
from warnings import warn
warn(Warning('Invoked legacy parameter callback "%s". The new '
'signature for such callbacks starting with '
'click 2.0 is (ctx, param, value).'
% callback), stacklevel=3)
return callback(ctx, value)
return callback(ctx, param, value)
@contextmanager
def augment_usage_errors(ctx, param=None):
"""Context manager that attaches extra information to exceptions that
fly.
"""
try:
yield
except BadParameter as e:
if e.ctx is None:
e.ctx = ctx
if param is not None and e.param is None:
e.param = param
raise
except UsageError as e:
if e.ctx is None:
e.ctx = ctx
raise
def iter_params_for_processing(invocation_order, declaration_order):
"""Given a sequence of parameters in the order as should be considered
for processing and an iterable of parameters that exist, this returns
a list in the correct order as they should be processed.
"""
def sort_key(item):
try:
idx = invocation_order.index(item)
except ValueError:
idx = float('inf')
return (not item.is_eager, idx)
return sorted(declaration_order, key=sort_key)
class Context(object):
"""The context is a special internal object that holds state relevant
for the script execution at every single level. It's normally invisible
to commands unless they opt-in to getting access to it.
The context is useful as it can pass internal objects around and can
control special execution features such as reading data from
environment variables.
A context can be used as context manager in which case it will call
:meth:`close` on teardown.
.. versionadded:: 2.0
Added the `resilient_parsing`, `help_option_names`,
`token_normalize_func` parameters.
.. versionadded:: 3.0
Added the `allow_extra_args` and `allow_interspersed_args`
parameters.
.. versionadded:: 4.0
Added the `color`, `ignore_unknown_options`, and
`max_content_width` parameters.
:param command: the command class for this context.
:param parent: the parent context.
:param info_name: the info name for this invocation. Generally this
is the most descriptive name for the script or
command. For the toplevel script it is usually
the name of the script, for commands below it it's
the name of the script.
:param obj: an arbitrary object of user data.
:param auto_envvar_prefix: the prefix to use for automatic environment
variables. If this is `None` then reading
from environment variables is disabled. This
does not affect manually set environment
variables which are always read.
:param default_map: a dictionary (like object) with default values
for parameters.
:param terminal_width: the width of the terminal. The default is
inherit from parent context. If no context
defines the terminal width then auto
detection will be applied.
:param max_content_width: the maximum width for content rendered by
Click (this currently only affects help
pages). This defaults to 80 characters if
not overridden. In other words: even if the
terminal is larger than that, Click will not
format things wider than 80 characters by
default. In addition to that, formatters might
add some safety mapping on the right.
:param resilient_parsing: if this flag is enabled then Click will
parse without any interactivity or callback
invocation. This is useful for implementing
things such as completion support.
:param allow_extra_args: if this is set to `True` then extra arguments
at the end will not raise an error and will be
kept on the context. The default is to inherit
from the command.
:param allow_interspersed_args: if this is set to `False` then options
and arguments cannot be mixed. The
default is to inherit from the command.
:param ignore_unknown_options: instructs click to ignore options it does
not know and keeps them for later
processing.
:param help_option_names: optionally a list of strings that define how
the default help parameter is named. The
default is ``['--help']``.
:param token_normalize_func: an optional function that is used to
normalize tokens (options, choices,
etc.). This for instance can be used to
implement case insensitive behavior.
:param color: controls if the terminal supports ANSI colors or not. The
default is autodetection. This is only needed if ANSI
codes are used in texts that Click prints which is by
default not the case. This for instance would affect
help output.
"""
def __init__(self, command, parent=None, info_name=None, obj=None,
auto_envvar_prefix=None, default_map=None,
terminal_width=None, max_content_width=None,
resilient_parsing=False, allow_extra_args=None,
allow_interspersed_args=None,
ignore_unknown_options=None, help_option_names=None,
token_normalize_func=None, color=None):
#: the parent context or `None` if none exists.
self.parent = parent
#: the :class:`Command` for this context.
self.command = command
#: the descriptive information name
self.info_name = info_name
#: the parsed parameters except if the value is hidden in which
#: case it's not remembered.
self.params = {}
#: the leftover arguments.
self.args = []
if obj is None and parent is not None:
obj = parent.obj
#: the user object stored.
self.obj = obj
self._meta = getattr(parent, 'meta', {})
#: A dictionary (-like object) with defaults for parameters.
if default_map is None \
and parent is not None \
and parent.default_map is not None:
default_map = parent.default_map.get(info_name)
self.default_map = default_map
#: This flag indicates if a subcommand is going to be executed. A
#: group callback can use this information to figure out if it's
#: being executed directly or because the execution flow passes
#: onwards to a subcommand. By default it's None, but it can be
#: the name of the subcommand to execute.
#:
#: If chaining is enabled this will be set to ``'*'`` in case
#: any commands are executed. It is however not possible to
#: figure out which ones. If you require this knowledge you
#: should use a :func:`resultcallback`.
self.invoked_subcommand = None
if terminal_width is None and parent is not None:
terminal_width = parent.terminal_width
#: The width of the terminal (None is autodetection).
self.terminal_width = terminal_width
if max_content_width is None and parent is not None:
max_content_width = parent.max_content_width
#: The maximum width of formatted content (None implies a sensible
#: default which is 80 for most things).
self.max_content_width = max_content_width
if allow_extra_args is None:
allow_extra_args = command.allow_extra_args
#: Indicates if the context allows extra args or if it should
#: fail on parsing.
#:
#: .. versionadded:: 3.0
self.allow_extra_args = allow_extra_args
if allow_interspersed_args is None:
allow_interspersed_args = command.allow_interspersed_args
#: Indicates if the context allows mixing of arguments and
#: options or not.
#:
#: .. versionadded:: 3.0
self.allow_interspersed_args = allow_interspersed_args
if ignore_unknown_options is None:
ignore_unknown_options = command.ignore_unknown_options
#: Instructs click to ignore options that a command does not
#: understand and will store it on the context for later
#: processing. This is primarily useful for situations where you
#: want to call into external programs. Generally this pattern is
#: strongly discouraged because it's not possibly to losslessly
#: forward all arguments.
#:
#: .. versionadded:: 4.0
self.ignore_unknown_options = ignore_unknown_options
if help_option_names is None:
if parent is not None:
help_option_names = parent.help_option_names
else:
help_option_names = ['--help']
#: The names for the help options.
self.help_option_names = help_option_names
if token_normalize_func is None and parent is not None:
token_normalize_func = parent.token_normalize_func
#: An optional normalization function for tokens. This is
#: options, choices, commands etc.
self.token_normalize_func = token_normalize_func
#: Indicates if resilient parsing is enabled. In that case Click
#: will do its best to not cause any failures.
self.resilient_parsing = resilient_parsing
# If there is no envvar prefix yet, but the parent has one and
# the command on this level has a name, we can expand the envvar
# prefix automatically.
if auto_envvar_prefix is None:
if parent is not None \
and parent.auto_envvar_prefix is not None and \
self.info_name is not None:
auto_envvar_prefix = '%s_%s' % (parent.auto_envvar_prefix,
self.info_name.upper())
else:
self.auto_envvar_prefix = auto_envvar_prefix.upper()
self.auto_envvar_prefix = auto_envvar_prefix
if color is None and parent is not None:
color = parent.color
#: Controls if styling output is wanted or not.
self.color = color
self._close_callbacks = []
self._depth = 0
def __enter__(self):
self._depth += 1
push_context(self)
return self
def __exit__(self, exc_type, exc_value, tb):
pop_context()
self._depth -= 1
if self._depth == 0:
self.close()
@contextmanager
def scope(self, cleanup=True):
"""This helper method can be used with the context object to promote
it to the current thread local (see :func:`get_current_context`).
The default behavior of this is to invoke the cleanup functions which
can be disabled by setting `cleanup` to `False`. The cleanup
functions are typically used for things such as closing file handles.
If the cleanup is intended the context object can also be directly
used as a context manager.
Example usage::
with ctx.scope():
assert get_current_context() is ctx
This is equivalent::
with ctx:
assert get_current_context() is ctx
.. versionadded:: 5.0
:param cleanup: controls if the cleanup functions should be run or
not. The default is to run these functions. In
some situations the context only wants to be
temporarily pushed in which case this can be disabled.
Nested pushes automatically defer the cleanup.
"""
if not cleanup:
self._depth += 1
try:
with self as rv:
yield rv
finally:
if not cleanup:
self._depth -= 1
@property
def meta(self):
"""This is a dictionary which is shared with all the contexts
that are nested. It exists so that click utiltiies can store some
state here if they need to. It is however the responsibility of
that code to manage this dictionary well.
The keys are supposed to be unique dotted strings. For instance
module paths are a good choice for it. What is stored in there is
irrelevant for the operation of click. However what is important is
that code that places data here adheres to the general semantics of
the system.
Example usage::
LANG_KEY = __name__ + '.lang'
def set_language(value):
ctx = get_current_context()
ctx.meta[LANG_KEY] = value
def get_language():
return get_current_context().meta.get(LANG_KEY, 'en_US')
.. versionadded:: 5.0
"""
return self._meta
def make_formatter(self):
"""Creates the formatter for the help and usage output."""
return HelpFormatter(width=self.terminal_width,
max_width=self.max_content_width)
def call_on_close(self, f):
"""This decorator remembers a function as callback that should be
executed when the context tears down. This is most useful to bind
resource handling to the script execution. For instance, file objects
opened by the :class:`File` type will register their close callbacks
here.
:param f: the function to execute on teardown.
"""
self._close_callbacks.append(f)
return f
def close(self):
"""Invokes all close callbacks."""
for cb in self._close_callbacks:
cb()
self._close_callbacks = []
@property
def command_path(self):
"""The computed command path. This is used for the ``usage``
information on the help page. It's automatically created by
combining the info names of the chain of contexts to the root.
"""
rv = ''
if self.info_name is not None:
rv = self.info_name
if self.parent is not None:
rv = self.parent.command_path + ' ' + rv
return rv.lstrip()
def find_root(self):
"""Finds the outermost context."""
node = self
while node.parent is not None:
node = node.parent
return node
def find_object(self, object_type):
"""Finds the closest object of a given type."""
node = self
while node is not None:
if isinstance(node.obj, object_type):
return node.obj
node = node.parent
def ensure_object(self, object_type):
"""Like :meth:`find_object` but sets the innermost object to a
new instance of `object_type` if it does not exist.
"""
rv = self.find_object(object_type)
if rv is None:
self.obj = rv = object_type()
return rv
def lookup_default(self, name):
"""Looks up the default for a parameter name. This by default
looks into the :attr:`default_map` if available.
"""
if self.default_map is not None:
rv = self.default_map.get(name)
if callable(rv):
rv = rv()
return rv
def fail(self, message):
"""Aborts the execution of the program with a specific error
message.
:param message: the error message to fail with.
"""
raise UsageError(message, self)
def abort(self):
"""Aborts the script."""
raise Abort()
def exit(self, code=0):
"""Exits the application with a given exit code."""
sys.exit(code)
def get_usage(self):
"""Helper method to get formatted usage string for the current
context and command.
"""
return self.command.get_usage(self)
def get_help(self):
"""Helper method to get formatted help page for the current
context and command.
"""
return self.command.get_help(self)
def invoke(*args, **kwargs):
"""Invokes a command callback in exactly the way it expects. There
are two ways to invoke this method:
1. the first argument can be a callback and all other arguments and
keyword arguments are forwarded directly to the function.
2. the first argument is a click command object. In that case all
arguments are forwarded as well but proper click parameters
(options and click arguments) must be keyword arguments and Click
will fill in defaults.
Note that before Click 3.2 keyword arguments were not properly filled
in against the intention of this code and no context was created. For
more information about this change and why it was done in a bugfix
release see :ref:`upgrade-to-3.2`.
"""
self, callback = args[:2]
ctx = self
# It's also possible to invoke another command which might or
# might not have a callback. In that case we also fill
# in defaults and make a new context for this command.
if isinstance(callback, Command):
other_cmd = callback
callback = other_cmd.callback
ctx = Context(other_cmd, info_name=other_cmd.name, parent=self)
if callback is None:
raise TypeError('The given command does not have a '
'callback that can be invoked.')
for param in other_cmd.params:
if param.name not in kwargs and param.expose_value:
kwargs[param.name] = param.get_default(ctx)
args = args[2:]
with augment_usage_errors(self):
with ctx:
return callback(*args, **kwargs)
def forward(*args, **kwargs):
"""Similar to :meth:`invoke` but fills in default keyword
arguments from the current context if the other command expects
it. This cannot invoke callbacks directly, only other commands.
"""
self, cmd = args[:2]
# It's also possible to invoke another command which might or
# might not have a callback.
if not isinstance(cmd, Command):
raise TypeError('Callback is not a command.')
for param in self.params:
if param not in kwargs:
kwargs[param] = self.params[param]
return self.invoke(cmd, **kwargs)
class BaseCommand(object):
"""The base command implements the minimal API contract of commands.
Most code will never use this as it does not implement a lot of useful
functionality but it can act as the direct subclass of alternative<|fim▁hole|> For instance, this can be used to bridge Click and other systems like
argparse or docopt.
Because base commands do not implement a lot of the API that other
parts of Click take for granted, they are not supported for all
operations. For instance, they cannot be used with the decorators
usually and they have no built-in callback system.
.. versionchanged:: 2.0
Added the `context_settings` parameter.
:param name: the name of the command to use unless a group overrides it.
:param context_settings: an optional dictionary with defaults that are
passed to the context object.
"""
#: the default for the :attr:`Context.allow_extra_args` flag.
allow_extra_args = False
#: the default for the :attr:`Context.allow_interspersed_args` flag.
allow_interspersed_args = True
#: the default for the :attr:`Context.ignore_unknown_options` flag.
ignore_unknown_options = False
def __init__(self, name, context_settings=None):
#: the name the command thinks it has. Upon registering a command
#: on a :class:`Group` the group will default the command name
#: with this information. You should instead use the
#: :class:`Context`\'s :attr:`~Context.info_name` attribute.
self.name = name
if context_settings is None:
context_settings = {}
#: an optional dictionary with defaults passed to the context.
self.context_settings = context_settings
def get_usage(self, ctx):
raise NotImplementedError('Base commands cannot get usage')
def get_help(self, ctx):
raise NotImplementedError('Base commands cannot get help')
def make_context(self, info_name, args, parent=None, **extra):
"""This function when given an info name and arguments will kick
off the parsing and create a new :class:`Context`. It does not
invoke the actual command callback though.
:param info_name: the info name for this invokation. Generally this
is the most descriptive name for the script or
command. For the toplevel script it's usually
the name of the script, for commands below it it's
the name of the script.
:param args: the arguments to parse as list of strings.
:param parent: the parent context if available.
:param extra: extra keyword arguments forwarded to the context
constructor.
"""
for key, value in iteritems(self.context_settings):
if key not in extra:
extra[key] = value
ctx = Context(self, info_name=info_name, parent=parent, **extra)
with ctx.scope(cleanup=False):
self.parse_args(ctx, args)
return ctx
def parse_args(self, ctx, args):
"""Given a context and a list of arguments this creates the parser
and parses the arguments, then modifies the context as necessary.
This is automatically invoked by :meth:`make_context`.
"""
raise NotImplementedError('Base commands do not know how to parse '
'arguments.')
def invoke(self, ctx):
"""Given a context, this invokes the command. The default
implementation is raising a not implemented error.
"""
raise NotImplementedError('Base commands are not invokable by default')
def main(self, args=None, prog_name=None, complete_var=None,
standalone_mode=True, **extra):
"""This is the way to invoke a script with all the bells and
whistles as a command line application. This will always terminate
the application after a call. If this is not wanted, ``SystemExit``
needs to be caught.
This method is also available by directly calling the instance of
a :class:`Command`.
.. versionadded:: 3.0
Added the `standalone_mode` flag to control the standalone mode.
:param args: the arguments that should be used for parsing. If not
provided, ``sys.argv[1:]`` is used.
:param prog_name: the program name that should be used. By default
the program name is constructed by taking the file
name from ``sys.argv[0]``.
:param complete_var: the environment variable that controls the
bash completion support. The default is
``"_<prog_name>_COMPLETE"`` with prog name in
uppercase.
:param standalone_mode: the default behavior is to invoke the script
in standalone mode. Click will then
handle exceptions and convert them into
error messages and the function will never
return but shut down the interpreter. If
this is set to `False` they will be
propagated to the caller and the return
value of this function is the return value
of :meth:`invoke`.
:param extra: extra keyword arguments are forwarded to the context
constructor. See :class:`Context` for more information.
"""
# If we are in Python 3, we will verify that the environment is
# sane at this point of reject further execution to avoid a
# broken script.
if not PY2:
try:
import locale
fs_enc = codecs.lookup(locale.getpreferredencoding()).name
except Exception:
fs_enc = 'ascii'
if fs_enc == 'ascii':
raise RuntimeError('Click will abort further execution '
'because Python 3 was configured to use '
'ASCII as encoding for the environment. '
'Either switch to Python 2 or consult '
'http://click.pocoo.org/python3/ '
'for mitigation steps.')
else:
_check_for_unicode_literals()
if args is None:
args = sys.argv[1:]
else:
args = list(args)
if prog_name is None:
prog_name = make_str(os.path.basename(
sys.argv and sys.argv[0] or __file__))
# Hook for the Bash completion. This only activates if the Bash
# completion is actually enabled, otherwise this is quite a fast
# noop.
_bashcomplete(self, prog_name, complete_var)
try:
try:
with self.make_context(prog_name, args, **extra) as ctx:
rv = self.invoke(ctx)
if not standalone_mode:
return rv
ctx.exit()
except (EOFError, KeyboardInterrupt):
echo(file=sys.stderr)
raise Abort()
except ClickException as e:
if not standalone_mode:
raise
e.show()
sys.exit(e.exit_code)
except Abort:
if not standalone_mode:
raise
echo('Aborted!', file=sys.stderr)
sys.exit(1)
def __call__(self, *args, **kwargs):
"""Alias for :meth:`main`."""
return self.main(*args, **kwargs)
class Command(BaseCommand):
"""Commands are the basic building block of command line interfaces in
Click. A basic command handles command line parsing and might dispatch
more parsing to commands nested below it.
.. versionchanged:: 2.0
Added the `context_settings` parameter.
:param name: the name of the command to use unless a group overrides it.
:param context_settings: an optional dictionary with defaults that are
passed to the context object.
:param callback: the callback to invoke. This is optional.
:param params: the parameters to register with this command. This can
be either :class:`Option` or :class:`Argument` objects.
:param help: the help string to use for this command.
:param epilog: like the help string but it's printed at the end of the
help page after everything else.
:param short_help: the short help to use for this command. This is
shown on the command listing of the parent command.
:param add_help_option: by default each command registers a ``--help``
option. This can be disabled by this parameter.
"""
def __init__(self, name, context_settings=None, callback=None,
params=None, help=None, epilog=None, short_help=None,
options_metavar='[OPTIONS]', add_help_option=True):
BaseCommand.__init__(self, name, context_settings)
#: the callback to execute when the command fires. This might be
#: `None` in which case nothing happens.
self.callback = callback
#: the list of parameters for this command in the order they
#: should show up in the help page and execute. Eager parameters
#: will automatically be handled before non eager ones.
self.params = params or []
self.help = help
self.epilog = epilog
self.options_metavar = options_metavar
if short_help is None and help:
short_help = make_default_short_help(help)
self.short_help = short_help
self.add_help_option = add_help_option
def get_usage(self, ctx):
formatter = ctx.make_formatter()
self.format_usage(ctx, formatter)
return formatter.getvalue().rstrip('\n')
def get_params(self, ctx):
rv = self.params
help_option = self.get_help_option(ctx)
if help_option is not None:
rv = rv + [help_option]
return rv
def format_usage(self, ctx, formatter):
"""Writes the usage line into the formatter."""
pieces = self.collect_usage_pieces(ctx)
formatter.write_usage(ctx.command_path, ' '.join(pieces))
def collect_usage_pieces(self, ctx):
    """Return all the pieces that make up the usage line, as a list
    of strings starting with the options metavar.
    """
    pieces = [self.options_metavar]
    for param in self.get_params(ctx):
        for piece in param.get_usage_pieces(ctx):
            pieces.append(piece)
    return pieces
def get_help_option_names(self, ctx):
    """Return the names usable for the implicit help option: the
    context's configured help names minus any already claimed by a
    declared parameter.
    """
    names = set(ctx.help_option_names)
    for param in self.params:
        names -= set(param.opts)
        names -= set(param.secondary_opts)
    return names
def get_help_option(self, ctx):
    """Returns the help option object, or `None` when no help option
    should be attached (no free names left, or disabled).
    """
    help_options = self.get_help_option_names(ctx)
    if not help_options or not self.add_help_option:
        return
    def show_help(ctx, param, value):
        # Eager flag callback: print the help page and exit as soon as
        # the flag is seen, unless we are resilient parsing (e.g. for
        # completion), in which case do nothing.
        if value and not ctx.resilient_parsing:
            echo(ctx.get_help(), color=ctx.color)
            ctx.exit()
    return Option(help_options, is_flag=True,
                  is_eager=True, expose_value=False,
                  callback=show_help,
                  help='Show this message and exit.')
def make_parser(self, ctx):
    """Creates the underlying option parser for this command."""
    parser = OptionParser(ctx)
    # Propagate the context's parsing behaviour onto the parser.
    parser.allow_interspersed_args = ctx.allow_interspersed_args
    parser.ignore_unknown_options = ctx.ignore_unknown_options
    # Let every parameter (including the implicit help option)
    # register itself with the parser.
    for param in self.get_params(ctx):
        param.add_to_parser(parser, ctx)
    return parser
def get_help(self, ctx):
    """Render the full help page for this command and return it as a
    string without the trailing newline.
    """
    formatter = ctx.make_formatter()
    self.format_help(ctx, formatter)
    text = formatter.getvalue()
    return text.rstrip('\n')
def format_help(self, ctx, formatter):
    """Write the complete help page into the formatter.

    Delegates, in order, to :meth:`format_usage`,
    :meth:`format_help_text`, :meth:`format_options` and
    :meth:`format_epilog`.
    """
    sections = (self.format_usage, self.format_help_text,
                self.format_options, self.format_epilog)
    for write_section in sections:
        write_section(ctx, formatter)
def format_help_text(self, ctx, formatter):
    """Write the help text into the formatter, if there is any."""
    if not self.help:
        return
    formatter.write_paragraph()
    with formatter.indentation():
        formatter.write_text(self.help)
def format_options(self, ctx, formatter):
    """Write an ``Options`` section with one row per parameter that
    provides a help record; omit the section when no rows exist.
    """
    records = [record
               for record in (param.get_help_record(ctx)
                              for param in self.get_params(ctx))
               if record is not None]
    if records:
        with formatter.section('Options'):
            formatter.write_dl(records)
def format_epilog(self, ctx, formatter):
    """Write the epilog into the formatter, if there is one."""
    if not self.epilog:
        return
    formatter.write_paragraph()
    with formatter.indentation():
        formatter.write_text(self.epilog)
def parse_args(self, ctx, args):
    """Parse ``args`` for this command, storing results on ``ctx``."""
    # Build a parser from the declared parameters and let it split the
    # raw argument list into matched options and positional leftovers.
    parser = self.make_parser(ctx)
    opts, args, param_order = parser.parse_args(args=args)
    # Process parameters in command-line appearance order (eager ones
    # first) so parameter callbacks observe a stable ordering.
    for param in iter_params_for_processing(
            param_order, self.get_params(ctx)):
        value, args = param.handle_parse_result(ctx, opts, args)
    # Anything left over is an error unless the context explicitly
    # allows extra arguments or we are in resilient-parsing mode.
    if args and not ctx.allow_extra_args and not ctx.resilient_parsing:
        ctx.fail('Got unexpected extra argument%s (%s)'
                 % (len(args) != 1 and 's' or '',
                    ' '.join(map(make_str, args))))
    ctx.args = args
    return args
def invoke(self, ctx):
    """Given a context, this invokes the attached callback (if it exists)
    in the right way.
    """
    if self.callback is None:
        return
    return ctx.invoke(self.callback, **ctx.params)
class MultiCommand(Command):
    """A multi command is the basic implementation of a command that
    dispatches to subcommands.  The most common version is the
    :class:`Group`.

    :param invoke_without_command: this controls how the multi command itself
                                   is invoked.  By default it's only invoked
                                   if a subcommand is provided.
    :param no_args_is_help: this controls what happens if no arguments are
                            provided.  This option is enabled by default if
                            `invoke_without_command` is disabled or disabled
                            if it's enabled.  If enabled this will add
                            ``--help`` as argument if no arguments are
                            passed.
    :param subcommand_metavar: the string that is used in the documentation
                               to indicate the subcommand place.
    :param chain: if this is set to `True` chaining of multiple subcommands
                  is enabled.  This restricts the form of commands in that
                  they cannot have optional arguments but it allows
                  multiple commands to be chained together.
    :param result_callback: the result callback to attach to this multi
                            command.
    """
    # A multi command accepts extra positional arguments (they are the
    # subcommand invocation) and stops interspersing options with them.
    allow_extra_args = True
    allow_interspersed_args = False

    def __init__(self, name=None, invoke_without_command=False,
                 no_args_is_help=None, subcommand_metavar=None,
                 chain=False, result_callback=None, **attrs):
        Command.__init__(self, name, **attrs)
        # Showing help on no arguments defaults to the inverse of
        # invoking the group body without a subcommand.
        if no_args_is_help is None:
            no_args_is_help = not invoke_without_command
        self.no_args_is_help = no_args_is_help
        self.invoke_without_command = invoke_without_command
        if subcommand_metavar is None:
            if chain:
                subcommand_metavar = SUBCOMMANDS_METAVAR
            else:
                subcommand_metavar = SUBCOMMAND_METAVAR
        self.subcommand_metavar = subcommand_metavar
        self.chain = chain
        #: The result callback that is stored.  This can be set or
        #: overridden with the :func:`resultcallback` decorator.
        self.result_callback = result_callback

    def collect_usage_pieces(self, ctx):
        # Append the subcommand placeholder after the regular pieces.
        rv = Command.collect_usage_pieces(self, ctx)
        rv.append(self.subcommand_metavar)
        return rv

    def format_options(self, ctx, formatter):
        # After the regular options section, list the subcommands.
        Command.format_options(self, ctx, formatter)
        self.format_commands(ctx, formatter)

    def resultcallback(self, replace=False):
        """Adds a result callback to the chain command.  By default if a
        result callback is already registered this will chain them but
        this can be disabled with the `replace` parameter.  The result
        callback is invoked with the return value of the subcommand
        (or the list of return values from all subcommands if chaining
        is enabled) as well as the parameters as they would be passed
        to the main callback.

        Example::

            @click.group()
            @click.option('-i', '--input', default=23)
            def cli(input):
                return 42

            @cli.resultcallback()
            def process_result(result, input):
                return result + input

        .. versionadded:: 3.0

        :param replace: if set to `True` an already existing result
                        callback will be removed.
        """
        def decorator(f):
            old_callback = self.result_callback
            if old_callback is None or replace:
                self.result_callback = f
                return f
            def function(__value, *args, **kwargs):
                # Chain: feed the old callback's result into the new one.
                return f(old_callback(__value, *args, **kwargs),
                         *args, **kwargs)
            self.result_callback = rv = update_wrapper(function, f)
            return rv
        return decorator

    def format_commands(self, ctx, formatter):
        """Extra format methods for multi methods that adds all the commands
        after the options.
        """
        rows = []
        for subcommand in self.list_commands(ctx):
            cmd = self.get_command(ctx, subcommand)
            # What is this, the tool lied about a command.  Ignore it
            if cmd is None:
                continue
            help = cmd.short_help or ''
            rows.append((subcommand, help))
        if rows:
            with formatter.section('Commands'):
                formatter.write_dl(rows)

    def parse_args(self, ctx, args):
        # Show the help page when invoked with no arguments at all,
        # if that behaviour is enabled.
        if not args and self.no_args_is_help and not ctx.resilient_parsing:
            echo(ctx.get_help(), color=ctx.color)
            ctx.exit()
        return Command.parse_args(self, ctx, args)

    def invoke(self, ctx):
        def _process_result(value):
            # Run the stored result callback (if any) over the value.
            if self.result_callback is not None:
                value = ctx.invoke(self.result_callback, value,
                                   **ctx.params)
            return value
        if not ctx.args:
            # If we are invoked without command the chain flag controls
            # how this happens.  If we are not in chain mode, the return
            # value here is the return value of the command.
            # If however we are in chain mode, the return value is the
            # return value of the result processor invoked with an empty
            # list (which means that no subcommand actually was executed).
            if self.invoke_without_command:
                if not self.chain:
                    return Command.invoke(self, ctx)
                with ctx:
                    Command.invoke(self, ctx)
                    return _process_result([])
            ctx.fail('Missing command.')
        args = ctx.args

        # If we're not in chain mode, we only allow the invocation of a
        # single command but we also inform the current context about the
        # name of the command to invoke.
        if not self.chain:
            # Make sure the context is entered so we do not clean up
            # resources until the result processor has worked.
            with ctx:
                cmd_name, cmd, args = self.resolve_command(ctx, args)
                ctx.invoked_subcommand = cmd_name
                Command.invoke(self, ctx)
                sub_ctx = cmd.make_context(cmd_name, args, parent=ctx)
                with sub_ctx:
                    return _process_result(sub_ctx.command.invoke(sub_ctx))

        # In chain mode we create the contexts step by step, but after the
        # base command has been invoked.  Because at that point we do not
        # know the subcommands yet, the invoked subcommand attribute is
        # set to ``*`` to inform the command that subcommands are executed
        # but nothing else.
        with ctx:
            ctx.invoked_subcommand = args and '*' or None
            Command.invoke(self, ctx)

            # Otherwise we make every single context and invoke them in a
            # chain.  In that case the return value to the result processor
            # is the list of all invoked subcommand's results.
            contexts = []
            while args:
                cmd_name, cmd, args = self.resolve_command(ctx, args)
                sub_ctx = cmd.make_context(cmd_name, args, parent=ctx,
                                           allow_extra_args=True,
                                           allow_interspersed_args=False)
                contexts.append(sub_ctx)
                args = sub_ctx.args

            rv = []
            for sub_ctx in contexts:
                with sub_ctx:
                    rv.append(sub_ctx.command.invoke(sub_ctx))
            return _process_result(rv)

    def resolve_command(self, ctx, args):
        cmd_name = make_str(args[0])
        original_cmd_name = cmd_name

        # Get the command
        cmd = self.get_command(ctx, cmd_name)

        # If we can't find the command but there is a normalization
        # function available, we try with that one.
        if cmd is None and ctx.token_normalize_func is not None:
            cmd_name = ctx.token_normalize_func(cmd_name)
            cmd = self.get_command(ctx, cmd_name)

        # If we don't find the command we want to show an error message
        # to the user that it was not provided.  However, there is
        # something else we should do: if the first argument looks like
        # an option we want to kick off parsing again for arguments to
        # resolve things like --help which now should go to the main
        # place.
        if cmd is None:
            if split_opt(cmd_name)[0]:
                self.parse_args(ctx, ctx.args)
            ctx.fail('No such command "%s".' % original_cmd_name)

        return cmd_name, cmd, args[1:]

    def get_command(self, ctx, cmd_name):
        """Given a context and a command name, this returns a
        :class:`Command` object if it exists or returns `None`.
        """
        raise NotImplementedError()

    def list_commands(self, ctx):
        """Returns a list of subcommand names in the order they should
        appear.
        """
        return []
class Group(MultiCommand):
    """A group allows a command to have subcommands attached.  This is the
    most common way to implement nesting in Click.

    :param commands: a dictionary of commands.
    """

    def __init__(self, name=None, commands=None, **attrs):
        MultiCommand.__init__(self, name, **attrs)
        #: the registered subcommands by their exported names.
        self.commands = commands or {}

    def add_command(self, cmd, name=None):
        """Registers another :class:`Command` with this group.  If the name
        is not provided, the name of the command is used.
        """
        name = name or cmd.name
        if name is None:
            raise TypeError('Command has no name.')
        self.commands[name] = cmd

    def command(self, *args, **kwargs):
        """A shortcut decorator for declaring and attaching a command to
        the group.  This takes the same arguments as :func:`command` but
        immediately registers the created command with this instance by
        calling into :meth:`add_command`.
        """
        def decorator(f):
            cmd = command(*args, **kwargs)(f)
            self.add_command(cmd)
            return cmd
        return decorator

    def group(self, *args, **kwargs):
        """A shortcut decorator for declaring and attaching a group to
        the group.  This takes the same arguments as :func:`group` but
        immediately registers the created command with this instance by
        calling into :meth:`add_command`.
        """
        def decorator(f):
            cmd = group(*args, **kwargs)(f)
            self.add_command(cmd)
            return cmd
        return decorator

    def get_command(self, ctx, cmd_name):
        # Simple dictionary lookup; `None` when the name is unknown.
        return self.commands.get(cmd_name)

    def list_commands(self, ctx):
        # Subcommands are listed in sorted (alphabetical) order.
        return sorted(self.commands)
class CommandCollection(MultiCommand):
    """A command collection is a multi command that merges multiple multi
    commands together into one.  This is a straightforward implementation
    that accepts a list of different multi commands as sources and
    provides all the commands for each of them.
    """

    def __init__(self, name=None, sources=None, **attrs):
        MultiCommand.__init__(self, name, **attrs)
        #: The list of registered multi commands.
        self.sources = sources or []

    def add_source(self, multi_cmd):
        """Adds a new multi command to the chain dispatcher."""
        self.sources.append(multi_cmd)

    def get_command(self, ctx, cmd_name):
        # First source that knows the command wins.
        for source in self.sources:
            rv = source.get_command(ctx, cmd_name)
            if rv is not None:
                return rv

    def list_commands(self, ctx):
        # Union of the names provided by all sources, sorted.
        rv = set()
        for source in self.sources:
            rv.update(source.list_commands(ctx))
        return sorted(rv)
class Parameter(object):
    # NOTE: raw docstring to avoid the invalid ``\s`` escape sequence
    # warning in the original non-raw string.
    r"""A parameter to a command comes in two versions: they are either
    :class:`Option`\s or :class:`Argument`\s.  Other subclasses are currently
    not supported by design as some of the internals for parsing are
    intentionally not finalized.

    Some settings are supported by both options and arguments.

    .. versionchanged:: 2.0
       Changed signature for parameter callback to also be passed the
       parameter.  In Click 2.0, the old callback format will still work,
       but it will raise a warning to give you change to migrate the
       code easier.

    :param param_decls: the parameter declarations for this option or
                        argument.  This is a list of flags or argument
                        names.
    :param type: the type that should be used.  Either a :class:`ParamType`
                 or a Python type.  The later is converted into the former
                 automatically if supported.
    :param required: controls if this is optional or not.
    :param default: the default value if omitted.  This can also be a callable,
                    in which case it's invoked when the default is needed
                    without any arguments.
    :param callback: a callback that should be executed after the parameter
                     was matched.  This is called as ``fn(ctx, param,
                     value)`` and needs to return the value.  Before Click
                     2.0, the signature was ``(ctx, value)``.
    :param nargs: the number of arguments to match.  If not ``1`` the return
                  value is a tuple instead of single value.  The default for
                  nargs is ``1`` (except if the type is a tuple, then it's
                  the arity of the tuple).
    :param metavar: how the value is represented in the help page.
    :param expose_value: if this is `True` then the value is passed onwards
                         to the command callback and stored on the context,
                         otherwise it's skipped.
    :param is_eager: eager values are processed before non eager ones.  This
                     should not be set for arguments or it will inverse the
                     order of processing.
    :param envvar: a string or list of strings that are environment variables
                   that should be checked.
    """
    param_type_name = 'parameter'

    def __init__(self, param_decls=None, type=None, required=False,
                 default=None, callback=None, nargs=None, metavar=None,
                 expose_value=True, is_eager=False, envvar=None):
        self.name, self.opts, self.secondary_opts = \
            self._parse_decls(param_decls or (), expose_value)

        self.type = convert_type(type, default)

        # Default nargs to what the type tells us if we have that
        # information available.
        if nargs is None:
            if self.type.is_composite:
                nargs = self.type.arity
            else:
                nargs = 1

        self.required = required
        self.callback = callback
        self.nargs = nargs
        self.multiple = False
        self.expose_value = expose_value
        self.default = default
        self.is_eager = is_eager
        self.metavar = metavar
        self.envvar = envvar

    @property
    def human_readable_name(self):
        """Returns the human readable name of this parameter.  This is the
        same as the name for options, but the metavar for arguments.
        """
        return self.name

    def make_metavar(self):
        # Explicit metavar always wins; otherwise derive one from the
        # type, falling back to the upper-cased type name.  Repeated
        # parameters are marked with an ellipsis.
        if self.metavar is not None:
            return self.metavar
        metavar = self.type.get_metavar(self)
        if metavar is None:
            metavar = self.type.name.upper()
        if self.nargs != 1:
            metavar += '...'
        return metavar

    def get_default(self, ctx):
        """Given a context variable this calculates the default value."""
        # Otherwise go with the regular default.
        if callable(self.default):
            rv = self.default()
        else:
            rv = self.default
        return self.type_cast_value(ctx, rv)

    def add_to_parser(self, parser, ctx):
        # Base class registers nothing; subclasses hook into the parser.
        pass

    def consume_value(self, ctx, opts):
        # Resolution order: parsed options, the context's defaults map,
        # then the environment.
        value = opts.get(self.name)
        if value is None:
            value = ctx.lookup_default(self.name)
        if value is None:
            value = self.value_from_envvar(ctx)
        return value

    def type_cast_value(self, ctx, value):
        """Given a value this runs it properly through the type system.
        This automatically handles things like `nargs` and `multiple` as
        well as composite types.
        """
        if self.type.is_composite:
            if self.nargs <= 1:
                raise TypeError('Attempted to invoke composite type '
                                'but nargs has been set to %s. This is '
                                'not supported; nargs needs to be set to '
                                'a fixed value > 1.' % self.nargs)
            if self.multiple:
                return tuple(self.type(x or (), self, ctx) for x in value or ())
            return self.type(value or (), self, ctx)
        def _convert(value, level):
            # Recursively convert nested tuples: one level per active
            # dimension (nargs != 1 and/or multiple).
            if level == 0:
                return self.type(value, self, ctx)
            return tuple(_convert(x, level - 1) for x in value or ())
        return _convert(value, (self.nargs != 1) + bool(self.multiple))

    def process_value(self, ctx, value):
        """Given a value and context this runs the logic to convert the
        value as necessary.
        """
        # If the value we were given is None we do nothing.  This way
        # code that calls this can easily figure out if something was
        # not provided.  Otherwise it would be converted into an empty
        # tuple for multiple invocations which is inconvenient.
        if value is not None:
            return self.type_cast_value(ctx, value)

    def value_is_missing(self, value):
        # A value is missing when it's None, or the empty tuple for
        # multi-valued parameters.
        if value is None:
            return True
        if (self.nargs != 1 or self.multiple) and value == ():
            return True
        return False

    def full_process_value(self, ctx, value):
        # Convert, fall back to the default, then enforce "required".
        value = self.process_value(ctx, value)
        if value is None:
            value = self.get_default(ctx)
        if self.required and self.value_is_missing(value):
            raise MissingParameter(ctx=ctx, param=self)
        return value

    def resolve_envvar_value(self, ctx):
        # Look up the configured environment variable(s); the first one
        # that is set wins.
        if self.envvar is None:
            return
        if isinstance(self.envvar, (tuple, list)):
            for envvar in self.envvar:
                rv = os.environ.get(envvar)
                if rv is not None:
                    return rv
        else:
            return os.environ.get(self.envvar)

    def value_from_envvar(self, ctx):
        rv = self.resolve_envvar_value(ctx)
        # Multi-valued parameters need the raw string split by the type.
        if rv is not None and self.nargs != 1:
            rv = self.type.split_envvar_value(rv)
        return rv

    def handle_parse_result(self, ctx, opts, args):
        # Attach this parameter to any usage errors raised while its
        # value is resolved and processed.
        with augment_usage_errors(ctx, param=self):
            value = self.consume_value(ctx, opts)
            try:
                value = self.full_process_value(ctx, value)
            except Exception:
                if not ctx.resilient_parsing:
                    raise
                value = None
            if self.callback is not None:
                try:
                    value = invoke_param_callback(
                        self.callback, ctx, self, value)
                except Exception:
                    if not ctx.resilient_parsing:
                        raise

        if self.expose_value:
            ctx.params[self.name] = value
        return value, args

    def get_help_record(self, ctx):
        # Base parameters do not show up on the help page.
        pass

    def get_usage_pieces(self, ctx):
        return []
class Option(Parameter):
    """Options are usually optional values on the command line and
    have some extra features that arguments don't have.

    All other parameters are passed onwards to the parameter constructor.

    :param show_default: controls if the default value should be shown on the
                         help page.  Normally, defaults are not shown.
    :param prompt: if set to `True` or a non empty string then the user will
                   be prompted for input if not set.  If set to `True` the
                   prompt will be the option name capitalized.
    :param confirmation_prompt: if set then the value will need to be confirmed
                                if it was prompted for.
    :param hide_input: if this is `True` then the input on the prompt will be
                       hidden from the user.  This is useful for password
                       input.
    :param is_flag: forces this option to act as a flag.  The default is
                    auto detection.
    :param flag_value: which value should be used for this flag if it's
                       enabled.  This is set to a boolean automatically if
                       the option string contains a slash to mark two options.
    :param multiple: if this is set to `True` then the argument is accepted
                     multiple times and recorded.  This is similar to ``nargs``
                     in how it works but supports arbitrary number of
                     arguments.
    :param count: this flag makes an option increment an integer.
    :param allow_from_autoenv: if this is enabled then the value of this
                               parameter will be pulled from an environment
                               variable in case a prefix is defined on the
                               context.
    :param help: the help string.
    """
    param_type_name = 'option'

    def __init__(self, param_decls=None, show_default=False,
                 prompt=False, confirmation_prompt=False,
                 hide_input=False, is_flag=None, flag_value=None,
                 multiple=False, count=False, allow_from_autoenv=True,
                 type=None, help=None, **attrs):
        # Remember whether the caller supplied a default before the base
        # class stores one; flags and counters fill in their own.
        default_is_missing = attrs.get('default', _missing) is _missing
        Parameter.__init__(self, param_decls, type=type, **attrs)

        if prompt is True:
            prompt_text = self.name.replace('_', ' ').capitalize()
        elif prompt is False:
            prompt_text = None
        else:
            prompt_text = prompt
        self.prompt = prompt_text
        self.confirmation_prompt = confirmation_prompt
        self.hide_input = hide_input

        # Flags
        if is_flag is None:
            if flag_value is not None:
                # Implicitly a flag because flag_value was set.
                is_flag = True
            else:
                # Implicitly a flag because a secondary (off) option
                # was declared with a slash.
                is_flag = bool(self.secondary_opts)
        if is_flag and default_is_missing:
            self.default = False
        if flag_value is None:
            flag_value = not self.default
        self.is_flag = is_flag
        self.flag_value = flag_value
        if self.is_flag and isinstance(self.flag_value, bool) \
           and type is None:
            self.type = BOOL
            self.is_bool_flag = True
        else:
            self.is_bool_flag = False

        # Counting
        self.count = count
        if count:
            if type is None:
                self.type = IntRange(min=0)
            if default_is_missing:
                self.default = 0

        self.multiple = multiple
        self.allow_from_autoenv = allow_from_autoenv
        self.help = help
        self.show_default = show_default

        # Sanity check for stuff we don't support
        if __debug__:
            if self.nargs < 0:
                raise TypeError('Options cannot have nargs < 0')
            if self.prompt and self.is_flag and not self.is_bool_flag:
                raise TypeError('Cannot prompt for flags that are not bools.')
            if not self.is_bool_flag and self.secondary_opts:
                raise TypeError('Got secondary option for non boolean flag.')
            if self.is_bool_flag and self.hide_input \
               and self.prompt is not None:
                raise TypeError('Hidden input does not work with boolean '
                                'flag prompts.')
            if self.count:
                if self.multiple:
                    raise TypeError('Options cannot be multiple and count '
                                    'at the same time.')
                elif self.is_flag:
                    raise TypeError('Options cannot be count and flags at '
                                    'the same time.')

    def _parse_decls(self, decls, expose_value):
        opts = []
        secondary_opts = []
        name = None
        possible_names = []

        for decl in decls:
            if isidentifier(decl):
                # A bare identifier names the parameter explicitly.
                if name is not None:
                    raise TypeError('Name defined twice')
                name = decl
            else:
                # "/" (or ";" when the option itself starts with a
                # slash) separates the on-flag from the off-flag.
                split_char = decl[:1] == '/' and ';' or '/'
                if split_char in decl:
                    first, second = decl.split(split_char, 1)
                    first = first.rstrip()
                    possible_names.append(split_opt(first))
                    opts.append(first)
                    secondary_opts.append(second.lstrip())
                else:
                    possible_names.append(split_opt(decl))
                    opts.append(decl)

        if name is None and possible_names:
            # Derive the name from the longest option prefix so that
            # "--long-name" beats "-l".
            possible_names.sort(key=lambda x: len(x[0]))
            name = possible_names[-1][1].replace('-', '_').lower()
            if not isidentifier(name):
                name = None

        if name is None:
            if not expose_value:
                return None, opts, secondary_opts
            raise TypeError('Could not determine name for option')

        if not opts and not secondary_opts:
            raise TypeError('No options defined but a name was passed (%s). '
                            'Did you mean to declare an argument instead '
                            'of an option?' % name)

        return name, opts, secondary_opts

    def add_to_parser(self, parser, ctx):
        kwargs = {
            'dest': self.name,
            'nargs': self.nargs,
            'obj': self,
        }
        if self.multiple:
            action = 'append'
        elif self.count:
            action = 'count'
        else:
            action = 'store'

        if self.is_flag:
            # Flags take no value; they store a constant instead.
            kwargs.pop('nargs', None)
            if self.is_bool_flag and self.secondary_opts:
                parser.add_option(self.opts, action=action + '_const',
                                  const=True, **kwargs)
                parser.add_option(self.secondary_opts, action=action +
                                  '_const', const=False, **kwargs)
            else:
                parser.add_option(self.opts, action=action + '_const',
                                  const=self.flag_value,
                                  **kwargs)
        else:
            kwargs['action'] = action
            parser.add_option(self.opts, **kwargs)

    def get_help_record(self, ctx):
        any_prefix_is_slash = []

        def _write_opts(opts):
            rv, any_slashes = join_options(opts)
            if any_slashes:
                any_prefix_is_slash[:] = [True]
            if not self.is_flag and not self.count:
                rv += ' ' + self.make_metavar()
            return rv

        rv = [_write_opts(self.opts)]
        if self.secondary_opts:
            rv.append(_write_opts(self.secondary_opts))

        help = self.help or ''
        extra = []
        if self.default is not None and self.show_default:
            extra.append('default: %s' % (
                ', '.join('%s' % d for d in self.default)
                if isinstance(self.default, (list, tuple))
                else self.default, ))
        if self.required:
            extra.append('required')
        if extra:
            help = '%s[%s]' % (help and help + ' ' or '', '; '.join(extra))

        return ((any_prefix_is_slash and '; ' or ' / ').join(rv), help)

    def get_default(self, ctx):
        # If we're a non boolean flag our default is more complex because
        # we need to look at all flags in the same group to figure out
        # if we're the default one in which case we return the flag
        # value as default.
        if self.is_flag and not self.is_bool_flag:
            for param in ctx.command.params:
                if param.name == self.name and param.default:
                    return param.flag_value
            return None
        return Parameter.get_default(self, ctx)

    def prompt_for_value(self, ctx):
        """This is an alternative flow that can be activated in the full
        value processing if a value does not exist.  It will prompt the
        user until a valid value exists and then returns the processed
        value as result.
        """
        # Calculate the default before prompting anything to be stable.
        default = self.get_default(ctx)

        # If this is a prompt for a flag we need to handle this
        # differently.
        if self.is_bool_flag:
            return confirm(self.prompt, default)

        return prompt(self.prompt, default=default,
                      hide_input=self.hide_input,
                      confirmation_prompt=self.confirmation_prompt,
                      value_proc=lambda x: self.process_value(ctx, x))

    def resolve_envvar_value(self, ctx):
        # Explicit envvar declarations take precedence over the
        # auto-generated "<PREFIX>_<NAME>" variable.
        rv = Parameter.resolve_envvar_value(self, ctx)
        if rv is not None:
            return rv
        if self.allow_from_autoenv and \
           ctx.auto_envvar_prefix is not None:
            envvar = '%s_%s' % (ctx.auto_envvar_prefix, self.name.upper())
            return os.environ.get(envvar)

    def value_from_envvar(self, ctx):
        rv = self.resolve_envvar_value(ctx)
        if rv is None:
            return None
        # Split (and, for multiple+nargs, re-batch) the raw string into
        # the nesting depth the option expects.
        value_depth = (self.nargs != 1) + bool(self.multiple)
        if value_depth > 0 and rv is not None:
            rv = self.type.split_envvar_value(rv)
            if self.multiple and self.nargs != 1:
                rv = batch(rv, self.nargs)
        return rv

    def full_process_value(self, ctx, value):
        # Prompt the user when no value was supplied anywhere else.
        if value is None and self.prompt is not None \
           and not ctx.resilient_parsing:
            return self.prompt_for_value(ctx)
        return Parameter.full_process_value(self, ctx, value)
class Argument(Parameter):
    """Arguments are positional parameters to a command.  They generally
    provide fewer features than options but can have infinite ``nargs``
    and are required by default.

    All parameters are passed onwards to the parameter constructor.
    """
    param_type_name = 'argument'

    def __init__(self, param_decls, required=None, **attrs):
        # Arguments are required by default, unless a default value was
        # supplied or nargs makes them variadic/optional.
        if required is None:
            if attrs.get('default') is not None:
                required = False
            else:
                required = attrs.get('nargs', 1) > 0
        Parameter.__init__(self, param_decls, required=required, **attrs)

    @property
    def human_readable_name(self):
        # Arguments are identified by their metavar on help pages.
        if self.metavar is not None:
            return self.metavar
        return self.name.upper()

    def make_metavar(self):
        # Optional arguments are bracketed, variadic ones get "...".
        if self.metavar is not None:
            return self.metavar
        var = self.name.upper()
        if not self.required:
            var = '[%s]' % var
        if self.nargs != 1:
            var += '...'
        return var

    def _parse_decls(self, decls, expose_value):
        if not decls:
            if not expose_value:
                return None, [], []
            raise TypeError('Could not determine name for argument')
        if len(decls) == 1:
            # One declaration serves as both the visible argument and
            # the (normalized) parameter name.
            name = arg = decls[0]
            name = name.replace('-', '_').lower()
        elif len(decls) == 2:
            name, arg = decls
        else:
            raise TypeError('Arguments take exactly one or two '
                            'parameter declarations, got %d' % len(decls))
        return name, [arg], []

    def get_usage_pieces(self, ctx):
        return [self.make_metavar()]

    def add_to_parser(self, parser, ctx):
        parser.add_argument(dest=self.name, nargs=self.nargs,
                            obj=self)
# Circular dependency between decorators and core
from .decorators import command, group<|fim▁end|>
|
parsing methods that do not depend on the Click parser.
|
<|file_name|>better_zoom_test_case.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: Enthought, Inc.
""" Tests for the BetterZoom Chaco tool """
import unittest
import numpy
from chaco.api import create_line_plot
from chaco.tools.api import BetterZoom
from enable.testing import EnableTestAssistant
class TestBetterZoomTool(EnableTestAssistant, unittest.TestCase):
""" Tests for the BetterZoom Chaco tool """
def setUp(self):
    # Build a simple 10-point line plot with a mocked window so the
    # zoom tool can be exercised without a real GUI backend.
    values = numpy.arange(10)
    self.plot = create_line_plot((values, values))
    self.plot.bounds = [100, 100]
    self.plot._window = self.create_mock_window()
    self.tool = BetterZoom(component=self.plot)
    self.plot.active_tool = self.tool
    self.plot.do_layout()
def tearDown(self):
    # Drop references so every test starts from a fresh plot/tool pair.
    del self.tool
    del self.plot
def test_default_position(self):
tool = self.tool
<|fim▁hole|> # this doesn't throw an exception
self.send_key(tool, '+')
self.assertEqual(tool.position, (50, 50))
# expected behaviour for a normal zoom in operation
self.assertNotEqual(tool._index_factor, 1.0)
self.assertNotEqual(tool._value_factor, 1.0)
self.assertEqual(len(tool._history), 2)<|fim▁end|>
| |
<|file_name|>get.cc<|end_file_name|><|fim▁begin|>// Copyright (C) 2007, 2008, 2009 EPITA Research and Development Laboratory (LRDE)
//
// This file is part of Olena.
//
// Olena is free software: you can redistribute it and/or modify it under
// the terms of the GNU General Public License as published by the Free
// Software Foundation, version 2 of the License.
//
// Olena is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Olena. If not, see <http://www.gnu.org/licenses/>.
//
// As a special exception, you may use this file as part of a free
// software project without restriction. Specifically, if other files
// instantiate templates or use macros or inline functions from this
// file, or you compile this file and link it with other files to produce
// an executable, this file does not by itself cause the resulting
// executable to be covered by the GNU General Public License. This
// exception does not however invalidate any other reasons why the
// executable file might be covered by the GNU General Public License.
#include <mln/core/image/image2d.hh>
#include <mln/core/image/dmorph/sub_image.hh>
#include <mln/core/image/dmorph/image_if.hh>
#include <mln/fun/p2b/chess.hh>
#include <mln/border/get.hh>
#include <mln/literal/origin.hh>
// Point-to-boolean function object: returns whether a 2-D point lies
// inside the box given at construction time.
struct f_box2d_t : mln::Function_v2b< f_box2d_t >
{
  f_box2d_t(const mln::box2d& b)
    : b_(b)
  {
  }

  // The box defining the accepted domain.
  mln::box2d b_;

  bool operator()(const mln::point2d& p) const
  {
    return b_.has(p);
  }
};
int main()
{
using namespace mln;<|fim▁hole|>
box2d b(literal::origin, point2d(1,1));
f_box2d_t f_b(b);
I ima(3,3, 51);
mln_assertion(border::get(ima) == 51);
mln_assertion(ima.has(point2d(2,2)) == true);
sub_image<I, box2d> sub(ima, b);
mln_assertion(sub.has(point2d(2,2)) == false &&
sub.has(point2d(2,2)) == false);
mln_assertion(border::get(sub) == 0);
image_if<I, f_box2d_t> imaif(ima, f_b);
mln_assertion(imaif.has(point2d(2,2)) == false &&
ima.has(point2d(2,2)) == true);
mln_assertion(border::get(imaif) == 0);
mln_assertion(border::get((ima | b) | f_b) == 0);
}<|fim▁end|>
|
typedef image2d<int> I;
|
<|file_name|>references.py<|end_file_name|><|fim▁begin|>"""
Module responsible for translating reference sequence data into GA4GH native
objects.
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import hashlib
import json
import os
import random
import pysam
import ga4gh.datamodel as datamodel
import ga4gh.protocol as protocol
import ga4gh.exceptions as exceptions
DEFAULT_REFERENCESET_NAME = "Default"
"""
This is the name used for any reference set referred to in a BAM
file that does not provide the 'AS' tag in the @SQ header.
"""
class AbstractReferenceSet(datamodel.DatamodelObject):
"""
Class representing ReferenceSets. A ReferenceSet is a set of
References which typically comprise a reference assembly, such as
GRCh38.
"""
compoundIdClass = datamodel.ReferenceSetCompoundId
def __init__(self, localId):
super(AbstractReferenceSet, self).__init__(None, localId)
self._referenceIdMap = {}
self._referenceNameMap = {}
self._referenceIds = []
self._assemblyId = None
self._description = None
self._isDerived = False
self._ncbiTaxonId = None
self._sourceAccessions = []
self._sourceUri = None
def addReference(self, reference):
"""
Adds the specified reference to this ReferenceSet.
"""
id_ = reference.getId()
self._referenceIdMap[id_] = reference
self._referenceNameMap[reference.getLocalId()] = reference
self._referenceIds.append(id_)
def getReferences(self):
"""
Returns the References in this ReferenceSet.
"""
return [self._referenceIdMap[id_] for id_ in self._referenceIds]
def getNumReferences(self):
"""
Returns the number of references in this ReferenceSet.
"""
return len(self._referenceIds)
def getReferenceByIndex(self, index):
"""
Returns the reference at the specified index in this ReferenceSet.
"""
return self._referenceIdMap[self._referenceIds[index]]
def getReferenceByName(self, name):
"""
Returns the reference with the specified name.
"""
if name not in self._referenceNameMap:
raise exceptions.ReferenceNameNotFoundException(name)
return self._referenceNameMap[name]
def getReference(self, id_):
"""
Returns the Reference with the specified ID or raises a
ReferenceNotFoundException if it does not exist.
"""
if id_ not in self._referenceIdMap:
raise exceptions.ReferenceNotFoundException(id_)
return self._referenceIdMap[id_]
def getMd5Checksum(self):
"""
Returns the MD5 checksum for this reference set. This checksum is
calculated by making a list of `Reference.md5checksum` for all
`Reference`s in this set. We then sort this list, and take the
MD5 hash of all the strings concatenated together.
"""
references = sorted(
self.getReferences(),
key=lambda ref: ref.getMd5Checksum())
checksums = ''.join([ref.getMd5Checksum() for ref in references])
md5checksum = hashlib.md5(checksums).hexdigest()
return md5checksum
def getAssemblyId(self):
"""
Returns the assembly ID for this reference set.
This is the public id of this reference set, such as `GRCh37`
"""
return self._assemblyId
def getDescription(self):
"""
Returns the free text description of this reference set.
"""
return self._description
def getIsDerived(self):
"""
Returns True if this ReferenceSet is derived. A ReferenceSet
may be derived from a source if it contains additional sequences,
or some of the sequences within it are derived.
"""
return self._isDerived
def getSourceAccessions(self):
"""
Returns the list of source accession strings. These are all known
corresponding accession IDs in INSDC (GenBank/ENA/DDBJ) ideally
with a version number, e.g. `NC_000001.11`.
"""
return self._sourceAccessions
def getSourceUri(self):
"""
Returns the sourceURI for this ReferenceSet.
"""
return self._sourceUri
def getNcbiTaxonId(self):
"""
Returns the NCBI Taxon ID for this reference set. This is the
ID from http://www.ncbi.nlm.nih.gov/taxonomy (e.g. 9606->human)
indicating the species which this assembly is intended to model.
Note that contained `Reference`s may specify a different
`ncbiTaxonId`, as assemblies may contain reference sequences
which do not belong to the modeled species, e.g. EBV in a
human reference genome.
"""
return self._ncbiTaxonId
def toProtocolElement(self):
"""
Returns the GA4GH protocol representation of this ReferenceSet.
"""
ret = protocol.ReferenceSet()
ret.assemblyId = self.getAssemblyId()
ret.description = self.getDescription()
ret.id = self.getId()
ret.isDerived = self.getIsDerived()
ret.md5checksum = self.getMd5Checksum()
ret.ncbiTaxonId = self.getNcbiTaxonId()
ret.referenceIds = self._referenceIds
ret.sourceAccessions = self.getSourceAccessions()
ret.sourceURI = self.getSourceUri()
ret.name = self.getLocalId()
return ret
class AbstractReference(datamodel.DatamodelObject):
"""
Class representing References. A Reference is a canonical
assembled contig, intended to act as a reference coordinate space
for other genomic annotations. A single Reference might represent
the human chromosome 1, for instance.
"""
compoundIdClass = datamodel.ReferenceCompoundId
def __init__(self, parentContainer, localId):
super(AbstractReference, self).__init__(parentContainer, localId)
self._length = -1
self._md5checksum = ""
self._sourceUri = None
self._sourceAccessions = []
self._isDerived = False
self._sourceDivergence = None
self._ncbiTaxonId = None
def getLength(self):
"""
Returns the length of this reference's sequence string.
"""
return self._length
def getName(self):
"""
Returns the name of this reference, e.g., '22'.
"""
return self.getLocalId()
def getIsDerived(self):
"""
Returns True if this Reference is derived. A sequence X is said to be
derived from source sequence Y, if X and Y are of the same length and
the per-base sequence divergence at A/C/G/T bases is sufficiently
small. Two sequences derived from the same official sequence share the
same coordinates and annotations, and can be replaced with the official
sequence for certain use cases.
"""
return self._isDerived
def getSourceDivergence(self):
"""
Returns the source divergence for this reference. The sourceDivergence
is the fraction of non-indel bases that do not match the
reference this record was derived from.
"""
return self._sourceDivergence
def getSourceAccessions(self):
"""
Returns the list of source accession strings. These are all known
corresponding accession IDs in INSDC (GenBank/ENA/DDBJ) ideally
with a version number, e.g. `NC_000001.11`.
"""
return self._sourceAccessions
def getSourceUri(self):
"""
The URI from which the sequence was obtained. Specifies a FASTA format
file/string with one name, sequence pair.
"""
return self._sourceUri
def getNcbiTaxonId(self):
"""
Returns the NCBI Taxon ID for this reference. This is the
ID from http://www.ncbi.nlm.nih.gov/taxonomy (e.g. 9606->human)
indicating the species which this assembly is intended to model.
Note that contained `Reference`s may specify a different
`ncbiTaxonId`, as assemblies may contain reference sequences
which do not belong to the modeled species, e.g. EBV in a
human reference genome.
"""
return self._ncbiTaxonId
def getMd5Checksum(self):
"""
Returns the MD5 checksum uniquely representing this `Reference` as a
lower-case hexadecimal string, calculated as the MD5 of the upper-case
sequence excluding all whitespace characters.
"""
return self._md5checksum
def toProtocolElement(self):
"""
Returns the GA4GH protocol representation of this Reference.
"""
reference = protocol.Reference()
reference.id = self.getId()
reference.isDerived = self.getIsDerived()
reference.length = self.getLength()
reference.md5checksum = self.getMd5Checksum()
reference.name = self.getName()
reference.ncbiTaxonId = self.getNcbiTaxonId()
reference.sourceAccessions = self.getSourceAccessions()
reference.sourceDivergence = self.getSourceDivergence()
reference.sourceURI = self.getSourceUri()<|fim▁hole|> """
Checks to ensure that the query range is valid within this reference.
If not, raise ReferenceRangeErrorException.
"""
condition = (
(start < 0 or end > self.getLength()) or
start > end)
if condition:
raise exceptions.ReferenceRangeErrorException(
self.getId(), start, end)
def getBases(self, start, end):
"""
Returns the string representing the bases of this reference from
start (inclusive) to end (exclusive).
"""
raise NotImplemented()
##################################################################
#
# Simulated references
#
##################################################################
class SimulatedReferenceSet(AbstractReferenceSet):
"""
A simulated referenceSet
"""
def __init__(self, localId, randomSeed=0, numReferences=1):
super(SimulatedReferenceSet, self).__init__(localId)
self._randomSeed = randomSeed
self._randomGenerator = random.Random()
self._randomGenerator.seed(self._randomSeed)
self._description = "Simulated reference set"
self._assemblyId = str(random.randint(0, 2**32))
self._isDerived = bool(random.randint(0, 1))
self._ncbiTaxonId = random.randint(0, 2**16)
self._sourceAccessions = []
for i in range(random.randint(1, 3)):
self._sourceAccessions.append("sim_accession_{}".format(
random.randint(1, 2**32)))
self._sourceUri = "http://example.com/reference.fa"
for i in range(numReferences):
referenceSeed = self._randomGenerator.getrandbits(32)
referenceLocalId = "srs{}".format(i)
reference = SimulatedReference(
self, referenceLocalId, referenceSeed)
self.addReference(reference)
class SimulatedReference(AbstractReference):
"""
A simulated reference. Stores a random sequence of a given length, and
generates remaining attributes randomly.
"""
def __init__(self, parentContainer, localId, randomSeed=0, length=200):
super(SimulatedReference, self).__init__(parentContainer, localId)
rng = random.Random()
rng.seed(randomSeed)
self._length = length
bases = [rng.choice('ACGT') for _ in range(self._length)]
self._bases = ''.join(bases)
self._md5checksum = hashlib.md5(self._bases).hexdigest()
self._isDerived = bool(rng.randint(0, 1))
self._sourceDivergence = 0
if self._isDerived:
self._sourceDivergence = rng.uniform(0, 0.1)
self._ncbiTaxonId = random.randint(0, 2**16)
self._sourceAccessions = []
for i in range(random.randint(1, 3)):
self._sourceAccessions.append("sim_accession_{}".format(
random.randint(1, 2**32)))
self._sourceUri = "http://example.com/reference.fa"
def getBases(self, start, end):
self.checkQueryRange(start, end)
return self._bases[start:end]
##################################################################
#
# References based on htslib's FASTA file handling.
#
##################################################################
class HtslibReferenceSet(datamodel.PysamDatamodelMixin, AbstractReferenceSet):
"""
A referenceSet based on data on a file system
"""
def __init__(self, localId, dataDir, backend):
super(HtslibReferenceSet, self).__init__(localId)
self._dataDir = dataDir
self._setMetadata()
self._scanDataFiles(dataDir, ["*.fa.gz"])
def _setMetadata(self):
metadataFileName = '{}.json'.format(self._dataDir)
with open(metadataFileName) as metadataFile:
metadata = json.load(metadataFile)
try:
self._assemblyId = metadata['assemblyId']
self._description = metadata['description']
self._isDerived = metadata['isDerived']
self._ncbiTaxonId = metadata['ncbiTaxonId']
self._sourceAccessions = metadata['sourceAccessions']
self._sourceUri = metadata['sourceUri']
except KeyError as err:
raise exceptions.MissingReferenceSetMetadata(
metadataFileName, str(err))
def _addDataFile(self, path):
dirname, filename = os.path.split(path)
localId = filename.split(".")[0]
metadataFileName = os.path.join(dirname, "{}.json".format(localId))
with open(metadataFileName) as metadataFile:
metadata = json.load(metadataFile)
reference = HtslibReference(self, localId, path, metadata)
self.addReference(reference)
class HtslibReference(datamodel.PysamDatamodelMixin, AbstractReference):
"""
A reference based on data stored in a file on the file system
"""
def __init__(self, parentContainer, localId, dataFile, metadata):
super(HtslibReference, self).__init__(parentContainer, localId)
self._fastaFilePath = dataFile
fastaFile = self.getFileHandle(dataFile)
numReferences = len(fastaFile.references)
if numReferences != 1:
raise exceptions.NotExactlyOneReferenceException(
self._fastaFilePath, numReferences)
if fastaFile.references[0] != localId:
raise exceptions.InconsistentReferenceNameException(
self._fastaFilePath)
self._length = fastaFile.lengths[0]
try:
self._md5checksum = metadata["md5checksum"]
self._sourceUri = metadata["sourceUri"]
self._ncbiTaxonId = metadata["ncbiTaxonId"]
self._isDerived = metadata["isDerived"]
self._sourceDivergence = metadata["sourceDivergence"]
self._sourceAccessions = metadata["sourceAccessions"]
except KeyError as err:
raise exceptions.MissingReferenceMetadata(dataFile, str(err))
def getFastaFilePath(self):
"""
Returns the fasta file that this reference is derived from.
"""
return self._fastaFilePath
def openFile(self, dataFile):
return pysam.FastaFile(dataFile)
def getBases(self, start, end):
self.checkQueryRange(start, end)
fastaFile = self.getFileHandle(self._fastaFilePath)
# TODO we should have some error checking here...
bases = fastaFile.fetch(self.getLocalId(), start, end)
return bases<|fim▁end|>
|
return reference
def checkQueryRange(self, start, end):
|
<|file_name|>HTMLExtractorTest.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright 2017 Adobe Systems Incorporated
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*<|fim▁hole|> *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package io.sightly.tck.html;
import org.junit.Test;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class HTMLExtractorTest {
@Test
public void testHasAttribute() {
assertTrue(HTMLExtractor.hasAttribute("hasAttribute-t01", "<div id='test' title></div>", "#test", "title"));
assertTrue(HTMLExtractor.hasAttribute("hasAttribute-t02", "<div id='test' title='a title'></div>", "#test", "title"));
assertTrue(HTMLExtractor.hasAttribute("hasAttribute-t03", "<div id='test' title=''></div>", "#test", "title"));
assertFalse(HTMLExtractor.hasAttribute("hasAttribute-t04", "<div id='test'></div>", "#test", "title"));
}
@Test
public void testHasAttributeValue() {
class Combination {
private String url;
private String markup;
private String selector;
private String attributeName;
private String attributeValue;
private boolean expectedTrue;
private Combination(String url,
String markup,
String selector,
String attributeName,
String attributeValue,
boolean expectedTrue) {
this.url = url;
this.markup = markup;
this.selector = selector;
this.attributeName = attributeName;
this.attributeValue = attributeValue;
this.expectedTrue = expectedTrue;
}
}
Combination[] combinations = new Combination[]{
new Combination("hasAttributeValue-t01", "<div id=\"test\" title=\"something\"></div>", "#test", "title", "", false),
new Combination("hasAttributeValue-t01", "<div id=\"test\" title=\"something\"></div>", "#test", "title", "something", true),
new Combination("hasAttributeValue-t02", "<div id=\"test\" title=\"\"></div>", "#test", "title", "", true),
new Combination("hasAttributeValue-t02", "<div id=\"test\" title=\"\"></div>", "#test", "title", "something", false),
new Combination("hasAttributeValue-t03", "<div id=\"test\" title></div>", "#test", "title", "", true),
new Combination("hasAttributeValue-t03", "<div id=\"test\" title></div>", "#test", "title", "something", false),
new Combination("hasAttributeValue-t04", "<div id=\"test\"></div>", "#test", "title", "", false),
new Combination("hasAttributeValue-t04", "<div id=\"test\"></div>", "#test", "title", "something", false)
};
StringBuilder sb = new StringBuilder();
int index = 0;
for (Combination c : combinations) {
String message =
String.format("%s: Expected %s when looking up a%s existing attribute named %s with value %s for selector %s in \n " +
" %s",
c.url,
c.expectedTrue,
c.expectedTrue ? "n" : " not",
"'" + c.attributeName + "'",
c.attributeValue == null ? null : "'" + c.attributeValue + "'",
c.selector,
c.markup);
if (c.expectedTrue != HTMLExtractor.hasAttributeValue(c.url, c.markup, c.selector, c.attributeName, c.attributeValue)) {
if (index++ == 0) {
sb.append("\n");
}
sb.append(message).append("\n");
}
}
if (sb.length() > 0) {
fail(sb.toString());
}
}
}<|fim▁end|>
|
* http://www.apache.org/licenses/LICENSE-2.0
|
<|file_name|>popstateevent.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public<|fim▁hole|> * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::EventBinding::EventMethods;
use dom::bindings::codegen::Bindings::PopStateEventBinding;
use dom::bindings::codegen::Bindings::PopStateEventBinding::PopStateEventMethods;
use dom::bindings::error::Fallible;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{MutHeapJSVal, Root};
use dom::bindings::reflector::reflect_dom_object;
use dom::bindings::str::DOMString;
use dom::event::Event;
use dom::window::Window;
use js::jsapi::{HandleValue, JSContext};
use js::jsval::JSVal;
use servo_atoms::Atom;
// https://html.spec.whatwg.org/multipage/#the-popstateevent-interface
#[dom_struct]
pub struct PopStateEvent {
event: Event,
#[ignore_heap_size_of = "Defined in rust-mozjs"]
state: MutHeapJSVal,
}
impl PopStateEvent {
fn new_inherited() -> PopStateEvent {
PopStateEvent {
event: Event::new_inherited(),
state: MutHeapJSVal::new(),
}
}
pub fn new_uninitialized(window: &Window) -> Root<PopStateEvent> {
reflect_dom_object(box PopStateEvent::new_inherited(),
window,
PopStateEventBinding::Wrap)
}
pub fn new(window: &Window,
type_: Atom,
bubbles: bool,
cancelable: bool,
state: HandleValue)
-> Root<PopStateEvent> {
let ev = PopStateEvent::new_uninitialized(window);
ev.state.set(state.get());
{
let event = ev.upcast::<Event>();
event.init_event(type_, bubbles, cancelable);
}
ev
}
#[allow(unsafe_code)]
pub fn Constructor(window: &Window,
type_: DOMString,
init: &PopStateEventBinding::PopStateEventInit)
-> Fallible<Root<PopStateEvent>> {
Ok(PopStateEvent::new(window,
Atom::from(type_),
init.parent.bubbles,
init.parent.cancelable,
unsafe { HandleValue::from_marked_location(&init.state) }))
}
}
impl PopStateEventMethods for PopStateEvent {
#[allow(unsafe_code)]
// https://html.spec.whatwg.org/multipage/#dom-popstateevent-state
unsafe fn State(&self, _cx: *mut JSContext) -> JSVal {
self.state.get()
}
// https://dom.spec.whatwg.org/#dom-event-istrusted
fn IsTrusted(&self) -> bool {
self.event.IsTrusted()
}
}<|fim▁end|>
|
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
<|file_name|>nodecounterssnapshotdto.go<|end_file_name|><|fim▁begin|>package model
type NodeCountersSnapshotDTO struct {
NodeId string `json:"nodeId"` // The unique ID that identifies the node<|fim▁hole|>}<|fim▁end|>
|
Address string `json:"address"` // The API address of the node
ApiPort int32 `json:"apiPort"` // The API port used to communicate with the node
Snapshot CountersSnapshotDTO `json:"snapshot"` // The counters from the node.
|
<|file_name|>SingleProposalMisc.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##Copyright (C) [2003] [Jürgen Hamel, D-32584 Löhne]
##This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as
##published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version.
##This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
##warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
##for more details.
##You should have received a copy of the GNU General Public License along with this program; if not, write to the
##Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from cuon.Databases.SingleData import SingleData
import logging
import pygtk
pygtk.require('2.0')
import gtk<|fim▁hole|>
class SingleProposalMisc(SingleData):
def __init__(self, allTables):
SingleData.__init__(self)
# tables.dbd and address
self.sNameOfTable = "proposalmisc"
self.xmlTableDef = 0
# self.loadTable()
# self.saveTable()
self.loadTable(allTables)
#self.setStore( gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_UINT) )
#self.listHeader['names'] = ['number', 'designation', 'ID']
#self.listHeader['size'] = [25,10,25,25,10]
#print "number of Columns "
#print len(self.table.Columns)
#
self.ordernumber = 0
#self.statusfields = ['lastname', 'firstname']
def readNonWidgetEntries(self, dicValues):
print 'readNonWidgetEntries(self) by SingleorderGets'
dicValues['orderid'] = [self.ordernumber, 'int']
return dicValues<|fim▁end|>
|
import gtk.glade
import gobject
#from gtk import TRUE, FALSE
|
<|file_name|>_WithWrapper.test.js<|end_file_name|><|fim▁begin|>/* global describe, test, expect, jest */
import React from "react";
import { mount } from "enzyme";
import Form from "../Form";
describe("HOC (WithWrapper)", () => {
test("[handleOnKeyup] should reset the error state", () => {
const onKeyUp = jest.fn();
const comp = mount(
<Form.Input onChange={spy} value="hey" error="asdas" onKeyUp={onKeyUp} />
);
const inst = comp.instance();
const spy = jest.spyOn(inst, "setState");
inst.handleOnKeyup({});
expect(spy).toHaveBeenCalledWith({ error: null });
expect(onKeyUp).toHaveBeenCalledWith({});
comp.setProps({ onKeyUp: undefined, error: undefined });
inst.handleOnKeyup({});
expect(onKeyUp).toHaveBeenCalledTimes(1);<|fim▁hole|> // The Input is actually wrapped in a HOC
const spy = jest.fn();
const comp = mount(<Form.Input onChange={spy} value="hey" />);
const inst = comp.instance();
inst.handleOnChange("some value", { formValue: "some value" });
expect(comp.state("formValue")).toBe("some value");
expect(spy).toHaveBeenCalled();
comp.setProps({ onChange: undefined });
inst.handleOnChange("");
expect(comp.state("formValue")).toBe(undefined);
expect(spy).toHaveBeenCalledTimes(1);
});
test("hasValue gets updated when new props arrive", () => {
const comp = mount(<Form.Input />);
expect(comp.find(".hasValue").exists()).toBe(false);
comp.setProps({ value: "hey" });
expect(comp.state("formValue")).toBe("hey");
});
test("Handles initialvalues for Form.Select", () => {
const options = [
{
value: "blue",
text: "Blue"
},
{
value: "red",
text: "Red"
}
];
const comp = mount(
<Form.Select name="color" selected="blue" options={options} />
);
expect(comp.find(".hasValue").exists()).toBe(true);
expect(comp.state("formValue")).toBe("blue");
});
describe("snapshot", () => {
test("should include a label span when label is not defined in the child component", () => {
const comp = mount(
<Form.Input label="My awesome label" value="hello-world" />
);
expect(comp).toMatchSnapshot();
});
test("should not include a label span when the label is defined in the child component", () => {
const comp = mount(<Form.Checkbox checked />);
expect(comp).toMatchSnapshot();
});
test("should display an error span when there is an error", () => {
const comp = mount(
<Form.Checkbox checked error="Cannot proceed like this" />
);
expect(comp).toMatchSnapshot();
});
});
});<|fim▁end|>
|
});
test("[handleOnChange] should set a hasValue and call the onChange prop", () => {
|
<|file_name|>background.mako.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
<%namespace name="helpers" file="/helpers.mako.rs" />
<% data.new_style_struct("Background", inherited=False) %>
${helpers.predefined_type("background-color", "CSSColor",
"::cssparser::Color::RGBA(::cssparser::RGBA { red: 0., green: 0., blue: 0., alpha: 0. }) /* transparent */",
spec="https://drafts.csswg.org/css-backgrounds/#background-color",
animatable=True, complex_color=True)}
<%helpers:vector_longhand name="background-image" animatable="False"
spec="https://drafts.csswg.org/css-backgrounds/#the-background-image"
has_uncacheable_values="${product == 'gecko'}">
use std::fmt;
use style_traits::ToCss;
use values::specified::Image;
use values::NoViewportPercentage;
pub mod computed_value {
use values::computed;
#[derive(Debug, Clone, PartialEq)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct T(pub Option<computed::Image>);
}
impl ToCss for computed_value::T {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
match self.0 {
None => dest.write_str("none"),
Some(ref image) => image.to_css(dest),
}
}
}
impl NoViewportPercentage for SpecifiedValue {}
#[derive(Debug, Clone, PartialEq)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct SpecifiedValue(pub Option<Image>);
impl ToCss for SpecifiedValue {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
match *self {
SpecifiedValue(Some(ref image)) => image.to_css(dest),
SpecifiedValue(None) => dest.write_str("none"),
}
}
}
#[inline]
pub fn get_initial_value() -> computed_value::T {
computed_value::T(None)
}
#[inline]
pub fn get_initial_specified_value() -> SpecifiedValue {
SpecifiedValue(None)
}
pub fn parse(context: &ParserContext, input: &mut Parser) -> Result<SpecifiedValue, ()> {
if input.try(|input| input.expect_ident_matching("none")).is_ok() {
Ok(SpecifiedValue(None))
} else {
Ok(SpecifiedValue(Some(try!(Image::parse(context, input)))))
}
}
impl ToComputedValue for SpecifiedValue {
type ComputedValue = computed_value::T;
#[inline]
fn to_computed_value(&self, context: &Context) -> computed_value::T {
match *self {
SpecifiedValue(None) => computed_value::T(None),
SpecifiedValue(Some(ref image)) =>
computed_value::T(Some(image.to_computed_value(context))),
}
}
#[inline]
fn from_computed_value(computed: &computed_value::T) -> Self {
match *computed {
computed_value::T(None) => SpecifiedValue(None),
computed_value::T(Some(ref image)) =>
SpecifiedValue(Some(ToComputedValue::from_computed_value(image))),
}
}
}
</%helpers:vector_longhand>
<%helpers:vector_longhand name="background-position-x" animatable="True"
spec="https://drafts.csswg.org/css-backgrounds-4/#propdef-background-position-x">
use std::fmt;
use style_traits::ToCss;
use values::HasViewportPercentage;
use values::specified::position::HorizontalPosition;
#[allow(missing_docs)]
pub mod computed_value {
use values::computed::position::HorizontalPosition;
use properties::animated_properties::{Interpolate, RepeatableListInterpolate};
pub type T = HorizontalPosition;
}
#[allow(missing_docs)]
pub type SpecifiedValue = HorizontalPosition;
#[inline]
#[allow(missing_docs)]
pub fn get_initial_value() -> computed_value::T {
use values::computed::position::HorizontalPosition;
HorizontalPosition(computed::LengthOrPercentage::Percentage(0.0))
}
#[inline]
#[allow(missing_docs)]
pub fn get_initial_specified_value() -> SpecifiedValue {
use values::specified::position::Keyword;
HorizontalPosition {
keyword: Some(Keyword::Left),
position: None,
}
}
#[inline]
#[allow(missing_docs)]
pub fn get_initial_position_value() -> SpecifiedValue {
use values::specified::{LengthOrPercentage, Percentage};
HorizontalPosition {
keyword: None,
position: Some(LengthOrPercentage::Percentage(Percentage(0.0))),
}
}
#[allow(missing_docs)]
pub fn parse(context: &ParserContext, input: &mut Parser)
-> Result<SpecifiedValue, ()> {
HorizontalPosition::parse(context, input)
}
</%helpers:vector_longhand>
<%helpers:vector_longhand name="background-position-y" animatable="True"
spec="https://drafts.csswg.org/css-backgrounds-4/#propdef-background-position-y">
use std::fmt;
use style_traits::ToCss;
use values::HasViewportPercentage;
use values::specified::position::VerticalPosition;
#[allow(missing_docs)]
pub mod computed_value {
use values::computed::position::VerticalPosition;
use properties::animated_properties::{Interpolate, RepeatableListInterpolate};
pub type T = VerticalPosition;
}
#[allow(missing_docs)]
pub type SpecifiedValue = VerticalPosition;
#[inline]
#[allow(missing_docs)]
pub fn get_initial_value() -> computed_value::T {
use values::computed::position::VerticalPosition;
VerticalPosition(computed::LengthOrPercentage::Percentage(0.0))
}
#[inline]
#[allow(missing_docs)]
pub fn get_initial_specified_value() -> SpecifiedValue {
use values::specified::position::Keyword;
VerticalPosition {
keyword: Some(Keyword::Top),
position: None,
}
}
#[inline]
#[allow(missing_docs)]
pub fn get_initial_position_value() -> SpecifiedValue {
use values::specified::{LengthOrPercentage, Percentage};
VerticalPosition {
keyword: None,
position: Some(LengthOrPercentage::Percentage(Percentage(0.0))),
}
}
#[inline]
#[allow(missing_docs)]
pub fn parse(context: &ParserContext, input: &mut Parser)
-> Result<SpecifiedValue, ()> {
VerticalPosition::parse(context, input)
}
</%helpers:vector_longhand>
${helpers.single_keyword("background-repeat",
"repeat repeat-x repeat-y space round no-repeat",
vector=True,
spec="https://drafts.csswg.org/css-backgrounds/#the-background-repeat",
animatable=False)}
${helpers.single_keyword("background-attachment",
"scroll fixed" + (" local" if product == "gecko" else ""),
vector=True,
spec="https://drafts.csswg.org/css-backgrounds/#the-background-attachment",
animatable=False)}
${helpers.single_keyword("background-clip",
"border-box padding-box content-box",
extra_gecko_values="text",
vector=True, extra_prefixes="webkit",
spec="https://drafts.csswg.org/css-backgrounds/#the-background-clip",
animatable=False)}
${helpers.single_keyword("background-origin",
"padding-box border-box content-box",
vector=True, extra_prefixes="webkit",
spec="https://drafts.csswg.org/css-backgrounds/#the-background-origin",
animatable=False)}
<%helpers:vector_longhand name="background-size" animatable="True" extra_prefixes="webkit"
spec="https://drafts.csswg.org/css-backgrounds/#the-background-size">
use cssparser::Token;
use std::ascii::AsciiExt;
use std::fmt;
use style_traits::ToCss;
use values::HasViewportPercentage;
#[allow(missing_docs)]
pub mod computed_value {
use values::computed::LengthOrPercentageOrAuto;
use properties::animated_properties::{Interpolate, RepeatableListInterpolate};
#[derive(PartialEq, Clone, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct ExplicitSize {
pub width: LengthOrPercentageOrAuto,
pub height: LengthOrPercentageOrAuto,
}
#[derive(PartialEq, Clone, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum T {
Explicit(ExplicitSize),
Cover,
Contain,
}
impl RepeatableListInterpolate for T {}
impl Interpolate for T {
fn interpolate(&self, other: &Self, time: f64) -> Result<Self, ()> {
use properties::longhands::background_size::single_value::computed_value::ExplicitSize;
match (self, other) {
(&T::Explicit(ref me), &T::Explicit(ref other)) => {
Ok(T::Explicit(ExplicitSize {
width: try!(me.width.interpolate(&other.width, time)),
height: try!(me.height.interpolate(&other.height, time)),
}))
}
_ => Err(()),
}
}
}
}<|fim▁hole|> fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
match *self {
computed_value::T::Explicit(ref size) => size.to_css(dest),
computed_value::T::Cover => dest.write_str("cover"),
computed_value::T::Contain => dest.write_str("contain"),
}
}
}
impl HasViewportPercentage for ExplicitSize {
fn has_viewport_percentage(&self) -> bool {
return self.width.has_viewport_percentage() || self.height.has_viewport_percentage();
}
}
#[derive(Clone, PartialEq, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
#[allow(missing_docs)]
pub struct ExplicitSize {
pub width: specified::LengthOrPercentageOrAuto,
pub height: specified::LengthOrPercentageOrAuto,
}
impl ToCss for ExplicitSize {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
try!(self.width.to_css(dest));
try!(dest.write_str(" "));
self.height.to_css(dest)
}
}
impl ToCss for computed_value::ExplicitSize {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
try!(self.width.to_css(dest));
try!(dest.write_str(" "));
self.height.to_css(dest)
}
}
impl HasViewportPercentage for SpecifiedValue {
fn has_viewport_percentage(&self) -> bool {
match *self {
SpecifiedValue::Explicit(ref explicit_size) => explicit_size.has_viewport_percentage(),
_ => false
}
}
}
#[derive(Clone, PartialEq, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum SpecifiedValue {
Explicit(ExplicitSize),
Cover,
Contain,
}
impl ToCss for SpecifiedValue {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
match *self {
SpecifiedValue::Explicit(ref size) => size.to_css(dest),
SpecifiedValue::Cover => dest.write_str("cover"),
SpecifiedValue::Contain => dest.write_str("contain"),
}
}
}
impl ToComputedValue for SpecifiedValue {
type ComputedValue = computed_value::T;
#[inline]
fn to_computed_value(&self, context: &Context) -> computed_value::T {
match *self {
SpecifiedValue::Explicit(ref size) => {
computed_value::T::Explicit(computed_value::ExplicitSize {
width: size.width.to_computed_value(context),
height: size.height.to_computed_value(context),
})
}
SpecifiedValue::Cover => computed_value::T::Cover,
SpecifiedValue::Contain => computed_value::T::Contain,
}
}
#[inline]
fn from_computed_value(computed: &computed_value::T) -> Self {
match *computed {
computed_value::T::Explicit(ref size) => {
SpecifiedValue::Explicit(ExplicitSize {
width: ToComputedValue::from_computed_value(&size.width),
height: ToComputedValue::from_computed_value(&size.height),
})
}
computed_value::T::Cover => SpecifiedValue::Cover,
computed_value::T::Contain => SpecifiedValue::Contain,
}
}
}
#[inline]
pub fn get_initial_value() -> computed_value::T {
computed_value::T::Explicit(computed_value::ExplicitSize {
width: computed::LengthOrPercentageOrAuto::Auto,
height: computed::LengthOrPercentageOrAuto::Auto,
})
}
#[inline]
pub fn get_initial_specified_value() -> SpecifiedValue {
SpecifiedValue::Explicit(ExplicitSize {
width: specified::LengthOrPercentageOrAuto::Auto,
height: specified::LengthOrPercentageOrAuto::Auto,
})
}
pub fn parse(context: &ParserContext, input: &mut Parser) -> Result<SpecifiedValue,()> {
let width;
if let Ok(value) = input.try(|input| {
match input.next() {
Err(_) => Err(()),
Ok(Token::Ident(ref ident)) if ident.eq_ignore_ascii_case("cover") => {
Ok(SpecifiedValue::Cover)
}
Ok(Token::Ident(ref ident)) if ident.eq_ignore_ascii_case("contain") => {
Ok(SpecifiedValue::Contain)
}
Ok(_) => Err(()),
}
}) {
return Ok(value)
} else {
width = try!(specified::LengthOrPercentageOrAuto::parse(context, input))
}
let height;
if let Ok(value) = input.try(|input| {
match input.next() {
Err(_) => Ok(specified::LengthOrPercentageOrAuto::Auto),
Ok(_) => Err(()),
}
}) {
height = value
} else {
height = try!(specified::LengthOrPercentageOrAuto::parse(context, input));
}
Ok(SpecifiedValue::Explicit(ExplicitSize {
width: width,
height: height,
}))
}
</%helpers:vector_longhand>
// https://drafts.fxtf.org/compositing/#background-blend-mode
${helpers.single_keyword("background-blend-mode",
"""normal multiply screen overlay darken lighten color-dodge
color-burn hard-light soft-light difference exclusion hue
saturation color luminosity""",
vector="true", products="gecko", animatable=False,
spec="https://drafts.fxtf.org/compositing/#background-blend-mode")}<|fim▁end|>
|
impl ToCss for computed_value::T {
|
<|file_name|>acl.py<|end_file_name|><|fim▁begin|># Copyright (C) 2013-2018 Samuel Damashek, Peter Foley, James Forcier, Srijay Kasturi, Reed Koser, Christopher Reffett, and Tris Wilson
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from ..helpers import arguments
from ..helpers.command import Command
from ..helpers.orm import Permissions
@Command('acl', ['config', 'db'], role="owner")
def cmd(send, msg, args):
"""Handles permissions
Syntax: {command} (--add|--remove) --nick (nick) --role (admin)
"""
parser = arguments.ArgParser(args['config'])
parser.add_argument('--nick', action=arguments.NickParser, required=True)
parser.add_argument('--role', choices=['admin'], required=True)
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('--add', action='store_true')
group.add_argument('--remove', action='store_true')
try:
cmdargs = parser.parse_args(msg)
except arguments.ArgumentException as e:
send(str(e))
return
session = args['db']
admin = session.query(Permissions).filter(Permissions.nick == cmdargs.nick).first()
if cmdargs.add:
if admin is None:
session.add(Permissions(nick=cmdargs.nick, role=cmdargs.role))
send(f"{cmdargs.nick} is now an {cmdargs.role}.")
else:
send(f"{admin.nick} is already an {admin.role}.")<|fim▁hole|> else:
session.delete(admin)
send(f"{admin.nick} is no longer an {admin.role}.")<|fim▁end|>
|
else:
if admin is None:
send(f"{cmdargs.nick} was not an {cmdargs.role}.")
|
<|file_name|>horizontal.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# This file is part of pygal
#
# A python svg graph plotting library
# Copyright © 2012-2014 Kozea
#
# This library is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pygal. If not, see <http://www.gnu.org/licenses/>.
"""
Horizontal graph base
"""
from pygal.graph.graph import Graph
from pygal.view import HorizontalView, HorizontalLogView
class HorizontalGraph(Graph):
"""Horizontal graph"""<|fim▁hole|> def __init__(self, *args, **kwargs):
self.horizontal = True
super(HorizontalGraph, self).__init__(*args, **kwargs)
def _post_compute(self):
self._x_labels, self._y_labels = self._y_labels, self._x_labels
self._x_2nd_labels, self._y_2nd_labels = (
self._y_2nd_labels, self._x_2nd_labels)
def _axes(self):
self.view._force_vertical = True
super(HorizontalGraph, self)._axes()
self.view._force_vertical = False
def _set_view(self):
"""Assign a view to current graph"""
if self.logarithmic:
view_class = HorizontalLogView
else:
view_class = HorizontalView
self.view = view_class(
self.width - self.margin.x,
self.height - self.margin.y,
self._box)<|fim▁end|>
| |
<|file_name|>contacts-test.js<|end_file_name|><|fim▁begin|>import { moduleFor, test } from 'ember-qunit';
moduleFor('adapter:contacts', 'Unit | Adapter | contacts', {
// Specify the other units that are required for this test.
// needs: ['serializer:foo']
});
// Replace this with your real tests.
test('it exists', function(assert) {
let adapter = this.subject();<|fim▁hole|><|fim▁end|>
|
assert.ok(adapter);
});
|
<|file_name|>canvas_paint_thread.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use azure::azure::{AzColor, AzFloat};
use azure::azure_hl::{AntialiasMode, CapStyle, CompositionOp, JoinStyle};
use azure::azure_hl::{BackendType, DrawOptions, DrawTarget, Pattern, StrokeOptions, SurfaceFormat};
use azure::azure_hl::{ColorPattern, DrawSurfaceOptions, Filter, PathBuilder};
use canvas_traits::*;
use euclid::matrix2d::Matrix2D;
use euclid::point::Point2D;
use euclid::rect::Rect;
use euclid::size::Size2D;
use gfx_traits::color;
use ipc_channel::ipc::IpcSharedMemory;
use ipc_channel::ipc::{self, IpcSender};
use num::ToPrimitive;
use premultiplytable::PREMULTIPLY_TABLE;
use std::borrow::ToOwned;
use std::mem;
use util::opts;
use util::thread::spawn_named;
use util::vec::byte_swap;
use webrender_traits;
impl<'a> CanvasPaintThread<'a> {
/// It reads image data from the canvas
/// canvas_size: The size of the canvas we're reading from
/// read_rect: The area of the canvas we want to read from
fn read_pixels(&self, read_rect: Rect<i32>, canvas_size: Size2D<f64>) -> Vec<u8>{
let canvas_size = canvas_size.to_i32();
let canvas_rect = Rect::new(Point2D::new(0i32, 0i32), canvas_size);
let src_read_rect = canvas_rect.intersection(&read_rect).unwrap_or(Rect::zero());
let mut image_data = Vec::new();
if src_read_rect.is_empty() || canvas_size.width <= 0 && canvas_size.height <= 0 {
return image_data;
}
let data_surface = self.drawtarget.snapshot().get_data_surface();
let mut src_data = Vec::new();
data_surface.with_data(|element| { src_data = element.to_vec(); });
let stride = data_surface.stride();
//start offset of the copyable rectangle
let mut src = (src_read_rect.origin.y * stride + src_read_rect.origin.x * 4) as usize;
//copy the data to the destination vector
for _ in 0..src_read_rect.size.height {
let row = &src_data[src .. src + (4 * src_read_rect.size.width) as usize];
image_data.extend_from_slice(row);
src += stride as usize;
}
image_data
}
}
pub struct CanvasPaintThread<'a> {
drawtarget: DrawTarget,
/// TODO(pcwalton): Support multiple paths.
path_builder: PathBuilder,
state: CanvasPaintState<'a>,
saved_states: Vec<CanvasPaintState<'a>>,
webrender_api: Option<webrender_traits::RenderApi>,
webrender_image_key: Option<webrender_traits::ImageKey>,
}
#[derive(Clone)]
struct CanvasPaintState<'a> {
draw_options: DrawOptions,
fill_style: Pattern,
stroke_style: Pattern,
stroke_opts: StrokeOptions<'a>,
/// The current 2D transform matrix.
transform: Matrix2D<f32>,
shadow_offset_x: f64,
shadow_offset_y: f64,
shadow_blur: f64,
shadow_color: AzColor,
}
impl<'a> CanvasPaintState<'a> {
fn new() -> CanvasPaintState<'a> {
let antialias = if opts::get().enable_canvas_antialiasing {
AntialiasMode::Default
} else {
AntialiasMode::None
};
CanvasPaintState {
draw_options: DrawOptions::new(1.0, CompositionOp::Over, antialias),
fill_style: Pattern::Color(ColorPattern::new(color::black())),
stroke_style: Pattern::Color(ColorPattern::new(color::black())),
stroke_opts: StrokeOptions::new(1.0, JoinStyle::MiterOrBevel, CapStyle::Butt, 10.0, &[]),
transform: Matrix2D::identity(),
shadow_offset_x: 0.0,
shadow_offset_y: 0.0,
shadow_blur: 0.0,
shadow_color: color::transparent(),
}
}
}
impl<'a> CanvasPaintThread<'a> {
fn new(size: Size2D<i32>,
webrender_api_sender: Option<webrender_traits::RenderApiSender>) -> CanvasPaintThread<'a> {
let draw_target = CanvasPaintThread::create(size);
let path_builder = draw_target.create_path_builder();
let webrender_api = webrender_api_sender.map(|wr| wr.create_api());
let webrender_image_key = webrender_api.as_ref().map(|wr| wr.alloc_image());
CanvasPaintThread {
drawtarget: draw_target,
path_builder: path_builder,
state: CanvasPaintState::new(),
saved_states: Vec::new(),
webrender_api: webrender_api,
webrender_image_key: webrender_image_key,
}
}
/// Creates a new `CanvasPaintThread` and returns the out-of-process sender and the in-process
/// sender for it.
pub fn start(size: Size2D<i32>,
webrender_api_sender: Option<webrender_traits::RenderApiSender>)
-> IpcSender<CanvasMsg> {
// TODO(pcwalton): Ask the pipeline to create this for us instead of spawning it directly.
// This will be needed for multiprocess Servo.
let (sender, receiver) = ipc::channel::<CanvasMsg>().unwrap();
spawn_named("CanvasThread".to_owned(), move || {
let mut painter = CanvasPaintThread::new(size, webrender_api_sender);
loop {
let msg = receiver.recv();
match msg.unwrap() {
CanvasMsg::Canvas2d(message) => {
match message {
Canvas2dMsg::FillRect(ref rect) => painter.fill_rect(rect),
Canvas2dMsg::StrokeRect(ref rect) => painter.stroke_rect(rect),
Canvas2dMsg::ClearRect(ref rect) => painter.clear_rect(rect),
Canvas2dMsg::BeginPath => painter.begin_path(),
Canvas2dMsg::ClosePath => painter.close_path(),
Canvas2dMsg::Fill => painter.fill(),
Canvas2dMsg::Stroke => painter.stroke(),
Canvas2dMsg::Clip => painter.clip(),
Canvas2dMsg::IsPointInPath(x, y, fill_rule, chan) => {
painter.is_point_in_path(x, y, fill_rule, chan)
},
Canvas2dMsg::DrawImage(imagedata, image_size, dest_rect, source_rect,
smoothing_enabled) => {
painter.draw_image(imagedata, image_size, dest_rect, source_rect, smoothing_enabled)
}
Canvas2dMsg::DrawImageSelf(image_size, dest_rect, source_rect, smoothing_enabled) => {
painter.draw_image_self(image_size, dest_rect, source_rect, smoothing_enabled)
}
Canvas2dMsg::MoveTo(ref point) => painter.move_to(point),
Canvas2dMsg::LineTo(ref point) => painter.line_to(point),
Canvas2dMsg::Rect(ref rect) => painter.rect(rect),
Canvas2dMsg::QuadraticCurveTo(ref cp, ref pt) => {
painter.quadratic_curve_to(cp, pt)
}
Canvas2dMsg::BezierCurveTo(ref cp1, ref cp2, ref pt) => {
painter.bezier_curve_to(cp1, cp2, pt)
}
Canvas2dMsg::Arc(ref center, radius, start, end, ccw) => {
painter.arc(center, radius, start, end, ccw)
}
Canvas2dMsg::ArcTo(ref cp1, ref cp2, radius) => {
painter.arc_to(cp1, cp2, radius)
}
Canvas2dMsg::RestoreContext => painter.restore_context_state(),
Canvas2dMsg::SaveContext => painter.save_context_state(),
Canvas2dMsg::SetFillStyle(style) => painter.set_fill_style(style),
Canvas2dMsg::SetStrokeStyle(style) => painter.set_stroke_style(style),
Canvas2dMsg::SetLineWidth(width) => painter.set_line_width(width),
Canvas2dMsg::SetLineCap(cap) => painter.set_line_cap(cap),
Canvas2dMsg::SetLineJoin(join) => painter.set_line_join(join),
Canvas2dMsg::SetMiterLimit(limit) => painter.set_miter_limit(limit),
Canvas2dMsg::SetTransform(ref matrix) => painter.set_transform(matrix),
Canvas2dMsg::SetGlobalAlpha(alpha) => painter.set_global_alpha(alpha),
Canvas2dMsg::SetGlobalComposition(op) => painter.set_global_composition(op),
Canvas2dMsg::GetImageData(dest_rect, canvas_size, chan)
=> painter.image_data(dest_rect, canvas_size, chan),
Canvas2dMsg::PutImageData(imagedata, offset, image_data_size, dirty_rect)
=> painter.put_image_data(imagedata, offset, image_data_size, dirty_rect),
Canvas2dMsg::SetShadowOffsetX(value) => painter.set_shadow_offset_x(value),
Canvas2dMsg::SetShadowOffsetY(value) => painter.set_shadow_offset_y(value),
Canvas2dMsg::SetShadowBlur(value) => painter.set_shadow_blur(value),
Canvas2dMsg::SetShadowColor(ref color) => painter.set_shadow_color(color.to_azcolor()),
}
},
CanvasMsg::Common(message) => {
match message {
CanvasCommonMsg::Close => break,
CanvasCommonMsg::Recreate(size) => painter.recreate(size),
}
},
CanvasMsg::FromLayout(message) => {
match message {
FromLayoutMsg::SendData(chan) => {
painter.send_data(chan)
}
}
}
CanvasMsg::WebGL(_) => panic!("Wrong message sent to Canvas2D thread"),
}
}
});
sender
}
fn save_context_state(&mut self) {
self.saved_states.push(self.state.clone());
}
fn restore_context_state(&mut self) {
if let Some(state) = self.saved_states.pop() {
mem::replace(&mut self.state, state);
self.drawtarget.set_transform(&self.state.transform);
self.drawtarget.pop_clip();
}
}
fn fill_rect(&self, rect: &Rect<f32>) {
if is_zero_size_gradient(&self.state.fill_style) {
return; // Paint nothing if gradient size is zero.
}
let draw_rect = Rect::new(rect.origin,
match self.state.fill_style {
Pattern::Surface(ref surface) => {
let surface_size = surface.size();
match (surface.repeat_x, surface.repeat_y) {
(true, true) => rect.size,
(true, false) => Size2D::new(rect.size.width, surface_size.height as f32),
(false, true) => Size2D::new(surface_size.width as f32, rect.size.height),
(false, false) => Size2D::new(surface_size.width as f32, surface_size.height as f32),
}
},
_ => rect.size,
}
);
if self.need_to_draw_shadow() {
self.draw_with_shadow(&draw_rect, |new_draw_target: &DrawTarget| {
new_draw_target.fill_rect(&draw_rect, self.state.fill_style.to_pattern_ref(),
Some(&self.state.draw_options));
});
} else {
self.drawtarget.fill_rect(&draw_rect, self.state.fill_style.to_pattern_ref(),
Some(&self.state.draw_options));
}
}
fn clear_rect(&self, rect: &Rect<f32>) {
self.drawtarget.clear_rect(rect);
}
fn stroke_rect(&self, rect: &Rect<f32>) {
if is_zero_size_gradient(&self.state.stroke_style) {
return; // Paint nothing if gradient size is zero.
}
if self.need_to_draw_shadow() {
self.draw_with_shadow(&rect, |new_draw_target: &DrawTarget| {
new_draw_target.stroke_rect(rect, self.state.stroke_style.to_pattern_ref(),
&self.state.stroke_opts, &self.state.draw_options);
});
} else if rect.size.width == 0. || rect.size.height == 0. {
let cap = match self.state.stroke_opts.line_join {
JoinStyle::Round => CapStyle::Round,
_ => CapStyle::Butt
};
let stroke_opts =
StrokeOptions::new(self.state.stroke_opts.line_width,
self.state.stroke_opts.line_join,
cap,
self.state.stroke_opts.miter_limit,
self.state.stroke_opts.mDashPattern);
self.drawtarget.stroke_line(rect.origin, rect.bottom_right(),
self.state.stroke_style.to_pattern_ref(),
&stroke_opts, &self.state.draw_options);
} else {
self.drawtarget.stroke_rect(rect, self.state.stroke_style.to_pattern_ref(),
&self.state.stroke_opts, &self.state.draw_options);
}
}
fn begin_path(&mut self) {
self.path_builder = self.drawtarget.create_path_builder()
}
fn close_path(&self) {
self.path_builder.close()
}
fn fill(&self) {
if is_zero_size_gradient(&self.state.fill_style) {
return; // Paint nothing if gradient size is zero.
}
self.drawtarget.fill(&self.path_builder.finish(),
self.state.fill_style.to_pattern_ref(),
&self.state.draw_options);
}
fn stroke(&self) {
if is_zero_size_gradient(&self.state.stroke_style) {
return; // Paint nothing if gradient size is zero.
}
self.drawtarget.stroke(&self.path_builder.finish(),
self.state.stroke_style.to_pattern_ref(),
&self.state.stroke_opts,
&self.state.draw_options);
}
fn clip(&self) {
self.drawtarget.push_clip(&self.path_builder.finish());
}
fn is_point_in_path(&mut self, x: f64, y: f64,
_fill_rule: FillRule, chan: IpcSender<bool>) {
let path = self.path_builder.finish();
let result = path.contains_point(x, y, &self.state.transform);
self.path_builder = path.copy_to_builder();
chan.send(result).unwrap();
}
fn draw_image(&self, image_data: Vec<u8>, image_size: Size2D<f64>,
dest_rect: Rect<f64>, source_rect: Rect<f64>, smoothing_enabled: bool) {
// We round up the floating pixel values to draw the pixels
let source_rect = source_rect.ceil();
// It discards the extra pixels (if any) that won't be painted
let image_data = crop_image(image_data, image_size, source_rect);
if self.need_to_draw_shadow() {
let rect = Rect::new(Point2D::new(dest_rect.origin.x as f32, dest_rect.origin.y as f32),
Size2D::new(dest_rect.size.width as f32, dest_rect.size.height as f32));
self.draw_with_shadow(&rect, |new_draw_target: &DrawTarget| {
write_image(&new_draw_target, image_data, source_rect.size, dest_rect,
smoothing_enabled, self.state.draw_options.composition,
self.state.draw_options.alpha);
});
} else {
write_image(&self.drawtarget, image_data, source_rect.size, dest_rect,
smoothing_enabled, self.state.draw_options.composition,
self.state.draw_options.alpha);
}
}
fn draw_image_self(&self, image_size: Size2D<f64>,
dest_rect: Rect<f64>, source_rect: Rect<f64>,
smoothing_enabled: bool) {
// Reads pixels from source image
// In this case source and target are the same canvas
let image_data = self.read_pixels(source_rect.to_i32(), image_size);
if self.need_to_draw_shadow() {
let rect = Rect::new(Point2D::new(dest_rect.origin.x as f32, dest_rect.origin.y as f32),
Size2D::new(dest_rect.size.width as f32, dest_rect.size.height as f32));
self.draw_with_shadow(&rect, |new_draw_target: &DrawTarget| {
write_image(&new_draw_target, image_data, source_rect.size, dest_rect,
smoothing_enabled, self.state.draw_options.composition,
self.state.draw_options.alpha);
});
} else {
// Writes on target canvas
write_image(&self.drawtarget, image_data, image_size, dest_rect,
smoothing_enabled, self.state.draw_options.composition,
self.state.draw_options.alpha);
}
}
fn move_to(&self, point: &Point2D<AzFloat>) {
self.path_builder.move_to(*point)
}
fn line_to(&self, point: &Point2D<AzFloat>) {
self.path_builder.line_to(*point)
}
fn rect(&self, rect: &Rect<f32>) {
self.path_builder.move_to(Point2D::new(rect.origin.x, rect.origin.y));
self.path_builder.line_to(Point2D::new(rect.origin.x + rect.size.width, rect.origin.y));
self.path_builder.line_to(Point2D::new(rect.origin.x + rect.size.width,
rect.origin.y + rect.size.height));
self.path_builder.line_to(Point2D::new(rect.origin.x, rect.origin.y + rect.size.height));
self.path_builder.close();
}
fn quadratic_curve_to(&self,
cp: &Point2D<AzFloat>,
endpoint: &Point2D<AzFloat>) {
self.path_builder.quadratic_curve_to(cp, endpoint)
}
fn bezier_curve_to(&self,
cp1: &Point2D<AzFloat>,
cp2: &Point2D<AzFloat>,
endpoint: &Point2D<AzFloat>) {
self.path_builder.bezier_curve_to(cp1, cp2, endpoint)
}
fn arc(&self,
center: &Point2D<AzFloat>,
radius: AzFloat,
start_angle: AzFloat,
end_angle: AzFloat,
ccw: bool) {
self.path_builder.arc(*center, radius, start_angle, end_angle, ccw)
}
fn arc_to(&self,
cp1: &Point2D<AzFloat>,
cp2: &Point2D<AzFloat>,
radius: AzFloat) {
let cp0 = self.path_builder.get_current_point();
let cp1 = *cp1;
let cp2 = *cp2;
if (cp0.x == cp1.x && cp0.y == cp1.y) || cp1 == cp2 || radius == 0.0 {
self.line_to(&cp1);
return;
}
// if all three control points lie on a single straight line,
// connect the first two by a straight line
let direction = (cp2.x - cp1.x) * (cp0.y - cp1.y) + (cp2.y - cp1.y) * (cp1.x - cp0.x);
if direction == 0.0 {
self.line_to(&cp1);
return;
}
// otherwise, draw the Arc
let a2 = (cp0.x - cp1.x).powi(2) + (cp0.y - cp1.y).powi(2);
let b2 = (cp1.x - cp2.x).powi(2) + (cp1.y - cp2.y).powi(2);
let d = {
let c2 = (cp0.x - cp2.x).powi(2) + (cp0.y - cp2.y).powi(2);
let cosx = (a2 + b2 - c2) / (2.0 * (a2 * b2).sqrt());
let sinx = (1.0 - cosx.powi(2)).sqrt();
radius / ((1.0 - cosx) / sinx)
};
// first tangent point
let anx = (cp1.x - cp0.x) / a2.sqrt();
let any = (cp1.y - cp0.y) / a2.sqrt();
let tp1 = Point2D::new(cp1.x - anx * d, cp1.y - any * d);
// second tangent point
let bnx = (cp1.x - cp2.x) / b2.sqrt();
let bny = (cp1.y - cp2.y) / b2.sqrt();
let tp2 = Point2D::new(cp1.x - bnx * d, cp1.y - bny * d);
// arc center and angles
let anticlockwise = direction < 0.0;
let cx = tp1.x + any * radius * if anticlockwise { 1.0 } else { -1.0 };
let cy = tp1.y - anx * radius * if anticlockwise { 1.0 } else { -1.0 };
let angle_start = (tp1.y - cy).atan2(tp1.x - cx);
let angle_end = (tp2.y - cy).atan2(tp2.x - cx);
self.line_to(&tp1);
if [cx, cy, angle_start, angle_end].iter().all(|x| x.is_finite()) {
self.arc(&Point2D::new(cx, cy), radius,
angle_start, angle_end, anticlockwise);
}
}
fn set_fill_style(&mut self, style: FillOrStrokeStyle) {
if let Some(pattern) = style.to_azure_pattern(&self.drawtarget) {
self.state.fill_style = pattern
}
}
fn set_stroke_style(&mut self, style: FillOrStrokeStyle) {
if let Some(pattern) = style.to_azure_pattern(&self.drawtarget) {
self.state.stroke_style = pattern
}
}
fn set_line_width(&mut self, width: f32) {
self.state.stroke_opts.line_width = width;
}
fn set_line_cap(&mut self, cap: LineCapStyle) {
self.state.stroke_opts.line_cap = cap.to_azure_style();
}
fn set_line_join(&mut self, join: LineJoinStyle) {
self.state.stroke_opts.line_join = join.to_azure_style();
}
fn set_miter_limit(&mut self, limit: f32) {
self.state.stroke_opts.miter_limit = limit;
}
fn set_transform(&mut self, transform: &Matrix2D<f32>) {
self.state.transform = *transform;
self.drawtarget.set_transform(transform)
}
fn set_global_alpha(&mut self, alpha: f32) {
self.state.draw_options.alpha = alpha;
}
fn set_global_composition(&mut self, op: CompositionOrBlending) {
self.state.draw_options.set_composition_op(op.to_azure_style());
}
fn create(size: Size2D<i32>) -> DrawTarget {
DrawTarget::new(BackendType::Skia, size, SurfaceFormat::B8G8R8A8)
}
fn recreate(&mut self, size: Size2D<i32>) {
self.drawtarget = CanvasPaintThread::create(size);
}
fn send_data(&mut self, chan: IpcSender<CanvasData>) {
self.drawtarget.snapshot().get_data_surface().with_data(|element| {
if let Some(ref webrender_api) = self.webrender_api {
let size = self.drawtarget.get_size();
let mut bytes = Vec::new();
bytes.extend_from_slice(element);
webrender_api.update_image(self.webrender_image_key.unwrap(),
size.width as u32,
size.height as u32,
webrender_traits::ImageFormat::RGBA8,
bytes);
}
let pixel_data = CanvasPixelData {
image_data: IpcSharedMemory::from_bytes(element),
image_key: self.webrender_image_key,
};
chan.send(CanvasData::Pixels(pixel_data)).unwrap();
})
}
fn image_data(&self,
dest_rect: Rect<i32>,
canvas_size: Size2D<f64>,
chan: IpcSender<Vec<u8>>) {
let mut dest_data = self.read_pixels(dest_rect, canvas_size);
// bgra -> rgba
byte_swap(&mut dest_data);
chan.send(dest_data).unwrap();
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-putimagedata
fn put_image_data(&mut self, imagedata: Vec<u8>,
offset: Point2D<f64>,
image_data_size: Size2D<f64>,
mut dirty_rect: Rect<f64>) {
if image_data_size.width <= 0.0 || image_data_size.height <= 0.0 {
return
}
assert!(image_data_size.width * image_data_size.height * 4.0 == imagedata.len() as f64);
// Step 1. TODO (neutered data)
// Step 2.
if dirty_rect.size.width < 0.0f64 {
dirty_rect.origin.x += dirty_rect.size.width;
dirty_rect.size.width = -dirty_rect.size.width;
}
if dirty_rect.size.height < 0.0f64 {
dirty_rect.origin.y += dirty_rect.size.height;
dirty_rect.size.height = -dirty_rect.size.height;
}
// Step 3.
if dirty_rect.origin.x < 0.0f64 {
dirty_rect.size.width += dirty_rect.origin.x;
dirty_rect.origin.x = 0.0f64;
}
if dirty_rect.origin.y < 0.0f64 {
dirty_rect.size.height += dirty_rect.origin.y;
dirty_rect.origin.y = 0.0f64;
}
// Step 4.
if dirty_rect.max_x() > image_data_size.width {
dirty_rect.size.width = image_data_size.width - dirty_rect.origin.x;
}
if dirty_rect.max_y() > image_data_size.height {
dirty_rect.size.height = image_data_size.height - dirty_rect.origin.y;
}
// 5) If either dirtyWidth or dirtyHeight is negative or zero,
// stop without affecting any bitmaps
if dirty_rect.size.width <= 0.0 || dirty_rect.size.height <= 0.0 {
return
}
// Step 6.
let dest_rect = dirty_rect.translate(&offset).to_i32();
// azure_hl operates with integers. We need to cast the image size
let image_size = image_data_size.to_i32();
let first_pixel = dest_rect.origin - offset.to_i32();
let mut src_line = (first_pixel.y * (image_size.width * 4) + first_pixel.x * 4) as usize;
let mut dest =
Vec::with_capacity((dest_rect.size.width * dest_rect.size.height * 4) as usize);
for _ in 0 .. dest_rect.size.height {
let mut src_offset = src_line;
for _ in 0 .. dest_rect.size.width {
// Premultiply alpha and swap RGBA -> BGRA.
let alpha = imagedata[src_offset + 3] as usize;
dest.push(PREMULTIPLY_TABLE[256 * alpha + imagedata[src_offset + 2] as usize]);
dest.push(PREMULTIPLY_TABLE[256 * alpha + imagedata[src_offset + 1] as usize]);
dest.push(PREMULTIPLY_TABLE[256 * alpha + imagedata[src_offset + 0] as usize]);
dest.push(imagedata[src_offset + 3]);
src_offset += 4;
}
src_line += (image_size.width * 4) as usize;
}
if let Some(source_surface) = self.drawtarget.create_source_surface_from_data(
&dest,
dest_rect.size,
dest_rect.size.width * 4,
SurfaceFormat::B8G8R8A8) {
self.drawtarget.copy_surface(source_surface,
Rect::new(Point2D::new(0, 0), dest_rect.size),
dest_rect.origin);
}
}
fn set_shadow_offset_x(&mut self, value: f64) {
self.state.shadow_offset_x = value;
}
fn set_shadow_offset_y(&mut self, value: f64) {
self.state.shadow_offset_y = value;
}
fn set_shadow_blur(&mut self, value: f64) {
self.state.shadow_blur = value;
}
fn set_shadow_color(&mut self, value: AzColor) {
self.state.shadow_color = value;
}
// https://html.spec.whatwg.org/multipage/#when-shadows-are-drawn
fn need_to_draw_shadow(&self) -> bool {
self.state.shadow_color.a != 0.0f32 &&
(self.state.shadow_offset_x != 0.0f64 ||
self.state.shadow_offset_y != 0.0f64 ||
self.state.shadow_blur != 0.0f64)
}
fn create_draw_target_for_shadow(&self, source_rect: &Rect<f32>) -> DrawTarget {
let draw_target = self.drawtarget.create_similar_draw_target(&Size2D::new(source_rect.size.width as i32,
source_rect.size.height as i32),
self.drawtarget.get_format());
let matrix = Matrix2D::identity().translate(-source_rect.origin.x as AzFloat,
-source_rect.origin.y as AzFloat)
.mul(&self.state.transform);
draw_target.set_transform(&matrix);
draw_target
}
fn draw_with_shadow<F>(&self, rect: &Rect<f32>, draw_shadow_source: F)
where F: FnOnce(&DrawTarget)
{
let shadow_src_rect = self.state.transform.transform_rect(rect);
let new_draw_target = self.create_draw_target_for_shadow(&shadow_src_rect);
draw_shadow_source(&new_draw_target);
self.drawtarget.draw_surface_with_shadow(new_draw_target.snapshot(),
&Point2D::new(shadow_src_rect.origin.x as AzFloat,
shadow_src_rect.origin.y as AzFloat),
&self.state.shadow_color,
&Point2D::new(self.state.shadow_offset_x as AzFloat,
self.state.shadow_offset_y as AzFloat),
(self.state.shadow_blur / 2.0f64) as AzFloat,
self.state.draw_options.composition);
}
}
/// Used by drawImage to get rid of the extra pixels of the image data that
/// won't be copied to the canvas
/// image_data: Color pixel data of the image
/// image_size: Image dimensions
/// crop_rect: It determines the area of the image we want to keep
fn crop_image(image_data: Vec<u8>,
image_size: Size2D<f64>,
crop_rect: Rect<f64>) -> Vec<u8>{
// We're going to iterate over a pixel values array so we need integers
let crop_rect = crop_rect.to_i32();
let image_size = image_size.to_i32();
// Assuming 4 bytes per pixel and row-major order for storage
// (consecutive elements in a pixel row of the image are contiguous in memory)
let stride = image_size.width * 4;
let image_bytes_length = image_size.height * image_size.width * 4;
let crop_area_bytes_length = crop_rect.size.height * crop_rect.size.height * 4;
// If the image size is less or equal than the crop area we do nothing
if image_bytes_length <= crop_area_bytes_length {
return image_data;
}
let mut new_image_data = Vec::new();
let mut src = (crop_rect.origin.y * stride + crop_rect.origin.x * 4) as usize;
for _ in 0..crop_rect.size.height {
let row = &image_data[src .. src + (4 * crop_rect.size.width) as usize];
new_image_data.extend_from_slice(row);
src += stride as usize;
}
new_image_data
}
/// It writes an image to the destination target
/// draw_target: the destination target where the image_data will be copied
/// image_data: Pixel information of the image to be written. It takes RGBA8
/// image_size: The size of the image to be written
/// dest_rect: Area of the destination target where the pixels will be copied
/// smoothing_enabled: It determines if smoothing is applied to the image result
fn write_image(draw_target: &DrawTarget,
mut image_data: Vec<u8>,
image_size: Size2D<f64>,
dest_rect: Rect<f64>,
smoothing_enabled: bool,
composition_op: CompositionOp,
global_alpha: f32) {
if image_data.is_empty() {
return
}<|fim▁hole|> let image_rect = Rect::new(Point2D::zero(), image_size);
// rgba -> bgra
byte_swap(&mut image_data);
// From spec https://html.spec.whatwg.org/multipage/#dom-context-2d-drawimage
// When scaling up, if the imageSmoothingEnabled attribute is set to true, the user agent should attempt
// to apply a smoothing algorithm to the image data when it is scaled.
// Otherwise, the image must be rendered using nearest-neighbor interpolation.
let filter = if smoothing_enabled {
Filter::Linear
} else {
Filter::Point
};
// azure_hl operates with integers. We need to cast the image size
let image_size = image_size.to_i32();
if let Some(source_surface) =
draw_target.create_source_surface_from_data(&image_data,
image_size,
image_size.width * 4,
SurfaceFormat::B8G8R8A8) {
let draw_surface_options = DrawSurfaceOptions::new(filter, true);
let draw_options = DrawOptions::new(global_alpha, composition_op, AntialiasMode::None);
draw_target.draw_surface(source_surface,
dest_rect.to_azfloat(),
image_rect.to_azfloat(),
draw_surface_options,
draw_options);
}
}
fn is_zero_size_gradient(pattern: &Pattern) -> bool {
if let &Pattern::LinearGradient(ref gradient) = pattern {
if gradient.is_zero_size() {
return true;
}
}
false
}
pub trait PointToi32 {
fn to_i32(&self) -> Point2D<i32>;
}
impl PointToi32 for Point2D<f64> {
fn to_i32(&self) -> Point2D<i32> {
Point2D::new(self.x.to_i32().unwrap(),
self.y.to_i32().unwrap())
}
}
pub trait SizeToi32 {
fn to_i32(&self) -> Size2D<i32>;
}
impl SizeToi32 for Size2D<f64> {
fn to_i32(&self) -> Size2D<i32> {
Size2D::new(self.width.to_i32().unwrap(),
self.height.to_i32().unwrap())
}
}
pub trait RectToi32 {
fn to_i32(&self) -> Rect<i32>;
fn ceil(&self) -> Rect<f64>;
}
impl RectToi32 for Rect<f64> {
fn to_i32(&self) -> Rect<i32> {
Rect::new(Point2D::new(self.origin.x.to_i32().unwrap(),
self.origin.y.to_i32().unwrap()),
Size2D::new(self.size.width.to_i32().unwrap(),
self.size.height.to_i32().unwrap()))
}
fn ceil(&self) -> Rect<f64> {
Rect::new(Point2D::new(self.origin.x.ceil(),
self.origin.y.ceil()),
Size2D::new(self.size.width.ceil(),
self.size.height.ceil()))
}
}
pub trait ToAzFloat {
fn to_azfloat(&self) -> Rect<AzFloat>;
}
impl ToAzFloat for Rect<f64> {
fn to_azfloat(&self) -> Rect<AzFloat> {
Rect::new(Point2D::new(self.origin.x as AzFloat, self.origin.y as AzFloat),
Size2D::new(self.size.width as AzFloat, self.size.height as AzFloat))
}
}<|fim▁end|>
| |
<|file_name|>router.js<|end_file_name|><|fim▁begin|>PhotoAlbums.Router.map(function() {
this.resource('login');
this.resource('album', {path: '/:album_id'});
this.resource('photo', {path: 'photos/:photo_id'});<|fim▁hole|><|fim▁end|>
|
});
|
<|file_name|>test_rule_300.py<|end_file_name|><|fim▁begin|>import os
import unittest
from vsg.rules import iteration_scheme
from vsg import vhdlFile
from vsg.tests import utils
sTestDir = os.path.dirname(__file__)
lFile, eError =vhdlFile.utils.read_vhdlfile(os.path.join(sTestDir,'rule_300_test_input.vhd'))
dIndentMap = utils.read_indent_file()
lExpected = []
lExpected.append('')
utils.read_file(os.path.join(sTestDir, 'rule_300_test_input.fixed.vhd'), lExpected)
class test_iteration_scheme_rule(unittest.TestCase):
def setUp(self):
self.oFile = vhdlFile.vhdlFile(lFile)
self.assertIsNone(eError)
self.oFile.set_indent_map(dIndentMap)
def test_rule_300(self):
oRule = iteration_scheme.rule_300()
self.assertTrue(oRule)
self.assertEqual(oRule.name, 'iteration_scheme')
self.assertEqual(oRule.identifier, '300')
lExpected = [13, 17]
oRule.analyze(self.oFile)
self.assertEqual(lExpected, utils.extract_violation_lines_from_violation_object(oRule.violations))
def test_fix_rule_300(self):
oRule = iteration_scheme.rule_300()<|fim▁hole|> lActual = self.oFile.get_lines()
self.assertEqual(lExpected, lActual)
oRule.analyze(self.oFile)
self.assertEqual(oRule.violations, [])<|fim▁end|>
|
oRule.fix(self.oFile)
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup, find_packages
try: # for pip >= 10
from pip._internal.req import parse_requirements
except ImportError: # for pip <= 9.0.3
from pip.req import parse_requirements
import re, ast
# get version from __version__ variable in bench/__init__.py
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('bench/__init__.py', 'rb') as f:
version = str(ast.literal_eval(_version_re.search(
f.read().decode('utf-8')).group(1)))
requirements = parse_requirements("requirements.txt", session="")
setup(
name='bench',
description='Metadata driven, full-stack web framework',<|fim▁hole|> version=version,
packages=find_packages(),
zip_safe=False,
include_package_data=True,
install_requires=[str(ir.req) for ir in requirements],
dependency_links=[str(ir._link) for ir in requirements if ir._link],
entry_points='''
[console_scripts]
bench=bench.cli:cli
''',
)<|fim▁end|>
|
author='Frappe Technologies',
author_email='[email protected]',
|
<|file_name|>uniq.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use lib::llvm::ValueRef;
use middle::trans::base::*;
use middle::trans::build::*;
use middle::trans::common::*;
use middle::trans::datum::immediate_rvalue;
use middle::trans::datum;
use middle::trans::glue;
use middle::ty;
pub fn make_free_glue(bcx: block, vptrptr: ValueRef, box_ty: ty::t)
-> block {
let _icx = push_ctxt("uniq::make_free_glue");
let box_datum = immediate_rvalue(Load(bcx, vptrptr), box_ty);
let not_null = IsNotNull(bcx, box_datum.val);
do with_cond(bcx, not_null) |bcx| {
let body_datum = box_datum.box_body(bcx);
let bcx = glue::drop_ty(bcx, body_datum.to_ref_llval(bcx),
body_datum.ty);
if ty::type_contents(bcx.tcx(), box_ty).contains_managed() {
glue::trans_free(bcx, box_datum.val)
} else {
glue::trans_exchange_free(bcx, box_datum.val)
}
}
}
pub fn duplicate(bcx: block, src_box: ValueRef, src_ty: ty::t) -> Result {
let _icx = push_ctxt("uniq::duplicate");
// Load the body of the source (*src)
let src_datum = immediate_rvalue(src_box, src_ty);<|fim▁hole|>
// Malloc space in exchange heap and copy src into it
let MallocResult {
bcx: bcx,
box: dst_box,
body: dst_body
} = malloc_unique(bcx, body_datum.ty);
body_datum.copy_to(bcx, datum::INIT, dst_body);
rslt(bcx, dst_box)
}<|fim▁end|>
|
let body_datum = src_datum.box_body(bcx);
|
<|file_name|>test_models.py<|end_file_name|><|fim▁begin|>from django.test import TestCase
from seedsdb.models import (
Plant, Tag, Harvest, Activity
)
class TestPlant(TestCase):
def tearDown(self):
Plant.objects.all().delete()
def make_test_plant(self, aliases=None):
if aliases:
aliases = "|".join(aliases)
else:
aliases = ""
plant = Plant.objects.create(
name="Love in idleness",
description="Test description",
aliases=aliases
)
return plant
def test_unicode(self):
"The unicode method of a plant returns the expected value"
plant = self.make_test_plant()
self.assertEqual(u"Love in idleness", unicode(plant))
def test_slug_create(self):
"Creating a new plant sets the slug as expected "
plant = self.make_test_plant()
self.assertEqual("love-in-idleness", plant.slug)
def test_slug_update(self):
"Renaming an existinmg plant updates the slug as expected "
plant = self.make_test_plant()
plant.name = 'Love lies oozing'
plant.save()
self.assertEqual("love-lies-oozing", plant.slug)
def test_get_absolute_url(self):
plant = self.make_test_plant()<|fim▁hole|> self.assertEqual(expected_url, plant.get_absolute_url())
def test_aliases_string_none(self):
"Ensure the liases_string property works when no alias is defined"
plant = self.make_test_plant()
self.assertEqual(u"", plant.aliases_string)
def test_aliases_string_one(self):
"Ensure the aliases_string property works when one alias is defined"
plant = self.make_test_plant(aliases=["Alternative"])
self.assertEqual(u"Alternative", plant.aliases_string)
def test_aliases_string_multiple(self):
"Ensure the aliases property works when more than one alias is defined"
plant = self.make_test_plant(aliases=["Alternative", "Beta"])
self.assertEqual(u"Alternative, Beta", plant.aliases_string)
def test_aliases_search_none(self):
"Ensure the aliases_search property works when no alias is defined"
plant = self.make_test_plant()
self.assertEqual(u"", plant.aliases_search)
def test_aliases_search_one(self):
"Ensure the aliases_search property works when one alias is defined"
plant = self.make_test_plant(aliases=["Alternative"])
self.assertEqual(u"Alternative", plant.aliases_search)
def test_aliases_search_multiple(self):
"Ensure the aliases_search property works when more than one alias is defined"
plant = self.make_test_plant(aliases=["Alternative", "Beta"])
self.assertEqual(u"Alternative Beta", plant.aliases_search)
class TestTag(TestCase):
def tearDown(self):
Tag.objects.all().delete()
def test_unicode(self):
"The unicode method of a tag returns the expected value"
tag = Tag.objects.create(caption="test tag")
self.assertEqual(u"test tag", unicode(tag))
def test_tag_normalisation(self):
"A tag is normalised on save as expecgted"
tag = Tag.objects.create(caption=" VALUE ")
self.assertEqual("value", tag.caption)
class TestHarvest(TestCase):
def setUp(self):
self.test_plant = Plant.objects.create(
name="Love in idleness",
description="Test description",
)
def tearDown(self):
Harvest.objects.all().delete()
def test_unicode(self):
"The unicode method of a harvest returns the expected value"
harvest = Harvest.objects.create(season=2014, plant=self.test_plant)
self.assertEqual(u"2014 harvest of Love in idleness", unicode(harvest))
class TestActivity(TestCase):
def setUp(self):
self.test_plant = Plant.objects.create(
name="Love in idleness",
description="Test description",
)
def tearDown(self):
Activity.objects.all().delete()
def test_unicode(self):
"The unicode method of an activity returns the expected value"
activities = ['Sow', 'Plant out', 'Flowering', 'Harvest']
months = ['January', 'February', 'March', 'April', 'May', 'June',
'July', 'August', 'September', 'October', 'November', 'December']
for i, activity_name in enumerate(activities):
for j, month in enumerate(months):
activity = Activity.objects.create(plant=self.test_plant,
activity=i + 1,
month=j + 1)
expected = u"{0} Love in idleness in {1}".format(activity_name, month)
self.assertEqual(expected, unicode(activity))<|fim▁end|>
|
expected_url = "/plants/detail/love-in-idleness/"
|
<|file_name|>InstallerUtil.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2009-2013 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.uci.ics.asterix.installer.driver;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import javax.xml.bind.JAXBContext;<|fim▁hole|>
import edu.uci.ics.asterix.common.configuration.AsterixConfiguration;
import edu.uci.ics.asterix.event.schema.cluster.Cluster;
import edu.uci.ics.asterix.event.schema.cluster.Node;
public class InstallerUtil {
private static final String DEFAULT_ASTERIX_CONFIGURATION_PATH = "conf" + File.separator
+ "asterix-configuration.xml";
public static final String TXN_LOG_DIR = "txnLogs";
public static final String TXN_LOG_DIR_KEY_SUFFIX = "txnLogDir";
public static final String ASTERIX_CONFIGURATION_FILE = "asterix-configuration.xml";
public static final String TXN_LOG_CONFIGURATION_FILE = "log.properties";
public static final int CLUSTER_NET_PORT_DEFAULT = 1098;
public static final int CLIENT_NET_PORT_DEFAULT = 1099;
public static final int HTTP_PORT_DEFAULT = 8888;
public static final int WEB_INTERFACE_PORT_DEFAULT = 19001;
public static String getNodeDirectories(String asterixInstanceName, Node node, Cluster cluster) {
String storeDataSubDir = asterixInstanceName + File.separator + "data" + File.separator;
String[] storeDirs = null;
StringBuffer nodeDataStore = new StringBuffer();
String storeDirValue = node.getStore();
if (storeDirValue == null) {
storeDirValue = cluster.getStore();
if (storeDirValue == null) {
throw new IllegalStateException(" Store not defined for node " + node.getId());
}
storeDataSubDir = node.getId() + File.separator + storeDataSubDir;
}
storeDirs = storeDirValue.split(",");
for (String ns : storeDirs) {
nodeDataStore.append(ns + File.separator + storeDataSubDir.trim());
nodeDataStore.append(",");
}
nodeDataStore.deleteCharAt(nodeDataStore.length() - 1);
return nodeDataStore.toString();
}
public static AsterixConfiguration getAsterixConfiguration(String asterixConf) throws FileNotFoundException,
IOException, JAXBException {
if (asterixConf == null) {
asterixConf = InstallerDriver.getManagixHome() + File.separator + DEFAULT_ASTERIX_CONFIGURATION_PATH;
}
File file = new File(asterixConf);
JAXBContext ctx = JAXBContext.newInstance(AsterixConfiguration.class);
Unmarshaller unmarshaller = ctx.createUnmarshaller();
AsterixConfiguration asterixConfiguration = (AsterixConfiguration) unmarshaller.unmarshal(file);
return asterixConfiguration;
}
}<|fim▁end|>
|
import javax.xml.bind.JAXBException;
import javax.xml.bind.Unmarshaller;
|
<|file_name|>cilexer.py<|end_file_name|><|fim▁begin|># CodeIgniter
# http://codeigniter.com
#
# An open source application development framework for PHP
#
# This content is released under the MIT License (MIT)
#
# Copyright (c) 2014 - 2015, British Columbia Institute of Technology
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#<|fim▁hole|># The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Copyright (c) 2008 - 2014, EllisLab, Inc. (http://ellislab.com/)
# Copyright (c) 2014 - 2015, British Columbia Institute of Technology (http://bcit.ca/)
#
# http://opensource.org/licenses/MIT MIT License
import re
import copy
from pygments.lexer import DelegatingLexer
from pygments.lexers.web import PhpLexer, HtmlLexer
__all__ = [ 'CodeIgniterLexer' ]
class CodeIgniterLexer(DelegatingLexer):
"""
Handles HTML, PHP, JavaScript, and CSS is highlighted
PHP is highlighted with the "startline" option
"""
name = 'CodeIgniter'
aliases = [ 'ci', 'codeigniter' ]
filenames = [ '*.html', '*.css', '*.php', '*.xml', '*.static' ]
mimetypes = [ 'text/html', 'application/xhtml+xml' ]
def __init__(self, **options):
super(CodeIgniterLexer, self).__init__(HtmlLexer,
PhpLexer,
startinline=True)<|fim▁end|>
| |
<|file_name|>cli_update.py<|end_file_name|><|fim▁begin|># This Source Code Form is subject to the terms of the Mozilla Public<|fim▁hole|># License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from firefox_ui_harness.arguments import UpdateArguments
from firefox_ui_harness.runners import UpdateTestRunner
from firefox_ui_harness.runtests import cli
def cli_update():
cli(runner_class=UpdateTestRunner, parser_class=UpdateArguments)
if __name__ == '__main__':
cli_update()<|fim▁end|>
| |
<|file_name|>cli.py<|end_file_name|><|fim▁begin|># coding: utf-8
# <pycompressor - compress and merge static files (css,js) in html files>
# Copyright (C) <2012> Marcel Nicolay <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from optparse import OptionParser
import sys
class CLI(object):
color = {
"PINK": "",
"BLUE": "",
"CYAN": "",
"GREEN": "",
"YELLOW": "",
"RED": "",
"END": "",
}
@staticmethod
def show_colors():
CLI.color = {
"PINK": "\033[35m",
"BLUE": "\033[34m",
"CYAN": "\033[36m",
"GREEN": "\033[32m",<|fim▁hole|> }
def __init__(self):
self.__config_parser()
def __config_parser(self):
self.__parser = OptionParser(usage="usage: %prog [options] start")
self.__parser.add_option("-c", "--config",
dest="config_file",
default="compressor.yaml",
help="Use a specific config file. If not provided, will search for 'compressor.yaml' in the current directory.")
self.__parser.add_option("-s", "--sync",
dest="sync",
action="store_true",
default=False,
help="Sync files with S3")
self.__parser.add_option("-v", "--version",
action="store_true",
dest="compressor_version",
default=False,
help="Displays compressor version and exit.")
self.__parser.add_option("--color",
action="store_true",
dest="show_colors",
default=False,
help="Output with beautiful colors.")
self.__parser.add_option("--prefix",
dest="prefix",
default="min",
help="Use prefix in output js and css.")
def get_parser(self):
return self.__parser
def parse(self):
return self.__parser.parse_args()
def error_and_exit(self, msg):
self.msg("[ERROR] %s\n" % msg, "RED")
sys.exit(1)
def info_and_exit(self, msg):
self.msg("%s\n" % msg, "BLUE")
sys.exit(0)
def msg(self, msg, color="CYAN"):
print "%s%s%s" % (self.color[color], msg, self.color["END"])<|fim▁end|>
|
"YELLOW": "\033[33m",
"RED": "\033[31m",
"END": "\033[0m",
|
<|file_name|>element.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Element nodes.
use dom::bindings::utils::{BindingObject, CacheableWrapper, DOMString, ErrorResult, Fallible, WrapperCache};
use dom::bindings::utils::{null_str_as_empty, null_str_as_empty_ref};
use dom::htmlcollection::HTMLCollection;
use dom::clientrect::ClientRect;
use dom::clientrectlist::ClientRectList;
use dom::node::{ElementNodeTypeId, Node, ScriptView, AbstractNode};
use layout_interface::{ContentBoxQuery, ContentBoxResponse, ContentBoxesQuery};
use layout_interface::{ContentBoxesResponse};
use newcss::stylesheet::Stylesheet;
use js::jsapi::{JSContext, JSObject};
use std::cell::Cell;
use std::comm;
use std::str::eq_slice;
use std::ascii::StrAsciiExt;
pub struct Element {
node: Node<ScriptView>,
tag_name: ~str, // TODO: This should be an atom, not a ~str.
attrs: ~[Attr],
style_attribute: Option<Stylesheet>,
}
impl CacheableWrapper for Element {
fn get_wrappercache(&mut self) -> &mut WrapperCache {<|fim▁hole|> }
fn wrap_object_shared(@mut self, _cx: *JSContext, _scope: *JSObject) -> *JSObject {
fail!("no wrapping")
}
}
impl BindingObject for Element {
fn GetParentObject(&self, cx: *JSContext) -> Option<@mut CacheableWrapper> {
self.node.GetParentObject(cx)
}
}
#[deriving(Eq)]
pub enum ElementTypeId {
HTMLElementTypeId,
HTMLAnchorElementTypeId,
HTMLAppletElementTypeId,
HTMLAreaElementTypeId,
HTMLAudioElementTypeId,
HTMLBaseElementTypeId,
HTMLBRElementTypeId,
HTMLBodyElementTypeId,
HTMLButtonElementTypeId,
HTMLCanvasElementTypeId,
HTMLDataElementTypeId,
HTMLDataListElementTypeId,
HTMLDirectoryElementTypeId,
HTMLDListElementTypeId,
HTMLDivElementTypeId,
HTMLEmbedElementTypeId,
HTMLFieldSetElementTypeId,
HTMLFontElementTypeId,
HTMLFormElementTypeId,
HTMLFrameElementTypeId,
HTMLFrameSetElementTypeId,
HTMLHRElementTypeId,
HTMLHeadElementTypeId,
HTMLHeadingElementTypeId,
HTMLHtmlElementTypeId,
HTMLIframeElementTypeId,
HTMLImageElementTypeId,
HTMLInputElementTypeId,
HTMLLabelElementTypeId,
HTMLLegendElementTypeId,
HTMLLinkElementTypeId,
HTMLLIElementTypeId,
HTMLMapElementTypeId,
HTMLMediaElementTypeId,
HTMLMetaElementTypeId,
HTMLMeterElementTypeId,
HTMLModElementTypeId,
HTMLObjectElementTypeId,
HTMLOListElementTypeId,
HTMLOptGroupElementTypeId,
HTMLOptionElementTypeId,
HTMLOutputElementTypeId,
HTMLParagraphElementTypeId,
HTMLParamElementTypeId,
HTMLPreElementTypeId,
HTMLProgressElementTypeId,
HTMLQuoteElementTypeId,
HTMLScriptElementTypeId,
HTMLSelectElementTypeId,
HTMLSourceElementTypeId,
HTMLSpanElementTypeId,
HTMLStyleElementTypeId,
HTMLTableElementTypeId,
HTMLTableCaptionElementTypeId,
HTMLTableCellElementTypeId,
HTMLTableColElementTypeId,
HTMLTableRowElementTypeId,
HTMLTableSectionElementTypeId,
HTMLTemplateElementTypeId,
HTMLTextAreaElementTypeId,
HTMLTimeElementTypeId,
HTMLTitleElementTypeId,
HTMLTrackElementTypeId,
HTMLUListElementTypeId,
HTMLVideoElementTypeId,
HTMLUnknownElementTypeId,
}
//
// Element methods
//
impl<'self> Element {
pub fn new(type_id: ElementTypeId, tag_name: ~str) -> Element {
Element {
node: Node::new(ElementNodeTypeId(type_id)),
tag_name: tag_name,
attrs: ~[],
style_attribute: None,
}
}
pub fn get_attr(&'self self, name: &str) -> Option<&'self str> {
// FIXME: Need an each() that links lifetimes in Rust.
for attr in self.attrs.iter() {
if eq_slice(attr.name, name) {
let val: &str = attr.value;
return Some(val);
}
}
return None;
}
pub fn set_attr(&mut self,
abstract_self: AbstractNode<ScriptView>,
raw_name: &DOMString,
raw_value: &DOMString) {
let name = null_str_as_empty(raw_name);
let value_cell = Cell::new(null_str_as_empty(raw_value));
let mut found = false;
for attr in self.attrs.mut_iter() {
if eq_slice(attr.name, name) {
attr.value = value_cell.take().clone();
found = true;
break;
}
}
if !found {
self.attrs.push(Attr::new(name.to_str(), value_cell.take().clone()));
}
if "style" == name {
self.style_attribute = Some(
Stylesheet::from_attribute(
FromStr::from_str("http://www.example.com/").unwrap(),
null_str_as_empty_ref(raw_value)));
}
//XXXjdm We really need something like a vtable so we can call AfterSetAttr.
// This hardcoding is awful.
match abstract_self.type_id() {
ElementNodeTypeId(HTMLImageElementTypeId) => {
do abstract_self.with_mut_image_element |image| {
image.AfterSetAttr(raw_name, raw_value);
}
}
ElementNodeTypeId(HTMLIframeElementTypeId) => {
do abstract_self.with_mut_iframe_element |iframe| {
iframe.AfterSetAttr(raw_name, raw_value);
}
}
_ => ()
}
match self.node.owner_doc {
Some(owner) => do owner.with_base |owner| { owner.content_changed() },
None => {}
}
}
fn get_scope_and_cx(&self) -> (*JSObject, *JSContext) {
let doc = self.node.owner_doc.unwrap();
let win = doc.with_base(|doc| doc.window.unwrap());
let cx = win.page.js_info.get_ref().js_compartment.cx.ptr;
let cache = win.get_wrappercache();
let scope = cache.get_wrapper();
(scope, cx)
}
}
impl Element {
pub fn TagName(&self) -> DOMString {
Some(self.tag_name.to_owned().to_ascii_upper())
}
pub fn Id(&self) -> DOMString {
None
}
pub fn SetId(&self, _id: &DOMString) {
}
pub fn GetAttribute(&self, name: &DOMString) -> DOMString {
self.get_attr(null_str_as_empty_ref(name)).map(|s| s.to_owned())
}
pub fn GetAttributeNS(&self, _namespace: &DOMString, _localname: &DOMString) -> DOMString {
None
}
pub fn SetAttribute(&mut self,
abstract_self: AbstractNode<ScriptView>,
name: &DOMString,
value: &DOMString) -> ErrorResult {
self.set_attr(abstract_self, name, value);
Ok(())
}
pub fn SetAttributeNS(&self, _namespace: &DOMString, _localname: &DOMString, _value: &DOMString) -> ErrorResult {
Ok(())
}
pub fn RemoveAttribute(&self, _name: &DOMString) -> ErrorResult {
Ok(())
}
pub fn RemoveAttributeNS(&self, _namespace: &DOMString, _localname: &DOMString) -> ErrorResult {
Ok(())
}
pub fn HasAttribute(&self, _name: &DOMString) -> bool {
false
}
pub fn HasAttributeNS(&self, _nameapce: &DOMString, _localname: &DOMString) -> bool {
false
}
pub fn GetElementsByTagName(&self, _localname: &DOMString) -> @mut HTMLCollection {
let (scope, cx) = self.get_scope_and_cx();
HTMLCollection::new(~[], cx, scope)
}
pub fn GetElementsByTagNameNS(&self, _namespace: &DOMString, _localname: &DOMString) -> Fallible<@mut HTMLCollection> {
let (scope, cx) = self.get_scope_and_cx();
Ok(HTMLCollection::new(~[], cx, scope))
}
pub fn GetElementsByClassName(&self, _names: &DOMString) -> @mut HTMLCollection {
let (scope, cx) = self.get_scope_and_cx();
HTMLCollection::new(~[], cx, scope)
}
pub fn MozMatchesSelector(&self, _selector: &DOMString) -> Fallible<bool> {
Ok(false)
}
pub fn SetCapture(&self, _retargetToElement: bool) {
}
pub fn ReleaseCapture(&self) {
}
pub fn MozRequestFullScreen(&self) {
}
pub fn MozRequestPointerLock(&self) {
}
pub fn GetClientRects(&self, abstract_self: AbstractNode<ScriptView>) -> @mut ClientRectList {
let (rects, cx, scope) = match self.node.owner_doc {
Some(doc) => {
match doc.with_base(|doc| doc.window) {
Some(win) => {
let node = abstract_self;
assert!(node.is_element());
let page = win.page;
let (port, chan) = comm::stream();
match page.query_layout(ContentBoxesQuery(node, chan), port) {
ContentBoxesResponse(rects) => {
let cx = page.js_info.get_ref().js_compartment.cx.ptr;
let cache = win.get_wrappercache();
let scope = cache.get_wrapper();
let rects = do rects.map |r| {
ClientRect::new(
r.origin.y.to_f32(),
(r.origin.y + r.size.height).to_f32(),
r.origin.x.to_f32(),
(r.origin.x + r.size.width).to_f32(),
cx,
scope)
};
Some((rects, cx, scope))
},
}
}
None => {
debug!("no window");
None
}
}
}
None => {
debug!("no document");
None
}
}.unwrap();
ClientRectList::new(rects, cx, scope)
}
pub fn GetBoundingClientRect(&self, abstract_self: AbstractNode<ScriptView>) -> @mut ClientRect {
match self.node.owner_doc {
Some(doc) => {
match doc.with_base(|doc| doc.window) {
Some(win) => {
let page = win.page;
let node = abstract_self;
assert!(node.is_element());
let (port, chan) = comm::stream();
match page.query_layout(ContentBoxQuery(node, chan), port) {
ContentBoxResponse(rect) => {
let cx = page.js_info.get_ref().js_compartment.cx.ptr;
let cache = win.get_wrappercache();
let scope = cache.get_wrapper();
ClientRect::new(
rect.origin.y.to_f32(),
(rect.origin.y + rect.size.height).to_f32(),
rect.origin.x.to_f32(),
(rect.origin.x + rect.size.width).to_f32(),
cx,
scope)
}
}
}
None => fail!("no window")
}
}
None => fail!("no document")
}
}
pub fn ScrollIntoView(&self, _top: bool) {
}
pub fn ScrollTop(&self) -> i32 {
0
}
pub fn SetScrollTop(&mut self, _scroll_top: i32) {
}
pub fn ScrollLeft(&self) -> i32 {
0
}
pub fn SetScrollLeft(&mut self, _scroll_left: i32) {
}
pub fn ScrollWidth(&self) -> i32 {
0
}
pub fn ScrollHeight(&self) -> i32 {
0
}
pub fn ClientTop(&self) -> i32 {
0
}
pub fn ClientLeft(&self) -> i32 {
0
}
pub fn ClientWidth(&self) -> i32 {
0
}
pub fn ClientHeight(&self) -> i32 {
0
}
pub fn GetInnerHTML(&self) -> Fallible<DOMString> {
Ok(None)
}
pub fn SetInnerHTML(&mut self, _value: &DOMString) -> ErrorResult {
Ok(())
}
pub fn GetOuterHTML(&self) -> Fallible<DOMString> {
Ok(None)
}
pub fn SetOuterHTML(&mut self, _value: &DOMString) -> ErrorResult {
Ok(())
}
pub fn InsertAdjacentHTML(&mut self, _position: &DOMString, _text: &DOMString) -> ErrorResult {
Ok(())
}
pub fn QuerySelector(&self, _selectors: &DOMString) -> Fallible<Option<AbstractNode<ScriptView>>> {
Ok(None)
}
}
pub struct Attr {
name: ~str,
value: ~str,
}
impl Attr {
pub fn new(name: ~str, value: ~str) -> Attr {
Attr {
name: name,
value: value
}
}
}<|fim▁end|>
|
self.node.get_wrappercache()
|
<|file_name|>challenge13.rs<|end_file_name|><|fim▁begin|>use aes::Aes128;
use aes::{chunks_count, BLOCK_SIZE};
use crate::errors::*;
use crate::prefix_suffix_oracles::Oracle;
use crate::prefix_suffix_oracles::Oracle13;
use super::challenge12::prefix_plus_suffix_length;
use super::prefix_length;
// The following function works under the single assumption that the target value "user" (to be
// replaced by "admin") is stored at the very end of the profile.
pub fn run() -> Result<()> {
let oracle = Oracle13::new()?;
let prefix_len = prefix_length(&oracle)?;
let (prefix_chunks_count, prefix_fill_len) = chunks_count(prefix_len);
let target_cleartext = b"admin".pad();
let mut input = vec![0; prefix_fill_len];
input.extend_from_slice(&target_cleartext);
// Determine the ciphertext for target_cleartext
let target_last_block = &oracle
.encrypt(&input)?
.split_off(prefix_chunks_count * BLOCK_SIZE)[0..BLOCK_SIZE];
// The following input is chosen in such a way that the cleartext in oracle looks as follows:
// email=\0 ... \0 || \0 ...\0&uid=10&role= || user <- padding ->
let (chunks_count, fill_len) = chunks_count(prefix_plus_suffix_length(&oracle)?);
let mut ciphertext = oracle.encrypt(&vec![0; fill_len + "user".len()])?;
// Sanity check
compare_eq((chunks_count + 1) * BLOCK_SIZE, ciphertext.len())?;
<|fim▁hole|> // Replace last block with target_last_block
ciphertext[chunks_count * BLOCK_SIZE..].copy_from_slice(target_last_block);
oracle.verify_solution(&ciphertext)
}<|fim▁end|>
| |
<|file_name|>log.go<|end_file_name|><|fim▁begin|>package sdialog // import "github.com/nathanaelle/sdialog/v2"
import (
"fmt"
"log"
)
type (
// LogLevel describe a log handler for a severity
LogLevel byte
logWriter LogLevel
)
const (
LogEMERG LogLevel = iota + '0' // system is unusable
LogALERT // action must be taken immediately
LogCRIT // critical conditions
LogERR // error conditions
LogWARNING // warning conditions
LogNOTICE // normal but significant condition
LogINFO // informational
LogDEBUG // debug-level messages
)
func stderr(l LogLevel, m string) {
logWriter(l).Write([]byte(m))
}
func (l logWriter) Write(mb []byte) (int, error) {
encoded := make([]byte, 0, len(mb)+4)
encoded = append(encoded, '<')
encoded = append(encoded, byte(l))
encoded = append(encoded, '>')
encoded = append(encoded, mb...)
encoded = append(encoded, '\n')
sdcRead(func(sdc sdConf) error {
sdc.logdest.Write(encoded)
return nil
})
return len(mb), nil
}
// Logf log a log to for a LogLevel after formating
func Logf(l LogLevel, format string, v ...interface{}) error {
return l.Logf(format, v...)
}
// Log log a log to for a LogLevel
func Log(l LogLevel, message string) error {
return l.Log(message)
}
// Logger expose a log.Logger for a sdialog.LogLevel
func (l LogLevel) Logger(prefix string, flag int) *log.Logger {
return log.New(logWriter(l), prefix, flag)
}
// Logf log a log to for a LogLevel after formating
func (l LogLevel) Logf(format string, v ...interface{}) error {
if noSdAvailable() {
return ErrNoSDialogAvailable
}
return l.Log(fmt.Sprintf(format, v...))
}
// Log log a log to for a LogLevel
func (l LogLevel) Log(message string) error {
if noSdAvailable() {
return ErrNoSDialogAvailable
}
if l < LogEMERG || l > LogDEBUG {
err := &outOfBoundsLogLevelError{l, message}
stderr(LogCRIT, err.Error())
return err
}
stderr(l, message)
return nil
}
// LogError log a error to for a LogLevel
func (l LogLevel) LogError(message error) error {
if noSdAvailable() {
return ErrNoSDialogAvailable
}
if l < LogEMERG || l > LogDEBUG {
err := &outOfBoundsLogLevelError{l, message.Error()}
stderr(LogCRIT, err.Error())
return err
}<|fim▁hole|>
stderr(l, message.Error())
return nil
}<|fim▁end|>
| |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.contrib.auth import views as auth
def login(request):
return auth.login(request, template_name="registration/login.haml")
<|fim▁hole|>
def password_change_done(request):
    """Delegate to django.contrib.auth's password-change-done view with
    the project's HAML template."""
    template = "registration/password_change_done.haml"
    return auth.password_change_done(request, template_name=template)
|
def password_change(request):
    """Delegate to django.contrib.auth's password-change view with the
    project's HAML template."""
    template = "registration/password_change_form.haml"
    return auth.password_change(request, template_name=template)
|
<|file_name|>hello.spec.ts<|end_file_name|><|fim▁begin|>import {expect} from "chai";
import {generateHello} from "./hello";<|fim▁hole|>describe("generateHello()", () => {
it("should error when a name contains a number", () => {
const name = "Freddie12";
const result = generateHello(name);
expect(result).to.be.instanceof(Error);
});
it("should error when a name contains a special character", () => {
const name = "Freddie£";
const result = generateHello(name);
expect(result).to.be.instanceof(Error);
});
it("should generate a welcome message if the name is alphabetic", () => {
const name = "Freddie";
const result = generateHello(name);
expect(result).to.eql({
message: "hello Freddie",
});
});
});<|fim▁end|>
| |
<|file_name|>delicious.py<|end_file_name|><|fim▁begin|># originally from
# http://code.google.com/p/django-syncr/source/browse/trunk/app/delicious.py
# supousse django-syncr should be installed as external app and this code
# inherit from that, but django-syncr have not a setup.py file (so can't be
# added in src/pinax/requirements/external_apps.txt) and is not migrated to git
# (so can add a setup.file in a different fork from svn trunk)
import time, datetime, calendar
import httplib
import urllib, urllib2
import base64
#from syncr.delicious.models import Bookmark
from bookmarks.models import Bookmark, BookmarkInstance
from bookmarks.forms import BookmarkInstanceForm
try:
import xml.etree.ElementTree as ET
except:
import elementtree.ElementTree as ET
class DeliciousAPI:
    """
    DeliciousAPI is a bare-bones interface to the del.icio.us API. It's
    used by DeliciousSyncr objects and it's not recommended to use it
    directly.

    NOTE(review): in its current state this class never touches the
    network -- _request() ignores its arguments and parses a hard-coded
    local XML fixture. The real HTTP plumbing is commented out below.
    """
    # Base host / versioned endpoint of the (now defunct) delicious API.
    _deliciousApiHost = 'https://api.del.icio.us/'
    _deliciousApiURL = 'https://api.del.icio.us/v1/'
    def __init__(self, user, passwd):
        """
        Initialize a DeliciousAPI object.
        Required arguments
        user: The del.icio.us username as a string.
        passwd: The username's password as a string.
        """
        # Credentials are stored but currently unused while the HTTP
        # auth handler below remains commented out.
        self.user = user
        self.passwd = passwd
        # pm = urllib2.HTTPPasswordMgrWithDefaultRealm()
        # pm.add_password(None, 'https://' + self._deliciousApiHost, self.user, self.passwd)
        # handler = urllib2.HTTPBasicAuthHandler(pm)
        # self.opener = urllib2.build_opener(handler)
    def _request(self, path, params=None):
        # NOTE(review): debug stub. The commented-out code below is the
        # real request implementation; restore it (and remove the
        # developer-specific fixture path) before production use.
        # time.sleep(1.5)
        # if params:
        # post_data = urllib.urlencode(params)
        # url = self._deliciousApiURL + path + post_data
        # else:
        # url = self._deliciousApiURL + path
        # request = urllib2.Request(url)
        # request.add_header('User-Agent', 'django/syncr.app.delicious')
        # credentials = base64.encodestring("%s:%s" % (self.user, self.passwd))
        # request.add_header('Authorization', ('Basic %s' % credentials))
        # f = self.opener.open(request)
        # The file handle is never closed; harmless for a one-shot
        # script, but worth fixing when the HTTP path is restored.
        f = open('/home/julia/hacktivism/testing-parsing-bookmarks/all.xml')
        # Returns an ElementTree whose root holds the <post> elements.
        return ET.parse(f)
class DeliciousSyncr:
"""<|fim▁hole|>
There are three ways to sync:
- All bookmarks for the user
- Only recent bookmarks for the user
- Bookmarks based on a limited search/query functionality. Currently
based on date, tag, and URL.
This app requires the excellent ElementTree, which is included in
Python 2.5. Otherwise available at:
http://effbot.org/zone/element-index.htm
"""
def __init__(self, username, password):
"""
Construct a new DeliciousSyncr.
Required arguments
username: a del.icio.us username
password: the user's password
"""
self.delicious = DeliciousAPI(username, password)
def clean_tags(self, tags):
"""
Utility method to clean up del.icio.us tags, removing double
quotes, duplicate tags and return a unicode string.
Required arguments
tags: a tag string
"""
tags = tags.lower().replace('\"', '').split(' ')
tags = set(tags)
tags = " ".join(tags)
return u'%s' % tags
def _syncPost(self, post_elem, user):
time_lst = time.strptime(post_elem.attrib['time'], "%Y-%m-%dT%H:%M:%SZ")
time_obj = datetime.datetime(*time_lst[0:7])
tags = self.clean_tags(post_elem.attrib['tag'])
try:
extended = post_elem.attrib['extended']
except KeyError:
extended = ''
default_dict = {
'description': post_elem.attrib['description'],
'tags': tags,
'url': post_elem.attrib['href'],
# Is post_hash attrib unique to the post/URL or post/username ?!
'post_hash': post_hash,
'saved_date': time_obj,
'extended_info': extended,
}
# Save only shared bookmarks
# try:
# is_shared = post_elem.attrib['shared'] # Only set, when it isn't shared
# except KeyError:
# obj, created = Bookmark.objects.get_or_create(
# post_hash=post_hash, defaults=default_dict)
# return obj
# return None
# to save pinax Bookmark
try:
unicode(default_dict['description'].decode('latin-1'))
except:
default_dict['description'] = ''
print default_dict['description']
bookmark_instance_form = BookmarkInstanceForm(user,default_dict)
if bookmark_instance_form.is_valid():
bookmark_instance = bookmark_instance_form.save(commit=False)
bookmark_instance.user = user
bookmark_instance.save()
print bookmark_instance
bookmark = bookmark_instance.bookmark
try:
headers = {
"Accept" : "text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5",
"Accept-Language" : "en-us,en;q=0.5",
"Accept-Charset": "ISO-8859-1,utf-8;q=0.7,*;q=0.7",
"Connection" : "close",
##"User-Agent": settings.URL_VALIDATOR_USER_AGENT
}
req = urllib2.Request(bookmark.get_favicon_url(force=True), None, headers)
u = urllib2.urlopen(req)
has_favicon = True
except:
has_favicon = False
bookmark.has_favicon = has_favicon
bookmark.favicon_checked = datetime.datetime.now()
# bookmark.added = bookmark['add_date']
bookmark.save()
# print bookmark
else:
print "bookmark_instance_form no es valido"
return
def syncRecent(self, count=15, tag=None):
"""
Synchronize the user's recent bookmarks.
Optional arguments:
count: The number of bookmarks to return, default 15, max 100.
tag: A string. Limit to recent bookmarks that match this tag.
"""
params = {'count': count}
if tag: params['tag'] = tag
result = self.delicious._request('posts/recent?', params)
root = result.getroot()
for post in list(root):
self._syncPost(post)
def syncAll(self, user, tag=None):
"""
Synchronize all of the user's bookmarks. WARNING this may take
a while! Excessive use may get you throttled.
Optional arguments
tag: A string. Limit to all bookmarks that match this tag.
"""
params = dict()
if tag: params = {'tag': tag}
result = self.delicious._request('posts/all?', params)
root = result.getroot()
for post in list(root):
self._syncPost(post, user)
def datetime2delicious(self, dt):
"""
Utility method to convert a Python datetime to a string format
suitable for the del.icio.us API.
Required arguments
dt: a datetime object
"""
return dt.strftime("%Y-%m-%dT%H:%M:%SZ")
def syncBookmarks(self, **kwargs):
"""
Synchronize bookmarks. If no arguments are used, today's
bookmarks will be sync'd.
Optional keyword arguments
date: A datetime object. Sync only bookmarks from this date.
tag: A string. Limit to bookmarks matching this tag.
url: A string. Limit to bookmarks matching this URL.
"""
params = kwargs
if kwargs.has_key('date'):
params['date'] = self.datetime2delicious(params['date'])
result = self.delicious._request('posts/get?', )
root = result.getroot()
for post in list(root):
self._syncPost(post)<|fim▁end|>
|
DeliciousSyncr objects sync del.icio.us bookmarks to the Django
backend. The constructor requires a username and password for
authenticated access to the API.
|
<|file_name|>test_future.py<|end_file_name|><|fim▁begin|>import asyncio
import sys
import traceback
import pytest
from pykka import Future, Timeout, get_all
def run_async(coroutine):
    """Drive *coroutine* to completion on a private event loop and return its result.

    A fresh loop is created and closed per call: calling
    ``asyncio.get_event_loop()`` with no running loop is deprecated
    since Python 3.10, and the original implementation also never
    closed the loop it obtained. ``asyncio.ensure_future`` is kept
    (rather than switching to ``asyncio.run``) so the generator-based
    ``yield from`` test in this module keeps working.
    """
    loop = asyncio.new_event_loop()
    try:
        task = asyncio.ensure_future(coroutine, loop=loop)
        return loop.run_until_complete(task)
    finally:
        loop.close()
def test_base_future_get_is_not_implemented():
    # The abstract base class must not pretend to deliver a value.
    with pytest.raises(NotImplementedError):
        Future().get()


def test_base_future_set_is_not_implemented():
    # Nor may it silently accept one.
    with pytest.raises(NotImplementedError):
        Future().set(None)


def test_base_future_set_exception_is_not_implemented():
    # Exception delivery is equally abstract.
    with pytest.raises(NotImplementedError):
        Future().set_exception(None)
def test_set_multiple_times_fails(future):
    future.set(0)
    # A future is single-assignment; the second set must blow up.
    with pytest.raises(Exception):
        future.set(0)


def test_get_all_blocks_until_all_futures_are_available(futures):
    for index, fut in enumerate(futures[:3]):
        fut.set(index)
    assert get_all(futures) == [0, 1, 2]


def test_get_all_raises_timeout_if_not_all_futures_are_available(futures):
    for index, fut in enumerate(futures[:2]):
        fut.set(index)
    # The third future is deliberately left unset.
    with pytest.raises(Timeout):
        get_all(futures, timeout=0)


def test_get_all_can_be_called_multiple_times(futures):
    for index, fut in enumerate(futures[:3]):
        fut.set(index)
    assert get_all(futures) == get_all(futures)
def test_future_in_future_works(runtime):
    # A future is an ordinary value and may itself be set as a result.
    inner = runtime.future_class()
    outer = runtime.future_class()
    inner.set("foo")
    outer.set(inner)
    assert outer.get().get() == "foo"
def test_get_raises_exception_with_full_traceback(runtime):
    """get() must re-raise the stored exception with the original frames
    preserved at the head of the new traceback."""
    exc_class_get = None
    exc_class_set = None
    exc_instance_get = None
    exc_instance_set = None
    exc_traceback_get = None
    exc_traceback_set = None
    future = runtime.future_class()
    try:
        raise NameError("foo")
    except NameError:
        exc_class_set, exc_instance_set, exc_traceback_set = sys.exc_info()
        # set_exception() with no argument presumably captures the
        # active exception via sys.exc_info() -- confirm against pykka docs.
        future.set_exception()
    # We could move to another thread at this point
    try:
        future.get()
    except NameError:
        exc_class_get, exc_instance_get, exc_traceback_get = sys.exc_info()
    # Same class and same instance must come back out.
    assert exc_class_set == exc_class_get
    assert exc_instance_set == exc_instance_get
    exc_traceback_list_set = list(reversed(traceback.extract_tb(exc_traceback_set)))
    exc_traceback_list_get = list(reversed(traceback.extract_tb(exc_traceback_get)))
    # All frames from the first traceback should be included in the
    # traceback from the future.get() reraise
    assert len(exc_traceback_list_set) < len(exc_traceback_list_get)
    for i, frame in enumerate(exc_traceback_list_set):
        assert frame == exc_traceback_list_get[i]
def test_future_supports_await_syntax(future):
    async def get_value():
        return await future

    future.set(1)
    assert run_async(get_value()) == 1


def test_future_supports_yield_from_syntax(future):
    def get_value():
        # Generator-based delegation must work just like ``await``.
        return (yield from future)

    future.set(1)
    assert run_async(get_value()) == 1
def test_filter_excludes_items_not_matching_predicate(future):
    over_ten = future.filter(lambda item: item > 10)
    future.set(list(range(1, 20, 2)))
    assert over_ten.get(timeout=0) == [11, 13, 15, 17, 19]


def test_filter_on_noniterable(future):
    over_ten = future.filter(lambda item: item > 10)
    future.set(1)
    # filter() over a scalar result cannot iterate it.
    with pytest.raises(TypeError):
        over_ten.get(timeout=0)


def test_filter_preserves_the_timeout_kwarg(future):
    over_ten = future.filter(lambda item: item > 10)
    with pytest.raises(Timeout):
        over_ten.get(timeout=0)


def test_filter_reuses_result_if_called_multiple_times(future, mocker):
    predicate = mocker.Mock(side_effect=[False, True, Exception])
    filtered = future.filter(predicate)
    future.set([1, 2])
    for _ in range(3):
        # Only the first get() runs the predicate; later calls return the
        # cached result, so the Exception side effect never fires.
        assert filtered.get(timeout=0) == [2]
def test_join_combines_multiple_futures_into_one(futures):
    combined = futures[0].join(futures[1], futures[2])
    for index, fut in enumerate(futures[:3]):
        fut.set(index)
    assert combined.get(timeout=0) == [0, 1, 2]


def test_join_preserves_timeout_kwarg(futures):
    combined = futures[0].join(futures[1], futures[2])
    for index, fut in enumerate(futures[:2]):
        fut.set(index)
    # futures[2] never receives a value, so the join cannot complete.
    with pytest.raises(Timeout):
        combined.get(timeout=0)
def test_map_returns_future_which_passes_result_through_func(future):
    shifted = future.map(lambda value: value + 10)
    future.set(30)
    assert shifted.get(timeout=0) == 40


def test_map_works_on_dict(future):
    # Regression test for issue #64
    lookup = future.map(lambda mapping: mapping["foo"])
    future.set({"foo": "bar"})
    assert lookup.get(timeout=0) == "bar"


def test_map_does_not_map_each_value_in_futures_iterable_result(future):
    # Behavior changed in Pykka 2.0: map() used to apply the function to
    # each element of an iterable result (yielding [20, 30, 40]); it now
    # applies it to the result as a whole, so list + int raises.
    shifted = future.map(lambda value: value + 10)
    future.set([10, 20, 30])
    with pytest.raises(TypeError):
        shifted.get(timeout=0)


def test_map_preserves_timeout_kwarg(future):
    shifted = future.map(lambda value: value + 10)
    with pytest.raises(Timeout):
        shifted.get(timeout=0)


def test_map_reuses_result_if_called_multiple_times(future, mocker):
    func = mocker.Mock(side_effect=[10, Exception])
    shifted = future.map(func)
    future.set(30)
    for _ in range(2):
        # Only the first get() invokes func; the result is then cached.
        assert shifted.get(timeout=0) == 10
def test_reduce_applies_function_cumulatively_from_the_left(future):
    summed = future.reduce(lambda acc, item: acc + item)
    future.set([1, 2, 3, 4])
    assert summed.get(timeout=0) == 10


def test_reduce_accepts_an_initial_value(future):
    summed = future.reduce(lambda acc, item: acc + item, 5)
    future.set([1, 2, 3, 4])
    assert summed.get(timeout=0) == 15


def test_reduce_on_noniterable(future):
    summed = future.reduce(lambda acc, item: acc + item)
    future.set(1)
    # reduce() over a scalar result cannot iterate it.
    with pytest.raises(TypeError):
        summed.get(timeout=0)


def test_reduce_preserves_the_timeout_kwarg(future):
    summed = future.reduce(lambda acc, item: acc + item)
    with pytest.raises(Timeout):
        summed.get(timeout=0)


def test_reduce_reuses_result_if_called_multiple_times(future, mocker):
    func = mocker.Mock(side_effect=[3, 6, Exception])
    summed = future.reduce(func)
    future.set([1, 2, 3])
    for _ in range(3):
        # Only the first get() runs the reduction; later calls return the
        # cached value, so the Exception side effect never fires.
        assert summed.get(timeout=0) == 6
| |
<|file_name|>types.go<|end_file_name|><|fim▁begin|>/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1beta1
import (
v1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/util/intstr"
)
const (
	// ControllerRevisionHashLabelKey is the label key under which a Pod carries
	// the hash of the controller revision it was created from.
	ControllerRevisionHashLabelKey = "controller-revision-hash"
	// StatefulSetRevisionLabel is an alias of ControllerRevisionHashLabelKey,
	// kept for source compatibility.
	StatefulSetRevisionLabel = ControllerRevisionHashLabelKey
	// StatefulSetPodNameLabel is the label key under which a StatefulSet Pod
	// carries its own name.
	StatefulSetPodNameLabel = "statefulset.kubernetes.io/pod-name"
)
// ScaleSpec describes the attributes of a scale subresource
type ScaleSpec struct {
// desired number of instances for the scaled object.
// +optional
Replicas int32 `json:"replicas,omitempty" protobuf:"varint,1,opt,name=replicas"`
}
// ScaleStatus represents the current status of a scale subresource.
type ScaleStatus struct {
// actual number of observed instances of the scaled object.
Replicas int32 `json:"replicas" protobuf:"varint,1,opt,name=replicas"`
// label query over pods that should match the replicas count. More info: http://kubernetes.io/docs/user-guide/labels#label-selectors
// +optional
Selector map[string]string `json:"selector,omitempty" protobuf:"bytes,2,rep,name=selector"`
	// label selector for pods that should match the replicas count. This is a serialized
// version of both map-based and more expressive set-based selectors. This is done to
// avoid introspection in the clients. The string will be in the same format as the
// query-param syntax. If the target type only supports map-based selectors, both this
// field and map-based selector field are populated.
// More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/#label-selectors
// +optional
TargetSelector string `json:"targetSelector,omitempty" protobuf:"bytes,3,opt,name=targetSelector"`
}
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +k8s:prerelease-lifecycle-gen:introduced=1.6
// +k8s:prerelease-lifecycle-gen:deprecated=1.8
// +k8s:prerelease-lifecycle-gen:removed=1.16
// +k8s:prerelease-lifecycle-gen:replacement=autoscaling,v1,Scale
// Scale represents a scaling request for a resource.
type Scale struct {
metav1.TypeMeta `json:",inline"`
// Standard object metadata; More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata.
// +optional
metav1.ObjectMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
// defines the behavior of the scale. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status.
// +optional
Spec ScaleSpec `json:"spec,omitempty" protobuf:"bytes,2,opt,name=spec"`
// current status of the scale. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status. Read-only.
// +optional
Status ScaleStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}
// +genclient
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +k8s:prerelease-lifecycle-gen:introduced=1.5
// +k8s:prerelease-lifecycle-gen:deprecated=1.8
// +k8s:prerelease-lifecycle-gen:removed=1.16
// +k8s:prerelease-lifecycle-gen:replacement=apps,v1,StatefulSet
// DEPRECATED - This group version of StatefulSet is deprecated by apps/v1beta2/StatefulSet. See the release notes for
// more information.
// StatefulSet represents a set of pods with consistent identities.
// Identities are defined as:
// - Network: A single stable DNS and hostname.
// - Storage: As many VolumeClaims as requested.
// The StatefulSet guarantees that a given network identity will always
// map to the same storage identity.
type StatefulSet struct {
metav1.TypeMeta `json:",inline"`
// +optional
metav1.ObjectMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
// Spec defines the desired identities of pods in this set.
// +optional
Spec StatefulSetSpec `json:"spec,omitempty" protobuf:"bytes,2,opt,name=spec"`
// Status is the current status of Pods in this StatefulSet. This data
// may be out of date by some window of time.
// +optional
Status StatefulSetStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}
// PodManagementPolicyType defines the policy for creating pods under a stateful set.
type PodManagementPolicyType string
const (
// OrderedReadyPodManagement will create pods in strictly increasing order on
// scale up and strictly decreasing order on scale down, progressing only when
// the previous pod is ready or terminated. At most one pod will be changed
// at any time.
OrderedReadyPodManagement PodManagementPolicyType = "OrderedReady"
// ParallelPodManagement will create and delete pods as soon as the stateful set
// replica count is changed, and will not wait for pods to be ready or complete
// termination.
ParallelPodManagement PodManagementPolicyType = "Parallel"
)
// StatefulSetUpdateStrategy indicates the strategy that the StatefulSet
// controller will use to perform updates. It includes any additional parameters
// necessary to perform the update for the indicated strategy.
type StatefulSetUpdateStrategy struct {
// Type indicates the type of the StatefulSetUpdateStrategy.
Type StatefulSetUpdateStrategyType `json:"type,omitempty" protobuf:"bytes,1,opt,name=type,casttype=StatefulSetStrategyType"`
// RollingUpdate is used to communicate parameters when Type is RollingUpdateStatefulSetStrategyType.
RollingUpdate *RollingUpdateStatefulSetStrategy `json:"rollingUpdate,omitempty" protobuf:"bytes,2,opt,name=rollingUpdate"`
}
// StatefulSetUpdateStrategyType is a string enumeration type that enumerates
// all possible update strategies for the StatefulSet controller.
type StatefulSetUpdateStrategyType string
const (
// RollingUpdateStatefulSetStrategyType indicates that update will be
// applied to all Pods in the StatefulSet with respect to the StatefulSet
// ordering constraints. When a scale operation is performed with this
// strategy, new Pods will be created from the specification version indicated
// by the StatefulSet's updateRevision.
RollingUpdateStatefulSetStrategyType StatefulSetUpdateStrategyType = "RollingUpdate"
// OnDeleteStatefulSetStrategyType triggers the legacy behavior. Version
// tracking and ordered rolling restarts are disabled. Pods are recreated
	// from the StatefulSetSpec when they are manually deleted. When a scale
	// operation is performed with this strategy, new Pods are created from the
	// specification version indicated by the StatefulSet's currentRevision.
OnDeleteStatefulSetStrategyType StatefulSetUpdateStrategyType = "OnDelete"
)
// RollingUpdateStatefulSetStrategy is used to communicate parameter for RollingUpdateStatefulSetStrategyType.
type RollingUpdateStatefulSetStrategy struct {
// Partition indicates the ordinal at which the StatefulSet should be
// partitioned.
Partition *int32 `json:"partition,omitempty" protobuf:"varint,1,opt,name=partition"`
}
// A StatefulSetSpec is the specification of a StatefulSet.
type StatefulSetSpec struct {
// replicas is the desired number of replicas of the given Template.
// These are replicas in the sense that they are instantiations of the
// same Template, but individual replicas also have a consistent identity.
// If unspecified, defaults to 1.
// TODO: Consider a rename of this field.
// +optional
Replicas *int32 `json:"replicas,omitempty" protobuf:"varint,1,opt,name=replicas"`
// selector is a label query over pods that should match the replica count.
// If empty, defaulted to labels on the pod template.
// More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/labels/#label-selectors
// +optional
Selector *metav1.LabelSelector `json:"selector,omitempty" protobuf:"bytes,2,opt,name=selector"`
// template is the object that describes the pod that will be created if
// insufficient replicas are detected. Each pod stamped out by the StatefulSet
// will fulfill this Template, but have a unique identity from the rest
// of the StatefulSet.
Template v1.PodTemplateSpec `json:"template" protobuf:"bytes,3,opt,name=template"`
// volumeClaimTemplates is a list of claims that pods are allowed to reference.
// The StatefulSet controller is responsible for mapping network identities to
// claims in a way that maintains the identity of a pod. Every claim in
// this list must have at least one matching (by name) volumeMount in one
// container in the template. A claim in this list takes precedence over
// any volumes in the template, with the same name.
// TODO: Define the behavior if a claim already exists with the same name.
// +optional
VolumeClaimTemplates []v1.PersistentVolumeClaim `json:"volumeClaimTemplates,omitempty" protobuf:"bytes,4,rep,name=volumeClaimTemplates"`
// serviceName is the name of the service that governs this StatefulSet.
// This service must exist before the StatefulSet, and is responsible for
// the network identity of the set. Pods get DNS/hostnames that follow the
// pattern: pod-specific-string.serviceName.default.svc.cluster.local
// where "pod-specific-string" is managed by the StatefulSet controller.
ServiceName string `json:"serviceName" protobuf:"bytes,5,opt,name=serviceName"`
// podManagementPolicy controls how pods are created during initial scale up,
// when replacing pods on nodes, or when scaling down. The default policy is
// `OrderedReady`, where pods are created in increasing order (pod-0, then
// pod-1, etc) and the controller will wait until each pod is ready before
// continuing. When scaling down, the pods are removed in the opposite order.
// The alternative policy is `Parallel` which will create pods in parallel
// to match the desired scale without waiting, and on scale down will delete
// all pods at once.
// +optional
PodManagementPolicy PodManagementPolicyType `json:"podManagementPolicy,omitempty" protobuf:"bytes,6,opt,name=podManagementPolicy,casttype=PodManagementPolicyType"`
// updateStrategy indicates the StatefulSetUpdateStrategy that will be
// employed to update Pods in the StatefulSet when a revision is made to
// Template.
UpdateStrategy StatefulSetUpdateStrategy `json:"updateStrategy,omitempty" protobuf:"bytes,7,opt,name=updateStrategy"`
// revisionHistoryLimit is the maximum number of revisions that will
// be maintained in the StatefulSet's revision history. The revision history
// consists of all revisions not represented by a currently applied
// StatefulSetSpec version. The default value is 10.
RevisionHistoryLimit *int32 `json:"revisionHistoryLimit,omitempty" protobuf:"varint,8,opt,name=revisionHistoryLimit"`
// Minimum number of seconds for which a newly created pod should be ready
// without any of its container crashing for it to be considered available.
// Defaults to 0 (pod will be considered available as soon as it is ready)
// This is an alpha field and requires enabling StatefulSetMinReadySeconds feature gate.
// +optional
MinReadySeconds int32 `json:"minReadySeconds,omitempty" protobuf:"varint,9,opt,name=minReadySeconds"`
}
// StatefulSetStatus represents the current state of a StatefulSet.
type StatefulSetStatus struct {
// observedGeneration is the most recent generation observed for this StatefulSet. It corresponds to the
// StatefulSet's generation, which is updated on mutation by the API Server.
// +optional
ObservedGeneration *int64 `json:"observedGeneration,omitempty" protobuf:"varint,1,opt,name=observedGeneration"`
// replicas is the number of Pods created by the StatefulSet controller.
Replicas int32 `json:"replicas" protobuf:"varint,2,opt,name=replicas"`
// readyReplicas is the number of pods created by this StatefulSet controller with a Ready Condition.
ReadyReplicas int32 `json:"readyReplicas,omitempty" protobuf:"varint,3,opt,name=readyReplicas"`
// currentReplicas is the number of Pods created by the StatefulSet controller from the StatefulSet version
// indicated by currentRevision.
CurrentReplicas int32 `json:"currentReplicas,omitempty" protobuf:"varint,4,opt,name=currentReplicas"`
// updatedReplicas is the number of Pods created by the StatefulSet controller from the StatefulSet version
// indicated by updateRevision.
UpdatedReplicas int32 `json:"updatedReplicas,omitempty" protobuf:"varint,5,opt,name=updatedReplicas"`
// currentRevision, if not empty, indicates the version of the StatefulSet used to generate Pods in the
// sequence [0,currentReplicas).
CurrentRevision string `json:"currentRevision,omitempty" protobuf:"bytes,6,opt,name=currentRevision"`
// updateRevision, if not empty, indicates the version of the StatefulSet used to generate Pods in the sequence
// [replicas-updatedReplicas,replicas)
UpdateRevision string `json:"updateRevision,omitempty" protobuf:"bytes,7,opt,name=updateRevision"`
// collisionCount is the count of hash collisions for the StatefulSet. The StatefulSet controller
// uses this field as a collision avoidance mechanism when it needs to create the name for the
// newest ControllerRevision.
// +optional
CollisionCount *int32 `json:"collisionCount,omitempty" protobuf:"varint,9,opt,name=collisionCount"`
// Represents the latest available observations of a statefulset's current state.
// +optional
// +patchMergeKey=type
// +patchStrategy=merge
Conditions []StatefulSetCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,10,rep,name=conditions"`
// Total number of available pods (ready for at least minReadySeconds) targeted by this StatefulSet.
// This is a beta field and enabled/disabled by StatefulSetMinReadySeconds feature gate.
AvailableReplicas int32 `json:"availableReplicas" protobuf:"varint,11,opt,name=availableReplicas"`
}
// StatefulSetConditionType is the type of a StatefulSetCondition; see
// StatefulSetCondition.Type.
type StatefulSetConditionType string
// StatefulSetCondition describes the state of a statefulset at a certain point.
type StatefulSetCondition struct {
// Type of statefulset condition.
Type StatefulSetConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=StatefulSetConditionType"`
// Status of the condition, one of True, False, Unknown.
Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`
// Last time the condition transitioned from one status to another.
// +optional
LastTransitionTime metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,3,opt,name=lastTransitionTime"`
// The reason for the condition's last transition.
// +optional
Reason string `json:"reason,omitempty" protobuf:"bytes,4,opt,name=reason"`
// A human readable message indicating details about the transition.
// +optional
Message string `json:"message,omitempty" protobuf:"bytes,5,opt,name=message"`
}
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +k8s:prerelease-lifecycle-gen:introduced=1.5
// +k8s:prerelease-lifecycle-gen:deprecated=1.8
// +k8s:prerelease-lifecycle-gen:removed=1.16
// +k8s:prerelease-lifecycle-gen:replacement=apps,v1,StatefulSetList
// StatefulSetList is a collection of StatefulSets.
type StatefulSetList struct {
metav1.TypeMeta `json:",inline"`
// +optional
metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
Items []StatefulSet `json:"items" protobuf:"bytes,2,rep,name=items"`
}
// +genclient
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +k8s:prerelease-lifecycle-gen:introduced=1.6
// +k8s:prerelease-lifecycle-gen:deprecated=1.8
// +k8s:prerelease-lifecycle-gen:removed=1.16<|fim▁hole|>// more information.
// Deployment enables declarative updates for Pods and ReplicaSets.
type Deployment struct {
metav1.TypeMeta `json:",inline"`
// Standard object metadata.
// +optional
metav1.ObjectMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`
// Specification of the desired behavior of the Deployment.
// +optional
Spec DeploymentSpec `json:"spec,omitempty" protobuf:"bytes,2,opt,name=spec"`
// Most recently observed status of the Deployment.
// +optional
Status DeploymentStatus `json:"status,omitempty" protobuf:"bytes,3,opt,name=status"`
}
// DeploymentSpec is the specification of the desired behavior of the Deployment.
type DeploymentSpec struct {
	// Number of desired pods. This is a pointer to distinguish between explicit
	// zero and not specified. Defaults to 1.
	// +optional
	Replicas *int32 `json:"replicas,omitempty" protobuf:"varint,1,opt,name=replicas"`

	// Label selector for pods. Existing ReplicaSets whose pods are
	// selected by this will be the ones affected by this deployment.
	// +optional
	Selector *metav1.LabelSelector `json:"selector,omitempty" protobuf:"bytes,2,opt,name=selector"`

	// Template describes the pods that will be created.
	Template v1.PodTemplateSpec `json:"template" protobuf:"bytes,3,opt,name=template"`

	// The deployment strategy to use to replace existing pods with new ones.
	// +optional
	// +patchStrategy=retainKeys
	Strategy DeploymentStrategy `json:"strategy,omitempty" patchStrategy:"retainKeys" protobuf:"bytes,4,opt,name=strategy"`

	// Minimum number of seconds for which a newly created pod should be ready
	// without any of its container crashing, for it to be considered available.
	// Defaults to 0 (pod will be considered available as soon as it is ready)
	// +optional
	MinReadySeconds int32 `json:"minReadySeconds,omitempty" protobuf:"varint,5,opt,name=minReadySeconds"`

	// The number of old ReplicaSets to retain to allow rollback.
	// This is a pointer to distinguish between explicit zero and not specified.
	// Defaults to 2.
	// +optional
	RevisionHistoryLimit *int32 `json:"revisionHistoryLimit,omitempty" protobuf:"varint,6,opt,name=revisionHistoryLimit"`

	// Indicates that the deployment is paused.
	// +optional
	Paused bool `json:"paused,omitempty" protobuf:"varint,7,opt,name=paused"`

	// DEPRECATED.
	// The config this deployment is rolling back to. Will be cleared after rollback is done.
	// +optional
	RollbackTo *RollbackConfig `json:"rollbackTo,omitempty" protobuf:"bytes,8,opt,name=rollbackTo"`

	// The maximum time in seconds for a deployment to make progress before it
	// is considered to be failed. The deployment controller will continue to
	// process failed deployments and a condition with a ProgressDeadlineExceeded
	// reason will be surfaced in the deployment status. Note that progress will
	// not be estimated during the time a deployment is paused. Defaults to 600s.
	// +optional
	ProgressDeadlineSeconds *int32 `json:"progressDeadlineSeconds,omitempty" protobuf:"varint,9,opt,name=progressDeadlineSeconds"`
}
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +k8s:prerelease-lifecycle-gen:introduced=1.6
// +k8s:prerelease-lifecycle-gen:deprecated=1.8
// +k8s:prerelease-lifecycle-gen:removed=1.16
// +k8s:prerelease-lifecycle-gen:replacement=apps,v1,DeploymentRollback

// DEPRECATED.
// DeploymentRollback stores the information required to rollback a deployment.
type DeploymentRollback struct {
	metav1.TypeMeta `json:",inline"`

	// Required: This must match the Name of a deployment.
	Name string `json:"name" protobuf:"bytes,1,opt,name=name"`

	// The annotations to be updated to a deployment
	// +optional
	UpdatedAnnotations map[string]string `json:"updatedAnnotations,omitempty" protobuf:"bytes,2,rep,name=updatedAnnotations"`

	// The config of this deployment rollback.
	RollbackTo RollbackConfig `json:"rollbackTo" protobuf:"bytes,3,opt,name=rollbackTo"`
}

// DEPRECATED.
// RollbackConfig identifies the revision a deployment rollback should target.
type RollbackConfig struct {
	// The revision to rollback to. If set to 0, rollback to the last revision.
	// +optional
	Revision int64 `json:"revision,omitempty" protobuf:"varint,1,opt,name=revision"`
}
const (
	// DefaultDeploymentUniqueLabelKey is the default key of the selector that is added
	// to existing ReplicaSets (and label key that is added to its pods) to prevent the existing ReplicaSets
	// to select new pods (and old pods being select by new ReplicaSet).
	DefaultDeploymentUniqueLabelKey string = "pod-template-hash"
)

// DeploymentStrategy describes how to replace existing pods with new ones.
type DeploymentStrategy struct {
	// Type of deployment. Can be "Recreate" or "RollingUpdate". Default is RollingUpdate.
	// +optional
	Type DeploymentStrategyType `json:"type,omitempty" protobuf:"bytes,1,opt,name=type,casttype=DeploymentStrategyType"`

	// Rolling update config params. Present only if DeploymentStrategyType =
	// RollingUpdate.
	//---
	// TODO: Update this to follow our convention for oneOf, whatever we decide it
	// to be.
	// +optional
	RollingUpdate *RollingUpdateDeployment `json:"rollingUpdate,omitempty" protobuf:"bytes,2,opt,name=rollingUpdate"`
}

// DeploymentStrategyType names a strategy for replacing existing pods with new ones.
type DeploymentStrategyType string

const (
	// Kill all existing pods before creating new ones.
	RecreateDeploymentStrategyType DeploymentStrategyType = "Recreate"

	// Replace the old ReplicaSets by new one using rolling update i.e gradually scale down the old ReplicaSets and scale up the new one.
	RollingUpdateDeploymentStrategyType DeploymentStrategyType = "RollingUpdate"
)
// RollingUpdateDeployment is the spec to control the desired behavior of rolling update.
type RollingUpdateDeployment struct {
	// The maximum number of pods that can be unavailable during the update.
	// Value can be an absolute number (ex: 5) or a percentage of desired pods (ex: 10%).
	// Absolute number is calculated from percentage by rounding down.
	// This can not be 0 if MaxSurge is 0.
	// Defaults to 25%.
	// Example: when this is set to 30%, the old ReplicaSet can be scaled down to 70% of desired pods
	// immediately when the rolling update starts. Once new pods are ready, old ReplicaSet
	// can be scaled down further, followed by scaling up the new ReplicaSet, ensuring
	// that the total number of pods available at all times during the update is at
	// least 70% of desired pods.
	// +optional
	MaxUnavailable *intstr.IntOrString `json:"maxUnavailable,omitempty" protobuf:"bytes,1,opt,name=maxUnavailable"`

	// The maximum number of pods that can be scheduled above the desired number of
	// pods.
	// Value can be an absolute number (ex: 5) or a percentage of desired pods (ex: 10%).
	// This can not be 0 if MaxUnavailable is 0.
	// Absolute number is calculated from percentage by rounding up.
	// Defaults to 25%.
	// Example: when this is set to 30%, the new ReplicaSet can be scaled up immediately when
	// the rolling update starts, such that the total number of old and new pods do not exceed
	// 130% of desired pods. Once old pods have been killed,
	// new ReplicaSet can be scaled up further, ensuring that total number of pods running
	// at any time during the update is at most 130% of desired pods.
	// +optional
	MaxSurge *intstr.IntOrString `json:"maxSurge,omitempty" protobuf:"bytes,2,opt,name=maxSurge"`
}
// DeploymentStatus is the most recently observed status of the Deployment.
type DeploymentStatus struct {
	// The generation observed by the deployment controller.
	// +optional
	ObservedGeneration int64 `json:"observedGeneration,omitempty" protobuf:"varint,1,opt,name=observedGeneration"`

	// Total number of non-terminated pods targeted by this deployment (their labels match the selector).
	// +optional
	Replicas int32 `json:"replicas,omitempty" protobuf:"varint,2,opt,name=replicas"`

	// Total number of non-terminated pods targeted by this deployment that have the desired template spec.
	// +optional
	UpdatedReplicas int32 `json:"updatedReplicas,omitempty" protobuf:"varint,3,opt,name=updatedReplicas"`

	// readyReplicas is the number of pods targeted by this Deployment controller with a Ready Condition.
	// +optional
	ReadyReplicas int32 `json:"readyReplicas,omitempty" protobuf:"varint,7,opt,name=readyReplicas"`

	// Total number of available pods (ready for at least minReadySeconds) targeted by this deployment.
	// +optional
	AvailableReplicas int32 `json:"availableReplicas,omitempty" protobuf:"varint,4,opt,name=availableReplicas"`

	// Total number of unavailable pods targeted by this deployment. This is the total number of
	// pods that are still required for the deployment to have 100% available capacity. They may
	// either be pods that are running but not yet available or pods that still have not been created.
	// +optional
	UnavailableReplicas int32 `json:"unavailableReplicas,omitempty" protobuf:"varint,5,opt,name=unavailableReplicas"`

	// Represents the latest available observations of a deployment's current state.
	// +patchMergeKey=type
	// +patchStrategy=merge
	Conditions []DeploymentCondition `json:"conditions,omitempty" patchStrategy:"merge" patchMergeKey:"type" protobuf:"bytes,6,rep,name=conditions"`

	// Count of hash collisions for the Deployment. The Deployment controller uses this
	// field as a collision avoidance mechanism when it needs to create the name for the
	// newest ReplicaSet.
	// +optional
	CollisionCount *int32 `json:"collisionCount,omitempty" protobuf:"varint,8,opt,name=collisionCount"`
}

// DeploymentConditionType is the type of a DeploymentCondition.
type DeploymentConditionType string

// These are valid conditions of a deployment.
const (
	// Available means the deployment is available, ie. at least the minimum available
	// replicas required are up and running for at least minReadySeconds.
	DeploymentAvailable DeploymentConditionType = "Available"

	// Progressing means the deployment is progressing. Progress for a deployment is
	// considered when a new replica set is created or adopted, and when new pods scale
	// up or old pods scale down. Progress is not estimated for paused deployments or
	// when progressDeadlineSeconds is not specified.
	DeploymentProgressing DeploymentConditionType = "Progressing"

	// ReplicaFailure is added in a deployment when one of its pods fails to be created
	// or deleted.
	DeploymentReplicaFailure DeploymentConditionType = "ReplicaFailure"
)

// DeploymentCondition describes the state of a deployment at a certain point.
type DeploymentCondition struct {
	// Type of deployment condition.
	Type DeploymentConditionType `json:"type" protobuf:"bytes,1,opt,name=type,casttype=DeploymentConditionType"`

	// Status of the condition, one of True, False, Unknown.
	Status v1.ConditionStatus `json:"status" protobuf:"bytes,2,opt,name=status,casttype=k8s.io/api/core/v1.ConditionStatus"`

	// The last time this condition was updated.
	LastUpdateTime metav1.Time `json:"lastUpdateTime,omitempty" protobuf:"bytes,6,opt,name=lastUpdateTime"`

	// Last time the condition transitioned from one status to another.
	LastTransitionTime metav1.Time `json:"lastTransitionTime,omitempty" protobuf:"bytes,7,opt,name=lastTransitionTime"`

	// The reason for the condition's last transition.
	Reason string `json:"reason,omitempty" protobuf:"bytes,4,opt,name=reason"`

	// A human readable message indicating details about the transition.
	Message string `json:"message,omitempty" protobuf:"bytes,5,opt,name=message"`
}
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +k8s:prerelease-lifecycle-gen:introduced=1.6
// +k8s:prerelease-lifecycle-gen:deprecated=1.8
// +k8s:prerelease-lifecycle-gen:removed=1.16
// +k8s:prerelease-lifecycle-gen:replacement=apps,v1,DeploymentList

// DeploymentList is a list of Deployments.
type DeploymentList struct {
	metav1.TypeMeta `json:",inline"`

	// Standard list metadata.
	// +optional
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`

	// Items is the list of Deployments.
	Items []Deployment `json:"items" protobuf:"bytes,2,rep,name=items"`
}
// +genclient
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +k8s:prerelease-lifecycle-gen:introduced=1.7
// +k8s:prerelease-lifecycle-gen:deprecated=1.8
// +k8s:prerelease-lifecycle-gen:removed=1.16
// +k8s:prerelease-lifecycle-gen:replacement=apps,v1,ControllerRevision

// DEPRECATED - This group version of ControllerRevision is deprecated by apps/v1beta2/ControllerRevision. See the
// release notes for more information.
// ControllerRevision implements an immutable snapshot of state data. Clients
// are responsible for serializing and deserializing the objects that contain
// their internal state.
// Once a ControllerRevision has been successfully created, it can not be updated.
// The API Server will fail validation of all requests that attempt to mutate
// the Data field. ControllerRevisions may, however, be deleted. Note that, due to its use by both
// the DaemonSet and StatefulSet controllers for update and rollback, this object is beta. However,
// it may be subject to name and representation changes in future releases, and clients should not
// depend on its stability. It is primarily for internal use by controllers.
type ControllerRevision struct {
	metav1.TypeMeta `json:",inline"`

	// Standard object's metadata.
	// More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
	// +optional
	metav1.ObjectMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`

	// Data is the serialized representation of the state.
	Data runtime.RawExtension `json:"data,omitempty" protobuf:"bytes,2,opt,name=data"`

	// Revision indicates the revision of the state represented by Data.
	Revision int64 `json:"revision" protobuf:"varint,3,opt,name=revision"`
}

// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +k8s:prerelease-lifecycle-gen:introduced=1.7
// +k8s:prerelease-lifecycle-gen:deprecated=1.8
// +k8s:prerelease-lifecycle-gen:removed=1.16
// +k8s:prerelease-lifecycle-gen:replacement=apps,v1,ControllerRevisionList

// ControllerRevisionList is a resource containing a list of ControllerRevision objects.
type ControllerRevisionList struct {
	metav1.TypeMeta `json:",inline"`

	// Standard list metadata.
	// More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
	// +optional
	metav1.ListMeta `json:"metadata,omitempty" protobuf:"bytes,1,opt,name=metadata"`

	// Items is the list of ControllerRevisions
	Items []ControllerRevision `json:"items" protobuf:"bytes,2,rep,name=items"`
}
|
// +k8s:prerelease-lifecycle-gen:replacement=apps,v1,Deployment
// DEPRECATED - This group version of Deployment is deprecated by apps/v1beta2/Deployment. See the release notes for
|
<|file_name|>GuildRankInfo.cpp<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2014-2019 AscEmu Team <http://www.ascemu.org>
This file is released under the MIT license. See README-MIT for more information.
*/
#include "GuildRankInfo.h"
#include "GuildBankRightsAndSlots.h"
#include "Log.hpp"
#include "Database/Database.h"
#include "Server/MainServerDefines.h"
// Default-constructs an empty rank bound to no guild (guild id 0).
GuildRankInfo::GuildRankInfo() : mGuildId(0), mRankId(GUILD_RANK_NONE), mRights(GR_RIGHT_EMPTY), mBankMoneyPerDay(0)
{
}

// Constructs an empty rank bound to the given guild.
GuildRankInfo::GuildRankInfo(uint32_t guildId) : mGuildId(guildId), mRankId(GUILD_RANK_NONE), mRights(GR_RIGHT_EMPTY), mBankMoneyPerDay(0)
{
}

// Fully initialises a rank: id, display name, rights bitmask and daily gold
// withdraw limit.
GuildRankInfo::GuildRankInfo(uint32_t guildId, uint8_t rankId, std::string const& name, uint32_t rights, uint32_t money) :
    mGuildId(guildId), mRankId(rankId), mName(name), mRights(rights), mBankMoneyPerDay(money)
{
}
// Hydrates this rank from one guild_ranks result row.
// Field indices used: [1]=rankId, [2]=rankName, [3]=rankRights,
// [4]=goldLimitPerDay (index 0 is unused here — presumably guildId; confirm
// against the SELECT that produced the row).
void GuildRankInfo::loadGuildRankFromDB(Field* fields)
{
    mRankId = fields[1].GetUInt8();
    mName = fields[2].GetString();
    mRights = fields[3].GetUInt32();
    mBankMoneyPerDay = fields[4].GetUInt32();
    if (mRankId == GR_GUILDMASTER)
    {
        // The guild master always holds every right, regardless of stored data.
        mRights |= GR_RIGHT_ALL;
    }
}
void GuildRankInfo::saveGuildRankToDB(bool _delete) const
{
if (_delete)
{
CharacterDatabase.Execute("DELETE FROM guild_ranks WHERE guildId = %u AND rankId = %u", mGuildId, (uint32_t)mRankId);
}
else
{
CharacterDatabase.Execute("DELETE FROM guild_ranks WHERE guildId = %u AND rankId = %u", mGuildId, (uint32_t)mRankId);
CharacterDatabase.Execute("INSERT INTO guild_ranks (guildId, rankId, rankName, rankRights, goldLimitPerDay) VALUES ('%u', '%u', '%s', '%u', '0')",
mGuildId, (uint32_t)mRankId, mName.c_str(), mRights);
}
}
// Returns the rank's ordinal id within the guild.
uint8_t GuildRankInfo::getId() const
{
    return mRankId;
}

// Returns the rank's display name.
std::string const& GuildRankInfo::getName() const
{
    return mName;
}
// Renames this rank and persists the change. No-op when the name is unchanged.
void GuildRankInfo::setName(std::string const& name)
{
    if (mName == name)
        return;

    mName = name;

    // Fix: the previous UPDATE wrote `rankId` as a SET column and filtered only
    // by guildId, renaming (and re-numbering) every rank of the guild instead
    // of just this one. Filter by rankId instead.
    CharacterDatabase.Execute("UPDATE guild_ranks SET rankName = '%s' WHERE guildId = %u AND rankId = %u", mName.c_str(), mGuildId, static_cast<uint32_t>(mRankId));
}
// Returns the rank's rights bitmask.
uint32_t GuildRankInfo::getRights() const
{
    return mRights;
}
// Updates this rank's rights bitmask and persists the change. The guild
// master's rights are pinned to GR_RIGHT_ALL; unchanged values are a no-op.
// (Body reconstructed: the source was corrupted by an inline fill-in-the-middle
// marker that dropped the change-detection guard.)
void GuildRankInfo::setRights(uint32_t rights)
{
    if (mRankId == GR_GUILDMASTER)
        rights = GR_RIGHT_ALL;

    if (mRights == rights)
        return;

    mRights = rights;

    CharacterDatabase.Execute("UPDATE guild_ranks SET rankRights = %u WHERE guildId = %u AND rankId = %u", mRights, mGuildId, static_cast<uint32_t>(mRankId));
}
// Returns the daily gold withdraw limit for this rank.
// NOTE(review): the member is stored as uint32 (see the constructors and
// setBankMoneyPerDay) but returned as int32 — very large limits such as
// UNLIMITED_WITHDRAW_MONEY may appear negative to callers; confirm intent.
int32_t GuildRankInfo::getBankMoneyPerDay() const
{
    return mBankMoneyPerDay;
}
// Sets this rank's daily gold withdraw limit and persists it. The guild
// master's limit is pinned to UNLIMITED_WITHDRAW_MONEY; unchanged values are a
// no-op.
void GuildRankInfo::setBankMoneyPerDay(uint32_t money)
{
    if (mRankId == GR_GUILDMASTER)
        money = uint32_t(UNLIMITED_WITHDRAW_MONEY);

    if (mBankMoneyPerDay == money)
        return;

    mBankMoneyPerDay = money;

    // Fix: the previous UPDATE wrote `rankId` as a SET column and filtered only
    // by guildId, overwriting the limit (and rank id) of every rank in the
    // guild. Filter by rankId instead.
    CharacterDatabase.Execute("UPDATE guild_ranks SET goldLimitPerDay = '%u' WHERE guildId = %u AND rankId = %u", money, mGuildId, static_cast<uint32_t>(mRankId));
}
// Returns the rights mask for the given bank tab, or 0 for an out-of-range tab id.
int8_t GuildRankInfo::getBankTabRights(uint8_t tabId) const
{
    return tabId < MAX_GUILD_BANK_TABS ? mBankTabRightsAndSlots[tabId].getRights() : 0;
}

// Returns the daily item-slot withdraw limit for the given bank tab, or 0 for
// an out-of-range tab id.
int32_t GuildRankInfo::getBankTabSlotsPerDay(uint8_t tabId) const
{
    return tabId < MAX_GUILD_BANK_TABS ? mBankTabRightsAndSlots[tabId].getSlots() : 0;
}
// Ensures a consistent GuildBankRightsAndSlots entry exists for each of the
// first `tabs` bank tabs. An entry whose stored tab id does not match its
// index is reset (guild-master values for the GM rank) and written back to the
// guild_bank_rights table via REPLACE. `logOnCreate` emits an error log per
// repaired tab; `_delete` is currently unused.
void GuildRankInfo::createMissingTabsIfNeeded(uint8_t tabs, bool /*_delete*/, bool logOnCreate)
{
    for (uint8_t i = 0; i < tabs; ++i)
    {
        GuildBankRightsAndSlots& rightsAndSlots = mBankTabRightsAndSlots[i];
        if (rightsAndSlots.getTabId() == i)
            continue;   // tab entry already present and consistent

        rightsAndSlots.setTabId(i);

        if (mRankId == GR_GUILDMASTER)
            rightsAndSlots.SetGuildMasterValues();

        if (logOnCreate)
            LogError("Guild %u has broken Tab %u for rank %u. Created default tab.", mGuildId, i, static_cast<uint32_t>(mRankId));

        // REPLACE keeps at most one row per (guild, tab, rank).
        CharacterDatabase.Execute("REPLACE INTO guild_bank_rights VALUES(%u, %u, %u, %u, %u);",
            mGuildId, i, static_cast<uint32_t>(mRankId), static_cast<uint32_t>(rightsAndSlots.getRights()), rightsAndSlots.getSlots());
    }
}
// Stores the rights/slots configuration for one bank tab on this rank
// (guild-master values override for the GM rank) and optionally persists the
// row (REPLACE keeps at most one row per guild/tab/rank).
void GuildRankInfo::setBankTabSlotsAndRights(GuildBankRightsAndSlots rightsAndSlots, bool saveToDB)
{
    if (mRankId == GR_GUILDMASTER)
        rightsAndSlots.SetGuildMasterValues();

    GuildBankRightsAndSlots& guildBR = mBankTabRightsAndSlots[rightsAndSlots.getTabId()];
    guildBR = rightsAndSlots;

    if (saveToDB)
    {
        CharacterDatabase.Execute("REPLACE INTO guild_bank_rights VALUES(%u, %u, %u, %u, %u)",
            mGuildId, static_cast<uint32_t>(guildBR.getTabId()), static_cast<uint32_t>(mRankId), static_cast<uint32_t>(guildBR.getRights()), guildBR.getSlots());
    }
}
|
if (mRights == rights)
return;
mRights = rights;
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: UTF8 -*-
#
# Provides access to portaudio.
# Copyright (C) 2010 Josiah Gordon <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" A portaudio module.
"""
<|fim▁hole|>__all__ = ['_portaudio']<|fim▁end|>
| |
<|file_name|>blunderbuss.go<|end_file_name|><|fim▁begin|>/*
Copyright 2015 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package mungers
import (
"math"
"math/rand"
"k8s.io/contrib/mungegithub/features"
"k8s.io/contrib/mungegithub/github"
"github.com/golang/glog"
"github.com/spf13/cobra"
)
// weightMap is a map of user to a weight for that user.
type weightMap map[string]int64

// A BlunderbussConfig maps a set of file prefixes to a set of owner names (github users)
type BlunderbussConfig struct {
	PrefixMap map[string][]string `json:"prefixMap,omitempty" yaml:"prefixMap,omitempty"`
}

// BlunderbussMunger will assign issues to users based on the config file
// provided by --blunderbuss-config.
type BlunderbussMunger struct {
	config              *BlunderbussConfig // parsed prefix->owners configuration
	features            *features.Features // repo features; used to look up file owners
	blunderbussReassign bool               // when true, reassign PRs that already have an assignee
}
// init registers the munger with the framework at package load time.
func init() {
	blunderbuss := &BlunderbussMunger{}
	RegisterMungerOrDie(blunderbuss)
}

// Name is the name usable in --pr-mungers
func (b *BlunderbussMunger) Name() string { return "blunderbuss" }

// RequiredFeatures is a slice of 'features' that must be provided
func (b *BlunderbussMunger) RequiredFeatures() []string { return []string{features.RepoFeatureName} }

// Initialize will initialize the munger
func (b *BlunderbussMunger) Initialize(config *github.Config, features *features.Features) error {
	glog.Infof("blunderbuss-reassign: %#v\n", b.blunderbussReassign)
	b.features = features
	return nil
}

// EachLoop is called at the start of every munge loop
func (b *BlunderbussMunger) EachLoop() error { return nil }

// AddFlags will add any request flags to the cobra `cmd`
func (b *BlunderbussMunger) AddFlags(cmd *cobra.Command, config *github.Config) {
	cmd.Flags().BoolVar(&b.blunderbussReassign, "blunderbuss-reassign", false, "Assign PRs even if they're already assigned; use with -dry-run to judge changes to the assignment algorithm")
}
// chance expresses val as a percentage of total.
func chance(val, total int64) float64 {
	scaled := 100.0 * float64(val)
	return scaled / float64(total)
}
// printChance logs, at verbosity level 4 and above, each potential owner's
// percentage chance of being selected given the accumulated weights.
func printChance(owners weightMap, total int64) {
	if !glog.V(4) {
		return // logging disabled at this verbosity
	}
	glog.Infof("Owner\tPercent")
	for name, weight := range owners {
		glog.Infof("%s\t%02.2f%%", name, chance(weight, total))
	}
}
// Munge is the workhorse the will actually make updates to the PR
func (b *BlunderbussMunger) Munge(obj *github.MungeObject) {
if !obj.IsPR() {
return
}
issue := obj.Issue
if !b.blunderbussReassign && issue.Assignee != nil {
glog.V(6).Infof("skipping %v: reassign: %v assignee: %v", *issue.Number, b.blunderbussReassign, github.DescribeUser(issue.Assignee))
return
}
files, err := obj.ListFiles()
if err != nil {
return
}
potentialOwners := weightMap{}
weightSum := int64(0)
for _, file := range files {
fileWeight := int64(1)
if file.Changes != nil && *file.Changes != 0 {
fileWeight = int64(*file.Changes)
}
// Judge file size on a log scale-- effectively this
// makes three buckets, we shouldn't have many 10k+
// line changes.
fileWeight = int64(math.Log10(float64(fileWeight))) + 1
fileOwners := b.features.Repos.LeafAssignees(*file.Filename)
if fileOwners.Len() == 0 {
glog.Warningf("Couldn't find an owner for: %s", *file.Filename)
}
for _, owner := range fileOwners.List() {
if owner == *issue.User.Login {
continue
}
potentialOwners[owner] = potentialOwners[owner] + fileWeight
weightSum += fileWeight
}
}
if len(potentialOwners) == 0 {
glog.Errorf("No owners found for PR %d", *issue.Number)
return<|fim▁hole|> c := chance(potentialOwners[cur], weightSum)
glog.Infof("Current assignee %v has a %02.2f%% chance of having been chosen", cur, c)
}
selection := rand.Int63n(weightSum)
owner := ""
for o, w := range potentialOwners {
owner = o
selection -= w
if selection <= 0 {
break
}
}
c := chance(potentialOwners[owner], weightSum)
glog.Infof("Assigning %v to %v who had a %02.2f%% chance to be assigned (previously assigned to %v)", *issue.Number, owner, c, github.DescribeUser(issue.Assignee))
obj.AssignPR(owner)
}<|fim▁end|>
|
}
printChance(potentialOwners, weightSum)
if issue.Assignee != nil {
cur := *issue.Assignee.Login
|
<|file_name|>object_store_test.go<|end_file_name|><|fim▁begin|>// Copyright 2018 The Kubeflow Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package storage
import (
"bytes"
"io"
"testing"
"github.com/kubeflow/pipelines/backend/src/common/util"
minio "github.com/minio/minio-go"
"github.com/pkg/errors"
"github.com/stretchr/testify/assert"
"google.golang.org/grpc/codes"
)
// Foo is a trivial serializable fixture used by the YAML round-trip tests.
type Foo struct{ ID int }

// FakeBadMinioClient implements the minio client surface used by
// MinioObjectStore but fails every call, for exercising error paths.
type FakeBadMinioClient struct {
}

// PutObject always fails.
func (c *FakeBadMinioClient) PutObject(bucketName, objectName string, reader io.Reader,
	objectSize int64, opts minio.PutObjectOptions) (n int64, err error) {
	return 0, errors.New("some error")
}

// GetObject always fails.
func (c *FakeBadMinioClient) GetObject(bucketName, objectName string,
	opts minio.GetObjectOptions) (io.Reader, error) {
	return nil, errors.New("some error")
}

// DeleteObject always fails.
func (c *FakeBadMinioClient) DeleteObject(bucketName, objectName string) error {
	return errors.New("some error")
}
// TestAddFile verifies AddFile stores the payload under the pipeline key.
func TestAddFile(t *testing.T) {
	minioClient := NewFakeMinioClient()
	manager := &MinioObjectStore{minioClient: minioClient, baseFolder: "pipeline"}
	// `err`, not `error`: avoid shadowing the predeclared error type.
	err := manager.AddFile([]byte("abc"), manager.GetPipelineKey("1"))
	assert.Nil(t, err)
	assert.Equal(t, 1, minioClient.GetObjectCount())
	assert.True(t, minioClient.ExistObject("pipeline/1"))
}
// TestAddFileError verifies AddFile surfaces a client failure as an internal error.
func TestAddFileError(t *testing.T) {
	manager := &MinioObjectStore{minioClient: &FakeBadMinioClient{}}
	// `err`, not `error`: avoid shadowing the predeclared error type.
	err := manager.AddFile([]byte("abc"), manager.GetPipelineKey("1"))
	assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode())
}
// TestGetFile verifies GetFile returns a previously stored payload.
func TestGetFile(t *testing.T) {
	manager := &MinioObjectStore{minioClient: NewFakeMinioClient(), baseFolder: "pipeline"}
	manager.AddFile([]byte("abc"), manager.GetPipelineKey("1"))
	// `err`, not `error`: avoid shadowing the predeclared error type.
	file, err := manager.GetFile(manager.GetPipelineKey("1"))
	assert.Nil(t, err)
	assert.Equal(t, file, []byte("abc"))
}
// TestGetFileError verifies GetFile surfaces a client failure as an internal error.
func TestGetFileError(t *testing.T) {
	manager := &MinioObjectStore{minioClient: &FakeBadMinioClient{}, baseFolder: "pipeline"}
	// `err`, not `error`: avoid shadowing the predeclared error type.
	_, err := manager.GetFile(manager.GetPipelineKey("1"))
	assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode())
}
// TestDeleteFile verifies DeleteFile removes a previously stored object.
func TestDeleteFile(t *testing.T) {
	minioClient := NewFakeMinioClient()
	manager := &MinioObjectStore{minioClient: minioClient, baseFolder: "pipeline"}
	manager.AddFile([]byte("abc"), manager.GetPipelineKey("1"))
	// `err`, not `error`: avoid shadowing the predeclared error type.
	err := manager.DeleteFile(manager.GetPipelineKey("1"))
	assert.Nil(t, err)
	assert.Equal(t, 0, minioClient.GetObjectCount())
}
// TestDeleteFileError verifies DeleteFile surfaces a client failure as an internal error.
func TestDeleteFileError(t *testing.T) {
	manager := &MinioObjectStore{minioClient: &FakeBadMinioClient{}}
	// `err`, not `error`: avoid shadowing the predeclared error type.
	err := manager.DeleteFile(manager.GetPipelineKey("1"))
	assert.Equal(t, codes.Internal, err.(*util.UserError).ExternalStatusCode())
}
func TestAddAsYamlFile(t *testing.T) {
minioClient := NewFakeMinioClient()
manager := &MinioObjectStore{minioClient: minioClient, baseFolder: "pipeline"}
error := manager.AddAsYamlFile(Foo{ID: 1}, manager.GetPipelineKey("1"))<|fim▁hole|> assert.True(t, minioClient.ExistObject("pipeline/1"))
}
// TestGetFromYamlFile verifies a stored YAML document unmarshals into the
// target struct.
func TestGetFromYamlFile(t *testing.T) {
	minioClient := NewFakeMinioClient()
	manager := &MinioObjectStore{minioClient: minioClient, baseFolder: "pipeline"}
	manager.minioClient.PutObject(
		"", manager.GetPipelineKey("1"),
		bytes.NewReader([]byte("id: 1")), -1,
		minio.PutObjectOptions{ContentType: "application/octet-stream"})
	expectedFoo := Foo{ID: 1}
	var foo Foo
	// `err`, not `error`: avoid shadowing the predeclared error type.
	err := manager.GetFromYamlFile(&foo, manager.GetPipelineKey("1"))
	assert.Nil(t, err)
	assert.Equal(t, expectedFoo, foo)
}
func TestGetFromYamlFile_UnmarshalError(t *testing.T) {
minioClient := NewFakeMinioClient()
manager := &MinioObjectStore{minioClient: minioClient, baseFolder: "pipeline"}
manager.minioClient.PutObject(
"", manager.GetPipelineKey("1"),
bytes.NewReader([]byte("invalid")), -1,
minio.PutObjectOptions{ContentType: "application/octet-stream"})
var foo Foo
error := manager.GetFromYamlFile(&foo, manager.GetPipelineKey("1"))
assert.Equal(t, codes.Internal, error.(*util.UserError).ExternalStatusCode())
assert.Contains(t, error.Error(), "Failed to unmarshal")
}<|fim▁end|>
|
assert.Nil(t, error)
assert.Equal(t, 1, minioClient.GetObjectCount())
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var async = require('async');
var rp = require('request-promise');
var Promise = require('bluebird');
/*
LoL API object that deals with everything.
*/
var LoLAPI = {
init: function(inputObj) {
/*
SET UP LOGGER
*/
if(typeof inputObj.logger !== 'undefined') {
this.logger = inputObj.logger;
}
else {
this.logger = console;
}
/*
END SET UP LOGGER
*/
/*
SET UP ERROR HANDLER
*/
if(typeof inputObj.errorHandler !== 'undefined') {
this.errorHandler = inputObj.errorHandler;
}
else {
this.errorHandler = this.logger.error;
}
/*
END ERROR HANDLER
*/
/*
SET UP CACHE TODO: replace with CHECK that global redis exists
*/
if(!inputObj.cache) {
var redis = require('redis');
Promise.promisifyAll(redis.RedisClient.prototype);
Promise.promisifyAll(redis.Multi.prototype);
this.cache = redis.createClient('redis://' + inputObj.cacheServer + ':' + (inputObj.cachePort || '6379'));
}
else {
this.cache = inputObj.cache;
this.cache.on("error", function (err) {
this.errorHandle(err);
}.bind(this));
}
this.cache.on('connect', function() {
this.logger.log('LoL API Connected to Redis');
this.getOneHourCount().then(count => {
this.logger.log(inputObj.limit_one_hour - count + ' API requests available in the hour.');
return this.timeToHourExpiry();
})
.then(ttl => {
this.logger.log(ttl + ' seconds left until hour cache expiry');
});
}.bind(this));
/*
END CACHE SETUP
*/
this.setApiKey(inputObj.api_key);
this.failCount = inputObj.fail_count || 5;
//Load all the handlers in the handlers dir.
require('fs').readdirSync(__dirname + '/lib/handlers').forEach(function(file) {
if (file.match(/\.js$/) !== null && file !== 'index.js') {
var r = require('./lib/handlers/' + file);
this.request[r.name] = r.handler.bind(this);
}
}.bind(this));
//Load all the helpers in the helpers dir.
this.helper = {};
require('fs').readdirSync(__dirname + '/lib/helpers').forEach(function(file) {
if (file.match(/\.js$/) !== null && file !== 'index.js') {
var r = require('./lib/helpers/' + file);
this.helper[file.replace(/\.js$/, '')] = r;
}
}.bind(this));
//Load all the route builders in the route builders dir.
this.routeStem = {};
require('fs').readdirSync(__dirname + '/lib/route-stem').forEach(function(file) {
if (file.match(/\.js$/) !== null && file !== 'index.js') {
var r = require('./lib/route-stem/' + file);
this.routeStem[file.replace(/\.js$/, '')] = r;
}
}.bind(this));
//TODO: do we definitely want -1?
this.setRateLimit(inputObj.limit_ten_seconds-1, inputObj.limit_one_hour);
//Set the timeouts for the queue master
this.beginQueueInterval();
return this;
},
  /**
   * Starts a 10 ms interval that, whenever rate-limit capacity is available
   * and the queue is non-empty, drains queued requests via execQueue.
   * NOTE(review): `.bind(this)` is invoked on the promise returned by `.then`
   * — legal only because these are bluebird promises (which expose `.bind`),
   * and redundant given the arrow function; confirm intent.
   */
  beginQueueInterval: function() {
    this.queueInterval = setInterval(function() {
      return this.checkRateLimit()
      .then((spaces)=> {
        if(spaces && (this.queue.length > 0)) {
          return this.execQueue(spaces);
        }
        else {
          return;
        }
      }).bind(this);
    }.bind(this), 10);
    this.logger.log('Created LoL API Request Handler');
    return;
  },
setApiKey: function(key) {
return this.apiKey = key;
},
  // Resolves with the seconds remaining until the one-hour counter key expires.
  timeToHourExpiry: function() {
    return this.cache.ttlAsync('lolapi_onehour');
  },
  // Deletes both rate-limit counter keys, resetting the request counts.
  refreshCache: function() {
    return this.cache.delAsync('lolapi_tenseconds', 'lolapi_onehour');
  },
incrementTenSecondsCount: function() {
//If not set then set
return this.cache.multi().incr('lolapi_tenseconds').expire('lolapi_tenseconds', 11).execAsync()
.then((value)=> {
if(!value) {
return this.logger("Couldn't set the 10 second rate key");
}
return value;
}).bind(this);
},
incrementOneHourCount: function() {
//If not set then set
return this.cache.multi().incr('lolapi_onehour').expire('lolapi_onehour', 3601).execAsync()
.then((value)=> {
if(!value) {
return this.logger("Couldn't set one hour key.");
}
return value;
}).bind(this);
},
getTenSecondsCount: function() {
return this.cache.getAsync('lolapi_tenseconds')
.then((key)=> {
if(key) {
return key;
}
else {
return 0;
}
});
},
getOneHourCount: function() {
return this.cache.getAsync('lolapi_onehour')
.then((key)=> {
if(key) {
return key;
}
else {
return 0;
}
});
},
  // Configured ceilings for the two Riot rate-limit windows (set via init()).
  rateLimit: {
    tenSeconds: null,
    oneHour: null,
  },
  // Live counters; outstandingRequests tracks requests sent but not yet resolved.
  requestCount: {
    tenSeconds: 0,
    oneHour: 0,
    outstandingRequests: 0
  },
  // Overridable via init()'s fail_count — presumably the retry ceiling for a
  // failing request; confirm against the request handlers.
  failCount: 5,
  // Records the per-window request ceilings consulted by checkRateLimit().
  setRateLimit: function(ten_seconds, one_hour) {
    this.rateLimit.tenSeconds = ten_seconds;
    this.rateLimit.oneHour = one_hour;
  },
  // If a 429 is discovered then it sends a retry-after seconds count, test if it greater than remaining time
  // NOTE(review): this looks unfinished — `r` is computed but never used, and
  // the function only clears and logs the 10-second timer; confirm intended
  // behavior before relying on it.
  retryRateLimitOverride: function(retry_after) {
    //TODO: do I need to parse int here?
    var r = parseInt(retry_after) * 1000;
    //Always clear the 10s timeout just to be certain.
    //Clear interval and reset after retry after is cleared
    clearInterval(this.tenSecondsTimeout);
    this.logger.log(this.tenSecondsTimeout);
  },
  /**
   * Resolves with the number of request slots currently available: the
   * smaller of the two windows' remaining capacity, minus requests already
   * in flight, floored at 0.
   */
  checkRateLimit: function() {
    return this.getOneHourCount() //Get this first because we care about it less
    .then((oneHour)=> {
      return this.getTenSecondsCount()
      .then((tenSeconds)=> { //NESTED SO WE CAN ACCESS UPPER VARS IN SCOPE
        //TODO: there is a wierd type error here........ for some reason it outputs number for tenseconds and a string for hour
        if((parseInt(tenSeconds) + this.requestCount.outstandingRequests) >= this.rateLimit.tenSeconds) {
          return 0;
        }
        else if((parseInt(oneHour) + this.requestCount.outstandingRequests) >= this.rateLimit.oneHour) {
          return this.timeToHourExpiry()
          .then(ttl => {
            this.logger.log('Hit hour limit: ' + oneHour + '. ' + ttl + ' seconds to go until cache reset.');
            return 0; // 0 Spaces
          })
        }
        else {
          //return the smaller of the requests available
          var requests_left_hour = this.rateLimit.oneHour - parseInt(oneHour) - this.requestCount.outstandingRequests;
          var requests_left_ten_seconds = this.rateLimit.tenSeconds - parseInt(tenSeconds) - this.requestCount.outstandingRequests;
          //As we dont' need to worry about race conditions we don't have to recheck if positive
          if(requests_left_hour > requests_left_ten_seconds) {
            if(requests_left_ten_seconds > 0) {
              return requests_left_ten_seconds;
            }
            else {
              return 0;
            }
          }
          else {
            if(requests_left_hour > 0) {
              return requests_left_hour;
            }
            else {
              return 0;
            }
          }
        }
      });
    });
  },
initRequest: function(endpoint, returnVars) {
//Add the request and set up as a promise
var cb = function(endpoint, returnVars, times_failed) {
return this.incrementOneHourCount()
.then((oneHour)=> {
return this.incrementTenSecondsCount()
.then((tenSeconds)=> {
this.requestCount.outstandingRequests += 1;
var options = {
uri: encodeURI(endpoint + '&api_key=' + this.apiKey), //Assume the ? has already been added by our endpoint
json: true,
resolveWithFullResponse: true
}
this.logger.log('Using ' + options.uri);
this.logger.log(this.requestCount.outstandingRequests);
this.logger.log(tenSeconds + ' ' + oneHour);
return rp(options)
.then(
function(response) {
this.requestCount.outstandingRequests -= 1;
if(returnVars) {
if(typeof returnVars === 'string') {
if(response.body[returnVars]) {
return response.body[returnVars]; //Resolve promise
}
else {
this.infoHandle("Couldn't locate the requested returnVar " + returnVars + '. Returning full response.');
}
}
else {
var tmp = {};
returnVars.forEach(function(item, i) {
if(response[item]) {
tmp[item] = response.body[item];
}
else {
var bFailedReturnVar = true;
}
}.bind(this));
if(!bFailedReturnVar) {
return tmp; //Resolve promise
}
else {
this.infoHandle("Couldn't locate the requested returnVar " + item + '. Returning full response.');
return response.body; //Resolve Promise
}
}<|fim▁hole|> }
}.bind(this),
//REJECTION
function(reason) {
this.requestCount.outstandingRequests -= 1;
if(reason.statusCode === 429) {
this.logger.log('Rate limit reached!')
//NOTE: Riot have been known to remove the header so including this to avoid breaking.
if(typeof reason.response['headers']['retry-after'] !== 'undefined') {
this.logger.log('Retrying after ' + reason.response['headers']['retry-after'] + 's');
// this.retryRateLimitOverride(reason.response['headers']['retry-after']);
}
else {
this.logger.log('No Retry-After header');
this.logger.log(reason.response['headers']);
}
}
if(reason.error.code == 'ENOTFOUND') {
throw 'Request ' + endpoint + ' did not access a valid endpoint, please check the parameter structure of your request realm and/or platform names. NOT adding back to queue.';
}
if(reason.statusCode === 404) {
//404 isn't an error per se, so we don't throw this.
return this.notFoundHandle('Request ' + endpoint + ' REJECTED with reason: ' + reason + '. NOT adding back to queue');
}
if(typeof times_failed !== 'number') {
times_failed = 1;
}
else {
times_failed++;
}
this.infoHandle('Request ' + endpoint + ' REJECTED with reason: ' + reason + '. Adding back to queue. Failed ' + times_failed + ' times.');
return this.addToQueue(cb.bind(this, endpoint, returnVars, times_failed), times_failed, endpoint);
}.bind(this))
.catch(err => {
return this.errorHandle(err);
});
}); //NOTE: I'm not sure why we can't bind here but if we do it causes times_failed to not increment
});
}
return this.addToQueue(cb.bind(this, endpoint, returnVars), 0, endpoint);
},
infoHandle: function(str) {
return this.logger.info(str);
},
notFoundHandle: function(str) {
return this.logger.info(str);
},
addToQueue: function(fn, times_failed, endpoint) {
if(times_failed >= this.failCount) {
this.infoHandle('Request from endpoint "' + endpoint + '" exceeded fail count!');
throw 'Request from endpoint "' + endpoint + '" exceeded fail count!';
}
else {
//Turns function to deferred promise and adds to queue.
this.logger.log('Adding ' + endpoint + ' to queue.');
var resolve, reject;
var promise = new Promise(function(reso, reje) {
resolve = reso;
reject = reje;
})
.then(function(times_failed) {
this.logger.log('Executing queue item!');
return fn(); //NOTE: fn is prebound with arguments
}.bind(this));
this.queue.push({
resolve: resolve,
reject: reject,
promise: promise
});
return promise;
}
},
execQueue: function(end_index) {
while(this.queue.length > 0 && end_index > 0 && this.cache.connected === true) {
bUnloaded = true;
var w = this.queue.shift();
w.resolve();
end_index--;
}
if(this.cache.connected === false) {
this.logger.errorHandle('Attempted to execute queue but cache disconnected');
}
if(bUnloaded) {
this.logger.log(this.queue.length + ' in queue after unloading.');
}
return;
},
queue: [],
request: {}, //contains all the handlers. Created in the INIT function above.
helper: {}, // All the helpers
replaceEndpointVariables: function(realm, endpoint, platform) { //Replaces $r and $p with platform and realm
//Realm matches $r
endpoint = endpoint.replace(/\$r/g, realm);
if(platform) {
endpoint = endpoint.replace(/\$p/g, platform);
}
return endpoint;
},
errorHandle: function(str) {
return this.errorHandler(str);
},
shutdown: function(now) {
return new Promise((resolve, reject) => {
this.logger.log('LoL API shutting down...');
clearInterval(this.queueInterval);
if(now) {
this.cache.end(true);
}
else {
this.cache.quit();
}
this.cache.on('end', function() {
this.logger.log('Redis connected severed.');
resolve(true);
}.bind(this));
}).bind(this)
}
}
module.exports = LoLAPI;<|fim▁end|>
|
}
else {
this.logger.log('SUCCESSFUL RESPONSE FROM: ' + endpoint);
return response.body; //Resolve promise
|
<|file_name|>newtype-struct-with-dtor.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT<|fim▁hole|>// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::libc::c_int;
use std::libc;
pub struct Fd(c_int);
impl Drop for Fd {
fn drop(&mut self) {
unsafe {
let Fd(s) = *self;
libc::close(s);
}
}
}
pub fn main() {
}<|fim▁end|>
|
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
|
<|file_name|>qt_generic.py<|end_file_name|><|fim▁begin|>from functools import partial
import threading
from PIL import Image
from PyQt4.Qt import Qt
from PyQt4.Qt import QGridLayout, QInputDialog, QPushButton
from PyQt4.Qt import QVBoxLayout, QLabel, SIGNAL
from electrum_gui.qt.main_window import StatusBarButton
from electrum_gui.qt.password_dialog import PasswordDialog
from electrum_gui.qt.util import *
from .plugin import TrezorCompatiblePlugin, TIM_NEW, TIM_RECOVER, TIM_MNEMONIC<|fim▁hole|>from electrum.util import PrintError
from electrum.wallet import Wallet, BIP44_Wallet
from electrum.wizard import UserCancelled
# By far the trickiest thing about this handler is the window stack;
# MacOSX is very fussy the modal dialogs are perfectly parented
class QtHandler(PrintError):
'''An interface between the GUI (here, QT) and the device handling
logic for handling I/O. This is a generic implementation of the
Trezor protocol; derived classes can customize it.'''
def __init__(self, win, pin_matrix_widget_class, device):
win.connect(win, SIGNAL('clear_dialog'), self.clear_dialog)
win.connect(win, SIGNAL('error_dialog'), self.error_dialog)
win.connect(win, SIGNAL('message_dialog'), self.message_dialog)
win.connect(win, SIGNAL('pin_dialog'), self.pin_dialog)
win.connect(win, SIGNAL('passphrase_dialog'), self.passphrase_dialog)
win.connect(win, SIGNAL('word_dialog'), self.word_dialog)
self.win = win
self.pin_matrix_widget_class = pin_matrix_widget_class
self.device = device
self.dialog = None
self.done = threading.Event()
def top_level_window(self):
return self.win.top_level_window()
def watching_only_changed(self):
self.win.emit(SIGNAL('watching_only_changed'))
def show_message(self, msg, cancel_callback=None):
self.win.emit(SIGNAL('message_dialog'), msg, cancel_callback)
def show_error(self, msg):
self.win.emit(SIGNAL('error_dialog'), msg)
def finished(self):
self.win.emit(SIGNAL('clear_dialog'))
def get_pin(self, msg):
self.done.clear()
self.win.emit(SIGNAL('pin_dialog'), msg)
self.done.wait()
return self.response
def get_word(self, msg):
self.done.clear()
self.win.emit(SIGNAL('word_dialog'), msg)
self.done.wait()
return self.word
def get_passphrase(self, msg):
self.done.clear()
self.win.emit(SIGNAL('passphrase_dialog'), msg)
self.done.wait()
return self.passphrase
def pin_dialog(self, msg):
# Needed e.g. when resetting a device
self.clear_dialog()
dialog = WindowModalDialog(self.top_level_window(), _("Enter PIN"))
matrix = self.pin_matrix_widget_class()
vbox = QVBoxLayout()
vbox.addWidget(QLabel(msg))
vbox.addWidget(matrix)
vbox.addLayout(Buttons(CancelButton(dialog), OkButton(dialog)))
dialog.setLayout(vbox)
dialog.exec_()
self.response = str(matrix.get_value())
self.done.set()
def passphrase_dialog(self, msg):
d = PasswordDialog(self.top_level_window(), None, msg,
PasswordDialog.PW_PASSPHRASE)
confirmed, p, passphrase = d.run()
if confirmed:
passphrase = BIP44_Wallet.normalize_passphrase(passphrase)
self.passphrase = passphrase
self.done.set()
def word_dialog(self, msg):
dialog = WindowModalDialog(self.top_level_window(), "")
hbox = QHBoxLayout(dialog)
hbox.addWidget(QLabel(msg))
text = QLineEdit()
text.setMaximumWidth(100)
text.returnPressed.connect(dialog.accept)
hbox.addWidget(text)
hbox.addStretch(1)
if not dialog.exec_():
return None
self.word = unicode(text.text())
self.done.set()
def message_dialog(self, msg, cancel_callback):
# Called more than once during signing, to confirm output and fee
self.clear_dialog()
title = _('Please check your %s device') % self.device
self.dialog = dialog = WindowModalDialog(self.top_level_window(), title)
l = QLabel(msg)
vbox = QVBoxLayout(dialog)
if cancel_callback:
vbox.addLayout(Buttons(CancelButton(dialog)))
dialog.connect(dialog, SIGNAL('rejected()'), cancel_callback)
vbox.addWidget(l)
dialog.show()
def error_dialog(self, msg):
self.win.show_error(msg, parent=self.top_level_window())
def clear_dialog(self):
if self.dialog:
self.dialog.accept()
self.dialog = None
def query_choice(self, msg, labels):
return self.win.query_choice(msg, labels)
def request_trezor_init_settings(self, method, device):
wizard = self.win
vbox = QVBoxLayout()
main_label = QLabel(_("Initialization settings for your %s:") % device)
vbox.addWidget(main_label)
OK_button = OkButton(wizard, _('Next'))
def clean_text(widget):
text = unicode(widget.toPlainText()).strip()
return ' '.join(text.split())
if method in [TIM_NEW, TIM_RECOVER]:
gb = QGroupBox()
vbox1 = QVBoxLayout()
gb.setLayout(vbox1)
vbox.addWidget(gb)
gb.setTitle(_("Select your seed length:"))
choices = [
_("12 words"),
_("18 words"),
_("24 words"),
]
bg = QButtonGroup()
for i, choice in enumerate(choices):
rb = QRadioButton(gb)
rb.setText(choice)
bg.addButton(rb)
bg.setId(rb, i)
vbox1.addWidget(rb)
rb.setChecked(True)
cb_pin = QCheckBox(_('Enable PIN protection'))
cb_pin.setChecked(True)
else:
text = QTextEdit()
text.setMaximumHeight(60)
if method == TIM_MNEMONIC:
msg = _("Enter your BIP39 mnemonic:")
else:
msg = _("Enter the master private key beginning with xprv:")
def set_enabled():
OK_button.setEnabled(Wallet.is_xprv(clean_text(text)))
text.textChanged.connect(set_enabled)
OK_button.setEnabled(False)
vbox.addWidget(QLabel(msg))
vbox.addWidget(text)
pin = QLineEdit()
pin.setValidator(QRegExpValidator(QRegExp('[1-9]{0,10}')))
pin.setMaximumWidth(100)
hbox_pin = QHBoxLayout()
hbox_pin.addWidget(QLabel(_("Enter your PIN (digits 1-9):")))
hbox_pin.addWidget(pin)
hbox_pin.addStretch(1)
label = QLabel(_("Enter a label to name your device:"))
name = QLineEdit()
hl = QHBoxLayout()
hl.addWidget(label)
hl.addWidget(name)
hl.addStretch(1)
vbox.addLayout(hl)
if method in [TIM_NEW, TIM_RECOVER]:
vbox.addWidget(cb_pin)
else:
vbox.addLayout(hbox_pin)
cb_phrase = QCheckBox(_('Enable Passphrase protection'))
cb_phrase.setChecked(False)
vbox.addWidget(cb_phrase)
vbox.addStretch(1)
vbox.addLayout(Buttons(CancelButton(wizard), OK_button))
wizard.set_layout(vbox)
if not wizard.exec_():
raise UserCancelled
if method in [TIM_NEW, TIM_RECOVER]:
item = bg.checkedId()
pin = cb_pin.isChecked()
else:
item = ' '.join(str(clean_text(text)).split())
pin = str(pin.text())
return (item, unicode(name.text()), pin, cb_phrase.isChecked())
def qt_plugin_class(base_plugin_class):
class QtPlugin(base_plugin_class):
# Derived classes must provide the following class-static variables:
# icon_file
# pin_matrix_widget_class
def create_handler(self, window):
return QtHandler(window, self.pin_matrix_widget_class(), self.device)
@hook
def load_wallet(self, wallet, window):
if type(wallet) != self.wallet_class:
return
window.tzb = StatusBarButton(QIcon(self.icon_file), self.device,
partial(self.settings_dialog, window))
window.statusBar().addPermanentWidget(window.tzb)
wallet.handler = self.create_handler(window)
# Trigger a pairing
self.get_client(wallet)
def on_create_wallet(self, wallet, wizard):
assert type(wallet) == self.wallet_class
wallet.handler = self.create_handler(wizard)
self.select_device(wallet)
wallet.create_hd_account(None)
@hook
def receive_menu(self, menu, addrs, wallet):
if type(wallet) == self.wallet_class and len(addrs) == 1:
menu.addAction(_("Show on %s") % self.device,
lambda: self.show_address(wallet, addrs[0]))
def settings_dialog(self, window):
hid_id = self.choose_device(window)
if hid_id:
SettingsDialog(window, self, hid_id).exec_()
def choose_device(self, window):
'''This dialog box should be usable even if the user has
forgotten their PIN or it is in bootloader mode.'''
handler = window.wallet.handler
hid_id = self.device_manager().wallet_hid_id(window.wallet)
if not hid_id:
clients, labels = self.unpaired_clients(handler)
if clients:
msg = _("Select a %s device:") % self.device
choice = self.query_choice(window, msg, labels)
if choice is not None:
hid_id = clients[choice].hid_id()
else:
handler.show_error(_("No devices found"))
return hid_id
def query_choice(self, window, msg, choices):
dialog = WindowModalDialog(window)
clayout = ChoicesLayout(msg, choices)
layout = clayout.layout()
layout.addStretch(1)
layout.addLayout(Buttons(CancelButton(dialog), OkButton(dialog)))
dialog.setLayout(layout)
if not dialog.exec_():
return None
return clayout.selected_index()
return QtPlugin
class SettingsDialog(WindowModalDialog):
'''This dialog doesn't require a device be paired with a wallet.
We want users to be able to wipe a device even if they've forgotten
their PIN.'''
def __init__(self, window, plugin, hid_id):
title = _("%s Settings") % plugin.device
super(SettingsDialog, self).__init__(window, title)
self.setMaximumWidth(540)
devmgr = plugin.device_manager()
handler = window.wallet.handler
# wallet can be None, needn't be window.wallet
wallet = devmgr.wallet_by_hid_id(hid_id)
hs_rows, hs_cols = (64, 128)
def get_client():
client = devmgr.client_by_hid_id(hid_id, handler)
if not client:
self.show_error("Device not connected!")
raise RuntimeError("Device not connected")
return client
def update():
# self.features for outer scopes
client = get_client()
features = self.features = client.features
set_label_enabled()
bl_hash = features.bootloader_hash.encode('hex')
bl_hash = "\n".join([bl_hash[:32], bl_hash[32:]])
noyes = [_("No"), _("Yes")]
endis = [_("Enable Passphrases"), _("Disable Passphrases")]
setchange = [_("Set a PIN"), _("Change PIN")]
version = "%d.%d.%d" % (features.major_version,
features.minor_version,
features.patch_version)
coins = ", ".join(coin.coin_name for coin in features.coins)
device_label.setText(features.label)
pin_set_label.setText(noyes[features.pin_protection])
bl_hash_label.setText(bl_hash)
label_edit.setText(features.label)
device_id_label.setText(features.device_id)
serial_number_label.setText(client.hid_id())
initialized_label.setText(noyes[features.initialized])
version_label.setText(version)
coins_label.setText(coins)
clear_pin_button.setVisible(features.pin_protection)
clear_pin_warning.setVisible(features.pin_protection)
pin_button.setText(setchange[features.pin_protection])
pin_msg.setVisible(not features.pin_protection)
passphrase_button.setText(endis[features.passphrase_protection])
language_label.setText(features.language)
def set_label_enabled():
label_apply.setEnabled(label_edit.text() != self.features.label)
def rename():
get_client().change_label(unicode(label_edit.text()))
update()
def toggle_passphrase():
title = _("Confirm Toggle Passphrase Protection")
msg = _("This will cause your Electrum wallet to be unpaired "
"unless your passphrase was or will be empty.\n\n"
"This is because addresses will no "
"longer correspond to those used by your %s.\n\n"
"You will need to create a new Electrum wallet "
"with the install wizard so that they match.\n\n"
"Are you sure you want to proceed?") % plugin.device
if not self.question(msg, title=title):
return
get_client().toggle_passphrase()
devmgr.unpair(hid_id)
update()
def change_homescreen():
dialog = QFileDialog(self, _("Choose Homescreen"))
filename = dialog.getOpenFileName()
if filename:
im = Image.open(str(filename))
if im.size != (hs_cols, hs_rows):
raise Exception('Image must be 64 x 128 pixels')
im = im.convert('1')
pix = im.load()
img = ''
for j in range(hs_rows):
for i in range(hs_cols):
img += '1' if pix[i, j] else '0'
img = ''.join(chr(int(img[i:i + 8], 2))
for i in range(0, len(img), 8))
get_client().change_homescreen(img)
def clear_homescreen():
get_client().change_homescreen('\x00')
def set_pin(remove=False):
get_client().set_pin(remove=remove)
update()
def clear_pin():
set_pin(remove=True)
def wipe_device():
if wallet and sum(wallet.get_balance()):
title = _("Confirm Device Wipe")
msg = _("Are you SURE you want to wipe the device?\n"
"Your wallet still has bitcoins in it!")
if not self.question(msg, title=title,
icon=QMessageBox.Critical):
return
get_client().wipe_device()
devmgr.unpair(hid_id)
update()
def slider_moved():
mins = timeout_slider.sliderPosition()
timeout_minutes.setText(_("%2d minutes") % mins)
def slider_released():
seconds = timeout_slider.sliderPosition() * 60
wallet.set_session_timeout(seconds)
dialog_vbox = QVBoxLayout(self)
# Information tab
info_tab = QWidget()
info_layout = QVBoxLayout(info_tab)
info_glayout = QGridLayout()
info_glayout.setColumnStretch(2, 1)
device_label = QLabel()
pin_set_label = QLabel()
version_label = QLabel()
device_id_label = QLabel()
serial_number_label = QLabel()
bl_hash_label = QLabel()
bl_hash_label.setWordWrap(True)
coins_label = QLabel()
coins_label.setWordWrap(True)
language_label = QLabel()
initialized_label = QLabel()
rows = [
(_("Device Label"), device_label),
(_("PIN set"), pin_set_label),
(_("Firmware Version"), version_label),
(_("Device ID"), device_id_label),
(_("Serial Number"), serial_number_label),
(_("Bootloader Hash"), bl_hash_label),
(_("Supported Coins"), coins_label),
(_("Language"), language_label),
(_("Initialized"), initialized_label),
]
for row_num, (label, widget) in enumerate(rows):
info_glayout.addWidget(QLabel(label), row_num, 0)
info_glayout.addWidget(widget, row_num, 1)
info_layout.addLayout(info_glayout)
# Settings tab
settings_tab = QWidget()
settings_layout = QVBoxLayout(settings_tab)
settings_glayout = QGridLayout()
# Settings tab - Label
label_msg = QLabel(_("Name this %s. If you have mutiple devices "
"their labels help distinguish them.")
% plugin.device)
label_msg.setWordWrap(True)
label_label = QLabel(_("Device Label"))
label_edit = QLineEdit()
label_edit.setMinimumWidth(150)
label_edit.setMaxLength(plugin.MAX_LABEL_LEN)
label_apply = QPushButton(_("Apply"))
label_apply.clicked.connect(rename)
label_edit.textChanged.connect(set_label_enabled)
settings_glayout.addWidget(label_label, 0, 0)
settings_glayout.addWidget(label_edit, 0, 1, 1, 2)
settings_glayout.addWidget(label_apply, 0, 3)
settings_glayout.addWidget(label_msg, 1, 1, 1, -1)
# Settings tab - PIN
pin_label = QLabel(_("PIN Protection"))
pin_button = QPushButton()
pin_button.clicked.connect(set_pin)
settings_glayout.addWidget(pin_label, 2, 0)
settings_glayout.addWidget(pin_button, 2, 1)
pin_msg = QLabel(_("PIN protection is strongly recommended. "
"A PIN is your only protection against someone "
"stealing your bitcoins if they obtain physical "
"access to your %s.") % plugin.device)
pin_msg.setWordWrap(True)
pin_msg.setStyleSheet("color: red")
settings_glayout.addWidget(pin_msg, 3, 1, 1, -1)
# Settings tab - Homescreen
homescreen_layout = QHBoxLayout()
homescreen_label = QLabel(_("Homescreen"))
homescreen_change_button = QPushButton(_("Change..."))
homescreen_clear_button = QPushButton(_("Reset"))
homescreen_change_button.clicked.connect(change_homescreen)
homescreen_clear_button.clicked.connect(clear_homescreen)
homescreen_msg = QLabel(_("You can set the homescreen on your device "
"to personalize it. You must choose a "
"%d x %d monochrome black and white image.")
% (hs_rows, hs_cols))
homescreen_msg.setWordWrap(True)
settings_glayout.addWidget(homescreen_label, 4, 0)
settings_glayout.addWidget(homescreen_change_button, 4, 1)
settings_glayout.addWidget(homescreen_clear_button, 4, 2)
settings_glayout.addWidget(homescreen_msg, 5, 1, 1, -1)
# Settings tab - Session Timeout
if wallet:
timeout_label = QLabel(_("Session Timeout"))
timeout_minutes = QLabel()
timeout_slider = QSlider(Qt.Horizontal)
timeout_slider.setRange(1, 60)
timeout_slider.setSingleStep(1)
timeout_slider.setTickInterval(5)
timeout_slider.setTickPosition(QSlider.TicksBelow)
timeout_slider.setTracking(True)
timeout_msg = QLabel(
_("Clear the session after the specified period "
"of inactivity. Once a session has timed out, "
"your PIN and passphrase (if enabled) must be "
"re-entered to use the device."))
timeout_msg.setWordWrap(True)
timeout_slider.setSliderPosition(wallet.session_timeout // 60)
slider_moved()
timeout_slider.valueChanged.connect(slider_moved)
timeout_slider.sliderReleased.connect(slider_released)
settings_glayout.addWidget(timeout_label, 6, 0)
settings_glayout.addWidget(timeout_slider, 6, 1, 1, 3)
settings_glayout.addWidget(timeout_minutes, 6, 4)
settings_glayout.addWidget(timeout_msg, 7, 1, 1, -1)
settings_layout.addLayout(settings_glayout)
settings_layout.addStretch(1)
# Advanced tab
advanced_tab = QWidget()
advanced_layout = QVBoxLayout(advanced_tab)
advanced_glayout = QGridLayout()
# Advanced tab - clear PIN
clear_pin_button = QPushButton(_("Disable PIN"))
clear_pin_button.clicked.connect(clear_pin)
clear_pin_warning = QLabel(
_("If you disable your PIN, anyone with physical access to your "
"%s device can spend your bitcoins.") % plugin.device)
clear_pin_warning.setWordWrap(True)
clear_pin_warning.setStyleSheet("color: red")
advanced_glayout.addWidget(clear_pin_button, 0, 2)
advanced_glayout.addWidget(clear_pin_warning, 1, 0, 1, 5)
# Advanced tab - toggle passphrase protection
passphrase_button = QPushButton()
passphrase_button.clicked.connect(toggle_passphrase)
passphrase_msg = QLabel(
_("Passphrases allow you to access new wallets, each "
"hidden behind a particular case-sensitive passphrase. You "
"need to create a separate Electrum wallet for each passphrase "
"you use as they each generate different addresses. Changing "
"your passphrase does not lose other wallets, each is still "
"accessible behind its own passphrase."))
passphrase_msg.setWordWrap(True)
passphrase_warning = QLabel(
_("If you forget a passphrase you will be unable to access any "
"bitcoins in the wallet behind it. A passphrase is not a PIN. "
"Only change this if you are sure you understand it."))
passphrase_warning.setWordWrap(True)
passphrase_warning.setStyleSheet("color: red")
advanced_glayout.addWidget(passphrase_button, 3, 2)
advanced_glayout.addWidget(passphrase_msg, 4, 0, 1, 5)
advanced_glayout.addWidget(passphrase_warning, 5, 0, 1, 5)
# Advanced tab - wipe device
wipe_device_button = QPushButton(_("Wipe Device"))
wipe_device_button.clicked.connect(wipe_device)
wipe_device_msg = QLabel(
_("Wipe the device, removing all data from it. The firmware "
"is left unchanged."))
wipe_device_msg.setWordWrap(True)
wipe_device_warning = QLabel(
_("Only wipe a device if you have the recovery seed written down "
"and the device wallet(s) are empty, otherwise the bitcoins "
"will be lost forever."))
wipe_device_warning.setWordWrap(True)
wipe_device_warning.setStyleSheet("color: red")
advanced_glayout.addWidget(wipe_device_button, 6, 2)
advanced_glayout.addWidget(wipe_device_msg, 7, 0, 1, 5)
advanced_glayout.addWidget(wipe_device_warning, 8, 0, 1, 5)
advanced_layout.addLayout(advanced_glayout)
advanced_layout.addStretch(1)
tabs = QTabWidget(self)
tabs.addTab(info_tab, _("Information"))
tabs.addTab(settings_tab, _("Settings"))
tabs.addTab(advanced_tab, _("Advanced"))
# Update information
update()
dialog_vbox.addWidget(tabs)
dialog_vbox.addLayout(Buttons(CloseButton(self)))<|fim▁end|>
|
from electrum.i18n import _
from electrum.plugins import hook, DeviceMgr
|
<|file_name|>commonutils.go<|end_file_name|><|fim▁begin|>/*
Copyright IBM Corp. 2016 All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package utils
import (
"errors"
"fmt"
"time"
"github.com/golang/protobuf/proto"
"github.com/golang/protobuf/ptypes/timestamp"
"github.com/hyperledger/fabric/common/crypto"
cb "github.com/hyperledger/fabric/protos/common"
pb "github.com/hyperledger/fabric/protos/peer"
)
// MarshalOrPanic serializes a protobuf message and panics if this operation fails.
func MarshalOrPanic(pb proto.Message) []byte {
data, err := proto.Marshal(pb)
if err != nil {
panic(err)
}
return data
}
// Marshal serializes a protobuf message.
func Marshal(pb proto.Message) ([]byte, error) {
return proto.Marshal(pb)
}
// CreateNonceOrPanic generates a nonce using the common/crypto package
// and panics if this operation fails.
func CreateNonceOrPanic() []byte {
nonce, err := crypto.GetRandomNonce()
if err != nil {
panic(fmt.Errorf("Cannot generate random nonce: %s", err))
}
return nonce
}
// CreateNonce generates a nonce using the common/crypto package.
func CreateNonce() ([]byte, error) {
nonce, err := crypto.GetRandomNonce()
if err != nil {
return nil, fmt.Errorf("Cannot generate random nonce: %s", err)
}
return nonce, nil
}
// UnmarshalPayloadOrPanic unmarshals bytes to a Payload structure or panics on error
func UnmarshalPayloadOrPanic(encoded []byte) *cb.Payload {
payload, err := UnmarshalPayload(encoded)
if err != nil {
panic(fmt.Errorf("Error unmarshaling data to payload: %s", err))
}
return payload
}
// UnmarshalPayload unmarshals bytes to a Payload structure
func UnmarshalPayload(encoded []byte) (*cb.Payload, error) {
payload := &cb.Payload{}
err := proto.Unmarshal(encoded, payload)
if err != nil {
return nil, err
}
return payload, err
}
// UnmarshalEnvelopeOrPanic unmarshals bytes to an Envelope structure or panics on error
func UnmarshalEnvelopeOrPanic(encoded []byte) *cb.Envelope {
envelope, err := UnmarshalEnvelope(encoded)
if err != nil {
panic(fmt.Errorf("Error unmarshaling data to envelope: %s", err))
}
return envelope
}
// UnmarshalEnvelope unmarshals bytes to an Envelope structure
func UnmarshalEnvelope(encoded []byte) (*cb.Envelope, error) {
envelope := &cb.Envelope{}
err := proto.Unmarshal(encoded, envelope)
if err != nil {
return nil, err
}
return envelope, err
}
// UnmarshalEnvelopeOfType unmarshals an envelope of the specified type, including
// the unmarshaling the payload data
func UnmarshalEnvelopeOfType(envelope *cb.Envelope, headerType cb.HeaderType, message proto.Message) (*cb.ChannelHeader, error) {
payload, err := UnmarshalPayload(envelope.Payload)
if err != nil {
return nil, err
}
if payload.Header == nil {
return nil, fmt.Errorf("Envelope must have a Header")
}
chdr, err := UnmarshalChannelHeader(payload.Header.ChannelHeader)
if err != nil {
return nil, fmt.Errorf("Invalid ChannelHeader")
}
if chdr.Type != int32(headerType) {
return nil, fmt.Errorf("Not a tx of type %v", headerType)
}
if err = proto.Unmarshal(payload.Data, message); err != nil {
return nil, fmt.Errorf("Error unmarshaling message for type %v: %s", headerType, err)
}
return chdr, nil
}<|fim▁hole|>func ExtractEnvelopeOrPanic(block *cb.Block, index int) *cb.Envelope {
envelope, err := ExtractEnvelope(block, index)
if err != nil {
panic(err)
}
return envelope
}
// ExtractEnvelope retrieves the requested envelope from a given block and unmarshals it.
func ExtractEnvelope(block *cb.Block, index int) (*cb.Envelope, error) {
if block.Data == nil {
return nil, fmt.Errorf("No data in block")
}
envelopeCount := len(block.Data.Data)
if index < 0 || index >= envelopeCount {
return nil, fmt.Errorf("Envelope index out of bounds")
}
marshaledEnvelope := block.Data.Data[index]
envelope, err := GetEnvelopeFromBlock(marshaledEnvelope)
if err != nil {
return nil, fmt.Errorf("Block data does not carry an envelope at index %d: %s", index, err)
}
return envelope, nil
}
// ExtractPayloadOrPanic retrieves the payload of a given envelope and unmarshals it -- it panics if either of these operations fail.
func ExtractPayloadOrPanic(envelope *cb.Envelope) *cb.Payload {
payload, err := ExtractPayload(envelope)
if err != nil {
panic(err)
}
return payload
}
// ExtractPayload retrieves the payload of a given envelope and unmarshals it.
func ExtractPayload(envelope *cb.Envelope) (*cb.Payload, error) {
payload := &cb.Payload{}
if err := proto.Unmarshal(envelope.Payload, payload); err != nil {
return nil, fmt.Errorf("Envelope does not carry a Payload: %s", err)
}
return payload, nil
}
// MakeChannelHeader creates a ChannelHeader.
func MakeChannelHeader(headerType cb.HeaderType, version int32, chainID string, epoch uint64) *cb.ChannelHeader {
return &cb.ChannelHeader{
Type: int32(headerType),
Version: version,
Timestamp: ×tamp.Timestamp{
Seconds: time.Now().Unix(),
Nanos: 0,
},
ChannelId: chainID,
Epoch: epoch,
}
}
// MakeSignatureHeader creates a SignatureHeader.
func MakeSignatureHeader(serializedCreatorCertChain []byte, nonce []byte) *cb.SignatureHeader {
return &cb.SignatureHeader{
Creator: serializedCreatorCertChain,
Nonce: nonce,
}
}
func SetTxID(channelHeader *cb.ChannelHeader, signatureHeader *cb.SignatureHeader) error {
txid, err := ComputeProposalTxID(
signatureHeader.Nonce,
signatureHeader.Creator,
)
if err != nil {
return err
}
channelHeader.TxId = txid
return nil
}
// MakePayloadHeader creates a Payload Header.
func MakePayloadHeader(ch *cb.ChannelHeader, sh *cb.SignatureHeader) *cb.Header {
return &cb.Header{
ChannelHeader: MarshalOrPanic(ch),
SignatureHeader: MarshalOrPanic(sh),
}
}
// NewSignatureHeaderOrPanic returns a signature header and panics on error.
func NewSignatureHeaderOrPanic(signer crypto.LocalSigner) *cb.SignatureHeader {
if signer == nil {
panic(errors.New("Invalid signer. Must be different from nil."))
}
signatureHeader, err := signer.NewSignatureHeader()
if err != nil {
panic(fmt.Errorf("Failed generating a new SignatureHeader [%s]", err))
}
return signatureHeader
}
// SignOrPanic signs a message and panics on error.
func SignOrPanic(signer crypto.LocalSigner, msg []byte) []byte {
if signer == nil {
panic(errors.New("Invalid signer. Must be different from nil."))
}
sigma, err := signer.Sign(msg)
if err != nil {
panic(fmt.Errorf("Failed generting signature [%s]", err))
}
return sigma
}
// UnmarshalChannelHeader returns a ChannelHeader from bytes
func UnmarshalChannelHeader(bytes []byte) (*cb.ChannelHeader, error) {
chdr := &cb.ChannelHeader{}
err := proto.Unmarshal(bytes, chdr)
if err != nil {
return nil, fmt.Errorf("UnmarshalChannelHeader failed, err %s", err)
}
return chdr, nil
}
// UnmarshalChaincodeID returns a ChaincodeID from bytes
func UnmarshalChaincodeID(bytes []byte) (*pb.ChaincodeID, error) {
ccid := &pb.ChaincodeID{}
err := proto.Unmarshal(bytes, ccid)
if err != nil {
return nil, fmt.Errorf("UnmarshalChaincodeID failed, err %s", err)
}
return ccid, nil
}
// IsConfigBlock validates whenever given block contains configuration
// update transaction
func IsConfigBlock(block *cb.Block) bool {
envelope, err := ExtractEnvelope(block, 0)
if err != nil {
return false
}
payload, err := GetPayload(envelope)
if err != nil {
return false
}
hdr, err := UnmarshalChannelHeader(payload.Header.ChannelHeader)
if err != nil {
return false
}
return cb.HeaderType(hdr.Type) == cb.HeaderType_CONFIG
}<|fim▁end|>
|
// ExtractEnvelopeOrPanic retrieves the requested envelope from a given block and unmarshals it -- it panics if either of these operation fail.
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
import os
for module in os.listdir(os.path.dirname(__file__)):
if module == "__init__.py" or module[-3:] != ".py":<|fim▁hole|>del module
"""
import src.itemFolder.logistics.container
import src.itemFolder.logistics.mover
import src.itemFolder.logistics.pathingNode
import src.itemFolder.logistics.sorter
import src.itemFolder.logistics.typedStockpileManager
import src.itemFolder.logistics.uniformStockpileManager<|fim▁end|>
|
continue
__import__("src.itemFolder.logistics." + module[:-3], locals(), globals())
|
<|file_name|>data.py<|end_file_name|><|fim▁begin|># Copyright (c) 2016, NVIDIA CORPORATION. All rights reserved.
from __future__ import absolute_import
from digits.utils import subclass, override, constants
from digits.extensions.data.interface import DataIngestionInterface
from .forms import DatasetForm, InferenceForm
import numpy as np
import os
TEMPLATE = "templates/template.html"
INFERENCE_TEMPLATE = "templates/inference_template.html"
@subclass
class DataIngestion(DataIngestionInterface):
"""
A data ingestion extension for an image gradient dataset
"""
def __init__(self, is_inference_db=False, **kwargs):
super(DataIngestion, self).__init__(**kwargs)
self.userdata['is_inference_db'] = is_inference_db
# Used to calculate the gradients later
self.yy, self.xx = np.mgrid[:self.image_height,
:self.image_width].astype('float')
@override
def encode_entry(self, entry):
xslope, yslope = entry
label = np.array([xslope, yslope])
a = xslope * 255 / self.image_width
b = yslope * 255 / self.image_height
image = a * (self.xx - self.image_width/2) + b * (self.yy - self.image_height/2) + 127.5
image = image.astype('uint8')
# convert to 3D tensors
image = image[np.newaxis, ...]
label = label[np.newaxis, np.newaxis, ...]
return image, label
@staticmethod
@override
def get_category():
return "Images"
@staticmethod
@override
def get_id():
return "image-gradients"
@staticmethod
@override
def get_dataset_form():
return DatasetForm()
@staticmethod
@override
def get_dataset_template(form):
"""
parameters:
- form: form returned by get_dataset_form(). This may be populated
with values if the job was cloned
return:
- (template, context) tuple
- template is a Jinja template to use for rendering dataset creation
options
- context is a dictionary of context variables to use for rendering
the form
"""
extension_dir = os.path.dirname(os.path.abspath(__file__))<|fim▁hole|>
@override
def get_inference_form(self):
return InferenceForm()
@staticmethod
@override
def get_inference_template(form):
extension_dir = os.path.dirname(os.path.abspath(__file__))
template = open(os.path.join(extension_dir, INFERENCE_TEMPLATE), "r").read()
context = {'form': form}
return (template, context)
@staticmethod
@override
def get_title():
return "Gradients"
@override
def itemize_entries(self, stage):
count = 0
if self.userdata['is_inference_db']:
if stage == constants.TEST_DB:
if self.test_image_count:
count = self.test_image_count
else:
return [(self.gradient_x, self.gradient_y)]
else:
if stage == constants.TRAIN_DB:
count = self.train_image_count
elif stage == constants.VAL_DB:
count = self.val_image_count
elif stage == constants.TEST_DB:
count = self.test_image_count
return [np.random.random_sample(2) - 0.5 for i in xrange(count)] if count > 0 else []<|fim▁end|>
|
template = open(os.path.join(extension_dir, TEMPLATE), "r").read()
context = {'form': form}
return (template, context)
|
<|file_name|>async2.rs<|end_file_name|><|fim▁begin|>// compile-flags: --edition=2018
use core::{
future::Future,
marker::Send,
pin::Pin,
};
fn non_async_func() {
println!("non_async_func was covered");
let b = true;
if b {
println!("non_async_func println in block");
}
}
<|fim▁hole|> if b {
println!("async_func println in block");
}
}
async fn async_func_just_println() {
println!("async_func_just_println was covered");
}
fn main() {
println!("codecovsample::main");
non_async_func();
executor::block_on(async_func());
executor::block_on(async_func_just_println());
}
mod executor {
use core::{
future::Future,
pin::Pin,
task::{Context, Poll, RawWaker, RawWakerVTable, Waker},
};
pub fn block_on<F: Future>(mut future: F) -> F::Output {
let mut future = unsafe { Pin::new_unchecked(&mut future) };
use std::hint::unreachable_unchecked;
static VTABLE: RawWakerVTable = RawWakerVTable::new(
|_| unsafe { unreachable_unchecked() }, // clone
|_| unsafe { unreachable_unchecked() }, // wake
|_| unsafe { unreachable_unchecked() }, // wake_by_ref
|_| (),
);
let waker = unsafe { Waker::from_raw(RawWaker::new(core::ptr::null(), &VTABLE)) };
let mut context = Context::from_waker(&waker);
loop {
if let Poll::Ready(val) = future.as_mut().poll(&mut context) {
break val;
}
}
}
}<|fim▁end|>
|
async fn async_func() {
println!("async_func was covered");
let b = true;
|
<|file_name|>qluamode_lua.cpp<|end_file_name|><|fim▁begin|>/* -*- C++ -*- */
#include <QtGlobal>
#include <QtAlgorithms>
#include <QChar>
#include <QDebug>
#include <QList>
#include <QMap>
#include <QPointer>
#include <QRegExp>
#include <QSettings>
#include <QSharedData>
#include <QSharedDataPointer>
#include "qluaapplication.h"
#include "qtluaengine.h"
#include "qluamainwindow.h"
#include "qluatextedit.h"
#include "qluamode.h"
#include <string.h>
#include <ctype.h>
#define DEBUG 0
// ========================================
// USERDATA
namespace {
enum TokenType {
// generic tokens
Other, Identifier, Number, String,
// tokens
SemiColon, ThreeDots, Comma, Dot, Colon,
LParen, RParen, LBracket, RBracket, LBrace, RBrace,
// keywords
Kand, Kfalse, Kfunction, Knil,
Knot, Kor, Ktrue, Kin,
// keywords that kill statements
Kbreak, Kdo, Kelse, Kelseif, Kend, Kfor,
Kif, Klocal, Krepeat, Kreturn,
Kthen, Kuntil, Kwhile,
// special
Eof,
Chunk,
Statement,
StatementCont,
FunctionBody,
FunctionName,
FirstKeyword = Kand,
FirstStrictKeyword = Kbreak,
};
struct Keywords {
const char *text;
TokenType type;
};
Keywords skeywords[] = {
{"and", Kand}, {"break", Kbreak}, {"do", Kdo}, {"else", Kelse},
{"elseif", Kelseif}, {"end", Kend}, {"false", Kfalse}, {"for", Kfor},
{"function", Kfunction}, {"if", Kif}, {"in", Kin}, {"local", Klocal},
{"nil", Knil}, {"not", Knot}, {"or", Kor}, {"repeat", Krepeat},
{"return", Kreturn}, {"then", Kthen}, {"true", Ktrue},
{"until", Kuntil}, {"while", Kwhile},
{";", SemiColon}, {"...", ThreeDots}, {",", Comma},
{".", Dot}, {":", Colon},
{"(", LParen}, {")", RParen}, {"[", LBracket},
{"]", RBracket}, {"{", LBrace}, {"}", RBrace},
{0}
};
struct Node;
struct PNode : public QSharedDataPointer<Node> {
PNode();
PNode(TokenType t, int p, int l, PNode n);
PNode(TokenType t, int p, int l, int i, PNode n);
TokenType type() const;
int pos() const;
int len() const;
int indent() const;
PNode next() const;
};
struct Node : public QSharedData {
Node(TokenType t, int p, int l, PNode n)
: next(n), type(t),pos(p),len(l),indent(-1) {}
Node(TokenType t, int p, int l, int i, PNode n)
: next(n), type(t),pos(p),len(l),indent(i) {}
PNode next;
TokenType type;
int pos;
int len;
int indent;
};
PNode::PNode()
: QSharedDataPointer<Node>() {}
PNode::PNode(TokenType t, int p, int l, PNode n)
: QSharedDataPointer<Node>(new Node(t,p,l,n)) {}
PNode::PNode(TokenType t, int p, int l, int i, PNode n)
: QSharedDataPointer<Node>(new Node(t,p,l,i,n)) {}
inline TokenType PNode::type() const {
const Node *n = constData();
return (n) ? n->type : Chunk;
}
inline int PNode::pos() const {
const Node *n = constData();
return (n) ? n->pos : 0;
}
inline int PNode::len() const {
const Node *n = constData();
return (n) ? n->len : 0;
}
inline int PNode::indent() const {
const Node *n = constData();
return (n) ? n->indent : 0;
}
inline PNode PNode::next() const {
const Node *n = constData();
return (n) ? n->next : PNode();
}
struct UserData : public QLuaModeUserData
{
// lexical state
int lexState;
int lexPos;
int lexN;
// parser state
PNode nodes;
int lastPos;
// initialize
UserData() : lexState(0), lexPos(0), lexN(0), lastPos(0) {}
virtual int highlightState() { return (lexState<<16)^lexN; }
};
}
// ========================================
// QLUAMODELUA
class QLuaModeLua : public QLuaMode
{
Q_OBJECT
public:
QLuaModeLua(QLuaTextEditModeFactory *f, QLuaTextEdit *e);
void gotLine(UserData *d, int pos, int len, QString);
void gotToken(UserData *d, int pos, int len, QString, TokenType);
bool supportsComplete() { return true; }
bool supportsLua() { return true; }
virtual void parseBlock(int pos, const QTextBlock &block,
const QLuaModeUserData *idata,
QLuaModeUserData *&odata );
QStringList computeFileCompletions(QString s, bool escape, QString &stem);
QStringList computeSymbolCompletions(QString s, QString &stem);
virtual bool doComplete();
private:
QMap<QString,TokenType> keywords;
QRegExp reNum, reSym, reId;
int bi;
};
QLuaModeLua::QLuaModeLua(QLuaTextEditModeFactory *f, QLuaTextEdit *e)
: QLuaMode(f,e),
reNum("^(0x[0-9a-fA-F]+|\\.[0-9]+|[0-9]+(\\.[0-9]*)?([Ee][-+]?[0-9]*)?)"),
reSym("^(\\.\\.\\.|<=|>=|==|~=|.)"),
reId("^[A-Za-z_][A-Za-z0-9_]*"),
bi(3)
{
// basic indent
QSettings s;
s.beginGroup("luaMode");
bi = s.value("basicIndent", 3).toInt();
// tokens
for (int i=0; skeywords[i].text; i++)
keywords[QString(skeywords[i].text)] = skeywords[i].type;
}
void
QLuaModeLua::parseBlock(int pos, const QTextBlock &block,
const QLuaModeUserData *idata,
QLuaModeUserData *&odata )
{
QString text = block.text();
UserData *data = new UserData;
// input state
if (idata)
*data = *static_cast<const UserData*>(idata);
// hack for statements that seem complete
if (data->nodes.type() == Statement)
setIndent(data->lastPos, data->nodes.next().indent());
// process line
gotLine(data, pos, block.length(), block.text());
// flush parser stack on last block
if (! block.next().isValid())
gotToken(data, data->lastPos+1, 0, QString(), Eof);
// output state
odata = data;
}
// ========================================
// QLUAMODELUA - LEXICAL ANALYSIS
void
QLuaModeLua::gotLine(UserData *d, int pos, int len, QString s)
{
// default indent
if (pos == 0)
setIndent(-1, 0);
// lexical analysis
int p = 0;
int n = d->lexN;
int r = d->lexPos - pos;
int state = d->lexState;
int slen = s.size();
while (p < len)
{
int c = (p < slen) ? s[p].toAscii() : '\n';
switch(state)
{
case 0:
state = -1;
if (c == '#') {
r = p; n = 0; state = -4;
}
continue;
default:
case -1:
if (isspace(c)) {
break;
} if (isalpha(c) || c=='_') {
r = p; state = -2;
} else if (c=='\'') {
setIndentOverlay(pos+p+1, -1);
r = p; n = -c; state = -3;
} else if (c=='\"') {
setIndentOverlay(pos+p+1, -1);
r = p; n = -c; state = -3;
} else if (c=='[') {
r = p; n = 0; state = -3;
int t = p + 1;
while (t < slen && s[t] == '=')
t += 1;
if (t < slen && s[t] == '[') {
n = t - p;
setIndentOverlay(pos+p, -1);
} else {
state = -1;
gotToken(d, pos+p, 1, QString(), LBracket);
}
} else if (c=='-' && p+1 < slen && s[p+1]=='-') {
r = p; n = 0; state = -4;
if (p+2 < slen && s[p+2]=='[') {
int t = p + 3;
while (t < slen && s[t] == '=')
t += 1;
if (t < slen && s[t] == '[') {
n = t - p - 2;
setIndentOverlay(pos+p, 0);
setIndentOverlay(pos+t+1, (n > 1) ? -1 :
e->indentAfter(pos+t+1, +1) );
}
}
} else if (reNum.indexIn(s,p,QRegExp::CaretAtOffset)>=0) {
int l = reNum.matchedLength();
QString m = s.mid(p,l);
setFormat(pos+p, l, "number");
gotToken(d, pos+p, l, m, Number);
p += l - 1;
} else if (reSym.indexIn(s,p,QRegExp::CaretAtOffset)>=0) {
int l = reSym.matchedLength();
QString m = s.mid(p,l);
if (keywords.contains(m))
gotToken(d, pos+p, l, QString(), keywords[m]);
else
gotToken(d, pos+p, l, m, Other);
p += l - 1;
}
break;
case -2: // identifier
if (!isalnum(c) && c!='_') {
QString m = s.mid(r, p-r);
if (keywords.contains(m)) {
setFormat(pos+r, p-r, "keyword");
gotToken(d, pos+r, p-r, QString(), keywords[m]);
} else
gotToken(d, pos+r, p-r, m, Identifier);
state = -1; continue;
}
break;
case -3: // string
if (n <= 0 && (c == -n || c == '\n' || c == '\r')) {
setFormat(pos+r,p-r+1,"string");
setIndentOverlay(pos+p+1);
gotToken(d, pos+r,p-r+1, QString(), String);
state = -1;
} else if (n <= 0 && c=='\\') {
p += 1;
} else if (n > 0 && c==']' && p>=n && s[p-n]==']') {
int t = p - n + 1;
while (t < slen && s[t] == '=')
t += 1;
if (t == p) {
setFormat(pos+r,p-r+1,"string");
setIndentOverlay(pos+p+1);
gotToken(d, pos+r,p-r+1,QString(),String);
state = -1;
}
}
break;
case -4: // comment
if (n <= 0 && (c == '\n' || c == '\r')) {
setFormat(pos+r, p-r, "comment");
state = -1;
} else if (n > 0 && c==']' && p>=n && s[p-n]==']') {
int t = p - n + 1;
while (t < slen && s[t] == '=')
t += 1;
if (t == p) {
setFormat(pos+r, p-r+1, "comment");
setIndentOverlay(pos+p-n, 2);
setIndentOverlay(pos+p+1);
state = -1;
}
}
break;
}
p += 1;
}
// save state
d->lexN = n;
d->lexPos = r + pos;
d->lexState = state;
// format incomplete tokens
if (state == -4)
setFormat(qMax(pos,pos+r),qMin(len,len-r),"comment");
else if (state == -3)
setFormat(qMax(pos,pos+r),qMin(len,len-r),"string");
}
// ========================================
// QLUAMODELUA - PARSING
#if DEBUG
QDebug operator<<(QDebug d, const TokenType &t)
{
d.nospace();
# define DO(x) if (t==x) d << #x; else
DO(Other) DO(Identifier) DO(Number) DO(String)
DO(SemiColon) DO(ThreeDots) DO(Comma) DO(Dot) DO(Colon)
DO(LParen) DO(RParen) DO(LBracket)
DO(RBracket) DO(LBrace) DO(RBrace)
DO(Kand) DO(Kfalse) DO(Kfunction) DO(Knil)
DO(Knot) DO(Kor) DO(Ktrue) DO(Kin)
DO(Kbreak) DO(Kdo) DO(Kelse) DO(Kelseif) DO(Kend) DO(Kfor)
DO(Kif) DO(Klocal) DO(Krepeat) DO(Kreturn)
DO(Kthen) DO(Kuntil) DO(Kwhile)
DO(Statement) DO(StatementCont) DO(Chunk)
DO(FunctionBody) DO(FunctionName) DO(Eof)
# undef DO
d << "<Unknown>";
return d.space();
}
#endif
void
QLuaModeLua::gotToken(UserData *d, int pos, int len,
QString s, TokenType ltype)
{
PNode &n = d->nodes;
TokenType ntype = n.type();
#if DEBUG
qDebug() << " node:" << n << ntype << n.pos() << n.len()
<< n.indent() << n.next().type() << n.next().next().type();
if (s.isEmpty())
qDebug() << " token:" << pos << len << ltype;
else
qDebug() << " token:" << pos << len << ltype << s;
#endif
// close statements
if ( ((ntype==Statement)
&& (ltype==Identifier || ltype==Kfunction) ) ||
((ntype==Statement || ntype==StatementCont ||
ntype==Klocal || ntype==Kreturn )
&& (ltype==SemiColon || ltype>=FirstStrictKeyword) ) )
{
int epos = (ltype==SemiColon) ? pos+len : d->lastPos;
int spos = n.pos();
n = n.next();
setBalance(spos, epos, n.type()==Chunk);
setIndent(epos, n.indent());
}
if ((ntype == FunctionName || ntype == Kfunction) &&
(ltype!=Identifier && ltype!=Dot && ltype!=Colon) )
{
if (ntype == FunctionName) n=n.next();
ntype = n->type = FunctionBody;
setIndent(pos, n.indent());
}
ntype = n.type();
// fixup hacked indents
if (ntype == StatementCont)
n->type = Statement;
if (d->lastPos < pos && ntype == Statement)
setIndent(pos, n.indent());
d->lastPos = pos + len;
// parse
switch (ltype)
{
badOne:
{
setIndent(pos, -1);
while (n.type() != Chunk && n.len() == 0) n = n.next();
setErrorMatch(pos, len, n.pos(), n.len());
n = n.next();
setIndent(pos+len, n.indent());
break;
}
case RParen:
if (ntype != LParen)
goto badOne;
goto rightOne;
case RBracket:
if (ntype != LBracket)
goto badOne;
goto rightOne;
case RBrace:
if (ntype != LBrace)
goto badOne;
goto rightOne;
case Kend:
if (ntype!=Kdo && ntype!=Kelse && ntype!=Kthen
&& ntype!=Kfunction && ntype!=FunctionBody)
goto badOne;
rightOne:
{
setRightMatch(pos, len, n.pos(), n.len());
int fpos = followMatch(n.pos(),n.len());
int indent = n.indent();
n = n.next();
if (ltype < FirstKeyword)
indent = qMin(qMax(0,indent-bi),e->indentAt(fpos));
else
indent = n.indent();
setIndent(pos, indent);
setIndent(pos+len, n.indent());
setBalance(fpos, pos+len, n.type()==Chunk);
break;
}
case Kuntil:
if (ntype != Krepeat)
goto badOne;
{
setRightMatch(pos, len, n.pos(), n.len());
setIndent(pos, n.next().indent());
setIndent(pos+len, n.indent());
n->len = 0;
n->type = StatementCont;
break;
}
case Kthen:
if (ntype!=Kif && ntype!=Kelseif)
goto badOne;
goto middleOne;
case Kelse: case Kelseif:
if (ntype!=Kthen)
goto badOne;
middleOne:
{
setMiddleMatch(pos, len, n.pos(), n.len());
setIndent(pos, n.next().indent());
setIndent(pos+len, n.indent());
n->type = ltype;
n->pos = pos;
n->len = len;
break;
}
case Kdo:
if (ntype==Kfor || ntype==Kwhile)
goto middleOne;
goto leftOne;
case Kfunction:
if (ntype == Klocal)
goto middleOne;
goto leftOne;
case Kfor: case Kif: case Kwhile:
case Krepeat: case Klocal: case Kreturn:
case LParen: case LBracket: case LBrace:
leftOne:
{
int indent = n.indent() + bi;
if (ltype == LBrace && ntype == StatementCont)
indent = n.indent();
else if (ltype < FirstKeyword)
indent = e->indentAfter(pos+len);
setIndent(pos, n.indent());
n = PNode(ltype, pos, len, indent, n);
setIndent(pos+len, indent);
setLeftMatch(pos, len);
break;
}
case SemiColon:
case Eof:
break;
case Identifier:
if (ntype == Kfunction)
n = PNode(FunctionName, pos, len, n.indent(), n);
if (n.type() == FunctionName)
setFormat(pos, len, "function");
goto openStatement;
case Dot: case Colon:
if (ntype == FunctionName)
setFormat(pos, len, "function");
case Kand: case Kor: case Knot:
case Kin: case Comma: case Other:
if (n.type() == Statement)
{
n->type = StatementCont;
setIndent(pos, n.indent());
}
default:
openStatement:
{
if (ntype==Chunk || ntype==Kdo || ntype==Kthen ||
ntype==Kelse || ntype==Krepeat || ntype==FunctionBody)
{
int indent = n.indent() + bi;
n = PNode(Statement, pos, 0, indent, n);
setIndent(pos+len, indent);
}
else if (ntype==Klocal)
n->type = StatementCont;
else if (ntype==Kreturn)<|fim▁hole|>}
// ========================================
// COMPLETION
static int
comp_lex(QString s, int len, int state, int n, int &q)
{
QChar z;
int p = 0;
while (p < len)
{
switch(state)
{
default:
case -1: // misc
if (isalpha(s[p].toAscii()) || s[p]=='_') {
q = p; state = -2;
} else if (s[p]=='\'') {
q = p+1; z = s[p]; n = 0; state = -3;
} else if (s[p]=='\"') {
q = p+1; z = s[p]; n = 0; state = -3;
} else if (s[p]=='[') {
n = 0; state = -3;
int t = p + 1;
while (t < len && s[t] == '=')
t += 1;
if (t < len && s[t] == '[') {
q = t + 1;
n = t - p;
} else
state = -1;
} else if (s[p]=='-' && s[p+1]=='-') {
n = 0; state = -4;
if (s[p+2]=='[') {
int t = p + 3;
while (t < len && s[t] == '=')
t += 1;
if (t < len && s[t] == '[')
n = t - p - 2;
}
}
break;
case -2: // identifier
if (!isalnum(s[p].toAscii()) && s[p]!='_' && s[p]!='.' && s[p]!=':') {
state = -1; continue;
}
break;
case -3: // string
if (n == 0 && s[p] == z) {
state = -1;
} else if (n == 0 && s[p]=='\\') {
p += 1;
} else if (n && s[p]==']' && p>=n && s[p-n]==']') {
int t = p - n + 1;
while (t < len && s[t] == '=')
t += 1;
if (t == p)
state = -1;
}
break;
case -4: // comment
if (n == 0 && (s[p] == '\n' || s[p] == '\r')) {
state = -1;
} else if (n && s[p]==']' && p>=n && s[p-n]==']') {
int t = p - n + 1;
while (t < len && s[t] == '=')
t += 1;
if (t == p)
state = -1;
}
break;
}
p += 1;
}
return state;
}
bool
QLuaModeLua::doComplete()
{
QString stem;
QStringList completions;
QTextCursor c = e->textCursor();
QTextBlock b = c.block();
int len = c.position() - b.position();
QString text = b.text().left(len);
int state = -1;
int q = 0;
int n = 0;
QTextBlock pb = b.previous();
if (pb.isValid())
{
UserData *data = static_cast<UserData*>(pb.userData());
if (! data)
return false;
state = data->lexState;
n = data->lexN;
}
state = comp_lex(text, len, state, n, q);
if (state == -3 && q >= 0 && q <= len)
completions = computeFileCompletions(text.mid(q, len-q), n>0, stem);
if (state == -2 && q >= 0 && q <= len)
completions = computeSymbolCompletions(text.mid(q, len-q), stem);
int selected = 0;
if (completions.size() > 1)
{
qSort(completions.begin(), completions.end());
for (int i=completions.size()-2; i>=0; i--)
if (completions[i] == completions[i+1])
completions.removeAt(i);
selected = askCompletion(stem, completions);
}
if (selected >= 0 && selected < completions.size())
{
c.insertText(completions[selected]);
e->setTextCursor(c);
return true;
}
return false;
}
static const char *escape1 = "abfnrtv";
static const char *escape2 = "\a\b\f\n\r\t\v";
static QByteArray
unescapeString(const char *s)
{
int c;
QByteArray r;
while ((c = *s++))
{
if (c != '\\')
r += c;
else {
c = *s++;
const char *e = strchr(escape1, c);
if (e)
r += escape2[e - escape1];
else if (c >= '0' && c <= '7') {
c = c - '0';
if (*s >= '0' && *s <= '7')
c = c * 8 + *s++ - '0';
if (*s >= '0' && *s <= '7')
c = c * 8 + *s++ - '0';
r += c;
} else
r += c;
}
}
return r;
}
static QString
unescapeString(QString s)
{
return QString::fromLocal8Bit(unescapeString(s.toLocal8Bit().constData()));
}
static QByteArray
escapeString(const char *s)
{
int c;
QByteArray r;
while ((c = *s++))
{
const char *e;
if (! isascii(c))
r += c;
else if (iscntrl(c) && (e = strchr(escape2, c)))
r += escape1[e - escape2];
else if (isprint(c) || isspace(c))
r += c;
else {
char buffer[8];
sprintf(buffer, "\\%03o", c);
r += buffer;
}
}
return r;
}
static QString
escapeString(QString s)
{
return QString::fromLocal8Bit(escapeString(s.toLocal8Bit().constData()));
}
QStringList
QLuaModeLua::computeFileCompletions(QString s, bool escape, QString &stem)
{
QStringList list;
s.remove(QRegExp("^.*\\s"));
stem = s;
if (escape)
stem = unescapeString(s);
fileCompletion(stem, list);
if (escape)
{
QStringList nl;
foreach(QString s, list)
nl += escapeString(s);
stem = escapeString(stem);
list = nl;
}
return list;
}
static const char *
comp_keywords[] =
{
"and", "break", "do", "else", "elseif",
"end", "false", "for", "function",
"if", "in", "local", "nil", "not",
"or", "repeat", "return", "then",
"true", "until", "while", 0
};
QStringList
QLuaModeLua::computeSymbolCompletions(QString s, QString &stem)
{
QStringList list;
QByteArray f = s.toLocal8Bit();
int flen = f.size();
// stem
stem = s.remove(QRegExp("^.*[.:]"));
// keywords
for (const char **k = comp_keywords; *k; k++)
if (!strncmp(f.constData(), *k, flen))
list += QString::fromLocal8Bit(*k + flen);
// symbols
QtLuaEngine *engine = QLuaApplication::engine();
if (engine)
{
QtLuaLocker lua(engine, 250);
struct lua_State *L = lua;
if (lua)
{
lua_pushcfunction(L, luaQ_complete);
lua_pushlstring(L, f.constData(), flen);
if (!lua_pcall(L, 1, 1, 0) && lua_istable(L, -1)) {
int n = lua_objlen(L, -1);
for (int j=1; j<=n; j++) {
lua_rawgeti(L, -1, j);
list += QString::fromLocal8Bit(lua_tostring(L, -1));
lua_pop(L, 1);
}
}
lua_pop(L, 1);
}
else
{
QWidget *w = e->window();
QLuaMainWindow *m = qobject_cast<QLuaMainWindow*>(w);
if (m)
m->showStatusMessage(tr("Auto-completions is restricted "
"while Lua is running.") );
QLuaApplication::beep();
}
}
return list;
}
// ========================================
// FACTORY
static QLuaModeFactory<QLuaModeLua> textModeFactory("Lua", "lua");
// ========================================
// MOC
#include "qluamode_lua.moc"
/* -------------------------------------------------------------
Local Variables:
c++-font-lock-extra-types: ("\\sw+_t" "\\(lua_\\)?[A-Z]\\sw*[a-z]\\sw*")
End:
------------------------------------------------------------- */<|fim▁end|>
|
n->type = Statement;
break;
}
}
|
<|file_name|>orm.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import django
import patchy
from django.db.models.deletion import get_candidate_relations_to_delete
from django.db.models.query import QuerySet
from django.db.models.query_utils import Q
from django.db.models.sql.query import Query
def patch_ORM_to_be_deterministic():
"""
Django's ORM is non-deterministic with regards to the queries it outputs
for e.g. OR clauses. We need it to be deterministic so that we can compare
queries between runs, so we make a couple patches to its internals to do
this. Mostly this is done by adding sorted() in some places so we're not
affected by the vagaries of random dict iteration order.
There is no undo for this, but it doesn't make the ORM much slower or
anything bad.
"""
if patch_ORM_to_be_deterministic.have_patched:
return
patch_ORM_to_be_deterministic.have_patched = True
patch_QuerySet()
patch_Query()
patch_Q()
version = django.get_version()
if version.startswith('1.8') or version.startswith('1.9'):
patch_delete()
patch_ORM_to_be_deterministic.have_patched = False
def patch_QuerySet():
patchy.patch(QuerySet.annotate, """\
@@ -17,7 +17,7 @@
except (AttributeError, TypeError):
raise TypeError("Complex annotations require an alias")
annotations[arg.default_alias] = arg
- annotations.update(kwargs)
+ annotations.update(sorted(kwargs.items()))
clone = self._clone()
names = self._fields
""")
def patch_Query():
patchy.patch(Query.add_extra, """\
@@ -13,7 +13,7 @@
param_iter = iter(select_params)
else:
param_iter = iter([])
- for name, entry in select.items():
+ for name, entry in sorted(select.items()):
entry = force_text(entry)
entry_params = []
pos = entry.find("%s")
""")
def patch_Q():
# This one can't be done by patchy since __init__ is different in Python 3,
# maybe one day https://github.com/adamchainz/patchy/issues/31 will be
# fixed.
def __init__(self, *args, **kwargs):
super(Q, self).__init__(children=list(args) + sorted(kwargs.items()))
Q.__init__ = __init__
def patch_delete():
patchy.patch(get_candidate_relations_to_delete, """\
@@ -4,9 +4,12 @@ def get_candidate_relations_to_delete(opts):
candidate_models = {opts}
candidate_models = candidate_models.union(opts.concrete_model._meta.proxied_children)
# For each model, get all candidate fields.
- candidate_model_fields = set(chain.from_iterable(
- opts.get_fields(include_hidden=True) for opts in candidate_models
- ))
+ from collections import OrderedDict<|fim▁hole|>+ candidate_model_fields = candidates_dict.values()
# The candidate relations are the ones that come from N-1 and 1-1 relations.
# N-N (i.e., many-to-many) relations aren't candidates for deletion.
return (
""")<|fim▁end|>
|
+ candidates_dict = OrderedDict()
+ for opts in candidate_models:
+ for field in opts.get_fields(include_hidden=True):
+ candidates_dict[field.name] = field
|
<|file_name|>hw5_start.py<|end_file_name|><|fim▁begin|># CIS 410/510pm
# Homework 5 beta 0.0.1
# Cameron Palk
# May 2016
#
# Special thanks to Daniel Lowd for the skeletor code
import sys
import tokenize
from functools import reduce
global_card = []
num_vars = 0
''' Calc Strides
'''
def calcStrides( scope ):
rev_scope = list( reversed( scope ) )
res = [ 1 ] + [ 0 ] * ( len( scope ) - 1 )
for idx in range( 1, len( rev_scope ) ):
res[ idx ] = res[ idx - 1 ] * global_card[ rev_scope[ idx - 1 ] ]
stride = list( reversed( res ) )
return { scope[i] : stride[i] for i in range( len( scope ) ) }
# FACTOR CLASS DEFINITION
class Factor( dict ):
# Constructor
def __init__(self, scope_, vals_):
self.scope = scope_
self.vals = vals_
self.stride = calcStrides( scope_ )
#
# Are two object EQual, True of False
def __eq__(self, other):
return (self.scope == other.scope and
self.vals == other.vals and<|fim▁hole|> def __repr__( self ):
style = "\n{0}\nScope: {1}\nStride: {2}\nCard: {3}\nVals:\n{4}\n{0}\n"
vertBar = ''.join( ['-'] * 50 )
return style.format( vertBar, self.scope, self.stride,
{ v : global_card[v] for v in self.scope },
'\n'.join( [ str( round( e, 3 ) ) for e in self.vals ] ) )
#
# What the '*' character does between our objects
def __mul__( self, other ):
new_scope = list( set( self.scope ).union( set( other.scope ) ) )
assignment = { e : 0 for e in new_scope }
card = { u : global_card[ u ] for u in new_scope }
val_count = reduce( lambda agg, x: agg * global_card[x], new_scope, 1 )
new_vals = [ 0 ] * val_count
idx1 = idx2 = 0
for i in range( 0, val_count ):
new_vals[ i ] = self.vals[ idx1 ] * other.vals[ idx2 ]
for rv in reversed( new_scope ):
if assignment[ rv ] == card[ rv ] - 1:
idx1 -= assignment[ rv ] * self.stride [ rv ] if rv in self.stride else 0
idx2 -= assignment[ rv ] * other.stride[ rv ] if rv in other.stride else 0
assignment[ rv ] = 0
else:
idx1 += self.stride [ rv ] if rv in self.scope else 0
idx2 += other.stride[ rv ] if rv in other.scope else 0
assignment[ rv ] += 1
break
#
return Factor( new_scope, new_vals )
#
# Sum out the variable and return a new Factor
def sumOut( self ):
# TODO Sum out a RV
return
#
# Helper Functions:
def containsRV( self, rv ):
return rv in self.scope
#
# END FACTOR CLASS DEFINITION
# IGNORE DANIELS READER BELOW
#
# Read in all tokens from stdin. Save it to a (global) buf that we use
# later. (Is there a better way to do this? Almost certainly.)
curr_token = 0
token_buf = []
def read_tokens():
global token_buf
for line in sys.stdin:
token_buf.extend(line.strip().split())
#
def next_token():
global curr_token
global token_buf
curr_token += 1
return token_buf[ curr_token - 1 ]
#
def next_int():
return int( next_token() )
#
def next_float():
return float( next_token() )
#
def read_model():
# Read in all tokens and throw away the first (expected to be "MARKOV")
read_tokens()
s = next_token()
# Get number of vars, followed by their ranges
global num_vars
num_vars = next_int()
global global_card
global_card = [ next_int() for i in range( num_vars ) ]
# Get number and scopes of factors
num_factors = int(next_token())
factor_scopes = []
for i in range(num_factors):
factor_scopes.append( [ next_int() for i in range( next_int() ) ] )
# Read in all factor values
factor_vals = []
for i in range(num_factors):
factor_vals.append( [ next_float() for i in range( next_int() ) ] )
return [ Factor(s,v) for (s,v) in zip( factor_scopes, factor_vals ) ]
#
# IGNORE DANIELS READER ABOVE
''' Factor Count With Var
@input factors Factors we want to look through
@input rv A RV
@return [int] The number of times the rv occures in the factors scopes
'''
def factorCountWithVar( factors, rv ):
return sum( [ 1 if f.containsRV( rv ) else 0 for f in factors ] )
''' Factor Stats
'''
def factorStats( factors, possibleVariables ):
return { v: factorCountWithVar(factors,v) for v in range( num_vars ) if v in possibleVariables }
''' Compute Partition Function
@input factors An array of Factor objects representing the graph
@return [float] The partition function ( why is it called a function? )
'''
def computePartitionFunction( factors ):
# TODO: Implement a faster way to computer partition function by summing out variables
f = reduce( Factor.__mul__, factors )
z = sum( f.vals )
return z
#
''' Main '''
def main():
# Read file
factors = read_model()
# Computer partition function
z = computePartitionFunction( factors )
# Print results
print( "Z =", z )
return
# Run main if this module is being run directly
if __name__ == '__main__':
main()<|fim▁end|>
|
self.stride == other.stride )
#
# A string used for printing the Factor Objects
|
<|file_name|>HTMLStyleElement.py<|end_file_name|><|fim▁begin|>########################################################################
#
# File Name: HTMLStyleElement
#
# Documentation: http://docs.4suite.com/4DOM/HTMLStyleElement.html
#
### This file is automatically generated by GenerateHtml.py.
### DO NOT EDIT!
"""
WWW: http://4suite.com/4DOM e-mail: [email protected]
Copyright (c) 2000 Fourthought Inc, USA. All Rights Reserved.
See http://4suite.com/COPYRIGHT for license and copyright information
"""
import string
from xml.dom import Node
from xml.dom.html.HTMLElement import HTMLElement
class HTMLStyleElement(HTMLElement):
def __init__(self, ownerDocument, nodeName="STYLE"):
HTMLElement.__init__(self, ownerDocument, nodeName)
### Attribute Methods ###
def _get_disabled(self):
return self.hasAttribute("DISABLED")
def _set_disabled(self, value):
if value:
self.setAttribute("DISABLED", "DISABLED")
else:
self.removeAttribute("DISABLED")
def _get_media(self):
return self.getAttribute("MEDIA")
def _set_media(self, value):
self.setAttribute("MEDIA", value)
def _get_type(self):
return self.getAttribute("TYPE")
def _set_type(self, value):
self.setAttribute("TYPE", value)
### Attribute Access Mappings ###
_readComputedAttrs = HTMLElement._readComputedAttrs.copy()
_readComputedAttrs.update({
"disabled" : _get_disabled,
"media" : _get_media,
"type" : _get_type<|fim▁hole|> })
_writeComputedAttrs = HTMLElement._writeComputedAttrs.copy()
_writeComputedAttrs.update({
"disabled" : _set_disabled,
"media" : _set_media,
"type" : _set_type
})
_readOnlyAttrs = filter(lambda k,m=_writeComputedAttrs: not m.has_key(k),
HTMLElement._readOnlyAttrs + _readComputedAttrs.keys())<|fim▁end|>
| |
<|file_name|>euler41.rs<|end_file_name|><|fim▁begin|>use std::char;
pub fn main() {
let primes = (2..).filter(|&n| (2..(n as f32).sqrt() as u32 + 1).all(|i| n % i != 0));
let pandigital = |number: &u32| {
let s = number.to_string();
(1..s.len() + 1).all(|n| s.chars().any(|p| p == char::from_digit(n as u32, 10).unwrap()))
};
let ans = primes.take_while(|&p| p <= 7654321).filter(pandigital).last().unwrap();<|fim▁hole|>}<|fim▁end|>
|
println!("{}", ans);
|
<|file_name|>gs-indexbuffer.cpp<|end_file_name|><|fim▁begin|>/*
* Modern effects for a modern Streamer
* Copyright (C) 2017 Michael Fabian Dirks
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
*/
#include "gs-indexbuffer.hpp"
#include <stdexcept>
#include "gs-limits.hpp"
#include "obs/gs/gs-helper.hpp"
streamfx::obs::gs::index_buffer::index_buffer(uint32_t maximumVertices)
{
this->reserve(maximumVertices);
auto gctx = streamfx::obs::gs::context();
_index_buffer = gs_indexbuffer_create(gs_index_type::GS_UNSIGNED_LONG, this->data(), maximumVertices, GS_DYNAMIC);
}
streamfx::obs::gs::index_buffer::index_buffer() : index_buffer(MAXIMUM_VERTICES) {}
streamfx::obs::gs::index_buffer::index_buffer(index_buffer& other) : index_buffer(static_cast<uint32_t>(other.size()))
{
std::copy(other.begin(), other.end(), this->end());
}
<|fim▁hole|> : index_buffer(static_cast<uint32_t>(other.size()))
{
std::copy(other.begin(), other.end(), this->end());
}
streamfx::obs::gs::index_buffer::~index_buffer()
{
auto gctx = streamfx::obs::gs::context();
gs_indexbuffer_destroy(_index_buffer);
}
gs_indexbuffer_t* streamfx::obs::gs::index_buffer::get()
{
return get(true);
}
gs_indexbuffer_t* streamfx::obs::gs::index_buffer::get(bool refreshGPU)
{
if (refreshGPU) {
auto gctx = streamfx::obs::gs::context();
gs_indexbuffer_flush(_index_buffer);
}
return _index_buffer;
}<|fim▁end|>
|
streamfx::obs::gs::index_buffer::index_buffer(std::vector<uint32_t>& other)
|
<|file_name|>PlayPauseReceiver.java<|end_file_name|><|fim▁begin|>package insanityradio.insanityradio;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
public class PlayPauseReceiver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
try {
FragmentNowPlaying.getInstance().playPauseButtonTapped(false);
} catch (NullPointerException e) {
Intent startActivityIntent = new Intent(context.getApplicationContext(), MainActivity.class);
startActivityIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
context.startActivity(startActivityIntent);
}<|fim▁hole|><|fim▁end|>
|
}
}
|
<|file_name|>20200402_1113_933665578547_migrate_review_conditions_from_settings.py<|end_file_name|><|fim▁begin|>"""Migrate review conditions from settings
Revision ID: 933665578547
Revises: 02bf20df06b3
Create Date: 2020-04-02 11:13:58.931020
"""
import json<|fim▁hole|>
from indico.modules.events.editing.models.editable import EditableType
# revision identifiers, used by Alembic.
revision = '933665578547'
down_revision = '02bf20df06b3'
branch_labels = None
depends_on = None
def upgrade():
    """Move per-event review conditions out of the settings table.

    For each editable type, read the legacy ``<type>_review_conditions``
    setting — a list of ``[uuid, [file_type_id, ...]]`` pairs (see the
    symmetric ``downgrade``) — create one row per condition in
    ``event_editing.review_conditions`` plus its file-type links, then
    delete the legacy setting rows.
    """
    if context.is_offline_mode():
        raise Exception('This upgrade is only possible in online mode')
    conn = op.get_bind()
    for type_ in EditableType:
        res = conn.execute(
            "SELECT event_id, value FROM events.settings WHERE module = 'editing' AND name = %s",
            (f'{type_.name}_review_conditions',),
        )
        for event_id, value in res:
            for condition in value:
                # Insert the condition row and grab its generated primary key.
                res2 = conn.execute(
                    'INSERT INTO event_editing.review_conditions (type, event_id) VALUES (%s, %s) RETURNING id',
                    (type_, event_id),
                )
                revcon_id = res2.fetchone()[0]
                # condition[1] holds the list of associated file-type ids.
                for file_type in condition[1]:
                    conn.execute('''
                        INSERT INTO event_editing.review_condition_file_types (file_type_id, review_condition_id)
                        VALUES (%s, %s)
                    ''', (file_type, revcon_id),
                    )
        # The dedicated tables now hold the data; drop the legacy setting.
        conn.execute(
            "DELETE FROM events.settings WHERE module = 'editing' AND name = %s",
            (f'{type_.name}_review_conditions',),
        )
def downgrade():
if context.is_offline_mode():
raise Exception('This upgrade is only possible in online mode')
conn = op.get_bind()
for type_ in EditableType:
res = conn.execute('SELECT id, event_id FROM event_editing.review_conditions WHERE type = %s', (type_.value,))
review_conditions = defaultdict(list)
for id, event_id in res:
file_types = conn.execute(
'SELECT file_type_id FROM event_editing.review_condition_file_types WHERE review_condition_id = %s',
(id,),
)
value = [str(uuid4()), [f[0] for f in file_types.fetchall()]]
review_conditions[event_id].append(value)
for key, value in review_conditions.items():
conn.execute(
"INSERT INTO events.settings (event_id, module, name, value) VALUES (%s, 'editing', %s, %s)",
(key, f'{type_.name}_review_conditions', json.dumps(value)),
)
conn.execute('DELETE FROM event_editing.review_condition_file_types')
conn.execute('DELETE FROM event_editing.review_conditions')<|fim▁end|>
|
from collections import defaultdict
from uuid import uuid4
from alembic import context, op
|
<|file_name|>layout.py<|end_file_name|><|fim▁begin|># This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import os
from io import BytesIO
from flask import flash, redirect, request, session
from PIL import Image
from werkzeug.exceptions import NotFound
from indico.core.db import db
from indico.modules.events.layout import layout_settings, logger
from indico.modules.events.layout.forms import (LayoutForm, LogoForm, CSSForm, CSSSelectionForm)
from indico.modules.events.layout.util import get_css_url
from indico.modules.events.layout.views import WPLayoutEdit
from indico.util.fs import secure_filename
from indico.util.i18n import _
from indico.util.string import to_unicode, crc32
from indico.web.flask.util import url_for, send_file
from indico.web.forms.base import FormDefaults
from indico.web.util import jsonify_data
from MaKaC.webinterface.pages.conferences import WPConfModifPreviewCSS
from MaKaC.webinterface.rh.conferenceModif import RHConferenceModifBase
from MaKaC.webinterface.rh.conferenceDisplay import RHConferenceBaseDisplay
def _logo_data(event):
return {
'url': event.logo_url,
'filename': event.logo_metadata['filename'],
'size': event.logo_metadata['size'],
'content_type': event.logo_metadata['content_type']
}
def _css_file_data(event):
return {
'filename': event.stylesheet_metadata['filename'],
'size': event.stylesheet_metadata['size'],
'content_type': 'text/css'
}
class RHLayoutBase(RHConferenceModifBase):
    """Shared base for the layout-management request handlers.

    Enables CSRF protection for every subclass and resolves the new-style
    Event object for the legacy Conference.
    """

    CSRF_ENABLED = True

    def _checkParams(self, params):
        RHConferenceModifBase._checkParams(self, params)
        # Keep both around: legacy code uses self._conf, newer code self.event.
        self.event = self._conf.as_event
class RHLayoutEdit(RHLayoutBase):
    """Show and save the layout settings page of a conference."""

    def _checkProtection(self):
        RHLayoutBase._checkProtection(self)
        # Layout customisation only exists for full conferences.
        if self._conf.getType() != 'conference':
            raise NotFound('Only conferences have layout settings')

    def _process(self):
        defaults = FormDefaults(**layout_settings.get_all(self._conf))
        form = LayoutForm(obj=defaults, event=self.event)
        css_form = CSSForm()
        logo_form = LogoForm()
        if form.validate_on_submit():
            # Persist only keys that are actual layout settings.
            data = {unicode(key): value for key, value in form.data.iteritems() if key in layout_settings.defaults}
            layout_settings.set_multi(self._conf, data)
            if form.theme.data == '_custom':
                layout_settings.set(self._conf, 'use_custom_css', True)
            flash(_('Settings saved'), 'success')
            return redirect(url_for('event_layout.index', self._conf))
        else:
            # GET (or invalid POST): pre-populate the upload widgets with the
            # currently stored logo / stylesheet, if any.
            if self.event.logo_metadata:
                logo_form.logo.data = _logo_data(self.event)
            if self.event.has_stylesheet:
                css_form.css_file.data = _css_file_data(self.event)
            return WPLayoutEdit.render_template('layout.html', self._conf, form=form, event=self._conf,
                                                logo_form=logo_form, css_form=css_form)
class RHLayoutLogoUpload(RHLayoutBase):
    """Handle logo uploads: validate the image and store it re-encoded as PNG."""

    def _process(self):
        f = request.files['file']
        try:
            img = Image.open(f)
        except IOError:
            # Not a readable image at all.
            flash(_('You cannot upload this file as a logo.'), 'error')
            return jsonify_data(content=None)
        if img.format.lower() not in {'jpeg', 'png', 'gif'}:
            flash(_('The file has an invalid format ({format})').format(format=img.format), 'error')
            return jsonify_data(content=None)
        if img.mode == 'CMYK':
            # PNG output cannot represent CMYK; convert and ask the user to verify.
            flash(_('The logo you uploaded is using the CMYK colorspace and has been converted to RGB. Please check if '
                    'the colors are correct and convert it manually if necessary.'), 'warning')
            img = img.convert('RGB')
        # Re-encode whatever came in as PNG so stored logos are uniform.
        image_bytes = BytesIO()
        img.save(image_bytes, 'PNG')
        image_bytes.seek(0)
        content = image_bytes.read()
        self.event.logo = content
        self.event.logo_metadata = {
            'hash': crc32(content),
            'size': len(content),
            'filename': os.path.splitext(secure_filename(f.filename, 'logo'))[0] + '.png',
            'content_type': 'image/png'
        }
        flash(_('New logo saved'), 'success')
        logger.info("New logo '%s' uploaded by %s (%s)", f.filename, session.user, self.event)
        return jsonify_data(content=_logo_data(self.event))
class RHLayoutLogoDelete(RHLayoutBase):
    """Remove the event logo and its metadata."""

    def _process(self):
        self.event.logo = None
        self.event.logo_metadata = None
        flash(_('Logo deleted'), 'success')
        logger.info("Logo of %s deleted by %s", self.event, session.user)
        return jsonify_data(content=None)
class RHLayoutCSSUpload(RHLayoutBase):
    """Store an uploaded custom stylesheet (without enabling it yet)."""

    def _process(self):
        f = request.files['file']
        self.event.stylesheet = to_unicode(f.read()).strip()
        self.event.stylesheet_metadata = {
            'hash': crc32(self.event.stylesheet),
            'size': len(self.event.stylesheet),
            'filename': secure_filename(f.filename, 'stylesheet.css')
        }
        # Flush so the stylesheet is queryable before the request commits.
        db.session.flush()
        flash(_('New CSS file saved. Do not forget to enable it ("Use custom CSS") after verifying that it is correct '
                'using the preview.'), 'success')
        logger.info('CSS file for %s uploaded by %s', self.event, session.user)
        return jsonify_data(content=_css_file_data(self.event))
class RHLayoutCSSDelete(RHLayoutBase):
    """Delete the custom stylesheet and switch custom CSS off."""

    def _process(self):
        self.event.stylesheet = None
        self.event.stylesheet_metadata = None
        # Without a stylesheet the custom-CSS toggle must be disabled too.
        layout_settings.set(self.event, 'use_custom_css', False)
        flash(_('CSS file deleted'), 'success')
        logger.info("CSS file for %s deleted by %s", self.event, session.user)
        return jsonify_data(content=None)
class RHLayoutCSSPreview(RHLayoutBase):
    """Preview the event page rendered with a chosen theme / custom stylesheet."""

    def _process(self):
        # Theme selection arrives via the query string; CSRF is disabled since
        # this is a read-only preview.
        form = CSSSelectionForm(event=self.event, formdata=request.args, csrf_enabled=False)
        css_url = None
        if form.validate():
            css_url = get_css_url(self.event, force_theme=form.theme.data, for_preview=True)
        return WPConfModifPreviewCSS(self, self._conf, form=form, css_url=css_url).display()
class RHLayoutCSSSaveTheme(RHLayoutBase):
    """Persist the theme selection made on the CSS preview page."""

    def _process(self):
        form = CSSSelectionForm(event=self.event)
        if form.validate_on_submit():
            # '_custom' is the sentinel meaning "use the uploaded stylesheet".
            layout_settings.set(self.event, 'use_custom_css', form.theme.data == '_custom')
            if form.theme.data != '_custom':
                # NOTE(review): mixes self._conf and self.event as the settings
                # target — presumably equivalent here, but worth confirming.
                layout_settings.set(self._conf, 'theme', form.theme.data)
            flash(_('Settings saved'), 'success')
            return redirect(url_for('event_layout.index', self.event))
class RHLogoDisplay(RHConferenceBaseDisplay):
    """Serve the raw event logo bytes on the public display side."""

    def _process(self):
        event = self._conf.as_event
        if not event.has_logo:
            raise NotFound
        metadata = event.logo_metadata
        # conditional=True lets Flask answer with 304s based on the metadata.
        return send_file(metadata['filename'], BytesIO(event.logo), mimetype=metadata['content_type'], conditional=True)
class RHLayoutCSSDisplay(RHConferenceBaseDisplay):
def _process(self):
event = self._conf.as_event<|fim▁hole|> data = BytesIO(event.stylesheet.encode('utf-8'))
return send_file(event.stylesheet_metadata['filename'], data, mimetype='text/css', conditional=True)<|fim▁end|>
|
if not event.has_stylesheet:
raise NotFound
|
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>"""
Django settings for SimpleChat project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '3o-kw!!=*b7o3mz6nmbllne##wiu7m_lzk%9j&p@@(ecsue&f7'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'chat',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',<|fim▁hole|> 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'SimpleChat.urls'
WSGI_APPLICATION = 'SimpleChat.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
BASE_URL = "http://192.168.0.60:9000"
try:
from local_settings import *
except ImportError:
pass<|fim▁end|>
|
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
|
<|file_name|>koenig-menu-content.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
export {default} from 'koenig-editor/components/koenig-menu-content';
|
<|file_name|>detector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#*********************************************************************
# Software License Agreement (BSD License)
#
# Copyright (c) 2011 andrewtron3000
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of the Willow Garage nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#********************************************************************/
import roslib; roslib.load_manifest('face_detection')
import rospy
import sys
import cv
from cv_bridge import CvBridge
from sensor_msgs.msg import Image
from geometry_msgs.msg import Point
from geometry_msgs.msg import PointStamped
#
# Instantiate a new opencv to ROS bridge adaptor
#
cv_bridge = CvBridge()
#
# Define the callback that will be called when a new image is received.
#
def callback(publisher, coord_publisher, cascade, imagemsg):
#
# Convert the ROS imagemsg to an opencv image.
#
image = cv_bridge.imgmsg_to_cv(imagemsg, 'mono8')
#
# Blur the image.
#
cv.Smooth(image, image, cv.CV_GAUSSIAN)
#
# Allocate some storage for the haar detect operation.
#
storage = cv.CreateMemStorage(0)
#
# Call the face detector function.
#
faces = cv.HaarDetectObjects(image, cascade, storage, 1.2, 2,
cv.CV_HAAR_DO_CANNY_PRUNING, (100,100))
#
# If faces are detected, compute the centroid of all the faces
# combined.
#
face_centroid_x = 0.0
face_centroid_y = 0.0
if len(faces) > 0:
#
# For each face, draw a rectangle around it in the image,
# and also add the position of the face to the centroid
# of all faces combined.
#
for (i, n) in faces:
x = int(i[0])
y = int(i[1])
width = int(i[2])
height = int(i[3])
cv.Rectangle(image,
(x, y),
(x + width, y + height),
cv.CV_RGB(0,255,0), 3, 8, 0)
face_centroid_x += float(x) + (float(width) / 2.0)
face_centroid_y += float(y) + (float(height) / 2.0)
#
# Finish computing the face_centroid by dividing by the
# number of faces found above.
#
face_centroid_x /= float(len(faces))
face_centroid_y /= float(len(faces))
#
# Lastly, if faces were detected, publish a PointStamped
# message that contains the centroid values.
#
pt = Point(x = face_centroid_x, y = face_centroid_y, z = 0.0)
pt_stamped = PointStamped(point = pt)
coord_publisher.publish(pt_stamped)
#
# Convert the opencv image back to a ROS image using the
# cv_bridge.
#
newmsg = cv_bridge.cv_to_imgmsg(image, 'mono8')
#
# Republish the image. Note this image has boxes around
# faces if faces were found.
#
publisher.publish(newmsg)
def listener(publisher, coord_publisher):
rospy.init_node('face_detector', anonymous=True)
#<|fim▁hole|> #
cascadeFileName = rospy.get_param("~classifier")
cascade = cv.Load(cascadeFileName)
rospy.Subscriber("/stereo/left/image_rect",
Image,
lambda image: callback(publisher, coord_publisher, cascade, image))
rospy.spin()
# This is called first.
if __name__ == '__main__':
publisher = rospy.Publisher('face_view', Image)
coord_publisher = rospy.Publisher('face_coords', PointStamped)
listener(publisher, coord_publisher)<|fim▁end|>
|
# Load the haar cascade. Note we get the
# filename from the "classifier" parameter
# that is configured in the launch script.
|
<|file_name|>link_to_test.js<|end_file_name|><|fim▁begin|>import "ember";
import EmberHandlebars from "ember-handlebars";
var compile = EmberHandlebars.compile;
var Router, App, AppView, router, container;
var set = Ember.set;
function bootApplication() {
router = container.lookup('router:main');
Ember.run(App, 'advanceReadiness');
}
// IE includes the host name
function normalizeUrl(url) {
return url.replace(/https?:\/\/[^\/]+/,'');
}
function shouldNotBeActive(selector) {
checkActive(selector, false);
}
function shouldBeActive(selector) {
checkActive(selector, true);
}
function checkActive(selector, active) {
var classList = Ember.$(selector, '#qunit-fixture')[0].className;
equal(classList.indexOf('active') > -1, active, selector + " active should be " + active.toString());
}
var updateCount, replaceCount;
function sharedSetup() {
App = Ember.Application.create({
name: "App",
rootElement: '#qunit-fixture'
});
App.deferReadiness();
updateCount = replaceCount = 0;
App.Router.reopen({
location: Ember.NoneLocation.createWithMixins({
setURL: function(path) {
updateCount++;
set(this, 'path', path);
},
replaceURL: function(path) {
replaceCount++;
set(this, 'path', path);
}
})
});
Router = App.Router;
container = App.__container__;
}
function sharedTeardown() {
Ember.run(function() { App.destroy(); });
Ember.TEMPLATES = {};
}
QUnit.module("The {{link-to}} helper", {
setup: function() {
Ember.run(function() {
sharedSetup();
Ember.TEMPLATES.app = compile("{{outlet}}");
Ember.TEMPLATES.index = compile("<h3>Home</h3>{{#link-to 'about' id='about-link'}}About{{/link-to}}{{#link-to 'index' id='self-link'}}Self{{/link-to}}");
Ember.TEMPLATES.about = compile("<h3>About</h3>{{#link-to 'index' id='home-link'}}Home{{/link-to}}{{#link-to 'about' id='self-link'}}Self{{/link-to}}");
Ember.TEMPLATES.item = compile("<h3>Item</h3><p>{{name}}</p>{{#link-to 'index' id='home-link'}}Home{{/link-to}}");
AppView = Ember.View.extend({
templateName: 'app'
});
container.register('view:app', AppView);
container.register('router:main', Router);
});
},
teardown: sharedTeardown
});
test("The {{link-to}} helper moves into the named route", function() {
Router.map(function(match) {
this.route("about");
});
bootApplication();
Ember.run(function() {
router.handleURL("/");
});
equal(Ember.$('h3:contains(Home)', '#qunit-fixture').length, 1, "The home template was rendered");
equal(Ember.$('#self-link.active', '#qunit-fixture').length, 1, "The self-link was rendered with active class");
equal(Ember.$('#about-link:not(.active)', '#qunit-fixture').length, 1, "The other link was rendered without active class");
Ember.run(function() {
Ember.$('#about-link', '#qunit-fixture').click();
});
equal(Ember.$('h3:contains(About)', '#qunit-fixture').length, 1, "The about template was rendered");
equal(Ember.$('#self-link.active', '#qunit-fixture').length, 1, "The self-link was rendered with active class");
equal(Ember.$('#home-link:not(.active)', '#qunit-fixture').length, 1, "The other link was rendered without active class");
});
test("The {{link-to}} helper supports URL replacement", function() {
Ember.TEMPLATES.index = compile("<h3>Home</h3>{{#link-to 'about' id='about-link' replace=true}}About{{/link-to}}");
Router.map(function() {
this.route("about");
});
bootApplication();
Ember.run(function() {
router.handleURL("/");
});
equal(updateCount, 0, 'precond: setURL has not been called');
equal(replaceCount, 0, 'precond: replaceURL has not been called');
Ember.run(function() {
Ember.$('#about-link', '#qunit-fixture').click();
});
equal(updateCount, 0, 'setURL should not be called');
equal(replaceCount, 1, 'replaceURL should be called once');
});
test("the {{link-to}} helper doesn't add an href when the tagName isn't 'a'", function() {
Ember.TEMPLATES.index = compile("{{#link-to 'about' id='about-link' tagName='div'}}About{{/link-to}}");
Router.map(function() {
this.route("about");
});
bootApplication();
Ember.run(function() {
router.handleURL("/");
});
equal(Ember.$('#about-link').attr('href'), undefined, "there is no href attribute");
});
test("the {{link-to}} applies a 'disabled' class when disabled", function () {
Ember.TEMPLATES.index = compile('{{#link-to "about" id="about-link" disabledWhen="shouldDisable"}}About{{/link-to}}');
App.IndexController = Ember.Controller.extend({
shouldDisable: true
});
Router.map(function() {
this.route("about");
});
bootApplication();
Ember.run(function() {
router.handleURL("/");
});
equal(Ember.$('#about-link.disabled', '#qunit-fixture').length, 1, "The link is disabled when its disabledWhen is true");
});
test("the {{link-to}} doesn't apply a 'disabled' class if disabledWhen is not provided", function () {
Ember.TEMPLATES.index = compile('{{#link-to "about" id="about-link"}}About{{/link-to}}');
Router.map(function() {
this.route("about");
});
bootApplication();
Ember.run(function() {
router.handleURL("/");
});
ok(!Ember.$('#about-link', '#qunit-fixture').hasClass("disabled"), "The link is not disabled if disabledWhen not provided");
});
test("the {{link-to}} helper supports a custom disabledClass", function () {
Ember.TEMPLATES.index = compile('{{#link-to "about" id="about-link" disabledWhen="shouldDisable" disabledClass="do-not-want"}}About{{/link-to}}');
App.IndexController = Ember.Controller.extend({
shouldDisable: true
});
Router.map(function() {
this.route("about");
});
bootApplication();
Ember.run(function() {
router.handleURL("/");
});
equal(Ember.$('#about-link.do-not-want', '#qunit-fixture').length, 1, "The link can apply a custom disabled class");
});
test("the {{link-to}} helper does not respond to clicks when disabled", function () {
Ember.TEMPLATES.index = compile('{{#link-to "about" id="about-link" disabledWhen="shouldDisable"}}About{{/link-to}}');
App.IndexController = Ember.Controller.extend({
shouldDisable: true
});
Router.map(function() {
this.route("about");
});
bootApplication();
Ember.run(function() {
router.handleURL("/");
});
Ember.run(function() {
Ember.$('#about-link', '#qunit-fixture').click();
});
equal(Ember.$('h3:contains(About)', '#qunit-fixture').length, 0, "Transitioning did not occur");
});
test("The {{link-to}} helper supports a custom activeClass", function() {
Ember.TEMPLATES.index = compile("<h3>Home</h3>{{#link-to 'about' id='about-link'}}About{{/link-to}}{{#link-to 'index' id='self-link' activeClass='zomg-active'}}Self{{/link-to}}");
Router.map(function() {
this.route("about");
});
bootApplication();
Ember.run(function() {
router.handleURL("/");
});
equal(Ember.$('h3:contains(Home)', '#qunit-fixture').length, 1, "The home template was rendered");
equal(Ember.$('#self-link.zomg-active', '#qunit-fixture').length, 1, "The self-link was rendered with active class");
equal(Ember.$('#about-link:not(.active)', '#qunit-fixture').length, 1, "The other link was rendered without active class");
});
test("The {{link-to}} helper supports leaving off .index for nested routes", function() {
Router.map(function() {
this.resource("about", function() {
this.route("item");
});
});
Ember.TEMPLATES.about = compile("<h1>About</h1>{{outlet}}");
Ember.TEMPLATES['about/index'] = compile("<div id='index'>Index</div>");
Ember.TEMPLATES['about/item'] = compile("<div id='item'>{{#link-to 'about'}}About{{/link-to}}</div>");
bootApplication();
Ember.run(router, 'handleURL', '/about/item');
equal(normalizeUrl(Ember.$('#item a', '#qunit-fixture').attr('href')), '/about');
});
test("The {{link-to}} helper supports currentWhen (DEPRECATED)", function() {
expectDeprecation('Using currentWhen with {{link-to}} is deprecated in favor of `current-when`.');
Router.map(function(match) {
this.resource("index", { path: "/" }, function() {
this.route("about");
});
this.route("item");
});
Ember.TEMPLATES.index = compile("<h3>Home</h3>{{outlet}}");
Ember.TEMPLATES['index/about'] = compile("{{#link-to 'item' id='other-link' currentWhen='index'}}ITEM{{/link-to}}");
bootApplication();
Ember.run(function() {
router.handleURL("/about");
});
equal(Ember.$('#other-link.active', '#qunit-fixture').length, 1, "The link is active since current-when is a parent route");
});
test("The {{link-to}} helper supports custom, nested, current-when", function() {
Router.map(function(match) {
this.resource("index", { path: "/" }, function() {
this.route("about");
});
this.route("item");
});
Ember.TEMPLATES.index = compile("<h3>Home</h3>{{outlet}}");
Ember.TEMPLATES['index/about'] = compile("{{#link-to 'item' id='other-link' current-when='index'}}ITEM{{/link-to}}");
bootApplication();
Ember.run(function() {
router.handleURL("/about");
});
equal(Ember.$('#other-link.active', '#qunit-fixture').length, 1, "The link is active since current-when is a parent route");
});
test("The {{link-to}} helper does not disregard current-when when it is given explicitly for a resource", function() {
Router.map(function(match) {
this.resource("index", { path: "/" }, function() {
this.route("about");
});
this.resource("items",function(){
this.route('item');
});
});
Ember.TEMPLATES.index = compile("<h3>Home</h3>{{outlet}}");
Ember.TEMPLATES['index/about'] = compile("{{#link-to 'items' id='other-link' current-when='index'}}ITEM{{/link-to}}");
bootApplication();
Ember.run(function() {
router.handleURL("/about");
});
equal(Ember.$('#other-link.active', '#qunit-fixture').length, 1, "The link is active when current-when is given for explicitly for a resource");
});
if (Ember.FEATURES.isEnabled("ember-routing-multi-current-when")) {
test("The {{link-to}} helper supports multiple current-when routes", function() {
Router.map(function(match) {
this.resource("index", { path: "/" }, function() {
this.route("about");
});
this.route("item");
this.route("foo");
});
Ember.TEMPLATES.index = compile("<h3>Home</h3>{{outlet}}");
Ember.TEMPLATES['index/about'] = compile("{{#link-to 'item' id='link1' current-when='item index'}}ITEM{{/link-to}}");
Ember.TEMPLATES['item'] = compile("{{#link-to 'item' id='link2' current-when='item index'}}ITEM{{/link-to}}");
Ember.TEMPLATES['foo'] = compile("{{#link-to 'item' id='link3' current-when='item index'}}ITEM{{/link-to}}");
bootApplication();
Ember.run(function() {
router.handleURL("/about");
});
equal(Ember.$('#link1.active', '#qunit-fixture').length, 1, "The link is active since current-when contains the parent route");
Ember.run(function() {
router.handleURL("/item");
});
equal(Ember.$('#link2.active', '#qunit-fixture').length, 1, "The link is active since you are on the active route");
Ember.run(function() {
router.handleURL("/foo");
});
equal(Ember.$('#link3.active', '#qunit-fixture').length, 0, "The link is not active since current-when does not contain the active route");
});
}
// By default a {{link-to}} click event bubbles up to surrounding {{action}}
// handlers: here the wrapping div's 'hide' action must fire once.
test("The {{link-to}} helper defaults to bubbling", function() {
  Ember.TEMPLATES.about = compile("<div {{action 'hide'}}>{{#link-to 'about.contact' id='about-contact'}}About{{/link-to}}</div>{{outlet}}");
  Ember.TEMPLATES['about/contact'] = compile("<h1 id='contact'>Contact</h1>");
  Router.map(function() {
    this.resource("about", function() {
      this.route("contact");
    });
  });
  // Counts how many times the wrapping div's action handler runs.
  var hidden = 0;
  App.AboutRoute = Ember.Route.extend({
    actions: {
      hide: function() {
        hidden++;
      }
    }
  });
  bootApplication();
  Ember.run(function() {
    router.handleURL("/about");
  });
  Ember.run(function() {
    Ember.$('#about-contact', '#qunit-fixture').click();
  });
  equal(Ember.$("#contact", "#qunit-fixture").text(), "Contact", "precond - the link worked");
  equal(hidden, 1, "The link bubbles");
});
// Passing bubbles=false must stop the click from reaching the wrapping
// {{action}} handler while still performing the transition.
test("The {{link-to}} helper supports bubbles=false", function() {
  Ember.TEMPLATES.about = compile("<div {{action 'hide'}}>{{#link-to 'about.contact' id='about-contact' bubbles=false}}About{{/link-to}}</div>{{outlet}}");
  Ember.TEMPLATES['about/contact'] = compile("<h1 id='contact'>Contact</h1>");
  Router.map(function() {
    this.resource("about", function() {
      this.route("contact");
    });
  });
  // Stays 0 when bubbling is suppressed.
  var hidden = 0;
  App.AboutRoute = Ember.Route.extend({
    actions: {
      hide: function() {
        hidden++;
      }
    }
  });
  bootApplication();
  Ember.run(function() {
    router.handleURL("/about");
  });
  Ember.run(function() {
    Ember.$('#about-contact', '#qunit-fixture').click();
  });
  equal(Ember.$("#contact", "#qunit-fixture").text(), "Contact", "precond - the link worked");
  equal(hidden, 0, "The link didn't bubble");
});
// Passing a model object to {{link-to}} must drive both the transition and
// the generated hrefs (via the route's custom serialize hook).
test("The {{link-to}} helper moves into the named route with context", function() {
  Router.map(function(match) {
    this.route("about");
    this.resource("item", { path: "/item/:id" });
  });
  Ember.TEMPLATES.about = compile("<h3>List</h3><ul>{{#each person in controller}}<li>{{#link-to 'item' person}}{{person.name}}{{/link-to}}</li>{{/each}}</ul>{{#link-to 'index' id='home-link'}}Home{{/link-to}}");
  App.AboutRoute = Ember.Route.extend({
    model: function() {
      return Ember.A([
        { id: "yehuda", name: "Yehuda Katz" },
        { id: "tom", name: "Tom Dale" },
        { id: "erik", name: "Erik Brynroflsson" }
      ]);
    }
  });
  App.ItemRoute = Ember.Route.extend({
    // Maps the passed model object onto the :id dynamic segment.
    serialize: function(object) {
      return { id: object.id };
    }
  });
  bootApplication();
  Ember.run(function() {
    router.handleURL("/about");
  });
  equal(Ember.$('h3:contains(List)', '#qunit-fixture').length, 1, "The home template was rendered");
  equal(normalizeUrl(Ember.$('#home-link').attr('href')), '/', "The home link points back at /");
  Ember.run(function() {
    Ember.$('li a:contains(Yehuda)', '#qunit-fixture').click();
  });
  equal(Ember.$('h3:contains(Item)', '#qunit-fixture').length, 1, "The item template was rendered");
  equal(Ember.$('p', '#qunit-fixture').text(), "Yehuda Katz", "The name is correct");
  // Round-trip back to the list and check every generated href.
  Ember.run(function() { Ember.$('#home-link').click(); });
  Ember.run(function() { Ember.$('#about-link').click(); });
  equal(normalizeUrl(Ember.$('li a:contains(Yehuda)').attr('href')), "/item/yehuda");
  equal(normalizeUrl(Ember.$('li a:contains(Tom)').attr('href')), "/item/tom");
  equal(normalizeUrl(Ember.$('li a:contains(Erik)').attr('href')), "/item/erik");
  Ember.run(function() {
    Ember.$('li a:contains(Erik)', '#qunit-fixture').click();
  });
  equal(Ember.$('h3:contains(Item)', '#qunit-fixture').length, 1, "The item template was rendered");
  equal(Ember.$('p', '#qunit-fixture').text(), "Erik Brynroflsson", "The name is correct");
});
// Verifies that common anchor attributes (title, rel, tabindex) supplied to
// {{link-to}} are passed through onto the generated <a> element.
test("The {{link-to}} helper binds some anchor html tag common attributes", function() {
  Ember.TEMPLATES.index = compile("<h3>Home</h3>{{#link-to 'index' id='self-link' title='title-attr' rel='rel-attr' tabindex='-1'}}Self{{/link-to}}");
  bootApplication();
  Ember.run(router, 'handleURL', '/');
  var $selfLink = Ember.$('#self-link', '#qunit-fixture');
  equal($selfLink.attr('title'), 'title-attr', "The self-link contains title attribute");
  equal($selfLink.attr('rel'), 'rel-attr', "The self-link contains rel attribute");
  equal($selfLink.attr('tabindex'), '-1', "The self-link contains tabindex attribute");
});
// Tests below only run when the 'ember-routing-linkto-target-attribute'
// feature flag is enabled: {{link-to}} with a `target` attribute.
if(Ember.FEATURES.isEnabled('ember-routing-linkto-target-attribute')) {
  // The target attribute must be rendered onto the anchor element.
  test("The {{link-to}} helper supports `target` attribute", function() {
    Ember.TEMPLATES.index = compile("<h3>Home</h3>{{#link-to 'index' id='self-link' target='_blank'}}Self{{/link-to}}");
    bootApplication();
    Ember.run(function() {
      router.handleURL("/");
    });
    var link = Ember.$('#self-link', '#qunit-fixture');
    equal(link.attr('target'), '_blank', "The self-link contains `target` attribute");
  });
  // With target='_blank' the browser should handle the click, so the helper
  // must NOT call preventDefault.
  test("The {{link-to}} helper does not call preventDefault if `target` attribute is provided", function() {
    Ember.TEMPLATES.index = compile("<h3>Home</h3>{{#link-to 'index' id='self-link' target='_blank'}}Self{{/link-to}}");
    bootApplication();
    Ember.run(function() {
      router.handleURL("/");
    });
    var event = Ember.$.Event("click");
    Ember.$('#self-link', '#qunit-fixture').trigger(event);
    equal(event.isDefaultPrevented(), false, "should not preventDefault when target attribute is specified");
  });
  // target='_self' is equivalent to no target: the helper handles the click.
  test("The {{link-to}} helper should preventDefault when `target = _self`", function() {
    Ember.TEMPLATES.index = compile("<h3>Home</h3>{{#link-to 'index' id='self-link' target='_self'}}Self{{/link-to}}");
    bootApplication();
    Ember.run(function() {
      router.handleURL("/");
    });
    var event = Ember.$.Event("click");
    Ember.$('#self-link', '#qunit-fixture').trigger(event);
    equal(event.isDefaultPrevented(), true, "should preventDefault when target attribute is `_self`");
  });
  // A non-_self target must also suppress the router transition entirely.
  test("The {{link-to}} helper should not transition if target is not equal to _self or empty", function() {
    Ember.TEMPLATES.index = compile("{{#link-to 'about' id='about-link' replace=true target='_blank'}}About{{/link-to}}");
    Router.map(function() {
      this.route("about");
    });
    bootApplication();
    Ember.run(function() {
      router.handleURL("/");
    });
    Ember.run(function() {
      Ember.$('#about-link', '#qunit-fixture').click();
    });
    notEqual(container.lookup('controller:application').get('currentRouteName'), 'about', 'link-to should not transition if target is not equal to _self or empty');
  });
}
// Quoted strings, bound paths, numbers, and objects must all be accepted as
// dynamic-segment arguments and serialize into the href.
test("The {{link-to}} helper accepts string/numeric arguments", function() {
  Router.map(function() {
    this.route('filter', { path: '/filters/:filter' });
    this.route('post', { path: '/post/:post_id' });
    this.route('repo', { path: '/repo/:owner/:name' });
  });
  App.FilterController = Ember.Controller.extend({
    filter: "unpopular",
    repo: Ember.Object.create({owner: 'ember', name: 'ember.js'}),
    post_id: 123
  });
  Ember.TEMPLATES.filter = compile('<p>{{filter}}</p>{{#link-to "filter" "unpopular" id="link"}}Unpopular{{/link-to}}{{#link-to "filter" filter id="path-link"}}Unpopular{{/link-to}}{{#link-to "post" post_id id="post-path-link"}}Post{{/link-to}}{{#link-to "post" 123 id="post-number-link"}}Post{{/link-to}}{{#link-to "repo" repo id="repo-object-link"}}Repo{{/link-to}}');
  Ember.TEMPLATES.index = compile(' ');
  bootApplication();
  Ember.run(function() { router.handleURL("/filters/popular"); });
  equal(normalizeUrl(Ember.$('#link', '#qunit-fixture').attr('href')), "/filters/unpopular");
  equal(normalizeUrl(Ember.$('#path-link', '#qunit-fixture').attr('href')), "/filters/unpopular");
  equal(normalizeUrl(Ember.$('#post-path-link', '#qunit-fixture').attr('href')), "/post/123");
  equal(normalizeUrl(Ember.$('#post-number-link', '#qunit-fixture').attr('href')), "/post/123");
  equal(normalizeUrl(Ember.$('#repo-object-link', '#qunit-fixture').attr('href')), "/repo/ember/ember.js");
});
// Regression for issue 4201: linking to a resource name ('lobby') whose
// index route takes a dynamic segment must pass the context through to the
// implicit index route instead of raising about context arguments.
test("Issue 4201 - Shorthand for route.index shouldn't throw errors about context arguments", function() {
  expect(2);
  Router.map(function() {
    this.resource('lobby', function() {
      this.route('index', { path: ':lobby_id' });
      this.route('list');
    });
  });
  App.LobbyIndexRoute = Ember.Route.extend({
    model: function(params) {
      // Asserts the context supplied to {{link-to}} reaches the model hook.
      equal(params.lobby_id, 'foobar');
      return params.lobby_id;
    }
  });
  Ember.TEMPLATES['lobby/index'] = compile("{{#link-to 'lobby' 'foobar' id='lobby-link'}}Lobby{{/link-to}}");
  Ember.TEMPLATES.index = compile("");
  Ember.TEMPLATES['lobby/list'] = compile("{{#link-to 'lobby' 'foobar' id='lobby-link'}}Lobby{{/link-to}}");
  bootApplication();
  Ember.run(router, 'handleURL', '/lobby/list');
  Ember.run(Ember.$('#lobby-link'), 'click');
  shouldBeActive('#lobby-link');
});
// When a controller is passed as a {{link-to}} context, the underlying model
// (not the controller wrapper) must be handed to the route's serialize hook.
test("The {{link-to}} helper unwraps controllers", function() {
  expect(5);
  Router.map(function() {
    this.route('filter', { path: '/filters/:filter' });
  });
  var indexObject = { filter: 'popular' };
  App.FilterRoute = Ember.Route.extend({
    model: function(params) {
      return indexObject;
    },
    serialize: function(passedObject) {
      // Fires for both href generation and the click-driven transition.
      equal(passedObject, indexObject, "The unwrapped object is passed");
      return { filter: 'popular' };
    }
  });
  App.IndexRoute = Ember.Route.extend({
    model: function() {
      return indexObject;
    }
  });
  Ember.TEMPLATES.filter = compile('<p>{{filter}}</p>');
  Ember.TEMPLATES.index = compile('{{#link-to "filter" this id="link"}}Filter{{/link-to}}');
  bootApplication();
  Ember.run(function() { router.handleURL("/"); });
  Ember.$('#link', '#qunit-fixture').trigger('click');
});
// Template code inside the {{link-to}} block must still resolve `view.*`
// paths against the surrounding view, not against the link's internal view.
test("The {{link-to}} helper doesn't change view context", function() {
  App.IndexView = Ember.View.extend({
    elementId: 'index',
    name: 'test',
    isTrue: true
  });
  Ember.TEMPLATES.index = compile("{{view.name}}-{{#link-to 'index' id='self-link'}}Link: {{view.name}}-{{#if view.isTrue}}{{view.name}}{{/if}}{{/link-to}}");
  bootApplication();
  Ember.run(function() {
    router.handleURL("/");
  });
  equal(Ember.$('#index', '#qunit-fixture').text(), 'test-Link: test-test', "accesses correct view");
});
// An unquoted first argument is a bound property lookup (on the controller or
// view), and the href must update when that property changes.
test("Quoteless route param performs property lookup", function() {
  Ember.TEMPLATES.index = compile("{{#link-to 'index' id='string-link'}}string{{/link-to}}{{#link-to foo id='path-link'}}path{{/link-to}}{{#link-to view.foo id='view-link'}}{{view.foo}}{{/link-to}}");
  // Asserts the quoted link stays at '/' while both bound links follow `foo`.
  function assertEquality(href) {
    equal(normalizeUrl(Ember.$('#string-link', '#qunit-fixture').attr('href')), '/');
    equal(normalizeUrl(Ember.$('#path-link', '#qunit-fixture').attr('href')), href);
    equal(normalizeUrl(Ember.$('#view-link', '#qunit-fixture').attr('href')), href);
  }
  App.IndexView = Ember.View.extend({
    foo: 'index',
    elementId: 'index-view'
  });
  App.IndexController = Ember.Controller.extend({
    foo: 'index'
  });
  App.Router.map(function() {
    this.route('about');
  });
  bootApplication();
  Ember.run(router, 'handleURL', '/');
  assertEquality('/');
  var controller = container.lookup('controller:index');
  var view = Ember.View.views['index-view'];
  Ember.run(function() {
    controller.set('foo', 'about');
    view.set('foo', 'about');
  });
  assertEquality('/about');
});
// While the bound route name or context is null/undefined, the link must be
// in a "loading" state: href '#', loadingClass applied, and clicks warn via
// Ember.Logger instead of transitioning.
test("link-to with null/undefined dynamic parameters are put in a loading state", function() {
  expect(19);
  // Stub Logger.warn to detect the loading-click warning; restored at the end.
  var oldWarn = Ember.Logger.warn, warnCalled = false;
  Ember.Logger.warn = function() { warnCalled = true; };
  Ember.TEMPLATES.index = compile("{{#link-to destinationRoute routeContext loadingClass='i-am-loading' id='context-link'}}string{{/link-to}}{{#link-to secondRoute loadingClass='i-am-loading' id='static-link'}}string{{/link-to}}");
  var thing = Ember.Object.create({ id: 123 });
  App.IndexController = Ember.Controller.extend({
    destinationRoute: null,
    routeContext: null
  });
  App.AboutRoute = Ember.Route.extend({
    activate: function() {
      ok(true, "About was entered");
    }
  });
  App.Router.map(function() {
    this.route('thing', { path: '/thing/:thing_id' });
    this.route('about');
  });
  bootApplication();
  Ember.run(router, 'handleURL', '/');
  // With a url: expects a loaded link; without: expects the loading state.
  function assertLinkStatus($link, url) {
    if (url) {
      equal(normalizeUrl($link.attr('href')), url, "loaded link-to has expected href");
      ok(!$link.hasClass('i-am-loading'), "loaded linkView has no loadingClass");
    } else {
      equal(normalizeUrl($link.attr('href')), '#', "unloaded link-to has href='#'");
      ok($link.hasClass('i-am-loading'), "loading linkView has loadingClass");
    }
  }
  var $contextLink = Ember.$('#context-link', '#qunit-fixture');
  var $staticLink = Ember.$('#static-link', '#qunit-fixture');
  var controller = container.lookup('controller:index');
  assertLinkStatus($contextLink);
  assertLinkStatus($staticLink);
  Ember.run(function() {
    warnCalled = false;
    $contextLink.click();
    ok(warnCalled, "Logger.warn was called from clicking loading link");
  });
  // Set the destinationRoute (context is still null).
  Ember.run(controller, 'set', 'destinationRoute', 'thing');
  assertLinkStatus($contextLink);
  // Set the routeContext to an id
  Ember.run(controller, 'set', 'routeContext', '456');
  assertLinkStatus($contextLink, '/thing/456');
  // Test that 0 isn't interpreted as falsy.
  Ember.run(controller, 'set', 'routeContext', 0);
  assertLinkStatus($contextLink, '/thing/0');
  // Set the routeContext to an object
  Ember.run(controller, 'set', 'routeContext', thing);
  assertLinkStatus($contextLink, '/thing/123');
  // Set the destinationRoute back to null.
  Ember.run(controller, 'set', 'destinationRoute', null);
  assertLinkStatus($contextLink);
  Ember.run(function() {
    warnCalled = false;
    $staticLink.click();
    ok(warnCalled, "Logger.warn was called from clicking loading link");
  });
  Ember.run(controller, 'set', 'secondRoute', 'about');
  assertLinkStatus($staticLink, '/about');
  // Click the now-active link
  Ember.run($staticLink, 'click');
  Ember.Logger.warn = oldWarn;
});
// The href of a {{link-to}} bound to a dynamic-segment param must re-render
// when that param changes, and fall back to '#' when it becomes null.
test("The {{link-to}} helper refreshes href element when one of params changes", function() {
  Router.map(function() {
    this.route('post', { path: '/posts/:post_id' });
  });
  var post = Ember.Object.create({id: '1'});
  var secondPost = Ember.Object.create({id: '2'});
  Ember.TEMPLATES.index = compile('{{#link-to "post" post id="post"}}post{{/link-to}}');
  // Fix: the original body used `indexController` without ever declaring it
  // (a stray editor placeholder token sat where these two lines belong),
  // which would throw a ReferenceError. Register the controller class and
  // look the instance up before it is used below.
  App.IndexController = Ember.Controller.extend();
  var indexController = container.lookup('controller:index');
  Ember.run(function() { indexController.set('post', post); });
  bootApplication();
  Ember.run(function() { router.handleURL("/"); });
  equal(normalizeUrl(Ember.$('#post', '#qunit-fixture').attr('href')), '/posts/1', 'precond - Link has rendered href attr properly');
  Ember.run(function() { indexController.set('post', secondPost); });
  equal(Ember.$('#post', '#qunit-fixture').attr('href'), '/posts/2', 'href attr was updated after one of the params had been changed');
  Ember.run(function() { indexController.set('post', null); });
  equal(Ember.$('#post', '#qunit-fixture').attr('href'), '#', 'href attr becomes # when one of the arguments in nullified');
});
// Passing `this` (an ObjectController proxy) as context: the href must track
// the controller's underlying model when it is swapped.
test("The {{link-to}} helper's bound parameter functionality works as expected in conjunction with an ObjectProxy/Controller", function() {
  Router.map(function() {
    this.route('post', { path: '/posts/:post_id' });
  });
  var post = Ember.Object.create({id: '1'});
  var secondPost = Ember.Object.create({id: '2'});
  Ember.TEMPLATES = {
    index: compile(' '),
    post: compile('{{#link-to "post" this id="self-link"}}selflink{{/link-to}}')
  };
  App.PostController = Ember.ObjectController.extend();
  var postController = container.lookup('controller:post');
  bootApplication();
  Ember.run(router, 'transitionTo', 'post', post);
  var $link = Ember.$('#self-link', '#qunit-fixture');
  equal(normalizeUrl($link.attr('href')), '/posts/1', 'self link renders post 1');
  // Swapping the proxied model must re-render the href.
  Ember.run(postController, 'set', 'model', secondPost);
  equal(normalizeUrl($link.attr('href')), '/posts/2', 'self link updated to post 2');
});
// The legacy {{linkTo}} spelling still works but must emit a deprecation.
test("{{linkTo}} is aliased", function() {
  Ember.TEMPLATES.index = compile("<h3>Home</h3>{{#linkTo 'about' id='about-link' replace=true}}About{{/linkTo}}");
  Router.map(function() {
    this.route("about");
  });
  expectDeprecation(function() {
    bootApplication();
  }, "The 'linkTo' view helper is deprecated in favor of 'link-to'");
  Ember.run(function() {
    router.handleURL("/");
  });
  Ember.run(function() {
    Ember.$('#about-link', '#qunit-fixture').click();
  });
  equal(container.lookup('controller:application').get('currentRouteName'), 'about', 'linkTo worked properly');
});
// A link to a resource must stay .active while any of its child routes is
// the current route.
test("The {{link-to}} helper is active when a resource is active", function() {
  Router.map(function() {
    this.resource("about", function() {
      this.route("item");
    });
  });
  Ember.TEMPLATES.about = compile("<div id='about'>{{#link-to 'about' id='about-link'}}About{{/link-to}} {{#link-to 'about.item' id='item-link'}}Item{{/link-to}} {{outlet}}</div>");
  Ember.TEMPLATES['about/item'] = compile(" ");
  Ember.TEMPLATES['about/index'] = compile(" ");
  bootApplication();
  Ember.run(router, 'handleURL', '/about');
  equal(Ember.$('#about-link.active', '#qunit-fixture').length, 1, "The about resource link is active");
  equal(Ember.$('#item-link.active', '#qunit-fixture').length, 0, "The item route link is inactive");
  Ember.run(router, 'handleURL', '/about/item');
  equal(Ember.$('#about-link.active', '#qunit-fixture').length, 1, "The about resource link is active");
  equal(Ember.$('#item-link.active', '#qunit-fixture').length, 1, "The item route link is active");
});
// Route names supplied through bound properties / #each iteration must be
// resolved per-item, and hrefs must update when the bound values change.
test("The {{link-to}} helper works in an #each'd array of string route names", function() {
  Router.map(function() {
    this.route('foo');
    this.route('bar');
    this.route('rar');
  });
  App.IndexController = Ember.Controller.extend({
    routeNames: Ember.A(['foo', 'bar', 'rar']),
    route1: 'bar',
    route2: 'foo'
  });
  Ember.TEMPLATES = {
    index: compile('{{#each routeName in routeNames}}{{#link-to routeName}}{{routeName}}{{/link-to}}{{/each}}{{#each routeNames}}{{#link-to this}}{{this}}{{/link-to}}{{/each}}{{#link-to route1}}a{{/link-to}}{{#link-to route2}}b{{/link-to}}')
  };
  // The context-switching {{#each}} form above is deprecated; assert the
  // deprecation fires on boot.
  expectDeprecation(function() {
    bootApplication();
  }, 'Using the context switching form of {{each}} is deprecated. Please use the keyword form (`{{#each foo in bar}}`) instead. See http://emberjs.com/guides/deprecations/#toc_more-consistent-handlebars-scope for more details.');
  // Compares every rendered anchor's href suffix against the expected list.
  function linksEqual($links, expected) {
    equal($links.length, expected.length, "Has correct number of links");
    var idx;
    for (idx = 0; idx < $links.length; idx++) {
      var href = Ember.$($links[idx]).attr('href');
      // Old IE includes the whole hostname as well
      equal(href.slice(-expected[idx].length), expected[idx], "Expected link to be '"+expected[idx]+"', but was '"+href+"'");
    }
  }
  linksEqual(Ember.$('a', '#qunit-fixture'), ["/foo", "/bar", "/rar", "/foo", "/bar", "/rar", "/bar", "/foo"]);
  var indexController = container.lookup('controller:index');
  Ember.run(indexController, 'set', 'route1', 'rar');
  linksEqual(Ember.$('a', '#qunit-fixture'), ["/foo", "/bar", "/rar", "/foo", "/bar", "/rar", "/rar", "/foo"]);
  Ember.run(indexController.routeNames, 'shiftObject');
  linksEqual(Ember.$('a', '#qunit-fixture'), ["/bar", "/rar", "/bar", "/rar", "/rar", "/foo"]);
});
// Non-block form: {{link-to 'Title' 'routeName'}} — first argument is the
// link text. Clicking must transition and toggle active classes.
test("The non-block form {{link-to}} helper moves into the named route", function() {
  expect(3);
  Router.map(function(match) {
    this.route("contact");
  });
  Ember.TEMPLATES.index = compile("<h3>Home</h3>{{link-to 'Contact us' 'contact' id='contact-link'}}{{#link-to 'index' id='self-link'}}Self{{/link-to}}");
  Ember.TEMPLATES.contact = compile("<h3>Contact</h3>{{link-to 'Home' 'index' id='home-link'}}{{link-to 'Self' 'contact' id='self-link'}}");
  bootApplication();
  Ember.run(function() {
    Ember.$('#contact-link', '#qunit-fixture').click();
  });
  equal(Ember.$('h3:contains(Contact)', '#qunit-fixture').length, 1, "The contact template was rendered");
  equal(Ember.$('#self-link.active', '#qunit-fixture').length, 1, "The self-link was rendered with active class");
  equal(Ember.$('#home-link:not(.active)', '#qunit-fixture').length, 1, "The other link was rendered without active class");
});
// Non-block form with an unquoted first argument: the link text is bound and
// must re-render whenever the controller property changes.
test("The non-block form {{link-to}} helper updates the link text when it is a binding", function() {
  expect(8);
  Router.map(function(match) {
    this.route("contact");
  });
  App.IndexController = Ember.Controller.extend({
    contactName: 'Jane'
  });
  Ember.TEMPLATES.index = compile("<h3>Home</h3>{{link-to contactName 'contact' id='contact-link'}}{{#link-to 'index' id='self-link'}}Self{{/link-to}}");
  Ember.TEMPLATES.contact = compile("<h3>Contact</h3>{{link-to 'Home' 'index' id='home-link'}}{{link-to 'Self' 'contact' id='self-link'}}");
  bootApplication();
  Ember.run(function() {
    router.handleURL("/");
  });
  var controller = container.lookup('controller:index');
  equal(Ember.$('#contact-link:contains(Jane)', '#qunit-fixture').length, 1, "The link title is correctly resolved");
  Ember.run(function() {
    controller.set('contactName', 'Joe');
  });
  equal(Ember.$('#contact-link:contains(Joe)', '#qunit-fixture').length, 1, "The link title is correctly updated when the bound property changes");
  Ember.run(function() {
    controller.set('contactName', 'Robert');
  });
  equal(Ember.$('#contact-link:contains(Robert)', '#qunit-fixture').length, 1, "The link title is correctly updated when the bound property changes a second time");
  Ember.run(function() {
    Ember.$('#contact-link', '#qunit-fixture').click();
  });
  equal(Ember.$('h3:contains(Contact)', '#qunit-fixture').length, 1, "The contact template was rendered");
  equal(Ember.$('#self-link.active', '#qunit-fixture').length, 1, "The self-link was rendered with active class");
  equal(Ember.$('#home-link:not(.active)', '#qunit-fixture').length, 1, "The other link was rendered without active class");
  Ember.run(function() {
    Ember.$('#home-link', '#qunit-fixture').click();
  });
  equal(Ember.$('h3:contains(Home)', '#qunit-fixture').length, 1, "The index template was rendered");
  equal(Ember.$('#contact-link:contains(Robert)', '#qunit-fixture').length, 1, "The link title is correctly updated when the route changes");
});
// Non-block form with a model context: {{link-to person.name 'item' person}}
// must serialize the context into the href and transition on click.
test("The non-block form {{link-to}} helper moves into the named route with context", function() {
  expect(5);
  Router.map(function(match) {
    this.route("item", { path: "/item/:id" });
  });
  App.IndexRoute = Ember.Route.extend({
    model: function() {
      return Ember.A([
        { id: "yehuda", name: "Yehuda Katz" },
        { id: "tom", name: "Tom Dale" },
        { id: "erik", name: "Erik Brynroflsson" }
      ]);
    }
  });
  App.ItemRoute = Ember.Route.extend({
    serialize: function(object) {
      return { id: object.id };
    }
  });
  Ember.TEMPLATES.index = compile("<h3>Home</h3><ul>{{#each person in controller}}<li>{{link-to person.name 'item' person}}</li>{{/each}}</ul>");
  Ember.TEMPLATES.item = compile("<h3>Item</h3><p>{{name}}</p>{{#link-to 'index' id='home-link'}}Home{{/link-to}}");
  bootApplication();
  Ember.run(function() {
    Ember.$('li a:contains(Yehuda)', '#qunit-fixture').click();
  });
  equal(Ember.$('h3:contains(Item)', '#qunit-fixture').length, 1, "The item template was rendered");
  equal(Ember.$('p', '#qunit-fixture').text(), "Yehuda Katz", "The name is correct");
  Ember.run(function() { Ember.$('#home-link').click(); });
  equal(normalizeUrl(Ember.$('li a:contains(Yehuda)').attr('href')), "/item/yehuda");
  equal(normalizeUrl(Ember.$('li a:contains(Tom)').attr('href')), "/item/tom");
  equal(normalizeUrl(Ember.$('li a:contains(Erik)').attr('href')), "/item/erik");
});
// Non-block form: unquoted title and route arguments are both bound property
// lookups (controller or view scope) and must update the href on change.
test("The non-block form {{link-to}} performs property lookup", function() {
  Ember.TEMPLATES.index = compile("{{link-to 'string' 'index' id='string-link'}}{{link-to path foo id='path-link'}}{{link-to view.foo view.foo id='view-link'}}");
  function assertEquality(href) {
    equal(normalizeUrl(Ember.$('#string-link', '#qunit-fixture').attr('href')), '/');
    equal(normalizeUrl(Ember.$('#path-link', '#qunit-fixture').attr('href')), href);
    equal(normalizeUrl(Ember.$('#view-link', '#qunit-fixture').attr('href')), href);
  }
  App.IndexView = Ember.View.extend({
    foo: 'index',
    elementId: 'index-view'
  });
  App.IndexController = Ember.Controller.extend({
    foo: 'index'
  });
  App.Router.map(function() {
    this.route('about');
  });
  bootApplication();
  Ember.run(router, 'handleURL', '/');
  assertEquality('/');
  var controller = container.lookup('controller:index');
  var view = Ember.View.views['index-view'];
  Ember.run(function() {
    controller.set('foo', 'about');
    view.set('foo', 'about');
  });
  assertEquality('/about');
});
// Bound link text must be HTML-escaped: setting the property to markup must
// render it as literal text, never as elements.
test("The non-block form {{link-to}} protects against XSS", function() {
  Ember.TEMPLATES.application = compile("{{link-to display 'index' id='link'}}");
  App.ApplicationController = Ember.Controller.extend({
    display: 'blahzorz'
  });
  bootApplication();
  Ember.run(router, 'handleURL', '/');
  var controller = container.lookup('controller:application');
  equal(Ember.$('#link', '#qunit-fixture').text(), 'blahzorz');
  Ember.run(function() {
    controller.set('display', '<b>BLAMMO</b>');
  });
  // The tag text appears verbatim and no real <b> element is created.
  equal(Ember.$('#link', '#qunit-fixture').text(), '<b>BLAMMO</b>');
  equal(Ember.$('b', '#qunit-fixture').length, 0);
});
// Clicking a {{link-to}} must cancel the browser's default anchor
// navigation. ('#about-link' is presumably rendered by the module's shared
// index template registered in setup — not visible in this chunk.)
test("the {{link-to}} helper calls preventDefault", function(){
  Router.map(function() { this.route("about"); });
  bootApplication();
  Ember.run(function() {
    router.handleURL('/');
  });
  var clickEvent = Ember.$.Event("click");
  Ember.$('#about-link', '#qunit-fixture').trigger(clickEvent);
  equal(clickEvent.isDefaultPrevented(), true, "should preventDefault");
});
// preventDefault=false opts out of the helper's default-cancelling behavior.
test("the {{link-to}} helper does not call preventDefault if `preventDefault=false` is passed as an option", function(){
  Ember.TEMPLATES.index = compile("{{#link-to 'about' id='about-link' preventDefault=false}}About{{/link-to}}");
  Router.map(function() {
    this.route("about");
  });
  bootApplication();
  Ember.run(router, 'handleURL', '/');
  var event = Ember.$.Event("click");
  Ember.$('#about-link', '#qunit-fixture').trigger(event);
  equal(event.isDefaultPrevented(), false, "should not preventDefault");
});
// A link whose context comes from a route that has since been exited must
// not throw when re-rendered (expect(0): passing means no error was raised).
test("the {{link-to}} helper does not throw an error if its route has exited", function(){
  expect(0);
  Ember.TEMPLATES.application = compile("{{#link-to 'index' id='home-link'}}Home{{/link-to}}{{#link-to 'post' defaultPost id='default-post-link'}}Default Post{{/link-to}}{{#if currentPost}}{{#link-to 'post' id='post-link'}}Post{{/link-to}}{{/if}}");
  App.ApplicationController = Ember.Controller.extend({
    needs: ['post'],
    currentPost: Ember.computed.alias('controllers.post.model')
  });
  App.PostController = Ember.Controller.extend({
    model: {id: 1}
  });
  Router.map(function() {
    this.route("post", {path: 'post/:post_id'});
  });
  bootApplication();
  Ember.run(router, 'handleURL', '/');
  Ember.run(function() {
    Ember.$('#default-post-link', '#qunit-fixture').click();
  });
  Ember.run(function() {
    Ember.$('#home-link', '#qunit-fixture').click();
  });
});
// The active class must track the *context* of a parent dynamic segment:
// a link to things('omg') stays active for any child route under /things/omg
// but never activates for a different segment value.
test("{{link-to}} active property respects changing parent route context", function() {
  Ember.TEMPLATES.application = compile(
    "{{link-to 'OMG' 'things' 'omg' id='omg-link'}} " +
    "{{link-to 'LOL' 'things' 'lol' id='lol-link'}} ");
  Router.map(function() {
    this.resource('things', { path: '/things/:name' }, function() {
      this.route('other');
    });
  });
  bootApplication();
  Ember.run(router, 'handleURL', '/things/omg');
  shouldBeActive('#omg-link');
  shouldNotBeActive('#lol-link');
  Ember.run(router, 'handleURL', '/things/omg/other');
  shouldBeActive('#omg-link');
  shouldNotBeActive('#lol-link');
});
// The six tests below verify href generation with query params: values equal
// to the controller defaults are omitted from the URL; supplied non-default
// values appear, serialized in alphabetical key order.
test("{{link-to}} populates href with default query param values even without query-params object", function() {
  App.IndexController = Ember.Controller.extend({
    queryParams: ['foo'],
    foo: '123'
  });
  Ember.TEMPLATES.index = compile("{{#link-to 'index' id='the-link'}}Index{{/link-to}}");
  bootApplication();
  equal(Ember.$('#the-link').attr('href'), "/", "link has right href");
});
test("{{link-to}} populates href with default query param values with empty query-params object", function() {
  App.IndexController = Ember.Controller.extend({
    queryParams: ['foo'],
    foo: '123'
  });
  Ember.TEMPLATES.index = compile("{{#link-to 'index' (query-params) id='the-link'}}Index{{/link-to}}");
  bootApplication();
  equal(Ember.$('#the-link').attr('href'), "/", "link has right href");
});
test("{{link-to}} populates href with supplied query param values", function() {
  App.IndexController = Ember.Controller.extend({
    queryParams: ['foo'],
    foo: '123'
  });
  Ember.TEMPLATES.index = compile("{{#link-to 'index' (query-params foo='456') id='the-link'}}Index{{/link-to}}");
  bootApplication();
  equal(Ember.$('#the-link').attr('href'), "/?foo=456", "link has right href");
});
test("{{link-to}} populates href with partially supplied query param values", function() {
  App.IndexController = Ember.Controller.extend({
    queryParams: ['foo', 'bar'],
    foo: '123',
    bar: 'yes'
  });
  Ember.TEMPLATES.index = compile("{{#link-to 'index' (query-params foo='456') id='the-link'}}Index{{/link-to}}");
  bootApplication();
  equal(Ember.$('#the-link').attr('href'), "/?foo=456", "link has right href");
});
test("{{link-to}} populates href with partially supplied query param values, but omits if value is default value", function() {
  App.IndexController = Ember.Controller.extend({
    queryParams: ['foo', 'bar'],
    foo: '123',
    bar: 'yes'
  });
  // foo='123' matches the default, so the href collapses to "/".
  Ember.TEMPLATES.index = compile("{{#link-to 'index' (query-params foo='123') id='the-link'}}Index{{/link-to}}");
  bootApplication();
  equal(Ember.$('#the-link').attr('href'), "/", "link has right href");
});
test("{{link-to}} populates href with fully supplied query param values", function() {
  App.IndexController = Ember.Controller.extend({
    queryParams: ['foo', 'bar'],
    foo: '123',
    bar: 'yes'
  });
  Ember.TEMPLATES.index = compile("{{#link-to 'index' (query-params foo='456' bar='NAW') id='the-link'}}Index{{/link-to}}");
  bootApplication();
  equal(Ember.$('#the-link').attr('href'), "/?bar=NAW&foo=456", "link has right href");
});
// Module for tests that click links carrying query params. The setup
// registers index/about controllers with known QP defaults so each test can
// assert against them.
QUnit.module("The {{link-to}} helper: invoking with query params", {
  setup: function() {
    Ember.run(function() {
      sharedSetup();
      App.IndexController = Ember.Controller.extend({
        queryParams: ['foo', 'bar', 'abool'],
        foo: '123',
        bar: 'abc',
        boundThing: "OMG",
        abool: true
      });
      App.AboutController = Ember.Controller.extend({
        queryParams: ['baz', 'bat'],
        baz: 'alex',
        bat: 'borf'
      });
      container.register('router:main', Router);
    });
  },
  teardown: sharedTeardown
});
// Clicking a link with no (or empty) query-params must leave the current
// controller's QP values untouched.
test("doesn't update controller QP properties on current route when invoked", function() {
  Ember.TEMPLATES.index = compile("{{#link-to 'index' id='the-link'}}Index{{/link-to}}");
  bootApplication();
  Ember.run(Ember.$('#the-link'), 'click');
  var indexController = container.lookup('controller:index');
  deepEqual(indexController.getProperties('foo', 'bar'), { foo: '123', bar: 'abc' }, "controller QP properties not");
});
test("doesn't update controller QP properties on current route when invoked (empty query-params obj)", function() {
  Ember.TEMPLATES.index = compile("{{#link-to 'index' (query-params) id='the-link'}}Index{{/link-to}}");
  bootApplication();
  Ember.run(Ember.$('#the-link'), 'click');
  var indexController = container.lookup('controller:index');
  deepEqual(indexController.getProperties('foo', 'bar'), { foo: '123', bar: 'abc' }, "controller QP properties not");
});
// {{link-to}} with neither a route name nor params is a usage error and must
// assert at boot.
test("link-to with no params throws", function() {
  Ember.TEMPLATES.index = compile("{{#link-to id='the-link'}}Index{{/link-to}}");
  expectAssertion(function() {
    bootApplication();
  }, /one or more/);
});
// Same no-op check, but with the route name inferred from the current route.
test("doesn't update controller QP properties on current route when invoked (empty query-params obj, inferred route)", function() {
  Ember.TEMPLATES.index = compile("{{#link-to (query-params) id='the-link'}}Index{{/link-to}}");
  bootApplication();
  Ember.run(Ember.$('#the-link'), 'click');
  var indexController = container.lookup('controller:index');
  deepEqual(indexController.getProperties('foo', 'bar'), { foo: '123', bar: 'abc' }, "controller QP properties not");
});
// Clicking a link that supplies a QP value must write it onto the current
// route's controller, leaving unmentioned QPs at their defaults.
test("updates controller QP properties on current route when invoked", function() {
  Ember.TEMPLATES.index = compile("{{#link-to 'index' (query-params foo='456') id='the-link'}}Index{{/link-to}}");
  bootApplication();
  Ember.run(Ember.$('#the-link'), 'click');
  var indexController = container.lookup('controller:index');
  deepEqual(indexController.getProperties('foo', 'bar'), { foo: '456', bar: 'abc' }, "controller QP properties updated");
});
// Same behavior when the route name is inferred rather than explicit.
test("updates controller QP properties on current route when invoked (inferred route)", function() {
  Ember.TEMPLATES.index = compile("{{#link-to (query-params foo='456') id='the-link'}}Index{{/link-to}}");
  bootApplication();
  Ember.run(Ember.$('#the-link'), 'click');
  var indexController = container.lookup('controller:index');
  deepEqual(indexController.getProperties('foo', 'bar'), { foo: '456', bar: 'abc' }, "controller QP properties updated");
});
// Transitioning through a QP link must update the *destination* route's
// controller, not just the URL.
test("updates controller QP properties on other route after transitioning to that route", function() {
  Router.map(function() {
    this.route('about');
  });
  Ember.TEMPLATES.index = compile("{{#link-to 'about' (query-params baz='lol') id='the-link'}}About{{/link-to}}");
  bootApplication();
  equal(Ember.$('#the-link').attr('href'), '/about?baz=lol');
  Ember.run(Ember.$('#the-link'), 'click');
  var aboutController = container.lookup('controller:about');
  deepEqual(aboutController.getProperties('baz', 'bat'), { baz: 'lol', bat: 'borf' }, "about controller QP properties updated");
  equal(container.lookup('controller:application').get('currentPath'), "about");
});
// QP values may be bound controller properties; the href must track them.
test("supplied QP properties can be bound", function() {
  var indexController = container.lookup('controller:index');
  Ember.TEMPLATES.index = compile("{{#link-to (query-params foo=boundThing) id='the-link'}}Index{{/link-to}}");
  bootApplication();
  equal(Ember.$('#the-link').attr('href'), '/?foo=OMG');
  Ember.run(indexController, 'set', 'boundThing', "ASL");
  equal(Ember.$('#the-link').attr('href'), '/?foo=ASL');
});
// Boolean QP values must serialize into the URL and round-trip on click.
test("supplied QP properties can be bound (booleans)", function() {
  var indexController = container.lookup('controller:index');
  Ember.TEMPLATES.index = compile("{{#link-to (query-params abool=boundThing) id='the-link'}}Index{{/link-to}}");
  bootApplication();
  equal(Ember.$('#the-link').attr('href'), '/?abool=OMG');
  Ember.run(indexController, 'set', 'boundThing', false);
  equal(Ember.$('#the-link').attr('href'), '/?abool=false');
  Ember.run(Ember.$('#the-link'), 'click');
  deepEqual(indexController.getProperties('foo', 'bar', 'abool'), { foo: '123', bar: 'abc', abool: false });
});
// QPs not named in the link still contribute to the href when the controller
// changes them; QPs the link *does* supply stay pinned to the supplied value.
test("href updates when unsupplied controller QP props change", function() {
  var indexController = container.lookup('controller:index');
  Ember.TEMPLATES.index = compile("{{#link-to (query-params foo='lol') id='the-link'}}Index{{/link-to}}");
  bootApplication();
  equal(Ember.$('#the-link').attr('href'), '/?foo=lol');
  Ember.run(indexController, 'set', 'bar', 'BORF');
  equal(Ember.$('#the-link').attr('href'), '/?bar=BORF&foo=lol');
  // foo stays 'lol' because the link explicitly supplies it.
  Ember.run(indexController, 'set', 'foo', 'YEAH');
  equal(Ember.$('#the-link').attr('href'), '/?bar=BORF&foo=lol');
});
// Activeness rules for links carrying (query-params): a link is "active"
// only when every QP it supplies matches the current route's QP state
// (after defaults are taken into account). Exercised at three levels:
// top-level route, a route with multiple QPs, and nested parent/child
// controllers each contributing their own QPs.
test("The {{link-to}} applies activeClass when query params are not changed", function() {
Ember.TEMPLATES.index = compile(
"{{#link-to (query-params foo='cat') id='cat-link'}}Index{{/link-to}} " +
"{{#link-to (query-params foo='dog') id='dog-link'}}Index{{/link-to}} " +
"{{#link-to 'index' id='change-nothing'}}Index{{/link-to}}"
);
Ember.TEMPLATES.search = compile(
"{{#link-to (query-params search='same') id='same-search'}}Index{{/link-to}} " +
"{{#link-to (query-params search='change') id='change-search'}}Index{{/link-to}} " +
"{{#link-to (query-params search='same' archive=true) id='same-search-add-archive'}}Index{{/link-to}} " +
"{{#link-to (query-params archive=true) id='only-add-archive'}}Index{{/link-to}} " +
"{{#link-to (query-params search='same' archive=true) id='both-same'}}Index{{/link-to}} " +
"{{#link-to (query-params search='different' archive=true) id='change-one'}}Index{{/link-to}} " +
"{{#link-to (query-params search='different' archive=false) id='remove-one'}}Index{{/link-to}} " +
"{{outlet}}"
);
Ember.TEMPLATES['search/results'] = compile(
"{{#link-to (query-params sort='title') id='same-sort-child-only'}}Index{{/link-to}} " +
"{{#link-to (query-params search='same') id='same-search-parent-only'}}Index{{/link-to}} " +
"{{#link-to (query-params search='change') id='change-search-parent-only'}}Index{{/link-to}} " +
"{{#link-to (query-params search='same' sort='title') id='same-search-same-sort-child-and-parent'}}Index{{/link-to}} " +
"{{#link-to (query-params search='same' sort='author') id='same-search-different-sort-child-and-parent'}}Index{{/link-to}} " +
"{{#link-to (query-params search='change' sort='title') id='change-search-same-sort-child-and-parent'}}Index{{/link-to}} " +
"{{#link-to (query-params foo='dog') id='dog-link'}}Index{{/link-to}} "
);
Router.map(function() {
this.resource("search", function() {
this.route("results");
});
});
// Parent controller owns 'search'/'archive'; child owns 'sort'/'showDetails'.
App.SearchController = Ember.Controller.extend({
queryParams: ['search', 'archive'],
search: '',
archive: false
});
App.SearchResultsController = Ember.Controller.extend({
queryParams: ['sort', 'showDetails'],
sort: 'title',
showDetails: true
});
bootApplication();
//Basic tests
shouldNotBeActive('#cat-link');
shouldNotBeActive('#dog-link');
Ember.run(router, 'handleURL', '/?foo=cat');
shouldBeActive('#cat-link');
shouldNotBeActive('#dog-link');
Ember.run(router, 'handleURL', '/?foo=dog');
shouldBeActive('#dog-link');
shouldNotBeActive('#cat-link');
// A link supplying no QPs is active regardless of current QP values.
shouldBeActive('#change-nothing');
//Multiple params
Ember.run(function() {
router.handleURL("/search?search=same");
});
shouldBeActive('#same-search');
shouldNotBeActive('#change-search');
shouldNotBeActive('#same-search-add-archive');
shouldNotBeActive('#only-add-archive');
shouldNotBeActive('#remove-one');
Ember.run(function() {
router.handleURL("/search?search=same&archive=true");
});
shouldBeActive('#both-same');
shouldNotBeActive('#change-one');
//Nested Controllers
Ember.run(function() {
// Note: this is kind of a strange case; sort's default value is 'title',
// so this URL shouldn't have been generated in the first place, but
// we should also be able to gracefully handle these cases.
router.handleURL("/search/results?search=same&sort=title&showDetails=true");
});
// NOTE(review): disabled assertion below — presumably a known gap for a
// child-only QP link; confirm before re-enabling.
//shouldBeActive('#same-sort-child-only');
shouldBeActive('#same-search-parent-only');
shouldNotBeActive('#change-search-parent-only');
shouldBeActive('#same-search-same-sort-child-and-parent');
shouldNotBeActive('#same-search-different-sort-child-and-parent');
shouldNotBeActive('#change-search-same-sort-child-and-parent');
});
// Numeric QP values must compare correctly against their (string) URL
// serialization: a bound value of 5 matches the URL segment page=5.
test("The {{link-to}} applies active class when query-param is number", function() {
Ember.TEMPLATES.index = compile(
"{{#link-to (query-params page=pageNumber) id='page-link'}}Index{{/link-to}} ");
App.IndexController = Ember.Controller.extend({
queryParams: ['page'],
page: 1,
pageNumber: 5
});
bootApplication();
// Current page is the default (1), link supplies 5 -> inactive.
shouldNotBeActive('#page-link');
Ember.run(router, 'handleURL', '/?page=5');
shouldBeActive('#page-link');
});
// Array QP values serialize as URL-encoded JSON ("%5B1%2C2%5D" is "[1,2]").
// Activeness must compare arrays element-wise and order-sensitively:
// [1,2] matches [1,2] but not [2,1] or [1,2,3].
test("The {{link-to}} applies active class when query-param is array", function() {
Ember.TEMPLATES.index = compile(
"{{#link-to (query-params pages=pagesArray) id='array-link'}}Index{{/link-to}} " +
"{{#link-to (query-params pages=biggerArray) id='bigger-link'}}Index{{/link-to}} " +
"{{#link-to (query-params pages=emptyArray) id='empty-link'}}Index{{/link-to}} "
);
App.IndexController = Ember.Controller.extend({
queryParams: ['pages'],
pages: [],
pagesArray: [1,2],
biggerArray: [1,2,3],
emptyArray: []
});
bootApplication();
shouldNotBeActive('#array-link');
Ember.run(router, 'handleURL', '/?pages=%5B1%2C2%5D');
shouldBeActive('#array-link');
shouldNotBeActive('#bigger-link');
shouldNotBeActive('#empty-link');
// Same elements, different order -> not a match.
Ember.run(router, 'handleURL', '/?pages=%5B2%2C1%5D');
shouldNotBeActive('#array-link');
shouldNotBeActive('#bigger-link');
shouldNotBeActive('#empty-link');
Ember.run(router, 'handleURL', '/?pages=%5B1%2C2%2C3%5D');
shouldBeActive('#bigger-link');
shouldNotBeActive('#array-link');
shouldNotBeActive('#empty-link');
});
test("The {{link-to}} helper applies active class to parent route", function() {
App.Router.map(function() {
this.resource('parent', function() {
this.route('child');
});
});
Ember.TEMPLATES.application = compile(
"{{#link-to 'parent' id='parent-link'}}Parent{{/link-to}} " +
"{{#link-to 'parent.child' id='parent-child-link'}}Child{{/link-to}} " +
"{{#link-to 'parent' (query-params foo=cat) id='parent-link-qp'}}Parent{{/link-to}} " +
"{{outlet}}"
);
App.ParentChildController = Ember.ObjectController.extend({
queryParams: ['foo'],
foo: 'bar'
});
bootApplication();
shouldNotBeActive('#parent-link');
shouldNotBeActive('#parent-child-link');
shouldNotBeActive('#parent-link-qp');
Ember.run(router, 'handleURL', '/parent/child?foo=dog');
shouldBeActive('#parent-link');
shouldNotBeActive('#parent-link-qp');
});
// With current-when='parent', activeness is decided purely by the route
// name: QP differences between the link (page=1) and the current state
// (page=2, page=3) must NOT deactivate the link. Clicking it still
// transitions to the link's own QP values (back to the default, page=1).
test("The {{link-to}} helper disregards query-params in activeness computation when current-when specified", function() {
App.Router.map(function() {
this.route('parent');
});
Ember.TEMPLATES.application = compile(
"{{#link-to 'parent' (query-params page=1) current-when='parent' id='app-link'}}Parent{{/link-to}} {{outlet}}");
Ember.TEMPLATES.parent = compile(
"{{#link-to 'parent' (query-params page=1) current-when='parent' id='parent-link'}}Parent{{/link-to}} {{outlet}}");
App.ParentController = Ember.ObjectController.extend({
queryParams: ['page'],
page: 1
});
bootApplication();
// page=1 is the default, so it is omitted from the serialized href.
equal(Ember.$('#app-link').attr('href'), '/parent');
shouldNotBeActive('#app-link');
Ember.run(router, 'handleURL', '/parent?page=2');
equal(Ember.$('#app-link').attr('href'), '/parent');
shouldBeActive('#app-link');
equal(Ember.$('#parent-link').attr('href'), '/parent');
shouldBeActive('#parent-link');
var parentController = container.lookup('controller:parent');
equal(parentController.get('page'), 2);
Ember.run(parentController, 'set', 'page', 3);
equal(router.get('location.path'), '/parent?page=3');
shouldBeActive('#app-link');
shouldBeActive('#parent-link');
// Clicking resets page to the link's value (the default), clearing the QP.
Ember.$('#app-link').click();
equal(router.get('location.path'), '/parent');
});
// Shared driver for the "eager URL updating" tests: clicking a link whose
// destination route has a slow (unresolved) model promise must update the
// URL exactly once, immediately, and resolving the promise later must not
// update it again. Relies on module-scoped state set up by the enclosing
// QUnit module: updateCount, aboutDefer, router.
// setTagName: when true, re-register the application template so the
// about-link renders as a <span> instead of an <a>.
function basicEagerURLUpdateTest(setTagName) {
expect(6);
if (setTagName) {
Ember.TEMPLATES.application = compile("{{outlet}}{{link-to 'Index' 'index' id='index-link'}}{{link-to 'About' 'about' id='about-link' tagName='span'}}");
}
bootApplication();
equal(updateCount, 0);
Ember.run(Ember.$('#about-link'), 'click');
// URL should be eagerly updated now
equal(updateCount, 1);
equal(router.get('location.path'), '/about');
// Resolve the promise.
Ember.run(aboutDefer, 'resolve');
equal(router.get('location.path'), '/about');
// Shouldn't have called update url again.
equal(updateCount, 1);
equal(router.get('location.path'), '/about');
}
var aboutDefer;
QUnit.module("The {{link-to}} helper: eager URL updating", {
setup: function() {
Ember.run(function() {
sharedSetup();
container.register('router:main', Router);
Router.map(function() {
this.route('about');
});
App.AboutRoute = Ember.Route.extend({
model: function() {
aboutDefer = Ember.RSVP.defer();
return aboutDefer.promise;
}
});
Ember.TEMPLATES.application = compile("{{outlet}}{{link-to 'Index' 'index' id='index-link'}}{{link-to 'About' 'about' id='about-link'}}");
});
},
teardown: function() {
sharedTeardown();
aboutDefer = null;
}
});
// Default <a> tag variant of the shared eager-URL-update scenario.
test("invoking a link-to with a slow promise eager updates url", function() {
basicEagerURLUpdateTest(false);
});
// The href shown in the DOM includes the app's rootURL ('/app/'), but the
// path handed to the Location class on eager update must be root-relative
// ('/about') — the Location implementation re-applies rootURL itself.
test("when link-to eagerly updates url, the path it provides does NOT include the rootURL", function() {
expect(2);
// HistoryLocation is the only Location class that will cause rootURL to be
// prepended to link-to href's right now
var HistoryTestLocation = Ember.HistoryLocation.extend({
// Fake window.location so the test never depends on the real browser URL.
location: {
hash: '',
hostname: 'emberjs.com',
href: 'http://emberjs.com/app/',
pathname: '/app/',
protocol: 'http:',
port: '',
search: ''
},
// Don't actually touch the URL
replaceState: function(path) {},
pushState: function(path) {},
// Record the path the router hands us instead of mutating history.
setURL: function(path) {
set(this, 'path', path);
},
replaceURL: function(path) {
set(this, 'path', path);
}
});
container.register('location:historyTest', HistoryTestLocation);
Router.reopen({
location: 'historyTest',
rootURL: '/app/'
});
bootApplication();
// href should have rootURL prepended
equal(Ember.$('#about-link').attr('href'), '/app/about');
Ember.run(Ember.$('#about-link'), 'click');
// Actual path provided to Location class should NOT have rootURL
equal(router.get('location.path'), '/about');
});
// Same scenario with tagName='span': eager URL updating must not depend on
// the link rendering as an anchor element.
test("non `a` tags also eagerly update URL", function() {
basicEagerURLUpdateTest(true);
});
// If the destination's model promise rejects synchronously within the run
// loop, the transition fails and the URL must remain untouched.
test("invoking a link-to with a promise that rejects on the run loop doesn't update url", function() {
App.AboutRoute = Ember.Route.extend({
model: function() {
return Ember.RSVP.reject();
}
});
bootApplication();
Ember.run(Ember.$('#about-link'), 'click');
// Shouldn't have called update url.
equal(updateCount, 0);
equal(router.get('location.path'), '', 'url was not updated');
});
// Aborting inside willTransition must prevent the eager URL update entirely.
// The ok() inside the action also asserts the hook actually fired.
test("invoking a link-to whose transition gets aborted in will transition doesn't update the url", function() {
App.IndexRoute = Ember.Route.extend({
actions: {
willTransition: function(transition) {
ok(true, "aborting transition");
transition.abort();
}
}
});
bootApplication();
Ember.run(Ember.$('#about-link'), 'click');
// Shouldn't have called update url.
equal(updateCount, 0);
equal(router.get('location.path'), '', 'url was not updated');
});
|
App.IndexController = Ember.Controller.extend();
var indexController = container.lookup('controller:index');
|
<|file_name|>ZouHeNode.cpp<|end_file_name|><|fim▁begin|>#include <iostream>
#include <stdexcept>
#include <vector>
#include "latticeBase.hpp"
#include "collisionBase.hxx"
#include "latticeNode.hxx"
#include "latticeModel.hxx"
#include "ZouHeNode.hpp"
#include "latticeNode.hxx"
// Zou/He (non-equilibrium bounce-back) velocity boundary-condition node set.
// Precomputes the three coefficients used to reconstruct the unknown
// post-streaming distribution functions on edges and corners:
//   beta1_ = c / (9*cs^2) = 1/(3c)  (since cs^2 = c^2/3)
//   beta2_ = 1/(2c)
//   beta3_ = beta2_ - beta1_ = 1/(6c)
// which correspond to the 2/3 (as 2*beta1_), 1/2 and 1/6 factors of the
// standard Zou/He formulas, divided by the lattice speed c.
ZouHeNode::ZouHeNode
(
latticeBase &lb,
collisionBase &cb,
latticeModelD2Q9 &D2Q9,
fluidField &field
)
: boundaryNode(false, false, lb),
nodes {},
cb_ (cb),
is_normal_flow_ {false},
beta1_ {},
beta2_ {},
beta3_ {},
D2Q9_ (D2Q9),
field_ (field)
{
const auto c = lb_.getLatticeSpeed();
// Lattice speed of sound squared for the D2Q9 model: cs^2 = c^2 / 3.
const auto cs_sqr = c * c / 3.0;
beta1_ = c / (9.0 *cs_sqr);
beta2_ = 0.5 / c;
beta3_ = beta2_ - beta1_;
}
// Registers a boundary node at lattice coordinates (x, y) with prescribed
// velocity (u_x, u_y). The node is classified from its position: a node
// touching both a horizontal and a vertical wall is a corner (index_i
// encodes which corner: 0=bottom-left, 1=bottom-right, 2=top-left,
// 3=top-right); otherwise it is an edge node (index_i: 0=right, 1=top,
// 2=left, 3=bottom).
void ZouHeNode::addNode
(
std::size_t x,
std::size_t y,
double u_x,
double u_y
)
{
  const auto nx = lb_.getNumberOfNx();
  const auto ny = lb_.getNumberOfNy();
  // Linear index of (x, y) in row-major order.
  const auto n = y * nx + x;
  const bool on_left = x == 0;
  const bool on_right = x == nx - 1;
  const bool on_bottom = y == 0;
  const bool on_top = y == ny - 1;
  const bool is_corner = (on_top || on_bottom) && (on_left || on_right);
  if (is_corner)
  {
    // Sequential overwrites keep the original precedence for degenerate
    // single-row/column lattices (later checks win).
    auto corner_i = -1;
    if (on_bottom && on_left) corner_i = 0;
    if (on_bottom && on_right) corner_i = 1;
    if (on_top && on_left) corner_i = 2;
    if (on_top && on_right) corner_i = 3;
    nodes.push_back(latticeNode(x, y, n, u_x, u_y, true, corner_i));
  }
  else
  {
    auto edge_i = -1;
    if (on_right) edge_i = 0;
    if (on_top) edge_i = 1;
    if (on_left) edge_i = 2;
    if (on_bottom) edge_i = 3;
    nodes.push_back(latticeNode(x, y, n, u_x, u_y, false, edge_i));
  }
}
// Applies the Zou/He boundary condition to every registered node, writing
// the reconstructed distribution functions into df (indexed [node][dir]).
// When is_modify_stream is true this implementation deliberately does
// nothing — presumably that phase is handled elsewhere (confirm against
// the caller's update sequence).
void ZouHeNode::updateNode
(
std::vector<std::vector<double>> &df,
bool is_modify_stream
)
{
  if (is_modify_stream) return;
  // Iterate by reference: the original copied each latticeNode per
  // iteration, which is needless — updateCorner/updateEdge only read it.
  for (auto &node : nodes)
  {
    if (node.corner)
    {
      ZouHeNode::updateCorner(df, node);
    }
    else
    {
      ZouHeNode::updateEdge(df, node);
    }
  }  // n
}
// Applies the Zou/He rule on a straight boundary edge: from the known
// post-streaming populations and the prescribed wall velocity, solve for
// the wall density, then reconstruct the three unknown (wall-incoming)
// populations. node.index_i selects the edge: 0=right, 1=top, 2=left,
// 3=bottom. When is_normal_flow_ is set, the velocity of the adjacent
// interior node is used instead of the node's prescribed velocity
// (zero-gradient outflow style).
void ZouHeNode::updateEdge
(
std::vector<std::vector<double>> &df,
latticeNode &node
)
{
const auto n = node.n_node;
const auto nx = lb_.getNumberOfNx();
const auto c = lb_.getLatticeSpeed();
switch(node.index_i)
{
case 0:
{ // right
// n - 1 is the interior neighbor to the left of this right-edge node.
auto vel = is_normal_flow_ ? field_.u[n - 1] : node.u_node;
// Zou/He density: known populations over (1 + u_x/c) for a wall whose
// outward normal points in +x.
const auto rho_node = (df[n][0] + df[n][D2Q9_.N] + df[n][D2Q9_.S] + 2.0 * (df[n][D2Q9_.E] +
df[n][D2Q9_.NE] + df[n][D2Q9_.SE])) / (1.0 + vel[0] / c);
// Transverse non-equilibrium correction shared by the two diagonals.
const auto df_diff = 0.5 * (df[n][D2Q9_.S] - df[n][D2Q9_.N]);
// From here on vel holds momentum components (rho * u).
for (auto &u : vel) u *= rho_node;
df[n][D2Q9_.W] = df[n][D2Q9_.E] - 2.0 * beta1_ * vel[0];
df[n][D2Q9_.NW] = df[n][D2Q9_.SE] + df_diff - beta3_ * vel[0] + beta2_ * vel[1];
df[n][D2Q9_.SW] = df[n][D2Q9_.NE] - df_diff - beta3_ * vel[0] - beta2_ * vel[1];
break;
}
case 1:
{ // top
// n - nx is the interior neighbor directly below this top-edge node.
auto vel = is_normal_flow_ ? field_.u[n - nx] : node.u_node;
const auto rho_node = (df[n][0] + df[n][D2Q9_.E] + df[n][D2Q9_.W] + 2.0 * (df[n][D2Q9_.N] +
df[n][D2Q9_.NE] + df[n][D2Q9_.NW])) / (1.0 + vel[1] / c);
const auto df_diff = 0.5 * (df[n][D2Q9_.E] - df[n][D2Q9_.W]);
for (auto &u : vel) u *= rho_node;
df[n][D2Q9_.S] = df[n][D2Q9_.N] - 2.0 * beta1_ * vel[1];
df[n][D2Q9_.SW] = df[n][D2Q9_.NE] + df_diff - beta2_ * vel[0] - beta3_ * vel[1];
df[n][D2Q9_.SE] = df[n][D2Q9_.NW] - df_diff + beta2_ * vel[0] - beta3_ * vel[1];
break;
}
case 2:
{ // left
// n + 1 is the interior neighbor to the right of this left-edge node.
auto vel = is_normal_flow_ ? field_.u[n + 1] : node.u_node;
const auto rho_node = (df[n][0] + df[n][D2Q9_.N] + df[n][D2Q9_.S] + 2.0 * (df[n][D2Q9_.W] +
df[n][D2Q9_.NW] + df[n][D2Q9_.SW])) / (1.0 - vel[0] / c);
const auto df_diff = 0.5 * (df[n][D2Q9_.S] - df[n][D2Q9_.N]);
for (auto &u : vel) u *= rho_node;
df[n][D2Q9_.E] = df[n][D2Q9_.W] + 2.0 * beta1_ * vel[0];
df[n][D2Q9_.NE] = df[n][D2Q9_.SW] + df_diff + beta3_ * vel[0] + beta2_ * vel[1];
df[n][D2Q9_.SE] = df[n][D2Q9_.NW] - df_diff + beta3_ * vel[0] - beta2_ * vel[1];
break;
}
case 3:
{ // bottom
// n + nx is the interior neighbor directly above this bottom-edge node.
auto vel = is_normal_flow_ ? field_.u[n + nx] : node.u_node;
const auto rho_node = (df[n][0] + df[n][D2Q9_.E] + df[n][D2Q9_.W] + 2.0 * (df[n][D2Q9_.S] +
df[n][D2Q9_.SW] + df[n][D2Q9_.SE])) / (1.0 - vel[1] / c);
const auto df_diff = 0.5 * (df[n][D2Q9_.W] - df[n][D2Q9_.E]);
for (auto &u : vel) u *= rho_node;
df[n][D2Q9_.N] = df[n][D2Q9_.S] + 2.0 * beta1_ * vel[1];
df[n][D2Q9_.NE] = df[n][D2Q9_.SW] + df_diff + beta2_ * vel[0] + beta3_ * vel[1];
df[n][D2Q9_.NW] = df[n][D2Q9_.SE] - df_diff - beta2_ * vel[0] + beta3_ * vel[1];
break;
}
default:
{
throw std::runtime_error("Not a side");
}
}
}
void ZouHeNode::updateCorner
(
std::vector<std::vector<double>> &df,
latticeNode &node
)
{
const auto n = node.n_node;
auto vel = node.u_node;
const auto nx = lb_.getNumberOfNx();
const auto nc = lb_.getNumberOfDirections();
switch (node.index_i)
{
case 0:
{ // bottom-left
auto rho_node = 0.5 * (cb_.rho_[n + nx] + cb_.rho_[n + 1]);
for (auto &u : vel) u *= rho_node;
df[n][D2Q9_.E] = df[n][D2Q9_.W] + 2.0 * beta1_ * vel[0];
df[n][D2Q9_.N] = df[n][D2Q9_.S] + 2.0 * beta1_ * vel[1];
df[n][D2Q9_.NE] = df[n][D2Q9_.SW] + 0.5 * beta1_ * vel[0] + 0.5 * beta1_ * vel[1];
df[n][D2Q9_.NW] = -0.5 * beta3_ * vel[0] + 0.5 * beta3_ * vel[1];
df[n][D2Q9_.SE] = 0.5 * beta3_ * vel[0] - 0.5 * beta3_ * vel[1];
for (auto i = 1u; i < nc; ++i) rho_node -= df[n][i];
df[n][0] = rho_node;
break;
}
case 1:
{ // bottom-right
auto rho_node = 0.5 * (cb_.rho_[n + nx] + cb_.rho_[n - 1]);
for (auto &u : vel) u *= rho_node;
df[n][D2Q9_.W] = df[n][D2Q9_.E] - 2.0 * beta1_ * vel[0];
df[n][D2Q9_.N] = df[n][D2Q9_.S] + 2.0 * beta1_ * vel[1];
df[n][D2Q9_.NW] = df[n][D2Q9_.SE] - 0.5 * beta1_ * vel[0] + 0.5 * beta1_ * vel[1];
df[n][D2Q9_.NE] = 0.5 * beta3_ * vel[0] + 0.5 * beta3_ * vel[1];
df[n][D2Q9_.SW] = -0.5 * beta3_ * vel[0] - 0.5 * beta3_ * vel[1];
for (auto i = 1u; i < nc; ++i) rho_node -= df[n][i];
df[n][0] = rho_node;
break;<|fim▁hole|> { // top-left
auto rho_node = 0.5 * (cb_.rho_[n - nx] + cb_.rho_[n + 1]);
for (auto &u : vel) u *= rho_node;
df[n][D2Q9_.E] = df[n][D2Q9_.W] + 2.0 * beta1_ * vel[0];
df[n][D2Q9_.S] = df[n][D2Q9_.N] - 2.0 * beta1_ * vel[1];
df[n][D2Q9_.SE] = df[n][D2Q9_.NW] + 0.5 * beta1_ * vel[0] - 0.5 * beta1_ * vel[1];
df[n][D2Q9_.NE] = 0.5 * beta3_ * vel[0] + 0.5 * beta3_ * vel[1];
df[n][D2Q9_.SW] = -0.5 * beta3_ * vel[0] - 0.5 * beta3_ * vel[1];
for (auto i = 1u; i < nc; ++i) rho_node -= df[n][i];
df[n][0] = rho_node;
break;
}
case 3:
{ // top-right
auto rho_node = 0.5 * (cb_.rho_[n - nx] + cb_.rho_[n - 1]);
for (auto &u : vel) u *= rho_node;
df[n][D2Q9_.W] = df[n][D2Q9_.E] - 2.0 * beta1_ * vel[0];
df[n][D2Q9_.S] = df[n][D2Q9_.N] - 2.0 * beta1_ * vel[1];
df[n][D2Q9_.SW] = df[n][D2Q9_.NE] - 0.5 * beta1_ * vel[0] - 0.5 * beta1_ * vel[1];
df[n][D2Q9_.NW] = -0.5 * beta3_ * vel[0] + 0.5 * beta3_ * vel[1];
df[n][D2Q9_.SE] = 0.5 * beta3_ * vel[0] - 0.5 * beta3_ * vel[1];
for (auto i = 1u; i < nc; ++i) rho_node -= df[n][i];
df[n][0] = rho_node;
break;
}
default:
{
throw std::runtime_error("Not a corner");
}
}
}
void ZouHeNode::toggleNormalFlow()
{
is_normal_flow_ = true;
}<|fim▁end|>
|
}
case 2:
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.