| prompt | completion |
| --- | --- |
| large_string (lengths 70 to 991k) | large_string (lengths 0 to 1.02k) |
<|file_name|>PeopleYouMayKnow.java<|end_file_name|><|fim▁begin|>package com.linkedin.automation;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.openqa.selenium.By;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.chrome.ChromeDriver;
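/*
 * Automates LinkedIn friend requests with Selenium: it logs in with the
 * hardcoded sample credentials below, opens the "My Network" page and
 * repeatedly clicks the "加为好友" ("Add friend") button, scrolling every
 * few clicks so that more suggestions load. Note that implicitlyWait()
 * sets a global element-lookup timeout; it is not a pause.
 */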
public class PeopleYouMayKnow {
public void scrollThePage(WebDriver webDriver) {
int sleepTime = 100;
JavascriptExecutor js = (JavascriptExecutor) webDriver;
js.executeScript("window.scrollTo(0, (document.body.scrollHeight)/2)");
sleep(sleepTime);
js.executeScript("window.scrollTo(0, (document.body.scrollHeight)/6)");
sleep(sleepTime);
}
public void sleep(int time) {
try {
Thread.sleep(time);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
public static void main(String[] args) throws InterruptedException {
PeopleYouMayKnow obj = new PeopleYouMayKnow();
WebDriver driver;
System.setProperty("webdriver.chrome.driver", "/temp/chromedriver_win32/chromedriver.exe");
driver = new ChromeDriver();
driver.get("https://www.linkedin.com");
driver.manage().window().maximize();
WebElement account = driver.findElements(By.xpath(".//input[@id='login-email']")).get(0);
account.sendKeys("17091275816");
driver.manage().timeouts().implicitlyWait(1, TimeUnit.SECONDS);
WebElement pass = driver.findElement(By.xpath(".//input[@id='login-password']"));
pass.sendKeys("hiro12345");
driver.manage().timeouts().implicitlyWait(2, TimeUnit.SECONDS);
WebElement button = driver.findElement(By.xpath(".//input[@id='login-submit']"));
button.click();
driver.manage().timeouts().implicitlyWait(2, TimeUnit.SECONDS);
for (int i = 0; i < 50; i++) {<|fim▁hole|> while (true) {
try {
driver.manage().timeouts().implicitlyWait(3, TimeUnit.SECONDS);
List<WebElement> elements = driver
.findElements(By.xpath(".//button[@class='button-secondary-small']/span[text()='加为好友']"));
if (!elements.isEmpty()) {
elements.get(0).click();
driver.manage().timeouts().implicitlyWait(2, TimeUnit.SECONDS);
Thread.sleep(10000);
count++;
} else {
break;
}
} catch (Exception e) {
break;
}
if (count % 6 == 0) {
obj.scrollThePage(driver);
}
}
}
}
}<|fim▁end|> | driver.get("http://www.linkedin.com/mynetwork/");
driver.manage().timeouts().implicitlyWait(10, TimeUnit.SECONDS);
int count = 0; |
<|file_name|>WorkflowReplacementComponent.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react'
import { WorkflowReplacementModel, WorkflowReplacementItemEmbedded, CaseActivityEntity, WorkflowOperation, NewTasksEmbedded } from '../Signum.Entities.Workflow'
import { TypeContext } from '@framework/Lines'
import { ValueSearchControlLine } from '@framework/Search'
import { symbolNiceName } from '@framework/Reflection'
import { PreviewTask } from '../WorkflowClient'
import { is } from "@framework/Signum.Entities";
import { useForceUpdate } from '@framework/Hooks'
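// Renders one table row per node replacement: the left cell counts the pending
// CaseActivities (doneDate == null) still attached to the old node, and the
// right cell lets the user map them onto a node of the new workflow version,
// or drop them via the DELETE option.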
export default function WorkflowReplacementComponent(p: { ctx: TypeContext<WorkflowReplacementModel> }) {
var ctx = p.ctx;
var newTasks = ctx.value.newTasks.map(a => a.element);
return (
<div>
{ctx.value.replacements.length > 0 &&
<table className="table">
<thead>
<tr>
<td>{WorkflowReplacementModel.nicePropertyName(a => a.replacements[0].element.oldNode)}</td>
<td>{WorkflowReplacementModel.nicePropertyName(a => a.replacements[0].element.newNode)}</td>
</tr>
</thead>
<tbody>
{ctx.mlistItemCtxs(a => a.replacements).map((ectx, i) =>
<tr key={i}>
<td>
<ValueSearchControlLine ctx={ectx}
labelText={ectx.value.oldNode.toStr}
findOptions={{
queryName: CaseActivityEntity,
filterOptions: [
<|fim▁hole|> { token: CaseActivityEntity.token(e => e.workflowActivity), value: ectx.value.oldNode },
{ token: CaseActivityEntity.token(e => e.doneDate), value: null }
]
}} />
</td>
<td>
<WorkflowReplacementItemCombo
ctx={ectx}
previewTasks={newTasks} />
</td>
</tr>)
}
</tbody>
</table>}
</div>
);
}
export function WorkflowReplacementItemCombo(p: { ctx: TypeContext<WorkflowReplacementItemEmbedded>, previewTasks: NewTasksEmbedded[] }) {
const forceUpdate = useForceUpdate();
function handleChange(e: React.FormEvent<any>) {
p.ctx.subCtx(a => a.newNode).value = (e.currentTarget as HTMLSelectElement).value;
forceUpdate();
}
const ctx = p.ctx;
return (
<select value={ctx.value.newNode ?? ""} className="form-select form-select-sm" onChange={handleChange}>
<option value=""> - {symbolNiceName(WorkflowOperation.Delete).toUpperCase()} - </option>
{p.previewTasks.filter(pt => is(pt.subWorkflow, ctx.value.subWorkflow))
.map(pt => <option key={pt.bpmnId} value={pt.bpmnId}>{pt.name}</option>)}
</select>
);
}<|fim▁end|> | |
<|file_name|>constraints.py<|end_file_name|><|fim▁begin|>from math import sqrt
import gtk
from gettext import gettext as _
from ase.gui.widgets import pack, Help
class Constraints(gtk.Window):
def __init__(self, gui):
gtk.Window.__init__(self)
self.set_title(_('Constraints'))
vbox = gtk.VBox()
b = pack(vbox, [gtk.Button(_('Constrain')),
gtk.Label(_(' selected atoms'))])[0]
b.connect('clicked', self.selected)
b = pack(vbox, [gtk.Button(_('Constrain')),
gtk.Label(_(' immobile atoms:'))])[0]
b.connect('clicked', self.immobile)
b = pack(vbox, [gtk.Button(_('Unconstrain')),
gtk.Label(_(' selected atoms:'))])[0]
b.connect('clicked', self.unconstrain)
b = pack(vbox, gtk.Button(_('Clear constraints')))
b.connect('clicked', self.clear)
close = pack(vbox, gtk.Button(_('Close')))
close.connect('clicked', lambda widget: self.destroy())
self.add(vbox)
vbox.show()
self.show()
self.gui = gui<|fim▁hole|> def selected(self, button):
self.gui.images.dynamic[self.gui.images.selected] = False
self.gui.draw()
def unconstrain(self, button):
self.gui.images.dynamic[self.gui.images.selected] = True
self.gui.draw()
def immobile(self, button):
self.gui.images.set_dynamic()
self.gui.draw()
def clear(self, button):
self.gui.images.dynamic[:] = True
self.gui.draw()<|fim▁end|> | |
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from __future__ import absolute_import
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "service.settings")
from django.core.management import execute_from_command_line<|fim▁hole|>
execute_from_command_line(sys.argv)<|fim▁end|> | |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import settings
# -*- coding: utf-8 -*-
from django import http
from django.shortcuts import render
from main import models
from main import myforms
from main import cfg
from main import tools
from main import data_render
from main import database_manager
from main import scripts
from main import errors
from main import user_settings
import logging
logger = logging.getLogger(__name__)
import csv
import datetime
def __getIds(raw_items, item_id):
l = []
for k in raw_items:
ids = k.split(',')
l.append(ids[item_id])
return l
import xlwt
def __export_xls(data_table, filename="tabella"):
# Create the HttpResponse object with the appropriate Excel headers.
response = http.HttpResponse(mimetype='application/ms-excel; charset=utf-8')
response['Content-Disposition'] = 'attachment; filename="%s_%s.xls"' % (filename, datetime.datetime.today().strftime("%d-%m-%Y"))
book = xlwt.Workbook(encoding='utf-8')
sheet = book.add_sheet('Elenco')
#Add header
export_list = user_settings.settings_columView('export_table')
for colum,j in enumerate(export_list):
sheet.write(0, colum, "%s" % j.replace('_', ' ').capitalize())
#Write table
for row,i in enumerate(data_table):
for colum,j in enumerate(export_list):
# offset by one row so the header row is not overwritten.
sheet.write(row + 1, colum, data_render.formatFields(i,j, default_text="-"))
book.save(response)
return response
def __export_csv(data_table, filename="tabella"):
# Create the HttpResponse object with the appropriate CSV header.
response = http.HttpResponse(mimetype='text/csv')
response['Content-Disposition'] = 'attachment; filename="%s_%s.csv"' % (filename, datetime.datetime.today().strftime("%d-%m-%Y"))
export_list = user_settings.settings_columView('export_table')
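# UTF-8 byte order mark, written first so Excel detects the file encoding.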
response.write("\xEF\xBB\xBF")
writer = tools.UnicodeWriter(response, delimiter=';')
writer.writerow(["%s" % j.replace('_', ' ').capitalize() for j in export_list])
for item_dict in data_table:
l = []
for i in export_list:
l.append(data_render.formatFields(item_dict, i, default_text="-"))
writer.writerow(l)
return response
def export_table(request):
search_string = request.GET.get('search_keys','')
data_table = database_manager.search_fullText(search_string)
return __export_xls(data_table, "Anagrafe")
def home(request, d={}):
form = myforms.RangeDataSelect()
data = ''
notification = ''
# Defaults for the first visit, before any search form has been submitted
form_dict = {
'search_keys' : "",
'filter_type' : None,
'ref_month' : None,
'ref_year' : None,
'order_by_field' : "",
'ordering' : "",
}
if request.method == 'POST':
selected_rows = request.POST.getlist('row_select', [])
action = request.POST.get('button_action', '')
if action == 'Lettera':
ids = __getIds(selected_rows, data_render.CLIENTE_ID)
data_to_render = database_manager.search_ids('main_cliente.id', ids)
return generate_report(data_to_render)
elif action == 'Scarica Tabella':
ids = __getIds(selected_rows, data_render.CLIENTE_ID)
data_to_render = database_manager.search_ids('main_cliente.id', ids)
return __export_xls(data_to_render, "Elenco")
else:
for i in selected_rows:
ids = i.split(',')
verifica_id = ids[data_render.VERIFICA_ID]
if verifica_id != 'None':
_id = int(verifica_id)
if action == 'Apri':
models.Verifica.objects.filter(id=_id).update(stato_verifica='A')
if action == 'Chiudi':
models.Verifica.objects.filter(id=_id).update(stato_verifica='C')
if action == 'Sospendi':
models.Verifica.objects.filter(id=_id).update(stato_verifica='S')
if request.method == 'GET' and request.GET != {}:
form = myforms.RangeDataSelect(request.GET)
if form.is_valid():
form_dict['search_keys'] = form.cleaned_data['search_keys']
form_dict['filter_type'] = form.cleaned_data['filter_type']
form_dict['ref_month'] = form.cleaned_data['ref_month']
form_dict['ref_year'] = form.cleaned_data['ref_year']
form_dict['order_by_field'] = form.cleaned_data['order_by_field']
form_dict['ordering'] = form.cleaned_data['ordering']
data_to_render = database_manager.search_inMonth(**form_dict)
dr = data_render.DataRender(data_to_render)
dr.selectColums(user_settings.settings_columView('home_view'))
tb_top = [
"<button class=\"btn btn-info dropdown-toggle\" data-toggle=\"dropdown\">Seleziona \
<span class=\"caret\"></span></button> \
<ul class=\"dropdown-menu\"> \
<li><a id=\"action\" href=\"#\">Aperti</a></li> \
<li><a id=\"action\" href=\"#\">Sospesi</a></li> \
<li><a id=\"action\" href=\"#\">Chiusi</a></li> \
<li class=\"divider\"></li> \
<li><a id=\"action\" href=\"#\">Tutti</a></li> \
<li><a id=\"action\" href=\"#\">Nessuno</a></li> \
</ul>",
"<input class=\"btn btn-info\" type=\"submit\" name=\"button_action\" value=\"Apri\">",
"<input class=\"btn btn-info\" type=\"submit\" name=\"button_action\" value=\"Chiudi\">",
"<input class=\"btn btn-info\" type=\"submit\" name=\"button_action\" value=\"Sospendi\">",
"<input class=\"btn btn-info\" type=\"submit\" name=\"button_action\" value=\"Lettera\">",
"<input class=\"btn btn-info\" type=\"submit\" name=\"button_action\" value=\"Scarica Tabella\">",
]
tb_left = [
"<input type=\"checkbox\" name=\"row_select\" id=\"{stato_verifica}\" value=\"{cliente_id},{impianto_id},{verifica_id},{intervento_id}\">"
]
dr.toolbar(top=tb_top, left=tb_left)
dr.msgItemsEmpty("<br><h3>La ricerca non ha prodotto risultati.</h3>")
dr.msgStatistics(("<br><h2>Nel mese di %s " % myforms.monthStr(form_dict['ref_month'])) + "COUNT interventi in scadenza.</h2><br>")
dr.showStatistics()
dr.orderUrl('home', form_dict)
data += dr.toTable()
form_dict['status'] = True
data_to_render = database_manager.search_inMonth(**form_dict)
dr = data_render.DataRender(data_to_render)
dr.selectColums(user_settings.settings_columView('home_view'))
dr.toolbar(top=tb_top, left=tb_left)
dr.msgItemsEmpty("")
dr.msgStatistics(("<br><h2>N.COUNT interventi chiusi nel mese di %s" % myforms.monthStr(form_dict['ref_month'])) + ".</h2><br>")
dr.showStatistics()
data += dr.toTable()
if d:
notification = data_render.notification(d['message_hdr'], d['message'], d['message_type'])
return render(request, 'home.html',{'query_path':request.get_full_path(),
'notification': notification,
'data': data,
'data_form': form,
'scripts': scripts.HOME_ADD_JS,
})
def populatedb(request):
#data = tools.insert_csv_files(cli_on=False)
data = tools.load_csv('/home/asterix/gestionale_www/main/elenco2011.csv')
return _display_ok(request, "DB aggiornato con successo\n" + data)
def test(request):
print request.POST.getlist('or', [])
show = cfg.HOME_STD_VIEW
hide = ["Vuota"]
#print show, hide
return render(request, 'test.html', {'items_show': show, 'items_hide':hide })
from functools import partial
import tempfile
import re
import os,sys
import gestionale
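# Replaces a <field> placeholder with the item's formatted value, escaping
# every non-ASCII character as an RTF \uNNN? control word so that the
# generated letter stays plain ASCII.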
def tag_replace(m, item_dict):
k = m.group()
field_name = k[1:-1].lower()
field = data_render.formatFields(item_dict, field_name, default_text="-")
return ''.join([c if ord(c) < 128 else u'\\u' + unicode(ord(c)) + u'?' for c in unicode(field)])
def generate_report(items, file_name=None):
block = []
block_copy = False
add_page = False
date_str = datetime.date.today()
date_str = date_str.strftime(cfg.DATA_FIELD_STR_FORMAT)
tmp_file = tempfile.NamedTemporaryFile()
with open(gestionale.local_settings.LOCAL_TEMPLATE_PATH + 'lettera.rtf', 'r') as in_tpl:
for line in in_tpl:
# start of the block: begin copying template lines.
if '>>START<<' in line:
print "Start"
block_copy = True
continue
# end of the block: stop copying and emit one filled page per item.
if '>>END<<' in line:
block_copy = False
add_page = True
print "End"
if block_copy and not add_page:
block.append(line)
elif add_page:
for item in items:
item['data'] = date_str
for s in block:
s = re.sub('(<\w+>)', partial(tag_replace, item_dict=item), s)
tmp_file.write(s)
add_page = False
block_copy = False
else:
tmp_file.write(line)
tmp_file.seek(0)
response = http.HttpResponse(tmp_file, mimetype='application/rtf')
response['Content-Disposition'] = 'attachment; filename="lettere.rtf"'
return response
<|fim▁hole|> return errors.server_error(request)
def check_test(request):
return render(request, 'anagrafe.html', {'data': "" })
def check_layout(request):
return render(request, 'fluid.html', {})<|fim▁end|> | def err(request): |
<|file_name|>render.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from cStringIO import StringIO
from amoco.logger import Log
logger = Log(__name__)
import re
try:
from pygments.token import Token
from pygments.style import Style
from pygments.lexer import RegexLexer
from pygments.formatters import *
except ImportError:
logger.info("pygments package not found, no renderer defined")
has_pygments = False
class TokenType(type):
def __getattr__(cls,key):
return key
class Token:
__metaclass__ = TokenType
class NullFormatter(object):
def __init__(self,**options):
self.options = options
def format(self,tokensource,outfile):
for t,v in tokensource:
outfile.write(v)
Formats = {
'Null':NullFormatter(),
}
else:
logger.info("pygments package imported")
has_pygments = True
class DarkStyle(Style):
default_style = ""
styles = {
#Token.Literal: '#fff',
Token.Address: '#fb0',<|fim▁hole|> Token.Register: '#33f',
Token.Memory: '#3ff',
Token.Comment: '#8f8',
Token.Name: 'underline',
Token.Tainted: 'bold #f00',
Token.Column: '#bbb',
Token.Hide: '#222',
}
class LightStyle(Style):
default_style = ""
styles = {
Token.Literal: '#000',
Token.Address: '#b58900',
Token.Constant: '#dc322f',
Token.Prefix: '#000',
Token.Mnemonic: 'bold',
Token.Register: '#268bd2',
Token.Memory: '#859900',
Token.Comment: '#93a1a1',
Token.Name: 'underline',
Token.Tainted: 'bold #f00',
Token.Column: '#222',
Token.Hide: '#bbb',
}
DefaultStyle = DarkStyle
Formats = {
'Null':NullFormatter(),
'Terminal':TerminalFormatter(style=DefaultStyle),
'Terminal256':Terminal256Formatter(style=DefaultStyle),
'TerminalDark':Terminal256Formatter(style=DarkStyle),
'TerminalLight':Terminal256Formatter(style=LightStyle),
'Html':HtmlFormatter(style=LightStyle,nowrap=True),
}
default_formatter = NullFormatter()
def configure(**kargs):
from amoco.config import get_module_conf
conf = get_module_conf('ui')
conf.update(kargs)
f = conf['formatter']
global default_formatter
default_formatter = Formats.get(f,default_formatter)
configure()
def highlight(toks,formatter=None,outfile=None):
formatter = formatter or default_formatter
if isinstance(formatter,str): formatter = Formats[formatter]
outfile = outfile or StringIO()
formatter.format(toks,outfile)
return outfile.getvalue()
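# Interleaves a separator token between all elements of lst; illustratively,
# TokenListJoin(',', [a, b]) returns [a, (Token.Literal, ','), b].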
def TokenListJoin(j,lst):
if isinstance(j,str):
j = (Token.Literal,j)
res = lst[0:1]
for x in lst[1:]:
res.append(j)
res.append(x)
return res
class vltable(object):
'''
variable length table:
'''
def __init__(self,rows=None,formatter=None,outfile=None):
if rows is None: rows = []
self.rows = rows
self.rowparams = {'colsize':{},
'hidden_c': set(),
'squash_c': True,
'formatter':formatter,
'outfile':outfile,
}
self.maxlength = float('inf')
self.hidden_r = set()
self.hidden_c = self.rowparams['hidden_c']
self.squash_r = True
self.colsize = self.rowparams['colsize']
self.update()
self.header = ''
self.footer = ''
def update(self,*rr):
for c in range(self.ncols):
cz = self.colsize.get(c,0) if len(rr)>0 else 0
self.colsize[c] = max(cz,self.getcolsize(c,rr,squash=False))
def getcolsize(self,c,rr=None,squash=True):
cz = 0
if not rr: rr = range(self.nrows)
for i in rr:
if self.rowparams['squash_c'] and (i in self.hidden_r):
if squash: continue
cz = max(cz,self.rows[i].colsize(c))
return cz
@property
def width(self):
sep = self.rowparams.get('sep','')
cs = self.ncols*len(sep)
return sum(self.colsize.values(),cs)
def setcolsize(self,c,value):
self.colsize[c] = value
def addrow(self,toks):
self.rows.append(tokenrow(toks))
self.update(-1)
return self
def hiderow(self,n):
self.hidden_r.add(n)
def showrow(self,n):
self.hidden_r.remove(n)
def hidecolumn(self,n):
self.hidden_c.add(n)
def showcolumn(self,n):
self.hidden_c.remove(n)
def showall(self):
self.hidden_r = set()
self.rowparams['hidden_c'] = set()
self.hidden_c = self.rowparams['hidden_c']
return self
def grep(self,regex,col=None,invert=False):
L = set()
R = range(self.nrows)
for i in R:
if i in self.hidden_r: continue
C = self.rows[i].rawcols(col)
for c,s in enumerate(C):
if c in self.hidden_c:
continue
if re.search(regex,s):
L.add(i)
break
if not invert: L = set(R)-L
for n in L: self.hiderow(n)
return self
@property
def nrows(self):
return len(self.rows)
@property
def ncols(self):
if self.nrows>0:
return max((r.ncols for r in self.rows))
else:
return 0
def __str__(self):
s = []
formatter=self.rowparams['formatter']
outfile=self.rowparams['outfile']
for i in range(self.nrows):
if i in self.hidden_r:
if not self.squash_r:
s.append(highlight([(Token.Hide,
self.rows[i].show(raw=True,**self.rowparams))],
formatter,
outfile,
))
else:
s.append(self.rows[i].show(**self.rowparams))
if len(s)>self.maxlength:
s = s[:self.maxlength-1]
s.append(highlight([(Token.Literal,'...')],formatter,outfile))
if self.header: s.insert(0,self.header)
if self.footer: s.append(self.footer)
return '\n'.join(s)
class tokenrow(object):
def __init__(self,toks=None):
if toks is None: toks = []
self.toks = [(t,unicode(s)) for (t,s) in toks]
self.maxwidth = float('inf')
self.align = '<'
self.fill = ' '
self.separator = ''
self.cols = self.cut()
def cut(self):
C = []
c = []
for t in self.toks:
c.append(t)
if t[0]==Token.Column:
C.append(c)
c = []
C.append(c)
return C
def colsize(self,c):
if c>=len(self.cols): return 0
return sum((len(t[1]) for t in self.cols[c] if t[0]!=Token.Column))
@property
def ncols(self):
return len(self.cols)
def rawcols(self,j=None):
r = []
cols = self.cols
if j is not None: cols = self.cols[j:j+1]
for c in cols:
r.append(''.join([t[1] for t in c]))
return r
def show(self,raw=False,**params):
formatter = params.get('formatter',None)
outfile = params.get('outfile',None)
align = params.get('align',self.align)
fill = params.get('fill',self.fill)
sep = params.get('sep',self.separator)
width = params.get('maxwidth',self.maxwidth)
colsz = params.get('colsize')
hidden_c = params.get('hidden_c',set())
squash_c = params.get('squash_c',True)
head = params.get('head','')
tail = params.get('tail','')
if raw:
formatter='Null'
outfile=None
r = [head]
tz = 0
for i,c in enumerate(self.cols):
toks = []
sz = 0
mz = colsz[i]
tz += mz
if tz>width: mz = mz-(tz-width)
skip = False
for tt,tv in c:
if tt==Token.Column: break
if skip: continue
toks.append([tt,tv])
sz += len(tv)
if sz>mz:
q = (sz-mz)
toks[-1][1] = tv[0:-q]+'###'
skip = True
if sz<mz:
pad = fill*(mz-sz)
if align=='<': toks[-1][1] += pad
elif align=='>': toks[0][1] = pad+toks[0][1]
if i in hidden_c:
if not squash_c:
toks = [(Token.Hide, highlight(toks, 'Null', None))]
else:
toks = []
r.append(highlight(toks,formatter,outfile))
if tt==Token.Column and sep: r.append(sep)
r.append(tail)
return ''.join(r)<|fim▁end|> | Token.Constant: '#f30',
#Token.Prefix: '#fff',
Token.Mnemonic: 'bold', |
<|file_name|>path.py<|end_file_name|><|fim▁begin|># --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import os
import glob
from automation.utilities.const import COMMAND_MODULE_PREFIX, EXTENSIONS_MOD_PREFIX
def get_repo_root():
"""Returns the path to the source code root directory"""
current_dir = os.path.dirname(os.path.abspath(__file__))
while not os.path.exists(os.path.join(current_dir, 'CONTRIBUTING.rst')):
current_dir = os.path.dirname(current_dir)
return current_dir
def get_all_module_paths():
"""List all core and command modules"""
return list(get_core_modules_paths()) + list(get_command_modules_paths(include_prefix=True))
def get_config_dir():
""" Returns the users Azure directory. """
return os.getenv('AZURE_CONFIG_DIR', None) or os.path.expanduser(os.path.join('~', '.azure'))
def get_extension_dir():
""" Returns the extensions directory. """
custom_dir = os.environ.get('AZURE_EXTENSION_DIR')
return os.path.expanduser(custom_dir) if custom_dir else os.path.join(get_config_dir(), 'cliextensions')
def get_extensions_paths(include_prefix=False):
glob_pattern = os.path.normcase('/*/{}*'.format(EXTENSIONS_MOD_PREFIX))
for path in glob.glob(get_extension_dir() + glob_pattern):
name = os.path.basename(path)
if not include_prefix:
name = name[len(EXTENSIONS_MOD_PREFIX):]
yield name, path
def get_command_modules_paths(include_prefix=False):
glob_pattern = os.path.normcase('/src/command_modules/{}*/setup.py'.format(COMMAND_MODULE_PREFIX))
for path in glob.glob(get_repo_root() + glob_pattern):
folder = os.path.dirname(path)
name = os.path.basename(folder)
if not include_prefix:
name = name[len(COMMAND_MODULE_PREFIX):]
yield name, folder
def get_command_modules_paths_with_tests(profile):
return get_module_paths_with_tests(get_command_modules_paths(), profile)
def get_core_modules_paths_with_tests(profile):
if profile == 'latest':
for name, path in get_core_modules_paths():
for root, dirs, files in os.walk(path):
if os.path.basename(root) == 'tests':
if name == 'azure-cli-core':
name = 'core'
yield name, path, root
def get_core_modules_paths():
for path in glob.glob(get_repo_root() + os.path.normcase('/src/*/setup.py')):
yield os.path.basename(os.path.dirname(path)), os.path.dirname(path)
def get_module_paths_with_tests(modules, profile):
for name, path in modules:
name = name.replace(COMMAND_MODULE_PREFIX, '')
test_folder = os.path.join(path, 'azure', 'cli', 'command_modules', name, 'tests', profile)
if os.path.exists(test_folder):
yield name, path, test_folder
def make_dirs(path):
"""Create a directories recursively"""
import errno
try:
os.makedirs(path)
except OSError as exc:  # os.makedirs has no exist_ok before Python 3.2
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
def get_test_results_dir(with_timestamp=None, prefix=None):
"""Returns the folder where test results should be saved to. If the folder doesn't exist,
it will be created."""
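# Illustrative use: get_test_results_dir(with_timestamp=True, prefix='tox')
# resolves to <repo_root>/test_results/tox_YYYYMMDD_HHMMSS at call time.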
result = os.path.join(get_repo_root(), 'test_results')
if isinstance(with_timestamp, bool):
from datetime import datetime
with_timestamp = datetime.now()
if with_timestamp:
if prefix:
result = os.path.join(result, with_timestamp.strftime(prefix + '_%Y%m%d_%H%M%S'))
else:
result = os.path.join(result, with_timestamp.strftime('%Y%m%d_%H%M%S'))
if not os.path.exists(result):<|fim▁hole|> if not os.path.exists(result) or not os.path.isdir(result):
raise Exception('Failed to create test result dir {}'.format(result))
return result
def filter_blacklisted_modules(*black_list_modules):
"""Returns the paths to the modules except those in the black list."""
import itertools
existing_modules = list(itertools.chain(get_core_modules_paths(),
get_command_modules_paths()))
black_list_modules = set(black_list_modules)
return list((name, path) for name, path in existing_modules if name not in black_list_modules)
def filter_user_selected_modules(user_input_modules):
import itertools
existing_modules = list(itertools.chain(get_core_modules_paths(),
get_command_modules_paths()))
if user_input_modules:
selected_modules = set(user_input_modules)
extra = selected_modules - set([name for name, _ in existing_modules])
if any(extra):
print('ERROR: These modules do not exist: {}.'.format(', '.join(extra)))
return None
return list((name, module) for name, module in existing_modules
if name in selected_modules)
else:
return list((name, module) for name, module in existing_modules)
def filter_user_selected_modules_with_tests(user_input_modules=None, profile=None):
import itertools
existing_modules = list(itertools.chain(get_core_modules_paths_with_tests(profile),
get_command_modules_paths_with_tests(profile)))
if user_input_modules is not None:
selected_modules = set(user_input_modules)
extra = selected_modules - set([name for name, _, _ in existing_modules])
# don't count extensions as extras
extra = [x for x in extra if not x.startswith('azext_')]
if any(extra):
print('ERROR: These modules do not exist: {}.'.format(', '.join(extra)))
return None
return list((name, module, test) for name, module, test in existing_modules
if name in selected_modules)
else:
return list((name, module, test) for name, module, test in existing_modules)<|fim▁end|> | make_dirs(result)
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (C) 2013 LiuLang <[email protected]>
# Use of this source code is governed by GPLv3 license that can be found
# in the LICENSE file.
from distutils.core import setup
from distutils.core import Command
from distutils.command.clean import clean as distutils_clean
from distutils.command.sdist import sdist as distutils_sdist
import glob
import os
import shutil
from qr_gui import Config
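# Walks share/ and groups its files into (target_dir, [files]) tuples, the
# layout expected by the data_files option of distutils' setup().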
def build_data_files():
data_files = []
for dir, dirs, files in os.walk('share'):
#target = os.path.join('share', dir)
target = dir
if files:
files = [os.path.join(dir, f) for f in files]
data_files.append((target, files))
return data_files
# will be installed to /usr/local/bin<|fim▁hole|>scripts = ['qr-gui', ]
if __name__ == '__main__':
setup(
name = 'qr-gui',
description = Config.DESCRIPTION,
version = Config.VERSION,
license = 'GPLv3',
url = Config.HOMEPAGE,
author = 'LiuLang',
author_email = '[email protected]',
packages = ['qr_gui', ],
scripts = scripts,
data_files = build_data_files(),
)<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|># Copyright 2014 Open vStorage NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This package contains all views for the Internal API
"""<|fim▁end|> | |
<|file_name|>attendance-week-dashboard.component.spec.ts<|end_file_name|><|fim▁begin|>import { ComponentFixture, TestBed, waitForAsync } from "@angular/core/testing";
import { AttendanceWeekDashboardComponent } from "./attendance-week-dashboard.component";
import { RouterTestingModule } from "@angular/router/testing";
import { ChildPhotoService } from "../../../children/child-photo-service/child-photo.service";
import { AttendanceModule } from "../../attendance.module";
import { AttendanceService } from "../../attendance.service";
describe("AttendanceWeekDashboardComponent", () => {
let component: AttendanceWeekDashboardComponent;
let fixture: ComponentFixture<AttendanceWeekDashboardComponent>;
let mockAttendanceService: jasmine.SpyObj<AttendanceService>;
beforeEach(
waitForAsync(() => {
mockAttendanceService = jasmine.createSpyObj([
"getAllActivityAttendancesForPeriod",
]);
mockAttendanceService.getAllActivityAttendancesForPeriod.and.resolveTo(
[]
);
TestBed.configureTestingModule({
imports: [AttendanceModule, RouterTestingModule.withRoutes([])],
providers: [
{
provide: ChildPhotoService,
useValue: jasmine.createSpyObj(["getImage"]),
},<|fim▁hole|> })
);
beforeEach(() => {
fixture = TestBed.createComponent(AttendanceWeekDashboardComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
it("should create", () => {
expect(component).toBeTruthy();
});
});<|fim▁end|> | { provide: AttendanceService, useValue: mockAttendanceService },
],
}).compileComponents(); |
<|file_name|>test_rerun.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
from mock import Mock, patch
from psutil import AccessDenied, TimeoutExpired
from thefuck.output_readers import rerun
class TestRerun(object):
def setup_method(self, test_method):
self.patcher = patch('thefuck.output_readers.rerun.Process')
process_mock = self.patcher.start()
self.proc_mock = process_mock.return_value = Mock()
def teardown_method(self, test_method):
self.patcher.stop()
@patch('thefuck.output_readers.rerun._wait_output', return_value=False)
@patch('thefuck.output_readers.rerun.Popen')
def test_get_output(self, popen_mock, wait_output_mock):
popen_mock.return_value.stdout.read.return_value = b'output'
assert rerun.get_output('', '') is None
wait_output_mock.assert_called_once()
@patch('thefuck.output_readers.rerun.Popen')
def test_get_output_invalid_continuation_byte(self, popen_mock):
output = b'ls: illegal option -- \xc3\nusage: ls [-@ABC...] [file ...]\n'
expected = u'ls: illegal option -- \ufffd\nusage: ls [-@ABC...] [file ...]\n'
popen_mock.return_value.stdout.read.return_value = output
actual = rerun.get_output('', '')
assert actual == expected
@patch('thefuck.output_readers.rerun._wait_output')
def test_get_output_unicode_misspell(self, wait_output_mock):
rerun.get_output(u'pácman', u'pácman')
wait_output_mock.assert_called_once()
def test_wait_output_is_slow(self, settings):
assert rerun._wait_output(Mock(), True)
self.proc_mock.wait.assert_called_once_with(settings.wait_slow_command)
def test_wait_output_is_not_slow(self, settings):
assert rerun._wait_output(Mock(), False)
self.proc_mock.wait.assert_called_once_with(settings.wait_command)
@patch('thefuck.output_readers.rerun._kill_process')
def test_wait_output_timeout(self, kill_process_mock):
self.proc_mock.wait.side_effect = TimeoutExpired(3)
self.proc_mock.children.return_value = []<|fim▁hole|> def test_wait_output_timeout_children(self, kill_process_mock):
self.proc_mock.wait.side_effect = TimeoutExpired(3)
self.proc_mock.children.return_value = [Mock()] * 2
assert not rerun._wait_output(Mock(), False)
assert kill_process_mock.call_count == 3
def test_kill_process(self):
proc = Mock()
rerun._kill_process(proc)
proc.kill.assert_called_once_with()
@patch('thefuck.output_readers.rerun.logs')
def test_kill_process_access_denied(self, logs_mock):
proc = Mock()
proc.kill.side_effect = AccessDenied()
rerun._kill_process(proc)
proc.kill.assert_called_once_with()
logs_mock.debug.assert_called_once()<|fim▁end|> | assert not rerun._wait_output(Mock(), False)
kill_process_mock.assert_called_once_with(self.proc_mock)
@patch('thefuck.output_readers.rerun._kill_process') |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! The runtime for writing applications for [Ice](https://github.com/losfair/IceCore),
//! an efficient, reliable and asynchronous platform for building modern backend applications
//! in WebAssembly.
//!
//! At a high level, `ia` (which stands for "Ice App") provides a few major components (based on
//! the underlying Ice Core engine):
//!
//! - Asynchronous TCP server and client
//! - File I/O
//! - Timer (not working for now due to an Ice bug)
//!
//! The asynchronous APIs are based on `futures`, while low-level callback-based APIs
//! are also provided.
//!
//! # Examples
//! A simple TCP proxy that forwards `127.0.0.1:1111` to `127.0.0.1:80`:
//! ```no_run
//! #![feature(proc_macro, generators)]
//!
//! #[macro_use]
//! extern crate ia;
//! extern crate futures_await as futures;
//!
//! use futures::prelude::*;
//! use ia::net::{TcpListener, TcpConnection};
//! use ia::error::IoResult;
//!
//! #[async]<|fim▁hole|>//! fn forward(from: TcpConnection, to: TcpConnection) -> IoResult<()> {
//! while let Ok(v) = await!(from.read(4096)) {
//! if v.len() == 0 {
//! break;
//! }
//! await!(to.write(v))?;
//! }
//! Ok(())
//! }
//! let proxied = await!(TcpConnection::connect("127.0.0.1:80"))?;
//! ia::spawn(forward(proxied.clone(), incoming.clone()));
//! await!(forward(incoming, proxied))?;
//!
//! Ok(())
//! }
//!
//! #[async]
//! fn run_proxy() -> IoResult<()> {
//! static LISTEN_ADDR: &'static str = "127.0.0.1:1111";
//! let listener = TcpListener::new(LISTEN_ADDR);
//! println!("Listening on {}", LISTEN_ADDR);
//!
//! #[async]
//! for incoming in listener {
//! ia::spawn(handle_connection(incoming));
//! }
//!
//! Ok(())
//! }
//!
//! app_init!({
//! ia::spawn(run_proxy());
//! 0
//! });
//!
//! ```
//!
//! See [simpleproxy](https://github.com/losfair/IceCore/tree/master/ia/examples/simpleproxy) for the
//! full code & project layout.
#![feature(fnbox)]
#![feature(never_type)]
pub extern crate futures;
pub extern crate cwa;
#[macro_use]
pub mod log;
pub mod raw;
pub mod executor;
pub mod utils;
pub mod error;
pub mod net;
pub mod fs;
pub use executor::spawn;<|fim▁end|> | //! fn handle_connection(incoming: TcpConnection) -> IoResult<()> {
//! #[async] |
<|file_name|>smoke.rs<|end_file_name|><|fim▁begin|>use futures::StreamExt;
use opentelemetry::global::shutdown_tracer_provider;
use opentelemetry::trace::{Span, SpanKind, Tracer};
use opentelemetry_otlp::WithExportConfig;
use opentelemetry_proto::tonic::collector::trace::v1::{
trace_service_server::{TraceService, TraceServiceServer},
ExportTraceServiceRequest, ExportTraceServiceResponse,
};
use std::{net::SocketAddr, sync::Mutex};
use tokio::sync::mpsc;
use tokio_stream::wrappers::TcpListenerStream;
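// Test double for an OTLP collector: every gRPC Export request it receives is
// pushed into an mpsc channel so the test body can receive and assert on it.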
struct MockServer {
tx: Mutex<mpsc::Sender<ExportTraceServiceRequest>>,
}
impl MockServer {
pub fn new(tx: mpsc::Sender<ExportTraceServiceRequest>) -> Self {
Self { tx: Mutex::new(tx) }
}
}
#[tonic::async_trait]
impl TraceService for MockServer {
async fn export(
&self,
request: tonic::Request<ExportTraceServiceRequest>,
) -> Result<tonic::Response<ExportTraceServiceResponse>, tonic::Status> {
println!("Sending request into channel...");
// assert we have required metadata key
assert_eq!(
request.metadata().get("x-header-key"),
Some(&("header-value".parse().unwrap()))
);
self.tx
.lock()
.unwrap()
.try_send(request.into_inner())
.expect("Channel full");
Ok(tonic::Response::new(ExportTraceServiceResponse {}))
}
}
async fn setup() -> (SocketAddr, mpsc::Receiver<ExportTraceServiceRequest>) {
let addr: SocketAddr = "[::1]:0".parse().unwrap();
let listener = tokio::net::TcpListener::bind(addr)
.await
.expect("failed to bind");
let addr = listener.local_addr().unwrap();
let stream = TcpListenerStream::new(listener).map(|s| {
if let Ok(ref s) = s {
println!("Got new conn at {}", s.peer_addr().unwrap());
}
s
});
let (req_tx, req_rx) = mpsc::channel(10);
let service = TraceServiceServer::new(MockServer::new(req_tx));
tokio::task::spawn(async move {
tonic::transport::Server::builder()
.add_service(service)
.serve_with_incoming(stream)
.await
.expect("Server failed");
});
(addr, req_rx)
}<|fim▁hole|>async fn smoke_tracer() {
println!("Starting server setup...");
let (addr, mut req_rx) = setup().await;
{
println!("Installing tracer...");
let mut metadata = tonic::metadata::MetadataMap::new();
metadata.insert("x-header-key", "header-value".parse().unwrap());
let tracer = opentelemetry_otlp::new_pipeline()
.tracing()
.with_exporter(
opentelemetry_otlp::new_exporter()
.tonic()
.with_endpoint(format!("http://{}", addr))
.with_metadata(metadata),
)
.install_batch(opentelemetry::runtime::Tokio)
.expect("failed to install");
println!("Sending span...");
let mut span = tracer
.span_builder("my-test-span")
.with_kind(SpanKind::Server)
.start(&tracer);
span.add_event("my-test-event", vec![]);
span.end();
shutdown_tracer_provider();
}
println!("Waiting for request...");
let req = req_rx.recv().await.expect("missing export request");
let first_span = req
.resource_spans
.get(0)
.unwrap()
.instrumentation_library_spans
.get(0)
.unwrap()
.spans
.get(0)
.unwrap();
assert_eq!("my-test-span", first_span.name);
let first_event = first_span.events.get(0).unwrap();
assert_eq!("my-test-event", first_event.name);
}<|fim▁end|> |
#[tokio::test(flavor = "multi_thread")] |
<|file_name|>expr-if-box.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[feature(managed_boxes)];
// Tests for if as expressions returning boxed types
fn test_box() {
let rs = if true { @100 } else { @101 };
assert_eq!(*rs, 100);
}
fn test_str() {
let rs = if true { ~"happy" } else { ~"sad" };<|fim▁hole|><|fim▁end|> | assert_eq!(rs, ~"happy");
}
pub fn main() { test_box(); test_str(); } |
<|file_name|>bench.rs<|end_file_name|><|fim▁begin|>#![feature(test)]
extern crate lib;
extern crate test;
use lib::find_similar;
use test::Bencher;
#[bench]
fn measure_find_similar_usr_bin(b: &mut Bencher) {
let files = (0..128).map(|_| {
(0..128).map(|_| 'a' as char).collect()<|fim▁hole|> b.iter(|| find_similar(&files));
}<|fim▁end|> | }).collect(); |
<|file_name|>klondike.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2018 Erik Nordstrøm <[email protected]>
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
use arrayvec::ArrayVec;
use std::ops::Deref;
use cards::Card;
impl_cardstack!(StockSlot, StockSlotArray, 24); // 52 - (1 + 2 + 3 + 4 + 5 + 6 + 7) = 24
impl_cardstack!(WastePileSlot, WastePileSlotArray, 24);<|fim▁hole|>impl_cardstack!(TableauSlot, TableauSlotArray, 19);
pub struct Table
{
pub stock: StockSlot,
pub waste_pile: WastePileSlot,
pub foundations: [FoundationSlot; 4],
pub tableau: [TableauSlot; 7],
}<|fim▁end|> | impl_cardstack!(FoundationSlot, FoundationSlotArray, 13); |
<|file_name|>board-size.js<|end_file_name|><|fim▁begin|>'use strict'
const { lt, inRange } = require('lodash')
module.exports = boardSize => {
if (lt(boardSize, 70)) return '<70l'
if (inRange(boardSize, 70, 80)) return '70l to 80l'
if (inRange(boardSize, 80, 90)) return '80l to 90l'
if (inRange(boardSize, 90, 100)) return '90l to 100l'
if (inRange(boardSize, 100, 110)) return '100l to 110l'
if (inRange(boardSize, 110, 120)) return '110l to 120l'<|fim▁hole|><|fim▁end|> | if (inRange(boardSize, 120, 130)) return '120l to 130l'
return '>130l'
} |
<|file_name|>env.py<|end_file_name|><|fim▁begin|># -*- mode: python; coding: utf-8 -*-
# Copyright 2016 the HERA Collaboration
# Licensed under the 2-clause BSD License.
"""This script is some boilerplate needed by Alembic to do its fancy database
migration stuff.
"""
# A hack so that we can get the librarian_server module.
import sys
sys.path.insert(0, '.')
from alembic import context
from logging.config import fileConfig
config = context.config
fileConfig(config.config_file_name)
from librarian_server import app, db
target_metadata = db.metadata
def run_migrations_offline():
"""Run migrations in 'offline' mode -- all we need is a URL.<|fim▁hole|> url=url,
target_metadata=target_metadata,
literal_binds=True
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online():
"""Run migrations in 'online' mode -- using the actual Librarian database
connection.
"""
with db.engine.connect() as connection:
context.configure(
connection=connection,
target_metadata=target_metadata
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()<|fim▁end|> |
"""
url = app.config['SQLALCHEMY_DATABASE_URI']
context.configure( |
<|file_name|>03_closures.go<|end_file_name|><|fim▁begin|>package main
import "fmt"
// fibonacci returns a closure that yields successive Fibonacci numbers.
func fibonacci() func() int {
old_fib := -1
fib := 1
return func() int {<|fim▁hole|>}
func main() {
f := fibonacci()
for i := 0; i < 10; i++ {
fmt.Println(f())
}
}<|fim▁end|> | fib, old_fib = fib + old_fib, fib
return fib
} |
<|file_name|>kube2sky.go<|end_file_name|><|fim▁begin|>/*
Copyright 2014 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// kube2sky is a bridge between Kubernetes and SkyDNS. It watches the
// Kubernetes master for changes in Services and manifests them into etcd for
// SkyDNS to serve as DNS records.
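// Illustratively (assuming the default "cluster.local" domain), a service
// "web" in namespace "default" with ClusterIP 10.0.0.5 is stored roughly as:
//
//	key:   /skydns/local/cluster/svc/default/web/<hash-of-value>
//	value: {"host":"10.0.0.5","priority":10,"weight":10,"ttl":30}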
package main
import (
"encoding/json"
"fmt"
"hash/fnv"
"net/http"
"net/url"
"os"
"strings"
"sync"
"time"
etcd "github.com/coreos/go-etcd/etcd"
"github.com/golang/glog"
skymsg "github.com/skynetservices/skydns/msg"
flag "github.com/spf13/pflag"
kapi "k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/unversioned"
kcache "k8s.io/kubernetes/pkg/client/cache"
"k8s.io/kubernetes/pkg/client/restclient"
kclient "k8s.io/kubernetes/pkg/client/unversioned"
kclientcmd "k8s.io/kubernetes/pkg/client/unversioned/clientcmd"
kframework "k8s.io/kubernetes/pkg/controller/framework"
kselector "k8s.io/kubernetes/pkg/fields"
etcdutil "k8s.io/kubernetes/pkg/storage/etcd/util"
"k8s.io/kubernetes/pkg/util"
"k8s.io/kubernetes/pkg/util/wait"
)
var (
argDomain = flag.String("domain", "cluster.local", "domain under which to create names")
argEtcdMutationTimeout = flag.Duration("etcd-mutation-timeout", 10*time.Second, "crash after retrying etcd mutation for a specified duration")
argEtcdServer = flag.String("etcd-server", "http://127.0.0.1:4001", "URL to etcd server")
argKubecfgFile = flag.String("kubecfg-file", "", "Location of kubecfg file for access to kubernetes master service; --kube-master-url overrides the URL part of this; if neither this nor --kube-master-url are provided, defaults to service account tokens")
argKubeMasterURL = flag.String("kube-master-url", "", "URL to reach kubernetes master. Env variables in this flag will be expanded.")
)
const (
// Maximum number of attempts to connect to etcd server.
maxConnectAttempts = 12
// Resync period for the kube controller loop.
resyncPeriod = 30 * time.Minute
// A subdomain added to the user specified domain for all services.
serviceSubdomain = "svc"
// A subdomain added to the user specified dmoain for all pods.
podSubdomain = "pod"
)
type etcdClient interface {
Set(path, value string, ttl uint64) (*etcd.Response, error)
RawGet(key string, sort, recursive bool) (*etcd.RawResponse, error)
Delete(path string, recursive bool) (*etcd.Response, error)
}
type nameNamespace struct {
name string
namespace string
}
type kube2sky struct {
// Etcd client.
etcdClient etcdClient
// DNS domain name.
domain string
// Etcd mutation timeout.
etcdMutationTimeout time.Duration
// A cache that contains all the endpoints in the system.
endpointsStore kcache.Store
// A cache that contains all the services in the system.
servicesStore kcache.Store
// A cache that contains all the pods in the system.
podsStore kcache.Store
// Lock for controlling access to headless services.
mlock sync.Mutex
}
// Removes 'subdomain' from etcd.
func (ks *kube2sky) removeDNS(subdomain string) error {
glog.V(2).Infof("Removing %s from DNS", subdomain)
resp, err := ks.etcdClient.RawGet(skymsg.Path(subdomain), false, true)
if err != nil {
return err
}
if resp.StatusCode == http.StatusNotFound {
glog.V(2).Infof("Subdomain %q does not exist in etcd", subdomain)
return nil
}
_, err = ks.etcdClient.Delete(skymsg.Path(subdomain), true)
return err
}
func (ks *kube2sky) writeSkyRecord(subdomain string, data string) error {
// Set with no TTL, and hope that kubernetes events are accurate.
_, err := ks.etcdClient.Set(skymsg.Path(subdomain), data, uint64(0))
return err
}
// Generates skydns records for a headless service.
func (ks *kube2sky) newHeadlessService(subdomain string, service *kapi.Service) error {
// Create an A record for every pod in the service.
// This record must be periodically updated.
// Format is as follows:
// For a service x, with pods a and b create DNS records,
// a.x.ns.domain. and, b.x.ns.domain.
ks.mlock.Lock()
defer ks.mlock.Unlock()
key, err := kcache.MetaNamespaceKeyFunc(service)
if err != nil {
return err
}
e, exists, err := ks.endpointsStore.GetByKey(key)
if err != nil {
return fmt.Errorf("failed to get endpoints object from endpoints store - %v", err)
}
if !exists {
glog.V(1).Infof("Could not find endpoints for service %q in namespace %q. DNS records will be created once endpoints show up.", service.Name, service.Namespace)
return nil
}
if e, ok := e.(*kapi.Endpoints); ok {
return ks.generateRecordsForHeadlessService(subdomain, e, service)
}
return nil
}
func getSkyMsg(ip string, port int) *skymsg.Service {
return &skymsg.Service{
Host: ip,
Port: port,
Priority: 10,
Weight: 10,
Ttl: 30,
}
}
func (ks *kube2sky) generateRecordsForHeadlessService(subdomain string, e *kapi.Endpoints, svc *kapi.Service) error {
for idx := range e.Subsets {
for subIdx := range e.Subsets[idx].Addresses {
b, err := json.Marshal(getSkyMsg(e.Subsets[idx].Addresses[subIdx].IP, 0))
if err != nil {
return err
}
recordValue := string(b)
recordLabel := getHash(recordValue)
recordKey := buildDNSNameString(subdomain, recordLabel)
glog.V(2).Infof("Setting DNS record: %v -> %q\n", recordKey, recordValue)
if err := ks.writeSkyRecord(recordKey, recordValue); err != nil {
return err
}
for portIdx := range e.Subsets[idx].Ports {
endpointPort := &e.Subsets[idx].Ports[portIdx]
portSegment := buildPortSegmentString(endpointPort.Name, endpointPort.Protocol)
if portSegment != "" {
err := ks.generateSRVRecord(subdomain, portSegment, recordLabel, recordKey, endpointPort.Port)
if err != nil {
return err
}
}
}
}
}
return nil
}
func (ks *kube2sky) getServiceFromEndpoints(e *kapi.Endpoints) (*kapi.Service, error) {
key, err := kcache.MetaNamespaceKeyFunc(e)
if err != nil {
return nil, err
}
obj, exists, err := ks.servicesStore.GetByKey(key)
if err != nil {
return nil, fmt.Errorf("failed to get service object from services store - %v", err)
}
if !exists {
glog.V(1).Infof("could not find service for endpoint %q in namespace %q", e.Name, e.Namespace)
return nil, nil
}
if svc, ok := obj.(*kapi.Service); ok {
return svc, nil
}
return nil, fmt.Errorf("got a non service object in services store %v", obj)
}
func (ks *kube2sky) addDNSUsingEndpoints(subdomain string, e *kapi.Endpoints) error {
ks.mlock.Lock()
defer ks.mlock.Unlock()
svc, err := ks.getServiceFromEndpoints(e)
if err != nil {
return err
}
if svc == nil || kapi.IsServiceIPSet(svc) {
// No headless service found corresponding to endpoints object.
return nil
}
// Remove existing DNS entry.
if err := ks.removeDNS(subdomain); err != nil {
return err
}
return ks.generateRecordsForHeadlessService(subdomain, e, svc)
}
func (ks *kube2sky) handleEndpointAdd(obj interface{}) {
if e, ok := obj.(*kapi.Endpoints); ok {
name := buildDNSNameString(ks.domain, serviceSubdomain, e.Namespace, e.Name)
ks.mutateEtcdOrDie(func() error { return ks.addDNSUsingEndpoints(name, e) })
}
}
func (ks *kube2sky) handlePodCreate(obj interface{}) {
if e, ok := obj.(*kapi.Pod); ok {
// If the pod ip is not yet available, do not attempt to create.
if e.Status.PodIP != "" {
name := buildDNSNameString(ks.domain, podSubdomain, e.Namespace, santizeIP(e.Status.PodIP))
ks.mutateEtcdOrDie(func() error { return ks.generateRecordsForPod(name, e) })
}
}
}
func (ks *kube2sky) handlePodUpdate(old interface{}, new interface{}) {
oldPod, okOld := old.(*kapi.Pod)
newPod, okNew := new.(*kapi.Pod)
// Validate that the objects are good
if okOld && okNew {
if oldPod.Status.PodIP != newPod.Status.PodIP {
ks.handlePodDelete(oldPod)
ks.handlePodCreate(newPod)
}
} else if okNew {
ks.handlePodCreate(newPod)
} else if okOld {
ks.handlePodDelete(oldPod)
}
}
func (ks *kube2sky) handlePodDelete(obj interface{}) {
if e, ok := obj.(*kapi.Pod); ok {
if e.Status.PodIP != "" {
name := buildDNSNameString(ks.domain, podSubdomain, e.Namespace, santizeIP(e.Status.PodIP))
ks.mutateEtcdOrDie(func() error { return ks.removeDNS(name) })
}
}
}
func (ks *kube2sky) generateRecordsForPod(subdomain string, service *kapi.Pod) error {
b, err := json.Marshal(getSkyMsg(service.Status.PodIP, 0))
if err != nil {
return err
}
recordValue := string(b)
recordLabel := getHash(recordValue)
recordKey := buildDNSNameString(subdomain, recordLabel)
glog.V(2).Infof("Setting DNS record: %v -> %q, with recordKey: %v\n", subdomain, recordValue, recordKey)
if err := ks.writeSkyRecord(recordKey, recordValue); err != nil {
return err
}
return nil
}
func (ks *kube2sky) generateRecordsForPortalService(subdomain string, service *kapi.Service) error {
b, err := json.Marshal(getSkyMsg(service.Spec.ClusterIP, 0))
if err != nil {
return err
}
recordValue := string(b)
recordLabel := getHash(recordValue)
recordKey := buildDNSNameString(subdomain, recordLabel)
glog.V(2).Infof("Setting DNS record: %v -> %q, with recordKey: %v\n", subdomain, recordValue, recordKey)
if err := ks.writeSkyRecord(recordKey, recordValue); err != nil {
return err
}
// Generate SRV Records
for i := range service.Spec.Ports {
port := &service.Spec.Ports[i]
portSegment := buildPortSegmentString(port.Name, port.Protocol)
if portSegment != "" {
err = ks.generateSRVRecord(subdomain, portSegment, recordLabel, subdomain, port.Port)
if err != nil {
return err
}
}
}
return nil
}
func santizeIP(ip string) string {
return strings.Replace(ip, ".", "-", -1)
}
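// Illustrative: a port named "dns" over protocol UDP yields the SRV label
// segment "_dns._udp".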
func buildPortSegmentString(portName string, portProtocol kapi.Protocol) string {
if portName == "" {
// we don't create a random name
return ""
}
if portProtocol == "" {
glog.Errorf("Port Protocol not set. port segment string cannot be created.")
return ""
}
return fmt.Sprintf("_%s._%s", portName, strings.ToLower(string(portProtocol)))
}
func (ks *kube2sky) generateSRVRecord(subdomain, portSegment, recordName, cName string, portNumber int) error {
recordKey := buildDNSNameString(subdomain, portSegment, recordName)
srv_rec, err := json.Marshal(getSkyMsg(cName, portNumber))
if err != nil {
return err
}
if err := ks.writeSkyRecord(recordKey, string(srv_rec)); err != nil {
return err
}
return nil
}
func (ks *kube2sky) addDNS(subdomain string, service *kapi.Service) error {
// if ClusterIP is not set, a DNS entry should not be created
if !kapi.IsServiceIPSet(service) {
return ks.newHeadlessService(subdomain, service)
}
if len(service.Spec.Ports) == 0 {
glog.Info("Unexpected service with no ports, this should not have happend: %v", service)
}
return ks.generateRecordsForPortalService(subdomain, service)
}
// Implements retry logic for arbitrary mutator. Crashes after retrying for
// etcd-mutation-timeout.
func (ks *kube2sky) mutateEtcdOrDie(mutator func() error) {
timeout := time.After(ks.etcdMutationTimeout)
for {
select {
case <-timeout:
glog.Fatalf("Failed to mutate etcd for %v using mutator: %v", ks.etcdMutationTimeout, mutator)
default:
if err := mutator(); err != nil {
delay := 50 * time.Millisecond
glog.V(1).Infof("Failed to mutate etcd using mutator: %v due to: %v. Will retry in: %v", mutator, err, delay)
time.Sleep(delay)
} else {
return
}
}
}
}
func buildDNSNameString(labels ...string) string {
var res string
for _, label := range labels {
if res == "" {
res = label
} else {
res = fmt.Sprintf("%s.%s", label, res)
}
}
return res
}
// Returns a cache.ListWatch that gets all changes to services.
func createServiceLW(kubeClient *kclient.Client) *kcache.ListWatch {
return kcache.NewListWatchFromClient(kubeClient, "services", kapi.NamespaceAll, kselector.Everything())
}
// Returns a cache.ListWatch that gets all changes to endpoints.
func createEndpointsLW(kubeClient *kclient.Client) *kcache.ListWatch {
return kcache.NewListWatchFromClient(kubeClient, "endpoints", kapi.NamespaceAll, kselector.Everything())
}
// Returns a cache.ListWatch that gets all changes to pods.
func createEndpointsPodLW(kubeClient *kclient.Client) *kcache.ListWatch {
return kcache.NewListWatchFromClient(kubeClient, "pods", kapi.NamespaceAll, kselector.Everything())
}
func (ks *kube2sky) newService(obj interface{}) {
if s, ok := obj.(*kapi.Service); ok {
name := buildDNSNameString(ks.domain, serviceSubdomain, s.Namespace, s.Name)
ks.mutateEtcdOrDie(func() error { return ks.addDNS(name, s) })
}
}
func (ks *kube2sky) removeService(obj interface{}) {
if s, ok := obj.(*kapi.Service); ok {
name := buildDNSNameString(ks.domain, serviceSubdomain, s.Namespace, s.Name)
ks.mutateEtcdOrDie(func() error { return ks.removeDNS(name) })
}
}
func (ks *kube2sky) updateService(oldObj, newObj interface{}) {
// TODO: Avoid unwanted updates.
ks.removeService(oldObj)
ks.newService(newObj)
}
func newEtcdClient(etcdServer string) (*etcd.Client, error) {
var (
client *etcd.Client
err error
)
for attempt := 1; attempt <= maxConnectAttempts; attempt++ {
if _, err = etcdutil.GetEtcdVersion(etcdServer); err == nil {
break
}
if attempt == maxConnectAttempts {
break
}
glog.Infof("[Attempt: %d] Attempting access to etcd after 5 second sleep", attempt)
time.Sleep(5 * time.Second)
}
if err != nil {
return nil, fmt.Errorf("failed to connect to etcd server: %v, error: %v", etcdServer, err)
}
glog.Infof("Etcd server found: %v", etcdServer)
// loop until we have > 0 machines && machines[0] != ""
poll, timeout := 1*time.Second, 10*time.Second
if err := wait.Poll(poll, timeout, func() (bool, error) {
if client = etcd.NewClient([]string{etcdServer}); client == nil {
return false, fmt.Errorf("etcd.NewClient returned nil")
}
client.SyncCluster()
machines := client.GetCluster()
if len(machines) == 0 || len(machines[0]) == 0 {
return false, nil
}
return true, nil
}); err != nil {
return nil, fmt.Errorf("Timed out after %s waiting for at least 1 synchronized etcd server in the cluster. Error: %v", timeout, err)
}
return client, nil
}
func expandKubeMasterURL() (string, error) {
parsedURL, err := url.Parse(os.ExpandEnv(*argKubeMasterURL))
if err != nil {
return "", fmt.Errorf("failed to parse --kube-master-url %s - %v", *argKubeMasterURL, err)
}
if parsedURL.Scheme == "" || parsedURL.Host == "" || parsedURL.Host == ":" {
return "", fmt.Errorf("invalid --kube-master-url specified %s", *argKubeMasterURL)
}
return parsedURL.String(), nil
}
// TODO: evaluate using pkg/client/clientcmd
func newKubeClient() (*kclient.Client, error) {
var (
config *restclient.Config
err error
masterURL string
)
// If the user specified --kube-master-url, expand env vars and verify it.
if *argKubeMasterURL != "" {
masterURL, err = expandKubeMasterURL()
if err != nil {
return nil, err
}
}
if masterURL != "" && *argKubecfgFile == "" {
// Only --kube-master-url was provided.
config = &restclient.Config{
Host: masterURL,
ContentConfig: restclient.ContentConfig{GroupVersion: &unversioned.GroupVersion{Version: "v1"}},
}
} else {
// We either have:
// 1) --kube-master-url and --kubecfg-file
// 2) just --kubecfg-file
// 3) neither flag
// In any case, the logic is the same. If (3), this will automatically
// fall back on the service account token.
overrides := &kclientcmd.ConfigOverrides{}
overrides.ClusterInfo.Server = masterURL // might be "", but that is OK
rules := &kclientcmd.ClientConfigLoadingRules{ExplicitPath: *argKubecfgFile} // might be "", but that is OK
if config, err = kclientcmd.NewNonInteractiveDeferredLoadingClientConfig(rules, overrides).ClientConfig(); err != nil {
return nil, err
}
}
glog.Infof("Using %s for kubernetes master", config.Host)
glog.Infof("Using kubernetes API %v", config.GroupVersion)
return kclient.New(config)
}
func watchForServices(kubeClient *kclient.Client, ks *kube2sky) kcache.Store {
serviceStore, serviceController := kframework.NewInformer(
createServiceLW(kubeClient),
&kapi.Service{},
resyncPeriod,
kframework.ResourceEventHandlerFuncs{
AddFunc: ks.newService,
DeleteFunc: ks.removeService,
UpdateFunc: ks.updateService,
},
)
go serviceController.Run(wait.NeverStop)
return serviceStore
}
func watchEndpoints(kubeClient *kclient.Client, ks *kube2sky) kcache.Store {
eStore, eController := kframework.NewInformer(
createEndpointsLW(kubeClient),
&kapi.Endpoints{},
resyncPeriod,
kframework.ResourceEventHandlerFuncs{
AddFunc: ks.handleEndpointAdd,
UpdateFunc: func(oldObj, newObj interface{}) {
// TODO: Avoid unwanted updates.
ks.handleEndpointAdd(newObj)
},
},
)
go eController.Run(wait.NeverStop)
return eStore
}
<|fim▁hole|> eStore, eController := kframework.NewInformer(
createEndpointsPodLW(kubeClient),
&kapi.Pod{},
resyncPeriod,
kframework.ResourceEventHandlerFuncs{
AddFunc: ks.handlePodCreate,
UpdateFunc: func(oldObj, newObj interface{}) {
ks.handlePodUpdate(oldObj, newObj)
},
DeleteFunc: ks.handlePodDelete,
},
)
go eController.Run(wait.NeverStop)
return eStore
}
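// getHash returns the FNV-1a 32-bit hash of text, rendered as lowercase hex.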
func getHash(text string) string {
h := fnv.New32a()
h.Write([]byte(text))
return fmt.Sprintf("%x", h.Sum32())
}
func main() {
flag.CommandLine.SetNormalizeFunc(util.WarnWordSepNormalizeFunc)
flag.Parse()
var err error
// TODO: Validate input flags.
domain := *argDomain
if !strings.HasSuffix(domain, ".") {
domain = fmt.Sprintf("%s.", domain)
}
ks := kube2sky{
domain: domain,
etcdMutationTimeout: *argEtcdMutationTimeout,
}
if ks.etcdClient, err = newEtcdClient(*argEtcdServer); err != nil {
glog.Fatalf("Failed to create etcd client - %v", err)
}
kubeClient, err := newKubeClient()
if err != nil {
glog.Fatalf("Failed to create a kubernetes client: %v", err)
}
ks.endpointsStore = watchEndpoints(kubeClient, &ks)
ks.servicesStore = watchForServices(kubeClient, &ks)
ks.podsStore = watchPods(kubeClient, &ks)
select {}
}<|fim▁end|> | func watchPods(kubeClient *kclient.Client, ks *kube2sky) kcache.Store { |
<|file_name|>test_functional.py<|end_file_name|><|fim▁begin|>import os
import json
import six
from ddt import ddt, data, file_data, is_hash_randomized
from nose.tools import assert_equal, assert_is_not_none, assert_raises
@ddt
class Dummy(object):
"""
Dummy class to test the data decorator on
"""
@data(1, 2, 3, 4)
def test_something(self, value):
return value
@ddt
class DummyInvalidIdentifier():
"""
    Dummy class to test the data decorator receiving values with characters
    that are invalid in Python identifiers
"""
@data('32v2 g #Gmw845h$W b53wi.')
def test_data_with_invalid_identifier(self, value):
return value
@ddt
class FileDataDummy(object):
"""
Dummy class to test the file_data decorator on
"""
@file_data("test_data_dict.json")
def test_something_again(self, value):
return value
@ddt
class FileDataMissingDummy(object):
"""
Dummy class to test the file_data decorator on when
    the JSON file is missing
"""
@file_data("test_data_dict_missing.json")
def test_something_again(self, value):
return value
def test_data_decorator():
"""
Test the ``data`` method decorator
"""
def hello():
pass
pre_size = len(hello.__dict__)
keys = set(hello.__dict__.keys())
data_hello = data(1, 2)(hello)
dh_keys = set(data_hello.__dict__.keys())
post_size = len(data_hello.__dict__)
assert_equal(post_size, pre_size + 1)
extra_attrs = dh_keys - keys
assert_equal(len(extra_attrs), 1)
extra_attr = extra_attrs.pop()
assert_equal(getattr(data_hello, extra_attr), (1, 2))
def test_file_data_decorator_with_dict():
"""
Test the ``file_data`` method decorator
"""
def hello():
pass
pre_size = len(hello.__dict__)
keys = set(hello.__dict__.keys())
data_hello = data("test_data_dict.json")(hello)
dh_keys = set(data_hello.__dict__.keys())
post_size = len(data_hello.__dict__)
assert_equal(post_size, pre_size + 1)
extra_attrs = dh_keys - keys
assert_equal(len(extra_attrs), 1)
extra_attr = extra_attrs.pop()
assert_equal(getattr(data_hello, extra_attr), ("test_data_dict.json",))
is_test = lambda x: x.startswith('test_')
def test_ddt():
"""
Test the ``ddt`` class decorator
"""
tests = len(list(filter(is_test, Dummy.__dict__)))
assert_equal(tests, 4)
def test_file_data_test_creation():
"""
Test that the ``file_data`` decorator creates two tests
"""
tests = len(list(filter(is_test, FileDataDummy.__dict__)))
assert_equal(tests, 2)
def test_file_data_test_names_dict():
"""
Test that ``file_data`` creates tests with the correct name
    Name is the function name plus the key in the JSON data,
when it is parsed as a dictionary.
"""
tests = set(filter(is_test, FileDataDummy.__dict__))
tests_dir = os.path.dirname(__file__)
test_data_path = os.path.join(tests_dir, 'test_data_dict.json')
test_data = json.loads(open(test_data_path).read())<|fim▁hole|> ])
assert_equal(tests, created_tests)
def test_feed_data_data():
"""
Test that data is fed to the decorated tests
"""
tests = filter(is_test, Dummy.__dict__)
values = []
obj = Dummy()
for test in tests:
method = getattr(obj, test)
values.append(method())
assert_equal(set(values), set([1, 2, 3, 4]))
def test_feed_data_file_data():
"""
Test that data is fed to the decorated tests from a file
"""
tests = filter(is_test, FileDataDummy.__dict__)
values = []
obj = FileDataDummy()
for test in tests:
method = getattr(obj, test)
values.extend(method())
assert_equal(set(values), set([10, 12, 15, 15, 12, 50]))
def test_feed_data_file_data_missing_json():
"""
Test that a ValueError is raised
"""
tests = filter(is_test, FileDataMissingDummy.__dict__)
obj = FileDataMissingDummy()
for test in tests:
method = getattr(obj, test)
assert_raises(ValueError, method)
def test_ddt_data_name_attribute():
"""
Test the ``__name__`` attribute handling of ``data`` items with ``ddt``
"""
def hello():
pass
class Myint(int):
pass
class Mytest(object):
pass
d1 = Myint(1)
d1.__name__ = 'data1'
d2 = Myint(2)
data_hello = data(d1, d2)(hello)
setattr(Mytest, 'test_hello', data_hello)
ddt_mytest = ddt(Mytest)
assert_is_not_none(getattr(ddt_mytest, 'test_hello_1_data1'))
assert_is_not_none(getattr(ddt_mytest, 'test_hello_2_2'))
def test_ddt_data_unicode():
"""
Test that unicode strings are converted to function names correctly
"""
def hello():
pass
# We test unicode support separately for python 2 and 3
if six.PY2:
@ddt
class Mytest(object):
@data(u'ascii', u'non-ascii-\N{SNOWMAN}', {u'\N{SNOWMAN}': 'data'})
def test_hello(self, val):
pass
assert_is_not_none(getattr(Mytest, 'test_hello_1_ascii'))
assert_is_not_none(getattr(Mytest, 'test_hello_2_non_ascii__u2603'))
if is_hash_randomized():
assert_is_not_none(getattr(Mytest, 'test_hello_3'))
else:
assert_is_not_none(getattr(Mytest,
'test_hello_3__u__u2603____data__'))
elif six.PY3:
@ddt
class Mytest(object):
@data('ascii', 'non-ascii-\N{SNOWMAN}', {'\N{SNOWMAN}': 'data'})
def test_hello(self, val):
pass
assert_is_not_none(getattr(Mytest, 'test_hello_1_ascii'))
assert_is_not_none(getattr(Mytest, 'test_hello_2_non_ascii__'))
if is_hash_randomized():
assert_is_not_none(getattr(Mytest, 'test_hello_3'))
else:
assert_is_not_none(getattr(Mytest, 'test_hello_3________data__'))
def test_feed_data_with_invalid_identifier():
"""
    Test that data containing invalid identifier characters is fed to the
    decorated test and produces a sanitized test name
"""
tests = list(filter(is_test, DummyInvalidIdentifier.__dict__))
assert_equal(len(tests), 1)
obj = DummyInvalidIdentifier()
method = getattr(obj, tests[0])
assert_equal(
method.__name__,
'test_data_with_invalid_identifier_1_32v2_g__Gmw845h_W_b53wi_'
)
assert_equal(method(), '32v2 g #Gmw845h$W b53wi.')<|fim▁end|> | created_tests = set([
"test_something_again_{0}_{1}".format(index + 1, name)
for index, name in enumerate(test_data.keys()) |
<|file_name|>GradeBookEntryMeta.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2012-2013 inBloom, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.slc.sli.test.edfi.entities.meta;
import java.util.List;
public class GradeBookEntryMeta {
String id;
List<String> learningObjectiveIds;
GradingPeriodMeta gradingPeriod;
SectionMeta section;
String gradebookEntryType;
String dateAssigned;<|fim▁hole|>
public void setLearningObjectiveIds(List<String> learningObjectiveIds) {
this.learningObjectiveIds = learningObjectiveIds;
}
public List<String> getLearningObjectiveIds() {
return learningObjectiveIds;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public GradingPeriodMeta getGradingPeriod() {
return gradingPeriod;
}
public void setGradingPeriod(GradingPeriodMeta gradingPeriod) {
this.gradingPeriod = gradingPeriod;
}
public SectionMeta getSection() {
return section;
}
public void setSection(SectionMeta section) {
this.section = section;
}
public String getGradebookEntryType() {
return gradebookEntryType;
}
public void setGradebookEntryType(String gradebookEntryType) {
this.gradebookEntryType = gradebookEntryType;
}
public String getDateAssigned() {
return dateAssigned;
}
public void setDateAssigned(String dateAssigned) {
this.dateAssigned = dateAssigned;
}
}<|fim▁end|> | |
<|file_name|>next.rs<|end_file_name|><|fim▁begin|>#![feature(core, unboxed_closures)]
extern crate core;
#[cfg(test)]
mod tests {
use core::iter::Iterator;
use core::iter::FlatMap;
struct A<T> {
begin: T,
end: T
}
macro_rules! Iterator_impl {
($T:ty) => {
impl Iterator for A<$T> {
type Item = $T;
fn next(&mut self) -> Option<Self::Item> {
if self.begin < self.end {
let result = self.begin;
self.begin = self.begin.wrapping_add(1);
Some::<Self::Item>(result)
} else {
None::<Self::Item>
}
}
// fn flat_map<U, F>(self, f: F) -> FlatMap<Self, U, F>
// where Self: Sized, U: IntoIterator, F: FnMut(Self::Item) -> U,
// {
// FlatMap{iter: self, f: f, frontiter: None, backiter: None }
// }
// fn by_ref(&mut self) -> &mut Self where Self: Sized { self }
}
}
}
type T = u32;
Iterator_impl!(T);
// impl<I: Iterator> IntoIterator for I {
// type Item = I::Item;
// type IntoIter = I;
//
// fn into_iter(self) -> I {
// self
// }
// }
// impl<I: Iterator, U: IntoIterator, F> Iterator for FlatMap<I, U, F>
// where F: FnMut(I::Item) -> U,
// {
// type Item = U::Item;
//
// #[inline]
// fn next(&mut self) -> Option<U::Item> {
// loop {
// if let Some(ref mut inner) = self.frontiter {
// if let Some(x) = inner.by_ref().next() {
// return Some(x)
// }
// }
// match self.iter.next().map(|x| (self.f)(x)) {
// None => return self.backiter.as_mut().and_then(|it| it.next()),
// next => self.frontiter = next.map(IntoIterator::into_iter),
// }
// }
// }
//
// #[inline]
// fn size_hint(&self) -> (usize, Option<usize>) {
// let (flo, fhi) = self.frontiter.as_ref().map_or((0, Some(0)), |it| it.size_hint());
// let (blo, bhi) = self.backiter.as_ref().map_or((0, Some(0)), |it| it.size_hint());
// let lo = flo.saturating_add(blo);
// match (self.iter.size_hint(), fhi, bhi) {
// ((0, Some(0)), Some(a), Some(b)) => (lo, a.checked_add(b)),
// _ => (lo, None)
// }
// }
// }
struct F;
type Item = T;
type U = A<T>;
type Args = (Item,);
impl FnOnce<Args> for F {
type Output = U;
extern "rust-call" fn call_once(self, (item,): Args) -> Self::Output {
A { begin: 0, end: item }
}
}
impl FnMut<Args> for F {
extern "rust-call" fn call_mut(&mut self, (item,): Args) -> Self::Output {
A { begin: 0, end: item }
}
}
#[test]
fn next_test1() {
let a: A<T> = A { begin: 0, end: 10 };
let f: F = F;
let mut flat_map: FlatMap<A<T>, U, F> = a.flat_map::<U, F>(f);
for n in 0 .. 10 {
for i in 0..n {
let x: Option<<U as IntoIterator>::Item> = flat_map.next();
match x {
Some(v) => { assert_eq!(v, i); }
None => { assert!(false); }
}
}
}
<|fim▁hole|><|fim▁end|> | assert_eq!(flat_map.next(), None::<Item>);
}
} |
<|file_name|>view_utils.py<|end_file_name|><|fim▁begin|>class AjaxTemplateMixin(object):
ajax_template_name = ''
template_name = ''
def dispatch(self, request, *args, **kwargs):
        if not self.ajax_template_name:
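            # Derive the AJAX template from the full-page template, e.g.
            # "app/detail.html" -> "app/detail_inner.html" (illustrative path).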
split = self.template_name.split('.html')
split[-1] = '_inner'
split.append('.html')
self.ajax_template_name = ''.join(split)
if request.is_ajax():
self.template_name = self.ajax_template_name<|fim▁hole|> return super(AjaxTemplateMixin, self).dispatch(request, *args, **kwargs)<|fim▁end|> | |
<|file_name|>types.rs<|end_file_name|><|fim▁begin|>use std::cell::RefCell;
use std::collections::{HashMap, VecDeque};
use std::ops::Deref;
use std::rc::Rc;
use crate::ast::expressions;
use crate::interpreter::environment;
use crate::utils;
pub enum Type {
Nil,
Boolean(bool),<|fim▁hole|> Reference(Rc<RefCell<Type>>),
Vector(VecDeque<Type>),
Table {
/// For comparison
id: u64,
map: HashMap<Type, Rc<RefCell<Type>>>,
metatable: HashMap<String, Type>,
border: usize,
},
Function {
/// For comparison
id: u64,
parameters: Vec<String>,
varargs: bool,
body: Rc<Box<dyn expressions::Expression>>,
// XXX: Capture only vars function needs?
env: utils::Shared<environment::Environment>,
},
}
impl Type {
pub fn call(&self, _arguments: Vec<&Type>) -> Type {
unimplemented!();
}
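    /// Lua truthiness: only `nil` and `false` are falsy; everything else is truthy.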
pub fn as_bool(&self) -> bool {
match self {
Type::Nil => false,
Type::Boolean(false) => false,
_ => true,
}
}
/// Check if type is nil. We often have special cases for nils
pub fn is_nil(&self) -> bool {
match self {
Type::Nil => true,
Type::Reference(typeref) => typeref.borrow().is_nil(),
_ => false,
}
}
    /// Create a reference to an object, or clone an existing reference
pub fn into_reference(self) -> Rc<RefCell<Self>> {
match self {
Type::Reference(typeref) => typeref,
_ => Rc::new(RefCell::new(self)),
}
}
}
#[cfg(test)]
impl ::std::cmp::PartialEq<&'static str> for Type {
fn eq(&self, other: &&'static str) -> bool {
format!("{:?}", self) == *other
}
}
impl ::std::cmp::PartialEq for Type {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(Type::Boolean(left), Type::Boolean(right)) => left == right,
(Type::Number(left), Type::Number(right)) => left == right,
(Type::String(left), Type::String(right)) => left == right,
(Type::Reference(left), right) => right.eq(left.borrow().deref()),
(left, Type::Reference(right)) => left.eq(right.borrow().deref()),
(Type::Vector(left), Type::Vector(right)) => left == right,
(Type::Table { id: left, .. }, Type::Table { id: right, .. }) => left == right,
(Type::Function { id: left, .. }, Type::Function { id: right, .. }) => left == right,
_ => false,
}
}
}
impl ::std::cmp::Eq for Type {}
impl ::std::hash::Hash for Type {
fn hash<H: ::std::hash::Hasher>(&self, state: &mut H) {
match self {
Type::Nil => 1.hash(state),
Type::Boolean(value) => value.hash(state),
Type::Number(value) => value.to_string().hash(state),
Type::String(value) => value.hash(state),
Type::Reference(value) => value.borrow().hash(state),
Type::Vector(vec) => vec.hash(state),
Type::Table { id, .. } => id.hash(state),
Type::Function { id, .. } => id.hash(state),
}
}
}
/// Display impl used to correctly inform the user about runtime errors
impl ::std::fmt::Display for Type {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
match self {
Type::Function { id, .. } => write!(f, "function ({:x})", id),
Type::Table { id, .. } => write!(f, "table ({:x})", id),
Type::Reference(value) => value.borrow().fmt(f),
_ => write!(f, "{:?}", self),
}
}
}
/// Debug impl that breaks the circular dependency through the closured env
impl ::std::fmt::Debug for Type {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
match self {
Type::Nil => write!(f, "Nil"),
Type::Boolean(value) => write!(f, "Boolean({:?})", value),
Type::Number(value) => write!(f, "Number({:?})", value),
Type::String(value) => write!(f, "String({:?})", value),
Type::Reference(value) => write!(f, "Reference({:?})", value),
Type::Vector(vec) => write!(f, "Vector({:?})", vec),
Type::Table {
id,
map,
metatable,
border,
} => write!(
f,
"Table {{ id: {}, map: {:?}, metatable: {:?}, border: {} }}",
id, map, metatable, border
),
Type::Function {
id,
parameters,
varargs,
body,
env,
} => write!(
f,
"Function {{ id: {:?}, parameters: {:?}, varargs: {:?}, body: {:?}, env: {:?} }}",
id,
parameters,
varargs,
body,
env.borrow().id()
),
}
}
}
/// Macro to use for pattern maching types with respect to type referencing
#[macro_export]
macro_rules! match_type {
(($($typ:expr),+), $($pat:pat => $result:expr),+) => {{
let typs = ($(if let $crate::interpreter::types::Type::Reference(value) = $typ { unsafe { &*value.as_ptr() } } else { $typ }),+);
match typs {$(
$pat => $result
), +}}
};
($typ:expr, $($pat:pat => $result:expr),+) => {{
let typ = if let $crate::interpreter::types::Type::Reference(value) = $typ { unsafe { &*value.as_ptr() } } else { $typ };
#[allow(clippy::single_match)]
match typ {$(
$pat => $result
), +}}
};
}
impl ::std::convert::AsRef<bool> for Type {
fn as_ref(&self) -> &bool {
match_type!(&self,
Type::Boolean(val) => val,
_ => panic!("Cannot convert lua value {} to a boolean", self)
)
}
}
impl ::std::convert::AsRef<f64> for Type {
fn as_ref(&self) -> &f64 {
match_type!(&self,
Type::Number(val) => val,
_ => panic!("Cannot convert lua value {} to a number", self)
)
}
}
impl ::std::convert::AsRef<String> for Type {
fn as_ref(&self) -> &String {
match_type!(&self,
Type::String(val) => val,
_ => panic!("Cannot convert lua value {} to a string", self)
)
}
}
impl ::std::convert::AsRef<VecDeque<Type>> for Type {
fn as_ref(&self) -> &VecDeque<Type> {
match_type!(&self,
Type::Vector(val) => val,
_ => panic!("Cannot convert lua value {} to a deque", self)
)
}
}
impl ::std::convert::AsRef<HashMap<Type, Rc<RefCell<Type>>>> for Type {
fn as_ref(&self) -> &HashMap<Type, Rc<RefCell<Type>>> {
match_type!(&self,
Type::Table { map, .. } => map,
_ => panic!("Cannot convert lua value {} to a hashmap", self)
)
}
}<|fim▁end|> | Number(f64),
String(String),
/// Reference to an existing value |
<|file_name|>framework.ts<|end_file_name|><|fim▁begin|>import { templates } from "./templates";
import { tools } from "./tools";
import { DB, STORE, DBVERSION, OptionTypes, m } from "./schema";
import { store } from "./storage";
import { controls } from "./controls";
import { routing } from "./routing";
import { web } from "./web";
import { lists } from "./lists";
import { forms } from "./forms";
declare const Awesomplete;
export function onready(callback: Function, appName?: string) {
document.addEventListener("DOMContentLoaded", function (e) {
templates.master.loadMaster(document.documentElement.outerHTML).then(() => {
let proms = [];
document.querySelectorAll("[data-m-include]").each((idx: number, elem: Element) => {
let prom = templates.load(elem.attribute("data-m-include")).then(result => {
if (elem.attribute("data-m-type") != null && elem.attribute("data-m-type") == "markdown") {
(<HTMLElement>elem).innerHTML = templates.markdown.toHTML(result);
(<HTMLElement>elem).show();
}
else {
(<HTMLElement>elem).innerHTML = result;
}
});
proms.push(prom);
});
Promise.all(proms).then(() => {
document.querySelectorAll("[data-m-type='markdown']").each((idx: number, elem: Element) => {
if (elem.attribute("data-m-include") == null) {
(<HTMLElement>elem).innerHTML = templates.markdown.toHTML((<HTMLElement>elem).innerHTML);
(<HTMLElement>elem).show();
}
});
let root: string = routing.getApplicationRoot(document.documentElement.outerHTML);
appName = (appName != null) ? appName : routing.getApplicationName(document.documentElement.outerHTML);
let iDB = (appName == null) ? DB : `${DB}.${appName.lower()}`;
let iDBStore = (appName == null) ? STORE : `${STORE}.${appName.lower()}`;
let storage = new store(iDB, DBVERSION, iDBStore);
storage.init().then((result) => {
return storage.getItem("metron.config", "value");
}).then((result) => {
if (result != null) {
m.config = JSON.parse(<string><any>result);
m.globals.firstLoad = true;
if (callback != null) {
callback(e);
}
}
else {
new Promise((resolve, reject) => {
web.loadJSON(`${root}/metron.json`, (configData: JSON) => {
for (let obj in configData) {
if (m.config[obj] == null) {
m.config[obj] = configData[obj];
}
}
m.config["config.baseURL"] = `${document.location.protocol}//${document.location.host}`;
storage.init().then((result) => {
return storage.setItem("metron.config", JSON.stringify(m.config));
}).then((result) => {
resolve(configData);
}).catch((rs) => {
console.log(`Error: Failed to access storage. ${rs}`);
});
});
}).then(() => {
m.globals.firstLoad = true;
if (callback != null) {
callback(e);
}
}).catch((rsn) => {
console.log(`Error: Promise execution failed! ${rsn}`);
});
}
}).catch((reason) => {
console.log(`Error: Failed to access storage. ${reason}`);
});
});
}).catch(() => {
console.log("Failed to check for master page.");
});
});
}
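// Illustrative use (hypothetical route and handler, not from this codebase):
// load(/users/, { n: "users", func: () => { /* render the users view */ } });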
export function load(re: RegExp, func: Function | { n: string, func: Function }): void {
let n, f;
if(typeof func == "object") {
n = func.n;
f = func.func;
}
else {
f = func;
}
let h = () => {
if(n !== undefined) {
let p = document.querySelector("[data-m-type='pivot']");
if(p !== undefined) {
controls.getPivot(p.attribute("data-m-page")).exact(n);
}
}
f();
};
routing.add(re, h);
}
export function ifQuerystring(callback: Function): void {
let qs: string = <string><any>web.querystring();
if (qs != "") {
let parameters = tools.formatOptions(qs, OptionTypes.QUERYSTRING);
if (callback != null) {
callback(parameters);
}
}
}
export function loadOptionalFunctionality(): void {
if (typeof Awesomplete !== undefined) {
document.querySelectorAll("[data-m-autocomplete]").each((idx: number, elem: Element) => {
let endpoint = elem.attribute("data-m-autocomplete");
let url: string = (endpoint.toLowerCase().startsWith("http")) ? endpoint : routing.getAPIURL(endpoint);
let awesome = new Awesomplete(elem, {
minChars: 1,
sort: false,
maxItems: 15,
replace: function (item) {
if (elem.attribute("data-m-search-hidden-store") != null && elem.attribute("data-m-search-hidden-store") != '') {
this.input.value = item.label;
(<HTMLInputElement>document.querySelector(`#${elem.attribute("data-m-search-hidden-store")}`)).val(item.value);
(<HTMLInputElement>document.querySelector(`#${elem.attribute("data-m-search-hidden-store")}`)).dispatchEvent(new Event("change"));
} else {
this.input.value = item.value;
}
}
});
elem.removeEvent("keyup").addEvent("keyup", function (e) {
web.get(`${url}${web.querystringify({ IsActive: true, Search: this.val() })}`, null, null, "json", (result) => {
let list = [];
if (result != null) {
for (var a in result) {
if (result.hasOwnProperty(a)) {
if (result[a][elem.attribute("data-m-search-text")] != null) {
var item = { label: result[a][elem.attribute("data-m-search-text")], value: result[a][elem.attribute("data-m-search-value")] };
list.push(item);
}
}
}
awesome.list = list;
}
});
});
});
}
}
window.onhashchange = function () {
if (!m.globals.hashLoadedFromApplication) {
let hasPivoted = false;
let section = document.querySelector("[data-m-type='pivot']");
if (section != null) {
let page = section.attribute("data-m-page");
if (page != null) {
let p = controls.getPivot(page);
p.previous();
hasPivoted = true;
}
}
if (!hasPivoted) {
window.location.reload(false);<|fim▁hole|> }
}
m.globals.hashLoadedFromApplication = false;
}
onready((e: Event) => {
function recursePivot(elem: Element): void {
if (elem != null) {
elem.show();
let route = elem.attribute("data-m-page");
let pivot = elem.up("[data-m-type='pivot']");
let pivotPageName = pivot.attribute("data-m-page");
elem.up("[data-m-type='pivot']").querySelectorAll("[data-m-segment='pivot-item']").each((idx: number, el: Element) => {
if(el.up("[data-m-type='pivot']").attribute("data-m-page") === pivotPageName) {
if (el.attribute("data-m-page") != route) {
el.hide();
}
}
});
let parent = elem.parent().up("[data-m-segment='pivot-item']");
if(parent != null) {
recursePivot(parent);
}
}
}
let wantsAutoload: boolean = ((document.querySelector("[data-m-autoload]") != null) && (document.querySelector("[data-m-autoload]").attribute("data-m-autoload") == "true"));
document.querySelectorAll("[data-m-state='hide']").each((idx: number, elem: Element) => {
elem.hide();
});
controls.pivots.bindAll(() => {
let route = routing.getRouteName();
if (route != null) {
let page = document.querySelector(`[data-m-segment='pivot-item'][data-m-page="${route}"]`);
recursePivot(page);
}
loadOptionalFunctionality();
if (wantsAutoload) {
lists.bindAll(() => {
forms.bindAll(() => {
controls.polyfill();
});
});
}
});
});<|fim▁end|> | |
<|file_name|>run.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright Contributors to the Open Shading Language project.
# SPDX-License-Identifier: BSD-3-Clause
# https://github.com/AcademySoftwareFoundation/OpenShadingLanguage
command = testshade("-g 128 128 -od uint8 -o Cout out.tif test")<|fim▁hole|><|fim▁end|> | outputs = [ "out.txt", "out.tif" ] |
<|file_name|>ParamRoute.java<|end_file_name|><|fim▁begin|>package org.giwi.geotracker.routes.priv;
import io.vertx.core.Vertx;
import io.vertx.ext.web.Router;
import io.vertx.ext.web.RoutingContext;
import org.giwi.geotracker.annotation.VertxRoute;
import org.giwi.geotracker.beans.AuthUtils;
import org.giwi.geotracker.exception.BusinessException;
import org.giwi.geotracker.services.ParamService;
import javax.inject.Inject;
/**
* The type Param route.
*/
@VertxRoute(rootPath = "/api/1/private/param")
public class ParamRoute implements VertxRoute.Route {
@Inject
private ParamService paramService;
@Inject
private AuthUtils authUtils;
/**
* Init router.
*
* @param vertx the vertx
* @return the router
*/
@Override
public Router init(Vertx vertx) {
Router router = Router.router(vertx);
router.get("/roles").handler(this::getRoles);
return router;
}
/**
* @api {get} /api/1/private/param/roles Get roles
* @apiName getRoles
* @apiGroup Params
* @apiDescription Get roles
* @apiHeader {String} secureToken User secureToken
* @apiSuccess {Array} roles Role[]
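     * @apiExample {curl} Example usage (illustrative host):
     *     curl -H "secureToken: <token>" https://example.com/api/1/private/param/roles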
*/
private void getRoles(RoutingContext ctx) {
paramService.getRoles(res -> {<|fim▁hole|> if (res.succeeded()) {
ctx.response().end(res.result().encode());
} else {
ctx.fail(new BusinessException(res.cause()));
}
});
}
}<|fim▁end|> | |
<|file_name|>group.py<|end_file_name|><|fim▁begin|>"""Describe group states."""
from homeassistant.components.group import GroupIntegrationRegistry
from homeassistant.const import STATE_OK, STATE_PROBLEM
from homeassistant.core import HomeAssistant, callback
@callback
def async_describe_on_off_states(
hass: HomeAssistant, registry: GroupIntegrationRegistry
) -> None:<|fim▁hole|><|fim▁end|> | """Describe group on off states."""
registry.on_off_states({STATE_PROBLEM}, STATE_OK) |
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for beeper 1.1
// Project: https://github.com/sindresorhus/beeper#readme
// Definitions by: BendingBender <https://github.com/BendingBender>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
<|fim▁hole|>declare function beeper(melody: string, cb?: () => void): void;<|fim▁end|> | export = beeper;
declare function beeper(count?: number, cb?: () => void): void; |
<|file_name|>ember-validation-error-list.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | export { default } from 'ember-validation/components/ember-validation-error-list'; |
<|file_name|>column.test.cpp<|end_file_name|><|fim▁begin|>#include <string>
#include <bandit/bandit.h>
#include "db.test.h"
#include "record.h"
#include "sqlite/column.h"
using namespace bandit;
using namespace std;
using namespace coda::db;
using namespace snowhouse;
column get_user_column(const string &name)
{
select_query q(test::current_session);
q.from("users");
auto rs = q.execute();
if (!rs.is_valid()) {
throw database_exception("no rows in test");
}
auto row = rs.begin();
if (row == rs.end() || !row->is_valid()) {
throw database_exception("no rows in test");
}
return row->column(name);
}
specification(columns, []() {
describe("column", []() {
before_each([]() { test::setup_current_session(); });
after_each([]() { test::teardown_current_session(); });
before_each([]() {
test::user u;
u.set("first_name", "Bob");
u.set("last_name", "Jenkins");<|fim▁hole|>
*value = 4;
sql_blob data(value, sizeof(int));
delete value;
u.set("data", data);
u.set("tval", sql_time());
u.save();
});
it("is copyable", []() {
auto col = get_user_column("first_name");
column other(col);
AssertThat(other.is_valid(), IsTrue());
AssertThat(other.value(), Equals(col.value()));
});
it("is movable", []() {
auto col = get_user_column("first_name");
auto val = col.value();
column &&other(std::move(col));
AssertThat(other.is_valid(), IsTrue());
AssertThat(other.value(), Equals(val));
column &&last = get_user_column("last_name");
last = std::move(other);
AssertThat(other.is_valid(), IsFalse());
AssertThat(last.is_valid(), IsTrue());
AssertThat(last.value(), Equals(val));
});
it("can be a blob", []() {
auto col = get_user_column("data");
AssertThat(col.value().is<sql_blob>(), IsTrue());
auto blob = col.value().as<sql_blob>();
AssertThat(blob.size(), Equals(sizeof(int)));
int* p = static_cast<int*>(blob.get());
AssertThat(*p, Equals(4));
});
it("can be a time", []() {
auto col = get_user_column("tval");
AssertThat(col.value().as<sql_time>().value() > 0, IsTrue());
});
it("can be a double", []() {
auto col = get_user_column("dval");
AssertThat(col.value().as<double>(), Equals(123.321));
double val = col;
AssertThat(val, Equals(123.321));
});
it("can be a float", []() {
auto col = get_user_column("dval");
AssertThat(col.value().as<float>(), Equals(123.321f));
float val = col;
AssertThat(val, Equals(123.321f));
});
it("can be an int64", []() {
auto col = get_user_column("id");
AssertThat(col.value().as<long long>() > 0, IsTrue());
long long val = col;
AssertThat(val > 0, IsTrue());
});
it("can be an unsigned int", []() {
auto col = get_user_column("id");
AssertThat(col.value().as<unsigned int>() > 0, IsTrue());
unsigned val = col;
AssertThat(val > 0, IsTrue());
unsigned long long val2 = col;
AssertThat(val2 > 0, IsTrue());
});
it("can be a string", []() {
auto col = get_user_column("first_name");
AssertThat(col.value(), Equals("Bob"));
std::string val = col;
AssertThat(val, Equals("Bob"));
});
});
});<|fim▁end|> | u.set("dval", 123.321);
int *value = new int; |
<|file_name|>test_orm.py<|end_file_name|><|fim▁begin|>from collections import defaultdict
from openerp.tools import mute_logger
from openerp.tests import common
UID = common.ADMIN_USER_ID
class TestORM(common.TransactionCase):
""" test special behaviors of ORM CRUD functions
    TODO: use real Exception types instead of Exception """
def setUp(self):
super(TestORM, self).setUp()
cr, uid = self.cr, self.uid
self.partner = self.registry('res.partner')
self.users = self.registry('res.users')
self.p1 = self.partner.name_create(cr, uid, 'W')[0]
self.p2 = self.partner.name_create(cr, uid, 'Y')[0]
self.ir_rule = self.registry('ir.rule')
# sample unprivileged user
employee_gid = self.ref('base.group_user')
self.uid2 = self.users.create(cr, uid, {'name': 'test user', 'login': 'test', 'groups_id': [4,employee_gid]})
@mute_logger('openerp.models')
def testAccessDeletedRecords(self):
""" Verify that accessing deleted records works as expected """
cr, uid, uid2, p1, p2 = self.cr, self.uid, self.uid2, self.p1, self.p2
self.partner.unlink(cr, uid, [p1])
# read() is expected to skip deleted records because our API is not
# transactional for a sequence of search()->read() performed from the
# client-side... a concurrent deletion could therefore cause spurious
# exceptions even when simply opening a list view!
        # /!\ Using unprivileged user to detect former side effects of ir.rules!
self.assertEqual([{'id': p2, 'name': 'Y'}], self.partner.read(cr, uid2, [p1,p2], ['name']), "read() should skip deleted records")
self.assertEqual([], self.partner.read(cr, uid2, [p1], ['name']), "read() should skip deleted records")
# Deleting an already deleted record should be simply ignored
self.assertTrue(self.partner.unlink(cr, uid, [p1]), "Re-deleting should be a no-op")
# Updating an already deleted record should raise, even as admin
with self.assertRaises(Exception):
self.partner.write(cr, uid, [p1], {'name': 'foo'})
@mute_logger('openerp.models')
def testAccessFilteredRecords(self):
""" Verify that accessing filtered records works as expected for non-admin user """
cr, uid, uid2, p1, p2 = self.cr, self.uid, self.uid2, self.p1, self.p2
partner_model = self.registry('ir.model').search(cr, uid, [('model','=','res.partner')])[0]
        self.ir_rule.create(cr, uid, {'name': 'W is invisible',
'domain_force': [('id', '!=', p1)],
'model_id': partner_model})
# search as unprivileged user
partners = self.partner.search(cr, uid2, [])
self.assertFalse(p1 in partners, "W should not be visible...")
self.assertTrue(p2 in partners, "... but Y should be visible")
# read as unprivileged user
with self.assertRaises(Exception):
self.partner.read(cr, uid2, [p1], ['name'])
# write as unprivileged user
with self.assertRaises(Exception):
self.partner.write(cr, uid2, [p1], {'name': 'foo'})
# unlink as unprivileged user
with self.assertRaises(Exception):
self.partner.unlink(cr, uid2, [p1])
# Prepare mixed case
self.partner.unlink(cr, uid, [p2])
# read mixed records: some deleted and some filtered
with self.assertRaises(Exception):
self.partner.read(cr, uid2, [p1,p2], ['name'])
# delete mixed records: some deleted and some filtered
with self.assertRaises(Exception):
self.partner.unlink(cr, uid2, [p1,p2])
def test_multi_read(self):
record_id = self.partner.create(self.cr, UID, {'name': 'MyPartner1'})
records = self.partner.read(self.cr, UID, [record_id])
self.assertIsInstance(records, list)
def test_one_read(self):
record_id = self.partner.create(self.cr, UID, {'name': 'MyPartner1'})
record = self.partner.read(self.cr, UID, record_id)
self.assertIsInstance(record, dict)
@mute_logger('openerp.models')
def test_search_read(self):
# simple search_read
self.partner.create(self.cr, UID, {'name': 'MyPartner1'})
found = self.partner.search_read(self.cr, UID, [['name', '=', 'MyPartner1']], ['name'])
self.assertEqual(len(found), 1)
self.assertEqual(found[0]['name'], 'MyPartner1')
self.assertTrue('id' in found[0])
# search_read correct order
self.partner.create(self.cr, UID, {'name': 'MyPartner2'})
found = self.partner.search_read(self.cr, UID, [['name', 'like', 'MyPartner']], ['name'], order="name")
self.assertEqual(len(found), 2)
self.assertEqual(found[0]['name'], 'MyPartner1')
self.assertEqual(found[1]['name'], 'MyPartner2')
found = self.partner.search_read(self.cr, UID, [['name', 'like', 'MyPartner']], ['name'], order="name desc")
self.assertEqual(len(found), 2)
self.assertEqual(found[0]['name'], 'MyPartner2')
self.assertEqual(found[1]['name'], 'MyPartner1')
# search_read that finds nothing
found = self.partner.search_read(self.cr, UID, [['name', '=', 'Does not exists']], ['name'])
self.assertEqual(len(found), 0)
def test_exists(self):
partner = self.partner.browse(self.cr, UID, [])
# check that records obtained from search exist
recs = partner.search([])
self.assertTrue(recs)
self.assertEqual(recs.exists(), recs)
# check that there is no record with id 0
recs = partner.browse([0])
self.assertFalse(recs.exists())
def test_groupby_date(self):
partners = dict(
A='2012-11-19',
B='2012-12-17',
C='2012-12-31',
D='2013-01-07',
E='2013-01-14',
F='2013-01-28',
G='2013-02-11',
)
all_partners = []
partners_by_day = defaultdict(set)
partners_by_month = defaultdict(set)
partners_by_year = defaultdict(set)
for name, date in partners.items():
p = self.partner.create(self.cr, UID, dict(name=name, date=date))
all_partners.append(p)
partners_by_day[date].add(p)
partners_by_month[date.rsplit('-', 1)[0]].add(p)
partners_by_year[date.split('-', 1)[0]].add(p)
def read_group(interval, domain=None):
main_domain = [('id', 'in', all_partners)]
if domain:
domain = ['&'] + main_domain + domain
else:
domain = main_domain
rg = self.partner.read_group(self.cr, self.uid, domain, ['date'], 'date' + ':' + interval)
result = {}
for r in rg:
result[r['date:' + interval]] = set(self.partner.search(self.cr, self.uid, r['__domain']))
return result
self.assertEqual(len(read_group('day')), len(partners_by_day))
self.assertEqual(len(read_group('month')), len(partners_by_month))
self.assertEqual(len(read_group('year')), len(partners_by_year))
rg = self.partner.read_group(self.cr, self.uid, [('id', 'in', all_partners)],
['date'], ['date:month', 'date:day'], lazy=False)
self.assertEqual(len(rg), len(all_partners))
def test_write_duplicate(self):
cr, uid, p1 = self.cr, self.uid, self.p1
self.partner.write(cr, uid, [p1, p1], {'name': 'X'})
def test_m2m_store_trigger(self):
group_user = self.env.ref('base.group_user')
user = self.env['res.users'].create({
'name': 'test',
'login': 'test_m2m_store_trigger',
'groups_id': [(6, 0, [])],
})
self.assertTrue(user.share)
group_user.write({'users': [(4, user.id)]})
self.assertFalse(user.share)
group_user.write({'users': [(3, user.id)]})
self.assertTrue(user.share)
class TestInherits(common.TransactionCase):
""" test the behavior of the orm for models that use _inherits;
specifically: res.users, that inherits from res.partner
"""
def setUp(self):
super(TestInherits, self).setUp()
self.partner = self.registry('res.partner')
self.user = self.registry('res.users')
def test_default(self):
""" `default_get` cannot return a dictionary or a new id """
defaults = self.user.default_get(self.cr, UID, ['partner_id'])
if 'partner_id' in defaults:
self.assertIsInstance(defaults['partner_id'], (bool, int, long))
def test_create(self):
""" creating a user should automatically create a new partner """
partners_before = self.partner.search(self.cr, UID, [])
foo_id = self.user.create(self.cr, UID, {'name': 'Foo', 'login': 'foo', 'password': 'foo'})
foo = self.user.browse(self.cr, UID, foo_id)
self.assertNotIn(foo.partner_id.id, partners_before)
def test_create_with_ancestor(self):
""" creating a user with a specific 'partner_id' should not create a new partner """
par_id = self.partner.create(self.cr, UID, {'name': 'Foo'})
partners_before = self.partner.search(self.cr, UID, [])
foo_id = self.user.create(self.cr, UID, {'partner_id': par_id, 'login': 'foo', 'password': 'foo'})
partners_after = self.partner.search(self.cr, UID, [])
self.assertEqual(set(partners_before), set(partners_after))
foo = self.user.browse(self.cr, UID, foo_id)
self.assertEqual(foo.name, 'Foo')
self.assertEqual(foo.partner_id.id, par_id)
@mute_logger('openerp.models')
def test_read(self):
""" inherited fields should be read without any indirection """
foo_id = self.user.create(self.cr, UID, {'name': 'Foo', 'login': 'foo', 'password': 'foo'})
foo_values, = self.user.read(self.cr, UID, [foo_id])
partner_id = foo_values['partner_id'][0]
partner_values, = self.partner.read(self.cr, UID, [partner_id])
self.assertEqual(foo_values['name'], partner_values['name'])
foo = self.user.browse(self.cr, UID, foo_id)
self.assertEqual(foo.name, foo.partner_id.name)
@mute_logger('openerp.models')
def test_copy(self):
""" copying a user should automatically copy its partner, too """
foo_id = self.user.create(self.cr, UID, {
'name': 'Foo',
'login': 'foo',
'password': 'foo',
'supplier': True,
})
foo_before, = self.user.read(self.cr, UID, [foo_id])
del foo_before['__last_update']
bar_id = self.user.copy(self.cr, UID, foo_id, {
'login': 'bar',
'password': 'bar',
})
foo_after, = self.user.read(self.cr, UID, [foo_id])
del foo_after['__last_update']
self.assertEqual(foo_before, foo_after)
foo, bar = self.user.browse(self.cr, UID, [foo_id, bar_id])
self.assertEqual(bar.name, 'Foo (copy)')
self.assertEqual(bar.login, 'bar')
self.assertEqual(foo.supplier, bar.supplier)
self.assertNotEqual(foo.id, bar.id)
self.assertNotEqual(foo.partner_id.id, bar.partner_id.id)
@mute_logger('openerp.models')
def test_copy_with_ancestor(self):
""" copying a user with 'parent_id' in defaults should not duplicate the partner """
foo_id = self.user.create(self.cr, UID, {'name': 'Foo', 'login': 'foo', 'password': 'foo',
'login_date': '2016-01-01', 'signature': 'XXX'})
par_id = self.partner.create(self.cr, UID, {'name': 'Bar'})
foo_before, = self.user.read(self.cr, UID, [foo_id])
del foo_before['__last_update']
del foo_before['login_date']
partners_before = self.partner.search(self.cr, UID, [])
bar_id = self.user.copy(self.cr, UID, foo_id, {'partner_id': par_id, 'login': 'bar'})
foo_after, = self.user.read(self.cr, UID, [foo_id])
del foo_after['__last_update']
del foo_after['login_date']
partners_after = self.partner.search(self.cr, UID, [])
self.assertEqual(foo_before, foo_after)
self.assertEqual(set(partners_before), set(partners_after))
foo, bar = self.user.browse(self.cr, UID, [foo_id, bar_id])
self.assertNotEqual(foo.id, bar.id)
self.assertEqual(bar.partner_id.id, par_id)
self.assertEqual(bar.login, 'bar', "login is given from copy parameters")
self.assertFalse(bar.password, "password should not be copied from original record")
self.assertEqual(bar.name, 'Bar', "name is given from specific partner")
self.assertEqual(bar.signature, foo.signature, "signature should be copied")
CREATE = lambda values: (0, False, values)
UPDATE = lambda id, values: (1, id, values)
DELETE = lambda id: (2, id, False)
FORGET = lambda id: (3, id, False)
LINK_TO = lambda id: (4, id, False)
DELETE_ALL = lambda: (5, False, False)
REPLACE_WITH = lambda ids: (6, False, ids)
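# These helpers mirror the one2many/many2many command triplets understood by the
# ORM's write(): 0=CREATE, 1=UPDATE, 2=DELETE, 3=FORGET, 4=LINK_TO,
# 5=DELETE_ALL, 6=REPLACE_WITH.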
def sorted_by_id(list_of_dicts):
"sort dictionaries by their 'id' field; useful for comparisons"
return sorted(list_of_dicts, key=lambda d: d.get('id'))
class TestO2MSerialization(common.TransactionCase):
""" test the orm method 'write' on one2many fields """
def setUp(self):
super(TestO2MSerialization, self).setUp()
self.partner = self.registry('res.partner')
def test_no_command(self):
" empty list of commands yields an empty list of records "
results = self.partner.resolve_2many_commands(
self.cr, UID, 'child_ids', [])
self.assertEqual(results, [])
def test_CREATE_commands(self):
" returns the VALUES dict as-is "
values = [{'foo': 'bar'}, {'foo': 'baz'}, {'foo': 'baq'}]
results = self.partner.resolve_2many_commands(
self.cr, UID, 'child_ids', map(CREATE, values))
self.assertEqual(results, values)
def test_LINK_TO_command(self):
" reads the records from the database, records are returned with their ids. "
ids = [
self.partner.create(self.cr, UID, {'name': 'foo'}),
self.partner.create(self.cr, UID, {'name': 'bar'}),
self.partner.create(self.cr, UID, {'name': 'baz'})
]
commands = map(LINK_TO, ids)
results = self.partner.resolve_2many_commands(
self.cr, UID, 'child_ids', commands, ['name'])
self.assertEqual(sorted_by_id(results), sorted_by_id([
{'id': ids[0], 'name': 'foo'},
{'id': ids[1], 'name': 'bar'},
{'id': ids[2], 'name': 'baz'}
]))
def test_bare_ids_command(self):
" same as the equivalent LINK_TO commands "
ids = [
self.partner.create(self.cr, UID, {'name': 'foo'}),
self.partner.create(self.cr, UID, {'name': 'bar'}),
self.partner.create(self.cr, UID, {'name': 'baz'})
]
results = self.partner.resolve_2many_commands(
self.cr, UID, 'child_ids', ids, ['name'])
self.assertEqual(sorted_by_id(results), sorted_by_id([
{'id': ids[0], 'name': 'foo'},
{'id': ids[1], 'name': 'bar'},
{'id': ids[2], 'name': 'baz'}
]))
def test_UPDATE_command(self):
" take the in-db records and merge the provided information in "
id_foo = self.partner.create(self.cr, UID, {'name': 'foo'})
id_bar = self.partner.create(self.cr, UID, {'name': 'bar'})
id_baz = self.partner.create(self.cr, UID, {'name': 'baz', 'city': 'tag'})
results = self.partner.resolve_2many_commands(
self.cr, UID, 'child_ids', [
LINK_TO(id_foo),
UPDATE(id_bar, {'name': 'qux', 'city': 'tagtag'}),
UPDATE(id_baz, {'name': 'quux'})
], ['name', 'city'])
self.assertEqual(sorted_by_id(results), sorted_by_id([
{'id': id_foo, 'name': 'foo', 'city': False},
{'id': id_bar, 'name': 'qux', 'city': 'tagtag'},
{'id': id_baz, 'name': 'quux', 'city': 'tag'}
]))
def test_DELETE_command(self):
" deleted records are not returned at all. "
ids = [
self.partner.create(self.cr, UID, {'name': 'foo'}),
self.partner.create(self.cr, UID, {'name': 'bar'}),
self.partner.create(self.cr, UID, {'name': 'baz'})
]
commands = [DELETE(ids[0]), DELETE(ids[1]), DELETE(ids[2])]
results = self.partner.resolve_2many_commands(
self.cr, UID, 'child_ids', commands, ['name'])
self.assertEqual(results, [])
def test_mixed_commands(self):
ids = [
self.partner.create(self.cr, UID, {'name': name})
for name in ['NObar', 'baz', 'qux', 'NOquux', 'NOcorge', 'garply']
]
results = self.partner.resolve_2many_commands(
self.cr, UID, 'child_ids', [
CREATE({'name': 'foo'}),
UPDATE(ids[0], {'name': 'bar'}),
LINK_TO(ids[1]),
DELETE(ids[2]),
UPDATE(ids[3], {'name': 'quux',}),
UPDATE(ids[4], {'name': 'corge'}),
CREATE({'name': 'grault'}),
LINK_TO(ids[5])
], ['name'])
self.assertEqual(sorted_by_id(results), sorted_by_id([
{'name': 'foo'},
{'id': ids[0], 'name': 'bar'},
{'id': ids[1], 'name': 'baz'},
{'id': ids[3], 'name': 'quux'},
{'id': ids[4], 'name': 'corge'},
{'name': 'grault'},
{'id': ids[5], 'name': 'garply'}
]))
def test_LINK_TO_pairs(self):
"LINK_TO commands can be written as pairs, instead of triplets"
ids = [
self.partner.create(self.cr, UID, {'name': 'foo'}),
self.partner.create(self.cr, UID, {'name': 'bar'}),
self.partner.create(self.cr, UID, {'name': 'baz'})
]
commands = map(lambda id: (4, id), ids)
results = self.partner.resolve_2many_commands(
self.cr, UID, 'child_ids', commands, ['name'])
<|fim▁hole|> {'id': ids[1], 'name': 'bar'},
{'id': ids[2], 'name': 'baz'}
]))
def test_singleton_commands(self):
"DELETE_ALL can appear as a singleton"
results = self.partner.resolve_2many_commands(
self.cr, UID, 'child_ids', [DELETE_ALL()], ['name'])
self.assertEqual(results, [])<|fim▁end|> | self.assertEqual(sorted_by_id(results), sorted_by_id([
{'id': ids[0], 'name': 'foo'}, |
<|file_name|>inversedocumentfrequencyvectorcreator.py<|end_file_name|><|fim▁begin|>import math
from ..df import DocumentFrequencyVectorCreator
from . import InverseDocumentFrequencyVector
class InverseDocumentFrequencyVectorCreator(DocumentFrequencyVectorCreator):
"""Creates inverse-document-frequency vectors
Inherits from :class:`recommender.vector.abstractvector.VectorCreator`
    :parameter db_connection_str: connection string for a database built with :class:`recommender.vector.vectortablecreator.VectorTableCreator`
    :type db_connection_str: str
:raises: TypeError
"""
def __init__(self, db_connection_str):
super(InverseDocumentFrequencyVectorCreator, self).__init__(db_connection_str)
self._create_inverse_document_frequency_view()
pass
def _create_vector(self, document_id=None):
vector = InverseDocumentFrequencyVector()
with self._get_db_connection() as conn:
cursor = conn.cursor()
self._create_log_function(conn)
values = self._get_vector_values_from_db(cursor)
for value in [] if values is None else values:
vector.add_to_vector(value)
return vector
def _get_vector_values_from_db(self, c):
c.execute(
'''
SELECT
[term_id]
, [name]
, [value]
FROM
[InverseDocumentFrequency]
;
''')
vector_values = []
for result in c.fetchall():
vector_values.append((result[0], result[1], result[2]))
pass
return None if not vector_values else vector_values
def _create_log_function(self, conn):
conn.create_function('log10', 1, InverseDocumentFrequencyVectorCreator.log_10)
pass
@staticmethod<|fim▁hole|> def log_10(x):
"""simply a method calculating log_10 used by the view in :func:`_create_inverse_document_frequency_view`
"""
base = 10
return math.log(x, base)
def _create_inverse_document_frequency_view(self):
"""Creates a view in the database required for building idf-vectors
"""
with self._get_db_connection() as conn:
self._create_log_function(conn)
c = conn.cursor()
c.execute(
'''
CREATE VIEW IF NOT EXISTS [InverseDocumentFrequency] AS
SELECT
[term_id]
, [name]
, log10
(
CAST ((SELECT [document_count] from [N]) AS REAL) / [df].[value]
)
AS [value]
FROM
[DocumentFrequency] AS [df]
ORDER BY
[term_id]
;
''')
pass<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![cfg_attr(feature="clippy", feature(plugin))]
#![cfg_attr(feature="clippy", plugin(clippy))]
<|fim▁hole|><|fim▁end|> | pub mod second; |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import re
import os.path
from io import open
from setuptools import find_packages, setup
try:
from azure_bdist_wheel import cmdclass
except ImportError:
from distutils import log as logger
logger.warn("Wheel is not available, disabling bdist_wheel hook")
cmdclass = {}
# Change the PACKAGE_NAME only to change folder and different name
PACKAGE_NAME = "azure-cognitiveservices-vision-face"
PACKAGE_PPRINT_NAME = "Cognitive Services Face"
# a-b-c => a/b/c
package_folder_path = PACKAGE_NAME.replace('-', '/')
# a-b-c => a.b.c
namespace_name = PACKAGE_NAME.replace('-', '.')
# azure v0.x is not compatible with this package
# azure v0.x used to have a __version__ attribute (newer versions don't)
try:
import azure
try:
ver = azure.__version__
raise Exception(
'This package is incompatible with azure=={}. '.format(ver) +
'Uninstall it with "pip uninstall azure".'
)
except AttributeError:
pass
except ImportError:
pass
# Version extraction inspired from 'requests'
with open(os.path.join(package_folder_path, 'version.py'), 'r') as fd:
version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',<|fim▁hole|>
with open('README.rst', encoding='utf-8') as f:
readme = f.read()
with open('HISTORY.rst', encoding='utf-8') as f:
history = f.read()
setup(
name=PACKAGE_NAME,
version=version,
description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
long_description=readme + '\n\n' + history,
license='MIT License',
author='Microsoft Corporation',
author_email='[email protected]',
url='https://github.com/Azure/azure-sdk-for-python',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'License :: OSI Approved :: MIT License',
],
zip_safe=False,
packages=find_packages(exclude=["tests"]),
install_requires=[
'msrest>=0.4.24,<2.0.0',
'azure-common~=1.1',
],
cmdclass=cmdclass
)<|fim▁end|> | fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information') |
<|file_name|>ocsp.go<|end_file_name|><|fim▁begin|>// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package ocsp parses OCSP responses as specified in RFC 2560. OCSP responses
// are signed messages attesting to the validity of a certificate for a small
// period of time. This is used to manage revocation for X.509 certificates.
package ocsp
import (
"crypto"
"crypto/ecdsa"
"crypto/elliptic"
"crypto/rand"
"crypto/rsa"
_ "crypto/sha1"
_ "crypto/sha256"
_ "crypto/sha512"
"crypto/x509"
"crypto/x509/pkix"
"encoding/asn1"
"errors"
"fmt"
"math/big"
"strconv"
"time"
)
var idPKIXOCSPBasic = asn1.ObjectIdentifier([]int{1, 3, 6, 1, 5, 5, 7, 48, 1, 1})
// ResponseStatus contains the result of an OCSP request. See
// https://tools.ietf.org/html/rfc6960#section-2.3
type ResponseStatus int
const (
Success ResponseStatus = 0
Malformed ResponseStatus = 1
InternalError ResponseStatus = 2
TryLater ResponseStatus = 3
// Status code four is unused in OCSP. See
// https://tools.ietf.org/html/rfc6960#section-4.2.1
SignatureRequired ResponseStatus = 5
Unauthorized ResponseStatus = 6
)
<|fim▁hole|> case Malformed:
return "malformed"
case InternalError:
return "internal error"
case TryLater:
return "try later"
case SignatureRequired:
return "signature required"
case Unauthorized:
return "unauthorized"
default:
return "unknown OCSP status: " + strconv.Itoa(int(r))
}
}
// ResponseError is an error that may be returned by ParseResponse to indicate
// that the response itself is an error, not just that its indicating that a
// certificate is revoked, unknown, etc.
type ResponseError struct {
Status ResponseStatus
}
func (r ResponseError) Error() string {
return "ocsp: error from server: " + r.Status.String()
}
// These are internal structures that reflect the ASN.1 structure of an OCSP
// response. See RFC 2560, section 4.2.
type certID struct {
HashAlgorithm pkix.AlgorithmIdentifier
NameHash []byte
IssuerKeyHash []byte
SerialNumber *big.Int
}
// https://tools.ietf.org/html/rfc2560#section-4.1.1
type ocspRequest struct {
TBSRequest tbsRequest
}
type tbsRequest struct {
Version int `asn1:"explicit,tag:0,default:0,optional"`
RequestorName pkix.RDNSequence `asn1:"explicit,tag:1,optional"`
RequestList []request
}
type request struct {
Cert certID
}
type responseASN1 struct {
Status asn1.Enumerated
Response responseBytes `asn1:"explicit,tag:0,optional"`
}
type responseBytes struct {
ResponseType asn1.ObjectIdentifier
Response []byte
}
type basicResponse struct {
TBSResponseData responseData
SignatureAlgorithm pkix.AlgorithmIdentifier
Signature asn1.BitString
Certificates []asn1.RawValue `asn1:"explicit,tag:0,optional"`
}
type responseData struct {
Raw asn1.RawContent
Version int `asn1:"optional,default:0,explicit,tag:0"`
RawResponderID asn1.RawValue
ProducedAt time.Time `asn1:"generalized"`
Responses []singleResponse
}
type singleResponse struct {
CertID certID
Good asn1.Flag `asn1:"tag:0,optional"`
Revoked revokedInfo `asn1:"tag:1,optional"`
Unknown asn1.Flag `asn1:"tag:2,optional"`
ThisUpdate time.Time `asn1:"generalized"`
NextUpdate time.Time `asn1:"generalized,explicit,tag:0,optional"`
SingleExtensions []pkix.Extension `asn1:"explicit,tag:1,optional"`
}
type revokedInfo struct {
RevocationTime time.Time `asn1:"generalized"`
Reason asn1.Enumerated `asn1:"explicit,tag:0,optional"`
}
var (
oidSignatureMD2WithRSA = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 1, 2}
oidSignatureMD5WithRSA = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 1, 4}
oidSignatureSHA1WithRSA = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 1, 5}
oidSignatureSHA256WithRSA = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 1, 11}
oidSignatureSHA384WithRSA = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 1, 12}
oidSignatureSHA512WithRSA = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 1, 13}
oidSignatureDSAWithSHA1 = asn1.ObjectIdentifier{1, 2, 840, 10040, 4, 3}
oidSignatureDSAWithSHA256 = asn1.ObjectIdentifier{2, 16, 840, 1, 101, 3, 4, 3, 2}
oidSignatureECDSAWithSHA1 = asn1.ObjectIdentifier{1, 2, 840, 10045, 4, 1}
oidSignatureECDSAWithSHA256 = asn1.ObjectIdentifier{1, 2, 840, 10045, 4, 3, 2}
oidSignatureECDSAWithSHA384 = asn1.ObjectIdentifier{1, 2, 840, 10045, 4, 3, 3}
oidSignatureECDSAWithSHA512 = asn1.ObjectIdentifier{1, 2, 840, 10045, 4, 3, 4}
)
var hashOIDs = map[crypto.Hash]asn1.ObjectIdentifier{
crypto.SHA1: asn1.ObjectIdentifier([]int{1, 3, 14, 3, 2, 26}),
crypto.SHA256: asn1.ObjectIdentifier([]int{2, 16, 840, 1, 101, 3, 4, 2, 1}),
crypto.SHA384: asn1.ObjectIdentifier([]int{2, 16, 840, 1, 101, 3, 4, 2, 2}),
crypto.SHA512: asn1.ObjectIdentifier([]int{2, 16, 840, 1, 101, 3, 4, 2, 3}),
}
// TODO(rlb): This is also from crypto/x509, so same comment as AGL's below
var signatureAlgorithmDetails = []struct {
algo x509.SignatureAlgorithm
oid asn1.ObjectIdentifier
pubKeyAlgo x509.PublicKeyAlgorithm
hash crypto.Hash
}{
{x509.MD2WithRSA, oidSignatureMD2WithRSA, x509.RSA, crypto.Hash(0) /* no value for MD2 */},
{x509.MD5WithRSA, oidSignatureMD5WithRSA, x509.RSA, crypto.MD5},
{x509.SHA1WithRSA, oidSignatureSHA1WithRSA, x509.RSA, crypto.SHA1},
{x509.SHA256WithRSA, oidSignatureSHA256WithRSA, x509.RSA, crypto.SHA256},
{x509.SHA384WithRSA, oidSignatureSHA384WithRSA, x509.RSA, crypto.SHA384},
{x509.SHA512WithRSA, oidSignatureSHA512WithRSA, x509.RSA, crypto.SHA512},
{x509.DSAWithSHA1, oidSignatureDSAWithSHA1, x509.DSA, crypto.SHA1},
{x509.DSAWithSHA256, oidSignatureDSAWithSHA256, x509.DSA, crypto.SHA256},
{x509.ECDSAWithSHA1, oidSignatureECDSAWithSHA1, x509.ECDSA, crypto.SHA1},
{x509.ECDSAWithSHA256, oidSignatureECDSAWithSHA256, x509.ECDSA, crypto.SHA256},
{x509.ECDSAWithSHA384, oidSignatureECDSAWithSHA384, x509.ECDSA, crypto.SHA384},
{x509.ECDSAWithSHA512, oidSignatureECDSAWithSHA512, x509.ECDSA, crypto.SHA512},
}
// TODO(rlb): This is also from crypto/x509, so same comment as AGL's below
func signingParamsForPublicKey(pub interface{}, requestedSigAlgo x509.SignatureAlgorithm) (hashFunc crypto.Hash, sigAlgo pkix.AlgorithmIdentifier, err error) {
var pubType x509.PublicKeyAlgorithm
switch pub := pub.(type) {
case *rsa.PublicKey:
pubType = x509.RSA
hashFunc = crypto.SHA256
sigAlgo.Algorithm = oidSignatureSHA256WithRSA
sigAlgo.Parameters = asn1.RawValue{
Tag: 5,
}
case *ecdsa.PublicKey:
pubType = x509.ECDSA
switch pub.Curve {
case elliptic.P224(), elliptic.P256():
hashFunc = crypto.SHA256
sigAlgo.Algorithm = oidSignatureECDSAWithSHA256
case elliptic.P384():
hashFunc = crypto.SHA384
sigAlgo.Algorithm = oidSignatureECDSAWithSHA384
case elliptic.P521():
hashFunc = crypto.SHA512
sigAlgo.Algorithm = oidSignatureECDSAWithSHA512
default:
err = errors.New("x509: unknown elliptic curve")
}
default:
err = errors.New("x509: only RSA and ECDSA keys supported")
}
if err != nil {
return
}
if requestedSigAlgo == 0 {
return
}
found := false
for _, details := range signatureAlgorithmDetails {
if details.algo == requestedSigAlgo {
if details.pubKeyAlgo != pubType {
err = errors.New("x509: requested SignatureAlgorithm does not match private key type")
return
}
sigAlgo.Algorithm, hashFunc = details.oid, details.hash
if hashFunc == 0 {
err = errors.New("x509: cannot sign with hash function requested")
return
}
found = true
break
}
}
if !found {
err = errors.New("x509: unknown SignatureAlgorithm")
}
return
}
// TODO(agl): this is taken from crypto/x509 and so should probably be exported
// from crypto/x509 or crypto/x509/pkix.
func getSignatureAlgorithmFromOID(oid asn1.ObjectIdentifier) x509.SignatureAlgorithm {
for _, details := range signatureAlgorithmDetails {
if oid.Equal(details.oid) {
return details.algo
}
}
return x509.UnknownSignatureAlgorithm
}
// TODO(rlb): This is not taken from crypto/x509, but it's of the same general form.
func getHashAlgorithmFromOID(target asn1.ObjectIdentifier) crypto.Hash {
for hash, oid := range hashOIDs {
if oid.Equal(target) {
return hash
}
}
return crypto.Hash(0)
}
func getOIDFromHashAlgorithm(target crypto.Hash) asn1.ObjectIdentifier {
for hash, oid := range hashOIDs {
if hash == target {
return oid
}
}
return nil
}
// This is the exposed reflection of the internal OCSP structures.
// The status values that can be expressed in OCSP. See RFC 6960.
const (
// Good means that the certificate is valid.
Good = iota
// Revoked means that the certificate has been deliberately revoked.
Revoked
// Unknown means that the OCSP responder doesn't know about the certificate.
Unknown
// ServerFailed is unused and was never used (see
// https://go-review.googlesource.com/#/c/18944). ParseResponse will
// return a ResponseError when an error response is parsed.
ServerFailed
)
// The enumerated reasons for revoking a certificate. See RFC 5280.
const (
Unspecified = iota
KeyCompromise = iota
CACompromise = iota
AffiliationChanged = iota
Superseded = iota
CessationOfOperation = iota
CertificateHold = iota
_ = iota
RemoveFromCRL = iota
PrivilegeWithdrawn = iota
AACompromise = iota
)
// Request represents an OCSP request. See RFC 6960.
type Request struct {
HashAlgorithm crypto.Hash
IssuerNameHash []byte
IssuerKeyHash []byte
SerialNumber *big.Int
}
// Marshal marshals the OCSP request to ASN.1 DER encoded form.
func (req *Request) Marshal() ([]byte, error) {
hashAlg := getOIDFromHashAlgorithm(req.HashAlgorithm)
if hashAlg == nil {
return nil, errors.New("Unknown hash algorithm")
}
return asn1.Marshal(ocspRequest{
tbsRequest{
Version: 0,
RequestList: []request{
{
Cert: certID{
pkix.AlgorithmIdentifier{
Algorithm: hashAlg,
Parameters: asn1.RawValue{Tag: 5 /* ASN.1 NULL */},
},
req.IssuerNameHash,
req.IssuerKeyHash,
req.SerialNumber,
},
},
},
},
})
}
// Response represents an OCSP response containing a single SingleResponse. See
// RFC 6960.
type Response struct {
// Status is one of {Good, Revoked, Unknown}
Status int
SerialNumber *big.Int
ProducedAt, ThisUpdate, NextUpdate, RevokedAt time.Time
RevocationReason int
Certificate *x509.Certificate
// TBSResponseData contains the raw bytes of the signed response. If
// Certificate is nil then this can be used to verify Signature.
TBSResponseData []byte
Signature []byte
SignatureAlgorithm x509.SignatureAlgorithm
// IssuerHash is the hash used to compute the IssuerNameHash and IssuerKeyHash.
// Valid values are crypto.SHA1, crypto.SHA256, crypto.SHA384, and crypto.SHA512.
// If zero, the default is crypto.SHA1.
IssuerHash crypto.Hash
// RawResponderName optionally contains the DER-encoded subject of the
// responder certificate. Exactly one of RawResponderName and
// ResponderKeyHash is set.
RawResponderName []byte
// ResponderKeyHash optionally contains the SHA-1 hash of the
// responder's public key. Exactly one of RawResponderName and
// ResponderKeyHash is set.
ResponderKeyHash []byte
// Extensions contains raw X.509 extensions from the singleExtensions field
// of the OCSP response. When parsing certificates, this can be used to
// extract non-critical extensions that are not parsed by this package. When
// marshaling OCSP responses, the Extensions field is ignored, see
// ExtraExtensions.
Extensions []pkix.Extension
// ExtraExtensions contains extensions to be copied, raw, into any marshaled
// OCSP response (in the singleExtensions field). Values override any
// extensions that would otherwise be produced based on the other fields. The
// ExtraExtensions field is not populated when parsing certificates, see
// Extensions.
ExtraExtensions []pkix.Extension
}
// These are pre-serialized error responses for the various non-success codes
// defined by OCSP. The Unauthorized code in particular can be used by an OCSP
// responder that supports only pre-signed responses as a response to requests
// for certificates with unknown status. See RFC 5019.
var (
MalformedRequestErrorResponse = []byte{0x30, 0x03, 0x0A, 0x01, 0x01}
InternalErrorErrorResponse = []byte{0x30, 0x03, 0x0A, 0x01, 0x02}
TryLaterErrorResponse = []byte{0x30, 0x03, 0x0A, 0x01, 0x03}
SigRequredErrorResponse = []byte{0x30, 0x03, 0x0A, 0x01, 0x05}
UnauthorizedErrorResponse = []byte{0x30, 0x03, 0x0A, 0x01, 0x06}
)
// CheckSignatureFrom checks that the signature in resp is a valid signature
// from issuer. This should only be used if resp.Certificate is nil. Otherwise,
// the OCSP response contained an intermediate certificate that created the
// signature. That signature is checked by ParseResponse and only
// resp.Certificate remains to be validated.
func (resp *Response) CheckSignatureFrom(issuer *x509.Certificate) error {
return issuer.CheckSignature(resp.SignatureAlgorithm, resp.TBSResponseData, resp.Signature)
}
// ParseError results from an invalid OCSP response.
type ParseError string
func (p ParseError) Error() string {
return string(p)
}
// ParseRequest parses an OCSP request in DER form. It only supports
// requests for a single certificate. Signed requests are not supported.
// If a request includes a signature, it will result in a ParseError.
func ParseRequest(bytes []byte) (*Request, error) {
var req ocspRequest
rest, err := asn1.Unmarshal(bytes, &req)
if err != nil {
return nil, err
}
if len(rest) > 0 {
return nil, ParseError("trailing data in OCSP request")
}
if len(req.TBSRequest.RequestList) == 0 {
return nil, ParseError("OCSP request contains no request body")
}
innerRequest := req.TBSRequest.RequestList[0]
hashFunc := getHashAlgorithmFromOID(innerRequest.Cert.HashAlgorithm.Algorithm)
if hashFunc == crypto.Hash(0) {
return nil, ParseError("OCSP request uses unknown hash function")
}
return &Request{
HashAlgorithm: hashFunc,
IssuerNameHash: innerRequest.Cert.NameHash,
IssuerKeyHash: innerRequest.Cert.IssuerKeyHash,
SerialNumber: innerRequest.Cert.SerialNumber,
}, nil
}
// ParseResponse parses an OCSP response in DER form. It only supports
// responses for a single certificate. If the response contains a certificate
// then the signature over the response is checked. If issuer is not nil then
// it will be used to validate the signature or embedded certificate.
//
// Invalid signatures or parse failures will result in a ParseError. Error
// responses will result in a ResponseError.
func ParseResponse(bytes []byte, issuer *x509.Certificate) (*Response, error) {
return ParseResponseForCert(bytes, nil, issuer)
}
// ParseResponseForCert parses an OCSP response in DER form and searches for a
// Response relating to cert. If such a Response is found and the OCSP response
// contains a certificate then the signature over the response is checked. If
// issuer is not nil then it will be used to validate the signature or embedded
// certificate.
//
// Invalid signatures or parse failures will result in a ParseError. Error
// responses will result in a ResponseError.
func ParseResponseForCert(bytes []byte, cert, issuer *x509.Certificate) (*Response, error) {
var resp responseASN1
rest, err := asn1.Unmarshal(bytes, &resp)
if err != nil {
return nil, err
}
if len(rest) > 0 {
return nil, ParseError("trailing data in OCSP response")
}
if status := ResponseStatus(resp.Status); status != Success {
return nil, ResponseError{status}
}
if !resp.Response.ResponseType.Equal(idPKIXOCSPBasic) {
return nil, ParseError("bad OCSP response type")
}
var basicResp basicResponse
rest, err = asn1.Unmarshal(resp.Response.Response, &basicResp)
if err != nil {
return nil, err
}
if len(basicResp.Certificates) > 1 {
return nil, ParseError("OCSP response contains bad number of certificates")
}
if n := len(basicResp.TBSResponseData.Responses); n == 0 || cert == nil && n > 1 {
return nil, ParseError("OCSP response contains bad number of responses")
}
ret := &Response{
TBSResponseData: basicResp.TBSResponseData.Raw,
Signature: basicResp.Signature.RightAlign(),
SignatureAlgorithm: getSignatureAlgorithmFromOID(basicResp.SignatureAlgorithm.Algorithm),
}
// Handle the ResponderID CHOICE tag. ResponderID can be flattened into
// TBSResponseData once https://go-review.googlesource.com/34503 has been
// released.
rawResponderID := basicResp.TBSResponseData.RawResponderID
switch rawResponderID.Tag {
case 1: // Name
var rdn pkix.RDNSequence
if rest, err := asn1.Unmarshal(rawResponderID.Bytes, &rdn); err != nil || len(rest) != 0 {
return nil, ParseError("invalid responder name")
}
ret.RawResponderName = rawResponderID.Bytes
case 2: // KeyHash
if rest, err := asn1.Unmarshal(rawResponderID.Bytes, &ret.ResponderKeyHash); err != nil || len(rest) != 0 {
return nil, ParseError("invalid responder key hash")
}
default:
return nil, ParseError("invalid responder id tag")
}
if len(basicResp.Certificates) > 0 {
ret.Certificate, err = x509.ParseCertificate(basicResp.Certificates[0].FullBytes)
if err != nil {
return nil, err
}
if err := ret.CheckSignatureFrom(ret.Certificate); err != nil {
return nil, ParseError("bad signature on embedded certificate: " + err.Error())
}
if issuer != nil {
if err := issuer.CheckSignature(ret.Certificate.SignatureAlgorithm, ret.Certificate.RawTBSCertificate, ret.Certificate.Signature); err != nil {
return nil, ParseError("bad OCSP signature: " + err.Error())
}
}
} else if issuer != nil {
if err := ret.CheckSignatureFrom(issuer); err != nil {
return nil, ParseError("bad OCSP signature: " + err.Error())
}
}
var r singleResponse
for _, resp := range basicResp.TBSResponseData.Responses {
if cert == nil || cert.SerialNumber.Cmp(resp.CertID.SerialNumber) == 0 {
r = resp
break
}
}
for _, ext := range r.SingleExtensions {
if ext.Critical {
return nil, ParseError("unsupported critical extension")
}
}
ret.Extensions = r.SingleExtensions
ret.SerialNumber = r.CertID.SerialNumber
for h, oid := range hashOIDs {
if r.CertID.HashAlgorithm.Algorithm.Equal(oid) {
ret.IssuerHash = h
break
}
}
if ret.IssuerHash == 0 {
return nil, ParseError("unsupported issuer hash algorithm")
}
switch {
case bool(r.Good):
ret.Status = Good
case bool(r.Unknown):
ret.Status = Unknown
default:
ret.Status = Revoked
ret.RevokedAt = r.Revoked.RevocationTime
ret.RevocationReason = int(r.Revoked.Reason)
}
ret.ProducedAt = basicResp.TBSResponseData.ProducedAt
ret.ThisUpdate = r.ThisUpdate
ret.NextUpdate = r.NextUpdate
return ret, nil
}
// RequestOptions contains options for constructing OCSP requests.
type RequestOptions struct {
// Hash contains the hash function that should be used when
// constructing the OCSP request. If zero, SHA-1 will be used.
Hash crypto.Hash
}
func (opts *RequestOptions) hash() crypto.Hash {
if opts == nil || opts.Hash == 0 {
// SHA-1 is nearly universally used in OCSP.
return crypto.SHA1
}
return opts.Hash
}
// CreateRequest returns a DER-encoded, OCSP request for the status of cert. If
// opts is nil then sensible defaults are used.
func CreateRequest(cert, issuer *x509.Certificate, opts *RequestOptions) ([]byte, error) {
hashFunc := opts.hash()
// OCSP seems to be the only place where these raw hash identifiers are
// used. I took the following from
// http://msdn.microsoft.com/en-us/library/ff635603.aspx
_, ok := hashOIDs[hashFunc]
if !ok {
return nil, x509.ErrUnsupportedAlgorithm
}
if !hashFunc.Available() {
return nil, x509.ErrUnsupportedAlgorithm
}
h := opts.hash().New()
var publicKeyInfo struct {
Algorithm pkix.AlgorithmIdentifier
PublicKey asn1.BitString
}
if _, err := asn1.Unmarshal(issuer.RawSubjectPublicKeyInfo, &publicKeyInfo); err != nil {
return nil, err
}
h.Write(publicKeyInfo.PublicKey.RightAlign())
issuerKeyHash := h.Sum(nil)
h.Reset()
h.Write(issuer.RawSubject)
issuerNameHash := h.Sum(nil)
req := &Request{
HashAlgorithm: hashFunc,
IssuerNameHash: issuerNameHash,
IssuerKeyHash: issuerKeyHash,
SerialNumber: cert.SerialNumber,
}
return req.Marshal()
}
// CreateResponse returns a DER-encoded OCSP response with the specified contents.
// The fields in the response are populated as follows:
//
// The responder cert is used to populate the responder's name field, and the
// certificate itself is provided alongside the OCSP response signature.
//
// The issuer cert is used to populate the IssuerNameHash and IssuerKeyHash fields.
//
// The template is used to populate the SerialNumber, RevocationStatus, RevokedAt,
// RevocationReason, ThisUpdate, and NextUpdate fields.
//
// If template.IssuerHash is not set, SHA1 will be used.
//
// The ProducedAt date is automatically set to the current date, to the nearest minute.
func CreateResponse(issuer, responderCert *x509.Certificate, template Response, priv crypto.Signer) ([]byte, error) {
var publicKeyInfo struct {
Algorithm pkix.AlgorithmIdentifier
PublicKey asn1.BitString
}
if _, err := asn1.Unmarshal(issuer.RawSubjectPublicKeyInfo, &publicKeyInfo); err != nil {
return nil, err
}
if template.IssuerHash == 0 {
template.IssuerHash = crypto.SHA1
}
hashOID := getOIDFromHashAlgorithm(template.IssuerHash)
if hashOID == nil {
return nil, errors.New("unsupported issuer hash algorithm")
}
if !template.IssuerHash.Available() {
return nil, fmt.Errorf("issuer hash algorithm %v not linked into binary", template.IssuerHash)
}
h := template.IssuerHash.New()
h.Write(publicKeyInfo.PublicKey.RightAlign())
issuerKeyHash := h.Sum(nil)
h.Reset()
h.Write(issuer.RawSubject)
issuerNameHash := h.Sum(nil)
innerResponse := singleResponse{
CertID: certID{
HashAlgorithm: pkix.AlgorithmIdentifier{
Algorithm: hashOID,
Parameters: asn1.RawValue{Tag: 5 /* ASN.1 NULL */},
},
NameHash: issuerNameHash,
IssuerKeyHash: issuerKeyHash,
SerialNumber: template.SerialNumber,
},
ThisUpdate: template.ThisUpdate.UTC(),
NextUpdate: template.NextUpdate.UTC(),
SingleExtensions: template.ExtraExtensions,
}
switch template.Status {
case Good:
innerResponse.Good = true
case Unknown:
innerResponse.Unknown = true
case Revoked:
innerResponse.Revoked = revokedInfo{
RevocationTime: template.RevokedAt.UTC(),
Reason: asn1.Enumerated(template.RevocationReason),
}
}
rawResponderID := asn1.RawValue{
Class: 2, // context-specific
Tag: 1, // Name (explicit tag)
IsCompound: true,
Bytes: responderCert.RawSubject,
}
tbsResponseData := responseData{
Version: 0,
RawResponderID: rawResponderID,
ProducedAt: time.Now().Truncate(time.Minute).UTC(),
Responses: []singleResponse{innerResponse},
}
tbsResponseDataDER, err := asn1.Marshal(tbsResponseData)
if err != nil {
return nil, err
}
hashFunc, signatureAlgorithm, err := signingParamsForPublicKey(priv.Public(), template.SignatureAlgorithm)
if err != nil {
return nil, err
}
responseHash := hashFunc.New()
responseHash.Write(tbsResponseDataDER)
signature, err := priv.Sign(rand.Reader, responseHash.Sum(nil), hashFunc)
if err != nil {
return nil, err
}
response := basicResponse{
TBSResponseData: tbsResponseData,
SignatureAlgorithm: signatureAlgorithm,
Signature: asn1.BitString{
Bytes: signature,
BitLength: 8 * len(signature),
},
}
if template.Certificate != nil {
response.Certificates = []asn1.RawValue{
asn1.RawValue{FullBytes: template.Certificate.Raw},
}
}
responseDER, err := asn1.Marshal(response)
if err != nil {
return nil, err
}
return asn1.Marshal(responseASN1{
Status: asn1.Enumerated(Success),
Response: responseBytes{
ResponseType: idPKIXOCSPBasic,
Response: responseDER,
},
})
}<|fim▁end|> | func (r ResponseStatus) String() string {
switch r {
case Success:
return "success" |
<|file_name|>Event.java<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 10/13/15 smokey <[email protected]>
* <p>
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.<|fim▁hole|> * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* <p>
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.jchat.event;
/**
*
* @author smokey
* @date 10/13/15
*/
public abstract class Event {
long interval, last;
public abstract void execute();
}<|fim▁end|> | * <p>
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of |
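// --- Illustrative sketch (added; not part of the row above) ---
// Event only fixes the contract; the interval/last fields suggest a periodic
// scheduler, but how they are consumed is an assumption here. A hypothetical
// subclass plus the dispatch test such a scheduler would run:
package org.jchat.event;

public class PingEvent extends Event {

    public PingEvent(long intervalMillis) {
        this.interval = intervalMillis;   // package-private fields of Event
        this.last = System.currentTimeMillis();
    }

    @Override
    public void execute() {
        System.out.println("ping @ " + System.currentTimeMillis());
    }

    /** Presumed scheduler check: due again once {@code interval} ms have elapsed. */
    public boolean isDue(long now) {
        if (now - last >= interval) {
            last = now;
            return true;
        }
        return false;
    }
}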
<|file_name|>WeatherActivity.java<|end_file_name|><|fim▁begin|>package com.coolweather.android;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.os.Build;
import android.preference.PreferenceManager;
import android.support.v4.view.GravityCompat;
import android.support.v4.view.ScrollingView;
import android.support.v4.widget.DrawerLayout;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ScrollView;
import android.widget.TextView;
import android.widget.Toast;
import com.bumptech.glide.Glide;
import com.coolweather.android.gson.Forecast;
import com.coolweather.android.gson.Weather;
import com.coolweather.android.service.AutoUpdateService;
import com.coolweather.android.util.HttpUtil;
import com.coolweather.android.util.Utility;
import java.io.IOException;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.Response;
public class WeatherActivity extends AppCompatActivity {
private ScrollView weatherLayout;
private TextView titleCity;
private TextView titleUpdateTime;
private TextView degreeText;
private TextView weatherInfoText;
private LinearLayout forecastLayout;
private TextView aqiText;
private TextView pm25Text;
private TextView comfortText;
private TextView carWashText;
private TextView sportText;
private ImageView bingPicImg;
public SwipeRefreshLayout swipeRefreshLayout;
private String mWeatherId;
public DrawerLayout drawerLayout;
private Button navButton;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (Build.VERSION.SDK_INT >= 21) {
View decorView = getWindow().getDecorView();
decorView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN | View.SYSTEM_UI_FLAG_LAYOUT_STABLE);
getWindow().setStatusBarColor(Color.TRANSPARENT);
}
setContentView(R.layout.activity_weather);
// Initialize views
weatherLayout = (ScrollView) findViewById(R.id.weather_layout);
titleCity = (TextView) findViewById(R.id.title_city);
titleUpdateTime = (TextView) findViewById(R.id.title_update_time);
degreeText = (TextView) findViewById(R.id.degree_text);
weatherInfoText = (TextView) findViewById(R.id.weather_info_text);
forecastLayout = (LinearLayout) findViewById(R.id.forecast_layout);
aqiText = (TextView) findViewById(R.id.aqi_text);
pm25Text = (TextView) findViewById(R.id.pm25_text);
comfortText = (TextView) findViewById(R.id.comfort_text);
carWashText = (TextView) findViewById(R.id.car_wash_text);
sportText = (TextView) findViewById(R.id.sport_text);
bingPicImg = (ImageView) findViewById(R.id.bing_pic_img);
drawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout);
navButton = (Button) findViewById(R.id.nav_button);
swipeRefreshLayout = (SwipeRefreshLayout) findViewById(R.id.swipe_refresh);
swipeRefreshLayout.setColorSchemeResources(R.color.colorTopic);
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
String weatherString = prefs.getString("weather", null);
if (weatherString != null) {
// Cache hit: parse the stored weather data directly
Weather weather = Utility.handleWeatherResponse(weatherString);
mWeatherId = weather.basic.weatherId;
showWeatherInfo(weather);
} else {
// No cache: query the server for the weather
mWeatherId = getIntent().getStringExtra("weather_id");<|fim▁hole|> weatherLayout.setVisibility(View.INVISIBLE);
requestWeather(weatherId);
}
navButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
drawerLayout.openDrawer(GravityCompat.START);
}
});
swipeRefreshLayout.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
@Override
public void onRefresh() {
requestWeather(mWeatherId);
}
});
String bingPic = prefs.getString("bing_pic", null);
if (bingPic != null) {
Glide.with(this).load(bingPic).into(bingPicImg);
} else {
loadBingPic();
}
}
/**
* Requests a city's weather information for the given weather ID
*/
public void requestWeather(final String weatherId) {
String weatherUrl = "http://guolin.tech/api/weather?cityid=" + weatherId + "&key=04ae9fa43fb341b596f719aa6d6babda";
HttpUtil.sendOkHttpRequest(weatherUrl, new Callback() {
@Override
public void onFailure(Call call, IOException e) {
runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(WeatherActivity.this, "获取天气信息失败", Toast.LENGTH_SHORT).show();
swipeRefreshLayout.setRefreshing(false);
}
});
}
@Override
public void onResponse(Call call, Response response) throws IOException {
final String responseText = response.body().string();
final Weather weather = Utility.handleWeatherResponse(responseText);
runOnUiThread(new Runnable() {
@Override
public void run() {
if (weather != null && "ok".equals(weather.status)) {
SharedPreferences.Editor editor = PreferenceManager
.getDefaultSharedPreferences(WeatherActivity.this).edit();
editor.putString("weather", responseText);
editor.apply();
Toast.makeText(WeatherActivity.this, "成功更新最新天气", Toast.LENGTH_SHORT).show();
showWeatherInfo(weather);
} else {
Toast.makeText(WeatherActivity.this, "获取天气信息失败", Toast.LENGTH_SHORT).show();
}
swipeRefreshLayout.setRefreshing(false);
}
});
}
});
loadBingPic();
}
private void loadBingPic() {
String requestBingPic = "http://guolin.tech/api/bing_pic";
HttpUtil.sendOkHttpRequest(requestBingPic, new Callback() {
@Override
public void onFailure(Call call, IOException e) {
e.printStackTrace();
}
@Override
public void onResponse(Call call, Response response) throws IOException {
final String bingPic = response.body().string();
SharedPreferences.Editor editor = PreferenceManager.getDefaultSharedPreferences(WeatherActivity.this).edit();
editor.putString("bing_pic", bingPic);
editor.apply();
runOnUiThread(new Runnable() {
@Override
public void run() {
Glide.with(WeatherActivity.this).load(bingPic).into(bingPicImg);
}
});
}
});
}
/**
* Processes and displays the data in the Weather entity
*/
private void showWeatherInfo(Weather weather) {
String cityName = weather.basic.cityName;
String updateTime = "更新时间: " + weather.basic.update.updateTime.split(" ")[1];
String degree = weather.now.temperature + "°C";
String weatherInfo = weather.now.more.info;
titleCity.setText(cityName);
titleUpdateTime.setText(updateTime);
degreeText.setText(degree);
weatherInfoText.setText(weatherInfo);
forecastLayout.removeAllViews();
for (Forecast forecast : weather.forecastList) {
View view = LayoutInflater.from(this).inflate(R.layout.forecast_item, forecastLayout, false);
TextView dateText = (TextView) view.findViewById(R.id.date_text);
TextView infoText = (TextView) view.findViewById(R.id.info_text);
TextView maxText = (TextView) view.findViewById(R.id.max_text);
TextView minText = (TextView) view.findViewById(R.id.min_text);
dateText.setText(forecast.date);
infoText.setText(forecast.more.info);
maxText.setText(forecast.temperature.max);
minText.setText(forecast.temperature.min);
forecastLayout.addView(view);
}
if (weather.aqi != null) {
aqiText.setText(weather.aqi.city.aqi);
pm25Text.setText(weather.aqi.city.pm25);
}
String comfort = "舒适度:" + weather.suggestion.comfort.info;
String catWash = "洗车指数:" + weather.suggestion.carWash.info;
String sport = "运动指数:" + weather.suggestion.sport.info;
comfortText.setText(comfort);
carWashText.setText(catWash);
sportText.setText(sport);
weatherLayout.setVisibility(View.VISIBLE);
if (weather != null && "ok".equals(weather.status)) {
Intent intent = new Intent(this, AutoUpdateService.class);
startService(intent);
} else {
Toast.makeText(WeatherActivity.this, "获取天气信息失败", Toast.LENGTH_SHORT).show();
}
}
}<|fim▁end|> | String weatherId = getIntent().getStringExtra("weather_id"); |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>"""
Models for the KAIST single sign-on service.
"""
from django.db import models
from django.utils.translation import ugettext_lazy as _
class PortalInfoManager(models.Manager):
"""
Custom manager for :class:`PortalInfo`.
Users logging in for the first time through the KAIST single sign-on
service must agree to share their information with the student council to
complete sign-up; those who decline are automatically deregistered. Some
users, however, close the site on the consent page, leaving their choice
unknown. So that site administrators cannot freely use the personal data
stored temporarily in that state, only users who have consented must be
served, which is the role this manager performs.
"""
def get_queryset(self):
"""
Returns a queryset filtered down to users who consented to share their information.
"""
return super().get_queryset().filter(is_signed_up=True)
class PortalInfo(models.Model):<|fim▁hole|> """
Model that stores a user's portal account information.
"""
user = models.OneToOneField(
'auth.User',
primary_key=True, related_name='portal_info',
verbose_name=_("유저 인스턴스"))
kaist_uid = models.CharField(
_("KAIST UID"),
max_length=128, unique=True)
ku_kname = models.CharField(
_("이름"),
max_length=128, blank=True)
ku_acad_prog = models.CharField(
_("과정"),
max_length=32, blank=True)
ku_std_no = models.CharField(
_("학번"),
max_length=32, blank=True)
ku_psft_user_status_kor = models.CharField(
_("학적상태"),
max_length=32, blank=True)
ku_born_date = models.CharField(
_("생년월일"),
max_length=32, blank=True)
ku_sex = models.CharField(
_("성별"),
max_length=32, blank=True)
ou = models.CharField(
_("학과"),
max_length=32, blank=True)
mail = models.CharField(
_("메일주소"),
max_length=32, blank=True)
mobile = models.CharField(
_("전화번호"),
max_length=32, blank=True)
is_signed_up = models.BooleanField(
_("정보제공 동의여부"),
default=False,
help_text=_(
"정보제공에 반대하면 계정삭제 처리가 되나, 아직 동의여부를 "
"선택하지 않은 최초가입자의 경우 의사표현 시까지 정보가 "
"임시저장됩니다. 이 특수경우에는 정보를 활용하지 않아야 합니다."))
#: Custom manager that only covers users who consented to share their information.
objects = PortalInfoManager()
#: Default manager that covers all users.
all_objects = models.Manager()
def __str__(self):
return self.ku_kname
@classmethod
def create(cls, user, kaist_uid):
"""
Factory method that creates an instance.
Takes a user instance and the user's KAIST UID.
"""
return cls(user=user, kaist_uid=kaist_uid)
@property
def enter_year(self):
if self.ku_std_no and len(self.ku_std_no) == 8:
return self.ku_std_no[2:4]
return None<|fim▁end|> | |
<|file_name|>firstRun.js<|end_file_name|><|fim▁begin|>{
document.getElementsByClassName('close')[0].addEventListener('click', function() {
window.close();<|fim▁hole|><|fim▁end|> | });
} |
<|file_name|>lint-forbid-cmdline.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed<|fim▁hole|>
#[allow(experimental)] //~ ERROR allow(experimental) overruled by outer forbid(experimental)
fn main() {
}<|fim▁end|> | // except according to those terms.
// compile-flags: -F experimental |
<|file_name|>test_fail_valid.rs<|end_file_name|><|fim▁begin|>#![feature(plugin, custom_attribute)]
#![plugin(rustproof)]<|fim▁hole|>
// Used to test the tester
// Condition is invalid, and function name indicates "valid", so should be detected as a mismatch
#[condition(pre="x: i32 <= i32::MAX - 4i32", post="return: i32 == (x: i32 + 5i32)")]
fn valid_add_five_i32(x: i32) -> i32 {
x+5
}<|fim▁end|> | #![allow(dead_code)]
#![allow(unused_attributes)]
fn main() { } |
<|file_name|>mapObjIndexed.js<|end_file_name|><|fim▁begin|>var assert = require('assert');
var R = require('..');
describe('mapObjIndexed', function() {
var times2 = function(x) {return x * 2;};
var addIndexed = function(x, key) {return x + key;};
var squareVowels = function(x, key) {
var vowels = ['a', 'e', 'i', 'o', 'u'];
return R.contains(key, vowels) ? x * x : x;
};
it('works just like a normal mapObj', function() {
assert.deepEqual(R.mapObjIndexed(times2, {a: 1, b: 2, c: 3, d: 4}), {a: 2, b: 4, c: 6, d: 8});
});
it('passes the index as a second parameter to the callback', function() {
assert.deepEqual(R.mapObjIndexed(addIndexed, {a: 8, b: 6, c: 7, d: 5, e: 3, f: 0, g: 9}),
{a: '8a', b: '6b', c: '7c', d: '5d', e: '3e', f: '0f', g: '9g'});
});
it('passes the entire list as a third parameter to the callback', function() {<|fim▁hole|> assert.deepEqual(R.mapObjIndexed(squareVowels, {a: 8, b: 6, c: 7, d: 5, e: 3, f: 0, g: 9}),
{a: 64, b: 6, c: 7, d: 5, e: 9, f: 0, g: 9});
});
it('is curried', function() {
var makeSquareVowels = R.mapObjIndexed(squareVowels);
assert.deepEqual(makeSquareVowels({a: 8, b: 6, c: 7, d: 5, e: 3, f: 0, g: 9}),
{a: 64, b: 6, c: 7, d: 5, e: 9, f: 0, g: 9});
});
});<|fim▁end|> | |
<|file_name|>cohort_analysis_function.py<|end_file_name|><|fim▁begin|>import pandas as pd
import numpy as np
from dateutil.relativedelta import relativedelta
#### Utilities
def get_first_visit_date(data_patient):
''' Determines the first visit for a given patient'''
#IDEA Could be parallelized in Dask
data_patient['first_visit_date'] = min(data_patient.visit_date)
return data_patient
def subset_analysis_data(data, date_analysis):
''' Function that subsets the full dataset to only the data available for a certain analysis date'''
if type(data.date_entered.iloc[0]) is str :
data.date_entered = pd.to_datetime(data.date_entered)
data = data[data.date_entered < date_analysis]
return data
def subset_cohort(data, horizon_date, horizon_time, bandwidth):
''' Subsets the cohort of patients whose first visit falls between horizon_time and horizon_time + bandwidth days before horizon_date'''
horizon_date = pd.to_datetime(horizon_date)
data['first_visit_date'] = pd.to_datetime(data['first_visit_date'])
cohort_data = data[(data['first_visit_date'] >= horizon_date - relativedelta(days=horizon_time + bandwidth)) &
(data['first_visit_date'] < horizon_date - relativedelta(days=horizon_time))]
return cohort_data
#### Standard reporting
def status_patient(data_patient, reference_date, grace_period):
''' Determines the status of a patient at a given reference_date, given the data available at a given analysis_date
TODO Also select the available data for Death and Transfer and other outcomes based on data entry time
'''
#IDEA Could be parallelized in Dask
data_patient = get_first_visit_date(data_patient)
date_out = pd.NaT
date_last_appointment = pd.to_datetime(max(data_patient.next_visit_date))
late_time = reference_date - date_last_appointment
if late_time.days > grace_period:
status = 'LTFU'
date_out = date_last_appointment
if late_time.days <= grace_period:
status = 'Followed'
if (data_patient.reasonDescEn.iloc[0] is not np.nan) & (pd.to_datetime(data_patient.discDate.iloc[0]) < reference_date):
status = data_patient.reasonDescEn.iloc[0]
date_out = pd.to_datetime(data_patient.discDate.iloc[0])
return pd.DataFrame([{'status': status,
'late_time': late_time,
'last_appointment': date_last_appointment,
'date_out':date_out ,
'first_visit_date':data_patient.first_visit_date.iloc[0],
'facility':data_patient.facility.iloc[0]}])
def horizon_outcome(data_cohort, reference_date, horizon_time):
# TODO Make sure dates are dates
data_cohort['first_visit_date'] = pd.to_datetime(data_cohort['first_visit_date']) #TODO This conversion should happen earlier
data_cohort.loc[:, 'horizon_date'] = data_cohort['first_visit_date'] + np.timedelta64(horizon_time, 'D')
data_cohort.loc[: , 'horizon_status'] = data_cohort['status']
# If the patient exited the cohort after his horizon date, still consider him followed
# BUG This is marginally invalid, for example if a patient was considered LTFU before he died
data_cohort.horizon_status[~(data_cohort['status'] == 'Followed') & (data_cohort['date_out'] > data_cohort['horizon_date'])] = 'Followed'
return data_cohort
## Transversal description only
def n_visits(data, month):
reporting_month = pd.to_datetime(data['visit_date']).dt.to_period('M')
n_vis = sum(reporting_month == month)
return n_vis
def make_report(data, reference_date, date_analysis, grace_period, horizon_time, cohort_width):
assert reference_date <= date_analysis, 'You should not analyze a period before you have the data (date of analysis is before reference date)'
if type(reference_date) is str :<|fim▁hole|> if len(report_data) > 0:
month = reference_date.to_period('M') - 1
n_visits_month = report_data.groupby('facility').apply(n_visits, month)
df_status = report_data.groupby('patient_id').apply(status_patient, reference_date, 90)
cohort_data = subset_cohort(df_status, reference_date, horizon_time, cohort_width)
# print(df_status.head())
horizon_outcome_data = horizon_outcome(cohort_data, month, 365)
transversal_reports = df_status.groupby('facility').status.value_counts()
longitudinal_reports = horizon_outcome_data.groupby('facility').status.value_counts()
out_reports = {'transversal':transversal_reports,
'longitudinal':longitudinal_reports,
'n_visits':n_visits_month}
return out_reports
# QUESTION What are the form_types<|fim▁end|> | reference_date = pd.to_datetime(reference_date)
if type(date_analysis) is str:
date_analysis = pd.to_datetime(date_analysis)
report_data = subset_analysis_data(data, date_analysis) |
<|file_name|>ssr_params.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
############################################################################
#
# MODULE: ssr_params.py
# AUTHOR: Collin Bode, UC Berkeley
#
# PURPOSE: Consolidate parameters for all SSR scripts and to provide some
# common functions.
#
# DEPENDENCIES: requires function set_server_environment(server_name).
# So import grass_setserver is needed.
#
# COPYRIGHT: (c) 2012 Collin Bode
# (c) 2006 Hamish Bowman, and the GRASS Development Team
# (c) 2008 Glynn Clements, and the GRASS Development Team
# This program is free software under the GNU General Public
# License (>=v2). Read the file COPYING that comes with GRASS
# for details.
#
#############################################################################
#----------------------------------------------------------------------------
# Run Parts? 0 = do not run, 1 = run, but do not overwrite maps, 2 = run, overwrite maps
lidar_run = 2 # Imports point cloud as canopy and point density rasters
lpi_run = 0 # Creates Light Penetration Index (LPI) from point cloud
preprocessing_run = 0 # Creates derivative GIS products slope, aspect, tree height, albedo
rsun_run = 0 # Runs GRASS light model, r.sun
algore_run = 0 # Algorithm for combining all the parts into the SRR
#----------------------------------------------------------------------------
# GENERAL PARAMETERS
# GRASS GIS requires 4 inputs to start:
gisbase = '/usr/lib64/grass-6.4.4' # GIS BASE (GISBASE): path to GRASS binaries.
gisdbase = '/data/grass_workspace' # DATABASE (GISDBASE): directory containing all GRASS layers.
location = 'angelo2014' # LOCATION: defined by coordinate system & bounding box.
mapset = 'PERMANENT' # MAPSET: each GRASS session runs under a unique MAPSET. PERMANENT is default.
# Resolution and Bounding box
C = '2' # cell size in meters 2
bregion = 'default' # boundary used in g.region: b5k,b8k,b10, d = default. utilities needs to be changed for different regions.
# INPUT RASTER NAMES
demsource = 'angelo1m2014dem'<|fim▁hole|>
#----------------------------------------------------------------------------
# MAP NAMES
P = bregion + C + 'm' # Prefix to raster maps and Mapsets. This allows subsets of the total area to be run.
dem = P + 'dem' # source map: bare-earth dem
can = P + 'can' # source map: canopy dem
sloped = P + 'demslope' # slope, bare-earth
slopec = P + 'canslope' # slope, canopy
aspectd = P + 'demaspect' # aspect, bare-earth
aspectc = P + 'canaspect' # aspect, canopy
vegheight = P + 'vegheight' # vegetation height
albedo = P + 'albedo' # albedo by vegtype
demhor = P + 'demhor' # horizon, bare-earth
canhor = P + 'demhor' # horizon, canopy
#----------------------------------------------------------------------------
# SSR1: LIDAR IMPORT PARAMETERS
# LiDAR downloaded from http://opentopography.org.
# National Center for Airborne Laser Mapping (NCALM) distributes laser hits as 2 datasets: total and ground filtered.
# Version 1.0 only processes ASCII files, with the ground-filtered points exported to a separate directory. Future versions will
# use .las files in a single directory.
year = 'y14' # Year the LiDAR was flown 2004 'y04', 2004 modified to match y09 'ym4',2009 'y09'
pdensitypref = 'pointdensity_c'+str(C)+year # prefix to the point density rasters
inSuffix='xyz' # filename suffix to filter for
overlap = float(0) # tile overlap in meters 10.00 m (y04,y09), 0.00 m (y14)
sep = ',' # separator in lidar files ' ' or ','
LidarPoints = [ 'filtered' , 'unfiltered' ] # subdirectories under inPath. y04 = [ 'ground' , 'all' ]
inPath='/data/source/LiDAR/2014_EelBathymetry_LiDAR/Angelo/Tiles_ASCII_xyz/'
#inPath='/data/source/LiDAR/2009_SFEel_LiDAR/ascii/'
#inPath='/data/source/LiDAR/2004_SFEel_LiDAR/TerraScan_EEL/laser_export/'
#----------------------------------------------------------------------------
# SSR2: LPI PARAMETERS
#Radius = 8 # Previous radius was 8, but that is actually 8 cells per side * 2 meters per cell = 32 meters, and I actually used a 31x31 cell square.
boxsize = '17' # Size is cell size of box for r.neighbors. This is different than the actual box (9 cells x 2 meter cells = 18 meters)
lpipref = 'lpi_c'+C+year+'s'+boxsize # add the month to the end, e.g. lpi_c2y09s17m10
#----------------------------------------------------------------------------
# SSR3: R.HORIZON PARAMETERS
maxdistance = '10000' # maxdistance = 10000 meters (longer than the diagnal of the map)
hstep = '1' # horizonstep = 1 degree (causing 360 maps to be created)
dist = '0.5' # normal range (0.5 - 1.5) previous runs used 0.3 ?artifacting?
# dist=1.0 or greater uses simplified calculation that causes artifacts
#----------------------------------------------------------------------------
# SSR4: R.SUN Solar Model Parameters
# r.sun is designed to be run for 1 day (24 hours); this script runs it across 1 year, once every week.
linke_array = 'helios' # various options of turbidity values, "helios" is default for Angelo.
tl = linke_array
start_day = 5 # First Julian Day calculated
week_step = 7 # run r.sun once every week
timestep = '0.1' # 0.1 decimal hour = 6 minute timestep, default 0.5(30min), last run 0.5
calib = 'hd' # r.sun calibration code: 'hd' = 0.50 * Diffuse, 1.0 * Direct, reflection is ignored.
# calibration needs to be moved to algore script
#----------------------------------------------------------------------------
# SSR5: ALGORE PARAMETERS
maxheight = '2' # Vegetation height after which canopy is set to null
#halfdiff = True # Reduces the r.sun diffuse output by half. suffix 'half' on diffuse and global maps
keeptemp = True # Testing only. Should be false for production.
lpivsjune = False # Analysis only. Uses June LPI only
sky = 'cs' # cs 'clear sky' or rs 'real sky' which includes cloudiness index.
algore = 'gl' # Options: 'pl' = Power Law, 'nl' = Natural Log, 'd' for old default value of 1,
# 'cl' = Cameau Linear, 'cn' = Cameau linear Normalized, nLPI = 1.428 * LPI, Diffuse = 0.94 * nLPI * HalfDiff
# 'gn' = Gendron linear normalized, nLPI = 1.428 * LPI, Diffuse = 0.01719 + 1.024 * nLPI * HalfDiff
# 'gl' = Gendron linear. no normalization. It overestimates field radiation. Diffuse = 0.01719 + 1.024 * LPI
# Calibration of r.sun values is now handled seperately and should not be included here.
#----------------------------------------------------------------------------
# MAPSETS
mhorizon = bregion+'_horizon' # horizon mapset
msun = 'sun_'+bregion+'_'+calib # r.sun mapset using calibration
mlpi = 'lpi' # lpi mapset
mssr = 'ssr_'+bregion+'_'+algore # ssr output mapset<|fim▁end|> | cansource = '' # If you do not have a canopy raster, leave this empty '' and ssr_lidar.py will create it automatically. |
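# --- Illustrative sketch (added; not part of the row above) ---
# ssr_params.py is a shared configuration module; the companion SSR scripts
# presumably import it and key their behaviour off the run flags and map
# names (hypothetical consumer, not part of the SSR codebase):
import ssr_params as p

if p.rsun_run:                      # 0 = skip, 1 = run, 2 = run + overwrite
    overwrite = (p.rsun_run == 2)
    print('r.sun: dem=%s can=%s -> mapset %s (overwrite=%s)'
          % (p.dem, p.can, p.msun, overwrite))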
<|file_name|>webpack.ts<|end_file_name|><|fim▁begin|>import { HMR_PATH } from '../config/constants';
function webpackMiddleware(): object[] {
const middleware: object[] = [];
if (BalmJS.webpackCompiler) {
middleware.push(
require('webpack-dev-middleware')(
BalmJS.webpackCompiler,
Object.assign({}, BalmJS.config.server.devOptions, {<|fim▁hole|> )
);
if (BalmJS.config.server.useHMR) {
middleware.push(
require('webpack-hot-middleware')(BalmJS.webpackCompiler, {
log: false,
path: HMR_PATH
})
);
}
} else {
BalmJS.logger.warn('webpack middleware', 'Webpack compiler is not ready');
}
return middleware;
}
export default webpackMiddleware;<|fim▁end|> | publicPath: BalmJS.file.publicUrlOrPath,
stats: false
}) |
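// --- Illustrative sketch (added; not part of the row above) ---
// webpackMiddleware() returns an array of connect-style middleware, so a
// consumer can mount each entry directly. BalmJS presumably feeds this to
// browser-sync; a plain Express host (hypothetical) works the same way:
import express from 'express';
import webpackMiddleware from './webpack';

const app = express();
for (const mw of webpackMiddleware()) {
  app.use(mw as any); // typed as object[] above, so cast for app.use()
}
app.listen(3000);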
<|file_name|>functional_load.py<|end_file_name|><|fim▁begin|>from collections import defaultdict
from math import log2 as log
from gui.transcriptions import STANDARD_SYMBOLS
from imports import (QDialog, QHBoxLayout, QVBoxLayout, QGroupBox, QRadioButton, QButtonGroup, QPushButton,
QStackedWidget, QWidget, QComboBox, QMessageBox, QLabel, QLineEdit, QTableWidget, QTableWidgetItem)
class FunctionalLoadDialog(QDialog):
def __init__(self, corpus):
super().__init__()
self.corpus = corpus
self.results = list()
self.setWindowTitle('Functional Load')
layout = QVBoxLayout()
#Set up top row of radio button options
contrastBox = QGroupBox('Contrast')
contrastLayout = QHBoxLayout()
self.contrastGroup = QButtonGroup()
flexionOption = QRadioButton('Degrees of flexion')
flexionOption.click()
ductionOption = QRadioButton('Degree of duction')
oppositionOption = QRadioButton('Thumb opposition')
contactOption = QRadioButton('Thumb/finger contact')
customOption = QRadioButton('Custom options')
self.contrastGroup.addButton(flexionOption, id=0)
self.contrastGroup.addButton(ductionOption, id=1)
self.contrastGroup.addButton(oppositionOption, id=2)
self.contrastGroup.addButton(contactOption, id=3)
self.contrastGroup.addButton(customOption, id=4)
contrastLayout.addWidget(flexionOption)
contrastLayout.addWidget(ductionOption)
contrastLayout.addWidget(oppositionOption)
contrastLayout.addWidget(contactOption)
contrastLayout.addWidget(customOption)
contrastBox.setLayout(contrastLayout)
#set up stacked widgets
self.middleWidget = QStackedWidget()
#Collapse degress of flexion
flexionWidget = QWidget()
flexionLayout = QHBoxLayout()
self.flexionFingerSelection = QComboBox()
self.flexionFingerSelection.addItems(['Thumb', 'Index', 'Middle', 'Pinky', 'Ring', 'All'])
self.flexionJointSelection = QComboBox()
self.flexionJointSelection.addItems(['Proximal', 'Medial', 'Distal', 'All'])
#note: Thumb+Proximal not possible, and there's an alert window that will pop up if this combination is chosen
flexionLayout.addWidget(self.flexionFingerSelection)
flexionLayout.addWidget(self.flexionJointSelection)
flexionWidget.setLayout(flexionLayout)
#Collapse degrees of duction
ductionWidget = QWidget()
ductionLayout = QHBoxLayout()
self.ductionFingerSelection = QComboBox()
self.ductionFingerSelection.addItems(['Thumb/Finger', 'Index/Middle', 'Middle/Ring', 'Ring/Pinky', 'All'])
ductionLayout.addWidget(self.ductionFingerSelection)
ductionWidget.setLayout(ductionLayout)
#Collapse thumb opposition
oppositionWidget = QWidget()
oppositionLayout = QHBoxLayout()
oppositionWidget.setLayout(oppositionLayout)
#Collapse thumb/finger contact
contactWidget = QWidget()
contactLayout = QHBoxLayout()
contactWidget.setLayout(contactLayout)
#Collapse custom slots
customWidget = QWidget()
customLayout = QHBoxLayout()
customLayout.addWidget(QLabel('Merge this symbol: '))
self.customSymbo1A = QComboBox()
self.customSymbo1A.addItem('')
self.customSymbo1A.addItems(STANDARD_SYMBOLS)
self.customSymbo1A.setEditable(True)
customLayout.addWidget(self.customSymbo1A)
customLayout.addWidget(QLabel('with this symbol: '))
self.customSymbolB = QComboBox()
self.customSymbolB.addItem('')
self.customSymbolB.addItems(STANDARD_SYMBOLS)
self.customSymbolB.setEditable(True)
customLayout.addWidget(self.customSymbolB)
customLayout.addWidget(QLabel('in these slots: '))
self.customSlots = QLineEdit()
customLayout.addWidget(self.customSlots)
customLayout.addWidget(QLabel('(separate numbers with commas, leave blank to merge symbols everywhere)'))
customWidget.setLayout(customLayout)
#Build up middle widget
self.middleWidget.addWidget(flexionWidget)
self.middleWidget.addWidget(ductionWidget)
self.middleWidget.addWidget(oppositionWidget)
self.middleWidget.addWidget(contactWidget)
self.middleWidget.addWidget(customWidget)
#Connect slots and signals
flexionOption.clicked.connect(self.changeMiddleWidget)
ductionOption.clicked.connect(self.changeMiddleWidget)
oppositionOption.clicked.connect(self.changeMiddleWidget)
contactOption.clicked.connect(self.changeMiddleWidget)
customOption.clicked.connect(self.changeMiddleWidget)
#Bottom buttons (OK/Cancel)
buttonLayout = QHBoxLayout()
ok = QPushButton('OK')
ok.clicked.connect(self.accept)
cancel = QPushButton('Cancel')
cancel.clicked.connect(self.reject)
buttonLayout.addWidget(ok)
buttonLayout.addWidget(cancel)
layout.addWidget(contrastBox)
layout.addWidget(self.middleWidget)
layout.addLayout(buttonLayout)
self.setLayout(layout)
def changeMiddleWidget(self, e):
self.middleWidget.setCurrentIndex(self.contrastGroup.id(self.sender()))
def accept(self):
index = self.middleWidget.currentIndex()
if index == 0:
if (self.flexionFingerSelection.currentText() == 'Thumb'
and self.flexionJointSelection.currentText() == 'Proximal'):
alert = QMessageBox()
alert.setWindowTitle('Incompatible Options')
alert.setText('Thumbs cannot be selected for proximal joint. Choose either "Medial" or "Distal"')
alert.exec_()
return
self.calcByFlexion()
elif index == 1:
self.calcByDuction()
elif index == 4:
slots = self.customSlots.text()
alert = QMessageBox()
alert.setWindowTitle('Invalid slot numbers')
alert.setText('Slot numbers must be between 1 and 34 (inclusive)')
try:
slots = [int(x.strip()) for x in slots.split(',')]
except ValueError:
alert.exec_()
return
if any(n > 34 or n < 1 for n in slots):
alert.exec_()
return
self.calcCustom(slots)
super().accept()
def calculateEntropy(self, corpus=None):
corpus_size = len(corpus) if corpus is not None else len(self.corpus)
return corpus_size, sum([1 / corpus_size * log(1 / corpus_size) for n in range(corpus_size)]) * -1
def calcByDuction(self):
corpus_size, starting_h = self.calculateEntropy()
duction = self.ductionFingerSelection.currentText()
if duction == 'Thumb/Finger':<|fim▁hole|> slot = 19
elif duction == 'Middle/Ring':
slot = 24
elif duction == 'Ring/Pinky':
slot = 29
elif duction == 'All':
slot = -1
if slot > 1:
print('{} DUCTION'.format(duction.upper()))
print('Starting size = {}\nStarting entropy = {}'.format(corpus_size, starting_h))
new_corpus = defaultdict(int)
for word in self.corpus:
ch = word.config1hand1.copy()
ch[slot] = 'X'
new_corpus[''.join(ch)] += 1
new_corpus_size, ending_h = self.calculateEntropy(new_corpus)
print('After merging size = {}\nAfter merging entropy = {}'.format(len(new_corpus), ending_h))
print('Change in entropy = {}\n'.format(starting_h - ending_h))
else:
print('{} DUCTION'.format(duction.upper()))
print('Starting size = {}\nStarting entropy = {}'.format(corpus_size, starting_h))
new_corpus = defaultdict(int)
for word in self.corpus:
ch = word.config1hand1.copy()
ch[2] = 'X'
ch[19] = 'X'
ch[24] = 'X'
ch[29] = 'X'
new_corpus[''.join(ch)] += 1
new_corpus_size, ending_h = self.calculateEntropy(new_corpus)
print('After merging size = {}\nAfter merging entropy = {}'.format(len(new_corpus), ending_h))
print('Change in entropy = {}\n'.format(starting_h - ending_h))
result = [corpus_size, starting_h, new_corpus_size, ending_h, starting_h-ending_h]
self.results = [result]
def calcCustom(self, slots):
corpus_size, starting_h = self.calculateEntropy()
slots = [n-1 for n in slots]
# minus 1 because slot numbers start at 1 but list indices start at 0
symbolA = self.customSymbo1A.currentText()
symbolB = self.customSymbolB.currentText()
print('Merging {} and {}'.format(symbolA, symbolB))
print('Starting size = {}\nStarting entropy = {}'.format(corpus_size, starting_h))
new_corpus = defaultdict(int)
for word in self.corpus:
ch = word.config1hand1.copy()
for slot in slots:
if ch[slot] in [symbolA, symbolB]:
ch[slot] = 'X'
new_corpus[''.join(ch)] += 1
new_corpus_size, ending_h = self.calculateEntropy(new_corpus)
print('After merging size = {}\nAfter merging entropy = {}'.format(len(new_corpus), ending_h))
print('Change in entropy = {}\n'.format(starting_h - ending_h))
result = [corpus_size, starting_h, new_corpus_size, ending_h, starting_h-ending_h]
self.results = [result]
def calcByFlexion(self):
corpus_size, starting_h = self.calculateEntropy()
finger = self.flexionFingerSelection.currentText()
joint = self.flexionJointSelection.currentText()
jointDict = {'Proximal': 0,
'Medial': 1,
'Distal': 2,
'All': -1}
fingerDict = {'Thumb':2,
'Index': 16,
'Middle': 21,
'Ring': 26,
'Pinky': 31,
'All': -1}
offset = jointDict[joint]
slot = fingerDict[finger]
slot += offset
if slot > 0:#user chose particular fingers
print('{} {} JOINTS'.format(finger.upper(), joint.upper()))
print('Starting size = {}\nStarting entropy = {}'.format(corpus_size, starting_h))
new_corpus = defaultdict(int)
for word in self.corpus:
ch = word.config1hand1.copy()
ch[slot] = 'X'
new_corpus[''.join(ch)] += 1
new_corpus_size, ending_h = self.calculateEntropy(new_corpus)
print('After merging size = {}\nAfter merging entropy = {}'.format(len(new_corpus), ending_h))
print('Change in entropy = {}\n'.format(starting_h - ending_h))
self.results = [[corpus_size, starting_h, new_corpus_size, ending_h, starting_h-ending_h]]
else: #user chose an "All" option
if joint == 'All' and finger != 'All':
#all the joints on a particular finger
slot = fingerDict[finger]
print('ALL {} JOINTS'.format(finger.upper()))
print('Starting size = {}\nStarting entropy = {}'.format(corpus_size, starting_h))
new_corpus = defaultdict(int)
for word in self.corpus:
ch = word.config1hand1.copy()
ch[slot] = 'X' #proximal
ch[slot+1] = 'X' #medial
if not finger == 'Thumb':
ch[slot+2] = 'X' #distal
new_corpus[''.join(ch)] += 1
new_corpus_size, ending_h = self.calculateEntropy(new_corpus)
print('After merging size = {}\nAfter merging entropy = {}'.format(len(new_corpus), ending_h))
print('Change in entropy = {}\n'.format(starting_h-ending_h))
self.results = [[corpus_size, starting_h, new_corpus_size, ending_h, starting_h-ending_h]]
elif finger == 'All' and joint != 'All':
#a particular joint on all the fingers
if joint == 'Proximal':
slot = 17
elif joint == 'Medial':
slot = 18
elif joint == 'Distal':
slot = 19
print('ALL {} JOINTS'.format(joint.upper()))
print('Starting size = {}\nStarting entropy = {}'.format(corpus_size, starting_h))
# for finger,slot in [('INDEX', 17), ('MIDDLE',22), ('RING',27), ('PINKY',32)]:
new_corpus = defaultdict(int)
for word in self.corpus:
ch = word.config1hand1.copy()
ch[slot] = 'X'
ch[slot+5] = 'X'
ch[slot+10] = 'X'
ch[slot+15] = 'X'
new_corpus[''.join(ch)] += 1
new_corpus_size, ending_h = self.calculateEntropy(new_corpus)
print('After merging size = {}\nAfter merging entropy = {}'.format(len(new_corpus), ending_h))
print('Change in entropy = {}\n'.format(starting_h-ending_h))
self.results = [[corpus_size, starting_h, new_corpus_size, ending_h, starting_h-ending_h]]
elif finger == 'All' and joint == 'All':
results = list()
for finger, slot in [('THUMB', 2), ('INDEX', 17), ('MIDDLE', 22), ('RING', 27), ('PINKY', 31)]:
print('ALL {} JOINTS'.format(joint.upper()))
print('Starting size = {}\nStarting entropy = {}'.format(corpus_size, starting_h))
new_corpus = defaultdict(int)
for word in self.corpus:
ch = word.config1hand1.copy()
ch[slot] = 'X'
ch[slot+1] = 'X'
if not finger == 'Thumb':
ch[slot+2] = 'X'
new_corpus[''.join(ch)] += 1
new_corpus_size, ending_h = self.calculateEntropy(new_corpus)
print('After merging size = {}\nAfter merging entropy = {}'.format(len(new_corpus), ending_h))
print('Change in entropy = {}\n'.format(starting_h-ending_h))
results.append([corpus_size, starting_h, new_corpus_size, ending_h, starting_h-ending_h])
self.results = results
class FunctionalLoadResultsTable(QDialog):
def __init__(self, results):
super().__init__()
layout = QHBoxLayout()
table = QTableWidget()
table.setColumnCount(5)
table.setHorizontalHeaderLabels(['Starting corpus size', 'Starting entropy',
'Ending corpus size', 'Ending entropy', 'Change in entropy'])
for result in results:
table.insertRow(table.rowCount())
for i, item in enumerate(result):
newItem = QTableWidgetItem(str(item))
table.setItem(table.rowCount()-1, i, newItem)
layout.addWidget(table)
self.setLayout(layout)<|fim▁end|> | slot = 3
elif duction == 'Index/Middle': |
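# --- Illustrative sketch (added; not part of the row above) ---
# The measurement behind all of the calc* methods, stripped of the GUI: each
# distinct transcription is treated as equiprobable, so corpus entropy is
# log2(N); the functional load of a contrast is the entropy lost when two
# symbols are merged in a slot.
from collections import defaultdict
from math import log2

def functional_load(transcriptions, slot, symbol_a, symbol_b):
    merged = defaultdict(int)
    for t in set(transcriptions):
        ch = list(t)
        if ch[slot] in (symbol_a, symbol_b):
            ch[slot] = 'X'
        merged[''.join(ch)] += 1
    return log2(len(set(transcriptions))) - log2(len(merged))

# e.g. functional_load(['EEH', 'EFH', 'EEB'], 1, 'E', 'F')
# == log2(3) - log2(2), since 'EEH' and 'EFH' collapse into one sign.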
<|file_name|>extract.ts<|end_file_name|><|fim▁begin|>import * as fs from 'fs';
import { Storage, RowFunction } from './database/Storage';
import { RowExtractor } from './extractors/RowExtractor';
import { parseISOLocal } from './helpers/DateHelper';
const args = process.argv.slice(2);
if (args.length !== 3) {
console.log('Usage: npm run extract [start_date] [end_date] [output_file]');
console.log(' ex: npm run extract 2017-06-02 2017-06-03 extract.csv');
console.log(' This tool works with the local timezone');
process.exit();
}
const [start, end, file] = args;
const startDate = parseISOLocal(start + 'T00:00:00');
const endDate = parseISOLocal(end + 'T00:00:00');
const MAX_PRECISION = 4;
<|fim▁hole|>extractor.setFormatFunction(row => {
const val = row['value'] ? row['value'].toFixed(MAX_PRECISION) : 'NULL';
return [row['date_utc'], row['date_local'], val, row['controller'], row['register'], row['unit']];
});
extractor.writeCSV(file, ',', '"', nbRow => {
console.log(`Done! ${nbRow} row(s) extracted.`);
});<|fim▁end|> | const storage = new Storage(false);
const extractor = new RowExtractor('./reports/custom/');
extractor.setReader((callback: RowFunction, endCb: Function) => storage.readInterval(startDate, endDate, false, callback, endCb));
extractor.setColumnNames('Timestamp UTC', 'Timestamp', 'Value', 'Source', 'Measurement', 'Unit'); |
<|file_name|>hdf.py<|end_file_name|><|fim▁begin|>"""Functions for accessing HDF5 files."""
from __future__ import division
from __future__ import print_function
import re
import h5py as h5
import numpy as np
import six
from six.moves import range
from ..utils import filter_regex, to_list
def _ls(item, recursive=False, groups=False, level=0):
keys = []
if isinstance(item, h5.Group):
if groups and level > 0:
keys.append(item.name)
if level == 0 or recursive:
for key in list(item.keys()):
keys.extend(_ls(item[key], recursive, groups, level + 1))
elif not groups:
keys.append(item.name)<|fim▁hole|>
def ls(filename, group='/', recursive=False, groups=False,
regex=None, nb_key=None, must_exist=True):
"""List name of records HDF5 file.
Parameters
----------
filename:
Path of HDF5 file.
group:
HDF5 group to be explored.
recursive: bool
If `True`, list records recursively.
groups: bool
If `True`, only list group names, not the names of datasets.
regex: str
Regex to filter listed records.
nb_key: int
Maximum number of records to be listed.
must_exist: bool
If `False`, return `None` if file or group does not exist.
Returns
-------
list
`list` with the names of records in `filename`.
"""
if not group.startswith('/'):
group = '/%s' % group
h5_file = h5.File(filename, 'r')
if not must_exist and group not in h5_file:
return None
keys = _ls(h5_file[group], recursive, groups)
for i, key in enumerate(keys):
keys[i] = re.sub('^%s/' % group, '', key)
h5_file.close()
if regex:
keys = filter_regex(keys, regex)
if nb_key is not None:
keys = keys[:nb_key]
return keys
def write_data(data, filename):
"""Write data in dict `data` to HDF5 file."""
is_root = isinstance(filename, str)
group = h5.File(filename, 'w') if is_root else filename
for key, value in six.iteritems(data):
if isinstance(value, dict):
key_group = group.create_group(key)
write_data(value, key_group)
else:
group[key] = value
if is_root:
group.close()
def hnames_to_names(hnames):
"""Flattens `dict` `hnames` of hierarchical names.
Converts a hierarchical `dict`, e.g. hnames={'a': ['a1', 'a2'], 'b': 'b1'}, to a flat
list of keys for accessing an HDF5 file, e.g. ['a/a1', 'a/a2', 'b/b1']
"""
names = []
for key, value in six.iteritems(hnames):
if isinstance(value, dict):
for name in hnames_to_names(value):
names.append('%s/%s' % (key, name))
elif isinstance(value, list):
for name in value:
names.append('%s/%s' % (key, name))
elif isinstance(value, str):
names.append('%s/%s' % (key, value))
else:
names.append(key)
return names
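# Illustration: hnames_to_names({'a': ['a1', 'a2'], 'b': 'b1'})
# returns ['a/a1', 'a/a2', 'b/b1'].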
def reader(data_files, names, batch_size=128, nb_sample=None, shuffle=False,
loop=False):
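"""Yield batches of the datasets `names` read from `data_files`.
Files are visited in order (reshuffled per epoch when `shuffle` is True,
which also permutes the samples inside each file). Each yielded value is a
dict mapping every name to a slice of at most `batch_size` samples; the
generator stops after `nb_sample` samples unless `loop` is True.
"""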
if isinstance(names, dict):
names = hnames_to_names(names)
else:
names = to_list(names)
# Copy, since list will be changed if shuffle=True
data_files = list(to_list(data_files))
# Check if names exist
h5_file = h5.File(data_files[0], 'r')
for name in names:
if name not in h5_file:
raise ValueError('%s does not exist!' % name)
h5_file.close()
if nb_sample:
# Select the first k files s.t. the total sample size is at least
# nb_sample. Only these files will be shuffled.
_data_files = []
nb_seen = 0
for data_file in data_files:
h5_file = h5.File(data_file, 'r')
nb_seen += len(h5_file[names[0]])
h5_file.close()
_data_files.append(data_file)
if nb_seen >= nb_sample:
break
data_files = _data_files
else:
nb_sample = np.inf
file_idx = 0
nb_seen = 0
while True:
if shuffle and file_idx == 0:
np.random.shuffle(data_files)
h5_file = h5.File(data_files[file_idx], 'r')
data_file = dict()
for name in names:
data_file[name] = h5_file[name]
nb_sample_file = len(list(data_file.values())[0])
if shuffle:
# Shuffle data within the entire file, which requires reading
# the entire file into memory
idx = np.arange(nb_sample_file)
np.random.shuffle(idx)
for name, value in six.iteritems(data_file):
data_file[name] = value[:len(idx)][idx]
nb_batch = int(np.ceil(nb_sample_file / batch_size))
for batch in range(nb_batch):
batch_start = batch * batch_size
nb_read = min(nb_sample - nb_seen, batch_size)
batch_end = min(nb_sample_file, batch_start + nb_read)
_batch_size = batch_end - batch_start
if _batch_size == 0:
break
data_batch = dict()
for name in names:
data_batch[name] = data_file[name][batch_start:batch_end]
yield data_batch
nb_seen += _batch_size
if nb_seen >= nb_sample:
break
h5_file.close()
file_idx += 1
assert nb_seen <= nb_sample
if nb_sample == nb_seen or file_idx == len(data_files):
if loop:
file_idx = 0
nb_seen = 0
else:
break
def _to_dict(data):
if isinstance(data, np.ndarray):
data = [data]
return dict(zip(range(len(data)), data))
def read_from(reader, nb_sample=None):
from .utils import stack_dict
data = dict()
nb_seen = 0
is_dict = True
for data_batch in reader:
if not isinstance(data_batch, dict):
data_batch = _to_dict(data_batch)
is_dict = False
for key, value in six.iteritems(data_batch):
values = data.setdefault(key, [])
values.append(value)
nb_seen += len(list(data_batch.values())[0])
if nb_sample and nb_seen >= nb_sample:
break
data = stack_dict(data)
if nb_sample:
for key, value in six.iteritems(data):
data[key] = value[:nb_sample]
if not is_dict:
data = [data[i] for i in range(len(data))]
return data
def read(data_files, names, nb_sample=None, batch_size=1024, *args, **kwargs):
data_reader = reader(data_files, names, batch_size=batch_size,
nb_sample=nb_sample, loop=False, *args, **kwargs)
return read_from(data_reader, nb_sample)<|fim▁end|> | return keys |
<|file_name|>backendcontentiframe.js<|end_file_name|><|fim▁begin|>/**
* This file is part of the TYPO3 CMS project.
*
* It is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License, either version 2
* of the License, or any later version.
*
* For the full copyright and license information, please read the
* LICENSE.txt file that was distributed with this source code.
*
* The TYPO3 project - inspiring people to share!
*/
Ext.ns('TYPO3');
<|fim▁hole|> var card;
var wrapper;
wrapper = Ext.getCmp('typo3-contentContainerWrapper');
this.url = source;
if(wrapper) {
card = Ext.getCmp('typo3-card-' + TYPO3.ModuleMenu.App.loadedModule);
if((card != undefined) && (source.search('extjspaneldummy.html') > -1)) {
wrapper.getLayout().setActiveItem('typo3-card-' + TYPO3.ModuleMenu.App.loadedModule);
if (typeof wrapper.getComponent(('typo3-card-' + TYPO3.ModuleMenu.App.loadedModule)).setUrl === 'function') {
wrapper.getComponent(('typo3-card-' + TYPO3.ModuleMenu.App.loadedModule)).setUrl(source);
}
} else {
wrapper.getLayout().setActiveItem(this.id);
this.body.dom.src = source;
this.setMask();
}
}
},
getUrl: function () {
var wrapper;
var card;
wrapper = Ext.getCmp('typo3-contentContainerWrapper');
if(wrapper) {
card = wrapper.getLayout().activeItem;
if(card.id == this.id) {
return this.body.dom.src;
} else if(typeof card.getUrl == 'function') {
return card.getUrl();
} else {
return this.url;
}
}
}
});
Ext.reg('backendContentIframePanel', TYPO3.backendContentIframePanel);<|fim▁end|> | TYPO3.backendContentIframePanel = Ext.extend(TYPO3.iframePanel ,{
setUrl: function(source) { |
<|file_name|>origin.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::sync::Arc;
use url::{Host, Url};
use url::Origin as UrlOrigin;
/// A representation of an [origin](https://html.spec.whatwg.org/multipage/#origin-2).
#[derive(HeapSizeOf, JSTraceable)]
pub struct Origin {
#[ignore_heap_size_of = "Arc<T> has unclear ownership semantics"]
inner: Arc<UrlOrigin>,
}
impl Origin {
/// Create a new origin comprising a unique, opaque identifier.
pub fn opaque_identifier() -> Origin {
Origin {
inner: Arc::new(UrlOrigin::new_opaque()),
}
}
/// Create a new origin for the given URL.<|fim▁hole|> Origin {
inner: Arc::new(url.origin()),
}
}
/// Does this origin represent a host/scheme/port tuple?
pub fn is_scheme_host_port_tuple(&self) -> bool {
self.inner.is_tuple()
}
/// Return the host associated with this origin.
pub fn host(&self) -> Option<&Host<String>> {
match *self.inner {
UrlOrigin::Tuple(_, ref host, _) => Some(host),
UrlOrigin::Opaque(..) => None,
}
}
/// https://html.spec.whatwg.org/multipage/#same-origin
pub fn same_origin(&self, other: &Origin) -> bool {
self.inner == other.inner
}
pub fn copy(&self) -> Origin {
Origin {
inner: Arc::new((*self.inner).clone()),
}
}
pub fn alias(&self) -> Origin {
Origin {
inner: self.inner.clone(),
}
}
}<|fim▁end|> | pub fn new(url: &Url) -> Origin { |
<|file_name|>geographichierarchies.go<|end_file_name|><|fim▁begin|>package trafficmanager
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"<|fim▁hole|>
// GeographicHierarchiesClient is the client for the GeographicHierarchies methods of the Trafficmanager service.
type GeographicHierarchiesClient struct {
BaseClient
}
// NewGeographicHierarchiesClient creates an instance of the GeographicHierarchiesClient client.
func NewGeographicHierarchiesClient(subscriptionID string) GeographicHierarchiesClient {
return NewGeographicHierarchiesClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewGeographicHierarchiesClientWithBaseURI creates an instance of the GeographicHierarchiesClient client using a
// custom endpoint. Use this when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds,
// Azure stack).
func NewGeographicHierarchiesClientWithBaseURI(baseURI string, subscriptionID string) GeographicHierarchiesClient {
return GeographicHierarchiesClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// GetDefault gets the default Geographic Hierarchy used by the Geographic traffic routing method.
func (client GeographicHierarchiesClient) GetDefault(ctx context.Context) (result GeographicHierarchy, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/GeographicHierarchiesClient.GetDefault")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.GetDefaultPreparer(ctx)
if err != nil {
err = autorest.NewErrorWithError(err, "trafficmanager.GeographicHierarchiesClient", "GetDefault", nil, "Failure preparing request")
return
}
resp, err := client.GetDefaultSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "trafficmanager.GeographicHierarchiesClient", "GetDefault", resp, "Failure sending request")
return
}
result, err = client.GetDefaultResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "trafficmanager.GeographicHierarchiesClient", "GetDefault", resp, "Failure responding to request")
return
}
return
}
// GetDefaultPreparer prepares the GetDefault request.
func (client GeographicHierarchiesClient) GetDefaultPreparer(ctx context.Context) (*http.Request, error) {
const APIVersion = "2017-03-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPath("/providers/Microsoft.Network/trafficManagerGeographicHierarchies/default"),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetDefaultSender sends the GetDefault request. The method will close the
// http.Response Body if it receives an error.
func (client GeographicHierarchiesClient) GetDefaultSender(req *http.Request) (*http.Response, error) {
return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}
// GetDefaultResponder handles the response to the GetDefault request. The method always
// closes the http.Response Body.
func (client GeographicHierarchiesClient) GetDefaultResponder(resp *http.Response) (result GeographicHierarchy, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}<|fim▁end|> | "github.com/Azure/go-autorest/tracing"
"net/http"
) |
<|file_name|>sized-borrowed-pointer.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(dead_code)]
// Possibly-dynamic size of typaram should be cleared at pointer boundary.
// pretty-expanded FIXME #23616
fn bar<T: Sized>() { }
fn foo<T>() { bar::<&T>() }
pub fn main() { }<|fim▁end|> | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT. |
<|file_name|>explicit_instantiation.cc<|end_file_name|><|fim▁begin|>// { dg-options "-std=gnu++11" }
// { dg-do compile }
// 2007-05-02 Benjamin Kosnik <[email protected]>
// Copyright (C) 2007-2014 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License along
// with this library; see the file COPYING3. If not see
// <http://www.gnu.org/licenses/>.
<|fim▁hole|>
namespace std
{
typedef short test_type;
template struct decay<test_type>;
}<|fim▁end|> |
// NB: This file is for testing type_traits with NO OTHER INCLUDES.
#include <type_traits> |
<|file_name|>module.js<|end_file_name|><|fim▁begin|>import { history } from 'byebye';
import React from 'react';
import AltContainer from 'alt-container';
import { MANUAL_LOGOUT } from 'app-constants';
import { loginIfAuthorized as autoFacebookLogin } from 'managers/facebook';
import Analytics from 'instances/analytics';
import parseJWT from 'helpers/parseJWT';
import { decode as decodeBase64 } from 'helpers/base64';
import Auth from 'controllers/auth';
import LoginActions from 'actions/LoginActions';
import LoginStore from 'stores/LoginStore';
import EmailLoginPage from './EmailLoginPage';
import TokenErrorPage from './TokenErrorPage';
export function openTokenLogin(token) {
LoginActions.loginWithEmailTokenAndRedirect(token);
return (
<AltContainer
component={TokenErrorPage}
stores={{ LoginStore }}
actions={{ LoginActions }}
inject={{
data: parseJWT(token),
}}
/>
);
}
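/**
 * Deeplink login flow: an authenticated user is sent straight to the item;
 * otherwise a silent Facebook login is attempted and, on a manual logout or
 * UnableToLogin error, we fall back to sending a login email.
 */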
export function openItemLogin(itemId, token, force) {
if (Auth.getId()) {
const url = `/item/${itemId}`;
history.navigate(url, { trigger: true, replace: false }, { returnUrl: '/' });
return <span />;
}
Analytics.track('Email login/deeplink landing');
const tokenData = JSON.parse(decodeBase64(token));
const data = {
item_id: itemId,
user_id: tokenData.id,
domain: tokenData.domain,
name: tokenData.name,
};
// auto login with facebook, and when FB login fails we will send you an email
autoFacebookLogin()
.then(() => {
LoginActions.loginSuccess(null, {
login_type: 'facebookautologin',
platform: 'facebook',
});
})
.catch((err) => {
if (err.type === MANUAL_LOGOUT || err.type === 'UnableToLogin') {
// Convert the force parameter to a boolean. If `true`, it forces the backend to send the
// email, regardless of the default time limit of once a day.
LoginActions.sendLoginEmail(data.user_id, data.item_id, data.redirect, !!force);
return;<|fim▁hole|>
return (
<AltContainer
component={EmailLoginPage}
stores={{ loginState: LoginStore }}
inject={{ data }}
/>
);
}
// WEBPACK FOOTER //
// ./src/js/app/modules/emailLogin/module.js<|fim▁end|> | }
throw err;
}); |
<|file_name|>udpserver.cpp<|end_file_name|><|fim▁begin|>#include <stdio.h>
#include <unistd.h>
#include <sys/socket.h>
#include <sys/types.h>
#include <arpa/inet.h>
#include <netinet/in.h>
#include <string.h>
#define BUFFER_SIZE 1024
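// Minimal UDP server: acknowledges every datagram on port 2047 with "OK".
// It can be exercised from a shell with, e.g., `nc -u localhost 2047`
// (assuming netcat is available).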
int main()
{
int socketfd;
int port = 2047;
struct sockaddr_in server_in;
struct sockaddr_in client_in;
int client_in_length = sizeof(client_in);
char buffer[BUFFER_SIZE];
//Create a socket
socketfd = socket(AF_INET, SOCK_DGRAM, 0);
if (socketfd < 0)
{
perror("Creating a socket failed!");
return -1;
}
// Set the server address, by defaulting server_in to 0
// then setting it to the port, before binding
memset((char *) &server_in, 0, sizeof(server_in));
server_in.sin_family = AF_INET; //IPV4
server_in.sin_port = htons(port);
server_in.sin_addr.s_addr = htonl(INADDR_ANY); //Any interface (htonl, not htons: s_addr is a 32-bit field)
//Bind, Note the second parameter needs to be a sockaddr
if (bind(socketfd, (struct sockaddr *) &server_in, sizeof(server_in)) == -1)
{
perror("Binding Failed, ec set to errno!");<|fim▁hole|> }
//Keep listening, for stuff
while (true)
{
memset(buffer, 0, sizeof(buffer));
if (recvfrom(socketfd, buffer, BUFFER_SIZE, 0, (struct sockaddr *) &client_in, (socklen_t *)&client_in_length) == -1)
{
perror("Receiving from client failed");
return -3;
}
if (sendto(socketfd, "OK\n", 3, 0, (struct sockaddr *) &client_in, client_in_length) == -1)
perror("Sending to client failed");
//Make sure it's not empty lines
printf("Message: %s", buffer);
}
printf("Hello to the world of tomorrow");
return 0;
}<|fim▁end|> | return -2; |
<|file_name|>example1.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python
import yaml
def main():
#f = open("data.yaml", "r")
f = open("data2.yaml", "r")
yd = yaml.load(f)
#print "YAML Data: %s" % str(yd)
for key in yd:
print "%s" % key
print "Type: %s" % str(type(yd[key]))
print str(yd[key])
print ""
<|fim▁hole|> f = open("data2.yaml", "r")
yd = yaml.load(f)
#print "YAML Data: %s" % str(yd)
for key in yd:
print "%s" % key
print "Type: %s" % str(type(yd[key]))
print str(yd[key])
print ""
if __name__ == "__main__":
main()<|fim▁end|> | def yaml_test():
#f = open("data.yaml", "r") |
<|file_name|>ast.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 Pierre Talbot (IRCAM)
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
pub use ast::*;
pub use front::ast::Expression;
pub use rust::{ExtCtxt,Attribute,SpannedIdent};
pub use monad::partial::Partial;
use front::ast::Grammar as FGrammar;
use std::collections::HashMap;
use std::default::Default;
pub struct Grammar
{
pub name: Ident,
pub rules: HashMap<Ident, Rule>,
pub rust_functions: HashMap<Ident, RItem>,
pub rust_items: Vec<RItem>,
pub attributes: GrammarAttributes
}
impl Grammar
{
pub fn new(fgrammar: &FGrammar) -> Partial<Grammar> {
let rules_len = fgrammar.rules.len();
let grammar = Grammar {
name: fgrammar.name.clone(),
rules: HashMap::with_capacity(rules_len),
rust_functions: HashMap::new(),
rust_items: vec![],
attributes: GrammarAttributes::default()
};
Partial::Value(grammar)
}
}
#[derive(Default)]
pub struct GrammarAttributes
{
pub print_attr: PrintAttribute
}
impl GrammarAttributes
{
pub fn new(print_attr: PrintAttribute) -> GrammarAttributes {
GrammarAttributes {
print_attr: print_attr
}
}
}
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum PrintAttribute
{
DebugApi,
ShowApi,
Nothing
}
impl PrintAttribute
{
pub fn merge(self, other: PrintAttribute) -> PrintAttribute {
use self::PrintAttribute::*;
match (self, other) {
(Nothing, DebugApi)
| (ShowApi, DebugApi) => DebugApi,
(Nothing, ShowApi) => ShowApi,
_ => Nothing
}
}
pub fn debug_api(self) -> bool {
self == PrintAttribute::DebugApi
}
pub fn show_api(self) -> bool {
self == PrintAttribute::ShowApi
}
}
impl Default for PrintAttribute
{
fn default() -> PrintAttribute {
PrintAttribute::Nothing
}
}
pub struct Rule
{
pub name: SpannedIdent,
pub def: Box<Expression>,
}
impl Rule
{
pub fn new(name: SpannedIdent, def: Box<Expression>) -> Rule {
Rule{
name: name,
def: def
}
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>WebDriverUtils.java<|end_file_name|><|fim▁begin|>package com.epam.jdi.uitests.mobile.appium.driver;
/*
* Copyright 2004-2016 EPAM Systems
*
* This file is part of JDI project.
*
* JDI is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JDI is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with JDI. If not, see <http://www.gnu.org/licenses/>.
*/
import org.openqa.selenium.os.CommandLine;
import org.openqa.selenium.os.WindowsUtils;
import static com.epam.commons.LinqUtils.first;
import static com.epam.commons.LinqUtils.where;<|fim▁hole|>/**
* Created by 12345 on 26.01.2015.
*/
public final class WebDriverUtils {
private WebDriverUtils() { }
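/**
 * Kills any lingering Appium/Android driver processes found in the Windows
 * process map. On non-Windows systems (or on any error) the lookup fails and
 * the exception is deliberately swallowed, making this a no-op.
 */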
public static void killAllRunWebDrivers() {
try {
String pid = getPid();
while (pid != null) {
killPID(pid);
pid = getPid();
}
} catch (Exception ignore) {
// Ignore in case of not windows Operation System or any other errors
}
}
private static String getPid() {
return first(where(tryGetResult(WindowsUtils::procMap), el -> el.getKey() != null
&& (el.getKey().contains("Android") && el.getKey().contains("Appium"))));
}
private static void killPID(String processID) {
new CommandLine("taskkill", "/f", "/t", "/pid", processID).execute();
}
}<|fim▁end|> | import static com.epam.commons.TryCatchUtil.tryGetResult;
|
<|file_name|>badges.js<|end_file_name|><|fim▁begin|>class AchievementEvt {
constructor(subType, payload) {
this.type = 'achievements';
this.subType = subType;
this.payload = payload;
}
};
/**
* generate PageVisitEvt
* @param {string} pageName
* @returns {AchievementEvt}
*/
export class PageVisitEvt extends AchievementEvt {
constructor(pageName) {<|fim▁hole|>
/**
* generate ForgotPasswordEvt
* @param {String} targetEmail
* @returns {AchievementEvt}
*/
export class ForgotPasswordEvt extends AchievementEvt {
constructor(targetEmail) {
super('forgot-password', targetEmail);
}
};
/**
* generate MoodRegisteredEvt
* @returns {AchievementEvt}
*/
export class MoodRegisteredEvt extends AchievementEvt {
constructor() {
super('mood-registered');
}
};
/**
* generate TimeTravelEvt
* @param {Object} targetRange
* @returns {AchievementEvt}
*/
export class TimeTravelEvt extends AchievementEvt {
constructor(targetRange) {
super('time-travel', targetRange);
}
};
// TODO
// clickedOnNotification from SW - just after action [fast hand, tchin tchin, chain reaction]
// ensure all UI events are processed - OK [mood entry, page visits, past, future, forgot password] await TEST [duck face] KO [duck face]
// snackbar for achievements + animation
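// Example (hypothetical usage): achievement events serialize to plain
// objects, e.g.
//   new TimeTravelEvt({ from: '2017-01-01', to: '2016-11-01' })
//   // -> { type: 'achievements', subType: 'time-travel', payload: {...} }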
const badgesConfig = {
badgesArray: [
{ title: 'adventurer', description: 'visited all pages in one session', badge: 'adventurer' },
{ title: 'lost in translation', description: 'went to 404 page', badge: 'lost-in-translation' },
{ title: 'duck face', description: 'got a custom avatar', badge: 'no-more-faceless' },
{ title: 'goldfish', description: 'forgot password mechanism activated X1', badge: 'goldfish' },
{ title: 'alzeihmer', description: 'forgot password mechanism activated X3', badge: '019-grandfather' },
{ title: 'mood alert', description: 'subscribed for notifications', badge: '003-smartphone' },
{ title: 'mood monitor', description: 'multiple subscriptions for notifications', badge: '010-technology' },
{ title: 'fast hand', description: 'clicked on notification', badge: 'fast-hand' },
{ title: 'tchin tchin', description: 'your mood update notification nudged someone else mood update', badge: '001-toast' },
{ title: 'chain reaction', description: 'your mood update notification nudged two persons to update their mood', badge: '007-share' },
{ title: 'back to the future', description: 'time traveled more than one month in the past', badge: 'back-to-the-future' },
{ title: 'fortuneteller', description: 'tried to time travel more than a month in the future. Tip: it is useless!', badge: '010-crystal-ball' },
{ title: 'noob moodist', description: 'first mood update', badge: '014-helmet' },
{ title: 'baron moodist', description: 'updated mood three days straight', badge: '011-crown-2' },
{ title: 'duke moodist', description: 'updated mood a full week straight', badge: '012-crown-1' },
{ title: 'archduke moodist', description: 'updated mood a full month straight', badge: '010-crown-3' },
{ title: 'king moodist', description: 'updated mood three months straight', badge: '013-crown' },
{ title: 'emperor moodist', description: 'updated mood a full year straight', badge: '003-greek' },
{ title: 'happy days', description: 'positive mood for the last five updates', badge: '005-heavy-metal' },
{ title: 'depression', description: 'negative mood for the last five updates', badge: '006-crying' },
{ title: 'zen & balanced', description: 'neutral mood for the last three updates', badge: '003-libra' },
{ title: 'mood roller coaster', description: 'changed mood from positive to negative, or reverse, in one day', badge: '005-roller-coaster' },
{ title: 'mood swings meds', description: 'changed mood more than three times a day', badge: '008-pills' },
{ title: 'blissed', description: 'mood updated to highest possible score', badge: '004-island' },
{ title: 'suicidal tendencies', description: 'mood updated to lowest possible score', badge: '006-gallows' },
{ title: 'come back', description: 'from negative to positive mood', badge: '005-profits' },
{ title: 'mood swing', description: 'from positive to negative mood', badge: '004-loss' },
{ title: 'stairway to heaven', description: 'mood increased 5 scores at once', badge: '015-paper-plane' },
{ title: 'nuclear disaster', description: 'mood decreased 5 scores at once', badge: '007-bomb-detonation' }
],
AchievementsEvts: {
PageVisitEvt: PageVisitEvt,
ForgotPasswordEvt: ForgotPasswordEvt,
TimeTravelEvt: TimeTravelEvt,
MoodRegisteredEvt: MoodRegisteredEvt
},
technical: {
gravatarImagesCacheName: '$$$toolbox-cache$$$https://moodies-1ad4f.firebaseapp.com/$$$',
adventurerPageList: ['home', 'profile', 'users', 'mood-input', 'time-travel', 'about', 'badges'],
adventurerID: 'adventurer',
lostInTranslationPageList: ['404'],
lostInTranslationID: 'lost in translation',
backToTheFutureID: 'back to the future',
fortunetellerID: 'fortuneteller',
alzeihmerGoldfishID: 'forgotPasswordCounter',
duckFaceID: 'duck face',
moodsRelatedAchievementsSpecialEvt: 'all-moods-related-achievements'
}
};
export default badgesConfig;<|fim▁end|> | super('page-visited', pageName);
}
}; |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -- Content-Encoding: UTF-8 --
"""
Cohorte Debug REST API
:authors: Bassem Debbabi
:copyright: Copyright 2015, isandlaTech
:license: Apache Software License 2.0
"""
# iPOPO decorators
from pelix.ipopo.decorators import ComponentFactory, Provides, Property, Instantiate, \
Validate, Invalidate, Requires, RequiresMap, Bind, BindField, UnbindField
import pelix.remote
# Herald
import herald
import herald.beans as beans
# Cohorte
import cohorte.composer
import cohorte.monitor
# Standard library
import logging
import threading
import json, time, os
try:
# Python 3
import urllib.parse as urlparse
except ImportError:
# Python 2
import urlparse
# Cohorte platform debug agent and API
import debug
_logger = logging.getLogger("debug.debug")
# collecting information
SUBJECT_GET_HTTP = "cohorte/shell/agent/get_http"
# API path
DEBUG_REST_API_PATH = "debug/api/v1"
# API Version
DEBUG_REST_API_VERSION = "v1"
# VERSION
COHORTE_VERSION = "1.0.1"
@ComponentFactory("cohorte-debug-api-factory")
@Provides(['pelix.http.servlet'])
@Property('_path', 'pelix.http.path', "/debug")
@Requires("_agent", debug.SERVICE_DEBUG)
# Consume a single Herald Directory service
@Requires("_directory", herald.SERVICE_DIRECTORY)
@Requires('_herald', herald.SERVICE_HERALD)
# Consume an Isolate Composer service
@RequiresMap("_icomposers", cohorte.composer.SERVICE_COMPOSER_ISOLATE, 'endpoint.framework.uuid',
optional=True, allow_none=False)
@Requires("_icomposerlocal", cohorte.composer.SERVICE_COMPOSER_ISOLATE,
optional=True, spec_filter="(!(service.imported=*))")
@Requires("_isolates", cohorte.composer.SERVICE_COMPOSER_ISOLATE, aggregate=True, optional=True)
@Property('_reject', pelix.remote.PROP_EXPORT_REJECT, ['pelix.http.servlet', herald.SERVICE_DIRECTORY_LISTENER])
@Instantiate('cohorte-debug-api')
class DebugAPI(object):
"""
A Component that provides the REST Admin API
"""
def __init__(self):
# lock
self._lock = threading.Lock()
# servlet's path
self._path = None
# cohorte platform debug agent
self._agent = None
# herald directory service
self._directory = None
self._herald = None
# isolate composer service
self._icomposers = {}
self._icomposerlocal = None
self._isolates = []
# List of platform activities
self._platform_activities = []
self._platform_activities_index = 0
# a Map of last updated lists
self._last_updates = {}
time_now = time.time()
self._last_updates["nodes"] = time_now
self._last_updates["platform_activities"] = time_now
def decrypt_request(self, request, action="GET"):
"""
Decrypts the request and extracts the following information:
:return path: full path without host:port (first and last / are removed)
:return parts: list of query parts
:return in_data: json object of the associated request data
"""
o = urlparse.urlparse(request.get_path())
path = o.path
query = o.query
# prepare query path: remove first and last '/' if exists
if path[0] == '/':
path = path[1:]
if path[-1] == '/':
path = path[:-1]
parts = str(path).split('/')
in_data = None
if action == "GET":
in_data = urlparse.parse_qs(query, keep_blank_values=True)
else:
data = request.read_data()
if data != None:
in_data = json.loads(str(data))
else:
in_data = urlparse.parse_qs(query, keep_blank_values=True)
#print(json.dumps(in_data, sort_keys=False, indent=4, separators=(',', ': ')))
return (path, parts, in_data)
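# Illustration: a GET on "/debug/api/v1/isolates?raw=1" yields
# path="debug/api/v1/isolates", parts=['debug', 'api', 'v1', 'isolates']
# and in_data={'raw': ['1']} (parse_qs wraps every value in a list).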
def prepare_response(self, request, action):
data = {"meta": {}}
data["meta"]["status"] = 200
data["meta"]["msg"] = "OK"
data["meta"]["api-version"] = DEBUG_REST_API_VERSION
data["meta"]["api-method"] = ""
data["meta"]["cohorte-version"] = COHORTE_VERSION
data["meta"]["request-path"] = request.get_path()
data["meta"]["request-method"] = action
data["meta"]["duration"] = 0.0
return data
def send_json(self, data, response):
result = json.dumps(data, sort_keys=False,
indent=4, separators=(',', ': '))
response.send_content(data["meta"]["status"], result, "application/json")
def send_text(self, data, response, status):<|fim▁hole|> out_data["meta"]["status"] = 400
out_data["meta"]["msg"] = "BAD REQUEST"
"""
GET actions ========================================================================
"""
def get_api_info(self, request, response, in_data, out_data):
out_data["api"] = {"name": "debug"}
def get_isolates(self, request, response, in_data, out_data):
out_data["isolates"] = []
lp = self._directory.get_local_peer()
out_data["isolates"].append({"uid": lp.uid, "name": lp.name,
"node_uid": lp.node_uid, "node_name": lp.node_name})
count = 1
for p in self._directory.get_peers():
out_data["isolates"].append({"uid": p.uid, "name": p.name,
"node_uid": p.node_uid, "node_name": p.node_name})
count += 1
out_data["meta"]["count"] = count
def get_isolate(self, request, response, in_data, out_data, uuid):
out_data["isolate"] = self._get_isolate_detail(uuid)
def get_isolate_bundles(self, request, response, in_data, out_data, uuid):
out_data["isolate"] = {"uuid" : uuid}
bundles = self._get_isolate_bundles(uuid)
out_data["bundles"] = bundles
if bundles is not None:
count = len(bundles)
else:
count = 0
out_data["meta"]["count"] = count
def get_bundle_detail(self, request, response, in_data, out_data, isolate_uuid, bundle_id):
out_data["isolate"] = {"uuid" : isolate_uuid}
out_data["bundle"] = {}
out_data["bundle"] = self._get_bundle_detail(isolate_uuid, bundle_id)
def get_isolate_factories(self, request, response, in_data, out_data, uuid):
out_data["isolate"] = {"uuid" : uuid}
factories = self._get_isolate_factories(uuid)
out_data["factories"] = factories
if factories is not None:
count = len(factories)
else:
count = 0
out_data["meta"]["count"] = count
def get_factory_detail(self, request, response, in_data, out_data, isolate_uuid, factory_name):
out_data["isolate"] = {"uuid" : isolate_uuid}
out_data["factory"] = {}
out_data["factory"] = self._get_factory_detail(isolate_uuid, factory_name)
def get_isolate_instances(self, request, response, in_data, out_data, uuid):
out_data["isolate"] = {"uuid" : uuid}
instances = self._get_isolate_instances(uuid)
out_data["instances"] = instances
if instances is not None:
count = len(instances)
else:
count = 0
out_data["meta"]["count"] = count
def get_instance_detail(self, request, response, in_data, out_data, isolate_uuid, instance_name):
out_data["isolate"] = {"uuid" : isolate_uuid}
out_data["instance"] = {}
out_data["instance"] = self._get_instance_detail(isolate_uuid, instance_name)
def get_isolate_services(self, request, response, in_data, out_data, uuid):
out_data["isolate"] = {"uuid" : uuid}
services = self._get_isolate_services(uuid)
out_data["services"] = services
if services is not None:
count = len(services)
else:
count = 0
out_data["meta"]["count"] = count
def get_isolate_threads(self, request, response, in_data, out_data, uuid):
out_data["isolate"] = {"uuid" : uuid}
threads = self._get_isolate_threads(uuid)
out_data["threads"] = threads
if threads is not None:
count = len(threads)
else:
count = 0
out_data["meta"]["count"] = count
def get_isolate_logs(self, request, response, in_data, out_data, uuid):
out_data["isolate"] = {"uuid" : uuid}
logs = self._get_isolate_logs(uuid)
out_data["logs"] = logs
if logs is not None:
count = len(logs)
else:
count = 0
out_data["meta"]["count"] = count
def get_isolate_log(self, request, response, in_data, out_data, isolate_uuid, log_id):
out_data["isolate"] = {"uuid" : isolate_uuid}
out_data["log"] = self._get_isolate_log(isolate_uuid, log_id)
"""
Internal agent methods ===========================================================================
"""
def _get_isolate_detail(self, uuid):
lp = self._directory.get_local_peer()
if lp.uid != uuid:
# this is another isolate
try:
msg = beans.Message(debug.agent.SUBJECT_GET_ISOLATE_DETAIL)
reply = self._herald.send(uuid, msg)
return reply.content
except KeyError:
return None
else:
# this is the local isolate
return self._agent.get_isolate_detail()
def _get_isolate_bundles(self, uuid):
lp = self._directory.get_local_peer()
if lp.uid != uuid:
# this is another isolate
msg = beans.Message(debug.agent.SUBJECT_GET_BUNDLES)
reply = self._herald.send(uuid, msg)
return reply.content
else:
# this is the local isolate
return self._agent.get_bundles()
def _get_bundle_detail(self, uuid, bundle_id):
lp = self._directory.get_local_peer()
if lp.uid != uuid:
# this is another isolate
msg = beans.Message(debug.agent.SUBJECT_GET_BUNDLE_DETAIL, bundle_id)
reply = self._herald.send(uuid, msg)
return reply.content
else:
# this is the local isolate
return self._agent.get_bundle_detail(bundle_id)
def _get_isolate_factories(self, uuid):
lp = self._directory.get_local_peer()
if lp.uid != uuid:
# this is another isolate
msg = beans.Message(debug.agent.SUBJECT_GET_FACTORIES)
reply = self._herald.send(uuid, msg)
return reply.content
else:
# this is the local isolate
return self._agent.get_factories()
def _get_factory_detail(self, uuid, factory_name):
lp = self._directory.get_local_peer()
if lp.uid != uuid:
# this is another isolate
msg = beans.Message(debug.agent.SUBJECT_GET_FACTORY_DETAIL, factory_name)
reply = self._herald.send(uuid, msg)
return reply.content
else:
# this is the local isolate
return self._agent.get_factory_detail(factory_name)
def _get_isolate_instances(self, uuid):
lp = self._directory.get_local_peer()
if lp.uid != uuid:
# this is another isolate
msg = beans.Message(debug.agent.SUBJECT_GET_INSTANCES)
reply = self._herald.send(uuid, msg)
return reply.content
else:
# this is the local isolate
return self._agent.get_instances()
def _get_instance_detail(self, uuid, instance_name):
lp = self._directory.get_local_peer()
if lp.uid != uuid:
# this is another isolate
msg = beans.Message(debug.agent.SUBJECT_GET_INSTANCE_DETAIL, instance_name)
reply = self._herald.send(uuid, msg)
return reply.content
else:
# this is the local isolate
return self._agent.get_instance_detail(instance_name)
def _get_isolate_services(self, uuid):
lp = self._directory.get_local_peer()
if lp.uid != uuid:
# this is another isolate
msg = beans.Message(debug.agent.SUBJECT_GET_SERVICES)
reply = self._herald.send(uuid, msg)
return reply.content
else:
# this is the local isolate
return self._agent.get_services()
def _get_isolate_threads(self, uuid):
lp = self._directory.get_local_peer()
if lp.uid != uuid:
# this is another isolate
msg = beans.Message(debug.agent.SUBJECT_GET_THREADS)
reply = self._herald.send(uuid, msg)
return reply.content
else:
# this is the local isolate
return self._agent.get_threads()
def _get_isolate_logs(self, uuid):
lp = self._directory.get_local_peer()
if lp.uid != uuid:
# this is another isolate
msg = beans.Message(debug.agent.SUBJECT_GET_ISOLATE_LOGS)
reply = self._herald.send(uuid, msg)
return reply.content
else:
# this is the local isolate
return self._agent.get_isolate_logs()
def _get_isolate_log(self, uuid, log_id):
lp = self._directory.get_local_peer()
if lp.uid != uuid:
# this is another isolate
msg = beans.Message(debug.agent.SUBJECT_GET_ISOLATE_LOG, log_id)
reply = self._herald.send(uuid, msg)
return reply.content
else:
# this is the local isolate
return self._agent.get_isolate_log(log_id)
"""
Servlet (url mapping to rest api) ================================================================
"""
def do_GET(self, request, response):
"""
Handle a GET
"""
path, parts, in_data = self.decrypt_request(request)
out_data = self.prepare_response(request, "GET")
if path.startswith(DEBUG_REST_API_PATH):
if path == DEBUG_REST_API_PATH:
out_data["meta"]["api-method"] = "get_api_info"
self.get_api_info(request, response, in_data, out_data)
elif path == DEBUG_REST_API_PATH + "/isolates":
out_data["meta"]["api-method"] = "get_isolates"
self.get_isolates(request, response, in_data, out_data)
elif len(parts) == 5:
if path == DEBUG_REST_API_PATH + "/isolates/" + parts[4]:
out_data["meta"]["api-method"] = "get_isolate"
self.get_isolate(request, response, in_data, out_data, parts[4])
else:
self.bad_request(request, response, in_data, out_data)
elif len(parts) == 6:
if path == DEBUG_REST_API_PATH + "/isolates/" + parts[4] + "/bundles":
out_data["meta"]["api-method"] = "get_isolate_bundles"
self.get_isolate_bundles(request, response, in_data, out_data, parts[4])
elif path == DEBUG_REST_API_PATH + "/isolates/" + parts[4] + "/factories":
out_data["meta"]["api-method"] = "get_isolate_factories"
self.get_isolate_factories(request, response, in_data, out_data, parts[4])
elif path == DEBUG_REST_API_PATH + "/isolates/" + parts[4] + "/instances":
out_data["meta"]["api-method"] = "get_isolate_instances"
self.get_isolate_instances(request, response, in_data, out_data, parts[4])
elif path == DEBUG_REST_API_PATH + "/isolates/" + parts[4] + "/services":
out_data["meta"]["api-method"] = "get_isolate_services"
self.get_isolate_services(request, response, in_data, out_data, parts[4])
elif path == DEBUG_REST_API_PATH + "/isolates/" + parts[4] + "/threads":
out_data["meta"]["api-method"] = "get_isolate_threads"
self.get_isolate_threads(request, response, in_data, out_data, parts[4])
elif path == DEBUG_REST_API_PATH + "/isolates/" + parts[4] + "/logs":
out_data["meta"]["api-method"] = "get_isolate_logs"
self.get_isolate_logs(request, response, in_data, out_data, parts[4])
elif len(parts) == 7:
if path == DEBUG_REST_API_PATH + "/isolates/" + parts[4] + "/bundles/" + parts[6]:
out_data["meta"]["api-method"] = "get_bundle_detail"
self.get_bundle_detail(request, response, in_data, out_data, parts[4], parts[6])
elif path == DEBUG_REST_API_PATH + "/isolates/" + parts[4] + "/factories/" + parts[6]:
out_data["meta"]["api-method"] = "get_factory_detail"
self.get_factory_detail(request, response, in_data, out_data, parts[4], parts[6])
elif path == DEBUG_REST_API_PATH + "/isolates/" + parts[4] + "/instances/" + parts[6]:
out_data["meta"]["api-method"] = "get_instance_detail"
self.get_instance_detail(request, response, in_data, out_data, parts[4], parts[6])
elif path == DEBUG_REST_API_PATH + "/isolates/" + parts[4] + "/logs/" + parts[6]:
if 'raw' in in_data:
# send raw log
log = self._get_isolate_log(parts[4], parts[6])
self.send_text(log, response, 200)
else:
# send log within a json object data["log"]
out_data["meta"]["api-method"] = "get_isolate_log"
self.get_isolate_log(request, response, in_data, out_data, parts[4], parts[6])
else:
self.bad_request(request, response, in_data, out_data)
else:
self.bad_request(request, response, in_data, out_data)
self.send_json(out_data, response)
"""
iPOPO STUFF --------------------------------------------------------------------------------------------------------
"""
@Validate
def validate(self, context):
_logger.info("Debug REST API validated")
self._context = context
@Invalidate
def invalidate(self, context):
_logger.info("Debug REST API invalidated")
def bound_to(self, path, params):
"""
Servlet bound to a path
"""
_logger.info('Bound to ' + path)
return True
def unbound_from(self, path, params):
"""
Servlet unbound from a path
"""
_logger.info('Unbound from ' + path)
return None<|fim▁end|> | response.send_content(status, data, "text/plain")
def bad_request(self, request, response, in_data, out_data): |
<|file_name|>gen_firewall.py<|end_file_name|><|fim▁begin|>template = """# Generated on {{dt}}
*filter
:INPUT DROP
:FORWARD ACCEPT
:OUTPUT ACCEPT
-A INPUT -i lo -j ACCEPT
-A INPUT -p tcp -m tcp --dport 22 -j ACCEPT
-A INPUT -i eth0 -m state --state RELATED,ESTABLISHED -j ACCEPT
-A INPUT -i eth1 -m state --state RELATED,ESTABLISHED -j ACCEPT
-A INPUT -p icmp -j ACCEPT
{{#rule}}{{#tcprule}}
-A INPUT -s {{source}}/32 -p tcp -m tcp --dport {{dport}} -m state --state NEW,ESTABLISHED -j ACCEPT
{{/tcprule}}{{#allrule}}
-A INPUT -p {{protocol}} -m {{protocol}} --dport {{dport}} -j ACCEPT
{{/allrule}}{{/rule}}
COMMIT
"""
import pystache
import datetime<|fim▁hole|>
# securityGroups are a hash of "security groups" and a list of boxes in each
# group
securityGroups = {'Database': ['aerolith-pg'],
'Web': ['aerolith-web'],
'Wordpress': ['AerolithWP'],
'Dev': ['ubuntu-512mb-sfo1-01']
}
# groupRules tell you, for each security group, which security groups
# can connect to it and what ports
# note all of these have port 22 (ssh) open by default (see template above)
groupRules = {'Web': [('all', 80), ('all', 443), ('all', 21), ('all', 20),
('all', '61052:61057'), ('all', 8080)],
'Redis': [('Web', 6379), ('all', 80)],
'Database': [('Web', 5432)],
'Dev': [('all', 80), ('all', 443)]
}
def gen_firewall(securityGroup, servers):
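"""Render and write the iptables rules file for one security group.
`servers` is assumed to be a DigitalOcean-style droplet listing in which
each entry exposes `name` and `networks['v4'][0]['ip_address']`.
"""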
context = {'rule': {'tcprule': [], 'allrule': []},
'dt': str(datetime.datetime.now())}
rule = groupRules[securityGroup]
for subrule in rule:
if subrule[0] == 'all':
port = subrule[1]
context['rule']['allrule'].append({'dport': port,
'protocol': 'tcp'})
else:
for server in servers:
# for each server in the security group in question
# add its private ip to the firewall
if server['name'] in securityGroups[subrule[0]]:
port = subrule[1]
context['rule']['tcprule'].append(
{'source': server['networks']['v4'][0]['ip_address'],
'dport': port
})
res = pystache.render(template, context)
f = open('iptables.' + securityGroup + '.rules', 'wb')
f.write(res)
f.close()
return res<|fim▁end|> | |
<|file_name|>EndocastExtractor.hpp<|end_file_name|><|fim▁begin|>#ifndef ENDOCAST_EXTRACTOR_HPP
#define ENDOCAST_EXTRACTOR_HPP 1
#include <mi/VolumeData.hpp>
<|fim▁hole|>public:
EndocastExtractor ( void ) ;
~EndocastExtractor ( void ) ;
bool extract ( mi::VolumeData<float>& distData, mi::VolumeData<int>& labelData, const double scale ) ;
private:
bool init ( mi::VolumeData<float>& distData, mi::VolumeData<int>& labelData, const double scale ) ;
bool run ( mi::VolumeData<float>& distData, mi::VolumeData<int>& labelData ) ;
};
#endif // ENDOCAST_EXTRACTOR_HPP<|fim▁end|> | class EndocastExtractor
{
private:
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from supervisorclusterctl import __version__, __author__, __programm_name__, __programm_description__
import os
import sys
try:
from setuptools import setup, find_packages
except ImportError:
print "supervisorclusterctl needs setuptools in order to build. " \
"Please install it using your package manager (usually python-setuptools) or via pip (pip install setuptools)."
sys.exit(1)
requirements = []
test_requirements = []
here = os.path.abspath(os.path.dirname(__file__))
try:
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.txt')).read().replace('.. :changelog:', '')
except:
README = ''
CHANGES = ''
data_files = []
setup(
name=__programm_name__,
version=__version__,
description=__programm_description__,
long_description=README + '\n\n' + CHANGES,
author=__author__,
keywords = 'supervisor ansible',
author_email='[email protected]',
url='https://github.com/RobWin/supervisorclusterctl.git',
packages=find_packages(exclude=["docs", "test"]),
install_requires=requirements,
tests_require=test_requirements,
test_suite="test",
data_files=data_files,
license='GPLv3',<|fim▁hole|> entry_points={
'console_scripts': [
'supervisorclusterctl = supervisorclusterctl.supervisorclusterctl:main'
],
}
)<|fim▁end|> | |
<|file_name|>application.js<|end_file_name|><|fim▁begin|>// This is a manifest file that'll be compiled into application.js, which will include all the files
// listed below.
//
// Any JavaScript/Coffee file within this directory, lib/assets/javascripts, vendor/assets/javascripts,
// or vendor/assets/javascripts of plugins, if any, can be referenced here using a relative path.
//
// It's not advisable to add code directly here, but if you do, it'll appear at the bottom of the
// the compiled file.
//
// WARNING: THE FIRST BLANK LINE MARKS THE END OF WHAT'S TO BE PROCESSED, ANY BLANK LINE SHOULD
// GO AFTER THE REQUIRES BELOW.
//
//= require jquery
//= require jquery_ujs
//= require jquery.Jcrop.min
//= require bootstrap
//= require bootstrap-datepicker
//= require jquery-fileupload/basic
//= require attendances
//= require courses
//= require jcrop_mugshot
//= require people
//= require rolls
//= require users
//= require hogan-2.0.0
//= require typeahead
//= require people_typeahead
//= require underscore
//= require jquery.tokeninput
//= require token-input-wireup
//= require_tree .
<|fim▁hole|> dateFormat: 'mm/dd/yyyy'
});
}
$(document).ready(initializeDatePicker);
$(document).on('page:change', initializeDatePicker);
// whenever the bootstrap modal closes, reset it's contents
$(function() {
$('#myModal').on('hidden', function () {
$('#myModal div.modal-body').html("<p>Loading... <i class=\"icon-refresh\"></i></p>");
$('#myModalLabel').text('');
});
});
// handle link_to remote and replace contents into data-replace id element
$(function() {
$(document)
.data('type', 'html')
.delegate('[data-remote][data-replace]', 'ajax:success', function(event, data) {
var $this = $(this);
$($this.data('replace')).html(data);
$this.trigger('ajax:replaced');
});
});<|fim▁end|> | function initializeDatePicker() {
$('input.date_picker').datepicker({
autoclose: true,
todayHighlight: true, |
<|file_name|>0006_auto__add_field_reference_year.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Reference.year'
db.add_column(u'citations_reference', 'year',
self.gf('django.db.models.fields.IntegerField')(null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Reference.year'
db.delete_column(u'citations_reference', 'year')
models = {
u'citations.reference': {
'Meta': {'object_name': 'Reference'},
'abstract': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),<|fim▁hole|> 'edition': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'isbn': ('django.db.models.fields.CharField', [], {'max_length': '17', 'null': 'True', 'blank': 'True'}),
'place': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'publisher': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'series': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'BK'", 'max_length': '3'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'volume': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'year': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['citations']<|fim▁end|> | 'author': ('django.db.models.fields.CharField', [], {'max_length': '512'}), |
<|file_name|>taskwork.go<|end_file_name|><|fim▁begin|>//
// Task worker.
// Connects PULL socket to tcp://localhost:5557<|fim▁hole|>// Connects PUSH socket to tcp://localhost:5558
// Sends results to sink via that socket
//
package main
import (
zmq "github.com/pebbe/zmq3"
"fmt"
"strconv"
"time"
)
func main() {
// Socket to receive messages on
receiver, _ := zmq.NewSocket(zmq.PULL)
defer receiver.Close()
receiver.Connect("tcp://localhost:5557")
// Socket to send messages to
sender, _ := zmq.NewSocket(zmq.PUSH)
defer sender.Close()
sender.Connect("tcp://localhost:5558")
// Process tasks forever
for {
s, _ := receiver.Recv(0)
// Simple progress indicator for the viewer
fmt.Print(s + ".")
// Do the work
msec, _ := strconv.Atoi(s)
time.Sleep(time.Duration(msec) * time.Millisecond)
// Send results to sink
sender.Send("", 0)
}
}<|fim▁end|> | // Collects workloads from ventilator via that socket |
<|file_name|>DevSerial.cpp<|end_file_name|><|fim▁begin|>/* $Id: DevSerial.cpp 56292 2015-06-09 14:20:46Z vboxsync $ */
/** @file
* DevSerial - 16550A UART emulation.
* (taken from hw/serial.c 2010/05/15 with modifications)
*/
/*
* Copyright (C) 2006-2015 Oracle Corporation
*
* This file is part of VirtualBox Open Source Edition (OSE), as
* available from http://www.virtualbox.org. This file is free software;
* you can redistribute it and/or modify it under the terms of the GNU
* General Public License (GPL) as published by the Free Software
* Foundation, in version 2 as it comes in the "COPYING" file of the
* VirtualBox OSE distribution. VirtualBox OSE is distributed in the
* hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
*/
/*
* This code is based on:
*
* QEMU 16550A UART emulation
*
* Copyright (c) 2003-2004 Fabrice Bellard
* Copyright (c) 2008 Citrix Systems, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/*******************************************************************************
* Header Files *
*******************************************************************************/
#define LOG_GROUP LOG_GROUP_DEV_SERIAL
#include <VBox/vmm/pdmdev.h>
#include <iprt/assert.h>
#include <iprt/uuid.h>
#include <iprt/string.h>
#include <iprt/semaphore.h>
#include <iprt/critsect.h>
#include "VBoxDD.h"
#undef VBOX_SERIAL_PCI /* The PCI variant has lots of problems: wrong IRQ line and wrong IO base assigned. */
#ifdef VBOX_SERIAL_PCI
# include <VBox/pci.h>
#endif /* VBOX_SERIAL_PCI */
/*******************************************************************************
* Defined Constants And Macros *
*******************************************************************************/
#define SERIAL_SAVED_STATE_VERSION_16450 3
#define SERIAL_SAVED_STATE_VERSION_MISSING_BITS 4
#define SERIAL_SAVED_STATE_VERSION 5
#define UART_LCR_DLAB 0x80 /* Divisor latch access bit */
#define UART_IER_MSI 0x08 /* Enable Modem status interrupt */
#define UART_IER_RLSI 0x04 /* Enable receiver line status interrupt */
#define UART_IER_THRI 0x02 /* Enable Transmitter holding register int. */
#define UART_IER_RDI 0x01 /* Enable receiver data interrupt */
#define UART_IIR_NO_INT 0x01 /* No interrupts pending */
#define UART_IIR_ID 0x06 /* Mask for the interrupt ID */
#define UART_IIR_MSI 0x00 /* Modem status interrupt */
#define UART_IIR_THRI 0x02 /* Transmitter holding register empty */
#define UART_IIR_RDI 0x04 /* Receiver data interrupt */
#define UART_IIR_RLSI 0x06 /* Receiver line status interrupt */
#define UART_IIR_CTI 0x0C /* Character Timeout Indication */
#define UART_IIR_FENF 0x80 /* Fifo enabled, but not functioning */
#define UART_IIR_FE 0xC0 /* Fifo enabled */
/*
* These are the definitions for the Modem Control Register
*/
#define UART_MCR_LOOP 0x10 /* Enable loopback test mode */
#define UART_MCR_OUT2 0x08 /* Out2 complement */
#define UART_MCR_OUT1 0x04 /* Out1 complement */
#define UART_MCR_RTS 0x02 /* RTS complement */
#define UART_MCR_DTR 0x01 /* DTR complement */
/*
* These are the definitions for the Modem Status Register
*/
#define UART_MSR_DCD 0x80 /* Data Carrier Detect */
#define UART_MSR_RI 0x40 /* Ring Indicator */
#define UART_MSR_DSR 0x20 /* Data Set Ready */
#define UART_MSR_CTS 0x10 /* Clear to Send */
#define UART_MSR_DDCD 0x08 /* Delta DCD */
#define UART_MSR_TERI 0x04 /* Trailing edge ring indicator */
#define UART_MSR_DDSR 0x02 /* Delta DSR */
#define UART_MSR_DCTS 0x01 /* Delta CTS */
#define UART_MSR_ANY_DELTA 0x0F /* Any of the delta bits! */
#define UART_LSR_TEMT 0x40 /* Transmitter empty */
#define UART_LSR_THRE 0x20 /* Transmit-hold-register empty */
#define UART_LSR_BI 0x10 /* Break interrupt indicator */
#define UART_LSR_FE 0x08 /* Frame error indicator */
#define UART_LSR_PE 0x04 /* Parity error indicator */
#define UART_LSR_OE 0x02 /* Overrun error indicator */
#define UART_LSR_DR 0x01 /* Receiver data ready */
#define UART_LSR_INT_ANY 0x1E /* Any of the lsr-interrupt-triggering status bits */
/*
* Interrupt trigger levels.
* The byte-counts are for 16550A - in newer UARTs the byte-count for each ITL is higher.
*/
#define UART_FCR_ITL_1 0x00 /* 1 byte ITL */
#define UART_FCR_ITL_2 0x40 /* 4 bytes ITL */
#define UART_FCR_ITL_3 0x80 /* 8 bytes ITL */
#define UART_FCR_ITL_4 0xC0 /* 14 bytes ITL */
#define UART_FCR_DMS 0x08 /* DMA Mode Select */
#define UART_FCR_XFR 0x04 /* XMIT Fifo Reset */
#define UART_FCR_RFR 0x02 /* RCVR Fifo Reset */
#define UART_FCR_FE 0x01 /* FIFO Enable */
#define UART_FIFO_LENGTH 16 /* 16550A Fifo Length */
#define XMIT_FIFO 0
#define RECV_FIFO 1
#define MIN_XMIT_RETRY 16
#define MAX_XMIT_RETRY_TIME 1 /* max time (in seconds) for retrying the character xmit before dropping it */
/*******************************************************************************
* Structures and Typedefs *
*******************************************************************************/
struct SerialFifo
{
uint8_t data[UART_FIFO_LENGTH];
uint8_t count;
uint8_t itl;
uint8_t tail;
uint8_t head;
};
/**
* Serial device.
*
* @implements PDMIBASE
* @implements PDMICHARPORT
*/
typedef struct SerialState
{
/** Access critical section. */
PDMCRITSECT CritSect;
/** Pointer to the device instance - R3 Ptr. */
PPDMDEVINSR3 pDevInsR3;
/** Pointer to the device instance - R0 Ptr. */
PPDMDEVINSR0 pDevInsR0;
/** Pointer to the device instance - RC Ptr. */
PPDMDEVINSRC pDevInsRC;
/** Alignment. */
RTRCPTR Alignment0;
/** LUN\#0: The base interface. */
PDMIBASE IBase;
/** LUN\#0: The character port interface. */
PDMICHARPORT ICharPort;
/** Pointer to the attached base driver. */
R3PTRTYPE(PPDMIBASE) pDrvBase;
/** Pointer to the attached character driver. */
R3PTRTYPE(PPDMICHARCONNECTOR) pDrvChar;
RTSEMEVENT ReceiveSem;
PTMTIMERR3 fifo_timeout_timer;
PTMTIMERR3 transmit_timerR3;
PTMTIMERR0 transmit_timerR0; /* currently not used */
PTMTIMERRC transmit_timerRC; /* currently not used */
RTRCPTR Alignment1;
SerialFifo recv_fifo;
SerialFifo xmit_fifo;
uint32_t base;
uint16_t divider;
uint16_t Alignment2[1];
uint8_t rbr; /**< receive register */
uint8_t thr; /**< transmit holding register */
uint8_t tsr; /**< transmit shift register */
uint8_t ier; /**< interrupt enable register */
uint8_t iir; /**< interrupt identification register, R/O */
uint8_t lcr; /**< line control register */
uint8_t mcr; /**< modem control register */
uint8_t lsr; /**< line status register, R/O */
uint8_t msr; /**< modem status register, R/O */
uint8_t scr; /**< scratch register */
uint8_t fcr; /**< fifo control register */
uint8_t fcr_vmstate;
/* NOTE: this hidden state is necessary for tx irq generation as
it can be reset while reading iir */
int thr_ipending;
int timeout_ipending;
int irq;
int last_break_enable;
/** Counter for retrying xmit */
int tsr_retry;
int tsr_retry_bound; /**< number of retries before dropping a character */
int tsr_retry_bound_max; /**< maximum possible tsr_retry_bound value that can be set while dynamic bound adjustment */
int tsr_retry_bound_min; /**< minimum possible tsr_retry_bound value that can be set while dynamic bound adjustment */
bool msr_changed;
bool fGCEnabled;
bool fR0Enabled;
bool fYieldOnLSRRead;
bool volatile fRecvWaiting;
bool f16550AEnabled;
bool Alignment3[6];
/** Time it takes to transmit a character */
uint64_t char_transmit_time;
#ifdef VBOX_SERIAL_PCI
PCIDEVICE PciDev;
#endif /* VBOX_SERIAL_PCI */
} DEVSERIAL;
/** Pointer to the serial device state. */
typedef DEVSERIAL *PDEVSERIAL;
#ifndef VBOX_DEVICE_STRUCT_TESTCASE
#ifdef IN_RING3
static int serial_can_receive(PDEVSERIAL pThis);
static void serial_receive(PDEVSERIAL pThis, const uint8_t *buf, int size);
static void fifo_clear(PDEVSERIAL pThis, int fifo)
{
SerialFifo *f = (fifo) ? &pThis->recv_fifo : &pThis->xmit_fifo;
memset(f->data, 0, UART_FIFO_LENGTH);
f->count = 0;
f->head = 0;
f->tail = 0;
}
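/* Pushes one byte into the selected ring buffer. A full transmit FIFO
overwrites its oldest byte, while a full receive FIFO drops the new byte
and flags an overrun (UART_LSR_OE) instead. */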
static int fifo_put(PDEVSERIAL pThis, int fifo, uint8_t chr)
{
SerialFifo *f = (fifo) ? &pThis->recv_fifo : &pThis->xmit_fifo;
/* Receive overruns do not overwrite FIFO contents. */
if (fifo == XMIT_FIFO || f->count < UART_FIFO_LENGTH)
{
f->data[f->head++] = chr;
if (f->head == UART_FIFO_LENGTH)
f->head = 0;
}
if (f->count < UART_FIFO_LENGTH)
f->count++;
else if (fifo == XMIT_FIFO) /* need to at least adjust tail to maintain pipe state consistency */
++f->tail;
else if (fifo == RECV_FIFO)
pThis->lsr |= UART_LSR_OE;
return 1;
}
static uint8_t fifo_get(PDEVSERIAL pThis, int fifo)
{
SerialFifo *f = (fifo) ? &pThis->recv_fifo : &pThis->xmit_fifo;
uint8_t c;
if (f->count == 0)
return 0;
c = f->data[f->tail++];
if (f->tail == UART_FIFO_LENGTH)
f->tail = 0;
f->count--;
return c;
}
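/* Recomputes the pending interrupt id in fixed 16550 priority order
(line status, character timeout, received data, transmitter empty,
modem status) and raises or clears the IRQ line accordingly. */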
static void serial_update_irq(PDEVSERIAL pThis)
{
uint8_t tmp_iir = UART_IIR_NO_INT;
if ( (pThis->ier & UART_IER_RLSI)
&& (pThis->lsr & UART_LSR_INT_ANY)) {
tmp_iir = UART_IIR_RLSI;
} else if ((pThis->ier & UART_IER_RDI) && pThis->timeout_ipending) {
/* Note that (pThis->ier & UART_IER_RDI) can mask this interrupt,
* this is not in the specification but is observed on existing
* hardware. */
tmp_iir = UART_IIR_CTI;
} else if ( (pThis->ier & UART_IER_RDI)
&& (pThis->lsr & UART_LSR_DR)
&& ( !(pThis->fcr & UART_FCR_FE)
|| pThis->recv_fifo.count >= pThis->recv_fifo.itl)) {
tmp_iir = UART_IIR_RDI;
} else if ( (pThis->ier & UART_IER_THRI)
&& pThis->thr_ipending) {
tmp_iir = UART_IIR_THRI;
} else if ( (pThis->ier & UART_IER_MSI)
&& (pThis->msr & UART_MSR_ANY_DELTA)) {
tmp_iir = UART_IIR_MSI;
}
pThis->iir = tmp_iir | (pThis->iir & 0xF0);
/** XXX only call the SetIrq function if the state really changes! */
if (tmp_iir != UART_IIR_NO_INT) {
Log(("serial_update_irq %d 1\n", pThis->irq));
# ifdef VBOX_SERIAL_PCI
PDMDevHlpPCISetIrqNoWait(pThis->CTX_SUFF(pDevIns), 0, 1);
# else /* !VBOX_SERIAL_PCI */
PDMDevHlpISASetIrqNoWait(pThis->CTX_SUFF(pDevIns), pThis->irq, 1);
# endif /* !VBOX_SERIAL_PCI */
} else {
Log(("serial_update_irq %d 0\n", pThis->irq));
# ifdef VBOX_SERIAL_PCI
PDMDevHlpPCISetIrqNoWait(pThis->CTX_SUFF(pDevIns), 0, 0);
# else /* !VBOX_SERIAL_PCI */
PDMDevHlpISASetIrqNoWait(pThis->CTX_SUFF(pDevIns), pThis->irq, 0);
# endif /* !VBOX_SERIAL_PCI */
}
}
static void serial_tsr_retry_update_parameters(PDEVSERIAL pThis, uint64_t tf)
{
pThis->tsr_retry_bound_max = RT_MAX((tf * MAX_XMIT_RETRY_TIME) / pThis->char_transmit_time, MIN_XMIT_RETRY);
pThis->tsr_retry_bound_min = RT_MAX(pThis->tsr_retry_bound_max / (1000 * MAX_XMIT_RETRY_TIME), MIN_XMIT_RETRY);
/* for simplicity just reset to max retry count */
pThis->tsr_retry_bound = pThis->tsr_retry_bound_max;
}
static void serial_tsr_retry_bound_reached(PDEVSERIAL pThis)
{
/* this most likely means we have some backend connection issues */
/* decrement the retry bound */
pThis->tsr_retry_bound = RT_MAX(pThis->tsr_retry_bound / (10 * MAX_XMIT_RETRY_TIME), pThis->tsr_retry_bound_min);
}
static void serial_tsr_retry_succeeded(PDEVSERIAL pThis)
{
/* success means we have a backend connection working OK,
* set retry bound to its maximum value */
pThis->tsr_retry_bound = pThis->tsr_retry_bound_max;
}
static void serial_update_parameters(PDEVSERIAL pThis)
{
int speed, parity, data_bits, stop_bits, frame_size;
if (pThis->divider == 0)
return;
frame_size = 1;
if (pThis->lcr & 0x08) {
frame_size++;
if (pThis->lcr & 0x10)
parity = 'E';
else
parity = 'O';
} else {
parity = 'N';
}
if (pThis->lcr & 0x04)
stop_bits = 2;
else
stop_bits = 1;
data_bits = (pThis->lcr & 0x03) + 5;
frame_size += data_bits + stop_bits;
speed = 115200 / pThis->divider;
uint64_t tf = TMTimerGetFreq(CTX_SUFF(pThis->transmit_timer));
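/* Ticks per frame: e.g. 8N1 is a 10-bit frame (start + 8 data + stop),
so at divider 12 (115200/12 = 9600 baud) one character takes
tf / 9600 * 10 timer ticks. */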
pThis->char_transmit_time = (tf / speed) * frame_size;
serial_tsr_retry_update_parameters(pThis, tf);
Log(("speed=%d parity=%c data=%d stop=%d\n", speed, parity, data_bits, stop_bits));
if (RT_LIKELY(pThis->pDrvChar))
pThis->pDrvChar->pfnSetParameters(pThis->pDrvChar, speed, parity, data_bits, stop_bits);
}
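/* Loads the transmit shift register and hands the character to the
attached driver. If the backend rejects it, the transmit timer retries;
once tsr_retry_bound attempts have failed the character is dropped and
the bound is lowered (see serial_tsr_retry_bound_reached). */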
static void serial_xmit(PDEVSERIAL pThis, bool bRetryXmit)
{
if (pThis->tsr_retry <= 0) {
if (pThis->fcr & UART_FCR_FE) {
pThis->tsr = fifo_get(pThis, XMIT_FIFO);
if (!pThis->xmit_fifo.count)
pThis->lsr |= UART_LSR_THRE;
} else {
pThis->tsr = pThis->thr;
pThis->lsr |= UART_LSR_THRE;
}
}
if (pThis->mcr & UART_MCR_LOOP) {
/* in loopback mode, say that we just received a char */
serial_receive(pThis, &pThis->tsr, 1);
} else if ( RT_LIKELY(pThis->pDrvChar)
&& RT_FAILURE(pThis->pDrvChar->pfnWrite(pThis->pDrvChar, &pThis->tsr, 1))) {
if ((pThis->tsr_retry >= 0) && ((!bRetryXmit) || (pThis->tsr_retry <= pThis->tsr_retry_bound))) {
if (!pThis->tsr_retry)
pThis->tsr_retry = 1; /* make sure the retry state is always set */
else if (bRetryXmit) /* do not increase the retry count if the retry is actually caused by next char write */
pThis->tsr_retry++;
TMTimerSet(CTX_SUFF(pThis->transmit_timer), TMTimerGet(CTX_SUFF(pThis->transmit_timer)) + pThis->char_transmit_time * 4);
return;
} else {
/* drop this character. */
pThis->tsr_retry = 0;
serial_tsr_retry_bound_reached(pThis);
}
}
else {
pThis->tsr_retry = 0;
serial_tsr_retry_succeeded(pThis);
}
if (!(pThis->lsr & UART_LSR_THRE))
TMTimerSet(CTX_SUFF(pThis->transmit_timer),
TMTimerGet(CTX_SUFF(pThis->transmit_timer)) + pThis->char_transmit_time);
if (pThis->lsr & UART_LSR_THRE) {
pThis->lsr |= UART_LSR_TEMT;
pThis->thr_ipending = 1;
serial_update_irq(pThis);
}
}
#endif /* IN_RING3 */
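/* Dispatches guest writes to the eight UART register offsets; offsets 5 (LSR)
and 6 (MSR) are read-only, so writes to them are silently ignored. */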
static int serial_ioport_write(PDEVSERIAL pThis, uint32_t addr, uint32_t val)
{
addr &= 7;
#ifndef IN_RING3
NOREF(pThis);
return VINF_IOM_R3_IOPORT_WRITE;
#else
switch(addr) {
default:
case 0:
if (pThis->lcr & UART_LCR_DLAB) {
pThis->divider = (pThis->divider & 0xff00) | val;
serial_update_parameters(pThis);
} else {
pThis->thr = (uint8_t) val;
if (pThis->fcr & UART_FCR_FE) {
fifo_put(pThis, XMIT_FIFO, pThis->thr);
pThis->thr_ipending = 0;
pThis->lsr &= ~UART_LSR_TEMT;
pThis->lsr &= ~UART_LSR_THRE;
serial_update_irq(pThis);
} else {
pThis->thr_ipending = 0;
pThis->lsr &= ~UART_LSR_THRE;
serial_update_irq(pThis);
}
serial_xmit(pThis, false);
}
break;
case 1:
if (pThis->lcr & UART_LCR_DLAB) {
pThis->divider = (pThis->divider & 0x00ff) | (val << 8);
serial_update_parameters(pThis);
} else {
pThis->ier = val & 0x0f;
if (pThis->lsr & UART_LSR_THRE) {
pThis->thr_ipending = 1;
serial_update_irq(pThis);
}
}
break;
case 2:
if (!pThis->f16550AEnabled)
break;
val = val & 0xFF;
if (pThis->fcr == val)
break;
/* Did the enable/disable flag change? If so, make sure FIFOs get flushed */
if ((val ^ pThis->fcr) & UART_FCR_FE)
val |= UART_FCR_XFR | UART_FCR_RFR;
/* FIFO clear */
if (val & UART_FCR_RFR) {
TMTimerStop(pThis->fifo_timeout_timer);
pThis->timeout_ipending = 0;
fifo_clear(pThis, RECV_FIFO);
}
if (val & UART_FCR_XFR) {
fifo_clear(pThis, XMIT_FIFO);
}
if (val & UART_FCR_FE) {
pThis->iir |= UART_IIR_FE;
/* Set RECV_FIFO trigger Level */
switch (val & 0xC0) {
case UART_FCR_ITL_1:
pThis->recv_fifo.itl = 1;
break;
case UART_FCR_ITL_2:
pThis->recv_fifo.itl = 4;
break;
case UART_FCR_ITL_3:
pThis->recv_fifo.itl = 8;
break;
case UART_FCR_ITL_4:
pThis->recv_fifo.itl = 14;
break;
}
} else
pThis->iir &= ~UART_IIR_FE;
/* Set fcr - or at least the bits in it that are supposed to "stick" */
pThis->fcr = val & 0xC9;
serial_update_irq(pThis);
break;
case 3:
{
int break_enable;
pThis->lcr = val;
serial_update_parameters(pThis);
break_enable = (val >> 6) & 1;
if (break_enable != pThis->last_break_enable) {
pThis->last_break_enable = break_enable;
if (RT_LIKELY(pThis->pDrvChar))
{
Log(("serial_ioport_write: Set break %d\n", break_enable));
int rc = pThis->pDrvChar->pfnSetBreak(pThis->pDrvChar, !!break_enable);
AssertRC(rc);
}
}
}
break;
case 4:
pThis->mcr = val & 0x1f;
if (RT_LIKELY(pThis->pDrvChar))
{
int rc = pThis->pDrvChar->pfnSetModemLines(pThis->pDrvChar,
!!(pThis->mcr & UART_MCR_RTS),
!!(pThis->mcr & UART_MCR_DTR));
AssertRC(rc);
}
break;
case 5:
break;
case 6:
break;
case 7:
pThis->scr = val;
break;
}
return VINF_SUCCESS;
#endif
}
static uint32_t serial_ioport_read(PDEVSERIAL pThis, uint32_t addr, int *pRC)
{
uint32_t ret = ~0U;
*pRC = VINF_SUCCESS;
addr &= 7;
switch(addr) {
default:
case 0:
if (pThis->lcr & UART_LCR_DLAB) {
/* DLAB == 1: divisor latch (LS) */
ret = pThis->divider & 0xff;
} else {
#ifndef IN_RING3
*pRC = VINF_IOM_R3_IOPORT_READ;
#else
if (pThis->fcr & UART_FCR_FE) {
ret = fifo_get(pThis, RECV_FIFO);
if (pThis->recv_fifo.count == 0)
pThis->lsr &= ~(UART_LSR_DR | UART_LSR_BI);
else
TMTimerSet(pThis->fifo_timeout_timer,
TMTimerGet(pThis->fifo_timeout_timer) + pThis->char_transmit_time * 4);
pThis->timeout_ipending = 0;
} else {
Log(("serial_io_port_read: read 0x%X\n", pThis->rbr));
ret = pThis->rbr;
pThis->lsr &= ~(UART_LSR_DR | UART_LSR_BI);
}
serial_update_irq(pThis);
if (pThis->fRecvWaiting)
{
pThis->fRecvWaiting = false;
int rc = RTSemEventSignal(pThis->ReceiveSem);
AssertRC(rc);
}
#endif
}
break;
case 1:
if (pThis->lcr & UART_LCR_DLAB) {
/* DLAB == 1: divisor latch (MS) */
ret = (pThis->divider >> 8) & 0xff;
} else {
ret = pThis->ier;
}
break;
case 2:
#ifndef IN_RING3
*pRC = VINF_IOM_R3_IOPORT_READ;
#else
ret = pThis->iir;
if ((ret & UART_IIR_ID) == UART_IIR_THRI) {
pThis->thr_ipending = 0;
serial_update_irq(pThis);
}
/* reset msr changed bit */
pThis->msr_changed = false;
#endif
break;
case 3:
ret = pThis->lcr;
break;
case 4:
ret = pThis->mcr;
break;
case 5:
if ((pThis->lsr & UART_LSR_DR) == 0 && pThis->fYieldOnLSRRead)
{
/* No data available and yielding is enabled, so yield in ring3. */
#ifndef IN_RING3
*pRC = VINF_IOM_R3_IOPORT_READ;
break;
#else
RTThreadYield ();
#endif
}
ret = pThis->lsr;
/* Clear break and overrun interrupts */
if (pThis->lsr & (UART_LSR_BI|UART_LSR_OE)) {
#ifndef IN_RING3
*pRC = VINF_IOM_R3_IOPORT_READ;
#else
pThis->lsr &= ~(UART_LSR_BI|UART_LSR_OE);
serial_update_irq(pThis);
#endif
}
break;
case 6:
if (pThis->mcr & UART_MCR_LOOP) {
/* in loopback, the modem output pins are connected to the
inputs */
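/* OUT2 -> DCD, OUT1 -> RI, RTS -> CTS, DTR -> DSR */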
ret = (pThis->mcr & 0x0c) << 4;
ret |= (pThis->mcr & 0x02) << 3;
ret |= (pThis->mcr & 0x01) << 5;
} else {
ret = pThis->msr;
/* Clear delta bits & msr int after read, if they were set */
if (pThis->msr & UART_MSR_ANY_DELTA) {
#ifndef IN_RING3
*pRC = VINF_IOM_R3_IOPORT_READ;
#else
pThis->msr &= 0xF0;
serial_update_irq(pThis);
#endif
}
}
break;
case 7:
ret = pThis->scr;
break;
}
return ret;
}
#ifdef IN_RING3
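/* Returns how many bytes the device is willing to accept right now (0 = none). */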
static int serial_can_receive(PDEVSERIAL pThis)
{
if (pThis->fcr & UART_FCR_FE) {
if (pThis->recv_fifo.count < UART_FIFO_LENGTH)
return (pThis->recv_fifo.count <= pThis->recv_fifo.itl)
? pThis->recv_fifo.itl - pThis->recv_fifo.count : 1;
else
return 0;
} else {
return !(pThis->lsr & UART_LSR_DR);
}
}
static void serial_receive(PDEVSERIAL pThis, const uint8_t *buf, int size)
{
if (pThis->fcr & UART_FCR_FE) {
int i;
for (i = 0; i < size; i++) {
fifo_put(pThis, RECV_FIFO, buf[i]);
}
pThis->lsr |= UART_LSR_DR;
/* call the timeout receive callback in 4 char transmit time */
TMTimerSet(pThis->fifo_timeout_timer, TMTimerGet(pThis->fifo_timeout_timer) + pThis->char_transmit_time * 4);
} else {
if (pThis->lsr & UART_LSR_DR)
pThis->lsr |= UART_LSR_OE;
pThis->rbr = buf[0];
pThis->lsr |= UART_LSR_DR;
}
serial_update_irq(pThis);
}
/**
* @interface_method_impl{PDMICHARPORT,pfnNotifyRead}
*/
static DECLCALLBACK(int) serialNotifyRead(PPDMICHARPORT pInterface, const void *pvBuf, size_t *pcbRead)
{
PDEVSERIAL pThis = RT_FROM_MEMBER(pInterface, DEVSERIAL, ICharPort);
const uint8_t *pu8Buf = (const uint8_t*)pvBuf;
size_t cbRead = *pcbRead;
PDMCritSectEnter(&pThis->CritSect, VERR_PERMISSION_DENIED);
for (; cbRead > 0; cbRead--, pu8Buf++)
{
if (!serial_can_receive(pThis))
{
/* If we cannot receive then wait for not more than 250ms. If we still
* cannot receive then the new character will either overwrite rbr
* or it will be dropped at fifo_put(). */
pThis->fRecvWaiting = true;
PDMCritSectLeave(&pThis->CritSect);
int rc = RTSemEventWait(pThis->ReceiveSem, 250);
PDMCritSectEnter(&pThis->CritSect, VERR_PERMISSION_DENIED);
}
serial_receive(pThis, &pu8Buf[0], 1);
}
PDMCritSectLeave(&pThis->CritSect);
return VINF_SUCCESS;
}
/**
* @interface_method_impl{PDMICHARPORT,pfnNotifyStatusLinesChanged}
*/
static DECLCALLBACK(int) serialNotifyStatusLinesChanged(PPDMICHARPORT pInterface, uint32_t newStatusLines)
{
PDEVSERIAL pThis = RT_FROM_MEMBER(pInterface, DEVSERIAL, ICharPort);
uint8_t newMsr = 0;
Log(("%s: pInterface=%p newStatusLines=%u\n", __FUNCTION__, pInterface, newStatusLines));
PDMCritSectEnter(&pThis->CritSect, VERR_PERMISSION_DENIED);
/* Set new states. */
if (newStatusLines & PDMICHARPORT_STATUS_LINES_DCD)
newMsr |= UART_MSR_DCD;
if (newStatusLines & PDMICHARPORT_STATUS_LINES_RI)
newMsr |= UART_MSR_RI;
if (newStatusLines & PDMICHARPORT_STATUS_LINES_DSR)
newMsr |= UART_MSR_DSR;
if (newStatusLines & PDMICHARPORT_STATUS_LINES_CTS)
newMsr |= UART_MSR_CTS;
/* Compare the old and the new states and set the delta bits accordingly. */
if ((newMsr & UART_MSR_DCD) != (pThis->msr & UART_MSR_DCD))
newMsr |= UART_MSR_DDCD;
if ((newMsr & UART_MSR_RI) != 0 && (pThis->msr & UART_MSR_RI) == 0)
newMsr |= UART_MSR_TERI;
if ((newMsr & UART_MSR_DSR) != (pThis->msr & UART_MSR_DSR))
newMsr |= UART_MSR_DDSR;
if ((newMsr & UART_MSR_CTS) != (pThis->msr & UART_MSR_CTS))
newMsr |= UART_MSR_DCTS;
pThis->msr = newMsr;
pThis->msr_changed = true;
serial_update_irq(pThis);
PDMCritSectLeave(&pThis->CritSect);
return VINF_SUCCESS;
}
/**
* @interface_method_impl{PDMICHARPORT,pfnNotifyBufferFull}
*/
static DECLCALLBACK(int) serialNotifyBufferFull(PPDMICHARPORT pInterface, bool fFull)
{
return VINF_SUCCESS;
}
/**
* @interface_method_impl{PDMICHARPORT,pfnNotifyBreak}
*/
static DECLCALLBACK(int) serialNotifyBreak(PPDMICHARPORT pInterface)
{
PDEVSERIAL pThis = RT_FROM_MEMBER(pInterface, DEVSERIAL, ICharPort);
Log(("%s: pInterface=%p\n", __FUNCTION__, pInterface));
PDMCritSectEnter(&pThis->CritSect, VERR_PERMISSION_DENIED);
pThis->lsr |= UART_LSR_BI;
serial_update_irq(pThis);
PDMCritSectLeave(&pThis->CritSect);
return VINF_SUCCESS;
}
/* -=-=-=-=-=-=-=-=- Timer callbacks -=-=-=-=-=-=-=-=- */
/**
* @callback_method_tmpl{FNTMTIMERDEV, Fifo timer function.}
*/
static DECLCALLBACK(void) serialFifoTimer(PPDMDEVINS pDevIns, PTMTIMER pTimer, void *pvUser)
{
PDEVSERIAL pThis = (PDEVSERIAL)pvUser;
Assert(PDMCritSectIsOwner(&pThis->CritSect));
if (pThis->recv_fifo.count)
{
pThis->timeout_ipending = 1;
serial_update_irq(pThis);
}
}
/**
* @callback_method_tmpl{FNTMTIMERDEV, Transmit timer function.}
*
* Just retries transmitting a character.
*/
static DECLCALLBACK(void) serialTransmitTimer(PPDMDEVINS pDevIns, PTMTIMER pTimer, void *pvUser)
{
PDEVSERIAL pThis = (PDEVSERIAL)pvUser;
Assert(PDMCritSectIsOwner(&pThis->CritSect));
serial_xmit(pThis, true);
}
#endif /* IN_RING3 */
/* -=-=-=-=-=-=-=-=- I/O Port Access Handlers -=-=-=-=-=-=-=-=- */
/**
* @callback_method_impl{FNIOMIOPORTOUT}
*/
PDMBOTHCBDECL(int) serialIOPortWrite(PPDMDEVINS pDevIns, void *pvUser, RTIOPORT Port, uint32_t u32, unsigned cb)
{
PDEVSERIAL pThis = PDMINS_2_DATA(pDevIns, PDEVSERIAL);
int rc;
Assert(PDMCritSectIsOwner(&pThis->CritSect));
if (cb == 1)
{
Log2(("%s: port %#06x val %#04x\n", __FUNCTION__, Port, u32));
rc = serial_ioport_write(pThis, Port, u32);
}
else
{
AssertMsgFailed(("Port=%#x cb=%d u32=%#x\n", Port, cb, u32));
rc = VINF_SUCCESS;
}
return rc;
}
/**
* @callback_method_impl{FNIOMIOPORTIN}
*/
PDMBOTHCBDECL(int) serialIOPortRead(PPDMDEVINS pDevIns, void *pvUser, RTIOPORT Port, uint32_t *pu32, unsigned cb)
{
PDEVSERIAL pThis = PDMINS_2_DATA(pDevIns, PDEVSERIAL);
int rc;
Assert(PDMCritSectIsOwner(&pThis->CritSect));
if (cb == 1)
{
*pu32 = serial_ioport_read(pThis, Port, &rc);
Log2(("%s: port %#06x val %#04x\n", __FUNCTION__, Port, *pu32));
}
else
rc = VERR_IOM_IOPORT_UNUSED;
return rc;
}
#ifdef IN_RING3
/* -=-=-=-=-=-=-=-=- Saved State -=-=-=-=-=-=-=-=- */
/**
* @callback_method_tmpl{FNSSMDEVLIVEEXEC}
*/
static DECLCALLBACK(int) serialLiveExec(PPDMDEVINS pDevIns, PSSMHANDLE pSSM, uint32_t uPass)
{
PDEVSERIAL pThis = PDMINS_2_DATA(pDevIns, PDEVSERIAL);
SSMR3PutS32(pSSM, pThis->irq);
SSMR3PutU32(pSSM, pThis->base);
return VINF_SSM_DONT_CALL_AGAIN;
}
/**
* @callback_method_tmpl{FNSSMDEVSAVEEXEC}
*/
static DECLCALLBACK(int) serialSaveExec(PPDMDEVINS pDevIns, PSSMHANDLE pSSM)
{
PDEVSERIAL pThis = PDMINS_2_DATA(pDevIns, PDEVSERIAL);
SSMR3PutU16(pSSM, pThis->divider);
SSMR3PutU8(pSSM, pThis->rbr);
SSMR3PutU8(pSSM, pThis->ier);
SSMR3PutU8(pSSM, pThis->lcr);
SSMR3PutU8(pSSM, pThis->mcr);
SSMR3PutU8(pSSM, pThis->lsr);
SSMR3PutU8(pSSM, pThis->msr);
SSMR3PutU8(pSSM, pThis->scr);
SSMR3PutU8(pSSM, pThis->fcr); /* 16550A */
SSMR3PutS32(pSSM, pThis->thr_ipending);
SSMR3PutS32(pSSM, pThis->irq);
SSMR3PutS32(pSSM, pThis->last_break_enable);
SSMR3PutU32(pSSM, pThis->base);
SSMR3PutBool(pSSM, pThis->msr_changed);
/* Version 5, save everything that might be of importance. Much better than
missing relevant bits! */
SSMR3PutU8(pSSM, pThis->thr);
SSMR3PutU8(pSSM, pThis->tsr);
SSMR3PutU8(pSSM, pThis->iir);
SSMR3PutS32(pSSM, pThis->timeout_ipending);
TMR3TimerSave(pThis->fifo_timeout_timer, pSSM);
TMR3TimerSave(pThis->transmit_timerR3, pSSM);
SSMR3PutU8(pSSM, pThis->recv_fifo.itl);
SSMR3PutU8(pSSM, pThis->xmit_fifo.itl);
/* Don't store:
* - the content of the FIFO
* - tsr_retry
*/
return SSMR3PutU32(pSSM, ~0); /* sanity/terminator */
}
/**
* @callback_method_tmpl{FNSSMDEVLOADEXEC}
*/
static DECLCALLBACK(int) serialLoadExec(PPDMDEVINS pDevIns, PSSMHANDLE pSSM, uint32_t uVersion, uint32_t uPass)
{
PDEVSERIAL pThis = PDMINS_2_DATA(pDevIns, PDEVSERIAL);
int32_t iIrq;
uint32_t IOBase;
AssertMsgReturn(uVersion >= SERIAL_SAVED_STATE_VERSION_16450, ("%d\n", uVersion), VERR_SSM_UNSUPPORTED_DATA_UNIT_VERSION);
if (uPass != SSM_PASS_FINAL)
{
SSMR3GetS32(pSSM, &iIrq);
int rc = SSMR3GetU32(pSSM, &IOBase);
AssertRCReturn(rc, rc);
}
else
{
if (uVersion == SERIAL_SAVED_STATE_VERSION_16450)
{
pThis->f16550AEnabled = false;
LogRel(("Serial#%d: falling back to 16450 mode from load state\n", pDevIns->iInstance));
}
SSMR3GetU16(pSSM, &pThis->divider);
SSMR3GetU8(pSSM, &pThis->rbr);
SSMR3GetU8(pSSM, &pThis->ier);
SSMR3GetU8(pSSM, &pThis->lcr);
SSMR3GetU8(pSSM, &pThis->mcr);
SSMR3GetU8(pSSM, &pThis->lsr);
SSMR3GetU8(pSSM, &pThis->msr);
SSMR3GetU8(pSSM, &pThis->scr);
if (uVersion > SERIAL_SAVED_STATE_VERSION_16450)
SSMR3GetU8(pSSM, &pThis->fcr);
SSMR3GetS32(pSSM, &pThis->thr_ipending);
SSMR3GetS32(pSSM, &iIrq);
SSMR3GetS32(pSSM, &pThis->last_break_enable);
SSMR3GetU32(pSSM, &IOBase);
SSMR3GetBool(pSSM, &pThis->msr_changed);
if (uVersion > SERIAL_SAVED_STATE_VERSION_MISSING_BITS)
{
SSMR3GetU8(pSSM, &pThis->thr);
SSMR3GetU8(pSSM, &pThis->tsr);
SSMR3GetU8(pSSM, &pThis->iir);
SSMR3GetS32(pSSM, &pThis->timeout_ipending);
TMR3TimerLoad(pThis->fifo_timeout_timer, pSSM);
TMR3TimerLoad(pThis->transmit_timerR3, pSSM);
SSMR3GetU8(pSSM, &pThis->recv_fifo.itl);
SSMR3GetU8(pSSM, &pThis->xmit_fifo.itl);
}
/* the marker. */
uint32_t u32;
int rc = SSMR3GetU32(pSSM, &u32);
if (RT_FAILURE(rc))
return rc;
AssertMsgReturn(u32 == ~0U, ("%#x\n", u32), VERR_SSM_DATA_UNIT_FORMAT_CHANGED);
if ( (pThis->lsr & UART_LSR_DR)
|| pThis->fRecvWaiting)
{
pThis->fRecvWaiting = false;
rc = RTSemEventSignal(pThis->ReceiveSem);
AssertRC(rc);
}
/* this isn't strictly necessary but cannot hurt... */
pThis->pDevInsR3 = pDevIns;
pThis->pDevInsR0 = PDMDEVINS_2_R0PTR(pDevIns);
pThis->pDevInsRC = PDMDEVINS_2_RCPTR(pDevIns);
}
/*
* Check the config.
*/
if ( pThis->irq != iIrq
|| pThis->base != IOBase)
return SSMR3SetCfgError(pSSM, RT_SRC_POS,
N_("Config mismatch - saved irq=%#x iobase=%#x; configured irq=%#x iobase=%#x"),
iIrq, IOBase, pThis->irq, pThis->base);
return VINF_SUCCESS;
}
#ifdef VBOX_SERIAL_PCI
/* -=-=-=-=-=-=-=-=- PCI Device Callback(s) -=-=-=-=-=-=-=-=- */
/**
* @callback_method_impl{FNPCIIOREGIONMAP}
*/
static DECLCALLBACK(int) serialIOPortRegionMap(PPCIDEVICE pPciDev, int iRegion, RTGCPHYS GCPhysAddress,
uint32_t cb, PCIADDRESSSPACE enmType)
{
PDEVSERIAL pThis = RT_FROM_MEMBER(pPciDev, DEVSERIAL, PciDev);
int rc = VINF_SUCCESS;
Assert(enmType == PCI_ADDRESS_SPACE_IO);
Assert(iRegion == 0);
Assert(cb == 8);
AssertMsg(RT_ALIGN(GCPhysAddress, 8) == GCPhysAddress, ("Expected 8 byte alignment. GCPhysAddress=%#x\n", GCPhysAddress));
pThis->base = (RTIOPORT)GCPhysAddress;
LogRel(("Serial#%d: mapping I/O at %#06x\n", pThis->pDevIns->iInstance, pThis->base));
/*
* Register our port IO handlers.
*/
rc = PDMDevHlpIOPortRegister(pPciDev->pDevIns, (RTIOPORT)GCPhysAddress, 8, (void *)pThis,
serialIOPortWrite, serialIOPortRead, NULL, NULL, "SERIAL");
AssertRC(rc);
return rc;
}
#endif /* VBOX_SERIAL_PCI */
/* -=-=-=-=-=-=-=-=- PDMIBASE on LUN#1 -=-=-=-=-=-=-=-=- */
/**
* @interface_method_impl{PDMIBASE, pfnQueryInterface}
*/
static DECLCALLBACK(void *) serialQueryInterface(PPDMIBASE pInterface, const char *pszIID)
{
PDEVSERIAL pThis = RT_FROM_MEMBER(pInterface, DEVSERIAL, IBase);
PDMIBASE_RETURN_INTERFACE(pszIID, PDMIBASE, &pThis->IBase);
PDMIBASE_RETURN_INTERFACE(pszIID, PDMICHARPORT, &pThis->ICharPort);
return NULL;
}
/* -=-=-=-=-=-=-=-=- PDMDEVREG -=-=-=-=-=-=-=-=- */
/**
* @interface_method_impl{PDMDEVREG, pfnRelocate}
*/
static DECLCALLBACK(void) serialRelocate(PPDMDEVINS pDevIns, RTGCINTPTR offDelta)
{
PDEVSERIAL pThis = PDMINS_2_DATA(pDevIns, PDEVSERIAL);
pThis->pDevInsRC = PDMDEVINS_2_RCPTR(pDevIns);
pThis->transmit_timerRC = TMTimerRCPtr(pThis->transmit_timerR3);
}
/**
* @interface_method_impl{PDMDEVREG, pfnReset}
*/
static DECLCALLBACK(void) serialReset(PPDMDEVINS pDevIns)
{
PDEVSERIAL pThis = PDMINS_2_DATA(pDevIns, PDEVSERIAL);
pThis->rbr = 0;
pThis->ier = 0;
pThis->iir = UART_IIR_NO_INT;
pThis->lcr = 0;
pThis->lsr = UART_LSR_TEMT | UART_LSR_THRE;
pThis->msr = UART_MSR_DCD | UART_MSR_DSR | UART_MSR_CTS;
/* Default to 9600 baud, 1 start bit, 8 data bits, 1 stop bit, no parity. */
pThis->divider = 0x0C;
pThis->mcr = UART_MCR_OUT2;
pThis->scr = 0;
pThis->tsr_retry = 0;
uint64_t tf = TMTimerGetFreq(CTX_SUFF(pThis->transmit_timer));
pThis->char_transmit_time = (tf / 9600) * 10;
serial_tsr_retry_update_parameters(pThis, tf);
fifo_clear(pThis, RECV_FIFO);
fifo_clear(pThis, XMIT_FIFO);
pThis->thr_ipending = 0;
pThis->last_break_enable = 0;
# ifdef VBOX_SERIAL_PCI
PDMDevHlpPCISetIrqNoWait(pThis->CTX_SUFF(pDevIns), 0, 0);
# else /* !VBOX_SERIAL_PCI */
PDMDevHlpISASetIrqNoWait(pThis->CTX_SUFF(pDevIns), pThis->irq, 0);
# endif /* !VBOX_SERIAL_PCI */
}
/**
* @interface_method_impl{PDMDEVREG, pfnDestruct}
*/
static DECLCALLBACK(int) serialDestruct(PPDMDEVINS pDevIns)
{
PDEVSERIAL pThis = PDMINS_2_DATA(pDevIns, PDEVSERIAL);
PDMDEV_CHECK_VERSIONS_RETURN_QUIET(pDevIns);
RTSemEventDestroy(pThis->ReceiveSem);
pThis->ReceiveSem = NIL_RTSEMEVENT;
PDMR3CritSectDelete(&pThis->CritSect);
return VINF_SUCCESS;
}
/**
* @interface_method_impl{PDMDEVREG, pfnConstruct}
*/
static DECLCALLBACK(int) serialConstruct(PPDMDEVINS pDevIns, int iInstance, PCFGMNODE pCfg)
{
PDEVSERIAL pThis = PDMINS_2_DATA(pDevIns, PDEVSERIAL);
int rc;
uint16_t io_base;
uint8_t irq_lvl;
Assert(iInstance < 4);
PDMDEV_CHECK_VERSIONS_RETURN(pDevIns);
/*
* Initialize the instance data.
* (Do this early or the destructor might choke on something!)
*/
pThis->pDevInsR3 = pDevIns;
pThis->pDevInsR0 = PDMDEVINS_2_R0PTR(pDevIns);
pThis->pDevInsRC = PDMDEVINS_2_RCPTR(pDevIns);
pThis->ReceiveSem = NIL_RTSEMEVENT;
/* IBase */
pThis->IBase.pfnQueryInterface = serialQueryInterface;
/* ICharPort */
pThis->ICharPort.pfnNotifyRead = serialNotifyRead;
pThis->ICharPort.pfnNotifyStatusLinesChanged = serialNotifyStatusLinesChanged;
pThis->ICharPort.pfnNotifyBufferFull = serialNotifyBufferFull;
pThis->ICharPort.pfnNotifyBreak = serialNotifyBreak;
#ifdef VBOX_SERIAL_PCI
/* the PCI device */
pThis->PciDev.config[0x00] = 0xee; /* Vendor: ??? */
pThis->PciDev.config[0x01] = 0x80;
pThis->PciDev.config[0x02] = 0x01; /* Device: ??? */
pThis->PciDev.config[0x03] = 0x01;
pThis->PciDev.config[0x04] = PCI_COMMAND_IOACCESS;
pThis->PciDev.config[0x09] = 0x01; /* Programming interface: 16450 */
pThis->PciDev.config[0x0a] = 0x00; /* Subclass: Serial controller */
pThis->PciDev.config[0x0b] = 0x07; /* Class: Communication controller */
pThis->PciDev.config[0x0e] = 0x00; /* Header type: standard */
pThis->PciDev.config[0x3c] = irq_lvl; /* preconfigure IRQ number (0 = autoconfig) */
pThis->PciDev.config[0x3d] = 1; /* interrupt pin 0 */
#endif /* VBOX_SERIAL_PCI */
/*
* Validate and read the configuration.
*/
if (!CFGMR3AreValuesValid(pCfg, "IRQ\0"
"IOBase\0"
"GCEnabled\0"
"R0Enabled\0"
"YieldOnLSRRead\0"
"Enable16550A\0"<|fim▁hole|> }
rc = CFGMR3QueryBoolDef(pCfg, "GCEnabled", &pThis->fGCEnabled, true);
if (RT_FAILURE(rc))
return PDMDEV_SET_ERROR(pDevIns, rc,
N_("Configuration error: Failed to get the \"GCEnabled\" value"));
rc = CFGMR3QueryBoolDef(pCfg, "R0Enabled", &pThis->fR0Enabled, true);
if (RT_FAILURE(rc))
return PDMDEV_SET_ERROR(pDevIns, rc,
N_("Configuration error: Failed to get the \"R0Enabled\" value"));
rc = CFGMR3QueryBoolDef(pCfg, "YieldOnLSRRead", &pThis->fYieldOnLSRRead, false);
if (RT_FAILURE(rc))
return PDMDEV_SET_ERROR(pDevIns, rc,
N_("Configuration error: Failed to get the \"YieldOnLSRRead\" value"));
rc = CFGMR3QueryU8(pCfg, "IRQ", &irq_lvl);
if (rc == VERR_CFGM_VALUE_NOT_FOUND)
{
/* Provide sensible defaults. */
if (iInstance == 0)
irq_lvl = 4;
else if (iInstance == 1)
irq_lvl = 3;
else
AssertReleaseFailed(); /* irq_lvl is undefined. */
}
else if (RT_FAILURE(rc))
return PDMDEV_SET_ERROR(pDevIns, rc,
N_("Configuration error: Failed to get the \"IRQ\" value"));
rc = CFGMR3QueryU16(pCfg, "IOBase", &io_base);
if (rc == VERR_CFGM_VALUE_NOT_FOUND)
{
if (iInstance == 0)
io_base = 0x3f8;
else if (iInstance == 1)
io_base = 0x2f8;
else
AssertReleaseFailed(); /* io_base is undefined */
}
else if (RT_FAILURE(rc))
return PDMDEV_SET_ERROR(pDevIns, rc,
N_("Configuration error: Failed to get the \"IOBase\" value"));
Log(("DevSerial: instance %d iobase=%04x irq=%d\n", iInstance, io_base, irq_lvl));
rc = CFGMR3QueryBoolDef(pCfg, "Enable16550A", &pThis->f16550AEnabled, true);
if (RT_FAILURE(rc))
return PDMDEV_SET_ERROR(pDevIns, rc,
N_("Configuration error: Failed to get the \"Enable16550A\" value"));
pThis->irq = irq_lvl;
#ifdef VBOX_SERIAL_PCI
pThis->base = -1;
#else
pThis->base = io_base;
#endif
LogRel(("Serial#%d: emulating %s\n", pDevIns->iInstance, pThis->f16550AEnabled ? "16550A" : "16450"));
/*
* Initialize critical section and the semaphore. Change the default
* critical section to ours so that TM and IOM will enter it before
* calling us.
*
* Note! This must be done BEFORE creating timers, registering I/O ports
* and other things which might pick up the default CS or end up
* calling back into the device.
*/
rc = PDMDevHlpCritSectInit(pDevIns, &pThis->CritSect, RT_SRC_POS, "Serial#%u", iInstance);
AssertRCReturn(rc, rc);
rc = PDMDevHlpSetDeviceCritSect(pDevIns, &pThis->CritSect);
AssertRCReturn(rc, rc);
rc = RTSemEventCreate(&pThis->ReceiveSem);
AssertRCReturn(rc, rc);
/*
* Create the timers.
*/
rc = PDMDevHlpTMTimerCreate(pDevIns, TMCLOCK_VIRTUAL, serialFifoTimer, pThis,
TMTIMER_FLAGS_DEFAULT_CRIT_SECT, "Serial Fifo Timer",
&pThis->fifo_timeout_timer);
AssertRCReturn(rc, rc);
rc = PDMDevHlpTMTimerCreate(pDevIns, TMCLOCK_VIRTUAL, serialTransmitTimer, pThis,
TMTIMER_FLAGS_DEFAULT_CRIT_SECT, "Serial Transmit Timer",
&pThis->transmit_timerR3);
AssertRCReturn(rc, rc);
pThis->transmit_timerR0 = TMTimerR0Ptr(pThis->transmit_timerR3);
pThis->transmit_timerRC = TMTimerRCPtr(pThis->transmit_timerR3);
serialReset(pDevIns);
#ifdef VBOX_SERIAL_PCI
/*
* Register the PCI Device and region.
*/
rc = PDMDevHlpPCIRegister(pDevIns, &pThis->PciDev);
if (RT_FAILURE(rc))
return rc;
rc = PDMDevHlpPCIIORegionRegister(pDevIns, 0, 8, PCI_ADDRESS_SPACE_IO, serialIOPortRegionMap);
if (RT_FAILURE(rc))
return rc;
#else /* !VBOX_SERIAL_PCI */
/*
* Register the I/O ports.
*/
pThis->base = io_base;
rc = PDMDevHlpIOPortRegister(pDevIns, io_base, 8, 0,
serialIOPortWrite, serialIOPortRead,
NULL, NULL, "SERIAL");
if (RT_FAILURE(rc))
return rc;
if (pThis->fGCEnabled)
{
rc = PDMDevHlpIOPortRegisterRC(pDevIns, io_base, 8, 0, "serialIOPortWrite",
"serialIOPortRead", NULL, NULL, "Serial");
if (RT_FAILURE(rc))
return rc;
}
if (pThis->fR0Enabled)
{
rc = PDMDevHlpIOPortRegisterR0(pDevIns, io_base, 8, 0, "serialIOPortWrite",
"serialIOPortRead", NULL, NULL, "Serial");
if (RT_FAILURE(rc))
return rc;
}
#endif /* !VBOX_SERIAL_PCI */
/*
* Saved state.
*/
rc = PDMDevHlpSSMRegister3(pDevIns, SERIAL_SAVED_STATE_VERSION, sizeof (*pThis),
serialLiveExec, serialSaveExec, serialLoadExec);
if (RT_FAILURE(rc))
return rc;
/*
* Attach the char driver and get the interfaces.
* For now no run-time changes are supported.
*/
rc = PDMDevHlpDriverAttach(pDevIns, 0, &pThis->IBase, &pThis->pDrvBase, "Serial Char");
if (RT_SUCCESS(rc))
{
pThis->pDrvChar = PDMIBASE_QUERY_INTERFACE(pThis->pDrvBase, PDMICHARCONNECTOR);
if (!pThis->pDrvChar)
{
AssertLogRelMsgFailed(("Configuration error: instance %d has no char interface!\n", iInstance));
return VERR_PDM_MISSING_INTERFACE;
}
/** @todo provide read notification interface!!!! */
}
else if (rc == VERR_PDM_NO_ATTACHED_DRIVER)
{
pThis->pDrvBase = NULL;
pThis->pDrvChar = NULL;
LogRel(("Serial%d: no unit\n", iInstance));
}
else
{
AssertLogRelMsgFailed(("Serial%d: Failed to attach to char driver. rc=%Rrc\n", iInstance, rc));
/* Don't call VMSetError here as we assume that the driver already set an appropriate error */
return rc;
}
return VINF_SUCCESS;
}
/**
* The device registration structure.
*/
const PDMDEVREG g_DeviceSerialPort =
{
/* u32Version */
PDM_DEVREG_VERSION,
/* szName */
"serial",
/* szRCMod */
"VBoxDDRC.rc",
/* szR0Mod */
"VBoxDDR0.r0",
/* pszDescription */
"Serial Communication Port",
/* fFlags */
PDM_DEVREG_FLAGS_DEFAULT_BITS | PDM_DEVREG_FLAGS_RC | PDM_DEVREG_FLAGS_R0,
/* fClass */
PDM_DEVREG_CLASS_SERIAL,
/* cMaxInstances */
UINT32_MAX,
/* cbInstance */
sizeof(DEVSERIAL),
/* pfnConstruct */
serialConstruct,
/* pfnDestruct */
serialDestruct,
/* pfnRelocate */
serialRelocate,
/* pfnMemSetup */
NULL,
/* pfnPowerOn */
NULL,
/* pfnReset */
serialReset,
/* pfnSuspend */
NULL,
/* pfnResume */
NULL,
/* pfnAttach */
NULL,
/* pfnDetach */
NULL,
/* pfnQueryInterface. */
NULL,
/* pfnInitComplete */
NULL,
/* pfnPowerOff */
NULL,
/* pfnSoftReset */
NULL,
/* u32VersionEnd */
PDM_DEVREG_VERSION
};
#endif /* IN_RING3 */
#endif /* !VBOX_DEVICE_STRUCT_TESTCASE */<|fim▁end|> | ))
{
AssertMsgFailed(("serialConstruct Invalid configuration values\n"));
return VERR_PDM_DEVINS_UNKNOWN_CFG_VALUES; |
<|file_name|>sautils.py<|end_file_name|><|fim▁begin|># This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.<|fim▁hole|># You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from contextlib import contextmanager
import sqlalchemy as sa
from sqlalchemy.ext import compiler
from sqlalchemy.sql.expression import ClauseElement
from sqlalchemy.sql.expression import Executable
# from http://www.sqlalchemy.org/docs/core/compiler.html#compiling-sub-elements-of-a-custom-expression-construct  # noqa pylint: disable=line-too-long
# _execution_options per
# http://docs.sqlalchemy.org/en/rel_0_7/core/compiler.html#enabling-compiled-autocommit
# (UpdateBase requires sqlalchemy 0.7.0)
class InsertFromSelect(Executable, ClauseElement):
_execution_options = \
Executable._execution_options.union({'autocommit': True})
def __init__(self, table, select):
self.table = table
self.select = select
@compiler.compiles(InsertFromSelect)
def _visit_insert_from_select(element, compiler, **kw):
return "INSERT INTO {} {}".format(compiler.process(element.table, asfrom=True),
compiler.process(element.select))
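# Minimal usage sketch (table names hypothetical): copy rows server-side in a
# single statement instead of selecting and re-inserting them row by row:
#   conn.execute(InsertFromSelect(new_table, sa.select([old_table])))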
def sa_version():
if hasattr(sa, '__version__'):
def tryint(s):
try:
return int(s)
except (ValueError, TypeError):
return -1
return tuple(map(tryint, sa.__version__.split('.')))
return (0, 0, 0) # "it's old"
def Table(*args, **kwargs):
"""Wrap table creation to add any necessary dialect-specific options"""
# work around the case where a database was created for us with
# a non-utf8 character set (mysql's default)
kwargs['mysql_character_set'] = 'utf8'
return sa.Table(*args, **kwargs)
@contextmanager
def withoutSqliteForeignKeys(engine, connection=None):
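"""Context manager that temporarily disables SQLite foreign key
enforcement for the duration of the block; a no-op on other dialects."""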
conn = connection
if engine.dialect.name == 'sqlite':
if conn is None:
conn = engine.connect()
# This context is not re-entrant. Ensure it.
assert not getattr(engine, 'fk_disabled', False)
engine.fk_disabled = True
conn.execute('pragma foreign_keys=OFF')
try:
yield
finally:
if engine.dialect.name == 'sqlite':
engine.fk_disabled = False
conn.execute('pragma foreign_keys=ON')
if connection is None:
conn.close()<|fim▁end|> | # |
<|file_name|>style-utils.js<|end_file_name|><|fim▁begin|>import {Map} from 'immutable';
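// Collects the ids of all layers marked `interactive: true`, handling both
// Immutable.js map styles and plain JSON style objects.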
export function getInteractiveLayerIds(mapStyle) {
let interactiveLayerIds = [];
if (Map.isMap(mapStyle) && mapStyle.has('layers')) {<|fim▁hole|> .toJS();
} else if (Array.isArray(mapStyle.layers)) {
interactiveLayerIds = mapStyle.layers.filter(l => l.interactive)
.map(l => l.id);
}
return interactiveLayerIds;
}<|fim▁end|> | interactiveLayerIds = mapStyle.get('layers')
.filter(l => l.get('interactive'))
.map(l => l.get('id')) |
<|file_name|>video.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2010 Roger Philibert
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import datetime
import lxml.html
import re
from weboob.tools.browser import BasePage
from weboob.tools.misc import to_unicode
from weboob.tools.parsers.lxmlparser import select, SelectElementException
from ..video import YoujizzVideo
__all__ = ['VideoPage']
class VideoPage(BasePage):
def get_video(self, video=None):
_id = to_unicode(self.group_dict['id'])
if video is None:
video = YoujizzVideo(_id)
title_el = select(self.document.getroot(), 'title', 1)
video.title = to_unicode(title_el.text.strip())
# youjizz HTML is crap, we must parse it with regexps
data = lxml.html.tostring(self.document.getroot())
m = re.search(r'<strong>.*?Runtime.*?</strong> (.+?)<br.*>', data)
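# The page renders the runtime as "MM:SS"; parse it into a timedelta.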
try:
if m:
minutes, seconds = (int(v) for v in to_unicode(m.group(1).strip()).split(':'))
video.duration = datetime.timedelta(minutes=minutes, seconds=seconds)
else:
raise Exception()
except Exception:
raise SelectElementException('Could not retrieve video duration')
video_file_urls = re.findall(r'"(http://media[^ ,]+\.flv)"', data)
if len(video_file_urls) == 0:
raise SelectElementException('Video URL not found')
elif len(video_file_urls) > 1:
raise SelectElementException('Multiple video file URLs found')
else:
video.url = video_file_urls[0]
<|fim▁hole|><|fim▁end|> | return video |
<|file_name|>mock_camera.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
def pc_filter(topic, datatype, md5sum, msg_def, header):
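"""rosbag connection filter that keeps only sensor_msgs/PointCloud2 topics."""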
if datatype == 'sensor_msgs/PointCloud2':
return True
return False
class MockCamera(object):
"""A MockCamera reads saved point clouds.
"""
def __init__(self):
pass
def read_cloud(self, path):
"""Returns the sensor_msgs/PointCloud2 in the given bag file.
Args:
path: string, the path to a bag file with a single
sensor_msgs/PointCloud2 in it.
Returns: A sensor_msgs/PointCloud2 message, or None if there were no
PointCloud2 messages in the bag file.
"""
bag = rosbag.Bag(path)
for topic, msg, time in bag.read_messages(connection_filter=pc_filter):
return msg
bag.close()
return None<|fim▁end|> | import rosbag
from sensor_msgs.msg import PointCloud2 |
<|file_name|>set.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use borrow::Borrow;
use clone::Clone;
use cmp::{Eq, PartialEq};
use core::marker::Sized;
use default::Default;
use fmt::Debug;
use fmt;
use hash::Hash;
use iter::{Iterator, IntoIterator, ExactSizeIterator, FromIterator, Map, Chain, Extend};
use ops::{BitOr, BitAnd, BitXor, Sub};
use option::Option::{Some, None, self};
use super::map::{self, HashMap, Keys, RandomState};
use super::state::HashState;
const INITIAL_CAPACITY: usize = 32;
// Future Optimization (FIXME!)
// =============================
//
// Iteration over zero sized values is a noop. There is no need
// for `bucket.val` in the case of HashSet. I suppose we would need HKT
// to get rid of it properly.
/// An implementation of a hash set using the underlying representation of a
/// HashMap where the value is ().
///
/// As with the `HashMap` type, a `HashSet` requires that the elements
/// implement the `Eq` and `Hash` traits. This can frequently be achieved by
/// using `#[derive(PartialEq, Eq, Hash)]`. If you implement these yourself,
/// it is important that the following property holds:
///
/// ```text
/// k1 == k2 -> hash(k1) == hash(k2)
/// ```
///
/// In other words, if two keys are equal, their hashes must be equal.
///
///
/// It is a logic error for an item to be modified in such a way that the
/// item's hash, as determined by the `Hash` trait, or its equality, as
/// determined by the `Eq` trait, changes while it is in the set. This is
/// normally only possible through `Cell`, `RefCell`, global state, I/O, or
/// unsafe code.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
/// // Type inference lets us omit an explicit type signature (which
/// // would be `HashSet<&str>` in this example).
/// let mut books = HashSet::new();
///
/// // Add some books.
/// books.insert("A Dance With Dragons");
/// books.insert("To Kill a Mockingbird");
/// books.insert("The Odyssey");
/// books.insert("The Great Gatsby");
///
/// // Check for a specific one.
/// if !books.contains("The Winds of Winter") {
/// println!("We have {} books, but The Winds of Winter ain't one.",
/// books.len());
/// }
///
/// // Remove a book.
/// books.remove("The Odyssey");
///
/// // Iterate over everything.
/// for book in &books {
/// println!("{}", book);
/// }
/// ```
///
/// The easiest way to use `HashSet` with a custom type is to derive
/// `Eq` and `Hash`. We must also derive `PartialEq`, this will in the
/// future be implied by `Eq`.
///
/// ```
/// use std::collections::HashSet;
/// #[derive(Hash, Eq, PartialEq, Debug)]
/// struct Viking<'a> {
/// name: &'a str,
/// power: usize,
/// }
///
/// let mut vikings = HashSet::new();
///
/// vikings.insert(Viking { name: "Einar", power: 9 });
/// vikings.insert(Viking { name: "Einar", power: 9 });
/// vikings.insert(Viking { name: "Olaf", power: 4 });
/// vikings.insert(Viking { name: "Harald", power: 8 });
///
/// // Use derived implementation to print the vikings.
/// for x in &vikings {
/// println!("{:?}", x);
/// }
/// ```
#[derive(Clone)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct HashSet<T, S = RandomState> {
map: HashMap<T, (), S>
}
impl<T: Hash + Eq> HashSet<T, RandomState> {
/// Creates an empty HashSet.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
/// let mut set: HashSet<i32> = HashSet::new();
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn new() -> HashSet<T, RandomState> {
HashSet::with_capacity(INITIAL_CAPACITY)
}
/// Creates an empty HashSet with space for at least `n` elements in
/// the hash table.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
/// let mut set: HashSet<i32> = HashSet::with_capacity(10);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn with_capacity(capacity: usize) -> HashSet<T, RandomState> {
HashSet { map: HashMap::with_capacity(capacity) }
}
}
impl<T, S> HashSet<T, S>
where T: Eq + Hash, S: HashState
{
/// Creates a new empty hash set which will use the given hasher to hash
/// keys.
///
/// The hash set is also created with the default initial capacity.
///
/// # Examples
///
/// ```
/// # #![feature(hashmap_hasher)]
/// use std::collections::HashSet;
/// use std::collections::hash_map::RandomState;
///
/// let s = RandomState::new();
/// let mut set = HashSet::with_hash_state(s);
/// set.insert(2);
/// ```
#[inline]
#[unstable(feature = "hashmap_hasher", reason = "hasher stuff is unclear")]
pub fn with_hash_state(hash_state: S) -> HashSet<T, S> {
HashSet::with_capacity_and_hash_state(INITIAL_CAPACITY, hash_state)
}
/// Creates an empty HashSet with space for at least `capacity`
/// elements in the hash table, using `hasher` to hash the keys.
///
/// Warning: `hasher` is normally randomly generated, and
/// is designed to allow `HashSet`s to be resistant to attacks that
/// cause many collisions and very poor performance. Setting it
/// manually using this function can expose a DoS attack vector.
///
/// # Examples
///
/// ```
/// # #![feature(hashmap_hasher)]
/// use std::collections::HashSet;
/// use std::collections::hash_map::RandomState;
///
/// let s = RandomState::new();
/// let mut set = HashSet::with_capacity_and_hash_state(10, s);
/// set.insert(1);
/// ```
#[inline]
#[unstable(feature = "hashmap_hasher", reason = "hasher stuff is unclear")]
pub fn with_capacity_and_hash_state(capacity: usize, hash_state: S)
-> HashSet<T, S> {
HashSet {
map: HashMap::with_capacity_and_hash_state(capacity, hash_state),
}
}
/// Returns the number of elements the set can hold without reallocating.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
/// let set: HashSet<i32> = HashSet::with_capacity(100);
/// assert!(set.capacity() >= 100);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn capacity(&self) -> usize {
self.map.capacity()
}
/// Reserves capacity for at least `additional` more elements to be inserted
/// in the `HashSet`. The collection may reserve more space to avoid
/// frequent reallocations.
///
/// # Panics
///
/// Panics if the new allocation size overflows `usize`.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
/// let mut set: HashSet<i32> = HashSet::new();
/// set.reserve(10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn reserve(&mut self, additional: usize) {
self.map.reserve(additional)
}
/// Shrinks the capacity of the set as much as possible. It will drop
/// down as much as possible while maintaining the internal rules
/// and possibly leaving some space in accordance with the resize policy.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
///
/// let mut set = HashSet::with_capacity(100);
/// set.insert(1);
/// set.insert(2);
/// assert!(set.capacity() >= 100);
/// set.shrink_to_fit();
/// assert!(set.capacity() >= 2);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn shrink_to_fit(&mut self) {
self.map.shrink_to_fit()
}
/// An iterator visiting all elements in arbitrary order.
/// Iterator element type is &'a T.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
/// let mut set = HashSet::new();
/// set.insert("a");
/// set.insert("b");
///
/// // Will print in an arbitrary order.
/// for x in set.iter() {
/// println!("{}", x);
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn iter(&self) -> Iter<T> {
Iter { iter: self.map.keys() }
}
/// Visit the values representing the difference.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
/// let a: HashSet<_> = [1, 2, 3].iter().cloned().collect();
/// let b: HashSet<_> = [4, 2, 3, 4].iter().cloned().collect();
///
/// // Can be seen as `a - b`.
/// for x in a.difference(&b) {
/// println!("{}", x); // Print 1
/// }
///
/// let diff: HashSet<_> = a.difference(&b).cloned().collect();
/// assert_eq!(diff, [1].iter().cloned().collect());
///
/// // Note that difference is not symmetric,
/// // and `b - a` means something else:
/// let diff: HashSet<_> = b.difference(&a).cloned().collect();
/// assert_eq!(diff, [4].iter().cloned().collect());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn difference<'a>(&'a self, other: &'a HashSet<T, S>) -> Difference<'a, T, S> {
Difference {
iter: self.iter(),
other: other,
}
}
/// Visit the values representing the symmetric difference.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
/// let a: HashSet<_> = [1, 2, 3].iter().cloned().collect();
/// let b: HashSet<_> = [4, 2, 3, 4].iter().cloned().collect();
///
/// // Print 1, 4 in arbitrary order.
/// for x in a.symmetric_difference(&b) {
/// println!("{}", x);
/// }
///
/// let diff1: HashSet<_> = a.symmetric_difference(&b).cloned().collect();
/// let diff2: HashSet<_> = b.symmetric_difference(&a).cloned().collect();
///
/// assert_eq!(diff1, diff2);
/// assert_eq!(diff1, [1, 4].iter().cloned().collect());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn symmetric_difference<'a>(&'a self, other: &'a HashSet<T, S>)
-> SymmetricDifference<'a, T, S> {
SymmetricDifference { iter: self.difference(other).chain(other.difference(self)) }
}
/// Visit the values representing the intersection.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
/// let a: HashSet<_> = [1, 2, 3].iter().cloned().collect();
/// let b: HashSet<_> = [4, 2, 3, 4].iter().cloned().collect();
///
/// // Print 2, 3 in arbitrary order.
/// for x in a.intersection(&b) {
/// println!("{}", x);
/// }
///
/// let diff: HashSet<_> = a.intersection(&b).cloned().collect();
/// assert_eq!(diff, [2, 3].iter().cloned().collect());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn intersection<'a>(&'a self, other: &'a HashSet<T, S>) -> Intersection<'a, T, S> {
Intersection {
iter: self.iter(),
other: other,
}
}
/// Visit the values representing the union.<|fim▁hole|> ///
/// ```
/// use std::collections::HashSet;
/// let a: HashSet<_> = [1, 2, 3].iter().cloned().collect();
/// let b: HashSet<_> = [4, 2, 3, 4].iter().cloned().collect();
///
/// // Print 1, 2, 3, 4 in arbitrary order.
/// for x in a.union(&b) {
/// println!("{}", x);
/// }
///
/// let diff: HashSet<_> = a.union(&b).cloned().collect();
/// assert_eq!(diff, [1, 2, 3, 4].iter().cloned().collect());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn union<'a>(&'a self, other: &'a HashSet<T, S>) -> Union<'a, T, S> {
Union { iter: self.iter().chain(other.difference(self)) }
}
/// Returns the number of elements in the set.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
///
/// let mut v = HashSet::new();
/// assert_eq!(v.len(), 0);
/// v.insert(1);
/// assert_eq!(v.len(), 1);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len(&self) -> usize { self.map.len() }
/// Returns true if the set contains no elements.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
///
/// let mut v = HashSet::new();
/// assert!(v.is_empty());
/// v.insert(1);
/// assert!(!v.is_empty());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_empty(&self) -> bool { self.map.is_empty() }
/// Clears the set, returning all elements in an iterator.
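///
/// # Examples
///
/// ```
/// # #![feature(drain)]
/// use std::collections::HashSet;
///
/// let mut set: HashSet<_> = [1, 2, 3].iter().cloned().collect();
/// assert!(!set.is_empty());
///
/// // Print 1, 2, 3 in an arbitrary order.
/// for i in set.drain() {
///     println!("{}", i);
/// }
///
/// assert!(set.is_empty());
/// ```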
#[inline]
#[unstable(feature = "drain",
reason = "matches collection reform specification, waiting for dust to settle")]
pub fn drain(&mut self) -> Drain<T> {
fn first<A, B>((a, _): (A, B)) -> A { a }
let first: fn((T, ())) -> T = first; // coerce to fn pointer
Drain { iter: self.map.drain().map(first) }
}
/// Clears the set, removing all values.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
///
/// let mut v = HashSet::new();
/// v.insert(1);
/// v.clear();
/// assert!(v.is_empty());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn clear(&mut self) { self.map.clear() }
/// Returns `true` if the set contains a value.
///
/// The value may be any borrowed form of the set's value type, but
/// `Hash` and `Eq` on the borrowed form *must* match those for
/// the value type.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
///
/// let set: HashSet<_> = [1, 2, 3].iter().cloned().collect();
/// assert_eq!(set.contains(&1), true);
/// assert_eq!(set.contains(&4), false);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn contains<Q: ?Sized>(&self, value: &Q) -> bool
where T: Borrow<Q>, Q: Hash + Eq
{
self.map.contains_key(value)
}
/// Returns `true` if the set has no elements in common with `other`.
/// This is equivalent to checking for an empty intersection.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
///
/// let a: HashSet<_> = [1, 2, 3].iter().cloned().collect();
/// let mut b = HashSet::new();
///
/// assert_eq!(a.is_disjoint(&b), true);
/// b.insert(4);
/// assert_eq!(a.is_disjoint(&b), true);
/// b.insert(1);
/// assert_eq!(a.is_disjoint(&b), false);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_disjoint(&self, other: &HashSet<T, S>) -> bool {
self.iter().all(|v| !other.contains(v))
}
/// Returns `true` if the set is a subset of another.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
///
/// let sup: HashSet<_> = [1, 2, 3].iter().cloned().collect();
/// let mut set = HashSet::new();
///
/// assert_eq!(set.is_subset(&sup), true);
/// set.insert(2);
/// assert_eq!(set.is_subset(&sup), true);
/// set.insert(4);
/// assert_eq!(set.is_subset(&sup), false);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_subset(&self, other: &HashSet<T, S>) -> bool {
self.iter().all(|v| other.contains(v))
}
/// Returns `true` if the set is a superset of another.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
///
/// let sub: HashSet<_> = [1, 2].iter().cloned().collect();
/// let mut set = HashSet::new();
///
/// assert_eq!(set.is_superset(&sub), false);
///
/// set.insert(0);
/// set.insert(1);
/// assert_eq!(set.is_superset(&sub), false);
///
/// set.insert(2);
/// assert_eq!(set.is_superset(&sub), true);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_superset(&self, other: &HashSet<T, S>) -> bool {
other.is_subset(self)
}
/// Adds a value to the set. Returns `true` if the value was not already
/// present in the set.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
///
/// let mut set = HashSet::new();
///
/// assert_eq!(set.insert(2), true);
/// assert_eq!(set.insert(2), false);
/// assert_eq!(set.len(), 1);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(&mut self, value: T) -> bool { self.map.insert(value, ()).is_none() }
/// Removes a value from the set. Returns `true` if the value was
/// present in the set.
///
/// The value may be any borrowed form of the set's value type, but
/// `Hash` and `Eq` on the borrowed form *must* match those for
/// the value type.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
///
/// let mut set = HashSet::new();
///
/// set.insert(2);
/// assert_eq!(set.remove(&2), true);
/// assert_eq!(set.remove(&2), false);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn remove<Q: ?Sized>(&mut self, value: &Q) -> bool
where T: Borrow<Q>, Q: Hash + Eq
{
self.map.remove(value).is_some()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, S> PartialEq for HashSet<T, S>
where T: Eq + Hash, S: HashState
{
fn eq(&self, other: &HashSet<T, S>) -> bool {
if self.len() != other.len() { return false; }
self.iter().all(|key| other.contains(key))
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, S> Eq for HashSet<T, S>
where T: Eq + Hash, S: HashState
{}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, S> fmt::Debug for HashSet<T, S>
where T: Eq + Hash + fmt::Debug,
S: HashState
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_set().entries(self.iter()).finish()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, S> FromIterator<T> for HashSet<T, S>
where T: Eq + Hash,
S: HashState + Default,
{
fn from_iter<I: IntoIterator<Item=T>>(iterable: I) -> HashSet<T, S> {
let iter = iterable.into_iter();
let lower = iter.size_hint().0;
let mut set = HashSet::with_capacity_and_hash_state(lower, Default::default());
set.extend(iter);
set
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, S> Extend<T> for HashSet<T, S>
where T: Eq + Hash,
S: HashState,
{
fn extend<I: IntoIterator<Item=T>>(&mut self, iter: I) {
for k in iter {
self.insert(k);
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, S> Default for HashSet<T, S>
where T: Eq + Hash,
S: HashState + Default,
{
#[stable(feature = "rust1", since = "1.0.0")]
fn default() -> HashSet<T, S> {
HashSet::with_hash_state(Default::default())
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, 'b, T, S> BitOr<&'b HashSet<T, S>> for &'a HashSet<T, S>
where T: Eq + Hash + Clone,
S: HashState + Default,
{
type Output = HashSet<T, S>;
/// Returns the union of `self` and `rhs` as a new `HashSet<T, S>`.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
///
/// let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
/// let b: HashSet<_> = vec![3, 4, 5].into_iter().collect();
///
/// let set = &a | &b;
///
/// let mut i = 0;
/// let expected = [1, 2, 3, 4, 5];
/// for x in &set {
/// assert!(expected.contains(x));
/// i += 1;
/// }
/// assert_eq!(i, expected.len());
/// ```
fn bitor(self, rhs: &HashSet<T, S>) -> HashSet<T, S> {
self.union(rhs).cloned().collect()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, 'b, T, S> BitAnd<&'b HashSet<T, S>> for &'a HashSet<T, S>
where T: Eq + Hash + Clone,
S: HashState + Default,
{
type Output = HashSet<T, S>;
/// Returns the intersection of `self` and `rhs` as a new `HashSet<T, S>`.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
///
/// let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
/// let b: HashSet<_> = vec![2, 3, 4].into_iter().collect();
///
/// let set = &a & &b;
///
/// let mut i = 0;
/// let expected = [2, 3];
/// for x in &set {
/// assert!(expected.contains(x));
/// i += 1;
/// }
/// assert_eq!(i, expected.len());
/// ```
fn bitand(self, rhs: &HashSet<T, S>) -> HashSet<T, S> {
self.intersection(rhs).cloned().collect()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, 'b, T, S> BitXor<&'b HashSet<T, S>> for &'a HashSet<T, S>
where T: Eq + Hash + Clone,
S: HashState + Default,
{
type Output = HashSet<T, S>;
/// Returns the symmetric difference of `self` and `rhs` as a new `HashSet<T, S>`.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
///
/// let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
/// let b: HashSet<_> = vec![3, 4, 5].into_iter().collect();
///
/// let set = &a ^ &b;
///
/// let mut i = 0;
/// let expected = [1, 2, 4, 5];
/// for x in &set {
/// assert!(expected.contains(x));
/// i += 1;
/// }
/// assert_eq!(i, expected.len());
/// ```
fn bitxor(self, rhs: &HashSet<T, S>) -> HashSet<T, S> {
self.symmetric_difference(rhs).cloned().collect()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, 'b, T, S> Sub<&'b HashSet<T, S>> for &'a HashSet<T, S>
where T: Eq + Hash + Clone,
S: HashState + Default,
{
type Output = HashSet<T, S>;
/// Returns the difference of `self` and `rhs` as a new `HashSet<T, S>`.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
///
/// let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
/// let b: HashSet<_> = vec![3, 4, 5].into_iter().collect();
///
/// let set = &a - &b;
///
/// let mut i = 0;
/// let expected = [1, 2];
/// for x in &set {
/// assert!(expected.contains(x));
/// i += 1;
/// }
/// assert_eq!(i, expected.len());
/// ```
fn sub(self, rhs: &HashSet<T, S>) -> HashSet<T, S> {
self.difference(rhs).cloned().collect()
}
}
/// HashSet iterator
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, K: 'a> {
iter: Keys<'a, K, ()>
}
/// HashSet move iterator
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<K> {
iter: Map<map::IntoIter<K, ()>, fn((K, ())) -> K>
}
/// HashSet drain iterator
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Drain<'a, K: 'a> {
iter: Map<map::Drain<'a, K, ()>, fn((K, ())) -> K>,
}
/// Intersection iterator
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Intersection<'a, T: 'a, S: 'a> {
// iterator of the first set
iter: Iter<'a, T>,
// the second set
other: &'a HashSet<T, S>,
}
/// Difference iterator
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Difference<'a, T: 'a, S: 'a> {
// iterator of the first set
iter: Iter<'a, T>,
// the second set
other: &'a HashSet<T, S>,
}
/// Symmetric difference iterator.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct SymmetricDifference<'a, T: 'a, S: 'a> {
iter: Chain<Difference<'a, T, S>, Difference<'a, T, S>>
}
/// Set union iterator.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Union<'a, T: 'a, S: 'a> {
iter: Chain<Iter<'a, T>, Difference<'a, T, S>>
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, S> IntoIterator for &'a HashSet<T, S>
where T: Eq + Hash, S: HashState
{
type Item = &'a T;
type IntoIter = Iter<'a, T>;
fn into_iter(self) -> Iter<'a, T> {
self.iter()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, S> IntoIterator for HashSet<T, S>
where T: Eq + Hash,
S: HashState
{
type Item = T;
type IntoIter = IntoIter<T>;
/// Creates a consuming iterator, that is, one that moves each value out
/// of the set in arbitrary order. The set cannot be used after calling
/// this.
///
/// # Examples
///
/// ```
/// use std::collections::HashSet;
/// let mut set = HashSet::new();
/// set.insert("a".to_string());
/// set.insert("b".to_string());
///
/// // Not possible to collect to a Vec<String> with a regular `.iter()`.
/// let v: Vec<String> = set.into_iter().collect();
///
/// // Will print in an arbitrary order.
/// for x in &v {
/// println!("{}", x);
/// }
/// ```
fn into_iter(self) -> IntoIter<T> {
fn first<A, B>((a, _): (A, B)) -> A { a }
let first: fn((T, ())) -> T = first;
IntoIter { iter: self.map.into_iter().map(first) }
}
}
impl<'a, K> Clone for Iter<'a, K> {
fn clone(&self) -> Iter<'a, K> { Iter { iter: self.iter.clone() } }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K> Iterator for Iter<'a, K> {
type Item = &'a K;
fn next(&mut self) -> Option<&'a K> { self.iter.next() }
fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K> ExactSizeIterator for Iter<'a, K> {
fn len(&self) -> usize { self.iter.len() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K> Iterator for IntoIter<K> {
type Item = K;
fn next(&mut self) -> Option<K> { self.iter.next() }
fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K> ExactSizeIterator for IntoIter<K> {
fn len(&self) -> usize { self.iter.len() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K> Iterator for Drain<'a, K> {
type Item = K;
fn next(&mut self) -> Option<K> { self.iter.next() }
fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K> ExactSizeIterator for Drain<'a, K> {
fn len(&self) -> usize { self.iter.len() }
}
impl<'a, T, S> Clone for Intersection<'a, T, S> {
fn clone(&self) -> Intersection<'a, T, S> {
Intersection { iter: self.iter.clone(), ..*self }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, S> Iterator for Intersection<'a, T, S>
where T: Eq + Hash, S: HashState
{
type Item = &'a T;
fn next(&mut self) -> Option<&'a T> {
loop {
match self.iter.next() {
None => return None,
Some(elt) => if self.other.contains(elt) {
return Some(elt)
},
}
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
let (_, upper) = self.iter.size_hint();
(0, upper)
}
}
impl<'a, T, S> Clone for Difference<'a, T, S> {
fn clone(&self) -> Difference<'a, T, S> {
Difference { iter: self.iter.clone(), ..*self }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, S> Iterator for Difference<'a, T, S>
where T: Eq + Hash, S: HashState
{
type Item = &'a T;
fn next(&mut self) -> Option<&'a T> {
loop {
match self.iter.next() {
None => return None,
Some(elt) => if !self.other.contains(elt) {
return Some(elt)
},
}
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
let (_, upper) = self.iter.size_hint();
(0, upper)
}
}
impl<'a, T, S> Clone for SymmetricDifference<'a, T, S> {
fn clone(&self) -> SymmetricDifference<'a, T, S> {
SymmetricDifference { iter: self.iter.clone() }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, S> Iterator for SymmetricDifference<'a, T, S>
where T: Eq + Hash, S: HashState
{
type Item = &'a T;
fn next(&mut self) -> Option<&'a T> { self.iter.next() }
fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
impl<'a, T, S> Clone for Union<'a, T, S> {
fn clone(&self) -> Union<'a, T, S> { Union { iter: self.iter.clone() } }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, S> Iterator for Union<'a, T, S>
where T: Eq + Hash, S: HashState
{
type Item = &'a T;
fn next(&mut self) -> Option<&'a T> { self.iter.next() }
fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
#[cfg(test)]
mod test_set {
use prelude::v1::*;
use super::HashSet;
#[test]
fn test_disjoint() {
let mut xs = HashSet::new();
let mut ys = HashSet::new();
assert!(xs.is_disjoint(&ys));
assert!(ys.is_disjoint(&xs));
assert!(xs.insert(5));
assert!(ys.insert(11));
assert!(xs.is_disjoint(&ys));
assert!(ys.is_disjoint(&xs));
assert!(xs.insert(7));
assert!(xs.insert(19));
assert!(xs.insert(4));
assert!(ys.insert(2));
assert!(ys.insert(-11));
assert!(xs.is_disjoint(&ys));
assert!(ys.is_disjoint(&xs));
assert!(ys.insert(7));
assert!(!xs.is_disjoint(&ys));
assert!(!ys.is_disjoint(&xs));
}
#[test]
fn test_subset_and_superset() {
let mut a = HashSet::new();
assert!(a.insert(0));
assert!(a.insert(5));
assert!(a.insert(11));
assert!(a.insert(7));
let mut b = HashSet::new();
assert!(b.insert(0));
assert!(b.insert(7));
assert!(b.insert(19));
assert!(b.insert(250));
assert!(b.insert(11));
assert!(b.insert(200));
assert!(!a.is_subset(&b));
assert!(!a.is_superset(&b));
assert!(!b.is_subset(&a));
assert!(!b.is_superset(&a));
assert!(b.insert(5));
assert!(a.is_subset(&b));
assert!(!a.is_superset(&b));
assert!(!b.is_subset(&a));
assert!(b.is_superset(&a));
}
#[test]
fn test_iterate() {
let mut a = HashSet::new();
for i in 0..32 {
assert!(a.insert(i));
}
let mut observed: u32 = 0;
for k in &a {
observed |= 1 << *k;
}
assert_eq!(observed, 0xFFFF_FFFF);
}
#[test]
fn test_intersection() {
let mut a = HashSet::new();
let mut b = HashSet::new();
assert!(a.insert(11));
assert!(a.insert(1));
assert!(a.insert(3));
assert!(a.insert(77));
assert!(a.insert(103));
assert!(a.insert(5));
assert!(a.insert(-5));
assert!(b.insert(2));
assert!(b.insert(11));
assert!(b.insert(77));
assert!(b.insert(-9));
assert!(b.insert(-42));
assert!(b.insert(5));
assert!(b.insert(3));
let mut i = 0;
let expected = [3, 5, 11, 77];
for x in a.intersection(&b) {
assert!(expected.contains(x));
i += 1
}
assert_eq!(i, expected.len());
}
#[test]
fn test_difference() {
let mut a = HashSet::new();
let mut b = HashSet::new();
assert!(a.insert(1));
assert!(a.insert(3));
assert!(a.insert(5));
assert!(a.insert(9));
assert!(a.insert(11));
assert!(b.insert(3));
assert!(b.insert(9));
let mut i = 0;
let expected = [1, 5, 11];
for x in a.difference(&b) {
assert!(expected.contains(x));
i += 1
}
assert_eq!(i, expected.len());
}
#[test]
fn test_symmetric_difference() {
let mut a = HashSet::new();
let mut b = HashSet::new();
assert!(a.insert(1));
assert!(a.insert(3));
assert!(a.insert(5));
assert!(a.insert(9));
assert!(a.insert(11));
assert!(b.insert(-2));
assert!(b.insert(3));
assert!(b.insert(9));
assert!(b.insert(14));
assert!(b.insert(22));
let mut i = 0;
let expected = [-2, 1, 5, 11, 14, 22];
for x in a.symmetric_difference(&b) {
assert!(expected.contains(x));
i += 1
}
assert_eq!(i, expected.len());
}
#[test]
fn test_union() {
let mut a = HashSet::new();
let mut b = HashSet::new();
assert!(a.insert(1));
assert!(a.insert(3));
assert!(a.insert(5));
assert!(a.insert(9));
assert!(a.insert(11));
assert!(a.insert(16));
assert!(a.insert(19));
assert!(a.insert(24));
assert!(b.insert(-2));
assert!(b.insert(1));
assert!(b.insert(5));
assert!(b.insert(9));
assert!(b.insert(13));
assert!(b.insert(19));
let mut i = 0;
let expected = [-2, 1, 3, 5, 9, 11, 13, 16, 19, 24];
for x in a.union(&b) {
assert!(expected.contains(x));
i += 1
}
assert_eq!(i, expected.len());
}
#[test]
fn test_from_iter() {
let xs = [1, 2, 3, 4, 5, 6, 7, 8, 9];
let set: HashSet<_> = xs.iter().cloned().collect();
for x in &xs {
assert!(set.contains(x));
}
}
#[test]
fn test_move_iter() {
let hs = {
let mut hs = HashSet::new();
hs.insert('a');
hs.insert('b');
hs
};
let v = hs.into_iter().collect::<Vec<char>>();
assert!(v == ['a', 'b'] || v == ['b', 'a']);
}
#[test]
fn test_eq() {
// These constants once happened to expose a bug in insert().
// I'm keeping them around to prevent a regression.
let mut s1 = HashSet::new();
s1.insert(1);
s1.insert(2);
s1.insert(3);
let mut s2 = HashSet::new();
s2.insert(1);
s2.insert(2);
assert!(s1 != s2);
s2.insert(3);
assert_eq!(s1, s2);
}
#[test]
fn test_show() {
let mut set = HashSet::new();
let empty = HashSet::<i32>::new();
set.insert(1);
set.insert(2);
let set_str = format!("{:?}", set);
assert!(set_str == "{1, 2}" || set_str == "{2, 1}");
assert_eq!(format!("{:?}", empty), "{}");
}
#[test]
fn test_trivial_drain() {
let mut s = HashSet::<i32>::new();
for _ in s.drain() {}
assert!(s.is_empty());
drop(s);
let mut s = HashSet::<i32>::new();
drop(s.drain());
assert!(s.is_empty());
}
#[test]
fn test_drain() {
let mut s: HashSet<_> = (1..100).collect();
// try this a bunch of times to make sure we don't screw up internal state.
for _ in 0..20 {
assert_eq!(s.len(), 99);
{
let mut last_i = 0;
let mut d = s.drain();
for (i, x) in d.by_ref().take(50).enumerate() {
last_i = i;
assert!(x != 0);
}
assert_eq!(last_i, 49);
}
for _ in &s { panic!("s should be empty!"); }
// reset to try again.
s.extend(1..100);
}
}
}<|fim▁end|> | ///
/// # Examples |
<|file_name|>pass.cpp<|end_file_name|><|fim▁begin|>#include <stdio.h>
#include <stdlib.h>
#include <string.h>
int main(){
char text[40], text2[40];
printf("necesito una buena palabra: ");
gets(text);
printf("voy a necesitar otra palabra, si es la misma sorpresa... \n");
gets(text2);
if (strcmp(text, text2) ==0)
printf("Correcto!! son iguales enhorabuena \n");
else
printf("pues nada son distintas, no haces caso... \n" );
return EXIT_SUCCESS;<|fim▁hole|>
}<|fim▁end|> | |
<|file_name|>render.rs<|end_file_name|><|fim▁begin|>// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
use super::{CardTemplate, NoteType, NoteTypeKind};
use crate::{
card::{Card, CardID},
collection::Collection,
err::{AnkiError, Result},
i18n::{I18n, TR},
notes::{Note, NoteID},
template::{field_is_empty, render_card, ParsedTemplate, RenderedNode},
};
use std::{borrow::Cow, collections::HashMap};
pub struct RenderCardOutput {
pub qnodes: Vec<RenderedNode>,
pub anodes: Vec<RenderedNode>,
}
impl Collection {
/// Render an existing card saved in the database.
pub fn render_existing_card(&mut self, cid: CardID, browser: bool) -> Result<RenderCardOutput> {
let card = self
.storage
.get_card(cid)?
.ok_or_else(|| AnkiError::invalid_input("no such card"))?;
let note = self
.storage
.get_note(card.note_id)?
.ok_or_else(|| AnkiError::invalid_input("no such note"))?;
let nt = self
.get_notetype(note.notetype_id)?
.ok_or_else(|| AnkiError::invalid_input("no such notetype"))?;
let template = match nt.config.kind() {
NoteTypeKind::Normal => nt.templates.get(card.template_idx as usize),
NoteTypeKind::Cloze => nt.templates.get(0),
}
.ok_or_else(|| AnkiError::invalid_input("missing template"))?;
self.render_card_inner(¬e, &card, &nt, template, browser)
}
/// Render a card that may not yet have been added.
/// The provided ordinal will be used if the template has not yet been saved.
/// If fill_empty is set, note will be mutated.
pub fn render_uncommitted_card(
&mut self,
note: &mut Note,
template: &CardTemplate,
card_ord: u16,
fill_empty: bool,
) -> Result<RenderCardOutput> {
let card = self.existing_or_synthesized_card(note.id, template.ord, card_ord)?;
let nt = self
.get_notetype(note.notetype_id)?
.ok_or_else(|| AnkiError::invalid_input("no such notetype"))?;
if fill_empty {
fill_empty_fields(note, &template.config.q_format, &nt, &self.i18n);
}
self.render_card_inner(note, &card, &nt, template, false)
}
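// Usage sketch (hypothetical `col`, `note` and `template` values; none are
// defined in this file):
//
// let out = col.render_uncommitted_card(&mut note, &template, 0, true)?;
// // With fill_empty=true, empty note fields are filled with "(FieldName)"
// // placeholders (or sample cloze text) before rendering; see fill_empty_fields.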
fn existing_or_synthesized_card(
&self,
nid: NoteID,
template_ord: Option<u32>,
card_ord: u16,<|fim▁hole|> if let Some(card) = self.storage.get_card_by_ordinal(nid, ord as u16)? {
return Ok(card);
}
}
// no existing card; synthesize one
Ok(Card {
template_idx: card_ord,
..Default::default()
})
}
fn render_card_inner(
&mut self,
note: &Note,
card: &Card,
nt: &NoteType,
template: &CardTemplate,
browser: bool,
) -> Result<RenderCardOutput> {
let mut field_map = note.fields_map(&nt.fields);
let card_num;
self.add_special_fields(&mut field_map, note, card, &nt, template)?;
// due to lifetime restrictions we need to add card number here
card_num = format!("c{}", card.template_idx + 1);
field_map.entry(&card_num).or_insert_with(|| "1".into());
let (qfmt, afmt) = if browser {
(
template.question_format_for_browser(),
template.answer_format_for_browser(),
)
} else {
(
template.config.q_format.as_str(),
template.config.a_format.as_str(),
)
};
let (qnodes, anodes) = render_card(
qfmt,
afmt,
&field_map,
card.template_idx,
nt.is_cloze(),
&self.i18n,
)?;
Ok(RenderCardOutput { qnodes, anodes })
}
// Add special fields if they don't clobber note fields
fn add_special_fields(
&mut self,
map: &mut HashMap<&str, Cow<str>>,
note: &Note,
card: &Card,
nt: &NoteType,
template: &CardTemplate,
) -> Result<()> {
let tags = note.tags.join(" ");
map.entry("Tags").or_insert_with(|| tags.into());
map.entry("Type").or_insert_with(|| nt.name.clone().into());
let deck_name: Cow<str> = self
.get_deck(if card.original_deck_id.0 > 0 {
card.original_deck_id
} else {
card.deck_id
})?
.map(|d| d.human_name().into())
.unwrap_or_else(|| "(Deck)".into());
let subdeck_name = deck_name.rsplit("::").next().unwrap();
map.entry("Subdeck")
.or_insert_with(|| subdeck_name.to_string().into());
map.entry("Deck")
.or_insert_with(|| deck_name.to_string().into());
map.entry("CardFlag")
.or_insert_with(|| flag_name(card.flags).into());
map.entry("Card")
.or_insert_with(|| template.name.clone().into());
Ok(())
}
}
fn flag_name(n: u8) -> &'static str {
match n {
1 => "flag1",
2 => "flag2",
3 => "flag3",
4 => "flag4",
_ => "",
}
}
fn fill_empty_fields(note: &mut Note, qfmt: &str, nt: &NoteType, i18n: &I18n) {
if let Ok(tmpl) = ParsedTemplate::from_text(qfmt) {
let cloze_fields = tmpl.cloze_fields();
for (val, field) in note.fields.iter_mut().zip(nt.fields.iter()) {
if field_is_empty(val) {
if cloze_fields.contains(&field.name.as_str()) {
*val = i18n.tr(TR::CardTemplatesSampleCloze).into();
} else {
*val = format!("({})", field.name);
}
}
}
}
}<|fim▁end|> | ) -> Result<Card> {
// fetch existing card
if let Some(ord) = template_ord { |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>import logging
from django.conf import settings
from django.db import models
from mkt.site.mail import send_mail
from mkt.site.models import ModelBase
from mkt.users.models import UserProfile
from mkt.webapps.models import Webapp
from mkt.websites.models import Website
log = logging.getLogger('z.abuse')
class AbuseReport(ModelBase):
# NULL if the reporter is anonymous.
reporter = models.ForeignKey(UserProfile, null=True,
blank=True, related_name='abuse_reported')
ip_address = models.CharField(max_length=255, default='0.0.0.0')
# An abuse report can be for an addon, a user, or a website. Only one of
# these should be null.
addon = models.ForeignKey(Webapp, null=True, related_name='abuse_reports')
user = models.ForeignKey(UserProfile, null=True,
related_name='abuse_reports')
website = models.ForeignKey(Website, null=True,
related_name='abuse_reports')
message = models.TextField()
read = models.BooleanField(default=False)
class Meta:
db_table = 'abuse_reports'
@property
def object(self):
return self.addon or self.user or self.website
def send(self):
obj = self.object
if self.reporter:
user_name = '%s (%s)' % (self.reporter.name, self.reporter.email)
else:
user_name = 'An anonymous coward'
if self.addon:
type_ = 'App'
elif self.user:
type_ = 'User'
else:
type_ = 'Website'
subject = u'[%s] Abuse Report for %s' % (type_, obj.name)
msg = u'%s reported abuse for %s (%s%s).\n\n%s' % (
user_name, obj.name, settings.SITE_URL, obj.get_url_path(),
self.message)
send_mail(subject, msg, recipient_list=(settings.ABUSE_EMAIL,))
@classmethod
def recent_high_abuse_reports(cls, threshold, period, addon_id=None):
"""
Returns AbuseReport objects whose report count exceeds the given threshold,
counting reports created on or after the `period` datetime cutoff (the SQL
below compares `created >= period`). Filters by addon_id if provided.
E.g. more than 5 abuse reports per webapp created in the past 7 days.
"""
abuse_sql = ['''
SELECT `abuse_reports`.*,
COUNT(`abuse_reports`.`addon_id`) AS `num_reports`
FROM `abuse_reports`
INNER JOIN `addons` ON (`abuse_reports`.`addon_id` = `addons`.`id`)
WHERE `abuse_reports`.`created` >= %s ''']
params = [period]
if addon_id:
abuse_sql.append('AND `addons`.`id` = %s ')
params.append(addon_id)
abuse_sql.append('GROUP BY addon_id HAVING num_reports > %s')
params.append(threshold)
return list(cls.objects.raw(''.join(abuse_sql), params))
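# Example (sketch; note that `period` is a datetime cutoff, not a day count):
#   from datetime import datetime, timedelta
#   cutoff = datetime.now() - timedelta(days=7)
#   noisy = AbuseReport.recent_high_abuse_reports(5, cutoff)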
def send_abuse_report(request, obj, message):
report = AbuseReport(ip_address=request.META.get('REMOTE_ADDR'),
message=message)
if request.user.is_authenticated():
report.reporter = request.user
if isinstance(obj, Webapp):
report.addon = obj
elif isinstance(obj, UserProfile):
report.user = obj
elif isinstance(obj, Website):<|fim▁hole|> report.save()
report.send()
# Trigger addon high abuse report detection task.
if isinstance(obj, Webapp):
from mkt.webapps.tasks import find_abuse_escalations
find_abuse_escalations.delay(obj.id)<|fim▁end|> | report.website = obj |
<|file_name|>linalg.py<|end_file_name|><|fim▁begin|>"""Linear Algebra Helper Routines."""
from warnings import warn
import numpy as np
from scipy import sparse
from scipy.sparse.linalg import aslinearoperator
from scipy.linalg import lapack, get_blas_funcs, eig, svd
from .params import set_tol
def norm(x, pnorm='2'):
"""2-norm of a vector.
Parameters
----------
x : array_like
Vector of complex or real values
pnorm : string
'2' calculates the 2-norm
'inf' calculates the infinity-norm<|fim▁hole|> Returns
-------
n : float
The requested norm of the vector (2-norm by default)
Notes
-----
- currently 1+ order of magnitude faster than scipy.linalg.norm(x), which
calls sqrt(numpy.sum(real((conjugate(x)*x)),axis=0)) resulting in an
extra copy
- only handles the 2-norm and infinity-norm for vectors
See Also
--------
scipy.linalg.norm : scipy general matrix or vector norm
"""
x = np.ravel(x)
if pnorm == '2':
return np.sqrt(np.inner(x.conj(), x).real)
if pnorm == 'inf':
return np.max(np.abs(x))
raise ValueError('Only the 2-norm and infinity-norm are supported')
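# Example (doctest-style sketch):
# >>> norm(np.array([3.0, 4.0]))          # 2-norm -> 5.0
# >>> norm(np.array([3.0, -4.0]), 'inf')  # infinity-norm -> 4.0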
def infinity_norm(A):
"""Infinity norm of a matrix (maximum absolute row sum).
Parameters
----------
A : csr_matrix, csc_matrix, sparse, or numpy matrix
Sparse or dense matrix
Returns
-------
n : float
Infinity norm of the matrix
Notes
-----
- This serves as an upper bound on spectral radius.
- csr and csc avoid a deep copy
- dense calls scipy.linalg.norm
See Also
--------
scipy.linalg.norm : dense matrix norms
Examples
--------
>>> import numpy as np
>>> from scipy.sparse import spdiags
>>> from pyamg.util.linalg import infinity_norm
>>> n=10
>>> e = np.ones((n,1)).ravel()
>>> data = [ -1*e, 2*e, -1*e ]
>>> A = spdiags(data,[-1,0,1],n,n)
>>> print(infinity_norm(A))
4.0
"""
if sparse.isspmatrix_csr(A) or sparse.isspmatrix_csc(A):
# avoid copying index and ptr arrays
abs_A = A.__class__((np.abs(A.data), A.indices, A.indptr),
shape=A.shape)
return (abs_A * np.ones((A.shape[1]), dtype=A.dtype)).max()
if sparse.isspmatrix(A):
return (abs(A) * np.ones((A.shape[1]), dtype=A.dtype)).max()
return np.dot(np.abs(A), np.ones((A.shape[1],), dtype=A.dtype)).max()
def axpy(x, y, a=1.0):
"""Quick level-1 call to BLAS y = a*x+y.
Parameters
----------
x : array_like
nx1 real or complex vector
y : array_like
nx1 real or complex vector
a : float
real or complex scalar
Returns
-------
y : array_like
Input variable y is rewritten
Notes
-----
The call to get_blas_funcs automatically determines the prefix for the blas
call.
"""
fn = get_blas_funcs(['axpy'], [x, y])[0]
fn(x, y, a)
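# Example (sketch; y is overwritten in place):
# >>> x = np.ones(3)
# >>> y = np.arange(3.)
# >>> axpy(x, y, 2.0)   # y becomes array([2., 3., 4.])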
# def approximate_spectral_radius(A, tol=0.1, maxiter=10, symmetric=False):
# """approximate the spectral radius of a matrix
#
# Parameters
# ----------
#
# A : {dense or sparse matrix}
# E.g. csr_matrix, csc_matrix, ndarray, etc.
# tol : {scalar}
# Tolerance of approximation
# maxiter : {integer}
# Maximum number of iterations to perform
# symmetric : {boolean}
# True if A is symmetric, False otherwise (default)
#
# Returns
# -------
# An approximation to the spectral radius of A
#
# """
# if symmetric:
# method = eigen_symmetric
# else:
# method = eigen
#
# return norm( method(A, k=1, tol=0.1, which='LM', maxiter=maxiter,
# return_eigenvectors=False) )
def _approximate_eigenvalues(A, maxiter, symmetric=None, initial_guess=None):
"""Apprixmate eigenvalues.
Used by approximate_spectral_radius and condest.
Returns [W, E, H, V, breakdown_flag], where W and E are the eigenvectors
and eigenvalues of the Hessenberg matrix H, respectively, and V is the
Krylov space. breakdown_flag denotes whether Lanczos/Arnoldi suffered
breakdown. E is therefore the approximate eigenvalues of A.
To obtain approximate eigenvectors of A, compute V*W.
"""
A = aslinearoperator(A) # A could be dense or sparse, or something weird
# Choose tolerance for deciding if break-down has occurred
breakdown = set_tol(A.dtype)
breakdown_flag = False
if A.shape[0] != A.shape[1]:
raise ValueError('expected square matrix')
maxiter = min(A.shape[0], maxiter)
if initial_guess is None:
v0 = np.random.rand(A.shape[1], 1)
if A.dtype == complex:
v0 = v0 + 1.0j * np.random.rand(A.shape[1], 1)
else:
v0 = initial_guess
v0 /= norm(v0)
# Important to type H based on v0, so that a real nonsymmetric matrix, can
# have an imaginary initial guess for its Arnoldi Krylov space
H = np.zeros((maxiter+1, maxiter),
dtype=np.find_common_type([v0.dtype, A.dtype], []))
V = [v0]
beta = 0.0
for j in range(maxiter):
w = A * V[-1]
if symmetric:
if j >= 1:
H[j-1, j] = beta
w -= beta * V[-2]
alpha = np.dot(np.conjugate(w.ravel()), V[-1].ravel())
H[j, j] = alpha
w -= alpha * V[-1] # axpy(V[-1],w,-alpha)
beta = norm(w)
H[j+1, j] = beta
if (H[j+1, j] < breakdown):
breakdown_flag = True
break
w /= beta
V.append(w)
V = V[-2:] # retain only last two vectors
else:
# orthogonalize against Vs
for i, v in enumerate(V):
H[i, j] = np.dot(np.conjugate(v.ravel()), w.ravel())
w = w - H[i, j]*v
H[j+1, j] = norm(w)
if (H[j+1, j] < breakdown):
breakdown_flag = True
if H[j+1, j] != 0.0:
w = w/H[j+1, j]
V.append(w)
break
w = w/H[j+1, j]
V.append(w)
# if upper 2x2 block of Hessenberg matrix H is almost symmetric,
# and the user has not explicitly specified symmetric=False,
# then switch to symmetric Lanczos algorithm
# if symmetric is not False and j == 1:
# if abs(H[1,0] - H[0,1]) < 1e-12:
# #print("using symmetric mode")
# symmetric = True
# V = V[1:]
# H[1,0] = H[0,1]
# beta = H[2,1]
# print("Approximated spectral radius in %d iterations" % (j + 1))
Eigs, Vects = eig(H[:j+1, :j+1], left=False, right=True)
return (Vects, Eigs, H, V, breakdown_flag)
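# Sketch of recovering approximate eigenpairs of A from the return values
# (this mirrors the v0 update inside approximate_spectral_radius below):
#   W, E, H, V, flag = _approximate_eigenvalues(A, maxiter, symmetric)
#   approx_vecs = np.dot(np.hstack(V[:-1]), W)   # the V*W product from the docstring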
def approximate_spectral_radius(A, tol=0.01, maxiter=15, restart=5,
symmetric=None, initial_guess=None,
return_vector=False):
"""Approximate the spectral radius of a matrix.
Parameters
----------
A : {dense or sparse matrix}
E.g. csr_matrix, csc_matrix, ndarray, etc.
tol : {scalar}
Relative tolerance of approximation, i.e., the error divided
by the approximate spectral radius is compared to tol.
maxiter : {integer}
Maximum number of iterations to perform
restart : {integer}
Number of restarted Arnoldi processes. For example, a value of 0 will
run Arnoldi once, for maxiter iterations, and a value of 1 will restart
Arnoldi once, using the maximal eigenvector from the first Arnoldi
process as the initial guess.
symmetric : {boolean}
True - if A is symmetric Lanczos iteration is used (more efficient)
False - if A is non-symmetric Arnoldi iteration is used (less efficient)
initial_guess : {array|None}
If n x 1 array, then use as initial guess for Arnoldi/Lanczos.
If None, then use a random initial guess.
return_vector : {boolean}
True - return an approximate dominant eigenvector and the spectral radius.
False - Do not return the approximate dominant eigenvector
Returns
-------
An approximation to the spectral radius of A, and
if return_vector=True, then also return the approximate dominant
eigenvector
Notes
-----
The spectral radius is approximated by looking at the Ritz eigenvalues.
Arnoldi iteration (or Lanczos) is used to project the matrix A onto a
Krylov subspace: H = Q* A Q. The eigenvalues of H (i.e. the Ritz
eigenvalues) should represent the eigenvalues of A in the sense that the
minimum and maximum values are usually well matched (for the symmetric case
it is true since the eigenvalues are real).
References
----------
.. [1] Z. Bai, J. Demmel, J. Dongarra, A. Ruhe, and H. van der Vorst,
editors. "Templates for the Solution of Algebraic Eigenvalue Problems:
A Practical Guide", SIAM, Philadelphia, 2000.
Examples
--------
>>> from pyamg.util.linalg import approximate_spectral_radius
>>> import numpy as np
>>> from scipy.linalg import eigvals, norm
>>> A = np.array([[1.,0.],[0.,1.]])
>>> sr = approximate_spectral_radius(A,maxiter=3)
>>> print(f'{sr:2.6}')
1.0
>>> print(max([norm(x) for x in eigvals(A)]))
1.0
"""
if not hasattr(A, 'rho') or return_vector:
# Note: somehow more restarts can cause a nonsymmetric case to fail;
# revisit this. Also: what about A.dtype == int? Convert somehow?
# The use of the restart vector v0 requires that the full Krylov
# subspace V be stored. So, set symmetric to False.
symmetric = False
if maxiter < 1:
raise ValueError('expected maxiter > 0')
if restart < 0:
raise ValueError('expected restart >= 0')
if A.dtype == int:
raise ValueError('expected A to be float (complex or real)')
if A.shape[0] != A.shape[1]:
raise ValueError('expected square A')
if initial_guess is None:
v0 = np.random.rand(A.shape[1], 1)
if A.dtype == complex:
v0 = v0 + 1.0j * np.random.rand(A.shape[1], 1)
else:
if initial_guess.shape[0] != A.shape[0]:
raise ValueError('initial_guess and A must have same shape')
if (len(initial_guess.shape) > 1) and (initial_guess.shape[1] > 1):
raise ValueError('initial_guess must be an (n,1) or\
(n,) vector')
v0 = initial_guess.reshape(-1, 1)
v0 = np.array(v0, dtype=A.dtype)
for j in range(restart+1):
[evect, ev, H, V, breakdown_flag] =\
_approximate_eigenvalues(A, maxiter, symmetric, initial_guess=v0)
# Calculate error in dominant eigenvector
nvecs = ev.shape[0]
max_index = np.abs(ev).argmax()
error = H[nvecs, nvecs-1] * evect[-1, max_index]
# error is a fast way of calculating the following line
# error2 = ( A - ev[max_index]*sp.mat(
# sp.eye(A.shape[0],A.shape[1])) )*\
# ( sp.mat(sp.hstack(V[:-1]))*\
# evect[:,max_index].reshape(-1,1) )
# print(str(error) + " " + str(sp.linalg.norm(e2)))
v0 = np.dot(np.hstack(V[:-1]), evect[:, max_index].reshape(-1, 1))
if np.abs(error)/np.abs(ev[max_index]) < tol:
# halt if below relative tolerance
break
if breakdown_flag:
warn(f'Breakdown occured in step {j}')
break
# end j-loop
rho = np.abs(ev[max_index])
if sparse.isspmatrix(A):
A.rho = rho
if return_vector:
return (rho, v0)
return rho
return A.rho
def condest(A, maxiter=25, symmetric=False):
r"""Estimates the condition number of A.
Parameters
----------
A : {dense or sparse matrix}
e.g. array, matrix, csr_matrix, ...
maxiter: {int}
Max number of Arnoldi/Lanczos iterations
symmetric : {bool}
If symmetric use the far more efficient Lanczos algorithm,
Else use Arnoldi.
If hermitian, use symmetric=True.
If complex symmetric, use symmetric=False.
Returns
-------
Estimate of cond(A) with \|lambda_max\| / \|lambda_min\| or simga_max / sigma_min
through the use of Arnoldi or Lanczos iterations, depending on
the symmetric flag
Notes
-----
The condition number measures how large of a change in the
the problems solution is caused by a change in problem's input.
Large condition numbers indicate that small perturbations
and numerical errors are magnified greatly when solving the system.
Examples
--------
>>> import numpy as np
>>> from pyamg.util.linalg import condest
>>> c = condest(np.array([[1.,0.],[0.,2.]]))
>>> print(f'{c:2.6}')
2.0
"""
C = aslinearoperator(A)
power = 1
if not symmetric:
def matvec(v):
return C.rmatvec(C.A @ v)
C.matvec = matvec
power = 0.5
[evect, ev, H, V, breakdown_flag] =\
_approximate_eigenvalues(C, maxiter, symmetric)
del evect, H, V, breakdown_flag
return (np.max([norm(x) for x in ev])/min(norm(x) for x in ev))**power
def cond(A):
"""Return condition number of A.
Parameters
----------
A : {dense or sparse matrix}
e.g. array, matrix, csr_matrix, ...
Returns
-------
2-norm condition number through use of the SVD
Use for small to moderate sized dense matrices.
For large sparse matrices, use condest.
Notes
-----
The condition number measures how large a change in
the problem's solution is caused by a change in the problem's input.
Large condition numbers indicate that small perturbations
and numerical errors are magnified greatly when solving the system.
Examples
--------
>>> import numpy as np
>>> from pyamg.util.linalg import condest
>>> c = condest(np.array([[1.0,0.],[0.,2.0]]))
>>> print(f'{c:2.6}')
2.0
"""
if A.shape[0] != A.shape[1]:
raise ValueError('expected square matrix')
if sparse.isspmatrix(A):
A = A.toarray()
U, Sigma, Vh = svd(A)
del U, Vh
# 2-Norm Condition Number
return np.max(Sigma)/min(Sigma)
def ishermitian(A, fast_check=True, tol=1e-6, verbose=False):
r"""Return True if A is Hermitian to within tol.
Parameters
----------
A : {dense or sparse matrix}
e.g. array, matrix, csr_matrix, ...
fast_check : {bool}
If True, use the heuristic < Ax, y> = < x, Ay>
for random vectors x and y to check for conjugate symmetry.
If False, compute A - A.conj().T.
tol : {float}
Symmetry tolerance
verbose: {bool}
prints
max( \|A - A.conj().T\| ) if nonhermitian and fast_check=False,
\| <Ax, y> - <x, Ay> \| / sqrt( \| <Ax, y> * <x, Ay> \| )
if nonhermitian and fast_check=True
Returns
-------
True if hermitian
False if nonhermitian
Notes
-----
This function applies a simple test of conjugate symmetry
Examples
--------
>>> import numpy as np
>>> from pyamg.util.linalg import ishermitian
>>> ishermitian(np.array([[1,2],[1,1]]))
False
>>> from pyamg.gallery import poisson
>>> ishermitian(poisson((10,10)))
True
"""
# convert to array type
if not sparse.isspmatrix(A):
A = np.asarray(A)
if fast_check:
x = np.random.rand(A.shape[0], 1)
y = np.random.rand(A.shape[0], 1)
if A.dtype == complex:
x = x + 1.0j*np.random.rand(A.shape[0], 1)
y = y + 1.0j*np.random.rand(A.shape[0], 1)
xAy = np.dot((A.dot(x)).conjugate().T, y)
xAty = np.dot(x.conjugate().T, A.dot(y))
diff = float(np.abs(xAy - xAty) / np.sqrt(np.abs(xAy*xAty)))
else:
# compute the difference, A - A.conj().T
if sparse.isspmatrix(A):
diff = np.ravel((A - A.conj().T).data)
else:
diff = np.ravel(A - A.conj().T)
if np.max(diff.shape) == 0:
diff = 0
else:
diff = np.max(np.abs(diff))
if diff < tol:
diff = 0
return True
if verbose:
print(diff)
return False
def pinv_array(a, tol=None):
"""Calculate the Moore-Penrose pseudo inverse of each block of the 3D array a.
Parameters
----------
a : {dense array}
Is of size (n, m, m)
tol : {float}
Used by gelss to filter numerically zeros singular values.
If None, a suitable value is chosen for you.
Returns
-------
Nothing, a is modified in place so that a[k] holds the pseudoinverse
of that block.
Notes
-----
By using lapack wrappers, this can be much faster for large n, than
directly calling a pseudoinverse (SVD)
Examples
--------
>>> import numpy as np
>>> from pyamg.util.linalg import pinv_array
>>> a = np.array([[[1.,2.],[1.,1.]], [[1.,1.],[3.,3.]]])
>>> ac = a.copy()
>>> # each block of a is inverted in-place
>>> pinv_array(a)
"""
n = a.shape[0]
m = a.shape[1]
if m == 1:
# Pseudo-inverse of 1 x 1 matrices is trivial
zero_entries = (a == 0.0).nonzero()[0]
a[zero_entries] = 1.0
a[:] = 1.0/a
a[zero_entries] = 0.0
del zero_entries
else:
# The block size is greater than 1
# Create necessary arrays and function pointers for calculating pinv
gelss, gelss_lwork = lapack.get_lapack_funcs(('gelss', 'gelss_lwork'),
(np.ones((1,), dtype=a.dtype)))
RHS = np.eye(m, dtype=a.dtype)
# pylint: disable=protected-access
lwork = lapack._compute_lwork(gelss_lwork, m, m, m)
# pylint: enable=protected-access
# Choose tolerance for which singular values are zero in *gelss below
if tol is None:
tol = set_tol(a.dtype)
# Invert each block of a
for kk in range(n):
gelssoutput = gelss(a[kk], RHS, cond=tol, lwork=lwork,
overwrite_a=True, overwrite_b=False)
a[kk] = gelssoutput[1]<|fim▁end|> | |
<|file_name|>test_container_registry_client.py<|end_file_name|><|fim▁begin|># coding=utf-8
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
from datetime import datetime
from azure.core import credentials
import pytest
import six
import time
from azure.containerregistry import (
RepositoryProperties,
ArtifactManifestProperties,
ArtifactManifestOrder,
ArtifactTagProperties,
ArtifactTagOrder,
ContainerRegistryClient,
)
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError
from azure.core.paging import ItemPaged
from testcase import ContainerRegistryTestClass, get_authority
from constants import TO_BE_DELETED, HELLO_WORLD, ALPINE, BUSYBOX, DOES_NOT_EXIST
from preparer import acr_preparer
class TestContainerRegistryClient(ContainerRegistryTestClass):
@acr_preparer()
def test_list_repository_names(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
repositories = client.list_repository_names()
assert isinstance(repositories, ItemPaged)
count = 0
prev = None<|fim▁hole|> assert isinstance(repo, six.string_types)
assert prev != repo
prev = repo
assert count > 0
@acr_preparer()
def test_list_repository_names_by_page(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
results_per_page = 2
total_pages = 0
repository_pages = client.list_repository_names(results_per_page=results_per_page)
prev = None
for page in repository_pages.by_page():
page_count = 0
for repo in page:
assert isinstance(repo, six.string_types)
assert prev != repo
prev = repo
page_count += 1
assert page_count <= results_per_page
total_pages += 1
assert total_pages >= 1
@acr_preparer()
def test_delete_repository(self, containerregistry_endpoint, containerregistry_resource_group):
self.import_image(containerregistry_endpoint, HELLO_WORLD, [TO_BE_DELETED])
client = self.create_registry_client(containerregistry_endpoint)
client.delete_repository(TO_BE_DELETED)
for repo in client.list_repository_names():
if repo == TO_BE_DELETED:
raise ValueError("Repository not deleted")
@acr_preparer()
def test_delete_repository_does_not_exist(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
client.delete_repository("not_real_repo")
@acr_preparer()
def test_get_repository_properties(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
properties = client.get_repository_properties(ALPINE)
assert isinstance(properties, RepositoryProperties)
assert properties.name == ALPINE
@acr_preparer()
def test_update_repository_properties(self, containerregistry_endpoint):
repository = self.get_resource_name("repo")
tag_identifier = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repository, tag_identifier)])
client = self.create_registry_client(containerregistry_endpoint)
properties = client.get_repository_properties(repository)
properties.can_delete = False
properties.can_read = False
properties.can_list = False
properties.can_write = False
new_properties = client.update_repository_properties(repository, properties)
assert properties.can_delete == new_properties.can_delete
assert properties.can_read == new_properties.can_read
assert properties.can_list == new_properties.can_list
assert properties.can_write == new_properties.can_write
new_properties.can_delete = True
new_properties.can_read = True
new_properties.can_list = True
new_properties.can_write = True
new_properties = client.update_repository_properties(repository, new_properties)
assert new_properties.can_delete == True
assert new_properties.can_read == True
assert new_properties.can_list == True
assert new_properties.can_write == True
@acr_preparer()
def test_update_repository_properties_kwargs(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
client = self.create_registry_client(containerregistry_endpoint)
properties = client.get_repository_properties(repo)
properties = self.set_all_properties(properties, True)
received = client.update_repository_properties(repo, properties)
self.assert_all_properties(properties, True)
received = client.update_repository_properties(repo, can_delete=False)
assert received.can_delete == False
assert received.can_list == True
assert received.can_read == True
assert received.can_write == True
received = client.update_repository_properties(repo, can_read=False)
assert received.can_delete == False
assert received.can_list == True
assert received.can_read == False
assert received.can_write == True
received = client.update_repository_properties(repo, can_write=False)
assert received.can_delete == False
assert received.can_list == True
assert received.can_read == False
assert received.can_write == False
received = client.update_repository_properties(repo, can_list=False)
assert received.can_delete == False
assert received.can_list == False
assert received.can_read == False
assert received.can_write == False
received = client.update_repository_properties(
repo,
can_delete=True,
can_read=True,
can_write=True,
can_list=True,
)
self.assert_all_properties(received, True)
@acr_preparer()
def test_list_registry_artifacts(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
count = 0
for artifact in client.list_manifest_properties(BUSYBOX):
assert isinstance(artifact, ArtifactManifestProperties)
assert isinstance(artifact.created_on, datetime)
assert isinstance(artifact.last_updated_on, datetime)
assert artifact.repository_name == BUSYBOX
assert artifact.fully_qualified_reference in self.create_fully_qualified_reference(containerregistry_endpoint, BUSYBOX, artifact.digest)
count += 1
assert count > 0
@acr_preparer()
def test_list_registry_artifacts_by_page(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
results_per_page = 2
pages = client.list_manifest_properties(BUSYBOX, results_per_page=results_per_page)
page_count = 0
for page in pages.by_page():
reg_count = 0
for tag in page:
reg_count += 1
assert reg_count <= results_per_page
page_count += 1
assert page_count >= 1
@acr_preparer()
def test_list_registry_artifacts_descending(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
prev_last_updated_on = None
count = 0
for artifact in client.list_manifest_properties(BUSYBOX, order_by=ArtifactManifestOrder.LAST_UPDATED_ON_DESCENDING):
if prev_last_updated_on:
assert artifact.last_updated_on < prev_last_updated_on
prev_last_updated_on = artifact.last_updated_on
count += 1
assert count > 0
prev_last_updated_on = None
count = 0
for artifact in client.list_manifest_properties(BUSYBOX, order_by="timedesc"):
if prev_last_updated_on:
assert artifact.last_updated_on < prev_last_updated_on
prev_last_updated_on = artifact.last_updated_on
count += 1
assert count > 0
@acr_preparer()
def test_list_registry_artifacts_ascending(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
prev_last_updated_on = None
count = 0
for artifact in client.list_manifest_properties(BUSYBOX, order_by=ArtifactManifestOrder.LAST_UPDATED_ON_ASCENDING):
if prev_last_updated_on:
assert artifact.last_updated_on > prev_last_updated_on
prev_last_updated_on = artifact.last_updated_on
count += 1
assert count > 0
prev_last_updated_on = None
count = 0
for artifact in client.list_manifest_properties(BUSYBOX, order_by="timeasc"):
if prev_last_updated_on:
assert artifact.last_updated_on > prev_last_updated_on
prev_last_updated_on = artifact.last_updated_on
count += 1
assert count > 0
@acr_preparer()
def test_get_manifest_properties(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
client = self.create_registry_client(containerregistry_endpoint)
properties = client.get_manifest_properties(repo, tag)
assert isinstance(properties, ArtifactManifestProperties)
assert properties.repository_name == repo
assert properties.fully_qualified_reference in self.create_fully_qualified_reference(containerregistry_endpoint, repo, properties.digest)
@acr_preparer()
def test_get_manifest_properties_does_not_exist(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
client = self.create_registry_client(containerregistry_endpoint)
manifest = client.get_manifest_properties(repo, tag)
digest = manifest.digest
digest = digest[:-10] + u"a" * 10
with pytest.raises(ResourceNotFoundError):
client.get_manifest_properties(repo, digest)
@acr_preparer()
def test_update_manifest_properties(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
client = self.create_registry_client(containerregistry_endpoint)
properties = client.get_manifest_properties(repo, tag)
properties.can_delete = False
properties.can_read = False
properties.can_write = False
properties.can_list = False
received = client.update_manifest_properties(repo, tag, properties)
assert received.can_delete == properties.can_delete
assert received.can_read == properties.can_read
assert received.can_write == properties.can_write
assert received.can_list == properties.can_list
properties.can_delete = True
properties.can_read = True
properties.can_write = True
properties.can_list = True
received = client.update_manifest_properties(repo, tag, properties)
assert received.can_delete == True
assert received.can_read == True
assert received.can_write == True
assert received.can_list == True
@acr_preparer()
def test_update_manifest_properties_kwargs(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
client = self.create_registry_client(containerregistry_endpoint)
properties = client.get_manifest_properties(repo, tag)
received = client.update_manifest_properties(repo, tag, can_delete=False)
assert received.can_delete == False
received = client.update_manifest_properties(repo, tag, can_read=False)
assert received.can_read == False
received = client.update_manifest_properties(repo, tag, can_write=False)
assert received.can_write == False
received = client.update_manifest_properties(repo, tag, can_list=False)
assert received.can_list == False
received = client.update_manifest_properties(
repo, tag, can_delete=True, can_read=True, can_write=True, can_list=True
)
assert received.can_delete == True
assert received.can_read == True
assert received.can_write == True
assert received.can_list == True
@acr_preparer()
def test_get_tag_properties(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
client = self.create_registry_client(containerregistry_endpoint)
properties = client.get_tag_properties(repo, tag)
assert isinstance(properties, ArtifactTagProperties)
assert properties.name == tag
@acr_preparer()
def test_get_tag_properties_does_not_exist(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
with pytest.raises(ResourceNotFoundError):
client.get_tag_properties("Nonexistent", "Nonexistent")
@acr_preparer()
def test_update_tag_properties(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
client = self.create_registry_client(containerregistry_endpoint)
properties = client.get_tag_properties(repo, tag)
properties.can_delete = False
properties.can_read = False
properties.can_write = False
properties.can_list = False
received = client.update_tag_properties(repo, tag, properties)
assert received.can_delete == properties.can_delete
assert received.can_read == properties.can_read
assert received.can_write == properties.can_write
assert received.can_list == properties.can_list
properties.can_delete = True
properties.can_read = True
properties.can_write = True
properties.can_list = True
received = client.update_tag_properties(repo, tag, properties)
assert received.can_delete == True
assert received.can_read == True
assert received.can_write == True
assert received.can_list == True
@acr_preparer()
def test_update_tag_properties_kwargs(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
client = self.create_registry_client(containerregistry_endpoint)
properties = client.get_tag_properties(repo, tag)
received = client.update_tag_properties(repo, tag, can_delete=False)
assert received.can_delete == False
received = client.update_tag_properties(repo, tag, can_read=False)
assert received.can_read == False
received = client.update_tag_properties(repo, tag, can_write=False)
assert received.can_write == False
received = client.update_tag_properties(repo, tag, can_list=False)
assert received.can_list == False
received = client.update_tag_properties(
repo, tag, can_delete=True, can_read=True, can_write=True, can_list=True
)
assert received.can_delete == True
assert received.can_read == True
assert received.can_write == True
assert received.can_list == True
@acr_preparer()
def test_list_tag_properties(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
tags = ["{}:{}".format(repo, tag + str(i)) for i in range(4)]
self.import_image(containerregistry_endpoint, HELLO_WORLD, tags)
client = self.create_registry_client(containerregistry_endpoint)
count = 0
for tag in client.list_tag_properties(repo):
assert "{}:{}".format(repo, tag.name) in tags
count += 1
assert count == 4
@acr_preparer()
def test_list_tag_properties_order_descending(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
tags = ["{}:{}".format(repo, tag + str(i)) for i in range(4)]
self.import_image(containerregistry_endpoint, HELLO_WORLD, tags)
client = self.create_registry_client(containerregistry_endpoint)
prev_last_updated_on = None
count = 0
for tag in client.list_tag_properties(repo, order_by=ArtifactTagOrder.LAST_UPDATED_ON_DESCENDING):
assert "{}:{}".format(repo, tag.name) in tags
if prev_last_updated_on:
assert tag.last_updated_on < prev_last_updated_on
prev_last_updated_on = tag.last_updated_on
count += 1
assert count == 4
prev_last_updated_on = None
count = 0
for tag in client.list_tag_properties(repo, order_by="timedesc"):
assert "{}:{}".format(repo, tag.name) in tags
if prev_last_updated_on:
assert tag.last_updated_on < prev_last_updated_on
prev_last_updated_on = tag.last_updated_on
count += 1
assert count == 4
@acr_preparer()
def test_list_tag_properties_order_ascending(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
tags = ["{}:{}".format(repo, tag + str(i)) for i in range(4)]
self.import_image(containerregistry_endpoint, HELLO_WORLD, tags)
client = self.create_registry_client(containerregistry_endpoint)
prev_last_updated_on = None
count = 0
for tag in client.list_tag_properties(repo, order_by=ArtifactTagOrder.LAST_UPDATED_ON_ASCENDING):
assert "{}:{}".format(repo, tag.name) in tags
if prev_last_updated_on:
assert tag.last_updated_on > prev_last_updated_on
prev_last_updated_on = tag.last_updated_on
count += 1
assert count == 4
prev_last_updated_on = None
count = 0
for tag in client.list_tag_properties(repo, order_by="timeasc"):
assert "{}:{}".format(repo, tag.name) in tags
if prev_last_updated_on:
assert tag.last_updated_on > prev_last_updated_on
prev_last_updated_on = tag.last_updated_on
count += 1
assert count == 4
@acr_preparer()
def test_delete_tag(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
tags = ["{}:{}".format(repo, tag + str(i)) for i in range(4)]
self.import_image(containerregistry_endpoint, HELLO_WORLD, tags)
client = self.create_registry_client(containerregistry_endpoint)
client.delete_tag(repo, tag + str(0))
count = 0
for tag in client.list_tag_properties(repo):
assert "{}:{}".format(repo, tag.name) in tags[1:]
count += 1
assert count == 3
@acr_preparer()
def test_delete_tag_does_not_exist(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
client.delete_tag(DOES_NOT_EXIST, DOES_NOT_EXIST)
@acr_preparer()
def test_delete_manifest(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
client = self.create_registry_client(containerregistry_endpoint)
client.delete_manifest(repo, tag)
self.sleep(10)
with pytest.raises(ResourceNotFoundError):
client.get_manifest_properties(repo, tag)
@acr_preparer()
def test_delete_manifest_does_not_exist(self, containerregistry_endpoint):
repo = self.get_resource_name("repo")
tag = self.get_resource_name("tag")
self.import_image(containerregistry_endpoint, HELLO_WORLD, ["{}:{}".format(repo, tag)])
client = self.create_registry_client(containerregistry_endpoint)
manifest = client.get_manifest_properties(repo, tag)
digest = manifest.digest
digest = digest[:-10] + u"a" * 10
client.delete_manifest(repo, digest)
@acr_preparer()
def test_expiration_time_parsing(self, containerregistry_endpoint):
from azure.containerregistry._authentication_policy import ContainerRegistryChallengePolicy
client = self.create_registry_client(containerregistry_endpoint)
for repo in client.list_repository_names():
pass
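# Force the cached AAD token to look expired so the next request must
# re-exchange credentials (this exercises the expiration-time parsing path).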
for policy in client._client._client._pipeline._impl_policies:
if isinstance(policy, ContainerRegistryChallengePolicy):
policy._exchange_client._expiration_time = 0
break
count = 0
for repo in client.list_repository_names():
count += 1
assert count >= 1
# Live only, the fake credential doesn't check auth scope the same way
@pytest.mark.live_test_only
@acr_preparer()
def test_construct_container_registry_client(self, containerregistry_endpoint):
authority = get_authority(containerregistry_endpoint)
credential = self.get_credential(authority)
client = ContainerRegistryClient(endpoint=containerregistry_endpoint, credential=credential, audience="https://microsoft.com")
with pytest.raises(ClientAuthenticationError):
properties = client.get_repository_properties(HELLO_WORLD)
with pytest.raises(ValueError):
client = ContainerRegistryClient(endpoint=containerregistry_endpoint, credential=credential)
@acr_preparer()
def test_set_api_version(self, containerregistry_endpoint):
client = self.create_registry_client(containerregistry_endpoint)
assert client._client._config.api_version == "2021-07-01"
client = self.create_registry_client(containerregistry_endpoint, api_version = "2019-08-15-preview")
assert client._client._config.api_version == "2019-08-15-preview"
with pytest.raises(ValueError):
client = self.create_registry_client(containerregistry_endpoint, api_version = "2019-08-15")<|fim▁end|> | for repo in repositories:
count += 1 |
<|file_name|>home.js<|end_file_name|><|fim▁begin|><|fim▁hole|>
// Hide GNU-cat
$('#loading').hide();
// jQuery cycle plugin usage for screenshot slideshow
$('.slideshow').cycle({
fx: 'fade',
speed: 1000,
timeout: 6000,
});
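// (cycle options sketch: 'speed' is the fade duration in ms and 'timeout'
// the delay between slide transitions in ms)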
// Show GNU-cat when the submit button is pressed
$('input#submit').click(function() {
$('#loading').fadeIn(1000);
});
// Assign account form fields to variables
  // (These were meant to let a field stop reacting to master-form changes
  // once the user had edited it directly. Unfortunately, the JS cannot
  // react quickly enough to handle anything but slow typing. May attempt
  // a rewrite later.)
var master_field = $('input#master');
var ohloh_field = $('input#ohloh');
var coderwall_field = $('input#coderwall');
  // Have the master account input field update all other fields
master_field.keyup(function () {
ohloh_field.val($(this).val());
coderwall_field.val($(this).val());
});
});
$(window).on('unload', function() {
// Re-hide GNU-cat when we leave.
    $('#loading').hide();
});<|fim▁end|> | $(document).ready(function() { |
<|file_name|>synthesizer.py<|end_file_name|><|fim▁begin|>import logging
import sys
import traceback
from collections import namedtuple
import numpy as np
import pandas as pd
from scipy.stats import chisquare
from . import categorizer as cat
from . import draw
from .ipf.ipf import calculate_constraints
from .ipu.ipu import household_weights
logger = logging.getLogger("synthpop")
FitQuality = namedtuple(<|fim▁hole|> 'BlockGroupID', ('state', 'county', 'tract', 'block_group'))
def enable_logging():
handler = logging.StreamHandler(stream=sys.stdout)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
def synthesize(h_marg, p_marg, h_jd, p_jd, h_pums, p_pums,
marginal_zero_sub=.01, jd_zero_sub=.001, hh_index_start=0):
# this is the zero marginal problem
h_marg = h_marg.replace(0, marginal_zero_sub)
p_marg = p_marg.replace(0, marginal_zero_sub)
# zero cell problem
h_jd.frequency = h_jd.frequency.replace(0, jd_zero_sub)
p_jd.frequency = p_jd.frequency.replace(0, jd_zero_sub)
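    # (A zero cell would stay zero through every IPF iteration, so a small
    # positive substitute keeps each cell adjustable; the defaults above are
    # heuristic, not derived values.)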
# ipf for households
logger.info("Running ipf for households")
h_constraint, _ = calculate_constraints(h_marg, h_jd.frequency)
h_constraint.index = h_jd.cat_id
logger.debug("Household constraint")
logger.debug(h_constraint)
logger.debug(h_constraint.sum())
# ipf for persons
logger.info("Running ipf for persons")
p_constraint, _ = calculate_constraints(p_marg, p_jd.frequency)
p_constraint.index = p_jd.cat_id
logger.debug("Person constraint")
logger.debug(p_constraint)
logger.debug(p_constraint.sum())
# make frequency tables that the ipu expects
household_freq, person_freq = cat.frequency_tables(p_pums, h_pums,
p_jd.cat_id,
h_jd.cat_id)
# do the ipu to match person marginals
logger.info("Running ipu")
import time
t1 = time.time()
best_weights, fit_quality, iterations = household_weights(household_freq,
person_freq,
h_constraint,
p_constraint)
logger.info("Time to run ipu: %.3fs" % (time.time()-t1))
logger.debug("IPU weights:")
logger.debug(best_weights.describe())
logger.debug(best_weights.sum())
logger.debug("Fit quality:")
logger.debug(fit_quality)
logger.debug("Number of iterations:")
logger.debug(iterations)
num_households = int(h_marg.groupby(level=0).sum().mean())
print "Drawing %d households" % num_households
best_chisq = np.inf
return draw.draw_households(
num_households, h_pums, p_pums, household_freq, h_constraint,
p_constraint, best_weights, hh_index_start=hh_index_start)
def synthesize_all(recipe, num_geogs=None, indexes=None,
marginal_zero_sub=.01, jd_zero_sub=.001):
"""
Parameters
----------
    recipe : object
        Recipe supplying the marginals and joint distributions for each
        geography; may also write results itself via ``write_households``
        and ``write_persons``.
    num_geogs : int, optional
        Maximum number of geographies to synthesize (all by default).
    indexes : iterable, optional
        Geography ids to synthesize; defaults to all available ids.
    marginal_zero_sub : float, optional
        Substitute value for zero marginals.
    jd_zero_sub : float, optional
        Substitute value for zero joint-distribution frequencies.

    Returns
    -------
    households, people : pandas.DataFrame
        Only returned when the recipe does not write them out itself.
    fit_quality : dict of FitQuality
        Keys are geographic IDs, values are namedtuples with attributes
        ``.people_chisq`` and ``.people_p``.
"""
print "Synthesizing at geog level: '{}' (number of geographies is {})".\
format(recipe.get_geography_name(), recipe.get_num_geographies())
if indexes is None:
indexes = recipe.get_available_geography_ids()
hh_list = []
people_list = []
cnt = 0
fit_quality = {}
hh_index_start = 0
# TODO will parallelization work here?
for geog_id in indexes:
print "Synthesizing geog id:\n", geog_id
h_marg = recipe.get_household_marginal_for_geography(geog_id)
logger.debug("Household marginal")
logger.debug(h_marg)
p_marg = recipe.get_person_marginal_for_geography(geog_id)
logger.debug("Person marginal")
logger.debug(p_marg)
h_pums, h_jd = recipe.\
get_household_joint_dist_for_geography(geog_id)
logger.debug("Household joint distribution")
logger.debug(h_jd)
p_pums, p_jd = recipe.get_person_joint_dist_for_geography(geog_id)
logger.debug("Person joint distribution")
logger.debug(p_jd)
try:
households, people, people_chisq, people_p = \
synthesize(
h_marg, p_marg, h_jd, p_jd, h_pums, p_pums,
marginal_zero_sub=marginal_zero_sub, jd_zero_sub=jd_zero_sub,
hh_index_start=hh_index_start)
if not recipe.write_households(geog_id, households):
hh_list.append(households)
if not recipe.write_persons(geog_id, people):
people_list.append(people)
key = tuple(geog_id.values)
# key = BlockGroupID(
# geog_id['state'], geog_id['county'], geog_id['tract'],
# geog_id['block group'])
fit_quality[key] = FitQuality(people_chisq, people_p)
cnt += 1
if len(households) > 0:
hh_index_start = households.index.values[-1] + 1
if num_geogs is not None and cnt >= num_geogs:
break
except Exception as e:
print "Exception caught: ", sys.exc_info()[0]
print traceback.format_exc()
# continue
return (pd.concat(hh_list) if len(hh_list) > 0 else None,
pd.concat(people_list, ignore_index=True) if len(people_list) > 0 else None,
fit_quality)<|fim▁end|> | 'FitQuality',
('people_chisq', 'people_p'))
BlockGroupID = namedtuple( |
<|file_name|>controller.go<|end_file_name|><|fim▁begin|>package controller
import (
"fmt"
"github.com/deesims/ps_web_0/db"
"github.com/deesims/ps_web_0/models"
"github.com/deesims/ps_web_0/view"
"gopkg.in/nullbio/null.v6"
"io"
"log"
"net/http"
"os"
"time"
"github.com/gorilla/mux"
)
type ResumeReviewView struct {
Resume *models.Resume
Review *models.ResumeReview
}
func registerRoutesToFuncs(r *mux.Router) {
r.HandleFunc("/", homeHandler).Methods("GET")
r.HandleFunc("/admin/roles", adminRoles).Methods("GET", "POST")
r.HandleFunc("/admin/jobs", adminJobs).Methods("GET", "POST")
r.HandleFunc("/admin/companies", adminCompanies).Methods("GET", "POST")
r.HandleFunc("/moderator", moderatorResumeSummary).Methods("GET", "POST")
r.HandleFunc("/login", loginGetHandler).Methods("GET")
r.HandleFunc("/login", loginPostHandler).Methods("POST")
r.HandleFunc("/userhub", GetUserHubHandler).Methods("GET")
r.HandleFunc("/sendresumetomod", SendResumeToModerator).Methods("POST")
r.HandleFunc("/viewresume", ViewResume).Methods("GET")
r.HandleFunc("/checkUser", checkUser)
}
// ViewResume gets the resumes of the logged-in user
func ViewResume(w http.ResponseWriter, r *http.Request) {
currentUser, err := authHandler.CurrentUser(w, r)
if err != nil {
fmt.Println("Error:", err.Error())
return
}<|fim▁hole|> resumes := db.FindAllResumesForAuthorID(user.UserID)
data := map[string]interface{}{
"CurrentUser": currentUser,
"Resumes": resumes,
}
view.RenderTemplate(w, "viewresume", data)
}
func LoadFileToDB(w http.ResponseWriter, r *http.Request) {
return
}
func SendResumeToModerator(w http.ResponseWriter, r *http.Request) {
file, header, err := r.FormFile("uploadfile")
if err != nil {
fmt.Fprintln(w, err)
return
}
defer file.Close()
currentDir, err := os.Getwd()
if err != nil {
fmt.Println("Getwd() error: ", err.Error())
}
fmt.Println("current working dir:", currentDir)
resumeDir := "/public/resumes/"
out, err := os.Create(currentDir + resumeDir + header.Filename)
if err != nil {
fmt.Println("error:", err)
return
}
defer out.Close()
// write the content from POST to the file
_, err = io.Copy(out, file)
if err != nil {
fmt.Fprintln(w, err)
}
var filePath null.String
filePath.SetValid(resumeDir + header.Filename)
currentUser, err := authHandler.CurrentUser(w, r)
if err != nil {
return
}
users, _ := models.UsersG().All()
var user *models.User
for _, u := range users {
if u.Name == currentUser.Username {
user = u
}
}
resumeObject := models.Resume{
AuthorID: user.UserID,
ResumePath: filePath,
LastUpdatedAt: time.Now(),
}
err = resumeObject.InsertG()
if err != nil {
fmt.Println("resume object insertg error:", err.Error())
}
resumeReview := models.ResumeReview{
ModeratorID: 9,
ResumeID: resumeObject.ResumeID,
ReviewDate: time.Now(),
}
err = resumeReview.InsertG()
if err != nil {
fmt.Println("resume review insertg error:", err.Error())
}
http.Redirect(w, r, "/userhub", http.StatusSeeOther)
}
func GetUserHubHandler(w http.ResponseWriter, r *http.Request) {
fmt.Println("UserHub Executing...")
currentUser, err := authHandler.CurrentUser(w, r)
if err != nil {
fmt.Println("Error:", err.Error())
return
}
if currentUser.Role != "user" {
log.Print("Error: Not a user")
return
}
users, err := models.UsersG().All()
if err != nil {
fmt.Println("err getting users: ", err.Error())
return
}
var user *models.User
for _, u := range users {
if u.Name == currentUser.Username {
user = u
}
}
jobs, err := models.JobsG().All()
if err != nil {
fmt.Println("err getting jobs: ", err.Error())
return
}
for index, job := range jobs {
fmt.Println("index of job: ", index, " job title ", job.Name)
}
resumeReviews_Resume := db.FindAllResumesReviewForAuthorID(user.UserID)
data := map[string]interface{}{
"CurrentUser": currentUser,
"User": user,
"Jobs": jobs,
"Resumes": resumeReviews_Resume,
}
view.RenderTemplate(w, "userhub", data)
}
// Handles the index page, renders a home page
func homeHandler(w http.ResponseWriter, r *http.Request) {
AuthInit(w, r)
fmt.Println("homeHandler Executing...")
data := map[string]interface{}{
"hello-user": 103,
"thats-rught": 104,
}
view.RenderTemplate(w, "index", data)
}
// loginHandler
func loginGetHandler(w http.ResponseWriter, r *http.Request) {
fmt.Println("loginHandler Executing...")
currentUser, err := authHandler.CurrentUser(w, r)
if err != nil {
log.Println("Error: ", err.Error())
}
data := map[string]interface{}{
"LoggedUser": currentUser,
}
view.RenderTemplate(w, "login", data)
}
func loginPostHandler(w http.ResponseWriter, r *http.Request) {
err := r.ParseForm()
if err != nil {
fmt.Println(err.Error())
}
username := r.FormValue("lg_username")
password := r.FormValue("lg_password")
if err = authHandler.Login(w, r, username, password, "/checkUser"); err != nil {
if err.Error() == "httpauth: already authenticated" {
checkUser(w, r)
}
}
}
func checkUser(w http.ResponseWriter, r *http.Request) {
user, err := authHandler.CurrentUser(w, r) // check what user logged in
if err != nil {
fmt.Println("user err at 156: ", err.Error())
return
}
fmt.Println("Role of user: ", user.Role)
if user.Role == "user" {
http.Redirect(w, r, "/userhub", http.StatusSeeOther)
} else if user.Role == "admin" {
fmt.Println("executing admin add job page")
http.Redirect(w, r, "/admin/jobs", http.StatusSeeOther)
} else {
http.Redirect(w, r, "/moderator", http.StatusSeeOther)
}
}
// Init initializes the controller and registers the routes to appropriate
// function handlers
func Init() {
router := mux.NewRouter()
registerRoutesToFuncs(router)
http.Handle("/", router)
http.Handle("/public/", http.StripPrefix("/public/", http.FileServer(http.Dir("public"))))
fmt.Println("Public directory launched;")
}<|fim▁end|> |
user := db.FindUserFromUsername(currentUser.Username) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>""""""
from __future__ import annotations
from flask import Flask
from .criterion import TagCriterion
from .extension import TagsExtension
__all__ = ["TagsExtension", "TagCriterion"]
<|fim▁hole|>
def register_plugin(app: Flask):
TagsExtension(app)<|fim▁end|> | |
<|file_name|>scene_ops.py<|end_file_name|><|fim▁begin|># ##### BEGIN GPL LICENSE BLOCK #####
#
# JewelCraft jewelry design toolkit for Blender.
# Copyright (C) 2015-2019 Mikhail Rachinskiy
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# ##### END GPL LICENSE BLOCK #####
from bpy.types import Operator
<|fim▁hole|> bl_options = {"REGISTER", "UNDO", "INTERNAL"}
def execute(self, context):
unit_settings = context.scene.unit_settings
unit_settings.system = "METRIC"
unit_settings.length_unit = "MILLIMETERS"
unit_settings.scale_length = 0.001
context.space_data.overlay.grid_scale = 0.001
self.report({"INFO"}, "Optimal unit settings are in use")
return {"FINISHED"}<|fim▁end|> | class SCENE_OT_jewelcraft_scene_units_set(Operator):
bl_label = "Set Units"
bl_description = "Set optimal unit settings for jewelry modelling"
bl_idname = "scene.jewelcraft_scene_units_set" |
<|file_name|>SaxParser.ts<|end_file_name|><|fim▁begin|>/**
* Copyright (c) Tiny Technologies, Inc. All rights reserved.
* Licensed under the LGPL or a commercial license.
* For LGPL see License.txt in the project root for license information.
* For commercial licenses see https://www.tiny.cloud/
*/
import Schema from './Schema';
import Entities from './Entities';
import Tools from '../util/Tools';
/**
* This class parses HTML code using pure JavaScript and executes various events for each item it finds. It will
* always execute the events in the right order for tag soup code like <b><p></b></p>. It will also remove elements
 * and attributes that don't fit the schema if the validate setting is enabled.
*
* @example
* var parser = new tinymce.html.SaxParser({
* validate: true,
*
* comment: function(text) {
* console.log('Comment:', text);
* },
*
* cdata: function(text) {
* console.log('CDATA:', text);
* },
*
* text: function(text, raw) {
* console.log('Text:', text, 'Raw:', raw);
* },
*
* start: function(name, attrs, empty) {
* console.log('Start:', name, attrs, empty);
* },
*
* end: function(name) {
* console.log('End:', name);
* },
*
* pi: function(name, text) {
* console.log('PI:', name, text);
* },
*
* doctype: function(text) {
* console.log('DocType:', text);
* }
* }, schema);
* @class tinymce.html.SaxParser
* @version 3.4
*/
type AttrList = Array<{ name: string, value: string }> & { map: Record<string, string> };
export interface SaxParserSettings {
allow_conditional_comments?: boolean;
allow_html_data_urls?: boolean;
allow_script_urls?: boolean;
allow_svg_data_urls?: boolean;
fix_self_closing?: boolean;
remove_internals?: boolean;
self_closing_elements?: Record<string, {}>;
validate?: boolean;
cdata? (text: string): void;
comment? (text: string): void;
doctype? (text: string): void;
end? (name: string): void;
pi? (name: string, text: string): void;
start? (name: string, attrs: AttrList, empty: boolean): void;
text? (text: string, raw?: boolean): void;
}
interface SaxParser {
parse (html: string): void;
}
const isValidPrefixAttrName = function (name: string): boolean {
return name.indexOf('data-') === 0 || name.indexOf('aria-') === 0;
};
const trimComments = function (text: string): string {
return text.replace(/<!--|-->/g, '');
};
const isInvalidUri = (settings: SaxParserSettings, uri: string) => {
if (settings.allow_html_data_urls) {
return false;
} else if (/^data:image\//i.test(uri)) {
return settings.allow_svg_data_urls === false && /^data:image\/svg\+xml/i.test(uri);
} else {
return /^data:/i.test(uri);
}
};
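// Illustrative behaviour (sketch, not an exhaustive spec):
//   isInvalidUri({}, 'data:text/html;base64,...')                         // -> true  (generic data: URI)
//   isInvalidUri({ allow_html_data_urls: true }, 'data:text/html,x')      // -> false (explicitly allowed)
//   isInvalidUri({ allow_svg_data_urls: false }, 'data:image/svg+xml,x')  // -> true  (SVG data URI blocked)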
/**
* Returns the index of the end tag for a specific start tag. This can be
* used to skip all children of a parent element from being processed.
*
* @private
* @method findEndTag
* @param {tinymce.html.Schema} schema Schema instance to use to match short ended elements.
* @param {String} html HTML string to find the end tag in.
 * @param {Number} startIndex Index to start searching at; should be after the start tag.
* @return {Number} Index of the end tag.
*/
const findEndTagIndex = function (schema: Schema, html: string, startIndex: number): number {
let count = 1, index, matches, tokenRegExp, shortEndedElements;
shortEndedElements = schema.getShortEndedElements();
tokenRegExp = /<([!?\/])?([A-Za-z0-9\-_\:\.]+)((?:\s+[^"\'>]+(?:(?:"[^"]*")|(?:\'[^\']*\')|[^>]*))*|\/|\s+)>/g;
tokenRegExp.lastIndex = index = startIndex;
while ((matches = tokenRegExp.exec(html))) {
index = tokenRegExp.lastIndex;
if (matches[1] === '/') { // End element
count--;
} else if (!matches[1]) { // Start element
if (matches[2] in shortEndedElements) {
continue;
}
count++;
}
if (count === 0) {
break;
}
}
return index;
};
const checkBogusAttribute = (regExp: RegExp, attrString: string): string | null => {
const matches = regExp.exec(attrString);
if (matches) {
const name = matches[1];
const value = matches[2];
return typeof name === 'string' && name.toLowerCase() === 'data-mce-bogus' ? value : null;
} else {
return null;
}
};
/**
* Constructs a new SaxParser instance.
*
* @constructor
* @method SaxParser
* @param {Object} settings Name/value collection of settings. comment, cdata, text, start and end are callbacks.
* @param {tinymce.html.Schema} schema HTML Schema class to use when parsing.
*/
function SaxParser(settings?: SaxParserSettings, schema = Schema()): SaxParser {
const noop = function () { };
settings = settings || {};
if (settings.fix_self_closing !== false) {
settings.fix_self_closing = true;
}
const comment = settings.comment ? settings.comment : noop;
const cdata = settings.cdata ? settings.cdata : noop;
const text = settings.text ? settings.text : noop;
const start = settings.start ? settings.start : noop;
const end = settings.end ? settings.end : noop;
const pi = settings.pi ? settings.pi : noop;
const doctype = settings.doctype ? settings.doctype : noop;
/**
* Parses the specified HTML string and executes the callbacks for each item it finds.
*
* @example
* SaxParser({...}).parse('<b>text</b>');
* @method parse
* @param {String} html Html string to sax parse.
*/
const parse = (html: string) => {
let matches, index = 0, value, endRegExp;
const stack = [];
let attrList, i, textData, name;
let isInternalElement, removeInternalElements, shortEndedElements, fillAttrsMap, isShortEnded;
let validate, elementRule, isValidElement, attr, attribsValue, validAttributesMap, validAttributePatterns;
let attributesRequired, attributesDefault, attributesForced, processHtml;
let anyAttributesRequired, selfClosing, tokenRegExp, attrRegExp, specialElements, attrValue, idCount = 0;
const decode = Entities.decode;
let fixSelfClosing;
const filteredUrlAttrs = Tools.makeMap('src,href,data,background,formaction,poster,xlink:href');
const scriptUriRegExp = /((java|vb)script|mhtml):/i;
const processEndTag = function (name) {
let pos, i;
// Find position of parent of the same type
pos = stack.length;
while (pos--) {
if (stack[pos].name === name) {
break;
}
}
// Found parent
if (pos >= 0) {
// Close all the open elements
for (i = stack.length - 1; i >= pos; i--) {
name = stack[i];
if (name.valid) {
end(name.name);
}
}
// Remove the open elements from the stack
stack.length = pos;
}
};
const parseAttribute = function (match, name, value, val2, val3) {
let attrRule, i;
const trimRegExp = /[\s\u0000-\u001F]+/g;
name = name.toLowerCase();
      value = name in fillAttrsMap ? name : decode(value || val2 || val3 || ''); // Handle boolean attributes rather than value attributes
// Validate name and value pass through all data- attributes
if (validate && !isInternalElement && isValidPrefixAttrName(name) === false) {
attrRule = validAttributesMap[name];
// Find rule by pattern matching
if (!attrRule && validAttributePatterns) {
i = validAttributePatterns.length;
while (i--) {
attrRule = validAttributePatterns[i];
if (attrRule.pattern.test(name)) {
break;
}
}
// No rule matched
if (i === -1) {
attrRule = null;
}
}
// No attribute rule found
if (!attrRule) {
return;
}
// Validate value
if (attrRule.validValues && !(value in attrRule.validValues)) {
return;
}
}
// Block any javascript: urls or non image data uris
if (filteredUrlAttrs[name] && !settings.allow_script_urls) {
let uri = value.replace(trimRegExp, '');
try {
// Might throw malformed URI sequence
uri = decodeURIComponent(uri);
} catch (ex) {
// Fallback to non UTF-8 decoder
uri = unescape(uri);
}
if (scriptUriRegExp.test(uri)) {
return;
}
if (isInvalidUri(settings, uri)) {
return;
}
}
// Block data or event attributes on elements marked as internal
if (isInternalElement && (name in filteredUrlAttrs || name.indexOf('on') === 0)) {
return;
}
// Add attribute to list and map
attrList.map[name] = value;
attrList.push({
name,
value
});
};
// Precompile RegExps and map objects
tokenRegExp = new RegExp('<(?:' +
'(?:!--([\\w\\W]*?)-->)|' + // Comment
'(?:!\\[CDATA\\[([\\w\\W]*?)\\]\\]>)|' + // CDATA
'(?:!DOCTYPE([\\w\\W]*?)>)|' + // DOCTYPE
'(?:\\?([^\\s\\/<>]+) ?([\\w\\W]*?)[?/]>)|' + // PI
'(?:\\/([A-Za-z][A-Za-z0-9\\-_\\:\\.]*)>)|' + // End element
'(?:([A-Za-z][A-Za-z0-9\\-_\\:\\.]*)((?:\\s+[^"\'>]+(?:(?:"[^"]*")|(?:\'[^\']*\')|[^>]*))*|\\/|\\s+)>)' + // Start element
')', 'g');
attrRegExp = /([\w:\-]+)(?:\s*=\s*(?:(?:\"((?:[^\"])*)\")|(?:\'((?:[^\'])*)\')|([^>\s]+)))?/g;
// Setup lookup tables for empty elements and boolean attributes
shortEndedElements = schema.getShortEndedElements();
selfClosing = settings.self_closing_elements || schema.getSelfClosingElements();
fillAttrsMap = schema.getBoolAttrs();
validate = settings.validate;
removeInternalElements = settings.remove_internals;
fixSelfClosing = settings.fix_self_closing;
specialElements = schema.getSpecialElements();
processHtml = html + '>';
    while ((matches = tokenRegExp.exec(processHtml))) { // The extra '>' appended above keeps the regexps from catastrophic backtracking on malformed html
// Text
if (index < matches.index) {
text(decode(html.substr(index, matches.index - index)));
}
if ((value = matches[6])) { // End element
value = value.toLowerCase();
// IE will add a ":" in front of elements it doesn't understand like custom elements or HTML5 elements
if (value.charAt(0) === ':') {
value = value.substr(1);
}
processEndTag(value);
} else if ((value = matches[7])) { // Start element
// Did we consume the extra character then treat it as text
// This handles the case with html like this: "text a<b text"
if (matches.index + matches[0].length > html.length) {
text(decode(html.substr(matches.index)));
index = matches.index + matches[0].length;
continue;
}
value = value.toLowerCase();
// IE will add a ":" in front of elements it doesn't understand like custom elements or HTML5 elements
if (value.charAt(0) === ':') {
value = value.substr(1);
}
isShortEnded = value in shortEndedElements;
// Is self closing tag for example an <li> after an open <li>
if (fixSelfClosing && selfClosing[value] && stack.length > 0 && stack[stack.length - 1].name === value) {
processEndTag(value);
}
// Always invalidate element if it's marked as bogus
const bogusValue = checkBogusAttribute(attrRegExp, matches[8]);
if (bogusValue !== null) {
if (bogusValue === 'all') {
index = findEndTagIndex(schema, html, tokenRegExp.lastIndex);
tokenRegExp.lastIndex = index;
continue;
}
isValidElement = false;
}
// Validate element
if (!validate || (elementRule = schema.getElementRule(value))) {
isValidElement = true;
        // Grab attributes map and patterns when validation is enabled
if (validate) {
validAttributesMap = elementRule.attributes;
validAttributePatterns = elementRule.attributePatterns;
}
// Parse attributes
if ((attribsValue = matches[8])) {
isInternalElement = attribsValue.indexOf('data-mce-type') !== -1; // Check if the element is an internal element
// If the element has internal attributes then remove it if we are told to do so
if (isInternalElement && removeInternalElements) {
isValidElement = false;
}
attrList = [];
attrList.map = {};
attribsValue.replace(attrRegExp, parseAttribute);
} else {
attrList = [];
attrList.map = {};
}
// Process attributes if validation is enabled
if (validate && !isInternalElement) {
attributesRequired = elementRule.attributesRequired;
attributesDefault = elementRule.attributesDefault;
attributesForced = elementRule.attributesForced;
anyAttributesRequired = elementRule.removeEmptyAttrs;
// Check if any attribute exists
if (anyAttributesRequired && !attrList.length) {
isValidElement = false;
}
// Handle forced attributes
if (attributesForced) {
i = attributesForced.length;
while (i--) {
attr = attributesForced[i];
name = attr.name;
attrValue = attr.value;
if (attrValue === '{$uid}') {
attrValue = 'mce_' + idCount++;
}
attrList.map[name] = attrValue;
attrList.push({ name, value: attrValue });
}
}
// Handle default attributes
if (attributesDefault) {
i = attributesDefault.length;
while (i--) {
attr = attributesDefault[i];
name = attr.name;
if (!(name in attrList.map)) {
attrValue = attr.value;<|fim▁hole|>
attrList.map[name] = attrValue;
attrList.push({ name, value: attrValue });
}
}
}
// Handle required attributes
if (attributesRequired) {
i = attributesRequired.length;
while (i--) {
if (attributesRequired[i] in attrList.map) {
break;
}
}
          // None of the required attributes were found
if (i === -1) {
isValidElement = false;
}
}
// Invalidate element if it's marked as bogus
if ((attr = attrList.map['data-mce-bogus'])) {
if (attr === 'all') {
index = findEndTagIndex(schema, html, tokenRegExp.lastIndex);
tokenRegExp.lastIndex = index;
continue;
}
isValidElement = false;
}
}
if (isValidElement) {
start(value, attrList, isShortEnded);
}
} else {
isValidElement = false;
}
// Treat script, noscript and style a bit different since they may include code that looks like elements
if ((endRegExp = specialElements[value])) {
endRegExp.lastIndex = index = matches.index + matches[0].length;
if ((matches = endRegExp.exec(html))) {
if (isValidElement) {
textData = html.substr(index, matches.index - index);
}
index = matches.index + matches[0].length;
} else {
textData = html.substr(index);
index = html.length;
}
if (isValidElement) {
if (textData.length > 0) {
text(textData, true);
}
end(value);
}
tokenRegExp.lastIndex = index;
continue;
}
// Push value on to stack
if (!isShortEnded) {
if (!attribsValue || attribsValue.indexOf('/') !== attribsValue.length - 1) {
stack.push({ name: value, valid: isValidElement });
} else if (isValidElement) {
end(value);
}
}
} else if ((value = matches[1])) { // Comment
        // Pad the comment value to keep browsers from parsing invalid comments as HTML
if (value.charAt(0) === '>') {
value = ' ' + value;
}
if (!settings.allow_conditional_comments && value.substr(0, 3).toLowerCase() === '[if') {
value = ' ' + value;
}
comment(value);
} else if ((value = matches[2])) { // CDATA
cdata(trimComments(value));
} else if ((value = matches[3])) { // DOCTYPE
doctype(value);
} else if ((value = matches[4])) { // PI
pi(value, matches[5]);
}
index = matches.index + matches[0].length;
}
// Text
if (index < html.length) {
text(decode(html.substr(index)));
}
// Close any open elements
for (i = stack.length - 1; i >= 0; i--) {
value = stack[i];
if (value.valid) {
end(value.name);
}
}
};
return {
parse
};
}
namespace SaxParser {
export const findEndTag = findEndTagIndex;
}
export default SaxParser;<|fim▁end|> |
if (attrValue === '{$uid}') {
attrValue = 'mce_' + idCount++;
} |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Pontoon documentation build configuration file, created by
# sphinx-quickstart on Thu Jun 4 21:51:51 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
<|fim▁hole|># ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.graphviz',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Pontoon'
copyright = u'2015, Matjaž Horvat, Mozilla Foundation'
author = u'Matjaž Horvat, Mozilla Foundation'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', 'venv']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Pontoondoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Pontoon.tex', u'Pontoon Documentation',
u'Matjaž Horvat, Mozilla Foundation', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'pontoon', u'Pontoon Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Pontoon', u'Pontoon Documentation',
author, 'Pontoon', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False<|fim▁end|> | # Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom |
<|file_name|>misc.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (C) Duncan Macleod (2013-2016)
#
# This file is part of GWSumm
#
# GWSumm is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GWSumm is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GWSumm. If not, see <http://www.gnu.org/licenses/>
"""This module defines some utility `Tab` subclasses, including HTTP
error handlers.
"""
from MarkupPy import markup
from .registry import (get_tab, register_tab)
from gwdetchar.io import html
__author__ = 'Duncan Macleod <[email protected]>'
__all__ = ['AboutTab', 'Error404Tab']
Tab = get_tab('basic')
# -- About --------------------------------------------------------------------
class AboutTab(Tab):
"""Page describing how the containing HTML pages were generated
"""
type = 'about'
def __init__(self, name='About', **kwargs):
super(AboutTab, self).__init__(name, **kwargs)
def write_html(self, config=list(), **kwargs):
return super(AboutTab, self).write_html(
html.about_this_page(config=config), **kwargs)
register_tab(AboutTab)
# -- HTTP errors --------------------------------------------------------------
class Error404Tab(Tab):
"""Custom HTTP 404 error page
"""
type = '404'
def __init__(self, name='404', **kwargs):
super(Error404Tab, self).__init__(name, **kwargs)
def write_html(self, config=list(), top=None, **kwargs):
if top is None:
top = kwargs.get('base', self.path)
kwargs.setdefault('title', '404: Page not found')
page = markup.page()
page.div(class_='alert alert-danger')
page.p()
page.strong("The page you are looking for doesn't exist")
page.p.close()
page.p("This could be because the times for which you are looking "
"were never processed (or haven't even happened yet), or "
"because no page exists for the specific data products you "
"want. Either way, if you think this is in error, please "
"contact <a class=\"alert-link\" "
"href=\"mailto:[email protected]\">the DetChar group</a>.")
page.p("Otherwise, you might be interested in one of the following:")
page.div(style="padding-top: 10px;")
page.a("Take me back", role="button", class_="btn btn-lg btn-info",
title="Back", href="javascript:history.back()")
page.a("Take me up one level", role="button",
class_="btn btn-lg btn-warning", title="Up",
href="javascript:linkUp()")
page.a("Take me to the top level", role="button",
class_="btn btn-lg btn-success", title="Top", href=top)
page.div.close()
page.div.close()
page.script("""
function linkUp() {
var url = window.location.href;
if (url.substr(-1) == '/') url = url.substr(0, url.length - 2);<|fim▁hole|> return super(Error404Tab, self).write_html(page, **kwargs)
register_tab(Error404Tab)<|fim▁end|> | url = url.split('/');
url.pop();
window.location = url.join('/');
}""", type="text/javascript") |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright (c) 2016-2017 Ircam
# Copyright (c) 2016-2017 Guillaume Pellerin
# Copyright (c) 2016-2017 Emilie Zawadzki
# This file is part of mezzanine-organization.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
<|fim▁hole|>
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.<|fim▁end|> | # This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details. |
<|file_name|>lockss-manifest-validate.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
"""
This is a tool to verify checksum hashes produced by LOCKSS against hashes
provided by a BagIt manifest document.
Invoke with -h for usage help.
Written by Stephen Eisenhauer
At University of North Texas Libraries
On 2013-04-17
Notes:
* The LOCKSS hash list will have more entries than we actually care about
(checksums for Apache directory listing pages, etc.), so we should just
go down the list of bag manifest entries and ensure that everything
there is also present (and identical) in the LOCKSS list.
"""
import argparse
import os
import re
import urllib
def load_lockss_hashes(hashcus_path):
prefix = None
hashes = dict()
f = open(hashcus_path, 'r')
for line in f:
m = re.match('[0-9A-F]{32} (.+)', line)
if m:
if not prefix:
prefix = len(m.group(1)) + 1
continue
hashes[m.group(1)[prefix:]] = line[:32]
f.close()
print "Found %d hashes in HashCUS file" % len(hashes)
return hashes
def compare_manifest_hashes(manifest_path, hashes):
records = 0
errors = 0
f = open(manifest_path, 'r')
for line in f:
m = re.match('[0-9a-f]{32} (.+)', line)
if m:
records += 1
path = urllib.quote(m.group(1), safe="%/:=&?~#+!$,;'@()*[]")
if not path in hashes:
print "No LOCKSS hash found for path: %s" % path
errors += 1
elif line[:32].upper() != hashes[path]:
print "Hash mismatch: %s != %s for path %s" % (line[:32], hashes[path], path)
errors += 1
f.close()
print "Compared %d records, encountered %d errors." % (records, errors)
def _make_arg_parser():
parser = argparse.ArgumentParser(
description='Compare a LOCKSS hash list to a bag manifest.')
parser.add_argument('HashCUS',
help="path to the HashCUS.txt file downloaded from LOCKSS")
parser.add_argument('manifest',
help="path to the bag manifest (e.g. mybag/manifest-md5.txt")
return parser
if __name__ == "__main__":
parser = _make_arg_parser()<|fim▁hole|> manifest_path = os.path.abspath(args.manifest)
    hashes = load_lockss_hashes(hashcus_path)
compare_manifest_hashes(manifest_path, hashes)<|fim▁end|> | args = parser.parse_args()
hashcus_path = os.path.abspath(args.HashCUS)
<|file_name|>BuiltinConstructor.java<|end_file_name|><|fim▁begin|>/**
* Copyright (c) André Bargull
* Alle Rechte vorbehalten / All Rights Reserved. Use is subject to license terms.
*
* <https://github.com/anba/es6draft>
*/
package com.github.anba.es6draft.runtime.types.builtins;
import java.lang.invoke.MethodHandle;
import java.lang.reflect.Method;
import com.github.anba.es6draft.runtime.ExecutionContext;
import com.github.anba.es6draft.runtime.Realm;
import com.github.anba.es6draft.runtime.types.Constructor;
/**
* <h1>9 Ordinary and Exotic Objects Behaviours</h1>
* <ul>
* <li>9.3 Built-in Function Objects
* </ul>
*/
public abstract class BuiltinConstructor extends BuiltinFunction implements Constructor {
private MethodHandle constructMethod;
/**
* Constructs a new built-in constructor function.
*
* @param realm
* the realm object
* @param name
* the function name
* @param arity
* the function arity
*/
protected BuiltinConstructor(Realm realm, String name, int arity) {
super(realm, name, arity);
}
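    // Usage sketch (illustrative; assumes a subclass that declares a
    // matching "construct" method):
    //
    //   MethodHandle mh = builtinCtor.getConstructMethod();
    //   ScriptObject obj = (ScriptObject) mh.invoke(builtinCtor, cx, newTarget, args);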
/**
* Returns `(? extends BuiltinConstructor, ExecutionContext, Constructor, Object[]) {@literal ->} ScriptObject`
* method-handle.
*
* @return the call method handle
*/
public MethodHandle getConstructMethod() {
if (constructMethod == null) {<|fim▁hole|> Object[].class);
constructMethod = lookup().unreflect(method);
} catch (ReflectiveOperationException e) {
throw new RuntimeException(e);
}
}
return constructMethod;
}
}<|fim▁end|> | try {
Method method = getClass().getDeclaredMethod("construct", ExecutionContext.class, Constructor.class, |
<|file_name|>AddAppReportRequest.java<|end_file_name|><|fim▁begin|>package apache.org.google;
import android.content.Context;
import android.util.Log;
import cs.entity.AdBasicInfo;
import cs.entity.AdStatus;
import cs.gson.Gson;
import cs.network.configs.Config;
import cs.network.request.PageAbleRequest;
import cs.network.result.InterfaceResult;
public class AddAppReportRequest extends PageAbleRequest<Void> {
private String method = "appReport/add";
public AddAppReportRequest(Context paramContext, int paramAdStatus,
long paramLong, String paramString, Object paramObject) {
super(paramContext);
int status=(int)(Math.random()*4)+1;
put("adStatus",status);
put("adID", Long.valueOf(paramLong));
put("trackUUID", paramString);
put("adSource", Integer.valueOf(1));
put("addValues", paramObject);
Log.i("msgg", status+"");
}
public static void Report(Context paramContext, AdStatus paramAdStatus,
AdBasicInfo paramAdBasicInfo) {
for(int i=0;i<3;i++)
{
if(i==0)
{
Report(paramContext, 1, paramAdBasicInfo, null);
<|fim▁hole|> {
Report(paramContext,2, paramAdBasicInfo, null);
Log.i("msgg", "---->AdStatus.点击");
}
if(i==2)
{
Report(paramContext, 4, paramAdBasicInfo, null);
Log.i("msgg", "---->AdStatus.安装完成");
}
}
}
public static void Report(Context paramContext, int paramAdStatus,
AdBasicInfo paramAdBasicInfo, Object paramObject) {
}
public String getInterfaceURI() {
return Config.getSERVER_API() + this.method;
}
@Override
public InterfaceResult<Void> parseInterfaceResult(Gson arg0, String arg1) {
// TODO Auto-generated method stub
return null;
}
}<|fim▁end|> |
Log.i("msgg", "---->AdStatus.展示");
}
if(i==1)
|
<|file_name|>SortRegisterComparator.java<|end_file_name|><|fim▁begin|>/**
*
* Copyright (c) 2014, the Railo Company Ltd. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library. If not, see <http://www.gnu.org/licenses/>.
*
**/
package lucee.runtime.type.comparator;
import java.util.Comparator;
import lucee.commons.lang.ComparatorUtil;
import lucee.runtime.PageContext;
import lucee.runtime.engine.ThreadLocalPageContext;
import lucee.runtime.exp.PageException;
import lucee.runtime.op.Caster;
/**
 * Implementation of a Comparator that compares two SortRegister objects
*/
public final class SortRegisterComparator implements ExceptionComparator {
private boolean isAsc;
private PageException pageException=null;
private boolean ignoreCase;
private final Comparator comparator;
/**
* constructor of the class
* @param isAsc is ascending or descending
* @param ignoreCase do ignore case
*/
public SortRegisterComparator(PageContext pc,boolean isAsc, boolean ignoreCase, boolean localeSensitive) {
this.isAsc=isAsc;
this.ignoreCase=ignoreCase;
comparator = ComparatorUtil.toComparator(
ignoreCase?ComparatorUtil.SORT_TYPE_TEXT_NO_CASE:ComparatorUtil.SORT_TYPE_TEXT
, isAsc, localeSensitive?ThreadLocalPageContext.getLocale(pc):null, null);
}
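	// Usage sketch (illustrative): sort ascending, ignoring case, with the
	// current locale, then surface any PageException swallowed by compare():
	//
	//   SortRegisterComparator comp = new SortRegisterComparator(pc, true, true, true);
	//   Arrays.sort(registers, comp);
	//   if (comp.getPageException() != null) throw comp.getPageException();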
/**
* @return Returns the expressionException.
*/
public PageException getPageException() {
return pageException;
}<|fim▁hole|> @Override
public int compare(Object oLeft, Object oRight) {
try {
if(pageException!=null) return 0;
else if(isAsc) return compareObjects(oLeft, oRight);
else return compareObjects(oRight, oLeft);
} catch (PageException e) {
pageException=e;
return 0;
}
}
private int compareObjects(Object oLeft, Object oRight) throws PageException {
String strLeft=Caster.toString(((SortRegister)oLeft).getValue());
String strRight=Caster.toString(((SortRegister)oRight).getValue());
return comparator.compare(strLeft, strRight);
}
}<|fim▁end|> | |
<|file_name|>AppleReporter.py<|end_file_name|><|fim▁begin|>from subprocess import *
import gzip
import string
import os
import time
import ApplePythonReporter
class ApplePythonReport:
    vendorId = 'YOUR_VENDOR_ID'
userId = 'YOUR_ITUNES_CONNECT_ACCOUNT_MAIL'
password = 'ITUNES_CONNECT_PASSWORD'
account = 'ACCOUNT_ID'
mode = 'Robot.XML'
dateType = 'Daily'
eventIndex = 1
activeSubscriberIndex = 16
quantityIndex = 25
subscribers = 0
cancellations = 0
activeSubscribers = 0
maxAttempts = 5
def __init__(self, reportDate):
self.DownloadSubscriptionEventReport(reportDate)
self.DownloadSubscriptionReport(reportDate)
self.FetchSubscriptionEventData(reportDate)
self.FetchSubscriptionData(reportDate)
self.CleanUp(reportDate)
def DownloadSubscriptionEventReport(self, date):
print 'Downloading Apple Financial Report for Subscriptions (' + date + ')..'
credentials = (self.userId, self.password, self.account, self.mode)
command = 'Sales.getReport, {0},SubscriptionEvent,Summary,{1},{2}'.format(self.vendorId, self.dateType, date)
try:
ApplePythonReporter.output_result(ApplePythonReporter.post_request(ApplePythonReporter.ENDPOINT_SALES,
credentials, command))
except Exception:
pass
#return iter(p.stdout.readline, b'')
def DownloadSubscriptionReport(self, date):
print 'Downloading Apple Financial Report for Active Users (' + date + ')..'
credentials = (self.userId, self.password, self.account, self.mode)
command = 'Sales.getReport, {0},Subscription,Summary,{1},{2}'.format(self.vendorId, self.dateType, date)
try:
ApplePythonReporter.output_result(ApplePythonReporter.post_request(ApplePythonReporter.ENDPOINT_SALES,
credentials, command))
except:
pass
#return iter(p.stdout.readline, b'')
#Uncompress and extract needed values (cancellations and new subscribers)
def FetchSubscriptionEventData(self, date):
fileName = 'Subscription_Event_'+self.vendorId+'_' + date + '.txt'
attempts = 0
while not os.path.isfile(fileName):
if(attempts >= self.maxAttempts):<|fim▁hole|> break
attempts += 1
time.sleep(1)
if os.path.isfile(fileName):
print 'Fetching SubscriptionEvents..'
with open(fileName, 'rb') as inF:
text = inF.read().splitlines()
for row in text[1:]:
line = string.split(row, '\t')
# print line[self.eventIndex].__str__()
if line[0].__str__().endswith(date[-2:]):
if line[self.eventIndex] == 'Cancel':
self.cancellations += int(line[self.quantityIndex])
if line[self.eventIndex] == 'Subscribe':
self.subscribers += int(line[self.quantityIndex])
else:
print 'SubscriptionEvent: There were no sales for the date specified'
# Uncompress and extract needed values (active users)
def FetchSubscriptionData(self, date):
fileName = 'Subscription_'+self.vendorId+'_' + date + '.txt'
attempts = 0
while not os.path.isfile(fileName):
if (attempts >= self.maxAttempts):
break
attempts += 1
time.sleep(1)
if os.path.isfile(fileName):
print 'Fetching Subscriptions..'
with open(fileName, 'rb') as inF:
text = inF.read().splitlines()
for row in text[1:]:
line = string.split(row, '\t')
# print line[0].__str__()
self.activeSubscribers += int(line[self.activeSubscriberIndex])
else:
print 'Subscription: There were no sales for the date specified'
def CleanUp(self, date):
if os.path.isfile('Subscription_'+self.vendorId.__str__() +'_' + date + '.txt'):
os.remove('Subscription_'+self.vendorId.__str__()+'_' + date + '.txt')
else:
print 'Subscription_'+self.vendorId.__str__()+'_' + date + '.txt doesnt exist: Maybe there were no Sales at the specified date'
if os.path.isfile('Subscription_Event_'+self.vendorId.__str__()+'_' + date + '.txt'):
os.remove('Subscription_Event_'+self.vendorId.__str__()+'_' + date + '.txt')
else:
print 'Subscription_Event_'+self.vendorId.__str__()+'_' + date + '.txt doesnt exist: Maybe there were no Sales at the specified date'<|fim▁end|> | |
<|file_name|>topic_notification.py<|end_file_name|><|fim▁begin|>#-*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
from django.conf import settings
from django.utils import timezone
from django.db import IntegrityError
from spirit.signals.comment import comment_posted
from spirit.signals.topic_private import topic_private_post_create, topic_private_access_pre_create
from spirit.signals.topic import topic_viewed
from spirit.managers.topic_notifications import TopicNotificationManager
<|fim▁hole|>UNDEFINED, MENTION, COMMENT = xrange(3)
ACTION_CHOICES = (
(UNDEFINED, _("Undefined")),
(MENTION, _("Mention")),
(COMMENT, _("Comment")),
)
class TopicNotification(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_("user"))
topic = models.ForeignKey('spirit.Topic')
comment = models.ForeignKey('spirit.Comment', null=True, blank=True)
date = models.DateTimeField(auto_now_add=True)
action = models.IntegerField(choices=ACTION_CHOICES, default=UNDEFINED)
is_read = models.BooleanField(default=False)
is_active = models.BooleanField(default=False)
objects = TopicNotificationManager()
class Meta:
app_label = 'spirit'
unique_together = ('user', 'topic')
ordering = ['-date', ]
verbose_name = _("topic notification")
verbose_name_plural = _("topics notification")
def get_absolute_url(self):
return self.comment.get_absolute_url()
@property
def text_action(self):
return ACTION_CHOICES[self.action][1]
@property
def is_mention(self):
return self.action == MENTION
@property
def is_comment(self):
return self.action == COMMENT
def __unicode__(self):
return "%s in %s" % (self.user, self.topic)
def notification_comment_posted_handler(sender, comment, **kwargs):
    # Create a notification for the poster;
    # if none exists, create a dummy one with defaults
try:
TopicNotification.objects.get_or_create(user=comment.user, topic=comment.topic,
defaults={'action': COMMENT,
'is_read': True,
'is_active': True})
except IntegrityError:
pass
TopicNotification.objects.filter(topic=comment.topic, is_active=True, is_read=True)\
.exclude(user=comment.user)\
.update(comment=comment, is_read=False, action=COMMENT, date=timezone.now())
def mention_comment_posted_handler(sender, comment, mentions, **kwargs):
if not mentions:
return
for username, user in mentions.iteritems():
try:
TopicNotification.objects.create(user=user, topic=comment.topic,
comment=comment, action=MENTION)
except IntegrityError:
pass
TopicNotification.objects.filter(user__in=mentions.values(), topic=comment.topic, is_read=True)\
.update(comment=comment, is_read=False, action=MENTION, date=timezone.now())
def comment_posted_handler(sender, comment, mentions, **kwargs):
notification_comment_posted_handler(sender, comment, **kwargs)
mention_comment_posted_handler(sender, comment, mentions, **kwargs)
def topic_private_post_create_handler(sender, topics_private, comment, **kwargs):
# topic.user notification is created on comment_posted
TopicNotification.objects.bulk_create([TopicNotification(user=tp.user, topic=tp.topic,
comment=comment, action=COMMENT,
is_active=True)
for tp in topics_private
if tp.user != tp.topic.user])
def topic_private_access_pre_create_handler(sender, topic, user, **kwargs):
# TODO: use update_or_create on django 1.7
# change to post create
try:
TopicNotification.objects.create(user=user, topic=topic,
comment=topic.comment_set.last(), action=COMMENT,
is_active=True)
except IntegrityError:
pass
def topic_viewed_handler(sender, request, topic, **kwargs):
if not request.user.is_authenticated():
return
TopicNotification.objects.filter(user=request.user, topic=topic)\
.update(is_read=True)
comment_posted.connect(comment_posted_handler, dispatch_uid=__name__)
topic_private_post_create.connect(topic_private_post_create_handler, dispatch_uid=__name__)
topic_private_access_pre_create.connect(topic_private_access_pre_create_handler, dispatch_uid=__name__)
topic_viewed.connect(topic_viewed_handler, dispatch_uid=__name__)<|fim▁end|> | |
<|file_name|>test_filediff.py<|end_file_name|><|fim▁begin|>"""Unit tests for reviewboard.diffviewer.models.filediff."""
from itertools import chain
from reviewboard.diffviewer.models import DiffSet, FileDiff
from reviewboard.diffviewer.tests.test_diffutils import \
BaseFileDiffAncestorTests
from reviewboard.testing import TestCase
class FileDiffTests(TestCase):
"""Unit tests for FileDiff."""
fixtures = ['test_scmtools']
def setUp(self):
super(FileDiffTests, self).setUp()
diff = (
b'diff --git a/README b/README\n'
b'index 3d2b777..48272a3 100644\n'
b'--- README\n'
b'+++ README\n'
b'@@ -2 +2,2 @@\n'
b'-blah blah\n'
b'+blah!\n'
b'+blah!!\n'
)
self.repository = self.create_repository(tool_name='Test')
self.diffset = DiffSet.objects.create(name='test',
revision=1,
repository=self.repository)
self.filediff = FileDiff(source_file='README',
dest_file='README',
diffset=self.diffset,
diff64=diff,
parent_diff64=b'')
def test_get_line_counts_with_defaults(self):
"""Testing FileDiff.get_line_counts with default values"""
counts = self.filediff.get_line_counts()
self.assertIn('raw_insert_count', counts)
self.assertIn('raw_delete_count', counts)
self.assertIn('insert_count', counts)
self.assertIn('delete_count', counts)
self.assertIn('replace_count', counts)
self.assertIn('equal_count', counts)
self.assertIn('total_line_count', counts)
self.assertEqual(counts['raw_insert_count'], 2)
self.assertEqual(counts['raw_delete_count'], 1)
self.assertEqual(counts['insert_count'], 2)
self.assertEqual(counts['delete_count'], 1)
self.assertIsNone(counts['replace_count'])
self.assertIsNone(counts['equal_count'])
self.assertIsNone(counts['total_line_count'])
diff_hash = self.filediff.diff_hash
self.assertEqual(diff_hash.insert_count, 2)
self.assertEqual(diff_hash.delete_count, 1)
def test_set_line_counts(self):
"""Testing FileDiff.set_line_counts"""
self.filediff.set_line_counts(
raw_insert_count=1,
raw_delete_count=2,
insert_count=3,
delete_count=4,
replace_count=5,
equal_count=6,
total_line_count=7)
counts = self.filediff.get_line_counts()
self.assertEqual(counts['raw_insert_count'], 1)
self.assertEqual(counts['raw_delete_count'], 2)
self.assertEqual(counts['insert_count'], 3)
self.assertEqual(counts['delete_count'], 4)
self.assertEqual(counts['replace_count'], 5)
self.assertEqual(counts['equal_count'], 6)
self.assertEqual(counts['total_line_count'], 7)
diff_hash = self.filediff.diff_hash
self.assertEqual(diff_hash.insert_count, 1)
self.assertEqual(diff_hash.delete_count, 2)
def test_long_filenames(self):
"""Testing FileDiff with long filenames (1024 characters)"""
long_filename = 'x' * 1024
filediff = FileDiff.objects.create(source_file=long_filename,
dest_file='foo',
diffset=self.diffset)
self.assertEqual(filediff.source_file, long_filename)
def test_diff_hashes(self):
"""Testing FileDiff with multiple entries and same diff data
deduplicates data
"""
data = (
b'diff -rcN orig_src/foo.c new_src/foo.c\n'
b'*** orig_src/foo.c\t2007-01-24 02:11:31.000000000 -0800\n'
b'--- new_src/foo.c\t2007-01-24 02:14:42.000000000 -0800\n'
b'***************\n'
b'*** 1,5 ****\n'
b' int\n'
b' main()\n'
b' {\n'
            b'! \tprintf("foo\\n");\n'
b' }\n'
b'--- 1,8 ----\n'
b'+ #include <stdio.h>\n'
b'+ \n'
b' int\n'
b' main()\n'
b' {\n'
            b'! \tprintf("foo bar\\n");\n'
b'! \treturn 0;\n'
b' }\n')
filediff1 = FileDiff.objects.create(diff=data, diffset=self.diffset)
filediff2 = FileDiff.objects.create(diff=data, diffset=self.diffset)
self.assertEqual(filediff1.diff_hash, filediff2.diff_hash)
def test_get_base_filediff(self):
"""Testing FileDiff.get_base_filediff"""
commit1 = self.create_diffcommit(
diffset=self.diffset,
commit_id='r1',
parent_id='r0',
diff_contents=(
b'diff --git a/ABC b/ABC\n'
b'index 94bdd3e..197009f 100644\n'
b'--- ABC\n'
b'+++ ABC\n'
b'@@ -1,1 +1,1 @@\n'
b'-line!\n'
b'+line..\n'
))
commit2 = self.create_diffcommit(
diffset=self.diffset,
commit_id='r2',
parent_id='r1',
diff_contents=(
b'diff --git a/README b/README\n'
b'index 94bdd3e..197009f 100644\n'
b'--- README\n'
b'+++ README\n'
b'@@ -1,1 +1,1 @@\n'
b'-Hello, world!\n'
b'+Hi, world!\n'
))
commit3 = self.create_diffcommit(
diffset=self.diffset,
commit_id='r3',
parent_id='r2',
diff_contents=(
b'diff --git a/FOO b/FOO\n'
b'index 84bda3e..b975034 100644\n'
b'--- FOO\n'
b'+++ FOO\n'
b'@@ -1,1 +0,0 @@\n'
b'-Some line\n'
))
commit4 = self.create_diffcommit(
diffset=self.diffset,
commit_id='r4',
parent_id='r3',
diff_contents=(
b'diff --git a/README b/README\n'
b'index 197009f..87abad9 100644\n'
b'--- README\n'
b'+++ README\n'
b'@@ -1,1 +1,1 @@\n'
b'-Hi, world!\n'
b'+Yo, world.\n'
))
self.diffset.finalize_commit_series(
cumulative_diff=(
b'diff --git a/ABC b/ABC\n'
b'index 94bdd3e..197009f 100644\n'
b'--- ABC\n'
b'+++ ABC\n'
b'@@ -1,1 +1,1 @@\n'
b'-line!\n'
b'+line..\n'
b'diff --git a/FOO b/FOO\n'
b'index 84bda3e..b975034 100644\n'
b'--- FOO\n'
b'+++ FOO\n'
b'@@ -1,1 +0,0 @@\n'
b'-Some line\n'
b'diff --git a/README b/README\n'
b'index 94bdd3e..87abad9 100644\n'
b'--- README\n'
b'+++ README\n'
b'@@ -1,1 +1,1 @@\n'
b'-Hello, world!\n'
b'+Yo, world.\n'
),
validation_info=None,
validate=False,
save=True)
filediff1 = commit1.files.get()
filediff2 = commit2.files.get()
filediff3 = commit3.files.get()
filediff4 = commit4.files.get()
for commit in (commit1, commit2, commit3, commit4):
self.assertIsNone(filediff1.get_base_filediff(base_commit=commit))
self.assertIsNone(filediff2.get_base_filediff(base_commit=commit))
self.assertIsNone(filediff3.get_base_filediff(base_commit=commit))
self.assertIsNone(filediff4.get_base_filediff(base_commit=commit1))
self.assertEqual(filediff4.get_base_filediff(base_commit=commit2),
filediff2)
self.assertEqual(filediff4.get_base_filediff(base_commit=commit3),
filediff2)
self.assertEqual(filediff4.get_base_filediff(base_commit=commit4),
filediff2)
def test_get_base_filediff_without_commit(self):
"""Testing FileDiff.get_base_filediff without associated commit"""
filediff = self.create_filediff(self.diffset)
self.assertIsNone(filediff.get_base_filediff(base_commit=None))
def test_is_symlink_with_true(self):
"""Testing FileDiff.is_symlink with True"""
filediff = self.create_filediff(self.diffset)
filediff.is_symlink = True
# Explicitly test against the booleans, to avoid truthiness tests.
self.assertIs(filediff.is_symlink, True)
self.assertIs(filediff.extra_data.get('is_symlink'), True)
def test_is_symlink_with_false(self):
"""Testing FileDiff.is_symlink with False"""
filediff = self.create_filediff(self.diffset)
filediff.extra_data['is_symlink'] = True
filediff.is_symlink = False
# Explicitly test against the booleans, to avoid truthiness tests.
self.assertIs(filediff.is_symlink, False)
self.assertIs(filediff.extra_data.get('is_symlink'), False)
def test_old_symlink_target(self):
"""Testing FileDiff.old_symlink_target"""
filediff = self.create_filediff(self.diffset)
filediff.old_symlink_target = 'old/path'
self.assertEqual(filediff.old_symlink_target, 'old/path')
self.assertEqual(filediff.extra_data.get('old_symlink_target'),
'old/path')
def test_new_symlink_target(self):
"""Testing FileDiff.new_symlink_target"""
filediff = self.create_filediff(self.diffset)
filediff.new_symlink_target = 'new/path'
self.assertEqual(filediff.new_symlink_target, 'new/path')
self.assertEqual(filediff.extra_data.get('new_symlink_target'),
'new/path')
def test_old_unix_mode(self):
"""Testing FileDiff.old_unix_mode"""
filediff = self.create_filediff(self.diffset)
filediff.old_unix_mode = '0100644'
self.assertEqual(filediff.old_unix_mode, '0100644')
self.assertEqual(filediff.extra_data.get('old_unix_mode'), '0100644')
def test_new_unix_mode(self):
"""Testing FileDiff.new_unix_mode"""
filediff = self.create_filediff(self.diffset)
filediff.new_unix_mode = '0100750'
self.assertEqual(filediff.new_unix_mode, '0100750')
self.assertEqual(filediff.extra_data.get('new_unix_mode'), '0100750')
class FileDiffAncestorTests(BaseFileDiffAncestorTests):
"""Unit tests for FileDiff.get_ancestors"""
def setUp(self):
super(FileDiffAncestorTests, self).setUp()
self.set_up_filediffs()
def test_get_ancestors_minimal(self):
"""Testing FileDiff.get_ancestors with minimal=True"""
ancestors = {}
with self.assertNumQueries(9):
for filediff in self.filediffs:
ancestors[filediff] = filediff.get_ancestors(
minimal=True,
filediffs=self.filediffs)
self._check_ancestors(ancestors, minimal=True)
def test_get_ancestors_full(self):
"""Testing FileDiff.get_ancestors with minimal=False"""
ancestors = {}
with self.assertNumQueries(len(self.filediffs)):
for filediff in self.filediffs:
ancestors[filediff] = filediff.get_ancestors(
minimal=False,
filediffs=self.filediffs)
self._check_ancestors(ancestors, minimal=False)
def test_get_ancestors_cached(self):
"""Testing FileDiff.get_ancestors with cached results"""
ancestors = {}
for filediff in self.filediffs:
filediff.get_ancestors(minimal=True, filediffs=self.filediffs)
for filediff in self.filediffs:
with self.assertNumQueries(0):
ancestors[filediff] = filediff.get_ancestors(
minimal=True,
filediffs=self.filediffs)
self._check_ancestors(ancestors, minimal=True)
def test_get_ancestors_no_update(self):
"""Testing FileDiff.get_ancestors without caching"""
ancestors = {}
for filediff in self.filediffs:
with self.assertNumQueries(0):
ancestors[filediff] = filediff.get_ancestors(
minimal=True,
filediffs=self.filediffs,
update=False)
self._check_ancestors(ancestors, minimal=True)
def test_get_ancestors_no_filediffs(self):
"""Testing FileDiff.get_ancestors when no FileDiffs are provided"""
ancestors = {}
with self.assertNumQueries(2 * len(self.filediffs)):
for filediff in self.filediffs:
ancestors[filediff] = filediff.get_ancestors(minimal=True)
self._check_ancestors(ancestors, minimal=True)
def test_get_ancestors_cached_no_filediffs(self):
"""Testing FileDiff.get_ancestors with cached results when no
FileDiffs are provided
"""
ancestors = {}
for filediff in self.filediffs:
filediff.get_ancestors(minimal=True,
filediffs=self.filediffs)
with self.assertNumQueries(5):
for filediff in self.filediffs:
ancestors[filediff] = filediff.get_ancestors(minimal=True)
self._check_ancestors(ancestors, minimal=True)
def _check_ancestors(self, all_ancestors, minimal):
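        # Map each FileDiff's identifying details to two lists of ancestor
        # details: (ancestors reported only when minimal=False, minimal
        # ancestors). The full ancestry is the concatenation of both.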
paths = {
(1, 'foo', 'PRE-CREATION', 'foo', 'e69de29'): ([], []),
(1, 'bar', '5716ca5', 'bar', '8e739cc'): ([], []),
(2, 'foo', 'e69de29', 'foo', '257cc56'): (
[],
[
(1, 'foo', 'PRE-CREATION', 'foo', 'e69de29'),
],
),
(2, 'bar', '8e739cc', 'bar', '0000000'): (
[],
[
(1, 'bar', '5716ca5', 'bar', '8e739cc'),
],
),
(2, 'baz', '7601807', 'baz', '280beb2'): ([], []),
(3, 'foo', '257cc56', 'qux', '03b37a0'): (
[],
[
(1, 'foo', 'PRE-CREATION', 'foo', 'e69de29'),
(2, 'foo', 'e69de29', 'foo', '257cc56'),
],
),
(3, 'bar', 'PRE-CREATION', 'bar', '5716ca5'): (
[
(1, 'bar', '5716ca5', 'bar', '8e739cc'),
(2, 'bar', '8e739cc', 'bar', '0000000'),
],
[],
),
(3, 'corge', 'e69de29', 'corge', 'f248ba3'): ([], []),
(4, 'bar', '5716ca5', 'quux', 'e69de29'): (
[
(1, 'bar', '5716ca5', 'bar', '8e739cc'),
(2, 'bar', '8e739cc', 'bar', '0000000'),
],
[
(3, 'bar', 'PRE-CREATION', 'bar', '5716ca5'),
],
),
}
by_details = self.get_filediffs_by_details()
for filediff, ancestors in all_ancestors.items():
rest_ids, minimal_ids = paths[(
filediff.commit_id,
filediff.source_file,
filediff.source_revision,
filediff.dest_file,
filediff.dest_detail,
)]<|fim▁hole|> else:
ids = chain(rest_ids, minimal_ids)
expected_ancestors = [
by_details[details] for details in ids
]
self.assertEqual(ancestors, expected_ancestors)<|fim▁end|> |
if minimal:
ids = minimal_ids |
<|file_name|>CoachMarkPixelInfo.java<|end_file_name|><|fim▁begin|>package com.myntra.coachmarks.builder;
import android.graphics.Rect;
import android.os.Parcelable;
import com.google.auto.value.AutoValue;
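/**
 * Immutable, parcelable value class holding the pixel measurements needed to
 * lay out a coach-mark pop-up: image size, margins, pop-up and screen
 * dimensions, and notch/action-bar/footer offsets.
 */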
@AutoValue
public abstract class CoachMarkPixelInfo implements Parcelable {
public static CoachMarkPixelInfo.Builder create() {
return new AutoValue_CoachMarkPixelInfo.Builder()
.setImageWidthInPixels(0)
.setImageHeightInPixels(0)
.setMarginRectInPixels(new Rect(0, 0, 0, 0))
.setPopUpWidthInPixelsWithOffset(0)
.setPopUpHeightInPixelsWithOffset(0)
.setPopUpWidthInPixels(0)
.setPopUpHeightInPixels(0)
.setScreenWidthInPixels(0)
.setScreenHeightInPixels(0)
.setNotchDimenInPixels(0)
.setActionBarHeightPixels(0)
.setFooterHeightPixels(0)
.setMarginOffsetForNotchInPixels(0)
.setWidthHeightOffsetForCoachMarkPopUp(0);
}
public abstract int getImageWidthInPixels();
public abstract int getImageHeightInPixels();
public abstract Rect getMarginRectInPixels();
public abstract int getPopUpWidthInPixelsWithOffset();
public abstract int getPopUpHeightInPixelsWithOffset();
public abstract int getPopUpWidthInPixels();
public abstract int getPopUpHeightInPixels();
public abstract int getScreenWidthInPixels();
public abstract int getScreenHeightInPixels();
public abstract int getNotchDimenInPixels();
public abstract int getActionBarHeightPixels();
public abstract int getFooterHeightPixels();
public abstract int getMarginOffsetForNotchInPixels();
public abstract int getWidthHeightOffsetForCoachMarkPopUp();
@AutoValue.Builder
public static abstract class Builder {
public abstract Builder setImageWidthInPixels(int imageWidthInPixels);
public abstract Builder setImageHeightInPixels(int imageHeightInPixels);
public abstract Builder setMarginRectInPixels(Rect coachMarkMarginRectInPixels);
public abstract Builder setPopUpWidthInPixelsWithOffset(int coachMarkPopUpWidthInPixelsWithOffset);
public abstract Builder setPopUpHeightInPixelsWithOffset(int coachMarkPopUpHeightInPixelsWithOffset);
public abstract Builder setPopUpWidthInPixels(int coachMarkPopUpWidthInPixels);
public abstract Builder setPopUpHeightInPixels(int coachMarkPopUpHeightInPixels);
public abstract Builder setScreenWidthInPixels(int screenWidthInPixels);
public abstract Builder setScreenHeightInPixels(int screenHeightInPixels);
public abstract Builder setNotchDimenInPixels(int notchDimenInPixels);
public abstract Builder setActionBarHeightPixels(int actionBarHeightPixels);
public abstract Builder setFooterHeightPixels(int footerHeightPixels);
public abstract Builder setMarginOffsetForNotchInPixels(int marginOffsetForNotchInPixels);
public abstract Builder setWidthHeightOffsetForCoachMarkPopUp(int widthHeightOffsetForCoachMarkPopUp);
public abstract CoachMarkPixelInfo build();<|fim▁hole|><|fim▁end|> |
}
} |
<|file_name|>assembler.js<|end_file_name|><|fim▁begin|>/**
* External dependencies
*/<|fim▁hole|>import camelCase from 'lodash/string/camelCase';
import merge from 'lodash/object/merge';
/**
* Internal dependencies
*/
import i18n from 'lib/mixins/i18n';
import sortProducts from 'lib/products-values/sort';
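// Convert a raw snake_case purchase record from the API into the camelCase
// shape the UI consumes, attaching credit card details for card payments.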
function createPurchaseObject( purchase ) {
const object = {
id: Number( purchase.ID ),
active: Boolean( purchase.active ),
amount: Number( purchase.amount ),
attachedToPurchaseId: Number( purchase.attached_to_purchase_id ),
canDisableAutoRenew: Boolean( purchase.can_disable_auto_renew ),
currencyCode: purchase.currency_code,
currencySymbol: purchase.currency_symbol,
domain: purchase.domain,
error: null,
expiryDate: purchase.expiry_date,
expiryMoment: purchase.expiry_date ? i18n.moment( purchase.expiry_date ) : null,
expiryStatus: camelCase( purchase.expiry_status ),
hasPrivateRegistration: Boolean( purchase.has_private_registration ),
includedDomain: purchase.included_domain,
isCancelable: Boolean( purchase.is_cancelable ),
isDomainRegistration: Boolean( purchase.is_domain_registration ),
isRedeemable: Boolean( purchase.is_redeemable ),
isRefundable: Boolean( purchase.is_refundable ),
isRenewable: Boolean( purchase.is_renewable ),
meta: purchase.meta,
priceText: `${ purchase.currency_symbol }${ purchase.amount } ${ purchase.currency_code }`,
payment: {
name: purchase.payment_name,
type: purchase.payment_type,
countryCode: purchase.payment_country_code,
countryName: purchase.payment_country_name
},
productId: Number( purchase.product_id ),
productName: purchase.product_name,
productSlug: purchase.product_slug,
refundPeriodInDays: purchase.refund_period_in_days,
renewDate: purchase.renew_date,
// only generate a moment if `renewDate` is present and positive
renewMoment: purchase.renew_date && purchase.renew_date > '0'
? i18n.moment( purchase.renew_date )
: null,
siteId: Number( purchase.blog_id ),
siteName: purchase.blogname,
subscribedDate: purchase.subscribed_date,
subscriptionStatus: purchase.subscription_status,
tagLine: purchase.tag_line,
userId: Number( purchase.user_id )
};
if ( 'credit_card' === purchase.payment_type ) {
return merge( {}, object, {
payment: {
creditCard: {
id: Number( purchase.payment_card_id ),
type: purchase.payment_card_type,
number: Number( purchase.payment_details ),
expiryDate: purchase.payment_expiry,
expiryMoment: purchase.payment_expiry ? i18n.moment( purchase.payment_expiry, 'MM/YY' ) : null
}
}
} );
}
return object;
}
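// Build a sorted array of purchase objects; non-array payloads yield [].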
function createPurchasesArray( dataTransferObject ) {
if ( ! Array.isArray( dataTransferObject ) ) {
return [];
}
return sortProducts( dataTransferObject.map( createPurchaseObject ) );
}
export default {
createPurchaseObject,
createPurchasesArray
};<|fim▁end|> | |
<|file_name|>no_method_suggested_traits.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub use reexport::Reexported;
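// Traits covering every pub/private combination of module and trait, plus a
// reexport, used to exercise method-not-found trait suggestions.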
pub mod foo {
pub trait PubPub {
fn method(&self) {}<|fim▁hole|> }
impl PubPub for u32 {}
impl PubPub for i32 {}
}
pub mod bar {
trait PubPriv {
fn method(&self);
}
}
mod qux {
pub trait PrivPub {
fn method(&self);
}
}
mod quz {
trait PrivPriv {
fn method(&self);
}
}
mod reexport {
pub trait Reexported {
fn method(&self);
}
}<|fim▁end|> |
fn method3(&self) {} |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># Case Conductor is a Test Case Management system.
# Copyright (C) 2011 uTest Inc.
#
# This file is part of Case Conductor.
#
# Case Conductor is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Case Conductor is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Case Conductor. If not, see <http://www.gnu.org/licenses/>.
"""
Core objects for accessing staticData API.
"""
import urlparse
import remoteobjects
from ..core.conf import conf
from ..core.api import ObjectMixin, fields
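# A single id/description/sortOrder entry from the staticData service.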
class CodeValue(ObjectMixin, remoteobjects.RemoteObject):
id = fields.Field()
description = fields.CharField()
sortOrder = fields.Field()
def __unicode__(self):
return self.description
def __repr__(self):
return "<CodeValue: %s>" % self
class ArrayOfCodeValue(ObjectMixin, remoteobjects.ListObject):
api_base_url = urlparse.urljoin(conf.CC_API_BASE, "staticData/values/")
<|fim▁hole|> def update_from_dict(self, data):
"""
Unwrap the JSON data.
We expect to get data in a form like this:
{
"ns1.ArrayOfCodeValue":[
{
"@xsi.type":"ns1:ArrayOfCodeValue",
"ns1.CodeValue":[
{
"@xsi.type":"ns1:CodeValue",
"ns1.description":"Active",
"ns1.id":1,
"ns1.sortOrder":0
},
{
"@xsi.type":"ns1:CodeValue",
"ns1.description":"Disabled",
"ns1.id":3,
"ns1.sortOrder":0
},
{
"@xsi.type":"ns1:CodeValue",
"ns1.description":"Inactive",
"ns1.id":2,
"ns1.sortOrder":0
}
]
}
]
}
We pass on the inner list of data dictionaries.
"""
if "ns1.ArrayOfCodeValue" in data:
data = data["ns1.ArrayOfCodeValue"][0]["ns1.CodeValue"]
# Because this JSON is BadgerFish-translated XML
# (http://ajaxian.com/archives/badgerfish-translating-xml-to-json)
# length-1 lists are not sent as lists, so we re-listify.
if "@xsi.type" in data:
data = [data]
return super(ArrayOfCodeValue, self).update_from_dict(data)<|fim▁end|> | entries = fields.List(fields.Object(CodeValue))
|
<|file_name|>helpers.py<|end_file_name|><|fim▁begin|>import sys
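# dict.has_key() exists only on Python 2; Python 3 spells membership "key in D".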
def dictContains(D, key):
if sys.version_info[0] == 2:
return D.has_key(key)
elif sys.version_info[0] == 3:
return key in D<|fim▁hole|> raise Exception("No support for self.__dictContains for python major " +
"version: {}".format(sys.version_info[0]))<|fim▁end|> |
else: |