Dataset columns:
file_name: string (3–137 chars)
prefix: string (0–918k chars)
suffix: string (0–962k chars)
middle: string (0–812k chars)
PhotoWidgetDropzone.tsx
import React, { useCallback } from 'react';
import { useDropzone } from 'react-dropzone';
import { Icon, Header } from 'semantic-ui-react';

interface IProps {
  setFiles: (files: object[]) => void;
}

const dropzoneStyles = {
  border: 'dashed 3px',
  borderColor: '#eee',
  borderRadius: '5px',
  paddingTop: '30px',
  textAlign: 'center' as 'center',
  height: '200px',
};

const dropzoneActive = {
  borderColor: 'green',
};

const PhotoWidgetDropzone: React.FC<IProps> = ({ setFiles }) => {
  const onDrop = useCallback(
    (acceptedFiles) => {
      setFiles(
        acceptedFiles.map((file: object) =>
          Object.assign(file, {
            preview: URL.createObjectURL(file),
        )
      );
    },
    [setFiles]
  );

  const { getRootProps, getInputProps, isDragActive } = useDropzone({ onDrop });

  return (
    <div
      {...getRootProps()}
      style={
        isDragActive
          ? { ...dropzoneStyles, ...dropzoneActive }
          : dropzoneStyles
      }
    >
      <input {...getInputProps()} />
      <Icon name="upload" size="huge" />
      <Header content="Drop image here" />
    </div>
  );
};

export default PhotoWidgetDropzone;
})
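A minimal usage sketch for the dropzone above; the parent component, its state, and the thumbnail rendering are illustrative assumptions, not part of the original file:

import React, { useState } from 'react';
import PhotoWidgetDropzone from './PhotoWidgetDropzone';

// Hypothetical parent: collects dropped files (each augmented with a
// `preview` object URL by the dropzone) and renders thumbnails.
const PhotoUploadWidget: React.FC = () => {
  const [files, setFiles] = useState<any[]>([]);
  return (
    <div>
      <PhotoWidgetDropzone setFiles={setFiles} />
      {files.map((file) => (
        <img key={file.preview} src={file.preview} alt="preview" width={100} />
      ))}
    </div>
  );
};

export default PhotoUploadWidget;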
deploy.rs
//! MIT License
//! Copyright (c) [year] [fullname]
//!
//! Permission is hereby granted, free of charge, to any person obtaining a copy
//! of this software and associated documentation files (the "Software"), to deal
//! in the Software without restriction, including without limitation the rights
//! to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
//! copies of the Software, and to permit persons to whom the Software is
//! furnished to do so, subject to the following conditions:
//!
//! The above copyright notice and this permission notice shall be included in all
//! copies or substantial portions of the Software.
//!
//! THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
//! IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
//! FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
//! AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
//! LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
//! OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
//! SOFTWARE.
//!
pub mod command;
pub mod task;
pub mod plan;

extern crate serde;
extern crate serde_json;

use std::process::Command;
use std::fs::File;
use std::io::Read;
use std::path::PathBuf;
use std::cmp::{PartialOrd, Ord};

use serde::{Serialize, Deserialize};

pub use command::{ExecutableCommand, CommandSet};
pub use task::DeployTask;
pub use plan::DeployPlan;
iron.rs
//! Exposes the `Iron` type, the main entrance point of the
//! `Iron` library.

use std::net::{ToSocketAddrs, SocketAddr};
use std::time::Duration;

pub use hyper::server::Listening;
use hyper::server::Server;
use hyper::net::{Fresh, SslServer, HttpListener, HttpsListener, NetworkListener};

use request::HttpRequest;
use response::HttpResponse;

use error::HttpResult;

use {Request, Handler};
use status;

/// The primary entrance point to `Iron`, a `struct` to instantiate a new server.
///
/// `Iron` contains the `Handler` which takes a `Request` and produces a
/// `Response`.
pub struct Iron<H> {
    /// Iron contains a `Handler`, which it uses to create responses for client
    /// requests.
    pub handler: H,

    /// Server timeouts.
    pub timeouts: Timeouts,

    /// The number of request handling threads.
    ///
    /// Defaults to `8 * num_cpus`.
    pub threads: usize,
}

/// A settings struct containing a set of timeouts which can be applied to a server.
#[derive(Debug, PartialEq, Clone, Copy)]
pub struct Timeouts {
    /// Controls the timeout for keep alive connections.
    ///
    /// The default is `Some(Duration::from_secs(5))`.
    ///
    /// NOTE: Setting this to None will have the effect of turning off keep alive.
    pub keep_alive: Option<Duration>,

    /// Controls the timeout for reads on existing connections.
    ///
    /// The default is `Some(Duration::from_secs(30))`.
    pub read: Option<Duration>,

    /// Controls the timeout for writes on existing connections.
    ///
    /// The default is `Some(Duration::from_secs(1))`.
    pub write: Option<Duration>,
}

impl Default for Timeouts {
    fn default() -> Self {
        Timeouts {
            keep_alive: Some(Duration::from_secs(5)),
            read: Some(Duration::from_secs(30)),
            write: Some(Duration::from_secs(1)),
        }
    }
}

#[derive(Clone)]
enum _Protocol {
    Http,
    Https,
}

/// Protocol used to serve content.
#[derive(Clone)]
pub struct Protocol(_Protocol);

impl Protocol {
    /// Plaintext HTTP/1
    pub fn http() -> Protocol {
        Protocol(_Protocol::Http)
    }

    /// HTTP/1 over SSL/TLS
    pub fn https() -> Protocol {
        Protocol(_Protocol::Https)
    }

    /// Returns the name used for this protocol in a URI's scheme part.
    pub fn name(&self) -> &str {
        match self.0 {
            _Protocol::Http => "http",
            _Protocol::Https => "https",
        }
    }
}

impl<H: Handler> Iron<H> {
    /// Instantiate a new instance of `Iron`.
    ///
    /// This will create a new `Iron`, the base unit of the server, using the
    /// passed in `Handler`.
    pub fn new(handler: H) -> Iron<H> {
        Iron {
            handler: handler,
            timeouts: Timeouts::default(),
            threads: 8 * ::num_cpus::get(),
        }
    }

    /// Kick off the server process using the HTTP protocol.
    ///
    /// Call this once to begin listening for requests on the server.
    /// This consumes the Iron instance, but does the listening on
    /// another task, so is not blocking.
    ///
    /// The thread returns a guard that will automatically join with the parent
    /// once it is dropped, blocking until this happens.
    pub fn http<A>(self, addr: A) -> HttpResult<Listening>
        where A: ToSocketAddrs
    {
        HttpListener::new(addr).and_then(|l| self.listen(l, Protocol::http()))
    }

    /// Kick off the server process using the HTTPS protocol.
    ///
    /// Call this once to begin listening for requests on the server.
    /// This consumes the Iron instance, but does the listening on
    /// another task, so is not blocking.
    ///
    /// The thread returns a guard that will automatically join with the parent
    /// once it is dropped, blocking until this happens.
    pub fn
<A, S>(self, addr: A, ssl: S) -> HttpResult<Listening>
        where A: ToSocketAddrs, S: 'static + SslServer + Send + Clone
    {
        HttpsListener::new(addr, ssl).and_then(|l| self.listen(l, Protocol::https()))
    }

    /// Kick off a server process on an arbitrary `Listener`.
    ///
    /// Most use cases may call the `http` and `https` methods instead of this.
    pub fn listen<L>(self, mut listener: L, protocol: Protocol) -> HttpResult<Listening>
        where L: 'static + NetworkListener + Send
    {
        let handler = RawHandler {
            handler: self.handler,
            addr: try!(listener.local_addr()),
            protocol: protocol,
        };

        let mut server = Server::new(listener);
        server.keep_alive(self.timeouts.keep_alive);
        server.set_read_timeout(self.timeouts.read);
        server.set_write_timeout(self.timeouts.write);
        server.handle_threads(handler, self.threads)
    }
}

struct RawHandler<H> {
    handler: H,
    addr: SocketAddr,
    protocol: Protocol,
}

impl<H: Handler> ::hyper::server::Handler for RawHandler<H> {
    fn handle(&self, http_req: HttpRequest, mut http_res: HttpResponse<Fresh>) {
        // Set some defaults in case request handler panics.
        // This should not be necessary anymore once stdlib's catch_panic becomes stable.
        *http_res.status_mut() = status::InternalServerError;

        // Create `Request` wrapper.
        match Request::from_http(http_req, self.addr, &self.protocol) {
            Ok(mut req) => {
                // Dispatch the request, write the response back to http_res
                self.handler.handle(&mut req).unwrap_or_else(|e| {
                    error!("Error handling:\n{:?}\nError was: {:?}", req, e.error);
                    e.response
                }).write_back(http_res)
            },
            Err(e) => {
                error!("Error creating request:\n {}", e);
                bad_request(http_res)
            }
        }
    }
}

fn bad_request(mut http_res: HttpResponse<Fresh>) {
    *http_res.status_mut() = status::BadRequest;

    // Consume and flush the response.
    // We would like this to work, but can't do anything if it doesn't.
    if let Ok(res) = http_res.start() {
        let _ = res.end();
    }
}
https
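A minimal sketch of starting a server with the API above (assumes the iron crate; closures implement `Handler`, and the returned `Listening` guard keeps the listener threads running — the address is arbitrary):

extern crate iron;

use iron::prelude::*;
use iron::status;

fn main() {
    // Any closure of `&mut Request -> IronResult<Response>` is a `Handler`.
    let iron = Iron::new(|_req: &mut Request| {
        Ok(Response::with((status::Ok, "Hello from Iron")))
    });
    // `http` consumes the `Iron` value; listening happens on other threads,
    // and the `Listening` guard can be used to close the server later.
    iron.http("localhost:3000").unwrap();
}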
compare_sortings_with_truth.py
import spikeextractors as si
# import spikewidgets as sw
import spiketoolkit as st
import mlprocessors as mlpr
import json
from cairio import client as ca
import numpy as np
from copy import deepcopy


def compare_sortings_with_truth(sortings, compute_resource, num_workers=None):
    print('>>>>>> compare sortings with truth')
    container = 'sha1://3b26155930cc4a4745c67b702ce297c9c968ac94/02-12-2019/mountaintools_basic.simg'
    jobs_gen_table = []
    for sorting in sortings:
        units_true = sorting.get('units_true', [])
        firings = sorting['firings']
        firings_true = sorting['firings_true']
        job = GenSortingComparisonTable.createJob(
            firings=firings,
            firings_true=firings_true,
            units_true=units_true,
            json_out={'ext': '.json', 'upload': True},
            html_out={'ext': '.html', 'upload': True},
            _container=container
        )
        jobs_gen_table.append(job)
    all_jobs = jobs_gen_table
    label = 'Compare sortings with truth'
    mlpr.executeBatch(jobs=all_jobs, label=label, num_workers=num_workers, compute_resource=compute_resource)
    sortings_out = []
    for i, sorting in enumerate(sortings):
        comparison_with_truth = dict()
        comparison_with_truth['json'] = jobs_gen_table[i]['result']['outputs']['json_out']
        comparison_with_truth['html'] = jobs_gen_table[i]['result']['outputs']['html_out']
        sorting2 = deepcopy(sorting)
        sorting2['comparison_with_truth'] = comparison_with_truth
        sortings_out.append(sorting2)
    return sortings_out


class GenSortingComparisonTable(mlpr.Processor):
    VERSION = '0.2.0'
    firings = mlpr.Input('Firings file (sorting)')
    firings_true = mlpr.Input('True firings file')
    units_true = mlpr.IntegerListParameter('List of true units to consider')
    json_out = mlpr.Output('Table as .json file produced from pandas dataframe')
    html_out = mlpr.Output('Table as .html file produced from pandas dataframe')

    def run(self):
        sorting = si.MdaSortingExtractor(firings_file=self.firings)
        sorting_true = si.MdaSortingExtractor(firings_file=self.firings_true)
        if (self.units_true is not None) and (len(self.units_true) > 0):
            sorting_true = si.SubSortingExtractor(parent_sorting=sorting_true, unit_ids=self.units_true)
        SC = st.comparison.SortingComparison(sorting_true, sorting)
        df = get_comparison_data_frame(comparison=SC)
        # sw.SortingComparisonTable(comparison=SC).getDataframe()
        json_obj = df.transpose().to_dict()
        html = df.to_html(index=False)
        _write_json_file(json_obj, self.json_out)
        _write_json_file(html, self.html_out)


def
(*, comparison):
    import pandas as pd
    SC = comparison
    unit_properties = []  # snr, etc? these would need to be properties in the sortings of the comparison

    # Compute event counts
    sorting1 = SC.getSorting1()
    sorting2 = SC.getSorting2()
    unit1_ids = sorting1.getUnitIds()
    unit2_ids = sorting2.getUnitIds()
    N1 = len(unit1_ids)
    N2 = len(unit2_ids)
    event_counts1 = dict()
    for i1, u1 in enumerate(unit1_ids):
        times1 = sorting1.getUnitSpikeTrain(u1)
        event_counts1[u1] = len(times1)
    event_counts2 = dict()
    for i2, u2 in enumerate(unit2_ids):
        times2 = sorting2.getUnitSpikeTrain(u2)
        event_counts2[u2] = len(times2)

    rows = []
    for u_1, unit1 in enumerate(unit1_ids):
        unit2 = SC.getBestUnitMatch1(unit1)
        if unit2 >= 0:
            num_matches = SC.getMatchingEventCount(unit1, unit2)
            num_false_negatives = event_counts1[unit1] - num_matches
            num_false_positives = event_counts2[unit2] - num_matches
        else:
            num_matches = 0
            num_false_negatives = event_counts1[unit1]
            num_false_positives = 0
        row0 = {
            'unit_id': unit1,
            'accuracy': _safe_frac(num_matches, num_false_positives + num_false_negatives + num_matches),
            'best_unit': unit2,
            'matched_unit': SC.getMappedSorting1().getMappedUnitIds(unit1),
            'num_matches': num_matches,
            'num_false_negatives': num_false_negatives,
            'num_false_positives': num_false_positives,
            'f_n': _safe_frac(num_false_negatives, num_false_negatives + num_matches),
            'f_p': _safe_frac(num_false_positives, num_false_positives + num_matches)
        }
        for prop in unit_properties:
            pname = prop['name']
            row0[pname] = SC.getSorting1().getUnitProperty(unit_id=int(unit1), property_name=pname)
        rows.append(row0)

    df = pd.DataFrame(rows)
    fields = ['unit_id']
    fields = fields + ['accuracy', 'best_unit', 'matched_unit', 'num_matches', 'num_false_negatives', 'num_false_positives', 'f_n', 'f_p']
    for prop in unit_properties:
        pname = prop['name']
        fields.append(pname)
    df = df[fields]
    df['accuracy'] = df['accuracy'].map('{:,.4f}'.format)
    # df['Best match'] = df['Accuracy'].map('{:,.2f}'.format)
    df['f_n'] = df['f_n'].map('{:,.4f}'.format)
    df['f_p'] = df['f_p'].map('{:,.4f}'.format)
    return df


def _safe_frac(numer, denom):
    if denom == 0:
        return 0
    return float(numer) / denom


def _write_json_file(obj, path):
    with open(path, 'w') as f:
        return json.dump(obj, f)
get_comparison_data_frame
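A hedged usage sketch for the function above; the firings paths, unit list, and compute resource name are placeholders, not values from the original file:

sortings = [{
    'firings': 'sha1://<hash>/firings.mda',            # sorter output (placeholder)
    'firings_true': 'sha1://<hash>/firings_true.mda',  # ground truth (placeholder)
    'units_true': [1, 2, 3],
}]
sortings = compare_sortings_with_truth(
    sortings, compute_resource='default', num_workers=4)
for sorting in sortings:
    # Each sorting gains pointers to the json/html comparison tables.
    print(sorting['comparison_with_truth']['json'])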
train_standard_vgg.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
This is the implementation of standard training on the GTSRB dataset.
Copyright (c) Yiming Li, 2020
'''
from __future__ import print_function

import argparse
import os
import shutil
import time
import random

import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim as optim
import torch.utils.data as data
import torchvision.transforms as transforms

import gtsrb_dataset as dataset
from model import *
from utils import Bar, Logger, AverageMeter, accuracy, mkdir_p, savefig

parser = argparse.ArgumentParser(description='PyTorch GTSRB')
# Datasets
parser.add_argument('-j', '--workers', default=2, type=int, metavar='N',
                    help='number of data loading workers (default: 2)')
# Optimization options
parser.add_argument('--epochs', default=30, type=int, metavar='N',
                    help='number of total epochs to run')
parser.add_argument('--start-epoch', default=0, type=int, metavar='N',
                    help='manual epoch number (useful on restarts)')
parser.add_argument('--train-batch', default=128, type=int, metavar='N',
                    help='train batchsize')
parser.add_argument('--test-batch', default=128, type=int, metavar='N',
                    help='test batchsize')
parser.add_argument('--lr', '--learning-rate', default=0.01, type=float,
                    metavar='LR', help='initial learning rate')
parser.add_argument('--drop', '--dropout', default=0, type=float,
                    metavar='Dropout', help='Dropout ratio')
parser.add_argument('--schedule', type=int, nargs='+', default=[20],
                    help='Decrease learning rate at these epochs.')
parser.add_argument('--gamma', type=float, default=0.1,
                    help='LR is multiplied by gamma on schedule.')
parser.add_argument('--momentum', default=0.9, type=float, metavar='M',
                    help='momentum')
parser.add_argument('--weight-decay', '--wd', default=5e-4, type=float,
                    metavar='W', help='weight decay (default: 5e-4)')
# Checkpoints
parser.add_argument('-c', '--checkpoint', default='checkpoint/benign', type=str, metavar='PATH',
                    help='path to save checkpoint (default: checkpoint)')
parser.add_argument('--resume', default='', type=str, metavar='PATH',
                    help='path to latest checkpoint (default: none)')
# Miscs
parser.add_argument('--manualSeed', type=int, default=1, help='manual seed')
parser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true',
                    help='evaluate model on validation set')
# Device options
parser.add_argument('--gpu-id', default='0', type=str,
                    help='id(s) for CUDA_VISIBLE_DEVICES')

args = parser.parse_args()
state = {k: v for k, v in args._get_kwargs()}

# Use CUDA
os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu_id
use_cuda = torch.cuda.is_available()

# Random seed
if args.manualSeed is None:
    args.manualSeed = random.randint(1, 10000)
random.seed(args.manualSeed)
torch.manual_seed(args.manualSeed)
if use_cuda:
    torch.cuda.manual_seed_all(args.manualSeed)

best_acc = 0  # best test accuracy


def main():
def train(args, model, trainloader, criterion, optimizer, epoch, use_cuda):
    # switch to train mode
    model.train()

    batch_time = AverageMeter()
    data_time = AverageMeter()
    losses = AverageMeter()
    top1 = AverageMeter()
    top5 = AverageMeter()
    end = time.time()

    bar = Bar('Processing', max=len(trainloader))
    for batch_idx, (image, target) in enumerate(trainloader):
        # measure data loading time
        data_time.update(time.time() - end)

        if use_cuda:
            image, target = image.cuda(), target.cuda()

        # compute loss and do SGD step
        outputs = model(image)
        loss = criterion(outputs, target)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        # measure train accuracy and record loss
        prec1, prec5 = accuracy(outputs.data, target.data, topk=(1, 5))
        losses.update(loss.item(), image.size(0))
        top1.update(prec1.item(), image.size(0))
        top5.update(prec5.item(), image.size(0))

        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()

        # plot progress
        bar.suffix = '({batch}/{size}) Data: {data:.3f}s | Batch: {bt:.3f}s | Total: {total:} | ETA: {eta:} | Loss: {loss:.4f} | top1: {top1: .4f} | top5: {top5: .4f}'.format(
            batch=batch_idx + 1,
            size=len(trainloader),
            data=data_time.avg,
            bt=batch_time.avg,
            total=bar.elapsed_td,
            eta=bar.eta_td,
            loss=losses.avg,
            top1=top1.avg,
            top5=top5.avg,
        )
        bar.next()
    bar.finish()
    return (losses.avg, top1.avg)


def test(testloader, model, criterion, epoch, use_cuda):
    global best_acc

    batch_time = AverageMeter()
    data_time = AverageMeter()
    losses = AverageMeter()
    top1 = AverageMeter()
    top5 = AverageMeter()

    # switch to evaluate mode
    model.eval()

    end = time.time()
    bar = Bar('Processing', max=len(testloader))
    for batch_idx, (inputs, targets) in enumerate(testloader):
        # measure data loading time
        data_time.update(time.time() - end)

        if use_cuda:
            inputs, targets = inputs.cuda(), targets.cuda()

        # compute output
        outputs = model(inputs)
        loss = criterion(outputs, targets)

        # measure accuracy and record standard loss
        prec1, prec5 = accuracy(outputs.data, targets.data, topk=(1, 5))
        losses.update(loss.item(), inputs.size(0))
        top1.update(prec1.item(), inputs.size(0))
        top5.update(prec5.item(), inputs.size(0))

        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()

        # plot progress
        bar.suffix = '({batch}/{size}) Data: {data:.3f}s | Batch: {bt:.3f}s | Total: {total:} | ETA: {eta:} | Loss: {loss:.4f} | top1: {top1: .4f} | top5: {top5: .4f}'.format(
            batch=batch_idx + 1,
            size=len(testloader),
            data=data_time.avg,
            bt=batch_time.avg,
            total=bar.elapsed_td,
            eta=bar.eta_td,
            loss=losses.avg,
            top1=top1.avg,
            top5=top5.avg,
        )
        bar.next()
    bar.finish()
    return (losses.avg, top1.avg)


def save_checkpoint(state, is_best, checkpoint='checkpoint', filename='checkpoint.pth.tar'):
    filepath = os.path.join(checkpoint, filename)
    torch.save(state, filepath)
    if is_best:
        shutil.copyfile(filepath, os.path.join(checkpoint, 'model_best.pth.tar'))


def adjust_learning_rate(optimizer, epoch):
    global state
    if epoch in args.schedule:
        state['lr'] *= args.gamma
        for param_group in optimizer.param_groups:
            param_group['lr'] = state['lr']


if __name__ == '__main__':
    main()
    global best_acc
    start_epoch = args.start_epoch  # start from epoch 0 or last checkpoint epoch

    if not os.path.isdir(args.checkpoint):
        mkdir_p(args.checkpoint)

    # Dataset preprocessing
    title = 'GTSRB'
    print('==> Preparing GTSRB dataset')
    transform = transforms.Compose([
        transforms.Resize((32, 32)),
        transforms.ToTensor()
    ])

    # Create Datasets
    trainset = dataset.GTSRB(root_dir='./data', train=True, transform=transform)
    testset = dataset.GTSRB(root_dir='./data', train=False, transform=transform)

    # Load Datasets
    trainloader = torch.utils.data.DataLoader(
        trainset, batch_size=args.train_batch, shuffle=True, num_workers=args.workers)
    testloader = torch.utils.data.DataLoader(
        testset, batch_size=args.test_batch, shuffle=False, num_workers=args.workers)

    # Model
    model = vgg19_bn()
    model = torch.nn.DataParallel(model).cuda()
    cudnn.benchmark = True
    print('Total params: %.2fM' % (sum(p.numel() for p in model.parameters()) / 1000000.0))
    criterion = nn.CrossEntropyLoss()
    optimizer = optim.SGD(model.parameters(), lr=args.lr, momentum=args.momentum, weight_decay=args.weight_decay)

    # Resume
    if args.resume:
        # Load checkpoint.
        print('==> Resuming from checkpoint..')
        assert os.path.isfile(args.resume), 'Error: no checkpoint directory found!'
        args.checkpoint = os.path.dirname(args.resume)
        checkpoint = torch.load(args.resume)
        best_acc = checkpoint['best_acc']
        start_epoch = checkpoint['epoch']
        model.load_state_dict(checkpoint['state_dict'])
        optimizer.load_state_dict(checkpoint['optimizer'])
        logger = Logger(os.path.join(args.checkpoint, 'log.txt'), title=title, resume=True)
    else:
        logger = Logger(os.path.join(args.checkpoint, 'log.txt'), title=title)
        logger.set_names(['Learning Rate', 'Train Loss', 'Valid Loss', 'Train Acc.', 'Valid Acc.'])

    if args.evaluate:
        print('\nEvaluation only')
        test_loss, test_acc = test(testloader, model, criterion, start_epoch, use_cuda)
        print(' Test Loss: %.8f, Test Acc: %.2f' % (test_loss, test_acc))
        return

    # Train and val
    for epoch in range(start_epoch, args.epochs):
        adjust_learning_rate(optimizer, epoch)
        print('\nEpoch: [%d | %d] LR: %f' % (epoch + 1, args.epochs, state['lr']))

        train_loss, train_acc = train(args, model, trainloader, criterion, optimizer, epoch, use_cuda)
        test_loss, test_acc = test(testloader, model, criterion, epoch, use_cuda)

        # append logger file
        logger.append([state['lr'], train_loss, test_loss, train_acc, test_acc])

        # save model
        is_best = test_acc > best_acc
        best_acc = max(test_acc, best_acc)
        save_checkpoint({
            'epoch': epoch + 1,
            'state_dict': model.state_dict(),
            'acc': test_acc,
            'best_acc': best_acc,
            'optimizer': optimizer.state_dict(),
        }, is_best, checkpoint=args.checkpoint)

    logger.close()
    logger.plot()
    savefig(os.path.join(args.checkpoint, 'log.eps'))

    print('Best acc:')
    print(best_acc)
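A hedged invocation sketch using the flags defined above (assumes the GTSRB data lives under ./data, per the script's defaults):

python train_standard_vgg.py --epochs 30 --schedule 20 --lr 0.01 \
    --train-batch 128 --checkpoint checkpoint/benign --gpu-id 0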
index.js
var crypto = require('crypto')
var bitcoin = require('bitcoinjs-lib')
var HttpUtility = require('./http-utility')
var payments = require('./payments')
var paymentRequest = require('./payment-request')

module.exports = {
  Client: Client,
  Xpub: Xpub,
  Xprv: Xprv,
  XpubPass: XpubPass,
  PaymentRequest: paymentRequest,
  payments: payments,

  // Deprecated exports
  Xpriv: Xprv
}

/**
 * Creates a Chain Wallets API client.
 * @constructor
 */
function Client(c) {
  // For compatibility, support both "key" and "token"
  if (!c.apiTokenId && !c.keyId) {
    c.apiTokenId = "GUEST-TOKEN"
  }
  this.auth = {
    user: c.apiTokenId || c.keyId,
    pass: c.secretApiToken || c.keySecret
  }

  if (!c.url) { c.url = 'https://w.chain.com' }
  if (!c.apiVersion) { c.apiVersion = 'v3' }
  if (!c.timeout) { c.timeout = 10000 } // 10 seconds

  var baseurl = c.url + '/' + c.apiVersion
  this.api = new HttpUtility({
    url: baseurl,
    auth: this.auth,
    timeout: c.timeout
  })

  this.certChain = c.certChain
  this.certKey = c.certKey
  this.keyStore = new KeyStore()
}

Client.prototype.getWallet = function (id, cb) { this.api.get('/wallets/' + id, cb) }

Client.prototype.getWalletBalance = function (id, cb) { this.api.get('/wallets/' + id + '/balance', cb) }

Client.prototype.getWallets = function (cb) { this.api.get('/wallets', cb) }

Client.prototype.getWalletActivity = function (id, cb) { this.api.get('/wallets/' + id + '/activity', cb) }

Client.prototype.rotateWalletKey = function (walletID, args, cb) { this.api.post('/wallets/' + walletID + '/rotate', args, cb) }

Client.prototype.getBucketActivity = function (id, cb) { this.api.get('/buckets/' + id + '/activity', cb) }

Client.prototype.getBucketBalance = function (id, cb) { this.api.get('/buckets/' + id + '/balance', cb) }

Client.prototype.getBuckets = function (walletID, cb) { this.api.get('/wallets/' + walletID + '/buckets', cb) }

Client.prototype.createBucket = function (walletID, cb) { this.api.post('/wallets/' + walletID + '/buckets', {}, cb) }

Client.prototype.getReceiver = function (id, cb) { this.api.get('/receivers/' + id, cb) }

// createReceiver(bucketID, [args, ] callback)
Client.prototype.createReceiver = function (bucketID, args, cb) {
  if (typeof args === 'function') {
    cb = args
    args = {}
  }

  var self = this
  self.api.post('/buckets/' + bucketID + '/receivers', args, function (err, resp) {
    if (err) { return cb(err) }
    self.buildReceiver(resp, cb)
  })
}

Client.prototype.getReceiver = function (id, cb) {
  var self = this
  self.api.get('/receivers/' + id, function (err, resp) {
    if (err) { return cb(err) }
    self.buildReceiver(resp, cb)
  })
}

Client.prototype.buildReceiver = function (resp, cb) {
  var rec
  try {
    rec = new Receiver(resp, {
      certChain: this.certChain,
      certKey: this.certKey
    })
    this.verifyReceiver(rec)
  } catch (err) {
    return cb(err)
  }
  cb(null, rec)
}

Client.prototype.verifyReceiver = function (rec) {
  // Validate pubkey
  this.verifySigners(
    rec.receiver_address,
    rec.receiver_address_components.signers,
    rec.blockChain
  )

  // Verify output
  var pd = payments.PaymentDetails.decode(
    new Buffer(rec.payment_details_message, "hex"))
  if (pd.outputs.length !== 1) {
    throw new Error("payment details had too many outputs")
  }
  var addr = bitcoin.Address.fromBase58Check(rec.receiver_address)
  if (!addr.toOutputScript().toBuffer().equals(pd.outputs[0].script.toBuffer())) {
    throw new Error("payment details output did not match address")
  }
}

Client.prototype.verifySigners = function (addr, signers, blockChain) {
  var numClientKeys = 0, self = this
  signers.forEach(function (s) {
    if (s.entity === "client") {
      numClientKeys++
      var key = self.keyStore.get(s.xpub_hash)
      if (!key) {
        throw new Error("Could not find key matching xpub hash " + s.xpub_hash)
      }
      if (!key.canReceive) {
        throw new Error("key " + key.hash + " is not valid for receiving")
      }
      verifyPubkey(key.xpub, s.derivation_path, s.pubkey)
    }
  })
  if (numClientKeys < signers.length / 2) {
    throw new Error("do not have majority of keys for address: " + addr)
  }

  var pubkeys = signers.map(function (s) {
    return bitcoin.ECPubKey.fromHex(s.pubkey)
  })
  verifyP2SHAddr(addr, 2, pubkeys, blockChain)
}

function verifyPubkey(xpub, derivePath, pubkey) {
  derivePath.forEach(function (x) {
    xpub = xpub.derive(x)
  })
  if (xpub.pubKey.toHex() !== pubkey) {
    throw new Error("client pubkey did not match local xpub")
  }
}

function verifyP2SHAddr(addr, sigsRequired, pubkeys, blockChain) {
  var redeem = bitcoin.scripts.multisigOutput(sigsRequired, pubkeys)
  var addrCheck = new bitcoin.Address(redeem.getHash(), blockChain.scriptHash)
  if (addrCheck.toBase58Check() !== addr) {
    throw new Error("address did not match p2sh script")
  }
}

function Receiver(data, conf) {
  for (var k in data) {
    if (data.hasOwnProperty(k)) {
      this[k] = data[k]
    }
  }
  this.conf = conf
  this.blockChain = getBlockChain(data.block_chain)
}

Receiver.prototype.address = function () {
  return this.receiver_address
}

Receiver.prototype.paymentRequest = function () {
  var details = new Buffer(this.payment_details_message, "hex")
  var signer = crypto.Sign('RSA-SHA256')
  var pr = new payments.PaymentRequest({
    serialized_payment_details: details,
    pki_type: 'x509+sha256',
    pki_data: (new payments.X509Certificates(this.conf.certChain)).encode(),
    signature: ''
  })
  signer.update(pr.toBuffer())
  pr.signature = signer.sign(this.conf.certKey)
  return pr.toBuffer()
}

/**
 * Performs the following steps:
 * - Generate a transaction template using the Chain Wallets API. Equivalent to
 *   Client#buildTransactionTemplate()
 * - Sign the transaction template using the local key store. Equivalent to
 *   Client#signTransactionTemplate()
 * - Finalize the transaction using the Chain Wallets API. Equivalent to
 *   Client#finalizeTransactionTemplate()
 * @param {Object} buildParams - Specifies the inputs and outputs for the
 * transaction. See Client#buildTransactionTemplate() for a complete
 * description.
 * @param {function} cb - Returns a summary of the transaction submitted to the
 * Bitcoin network, or an error.
 */
Client.prototype.transact = function (buildParams, cb) {
  var self = this
  self.buildTransactionTemplate({
    inputs: buildParams.inputs,
    outputs: buildParams.outputs,
    metadata: buildParams.metadata
  }, function (err, buildResp) {
    if (err) { return cb(err) }
    try {
      self.verifyBuildResponse(buildParams, buildResp)
      self.signTransactionTemplate(buildResp.template)
    } catch (err) {
      return cb(err, null)
    }
    self.finalizeTransactionTemplate(buildResp.template, cb)
  })
}

/**
 * WARNING: this method is currently being short
 * circuited as we are making changes to w.chain.com.
 * Verifies an API response from /wallets/transact/build against the specified
 * inputs/outputs and the local key store. If the response is not valid, an
 * error will be thrown.
 * @param {Object} buildParams - The parameters passed to
 * Client#buildTransactionTemplate()
 * @param {Object} buildResp - The response from
 * Client#buildTransactionTemplate()
 */
Client.prototype.verifyBuildResponse = function (buildParams, buildResp) {
  return
}

/**
 * Generates a transaction template using the Chain Wallets API.
 * @param {Object} buildParams - Specifies the inputs and outputs for the
 * transaction.
 * @param {Object[]} buildParams.inputs - A list of inputs and the amounts sent.
 * @param {string} buildParams.inputs[].bucket_id
 * @param {number} buildParams.inputs[].amount
 * @param {boolean} buildParams.inputs[].pays_fee - Whether the fee will be paid
 * from this bucket. A transaction can only have a single fee payer.
 * @param {Object[]} buildParams.outputs - A list of outputs (either a bucket or
 * an address) and the amounts received.
 * @param {string} buildParams.outputs[].bucket_id
 * @param {string} buildParams.outputs[].address
 * @param {number} buildParams.outputs[].amount
 * @param {boolean} buildParams.outputs[].pays_fee - Whether the fee will be
 * paid from this bucket. A transaction can only have a single fee payer.
 * @param {function} cb - Returns a transaction template and verification
 * metadata, or an error.
 */
Client.prototype.buildTransactionTemplate = function (buildParams, cb) {
  this.api.post('/wallets/transact/build', buildParams, cb)
}

/**
 * Signs a transaction template using the local key store.
 * @param {Object} template - A transaction template, returned by
 * Client#buildTransactionTemplate()
 */
Client.prototype.signTransactionTemplate = function (template) {
  var tx = bitcoin.Transaction.fromHex(template.unsigned_hex)
  for (var i = 0; i < template.inputs.length; i++) {
    var inp = template.inputs[i]
    var sigs = inp.signatures
    for (var c = 0; c < sigs.length; c++) {
      var s = sigs[c], key = this.keyStore.get(s.xpub_hash)
      // sigs contains entries for multiple signers,
      // so we need to ignore any that we don't recognize
      // or are unable to sign
      if (!(key && key.canSign)) { continue }

      var xprv = key.xprv(s.xprv_encrypted), path = s.derivation_path
      for (var j = 0; j < path.length; j++) {
        xprv = xprv.derive(path[j])
      }

      var redeem = bitcoin.Script.fromHex(inp.redeem_script)
      var txSigHash = tx.hashForSignature(i, redeem, bitcoin.Transaction.SIGHASH_ALL)
      var sig = xprv.privKey.sign(txSigHash).toScriptSignature(bitcoin.Transaction.SIGHASH_ALL)
      s.signature = sig.toString('hex')
    }
  }
  return null
}

/**
 * Finalizes a transaction using the Chain Wallets API.
 * @param {Object} template - A transaction template, returned by
 * Client#buildTransactionTemplate and signed by
 * Client#signTransactionTemplate()
 */
Client.prototype.finalizeTransactionTemplate = function (signedTemplate, cb) {
  this.api.post('/wallets/transact/finalize', signedTemplate, cb)
}

/**
 * Open Assets
 */
Client.prototype.createAsset = function (id, opts, cb) {
  this.api.post('/wallets/' + id + "/assets", opts, cb)
}

Client.prototype.issueAsset = function (id, outputs, cb) {
  var self = this
  this.buildAssetIssuance(id, outputs, function (err, resp) {
    if (err) {
      cb(err, null)
      return
    }
    try {
      self.signTransactionTemplate(resp.template)
    } catch (err) {
      cb(err, null)
      return
    }
    self.finalizeTransactionTemplate(resp.template, cb)
  })
}

Client.prototype.buildAssetIssuance = function (id, outputs, cb) {
  this.api.post("/assets/" + id + "/issue", outputs, cb)
}

Client.prototype.transferAsset = function (inputs, outputs, cb) {
  var self = this
  this.buildAssetTransfer(inputs, outputs, function (err, resp) {
    if (err) {
      cb(err, null)
      return
    }
    try {
      self.signTransactionTemplate(resp.template)
    } catch (err) {
      cb(err, null)
      return
    }
    self.finalizeTransactionTemplate(resp.template, cb)
  })
}

Client.prototype.buildAssetTransfer = function (inputs, outputs, cb) {
  var body = { inputs: inputs, outputs: outputs }
  this.api.post("/assets/transfer", body, cb)
}

Client.prototype.getAsset = function (id, cb) { this.api.get("/assets/" + id, cb) }

Client.prototype.getWalletAssets = function (id, cb) { this.api.get("/wallets/" + id + "/assets", cb) }

Client.prototype.getWalletAssetBalance = function (id, cb) { this.api.get("/wallets/" + id + "/balance/assets", cb) }

Client.prototype.getWalletAssetActivity = function () {
  var args = Array.prototype.slice.call(arguments)
  switch (args.length) {
    case 2:
      var id = args[0]
      var opts = {}
      var done = args[1]
      break
    case 3:
      var id = args[0]
      var opts = args[1]
      var done = args[2]
      break
    default:
      throw new Error('Invalid args: ' + args)
  }

  var headers = {}
  if (opts.nextPage) {
    headers['range-after'] = opts.nextPage
  }

  this.api.makeRequest({
    method: 'GET',
    path: '/wallets/' + id + '/activity/assets',
    headers: headers
  }, function (err, body, resp) {
    if (err) { return done(err) }
    var body = {
      items: body,
      nextPage: resp.headers['next-range-after']
    }
    done(null, body, resp)
  })
}

Client.prototype.getBucketAssetBalance = function (id, cb) { this.api.get("/buckets/" + id + "/balance/assets", cb) }

Client.prototype.getBucketAssetActivity = function () {
  var args = Array.prototype.slice.call(arguments)
  switch (args.length) {
    case 2:
      var id = args[0]
      var opts = {}
      var done = args[1]
      break
    case 3:
      var id = args[0]
      var opts = args[1]
      var done = args[2]
      break
    default:
      throw new Error('Invalid args: ' + args)
  }

  var headers = { 'range-after': opts.nextPage }
  this.api.makeRequest({
    method: 'GET',
    path: '/buckets/' + id + '/activity/assets',
    headers: headers
  }, function (err, body, resp) {
    if (err) { return done(err) }
    var body = {
      items: body,
      nextPage: resp.headers['next-range-after']
    }
    done(null, body, resp)
  })
}

function getBlockChain(name) {
  switch (name) {
    case 'bitcoin':
      return bitcoin.networks.bitcoin
    case 'testnet3':
      return bitcoin.networks.testnet
  }
  throw new Error("bad blockchain: " + name)
}

// A keyStore is a set of key objects.
// A key object may or may not be able to produce
// an xprv, and it may or may not need to be given
// the encrypted xprv to do so.
function KeyStore() {
  this.data = {}
}

KeyStore.prototype.add = function (k) {
  this.data[k.hash] = k
}

// Note: this dictionary is keyed on the xpub hash.
// This is the bitcoin-style hash160 function
// (RIPEMD(SHA256(x))) of the base58 encoding
// of the serialized xpub data.
KeyStore.prototype.get = function (xpubHash) {
  return this.data[xpubHash]
}

// Xprv takes a base58-encoded xprv and a boolean
function Xprv(xprv, canRecv) {
  this.xprvObj = bitcoin.HDNode.fromBase58(xprv)
  this.xpub = this.xprvObj.neutered()
  this.hash = hashXpub(this.xpub.toBase58())
  this.canReceive = canRecv
  this.canSign = true
}

Xprv.prototype.xprv = function () {
  return this.xprvObj
}
Xprv.prototype.xpriv = Xprv.prototype.xprv; // deprecated

// Xpub takes a base58-encoded xpub and a boolean
// flag indicating whether it can be used to receive funds.
function Xpub(xpub, canRecv) {
  this.hash = hashXpub(xpub)
  this.xpub = bitcoin.HDNode.fromBase58(xpub)
  this.canReceive = canRecv
  this.canSign = false
}

Xpub.prototype.xprv = function () {
  throw new Error("xpub-only key cannot return xprv")
}
Xpub.prototype.xpriv = Xpub.prototype.xprv; // deprecated

// XpubPass takes a base58-encoded xpub,
// a passphrase used to decrypt the corresponding encrypted xprv,
// and a boolean flag indicating whether it can be used to receive funds.
function XpubPass(xpub, passphrase, canRecv) {
  this.hash = hashXpub(xpub)
  this.xpubBase58 = xpub
  this.xpub = bitcoin.HDNode.fromBase58(xpub)
  this.pass = passphrase
  this.canReceive = canRecv
  this.canSign = true
}

XpubPass.prototype.xprv = function (enc) {
  return decryptXprv(this.pass, this.xpubBase58, enc)
}
XpubPass.prototype.xpriv = XpubPass.prototype.xprv; // deprecated

function decryptXprv(passphrase, xpub, encryptedXPrv) {
  var key = crypto.pbkdf2Sync(passphrase, xpub, 100000, 16, 'sha256')
  encryptedXPrv = new Buffer(encryptedXPrv, "hex")
  var iv = encryptedXPrv.slice(0, 12)
  var cipherText = encryptedXPrv.slice(12, encryptedXPrv.length - 16)
  var authTag = encryptedXPrv.slice(encryptedXPrv.length - 16)
  var decipher = crypto.createDecipheriv('aes-128-gcm', key, iv)
  decipher.setAuthTag(authTag)
  var xprv = decipher.update(cipherText) + decipher.final()
  return bitcoin.HDNode.fromBase58(xprv)
}

function hashXpub(data) {
  return new Buffer(bitcoin.crypto.hash160(data)).toString("hex")
}
// flag indicating whether it can be used to receive funds.
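A hedged usage sketch for the client and key classes above; the token credentials, xpub string, and bucket ID are placeholders, not real values:

var chain = require('./index')

var client = new chain.Client({
  apiTokenId: 'TOKEN-ID',           // placeholder credentials
  secretApiToken: 'TOKEN-SECRET'
})

// Register a receive-capable xpub so verifySigners can match it by hash.
client.keyStore.add(new chain.Xpub('xpub...placeholder...', true))

client.createReceiver('bucket-id', function (err, receiver) {
  if (err) { return console.error(err) }
  console.log('pay to:', receiver.address())
})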
optional_scalars_generated.rs
// automatically generated by the FlatBuffers compiler, do not modify
#![allow(unused_imports, dead_code)]

use std::mem;
use std::cmp::Ordering;

extern crate flatbuffers;
use self::flatbuffers::EndianScalar;

#[allow(unused_imports, dead_code)]
pub mod optional_scalars {

use std::mem;
use std::cmp::Ordering;

extern crate flatbuffers;
use self::flatbuffers::EndianScalar;

#[deprecated(since = "1.13", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MIN_OPTIONAL_BYTE: i8 = 0;
#[deprecated(since = "1.13", note = "Use associated constants instead. This will no longer be generated in 2021.")]
pub const ENUM_MAX_OPTIONAL_BYTE: i8 = 2;
#[deprecated(since = "1.13", note = "Use associated constants instead. This will no longer be generated in 2021.")]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_OPTIONAL_BYTE: [OptionalByte; 3] = [
  OptionalByte::None,
  OptionalByte::One,
  OptionalByte::Two,
];

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(transparent)]
pub struct OptionalByte(pub i8);
#[allow(non_upper_case_globals)]
impl OptionalByte {
  pub const None: Self = Self(0);
  pub const One: Self = Self(1);
  pub const Two: Self = Self(2);

  pub const ENUM_MIN: i8 = 0;
  pub const ENUM_MAX: i8 = 2;
  pub const ENUM_VALUES: &'static [Self] = &[
    Self::None,
    Self::One,
    Self::Two,
  ];
  /// Returns the variant's name or "" if unknown.
  pub fn variant_name(self) -> Option<&'static str> {
    match self {
      Self::None => Some("None"),
      Self::One => Some("One"),
      Self::Two => Some("Two"),
      _ => None,
    }
  }
}
impl std::fmt::Debug for OptionalByte {
  fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
    if let Some(name) = self.variant_name() {
      f.write_str(name)
    } else {
      f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
    }
  }
}
impl<'a> flatbuffers::Follow<'a> for OptionalByte {
  type Inner = Self;
  #[inline]
  fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    Self(flatbuffers::read_scalar_at::<i8>(buf, loc))
  }
}

impl flatbuffers::Push for OptionalByte {
  type Output = OptionalByte;
  #[inline]
  fn push(&self, dst: &mut [u8], _rest: &[u8]) {
    flatbuffers::emplace_scalar::<i8>(dst, self.0);
  }
}

impl flatbuffers::EndianScalar for OptionalByte {
  #[inline]
  fn to_little_endian(self) -> Self { Self(i8::to_le(self.0)) }
  #[inline]
  fn from_little_endian(self) -> Self { Self(i8::from_le(self.0)) }
}

pub enum ScalarStuffOffset {}
#[derive(Copy, Clone, PartialEq)]
pub struct ScalarStuff<'a> {
  pub _tab: flatbuffers::Table<'a>,
}

impl<'a> flatbuffers::Follow<'a> for ScalarStuff<'a> {
  type Inner = ScalarStuff<'a>;
  #[inline]
  fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    Self { _tab: flatbuffers::Table { buf, loc } }
  }
}

impl<'a> ScalarStuff<'a> {
  #[inline]
  pub fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
    ScalarStuff { _tab: table }
  }
  #[allow(unused_mut)]
  pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr>(
    _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr>,
    args: &'args ScalarStuffArgs) -> flatbuffers::WIPOffset<ScalarStuff<'bldr>> {
    let mut builder = ScalarStuffBuilder::new(_fbb);
    builder.add_default_f64(args.default_f64);
    if let Some(x) = args.maybe_f64 { builder.add_maybe_f64(x); }
    builder.add_just_f64(args.just_f64);
    builder.add_default_u64(args.default_u64);
    if let Some(x) = args.maybe_u64 { builder.add_maybe_u64(x); }
    builder.add_just_u64(args.just_u64);
    builder.add_default_i64(args.default_i64);
    if let Some(x) = args.maybe_i64 { builder.add_maybe_i64(x); }
    builder.add_just_i64(args.just_i64);
    builder.add_default_f32(args.default_f32);
    if let Some(x) = args.maybe_f32 { builder.add_maybe_f32(x); }
    builder.add_just_f32(args.just_f32);
    builder.add_default_u32(args.default_u32);
    if let Some(x) = args.maybe_u32 { builder.add_maybe_u32(x); }
    builder.add_just_u32(args.just_u32);
    builder.add_default_i32(args.default_i32);
    if let Some(x) = args.maybe_i32 { builder.add_maybe_i32(x); }
    builder.add_just_i32(args.just_i32);
    builder.add_default_u16(args.default_u16);
    if let Some(x) = args.maybe_u16 { builder.add_maybe_u16(x); }
    builder.add_just_u16(args.just_u16);
    builder.add_default_i16(args.default_i16);
    if let Some(x) = args.maybe_i16 { builder.add_maybe_i16(x); }
    builder.add_just_i16(args.just_i16);
    builder.add_default_enum(args.default_enum);
    if let Some(x) = args.maybe_enum { builder.add_maybe_enum(x); }
    builder.add_just_enum(args.just_enum);
    builder.add_default_bool(args.default_bool);
    if let Some(x) = args.maybe_bool { builder.add_maybe_bool(x); }
    builder.add_just_bool(args.just_bool);
    builder.add_default_u8(args.default_u8);
    if let Some(x) = args.maybe_u8 { builder.add_maybe_u8(x); }
    builder.add_just_u8(args.just_u8);
    builder.add_default_i8(args.default_i8);
    if let Some(x) = args.maybe_i8 { builder.add_maybe_i8(x); }
    builder.add_just_i8(args.just_i8);
    builder.finish()
  }

  pub const VT_JUST_I8: flatbuffers::VOffsetT = 4;
  pub const VT_MAYBE_I8: flatbuffers::VOffsetT = 6;
  pub const VT_DEFAULT_I8: flatbuffers::VOffsetT = 8;
  pub const VT_JUST_U8: flatbuffers::VOffsetT = 10;
  pub const VT_MAYBE_U8: flatbuffers::VOffsetT = 12;
  pub const VT_DEFAULT_U8: flatbuffers::VOffsetT = 14;
  pub const VT_JUST_I16: flatbuffers::VOffsetT = 16;
  pub const VT_MAYBE_I16: flatbuffers::VOffsetT = 18;
  pub const VT_DEFAULT_I16: flatbuffers::VOffsetT = 20;
  pub const VT_JUST_U16: flatbuffers::VOffsetT = 22;
  pub const VT_MAYBE_U16: flatbuffers::VOffsetT = 24;
  pub const VT_DEFAULT_U16: flatbuffers::VOffsetT = 26;
  pub const VT_JUST_I32: flatbuffers::VOffsetT = 28;
  pub const VT_MAYBE_I32: flatbuffers::VOffsetT = 30;
  pub const VT_DEFAULT_I32: flatbuffers::VOffsetT = 32;
  pub const VT_JUST_U32: flatbuffers::VOffsetT = 34;
  pub const VT_MAYBE_U32: flatbuffers::VOffsetT = 36;
  pub const VT_DEFAULT_U32: flatbuffers::VOffsetT = 38;
  pub const VT_JUST_I64: flatbuffers::VOffsetT = 40;
  pub const VT_MAYBE_I64: flatbuffers::VOffsetT = 42;
  pub const VT_DEFAULT_I64: flatbuffers::VOffsetT = 44;
  pub const VT_JUST_U64: flatbuffers::VOffsetT = 46;
  pub const VT_MAYBE_U64: flatbuffers::VOffsetT = 48;
  pub const VT_DEFAULT_U64: flatbuffers::VOffsetT = 50;
  pub const VT_JUST_F32: flatbuffers::VOffsetT = 52;
  pub const VT_MAYBE_F32: flatbuffers::VOffsetT = 54;
  pub const VT_DEFAULT_F32: flatbuffers::VOffsetT = 56;
  pub const VT_JUST_F64: flatbuffers::VOffsetT = 58;
  pub const VT_MAYBE_F64: flatbuffers::VOffsetT = 60;
  pub const VT_DEFAULT_F64: flatbuffers::VOffsetT = 62;
  pub const VT_JUST_BOOL: flatbuffers::VOffsetT = 64;
  pub const VT_MAYBE_BOOL: flatbuffers::VOffsetT = 66;
  pub const VT_DEFAULT_BOOL: flatbuffers::VOffsetT = 68;
  pub const VT_JUST_ENUM: flatbuffers::VOffsetT = 70;
  pub const VT_MAYBE_ENUM: flatbuffers::VOffsetT = 72;
  pub const VT_DEFAULT_ENUM: flatbuffers::VOffsetT = 74;

  #[inline]
  pub fn just_i8(&self) -> i8 { self._tab.get::<i8>(ScalarStuff::VT_JUST_I8, Some(0)).unwrap() }
  #[inline]
  pub fn maybe_i8(&self) -> Option<i8> { self._tab.get::<i8>(ScalarStuff::VT_MAYBE_I8, None) }
  #[inline]
  pub fn default_i8(&self) -> i8 { self._tab.get::<i8>(ScalarStuff::VT_DEFAULT_I8, Some(42)).unwrap() }
  #[inline]
  pub fn just_u8(&self) -> u8 { self._tab.get::<u8>(ScalarStuff::VT_JUST_U8, Some(0)).unwrap() }
  #[inline]
  pub fn maybe_u8(&self) -> Option<u8> { self._tab.get::<u8>(ScalarStuff::VT_MAYBE_U8, None) }
  #[inline]
  pub fn default_u8(&self) -> u8 { self._tab.get::<u8>(ScalarStuff::VT_DEFAULT_U8, Some(42)).unwrap() }
  #[inline]
  pub fn just_i16(&self) -> i16 { self._tab.get::<i16>(ScalarStuff::VT_JUST_I16, Some(0)).unwrap() }
  #[inline]
  pub fn maybe_i16(&self) -> Option<i16> { self._tab.get::<i16>(ScalarStuff::VT_MAYBE_I16, None) }
  #[inline]
  pub fn default_i16(&self) -> i16 { self._tab.get::<i16>(ScalarStuff::VT_DEFAULT_I16, Some(42)).unwrap() }
  #[inline]
  pub fn just_u16(&self) -> u16 { self._tab.get::<u16>(ScalarStuff::VT_JUST_U16, Some(0)).unwrap() }
  #[inline]
  pub fn maybe_u16(&self) -> Option<u16> { self._tab.get::<u16>(ScalarStuff::VT_MAYBE_U16, None) }
  #[inline]
  pub fn default_u16(&self) -> u16 { self._tab.get::<u16>(ScalarStuff::VT_DEFAULT_U16, Some(42)).unwrap() }
  #[inline]
  pub fn just_i32(&self) -> i32 { self._tab.get::<i32>(ScalarStuff::VT_JUST_I32, Some(0)).unwrap() }
  #[inline]
  pub fn maybe_i32(&self) -> Option<i32> { self._tab.get::<i32>(ScalarStuff::VT_MAYBE_I32, None) }
  #[inline]
  pub fn default_i32(&self) -> i32 { self._tab.get::<i32>(ScalarStuff::VT_DEFAULT_I32, Some(42)).unwrap() }
  #[inline]
  pub fn just_u32(&self) -> u32 { self._tab.get::<u32>(ScalarStuff::VT_JUST_U32, Some(0)).unwrap() }
  #[inline]
  pub fn maybe_u32(&self) -> Option<u32>
  #[inline]
  pub fn default_u32(&self) -> u32 { self._tab.get::<u32>(ScalarStuff::VT_DEFAULT_U32, Some(42)).unwrap() }
  #[inline]
  pub fn just_i64(&self) -> i64 { self._tab.get::<i64>(ScalarStuff::VT_JUST_I64, Some(0)).unwrap() }
  #[inline]
  pub fn maybe_i64(&self) -> Option<i64> { self._tab.get::<i64>(ScalarStuff::VT_MAYBE_I64, None) }
  #[inline]
  pub fn default_i64(&self) -> i64 { self._tab.get::<i64>(ScalarStuff::VT_DEFAULT_I64, Some(42)).unwrap() }
  #[inline]
  pub fn just_u64(&self) -> u64 { self._tab.get::<u64>(ScalarStuff::VT_JUST_U64, Some(0)).unwrap() }
  #[inline]
  pub fn maybe_u64(&self) -> Option<u64> { self._tab.get::<u64>(ScalarStuff::VT_MAYBE_U64, None) }
  #[inline]
  pub fn default_u64(&self) -> u64 { self._tab.get::<u64>(ScalarStuff::VT_DEFAULT_U64, Some(42)).unwrap() }
  #[inline]
  pub fn just_f32(&self) -> f32 { self._tab.get::<f32>(ScalarStuff::VT_JUST_F32, Some(0.0)).unwrap() }
  #[inline]
  pub fn maybe_f32(&self) -> Option<f32> { self._tab.get::<f32>(ScalarStuff::VT_MAYBE_F32, None) }
  #[inline]
  pub fn default_f32(&self) -> f32 { self._tab.get::<f32>(ScalarStuff::VT_DEFAULT_F32, Some(42.0)).unwrap() }
  #[inline]
  pub fn just_f64(&self) -> f64 { self._tab.get::<f64>(ScalarStuff::VT_JUST_F64, Some(0.0)).unwrap() }
  #[inline]
  pub fn maybe_f64(&self) -> Option<f64> { self._tab.get::<f64>(ScalarStuff::VT_MAYBE_F64, None) }
  #[inline]
  pub fn default_f64(&self) -> f64 { self._tab.get::<f64>(ScalarStuff::VT_DEFAULT_F64, Some(42.0)).unwrap() }
  #[inline]
  pub fn just_bool(&self) -> bool { self._tab.get::<bool>(ScalarStuff::VT_JUST_BOOL, Some(false)).unwrap() }
  #[inline]
  pub fn maybe_bool(&self) -> Option<bool> { self._tab.get::<bool>(ScalarStuff::VT_MAYBE_BOOL, None) }
  #[inline]
  pub fn default_bool(&self) -> bool { self._tab.get::<bool>(ScalarStuff::VT_DEFAULT_BOOL, Some(true)).unwrap() }
  #[inline]
  pub fn just_enum(&self) -> OptionalByte { self._tab.get::<OptionalByte>(ScalarStuff::VT_JUST_ENUM, Some(OptionalByte::None)).unwrap() }
  #[inline]
  pub fn maybe_enum(&self) -> Option<OptionalByte> { self._tab.get::<OptionalByte>(ScalarStuff::VT_MAYBE_ENUM, None) }
  #[inline]
  pub fn default_enum(&self) -> OptionalByte { self._tab.get::<OptionalByte>(ScalarStuff::VT_DEFAULT_ENUM, Some(OptionalByte::One)).unwrap() }
}

pub struct ScalarStuffArgs {
  pub just_i8: i8,
  pub maybe_i8: Option<i8>,
  pub default_i8: i8,
  pub just_u8: u8,
  pub maybe_u8: Option<u8>,
  pub default_u8: u8,
  pub just_i16: i16,
  pub maybe_i16: Option<i16>,
  pub default_i16: i16,
  pub just_u16: u16,
  pub maybe_u16: Option<u16>,
  pub default_u16: u16,
  pub just_i32: i32,
  pub maybe_i32: Option<i32>,
  pub default_i32: i32,
  pub just_u32: u32,
  pub maybe_u32: Option<u32>,
  pub default_u32: u32,
  pub just_i64: i64,
  pub maybe_i64: Option<i64>,
  pub default_i64: i64,
  pub just_u64: u64,
  pub maybe_u64: Option<u64>,
  pub default_u64: u64,
  pub just_f32: f32,
  pub maybe_f32: Option<f32>,
  pub default_f32: f32,
  pub just_f64: f64,
  pub maybe_f64: Option<f64>,
  pub default_f64: f64,
  pub just_bool: bool,
  pub maybe_bool: Option<bool>,
  pub default_bool: bool,
  pub just_enum: OptionalByte,
  pub maybe_enum: Option<OptionalByte>,
  pub default_enum: OptionalByte,
}
impl<'a> Default for ScalarStuffArgs {
  #[inline]
  fn default() -> Self {
    ScalarStuffArgs {
      just_i8: 0,
      maybe_i8: None,
      default_i8: 42,
      just_u8: 0,
      maybe_u8: None,
      default_u8: 42,
      just_i16: 0,
      maybe_i16: None,
      default_i16: 42,
      just_u16: 0,
      maybe_u16: None,
      default_u16: 42,
      just_i32: 0,
      maybe_i32: None,
      default_i32: 42,
      just_u32: 0,
      maybe_u32: None,
      default_u32: 42,
      just_i64: 0,
      maybe_i64: None,
      default_i64: 42,
      just_u64: 0,
      maybe_u64: None,
      default_u64: 42,
      just_f32: 0.0,
      maybe_f32: None,
      default_f32: 42.0,
      just_f64: 0.0,
      maybe_f64: None,
      default_f64: 42.0,
      just_bool: false,
      maybe_bool: None,
      default_bool: true,
      just_enum: OptionalByte::None,
      maybe_enum: None,
      default_enum: OptionalByte::One,
    }
  }
}

pub struct ScalarStuffBuilder<'a: 'b, 'b> {
  fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a>,
  start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b> ScalarStuffBuilder<'a, 'b> {
  #[inline]
  pub fn add_just_i8(&mut self, just_i8: i8) { self.fbb_.push_slot::<i8>(ScalarStuff::VT_JUST_I8, just_i8, 0); }
  #[inline]
  pub fn add_maybe_i8(&mut self, maybe_i8: i8) { self.fbb_.push_slot_always::<i8>(ScalarStuff::VT_MAYBE_I8, maybe_i8); }
  #[inline]
  pub fn add_default_i8(&mut self, default_i8: i8) { self.fbb_.push_slot::<i8>(ScalarStuff::VT_DEFAULT_I8, default_i8, 42); }
  #[inline]
  pub fn add_just_u8(&mut self, just_u8: u8) { self.fbb_.push_slot::<u8>(ScalarStuff::VT_JUST_U8, just_u8, 0); }
  #[inline]
  pub fn add_maybe_u8(&mut self, maybe_u8: u8) { self.fbb_.push_slot_always::<u8>(ScalarStuff::VT_MAYBE_U8, maybe_u8); }
  #[inline]
  pub fn add_default_u8(&mut self, default_u8: u8) { self.fbb_.push_slot::<u8>(ScalarStuff::VT_DEFAULT_U8, default_u8, 42); }
  #[inline]
  pub fn add_just_i16(&mut self, just_i16: i16) { self.fbb_.push_slot::<i16>(ScalarStuff::VT_JUST_I16, just_i16, 0); }
  #[inline]
  pub fn add_maybe_i16(&mut self, maybe_i16: i16) { self.fbb_.push_slot_always::<i16>(ScalarStuff::VT_MAYBE_I16, maybe_i16); }
  #[inline]
  pub fn add_default_i16(&mut self, default_i16: i16) { self.fbb_.push_slot::<i16>(ScalarStuff::VT_DEFAULT_I16, default_i16, 42); }
  #[inline]
  pub fn add_just_u16(&mut self, just_u16: u16) { self.fbb_.push_slot::<u16>(ScalarStuff::VT_JUST_U16, just_u16, 0); }
  #[inline]
  pub fn add_maybe_u16(&mut self, maybe_u16: u16) { self.fbb_.push_slot_always::<u16>(ScalarStuff::VT_MAYBE_U16, maybe_u16); }
  #[inline]
  pub fn add_default_u16(&mut self, default_u16: u16) { self.fbb_.push_slot::<u16>(ScalarStuff::VT_DEFAULT_U16, default_u16, 42); }
  #[inline]
  pub fn add_just_i32(&mut self, just_i32: i32) { self.fbb_.push_slot::<i32>(ScalarStuff::VT_JUST_I32, just_i32, 0); }
  #[inline]
  pub fn add_maybe_i32(&mut self, maybe_i32: i32) { self.fbb_.push_slot_always::<i32>(ScalarStuff::VT_MAYBE_I32, maybe_i32); }
  #[inline]
  pub fn add_default_i32(&mut self, default_i32: i32) { self.fbb_.push_slot::<i32>(ScalarStuff::VT_DEFAULT_I32, default_i32, 42); }
  #[inline]
  pub fn add_just_u32(&mut self, just_u32: u32) { self.fbb_.push_slot::<u32>(ScalarStuff::VT_JUST_U32, just_u32, 0); }
  #[inline]
  pub fn add_maybe_u32(&mut self, maybe_u32: u32) { self.fbb_.push_slot_always::<u32>(ScalarStuff::VT_MAYBE_U32, maybe_u32); }
  #[inline]
  pub fn add_default_u32(&mut self, default_u32: u32) { self.fbb_.push_slot::<u32>(ScalarStuff::VT_DEFAULT_U32, default_u32, 42); }
  #[inline]
  pub fn add_just_i64(&mut self, just_i64: i64) { self.fbb_.push_slot::<i64>(ScalarStuff::VT_JUST_I64, just_i64, 0); }
  #[inline]
  pub fn add_maybe_i64(&mut self, maybe_i64: i64) { self.fbb_.push_slot_always::<i64>(ScalarStuff::VT_MAYBE_I64, maybe_i64); }
  #[inline]
  pub fn add_default_i64(&mut self, default_i64: i64) { self.fbb_.push_slot::<i64>(ScalarStuff::VT_DEFAULT_I64, default_i64, 42); }
  #[inline]
  pub fn add_just_u64(&mut self, just_u64: u64) { self.fbb_.push_slot::<u64>(ScalarStuff::VT_JUST_U64, just_u64, 0); }
  #[inline]
  pub fn add_maybe_u64(&mut self, maybe_u64: u64) { self.fbb_.push_slot_always::<u64>(ScalarStuff::VT_MAYBE_U64, maybe_u64); }
  #[inline]
  pub fn add_default_u64(&mut self, default_u64: u64) { self.fbb_.push_slot::<u64>(ScalarStuff::VT_DEFAULT_U64, default_u64, 42); }
  #[inline]
  pub fn add_just_f32(&mut self, just_f32: f32) { self.fbb_.push_slot::<f32>(ScalarStuff::VT_JUST_F32, just_f32, 0.0); }
  #[inline]
  pub fn add_maybe_f32(&mut self, maybe_f32: f32) { self.fbb_.push_slot_always::<f32>(ScalarStuff::VT_MAYBE_F32, maybe_f32); }
  #[inline]
  pub fn add_default_f32(&mut self, default_f32: f32) { self.fbb_.push_slot::<f32>(ScalarStuff::VT_DEFAULT_F32, default_f32, 42.0); }
  #[inline]
  pub fn add_just_f64(&mut self, just_f64: f64) { self.fbb_.push_slot::<f64>(ScalarStuff::VT_JUST_F64, just_f64, 0.0); }
  #[inline]
  pub fn add_maybe_f64(&mut self, maybe_f64: f64) { self.fbb_.push_slot_always::<f64>(ScalarStuff::VT_MAYBE_F64, maybe_f64); }
  #[inline]
  pub fn add_default_f64(&mut self, default_f64: f64) { self.fbb_.push_slot::<f64>(ScalarStuff::VT_DEFAULT_F64, default_f64, 42.0); }
  #[inline]
  pub fn add_just_bool(&mut self, just_bool: bool) { self.fbb_.push_slot::<bool>(ScalarStuff::VT_JUST_BOOL, just_bool, false); }
  #[inline]
  pub fn add_maybe_bool(&mut self, maybe_bool: bool) { self.fbb_.push_slot_always::<bool>(ScalarStuff::VT_MAYBE_BOOL, maybe_bool); }
  #[inline]
  pub fn add_default_bool(&mut self, default_bool: bool) { self.fbb_.push_slot::<bool>(ScalarStuff::VT_DEFAULT_BOOL, default_bool, true); }
  #[inline]
  pub fn add_just_enum(&mut self, just_enum: OptionalByte) { self.fbb_.push_slot::<OptionalByte>(ScalarStuff::VT_JUST_ENUM, just_enum, OptionalByte::None); }
  #[inline]
  pub fn add_maybe_enum(&mut self, maybe_enum: OptionalByte) { self.fbb_.push_slot_always::<OptionalByte>(ScalarStuff::VT_MAYBE_ENUM, maybe_enum); }
  #[inline]
  pub fn add_default_enum(&mut self, default_enum: OptionalByte) { self.fbb_.push_slot::<OptionalByte>(ScalarStuff::VT_DEFAULT_ENUM, default_enum, OptionalByte::One); }
  #[inline]
  pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>) -> ScalarStuffBuilder<'a, 'b> {
    let start = _fbb.start_table();
    ScalarStuffBuilder { fbb_: _fbb, start_: start }
  }
  #[inline]
  pub fn finish(self) -> flatbuffers::WIPOffset<ScalarStuff<'a>> {
    let o = self.fbb_.end_table(self.start_);
    flatbuffers::WIPOffset::new(o.value())
  }
}

impl std::fmt::Debug for ScalarStuff<'_> {
  fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
    let mut ds = f.debug_struct("ScalarStuff");
    ds.field("just_i8", &self.just_i8());
    ds.field("maybe_i8", &self.maybe_i8());
    ds.field("default_i8", &self.default_i8());
    ds.field("just_u8", &self.just_u8());
    ds.field("maybe_u8", &self.maybe_u8());
    ds.field("default_u8", &self.default_u8());
    ds.field("just_i16", &self.just_i16());
    ds.field("maybe_i16", &self.maybe_i16());
    ds.field("default_i16", &self.default_i16());
    ds.field("just_u16", &self.just_u16());
    ds.field("maybe_u16", &self.maybe_u16());
    ds.field("default_u16", &self.default_u16());
    ds.field("just_i32", &self.just_i32());
    ds.field("maybe_i32", &self.maybe_i32());
    ds.field("default_i32", &self.default_i32());
    ds.field("just_u32", &self.just_u32());
    ds.field("maybe_u32", &self.maybe_u32());
    ds.field("default_u32", &self.default_u32());
    ds.field("just_i64", &self.just_i64());
    ds.field("maybe_i64", &self.maybe_i64());
    ds.field("default_i64", &self.default_i64());
    ds.field("just_u64", &self.just_u64());
    ds.field("maybe_u64", &self.maybe_u64());
    ds.field("default_u64", &self.default_u64());
    ds.field("just_f32", &self.just_f32());
    ds.field("maybe_f32", &self.maybe_f32());
    ds.field("default_f32", &self.default_f32());
    ds.field("just_f64", &self.just_f64());
    ds.field("maybe_f64", &self.maybe_f64());
    ds.field("default_f64", &self.default_f64());
    ds.field("just_bool", &self.just_bool());
    ds.field("maybe_bool", &self.maybe_bool());
    ds.field("default_bool", &self.default_bool());
    ds.field("just_enum", &self.just_enum());
    ds.field("maybe_enum", &self.maybe_enum());
    ds.field("default_enum", &self.default_enum());
    ds.finish()
  }
}
#[inline]
pub fn get_root_as_scalar_stuff<'a>(buf: &'a [u8]) -> ScalarStuff<'a> {
  flatbuffers::get_root::<ScalarStuff<'a>>(buf)
}
#[inline]
pub fn get_size_prefixed_root_as_scalar_stuff<'a>(buf: &'a [u8]) -> ScalarStuff<'a> {
  flatbuffers::get_size_prefixed_root::<ScalarStuff<'a>>(buf)
}
pub const SCALAR_STUFF_IDENTIFIER: &str = "NULL";
#[inline]
pub fn scalar_stuff_buffer_has_identifier(buf: &[u8]) -> bool {
  flatbuffers::buffer_has_identifier(buf, SCALAR_STUFF_IDENTIFIER, false)
}
#[inline]
pub fn scalar_stuff_size_prefixed_buffer_has_identifier(buf: &[u8]) -> bool {
  flatbuffers::buffer_has_identifier(buf, SCALAR_STUFF_IDENTIFIER, true)
}
pub const SCALAR_STUFF_EXTENSION: &str = "mon";
#[inline]
pub fn finish_scalar_stuff_buffer<'a, 'b>(
    fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>,
    root: flatbuffers::WIPOffset<ScalarStuff<'a>>) {
  fbb.finish(root, Some(SCALAR_STUFF_IDENTIFIER));
}
#[inline]
pub fn finish_size_prefixed_scalar_stuff_buffer<'a, 'b>(fbb: &'b mut flatbuffers::FlatBufferBuilder<'a>, root: flatbuffers::WIPOffset<ScalarStuff<'a>>) {
  fbb.finish_size_prefixed(root, Some(SCALAR_STUFF_IDENTIFIER));
}
}  // pub mod optional_scalars
{ self._tab.get::<u32>(ScalarStuff::VT_MAYBE_U32, None) }
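A hedged round-trip sketch against the generated API above (assumes a flatbuffers crate version compatible with this generated code; the field choices are arbitrary):

fn demo() {
    use optional_scalars::*;

    let mut fbb = flatbuffers::FlatBufferBuilder::new();
    let root = ScalarStuff::create(&mut fbb, &ScalarStuffArgs {
        maybe_i32: Some(7),                 // written only because it is Some
        just_enum: OptionalByte::Two,
        ..Default::default()
    });
    finish_scalar_stuff_buffer(&mut fbb, root);

    let s = get_root_as_scalar_stuff(fbb.finished_data());
    assert_eq!(s.maybe_i32(), Some(7));
    assert_eq!(s.maybe_u32(), None);        // absent optional reads back as None
    assert_eq!(s.default_i8(), 42);         // unwritten field falls back to its default
}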
encrypted_key.go
// Copyright 2011 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package packet import ( "crypto/rsa" "encoding/binary" "io" "math/big" "strconv" "github.com/bored-engineer/crypto/openpgp/elgamal" "github.com/bored-engineer/crypto/openpgp/errors" ) const encryptedKeyVersion = 3 // EncryptedKey represents a public-key encrypted session key. See RFC 4880, // section 5.1. type EncryptedKey struct { KeyId uint64 Algo PublicKeyAlgorithm CipherFunc CipherFunction // only valid after a successful Decrypt Key []byte // only valid after a successful Decrypt encryptedMPI1, encryptedMPI2 parsedMPI } func (e *EncryptedKey) parse(r io.Reader) (err error) { var buf [10]byte _, err = readFull(r, buf[:]) if err != nil { return } if buf[0] != encryptedKeyVersion { return errors.UnsupportedError("unknown EncryptedKey version " + strconv.Itoa(int(buf[0]))) } e.KeyId = binary.BigEndian.Uint64(buf[1:9]) e.Algo = PublicKeyAlgorithm(buf[9]) switch e.Algo { case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly: e.encryptedMPI1.bytes, e.encryptedMPI1.bitLength, err = readMPI(r) if err != nil { return } case PubKeyAlgoElGamal: e.encryptedMPI1.bytes, e.encryptedMPI1.bitLength, err = readMPI(r) if err != nil { return } e.encryptedMPI2.bytes, e.encryptedMPI2.bitLength, err = readMPI(r) if err != nil { return } } _, err = consumeAll(r) return } func checksumKeyMaterial(key []byte) uint16 { var checksum uint16 for _, v := range key { checksum += uint16(v) } return checksum } // Decrypt decrypts an encrypted session key with the given private key. The // private key must have been decrypted first. // If config is nil, sensible defaults will be used. func (e *EncryptedKey) Decrypt(priv *PrivateKey, config *Config) error { var err error var b []byte // TODO(agl): use session key decryption routines here to avoid // padding oracle attacks. switch priv.PubKeyAlgo { case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly: k := priv.PrivateKey.(*rsa.PrivateKey) b, err = rsa.DecryptPKCS1v15(config.Random(), k, padToKeySize(&k.PublicKey, e.encryptedMPI1.bytes)) case PubKeyAlgoElGamal: c1 := new(big.Int).SetBytes(e.encryptedMPI1.bytes) c2 := new(big.Int).SetBytes(e.encryptedMPI2.bytes) b, err = elgamal.Decrypt(priv.PrivateKey.(*elgamal.PrivateKey), c1, c2) default: err = errors.InvalidArgumentError("cannot decrypt encrypted session key with private key of type " + strconv.Itoa(int(priv.PubKeyAlgo))) } if err != nil { return err } e.CipherFunc = CipherFunction(b[0]) e.Key = b[1 : len(b)-2] expectedChecksum := uint16(b[len(b)-2])<<8 | uint16(b[len(b)-1]) checksum := checksumKeyMaterial(e.Key) if checksum != expectedChecksum { return errors.StructuralError("EncryptedKey checksum incorrect") } return nil } // Serialize writes the encrypted key packet, e, to w. 
func (e *EncryptedKey) Serialize(w io.Writer) error { var mpiLen int switch e.Algo { case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly: mpiLen = 2 + len(e.encryptedMPI1.bytes) case PubKeyAlgoElGamal: mpiLen = 2 + len(e.encryptedMPI1.bytes) + 2 + len(e.encryptedMPI2.bytes) default: return errors.InvalidArgumentError("don't know how to serialize encrypted key type " + strconv.Itoa(int(e.Algo))) } serializeHeader(w, packetTypeEncryptedKey, 1 /* version */ +8 /* key id */ +1 /* algo */ +mpiLen) w.Write([]byte{encryptedKeyVersion}) binary.Write(w, binary.BigEndian, e.KeyId) w.Write([]byte{byte(e.Algo)}) switch e.Algo { case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly: writeMPIs(w, e.encryptedMPI1) case PubKeyAlgoElGamal: writeMPIs(w, e.encryptedMPI1, e.encryptedMPI2) default: panic("internal error") } return nil } // SerializeEncryptedKey serializes an encrypted key packet to w that contains // key, encrypted to pub. // If config is nil, sensible defaults will be used. func SerializeEncryptedKey(w io.Writer, pub *PublicKey, cipherFunc CipherFunction, key []byte, config *Config) error
func serializeEncryptedKeyRSA(w io.Writer, rand io.Reader, header [10]byte, pub *rsa.PublicKey, keyBlock []byte) error { cipherText, err := rsa.EncryptPKCS1v15(rand, pub, keyBlock) if err != nil { return errors.InvalidArgumentError("RSA encryption failed: " + err.Error()) } packetLen := 10 /* header length */ + 2 /* mpi size */ + len(cipherText) err = serializeHeader(w, packetTypeEncryptedKey, packetLen) if err != nil { return err } _, err = w.Write(header[:]) if err != nil { return err } return writeMPI(w, 8*uint16(len(cipherText)), cipherText) } func serializeEncryptedKeyElGamal(w io.Writer, rand io.Reader, header [10]byte, pub *elgamal.PublicKey, keyBlock []byte) error { c1, c2, err := elgamal.Encrypt(rand, pub, keyBlock) if err != nil { return errors.InvalidArgumentError("ElGamal encryption failed: " + err.Error()) } packetLen := 10 /* header length */ packetLen += 2 /* mpi size */ + (c1.BitLen()+7)/8 packetLen += 2 /* mpi size */ + (c2.BitLen()+7)/8 err = serializeHeader(w, packetTypeEncryptedKey, packetLen) if err != nil { return err } _, err = w.Write(header[:]) if err != nil { return err } err = writeBig(w, c1) if err != nil { return err } return writeBig(w, c2) }
{ var buf [10]byte buf[0] = encryptedKeyVersion binary.BigEndian.PutUint64(buf[1:9], pub.KeyId) buf[9] = byte(pub.PubKeyAlgo) keyBlock := make([]byte, 1 /* cipher type */ +len(key)+2 /* checksum */) keyBlock[0] = byte(cipherFunc) copy(keyBlock[1:], key) checksum := checksumKeyMaterial(key) keyBlock[1+len(key)] = byte(checksum >> 8) keyBlock[1+len(key)+1] = byte(checksum) switch pub.PubKeyAlgo { case PubKeyAlgoRSA, PubKeyAlgoRSAEncryptOnly: return serializeEncryptedKeyRSA(w, config.Random(), buf, pub.PublicKey.(*rsa.PublicKey), keyBlock) case PubKeyAlgoElGamal: return serializeEncryptedKeyElGamal(w, config.Random(), buf, pub.PublicKey.(*elgamal.PublicKey), keyBlock) case PubKeyAlgoDSA, PubKeyAlgoRSASignOnly: return errors.InvalidArgumentError("cannot encrypt to public key of type " + strconv.Itoa(int(pub.PubKeyAlgo))) } return errors.UnsupportedError("encrypting a key to public key of type " + strconv.Itoa(int(pub.PubKeyAlgo))) }
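A hedged usage sketch for the package above (not part of the file): serializing a fresh session key to a recipient and decrypting it again. It assumes `pub`/`priv` are a matching RSA or ElGamal key pair parsed elsewhere, and it leans on the documented behaviour that a nil *Config falls back to sensible defaults; `CipherAES128` is the package's usual cipher constant.

func exampleSerializeSessionKey(w io.Writer, pub *PublicKey) error {
	// A 16-byte key selects AES-128; real code should fill it from
	// config.Random() instead of leaving it zeroed.
	sessionKey := make([]byte, 16)
	return SerializeEncryptedKey(w, pub, CipherAES128, sessionKey, nil)
}

func exampleDecryptSessionKey(e *EncryptedKey, priv *PrivateKey) ([]byte, error) {
	// priv must already be decrypted; Decrypt verifies the trailing
	// two-byte checksum before exposing e.Key and e.CipherFunc.
	if err := e.Decrypt(priv, nil); err != nil {
		return nil, err
	}
	return e.Key, nil
}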
mysql.rs
use super::SqlFlavour; use crate::{ connect, connection_wrapper::Connection, error::{quaint_error_to_connector_error, SystemDatabase}, SqlMigrationConnector, }; use datamodel::{walkers::walk_scalar_fields, Datamodel}; use enumflags2::BitFlags; use indoc::indoc; use migration_connector::{ConnectorError, ConnectorResult, MigrationDirectory, MigrationFeature}; use once_cell::sync::Lazy; use quaint::connector::MysqlUrl; use regex::{Regex, RegexSet}; use sql_schema_describer::{DescriberErrorKind, SqlSchema, SqlSchemaDescriberBackend}; use std::sync::atomic::{AtomicU8, Ordering}; use url::Url; const ADVISORY_LOCK_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10); static QUALIFIED_NAME_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r#"`[^ ]+`\.`[^ ]+`"#).unwrap()); pub(crate) struct MysqlFlavour { url: MysqlUrl, /// See the [Circumstances] enum. circumstances: AtomicU8, /// Relevant features enabled in the schema. features: BitFlags<MigrationFeature>, } impl std::fmt::Debug for MysqlFlavour { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("MysqlFlavour").field("url", &"<REDACTED>").finish() } } impl MysqlFlavour { pub(crate) fn new(url: MysqlUrl, features: BitFlags<MigrationFeature>) -> Self { MysqlFlavour { url, circumstances: Default::default(), features, } } pub(crate) fn is_mariadb(&self) -> bool { BitFlags::<Circumstances>::from_bits(self.circumstances.load(Ordering::Relaxed)) .unwrap_or_default() .contains(Circumstances::IsMariadb) } pub(crate) fn is_mysql_5_6(&self) -> bool { BitFlags::<Circumstances>::from_bits(self.circumstances.load(Ordering::Relaxed)) .unwrap_or_default() .contains(Circumstances::IsMysql56) } pub(crate) fn lower_cases_table_names(&self) -> bool { BitFlags::<Circumstances>::from_bits(self.circumstances.load(Ordering::Relaxed)) .unwrap_or_default() .contains(Circumstances::LowerCasesTableNames) } async fn shadow_database_connection( &self, main_connection: &Connection, connector: &SqlMigrationConnector, temporary_database_name: Option<&str>, ) -> ConnectorResult<Connection> { if let Some(shadow_database_connection_string) = &connector.shadow_database_connection_string { let conn = crate::connect(shadow_database_connection_string).await?; let shadow_conninfo = conn.connection_info(); let main_conninfo = main_connection.connection_info(); if shadow_conninfo.host() == main_conninfo.host() && shadow_conninfo.dbname() == main_conninfo.dbname() { return Err(ConnectorError::from_msg("The shadow database you configured appears to be the same as the main database. Please specify another shadow database.".into())); } tracing::info!( "Connecting to user-provided shadow database at {}", shadow_database_connection_string ); if self.reset(&conn).await.is_err() { connector.best_effort_reset(&conn).await?; } return Ok(conn); } let database_name = temporary_database_name.unwrap(); let create_database = format!("CREATE DATABASE `{}`", database_name); main_connection .raw_cmd(&create_database) .await .map_err(ConnectorError::from) .map_err(|err| err.into_shadow_db_creation_error())?; let mut temporary_database_url = self.url.url().clone(); temporary_database_url.set_path(&format!("/{}", database_name)); let temporary_database_url = temporary_database_url.to_string(); tracing::debug!("Connecting to temporary database at {:?}", temporary_database_url); Ok(crate::connect(&temporary_database_url).await?) 
} } #[async_trait::async_trait] impl SqlFlavour for MysqlFlavour { async fn acquire_lock(&self, connection: &Connection) -> ConnectorResult<()> { // https://dev.mysql.com/doc/refman/8.0/en/locking-functions.html let query = format!("SELECT GET_LOCK('prisma_migrate', {})", ADVISORY_LOCK_TIMEOUT.as_secs()); Ok(connection.raw_cmd(&query).await?) } fn check_database_version_compatibility( &self, datamodel: &Datamodel, ) -> Option<user_facing_errors::common::DatabaseVersionIncompatibility> { if self.is_mysql_5_6() { let mut errors = Vec::new(); check_datamodel_for_mysql_5_6(datamodel, &mut errors); if errors.is_empty() { return None; } let mut errors_string = String::with_capacity(errors.iter().map(|err| err.len() + 3).sum()); for error in &errors { errors_string.push_str("- "); errors_string.push_str(error); errors_string.push('\n'); } Some(user_facing_errors::common::DatabaseVersionIncompatibility { errors: errors_string, database_version: "MySQL 5.6".into(), }) } else { None } } async fn create_database(&self, database_str: &str) -> ConnectorResult<String> { let mut url = Url::parse(database_str).map_err(|err| ConnectorError::url_parse_error(err, database_str))?; url.set_path("/mysql"); let conn = connect(&url.to_string()).await?; let db_name = self.url.dbname(); let query = format!( "CREATE DATABASE `{}` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;", db_name ); conn.raw_cmd(&query).await?; Ok(db_name.to_owned()) } async fn create_migrations_table(&self, connection: &Connection) -> ConnectorResult<()> { let sql = indoc! {r#" CREATE TABLE _prisma_migrations ( id VARCHAR(36) PRIMARY KEY NOT NULL, checksum VARCHAR(64) NOT NULL, finished_at DATETIME(3), migration_name VARCHAR(255) NOT NULL, logs TEXT, rolled_back_at DATETIME(3), started_at DATETIME(3) NOT NULL DEFAULT CURRENT_TIMESTAMP(3), applied_steps_count INTEGER UNSIGNED NOT NULL DEFAULT 0 ) DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci; "#}; Ok(connection.raw_cmd(sql).await?) } async fn describe_schema<'a>(&'a self, connection: &Connection) -> ConnectorResult<SqlSchema> { sql_schema_describer::mysql::SqlSchemaDescriber::new(connection.quaint().clone()) .describe(connection.connection_info().schema_name()) .await .map_err(|err| match err.into_kind() { DescriberErrorKind::QuaintError(err) => { quaint_error_to_connector_error(err, connection.connection_info()) } DescriberErrorKind::CrossSchemaReference { .. } => { unreachable!("No schemas in MySQL") } }) } async fn drop_database(&self, database_url: &str) -> ConnectorResult<()> { let connection = connect(database_url).await?; let db_name = connection.connection_info().dbname().unwrap(); connection.raw_cmd(&format!("DROP DATABASE `{}`", db_name)).await?; Ok(()) } async fn
(&self, connection: &Connection) -> ConnectorResult<()> { connection.raw_cmd("DROP TABLE _prisma_migrations").await?; Ok(()) } async fn ensure_connection_validity(&self, connection: &Connection) -> ConnectorResult<()> { static MYSQL_SYSTEM_DATABASES: Lazy<regex::RegexSet> = Lazy::new(|| { RegexSet::new(&[ "(?i)^mysql$", "(?i)^information_schema$", "(?i)^performance_schema$", "(?i)^sys$", ]) .unwrap() }); let db_name = connection.connection_info().schema_name(); if MYSQL_SYSTEM_DATABASES.is_match(db_name) { return Err(SystemDatabase(db_name.to_owned()).into()); } let version = connection.version().await?; let mut circumstances = BitFlags::<Circumstances>::default(); if let Some(version) = version { if version.starts_with("5.6") { circumstances |= Circumstances::IsMysql56; } if version.contains("MariaDB") { circumstances |= Circumstances::IsMariadb; } } let result_set = connection.query_raw("SELECT @@lower_case_table_names", &[]).await?; if let Some(1) = result_set.into_single().ok().and_then(|row| { row.at(0) .and_then(|row| row.to_string().and_then(|s| s.parse().ok()).or_else(|| row.as_i64())) }) { // https://dev.mysql.com/doc/refman/8.0/en/identifier-case-sensitivity.html circumstances |= Circumstances::LowerCasesTableNames; } self.circumstances.store(circumstances.bits(), Ordering::Relaxed); Ok(()) } async fn qe_setup(&self, database_str: &str) -> ConnectorResult<()> { let mut url = Url::parse(database_str).map_err(|err| ConnectorError::url_parse_error(err, database_str))?; url.set_path("/mysql"); let conn = connect(&url.to_string()).await?; let db_name = self.url.dbname(); let query = format!("DROP DATABASE IF EXISTS `{}`", db_name); conn.raw_cmd(&query).await?; let query = format!( "CREATE DATABASE `{}` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;", db_name ); conn.raw_cmd(&query).await?; Ok(()) } async fn reset(&self, connection: &Connection) -> ConnectorResult<()> { let db_name = connection.connection_info().dbname().unwrap(); connection.raw_cmd(&format!("DROP DATABASE `{}`", db_name)).await?; connection.raw_cmd(&format!("CREATE DATABASE `{}`", db_name)).await?; connection.raw_cmd(&format!("USE `{}`", db_name)).await?; Ok(()) } fn scan_migration_script(&self, script: &str) { for capture in QUALIFIED_NAME_RE .captures_iter(script) .filter_map(|captures| captures.get(0)) { tracing::warn!( location = ?capture.range(), name = capture.as_str(), "Your migration appears to contain a qualified name. Qualified names like `mydb`.`mytable` interact badly with the shadow database on MySQL. Please change these to unqualified names (just `mytable` in the previous example)." ); } } #[tracing::instrument(skip(self, migrations, connection, connector))] async fn sql_schema_from_migration_history( &self, migrations: &[MigrationDirectory], connection: &Connection, connector: &SqlMigrationConnector, ) -> ConnectorResult<SqlSchema> { let temporary_database_name = connector.temporary_database_name(); let temp_database = self .shadow_database_connection(connection, connector, temporary_database_name.as_deref()) .await?; // We go through the whole process without early return, then clean up // the temporary database, and only then return the result. This avoids // leaving shadow databases behind in case of e.g. faulty migrations. 
let sql_schema_result = (|| async { for migration in migrations { let script = migration.read_migration_script()?; tracing::debug!( "Applying migration `{}` to temporary database.", migration.migration_name() ); self.scan_migration_script(&script); temp_database .raw_cmd(&script) .await .map_err(ConnectorError::from) .map_err(|connector_error| { connector_error.into_migration_does_not_apply_cleanly(migration.migration_name().to_owned()) })?; } self.describe_schema(&temp_database).await })() .await; if let Some(database_name) = temporary_database_name { let drop_database = format!("DROP DATABASE IF EXISTS `{}`", database_name); connection.raw_cmd(&drop_database).await?; } sql_schema_result } fn features(&self) -> BitFlags<MigrationFeature> { self.features } } #[derive(BitFlags, Debug, Clone, Copy, PartialEq)] #[repr(u8)] pub enum Circumstances { LowerCasesTableNames = 0b0001, IsMysql56 = 0b0010, IsMariadb = 0b0100, } fn check_datamodel_for_mysql_5_6(datamodel: &Datamodel, errors: &mut Vec<String>) { walk_scalar_fields(datamodel).for_each(|field| { if field.field_type().is_json() { errors.push(format!( "The `Json` data type used in {}.{} is not supported on MySQL 5.6.", field.model().name(), field.name() )) } }); } #[cfg(test)] mod tests { use super::*; #[test] fn debug_impl_does_not_leak_connection_info() { let url = "mysql://myname:mypassword@myserver:8765/mydbname"; let flavour = MysqlFlavour::new(MysqlUrl::new(url.parse().unwrap()).unwrap(), BitFlags::default()); let debugged = format!("{:?}", flavour); let words = &["myname", "mypassword", "myserver", "8765", "mydbname"]; for word in words { assert!(!debugged.contains(word)); } } #[test] fn qualified_name_re_matches_as_expected() { let should_match = r#"ALTER TABLE `mydb`.`cat` DROP PRIMARY KEY"#; let should_not_match = r#"ALTER TABLE `cat` ADD FOREIGN KEY (`ab`, cd`) REFERENCES `dog`(`id`)"#; assert!( QUALIFIED_NAME_RE.is_match_at(should_match, 12), "captures: {:?}", QUALIFIED_NAME_RE.captures(should_match) ); assert!(!QUALIFIED_NAME_RE.is_match(should_not_match)); } }
drop_migrations_table
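A standalone sketch of the `Circumstances` round-trip the flavour relies on (not part of the file): flags detected at connection time are packed into the `AtomicU8` via `bits()` and recovered with `BitFlags::from_bits`, exactly as `is_mariadb` and friends do above. It assumes the same `enumflags2` API the file already uses.

fn example_circumstances_round_trip() {
    use std::sync::atomic::{AtomicU8, Ordering};
    let cell = AtomicU8::new(0);
    // e.g. detected from the server version string in ensure_connection_validity
    let detected = Circumstances::IsMariadb | Circumstances::LowerCasesTableNames;
    cell.store(detected.bits(), Ordering::Relaxed);
    // Readers fall back to the empty set on an invalid bit pattern.
    let flags = BitFlags::<Circumstances>::from_bits(cell.load(Ordering::Relaxed)).unwrap_or_default();
    assert!(flags.contains(Circumstances::IsMariadb));
    assert!(!flags.contains(Circumstances::IsMysql56));
}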
policy_nets_test.py
# """ Tests policy nets. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import tensorflow as tf from texar.modules.policies.policy_nets import CategoricalPolicyNet class CategoricalPolicyNetTest(tf.test.TestCase): """Tests :class:`texar.modules.CategoricalPolicyNet`. """ def
(self): """Tests logics. """ policy = CategoricalPolicyNet() inputs = tf.random_uniform(shape=[64, 4]) outputs = policy(inputs=inputs) self.assertEqual(outputs['action'].shape, outputs['log_prob'].shape) self.assertIsInstance( outputs['distribution'], tf.distributions.Categorical) if __name__ == "__main__": tf.test.main()
test_categorical_policy
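A hedged sketch of using the module the test exercises, under the same TF-1 graph-mode assumptions: `CategoricalPolicyNet` maps a batch of observations to a dict with 'action', 'log_prob', and 'distribution' entries, so sampling an action is a single `session.run`. The 4-dimensional observation shape is only an example (e.g. CartPole-style states).

import tensorflow as tf
from texar.modules.policies.policy_nets import CategoricalPolicyNet

policy = CategoricalPolicyNet()
states = tf.placeholder(tf.float32, shape=[None, 4])  # hypothetical observation batch
outputs = policy(inputs=states)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    action = sess.run(outputs['action'],
                      feed_dict={states: [[0.0, 0.0, 0.0, 0.0]]})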
test_views.py
# -*- coding: utf-8 -*- import json import re import time from datetime import datetime, timedelta from itertools import cycle from os import path from django import test from django.conf import settings from django.core import mail from django.core.urlresolvers import reverse from django.test.client import RequestFactory from django.test.utils import override_settings from django.utils import translation import mock import requests import waffle from cache_nuggets.lib import Token from jingo.helpers import urlparams from nose import SkipTest from nose.tools import eq_, ok_ from post_request_task import task as post_request_task from pyquery import PyQuery as pq from requests.structures import CaseInsensitiveDict import mkt import mkt.ratings import mkt.site.tests from lib.crypto import packaged from lib.crypto.tests import mock_sign from mkt.abuse.models import AbuseReport from mkt.api.tests.test_oauth import RestOAuth from mkt.comm.tests.test_views import CommTestMixin from mkt.comm.utils import create_comm_note from mkt.constants import MANIFEST_CONTENT_TYPE, comm from mkt.developers.models import ActivityLog, AppLog from mkt.files.models import File from mkt.ratings.models import Review, ReviewFlag from mkt.reviewers.models import (SHOWCASE_TAG, CannedResponse, EscalationQueue, RereviewQueue, ReviewerScore) from mkt.reviewers.utils import ReviewersQueuesHelper from mkt.reviewers.views import (_progress, app_review, queue_apps, route_reviewer) from mkt.site.fixtures import fixture from mkt.site.helpers import absolutify, isotime from mkt.site.storage_utils import private_storage, public_storage from mkt.site.tests import (check_links, days_ago, formset, initial, req_factory_factory, user_factory) from mkt.site.utils import app_factory, make_rated, paginate, version_factory from mkt.submit.tests.test_views import BasePackagedAppTest, SetupFilesMixin from mkt.tags.models import Tag from mkt.users.models import UserProfile from mkt.versions.models import Version from mkt.webapps.indexers import WebappIndexer from mkt.webapps.models import AddonDeviceType, Webapp from mkt.webapps.tasks import unindex_webapps from mkt.websites.utils import website_factory from mkt.zadmin.models import get_config, set_config TIME_FORMAT = '%Y-%m-%dT%H:%M:%SZ' TEST_PATH = path.dirname(path.abspath(__file__)) ATTACHMENTS_DIR = path.abspath(path.join(TEST_PATH, '..', '..', 'comm', 'tests', 'attachments')) class AttachmentManagementMixin(object): def _attachment_management_form(self, num=1): """ Generate and return data for a management form for `num` attachments """ return {'attachment-TOTAL_FORMS': max(1, num), 'attachment-INITIAL_FORMS': 0, 'attachment-MAX_NUM_FORMS': 1000} def _attachments(self, num): """Generate and return data for `num` attachments """ data = {} files = ['bacon.jpg', 'bacon.txt'] descriptions = ['mmm, bacon', ''] if num > 0: for n in xrange(num): i = 0 if n % 2 else 1 attachment = open(path.join(ATTACHMENTS_DIR, files[i]), 'r+') data.update({ 'attachment-%d-attachment' % n: attachment, 'attachment-%d-description' % n: descriptions[i] }) return data class TestedonManagementMixin(object): def _testedon_management_form(self, num=0): """ Generate and return data for a management form for `num` tested on platforms. 
""" return {'testedon-TOTAL_FORMS': max(1, num), 'testedon-INITIAL_FORMS': 0, 'testedon-MAX_NUM_FORMS': 1000} def _platforms(self, num, device_types=[u'\xd0esktop', u'FirefoxOS'], devices=[u'PC ', u'ZT\xc8 Open'], versions=[u'34', u'1.3<']): """Generate and return data for `num` tested on platforms """ data = {} if num > 0: for n in xrange(num): i = n % len(device_types) data.update({ 'testedon-%d-device_type' % n: device_types[i], 'testedon-%d-device' % n: devices[i], 'testedon-%d-version' % n: versions[i], }) return data class AppReviewerTest(mkt.site.tests.TestCase): def setUp(self): super(AppReviewerTest, self).setUp() self.reviewer_user = user_factory(email='editor') self.grant_permission(self.reviewer_user, 'Apps:Review') self.snr_reviewer_user = user_factory(email='snrreviewer') self.grant_permission(self.snr_reviewer_user, 'Apps:Review,Apps:Edit,' 'Apps:ReviewEscalated,Apps:ReviewPrivileged', name='Senior App Reviewers') self.admin_user = user_factory(email='admin') self.grant_permission(self.admin_user, '*:*') self.regular_user = user_factory(email='regular') self.contact_user = user_factory(email='contact') self.login_as_editor() def login_as_admin(self): self.login(self.admin_user) def login_as_editor(self): self.login(self.reviewer_user) def login_as_senior_reviewer(self): self.login(self.snr_reviewer_user) def check_actions(self, expected, elements): """Check the action buttons on the review page. `expected` is a list of tuples containing action name and action form value. `elements` is a PyQuery list of input elements. """ for idx, item in enumerate(expected): text, form_value = item e = elements.eq(idx) eq_(e.parent().text(), text) eq_(e.attr('name'), 'action') eq_(e.val(), form_value) def uses_es(self): return waffle.switch_is_active('reviewer-tools-elasticsearch') class AccessMixin(object): def test_403_for_non_editor(self, *args, **kwargs): self.login('[email protected]') eq_(self.client.head(self.url).status_code, 403) def test_302_for_anonymous(self, *args, **kwargs): self.client.logout() eq_(self.client.head(self.url).status_code, 302) class SearchMixin(object): def test_search_query(self): # Light test to make sure queues can handle search queries. res = self.client.get(self.url, {'text_query': 'test'}) eq_(res.status_code, 200) @mock.patch('mkt.webapps.models.Webapp.get_cached_manifest', mock.Mock) class TestReviewersHome(AppReviewerTest, AccessMixin): def setUp(self): super(TestReviewersHome, self).setUp() self.url = reverse('reviewers.home') self.apps = [app_factory(name='Antelope', status=mkt.STATUS_PENDING, file_kw={'status': mkt.STATUS_PENDING}), app_factory(name='Bear', status=mkt.STATUS_PENDING, file_kw={'status': mkt.STATUS_PENDING}), app_factory(name='Cougar', status=mkt.STATUS_PENDING, file_kw={'status': mkt.STATUS_PENDING})] self.packaged_app = app_factory(name='Dinosaur', status=mkt.STATUS_PUBLIC, is_packaged=True) version_factory(addon=self.packaged_app, file_kw={'status': mkt.STATUS_PENDING}) # Add a disabled app for good measure. app_factory(name='Elephant', disabled_by_user=True, status=mkt.STATUS_PENDING) # Escalate one app to make sure it doesn't affect stats. escalated = app_factory(name='Eyelash Pit Viper', status=mkt.STATUS_PENDING) EscalationQueue.objects.create(addon=escalated) # Add a public app under re-review. rereviewed = app_factory(name='Finch', status=mkt.STATUS_PUBLIC) rq = RereviewQueue.objects.create(addon=rereviewed) rq.update(created=self.days_ago(1)) # Add an app with latest update deleted. It shouldn't affect anything. 
app = app_factory(name='Great White Shark', status=mkt.STATUS_PUBLIC, version_kw={'version': '1.0'}, is_packaged=True) v = version_factory(addon=app, version='2.1', file_kw={'status': mkt.STATUS_PENDING}) v.update(deleted=True) def test_route_reviewer(self): # App reviewers go to apps home. req = mkt.site.tests.req_factory_factory( reverse('reviewers'), user=UserProfile.objects.get(email='[email protected]')) r = route_reviewer(req) self.assert3xx(r, reverse('reviewers.home')) def test_progress_pending(self): self.apps[0].latest_version.update(nomination=self.days_ago(1)) self.apps[1].latest_version.update(nomination=self.days_ago(8)) self.apps[2].latest_version.update(nomination=self.days_ago(15)) counts, percentages = _progress() eq_(counts['pending']['week'], 1) eq_(counts['pending']['new'], 1) eq_(counts['pending']['old'], 1) eq_(counts['pending']['med'], 1) self.assertAlmostEqual(percentages['pending']['new'], 33.333333333333) self.assertAlmostEqual(percentages['pending']['old'], 33.333333333333) self.assertAlmostEqual(percentages['pending']['med'], 33.333333333333) def test_progress_rereview(self): rq = RereviewQueue.objects.create(addon=self.apps[0]) rq.update(created=self.days_ago(8)) rq = RereviewQueue.objects.create(addon=self.apps[1]) rq.update(created=self.days_ago(15)) counts, percentages = _progress() eq_(counts['rereview']['week'], 1) eq_(counts['rereview']['new'], 1) eq_(counts['rereview']['old'], 1) eq_(counts['rereview']['med'], 1) self.assertAlmostEqual(percentages['rereview']['new'], 33.333333333333) self.assertAlmostEqual(percentages['rereview']['old'], 33.333333333333) self.assertAlmostEqual(percentages['rereview']['med'], 33.333333333333) def test_progress_updated(self): extra_app = app_factory(name='Jackalope', status=mkt.STATUS_PUBLIC, is_packaged=True, created=self.days_ago(35)) version_factory(addon=extra_app, file_kw={'status': mkt.STATUS_PENDING}, created=self.days_ago(25), nomination=self.days_ago(8)) extra_app = app_factory(name='Jackrabbit', status=mkt.STATUS_PUBLIC, is_packaged=True, created=self.days_ago(35)) version_factory(addon=extra_app, file_kw={'status': mkt.STATUS_PENDING}, created=self.days_ago(25), nomination=self.days_ago(25)) counts, percentages = _progress() eq_(counts['updates']['week'], 1) eq_(counts['updates']['new'], 1) eq_(counts['updates']['old'], 1) eq_(counts['updates']['med'], 1) self.assertAlmostEqual(percentages['updates']['new'], 33.333333333333) self.assertAlmostEqual(percentages['updates']['old'], 33.333333333333) self.assertAlmostEqual(percentages['updates']['med'], 33.333333333333) def test_stats_waiting(self): self.apps[0].latest_version.update(nomination=self.days_ago(1)) self.apps[1].latest_version.update(nomination=self.days_ago(5)) self.apps[2].latest_version.update(nomination=self.days_ago(15)) self.packaged_app.update(created=self.days_ago(1)) doc = pq(self.client.get(self.url).content) anchors = doc('.editor-stats-title a') eq_(anchors.eq(0).text(), '3 Pending App Reviews') eq_(anchors.eq(1).text(), '1 Re-review') eq_(anchors.eq(2).text(), '1 Update Review') divs = doc('.editor-stats-table > div') # Pending review. eq_(divs.eq(0).text(), '2 unreviewed app submissions this week.') # Re-reviews. eq_(divs.eq(2).text(), '1 unreviewed app submission this week.') # Update review. eq_(divs.eq(4).text(), '1 unreviewed app submission this week.') # Maths. # Pending review. 
eq_(doc('.waiting_new').eq(0).attr('title')[-3:], '33%') eq_(doc('.waiting_med').eq(0).attr('title')[-3:], '33%') eq_(doc('.waiting_old').eq(0).attr('title')[-3:], '33%') # Re-reviews. eq_(doc('.waiting_new').eq(1).attr('title')[-4:], '100%') eq_(doc('.waiting_med').eq(1).attr('title')[-3:], ' 0%') eq_(doc('.waiting_old').eq(1).attr('title')[-3:], ' 0%') # Update review. eq_(doc('.waiting_new').eq(2).attr('title')[-4:], '100%') eq_(doc('.waiting_med').eq(2).attr('title')[-3:], ' 0%') eq_(doc('.waiting_old').eq(2).attr('title')[-3:], ' 0%') def test_reviewer_leaders(self): reviewers = UserProfile.objects.all()[:2] # 1st user reviews 2, 2nd user only 1. users = cycle(reviewers) for app in self.apps: mkt.log(mkt.LOG.APPROVE_VERSION, app, app.latest_version, user=users.next(), details={'comments': 'hawt'}) doc = pq(self.client.get(self.url).content.decode('utf-8')) # Top Reviews. table = doc('#editors-stats .editor-stats-table').eq(0) eq_(table.find('td').eq(0).text(), reviewers[0].email) eq_(table.find('td').eq(1).text(), u'2') eq_(table.find('td').eq(2).text(), reviewers[1].email) eq_(table.find('td').eq(3).text(), u'1') # Top Reviews this month. table = doc('#editors-stats .editor-stats-table').eq(1) eq_(table.find('td').eq(0).text(), reviewers[0].email) eq_(table.find('td').eq(1).text(), u'2') eq_(table.find('td').eq(2).text(), reviewers[1].email) eq_(table.find('td').eq(3).text(), u'1') class FlagsMixin(object): def test_flag_packaged_app(self): self.apps[0].update(is_packaged=True) if self.uses_es(): self.reindex(Webapp) eq_(self.apps[0].is_packaged, True) res = self.client.get(self.url) eq_(res.status_code, 200) td = pq(res.content)('#addon-queue tbody tr td.flags').eq(0) flag = td('div.sprite-reviewer-packaged-app') eq_(flag.length, 1) def test_flag_premium_app(self): self.apps[0].update(premium_type=mkt.ADDON_PREMIUM) if self.uses_es(): self.reindex(Webapp) eq_(self.apps[0].is_premium(), True) res = self.client.get(self.url) eq_(res.status_code, 200) tds = pq(res.content)('#addon-queue tbody tr td.flags') flags = tds('div.sprite-reviewer-premium') eq_(flags.length, 1) def test_flag_free_inapp_app(self): self.apps[0].update(premium_type=mkt.ADDON_FREE_INAPP) if self.uses_es(): self.reindex(Webapp) res = self.client.get(self.url) tds = pq(res.content)('#addon-queue tbody tr td.flags') eq_(tds('div.sprite-reviewer-premium.inapp.free').length, 1) def test_flag_premium_inapp_app(self): self.apps[0].update(premium_type=mkt.ADDON_PREMIUM_INAPP) if self.uses_es(): self.reindex(Webapp) res = self.client.get(self.url) tds = pq(res.content)('#addon-queue tbody tr td.flags') eq_(tds('div.sprite-reviewer-premium.inapp').length, 1) def test_flag_info(self): self.apps[0].latest_version.update(has_info_request=True) if self.uses_es(): self.reindex(Webapp) res = self.client.get(self.url) eq_(res.status_code, 200) tds = pq(res.content)('#addon-queue tbody tr td.flags') flags = tds('div.sprite-reviewer-info') eq_(flags.length, 1) def test_flag_comment(self): self.apps[0].latest_version.update(has_editor_comment=True) if self.uses_es(): self.reindex(Webapp) res = self.client.get(self.url) eq_(res.status_code, 200) tds = pq(res.content)('#addon-queue tbody tr td.flags') flags = tds('div.sprite-reviewer-editor') eq_(flags.length, 1) class XSSMixin(object): def test_xss_in_queue(self): a = self.apps[0] a.name = '<script>alert("xss")</script>' a.save() if self.uses_es(): self.refresh(doctypes=('homescreen', 'webapp')) res = self.client.get(self.url) eq_(res.status_code, 200) tbody = 
pq(res.content)('#addon-queue tbody').html() assert '&lt;script&gt;' in tbody assert '<script>' not in tbody class TestAppQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin, XSSMixin): def setUp(self): super(TestAppQueue, self).setUp() yesterday = self.days_ago(1) long_ago = self.days_ago(2) self.apps = [app_factory(name='XXX', status=mkt.STATUS_PENDING, version_kw={'nomination': long_ago}, file_kw={'status': mkt.STATUS_PENDING}), app_factory(name='YYY', status=mkt.STATUS_PENDING, version_kw={'nomination': yesterday}, file_kw={'status': mkt.STATUS_PENDING}), app_factory(name='ZZZ')] self.apps[0].update(created=self.days_ago(12)) self.apps[1].update(created=self.days_ago(11)) # Quick sanity check. eq_(self.apps[0].latest_version.nomination, long_ago) eq_(self.apps[1].latest_version.nomination, yesterday) RereviewQueue.objects.create(addon=self.apps[2]) self.url = reverse('reviewers.apps.queue_pending') def tearDown(self): if self.uses_es(): unindex_webapps([app.id for app in self.apps]) super(TestAppQueue, self).tearDown() def review_url(self, app): return reverse('reviewers.apps.review', args=[app.app_slug]) def test_queue_viewing_ping(self): eq_(self.client.post(reverse('reviewers.queue_viewing')).status_code, 200) def test_template_links(self): r = self.client.get(self.url) eq_(r.status_code, 200) links = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(2) a') apps = Webapp.objects.filter( status=mkt.STATUS_PENDING).order_by('created') expected = [ (unicode(apps[0].name), self.review_url(apps[0])), (unicode(apps[1].name), self.review_url(apps[1])), ] check_links(expected, links, verify=False) def test_action_buttons_pending(self): r = self.client.get(self.review_url(self.apps[0])) eq_(r.status_code, 200) actions = pq(r.content)('#review-actions input') expected = [ (u'Approve', 'public'), (u'Reject', 'reject'), (u'Request Re-review', 'manual_rereview'), (u'Escalate', 'escalate'), (u'Message developer', 'info'), (u'Private comment', 'comment'), ] self.check_actions(expected, actions) def test_action_buttons_rejected(self): # Check action buttons for a previously rejected app. 
self.apps[0].update(status=mkt.STATUS_REJECTED) self.apps[0].latest_version.files.update(status=mkt.STATUS_DISABLED) r = self.client.get(self.review_url(self.apps[0])) eq_(r.status_code, 200) actions = pq(r.content)('#review-actions input') expected = [ (u'Approve', 'public'), (u'Request Re-review', 'manual_rereview'), (u'Escalate', 'escalate'), (u'Message developer', 'info'), (u'Private comment', 'comment'), ] self.check_actions(expected, actions) @mock.patch('mkt.versions.models.Version.is_privileged', True) def test_action_buttons_privileged_cantreview(self): self.apps[0].update(is_packaged=True) self.apps[0].latest_version.files.update(status=mkt.STATUS_PENDING) r = self.client.get(self.review_url(self.apps[0])) eq_(r.status_code, 200) actions = pq(r.content)('#review-actions input') expected = [ (u'Request Re-review', 'manual_rereview'), (u'Escalate', 'escalate'), (u'Message developer', 'info'), (u'Private comment', 'comment'), ] self.check_actions(expected, actions) @mock.patch('mkt.versions.models.Version.is_privileged', True) def test_action_buttons_privileged_canreview(self): self.login_as_senior_reviewer() self.apps[0].update(is_packaged=True) self.apps[0].latest_version.files.update(status=mkt.STATUS_PENDING) r = self.client.get(self.review_url(self.apps[0])) eq_(r.status_code, 200) actions = pq(r.content)('#review-actions input') expected = [ (u'Approve', 'public'), (u'Reject', 'reject'), (u'Ban app', 'disable'), (u'Request Re-review', 'manual_rereview'), (u'Escalate', 'escalate'), (u'Message developer', 'info'), (u'Private comment', 'comment'), ] self.check_actions(expected, actions) def test_devices(self): AddonDeviceType.objects.create(addon=self.apps[0], device_type=1) AddonDeviceType.objects.create(addon=self.apps[0], device_type=2) if self.uses_es(): self.reindex(Webapp) r = self.client.get(self.url) eq_(r.status_code, 200) tds = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(5)') eq_(tds('ul li:not(.unavailable)').length, 2) def test_payments(self): self.apps[0].update(premium_type=mkt.ADDON_PREMIUM) self.apps[1].update(premium_type=mkt.ADDON_FREE_INAPP) if self.uses_es(): self.reindex(Webapp) r = self.client.get(self.url) eq_(r.status_code, 200) tds = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(6)') eq_(tds.eq(0).text(), unicode(mkt.ADDON_PREMIUM_TYPES[mkt.ADDON_PREMIUM])) eq_(tds.eq(1).text(), unicode(mkt.ADDON_PREMIUM_TYPES[mkt.ADDON_FREE_INAPP])) def test_invalid_page(self): r = self.client.get(self.url, {'page': 999}) eq_(r.status_code, 200) eq_(r.context['pager'].number, 1) def test_queue_count(self): if self.uses_es(): self.refresh(doctypes=('webapp', 'homescreen')) r = self.client.get(self.url) eq_(r.status_code, 200) doc = pq(r.content) links = doc('.tabnav li a') eq_(links[0].text, u'Apps (2)') eq_(links[1].text, u'Re-reviews (1)') eq_(links[2].text, u'Updates (0)') eq_(links[4].text, u'Homescreens (0)') def test_homescreen_count(self): Tag(tag_text='homescreen').save_tag(self.apps[1]) self.apps[1].save() if self.uses_es(): WebappIndexer.unindex(self.apps[1].id) self.refresh(('homescreen', 'webapp')) r = self.client.get(self.url) eq_(r.status_code, 200) doc = pq(r.content) links = doc('.tabnav li a') eq_(links[0].text, u'Apps (1)') eq_(links[1].text, u'Re-reviews (1)') eq_(links[2].text, u'Updates (0)') eq_(links[4].text, u'Homescreens (1)') def test_queue_count_senior_reviewer(self): self.login_as_senior_reviewer() r = self.client.get(self.url) eq_(r.status_code, 200) doc = pq(r.content) links = doc('.tabnav li a') eq_(links[0].text, u'Apps 
(2)') eq_(links[1].text, u'Re-reviews (1)') eq_(links[2].text, u'Updates (0)') eq_(links[3].text, u'Escalations (0)') def test_escalated_not_in_queue(self): self.login_as_senior_reviewer() EscalationQueue.objects.create(addon=self.apps[0]) if self.uses_es(): self.reindex(Webapp) res = self.client.get(self.url) # self.apps[2] is not pending so doesn't show up either. eq_([a.app.id for a in res.context['addons']], [self.apps[1].id]) doc = pq(res.content) links = doc('.tabnav li a') eq_(links[0].text, u'Apps (1)') eq_(links[1].text, u'Re-reviews (1)') eq_(links[2].text, u'Updates (0)') eq_(links[3].text, u'Escalations (1)') def test_incomplete_no_in_queue(self): [app.update(status=mkt.STATUS_NULL) for app in self.apps] if self.uses_es(): self.reindex(Webapp) req = req_factory_factory( self.url, user=UserProfile.objects.get(email='[email protected]')) doc = pq(queue_apps(req).content) assert not doc('#addon-queue tbody tr').length def test_waiting_time(self): """Check objects show queue objects' created.""" res = self.client.get(self.url) waiting_times = [wait.attrib['isotime'] for wait in pq(res.content)('td time')] expected_waiting_times = [isotime(app.latest_version.nomination) for app in self.apps[0:2]] self.assertSetEqual(expected_waiting_times, waiting_times) class TestAppQueueES(mkt.site.tests.ESTestCase, TestAppQueue): def setUp(self): super(TestAppQueueES, self).setUp() self.create_switch('reviewer-tools-elasticsearch') self.reindex(Webapp) @mock.patch('mkt.versions.models.Version.is_privileged', False) class TestRereviewQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin, XSSMixin): def setUp(self): super(TestRereviewQueue, self).setUp() self.apps = [app_factory(name='XXX'), app_factory(name='YYY'), app_factory(name='ZZZ')] RereviewQueue.objects.create(addon=self.apps[0]).update( created=self.days_ago(5)) RereviewQueue.objects.create(addon=self.apps[1]).update( created=self.days_ago(3)) RereviewQueue.objects.create(addon=self.apps[2]).update( created=self.days_ago(1)) self.apps[0].update(created=self.days_ago(15)) self.apps[1].update(created=self.days_ago(13)) self.apps[2].update(created=self.days_ago(11)) if self.uses_es(): self.refresh(doctypes=('homescreen', 'webapp')) self.url = reverse('reviewers.apps.queue_rereview') def tearDown(self): if self.uses_es(): unindex_webapps([app.id for app in self.apps]) super(TestRereviewQueue, self).tearDown() def review_url(self, app): return reverse('reviewers.apps.review', args=[app.app_slug]) def test_template_links(self): r = self.client.get(self.url) eq_(r.status_code, 200) links = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(2) a') apps = [rq.addon for rq in RereviewQueue.objects.all().order_by('created')] expected = [ (unicode(apps[0].name), self.review_url(apps[0])), (unicode(apps[1].name), self.review_url(apps[1])), (unicode(apps[2].name), self.review_url(apps[2])), ] check_links(expected, links, verify=False) def test_waiting_time(self): """Check objects show queue objects' created.""" r = self.client.get(self.url) waiting_times = [wait.attrib['isotime'] for wait in pq(r.content)('td time')] expected_waiting_times = [ isotime(app.rereviewqueue_set.all()[0].created) for app in self.apps] self.assertSetEqual(expected_waiting_times, waiting_times) def test_action_buttons_public_senior_reviewer(self): self.login_as_senior_reviewer() r = self.client.get(self.review_url(self.apps[0])) eq_(r.status_code, 200) actions = pq(r.content)('#review-actions input') expected = [ (u'Reject', 'reject'), (u'Ban app', 'disable'), 
(u'Clear Re-review', 'clear_rereview'), (u'Escalate', 'escalate'), (u'Message developer', 'info'), (u'Private comment', 'comment'), ] self.check_actions(expected, actions) def test_action_buttons_public(self): r = self.client.get(self.review_url(self.apps[0])) eq_(r.status_code, 200) actions = pq(r.content)('#review-actions input') expected = [ (u'Reject', 'reject'), (u'Clear Re-review', 'clear_rereview'), (u'Escalate', 'escalate'), (u'Message developer', 'info'), (u'Private comment', 'comment'), ] self.check_actions(expected, actions) def test_action_buttons_reject(self): self.apps[0].update(status=mkt.STATUS_REJECTED) self.apps[0].latest_version.files.update(status=mkt.STATUS_DISABLED) r = self.client.get(self.review_url(self.apps[0])) eq_(r.status_code, 200) actions = pq(r.content)('#review-actions input') expected = [ (u'Approve', 'public'), (u'Clear Re-review', 'clear_rereview'), (u'Escalate', 'escalate'), (u'Message developer', 'info'), (u'Private comment', 'comment'), ] self.check_actions(expected, actions) def test_invalid_page(self): r = self.client.get(self.url, {'page': 999}) eq_(r.status_code, 200) eq_(r.context['pager'].number, 1) def test_queue_count(self): r = self.client.get(self.url) eq_(r.status_code, 200) doc = pq(r.content) links = doc('.tabnav li a') eq_(links[0].text, u'Apps (0)') eq_(links[1].text, u'Re-reviews (3)') eq_(links[2].text, u'Updates (0)') def test_queue_count_senior_reviewer(self): self.login_as_senior_reviewer() r = self.client.get(self.url) eq_(r.status_code, 200) doc = pq(r.content) links = doc('.tabnav li a') eq_(links[0].text, u'Apps (0)') eq_(links[1].text, u'Re-reviews (3)') eq_(links[2].text, u'Updates (0)') eq_(links[3].text, u'Escalations (0)') def test_escalated_not_in_queue(self): self.login_as_senior_reviewer() EscalationQueue.objects.create(addon=self.apps[0]) if self.uses_es(): self.reindex(Webapp) res = self.client.get(self.url) self.assertSetEqual([a.app.id for a in res.context['addons']], [a.id for a in self.apps[1:]]) doc = pq(res.content) links = doc('.tabnav li a') eq_(links[0].text, u'Apps (0)') eq_(links[1].text, u'Re-reviews (2)') eq_(links[2].text, u'Updates (0)') eq_(links[3].text, u'Escalations (1)') def test_addon_deleted(self): app = self.apps[0] app.delete() eq_(RereviewQueue.objects.filter(addon=app).exists(), False) class TestRereviewQueueES(mkt.site.tests.ESTestCase, TestRereviewQueue): def setUp(self): super(TestRereviewQueueES, self).setUp() self.create_switch('reviewer-tools-elasticsearch') self.reindex(Webapp) @mock.patch('mkt.versions.models.Version.is_privileged', False) class TestUpdateQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin, XSSMixin): # Prevent update_cached_manifests at setUp() since it gets called and tries # to access files when we add versions. 
@mock.patch('mkt.webapps.tasks.update_cached_manifests', False) def setUp(self): super(TestUpdateQueue, self).setUp() post_request_task._start_queuing_tasks() app1 = app_factory(is_packaged=True, name='XXX', version_kw={'version': '1.0', 'created': self.days_ago(2), 'nomination': self.days_ago(2)}) app2 = app_factory(is_packaged=True, name='YYY', version_kw={'version': '1.0', 'created': self.days_ago(2), 'nomination': self.days_ago(2)}) version_factory(addon=app1, version='1.1', created=self.days_ago(1), nomination=self.days_ago(1), file_kw={'status': mkt.STATUS_PENDING}) version_factory(addon=app2, version='1.1', created=self.days_ago(1), nomination=self.days_ago(1), file_kw={'status': mkt.STATUS_PENDING}) post_request_task._send_tasks_and_stop_queuing() self.apps = list(Webapp.objects.order_by('id')) self.url = reverse('reviewers.apps.queue_updates') def tearDown(self): if self.uses_es(): unindex_webapps([app.id for app in self.apps]) super(TestUpdateQueue, self).tearDown() def review_url(self, app): return reverse('reviewers.apps.review', args=[app.app_slug]) def test_template_links(self): self.apps[0].versions.latest().update(nomination=self.days_ago(2)) self.apps[1].versions.latest().update(nomination=self.days_ago(1)) if self.uses_es(): self.reindex(Webapp) r = self.client.get(self.url) eq_(r.status_code, 200) links = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(2) a') expected = [ (unicode(self.apps[0].name), self.review_url(self.apps[0])), (unicode(self.apps[1].name), self.review_url(self.apps[1])), ] check_links(expected, links, verify=False) def test_action_buttons_public_senior_reviewer(self): self.apps[0].versions.latest().files.update(status=mkt.STATUS_PUBLIC) self.login_as_senior_reviewer() r = self.client.get(self.review_url(self.apps[0])) eq_(r.status_code, 200) actions = pq(r.content)('#review-actions input') expected = [ (u'Reject', 'reject'), (u'Ban app', 'disable'), (u'Request Re-review', 'manual_rereview'), (u'Escalate', 'escalate'), (u'Message developer', 'info'), (u'Private comment', 'comment'), ] self.check_actions(expected, actions) def test_action_buttons_public(self): self.apps[0].versions.latest().files.update(status=mkt.STATUS_PUBLIC) r = self.client.get(self.review_url(self.apps[0])) eq_(r.status_code, 200) actions = pq(r.content)('#review-actions input') expected = [ (u'Reject', 'reject'), (u'Request Re-review', 'manual_rereview'), (u'Escalate', 'escalate'), (u'Message developer', 'info'), (u'Private comment', 'comment'), ] self.check_actions(expected, actions) def test_action_buttons_reject(self): self.apps[0].versions.latest().files.update(status=mkt.STATUS_DISABLED) r = self.client.get(self.review_url(self.apps[0])) eq_(r.status_code, 200) actions = pq(r.content)('#review-actions input') expected = [ (u'Approve', 'public'), (u'Request Re-review', 'manual_rereview'), (u'Escalate', 'escalate'), (u'Message developer', 'info'), (u'Private comment', 'comment'), ] self.check_actions(expected, actions) def test_invalid_page(self): r = self.client.get(self.url, {'page': 999}) eq_(r.status_code, 200) eq_(r.context['pager'].number, 1) def test_queue_count(self): r = self.client.get(self.url) eq_(r.status_code, 200) doc = pq(r.content) links = doc('.tabnav li a') eq_(links[0].text, u'Apps (0)') eq_(links[1].text, u'Re-reviews (0)') eq_(links[2].text, u'Updates (2)') def test_homescreen(self): Tag(tag_text='homescreen').save_tag(self.apps[1]) self.apps[1].save() if self.uses_es(): WebappIndexer.unindex(self.apps[1].id) self.refresh(doctypes=('homescreen', 
'webapp')) r = self.client.get(self.url) eq_(r.status_code, 200) doc = pq(r.content) links = doc('.tabnav li a') eq_(links[0].text, u'Apps (0)') eq_(links[1].text, u'Re-reviews (0)') eq_(links[2].text, u'Updates (1)') eq_(links[3].text, u'Reviewing (0)') eq_(links[4].text, u'Homescreens (1)') def test_queue_count_senior_reviewer(self): self.login_as_senior_reviewer() r = self.client.get(self.url) eq_(r.status_code, 200) doc = pq(r.content) links = doc('.tabnav li a') eq_(links[0].text, u'Apps (0)') eq_(links[1].text, u'Re-reviews (0)') eq_(links[2].text, u'Updates (2)') eq_(links[3].text, u'Escalations (0)') def test_escalated_not_in_queue(self): self.login_as_senior_reviewer() EscalationQueue.objects.create(addon=self.apps[0]) if self.uses_es(): self.reindex(Webapp) res = self.client.get(self.url) eq_([a.app.id for a in res.context['addons']], [app.id for app in self.apps[1:]]) doc = pq(res.content) links = doc('.tabnav li a') eq_(links[0].text, u'Apps (0)') eq_(links[1].text, u'Re-reviews (0)') eq_(links[2].text, u'Updates (1)') eq_(links[3].text, u'Escalations (1)') def test_order(self): self.apps[0].update(created=self.days_ago(10)) self.apps[1].update(created=self.days_ago(5)) self.apps[0].versions.latest().update(nomination=self.days_ago(1)) self.apps[1].versions.latest().update(nomination=self.days_ago(4)) if self.uses_es(): self.reindex(Webapp) res = self.client.get(self.url) apps = list(res.context['addons']) eq_(apps[0].app.id, self.apps[1].id) eq_(apps[1].app.id, self.apps[0].id) def test_only_updates_in_queue(self): # Add new packaged app, which should only show up in the pending queue. app = app_factory(is_packaged=True, name='ZZZ', status=mkt.STATUS_PENDING, version_kw={'version': '1.0'}, file_kw={'status': mkt.STATUS_PENDING}) self.apps.append(app) if self.uses_es(): self.reindex(Webapp) res = self.client.get(self.url) apps = [a.app for a in res.context['addons']] assert app not in apps, ( 'Unexpected: Found a new packaged app in the updates queue.') eq_(pq(res.content)('.tabnav li a')[2].text, u'Updates (2)') def test_approved_update_in_queue(self): app = app_factory(is_packaged=True, name='YYY', status=mkt.STATUS_APPROVED, version_kw={'version': '1.0', 'created': self.days_ago(2), 'nomination': self.days_ago(2)}) self.apps.append(app) File.objects.filter(version__addon=app).update(status=app.status) version_factory(addon=app, version='1.1', created=self.days_ago(1), nomination=self.days_ago(1), file_kw={'status': mkt.STATUS_PENDING}) if self.uses_es(): self.reindex(Webapp) res = self.client.get(self.url) assert app.id in [a.app.id for a in res.context['addons']] eq_(pq(res.content)('.tabnav li a')[2].text, u'Updates (3)') def test_update_queue_with_empty_nomination(self): app = app_factory(is_packaged=True, name='YYY', status=mkt.STATUS_NULL, version_kw={'version': '1.0', 'created': self.days_ago(2), 'nomination': None}) self.apps.append(app) first_version = app.latest_version version_factory(addon=app, version='1.1', created=self.days_ago(1), nomination=None, file_kw={'status': mkt.STATUS_PENDING}) # Now that we have a version with nomination=None, reset app status. app.update(status=mkt.STATUS_APPROVED) File.objects.filter(version=first_version).update(status=app.status) # Safeguard: we /really/ want to test with nomination=None. 
eq_(app.latest_version.reload().nomination, None) if self.uses_es(): self.reindex(Webapp) res = self.client.get(self.url) assert app.id in [a.app.id for a in res.context['addons']] eq_(pq(res.content)('.tabnav li a')[2].text, u'Updates (3)') def test_deleted_version_not_in_queue(self): """ This tests that an app with a prior pending version that got deleted doesn't trigger the app to remain in the review queue. """ app = self.apps[0] # File is PENDING and delete current version. old_ver = app.versions.order_by('id')[0] old_ver.files.latest().update(status=mkt.STATUS_PENDING) old_ver.delete() # "Approve" the app. app.versions.latest().files.latest().update(status=mkt.STATUS_PUBLIC) eq_(app.reload().status, mkt.STATUS_PUBLIC) if self.uses_es(): self.reindex(Webapp) res = self.client.get(self.url) eq_(res.status_code, 200) # Verify that our app has 2 versions. eq_(Version.with_deleted.filter(addon=app).count(), 2) # Verify the apps in the context are what we expect. doc = pq(res.content) eq_(doc('.tabnav li a')[2].text, u'Updates (1)') apps = [a.app.id for a in res.context['addons']] ok_(app.id not in apps) ok_(self.apps[1].id in apps) def test_waiting_time(self): """Check objects show queue objects' created.""" r = self.client.get(self.url) waiting_times = [wait.attrib['isotime'] for wait in pq(r.content)('td time')] expected_waiting_times = [isotime(app.latest_version.nomination) for app in self.apps] self.assertSetEqual(expected_waiting_times, waiting_times) class TestUpdateQueueES(mkt.site.tests.ESTestCase, TestUpdateQueue): def setUp(self): super(TestUpdateQueueES, self).setUp() self.create_switch('reviewer-tools-elasticsearch') self.refresh(doctypes=('homescreen', 'webapp')) @mock.patch('mkt.versions.models.Version.is_privileged', False) class TestEscalationQueue(AppReviewerTest, AccessMixin, FlagsMixin, SearchMixin, XSSMixin): def setUp(self): super(TestEscalationQueue, self).setUp() self.apps = [app_factory(name='XXX'), app_factory(name='YYY'), app_factory(name='ZZZ')] EscalationQueue.objects.create(addon=self.apps[0]).update( created=self.days_ago(5)) EscalationQueue.objects.create(addon=self.apps[1]).update( created=self.days_ago(3)) EscalationQueue.objects.create(addon=self.apps[2]).update( created=self.days_ago(1)) self.apps[0].update(created=self.days_ago(15)) self.apps[1].update(created=self.days_ago(13)) self.apps[2].update(created=self.days_ago(11)) self.login_as_senior_reviewer() self.url = reverse('reviewers.apps.queue_escalated') def tearDown(self): if self.uses_es(): unindex_webapps([app.id for app in self.apps]) super(TestEscalationQueue, self).tearDown() def review_url(self, app): return reverse('reviewers.apps.review', args=[app.app_slug]) def test_flag_blocked(self): # Blocklisted apps should only be in the update queue, so this flag # check is here rather than in FlagsMixin. 
self.apps[0].update(status=mkt.STATUS_BLOCKED) if self.uses_es(): self.reindex(Webapp) res = self.client.get(self.url) eq_(res.status_code, 200) tds = pq(res.content)('#addon-queue tbody tr td.flags') flags = tds('div.sprite-reviewer-blocked') eq_(flags.length, 1) def test_no_access_regular_reviewer(self): self.login_as_editor() res = self.client.get(self.url) eq_(res.status_code, 403) def test_template_links(self): r = self.client.get(self.url) eq_(r.status_code, 200) links = pq(r.content)('#addon-queue tbody')('tr td:nth-of-type(2) a') apps = [rq.addon for rq in EscalationQueue.objects.all().order_by('addon__created')] expected = [ (unicode(apps[0].name), self.review_url(apps[0])), (unicode(apps[1].name), self.review_url(apps[1])), (unicode(apps[2].name), self.review_url(apps[2])), ] check_links(expected, links, verify=False) def test_waiting_time(self): """Check objects show queue objects' created.""" r = self.client.get(self.url) waiting_times = [wait.attrib['isotime'] for wait in pq(r.content)('td time')] expected_waiting_times = [ isotime(app.escalationqueue_set.all()[0].created) for app in self.apps] self.assertSetEqual(expected_waiting_times, waiting_times) def test_action_buttons_public(self): r = self.client.get(self.review_url(self.apps[0])) eq_(r.status_code, 200) actions = pq(r.content)('#review-actions input') expected = [ (u'Reject', 'reject'), (u'Ban app', 'disable'), (u'Request Re-review', 'manual_rereview'), (u'Clear Escalation', 'clear_escalation'), (u'Message developer', 'info'), (u'Private comment', 'comment'), ] self.check_actions(expected, actions) def test_action_buttons_reject(self): self.apps[0].update(status=mkt.STATUS_REJECTED) self.apps[0].latest_version.files.update(status=mkt.STATUS_DISABLED) r = self.client.get(self.review_url(self.apps[0])) eq_(r.status_code, 200) actions = pq(r.content)('#review-actions input') expected = [ (u'Approve', 'public'), (u'Ban app', 'disable'), (u'Request Re-review', 'manual_rereview'), (u'Clear Escalation', 'clear_escalation'), (u'Message developer', 'info'), (u'Private comment', 'comment'), ] self.check_actions(expected, actions) def test_invalid_page(self): r = self.client.get(self.url, {'page': 999}) eq_(r.status_code, 200) eq_(r.context['pager'].number, 1) def test_queue_count(self): r = self.client.get(self.url) eq_(r.status_code, 200) doc = pq(r.content) links = doc('.tabnav li a') eq_(links[0].text, u'Apps (0)') eq_(links[1].text, u'Re-reviews (0)') eq_(links[2].text, u'Updates (0)') eq_(links[3].text, u'Escalations (3)') def test_addon_deleted(self): app = self.apps[0] app.delete() eq_(EscalationQueue.objects.filter(addon=app).exists(), False) class TestEscalationQueueES(mkt.site.tests.ESTestCase, TestEscalationQueue): def setUp(self): super(TestEscalationQueueES, self).setUp() self.create_switch('reviewer-tools-elasticsearch') self.reindex(Webapp) class TestReviewTransaction(AttachmentManagementMixin, mkt.site.tests.MockEsMixin, mkt.site.tests.MockBrowserIdMixin, test.TransactionTestCase, TestedonManagementMixin): fixtures = fixture('webapp_337141') def setUp(self): super(TestReviewTransaction, self).setUp() mkt.site.tests.TestCase.grant_permission( user_factory(email='editor'), 'Apps:Review') self.mock_browser_id() def get_app(self): return Webapp.objects.get(id=337141) @mock.patch('mkt.webapps.tasks.update_cached_manifests') @mock.patch('mkt.webapps.models.Webapp.get_manifest_json') @mock.patch('lib.crypto.packaged.sign_app') def test_public_sign(self, sign_mock, json_mock, update_cached_manifests): self.app = 
self.get_app() self.version = self.app.latest_version self.version.files.all().update(status=mkt.STATUS_PENDING) with private_storage.open( self.version.files.all()[0].file_path, 'w') as f: f.write('.') public_storage.delete(self.version.files.all()[0].signed_file_path) self.app.update(status=mkt.STATUS_PENDING, is_packaged=True, _current_version=None, _signal=False) eq_(self.get_app().status, mkt.STATUS_PENDING) update_cached_manifests.reset_mock() sign_mock.return_value = None # Didn't fail. json_mock.return_value = {'name': 'Something'} self.login('[email protected]') data = {'action': 'public', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) resp = self.client.post( reverse('reviewers.apps.review', args=[self.app.app_slug]), data) eq_(resp.status_code, 302) eq_(self.get_app().status, mkt.STATUS_PUBLIC) eq_(update_cached_manifests.delay.call_count, 1) @mock.patch('mkt.webapps.tasks.update_cached_manifests') @mock.patch('mkt.webapps.models.Webapp.get_manifest_json') @mock.patch('lib.crypto.packaged.sign') def test_public_sign_failure(self, sign_mock, json_mock, update_cached_manifests): self.app = self.get_app() self.version = self.app.latest_version self.version.files.all().update(status=mkt.STATUS_PENDING) self.app.update(status=mkt.STATUS_PENDING, is_packaged=True, _current_version=None, _signal=False) eq_(self.get_app().status, mkt.STATUS_PENDING) sign_mock.side_effect = packaged.SigningError json_mock.return_value = {'name': 'Something'} self.login('[email protected]') data = {'action': 'public', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) resp = self.client.post( reverse('reviewers.apps.review', args=[self.app.app_slug]), data) eq_(resp.status_code, 302) eq_(self.get_app().status, mkt.STATUS_PENDING) eq_(update_cached_manifests.delay.call_count, 0) class TestReviewMixin(object): # E.g commreply+12e0caffc4ca4174a6f62300c0ff180a@marketplace.firefox.com . COMM_REPLY_RE = r'^commreply\+[a-f0-9]+\@marketplace\.firefox\.com$' def post(self, data, queue='pending'): res = self.client.post(self.url, data) self.assert3xx(res, reverse('reviewers.apps.queue_%s' % queue)) def _check_email(self, msg, subject, to=None): if to: eq_(msg.to, to) else: eq_(msg.to, list(self.app.authors.values_list('email', flat=True))) assert re.match(self.COMM_REPLY_RE, msg.extra_headers['Reply-To']) eq_(msg.cc, []) eq_(msg.from_email, settings.MKT_REVIEWERS_EMAIL) if subject: eq_(msg.subject, '%s: %s' % (subject, self.app.name)) def _get_mail(self, email): return filter(lambda x: x.to[0].startswith(email), mail.outbox)[0] def _check_email_dev_and_contact(self, subject, outbox_len=2): """ Helper for checking developer and Mozilla contact get emailed. """ eq_(len(mail.outbox), outbox_len) # Developer. self._check_email(self._get_mail('steamcube'), subject) # Mozilla contact. 
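        # The contact copy is addressed directly to the configured
        # mozilla_contact rather than to the app's authors.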
self._check_email(self._get_mail('contact'), subject, to=[self.mozilla_contact]) def _check_thread(self): thread = self.app.threads eq_(thread.count(), 1) thread = thread.get() perms = ('developer', 'reviewer', 'staff') for key in perms: assert getattr(thread, 'read_permission_%s' % key) def _check_email_body(self, msg=None): if not msg: msg = mail.outbox[0] body = msg.message().as_string() url = self.app.get_url_path() assert url in body, 'Could not find apps detail URL in %s' % msg def _check_log(self, action): assert AppLog.objects.filter( addon=self.app, activity_log__action=action.id).exists(), ( "Didn't find `%s` action in logs." % action.short) def _check_score(self, reviewed_type): scores = ReviewerScore.objects.all() assert len(scores) > 0 eq_(scores[0].score, mkt.REVIEWED_SCORES[reviewed_type]) eq_(scores[0].note_key, reviewed_type) class TestReviewApp(SetupFilesMixin, AppReviewerTest, TestReviewMixin, AccessMixin, AttachmentManagementMixin, TestedonManagementMixin): fixtures = fixture('webapp_337141') def setUp(self): super(TestReviewApp, self).setUp() self.mozilla_contact = '[email protected]' self.app = self.get_app() make_rated(self.app) self.app.update(status=mkt.STATUS_PENDING, mozilla_contact=self.mozilla_contact) self.version = self.app.latest_version self.version.files.all().update(status=mkt.STATUS_PENDING) self.file = self.version.all_files[0] self.url = reverse('reviewers.apps.review', args=[self.app.app_slug]) self.setup_files() def get_app(self): return Webapp.objects.get(id=337141) def test_review_viewing_ping(self): eq_(self.client.post(reverse('reviewers.review_viewing')).status_code, 200) @mock.patch('mkt.webapps.models.Webapp.in_rereview_queue') def test_rereview(self, is_rereview_queue): is_rereview_queue.return_value = True content = pq(self.client.get(self.url).content) assert content('#queue-rereview').length @mock.patch('mkt.webapps.models.Webapp.in_escalation_queue') def test_escalated(self, in_escalation_queue): in_escalation_queue.return_value = True content = pq(self.client.get(self.url).content) assert content('#queue-escalation').length def test_cannot_review_my_app(self): with self.settings(ALLOW_SELF_REVIEWS=False): self.app.addonuser_set.create( user=UserProfile.objects.get(email='[email protected]')) res = self.client.head(self.url) self.assert3xx(res, reverse('reviewers.home')) res = self.client.post(self.url) self.assert3xx(res, reverse('reviewers.home')) def test_cannot_review_blocklisted_app(self): self.app.update(status=mkt.STATUS_BLOCKED) res = self.client.get(self.url) self.assert3xx(res, reverse('reviewers.home')) res = self.client.post(self.url) self.assert3xx(res, reverse('reviewers.home')) def test_review_no_latest_version(self): self.app.versions.all().delete() self.app.reload() eq_(self.app.latest_version, None) eq_(self.app.current_version, None) response = self.client.get(self.url) eq_(response.status_code, 200) doc = pq(response.content) assert not doc('input[name=action][value=info]').length assert not doc('input[name=action][value=comment]').length assert not doc('input[name=action][value=public]').length assert not doc('input[name=action][value=reject]').length # Also try with a packaged app. 
self.app.update(is_packaged=True) response = self.client.get(self.url) eq_(response.status_code, 200) def test_sr_can_review_blocklisted_app(self): self.app.update(status=mkt.STATUS_BLOCKED) self.login_as_senior_reviewer() eq_(self.client.get(self.url).status_code, 200) data = {'action': 'public', 'comments': 'yo'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) res = self.client.post(self.url, data) self.assert3xx(res, reverse('reviewers.apps.queue_pending')) def test_pending_to_reject_w_device_overrides(self): # This shouldn't be possible unless there's form hacking. AddonDeviceType.objects.create(addon=self.app, device_type=mkt.DEVICE_DESKTOP.id) AddonDeviceType.objects.create(addon=self.app, device_type=mkt.DEVICE_TABLET.id) eq_(self.app.publish_type, mkt.PUBLISH_IMMEDIATE) data = {'action': 'reject', 'comments': 'something', 'device_override': [mkt.DEVICE_DESKTOP.id]} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) app = self.get_app() eq_(app.publish_type, mkt.PUBLISH_IMMEDIATE) eq_(app.status, mkt.STATUS_REJECTED) eq_(set([o.id for o in app.device_types]), set([mkt.DEVICE_DESKTOP.id, mkt.DEVICE_TABLET.id])) self._check_email_dev_and_contact('Rejected') self._check_email_body() def test_pending_to_public_w_requirements_overrides(self): data = {'action': 'public', 'comments': 'something', 'has_packaged_apps': True} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) assert not self.app.latest_version.features.has_packaged_apps self.post(data) app = self.get_app() assert app.latest_version.features.has_packaged_apps # Since features have been changed by the reviewer, the app should not # be immediately published. eq_(app.publish_type, mkt.PUBLISH_PRIVATE) eq_(app.status, mkt.STATUS_APPROVED) self._check_log(mkt.LOG.REVIEW_FEATURES_OVERRIDE) # A reviewer changing features shouldn't generate a re-review. eq_(RereviewQueue.objects.count(), 0) def test_pending_to_public_w_requirements_removed(self): self.app.latest_version.features.update(has_packaged_apps=True) data = {'action': 'public', 'comments': 'something', 'has_packaged_apps': False} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) assert self.app.latest_version.features.has_packaged_apps self.post(data) app = self.get_app() assert not app.latest_version.features.has_packaged_apps # Since features have been changed by the reviewer, the app should not # be immediately published. eq_(app.publish_type, mkt.PUBLISH_PRIVATE) eq_(app.status, mkt.STATUS_APPROVED) self._check_log(mkt.LOG.REVIEW_FEATURES_OVERRIDE) # A reviewer changing features shouldn't generate a re-review. eq_(RereviewQueue.objects.count(), 0) def test_pending_to_reject_w_requirements_overrides(self): # Rejecting an app doesn't let you override features requirements. 
        data = {'action': 'reject', 'comments': 'something',
                'has_packaged_apps': True}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        assert not self.app.latest_version.features.has_packaged_apps
        self.post(data)
        app = self.get_app()
        assert not app.latest_version.features.has_packaged_apps
        eq_(app.publish_type, mkt.PUBLISH_IMMEDIATE)
        eq_(app.status, mkt.STATUS_REJECTED)

    def test_pending_to_public_w_requirements_overrides_nothing_changed(self):
        self.version.features.update(has_packaged_apps=True)
        data = {'action': 'public', 'comments': 'something',
                'has_packaged_apps': True}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        assert self.app.latest_version.features.has_packaged_apps
        self.post(data)
        app = self.get_app()
        assert app.latest_version.features.has_packaged_apps
        eq_(app.publish_type, mkt.PUBLISH_IMMEDIATE)
        eq_(app.status, mkt.STATUS_PUBLIC)
        action_id = mkt.LOG.REVIEW_FEATURES_OVERRIDE.id
        assert not AppLog.objects.filter(
            addon=self.app, activity_log__action=action_id).exists()

    @mock.patch('mkt.reviewers.views.messages.success', new=mock.Mock)
    def test_incomplete_cant_approve(self):
        self.app.update(status=mkt.STATUS_NULL)
        self.app.latest_version.files.update(status=mkt.STATUS_NULL)
        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        # Still incomplete.
        eq_(self.get_app().status, mkt.STATUS_NULL)

    def test_notification_email_translation(self):
        """Test that the app name is translated with the app's
        default_locale and not the reviewer's when we are sending
        notification emails."""
        # Skipped, see https://bugzilla.mozilla.org/show_bug.cgi?id=1127790
        raise SkipTest
        original_name = unicode(self.app.name)
        fr_translation = u'Mais allô quoi!'
        es_translation = u'¿Dónde está la biblioteca?'
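        # Give the app localized names with French as the default locale;
        # the notification email should use the French name even though the
        # reviewer's request is sent in Spanish.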
self.app.name = { 'fr': fr_translation, 'es': es_translation, } self.app.default_locale = 'fr' self.app.save() data = {'action': 'public', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.client.post(self.url, data, HTTP_ACCEPT_LANGUAGE='es') eq_(translation.get_language(), 'es') eq_(len(mail.outbox), 2) msg = mail.outbox[0] assert original_name not in msg.subject assert es_translation not in msg.subject assert fr_translation in msg.subject assert original_name not in msg.body assert es_translation not in msg.body assert fr_translation in msg.body @mock.patch('lib.crypto.packaged.sign') def test_require_sig_for_public(self, sign): sign.side_effect = packaged.SigningError self.get_app().update(is_packaged=True) data = {'action': 'public', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.client.post(self.url, data) eq_(self.get_app().status, mkt.STATUS_PENDING) def _test_pending_to_public(self): self.app.update(mozilla_contact='') data = {'action': 'public', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) app = self.get_app() eq_(app.status, mkt.STATUS_PUBLIC) eq_(app.current_version.files.all()[0].status, mkt.STATUS_PUBLIC) self._check_log(mkt.LOG.APPROVE_VERSION) eq_(len(mail.outbox), 1) self._check_email(mail.outbox[0], ('Approved')) self._check_email_body() self._check_score(mkt.REVIEWED_WEBAPP_HOSTED) def test_pending_to_public(self): self._test_pending_to_public() @mock.patch('mkt.reviewers.views.messages.success') def test_pending_to_escalation(self, messages): data = {'action': 'escalate', 'comments': 'soup her man'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) eq_(EscalationQueue.objects.count(), 1) self._check_log(mkt.LOG.ESCALATE_MANUAL) # Test 2 emails: 1 to dev, 1 to admin. eq_(len(mail.outbox), 2) self._check_email(self._get_mail('steamcube'), 'Escalated') self._check_email( self._get_mail('snrreviewer'), 'Escalated', to=[self.snr_reviewer_user.email]) eq_(messages.call_args_list[0][0][1], 'Review successfully processed.') def test_pending_to_disable_senior_reviewer(self): self.login_as_senior_reviewer() self.app.update(status=mkt.STATUS_PUBLIC) self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC) data = {'action': 'disable', 'comments': 'banned ur app'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) app = self.get_app() eq_(app.status, mkt.STATUS_DISABLED) eq_(app.latest_version.files.all()[0].status, mkt.STATUS_DISABLED) self._check_log(mkt.LOG.APP_DISABLED) self._check_email_dev_and_contact('Banned') def test_pending_to_disable(self): # Only senior reviewers can ban apps. 
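        # A regular reviewer posting 'disable' should get a form error and
        # leave the app public, with no email sent.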
self.app.update(status=mkt.STATUS_PUBLIC) self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC) data = {'action': 'disable', 'comments': 'banned ur app'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) res = self.client.post(self.url, data) eq_(res.status_code, 200) ok_('action' in res.context['form'].errors) eq_(self.get_app().status, mkt.STATUS_PUBLIC) eq_(len(mail.outbox), 0) def _test_escalation_to_public(self): EscalationQueue.objects.create(addon=self.app) eq_(self.app.status, mkt.STATUS_PENDING) data = {'action': 'public', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data, queue='escalated') app = self.get_app() eq_(app.status, mkt.STATUS_PUBLIC) eq_(app.current_version.files.all()[0].status, mkt.STATUS_PUBLIC) self._check_log(mkt.LOG.APPROVE_VERSION) eq_(EscalationQueue.objects.count(), 0) self._check_email_dev_and_contact('Approved') self._check_email_body() def test_escalation_to_public(self): self._test_escalation_to_public() def test_escalation_to_reject(self): EscalationQueue.objects.create(addon=self.app) eq_(self.app.status, mkt.STATUS_PENDING) files = list(self.version.files.values_list('id', flat=True)) data = {'action': 'reject', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data, queue='escalated') app = self.get_app() eq_(app.status, mkt.STATUS_REJECTED) eq_(File.objects.filter(id__in=files)[0].status, mkt.STATUS_REJECTED) self._check_log(mkt.LOG.REJECT_VERSION) eq_(EscalationQueue.objects.count(), 0) self._check_email_dev_and_contact('Rejected') self._check_email_body() self._check_score(mkt.REVIEWED_WEBAPP_HOSTED) def test_escalation_to_disable_senior_reviewer(self): self.login_as_senior_reviewer() EscalationQueue.objects.create(addon=self.app) self.app.update(status=mkt.STATUS_PUBLIC) self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC) data = {'action': 'disable', 'comments': 'banned ur app'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data, queue='escalated') app = self.get_app() eq_(app.status, mkt.STATUS_DISABLED) eq_(app.latest_version.files.all()[0].status, mkt.STATUS_DISABLED) self._check_log(mkt.LOG.APP_DISABLED) eq_(EscalationQueue.objects.count(), 0) self._check_email_dev_and_contact('Banned') def test_escalation_to_disable(self): EscalationQueue.objects.create(addon=self.app) self.app.update(status=mkt.STATUS_PUBLIC) self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC) data = {'action': 'disable', 'comments': 'banned ur app'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) res = self.client.post(self.url, data, queue='escalated') eq_(res.status_code, 200) ok_('action' in res.context['form'].errors) eq_(self.get_app().status, mkt.STATUS_PUBLIC) eq_(EscalationQueue.objects.count(), 1) eq_(len(mail.outbox), 0) def test_clear_escalation(self): self.app.update(status=mkt.STATUS_PUBLIC) self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC) EscalationQueue.objects.create(addon=self.app) data = {'action': 'clear_escalation', 'comments': 'all clear'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data, queue='escalated') eq_(EscalationQueue.objects.count(), 0) self._check_log(mkt.LOG.ESCALATION_CLEARED) # Ensure we don't send 
email to developer on clearing escalations. eq_(len(mail.outbox), 1) self._check_email(mail.outbox[0], None, to=[self.mozilla_contact]) def test_rereview_to_reject(self): RereviewQueue.objects.create(addon=self.app) self.app.update(status=mkt.STATUS_PUBLIC) self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC) data = {'action': 'reject', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data, queue='rereview') eq_(self.get_app().status, mkt.STATUS_REJECTED) self._check_log(mkt.LOG.REJECT_VERSION) eq_(RereviewQueue.objects.count(), 0) self._check_email_dev_and_contact('Rejected') self._check_email_body() self._check_score(mkt.REVIEWED_WEBAPP_REREVIEW) def test_rereview_to_disable_senior_reviewer(self): self.login_as_senior_reviewer() RereviewQueue.objects.create(addon=self.app) self.app.update(status=mkt.STATUS_PUBLIC) self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC) data = {'action': 'disable', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data, queue='rereview') eq_(self.get_app().status, mkt.STATUS_DISABLED) self._check_log(mkt.LOG.APP_DISABLED) eq_(RereviewQueue.objects.filter(addon=self.app).count(), 0) self._check_email_dev_and_contact('Banned') def test_rereview_to_disable(self): RereviewQueue.objects.create(addon=self.app) self.app.update(status=mkt.STATUS_PUBLIC) self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC) data = {'action': 'disable', 'comments': 'banned ur app'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) res = self.client.post(self.url, data, queue='rereview') eq_(res.status_code, 200) ok_('action' in res.context['form'].errors) eq_(self.get_app().status, mkt.STATUS_PUBLIC) eq_(RereviewQueue.objects.filter(addon=self.app).count(), 1) eq_(len(mail.outbox), 0) def test_manual_rereview(self): self.app.update(status=mkt.STATUS_PUBLIC) self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC) data = {'action': 'manual_rereview', 'comments': 'man dem'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) # The app status shouldn't change. eq_(self.get_app().status, mkt.STATUS_PUBLIC) eq_(RereviewQueue.objects.count(), 1) self._check_log(mkt.LOG.REREVIEW_MANUAL) # Ensure we don't send email to developer on manual rereviews. eq_(len(mail.outbox), 1) self._check_email(mail.outbox[0], None, to=[self.mozilla_contact]) def test_clear_rereview(self): self.app.update(status=mkt.STATUS_PUBLIC) self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC) RereviewQueue.objects.create(addon=self.app) data = {'action': 'clear_rereview', 'comments': 'all clear'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data, queue='rereview') eq_(RereviewQueue.objects.count(), 0) self._check_log(mkt.LOG.REREVIEW_CLEARED) # Ensure we don't send emails to the developer on clearing re-reviews. 
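        # Only the Mozilla contact is notified.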
eq_(len(mail.outbox), 1) self._check_email(mail.outbox[0], None, to=[self.mozilla_contact]) self._check_score(mkt.REVIEWED_WEBAPP_REREVIEW) def test_clear_rereview_unlisted(self): self.app.update(status=mkt.STATUS_UNLISTED) self.app.latest_version.files.update(status=mkt.STATUS_PUBLIC) RereviewQueue.objects.create(addon=self.app) data = {'action': 'clear_rereview', 'comments': 'all clear'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data, queue='rereview') eq_(RereviewQueue.objects.count(), 0) self._check_log(mkt.LOG.REREVIEW_CLEARED) # Ensure we don't send emails to the developer on clearing re-reviews. eq_(len(mail.outbox), 1) self._check_email(mail.outbox[0], None, to=[self.mozilla_contact]) self._check_score(mkt.REVIEWED_WEBAPP_REREVIEW) def test_rereview_to_escalation(self): RereviewQueue.objects.create(addon=self.app) data = {'action': 'escalate', 'comments': 'soup her man'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data, queue='rereview') eq_(EscalationQueue.objects.count(), 1) self._check_log(mkt.LOG.ESCALATE_MANUAL) # Test 2 emails: 1 to dev, 1 to admin. eq_(len(mail.outbox), 2) self._check_email(self._get_mail('steamcube'), 'Escalated') self._check_email( self._get_mail('snrreviewer'), 'Escalated', to=[self.snr_reviewer_user.email]) def test_more_information(self): # Test the same for all queues. data = {'action': 'info', 'comments': 'Knead moor in faux'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) eq_(self.get_app().status, mkt.STATUS_PENDING) self._check_log(mkt.LOG.REQUEST_INFORMATION) vqs = self.get_app().versions.all() eq_(vqs.count(), 1) eq_(vqs.filter(has_info_request=True).count(), 1) self._check_email_dev_and_contact('Reviewer comment') def test_multi_cc_email(self): # Test multiple mozilla_contact emails via more information. contacts = [user_factory(email=u'á').email, user_factory(email=u'ç').email] self.mozilla_contact = ', '.join(contacts) self.app.update(mozilla_contact=self.mozilla_contact) data = {'action': 'info', 'comments': 'Knead moor in faux'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) eq_(len(mail.outbox), 3) subject = 'Reviewer comment' self._check_email(self._get_mail('steamcube'), subject) self._check_email(self._get_mail(contacts[0]), subject, to=[contacts[0]]) self._check_email(self._get_mail(contacts[1]), subject, to=[contacts[1]]) def test_comment(self): # Test the same for all queues. 
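        # A private comment should only email the Mozilla contact and log
        # COMMENT_VERSION.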
data = {'action': 'comment', 'comments': 'mmm, nice app'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) eq_(len(mail.outbox), 1) self._check_email(mail.outbox[0], None, to=[self.mozilla_contact]) self._check_log(mkt.LOG.COMMENT_VERSION) def test_receipt_no_node(self): res = self.client.get(self.url) eq_(len(pq(res.content)('#receipt-check-result')), 0) def test_receipt_has_node(self): self.get_app().update(premium_type=mkt.ADDON_PREMIUM) res = self.client.get(self.url) eq_(len(pq(res.content)('.reviewers-desktop #receipt-check-result')), 1) eq_(len(pq(res.content)('.reviewers-mobile #receipt-check-result')), 1) @mock.patch('mkt.reviewers.views.requests.get') def test_manifest_json(self, mock_get): m = mock.Mock() m.content = 'the manifest contents <script>' m.headers = CaseInsensitiveDict( {'content-type': 'application/x-web-app-manifest+json <script>'}) mock_get.return_value = m expected = { 'content': 'the manifest contents &lt;script&gt;', 'headers': {'content-type': 'application/x-web-app-manifest+json &lt;script&gt;'}, 'success': True, 'permissions': {} } r = self.client.get(reverse('reviewers.apps.review.manifest', args=[self.app.app_slug])) eq_(r.status_code, 200) eq_(json.loads(r.content), expected) @mock.patch('mkt.reviewers.views.requests.get') def test_manifest_json_unicode(self, mock_get): m = mock.Mock() m.content = u'كك some foreign ish' m.headers = CaseInsensitiveDict({}) mock_get.return_value = m r = self.client.get(reverse('reviewers.apps.review.manifest', args=[self.app.app_slug])) eq_(r.status_code, 200) eq_(json.loads(r.content), {'content': u'كك some foreign ish', 'headers': {}, 'success': True, 'permissions': {}}) @mock.patch('mkt.reviewers.views.requests.get') def test_manifest_json_encoding(self, mock_get): m = mock.Mock() m.content = open(self.manifest_path('non-utf8.webapp')).read() m.headers = CaseInsensitiveDict({}) mock_get.return_value = m r = self.client.get(reverse('reviewers.apps.review.manifest', args=[self.app.app_slug])) eq_(r.status_code, 200) data = json.loads(r.content) assert u'&#34;name&#34;: &#34;W2MO\u017d&#34;' in data['content'] @mock.patch('mkt.reviewers.views.requests.get') def test_manifest_json_encoding_empty(self, mock_get): m = mock.Mock() m.content = '' m.headers = CaseInsensitiveDict({}) mock_get.return_value = m r = self.client.get(reverse('reviewers.apps.review.manifest', args=[self.app.app_slug])) eq_(r.status_code, 200) eq_(json.loads(r.content), {'content': u'', 'headers': {}, 'success': True, 'permissions': {}}) @mock.patch('mkt.reviewers.views.requests.get') def test_manifest_json_traceback_in_response(self, mock_get): m = mock.Mock() m.content = {'name': 'Some name'} m.headers = CaseInsensitiveDict({}) mock_get.side_effect = requests.exceptions.SSLError mock_get.return_value = m # We should not 500 on a traceback. r = self.client.get(reverse('reviewers.apps.review.manifest', args=[self.app.app_slug])) eq_(r.status_code, 200) data = json.loads(r.content) assert data['content'], 'There should be a content with the traceback' eq_(data['headers'], {}) @mock.patch('mkt.reviewers.views.json.dumps') def test_manifest_json_packaged(self, mock_): # Test that when the app is packaged, _mini_manifest is called. 
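        # json.dumps is patched out, so asserting it was called shows the
        # mini-manifest code path ran instead of fetching a hosted manifest.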
mock_.return_value = '{}' self.get_app().update(is_packaged=True) res = self.client.get(reverse('reviewers.apps.review.manifest', args=[self.app.app_slug])) eq_(res.status_code, 200) assert mock_.called @mock.patch('mkt.reviewers.views._get_manifest_json') def test_manifest_json_perms(self, mock_): mock_.return_value = { 'permissions': { "foo": {"description": "foo"}, "camera": {"description": "<script>"} } } self.get_app().update(is_packaged=True) r = self.client.get(reverse('reviewers.apps.review.manifest', args=[self.app.app_slug])) eq_(r.status_code, 200) eq_(json.loads(r.content)['permissions'], {'foo': {'description': 'foo', 'type': 'web'}, 'camera': {'description': '&lt;script&gt;', 'type': 'priv'}}) def test_abuse(self): AbuseReport.objects.create(addon=self.app, message='!@#$') res = self.client.get(self.url) doc = pq(res.content) dd = doc('.reviewers-desktop #summary dd.abuse-reports') eq_(dd.text(), u'1') eq_(dd.find('a').attr('href'), reverse('reviewers.apps.review.abuse', args=[self.app.app_slug])) dd = doc('.reviewers-mobile #summary dd.abuse-reports') eq_(dd.text(), u'1') eq_(dd.find('a').attr('href'), reverse('reviewers.apps.review.abuse', args=[self.app.app_slug])) def _attachment_form_data(self, num=1, action='comment'): data = {'action': action, 'comments': 'mmm, nice app'} data.update(self._attachment_management_form(num=num)) data.update(self._attachments(num)) return data @override_settings(REVIEWER_ATTACHMENTS_PATH=ATTACHMENTS_DIR) @mock.patch('mkt.site.storage_utils.LocalFileStorage.save') def test_no_attachments(self, save_mock): """ Test addition of no attachment """ data = self._attachment_form_data(num=0, action='public') data.update(self._testedon_management_form()) self.post(data) eq_(save_mock.called, False, save_mock.call_args_list) def test_idn_app_domain(self): response = self.client.get(self.url) assert 'IDN domain!' not in response.content self.get_app().update(app_domain=u'http://www.allïzom.org') response = self.client.get(self.url) assert 'IDN domain!' in response.content def test_xss_domain(self): # It shouldn't be possible to have this in app domain, it will never # validate, but better safe than sorry. self.get_app().update(app_domain=u'<script>alert(42)</script>') response = self.client.get(self.url) assert '<script>alert(42)</script>' not in response.content assert '&lt;script&gt;alert(42)&lt;/script&gt;' in response.content def test_priority_flag_cleared_for_public(self): self.get_app().update(priority_review=True) data = {'action': 'public', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) eq_(self.get_app().priority_review, False) def test_priority_flag_uncleared_for_reject(self): self.get_app().update(priority_review=True) data = {'action': 'reject', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) eq_(self.get_app().priority_review, True) def test_is_showcase_checkbox(self): res = self.client.get(self.url) eq_(pq(res.content)('#id_is_showcase:checked').length, 0) app = self.get_app() Tag(tag_text=SHOWCASE_TAG).save_tag(app) res = self.client.get(self.url) eq_(pq(res.content)('#id_is_showcase:checked').length, 1) def test_is_showcase_on(self): # Note: Using action=comment b/c it does less and keeps test faster. 
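        # Ticking the box should tag the app and notify the curation board.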
        data = {'action': 'comment', 'comments': 'blah',
                'is_showcase': 'on'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)

        tags = self.get_app().tags.values_list('tag_text', flat=True)
        assert SHOWCASE_TAG in tags

        # Check email is sent to curation board.
        msg = self._get_mail('appcurationboard')
        eq_(msg.to, [settings.APP_CURATION_BOARD_EMAIL])
        eq_(msg.subject,
            u'App [%s] nominated to be featured' % self.get_app().name)

    def test_is_showcase_off(self):
        # Clearing the contact so we don't get a superfluous email below.
        self.app.update(mozilla_contact='')
        # Note: Using action=comment b/c it does less and keeps test faster.
        # Note: `is_showcase` isn't passed b/c checkboxes.
        data = {'action': 'comment', 'comments': 'blah'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)

        tags = self.get_app().tags.values_list('tag_text', flat=True)
        assert SHOWCASE_TAG not in tags

        # Check no email is sent.
        eq_(len(mail.outbox), 0)

    def test_versions_history_pagination(self):
        self.app.update(is_packaged=True)
        version_factory(addon=self.app, version='2.0')
        version_factory(addon=self.app, version='3.0')
        # Mock paginate to paginate with only 2 versions to limit the
        # number of versions this test has to create.
        with mock.patch('mkt.reviewers.views.paginate',
                        lambda req, objs, limit: paginate(req, objs, 2)):
            content = pq(self.client.get(self.url).content)
        eq_(len(content('#review-files tr.listing-body')), 2)
        eq_(len(content('#review-files-paginate a[rel=next]')), 1)
        eq_(len(content('#review-files-paginate a[rel=prev]')), 0)
        link = content('#review-files-paginate a[rel=next]')[0].attrib['href']
        eq_(link, '%s?page=2#history' % self.url)

        # Look at page 2.
        with mock.patch('mkt.reviewers.views.paginate',
                        lambda req, objs, limit: paginate(req, objs, 2)):
            content = pq(self.client.get(link).content)
        eq_(len(content('#review-files tr.listing-body')), 1)
        eq_(len(content('#review-files-paginate a[rel=next]')), 0)
        eq_(len(content('#review-files-paginate a[rel=prev]')), 1)
        eq_(content('#review-files-paginate a[rel=prev]')[0].attrib['href'],
            '%s?page=1#history' % self.url)


class TestCannedResponses(AppReviewerTest):

    def setUp(self):
        super(TestCannedResponses, self).setUp()
        self.login_as_editor()
        self.app = app_factory(name='XXX', status=mkt.STATUS_PENDING)
        self.cr = CannedResponse.objects.create(
            name=u'app reason', response=u'app reason body',
            sort_group=u'public')
        self.url = reverse('reviewers.apps.review', args=[self.app.app_slug])

    def test_ok(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        form = r.context['form']
        choices = form.fields['canned_response'].choices[1][1]
        # choices is grouped by the sort_group, where choices[0] is the
        # default "Choose a response..." option.
        # Within that, it's paired by [group, [[response, name],...]].
        # So above, choices[1][1] gets the first real group's list of
        # responses.
        eq_(len(choices), 1)
        assert self.cr.response in choices[0]


@mock.patch('mkt.reviewers.views.messages.success')
@mock.patch('mkt.webapps.tasks.index_webapps')
@mock.patch('mkt.webapps.tasks.update_cached_manifests')
@mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
@mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
class TestApproveHostedApp(AppReviewerTest, TestReviewMixin,
                           AttachmentManagementMixin,
                           TestedonManagementMixin):
    """
    A separate test class for apps going to an approved state. All other
    state transitions are tested above.
We're doing this to make the mocks easier to handle. """ fixtures = fixture('webapp_337141') def setUp(self): super(TestApproveHostedApp, self).setUp() self.mozilla_contact = '[email protected]' self.app = self.get_app() self.file = self.app.latest_version.files.all()[0] self.file.update(status=mkt.STATUS_PENDING) self.app.update(status=mkt.STATUS_PENDING, mozilla_contact=self.mozilla_contact, _current_version=None) self.url = reverse('reviewers.apps.review', args=[self.app.app_slug]) def get_app(self): return Webapp.objects.get(id=337141) def _check_message(self, msg): eq_(msg.call_args_list[0][0][1], '"Web App Review" successfully processed (+60 points, 60 total).') def test_pending_to_public(self, update_name, update_locales, update_cached_manifests, index_webapps, messages): index_webapps.delay.reset_mock() eq_(update_name.call_count, 0) eq_(update_locales.call_count, 0) eq_(update_cached_manifests.delay.call_count, 0) data = {'action': 'public', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) app = self.get_app() eq_(app.status, mkt.STATUS_PUBLIC) eq_(self.file.reload().status, mkt.STATUS_PUBLIC) self._check_log(mkt.LOG.APPROVE_VERSION) self._check_message(messages) self._check_email_dev_and_contact('Approved') self._check_email_body() self._check_score(mkt.REVIEWED_WEBAPP_HOSTED) eq_(update_name.call_count, 0) # Not a packaged app. eq_(update_locales.call_count, 1) eq_(index_webapps.delay.call_count, 1) # App is not packaged, no need to call update_cached_manifests. eq_(update_cached_manifests.delay.call_count, 0) def test_pending_to_hidden(self, update_name, update_locales, update_cached_manifests, index_webapps, messages): self.get_app().update(publish_type=mkt.PUBLISH_HIDDEN) index_webapps.delay.reset_mock() eq_(update_name.call_count, 0) eq_(update_locales.call_count, 0) eq_(update_cached_manifests.delay.call_count, 0) data = {'action': 'public', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) app = self.get_app() eq_(app.status, mkt.STATUS_UNLISTED) eq_(self.file.reload().status, mkt.STATUS_PUBLIC) self._check_log(mkt.LOG.APPROVE_VERSION) self._check_email_dev_and_contact('Approved') self._check_email_body() self._check_score(mkt.REVIEWED_WEBAPP_HOSTED) self._check_message(messages) eq_(update_name.call_count, 0) # Not a packaged app. eq_(update_locales.call_count, 1) eq_(index_webapps.delay.call_count, 1) # App is not packaged, no need to call update_cached_manifests. eq_(update_cached_manifests.delay.call_count, 0) def test_pending_to_approved(self, update_name, update_locales, update_cached_manifests, index_webapps, messages): self.get_app().update(publish_type=mkt.PUBLISH_PRIVATE) index_webapps.reset_mock() eq_(update_name.call_count, 0) eq_(update_locales.call_count, 0) eq_(index_webapps.delay.call_count, 0) eq_(update_cached_manifests.delay.call_count, 0) data = {'action': 'public', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) app = self.get_app() eq_(app.status, mkt.STATUS_APPROVED) # File status is PUBLIC since it is the only version. 
        eq_(self.file.reload().status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
        self._check_message(messages)
        self._check_email_dev_and_contact('Approved but private')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
        # The app is private but can still be installed by team members,
        # so we should call those:
        eq_(update_name.call_count, 0)  # Not a packaged app.
        eq_(update_locales.call_count, 1)
        # App is not packaged, no need to call update_cached_manifests.
        eq_(update_cached_manifests.delay.call_count, 0)
        # The app is still reindexed so search reflects its private status.
        eq_(index_webapps.delay.call_count, 1)

    def test_pending_to_reject(self, update_name, update_locales,
                               update_cached_manifests, index_webapps,
                               messages):
        index_webapps.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(index_webapps.delay.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)

        data = {'action': 'reject', 'comments': 'suxor'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)
        eq_(index_webapps.delay.call_count, 1)

        app = self.get_app()
        eq_(app.status, mkt.STATUS_REJECTED)
        eq_(self.file.reload().status, mkt.STATUS_REJECTED)
        self._check_log(mkt.LOG.REJECT_VERSION)
        self._check_message(messages)
        self._check_email_dev_and_contact('Rejected')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_HOSTED)
        eq_(update_name.call_count, 0)  # Not a packaged app.
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(index_webapps.delay.call_count, 1)


@mock.patch('lib.crypto.packaged.sign')
@mock.patch('mkt.reviewers.views.messages.success')
@mock.patch('mkt.webapps.tasks.index_webapps')
@mock.patch('mkt.webapps.tasks.update_cached_manifests')
@mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
@mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
class TestApprovePackagedApp(AppReviewerTest, TestReviewMixin,
                             AttachmentManagementMixin,
                             TestedonManagementMixin):
    """
    A separate test class for packaged apps going to an approved state.

    We're doing this to make the mocks easier to handle.
""" fixtures = fixture('webapp_337141') def setUp(self): super(TestApprovePackagedApp, self).setUp() self.mozilla_contact = '[email protected]' self.app = self.get_app() self.file = self.app.latest_version.files.all()[0] self.file.update(status=mkt.STATUS_PENDING) self.app.update(status=mkt.STATUS_PENDING, mozilla_contact=self.mozilla_contact, _current_version=None, is_packaged=True) self.url = reverse('reviewers.apps.review', args=[self.app.app_slug]) def get_app(self): return Webapp.objects.get(id=337141) def _check_message(self, msg): eq_(msg.call_args_list[0][0][1], '"Packaged App Review" successfully processed ' '(+60 points, 60 total).') def test_pending_to_public(self, update_name, update_locales, update_cached_manifests, index_webapps, messages, sign_mock): index_webapps.delay.reset_mock() eq_(update_name.call_count, 0) eq_(update_locales.call_count, 0) eq_(update_cached_manifests.delay.call_count, 0) data = {'action': 'public', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) app = self.get_app() eq_(app.status, mkt.STATUS_PUBLIC) eq_(self.file.reload().status, mkt.STATUS_PUBLIC) self._check_log(mkt.LOG.APPROVE_VERSION) self._check_email_dev_and_contact('Approved') self._check_email_body() self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED) self._check_message(messages) eq_(update_name.call_count, 1) eq_(update_locales.call_count, 1) eq_(index_webapps.delay.call_count, 1) eq_(update_cached_manifests.delay.call_count, 1) eq_(sign_mock.call_args[0][0], self.get_app().current_version.pk) def test_pending_to_hidden(self, update_name, update_locales, update_cached_manifests, index_webapps, messages, sign_mock): self.get_app().update(publish_type=mkt.PUBLISH_HIDDEN) index_webapps.delay.reset_mock() eq_(update_name.call_count, 0) eq_(update_locales.call_count, 0) eq_(update_cached_manifests.delay.call_count, 0) data = {'action': 'public', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) app = self.get_app() eq_(app.status, mkt.STATUS_UNLISTED) eq_(self.file.reload().status, mkt.STATUS_PUBLIC) self._check_log(mkt.LOG.APPROVE_VERSION) self._check_email_dev_and_contact('Approved') self._check_email_body() self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED) self._check_message(messages) eq_(update_name.call_count, 1) eq_(update_locales.call_count, 1) eq_(index_webapps.delay.call_count, 1) eq_(update_cached_manifests.delay.call_count, 1) eq_(sign_mock.call_args[0][0], self.get_app().current_version.pk) def test_pending_to_approved(self, update_name, update_locales, update_cached_manifests, index_webapps, messages, sign_mock): self.get_app().update(publish_type=mkt.PUBLISH_PRIVATE) index_webapps.delay.reset_mock() eq_(update_name.call_count, 0) eq_(update_locales.call_count, 0) eq_(update_cached_manifests.delay.call_count, 0) data = {'action': 'public', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) app = self.get_app() eq_(app.status, mkt.STATUS_APPROVED) eq_(self.file.reload().status, mkt.STATUS_PUBLIC) self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE) self._check_email_dev_and_contact('Approved but private') self._check_email_body() self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED) self._check_message(messages) eq_(update_name.call_count, 1) eq_(update_locales.call_count, 1) eq_(index_webapps.delay.call_count, 1) 
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], self.get_app().current_version.pk)

    def test_pending_to_rejected(self, update_name, update_locales,
                                 update_cached_manifests, index_webapps,
                                 messages, sign_mock):
        index_webapps.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(update_cached_manifests.delay.call_count, 0)

        data = {'action': 'reject', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)

        app = self.get_app()
        eq_(app.status, mkt.STATUS_REJECTED)
        eq_(self.file.reload().status, mkt.STATUS_REJECTED)
        self._check_email_dev_and_contact('Rejected')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
        self._check_message(messages)
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(sign_mock.call_count, 0)

    def test_pending_to_approved_app_private_prior_version_rejected(
            self, update_name, update_locales, update_cached_manifests,
            index_webapps, messages, sign_mock):
        """
        Test that everything works out OK when v1.0 was rejected and the
        developer submitted v1.1, which is then approved. This should still
        be considered a packaged review (not an update) and set the approved
        version to PUBLIC since the prior version is DISABLED. See bug
        1075042.
        """
        self.app.update(status=mkt.STATUS_REJECTED,
                        publish_type=mkt.PUBLISH_PRIVATE)
        self.file.update(status=mkt.STATUS_DISABLED)
        self.new_version = version_factory(
            addon=self.app, version='1.1',
            file_kw={'status': mkt.STATUS_PENDING})

        index_webapps.delay.reset_mock()
        update_cached_manifests.delay.reset_mock()
        eq_(update_name.call_count, 0)
        eq_(update_locales.call_count, 0)
        eq_(self.app.current_version, None)
        eq_(self.app.latest_version, self.new_version)

        data = {'action': 'public', 'comments': 'something'}
        data.update(self._attachment_management_form(num=0))
        data.update(self._testedon_management_form())
        self.post(data)

        app = self.get_app()
        eq_(app.status, mkt.STATUS_APPROVED)
        eq_(app.latest_version, self.new_version)
        eq_(app.current_version, self.new_version)
        eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC)
        self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE)
        self._check_email_dev_and_contact('Approved but private')
        self._check_email_body()
        self._check_score(mkt.REVIEWED_WEBAPP_PACKAGED)
        self._check_message(messages)
        eq_(update_name.call_count, 1)
        eq_(update_locales.call_count, 1)
        eq_(index_webapps.delay.call_count, 1)
        eq_(update_cached_manifests.delay.call_count, 1)
        eq_(sign_mock.call_args[0][0], self.new_version.pk)


@mock.patch('lib.crypto.packaged.sign')
@mock.patch('mkt.reviewers.views.messages.success')
@mock.patch('mkt.webapps.tasks.index_webapps')
@mock.patch('mkt.webapps.tasks.update_cached_manifests')
@mock.patch('mkt.webapps.models.Webapp.update_supported_locales')
@mock.patch('mkt.webapps.models.Webapp.update_name_from_package_manifest')
class TestApprovePackagedVersions(AppReviewerTest, TestReviewMixin,
                                  AttachmentManagementMixin,
                                  TestedonManagementMixin):
    """
    A separate test class for packaged apps with a 2nd version going to an
    approved state.

    We're doing this to make the mocks easier to handle.
""" fixtures = fixture('webapp_337141') def setUp(self): super(TestApprovePackagedVersions, self).setUp() self.mozilla_contact = '[email protected]' self.app = self.get_app() self.file = self.app.latest_version.files.all()[0] self.app.update(status=mkt.STATUS_PUBLIC, mozilla_contact=self.mozilla_contact, is_packaged=True) self.new_version = version_factory( addon=self.app, version='2.0', file_kw={'status': mkt.STATUS_PENDING}) self.url = reverse('reviewers.apps.review', args=[self.app.app_slug]) def get_app(self): return Webapp.objects.get(id=337141) def _check_message(self, msg): eq_(msg.call_args_list[0][0][1], '"Updated Packaged App Review" successfully processed ' '(+40 points, 40 total).') def test_version_pending_to_public(self, update_name, update_locales, update_cached_manifests, index_webapps, messages, sign_mock): index_webapps.delay.reset_mock() eq_(update_name.call_count, 0) eq_(update_locales.call_count, 0) eq_(update_cached_manifests.delay.call_count, 0) data = {'action': 'public', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) app = self.get_app() eq_(app.status, mkt.STATUS_PUBLIC) eq_(app.current_version, self.new_version) eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC) self._check_log(mkt.LOG.APPROVE_VERSION) self._check_email_dev_and_contact('Approved') self._check_email_body() self._check_score(mkt.REVIEWED_WEBAPP_UPDATE) self._check_message(messages) eq_(update_name.call_count, 1) eq_(update_locales.call_count, 1) eq_(index_webapps.delay.call_count, 1) eq_(update_cached_manifests.delay.call_count, 1) eq_(sign_mock.call_args[0][0], app.current_version.pk) def test_version_pending_to_approved(self, update_name, update_locales, update_cached_manifests, index_webapps, messages, sign_mock): self.app.update(publish_type=mkt.PUBLISH_PRIVATE) index_webapps.delay.reset_mock() eq_(update_name.call_count, 0) eq_(update_locales.call_count, 0) eq_(update_cached_manifests.delay.call_count, 0) data = {'action': 'public', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) app = self.get_app() eq_(app.status, mkt.STATUS_PUBLIC) ok_(app.current_version != self.new_version) eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC) eq_(self.new_version.all_files[0].status, mkt.STATUS_APPROVED) self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE) self._check_email_dev_and_contact('Approved but private') self._check_email_body() self._check_score(mkt.REVIEWED_WEBAPP_UPDATE) self._check_message(messages) eq_(update_name.call_count, 1) eq_(update_locales.call_count, 1) eq_(index_webapps.delay.call_count, 1) eq_(update_cached_manifests.delay.call_count, 1) eq_(sign_mock.call_args[0][0], self.new_version.pk) def test_version_pending_to_public_app_unlisted( self, update_name, update_locales, update_cached_manifests, index_webapps, messages, sign_mock): self.app.update(status=mkt.STATUS_UNLISTED) index_webapps.delay.reset_mock() eq_(update_name.call_count, 0) eq_(update_locales.call_count, 0) eq_(update_cached_manifests.delay.call_count, 0) data = {'action': 'public', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) app = self.get_app() eq_(app.status, mkt.STATUS_UNLISTED) eq_(app.current_version, self.new_version) eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC) 
self._check_log(mkt.LOG.APPROVE_VERSION) self._check_email_dev_and_contact('Approved') self._check_email_body() self._check_score(mkt.REVIEWED_WEBAPP_UPDATE) self._check_message(messages) eq_(update_name.call_count, 1) eq_(update_locales.call_count, 1) eq_(index_webapps.delay.call_count, 1) eq_(update_cached_manifests.delay.call_count, 1) eq_(sign_mock.call_args[0][0], app.current_version.pk) def test_version_pending_to_approved_app_unlisted( self, update_name, update_locales, update_cached_manifests, index_webapps, messages, sign_mock): self.app.update(status=mkt.STATUS_UNLISTED, publish_type=mkt.PUBLISH_PRIVATE) index_webapps.delay.reset_mock() eq_(update_name.call_count, 0) eq_(update_locales.call_count, 0) eq_(update_cached_manifests.delay.call_count, 0) data = {'action': 'public', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) app = self.get_app() eq_(app.status, mkt.STATUS_UNLISTED) ok_(app.current_version != self.new_version) eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC) eq_(self.new_version.all_files[0].status, mkt.STATUS_APPROVED) self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE) self._check_email_dev_and_contact('Approved but private') self._check_email_body() self._check_score(mkt.REVIEWED_WEBAPP_UPDATE) self._check_message(messages) eq_(update_name.call_count, 1) eq_(update_locales.call_count, 1) eq_(index_webapps.delay.call_count, 1) eq_(update_cached_manifests.delay.call_count, 1) eq_(sign_mock.call_args[0][0], self.new_version.pk) def test_version_pending_to_public_app_private( self, update_name, update_locales, update_cached_manifests, index_webapps, messages, sign_mock): self.app.update(status=mkt.STATUS_APPROVED) index_webapps.delay.reset_mock() eq_(update_name.call_count, 0) eq_(update_locales.call_count, 0) eq_(update_cached_manifests.delay.call_count, 0) data = {'action': 'public', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) app = self.get_app() eq_(app.status, mkt.STATUS_APPROVED) eq_(app.current_version, self.new_version) eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC) self._check_log(mkt.LOG.APPROVE_VERSION) self._check_email_dev_and_contact('Approved') self._check_email_body() self._check_score(mkt.REVIEWED_WEBAPP_UPDATE) self._check_message(messages) eq_(update_name.call_count, 1) eq_(update_locales.call_count, 1) eq_(index_webapps.delay.call_count, 1) eq_(update_cached_manifests.delay.call_count, 1) eq_(sign_mock.call_args[0][0], app.current_version.pk) def test_version_pending_to_approved_app_private( self, update_name, update_locales, update_cached_manifests, index_webapps, messages, sign_mock): self.app.update(status=mkt.STATUS_APPROVED, publish_type=mkt.PUBLISH_PRIVATE) index_webapps.delay.reset_mock() eq_(update_name.call_count, 0) eq_(update_locales.call_count, 0) eq_(update_cached_manifests.delay.call_count, 0) data = {'action': 'public', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) app = self.get_app() eq_(app.status, mkt.STATUS_APPROVED) ok_(app.current_version != self.new_version) eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC) eq_(self.new_version.all_files[0].status, mkt.STATUS_APPROVED) self._check_log(mkt.LOG.APPROVE_VERSION_PRIVATE) self._check_email_dev_and_contact('Approved but private') self._check_email_body() 
self._check_score(mkt.REVIEWED_WEBAPP_UPDATE) self._check_message(messages) eq_(update_name.call_count, 1) eq_(update_locales.call_count, 1) eq_(index_webapps.delay.call_count, 1) eq_(update_cached_manifests.delay.call_count, 1) eq_(sign_mock.call_args[0][0], self.new_version.pk) def test_version_pending_to_rejected_app_public( self, update_name, update_locales, update_cached_manifests, index_webapps, messages, sign_mock): self.app.update(status=mkt.STATUS_PUBLIC) index_webapps.delay.reset_mock() eq_(update_name.call_count, 0) eq_(update_locales.call_count, 0) eq_(update_cached_manifests.delay.call_count, 0) data = {'action': 'reject', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) app = self.get_app() eq_(app.status, mkt.STATUS_PUBLIC) ok_(app.current_version != self.new_version) eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC) eq_(self.new_version.all_files[0].status, mkt.STATUS_REJECTED) self._check_log(mkt.LOG.REJECT_VERSION) self._check_email_dev_and_contact('Rejected') self._check_email_body() self._check_score(mkt.REVIEWED_WEBAPP_UPDATE) self._check_message(messages) eq_(update_name.call_count, 0) eq_(update_locales.call_count, 0) eq_(index_webapps.delay.call_count, 1) eq_(update_cached_manifests.delay.call_count, 0) eq_(sign_mock.call_count, 0) def test_version_pending_to_rejected_app_unlisted( self, update_name, update_locales, update_cached_manifests, index_webapps, messages, sign_mock): self.app.update(status=mkt.STATUS_UNLISTED) index_webapps.delay.reset_mock() eq_(update_name.call_count, 0) eq_(update_locales.call_count, 0) eq_(update_cached_manifests.delay.call_count, 0) data = {'action': 'reject', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) app = self.get_app() eq_(app.status, mkt.STATUS_UNLISTED) ok_(app.current_version != self.new_version) eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC) eq_(self.new_version.all_files[0].status, mkt.STATUS_REJECTED) self._check_log(mkt.LOG.REJECT_VERSION) self._check_email_dev_and_contact('Rejected') self._check_email_body() self._check_score(mkt.REVIEWED_WEBAPP_UPDATE) self._check_message(messages) eq_(update_name.call_count, 0) eq_(update_locales.call_count, 0) eq_(index_webapps.delay.call_count, 1) eq_(update_cached_manifests.delay.call_count, 0) eq_(sign_mock.call_count, 0) def test_version_pending_to_rejected_app_private( self, update_name, update_locales, update_cached_manifests, index_webapps, messages, sign_mock): self.app.update(status=mkt.STATUS_APPROVED) index_webapps.delay.reset_mock() eq_(update_name.call_count, 0) eq_(update_locales.call_count, 0) eq_(update_cached_manifests.delay.call_count, 0) data = {'action': 'reject', 'comments': 'something'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self.post(data) app = self.get_app() eq_(app.status, mkt.STATUS_APPROVED) ok_(app.current_version != self.new_version) eq_(app.current_version.all_files[0].status, mkt.STATUS_PUBLIC) eq_(self.new_version.all_files[0].status, mkt.STATUS_REJECTED) self._check_log(mkt.LOG.REJECT_VERSION) self._check_email_dev_and_contact('Rejected') self._check_email_body() self._check_score(mkt.REVIEWED_WEBAPP_UPDATE) self._check_message(messages) eq_(update_name.call_count, 0) eq_(update_locales.call_count, 0) eq_(index_webapps.delay.call_count, 1) 
        eq_(update_cached_manifests.delay.call_count, 0)
        eq_(sign_mock.call_count, 0)


class TestReviewLog(AppReviewerTest, AccessMixin):

    def setUp(self):
        super(TestReviewLog, self).setUp()
        # Note: if `created` is not specified, `app_factory` uses a randomly
        # generated timestamp.
        self.apps = [app_factory(name='XXX', created=days_ago(3),
                                 status=mkt.STATUS_PENDING),
                     app_factory(name='YYY', created=days_ago(2),
                                 status=mkt.STATUS_PENDING)]
        self.url = reverse('reviewers.apps.logs')

        patcher = mock.patch.object(settings, 'TASK_USER_ID',
                                    self.admin_user.id)
        patcher.start()
        self.addCleanup(patcher.stop)

    def get_user(self):
        return self.reviewer_user

    def make_approvals(self):
        d = 1
        for app in self.apps:
            days_ago = self.days_ago(d)
            mkt.log(mkt.LOG.REJECT_VERSION, app, app.latest_version,
                    user=self.get_user(), details={'comments': 'youwin'},
                    created=days_ago)
            # Throw in a few task logs that shouldn't get queried.
            mkt.log(mkt.LOG.REREVIEW_MANIFEST_CHANGE, app,
                    app.latest_version, user=self.admin_user,
                    details={'comments': 'foo'}, created=days_ago)
            d += 1

    def make_an_approval(self, action, comment='youwin', user=None,
                         app=None):
        if not user:
            user = self.get_user()
        if not app:
            app = self.apps[0]
        mkt.log(action, app, app.latest_version, user=user,
                details={'comments': comment})

    def test_basic(self):
        self.make_approvals()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        assert doc('#log-filter button'), 'No filters.'
        # Should have 2 showing.
        rows = doc('tbody tr')
        logs = rows.filter(':not(.hide)')
        eq_(logs.length, 2)
        # Ensure that the app links are valid.
        eq_(logs.find('.name .app-link').eq(0).attr('href'),
            self.apps[0].get_url_path())
        eq_(logs.find('.name .app-link').eq(1).attr('href'),
            self.apps[1].get_url_path())
        eq_(rows.filter('.hide').eq(0).text(), 'youwin')

    def test_search_app_soft_deleted(self):
        self.make_approvals()
        self.apps[0].update(status=mkt.STATUS_DELETED)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        doc = pq(res.content)
        all_reviews = [d.attrib.get('data-addonid')
                       for d in doc('#log-listing tbody tr')]
        assert str(self.apps[0].pk) in all_reviews, (
            'Soft deleted review did not show up in listing')

    def test_xss(self):
        a = self.apps[0]
        a.name = '<script>alert("xss")</script>'
        a.save()
        mkt.log(mkt.LOG.REJECT_VERSION, a, a.latest_version,
                user=self.get_user(), details={'comments': 'xss!'})
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        inner_html = pq(r.content)('#log-listing tbody td').eq(1).html()
        assert '&lt;script&gt;' in inner_html
        assert '<script>' not in inner_html

    def test_end_filter(self):
        """
        Let's use today as an end-day filter and make sure we see stuff if
        we filter.
        """
        self.make_approvals()
        # Make sure we show the stuff we just made.
        date = time.strftime('%Y-%m-%d')
        r = self.client.get(self.url, dict(end=date))
        eq_(r.status_code, 200)
        doc = pq(r.content)('#log-listing tbody')
        eq_(doc('tr:not(.hide)').length, 2)
        eq_(doc('tr.hide').eq(0).text(), 'youwin')

    def test_end_filter_wrong(self):
        """
        Pass an invalid end-day filter and make sure the page doesn't blow
        up and just ignores the bad filter.
        """
        self.make_approvals()
        r = self.client.get(self.url, dict(end='wrong!'))
        # If this is broken, we'll get a traceback.
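        # A malformed date should simply be ignored: the page still renders
        # with all log rows shown.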
eq_(r.status_code, 200) eq_(pq(r.content)('#log-listing tr:not(.hide)').length, 3) def test_search_comment_exists(self): """Search by comment.""" self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, comment='hello') r = self.client.get(self.url, dict(search='hello')) eq_(r.status_code, 200) eq_(pq(r.content)('#log-listing tbody tr.hide').eq(0).text(), 'hello') def test_search_comment_doesnt_exist(self): """Search by comment, with no results.""" self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, comment='hello') r = self.client.get(self.url, dict(search='bye')) eq_(r.status_code, 200) eq_(pq(r.content)('.no-results').length, 1) def test_search_author_exists(self): """Search by author.""" self.make_approvals() user = UserProfile.objects.get(email='[email protected]') self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, user=user, comment='hi') r = self.client.get(self.url, dict(search='regular')) eq_(r.status_code, 200) rows = pq(r.content)('#log-listing tbody tr') eq_(rows.filter(':not(.hide)').length, 1) eq_(rows.filter('.hide').eq(0).text(), 'hi') def test_search_author_doesnt_exist(self): """Search by author, with no results.""" self.make_approvals() user = UserProfile.objects.get(email='[email protected]') self.make_an_approval(mkt.LOG.ESCALATE_MANUAL, user=user) r = self.client.get(self.url, dict(search='wrong')) eq_(r.status_code, 200) eq_(pq(r.content)('.no-results').length, 1) def test_search_addon_exists(self): """Search by add-on name.""" self.make_approvals() app = self.apps[0] r = self.client.get(self.url, dict(search=app.name)) eq_(r.status_code, 200) tr = pq(r.content)('#log-listing tr[data-addonid="%s"]' % app.id) eq_(tr.length, 1) eq_(tr.siblings('.comments').text(), 'youwin') def test_search_addon_by_slug_exists(self): """Search by app slug.""" app = self.apps[0] app.app_slug = 'a-fox-was-sly' app.save() self.make_approvals() r = self.client.get(self.url, dict(search='fox')) eq_(r.status_code, 200) tr = pq(r.content)('#log-listing tr[data-addonid="%s"]' % app.id) eq_(tr.length, 1) eq_(tr.siblings('.comments').text(), 'youwin') def test_search_addon_doesnt_exist(self): """Search by add-on name, with no results.""" self.make_approvals() r = self.client.get(self.url, dict(search='zzz')) eq_(r.status_code, 200) eq_(pq(r.content)('.no-results').length, 1) @mock.patch('mkt.developers.models.ActivityLog.arguments', new=mock.Mock) def test_addon_missing(self): self.make_approvals() r = self.client.get(self.url) eq_(pq(r.content)('#log-listing tr td').eq(1).text(), 'App has been deleted.') def test_request_info_logs(self): self.make_an_approval(mkt.LOG.REQUEST_INFORMATION) r = self.client.get(self.url) eq_(pq(r.content)('#log-listing tr td a').eq(1).text(), 'More information requested') def test_escalate_logs(self): self.make_an_approval(mkt.LOG.ESCALATE_MANUAL) r = self.client.get(self.url) eq_(pq(r.content)('#log-listing tr td a').eq(1).text(), 'Reviewer escalation') def test_no_double_encode(self): version = self.apps[0].latest_version version.update(version='<foo>') self.make_an_approval(mkt.LOG.ESCALATE_MANUAL) r = self.client.get(self.url) assert '<foo>' in pq(r.content)('#log-listing tr td').eq(1).text(), ( 'Double-encoded string was found in reviewer log.') class TestMotd(AppReviewerTest, AccessMixin): def setUp(self): super(TestMotd, self).setUp() self.url = reverse('reviewers.apps.motd') self.key = u'mkt_reviewers_motd' set_config(self.key, u'original value') def test_perms_not_editor(self): self.client.logout() req = self.client.get(self.url, follow=True) self.assert3xx(req, '%s?to=%s' % 
(reverse('users.login'), self.url)) self.client.login('[email protected]') eq_(self.client.get(self.url).status_code, 403) def test_perms_not_motd(self): # Any type of reviewer can see the MOTD. self.login_as_editor() req = self.client.get(self.url) eq_(req.status_code, 200) eq_(req.context['form'], None) # No redirect means it didn't save. eq_(self.client.post(self.url, dict(motd='motd')).status_code, 200) eq_(get_config(self.key), u'original value') def test_motd_change(self): # Only users in the MOTD group can POST. user = self.reviewer_user self.grant_permission(user, 'AppReviewerMOTD:Edit') self.login_as_editor() # Get is a 200 with a form. req = self.client.get(self.url) eq_(req.status_code, 200) eq_(req.context['form'].initial['motd'], u'original value') # Empty post throws an error. req = self.client.post(self.url, dict(motd='')) eq_(req.status_code, 200) # Didn't redirect after save. eq_(pq(req.content)('#editor-motd .errorlist').text(), 'This field is required.') # A real post now. req = self.client.post(self.url, dict(motd='new motd')) self.assert3xx(req, self.url) eq_(get_config(self.key), u'new motd') class TestReviewAppComm(AppReviewerTest, AttachmentManagementMixin, TestReviewMixin, TestedonManagementMixin): """ Integration test that notes are created and that emails are sent to the right groups of people. """ def setUp(self): super(TestReviewAppComm, self).setUp() self.app = app_factory(rated=True, status=mkt.STATUS_PENDING, mozilla_contact='[email protected]') self.app.addonuser_set.create(user=user_factory(email='steamcube')) self.url = reverse('reviewers.apps.review', args=[self.app.app_slug]) self.mozilla_contact = '[email protected]' def _post(self, data, queue='pending'): res = self.client.post(self.url, data) self.assert3xx(res, reverse('reviewers.apps.queue_%s' % queue)) def _get_note(self): eq_(self.app.threads.count(), 1) thread = self.app.threads.all()[0] eq_(thread.notes.count(), 1) return thread.notes.all()[0] def test_email_cc(self): """ Emailed cc'ed people (those who have posted on the thread). """ poster = user_factory() thread, note = create_comm_note( self.app, self.app.latest_version, poster, 'lgtm') data = {'action': 'public', 'comments': 'gud jerb'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self._post(data) # Test emails. self._check_email_dev_and_contact(None, outbox_len=5) # Some person who joined the thread. self._check_email( self._get_mail(poster.email), 'Approved', to=[poster.email]) def test_approve(self): """ On approval, send an email to [developer, mozilla contact]. """ data = {'action': 'public', 'comments': 'gud jerb'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self._post(data) # Test notes. note = self._get_note() eq_(note.note_type, comm.APPROVAL) eq_(note.body, 'gud jerb') # Test emails. self._check_email_dev_and_contact(None) def test_reject(self): """ On rejection, send an email to [developer, mozilla contact]. """ data = {'action': 'reject', 'comments': 'rubesh'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self._post(data) # Test notes. note = self._get_note() eq_(note.note_type, comm.REJECTION) eq_(note.body, 'rubesh') # Test emails. self._check_email_dev_and_contact(None) def test_info(self): """ On info request, send an email to [developer, mozilla contact]. 
""" data = {'action': 'info', 'comments': 'huh'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self._post(data) # Test notes. note = self._get_note() eq_(note.note_type, comm.MORE_INFO_REQUIRED) eq_(note.body, 'huh') # Test emails. self._check_email_dev_and_contact(None) def test_escalate(self): """ On escalation, send an email to senior reviewers and developer. """ data = {'action': 'escalate', 'comments': 'soup her man'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self._post(data) # Test notes. note = self._get_note() eq_(note.note_type, comm.ESCALATION) eq_(note.body, 'soup her man') # Test emails. eq_(len(mail.outbox), 2) self._check_email( # Senior reviewer. self._get_mail(self.snr_reviewer_user.email), 'Escalated', to=[self.snr_reviewer_user.email]) self._check_email(self._get_mail('steamcube'), 'Escalated') def test_comment(self): """ On reviewer comment, send an email to those but developers. """ data = {'action': 'comment', 'comments': 'huh'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self._post(data) # Test notes. note = self._get_note() eq_(note.note_type, comm.REVIEWER_COMMENT) eq_(note.body, 'huh') # Test emails. eq_(len(mail.outbox), 1) self._check_email(mail.outbox[0], 'Private reviewer comment', to=[self.mozilla_contact]) def test_disable(self): """ On banning, send an email to [developer, mozilla contact]. """ self.login_as_admin() data = {'action': 'disable', 'comments': 'u dun it'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form()) self._post(data) # Test notes. note = self._get_note() eq_(note.note_type, comm.DISABLED) eq_(note.body, 'u dun it') # Test emails. self._check_email_dev_and_contact(None) def test_attachments(self): data = {'action': 'comment', 'comments': 'huh'} data.update(self._attachment_management_form(num=2)) data.update(self._attachments(num=2)) data.update(self._testedon_management_form()) self._post(data) # Test attachments. note = self._get_note() eq_(note.attachments.count(), 2) def test_tested_on_one(self): """Tested 'Tested on' message appended to note body.""" data = {'action': 'reject', 'comments': 'rubesh'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form(num=1)) data.update(self._platforms(1)) self._post(data) # Test notes. note = self._get_note() eq_(note.note_type, comm.REJECTION) eq_(note.body, u'rubesh\n\n' u'Tested on \xd0esktop platform on PC with version 34') def test_tested_on_two(self): """Tested two 'Tested on' messages appended to note body.""" data = {'action': 'reject', 'comments': 'rubesh'} data.update(self._attachment_management_form(num=0)) data.update(self._testedon_management_form(num=2)) data.update(self._platforms(2)) self._post(data) # Test notes. 
note = self._get_note() eq_(note.note_type, comm.REJECTION) eq_(note.body, u'rubesh\n\n' u'Tested on \xd0esktop platform on PC with version 34; ' u'FirefoxOS platform on ZT\xc8 Open with version 1.3<') class TestModeratedQueue(mkt.site.tests.TestCase, AccessMixin): def setUp(self): super(TestModeratedQueue, self).setUp() self.app = app_factory() self.moderator_user = user_factory(email='moderator') self.grant_permission(self.moderator_user, 'Apps:ModerateReview') user_factory(email='regular') user1 = user_factory() user2 = user_factory() self.url = reverse('reviewers.apps.queue_moderated') self.review1 = Review.objects.create(addon=self.app, body='body', user=user1, rating=3, editorreview=True) ReviewFlag.objects.create(review=self.review1, flag=ReviewFlag.SPAM, user=user1) self.review2 = Review.objects.create(addon=self.app, body='body', user=user2, rating=4, editorreview=True) ReviewFlag.objects.create(review=self.review2, flag=ReviewFlag.SUPPORT, user=user2) self.login(self.moderator_user) def _post(self, action): ctx = self.client.get(self.url).context data_formset = formset(initial(ctx['reviews_formset'].forms[0])) data_formset['form-0-action'] = action res = self.client.post(self.url, data_formset) self.assert3xx(res, self.url) def _get_logs(self, action): return ActivityLog.objects.filter(action=action.id) def test_anonymous_flagger(self): ReviewFlag.objects.all()[0].update(user=None) ReviewFlag.objects.all()[1].delete() res = self.client.get(self.url) txt = pq(res.content)('.reviews-flagged-reasons li div span').text() teststring = u'Flagged by an anonymous user on' ok_(txt.startswith(teststring), '"%s" doesn\'t start with "%s"' % (txt, teststring)) def test_setup(self): eq_(Review.objects.filter(editorreview=True).count(), 2) eq_(ReviewFlag.objects.filter(flag=ReviewFlag.SPAM).count(), 1) res = self.client.get(self.url) doc = pq(res.content)('#reviews-flagged') # Test the default action is "skip". eq_(doc('.reviewers-desktop #id_form-0-action_1:checked').length, 1) def test_skip(self): # Skip the first review, which still leaves two. self._post(mkt.ratings.REVIEW_MODERATE_SKIP) res = self.client.get(self.url) eq_(len(res.context['page'].object_list), 2) def test_delete(self): # Delete the first review, which leaves one. self._post(mkt.ratings.REVIEW_MODERATE_DELETE) res = self.client.get(self.url) eq_(len(res.context['page'].object_list), 1) eq_(self._get_logs(mkt.LOG.DELETE_REVIEW).count(), 1) def test_keep(self): # Keep the first review, which leaves one. self._post(mkt.ratings.REVIEW_MODERATE_KEEP) res = self.client.get(self.url) eq_(len(res.context['page'].object_list), 1) eq_(self._get_logs(mkt.LOG.APPROVE_REVIEW).count(), 1) def test_no_reviews(self): Review.objects.all().delete() res = self.client.get(self.url) eq_(res.status_code, 200) eq_(pq(res.content)('#reviews-flagged .no-results').length, 1) def test_queue_count(self): r = self.client.get(self.url) eq_(r.status_code, 200) doc = pq(r.content) eq_(doc('.tabnav li a')[0].text, u'Moderated Reviews (2)') def test_queue_count_reviewer_and_moderator(self): self.grant_permission(self.moderator_user, 'Apps:Review') r = self.client.get(self.url) eq_(r.status_code, 200) doc = pq(r.content) links = doc('.tabnav li a') eq_(links[0].text, u'Apps (0)') eq_(links[1].text, u'Re-reviews (0)') eq_(links[2].text, u'Updates (0)') eq_(links[3].text, u'Reviewing (0)') eq_(links[4].text, u'Homescreens (0)') eq_(links[5].text, u'Moderated Reviews (2)') def test_deleted_app(self): "Test that a deleted app doesn't break the queue." 
        self.app.delete()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)

    def test_queue_count_deleted_app(self):
        self.app.delete()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        eq_(doc('.tabnav li a')[0].text,
            u'Moderated Reviews (0)')


class AbuseQueueMixin(object):

    def _setUp(self):
        self.abuseviewer_user = user_factory(email='abuser')
        self.grant_permission(self.abuseviewer_user, self.perm)
        self.login(self.abuseviewer_user)
        user_factory(email='regular')
        self.url = reverse(self.view_name)

    def _post(self, action, form_index=0):
        ctx = self.client.get(self.url).context
        data_formset = formset(initial(ctx['abuse_formset'].forms[0]))
        data_formset['form-%s-action' % (form_index)] = action
        res = self.client.post(self.url, data_formset)
        self.assert3xx(res, self.url)

    def _get_logs(self, action):
        return ActivityLog.objects.filter(action=action.id)

    def test_anonymous_flagger(self):
        AbuseReport.objects.all()[0].update(reporter=None)
        res = self.client.get(self.url)
        txt = pq(res.content)('.abuse-reports-reports li div span').text()
        teststring = u'Submitted by an anonymous user on'
        ok_(txt.startswith(teststring),
            '"%s" doesn\'t start with "%s"' % (txt, teststring))

    def test_no_reviews(self):
        AbuseReport.objects.all().delete()
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        eq_(pq(res.content)('#abuse-reports .no-results').length, 1)

    def test_queue_count(self):
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        txt = pq(r.content)('.tabnav li a')[0].text
        teststring = u'Abuse Reports (2)'
        ok_(txt.endswith(teststring),
            '"%s" doesn\'t end with "%s"' % (txt, teststring))

    def test_skip(self):
        # Skip the first xxx's reports, which still leaves 2 apps/sites.
        self._post(
est_first_read(self):
        # Mark read the first xxx's reports, which leaves one.
        self._post(mkt.abuse.forms.ABUSE_REPORT_READ)
        res = self.client.get(self.url)
        eq_(len(res.context['page'].object_list), 1)
        # There are two abuse reports for app1/website1, so two log entries.
        eq_(self._get_logs(self.log_const).count(), 2)
        # Check the remaining abuse report remains unread.
        eq_(AbuseReport.objects.filter(read=False).count(), 1)

    def test_first_flag(self):
        # Flag the first xxx's reports.
        self._post(mkt.abuse.forms.ABUSE_REPORT_FLAG)
        res = self.client.get(self.url)
        # Check one is left.
        eq_(len(res.context['page'].object_list), 1)
        # Check the object is flagged.
        eq_(RereviewQueue.objects.count(), 1)
        # As flagging marks read too, there should be 2 log entries.
        eq_(self._get_logs(self.log_const).count(), 2)
        # Check the remaining abuse report remains unread.
        eq_(AbuseReport.objects.filter(read=False).count(), 1)

    def test_xss(self):
        xss = '<script>alert("xss")</script>'
        AbuseReport.objects.all()[0].update(message=xss)
        res = self.client.get(self.url)
        eq_(res.status_code, 200)
        tbody = pq(res.content)(
            '#abuse-reports .abuse-reports-reports').html()
        assert '&lt;script&gt;' in tbody
        assert '<script>' not in tbody

    def test_deleted_website(self):
        "Test that a deleted app/website doesn't break the queue."
        AbuseReport.objects.all()[0].object.delete()
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        txt = pq(r.content)('.tabnav li a')[0].text
        teststring = u'Abuse Reports (1)'
        ok_(txt.endswith(teststring),
            '"%s" doesn\'t end with "%s"' % (txt, teststring))


class TestAppAbuseQueue(mkt.site.tests.TestCase, AccessMixin,
                        AbuseQueueMixin):
    perm = 'Apps:ReadAbuse'
    view_name = 'reviewers.apps.queue_abuse'
    log_const = mkt.LOG.APP_ABUSE_MARKREAD

    def setUp(self):
        super(TestAppAbuseQueue, self).setUp()
        self._setUp()

    @classmethod
    def setUpTestData(cls):
        app1 = app_factory()
        app2 = app_factory()
        # Add some extra apps, which shouldn't show up.
        app_factory()
        app_factory()
        user1 = user_factory()
        user2 = user_factory()
        AbuseReport.objects.create(reporter=user1, ip_address='123.45.67.89',
                                   addon=app1, message='bad')
        AbuseReport.objects.create(reporter=user2, ip_address='123.01.67.89',
                                   addon=app1, message='terrible')
        AbuseReport.objects.create(reporter=user1, ip_address='123.01.02.89',
                                   addon=app2, message='the worst')

    def test_setup(self):
        eq_(AbuseReport.objects.filter(read=False).count(), 3)
        eq_(AbuseReport.objects.filter(addon=Webapp.objects.all()[0]).count(),
            2)
        res = self.client.get(self.url)
        # Check there are 2 apps listed.
        eq_(len(res.context['page'].object_list), 2)

    def test_queue_count_reviewer_and_moderator(self):
        self.grant_permission(self.abuseviewer_user, 'Apps:Review')
        r = self.client.get(self.url)
        eq_(r.status_code, 200)
        doc = pq(r.content)
        links = doc('.tabnav li a')
        eq_(links[0].text, u'Apps (0)')
        eq_(links[1].text, u'Re-reviews (0)')
        eq_(links[2].text, u'Updates (0)')
        eq_(links[3].text, u'Reviewing (0)')
        eq_(links[4].text, u'Homescreens (0)')
        eq_(links[5].text, u'Abuse Reports (2)')


class TestWebsiteAbuseQueue(mkt.site.tests.TestCase, AccessMixin,
                            AbuseQueueMixin):
    perm = 'Websites:ReadAbuse'
    view_name = 'reviewers.websites.queue_abuse'
    log_const = mkt.LOG.WEBSITE_ABUSE_MARKREAD

    def setUp(self):
        super(TestWebsiteAbuseQueue, self).setUp()
        self._setUp()

    @classmethod
    def setUpTestData(cls):
        website1 = website_factory()
        website2 = website_factory()
        # Add some extra sites, which shouldn't show up.
website_factory() website_factory() user1 = user_factory() user2 = user_factory() AbuseReport.objects.create(reporter=user1, ip_address='123.45.67.89', website=website1, message='bad') AbuseReport.objects.create(reporter=user2, ip_address='123.01.67.89', website=website1, message='terrible') AbuseReport.objects.create(reporter=user1, ip_address='123.01.02.89', website=website2, message='the worst') cls.website1 = website1 def test_setup(self): eq_(AbuseReport.objects.filter(read=False).count(), 3) eq_(AbuseReport.objects.filter(website=self.website1).count(), 2) res = self.client.get(self.url) # Check there are 2 websites listed. eq_(len(res.context['page'].object_list), 2) def test_first_flag(self): # No re-review flagging for Websites yet - no re-review queue! raise SkipTest() class TestGetSigned(BasePackagedAppTest, mkt.site.tests.TestCase): def setUp(self): super(TestGetSigned, self).setUp() self.url = reverse('reviewers.signed', args=[self.app.app_slug, self.version.pk]) self.grant_permission(user_factory(email='editor'), 'Apps:Review') self.login('[email protected]') def test_not_logged_in(self): self.client.logout() self.assertLoginRequired(self.client.get(self.url)) def test_not_reviewer(self): self.client.logout() self.login(user_factory()) eq_(self.client.get(self.url).status_code, 403) @override_settings( DEFAULT_FILE_STORAGE='mkt.site.storage_utils.LocalFileStorage') @mock.patch('lib.crypto.packaged.sign') def test_reviewer_sign_arguments_local(self, sign_mock): sign_mock.side_effect = mock_sign self.setup_files() res = self.client.get(self.url) sign_mock.assert_called_with(self.version.pk, reviewer=True) eq_(res.status_code, 200) @override_settings( DEFAULT_FILE_STORAGE='mkt.site.storage_utils.S3BotoPrivateStorage') @mock.patch('lib.crypto.packaged.sign') def test_reviewer_sign_arguments_storage(self, sign_mock): sign_mock.side_effect = mock_sign self.setup_files() res = self.client.get(self.url) sign_mock.assert_called_with(self.version.pk, reviewer=True) self.assert3xx(res, private_storage.url( self.file.signed_reviewer_file_path)) @mock.patch.object(packaged, 'sign', mock_sign) def test_reviewer(self): if not settings.XSENDFILE: raise SkipTest() self.setup_files() res = self.client.get(self.url) eq_(res.status_code, 200) file_ = self.app.current_version.all_files[0] eq_(res['x-sendfile'], file_.signed_reviewer_file_path) eq_(res['etag'], '"%s"' % file_.hash.split(':')[-1]) def test_not_packaged(self): self.app.update(is_packaged=False) res = self.client.get(self.url) eq_(res.status_code, 404) def test_wrong_version(self): self.url = reverse('reviewers.signed', args=[self.app.app_slug, 0]) res = self.client.get(self.url) eq_(res.status_code, 404) def test_token_good(self): if not settings.XSENDFILE: raise SkipTest() token = Token(data={'app_id': self.app.id}) token.save() self.setup_files() self.client.logout() res = self.client.get(urlparams(self.url, token=token.token)) eq_(res.status_code, 200) file_ = self.app.current_version.all_files[0] eq_(res['x-sendfile'], file_.signed_reviewer_file_path) eq_(res['etag'], '"%s"' % file_.hash.split(':')[-1]) # Test token doesn't work the 2nd time. 
res = self.client.get(urlparams(self.url, token=token.token)) eq_(res.status_code, 403) def test_token_bad(self): token = Token(data={'app_id': 'abcdef'}) token.save() self.setup_files() self.client.logout() res = self.client.get(urlparams(self.url, token=token.token)) eq_(res.status_code, 403) class TestMiniManifestView(BasePackagedAppTest): def setUp(self): super(TestMiniManifestView, self).setUp() self.app = Webapp.objects.get(pk=337141) self.app.update(is_packaged=True) self.version = self.app.versions.latest() self.file = self.version.all_files[0] self.file.update(filename='mozball.zip') self.url = reverse('reviewers.mini_manifest', args=[self.app.app_slug, self.version.pk]) self.grant_permission(user_factory(email='editor'), 'Apps:Review') self.login('[email protected]') def test_not_logged_in(self): self.client.logout() self.assertLoginRequired(self.client.get(self.url)) def test_not_reviewer(self): self.client.logout() self.login(user_factory()) eq_(self.client.get(self.url).status_code, 403) def test_not_packaged(self): self.app.update(is_packaged=False) res = self.client.get(self.url) eq_(res.status_code, 404) def test_wrong_version(self): url = reverse('reviewers.mini_manifest', args=[self.app.app_slug, 0]) res = self.client.get(url) eq_(res.status_code, 404) def test_reviewer(self): self.setup_files() manifest = self.app.get_manifest_json(self.file) res = self.client.get(self.url) eq_(res['Content-type'], MANIFEST_CONTENT_TYPE) data = json.loads(res.content) eq_(data['name'], manifest['name']) eq_(data['developer']['name'], 'Mozilla Marketplace') eq_(data['package_path'], absolutify(reverse('reviewers.signed', args=[self.app.app_slug, self.version.id]))) def test_rejected(self): # Rejected sets file.status to DISABLED and moves to a guarded path. self.setup_files() self.app.update(status=mkt.STATUS_REJECTED) self.file.update(status=mkt.STATUS_DISABLED) manifest = self.app.get_manifest_json(self.file) res = self.client.get(self.url) eq_(res['Content-type'], MANIFEST_CONTENT_TYPE) data = json.loads(res.content) eq_(data['name'], manifest['name']) eq_(data['developer']['name'], 'Mozilla Marketplace') eq_(data['package_path'], absolutify(reverse('reviewers.signed', args=[self.app.app_slug, self.version.id]))) def test_minifest_name_matches_manifest_name(self): self.setup_files() self.app.name = 'XXX' self.app.save() manifest = self.app.get_manifest_json(self.file) res = self.client.get(self.url) data = json.loads(res.content) eq_(data['name'], manifest['name']) def test_token_good(self): token = Token(data={'app_id': self.app.id}) token.save() self.setup_files() self.client.logout() res = self.client.get(urlparams(self.url, token=token.token)) eq_(res.status_code, 200) eq_(res['Content-type'], MANIFEST_CONTENT_TYPE) data = json.loads(res.content) ok_('token=' in data['package_path']) # Test token doesn't work the 2nd time. 
res = self.client.get(urlparams(self.url, token=token.token)) eq_(res.status_code, 403) def test_token_bad(self): token = Token(data={'app_id': 'abcdef'}) token.save() self.setup_files() self.client.logout() res = self.client.get(urlparams(self.url, token=token.token)) eq_(res.status_code, 403) class TestReviewersScores(AppReviewerTest, AccessMixin): def setUp(self): super(TestReviewersScores, self).setUp() self.user = self.reviewer_user self.url = reverse('reviewers.performance', args=[self.user.email]) def test_404(self): res = self.client.get(reverse('reviewers.performance', args=['poop'])) eq_(res.status_code, 404) def test_with_email(self): res = self.client.get(self.url) eq_(res.status_code, 200) eq_(res.context['profile'].id, self.user.id) def test_without_email(self): res = self.client.get(reverse('reviewers.performance')) eq_(res.status_code, 200) eq_(res.context['profile'].id, self.user.id) def test_no_reviews(self): res = self.client.get(self.url) eq_(res.status_code, 200) assert u'No review points awarded yet' in res.content class TestQueueSort(AppReviewerTest): def setUp(self): super(TestQueueSort, self).setUp() """Create and set up apps for some filtering fun.""" self.apps = [app_factory(name='Lillard', status=mkt.STATUS_PENDING, is_packaged=False, version_kw={'version': '1.0'}, file_kw={'status': mkt.STATUS_PENDING}, premium_type=mkt.ADDON_FREE), app_factory(name='Batum', status=mkt.STATUS_PENDING, is_packaged=True, version_kw={'version': '1.0', 'has_editor_comment': True, 'has_info_request': True}, file_kw={'status': mkt.STATUS_PENDING}, premium_type=mkt.ADDON_PREMIUM)] # Set up app attributes. self.apps[0].update(created=self.days_ago(2)) self.apps[1].update(created=self.days_ago(5)) self.apps[0].addonuser_set.create(user=user_factory(email='XXX')) self.apps[1].addonuser_set.create(user=user_factory(email='illmatic')) self.apps[0].addondevicetype_set.create( device_type=mkt.DEVICE_DESKTOP.id) self.apps[1].addondevicetype_set.create( device_type=mkt.DEVICE_MOBILE.id) self.url = reverse('reviewers.apps.queue_pending') def test_do_sort_webapp(self): """ Test that apps are sorted in order specified in GET params. """ rf = RequestFactory() qs = Webapp.objects.all() # Test apps are sorted by created/asc by default. req = rf.get(self.url, {'sort': 'invalidsort', 'order': 'dontcare'}) sorted_qs = ReviewersQueuesHelper(req).sort(qs) eq_(list(sorted_qs), [self.apps[1], self.apps[0]]) # Test sorting by created, descending. req = rf.get(self.url, {'sort': 'created', 'order': 'desc'}) sorted_qs = ReviewersQueuesHelper(req).sort(qs) eq_(list(sorted_qs), [self.apps[0], self.apps[1]]) # Test sorting by app name. req = rf.get(self.url, {'sort': 'name', 'order': 'asc'}) sorted_qs = ReviewersQueuesHelper(req).sort(qs) eq_(list(sorted_qs), [self.apps[1], self.apps[0]]) req = rf.get(self.url, {'sort': 'name', 'order': 'desc'}) sorted_qs = ReviewersQueuesHelper(req).sort(qs) eq_(list(sorted_qs), [self.apps[0], self.apps[1]]) def test_do_sort_version_nom(self): """Tests version nomination sort order.""" url = reverse('reviewers.apps.queue_pending') user = UserProfile.objects.get(email='[email protected]') version_0 = self.apps[0].versions.get() version_0.update(nomination=days_ago(1)) version_1 = self.apps[1].versions.get() version_1.update(nomination=days_ago(2)) # Throw in some disabled versions, they shouldn't affect order. 
version_factory({'status': mkt.STATUS_DISABLED}, addon=self.apps[0], nomination=days_ago(10)) version_factory({'status': mkt.STATUS_DISABLED}, addon=self.apps[1], nomination=days_ago(1)) version_factory({'status': mkt.STATUS_DISABLED}, addon=self.apps[1], nomination=days_ago(20)) req = mkt.site.tests.req_factory_factory( url, user=user, data={'sort': 'nomination'}) res = queue_apps(req) doc = pq(res.content) # Desktop and mobile (hidden on desktop) alternate, so we jump by 2. eq_(doc('tbody tr')[0].get('data-addon'), str(version_1.addon.id)) eq_(doc('tbody tr')[2].get('data-addon'), str(version_0.addon.id)) req = mkt.site.tests.req_factory_factory( url, user=user, data={'sort': 'nomination', 'order': 'desc'}) res = queue_apps(req) doc = pq(res.content) # Desktop and mobile (hidden on desktop) alternate, so we jump by 2. eq_(doc('tbody tr')[0].get('data-addon'), str(version_0.addon.id)) eq_(doc('tbody tr')[2].get('data-addon'), str(version_1.addon.id)) def test_do_sort_queue_object(self): """Tests sorting queue object.""" rf = RequestFactory() url = reverse('reviewers.apps.queue_rereview') earlier_rrq = RereviewQueue.objects.create(addon=self.apps[0]) later_rrq = RereviewQueue.objects.create(addon=self.apps[1]) later_rrq.created += timedelta(days=1) later_rrq.save() request = rf.get(url, {'sort': 'created'}) apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all()) # Assert the order that RereviewQueue objects were created is # maintained. eq_([earlier_rrq.addon, later_rrq.addon], list(apps)) request = rf.get(url, {'sort': 'created', 'order': 'desc'}) apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all()) eq_([later_rrq.addon, earlier_rrq.addon], list(apps)) request = rf.get(url, {'sort': 'name', 'order': 'asc'}) apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all()) eq_([later_rrq.addon, earlier_rrq.addon], list(apps)) request = rf.get(url, {'sort': 'name', 'order': 'desc'}) apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all()) eq_([earlier_rrq.addon, later_rrq.addon], list(apps)) def test_sort_with_priority_review(self): """Tests the sorts are correct with a priority review flagged app.""" # Set up the priority review flagged app. self.apps.append(app_factory(name='Foxkeh', status=mkt.STATUS_PENDING, is_packaged=False, version_kw={'version': '1.0'}, file_kw={'status': mkt.STATUS_PENDING}, premium_type=mkt.ADDON_FREE, priority_review=True)) # Set up app attributes. self.apps[2].update(created=self.days_ago(1)) self.apps[2].addonuser_set.create( user=user_factory(email='[email protected]')) self.apps[2].addondevicetype_set.create( device_type=mkt.DEVICE_DESKTOP.id) # And check it also comes out top of waiting time with Webapp model. rf = RequestFactory() qs = Webapp.objects.all() # Test apps are sorted by created/asc by default. req = rf.get(self.url, {'sort': 'invalidsort', 'order': 'dontcare'}) sorted_qs = ReviewersQueuesHelper(req).sort(qs) eq_(list(sorted_qs), [self.apps[2], self.apps[1], self.apps[0]]) # Test sorting by created, descending. req = rf.get(self.url, {'sort': 'created', 'order': 'desc'}) sorted_qs = ReviewersQueuesHelper(req).sort(qs) eq_(list(sorted_qs), [self.apps[2], self.apps[0], self.apps[1]]) # And with Version model. 
version_0 = self.apps[0].versions.get() version_0.update(nomination=days_ago(1)) version_1 = self.apps[1].versions.get() version_1.update(nomination=days_ago(2)) qs = (Version.objects.filter( files__status=mkt.STATUS_PENDING, addon__disabled_by_user=False, addon__status=mkt.STATUS_PENDING) .order_by('nomination', 'created') .select_related('addon', 'files').no_transforms()) req = rf.get(self.url, {'sort': 'nomination'}) sorted_qs = ReviewersQueuesHelper(req).sort(qs, date_sort='nomination') eq_(list(sorted_qs), [self.apps[2], self.apps[1], self.apps[0]]) req = rf.get(self.url, {'sort': 'nomination', 'order': 'desc'}) sorted_qs = ReviewersQueuesHelper(req).sort(qs, date_sort='nomination') eq_(list(sorted_qs), [self.apps[2], self.apps[0], self.apps[1]]) # And with Rereview model. url = reverse('reviewers.apps.queue_rereview') earlier_rrq = RereviewQueue.objects.create(addon=self.apps[0]) earlier_rrq.created += timedelta(days=1) earlier_rrq.save() later_rrq = RereviewQueue.objects.create(addon=self.apps[1]) later_rrq.created += timedelta(days=2) later_rrq.save() pri_rrq = RereviewQueue.objects.create(addon=self.apps[2]) pri_rrq.save() request = rf.get(url, {'sort': 'created'}) apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all()) eq_([pri_rrq.addon, earlier_rrq.addon, later_rrq.addon], list(apps)) request = rf.get(url, {'sort': 'created', 'order': 'desc'}) apps = ReviewersQueuesHelper(request).sort(RereviewQueue.objects.all()) eq_([pri_rrq.addon, later_rrq.addon, earlier_rrq.addon], list(apps)) class TestAppsReviewing(AppReviewerTest, AccessMixin): def setUp(self): super(TestAppsReviewing, self).setUp() self.url = reverse('reviewers.apps.apps_reviewing') self.apps = [app_factory(name='Antelope', status=mkt.STATUS_PENDING), app_factory(name='Bear', status=mkt.STATUS_PENDING), app_factory(name='Cougar', status=mkt.STATUS_PENDING)] def _view_app(self, app_id): self.client.post(reverse('reviewers.review_viewing'), { 'addon_id': app_id}) def test_no_apps_reviewing(self): res = self.client.get(self.url) eq_(len(res.context['apps']), 0) def test_apps_reviewing(self): self._view_app(self.apps[0].id) res = self.client.get(self.url) eq_(len(res.context['apps']), 1) def test_multiple_reviewers_no_cross_streams(self): self._view_app(self.apps[0].id) self._view_app(self.apps[1].id) res = self.client.get(self.url) eq_(len(res.context['apps']), 2) # Now view an app as another user and verify app. self.login('[email protected]') self._view_app(self.apps[2].id) res = self.client.get(self.url) eq_(len(res.context['apps']), 1) # Check original user again to make sure app list didn't increment. 
self.login_as_editor() res = self.client.get(self.url) eq_(len(res.context['apps']), 2) class TestLeaderboard(AppReviewerTest): def setUp(self): super(TestLeaderboard, self).setUp() self.url = reverse('reviewers.leaderboard') mkt.set_user(self.reviewer_user) def _award_points(self, user, score): ReviewerScore.objects.create(user=user, note_key=mkt.REVIEWED_MANUAL, score=score, note='Thing.') def test_leaderboard_ranks(self): users = (self.reviewer_user, self.regular_user, user_factory(email='clouserw')) self._award_points(users[0], mkt.REVIEWED_LEVELS[0]['points'] - 1) self._award_points(users[1], mkt.REVIEWED_LEVELS[0]['points'] + 1) self._award_points(users[2], mkt.REVIEWED_LEVELS[0]['points'] + 2) def get_cells(): doc = pq(self.client.get(self.url).content.decode('utf-8')) cells = doc('#leaderboard > tbody > tr > .name, ' '#leaderboard > tbody > tr > .level') return [cells.eq(i).text() for i in range(0, cells.length)] eq_(get_cells(), [users[2].display_name, users[1].display_name, mkt.REVIEWED_LEVELS[0]['name'], users[0].display_name]) self._award_points(users[0], 1) eq_(get_cells(), [users[2].display_name, users[1].display_name, users[0].display_name, mkt.REVIEWED_LEVELS[0]['name']]) self._award_points(users[0], -1) self._award_points(users[2], (mkt.REVIEWED_LEVELS[1]['points'] - mkt.REVIEWED_LEVELS[0]['points'])) eq_(get_cells(), [users[2].display_name, mkt.REVIEWED_LEVELS[1]['name'], users[1].display_name, mkt.REVIEWED_LEVELS[0]['name'], users[0].display_name]) class TestReviewPage(mkt.site.tests.TestCase): def setUp(self): super(TestReviewPage, self).setUp() self.app = app_factory(status=mkt.STATUS_PENDING) self.reviewer = user_factory(email='editor') self.grant_permission(self.reviewer, 'Apps:Review') self.url = reverse('reviewers.apps.review', args=[self.app.app_slug]) def test_status_null_disable_approve_btn(self): self.app.update(status=mkt.STATUS_NULL) req = req_factory_factory(self.url, user=self.reviewer) res = app_review(req, app_slug=self.app.app_slug) doc = pq(res.content) assert (doc('#review-actions input[value=public]') .parents('li').hasClass('disabled')) assert not (doc('#review-actions input[value=reject]') .parents('li').hasClass('disabled')) class TestAbusePage(AppReviewerTest): def setUp(self): super(TestAbusePage, self).setUp() self.app = app_factory(name=u'My app é <script>alert(5)</script>') self.url = reverse('reviewers.apps.review.abuse', args=[self.app.app_slug]) AbuseReport.objects.create(addon=self.app, message=self.app.name) def testXSS(self): from django.utils.encoding import smart_unicode from jinja2.utils import escape content = smart_unicode(self.client.get(self.url).content) ok_(not unicode(self.app.name) in content) ok_(unicode(escape(self.app.name)) in content) class TestReviewTranslate(RestOAuth): def setUp(self): super(TestReviewTranslate, self).setUp() self.grant_permission(self.profile, 'Apps:ModerateReview') self.create_switch('reviews-translate') user = user_factory(email='diego') app = app_factory(app_slug='myapp~-_') self.review = app.reviews.create(title=u'yes', body=u'oui', addon=app, user=user, editorreview=True, rating=4) def test_regular_call(self): res = self.client.get(reverse('reviewers.review_translate', args=[self.review.addon.app_slug, self.review.id, 'fr'])) self.assert3xx(res, 'https://translate.google.com/#auto/fr/oui', 302) @mock.patch('mkt.reviewers.views.requests') def test_ajax_call(self, requests): # Mock requests. 
response = mock.Mock(status_code=200) response.json.return_value = { u'data': { u'translations': [{ u'translatedText': u'oui', u'detectedSourceLanguage': u'fr' }] } } requests.get.return_value = response # Call translation. review = self.review url = reverse('reviewers.review_translate', args=[review.addon.app_slug, review.id, 'fr']) res = self.client.get(url, HTTP_X_REQUESTED_WITH='XMLHttpRequest') eq_(res.status_code, 200) eq_(res.content, '{"body": "oui", "title": "oui"}') @mock.patch('mkt.reviewers.views.requests') def test_invalid_api_key(self, requests): # Mock requests. response = mock.Mock(status_code=400) response.json.return_value = { 'error': { 'code': 400, 'errors': [ {'domain': 'usageLimits', 'message': 'Bad Request', 'reason': 'keyInvalid'} ], 'message': 'Bad Request' } } requests.get.return_value = response # Call translation. review = self.review res = self.client.get( reverse('reviewers.review_translate', args=[review.addon.app_slug, review.id, 'fr']), HTTP_X_REQUESTED_WITH='XMLHttpRequest') eq_(res.status_code, 400) class TestReviewHistory(mkt.site.tests.TestCase, CommTestMixin): def setUp(self): super(TestReviewHistory, self).setUp() self.app = self.addon = app_factory() self.url = reverse('reviewers.apps.review', args=[self.app.app_slug]) self.grant_permission(user_factory(email='editor'), 'Apps:Review') self.login('[email protected]') self._thread_factory() def test_comm_url(self): r = self.client.get(self.url) doc = pq(r.content) eq_(doc('#history .item-history').attr('data-comm-app-url'), reverse('api-v2:comm-app-list', args=[self.addon.app_slug]) + '?limit=1&serializer=simple') def test_comm_url_multiple_thread(self): self._thread_factory() r = self.client.get(self.url) doc = pq(r.content) eq_(doc('#history .item-history').attr('data-comm-app-url'), reverse('api-v2:comm-app-list', args=[self.addon.app_slug]) + '?limit=2&serializer=simple') def test_comm_url_no_encode(self): self.addon = app_factory(app_slug='&#21488;&#21271;') self._thread_factory() url = reverse('reviewers.apps.review', args=[self.addon.app_slug]) r = self.client.get(url) doc = pq(r.content) eq_(doc('#history .item-history').attr('data-comm-app-url'), reverse('api-v2:comm-app-list', args=[self.addon.app_slug]) + '?limit=1&serializer=simple') class ModerateLogTest(mkt.site.tests.TestCase): def setUp(self): super(ModerateLogTest, self).setUp() self.review = Review.objects.create(addon=app_factory(), body='body', user=user_factory(), rating=4, editorreview=True) self.moderator_user = user_factory(email='moderator') self.grant_permission(self.moderator_user, 'Apps:ModerateReview') mkt.set_user(self.moderator_user) self.login(self.moderator_user) self.admin_user = user_factory(email='admin') self.grant_permission(self.admin_user, '*:*') user_factory(email='regular') class TestModerateLog(ModerateLogTest, AccessMixin): def setUp(self): super(TestModerateLog, self).setUp() self.url = reverse('reviewers.apps.moderatelog') def test_log(self): r = self.client.get(self.url) eq_(r.status_code, 200) def test_start_filter(self): r = self.client.get(self.url, dict(start='2011-01-01')) eq_(r.status_code, 200) def test_enddate_filter(self): """ Make sure that if our end date is 1/1/2011, that we include items from 1/1/2011. 
""" mkt.log(mkt.LOG.APPROVE_REVIEW, self.review, self.review.addon, created=datetime(2011, 1, 1)) r = self.client.get(self.url, dict(end='2011-01-01')) eq_(r.status_code, 200) eq_(pq(r.content)('tbody td').eq(0).text(), 'Jan 1, 2011, 12:00:00 AM') def test_action_filter(self): """ Based on setup we should see only two items if we filter for deleted reviews. """ for i in xrange(2): mkt.log(mkt.LOG.APPROVE_REVIEW, self.review.addon, self.review) mkt.log(mkt.LOG.DELETE_REVIEW, self.review.addon, self.review) r = self.client.get(self.url, dict(search='deleted')) eq_(pq(r.content)('tbody tr').length, 2) def test_no_results(self): r = self.client.get(self.url, dict(end='2004-01-01')) no_results = 'No events found for this period.' assert no_results in r.content, 'Expected no results to be found.' def test_display_name_xss(self): mkt.log(mkt.LOG.APPROVE_REVIEW, self.review, self.review.addon, user=self.admin_user) self.admin_user.display_name = '<script>alert("xss")</script>' self.admin_user.save() assert '<script>' in self.admin_user.display_name, ( 'Expected <script> to be in display name') r = self.client.get(self.url) pq(r.content)('#log-listing tbody td').eq(1).html() assert '<script>' not in r.content assert '&lt;script&gt;' in r.content class TestModerateLogDetail(ModerateLogTest, AccessMixin): def setUp(self): super(TestModerateLogDetail, self).setUp() # AccessMixin needs a url property. self.url = self._url(0) def _url(self, id): return reverse('reviewers.apps.moderatelog.detail', args=[id]) def test_detail_page(self): mkt.log(mkt.LOG.APPROVE_REVIEW, self.review.addon, self.review) e_id = ActivityLog.objects.editor_events()[0].id r = self.client.get(self._url(e_id)) eq_(r.status_code, 200) def test_undelete_selfmoderation(self): e_id = mkt.log( mkt.LOG.DELETE_REVIEW, self.review.addon, self.review).id self.review.delete() r = self.client.post(self._url(e_id), {'action': 'undelete'}) eq_(r.status_code, 302) self.review = Review.objects.get(id=self.review.id) assert not self.review.deleted, 'Review should be undeleted now.' def test_undelete_admin(self): e_id = mkt.log( mkt.LOG.DELETE_REVIEW, self.review.addon, self.review).id self.review.delete() self.client.logout() self.login(self.admin_user) r = self.client.post(self._url(e_id), {'action': 'undelete'}) eq_(r.status_code, 302) self.review = Review.objects.get(id=self.review.id) assert not self.review.deleted, 'Review should be undeleted now.' def test_undelete_unauthorized(self): # Delete as admin (or any other user than the reviewer). e_id = mkt.log(mkt.LOG.DELETE_REVIEW, self.review.addon, self.review, user=self.admin_user).id self.review.delete() # Try to undelete as normal reviewer. r = self.client.post(self._url(e_id), {'action': 'undelete'}) eq_(r.status_code, 403) self.review = Review.with_deleted.get(id=self.review.id) assert self.review.deleted, 'Review shouldn`t have been undeleted.'
mkt.abuse.forms.ABUSE_REPORT_SKIP) res = self.client.get(self.url) eq_(len(res.context['page'].object_list), 2) def t
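# Illustrative sketch (not part of the original suite): the `_post` helpers
# above drive a Django formset the way a browser submission would. Assuming
# the same `formset`/`initial` test utilities used in TestModeratedQueue, a
# hand-rolled payload for the first form would look roughly like:
#
#     data = formset(initial(ctx['abuse_formset'].forms[0]))
#     data['form-0-action'] = mkt.abuse.forms.ABUSE_REPORT_READ
#     self.client.post(self.url, data)
#
# where `formset` fills in the management keys (form-TOTAL_FORMS,
# form-INITIAL_FORMS) that Django expects with every formset POST.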
visibility.rs
//! We want to make sure that providing mutable traits doesn't accidentally
//! leak the internal implementation details of a shrinkwrapped type.
//!
//! To do that, we need to make sure that the inner field has the same
//! visibility as the shrinkwrapped struct itself. If it doesn't, we can
//! give the user an error and refuse to generate implementations.

use syn;
use itertools::Itertools;

// When checking for visibility containment, we can make use of the guarantee
// that the language provides us that any visibility path must be a parent
// module of the current one. This means, for instance, that we don't have
// to worry about the possibility of the visibility paths "diverging".

#[derive(PartialEq)]
#[cfg_attr(test, derive(Debug))]
pub enum PathComponent {
    /// Effectively, this means private.
    Inherited,
    Pub,
    Crate,
    InSelf,
    InSuper,
    Mod(String),
}

#[cfg_attr(test, derive(PartialEq, Debug))]
pub enum FieldVisibility {
    /// The inner field is *at least* as visible as its containing struct.
    Visible,
    /// The inner field is less visible than its containing struct.
    Restricted,
    /// We can't figure out how the visibilities relate, probably due to the
    /// paths starting at different points (e.g. one is self and the other
    /// is ::a::b::c)
    CantDetermine,
}

/// Check what the relation between the given struct's visibility and the
/// field's visibility is.
pub fn field_visibility(
    struct_vis: &syn::Visibility,
    field_vis: &syn::Visibility,
) -> FieldVisibility {
    let struct_vis = to_path(struct_vis);
    let field_vis = to_path(field_vis);

    fn check_head(struct_vis: &[PathComponent], field_vis: &[PathComponent]) -> FieldVisibility {
        match (struct_vis.split_first(), field_vis.split_first()) {
            (_, None) | (Some((&PathComponent::Inherited, _)), _) => FieldVisibility::Visible,
            (None, _) | (_, Some((&PathComponent::Inherited, _))) => FieldVisibility::Restricted,
            (Some((sh, sr)), Some((fh, fr))) => {
                if sh == fh {
                    check_head(sr, fr)
                } else {
                    FieldVisibility::CantDetermine
                }
            }
        }
    }

    // If the field is marked `pub`, then we know it's definitely visible...
    if &field_vis == &vec![PathComponent::Pub] {
        return FieldVisibility::Visible;
    }

    // ...and if that's not the case, but the struct is marked `pub`, we know
    // the field is definitely restricted.
    if &struct_vis == &vec![PathComponent::Pub] {
        return FieldVisibility::Restricted;
    }

    check_head(&struct_vis, &field_vis)
}

fn to_path(path: &syn::Visibility) -> Vec<PathComponent> {
    use syn::Visibility::*;

    match path {
        &Public(..) => vec![PathComponent::Pub],
        &Crate(..) => vec![PathComponent::Pub, PathComponent::Crate],
        &Inherited => vec![PathComponent::Inherited],
        &Restricted(ref vis) => to_path_restricted(&vis.path),
    }
}

fn to_path_restricted(path: &syn::Path) -> Vec<PathComponent> {
    let segments = path
        .segments
        .iter()
        .map(|path_segment| &path_segment.ident)
        .collect_vec();

    match segments.split_first() {
        None => vec![],
        Some((ident, rest)) => {
            let mut result;

            if *ident == "self" {
                result = vec![PathComponent::InSelf];
            } else if *ident == "super" {
                result = vec![PathComponent::InSuper];
            } else if *ident == "crate" {
                result = vec![PathComponent::Pub, PathComponent::Crate];
            } else {
                // We add these components in non-self/super paths to allow us to
                // match them up with visibilities like `pub` and `pub(crate)`.
result = vec![ PathComponent::Pub, PathComponent::Crate, PathComponent::Mod(ident.to_string()), ]; } let rest = rest .iter() .map(|ident| PathComponent::Mod(ident.to_string())); result.extend(rest); result } } } #[cfg(test)] mod path_convert_tests { use std::convert::From; use syn::{self, Visibility}; use super::PathComponent::*; use super::{to_path, PathComponent}; impl<'a> From<&'a str> for PathComponent { fn from(input: &'a str) -> Self { Mod(input.to_string()) } } macro_rules! vis_test { ($test_name:ident => $input:expr; $($component:expr),+) => { #[test] fn $test_name() { let vis: Visibility = syn::parse_str($input) .expect("path input is structured incorrectly!"); let vis = to_path(&vis); let expected = vec![ $($component.into()),+ ]; assert_eq!(&vis, &expected); } } } vis_test!(vis_test1 => "pub"; Pub); vis_test!(vis_test2 => "pub(crate)"; Pub, Crate); vis_test!(vis_test3 => ""; Inherited); vis_test!(vis_test4 => "pub(self)"; InSelf); vis_test!(vis_test5 => "pub(super)"; InSuper); vis_test!(vis_test6 => "pub(in ::a::b::c)"; Pub, Crate, "a", "b", "c"); vis_test!(vis_test7 => "pub(in ::super::b)"; InSuper, "b"); } #[cfg(test)] mod field_visibility_tests { use syn::{self, Visibility}; use super::field_visibility; use super::FieldVisibility::*; macro_rules! field_vis_test { ($test_name:ident => $struct_vis: expr; $field_vis: expr; $vis: expr) => { #[test] fn $test_name() { let struct_vis: Visibility = syn::parse_str($struct_vis).expect("failed to parse struct visibility");
syn::parse_str($field_vis).expect("failed to parse field visibility"); let vis = field_visibility(&struct_vis, &field_vis); assert_eq!(vis, $vis); } }; } field_vis_test!(test_field_vis1 => "pub"; "pub"; Visible); field_vis_test!(test_field_vis2 => ""; ""; Visible); field_vis_test!(test_field_vis3 => "pub(in a::b::c)"; "pub(in a::b)"; Visible); field_vis_test!(test_field_vis4 => "pub(in a::b)"; "pub(in a::b::c)"; Restricted); field_vis_test!(test_field_vis5 => "pub"; "pub(crate)"; Restricted); field_vis_test!(test_field_vis6 => "pub(crate)"; "pub(in a::b::c)"; Restricted); field_vis_test!(test_field_vis7 => "pub"; ""; Restricted); field_vis_test!(test_field_vis8 => ""; "pub"; Visible); field_vis_test!(test_field_vis9 => "pub(in a::b::c)"; "pub(self)"; CantDetermine); field_vis_test!(test_field_vis10 => "pub(in a::b::c)"; "pub(super)"; CantDetermine); field_vis_test!(test_field_vis11 => "pub"; "pub(self)"; Restricted); field_vis_test!(test_field_vis12 => "pub(in a::b::c)"; "pub"; Visible); field_vis_test!(test_field_vis13 => "pub(self)"; "pub(self)"; Visible); field_vis_test!(test_field_vis14 => "pub(super)"; "pub(super)"; Visible); field_vis_test!(test_field_vis15 => "pub(crate)"; "pub(crate)"; Visible); field_vis_test!(test_field_vis16 => "pub(in a::b::c)"; "pub(in a::b::c)"; Visible); }
let field_vis: Visibility =
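// Illustrative sketch (not part of the original crate): how the containment
// check plays out on concrete paths, mirroring test_field_vis3 above. Under
// `cfg(test)`, where FieldVisibility derives PartialEq/Debug:
//
//     let struct_vis: syn::Visibility = syn::parse_str("pub(in a::b::c)").unwrap();
//     let field_vis: syn::Visibility = syn::parse_str("pub(in a::b)").unwrap();
//     // `a::b` is a parent module of `a::b::c`, so the field is at least
//     // as visible as its struct:
//     assert_eq!(field_visibility(&struct_vis, &field_vis),
//                FieldVisibility::Visible);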
overflowing-rsh-5.rs
// error-pattern:thread 'main' panicked at 'attempt to shift right with overflow' // compile-flags: -C debug-assertions #![warn(exceeding_bitshifts)] fn
() { let _n = 1i64 >> [64][0]; }
main
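// Editorial note: indexing into a literal array (`[64][0]`) keeps the shift
// amount opaque to the compiler's constant checking, so with
// `-C debug-assertions` the out-of-range shift is caught only at runtime,
// producing the panic named in the `error-pattern` line above rather than a
// compile-time error.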
extract_images.py
import h5py
import numpy as np
import matplotlib.image as mpimg
from tqdm import tqdm
import os


def clear_screen():
    """Clears the console screen irrespective of the OS used"""
    import platform
    if platform.system() == 'Windows':
        os.system('cls')
        return
    os.system('clear')


def make_folder(target_folder):
    """Creates the folder if it does not already exist at the specified path.

    Parameters:
        target_folder(str): path of the folder which needs to be created.

    Returns:
        None
    """
    if not (os.path.isdir(target_folder)):
        print(f'Creating {target_folder} folder')
        os.mkdir(target_folder)


def get_image_data(filename, path):
    """ Reads the mat image file and returns the image & mask array.

    Parameters:
        filename(str): Name of the file without the extension.
        path(str): Path where the filename is located.

    Returns:
        data(dict): A dictionary with the image & mask numpy array.
            'image': The numpy array for image.
            'mask' : The numpy array for the above image mask.
    """
    path = os.path.join(path, filename+'.mat')
    data = dict()
    # Use a context manager so each HDF5 handle is closed promptly instead
    # of leaking across the conversion loop; np.array() copies the data out.
    with h5py.File(path, 'r') as file:
        data['image'] = np.array(file.get('cjdata/image'))
        data['mask'] = np.array(file.get('cjdata/tumorMask'))
    return data


def save_image_data(filename, path, data):
    """ Saves the image & mask array in png format.

    Parameters:
        filename(str): Name of the file without the extension.
        path(str): Path where the filename is to be saved.
        data(dict): A dictionary with the image & mask numpy array.
            'image': The numpy array for image.
            'mask' : The numpy array for the above image mask.

    Returns:
        None
    """
    path_image = os.path.join(path, filename+'.png')
    path_mask = os.path.join(path, filename+'_mask.png')
    mpimg.imsave(path_image, data['image'], cmap='gray', format='png')
    mpimg.imsave(path_mask, data['mask'], cmap='gray', format='png')


def
():
    # Total number of images
    total_images = 3064
    # Dataset paths
    data_read_path = os.path.join('dataset', 'mat_dataset')
    data_save_path = os.path.join('dataset', 'png_dataset')

    clear_screen()

    # Create the save folder if it is missing.
    make_folder(data_save_path)

    print(f'Starting to save images in {data_save_path}')
    for filename in tqdm(range(1, total_images+1)):
        filename = str(filename)
        data = get_image_data(filename, data_read_path)
        save_image_data(str(int(filename)-1), data_save_path, data)


if __name__ == "__main__":
    main()
main
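# Illustrative usage sketch (not in the original script), assuming the
# dataset/mat_dataset layout that main() relies on:
#
#     data = get_image_data('1', os.path.join('dataset', 'mat_dataset'))
#     print(data['image'].shape, data['mask'].shape)  # mask matches the image
#     save_image_data('0', os.path.join('dataset', 'png_dataset'), data)
#
# Note that main() saves .mat file N as N-1.png, shifting the dataset's
# 1-based names to 0-based ones.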
day1.rs
use std::fs; // use std::collections::{HashMap, HashSet}; // use std::error::Error; // use std::io::{self, Write}; // use std::str::{self, FromStr}; // #[derive(Clone, Debug, Eq, PartialEq)] // struct Action { // act: i32, // } // impl FromStr for Action { // type Err = Box<dyn Error>; // fn from_str(s: &str) -> Result<Self, Self::Err> {} // } fn puzzle_1() { println!("Puzzle 1: {:?}", 1); } fn
() { println!("Puzzle 2: {:?}", 1); } pub fn run() { let input = fs::read_to_string("./year/2015/inputs/day1.input").expect("Error reading file."); puzzle_1(); }
puzzle_2
formatter_test.go
package formatter_test import ( "strings" . "github.com/onsi-experimental/ginkgo/v2" "github.com/onsi-experimental/ginkgo/v2/formatter" . "github.com/onsi/gomega" ) var _ = Describe("Formatter", func() { var colorMode formatter.ColorMode var f formatter.Formatter BeforeEach(func() { colorMode = formatter.ColorModeTerminal }) JustBeforeEach(func() { f = formatter.New(colorMode) }) Context("with ColorModeNone", func() { BeforeEach(func() { colorMode = formatter.ColorModeNone }) It("strips out color information", func() { Ω(f.F("{{green}}{{bold}}hi there{{/}}")).Should(Equal("hi there")) }) }) Context("with ColorModeTerminal", func() { BeforeEach(func() { colorMode = formatter.ColorModeTerminal }) It("renders the color information using terminal escape codes", func() { Ω(f.F("{{green}}{{bold}}hi there{{/}}")).Should(Equal("\x1b[38;5;10m\x1b[1mhi there\x1b[0m")) }) }) Context("with ColorModePassthrough", func() { BeforeEach(func() { colorMode = formatter.ColorModePassthrough }) It("leaves the color information as is, allowing us to test statements more easily", func() { Ω(f.F("{{green}}{{bold}}hi there{{/}}")).Should(Equal("{{green}}{{bold}}hi there{{/}}")) }) }) Describe("NewWithNoColorBool", func() { Context("when the noColor bool is true", func() { It("strips out color information", func() { f = formatter.NewWithNoColorBool(true) Ω(f.F("{{green}}{{bold}}hi there{{/}}")).Should(Equal("hi there")) }) }) Context("when the noColor bool is false", func() { It("renders the color information using terminal escape codes", func() { f = formatter.NewWithNoColorBool(false) Ω(f.F("{{green}}{{bold}}hi there{{/}}")).Should(Equal("\x1b[38;5;10m\x1b[1mhi there\x1b[0m")) }) }) }) Describe("F", func() { It("transforms the color information and sprintfs", func() { Ω(f.F("{{green}}hi there {{cyan}}%d {{yellow}}%s{{/}}", 3, "wise men")).Should(Equal("\x1b[38;5;10mhi there \x1b[38;5;14m3 \x1b[38;5;11mwise men\x1b[0m")) }) }) Describe("Fi", func() { It("transforms the color information, sprintfs, and applies an indentation", func() { Ω(f.Fi(2, "{{green}}hi there\n{{cyan}}%d {{yellow}}%s{{/}}", 3, "wise men")).Should(Equal( " \x1b[38;5;10mhi there\n \x1b[38;5;14m3 \x1b[38;5;11mwise men\x1b[0m", )) }) }) DescribeTable("Fiw", func(indentation int, maxWidth int, input string, expected ...string) { Ω(f.Fiw(uint(indentation), uint(maxWidth), input)).Should(Equal(strings.Join(expected, "\n"))) }, Entry("basic case", 0, 0, "a really long string is fine", "a really long string is fine"), Entry("indentation is accounted for in width", 1, 10, "1234 678", " 1234 678", ), Entry("indentation is accounted for in width", 1, 10, "1234 6789", " 1234", " 6789", ), Entry("when there is a nice long sentence", 0, 10, "12 456 890 1234 5", "12 456 890", "1234 5", ), Entry("when a word in a sentence intersects the boundary", 0, 10, "12 456 8901 123 45", "12 456", "8901 123", "45",
"12", "12345678901", "12 12345", "678901", "12345678901", ), ) Describe("CycleJoin", func() { It("combines elements, cycling through styles as it goes", func() { Ω(f.CycleJoin([]string{"a", "b", "c"}, "|", []string{"{{red}}", "{{green}}"})).Should(Equal( "\x1b[38;5;9ma|\x1b[38;5;10mb|\x1b[38;5;9mc\x1b[0m", )) }) }) })
), Entry("when a word in a sentence is just too long", 0, 10, "12 12345678901 12 12345 678901 12345678901",
thread.rs
// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use prelude::v1::*; use alloc::boxed::FnBox; use cmp; #[cfg(not(any(target_env = "newlib", target_os = "solaris")))] use ffi::CString; use io; use libc; use mem; use ptr; use sys::os; use time::Duration; use sys_common::thread::*; pub struct Thread { id: libc::pthread_t, } // Some platforms may have pthread_t as a pointer in which case we still want // a thread to be Send/Sync unsafe impl Send for Thread {} unsafe impl Sync for Thread {} impl Thread { pub unsafe fn new<'a>(stack: usize, p: Box<FnBox() + 'a>) -> io::Result<Thread>
pub fn yield_now() {
        let ret = unsafe { libc::sched_yield() };
        debug_assert_eq!(ret, 0);
    }

    #[cfg(any(target_os = "linux",
              target_os = "android",
              target_os = "emscripten"))]
    pub fn set_name(name: &str) {
        const PR_SET_NAME: libc::c_int = 15;
        let cname = CString::new(name).unwrap_or_else(|_| {
            panic!("thread name may not contain interior null bytes")
        });
        // pthread wrapper only appeared in glibc 2.12, so we use syscall
        // directly.
        unsafe {
            libc::prctl(PR_SET_NAME, cname.as_ptr() as libc::c_ulong, 0, 0, 0);
        }
    }

    #[cfg(any(target_os = "freebsd",
              target_os = "dragonfly",
              target_os = "bitrig",
              target_os = "openbsd"))]
    pub fn set_name(name: &str) {
        let cname = CString::new(name).unwrap();
        unsafe {
            libc::pthread_set_name_np(libc::pthread_self(), cname.as_ptr());
        }
    }

    #[cfg(any(target_os = "macos", target_os = "ios"))]
    pub fn set_name(name: &str) {
        let cname = CString::new(name).unwrap();
        unsafe {
            libc::pthread_setname_np(cname.as_ptr());
        }
    }

    #[cfg(target_os = "netbsd")]
    pub fn set_name(name: &str) {
        let cname = CString::new(&b"%s"[..]).unwrap();
        let carg = CString::new(name).unwrap();
        unsafe {
            libc::pthread_setname_np(libc::pthread_self(), cname.as_ptr(),
                                     carg.as_ptr() as *mut libc::c_void);
        }
    }

    #[cfg(any(target_env = "newlib", target_os = "solaris"))]
    pub fn set_name(_name: &str) {
        // Newlib and Illumos have no way to set a thread name.
    }

    pub fn sleep(dur: Duration) {
        let mut ts = libc::timespec {
            tv_sec: dur.as_secs() as libc::time_t,
            tv_nsec: dur.subsec_nanos() as libc::c_long,
        };

        // If we're awoken with a signal then the return value will be -1 and
        // nanosleep will fill in `ts` with the remaining time.
        unsafe {
            while libc::nanosleep(&ts, &mut ts) == -1 {
                assert_eq!(os::errno(), libc::EINTR);
            }
        }
    }

    pub fn join(self) {
        unsafe {
            let ret = libc::pthread_join(self.id, ptr::null_mut());
            mem::forget(self);
            debug_assert_eq!(ret, 0);
        }
    }

    pub fn id(&self) -> libc::pthread_t { self.id }

    pub fn into_id(self) -> libc::pthread_t {
        let id = self.id;
        mem::forget(self);
        id
    }
}

impl Drop for Thread {
    fn drop(&mut self) {
        let ret = unsafe { libc::pthread_detach(self.id) };
        debug_assert_eq!(ret, 0);
    }
}

#[cfg(all(not(all(target_os = "linux", not(target_env = "musl"))),
          not(target_os = "macos"),
          not(target_os = "bitrig"),
          not(all(target_os = "netbsd", not(target_vendor = "rumprun"))),
          not(target_os = "openbsd"),
          not(target_os = "solaris")))]
#[cfg_attr(test, allow(dead_code))]
pub mod guard {
    pub unsafe fn current() -> Option<usize> { None }
    pub unsafe fn init() -> Option<usize> { None }
}

#[cfg(any(all(target_os = "linux", not(target_env = "musl")),
          target_os = "macos",
          target_os = "bitrig",
          all(target_os = "netbsd", not(target_vendor = "rumprun")),
          target_os = "openbsd",
          target_os = "solaris"))]
#[cfg_attr(test, allow(dead_code))]
pub mod guard {
    use prelude::v1::*;

    use libc;
    use libc::mmap;
    use libc::{PROT_NONE, MAP_PRIVATE, MAP_ANON, MAP_FAILED, MAP_FIXED};
    use sys::os;

    #[cfg(any(target_os = "macos",
              target_os = "bitrig",
              target_os = "openbsd",
              target_os = "solaris"))]
    unsafe fn get_stack_start() -> Option<*mut libc::c_void> {
        current().map(|s| s as *mut libc::c_void)
    }

    #[cfg(any(target_os = "linux", target_os = "android",
              target_os = "netbsd"))]
    unsafe fn get_stack_start() -> Option<*mut libc::c_void> {
        let mut ret = None;
        let mut attr: libc::pthread_attr_t = ::mem::zeroed();
        assert_eq!(libc::pthread_attr_init(&mut attr), 0);
        if libc::pthread_getattr_np(libc::pthread_self(), &mut attr) == 0 {
            let mut stackaddr = ::ptr::null_mut();
            let mut stacksize = 0;
            assert_eq!(libc::pthread_attr_getstack(&attr, &mut
stackaddr, &mut stacksize), 0); ret = Some(stackaddr); } assert_eq!(libc::pthread_attr_destroy(&mut attr), 0); ret } pub unsafe fn init() -> Option<usize> { let psize = os::page_size(); let mut stackaddr = match get_stack_start() { Some(addr) => addr, None => return None, }; // Ensure stackaddr is page aligned! A parent process might // have reset RLIMIT_STACK to be non-page aligned. The // pthread_attr_getstack() reports the usable stack area // stackaddr < stackaddr + stacksize, so if stackaddr is not // page-aligned, calculate the fix such that stackaddr < // new_page_aligned_stackaddr < stackaddr + stacksize let remainder = (stackaddr as usize) % psize; if remainder != 0 { stackaddr = ((stackaddr as usize) + psize - remainder) as *mut libc::c_void; } // Rellocate the last page of the stack. // This ensures SIGBUS will be raised on // stack overflow. let result = mmap(stackaddr, psize as libc::size_t, PROT_NONE, MAP_PRIVATE | MAP_ANON | MAP_FIXED, -1, 0); if result != stackaddr || result == MAP_FAILED { panic!("failed to allocate a guard page"); } let offset = if cfg!(target_os = "linux") {2} else {1}; Some(stackaddr as usize + offset * psize) } #[cfg(target_os = "solaris")] pub unsafe fn current() -> Option<usize> { let mut current_stack: libc::stack_t = ::mem::zeroed(); assert_eq!(libc::stack_getbounds(&mut current_stack), 0); Some(current_stack.ss_sp as usize) } #[cfg(target_os = "macos")] pub unsafe fn current() -> Option<usize> { Some((libc::pthread_get_stackaddr_np(libc::pthread_self()) as libc::size_t - libc::pthread_get_stacksize_np(libc::pthread_self())) as usize) } #[cfg(any(target_os = "openbsd", target_os = "bitrig"))] pub unsafe fn current() -> Option<usize> { let mut current_stack: libc::stack_t = ::mem::zeroed(); assert_eq!(libc::pthread_stackseg_np(libc::pthread_self(), &mut current_stack), 0); let extra = if cfg!(target_os = "bitrig") {3} else {1} * os::page_size(); Some(if libc::pthread_main_np() == 1 { // main thread current_stack.ss_sp as usize - current_stack.ss_size as usize + extra } else { // new thread current_stack.ss_sp as usize - current_stack.ss_size as usize }) } #[cfg(any(target_os = "linux", target_os = "android", target_os = "netbsd"))] pub unsafe fn current() -> Option<usize> { let mut ret = None; let mut attr: libc::pthread_attr_t = ::mem::zeroed(); assert_eq!(libc::pthread_attr_init(&mut attr), 0); if libc::pthread_getattr_np(libc::pthread_self(), &mut attr) == 0 { let mut guardsize = 0; assert_eq!(libc::pthread_attr_getguardsize(&attr, &mut guardsize), 0); if guardsize == 0 { panic!("there is no guard page"); } let mut stackaddr = ::ptr::null_mut(); let mut size = 0; assert_eq!(libc::pthread_attr_getstack(&attr, &mut stackaddr, &mut size), 0); ret = if cfg!(target_os = "netbsd") { Some(stackaddr as usize) } else { Some(stackaddr as usize + guardsize as usize) }; } assert_eq!(libc::pthread_attr_destroy(&mut attr), 0); ret } } // glibc >= 2.15 has a __pthread_get_minstack() function that returns // PTHREAD_STACK_MIN plus however many bytes are needed for thread-local // storage. We need that information to avoid blowing up when a small stack // is created in an application with big thread-local storage requirements. // See #6233 for rationale and details. 
#[cfg(target_os = "linux")] #[allow(deprecated)] fn min_stack_size(attr: *const libc::pthread_attr_t) -> usize { weak!(fn __pthread_get_minstack(*const libc::pthread_attr_t) -> libc::size_t); match __pthread_get_minstack.get() { None => libc::PTHREAD_STACK_MIN as usize, Some(f) => unsafe { f(attr) as usize }, } } // No point in looking up __pthread_get_minstack() on non-glibc // platforms. #[cfg(all(not(target_os = "linux"), not(target_os = "netbsd")))] fn min_stack_size(_: *const libc::pthread_attr_t) -> usize { libc::PTHREAD_STACK_MIN as usize } #[cfg(target_os = "netbsd")] fn min_stack_size(_: *const libc::pthread_attr_t) -> usize { 2048 // just a guess }
{ let p = box p; let mut native: libc::pthread_t = mem::zeroed(); let mut attr: libc::pthread_attr_t = mem::zeroed(); assert_eq!(libc::pthread_attr_init(&mut attr), 0); let stack_size = cmp::max(stack, min_stack_size(&attr)); match libc::pthread_attr_setstacksize(&mut attr, stack_size as libc::size_t) { 0 => {} n => { assert_eq!(n, libc::EINVAL); // EINVAL means |stack_size| is either too small or not a // multiple of the system page size. Because it's definitely // >= PTHREAD_STACK_MIN, it must be an alignment issue. // Round up to the nearest page and try again. let page_size = os::page_size(); let stack_size = (stack_size + page_size - 1) & (-(page_size as isize - 1) as usize - 1); let stack_size = stack_size as libc::size_t; assert_eq!(libc::pthread_attr_setstacksize(&mut attr, stack_size), 0); } }; let ret = libc::pthread_create(&mut native, &attr, thread_start, &*p as *const _ as *mut _); assert_eq!(libc::pthread_attr_destroy(&mut attr), 0); return if ret != 0 { Err(io::Error::from_raw_os_error(ret)) } else { mem::forget(p); // ownership passed to pthread_create Ok(Thread { id: native }) }; extern fn thread_start(main: *mut libc::c_void) -> *mut libc::c_void { unsafe { start_thread(main); } ptr::null_mut() } }
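One detail of Thread::new worth spelling out: when pthread_attr_setstacksize returns EINVAL, the requested size is rounded up to the next page boundary with a single bitmask, and the expression `-(page_size as isize - 1) as usize - 1` is just `!(page_size - 1)` written out. A small Go sketch of the same round-up arithmetic, valid whenever the page size is a power of two (as system page sizes are):

// Round size up to the next multiple of a power-of-two page size.
package main

import "fmt"

func roundUp(size, page uintptr) uintptr {
	// (size + page - 1) with the low bits cleared; correct only
	// when page is a power of two.
	return (size + page - 1) &^ (page - 1)
}

func main() {
	fmt.Println(roundUp(20481, 4096)) // 24576
	fmt.Println(roundUp(24576, 4096)) // 24576 (already aligned)
}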
service_definitions.rs
// Copyright 2020 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. //! Service Discovery Protocol (SDP) record definitions for the Hands-Free //! and Audio Gateway roles. use {bitflags::bitflags, fidl_fuchsia_bluetooth_bredr as bredr, fuchsia_bluetooth::types::Uuid}; use crate::config::AudioGatewayFeatureSupport; /// SDP Attribute ID for Network. /// Defined in Assigned Numbers for SDP /// https://www.bluetooth.com/specifications/assigned-numbers/service-discovery const ATTR_ID_HFP_NETWORK: u16 = 0x0301; /// SDP Attribute ID for the Supported Features of HFP. /// Defined in Assigned Numbers for SDP /// https://www.bluetooth.com/specifications/assigned-numbers/service-discovery const ATTR_ID_HFP_SUPPORTED_FEATURES: u16 = 0x0311; /// Major Version of HFP implementation const PROFILE_MAJOR_VERSION: u8 = 1; /// Minor Version of HFP implementation const PROFILE_MINOR_VERSION: u8 = 8; bitflags! { struct AudioGatewayFeaturesSdpAttribute: u16 { const THREE_WAY_CALLING = 0b0000_0001; const ECHO_CANCELATION_AND_NOISE_REDUCTION = 0b0000_0010; const VOICE_RECOGNITION = 0b0000_0100; const IN_BAND_RING = 0b0000_1000; const ATTACH_NUMBER_TO_VOICE_TAG = 0b0001_0000; const WIDEBAND_SPEECH = 0b0010_0000; const ENHANCED_VOICE_RECOGNITION = 0b0100_0000; const ENHANCED_VOICE_RECOGNITION_TEXT = 0b1000_0000; /// Defined by HFP v1.8, Table 5.3: Service Record for the AG const DEFAULT = Self::THREE_WAY_CALLING.bits | Self::IN_BAND_RING.bits; } } impl From<AudioGatewayFeatureSupport> for AudioGatewayFeaturesSdpAttribute { fn from(features: AudioGatewayFeatureSupport) -> Self { let mut value = Self::empty(); value.set(Self::THREE_WAY_CALLING, features.three_way_calling); value.set( Self::ECHO_CANCELATION_AND_NOISE_REDUCTION, features.echo_canceling_and_noise_reduction, ); value.set(Self::IN_BAND_RING, features.in_band_ringtone); value.set(Self::ATTACH_NUMBER_TO_VOICE_TAG, features.attach_phone_number_to_voice_tag); value.set(Self::WIDEBAND_SPEECH, features.wide_band_speech); value.set(Self::VOICE_RECOGNITION, features.voice_recognition); value.set(Self::ENHANCED_VOICE_RECOGNITION, features.enhanced_voice_recognition); value.set( Self::ENHANCED_VOICE_RECOGNITION_TEXT, features.enhanced_voice_recognition_with_text, ); value } } /// Make the SDP definition for the HFP Audio Gateway service. /// See HFP v1.8, Table 5.3. pub fn audio_gateway(features: AudioGatewayFeatureSupport) -> bredr::ServiceDefinition { let network_supports_reject_call = features.reject_incoming_voice_call; let supported_features = AudioGatewayFeaturesSdpAttribute::from(features).bits(); bredr::ServiceDefinition {
service_class_uuids: Some(vec![ Uuid::new16(bredr::ServiceClassProfileIdentifier::HandsfreeAudioGateway as u16).into(), Uuid::new16(bredr::ServiceClassProfileIdentifier::GenericAudio as u16).into(), ]), protocol_descriptor_list: Some(vec![ bredr::ProtocolDescriptor { protocol: bredr::ProtocolIdentifier::L2Cap, params: vec![], }, bredr::ProtocolDescriptor { protocol: bredr::ProtocolIdentifier::Rfcomm, params: vec![], }, ]), profile_descriptors: Some(vec![bredr::ProfileDescriptor { profile_id: bredr::ServiceClassProfileIdentifier::HandsfreeAudioGateway, major_version: PROFILE_MAJOR_VERSION, minor_version: PROFILE_MINOR_VERSION, }]), additional_attributes: Some(vec![ bredr::Attribute { id: ATTR_ID_HFP_NETWORK, element: bredr::DataElement::Uint8(network_supports_reject_call.into()), }, bredr::Attribute { id: ATTR_ID_HFP_SUPPORTED_FEATURES, element: bredr::DataElement::Uint16(supported_features), }, ]), ..bredr::ServiceDefinition::EMPTY } } #[cfg(test)] mod tests { use super::*; /// Make sure all the flags are correctly mapped in the `From` implementation. /// Bit flags are independent so a test covering each flag independently is sufficient #[test] fn ag_features_sdp_attr_from_ag_features() { let three_way_calling = AudioGatewayFeatureSupport { three_way_calling: true, ..Default::default() }; assert_eq!(AudioGatewayFeaturesSdpAttribute::THREE_WAY_CALLING, three_way_calling.into()); let ec_nr = AudioGatewayFeatureSupport { echo_canceling_and_noise_reduction: true, ..Default::default() }; assert_eq!( AudioGatewayFeaturesSdpAttribute::ECHO_CANCELATION_AND_NOISE_REDUCTION, ec_nr.into() ); let in_band_ringtone = AudioGatewayFeatureSupport { in_band_ringtone: true, ..Default::default() }; assert_eq!(AudioGatewayFeaturesSdpAttribute::IN_BAND_RING, in_band_ringtone.into()); let vr = AudioGatewayFeatureSupport { voice_recognition: true, ..Default::default() }; assert_eq!(AudioGatewayFeaturesSdpAttribute::VOICE_RECOGNITION, vr.into()); let attach_phone_number_to_voice_tag = AudioGatewayFeatureSupport { attach_phone_number_to_voice_tag: true, ..Default::default() }; assert_eq!( AudioGatewayFeaturesSdpAttribute::ATTACH_NUMBER_TO_VOICE_TAG, attach_phone_number_to_voice_tag.into() ); let wide_band_speech = AudioGatewayFeatureSupport { wide_band_speech: true, ..Default::default() }; assert_eq!(AudioGatewayFeaturesSdpAttribute::WIDEBAND_SPEECH, wide_band_speech.into()); let enhanced_voice_recognition = AudioGatewayFeatureSupport { enhanced_voice_recognition: true, ..Default::default() }; assert_eq!( AudioGatewayFeaturesSdpAttribute::ENHANCED_VOICE_RECOGNITION, enhanced_voice_recognition.into() ); let enhanced_voice_recognition_with_text = AudioGatewayFeatureSupport { enhanced_voice_recognition_with_text: true, ..Default::default() }; assert_eq!( AudioGatewayFeaturesSdpAttribute::ENHANCED_VOICE_RECOGNITION_TEXT, enhanced_voice_recognition_with_text.into() ); } }
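The From impl and its bit-per-flag test above are a straightforward packing of independent feature booleans into the 16-bit SupportedFeatures attribute value. A hypothetical Go sketch of the same packing; the constant names and the helper are ours for illustration, not part of any Fuchsia API:

// Packing feature booleans into a 16-bit SDP "SupportedFeatures" value.
package main

import "fmt"

// Bit positions mirror the bitflags declaration above.
const (
	ThreeWayCalling uint16 = 1 << iota
	EchoCancelationAndNoiseReduction
	VoiceRecognition
	InBandRing
	AttachNumberToVoiceTag
	WidebandSpeech
	EnhancedVoiceRecognition
	EnhancedVoiceRecognitionText
)

func supportedFeatures(threeWay, inBandRing bool) uint16 {
	var v uint16
	if threeWay {
		v |= ThreeWayCalling
	}
	if inBandRing {
		v |= InBandRing
	}
	return v
}

func main() {
	// HFP v1.8 Table 5.3 default: three-way calling + in-band ring.
	fmt.Printf("%#b\n", supportedFeatures(true, true)) // 0b1001
}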
index.ts
import Notifications from './notification';
export default Notifications;
lexer_test.go
package lexer

import (
	"testing"

	// The test body below needs the `token` package; the exact import path
	// depends on this repo's module path, so the one used here is an
	// assumption and may need adjusting.
	"monkey/token"
)

func TestNextToken(t *testing.T)
{ input := ` let five = 5; let ten = 10; let add = fn(x, y) { x + y; }; let result = add(five, ten); !-/*0; 2 < 10 > 7; if (5 < 10) { return true; } else { return false; } 10 == 10; 10 != 9; "foobar"; "foo bar"; [1, 2]; {"foo": "bar"}; // comment let a = 1; // inline comment let b = 123.45; let c = 0.678; let d = 9.0; ` tests := []struct { expectedType token.Type expectedLiteral string }{ {token.LET, "let"}, {token.IDENT, "five"}, {token.ASSIGN, "="}, {token.INT, "5"}, {token.SEMICOLON, ";"}, {token.LET, "let"}, {token.IDENT, "ten"}, {token.ASSIGN, "="}, {token.INT, "10"}, {token.SEMICOLON, ";"}, {token.LET, "let"}, {token.IDENT, "add"}, {token.ASSIGN, "="}, {token.FUNCTION, "fn"}, {token.LPAREN, "("}, {token.IDENT, "x"}, {token.COMMA, ","}, {token.IDENT, "y"}, {token.RPAREN, ")"}, {token.LBRACE, "{"}, {token.IDENT, "x"}, {token.PLUS, "+"}, {token.IDENT, "y"}, {token.SEMICOLON, ";"}, {token.RBRACE, "}"}, {token.SEMICOLON, ";"}, {token.LET, "let"}, {token.IDENT, "result"}, {token.ASSIGN, "="}, {token.IDENT, "add"}, {token.LPAREN, "("}, {token.IDENT, "five"}, {token.COMMA, ","}, {token.IDENT, "ten"}, {token.RPAREN, ")"}, {token.SEMICOLON, ";"}, {token.BANG, "!"}, {token.MINUS, "-"}, {token.SLASH, "/"}, {token.ASTARISK, "*"}, {token.INT, "0"}, {token.SEMICOLON, ";"}, {token.INT, "2"}, {token.LT, "<"}, {token.INT, "10"}, {token.GT, ">"}, {token.INT, "7"}, {token.SEMICOLON, ";"}, {token.IF, "if"}, {token.LPAREN, "("}, {token.INT, "5"}, {token.LT, "<"}, {token.INT, "10"}, {token.RPAREN, ")"}, {token.LBRACE, "{"}, {token.RETURN, "return"}, {token.TRUE, "true"}, {token.SEMICOLON, ";"}, {token.RBRACE, "}"}, {token.ELSE, "else"}, {token.LBRACE, "{"}, {token.RETURN, "return"}, {token.FALSE, "false"}, {token.SEMICOLON, ";"}, {token.RBRACE, "}"}, {token.INT, "10"}, {token.EQ, "=="}, {token.INT, "10"}, {token.SEMICOLON, ";"}, {token.INT, "10"}, {token.NEQ, "!="}, {token.INT, "9"}, {token.SEMICOLON, ";"}, {token.STRING, "foobar"}, {token.SEMICOLON, ";"}, {token.STRING, "foo bar"}, {token.SEMICOLON, ";"}, {token.LBRACKET, "["}, {token.INT, "1"}, {token.COMMA, ","}, {token.INT, "2"}, {token.RBRACKET, "]"}, {token.SEMICOLON, ";"}, {token.LBRACE, "{"}, {token.STRING, "foo"}, {token.COLON, ":"}, {token.STRING, "bar"}, {token.RBRACE, "}"}, {token.SEMICOLON, ";"}, {token.LET, "let"}, {token.IDENT, "a"}, {token.ASSIGN, "="}, {token.INT, "1"}, {token.SEMICOLON, ";"}, {token.LET, "let"}, {token.IDENT, "b"}, {token.ASSIGN, "="}, {token.FLOAT, "123.45"}, {token.SEMICOLON, ";"}, {token.LET, "let"}, {token.IDENT, "c"}, {token.ASSIGN, "="}, {token.FLOAT, "0.678"}, {token.SEMICOLON, ";"}, {token.LET, "let"}, {token.IDENT, "d"}, {token.ASSIGN, "="}, {token.FLOAT, "9.0"}, {token.SEMICOLON, ";"}, {token.EOF, ""}, } l := New(input) for i, tt := range tests { tok := l.NextToken() if tok.Type != tt.expectedType { t.Logf("tests[%d] - tok: %#v", i, tok) t.Fatalf("tests[%d] - token type wrong. expected=%q, got=%q", i, tt.expectedType, tok.Type) } if tok.Literal != tt.expectedLiteral { t.Logf("tests[%d] - tok: %#v", i, tok) t.Fatalf("tests[%d] - literal wrong. expected=%q, got=%q", i, tt.expectedLiteral, tok.Literal) } } }
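Most tokens in this table are single characters, but the `==` and `!=` cases force the lexer to peek one character past `=` or `!` before deciding which token to emit. A self-contained Go sketch of that one-character lookahead decision; the helper is illustrative, not this repo's actual lexer:

// Lookahead decision for '=' and '!' at position i: emit the
// two-character operator when the next byte is '=', else the single one.
package main

import "fmt"

func nextOp(input string, i int) string {
	if i+1 < len(input) && input[i+1] == '=' {
		return input[i : i+2] // "==" or "!="
	}
	return input[i : i+1] // "=" or "!"
}

func main() {
	fmt.Println(nextOp("10 == 10;", 3)) // ==
	fmt.Println(nextOp("!x", 0))        // !
}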
docs.js
/* global anchors: false, ClipboardJS: false, Holder: false */ (function ($) { 'use strict' $(function () { // Anchor anchors.options = { icon: '#' } anchors.add('.bd-content > h2, .bd-content > h3, .bd-content > h4, .bd-content > h5') $('.bd-content > h2, .bd-content > h3, .bd-content > h4, .bd-content > h5').wrapInner('<div></div>') // Clipboard $('div.highlight, figure.highlight').each(function () { var btnHtml = '<div class="bd-clipboard"><button class="btn-clipboard" title="Copy to clipboard">Copy</button></div>' $(this).before(btnHtml) $('.btn-clipboard').on('mouseleave', function () { $(this).tooltip('hide') }).tooltip() }) var clipboard = new ClipboardJS('.btn-clipboard', { target: function (trigger) { return trigger.parentNode.nextElementSibling } }) clipboard.on('error', function (e) { var key = /Mac/i.test(navigator.userAgent) ? '\u2318' : 'Ctrl-' var msg = 'Press ' + key + 'C to copy' $(e.trigger) .attr('title', msg) .tooltip('_fixTitle') .tooltip('show') .attr('title', 'Copy to clipboard') .tooltip('_fixTitle') }) clipboard.on('success', function (e) { $(e.trigger) .attr('title', 'Copied!') .tooltip('_fixTitle') .tooltip('show') .attr('title', 'Copy to clipboard') .tooltip('_fixTitle') e.clearSelection() }) // Disable empty links in docs examples $('.bd-content [href="#"]').on('click', function (e) { e.preventDefault() }) // Docsearch /* docsearch({ algoliaOptions: { facetFilters: ['version: 4.6'] }, apiKey: 'c1af50add5aa791153ec947a3035b0c4', debug: false, handleSelected: function (input, event, suggestion) { var url = suggestion.url url = suggestion.isLvl1 ? url.split('#')[0] : url window.location.href = url }, indexName: 'daemonite_material', inputSelector: '#doc-search', transformData: function (hits) { return hits.map(function (hit) { var siteurl = document.getElementById('doc-search').getAttribute('data-siteurl') var urlRE = /^https?:\/\/djibe\.github\.io/ hit.url = siteurl.match(urlRE) ? 
hit.url : hit.url.replace(urlRE, '') return hit }) } }) */ // Floating labels $('.floating-label .custom-select, .floating-label .form-control').floatinglabel() // Holder Holder.addTheme('gray', { bg: '#424242', fg: 'rgba(255, 255, 255, .7)', fontweight: 'normal' }) // Indeterminate checkbox example $('.bd-example-indeterminate [type="checkbox"]').prop('indeterminate', true) // Modal $('#exampleModal').on('show.bs.modal', function (event) { var $button = $(event.relatedTarget) var $modal = $(this) var recipient = $button.data('whatever') $modal.find('.modal-body input').val(recipient) $modal.find('.modal-title').text('New message to ' + recipient) }) // Navbar examples $('.navbar-brand img[src="/material/assets/brand/bootstrap-solid.svg"]') .addClass('mr-2 rounded') .attr({ height : 36, src : '/material/apple-touch-icon.png', width : 36 }) // Pickers $('#exampleInputDatePicker1').pickdate() $('#exampleInputDatePicker2').pickdate({ cancel : 'Clear', closeOnCancel : false, closeOnSelect : true, container : 'body', containerHidden : 'body', firstDay : 1, format : 'You selecte!d: dddd, d mm, yy', formatSubmit : 'dd/mmmm/yyyy', hiddenPrefix : 'prefix_', hiddenSuffix : '_suffix', labelMonthNext : 'Go to the next month', labelMonthPrev : 'Go to the previous month', labelMonthSelect : 'Choose a month from the dropdown menu', labelYearSelect : 'Choose a year from the dropdown menu', ok : 'Close', onClose : function () { console.log('Datepicker closes') // eslint-disable-line no-console }, onOpen : function () { console.log('Datepicker opens') // eslint-disable-line no-console }, selectMonths : true, selectYears : 10, today : 'Today' }) $('#exampleInputDatePicker3').pickdate({ max : true, min : -10 }) $('#exampleInputDatePicker4').pickdate({ max : new Date(2016, 1, 13), min : new Date(2016, 0, 3) }) $('#exampleInputDatePicker5').pickdate({
}) $('#exampleInputDatePicker6').pickdate({ disable: [ new Date(2016, 0, 16), new Date(2016, 0, 20), [2016, 0, 24] ] }) $('#exampleInputDatePicker7').pickdate({ disable: [ 1, 2, 3 ] }) $('#exampleInputDatePicker8').pickdate({ disable: [ { from: new Date(2016, 0, 16), to: [2016, 0, 24] } ] }) $('#exampleInputDatePicker9').pickdate({ disable: [ { from: -10, to: true } ] }) $('#exampleInputDatePicker10').pickdate({ disable: [ { from: [2016, 0, 16], to: 10 } ] }) // Progress bar $('.bd-toggle-animated-progress').on('click', function () { $(this).siblings('.progress').find('.progress-bar-striped').toggleClass('progress-bar-animated') }) // Snackbar $('#toast-demo1-btn').on('click', function () { $('#toast-demo1').toast('show') }) $('#toast-demo2-btn').on('click', function () { $('#toast-demo2').toast('show') }) $('#toast-demo3-btn').on('click', function () { $('#toast-demo3').toast('show') }) $('#toast-demo4-btn').on('click', function () { $('#toast-demo4').toast('show') }) $('#toast-demo5-btn').on('click', function () { $('#toast-demo5').toast('show') }) // Toolbar $(window).on('scroll', function () { if ($(window).scrollTop() > 0) { $('.toolbar-waterfall').addClass('waterfall') } else { $('.toolbar-waterfall').removeClass('waterfall') } }) // Tooltip $('[data-toggle="popover"]').popover() $('.bd-example .toast') .toast({ autohide: false }) .toast('show') // Live toast demo $('#liveToastBtn').click(function () { $('#liveToast').toast('show') }) // Demos within modals $('.tooltip-test').tooltip() $('.popover-test').popover() $('.tooltip-demo').tooltip({ container: 'body', selector: '[data-toggle="tooltip"]' }) }) }(jQuery))
max : [2016, 1, 13], min : [2016, 0, 3]
error_test.go
package horizonclient import ( "testing" "github.com/stellar/go/support/render/problem" "github.com/stretchr/testify/assert" ) func TestError_Error(t *testing.T) { var herr Error // transaction failed happy path: with the appropriate extra fields herr = Error{ Problem: problem.P{ Title: "Transaction Failed", Type: "transaction_failed", Extras: map[string]interface{}{ "result_codes": map[string]interface{}{ "transaction": "tx_failed", "operations": []string{"op_underfunded", "op_already_exists"}, }, }, }, } assert.Equal(t, `horizon error: "Transaction Failed" (tx_failed, op_underfunded, op_already_exists) - check horizon.Error.Problem for more information`, herr.Error()) // transaction failed sad path: missing result_codes extra herr = Error{ Problem: problem.P{ Title: "Transaction Failed", Type: "transaction_failed", Extras: map[string]interface{}{}, }, } assert.Equal(t, `horizon error: "Transaction Failed" - check horizon.Error.Problem for more information`, herr.Error()) // transaction failed sad path: unparseable result_codes extra herr = Error{ Problem: problem.P{ Title: "Transaction Failed", Type: "transaction_failed", Extras: map[string]interface{}{ "result_codes": "kaboom", }, }, } assert.Equal(t, `horizon error: "Transaction Failed" - check horizon.Error.Problem for more information`, herr.Error()) // non-transaction errors herr = Error{ Problem: problem.P{ Type: "https://stellar.org/horizon-errors/not_found", Title: "Resource Missing", Status: 404, }, } assert.Equal(t, `horizon error: "Resource Missing" - check horizon.Error.Problem for more information`, herr.Error()) } func TestError_ResultCodes(t *testing.T)
func TestError_ResultString(t *testing.T) { var herr Error // happy path: transaction_failed with the appropriate extra fields herr.Problem.Type = "transaction_failed" herr.Problem.Extras = make(map[string]interface{}) herr.Problem.Extras["result_xdr"] = "AAAAAAAAAMj/////AAAAAgAAAAAAAAAA/////wAAAAAAAAAAAAAAAAAAAAA=" trs, err := herr.ResultString() if assert.NoError(t, err) { assert.Equal(t, "AAAAAAAAAMj/////AAAAAgAAAAAAAAAA/////wAAAAAAAAAAAAAAAAAAAAA=", trs) } // sad path: missing result_xdr extra herr.Problem.Type = "transaction_failed" herr.Problem.Extras = make(map[string]interface{}) _, err = herr.ResultString() assert.Equal(t, ErrResultNotPopulated, err) // sad path: unparseable result_xdr extra herr.Problem.Type = "transaction_failed" herr.Problem.Extras = make(map[string]interface{}) herr.Problem.Extras["result_xdr"] = 1234 _, err = herr.ResultString() assert.Error(t, err) } func TestError_Envelope(t *testing.T) { var herr Error // happy path: transaction_failed with the appropriate extra fields herr.Problem.Type = "transaction_failed" herr.Problem.Extras = make(map[string]interface{}) herr.Problem.Extras["envelope_xdr"] = `AAAAADSMMRmQGDH6EJzkgi/7PoKhphMHyNGQgDp2tlS/dhGXAAAAZAAT3TUAAAAwAAAAAAAAAAAAAAABAAAAAAAAAAMAAAABSU5SAAAAAAA0jDEZkBgx+hCc5IIv+z6CoaYTB8jRkIA6drZUv3YRlwAAAAFVU0QAAAAAADSMMRmQGDH6EJzkgi/7PoKhphMHyNGQgDp2tlS/dhGXAAAAAAX14QAAAAAKAAAAAQAAAAAAAAAAAAAAAAAAAAG/dhGXAAAAQLuStfImg0OeeGAQmvLkJSZ1MPSkCzCYNbGqX5oYNuuOqZ5SmWhEsC7uOD9ha4V7KengiwNlc0oMNqBVo22S7gk=` _, err := herr.Envelope() assert.NoError(t, err) // sad path: missing envelope_xdr extra herr.Problem.Extras = make(map[string]interface{}) _, err = herr.Envelope() assert.Equal(t, ErrEnvelopeNotPopulated, err) // sad path: unparseable envelope_xdr extra herr.Problem.Extras = make(map[string]interface{}) herr.Problem.Extras["envelope_xdr"] = "AAAAADSMMRmQGDH6EJzkgi" _, err = herr.Envelope() if assert.Error(t, err) { assert.Contains(t, err.Error(), "xdr decode") } }
{ var herr Error // happy path: transaction_failed with the appropriate extra fields herr.Problem.Type = "transaction_failed" herr.Problem.Extras = make(map[string]interface{}) herr.Problem.Extras["result_codes"] = map[string]interface{}{ "transaction": "tx_failed", "operations": []string{"op_underfunded", "op_already_exists"}, } trc, err := herr.ResultCodes() if assert.NoError(t, err) { assert.Equal(t, "tx_failed", trc.TransactionCode) if assert.Len(t, trc.OperationCodes, 2) { assert.Equal(t, "op_underfunded", trc.OperationCodes[0]) assert.Equal(t, "op_already_exists", trc.OperationCodes[1]) } } // sad path: missing result_codes extra herr.Problem.Type = "transaction_failed" herr.Problem.Extras = make(map[string]interface{}) _, err = herr.ResultCodes() assert.Equal(t, ErrResultCodesNotPopulated, err) // sad path: unparseable result_codes extra herr.Problem.Type = "transaction_failed" herr.Problem.Extras = make(map[string]interface{}) herr.Problem.Extras["result_codes"] = "kaboom" _, err = herr.ResultCodes() assert.Error(t, err) }
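The happy and sad paths above all come down to pulling typed data out of the untyped `Extras` map: a nested map decodes into the expected structure, while a bare string like "kaboom" cannot. A Go sketch of one way to express that decoding via a JSON round trip — it mirrors the observable behavior of the tests, but is not necessarily how horizonclient implements it internally:

// Extracting a typed structure from an untyped extras map.
package main

import (
	"encoding/json"
	"fmt"
)

type ResultCodes struct {
	TransactionCode string   `json:"transaction"`
	OperationCodes  []string `json:"operations"`
}

func extractResultCodes(extras map[string]interface{}) (*ResultCodes, error) {
	raw, ok := extras["result_codes"]
	if !ok {
		return nil, fmt.Errorf("result_codes not populated")
	}
	b, err := json.Marshal(raw)
	if err != nil {
		return nil, err
	}
	var rc ResultCodes
	if err := json.Unmarshal(b, &rc); err != nil {
		return nil, err // e.g. extras["result_codes"] == "kaboom"
	}
	return &rc, nil
}

func main() {
	extras := map[string]interface{}{
		"result_codes": map[string]interface{}{
			"transaction": "tx_failed",
			"operations":  []string{"op_underfunded"},
		},
	}
	rc, err := extractResultCodes(extras)
	fmt.Println(rc, err) // &{tx_failed [op_underfunded]} <nil>
}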
calendar-item.factory.ts
import { Injectable } from '@angular/core';
import { DayItem, MonthItem, YearItem, DecadeItem } from './calendar-item.class';
import { ALuDateAdapter } from '@lucca-front/ng/core';

@Injectable()
export class LuCalendarItemFactory<D> {
	constructor(
		private _adapter: ALuDateAdapter<D>,
	) {}
	forgeDay(d: D, format = 'd'): DayItem<D> {
		const date = this._adapter.clone(d);
		return new DayItem(date, this._adapter.format(date, format));
	}
	forgeMonth(d: D, format = 'MMM'): MonthItem<D> {
		const year = this._adapter.getYear(d);
		const month = this._adapter.getMonth(d);
		const monthStart = this._adapter.forge(year, month, 1);
		return new MonthItem(monthStart, this._adapter.format(monthStart, format));
	}
	forgeYear(d: D, format = 'y'): YearItem<D> {
		const year = this._adapter.getYear(d);
		const yearStart = this._adapter.forge(year, 1, 1);
		return new YearItem(yearStart, this._adapter.format(yearStart, format));
	}
	forgeDecade(d: D, format = 'y'): DecadeItem<D> {
		const year = this._adapter.getYear(d);
		const decadeStart = this._adapter.forge(10 * Math.floor(year / 10), 1, 1);
return new DecadeItem(decadeStart, label); } }
const decadeEnd = this._adapter.forge(10 * Math.floor(year / 10) + 9, 1, 1); const label = `${this._adapter.format(decadeStart, format)} - ${this._adapter.format(decadeEnd, format)}`;
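forgeDecade floors the year to its decade with `10 * Math.floor(year / 10)` and labels the range from the decade's first and last years (e.g. "2020 - 2029"). The same arithmetic as a standalone Go sketch:

// Decade bounds: floor the year to its decade start, then add 9.
package main

import "fmt"

func decadeBounds(year int) (start, end int) {
	start = 10 * (year / 10) // integer division floors for positive years
	return start, start + 9
}

func main() {
	s, e := decadeBounds(2024)
	fmt.Printf("%d - %d\n", s, e) // 2020 - 2029
}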
private-impl-method.rs
mod a { pub struct Foo {
impl Foo { fn foo(&self) {} } } fn f() { impl a::Foo { fn bar(&self) {} // This should be visible outside `f` } } fn main() { let s = a::Foo { x: 1 }; s.bar(); s.foo(); //~ ERROR associated function `foo` is private }
pub x: isize }
agent.py
from abc import ABCMeta, abstractmethod
from multiprocessing import Process, Value
import numpy as np
from flare.common.log import GameLogEntry
from flare.common.communicator import AgentCommunicator
from flare.common.replay_buffer import NoReplacementQueue, ReplayBuffer, Experience


class AgentHelper(object):
    """
    AgentHelper abstracts some part of Agent's data processing and the I/O
    communication between Agent and ComputationDataProcessor (CDP). It
    receives a Communicator from one CDP and uses it to send data to the CDP.
    """
    __metaclass__ = ABCMeta

    def __init__(self, name, communicator, sample_interval):
        assert isinstance(communicator, AgentCommunicator)
        self.name = name
        self.comm = communicator
        self.counter = 0
        assert sample_interval >= 2
        self.sample_interval = sample_interval

    def unpack_exps(self, exp_seqs):
        """
        The input `exp_seqs` is always a list of sequences, each sequence
        containing multiple Experience instances.
        """

        def concat_lists(lists):
            return [x for l in lists for x in l]

        def extract_key(seq, k):
            assert seq
            return [e.val(k) for e in seq]

        ret = dict(
            inputs={},
            next_inputs={},
            next_alive={},
            rewards={},
            actions={},
            next_actions={},
            states=None,
            next_states=None)

        for k in self.input_keys:
            ipt_seqs = [extract_key(exp_seq, k) for exp_seq in exp_seqs]
            ret["inputs"][k] = [ipt_seq[:-1] for ipt_seq in ipt_seqs]
            ret["next_inputs"][k] = [ipt_seq[1:] for ipt_seq in ipt_seqs]

        for k in self.action_keys:
            act_seqs = [extract_key(exp_seq, k) for exp_seq in exp_seqs]
            ret["actions"][k] = [act_seq[:-1] for act_seq in act_seqs]
            ret["next_actions"][k] = [act_seq[1:] for act_seq in act_seqs]

        for k in self.reward_keys:
            ret["rewards"][
                k] = [extract_key(exp_seq[:-1], k) for exp_seq in exp_seqs]

        if self.state_keys:
            ret["states"] = dict()
            ret["next_states"] = dict()

        for k in self.state_keys:
            ## we only take the first element of a seq for states and the
            ## second element for next_states
            ret["states"][
                k] = [extract_key(exp_seq[:1], k)[0] for exp_seq in exp_seqs]
            ret["next_states"][k] = [
                extract_key(exp_seq[1:2], k)[0] for exp_seq in exp_seqs
            ]

        ret["next_alive"]["alive"] \
            = [extract_key(exp_seq[1:], "alive") for exp_seq in exp_seqs]

        ## HERE we decide whether the data are instances or seqs
        ## according to the existence of states
        if not self.state_keys:  # sample instances
            for k in ret.keys():
                if ret[k] is not None:
                    for kk in ret[k].keys():
                        ret[k][kk] = concat_lists(ret[k][kk])
        return ret, len(exp_seqs)

    def predict(self, inputs, states=dict()):
        """
        Process the input data (if necessary), send them to CDP for
        prediction, and receive the outcome.

        Args:
            inputs(dict): data used for prediction. It is the caller's job
            to make sure inputs contains all data needed and they are in the
            right form.
        """
        data = dict(inputs=inputs, states=states)
        self.comm.put_prediction_data(data, 1)
        ret = self.comm.get_prediction_return()
        return ret

    @abstractmethod
    def add_experience(self, e):
        """
        Implements how to record an experience.
        Will be called by self._store_data()
        """
        pass

    def _store_data(self, alive, data):
        """
        Store the past experience for later use, e.g., experience replay.

        Args:
            data(dict): data to store.
        """
        assert isinstance(data, dict)
        data["alive"] = [alive]
        t = Experience(data)
        self.add_experience(t)
        self.counter += 1
        if self.counter % self.sample_interval == 0:
            return self.learn()

    @abstractmethod
    def sample_experiences(self):
        """
        Implements how to retrieve experiences from the past.
        Will be called by self.learn()
        """
        pass

    def learn(self):
        """
        Sample data from past experiences and send them to CDP for learning.
        Optionally, it receives learning outcomes sent back from the CDP and
        does some processing.

        Depending on users' needs, this function can be called in three ways:
        1. In Agent's run_one_episode
        2. In store_data(), e.g., learning once every few steps
        3. As a separate thread, e.g., using experience replay
        """
        exp_seqs = self.sample_experiences()
        if not exp_seqs:
            return
        data, size = self.unpack_exps(exp_seqs)
        self.comm.put_training_data(data, size)
        ret = self.comm.get_training_return()
        return ret


class OnlineHelper(AgentHelper):
    """
    Online helper. It calls `learn()` every `sample_interval`
    steps. While waiting for learning return, the calling `Agent` is blocked.
    """

    def __init__(self, name, communicator, sample_interval=5):
        super(OnlineHelper, self).__init__(name, communicator, sample_interval)
        # NoReplacementQueue used to store past experience.
        self.exp_queue = NoReplacementQueue()

    @staticmethod
    def exp_replay():
        return False

    def add_experience(self, e):
        self.exp_queue.add(e)

    def sample_experiences(self):
        return self.exp_queue.sample()


class ExpReplayHelper(AgentHelper):
    """
    Example of applying experience replay. It starts a separate thread to
    run learn().
    """

    def __init__(self,
                 name,
                 communicator,
                 buffer_capacity,
                 num_experiences,
                 sample_interval=5,
                 num_seqs=1):
        super(ExpReplayHelper, self).__init__(name, communicator,
                                              sample_interval)
        # replay buffer for experience replay
        self.replay_buffer = ReplayBuffer(buffer_capacity)
        self.num_experiences = num_experiences
        self.num_seqs = num_seqs

    @staticmethod
    def exp_replay():
        return True

    def add_experience(self, e):
        self.replay_buffer.add(e)

    def sample_experiences(self):
        return self.replay_buffer.sample(self.num_experiences, self.num_seqs)


class Agent(Process):
    """
    Agent implements the control flow and logic of how Robot interacts with
    the environment and does computation. It is a subclass of Process. The
    entry function of the Agent process is run().

    Some members:
    env:        the environment
    num_games:  number of games to run
    learning:   whether to learn or not (if False, only do testing)
    helpers:    a dictionary of `AgentHelper`, each corresponds to one
                `ComputationTask`
    log_q:      communication channel between `Agent` and the centralized logger
    running:    the `Agent` will keep running as long as `running` is True.
    """
    __metaclass__ = ABCMeta

    def __init__(self, num_games, actrep, learning):
        super(Agent, self).__init__()
        self.id = -1  # just created, not added to the Robot yet
        self.num_games = num_games
        self.learning = learning
        self.state_specs = None
        self.helpers = {}
        self.log_q = None
        self.running = Value('i', 0)
        self.daemon = True  ## Process member
        self.alive = 1
        self.env_f = None
        self.actrep = actrep

    def set_env(self, env_class, *args, **kwargs):
        """
        Set the environment for the agent. For now, this only creates a
        lambda function. Once the agent process starts running, we will call
        this function.

        env_class: The environment class to create
        args, kwargs: The arguments for creating the class
        """
        self.env_f = lambda: env_class(*args, **kwargs)

    def add_agent_helper(self, helper, input_keys, action_keys, state_keys,
                         reward_keys):
        """
        Add an AgentHelper, with its name (also the name of its corresponding
        `ComputationTask`) as key.
""" assert isinstance(helper, AgentHelper) helper.input_keys = input_keys helper.action_keys = action_keys helper.state_keys = state_keys helper.reward_keys = reward_keys self.helpers[helper.name] = helper def _make_zero_states(self, prop): dtype = prop["dtype"] if "dtype" in prop else "float32" return np.zeros(prop["shape"]).astype(dtype) ## The following three functions hide the `AgentHelper` from the users of ## `Agent`. def predict(self, alg_name, inputs, states=dict()): ## Convert single instances to batches of size 1 ## The reason for this conversion is that we want to reuse the ## _pack_data() and _unpack_data() of the CDP for handling both training ## and prediction data. These two functions assume that data are stored ## as mini batches instead of single instances in the prediction and learning ## queues. inputs_ = {k: [v] for k, v in inputs.items()} states_ = {k: [v] for k, v in states.items()} prediction, next_states = self.helpers[alg_name].predict(inputs_, states_) ## convert back to single instances prediction = {k: v[0] for k, v in prediction.items()} next_states = {k: v[0] for k, v in next_states.items()} return prediction, next_states def run(self): """ Default entry function of Agent process. """ assert self.env_f is not None, "You should first call self.set_env()!" ## Only call the env function now to make sure there is only one ## environment (OpenGL context) in each process self.env = self.env_f() self.running.value = 1 for i in range(self.num_games): self._run_one_episode() if not self.running.value: return self.running.value = 0 def _store_data(self, alg_name, data): if self.learning: ## only store when the agent is learning return self.helpers[alg_name]._store_data(self.alive, data) def _run_one_episode(self): def __store_data(observations, actions, states, rewards): learning_ret = self._cts_store_data(observations, actions, states, rewards) ## written by user if learning_ret is not None: for k, v in learning_ret.items(): self.log_entry.add_key(k, v) observations = self._reset_env() states = self._get_init_states() ## written by user while self.alive and (not self.env.time_out()): actions, next_states = self._cts_predict( observations, states) ## written by user assert isinstance(actions, dict) assert isinstance(next_states, dict) next_observations, rewards, next_game_over = self._step_env( actions) __store_data(observations, actions, states, rewards) observations = next_observations states = next_states ## next_game_over == 1: success ## next_game_over == -1: failure self.alive = 1 - abs(next_game_over) ## self.alive: 0 -- success/failure ## 1 -- normal ## -1 -- timeout if self.env.time_out(): self.alive = -1 actions, _ = self._cts_predict(observations, states) zero_rewards = {k: [0] * len(v) for k, v in rewards.items()} __store_data(observations, actions, states, zero_rewards) ## Record success. 
        ## For games that do not have a definition of
        ## 'success' (e.g., 'breakout' never ends), this quantity will
        ## always be zero
        self.log_entry.add_key("success", next_game_over > 0)
        return self._total_reward()

    def _reset_env(self):
        self.alive = 1
        ## currently we only support a single logger for all CTs
        self.log_entry = GameLogEntry(self.id, 'All')
        obs = self.env.reset()
        assert isinstance(obs, dict)
        return obs

    def _step_env(self, actions):
        next_observations, rewards, next_game_over = self.env.step(actions,
                                                                   self.actrep)
        assert isinstance(next_observations, dict)
        assert isinstance(rewards, dict)
        self.log_entry.add_key("num_steps", 1)
        self.log_entry.add_key("total_reward", sum(map(sum, rewards.values())))
        return next_observations, rewards, next_game_over

    def _total_reward(self):
        self.log_q.put(self.log_entry)
        return self.log_entry.total_reward

    def _get_init_states(self):
        """
        By default, there is no state. The user needs to override this
        function to return a dictionary of init states if necessary.
        """
        return dict()

    @abstractmethod
    def _cts_predict(self, observations, states):
@abstractmethod def _cts_store_data(self, observations, actions, states, rewards): """ The user needs to override this function to specify how different CTs store their corresponding experiences, by calling self._store_data(). Each input should be a dictionary. """ pass
""" The user needs to override this function to specify how different CTs make predictions given observations and states. Output: actions: a dictionary of actions, each action being a vector If the action is discrete, then it is a length-one list of an integer. states (optional): a dictionary of states, each state being a floating vector """ pass
conv_test.py
# Lint as: python3 # Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import numpy as np from pyiree.tf.support import tf_test_utils import tensorflow.compat.v2 as tf class Conv2dModule(tf.Module): @tf.function(input_signature=[ tf.TensorSpec([1, 4, 5, 1], tf.float32), tf.TensorSpec([1, 1, 1, 1], tf.float32), ]) def conv2d_1451x1111_valid(self, img, kernel): return tf.nn.conv2d(img, kernel, [1, 1, 1, 1], "VALID", name="result") @tf.function(input_signature=[ tf.TensorSpec([2, 4, 5, 1], tf.float32), tf.TensorSpec([1, 1, 1, 1], tf.float32), ]) def conv2d_2451x1111_valid(self, img, kernel): return tf.nn.conv2d(img, kernel, [1, 1, 1, 1], "VALID", name="result") @tf.function(input_signature=[ tf.TensorSpec([1, 4, 5, 1], tf.float32), tf.TensorSpec([2, 3, 1, 1], tf.float32),
def conv2d_1451x2311_valid(self, img, kernel): return tf.nn.conv2d(img, kernel, [1, 1, 1, 1], "VALID", name="result") @tf.function(input_signature=[ tf.TensorSpec([1, 4, 5, 1], tf.float32), tf.TensorSpec([2, 3, 1, 1], tf.float32), ]) def conv2d_1451x2311_same(self, img, kernel): return tf.nn.conv2d(img, kernel, [1, 1, 1, 1], "SAME", name="result") @tf.function(input_signature=[ tf.TensorSpec([2, 4, 5, 1], tf.float32), tf.TensorSpec([2, 3, 1, 1], tf.float32), ]) def conv2d_2451x2311_same(self, img, kernel): return tf.nn.conv2d(img, kernel, [1, 1, 1, 1], "SAME", name="result") @tf.function(input_signature=[ tf.TensorSpec([1, 4, 5, 2], tf.float32), tf.TensorSpec([3, 2, 2, 1], tf.float32), ]) def conv2d_1452x3221_same(self, img, kernel): return tf.nn.conv2d(img, kernel, [1, 1, 1, 1], "SAME", name="result") @tf.function(input_signature=[ tf.TensorSpec([1, 4, 5, 1], tf.float32), tf.TensorSpec([1, 1, 1, 2], tf.float32), ]) def conv2d_1451x1112_same(self, img, kernel): return tf.nn.conv2d(img, kernel, [1, 1, 1, 1], "SAME", name="result") @tf.function(input_signature=[ tf.TensorSpec([1, 4, 5, 2], tf.float32), tf.TensorSpec([1, 1, 2, 2], tf.float32), ]) def conv2d_1452x1122_same(self, img, kernel): return tf.nn.conv2d(img, kernel, [1, 1, 1, 1], "SAME", name="result") @tf.function(input_signature=[ tf.TensorSpec([1, 4, 5, 2], tf.float32), tf.TensorSpec([2, 2, 2, 3], tf.float32), ]) def conv2d_1452x2223_same(self, img, kernel): return tf.nn.conv2d(img, kernel, [1, 1, 1, 1], "SAME", name="result") @tf.function(input_signature=[ tf.TensorSpec([1, 4, 5, 2], tf.float32), tf.TensorSpec([2, 2, 2, 3], tf.float32), ]) def conv2d_1452x2223_valid(self, img, kernel): return tf.nn.conv2d(img, kernel, [1, 1, 1, 1], "VALID", name="result") @tf.function(input_signature=[ tf.TensorSpec([2, 4, 5, 2], tf.float32), tf.TensorSpec([2, 2, 2, 3], tf.float32), ]) def conv2d_2452x2223_valid(self, img, kernel): return tf.nn.conv2d(img, kernel, [1, 1, 1, 1], "VALID", name="result") @tf_test_utils.compile_module(Conv2dModule) class ConvTest(tf_test_utils.SavedModelTestCase): def test_id_batch_size_1(self): i = np.arange(20, dtype=np.float32).reshape([1, 4, 5, 1]) k = np.ones([1, 1, 1, 1], dtype=np.float32) r = self.get_module().conv2d_1451x1111_valid(i, k) r.print().assert_all_close() def test_id_batch_size_2(self): i = np.arange(40, dtype=np.float32).reshape([2, 4, 5, 1]) k = np.ones([1, 1, 1, 1], dtype=np.float32) r = self.get_module().conv2d_2451x1111_valid(i, k) r.print().assert_all_close() def test_asym_kernel(self): i = np.arange(20, dtype=np.float32).reshape([1, 4, 5, 1]) k = np.array([[1, 4, 2], [-2, 0, 1]], dtype=np.float32).reshape(2, 3, 1, 1) r = self.get_module().conv2d_1451x2311_valid(i, k) r.print().assert_all_close() def test_padding(self): i = np.arange(20, dtype=np.float32).reshape([1, 4, 5, 1]) k = np.array([[1, 4, 2], [-2, 0, 1]], dtype=np.float32).reshape(2, 3, 1, 1) r = self.get_module().conv2d_1451x2311_same(i, k) r.print().assert_all_close() def test_batched_padding(self): i = np.arange(40, dtype=np.float32).reshape([2, 4, 5, 1]) k = np.array([[1, 4, 2], [-2, 0, 1]], dtype=np.float32).reshape(2, 3, 1, 1) r = self.get_module().conv2d_2451x2311_same(i, k) r.print().assert_all_close() def test_feature_reduce(self): i = np.arange(40, dtype=np.float32).reshape([1, 4, 5, 2]) k = np.ones([3, 2, 2, 1], dtype=np.float32) r = self.get_module().conv2d_1452x3221_same(i, k) r.print().assert_all_close() def test_feature_inflate(self): i = np.arange(20, dtype=np.float32).reshape([1, 4, 5, 1]) k = np.arange(2, 
dtype=np.float32).reshape([1, 1, 1, 2]) r = self.get_module().conv2d_1451x1112_same(i, k) r.print().assert_all_close() def test_feature_mix(self): i = np.arange(40, dtype=np.float32).reshape([1, 4, 5, 2]) k = np.arange(4, dtype=np.float32).reshape([1, 1, 2, 2]) r = self.get_module().conv2d_1452x1122_same(i, k) r.print().assert_all_close() def test_feature_padded(self): i = np.arange(40, dtype=np.float32).reshape([1, 4, 5, 2]) k = np.arange(24, dtype=np.float32).reshape([2, 2, 2, 3]) r = self.get_module().conv2d_1452x2223_same(i, k) r.print().assert_all_close() def test_feature_unpadded(self): i = np.arange(40, dtype=np.float32).reshape([1, 4, 5, 2]) k = np.arange(24, dtype=np.float32).reshape([2, 2, 2, 3]) r = self.get_module().conv2d_1452x2223_valid(i, k) r.print().assert_all_close() def test_batched_feature_unpadded(self): i = np.arange(80, dtype=np.float32).reshape([2, 4, 5, 2]) k = np.arange(24, dtype=np.float32).reshape([2, 2, 2, 3]) r = self.get_module().conv2d_2452x2223_valid(i, k) r.print().assert_all_close() if __name__ == "__main__": if hasattr(tf, "enable_v2_behavior"): tf.enable_v2_behavior() tf.test.main()
])
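All the cases above use stride 1, so the output shapes follow the usual rule: VALID gives (H - kH + 1) x (W - kW + 1), while SAME pads so the output stays H x W. For example, the 4x5 image with a 2x3 kernel yields 3x3 under VALID and 4x5 under SAME. A Go sketch of that arithmetic:

// Stride-1 conv2d output sizes for VALID and SAME padding.
package main

import "fmt"

func convOut(h, w, kh, kw int, same bool) (int, int) {
	if same {
		return h, w // SAME pads to preserve spatial dims at stride 1
	}
	return h - kh + 1, w - kw + 1 // VALID shrinks by kernel - 1
}

func main() {
	fmt.Println(convOut(4, 5, 2, 3, false)) // 3 3 (conv2d_1451x2311_valid)
	fmt.Println(convOut(4, 5, 2, 3, true))  // 4 5 (conv2d_1451x2311_same)
}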
table_test.go
// Copyright (C) 2014 Nippon Telegraph and Telephone Corporation. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or // implied. // See the License for the specific language governing permissions and // limitations under the License. package table import ( "testing" "time" "github.com/osrg/gobgp/pkg/packet/bgp" "github.com/stretchr/testify/assert" ) func TestLookupLonger(t *testing.T) { tbl := NewTable(bgp.RF_IPv4_UC) tbl.setDestination(NewDestination(bgp.NewIPAddrPrefix(23, "11.0.0.0"), 0)) tbl.setDestination(NewDestination(bgp.NewIPAddrPrefix(24, "11.0.0.0"), 0)) tbl.setDestination(NewDestination(bgp.NewIPAddrPrefix(32, "11.0.0.4"), 0)) tbl.setDestination(NewDestination(bgp.NewIPAddrPrefix(32, "11.0.0.129"), 0)) tbl.setDestination(NewDestination(bgp.NewIPAddrPrefix(28, "11.0.0.144"), 0)) tbl.setDestination(NewDestination(bgp.NewIPAddrPrefix(29, "11.0.0.144"), 0)) tbl.setDestination(NewDestination(bgp.NewIPAddrPrefix(32, "11.0.0.145"), 0)) r, _ := tbl.GetLongerPrefixDestinations("11.0.0.128/25") assert.Equal(t, len(r), 4) r, _ = tbl.GetLongerPrefixDestinations("11.0.0.0/24") assert.Equal(t, len(r), 6)
} func TestTableDeleteDest(t *testing.T) { peerT := TableCreatePeer() pathT := TableCreatePath(peerT) ipv4t := NewTable(bgp.RF_IPv4_UC) for _, path := range pathT { dest := NewDestination(path.GetNlri(), 0) ipv4t.setDestination(dest) } dest := NewDestination(pathT[0].GetNlri(), 0) ipv4t.setDestination(dest) ipv4t.deleteDest(dest) gdest := ipv4t.GetDestination(pathT[0].GetNlri()) assert.Nil(t, gdest) } func TestTableGetRouteFamily(t *testing.T) { ipv4t := NewTable(bgp.RF_IPv4_UC) rf := ipv4t.GetRoutefamily() assert.Equal(t, rf, bgp.RF_IPv4_UC) } func TestTableSetDestinations(t *testing.T) { peerT := TableCreatePeer() pathT := TableCreatePath(peerT) ipv4t := NewTable(bgp.RF_IPv4_UC) destinations := make(map[string]*Destination) for _, path := range pathT { tableKey := ipv4t.tableKey(path.GetNlri()) dest := NewDestination(path.GetNlri(), 0) destinations[tableKey] = dest } ipv4t.setDestinations(destinations) ds := ipv4t.GetDestinations() assert.Equal(t, ds, destinations) } func TestTableGetDestinations(t *testing.T) { peerT := DestCreatePeer() pathT := DestCreatePath(peerT) ipv4t := NewTable(bgp.RF_IPv4_UC) destinations := make(map[string]*Destination) for _, path := range pathT { tableKey := ipv4t.tableKey(path.GetNlri()) dest := NewDestination(path.GetNlri(), 0) destinations[tableKey] = dest } ipv4t.setDestinations(destinations) ds := ipv4t.GetDestinations() assert.Equal(t, ds, destinations) } func TestTableKey(t *testing.T) { tb := NewTable(bgp.RF_IPv4_UC) n1, _ := bgp.NewPrefixFromRouteFamily(bgp.AFI_IP, bgp.SAFI_UNICAST, "0.0.0.0/0") d1 := NewDestination(n1, 0) n2, _ := bgp.NewPrefixFromRouteFamily(bgp.AFI_IP, bgp.SAFI_UNICAST, "0.0.0.0/1") d2 := NewDestination(n2, 0) assert.Equal(t, len(tb.tableKey(d1.GetNlri())), 5) tb.setDestination(d1) tb.setDestination(d2) assert.Equal(t, len(tb.GetDestinations()), 2) } func TableCreatePeer() []*PeerInfo { peerT1 := &PeerInfo{AS: 65000} peerT2 := &PeerInfo{AS: 65001} peerT3 := &PeerInfo{AS: 65002} peerT := []*PeerInfo{peerT1, peerT2, peerT3} return peerT } func TableCreatePath(peerT []*PeerInfo) []*Path { bgpMsgT1 := updateMsgT1() bgpMsgT2 := updateMsgT2() bgpMsgT3 := updateMsgT3() pathT := make([]*Path, 3) for i, msg := range []*bgp.BGPMessage{bgpMsgT1, bgpMsgT2, bgpMsgT3} { updateMsgT := msg.Body.(*bgp.BGPUpdate) nlriList := updateMsgT.NLRI pathAttributes := updateMsgT.PathAttributes nlri_info := nlriList[0] pathT[i] = NewPath(peerT[i], nlri_info, false, pathAttributes, time.Now(), false) } return pathT } func updateMsgT1() *bgp.BGPMessage { origin := bgp.NewPathAttributeOrigin(0) aspathParam := []bgp.AsPathParamInterface{bgp.NewAsPathParam(2, []uint16{65000})} aspath := bgp.NewPathAttributeAsPath(aspathParam) nexthop := bgp.NewPathAttributeNextHop("192.168.50.1") med := bgp.NewPathAttributeMultiExitDisc(0) pathAttributes := []bgp.PathAttributeInterface{ origin, aspath, nexthop, med, } nlri := []*bgp.IPAddrPrefix{bgp.NewIPAddrPrefix(24, "10.10.10.0")} return bgp.NewBGPUpdateMessage(nil, pathAttributes, nlri) } func updateMsgT2() *bgp.BGPMessage { origin := bgp.NewPathAttributeOrigin(0) aspathParam := []bgp.AsPathParamInterface{bgp.NewAsPathParam(2, []uint16{65100})} aspath := bgp.NewPathAttributeAsPath(aspathParam) nexthop := bgp.NewPathAttributeNextHop("192.168.100.1") med := bgp.NewPathAttributeMultiExitDisc(100) pathAttributes := []bgp.PathAttributeInterface{ origin, aspath, nexthop, med, } nlri := []*bgp.IPAddrPrefix{bgp.NewIPAddrPrefix(24, "20.20.20.0")} return bgp.NewBGPUpdateMessage(nil, pathAttributes, nlri) } func updateMsgT3() 
*bgp.BGPMessage { origin := bgp.NewPathAttributeOrigin(0) aspathParam := []bgp.AsPathParamInterface{bgp.NewAsPathParam(2, []uint16{65100})} aspath := bgp.NewPathAttributeAsPath(aspathParam) nexthop := bgp.NewPathAttributeNextHop("192.168.150.1") med := bgp.NewPathAttributeMultiExitDisc(100) pathAttributes := []bgp.PathAttributeInterface{ origin, aspath, nexthop, med, } nlri := []*bgp.IPAddrPrefix{bgp.NewIPAddrPrefix(24, "30.30.30.0")} w1 := bgp.NewIPAddrPrefix(23, "40.40.40.0") withdrawnRoutes := []*bgp.IPAddrPrefix{w1} return bgp.NewBGPUpdateMessage(withdrawnRoutes, pathAttributes, nlri) }
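TestLookupLonger checks that GetLongerPrefixDestinations returns every stored destination whose prefix lies inside (and is at least as long as) the query: 4 of the 7 routes fall under 11.0.0.128/25 and 6 under 11.0.0.0/24. A simplified Go restatement of that containment test using only the standard library — GoBGP's table does a radix-tree lookup, which this does not model:

// A destination is "longer" than a query prefix if the query's network
// contains it and its mask is at least as long.
package main

import (
	"fmt"
	"net"
)

func isLonger(query, dest string) bool {
	// errors ignored for brevity; inputs are assumed to be valid CIDRs
	_, qn, _ := net.ParseCIDR(query)
	dip, dn, _ := net.ParseCIDR(dest)
	qOnes, _ := qn.Mask.Size()
	dOnes, _ := dn.Mask.Size()
	return qn.Contains(dip) && dOnes >= qOnes
}

func main() {
	fmt.Println(isLonger("11.0.0.128/25", "11.0.0.144/28")) // true
	fmt.Println(isLonger("11.0.0.128/25", "11.0.0.4/32"))   // false
}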
chain_core.go
package xchain

import (
	"errors"
	"github.com/xuperchain/xupercore/bcs/ledger/xledger/state"
	pb "github.com/xuperchain/xupercore/bcs/ledger/xledger/xldgpb"
	pb2 "github.com/xuperchain/xupercore/kernel/contract/bridge/pb"
	"github.com/xuperchain/xupercore/kernel/ledger"
	"math/big"
)

var (
	errUnimplemented = errors.New("unimplemented")
)

type chainCore struct {
}

// GetAccountAddresses gets the addresses associated with an account name
func (c *chainCore) GetAccountAddresses(accountName string) ([]string, error) {
	return []string{}, nil
}

// GetBalance gets the balance from UTXO
func (c *chainCore) GetBalance(addr string) (*big.Int, error) {
	return big.NewInt(0), nil
}

// VerifyContractPermission verifies the permission to call a contract
func (c *chainCore) VerifyContractPermission(initiator string, authRequire []string, contractName, methodName string) (bool, error) {
	return true, nil
}

// VerifyContractOwnerPermission verifies contract ownership permission
func (c *chainCore) VerifyContractOwnerPermission(contractName string, authRequire []string) error {
	return nil
}

// QueryTransaction queries a confirmed tx
func (c *chainCore) QueryTransaction(txid []byte) (*pb2.Transaction, error) {
	return &pb2.Transaction{}, nil
}

// QueryBlock queries a block
func (c *chainCore) QueryBlock(blockid []byte) (ledger.BlockHandle, error) {
	return &state.BlockAgent{}, nil
}

// QueryBlockByHeight queries a block by height
func (c *chainCore) QueryBlockByHeight(height int64) (*pb.InternalBlock, error) {
	return new(pb.InternalBlock), nil
}
func (c *chainCore) QueryLastBlock() (*pb.InternalBlock, error) {
	return new(pb.InternalBlock), nil
}

// CrossQuery queries a contract from another chain
//func (c *chainCore) ResolveChain(chainName string) (*pb.CrossQueryMeta, error) {
//	return new(pb.CrossQueryMeta), nil
//}
// QueryLastBlock query last block
context.rs
// ignore-tidy-filelength //! Type context book-keeping. use crate::arena::Arena; use crate::dep_graph::DepGraph; use crate::dep_graph::{self, DepNode, DepConstructor}; use crate::session::Session; use crate::session::config::{BorrowckMode, OutputFilenames}; use crate::session::config::CrateType; use crate::middle; use crate::middle::lang_items::PanicLocationLangItem; use crate::hir::{self, TraitCandidate, HirId, ItemKind, ItemLocalId, Node}; use crate::hir::def::{Res, DefKind, Export}; use crate::hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE}; use crate::hir::map as hir_map; use crate::hir::map::DefPathHash; use crate::lint::{self, Lint}; use crate::ich::{StableHashingContext, NodeIdHashingMode}; use crate::infer::canonical::{Canonical, CanonicalVarInfo, CanonicalVarInfos}; use crate::infer::outlives::free_region_map::FreeRegionMap; use crate::middle::cstore::CrateStoreDyn; use crate::middle::cstore::EncodedMetadata; use crate::middle::lang_items; use crate::middle::resolve_lifetime::{self, ObjectLifetimeDefault}; use crate::middle::stability; use crate::mir::{BodyAndCache, Field, interpret, Local, Place, PlaceElem, ProjectionKind, Promoted}; use crate::mir::interpret::{ConstValue, Allocation, Scalar}; use crate::ty::subst::{GenericArg, InternalSubsts, SubstsRef, Subst}; use crate::ty::ReprOptions; use crate::traits; use crate::traits::{Clause, Clauses, GoalKind, Goal, Goals}; use crate::ty::{self, DefIdTree, Ty, TypeAndMut}; use crate::ty::{TyS, TyKind, List}; use crate::ty::{AdtKind, AdtDef, Region, Const}; use crate::ty::{PolyFnSig, InferTy, ParamTy, ProjectionTy, ExistentialPredicate, Predicate}; use crate::ty::RegionKind; use crate::ty::{TyVar, TyVid, IntVar, IntVid, FloatVar, FloatVid, ConstVid}; use crate::ty::TyKind::*; use crate::ty::{InferConst, ParamConst}; use crate::ty::GenericParamDefKind; use crate::ty::layout::{LayoutDetails, TargetDataLayout, VariantIdx}; use crate::ty::query; use crate::ty::steal::Steal; use crate::ty::subst::{UserSubsts, GenericArgKind}; use crate::ty::{BoundVar, BindingMode}; use crate::ty::CanonicalPolyFnSig; use crate::util::common::ErrorReported; use crate::util::nodemap::{DefIdMap, DefIdSet, ItemLocalMap, ItemLocalSet, NodeMap}; use crate::util::nodemap::{FxHashMap, FxHashSet}; use errors::DiagnosticBuilder; use arena::SyncDroplessArena; use smallvec::SmallVec; use rustc_data_structures::profiling::SelfProfilerRef; use rustc_data_structures::stable_hasher::{ HashStable, StableHasher, StableVec, hash_stable_hashmap, }; use rustc_index::vec::{Idx, IndexVec}; use rustc_data_structures::sharded::ShardedHashMap; use rustc_data_structures::sync::{Lrc, Lock, WorkerLocal}; use std::any::Any; use std::borrow::Borrow; use std::cmp::Ordering; use std::collections::hash_map::{self, Entry}; use std::hash::{Hash, Hasher}; use std::fmt; use std::mem; use std::ops::{Deref, Bound}; use std::iter; use std::sync::Arc; use rustc_target::spec::abi; use rustc_macros::HashStable; use syntax::ast; use syntax::attr; use syntax::source_map::MultiSpan; use syntax::symbol::{Symbol, kw, sym}; use syntax_pos::Span; use syntax::expand::allocator::AllocatorKind; pub struct AllArenas { pub interner: SyncDroplessArena, } impl AllArenas { pub fn new() -> Self { AllArenas { interner: SyncDroplessArena::default(), } } } type InternedSet<'tcx, T> = ShardedHashMap<Interned<'tcx, T>, ()>; pub struct CtxtInterners<'tcx> { /// The arena that types, regions, etc. are allocated from. 
arena: &'tcx SyncDroplessArena, /// Specifically use a speedy hash algorithm for these hash sets, since /// they're accessed quite often. type_: InternedSet<'tcx, TyS<'tcx>>, type_list: InternedSet<'tcx, List<Ty<'tcx>>>, substs: InternedSet<'tcx, InternalSubsts<'tcx>>, canonical_var_infos: InternedSet<'tcx, List<CanonicalVarInfo>>, region: InternedSet<'tcx, RegionKind>, existential_predicates: InternedSet<'tcx, List<ExistentialPredicate<'tcx>>>, predicates: InternedSet<'tcx, List<Predicate<'tcx>>>, clauses: InternedSet<'tcx, List<Clause<'tcx>>>, goal: InternedSet<'tcx, GoalKind<'tcx>>, goal_list: InternedSet<'tcx, List<Goal<'tcx>>>, projs: InternedSet<'tcx, List<ProjectionKind>>, place_elems: InternedSet<'tcx, List<PlaceElem<'tcx>>>, const_: InternedSet<'tcx, Const<'tcx>>, } impl<'tcx> CtxtInterners<'tcx> { fn new(arena: &'tcx SyncDroplessArena) -> CtxtInterners<'tcx> { CtxtInterners { arena, type_: Default::default(), type_list: Default::default(), substs: Default::default(), region: Default::default(), existential_predicates: Default::default(), canonical_var_infos: Default::default(), predicates: Default::default(), clauses: Default::default(), goal: Default::default(), goal_list: Default::default(), projs: Default::default(), place_elems: Default::default(), const_: Default::default(), } } /// Interns a type. #[allow(rustc::usage_of_ty_tykind)] #[inline(never)] fn intern_ty(&self, kind: TyKind<'tcx> ) -> Ty<'tcx> { self.type_.intern(kind, |kind| { let flags = super::flags::FlagComputation::for_kind(&kind); let ty_struct = TyS { kind, flags: flags.flags, outer_exclusive_binder: flags.outer_exclusive_binder, }; Interned(self.arena.alloc(ty_struct)) }).0 } } pub struct CommonTypes<'tcx> { pub unit: Ty<'tcx>, pub bool: Ty<'tcx>, pub char: Ty<'tcx>, pub isize: Ty<'tcx>, pub i8: Ty<'tcx>, pub i16: Ty<'tcx>, pub i32: Ty<'tcx>, pub i64: Ty<'tcx>, pub i128: Ty<'tcx>, pub usize: Ty<'tcx>, pub u8: Ty<'tcx>, pub u16: Ty<'tcx>, pub u32: Ty<'tcx>, pub u64: Ty<'tcx>, pub u128: Ty<'tcx>, pub f32: Ty<'tcx>, pub f64: Ty<'tcx>, pub never: Ty<'tcx>, pub self_param: Ty<'tcx>, pub err: Ty<'tcx>, /// Dummy type used for the `Self` of a `TraitRef` created for converting /// a trait object, and which gets removed in `ExistentialTraitRef`. /// This type must not appear anywhere in other converted types. pub trait_object_dummy_self: Ty<'tcx>, } pub struct CommonLifetimes<'tcx> { pub re_empty: Region<'tcx>, pub re_static: Region<'tcx>, pub re_erased: Region<'tcx>, } pub struct CommonConsts<'tcx> { pub err: &'tcx Const<'tcx>, } pub struct LocalTableInContext<'a, V> { local_id_root: Option<DefId>, data: &'a ItemLocalMap<V> } /// Validate that the given HirId (respectively its `local_id` part) can be /// safely used as a key in the tables of a TypeckTable. For that to be /// the case, the HirId must have the same `owner` as all the other IDs in /// this table (signified by `local_id_root`). Otherwise the HirId /// would be in a different frame of reference and using its `local_id` /// would result in lookup errors, or worse, in silently wrong data being /// stored/returned. 
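/// In sketch form, with names assumed purely for illustration:
///
/// ```ignore (pseudo-Rust)
/// // tables.local_id_root == Some(def_id_of_foo)
/// validate_hir_id_for_typeck_tables(tables.local_id_root, id_inside_foo, false); // ok
/// validate_hir_id_for_typeck_tables(tables.local_id_root, id_inside_bar, false); // ICEs via `bug!`
/// ```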
fn validate_hir_id_for_typeck_tables(local_id_root: Option<DefId>, hir_id: hir::HirId, mut_access: bool) { if let Some(local_id_root) = local_id_root { if hir_id.owner != local_id_root.index { ty::tls::with(|tcx| { bug!("node {} with HirId::owner {:?} cannot be placed in \ TypeckTables with local_id_root {:?}", tcx.hir().node_to_string(hir_id), DefId::local(hir_id.owner), local_id_root) }); } } else { // We use "Null Object" TypeckTables in some of the analysis passes. // These are just expected to be empty and their `local_id_root` is // `None`. Therefore we cannot verify whether a given `HirId` would // be a valid key for the given table. Instead we make sure that // nobody tries to write to such a Null Object table. if mut_access { bug!("access to invalid TypeckTables") } } } impl<'a, V> LocalTableInContext<'a, V> { pub fn contains_key(&self, id: hir::HirId) -> bool { validate_hir_id_for_typeck_tables(self.local_id_root, id, false); self.data.contains_key(&id.local_id) } pub fn get(&self, id: hir::HirId) -> Option<&V> { validate_hir_id_for_typeck_tables(self.local_id_root, id, false); self.data.get(&id.local_id) } pub fn iter(&self) -> hash_map::Iter<'_, hir::ItemLocalId, V> { self.data.iter() } } impl<'a, V> ::std::ops::Index<hir::HirId> for LocalTableInContext<'a, V> { type Output = V; fn index(&self, key: hir::HirId) -> &V { self.get(key).expect("LocalTableInContext: key not found") } } pub struct LocalTableInContextMut<'a, V> { local_id_root: Option<DefId>, data: &'a mut ItemLocalMap<V> } impl<'a, V> LocalTableInContextMut<'a, V> { pub fn get_mut(&mut self, id: hir::HirId) -> Option<&mut V> { validate_hir_id_for_typeck_tables(self.local_id_root, id, true); self.data.get_mut(&id.local_id) } pub fn entry(&mut self, id: hir::HirId) -> Entry<'_, hir::ItemLocalId, V> { validate_hir_id_for_typeck_tables(self.local_id_root, id, true); self.data.entry(id.local_id) } pub fn insert(&mut self, id: hir::HirId, val: V) -> Option<V> { validate_hir_id_for_typeck_tables(self.local_id_root, id, true); self.data.insert(id.local_id, val) } pub fn remove(&mut self, id: hir::HirId) -> Option<V> { validate_hir_id_for_typeck_tables(self.local_id_root, id, true); self.data.remove(&id.local_id) } } /// All information necessary to validate and reveal an `impl Trait`. #[derive(RustcEncodable, RustcDecodable, Debug, HashStable)] pub struct ResolvedOpaqueTy<'tcx> { /// The revealed type as seen by this function. pub concrete_type: Ty<'tcx>, /// Generic parameters on the opaque type as passed by this function. /// For `type Foo<A, B> = impl Bar<A, B>; fn foo<T, U>() -> Foo<T, U> { .. }` /// this is `[T, U]`, not `[A, B]`. pub substs: SubstsRef<'tcx>, } /// Whenever a value may be live across a generator yield, the type of that value winds up in the /// `GeneratorInteriorTypeCause` struct. This struct adds additional information about such /// captured types that can be useful for diagnostics. In particular, it stores the span that /// caused a given type to be recorded, along with the scope that enclosed the value (which can /// be used to find the await that the value is live across). /// /// For example: /// /// ```ignore (pseudo-Rust) /// async move { /// let x: T = ...; /// foo.await /// ... /// } /// ``` /// /// Here, we would store the type `T`, the span of the value `x`, and the "scope-span" for /// the scope that contains `x`. 
#[derive(RustcEncodable, RustcDecodable, Clone, Debug, Eq, Hash, PartialEq)] #[derive(HashStable, TypeFoldable)] pub struct GeneratorInteriorTypeCause<'tcx> { /// Type of the captured binding. pub ty: Ty<'tcx>, /// Span of the binding that was captured. pub span: Span, /// Span of the scope of the captured binding. pub scope_span: Option<Span>, } #[derive(RustcEncodable, RustcDecodable, Debug)] pub struct TypeckTables<'tcx> { /// The HirId::owner all ItemLocalIds in this table are relative to. pub local_id_root: Option<DefId>, /// Resolved definitions for `<T>::X` associated paths and /// method calls, including those of overloaded operators. type_dependent_defs: ItemLocalMap<Result<(DefKind, DefId), ErrorReported>>, /// Resolved field indices for field accesses in expressions (`S { field }`, `obj.field`) /// or patterns (`S { field }`). The index is often useful by itself, but to learn more /// about the field you also need definition of the variant to which the field /// belongs, but it may not exist if it's a tuple field (`tuple.0`). field_indices: ItemLocalMap<usize>, /// Stores the types for various nodes in the AST. Note that this table /// is not guaranteed to be populated until after typeck. See /// typeck::check::fn_ctxt for details. node_types: ItemLocalMap<Ty<'tcx>>, /// Stores the type parameters which were substituted to obtain the type /// of this node. This only applies to nodes that refer to entities /// parameterized by type parameters, such as generic fns, types, or /// other items. node_substs: ItemLocalMap<SubstsRef<'tcx>>, /// This will either store the canonicalized types provided by the user /// or the substitutions that the user explicitly gave (if any) attached /// to `id`. These will not include any inferred values. The canonical form /// is used to capture things like `_` or other unspecified values. /// /// For example, if the user wrote `foo.collect::<Vec<_>>()`, then the /// canonical substitutions would include only `for<X> { Vec<X> }`. /// /// See also `AscribeUserType` statement in MIR. user_provided_types: ItemLocalMap<CanonicalUserType<'tcx>>, /// Stores the canonicalized types provided by the user. See also /// `AscribeUserType` statement in MIR. pub user_provided_sigs: DefIdMap<CanonicalPolyFnSig<'tcx>>, adjustments: ItemLocalMap<Vec<ty::adjustment::Adjustment<'tcx>>>, /// Stores the actual binding mode for all instances of hir::BindingAnnotation. pat_binding_modes: ItemLocalMap<BindingMode>, /// Stores the types which were implicitly dereferenced in pattern binding modes /// for later usage in HAIR lowering. For example, /// /// ``` /// match &&Some(5i32) { /// Some(n) => {}, /// _ => {}, /// } /// ``` /// leads to a `vec![&&Option<i32>, &Option<i32>]`. Empty vectors are not stored. /// /// See: /// https://github.com/rust-lang/rfcs/blob/master/text/2005-match-ergonomics.md#definitions pat_adjustments: ItemLocalMap<Vec<Ty<'tcx>>>, /// Borrows pub upvar_capture_map: ty::UpvarCaptureMap<'tcx>, /// Records the reasons that we picked the kind of each closure; /// not all closures are present in the map. closure_kind_origins: ItemLocalMap<(Span, ast::Name)>, /// For each fn, records the "liberated" types of its arguments /// and return type. Liberated means that all bound regions /// (including late-bound regions) are replaced with free /// equivalents. This table is not used in codegen (since regions /// are erased there) and hence is not serialized to metadata. 
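    /// As an illustration: for `fn f<'a>(x: &'a u32) -> &'a u32`, the signature
    /// recorded here has the late-bound `'a` replaced with a free region.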
liberated_fn_sigs: ItemLocalMap<ty::FnSig<'tcx>>, /// For each FRU expression, record the normalized types of the fields /// of the struct - this is needed because it is non-trivial to /// normalize while preserving regions. This table is used only in /// MIR construction and hence is not serialized to metadata. fru_field_types: ItemLocalMap<Vec<Ty<'tcx>>>, /// For every coercion cast we add the HIR node ID of the cast /// expression to this set. coercion_casts: ItemLocalSet, /// Set of trait imports actually used in the method resolution. /// This is used for warning unused imports. During type /// checking, this `Lrc` should not be cloned: it must have a ref-count /// of 1 so that we can insert things into the set mutably. pub used_trait_imports: Lrc<DefIdSet>, /// If any errors occurred while type-checking this body, /// this field will be set to `true`. pub tainted_by_errors: bool, /// Stores the free-region relationships that were deduced from /// its where-clauses and parameter types. These are then /// read-again by borrowck. pub free_region_map: FreeRegionMap<'tcx>, /// All the opaque types that are restricted to concrete types /// by this function. pub concrete_opaque_types: FxHashMap<DefId, ResolvedOpaqueTy<'tcx>>, /// Given the closure ID this map provides the list of UpvarIDs used by it. /// The upvarID contains the HIR node ID and it also contains the full path /// leading to the member of the struct or tuple that is used instead of the /// entire variable. pub upvar_list: ty::UpvarListMap, /// Stores the type, span and optional scope span of all types /// that are live across the yield of this generator (if a generator). pub generator_interior_types: Vec<GeneratorInteriorTypeCause<'tcx>>, } impl<'tcx> TypeckTables<'tcx> { pub fn empty(local_id_root: Option<DefId>) -> TypeckTables<'tcx> { TypeckTables { local_id_root, type_dependent_defs: Default::default(), field_indices: Default::default(), user_provided_types: Default::default(), user_provided_sigs: Default::default(), node_types: Default::default(), node_substs: Default::default(), adjustments: Default::default(), pat_binding_modes: Default::default(), pat_adjustments: Default::default(), upvar_capture_map: Default::default(), closure_kind_origins: Default::default(), liberated_fn_sigs: Default::default(), fru_field_types: Default::default(), coercion_casts: Default::default(), used_trait_imports: Lrc::new(Default::default()), tainted_by_errors: false, free_region_map: Default::default(), concrete_opaque_types: Default::default(), upvar_list: Default::default(), generator_interior_types: Default::default(), } } /// Returns the final resolution of a `QPath` in an `Expr` or `Pat` node. pub fn qpath_res(&self, qpath: &hir::QPath, id: hir::HirId) -> Res { match *qpath { hir::QPath::Resolved(_, ref path) => path.res, hir::QPath::TypeRelative(..) 
=> self.type_dependent_def(id) .map_or(Res::Err, |(kind, def_id)| Res::Def(kind, def_id)), } } pub fn type_dependent_defs( &self, ) -> LocalTableInContext<'_, Result<(DefKind, DefId), ErrorReported>> { LocalTableInContext { local_id_root: self.local_id_root, data: &self.type_dependent_defs } } pub fn type_dependent_def(&self, id: HirId) -> Option<(DefKind, DefId)> { validate_hir_id_for_typeck_tables(self.local_id_root, id, false); self.type_dependent_defs.get(&id.local_id).cloned().and_then(|r| r.ok()) } pub fn type_dependent_def_id(&self, id: HirId) -> Option<DefId> { self.type_dependent_def(id).map(|(_, def_id)| def_id) } pub fn type_dependent_defs_mut( &mut self, ) -> LocalTableInContextMut<'_, Result<(DefKind, DefId), ErrorReported>> { LocalTableInContextMut { local_id_root: self.local_id_root, data: &mut self.type_dependent_defs } } pub fn field_indices(&self) -> LocalTableInContext<'_, usize> { LocalTableInContext { local_id_root: self.local_id_root, data: &self.field_indices } } pub fn field_indices_mut(&mut self) -> LocalTableInContextMut<'_, usize> { LocalTableInContextMut { local_id_root: self.local_id_root, data: &mut self.field_indices } } pub fn user_provided_types( &self ) -> LocalTableInContext<'_, CanonicalUserType<'tcx>> { LocalTableInContext { local_id_root: self.local_id_root, data: &self.user_provided_types } } pub fn user_provided_types_mut( &mut self ) -> LocalTableInContextMut<'_, CanonicalUserType<'tcx>> { LocalTableInContextMut { local_id_root: self.local_id_root, data: &mut self.user_provided_types } } pub fn node_types(&self) -> LocalTableInContext<'_, Ty<'tcx>> { LocalTableInContext { local_id_root: self.local_id_root, data: &self.node_types } } pub fn node_types_mut(&mut self) -> LocalTableInContextMut<'_, Ty<'tcx>> { LocalTableInContextMut { local_id_root: self.local_id_root, data: &mut self.node_types } } pub fn node_type(&self, id: hir::HirId) -> Ty<'tcx> { self.node_type_opt(id).unwrap_or_else(|| bug!("node_type: no type for node `{}`", tls::with(|tcx| tcx.hir().node_to_string(id))) ) } pub fn node_type_opt(&self, id: hir::HirId) -> Option<Ty<'tcx>> { validate_hir_id_for_typeck_tables(self.local_id_root, id, false); self.node_types.get(&id.local_id).cloned() } pub fn node_substs_mut(&mut self) -> LocalTableInContextMut<'_, SubstsRef<'tcx>> { LocalTableInContextMut { local_id_root: self.local_id_root, data: &mut self.node_substs } } pub fn node_substs(&self, id: hir::HirId) -> SubstsRef<'tcx> { validate_hir_id_for_typeck_tables(self.local_id_root, id, false); self.node_substs.get(&id.local_id).cloned().unwrap_or_else(|| InternalSubsts::empty()) } pub fn node_substs_opt(&self, id: hir::HirId) -> Option<SubstsRef<'tcx>> { validate_hir_id_for_typeck_tables(self.local_id_root, id, false); self.node_substs.get(&id.local_id).cloned() } // Returns the type of a pattern as a monotype. Like @expr_ty, this function // doesn't provide type parameter substitutions. pub fn pat_ty(&self, pat: &hir::Pat) -> Ty<'tcx> { self.node_type(pat.hir_id) } pub fn pat_ty_opt(&self, pat: &hir::Pat) -> Option<Ty<'tcx>> { self.node_type_opt(pat.hir_id) } // Returns the type of an expression as a monotype. // // NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in // some cases, we insert `Adjustment` annotations such as auto-deref or // auto-ref. The type returned by this function does not consider such // adjustments. See `expr_ty_adjusted()` instead. 
// // NB (2): This type doesn't provide type parameter substitutions; e.g., if you // ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize" // instead of "fn(ty) -> T with T = isize". pub fn expr_ty(&self, expr: &hir::Expr) -> Ty<'tcx> { self.node_type(expr.hir_id) } pub fn expr_ty_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> { self.node_type_opt(expr.hir_id) } pub fn adjustments(&self) -> LocalTableInContext<'_, Vec<ty::adjustment::Adjustment<'tcx>>> { LocalTableInContext { local_id_root: self.local_id_root, data: &self.adjustments } } pub fn adjustments_mut(&mut self) -> LocalTableInContextMut<'_, Vec<ty::adjustment::Adjustment<'tcx>>> { LocalTableInContextMut { local_id_root: self.local_id_root, data: &mut self.adjustments } } pub fn expr_adjustments(&self, expr: &hir::Expr) -> &[ty::adjustment::Adjustment<'tcx>] { validate_hir_id_for_typeck_tables(self.local_id_root, expr.hir_id, false); self.adjustments.get(&expr.hir_id.local_id).map_or(&[], |a| &a[..]) } /// Returns the type of `expr`, considering any `Adjustment` /// entry recorded for that expression. pub fn expr_ty_adjusted(&self, expr: &hir::Expr) -> Ty<'tcx> { self.expr_adjustments(expr) .last() .map_or_else(|| self.expr_ty(expr), |adj| adj.target) } pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> { self.expr_adjustments(expr) .last() .map(|adj| adj.target) .or_else(|| self.expr_ty_opt(expr)) } pub fn is_method_call(&self, expr: &hir::Expr) -> bool { // Only paths and method calls/overloaded operators have // entries in type_dependent_defs, ignore the former here. if let hir::ExprKind::Path(_) = expr.kind { return false; } match self.type_dependent_defs().get(expr.hir_id) { Some(Ok((DefKind::Method, _))) => true, _ => false } } pub fn pat_binding_modes(&self) -> LocalTableInContext<'_, BindingMode> { LocalTableInContext { local_id_root: self.local_id_root, data: &self.pat_binding_modes } } pub fn pat_binding_modes_mut(&mut self) -> LocalTableInContextMut<'_, BindingMode> { LocalTableInContextMut { local_id_root: self.local_id_root, data: &mut self.pat_binding_modes } } pub fn pat_adjustments(&self) -> LocalTableInContext<'_, Vec<Ty<'tcx>>> { LocalTableInContext { local_id_root: self.local_id_root, data: &self.pat_adjustments, } } pub fn pat_adjustments_mut(&mut self) -> LocalTableInContextMut<'_, Vec<Ty<'tcx>>> { LocalTableInContextMut { local_id_root: self.local_id_root, data: &mut self.pat_adjustments, } } pub fn upvar_capture(&self, upvar_id: ty::UpvarId) -> ty::UpvarCapture<'tcx> { self.upvar_capture_map[&upvar_id] } pub fn closure_kind_origins(&self) -> LocalTableInContext<'_, (Span, ast::Name)> { LocalTableInContext { local_id_root: self.local_id_root, data: &self.closure_kind_origins } } pub fn closure_kind_origins_mut(&mut self) -> LocalTableInContextMut<'_, (Span, ast::Name)> { LocalTableInContextMut { local_id_root: self.local_id_root, data: &mut self.closure_kind_origins } } pub fn liberated_fn_sigs(&self) -> LocalTableInContext<'_, ty::FnSig<'tcx>> { LocalTableInContext { local_id_root: self.local_id_root, data: &self.liberated_fn_sigs } } pub fn liberated_fn_sigs_mut(&mut self) -> LocalTableInContextMut<'_, ty::FnSig<'tcx>> { LocalTableInContextMut { local_id_root: self.local_id_root, data: &mut self.liberated_fn_sigs } } pub fn fru_field_types(&self) -> LocalTableInContext<'_, Vec<Ty<'tcx>>> { LocalTableInContext { local_id_root: self.local_id_root, data: &self.fru_field_types } } pub fn fru_field_types_mut(&mut self) -> LocalTableInContextMut<'_, Vec<Ty<'tcx>>> { 
LocalTableInContextMut { local_id_root: self.local_id_root, data: &mut self.fru_field_types } } pub fn is_coercion_cast(&self, hir_id: hir::HirId) -> bool { validate_hir_id_for_typeck_tables(self.local_id_root, hir_id, true); self.coercion_casts.contains(&hir_id.local_id) } pub fn set_coercion_cast(&mut self, id: ItemLocalId) { self.coercion_casts.insert(id); }
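    // Illustrative usage sketch (names assumed for the example only): code that
    // holds a `tables: &TypeckTables<'tcx>` for some body and an `expr` from
    // that same body can read types as follows.
    //
    //     let before = tables.expr_ty(expr);          // pre-adjustment type
    //     let after = tables.expr_ty_adjusted(expr);  // after auto-deref/auto-ref
    //     let maybe = tables.expr_ty_opt(expr);       // non-panicking variant
    //
    // Each accessor goes through `validate_hir_id_for_typeck_tables`, so a node
    // from a different `local_id_root` triggers a `bug!` instead of silently
    // returning data for the wrong body.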
} } impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for TypeckTables<'tcx> { fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { let ty::TypeckTables { local_id_root, ref type_dependent_defs, ref field_indices, ref user_provided_types, ref user_provided_sigs, ref node_types, ref node_substs, ref adjustments, ref pat_binding_modes, ref pat_adjustments, ref upvar_capture_map, ref closure_kind_origins, ref liberated_fn_sigs, ref fru_field_types, ref coercion_casts, ref used_trait_imports, tainted_by_errors, ref free_region_map, ref concrete_opaque_types, ref upvar_list, ref generator_interior_types, } = *self; hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| { type_dependent_defs.hash_stable(hcx, hasher); field_indices.hash_stable(hcx, hasher); user_provided_types.hash_stable(hcx, hasher); user_provided_sigs.hash_stable(hcx, hasher); node_types.hash_stable(hcx, hasher); node_substs.hash_stable(hcx, hasher); adjustments.hash_stable(hcx, hasher); pat_binding_modes.hash_stable(hcx, hasher); pat_adjustments.hash_stable(hcx, hasher); hash_stable_hashmap(hcx, hasher, upvar_capture_map, |up_var_id, hcx| { let ty::UpvarId { var_path, closure_expr_id } = *up_var_id; let local_id_root = local_id_root.expect("trying to hash invalid TypeckTables"); let var_owner_def_id = DefId { krate: local_id_root.krate, index: var_path.hir_id.owner, }; let closure_def_id = DefId { krate: local_id_root.krate, index: closure_expr_id.to_def_id().index, }; (hcx.def_path_hash(var_owner_def_id), var_path.hir_id.local_id, hcx.def_path_hash(closure_def_id)) }); closure_kind_origins.hash_stable(hcx, hasher); liberated_fn_sigs.hash_stable(hcx, hasher); fru_field_types.hash_stable(hcx, hasher); coercion_casts.hash_stable(hcx, hasher); used_trait_imports.hash_stable(hcx, hasher); tainted_by_errors.hash_stable(hcx, hasher); free_region_map.hash_stable(hcx, hasher); concrete_opaque_types.hash_stable(hcx, hasher); upvar_list.hash_stable(hcx, hasher); generator_interior_types.hash_stable(hcx, hasher); }) } } rustc_index::newtype_index! { pub struct UserTypeAnnotationIndex { derive [HashStable] DEBUG_FORMAT = "UserType({})", const START_INDEX = 0, } } /// Mapping of type annotation indices to canonical user type annotations. pub type CanonicalUserTypeAnnotations<'tcx> = IndexVec<UserTypeAnnotationIndex, CanonicalUserTypeAnnotation<'tcx>>; #[derive(Clone, Debug, RustcEncodable, RustcDecodable, HashStable, TypeFoldable, Lift)] pub struct CanonicalUserTypeAnnotation<'tcx> { pub user_ty: CanonicalUserType<'tcx>, pub span: Span, pub inferred_ty: Ty<'tcx>, } /// Canonicalized user type annotation. pub type CanonicalUserType<'tcx> = Canonical<'tcx, UserType<'tcx>>; impl CanonicalUserType<'tcx> { /// Returns `true` if this represents a substitution of the form `[?0, ?1, ?2]`, /// i.e., each thing is mapped to a canonical variable with the same index. pub fn is_identity(&self) -> bool { match self.value { UserType::Ty(_) => false, UserType::TypeOf(_, user_substs) => { if user_substs.user_self_ty.is_some() { return false; } user_substs.substs.iter().zip(BoundVar::new(0)..).all(|(kind, cvar)| { match kind.unpack() { GenericArgKind::Type(ty) => match ty.kind { ty::Bound(debruijn, b) => { // We only allow a `ty::INNERMOST` index in substitutions. assert_eq!(debruijn, ty::INNERMOST); cvar == b.var } _ => false, }, GenericArgKind::Lifetime(r) => match r { ty::ReLateBound(debruijn, br) => { // We only allow a `ty::INNERMOST` index in substitutions. 
assert_eq!(*debruijn, ty::INNERMOST); cvar == br.assert_bound_var() } _ => false, }, GenericArgKind::Const(ct) => match ct.val { ty::ConstKind::Bound(debruijn, b) => { // We only allow a `ty::INNERMOST` index in substitutions. assert_eq!(debruijn, ty::INNERMOST); cvar == b } _ => false, }, } }) }, } } } /// A user-given type annotation attached to a constant. These arise /// from constants that are named via paths, like `Foo::<A>::new` and /// so forth. #[derive(Copy, Clone, Debug, PartialEq, RustcEncodable, RustcDecodable)] #[derive(HashStable, TypeFoldable, Lift)] pub enum UserType<'tcx> { Ty(Ty<'tcx>), /// The canonical type is the result of `type_of(def_id)` with the /// given substitutions applied. TypeOf(DefId, UserSubsts<'tcx>), } impl<'tcx> CommonTypes<'tcx> { fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> { let mk = |ty| interners.intern_ty(ty); CommonTypes { unit: mk(Tuple(List::empty())), bool: mk(Bool), char: mk(Char), never: mk(Never), err: mk(Error), isize: mk(Int(ast::IntTy::Isize)), i8: mk(Int(ast::IntTy::I8)), i16: mk(Int(ast::IntTy::I16)), i32: mk(Int(ast::IntTy::I32)), i64: mk(Int(ast::IntTy::I64)), i128: mk(Int(ast::IntTy::I128)), usize: mk(Uint(ast::UintTy::Usize)), u8: mk(Uint(ast::UintTy::U8)), u16: mk(Uint(ast::UintTy::U16)), u32: mk(Uint(ast::UintTy::U32)), u64: mk(Uint(ast::UintTy::U64)), u128: mk(Uint(ast::UintTy::U128)), f32: mk(Float(ast::FloatTy::F32)), f64: mk(Float(ast::FloatTy::F64)), self_param: mk(ty::Param(ty::ParamTy { index: 0, name: kw::SelfUpper, })), trait_object_dummy_self: mk(Infer(ty::FreshTy(0))), } } } impl<'tcx> CommonLifetimes<'tcx> { fn new(interners: &CtxtInterners<'tcx>) -> CommonLifetimes<'tcx> { let mk = |r| { interners.region.intern(r, |r| { Interned(interners.arena.alloc(r)) }).0 }; CommonLifetimes { re_empty: mk(RegionKind::ReEmpty), re_static: mk(RegionKind::ReStatic), re_erased: mk(RegionKind::ReErased), } } } impl<'tcx> CommonConsts<'tcx> { fn new(interners: &CtxtInterners<'tcx>, types: &CommonTypes<'tcx>) -> CommonConsts<'tcx> { let mk_const = |c| { interners.const_.intern(c, |c| { Interned(interners.arena.alloc(c)) }).0 }; CommonConsts { err: mk_const(ty::Const { val: ty::ConstKind::Value(ConstValue::Scalar(Scalar::zst())), ty: types.err, }), } } } // This struct contains information regarding the `ReFree(FreeRegion)` corresponding to a lifetime // conflict. #[derive(Debug)] pub struct FreeRegionInfo { // def id corresponding to FreeRegion pub def_id: DefId, // the bound region corresponding to FreeRegion pub boundregion: ty::BoundRegion, // checks if bound region is in Impl Item pub is_impl_item: bool, } /// The central data structure of the compiler. It stores references /// to the various **arenas** and also houses the results of the /// various **compiler queries** that have been performed. See the /// [rustc guide] for more details. /// /// [rustc guide]: https://rust-lang.github.io/rustc-guide/ty.html #[derive(Copy, Clone)] #[rustc_diagnostic_item = "TyCtxt"] pub struct TyCtxt<'tcx> { gcx: &'tcx GlobalCtxt<'tcx>, } impl<'tcx> Deref for TyCtxt<'tcx> { type Target = &'tcx GlobalCtxt<'tcx>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.gcx } } pub struct GlobalCtxt<'tcx> { pub arena: &'tcx WorkerLocal<Arena<'tcx>>, interners: CtxtInterners<'tcx>, cstore: Box<CrateStoreDyn>, pub sess: &'tcx Session, pub lint_store: Lrc<lint::LintStore>, pub dep_graph: DepGraph, pub prof: SelfProfilerRef, /// Common types, pre-interned for your convenience. 
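    /// For instance, `tcx.types.unit` or `tcx.types.u8` can be used directly
    /// rather than re-interning the corresponding `TyKind`.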
pub types: CommonTypes<'tcx>, /// Common lifetimes, pre-interned for your convenience. pub lifetimes: CommonLifetimes<'tcx>, /// Common consts, pre-interned for your convenience. pub consts: CommonConsts<'tcx>, /// Resolutions of `extern crate` items produced by resolver. extern_crate_map: NodeMap<CrateNum>, /// Map indicating what traits are in scope for places where this /// is relevant; generated by resolve. trait_map: FxHashMap<DefIndex, FxHashMap<ItemLocalId, StableVec<TraitCandidate>>>, /// Export map produced by name resolution. export_map: FxHashMap<DefId, Vec<Export<hir::HirId>>>, hir_map: hir_map::Map<'tcx>, /// A map from `DefPathHash` -> `DefId`. Includes `DefId`s from the local crate /// as well as all upstream crates. Only populated in incremental mode. pub def_path_hash_to_def_id: Option<FxHashMap<DefPathHash, DefId>>, pub queries: query::Queries<'tcx>, maybe_unused_trait_imports: FxHashSet<DefId>, maybe_unused_extern_crates: Vec<(DefId, Span)>, /// A map of glob use to a set of names it actually imports. Currently only /// used in save-analysis. glob_map: FxHashMap<DefId, FxHashSet<ast::Name>>, /// Extern prelude entries. The value is `true` if the entry was introduced /// via `extern crate` item and not `--extern` option or compiler built-in. pub extern_prelude: FxHashMap<ast::Name, bool>, // Internal cache for metadata decoding. No need to track deps on this. pub rcache: Lock<FxHashMap<ty::CReaderCacheKey, Ty<'tcx>>>, /// Caches the results of trait selection. This cache is used /// for things that do not have to do with the parameters in scope. pub selection_cache: traits::SelectionCache<'tcx>, /// Caches the results of trait evaluation. This cache is used /// for things that do not have to do with the parameters in scope. /// Merge this with `selection_cache`? pub evaluation_cache: traits::EvaluationCache<'tcx>, /// The definite name of the current crate after taking into account /// attributes, commandline parameters, etc. pub crate_name: Symbol, /// Data layout specification for the current target. pub data_layout: TargetDataLayout, stability_interner: ShardedHashMap<&'tcx attr::Stability, ()>, /// Stores the value of constants (and deduplicates the actual memory) allocation_interner: ShardedHashMap<&'tcx Allocation, ()>, pub alloc_map: Lock<interpret::AllocMap<'tcx>>, layout_interner: ShardedHashMap<&'tcx LayoutDetails, ()>, output_filenames: Arc<OutputFilenames>, } impl<'tcx> TyCtxt<'tcx> { #[inline(always)] pub fn hir(self) -> &'tcx hir_map::Map<'tcx> { &self.hir_map } pub fn alloc_steal_mir(self, mir: BodyAndCache<'tcx>) -> &'tcx Steal<BodyAndCache<'tcx>> { self.arena.alloc(Steal::new(mir)) } pub fn alloc_steal_promoted(self, promoted: IndexVec<Promoted, BodyAndCache<'tcx>>) -> &'tcx Steal<IndexVec<Promoted, BodyAndCache<'tcx>>> { self.arena.alloc(Steal::new(promoted)) } pub fn intern_promoted(self, promoted: IndexVec<Promoted, BodyAndCache<'tcx>>) -> &'tcx IndexVec<Promoted, BodyAndCache<'tcx>> { self.arena.alloc(promoted) } pub fn alloc_adt_def( self, did: DefId, kind: AdtKind, variants: IndexVec<VariantIdx, ty::VariantDef>, repr: ReprOptions, ) -> &'tcx ty::AdtDef { let def = ty::AdtDef::new(self, did, kind, variants, repr); self.arena.alloc(def) } pub fn intern_const_alloc(self, alloc: Allocation) -> &'tcx Allocation { self.allocation_interner.intern(alloc, |alloc| { self.arena.alloc(alloc) }) } /// Allocates a read-only byte or string literal for `mir::interpret`. 
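    ///
    /// A hypothetical call, `tcx.allocate_bytes(b"bytes")`, interns the backing
    /// `Allocation` (deduplicating identical contents) and returns an `AllocId`
    /// referring to it.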
pub fn allocate_bytes(self, bytes: &[u8]) -> interpret::AllocId { // Create an allocation that just contains these bytes. let alloc = interpret::Allocation::from_byte_aligned_bytes(bytes); let alloc = self.intern_const_alloc(alloc); self.alloc_map.lock().create_memory_alloc(alloc) } pub fn intern_stability(self, stab: attr::Stability) -> &'tcx attr::Stability { self.stability_interner.intern(stab, |stab| { self.arena.alloc(stab) }) } pub fn intern_layout(self, layout: LayoutDetails) -> &'tcx LayoutDetails { self.layout_interner.intern(layout, |layout| { self.arena.alloc(layout) }) } /// Returns a range of the start/end indices specified with the /// `rustc_layout_scalar_valid_range` attribute. pub fn layout_scalar_valid_range(self, def_id: DefId) -> (Bound<u128>, Bound<u128>) { let attrs = self.get_attrs(def_id); let get = |name| { let attr = match attrs.iter().find(|a| a.check_name(name)) { Some(attr) => attr, None => return Bound::Unbounded, }; for meta in attr.meta_item_list().expect("rustc_layout_scalar_valid_range takes args") { match meta.literal().expect("attribute takes lit").kind { ast::LitKind::Int(a, _) => return Bound::Included(a), _ => span_bug!(attr.span, "rustc_layout_scalar_valid_range expects int arg"), } } span_bug!(attr.span, "no arguments to `rustc_layout_scalar_valid_range` attribute"); }; (get(sym::rustc_layout_scalar_valid_range_start), get(sym::rustc_layout_scalar_valid_range_end)) } pub fn lift<T: ?Sized + Lift<'tcx>>(self, value: &T) -> Option<T::Lifted> { value.lift_to_tcx(self) } /// Creates a type context and call the closure with a `TyCtxt` reference /// to the context. The closure enforces that the type context and any interned /// value (types, substs, etc.) can only be used while `ty::tls` has a valid /// reference to the context, to allow formatting values that need it. pub fn create_global_ctxt( s: &'tcx Session, lint_store: Lrc<lint::LintStore>, local_providers: ty::query::Providers<'tcx>, extern_providers: ty::query::Providers<'tcx>, arenas: &'tcx AllArenas, arena: &'tcx WorkerLocal<Arena<'tcx>>, resolutions: ty::ResolverOutputs, hir: hir_map::Map<'tcx>, on_disk_query_result_cache: query::OnDiskCache<'tcx>, crate_name: &str, output_filenames: &OutputFilenames, ) -> GlobalCtxt<'tcx> { let data_layout = TargetDataLayout::parse(&s.target.target).unwrap_or_else(|err| { s.fatal(&err); }); let interners = CtxtInterners::new(&arenas.interner); let common_types = CommonTypes::new(&interners); let common_lifetimes = CommonLifetimes::new(&interners); let common_consts = CommonConsts::new(&interners, &common_types); let dep_graph = hir.dep_graph.clone(); let cstore = resolutions.cstore; let crates = cstore.crates_untracked(); let max_cnum = crates.iter().map(|c| c.as_usize()).max().unwrap_or(0); let mut providers = IndexVec::from_elem_n(extern_providers, max_cnum + 1); providers[LOCAL_CRATE] = local_providers; let def_path_hash_to_def_id = if s.opts.build_dep_graph() { let def_path_tables = crates .iter() .map(|&cnum| (cnum, cstore.def_path_table(cnum))) .chain(iter::once((LOCAL_CRATE, hir.definitions().def_path_table()))); // Precompute the capacity of the hashmap so we don't have to // re-allocate when populating it. 
let capacity = def_path_tables.clone().map(|(_, t)| t.size()).sum::<usize>(); let mut map: FxHashMap<_, _> = FxHashMap::with_capacity_and_hasher( capacity, ::std::default::Default::default() ); for (cnum, def_path_table) in def_path_tables { def_path_table.add_def_path_hashes_to(cnum, &mut map); } Some(map) } else { None }; let mut trait_map: FxHashMap<_, FxHashMap<_, _>> = FxHashMap::default(); for (k, v) in resolutions.trait_map { let hir_id = hir.node_to_hir_id(k); let map = trait_map.entry(hir_id.owner).or_default(); map.insert(hir_id.local_id, StableVec::new(v)); } GlobalCtxt { sess: s, lint_store, cstore, arena, interners, dep_graph, prof: s.prof.clone(), types: common_types, lifetimes: common_lifetimes, consts: common_consts, extern_crate_map: resolutions.extern_crate_map, trait_map, export_map: resolutions.export_map.into_iter().map(|(k, v)| { let exports: Vec<_> = v.into_iter().map(|e| { e.map_id(|id| hir.node_to_hir_id(id)) }).collect(); (k, exports) }).collect(), maybe_unused_trait_imports: resolutions.maybe_unused_trait_imports .into_iter() .map(|id| hir.local_def_id_from_node_id(id)) .collect(), maybe_unused_extern_crates: resolutions.maybe_unused_extern_crates .into_iter() .map(|(id, sp)| (hir.local_def_id_from_node_id(id), sp)) .collect(), glob_map: resolutions.glob_map.into_iter().map(|(id, names)| { (hir.local_def_id_from_node_id(id), names) }).collect(), extern_prelude: resolutions.extern_prelude, hir_map: hir, def_path_hash_to_def_id, queries: query::Queries::new( providers, extern_providers, on_disk_query_result_cache, ), rcache: Default::default(), selection_cache: Default::default(), evaluation_cache: Default::default(), crate_name: Symbol::intern(crate_name), data_layout, layout_interner: Default::default(), stability_interner: Default::default(), allocation_interner: Default::default(), alloc_map: Lock::new(interpret::AllocMap::new()), output_filenames: Arc::new(output_filenames.clone()), } } pub fn consider_optimizing<T: Fn() -> String>(&self, msg: T) -> bool { let cname = self.crate_name(LOCAL_CRATE).as_str(); self.sess.consider_optimizing(&cname, msg) } pub fn lib_features(self) -> &'tcx middle::lib_features::LibFeatures { self.get_lib_features(LOCAL_CRATE) } /// Obtain all lang items of this crate and all dependencies (recursively) pub fn lang_items(self) -> &'tcx middle::lang_items::LanguageItems { self.get_lang_items(LOCAL_CRATE) } /// Obtain the given diagnostic item's `DefId`. Use `is_diagnostic_item` if you just want to /// compare against another `DefId`, since `is_diagnostic_item` is cheaper. pub fn get_diagnostic_item(self, name: Symbol) -> Option<DefId> { self.all_diagnostic_items(LOCAL_CRATE).get(&name).copied() } /// Check whether the diagnostic item with the given `name` has the given `DefId`. pub fn is_diagnostic_item(self, name: Symbol, did: DefId) -> bool { self.diagnostic_items(did.krate).get(&name) == Some(&did) } pub fn stability(self) -> &'tcx stability::Index<'tcx> { self.stability_index(LOCAL_CRATE) } pub fn crates(self) -> &'tcx [CrateNum] { self.all_crate_nums(LOCAL_CRATE) } pub fn allocator_kind(self) -> Option<AllocatorKind> { self.cstore.allocator_kind() } pub fn features(self) -> &'tcx rustc_feature::Features { self.features_query(LOCAL_CRATE) } pub fn def_key(self, id: DefId) -> hir_map::DefKey { if id.is_local() { self.hir().def_key(id) } else { self.cstore.def_key(id) } } /// Converts a `DefId` into its fully expanded `DefPath` (every /// `DefId` is really just an interned `DefPath`). 
/// /// Note that if `id` is not local to this crate, the result will /// be a non-local `DefPath`. pub fn def_path(self, id: DefId) -> hir_map::DefPath { if id.is_local() { self.hir().def_path(id) } else { self.cstore.def_path(id) } } /// Returns whether or not the crate with CrateNum 'cnum' /// is marked as a private dependency pub fn is_private_dep(self, cnum: CrateNum) -> bool { if cnum == LOCAL_CRATE { false } else { self.cstore.crate_is_private_dep_untracked(cnum) } } #[inline] pub fn def_path_hash(self, def_id: DefId) -> hir_map::DefPathHash { if def_id.is_local() { self.hir().definitions().def_path_hash(def_id.index) } else { self.cstore.def_path_hash(def_id) } } pub fn def_path_debug_str(self, def_id: DefId) -> String { // We are explicitly not going through queries here in order to get // crate name and disambiguator since this code is called from debug!() // statements within the query system and we'd run into endless // recursion otherwise. let (crate_name, crate_disambiguator) = if def_id.is_local() { (self.crate_name.clone(), self.sess.local_crate_disambiguator()) } else { (self.cstore.crate_name_untracked(def_id.krate), self.cstore.crate_disambiguator_untracked(def_id.krate)) }; format!("{}[{}]{}", crate_name, // Don't print the whole crate disambiguator. That's just // annoying in debug output. &(crate_disambiguator.to_fingerprint().to_hex())[..4], self.def_path(def_id).to_string_no_crate()) } pub fn metadata_encoding_version(self) -> Vec<u8> { self.cstore.metadata_encoding_version().to_vec() } pub fn encode_metadata(self)-> EncodedMetadata { let _prof_timer = self.prof.generic_activity("generate_crate_metadata"); self.cstore.encode_metadata(self) } // Note that this is *untracked* and should only be used within the query // system if the result is otherwise tracked through queries pub fn cstore_as_any(self) -> &'tcx dyn Any { self.cstore.as_any() } #[inline(always)] pub fn create_stable_hashing_context(self) -> StableHashingContext<'tcx> { let krate = self.gcx.hir_map.forest.untracked_krate(); StableHashingContext::new(self.sess, krate, self.hir().definitions(), &*self.cstore) } // This method makes sure that we have a DepNode and a Fingerprint for // every upstream crate. It needs to be called once right after the tcx is // created. // With full-fledged red/green, the method will probably become unnecessary // as this will be done on-demand. pub fn allocate_metadata_dep_nodes(self) { // We cannot use the query versions of crates() and crate_hash(), since // those would need the DepNodes that we are allocating here. for cnum in self.cstore.crates_untracked() { let dep_node = DepNode::new(self, DepConstructor::CrateMetadata(cnum)); let crate_hash = self.cstore.crate_hash_untracked(cnum); self.dep_graph.with_task(dep_node, self, crate_hash, |_, x| x, // No transformation needed dep_graph::hash_result, ); } } pub fn serialize_query_result_cache<E>(self, encoder: &mut E) -> Result<(), E::Error> where E: ty::codec::TyEncoder { self.queries.on_disk_cache.serialize(self, encoder) } /// If `true`, we should use the MIR-based borrowck, but also /// fall back on the AST borrowck if the MIR-based one errors. pub fn migrate_borrowck(self) -> bool { self.borrowck_mode().migrate() } /// If `true`, make MIR codegen for `match` emit a temp that holds a /// borrow of the input to the match expression. 
pub fn generate_borrow_of_any_match_input(&self) -> bool { self.emit_read_for_match() } /// If `true`, make MIR codegen for `match` emit FakeRead /// statements (which simulate the maximal effect of executing the /// patterns in a match arm). pub fn emit_read_for_match(&self) -> bool { !self.sess.opts.debugging_opts.nll_dont_emit_read_for_match } /// What mode(s) of borrowck should we run? AST? MIR? both? /// (Also considers the `#![feature(nll)]` setting.) pub fn borrowck_mode(&self) -> BorrowckMode { // Here are the main constraints we need to deal with: // // 1. An opts.borrowck_mode of `BorrowckMode::Migrate` is // synonymous with no `-Z borrowck=...` flag at all. // // 2. We want to allow developers on the Nightly channel // to opt back into the "hard error" mode for NLL, // (which they can do via specifying `#![feature(nll)]` // explicitly in their crate). // // So, this precedence list is how pnkfelix chose to work with // the above constraints: // // * `#![feature(nll)]` *always* means use NLL with hard // errors. (To simplify the code here, it now even overrides // a user's attempt to specify `-Z borrowck=compare`, which // we arguably do not need anymore and should remove.) // // * Otherwise, if no `-Z borrowck=...` then use migrate mode // // * Otherwise, use the behavior requested via `-Z borrowck=...` if self.features().nll { return BorrowckMode::Mir; } self.sess.opts.borrowck_mode } #[inline] pub fn local_crate_exports_generics(self) -> bool { debug_assert!(self.sess.opts.share_generics()); self.sess.crate_types.borrow().iter().any(|crate_type| { match crate_type { CrateType::Executable | CrateType::Staticlib | CrateType::ProcMacro | CrateType::Cdylib => false, // FIXME rust-lang/rust#64319, rust-lang/rust#64872: // We want to block export of generics from dylibs, // but we must fix rust-lang/rust#65890 before we can // do that robustly. CrateType::Dylib => true, CrateType::Rlib => true, } }) } // Returns the `DefId` and the `BoundRegion` corresponding to the given region. pub fn is_suitable_region(&self, region: Region<'tcx>) -> Option<FreeRegionInfo> { let (suitable_region_binding_scope, bound_region) = match *region { ty::ReFree(ref free_region) => (free_region.scope, free_region.bound_region), ty::ReEarlyBound(ref ebr) => ( self.parent(ebr.def_id).unwrap(), ty::BoundRegion::BrNamed(ebr.def_id, ebr.name), ), _ => return None, // not a free region }; let hir_id = self.hir() .as_local_hir_id(suitable_region_binding_scope) .unwrap(); let is_impl_item = match self.hir().find(hir_id) { Some(Node::Item(..)) | Some(Node::TraitItem(..)) => false, Some(Node::ImplItem(..)) => { self.is_bound_region_in_impl_item(suitable_region_binding_scope) } _ => return None, }; return Some(FreeRegionInfo { def_id: suitable_region_binding_scope, boundregion: bound_region, is_impl_item, }); } pub fn return_type_impl_trait( &self, scope_def_id: DefId, ) -> Option<(Ty<'tcx>, Span)> { // HACK: `type_of_def_id()` will fail on these (#55796), so return `None`. let hir_id = self.hir().as_local_hir_id(scope_def_id).unwrap(); match self.hir().get(hir_id) { Node::Item(item) => { match item.kind { ItemKind::Fn(..) 
=> { /* `type_of_def_id()` will work */ } _ => { return None; } } } _ => { /* `type_of_def_id()` will work or panic */ } } let ret_ty = self.type_of(scope_def_id); match ret_ty.kind { ty::FnDef(_, _) => { let sig = ret_ty.fn_sig(*self); let output = self.erase_late_bound_regions(&sig.output()); if output.is_impl_trait() { let fn_decl = self.hir().fn_decl_by_hir_id(hir_id).unwrap(); Some((output, fn_decl.output.span())) } else { None } } _ => None } } // Checks if the bound region is in Impl Item. pub fn is_bound_region_in_impl_item( &self, suitable_region_binding_scope: DefId, ) -> bool { let container_id = self.associated_item(suitable_region_binding_scope) .container .id(); if self.impl_trait_ref(container_id).is_some() { // For now, we do not try to target impls of traits. This is // because this message is going to suggest that the user // change the fn signature, but they may not be free to do so, // since the signature must match the trait. // // FIXME(#42706) -- in some cases, we could do better here. return true; } false } /// Determines whether identifiers in the assembly have strict naming rules. /// Currently, only NVPTX* targets need it. pub fn has_strict_asm_symbol_naming(&self) -> bool { self.sess.target.target.arch.contains("nvptx") } /// Returns `&'static core::panic::Location<'static>`. pub fn caller_location_ty(&self) -> Ty<'tcx> { self.mk_imm_ref( self.lifetimes.re_static, self.type_of(self.require_lang_item(PanicLocationLangItem, None)) .subst(*self, self.mk_substs([self.lifetimes.re_static.into()].iter())), ) } } impl<'tcx> GlobalCtxt<'tcx> { /// Calls the closure with a local `TyCtxt` using the given arena. /// `interners` is a slot passed so we can create a CtxtInterners /// with the same lifetime as `arena`. pub fn enter_local<F, R>(&'tcx self, f: F) -> R where F: FnOnce(TyCtxt<'tcx>) -> R, { let tcx = TyCtxt { gcx: self, }; ty::tls::with_related_context(tcx, |icx| { let new_icx = ty::tls::ImplicitCtxt { tcx, query: icx.query.clone(), diagnostics: icx.diagnostics, layout_depth: icx.layout_depth, task_deps: icx.task_deps, }; ty::tls::enter_context(&new_icx, |_| { f(tcx) }) }) } } /// A trait implemented for all `X<'a>` types that can be safely and /// efficiently converted to `X<'tcx>` as long as they are part of the /// provided `TyCtxt<'tcx>`. /// This can be done, for example, for `Ty<'tcx>` or `SubstsRef<'tcx>` /// by looking them up in their respective interners. /// /// However, this is still not the best implementation as it does /// need to compare the components, even for interned values. /// It would be more efficient if `TypedArena` provided a way to /// determine whether the address is in the allocated range. /// /// `None` is returned if the value or one of the components is not part /// of the provided context. /// For `Ty`, `None` can be returned if either the type interner doesn't /// contain the `TyKind` key or if the address of the interned /// pointer differs. The latter case is possible if a primitive type, /// e.g., `()` or `u8`, was interned in a different context. pub trait Lift<'tcx>: fmt::Debug { type Lifted: fmt::Debug + 'tcx; fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted>; } macro_rules! nop_lift { ($ty:ty => $lifted:ty) => { impl<'a, 'tcx> Lift<'tcx> for $ty { type Lifted = $lifted; fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> { if tcx.interners.arena.in_arena(*self as *const _) { Some(unsafe { mem::transmute(*self) }) } else { None } } } }; } macro_rules! 
nop_list_lift { ($ty:ty => $lifted:ty) => { impl<'a, 'tcx> Lift<'tcx> for &'a List<$ty> { type Lifted = &'tcx List<$lifted>; fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> { if self.is_empty() { return Some(List::empty()); } if tcx.interners.arena.in_arena(*self as *const _) { Some(unsafe { mem::transmute(*self) }) } else { None } } } }; } nop_lift!{Ty<'a> => Ty<'tcx>} nop_lift!{Region<'a> => Region<'tcx>} nop_lift!{Goal<'a> => Goal<'tcx>} nop_lift!{&'a Const<'a> => &'tcx Const<'tcx>} nop_list_lift!{Goal<'a> => Goal<'tcx>} nop_list_lift!{Clause<'a> => Clause<'tcx>} nop_list_lift!{Ty<'a> => Ty<'tcx>} nop_list_lift!{ExistentialPredicate<'a> => ExistentialPredicate<'tcx>} nop_list_lift!{Predicate<'a> => Predicate<'tcx>} nop_list_lift!{CanonicalVarInfo => CanonicalVarInfo} nop_list_lift!{ProjectionKind => ProjectionKind} // This is the impl for `&'a InternalSubsts<'a>`. nop_list_lift!{GenericArg<'a> => GenericArg<'tcx>} pub mod tls { use super::{GlobalCtxt, TyCtxt, ptr_eq}; use std::fmt; use std::mem; use syntax_pos; use crate::ty::query; use errors::{Diagnostic, TRACK_DIAGNOSTICS}; use rustc_data_structures::OnDrop; use rustc_data_structures::sync::{self, Lrc, Lock}; use rustc_data_structures::thin_vec::ThinVec; use crate::dep_graph::TaskDeps; #[cfg(not(parallel_compiler))] use std::cell::Cell; #[cfg(parallel_compiler)] use rustc_rayon_core as rayon_core; /// This is the implicit state of rustc. It contains the current /// `TyCtxt` and query. It is updated when creating a local interner or /// executing a new query. Whenever there's a `TyCtxt` value available /// you should also have access to an `ImplicitCtxt` through the functions /// in this module. #[derive(Clone)] pub struct ImplicitCtxt<'a, 'tcx> { /// The current `TyCtxt`. Initially created by `enter_global` and updated /// by `enter_local` with a new local interner. pub tcx: TyCtxt<'tcx>, /// The current query job, if any. This is updated by `JobOwner::start` in /// `ty::query::plumbing` when executing a query. pub query: Option<Lrc<query::QueryJob<'tcx>>>, /// Where to store diagnostics for the current query job, if any. /// This is updated by `JobOwner::start` in `ty::query::plumbing` when executing a query. pub diagnostics: Option<&'a Lock<ThinVec<Diagnostic>>>, /// Used to prevent layout from recursing too deeply. pub layout_depth: usize, /// The current dep graph task. This is used to add dependencies to queries /// when executing them. pub task_deps: Option<&'a Lock<TaskDeps>>, } /// Sets Rayon's thread-local variable, which is preserved for Rayon jobs /// to `value` during the call to `f`. It is restored to its previous value after. /// This is used to set the pointer to the new `ImplicitCtxt`. #[cfg(parallel_compiler)] #[inline] fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R { rayon_core::tlv::with(value, f) } /// Gets Rayon's thread-local variable, which is preserved for Rayon jobs. /// This is used to get the pointer to the current `ImplicitCtxt`. #[cfg(parallel_compiler)] #[inline] fn get_tlv() -> usize { rayon_core::tlv::get() } #[cfg(not(parallel_compiler))] thread_local! { /// A thread local variable that stores a pointer to the current `ImplicitCtxt`. static TLV: Cell<usize> = Cell::new(0); } /// Sets TLV to `value` during the call to `f`. /// It is restored to its previous value after. /// This is used to set the pointer to the new `ImplicitCtxt`. 
#[cfg(not(parallel_compiler))] #[inline] fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R { let old = get_tlv(); let _reset = OnDrop(move || TLV.with(|tlv| tlv.set(old))); TLV.with(|tlv| tlv.set(value)); f() } /// Gets the pointer to the current `ImplicitCtxt`. #[cfg(not(parallel_compiler))] fn get_tlv() -> usize { TLV.with(|tlv| tlv.get()) } /// This is a callback from libsyntax as it cannot access the implicit state /// in librustc otherwise. fn span_debug(span: syntax_pos::Span, f: &mut fmt::Formatter<'_>) -> fmt::Result { with_opt(|tcx| { if let Some(tcx) = tcx { write!(f, "{}", tcx.sess.source_map().span_to_string(span)) } else { syntax_pos::default_span_debug(span, f) } }) } /// This is a callback from libsyntax as it cannot access the implicit state /// in librustc otherwise. It is used to when diagnostic messages are /// emitted and stores them in the current query, if there is one. fn track_diagnostic(diagnostic: &Diagnostic) { with_context_opt(|icx| { if let Some(icx) = icx { if let Some(ref diagnostics) = icx.diagnostics { let mut diagnostics = diagnostics.lock(); diagnostics.extend(Some(diagnostic.clone())); } } }) } /// Sets up the callbacks from libsyntax on the current thread. pub fn with_thread_locals<F, R>(f: F) -> R where F: FnOnce() -> R { syntax_pos::SPAN_DEBUG.with(|span_dbg| { let original_span_debug = span_dbg.get(); span_dbg.set(span_debug); let _on_drop = OnDrop(move || { span_dbg.set(original_span_debug); }); TRACK_DIAGNOSTICS.with(|current| { let original = current.get(); current.set(track_diagnostic); let _on_drop = OnDrop(move || { current.set(original); }); f() }) }) } /// Sets `context` as the new current `ImplicitCtxt` for the duration of the function `f`. #[inline] pub fn enter_context<'a, 'tcx, F, R>(context: &ImplicitCtxt<'a, 'tcx>, f: F) -> R where F: FnOnce(&ImplicitCtxt<'a, 'tcx>) -> R, { set_tlv(context as *const _ as usize, || { f(&context) }) } /// Enters `GlobalCtxt` by setting up libsyntax callbacks and /// creating a initial `TyCtxt` and `ImplicitCtxt`. /// This happens once per rustc session and `TyCtxt`s only exists /// inside the `f` function. pub fn enter_global<'tcx, F, R>(gcx: &'tcx GlobalCtxt<'tcx>, f: F) -> R where F: FnOnce(TyCtxt<'tcx>) -> R, { // Update `GCX_PTR` to indicate there's a `GlobalCtxt` available. GCX_PTR.with(|lock| { *lock.lock() = gcx as *const _ as usize; }); // Set `GCX_PTR` back to 0 when we exit. let _on_drop = OnDrop(move || { GCX_PTR.with(|lock| *lock.lock() = 0); }); let tcx = TyCtxt { gcx, }; let icx = ImplicitCtxt { tcx, query: None, diagnostics: None, layout_depth: 0, task_deps: None, }; enter_context(&icx, |_| { f(tcx) }) } scoped_thread_local! { /// Stores a pointer to the `GlobalCtxt` if one is available. /// This is used to access the `GlobalCtxt` in the deadlock handler given to Rayon. pub static GCX_PTR: Lock<usize> } /// Creates a `TyCtxt` and `ImplicitCtxt` based on the `GCX_PTR` thread local. /// This is used in the deadlock handler. pub unsafe fn with_global<F, R>(f: F) -> R where F: for<'tcx> FnOnce(TyCtxt<'tcx>) -> R, { let gcx = GCX_PTR.with(|lock| *lock.lock()); assert!(gcx != 0); let gcx = &*(gcx as *const GlobalCtxt<'_>); let tcx = TyCtxt { gcx, }; let icx = ImplicitCtxt { query: None, diagnostics: None, tcx, layout_depth: 0, task_deps: None, }; enter_context(&icx, |_| f(tcx)) } /// Allows access to the current `ImplicitCtxt` in a closure if one is available. 
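    ///
    /// A hypothetical caller:
    ///
    /// ```ignore (pseudo-Rust)
    /// with_context_opt(|icx| match icx {
    ///     Some(icx) => assert!(icx.layout_depth < 256),
    ///     None => { /* no implicit context on this thread */ }
    /// });
    /// ```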
#[inline] pub fn with_context_opt<F, R>(f: F) -> R where F: for<'a, 'tcx> FnOnce(Option<&ImplicitCtxt<'a, 'tcx>>) -> R, { let context = get_tlv(); if context == 0 { f(None) } else { // We could get a `ImplicitCtxt` pointer from another thread. // Ensure that `ImplicitCtxt` is `Sync`. sync::assert_sync::<ImplicitCtxt<'_, '_>>(); unsafe { f(Some(&*(context as *const ImplicitCtxt<'_, '_>))) } } } /// Allows access to the current `ImplicitCtxt`. /// Panics if there is no `ImplicitCtxt` available. #[inline] pub fn with_context<F, R>(f: F) -> R where F: for<'a, 'tcx> FnOnce(&ImplicitCtxt<'a, 'tcx>) -> R, { with_context_opt(|opt_context| f(opt_context.expect("no ImplicitCtxt stored in tls"))) } /// Allows access to the current `ImplicitCtxt` whose tcx field has the same global /// interner as the tcx argument passed in. This means the closure is given an `ImplicitCtxt` /// with the same `'tcx` lifetime as the `TyCtxt` passed in. /// This will panic if you pass it a `TyCtxt` which has a different global interner from /// the current `ImplicitCtxt`'s `tcx` field. #[inline] pub fn with_related_context<'tcx, F, R>(tcx: TyCtxt<'tcx>, f: F) -> R where F: FnOnce(&ImplicitCtxt<'_, 'tcx>) -> R, { with_context(|context| { unsafe { assert!(ptr_eq(context.tcx.gcx, tcx.gcx)); let context: &ImplicitCtxt<'_, '_> = mem::transmute(context); f(context) } }) } /// Allows access to the `TyCtxt` in the current `ImplicitCtxt`. /// Panics if there is no `ImplicitCtxt` available. #[inline] pub fn with<F, R>(f: F) -> R where F: for<'tcx> FnOnce(TyCtxt<'tcx>) -> R, { with_context(|context| f(context.tcx)) } /// Allows access to the `TyCtxt` in the current `ImplicitCtxt`. /// The closure is passed None if there is no `ImplicitCtxt` available. #[inline] pub fn with_opt<F, R>(f: F) -> R where F: for<'tcx> FnOnce(Option<TyCtxt<'tcx>>) -> R, { with_context_opt(|opt_context| f(opt_context.map(|context| context.tcx))) } } macro_rules! sty_debug_print { ($ctxt: expr, $($variant: ident),*) => {{ // Curious inner module to allow variant names to be used as // variable names. #[allow(non_snake_case)] mod inner { use crate::ty::{self, TyCtxt}; use crate::ty::context::Interned; #[derive(Copy, Clone)] struct DebugStat { total: usize, lt_infer: usize, ty_infer: usize, ct_infer: usize, all_infer: usize, } pub fn go(tcx: TyCtxt<'_>) { let mut total = DebugStat { total: 0, lt_infer: 0, ty_infer: 0, ct_infer: 0, all_infer: 0, }; $(let mut $variant = total;)* let shards = tcx.interners.type_.lock_shards(); let types = shards.iter().flat_map(|shard| shard.keys()); for &Interned(t) in types { let variant = match t.kind { ty::Bool | ty::Char | ty::Int(..) | ty::Uint(..) | ty::Float(..) | ty::Str | ty::Never => continue, ty::Error => /* unimportant */ continue, $(ty::$variant(..) 
=> &mut $variant,)* }; let lt = t.flags.intersects(ty::TypeFlags::HAS_RE_INFER); let ty = t.flags.intersects(ty::TypeFlags::HAS_TY_INFER); let ct = t.flags.intersects(ty::TypeFlags::HAS_CT_INFER); variant.total += 1; total.total += 1; if lt { total.lt_infer += 1; variant.lt_infer += 1 } if ty { total.ty_infer += 1; variant.ty_infer += 1 } if ct { total.ct_infer += 1; variant.ct_infer += 1 } if lt && ty && ct { total.all_infer += 1; variant.all_infer += 1 } } println!("Ty interner total ty lt ct all"); $(println!(" {:18}: {uses:6} {usespc:4.1}%, \ {ty:4.1}% {lt:5.1}% {ct:4.1}% {all:4.1}%", stringify!($variant), uses = $variant.total, usespc = $variant.total as f64 * 100.0 / total.total as f64, ty = $variant.ty_infer as f64 * 100.0 / total.total as f64, lt = $variant.lt_infer as f64 * 100.0 / total.total as f64, ct = $variant.ct_infer as f64 * 100.0 / total.total as f64, all = $variant.all_infer as f64 * 100.0 / total.total as f64); )* println!(" total {uses:6} \ {ty:4.1}% {lt:5.1}% {ct:4.1}% {all:4.1}%", uses = total.total, ty = total.ty_infer as f64 * 100.0 / total.total as f64, lt = total.lt_infer as f64 * 100.0 / total.total as f64, ct = total.ct_infer as f64 * 100.0 / total.total as f64, all = total.all_infer as f64 * 100.0 / total.total as f64) } } inner::go($ctxt) }} } impl<'tcx> TyCtxt<'tcx> { pub fn print_debug_stats(self) { sty_debug_print!( self, Adt, Array, Slice, RawPtr, Ref, FnDef, FnPtr, Placeholder, Generator, GeneratorWitness, Dynamic, Closure, Tuple, Bound, Param, Infer, UnnormalizedProjection, Projection, Opaque, Foreign); println!("InternalSubsts interner: #{}", self.interners.substs.len()); println!("Region interner: #{}", self.interners.region.len()); println!("Stability interner: #{}", self.stability_interner.len()); println!("Allocation interner: #{}", self.allocation_interner.len()); println!("Layout interner: #{}", self.layout_interner.len()); } } /// An entry in an interner. struct Interned<'tcx, T: ?Sized>(&'tcx T); impl<'tcx, T: 'tcx+?Sized> Clone for Interned<'tcx, T> { fn clone(&self) -> Self { Interned(self.0) } } impl<'tcx, T: 'tcx+?Sized> Copy for Interned<'tcx, T> {} // N.B., an `Interned<Ty>` compares and hashes as a `TyKind`. impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> { fn eq(&self, other: &Interned<'tcx, TyS<'tcx>>) -> bool { self.0.kind == other.0.kind } } impl<'tcx> Eq for Interned<'tcx, TyS<'tcx>> {} impl<'tcx> Hash for Interned<'tcx, TyS<'tcx>> { fn hash<H: Hasher>(&self, s: &mut H) { self.0.kind.hash(s) } } #[allow(rustc::usage_of_ty_tykind)] impl<'tcx> Borrow<TyKind<'tcx>> for Interned<'tcx, TyS<'tcx>> { fn borrow<'a>(&'a self) -> &'a TyKind<'tcx> { &self.0.kind } } // N.B., an `Interned<List<T>>` compares and hashes as its elements. impl<'tcx, T: PartialEq> PartialEq for Interned<'tcx, List<T>> { fn eq(&self, other: &Interned<'tcx, List<T>>) -> bool { self.0[..] == other.0[..] } } impl<'tcx, T: Eq> Eq for Interned<'tcx, List<T>> {} impl<'tcx, T: Hash> Hash for Interned<'tcx, List<T>> { fn hash<H: Hasher>(&self, s: &mut H) { self.0[..].hash(s) } } impl<'tcx> Borrow<[Ty<'tcx>]> for Interned<'tcx, List<Ty<'tcx>>> { fn borrow<'a>(&'a self) -> &'a [Ty<'tcx>] { &self.0[..] } } impl<'tcx> Borrow<[CanonicalVarInfo]> for Interned<'tcx, List<CanonicalVarInfo>> { fn borrow(&self) -> &[CanonicalVarInfo] { &self.0[..] } } impl<'tcx> Borrow<[GenericArg<'tcx>]> for Interned<'tcx, InternalSubsts<'tcx>> { fn borrow<'a>(&'a self) -> &'a [GenericArg<'tcx>] { &self.0[..] 
} } impl<'tcx> Borrow<[ProjectionKind]> for Interned<'tcx, List<ProjectionKind>> { fn borrow(&self) -> &[ProjectionKind] { &self.0[..] } } impl<'tcx> Borrow<[PlaceElem<'tcx>]> for Interned<'tcx, List<PlaceElem<'tcx>>> { fn borrow(&self) -> &[PlaceElem<'tcx>] { &self.0[..] } } impl<'tcx> Borrow<RegionKind> for Interned<'tcx, RegionKind> { fn borrow(&self) -> &RegionKind { &self.0 } } impl<'tcx> Borrow<GoalKind<'tcx>> for Interned<'tcx, GoalKind<'tcx>> { fn borrow<'a>(&'a self) -> &'a GoalKind<'tcx> { &self.0 } } impl<'tcx> Borrow<[ExistentialPredicate<'tcx>]> for Interned<'tcx, List<ExistentialPredicate<'tcx>>> { fn borrow<'a>(&'a self) -> &'a [ExistentialPredicate<'tcx>] { &self.0[..] } } impl<'tcx> Borrow<[Predicate<'tcx>]> for Interned<'tcx, List<Predicate<'tcx>>> { fn borrow<'a>(&'a self) -> &'a [Predicate<'tcx>] { &self.0[..] } } impl<'tcx> Borrow<Const<'tcx>> for Interned<'tcx, Const<'tcx>> { fn borrow<'a>(&'a self) -> &'a Const<'tcx> { &self.0 } } impl<'tcx> Borrow<[Clause<'tcx>]> for Interned<'tcx, List<Clause<'tcx>>> { fn borrow<'a>(&'a self) -> &'a [Clause<'tcx>] { &self.0[..] } } impl<'tcx> Borrow<[Goal<'tcx>]> for Interned<'tcx, List<Goal<'tcx>>> { fn borrow<'a>(&'a self) -> &'a [Goal<'tcx>] { &self.0[..] } } macro_rules! direct_interners { ($($name:ident: $method:ident($ty:ty)),+) => { $(impl<'tcx> PartialEq for Interned<'tcx, $ty> { fn eq(&self, other: &Self) -> bool { self.0 == other.0 } } impl<'tcx> Eq for Interned<'tcx, $ty> {} impl<'tcx> Hash for Interned<'tcx, $ty> { fn hash<H: Hasher>(&self, s: &mut H) { self.0.hash(s) } } impl<'tcx> TyCtxt<'tcx> { pub fn $method(self, v: $ty) -> &'tcx $ty { self.interners.$name.intern_ref(&v, || { Interned(self.interners.arena.alloc(v)) }).0 } })+ } } pub fn keep_local<'tcx, T: ty::TypeFoldable<'tcx>>(x: &T) -> bool { x.has_type_flags(ty::TypeFlags::KEEP_IN_LOCAL_TCX) } direct_interners!( region: mk_region(RegionKind), goal: mk_goal(GoalKind<'tcx>), const_: mk_const(Const<'tcx>) ); macro_rules! slice_interners { ($($field:ident: $method:ident($ty:ty)),+) => ( $(impl<'tcx> TyCtxt<'tcx> { pub fn $method(self, v: &[$ty]) -> &'tcx List<$ty> { self.interners.$field.intern_ref(v, || { Interned(List::from_arena(&self.interners.arena, v)) }).0 } })+ ); } slice_interners!( type_list: _intern_type_list(Ty<'tcx>), substs: _intern_substs(GenericArg<'tcx>), canonical_var_infos: _intern_canonical_var_infos(CanonicalVarInfo), existential_predicates: _intern_existential_predicates(ExistentialPredicate<'tcx>), predicates: _intern_predicates(Predicate<'tcx>), clauses: _intern_clauses(Clause<'tcx>), goal_list: _intern_goals(Goal<'tcx>), projs: _intern_projs(ProjectionKind), place_elems: _intern_place_elems(PlaceElem<'tcx>) ); impl<'tcx> TyCtxt<'tcx> { /// Given a `fn` type, returns an equivalent `unsafe fn` type; /// that is, a `fn` type that is equivalent in every way for being /// unsafe. pub fn safe_to_unsafe_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> { assert_eq!(sig.unsafety(), hir::Unsafety::Normal); self.mk_fn_ptr(sig.map_bound(|sig| ty::FnSig { unsafety: hir::Unsafety::Unsafe, ..sig })) } /// Given a closure signature `sig`, returns an equivalent `fn` /// type with the same signature. Detuples and so forth -- so /// e.g., if we have a sig with `Fn<(u32, i32)>` then you would get /// a `fn(u32, i32)`. /// `unsafety` determines the unsafety of the `fn` type. If you pass /// `hir::Unsafety::Unsafe` in the previous example, then you would get /// an `unsafe fn (u32, i32)`. /// It cannot convert a closure that requires unsafe. 
pub fn coerce_closure_fn_ty(self, sig: PolyFnSig<'tcx>, unsafety: hir::Unsafety) -> Ty<'tcx> { let converted_sig = sig.map_bound(|s| { let params_iter = match s.inputs()[0].kind { ty::Tuple(params) => { params.into_iter().map(|k| k.expect_ty()) } _ => bug!(), }; self.mk_fn_sig( params_iter, s.output(), s.c_variadic, unsafety, abi::Abi::Rust, ) }); self.mk_fn_ptr(converted_sig) } #[allow(rustc::usage_of_ty_tykind)] #[inline] pub fn mk_ty(&self, st: TyKind<'tcx>) -> Ty<'tcx> { self.interners.intern_ty(st) } pub fn mk_mach_int(self, tm: ast::IntTy) -> Ty<'tcx> { match tm { ast::IntTy::Isize => self.types.isize, ast::IntTy::I8 => self.types.i8, ast::IntTy::I16 => self.types.i16, ast::IntTy::I32 => self.types.i32, ast::IntTy::I64 => self.types.i64, ast::IntTy::I128 => self.types.i128, } } pub fn mk_mach_uint(self, tm: ast::UintTy) -> Ty<'tcx> { match tm { ast::UintTy::Usize => self.types.usize, ast::UintTy::U8 => self.types.u8, ast::UintTy::U16 => self.types.u16, ast::UintTy::U32 => self.types.u32, ast::UintTy::U64 => self.types.u64, ast::UintTy::U128 => self.types.u128, } } pub fn mk_mach_float(self, tm: ast::FloatTy) -> Ty<'tcx> { match tm { ast::FloatTy::F32 => self.types.f32, ast::FloatTy::F64 => self.types.f64, } } #[inline] pub fn mk_str(self) -> Ty<'tcx> { self.mk_ty(Str) } #[inline] pub fn mk_static_str(self) -> Ty<'tcx> { self.mk_imm_ref(self.lifetimes.re_static, self.mk_str()) } #[inline] pub fn mk_adt(self, def: &'tcx AdtDef, substs: SubstsRef<'tcx>) -> Ty<'tcx> { // Take a copy of substs so that we own the vectors inside. self.mk_ty(Adt(def, substs)) } #[inline] pub fn mk_foreign(self, def_id: DefId) -> Ty<'tcx> { self.mk_ty(Foreign(def_id)) } fn mk_generic_adt(self, wrapper_def_id: DefId, ty_param: Ty<'tcx>) -> Ty<'tcx> { let adt_def = self.adt_def(wrapper_def_id); let substs = InternalSubsts::for_item(self, wrapper_def_id, |param, substs| { match param.kind { GenericParamDefKind::Lifetime | GenericParamDefKind::Const => { bug!() } GenericParamDefKind::Type { has_default, .. 
} => { if param.index == 0 { ty_param.into() } else { assert!(has_default); self.type_of(param.def_id).subst(self, substs).into() } } } }); self.mk_ty(Adt(adt_def, substs)) } #[inline] pub fn mk_box(self, ty: Ty<'tcx>) -> Ty<'tcx> { let def_id = self.require_lang_item(lang_items::OwnedBoxLangItem, None); self.mk_generic_adt(def_id, ty) } #[inline] pub fn mk_lang_item(self, ty: Ty<'tcx>, item: lang_items::LangItem) -> Option<Ty<'tcx>> { let def_id = self.lang_items().require(item).ok()?; Some(self.mk_generic_adt(def_id, ty)) } #[inline] pub fn mk_maybe_uninit(self, ty: Ty<'tcx>) -> Ty<'tcx> { let def_id = self.require_lang_item(lang_items::MaybeUninitLangItem, None); self.mk_generic_adt(def_id, ty) } #[inline] pub fn mk_ptr(self, tm: TypeAndMut<'tcx>) -> Ty<'tcx> { self.mk_ty(RawPtr(tm)) } #[inline] pub fn mk_ref(self, r: Region<'tcx>, tm: TypeAndMut<'tcx>) -> Ty<'tcx> { self.mk_ty(Ref(r, tm.ty, tm.mutbl)) } #[inline] pub fn mk_mut_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::Mutability::Mutable}) } #[inline] pub fn mk_imm_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::Mutability::Immutable}) } #[inline] pub fn mk_mut_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> { self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::Mutability::Mutable}) } #[inline] pub fn mk_imm_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> { self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::Mutability::Immutable}) } #[inline] pub fn mk_nil_ptr(self) -> Ty<'tcx> { self.mk_imm_ptr(self.mk_unit()) } #[inline] pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> { self.mk_ty(Array(ty, ty::Const::from_usize(self, n))) } #[inline] pub fn mk_slice(self, ty: Ty<'tcx>) -> Ty<'tcx> { self.mk_ty(Slice(ty)) } #[inline] pub fn intern_tup(self, ts: &[Ty<'tcx>]) -> Ty<'tcx> { let kinds: Vec<_> = ts.into_iter().map(|&t| GenericArg::from(t)).collect(); self.mk_ty(Tuple(self.intern_substs(&kinds))) } pub fn mk_tup<I: InternAs<[Ty<'tcx>], Ty<'tcx>>>(self, iter: I) -> I::Output { iter.intern_with(|ts| { let kinds: Vec<_> = ts.into_iter().map(|&t| GenericArg::from(t)).collect(); self.mk_ty(Tuple(self.intern_substs(&kinds))) }) } #[inline] pub fn mk_unit(self) -> Ty<'tcx> { self.types.unit } #[inline] pub fn mk_diverging_default(self) -> Ty<'tcx> { if self.features().never_type_fallback { self.types.never } else { self.types.unit } } #[inline] pub fn mk_bool(self) -> Ty<'tcx> { self.mk_ty(Bool) } #[inline] pub fn mk_fn_def(self, def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> { self.mk_ty(FnDef(def_id, substs)) } #[inline] pub fn mk_fn_ptr(self, fty: PolyFnSig<'tcx>) -> Ty<'tcx> { self.mk_ty(FnPtr(fty)) } #[inline] pub fn mk_dynamic( self, obj: ty::Binder<&'tcx List<ExistentialPredicate<'tcx>>>, reg: ty::Region<'tcx> ) -> Ty<'tcx> { self.mk_ty(Dynamic(obj, reg)) } #[inline] pub fn mk_projection(self, item_def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> { self.mk_ty(Projection(ProjectionTy { item_def_id, substs, })) } #[inline] pub fn mk_closure(self, closure_id: DefId, closure_substs: SubstsRef<'tcx>) -> Ty<'tcx> { self.mk_ty(Closure(closure_id, closure_substs)) } #[inline] pub fn mk_generator(self, id: DefId, generator_substs: SubstsRef<'tcx>, movability: hir::Movability) -> Ty<'tcx> { self.mk_ty(Generator(id, generator_substs, movability)) } #[inline] pub fn mk_generator_witness(self, types: ty::Binder<&'tcx List<Ty<'tcx>>>) -> Ty<'tcx> { self.mk_ty(GeneratorWitness(types)) } #[inline] pub fn mk_ty_var(self, v: TyVid) -> Ty<'tcx> { 
self.mk_ty_infer(TyVar(v)) } #[inline] pub fn mk_const_var(self, v: ConstVid<'tcx>, ty: Ty<'tcx>) -> &'tcx Const<'tcx> { self.mk_const(ty::Const { val: ty::ConstKind::Infer(InferConst::Var(v)), ty, }) } #[inline] pub fn mk_int_var(self, v: IntVid) -> Ty<'tcx> { self.mk_ty_infer(IntVar(v)) } #[inline] pub fn mk_float_var(self, v: FloatVid) -> Ty<'tcx> { self.mk_ty_infer(FloatVar(v)) } #[inline] pub fn mk_ty_infer(self, it: InferTy) -> Ty<'tcx> { self.mk_ty(Infer(it)) } #[inline] pub fn mk_const_infer( self, ic: InferConst<'tcx>, ty: Ty<'tcx>, ) -> &'tcx ty::Const<'tcx> { self.mk_const(ty::Const { val: ty::ConstKind::Infer(ic), ty, }) } #[inline] pub fn mk_ty_param(self, index: u32, name: Symbol) -> Ty<'tcx> { self.mk_ty(Param(ParamTy { index, name: name })) } #[inline] pub fn mk_const_param( self, index: u32, name: Symbol, ty: Ty<'tcx> ) -> &'tcx Const<'tcx> { self.mk_const(ty::Const { val: ty::ConstKind::Param(ParamConst { index, name }), ty, }) } pub fn mk_param_from_def(self, param: &ty::GenericParamDef) -> GenericArg<'tcx> { match param.kind { GenericParamDefKind::Lifetime => { self.mk_region(ty::ReEarlyBound(param.to_early_bound_region_data())).into() } GenericParamDefKind::Type { .. } => self.mk_ty_param(param.index, param.name).into(), GenericParamDefKind::Const => { self.mk_const_param(param.index, param.name, self.type_of(param.def_id)).into() } } } #[inline] pub fn mk_opaque(self, def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> { self.mk_ty(Opaque(def_id, substs)) } pub fn mk_place_field(self, place: Place<'tcx>, f: Field, ty: Ty<'tcx>) -> Place<'tcx> { self.mk_place_elem(place, PlaceElem::Field(f, ty)) } pub fn mk_place_deref(self, place: Place<'tcx>) -> Place<'tcx> { self.mk_place_elem(place, PlaceElem::Deref) } pub fn mk_place_downcast( self, place: Place<'tcx>, adt_def: &'tcx AdtDef, variant_index: VariantIdx, ) -> Place<'tcx> { self.mk_place_elem( place, PlaceElem::Downcast(Some(adt_def.variants[variant_index].ident.name), variant_index), ) } pub fn mk_place_downcast_unnamed( self, place: Place<'tcx>, variant_index: VariantIdx, ) -> Place<'tcx> { self.mk_place_elem(place, PlaceElem::Downcast(None, variant_index)) } pub fn mk_place_index(self, place: Place<'tcx>, index: Local) -> Place<'tcx> { self.mk_place_elem(place, PlaceElem::Index(index)) } /// This method copies `Place`'s projection, add an element and reintern it. Should not be used /// to build a full `Place` it's just a convenient way to grab a projection and modify it in /// flight. pub fn mk_place_elem(self, place: Place<'tcx>, elem: PlaceElem<'tcx>) -> Place<'tcx> { let mut projection = place.projection.to_vec(); projection.push(elem); Place { base: place.base, projection: self.intern_place_elems(&projection) } } pub fn intern_existential_predicates(self, eps: &[ExistentialPredicate<'tcx>]) -> &'tcx List<ExistentialPredicate<'tcx>> { assert!(!eps.is_empty()); assert!(eps.windows(2).all(|w| w[0].stable_cmp(self, &w[1]) != Ordering::Greater)); self._intern_existential_predicates(eps) } pub fn intern_predicates(self, preds: &[Predicate<'tcx>]) -> &'tcx List<Predicate<'tcx>> { // FIXME consider asking the input slice to be sorted to avoid // re-interning permutations, in which case that would be asserted // here. if preds.len() == 0 { // The macro-generated method below asserts we don't intern an empty slice. 
List::empty() } else { self._intern_predicates(preds) } } pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx List<Ty<'tcx>> { if ts.len() == 0 { List::empty() } else { self._intern_type_list(ts) } } pub fn intern_substs(self, ts: &[GenericArg<'tcx>]) -> &'tcx List<GenericArg<'tcx>> { if ts.len() == 0 { List::empty() } else { self._intern_substs(ts) } } pub fn intern_projs(self, ps: &[ProjectionKind]) -> &'tcx List<ProjectionKind> { if ps.len() == 0 { List::empty() } else { self._intern_projs(ps) } } pub fn intern_place_elems(self, ts: &[PlaceElem<'tcx>]) -> &'tcx List<PlaceElem<'tcx>> { if ts.len() == 0 { List::empty() } else { self._intern_place_elems(ts) } } pub fn intern_canonical_var_infos(self, ts: &[CanonicalVarInfo]) -> CanonicalVarInfos<'tcx> { if ts.len() == 0 { List::empty() } else { self._intern_canonical_var_infos(ts) } } pub fn intern_clauses(self, ts: &[Clause<'tcx>]) -> Clauses<'tcx> { if ts.len() == 0 { List::empty() } else { self._intern_clauses(ts) } } pub fn intern_goals(self, ts: &[Goal<'tcx>]) -> Goals<'tcx> { if ts.len() == 0 { List::empty() } else { self._intern_goals(ts) } } pub fn mk_fn_sig<I>(self, inputs: I, output: I::Item, c_variadic: bool, unsafety: hir::Unsafety, abi: abi::Abi) -> <I::Item as InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>::Output where I: Iterator<Item: InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>, { inputs.chain(iter::once(output)).intern_with(|xs| ty::FnSig { inputs_and_output: self.intern_type_list(xs), c_variadic, unsafety, abi }) } pub fn mk_existential_predicates<I: InternAs<[ExistentialPredicate<'tcx>], &'tcx List<ExistentialPredicate<'tcx>>>>(self, iter: I) -> I::Output { iter.intern_with(|xs| self.intern_existential_predicates(xs)) } pub fn mk_predicates<I: InternAs<[Predicate<'tcx>], &'tcx List<Predicate<'tcx>>>>(self, iter: I) -> I::Output { iter.intern_with(|xs| self.intern_predicates(xs)) } pub fn mk_type_list<I: InternAs<[Ty<'tcx>], &'tcx List<Ty<'tcx>>>>(self, iter: I) -> I::Output { iter.intern_with(|xs| self.intern_type_list(xs)) } pub fn mk_substs<I: InternAs<[GenericArg<'tcx>], &'tcx List<GenericArg<'tcx>>>>(self, iter: I) -> I::Output { iter.intern_with(|xs| self.intern_substs(xs)) } pub fn mk_place_elems<I: InternAs<[PlaceElem<'tcx>], &'tcx List<PlaceElem<'tcx>>>>(self, iter: I) -> I::Output { iter.intern_with(|xs| self.intern_place_elems(xs)) } pub fn mk_substs_trait(self, self_ty: Ty<'tcx>, rest: &[GenericArg<'tcx>]) -> SubstsRef<'tcx> { self.mk_substs(iter::once(self_ty.into()).chain(rest.iter().cloned())) } pub fn mk_clauses<I: InternAs<[Clause<'tcx>], Clauses<'tcx>>>(self, iter: I) -> I::Output { iter.intern_with(|xs| self.intern_clauses(xs)) } pub fn mk_goals<I: InternAs<[Goal<'tcx>], Goals<'tcx>>>(self, iter: I) -> I::Output { iter.intern_with(|xs| self.intern_goals(xs)) } pub fn lint_hir<S: Into<MultiSpan>>(self, lint: &'static Lint, hir_id: HirId, span: S, msg: &str) { self.struct_span_lint_hir(lint, hir_id, span.into(), msg).emit() } pub fn lint_hir_note<S: Into<MultiSpan>>(self, lint: &'static Lint, hir_id: HirId, span: S, msg: &str, note: &str) { let mut err = self.struct_span_lint_hir(lint, hir_id, span.into(), msg); err.note(note); err.emit() } pub fn lint_node_note<S: Into<MultiSpan>>(self, lint: &'static Lint, id: hir::HirId, span: S, msg: &str, note: &str) { let mut err = self.struct_span_lint_hir(lint, id, span.into(), msg); err.note(note); err.emit() } /// Walks upwards from `id` to find a node which might change lint levels with attributes. 
/// It stops at `bound` and just returns it if reached. pub fn maybe_lint_level_root_bounded( self, mut id: hir::HirId, bound: hir::HirId, ) -> hir::HirId { loop { if id == bound { return bound; } if lint::maybe_lint_level_root(self, id) { return id; } let next = self.hir().get_parent_node(id); if next == id { bug!("lint traversal reached the root of the crate"); } id = next; } } pub fn lint_level_at_node( self, lint: &'static Lint, mut id: hir::HirId ) -> (lint::Level, lint::LintSource) { let sets = self.lint_levels(LOCAL_CRATE); loop { if let Some(pair) = sets.level_and_source(lint, id, self.sess) { return pair } let next = self.hir().get_parent_node(id); if next == id { bug!("lint traversal reached the root of the crate"); } id = next; } } pub fn struct_span_lint_hir<S: Into<MultiSpan>>(self, lint: &'static Lint, hir_id: HirId, span: S, msg: &str) -> DiagnosticBuilder<'tcx> { let (level, src) = self.lint_level_at_node(lint, hir_id); lint::struct_lint_level(self.sess, lint, level, src, Some(span.into()), msg) } pub fn struct_lint_node(self, lint: &'static Lint, id: HirId, msg: &str) -> DiagnosticBuilder<'tcx> { let (level, src) = self.lint_level_at_node(lint, id); lint::struct_lint_level(self.sess, lint, level, src, None, msg) } pub fn in_scope_traits(self, id: HirId) -> Option<&'tcx StableVec<TraitCandidate>> { self.in_scope_traits_map(id.owner) .and_then(|map| map.get(&id.local_id)) } pub fn named_region(self, id: HirId) -> Option<resolve_lifetime::Region> { self.named_region_map(id.owner) .and_then(|map| map.get(&id.local_id).cloned()) } pub fn is_late_bound(self, id: HirId) -> bool { self.is_late_bound_map(id.owner) .map(|set| set.contains(&id.local_id)) .unwrap_or(false) } pub fn object_lifetime_defaults(self, id: HirId) -> Option<&'tcx [ObjectLifetimeDefault]> { self.object_lifetime_defaults_map(id.owner) .and_then(|map| map.get(&id.local_id).map(|v| &**v)) } } pub trait InternAs<T: ?Sized, R> { type Output; fn intern_with<F>(self, f: F) -> Self::Output where F: FnOnce(&T) -> R; } impl<I, T, R, E> InternAs<[T], R> for I where E: InternIteratorElement<T, R>, I: Iterator<Item=E> { type Output = E::Output; fn intern_with<F>(self, f: F) -> Self::Output where F: FnOnce(&[T]) -> R { E::intern_with(self, f) } } pub trait InternIteratorElement<T, R>: Sized { type Output; fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output; } impl<T, R> InternIteratorElement<T, R> for T { type Output = R; fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output { f(&iter.collect::<SmallVec<[_; 8]>>()) } } impl<'a, T, R> InternIteratorElement<T, R> for &'a T where T: Clone + 'a { type Output = R; fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output { f(&iter.cloned().collect::<SmallVec<[_; 8]>>()) } } impl<T, R, E> InternIteratorElement<T, R> for Result<T, E> { type Output = Result<R, E>; fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(mut iter: I, f: F) -> Self::Output { // This code is hot enough that it's worth specializing for the most // common length lists, to avoid the overhead of `SmallVec` creation. // The match arms are in order of frequency. The 1, 2, and 0 cases are // typically hit in ~95% of cases. We assume that if the upper and // lower bounds from `size_hint` agree they are correct. 
Ok(match iter.size_hint() { (1, Some(1)) => { let t0 = iter.next().unwrap()?; assert!(iter.next().is_none()); f(&[t0]) } (2, Some(2)) => { let t0 = iter.next().unwrap()?; let t1 = iter.next().unwrap()?; assert!(iter.next().is_none()); f(&[t0, t1]) } (0, Some(0)) => { assert!(iter.next().is_none()); f(&[]) } _ => { f(&iter.collect::<Result<SmallVec<[_; 8]>, _>>()?) } }) } } // We are comparing types with different invariant lifetimes, so `ptr::eq` // won't work for us. fn ptr_eq<T, U>(t: *const T, u: *const U) -> bool { t as *const () == u as *const () } pub fn provide(providers: &mut ty::query::Providers<'_>) { providers.in_scope_traits_map = |tcx, id| tcx.gcx.trait_map.get(&id); providers.module_exports = |tcx, id| tcx.gcx.export_map.get(&id).map(|v| &v[..]); providers.crate_name = |tcx, id| { assert_eq!(id, LOCAL_CRATE); tcx.crate_name }; providers.get_lib_features = |tcx, id| { assert_eq!(id, LOCAL_CRATE); tcx.arena.alloc(middle::lib_features::collect(tcx)) }; providers.get_lang_items = |tcx, id| { assert_eq!(id, LOCAL_CRATE); tcx.arena.alloc(middle::lang_items::collect(tcx)) }; providers.diagnostic_items = |tcx, id| { assert_eq!(id, LOCAL_CRATE); middle::diagnostic_items::collect(tcx) }; providers.all_diagnostic_items = |tcx, id| { assert_eq!(id, LOCAL_CRATE); middle::diagnostic_items::collect_all(tcx) }; providers.maybe_unused_trait_import = |tcx, id| { tcx.maybe_unused_trait_imports.contains(&id) }; providers.maybe_unused_extern_crates = |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); &tcx.maybe_unused_extern_crates[..] }; providers.names_imported_by_glob_use = |tcx, id| { assert_eq!(id.krate, LOCAL_CRATE); Lrc::new(tcx.glob_map.get(&id).cloned().unwrap_or_default()) }; providers.stability_index = |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); tcx.arena.alloc(stability::Index::new(tcx)) }; providers.lookup_stability = |tcx, id| { assert_eq!(id.krate, LOCAL_CRATE); let id = tcx.hir().definitions().def_index_to_hir_id(id.index); tcx.stability().local_stability(id) }; providers.lookup_deprecation_entry = |tcx, id| { assert_eq!(id.krate, LOCAL_CRATE); let id = tcx.hir().definitions().def_index_to_hir_id(id.index); tcx.stability().local_deprecation_entry(id) }; providers.extern_mod_stmt_cnum = |tcx, id| { let id = tcx.hir().as_local_node_id(id).unwrap(); tcx.extern_crate_map.get(&id).cloned() }; providers.all_crate_nums = |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); tcx.arena.alloc_slice(&tcx.cstore.crates_untracked()) }; providers.output_filenames = |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); tcx.output_filenames.clone() }; providers.features_query = |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); tcx.arena.alloc(tcx.sess.features_untracked().clone()) }; providers.is_panic_runtime = |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); attr::contains_name(tcx.hir().krate_attrs(), sym::panic_runtime) }; providers.is_compiler_builtins = |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); attr::contains_name(tcx.hir().krate_attrs(), sym::compiler_builtins) }; providers.has_panic_handler = |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); // We want to check if the panic handler was defined in this crate tcx.lang_items().panic_impl().map_or(false, |did| did.is_local()) }; }
pub fn coercion_casts(&self) -> &ItemLocalSet { &self.coercion_casts
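The rustc chunk above combines two ideas: a thread-local pointer that scopes an `ImplicitCtxt` to a call stack, and hash-consing interners, where an `Interned<T>` hashes and compares as its contents so equal values share one allocation and later equality checks can be pointer comparisons. A minimal, self-contained sketch of the interning half — illustrative names only, not rustc's arena-based implementation:

use std::collections::HashSet;

// A toy hash-consing interner: equal values are stored once and handed out
// as `&'static T`, so equality can become a pointer comparison.
// rustc allocates from an arena; `Box::leak` stands in for that here.
struct Interner<T: Eq + std::hash::Hash + 'static> {
    set: HashSet<&'static T>,
}

impl<T: Eq + std::hash::Hash + 'static> Interner<T> {
    fn new() -> Self {
        Interner { set: HashSet::new() }
    }

    fn intern(&mut self, value: T) -> &'static T {
        // `&'static T` borrows as `T`, so we can look up by value.
        if let Some(&existing) = self.set.get(&value) {
            return existing; // already interned: reuse the allocation
        }
        let leaked: &'static T = Box::leak(Box::new(value));
        self.set.insert(leaked);
        leaked
    }
}

fn main() {
    let mut strings = Interner::new();
    let a = strings.intern(String::from("Ty(bool)"));
    let b = strings.intern(String::from("Ty(bool)"));
    // Equal contents intern to the same pointer.
    assert!(std::ptr::eq(a, b));
    println!("interned once: {}", a);
}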
edge.go
package equipment import ( "errors" "strings" "time" "github.com/maritimusj/durafmt" "github.com/kataras/iris" "github.com/kataras/iris/hero" "github.com/maritimusj/centrum/gate/lang" "github.com/maritimusj/centrum/gate/web/app" "github.com/maritimusj/centrum/gate/web/edge" "github.com/maritimusj/centrum/gate/web/helper" "github.com/maritimusj/centrum/gate/web/model" "github.com/maritimusj/centrum/gate/web/resource" "github.com/maritimusj/centrum/gate/web/response" "github.com/maritimusj/centrum/global" edgeLang "github.com/maritimusj/centrum/edge/lang" _ "github.com/maritimusj/centrum/edge/lang/zhCN" ) func rangeEquipmentStates(user model.User, equipment model.Equipment, fn func(device model.Device, measure model.Measure, state model.State) error) error { var params []helper.OptionFN if user != nil && !app.IsDefaultAdminUser(user) { params = append(params, helper.User(user.GetID())) } params = append(params, helper.Equipment(equipment.GetID())) states, _, err := equipment.GetStateList(params...) if err != nil { return err } var ( device model.Device measure model.Measure ) for _, state := range states { measure = state.Measure() if measure != nil { device = measure.Device() } else { device = nil } if err := fn(device, measure, state); err != nil { return err } } return nil } func getEquipmentSimpleStatus(user model.User, equipment model.Equipment) map[string]interface{} { res := map[string]interface{}{ "index": edgeLang.Disconnected, "title": edgeLang.Str(edgeLang.Disconnected), } _ = rangeEquipmentStates(user, equipment, func(device model.Device, measure model.Measure, state model.State) error { if device != nil { index, title, _ := global.GetDeviceStatus(device) if index == int(edgeLang.Connected) { res["index"] = index res["title"] = title return errors.New("") } } return nil }) return res } func Status(equipmentID int64, ctx iris.Context) hero.Result { return response.Wrap(func() interface{} { equipment, err := app.Store().GetEquipment(equipmentID) if err != nil { return err } admin := app.Store().MustGetUserFromContext(ctx) if !app.Allow(admin, equipment, resource.View) { return lang.ErrNoPermission } if ctx.URLParamExists("simple") { return getEquipmentSimpleStatus(admin, equipment) } devices := make([]map[string]interface{}, 0) err = rangeEquipmentStates(admin, equipment, func(device model.Device, measure model.Measure, state model.State) error { dataMap := map[string]interface{}{ "id": state.GetID(), "title": state.Title(), } if device != nil { baseInfo, err := edge.GetStatus(device) if err != nil { index, title, from := global.GetDeviceStatus(device) if index != 0 { status := map[string]interface{}{ "index": index, "title": title, } if index == int(edgeLang.Connected) { status["from"] = from.Format(lang.DatetimeFormatterStr.Str()) status["duration"] = strings.ReplaceAll(durafmt.Parse(time.Now().Sub(from)).LimitFirstN(2).String(), " ", "") } dataMap["edge"] = map[string]interface{}{ "status": status, } } else { dataMap["error"] = err.Error() } } else { dataMap["edge"] = baseInfo } } if device == nil { dataMap["error"] = lang.ErrDeviceNotFound.Error() } else if measure == nil { dataMap["error"] = lang.ErrMeasureNotFound.Error() } devices = append(devices, dataMap) return nil }) if err != nil { return err } return devices }) } func Data(equipmentID int64, ctx iris.Context) hero.Result { return response.Wrap(func() interface{} { equipment, err := app.Store().GetEquipment(equipmentID) if err != nil { return err } admin := app.Store().MustGetUserFromContext(ctx) if !app.Allow(admin, 
equipment, resource.View) { return lang.ErrNoPermission } testCtrlPerm := func(state model.State) bool { if app.IsDefaultAdminUser(admin) { return true } return app.Allow(admin, state, resource.Ctrl) } result := make([]interface{}, 0) err = rangeEquipmentStates(admin, equipment, func(device model.Device, measure model.Measure, state model.State) error { dataMap := map[string]interface{}{ "id": state.GetID(), "title": state.Title(), } if device == nil { dataMap["error"] = lang.ErrDeviceNotFound.Error() } else if measure == nil { dataMap["error"] = lang.ErrMeasureNotFound.Error() } if device != nil && measure != nil { data, err := edge.GetCHValue(device, measure.TagName()) if err != nil { dataMap["error"] = err.Error() } else { for k, v := range data { dataMap[k] = v } } dataMap["perm"] = map[string]bool{ "view": true, "ctrl": testCtrlPerm(state), } } result = append(result, dataMap) return nil }) if err != nil
return result }) } func Ctrl(ctx iris.Context) hero.Result { return response.Wrap(func() interface{} { //equipmentID, err := ctx.Params().GetInt64("id") //if err != nil { // return err //} //equipment, err := app.Store().GetEquipment(equipmentID) //if err != nil { // return err //} stateID, err := ctx.Params().GetInt64("stateID") if err != nil { return err } var form struct { Val bool `form:"value" json:"value"` } if err := ctx.ReadJSON(&form); err != nil { return lang.ErrInvalidRequestData } state, err := app.Store().GetState(stateID) if err != nil { return err } admin := app.Store().MustGetUserFromContext(ctx) if !app.Allow(admin, state, resource.Ctrl) { return lang.ErrNoPermission } measure := state.Measure() if measure == nil { return lang.ErrMeasureNotFound.Error() } device := measure.Device() if device == nil { return lang.ErrDeviceNotFound.Error() } err = edge.SetCHValue(device, measure.TagName(), form.Val) if err != nil { return err } val, err := edge.GetCHValue(device, measure.TagName()) if err != nil { return err } return val }) } func GetCHValue(ctx iris.Context) hero.Result { return response.Wrap(func() interface{} { //equipmentID, err := ctx.Params().GetInt64("id") //if err != nil { // return lang.ErrInvalidRequestData //} // //equipment, err := app.Store().GetEquipment(equipmentID) //if err != nil { // return err //} stateID, err := ctx.Params().GetInt64("stateID") if err != nil { return lang.ErrInvalidRequestData } state, err := app.Store().GetState(stateID) if err != nil { return err } admin := app.Store().MustGetUserFromContext(ctx) if !app.Allow(admin, state, resource.View) { return lang.ErrNoPermission } measure := state.Measure() if measure == nil { return lang.ErrMeasureNotFound.Error() } device := measure.Device() if device == nil { return lang.ErrDeviceNotFound.Error() } val, err := edge.GetCHValue(device, measure.TagName()) if err != nil { return err } return val }) }
{ return err }
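In `getEquipmentSimpleStatus` above, the callback returns `errors.New("")` purely to break out of `rangeEquipmentStates` once a connected device is found. The same early-exit trick reads better with a named sentinel and `errors.Is`; a sketch of the pattern on a stand-in range helper, not the project's actual code:

package main

import (
	"errors"
	"fmt"
)

// errStopIteration is a sentinel used only to break out of a range helper.
var errStopIteration = errors.New("stop iteration")

// rangeInts is a stand-in for rangeEquipmentStates: it calls fn per element
// and propagates any error the callback returns.
func rangeInts(xs []int, fn func(int) error) error {
	for _, x := range xs {
		if err := fn(x); err != nil {
			return err
		}
	}
	return nil
}

func firstEven(xs []int) (int, bool) {
	found, ok := 0, false
	err := rangeInts(xs, func(x int) error {
		if x%2 == 0 {
			found, ok = x, true
			return errStopIteration // break early, as the empty error does above
		}
		return nil
	})
	// The sentinel is expected; anything else would be a real error.
	if err != nil && !errors.Is(err, errStopIteration) {
		return 0, false
	}
	return found, ok
}

func main() {
	fmt.Println(firstEven([]int{1, 3, 4, 5})) // 4 true
}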
attendee.entity.ts
import { Column, Entity, JoinColumn, ManyToOne, PrimaryGeneratedColumn } from "typeorm"; import { EventEntity } from "./events/event.entity"; export enum AttendeeAnswerEnum { Accepted = 1, Maybe, Rejected } @Entity() export class
{ @PrimaryGeneratedColumn() id: number; @Column() name: string; @ManyToOne(() => EventEntity, (event) => event.attendees, { nullable: false }) event: EventEntity @Column({ enum: AttendeeAnswerEnum, default: AttendeeAnswerEnum.Accepted }) answer: AttendeeAnswerEnum; }
AttendeeEntity
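A hedged sketch of how an entity like this is typically used with a TypeORM repository; the `DataSource` wiring and the id-only event stub are assumptions for illustration, not part of the file above:

import { DataSource } from "typeorm";
import { AttendeeEntity, AttendeeAnswerEnum } from "./attendee.entity";

// Saving an attendee for an existing event (illustrative only).
async function addAttendee(dataSource: DataSource, eventId: number) {
  const attendee = new AttendeeEntity();
  attendee.name = "Ada";
  // The relation is nullable: false, so an event must be attached before save.
  attendee.event = { id: eventId } as any;
  // Omitting answer would use the column default (Accepted); set it explicitly:
  attendee.answer = AttendeeAnswerEnum.Maybe;
  return dataSource.getRepository(AttendeeEntity).save(attendee);
}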
const.js
'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); exports.default = { SORT_ASC: 'asc', SORT_DESC: 'desc', ROW_SELECT_SINGLE: 'radio', ROW_SELECT_MULTIPLE: 'checkbox', ROW_SELECT_DISABLED: 'ROW_SELECT_DISABLED', CHECKBOX_STATUS_CHECKED: 'checked', CHECKBOX_STATUS_INDETERMINATE: 'indeterminate',
CHECKBOX_STATUS_UNCHECKED: 'unchecked' };
return.go
package antd

import (
	"net/http"

	"github.com/gin-gonic/gin"

	"github.com/innocence23/goadmin-sdk/sdk/pkg"
)

// Error builds a failure response envelope.
func Error(c *gin.Context, errCode string, errMsg string, showType string) {
	var res response
	res.Success = false
	if errMsg != "" {
		res.ErrorMessage = errMsg
	}
	if showType != "" {
		res.ShowType = showType
	}
	res.TraceId = pkg.GenerateMsgIDFromContext(c)
	res.ErrorCode = errCode
	c.Set("result", res)
	c.Set("status", errCode)
	c.AbortWithStatusJSON(http.StatusOK, res)
}

// OK builds a normal success response envelope.
func OK(c *gin.Context, data interface{}) {
	var res response
	res.Data = data
	res.Success = true
	res.TraceId = pkg.GenerateMsgIDFromContext(c)
	c.Set("result", res)
	c.Set("status", http.StatusOK)
	c.AbortWithStatusJSON(http.StatusOK, res)
}

// PageOK builds a paginated success response envelope.
func PageOK(c *gin.Context, result interface{}, total int, current int, pageSize int) {
	var res pages
	res.Data = result
	res
ext, data gin.H) { data["traceId"] = pkg.GenerateMsgIDFromContext(c) c.Set("result", data) c.AbortWithStatusJSON(http.StatusOK, data) }
.Total = total
	res.Current = current
	res.PageSize = pageSize
	res.Success = true
	res.TraceId = pkg.GenerateMsgIDFromContext(c)
	c.Set("result", res)
	c.Set("status", http.StatusOK)
	c.AbortWithStatusJSON(http.StatusOK, res)
}

// Custum is a compatibility helper that emits a caller-supplied payload.
func Custum(c *gin.Cont
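Reassembled, these helpers wrap every gin response in a common envelope (`Success`, `TraceId`, optional error fields) and abort the handler chain with HTTP 200. A hedged usage sketch — the import path for the `antd` package is an assumption, since the file above only shows the package name:

package main

import (
	"github.com/gin-gonic/gin"

	// Assumed path; adjust to wherever the antd package above lives.
	antd "github.com/innocence23/goadmin-sdk/sdk/api/antd"
)

func listUsers(c *gin.Context) {
	users := []gin.H{{"id": 1, "name": "demo"}}
	// result, total, current page, page size — all echoed in the envelope.
	antd.PageOK(c, users, len(users), 1, 10)
}

func main() {
	r := gin.Default()
	r.GET("/users", listUsers)
	_ = r.Run(":8080")
}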
eventemitter_test.go
package mqtt import ( "testing" "time" ) func TestEventEmitter(t *testing.T) { emitter := newEventEmitter() go emitter.run() done := make(chan struct{}) maxCalls := 12 nRecvdEvents := 0 emitter.on(ReconnectingEvent, func(str string) { nRecvdEvents++ if nRecvdEvents == maxCalls
}) go func() { for i := 0; i < maxCalls; i++ { emitter.emit(ReconnectingEvent, "I am reconnecting!") } }() select { case <-time.After(1 * time.Second): t.Fail() case <-done: } emitter.close() }
{ close(done) }
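The test above fixes the emitter's surface — `newEventEmitter`, `on`, `emit`, a `run` loop, `close` — without showing the implementation. One way to satisfy that contract is a single goroutine draining an emission channel so handlers never run concurrently; a sketch under that assumption, not the package's actual code:

package main

import "fmt"

type event int

const ReconnectingEvent event = iota

type emission struct {
	ev  event
	arg string
}

// eventEmitter serializes all handler calls through one run() goroutine.
// Handlers are registered before run() starts, so the map is never raced.
type eventEmitter struct {
	handlers map[event][]func(string)
	ch       chan emission
	done     chan struct{}
}

func newEventEmitter() *eventEmitter {
	return &eventEmitter{
		handlers: make(map[event][]func(string)),
		ch:       make(chan emission), // unbuffered: emit() hands off synchronously
		done:     make(chan struct{}),
	}
}

func (e *eventEmitter) on(ev event, fn func(string)) {
	e.handlers[ev] = append(e.handlers[ev], fn)
}

func (e *eventEmitter) emit(ev event, arg string) { e.ch <- emission{ev, arg} }
func (e *eventEmitter) close()                    { close(e.done) }

func (e *eventEmitter) run() {
	for {
		select {
		case m := <-e.ch:
			for _, fn := range e.handlers[m.ev] {
				fn(m.arg)
			}
		case <-e.done:
			return
		}
	}
}

func main() {
	em := newEventEmitter()
	printed := make(chan struct{})
	em.on(ReconnectingEvent, func(s string) {
		fmt.Println("got:", s)
		close(printed) // signal completion, as the test's done channel does
	})
	go em.run()
	em.emit(ReconnectingEvent, "I am reconnecting!")
	<-printed
	em.close()
}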
sprite.rs
use bevy::prelude::*; fn main() { App::build() .add_plugins(bevy_webgl2::DefaultPlugins) .add_startup_system(setup.system()) .run(); } fn
( mut commands: Commands, asset_server: Res<AssetServer>, mut materials: ResMut<Assets<ColorMaterial>>, ) { let texture_handle = asset_server.load("branding/icon.png"); commands.spawn_bundle(OrthographicCameraBundle::new_2d()); commands.spawn_bundle(SpriteBundle { material: materials.add(texture_handle.into()), ..Default::default() }); }
setup
task_list.go
package resourcemanagers import ( "strings" "github.com/determined-ai/determined/master/pkg/model" "github.com/emirpasic/gods/sets/treeset" "github.com/determined-ai/determined/master/internal/job" "github.com/determined-ai/determined/master/internal/sproto" "github.com/determined-ai/determined/master/pkg/actor" ) // taskList maintains all tasks in time order. type taskList struct { taskByTime *treeset.Set taskByHandler map[*actor.Ref]*sproto.AllocateRequest taskByID map[model.AllocationID]*sproto.AllocateRequest allocations map[*actor.Ref]*sproto.ResourcesAllocated } func newTaskList() *taskList
func (l *taskList) iterator() *taskIterator {
	return &taskIterator{it: l.taskByTime.Iterator()}
}

func (l *taskList) len() int {
	return len(l.taskByHandler)
}

func (l *taskList) GetTaskByHandler(handler *actor.Ref) (*sproto.AllocateRequest, bool) {
	req, ok := l.taskByHandler[handler]
	return req, ok
}

func (l *taskList) GetTaskByID(id model.AllocationID) (*sproto.AllocateRequest, bool) {
	req, ok := l.taskByID[id]
	return req, ok
}

func (l *taskList) AddTask(req *sproto.AllocateRequest) bool {
	if _, ok := l.GetTaskByHandler(req.TaskActor); ok {
		return false
	}

	l.taskByTime.Add(req)
	l.taskByHandler[req.TaskActor] = req
	l.taskByID[req.AllocationID] = req
	return true
}

func (l *taskList) RemoveTaskByHandler(handler *actor.Ref) *sproto.AllocateRequest {
	req, ok := l.GetTaskByHandler(handler)
	if !ok {
		return nil
	}

	l.taskByTime.Remove(req)
	delete(l.taskByHandler, handler)
	delete(l.taskByID, req.AllocationID)
	delete(l.allocations, handler)
	return req
}

func (l *taskList) GetAllocations(handler *actor.Ref) *sproto.ResourcesAllocated {
	return l.allocations[handler]
}

func (l *taskList) SetAllocations(handler *actor.Ref, assigned *sproto.ResourcesAllocated) {
	if assignmentIsScheduled(assigned) {
		l.taskByHandler[handler].State = job.SchedulingStateScheduled
	} else {
		l.taskByHandler[handler].State = job.SchedulingStateQueued
	}
	l.SetAllocationsRaw(handler, assigned)
}

func (l *taskList) SetAllocationsRaw(handler *actor.Ref, assigned *sproto.ResourcesAllocated) {
	l.allocations[handler] = assigned
}

func (l *taskList) RemoveAllocations(handler *actor.Ref) {
	delete(l.allocations, handler)
	l.taskByHandler[handler].State = job.SchedulingStateQueued
}

type taskIterator struct{ it treeset.Iterator }

func (i *taskIterator) next() bool { return i.it.Next() }

func (i *taskIterator) value() *sproto.AllocateRequest {
	return i.it.Value().(*sproto.AllocateRequest)
}

// registerTimeComparator compares AllocateRequests based on when their Allocate actor was
// registered.
func registerTimeComparator(t1 *sproto.AllocateRequest, t2 *sproto.AllocateRequest) int {
	if !t1.TaskActor.RegisteredTime().Equal(t2.TaskActor.RegisteredTime()) {
		if t1.TaskActor.RegisteredTime().Before(t2.TaskActor.RegisteredTime()) {
			return -1
		}
		return 1
	}
	return strings.Compare(string(t1.AllocationID), string(t2.AllocationID))
}

// aReqComparator compares AllocateRequests by how long ago their jobs were submitted,
// falling back to when their Allocation actor was created for non-job tasks.
// a < b iff a is older than b.
// The result will be 0 if a==b, -1 if a < b, and +1 if a > b.
func aReqComparator(a *sproto.AllocateRequest, b *sproto.AllocateRequest) int {
	if a.JobSubmissionTime == nil && b.JobSubmissionTime == nil {
		return registerTimeComparator(a, b)
	}
	if a.JobSubmissionTime == nil {
		return 1
	}
	if b.JobSubmissionTime == nil {
		return -1
	}
	if a.JobSubmissionTime.Equal(*b.JobSubmissionTime) {
		return registerTimeComparator(a, b)
	}
	if a.JobSubmissionTime.Before(*b.JobSubmissionTime) {
		return -1
	}
	return 1
}
{ return &taskList{ taskByTime: treeset.NewWith(func(a, b interface{}) int { t1, t2 := a.(*sproto.AllocateRequest), b.(*sproto.AllocateRequest) return aReqComparator(t1, t2) }), taskByHandler: make(map[*actor.Ref]*sproto.AllocateRequest), taskByID: make(map[model.AllocationID]*sproto.AllocateRequest), allocations: make(map[*actor.Ref]*sproto.ResourcesAllocated), } }
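`aReqComparator` above is a layered comparator: order by job submission time, treat a missing time as newest, and fall back to registration order on ties. The same shape extracted onto plain data, so the documented -1/0/+1 contract is easy to exercise (simplified fields, not the real `AllocateRequest`):

package main

import (
	"fmt"
	"time"
)

type req struct {
	id        string
	submitted *time.Time // nil for non-job tasks
	created   time.Time  // stand-in for actor registration time
}

// compare returns -1 if a is older than b, +1 if newer, 0 if equal —
// the same contract as aReqComparator, on simplified data.
func compare(a, b req) int {
	switch {
	case a.submitted == nil && b.submitted == nil:
		// both non-job: handled by the creation-time tie-break below
	case a.submitted == nil:
		return 1 // missing submission time sorts last
	case b.submitted == nil:
		return -1
	case a.submitted.Before(*b.submitted):
		return -1
	case b.submitted.Before(*a.submitted):
		return 1
	}
	if a.created.Before(b.created) {
		return -1
	}
	if b.created.Before(a.created) {
		return 1
	}
	return 0 // the real code breaks this final tie on AllocationID
}

func main() {
	t0 := time.Now()
	t1 := t0.Add(time.Minute)
	a := req{id: "a", submitted: &t0, created: t1}
	b := req{id: "b", submitted: &t1, created: t0}
	fmt.Println(compare(a, b)) // -1: a's job was submitted first
}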
test_config.py
import unittest from programy.config.file.yaml_file import YamlConfigurationFile from programy.clients.polling.telegram.config import TelegramConfiguration from programy.clients.events.console.config import ConsoleConfiguration class TelegramClientConfigurationTests(unittest.TestCase): def
(self): yaml = YamlConfigurationFile() self.assertIsNotNone(yaml) yaml.load_from_text(""" telegram: unknown_command: Sorry, that is not a command I have been taught yet! unknown_command_srai: YTELEGRAM_UNKNOWN_COMMAND """, ConsoleConfiguration(), ".") telegram_config = TelegramConfiguration() telegram_config.load_configuration(yaml, ".") self.assertEqual(telegram_config.unknown_command, "Sorry, that is not a command I have been taught yet!") self.assertEqual(telegram_config.unknown_command_srai, "YTELEGRAM_UNKNOWN_COMMAND") def test_to_yaml_with_defaults(self): config = TelegramConfiguration() data = {} config.to_yaml(data, True) self.assertEqual('Sorry, that is not a command I have been taught yet!', data['unknown_command']) self.assertEqual('YTELEGRAM_UNKNOWN_COMMAND', data['unknown_command_srai']) self.assertEqual(data['bot'], 'bot') self.assertEqual(data['bot_selector'], "programy.clients.client.DefaultBotSelector") self.assertEqual(data['renderer'], "programy.clients.render.text.TextRenderer")
test_init
flatten.py
import torch class Flatten(torch.nn.Module):
    def forward(self, input):
        return input.view(input.size(0), -1)
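A quick check of what the module above does, assuming `Flatten` is in scope: it collapses every dimension after the batch axis, so an (N, C, H, W) tensor becomes (N, C*H*W).

import torch

# Assumes the Flatten module defined above is importable here.
flatten = Flatten()
x = torch.randn(4, 3, 5, 5)   # batch of 4, each sample shaped (3, 5, 5)
y = flatten(x)
print(y.shape)                # torch.Size([4, 75]) — 3*5*5 flattened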
admission_control_service_test.rs
// Copyright (c) The Libra Core Contributors // SPDX-License-Identifier: Apache-2.0 use crate::{ admission_control_service::{ AdmissionControlService, SubmitTransactionRequest, SubmitTransactionResponse as ProtoSubmitTransactionResponse, }, mocks::local_mock_mempool::LocalMockMempool, }; use solana_libra_admission_control_proto::{AdmissionControlStatus, SubmitTransactionResponse}; use rand::SeedableRng; use solana_libra_crypto::{ed25519::*, test_utils::TEST_SEED}; use solana_libra_mempool_shared_proto::proto::mempool_status::MempoolAddTransactionStatusCode; use solana_libra_storage_service::mocks::mock_storage_client::MockStorageReadClient; use solana_libra_types::{ account_address::{AccountAddress, ADDRESS_LENGTH}, test_helpers::transaction_test_helpers::get_test_signed_txn, vm_error::{StatusCode, VMStatus}, }; use solana_libra_vm_validator::mocks::mock_vm_validator::MockVMValidator; use std::convert::TryFrom; use std::sync::Arc; pub fn
() -> AdmissionControlService<LocalMockMempool, MockVMValidator> { AdmissionControlService::new( Some(Arc::new(LocalMockMempool::new())), Arc::new(MockStorageReadClient), Arc::new(MockVMValidator), false, ) } fn assert_status(response: ProtoSubmitTransactionResponse, status: VMStatus) { let rust_resp = SubmitTransactionResponse::try_from(response).unwrap(); if let Some(resp_ac_status) = rust_resp.ac_status { assert_eq!(resp_ac_status, AdmissionControlStatus::Accepted); } else { let decoded_response = rust_resp.vm_error.unwrap(); assert_eq!(decoded_response.major_status, status.major_status); assert_eq!(decoded_response.sub_status, status.sub_status); } } #[test] fn test_submit_txn_inner_vm() { let mut rng = ::rand::rngs::StdRng::from_seed(TEST_SEED); let ac_service = create_ac_service_for_ut(); // create request let mut req: SubmitTransactionRequest = SubmitTransactionRequest::default(); let sender = AccountAddress::new([0; ADDRESS_LENGTH]); let keypair = compat::generate_keypair(&mut rng); req.signed_txn = Some(get_test_signed_txn(sender, 0, keypair.0.clone(), keypair.1.clone(), None).into()); let response = ac_service.submit_transaction_inner(req.clone()).unwrap(); assert_status( response, VMStatus::new(StatusCode::SENDING_ACCOUNT_DOES_NOT_EXIST), ); let sender = AccountAddress::new([1; ADDRESS_LENGTH]); req.signed_txn = Some(get_test_signed_txn(sender, 0, keypair.0.clone(), keypair.1.clone(), None).into()); let response = ac_service.submit_transaction_inner(req.clone()).unwrap(); assert_status(response, VMStatus::new(StatusCode::INVALID_SIGNATURE)); let sender = AccountAddress::new([2; ADDRESS_LENGTH]); req.signed_txn = Some(get_test_signed_txn(sender, 0, keypair.0.clone(), keypair.1.clone(), None).into()); let response = ac_service.submit_transaction_inner(req.clone()).unwrap(); assert_status( response, VMStatus::new(StatusCode::INSUFFICIENT_BALANCE_FOR_TRANSACTION_FEE), ); let sender = AccountAddress::new([3; ADDRESS_LENGTH]); req.signed_txn = Some(get_test_signed_txn(sender, 0, keypair.0.clone(), keypair.1.clone(), None).into()); let response = ac_service.submit_transaction_inner(req.clone()).unwrap(); assert_status(response, VMStatus::new(StatusCode::SEQUENCE_NUMBER_TOO_NEW)); let sender = AccountAddress::new([4; ADDRESS_LENGTH]); req.signed_txn = Some(get_test_signed_txn(sender, 0, keypair.0.clone(), keypair.1.clone(), None).into()); let response = ac_service.submit_transaction_inner(req.clone()).unwrap(); assert_status(response, VMStatus::new(StatusCode::SEQUENCE_NUMBER_TOO_OLD)); let sender = AccountAddress::new([5; ADDRESS_LENGTH]); req.signed_txn = Some(get_test_signed_txn(sender, 0, keypair.0.clone(), keypair.1.clone(), None).into()); let response = ac_service.submit_transaction_inner(req.clone()).unwrap(); assert_status(response, VMStatus::new(StatusCode::TRANSACTION_EXPIRED)); let sender = AccountAddress::new([6; ADDRESS_LENGTH]); req.signed_txn = Some(get_test_signed_txn(sender, 0, keypair.0.clone(), keypair.1.clone(), None).into()); let response = ac_service.submit_transaction_inner(req.clone()).unwrap(); assert_status(response, VMStatus::new(StatusCode::INVALID_AUTH_KEY)); let sender = AccountAddress::new([8; ADDRESS_LENGTH]); req.signed_txn = Some(get_test_signed_txn(sender, 0, keypair.0.clone(), keypair.1.clone(), None).into()); let response = ac_service.submit_transaction_inner(req.clone()).unwrap(); assert_status(response, VMStatus::new(StatusCode::EXECUTED)); let sender = AccountAddress::new([8; ADDRESS_LENGTH]); let test_key = compat::generate_keypair(&mut rng); 
req.signed_txn = Some(get_test_signed_txn(sender, 0, keypair.0.clone(), test_key.1.clone(), None).into()); let response = ac_service.submit_transaction_inner(req.clone()).unwrap(); assert_status(response, VMStatus::new(StatusCode::INVALID_SIGNATURE)); } #[test] fn test_submit_txn_inner_mempool() { let ac_service = create_ac_service_for_ut(); let mut req: SubmitTransactionRequest = SubmitTransactionRequest::default(); let keypair = compat::generate_keypair(None); let insufficient_balance_add = AccountAddress::new([100; ADDRESS_LENGTH]); req.signed_txn = Some( get_test_signed_txn( insufficient_balance_add, 0, keypair.0.clone(), keypair.1.clone(), None, ) .into(), ); let response = SubmitTransactionResponse::try_from( ac_service.submit_transaction_inner(req.clone()).unwrap(), ) .unwrap(); assert_eq!( response.mempool_error.unwrap().code, MempoolAddTransactionStatusCode::InsufficientBalance ); let invalid_seq_add = AccountAddress::new([101; ADDRESS_LENGTH]); req.signed_txn = Some( get_test_signed_txn( invalid_seq_add, 0, keypair.0.clone(), keypair.1.clone(), None, ) .into(), ); let response = SubmitTransactionResponse::try_from( ac_service.submit_transaction_inner(req.clone()).unwrap(), ) .unwrap(); assert_eq!( response.mempool_error.unwrap().code, MempoolAddTransactionStatusCode::InvalidSeqNumber ); let sys_error_add = AccountAddress::new([102; ADDRESS_LENGTH]); req.signed_txn = Some( get_test_signed_txn(sys_error_add, 0, keypair.0.clone(), keypair.1.clone(), None).into(), ); let response = SubmitTransactionResponse::try_from( ac_service.submit_transaction_inner(req.clone()).unwrap(), ) .unwrap(); assert_eq!( response.mempool_error.unwrap().code, MempoolAddTransactionStatusCode::InvalidUpdate ); let accepted_add = AccountAddress::new([103; ADDRESS_LENGTH]); req.signed_txn = Some( get_test_signed_txn(accepted_add, 0, keypair.0.clone(), keypair.1.clone(), None).into(), ); let response = SubmitTransactionResponse::try_from( ac_service.submit_transaction_inner(req.clone()).unwrap(), ) .unwrap(); assert_eq!( response.ac_status.unwrap(), AdmissionControlStatus::Accepted, ); let accepted_add = AccountAddress::new([104; ADDRESS_LENGTH]); req.signed_txn = Some(get_test_signed_txn(accepted_add, 0, keypair.0.clone(), keypair.1, None).into()); let response = SubmitTransactionResponse::try_from( ac_service.submit_transaction_inner(req.clone()).unwrap(), ) .unwrap(); assert_eq!( response.mempool_error.unwrap().code, MempoolAddTransactionStatusCode::MempoolIsFull, ); }
create_ac_service_for_ut
env_test.go
package discovery import ( "context" protocolv1 "github.com/liqoTech/liqo/api/advertisement-operator/v1" policyv1 "github.com/liqoTech/liqo/api/cluster-config/v1" v1 "github.com/liqoTech/liqo/api/discovery/v1" "github.com/liqoTech/liqo/internal/discovery" foreign_cluster_operator "github.com/liqoTech/liqo/internal/discovery/foreign-cluster-operator" search_domain_operator "github.com/liqoTech/liqo/internal/discovery/search-domain-operator" peering_request_operator "github.com/liqoTech/liqo/internal/peering-request-operator" "github.com/liqoTech/liqo/pkg/clusterID" "github.com/liqoTech/liqo/pkg/crdClient" "github.com/liqoTech/liqo/pkg/liqonet" "github.com/miekg/dns" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/client-go/kubernetes" "k8s.io/client-go/kubernetes/scheme" "k8s.io/client-go/rest" "k8s.io/klog" "net" "os" "path/filepath" ctrl "sigs.k8s.io/controller-runtime" "sigs.k8s.io/controller-runtime/pkg/envtest" "sigs.k8s.io/controller-runtime/pkg/manager" "strconv" "strings" "time" ) type Cluster struct { env *envtest.Environment cfg *rest.Config client *crdClient.CRDClient advClient *crdClient.CRDClient discoveryCtrl discovery.DiscoveryCtrl fcReconciler *foreign_cluster_operator.ForeignClusterReconciler prReconciler *peering_request_operator.PeeringRequestReconciler sdReconciler *search_domain_operator.SearchDomainReconciler clusterId *clusterID.ClusterID } func getClientCluster() *Cluster { cluster, mgr := getCluster() cluster.clusterId = clusterID.GetNewClusterID("client-cluster", cluster.client.Client()) cluster.fcReconciler = foreign_cluster_operator.GetFCReconciler( mgr.GetScheme(), "default", cluster.client, cluster.advClient, cluster.clusterId, 1*time.Minute, &cluster.discoveryCtrl, ) err := cluster.fcReconciler.SetupWithManager(mgr) if err != nil { klog.Error(err, err.Error()) os.Exit(1) } cluster.prReconciler = peering_request_operator.GetPRReconciler( mgr.GetScheme(), cluster.client, "default", cluster.clusterId, "liqo-config", "broadcaster", "br-sa", ) err = cluster.prReconciler.SetupWithManager(mgr) if err != nil { klog.Error(err, err.Error()) os.Exit(1) } cluster.discoveryCtrl = discovery.GetDiscoveryCtrl( "default", cluster.client, cluster.advClient, cluster.clusterId, ) cluster.sdReconciler = search_domain_operator.GetSDReconciler( mgr.GetScheme(), cluster.client, &cluster.discoveryCtrl, 1*time.Minute, ) err = cluster.sdReconciler.SetupWithManager(mgr) if err != nil { klog.Error(err, err.Error()) os.Exit(1) } go func() { err = mgr.Start(stopChan) if err != nil { klog.Error(err, err.Error()) os.Exit(1) } }() return cluster } func getServerCluster() *Cluster { cluster, mgr := getCluster() cluster.clusterId = clusterID.GetNewClusterID("server-cluster", cluster.client.Client()) cluster.fcReconciler = foreign_cluster_operator.GetFCReconciler( mgr.GetScheme(), "default", cluster.client, cluster.advClient, cluster.clusterId, 1*time.Minute, &cluster.discoveryCtrl, ) err := cluster.fcReconciler.SetupWithManager(mgr) if err != nil { klog.Error(err, err.Error()) os.Exit(1) } cluster.prReconciler = peering_request_operator.GetPRReconciler( mgr.GetScheme(), cluster.client, "default", cluster.clusterId, "liqo-config", "broadcaster", "br-sa", ) err = cluster.prReconciler.SetupWithManager(mgr) if err != nil { klog.Error(err, err.Error())
"default", cluster.client, cluster.advClient, cluster.clusterId, ) go func() { err = mgr.Start(stopChan) if err != nil { klog.Error(err, err.Error()) os.Exit(1) } }() return cluster } func getCluster() (*Cluster, manager.Manager) { cluster := &Cluster{} cluster.env = &envtest.Environment{ CRDDirectoryPaths: []string{filepath.Join("..", "..", "..", "deployments", "liqo_chart", "crds")}, } /* Then, we start the envtest cluster. */ var err error cluster.cfg, err = cluster.env.Start() if err != nil { klog.Error(err, err.Error()) os.Exit(1) } cluster.cfg.ContentConfig.GroupVersion = &v1.GroupVersion cluster.cfg.APIPath = "/apis" cluster.cfg.NegotiatedSerializer = scheme.Codecs.WithoutConversion() cluster.cfg.UserAgent = rest.DefaultKubernetesUserAgent() advCfg := *cluster.cfg advCfg.ContentConfig.GroupVersion = &protocolv1.GroupVersion crdClient.AddToRegistry("advertisements", &protocolv1.Advertisement{}, &protocolv1.AdvertisementList{}, nil, protocolv1.GroupResource) err = v1.AddToScheme(scheme.Scheme) if err != nil { klog.Error(err, err.Error()) os.Exit(1) } err = protocolv1.AddToScheme(scheme.Scheme) if err != nil { klog.Error(err, err.Error()) os.Exit(1) } cluster.client, err = crdClient.NewFromConfig(cluster.cfg) if err != nil { klog.Error(err, err.Error()) os.Exit(1) } cluster.advClient, err = crdClient.NewFromConfig(&advCfg) if err != nil { klog.Error(err, err.Error()) os.Exit(1) } k8sManager, err := ctrl.NewManager(cluster.cfg, ctrl.Options{ Scheme: scheme.Scheme, MetricsBindAddress: "0", // this avoids port binding collision }) if err != nil { klog.Error(err, err.Error()) os.Exit(1) } // creates empty CaData secret secret := &corev1.Secret{ ObjectMeta: metav1.ObjectMeta{ Name: "ca-data", }, Data: map[string][]byte{ "ca.crt": []byte(""), }, } _, err = cluster.client.Client().CoreV1().Secrets("default").Create(context.TODO(), secret, metav1.CreateOptions{}) if err != nil { klog.Error(err, err.Error()) os.Exit(1) } getLiqoConfig(cluster.client.Client()) getClusterConfig(*cluster.cfg) return cluster, k8sManager } func getLiqoConfig(client kubernetes.Interface) { // default config values cm := &corev1.ConfigMap{ ObjectMeta: metav1.ObjectMeta{ Name: "liqo-config", }, Data: map[string]string{ "clusterID": "cluster-1", "podCIDR": "10.244.0.0/16", "serviceCIDR": "10.96.0.0/12", "gatewayPrivateIP": "10.244.2.47", "gatewayIP": "10.251.0.1", }, } _, err := client.CoreV1().ConfigMaps("default").Create(context.TODO(), cm, metav1.CreateOptions{}) if err != nil { klog.Error(err, err.Error()) os.Exit(1) } } func getClusterConfig(config rest.Config) { cc := &policyv1.ClusterConfig{ ObjectMeta: metav1.ObjectMeta{ Name: "configuration", }, Spec: policyv1.ClusterConfigSpec{ AdvertisementConfig: policyv1.AdvertisementConfig{ AutoAccept: true, MaxAcceptableAdvertisement: 5, ResourceSharingPercentage: 30, EnableBroadcaster: true, }, DiscoveryConfig: policyv1.DiscoveryConfig{ AutoJoin: true, AutoJoinUntrusted: true, Domain: "local.", EnableAdvertisement: true, EnableDiscovery: true, Name: "MyLiqo", Port: 6443, AllowUntrustedCA: false, Service: "_liqo._tcp", UpdateTime: 3, WaitTime: 2, DnsServer: "8.8.8.8:53", }, LiqonetConfig: policyv1.LiqonetConfig{ ReservedSubnets: []string{"10.0.0.0/16"}, GatewayPrivateIP: "192.168.1.1", VxlanNetConfig: liqonet.VxlanNetConfig{ Network: "", DeviceName: "", Port: "", Vni: "", }, }, }, } config.GroupVersion = &policyv1.GroupVersion client, err := crdClient.NewFromConfig(&config) if err != nil { klog.Error(err, err.Error()) os.Exit(1) } _, err = 
client.Resource("clusterconfigs").Create(cc, metav1.CreateOptions{}) if err != nil { klog.Error(err, err.Error()) os.Exit(1) } } var registryDomain = "test.liqo.io." var ptrQueries = map[string][]string{ registryDomain: { "myliqo1." + registryDomain, "myliqo2." + registryDomain, }, } type handler struct{} var hasCname = false func (h *handler) ServeDNS(w dns.ResponseWriter, r *dns.Msg) { msg := dns.Msg{} msg.SetReply(r) msg.Authoritative = true domain := msg.Question[0].Name switch r.Question[0].Qtype { case dns.TypePTR: addresses, ok := ptrQueries[domain] if ok { for _, address := range addresses { msg.Answer = append(msg.Answer, &dns.PTR{ Hdr: dns.RR_Header{Name: domain, Rrtype: dns.TypePTR, Class: dns.ClassINET, Ttl: 60}, Ptr: address, }) } } case dns.TypeSRV: var port int var host string if domain == ptrQueries[registryDomain][0] { stringPort := strings.Split(clientCluster.cfg.Host, ":")[1] port, _ = strconv.Atoi(stringPort) host = "client." + registryDomain } else if domain == ptrQueries[registryDomain][1] { stringPort := strings.Split(serverCluster.cfg.Host, ":")[1] port, _ = strconv.Atoi(stringPort) host = "server." + registryDomain } msg.Answer = append(msg.Answer, &dns.SRV{ Hdr: dns.RR_Header{Name: domain, Rrtype: dns.TypeSRV, Class: dns.ClassINET, Ttl: 60}, Priority: 0, Weight: 0, Port: uint16(port), Target: host, }) case dns.TypeTXT: msg.Answer = append(msg.Answer, &dns.TXT{ Hdr: dns.RR_Header{Name: domain, Rrtype: dns.TypeTXT, Class: dns.ClassINET, Ttl: 60}, Txt: []string{ "namespace=default", }, }) if domain == ptrQueries[registryDomain][0] { msg.Answer = append(msg.Answer, &dns.TXT{ Hdr: dns.RR_Header{Name: domain, Rrtype: dns.TypeTXT, Class: dns.ClassINET, Ttl: 60}, Txt: []string{ "id=dns-client-cluster", }, }) } else if domain == ptrQueries[registryDomain][1] { msg.Answer = append(msg.Answer, &dns.TXT{ Hdr: dns.RR_Header{Name: domain, Rrtype: dns.TypeTXT, Class: dns.ClassINET, Ttl: 60}, Txt: []string{ "id=dns-server-cluster", }, }) } case dns.TypeA: var host string if domain == "client."+registryDomain { host = strings.Split(clientCluster.cfg.Host, ":")[0] } else if domain == "server."+registryDomain { host = strings.Split(serverCluster.cfg.Host, ":")[0] } if !hasCname { msg.Answer = append(msg.Answer, &dns.A{ Hdr: dns.RR_Header{Name: domain, Rrtype: dns.TypeA, Class: dns.ClassINET, Ttl: 60}, A: net.ParseIP(host), }) } case dns.TypeCNAME: if hasCname { msg.Answer = append(msg.Answer, &dns.CNAME{ Hdr: dns.RR_Header{Name: domain, Rrtype: dns.TypeCNAME, Class: dns.ClassINET, Ttl: 60}, Target: "cname.test.liqo.io.", }) } } err := w.WriteMsg(&msg) if err != nil { klog.Error(err, err.Error()) os.Exit(1) } } func SetupDNSServer() { dnsServer := dns.Server{ Addr: "127.0.0.1:8053", Net: "udp", } dnsServer.Handler = &handler{} go func() { if err := dnsServer.ListenAndServe(); err != nil { klog.Fatal("Failed to set udp listener ", err.Error()) } }() }
os.Exit(1) } cluster.discoveryCtrl = discovery.GetDiscoveryCtrl(
2015_04b.py
import hashlib

secret_key = "yzbqklnj"

def success(number):
    # AoC 2015 day 4, part two: find a number whose MD5 digest starts with six zeroes.
    return hashlib.md5((secret_key + str(number)).encode('utf-8')).hexdigest().startswith("000000")

i = 0
while not success(i):
print(i)
i += 1
games.service.js
const db = require('_helpers/db');
const Game = db.Game;
const Developer = db.Developer;
const Publisher = db.Publisher;
const mongoose = require('mongoose');
const developerService = require('../developer/developer.service');
const publisherService = require('../publisher/publisher.service');

module.exports = {
    getAll,
    getById,
    create,
    getImages,
    getByTitle,
    update,
    getAllWithDevs,
    getAllWithPubs
};

async function getAll() {
    return await Game.find().select();
}

async function update(id, gameParam) {
    const game = await Game.findById(id);

    if (!game) throw 'Game not found';
    if (game.title !== gameParam.title && await Game.findOne({ title: gameParam.title })) {
        throw 'Game "' + gameParam.title + '" already exists';
    }

    let dev = {};
    let pub = {};
    if (await Developer.findOne({ developer: gameParam.developer })) {
        dev = await Developer.findOne({ developer: gameParam.developer });
    } else {
        await developerService.create({ developer: gameParam.developer });
        dev = await Developer.findOne({ developer: gameParam.developer });
    }

    if (await Publisher.findOne({ publisher: gameParam.publisher })) {
        pub = await Publisher.findOne({ publisher: gameParam.publisher });
    } else {
        await publisherService.create({ publisher: gameParam.publisher });
        pub = await Publisher.findOne({ publisher: gameParam.publisher });
    }

    gameParam.devId = dev._id;
    gameParam.pubId = pub._id;

    Object.assign(game, gameParam);
    await game.save();
}

async function getAllWithDevs(dev) {
    const regex = new RegExp(dev, 'i');
    const games = [];
    // Join every game with its developer document, then filter by developer name.
    const results = await Game.aggregate([
        {
            $lookup: {
                from: 'developers',
                localField: 'devId',
                foreignField: '_id',
                as: 'developer'
            }
        }
    ]);
    for (let i = 0; i < results.length; i++) {
        if (regex.test(results[i].developer[0].developer)) {
            games.push(results[i]);
        }
    }
    return games;
}

async function getAllWithPubs(publisher) {
    const regex = new RegExp(publisher, 'i');
    const games = [];
    // Join every game with its publisher document, then filter by publisher name.
    const results = await Game.aggregate([
        {
            $lookup: {
                from: 'publishers',
                localField: 'pubId',
                foreignField: '_id',
                as: 'publisher'
            }
        }
    ]);
    for (let i = 0; i < results.length; i++) {
        if (regex.test(results[i].publisher[0].publisher)) {
            games.push(results[i]);
        }
    }
    return games;
}

async function getByTitle(param) {
    const regex = new RegExp(param, 'i');
    return await Game.find({ title: regex });
}

async function create(gameParam) {
    if (await Game.findOne({ title: gameParam.title })) {
        throw 'Game with title "' + gameParam.title + '" already exists';
    }

    let dev = {};
    let pub = {};
    if (await Developer.findOne({ developer: gameParam.developer })) {
        dev = await Developer.findOne({ developer: gameParam.developer });
    } else {
        await developerService.create({ developer: gameParam.developer });
        dev = await Developer.findOne({ developer: gameParam.developer });
    }

    if (await Publisher.findOne({ publisher: gameParam.publisher })) {
        pub = await Publisher.findOne({ publisher: gameParam.publisher });
    } else {
        await publisherService.create({ publisher: gameParam.publisher });
        pub = await Publisher.findOne({ publisher: gameParam.publisher });
    }

    gameParam.devId = dev._id;
    gameParam.pubId = pub._id;

    let newGame = new Game(gameParam);
    await newGame.save();
}

async function getById(id) {
    return await Game.findById(id).select();
}

function
(el) { return mongoose.Types.ObjectId(el) ; } async function getImages(id){ id = CorrectType(id); return await Game.aggregate([ { $match : { "_id": id } }, { $lookup: { from: "images", localField: "_id", foreignField: "gameId", as: "images" } }, { $lookup: { from: "developers", localField: "devId", foreignField: "_id", as: "developer" } }, { $lookup: { from: "publishers", localField: "pubId", foreignField: "_id", as: "publisher" } } ]) }
CorrectType
services.py
import re import uuid from django.db import transaction from django.utils import timezone from django.urls import reverse from django.contrib.auth import get_user_model from django.contrib.sites.models import Site from django.core.files import File from django.utils.translation import gettext_lazy as _ from django.conf import settings from froide.account.services import AccountService from froide.helper.text_utils import redact_subject from froide.helper.storage import add_number_to_filename from froide.helper.db_utils import save_obj_with_slug from froide.problem.models import ProblemReport from .models import FoiRequest, FoiMessage, RequestDraft, FoiProject, FoiAttachment from .models.message import ( BOUNCE_TAG, HAS_BOUNCED_TAG, AUTO_REPLY_TAG, BOUNCE_RESENT_TAG, ) from .utils import ( generate_secret_address, construct_initial_message_body, get_publicbody_for_email, redact_plaintext_with_request, ) from .hooks import registry from .tasks import create_project_requests, convert_attachment_task User = get_user_model() class BaseService(object): def __init__(self, data, **kwargs): self.data = data self.kwargs = kwargs def execute(self, request=None): return self.process(request=request) def generate_unique_secret_address(user): while True: address = generate_secret_address(user) try: FoiRequest.objects.get(secret_address=address) except FoiRequest.DoesNotExist: break return address class CreateRequestService(BaseService): def process(self, request=None): data = self.data user = data["user"] user_created = False user_auth = user.is_authenticated if not user_auth: user, user_created = AccountService.create_user(**self.data) self.data["user"] = user if not user_created and not user_auth: return self.create_token_draft(user) if request is not None: extra = registry.run_hook( "pre_request_creation", request, user=user, data=data ) if extra is not None: data.update(extra) if len(self.data["publicbodies"]) > 1: foi_object = self.create_project() else: foi_object = self.create_request(self.data["publicbodies"][0]) if user_created: AccountService(user).send_confirmation_mail( request_id=foi_object.pk, reference=foi_object.reference, redirect_url=self.data.get("redirect_url"), ) self.post_creation(foi_object) return foi_object def create_token_draft(self, user): """ User is not authenticated, but has given valid email. Create a draft object with a token, send token to email. 
""" from .views import MakeRequestView data = self.data additional_kwargs = dict( subject=data.get("subject", ""), body=data.get("body", ""), full_text=data.get("full_text", False), public=data["public"], reference=data.get("reference", ""), law_type=data.get("law_type", ""), ) flag_keys = set(MakeRequestView.FORM_CONFIG_PARAMS) | {"redirect_url"} flags = {k: v for k, v in data.items() if k in flag_keys} additional_kwargs["flags"] = flags draft = RequestDraft.objects.create( user=None, token=uuid.uuid4(), **additional_kwargs ) draft.publicbodies.set(data["publicbodies"]) claim_url = reverse("foirequest-claim_draft", kwargs={"token": draft.token}) AccountService(user).send_confirm_action_mail( claim_url, draft.subject, reference=draft.reference, redirect_url=self.data.get("redirect_url"), ) return draft def create_project(self): data = self.data user = data["user"] project = FoiProject( title=data["subject"], description=data["body"], status=FoiProject.STATUS_PENDING, public=data["public"], user=user, site=Site.objects.get_current(), reference=data.get("reference", ""), language=data.get("language", ""), request_count=len(self.data["publicbodies"]), ) save_obj_with_slug(project) project.publicbodies.add(*data["publicbodies"]) if "tags" in data and data["tags"]: project.tags.add(*data["tags"]) FoiProject.project_created.send(sender=project) publicbody_ids = [pb.pk for pb in data["publicbodies"]] extra = {"full_text": data.get("full_text", False)} create_project_requests.delay(project.id, publicbody_ids, **extra) return project def create_request(self, publicbody, sequence=0): data = self.data user = data["user"] now = timezone.now() request = FoiRequest( title=data["subject"], public_body=publicbody, user=data["user"], description=data["body"], public=data["public"], language=data.get("language", ""), site=Site.objects.get_current(), reference=data.get("reference", ""), first_message=now, last_message=now, project=data.get("project"), project_order=data.get("project_order"), ) send_now = False if not user.is_active: request.status = FoiRequest.STATUS.AWAITING_USER_CONFIRMATION request.visibility = FoiRequest.VISIBILITY.INVISIBLE else: request.status = FoiRequest.STATUS.AWAITING_RESPONSE request.determine_visibility() send_now = True request.secret_address = generate_unique_secret_address(user) foilaw = None if data.get("law_type"): law_type = data["law_type"] foilaw = publicbody.get_applicable_law(law_type=law_type) if foilaw is None: foilaw = publicbody.default_law request.law = foilaw request.jurisdiction = foilaw.jurisdiction if send_now: request.due_date = request.law.calculate_due_date() if data.get("blocked"): send_now = False request.is_blocked = True self.pre_save_request(request) save_obj_with_slug(request, count=sequence) if "tags" in data and data["tags"]: request.tags.add(*data["tags"]) subject = "%s [#%s]" % (request.title, request.pk) user_replacements = user.get_redactions() message = FoiMessage( request=request, sent=False, is_response=False, sender_user=user, sender_email=request.secret_address, sender_name=user.display_name(), timestamp=now, status="awaiting_response", subject=subject, subject_redacted=redact_subject(subject, user_replacements), ) send_address = bool(self.data.get("address")) message.plaintext = construct_initial_message_body( request, text=data["body"], foilaw=foilaw, full_text=data.get("full_text", False), send_address=send_address, ) message.plaintext_redacted = redact_plaintext_with_request( message.plaintext, request, ) message.recipient_public_body = 
publicbody message.recipient = publicbody.name message.recipient_email = publicbody.get_email(data.get("law_type")) FoiRequest.request_to_public_body.send(sender=request) message.save() FoiRequest.request_created.send( sender=request, reference=data.get("reference", "") ) if send_now: message.send() message.save() FoiRequest.message_sent.send( sender=request, message=message, ) FoiRequest.request_sent.send( sender=request, reference=data.get("reference", "") ) return request def pre_save_request(self, request): pass def post_creation(self, foi_object): data = self.data draft = data.get("draft") if draft: if isinstance(foi_object, FoiRequest): draft.request = foi_object draft.project = None else: draft.project = foi_object draft.request = None draft.save() class CreateRequestFromProjectService(CreateRequestService): def process(self, request=None): data = self.data pb = data["publicbody"] return self.create_request(pb, sequence=data["project_order"]) class CreateSameAsRequestService(CreateRequestService): def create_request(self, publicbody, sequence=0): original_request = self.data["original_foirequest"] sequence = original_request.same_as_count + 1 return super().create_request(publicbody, sequence=sequence) def pre_save_request(self, request): original_request = self.data["original_foirequest"] request.same_as = original_request request.campaign = original_request.campaign request.not_publishable = original_request.not_publishable class SaveDraftService(BaseService): def process(self, request=None): data = self.data request_form = data["request_form"] draft = request_form.cleaned_data.get("draft", None) additional_kwargs = dict( subject=request_form.cleaned_data.get("subject", ""), body=request_form.cleaned_data.get("body", ""), full_text=request_form.cleaned_data.get("full_text", False), public=request_form.cleaned_data["public"], reference=request_form.cleaned_data.get("reference", ""), law_type=request_form.cleaned_data.get("law_type", ""), ) if draft is None: draft = RequestDraft.objects.create(user=request.user, **additional_kwargs) else: RequestDraft.objects.filter(id=draft.id).update(**additional_kwargs) draft.publicbodies.set(data["publicbodies"]) return draft class ReceiveEmailService(BaseService): def process(self, request=None): foirequest = self.kwargs["foirequest"] publicbody = self.kwargs.get("publicbody", None) email = self.data subject = email.subject or "" subject = subject[:250] message_id = email.message_id or "" if message_id: message_id = message_id[:512] recipient_name, recipient_email = self.get_recipient_name_email() message = FoiMessage( request=foirequest, subject=subject, email_message_id=message_id, is_response=True, sender_name=email.from_[0], sender_email=email.from_[1], recipient=recipient_name, recipient_email=recipient_email, plaintext=email.body, html=email.html, ) message.update_email_headers(email) is_bounce = email.bounce_info.is_bounce if not is_bounce: if publicbody is None: publicbody = get_publicbody_for_email(message.sender_email, foirequest) if publicbody is None: publicbody = foirequest.public_body else: publicbody = None message.sender_public_body = publicbody message.content_hidden = self.should_hide_content(email, foirequest, publicbody) if email.date is None: message.timestamp = timezone.now() else: message.timestamp = email.date user_replacements = foirequest.user.get_redactions() message.subject_redacted = redact_subject(message.subject, user_replacements) message.plaintext_redacted = redact_plaintext_with_request( message.plaintext, 
foirequest, redact_closing=True, ) if is_bounce: self.process_bounce_message(message) return message.save() if email.is_auto_reply: message.tags.add(AUTO_REPLY_TAG) foirequest._messages = None foirequest.status = FoiRequest.STATUS.AWAITING_CLASSIFICATION foirequest.save()
def get_recipient_name_email(self): foirequest = self.kwargs["foirequest"] email = self.data recipient_name, recipient_email = "", "" if email.is_direct_recipient(foirequest.secret_address): recipient_name = foirequest.user.display_name() recipient_email = foirequest.secret_address else: try: recipient_name = email.to[0][0] recipient_email = email.to[0][1] except IndexError: pass return recipient_name, recipient_email def should_hide_content(self, email, foirequest, publicbody): # Hide auto replies and bounces as they may expose sensitive info if email.is_auto_reply or email.bounce_info.is_bounce: return True # Hide mediatior replies so it stays confidential by default if ( foirequest.law and foirequest.law.mediator and publicbody == foirequest.law.mediator ): return True funcs = settings.FROIDE_CONFIG["hide_content_funcs"] for func in funcs: if func(email): return True return False def process_bounce_message(self, message): email = self.data foirequest = self.kwargs["foirequest"] # Find message for mes in reversed(foirequest.messages): if mes.recipient_email and mes.recipient_email in message.plaintext: break else: mes = None message.original = mes message.save() message.tags.add(BOUNCE_TAG) if mes: mes.tags.add(HAS_BOUNCED_TAG) ProblemReport.objects.report( message=mes or message, kind="bounce_publicbody", description=email.bounce_info.diagnostic_code or "", auto_submitted=True, ) foirequest._messages = None foirequest.save() self.add_attachments(foirequest, message, email.attachments) def add_attachments(self, foirequest, message, attachments): account_service = AccountService(foirequest.user) names = set() for i, attachment in enumerate(attachments): att = FoiAttachment( belongs_to=message, name=attachment.name, size=attachment.size, filetype=attachment.content_type, ) if not att.name: att.name = _("attached_file_%d") % i # Translators: replacement for person name in filename repl = str(_("NAME")) att.name = account_service.apply_name_redaction(att.name, repl) att.name = re.sub(r"[^A-Za-z0-9_\.\-]", "", att.name) att.name = att.name[:250] # Assure name is unique if att.name in names: att.name = add_number_to_filename(att.name, i) names.add(att.name) if foirequest.not_publishable: att.can_approve = False attachment._committed = False att.file = File(attachment) att.save() if att.can_convert_to_pdf(): self.trigger_convert_pdf(att.id) def trigger_convert_pdf(self, att_id): transaction.on_commit(lambda: convert_attachment_task.delay(att_id)) class ActivatePendingRequestService(BaseService): def process(self, request=None): if "request_id" in self.data: try: foirequest = FoiRequest.objects.get(id=self.data["request_id"]) except FoiRequest.DoesNotExist: return None else: foirequest = self.data["foirequest"] if request is not None and request.user != foirequest.user: return send_now = foirequest.set_status_after_change() if send_now and foirequest.law: foirequest.due_date = foirequest.law.calculate_due_date() foirequest.save() if send_now: foirequest.safe_send_first_message() FoiRequest.request_sent.send(sender=foirequest) return foirequest class ResendBouncedMessageService(BaseService): def process(self, request=None): message = self.data if message.original: message.tags.add(BOUNCE_RESENT_TAG) return self.resend_message(message.original) return self.resend_message(message) def resend_message(self, sent_message): sent_message.tags.remove(HAS_BOUNCED_TAG) foirequest = sent_message.request sent_message.recipient_email = foirequest.public_body.email sent_message.sent = False sent_message.save() 
sent_message.force_resend() return sent_message
self.add_attachments(foirequest, message, email.attachments) foirequest.message_received.send(sender=foirequest, message=message)
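The create_token_draft docstring above describes a claim-by-token flow: the visitor is not authenticated, so the draft is parked under a random token and the claim URL is mailed out, meaning only the mailbox owner can turn the draft into a real request. A minimal Python sketch of that idea follows; `drafts` and `make_claim_url` are invented names for illustration, not froide API (the real code persists a RequestDraft and sends the URL via AccountService.send_confirm_action_mail):

import uuid

drafts = {}  # stand-in for the RequestDraft table

def make_claim_url(base_url, draft_data):
    token = uuid.uuid4()                         # unguessable claim token
    drafts[str(token)] = draft_data              # park the draft until claimed
    return f"{base_url}/request/claim/{token}"   # mailed out, never displayed

def claim(token):
    # Possession of the token stands in for control of the mailbox.
    return drafts.pop(str(token), None)

url = make_claim_url("https://example.org", {"subject": "FOI request"})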
config.rs
use super::{discover::Discover, DefaultPolicy, ServerPolicy, Store}; use linkerd_app_core::{ control, dns, metrics, proxy::identity::LocalCrtKey, svc::NewService, Result, }; use std::collections::{HashMap, HashSet}; /// Configures inbound policies. /// /// The proxy usually watches dynamic policies from the control plane, though it can also use /// 'fixed' policies configured at startup. #[derive(Clone, Debug)] pub enum Config { Discover { control: control::Config, workload: String, default: DefaultPolicy, ports: HashSet<u16>, }, Fixed { default: DefaultPolicy, ports: HashMap<u16, ServerPolicy>, }, } // === impl Config === impl Config { pub(crate) async fn
( self, dns: dns::Resolver, metrics: metrics::ControlHttp, identity: Option<LocalCrtKey>, ) -> Result<Store> { match self { Self::Fixed { default, ports } => Ok(Store::fixed(default, ports)), Self::Discover { control, ports, workload, default, } => { let watch = { let backoff = control.connect.backoff; let c = control.build(dns, metrics, identity).new_service(()); Discover::new(workload, c).into_watch(backoff) }; Store::spawn_discover(default, ports, watch).await } } } }
build
session.rs
#![warn(missing_docs)] use crate::architecture::arm::sequences::DefaultArmSequence; use crate::architecture::arm::{ApAddress, DpAddress}; use crate::config::{ChipInfo, MemoryRegion, RegistryError, Target, TargetSelector}; use crate::core::{Architecture, CoreState, SpecificCoreState}; use crate::{ architecture::{ arm::{ ap::{AccessPortError, GenericAp, MemoryAp}, communication_interface::{ArmProbeInterface, MemoryApInformation}, memory::Component, ApInformation, SwoConfig, }, riscv::communication_interface::RiscvCommunicationInterface, }, config::DebugSequence, }; use crate::{AttachMethod, Core, CoreType, Error, Probe}; use anyhow::anyhow; use std::{fmt, time::Duration}; /// The `Session` struct represents an active debug session. /// /// ## Creating a session /// The session can be created by calling the [Session::auto_attach()] function, /// which tries to automatically select a probe, and then connect to the target. /// /// For more control, the [Probe::attach()] and [Probe::attach_under_reset()] /// methods can be used to open a `Session` from a specific [Probe]. /// /// # Usage /// The Session is the common handle that gives a user exclusive access to an active probe. /// You can create and share a session between threads to enable multiple stakeholders (e.g. GDB and RTT) to access the target taking turns, by using `Arc<Mutex<Session>>.` /// /// If you do so, make sure that both threads sleep in between tasks such that other stakeholders may take their turn. /// /// To get access to a single [Core] from the `Session`, the [Session::core()] method can be used. /// Please see the [Session::core()] method for more usage guidelines. /// #[derive(Debug)] pub struct Session { target: Target, interface: ArchitectureInterface, cores: Vec<(SpecificCoreState, CoreState)>, } enum ArchitectureInterface { Arm(Box<dyn ArmProbeInterface + 'static>), Riscv(Box<RiscvCommunicationInterface>), } impl fmt::Debug for ArchitectureInterface { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { ArchitectureInterface::Arm(..) => f.write_str("ArchitectureInterface::Arm(..)"), ArchitectureInterface::Riscv(iface) => f .debug_tuple("ArchitectureInterface::Riscv") .field(iface) .finish(), } } } impl From<ArchitectureInterface> for Architecture { fn from(value: ArchitectureInterface) -> Self { match value { ArchitectureInterface::Arm(_) => Architecture::Arm, ArchitectureInterface::Riscv(_) => Architecture::Riscv, } } } impl ArchitectureInterface { fn attach<'probe, 'target: 'probe>( &'probe mut self, core: &'probe mut SpecificCoreState, core_state: &'probe mut CoreState, target: &'target Target, ) -> Result<Core<'probe>, Error> { match self { ArchitectureInterface::Arm(state) => { let config = target .cores .get(core_state.id()) .ok_or(Error::CoreNotFound(core_state.id()))?; let arm_core_access_options = match &config.core_access_options { probe_rs_target::CoreAccessOptions::Arm(opt) => Ok(opt), probe_rs_target::CoreAccessOptions::Riscv(_) => { Err(AccessPortError::InvalidCoreAccessOption(config.clone())) } }?; let dp = match arm_core_access_options.psel { 0 => DpAddress::Default, x => DpAddress::Multidrop(x), }; let ap = ApAddress { dp, ap: arm_core_access_options.ap, }; let memory = state.memory_interface(MemoryAp::new(ap))?; core.attach_arm(core_state, memory, target) } ArchitectureInterface::Riscv(state) => core.attach_riscv(core_state, state), } } } impl Session { /// Open a new session with a given debug target. 
pub(crate) fn new( probe: Probe, target: TargetSelector, attach_method: AttachMethod, ) -> Result<Self, Error> { let (mut probe, target) = get_target_from_selector(target, attach_method, probe)?; let cores = target .cores .iter() .enumerate() .map(|(id, core)| { ( SpecificCoreState::from_core_type(core.core_type), Core::create_state(id), ) }) .collect(); let mut session = match target.architecture() { Architecture::Arm => { let config = target.cores[0].clone(); let arm_core_access_options = match config.core_access_options { probe_rs_target::CoreAccessOptions::Arm(opt) => Ok(opt), probe_rs_target::CoreAccessOptions::Riscv(_) => { Err(AccessPortError::InvalidCoreAccessOption(config.clone())) } }?; let default_memory_ap = MemoryAp::new(ApAddress { dp: match arm_core_access_options.psel { 0 => DpAddress::Default, x => DpAddress::Multidrop(x), }, ap: arm_core_access_options.ap, }); let sequence_handle = match &target.debug_sequence { DebugSequence::Arm(sequence) => sequence.clone(), DebugSequence::Riscv => { panic!("Mismatch between architecture and sequence type!") } }; if AttachMethod::UnderReset == attach_method { if let Some(dap_probe) = probe.try_as_dap_probe() { sequence_handle.reset_hardware_assert(dap_probe)?; } else { log::info!( "Custom reset sequences are not supported on {}.", probe.get_name() ); log::info!("Falling back to standard probe reset."); probe.target_reset_assert()?; } } probe.inner_attach()?; let interface = probe.try_into_arm_interface().map_err(|(_, err)| err)?; let mut interface = interface.initialize(sequence_handle.clone())?; { let mut memory_interface = interface.memory_interface(default_memory_ap)?; // Enable debug mode sequence_handle.debug_device_unlock(&mut memory_interface)?; // Enable debug mode sequence_handle.debug_core_start(&mut memory_interface)?; } let session = if attach_method == AttachMethod::UnderReset { { let mut memory_interface = interface.memory_interface(default_memory_ap)?; // we need to halt the chip here sequence_handle.reset_catch_set(&mut memory_interface)?; sequence_handle.reset_hardware_deassert(&mut memory_interface)?; } let cores = target .cores .iter() .enumerate() .map(|(id, core)| { ( SpecificCoreState::from_core_type(core.core_type), Core::create_state(id), ) }) .collect(); let mut session = Session { target, interface: ArchitectureInterface::Arm(interface), cores, }; { // Wait for the core to be halted let mut core = session.core(0)?; core.wait_for_core_halted(Duration::from_millis(100))?; } { let interface = session.get_arm_interface()?; let mut memory_interface = interface.memory_interface(default_memory_ap)?; // we need to halt the chip here sequence_handle.reset_catch_clear(&mut memory_interface)?; } { let mut core = session.core(0)?; core.wait_for_core_halted(Duration::from_millis(100))?; } session } else { Session { target, interface: ArchitectureInterface::Arm(interface), cores, } }; session } Architecture::Riscv => { // TODO: Handle attach under reset probe.inner_attach()?; let interface = probe .try_into_riscv_interface() .map_err(|(_probe, err)| err)?; let mut session = Session { target, interface: ArchitectureInterface::Riscv(Box::new(interface)), cores, }; { // Todo: Add multicore support. How to deal with any cores that are not active and won't respond? let mut core = session.core(0)?; core.halt(Duration::from_millis(100))?; } session } }; session.clear_all_hw_breakpoints()?; Ok(session) } /// Automatically creates a session with the first connected probe found. 
    pub fn auto_attach(target: impl Into<TargetSelector>) -> Result<Session, Error> {
        // Get a list of all available debug probes.
        let probes = Probe::list_all();

        // Use the first probe found.
        let probe = probes
            .get(0)
            .ok_or(Error::UnableToOpenProbe("No probe was found"))?
            .open()?;

        // Attach to a chip.
        probe.attach(target)
    }

    /// Lists the available cores with their number and their type.
    pub fn list_cores(&self) -> Vec<(usize, CoreType)> {
        self.cores
            .iter()
            .map(|(t, _)| t.core_type())
            .enumerate()
            .collect()
    }

    /// Attaches to the core with the given number.
    ///
    /// ## Usage
    /// Every time you want to perform an operation on the chip, you need to get the [Core] handle with the [Session::core()] method. This [Core] handle is merely a view into the core and provides a convenient API surface.
    ///
    /// All the state is stored in the [Session] handle.
    ///
    /// The first time you call [Session::core()] for a specific core, it will run the attach/init sequences and return a handle to the [Core].
    ///
    /// Every subsequent call is a no-op. It simply returns the handle for the user to use in further operations without calling any init sequences again.
    ///
    /// It is strongly advised to never store the [Core] handle for any significant duration! Free it as fast as possible such that other stakeholders can have access to the [Core] too.
    ///
    /// The idea behind this is: you need the smallest common denominator which you can share between threads. Since you sometimes need the [Core], sometimes the [Probe] or sometimes the [Target], the [Session] is the only common ground and the only handle you should actively store in your code.
    ///
    pub fn core(&mut self, n: usize) -> Result<Core<'_>, Error> {
        let (core, core_state) = self.cores.get_mut(n).ok_or(Error::CoreNotFound(n))?;
        self.interface.attach(core, core_state, &self.target)
    }

    /// Read available data from the SWO interface without waiting.
    ///
    /// This method is only supported for ARM-based targets, and will
    /// return [Error::ArchitectureRequired] otherwise.
    pub fn read_swo(&mut self) -> Result<Vec<u8>, Error> {
        let interface = self.get_arm_interface()?;
        interface.read_swo()
    }

    fn get_arm_interface(&mut self) -> Result<&mut Box<dyn ArmProbeInterface>, Error> {
        let interface = match &mut self.interface {
            ArchitectureInterface::Arm(state) => state,
            _ => return Err(Error::ArchitectureRequired(&["ARMv7", "ARMv8"])),
        };

        Ok(interface)
    }

    /// Reads all the available ARM CoreSight components of the currently attached target.
    ///
    /// This will recursively parse the ROM table of the attached target
    /// and create a list of all the contained components.
    pub fn get_arm_components(&mut self) -> Result<Vec<Component>, Error> {
        let interface = self.get_arm_interface()?;

        let mut components = Vec::new();

        // TODO
        let dp = DpAddress::Default;

        for ap_index in 0..(interface.num_access_ports(dp)? as u8) {
            let ap_information = interface
                .ap_information(GenericAp::new(ApAddress { dp, ap: ap_index }))?
                .clone();

            let component = match ap_information {
                ApInformation::MemoryAp(MemoryApInformation {
                    debug_base_address: 0,
                    ..
}) => Err(Error::Other(anyhow!("AP has a base address of 0"))), ApInformation::MemoryAp(MemoryApInformation { address, only_32bit_data_size: _, debug_base_address, supports_hnonsec: _, }) => { let mut memory = interface.memory_interface(MemoryAp::new(address))?; Component::try_parse(&mut memory, debug_base_address) .map_err(Error::architecture_specific) } ApInformation::Other { address } => { // Return an error, only possible to get Component from MemoryAP Err(Error::Other(anyhow!( "AP {:#x?} is not a MemoryAP, unable to get ARM component.", address ))) } }; match component { Ok(component) => { components.push(component); } Err(e) => { log::info!("Not counting AP {} because of: {}", ap_index, e); } } } Ok(components) } /// Get the target description of the connected target. pub fn target(&self) -> &Target { &self.target } /// Configure the target and probe for serial wire view (SWV) tracing. pub fn setup_swv(&mut self, core_index: usize, config: &SwoConfig) -> Result<(), Error> { // Configure SWO on the probe { let interface = self.get_arm_interface()?; interface.enable_swo(config)?; } // Enable tracing on the target { let mut core = self.core(core_index)?; crate::architecture::arm::component::enable_tracing(&mut core)?; } // Configure SWV on the target let components = self.get_arm_components()?; let mut core = self.core(core_index)?; crate::architecture::arm::component::setup_swv(&mut core, &components, config) } /// Configure the target to stop emitting SWV trace data. pub fn disable_swv(&mut self, core_index: usize) -> Result<(), Error> { crate::architecture::arm::component::disable_swv(&mut self.core(core_index)?) } /// Begin tracing a memory address over SWV. pub fn add_swv_data_trace( &mut self, core_index: usize, unit: usize, address: u32, ) -> Result<(), Error> { let components = self.get_arm_components()?; let mut core = self.core(core_index)?; crate::architecture::arm::component::add_swv_data_trace( &mut core, &components, unit, address, ) } /// Stop tracing from a given SWV unit pub fn remove_swv_data_trace(&mut self, core_index: usize, unit: usize) -> Result<(), Error> { let components = self.get_arm_components()?; let mut core = self.core(core_index)?; crate::architecture::arm::component::remove_swv_data_trace(&mut core, &components, unit) } /// Returns the memory map of the target. #[deprecated = "Use the Session::target function instead"] pub fn memory_map(&self) -> &[MemoryRegion] { &self.target.memory_map } /// Return the `Architecture` of the currently connected chip. pub fn architecture(&self) -> Architecture { match self.interface { ArchitectureInterface::Arm(_) => Architecture::Arm, ArchitectureInterface::Riscv(_) => Architecture::Riscv, } } /// Clears all hardware breakpoints on all cores pub fn
(&mut self) -> Result<(), Error> { { 0..self.cores.len() }.try_for_each(|n| { self.core(n) .and_then(|mut core| core.clear_all_hw_breakpoints()) }) } } // This test ensures that [Session] is fully [Send] + [Sync]. static_assertions::assert_impl_all!(Session: Send); // TODO tiwalun: Enable again, after rework of Session::new is done. impl Drop for Session { fn drop(&mut self) { let result = { 0..self.cores.len() }.try_for_each(|i| { self.core(i) .and_then(|mut core| core.clear_all_hw_breakpoints()) }); if let Err(err) = result { log::warn!("Could not clear all hardware breakpoints: {:?}", err); } } } /// Determine the [Target] from a [TargetSelector]. /// /// If the selector is [TargetSelector::Unspecified], the target will be looked up in the registry. /// If it its [TargetSelector::Auto], probe-rs will try to determine the target automatically, based on /// information read from the chip. fn get_target_from_selector( target: TargetSelector, attach_method: AttachMethod, probe: Probe, ) -> Result<(Probe, Target), Error> { let mut probe = probe; let target = match target { TargetSelector::Unspecified(name) => crate::config::get_target_by_name(name)?, TargetSelector::Specified(target) => target, TargetSelector::Auto => { let mut found_chip = None; // At this point we do not know what the target is, so we cannot use the chip specific reset sequence. // Thus, we try just using a normal reset for target detection if we want to do so under reset. // This can of course fail, but target detection is a best effort, not a guarantee! if AttachMethod::UnderReset == attach_method { probe.target_reset_assert()?; } probe.inner_attach()?; if probe.has_arm_interface() { match probe.try_into_arm_interface() { Ok(interface) => { let mut interface = interface.initialize(DefaultArmSequence::new())?; //let chip_result = try_arm_autodetect(interface); log::debug!("Autodetect: Trying DAP interface..."); // TODO: let dp = DpAddress::Default; let found_arm_chip = interface.read_from_rom_table(dp).unwrap_or_else(|e| { log::info!("Error during auto-detection of ARM chips: {}", e); None }); found_chip = found_arm_chip.map(ChipInfo::from); probe = interface.close(); } Err((returned_probe, err)) => { probe = returned_probe; log::debug!("Error using ARM interface: {}", err); } } } else { log::debug!("No ARM interface was present. Skipping Riscv autodetect."); } if found_chip.is_none() && probe.has_riscv_interface() { match probe.try_into_riscv_interface() { Ok(mut interface) => { let idcode = interface.read_idcode(); log::debug!("ID Code read over JTAG: {:x?}", idcode); probe = interface.close(); } Err((returned_probe, err)) => { log::debug!("Error during autodetection of RISCV chips: {}", err); probe = returned_probe; } } } else { log::debug!("No RISCV interface was present. Skipping Riscv autodetect."); } // Now we can deassert reset in case we asserted it before. This is always okay. probe.target_reset_deassert()?; if let Some(chip) = found_chip { crate::config::get_target_by_chip_info(chip)? } else { return Err(Error::ChipNotFound(RegistryError::ChipAutodetectFailed)); } } }; Ok((probe, target)) }
clear_all_hw_breakpoints
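The Session documentation above recommends wrapping the one shared handle in Arc<Mutex<Session>> and having every stakeholder hold the lock only for a single unit of work, sleeping in between so others get a turn. Sketched below in Python purely for illustration (a dict plus threading.Lock stands in for the Session and its Mutex; none of this is probe-rs API):

import threading
import time

session_lock = threading.Lock()
shared_session = {"ops": 0}  # stand-in for the shared Session

def stakeholder(do_one_unit_of_work):
    for _ in range(5):
        with session_lock:                # exclusive access, like locking the Mutex
            do_one_unit_of_work(shared_session)
        time.sleep(0.01)                  # yield between tasks so the other side runs

gdb = threading.Thread(target=stakeholder,
                       args=(lambda s: s.__setitem__("ops", s["ops"] + 1),))
rtt = threading.Thread(target=stakeholder, args=(lambda s: s.get("ops"),))
gdb.start(); rtt.start(); gdb.join(); rtt.join()
assert shared_session["ops"] == 5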
addresses.py
import logging import warnings try: import cPickle as pickle except ImportError: import pickle from lib.error import AddressBookError logger = logging.getLogger(__name__) class AddressBook:
    def __init__(self, name='primary'):
        self.name = name
        try:
            self.contact_list = pickle.load(open(self.name + '_address_book.p', 'rb'))
        except IOError:
            logger.debug('Could not load ' + self.name + ' address book!')
            warnings.warn('Could not load ' + self.name + ' address book!')
            self.contact_list = {}

    def add_contact(self, name, email, fingerprint, primary=True):
        # Make sure name is in contact list to prevent key error
        if name not in self.contact_list:
            self.contact_list[name] = {}
            self.contact_list[name]['alts'] = set()

        if primary:
            contact_data = (email, fingerprint)
            if contact_data in self.contact_list[name]['alts']:
                self.contact_list[name]['alts'].remove(contact_data)
            self.contact_list[name]['primary'] = contact_data
        else:
            self.contact_list[name]['alts'].add((email, fingerprint))

        pickle.dump(self.contact_list, open(self.name + '_address_book.p', 'wb'))

    def get_contact_list(self):
        return self.contact_list

    def get_contact_key(self, name, primary=True, alt_email=None):
        if not primary and alt_email is None:
            raise AddressBookError('Getting a non-primary key fingerprint requires an alternate email.')

        if primary:
            key = self.contact_list[name]['primary'][1]
        else:
            key = next((v[1] for v in self.contact_list[name]['alts'] if v[0] == alt_email), None)

        if key is None:
            warnings.warn('Contact not found in address book - returning None.', RuntimeWarning)

        return key

    def get_contact_email(self, name, primary=True, alt_fp=None):
        if primary:
            email = self.contact_list[name]['primary'][0]
        else:
            email = next((v[0] for v in self.contact_list[name]['alts'] if v[1] == alt_fp), None)

        if email is None:
            warnings.warn('Contact not found in address book - returning None.', RuntimeWarning)

        return email

    def del_contact(self, name, email=None):
        if email is None:
            del self.contact_list[name]
        elif self.contact_list[name]['primary'][0] == email:
            del self.contact_list[name]['primary']
        else:
            # 'alts' is a set of (email, fingerprint) tuples; find the matching
            # entry and remove it from the set.
            entry = next((v for v in self.contact_list[name]['alts'] if v[0] == email), None)
            if entry is None:
                raise AddressBookError("Could not find email address to delete!")
            self.contact_list[name]['alts'].remove(entry)

        pickle.dump(self.contact_list, open(self.name + '_address_book.p', 'wb'),
                    protocol=pickle.HIGHEST_PROTOCOL)
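A short usage sketch of the AddressBook above (all values invented; running it writes demo_address_book.p to the working directory):

book = AddressBook('demo')
book.add_contact('Alice', 'alice@example.com', 'ABCD1234')                   # primary entry
book.add_contact('Alice', 'alice@work.example', 'EF567890', primary=False)   # alternate entry

assert book.get_contact_key('Alice') == 'ABCD1234'
assert book.get_contact_email('Alice', primary=False, alt_fp='EF567890') == 'alice@work.example'

book.del_contact('Alice', email='alice@work.example')  # removes only the alternate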
rotate.rs
use crate::cmp; use crate::mem::{self, MaybeUninit}; use crate::ptr; /// Rotates the range `[mid-left, mid+right)` such that the element at `mid` becomes the first /// element. Equivalently, rotates the range `left` elements to the left or `right` elements to the /// right. /// /// # Safety /// /// The specified range must be valid for reading and writing. /// /// # Algorithm /// /// Algorithm 1 is used for small values of `left + right` or for large `T`. The elements are moved /// into their final positions one at a time starting at `mid - left` and advancing by `right` steps /// modulo `left + right`, such that only one temporary is needed. Eventually, we arrive back at /// `mid - left`. However, if `gcd(left + right, right)` is not 1, the above steps skipped over /// elements. For example: /// ```text /// left = 10, right = 6 /// the `^` indicates an element in its final place /// 6 7 8 9 10 11 12 13 14 15 . 0 1 2 3 4 5 /// after using one step of the above algorithm (The X will be overwritten at the end of the round, /// and 12 is stored in a temporary): /// X 7 8 9 10 11 6 13 14 15 . 0 1 2 3 4 5 /// ^ /// after using another step (now 2 is in the temporary): /// X 7 8 9 10 11 6 13 14 15 . 0 1 12 3 4 5 /// ^ ^ /// after the third step (the steps wrap around, and 8 is in the temporary): /// X 7 2 9 10 11 6 13 14 15 . 0 1 12 3 4 5 /// ^ ^ ^ /// after 7 more steps, the round ends with the temporary 0 getting put in the X: /// 0 7 2 9 4 11 6 13 8 15 . 10 1 12 3 14 5 /// ^ ^ ^ ^ ^ ^ ^ ^ /// ``` /// Fortunately, the number of skipped over elements between finalized elements is always equal, so /// we can just offset our starting position and do more rounds (the total number of rounds is the /// `gcd(left + right, right)` value). The end result is that all elements are finalized once and /// only once. /// /// Algorithm 2 is used if `left + right` is large but `min(left, right)` is small enough to /// fit onto a stack buffer. The `min(left, right)` elements are copied onto the buffer, `memmove` /// is applied to the others, and the ones on the buffer are moved back into the hole on the /// opposite side of where they originated. /// /// Algorithms that can be vectorized outperform the above once `left + right` becomes large enough. /// Algorithm 1 can be vectorized by chunking and performing many rounds at once, but there are too /// few rounds on average until `left + right` is enormous, and the worst case of a single /// round is always there. Instead, algorithm 3 utilizes repeated swapping of /// `min(left, right)` elements until a smaller rotate problem is left. /// /// ```text /// left = 11, right = 4 /// [4 5 6 7 8 9 10 11 12 13 14 . 0 1 2 3] /// ^ ^ ^ ^ ^ ^ ^ ^ swapping the right most elements with elements to the left /// [4 5 6 7 8 9 10 . 0 1 2 3] 11 12 13 14 /// ^ ^ ^ ^ ^ ^ ^ ^ swapping these /// [4 5 6 . 0 1 2 3] 7 8 9 10 11 12 13 14 /// we cannot swap any more, but a smaller rotation problem is left to solve /// ``` /// when `left < right` the swapping happens from the left instead. pub unsafe fn ptr_rotate<T>(mut left: usize, mut mid: *mut T, mut right: usize)
{ type BufType = [usize; 32]; if mem::size_of::<T>() == 0 { return; } loop { // N.B. the below algorithms can fail if these cases are not checked if (right == 0) || (left == 0) { return; } if (left + right < 24) || (mem::size_of::<T>() > mem::size_of::<[usize; 4]>()) { // Algorithm 1 // Microbenchmarks indicate that the average performance for random shifts is better all // the way until about `left + right == 32`, but the worst case performance breaks even // around 16. 24 was chosen as middle ground. If the size of `T` is larger than 4 // `usize`s, this algorithm also outperforms other algorithms. let x = mid.sub(left); // beginning of first round let mut tmp: T = x.read(); let mut i = right; // `gcd` can be found before hand by calculating `gcd(left + right, right)`, // but it is faster to do one loop which calculates the gcd as a side effect, then // doing the rest of the chunk let mut gcd = right; // benchmarks reveal that it is faster to swap temporaries all the way through instead // of reading one temporary once, copying backwards, and then writing that temporary at // the very end. This is possibly due to the fact that swapping or replacing temporaries // uses only one memory address in the loop instead of needing to manage two. loop { tmp = x.add(i).replace(tmp); // instead of incrementing `i` and then checking if it is outside the bounds, we // check if `i` will go outside the bounds on the next increment. This prevents // any wrapping of pointers or `usize`. if i >= left { i -= left; if i == 0 { // end of first round x.write(tmp); break; } // this conditional must be here if `left + right >= 15` if i < gcd { gcd = i; } } else { i += right; } } // finish the chunk with more rounds for start in 1..gcd { tmp = x.add(start).read(); i = start + right; loop { tmp = x.add(i).replace(tmp); if i >= left { i -= left; if i == start { x.add(start).write(tmp); break; } } else { i += right; } } } return; // `T` is not a zero-sized type, so it's okay to divide by its size. } else if cmp::min(left, right) <= mem::size_of::<BufType>() / mem::size_of::<T>() { // Algorithm 2 // The `[T; 0]` here is to ensure this is appropriately aligned for T let mut rawarray = MaybeUninit::<(BufType, [T; 0])>::uninit(); let buf = rawarray.as_mut_ptr() as *mut T; let dim = mid.sub(left).add(right); if left <= right { ptr::copy_nonoverlapping(mid.sub(left), buf, left); ptr::copy(mid, mid.sub(left), right); ptr::copy_nonoverlapping(buf, dim, left); } else { ptr::copy_nonoverlapping(mid, buf, right); ptr::copy(mid.sub(left), dim, left); ptr::copy_nonoverlapping(buf, mid.sub(left), right); } return; } else if left >= right { // Algorithm 3 // There is an alternate way of swapping that involves finding where the last swap // of this algorithm would be, and swapping using that last chunk instead of swapping // adjacent chunks like this algorithm is doing, but this way is still faster. loop { ptr::swap_nonoverlapping(mid.sub(right), mid, right); mid = mid.sub(right); left -= right; if left < right { break; } } } else { // Algorithm 3, `left < right` loop { ptr::swap_nonoverlapping(mid.sub(left), mid, left); mid = mid.add(left); right -= left; if right < left { break; } } } } }
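The doc comment above traces Algorithm 1 with left = 10, right = 6 in ASCII only. As a concrete cross-check, here is a small Python sketch of the same gcd-cycle ("juggling") idea; it is illustrative rather than the Rust implementation, stepping by `left` with a copy-back loop instead of stepping by `right` while swapping a temporary, but it performs the same gcd(left + right, right) rounds and produces the same rotation:

from math import gcd

def juggling_rotate(xs, left):
    """Left-rotate xs in place so xs[left] becomes the first element."""
    n = len(xs)
    if n == 0 or left % n == 0:
        return xs
    d = left % n
    for start in range(gcd(n, d)):    # one round per cycle, gcd(n, d) rounds total
        tmp = xs[start]
        i = start
        while True:
            j = (i + d) % n           # index of the element that ends up at position i
            if j == start:
                break
            xs[i] = xs[j]
            i = j
        xs[i] = tmp                   # close the cycle with the saved temporary
    return xs

# The doc's worked example: 16 elements, left = 10, right = 6.
assert juggling_rotate(list(range(16)), 10) == list(range(10, 16)) + list(range(10))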
test.1.py
# content of test_sample.py def func(x): return x * 2 def
(): assert func(5) == 10
test_answer
index.ts
export { default as createOmitTypenameLink } from "./createOmitTypenameLink";
config.go
package main import ( "fmt" "io/ioutil" "net/http" "net/url" "path/filepath" "strings" "time" //Path of the TOML files "https://github.com/BurntSushi/toml" in "C://Users/UName/git-mirror/src/" "github.com/BurntSushi/toml" ) type duration struct { time.Duration } type config struct { ListenAddr string Interval duration BasePath string Repo []repo Retries string Counter string } type repo struct { Name string Origin string Interval duration } //Service Declaration type Service struct { Method string Handler func(HandlerReq) RPC string } // SmartConfig Declaration type SmartConfig struct { AuthPassEnvVar string AuthUserEnvVar string DefaultEnv string ProjectRoot string GitBinPath string UploadPack bool ReceivePack bool } //HandlerReq Declaration type HandlerReq struct { w http.ResponseWriter r *http.Request RPC string Dir string File string } var smartconfig = SmartConfig{ AuthPassEnvVar: "", AuthUserEnvVar: "", DefaultEnv: "", ProjectRoot: "/tmp", GitBinPath: "/usr/bin/git", UploadPack: true, ReceivePack: true, } var services = map[string]Service{ "(.*?)/git-upload-pack$": Service{"POST", serviceRPC, "upload-pack"}, "(.*?)/git-receive-pack$": Service{"POST", serviceRPC, "receive-pack"}, "(.*?)/info/refs$": Service{"GET", getInfoRefs, ""}, "(.*?)/HEAD$": Service{"GET", getTextFile, ""}, "(.*?)/objects/info/alternates$": Service{"GET", getTextFile, ""}, "(.*?)/objects/info/http-alternates$": Service{"GET", getTextFile, ""}, "(.*?)/objects/info/packs$": Service{"GET", getInfoPacks, ""}, "(.*?)/objects/info/[^/]*$": Service{"GET", getTextFile, ""}, "(.*?)/objects/[0-9a-f]{2}/[0-9a-f]{38}$": Service{"GET", getLooseObject, ""}, "(.*?)/objects/pack/pack-[0-9a-f]{40}\\.pack$": Service{"GET", getPackFile, ""}, "(.*?)/objects/pack/pack-[0-9a-f]{40}\\.idx$": Service{"GET", getIdxFile, ""}, } func (d *duration) UnmarshalText(text []byte) (err error) { d.Duration, err = time.ParseDuration(string(text)) return } func parseConfig(filename string) (cfg config, repos map[string]repo, err error)
{
	// Parse the raw TOML file.
	raw, err := ioutil.ReadFile(filename)
	if err != nil {
		err = fmt.Errorf("unable to read config file %s, %s", filename, err)
		return
	}
	if _, err = toml.Decode(string(raw), &cfg); err != nil {
		err = fmt.Errorf("unable to load config %s, %s", filename, err)
		return
	}

	// Set defaults if required.
	if cfg.ListenAddr == "" {
		cfg.ListenAddr = ":8080"
	}
	if cfg.Interval.Duration == 0 {
		cfg.Interval.Duration = 15 * time.Minute
	}
	if cfg.BasePath == "" {
		cfg.BasePath = "."
	}
	smartconfig.ProjectRoot = cfg.BasePath
	if cfg.Retries == "" {
		cfg.Retries = "3"
	}
	if cfg.Counter == "" {
		cfg.Counter = "4"
	}
	if cfg.BasePath, err = filepath.Abs(cfg.BasePath); err != nil {
		err = fmt.Errorf("unable to get absolute path to base path, %s", err)
		return
	}

	// Fetch repos, injecting default values where needed.
	if len(cfg.Repo) == 0 {
		err = fmt.Errorf("no repos found in config %s, please define repos under [[repo]] sections", filename)
		return
	}
	repos = map[string]repo{}
	for i, r := range cfg.Repo {
		if r.Origin == "" {
			err = fmt.Errorf("Origin required for repo %d in config %s", i+1, filename)
			return
		}

		// Generate a name if there isn't one already
		if r.Name == "" {
			if u, err := url.Parse(r.Origin); err == nil && u.Scheme != "" {
				r.Name = u.Host + u.Path
			} else {
				parts := strings.Split(r.Origin, "@")
				if l := len(parts); l > 0 {
					r.Name = strings.Replace(parts[l-1], ":", "/", -1)
				}
			}
		}
		if r.Name == "" {
			err = fmt.Errorf("could not generate name for Origin %s in config %s, please manually specify a Name", r.Origin, filename)
			return
		}

		if _, ok := repos[r.Name]; ok {
			err = fmt.Errorf("multiple repos with name %s in config %s", r.Name, filename)
			return
		}
		if r.Interval.Duration == 0 {
			r.Interval.Duration = cfg.Interval.Duration
		}
		repos[r.Name] = r
	}

	return
}
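parseConfig above derives a mirror name whenever a [[repo]] entry omits Name: URL-style origins become host + path, and scp-like origins (git@host:path) have the user part dropped and the colon replaced with a slash. A small Python restatement of just that rule (derive_name is an invented name; it mirrors the Go logic only for common cases):

from urllib.parse import urlparse

def derive_name(origin):
    u = urlparse(origin)
    if u.scheme:                       # URL-style origin, e.g. https://github.com/foo/bar.git
        return u.netloc + u.path
    tail = origin.split("@")[-1]       # scp-like origin: strip user, then host:path -> host/path
    return tail.replace(":", "/")

assert derive_name("https://github.com/foo/bar.git") == "github.com/foo/bar.git"
assert derive_name("git@github.com:foo/bar.git") == "github.com/foo/bar.git"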
update.go
package update import ( "context" "fmt" "os" "github.com/henvic/wedeploycli/fancy" "github.com/henvic/wedeploycli/isterm" "github.com/henvic/wedeploycli/update" version "github.com/hashicorp/go-version" "github.com/henvic/wedeploycli/command/canceled" "github.com/henvic/wedeploycli/command/internal/we" "github.com/henvic/wedeploycli/command/update/releasenotes" "github.com/henvic/wedeploycli/defaults" "github.com/henvic/wedeploycli/verbose" "github.com/spf13/cobra" ) // UpdateCmd is used for updating this tool var UpdateCmd = &cobra.Command{ Use: "update", Args: cobra.NoArgs, RunE: updateRun, Short: "Update CLI to the latest version", } var ( channel string updateVersion string ) func updateRun(cmd *cobra.Command, args []string) error { var wectx = we.Context() var conf = wectx.Config() var params = conf.GetParams() if !cmd.Flag("channel").Changed { channel = params.ReleaseChannel } if err := checkDowngrade(); err != nil { return err } return update.Update(context.Background(), wectx.Config(), channel, updateVersion) } func checkDowngrade() error { if updateVersion == "" { verbose.Debug("updating to latest available version") return nil } fromV, fromErr := version.NewVersion(defaults.Version) toV, toErr := version.NewVersion(updateVersion) if fromErr != nil { verbose.Debug(fmt.Sprintf("bypassing checking updating: current version error: %v", fromErr)) return nil } if toErr != nil { verbose.Debug(fmt.Sprintf("checking updating to newer version: %v", toErr)) fmt.Printf("You are using version %s\n", defaults.Version) return confirmDowngrade("New version doesn't follow semantic versioning. Update anyway?") } if toV.GreaterThan(fromV) { return nil } fmt.Printf("You are using version %s\n", defaults.Version) return confirmDowngrade("Downgrade to version " + updateVersion + "?") } func confirmDowngrade(question string) error { if !isterm.Check() { verbose.Debug("skipping checking newer version: no tty") return nil } ok, err := fancy.Boolean(question) if err != nil { fmt.Fprintf(os.Stderr, "bypassing confirming new version: %v\n", err) return nil } if !ok { return canceled.CancelCommand("update canceled") } return nil } func init()
{ UpdateCmd.Flags().StringVar(&channel, "channel", defaults.StableReleaseChannel, "Release channel") UpdateCmd.Flags().StringVar(&updateVersion, "version", "", "Update to a specific version") UpdateCmd.AddCommand(releasenotes.Cmd) UpdateCmd.Flag("version").Hidden = true }
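checkDowngrade above prompts only when the requested version is not strictly newer than the running one, and treats parse failures asymmetrically: an unparsable current version skips the check entirely, while an unparsable target version forces a prompt. A rough Python restatement of that decision table (the tiny hand-rolled parser is a stand-in for hashicorp/go-version, used only for illustration):

def parse(v):
    """Minimal semver stand-in; returns None when the string does not parse."""
    try:
        return tuple(int(part) for part in v.lstrip("v").split("."))
    except ValueError:
        return None

def needs_confirmation(current, target):
    cur = parse(current)
    if cur is None:
        return False   # dev/unversioned build: update without asking
    tgt = parse(target)
    if tgt is None:
        return True    # target is not semver: ask before proceeding
    return tgt <= cur  # equal or older target: confirm the downgrade

assert needs_confirmation("1.2.0", "1.3.0") is False   # normal upgrade
assert needs_confirmation("1.2.0", "1.1.9") is True    # downgrade
assert needs_confirmation("1.2.0", "nightly") is True  # non-semver target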
views.py
import csv from django.contrib.auth import get_user_model from django.db.models.aggregates import Sum from django.http.response import HttpResponse from django.utils.decorators import method_decorator from djoser.serializers import SetPasswordSerializer from djoser.views import TokenCreateView from drf_yasg.utils import swagger_auto_schema from rest_framework import filters, status from rest_framework.decorators import action from rest_framework.generics import get_object_or_404 from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response from rest_framework.viewsets import ModelViewSet from api import serializers from api.decorators import multi_method_decorator from api.docs.schemas import (EmptyAutoSchema, follower_params, recipe_request_body) from api.filters import GlobalFilterBackend from api.pagination import FollowPagination, LimitPagination from api.permissions import (IsAdminOrReadIfAuthenticatedObjPerm, IsAdminOrReadOnly, RecipePermission) from food.models import Ingredient, IngredientInRecipe, Recipe, Tag from interactions.models import Favorite, Follow, Purchase User = get_user_model() class CustomTokenCreateView(TokenCreateView): def _action(self, serializer): response = super()._action(serializer) response.status_code = status.HTTP_201_CREATED return response @multi_method_decorator( names=['update', 'partial_update', 'destroy'], decorator=swagger_auto_schema(auto_schema=None) ) class CustomUserViewSet(ModelViewSet): queryset = User.objects.all().order_by('id') serializer_class = serializers.CustomUserSerializer pagination_class = LimitPagination permission_classes = (IsAdminOrReadIfAuthenticatedObjPerm,) def get_serializer_class(self): if self.action in ('list', 'retrieve', 'me'): return serializers.CustomUserGetSerializer elif self.action == 'set_password': return SetPasswordSerializer elif self.action == 'subscriptions': return serializers.SubscriptionsSerializer elif self.action == 'subscribe':
@action(['get'], detail=False, permission_classes=(IsAuthenticated,)) @swagger_auto_schema(auto_schema=EmptyAutoSchema) def me(self, request, pk=None): serializer = self.get_serializer(self.request.user) return Response(serializer.data, status=status.HTTP_200_OK) @action(['post'], detail=False, permission_classes=(IsAuthenticated,)) @swagger_auto_schema(request_body=SetPasswordSerializer, responses={204: 'No Content'}) def set_password(self, request, *args, **kwargs): serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) self.request.user.set_password( serializer.validated_data['new_password'] ) self.request.user.save() return Response(status=status.HTTP_204_NO_CONTENT) @action(['get'], detail=False, pagination_class=FollowPagination, permission_classes=[IsAuthenticated]) @swagger_auto_schema(responses={201: serializers.SubscriptionsSerializer}) def subscriptions(self, request): queryset = Follow.objects.filter(user=request.user) if not queryset.exists(): return Response({'error': 'Вы еще ни на кого не подписаны'}, status=status.HTTP_400_BAD_REQUEST) page = self.paginate_queryset(queryset) if page: serializer = self.get_serializer( page, many=True, context={'request': request} ) return self.get_paginated_response(serializer.data) serializer = self.get_serializer(queryset, many=True, context={'request': request}) return Response(serializer.data) @action(['get'], detail=True, permission_classes=[IsAuthenticated]) @swagger_auto_schema(manual_parameters=follower_params, responses={201: serializers.SubscriptionsSerializer}) def subscribe(self, request, pk=None): user, author = self.following_validate(request, pk) if not author: return Response({'error': user}, status=status.HTTP_400_BAD_REQUEST) data = {'user': user.id, 'author': author.id} serializer = self.get_serializer( data=data, context={'request': request} ) serializer.is_valid(raise_exception=True) serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) @subscribe.mapping.delete def delete_subscribe(self, request, pk=None): user, author, subscribe = self.following_validate(request, pk, delete=True) if not author or not subscribe: return Response({'error': user}, status=status.HTTP_400_BAD_REQUEST) subscribe.delete() return Response(status=status.HTTP_204_NO_CONTENT) def following_validate(self, request, pk, delete=False): user = request.user if not User.objects.filter(id=pk).exists(): if delete: return 'Такого пользователя еще нет', False, False return 'Такого пользователя еще нет', False author = get_object_or_404(User, id=pk) if delete: if not Follow.objects.filter(user=user, author=author).exists(): return ('У вас еще нет этого пользователя в подписках', True, False) else: return (user, author, get_object_or_404(Follow, user=user, author=author)) return user, author @multi_method_decorator( names=['create', 'update', 'partial_update', 'destroy'], decorator=swagger_auto_schema(auto_schema=None) ) class TagViewSet(ModelViewSet): queryset = Tag.objects.all() serializer_class = serializers.TagSerializer permission_classes = (IsAdminOrReadOnly,) @multi_method_decorator( names=['create', 'update', 'partial_update', 'destroy'], decorator=swagger_auto_schema(auto_schema=None) ) class IngredientsViewSet(ModelViewSet): queryset = Ingredient.objects.all() serializer_class = serializers.IngredientSerializer permission_classes = (IsAdminOrReadOnly,) filter_backends = (filters.SearchFilter,) search_fields = ('name', ) @method_decorator( swagger_auto_schema( 
request_body=recipe_request_body,
        responses={201: serializers.RecipeSerializer}
    ),
    name='create'
)
@method_decorator(
    swagger_auto_schema(
        request_body=recipe_request_body,
        responses={200: serializers.RecipeSerializer}
    ),
    name='update'
)
@method_decorator(
    swagger_auto_schema(auto_schema=None),
    name='partial_update'
)
class RecipeViewSet(ModelViewSet):
    queryset = Recipe.objects.all()
    serializer_class = serializers.RecipeSerializer
    pagination_class = LimitPagination
    permission_classes = (RecipePermission,)
    filter_backends = (GlobalFilterBackend,)
    filterset_fields = ('author',)

    def get_serializer_class(self):
        if self.action == 'favorite':
            return serializers.FavoriteSerializer
        elif self.action == 'shopping_cart':
            return serializers.PurchaseSerializer
        return self.serializer_class

    @action(['get'], detail=True, permission_classes=[IsAuthenticated])
    @swagger_auto_schema(responses={201: serializers.RecipeLiteSerializer})
    def favorite(self, request, pk=None):
        return self.alt_endpoint_create(request, pk)

    @favorite.mapping.delete
    def delete_favorite(self, request, pk=None):
        return self.alt_endpoint_delete(request, pk, favorite=True)

    @action(['get'], detail=True, permission_classes=[IsAuthenticated])
    @swagger_auto_schema(responses={201: serializers.RecipeLiteSerializer})
    def shopping_cart(self, request, pk=None):
        return self.alt_endpoint_create(request, pk)

    @shopping_cart.mapping.delete
    def delete_shopping_cart(self, request, pk=None):
        return self.alt_endpoint_delete(request, pk, cart=True)

    @action(['get'], detail=False, permission_classes=(IsAuthenticated,))
    @swagger_auto_schema(auto_schema=EmptyAutoSchema,
                         responses={200: 'Download', 401: 'NotAuthorized'})
    def download_shopping_cart(self, request):
        ingredients = (
            IngredientInRecipe.objects
            .select_related('ingredient', 'recipe')
            .prefetch_related('purchases')
            .filter(recipe__purchases__user=request.user)
            .values_list('ingredient__name', 'ingredient__measurement_unit')
            .annotate(amount=Sum('amount'))
        )
        response = HttpResponse(content_type='text/csv')
        response['Content-Disposition'] = ('attachment;'
                                           'filename="Your_shopping_list.csv"')
        writer = csv.writer(response)
        writer.writerow(['Ingredient', 'Measurement unit', 'Amount'])
        for ingredient in ingredients:
            writer.writerow(ingredient)
        return response

    def alt_endpoint_create(self, request, pk):
        verdict, recipe, user = self.recipe_validate(request, pk)
        if not verdict:
            return recipe
        data = {
            'user': user.id,
            'recipe': recipe.id,
        }
        serializer = self.get_serializer(data=data,
                                         context={'request': request})
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(serializer.data, status=status.HTTP_201_CREATED)

    def alt_endpoint_delete(self, request, pk, favorite=False, cart=False):
        verdict, obj = self.recipe_validate(request, pk, delete=True,
                                            favorite=favorite, cart=cart)
        if not verdict:
            return obj
        obj.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)

    def recipe_validate(self, request, pk,
                        delete=False, favorite=False, cart=False):
        user = request.user
        if not Recipe.objects.filter(id=pk).exists():
            error = Response({'error': 'This recipe does not exist yet'},
                             status=status.HTTP_400_BAD_REQUEST)
            # Keep the tuple arity consistent with each caller: the delete
            # path unpacks two values, the create path unpacks three.
            if delete:
                return False, error
            return False, error, None
        recipe = get_object_or_404(Recipe, id=pk)
        if delete:
            model_answer = {
                'favorite': (Favorite, 'favorites'),
                'cart': (Purchase, 'shopping list')
            }
            if favorite:
                model, answer = model_answer.get('favorite')
            if cart:
                model, answer = model_answer.get('cart')
            if not model.objects.filter(user=user, recipe=recipe).exists():
                return False, Response(
                    {'error': f'This recipe is not in your {answer} yet'},
                    status=status.HTTP_400_BAD_REQUEST
                )
            return True, get_object_or_404(model, user=user, recipe=recipe)
        return True, recipe, user
            return serializers.FollowSerializer
        return self.serializer_class
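# Illustrative sketch (not part of the original file): the shopping-list
# endpoint above relies on `values_list(...).annotate(...)` performing a
# GROUP BY over the listed fields, so each distinct ingredient collapses to a
# single row with its amounts summed. A minimal standalone equivalent, reusing
# the same model names as above:
from django.db.models import Sum

def shopping_list_rows(user):
    """One (name, unit, total_amount) row per distinct ingredient."""
    return (
        IngredientInRecipe.objects
        .filter(recipe__purchases__user=user)
        .values_list('ingredient__name', 'ingredient__measurement_unit')
        .annotate(amount=Sum('amount'))
    )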
tests.rs
use ark_ff::{ biginteger::{BigInteger, BigInteger256}, fields::{ fp6_3over2::Fp6Parameters, FftField, FftParameters, Field, FpParameters, PrimeField, SquareRootField, }, One, UniformRand, Zero, }; use ark_serialize::{buffer_bit_byte_size, CanonicalSerialize}; use ark_std::rand::Rng; use ark_std::test_rng; use core::{ cmp::Ordering, ops::{AddAssign, MulAssign, SubAssign}, }; use crate::{Fq, Fq12, Fq2, Fq6, Fq6Parameters, FqParameters, Fr}; use ark_algebra_test_templates::fields::*; pub(crate) const ITERATIONS: usize = 5; #[test] fn test_fr() { let mut rng = test_rng(); for _ in 0..ITERATIONS { let a: Fr = rng.gen(); let b: Fr = rng.gen(); field_test(a, b); primefield_test::<Fr>(); sqrt_field_test(b); let byte_size = a.serialized_size(); field_serialization_test::<Fr>(byte_size); } } #[test] fn test_fq() { let mut rng = test_rng(); for _ in 0..ITERATIONS { let a: Fq = rng.gen(); let b: Fq = rng.gen(); field_test(a, b); primefield_test::<Fq>(); sqrt_field_test(a); let byte_size = a.serialized_size(); let (_, buffer_size) = buffer_bit_byte_size(Fq::size_in_bits()); assert_eq!(byte_size, buffer_size); field_serialization_test::<Fq>(byte_size); } } #[test] fn test_fq2() { let mut rng = test_rng(); for _ in 0..ITERATIONS { let a: Fq2 = rng.gen(); let b: Fq2 = rng.gen(); field_test(a, b); sqrt_field_test(a); } frobenius_test::<Fq2, _>(Fq::characteristic(), 13); let byte_size = Fq2::zero().serialized_size(); field_serialization_test::<Fq2>(byte_size); } #[test] fn test_fq6() { let mut rng = test_rng(); for _ in 0..ITERATIONS { let g: Fq6 = rng.gen(); let h: Fq6 = rng.gen(); field_test(g, h); } frobenius_test::<Fq6, _>(Fq::characteristic(), 13); let byte_size = Fq6::zero().serialized_size(); field_serialization_test::<Fq6>(byte_size); } #[test] fn test_fq12() { let mut rng = test_rng(); for _ in 0..ITERATIONS { let g: Fq12 = rng.gen(); let h: Fq12 = rng.gen(); field_test(g, h); } frobenius_test::<Fq12, _>(Fq::characteristic(), 13); let byte_size = Fq12::zero().serialized_size(); field_serialization_test::<Fq12>(byte_size); } #[test] fn test_fq_repr_from() { assert_eq!(BigInteger256::from(100), BigInteger256([100, 0, 0, 0])); } #[test] fn test_fq_repr_is_odd() { assert!(!BigInteger256::from(0).is_odd()); assert!(BigInteger256::from(0).is_even()); assert!(BigInteger256::from(1).is_odd()); assert!(!BigInteger256::from(1).is_even()); assert!(!BigInteger256::from(324834872).is_odd()); assert!(BigInteger256::from(324834872).is_even()); assert!(BigInteger256::from(324834873).is_odd()); assert!(!BigInteger256::from(324834873).is_even()); } #[test] fn test_fq_repr_is_zero() { assert!(BigInteger256::from(0).is_zero()); assert!(!BigInteger256::from(1).is_zero()); assert!(!BigInteger256([0, 0, 1, 0]).is_zero()); } #[test] fn test_fq_repr_num_bits() { let mut a = BigInteger256::from(0); assert_eq!(0, a.num_bits()); a = BigInteger256::from(1); for i in 1..257 { assert_eq!(i, a.num_bits()); a.mul2(); } assert_eq!(0, a.num_bits()); } #[test] fn test_fq_add_assign() { // Test associativity let mut rng = ark_std::test_rng(); for _ in 0..1000 { // Generate a, b, c and ensure (a + b) + c == a + (b + c). let a = Fq::rand(&mut rng); let b = Fq::rand(&mut rng); let c = Fq::rand(&mut rng); let mut tmp1 = a; tmp1.add_assign(&b); tmp1.add_assign(&c); let mut tmp2 = b; tmp2.add_assign(&c); tmp2.add_assign(&a); assert_eq!(tmp1, tmp2); } } #[test] fn test_fq_sub_assign() { let mut rng = ark_std::test_rng(); for _ in 0..1000 { // Ensure that (a - b) + (b - a) = 0. 
let a = Fq::rand(&mut rng); let b = Fq::rand(&mut rng); let mut tmp1 = a; tmp1.sub_assign(&b); let mut tmp2 = b; tmp2.sub_assign(&a); tmp1.add_assign(&tmp2); assert!(tmp1.is_zero()); } } #[test] fn test_fq_mul_assign() { let mut rng = ark_std::test_rng(); for _ in 0..1000000 { // Ensure that (a * b) * c = a * (b * c) let a = Fq::rand(&mut rng); let b = Fq::rand(&mut rng); let c = Fq::rand(&mut rng); let mut tmp1 = a; tmp1.mul_assign(&b); tmp1.mul_assign(&c); let mut tmp2 = b; tmp2.mul_assign(&c); tmp2.mul_assign(&a); assert_eq!(tmp1, tmp2); } for _ in 0..1000000 { // Ensure that r * (a + b + c) = r*a + r*b + r*c let r = Fq::rand(&mut rng); let mut a = Fq::rand(&mut rng); let mut b = Fq::rand(&mut rng); let mut c = Fq::rand(&mut rng); let mut tmp1 = a; tmp1.add_assign(&b); tmp1.add_assign(&c); tmp1.mul_assign(&r); a.mul_assign(&r); b.mul_assign(&r); c.mul_assign(&r); a.add_assign(&b); a.add_assign(&c); assert_eq!(tmp1, a); } } #[test] fn test_fq_squaring() { let mut rng = ark_std::test_rng(); for _ in 0..1000000 { // Ensure that (a * a) = a^2 let a = Fq::rand(&mut rng); let mut tmp = a; tmp.square_in_place(); let mut tmp2 = a; tmp2.mul_assign(&a); assert_eq!(tmp, tmp2); } } #[test] fn test_fq_inverse() { assert!(Fq::zero().inverse().is_none()); let mut rng = ark_std::test_rng(); let one = Fq::one(); for _ in 0..1000 { // Ensure that a * a^-1 = 1 let mut a = Fq::rand(&mut rng); let ainv = a.inverse().unwrap(); a.mul_assign(&ainv); assert_eq!(a, one); } } #[test] fn test_fq_double_in_place() { let mut rng = ark_std::test_rng(); for _ in 0..1000 { // Ensure doubling a is equivalent to adding a to itself. let mut a = Fq::rand(&mut rng); let mut b = a; b.add_assign(&a); a.double_in_place(); assert_eq!(a, b); } } #[test] fn test_fq_negate() { { let a = -Fq::zero(); assert!(a.is_zero()); } let mut rng = ark_std::test_rng(); for _ in 0..1000 { // Ensure (a - (-a)) = 0. let mut a = Fq::rand(&mut rng); let b = -a; a.add_assign(&b); assert!(a.is_zero()); } } #[test] fn test_fq_pow() { let mut rng = ark_std::test_rng(); for i in 0..1000 { // Exponentiate by various small numbers and ensure it consists with repeated // multiplication. let a = Fq::rand(&mut rng); let target = a.pow(&[i]); let mut c = Fq::one(); for _ in 0..i { c.mul_assign(&a); } assert_eq!(c, target); } for _ in 0..1000 { // Exponentiating by the modulus should have no effect in a prime field. let a = Fq::rand(&mut rng); assert_eq!(a, a.pow(Fq::characteristic())); } } #[test] fn test_fq_sqrt() { let mut rng = ark_std::test_rng(); assert_eq!(Fq::zero().sqrt().unwrap(), Fq::zero()); for _ in 0..1000 { // Ensure sqrt(a^2) = a or -a let a = Fq::rand(&mut rng); let nega = -a; let mut b = a; b.square_in_place(); let b = b.sqrt().unwrap(); assert!(a == b || nega == b); } for _ in 0..1000 { // Ensure sqrt(a)^2 = a for random a let a = Fq::rand(&mut rng); if let Some(mut tmp) = a.sqrt() { tmp.square_in_place(); assert_eq!(a, tmp); } } } #[test] fn test_fq_num_bits() { assert_eq!(FqParameters::MODULUS_BITS, 254); assert_eq!(FqParameters::CAPACITY, 253); } #[test] fn
() { assert_eq!(FqParameters::TWO_ADICITY, 1); assert_eq!( Fq::multiplicative_generator().pow([ 0x9e10460b6c3e7ea3, 0xcbc0b548b438e546, 0xdc2822db40c0ac2e, 0x183227397098d014, ]), Fq::two_adic_root_of_unity() ); assert_eq!( Fq::two_adic_root_of_unity().pow([1 << FqParameters::TWO_ADICITY]), Fq::one() ); assert!(Fq::multiplicative_generator().sqrt().is_none()); } #[test] fn test_fq_ordering() { // BigInteger256's ordering is well-tested, but we still need to make sure the // Fq elements aren't being compared in Montgomery form. for i in 0..100 { assert!(Fq::from(BigInteger256::from(i + 1)) > Fq::from(BigInteger256::from(i))); } } #[test] fn test_fq_legendre() { use ark_ff::fields::LegendreSymbol::*; assert_eq!(QuadraticResidue, Fq::one().legendre()); assert_eq!(Zero, Fq::zero().legendre()); assert_eq!( QuadraticResidue, Fq::from(BigInteger256::from(4)).legendre() ); assert_eq!( QuadraticNonResidue, Fq::from(BigInteger256::from(5)).legendre() ); } #[test] fn test_fq2_ordering() { let mut a = Fq2::new(Fq::zero(), Fq::zero()); let mut b = a.clone(); assert!(a.cmp(&b) == Ordering::Equal); b.c0.add_assign(&Fq::one()); assert!(a.cmp(&b) == Ordering::Less); a.c0.add_assign(&Fq::one()); assert!(a.cmp(&b) == Ordering::Equal); b.c1.add_assign(&Fq::one()); assert!(a.cmp(&b) == Ordering::Less); a.c0.add_assign(&Fq::one()); assert!(a.cmp(&b) == Ordering::Less); a.c1.add_assign(&Fq::one()); assert!(a.cmp(&b) == Ordering::Greater); b.c0.add_assign(&Fq::one()); assert!(a.cmp(&b) == Ordering::Equal); } #[test] fn test_fq2_basics() { assert_eq!(Fq2::new(Fq::zero(), Fq::zero(),), Fq2::zero()); assert_eq!(Fq2::new(Fq::one(), Fq::zero(),), Fq2::one()); assert!(Fq2::zero().is_zero()); assert!(!Fq2::one().is_zero()); assert!(!Fq2::new(Fq::zero(), Fq::one(),).is_zero()); } #[test] fn test_fq2_legendre() { use ark_ff::fields::LegendreSymbol::*; assert_eq!(Zero, Fq2::zero().legendre()); // i^2 = -1 let mut m1 = -Fq2::one(); assert_eq!(QuadraticResidue, m1.legendre()); m1 = Fq6Parameters::mul_fp2_by_nonresidue(&m1); assert_eq!(QuadraticNonResidue, m1.legendre()); } #[test] fn test_fq6_mul_by_1() { let mut rng = ark_std::test_rng(); for _ in 0..1000 { let c1 = Fq2::rand(&mut rng); let mut a = Fq6::rand(&mut rng); let mut b = a; a.mul_by_1(&c1); b.mul_assign(&Fq6::new(Fq2::zero(), c1, Fq2::zero())); assert_eq!(a, b); } } #[test] fn test_fq6_mul_by_01() { let mut rng = ark_std::test_rng(); for _ in 0..1000 { let c0 = Fq2::rand(&mut rng); let c1 = Fq2::rand(&mut rng); let mut a = Fq6::rand(&mut rng); let mut b = a; a.mul_by_01(&c0, &c1); b.mul_assign(&Fq6::new(c0, c1, Fq2::zero())); assert_eq!(a, b); } } #[test] fn test_fq12_mul_by_014() { let mut rng = ark_std::test_rng(); for _ in 0..1000 { let c0 = Fq2::rand(&mut rng); let c1 = Fq2::rand(&mut rng); let c5 = Fq2::rand(&mut rng); let mut a = Fq12::rand(&mut rng); let mut b = a; a.mul_by_014(&c0, &c1, &c5); b.mul_assign(&Fq12::new( Fq6::new(c0, c1, Fq2::zero()), Fq6::new(Fq2::zero(), c5, Fq2::zero()), )); assert_eq!(a, b); } } #[test] fn test_fq12_mul_by_034() { let mut rng = ark_std::test_rng(); for _ in 0..1000 { let c0 = Fq2::rand(&mut rng); let c3 = Fq2::rand(&mut rng); let c4 = Fq2::rand(&mut rng); let mut a = Fq12::rand(&mut rng); let mut b = a; a.mul_by_034(&c0, &c3, &c4); b.mul_assign(&Fq12::new( Fq6::new(c0, Fq2::zero(), Fq2::zero()), Fq6::new(c3, c4, Fq2::zero()), )); assert_eq!(a, b); } }
test_fq_root_of_unity
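// Illustrative sketch (not part of the original suite): `num_bits` reports the
// position of the highest set bit, so each `mul2` call walks it up by exactly
// one — the same invariant the `test_fq_repr_num_bits` loop above exercises.
#[test]
fn num_bits_tracks_mul2_sketch() {
    let mut a = BigInteger256::from(1);
    assert_eq!(1, a.num_bits());
    a.mul2(); // a == 2
    assert_eq!(2, a.num_bits());
}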
model.py
'''
High level of model for training and prediction
Created October, 2017
Author: [email protected]
'''
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
import numpy as np
import logging
import math
from collections import defaultdict
from torch.optim.lr_scheduler import *
from torch.autograd import Variable
from my_utils.utils import AverageMeter
from .dreader import DNetwork

logger = logging.getLogger(__name__)

class DocReaderModel(object):
    def __init__(self, opt, embedding=None, state_dict=None):
        self.opt = opt
        self.updates = state_dict['updates'] if state_dict and 'updates' in state_dict else 0
        self.eval_embed_transfer = True
        self.train_loss = AverageMeter()

        self.network = DNetwork(opt, embedding)
        if state_dict:
            new_state = set(self.network.state_dict().keys())
            for k in list(state_dict['network'].keys()):
                if k not in new_state:
                    del state_dict['network'][k]
            for k, v in list(self.network.state_dict().items()):
                if k not in state_dict['network']:
                    state_dict['network'][k] = v
            self.network.load_state_dict(state_dict['network'])

        parameters = [p for p in self.network.parameters() if p.requires_grad]
        if opt['optimizer'] == 'sgd':
            self.optimizer = optim.SGD(parameters, opt['learning_rate'],
                                       momentum=opt['momentum'],
                                       weight_decay=opt['weight_decay'])
        elif opt['optimizer'] == 'adamax':
            self.optimizer = optim.Adamax(parameters,
                                          opt['learning_rate'],
                                          weight_decay=opt['weight_decay'])
        elif opt['optimizer'] == 'adam':
            self.optimizer = optim.Adam(parameters,
                                        opt['learning_rate'],
                                        weight_decay=opt['weight_decay'])
        elif opt['optimizer'] == 'adadelta':
            self.optimizer = optim.Adadelta(parameters,
                                            opt['learning_rate'],
                                            rho=0.95)
        else:
            raise RuntimeError('Unsupported optimizer: %s' % opt['optimizer'])
        if state_dict and 'optimizer' in state_dict:
            self.optimizer.load_state_dict(state_dict['optimizer'])

        if opt['fix_embeddings']:
            wvec_size = 0
        else:
            wvec_size = (opt['vocab_size'] - opt['tune_partial']) * opt['embedding_dim']

        if opt.get('have_lr_scheduler', False):
            if opt.get('scheduler_type', 'rop') == 'rop':
                self.scheduler = ReduceLROnPlateau(self.optimizer, mode='max', factor=opt['lr_gamma'], patience=3)
            elif opt.get('scheduler_type', 'rop') == 'exp':
                self.scheduler = ExponentialLR(self.optimizer, gamma=opt.get('lr_gamma', 0.5))
            else:
                milestones = [int(step) for step in opt.get('multi_step_lr', '10,20,30').split(',')]
                self.scheduler = MultiStepLR(self.optimizer, milestones=milestones, gamma=opt.get('lr_gamma'))
        else:
            self.scheduler = None
        self.total_param = sum([p.nelement() for p in parameters]) - wvec_size

    def update(self, batch):
        self.network.train()
        if self.opt['cuda']:
            # `async` is a reserved word in Python 3.7+; current PyTorch spells
            # the non-blocking device copy `non_blocking=True`.
            y = Variable(batch['start'].cuda(non_blocking=True)), \
                Variable(batch['end'].cuda(non_blocking=True))
        else:
            y = Variable(batch['start']), Variable(batch['end'])
        start, end = self.network(batch)
        loss = F.cross_entropy(start, y[0]) + F.cross_entropy(end, y[1])
        self.train_loss.update(loss.item(), len(start))
        self.optimizer.zero_grad()
        loss.backward()
        torch.nn.utils.clip_grad_norm_(self.network.parameters(),
                                       self.opt['grad_clipping'])
        self.optimizer.step()
        self.updates += 1
        self.reset_embeddings()
        self.eval_embed_transfer = True

    def predict(self, batch, top_k=1):
        self.network.eval()
        self.network.drop_emb = False
        start, end = self.network(batch)
        start = F.softmax(start, dim=1)
        end = F.softmax(end, dim=1)
        start = start.data.cpu()
        end = end.data.cpu()

        text = batch['text']
        spans = batch['span']
        predictions = []
        best_scores = []

        max_len = self.opt['max_len'] or start.size(1)
        doc_len = start.size(1)
        pos_enc = 
self.position_encoding(doc_len, max_len) for i in range(start.size(0)): scores = torch.ger(start[i], end[i]) scores = scores * pos_enc scores.triu_() scores = scores.numpy() best_idx = np.argpartition(scores, -top_k, axis=None)[-top_k] best_score = np.partition(scores, -top_k, axis=None)[-top_k] s_idx, e_idx = np.unravel_index(best_idx, scores.shape) s_offset, e_offset = spans[i][s_idx][0], spans[i][e_idx][1] predictions.append(text[i][s_offset:e_offset]) best_scores.append(best_score) return (predictions, best_scores) def setup_eval_embed(self, eval_embed, padding_idx = 0): self.network.lexicon_encoder.eval_embed = nn.Embedding(eval_embed.size(0), eval_embed.size(1), padding_idx = padding_idx) self.network.lexicon_encoder.eval_embed.weight.data = eval_embed for p in self.network.lexicon_encoder.eval_embed.parameters(): p.requires_grad = False self.eval_embed_transfer = True if self.opt['covec_on']: self.network.lexicon_encoder.ContextualEmbed.setup_eval_embed(eval_embed)
offset = self.opt['tune_partial'] self.network.lexicon_encoder.eval_embed.weight.data[0:offset] \ = self.network.lexicon_encoder.embedding.weight.data[0:offset] def reset_embeddings(self): if self.opt['tune_partial'] > 0: offset = self.opt['tune_partial'] if offset < self.network.lexicon_encoder.embedding.weight.data.size(0): self.network.lexicon_encoder.embedding.weight.data[offset:] \ = self.network.lexicon_encoder.fixed_embedding def save(self, filename, epoch): # strip cove network_state = dict([(k, v) for k, v in self.network.state_dict().items() if k[0:4] != 'CoVe']) if 'eval_embed.weight' in network_state: del network_state['eval_embed.weight'] if 'fixed_embedding' in network_state: del network_state['fixed_embedding'] params = { 'state_dict': {'network': network_state}, 'config': self.opt, } torch.save(params, filename) logger.info('model saved to {}'.format(filename)) def cuda(self): self.network.cuda() def position_encoding(self, m, threshold=4): encoding = np.ones((m, m), dtype=np.float32) for i in range(m): for j in range(i, m): if j - i > threshold: encoding[i][j] = float(1.0 / math.log(j - i + 1)) return torch.from_numpy(encoding)
    def update_eval_embed(self):
        if self.opt['tune_partial'] > 0:
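# Illustrative sketch (not part of the original class): how `predict` scores
# answer spans. The outer product gives P(start=i) * P(end=j) for every (i, j)
# pair, and `triu_` zeroes spans whose end precedes their start. The tensors
# below are made-up stand-ins for one document's start/end distributions.
import torch

start_p = torch.tensor([0.7, 0.2, 0.1])
end_p = torch.tensor([0.1, 0.3, 0.6])
scores = torch.ger(start_p, end_p)   # scores[i][j] = P(start=i) * P(end=j)
scores.triu_()                       # keep only spans with end >= start
best = int(scores.argmax())
print(divmod(best, scores.size(1)))  # -> (0, 2): best span starts at 0, ends at 2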
arc.rs
//! Implement a Fallible Arc

use super::FallibleBox;
use super::TryClone;

use crate::TryReserveError;
use alloc::boxed::Box;
use alloc::sync::Arc;

/// trait to implement Fallible Arc
pub trait FallibleArc<T> {
    /// try creating a new Arc, returning a Result<Arc<T>,
    /// TryReserveError> if allocation failed
    fn try_new(t: T) -> Result<Self, TryReserveError>
    where
        Self: Sized;
}

impl<T> FallibleArc<T> for Arc<T> {
    fn try_new(t: T) -> Result<Self, TryReserveError> {
        // Not truly fallible: `Arc::from(Box)` re-allocates to store the
        // reference counts next to the value, and that second allocation
        // can still abort on failure.
        let b = Box::try_new(t)?;
        Ok(Arc::from(b))
    }
}

/// Just a TryClone boilerplate for Arc
impl<T: ?Sized> TryClone for Arc<T> {
    fn try_clone(&self) -> Result<Self, TryReserveError> {
        Ok(self.clone())
    }
}

#[cfg(test)]
mod test {
    #[test]
    fn
() { use std::sync::Arc; let mut x = Arc::new(3); *Arc::get_mut(&mut x).unwrap() = 4; assert_eq!(*x, 4); let _y = Arc::clone(&x); assert!(Arc::get_mut(&mut x).is_none()); } }
fallible_rc
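// Illustrative usage sketch (not part of the original tests): propagating an
// allocation failure from construction through a clone. The qualified
// `FallibleArc::try_new` call avoids any clash with nightly's inherent
// `Arc::try_new`.
fn arc_usage_sketch() -> Result<(), TryReserveError> {
    let a: Arc<i32> = FallibleArc::try_new(5)?;
    let b = a.try_clone()?;
    assert_eq!(*a, *b);
    Ok(())
}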
__init__.py
from adlmagics.adlmagics_main import AdlMagics def load_ipython_extension(ipython):
ipython.register_magics(AdlMagics)
build.rs
// Storage daemon (stored): microservice frontend for different storage backends // used in LNP/BP nodes. // // Written in 2022 by // Dr. Maxim Orlovsky <[email protected]> // // Copyright (C) 2022 by LNP/BP Standards Association, Switzerland. // // You should have received a copy of the MIT License along with this software. // If not, see <https://opensource.org/licenses/MIT>. use std::fs; use clap::IntoApp; use clap_complete::generate_to; use clap_complete::shells::*; pub mod opts { include!("src/opts.rs"); } pub mod bpd { pub use super::opts; include!("src/bpd/opts.rs"); } fn main() -> Result<(), configure_me_codegen::Error>
{ let outdir = "./shell"; fs::create_dir_all(outdir).expect("failed to create shell dir"); for app in [bpd::Opts::command()].iter_mut() { let name = app.get_name().to_string(); generate_to(Bash, app, &name, &outdir)?; generate_to(PowerShell, app, &name, &outdir)?; generate_to(Zsh, app, &name, &outdir)?; } // configure_me_codegen::build_script_auto() Ok(()) }
types.ts
// src/redux/modules/editorWindow/processEditor/types.ts export const SET_PROCESS_LIST = 'editorWindow/processEditor/SET_PROCESS_LIST'; export const SELECT_PROCESS = 'editorWindow/processEditor/SELECT_PROCESS'; // export const PROCESS_EDITOR_DIALOG_OPEN = 'editorWindow/processEditor/PROCESS_EDITOR_DIALOG_OPEN'; export const PROCESS_EDITOR_DIALOG_CLOSE = 'editorWindow/processEditor/PROCESS_EDITOR_DIALOG_CLOSE'; //
export const STEP_EDIT_DIALOG_OPEN = 'editorWindow/processEditor/STEP_EDIT_DIALOG_OPEN'; export const STEP_EDIT_DIALOG_CLOSE = 'editorWindow/processEditor/STEP_EDIT_DIALOG_CLOSE'; // export const UPDATE_EDITOR_DATA = 'editorWindow/processEditor/UPDATE_EDITOR_DATA'; // export const EDIT_PROCESS = 'editorWindow/processEditor/EDIT_PROCESS'; export const EXIT_EDITING = 'editorWindow/processEditor/EXIT_EDITING'; //
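// Illustrative sketch (not part of the original module): these constants are
// normally consumed through `typeof` so reducers can discriminate on a literal
// action type. `processKey` is a hypothetical payload field.
interface SelectProcessAction {
  type: typeof SELECT_PROCESS;
  payload: { processKey: string };
}

export const selectProcess = (processKey: string): SelectProcessAction => ({
  type: SELECT_PROCESS,
  payload: { processKey },
});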
immutable.rs
// Copyright (c) 2016 The vulkano developers // Licensed under the Apache License, Version 2.0 // <LICENSE-APACHE or // https://www.apache.org/licenses/LICENSE-2.0> or the MIT // license <LICENSE-MIT or https://opensource.org/licenses/MIT>, // at your option. All files in the project carrying such // notice may not be copied, modified, or distributed except // according to those terms. //! Buffer that is written once then read for as long as it is alive. //! //! Use this buffer when you have data that you never modify. //! //! Only the first ever command buffer that uses this buffer can write to it (for example by //! copying from another buffer). Any subsequent command buffer **must** only read from the buffer, //! or a panic will happen. //! //! The buffer will be stored in device-local memory if possible //! use super::{ sys::UnsafeBuffer, BufferAccess, BufferAccessObject, BufferContents, BufferInner, BufferUsage, CpuAccessibleBuffer, }; use crate::{ buffer::{sys::UnsafeBufferCreateInfo, BufferCreationError, TypedBufferAccess}, command_buffer::{ AutoCommandBufferBuilder, CommandBufferExecFuture, CommandBufferUsage, PrimaryAutoCommandBuffer, PrimaryCommandBuffer, }, device::{physical::QueueFamily, Device, DeviceOwned, Queue}, memory::{ pool::{ AllocFromRequirementsFilter, AllocLayout, MappingRequirement, MemoryPoolAlloc, PotentialDedicatedAllocation, StdMemoryPoolAlloc, }, DedicatedAllocation, DeviceMemoryAllocationError, MemoryPool, }, sync::{NowFuture, Sharing}, DeviceSize, }; use smallvec::SmallVec; use std::{ hash::{Hash, Hasher}, marker::PhantomData, mem::size_of, sync::Arc, }; /// Buffer that is written once then read for as long as it is alive. #[derive(Debug)] pub struct ImmutableBuffer<T, A = PotentialDedicatedAllocation<StdMemoryPoolAlloc>> where T: BufferContents + ?Sized, { // Inner content. inner: Arc<UnsafeBuffer>, // Memory allocated for the buffer. memory: A, // Queue families allowed to access this buffer. queue_families: SmallVec<[u32; 4]>, // Necessary to have the appropriate template parameter. marker: PhantomData<Box<T>>, } // TODO: make this prettier type ImmutableBufferFromBufferFuture = CommandBufferExecFuture<NowFuture, PrimaryAutoCommandBuffer>; impl<T> ImmutableBuffer<T> where T: BufferContents + ?Sized, { /// Builds an `ImmutableBuffer` that copies its data from another buffer. /// /// This function returns two objects: the newly-created buffer, and a future representing /// the initial upload operation. In order to be allowed to use the `ImmutableBuffer`, you must /// either submit your operation after this future, or execute this future and wait for it to /// be finished before submitting your own operation. pub fn from_buffer<B>( source: Arc<B>, usage: BufferUsage, queue: Arc<Queue>, ) -> Result< (Arc<ImmutableBuffer<T>>, ImmutableBufferFromBufferFuture), DeviceMemoryAllocationError, > where B: TypedBufferAccess<Content = T> + 'static, { unsafe { // We automatically set `transfer_destination` to true in order to avoid annoying errors. let actual_usage = BufferUsage { transfer_destination: true, ..usage }; let (buffer, init) = ImmutableBuffer::raw( source.device().clone(), source.size(), actual_usage, source.device().active_queue_families(), )?; let mut cbb = AutoCommandBufferBuilder::primary( source.device().clone(), queue.family(), CommandBufferUsage::MultipleSubmit, )?; cbb.copy_buffer(source, init).unwrap(); // TODO: return error? 
let cb = cbb.build().unwrap(); // TODO: return OomError let future = match cb.execute(queue) { Ok(f) => f, Err(_) => unreachable!(), }; Ok((buffer, future)) } } } impl<T> ImmutableBuffer<T> where T: BufferContents, { /// Builds an `ImmutableBuffer` from some data. /// /// This function builds a memory-mapped intermediate buffer, writes the data to it, builds a /// command buffer that copies from this intermediate buffer to the final buffer, and finally /// submits the command buffer as a future. /// /// This function returns two objects: the newly-created buffer, and a future representing /// the initial upload operation. In order to be allowed to use the `ImmutableBuffer`, you must /// either submit your operation after this future, or execute this future and wait for it to /// be finished before submitting your own operation. /// /// # Panics /// /// - Panics if `T` has zero size. pub fn from_data( data: T, usage: BufferUsage, queue: Arc<Queue>, ) -> Result< (Arc<ImmutableBuffer<T>>, ImmutableBufferFromBufferFuture), DeviceMemoryAllocationError, > { let source = CpuAccessibleBuffer::from_data( queue.device().clone(), BufferUsage::transfer_source(), false, data, )?; ImmutableBuffer::from_buffer(source, usage, queue) } /// Builds a new buffer with uninitialized data. Only allowed for sized data. /// /// Returns two things: the buffer, and a special access that should be used for the initial /// upload to the buffer. /// /// You will get an error if you try to use the buffer before using the initial upload access. /// However this function doesn't check whether you actually used this initial upload to fill /// the buffer like you're supposed to do. /// /// You will also get an error if you try to get exclusive access to the final buffer. /// /// # Safety /// /// - The `ImmutableBufferInitialization` should be used to fill the buffer with some initial /// data, otherwise the content is undefined. /// /// # Panics /// /// - Panics if `T` has zero size. #[inline] pub unsafe fn uninitialized( device: Arc<Device>, usage: BufferUsage, ) -> Result< ( Arc<ImmutableBuffer<T>>,
> { ImmutableBuffer::raw( device.clone(), size_of::<T>() as DeviceSize, usage, device.active_queue_families(), ) } } impl<T> ImmutableBuffer<[T]> where [T]: BufferContents, { /// # Panics /// /// - Panics if `T` has zero size. /// - Panics if `data` is empty. pub fn from_iter<D>( data: D, usage: BufferUsage, queue: Arc<Queue>, ) -> Result< (Arc<ImmutableBuffer<[T]>>, ImmutableBufferFromBufferFuture), DeviceMemoryAllocationError, > where D: IntoIterator<Item = T>, D::IntoIter: ExactSizeIterator, { let source = CpuAccessibleBuffer::from_iter( queue.device().clone(), BufferUsage::transfer_source(), false, data, )?; ImmutableBuffer::from_buffer(source, usage, queue) } /// Builds a new buffer with uninitialized data. Can be used for arrays. /// /// Returns two things: the buffer, and a special access that should be used for the initial /// upload to the buffer. /// /// You will get an error if you try to use the buffer before using the initial upload access. /// However this function doesn't check whether you actually used this initial upload to fill /// the buffer like you're supposed to do. /// /// You will also get an error if you try to get exclusive access to the final buffer. /// /// # Safety /// /// - The `ImmutableBufferInitialization` should be used to fill the buffer with some initial /// data, otherwise the content is undefined. /// /// # Panics /// /// - Panics if `T` has zero size. /// - Panics if `len` is zero. #[inline] pub unsafe fn uninitialized_array( device: Arc<Device>, len: DeviceSize, usage: BufferUsage, ) -> Result< ( Arc<ImmutableBuffer<[T]>>, Arc<ImmutableBufferInitialization<[T]>>, ), DeviceMemoryAllocationError, > { ImmutableBuffer::raw( device.clone(), len * size_of::<T>() as DeviceSize, usage, device.active_queue_families(), ) } } impl<T> ImmutableBuffer<T> where T: BufferContents + ?Sized, { /// Builds a new buffer without checking the size and granting free access for the initial /// upload. /// /// Returns two things: the buffer, and a special access that should be used for the initial /// upload to the buffer. /// You will get an error if you try to use the buffer before using the initial upload access. /// However this function doesn't check whether you used this initial upload to fill the buffer. /// You will also get an error if you try to get exclusive access to the final buffer. /// /// # Safety /// /// - You must ensure that the size that you pass is correct for `T`. /// - The `ImmutableBufferInitialization` should be used to fill the buffer with some initial /// data. /// /// # Panics /// /// - Panics if `size` is zero. #[inline] pub unsafe fn raw<'a, I>( device: Arc<Device>, size: DeviceSize, usage: BufferUsage, queue_families: I, ) -> Result< ( Arc<ImmutableBuffer<T>>, Arc<ImmutableBufferInitialization<T>>, ), DeviceMemoryAllocationError, > where I: IntoIterator<Item = QueueFamily<'a>>, { let queue_families = queue_families.into_iter().map(|f| f.id()).collect(); ImmutableBuffer::raw_impl(device, size, usage, queue_families) } // Internal implementation of `raw`. This is separated from `raw` so that it doesn't need to be // inlined. 
unsafe fn raw_impl( device: Arc<Device>, size: DeviceSize, usage: BufferUsage, queue_families: SmallVec<[u32; 4]>, ) -> Result< ( Arc<ImmutableBuffer<T>>, Arc<ImmutableBufferInitialization<T>>, ), DeviceMemoryAllocationError, > { let buffer = match UnsafeBuffer::new( device.clone(), UnsafeBufferCreateInfo { sharing: if queue_families.len() >= 2 { Sharing::Concurrent(queue_families.clone()) } else { Sharing::Exclusive }, size, usage, ..Default::default() }, ) { Ok(b) => b, Err(BufferCreationError::AllocError(err)) => return Err(err), Err(_) => unreachable!(), // We don't use sparse binding, therefore the other // errors can't happen }; let mem_reqs = buffer.memory_requirements(); let mem = MemoryPool::alloc_from_requirements( &Device::standard_pool(&device), &mem_reqs, AllocLayout::Linear, MappingRequirement::DoNotMap, Some(DedicatedAllocation::Buffer(&buffer)), |t| { if t.is_device_local() { AllocFromRequirementsFilter::Preferred } else { AllocFromRequirementsFilter::Allowed } }, )?; debug_assert!((mem.offset() % mem_reqs.alignment) == 0); buffer.bind_memory(mem.memory(), mem.offset())?; let final_buf = Arc::new(ImmutableBuffer { inner: buffer, memory: mem, queue_families: queue_families, marker: PhantomData, }); let initialization = Arc::new(ImmutableBufferInitialization { buffer: final_buf.clone(), }); Ok((final_buf, initialization)) } } impl<T, A> ImmutableBuffer<T, A> where T: BufferContents + ?Sized, { /// Returns the device used to create this buffer. #[inline] pub fn device(&self) -> &Arc<Device> { self.inner.device() } /// Returns the queue families this buffer can be used on. // TODO: use a custom iterator #[inline] pub fn queue_families(&self) -> Vec<QueueFamily> { self.queue_families .iter() .map(|&num| { self.device() .physical_device() .queue_family_by_id(num) .unwrap() }) .collect() } } unsafe impl<T, A> BufferAccess for ImmutableBuffer<T, A> where T: BufferContents + ?Sized, A: Send + Sync, { #[inline] fn inner(&self) -> BufferInner { BufferInner { buffer: &self.inner, offset: 0, } } #[inline] fn size(&self) -> DeviceSize { self.inner.size() } #[inline] fn conflict_key(&self) -> (u64, u64) { (self.inner.key(), 0) } } impl<T, A> BufferAccessObject for Arc<ImmutableBuffer<T, A>> where T: BufferContents + ?Sized, A: Send + Sync + 'static, { #[inline] fn as_buffer_access_object(&self) -> Arc<dyn BufferAccess> { self.clone() } } unsafe impl<T, A> TypedBufferAccess for ImmutableBuffer<T, A> where T: BufferContents + ?Sized, A: Send + Sync, { type Content = T; } unsafe impl<T, A> DeviceOwned for ImmutableBuffer<T, A> where T: BufferContents + ?Sized, { #[inline] fn device(&self) -> &Arc<Device> { self.inner.device() } } impl<T, A> PartialEq for ImmutableBuffer<T, A> where T: BufferContents + ?Sized, A: Send + Sync, { #[inline] fn eq(&self, other: &Self) -> bool { self.inner() == other.inner() && self.size() == other.size() } } impl<T, A> Eq for ImmutableBuffer<T, A> where T: BufferContents + ?Sized, A: Send + Sync, { } impl<T, A> Hash for ImmutableBuffer<T, A> where T: BufferContents + ?Sized, A: Send + Sync, { #[inline] fn hash<H: Hasher>(&self, state: &mut H) { self.inner().hash(state); self.size().hash(state); } } /// Access to the immutable buffer that can be used for the initial upload. 
#[derive(Debug)] pub struct ImmutableBufferInitialization<T, A = PotentialDedicatedAllocation<StdMemoryPoolAlloc>> where T: BufferContents + ?Sized, { buffer: Arc<ImmutableBuffer<T, A>>, } unsafe impl<T, A> BufferAccess for ImmutableBufferInitialization<T, A> where T: BufferContents + ?Sized, A: Send + Sync, { #[inline] fn inner(&self) -> BufferInner { self.buffer.inner() } #[inline] fn size(&self) -> DeviceSize { self.buffer.size() } #[inline] fn conflict_key(&self) -> (u64, u64) { (self.buffer.inner.key(), 0) } } impl<T, A> BufferAccessObject for Arc<ImmutableBufferInitialization<T, A>> where T: BufferContents + ?Sized, A: Send + Sync + 'static, { #[inline] fn as_buffer_access_object(&self) -> Arc<dyn BufferAccess> { self.clone() } } unsafe impl<T, A> TypedBufferAccess for ImmutableBufferInitialization<T, A> where T: BufferContents + ?Sized, A: Send + Sync, { type Content = T; } unsafe impl<T, A> DeviceOwned for ImmutableBufferInitialization<T, A> where T: BufferContents + ?Sized, { #[inline] fn device(&self) -> &Arc<Device> { self.buffer.inner.device() } } impl<T, A> Clone for ImmutableBufferInitialization<T, A> where T: BufferContents + ?Sized, { #[inline] fn clone(&self) -> ImmutableBufferInitialization<T, A> { ImmutableBufferInitialization { buffer: self.buffer.clone(), } } } impl<T, A> PartialEq for ImmutableBufferInitialization<T, A> where T: BufferContents + ?Sized, A: Send + Sync, { #[inline] fn eq(&self, other: &Self) -> bool { self.inner() == other.inner() && self.size() == other.size() } } impl<T, A> Eq for ImmutableBufferInitialization<T, A> where T: BufferContents + ?Sized, A: Send + Sync, { } impl<T, A> Hash for ImmutableBufferInitialization<T, A> where T: BufferContents + ?Sized, A: Send + Sync, { #[inline] fn hash<H: Hasher>(&self, state: &mut H) { self.inner().hash(state); self.size().hash(state); } } #[cfg(test)] mod tests { use crate::buffer::cpu_access::CpuAccessibleBuffer; use crate::buffer::immutable::ImmutableBuffer; use crate::buffer::BufferUsage; use crate::command_buffer::AutoCommandBufferBuilder; use crate::command_buffer::CommandBufferUsage; use crate::command_buffer::PrimaryCommandBuffer; use crate::sync::GpuFuture; #[test] fn from_data_working() { let (device, queue) = gfx_dev_and_queue!(); let (buffer, _) = ImmutableBuffer::from_data(12u32, BufferUsage::all(), queue.clone()).unwrap(); let destination = CpuAccessibleBuffer::from_data(device.clone(), BufferUsage::all(), false, 0).unwrap(); let mut cbb = AutoCommandBufferBuilder::primary( device.clone(), queue.family(), CommandBufferUsage::MultipleSubmit, ) .unwrap(); cbb.copy_buffer(buffer, destination.clone()).unwrap(); let _ = cbb .build() .unwrap() .execute(queue.clone()) .unwrap() .then_signal_fence_and_flush() .unwrap(); let destination_content = destination.read().unwrap(); assert_eq!(*destination_content, 12); } #[test] fn from_iter_working() { let (device, queue) = gfx_dev_and_queue!(); let (buffer, _) = ImmutableBuffer::from_iter( (0..512u32).map(|n| n * 2), BufferUsage::all(), queue.clone(), ) .unwrap(); let destination = CpuAccessibleBuffer::from_iter( device.clone(), BufferUsage::all(), false, (0..512).map(|_| 0u32), ) .unwrap(); let mut cbb = AutoCommandBufferBuilder::primary( device.clone(), queue.family(), CommandBufferUsage::MultipleSubmit, ) .unwrap(); cbb.copy_buffer(buffer, destination.clone()).unwrap(); let _ = cbb .build() .unwrap() .execute(queue.clone()) .unwrap() .then_signal_fence_and_flush() .unwrap(); let destination_content = destination.read().unwrap(); for (n, &v) in 
destination_content.iter().enumerate() { assert_eq!(n * 2, v as usize); } } #[test] fn init_then_read_same_cb() { let (device, queue) = gfx_dev_and_queue!(); let (buffer, init) = unsafe { ImmutableBuffer::<u32>::uninitialized(device.clone(), BufferUsage::all()).unwrap() }; let source = CpuAccessibleBuffer::from_data(device.clone(), BufferUsage::all(), false, 0).unwrap(); let mut cbb = AutoCommandBufferBuilder::primary( device.clone(), queue.family(), CommandBufferUsage::MultipleSubmit, ) .unwrap(); cbb.copy_buffer(source.clone(), init) .unwrap() .copy_buffer(buffer, source.clone()) .unwrap(); let _ = cbb .build() .unwrap() .execute(queue.clone()) .unwrap() .then_signal_fence_and_flush() .unwrap(); } #[test] #[ignore] // TODO: doesn't work because the submit sync layer isn't properly implemented fn init_then_read_same_future() { let (device, queue) = gfx_dev_and_queue!(); let (buffer, init) = unsafe { ImmutableBuffer::<u32>::uninitialized(device.clone(), BufferUsage::all()).unwrap() }; let source = CpuAccessibleBuffer::from_data(device.clone(), BufferUsage::all(), false, 0).unwrap(); let mut cbb = AutoCommandBufferBuilder::primary( device.clone(), queue.family(), CommandBufferUsage::MultipleSubmit, ) .unwrap(); cbb.copy_buffer(source.clone(), init).unwrap(); let cb1 = cbb.build().unwrap(); let mut cbb = AutoCommandBufferBuilder::primary( device.clone(), queue.family(), CommandBufferUsage::MultipleSubmit, ) .unwrap(); cbb.copy_buffer(buffer, source.clone()).unwrap(); let cb2 = cbb.build().unwrap(); let _ = cb1 .execute(queue.clone()) .unwrap() .then_execute(queue.clone(), cb2) .unwrap() .then_signal_fence_and_flush() .unwrap(); } #[test] #[allow(unused)] fn create_buffer_zero_size_data() { let (device, queue) = gfx_dev_and_queue!(); assert_should_panic!({ ImmutableBuffer::from_data((), BufferUsage::all(), queue.clone()).unwrap(); }); } // TODO: write tons of tests that try to exploit loopholes // this isn't possible yet because checks aren't correctly implemented yet }
Arc<ImmutableBufferInitialization<T>>, ), DeviceMemoryAllocationError,
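// Illustrative sketch (not part of the original tests): the contract the doc
// comments above describe — wait on the upload future before anything else
// reads the buffer. Uses the same `gfx_dev_and_queue!` harness as the tests
// in this file.
#[cfg(test)]
mod upload_future_sketch {
    use super::ImmutableBuffer;
    use crate::buffer::BufferUsage;
    use crate::sync::GpuFuture;

    #[test]
    fn wait_before_reading() {
        let (_device, queue) = gfx_dev_and_queue!();
        let (_buffer, init_future) =
            ImmutableBuffer::from_data(7u32, BufferUsage::all(), queue.clone()).unwrap();
        // Only after this fence signals is `_buffer` safe to read elsewhere.
        init_future.then_signal_fence_and_flush().unwrap().wait(None).unwrap();
    }
}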
test_data.rs
//! Data to be used in tests as the input for `ETHSender`. use std::time::SystemTime; // External uses use chrono::DateTime; use lazy_static::lazy_static; // Workspace uses use zksync_types::{ block::Block, Address, ExecutedOperations, ExecutedPriorityOp, Fr, FullExit, FullExitOp, PriorityOp, ZkSyncOp, ZkSyncPriorityOp, }; use zksync_types::{Action, Operation}; /// Creates a dummy operation as a test input for `ETHSender` tests. fn
(id: i64, block_number: u32, action: Action) -> Operation {
    // Create full exit operation for non-zero return data.
    let executed_full_exit_op = {
        let priority_op = FullExit {
            account_id: 0,
            eth_address: Address::zero(),
            token: 0,
        };
        ExecutedOperations::PriorityOp(Box::new(ExecutedPriorityOp {
            priority_op: PriorityOp {
                serial_id: 0,
                data: ZkSyncPriorityOp::FullExit(priority_op.clone()),
                deadline_block: 0,
                eth_hash: Vec::new(),
                eth_block: 0,
            },
            op: ZkSyncOp::FullExit(Box::new(FullExitOp {
                priority_op,
                withdraw_amount: None,
            })),
            block_index: 0,
            created_at: DateTime::from(SystemTime::UNIX_EPOCH),
        }))
    };
    Operation {
        id: Some(id),
        action,
        block: Block::new(
            block_number,
            Fr::default(),
            0,
            vec![executed_full_exit_op],
            (0, 0),
            50,
            1_000_000.into(),
            1_500_000.into(),
        ),
        accounts_updated: Vec::new(),
    }
}

lazy_static! {
    pub static ref COMMIT_OPERATIONS: Vec<Operation> = (1..10)
        .map(|id| get_operation(id, id as u32, Action::Commit))
        .collect();
    pub static ref VERIFY_OPERATIONS: Vec<Operation> = (11..20)
        .map(|id| get_operation(
            id,
            (id - 10) as u32,
            Action::Verify {
                proof: Default::default()
            }
        ))
        .collect();
}

pub fn commit_operation(idx: usize) -> Operation {
    // `assert!` takes the format string and its arguments directly; wrapping
    // them in `format!` is rejected by the 2021 edition.
    assert!(
        idx < COMMIT_OPERATIONS.len(),
        "Index {} is out of bounds for commit operations",
        idx
    );
    COMMIT_OPERATIONS[idx].clone()
}

pub fn verify_operation(idx: usize) -> Operation {
    assert!(
        idx < VERIFY_OPERATIONS.len(),
        "Index {} is out of bounds for verify operations",
        idx
    );
    VERIFY_OPERATIONS[idx].clone()
}
get_operation
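// Illustrative sketch (not part of the original file): how the fixtures are
// meant to be consumed — indexing is zero-based even though the stored ids
// start at 1 (commit) and 11 (verify).
#[cfg(test)]
mod fixture_sketch {
    use super::*;

    #[test]
    fn ids_line_up_with_indices() {
        assert_eq!(commit_operation(0).id, Some(1));
        assert_eq!(verify_operation(0).id, Some(11));
    }
}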
test_audit_log.py
from .fixtures import * from tenable.errors import * def test_event_field_name_typeerror(api): with pytest.raises(TypeError): api.audit_log.events((1, 'gt', '2018-01-01')) def
(api): with pytest.raises(TypeError): api.audit_log.events(('date', 1, '2018-01-01')) def test_event_filter_value_typeerror(api): with pytest.raises(TypeError): api.audit_log.events(('date', 'gt', 1)) def test_event_limit_typeerror(api): with pytest.raises(TypeError): api.audit_log.events(limit='nope') def test_events_standard_user_permissionerror(stdapi): with pytest.raises(PermissionError): stdapi.audit_log.events() def test_events(api): events = api.audit_log.events(('date', 'gt', '2018-01-01'), limit=100) assert isinstance(events, list) e = events[-1] check(e, 'action', str) check(e, 'actor', dict) check(e['actor'], 'id', 'uuid') check(e['actor'], 'name', str, allow_none=True) check(e, 'crud', str) check(e, 'description', str, allow_none=True) check(e, 'fields', list) for d in e['fields']: check(d, 'key', str) check(d, 'value', str) check(e, 'id', str) check(e, 'is_anonymous', bool, allow_none=True) check(e, 'is_failure', bool, allow_none=True) check(e, 'received', 'datetime') check(e, 'target', dict) check(e['target'], 'id', 'uuid') check(e['target'], 'name', str) check(e['target'], 'type', str)
test_event_filter_operator_typeerror
main.rs
use hound::WavReader; use rustfft::{FftPlanner, num_complex::Complex}; use ndarray::{Array, Axis}; use plotters::prelude::*; use ndarray_stats::QuantileExt; const WINDOW_SIZE: usize = 1024; const OVERLAP: f64 = 0.9; const SKIP_SIZE: usize = (WINDOW_SIZE as f64 * (1f64 - OVERLAP)) as usize; fn main()
{
    let mut wav = WavReader::open("example.wav").unwrap();
    let samples = wav
        .samples()
        .collect::<Result<Vec<i16>, _>>()
        .unwrap();

    println!("Creating windows {window_size} samples long from a timeline {num_samples} samples long, stepping {skip_size} samples between window starts for a {overlap} overlap, for a total of {num_windows} windows.",
        window_size = WINDOW_SIZE,
        num_samples = samples.len(),
        skip_size = SKIP_SIZE,
        overlap = OVERLAP,
        num_windows = (samples.len() / SKIP_SIZE) - 1,
    );

    // Convert to an ndarray
    // Hopefully this will keep me from messing up the dimensions
    // Mutable because the FFT takes mutable slices &[Complex<f32>]
    // let window_array = Array2::from_shape_vec((WINDOW_SIZE, windows_vec.len()), windows_vec).unwrap();
    let samples_array = Array::from(samples.clone());
    let windows = samples_array
        .windows(ndarray::Dim(WINDOW_SIZE))
        .into_iter()
        .step_by(SKIP_SIZE)
        .collect::<Vec<_>>()
        ;
    let windows = ndarray::stack(Axis(0), &windows).unwrap();

    // So to perform the FFT on each window we need a Complex<f32>, and right now we have i16s, so first let's convert
    let mut windows = windows.map(|i| Complex::from(*i as f32));

    // get the FFT up and running
    let mut planner = FftPlanner::new();
    let fft = planner.plan_fft_forward(WINDOW_SIZE);

    // Since we have a 2-D array of our windows with shape [num_windows, WINDOW_SIZE]
    // (one window per row), we can run an FFT on every row.
    // Next step is to do something multithreaded with Rayon, but we're not cool enough for that yet.
    windows.axis_iter_mut(Axis(0))
        .for_each(|mut frame| {
            fft.process(frame.as_slice_mut().unwrap());
        });

    // Get the real component of those complex numbers we get back from the FFT
    let windows = windows.map(|i| i.re);

    // And finally, only look at the first half of the spectrogram - the first (n/2)+1 points of each FFT
    // https://dsp.stackexchange.com/questions/4825/why-is-the-fft-mirrored
    let windows = windows.slice_move(ndarray::s![.., ..((WINDOW_SIZE / 2) + 1)]);

    // get some dimensions for drawing
    // The shape is in [nrows, ncols], but we want to transpose this.
    let (width, height) = match windows.shape() {
        &[first, second] => (first, second),
        _ => panic!("Windows is a {}D array, expected a 2D array", windows.ndim())
    };
    println!("Generating a {} wide x {} high image", width, height);

    let image_dimensions: (u32, u32) = (width as u32, height as u32);
    let root_drawing_area = BitMapBackend::new(
        "output.png",
        image_dimensions, // width x height. Worth it if we ever want to resize the graph.
    ).into_drawing_area();

    let spectrogram_cells = root_drawing_area.split_evenly((height, width));

    let windows_scaled = windows.map(|i| i.abs()/(WINDOW_SIZE as f32));
    let highest_spectral_density = windows_scaled.max_skipnan();

    // transpose and flip around to prepare for graphing
    /* the array is currently oriented like this:
        t = 0 |
              |
              |
              |
        t = n +-------------------
            f = 0              f = m

        so it needs to be flipped...
        t = 0 |
              |
              |
              |
        t = n +-------------------
            f = m              f = 0

        ...and transposed...
        f = m |
              |
              |
              |
        f = 0 +-------------------
            t = 0              t = n

        ...
in order to look like a proper spectrogram
    */
    let windows_flipped = windows_scaled.slice(ndarray::s![.., ..;-1]); // flips the frequency axis
    let windows_flipped = windows_flipped.t();

    // Finally add a color scale
    let color_scale = colorous::MAGMA;

    for (cell, spectral_density) in spectrogram_cells.iter().zip(windows_flipped.iter()) {
        let spectral_density_scaled = spectral_density.sqrt() / highest_spectral_density.sqrt();
        let color = color_scale.eval_continuous(spectral_density_scaled as f64);
        cell.fill(&RGBColor(color.r, color.g, color.b)).unwrap();
    };
}
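// Illustrative sketch (separate from `main`, not part of the original file):
// the flip-then-transpose dance on a tiny 2x3 array, so the reorientation in
// the block comment above is easy to eyeball. Rows are time, columns are
// frequency, exactly as in `main`.
#[allow(dead_code)]
fn orientation_sketch() {
    let a = ndarray::array![[1, 2, 3], [4, 5, 6]]; // 2 windows x 3 freq bins
    let flipped = a.slice(ndarray::s![.., ..;-1]); // highest frequency first
    let plotted = flipped.t();                     // freq runs down rows, time across columns
    assert_eq!(plotted.shape(), &[3, 2]);
    assert_eq!(plotted[[0, 0]], 3);                // top-left: f = max, t = 0
}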
archive.rs
use byteorder::{LittleEndian, WriteBytesExt}; use crc32; use std::convert; use std::io; use std::io::prelude::*; const LOCAL_FILE_HEADER_SIGNATURE: u32 = 0x04034b50; const CENTRAL_FILE_HEADER_SIGNATURE: u32 = 0x02014b50; const END_OF_CENTRAL_DIR_SIGNATURE: u32 = 0x06054b50; pub type ZipResult<T> = Result<T, ZipError>; #[derive(Debug)] pub enum ZipError { Io(io::Error), } impl convert::From<io::Error> for ZipError { fn from(err: io::Error) -> ZipError { ZipError::Io(err) } } struct ZipFileData { system: u16,
crc32: u32, compressed_size: u64, uncompressed_size: u64, file_name: String, header_start: u64, external_attributes: u32, } pub struct ZipWriter<W: Write + io::Seek> { writee: W, files: Vec<ZipFileData>, } impl<W: Write + io::Seek> Drop for ZipWriter<W> { fn drop(&mut self) -> () { match self.write_end_of_central_directory_record() { Ok(_) => {} Err(e) => { println!("Error: {:?}", e); } } } } impl<W: Write + io::Seek> ZipWriter<W> { pub fn new(writee: W) -> ZipWriter<W> { ZipWriter { writee: writee, files: Vec::new(), } } pub fn add_directory<S>(&mut self, name: S) -> ZipResult<()> where S: Into<String>, { let header_start = try!(self.writee.seek(io::SeekFrom::Current(0))); let permissions = 0o40775; let file = ZipFileData { // 3: Unix system: 3, version_made_by: 20, crc32: 0, compressed_size: 0, uncompressed_size: 0, file_name: name.into(), header_start: header_start, external_attributes: permissions << 16, }; try!(self.write_local_file_header(&file)); self.files.push(file); Ok(()) } pub fn add_file<S>(&mut self, name: S, data: &[u8]) -> ZipResult<()> where S: Into<String>, { let header_start = try!(self.writee.seek(io::SeekFrom::Current(0))); let permissions = 0o100664; let mut file = ZipFileData { // 3: Unix system: 3, version_made_by: 20, crc32: 0, compressed_size: 0, uncompressed_size: 0, file_name: name.into(), header_start: header_start, external_attributes: permissions << 16, }; try!(self.write_local_file_header(&file)); let file_start = try!(self.writee.seek(io::SeekFrom::Current(0))); try!(self.writee.write_all(data)); let file_end = try!(self.writee.seek(io::SeekFrom::Current(0))); file.crc32 = crc32::calc(&data); file.compressed_size = file_end - file_start; file.uncompressed_size = file.compressed_size; const CRC32_OFFSET: u64 = 14; try!( self.writee .seek(io::SeekFrom::Start(file.header_start + CRC32_OFFSET)) ); try!(self.writee.write_u32::<LittleEndian>(file.crc32)); try!( self.writee .write_u32::<LittleEndian>(file.compressed_size as u32) ); try!( self.writee .write_u32::<LittleEndian>(file.uncompressed_size as u32) ); try!(self.writee.seek(io::SeekFrom::Start(file_end))); self.files.push(file); Ok(()) } fn write_local_file_header(&mut self, file: &ZipFileData) -> ZipResult<()> { // local file header signature try!( self.writee .write_u32::<LittleEndian>(LOCAL_FILE_HEADER_SIGNATURE) ); // version needed to extract let version_made_by = file.system << 8 | (file.version_made_by as u16); try!(self.writee.write_u16::<LittleEndian>(version_made_by)); // general purpose bit flag let flag = if !file.file_name.is_ascii() { 1u16 << 11 } else { 0 }; try!(self.writee.write_u16::<LittleEndian>(flag)); // compression method // 0: stored try!(self.writee.write_u16::<LittleEndian>(0)); // TODO // last mod file time and last mod file date try!(self.writee.write_u16::<LittleEndian>(0)); try!(self.writee.write_u16::<LittleEndian>(0)); // crc-32 try!(self.writee.write_u32::<LittleEndian>(file.crc32)); // compressed size try!( self.writee .write_u32::<LittleEndian>(file.compressed_size as u32) ); // uncompressed size try!( self.writee .write_u32::<LittleEndian>(file.uncompressed_size as u32) ); // file name length try!( self.writee .write_u16::<LittleEndian>(file.file_name.as_bytes().len() as u16) ); // extra field length try!(self.writee.write_u16::<LittleEndian>(0)); // file name try!(self.writee.write_all(file.file_name.as_bytes())); // extra field // <none> Ok(()) } fn write_central_directory_headers(&mut self) -> ZipResult<()> { for file in self.files.iter() { // central file header 
signature
            try!(
                self.writee
                    .write_u32::<LittleEndian>(CENTRAL_FILE_HEADER_SIGNATURE)
            );
            // version made by
            let version_made_by = file.system << 8 | (file.version_made_by as u16);
            try!(self.writee.write_u16::<LittleEndian>(version_made_by));
            // version needed to extract
            try!(self.writee.write_u16::<LittleEndian>(20));
            // general purpose bit flag
            let flag = if !file.file_name.is_ascii() {
                1u16 << 11
            } else {
                0
            };
            try!(self.writee.write_u16::<LittleEndian>(flag));
            // compression method
            // 0: stored
            try!(self.writee.write_u16::<LittleEndian>(0));
            // TODO
            // last mod file time and last mod file date
            try!(self.writee.write_u16::<LittleEndian>(0));
            try!(self.writee.write_u16::<LittleEndian>(0));
            // crc-32
            try!(self.writee.write_u32::<LittleEndian>(file.crc32));
            // compressed size
            try!(
                self.writee
                    .write_u32::<LittleEndian>(file.compressed_size as u32)
            );
            // uncompressed size
            try!(
                self.writee
                    .write_u32::<LittleEndian>(file.uncompressed_size as u32)
            );
            // file name length
            try!(
                self.writee
                    .write_u16::<LittleEndian>(file.file_name.as_bytes().len() as u16)
            );
            // extra field length
            try!(self.writee.write_u16::<LittleEndian>(0));
            // file comment length
            try!(self.writee.write_u16::<LittleEndian>(0));
            // disk number start
            try!(self.writee.write_u16::<LittleEndian>(0));
            // internal file attributes
            try!(self.writee.write_u16::<LittleEndian>(0));
            // external file attributes
            try!(
                self.writee
                    .write_u32::<LittleEndian>(file.external_attributes)
            );
            // relative offset of local header
            try!(
                self.writee
                    .write_u32::<LittleEndian>(file.header_start as u32)
            );
            // file name
            try!(self.writee.write_all(file.file_name.as_bytes()));
            // extra field
            // <none>
            // file comment
            // <none>
        }
        Ok(())
    }

    fn write_end_of_central_directory_record(&mut self) -> ZipResult<()> {
        let central_start = try!(self.writee.seek(io::SeekFrom::Current(0)));
        try!(self.write_central_directory_headers());
        let central_size = try!(self.writee.seek(io::SeekFrom::Current(0))) - central_start;
        let comment = b"srszip".to_vec();
        // end of central dir signature
        try!(
            self.writee
                .write_u32::<LittleEndian>(END_OF_CENTRAL_DIR_SIGNATURE)
        );
        // number of this disk
        try!(self.writee.write_u16::<LittleEndian>(0));
        // number of the disk with the start of the central directory
        try!(self.writee.write_u16::<LittleEndian>(0));
        // total number of entries in the central directory on this disk
        try!(
            self.writee
                .write_u16::<LittleEndian>(self.files.len() as u16)
        );
        // total number of entries in the central directory
        try!(
            self.writee
                .write_u16::<LittleEndian>(self.files.len() as u16)
        );
        // size of the central directory
        try!(self.writee.write_u32::<LittleEndian>(central_size as u32));
        // offset of start of central directory with respect to the starting disk number
        try!(self.writee.write_u32::<LittleEndian>(central_start as u32));
        // .ZIP file comment length
        try!(self.writee.write_u16::<LittleEndian>(comment.len() as u16));
        // .ZIP file comment
        try!(self.writee.write_all(&comment));
        Ok(())
    }
}

#[test]
fn add_dir() {
    let mut zip = ZipWriter::new(io::Cursor::new(Vec::new()));
    let r = zip.add_directory("test/");
    assert_eq!(r.is_ok(), true);
    assert_eq!(zip.writee.get_ref().len(), 35);
}

#[test]
fn add_file() {
    let mut zip = ZipWriter::new(io::Cursor::new(Vec::new()));
    let r = zip.add_file("test", &[b'f', b'o', b'o']);
    assert_eq!(r.is_ok(), true);
    assert_eq!(zip.writee.get_ref().len(), 37);
}
version_made_by: u8,
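// Illustrative usage sketch (not part of the original tests): building a tiny
// archive in memory. The end-of-central-directory record is emitted by the
// `Drop` impl when `zip` goes out of scope.
#[test]
fn usage_sketch() {
    let mut zip = ZipWriter::new(io::Cursor::new(Vec::new()));
    zip.add_directory("docs/").unwrap();
    zip.add_file("docs/readme.txt", b"hello").unwrap();
    assert_eq!(zip.files.len(), 2);
}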
zone.go
// Copyright 2014 The Cockroach Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or // implied. See the License for the specific language governing // permissions and limitations under the License. See the AUTHORS file // for names of contributors. // // Author: Spencer Kimball ([email protected]) package server import ( "net/http" "github.com/cockroachdb/cockroach/client" "github.com/cockroachdb/cockroach/keys" "github.com/cockroachdb/cockroach/proto" "github.com/cockroachdb/cockroach/util" gogoproto "github.com/gogo/protobuf/proto" ) const ( // minRangeMaxBytes is the minimum value for range max bytes. minRangeMaxBytes = 1 << 20 ) // A zoneHandler implements the adminHandler interface. type zoneHandler struct { db *client.KV // Key-value database client } // validateZoneConfig returns an error if a given zone config is invalid. func validateZoneConfig(config gogoproto.Message) error { zConfig := config.(*proto.ZoneConfig) if len(zConfig.ReplicaAttrs) == 0
if zConfig.RangeMaxBytes < minRangeMaxBytes { return util.Errorf("RangeMaxBytes %d less than minimum allowed %d", zConfig.RangeMaxBytes, minRangeMaxBytes) } if zConfig.RangeMinBytes >= zConfig.RangeMaxBytes { return util.Errorf("RangeMinBytes %d is greater than or equal to RangeMaxBytes %d", zConfig.RangeMinBytes, zConfig.RangeMaxBytes) } return nil } // Put writes a zone config for the specified key prefix (which is // treated as a key). The zone config is parsed from the input // "body". The specified body must validly parse into a zone config // struct. func (zh *zoneHandler) Put(path string, body []byte, r *http.Request) error { return putConfig(zh.db, keys.ConfigZonePrefix, &proto.ZoneConfig{}, path, body, r, validateZoneConfig) } // Get retrieves the zone configuration for the specified key. If the // key is empty, all zone configurations are returned. Otherwise, the // leading "/" path delimiter is stripped and the zone configuration // matching the remainder is retrieved. Note that this will retrieve // the default zone config if "key" is equal to "/", and will list all // configs if "key" is equal to "". The body result contains // JSON-formatted output for a listing of keys and JSON-formatted // output for retrieval of a zone config. func (zh *zoneHandler) Get(path string, r *http.Request) (body []byte, contentType string, err error) { return getConfig(zh.db, keys.ConfigZonePrefix, &proto.ZoneConfig{}, path, r) } // Delete removes the zone config specified by key. func (zh *zoneHandler) Delete(path string, r *http.Request) error { return deleteConfig(zh.db, keys.ConfigZonePrefix, path, r) }
{ return util.Errorf("attributes for at least one replica must be specified in zone config") }
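// Illustrative sketch (not part of the original file; the ZoneConfig field
// shapes are assumed from the checks above): a config that trips the
// minimum-range-size rule in validateZoneConfig.
func validateZoneConfigSketch() error {
	cfg := &proto.ZoneConfig{
		ReplicaAttrs:  []proto.Attributes{{}},
		RangeMinBytes: 1 << 10,
		RangeMaxBytes: 1 << 19, // below minRangeMaxBytes, so validation fails
	}
	return validateZoneConfig(cfg)
}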
0004_object_id_required.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration):
dependencies = [ ('glitter', '0003_remove_empty_contentblocks'), ] operations = [ migrations.AlterField( model_name='contentblock', name='object_id', field=models.PositiveIntegerField(), ), ]
test.rs
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Implementation of the test-related targets of the build system. //! //! This file implements the various regression test suites that we execute on //! our CI. use std::env; use std::ffi::OsString; use std::fmt; use std::fs::{self, File}; use std::io::Read; use std::iter; use std::path::{Path, PathBuf}; use std::process::Command; use build_helper::{self, output}; use builder::{Builder, Compiler, Kind, RunConfig, ShouldRun, Step}; use cache::{Interned, INTERNER}; use compile; use dist; use flags::Subcommand; use native; use tool::{self, Tool, SourceType}; use toolstate::ToolState; use util::{self, dylib_path, dylib_path_var}; use Crate as CargoCrate; use {DocTests, Mode}; const ADB_TEST_DIR: &str = "/data/tmp/work"; /// The two modes of the test runner; tests or benchmarks. #[derive(Debug, PartialEq, Eq, Hash, Copy, Clone, PartialOrd, Ord)] pub enum TestKind { /// Run `cargo test` Test, /// Run `cargo bench` Bench, } impl From<Kind> for TestKind { fn from(kind: Kind) -> Self { match kind { Kind::Test => TestKind::Test, Kind::Bench => TestKind::Bench, _ => panic!("unexpected kind in crate: {:?}", kind), } } } impl TestKind { // Return the cargo subcommand for this test kind fn subcommand(self) -> &'static str { match self { TestKind::Test => "test", TestKind::Bench => "bench", } } } impl fmt::Display for TestKind { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(match *self { TestKind::Test => "Testing", TestKind::Bench => "Benchmarking", }) } } fn try_run(builder: &Builder, cmd: &mut Command) -> bool { if !builder.fail_fast { if !builder.try_run(cmd) { let mut failures = builder.delayed_failures.borrow_mut(); failures.push(format!("{:?}", cmd)); return false; } } else { builder.run(cmd); } true } fn try_run_quiet(builder: &Builder, cmd: &mut Command) -> bool { if !builder.fail_fast { if !builder.try_run_quiet(cmd) { let mut failures = builder.delayed_failures.borrow_mut(); failures.push(format!("{:?}", cmd)); return false; } } else { builder.run_quiet(cmd); } true } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct Linkcheck { host: Interned<String>, } impl Step for Linkcheck { type Output = (); const ONLY_HOSTS: bool = true; const DEFAULT: bool = true; /// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler. /// /// This tool in `src/tools` will verify the validity of all our links in the /// documentation to ensure we don't have a bunch of dead ones. 
fn run(self, builder: &Builder) { let host = self.host; builder.info(&format!("Linkcheck ({})", host)); builder.default_doc(None); let _time = util::timeit(&builder); try_run( builder, builder .tool_cmd(Tool::Linkchecker) .arg(builder.out.join(host).join("doc")), ); } fn should_run(run: ShouldRun) -> ShouldRun { let builder = run.builder; run.path("src/tools/linkchecker") .default_condition(builder.config.docs) } fn make_run(run: RunConfig) { run.builder.ensure(Linkcheck { host: run.target }); } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct Cargotest { stage: u32, host: Interned<String>, } impl Step for Cargotest { type Output = (); const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { run.path("src/tools/cargotest") } fn make_run(run: RunConfig) { run.builder.ensure(Cargotest { stage: run.builder.top_stage, host: run.target, }); } /// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler. /// /// This tool in `src/tools` will check out a few Rust projects and run `cargo /// test` to ensure that we don't regress the test suites there. fn run(self, builder: &Builder) { let compiler = builder.compiler(self.stage, self.host); builder.ensure(compile::Rustc { compiler, target: compiler.host, }); // Note that this is a short, cryptic, and not scoped directory name. This // is currently to minimize the length of path on Windows where we otherwise // quickly run into path name limit constraints. let out_dir = builder.out.join("ct"); t!(fs::create_dir_all(&out_dir)); let _time = util::timeit(&builder); let mut cmd = builder.tool_cmd(Tool::CargoTest); try_run( builder, cmd.arg(&builder.initial_cargo) .arg(&out_dir) .env("RUSTC", builder.rustc(compiler)) .env("RUSTDOC", builder.rustdoc(compiler.host)), ); } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct Cargo { stage: u32, host: Interned<String>, } impl Step for Cargo { type Output = (); const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { run.path("src/tools/cargo") } fn make_run(run: RunConfig) { run.builder.ensure(Cargo { stage: run.builder.top_stage, host: run.target, }); } /// Runs `cargo test` for `cargo` packaged with Rust. fn run(self, builder: &Builder) { let compiler = builder.compiler(self.stage, self.host); builder.ensure(tool::Cargo { compiler, target: self.host, }); let mut cargo = tool::prepare_tool_cargo(builder, compiler, Mode::ToolRustc, self.host, "test", "src/tools/cargo", SourceType::Submodule); if !builder.fail_fast { cargo.arg("--no-fail-fast"); } // Don't run cross-compile tests, we may not have cross-compiled libstd libs // available. cargo.env("CFG_DISABLE_CROSS_TESTS", "1"); try_run( builder, cargo.env("PATH", &path_for_cargo(builder, compiler)), ); } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct Rls { stage: u32, host: Interned<String>, } impl Step for Rls { type Output = (); const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { run.path("src/tools/rls") } fn make_run(run: RunConfig) { run.builder.ensure(Rls { stage: run.builder.top_stage, host: run.target, }); } /// Runs `cargo test` for the rls. 
fn run(self, builder: &Builder) { let stage = self.stage; let host = self.host; let compiler = builder.compiler(stage, host); let build_result = builder.ensure(tool::Rls { compiler, target: self.host, extra_features: Vec::new(), }); if build_result.is_none() { eprintln!("failed to test rls: could not build"); return; } let mut cargo = tool::prepare_tool_cargo(builder, compiler, Mode::ToolRustc, host, "test", "src/tools/rls", SourceType::Submodule); // Copy `src/tools/rls/test_data` to a writable drive. let test_workspace_path = builder.out.join("rls-test-data"); let test_data_path = test_workspace_path.join("test_data"); builder.create_dir(&test_data_path); builder.cp_r(&builder.src.join("src/tools/rls/test_data"), &test_data_path); cargo.env("RLS_TEST_WORKSPACE_DIR", test_workspace_path); builder.add_rustc_lib_path(compiler, &mut cargo); cargo.arg("--") .args(builder.config.cmd.test_args()); if try_run(builder, &mut cargo) { builder.save_toolstate("rls", ToolState::TestPass); } } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct Rustfmt { stage: u32, host: Interned<String>, } impl Step for Rustfmt { type Output = (); const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { run.path("src/tools/rustfmt") } fn make_run(run: RunConfig) { run.builder.ensure(Rustfmt { stage: run.builder.top_stage, host: run.target, }); } /// Runs `cargo test` for rustfmt. fn run(self, builder: &Builder) { let stage = self.stage; let host = self.host; let compiler = builder.compiler(stage, host); let build_result = builder.ensure(tool::Rustfmt { compiler, target: self.host, extra_features: Vec::new(), }); if build_result.is_none() { eprintln!("failed to test rustfmt: could not build"); return; } let mut cargo = tool::prepare_tool_cargo(builder, compiler, Mode::ToolRustc, host, "test", "src/tools/rustfmt", SourceType::Submodule); let dir = testdir(builder, compiler.host); t!(fs::create_dir_all(&dir)); cargo.env("RUSTFMT_TEST_DIR", dir); builder.add_rustc_lib_path(compiler, &mut cargo); if try_run(builder, &mut cargo) { builder.save_toolstate("rustfmt", ToolState::TestPass); } } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct Miri { stage: u32, host: Interned<String>, } impl Step for Miri { type Output = (); const ONLY_HOSTS: bool = true; const DEFAULT: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { let test_miri = run.builder.config.test_miri; run.path("src/tools/miri").default_condition(test_miri) } fn make_run(run: RunConfig) { run.builder.ensure(Miri { stage: run.builder.top_stage, host: run.target, }); } /// Runs `cargo test` for miri. 
fn run(self, builder: &Builder) { let stage = self.stage; let host = self.host; let compiler = builder.compiler(stage, host); let miri = builder.ensure(tool::Miri { compiler, target: self.host, extra_features: Vec::new(), }); if let Some(miri) = miri { let mut cargo = tool::prepare_tool_cargo(builder, compiler, Mode::ToolRustc, host, "test", "src/tools/miri", SourceType::Submodule); // miri tests need to know about the stage sysroot cargo.env("MIRI_SYSROOT", builder.sysroot(compiler)); cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler)); cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler)); cargo.env("MIRI_PATH", miri); builder.add_rustc_lib_path(compiler, &mut cargo); if try_run(builder, &mut cargo) { builder.save_toolstate("miri", ToolState::TestPass); } } else { eprintln!("failed to test miri: could not build"); } } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct Clippy { stage: u32, host: Interned<String>, } impl Step for Clippy { type Output = (); const ONLY_HOSTS: bool = true; const DEFAULT: bool = false; fn should_run(run: ShouldRun) -> ShouldRun { run.path("src/tools/clippy") } fn make_run(run: RunConfig) { run.builder.ensure(Clippy { stage: run.builder.top_stage, host: run.target, }); } /// Runs `cargo test` for clippy. fn run(self, builder: &Builder) { let stage = self.stage; let host = self.host; let compiler = builder.compiler(stage, host); let clippy = builder.ensure(tool::Clippy { compiler, target: self.host, extra_features: Vec::new(), }); if let Some(clippy) = clippy { let mut cargo = tool::prepare_tool_cargo(builder, compiler, Mode::ToolRustc, host, "test", "src/tools/clippy", SourceType::Submodule); // clippy tests need to know about the stage sysroot cargo.env("SYSROOT", builder.sysroot(compiler)); cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler)); cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler)); let host_libs = builder .stage_out(compiler, Mode::ToolRustc) .join(builder.cargo_dir()); cargo.env("HOST_LIBS", host_libs); // clippy tests need to find the driver cargo.env("CLIPPY_DRIVER_PATH", clippy); builder.add_rustc_lib_path(compiler, &mut cargo); if try_run(builder, &mut cargo) { builder.save_toolstate("clippy-driver", ToolState::TestPass); } } else { eprintln!("failed to test clippy: could not build"); } } } fn path_for_cargo(builder: &Builder, compiler: Compiler) -> OsString { // Configure PATH to find the right rustc. NB. we have to use PATH // and not RUSTC because the Cargo test suite has tests that will // fail if rustc is not spelled `rustc`. 
let path = builder.sysroot(compiler).join("bin"); let old_path = env::var_os("PATH").unwrap_or_default(); env::join_paths(iter::once(path).chain(env::split_paths(&old_path))).expect("") } #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] pub struct RustdocTheme { pub compiler: Compiler, } impl Step for RustdocTheme { type Output = (); const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { run.path("src/tools/rustdoc-themes") } fn make_run(run: RunConfig) { let compiler = run.builder.compiler(run.builder.top_stage, run.host); run.builder.ensure(RustdocTheme { compiler: compiler }); } fn run(self, builder: &Builder) { let rustdoc = builder.out.join("bootstrap/debug/rustdoc"); let mut cmd = builder.tool_cmd(Tool::RustdocTheme); cmd.arg(rustdoc.to_str().unwrap()) .arg( builder .src .join("src/librustdoc/html/static/themes") .to_str() .unwrap(), ) .env("RUSTC_STAGE", self.compiler.stage.to_string()) .env("RUSTC_SYSROOT", builder.sysroot(self.compiler)) .env( "RUSTDOC_LIBDIR", builder.sysroot_libdir(self.compiler, self.compiler.host), ) .env("CFG_RELEASE_CHANNEL", &builder.config.channel) .env("RUSTDOC_REAL", builder.rustdoc(self.compiler.host)) .env("RUSTDOC_CRATE_VERSION", builder.rust_version()) .env("RUSTC_BOOTSTRAP", "1"); if let Some(linker) = builder.linker(self.compiler.host) { cmd.env("RUSTC_TARGET_LINKER", linker); } try_run(builder, &mut cmd); } } #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] pub struct RustdocJS { pub host: Interned<String>, pub target: Interned<String>, } impl Step for RustdocJS { type Output = (); const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { run.path("src/test/rustdoc-js") } fn make_run(run: RunConfig) { run.builder.ensure(RustdocJS { host: run.host, target: run.target, }); } fn run(self, builder: &Builder) { if let Some(ref nodejs) = builder.config.nodejs { let mut command = Command::new(nodejs); command.args(&["src/tools/rustdoc-js/tester.js", &*self.host]); builder.ensure(::doc::Std { target: self.target, stage: builder.top_stage, }); builder.run(&mut command); } else { builder.info(&format!( "No nodejs found, skipping \"src/test/rustdoc-js\" tests" )); } } } #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] pub struct RustdocUi { pub host: Interned<String>, pub target: Interned<String>, pub compiler: Compiler, } impl Step for RustdocUi { type Output = (); const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { run.path("src/test/rustdoc-ui") } fn make_run(run: RunConfig) { let compiler = run.builder.compiler(run.builder.top_stage, run.host); run.builder.ensure(RustdocUi { host: run.host, target: run.target, compiler, }); } fn run(self, builder: &Builder) { builder.ensure(Compiletest { compiler: self.compiler, target: self.target, mode: "ui", suite: "rustdoc-ui", path: None, compare_mode: None, }) } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct Tidy; impl Step for Tidy { type Output = (); const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; /// Runs the `tidy` tool. /// /// This tool in `src/tools` checks up on various bits and pieces of style and /// otherwise just implements a few lint-like checks that are specific to the /// compiler itself. 
fn run(self, builder: &Builder) { let mut cmd = builder.tool_cmd(Tool::Tidy); cmd.arg(builder.src.join("src")); cmd.arg(&builder.initial_cargo); if !builder.config.vendor { cmd.arg("--no-vendor"); } if !builder.config.verbose_tests { cmd.arg("--quiet"); } let _folder = builder.fold_output(|| "tidy"); builder.info(&format!("tidy check")); try_run(builder, &mut cmd); } fn should_run(run: ShouldRun) -> ShouldRun { run.path("src/tools/tidy") } fn make_run(run: RunConfig) { run.builder.ensure(Tidy); } } fn testdir(builder: &Builder, host: Interned<String>) -> PathBuf { builder.out.join(host).join("test") } macro_rules! default_test { ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr }) => { test!($name { path: $path, mode: $mode, suite: $suite, default: true, host: false }); } } macro_rules! default_test_with_compare_mode { ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr, compare_mode: $compare_mode:expr }) => { test_with_compare_mode!($name { path: $path, mode: $mode, suite: $suite, default: true, host: false, compare_mode: $compare_mode }); } } macro_rules! host_test { ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr }) => { test!($name { path: $path, mode: $mode, suite: $suite, default: true, host: true }); } } macro_rules! test { ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr, default: $default:expr, host: $host:expr }) => { test_definitions!($name { path: $path, mode: $mode, suite: $suite, default: $default, host: $host, compare_mode: None }); } } macro_rules! test_with_compare_mode { ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr, default: $default:expr, host: $host:expr, compare_mode: $compare_mode:expr }) => { test_definitions!($name { path: $path, mode: $mode, suite: $suite, default: $default, host: $host, compare_mode: Some($compare_mode) }); } } macro_rules! 
test_definitions { ($name:ident { path: $path:expr, mode: $mode:expr, suite: $suite:expr, default: $default:expr, host: $host:expr, compare_mode: $compare_mode:expr }) => { #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct $name { pub compiler: Compiler, pub target: Interned<String>, } impl Step for $name { type Output = (); const DEFAULT: bool = $default; const ONLY_HOSTS: bool = $host; fn should_run(run: ShouldRun) -> ShouldRun { run.suite_path($path) } fn make_run(run: RunConfig) { let compiler = run.builder.compiler(run.builder.top_stage, run.host); run.builder.ensure($name { compiler, target: run.target, }); } fn run(self, builder: &Builder) { builder.ensure(Compiletest { compiler: self.compiler, target: self.target, mode: $mode, suite: $suite, path: Some($path), compare_mode: $compare_mode, }) } } } } default_test_with_compare_mode!(Ui { path: "src/test/ui", mode: "ui", suite: "ui", compare_mode: "nll" }); default_test!(RunPass { path: "src/test/run-pass", mode: "run-pass", suite: "run-pass" }); default_test!(CompileFail { path: "src/test/compile-fail", mode: "compile-fail", suite: "compile-fail" }); default_test!(ParseFail { path: "src/test/parse-fail", mode: "parse-fail", suite: "parse-fail" }); default_test!(RunFail { path: "src/test/run-fail", mode: "run-fail", suite: "run-fail" }); default_test!(RunPassValgrind { path: "src/test/run-pass-valgrind", mode: "run-pass-valgrind", suite: "run-pass-valgrind" }); default_test!(MirOpt { path: "src/test/mir-opt", mode: "mir-opt", suite: "mir-opt" }); default_test!(Codegen { path: "src/test/codegen", mode: "codegen", suite: "codegen" }); default_test!(CodegenUnits { path: "src/test/codegen-units", mode: "codegen-units", suite: "codegen-units" }); default_test!(Incremental { path: "src/test/incremental", mode: "incremental", suite: "incremental" }); default_test!(Debuginfo { path: "src/test/debuginfo", // What this runs varies depending on the native platform being apple mode: "debuginfo-XXX", suite: "debuginfo" }); host_test!(UiFullDeps { path: "src/test/ui-fulldeps", mode: "ui", suite: "ui-fulldeps" }); host_test!(RunPassFullDeps { path: "src/test/run-pass-fulldeps", mode: "run-pass", suite: "run-pass-fulldeps" }); host_test!(RunFailFullDeps { path: "src/test/run-fail-fulldeps", mode: "run-fail", suite: "run-fail-fulldeps" }); host_test!(CompileFailFullDeps { path: "src/test/compile-fail-fulldeps", mode: "compile-fail", suite: "compile-fail-fulldeps" }); host_test!(IncrementalFullDeps { path: "src/test/incremental-fulldeps", mode: "incremental", suite: "incremental-fulldeps" }); host_test!(Rustdoc { path: "src/test/rustdoc", mode: "rustdoc", suite: "rustdoc" }); test!(Pretty { path: "src/test/pretty", mode: "pretty", suite: "pretty", default: false, host: true }); test!(RunPassPretty { path: "src/test/run-pass/pretty", mode: "pretty", suite: "run-pass", default: false, host: true }); test!(RunFailPretty { path: "src/test/run-fail/pretty", mode: "pretty", suite: "run-fail", default: false, host: true }); test!(RunPassValgrindPretty { path: "src/test/run-pass-valgrind/pretty", mode: "pretty", suite: "run-pass-valgrind", default: false, host: true }); test!(RunPassFullDepsPretty { path: "src/test/run-pass-fulldeps/pretty", mode: "pretty", suite: "run-pass-fulldeps", default: false, host: true }); test!(RunFailFullDepsPretty { path: "src/test/run-fail-fulldeps/pretty", mode: "pretty", suite: "run-fail-fulldeps", default: false, host: true }); default_test!(RunMake { path: "src/test/run-make", mode: "run-make", suite: "run-make" 
}); host_test!(RunMakeFullDeps { path: "src/test/run-make-fulldeps", mode: "run-make", suite: "run-make-fulldeps" }); #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] struct Compiletest { compiler: Compiler, target: Interned<String>, mode: &'static str, suite: &'static str, path: Option<&'static str>, compare_mode: Option<&'static str>, } impl Step for Compiletest { type Output = (); fn should_run(run: ShouldRun) -> ShouldRun { run.never() } /// Executes the `compiletest` tool to run a suite of tests. /// /// Compiles all tests with `compiler` for `target` with the specified /// compiletest `mode` and `suite` arguments. For example `mode` can be /// "run-pass" or `suite` can be something like `debuginfo`. fn run(self, builder: &Builder) { let compiler = self.compiler; let target = self.target; let mode = self.mode; let suite = self.suite; // Path for test suite let suite_path = self.path.unwrap_or(""); // Skip codegen tests if they aren't enabled in configuration. if !builder.config.codegen_tests && suite == "codegen" { return; } if suite == "debuginfo" { // Skip debuginfo tests on MSVC if builder.config.build.contains("msvc") { return; } if mode == "debuginfo-XXX" { return if builder.config.build.contains("apple") { builder.ensure(Compiletest { mode: "debuginfo-lldb", ..self }); } else { builder.ensure(Compiletest { mode: "debuginfo-gdb", ..self }); }; } builder.ensure(dist::DebuggerScripts { sysroot: builder.sysroot(compiler), host: target, }); } if suite.ends_with("fulldeps") || // FIXME: Does pretty need librustc compiled? Note that there are // fulldeps test suites with mode = pretty as well. mode == "pretty" { builder.ensure(compile::Rustc { compiler, target }); } if builder.no_std(target) == Some(true) { // the `test` doesn't compile for no-std targets builder.ensure(compile::Std { compiler, target }); } else { builder.ensure(compile::Test { compiler, target }); } if builder.no_std(target) == Some(true) { // for no_std run-make (e.g. thumb*), // we need a host compiler which is called by cargo. builder.ensure(compile::Std { compiler, target: compiler.host }); } builder.ensure(native::TestHelpers { target }); builder.ensure(RemoteCopyLibs { compiler, target }); let mut cmd = builder.tool_cmd(Tool::Compiletest); // compiletest currently has... a lot of arguments, so let's just pass all // of them! cmd.arg("--compile-lib-path") .arg(builder.rustc_libdir(compiler)); cmd.arg("--run-lib-path") .arg(builder.sysroot_libdir(compiler, target)); cmd.arg("--rustc-path").arg(builder.rustc(compiler)); let is_rustdoc_ui = suite.ends_with("rustdoc-ui"); // Avoid depending on rustdoc when we don't need it. 
if mode == "rustdoc" || (mode == "run-make" && suite.ends_with("fulldeps")) || (mode == "ui" && is_rustdoc_ui) { cmd.arg("--rustdoc-path") .arg(builder.rustdoc(compiler.host)); } cmd.arg("--src-base") .arg(builder.src.join("src/test").join(suite)); cmd.arg("--build-base") .arg(testdir(builder, compiler.host).join(suite)); cmd.arg("--stage-id") .arg(format!("stage{}-{}", compiler.stage, target)); cmd.arg("--mode").arg(mode); cmd.arg("--target").arg(target); cmd.arg("--host").arg(&*compiler.host); cmd.arg("--llvm-filecheck") .arg(builder.llvm_filecheck(builder.config.build)); if builder.config.cmd.bless() { cmd.arg("--bless"); } let compare_mode = builder.config.cmd.compare_mode().or(self.compare_mode); if let Some(ref nodejs) = builder.config.nodejs { cmd.arg("--nodejs").arg(nodejs); } let mut flags = if is_rustdoc_ui { Vec::new() } else { vec!["-Crpath".to_string()] }; if !is_rustdoc_ui { if builder.config.rust_optimize_tests { flags.push("-O".to_string()); } if builder.config.rust_debuginfo_tests { flags.push("-g".to_string()); } } flags.push("-Zunstable-options".to_string()); flags.push(builder.config.cmd.rustc_args().join(" ")); if let Some(linker) = builder.linker(target) { cmd.arg("--linker").arg(linker); } let hostflags = flags.clone(); cmd.arg("--host-rustcflags").arg(hostflags.join(" ")); let mut targetflags = flags.clone(); targetflags.push(format!( "-Lnative={}", builder.test_helpers_out(target).display() )); cmd.arg("--target-rustcflags").arg(targetflags.join(" ")); cmd.arg("--docck-python").arg(builder.python()); if builder.config.build.ends_with("apple-darwin") { // Force /usr/bin/python on macOS for LLDB tests because we're loading the // LLDB plugin's compiled module which only works with the system python // (namely not Homebrew-installed python) cmd.arg("--lldb-python").arg("/usr/bin/python"); } else { cmd.arg("--lldb-python").arg(builder.python()); } if let Some(ref gdb) = builder.config.gdb { cmd.arg("--gdb").arg(gdb); } if let Some(ref vers) = builder.lldb_version { cmd.arg("--lldb-version").arg(vers); } if let Some(ref dir) = builder.lldb_python_dir { cmd.arg("--lldb-python-dir").arg(dir); } // Get paths from cmd args let paths = match &builder.config.cmd { Subcommand::Test { ref paths, .. } => &paths[..], _ => &[], }; // Get test-args by striping suite path let mut test_args: Vec<&str> = paths .iter() .map(|p| { match p.strip_prefix(".") { Ok(path) => path, Err(_) => p, } }) .filter(|p| p.starts_with(suite_path) && p.is_file()) .map(|p| p.strip_prefix(suite_path).unwrap().to_str().unwrap()) .collect(); test_args.append(&mut builder.config.cmd.test_args()); cmd.args(&test_args); if builder.is_verbose() { cmd.arg("--verbose"); } if !builder.config.verbose_tests { cmd.arg("--quiet"); } if builder.config.llvm_enabled { let llvm_config = builder.ensure(native::Llvm { target: builder.config.build, emscripten: false, }); if !builder.config.dry_run { let llvm_version = output(Command::new(&llvm_config).arg("--version")); cmd.arg("--llvm-version").arg(llvm_version); } if !builder.is_rust_llvm(target) { cmd.arg("--system-llvm"); } // Only pass correct values for these flags for the `run-make` suite as it // requires that a C++ compiler was configured which isn't always the case. 
if !builder.config.dry_run && suite == "run-make-fulldeps" { let llvm_components = output(Command::new(&llvm_config).arg("--components")); let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags")); cmd.arg("--cc") .arg(builder.cc(target)) .arg("--cxx") .arg(builder.cxx(target).unwrap()) .arg("--cflags") .arg(builder.cflags(target).join(" ")) .arg("--llvm-components") .arg(llvm_components.trim()) .arg("--llvm-cxxflags") .arg(llvm_cxxflags.trim()); if let Some(ar) = builder.ar(target) { cmd.arg("--ar").arg(ar); } } } if suite == "run-make-fulldeps" && !builder.config.llvm_enabled { builder.info(&format!( "Ignoring run-make test suite as they generally don't work without LLVM" )); return; } if suite != "run-make-fulldeps" { cmd.arg("--cc") .arg("") .arg("--cxx") .arg("") .arg("--cflags") .arg("") .arg("--llvm-components") .arg("") .arg("--llvm-cxxflags") .arg(""); } if builder.remote_tested(target) { cmd.arg("--remote-test-client") .arg(builder.tool_exe(Tool::RemoteTestClient)); } // Running a C compiler on MSVC requires a few env vars to be set, to be // sure to set them here. // // Note that if we encounter `PATH` we make sure to append to our own `PATH` // rather than stomp over it. if target.contains("msvc") { for &(ref k, ref v) in builder.cc[&target].env() { if k != "PATH" { cmd.env(k, v); } } } cmd.env("RUSTC_BOOTSTRAP", "1"); builder.add_rust_test_threads(&mut cmd); if builder.config.sanitizers { cmd.env("SANITIZER_SUPPORT", "1"); } if builder.config.profiler { cmd.env("PROFILER_SUPPORT", "1"); } cmd.env("RUST_TEST_TMPDIR", builder.out.join("tmp")); cmd.arg("--adb-path").arg("adb"); cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR); if target.contains("android") { // Assume that cc for this target comes from the android sysroot cmd.arg("--android-cross-path") .arg(builder.cc(target).parent().unwrap().parent().unwrap()); } else { cmd.arg("--android-cross-path").arg(""); } builder.ci_env.force_coloring_in_ci(&mut cmd); let _folder = builder.fold_output(|| format!("test_{}", suite)); builder.info(&format!( "Check compiletest suite={} mode={} ({} -> {})", suite, mode, &compiler.host, target )); let _time = util::timeit(&builder); try_run(builder, &mut cmd); if let Some(compare_mode) = compare_mode { cmd.arg("--compare-mode").arg(compare_mode); let _folder = builder.fold_output(|| format!("test_{}_{}", suite, compare_mode)); builder.info(&format!( "Check compiletest suite={} mode={} compare_mode={} ({} -> {})", suite, mode, compare_mode, &compiler.host, target )); let _time = util::timeit(&builder); try_run(builder, &mut cmd); } } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] struct DocTest { compiler: Compiler, path: &'static str, name: &'static str, is_ext_doc: bool, } impl Step for DocTest { type Output = (); const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { run.never() } /// Run `rustdoc --test` for all documentation in `src/doc`. /// /// This will run all tests in our markdown documentation (e.g. the book) /// located in `src/doc`. The `rustdoc` that's run is the one that sits next to /// `compiler`. 
fn run(self, builder: &Builder) { let compiler = self.compiler; builder.ensure(compile::Test { compiler, target: compiler.host, }); // Do a breadth-first traversal of the `src/doc` directory and just run // tests for all files that end in `*.md` let mut stack = vec![builder.src.join(self.path)]; let _time = util::timeit(&builder); let _folder = builder.fold_output(|| format!("test_{}", self.name)); let mut files = Vec::new(); while let Some(p) = stack.pop() { if p.is_dir() { stack.extend(t!(p.read_dir()).map(|p| t!(p).path())); continue; } if p.extension().and_then(|s| s.to_str()) != Some("md") { continue; } // The nostarch directory in the book is for no starch, and so isn't // guaranteed to builder. We don't care if it doesn't build, so skip it. if p.to_str().map_or(false, |p| p.contains("nostarch")) { continue; } files.push(p); } files.sort(); let mut toolstate = ToolState::TestPass; for file in files { if !markdown_test(builder, compiler, &file) { toolstate = ToolState::TestFail; } } if self.is_ext_doc { builder.save_toolstate(self.name, toolstate); } } } macro_rules! test_book { ($($name:ident, $path:expr, $book_name:expr, default=$default:expr;)+) => { $( #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct $name { compiler: Compiler, } impl Step for $name { type Output = (); const DEFAULT: bool = $default; const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { run.path($path) } fn make_run(run: RunConfig) { run.builder.ensure($name { compiler: run.builder.compiler(run.builder.top_stage, run.host), }); } fn run(self, builder: &Builder) { builder.ensure(DocTest { compiler: self.compiler, path: $path, name: $book_name, is_ext_doc: !$default, }); } } )+ } } test_book!( Nomicon, "src/doc/nomicon", "nomicon", default=false; Reference, "src/doc/reference", "reference", default=false; RustdocBook, "src/doc/rustdoc", "rustdoc", default=true; RustcBook, "src/doc/rustc", "rustc", default=true; RustByExample, "src/doc/rust-by-example", "rust-by-example", default=false; TheBook, "src/doc/book", "book", default=false; UnstableBook, "src/doc/unstable-book", "unstable-book", default=true; ); #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct ErrorIndex { compiler: Compiler, } impl Step for ErrorIndex { type Output = (); const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { run.path("src/tools/error_index_generator") } fn make_run(run: RunConfig) { run.builder.ensure(ErrorIndex { compiler: run.builder.compiler(run.builder.top_stage, run.host), }); } /// Run the error index generator tool to execute the tests located in the error /// index. /// /// The `error_index_generator` tool lives in `src/tools` and is used to /// generate a markdown file from the error indexes of the code base which is /// then passed to `rustdoc --test`. 
fn run(self, builder: &Builder) { let compiler = self.compiler; builder.ensure(compile::Std { compiler, target: compiler.host, }); let dir = testdir(builder, compiler.host); t!(fs::create_dir_all(&dir)); let output = dir.join("error-index.md"); let mut tool = builder.tool_cmd(Tool::ErrorIndex); tool.arg("markdown") .arg(&output) .env("CFG_BUILD", &builder.config.build) .env("RUSTC_ERROR_METADATA_DST", builder.extended_error_dir()); let _folder = builder.fold_output(|| "test_error_index"); builder.info(&format!("Testing error-index stage{}", compiler.stage)); let _time = util::timeit(&builder); builder.run(&mut tool); markdown_test(builder, compiler, &output); } } fn markdown_test(builder: &Builder, compiler: Compiler, markdown: &Path) -> bool { match File::open(markdown) { Ok(mut file) => { let mut contents = String::new(); t!(file.read_to_string(&mut contents)); if !contents.contains("```") { return true; } } Err(_) => {} } builder.info(&format!("doc tests for: {}", markdown.display())); let mut cmd = builder.rustdoc_cmd(compiler.host); builder.add_rust_test_threads(&mut cmd); cmd.arg("--test"); cmd.arg(markdown); cmd.env("RUSTC_BOOTSTRAP", "1"); let test_args = builder.config.cmd.test_args().join(" "); cmd.arg("--test-args").arg(test_args); if builder.config.verbose_tests { try_run(builder, &mut cmd) } else { try_run_quiet(builder, &mut cmd) } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct CrateLibrustc { compiler: Compiler, target: Interned<String>, test_kind: TestKind, krate: Interned<String>, } impl Step for CrateLibrustc { type Output = (); const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { run.krate("rustc-main") } fn make_run(run: RunConfig) { let builder = run.builder; let compiler = builder.compiler(builder.top_stage, run.host); for krate in builder.in_tree_crates("rustc-main") { if run.path.ends_with(&krate.path) { let test_kind = builder.kind.into(); builder.ensure(CrateLibrustc { compiler, target: run.target, test_kind, krate: krate.name, }); } } } fn run(self, builder: &Builder) { builder.ensure(Crate { compiler: self.compiler, target: self.target, mode: Mode::Rustc, test_kind: self.test_kind, krate: self.krate, }); } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct CrateNotDefault { compiler: Compiler, target: Interned<String>, test_kind: TestKind, krate: &'static str, } impl Step for CrateNotDefault { type Output = (); fn should_run(run: ShouldRun) -> ShouldRun { run.path("src/liballoc_jemalloc") .path("src/librustc_asan") .path("src/librustc_lsan") .path("src/librustc_msan") .path("src/librustc_tsan") } fn make_run(run: RunConfig) { let builder = run.builder; let compiler = builder.compiler(builder.top_stage, run.host); let test_kind = builder.kind.into(); builder.ensure(CrateNotDefault { compiler, target: run.target, test_kind, krate: match run.path { _ if run.path.ends_with("src/liballoc_jemalloc") => "alloc_jemalloc", _ if run.path.ends_with("src/librustc_asan") => "rustc_asan", _ if run.path.ends_with("src/librustc_lsan") => "rustc_lsan", _ if run.path.ends_with("src/librustc_msan") => "rustc_msan", _ if run.path.ends_with("src/librustc_tsan") => "rustc_tsan", _ => panic!("unexpected path {:?}", run.path), }, }); } fn run(self, builder: &Builder) { builder.ensure(Crate { compiler: self.compiler, target: self.target, mode: Mode::Std, test_kind: self.test_kind, krate: INTERNER.intern_str(self.krate), }); } } #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct 
Crate { pub compiler: Compiler, pub target: Interned<String>, pub mode: Mode, pub test_kind: TestKind, pub krate: Interned<String>, } impl Step for Crate { type Output = (); const DEFAULT: bool = true; fn should_run(mut run: ShouldRun) -> ShouldRun { let builder = run.builder; run = run.krate("test"); for krate in run.builder.in_tree_crates("std") { if krate.is_local(&run.builder) && !krate.name.contains("jemalloc") && !(krate.name.starts_with("rustc_") && krate.name.ends_with("san")) && krate.name != "dlmalloc" { run = run.path(krate.local_path(&builder).to_str().unwrap()); } } run } fn make_run(run: RunConfig) { let builder = run.builder; let compiler = builder.compiler(builder.top_stage, run.host); let make = |mode: Mode, krate: &CargoCrate| { let test_kind = builder.kind.into(); builder.ensure(Crate { compiler, target: run.target, mode, test_kind, krate: krate.name, }); }; for krate in builder.in_tree_crates("std") { if run.path.ends_with(&krate.local_path(&builder)) { make(Mode::Std, krate); } } for krate in builder.in_tree_crates("test") { if run.path.ends_with(&krate.local_path(&builder)) { make(Mode::Test, krate); } } } /// Run all unit tests plus documentation tests for a given crate defined /// by a `Cargo.toml` (single manifest) /// /// This is what runs tests for crates like the standard library, compiler, etc. /// It essentially is the driver for running `cargo test`. /// /// Currently this runs all tests for a DAG by passing a bunch of `-p foo` /// arguments, and those arguments are discovered from `cargo metadata`. fn run(self, builder: &Builder) { let compiler = self.compiler; let target = self.target; let mode = self.mode; let test_kind = self.test_kind; let krate = self.krate; builder.ensure(compile::Test { compiler, target }); builder.ensure(RemoteCopyLibs { compiler, target }); // If we're not doing a full bootstrap but we're testing a stage2 version of // libstd, then what we're actually testing is the libstd produced in // stage1. Reflect that here by updating the compiler that we're working // with automatically. let compiler = if builder.force_use_stage1(compiler, target) { builder.compiler(1, compiler.host) } else { compiler.clone() }; let mut cargo = builder.cargo(compiler, mode, target, test_kind.subcommand()); match mode { Mode::Std => { compile::std_cargo(builder, &compiler, target, &mut cargo); } Mode::Test => { compile::test_cargo(builder, &compiler, target, &mut cargo); } Mode::Rustc => { builder.ensure(compile::Rustc { compiler, target }); compile::rustc_cargo(builder, &mut cargo); } _ => panic!("can only test libraries"), }; // Build up the base `cargo test` command. // // Pass in some standard flags then iterate over the graph we've discovered // in `cargo metadata` with the maps above and figure out what `-p` // arguments need to get passed. if test_kind.subcommand() == "test" && !builder.fail_fast { cargo.arg("--no-fail-fast"); } match builder.doc_tests { DocTests::Only => { cargo.arg("--doc"); } DocTests::No => { cargo.args(&["--lib", "--bins", "--examples", "--tests", "--benches"]); } DocTests::Yes => {} } cargo.arg("-p").arg(krate); // The tests are going to run with the *target* libraries, so we need to // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent. // // Note that to run the compiler we need to run with the *host* libraries, // but our wrapper scripts arrange for that to be the case anyway. 
let mut dylib_path = dylib_path(); dylib_path.insert(0, PathBuf::from(&*builder.sysroot_libdir(compiler, target))); cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap()); cargo.arg("--"); cargo.args(&builder.config.cmd.test_args()); if !builder.config.verbose_tests { cargo.arg("--quiet"); } if target.contains("emscripten") { cargo.env( format!("CARGO_TARGET_{}_RUNNER", envify(&target)), builder .config .nodejs .as_ref() .expect("nodejs not configured"), ); } else if target.starts_with("wasm32") { // Warn about running tests without the `wasm_syscall` feature enabled. // The javascript shim implements the syscall interface so that test // output can be correctly reported. if !builder.config.wasm_syscall { builder.info(&format!( "Libstd was built without `wasm_syscall` feature enabled: \ test output may not be visible." )); } // On the wasm32-unknown-unknown target we're using LTO which is // incompatible with `-C prefer-dynamic`, so disable that here cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1"); let node = builder .config .nodejs .as_ref() .expect("nodejs not configured"); let runner = format!( "{} {}/src/etc/wasm32-shim.js", node.display(), builder.src.display() ); cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)), &runner); } else if builder.remote_tested(target) { cargo.env( format!("CARGO_TARGET_{}_RUNNER", envify(&target)), format!("{} run", builder.tool_exe(Tool::RemoteTestClient).display()), ); } let _folder = builder.fold_output(|| { format!( "{}_stage{}-{}", test_kind.subcommand(), compiler.stage, krate ) }); builder.info(&format!( "{} {} stage{} ({} -> {})", test_kind, krate, compiler.stage, &compiler.host, target )); let _time = util::timeit(&builder); try_run(builder, &mut cargo); } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct CrateRustdoc { host: Interned<String>, test_kind: TestKind, } impl Step for CrateRustdoc { type Output = (); const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; fn should_run(run: ShouldRun) -> ShouldRun { run.paths(&["src/librustdoc", "src/tools/rustdoc"]) } fn make_run(run: RunConfig) { let builder = run.builder; let test_kind = builder.kind.into(); builder.ensure(CrateRustdoc { host: run.host, test_kind, }); } fn run(self, builder: &Builder) { let test_kind = self.test_kind; let compiler = builder.compiler(builder.top_stage, self.host); let target = compiler.host; builder.ensure(compile::Rustc { compiler, target }); let mut cargo = tool::prepare_tool_cargo(builder, compiler, Mode::ToolRustc, target, test_kind.subcommand(), "src/tools/rustdoc", SourceType::InTree); if test_kind.subcommand() == "test" && !builder.fail_fast { cargo.arg("--no-fail-fast"); } cargo.arg("-p").arg("rustdoc:0.0.0"); cargo.arg("--"); cargo.args(&builder.config.cmd.test_args()); if !builder.config.verbose_tests { cargo.arg("--quiet"); } let _folder = builder .fold_output(|| format!("{}_stage{}-rustdoc", test_kind.subcommand(), compiler.stage)); builder.info(&format!( "{} rustdoc stage{} ({} -> {})", test_kind, compiler.stage, &compiler.host, target )); let _time = util::timeit(&builder); try_run(builder, &mut cargo); } } fn envify(s: &str) -> String { s.chars() .map(|c| match c { '-' => '_', c => c, }) .flat_map(|c| c.to_uppercase()) .collect() } /// Some test suites are run inside emulators or on remote devices, and most /// of our test binaries are linked dynamically which means we need to ship /// the standard library and such to the emulator ahead of time. This step /// represents this and is a dependency of all test suites. 
/// /// Most of the time this is a noop. For some steps such as shipping data to /// QEMU we have to build our own tools so we've got conditional dependencies /// on those programs as well. Note that the remote test client is built for /// the build target (us) and the server is built for the target. #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct RemoteCopyLibs { compiler: Compiler, target: Interned<String>, } impl Step for RemoteCopyLibs { type Output = (); fn should_run(run: ShouldRun) -> ShouldRun { run.never() } fn run(self, builder: &Builder) { let compiler = self.compiler; let target = self.target; if !builder.remote_tested(target) { return; } builder.ensure(compile::Test { compiler, target }); builder.info(&format!("REMOTE copy libs to emulator ({})", target)); t!(fs::create_dir_all(builder.out.join("tmp"))); let server = builder.ensure(tool::RemoteTestServer { compiler: compiler.with_stage(0), target, }); // Spawn the emulator and wait for it to come online let tool = builder.tool_exe(Tool::RemoteTestClient); let mut cmd = Command::new(&tool); cmd.arg("spawn-emulator") .arg(target) .arg(&server) .arg(builder.out.join("tmp")); if let Some(rootfs) = builder.qemu_rootfs(target) { cmd.arg(rootfs); } builder.run(&mut cmd); // Push all our dylibs to the emulator for f in t!(builder.sysroot_libdir(compiler, target).read_dir()) { let f = t!(f); let name = f.file_name().into_string().unwrap(); if util::is_dylib(&name) { builder.run(Command::new(&tool).arg("push").arg(f.path())); } } } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct Distcheck; impl Step for Distcheck { type Output = (); fn should_run(run: ShouldRun) -> ShouldRun { run.path("distcheck") } fn make_run(run: RunConfig) { run.builder.ensure(Distcheck); } /// Run "distcheck", a 'make check' from a tarball fn run(self, builder: &Builder) { builder.info(&format!("Distcheck")); let dir = builder.out.join("tmp").join("distcheck"); let _ = fs::remove_dir_all(&dir); t!(fs::create_dir_all(&dir)); // Guarantee that these are built before we begin running. builder.ensure(dist::PlainSourceTarball); builder.ensure(dist::Src);
let mut cmd = Command::new("tar"); cmd.arg("-xzf") .arg(builder.ensure(dist::PlainSourceTarball)) .arg("--strip-components=1") .current_dir(&dir); builder.run(&mut cmd); builder.run( Command::new("./configure") .args(&builder.config.configure_args) .arg("--enable-vendor") .current_dir(&dir), ); builder.run( Command::new(build_helper::make(&builder.config.build)) .arg("check") .current_dir(&dir), ); // Now make sure that rust-src has all of libstd's dependencies builder.info(&format!("Distcheck rust-src")); let dir = builder.out.join("tmp").join("distcheck-src"); let _ = fs::remove_dir_all(&dir); t!(fs::create_dir_all(&dir)); let mut cmd = Command::new("tar"); cmd.arg("-xzf") .arg(builder.ensure(dist::Src)) .arg("--strip-components=1") .current_dir(&dir); builder.run(&mut cmd); let toml = dir.join("rust-src/lib/rustlib/src/rust/src/libstd/Cargo.toml"); builder.run( Command::new(&builder.initial_cargo) .arg("generate-lockfile") .arg("--manifest-path") .arg(&toml) .current_dir(&dir), ); } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct Bootstrap; impl Step for Bootstrap { type Output = (); const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; /// Test the build system itself fn run(self, builder: &Builder) { let mut cmd = Command::new(&builder.initial_cargo); cmd.arg("test") .current_dir(builder.src.join("src/bootstrap")) .env("RUSTFLAGS", "-Cdebuginfo=2") .env("CARGO_TARGET_DIR", builder.out.join("bootstrap")) .env("RUSTC_BOOTSTRAP", "1") .env("RUSTC", &builder.initial_rustc); if let Some(flags) = option_env!("RUSTFLAGS") { // Use the same rustc flags for testing as for "normal" compilation, // so that Cargo doesn’t recompile the entire dependency graph every time: // https://github.com/rust-lang/rust/issues/49215 cmd.env("RUSTFLAGS", flags); } if !builder.fail_fast { cmd.arg("--no-fail-fast"); } cmd.arg("--").args(&builder.config.cmd.test_args()); // rustbuild tests are racy on directory creation so just run them one at a time. // Since there's not many this shouldn't be a problem. cmd.arg("--test-threads=1"); try_run(builder, &mut cmd); } fn should_run(run: ShouldRun) -> ShouldRun { run.path("src/bootstrap") } fn make_run(run: RunConfig) { run.builder.ensure(Bootstrap); } }
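// For orientation: the Step implementations above are normally driven through the
// bootstrap entry point rather than called directly. A couple of representative
// invocations (the paths match should_run registrations above; exact flags vary
// by checkout and are not confirmed by this file):
//
//     ./x.py test src/tools/tidy
//     ./x.py test src/test/run-pass --stage 1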
face.py
import numpy as np
# import open3d as o3d
import face_recognition
# import cv2
from skimage import draw #,morphology

# from .depth import depthPreprocess
from .image import getConvexHullMask,getMaskedImg2, maskClosing,maskDilation,maskErosion


def faceLandmarks(image):
    '''
    Face bounding boxes + landmark points
    '''
    image=np.asarray(image)
    locations=face_recognition.face_locations(image)
    if not locations:
        return None,None
    landmarks=face_recognition.face_landmarks(image,face_locations=locations)
    return locations,landmarks

def getLandmarkPoints(landmarks):
    '''
    Convert the landmark dicts into a flat list of points
    '''
    facePoints=None
    for face in landmarks:
        for v in face.values():
            if facePoints is None:
                facePoints=np.asarray(v)
            else:
                facePoints=np.vstack([facePoints,np.asarray(v)])
    return facePoints

def depthFilter(points,depth,dthreshold=1.0):
    '''
    Filter out points without depth; also return the fraction of points that have depth
    '''
    depth=np.asarray(depth)
    imSize=depth.shape # h,w
    l=len(points)
    pnum=0
    #resultPoints=[None]*l
    resultPoints=[ np.asarray([np.nan,np.nan]) ]*l
    for i,p in enumerate(points):
        if 0<=p[1]<imSize[0] and 0<=p[0]<imSize[1] and 0<depth[p[1],p[0]]<=dthreshold:
            resultPoints[i]=p
            pnum+=1
    #resultPoints=np.asarray(resultPoints,dtype=np.object)
    resultPoints=np.asarray(resultPoints) # dtype:float64
    return resultPoints,pnum/l

def processFaceRGBD(rgbd,depthThreshold=1.0,rateThreshold=0.75):
    '''
    From an RGBD image, get the face bounding box, landmarks (semantic dict), landmarks (point set),
    the depth-filtered landmarks, and the fraction of landmarks that have depth
    '''
    return processFaceImgPair(rgbd.color,rgbd.depth,depthThreshold=depthThreshold,rateThreshold=rateThreshold)

def processFaceImgPair(color,depth,depthThreshold=1000,rateThreshold=0.75):
    '''
    From a color/depth image pair, get the face bounding box, landmarks (semantic dict), landmarks (point set),
    the depth-filtered landmarks, and the fraction of landmarks that have depth
    '''
    locations,landmarks=faceLandmarks(color)
    if locations is None:
        print("No face detected")
        return False,None,None,None,None,None
    facePoints=getLandmarkPoints(landmarks)
    facePointsFilt,depthRate=depthFilter(facePoints,depth,dthreshold=depthThreshold)
    if depthRate<rateThreshold:
        print("Fraction of face landmarks with valid depth is below the threshold")
        return False,None,None,None,None,None
    #facePoints=np.asarray(facePoints,dtype=np.object)
    return True,locations,landmarks,facePoints,facePointsFilt,depthRate

def getCorrespondence(points,points2):
    '''
    Get the corresponding points of two point sets (same index and not [nan,nan])
    '''
    minlen=min( points.shape[0],points2.shape[0] )
    points=points[:minlen,:]
    points2=points2[:minlen,:]
    return np.where( ~np.isnan(points).any(axis=1) & ~np.isnan(points2).any(axis=1) )[0]

# def getCorrespondence2(points,points2):
#     '''
#     Get the corresponding points of two point sets (same index and not None)
#     '''
#     minlen=min(points.shape[0],points2.shape[0])
#     corrIdxs=[]
#     for i in range(minlen):
#         if not(points[i] is None or points2[i] is None):
#             corrIdxs.append(i)
#     return np.asarray(corrIdxs)

def getFaceCircle(img,facePoints,faceConvexHull,rRate=1.2):
    '''
    From the face landmarks, build a circular mask that encloses the face
    '''
    idxs=np.asarray( np.where(faceConvexHull>0.5) )
    centroid=np.sum(idxs,axis=1)/idxs.shape[1] # centroid
    distances=np.linalg.norm(facePoints-centroid[::-1],axis=1) #[x y]
    maxDistance=distances.max()
    r=maxDistance*rRate
    center=np.int_(centroid) # [y x]
    cr,cc=draw.disk(center,r,shape=img.shape) # [y x]
    mask=np.zeros_like(img,dtype=bool)
    mask[cr,cc]=True
    # try attaching a rectangle on top of the circle
    # p1=np.asarray( [center[0],center[1]-int(r)],dtype=np.int64 )
    # p2=np.asarray( [-1,center[1]+int(r)],dtype=np.int64 )
    # cr,cc=draw.rectangle(start=p1,end=p2,shape=img.shape)
    # mask[cr,cc]=True
    mask[:center[0]+1,center[1]-int(r):center[1]+int(r)+1]=True
    return mask

def getFaceMask(depth,landmarks,facePoints=None,circleRate=1.2):
    '''
    From the face landmarks, build a circular mask around the face (with eyes and mouth removed)
    '''
    leftEye= np.asarray( landmarks[0]["left_eye"] )
    rightEye= np.asarray( landmarks[0]["right_eye"] )
    mouth= set(landmarks[0]["top_lip"]).union( 
set(landmarks[0]["bottom_lip"]) ) mouth=np.asarray(list(mouth)) leftEyeMask=getConvexHullMask(depth,leftEye) rightEyeMask=getConvexHullMask(depth,rightEye) mouthMask=getConvexHullMask(depth,mouth) fullMask=leftEyeMask | rightEyeMask | mouthMask # disk=morphology.disk(10) # disk=morphology.disk(12)
        facePoints=getLandmarkPoints(landmarks)
    faceConvexHull=getConvexHullMask(depth,facePoints)
    faceCircleMask=getFaceCircle(np.asarray(depth),facePoints,faceConvexHull,rRate=circleRate)
    faceMask=faceCircleMask ^ fullMaskDila # symmetric difference: the circle minus eyes and mouth
    return faceCircleMask,faceMask,faceConvexHull

def getFaceMeanMask(depth,faceConvexHull,depthMask,maxMeanDiff=100):
    '''
    Within the convex hull of the face landmarks, compute the mean face depth and build a mask
    that filters out depths outside mean +- maxMeanDiff
    '''
    # disk=morphology.disk(5)
    # disk=morphology.disk(10)
    # faceConvexHulle=morphology.binary_erosion(faceConvexHull,selem=disk)
    faceConvexHulle=maskErosion(faceConvexHull,size=10) # erosion
    idxs=np.where(faceConvexHulle>0.5)
    faceMean= np.mean( depth[idxs[0],idxs[1]] )
    faceMeanMask=np.asarray( (depth>faceMean-maxMeanDiff )&(depth<faceMean+maxMeanDiff ) & depthMask )
    return faceMeanMask

# def processDepth(depth,landmarks,facePoints,cropEyeMouth=True,
#                  dmin=100,dmax=1000,filtSize=6,bifiltSize=5,circleRate=1.2,maxMeanDiff=100):
#     '''
#     Preprocess the depth image: cut out the head with a circular mask and, if requested, mask out eyes and mouth
#     Returns the processed full depth map, the head part, the remainder, and a list of masks
#     (full depth mask, face mask (eyes/mouth removed), circular face mask)
#     '''
#     depth,depthMask=depthPreprocess(depth,dmin,dmax,filtSize,bifiltSize)
#     if cropEyeMouth:
#         faceCircleMask,faceMask,faceConvexHull=getFaceMask(depth,landmarks,facePoints,circleRate)
#     else:
#         faceConvexHull=getConvexHullMask(depth,facePoints)
#         faceCircleMask=getFaceCircle(depth,facePoints,faceConvexHull,circleRate)
#         faceMask=faceCircleMask
#     faceMeanMask=getFaceMeanMask(depth,faceConvexHull,depthMask,maxMeanDiff=maxMeanDiff) # anything outside face mean +-10cm is cut away
#     depthMask &=faceMeanMask
#     disk=morphology.disk(2)
#     depthMask=morphology.binary_erosion(depthMask,selem=disk) # erosion
#     depthTmp=getMaskedImg2(depth,depthMask)
#     masked=getMaskedImg2(depthTmp,faceMask)
#     maskedReverse=getMaskedImg2(depthTmp,faceCircleMask,reverse=True)
#     depthTmp=depthTmp.astype(np.uint16)
#     masked=masked.astype(np.uint16)
#     maskedReverse=maskedReverse.astype(np.uint16)
#     return depthTmp,masked,maskedReverse,[depthMask,faceMask,faceCircleMask,faceConvexHull]

def processFaceDepth(depth,depthMask,landmarks,facePoints,cropEyeMouth=True,
                     circleRate=1.2,maxMeanDiff=100):
    '''
    Preprocess the depth image: cut out the head with a circular mask and, if requested, mask out eyes and mouth
    Returns the processed full depth map, the head part, the remainder, and a list of masks
    (full depth mask, face mask (eyes/mouth removed), circular face mask)
    '''
    #depth,depthMask=depthPreprocess(depth,dmin,dmax,filtSize,bifiltSize)
    if cropEyeMouth:
        faceCircleMask,faceMask,faceConvexHull=getFaceMask(depth,landmarks,facePoints,circleRate)
    else:
        faceConvexHull=getConvexHullMask(depth,facePoints)
        faceCircleMask=getFaceCircle(depth,facePoints,faceConvexHull,circleRate)
        faceMask=faceCircleMask
    faceMeanMask=getFaceMeanMask(depth,faceConvexHull,depthMask,maxMeanDiff=maxMeanDiff) # anything outside face mean +-10cm is cut away
    depthMask &=faceMeanMask
    # disk=morphology.disk(2)
    # depthMask=morphology.binary_erosion(depthMask,selem=disk) # erosion
    # depthMask=maskClosing(depthMask,size=4)
    depthMask=maskErosion(depthMask,size=2) # erosion
    depthTmp=getMaskedImg2(depth,depthMask)
    masked=getMaskedImg2(depthTmp,faceMask)
    maskedReverse=getMaskedImg2(depthTmp,faceCircleMask,reverse=True)
    depthTmp=depthTmp.astype(np.uint16)
    masked=masked.astype(np.uint16)
    maskedReverse=maskedReverse.astype(np.uint16)
    return depthTmp,masked,maskedReverse,[depthMask,faceMask,faceCircleMask,faceConvexHull]
    # fullMaskDila=morphology.binary_dilation(fullMask,selem=disk)
    fullMaskDila=maskDilation(fullMask,size=12) # dilate the eye/mouth mask
    if facePoints is None:
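# A hedged end-to-end sketch of the detection pipeline above; the image loader is
# face_recognition's own, but the zero-filled depth frame is a stand-in (with it,
# processFaceImgPair returns ok == False, since no landmark passes the depth filter;
# a real sensor frame is needed for the full pipeline):
#
# color = face_recognition.load_image_file('face.jpg')
# depth = np.zeros(color.shape[:2], dtype=np.float64)
# ok, locs, lms, pts, pts_filt, rate = processFaceImgPair(color, depth)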
workflow.go
package largepayload import ( "time" "go.temporal.io/sdk/workflow" ) // LargePayloadWorkflow workflow definition func LargePayloadWorkflow(ctx workflow.Context, payloadSize int) (err error) { ao := workflow.ActivityOptions{ StartToCloseTimeout: time.Minute, } ctx = workflow.WithActivityOptions(ctx, ao) var data []byte var a *Activities err = workflow.ExecuteActivity(ctx, a.CreateLargeResultActivity, payloadSize).Get(ctx, &data) if err != nil { return err } err = workflow.ExecuteActivity(ctx, a.ProcessLargeInputActivity, data).Get(ctx, nil) if err != nil {
workflow.GetLogger(ctx).Info("Workflow completed.") } return err }
workflow.GetLogger(ctx).Error("Workflow failed.", "Error", err.Error()) } else {
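// A minimal sketch of hosting this workflow on a worker; the client dial, the
// task-queue name, and the worker wiring are assumptions, not part of this file:
//
//	c, _ := client.Dial(client.Options{})
//	w := worker.New(c, "large-payload", worker.Options{})
//	w.RegisterWorkflow(LargePayloadWorkflow)
//	w.RegisterActivity(&Activities{})
//	_ = w.Run(worker.InterruptCh())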
test_30_store.py
import pytest import os import sys import runAM import json # insert project directory to $PATH for imports to work test_file = os.path.realpath(__file__) test_dir = os.path.dirname(test_file) project_dir = os.path.dirname(test_dir) sys.path.append(project_dir) bookstore_json = {"store": { "book": [ { "category": "reference", "author": "Nigel Rees", "title": "Sayings of the Century", "price": 8.95, }, { "category": "fiction", "author": "Evelyn Waugh", "title": "Sword of Honour", "price": 12.99, }, { "category": "fiction", "author": "Herman Melville", "title": "Moby Dick", "isbn": "0-553-21311-3", "price": 8.99, "tags": ["adventure", "fiction", "1851"] }, { "category": "fiction", "author": "J. R. R. Tolkien", "title": "The Lord of the Rings", "isbn": "0-395-19395-8", "price": 22.99, "tags": ["fantasy", "fiction", "1954"] } ], "bicycle": [ { "color": "red", "price": 19.95 } ] } } def test_000_can_assert_true(): # before any test verify if PyTest is working and can assert True assert True def
(): # init store and confirm that we have write access store = runAM.db.JSONStore(database_name='test_store', directory=os.path.join(project_dir, 'temp')) assert store.write() def test_020_drop_table(): # drop all tables in the document all_tables_clean = True store = runAM.db.JSONStore(database_name='test_store', directory=os.path.join(project_dir, 'temp')) for table_name in store.db.keys(): table_content = store.drop_table(table_name) if table_content: # if table is not empty, change the flag to false all_tables_clean = False store.write() assert all_tables_clean def test_030_insert_documents(): # insert documents into book and bicycle table store = runAM.db.JSONStore(database_name='test_store', directory=os.path.join(project_dir, 'temp')) book_doc_id_list = list() bicycle_doc_id_list = list() for book in bookstore_json['store']['book']: doc_id = store.insert_doc(data=book, table_name='book') book_doc_id_list.append(doc_id) for bicycle in bookstore_json['store']['bicycle']: doc_id = store.insert_doc(data=bicycle, doc_id='42', table_name='bicycle') bicycle_doc_id_list.append(doc_id) store.write() assert ( book_doc_id_list == ['1', '2', '3', '4'] ) and ( bicycle_doc_id_list == ['42'] ) def test_040_get_table(): # get table content store = runAM.db.JSONStore(database_name='test_store', directory=os.path.join(project_dir, 'temp')) assert store.table('bicycle') == {"42": {"color": "red", "price": 19.95}} def test_060_jq(): # test basic jq query: find all books with tags store = runAM.db.JSONStore(database_name='test_store', directory=os.path.join(project_dir, 'temp')) value = store.jq(table_name='book', query_expression='..|select(.tags?!=null)') assert value == [ { "category": "fiction", "author": "Herman Melville", "title": "Moby Dick", "isbn": "0-553-21311-3", "price": 8.99, "tags": ["adventure", "fiction", "1851"] }, { "category": "fiction", "author": "J. R. R. 
Tolkien", "title": "The Lord of the Rings", "isbn": "0-395-19395-8", "price": 22.99, "tags": ["fantasy", "fiction", "1954"] } ] def test_070_jq_path(): # find the path to every value matched by jq store = runAM.db.JSONStore(database_name='test_store', directory=os.path.join(project_dir, 'temp')) path_list = store.jq_path(table_name='book', query_expression='..|select(.tags?!=null)') assert path_list == [['3'],['4']] def test_080_delete_doc(): # delete a document from a table store = runAM.db.JSONStore(database_name='test_store', directory=os.path.join(project_dir, 'temp')) deleted_docs_list = store.delete_doc(table_name='bicycle', doc_id='42') store.write() assert deleted_docs_list == ['42'] def test_090_get_value(): # find a value that corresponds to the path store = runAM.db.JSONStore(database_name='test_store', directory=os.path.join(project_dir, 'temp')) value = store.get_val(path_list=['4', 'tags', 0], table_name='book') assert value == 'fantasy' def test_100_update_path(): # update value in a table based on specified path store = runAM.db.JSONStore(database_name='test_store', directory=os.path.join(project_dir, 'temp')) updated_table = store.update_path(path=['4', 'tags', 2], data='year-1954', table_name='book') store.write() assert updated_table == { "1": { "category": "reference", "author": "Nigel Rees", "title": "Sayings of the Century", "price": 8.95 }, "2": { "category": "fiction", "author": "Evelyn Waugh", "title": "Sword of Honour", "price": 12.99 }, "3": { "category": "fiction", "author": "Herman Melville", "title": "Moby Dick", "isbn": "0-553-21311-3", "price": 8.99, "tags": [ "adventure", "fiction", "1851" ] }, "4": { "category": "fiction", "author": "J. R. R. Tolkien", "title": "The Lord of the Rings", "isbn": "0-395-19395-8", "price": 22.99, "tags": [ "fantasy", "fiction", "year-1954" ] } }
test_010_store_open_store_write
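# A condensed usage sketch of the JSONStore API exercised by the tests above;
# every call mirrors one the tests make (the directory string is shortened here
# for brevity):
#
# store = runAM.db.JSONStore(database_name='test_store', directory='./temp')
# doc_id = store.insert_doc(data={'color': 'red', 'price': 19.95}, table_name='bicycle')  # auto id '1'
# store.write()
# assert store.table('bicycle')[doc_id]['color'] == 'red'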
status_test.go
// Copyright 2021 PingCAP, Inc.
// you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // See the License for the specific language governing permissions and // limitations under the License. package loader import ( "sync" . "github.com/pingcap/check" "go.uber.org/atomic" "github.com/pingcap/ticdc/dm/dm/config" "github.com/pingcap/ticdc/dm/pkg/log" ) func (*testLoaderSuite) TestConcurrentStatus(c *C) { l := &Loader{} l.cfg = &config.SubTaskConfig{} l.logger = log.L() l.finishedDataSize.Store(100) l.totalDataSize.Store(200) l.totalFileCount.Store(10) l.dbTableDataFinishedSize = map[string]map[string]*atomic.Int64{ "db1": { "table1": atomic.NewInt64(10), "table2": atomic.NewInt64(20), }, } l.dbTableDataLastFinishedSize = map[string]map[string]*atomic.Int64{ "db1": { "table1": atomic.NewInt64(0), "table2": atomic.NewInt64(0), }, } // test won't race or panic wg := sync.WaitGroup{} wg.Add(20) for i := 0; i < 20; i++ { go func() { l.Status(nil) wg.Done() }() } wg.Wait() }
// // Licensed under the Apache License, Version 2.0 (the "License");
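The test above guards Loader.Status against data races by calling it from 20 goroutines behind a sync.WaitGroup. The same start-all-then-join smoke-test shape in Python, as a sketch (the `fn` under test stands in for whatever method must be thread-safe):

import threading

def hammer(fn, n=20):
    # Start n threads calling fn concurrently, then wait for them all,
    # mirroring the wg.Add/wg.Done/wg.Wait pattern in the Go test.
    threads = [threading.Thread(target=fn) for _ in range(n)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()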
union_test.py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- import unittest from hwt.hdl.types.bits import Bits from hwt.hdl.types.struct import HStruct from hwt.hdl.types.union import HUnion from hwtLib.types.ctypes import uint8_t, uint16_t, int8_t, uint32_t from pyMathBitPrecise.bit_utils import mask class UnionTC(unittest.TestCase): def test_assertMembersSameSize(self): t = HUnion( (uint8_t, "a"), (uint8_t, "b"), (uint8_t, "c"), (uint8_t, "d"), ) self.assertEqual(t.bit_length(), 8) with self.assertRaises(TypeError): HUnion( (uint16_t, "a"), (uint8_t, "b"), ) def test_assertNoPadding(self):
    def test_value_simple(self):
        t = HUnion(
            (uint8_t, "unsigned"),
            (int8_t, "signed"),
        )

        v = t.from_py(None)
        v.unsigned = mask(8)
        self.assertEqual(int(v.signed), -1)
        v.signed = 0
        self.assertEqual(int(v.unsigned), 0)

    def test_value_struct_and_bits(self):
        t = HUnion(
            (uint16_t, "bits"),
            (HStruct(
                (uint8_t, "lower"),
                (uint8_t, "upper"),
            ), "struct"),
        )
        v = t.from_py(None)

        v.struct.upper = 1
        self.assertEqual(v.bits.val, 1 << 8)
        self.assertEqual(v.bits.vld_mask, mask(8) << 8)

        v.struct.lower = 1
        self.assertEqual(v.bits.val, (1 << 8) | 1)
        self.assertEqual(v.bits.vld_mask, mask(16))

        v.bits = 2
        self.assertEqual(int(v.struct.lower), 2)
        self.assertEqual(int(v.struct.upper), 0)

    def test_value_array_and_bits(self):
        t = HUnion(
            (uint32_t, "bits"),
            (uint8_t[4], "arr"),
        )
        v = t.from_py(None)
        b = (4 << (3 * 8)) | (3 << (2 * 8)) | (2 << 8) | 1
        v.bits = b

        for i, item in enumerate(v.arr):
            self.assertEqual(int(item), i + 1)
        self.assertEqual(int(v.bits), b)

    def test_value_array_toArray(self):
        t = HUnion(
            (uint16_t[2], "arr16b"),
            (int8_t[4], "arr8b"),
        )
        v = t.from_py(None)
        for i in range(len(v.arr16b)):
            v.arr16b[i] = i + 1

        # use a name distinct from the union value `v` so the loop body
        # does not shadow it
        for i, item in enumerate(v.arr8b):
            if (i + 1) % 2 == 0:
                expected = 0
            else:
                expected = i // 2 + 1
            self.assertEqual(int(item), expected)

    def test_value_array_of_struct_to_bits(self):
        t = HUnion(
            (HStruct(
                (uint16_t, "a"),
                (uint8_t, "b"),
            )[3], "arr"),
            (Bits(24 * 3), "bits")
        )
        v = t.from_py(None)
        for i in range(len(v.arr)):
            v.arr[i] = {"a": i + 1,
                        "b": (i + 1) * 3}

        self.assertEqual(int(v.bits),
                         1 | 3 << 16 | 2 << 24 | 6 << (24 + 16) |
                         3 << (2 * 24) | 9 << (2 * 24 + 16))

    def test_hunion_type_eq(self):
        t0 = HUnion(
            (HStruct(
                (uint16_t, "a"),
                (uint8_t, "b"),
            )[3], "arr"),
            (Bits(24 * 3), "bits")
        )
        t1 = HUnion(
            (HStruct(
                (uint16_t, "a"),
                (uint8_t, "b"),
            )[3], "arr"),
            (Bits(24 * 3), "bits")
        )
        self.assertEqual(t0, t1)
        self.assertEqual(t1, t0)

        t1 = HUnion(
            (Bits(24 * 3), "bits"),
            (HStruct(
                (uint16_t, "a"),
                (uint8_t, "b"),
            )[3], "arr")
        )
        self.assertEqual(t0, t1)
        self.assertEqual(t1, t0)

        t1 = HUnion(
            (uint32_t, "bits"),
            (uint8_t[4], "arr"),
        )
        self.assertNotEqual(t0, t1)
        self.assertNotEqual(t1, t0)

        t1 = HUnion(
            (Bits(24 * 3), "bbits"),
            (HStruct(
                (uint16_t, "a"),
                (uint8_t, "b"),
            )[3], "arr")
        )
        self.assertNotEqual(t0, t1)
        self.assertNotEqual(t1, t0)

        t1 = Bits(24 * 3)
        self.assertNotEqual(t0, t1)
        self.assertNotEqual(t1, t0)

        t1 = HUnion(
            (Bits(24 * 3, signed=False), "bits"),
            (HStruct(
                (uint16_t, "a"),
                (uint8_t, "b"),
            )[3], "arr")
        )
        self.assertNotEqual(t0, t1)
        self.assertNotEqual(t1, t0)


if __name__ == '__main__':
    suite = unittest.TestSuite()
    # suite.addTest(UnionTC('testValue'))
    suite.addTest(unittest.makeSuite(UnionTC))
    runner = unittest.TextTestRunner(verbosity=3)
    runner.run(suite)
with self.assertRaises(AssertionError): HUnion( (uint8_t, None), (uint8_t, "b"), )
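HUnion members alias the same storage, which is what test_value_simple exercises: writing 0xFF through the unsigned view reads back as -1 through the signed view. The standard library's ctypes.Union reproduces that behaviour for comparison (illustration only; HUnion is a hardware type, not a ctypes type):

import ctypes

class U8(ctypes.Union):
    # Both fields occupy the same single byte.
    _fields_ = [("unsigned", ctypes.c_uint8),
                ("signed", ctypes.c_int8)]

u = U8()
u.unsigned = 0xFF       # all eight bits set
assert u.signed == -1   # same bits, two's-complement view
u.signed = 0
assert u.unsigned == 0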
skill.service.ts
import { Injectable } from '@angular/core';
import { HttpClient, HttpErrorResponse, HttpHeaders } from '@angular/common/http';
import { Observable, EMPTY } from 'rxjs';
import { environment } from 'environments/environment';
import { catchError } from 'rxjs/operators';

@Injectable({
  providedIn: 'root'
})
export class SkillService {

  constructor(private httpClient: HttpClient) { }

  getSkillByUserId(userId: number, offset?: number, limit?: number): Observable<Object> {
    const offsetStr = offset !== undefined ? `?offset=${offset}` : '';
    const limitStr = limit ? `&limit=${limit}` : '';
    return this.httpClient.get(`${environment.api}/dev/skill/user/${userId}${offsetStr}${limitStr}`)
      .pipe(
        catchError((error: HttpErrorResponse) => {
          if (error.status === 404) {
            return EMPTY;
          }
          error.error.errors = [];
          throw error;
        })
      );
  }

  getSkillCards(token) {
    return this.httpClient.get('/assets/data/skill_card.json', this.setHeader(token))
      .pipe(
        catchError((error: HttpErrorResponse) => {
          if (error.status === 404) {
            return EMPTY;
          }
          error.error.errors = [];
          throw error;
        })
      );
  }

  getCreateSkills(payload: object, token: string) {
    return this.httpClient.post(`${environment.api}/dev/skill`, payload, this.setHeader(token));
  }

  setHeader(token) {
    return { headers: new HttpHeaders({ Authorization: token }) };
  }

  getSkillsTitle() {
    return this.httpClient.get('/assets/data/skill_title.json');
  }

  getSkillsTags(userId: number, token: string) {
    return this.httpClient.get('/assets/data/skill_tag.json', this.setHeader(token));
  }

  getSkillById(skillId: number) {
    return this.httpClient.get(`${environment.api}/dev/skill/${skillId}`)
        catchError((error: HttpErrorResponse) => {
          if (error.status === 404) {
            return EMPTY;
          }
          error.error.errors = [];
          throw error;
        })
      );
  }

  getSkillStatsById(skillId: number) {
    return this.httpClient.get(`/assets/data/ask_stats.json`)
      .pipe(
        catchError((error: HttpErrorResponse) => {
          if (error.status === 404) {
            return EMPTY;
          }
          error.error.errors = [];
          throw error;
        })
      );
  }

  getSkillRecs(skillId: number) {
    return this.httpClient.get(`${environment.api}/dev/skill/${skillId}/recs`)
      .pipe(
        catchError((error: HttpErrorResponse) => {
          if (error.status === 404) {
            return EMPTY;
          }
          error.error.errors = [];
          throw error;
        })
      );
  }
}
.pipe(
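getSkillByUserId concatenates `?offset=...` and `&limit=...` by hand, so a call with limit set but offset undefined yields a malformed `...&limit=...` URL with no `?`. A sketch of the safer collect-then-encode pattern (written in Python, like the other sketches in these notes):

from urllib.parse import urlencode

def build_query(offset=None, limit=None):
    # Keep only the parameters that were actually supplied.
    params = {k: v for k, v in (("offset", offset), ("limit", limit))
              if v is not None}
    return "?" + urlencode(params) if params else ""

assert build_query() == ""
assert build_query(offset=0, limit=10) == "?offset=0&limit=10"
assert build_query(limit=10) == "?limit=10"  # no stray '&'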
schemashifterdown_test.go
package convert import ( "encoding/xml" "strings" "testing" scte224 "github.com/Comcast/scte224structs/types/scte224v20180501" ) func TestDowngradeAudience(t *testing.T) { decoder := xml.NewDecoder(strings.NewReader(thoseGuys2018)) var cmcScte2018 scte224.Audience decodeErr := decoder.Decode(&cmcScte2018) if nil != decodeErr { t.Log(decodeErr) t.FailNow() } pretty, marshalErr := xml.MarshalIndent(DowngradeAudience(cmcScte2018), "", " ") if nil != marshalErr { t.Log(marshalErr) t.FailNow() } downgraded := string(pretty) if thoseGuys2015 != downgraded { t.Log(downgraded) t.Log("did not match") t.Log(thoseGuys2015) t.Fail() } } func TestDowngradeMedia(t *testing.T)
{ decoder := xml.NewDecoder(strings.NewReader(CALI_2018_XML)) var caliScte2018 scte224.Media decodeErr := decoder.Decode(&caliScte2018) if nil != decodeErr { t.Log(decodeErr) t.FailNow() } pretty, marshalErr := xml.MarshalIndent(DowngradeMedia(caliScte2018), "", " ") if nil != marshalErr { t.Log(marshalErr) t.FailNow() } downgraded := string(pretty) if CALI_2015_XML != downgraded { t.Log(downgraded) t.Log("did not match") t.Log(CALI_2015_XML) t.Fail() } }
index.native.tsx
import React from 'react'; import { Platform, StyleProp, ViewStyle } from 'react-native'; import RMCInputNumber from 'rmc-input-number/lib/index.ios'; import styles from 'rmc-input-number/lib/styles'; import { StepPropsType } from './PropsType'; export interface StepProps extends StepPropsType { styles?: typeof styles; style?: StyleProp<ViewStyle>; }
readOnly: false, disabled: false, styles, inputStyle: {} }; render() { const inputAndroidStyle = Platform.OS === 'android' ? { top: 6, paddingTop: 0, height: 26 } : {}; const { inputStyle, ...restProps } = this.props; const keyboardType = Platform.OS === 'android' ? 'numeric' : 'numbers-and-punctuation'; // tslint:disable-next-line:variable-name const _inputStyle = { ...inputAndroidStyle, ...inputStyle }; return ( <RMCInputNumber {...restProps} keyboardType={keyboardType} inputStyle={_inputStyle} /> ); } }
export default class Stepper extends React.Component<StepProps, any> { static defaultProps: StepProps = { step: 1,
two_stage.py
import torch import torch.nn as nn from .base import BaseDetector from .test_mixins import RPNTestMixin, BBoxTestMixin, MaskTestMixin from .. import builder from ..registry import DETECTORS from mmdet.core import bbox2roi, bbox2result, build_assigner, build_sampler @DETECTORS.register_module class
(BaseDetector, RPNTestMixin, BBoxTestMixin, MaskTestMixin): def __init__(self, backbone, neck=None, shared_head=None, rpn_head=None, bbox_roi_extractor=None, bbox_head=None, mask_roi_extractor=None, mask_head=None, train_cfg=None, test_cfg=None, pretrained=None): super(TwoStageDetector, self).__init__() self.backbone = builder.build_backbone(backbone) if neck is not None: self.neck = builder.build_neck(neck) if shared_head is not None: self.shared_head = builder.build_shared_head(shared_head) if rpn_head is not None: self.rpn_head = builder.build_head(rpn_head) if bbox_head is not None: self.bbox_roi_extractor = builder.build_roi_extractor( bbox_roi_extractor) self.bbox_head = builder.build_head(bbox_head) if mask_head is not None: if mask_roi_extractor is not None: self.mask_roi_extractor = builder.build_roi_extractor( mask_roi_extractor) self.share_roi_extractor = False else: self.share_roi_extractor = True self.mask_roi_extractor = self.bbox_roi_extractor self.mask_head = builder.build_head(mask_head) self.train_cfg = train_cfg self.test_cfg = test_cfg self.init_weights(pretrained=pretrained) @property def with_rpn(self): return hasattr(self, 'rpn_head') and self.rpn_head is not None def init_weights(self, pretrained=None): super(TwoStageDetector, self).init_weights(pretrained) self.backbone.init_weights(pretrained=pretrained) if self.with_neck: if isinstance(self.neck, nn.Sequential): for m in self.neck: m.init_weights() else: self.neck.init_weights() if self.with_shared_head: self.shared_head.init_weights(pretrained=pretrained) if self.with_rpn: self.rpn_head.init_weights() if self.with_bbox: self.bbox_roi_extractor.init_weights() self.bbox_head.init_weights() if self.with_mask: self.mask_head.init_weights() if not self.share_roi_extractor: self.mask_roi_extractor.init_weights() def extract_feat(self, img): x = self.backbone(img) if self.with_neck: x = self.neck(x) return x def forward_train(self, img, img_meta, gt_bboxes, gt_labels, gt_bboxes_ignore=None, gt_masks=None, proposals=None): x = self.extract_feat(img) losses = dict() # RPN forward and loss if self.with_rpn: rpn_outs = self.rpn_head(x) rpn_loss_inputs = rpn_outs + (gt_bboxes, img_meta, self.train_cfg.rpn) rpn_losses = self.rpn_head.loss( *rpn_loss_inputs, gt_bboxes_ignore=gt_bboxes_ignore) losses.update(rpn_losses) proposal_cfg = self.train_cfg.get('rpn_proposal', self.test_cfg.rpn) proposal_inputs = rpn_outs + (img_meta, proposal_cfg) proposal_list = self.rpn_head.get_bboxes(*proposal_inputs) else: proposal_list = proposals # assign gts and sample proposals if self.with_bbox or self.with_mask: bbox_assigner = build_assigner(self.train_cfg.rcnn.assigner) bbox_sampler = build_sampler( self.train_cfg.rcnn.sampler, context=self) num_imgs = img.size(0) if gt_bboxes_ignore is None: gt_bboxes_ignore = [None for _ in range(num_imgs)] sampling_results = [] for i in range(num_imgs): assign_result = bbox_assigner.assign(proposal_list[i], gt_bboxes[i], gt_bboxes_ignore[i], gt_labels[i]) sampling_result = bbox_sampler.sample( assign_result, proposal_list[i], gt_bboxes[i], gt_labels[i], feats=[lvl_feat[i][None] for lvl_feat in x]) sampling_results.append(sampling_result) # bbox head forward and loss if self.with_bbox: rois = bbox2roi([res.bboxes for res in sampling_results]) # TODO: a more flexible way to decide which feature maps to use bbox_feats = self.bbox_roi_extractor( x[:self.bbox_roi_extractor.num_inputs], rois) if self.with_shared_head: bbox_feats = self.shared_head(bbox_feats) cls_score, bbox_pred = self.bbox_head(bbox_feats) 
bbox_targets = self.bbox_head.get_target(sampling_results, gt_bboxes, gt_labels, self.train_cfg.rcnn) loss_bbox = self.bbox_head.loss(cls_score, bbox_pred, *bbox_targets) losses.update(loss_bbox) # mask head forward and loss if self.with_mask: if not self.share_roi_extractor: pos_rois = bbox2roi( [res.pos_bboxes for res in sampling_results]) mask_feats = self.mask_roi_extractor( x[:self.mask_roi_extractor.num_inputs], pos_rois) if self.with_shared_head: mask_feats = self.shared_head(mask_feats) else: pos_inds = [] device = bbox_feats.device for res in sampling_results: pos_inds.append( torch.ones( res.pos_bboxes.shape[0], device=device, dtype=torch.uint8)) pos_inds.append( torch.zeros( res.neg_bboxes.shape[0], device=device, dtype=torch.uint8)) pos_inds = torch.cat(pos_inds) mask_feats = bbox_feats[pos_inds] mask_pred = self.mask_head(mask_feats) mask_targets = self.mask_head.get_target(sampling_results, gt_masks, self.train_cfg.rcnn) pos_labels = torch.cat( [res.pos_gt_labels for res in sampling_results]) loss_mask = self.mask_head.loss(mask_pred, mask_targets, pos_labels) losses.update(loss_mask) return losses def simple_test(self, img, img_meta, proposals=None, rescale=False): """Test without augmentation.""" assert self.with_bbox, "Bbox head must be implemented." x = self.extract_feat(img) proposal_list = self.simple_test_rpn( x, img_meta, self.test_cfg.rpn) if proposals is None else proposals det_bboxes, det_labels = self.simple_test_bboxes( x, img_meta, proposal_list, self.test_cfg.rcnn, rescale=rescale) bbox_results = bbox2result(det_bboxes, det_labels, self.bbox_head.num_classes) if not self.with_mask: return bbox_results else: segm_results = self.simple_test_mask( x, img_meta, det_bboxes, det_labels, rescale=rescale) return bbox_results, segm_results def aug_test(self, imgs, img_metas, rescale=False): """Test with augmentations. If rescale is False, then returned bboxes and masks will fit the scale of imgs[0]. """ # recompute feats to save memory proposal_list = self.aug_test_rpn( self.extract_feats(imgs), img_metas, self.test_cfg.rpn) det_bboxes, det_labels = self.aug_test_bboxes( self.extract_feats(imgs), img_metas, proposal_list, self.test_cfg.rcnn) if rescale: _det_bboxes = det_bboxes else: _det_bboxes = det_bboxes.clone() _det_bboxes[:, :4] *= img_metas[0][0]['scale_factor'] bbox_results = bbox2result(_det_bboxes, det_labels, self.bbox_head.num_classes) # det_bboxes always keep the original scale if self.with_mask: segm_results = self.aug_test_mask( self.extract_feats(imgs), img_metas, det_bboxes, det_labels) return bbox_results, segm_results else: return bbox_results
TwoStageDetector
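forward_train packs the per-image sampled proposals into a single ROI list whose first column is the batch index (mmdet's bbox2roi) before extracting pooled features. A pure-Python sketch of that packing step, not mmdet's implementation:

def boxes_to_rois(per_image_boxes):
    # Flatten [[boxes of img 0], [boxes of img 1], ...] into
    # (img_idx, x1, y1, x2, y2) rows, like bbox2roi does with tensors.
    rois = []
    for img_idx, boxes in enumerate(per_image_boxes):
        for x1, y1, x2, y2 in boxes:
            rois.append((img_idx, x1, y1, x2, y2))
    return rois

rois = boxes_to_rois([[(0, 0, 10, 10)], [(5, 5, 20, 20)]])
assert rois == [(0, 0, 0, 10, 10), (1, 5, 5, 20, 20)]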
bar.rs
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![crate_type = "rlib"] extern crate foo; pub fn
(_s: foo::S) { }
bar
mesh_pool.py
import torch import torch.nn as nn from threading import Thread from models.layers.mesh_union import MeshUnion import numpy as np from heapq import heappop, heapify class MeshPool(nn.Module): def __init__(self, target, multi_thread=False): super(MeshPool, self).__init__() self.__out_target = target self.__multi_thread = multi_thread self.__fe = None self.__updated_fe = None self.__meshes = None self.__merge_edges = [-1, -1] def __call__(self, fe, meshes): return self.forward(fe, meshes) def forward(self, fe, meshes): self.__updated_fe = [[] for _ in range(len(meshes))] pool_threads = [] self.__fe = fe self.__meshes = meshes # iterate over batch for mesh_index in range(len(meshes)): if self.__multi_thread: pool_threads.append(Thread(target=self.__pool_main, args=(mesh_index,))) pool_threads[-1].start() else: self.__pool_main(mesh_index) if self.__multi_thread: for mesh_index in range(len(meshes)): pool_threads[mesh_index].join() out_features = torch.cat(self.__updated_fe).view(len(meshes), -1, self.__out_target) return out_features def __pool_main(self, mesh_index): mesh = self.__meshes[mesh_index] queue = self.__build_queue(self.__fe[mesh_index, :, :mesh.edges_count], mesh.edges_count) # recycle = [] # last_queue_len = len(queue) last_count = mesh.edges_count + 1 mask = np.ones(mesh.edges_count, dtype=np.uint8) edge_groups = MeshUnion(mesh.edges_count, self.__fe.device) while mesh.edges_count > self.__out_target: value, edge_id = heappop(queue) edge_id = int(edge_id) if mask[edge_id]: self.__pool_edge(mesh, edge_id, mask, edge_groups) mesh.clean(mask, edge_groups) fe = edge_groups.rebuild_features(self.__fe[mesh_index], mask, self.__out_target) self.__updated_fe[mesh_index] = fe def __pool_edge(self, mesh, edge_id, mask, edge_groups): if self.has_boundaries(mesh, edge_id): return False elif self.__clean_side(mesh, edge_id, mask, edge_groups, 0)\ and self.__clean_side(mesh, edge_id, mask, edge_groups, 2) \ and self.__is_one_ring_valid(mesh, edge_id): self.__merge_edges[0] = self.__pool_side(mesh, edge_id, mask, edge_groups, 0) self.__merge_edges[1] = self.__pool_side(mesh, edge_id, mask, edge_groups, 2) mesh.merge_vertices(edge_id) mask[edge_id] = False MeshPool.__remove_group(mesh, edge_groups, edge_id) mesh.edges_count -= 1 return True else: return False def __clean_side(self, mesh, edge_id, mask, edge_groups, side): if mesh.edges_count <= self.__out_target: return False invalid_edges = MeshPool.__get_invalids(mesh, edge_id, edge_groups, side) while len(invalid_edges) != 0 and mesh.edges_count > self.__out_target: self.__remove_triplete(mesh, mask, edge_groups, invalid_edges) if mesh.edges_count <= self.__out_target: return False if self.has_boundaries(mesh, edge_id): return False invalid_edges = self.__get_invalids(mesh, edge_id, edge_groups, side) return True @staticmethod def has_boundaries(mesh, edge_id): for edge in mesh.gemm_edges[edge_id]: if edge == -1 or -1 in mesh.gemm_edges[edge]: return True return False @staticmethod def __is_one_ring_valid(mesh, edge_id): v_a = set(mesh.edges[mesh.ve[mesh.edges[edge_id, 0]]].reshape(-1)) v_b = set(mesh.edges[mesh.ve[mesh.edges[edge_id, 1]]].reshape(-1)) shared = v_a & v_b - set(mesh.edges[edge_id]) return len(shared) == 2 def __pool_side(self, mesh, edge_id, mask, edge_groups, side): info = MeshPool.__get_face_info(mesh, edge_id, side) key_a, key_b, side_a, side_b, _, other_side_b, _, other_keys_b = info self.__redirect_edges(mesh, key_a, side_a - side_a % 2, other_keys_b[0], mesh.sides[key_b, other_side_b]) self.__redirect_edges(mesh, 
key_a, side_a - side_a % 2 + 1, other_keys_b[1], mesh.sides[key_b, other_side_b + 1]) MeshPool.__union_groups(mesh, edge_groups, key_b, key_a) MeshPool.__union_groups(mesh, edge_groups, edge_id, key_a) mask[key_b] = False MeshPool.__remove_group(mesh, edge_groups, key_b) mesh.remove_edge(key_b) mesh.edges_count -= 1 return key_a @staticmethod def __get_invalids(mesh, edge_id, edge_groups, side): info = MeshPool.__get_face_info(mesh, edge_id, side) key_a, key_b, side_a, side_b, other_side_a, other_side_b, other_keys_a, other_keys_b = info shared_items = MeshPool.__get_shared_items(other_keys_a, other_keys_b) if len(shared_items) == 0: return [] else: assert (len(shared_items) == 2) middle_edge = other_keys_a[shared_items[0]] update_key_a = other_keys_a[1 - shared_items[0]] update_key_b = other_keys_b[1 - shared_items[1]] update_side_a = mesh.sides[key_a, other_side_a + 1 - shared_items[0]] update_side_b = mesh.sides[key_b, other_side_b + 1 - shared_items[1]] MeshPool.__redirect_edges(mesh, edge_id, side, update_key_a, update_side_a) MeshPool.__redirect_edges(mesh, edge_id, side + 1, update_key_b, update_side_b) MeshPool.__redirect_edges(mesh, update_key_a, MeshPool.__get_other_side(update_side_a), update_key_b, MeshPool.__get_other_side(update_side_b)) MeshPool.__union_groups(mesh, edge_groups, key_a, edge_id) MeshPool.__union_groups(mesh, edge_groups, key_b, edge_id) MeshPool.__union_groups(mesh, edge_groups, key_a, update_key_a) MeshPool.__union_groups(mesh, edge_groups, middle_edge, update_key_a) MeshPool.__union_groups(mesh, edge_groups, key_b, update_key_b) MeshPool.__union_groups(mesh, edge_groups, middle_edge, update_key_b) return [key_a, key_b, middle_edge] @staticmethod def __redirect_edges(mesh, edge_a_key, side_a, edge_b_key, side_b): mesh.gemm_edges[edge_a_key, side_a] = edge_b_key mesh.gemm_edges[edge_b_key, side_b] = edge_a_key mesh.sides[edge_a_key, side_a] = side_b mesh.sides[edge_b_key, side_b] = side_a @staticmethod def __get_shared_items(list_a, list_b): shared_items = [] for i in range(len(list_a)): for j in range(len(list_b)): if list_a[i] == list_b[j]: shared_items.extend([i, j]) return shared_items @staticmethod def __get_other_side(side): return side + 1 - 2 * (side % 2) @staticmethod def
(mesh, edge_id, side): key_a = mesh.gemm_edges[edge_id, side] key_b = mesh.gemm_edges[edge_id, side + 1] side_a = mesh.sides[edge_id, side] side_b = mesh.sides[edge_id, side + 1] other_side_a = (side_a - (side_a % 2) + 2) % 4 other_side_b = (side_b - (side_b % 2) + 2) % 4 other_keys_a = [mesh.gemm_edges[key_a, other_side_a], mesh.gemm_edges[key_a, other_side_a + 1]] other_keys_b = [mesh.gemm_edges[key_b, other_side_b], mesh.gemm_edges[key_b, other_side_b + 1]] return key_a, key_b, side_a, side_b, other_side_a, other_side_b, other_keys_a, other_keys_b @staticmethod def __remove_triplete(mesh, mask, edge_groups, invalid_edges): vertex = set(mesh.edges[invalid_edges[0]]) for edge_key in invalid_edges: vertex &= set(mesh.edges[edge_key]) mask[edge_key] = False MeshPool.__remove_group(mesh, edge_groups, edge_key) mesh.edges_count -= 3 vertex = list(vertex) assert(len(vertex) == 1) mesh.remove_vertex(vertex[0]) def __build_queue(self, features, edges_count): # delete edges with smallest norm squared_magnitude = torch.sum(features * features, 0) if squared_magnitude.shape[-1] != 1: squared_magnitude = squared_magnitude.unsqueeze(-1) edge_ids = torch.arange(edges_count, device=squared_magnitude.device, dtype=torch.float32).unsqueeze(-1) heap = torch.cat((squared_magnitude, edge_ids), dim=-1).tolist() heapify(heap) return heap @staticmethod def __union_groups(mesh, edge_groups, source, target): edge_groups.union(source, target) mesh.union_groups(source, target) @staticmethod def __remove_group(mesh, edge_groups, index): edge_groups.remove_group(index) mesh.remove_group(index)
__get_face_info
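__pool_main pops the globally smallest-norm edge from the heap built in __build_queue, but skips entries whose edge has already been collapsed, using the mask as a lazy-deletion filter. That pop-until-valid pattern in isolation (sketch):

import heapq

def pop_valid(heap, mask):
    # Discard stale heap entries lazily instead of removing them eagerly.
    while heap:
        value, edge_id = heapq.heappop(heap)
        edge_id = int(edge_id)
        if mask[edge_id]:
            return value, edge_id
    return None

heap = [(0.5, 2), (0.1, 0), (0.3, 1)]
heapq.heapify(heap)
mask = [False, True, True]            # edge 0 already collapsed
assert pop_valid(heap, mask) == (0.3, 1)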
test107.js
var callbackArguments = []; var argument1 = function() { callbackArguments.push(arguments) return 78; }; var argument2 = true; var argument3 = "";
callbackArguments.push(arguments) return 68.43411762416089; }; var argument6 = function() { callbackArguments.push(arguments) return 65.76772350957533; }; var base_0 = ["v+<","7","g","Zg;","|","0lG("] var r_0= undefined try { r_0 = base_0.reduceRight(argument1,argument2,argument3) } catch(e) { r_0= "Error" } var base_1 = ["v+<","7","g","Zg;","|","0lG("] var r_1= undefined try { r_1 = base_1.reduceRight(argument4) } catch(e) { r_1= "Error" } var base_2 = ["v+<","7","g","Zg;","|","0lG("] var r_2= undefined try { r_2 = base_2.reduceRight(argument5) } catch(e) { r_2= "Error" } var base_3 = r_1 var r_3= undefined try { r_3 = base_3.reduceRight(argument6) } catch(e) { r_3= "Error" } function serialize(array){ return array.map(function(a){ if (a === null || a == undefined) return a; var name = a.constructor.name; if (name==='Object' || name=='Boolean'|| name=='Array'||name=='Number'||name=='String') return JSON.stringify(a); return name; }); } setTimeout(function(){ require("fs").writeFileSync("./experiments/reduceRight/reduceRightQC/test107.json",JSON.stringify({"baseObjects":serialize([base_0,base_1,base_2,base_3]),"returnObjects":serialize([r_0,r_1,r_2,r_3]),"callbackArgs":callbackArguments})) },300)
var argument4 = function() { callbackArguments.push(arguments) return {"?ÃúÆc‡":false,"w nñä":[false,false],"\u0019ý1":-29.373224257004285}; }; var argument5 = function() {
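The generated test above probes Array.prototype.reduceRight, which folds from the last element toward the first and throws when called on an empty array with no initial value (hence every call is wrapped in try/catch). A Python equivalent for reference (sketch):

from functools import reduce

def reduce_right(arr, fn, *initial):
    # fn takes (accumulator, current), like the JS callback; like JS,
    # reducing an empty sequence with no initial value raises TypeError.
    return reduce(fn, list(reversed(arr)), *initial)

assert reduce_right(["a", "b", "c"], lambda acc, x: acc + x) == "cba"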
perform_doctest.py
from doctest import testmod from runtool import ( datatypes, recurse_config, runtool, transformations, transformer,
) for module in ( datatypes, recurse_config, runtool, transformations, transformer, utils, ): testmod(module)
utils,
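doctest.testmod scans a module for docstrings containing interactive-session examples and re-runs them, failing on any mismatch. The shape it looks for, as a self-contained example:

def add(a, b):
    """Return the sum of a and b.

    >>> add(2, 3)
    5
    """
    return a + b

if __name__ == "__main__":
    import doctest
    doctest.testmod()  # silent when every example passes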
user.ts
import async from "async"; import crypto from "crypto"; import nodemailer from "nodemailer"; import passport from "passport"; import { default as User, UserModel, AuthToken } from "../models/User"; import { Request, Response, NextFunction } from "express"; import { IVerifyOptions } from "passport-local"; import { WriteError } from "mongodb"; import { EMAIL_HOST, EMAIL_PORT, EMAIL_USER, EMAIL_PASSWORD } from "../util/secrets"; const request = require("express-validator"); // TODO: refactor - move into a mail service const smtpConfig = { host: EMAIL_HOST, port: parseInt(EMAIL_PORT), secure: false, // upgrade later with STARTTLS auth: { user: EMAIL_USER, pass: EMAIL_PASSWORD } }; /** * GET /login * Login page. */ export let getLogin = (req: Request, res: Response) => { if (req.user) { return res.redirect("/"); } res.render("account/login", { title: "Login" }); }; /** * POST /login * Sign in using email and password. */ export let postLogin = (req: Request, res: Response, next: NextFunction) => { req.assert("email", "Email is not valid").isEmail(); req.assert("password", "Password cannot be blank").notEmpty(); req.sanitize("email").normalizeEmail({ gmail_remove_dots: false }); const errors = req.validationErrors(); if (errors) { req.flash("errors", errors); return res.redirect("/login"); } passport.authenticate("local", (err: Error, user: UserModel, info: IVerifyOptions) => { if (err) { return next(err); } if (!user) { req.flash("errors", info.message); return res.redirect("/login"); } req.logIn(user, (err) => { if (err) { return next(err); } req.flash("success", { msg: "Success! You are logged in." }); res.redirect(req.session.returnTo || "/"); }); })(req, res, next); }; /** * GET /logout * Log out. */ export let logout = (req: Request, res: Response) => { req.logout(); res.redirect("/"); }; /** * GET /signup * Signup page. */ export let getSignup = (req: Request, res: Response) => { // if (req.user) { // req.logout(); // return res.redirect("/signup"); // } res.render("account/signup", { title: "Create Account" }); }; /** * POST /signup * Create a new local account. */ export let postSignup = (req: Request, res: Response, next: NextFunction) => { req.assert("email", "Email is not valid").isEmail(); req.assert("password", "Password must be at least 4 characters long").len({ min: 4 }); req.assert("confirmPassword", "Passwords do not match").equals(req.body.password); req.sanitize("email").normalizeEmail({ gmail_remove_dots: false }); const errors = req.validationErrors(); if (errors) { req.flash("errors", errors); return res.redirect("/signup"); } const user = new User({ email: req.body.email, password: req.body.password }); User.findOne({ email: req.body.email }, (err, existingUser) => { if (err) { return next(err); } if (existingUser) { req.flash("errors", { msg: "Account with that email address already exists." }); return res.redirect("/signup"); } user.save((err) => { if (err) { return next(err); } req.logIn(user, (err) => { if (err) { return next(err); } res.redirect("/"); }); }); }); }; /** * GET /account * Profile page. */ export let getAccount = (req: Request, res: Response) => { res.render("account/profile", { title: "Account Management" }); }; /** * POST /account/profile * Update profile information. 
*/ export let postUpdateProfile = (req: Request, res: Response, next: NextFunction) => { req.assert("email", "Please enter a valid email address.").isEmail(); req.sanitize("email").normalizeEmail({ gmail_remove_dots: false }); const errors = req.validationErrors(); if (errors) { req.flash("errors", errors); return res.redirect("/account"); } User.findById(req.user.id, (err, user: UserModel) => { if (err) { return next(err); } user.email = req.body.email || ""; user.profile.name = req.body.name || ""; user.profile.gender = req.body.gender || ""; user.profile.location = req.body.location || ""; user.profile.website = req.body.website || ""; user.save((err: WriteError) => { if (err) { if (err.code === 11000) { req.flash("errors", { msg: "The email address you have entered is already associated with an account." }); return res.redirect("/account"); } return next(err); } req.flash("success", { msg: "Profile information has been updated." }); res.redirect("/account"); }); }); }; /** * POST /account/password * Update current password. */ export let postUpdatePassword = (req: Request, res: Response, next: NextFunction) => { req.assert("password", "Password must be at least 4 characters long").len({ min: 4 }); req.assert("confirmPassword", "Passwords do not match").equals(req.body.password); const errors = req.validationErrors(); if (errors) { req.flash("errors", errors); return res.redirect("/account"); } User.findById(req.user.id, (err, user: UserModel) => { if (err) { return next(err); } user.password = req.body.password; user.save((err: WriteError) => { if (err) { return next(err); } req.flash("success", { msg: "Password has been changed." }); res.redirect("/account"); }); }); }; /** * POST /account/delete * Delete user account. */ export let postDeleteAccount = (req: Request, res: Response, next: NextFunction) => { User.remove({ _id: req.user.id }, (err) => { if (err) { return next(err); } req.logout(); req.flash("info", { msg: "Your account has been deleted." }); res.redirect("/"); }); }; /** * GET /account/unlink/:provider * Unlink OAuth provider. */ export let getOauthUnlink = (req: Request, res: Response, next: NextFunction) => { const provider = req.params.provider; User.findById(req.user.id, (err, user: any) => { if (err) { return next(err); } user[provider] = undefined; user.tokens = user.tokens.filter((token: AuthToken) => token.kind !== provider); user.save((err: WriteError) => { if (err) { return next(err); } req.flash("info", { msg: `${provider} account has been unlinked.` }); res.redirect("/account"); }); }); }; /** * GET /reset/:token * Reset Password page. */ export let getReset = (req: Request, res: Response, next: NextFunction) => { if (req.isAuthenticated()) {
} User .findOne({ passwordResetToken: req.params.token }) .where("passwordResetExpires").gt(Date.now()) .exec((err, user) => { if (err) { return next(err); } if (!user) { req.flash("errors", { msg: "Password reset token is invalid or has expired." }); return res.redirect("/forgot"); } res.render("account/reset", { title: "Password Reset" }); }); }; /** * POST /reset/:token * Process the reset password request. */ export let postReset = (req: Request, res: Response, next: NextFunction) => { req.assert("password", "Password must be at least 4 characters long.").len({ min: 4 }); req.assert("confirm", "Passwords must match.").equals(req.body.password); const errors = req.validationErrors(); if (errors) { req.flash("errors", errors); return res.redirect("back"); } async.waterfall([ function resetPassword(done: Function) { User .findOne({ passwordResetToken: req.params.token }) .where("passwordResetExpires").gt(Date.now()) .exec((err, user: any) => { if (err) { return next(err); } if (!user) { req.flash("errors", { msg: "Password reset token is invalid or has expired." }); return res.redirect("back"); } user.password = req.body.password; user.passwordResetToken = undefined; user.passwordResetExpires = undefined; user.save((err: WriteError) => { if (err) { return next(err); } req.logIn(user, (err) => { done(err, user); }); }); }); }, function sendResetPasswordEmail(user: UserModel, done: Function) { const transporter = nodemailer.createTransport(smtpConfig); const mailOptions = { to: user.email, from: process.env.EMAIL_USER, subject: "Your password has been changed", text: `Hello,\n\nThis is a confirmation that the password for your account ${user.email} has just been changed.\n` }; transporter.sendMail(mailOptions, (err) => { req.flash("success", { msg: "Success! Your password has been changed." }); done(err); }); } ], (err) => { if (err) { return next(err); } res.redirect("/"); }); }; /** * GET /forgot * Forgot Password page. */ export let getForgot = (req: Request, res: Response) => { if (req.isAuthenticated()) { return res.redirect("/"); } res.render("account/forgot", { title: "Forgot Password" }); }; /** * POST /forgot * Create a random token, then the send user an email with a reset link. */ export let postForgot = (req: Request, res: Response, next: NextFunction) => { req.assert("email", "Please enter a valid email address.").isEmail(); req.sanitize("email").normalizeEmail({ gmail_remove_dots: false }); const errors = req.validationErrors(); if (errors) { req.flash("errors", errors); return res.redirect("/forgot"); } async.waterfall([ function createRandomToken(done: Function) { crypto.randomBytes(16, (err, buf) => { const token = buf.toString("hex"); done(err, token); }); }, function setRandomToken(token: AuthToken, done: Function) { User.findOne({ email: req.body.email }, (err, user: any) => { if (err) { return done(err); } if (!user) { req.flash("errors", { msg: "Account with that email address does not exist." 
}); return res.redirect("/forgot"); } user.passwordResetToken = token; user.passwordResetExpires = Date.now() + 3600000; // 1 hour user.save((err: WriteError) => { done(err, token, user); }); }); }, function sendForgotPasswordEmail(token: AuthToken, user: UserModel, done: Function) { const transporter = nodemailer.createTransport(smtpConfig); const mailOptions = { to: user.email, from: process.env.EMAIL_USER, subject: "Reset your password on Hackathon Starter", text: `You are receiving this email because you (or someone else) have requested the reset of the password for your account.\n\n Please click on the following link, or paste this into your browser to complete the process:\n\n http://${req.headers.host}/reset/${token}\n\n If you did not request this, please ignore this email and your password will remain unchanged.\n` }; transporter.sendMail(mailOptions, (err) => { req.flash("info", { msg: `An e-mail has been sent to ${user.email} with further instructions.` }); done(err); }); } ], (err) => { if (err) { return next(err); } res.redirect("/forgot"); }); };
return res.redirect("/");
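createRandomToken above is crypto.randomBytes(16) rendered as hex, then stored with a one-hour expiry. For comparison, the Python counterpart of the token step (sketch):

import secrets

token = secrets.token_hex(16)  # 16 random bytes -> 32 hex characters
assert len(token) == 32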
main.rs
fn main()
{
    // ANCHOR: here
    let s1 = String::from("hello");
    let s2 = s1;

    println!("{}, world!", s1);
    // ANCHOR_END: here
}
footer.js
import React from "react"; import "./footer.css"; import { IcnFacebook, IcnInstagram, IcnLinkedin, IcnMail } from "./icn_SMM"; const thisYear = new Date(); const Footer = () => ( <div className="container__footer"> <div className="footer"> <div className="footer__smm"> <a href="mailto:[email protected]" className="footer__smm--item" rel="noopener noreferrer" > <IcnMail /> </a> <a href="https://www.linkedin.com/in/sergekovalchuk" target="_blank" className="footer__smm--item" rel="noopener noreferrer" > <IcnLinkedin /> </a> <a href="https://www.facebook.com/xyemoe" target="_blank" className="footer__smm--item" rel="noopener noreferrer" > <IcnFacebook /> </a> <a href="https://www.instagram.com/xyemoe/" target="_blank" className="footer__smm--item" rel="noopener noreferrer" >
</div> <div className="footer__copyright"> <span> Sergey Kovalchuk&emsp;©&emsp;2011&#8202;—&#8202; {thisYear.getFullYear()} </span> </div> </div> </div> ); export default Footer;
<IcnInstagram /> </a>
list_action_types.ts
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ import expect from '@kbn/expect'; import { UserAtSpaceScenarios } from '../../scenarios'; import { getUrlPrefix } from '../../../common/lib/space_test_utils'; import { FtrProviderContext } from '../../../common/ftr_provider_context'; // eslint-disable-next-line import/no-default-export export default function listActionTypesTests({ getService }: FtrProviderContext) { const supertestWithoutAuth = getService('supertestWithoutAuth'); describe('list_action_types', () => { for (const scenario of UserAtSpaceScenarios) { const { user, space } = scenario; describe(scenario.id, () => { it('should return 200 with list of action types containing defaults', async () => { const response = await supertestWithoutAuth .get(`${getUrlPrefix(space.id)}/api/action/types`) .auth(user.username, user.password); function createActionTypeMatcher(id: string, name: string) {
return actionType.id === id && actionType.name === name; }; } switch (scenario.id) { case 'no_kibana_privileges at space1': case 'space_1_all at space2': expect(response.statusCode).to.eql(404); expect(response.body).to.eql({ statusCode: 404, error: 'Not Found', message: 'Not Found', }); break; case 'global_read at space1': case 'superuser at space1': case 'space_1_all at space1': expect(response.statusCode).to.eql(200); // Check for values explicitly in order to avoid this test failing each time plugins register // a new action type expect( response.body.some( createActionTypeMatcher('test.index-record', 'Test: Index Record') ) ).to.be(true); break; default: throw new Error(`Scenario untested: ${JSON.stringify(scenario)}`); } }); }); } }); }
return (actionType: { id: string; name: string }) => {
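createActionTypeMatcher is a closure factory: it captures (id, name) and returns the predicate handed to Array.prototype.some. The same pattern in Python (sketch):

def make_matcher(type_id, name):
    # Return a predicate that remembers type_id and name.
    return lambda t: t["id"] == type_id and t["name"] == name

types = [{"id": "test.index-record", "name": "Test: Index Record"}]
assert any(map(make_matcher("test.index-record", "Test: Index Record"), types))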
datavalidation.go
// Copyright 2016 - 2020 The excelize Authors. All rights reserved. Use of
// this source code is governed by a BSD-style license that can be found in
// the LICENSE file.
//
// Package excelize providing a set of functions that allow you to write to
// and read from XLSX / XLSM / XLTM files. Supports reading and writing
// spreadsheet documents generated by Microsoft Excel™ 2007 and later. Supports
// complex components by high compatibility, and provided streaming API for
// generating or reading data from a worksheet with huge amounts of data. This
// library needs Go version 1.10 or later.

package excelize

import (
	"fmt"
	"strings"
)

// DataValidationType defined the type of data validation.
type DataValidationType int

// Data validation types.
const (
	_ DataValidationType = iota
	typeNone // inline use
	DataValidationTypeCustom
	DataValidationTypeDate
	DataValidationTypeDecimal
	typeList // inline use
	DataValidationTypeTextLeng
	DataValidationTypeTime
	// DataValidationTypeWhole Integer
	DataValidationTypeWhole
)

const (
	// dataValidationFormulaStrLen 255 characters + 2 quotes
	dataValidationFormulaStrLen = 257
	// dataValidationFormulaStrLenErr
	dataValidationFormulaStrLenErr = "data validation must be 0-255 characters"
)

// DataValidationErrorStyle defined the style of data validation error alert.
type DataValidationErrorStyle int

// Data validation error styles.
const (
	_ DataValidationErrorStyle = iota
	DataValidationErrorStyleStop
	DataValidationErrorStyleWarning
	DataValidationErrorStyleInformation
)

// Data validation error styles.
const (
	styleStop        = "stop"
	styleWarning     = "warning"
	styleInformation = "information"
)

// DataValidationOperator operator enum.
type DataValidationOperator int

// Data validation operators.
const (
	_ DataValidationOperator = iota
	DataValidationOperatorBetween
	DataValidationOperatorEqual
	DataValidationOperatorGreaterThan
	DataValidationOperatorGreaterThanOrEqual
	DataValidationOperatorLessThan
	DataValidationOperatorLessThanOrEqual
	DataValidationOperatorNotBetween
	DataValidationOperatorNotEqual
)

// NewDataValidation return data validation struct.
func NewDataValidation(allowBlank bool) *DataValidation {
	return &DataValidation{
		AllowBlank:       allowBlank,
		ShowErrorMessage: false,
		ShowInputMessage: false,
	}
}

// SetError set error notice.
func (dd *DataValidation) SetError(style DataValidationErrorStyle, title, msg string) {
	dd.Error = &msg
	dd.ErrorTitle = &title
	strStyle := styleStop
	switch style {
	case DataValidationErrorStyleStop:
		strStyle = styleStop
	case DataValidationErrorStyleWarning:
		strStyle = styleWarning
	case DataValidationErrorStyleInformation:
		strStyle = styleInformation
	}
	dd.ShowErrorMessage = true
	dd.ErrorStyle = &strStyle
}

// SetInput set prompt notice.
func (dd *DataValidation) SetInput(title, msg string) {
	dd.ShowInputMessage = true
	dd.PromptTitle = &title
	dd.Prompt = &msg
}

// SetDropList data validation list.
func (dd *DataValidation) SetDropList(keys []string) error {
	formula := "\"" + strings.Join(keys, ",") + "\""
	if dataValidationFormulaStrLen < len(formula) {
dd.Formula1 = fmt.Sprintf("<formula1>%s</formula1>", formula) dd.Type = convDataValidationType(typeList) return nil } // SetRange provides function to set data validation range in drop list. func (dd *DataValidation) SetRange(f1, f2 int, t DataValidationType, o DataValidationOperator) error { formula1 := fmt.Sprintf("%d", f1) formula2 := fmt.Sprintf("%d", f2) if dataValidationFormulaStrLen+21 < len(dd.Formula1) || dataValidationFormulaStrLen+21 < len(dd.Formula2) { return fmt.Errorf(dataValidationFormulaStrLenErr) } dd.Formula1 = fmt.Sprintf("<formula1>%s</formula1>", formula1) dd.Formula2 = fmt.Sprintf("<formula2>%s</formula2>", formula2) dd.Type = convDataValidationType(t) dd.Operator = convDataValidationOperatior(o) return nil } // SetSqrefDropList provides set data validation on a range with source // reference range of the worksheet by given data validation object and // worksheet name. The data validation object can be created by // NewDataValidation function. For example, set data validation on // Sheet1!A7:B8 with validation criteria source Sheet1!E1:E3 settings, create // in-cell dropdown by allowing list source: // // dvRange := excelize.NewDataValidation(true) // dvRange.Sqref = "A7:B8" // dvRange.SetSqrefDropList("$E$1:$E$3", true) // f.AddDataValidation("Sheet1", dvRange) // func (dd *DataValidation) SetSqrefDropList(sqref string, isCurrentSheet bool) error { if isCurrentSheet { dd.Formula1 = fmt.Sprintf("<formula1>%s</formula1>", sqref) dd.Type = convDataValidationType(typeList) return nil } return fmt.Errorf("cross-sheet sqref cell are not supported") } // SetSqref provides function to set data validation range in drop list. func (dd *DataValidation) SetSqref(sqref string) { if dd.Sqref == "" { dd.Sqref = sqref } else { dd.Sqref = fmt.Sprintf("%s %s", dd.Sqref, sqref) } } // convDataValidationType get excel data validation type. func convDataValidationType(t DataValidationType) string { typeMap := map[DataValidationType]string{ typeNone: "none", DataValidationTypeCustom: "custom", DataValidationTypeDate: "date", DataValidationTypeDecimal: "decimal", typeList: "list", DataValidationTypeTextLeng: "textLength", DataValidationTypeTime: "time", DataValidationTypeWhole: "whole", } return typeMap[t] } // convDataValidationOperatior get excel data validation operator. func convDataValidationOperatior(o DataValidationOperator) string { typeMap := map[DataValidationOperator]string{ DataValidationOperatorBetween: "between", DataValidationOperatorEqual: "equal", DataValidationOperatorGreaterThan: "greaterThan", DataValidationOperatorGreaterThanOrEqual: "greaterThanOrEqual", DataValidationOperatorLessThan: "lessThan", DataValidationOperatorLessThanOrEqual: "lessThanOrEqual", DataValidationOperatorNotBetween: "notBetween", DataValidationOperatorNotEqual: "notEqual", } return typeMap[o] } // AddDataValidation provides set data validation on a range of the worksheet // by given data validation object and worksheet name. The data validation // object can be created by NewDataValidation function. 
// // Example 1, set data validation on Sheet1!A1:B2 with validation criteria // settings, show error alert after invalid data is entered with "Stop" style // and custom title "error body": // // dvRange := excelize.NewDataValidation(true) // dvRange.Sqref = "A1:B2" // dvRange.SetRange(10, 20, excelize.DataValidationTypeWhole, excelize.DataValidationOperatorBetween) // dvRange.SetError(excelize.DataValidationErrorStyleStop, "error title", "error body") // err := f.AddDataValidation("Sheet1", dvRange) // // Example 2, set data validation on Sheet1!A3:B4 with validation criteria // settings, and show input message when cell is selected: // // dvRange = excelize.NewDataValidation(true) // dvRange.Sqref = "A3:B4" // dvRange.SetRange(10, 20, excelize.DataValidationTypeWhole, excelize.DataValidationOperatorGreaterThan) // dvRange.SetInput("input title", "input body") // err = f.AddDataValidation("Sheet1", dvRange) // // Example 3, set data validation on Sheet1!A5:B6 with validation criteria // settings, create in-cell dropdown by allowing list source: // // dvRange = excelize.NewDataValidation(true) // dvRange.Sqref = "A5:B6" // dvRange.SetDropList([]string{"1", "2", "3"}) // err = f.AddDataValidation("Sheet1", dvRange) // func (f *File) AddDataValidation(sheet string, dv *DataValidation) error { ws, err := f.workSheetReader(sheet) if err != nil { return err } if nil == ws.DataValidations { ws.DataValidations = new(xlsxDataValidations) } ws.DataValidations.DataValidation = append(ws.DataValidations.DataValidation, dv) ws.DataValidations.Count = len(ws.DataValidations.DataValidation) return err } // DeleteDataValidation delete data validation by given worksheet name and // reference sequence. func (f *File) DeleteDataValidation(sheet, sqref string) error { ws, err := f.workSheetReader(sheet) if err != nil { return err } if ws.DataValidations == nil { return nil } dv := ws.DataValidations for i := 0; i < len(dv.DataValidation); i++ { if dv.DataValidation[i].Sqref == sqref { dv.DataValidation = append(dv.DataValidation[:i], dv.DataValidation[i+1:]...) i-- } } dv.Count = len(dv.DataValidation) if dv.Count == 0 { ws.DataValidations = nil } return nil }
return fmt.Errorf(dataValidationFormulaStrLenErr) }
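SetDropList joins the keys into one quoted, comma-separated formula and rejects anything longer than dataValidationFormulaStrLen (255 characters plus the two quotes). The rule in isolation, sketched in Python rather than excelize's Go:

def drop_list_formula(keys):
    formula = '"' + ",".join(keys) + '"'
    if len(formula) > 257:  # 255 characters + 2 quotes
        raise ValueError("data validation must be 0-255 characters")
    return formula

assert drop_list_formula(["1", "2", "3"]) == '"1,2,3"'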
summarize.py
"""Tools for summarizing lightcurve data into statistics""" import numpy as np import scipy.optimize as spo from tensorflow.contrib.framework import nest from justice import lightcurve from justice import xform def opt_alignment( lca: lightcurve._LC, lcb: lightcurve._LC, ivals=None, constraints=None, method='Nelder-Mead', options=None, vb=True, ) -> xform.LCXform: """ Minimizes the arclength between two lightcurves after merging :param lca: First lightcurve. :param lcb: Lightcurve to try merging in :param ivals: initial values to try :param constraints: Not sure how these work, feel free to give it a try though! :param method: Only Nelder_Mead is tested as of now :param options: Only maxiter is included right now :param vb: Boolean verbose :return: best xform """ if constraints is None: constraints = [] if options is None: options = {'maxiter': 10000} if ivals is None: ivals = np.array([0, 0, 1, 1]) if method != 'Nelder-Mead':
def pos_dil(xf: xform.LinearBandDataXform): return min(xf._dilate_time, xf._dilate_flux) constraints += [{'type': 'ineq', 'fun': pos_dil}] else: constraints = None # don't know if this way of handling constraints actually works -- untested! def _helper(vals): bd_xform = xform.LinearBandDataXform(*vals) lca_xform = xform.SameLCXform(bd_xform) lc = lca_xform.apply(lcb) new_lc = lca + lc length = new_lc.connect_the_dots() return length # could make this a probability by taking chi^2 error relative to # connect_the_dots original, but it didn't work better in the sandbox # notebook res = spo.minimize( _helper, ivals, constraints=constraints, method=method, options=options ) if vb: print(res) res_xform = xform.SameLCXform(xform.LinearBandDataXform(*res.x)) return res_xform
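opt_alignment hands _helper to scipy's Nelder-Mead exactly as in this reduced example; note that scipy ignores `constraints` under Nelder-Mead, which matches the code's own caveat that its constraint handling is untested:

import numpy as np
import scipy.optimize as spo

# Minimise a smooth 2-D bowl from an ivals-style starting point.
res = spo.minimize(lambda v: (v[0] - 1.0) ** 2 + (v[1] + 2.0) ** 2,
                   np.array([0.0, 0.0]),
                   method='Nelder-Mead',
                   options={'maxiter': 10000})
assert np.allclose(res.x, [1.0, -2.0], atol=1e-3)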
doctor.info.ts
export class
{ fullname:string; dob:string; address:string; speciality:string; qualification:string; availablity:boolean; userName:string; }
DoctorInfo
index.ts
export * from './pr-opened'; export * from './pr-edited';
plugin.py
# encoding: utf-8 import six from six import string_types import ckan from ckan.plugins import SingletonPlugin, implements, IPackageController from ckan.plugins import IGroupController, IOrganizationController, ITagController, IResourceController from ckan.common import request, config, c from ckan.logic import get_action try: long # Python 2 except NameError: long = int # Python 3 def translate_data_dict(data_dict): '''Return the given dict (e.g. a dataset dict) with as many of its fields as possible translated into the desired or the fallback language. ''' desired_lang_code = request.environ['CKAN_LANG'] fallback_lang_code = config.get('ckan.locale_default', 'en') # Get a flattened copy of data_dict to do the translation on. flattened = ckan.lib.navl.dictization_functions.flatten_dict( data_dict) # Get a simple flat list of all the terms to be translated, from the # flattened data dict. terms = set() for (key, value) in flattened.items(): if value in (None, True, False): continue elif isinstance(value, string_types): terms.add(value) elif isinstance(value, (int, long)): continue else: for item in value: if isinstance(value, dict): if key == (u'organization',) and item == 'description': terms.add(value[item]) else: terms.add(item) else: terms.add(item) # Get the translations of all the terms (as a list of dictionaries). translations = get_action('term_translation_show')( {'model': ckan.model}, {'terms': terms, 'lang_codes': (desired_lang_code, fallback_lang_code)}) # Transform the translations into a more convenient structure. desired_translations = {} fallback_translations = {} for translation in translations: if translation['lang_code'] == desired_lang_code: desired_translations[translation['term']] = ( translation['term_translation']) else: assert translation['lang_code'] == fallback_lang_code fallback_translations[translation['term']] = ( translation['term_translation']) # Make a copy of the flattened data dict with all the terms replaced by # their translations, where available. translated_flattened = {} for (key, value) in flattened.items(): # Don't translate names that are used for form URLs. if key == ('name',): translated_flattened[key] = value elif (key[0] in ('tags', 'groups') and len(key) == 3 and key[2] == 'name'): translated_flattened[key] = value elif value in (None, True, False): # Don't try to translate values that aren't strings. translated_flattened[key] = value elif isinstance(value, string_types): if value in desired_translations: translated_flattened[key] = desired_translations[value] else: translated_flattened[key] = fallback_translations.get( value, value) elif isinstance(value, (int, long, dict)): if key == (u'organization',): translated_flattened[key] = translate_data_dict(value); else: translated_flattened[key] = value else: translated_value = [] for item in value: if item in desired_translations: translated_value.append(desired_translations[item]) else: translated_value.append( fallback_translations.get(item, item) ) translated_flattened[key] = translated_value # Finally unflatten and return the translated data dict. translated_data_dict = (ckan.lib.navl.dictization_functions .unflatten(translated_flattened)) return translated_data_dict def translate_resource_data_dict(data_dict): '''Return the given dict with as many of its fields as possible translated into the desired or the fallback language. 
''' desired_lang_code = request.environ['CKAN_LANG'] fallback_lang_code = config.get('ckan.locale_default', 'en') # Get a flattened copy of data_dict to do the translation on. flattened = ckan.lib.navl.dictization_functions.flatten_dict( data_dict) # Get a simple flat list of all the terms to be translated, from the # flattened data dict. terms = set() for (key, value) in flattened.items(): if value in (None, True, False): continue elif isinstance(value, string_types): terms.add(value) elif isinstance(value, (int, long)): continue else: for item in value: terms.add(item) # Get the translations of all the terms (as a list of dictionaries). translations = ckan.logic.action.get.term_translation_show( {'model': ckan.model}, {'terms': terms, 'lang_codes': (desired_lang_code, fallback_lang_code)}) # Transform the translations into a more convenient structure. desired_translations = {} fallback_translations = {} for translation in translations: if translation['lang_code'] == desired_lang_code: desired_translations[translation['term']] = ( translation['term_translation']) else: assert translation['lang_code'] == fallback_lang_code fallback_translations[translation['term']] = ( translation['term_translation']) # Make a copy of the flattened data dict with all the terms replaced by # their translations, where available. translated_flattened = {} for (key, value) in flattened.items(): # Don't translate names that are used for form URLs. if key == ('name',): if value in desired_translations: translated_flattened[key] = desired_translations[value] elif value in fallback_translations: translated_flattened[key] = fallback_translations.get(value, value) else: translated_flattened[key] = value elif value in (None, True, False): # Don't try to translate values that aren't strings. translated_flattened[key] = value elif isinstance(value, string_types): if value in desired_translations: translated_flattened[key] = desired_translations[value] else: translated_flattened[key] = fallback_translations.get( value, value) elif isinstance(value, (int, long, dict)): translated_flattened[key] = value else: translated_value = [] for item in value: if item in desired_translations: translated_value.append(desired_translations[item]) else: translated_value.append( fallback_translations.get(item, item) ) translated_flattened[key] = translated_value # Finally unflatten and return the translated data dict. 
    translated_data_dict = (ckan.lib.navl.dictization_functions
                            .unflatten(translated_flattened))
    return translated_data_dict


KEYS_TO_IGNORE = ['state', 'revision_id', 'id',  # title done separately
                  'metadata_created', 'metadata_modified', 'site_id']


class MultilingualDataset(SingletonPlugin):
    implements(IPackageController, inherit=True)

    LANGS = config.get('ckan.locale_order', 'en').split(" ")

    def before_index(self, search_data):
        default_lang = search_data.get(
            'lang_code',
            config.get('ckan.locale_default', 'en')
        )

        ## translate title
        title = search_data.get('title')
        search_data['title_' + default_lang] = title
        title_translations = get_action('term_translation_show')(
            {'model': ckan.model},
            {'terms': [title], 'lang_codes': self.LANGS})

        for translation in title_translations:
            title_field = 'title_' + translation['lang_code']
            search_data[title_field] = translation['term_translation']

        ## translate rest
        all_terms = []
        for key, value in sorted(six.iteritems(search_data)):
            if key in KEYS_TO_IGNORE or key.startswith('title'):
                continue
            if not isinstance(value, list):
                value = [value]
            for item in value:
                if isinstance(item, string_types):
                    all_terms.append(item)

        field_translations = get_action('term_translation_show')(
            {'model': ckan.model},
            {'terms': all_terms, 'lang_codes': self.LANGS})

        text_field_items = dict(('text_' + lang, []) for lang in self.LANGS)

        text_field_items['text_' + default_lang].extend(all_terms)
        for translation in sorted(field_translations,
                                  key=lambda tr: all_terms.index(tr['term'])):
            lang_field = 'text_' + translation['lang_code']
            text_field_items[lang_field].append(translation['term_translation'])

        for key, value in six.iteritems(text_field_items):
            search_data[key] = ' '.join(value)

        return search_data

    def before_search(self, search_params):
        lang_set = set(self.LANGS)

        try:
            current_lang = request.environ['CKAN_LANG']
        except TypeError as err:
            if str(err) == ('No object (name: request) has been registered '
                            'for this thread'):
                # This happens when this code gets called as part of a paster
                # command rather than as part of an HTTP request.
                current_lang = config.get('ckan.locale_default')
            else:
                raise
        except KeyError:
            current_lang = config.get('ckan.locale_default')

        # fallback to default locale if locale not in supported langs
        if current_lang not in lang_set:
            current_lang = config.get('ckan.locale_default')
        # fallback to english if default locale is not supported
        if current_lang not in lang_set:
            current_lang = 'en'
        # treat current lang differently so remove from set
        lang_set.remove(current_lang)

        # weight current lang more highly
        query_fields = 'title_%s^8 text_%s^4' % (current_lang, current_lang)

        for lang in lang_set:
            query_fields += ' title_%s^2 text_%s' % (lang, lang)

        search_params['qf'] = query_fields

        return search_params

    def after_search(self, search_results, search_params):
facets = search_results.get('search_facets') if not facets: return search_results desired_lang_code = request.environ['CKAN_LANG'] fallback_lang_code = config.get('ckan.locale_default', 'en') # Look up translations for all of the facets in one db query. terms = set() for facet in facets.values(): for item in facet['items']: terms.add(item['display_name']) translations = get_action('term_translation_show')( {'model': ckan.model}, {'terms': terms, 'lang_codes': (desired_lang_code, fallback_lang_code)}) # Replace facet display names with translated ones. for facet in facets.values(): for item in facet['items']: matching_translations = [translation for translation in translations if translation['term'] == item['display_name'] and translation['lang_code'] == desired_lang_code] if not matching_translations: matching_translations = [translation for translation in translations if translation['term'] == item['display_name'] and translation['lang_code'] == fallback_lang_code] if matching_translations: assert len(matching_translations) == 1 item['display_name'] = ( matching_translations[0]['term_translation']) return search_results def before_view(self, dataset_dict): # Translate any selected search facets (e.g. if we are rendering a # group read page or the dataset index page): lookup translations of # all the terms in c.fields (c.fields contains the selected facets) # and save them in c.translated_fields where the templates can # retrieve them later. desired_lang_code = request.environ['CKAN_LANG'] fallback_lang_code = config.get('ckan.locale_default', 'en') try: fields = c.fields except AttributeError: return translate_data_dict(dataset_dict) terms = [value for param, value in fields] translations = get_action('term_translation_show')( {'model': ckan.model}, {'terms': terms, 'lang_codes': (desired_lang_code, fallback_lang_code)}) c.translated_fields = {} for param, value in fields: matching_translations = [translation for translation in translations if translation['term'] == value and translation['lang_code'] == desired_lang_code] if not matching_translations: matching_translations = [translation for translation in translations if translation['term'] == value and translation['lang_code'] == fallback_lang_code] if matching_translations: assert len(matching_translations) == 1 translation = matching_translations[0]['term_translation'] c.translated_fields[(param, value)] = translation # Now translate the fields of the dataset itself. return translate_data_dict(dataset_dict) class MultilingualGroup(SingletonPlugin): '''The MultilingualGroup plugin translates group names and other group fields on group read pages and on the group index page. For example on the page /de/group/david the title "Dave's Books" at the top of the page might be translated to "Dave's Bucher". Datasets are also shown on group pages, but these are translated by the MultilingualDataset plugin. ''' implements(IGroupController, inherit=True) implements(IOrganizationController, inherit=True) def before_view(self, data_dict): translated_data_dict = translate_data_dict(data_dict) return translated_data_dict class MultilingualTag(SingletonPlugin): '''The MultilingualTag plugin translates tag names on tag read pages and on the tag index page. For example on the page /de/tag/tolstoy the title "Tag: tolstoy" at the top of the page might be translated to "Tag: Tolstoi". Datasets are also shown on tag pages, but these are translated by the MultilingualDataset plugin. 
    '''
    implements(ITagController, inherit=True)

    def before_view(self, data_dict):
        translated_data_dict = translate_data_dict(data_dict)
        return translated_data_dict

class MultilingualResource(SingletonPlugin):
    '''The MultilingualResource plugin translates the selected resource's
    name and description on the resource preview page.

    '''
    implements(IResourceController, inherit=True)

    def before_show(self, data_dict):
        translated_data_dict = translate_resource_data_dict(data_dict)
        return translated_data_dict
# Translate the unselected search facets.
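# before_view and after_search above repeat the same two-step lookup: try the
# desired language first, then fall back to the default locale. A minimal
# standalone sketch of that selection logic, assuming rows shaped like the
# output of term_translation_show; the helper name pick_translation is
# hypothetical, not part of CKAN.
def pick_translation(translations, term, desired_lang, fallback_lang):
    for lang in (desired_lang, fallback_lang):
        matches = [t['term_translation'] for t in translations
                   if t['term'] == term and t['lang_code'] == lang]
        if matches:
            return matches[0]
    return term  # no translation found: keep the original term

rows = [{'term': 'books', 'lang_code': 'de', 'term_translation': 'Bücher'}]
print(pick_translation(rows, 'books', 'de', 'en'))  # -> Bücher
print(pick_translation(rows, 'books', 'fr', 'en'))  # -> books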
inject.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import

#::: modules
import numpy as np
import os, sys
import ellc
from transitleastsquares import catalog_info
import astropy.constants as ac
import astropy.units as u
import lightkurve as lk
import pandas as pd

np.random.seed(42)

#::: load data and set the units correctly
TIC_ID = 85400193    # TIC_ID of our candidate
lcf = lk.search_lightcurvefile('TIC ' + str(TIC_ID), mission="tess").download_all()
ab, mass, massmin, massmax, radius, radiusmin, radiusmax = catalog_info(TIC_ID=TIC_ID)

# units for ellc
rstar = radius * u.R_sun
mstar = mass * u.M_sun
# mass and radius for the TLS
#rstar=radius
#mstar=mass
mstar_min = mass - massmin
mstar_max = mass + massmax
rstar_min = radius - radiusmin
rstar_max = radius + radiusmax

# uncomment the following lines to check that the parameters used are correct.
#print('\n STELLAR PROPERTIES FOR THE SIGNAL SEARCH')
#print('================================================\n')
#print('limb-darkening estimates using quadratic LD (a,b)=', ab)
#print('mass =', format(mstar,'0.5f'))
#print('mass_min =', format(mstar_min,'0.5f'))
#print('mass_max =', format(mstar_max,'0.5f'))
#print('radius =', format(rstar,'0.5f'))
#print('radius_min =', format(rstar_min,'0.5f'))
#print('radius_max =', format(rstar_max,'0.5f'))

lc = lcf.PDCSAP_FLUX.stitch().remove_nans()  # remove the NaNs
lc_new = lk.LightCurve(time=lc.time, flux=lc.flux, flux_err=lc.flux_err)
clean = lc_new.remove_outliers(sigma_lower=float('inf'), sigma_upper=3)  # remove outliers above 3 sigma
flux0 = clean.flux
time = clean.time
flux_err = clean.flux_err

#period_maximum=(max(time)-min(time))/2.
#time, flux0 = np.genfromtxt('TESS_phot.csv', delimiter=',', unpack=True)
#rstar = 0.211257 * 41.46650444642    # in Rearth

#::: make model
def make_model(epoch, period, rplanet):
    #a = (7.495e-6 * period**2)**(1./3.)*u.au    # in AU
    P1 = period * u.day
    # semi-major axis via Kepler's third law
    a = np.cbrt((ac.G * mstar * P1**2) / (4 * np.pi**2)).to(u.au)
    #print("radius_1 =", rstar.to(u.au) / a)    # star radius converted from AU into units of a
    #print("radius_2 =", rplanet.to(u.au) / a)
    texpo = 2. / 60. / 24.  # 2-minute cadence expressed in days
    #print("T_expo = ", texpo, "dy")
    #tdur = t14(R_s=radius, M_s=mass, P=period, small_planet=False)    # typical duration of a small planet in this star
    #print("transit_duration= ", tdur*24*60, "min")
    model = ellc.lc(
        t_obs = time,
        radius_1 = rstar.to(u.au) / a,  # star radius converted from AU into units of a
        radius_2 = rplanet.to(u.au) / a,  # planet radius converted from Rearth (equatorial) into AU, then into units of a
        sbratio = 0,
        incl = 90,
        light_3 = 0,
        t_zero = epoch,
        period = period,
        a = None,
        q = 1e-6,
        f_c = None,
        f_s = None,
        ldc_1 = [0.2755, 0.5493],
        ldc_2 = None,
        gdc_1 = None,
        gdc_2 = None,
        didt = None,
        domdt = None,
        rotfac_1 = 1,
        rotfac_2 = 1,
        hf_1 = 1.5,
        hf_2 = 1.5,
        bfac_1 = None,
        bfac_2 = None,
        heat_1 = None,
        heat_2 = None,
        lambda_1 = None,
        lambda_2 = None,
        vsini_1 = None,
        vsini_2 = None,
        t_exp = texpo,
        n_int = None,
        grid_1 = 'default',
        grid_2 = 'default',
        ld_1 = 'quad',
        ld_2 = None,
        shape_1 = 'sphere',
        shape_2 = 'sphere',
        spots_1 = None,
        spots_2 = None,
        exact_grav = False,
        verbose = 1)
    flux_t = flux0 + model - 1.
    if model[0] > 0:
        # ellc produced a valid light curve: keep the injected flux
        flux = flux_t
        flux_err_model = flux_err
        time_custom = time
    else:
        # model generation failed: return empty arrays so this grid point is skipped
        flux = []
        time_custom = []
        flux_err_model = []
    return time_custom, flux, flux_err_model

#minutes=10
#print(len(time))
#print(min(time),max(time))
#bins=len(time)*2./minutes
#bin_width = (bin_edges[1] - bin_edges[0])
#bin_centers = bin_edges[1:] - bin_width/2
#print('RMS PDCSAP flux (ppm): ',np.std(flux0[~np.isnan(flux0)])*1e6)
#print('RMS model (ppm): ',np.std(flux[~np.isnan(flux)])*1e6)
#print('RMS 10min bin detrended (ppm): ',np.std(bin_means[~np.isnan(bin_means)])*1e6)

#fig, (ax1,ax2,ax3) = plt.subplots(3, 1, figsize=(10,5), constrained_layout=True)
##ax1
#ax1.plot(time, flux0, linewidth=0.05 ,color='black', alpha=0.4)
##ax1.legend(bbox_to_anchor=(0.85, 0.95), loc=2, borderaxespad=0.,fontsize=8)
#ax1.set_ylabel("Normalized flux")
#ax1.set_xlim(1766,1769)
##ax2
#ax2.plot(time, flux0, linewidth=0.05 ,color='black', alpha=0.4)
##ax2.plot(time, model, linewidth=0.9 ,color='firebrick', alpha=1)
#ax2.errorbar(time, model, marker='.', markersize=2, color='firebrick', alpha=1, linestyle='none')
#ax2.set_ylabel("Normalized flux")
#ax2.set_xlim(1766,1769)
##ax3
#ax3.plot(time, flux, linewidth=0.1 ,color='teal', alpha=0.5)
#ax3.errorbar(bin_centers, bin_means, marker='.', markersize=4, color='darkorange', alpha=1, linestyle='none')
#ax3.set_ylabel("Normalized flux")
#ax3.set_xlabel("Time (days)")
#ax3.set_xlim(1766,1769)
#plt.savefig('model.png', dpi=200)

def logprint(*text):
#    print(*text)
    original = sys.stdout
    with open(
            os.path.join('tls/' + 'P = ' + str(period) + ' days, Rp = ' + str(rplanet) + '.log'),
            'a') as f:
        sys.stdout = f
        print(*text)
    sys.stdout = original

#::: iterate through grid of periods and rplanet
out_dir = "/home/pozuelos/martin/curves"
if not os.path.isdir(out_dir):
    os.mkdir(out_dir)
max_period = 10
min_period = 0.5
for period in np.arange(min_period, max_period, 0.5):
    for t0 in np.arange(time[60], time[60] + period - 0.1, period / 5):
        for rplanet in np.arange(4, 0.65, -0.1):
            rplanet = np.around(rplanet, decimals=2) * u.R_earth
            print('\n')
            print('P = ' + str(period) + ' days, Rp = ' + str(rplanet) + ", T0 = " + str(t0))
            time_model, flux_model, flux_err_model = make_model(t0, period, rplanet)
            file_name = os.path.join(out_dir,
                                     'P' + str(period) + '_R' + str(rplanet.value) + '_' + str(t0) + '.csv')
            lc_df = pd.DataFrame(columns=['#time', 'flux', 'flux_err'])
            lc_df['#time'] = time_model
            lc_df['flux'] = flux_model
            lc_df['flux_err'] = flux_err_model
            lc_df.to_csv(file_name, index=False)
#print(bins) #bin_means, bin_edges, binnumber = stats.binned_statistic(time, flux, statistic='mean', bins=bins) #bin_stds, _, _ = stats.binned_statistic(time, flux, statistic='std', bins=bins)
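# A note on the semi-major axis arithmetic in make_model above: ellc takes
# radius_1 and radius_2 in units of the semi-major axis a, and a follows from
# Kepler's third law, a = (G * M_star * P^2 / (4 pi^2))^(1/3). A small
# self-contained check with illustrative values (1 M_sun, 3-day period), not
# taken from the TIC target above:
import numpy as np
import astropy.units as u
import astropy.constants as ac

mstar_demo = 1.0 * u.M_sun
P_demo = 3.0 * u.day
a_demo = np.cbrt((ac.G * mstar_demo * P_demo**2) / (4 * np.pi**2)).to(u.au)
print(a_demo)                              # ~0.04 au
print((1.0 * u.R_sun).to(u.au) / a_demo)   # radius_1 for a Sun-sized star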
pump_curve.py
""" ## Pump curve fitting and drawing - Establish an equation for the pump curve from measured points on the curve in the pump's data sheet - Get the coefficients of the 2nd order polynomial describing the pump curve and determined via curve fitting - Draw the pump curve in a diagram """ from typing import List, Tuple, Dict, Optional import numpy as np import quantities as qty from nummath.interpolation import PolyFit from nummath.graphing2 import LineGraph class PumpCurve: def __init__(self, dest_units: Dict[str, str]): """ Create *PumpCurve* object. **Parameters:** - `dest_units`: (*Dict[str, str]*)<br> The measuring units in which the pump curve will be expressed. Keys: + 'flow_rate' + 'pressure' """ self._meas_points: List[Tuple[qty.VolumeFlowRate, qty.Pressure]] = [] self._dest_units: Dict[str, str] = dest_units self._coefficients: Optional[np.array] = None def add_measuring_points(self, points: List[Tuple[float, float]], units: Dict[str, str]): """ Add some data points taken from the pump curve in the data sheet. This will execute the curve fitting algorithm that approaches the pump curve with a 2nd order polynomial. **Parameters:** - `points`: (*List[Tuple[float, float]]*)<br> List of tuples. The 1st element of the tuple is flow rate, the 2nd element is pressure. - `units`: (*Dict[str, str]*)<br> Dictionary that contains the measuring units in which the values of the data points are expressed. Keys: + 'flow_rate' + 'pressure' """ self._meas_points = [ (qty.VolumeFlowRate(V, units['flow_rate']), qty.Pressure(p, units['pressure'])) for V, p in points ] self._curve_fitting() def _curve_fitting(self): pf = PolyFit( x_data=[V(self._dest_units['flow_rate']) for V, _ in self._meas_points], y_data=[p(self._dest_units['pressure']) for _, p in self._meas_points], m=2 ) self._coefficients = pf.solve() def get_coefficients(self, units: Optional[Dict[str, str]] = None) -> Optional[List[float]]: """ Get the coefficients of the 2nd order polynomial describing the pump curve. **Parameters:** - `units`: (*Optional[[Dict[str, str]]*)<br> Optional dictionary that contains the measuring units in which the returned coefficients must be expressed. Default is None, which means that the coefficients will be returned expressed in the measuring units passed in at the instantiation of the *PumpCurve* object. Keys: + 'flow_rate' + 'pressure' """ if units is not None: p_src = qty.Pressure(1.0, self._dest_units['pressure']) V_src = qty.VolumeFlowRate(1.0, self._dest_units['flow_rate']) p_des = p_src(units['pressure']) V_des = V_src(units['flow_rate']) else: p_des = 1.0 V_des = 1.0 a0 = self._coefficients[0] * p_des a1 = self._coefficients[1] * (p_des / V_des) a2 = self._coefficients[2] * (p_des / V_des ** 2) return [a0, a1, a2] def set_coefficients(self, coeff: Tuple[float, float, float], units: Dict[str, str]): """ Set the known coefficients of the 2nd order polynomial describing the pump curve. **Parameters:** - `coeff`: (*Tuple[float, float, float]*)<br> Tuple of 3 floats: a0, a1 and a2 as in the equation dp_pump = a0 + a1 * V + a2 * V **2 - `units`: (*Dict[str, str]*)<br> Dictionary that contains the measuring units in which the pump coefficients are expressed. 
        Keys:
            + 'flow_rate'
            + 'pressure'
        """
        p_src = qty.Pressure(1.0, units['pressure'])
        V_src = qty.VolumeFlowRate(1.0, units['flow_rate'])
        p_des = p_src(self._dest_units['pressure'])
        V_des = V_src(self._dest_units['flow_rate'])
        a0 = coeff[0] * p_des
        a1 = coeff[1] * (p_des / V_des)
        a2 = coeff[2] * (p_des / V_des ** 2)
        self._coefficients = np.array([a0, a1, a2])

    def create_pump_curve(self, V_initial: qty.VolumeFlowRate, V_final: qty.VolumeFlowRate, num: int = 50):
        """
        Calculate the pump curve between an initial and final flow rate.

        **Parameters:**

        - `V_initial`: (*quantities.VolumeFlowRate*) = initial flow rate
        - `V_final`: (*quantities.VolumeFlowRate*) = final flow rate
        - `num`: (*int*) = number of calculation points (default = 50)

        **Returns:** (*Tuple[np.ndarray, np.ndarray]*)
        Tuple with 1st element a numpy array of the flow rates and 2nd element a numpy array of the
        corresponding pressures, both expressed in the desired measuring units set at instantiation
        of the *PumpCurve*-object.
        """
        V_i = V_initial(self._dest_units['flow_rate'])
        V_f = V_final(self._dest_units['flow_rate'])
        V = np.linspace(V_i, V_f, num, endpoint=True)
        a0 = self._coefficients[0]
        a1 = self._coefficients[1]
        a2 = self._coefficients[2]
""" Draw the calculated pump curve. **Parameters:** - `V_initial`: (*quantities.VolumeFlowRate*) = initial flow rate - `V_final`: (*quantities.VolumeFlowRate*) = final flow rate - `kwargs`: optional keyword arguments + `fig_size`: (*Tuple[float, float]*) = the width and height of the figure in inches + `dpi`: (*int*) = dots per inch of the figure + `num`: (*int*) = number of calculated points to draw + `V_step`: (*quantities.VolumeFlowRate*) = step between ticks on the flow rate axis of the diagram + `V_max`: (*quantities.VolumeFlowRate*) = the maximum flow rate shown on the axis + `p_step`: (*quantities.Pressure*) = step between ticks on the pressure axis of the diagram + `p_max`: (*quantities.Pressure*) = maximum pressure shown on the axis + `working_point`: (*Tuple[qty.VolumeFlowRate, qty.Pressure]*) = working point of the pump (shown as a red dot on the diagram) **Returns:** (*nummath.graphing2.LineGraph*)<br> Call show() on the returned *LineGraph* object to show the diagram. """ if self._coefficients is not None: fig_size: Tuple[int, int] = kwargs.get('fig_size', (6, 4)) dpi: int = kwargs.get('dpi', 96) num: int = kwargs.get('num', 50) V_step: qty.VolumeFlowRate = kwargs.get('V_step') V_max: qty.VolumeFlowRate = kwargs.get('V_max') p_step: qty.Pressure = kwargs.get('p_step') p_max: qty.Pressure = kwargs.get('p_max') working_point: Tuple[qty.VolumeFlowRate, qty.Pressure] = kwargs.get('working_point') V, p = self.create_pump_curve(V_initial, V_final, num) graph = LineGraph(fig_size=fig_size, dpi=dpi) graph.add_dataset(name="pump curve", x1_data=V, y1_data=p) if self._meas_points: graph.add_dataset( name="measured points", x1_data=[V(self._dest_units['flow_rate']) for V, _ in self._meas_points], y1_data=[p(self._dest_units['pressure']) for _, p in self._meas_points], layout={'marker': 'o', 'linestyle': 'None'} ) if working_point: graph.add_dataset( name="working point", x1_data=working_point[0](self._dest_units['flow_rate']), y1_data=working_point[1](self._dest_units['pressure']), layout={'marker': 'o', 'linestyle': 'None', 'color': 'red'} ) graph.x1.set_title(f'flow rate [{self._dest_units["flow_rate"]}]') if V_max is not None and V_step is not None: graph.x1.scale( lim_down=0.0, lim_up=V_max(self._dest_units['flow_rate']), step_size=V_step(self._dest_units['flow_rate']) ) graph.y1.set_title(f'pressure [{self._dest_units["pressure"]}]') if p_max is not None and p_step is not None: graph.y1.scale( lim_down=0.0, lim_up=p_max(self._dest_units['pressure']), step_size=p_step(self._dest_units['pressure']) ) return graph def pump_head(self, V: qty.VolumeFlowRate) -> qty.Pressure: """ Get the pump head (*quantities.Pressure*) if the flow rate (*quantities.VolumeFlowRate*) is given. 
""" a0 = self._coefficients[0] a1 = self._coefficients[1] a2 = self._coefficients[2] V = V(self._dest_units['flow_rate']) return qty.Pressure(a0 + a1 * V + a2 * V ** 2, self._dest_units['pressure']) if __name__ == '__main__': pump_curve = PumpCurve(dest_units={'flow_rate': 'L/s', 'pressure': 'bar'}) pump_curve.add_measuring_points( points=[(0.0, 60.0), (2.4, 52.0), (4.2, 48.0), (6.0, 36.0)], units={'flow_rate': 'm^3/h', 'pressure': 'm'} ) coeff1 = pump_curve.get_coefficients(units={'pressure': 'Pa', 'flow_rate': 'm^3/s'}) print(coeff1) coeff2 = pump_curve.get_coefficients(units={'pressure': 'bar', 'flow_rate': 'L/s'}) print(coeff2) graph_ = pump_curve.draw_pump_curve( V_initial=qty.VolumeFlowRate(0.0, 'm^3/h'), V_final=qty.VolumeFlowRate(7.2, 'm^3/h'), fig_size=(10, 8), dpi=150, num=100, V_max=qty.VolumeFlowRate(3.0, 'L/s'), V_step=qty.VolumeFlowRate(0.5, 'L/s'), p_max=qty.Pressure(8.0, 'bar'), p_step=qty.Pressure(2.0, 'bar') ) graph_.show()
p = a0 + a1 * V + a2 * V ** 2 return V, p def draw_pump_curve(self, V_initial: qty.VolumeFlowRate, V_final: qty.VolumeFlowRate, **kwargs):
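# How get_coefficients and set_coefficients rescale units: if pressure scales
# by a factor k_p and flow rate by k_V, then dp = a0 + a1*V + a2*V**2 becomes
# dp' = (a0*k_p) + (a1*k_p/k_V)*V' + (a2*k_p/k_V**2)*V'**2, which is exactly
# the a0/a1/a2 scaling applied in both methods. A quick numeric check with
# hypothetical coefficients (bar and L/s converted to Pa and m^3/s):
import math

a0, a1, a2 = 6.0, -0.1, -0.02        # pressure in bar, flow rate in L/s
k_p = 1e5                            # 1 bar = 1e5 Pa
k_V = 1e-3                           # 1 L/s = 1e-3 m^3/s
b0, b1, b2 = a0 * k_p, a1 * k_p / k_V, a2 * k_p / k_V**2
V_Ls, V_m3s = 2.0, 2.0 * k_V
lhs = (a0 + a1 * V_Ls + a2 * V_Ls**2) * k_p
rhs = b0 + b1 * V_m3s + b2 * V_m3s**2
assert math.isclose(lhs, rhs, rel_tol=1e-9)  # same pressure in both unit systems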
kubernetes.go
package kube

import (
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
	"k8s.io/client-go/tools/clientcmd"
)

type Kubernetes struct {
	RestConfig *rest.Config
	Clientset  kubernetes.Interface
}

func NewByInClusterConfig() (*Kubernetes, error) {
	config, err := rest.InClusterConfig()
	if err != nil {
		return nil, err
	}
	return newKubernetes(config)
}
		return nil, err
	}
	return newKubernetes(config)
}

func newClient(config *rest.Config) (kubernetes.Interface, error) {
	return kubernetes.NewForConfig(config)
}

// newKubernetes builds the Kubernetes wrapper from an already-resolved REST
// config. (Renamed from `new` to avoid shadowing the Go builtin.)
func newKubernetes(config *rest.Config) (*Kubernetes, error) {
	clientset, err := newClient(config)
	if err != nil {
		return nil, err
	}
	return &Kubernetes{
		RestConfig: config,
		Clientset:  clientset,
	}, nil
}
func NewByKubeConfig(configPath string) (*Kubernetes, error) { config, err := clientcmd.BuildConfigFromFlags("", configPath) if err != nil {
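# For comparison, the same two-path construction (in-cluster config versus a
# kubeconfig file) sketched in Python with the official `kubernetes` client
# package; the K8s wrapper class here is illustrative, not part of that package.
from kubernetes import client, config

class K8s:
    def __init__(self, api_client):
        self.core_v1 = client.CoreV1Api(api_client)

def new_by_in_cluster_config():
    config.load_incluster_config()   # reads the pod's service-account credentials
    return K8s(client.ApiClient())

def new_by_kube_config(config_path):
    config.load_kube_config(config_file=config_path)  # reads a kubeconfig file
    return K8s(client.ApiClient())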
keyboard.rs
//! Driver for a PS/2 keyboard.
//!
//! Only supports PS/2 Scan Code Set 2, with a US English (104-key) layout,
//! which is the only layout provided below. See [the
//! OSDev Wiki](https://wiki.osdev.org/PS/2_Keyboard).
//!
//! Requires that you sample a pin in an interrupt routine and shift in the
//! bit. We don't sample the pin in this library, as that makes testing
//! difficult, and it means you have to make this object a global static mut
//! that the interrupt can access, which is unsafe.

// ****************************************************************************
//
// Imports
//
// ****************************************************************************

use std::marker::PhantomData;

// ****************************************************************************
//
// Public Types
//
// ****************************************************************************

#[derive(Debug)]
pub struct Keyboard<T>
where
    T: KeyboardLayout,
{
    register: u16,
    num_bits: u8,
    decode_state: DecodeState,
    modifiers: Modifiers,
    _layout: PhantomData<T>,
}

#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum Error {
    UnknownKeyCode,
}

#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum KeyCode {
    Escape, F1, F2, F3, F4, F5, F6, F7, F8, F9, F10, F11, F12, ScrollLock,
    BackTick, Key1, Key2, Key3, Key4, Key5, Key6, Key7, Key8, Key9, Key0,
    Minus, Equals, Backspace,
    Tab, Q, W, E, R, T, Y, U, I, O, P, LeftSquareBracket, RightSquareBracket, Backslash,
    CapsLock, A, S, D, F, G, H, J, K, L, SemiColon, Quote, Enter,
    ShiftLeft, Z, X, C, V, B, N, M, Comma, Fullstop, Slash, ShiftRight,
    ControlLeft, WindowsLeft, AltLeft, Spacebar, AltRight, WindowsRight, Menus, RightControl,
    Insert, Home, PageUp, Delete, End, PageDown,
    UpArrow, LeftArrow, DownArrow, RightArrow,
    NumpadLock, NumpadSlash, NumpadStar, NumpadMinus,
    Numpad7, Numpad8, Numpad9, NumpadPlus,
    Numpad4, Numpad5, Numpad6,
    Numpad1, Numpad2, Numpad3,
    Numpad0, NumpadPeriod, NumpadEnter,
}

#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum KeyState {
    Up,
    Down,
}

#[derive(Debug, PartialEq, Eq, Clone)]
pub struct KeyEvent {
    pub code: KeyCode,
    pub state: KeyState,
}

pub trait KeyboardLayout {
    /// Convert a Scan Code Set 2 byte to our `KeyCode` enum
    fn map_scancode(code: u8) -> Result<KeyCode, Error>;

    /// Convert a Scan Code Set 2 extended byte (prefixed E0) to our `KeyCode`
    /// enum.
    fn map_extended_scancode(code: u8) -> Result<KeyCode, Error>;

    /// Convert a `KeyCode` enum to a Unicode character, if possible.
    /// KeyCode::A maps to `DecodedKey::Unicode('a')` (or `DecodedKey::Unicode('A')`
    /// if shifted), while KeyCode::AltLeft maps to `DecodedKey::RawKey(KeyCode::AltLeft)`
    fn map_keycode(keycode: KeyCode, modifiers: &Modifiers) -> DecodedKey;
}

#[derive(Debug)]
pub struct Modifiers {
    pub lshift: bool,
    pub rshift: bool,
    pub numlock: bool,
    pub capslock: bool,
    pub alt_gr: bool,
}

#[derive(Debug, PartialEq, Eq, Copy, Clone)]
pub enum DecodedKey {
    RawKey(KeyCode),
    Unicode(char),
}

// ****************************************************************************
//
// Public Data
//
// ****************************************************************************

// None

// ****************************************************************************
//
// Private Types
//
// ****************************************************************************

#[derive(Debug, Copy, Clone)]
enum DecodeState {
    Start,
    Extended,
    Release,
    ExtendedRelease,
}

// ****************************************************************************
//
// Private Data
//
// ****************************************************************************

const EXTENDED_KEY_CODE: u8 = 0xE0;
const KEY_RELEASE_CODE: u8 = 0xF0;

// ****************************************************************************
//
// Public Functions and Implementation
//
// ****************************************************************************

impl<T> Keyboard<T>
where
    T: KeyboardLayout,
{
    /// Make a new Keyboard object with the given layout.
    pub const fn new() -> Keyboard<T> {
        Keyboard {
            register: 0,
            num_bits: 0,
            decode_state: DecodeState::Start,
            modifiers: Modifiers {
                lshift: false,
                rshift: false,
                numlock: true,
                capslock: false,
                alt_gr: false,
            },
            _layout: PhantomData,
        }
    }

    /// Clears the bit register.
    ///
    /// Call this when there is a timeout reading data from the keyboard.
    pub fn clear(&mut self) {
        self.register = 0;
        self.num_bits = 0;
        self.decode_state = DecodeState::Start;
    }

    /// Processes an 8-bit byte from the keyboard.
    ///
    /// We assume the start, stop and parity bits have been processed and
    /// verified.
    pub fn add_byte(&mut self, byte: u8) -> Result<Option<KeyEvent>, Error> {
        let st = self.decode_state;
        self.clear();
        match st {
            DecodeState::Start => {
                // All keys start here
                let code = match byte {
                    KEY_RELEASE_CODE => {
                        self.decode_state = DecodeState::Release;
                        return Ok(None);
                    }
                    EXTENDED_KEY_CODE => {
                        self.decode_state = DecodeState::Extended;
                        return Ok(None);
                    }
                    e => T::map_scancode(e)?,
                };
                Ok(Some(KeyEvent::new(code, KeyState::Down)))
            }
            DecodeState::Extended => {
                // These are extended keys
                let code = match byte {
                    KEY_RELEASE_CODE => {
                        self.decode_state = DecodeState::ExtendedRelease;
                        return Ok(None);
                    }
                    e => T::map_extended_scancode(e)?,
                };
                Ok(Some(KeyEvent::new(code, KeyState::Down)))
            }
            DecodeState::Release => {
                // These are 'normal' keys being released
                let code = T::map_scancode(byte)?;
                Ok(Some(KeyEvent::new(code, KeyState::Up)))
            }
            DecodeState::ExtendedRelease => {
                // These are extended keys being released
                let code = T::map_extended_scancode(byte)?;
                Ok(Some(KeyEvent::new(code, KeyState::Up)))
            }
        }
    }

    /// Processes a `KeyEvent` returned from `add_byte` and produces a decoded
    /// key.
    ///
    /// For example, the KeyEvent for pressing the '5' key on your keyboard
    /// gives a DecodedKey of unicode character '5', unless the shift key is
    /// held in which case you get the unicode character '%'.
pub fn process_keyevent(&mut self, ev: KeyEvent) -> Option<DecodedKey> { match ev { KeyEvent { code: KeyCode::ShiftLeft, state: KeyState::Down } => { self.modifiers.lshift = true; None } KeyEvent { code: KeyCode::ShiftRight, state: KeyState::Down } => { self.modifiers.rshift = true; None } KeyEvent { code: KeyCode::ShiftLeft, state: KeyState::Up } => { self.modifiers.lshift = false; None } KeyEvent { code: KeyCode::ShiftRight, state: KeyState::Up} => { self.modifiers.rshift = false; None } KeyEvent { code: KeyCode::CapsLock, state: KeyState::Down } => { self.modifiers.capslock = !self.modifiers.capslock; None } KeyEvent { code: KeyCode::NumpadLock, state: KeyState::Down } => { self.modifiers.numlock = !self.modifiers.numlock; None } KeyEvent { code: KeyCode::AltRight, state: KeyState::Down } => { self.modifiers.alt_gr = true; None } KeyEvent { code: KeyCode::AltRight, state: KeyState::Up } => { self.modifiers.alt_gr = false; None } KeyEvent { code: c, state: KeyState::Down } => { Some(T::map_keycode(c, &self.modifiers)) } _ => None, } } } impl KeyEvent { pub fn new(code: KeyCode, state: KeyState) -> KeyEvent { KeyEvent { code, state } } } // **************************************************************************** // // Keyboard Layouts // // **************************************************************************** impl Modifiers { pub fn is_shifted(&self) -> bool { (self.lshift | self.rshift) ^ self.capslock } } pub mod layouts { use super::*; /// A standard United States 101-key (or 104-key including Windows keys) keyboard. /// Has a 1-row high Enter key, with Backslash above. pub struct Us104Key; impl KeyboardLayout for Us104Key { fn map_scancode(code: u8) -> Result<KeyCode, Error> { match code { 0x01 => Ok(KeyCode::F9), // 01 0x03 => Ok(KeyCode::F5), // 03 0x04 => Ok(KeyCode::F3), // 04 0x05 => Ok(KeyCode::F1), // 05 0x06 => Ok(KeyCode::F2), // 06 0x07 => Ok(KeyCode::F12), // 07 0x09 => Ok(KeyCode::F10), // 09 0x0A => Ok(KeyCode::F8), // 0A 0x0B => Ok(KeyCode::F6), // 0B 0x0C => Ok(KeyCode::F4), // 0C 0x0D => Ok(KeyCode::Tab), // 0D 0x0E => Ok(KeyCode::BackTick), // 0E 0x11 => Ok(KeyCode::AltLeft), // 11 0x12 => Ok(KeyCode::ShiftLeft), // 12 0x14 => Ok(KeyCode::ControlLeft), // 14 0x15 => Ok(KeyCode::Q), // 15 0x16 => Ok(KeyCode::Key1), // 16 0x1A => Ok(KeyCode::Z), // 1A 0x1B => Ok(KeyCode::S), // 1B 0x1C => Ok(KeyCode::A), // 1C 0x1D => Ok(KeyCode::W), // 1D 0x1e => Ok(KeyCode::Key2), // 1e 0x21 => Ok(KeyCode::C), // 21 0x22 => Ok(KeyCode::X), // 22 0x23 => Ok(KeyCode::D), // 23 0x24 => Ok(KeyCode::E), // 24 0x25 => Ok(KeyCode::Key4), // 25 0x26 => Ok(KeyCode::Key3), // 26 0x29 => Ok(KeyCode::Spacebar), // 29 0x2A => Ok(KeyCode::V), // 2A 0x2B => Ok(KeyCode::F), // 2B 0x2C => Ok(KeyCode::T), // 2C 0x2D => Ok(KeyCode::R), // 2D 0x2E => Ok(KeyCode::Key5), // 2E 0x31 => Ok(KeyCode::N), // 31 0x32 => Ok(KeyCode::B), // 32 0x33 => Ok(KeyCode::H), // 33 0x34 => Ok(KeyCode::G), // 34 0x35 => Ok(KeyCode::Y), // 35 0x36 => Ok(KeyCode::Key6), // 36 0x3A => Ok(KeyCode::M), // 3A 0x3B => Ok(KeyCode::J), // 3B 0x3C => Ok(KeyCode::U), // 3C 0x3D => Ok(KeyCode::Key7), // 3D 0x3E => Ok(KeyCode::Key8), // 3E 0x41 => Ok(KeyCode::Comma), // 41 0x42 => Ok(KeyCode::K), // 42 0x43 => Ok(KeyCode::I), // 43 0x44 => Ok(KeyCode::O), // 44 0x45 => Ok(KeyCode::Key0), // 45 0x46 => Ok(KeyCode::Key9), // 46 0x49 => Ok(KeyCode::Fullstop), // 49 0x4A => Ok(KeyCode::Slash), // 4A 0x4B => Ok(KeyCode::L), // 4B 0x4C => Ok(KeyCode::SemiColon), // 4C 0x4D => Ok(KeyCode::P), // 4D 0x4E => Ok(KeyCode::Minus), // 4E 
0x52 => Ok(KeyCode::Quote), // 52 0x54 => Ok(KeyCode::LeftSquareBracket), // 54 0x55 => Ok(KeyCode::Equals), // 55 0x58 => Ok(KeyCode::CapsLock), // 58 0x59 => Ok(KeyCode::ShiftRight), // 59 0x5A => Ok(KeyCode::Enter), // 5A 0x5B => Ok(KeyCode::RightSquareBracket), // 5B 0x5D => Ok(KeyCode::Backslash), // 5D 0x66 => Ok(KeyCode::Backspace), // 66 0x69 => Ok(KeyCode::Numpad1), // 69 0x6B => Ok(KeyCode::Numpad4), // 6B 0x6C => Ok(KeyCode::Numpad7), // 6C 0x70 => Ok(KeyCode::Numpad0), // 70 0x71 => Ok(KeyCode::NumpadPeriod), // 71 0x72 => Ok(KeyCode::Numpad2), // 72 0x73 => Ok(KeyCode::Numpad5), // 73 0x74 => Ok(KeyCode::Numpad6), // 74 0x75 => Ok(KeyCode::Numpad8), // 75 0x76 => Ok(KeyCode::Escape), // 76 0x77 => Ok(KeyCode::NumpadLock), // 77 0x78 => Ok(KeyCode::F11), // 78 0x79 => Ok(KeyCode::NumpadPlus), // 79 0x7A => Ok(KeyCode::Numpad3), // 7A 0x7B => Ok(KeyCode::NumpadMinus), // 7B 0x7C => Ok(KeyCode::NumpadStar), // 7C 0x7D => Ok(KeyCode::Numpad9), // 7D 0x7E => Ok(KeyCode::ScrollLock), // 7E 0x83 => Ok(KeyCode::F7), // 83 _ => Err(Error::UnknownKeyCode), } } fn
(code: u8) -> Result<KeyCode, Error> { match code { 0x11 => Ok(KeyCode::AltRight), // E011 0x14 => Ok(KeyCode::RightControl), // E014 0x1F => Ok(KeyCode::WindowsLeft), // E01F 0x27 => Ok(KeyCode::WindowsRight), // E027 0x2F => Ok(KeyCode::Menus), // E02F 0x4A => Ok(KeyCode::NumpadSlash), // E04A 0x5A => Ok(KeyCode::NumpadEnter), // E05A 0x69 => Ok(KeyCode::End), // E069 0x6B => Ok(KeyCode::LeftArrow), // E06B 0x6C => Ok(KeyCode::Home), // E06C 0x70 => Ok(KeyCode::Insert), // E070 0x71 => Ok(KeyCode::Delete), // E071 0x72 => Ok(KeyCode::DownArrow), // E072 0x74 => Ok(KeyCode::RightArrow), // E074 0x75 => Ok(KeyCode::UpArrow), // E075 0x7A => Ok(KeyCode::PageDown), // E07A 0x7D => Ok(KeyCode::PageUp), // E07D _ => Err(Error::UnknownKeyCode), } } fn map_keycode(keycode: KeyCode, modifiers: &Modifiers) -> DecodedKey { match keycode { KeyCode::BackTick => { if modifiers.is_shifted() { DecodedKey::Unicode('~') } else { DecodedKey::Unicode('`') } } KeyCode::Escape => DecodedKey::Unicode(0x1B.into()), KeyCode::Key1 => if modifiers.is_shifted() { DecodedKey::Unicode('!') } else { DecodedKey::Unicode('1') }, KeyCode::Key2 => if modifiers.is_shifted() { DecodedKey::Unicode('@') } else { DecodedKey::Unicode('2') }, KeyCode::Key3 => if modifiers.is_shifted() { DecodedKey::Unicode('#') } else { DecodedKey::Unicode('3') }, KeyCode::Key4 => { if modifiers.is_shifted() { DecodedKey::Unicode('$') } else { DecodedKey::Unicode('4') } } KeyCode::Key5 => if modifiers.is_shifted() { DecodedKey::Unicode('%') } else { DecodedKey::Unicode('5') }, KeyCode::Key6 => if modifiers.is_shifted() { DecodedKey::Unicode('^') } else { DecodedKey::Unicode('6') }, KeyCode::Key7 => if modifiers.is_shifted() { DecodedKey::Unicode('&') } else { DecodedKey::Unicode('7') }, KeyCode::Key8 => if modifiers.is_shifted() { DecodedKey::Unicode('*') } else { DecodedKey::Unicode('8') }, KeyCode::Key9 => if modifiers.is_shifted() { DecodedKey::Unicode('(') } else { DecodedKey::Unicode('9') }, KeyCode::Key0 => if modifiers.is_shifted() { DecodedKey::Unicode(')') } else { DecodedKey::Unicode('0') }, KeyCode::Minus => if modifiers.is_shifted() { DecodedKey::Unicode('_') } else { DecodedKey::Unicode('-') }, KeyCode::Equals => if modifiers.is_shifted() { DecodedKey::Unicode('+') } else { DecodedKey::Unicode('=') }, KeyCode::Backspace => DecodedKey::Unicode(0x08.into()), KeyCode::Tab => DecodedKey::Unicode(0x09.into()), KeyCode::Q => if modifiers.is_shifted() { DecodedKey::Unicode('Q') } else { DecodedKey::Unicode('q') }, KeyCode::W => if modifiers.is_shifted() { DecodedKey::Unicode('W') } else { DecodedKey::Unicode('w') }, KeyCode::E => if modifiers.is_shifted() { DecodedKey::Unicode('E') } else { DecodedKey::Unicode('e') }, KeyCode::R => if modifiers.is_shifted() { DecodedKey::Unicode('R') } else { DecodedKey::Unicode('r') }, KeyCode::T => if modifiers.is_shifted() { DecodedKey::Unicode('T') } else { DecodedKey::Unicode('t') }, KeyCode::Y => if modifiers.is_shifted() { DecodedKey::Unicode('Y') } else { DecodedKey::Unicode('y') }, KeyCode::U => if modifiers.is_shifted() { DecodedKey::Unicode('U') } else { DecodedKey::Unicode('u') }, KeyCode::I => if modifiers.is_shifted() { DecodedKey::Unicode('I') } else { DecodedKey::Unicode('i') }, KeyCode::O => if modifiers.is_shifted() { DecodedKey::Unicode('O') } else { DecodedKey::Unicode('o') }, KeyCode::P => if modifiers.is_shifted() { DecodedKey::Unicode('P') } else { DecodedKey::Unicode('p') }, KeyCode::LeftSquareBracket => if modifiers.is_shifted() { DecodedKey::Unicode('{') } else { 
DecodedKey::Unicode('[') }, KeyCode::RightSquareBracket => if modifiers.is_shifted() { DecodedKey::Unicode('}') } else { DecodedKey::Unicode(']') }, KeyCode::Backslash => if modifiers.is_shifted() { DecodedKey::Unicode('|') } else { DecodedKey::Unicode('\\') }, KeyCode::A => if modifiers.is_shifted() { DecodedKey::Unicode('A') } else { DecodedKey::Unicode('a') }, KeyCode::S => if modifiers.is_shifted() { DecodedKey::Unicode('S') } else { DecodedKey::Unicode('s') }, KeyCode::D => if modifiers.is_shifted() { DecodedKey::Unicode('D') } else { DecodedKey::Unicode('d') }, KeyCode::F => if modifiers.is_shifted() { DecodedKey::Unicode('F') } else { DecodedKey::Unicode('f') }, KeyCode::G => if modifiers.is_shifted() { DecodedKey::Unicode('G') } else { DecodedKey::Unicode('g') }, KeyCode::H => if modifiers.is_shifted() { DecodedKey::Unicode('H') } else { DecodedKey::Unicode('h') }, KeyCode::J => if modifiers.is_shifted() { DecodedKey::Unicode('J') } else { DecodedKey::Unicode('j') }, KeyCode::K => if modifiers.is_shifted() { DecodedKey::Unicode('K') } else { DecodedKey::Unicode('k') }, KeyCode::L => if modifiers.is_shifted() { DecodedKey::Unicode('L') } else { DecodedKey::Unicode('l') }, KeyCode::SemiColon => if modifiers.is_shifted() { DecodedKey::Unicode(':') } else { DecodedKey::Unicode(';') }, KeyCode::Quote => if modifiers.is_shifted() { DecodedKey::Unicode('"') } else { DecodedKey::Unicode('\'') }, // Enter gives LF, not CRLF or CR KeyCode::Enter => DecodedKey::Unicode(10.into()), KeyCode::Z => if modifiers.is_shifted() { DecodedKey::Unicode('Z') } else { DecodedKey::Unicode('z') }, KeyCode::X => if modifiers.is_shifted() { DecodedKey::Unicode('X') } else { DecodedKey::Unicode('x') }, KeyCode::C => if modifiers.is_shifted() { DecodedKey::Unicode('C') } else { DecodedKey::Unicode('c') }, KeyCode::V => if modifiers.is_shifted() { DecodedKey::Unicode('V') } else { DecodedKey::Unicode('v') }, KeyCode::B => if modifiers.is_shifted() { DecodedKey::Unicode('B') } else { DecodedKey::Unicode('b') }, KeyCode::N => if modifiers.is_shifted() { DecodedKey::Unicode('N') } else { DecodedKey::Unicode('n') }, KeyCode::M => if modifiers.is_shifted() { DecodedKey::Unicode('M') } else { DecodedKey::Unicode('m') }, KeyCode::Comma => if modifiers.is_shifted() { DecodedKey::Unicode('<') } else { DecodedKey::Unicode(',') }, KeyCode::Fullstop => if modifiers.is_shifted() { DecodedKey::Unicode('>') } else { DecodedKey::Unicode('.') }, KeyCode::Slash => if modifiers.is_shifted() { DecodedKey::Unicode('?') } else { DecodedKey::Unicode('/') }, KeyCode::Spacebar => DecodedKey::Unicode(' '), KeyCode::Delete => DecodedKey::Unicode(127.into()), KeyCode::NumpadSlash => DecodedKey::Unicode('/'), KeyCode::NumpadStar => DecodedKey::Unicode('*'), KeyCode::NumpadMinus => DecodedKey::Unicode('-'), KeyCode::Numpad7 => { if modifiers.numlock { DecodedKey::Unicode('7') } else { DecodedKey::RawKey(KeyCode::Home) } } KeyCode::Numpad8 => { if modifiers.numlock { DecodedKey::Unicode('8') } else { DecodedKey::RawKey(KeyCode::UpArrow) } } KeyCode::Numpad9 => { if modifiers.numlock { DecodedKey::Unicode('9') } else { DecodedKey::RawKey(KeyCode::PageUp) } } KeyCode::NumpadPlus => DecodedKey::Unicode('+'), KeyCode::Numpad4 => { if modifiers.numlock { DecodedKey::Unicode('4') } else { DecodedKey::RawKey(KeyCode::LeftArrow) } } KeyCode::Numpad5 => DecodedKey::Unicode('5'), KeyCode::Numpad6 => { if modifiers.numlock { DecodedKey::Unicode('6') } else { DecodedKey::RawKey(KeyCode::RightArrow) } } KeyCode::Numpad1 => { if modifiers.numlock { 
DecodedKey::Unicode('1') } else { DecodedKey::RawKey(KeyCode::End) } } KeyCode::Numpad2 => { if modifiers.numlock { DecodedKey::Unicode('2') } else { DecodedKey::RawKey(KeyCode::DownArrow) } } KeyCode::Numpad3 => { if modifiers.numlock { DecodedKey::Unicode('3') } else { DecodedKey::RawKey(KeyCode::PageDown) } } KeyCode::Numpad0 => { if modifiers.numlock { DecodedKey::Unicode('0') } else { DecodedKey::RawKey(KeyCode::Insert) } } KeyCode::NumpadPeriod => { if modifiers.numlock { DecodedKey::Unicode('.') } else { DecodedKey::Unicode(127.into()) } } KeyCode::NumpadEnter => DecodedKey::Unicode(10.into()), k => DecodedKey::RawKey(k), } } } }
map_extended_scancode
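# The add_byte state machine above, restated as a short Python sketch so the
# E0/F0 handling is easy to follow: 0xE0 announces an extended key, 0xF0 a
# release, and E0 F0 xx an extended release. The two scancode maps are stubbed
# with one entry each; real tables are in Us104Key above.
EXTENDED, RELEASE = 0xE0, 0xF0

def decode(byte_stream, map_plain, map_ext):
    state = 'start'
    for b in byte_stream:
        if b == EXTENDED and state == 'start':
            state = 'extended'
        elif b == RELEASE and state in ('start', 'extended'):
            state = 'release' if state == 'start' else 'ext_release'
        else:
            table = map_ext if state in ('extended', 'ext_release') else map_plain
            updown = 'up' if state in ('release', 'ext_release') else 'down'
            yield (table[b], updown)
            state = 'start'

plain = {0x1C: 'A'}          # 1C is 'A' in Scan Code Set 2
ext = {0x75: 'UpArrow'}      # E0 75 is the up arrow
print(list(decode([0x1C, 0xF0, 0x1C, 0xE0, 0x75, 0xE0, 0xF0, 0x75], plain, ext)))
# [('A', 'down'), ('A', 'up'), ('UpArrow', 'down'), ('UpArrow', 'up')]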
qingcloud.go
// +-------------------------------------------------------------------------
// | Copyright (C) 2021 ShanHe, Inc.
// +-------------------------------------------------------------------------
// | Licensed under the Apache License, Version 2.0 (the "License");
// | you may not use this work except in compliance with the License.
// | You may obtain a copy of the License in the LICENSE file, or at:
// |
// | http://www.apache.org/licenses/LICENSE-2.0
// |
// | Unless required by applicable law or agreed to in writing, software
// | distributed under the License is distributed on an "AS IS" BASIS,
// | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// | See the License for the specific language governing permissions and
// | limitations under the License.
// +-------------------------------------------------------------------------

// Package service provides ShanHe Service API (API Version 2013-08-30)
package service

import (
	"github.com/shanhe-nsccjn/shanhe-sdk-go/config"
	"github.com/shanhe-nsccjn/shanhe-sdk-go/logger"
	"github.com/shanhe-nsccjn/shanhe-sdk-go/request"
	"github.com/shanhe-nsccjn/shanhe-sdk-go/request/data"
)

// ShanHeService: ShanHe provides a platform that makes the delivery of
// computing resources simpler, more efficient and more reliable, and even
// more environmentally friendly.
type ShanHeService struct {
	Config     *config.Config
	Properties *ShanHeServiceProperties
}

type ShanHeServiceProperties struct {
}

func Init(c *config.Config) (*ShanHeService, error)
// Documentation URL: https://docsv3.shanhe.com/api/zone/describe_zones.html func (s *ShanHeService) DescribeZones(i *DescribeZonesInput) (*DescribeZonesOutput, error) { if i == nil { i = &DescribeZonesInput{} } o := &data.Operation{ Config: s.Config, Properties: s.Properties, APIName: "DescribeZones", RequestMethod: "GET", } x := &DescribeZonesOutput{} r, err := request.New(o, i, x) if err != nil { return nil, err } err = r.Send() if err != nil { return nil, err } return x, err } type DescribeZonesInput struct { Status []*string `json:"status" name:"status" location:"params"` Zones []*string `json:"zones" name:"zones" location:"params"` } func (v *DescribeZonesInput) Validate() error { return nil } type DescribeZonesOutput struct { Message *string `json:"message" name:"message"` Action *string `json:"action" name:"action" location:"elements"` RetCode *int `json:"ret_code" name:"ret_code" location:"elements"` TotalCount *int `json:"total_count" name:"total_count" location:"elements"` ZoneSet []*Zone `json:"zone_set" name:"zone_set" location:"elements"` }
{ properties := &ShanHeServiceProperties{} logger.SetLevel(c.LogLevel) return &ShanHeService{Config: c, Properties: properties}, nil }
classify.py
import time
import numpy as np
import pandas as pd
import torch
import torch.nn as nn
import itertools
import collections
import matplotlib.pyplot as plt

# Read in data
df = pd.read_csv("Chinese_Names_Corpus_Gender(120W).txt", header=2)
df = df[df.sex != "未知"]  # drop names whose gender is unknown ("未知")
names = df["dict"].values

# Compute character frequency
chars = [list(name) for name in names]
chars_flatten = list(itertools.chain(*chars))
freq = collections.Counter(chars_flatten)
freq = pd.DataFrame(freq.items(), columns=["char", "freq"])
freq = freq.sort_values(by="freq", ascending=False)

# Power law (?)
char_rank = np.arange(freq.shape[0])
char_freq = freq["freq"].values
plt.plot(char_rank, char_freq)
plt.plot(np.log(1.0 + char_rank), np.log(char_freq))

# Prepare data: keep only names written entirely with the most frequent characters
dict_size = 500
vocab = list(freq["char"].values[:dict_size])
vocab_set = set(vocab)
filtered = list(filter(lambda item: set(item[1]).issubset(vocab_set), enumerate(names)))
ind = [idx for idx, name in filtered]
dat = df.iloc[ind].copy()  # copy to avoid SettingWithCopyWarning on the next line
dat["y"] = np.where(dat["sex"] == "男", 0, 1)  # 0 = male ("男"), 1 = female

# Split training set and test set
# One-hot encoding
def char2index(char):
    return vocab.index(char)

def name2index(name):
    return [char2index(char) for char in name]

def name2tensor(name):
    tensor = torch.zeros(len(name), 1, dict_size)
    for i, char in enumerate(name):
        tensor[i, 0, char2index(char)] = 1
    return tensor

char2index("李")
name2index("李兴")
name2tensor("李兴")

# Build model
class RNN(nn.Module):
    def __init__(self, input_size, hidden_size):
        super(RNN, self).__init__()
        self.hidden_size = hidden_size
        self.i2h = nn.Linear(input_size + hidden_size, hidden_size)
        self.h2o = nn.Linear(hidden_size, 1)

    def forward(self, input, hidden):
        combined = torch.cat((input, hidden), dim=1)
        hidden = torch.tanh(self.i2h(combined))
        output = torch.sigmoid(self.h2o(hidden))
        return output, hidden

    def init_hidden(self):
        return torch.zeros(1, self.hidden_size)

# n_hidden = 128
# rnn = RNN(dict_size, n_hidden)
# input = name2tensor("李兴")
# hidden = rnn.init_hidden()
# output, next_hidden = rnn(input[0], hidden)

np.random.seed(123)
torch.random.manual_seed(123)

n = train.shape[0]
n_hidden = 64
nepoch = 5
bs = 100

rnn = RNN(dict_size, n_hidden)
opt = torch.optim.Adam(rnn.parameters(), lr=0.001)
train_ind = np.arange(n)
losses = []

t1 = time.time()
for k in range(nepoch):
    np.random.shuffle(train_ind)
    # Update on mini-batches
    for j in range(0, n, bs):
        # Create mini-batch
        mb = train.iloc[train_ind[j:(j + bs)]]
        mb_size = mb.shape[0]
        loss = 0.0
        # Loop over each name in the mini-batch
        for i in range(mb_size):
            name = mb["dict"].values[i]
            input = name2tensor(name)
            hidden = rnn.init_hidden()
            y = mb["y"].values[i]
            for s in range(input.shape[0]):
                output, hidden = rnn(input[s], hidden)
            loss = loss - y * torch.log(output) - (1.0 - y) * torch.log(1.0 - output)
        loss = loss / mb_size
        opt.zero_grad()
        loss.backward()
        opt.step()
        losses.append(loss.item())
        if j // bs % 10 == 0:
            print(f"epoch {k}, batch {j // bs}, loss = {loss.item()}")
t2 = time.time()
print(t2 - t1)

plt.plot(losses)

# Prediction on test set
ntest = test.shape[0]
true_label = test["y"].values
pred = np.zeros(ntest)
rnn.eval()
for i in range(ntest):
    input = name2tensor(test["dict"].values[i])
    hidden = rnn.init_hidden()
    with torch.no_grad():
        for s in range(input.shape[0]):
            output, hidden = rnn(input[s], hidden)
        pred[i] = output.item()
    if i % 100 == 0:
        print(f"processed {i}")

loss = -np.mean(true_label * np.log(pred) + (1.0 - true_label) * np.log(1.0 - pred))
print(loss)
pred_label = (pred > 0.5).astype(int)
print(np.mean(pred_label == true_label))

# Random cases
np.random.seed(123)
torch.random.manual_seed(123)
ind = np.random.choice(ntest, 10)
ypred = 1 * (pred[ind] > 0.5)
print(test.iloc[ind])
print(test["y"].values[ind])
print(ypred)

names = ["李", "李雪", "李雪峰"]
for name in names:
    input = name2tensor(name)
    hidden = rnn.init_hidden()
    with torch.no_grad():
        for s in range(input.shape[0]):
            output, hidden = rnn(input[s], hidden)
        pred = output.item()
    print(f"name: {name}, P(female) = {pred}")
# train = dat.sample(frac=0.8, random_state=123)
# test = dat.drop(train.index)
# NOTE: sampling the test set independently as below can overlap with the
# training set; the commented lines above give a properly disjoint split.
train = dat.sample(n=10000, random_state=123)
test = dat.sample(n=1000, random_state=321)
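# The hand-rolled cell in RNN.forward computes h' = tanh(W [x; h]) one
# character at a time. torch.nn.RNN implements the same tanh recurrence and
# consumes the whole (seq_len, batch, input_size) tensor from name2tensor in
# one call; a minimal sketch with the sizes used above (dict_size 500,
# hidden 64), using a placeholder one-hot input rather than real names:
import torch
import torch.nn as nn

rnn_builtin = nn.RNN(input_size=500, hidden_size=64)  # default nonlinearity is tanh
x = torch.zeros(2, 1, 500)   # a 2-character name, one-hot placeholder
x[0, 0, 10] = 1.0
x[1, 0, 42] = 1.0
out, h_n = rnn_builtin(x)    # out: (2, 1, 64), h_n: (1, 1, 64)
p_female = torch.sigmoid(nn.Linear(64, 1)(h_n[-1]))
print(p_female.shape)        # torch.Size([1, 1])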
common.py
# # # Copyright (C) 2014 Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED # TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Common functions for tool scripts. """ import logging import os import time from io import StringIO import OpenSSL from ganeti import constants from ganeti import errors from ganeti import pathutils from ganeti import utils from ganeti import serializer from ganeti import ssconf from ganeti import ssh def
(parser, opts, args):
  """Verifies options and arguments for correctness.

  """
  if args:
    parser.error("No arguments are expected")

  return opts


def _VerifyCertificateStrong(cert_pem, error_fn,
                             _check_fn=utils.CheckNodeCertificate):
  """Verifies a certificate against the local node daemon certificate.

  Includes elaborate tests of encodings etc., and returns formatted
  certificate.

  @type cert_pem: string
  @param cert_pem: Certificate and key in PEM format
  @type error_fn: callable
  @param error_fn: function to call in case of an error
  @rtype: string
  @return: Formatted key and certificate

  """
  try:
    cert = \
      OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, cert_pem)
  except Exception as err:
    raise error_fn("(stdin) Unable to load certificate: %s" % err)

  try:
    key = OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM, cert_pem)
  except OpenSSL.crypto.Error as err:
    raise error_fn("(stdin) Unable to load private key: %s" % err)

  # Check certificate with given key; this detects cases where the key given on
  # stdin doesn't match the certificate also given on stdin
  try:
    utils.X509CertKeyCheck(cert, key)
  except OpenSSL.SSL.Error:
    raise error_fn("(stdin) Certificate is not signed with given key")

  # Standard checks, including check against an existing local certificate
  # (no-op if that doesn't exist)
  _check_fn(cert)

  key_encoded = OpenSSL.crypto.dump_privatekey(OpenSSL.crypto.FILETYPE_PEM, key)
  cert_encoded = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM,
                                                 cert)
  complete_cert_encoded = key_encoded + cert_encoded
  if cert_pem != complete_cert_encoded.decode('ascii'):
    logging.error("The certificate differs after being reencoded. Please"
                  " renew the certificates cluster-wide to prevent future"
                  " inconsistencies.")

  # Format for storing on disk
  buf = StringIO()
  buf.write(cert_pem)
  return buf.getvalue()


def _VerifyCertificateSoft(cert_pem, error_fn,
                           _check_fn=utils.CheckNodeCertificate):
  """Verifies a certificate against the local node daemon certificate.

  @type cert_pem: string
  @param cert_pem: Certificate in PEM format (no key)

  """
  try:
    OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM, cert_pem)
  except OpenSSL.crypto.Error:
    pass
  else:
    raise error_fn("No private key may be given")

  try:
    cert = \
      OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, cert_pem)
  except Exception as err:
    raise errors.X509CertError("(stdin)",
                               "Unable to load certificate: %s" % err)

  _check_fn(cert)


def VerifyCertificateSoft(data, error_fn, _verify_fn=_VerifyCertificateSoft):
  """Verifies cluster certificate if existing.

  @type data: dict
  @type error_fn: callable
  @param error_fn: function to call in case of an error
  @rtype: string
  @return: Formatted key and certificate

  """
  cert = data.get(constants.SSHS_NODE_DAEMON_CERTIFICATE)
  if cert:
    _verify_fn(cert, error_fn)


def VerifyCertificateStrong(data, error_fn,
                            _verify_fn=_VerifyCertificateStrong):
  """Verifies cluster certificate. Throws error when not existing.

  @type data: dict
  @type error_fn: callable
  @param error_fn: function to call in case of an error
  @rtype: string
  @return: Formatted key and certificate

  """
  cert = data.get(constants.NDS_NODE_DAEMON_CERTIFICATE)
  if not cert:
    raise error_fn("Node daemon certificate must be specified")

  return _verify_fn(cert, error_fn)


def VerifyClusterName(data, error_fn, cluster_name_constant,
                      _verify_fn=ssconf.VerifyClusterName):
  """Verifies cluster name.
  @type data: dict

  """
  name = data.get(cluster_name_constant)
  if name:
    _verify_fn(name)
  else:
    raise error_fn("Cluster name must be specified")

  return name


def VerifyHmac(data, error_fn):
  """Verifies the presence of the HMAC secret.

  @type data: dict

  """
  hmac = data.get(constants.NDS_HMAC)
  if not hmac:
    raise error_fn("HMAC key must be provided")

  return hmac


def LoadData(raw, data_check):
  """Parses and verifies input data.

  @rtype: dict

  """
  result = None
  try:
    result = serializer.LoadAndVerifyJson(raw, data_check)
    logging.debug("Received data: %s", serializer.DumpJson(result))
  except Exception as e:
    logging.warning("Received data is not valid JSON: %s.", str(raw))
    raise e
  return result


def GenerateRootSshKeys(key_type, key_bits, error_fn, _suffix="",
                        _homedir_fn=None):
  """Generates root's SSH keys for this node.

  """
  ssh.InitSSHSetup(key_type, key_bits, error_fn=error_fn,
                   _homedir_fn=_homedir_fn, _suffix=_suffix)


def GenerateClientCertificate(
    data, error_fn, client_cert=pathutils.NODED_CLIENT_CERT_FILE,
    signing_cert=pathutils.NODED_CERT_FILE):
  """Regenerates the client certificate of the node.

  @type data: dict
  @param data: the JSON-formatted input data

  """
  if not os.path.exists(signing_cert):
    raise error_fn("The signing certificate '%s' cannot be found."
                   % signing_cert)

  # TODO: This sets the serial number to the number of seconds
  # since epoch. This is technically not a correct serial number
  # (in the way SSL is supposed to be used), but it serves us well
  # enough for now, as we don't have any infrastructure for keeping
  # track of the number of signed certificates yet.
  serial_no = int(time.time())

  # The hostname of the node is provided with the input data.
  hostname = data.get(constants.NDS_NODE_NAME)
  if not hostname:
    raise error_fn("No hostname found.")

  utils.GenerateSignedSslCert(client_cert, serial_no, signing_cert,
                              common_name=hostname)
VerifyOptions
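# Every verifier in this module calls `raise error_fn("...")`, so error_fn is
# expected to build an exception rather than raise one. A hedged sketch of a
# matching callable, reusing the errors.X509CertError pattern from the soft
# check above (errors is already imported in this module); make_error_fn is
# hypothetical, not part of ganeti.
def make_error_fn(source):
    def error_fn(message):
        return errors.X509CertError(source, message)
    return error_fn

# usage sketch: VerifyCertificateStrong(data, make_error_fn("(stdin)"))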
index.ts
export default function SocketMain(socket: any) { console.log('A user has connected'); socket.on('disconnect', () => { console.log('A user has disconnected');
}); }
crud.py
class Queue:
    def __init__(self):
        self.data = []

    def __str__(self):
        values = map(str, self.data)
        return ' <- '.join(values)

    def enque(self, val):
        self.data.append(val)

    def deque(self):
        return self.data.pop(0)

    def peek(self):
        return self.data[0]

    def is_empty(self):
        return self.data == []

    def clear(self):
        # reset to an empty list; setting data to None would break is_empty()
        # and the demo below
        self.data = []
queue.enque(0) queue.enque(1) queue.enque(2) queue.enque(3) print('queue: ') print(queue) print('dequeing', queue.deque()) print('queue: ') print(queue) print('Peeked data', queue.peek()) print('Clearing out') queue.clear() print('queue is empty' if queue.is_empty() else 'queue is not empty')
queue = Queue()
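# list.pop(0) in deque() above shifts every remaining element, so each dequeue
# is O(n). collections.deque pops from the left in O(1); a sketch of the same
# interface on top of it (the FastQueue name is illustrative):
from collections import deque

class FastQueue:
    def __init__(self):
        self.data = deque()

    def enque(self, val):
        self.data.append(val)

    def deque(self):
        return self.data.popleft()

    def peek(self):
        return self.data[0]

    def is_empty(self):
        return not self.data

    def clear(self):
        self.data.clear()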
abtorrents.py
import ast import hashlib from urllib.parse import urljoin
class MainClass(XBT):
    URL = 'https://abtorrents.me/'
    USER_CLASSES = {
        'uploaded': [536870912000],
        'share_ratio': [1.5],
        'days': [90],
    }

    def build_workflow(self, entry, config):
        return [
            Work(
                url='/login.php?returnto=%2F',
                method='get',
                check_state=('network', NetworkState.SUCCEED),
            ),
            Work(
                url='/simpleCaptcha.php',
                method='get',
                check_state=('network', NetworkState.SUCCEED),
            ),
            Work(
                url='/takelogin.php',
                method='password',
                succeed_regex='Logout',
                check_state=('final', SignState.SUCCEED),
                is_base_content=True,
                response_urls=['/']
            )
        ]

    def sign_in_by_password(self, entry, config, work, last_content):
        login = entry['site_config'].get('login')
        if not login:
            entry.fail_with_prefix('Login data not found!')
            return
        last_content = ast.literal_eval(last_content)
        target = {'light bulb': '44c7285b', 'house': 'b9a403b9', 'musical note': '3a8441da',
                  'key': '2faefa2b', 'bug': 'c2ba10a5', 'heart': 'bed5a0e2',
                  'clock': '99d86267', 'world': 'ededf171'}[last_content['text']]
        # fetch each candidate image and fingerprint it until one matches the
        # expected hash for the named object
        for captcha_hash in last_content['images']:
            image_response = self._request(
                entry, 'get',
                urljoin(entry['url'], '/simpleCaptcha.php?hash=' + captcha_hash))
            if hashlib.shake_128(image_response.content).hexdigest(4) == target:
                break
        else:
            # no candidate image matched; do not submit a stale selection
            entry.fail_with_prefix('Captcha image not recognized!')
            return
        data = {
            'username': login['username'],
            'password': login['password'],
            'remember': 1,
            'captchaSelection': captcha_hash,
            'submitme': 'X',
            'returnto': '/'
        }
        login_response = self._request(entry, 'post', work.url, data=data)
        login_network_state = self.check_network_state(entry, work, login_response)
        if login_network_state != NetworkState.SUCCEED:
            return
        return login_response
from ..schema.site_base import Work, SignState, NetworkState from ..schema.xbt import XBT
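# How the captcha matching in sign_in_by_password works: shake_128 is a
# variable-length SHA-3 digest, and hexdigest(4) returns 4 bytes as 8 hex
# characters, the same shape as the hard-coded targets ('44c7285b', ...).
# A standalone check with an illustrative payload:
import hashlib

payload = b"example image bytes"
print(hashlib.shake_128(payload).hexdigest(4))  # 8-hex-char fingerprint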
provisioning.go
package cmd import ( "github.com/mikehaller/iot-suite-cli/iotsuite" "github.com/spf13/cobra" ) func
() { rootCmd.AddCommand(provisioningCmd) provisioningCmd.AddCommand(provisionCmd) provisioningCmd.AddCommand(myselfCmd) } var provisioningCmd = &cobra.Command{ Use: "provision", Short: "Provision devices", Long: `Device provisioning`, } var provisionCmd = &cobra.Command{ Use: "custom", Short: "Manually register a new device", Long: `Manually register a new device with custom parameters`, Run: func(cmd *cobra.Command, args []string) { // conf := iotsuite.ReadConfig() iotsuite.ReadConfig() // httpclient := iotsuite.InitOAuth(conf) // iotsuite.NewOAuthClient(httpclient,ClientName,TargetInstance) }, } var myselfCmd = &cobra.Command{ Use: "myself", Short: "Register this device", Long: `Register this device (eg using current MAC address as deviceId)`, Run: func(cmd *cobra.Command, args []string) { // conf := iotsuite.ReadConfig() iotsuite.ReadConfig() // httpclient := iotsuite.InitOAuth(conf) // iotsuite.NewOAuthClient(httpclient,ClientName,TargetInstance) }, }
init
tree-node.tsx
import type { ComputedRef } from 'vue';
import { computed, defineComponent, inject, PropType, renderSlot, toRefs, useSlots } from 'vue';
import { NODE_HEIGHT, USE_TREE_TOKEN } from '../const';
import { IInnerTreeNode, IUseTree } from '../composables/use-tree-types';
import DTreeNodeToggle from './tree-node-toggle';
import { Checkbox } from '../../../checkbox';
import DTreeNodeContent from './tree-node-content';
import useTreeNode from './use-tree-node';

export default defineComponent({
  name: 'DTreeNode',
  props: {
    data: {
      type: Object as PropType<IInnerTreeNode>,
      default: () => ({})
    },
    check: {
      // a plain boolean toggles the checkbox; the string modes drive
      // half-checked propagation ('upward' | 'both' are tested below)
      type: [Boolean, String] as PropType<boolean | 'upward' | 'both'>,
      default: false
    }
  },
  setup(props, { slots }) {
    const { data, check } = toRefs(props);
    const { toggleSelectNode, toggleCheckNode, toggleNode, getChildren } = inject(USE_TREE_TOKEN) as Partial<IUseTree>;
    const {
      nodeClass,
      nodeStyle,
      nodeContentClass,
      nodeVLineClass,
      nodeVLineStyle,
      nodeHLineClass,
    } = useTreeNode(data as ComputedRef<IInnerTreeNode>);

    const halfChecked = computed(() => {
      const children = getChildren?.(data.value) || [];
      const checkedChildren = children.filter((item: IInnerTreeNode) => item.checked);
      if (['upward', 'both'].includes(check.value as string)) {
        return checkedChildren.length > 0 && checkedChildren.length < children.length;
      } else {
        return false;
      }
    });
    return () => {
      const checkboxProps = {
        key: data.value?.id,
        disabled: data.value?.disableCheck,
        halfchecked: halfChecked.value,
        modelValue: data.value?.checked,
        'onUpdate:modelValue': () => {
          toggleCheckNode?.(data.value);
        },
        onClick: (event: MouseEvent) => {
          event.stopPropagation();
        },
      };

      return (
        <div class={nodeClass.value} style={nodeStyle.value}>
          <span class={nodeVLineClass.value} style={nodeVLineStyle.value}></span>
          <div
            class={nodeContentClass.value}
            onClick={() => {
              toggleSelectNode?.(data.value);
            }}>
            <span class={nodeHLineClass.value}></span>
            {
              slots.icon
                ? renderSlot(useSlots(), 'icon', { nodeData: data, toggleNode })
                : <DTreeNodeToggle data={data.value} />
            }
            <div class="devui-tree-node__content--value-wrapper" style={{ height: `${NODE_HEIGHT}px` }}>
              { check.value && <Checkbox {...checkboxProps} /> }
              {
                slots.default
                  ? renderSlot(useSlots(), 'default', { nodeData: data })
                  : <DTreeNodeContent data={data} />
              }
            </div>
          </div>
        </div>
      );
    };
  },
});