| prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
|---|---|
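Each row below is a raw fill-in-the-middle (FIM) example: the prompt wraps a source file in <|file_name|>, <|fim▁begin|>, <|fim▁hole|> and <|fim▁end|> markers, and the completion column holds the text that belongs at the hole. As a minimal sketch of how such a row could be taken apart (a hypothetical helper, assuming a row is simply a dict with "prompt" and "completion" keys and that the marker spellings match the ones used below):

PREFIX, HOLE, END = "<|fim▁begin|>", "<|fim▁hole|>", "<|fim▁end|>"

def split_fim(row):
    # Drop the <|file_name|>...<|end_file_name|> header, then cut the body at the hole marker.
    body = row["prompt"].split(PREFIX, 1)[1]
    prefix, rest = body.split(HOLE, 1)
    suffix = rest.split(END, 1)[0]
    # The completion column is the text that should fill the hole.
    return prefix, row["completion"], suffix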
<|file_name|>serverreflection.go<|end_file_name|><|fim▁begin|>/*
*
* Copyright 2016 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
//go:generate protoc --go_out=plugins=grpc:. grpc_reflection_v1alpha/reflection.proto
/*
Package reflection implements server reflection service.
The service implemented is defined in:
https://github.com/grpc/grpc/blob/master/src/proto/grpc/reflection/v1alpha/reflection.proto.
To register server reflection on a gRPC server:
import "google.golang.org/grpc/reflection"
s := grpc.NewServer()
pb.RegisterYourOwnServer(s, &server{})
// Register reflection service on gRPC server.
reflection.Register(s)
s.Serve(lis)
*/
package reflection // import "google.golang.org/grpc/reflection"
import (
"bytes"
"compress/gzip"
"fmt"
"io"
"io/ioutil"
"reflect"
"strings"
"github.com/golang/protobuf/proto"
dpb "github.com/golang/protobuf/protoc-gen-go/descriptor"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
rpb "google.golang.org/grpc/reflection/grpc_reflection_v1alpha"
)
type serverReflectionServer struct {
s *grpc.Server
// TODO add more cache if necessary
serviceInfo map[string]grpc.ServiceInfo // cache for s.GetServiceInfo()
}
// Register registers the server reflection service on the given gRPC server.
func Register(s *grpc.Server) {
rpb.RegisterServerReflectionServer(s, &serverReflectionServer{
s: s,
})
}
// protoMessage is used for type assertion on proto messages.
// Generated proto message implements function Descriptor(), but Descriptor()
// is not part of interface proto.Message. This interface is needed to
// call Descriptor().
type protoMessage interface {
Descriptor() ([]byte, []int)
}
// fileDescForType gets the file descriptor for the given type.
// The given type should be a proto message.
func (s *serverReflectionServer) fileDescForType(st reflect.Type) (*dpb.FileDescriptorProto, error) {
m, ok := reflect.Zero(reflect.PtrTo(st)).Interface().(protoMessage)
if !ok {
return nil, fmt.Errorf("failed to create message from type: %v", st)
}
enc, _ := m.Descriptor()
return s.decodeFileDesc(enc)
}
// decodeFileDesc does decompression and unmarshalling on the given
// file descriptor byte slice.
func (s *serverReflectionServer) decodeFileDesc(enc []byte) (*dpb.FileDescriptorProto, error) {
raw, err := decompress(enc)
if err != nil {
return nil, fmt.Errorf("failed to decompress enc: %v", err)
}
fd := new(dpb.FileDescriptorProto)
if err := proto.Unmarshal(raw, fd); err != nil {
return nil, fmt.Errorf("bad descriptor: %v", err)
}
return fd, nil
}
// decompress does gzip decompression.
func decompress(b []byte) ([]byte, error) {
r, err := gzip.NewReader(bytes.NewReader(b))
if err != nil {
return nil, fmt.Errorf("bad gzipped descriptor: %v", err)
}
out, err := ioutil.ReadAll(r)
if err != nil {
return nil, fmt.Errorf("bad gzipped descriptor: %v", err)
}
return out, nil
}
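// typeForName returns the reflect.Type of the proto message struct registered under the given name.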
func (s *serverReflectionServer) typeForName(name string) (reflect.Type, error) {
pt := proto.MessageType(name)
if pt == nil {
return nil, fmt.Errorf("unknown type: %q", name)
}
st := pt.Elem()
return st, nil
}
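// fileDescContainingExtension finds the file descriptor that defines the extension with number ext for the given message type.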
func (s *serverReflectionServer) fileDescContainingExtension(st reflect.Type, ext int32) (*dpb.FileDescriptorProto, error) {
m, ok := reflect.Zero(reflect.PtrTo(st)).Interface().(proto.Message)
if !ok {
return nil, fmt.Errorf("failed to create message from type: %v", st)
}
var extDesc *proto.ExtensionDesc
for id, desc := range proto.RegisteredExtensions(m) {
if id == ext {
extDesc = desc
break
}
}
if extDesc == nil {
return nil, fmt.Errorf("failed to find registered extension for extension number %v", ext)
}
return s.decodeFileDesc(proto.FileDescriptor(extDesc.Filename))
}
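// allExtensionNumbersForType returns the field numbers of all extensions registered for the given message type.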
func (s *serverReflectionServer) allExtensionNumbersForType(st reflect.Type) ([]int32, error) {
m, ok := reflect.Zero(reflect.PtrTo(st)).Interface().(proto.Message)
if !ok {
return nil, fmt.Errorf("failed to create message from type: %v", st)
}
exts := proto.RegisteredExtensions(m)
out := make([]int32, 0, len(exts))
for id := range exts {
out = append(out, id)
}
return out, nil
}
// fileDescEncodingByFilename finds the file descriptor for given filename,
// does marshalling on it and returns the marshalled result.
func (s *serverReflectionServer) fileDescEncodingByFilename(name string) ([]byte, error) {
enc := proto.FileDescriptor(name)
if enc == nil {
return nil, fmt.Errorf("unknown file: %v", name)
}
fd, err := s.decodeFileDesc(enc)
if err != nil {
return nil, err
}
return proto.Marshal(fd)
}
// serviceMetadataForSymbol finds the metadata for name in s.serviceInfo.
// name should be a service name or a method name.
func (s *serverReflectionServer) serviceMetadataForSymbol(name string) (interface{}, error) {
if s.serviceInfo == nil {
s.serviceInfo = s.s.GetServiceInfo()
}
// Check if it's a service name.
if info, ok := s.serviceInfo[name]; ok {
return info.Metadata, nil
}
// Check if it's a method name.
pos := strings.LastIndex(name, ".")
// Not a valid method name.
if pos == -1 {
return nil, fmt.Errorf("unknown symbol: %v", name)
}
info, ok := s.serviceInfo[name[:pos]]
// Substring before last "." is not a service name.
if !ok {
return nil, fmt.Errorf("unknown symbol: %v", name)
}
// Search the method name in info.Methods.
var found bool
for _, m := range info.Methods {
if m.Name == name[pos+1:] {
found = true
break
}
}
if found {
return info.Metadata, nil
}
return nil, fmt.Errorf("unknown symbol: %v", name)
}
// parseMetadata finds the file descriptor bytes specified meta.
// For SupportPackageIsVersion4, m is the name of the proto file, we
// call proto.FileDescriptor to get the byte slice.
// For SupportPackageIsVersion3, m is a byte slice itself.
func parseMetadata(meta interface{}) ([]byte, bool) {
// Check if meta is the file name.
if fileNameForMeta, ok := meta.(string); ok {
return proto.FileDescriptor(fileNameForMeta), true
}
// Check if meta is the byte slice.
if enc, ok := meta.([]byte); ok {
return enc, true
}
return nil, false
}
// fileDescEncodingContainingSymbol finds the file descriptor containing the given symbol,
// does marshalling on it and returns the marshalled result.
// The given symbol can be a type, a service or a method.
func (s *serverReflectionServer) fileDescEncodingContainingSymbol(name string) ([]byte, error) {
var (
fd *dpb.FileDescriptorProto
)
// Check if it's a type name.
if st, err := s.typeForName(name); err == nil {
fd, err = s.fileDescForType(st)
if err != nil {
return nil, err
}
} else { // Check if it's a service name or a method name.
meta, err := s.serviceMetadataForSymbol(name)
// Metadata not found.
if err != nil {
return nil, err
}
// Metadata not valid.
enc, ok := parseMetadata(meta)
if !ok {
return nil, fmt.Errorf("invalid file descriptor for symbol: %v", name)<|fim▁hole|> return nil, err
}
}
return proto.Marshal(fd)
}
// fileDescEncodingContainingExtension finds the file descriptor containing given extension,
// does marshalling on it and returns the marshalled result.
func (s *serverReflectionServer) fileDescEncodingContainingExtension(typeName string, extNum int32) ([]byte, error) {
st, err := s.typeForName(typeName)
if err != nil {
return nil, err
}
fd, err := s.fileDescContainingExtension(st, extNum)
if err != nil {
return nil, err
}
return proto.Marshal(fd)
}
// allExtensionNumbersForTypeName returns all extension numbers for the given type.
func (s *serverReflectionServer) allExtensionNumbersForTypeName(name string) ([]int32, error) {
st, err := s.typeForName(name)
if err != nil {
return nil, err
}
extNums, err := s.allExtensionNumbersForType(st)
if err != nil {
return nil, err
}
return extNums, nil
}
// ServerReflectionInfo is the reflection service handler.
func (s *serverReflectionServer) ServerReflectionInfo(stream rpb.ServerReflection_ServerReflectionInfoServer) error {
for {
in, err := stream.Recv()
if err == io.EOF {
return nil
}
if err != nil {
return err
}
out := &rpb.ServerReflectionResponse{
ValidHost: in.Host,
OriginalRequest: in,
}
switch req := in.MessageRequest.(type) {
case *rpb.ServerReflectionRequest_FileByFilename:
b, err := s.fileDescEncodingByFilename(req.FileByFilename)
if err != nil {
out.MessageResponse = &rpb.ServerReflectionResponse_ErrorResponse{
ErrorResponse: &rpb.ErrorResponse{
ErrorCode: int32(codes.NotFound),
ErrorMessage: err.Error(),
},
}
} else {
out.MessageResponse = &rpb.ServerReflectionResponse_FileDescriptorResponse{
FileDescriptorResponse: &rpb.FileDescriptorResponse{FileDescriptorProto: [][]byte{b}},
}
}
case *rpb.ServerReflectionRequest_FileContainingSymbol:
b, err := s.fileDescEncodingContainingSymbol(req.FileContainingSymbol)
if err != nil {
out.MessageResponse = &rpb.ServerReflectionResponse_ErrorResponse{
ErrorResponse: &rpb.ErrorResponse{
ErrorCode: int32(codes.NotFound),
ErrorMessage: err.Error(),
},
}
} else {
out.MessageResponse = &rpb.ServerReflectionResponse_FileDescriptorResponse{
FileDescriptorResponse: &rpb.FileDescriptorResponse{FileDescriptorProto: [][]byte{b}},
}
}
case *rpb.ServerReflectionRequest_FileContainingExtension:
typeName := req.FileContainingExtension.ContainingType
extNum := req.FileContainingExtension.ExtensionNumber
b, err := s.fileDescEncodingContainingExtension(typeName, extNum)
if err != nil {
out.MessageResponse = &rpb.ServerReflectionResponse_ErrorResponse{
ErrorResponse: &rpb.ErrorResponse{
ErrorCode: int32(codes.NotFound),
ErrorMessage: err.Error(),
},
}
} else {
out.MessageResponse = &rpb.ServerReflectionResponse_FileDescriptorResponse{
FileDescriptorResponse: &rpb.FileDescriptorResponse{FileDescriptorProto: [][]byte{b}},
}
}
case *rpb.ServerReflectionRequest_AllExtensionNumbersOfType:
extNums, err := s.allExtensionNumbersForTypeName(req.AllExtensionNumbersOfType)
if err != nil {
out.MessageResponse = &rpb.ServerReflectionResponse_ErrorResponse{
ErrorResponse: &rpb.ErrorResponse{
ErrorCode: int32(codes.NotFound),
ErrorMessage: err.Error(),
},
}
} else {
out.MessageResponse = &rpb.ServerReflectionResponse_AllExtensionNumbersResponse{
AllExtensionNumbersResponse: &rpb.ExtensionNumberResponse{
BaseTypeName: req.AllExtensionNumbersOfType,
ExtensionNumber: extNums,
},
}
}
case *rpb.ServerReflectionRequest_ListServices:
if s.serviceInfo == nil {
s.serviceInfo = s.s.GetServiceInfo()
}
serviceResponses := make([]*rpb.ServiceResponse, 0, len(s.serviceInfo))
for n := range s.serviceInfo {
serviceResponses = append(serviceResponses, &rpb.ServiceResponse{
Name: n,
})
}
out.MessageResponse = &rpb.ServerReflectionResponse_ListServicesResponse{
ListServicesResponse: &rpb.ListServiceResponse{
Service: serviceResponses,
},
}
default:
return grpc.Errorf(codes.InvalidArgument, "invalid MessageRequest: %v", in.MessageRequest)
}
if err := stream.Send(out); err != nil {
return err
}
}
}<|fim▁end|>
|
}
fd, err = s.decodeFileDesc(enc)
if err != nil {
|
<|file_name|>nzbget.py<|end_file_name|><|fim▁begin|>from base64 import standard_b64encode
from datetime import timedelta
import re
import shutil
import socket
import traceback
import xmlrpclib
from couchpotato.core._base.downloader.main import DownloaderBase, ReleaseDownloadList
from couchpotato.core.helpers.encoding import ss, sp
from couchpotato.core.helpers.variable import tryInt, md5, cleanHost
from couchpotato.core.logger import CPLog
log = CPLog(__name__)
autoload = 'NZBGet'
class NZBGet(DownloaderBase):
protocol = ['nzb']
rpc = 'xmlrpc'
def download(self, data = None, media = None, filedata = None):
""" Send a torrent/nzb file to the downloader
:param data: dict returned from provider
Contains the release information
:param media: media dict with information
Used for creating the filename when possible
:param filedata: downloaded torrent/nzb filedata
The file gets downloaded in the searcher and sent to this function
This is done so failure checking happens before the downloader is used, and the downloader
doesn't need to worry about it
:return: boolean
A single failure returns False, but the downloader should log its own errors
"""
if not media: media = {}
if not data: data = {}
if not filedata:
log.error('Unable to get NZB file: %s', traceback.format_exc())
return False
log.info('Sending "%s" to NZBGet.', data.get('name'))
nzb_name = ss('%s.nzb' % self.createNzbName(data, media))
rpc = self.getRPC()
try:
if rpc.writelog('INFO', 'CouchPotato connected to drop off %s.' % nzb_name):
log.debug('Successfully connected to NZBGet')
else:
log.info('Successfully connected to NZBGet, but unable to send a message')
except socket.error:
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
return False
except xmlrpclib.ProtocolError as e:
if e.errcode == 401:
log.error('Password is incorrect.')
else:
log.error('Protocol Error: %s', e)
return False
if re.search(r"^0", rpc.version()):
xml_response = rpc.append(nzb_name, self.conf('category'), False, standard_b64encode(filedata.strip()))
else:
xml_response = rpc.append(nzb_name, self.conf('category'), tryInt(self.conf('priority')), False, standard_b64encode(filedata.strip()))
if xml_response:
log.info('NZB sent successfully to NZBGet')
nzb_id = md5(data['url']) # about as unique as they come ;)
couchpotato_id = "couchpotato=" + nzb_id
groups = rpc.listgroups()
file_id = [item['LastID'] for item in groups if item['NZBFilename'] == nzb_name]
confirmed = rpc.editqueue("GroupSetParameter", 0, couchpotato_id, file_id)
if confirmed:
log.debug('couchpotato parameter set in nzbget download')
return self.downloadReturnId(nzb_id)
else:
log.error('NZBGet could not add %s to the queue.', nzb_name)
return False
def test(self):
""" Check if connection works
:return: bool
"""
rpc = self.getRPC()
try:
if rpc.writelog('INFO', 'CouchPotato connected to test connection'):
log.debug('Successfully connected to NZBGet')
else:
log.info('Successfully connected to NZBGet, but unable to send a message')
except socket.error:
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
return False
except xmlrpclib.ProtocolError as e:
if e.errcode == 401:
log.error('Password is incorrect.')
else:
log.error('Protocol Error: %s', e)
return False
return True
def getAllDownloadStatus(self, ids):
""" Get status of all active downloads
:param ids: list of (mixed) downloader ids
Used to match the releases for this downloader as there could be
other downloaders active that it should ignore
:return: list of releases
"""
log.debug('Checking NZBGet download status.')
rpc = self.getRPC()
try:
if rpc.writelog('INFO', 'CouchPotato connected to check status'):
log.debug('Successfully connected to NZBGet')
else:
log.info('Successfully connected to NZBGet, but unable to send a message')
except socket.error:
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
return []
except xmlrpclib.ProtocolError as e:
if e.errcode == 401:
log.error('Password is incorrect.')
else:
log.error('Protocol Error: %s', e)
return []
# Get NZBGet data
try:
status = rpc.status()
groups = rpc.listgroups()
queue = rpc.postqueue(0)
history = rpc.history()
except:
log.error('Failed getting data: %s', traceback.format_exc(1))
return []
release_downloads = ReleaseDownloadList(self)
for nzb in groups:
try:
nzb_id = [param['Value'] for param in nzb['Parameters'] if param['Name'] == 'couchpotato'][0]
except:
nzb_id = nzb['NZBID']
if nzb_id in ids:
log.debug('Found %s in NZBGet download queue', nzb['NZBFilename'])
timeleft = -1
try:
if nzb['ActiveDownloads'] > 0 and nzb['DownloadRate'] > 0 and not (status['DownloadPaused'] or status['Download2Paused']):
timeleft = str(timedelta(seconds = nzb['RemainingSizeMB'] / status['DownloadRate'] * 2 ** 20))  # 2 ** 20 converts MB to bytes; '^' would be a bitwise XOR in Python
except:
pass
release_downloads.append({
'id': nzb_id,
'name': nzb['NZBFilename'],
'original_status': 'DOWNLOADING' if nzb['ActiveDownloads'] > 0 else 'QUEUED',
# Seems to have no native API function for time left. This will return the time left after NZBGet started downloading this item
'timeleft': timeleft,
})
for nzb in queue: # 'Parameters' is not passed in rpc.postqueue
if nzb['NZBID'] in ids:
log.debug('Found %s in NZBGet postprocessing queue', nzb['NZBFilename'])
release_downloads.append({
'id': nzb['NZBID'],
'name': nzb['NZBFilename'],
'original_status': nzb['Stage'],
'timeleft': str(timedelta(seconds = 0)) if not status['PostPaused'] else -1,
})
for nzb in history:
try:
nzb_id = [param['Value'] for param in nzb['Parameters'] if param['Name'] == 'couchpotato'][0]
except:
nzb_id = nzb['NZBID']
if nzb_id in ids:
log.debug('Found %s in NZBGet history. TotalStatus: %s, ParStatus: %s, ScriptStatus: %s, Log: %s', (nzb['NZBFilename'] , nzb['Status'], nzb['ParStatus'], nzb['ScriptStatus'] , nzb['Log']))
release_downloads.append({
'id': nzb_id,
'name': nzb['NZBFilename'],
'status': 'completed' if 'SUCCESS' in nzb['Status'] else 'failed',
'original_status': nzb['Status'],
'timeleft': str(timedelta(seconds = 0)),
'folder': sp(nzb['DestDir'])
})
return release_downloads<|fim▁hole|> log.info('%s failed downloading, deleting...', release_download['name'])
rpc = self.getRPC()
try:
if rpc.writelog('INFO', 'CouchPotato connected to delete some history'):
log.debug('Successfully connected to NZBGet')
else:
log.info('Successfully connected to NZBGet, but unable to send a message')
except socket.error:
log.error('NZBGet is not responding. Please ensure that NZBGet is running and host setting is correct.')
return False
except xmlrpclib.ProtocolError as e:
if e.errcode == 401:
log.error('Password is incorrect.')
else:
log.error('Protocol Error: %s', e)
return False
try:
history = rpc.history()
nzb_id = None
path = None
for hist in history:
for param in hist['Parameters']:
if param['Name'] == 'couchpotato' and param['Value'] == release_download['id']:
nzb_id = hist['ID']
path = hist['DestDir']
if nzb_id and path and rpc.editqueue('HistoryDelete', 0, "", [tryInt(nzb_id)]):
shutil.rmtree(path, True)
except:
log.error('Failed deleting: %s', traceback.format_exc(0))
return False
return True
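# getRPC builds an XML-RPC proxy for the configured NZBGet host, using the host, ssl, username and password settings.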
def getRPC(self):
url = cleanHost(host = self.conf('host'), ssl = self.conf('ssl'), username = self.conf('username'), password = self.conf('password')) + self.rpc
return xmlrpclib.ServerProxy(url)
config = [{
'name': 'nzbget',
'groups': [
{
'tab': 'downloaders',
'list': 'download_providers',
'name': 'nzbget',
'label': 'NZBGet',
'description': 'Use <a href="http://nzbget.net" target="_blank">NZBGet</a> to download NZBs.',
'wizard': True,
'options': [
{
'name': 'enabled',
'default': 0,
'type': 'enabler',
'radio_group': 'nzb',
},
{
'name': 'host',
'default': 'localhost:6789',
'description': 'Hostname with port. Usually <strong>localhost:6789</strong>',
},
{
'name': 'ssl',
'default': 0,
'type': 'bool',
'advanced': True,
'description': 'Use HyperText Transfer Protocol Secure, or <strong>https</strong>',
},
{
'name': 'username',
'default': 'nzbget',
'advanced': True,
'description': 'Set a different username to connect. Default: nzbget',
},
{
'name': 'password',
'type': 'password',
'description': 'Default NZBGet password is <i>tegbzn6789</i>',
},
{
'name': 'category',
'default': 'Movies',
'description': 'The category CP places the nzb in. Like <strong>movies</strong> or <strong>couchpotato</strong>',
},
{
'name': 'priority',
'advanced': True,
'default': '0',
'type': 'dropdown',
'values': [('Very Low', -100), ('Low', -50), ('Normal', 0), ('High', 50), ('Very High', 100), ('Forced', 900)],
'description': 'Only change this if you are using NZBget 13.0 or higher',
},
{
'name': 'manual',
'default': 0,
'type': 'bool',
'advanced': True,
'description': 'Disable this downloader for automated searches, but use it when I manually send a release.',
},
{
'name': 'delete_failed',
'default': True,
'advanced': True,
'type': 'bool',
'description': 'Delete a release after the download has failed.',
},
],
}
],
}]<|fim▁end|>
|
def removeFailed(self, release_download):
|
<|file_name|>enclosing-scope.rs<|end_file_name|><|fim▁begin|>// Test scope annotations from `enclosing_scope` parameter<|fim▁hole|>trait Trait{}
struct Foo;
fn f<T: Trait>(x: T) {}
fn main() {
let x = || {
f(Foo{}); //~ ERROR the trait bound `Foo: Trait` is not satisfied
let y = || {
f(Foo{}); //~ ERROR the trait bound `Foo: Trait` is not satisfied
};
};
{
{
f(Foo{}); //~ ERROR the trait bound `Foo: Trait` is not satisfied
}
}
f(Foo{}); //~ ERROR the trait bound `Foo: Trait` is not satisfied
}<|fim▁end|>
|
#![feature(rustc_attrs)]
#[rustc_on_unimplemented(enclosing_scope="in this scope")]
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Created on Fri Dec 23 15:22:48 2011<|fim▁hole|>__all__ = ["bslip"]<|fim▁end|>
|
@author: moritz
"""
|
<|file_name|>html.js<|end_file_name|><|fim▁begin|>/* eslint-env browser */
/**
* Module dependencies.
*/
var Base = require('./base');
var utils = require('../utils');
var Progress = require('../browser/progress');
var escapeRe = require('escape-string-regexp');
var escape = utils.escape;
/**
* Save timer references to avoid Sinon interfering (see GH-237).
*/
/* eslint-disable no-unused-vars, no-native-reassign */
var Date = global.Date;
var setTimeout = global.setTimeout;
var setInterval = global.setInterval;
var clearTimeout = global.clearTimeout;
var clearInterval = global.clearInterval;
/* eslint-enable no-unused-vars, no-native-reassign */
/**
* Expose `HTML`.
*/
exports = module.exports = HTML;
/**
* Stats template.
*/
var statsTemplate = '<ul id="mocha-stats">'
+ '<li class="progress"><canvas width="40" height="40"></canvas></li>'
+ '<li class="passes"><a href="javascript:void(0);">passes:</a> <em>0</em></li>'
+ '<li class="failures"><a href="javascript:void(0);">failures:</a> <em>0</em></li>'
+ '<li class="duration">duration: <em>0</em>s</li>'
+ '</ul>';
/**
* Initialize a new `HTML` reporter.
*
* @api public
* @param {Runner} runner
*/
function HTML(runner) {
Base.call(this, runner);
var self = this;
var stats = this.stats;
var stat = fragment(statsTemplate);
var items = stat.getElementsByTagName('li');
var passes = items[1].getElementsByTagName('em')[0];
var passesLink = items[1].getElementsByTagName('a')[0];
var failures = items[2].getElementsByTagName('em')[0];
var failuresLink = items[2].getElementsByTagName('a')[0];
var duration = items[3].getElementsByTagName('em')[0];
var canvas = stat.getElementsByTagName('canvas')[0];
var report = fragment('<ul id="mocha-report"></ul>');
var stack = [report];
var progress;
var ctx;
var root = document.getElementById('mocha');
if (canvas.getContext) {
var ratio = window.devicePixelRatio || 1;
canvas.style.width = canvas.width;
canvas.style.height = canvas.height;
canvas.width *= ratio;
canvas.height *= ratio;
ctx = canvas.getContext('2d');
ctx.scale(ratio, ratio);
progress = new Progress();
}
if (!root) {
return error('#mocha div missing, add it to your document');
}
// pass toggle
on(passesLink, 'click', function() {
unhide();
var name = (/pass/).test(report.className) ? '' : ' pass';
report.className = report.className.replace(/fail|pass/g, '') + name;
if (report.className.trim()) {
hideSuitesWithout('test pass');
}
});
// failure toggle
on(failuresLink, 'click', function() {
unhide();
var name = (/fail/).test(report.className) ? '' : ' fail';
report.className = report.className.replace(/fail|pass/g, '') + name;
if (report.className.trim()) {
hideSuitesWithout('test fail');
}
});
root.appendChild(stat);
root.appendChild(report);
if (progress) {
progress.size(40);
}
runner.on('suite', function(suite) {
if (suite.root) {
return;
}
// suite
var url = self.suiteURL(suite);
var el = fragment('<li class="suite"><h1><a href="%e">%e</a></h1></li>', url, suite.title);
// container
stack[0].appendChild(el);
stack.unshift(document.createElement('ul'));
el.appendChild(stack[0]);
});
runner.on('suite end', function(suite) {
if (suite.root) {
return;
}
stack.shift();
});
runner.on('fail', function(test) {
if (test.type === 'hook') {
runner.emit('test end', test);
}
});
<|fim▁hole|> runner.on('test end', function(test) {
// TODO: add to stats
var percent = stats.tests / this.total * 100 | 0;
if (progress) {
progress.update(percent).draw(ctx);
}
// update stats
var ms = new Date() - stats.start;
text(passes, stats.passes);
text(failures, stats.failures);
text(duration, (ms / 1000).toFixed(2));
// test
var el;
if (test.state === 'passed') {
var url = self.testURL(test);
el = fragment('<li class="test pass %e"><h2>%e<span class="duration">%ems</span> <a href="%e" class="replay">‣</a></h2></li>', test.speed, test.title, test.duration, url);
} else if (test.pending) {
el = fragment('<li class="test pass pending"><h2>%e</h2></li>', test.title);
} else {
el = fragment('<li class="test fail"><h2>%e <a href="%e" class="replay">‣</a></h2></li>', test.title, self.testURL(test));
var stackString; // Note: Includes leading newline
var message = test.err.toString();
// <=IE7 stringifies to [Object Error]. Since it can be overloaded, we
// check for the result of the stringifying.
if (message === '[object Error]') {
message = test.err.message;
}
if (test.err.stack) {
var indexOfMessage = test.err.stack.indexOf(test.err.message);
if (indexOfMessage === -1) {
stackString = test.err.stack;
} else {
stackString = test.err.stack.substr(test.err.message.length + indexOfMessage);
}
} else if (test.err.sourceURL && test.err.line !== undefined) {
// Safari doesn't give you a stack. Let's at least provide a source line.
stackString = '\n(' + test.err.sourceURL + ':' + test.err.line + ')';
}
stackString = stackString || '';
if (test.err.htmlMessage && stackString) {
el.appendChild(fragment('<div class="html-error">%s\n<pre class="error">%e</pre></div>', test.err.htmlMessage, stackString));
} else if (test.err.htmlMessage) {
el.appendChild(fragment('<div class="html-error">%s</div>', test.err.htmlMessage));
} else {
el.appendChild(fragment('<pre class="error">%e%e</pre>', message, stackString));
}
}
// toggle code
// TODO: defer
if (!test.pending) {
var h2 = el.getElementsByTagName('h2')[0];
on(h2, 'click', function() {
pre.style.display = pre.style.display === 'none' ? 'block' : 'none';
});
var pre = fragment('<pre><code>%e</code></pre>', utils.clean(test.fn.toString()));
el.appendChild(pre);
pre.style.display = 'none';
}
// Don't call .appendChild if #mocha-report was already .shift()'ed off the stack.
if (stack[0]) {
stack[0].appendChild(el);
}
});
}
/**
* Makes a URL, preserving querystring ("search") parameters.
*
* @param {string} s
* @return {string} A new URL.
*/
function makeUrl(s) {
var search = window.location.search;
// Remove previous grep query parameter if present
if (search) {
search = search.replace(/[?&]grep=[^&\s]*/g, '').replace(/^&/, '?');
}
return window.location.pathname + (search ? search + '&' : '?') + 'grep=' + encodeURIComponent(escapeRe(s));
}
/**
* Provide suite URL.
*
* @param {Object} [suite]
*/
HTML.prototype.suiteURL = function(suite) {
return makeUrl(suite.fullTitle());
};
/**
* Provide test URL.
*
* @param {Object} [test]
*/
HTML.prototype.testURL = function(test) {
return makeUrl(test.fullTitle());
};
/**
* Display error `msg`.
*
* @param {string} msg
*/
function error(msg) {
document.body.appendChild(fragment('<div id="mocha-error">%e</div>', msg));
}
/**
* Return a DOM fragment from `html`.
*
* @param {string} html
*/
function fragment(html) {
var args = arguments;
var div = document.createElement('div');
var i = 1;
div.innerHTML = html.replace(/%([se])/g, function(_, type) {
switch (type) {
case 's': return String(args[i++]);
case 'e': return escape(args[i++]);
// no default
}
});
return div.firstChild;
}
/**
* Check for suites that do not have elements
* with `classname`, and hide them.
*
* @param {text} classname
*/
function hideSuitesWithout(classname) {
var suites = document.getElementsByClassName('suite');
for (var i = 0; i < suites.length; i++) {
var els = suites[i].getElementsByClassName(classname);
if (!els.length) {
suites[i].className += ' hidden';
}
}
}
/**
* Unhide .hidden suites.
*/
function unhide() {
var els = document.getElementsByClassName('suite hidden');
while (els.length > 0) {
els[0].className = els[0].className.replace('suite hidden', 'suite');
}
}
/**
* Set an element's text contents.
*
* @param {HTMLElement} el
* @param {string} contents
*/
function text(el, contents) {
if (el.textContent) {
el.textContent = contents;
} else {
el.innerText = contents;
}
}
/**
* Listen on `event` with callback `fn`.
*/
function on(el, event, fn) {
if (el.addEventListener) {
el.addEventListener(event, fn, false);
} else {
el.attachEvent('on' + event, fn);
}
}<|fim▁end|>
| |
<|file_name|>find_dependencies_unittest.py<|end_file_name|><|fim▁begin|># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import optparse
import os
import platform
import shutil
import subprocess
import sys
import tempfile
import unittest
import zipfile
from telemetry.util import cloud_storage
from telemetry.util import find_dependencies
class FindDependenciesTest(unittest.TestCase):
@unittest.skipUnless(
cloud_storage.SupportsProdaccess(
os.path.realpath(cloud_storage.FindGsutil())),
'Could not find a depot_tools installation with gsutil.')
def testGsutil(self):
parser = optparse.OptionParser()
find_dependencies.FindDependenciesCommand.AddCommandLineArgs(parser)
options, _ = parser.parse_args([])
try:
temp_dir = tempfile.mkdtemp()
zip_path = os.path.join(temp_dir, 'gsutil.zip')
options.zip = zip_path
find_dependencies.ZipDependencies([], set(), options)
if platform.system() == 'Windows':
with zipfile.ZipFile(zip_path, 'r') as zip_file:
zip_file.extractall(temp_dir)
else:
# Use unzip instead of Python zipfile to preserve file permissions.
with open(os.devnull, 'w') as dev_null:
subprocess.call(['unzip', zip_path], cwd=temp_dir, stdout=dev_null)
third_party_path = os.path.join(temp_dir, 'telemetry', 'src', 'tools',
'telemetry', 'third_party')
# __init__.py is in Chromium src, but we didn't include any repo files.
open(os.path.join(third_party_path, '__init__.py'), 'a').close()
gsutil_path = os.path.join(third_party_path, 'gsutil', 'gsutil')
self.assertTrue(os.access(gsutil_path, os.X_OK))
with open(os.devnull, 'w') as dev_null:
# gsutil with no args should print usage and exit with exit code 0.
gsutil_command = [sys.executable, gsutil_path]
self.assertEqual(subprocess.call(gsutil_command, stdout=dev_null), 0)
<|fim▁hole|> #gsutil_command = [sys.executable, gsutil_path, 'config',
# '-o', os.path.join(temp_dir, 'config_file')]
#gsutil_process = subprocess.Popen(gsutil_command, stdout=dev_null)
#try:
# util.WaitFor(gsutil_process.poll, timeout=0.5)
# self.assertEqual(gsutil_process.returncode, 0,
# msg='gsutil config failed.')
#except exceptions.TimeoutException:
# gsutil_process.terminate()
# gsutil_process.wait()
finally:
shutil.rmtree(temp_dir)<|fim▁end|>
|
# gsutil config should wait for the user and not exit with exit code 1.
|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.urls import path, include
from django.contrib import admin
from rest_framework.routers import DefaultRouter
from tasks.views import TaskItemViewSet, MainAppView, TagViewSet, ProjectViewSet, TaskCommentViewSet
admin.autodiscover()
router = DefaultRouter()
router.register(r'task', TaskItemViewSet)
router.register(r'tag', TagViewSet)
router.register(r'project', ProjectViewSet)
router.register(r'comments', TaskCommentViewSet)
urlpatterns = [
path('admin/', admin.site.urls),
path('', MainAppView.as_view(), {}, "index"),<|fim▁hole|>]<|fim▁end|>
|
path('api/', include(router.urls)),
path('rest-auth/', include('rest_auth.urls'))
|
<|file_name|>updater.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
from src.sqllist import get_sql
_UPDATER_EXTRAS = {<|fim▁hole|>
def _refactor_update(sql):
"""
Special refactoring for MonetDB update..from imitation.
"""
def _get_extra_conditions(tabname):
return ' '.join(map(lambda x: 'and {0}.{1} = x.{1}'.format(tabname, x),
_UPDATER_EXTRAS[tabname]))
sqlupdate, sqlfrom = sql.strip().split('from', 1)
table, sqlupd_list = sqlupdate.split('set')
sqlupd_list = sqlupd_list.split(',')
table = table.split()[1]
if sqlfrom.endswith(';'):
sqlfrom = sqlfrom[:-1]
sqlfrom_split = sqlfrom.split('where', 1)
if len(sqlfrom_split) > 1:
[sqlfrom2, sqlwhere] = sqlfrom_split
sqlwhere = 'where %s' % sqlwhere
else:
sqlfrom2 = sqlfrom
sqlwhere = ''
for field in _UPDATER_EXTRAS[table]:
sqlwhere = sqlwhere.replace('%s.%s' % (table, field), 'x.%s' % field)
update_field = []
for sqlf in sqlupd_list:
field, update_stmt = sqlf.split('=')
update_field.append('%s = (select %s from %s x, %s %s %s)' % (
field, update_stmt.replace(table, 'x'),
table, sqlfrom2, sqlwhere,
_get_extra_conditions(table)))
result = []
for field in update_field:
result.append("""update %s set %s
where exists (select 1 from %s);""" % (table, field, sqlfrom))
return result
def run_update(conn, sql_name, *params):
"""
Run update on a given connection. Refactor it for MonetDB if needed.
"""
sql = get_sql(sql_name, *params)
if conn.is_monet():
conn.execute_set(_refactor_update(sql))
else:
conn.execute(sql)<|fim▁end|>
|
'runningcatalog': ['runcatid'],
'runningcatalog_fluxes': ['runcat_id', 'band', 'stokes'],
}
|
<|file_name|>mobileweb.py<|end_file_name|><|fim▁begin|>import datetime
import re
from django.http import HttpResponse
from django.utils.http import urlencode
import smsgateway
from smsgateway.models import SMS
from smsgateway.backends.base import SMSBackend
from smsgateway.utils import check_cell_phone_number
class MobileWebBackend(SMSBackend):
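# get_send_url builds the MobileWeb gateway URL (smsin.asp) with the account credentials and the latin-1 encoded message in the query string.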
def get_send_url(self, sms_request, account_dict):
# Encode message
msg = sms_request.msg
try:
msg = msg.encode('latin-1')
except:
pass
querystring = urlencode({
'login': account_dict['username'],
'pass': account_dict['password'],
'gsmnr': sms_request.to[0][1:],
'sid': account_dict['sid'],
'msgcontent': msg,
})
return u'http://gateway.mobileweb.be/smsin.asp?%s' % querystring
def validate_send_result(self, result):
return 'accepted' in result
def handle_incoming(self, request, reply_using=None):
request_dict = request.POST if request.method == 'POST' else request.GET<|fim▁hole|>
# Check whether we've already received this message
if SMS.objects.filter(gateway_ref=request_dict['MessageID']).exists():
return HttpResponse('OK')
# Parse and process message
year, month, day, hour, minute, second, ms = map(int, re.findall(r'(\d+)', request_dict['SendDateTime']))
sms_dict = {
'sent': datetime.datetime(year, month, day, hour, minute, second),
'content': request_dict['MsgeContent'],
'sender': check_cell_phone_number(request_dict['SenderGSMNR']),
'to': request_dict['ShortCode'],
'operator': int(request_dict['Operator']),
'gateway_ref': request_dict['MessageID'],
'backend': self.get_slug(),
}
sms = SMS(**sms_dict)
response = self.process_incoming(request, sms)
# If necessary, send response SMS
if response is not None:
signature = smsgateway.get_account(reply_using)['reply_signature']
success = smsgateway.send([sms.sender], response, signature, using=reply_using)
# Sending failed, queue SMS
if not success:
smsgateway.send_queued(sms.sender, response, signature, reply_using)
return HttpResponse(response)
return HttpResponse('OK')
def get_slug(self):
return 'mobileweb'
def get_url_capacity(self):
return 1<|fim▁end|>
|
# Check whether we've gotten a SendDateTime
if not 'SendDateTime' in request_dict:
return HttpResponse('')
|
<|file_name|>ktestq.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};<|fim▁hole|>use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
fn ktestq_1() {
run_test(&Instruction { mnemonic: Mnemonic::KTESTQ, operand1: Some(Direct(K7)), operand2: Some(Direct(K6)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 225, 248, 153, 254], OperandSize::Dword)
}
fn ktestq_2() {
run_test(&Instruction { mnemonic: Mnemonic::KTESTQ, operand1: Some(Direct(K5)), operand2: Some(Direct(K6)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 225, 248, 153, 238], OperandSize::Qword)
}<|fim▁end|>
|
use ::RegType::*;
use ::instruction_def::*;
|
<|file_name|>impl-disambiguation.rs<|end_file_name|><|fim▁begin|>#![crate_name = "foo"]
pub trait Foo {}
pub struct Bar<T> { field: T }
// @has foo/trait.Foo.html '//*[@class="item-list"]//h3[@class="code-header in-band"]' \
// "impl Foo for Bar<u8>"
impl Foo for Bar<u8> {}
// @has foo/trait.Foo.html '//*[@class="item-list"]//h3[@class="code-header in-band"]' \
// "impl Foo for Bar<u16>"
impl Foo for Bar<u16> {}
// @has foo/trait.Foo.html '//*[@class="item-list"]//h3[@class="code-header in-band"]' \
// "impl<'a> Foo for &'a Bar<u8>"
impl<'a> Foo for &'a Bar<u8> {}
pub mod mod1 {
pub struct Baz {}
}
<|fim▁hole|>pub mod mod2 {
pub enum Baz {}
}
// @has foo/trait.Foo.html '//*[@class="item-list"]//h3[@class="code-header in-band"]' \
// "impl Foo for foo::mod1::Baz"
impl Foo for mod1::Baz {}
// @has foo/trait.Foo.html '//*[@class="item-list"]//h3[@class="code-header in-band"]' \
// "impl<'a> Foo for &'a foo::mod2::Baz"
impl<'a> Foo for &'a mod2::Baz {}<|fim▁end|>
| |
<|file_name|>SimpleMapReduce.java<|end_file_name|><|fim▁begin|>package com.xoba.smr;
import java.io.PrintWriter;
import java.io.StringReader;
import java.io.StringWriter;
import java.net.URI;
import java.util.Collection;
import java.util.Formatter;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import org.apache.commons.codec.binary.Base64;
import com.amazonaws.Request;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.handlers.AbstractRequestHandler;
import com.amazonaws.services.ec2.AmazonEC2;
import com.amazonaws.services.ec2.AmazonEC2Client;
import com.amazonaws.services.ec2.model.CreateTagsRequest;
import com.amazonaws.services.ec2.model.Instance;
import com.amazonaws.services.ec2.model.LaunchSpecification;
import com.amazonaws.services.ec2.model.RequestSpotInstancesRequest;
import com.amazonaws.services.ec2.model.RequestSpotInstancesResult;
import com.amazonaws.services.ec2.model.RunInstancesRequest;
import com.amazonaws.services.ec2.model.RunInstancesResult;
import com.amazonaws.services.ec2.model.Tag;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.simpledb.AmazonSimpleDB;
import com.amazonaws.services.simpledb.AmazonSimpleDBClient;
import com.amazonaws.services.sqs.AmazonSQS;
import com.amazonaws.services.sqs.AmazonSQSClient;
import com.amazonaws.services.sqs.model.SendMessageRequest;
import com.xoba.util.ILogger;
import com.xoba.util.LogFactory;
import com.xoba.util.MraUtils;
public class SimpleMapReduce {
private static final ILogger logger = LogFactory.getDefault().create();
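/**
* Queues the map input splits and reduce shuffle buckets, then either runs the job locally
* (machineCount == 1) or launches EC2 instances (machineCount > 1); this overload defaults to spot instances.
*/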
public static void launch(Properties config, Collection<String> inputSplitPrefixes, AmazonInstance ai,
int machineCount) throws Exception {
launch(config, inputSplitPrefixes, ai, machineCount, true);
}
public static void launch(Properties config, Collection<String> inputSplitPrefixes, AmazonInstance ai,
int machineCount, boolean spotInstances) throws Exception {
if (!config.containsKey(ConfigKey.JOB_ID.toString())) {
config.put(ConfigKey.JOB_ID.toString(), MraUtils.md5Hash(new TreeMap<Object, Object>(config).toString())
.substring(0, 8));
}
String id = config.getProperty(ConfigKey.JOB_ID.toString());
AWSCredentials aws = create(config);
AmazonSimpleDB db = new AmazonSimpleDBClient(aws);
AmazonSQS sqs = new AmazonSQSClient(aws);
AmazonS3 s3 = new AmazonS3Client(aws);
final String mapQueue = config.getProperty(ConfigKey.MAP_QUEUE.toString());
final String reduceQueue = config.getProperty(ConfigKey.REDUCE_QUEUE.toString());
final String dom = config.getProperty(ConfigKey.SIMPLEDB_DOM.toString());
final String mapInputBucket = config.getProperty(ConfigKey.MAP_INPUTS_BUCKET.toString());
final String shuffleBucket = config.getProperty(ConfigKey.SHUFFLE_BUCKET.toString());
final String reduceOutputBucket = config.getProperty(ConfigKey.REDUCE_OUTPUTS_BUCKET.toString());
final int hashCard = new Integer(config.getProperty(ConfigKey.HASH_CARDINALITY.toString()));
s3.createBucket(reduceOutputBucket);
final long inputSplitCount = inputSplitPrefixes.size();<|fim▁hole|>
for (String key : inputSplitPrefixes) {
Properties p = new Properties();
p.setProperty("input", "s3://" + mapInputBucket + "/" + key);
sqs.sendMessage(new SendMessageRequest(mapQueue, serialize(p)));
}
SimpleDbCommitter.commitNewAttribute(db, dom, "parameters", "splits", "" + inputSplitCount);
SimpleDbCommitter.commitNewAttribute(db, dom, "parameters", "hashes", "" + hashCard);
SimpleDbCommitter.commitNewAttribute(db, dom, "parameters", "done", "1");
for (String hash : getAllHashes(hashCard)) {
Properties p = new Properties();
p.setProperty("input", "s3://" + shuffleBucket + "/" + hash);
sqs.sendMessage(new SendMessageRequest(reduceQueue, serialize(p)));
}
if (machineCount == 1) {
// run locally
SMRDriver.main(new String[] { MraUtils.convertToHex(serialize(config).getBytes()) });
} else if (machineCount > 1) {
// run in the cloud
String userData = produceUserData(ai, config,
new URI(config.getProperty(ConfigKey.RUNNABLE_JARFILE_URI.toString())));
logger.debugf("launching job with id %s:", id);
System.out.println(userData);
AmazonEC2 ec2 = new AmazonEC2Client(aws);
if (spotInstances) {
RequestSpotInstancesRequest sir = new RequestSpotInstancesRequest();
sir.setSpotPrice(new Double(ai.getPrice()).toString());
sir.setInstanceCount(machineCount);
sir.setType("one-time");
LaunchSpecification spec = new LaunchSpecification();
spec.setImageId(ai.getDefaultAMI());
spec.setUserData(new String(new Base64().encode(userData.getBytes("US-ASCII"))));
spec.setInstanceType(ai.getApiName());
sir.setLaunchSpecification(spec);
RequestSpotInstancesResult result = ec2.requestSpotInstances(sir);
logger.debugf("spot instance request result: %s", result);
} else {
RunInstancesRequest req = new RunInstancesRequest(ai.getDefaultAMI(), machineCount, machineCount);
req.setClientToken(id);
req.setInstanceType(ai.getApiName());
req.setInstanceInitiatedShutdownBehavior("terminate");
req.setUserData(new String(new Base64().encode(userData.getBytes("US-ASCII"))));
RunInstancesResult resp = ec2.runInstances(req);
logger.debugf("on demand reservation id: %s", resp.getReservation().getReservationId());
labelEc2Instance(ec2, resp, "smr-" + id);
}
}
}
public static void labelEc2Instance(AmazonEC2 ec2, RunInstancesResult resp, String title) throws Exception {
int tries = 0;
boolean done = false;
while (tries++ < 3 && !done) {
try {
List<String> resources = new LinkedList<String>();
for (Instance i : resp.getReservation().getInstances()) {
resources.add(i.getInstanceId());
}
List<Tag> tags = new LinkedList<Tag>();
tags.add(new Tag("Name", title));
CreateTagsRequest ctr = new CreateTagsRequest(resources, tags);
ec2.createTags(ctr);
done = true;
logger.debugf("set tag(s)");
} catch (Exception e) {
logger.warnf("exception setting tags: %s", e);
Thread.sleep(3000);
}
}
}
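/**
* Builds the EC2 user-data shell script: installs a JDK and wget via aptitude, downloads the
* runnable jar, starts SMRDriver with the hex-encoded job configuration, then powers off.
*/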
public static String produceUserData(AmazonInstance ai, Properties c, URI jarFileURI) throws Exception {
StringWriter sw = new StringWriter();
LinuxLineConventionPrintWriter pw = new LinuxLineConventionPrintWriter(new PrintWriter(sw));
pw.println("#!/bin/sh");
pw.println("cd /root");
pw.println("chmod 777 /mnt");
pw.println("aptitude update");
Set<String> set = new TreeSet<String>();
set.add("openjdk-6-jdk");
set.add("wget");
if (set.size() > 0) {
pw.print("aptitude install -y ");
Iterator<String> it = set.iterator();
while (it.hasNext()) {
String x = it.next();
pw.print(x);
if (it.hasNext()) {
pw.print(" ");
}
}
pw.println();
}
pw.printf("wget %s", jarFileURI);
pw.println();
String[] parts = jarFileURI.getPath().split("/");
String jar = parts[parts.length - 1];
pw.printf("java -Xmx%.0fm -cp %s %s %s", 1000 * 0.8 * ai.getMemoryGB(), jar, SMRDriver.class.getName(),
MraUtils.convertToHex(serialize(c).getBytes()));
pw.println();
pw.println("poweroff");
pw.close();
return sw.toString();
}
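/**
* Creates an S3 client that tags every request with the "requester pays" header.
*/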
public static AmazonS3 getS3(AWSCredentials aws) {
AmazonS3Client s3 = new AmazonS3Client(aws);
s3.addRequestHandler(new AbstractRequestHandler() {
@Override
public void beforeRequest(Request<?> request) {
request.addHeader("x-amz-request-payer", "requester");
}
});
return s3;
}
public static String prefixedName(String p, String n) {
return p + "-" + n;
}
public static AWSCredentials create(final Properties p) {
return new AWSCredentials() {
@Override
public String getAWSSecretKey() {
return p.getProperty(ConfigKey.AWS_SECRETKEY.toString());
}
@Override
public String getAWSAccessKeyId() {
return p.getProperty(ConfigKey.AWS_KEYID.toString());
}
};
}
public static SortedSet<String> getAllHashes(long mod) {
SortedSet<String> out = new TreeSet<String>();
for (long i = 0; i < mod; i++) {
out.add(fmt(i, mod));
}
return out;
}
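/**
* Maps a key to one of `mod` buckets via MD5 and returns the zero-padded bucket label.
*/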
public static String hash(byte[] key, long mod) {
byte[] buf = MraUtils.md5HashBytesToBytes(key);
long x = Math.abs(MraUtils.extractLongValue(buf));
return fmt(x % mod, mod);
}
private static String fmt(long x, long mod) {
long places = Math.round(Math.ceil(Math.log10(mod)));
if (places == 0) {
places = 1;
}
return new Formatter().format("%0" + places + "d", x).toString();
}
public static String serialize(Properties p) throws Exception {
StringWriter sw = new StringWriter();
try {
p.store(sw, "n/a");
} finally {
sw.close();
}
return sw.toString();
}
public static Properties marshall(String s) throws Exception {
Properties p = new Properties();
StringReader sr = new StringReader(s);
try {
p.load(sr);
} finally {
sr.close();
}
return p;
}
}<|fim▁end|>
| |
<|file_name|>rawlink.rs<|end_file_name|><|fim▁begin|>// This file is part of Intrusive.
// Intrusive is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.<|fim▁hole|>
// Intrusive is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
// You should have received a copy of the GNU Lesser General Public License
// along with Intrusive. If not, see <http://www.gnu.org/licenses/>.
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[cfg(all(feature="nostd",not(test)))]
use core::prelude::*;
use std::mem;
use std::ptr;
#[allow(raw_pointer_derive)]
#[derive(Debug)]
pub struct Rawlink<T> {
p: *mut T
}
impl<T> Copy for Rawlink<T> {}
unsafe impl<T:'static+Send> Send for Rawlink<T> {}
unsafe impl<T:Send+Sync> Sync for Rawlink<T> {}
/// Rawlink is a type like Option<T> but for holding a raw pointer
impl<T> Rawlink<T> {
/// Like Option::None for Rawlink
pub fn none() -> Rawlink<T> {
Rawlink{p: ptr::null_mut()}
}
/// Like Option::Some for Rawlink
pub fn some(n: &mut T) -> Rawlink<T> {
Rawlink{p: n}
}
/// Convert the `Rawlink` into an Option value
pub fn resolve<'a>(&self) -> Option<&'a T> {
unsafe {
mem::transmute(self.p.as_ref())
}
}
/// Convert the `Rawlink` into an Option value
pub fn resolve_mut<'a>(&mut self) -> Option<&'a mut T> {
if self.p.is_null() {
None
} else {
Some(unsafe { mem::transmute(self.p) })
}
}
/// Return the `Rawlink` and replace with `Rawlink::none()`
pub fn take(&mut self) -> Rawlink<T> {
mem::replace(self, Rawlink::none())
}
}
impl<T> PartialEq for Rawlink<T> {
#[inline]
fn eq(&self, other: &Rawlink<T>) -> bool {
self.p == other.p
}
}
impl<T> Clone for Rawlink<T> {
#[inline]
fn clone(&self) -> Rawlink<T> {
Rawlink{p: self.p}
}
}
impl<T> Default for Rawlink<T> {
fn default() -> Rawlink<T> { Rawlink::none() }
}<|fim▁end|>
| |
<|file_name|>CloudComputer.java<|end_file_name|><|fim▁begin|>package com.sigmasq.timely.solver;
public class CloudComputer {
private int cpuPower;
private int memory;
private int networkBandwidth;
private int cost;
public int getCpuPower() {
return cpuPower;
}
public int getMemory() {
return memory;
}
public int getNetworkBandwidth() {
return networkBandwidth;
}
public int getCost() {
return cost;<|fim▁hole|><|fim▁end|>
|
}
}
|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>"""
Login and logout views for the browseable API.
Add these to your root URLconf if you're using the browseable API and
your API requires authentication.
The urls must be namespaced as 'rest_framework', and you should make sure
your authentication settings include `SessionAuthentication`.
urlpatterns = patterns('',
...
url(r'^auth', include('rest_framework.urls', namespace='rest_framework'))
)
"""
from django.conf.urls.defaults import patterns, url
template_name = {'template_name': 'rest_framework/login.html'}
urlpatterns = patterns('django.contrib.auth.views',<|fim▁hole|> url(r'^login/$', 'login', template_name, name='login'),
url(r'^logout/$', 'logout', template_name, name='logout'),
)<|fim▁end|>
| |
<|file_name|>gene.py<|end_file_name|><|fim▁begin|>"""
gene.py implements the methods related to system recommendation.
@author: Bowen
"""
from system.models import gene, reaction, compound, reaction_compound, compound_gene, pathway, pathway_compound, organism
from system.fasta_reader import parse_fasta_str
from elasticsearch import Elasticsearch
import traceback
import urllib2
import json
from django.db.models import Q
def search_compound(keyword):
"""
search compound based on the keyword
@param keyword: the keyword the user typed, which will be used in the search
@type keyword: str
@return: return a list that contains searched compounds
@rtype: list
"""
es = Elasticsearch()
result = format_fuzzy_result(fuzzy_search_compound(es, keyword))
return result
def fuzzy_search_compound(es, keyword):
"""
fuzzy search compound based on the keyword with elasticsearch
@param es: the elasticsearch object
@param keyword: the search keyword
@type es: Elasticsearch
@type keyword: str
@return a dict generated by the elasticsearch, which contains the search result
@rtype: dict
"""
query_body = {
"from" : 0,
"size" : 20,
"query" : {
"fuzzy_like_this" : {
"fields" : ["name"],
"like_text" : keyword,
"max_query_terms" : 20
}
}
}
result = es.search(index="biodesigners", doc_type="compounds", body=query_body)
return result
def format_fuzzy_result(es_result):
"""
format the es search result to front end processable format
@param es_result: the es search result
@type es_result: dict
@return: the front end processable format, which will look like this::
[{'compound_id': id, 'name': name},...]
@rtype: list
"""
compound_result = es_result['hits']['hits']
result = list()
if len(compound_result) != 0:
for compound_item in compound_result:
info = compound_item['_source']
compound_info = {
'compound_id': info["compound_id"],
'name': info['name'],
}
result.append(compound_info)
return result
def get_gene_info(gid):
"""
get gene information from the database
@param gid: the gene id
@type gid: str
@return: gene information dict
@rtype: dict
"""
try:
gene_obj = gene.objects.get(gene_id=gid)
result = {
'gene_id': gene_obj.gene_id,
'name': gene_obj.name,
'definition': gene_obj.definition,
'organism_short': gene_obj.organism_short,
'organism': gene_obj.organism
}
return True, result
except:
traceback.print_exc()
return False, None
def get_compound_info(cid):
"""
get a specific compound's information
@param cid: compound id
@type cid: str
@return: a tuple containing whether the compound could be retrieved and its information
@rtype: dict
"""
try:
compound_obj = compound.objects.get(compound_id=cid)
result = {
'compound_id' : compound_obj.compound_id,
'name': compound_obj.name,
'nicknames' : compound_obj.nicknames.replace('_', '\n'),
'formula' : compound_obj.formula,
'exact_mass' : compound_obj.exact_mass,
'mol_weight' : compound_obj.mol_mass
}
return True, result
except:
traceback.print_exc()
return False, None
class gene_graph:
"""
gene graph: calculation and generation of the gene & protein relation graph
"""
def __init__(self, cid_list, ogm):
"""
constructor for gene_graph class
@param cid_list: compound id list
@type cid_list: str
@param ogm: organisms
@type ogm:str
"""
if cid_list.startswith('_'):
cid_list = cid_list[1:]
if cid_list.endswith('_'):
cid_list = cid_list[:-1]
self.cid_list = cid_list.split('_')
self.nodes = list()
self.edges = list()
self.index_dict = dict()
self.index = 0
if ogm != None:
if ogm.startswith('_'):
ogm = ogm[1:]
if ogm.endswith('_'):
ogm = ogm[:-1]
self.organisms = ogm.split('_')
else:
self.organisms = None
def get_compound_object(self, cid):
"""
get compound object by compound id
@param cid: compound id
@type cid: str
@return: compound object or none if not found
@rtype: compound
"""
try:
compound_obj = compound.objects.get(compound_id=cid)
return compound_obj
except:
return None
def retrive_gene_detain(self, gid):
"""
get gene data from NCBI
@param gid: gene id
@type gid: str
@return: gene information in dict or none
@rtype: dict
"""
#get information from ncbi
baseUrl = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esummary.fcgi?db=gene&retmode=json&version=2.0&id='
try:
req = urllib2.Request(baseUrl + gid)
response = urllib2.urlopen(req)
resStr = response.read()
result = json.loads(resStr)
infos = result['result'][gid]
detail_info = dict()
detail_info['name'] = infos['name']
detail_info['definition'] = infos['description']
detail_info['organism'] = infos['organism']['scientificname']
return detail_info
except:
traceback.print_exc()
return None
def related_compound(self, cid):
"""
find a compound's related compound
@param cid: compound id
@type cid: str
@return: list of related compound
@rtype: list
"""
compound_obj = self.get_compound_object(cid)
if self.organisms != None:
organism_pathway_id_list = pathway.objects.filter(organism_id__in=self.organisms).values_list('pathway_id', flat=True)
else:
organism_pathway_id_list = pathway.objects.all()
valued_pathway_id_list = pathway_compound.objects.filter(pathway_id__in=organism_pathway_id_list, compound=compound_obj)
valued_compound_list = pathway_compound.objects.filter(Q(pathway_id__in=valued_pathway_id_list), ~Q(compound=compound_obj)).values_list('compound', flat=True)
compound_list = compound.objects.filter(compound_id__in=valued_compound_list)
return compound_list
def create_node(self, name, id):
"""
create a node (gene or compound) in the graph
@param name: name for the node
@param id: id for the node
@type name : str
@type id : str
"""
node_info = {
'name': name,
'id': id
}
self.nodes.append(node_info)
if id in self.index_dict.keys():
return True
self.index_dict[id] = self.index
self.index += 1
return True
def create_n_link(self, center_node, compound_obj):
"""
create nodes and link them
@param center_node: source node
@type center_node:compound
@param compound_obj: compound object
@type compound_obj: compound
"""
gene_list = self.search_gene(compound_obj)
for gene_id in gene_list:
try:
gene_obj = gene.objects.get(gene_id=gene_id)
if self.create_node(gene_obj.name, gene_obj.gene_id):
edge_info = {
'source' : self.index_dict[center_node],
'target' : self.index_dict[gene_obj.gene_id],
'relation' : compound_obj.name
}
self.edges.append(edge_info)
except:
traceback.print_exc()
pass
return gene_list[0]
def get_or_create_gene(self, gid):
"""
find gene in database, if found, return gene, or search in ncbi
@param gid: gene id
@type gid: str
@return gene object
@rtype: gene
"""
#get in database
try:
gene_obj = gene.objects.get(gene_id=gid)
return gene_obj
except:
#get from ncbi
baseUrl = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?db=nuccore&rettype=fasta&id='
req = urllib2.Request(baseUrl + gid)
response = urllib2.urlopen(req)
resStr = response.read()
gene_dict = parse_fasta_str(resStr)
for gn in gene_dict.keys():
gid = gn.split('|')[1]
#get detail information
new_gene_obj = gene(gene_id=gid)
detail_info = self.retrive_gene_detain(gid)
if detail_info == None:
continue
new_gene_obj.name = detail_info['name']
new_gene_obj.definition = detail_info['definition']
new_gene_obj.organism = detail_info['organism']
new_gene_obj.ntseq = gene_dict[gn]
new_gene_obj.ntseq_length = len(gene_dict[gn])
try:
new_gene_obj.save()
return new_gene_obj
except:
pass
return None
def save_relation_to_db(self, geneIdList, compound_obj):
"""
save relation between compound_obj and gene to database
@param geneIdList: gene id in a list
@type geneIdList: list
@param compound_obj: compound object
@type compound_obj: compound
"""
#create new obj
for gid in geneIdList:
new_rela_obj = compound_gene(compound=compound_obj)
gene_obj = self.get_or_create_gene(gid)
if gene_obj == None:
continue
new_rela_obj.gene = gene_obj
try:
new_rela_obj.save()
except:
pass
def search_gene(self, compound_obj):
"""
        find genes related to a compound
@param compound_obj: the compound object
@type compound_obj: compound
        @return: related genes
@rtype: list
"""
#search in database
obj_list = compound_gene.objects.filter(compound=compound_obj)
if len(obj_list) != 0:
geneIdList = list()
for obj in obj_list:
geneIdList.append(obj.gene.gene_id)
return geneIdList[:2]
else:
baseGeneFindUrl = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=gene&retmode=json&term='
try:
req = urllib2.Request(baseGeneFindUrl + compound_obj.name)
response = urllib2.urlopen(req)
resStr = response.read()
except:
traceback.print_exc()
return None
if len(resStr) == 0:
return None
result = json.loads(resStr)
geneIdList = result['esearchresult']['idlist']
self.save_relation_to_db(geneIdList, compound_obj)
return geneIdList[:2]
def cal_graph(self):
"""
calculate the relation graph
"""
for cid in self.cid_list:
center_compound_obj = self.get_compound_object(cid)
if center_compound_obj == None:
continue
self.create_node(center_compound_obj.name, center_compound_obj.compound_id)
related_list = self.related_compound(center_compound_obj.compound_id)[:5]
for compound_obj in related_list:
                self.create_n_link(center_compound_obj.compound_id, compound_obj)
self.create_node(compound_obj.name, compound_obj.compound_id)
                edge_info = {
                    'source': self.index_dict[center_compound_obj.compound_id],
                    'target': self.index_dict[compound_obj.compound_id],
                    'relation': compound_obj.name,
                }
                self.edges.append(edge_info)
deep_related_list = self.related_compound(compound_obj.compound_id)[:2]
for deep_compound_obj in deep_related_list:
self.create_n_link(compound_obj.compound_id, deep_compound_obj)
def get_graph(self):
"""
get the graph
        @return: the graph
@rtype: dict
"""
result = {
'nodes': self.nodes,
'edges' : self.edges
}
return result
'''
def find_related_compound(cid_str):
"""
find the compound that are related to current compound in reaction
<|fim▁hole|> @param cid: list of compound id
@type cid: list
@return: dict of compound that are related to the compound, empty list will be returned if there is no related compound
@rtype: dict
"""
result = dict()
nodes = list()
edges = list()
all_genes = list()
index_dict = dict()
index = 0
if cid_str.endswith('_'):
cid_str = cid_str[:-1]
cid_list = cid_str.split('_')
for cid in cid_list:
try:
compound_obj = compound.objects.get(compound_id=cid)
#get first gene and create new node
cen_gene_id = None
try:
cen_gene_id = search_gene_in_ncbi(compound_obj.name,)[0]
if not cen_gene_id in all_genes:
all_genes.append(cen_gene_id)
gene_obj = gene.objects.get(gene_id=cen_gene_id)
node_info = {
'name': gene_obj.name,
'id': gene_obj.gene_id
}
nodes.append(node_info)
index_dict[cen_gene_id] = index
index += 1
except:
pass
# find related reactions
rid_list = reaction_compound.objects.filter(compound=compound_obj, isReactant=True).values_list('reaction_id', flat=True)
cname_list = list()
for rid in rid_list:
rs = reaction_compound.objects.filter(Q(reaction_id=rid), ~Q(compound=compound_obj))[:5]
for r in rs:
cname_list.append(r.compound.name)
for cname in cname_list:
# find genes
gene_list = search_gene_in_ncbi(cname, expect=cen_gene_id, index=1)
for gene_id in gene_list:
if gene_id in all_genes:
continue
try:
gene_obj = gene.objects.get(gene_id=gene_id)
#create new node
all_genes.append(gene_id)
node_info = {
'name' : gene_obj.name,
'id': gene_obj.gene_id
}
nodes.append(node_info)
index_dict[gene_obj.gene_id] = index
index += 1
# add edge
edge_info = {
'source': index_dict[cen_gene_id],
'target': index_dict[gene_obj.gene_id],
'relation': cname
}
edges.append(edge_info)
except:
traceback.print_exc()
pass
except:
traceback.print_exc()
pass
result = {
'nodes': nodes,
'edges': edges
}
return result
'''<|fim▁end|>
| |
<|file_name|>Crawler.py<|end_file_name|><|fim▁begin|>import google
import re
from bs4 import BeautifulSoup
def findContactPage(url):<|fim▁hole|> html = google.get_page(url)
soup = BeautifulSoup(html)
contactStr = soup.find_all('a', href=re.compile(".*?contact", re.IGNORECASE))
return contactStr
if __name__ == "__main__":
url = "http://www.wrangler.com/"
contactStr = findContactPage(url)
if(len(contactStr) > 0):
contactPage = google.get_page(contactStr[0].get("href"))
print contactStr[0].get("href")#.find_parents("a")
soup = BeautifulSoup(contactPage)
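        # pick up any text node containing a simple email-like pattern (word@word)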
emailStr = soup.find_all(text=re.compile("[\w\.-]+@[\w\.-]+"))
if(len(emailStr) > 0) :
            print emailStr
else:
print "could not find email"
else:
print "could not find contacts page"<|fim▁end|>
| |
<|file_name|>stats_test.go<|end_file_name|><|fim▁begin|>// Copyright 2020 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package core_test
import (
"context"
. "github.com/pingcap/check"
"github.com/pingcap/parser"
"github.com/pingcap/tidb/planner/core"
"github.com/pingcap/tidb/planner/property"
"github.com/pingcap/tidb/util/hint"
"github.com/pingcap/tidb/util/testkit"
"github.com/pingcap/tidb/util/testutil"
)
var _ = Suite(&testStatsSuite{})
type testStatsSuite struct {
*parser.Parser
testData testutil.TestData
}
func (s *testStatsSuite) SetUpSuite(c *C) {
s.Parser = parser.New()
s.Parser.SetParserConfig(parser.ParserConfig{EnableWindowFunction: true, EnableStrictDoubleTypeCheck: true})
var err error
s.testData, err = testutil.LoadTestSuiteData("testdata", "stats_suite")
c.Assert(err, IsNil)
}
func (s *testStatsSuite) TearDownSuite(c *C) {
c.Assert(s.testData.GenerateOutputIfNeeded(), IsNil)
}
func (s *testStatsSuite) TestGroupNDVs(c *C) {
store, dom, err := newStoreWithBootstrap()
c.Assert(err, IsNil)
defer func() {
dom.Close()
store.Close()
}()
tk := testkit.NewTestKit(c, store)
tk.MustExec("use test")
tk.MustExec("drop table if exists t1, t2")
tk.MustExec("create table t1(a int not null, b int not null, key(a,b))")
tk.MustExec("insert into t1 values(1,1),(1,2),(2,1),(2,2),(1,1)")
tk.MustExec("create table t2(a int not null, b int not null, key(a,b))")
tk.MustExec("insert into t2 values(1,1),(1,2),(1,3),(2,1),(2,2),(2,3),(3,1),(3,2),(3,3),(1,1)")
tk.MustExec("analyze table t1")
tk.MustExec("analyze table t2")<|fim▁hole|> var output []struct {
SQL string
AggInput string
JoinInput string
}
is := dom.InfoSchema()
s.testData.GetTestCases(c, &input, &output)
for i, tt := range input {
comment := Commentf("case:%v sql: %s", i, tt)
stmt, err := s.ParseOneStmt(tt, "", "")
c.Assert(err, IsNil, comment)
core.Preprocess(tk.Se, stmt, is)
builder := core.NewPlanBuilder(tk.Se, is, &hint.BlockHintProcessor{})
p, err := builder.Build(ctx, stmt)
c.Assert(err, IsNil, comment)
p, err = core.LogicalOptimize(ctx, builder.GetOptFlag(), p.(core.LogicalPlan))
c.Assert(err, IsNil, comment)
lp := p.(core.LogicalPlan)
_, err = core.RecursiveDeriveStats4Test(lp)
c.Assert(err, IsNil, comment)
var agg *core.LogicalAggregation
var join *core.LogicalJoin
stack := make([]core.LogicalPlan, 0, 2)
traversed := false
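		// Walk the logical plan depth-first: follow the left-most child chain,
		// push the remaining children onto a stack, and stop once every branch
		// reaches a DataSource; the first aggregation and join encountered are kept.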
for !traversed {
switch v := lp.(type) {
case *core.LogicalAggregation:
agg = v
lp = lp.Children()[0]
case *core.LogicalJoin:
join = v
lp = v.Children()[0]
stack = append(stack, v.Children()[1])
case *core.LogicalApply:
lp = lp.Children()[0]
stack = append(stack, v.Children()[1])
case *core.LogicalUnionAll:
lp = lp.Children()[0]
for i := 1; i < len(v.Children()); i++ {
stack = append(stack, v.Children()[i])
}
case *core.DataSource:
if len(stack) == 0 {
traversed = true
} else {
lp = stack[0]
stack = stack[1:]
}
default:
lp = lp.Children()[0]
}
}
aggInput := ""
joinInput := ""
if agg != nil {
s := core.GetStats4Test(agg.Children()[0])
aggInput = property.ToString(s.GroupNDVs)
}
if join != nil {
l := core.GetStats4Test(join.Children()[0])
r := core.GetStats4Test(join.Children()[1])
joinInput = property.ToString(l.GroupNDVs) + ";" + property.ToString(r.GroupNDVs)
}
s.testData.OnRecord(func() {
output[i].SQL = tt
output[i].AggInput = aggInput
output[i].JoinInput = joinInput
})
c.Assert(aggInput, Equals, output[i].AggInput, comment)
c.Assert(joinInput, Equals, output[i].JoinInput, comment)
}
}
func (s *testStatsSuite) TestCardinalityGroupCols(c *C) {
store, dom, err := newStoreWithBootstrap()
c.Assert(err, IsNil)
defer func() {
dom.Close()
store.Close()
}()
tk := testkit.NewTestKit(c, store)
tk.MustExec("use test")
tk.MustExec("drop table if exists t1, t2")
tk.MustExec("create table t1(a int not null, b int not null, key(a,b))")
tk.MustExec("insert into t1 values(1,1),(1,2),(2,1),(2,2)")
tk.MustExec("create table t2(a int not null, b int not null, key(a,b))")
tk.MustExec("insert into t2 values(1,1),(1,2),(1,3),(2,1),(2,2),(2,3),(3,1),(3,2),(3,3)")
tk.MustExec("analyze table t1")
tk.MustExec("analyze table t2")
var input []string
var output []struct {
SQL string
Plan []string
}
s.testData.GetTestCases(c, &input, &output)
for i, tt := range input {
s.testData.OnRecord(func() {
output[i].SQL = tt
output[i].Plan = s.testData.ConvertRowsToStrings(tk.MustQuery("explain " + tt).Rows())
})
// The test point is the row count estimation for aggregations and joins.
tk.MustQuery("explain " + tt).Check(testkit.Rows(output[i].Plan...))
}
}<|fim▁end|>
|
ctx := context.Background()
var input []string
|
<|file_name|>gateway_scanner.py<|end_file_name|><|fim▁begin|>"""
GatewayScanner is an abstraction for searching for KNX/IP devices on the local network.
* It walks through all network interfaces
* and sends UDP multicast search requests
* it returns the first found device
"""
from __future__ import annotations
import asyncio
from functools import partial
import logging
from typing import TYPE_CHECKING
import netifaces
from xknx.knxip import (
DIB,
HPAI,
DIBDeviceInformation,
DIBServiceFamily,
DIBSuppSVCFamilies,
KNXIPFrame,
KNXIPServiceType,
SearchRequest,
SearchResponse,
)
from xknx.telegram import IndividualAddress
from .transport import UDPTransport
if TYPE_CHECKING:
from xknx.xknx import XKNX
logger = logging.getLogger("xknx.log")
class GatewayDescriptor:
"""Used to return information about the discovered gateways."""
def __init__(
self,
ip_addr: str,
port: int,
local_ip: str = "",
local_interface: str = "",
name: str = "UNKNOWN",
supports_routing: bool = False,
supports_tunnelling: bool = False,
supports_tunnelling_tcp: bool = False,
individual_address: IndividualAddress | None = None,
):
"""Initialize GatewayDescriptor class."""
self.name = name
self.ip_addr = ip_addr
self.port = port
self.local_interface = local_interface
self.local_ip = local_ip
self.supports_routing = supports_routing
self.supports_tunnelling = supports_tunnelling
self.supports_tunnelling_tcp = supports_tunnelling_tcp
self.individual_address = individual_address
def parse_dibs(self, dibs: list[DIB]) -> None:
"""Parse DIBs for gateway information."""
for dib in dibs:
if isinstance(dib, DIBSuppSVCFamilies):<|fim▁hole|> self.supports_routing = dib.supports(DIBServiceFamily.ROUTING)
if dib.supports(DIBServiceFamily.TUNNELING):
self.supports_tunnelling = True
self.supports_tunnelling_tcp = dib.supports(
DIBServiceFamily.TUNNELING, version=2
)
continue
if isinstance(dib, DIBDeviceInformation):
self.name = dib.name
self.individual_address = dib.individual_address
continue
def __repr__(self) -> str:
"""Return object as representation string."""
return (
"GatewayDescriptor(\n"
f" name={self.name},\n"
f" ip_addr={self.ip_addr},\n"
f" port={self.port},\n"
f" local_interface={self.local_interface},\n"
f" local_ip={self.local_ip},\n"
f" supports_routing={self.supports_routing},\n"
f" supports_tunnelling={self.supports_tunnelling},\n"
f" supports_tunnelling_tcp={self.supports_tunnelling_tcp},\n"
f" individual_address={self.individual_address}\n"
")"
)
def __str__(self) -> str:
"""Return object as readable string."""
return f"{self.individual_address} - {self.name} @ {self.ip_addr}:{self.port}"
class GatewayScanFilter:
"""Filter to limit gateway scan attempts.
If `tunnelling` and `routing` are set it is treated as AND.
KNX/IP devices that don't support `tunnelling` or `routing` aren't matched.
"""
def __init__(
self,
name: str | None = None,
tunnelling: bool | None = None,
tunnelling_tcp: bool | None = None,
routing: bool | None = None,
):
"""Initialize GatewayScanFilter class."""
self.name = name
self.tunnelling = tunnelling
self.tunnelling_tcp = tunnelling_tcp
self.routing = routing
def match(self, gateway: GatewayDescriptor) -> bool:
"""Check whether the device is a gateway and given GatewayDescriptor matches the filter."""
if self.name is not None and self.name != gateway.name:
return False
if (
self.tunnelling is not None
and self.tunnelling != gateway.supports_tunnelling
):
return False
if (
self.tunnelling_tcp is not None
and self.tunnelling_tcp != gateway.supports_tunnelling_tcp
):
return False
if self.routing is not None and self.routing != gateway.supports_routing:
return False
return (
gateway.supports_tunnelling
or gateway.supports_tunnelling_tcp
or gateway.supports_routing
)
class GatewayScanner:
"""Class for searching KNX/IP devices."""
def __init__(
self,
xknx: XKNX,
timeout_in_seconds: float = 4.0,
stop_on_found: int | None = 1,
scan_filter: GatewayScanFilter = GatewayScanFilter(),
):
"""Initialize GatewayScanner class."""
self.xknx = xknx
self.timeout_in_seconds = timeout_in_seconds
self.stop_on_found = stop_on_found
self.scan_filter = scan_filter
self.found_gateways: list[GatewayDescriptor] = []
self._udp_transports: list[UDPTransport] = []
self._response_received_event = asyncio.Event()
self._count_upper_bound = 0
"""Clean value of self.stop_on_found, computed when ``scan`` is called."""
async def scan(self) -> list[GatewayDescriptor]:
"""Scan and return a list of GatewayDescriptors on success."""
if self.stop_on_found is None:
self._count_upper_bound = 0
else:
self._count_upper_bound = max(0, self.stop_on_found)
await self._send_search_requests()
try:
await asyncio.wait_for(
self._response_received_event.wait(),
timeout=self.timeout_in_seconds,
)
except asyncio.TimeoutError:
pass
finally:
self._stop()
return self.found_gateways
def _stop(self) -> None:
"""Stop tearing down udp_transport."""
for udp_transport in self._udp_transports:
udp_transport.stop()
async def _send_search_requests(self) -> None:
"""Find all interfaces with active IPv4 connection to search for gateways."""
for interface in netifaces.interfaces():
try:
af_inet = netifaces.ifaddresses(interface)[netifaces.AF_INET]
ip_addr = af_inet[0]["addr"]
except KeyError:
logger.debug("No IPv4 address found on %s", interface)
continue
except ValueError as err:
# rare case when an interface disappears during search initialisation
logger.debug("Invalid interface %s: %s", interface, err)
continue
else:
await self._search_interface(interface, ip_addr)
async def _search_interface(self, interface: str, ip_addr: str) -> None:
"""Send a search request on a specific interface."""
logger.debug("Searching on %s / %s", interface, ip_addr)
udp_transport = UDPTransport(
self.xknx,
(ip_addr, 0),
(self.xknx.multicast_group, self.xknx.multicast_port),
multicast=True,
)
udp_transport.register_callback(
partial(self._response_rec_callback, interface=interface),
[KNXIPServiceType.SEARCH_RESPONSE],
)
await udp_transport.connect()
self._udp_transports.append(udp_transport)
discovery_endpoint = HPAI(
ip_addr=self.xknx.multicast_group, port=self.xknx.multicast_port
)
search_request = SearchRequest(self.xknx, discovery_endpoint=discovery_endpoint)
udp_transport.send(KNXIPFrame.init_from_body(search_request))
def _response_rec_callback(
self,
knx_ip_frame: KNXIPFrame,
source: HPAI,
udp_transport: UDPTransport,
interface: str = "",
) -> None:
"""Verify and handle knxipframe. Callback from internal udp_transport."""
if not isinstance(knx_ip_frame.body, SearchResponse):
logger.warning("Could not understand knxipframe")
return
gateway = GatewayDescriptor(
ip_addr=knx_ip_frame.body.control_endpoint.ip_addr,
port=knx_ip_frame.body.control_endpoint.port,
local_ip=udp_transport.local_addr[0],
local_interface=interface,
)
gateway.parse_dibs(knx_ip_frame.body.dibs)
logger.debug("Found KNX/IP device at %s: %s", source, repr(gateway))
self._add_found_gateway(gateway)
def _add_found_gateway(self, gateway: GatewayDescriptor) -> None:
if self.scan_filter.match(gateway) and not any(
_gateway.individual_address == gateway.individual_address
for _gateway in self.found_gateways
):
self.found_gateways.append(gateway)
if 0 < self._count_upper_bound <= len(self.found_gateways):
self._response_received_event.set()<|fim▁end|>
| |
<|file_name|>obj2vxpGUI.py<|end_file_name|><|fim▁begin|>#OBJ2VXP: Converts simple OBJ files to VXP expansions
#Copyright (C) 2004-2015 Foone Turing
#
#This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version.
#
#This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import sys
sys.path.append('code')
import pygame
from pygame.constants import *
import sockgui
sockgui.setDataPath('code')
from converterbase import ConverterBase
import os
import time
import obj2vxp
import obj2vxptex
from error import SaveError,LoadError
import ConfigParser
import vxpinstaller
class obj2vxpGUI(ConverterBase):
def __init__(self,screen):
ConverterBase.__init__(self,screen)
ui=self.ui
ys=self.makeTab(10,94,'CFG settings')
ui.add(sockgui.Label(ui,[20,ys+10],'Expansion name:'))
ui.add(sockgui.Label(ui,[20,ys+26],'Author name:'))
ui.add(sockgui.Label(ui,[20,ys+42],'Orig. Author name:'))
ui.add(sockgui.Label(ui,[20,ys+58],'Shortname:'))
ui.add(sockgui.Label(ui,[20,ys+74],'Filename:'))
self.filenamelabel=sockgui.Label(ui,[120,ys+74],'')
ui.add(self.filenamelabel)
self.namebox= sockgui.TextBox(ui,[120,ys+10-3],40)
self.authorbox= sockgui.TextBox(ui,[120,ys+26-3],40)
self.origauthorbox= sockgui.TextBox(ui,[120,ys+42-3],40)
self.shortnamebox= sockgui.TextBox(ui,[120,ys+58-3],40,callback=self.onShortNameChanged)
self.shortnamebox.setAllowedKeys(sockgui.UPPERCASE+sockgui.LOWERCASE+sockgui.DIGITS+'._-')
self.authorbox.setText(self.getAuthor())
ui.add(self.namebox)
ui.add(self.authorbox)
ui.add(self.origauthorbox)
ui.add(sockgui.Button(ui,[330,ys+42-3],'Same',callback=self.copyAuthorToOrigAuthor))
ui.add(self.shortnamebox)
self.namebox.activate()
ys=self.makeTab(ys+94+5,120,'OBJ to convert')
self.files=sockgui.ListBox(ui,[20,ys+10],[62,10],items=self.getOBJList())
if self.files.getNumItems()>0:
self.files.select(0)
ui.add(self.files)
self.enhance_color=sockgui.CheckBox(ui,[100,ys+103],'Enhance Color',self.getEnhanceColor())
self.textured=sockgui.CheckBox(ui,[200,ys+103],'Textured',self.getTextured())
ui.add(sockgui.Button(ui,[20,ys+99],'Refresh list',callback=self.refreshList))
ui.add(self.enhance_color)
ui.add(self.textured)
#ui.add(sockgui.BorderBox(ui,[10,224],[screen.get_width()-20,110]))
ys=self.makeTab(ys+120+5,30,'3dmm IDs')
ui.add(sockgui.Label(ui,[20,ys+10],'ID:'))
self.idbox=sockgui.TextBox(ui,[40,ys+7],10)
self.idbox.setAllowedKeys('0123456789')
ui.add(self.idbox)
ui.add(sockgui.Button(ui,[110,ys+7],'Generate ID',callback=self.generateNewID))
<|fim▁hole|> self.install_check=sockgui.CheckBox(ui,[240,ys+13],'Install VXP',self.getInstallCheck())
ui.add(self.install_check)
self.progress=sockgui.ProgressBox(ui,[20,ys+10],[200,16],maxvalue=6)
ui.add(self.progress)
self.errortext=sockgui.Label(ui,[20,ys+32],'')
ui.add(self.errortext)
self.startbutton=sockgui.Button(ui,[20,ys+46],'Create VXP',callback=self.createVXP)
ui.add(self.startbutton)
ui.registerHotKey(K_F5,self.updateListBox)
def refreshList(self,junk):
self.files.setItems(self.getOBJList())
def updateListBox(self,event):
if event.type==KEYUP:
self.refreshList(0)
def statusCallback(self,text):
self.errortext.setText(text)
self.ui.draw()
def createVXP(self,junk):
self.saveSettings()
self.progress.setValue(0)
try:
outfile=str(self.shortnamebox.getText())+'.vxp'
objfile=self.files.getSelectedText()
if objfile is None:
raise SaveError('no OBJ selected')
try:
uniqueid=int(self.idbox.getText())
except ValueError:
raise SaveError('Failed: Bad ID!')
name=str(self.namebox.getText())
author=str(self.authorbox.getText())
origauthor=str(self.origauthorbox.getText())
shortname=str(self.shortnamebox.getText())
enhance=self.enhance_color.isChecked()
self.errortext.setText('Converting...')
if self.textured.isChecked():
ret=obj2vxptex.CreateVXPExpansionFromOBJTextured(name,author,origauthor,outfile,shortname,objfile,
uniqueid,self.progressCallback,self.statusCallback)
else:
ret=obj2vxp.CreateVXPExpansionFromOBJ(name,author,origauthor,outfile,shortname,objfile,
uniqueid,self.progressCallback,enhance,self.statusCallback)
if ret:
self.errortext.setText('VXP saved as %s' % (outfile))
self.idbox.setText('') #So we don't reuse them by mistake.
if self.install_check.isChecked():
vxpinstaller.installVXP(outfile)
self.errortext.setText('VXP saved as %s, and installed.' % (outfile))
else:
self.errortext.setText('Failed: unknown error (!ret)')
except SaveError,e:
self.errortext.setText('Failed: ' + str(e).strip('"'))
except LoadError,e:
self.errortext.setText('Failed: ' + str(e).strip('"'))
except ValueError:
self.errortext.setText('Failed: Bad ID!')
except pygame.error,e:
self.errortext.setText('Failed: ' + str(e).strip('"'))
def copyAuthorToOrigAuthor(self,junk):
self.origauthorbox.setText(self.authorbox.getText())
def saveExtraSettings(self):
try:
self.config.add_section('obj2vxp')
except:
pass
self.config.set('obj2vxp','enhance',`self.enhance_color.isChecked()`)
self.config.set('obj2vxp','textured',`self.textured.isChecked()`)
def getEnhanceColor(self):
try:
val=self.config.get('obj2vxp','enhance')
return sockgui.BoolConv(val)
except:
return False
def getTextured(self):
try:
val=self.config.get('obj2vxp','textured')
return sockgui.BoolConv(val)
except:
return False
def getOBJList(self):
out=[]
for file in os.listdir('.'):
flower=file.lower()
if flower.endswith('.obj'):
out.append(file)
return out
def onShortNameChanged(self,data,newtext):
if newtext=='':
out=''
else:
out=self.shortnamebox.getText() + '.vxp'
self.filenamelabel.setRed(os.path.exists(out))
self.filenamelabel.setText(out)
def RunConverter(title):
pygame.display.set_caption(title+'obj2vxpGUI '+obj2vxp.version)
screen=pygame.display.set_mode((375,397))
gui=obj2vxpGUI(screen)
return gui.run()
if __name__=='__main__':
pygame.init()
RunConverter('')
def GetInfo():
return ('obj2vxp','Convert OBJs to props',None,obj2vxp.version) # None is the ICONOS.<|fim▁end|>
|
ys=self.makeTab(ys+30+5,66,'Control')
|
<|file_name|>exports.d.ts<|end_file_name|><|fim▁begin|>///<reference path="./WebdriverIO/WebdriverIO.d.ts"/>
///<reference path="./WebdriverIO/WebdriverIOEx.d.ts"/>
///<reference path="./WebdriverIO/WebdriverCSS.d.ts"/><|fim▁hole|>
///<reference path="./Jasmine/JasmineEx.d.ts"/><|fim▁end|>
| |
<|file_name|>syscalls_darwin.go<|end_file_name|><|fim▁begin|>//
// date : 2017-07-14
// author: xjdrew
//
package k1
import (
"fmt"
"net"
"os/exec"
"strings"
"github.com/songgao/water"
)
func execCommand(name, sargs string) error {
args := strings.Split(sargs, " ")
cmd := exec.Command(name, args...)
logger.Infof("exec command: %s %s", name, sargs)
return cmd.Run()
}
func initTun(tun string, ipNet *net.IPNet, mtu int) error {
ip := ipNet.IP
maskIP := net.IP(ipNet.Mask)
sargs := fmt.Sprintf("%s %s %s mtu %d netmask %s up", tun, ip.String(), ip.String(), mtu, maskIP.String())
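	// The assembled arguments look like e.g. "utun2 10.0.0.1 10.0.0.1 mtu 1500 netmask 255.255.255.0 up"
	// (interface name and addresses here are illustrative, not fixed values).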
if err := execCommand("ifconfig", sargs); err != nil {
return err
}
return addRoute(tun, ipNet)
}
func addRoute(tun string, subnet *net.IPNet) error {
ip := subnet.IP
maskIP := net.IP(subnet.Mask)
sargs := fmt.Sprintf("-n add -net %s -netmask %s -interface %s", ip.String(), maskIP.String(), tun)
return execCommand("route", sargs)
}
<|fim▁hole|>func createTun(ip net.IP, mask net.IPMask) (*water.Interface, error) {
ifce, err := water.New(water.Config{
DeviceType: water.TUN,
})
if err != nil {
return nil, err
}
logger.Infof("create %s", ifce.Name())
ipNet := &net.IPNet{
IP: ip,
Mask: mask,
}
if err := initTun(ifce.Name(), ipNet, MTU); err != nil {
return nil, err
}
return ifce, nil
}
// can't listen on tun's ip in macosx
func fixTunIP(ip net.IP) net.IP {
return net.IPv4zero
}<|fim▁end|>
| |
<|file_name|>experimentunitviews.py<|end_file_name|><|fim▁begin|>from csacompendium.research.models import ExperimentUnit
from csacompendium.utils.pagination import APILimitOffsetPagination
from csacompendium.utils.permissions import IsOwnerOrReadOnly
from csacompendium.utils.viewsutils import DetailViewUpdateDelete, CreateAPIViewHook
from rest_framework.filters import DjangoFilterBackend
from rest_framework.generics import ListAPIView
from rest_framework.permissions import IsAuthenticated, IsAdminUser
from .filters import ExperimentUnitListFilter
from csacompendium.research.api.experimentunit.experimentunitserializers import experiment_unit_serializers
def experiment_unit_views():<|fim▁hole|> """
experiment_unit_serializer = experiment_unit_serializers()
class ExperimentUnitCreateAPIView(CreateAPIViewHook):
"""
Creates a single record.
"""
queryset = ExperimentUnit.objects.all()
serializer_class = experiment_unit_serializer['ExperimentUnitDetailSerializer']
permission_classes = [IsAuthenticated]
class ExperimentUnitListAPIView(ListAPIView):
"""
API list view. Gets all records API.
"""
queryset = ExperimentUnit.objects.all()
serializer_class = experiment_unit_serializer['ExperimentUnitListSerializer']
filter_backends = (DjangoFilterBackend,)
filter_class = ExperimentUnitListFilter
pagination_class = APILimitOffsetPagination
class ExperimentUnitDetailAPIView(DetailViewUpdateDelete):
"""
Updates a record.
"""
queryset = ExperimentUnit.objects.all()
serializer_class = experiment_unit_serializer['ExperimentUnitDetailSerializer']
permission_classes = [IsAuthenticated, IsAdminUser]
lookup_field = 'slug'
return {
'ExperimentUnitListAPIView': ExperimentUnitListAPIView,
'ExperimentUnitDetailAPIView': ExperimentUnitDetailAPIView,
'ExperimentUnitCreateAPIView': ExperimentUnitCreateAPIView
}<|fim▁end|>
|
"""
Experiment unit views
:return: All experiment unit views
:rtype: Object
|
<|file_name|>analyze-headless.py<|end_file_name|><|fim▁begin|>import sys
import os
import csv
from datetime import datetime, timedelta
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from matplotlib.dates import drange
from matplotlib.patches import Rectangle
import scenario_factory
# http://www.javascripter.net/faq/hextorgb.htm
PRIMA = (148/256, 164/256, 182/256)
PRIMB = (101/256, 129/256, 164/256)
PRIM = ( 31/256, 74/256, 125/256)
PRIMC = ( 41/256, 65/256, 94/256)
PRIMD = ( 10/256, 42/256, 81/256)
EC = (1, 1, 1, 0)
GRAY = (0.5, 0.5, 0.5)
WHITE = (1, 1, 1)
def plot_each_device(sc, unctrl, ctrl):
t = drange(sc.t_start, sc.t_end, timedelta(minutes=1))
for d_unctrl, d_ctrl in zip(unctrl, ctrl):
fig, ax = plt.subplots(2, sharex=True)
ax[0].set_ylabel('P$_{el}$ [kW]')
ymax = max(d_unctrl[0].max(), d_ctrl[0].max()) / 1000.0
ax[0].set_ylim(-0.01, ymax + (ymax * 0.1))
ax[0].plot_date(t, d_unctrl[0] / 1000.0, fmt='-', lw=1, label='unctrl')
ax[0].plot_date(t, d_ctrl[0] / 1000.0, fmt='-', lw=1, label='ctrl')
leg0 = ax[0].legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3, ncol=4,
borderaxespad=0.0, fancybox=False)
ax[1].set_ylabel('T$_{storage}$ [\\textdegree C]')
ax[1].plot_date(t, d_unctrl[2] - 273.0, fmt='-', lw=1, label='unctrl')
ax[1].plot_date(t, d_ctrl[2] - 273.0, fmt='-', lw=1, label='ctrl')
fig.autofmt_xdate()
for label in leg0.get_texts():
label.set_fontsize('x-small')
fig.subplots_adjust(left=0.1, right=0.95, top=0.88, bottom=0.2)
def plot_aggregated(sc, bd, unctrl, ctrl, ctrl_sched, res=1):
t_day_start = sc.t_block_start - timedelta(hours=sc.t_block_start.hour,
minutes=sc.t_block_start.minute)<|fim▁hole|> i_block_end = (sc.t_block_end - t_day_start).total_seconds() / 60 / res
P_el_unctrl = unctrl[:,0,skip:].sum(0)
P_el_ctrl = ctrl[:,0,skip:].sum(0)
P_el_sched = ctrl_sched[:,skip:].sum(0)
T_storage_ctrl = ctrl[:,2,skip:]
ft = np.array([t[0]] + list(np.repeat(t[1:-1], 2)) + [t[-1]])
P_el_ctrl_fill = np.repeat(P_el_ctrl[:-1], 2)
fig, ax = plt.subplots(2, sharex=True)
fig.subplots_adjust(left=0.11, right=0.95, hspace=0.3, top=0.98, bottom=0.2)
ax[0].set_ylabel('P$_{\mathrm{el}}$ [kW]')
ymax = max(P_el_unctrl.max(), P_el_ctrl_fill.max(), P_el_sched.max(), 0) / 1000.0
ymin = min(P_el_unctrl.min(), P_el_ctrl_fill.min(), P_el_sched.min(), 0) / 1000.0
ax[0].set_ylim(ymin - abs(ymin * 0.1), ymax + abs(ymax * 0.1))
xspace = (t[-1] - t[-2])
ax[0].set_xlim(t[0], t[-1] + xspace)
# ax[0].axvline(t[i_block_start], ls='--', color='0.5')
# ax[0].axvline(t[i_block_end], ls='--', color='0.5')
ax[0].axvspan(t[i_block_start], t[i_block_end], fc=GRAY+(0.1,), ec=EC)
ax[0].axvline(t[0], ls='-', color=GRAY, lw=0.5)
ax[0].axvline(t[len(t)/2], ls='-', color=GRAY, lw=0.5)
l_unctrl, = ax[0].plot_date(t, P_el_unctrl / 1000.0, fmt=':', color=PRIMB, drawstyle='steps-post', lw=0.75)
l_unctrl.set_dashes([1.0, 1.0])
# add lw=0.0 due to bug in mpl (will show as hairline in pdf though...)
l_ctrl = ax[0].fill_between(ft, P_el_ctrl_fill / 1000.0, facecolors=PRIM+(0.5,), edgecolors=EC, lw=0.0)
# Create proxy artist as l_ctrl legend handle
l_ctrl_proxy = Rectangle((0, 0), 1, 1, fc=PRIM, ec=WHITE, lw=0.0, alpha=0.5)
l_sched, = ax[0].plot_date(t, P_el_sched / 1000.0, fmt='-', color=PRIM, drawstyle='steps-post', lw=0.75)
# colors = [
# '#348ABD', # blue
# '#7A68A6', # purple
# '#A60628', # red
# '#467821', # green
# '#CF4457', # pink
# '#188487', # turqoise
# '#E24A33', # orange
# '#1F4A7D', # primary
# '#BF9D23', # secondary
# '#BF5B23', # complementary
# '#94A4B6', # primaryA
# '#6581A4', # primaryB
# '#29415E', # primaryC
# '#0A2A51', # primaryD
# ][:len(unctrl)]
# for (c, P_el_unctrl, P_el_ctrl, P_el_sched) in zip(colors, unctrl[:,0,:], ctrl[:,0,:], ctrl_sched):
# ax[0].plot_date(t, P_el_unctrl / 1000.0, fmt='-', color=c, lw=1, label='unctrl')
# ax[0].plot_date(t, P_el_ctrl / 1000.0, fmt=':', color=c, lw=1, label='ctrl')
# ax[0].plot_date(t, P_el_sched / 1000.0, fmt='--x', color=c, lw=1, label='sched')
ymax = T_storage_ctrl.max() - 273
ymin = T_storage_ctrl.min() - 273
ax[1].set_ylim(ymin - abs(ymin * 0.01), ymax + abs(ymax * 0.01))
ax[1].set_ylabel('T$_{\mathrm{storage}}\;[^{\circ}\mathrm{C}]$', labelpad=9)
ax[1].axvspan(t[i_block_start], t[i_block_end], fc=GRAY+(0.1,), ec=EC)
ax[1].axvline(t[0], ls='-', color=GRAY, lw=0.5)
ax[1].axvline(t[len(t)/2], ls='-', color=GRAY, lw=0.5)
for v in T_storage_ctrl:
ax[1].plot_date(t, v - 273.0, fmt='-', color=PRIMA, alpha=0.25, lw=0.5)
l_T_med, = ax[1].plot_date(t, T_storage_ctrl.mean(0) - 273.0, fmt='-', color=PRIMA, alpha=0.75, lw=1.5)
ax[0].xaxis.get_major_formatter().scaled[1/24.] = '%H:%M'
ax[-1].set_xlabel('Tageszeit')
fig.autofmt_xdate()
ax[1].legend([l_sched, l_unctrl, l_ctrl_proxy, l_T_med],
['Verbundfahrplan', 'ungesteuert', 'gesteuert', 'Speichertemperaturen (Median)'],
bbox_to_anchor=(0., 1.03, 1., .103), loc=8, ncol=4,
handletextpad=0.2, mode='expand', handlelength=3,
borderaxespad=0.25, fancybox=False, fontsize='x-small')
# import pdb
# pdb.set_trace()
return fig
def plot_aggregated_SLP(sc, bd, unctrl, ctrl, ctrl_sched, res=1):
assert hasattr(sc, 'slp_file')
t_day_start = sc.t_block_start - timedelta(hours=sc.t_block_start.hour,
minutes=sc.t_block_start.minute)
skip = (t_day_start - sc.t_start).total_seconds() / 60 / res
i_block_start = (sc.t_block_start - t_day_start).total_seconds() / 60 / res
i_block_end = (sc.t_block_end - t_day_start).total_seconds() / 60 / res
t = drange(sc.t_block_start, sc.t_block_end, timedelta(minutes=res))
P_el_unctrl = unctrl[:,0,skip + i_block_start:skip + i_block_end].sum(0)
P_el_ctrl = ctrl[:,0,skip + i_block_start:skip + i_block_end].sum(0)
# ctrl correction
P_el_ctrl = np.roll(P_el_ctrl, -1, axis=0)
P_el_sched = ctrl_sched[:,skip + i_block_start:skip + i_block_end].sum(0)
T_storage_ctrl = ctrl[:,2,skip + i_block_start:skip + i_block_end]
slp = _read_slp(sc, bd)[skip + i_block_start:skip + i_block_end]
diff_ctrl = (P_el_ctrl - P_el_unctrl) / 1000.0
diff_ctrl_fill = np.repeat((slp + diff_ctrl)[:-1], 2)
slp_fill = np.repeat(slp[:-1], 2)
ft = np.array([t[0]] + list(np.repeat(t[1:-1], 2)) + [t[-1]])
P_el_ctrl_fill = np.repeat(P_el_ctrl[:-1], 2)
fig = plt.figure(figsize=(6.39, 4.25))
ax0 = fig.add_subplot(311)
ax1 = fig.add_subplot(312, sharex=ax0)
ax2 = fig.add_subplot(313, sharex=ax0)
ax = [ax0, ax1, ax2]
# bottom=0.1 doesn't work here... :(
fig.subplots_adjust(left=0.11, right=0.95, hspace=0.2, top=0.93)
ax[0].set_ylabel('P$_{\mathrm{el}}$ [kW]')
ymax = max(P_el_unctrl.max(), P_el_ctrl_fill.max(), P_el_sched.max(), 0) / 1000.0
ymin = min(P_el_unctrl.min(), P_el_ctrl_fill.min(), P_el_sched.min(), 0) / 1000.0
ax[0].set_ylim(ymin - abs(ymin * 0.1), ymax + abs(ymax * 0.1))
xspace = (t[-1] - t[-2])
ax[0].set_xlim(t[0], t[-1] + xspace)
l_unctrl, = ax[0].plot_date(t, P_el_unctrl / 1000.0, fmt=':', color=PRIMB, drawstyle='steps-post', lw=0.75)
l_unctrl.set_dashes([1.0, 1.0])
# add lw=0.0 due to bug in mpl (will show as hairline in pdf though...)
l_ctrl = ax[0].fill_between(ft, P_el_ctrl_fill / 1000.0, facecolors=PRIM+(0.5,), edgecolors=EC, lw=0.0)
# Create proxy artist as l_ctrl legend handle
l_ctrl_proxy = Rectangle((0, 0), 1, 1, fc=PRIM, ec=WHITE, lw=0.0, alpha=0.5)
l_sched, = ax[0].plot_date(t, P_el_sched / 1000.0, fmt='-', color=PRIM, drawstyle='steps-post', lw=0.75)
# colors = [
# '#348ABD', # blue
# '#7A68A6', # purple
# '#A60628', # red
# '#467821', # green
# '#CF4457', # pink
# '#188487', # turqoise
# '#E24A33', # orange
# '#1F4A7D', # primary
# '#BF9D23', # secondary
# '#BF5B23', # complementary
# '#94A4B6', # primaryA
# '#6581A4', # primaryB
# '#29415E', # primaryC
# '#0A2A51', # primaryD
# ][:len(unctrl)]
# for (c, P_el_unctrl, P_el_ctrl, P_el_sched) in zip(colors, unctrl[:,0,:], ctrl[:,0,:], ctrl_sched):
# ax[0].plot_date(t, P_el_unctrl / 1000.0, fmt='-', color=c, lw=1, label='unctrl')
# ax[0].plot_date(t, P_el_ctrl / 1000.0, fmt=':', color=c, lw=1, label='ctrl')
# ax[0].plot_date(t, P_el_sched / 1000.0, fmt='--x', color=c, lw=1, label='sched')
ymax = T_storage_ctrl.max() - 273
ymin = T_storage_ctrl.min() - 273
ax[1].set_ylim(ymin - abs(ymin * 0.01), ymax + abs(ymax * 0.01))
ax[1].set_ylabel('T$_{\mathrm{storage}}\;[^{\circ}\mathrm{C}]$', labelpad=9)
for v in T_storage_ctrl:
ax[1].plot_date(t, v - 273.0, fmt='-', color=PRIMA, alpha=0.25, lw=0.5)
l_T_med, = ax[1].plot_date(t, T_storage_ctrl.mean(0) - 273.0, fmt='-', color=PRIMA, alpha=0.75, lw=1.5)
ax[2].set_ylabel('P$_{el}$ [kW]')
ax[2].set_xlabel('Tageszeit')
ymin = min(slp.min(), (slp + diff_ctrl).min())
ax[2].set_ylim(ymin + (ymin * 0.1), 0)
ax[2].plot_date(t, slp, fmt='-', color=PRIMB, drawstyle='steps-post', lw=0.75, label='Tageslastprofil')
ax[2].fill_between(ft, diff_ctrl_fill, slp_fill, where=diff_ctrl_fill>=slp_fill, facecolors=PRIM+(0.5,), edgecolors=EC, lw=0.0)
ax[2].fill_between(ft, diff_ctrl_fill, slp_fill, where=diff_ctrl_fill<slp_fill, facecolors=PRIMB+(0.5,), edgecolors=EC, lw=0.0)
ax[0].legend([l_sched, l_unctrl, l_ctrl_proxy, l_T_med],
['Verbundfahrplan', 'ungesteuert', 'gesteuert', 'Speichertemperaturen (Median)'],
bbox_to_anchor=(0., 1.05, 1., .105), loc=8, ncol=4,
handletextpad=0.2, mode='expand', handlelength=3,
borderaxespad=0.25, fancybox=False, fontsize='x-small')
ax[2].legend(loc=1, fancybox=False, fontsize='x-small')
fig.autofmt_xdate()
ax[0].xaxis.get_major_formatter().scaled[1/24.] = '%H:%M'
return fig
def plot_samples(sc, basedir, idx=None):
sample_data = np.load(p(basedir, sc.run_pre_samplesfile))
if idx is not None:
sample_data = sample_data[idx].reshape((1,) + sample_data.shape[1:])
fig, ax = plt.subplots(len(sample_data))
if len(sample_data) == 1:
ax = [ax]
for i, samples in enumerate(sample_data):
t = np.arange(samples.shape[-1])
for s in samples:
ax[i].plot(t, s)
def norm(minimum, maximum, value):
# return value
if maximum == minimum:
return maximum
return (value - minimum) / (maximum - minimum)
def _read_slp(sc, bd):
# Read csv data
slp = []
found = False
with open(sc.slp_file, 'r', encoding='latin-1') as f:
reader = csv.reader(f, delimiter=';')
for row in reader:
if not row:
continue
if not found and row[0] == 'Datum':
found = True
elif found:
date = datetime.strptime('_'.join(row[:2]), '%d.%m.%Y_%H:%M:%S')
if date < sc.t_start:
continue
elif date >= sc.t_end:
break
# This is a demand, so negate the values
slp.append(-1.0 * float(row[2].replace(',', '.')))
slp = np.array(slp)
# Scale values
# if hasattr(sc, 'run_unctrl_datafile'):
# slp_norm = norm(slp.min(), slp.max(), slp)
# unctrl = np.load(p(bd, sc.run_unctrl_datafile)).sum(0) / 1000
# slp = slp_norm * (unctrl.max() - unctrl.min()) + unctrl.min()
MS_day_mean = 13600 # kWh, derived from SmartNord Scenario document
MS_15_mean = MS_day_mean / 96
slp = slp / np.abs(slp.mean()) * MS_15_mean
return slp
# return np.array(np.roll(slp, 224, axis=0))
def plot_slp(sc, bd):
slp = _read_slp(sc, bd)
res = 1
if (sc.t_end - sc.t_start).total_seconds() / 60 == slp.shape[-1] * 15:
res = 15
t = drange(sc.t_start, sc.t_end, timedelta(minutes=res))
fig, ax = plt.subplots()
ax.set_ylabel('P$_{el}$ [kW]')
ymax = max(slp.max(), slp.max())
ymin = min(slp.min(), slp.min())
ax.set_ylim(ymin - (ymin * 0.1), ymax + (ymax * 0.1))
ax.plot_date(t, slp, fmt='-', lw=1, label='H0')
leg0 = ax.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3, ncol=4,
borderaxespad=0.0, fancybox=False)
fig.autofmt_xdate()
for label in leg0.get_texts():
label.set_fontsize('x-small')
fig.subplots_adjust(left=0.1, right=0.95, top=0.88, bottom=0.2)
return fig
def p(basedir, fn):
return os.path.join(basedir, fn)
def resample(d, resolution):
# resample the innermost axis to 'resolution'
shape = tuple(d.shape[:-1]) + (int(d.shape[-1]/resolution), resolution)
return d.reshape(shape).sum(-1)/resolution
def run(sc_file):
print()
bd = os.path.dirname(sc_file)
sc = scenario_factory.Scenario()
sc.load_JSON(sc_file)
print(sc.title)
# plot_samples(sc, bd)
# plt.show()
unctrl = np.load(p(bd, sc.run_unctrl_datafile))
pre = np.load(p(bd, sc.run_pre_datafile))
block = np.load(p(bd, sc.run_ctrl_datafile))
post = np.load(p(bd, sc.run_post_datafile))
sched = np.load(p(bd, sc.sched_file))
ctrl = np.zeros(unctrl.shape)
idx = 0
for l in (pre, block, post):
ctrl[:,:,idx:idx + l.shape[-1]] = l
idx += l.shape[-1]
if sched.shape[-1] == unctrl.shape[-1] / 15:
print('Extending schedules shape by factor 15')
sched = sched.repeat(15, axis=1)
ctrl_sched = np.zeros((unctrl.shape[0], unctrl.shape[-1]))
ctrl_sched = np.ma.array(ctrl_sched)
ctrl_sched[:,:pre.shape[-1]] = np.ma.masked
ctrl_sched[:,pre.shape[-1]:pre.shape[-1] + sched.shape[-1]] = sched
ctrl_sched[:,pre.shape[-1] + sched.shape[-1]:] = np.ma.masked
# plot_each_device(sc, unctrl, ctrl, sched)
minutes = (sc.t_end - sc.t_start).total_seconds() / 60
assert unctrl.shape[-1] == ctrl.shape[-1] == ctrl_sched.shape[-1]
shape = unctrl.shape[-1]
if hasattr(sc, 'slp_file'):
if minutes == shape:
print('data is 1-minute resolution, will be resampled by 15')
res = 15
elif minutes == shape * 15:
print('data is 15-minute resolution, all fine')
res = 1
else:
raise RuntimeError('unsupported data resolution: %.2f' % (minutes / shape))
unctrl = resample(unctrl, res)
ctrl = resample(ctrl, res)
ctrl_sched = resample(ctrl_sched, res)
fig = plot_aggregated_SLP(sc, bd, unctrl, ctrl, ctrl_sched, res=15)
else:
if minutes == shape:
print('data is 1-minute resolution, will be resampled by 60')
res = 60
elif minutes == shape * 15:
print('data is 15-minute resolution, will be resampled by 4')
res = 4
elif minutes == shape * 60:
print('data is 60-minute resolution, all fine')
res = 1
else:
raise RuntimeError('unsupported data resolution: %.2f' % (minutes / shape))
unctrl = resample(unctrl, res)
ctrl = resample(ctrl, res)
ctrl_sched = resample(ctrl_sched, res)
fig = plot_aggregated(sc, bd, unctrl, ctrl, ctrl_sched, res=60)
fig.savefig(p(bd, sc.title) + '.pdf')
fig.savefig(p(bd, sc.title) + '.png', dpi=300)
# plt.show()
if __name__ == '__main__':
for n in sys.argv[1:]:
if os.path.isdir(n):
run(p(n, '0.json'))
else:
run(n)<|fim▁end|>
|
t = drange(t_day_start, sc.t_end, timedelta(minutes=res))
skip = (t_day_start - sc.t_start).total_seconds() / 60 / res
i_block_start = (sc.t_block_start - t_day_start).total_seconds() / 60 / res
|
<|file_name|>ProjectModulesManager.java<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2005-2013 by Appcelerator, Inc. All Rights Reserved.
* Licensed under the terms of the Eclipse Public License (EPL).
* Please see the license.txt included with this distribution for details.
* Any modifications to this file must keep this entire header intact.
*/
/*
* Created on May 24, 2005
*
* @author Fabio Zadrozny
*/
package org.python.pydev.editor.codecompletion.revisited;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.CoreException;
import org.python.pydev.core.DeltaSaver;
import org.python.pydev.core.ICodeCompletionASTManager;
import org.python.pydev.core.IInterpreterInfo;
import org.python.pydev.core.IInterpreterManager;
import org.python.pydev.core.IModule;
import org.python.pydev.core.IModulesManager;
import org.python.pydev.core.IProjectModulesManager;
import org.python.pydev.core.IPythonNature;
import org.python.pydev.core.IPythonPathNature;
import org.python.pydev.core.ISystemModulesManager;
import org.python.pydev.core.ModulesKey;
import org.python.pydev.core.log.Log;
import org.python.pydev.editor.codecompletion.revisited.javaintegration.JavaProjectModulesManagerCreator;
import org.python.pydev.plugin.nature.PythonNature;
import org.python.pydev.shared_core.io.FileUtils;
import org.python.pydev.shared_core.string.StringUtils;
import org.python.pydev.shared_core.structure.Tuple;
/**
* @author Fabio Zadrozny
*/
public final class ProjectModulesManager extends ModulesManagerWithBuild implements IProjectModulesManager {
private static final boolean DEBUG_MODULES = false;
//these attributes must be set whenever this class is restored.
private volatile IProject project;
private volatile IPythonNature nature;
public ProjectModulesManager() {
}
/**
* @see org.python.pydev.core.IProjectModulesManager#setProject(org.eclipse.core.resources.IProject, boolean)
*/
@Override
public void setProject(IProject project, IPythonNature nature, boolean restoreDeltas) {
this.project = project;
this.nature = nature;
File completionsCacheDir = this.nature.getCompletionsCacheDir();
if (completionsCacheDir == null) {
return; //project was deleted.
}
DeltaSaver<ModulesKey> d = this.deltaSaver = new DeltaSaver<ModulesKey>(completionsCacheDir, "v1_astdelta",
readFromFileMethod,
toFileMethod);
if (!restoreDeltas) {
d.clearAll(); //remove any existing deltas
} else {
d.processDeltas(this); //process the current deltas (clears current deltas automatically and saves it when the processing is concluded)
}
}
// ------------------------ delta processing
/**
* @see org.python.pydev.core.IProjectModulesManager#endProcessing()
*/
@Override
public void endProcessing() {
//save it with the updated info
nature.saveAstManager();
}
// ------------------------ end delta processing
/**
* @see org.python.pydev.core.IProjectModulesManager#setPythonNature(org.python.pydev.core.IPythonNature)
*/
@Override
public void setPythonNature(IPythonNature nature) {
this.nature = nature;
}
/**
* @see org.python.pydev.core.IProjectModulesManager#getNature()
*/
@Override
public IPythonNature getNature() {
return nature;
}
/**
* @param defaultSelectedInterpreter
* @see org.python.pydev.core.IProjectModulesManager#getSystemModulesManager()
*/
@Override
public ISystemModulesManager getSystemModulesManager() {
if (nature == null) {
Log.log("Nature still not set");
return null; //still not set (initialization)
}
try {
return nature.getProjectInterpreter().getModulesManager();
} catch (Exception e1) {
return null;
}
}
/**
* @see org.python.pydev.core.IProjectModulesManager#getAllModuleNames(boolean addDependencies, String partStartingWithLowerCase)
*/
@Override
public Set<String> getAllModuleNames(boolean addDependencies, String partStartingWithLowerCase) {
if (addDependencies) {
Set<String> s = new HashSet<String>();
IModulesManager[] managersInvolved = this.getManagersInvolved(true);
for (int i = 0; i < managersInvolved.length; i++) {
s.addAll(managersInvolved[i].getAllModuleNames(false, partStartingWithLowerCase));
}
return s;
} else {
return super.getAllModuleNames(addDependencies, partStartingWithLowerCase);
}
}
/**
* @return all the modules that start with some token (from this manager and others involved)
*/
@Override
public SortedMap<ModulesKey, ModulesKey> getAllModulesStartingWith(String strStartingWith) {
SortedMap<ModulesKey, ModulesKey> ret = new TreeMap<ModulesKey, ModulesKey>();
IModulesManager[] managersInvolved = this.getManagersInvolved(true);
for (int i = 0; i < managersInvolved.length; i++) {
ret.putAll(managersInvolved[i].getAllDirectModulesStartingWith(strStartingWith));
}
return ret;
}
/**
* @see org.python.pydev.core.IProjectModulesManager#getModule(java.lang.String, org.python.pydev.plugin.nature.PythonNature, boolean)
*/
@Override
public IModule getModule(String name, IPythonNature nature, boolean dontSearchInit) {
return getModule(name, nature, true, dontSearchInit);
}
/**
* When looking for relative, we do not check dependencies
*/
@Override
public IModule getRelativeModule(String name, IPythonNature nature) {
return super.getModule(false, name, nature, true); //cannot be a compiled module
}
/**
* @see org.python.pydev.core.IProjectModulesManager#getModule(java.lang.String, org.python.pydev.plugin.nature.PythonNature, boolean, boolean)
*/
@Override
public IModule getModule(String name, IPythonNature nature, boolean checkSystemManager, boolean dontSearchInit) {
Tuple<IModule, IModulesManager> ret = getModuleAndRelatedModulesManager(name, nature, checkSystemManager,
dontSearchInit);
if (ret != null) {
return ret.o1;
}
return null;
}
/**
* @return a tuple with the IModule requested and the IModulesManager that contained that module.
*/
@Override
public Tuple<IModule, IModulesManager> getModuleAndRelatedModulesManager(String name, IPythonNature nature,
boolean checkSystemManager, boolean dontSearchInit) {
IModule module = null;
IModulesManager[] managersInvolved = this.getManagersInvolved(true); //only get the system manager here (to avoid recursion)
for (IModulesManager m : managersInvolved) {
if (m instanceof ISystemModulesManager) {
module = ((ISystemModulesManager) m).getBuiltinModule(name, dontSearchInit);
if (module != null) {
if (DEBUG_MODULES) {
System.out.println("Trying to get:" + name + " - " + " returned builtin:" + module + " - "
+ m.getClass());
}
return new Tuple<IModule, IModulesManager>(module, m);
}
}
}
for (IModulesManager m : managersInvolved) {
if (m instanceof IProjectModulesManager) {
IProjectModulesManager pM = (IProjectModulesManager) m;
module = pM.getModuleInDirectManager(name, nature, dontSearchInit);
} else if (m instanceof ISystemModulesManager) {
ISystemModulesManager systemModulesManager = (ISystemModulesManager) m;
module = systemModulesManager.getModuleWithoutBuiltins(name, nature, dontSearchInit);
} else {
throw new RuntimeException("Unexpected: " + m);
}
if (module != null) {
if (DEBUG_MODULES) {
System.out.println("Trying to get:" + name + " - " + " returned:" + module + " - " + m.getClass());
}
return new Tuple<IModule, IModulesManager>(module, m);
}
}
if (DEBUG_MODULES) {
System.out.println("Trying to get:" + name + " - " + " returned:null - " + this.getClass());
}
return null;
}
/**
* Only searches the modules contained in the direct modules manager.
*/
@Override
public IModule getModuleInDirectManager(String name, IPythonNature nature, boolean dontSearchInit) {
return super.getModule(name, nature, dontSearchInit);
}
@Override
protected String getResolveModuleErr(IResource member) {
return "Unable to find the path " + member + " in the project were it\n"
+ "is added as a source folder for pydev (project: " + project.getName() + ")";
}
public String resolveModuleOnlyInProjectSources(String fileAbsolutePath, boolean addExternal) throws CoreException {
String onlyProjectPythonPathStr = this.nature.getPythonPathNature().getOnlyProjectPythonPathStr(addExternal);
List<String> pathItems = StringUtils.splitAndRemoveEmptyTrimmed(onlyProjectPythonPathStr, '|');
List<String> filteredPathItems = filterDuplicatesPreservingOrder(pathItems);
return this.pythonPathHelper.resolveModule(fileAbsolutePath, false, filteredPathItems, project);
}
private List<String> filterDuplicatesPreservingOrder(List<String> pathItems) {
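        // LinkedHashSet drops duplicates while preserving first-seen order,
        // e.g. [a, b, a, c] -> [a, b, c].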
return new ArrayList<>(new LinkedHashSet<>(pathItems));
}
/**
* @see org.python.pydev.core.IProjectModulesManager#resolveModule(java.lang.String)
*/
@Override
public String resolveModule(String full) {
return resolveModule(full, true);
}
/**
* @see org.python.pydev.core.IProjectModulesManager#resolveModule(java.lang.String, boolean)
*/
@Override
public String resolveModule(String full, boolean checkSystemManager) {
IModulesManager[] managersInvolved = this.getManagersInvolved(checkSystemManager);
for (IModulesManager m : managersInvolved) {
String mod;
if (m instanceof IProjectModulesManager) {
IProjectModulesManager pM = (IProjectModulesManager) m;
mod = pM.resolveModuleInDirectManager(full);
} else {
mod = m.resolveModule(full);
}
if (mod != null) {
return mod;
}
}
return null;
}
@Override
public String resolveModuleInDirectManager(String full) {
if (nature != null) {
return pythonPathHelper.resolveModule(full, false, nature.getProject());
}
return super.resolveModule(full);
}
@Override
public String resolveModuleInDirectManager(IFile member) {
File inOs = member.getRawLocation().toFile();
return resolveModuleInDirectManager(FileUtils.getFileAbsolutePath(inOs));
}
/**
* @see org.python.pydev.core.IProjectModulesManager#getSize(boolean)
*/
@Override
public int getSize(boolean addDependenciesSize) {
if (addDependenciesSize) {
int size = 0;
IModulesManager[] managersInvolved = this.getManagersInvolved(true);
for (int i = 0; i < managersInvolved.length; i++) {
size += managersInvolved[i].getSize(false);
}
return size;
} else {<|fim▁hole|>
/**
* @see org.python.pydev.core.IProjectModulesManager#getBuiltins()
*/
@Override
public String[] getBuiltins() {
String[] builtins = null;
ISystemModulesManager systemModulesManager = getSystemModulesManager();
if (systemModulesManager != null) {
builtins = systemModulesManager.getBuiltins();
}
return builtins;
}
/**
* @param checkSystemManager whether the system manager should be added
* @param referenced true if we should get the referenced projects
* false if we should get the referencing projects
* @return the Managers that this project references or the ones that reference this project (depends on 'referenced')
*
* Change in 1.3.3: adds itself to the list of returned managers
*/
private synchronized IModulesManager[] getManagers(boolean checkSystemManager, boolean referenced) {
CompletionCache localCompletionCache = this.completionCache;
if (localCompletionCache != null) {
IModulesManager[] ret = localCompletionCache.getManagers(referenced);
if (ret != null) {
return ret;
}
}
ArrayList<IModulesManager> list = new ArrayList<IModulesManager>();
ISystemModulesManager systemModulesManager = getSystemModulesManager();
//add itself 1st
list.add(this);
//get the projects 1st
if (project != null) {
IModulesManager javaModulesManagerForProject = JavaProjectModulesManagerCreator
.createJavaProjectModulesManagerIfPossible(project);
if (javaModulesManagerForProject != null) {
list.add(javaModulesManagerForProject);
}
Set<IProject> projs;
if (referenced) {
projs = getReferencedProjects(project);
} else {
projs = getReferencingProjects(project);
}
addModuleManagers(list, projs);
}
//the system is the last one we add
//http://sourceforge.net/tracker/index.php?func=detail&aid=1687018&group_id=85796&atid=577329
if (checkSystemManager && systemModulesManager != null) {
//may be null in initialization or if the project does not have a related interpreter manager at the present time
//(i.e.: misconfigured project)
list.add(systemModulesManager);
}
IModulesManager[] ret = list.toArray(new IModulesManager[list.size()]);
if (localCompletionCache != null) {
localCompletionCache.setManagers(ret, referenced);
}
return ret;
}
public static Set<IProject> getReferencingProjects(IProject project) {
HashSet<IProject> memo = new HashSet<IProject>();
getProjectsRecursively(project, false, memo);
memo.remove(project); //shouldn't happen unless we've a cycle...
return memo;
}
public static Set<IProject> getReferencedProjects(IProject project) {
HashSet<IProject> memo = new HashSet<IProject>();
getProjectsRecursively(project, true, memo);
memo.remove(project); //shouldn't happen unless we've a cycle...
return memo;
}
/**
* @param project the project for which we want references.
* @param referenced whether we want to get the referenced projects or the ones referencing this one.
* @param memo (out) this is the place where all the projects will e available.
*
* Note: the project itself will not be added.
*/
private static void getProjectsRecursively(IProject project, boolean referenced, HashSet<IProject> memo) {
IProject[] projects = null;
try {
if (project == null || !project.isOpen() || !project.exists() || memo.contains(projects)) {
return;
}
if (referenced) {
projects = project.getReferencedProjects();
} else {
projects = project.getReferencingProjects();
}
} catch (CoreException e) {
//ignore (it's closed)
}
if (projects != null) {
for (IProject p : projects) {
if (!memo.contains(p)) {
memo.add(p);
getProjectsRecursively(p, referenced, memo);
}
}
}
}
/**
* @param list the list that will be filled with the managers
* @param projects the projects that should have the managers added
*/
private void addModuleManagers(ArrayList<IModulesManager> list, Collection<IProject> projects) {
for (IProject project : projects) {
PythonNature nature = PythonNature.getPythonNature(project);
if (nature != null) {
ICodeCompletionASTManager otherProjectAstManager = nature.getAstManager();
if (otherProjectAstManager != null) {
IModulesManager projectModulesManager = otherProjectAstManager.getModulesManager();
if (projectModulesManager != null) {
list.add(projectModulesManager);
}
} else {
//Removed the warning below: this may be common when starting up...
//String msg = "No ast manager configured for :" + project.getName();
//Log.log(IStatus.WARNING, msg, new RuntimeException(msg));
}
}
IModulesManager javaModulesManagerForProject = JavaProjectModulesManagerCreator
.createJavaProjectModulesManagerIfPossible(project);
if (javaModulesManagerForProject != null) {
list.add(javaModulesManagerForProject);
}
}
}
/**
* @return Returns the managers that this project references, including itself.
*/
public IModulesManager[] getManagersInvolved(boolean checkSystemManager) {
return getManagers(checkSystemManager, true);
}
/**
* @return Returns the managers that reference this project, including itself.
*/
public IModulesManager[] getRefencingManagersInvolved(boolean checkSystemManager) {
return getManagers(checkSystemManager, false);
}
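// Illustrative sketch (added for clarity; the project names are hypothetical, not part of PyDev):
// if project A references B and B references C, then getManagersInvolved(true) on A's nature walks
// getReferencedProjects(A) = {B, C}, collects each project's modules manager (plus any Java modules
// manager) and appends the system modules manager last, so lookups resolve in A, then B and C, then
// the interpreter. getRefencingManagersInvolved(true) walks the reverse direction over
// getReferencingProjects.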
/**
* Helper to work as a timer to know when to check for pythonpath consistencies.
*/
private volatile long checkedPythonpathConsistency = 0;
/**
* @see org.python.pydev.core.IProjectModulesManager#getCompletePythonPath()
*/
@Override
public List<String> getCompletePythonPath(IInterpreterInfo interpreter, IInterpreterManager manager) {
List<String> l = new ArrayList<String>();
IModulesManager[] managersInvolved = getManagersInvolved(true);
for (IModulesManager m : managersInvolved) {
if (m instanceof ISystemModulesManager) {
ISystemModulesManager systemModulesManager = (ISystemModulesManager) m;
l.addAll(systemModulesManager.getCompletePythonPath(interpreter, manager));
} else {
PythonPathHelper h = (PythonPathHelper) m.getPythonPathHelper();
if (h != null) {
List<String> pythonpath = h.getPythonpath();
//Note: this was previously only l.addAll(pythonpath), and was changed to the code below as a place
//to check for consistencies in the pythonpath stored in the pythonpath helper and the pythonpath
//available in the PythonPathNature (in general, when requesting it the PythonPathHelper should be
//used, as it's a cache for the resolved values of the PythonPathNature).
boolean forceCheck = false;
ProjectModulesManager m2 = null;
String onlyProjectPythonPathStr = null;
if (m instanceof ProjectModulesManager) {
long currentTimeMillis = System.currentTimeMillis();
m2 = (ProjectModulesManager) m;
//check at most once every 20 seconds (or every time if the pythonpath is empty... in which case
//it should be fast to get it too if it's consistent).
if (pythonpath.size() == 0 || currentTimeMillis - m2.checkedPythonpathConsistency > 20 * 1000) {
try {
IPythonNature n = m.getNature();
if (n != null) {
IPythonPathNature pythonPathNature = n.getPythonPathNature();
if (pythonPathNature != null) {
onlyProjectPythonPathStr = pythonPathNature.getOnlyProjectPythonPathStr(true);
m2.checkedPythonpathConsistency = currentTimeMillis;
forceCheck = true;
}
}
} catch (Exception e) {
Log.log(e);
}
}
}
if (forceCheck) {
//Check if it's actually correct and auto-fix if it's not.
List<String> parsed = PythonPathHelper.parsePythonPathFromStr(onlyProjectPythonPathStr, null);
if (m2.nature != null && !new HashSet<String>(parsed).equals(new HashSet<String>(pythonpath))) {
// Make it right at this moment (so any other place that calls it before the restore
//takes place has the proper version).
h.setPythonPath(parsed);
// Force a rebuild as the PythonPathHelper paths are not up to date.
m2.nature.rebuildPath();
}
l.addAll(parsed); //add the proper paths
} else {
l.addAll(pythonpath);
}
}
}
}
return l;
}
}<|fim▁end|>
|
return super.getSize(addDependenciesSize);
}
}
|
<|file_name|>unit_runner.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>import * as glob from 'glob';
import * as path from 'path';
import * as minimist from 'minimist';
import { execSilent } from './e2e/utils/process';
const argv = minimist(process.argv.slice(2), {
boolean: ['debug', 'verbose', 'nolink', 'nobuild'],
string: ['glob', 'ignore']
});
const specFiles = glob.sync(path.resolve(__dirname, './unit/**/*.spec.*'));
const mo = new Mocha({ timeout: 180000, reporter: 'spec' });
Promise.resolve()
.then((): any => {
if (argv['nobuild']) {
return Promise.resolve();
} else {
return execSilent('npm', ['run', 'build:prod']);
}
})
.then((): any => {
if (argv['nolink']) {
return Promise.resolve();
} else {
return execSilent('npm', ['link']);
}
})
.then(() => {
specFiles.forEach(file => mo.addFile(file));
mo.run(failures => {
process.on('exit', () => process.exit(failures));
});
});<|fim▁end|>
|
import * as Mocha from 'mocha';
|
<|file_name|>signing_test.py<|end_file_name|><|fim▁begin|># Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from . import model, signing, test_common, test_config
mock = test_common.import_mock()
# python2 support.
try:
FileNotFoundError
except NameError:
FileNotFoundError = IOError
@mock.patch('signing.commands.lenient_run_command_output')
@mock.patch('signing.commands.macos_version', return_value=[10, 15])
class TestLinkerSignedArm64NeedsForce(unittest.TestCase):
def test_oserror(self, macos_version, lenient_run_command_output):
lenient_run_command_output.return_value = (None, None, None)
self.assertFalse(signing._linker_signed_arm64_needs_force(None))
lenient_run_command_output.assert_called_once()
def test_unsigned(self, macos_version, lenient_run_command_output):
lenient_run_command_output.return_value = (
1, b'', b'test: code object is not signed at all\n')
self.assertFalse(signing._linker_signed_arm64_needs_force(None))
lenient_run_command_output.assert_called_once()
def test_not_linker_signed(self, macos_version, lenient_run_command_output):
lenient_run_command_output.return_value = (0, b'', b'''Executable=test
Identifier=test
Format=Mach-O thin (arm64)
CodeDirectory v=20100 size=592 flags=0x2(adhoc) hashes=13+2 location=embedded
Signature=adhoc
Info.plist=not bound
TeamIdentifier=not set
Sealed Resources=none
Internal requirements count=0 size=12
''')
self.assertFalse(signing._linker_signed_arm64_needs_force(None))
lenient_run_command_output.assert_called_once()
def test_linker_signed_10_15(self, macos_version,
lenient_run_command_output):
lenient_run_command_output.return_value = (0, b'', b'''Executable=test
Identifier=test
Format=Mach-O thin (arm64)
CodeDirectory v=20400 size=512 flags=0x20002(adhoc,???) hashes=13+0 location=embedded
Signature=adhoc
Info.plist=not bound
TeamIdentifier=not set
Sealed Resources=none
Internal requirements=none
''')
self.assertTrue(signing._linker_signed_arm64_needs_force(None))
lenient_run_command_output.assert_called_once()
def test_linker_signed_10_16(self, macos_version,
lenient_run_command_output):
# 10.16 is what a Python built against an SDK < 11.0 will see 11.0 as.
macos_version.return_value = [10, 16]
lenient_run_command_output.return_value = (0, b'', b'''Executable=test
Identifier=test
Format=Mach-O thin (arm64)
CodeDirectory v=20400 size=250 flags=0x20002(adhoc,linker-signed) hashes=5+0 location=embedded
Signature=adhoc
Info.plist=not bound
TeamIdentifier=not set
Sealed Resources=none
Internal requirements=none
''')
self.assertFalse(signing._linker_signed_arm64_needs_force(None))
lenient_run_command_output.assert_not_called()
def test_linker_signed_11_0(self, macos_version,
lenient_run_command_output):
macos_version.return_value = [11, 0]
lenient_run_command_output.return_value = (0, b'', b'''Executable=test
Identifier=test
Format=Mach-O thin (arm64)
CodeDirectory v=20400 size=250 flags=0x20002(adhoc,linker-signed) hashes=5+0 location=embedded
Signature=adhoc
Info.plist=not bound
TeamIdentifier=not set<|fim▁hole|> self.assertFalse(signing._linker_signed_arm64_needs_force(None))
lenient_run_command_output.assert_not_called()
@mock.patch(
'signing.signing._linker_signed_arm64_needs_force', return_value=False)
@mock.patch('signing.commands.run_command')
class TestSignPart(unittest.TestCase):
def setUp(self):
self.paths = model.Paths('/$I', '/$O', '/$W')
self.config = test_config.TestConfig()
def test_sign_part(self, run_command, linker_signed_arm64_needs_force):
part = model.CodeSignedProduct('Test.app', 'test.signing.app')
signing.sign_part(self.paths, self.config, part)
run_command.assert_called_once_with([
'codesign', '--sign', '[IDENTITY]', '--timestamp', '--requirements',
'=designated => identifier "test.signing.app"', '/$W/Test.app'
])
def test_sign_part_needs_force(self, run_command,
linker_signed_arm64_needs_force):
linker_signed_arm64_needs_force.return_value = True
part = model.CodeSignedProduct('Test.app', 'test.signing.app')
signing.sign_part(self.paths, self.config, part)
run_command.assert_called_once_with([
'codesign', '--sign', '[IDENTITY]', '--force', '--timestamp',
'--requirements', '=designated => identifier "test.signing.app"',
'/$W/Test.app'
])
def test_sign_part_no_notary(self, run_command,
linker_signed_arm64_needs_force):
config = test_config.TestConfig(notary_user=None, notary_password=None)
part = model.CodeSignedProduct('Test.app', 'test.signing.app')
signing.sign_part(self.paths, config, part)
run_command.assert_called_once_with([
'codesign', '--sign', '[IDENTITY]', '--requirements',
'=designated => identifier "test.signing.app"', '/$W/Test.app'
])
def test_sign_part_no_identifier_requirement(
self, run_command, linker_signed_arm64_needs_force):
part = model.CodeSignedProduct(
'Test.app', 'test.signing.app', identifier_requirement=False)
signing.sign_part(self.paths, self.config, part)
run_command.assert_called_once_with(
['codesign', '--sign', '[IDENTITY]', '--timestamp', '/$W/Test.app'])
def test_sign_with_identifier(self, run_command,
linker_signed_arm64_needs_force):
part = model.CodeSignedProduct(
'Test.app', 'test.signing.app', sign_with_identifier=True)
signing.sign_part(self.paths, self.config, part)
run_command.assert_called_once_with([
'codesign', '--sign', '[IDENTITY]', '--timestamp', '--identifier',
'test.signing.app', '--requirements',
'=designated => identifier "test.signing.app"', '/$W/Test.app'
])
def test_sign_with_identifier_no_requirement(
self, run_command, linker_signed_arm64_needs_force):
part = model.CodeSignedProduct(
'Test.app',
'test.signing.app',
sign_with_identifier=True,
identifier_requirement=False)
signing.sign_part(self.paths, self.config, part)
run_command.assert_called_once_with([
'codesign', '--sign', '[IDENTITY]', '--timestamp', '--identifier',
'test.signing.app', '/$W/Test.app'
])
def test_sign_part_with_options(self, run_command,
linker_signed_arm64_needs_force):
part = model.CodeSignedProduct(
'Test.app',
'test.signing.app',
options=model.CodeSignOptions.RESTRICT +
model.CodeSignOptions.LIBRARY_VALIDATION)
signing.sign_part(self.paths, self.config, part)
run_command.assert_called_once_with([
'codesign', '--sign', '[IDENTITY]', '--timestamp', '--requirements',
'=designated => identifier "test.signing.app"', '--options',
'restrict,library', '/$W/Test.app'
])
def test_sign_part_with_entitlements(self, run_command,
linker_signed_arm64_needs_force):
part = model.CodeSignedProduct(
'Test.app',
'test.signing.app',
entitlements='entitlements.plist',
identifier_requirement=False)
signing.sign_part(self.paths, self.config, part)
run_command.assert_called_once_with([
'codesign', '--sign', '[IDENTITY]', '--timestamp', '--entitlements',
'/$W/entitlements.plist', '/$W/Test.app'
])
def test_verify_part(self, run_command, linker_signed_arm64_needs_force):
part = model.CodeSignedProduct('Test.app', 'test.signing.app')
signing.verify_part(self.paths, part)
self.assertEqual(run_command.mock_calls, [
mock.call([
'codesign', '--display', '--verbose=5', '--requirements', '-',
'/$W/Test.app'
]),
mock.call(['codesign', '--verify', '--verbose=6', '/$W/Test.app']),
])
def test_verify_part_with_options(self, run_command,
linker_signed_arm64_needs_force):
part = model.CodeSignedProduct(
'Test.app',
'test.signing.app',
verify_options=model.VerifyOptions.DEEP +
model.VerifyOptions.IGNORE_RESOURCES)
signing.verify_part(self.paths, part)
self.assertEqual(run_command.mock_calls, [
mock.call([
'codesign', '--display', '--verbose=5', '--requirements', '-',
'/$W/Test.app'
]),
mock.call([
'codesign', '--verify', '--verbose=6', '--deep',
'--ignore-resources', '/$W/Test.app'
]),
])<|fim▁end|>
|
Sealed Resources=none
Internal requirements=none
''')
|
<|file_name|>analysis.py<|end_file_name|><|fim▁begin|># Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
This script computes bounds on the privacy cost of training the
student model from noisy aggregation of labels predicted by teachers.
It should be used only after training the student (and therefore the
teachers as well). We however include the label files required to
reproduce key results from our paper (https://arxiv.org/abs/1610.05755):
the epsilon bounds for MNIST and SVHN students.
The command that computes the epsilon bound associated
with the training of the MNIST student model (100 label queries
with a (1/20)*2=0.1 epsilon bound each) is:
python analysis.py
--counts_file=mnist_250_teachers_labels.npy
--indices_file=mnist_250_teachers_100_indices_used_by_student.npy
The command that computes the epsilon bound associated
with the training of the SVHN student model (1000 label queries
with a (1/20)*2=0.1 epsilon bound each) is:
python analysis.py
--counts_file=svhn_250_teachers_labels.npy
--max_examples=1000
--delta=1e-6
"""
import os
import math
import numpy as np
from six.moves import xrange
import tensorflow as tf
from differential_privacy.multiple_teachers.input import maybe_download
# These parameters can be changed to compute bounds for different failure rates
# or different model predictions.
tf.flags.DEFINE_integer("moments",8, "Number of moments")
tf.flags.DEFINE_float("noise_eps", 0.1, "Eps value for each call to noisymax.")
tf.flags.DEFINE_float("delta", 1e-5, "Target value of delta.")
tf.flags.DEFINE_float("beta", 0.09, "Value of beta for smooth sensitivity")
tf.flags.DEFINE_string("counts_file","","Numpy matrix with raw counts")
tf.flags.DEFINE_string("indices_file","",
"File containting a numpy matrix with indices used."
"Optional. Use the first max_examples indices if this is not provided.")
tf.flags.DEFINE_integer("max_examples",1000,
"Number of examples to use. We will use the first"
" max_examples many examples from the counts_file"
" or indices_file to do the privacy cost estimate")
tf.flags.DEFINE_float("too_small", 1e-10, "Small threshold to avoid log of 0")
tf.flags.DEFINE_bool("input_is_counts", False, "False if labels, True if counts")
FLAGS = tf.flags.FLAGS
def compute_q_noisy_max(counts, noise_eps):
"""returns ~ Pr[outcome != winner].
Args:
counts: a list of scores
noise_eps: privacy parameter for noisy_max
Returns:
q: the probability that outcome is different from true winner.
"""
# For noisy max, we only get an upper bound.
# Pr[ j beats i*] \leq (2 + gap(j,i*)) / (4 exp(gap(j,i*)))
# proof at http://mathoverflow.net/questions/66763/
# tight-bounds-on-probability-of-sum-of-laplace-random-variables
winner = np.argmax(counts)
counts_normalized = noise_eps * (counts - counts[winner])
counts_rest = np.array(
[counts_normalized[i] for i in xrange(len(counts)) if i != winner])
q = 0.0
for c in counts_rest:
gap = -c
q += (gap + 2.0) / (4.0 * math.exp(gap))
return min(q, 1.0 - (1.0/len(counts)))
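# Illustrative sketch (hypothetical histogram, not a result from the paper): for 250
# teachers voting [230, 15, 5] with noise_eps = 0.1, the loop above adds
# (gap + 2) / (4 * exp(gap)) for gaps 0.1 * 215 = 21.5 and 0.1 * 225 = 22.5, so the
# returned q is on the order of 1e-9, i.e. the noisy argmax almost surely agrees with
# the true plurality label:
#   q = compute_q_noisy_max(np.array([230, 15, 5]), noise_eps=0.1)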
def compute_q_noisy_max_approx(counts, noise_eps):
"""returns ~ Pr[outcome != winner].
Args:
counts: a list of scores
noise_eps: privacy parameter for noisy_max
Returns:
q: the probability that outcome is different from true winner.
"""
# For noisy max, we only get an upper bound.
# Pr[ j beats i*] \leq (2 + gap(j,i*)) / (4 exp(gap(j,i*)))
# proof at http://mathoverflow.net/questions/66763/
# tight-bounds-on-probability-of-sum-of-laplace-random-variables
# This code uses an approximation that is faster and easier
# to get local sensitivity bound on.
winner = np.argmax(counts)
counts_normalized = noise_eps * (counts - counts[winner])
counts_rest = np.array(
[counts_normalized[i] for i in xrange(len(counts)) if i != winner])
gap = -max(counts_rest)
q = (len(counts) - 1) * (gap + 2.0) / (4.0 * math.exp(gap))
return min(q, 1.0 - (1.0/len(counts)))
def logmgf_exact(q, priv_eps, l):
"""Computes the logmgf value given q and privacy eps.
The bound used is the min of three terms. The first term is from
https://arxiv.org/pdf/1605.02065.pdf.
The second term is based on the fact that when event has probability (1-q) for
q close to zero, q can only change by exp(eps), which corresponds to a
much smaller multiplicative change in (1-q)
The third term comes directly from the privacy guarantee.
Args:
q: pr of non-optimal outcome
priv_eps: eps parameter for DP
l: moment to compute.
Returns:
Upper bound on logmgf
"""
if q < 0.5:
t_one = (1-q) * math.pow((1-q) / (1 - math.exp(priv_eps) * q), l)
t_two = q * math.exp(priv_eps * l)
t = t_one + t_two
try:
log_t = math.log(t)
except ValueError:
print("Got ValueError in math.log for values :" + str((q, priv_eps, l, t)))
log_t = priv_eps * l
else:
log_t = priv_eps * l
return min(0.5 * priv_eps * priv_eps * l * (l + 1), log_t, priv_eps * l)
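# Worked example (added for clarity; it mirrors the data-independent call made in
# main() below): with noise_eps = 0.1 each query is 2 * 0.1 = 0.2-DP, so for q = 1.0,
# priv_eps = 0.2 and moment l = 8 the else-branch sets log_t = 0.2 * 8 = 1.6 and the
# function returns min(0.5 * 0.2**2 * 8 * 9, 1.6, 1.6) = 1.44.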
def logmgf_from_counts(counts, noise_eps, l):
"""
ReportNoisyMax mechanism with noise_eps with 2*noise_eps-DP
in our setting where one count can go up by one and another
can go down by 1.
"""
q = compute_q_noisy_max(counts, noise_eps)
return logmgf_exact(q, 2.0 * noise_eps, l)
def sens_at_k(counts, noise_eps, l, k):
"""Return sensitivity at distane k.
Args:
counts: an array of scores
noise_eps: noise parameter used
l: moment whose sensitivity is being computed
k: distance
Returns:
sensitivity: at distance k
"""
counts_sorted = sorted(counts, reverse=True)
if 0.5 * noise_eps * l > 1:
print("l too large to compute sensitivity")
return 0
# Now we can assume that at k, gap remains positive
# or we have reached the point where logmgf_exact is
# determined by the first term and independent of q.
if counts[0] < counts[1] + k:
return 0
counts_sorted[0] -= k
counts_sorted[1] += k
val = logmgf_from_counts(counts_sorted, noise_eps, l)
counts_sorted[0] -= 1
counts_sorted[1] += 1
val_changed = logmgf_from_counts(counts_sorted, noise_eps, l)
return val_changed - val
def smoothed_sens(counts, noise_eps, l, beta):
"""Compute beta-smooth sensitivity.
Args:
counts: an array of scores
noise_eps: noise parameter
l: moment of interest
beta: smoothness parameter
Returns:
smooth_sensitivity: a beta smooth upper bound
"""
k = 0
smoothed_sensitivity = sens_at_k(counts, noise_eps, l, k)
while k < max(counts):
k += 1
sensitivity_at_k = sens_at_k(counts, noise_eps, l, k)
smoothed_sensitivity = max(
smoothed_sensitivity,<|fim▁hole|> return smoothed_sensitivity
def main(unused_argv):
##################################################################
# If we are reproducing results from paper https://arxiv.org/abs/1610.05755,
# download the required binaries with label information.
##################################################################
# Binaries for MNIST results
paper_binaries_mnist = \
["https://github.com/npapernot/multiple-teachers-for-privacy/blob/master/mnist_250_teachers_labels.npy?raw=true",
"https://github.com/npapernot/multiple-teachers-for-privacy/blob/master/mnist_250_teachers_100_indices_used_by_student.npy?raw=true"]
if FLAGS.counts_file == "mnist_250_teachers_labels.npy" \
or FLAGS.indices_file == "mnist_250_teachers_100_indices_used_by_student.npy":
maybe_download(paper_binaries_mnist, os.getcwd())
# Binaries for SVHN results
paper_binaries_svhn = ["https://github.com/npapernot/multiple-teachers-for-privacy/blob/master/svhn_250_teachers_labels.npy?raw=true"]
if FLAGS.counts_file == "svhn_250_teachers_labels.npy":
maybe_download(paper_binaries_svhn, os.getcwd())
input_mat = np.load(FLAGS.counts_file)
if FLAGS.input_is_counts:
counts_mat = input_mat
else:
# In this case, the input is the raw predictions. Transform them into counts.
num_teachers, n = input_mat.shape
counts_mat = np.zeros((n, 10)).astype(np.int32)
for i in range(n):
for j in range(num_teachers):
counts_mat[i, int(input_mat[j, i])] += 1
n = counts_mat.shape[0]
num_examples = min(n, FLAGS.max_examples)
if not FLAGS.indices_file:
indices = np.array(range(num_examples))
else:
index_list = np.load(FLAGS.indices_file)
indices = index_list[:num_examples]
l_list = 1.0 + np.array(xrange(FLAGS.moments))
beta = FLAGS.beta
total_log_mgf_nm = np.array([0.0 for _ in l_list])
total_ss_nm = np.array([0.0 for _ in l_list])
noise_eps = FLAGS.noise_eps
for i in indices:
total_log_mgf_nm += np.array(
[logmgf_from_counts(counts_mat[i], noise_eps, l)
for l in l_list])
total_ss_nm += np.array(
[smoothed_sens(counts_mat[i], noise_eps, l, beta)
for l in l_list])
delta = FLAGS.delta
# We want delta = exp(alpha - eps l).
# Solving gives eps = (alpha - ln (delta))/l
eps_list_nm = (total_log_mgf_nm - math.log(delta)) / l_list
print("Epsilons (Noisy Max): " + str(eps_list_nm))
print("Smoothed sensitivities (Noisy Max): " + str(total_ss_nm / l_list))
# If beta < eps / 2 ln (1/delta), then adding noise Lap(1) * 2 SS/eps
# is eps,delta DP
# Also if beta < eps / 2(gamma +1), then adding noise 2(gamma+1) SS eta / eps
# where eta has density proportional to 1 / (1+|z|^gamma) is eps-DP
# Both from Corollary 2.4 in
# http://www.cse.psu.edu/~ads22/pubs/NRS07/NRS07-full-draft-v1.pdf
# Print the first one's scale
ss_eps = 2.0 * beta * math.log(1/delta)
ss_scale = 2.0 / ss_eps
print("To get an " + str(ss_eps) + "-DP estimate of epsilon, ")
print("..add noise ~ " + str(ss_scale))
print("... times " + str(total_ss_nm / l_list))
print("Epsilon = " + str(min(eps_list_nm)) + ".")
if min(eps_list_nm) == eps_list_nm[-1]:
print("Warning: May not have used enough values of l")
# Data independent bound, as mechanism is
# 2*noise_eps DP.
data_ind_log_mgf = np.array([0.0 for _ in l_list])
data_ind_log_mgf += num_examples * np.array(
[logmgf_exact(1.0, 2.0 * noise_eps, l) for l in l_list])
data_ind_eps_list = (data_ind_log_mgf - math.log(delta)) / l_list
print("Data independent bound = " + str(min(data_ind_eps_list)) + ".")
return
if __name__ == "__main__":
tf.app.run()<|fim▁end|>
|
math.exp(-beta * k) * sensitivity_at_k)
if sensitivity_at_k == 0.0:
break
|
<|file_name|>HumanoidType.js<|end_file_name|><|fim▁begin|>BASE.require([
"BASE.data.Edm",
"BASE.odata4.ODataAnnotation"
], function () {
var ODataAnnotation = BASE.odata4.ODataAnnotation;<|fim▁hole|>
var HumanoidType = function () { };
HumanoidType.annotations = [new ODataAnnotation("Namespace.HumanoidType")];
HumanoidType.None = new Enum(0);
HumanoidType.None.name = "None";
HumanoidType.Human = new Enum(1);
HumanoidType.Human.name = "Human";
HumanoidType.Vulcan = new Enum(2);
HumanoidType.Vulcan.name = "Vulcan";
BASE.data.testing.HumanoidType = HumanoidType;
});<|fim▁end|>
|
BASE.namespace("BASE.data.testing");
|
<|file_name|>test_8chan.py<|end_file_name|><|fim▁begin|>import pytest
from chandere.errors import ChandereError
from chandere.loader import load_scraper
scraper = load_scraper("8chan")
VALID_CROSSLINK_TARGETS = [
("/tech/589254", ("tech", "589254")),
("/tech/ 589254", ("tech", "589254")),
("tech/589254", ("tech", "589254")),
("/tech 589254", ("tech", "589254")),
("tech 589254", ("tech", "589254")),
("/tech/", ("tech", None)),
("/tech", ("tech", None)),
("tech/", ("tech", None)),
("tech", ("tech", None)),
]
INVALID_CROSSLINK_TARGETS = [
"/"
]
VALID_URI_TARGETS = [
("https://8ch.net/tech/res/589254.html", ("tech", "589254")),
("http://8ch.net/tech/res/589254.html", ("tech", "589254")),
("https://8ch.net/tech/res/589254.json", ("tech", "589254")),
("http://8ch.net/tech/res/589254.json", ("tech", "589254")),
("https://8ch.net/tech/", ("tech", None)),
("http://8ch.net/tech/", ("tech", None)),
]
INVALID_URI_TARGETS = [
"https://8ch.net/",
"http://8ch.net/",
"https://google.com/",
"http://google.com/",
]
def test_parse_valid_uri_target():
for target, expected in VALID_URI_TARGETS:<|fim▁hole|>def test_parse_invalid_uri_target():
for target in INVALID_URI_TARGETS:
with pytest.raises(ChandereError):
scraper.parse_target(target)
def test_parse_valid_crosslink_target():
for target, expected in VALID_CROSSLINK_TARGETS:
assert scraper.parse_target(target) == expected
def test_parse_invalid_crosslink_target():
for target in INVALID_CROSSLINK_TARGETS:
with pytest.raises(ChandereError):
scraper.parse_target(target)<|fim▁end|>
|
assert scraper.parse_target(target) == expected
|
<|file_name|>send_grades.py<|end_file_name|><|fim▁begin|>""""
This module handles sending grades back to edX
Most of this module is a python 3 port of pylti (github.com/mitodl/sga-lti)
and should be moved back into that library.
"""
import uuid
from xml.etree import ElementTree as etree
import oauth2
from django.conf import settings
class SendGradeFailure(Exception):
""" Exception class for failures sending grades to edX"""
def send_grade(consumer_key, edx_url, result_id, grade):
""" Sends a grade to edX """
if consumer_key not in settings.LTI_OAUTH_CREDENTIALS:
raise SendGradeFailure("Invalid consumer_key %s" % consumer_key)
body = generate_request_xml(str(uuid.uuid1()), "replaceResult", result_id, grade)
secret = settings.LTI_OAUTH_CREDENTIALS[consumer_key]
response, content = _post_patched_request(consumer_key, secret, body, edx_url, "POST", "application/xml")
if isinstance(content, bytes):
content = content.decode("utf8")
if "<imsx_codeMajor>success</imsx_codeMajor>" not in content:
raise SendGradeFailure("Send grades to edX returned %s" % response.status)
def _post_patched_request(lti_key, secret, body, url, method, content_type): # pylint: disable=too-many-arguments
"""
The Authorization header needs to be capitalized for some LTI clients;
this function ensures that the header is capitalized.
:param body: body of the call
:param client: OAuth Client
:param url: outcome url
:return: response
"""
consumer = oauth2.Consumer(key=lti_key, secret=secret)
client = oauth2.Client(consumer)
import httplib2
http = httplib2.Http
# pylint: disable=protected-access
normalize = http._normalize_headers
def my_normalize(self, headers):
""" This function patches Authorization header """
ret = normalize(self, headers)
if 'authorization' in ret:
ret['Authorization'] = ret.pop('authorization')
return ret
http._normalize_headers = my_normalize
monkey_patch_function = normalize
response, content = client.request(
url,
method,
body=body.encode("utf8"),
headers={'Content-Type': content_type})
<|fim▁hole|> http._normalize_headers = monkey_patch_function
return response, content
def generate_request_xml(message_identifier_id, operation,
lis_result_sourcedid, score):
# pylint: disable=too-many-locals
"""
Generates LTI 1.1 XML for posting result to LTI consumer.
:param message_identifier_id:
:param operation:
:param lis_result_sourcedid:
:param score:
:return: XML string
"""
root = etree.Element('imsx_POXEnvelopeRequest',
xmlns='http://www.imsglobal.org/services/'
'ltiv1p1/xsd/imsoms_v1p0')
header = etree.SubElement(root, 'imsx_POXHeader')
header_info = etree.SubElement(header, 'imsx_POXRequestHeaderInfo')
version = etree.SubElement(header_info, 'imsx_version')
version.text = 'V1.0'
message_identifier = etree.SubElement(header_info,
'imsx_messageIdentifier')
message_identifier.text = message_identifier_id
body = etree.SubElement(root, 'imsx_POXBody')
xml_request = etree.SubElement(body, '%s%s' % (operation, 'Request'))
record = etree.SubElement(xml_request, 'resultRecord')
guid = etree.SubElement(record, 'sourcedGUID')
sourcedid = etree.SubElement(guid, 'sourcedId')
sourcedid.text = lis_result_sourcedid
if score is not None:
result = etree.SubElement(record, 'result')
result_score = etree.SubElement(result, 'resultScore')
language = etree.SubElement(result_score, 'language')
language.text = 'en'
text_string = etree.SubElement(result_score, 'textString')
text_string.text = score.__str__()
ret = "<?xml version='1.0' encoding='utf-8'?>\n{}".format(
etree.tostring(root, encoding='unicode'))
return ret<|fim▁end|>
|
http = httplib2.Http
# pylint: disable=protected-access
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var Logger = require("./Logger.js");
var Level = require("./Level.js");
var PrintPattern = require("./PrintPattern.js");
module.exports = (function() {
/* STATE VARIABLES */
// OUT configuration
var OUT_INTERVAL = undefined; // handle for the pending flush timeout (set in writeLog, cleared in writeLogImpl)
var OUT_INTERVAL_TIMEOUT = 1000; // 1sec
var OUT_SIZE = 1000;
// logger objects
var loggers = {};
// appender list
var appenders = {};
appenders[Level.trace] = {};
appenders[Level.log] = {};
appenders[Level.info] = {};
appenders[Level.warn] = {};
appenders[Level.error] = {};
// information to log
var records = new Array();
// --------------------------------------------------------------------------
/* METHODS */
// add a logger to the map
// logger_name should be something like group.subgroup.name
var createLogger = function(logger_name) {
if (loggers[logger_name] == undefined) {
loggers[logger_name] = new Logger(logger_name, function(record) {
records.push(record);
writeLog();
});
}
return loggers[logger_name];
}
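// Usage sketch (hypothetical names): a logger obtained with
//   var dbLog = createLogger("app.db.connection");
// is reached by any appender configured to listen on "app", "app.db" or
// "app.db.connection", because writeLogImpl() below rebuilds the dotted name one
// segment at a time when dispatching queued records to the appender map.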
// create the appender objects
// the appender_config should be something like
// [{
// "name": "appender name",
// "type": "appender implementation",
// "level": "level that appender listens",
// "loggers": ["logger1", "logger2", "group.logger3"],
// The attributes below are optional
// --------------------------------------------------
// "print_pattern": "[{y}/{M}/{d} {w} {h}:{m}:{s}.{ms}] [{lvl}] [{lg}]
// {out}",
// "config": {...[appender exclusive configuration]}
// }, ...]
var loadAppenderConfig = function(appender_configs) {
realeaseAppenders();
for ( var i in appender_configs) {
// get an appender config
var appender_config = appender_configs[i];
// create appender object
var AppenderType = require("./appender/" + appender_config.type
+ "Appender.js");
var appender_object = new AppenderType(appender_config.name,
new PrintPattern(appender_config.print_pattern),
appender_config.config);
for ( var l in appender_config.loggers) {
var listened_logger = appender_config.loggers[l];
// initialize listened logger appender list
if (appenders[Level[appender_config.level]][listened_logger] == undefined) {
appenders[Level[appender_config.level]][listened_logger] = new Array();
}
appenders[Level[appender_config.level]][listened_logger]
.push(appender_object);
}
}
}
// release the appenders' internal resources
var realeaseAppenders = function() {
for (lv in appenders) {
var level_appender = appenders[lv];
for (lg in level_appender) {
var logger_appender = level_appender[lg];
if (logger_appender.length > 0) {
for (i in logger_appender) {
var appender = logger_appender[i];
appender.release();
}<|fim▁hole|>
delete level_appender[lg];
}
}
};
// Wrapper that decides when to flush the queued records without holding up the process
var writeLog = function() {
if (OUT_INTERVAL == undefined) {
OUT_INTERVAL = setTimeout(writeLogImpl, OUT_INTERVAL_TIMEOUT);
}
};
// real log process
var writeLogImpl = function() {
for (var i = 0; i < OUT_SIZE; i++) {
// getting message record
var record = records[i];
// stop the loop when the record list is empty
if (record == undefined) {
break;
}
// the record should be logged on all appender that listen the same
// level or the appenders that listen lower levels
for (var level = record.level; level >= 1; level--) {
// getting appender list by level
var level_appenders = appenders[level];
// try to catch all appenders as possible
var logger_composition = record.logger.split(".");
var logger_name = undefined;
for (var lc = 0; lc < logger_composition.length; lc++) {
// logger name rebuild process
if (logger_name == undefined) {
logger_name = logger_composition[lc];
} else {
logger_name = logger_name + "."
+ logger_composition[lc];
}
// getting appender list by logger
var logger_appenders = level_appenders[logger_name];
// using appender
if (logger_appenders != undefined) {
for (a in logger_appenders) {
var appender = logger_appenders[a];
appender.write(record);
}
}
}
}
}
records.splice(0, OUT_SIZE);
// clean interval identifier
OUT_INTERVAL = undefined;
// if still remain any record, start again the log process
if (records.length > 0) {
writeLog();
}
};
// public interface
return {
"createLogger" : createLogger,
"loadAppenderConfig" : loadAppenderConfig
}
})();<|fim▁end|>
|
}
|
<|file_name|>attr-path.cc<|end_file_name|><|fim▁begin|>#include "attr-path.hh"
#include "util.hh"
namespace nix {
// !!! Shouldn't we return a pointer to a Value?
void findAlongAttrPath(EvalState & state, const string & attrPath,
Bindings & autoArgs, Expr * e, Value & v)
{
Strings tokens = tokenizeString(attrPath, ".");
Error attrError =
Error(format("attribute selection path `%1%' does not match expression") % attrPath);
string curPath;
state.mkThunk_(v, e);
foreach (Strings::iterator, i, tokens) {
if (!curPath.empty()) curPath += ".";
curPath += *i;
/* Is *i an index (integer) or a normal attribute name? */
enum { apAttr, apIndex } apType = apAttr;
string attr = *i;
int attrIndex = -1;<|fim▁hole|>
/* Evaluate the expression. */
Value vTmp;
state.autoCallFunction(autoArgs, v, vTmp);
v = vTmp;
state.forceValue(v);
/* It should evaluate to either an attribute set or an
expression, according to what is specified in the
attrPath. */
if (apType == apAttr) {
if (v.type != tAttrs)
throw TypeError(
format("the expression selected by the selection path `%1%' should be an attribute set but is %2%")
% curPath % showType(v));
Bindings::iterator a = v.attrs->find(state.symbols.create(attr));
if (a == v.attrs->end())
throw Error(format("attribute `%1%' in selection path `%2%' not found") % attr % curPath);
v = *a->value;
}
else if (apType == apIndex) {
if (v.type != tList)
throw TypeError(
format("the expression selected by the selection path `%1%' should be a list but is %2%")
% curPath % showType(v));
if (attrIndex >= v.list.length)
throw Error(format("list index %1% in selection path `%2%' is out of range") % attrIndex % curPath);
v = *v.list.elems[attrIndex];
}
}
}
}<|fim▁end|>
|
if (string2Int(attr, attrIndex)) apType = apIndex;
|
<|file_name|>fixes.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python<|fim▁hole|>
def fix_terminator(tokens):
if not tokens:
return
last = tokens[-1]
if last not in ('.', '?', '!') and last.endswith('.'):
tokens[-1] = last[:-1]
tokens.append('.')
def balance_quotes(tokens):
count = tokens.count("'")
if not count:
return
processed = 0
for i, token in enumerate(tokens):
if token == "'":
if processed % 2 == 0 and (i == 0 or processed != count - 1):
tokens[i] = "`"
processed += 1
def output(tokens):
if not tokens:
return
# fix_terminator(tokens)
balance_quotes(tokens)
print ' '.join(tokens)
prev = None
for line in sys.stdin:
tokens = line.split()
if len(tokens) == 1 and tokens[0] in ('"', "'", ')', ']'):
prev.append(tokens[0])
else:
output(prev)
prev = tokens
output(prev)<|fim▁end|>
|
import sys
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import arrow
import datetime
import ujson
import timeit
from flask.ext.login import login_required
from flask import (
Blueprint, render_template
)
from feedback.dashboard.vendorsurveys import (
get_rating_scale, get_surveys_by_role,
get_surveys_by_completion, get_surveys_by_purpose,
get_all_survey_responses, get_rating_by_lang,
get_rating_by_purpose, get_rating_by_role
)
from feedback.surveys.constants import SURVEY_DAYS
from feedback.surveys.models import Survey
from feedback.dashboard.permits import (
api_health, get_lifespan,
get_permit_types, trade,
get_master_permit_counts,
dump_socrata_api
)
blueprint = Blueprint(
"dashboard", __name__,
template_folder='../templates',
static_folder="../static"
)
def to_bucket(str_date):
''' Converts the DB string time to a MM-DD string format.
'''
result = arrow.get(str_date)
return result.strftime("%m-%d")
@blueprint.route("/", methods=["GET", "POST"])
def home():
json_obj = {}
json_obj_home = {}
surveys_by_date = {}
surveys_date_array = []
surveys_value_array = []
for i in range(SURVEY_DAYS, -1, -1):
time_i = (datetime.date.today() - datetime.timedelta(i))
date_index = time_i.strftime("%m-%d")
surveys_by_date[date_index] = 0
surveys_date_array.append(date_index)
survey_table = get_all_survey_responses(SURVEY_DAYS)
sms_rows = [x.lang for x in survey_table if x.method == 'sms']
web_rows = [x.lang for x in survey_table if x.method == 'web']
# ANALYTICS CODE
for i in range(SURVEY_DAYS, -1, -1):
time_i = (datetime.date.today() - datetime.timedelta(i))
date_index = time_i.strftime("%m-%d")
surveys_value_array.append(
len([x for x in survey_table if to_bucket(x.date_submitted) == date_index]))
dashboard_collection_home = [
{<|fim▁hole|> "datetime": {
"data": surveys_date_array
},
"series": [
{
"data": surveys_value_array
}
]
}
}
},
{
"title": "Satisfaction Rating".format(SURVEY_DAYS),
"data": "{0:.2f}".format(get_rating_scale(survey_table))
},
{
"title": "Survey Type".format(SURVEY_DAYS),
"data": {
"web_en": web_rows.count('en'),
"web_es": web_rows.count('es'),
"sms_en": sms_rows.count('en'),
"sms_es": sms_rows.count('es')
},
"labels": {
"web_en": "Web (English)",
"web_es": "Web (Spanish)",
"sms_en": "Text (English)",
"sms_es": "Text (Spanish)"
}
},
{},
{},
{},
{},
{},
{},
{},
{
"title": "Surveys by Survey Role",
"data": get_surveys_by_role(survey_table)
},
{},
{
"title": "How many completions?",
"data": get_surveys_by_completion(survey_table)
},
{
"title": "Respondents by Purpose",
"data": get_surveys_by_purpose(survey_table)
},
{
"title": "Ratings",
"data": {
"en": get_rating_by_lang(survey_table, 'en'),
"es": get_rating_by_lang(survey_table, 'es'),
"p1": get_rating_by_purpose(survey_table, 1),
"p2": get_rating_by_purpose(survey_table, 2),
"p3": get_rating_by_purpose(survey_table, 3),
"p4": get_rating_by_purpose(survey_table, 4),
"p5": get_rating_by_purpose(survey_table, 5),
"contractor": get_rating_by_role(survey_table, 1),
"architect": get_rating_by_role(survey_table, 2),
"permitconsultant": get_rating_by_role(survey_table, 3),
"homeowner": get_rating_by_role(survey_table, 4),
"bizowner": get_rating_by_role(survey_table, 5)
}
}
]
json_obj_home['daily_graph'] = ujson.dumps(dashboard_collection_home[0]['data']['graph'])
json_obj_home['surveys_type'] = ujson.dumps(dashboard_collection_home[2])
json_obj_home['survey_role'] = ujson.dumps(dashboard_collection_home[10])
json_obj_home['survey_complete'] = ujson.dumps(dashboard_collection_home[12])
json_obj_home['survey_purpose'] = ujson.dumps(dashboard_collection_home[13])
today = datetime.date.today()
return render_template(
"public/home.html",
api=1,
date=today.strftime('%B %d, %Y'),
json_obj=json_obj_home,
dash_obj=dashboard_collection_home,
resp_obj=survey_table,
title='Dashboard - Main'
)
@blueprint.route("/metrics", methods=["GET", "POST"])
def metrics():
json_obj = {}
surveys_by_date = {}
surveys_date_array = []
surveys_value_array = []
for i in range(SURVEY_DAYS, -1, -1):
time_i = (datetime.date.today() - datetime.timedelta(i))
date_index = time_i.strftime("%m-%d")
surveys_by_date[date_index] = 0
surveys_date_array.append(date_index)
survey_table = get_all_survey_responses(SURVEY_DAYS)
sms_rows = [x.lang for x in survey_table if x.method == 'sms']
web_rows = [x.lang for x in survey_table if x.method == 'web']
# ANALYTICS CODE
for i in range(SURVEY_DAYS, -1, -1):
time_i = (datetime.date.today() - datetime.timedelta(i))
date_index = time_i.strftime("%m-%d")
surveys_value_array.append(
len([x for x in survey_table if to_bucket(x.date_submitted) == date_index]))
dashboard_collection = [
{
"id": "graph",
"title": "Surveys Submitted".format(SURVEY_DAYS),
"data": {
"graph": {
"datetime": {
"data": surveys_date_array
},
"series": [
{
"data": surveys_value_array
}
]
}
}
},
{
"title": "Satisfaction Rating".format(SURVEY_DAYS),
"data": "{0:.2f}".format(get_rating_scale(survey_table))
},
{
"title": "Survey Type".format(SURVEY_DAYS),
"data": {
"web_en": web_rows.count('en'),
"web_es": web_rows.count('es'),
"sms_en": sms_rows.count('en'),
"sms_es": sms_rows.count('es')
},
"labels": {
"web_en": "Web (English)",
"web_es": "Web (Spanish)",
"sms_en": "Text (English)",
"sms_es": "Text (Spanish)"
}
},
{
"title": "Commercial",
"data": {
"nc": get_lifespan('nc'),
"rc": get_lifespan('rc'),
"s": get_lifespan('s')
}
},
{
"title": "Residential",
"data": {
"nr": get_lifespan('nr'),
"rr": get_lifespan('rr'),
"p": get_lifespan('p'),
"f": get_lifespan('f'),
"e": get_lifespan('e')
}
},
{
"title": "Average time from application date to permit issuance, Owner/Builder Permits, Last 30 Days",
"data": 0
},
{
"title": "Same Day Trade Permits",
"data": {
"PLUM": trade(30, 'PLUM'),
"BLDG": trade(30, 'BLDG'),
"ELEC": trade(30, 'ELEC'),
"FIRE": trade(30, 'FIRE'),
"ZIPS": trade(30, 'ZIPS')
}
},
{
"title": "(UNUSED) Avg Cost of an Open Residential Permit",
"data": 0
},
{
"title": "(UNUSED) Avg Cost of an Owner/Builder Permit",
"data": 0
},
{
"title": "Permits & sub-permits issued by type, Last 30 Days",
"data": get_permit_types()
},
{
"title": "Surveys by Survey Role",
"data": get_surveys_by_role(survey_table)
},
{
"title": "Master Permits Issued, Last 30 Days",
"data": get_master_permit_counts('permit_issued_date')
},
{
"title": "How many completions?",
"data": get_surveys_by_completion(survey_table)
},
{
"title": "Purpose",
"data": get_surveys_by_purpose(survey_table)
},
{
"title": "Ratings",
"data": {
"en": get_rating_by_lang(survey_table, 'en'),
"es": get_rating_by_lang(survey_table, 'es'),
"p1": get_rating_by_purpose(survey_table, 1),
"p2": get_rating_by_purpose(survey_table, 2),
"p3": get_rating_by_purpose(survey_table, 3),
"p4": get_rating_by_purpose(survey_table, 4),
"p5": get_rating_by_purpose(survey_table, 5),
"contractor": get_rating_by_role(survey_table, 1),
"architect": get_rating_by_role(survey_table, 2),
"permitconsultant": get_rating_by_role(survey_table, 3),
"homeowner": get_rating_by_role(survey_table, 4),
"bizowner": get_rating_by_role(survey_table, 5)
}
}
]
json_obj['daily_graph'] = ujson.dumps(dashboard_collection[0]['data']['graph'])
json_obj['surveys_type'] = ujson.dumps(dashboard_collection[2])
json_obj['permits_type'] = ujson.dumps(dashboard_collection[9])
json_obj['survey_role'] = ujson.dumps(dashboard_collection[10])
json_obj['survey_complete'] = ujson.dumps(dashboard_collection[12])
json_obj['survey_purpose'] = ujson.dumps(dashboard_collection[13])
json_obj['permits_rawjson'] = ujson.dumps(dump_socrata_api('p'))
json_obj['violations_rawjson'] = ujson.dumps(dump_socrata_api('v'))
json_obj['violations_locations_json'] = ujson.dumps(dump_socrata_api('vl'))
json_obj['violations_type_json'] = ujson.dumps(dump_socrata_api('vt'))
json_obj['violations_per_month_json'] = ujson.dumps(dump_socrata_api('vm'))
today = datetime.date.today()
return render_template(
"public/home-metrics.html",
api=api_health(),
date=today.strftime('%B %d, %Y'),
json_obj=json_obj,
dash_obj=dashboard_collection,
resp_obj=survey_table,
title='Dashboard - PIC Metrics'
)
@blueprint.route("/violations", methods=["GET", "POST"])
def violations():
json_obj = {}
surveys_by_date = {}
surveys_date_array = []
surveys_value_array = []
for i in range(SURVEY_DAYS, -1, -1):
time_i = (datetime.date.today() - datetime.timedelta(i))
date_index = time_i.strftime("%m-%d")
surveys_by_date[date_index] = 0
surveys_date_array.append(date_index)
survey_table = get_all_survey_responses(SURVEY_DAYS)
sms_rows = [x.lang for x in survey_table if x.method == 'sms']
web_rows = [x.lang for x in survey_table if x.method == 'web']
# ANALYTICS CODE
for i in range(SURVEY_DAYS, -1, -1):
time_i = (datetime.date.today() - datetime.timedelta(i))
date_index = time_i.strftime("%m-%d")
surveys_value_array.append(
len([x for x in survey_table if to_bucket(x.date_submitted) == date_index]))
dashboard_collection = [
{
"id": "graph",
"title": "Surveys Submitted".format(SURVEY_DAYS),
"data": {
"graph": {
"datetime": {
"data": surveys_date_array
},
"series": [
{
"data": surveys_value_array
}
]
}
}
},
{
"title": "Satisfaction Rating".format(SURVEY_DAYS),
"data": "{0:.2f}".format(get_rating_scale(survey_table))
},
{
"title": "Survey Type".format(SURVEY_DAYS),
"data": {
"web_en": web_rows.count('en'),
"web_es": web_rows.count('es'),
"sms_en": sms_rows.count('en'),
"sms_es": sms_rows.count('es')
},
"labels": {
"web_en": "Web (English)",
"web_es": "Web (Spanish)",
"sms_en": "Text (English)",
"sms_es": "Text (Spanish)"
}
},
{
"title": "Commercial",
"data": {
"nc": get_lifespan('nc'),
"rc": get_lifespan('rc'),
"s": get_lifespan('s')
}
},
{
"title": "Residential",
"data": {
"nr": get_lifespan('nr'),
"rr": get_lifespan('rr'),
"p": get_lifespan('p'),
"f": get_lifespan('f'),
"e": get_lifespan('e')
}
},
{
"title": "Average time from application date to permit issuance, Owner/Builder Permits, Last 30 Days",
"data": 0
},
{
"title": "Same Day Trade Permits",
"data": {
"PLUM": trade(30, 'PLUM'),
"BLDG": trade(30, 'BLDG'),
"ELEC": trade(30, 'ELEC'),
"FIRE": trade(30, 'FIRE'),
"ZIPS": trade(30, 'ZIPS')
}
},
{
"title": "(UNUSED) Avg Cost of an Open Residential Permit",
"data": 0
},
{
"title": "(UNUSED) Avg Cost of an Owner/Builder Permit",
"data": 0
},
{
"title": "Permits & sub-permits issued by type, Last 30 Days",
"data": get_permit_types()
},
{
"title": "Surveys by Survey Role",
"data": get_surveys_by_role(survey_table)
},
{
"title": "Master Permits Issued, Last 30 Days",
"data": get_master_permit_counts('permit_issued_date')
},
{
"title": "How many completions?",
"data": get_surveys_by_completion(survey_table)
},
{
"title": "Purpose",
"data": get_surveys_by_purpose(survey_table)
},
{
"title": "Ratings",
"data": {
"en": get_rating_by_lang(survey_table, 'en'),
"es": get_rating_by_lang(survey_table, 'es'),
"p1": get_rating_by_purpose(survey_table, 1),
"p2": get_rating_by_purpose(survey_table, 2),
"p3": get_rating_by_purpose(survey_table, 3),
"p4": get_rating_by_purpose(survey_table, 4),
"p5": get_rating_by_purpose(survey_table, 5),
"contractor": get_rating_by_role(survey_table, 1),
"architect": get_rating_by_role(survey_table, 2),
"permitconsultant": get_rating_by_role(survey_table, 3),
"homeowner": get_rating_by_role(survey_table, 4),
"bizowner": get_rating_by_role(survey_table, 5)
}
}
]
json_obj['daily_graph'] = ujson.dumps(dashboard_collection[0]['data']['graph'])
json_obj['surveys_type'] = ujson.dumps(dashboard_collection[2])
json_obj['permits_type'] = ujson.dumps(dashboard_collection[9])
json_obj['survey_role'] = ujson.dumps(dashboard_collection[10])
json_obj['survey_complete'] = ujson.dumps(dashboard_collection[12])
json_obj['survey_purpose'] = ujson.dumps(dashboard_collection[13])
json_obj['permits_rawjson'] = ujson.dumps(dump_socrata_api('p'))
json_obj['violations_rawjson'] = ujson.dumps(dump_socrata_api('v'))
json_obj['violations_locations_json'] = ujson.dumps(dump_socrata_api('vl'))
json_obj['violations_type_json'] = ujson.dumps(dump_socrata_api('vt'))
json_obj['violations_per_month_json'] = ujson.dumps(dump_socrata_api('vm'))
today = datetime.date.today()
return render_template(
"public/home-violations.html",
api=api_health(),
date=today.strftime('%B %d, %Y'),
json_obj=json_obj,
dash_obj=dashboard_collection,
resp_obj=survey_table,
title='Dashboard - Neighborhood Compliance'
)
@blueprint.route('/dashboard/feedback/', methods=['GET'])
def all_surveys():
survey_table = get_all_survey_responses(SURVEY_DAYS)
today = datetime.date.today()
return render_template(
"dashboard/all-surveys.html",
resp_obj=survey_table,
title='All Survey Responses',
date=today.strftime('%B %d, %Y')
)
@blueprint.route('/dashboard/feedback/<id>', methods=['GET'])
@login_required
def survey_detail(id):
survey = Survey.query.filter_by(id=id)
today = datetime.date.today()
return render_template(
"dashboard/survey-detail.html",
resp_obj=survey,
title='Permitting & Inspection Center User Survey Metrics: Detail',
date=today.strftime('%B %d, %Y'))
@blueprint.route("/dashboard/violations/", methods=['GET'])
def violations_detail():
json_obj = {}
json_obj['violations_type_json'] = ujson.dumps(dump_socrata_api('vt'))
today = datetime.date.today()
return render_template(
"public/violations-detail.html",
title='Violations by Type: Detail',
json_obj=json_obj,
date=today.strftime('%B %d, %Y'))<|fim▁end|>
|
"id": "graph",
"title": "Surveys Submitted".format(SURVEY_DAYS),
"data": {
"graph": {
|
<|file_name|>Device.java<|end_file_name|><|fim▁begin|>package model.device;
/**
* Title : The Mobile Robot Explorer Simulation Environment v2.0
* Copyright: GNU General Public License as published by the Free Software Foundation
* Company : Hanze University of Applied Sciences
*
* @author Dustin Meijer (2012)
* @author Alexander Jeurissen (2012)
* @author Davide Brugali (2002)
* @version 2.0
*/
import model.environment.Environment;
import model.environment.Position;
import model.robot.MobileRobot;
import java.awt.Polygon;
import java.awt.Color;
import java.io.PrintWriter;
import java.util.ArrayList;
public abstract class Device implements Runnable {
// A final object to make sure the lock cannot be overwritten with another Object
private final Object lock = new Object();
private final String name; // the name of this device
private final Polygon shape; // the device's shape in local coords
// a reference to the environment
protected final Environment environment;
// a reference to the robot
protected final MobileRobot robot;
// origin of the device reference frame with regards to the robot frame
protected final Position localPosition;
// the robot current position
protected Position robotPosition;<|fim▁hole|> // the colors of the devices
protected Color backgroundColor = Color.red;
protected Color foregroundColor = Color.blue;
// Is the device running?
protected boolean running;
// Is the device executingCommand a command?
protected boolean executingCommand;
private PrintWriter output;
// the constructor
protected Device(String name, MobileRobot robot, Position local, Environment environment) {
this.name = name;
this.robot = robot;
this.localPosition = local;
this.environment = environment;
this.shape = new Polygon();
this.robotPosition = new Position();
this.running = true;
this.executingCommand = false;
this.commands = new ArrayList<String>();
this.output = null;
robot.readPosition(this.robotPosition);
}
// this method is invoked when the geometric shape of the device is defined
protected void addPoint(int x, int y) {
shape.addPoint(x, y);
}
public boolean sendCommand(String command) {
commands.add(command);
synchronized (lock) {
// Notify the tread that is waiting for commands.
lock.notify();
}
return true;
}
protected synchronized void writeOut(String data) {
if (output != null) {
output.println(data);
} else {
System.out.println(this.name + " output not initialized");
}
}
public void setOutput(PrintWriter output) {
this.output = output;
}
public void run() {
System.out.println("Device " + this.name + " running");
do {
try {
if (executingCommand) {
// pause before the next step
synchronized (this) {
Thread.sleep(MobileRobot.delay);
}
} else if (commands.size() > 0) {
// extracts the the next command and executes it
String command = commands.remove(0);
executeCommand(command);
} else {
// waits for a new command
synchronized (lock) {
// Wait to be notified about a new command (in sendCommand()).
lock.wait();
}
}
// processes a new step
nextStep();
} catch (InterruptedException ie) {
System.err.println("Device : Run was interrupted.");
}
} while (this.running);
}
public Position getRobotPosition() {
return robotPosition;
}
public Position getLocalPosition() {
return localPosition;
}
public Polygon getShape() {
return shape;
}
public Color getBackgroundColor() {
return backgroundColor;
}
public Color getForegroundColor() {
return foregroundColor;
}
public String getName() {
return name;
}
protected abstract void executeCommand(String command);
protected abstract void nextStep();
}<|fim▁end|>
|
// the arrayList with all the commands
protected final ArrayList<String> commands;
|
<|file_name|>NaiveBayes.py<|end_file_name|><|fim▁begin|>import csv
def list_words(text):
words = []
words_tmp = text.lower().split()
for p in words_tmp:
if p not in words and len(p) > 2:
words.append(p)
return words
def training(texts):
c_words ={}
c_categories ={}
c_texts = 0
c_tot_words =0
for t in texts:
c_texts = c_texts + 1
if t[1] not in c_categories:
c_categories[t[1]] = 1
else:
c_categories[t[1]]= c_categories[t[1]] + 1
for t in texts:
words = list_words(t[0])
for p in words:
if p not in c_words:
c_tot_words = c_tot_words +1
c_words[p] = {}
for c in c_categories:
c_words[p][c] = 0
c_words[p][t[1]] = c_words[p][t[1]] + 1
return (c_words, c_categories, c_texts, c_tot_words)
def classifier(subject_line, c_words, c_categories, c_texts, c_tot_words):
category =""
category_prob = 0
for c in c_categories:
prob_c = float(c_categories[c])/float(c_texts)
words = list_words(subject_line)
prob_total_c = prob_c
for p in words:
if p in c_words:
prob_p= float(c_words[p][c])/float(c_tot_words)
prob_cond = prob_p/prob_c
prob =(prob_cond * prob_p)/ prob_c
prob_total_c = prob_total_c * prob
if category_prob < prob_total_c:
category = c
category_prob = prob_total_c
return (category, category_prob)
<|fim▁hole|>
with open('training.csv') as f:
subjects = dict(csv.reader(f, delimiter=','))
p,c,t,tp = training(subjects.items())
#First Test
clase = classifier("Available on Term Life - Free",p,c,t,tp)
print("Result: {0} ".format(clase))
#Second Test
with open("test.csv") as f:
correct = 0
tests = csv.reader(f)
for subject in tests:
clase = classifier(subject[0],p,c,t,tp)
if clase[0] == subject[1]:
correct += 1
print("Efficiency {0} of 10".format(correct))<|fim▁end|>
|
if __name__ == "__main__":
|
<|file_name|>mixin.js<|end_file_name|><|fim▁begin|>(function (tree) {
tree.mixin = {};
tree.mixin.Call = function (elements, args, index) {
this.selector = new(tree.Selector)(elements);
this.arguments = args;
this.index = index;
};
tree.mixin.Call.prototype = {
eval: function (env) {
var mixins, rules = [], match = false;
for (var i = 0; i < env.frames.length; i++) {
if ((mixins = env.frames[i].find(this.selector)).length > 0) {
for (var m = 0; m < mixins.length; m++) {
if (mixins[m].match(this.arguments, env)) {
try {
Array.prototype.push.apply(
rules, mixins[m].eval(env, this.arguments).rules);
match = true;
} catch (e) {
throw { message: e.message, index: e.index, stack: e.stack, call: this.index };
}
}
}
if (match) {
return rules;
} else {
throw { message: 'No matching definition was found for `' +
this.selector.toCSS().trim() + '(' +
this.arguments.map(function (a) {
return a.toCSS();
}).join(', ') + ")`",
index: this.index };
}
}
}
throw { message: this.selector.toCSS().trim() + " is undefined",
index: this.index };
}
};
tree.mixin.Definition = function (name, params, rules) {
this.name = name;
this.selectors = [new(tree.Selector)([new(tree.Element)(null, name)])];
this.params = params;
this.arity = params.length;
this.rules = rules;
this._lookups = {};
this.required = params.reduce(function (count, p) {
if (p.name && !p.value) { return count + 1 }
else { return count }
}, 0);
this.parent = tree.Ruleset.prototype;
this.frames = [];
};
tree.mixin.Definition.prototype = {
toCSS: function () { return "" },
variable: function (name) { return this.parent.variable.call(this, name) },
variables: function () { return this.parent.variables.call(this) },
find: function () { return this.parent.find.apply(this, arguments) },
rulesets: function () { return this.parent.rulesets.apply(this) },
eval: function (env, args) {
var frame = new(tree.Ruleset)(null, []), context;
for (var i = 0, val; i < this.params.length; i++) {
if (this.params[i].name) {
if (val = (args && args[i]) || this.params[i].value) {
frame.rules.unshift(new(tree.Rule)(this.params[i].name, val.eval(env)));
} else {
throw { message: "wrong number of arguments for " + this.name +
' (' + args.length + ' for ' + this.arity + ')' };
}
}
}
return new(tree.Ruleset)(null, this.rules.slice(0)).eval({
frames: [this, frame].concat(this.frames, env.frames)
});
},
match: function (args, env) {
var argsLength = (args && args.length) || 0, len;
if (argsLength < this.required) { return false }
len = Math.min(argsLength, this.arity);
for (var i = 0; i < len; i++) {
if (!this.params[i].name) {
if (args[i].eval(env).toCSS() != this.params[i].value.eval(env).toCSS()) {
return false;
}
}
}
return true;
}
};<|fim▁hole|>
})(require('less/tree'));<|fim▁end|>
| |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>export default [
{
name: 'Types',
examples: [
{
name: 'List',
description: 'A list groups related content',
file: 'List',
},
{
description: 'You can also pass an array of items as props',
file: 'ListShorthand',
},
{
file: 'ListIcon',
},
{
file: 'ListDivided',
},
{
file: 'ListTree',
},
{
name: 'Bulleted',
description: 'A list can mark items with a bullet.',
file: 'Bulleted',
},
{
file: 'BulletedHorizontal',
},
{
name: 'Ordered',
description: 'A list can be ordered numerically.',
file: 'Ordered',
},
{
description:
'You can also use an `ol` and `li` to render an ordered list.',
file: 'OrderedNumber',<|fim▁hole|> file: 'Link',
},
],
},
{
name: 'Content',
examples: [
{
name: 'Item',
description: 'A list item can contain a set of items.',
file: 'Item',
},
{
name: 'Icon',
description: 'A list item can contain an icon.',
file: 'Icon',
},
{
name: 'Image',
description: 'A list item can contain an image.',
file: 'Image',
},
{
name: 'Link',
description: 'A list can contain links.',
file: 'LinkContent',
},
{
file: 'LinkDescription',
},
{
name: 'Header',
description: 'A list item can contain a header.',
file: 'Header',
},
{
name: 'Description',
description: 'A list item can contain a description.',
file: 'Description',
},
],
},
{
name: 'Variations',
examples: [
{
name: 'Inverted',
description: 'A list can be inverted to appear on a dark background.',
file: 'ListInverted',
},
],
},
];<|fim▁end|>
|
},
{
name: 'Link',
description: 'A list can be specially formatted for navigation links.',
|
<|file_name|>PostListView.js<|end_file_name|><|fim▁begin|>import React, { PropTypes } from 'react';
import { connect } from 'react-redux';
import './PostListView.scss';
import { toastr } from 'react-redux-toastr';
import { bindActionCreators } from 'redux';
import {
fetchPostsFromApi,
selectPostCategory,
clearPostsErrors,
clearPostsMessages
} from '../../../actions/actionCreators';
import CategoryFilterContainer from '../../CategoryFilterContainer/CategoryFilterContainer';
import {
PostList,
LoadingIndicator,
Divider,
MessagesSection
} from '../../../components';
import NoPostsFound from '../Misc/NoPostsFound';
// containsCategory :: Object -> Object -> Bool
const containsCategory = (post, category) => {
const categories = post.categories.filter(
(cat) => cat._id == category.id
);
return categories.length > 0;
};
// getFilteredPosts :: Object -> [Object] -> [Object]
const getFilteredPosts = (
category,
posts
) => {
if (category === null || category.name === 'All') {
return posts;
}
return posts.filter((post) => {
if (containsCategory(post, category)) {
return post;
}
return undefined;
});
};
/* Only used internally and it's so small so not worth creating a new component */
const SectionSubTitle = ({
title
}) => (
<h4 className="section-sub-title">
{title}
</h4>
);
SectionSubTitle.propTypes = {
title: PropTypes.string.isRequired
};
class PostListView extends React.Component {
constructor(props) {
super(props);
this.handleSelectCategory = this.handleSelectCategory.bind(this);
this.handleChangePage = this.handleChangePage.bind(this);
this.handleClose = this.handleClose.bind(this);
}
componentDidMount() {
const {
posts,
fetchPosts
} = this.props;
if (!posts.items || posts.items.length === 0) {
fetchPosts();
}
}
handleChangePage() {
//TODO: Implement me!!
}
handleSelectCategory(category) {
const {
selectPostCat
} = this.props;
selectPostCat(category);
}
showMessage(message) {
toastr.info(message);
}
handleClose(sender) {
const {
clearErrors,
clearMessages
} = this.props;
const theElement = sender.target.id;
if (theElement === 'button-close-error-panel') {
clearErrors();
} else if (theElement === 'button-close-messages-panel') {
clearMessages();
}
}
render() {
const {
posts,
isFetching,
postCategories,
selectedCategory,
errors,
messages
} = this.props;
const items = posts.items;
const visiblePosts = getFilteredPosts(selectedCategory, items);
return (
<LoadingIndicator isLoading={isFetching}>
<div className="post-list-view__wrapper">
<MessagesSection messages={messages} errors={errors} onClose={this.handleClose} />
<h1 className="section-header">From the Blog</h1>
<SectionSubTitle
title={selectedCategory.name == 'All' ? // eslint-disable-line
'All Posts'
:
`Selected Category: ${selectedCategory.name}`
}
/>
<Divider />
<CategoryFilterContainer
categories={postCategories}
onSelectCategory={this.handleSelectCategory}
selectedCategory={selectedCategory}
/>
{visiblePosts !== undefined && visiblePosts.length > 0 ?
<PostList
posts={visiblePosts}
onChangePage={this.handleChangePage}
/>
:
<NoPostsFound
selectedCategory={selectedCategory}
/>
}
</div>
</LoadingIndicator>
);
}
}
PostListView.propTypes = {
dispatch: PropTypes.func.isRequired,
errors: PropTypes.array.isRequired,
messages: PropTypes.array.isRequired,
posts: PropTypes.object.isRequired,
isFetching: PropTypes.bool.isRequired,
fetchPosts: PropTypes.func.isRequired,
selectPostCat: PropTypes.func.isRequired,
postCategories: PropTypes.array.isRequired,
selectedCategory: PropTypes.object.isRequired,
clearMessages: PropTypes.func.isRequired,<|fim▁hole|>const mapStateToProps = (state) => ({
posts: state.posts,
postCategories: state.posts.categories,
selectedCategory: state.posts.selectedCategory,
messages: state.messages.posts,
errors: state.errors.posts,
isFetching: state.posts.isFetching
});
// mapDispatchToProps :: {Dispatch} -> {Props}
const mapDispatchToProps = (dispatch) =>
bindActionCreators({
fetchPosts: () => fetchPostsFromApi(),
selectPostCat: (category) => selectPostCategory(category),
clearMessages: () => clearPostsMessages(),
clearErrors: () => clearPostsErrors()
}, dispatch);
export default connect(
mapStateToProps,
mapDispatchToProps
)(PostListView);<|fim▁end|>
|
clearErrors: PropTypes.func.isRequired
};
// mapStateToProps :: {State} -> {Props}
|
<|file_name|>routeDirection.ts<|end_file_name|><|fim▁begin|>export function getName(value) {
if (value == 0) {
return "上班";
}
if (value == 1) {
return "下班";
}
throw "invalid route direction";
}
<|fim▁hole|> if (name == "下班") {
return 1;
}
throw "invalid route direction";
}<|fim▁end|>
|
export function getValue(name) {
if (name == "上班") {
return 0;
}
|
<|file_name|>vectores.py<|end_file_name|><|fim▁begin|>from vectores_oo import Vector
x = input('vector U componente X= ')<|fim▁hole|>U = Vector(x,y)
m = input('vector V magnitud= ')
a = input('vector V angulo= ')
V = Vector(m=m, a=a)
E = input('Escalar= ')
print "U=%s" % U
print "V=%s" % V
print 'UxE=%s' % U.x_escalar(E)
print 'VxE=%s' % V.x_escalar(E)
print 'U+V=%s' % U.Suma(V)
print 'U.V=%s' % U.ProductoPunto(V)
print '|UxV|=%s' % U.Modulo_ProductoCruz(V)<|fim▁end|>
|
y = input('vector U componente Y= ')
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod event;
use self::event::EventStopable;
use std::collections::HashMap;
pub trait ListenerCallable: PartialEq {<|fim▁hole|> fn call(&self, event_name: &str, event: &mut EventStopable);
}
pub struct EventListener {
callback: fn(event_name: &str, event: &mut EventStopable),
}
impl EventListener {
pub fn new (callback: fn(event_name: &str, event: &mut EventStopable)) -> EventListener {
EventListener {callback: callback}
}
}
impl ListenerCallable for EventListener {
fn call (&self, event_name: &str, event: &mut EventStopable) {
let callback = self.callback;
callback(event_name, event);
}
}
impl PartialEq for EventListener {
fn eq(&self, other: &EventListener) -> bool {
(self.callback as *const()) == (other.callback as *const())
}
fn ne(&self, other: &EventListener) -> bool {
!self.eq(other)
}
}
pub trait Dispatchable<S> where S: EventStopable {
fn dispatch (&self, event_name: &str, event: &mut S);
}
pub struct EventDispatcher<'a, L> where L: 'a + ListenerCallable {
listeners: HashMap<&'a str, Vec<&'a L>>,
}
impl<'a, L: 'a + ListenerCallable> EventDispatcher<'a, L> {
pub fn new() -> EventDispatcher<'a, L> {
EventDispatcher{listeners: HashMap::new()}
}
pub fn add_listener(&mut self, event_name: &'a str, listener: &'a L) {
if !self.listeners.contains_key(event_name) {
self.listeners.insert(event_name, Vec::new());
}
if let Some(mut listeners) = self.listeners.get_mut(event_name) {
listeners.push(listener);
}
}
pub fn remove_listener(&mut self, event_name: &'a str, listener: &'a mut L) {
if self.listeners.contains_key(event_name) {
if let Some(mut listeners) = self.listeners.get_mut(event_name) {
match listeners.iter().position(|x| *x == listener) {
Some(index) => {
listeners.remove(index);
},
_ => {},
}
}
}
}
}
impl<'a, S: 'a + EventStopable> Dispatchable<S> for EventDispatcher<'a, EventListener> {
fn dispatch(&self, event_name: &str, event: &mut S) {
if let Some(listeners) = self.listeners.get(event_name) {
for listener in listeners {
listener.call(event_name, event);
                if event.is_propagation_stopped() {
break;
}
}
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use super::event::*;
fn print_event_info(event_name: &str, event: &mut EventStopable) {
println!("callback from event: {}", event_name);
event.stop_propagation();
}
#[test]
fn test_dispatcher() {
let event_name = "test_a";
let mut event = Event::new();
let callback_one: fn(event_name: &str, event: &mut EventStopable) = print_event_info;
let mut listener_one = EventListener::new(callback_one);
let mut dispatcher = EventDispatcher::new();
dispatcher.dispatch(event_name, &mut event);
assert_eq!(false, event.is_propagation_stopped());
dispatcher.dispatch(event_name, &mut event);
assert_eq!(false, event.is_propagation_stopped());
dispatcher.add_listener(event_name, &mut listener_one);
dispatcher.dispatch(event_name, &mut event);
assert_eq!(true, event.is_propagation_stopped());
}
}<|fim▁end|>
| |
<|file_name|>syslogd.go<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 trivago GmbH
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package consumer
import (
"github.com/trivago/gollum/core"
"github.com/trivago/tgo/tnet"
"gopkg.in/mcuadros/go-syslog.v2"
"gopkg.in/mcuadros/go-syslog.v2/format"
"sync"
)
// Syslogd consumer plugin
//
// The syslogd consumer accepts messages from a syslogd compatible socket.
//
// Configuration example
//
// - "consumer.Syslogd":
// Address: "udp://0.0.0.0:514"
// Format: "RFC6587"
//
// Address defines the protocol, host and port or socket to bind to.
// This can either be any ip address and port like "localhost:5880" or a file
// like "unix:///var/gollum.socket". By default this is set to "udp://0.0.0.0:514".
// The protocol can be defined along with the address, e.g. "tcp://..." but
// this may be ignored if a certain protocol format does not support the desired
// transport protocol.
//
// Format defines the syslog standard to expect for message encoding.
// Three standards are currently supported, by default this is set to "RFC6587".
// * RFC3164 (https://tools.ietf.org/html/rfc3164) udp only.
// * RFC5424 (https://tools.ietf.org/html/rfc5424) udp only.
// * RFC6587 (https://tools.ietf.org/html/rfc6587) tcp or udp.
type Syslogd struct {
core.SimpleConsumer `gollumdoc:"embed_type"`
format format.Format // RFC3164, RFC5424 or RFC6587?
protocol string
address string
}
func init() {
core.TypeRegistry.Register(Syslogd{})
}
// Configure initializes this consumer with values from a plugin config.
func (cons *Syslogd) Configure(conf core.PluginConfigReader) error {
cons.SimpleConsumer.Configure(conf)
cons.protocol, cons.address = tnet.ParseAddress(conf.GetString("Address", "udp://0.0.0.0:514"), "tcp")
format := conf.GetString("Format", "RFC6587")
switch cons.protocol {
case "udp", "tcp", "unix":
default:
conf.Errors.Pushf("Unknown protocol type %s", cons.protocol) // ### return, unknown protocol ###
}
switch format {
// http://www.ietf.org/rfc/rfc3164.txt
case "RFC3164":
cons.format = syslog.RFC3164
if cons.protocol == "tcp" {
cons.Log.Warning.Print("RFC3164 demands UDP")<|fim▁hole|> // https://tools.ietf.org/html/rfc5424
case "RFC5424":
cons.format = syslog.RFC5424
if cons.protocol == "tcp" {
cons.Log.Warning.Print("RFC5424 demands UDP")
cons.protocol = "udp"
}
// https://tools.ietf.org/html/rfc6587
case "RFC6587":
cons.format = syslog.RFC6587
default:
conf.Errors.Pushf("Format %s is not supported", format)
}
return conf.Errors.OrNil()
}
// Handle implements the syslog handle interface
func (cons *Syslogd) Handle(parts format.LogParts, code int64, err error) {
content := ""
isString := false
switch cons.format {
case syslog.RFC3164:
content, isString = parts["content"].(string)
case syslog.RFC5424, syslog.RFC6587:
content, isString = parts["message"].(string)
default:
cons.Log.Error.Print("Could not determine the format to retrieve message/content")
}
if !isString {
cons.Log.Error.Print("Message/Content is not a string")
return
}
cons.Enqueue([]byte(content))
}
// Consume opens a new syslog socket.
// Messages are expected to be separated by \n.
func (cons *Syslogd) Consume(workers *sync.WaitGroup) {
server := syslog.NewServer()
server.SetFormat(cons.format)
server.SetHandler(cons)
switch cons.protocol {
case "unix":
if err := server.ListenUnixgram(cons.address); err != nil {
cons.Log.Error.Print("Failed to open unix://", cons.address)
}
case "udp":
if err := server.ListenUDP(cons.address); err != nil {
cons.Log.Error.Print("Failed to open udp://", cons.address)
}
case "tcp":
if err := server.ListenTCP(cons.address); err != nil {
cons.Log.Error.Print("Failed to open tcp://", cons.address)
}
}
server.Boot()
defer server.Kill()
cons.ControlLoop()
server.Wait()
}<|fim▁end|>
|
cons.protocol = "udp"
}
|
<|file_name|>test_checker.py<|end_file_name|><|fim▁begin|>#
# Copyright 2014 Telefonica Investigacion y Desarrollo, S.A.U
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|>#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Unit tests for SPASSWORD checker."""
from keystone import tests
from keystone import exception
from keystone_spassword.contrib.spassword import checker
class TestPasswordChecker(tests.BaseTestCase):
def test_checker(self):
new_password = "stronger"
self.assertRaises(exception.ValidationError,
                          checker.strong_check_password, new_password)<|fim▁end|>
| |
<|file_name|>vendor.ts<|end_file_name|><|fim▁begin|>import '@angular/platform-browser-dynamic';
import '@angular/platform-browser';
import '@angular/core';
import '@angular/http';<|fim▁hole|><|fim▁end|>
|
import '@angular/router';
|
<|file_name|>AboutExpects.js<|end_file_name|><|fim▁begin|>describe("About Expects", function() {
// We shall contemplate truth by testing reality, via spec expectations.
it("should expect true", function() {
expect(true).toBeTruthy(); //This should be true
});
// To understand reality, we must compare our expectations against reality.
it("should expect equality", function () {
var expectedValue = 2;
var actualValue = 1 + 1;
expect(actualValue === expectedValue).toBeTruthy();
});
// Some ways of asserting equality are better than others.
it("should assert equality a better way", function () {
var expectedValue = 2;
var actualValue = 1 + 1;
// toEqual() compares using common sense equality.
expect(actualValue).toEqual(expectedValue);
});
<|fim▁hole|> // Sometimes you need to be really exact about what you "type."
it("should assert equality with ===", function () {
var expectedValue = 2;
var actualValue = (1 + 1);
// toBe() will always use === to compare.
expect(actualValue).toBe(expectedValue);
});
// Sometimes we will ask you to fill in the values.
it("should have filled in values", function () {
expect(1 + 1).toEqual(2);
});
});<|fim▁end|>
| |
<|file_name|>apkbuilder.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
#
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Adds the code parts to a resource APK."""
import argparse
import logging
import os
import shutil
import sys
import tempfile
import zipfile
import zlib
import finalize_apk
from util import build_utils
from util import diff_utils
from util import zipalign
# Input dex.jar files are zipaligned.
zipalign.ApplyZipFileZipAlignFix()
# Taken from aapt's Package.cpp:
_NO_COMPRESS_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.gif', '.wav', '.mp2',
'.mp3', '.ogg', '.aac', '.mpg', '.mpeg', '.mid',
'.midi', '.smf', '.jet', '.rtttl', '.imy', '.xmf',
'.mp4', '.m4a', '.m4v', '.3gp', '.3gpp', '.3g2',
'.3gpp2', '.amr', '.awb', '.wma', '.wmv', '.webm')
def _ParseArgs(args):
parser = argparse.ArgumentParser()
build_utils.AddDepfileOption(parser)
parser.add_argument(
'--assets',
help='GYP-list of files to add as assets in the form '
'"srcPath:zipPath", where ":zipPath" is optional.')
parser.add_argument(
'--java-resources', help='GYP-list of java_resources JARs to include.')
parser.add_argument('--write-asset-list',
action='store_true',
help='Whether to create an assets/assets_list file.')
parser.add_argument(
'--uncompressed-assets',
help='Same as --assets, except disables compression.')
parser.add_argument('--resource-apk',
help='An .ap_ file built using aapt',
required=True)
parser.add_argument('--output-apk',
help='Path to the output file',
required=True)
parser.add_argument('--format', choices=['apk', 'bundle-module'],
default='apk', help='Specify output format.')
parser.add_argument('--dex-file',
help='Path to the classes.dex to use')
parser.add_argument(
'--jdk-libs-dex-file',
help='Path to classes.dex created by dex_jdk_libs.py')
parser.add_argument('--uncompress-dex', action='store_true',
help='Store .dex files uncompressed in the APK')
parser.add_argument('--native-libs',
action='append',
help='GYP-list of native libraries to include. '
'Can be specified multiple times.',
default=[])
parser.add_argument('--secondary-native-libs',
action='append',
help='GYP-list of native libraries for secondary '
'android-abi. Can be specified multiple times.',
default=[])
parser.add_argument('--android-abi',
help='Android architecture to use for native libraries')
parser.add_argument('--secondary-android-abi',
help='The secondary Android architecture to use for'
'secondary native libraries')
parser.add_argument(
'--is-multi-abi',
action='store_true',
help='Will add a placeholder for the missing ABI if no native libs or '
'placeholders are set for either the primary or secondary ABI. Can only '
'be set if both --android-abi and --secondary-android-abi are set.')
parser.add_argument(
'--native-lib-placeholders',
help='GYP-list of native library placeholders to add.')
parser.add_argument(
'--secondary-native-lib-placeholders',
help='GYP-list of native library placeholders to add '
'for the secondary ABI')
parser.add_argument('--uncompress-shared-libraries', default='False',
choices=['true', 'True', 'false', 'False'],
help='Whether to uncompress native shared libraries. Argument must be '
'a boolean value.')
parser.add_argument(
'--apksigner-jar', help='Path to the apksigner executable.')
parser.add_argument('--zipalign-path',
help='Path to the zipalign executable.')
parser.add_argument('--key-path',
help='Path to keystore for signing.')
parser.add_argument('--key-passwd',
help='Keystore password')
parser.add_argument('--key-name',
help='Keystore name')
parser.add_argument(
'--min-sdk-version', required=True, help='Value of APK\'s minSdkVersion')
parser.add_argument(
'--best-compression',
action='store_true',
help='Use zip -9 rather than zip -1')
parser.add_argument(
'--library-always-compress',
action='append',
help='The list of library files that we always compress.')
parser.add_argument(
'--library-renames',
action='append',
help='The list of library files that we prepend crazy. to their names.')
parser.add_argument('--warnings-as-errors',
action='store_true',
help='Treat all warnings as errors.')
diff_utils.AddCommandLineFlags(parser)
options = parser.parse_args(args)
options.assets = build_utils.ParseGnList(options.assets)
options.uncompressed_assets = build_utils.ParseGnList(
options.uncompressed_assets)
options.native_lib_placeholders = build_utils.ParseGnList(
options.native_lib_placeholders)
options.secondary_native_lib_placeholders = build_utils.ParseGnList(
options.secondary_native_lib_placeholders)
options.java_resources = build_utils.ParseGnList(options.java_resources)
options.native_libs = build_utils.ParseGnList(options.native_libs)
options.secondary_native_libs = build_utils.ParseGnList(
options.secondary_native_libs)
options.library_always_compress = build_utils.ParseGnList(
options.library_always_compress)
options.library_renames = build_utils.ParseGnList(options.library_renames)
# --apksigner-jar, --zipalign-path, --key-xxx arguments are
# required when building an APK, but not a bundle module.
if options.format == 'apk':
required_args = [
'apksigner_jar', 'zipalign_path', 'key_path', 'key_passwd', 'key_name'
]
for required in required_args:
if not vars(options)[required]:
raise Exception('Argument --%s is required for APKs.' % (
required.replace('_', '-')))
options.uncompress_shared_libraries = \
options.uncompress_shared_libraries in [ 'true', 'True' ]
if not options.android_abi and (options.native_libs or
options.native_lib_placeholders):
raise Exception('Must specify --android-abi with --native-libs')
if not options.secondary_android_abi and (options.secondary_native_libs or
options.secondary_native_lib_placeholders):
raise Exception('Must specify --secondary-android-abi with'
' --secondary-native-libs')
if options.is_multi_abi and not (options.android_abi
and options.secondary_android_abi):
raise Exception('Must specify --is-multi-abi with both --android-abi '
'and --secondary-android-abi.')
return options
def _SplitAssetPath(path):
"""Returns (src, dest) given an asset path in the form src[:dest]."""
path_parts = path.split(':')
src_path = path_parts[0]
if len(path_parts) > 1:
dest_path = path_parts[1]
else:
dest_path = os.path.basename(src_path)
return src_path, dest_path
def _ExpandPaths(paths):
"""Converts src:dst into tuples and enumerates files within directories.
Args:
paths: Paths in the form "src_path:dest_path"
Returns:
A list of (src_path, dest_path) tuples sorted by dest_path (for stable
ordering within output .apk).
"""
ret = []
for path in paths:
src_path, dest_path = _SplitAssetPath(path)
if os.path.isdir(src_path):
for f in build_utils.FindInDirectory(src_path, '*'):
ret.append((f, os.path.join(dest_path, f[len(src_path) + 1:])))
else:
ret.append((src_path, dest_path))
ret.sort(key=lambda t:t[1])
return ret
def _GetAssetsToAdd(path_tuples,
fast_align,
disable_compression=False,
allow_reads=True):
"""Returns the list of file_detail tuples for assets in the apk.
Args:
path_tuples: List of src_path, dest_path tuples to add.
fast_align: Whether to perform alignment in python zipfile (alternatively
alignment can be done using the zipalign utility out of band).
disable_compression: Whether to disable compression.
allow_reads: If false, we do not try to read the files from disk (to find<|fim▁hole|> """
assets_to_add = []
# Group all uncompressed assets together in the hope that it will increase
# locality of mmap'ed files.
for target_compress in (False, True):
for src_path, dest_path in path_tuples:
compress = not disable_compression and (
os.path.splitext(src_path)[1] not in _NO_COMPRESS_EXTENSIONS)
if target_compress == compress:
# AddToZipHermetic() uses this logic to avoid growing small files.
# We need it here in order to set alignment correctly.
if allow_reads and compress and os.path.getsize(src_path) < 16:
compress = False
apk_path = 'assets/' + dest_path
alignment = 0 if compress and not fast_align else 4
assets_to_add.append((apk_path, src_path, compress, alignment))
return assets_to_add
def _AddFiles(apk, details):
"""Adds files to the apk.
Args:
apk: path to APK to add to.
details: A list of file detail tuples (src_path, apk_path, compress,
alignment) representing what and how files are added to the APK.
"""
for apk_path, src_path, compress, alignment in details:
# This check is only relevant for assets, but it should not matter if it is
# checked for the whole list of files.
try:
apk.getinfo(apk_path)
# Should never happen since write_build_config.py handles merging.
raise Exception(
'Multiple targets specified the asset path: %s' % apk_path)
except KeyError:
zipalign.AddToZipHermetic(
apk,
apk_path,
src_path=src_path,
compress=compress,
alignment=alignment)
def _GetNativeLibrariesToAdd(native_libs, android_abi, uncompress, fast_align,
lib_always_compress, lib_renames):
"""Returns the list of file_detail tuples for native libraries in the apk.
Returns: A list of (src_path, apk_path, compress, alignment) tuple
representing what and how native libraries are added.
"""
libraries_to_add = []
for path in native_libs:
basename = os.path.basename(path)
compress = not uncompress or any(lib_name in basename
for lib_name in lib_always_compress)
rename = any(lib_name in basename for lib_name in lib_renames)
if rename:
basename = 'crazy.' + basename
lib_android_abi = android_abi
if path.startswith('android_clang_arm64_hwasan/'):
lib_android_abi = 'arm64-v8a-hwasan'
apk_path = 'lib/%s/%s' % (lib_android_abi, basename)
alignment = 0 if compress and not fast_align else 0x1000
libraries_to_add.append((apk_path, path, compress, alignment))
return libraries_to_add
def _CreateExpectationsData(native_libs, assets):
"""Creates list of native libraries and assets."""
native_libs = sorted(native_libs)
assets = sorted(assets)
ret = []
for apk_path, _, compress, alignment in native_libs + assets:
ret.append('apk_path=%s, compress=%s, alignment=%s\n' %
(apk_path, compress, alignment))
return ''.join(ret)
def main(args):
build_utils.InitLogging('APKBUILDER_DEBUG')
args = build_utils.ExpandFileArgs(args)
options = _ParseArgs(args)
# Until Python 3.7, there's no better way to set compression level.
# The default is 6.
if options.best_compression:
# Compresses about twice as slow as the default.
zlib.Z_DEFAULT_COMPRESSION = 9
else:
# Compresses about twice as fast as the default.
zlib.Z_DEFAULT_COMPRESSION = 1
# Manually align only when alignment is necessary.
# Python's zip implementation duplicates file comments in the central
# directory, whereas zipalign does not, so use zipalign for official builds.
fast_align = options.format == 'apk' and not options.best_compression
native_libs = sorted(options.native_libs)
# Include native libs in the depfile_deps since GN doesn't know about the
# dependencies when is_component_build=true.
depfile_deps = list(native_libs)
# For targets that depend on static library APKs, dex paths are created by
# the static library's dexsplitter target and GN doesn't know about these
# paths.
if options.dex_file:
depfile_deps.append(options.dex_file)
secondary_native_libs = []
if options.secondary_native_libs:
secondary_native_libs = sorted(options.secondary_native_libs)
depfile_deps += secondary_native_libs
if options.java_resources:
# Included via .build_config, so need to write it to depfile.
depfile_deps.extend(options.java_resources)
assets = _ExpandPaths(options.assets)
uncompressed_assets = _ExpandPaths(options.uncompressed_assets)
# Included via .build_config, so need to write it to depfile.
depfile_deps.extend(x[0] for x in assets)
depfile_deps.extend(x[0] for x in uncompressed_assets)
# Bundle modules have a structure similar to APKs, except that resources
# are compiled in protobuf format (instead of binary xml), and that some
# files are located into different top-level directories, e.g.:
# AndroidManifest.xml -> manifest/AndroidManifest.xml
# classes.dex -> dex/classes.dex
# res/ -> res/ (unchanged)
# assets/ -> assets/ (unchanged)
# <other-file> -> root/<other-file>
#
# Hence, the following variables are used to control the location of files in
# the final archive.
if options.format == 'bundle-module':
apk_manifest_dir = 'manifest/'
apk_root_dir = 'root/'
apk_dex_dir = 'dex/'
else:
apk_manifest_dir = ''
apk_root_dir = ''
apk_dex_dir = ''
def _GetAssetDetails(assets, uncompressed_assets, fast_align, allow_reads):
ret = _GetAssetsToAdd(assets,
fast_align,
disable_compression=False,
allow_reads=allow_reads)
ret.extend(
_GetAssetsToAdd(uncompressed_assets,
fast_align,
disable_compression=True,
allow_reads=allow_reads))
return ret
libs_to_add = _GetNativeLibrariesToAdd(
native_libs, options.android_abi, options.uncompress_shared_libraries,
fast_align, options.library_always_compress, options.library_renames)
if options.secondary_android_abi:
libs_to_add.extend(
_GetNativeLibrariesToAdd(
secondary_native_libs, options.secondary_android_abi,
options.uncompress_shared_libraries, fast_align,
options.library_always_compress, options.library_renames))
if options.expected_file:
# We compute expectations without reading the files. This allows us to check
# expectations for different targets by just generating their build_configs
# and not have to first generate all the actual files and all their
# dependencies (for example by just passing --only-verify-expectations).
asset_details = _GetAssetDetails(assets,
uncompressed_assets,
fast_align,
allow_reads=False)
actual_data = _CreateExpectationsData(libs_to_add, asset_details)
diff_utils.CheckExpectations(actual_data, options)
if options.only_verify_expectations:
if options.depfile:
build_utils.WriteDepfile(options.depfile,
options.actual_file,
inputs=depfile_deps)
return
# If we are past this point, we are going to actually create the final apk so
# we should recompute asset details again but maybe perform some optimizations
# based on the size of the files on disk.
assets_to_add = _GetAssetDetails(
assets, uncompressed_assets, fast_align, allow_reads=True)
# Targets generally do not depend on apks, so no need for only_if_changed.
with build_utils.AtomicOutput(options.output_apk, only_if_changed=False) as f:
with zipfile.ZipFile(options.resource_apk) as resource_apk, \
zipfile.ZipFile(f, 'w') as out_apk:
def add_to_zip(zip_path, data, compress=True, alignment=4):
zipalign.AddToZipHermetic(
out_apk,
zip_path,
data=data,
compress=compress,
alignment=0 if compress and not fast_align else alignment)
def copy_resource(zipinfo, out_dir=''):
add_to_zip(
out_dir + zipinfo.filename,
resource_apk.read(zipinfo.filename),
compress=zipinfo.compress_type != zipfile.ZIP_STORED)
# Make assets come before resources in order to maintain the same file
# ordering as GYP / aapt. http://crbug.com/561862
resource_infos = resource_apk.infolist()
# 1. AndroidManifest.xml
logging.debug('Adding AndroidManifest.xml')
copy_resource(
resource_apk.getinfo('AndroidManifest.xml'), out_dir=apk_manifest_dir)
# 2. Assets
logging.debug('Adding assets/')
_AddFiles(out_apk, assets_to_add)
# 3. Dex files
logging.debug('Adding classes.dex')
if options.dex_file:
with open(options.dex_file, 'rb') as dex_file_obj:
if options.dex_file.endswith('.dex'):
max_dex_number = 1
# This is the case for incremental_install=true.
add_to_zip(
apk_dex_dir + 'classes.dex',
dex_file_obj.read(),
compress=not options.uncompress_dex)
else:
max_dex_number = 0
with zipfile.ZipFile(dex_file_obj) as dex_zip:
for dex in (d for d in dex_zip.namelist() if d.endswith('.dex')):
max_dex_number += 1
add_to_zip(
apk_dex_dir + dex,
dex_zip.read(dex),
compress=not options.uncompress_dex)
if options.jdk_libs_dex_file:
with open(options.jdk_libs_dex_file, 'rb') as dex_file_obj:
add_to_zip(
apk_dex_dir + 'classes{}.dex'.format(max_dex_number + 1),
dex_file_obj.read(),
compress=not options.uncompress_dex)
# 4. Native libraries.
logging.debug('Adding lib/')
_AddFiles(out_apk, libs_to_add)
# Add a placeholder lib if the APK should be multi ABI but is missing libs
# for one of the ABIs.
native_lib_placeholders = options.native_lib_placeholders
secondary_native_lib_placeholders = (
options.secondary_native_lib_placeholders)
if options.is_multi_abi:
if ((secondary_native_libs or secondary_native_lib_placeholders)
and not native_libs and not native_lib_placeholders):
native_lib_placeholders += ['libplaceholder.so']
if ((native_libs or native_lib_placeholders)
and not secondary_native_libs
and not secondary_native_lib_placeholders):
secondary_native_lib_placeholders += ['libplaceholder.so']
# Add placeholder libs.
for name in sorted(native_lib_placeholders):
# Note: Empty libs files are ignored by md5check (can cause issues
# with stale builds when the only change is adding/removing
# placeholders).
apk_path = 'lib/%s/%s' % (options.android_abi, name)
add_to_zip(apk_path, '', alignment=0x1000)
for name in sorted(secondary_native_lib_placeholders):
# Note: Empty libs files are ignored by md5check (can cause issues
# with stale builds when the only change is adding/removing
# placeholders).
apk_path = 'lib/%s/%s' % (options.secondary_android_abi, name)
add_to_zip(apk_path, '', alignment=0x1000)
# 5. Resources
logging.debug('Adding res/')
for info in sorted(resource_infos, key=lambda i: i.filename):
if info.filename != 'AndroidManifest.xml':
copy_resource(info)
# 6. Java resources that should be accessible via
# Class.getResourceAsStream(), in particular parts of Emma jar.
# Prebuilt jars may contain class files which we shouldn't include.
logging.debug('Adding Java resources')
for java_resource in options.java_resources:
with zipfile.ZipFile(java_resource, 'r') as java_resource_jar:
for apk_path in sorted(java_resource_jar.namelist()):
apk_path_lower = apk_path.lower()
if apk_path_lower.startswith('meta-inf/'):
continue
if apk_path_lower.endswith('/'):
continue
if apk_path_lower.endswith('.class'):
continue
add_to_zip(apk_root_dir + apk_path,
java_resource_jar.read(apk_path))
if options.format == 'apk':
zipalign_path = None if fast_align else options.zipalign_path
finalize_apk.FinalizeApk(options.apksigner_jar,
zipalign_path,
f.name,
f.name,
options.key_path,
options.key_passwd,
options.key_name,
int(options.min_sdk_version),
warnings_as_errors=options.warnings_as_errors)
logging.debug('Moving file into place')
if options.depfile:
build_utils.WriteDepfile(options.depfile,
options.output_apk,
inputs=depfile_deps)
if __name__ == '__main__':
main(sys.argv[1:])<|fim▁end|>
|
their size for example).
Returns: A list of (src_path, apk_path, compress, alignment) tuple
representing what and how assets are added.
|
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django import http
from django.conf.urls import patterns
from django.contrib import admin
from django.db import models
from django.forms.models import modelform_factory
from django.shortcuts import get_object_or_404
from django.template import loader, Context
from django.views.generic import View
def get_printable_field_value(instance, fieldname):
""" Get the display value of a model field, showing a comma-delimited
list for M2M fields.
"""
field = instance._meta.get_field(fieldname)
field_value = getattr(instance, fieldname)
if isinstance(field, models.ManyToManyField):
field_value = ', '.join([unicode(f) for f in
field_value.all()])
return field_value
class AjaxModelFormView(View):
""" Handles AJAX updates of a single field on an object
(You likely don't need to use this directly as the admin
registers a URL for it itself.)
"""
model = None
valid_fields = None
def __init__(self, model, valid_fields, **kwargs):
self.model = model
self.valid_fields = valid_fields
def post(self, request, object_id, *args, **kwargs):
if not request.user or not request.user.is_staff:
return http.HttpResponseForbidden()
request = request.POST.copy()
fieldname = request.pop('field', None)[0]
form_prefix = request.pop('prefix', None)[0]
# prevent setting fields that weren't made AJAX-editable
if fieldname not in self.valid_fields:
return http.HttpResponseBadRequest()
ItemForm = modelform_factory(self.model, fields=(fieldname,))
instance = get_object_or_404(self.model, pk=object_id)
form = ItemForm(request, instance=instance, prefix=form_prefix)
if not form or not form.is_valid():
return http.HttpResponseBadRequest()
form.save()
new_value = get_printable_field_value(instance, fieldname)
return http.HttpResponse(new_value)
class AjaxModelAdmin(admin.ModelAdmin):
""" Admin class providing support for inline forms in
listview that are submitted through AJAX.
"""
def __init__(self, *args, **kwargs):
HANDLER_NAME_TPL = "_%s_ajax_handler"<|fim▁hole|>
self.list_display = list(self.list_display)
self.list_display = self.list_display + map(lambda name: HANDLER_NAME_TPL % name,
self.ajax_list_display)
super(AjaxModelAdmin, self).__init__(*args, **kwargs)
for name in self.ajax_list_display:
setattr(self, HANDLER_NAME_TPL % name,
self._get_field_handler(name))
self.ajax_item_template = loader.get_template('ajax_changelist/'
'field_form.html')
def get_urls(self):
""" Add endpoint for saving a new field value. """
urls = super(AjaxModelAdmin, self).get_urls()
list_urls = patterns('',
(r'^(?P<object_id>\d+)$',
AjaxModelFormView.as_view(model=self.model,
valid_fields=self.ajax_list_display)))
return list_urls + urls
def _get_field_handler(self, fieldname):
""" Handle rendering of AJAX-editable fields for the changelist, by
dynamically building a callable for each field.
"""
def handler_function(obj, *args, **kwargs):
ItemForm = modelform_factory(self.model, fields=(fieldname,))
form = ItemForm(instance=obj, prefix="c" + unicode(obj.id))
field_value = get_printable_field_value(obj, fieldname)
# Render the field value and edit form
return self.ajax_item_template.render(Context({
'object_id': obj.id,
'field_name': fieldname,
'form': form.as_p(),
'field_value': field_value
}))
handler_function.allow_tags = True
handler_function.short_description = fieldname
return handler_function
class Media:
#FIXME: dripping jQueries is straight-up wack.
js = ('//ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js',
'ajax_changelist/js/lib/jquery.django_csrf.js',
'ajax_changelist/js/admin.js',)
css = {
'all': ('ajax_changelist/css/admin.css',)
}<|fim▁end|>
|
if not hasattr(self, 'ajax_list_display'):
self.ajax_list_display = []
|
<|file_name|>tasksList.js<|end_file_name|><|fim▁begin|>/**
* Created by Adrian on 2016-05-08.
*/
import angular from "angular";
import angularMeteor from "angular-meteor";
import uiRouter from "angular-ui-router";
import "./mobile.html";
import {Tasks} from "../../../api/tasks/index";
import {name as TaskItem} from "../taskItem/taskItem";
class TasksList {
constructor($scope, $reactive) {
'ngInject';
$reactive(this).attach($scope);
this.subscribe('tasks');
this.helpers({
tasks() {
return Tasks.find();
}
});
}
}
const template = 'mobile';
<|fim▁hole|>const name = 'tasksList';
export default angular.module(name, [
angularMeteor,
uiRouter,
TaskItem
]).component(name, {
templateUrl: `imports/ui/components/${name}/${template}.html`,
controllerAs: name,
controller: TasksList
});<|fim▁end|>
| |
<|file_name|>common_test.go<|end_file_name|><|fim▁begin|>// Copyright 2018 The OpenSDS Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
package oceanstor
import (
"crypto/md5"
"encoding/hex"
"fmt"
"math/rand"
"testing"
"time"
"github.com/opensds/opensds/pkg/utils"
)
func TestEncodeName(t *testing.T) {
id := "05935681-8a00-4988-bfd8-90fdb429aecd"
	expect := "05935681-477ef4d6bb4af7652c1b97"
	result := EncodeName(id)
	if result != expect {
t.Error("Test EncodeName failed")
}
if len(result) > MaxNameLength {
t.Error("EncodeName exceed the max name length")
}
}
func TestEncodeHostName(t *testing.T) {
normalName := "1234567890ABCabcZz_.-"
result := EncodeHostName(normalName)
if result != normalName {
t.Error("Test EncodeHostName failed")
}
if len(result) > MaxNameLength {
t.Error("EncodeName exceed the max name length")
}
longName := "opensds-huawei-oceanstor-opensds-huawei-oceanstor"<|fim▁hole|> // generate expected result
h := md5.New()
h.Write([]byte(longName))
encodedName := hex.EncodeToString(h.Sum(nil))
expectedResult := encodedName[:MaxNameLength]
if result != expectedResult {
t.Error("Test EncodeHostName failed")
}
if len(result) > MaxNameLength {
t.Error("EncodeName exceed the max name length")
}
invalidName := "iqn.1993-08.org.debian:01:d1f6c8e930e7"
result = EncodeHostName(invalidName)
// generate expected result
h = md5.New()
h.Write([]byte(invalidName))
encodedName = hex.EncodeToString(h.Sum(nil))
expectedResult = encodedName[:MaxNameLength]
if result != expectedResult {
t.Error("Test EncodeHostName failed")
}
if len(result) > MaxNameLength {
t.Error("EncodeName exceed the max name length")
}
}
func randSeq(n int) string {
var letters = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")
b := make([]rune, n)
for i := range b {
b[i] = letters[rand.Intn(len(letters))]
}
return string(b)
}
func TestTruncateDescription(t *testing.T) {
normalDescription := "This is huawei oceanstor driver testing"
result := TruncateDescription(normalDescription)
if result != normalDescription {
t.Error("Test TruncateDescription failed")
}
if len(result) > MaxDescriptionLength {
t.Error("TruncateDescription exceed the max name length")
}
longDescription := randSeq(MaxDescriptionLength + 1)
result = TruncateDescription(longDescription)
if len(result) > MaxDescriptionLength {
t.Error("TruncateDescription exceed the max name length")
}
longDescription = randSeq(MaxDescriptionLength + 255)
result = TruncateDescription(longDescription)
if len(result) > MaxDescriptionLength {
t.Error("TruncateDescription exceed the max name length")
}
}
func TestWaitForCondition(t *testing.T) {
var count = 0
err := utils.WaitForCondition(func() (bool, error) {
count++
time.Sleep(2 * time.Microsecond)
if count >= 5 {
return true, nil
}
return false, nil
}, 1*time.Microsecond, 100*time.Second)
if err != nil {
t.Errorf("Test WaitForCondition failed, %v", err)
}
count = 0
err = utils.WaitForCondition(func() (bool, error) {
count++
time.Sleep(1 * time.Millisecond)
if count >= 5 {
return true, nil
}
return false, nil
}, 4*time.Millisecond, 100*time.Millisecond)
if err != nil {
t.Errorf("Test WaitForCondition failed, %v", err)
}
err = utils.WaitForCondition(func() (bool, error) {
return true, fmt.Errorf("test error....")
}, 4*time.Millisecond, 100*time.Millisecond)
if err == nil {
t.Errorf("Test WaitForCondition failed, %v", err)
}
count = 0
err = utils.WaitForCondition(func() (bool, error) {
count++
time.Sleep(2 * time.Millisecond)
if count >= 5 {
return true, nil
}
return false, nil
}, 2*time.Millisecond, 5*time.Millisecond)
if err == nil {
t.Errorf("Test WaitForCondition failed, %v", err)
}
}<|fim▁end|>
|
result = EncodeHostName(longName)
|
<|file_name|>app.js<|end_file_name|><|fim▁begin|>//= require ../bower_components/jquery/dist/jquery.js
'use strict';
var APP = {};
$(function() {<|fim▁hole|><|fim▁end|>
|
console.log('Hello from your jQuery application!');
});
|
<|file_name|>pub-extern-privacy.rs<|end_file_name|><|fim▁begin|>use std::cast::transmute;
mod a {
extern {
pub fn free(x: *u8);
}
}
#[fixed_stack_segment] #[inline(never)]<|fim▁hole|>}<|fim▁end|>
|
pub fn main() {
unsafe {
a::free(transmute(0));
}
|
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
from django.contrib.auth.models import User
from .models import Stock, StockHistory, StockSelection, SectorHistory, StockNews
class CommonAdmin(admin.ModelAdmin):
date_hierarchy = 'pub_date'
class SectorAdmin(CommonAdmin):
list_display = ('Symbol', 'Sector', 'pub_date')
search_fields = ['Symbol', 'Sector']
class StockAdmin(CommonAdmin):
list_display = ('Symbol', 'Name','MarketCap', 'Catagory', 'pub_date')
search_fields = ['Symbol']
class StockRelativeAdmin(CommonAdmin):
def stock_info(obj):
return '{}, {}, {}, {}'.format(<|fim▁hole|> obj.stock.MarketCap,
obj.stock.pub_date,
)
list_display = (stock_info, 'pub_date')
search_fields = ['stock__Symbol']
admin.site.register(Stock, StockAdmin)
admin.site.register(SectorHistory, SectorAdmin)
admin.site.register(StockHistory, StockRelativeAdmin)
admin.site.register(StockSelection, StockRelativeAdmin)
admin.site.register(StockNews, StockRelativeAdmin)<|fim▁end|>
|
obj.stock.Symbol,
obj.stock.Name,
|
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='SkipRequest',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('key', models.CharField(max_length=64, verbose_name='Sender Key')),
],
options={
'verbose_name': 'Skip request',
'verbose_name_plural': 'Skip requests',
},
bases=(models.Model,),
),<|fim▁hole|> ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('description', models.TextField(help_text='Description text for the video', verbose_name='Description', blank=True)),
('youtube_url', models.URLField(help_text='URL to a youtube video', verbose_name='Youtube URL')),
('key', models.CharField(max_length=64, null=True, verbose_name='Sender Key', blank=True)),
('deleted', models.IntegerField(default=False, verbose_name='Deleted')),
('playing', models.BooleanField(default=False, verbose_name='Playing')),
('duration', models.IntegerField(default=0, verbose_name='Duration')),
],
options={
'verbose_name': 'Video',
'verbose_name_plural': 'Videos',
},
bases=(models.Model,),
),
migrations.AddField(
model_name='skiprequest',
name='event',
field=models.ForeignKey(verbose_name='Video', to='manager.Video'),
preserve_default=True,
),
]<|fim▁end|>
|
migrations.CreateModel(
name='Video',
fields=[
|
<|file_name|>directory_v1.js<|end_file_name|><|fim▁begin|>/**
* Copyright 2014 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* jshint maxlen: false */
'use strict';
var createAPIRequest = require('../../lib/apirequest');
/**
* Admin Directory API
*
* @classdesc The Admin SDK Directory API lets you view and manage enterprise resources such as users and groups, administrative notifications, security features, and more.
* @namespace admin
* @version directory_v1
* @variation directory_v1
* @this Admin
* @param {object=} options Options for Admin
*/
function Admin(options) {
var self = this;
this._options = options || {};
this.asps = {
/**
* directory.asps.delete
*
* @desc Delete an ASP issued by a user.
*
* @alias directory.asps.delete
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {integer} params.codeId - The unique ID of the ASP to be deleted.
* @param {string} params.userKey - Identifies the user in the API request. The value can be the user's primary email address, alias email address, or unique user ID.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
delete: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}/asps/{codeId}',
method: 'DELETE'
},
params: params,
requiredParams: ['userKey', 'codeId'],
pathParams: ['codeId', 'userKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.asps.get
*
* @desc Get information about an ASP issued by a user.
*
* @alias directory.asps.get
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {integer} params.codeId - The unique ID of the ASP.
* @param {string} params.userKey - Identifies the user in the API request. The value can be the user's primary email address, alias email address, or unique user ID.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
get: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}/asps/{codeId}',
method: 'GET'
},
params: params,
requiredParams: ['userKey', 'codeId'],
pathParams: ['codeId', 'userKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.asps.list
*
* @desc List the ASPs issued by a user.
*
* @alias directory.asps.list
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.userKey - Identifies the user in the API request. The value can be the user's primary email address, alias email address, or unique user ID.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
list: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}/asps',
method: 'GET'
},
params: params,
requiredParams: ['userKey'],
pathParams: ['userKey'],
context: self
};
return createAPIRequest(parameters, callback);
}
};
this.channels = {
/**
* admin.channels.stop
*
* @desc Stop watching resources through this channel
*
* @alias admin.channels.stop
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
stop: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/admin/directory_v1/channels/stop',
method: 'POST'
},
params: params,
requiredParams: [],
pathParams: [],
context: self
};
return createAPIRequest(parameters, callback);
}
};
this.chromeosdevices = {
/**
* directory.chromeosdevices.get
*
* @desc Retrieve Chrome OS Device
*
* @alias directory.chromeosdevices.get
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customerId - Immutable id of the Google Apps account
* @param {string} params.deviceId - Immutable id of Chrome OS Device
* @param {string=} params.projection - Restrict information returned to a set of selected fields.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
get: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customerId}/devices/chromeos/{deviceId}',
method: 'GET'
},
params: params,
requiredParams: ['customerId', 'deviceId'],
pathParams: ['customerId', 'deviceId'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.chromeosdevices.list
*
* @desc Retrieve all Chrome OS Devices of a customer (paginated)
*
* @alias directory.chromeosdevices.list
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customerId - Immutable id of the Google Apps account
* @param {integer=} params.maxResults - Maximum number of results to return. Default is 100
* @param {string=} params.orderBy - Column to use for sorting results
* @param {string=} params.pageToken - Token to specify next page in the list
* @param {string=} params.projection - Restrict information returned to a set of selected fields.
* @param {string=} params.query - Search string in the format given at http://support.google.com/chromeos/a/bin/answer.py?hl=en&answer=1698333
* @param {string=} params.sortOrder - Whether to return results in ascending or descending order. Only of use when orderBy is also used
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
list: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customerId}/devices/chromeos',
method: 'GET'
},
params: params,
requiredParams: ['customerId'],
pathParams: ['customerId'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.chromeosdevices.patch
*
* @desc Update Chrome OS Device. This method supports patch semantics.
*
* @alias directory.chromeosdevices.patch
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customerId - Immutable id of the Google Apps account
* @param {string} params.deviceId - Immutable id of Chrome OS Device
* @param {string=} params.projection - Restrict information returned to a set of selected fields.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
patch: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customerId}/devices/chromeos/{deviceId}',
method: 'PATCH'
},
params: params,
requiredParams: ['customerId', 'deviceId'],
pathParams: ['customerId', 'deviceId'],
context: self
};
return createAPIRequest(parameters, callback);
},
<|fim▁hole|> *
* @alias directory.chromeosdevices.update
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customerId - Immutable id of the Google Apps account
* @param {string} params.deviceId - Immutable id of Chrome OS Device
* @param {string=} params.projection - Restrict information returned to a set of selected fields.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
update: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customerId}/devices/chromeos/{deviceId}',
method: 'PUT'
},
params: params,
requiredParams: ['customerId', 'deviceId'],
pathParams: ['customerId', 'deviceId'],
context: self
};
return createAPIRequest(parameters, callback);
}
};
this.groups = {
/**
* directory.groups.delete
*
* @desc Delete Group
*
* @alias directory.groups.delete
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.groupKey - Email or immutable Id of the group
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
delete: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/groups/{groupKey}',
method: 'DELETE'
},
params: params,
requiredParams: ['groupKey'],
pathParams: ['groupKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.groups.get
*
* @desc Retrieve Group
*
* @alias directory.groups.get
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.groupKey - Email or immutable Id of the group
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
get: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/groups/{groupKey}',
method: 'GET'
},
params: params,
requiredParams: ['groupKey'],
pathParams: ['groupKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.groups.insert
*
* @desc Create Group
*
* @alias directory.groups.insert
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
insert: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/groups',
method: 'POST'
},
params: params,
requiredParams: [],
pathParams: [],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.groups.list
*
* @desc Retrieve all groups in a domain (paginated)
*
* @alias directory.groups.list
* @memberOf! admin(directory_v1)
*
* @param {object=} params - Parameters for request
* @param {string=} params.customer - Immutable id of the Google Apps account. In case of multi-domain, to fetch all groups for a customer, fill this field instead of domain.
* @param {string=} params.domain - Name of the domain. Fill this field to get groups from only this domain. To return all groups in a multi-domain fill customer field instead.
* @param {integer=} params.maxResults - Maximum number of results to return. Default is 200
* @param {string=} params.pageToken - Token to specify next page in the list
     * @param {string=} params.userKey - Email or immutable Id of the user; if given, only the groups that the user is a member of are listed. If Id, it should match with id of user object
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
list: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/groups',
method: 'GET'
},
params: params,
requiredParams: [],
pathParams: [],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.groups.patch
*
* @desc Update Group. This method supports patch semantics.
*
* @alias directory.groups.patch
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.groupKey - Email or immutable Id of the group. If Id, it should match with id of group object
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
patch: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/groups/{groupKey}',
method: 'PATCH'
},
params: params,
requiredParams: ['groupKey'],
pathParams: ['groupKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.groups.update
*
* @desc Update Group
*
* @alias directory.groups.update
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.groupKey - Email or immutable Id of the group. If Id, it should match with id of group object
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
update: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/groups/{groupKey}',
method: 'PUT'
},
params: params,
requiredParams: ['groupKey'],
pathParams: ['groupKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
aliases: {
/**
* directory.groups.aliases.delete
*
       * @desc Remove an alias for the group
*
* @alias directory.groups.aliases.delete
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.alias - The alias to be removed
* @param {string} params.groupKey - Email or immutable Id of the group
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
delete: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/groups/{groupKey}/aliases/{alias}',
method: 'DELETE'
},
params: params,
requiredParams: ['groupKey', 'alias'],
pathParams: ['alias', 'groupKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.groups.aliases.insert
*
       * @desc Add an alias for the group
*
* @alias directory.groups.aliases.insert
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.groupKey - Email or immutable Id of the group
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
insert: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/groups/{groupKey}/aliases',
method: 'POST'
},
params: params,
requiredParams: ['groupKey'],
pathParams: ['groupKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.groups.aliases.list
*
* @desc List all aliases for a group
*
* @alias directory.groups.aliases.list
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.groupKey - Email or immutable Id of the group
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
list: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/groups/{groupKey}/aliases',
method: 'GET'
},
params: params,
requiredParams: ['groupKey'],
pathParams: ['groupKey'],
context: self
};
return createAPIRequest(parameters, callback);
}
}
};
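  // Illustrative only: a hedged sketch of how the groups resource above might be called.
  // The require path, the google.admin('directory_v1') factory, the authClient and the
  // 'my_customer' value are assumptions rather than part of this generated file; real
  // calls need an authorized OAuth2 client with the appropriate Directory API scope.
  //
  //   var google = require('googleapis');
  //   var admin = google.admin('directory_v1');
  //   admin.groups.list({
  //     auth: authClient,          // assumed pre-authorized OAuth2 client
  //     customer: 'my_customer',   // assumed placeholder customer id
  //     maxResults: 10
  //   }, function (err, response) {
  //     if (err) { return console.error(err); }
  //     console.log(response.groups);
  //   });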
this.members = {
/**
* directory.members.delete
*
* @desc Remove membership.
*
* @alias directory.members.delete
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.groupKey - Email or immutable Id of the group
* @param {string} params.memberKey - Email or immutable Id of the member
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
delete: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/groups/{groupKey}/members/{memberKey}',
method: 'DELETE'
},
params: params,
requiredParams: ['groupKey', 'memberKey'],
pathParams: ['groupKey', 'memberKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.members.get
*
* @desc Retrieve Group Member
*
* @alias directory.members.get
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.groupKey - Email or immutable Id of the group
* @param {string} params.memberKey - Email or immutable Id of the member
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
get: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/groups/{groupKey}/members/{memberKey}',
method: 'GET'
},
params: params,
requiredParams: ['groupKey', 'memberKey'],
pathParams: ['groupKey', 'memberKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.members.insert
*
* @desc Add user to the specified group.
*
* @alias directory.members.insert
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.groupKey - Email or immutable Id of the group
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
insert: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/groups/{groupKey}/members',
method: 'POST'
},
params: params,
requiredParams: ['groupKey'],
pathParams: ['groupKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.members.list
*
* @desc Retrieve all members in a group (paginated)
*
* @alias directory.members.list
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.groupKey - Email or immutable Id of the group
* @param {integer=} params.maxResults - Maximum number of results to return. Default is 200
* @param {string=} params.pageToken - Token to specify next page in the list
* @param {string=} params.roles - Comma separated role values to filter list results on.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
list: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/groups/{groupKey}/members',
method: 'GET'
},
params: params,
requiredParams: ['groupKey'],
pathParams: ['groupKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.members.patch
*
* @desc Update membership of a user in the specified group. This method supports patch semantics.
*
* @alias directory.members.patch
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.groupKey - Email or immutable Id of the group. If Id, it should match with id of group object
* @param {string} params.memberKey - Email or immutable Id of the user. If Id, it should match with id of member object
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
patch: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/groups/{groupKey}/members/{memberKey}',
method: 'PATCH'
},
params: params,
requiredParams: ['groupKey', 'memberKey'],
pathParams: ['groupKey', 'memberKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.members.update
*
* @desc Update membership of a user in the specified group.
*
* @alias directory.members.update
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.groupKey - Email or immutable Id of the group. If Id, it should match with id of group object
* @param {string} params.memberKey - Email or immutable Id of the user. If Id, it should match with id of member object
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
update: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/groups/{groupKey}/members/{memberKey}',
method: 'PUT'
},
params: params,
requiredParams: ['groupKey', 'memberKey'],
pathParams: ['groupKey', 'memberKey'],
context: self
};
return createAPIRequest(parameters, callback);
}
};
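  // Illustrative only: a hedged sketch of adding a member through the members resource
  // above; 'admin' is assumed to be the directory_v1 client from the previous sketch,
  // and the group and email values are placeholders.
  //
  //   admin.members.insert({
  //     groupKey: 'team@example.com',
  //     resource: { email: 'new.member@example.com', role: 'MEMBER' }
  //   }, function (err, member) {
  //     if (err) { return console.error(err); }
  //     console.log(member);
  //   });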
this.mobiledevices = {
/**
* directory.mobiledevices.action
*
* @desc Take action on Mobile Device
*
* @alias directory.mobiledevices.action
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customerId - Immutable id of the Google Apps account
* @param {string} params.resourceId - Immutable id of Mobile Device
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
action: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customerId}/devices/mobile/{resourceId}/action',
method: 'POST'
},
params: params,
requiredParams: ['customerId', 'resourceId'],
pathParams: ['customerId', 'resourceId'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.mobiledevices.delete
*
* @desc Delete Mobile Device
*
* @alias directory.mobiledevices.delete
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customerId - Immutable id of the Google Apps account
* @param {string} params.resourceId - Immutable id of Mobile Device
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
delete: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customerId}/devices/mobile/{resourceId}',
method: 'DELETE'
},
params: params,
requiredParams: ['customerId', 'resourceId'],
pathParams: ['customerId', 'resourceId'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.mobiledevices.get
*
* @desc Retrieve Mobile Device
*
* @alias directory.mobiledevices.get
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customerId - Immutable id of the Google Apps account
* @param {string=} params.projection - Restrict information returned to a set of selected fields.
* @param {string} params.resourceId - Immutable id of Mobile Device
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
get: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customerId}/devices/mobile/{resourceId}',
method: 'GET'
},
params: params,
requiredParams: ['customerId', 'resourceId'],
pathParams: ['customerId', 'resourceId'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.mobiledevices.list
*
* @desc Retrieve all Mobile Devices of a customer (paginated)
*
* @alias directory.mobiledevices.list
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customerId - Immutable id of the Google Apps account
* @param {integer=} params.maxResults - Maximum number of results to return. Default is 100
* @param {string=} params.orderBy - Column to use for sorting results
* @param {string=} params.pageToken - Token to specify next page in the list
* @param {string=} params.projection - Restrict information returned to a set of selected fields.
* @param {string=} params.query - Search string in the format given at http://support.google.com/a/bin/answer.py?hl=en&answer=1408863#search
* @param {string=} params.sortOrder - Whether to return results in ascending or descending order. Only of use when orderBy is also used
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
list: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customerId}/devices/mobile',
method: 'GET'
},
params: params,
requiredParams: ['customerId'],
pathParams: ['customerId'],
context: self
};
return createAPIRequest(parameters, callback);
}
};
this.notifications = {
/**
* directory.notifications.delete
*
* @desc Deletes a notification
*
* @alias directory.notifications.delete
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customer - The unique ID for the customer's Google account. The customerId is also returned as part of the Users resource.
* @param {string} params.notificationId - The unique ID of the notification.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
delete: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customer}/notifications/{notificationId}',
method: 'DELETE'
},
params: params,
requiredParams: ['customer', 'notificationId'],
pathParams: ['customer', 'notificationId'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.notifications.get
*
* @desc Retrieves a notification.
*
* @alias directory.notifications.get
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customer - The unique ID for the customer's Google account. The customerId is also returned as part of the Users resource.
* @param {string} params.notificationId - The unique ID of the notification.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
get: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customer}/notifications/{notificationId}',
method: 'GET'
},
params: params,
requiredParams: ['customer', 'notificationId'],
pathParams: ['customer', 'notificationId'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.notifications.list
*
* @desc Retrieves a list of notifications.
*
* @alias directory.notifications.list
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customer - The unique ID for the customer's Google account.
* @param {string=} params.language - The ISO 639-1 code of the language notifications are returned in. The default is English (en).
* @param {integer=} params.maxResults - Maximum number of notifications to return per page. The default is 100.
* @param {string=} params.pageToken - The token to specify the page of results to retrieve.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
list: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customer}/notifications',
method: 'GET'
},
params: params,
requiredParams: ['customer'],
pathParams: ['customer'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.notifications.patch
*
* @desc Updates a notification. This method supports patch semantics.
*
* @alias directory.notifications.patch
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customer - The unique ID for the customer's Google account.
* @param {string} params.notificationId - The unique ID of the notification.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
patch: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customer}/notifications/{notificationId}',
method: 'PATCH'
},
params: params,
requiredParams: ['customer', 'notificationId'],
pathParams: ['customer', 'notificationId'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.notifications.update
*
* @desc Updates a notification.
*
* @alias directory.notifications.update
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customer - The unique ID for the customer's Google account.
* @param {string} params.notificationId - The unique ID of the notification.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
update: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customer}/notifications/{notificationId}',
method: 'PUT'
},
params: params,
requiredParams: ['customer', 'notificationId'],
pathParams: ['customer', 'notificationId'],
context: self
};
return createAPIRequest(parameters, callback);
}
};
this.orgunits = {
/**
* directory.orgunits.delete
*
* @desc Remove Organization Unit
*
* @alias directory.orgunits.delete
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customerId - Immutable id of the Google Apps account
* @param {string} params.orgUnitPath - Full path of the organization unit
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
delete: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customerId}/orgunits/{orgUnitPath}',
method: 'DELETE'
},
params: params,
requiredParams: ['customerId', 'orgUnitPath'],
pathParams: ['customerId', 'orgUnitPath'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.orgunits.get
*
* @desc Retrieve Organization Unit
*
* @alias directory.orgunits.get
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customerId - Immutable id of the Google Apps account
* @param {string} params.orgUnitPath - Full path of the organization unit
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
get: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customerId}/orgunits/{orgUnitPath}',
method: 'GET'
},
params: params,
requiredParams: ['customerId', 'orgUnitPath'],
pathParams: ['customerId', 'orgUnitPath'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.orgunits.insert
*
* @desc Add Organization Unit
*
* @alias directory.orgunits.insert
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customerId - Immutable id of the Google Apps account
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
insert: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customerId}/orgunits',
method: 'POST'
},
params: params,
requiredParams: ['customerId'],
pathParams: ['customerId'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.orgunits.list
*
* @desc Retrieve all Organization Units
*
* @alias directory.orgunits.list
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customerId - Immutable id of the Google Apps account
     * @param {string=} params.orgUnitPath - The URL-encoded organization unit
* @param {string=} params.type - Whether to return all sub-organizations or just immediate children
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
list: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customerId}/orgunits',
method: 'GET'
},
params: params,
requiredParams: ['customerId'],
pathParams: ['customerId'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.orgunits.patch
*
* @desc Update Organization Unit. This method supports patch semantics.
*
* @alias directory.orgunits.patch
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customerId - Immutable id of the Google Apps account
* @param {string} params.orgUnitPath - Full path of the organization unit
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
patch: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customerId}/orgunits/{orgUnitPath}',
method: 'PATCH'
},
params: params,
requiredParams: ['customerId', 'orgUnitPath'],
pathParams: ['customerId', 'orgUnitPath'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.orgunits.update
*
* @desc Update Organization Unit
*
* @alias directory.orgunits.update
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customerId - Immutable id of the Google Apps account
* @param {string} params.orgUnitPath - Full path of the organization unit
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
update: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customerId}/orgunits/{orgUnitPath}',
method: 'PUT'
},
params: params,
requiredParams: ['customerId', 'orgUnitPath'],
pathParams: ['customerId', 'orgUnitPath'],
context: self
};
return createAPIRequest(parameters, callback);
}
};
this.schemas = {
/**
* directory.schemas.delete
*
* @desc Delete schema
*
* @alias directory.schemas.delete
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customerId - Immutable id of the Google Apps account
* @param {string} params.schemaKey - Name or immutable Id of the schema
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
delete: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customerId}/schemas/{schemaKey}',
method: 'DELETE'
},
params: params,
requiredParams: ['customerId', 'schemaKey'],
pathParams: ['customerId', 'schemaKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.schemas.get
*
* @desc Retrieve schema
*
* @alias directory.schemas.get
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customerId - Immutable id of the Google Apps account
* @param {string} params.schemaKey - Name or immutable Id of the schema
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
get: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customerId}/schemas/{schemaKey}',
method: 'GET'
},
params: params,
requiredParams: ['customerId', 'schemaKey'],
pathParams: ['customerId', 'schemaKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.schemas.insert
*
* @desc Create schema.
*
* @alias directory.schemas.insert
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customerId - Immutable id of the Google Apps account
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
insert: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customerId}/schemas',
method: 'POST'
},
params: params,
requiredParams: ['customerId'],
pathParams: ['customerId'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.schemas.list
*
* @desc Retrieve all schemas for a customer
*
* @alias directory.schemas.list
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customerId - Immutable id of the Google Apps account
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
list: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customerId}/schemas',
method: 'GET'
},
params: params,
requiredParams: ['customerId'],
pathParams: ['customerId'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.schemas.patch
*
* @desc Update schema. This method supports patch semantics.
*
* @alias directory.schemas.patch
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customerId - Immutable id of the Google Apps account
* @param {string} params.schemaKey - Name or immutable Id of the schema.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
patch: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customerId}/schemas/{schemaKey}',
method: 'PATCH'
},
params: params,
requiredParams: ['customerId', 'schemaKey'],
pathParams: ['customerId', 'schemaKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.schemas.update
*
* @desc Update schema
*
* @alias directory.schemas.update
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.customerId - Immutable id of the Google Apps account
* @param {string} params.schemaKey - Name or immutable Id of the schema.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
update: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/customer/{customerId}/schemas/{schemaKey}',
method: 'PUT'
},
params: params,
requiredParams: ['customerId', 'schemaKey'],
pathParams: ['customerId', 'schemaKey'],
context: self
};
return createAPIRequest(parameters, callback);
}
};
this.tokens = {
/**
* directory.tokens.delete
*
* @desc Delete all access tokens issued by a user for an application.
*
* @alias directory.tokens.delete
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.clientId - The Client ID of the application the token is issued to.
* @param {string} params.userKey - Identifies the user in the API request. The value can be the user's primary email address, alias email address, or unique user ID.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
delete: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}/tokens/{clientId}',
method: 'DELETE'
},
params: params,
requiredParams: ['userKey', 'clientId'],
pathParams: ['clientId', 'userKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.tokens.get
*
* @desc Get information about an access token issued by a user.
*
* @alias directory.tokens.get
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.clientId - The Client ID of the application the token is issued to.
* @param {string} params.userKey - Identifies the user in the API request. The value can be the user's primary email address, alias email address, or unique user ID.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
get: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}/tokens/{clientId}',
method: 'GET'
},
params: params,
requiredParams: ['userKey', 'clientId'],
pathParams: ['clientId', 'userKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.tokens.list
*
     * @desc Returns the set of tokens the specified user has issued to 3rd party applications.
*
* @alias directory.tokens.list
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.userKey - Identifies the user in the API request. The value can be the user's primary email address, alias email address, or unique user ID.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
list: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}/tokens',
method: 'GET'
},
params: params,
requiredParams: ['userKey'],
pathParams: ['userKey'],
context: self
};
return createAPIRequest(parameters, callback);
}
};
this.users = {
/**
* directory.users.delete
*
* @desc Delete user
*
* @alias directory.users.delete
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.userKey - Email or immutable Id of the user
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
delete: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}',
method: 'DELETE'
},
params: params,
requiredParams: ['userKey'],
pathParams: ['userKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.users.get
*
     * @desc Retrieve user
*
* @alias directory.users.get
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string=} params.customFieldMask - Comma-separated list of schema names. All fields from these schemas are fetched. This should only be set when projection=custom.
* @param {string=} params.projection - What subset of fields to fetch for this user.
* @param {string} params.userKey - Email or immutable Id of the user
* @param {string=} params.viewType - Whether to fetch the ADMIN_VIEW or DOMAIN_PUBLIC view of the user.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
get: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}',
method: 'GET'
},
params: params,
requiredParams: ['userKey'],
pathParams: ['userKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.users.insert
*
     * @desc Create user.
*
* @alias directory.users.insert
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
insert: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users',
method: 'POST'
},
params: params,
requiredParams: [],
pathParams: [],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.users.list
*
* @desc Retrieve either deleted users or all users in a domain (paginated)
*
* @alias directory.users.list
* @memberOf! admin(directory_v1)
*
* @param {object=} params - Parameters for request
* @param {string=} params.customFieldMask - Comma-separated list of schema names. All fields from these schemas are fetched. This should only be set when projection=custom.
* @param {string=} params.customer - Immutable id of the Google Apps account. In case of multi-domain, to fetch all users for a customer, fill this field instead of domain.
* @param {string=} params.domain - Name of the domain. Fill this field to get users from only this domain. To return all users in a multi-domain fill customer field instead.
* @param {string=} params.event - Event on which subscription is intended (if subscribing)
* @param {integer=} params.maxResults - Maximum number of results to return. Default is 100. Max allowed is 500
* @param {string=} params.orderBy - Column to use for sorting results
* @param {string=} params.pageToken - Token to specify next page in the list
* @param {string=} params.projection - What subset of fields to fetch for this user.
* @param {string=} params.query - Query string search. Should be of the form "". Complete documentation is at https://developers.google.com/admin-sdk/directory/v1/guides/search-users
* @param {string=} params.showDeleted - If set to true retrieves the list of deleted users. Default is false
* @param {string=} params.sortOrder - Whether to return results in ascending or descending order.
* @param {string=} params.viewType - Whether to fetch the ADMIN_VIEW or DOMAIN_PUBLIC view of the user.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
list: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users',
method: 'GET'
},
params: params,
requiredParams: [],
pathParams: [],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.users.makeAdmin
*
     * @desc Change admin status of a user
*
* @alias directory.users.makeAdmin
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.userKey - Email or immutable Id of the user as admin
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
makeAdmin: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}/makeAdmin',
method: 'POST'
},
params: params,
requiredParams: ['userKey'],
pathParams: ['userKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.users.patch
*
     * @desc Update user. This method supports patch semantics.
*
* @alias directory.users.patch
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.userKey - Email or immutable Id of the user. If Id, it should match with id of user object
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
patch: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}',
method: 'PATCH'
},
params: params,
requiredParams: ['userKey'],
pathParams: ['userKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.users.undelete
*
* @desc Undelete a deleted user
*
* @alias directory.users.undelete
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.userKey - The immutable id of the user
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
undelete: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}/undelete',
method: 'POST'
},
params: params,
requiredParams: ['userKey'],
pathParams: ['userKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.users.update
*
     * @desc Update user
*
* @alias directory.users.update
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.userKey - Email or immutable Id of the user. If Id, it should match with id of user object
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
update: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}',
method: 'PUT'
},
params: params,
requiredParams: ['userKey'],
pathParams: ['userKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.users.watch
*
* @desc Watch for changes in users list
*
* @alias directory.users.watch
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string=} params.customFieldMask - Comma-separated list of schema names. All fields from these schemas are fetched. This should only be set when projection=custom.
* @param {string=} params.customer - Immutable id of the Google Apps account. In case of multi-domain, to fetch all users for a customer, fill this field instead of domain.
* @param {string=} params.domain - Name of the domain. Fill this field to get users from only this domain. To return all users in a multi-domain fill customer field instead.
* @param {string=} params.event - Event on which subscription is intended (if subscribing)
* @param {integer=} params.maxResults - Maximum number of results to return. Default is 100. Max allowed is 500
* @param {string=} params.orderBy - Column to use for sorting results
* @param {string=} params.pageToken - Token to specify next page in the list
* @param {string=} params.projection - What subset of fields to fetch for this user.
* @param {string=} params.query - Query string search. Should be of the form "". Complete documentation is at https://developers.google.com/admin-sdk/directory/v1/guides/search-users
* @param {string=} params.showDeleted - If set to true retrieves the list of deleted users. Default is false
* @param {string=} params.sortOrder - Whether to return results in ascending or descending order.
* @param {string=} params.viewType - Whether to fetch the ADMIN_VIEW or DOMAIN_PUBLIC view of the user.
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
watch: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/watch',
method: 'POST'
},
params: params,
requiredParams: [],
pathParams: [],
context: self
};
return createAPIRequest(parameters, callback);
},
aliases: {
/**
* directory.users.aliases.delete
*
       * @desc Remove an alias for the user
*
* @alias directory.users.aliases.delete
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.alias - The alias to be removed
* @param {string} params.userKey - Email or immutable Id of the user
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
delete: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}/aliases/{alias}',
method: 'DELETE'
},
params: params,
requiredParams: ['userKey', 'alias'],
pathParams: ['alias', 'userKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.users.aliases.insert
*
       * @desc Add an alias for the user
*
* @alias directory.users.aliases.insert
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.userKey - Email or immutable Id of the user
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
insert: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}/aliases',
method: 'POST'
},
params: params,
requiredParams: ['userKey'],
pathParams: ['userKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.users.aliases.list
*
* @desc List all aliases for a user
*
* @alias directory.users.aliases.list
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string=} params.event - Event on which subscription is intended (if subscribing)
* @param {string} params.userKey - Email or immutable Id of the user
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
list: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}/aliases',
method: 'GET'
},
params: params,
requiredParams: ['userKey'],
pathParams: ['userKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.users.aliases.watch
*
* @desc Watch for changes in user aliases list
*
* @alias directory.users.aliases.watch
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string=} params.event - Event on which subscription is intended (if subscribing)
* @param {string} params.userKey - Email or immutable Id of the user
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
watch: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}/aliases/watch',
method: 'POST'
},
params: params,
requiredParams: ['userKey'],
pathParams: ['userKey'],
context: self
};
return createAPIRequest(parameters, callback);
}
},
photos: {
/**
* directory.users.photos.delete
*
* @desc Remove photos for the user
*
* @alias directory.users.photos.delete
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.userKey - Email or immutable Id of the user
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
delete: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}/photos/thumbnail',
method: 'DELETE'
},
params: params,
requiredParams: ['userKey'],
pathParams: ['userKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.users.photos.get
*
* @desc Retrieve photo of a user
*
* @alias directory.users.photos.get
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.userKey - Email or immutable Id of the user
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
get: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}/photos/thumbnail',
method: 'GET'
},
params: params,
requiredParams: ['userKey'],
pathParams: ['userKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.users.photos.patch
*
* @desc Add a photo for the user. This method supports patch semantics.
*
* @alias directory.users.photos.patch
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.userKey - Email or immutable Id of the user
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
patch: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}/photos/thumbnail',
method: 'PATCH'
},
params: params,
requiredParams: ['userKey'],
pathParams: ['userKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.users.photos.update
*
* @desc Add a photo for the user
*
* @alias directory.users.photos.update
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.userKey - Email or immutable Id of the user
* @param {object} params.resource - Request body data
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
update: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}/photos/thumbnail',
method: 'PUT'
},
params: params,
requiredParams: ['userKey'],
pathParams: ['userKey'],
context: self
};
return createAPIRequest(parameters, callback);
}
}
};
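  // Illustrative only: a hedged sketch of reading a single user via the users resource
  // above; 'admin' is the assumed directory_v1 client and the userKey is a placeholder.
  //
  //   admin.users.get({
  //     userKey: 'jane.doe@example.com'
  //   }, function (err, user) {
  //     if (err) { return console.error(err); }
  //     console.log(user.primaryEmail);
  //   });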
this.verificationCodes = {
/**
* directory.verificationCodes.generate
*
* @desc Generate new backup verification codes for the user.
*
* @alias directory.verificationCodes.generate
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.userKey - Email or immutable Id of the user
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
generate: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}/verificationCodes/generate',
method: 'POST'
},
params: params,
requiredParams: ['userKey'],
pathParams: ['userKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.verificationCodes.invalidate
*
* @desc Invalidate the current backup verification codes for the user.
*
* @alias directory.verificationCodes.invalidate
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.userKey - Email or immutable Id of the user
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
invalidate: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}/verificationCodes/invalidate',
method: 'POST'
},
params: params,
requiredParams: ['userKey'],
pathParams: ['userKey'],
context: self
};
return createAPIRequest(parameters, callback);
},
/**
* directory.verificationCodes.list
*
* @desc Returns the current set of valid backup verification codes for the specified user.
*
* @alias directory.verificationCodes.list
* @memberOf! admin(directory_v1)
*
* @param {object} params - Parameters for request
* @param {string} params.userKey - Identifies the user in the API request. The value can be the user's primary email address, alias email address, or unique user ID.
* @param {callback} callback - The callback that handles the response.
* @return {object} Request object
*/
list: function(params, callback) {
var parameters = {
options: {
url: 'https://www.googleapis.com/admin/directory/v1/users/{userKey}/verificationCodes',
method: 'GET'
},
params: params,
requiredParams: ['userKey'],
pathParams: ['userKey'],
context: self
};
return createAPIRequest(parameters, callback);
}
};
}
/**
* Exports Admin object
* @type Admin
*/
module.exports = Admin;<|fim▁end|>
|
/**
* directory.chromeosdevices.update
*
* @desc Update Chrome OS Device
|
<|file_name|>consumers.py<|end_file_name|><|fim▁begin|>import logging
import requests
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.mail import EmailMultiAlternatives<|fim▁hole|>
logger = logging.getLogger('email')
sentry = logging.getLogger('sentry')
def send_invite(message):
try:
invite = Invitation.objects.get(
id=message.get('id'),
status__in=[Invitation.PENDING, Invitation.ERROR],
)
except Invitation.DoesNotExist:
sentry.error("Invitation to send not found", exc_info=True, extra={'message': message})
return
invite.status = Invitation.PROCESSING
invite.save()
context = {
'invite': invite,
'domain': Site.objects.get_current().domain,
}
subject = "[ContactOtter] Invitation to join ContactOtter from %s" % (invite.sender)
if invite.book:
subject = "[ContactOtter] Invitation to share %s's contact book" % (invite.sender)
txt = get_template('email/invitation.txt').render(context)
html = get_template('email/invitation.html').render(context)
try:
message = EmailMultiAlternatives(
subject=subject,
body=txt,
from_email="ContactOtter <[email protected]>",
to=[invite.email,],
)
message.attach_alternative(html, "text/html")
message.send()
invite.status = Invitation.SENT
invite.sent = timezone.now()
invite.save()
    except Exception:
sentry.exception('Problem sending invite', exc_info=True, extra={'invite_id': invite.id})
invite.status = Invitation.ERROR
invite.save()<|fim▁end|>
|
from django.template.loader import get_template
from django.utils import timezone
from invitations.models import Invitation
|
<|file_name|>ds.py<|end_file_name|><|fim▁begin|># The MIT License (MIT)
# Copyright (c) 2016, 2017 by the ESA CCI Toolbox development team and contributors
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
Description
===========
This module provides Cate's data access API.
Technical Requirements
======================
**Query data store**
:Description: Allow querying registered ECV data stores using a simple function that takes a
set of query parameters and returns data source identifiers that can be used to open
datasets in Cate.
:URD-Source:
* CCIT-UR-DM0006: Data access to ESA CCI
* CCIT-UR-DM0010: The data module shall have the means to attain meta-level status information
per ECV type
* CCIT-UR-DM0013: The CCI Toolbox shall allow filtering
----
**Add data store**
:Description: Allow adding of user defined data stores specifying the access protocol and the
layout of the data.
These data stores can be used to access datasets.
:URD-Source:
* CCIT-UR-DM0011: Data access to non-CCI data
----
**Open dataset**
:Description: Allow opening an ECV dataset given an identifier returned by the *data store query*.
The dataset returned complies to the Cate common data model.
The dataset to be returned can optionally be constrained in time and space.
:URD-Source:
* CCIT-UR-DM0001: Data access and input
* CCIT-UR-DM0004: Open multiple inputs
* CCIT-UR-DM0005: Data access using different protocols>
* CCIT-UR-DM0007: Open single ECV
* CCIT-UR-DM0008: Open multiple ECV
* CCIT-UR-DM0009: Open any ECV
* CCIT-UR-DM0012: Open different formats
Verification
============
The module's unit-tests are located in
`test/test_ds.py <https://github.com/CCI-Tools/cate/blob/master/test/test_ds.py>`_
and may be executed using ``$ py.test test/test_ds.py --cov=cate/core/ds.py`` for extra code
coverage information.
Components
==========
"""
import datetime
import glob
import logging
import re
from typing import Sequence, Optional, Union, Any, Dict, Set, Tuple
import geopandas as gpd
import xarray as xr
import xcube.core.store as xcube_store
from xcube.core.select import select_subset
from xcube.core.store import DATASET_TYPE
from xcube.core.store import MutableDataStore
from xcube.util.progress import ProgressObserver
from xcube.util.progress import ProgressState
from xcube.util.progress import add_progress_observers
from .cdm import get_lon_dim_name, get_lat_dim_name
from .types import PolygonLike, TimeRangeLike, VarNamesLike, ValidationError
from ..util.monitor import ChildMonitor
from ..util.monitor import Monitor
_TIMESTAMP_FORMAT = "%Y-%m-%dT%H:%M:%S"
__author__ = "Chris Bernat (Telespazio VEGA UK Ltd), ", \
"Tonio Fincke (Brockmann Consult GmbH), " \
"Norman Fomferra (Brockmann Consult GmbH), " \
"Marco Zühlke (Brockmann Consult GmbH)"
URL_REGEX = re.compile(
r'^(?:http|ftp)s?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' # domain
r'localhost|' # localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
_LOG = logging.getLogger('cate')
DATA_STORE_POOL = xcube_store.DataStorePool()
class DataAccessWarning(UserWarning):
"""
Warnings produced by Cate's data stores and data sources instances,
used to report any problems handling data.
"""
pass
class DataAccessError(Exception):
"""
Exceptions produced by Cate's data stores and data sources instances,
used to report any problems handling data.
"""
class NetworkError(ConnectionError):
"""
Exceptions produced by Cate's data stores and data sources instances,
used to report any problems with the network or in case an endpoint
couldn't be found nor reached.
"""
pass
class DataStoreNotice:
"""
A short notice that can be exposed to users by data stores.
"""
def __init__(self, id: str, title: str, content: str, intent: str = None, icon: str = None):
"""
A short notice that can be exposed to users by data stores.
:param id: Notice ID.
:param title: A human-readable, plain text title.
:param content: A human-readable, plain text title that may be formatted using Markdown.
:param intent: Notice intent,
may be one of "default", "primary", "success", "warning", "danger"
:param icon: An option icon name. See https://blueprintjs.com/docs/versions/1/#core/icons
"""
if id is None or id == "":
raise ValueError("invalid id")
if title is None or title == "":
raise ValueError("invalid title")
if content is None or content == "":
raise ValueError("invalid content")
if intent not in {None, "default", "primary", "success", "warning", "danger"}:
raise ValueError("invalid intent")
self._dict = dict(id=id, title=title, content=content, icon=icon, intent=intent)
@property
def id(self):
return self._dict["id"]
@property
def title(self):
return self._dict["title"]
@property
def content(self):
return self._dict["content"]
@property
def intent(self):
return self._dict["intent"]
@property
def icon(self):
return self._dict["icon"]
def to_dict(self):
return dict(self._dict)
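# Illustrative only: a minimal sketch of constructing a notice with the class above;
# the id, title, content and intent values are made-up examples.
#
#     notice = DataStoreNotice(id='terms', title='Terms of use',
#                              content='Access to this store may be **throttled**.',
#                              intent='warning')
#     notice.to_dict()  # -> {'id': 'terms', 'title': 'Terms of use', ...}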
class XcubeProgressObserver(ProgressObserver):
def __init__(self, monitor: Monitor):
self._monitor = monitor
self._latest_completed_work = 0.0
def on_begin(self, state_stack: Sequence[ProgressState]):
if len(state_stack) == 1:
self._monitor.start(state_stack[0].label, state_stack[0].total_work)
def on_update(self, state_stack: Sequence[ProgressState]):
if state_stack[0].completed_work > self._latest_completed_work:
self._monitor.progress(state_stack[0].completed_work
- self._latest_completed_work,
state_stack[-1].label)
self._latest_completed_work = state_stack[0].completed_work
def on_end(self, state_stack: Sequence[ProgressState]):
if len(state_stack) == 1:
self._monitor.done()
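# Illustrative only: a hedged sketch of wiring the observer above into xcube progress
# reporting, assuming add_progress_observers() (imported above) can be used as a context
# manager and that 'monitor' is an existing Cate Monitor instance.
#
#     with add_progress_observers(XcubeProgressObserver(monitor)):
#         pass  # call into xcube code that reports progress here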
INFO_FIELD_NAMES = sorted(["abstract",
"bbox_minx",
"bbox_miny",
"bbox_maxx",
"bbox_maxy",
"catalog_url",
"catalogue_url",
"cci_project",
"creation_date",
"data_type",
"data_types",
"ecv",
"file_format",
"file_formats",
"info_url",
"institute",
"institutes",
"licences",
"platform_id",
"platform_ids",
"processing_level",
"processing_levels",
"product_string",
"product_strings",
"product_version",
"product_versions",
"publication_date",
"sensor_id",
"sensor_ids",
"temporal_coverage_end",
"temporal_coverage_start",
"time_frequencies",
"time_frequency",
"title",
"uuid"])
def get_metadata_from_descriptor(descriptor: xcube_store.DataDescriptor) -> Dict:
metadata = dict(data_id=descriptor.data_id,
type_specifier=str(descriptor.data_type))
if descriptor.crs:
metadata['crs'] = descriptor.crs
if descriptor.bbox:
metadata['bbox'] = descriptor.bbox
if hasattr(descriptor, 'spatial_res'):
metadata['spatial_res'] = descriptor.spatial_res
if descriptor.time_range:
metadata['time_range'] = descriptor.time_range
if descriptor.time_period:
metadata['time_period'] = descriptor.time_period
if hasattr(descriptor, 'attrs') \
and isinstance(getattr(descriptor, 'attrs'), dict):
for name in INFO_FIELD_NAMES:
value = descriptor.attrs.get(name, None)
# Many values are one-element lists: turn them into scalars
if isinstance(value, list) and len(value) == 1:
value = value[0]
if value is not None:
metadata[name] = value
for vars_key in ('data_vars', 'coords'):
if hasattr(descriptor, vars_key) \
and isinstance(getattr(descriptor, vars_key), dict):
metadata[vars_key] = []
var_attrs = ['units', 'long_name', 'standard_name']
for var_name, var_descriptor in getattr(descriptor, vars_key).items():
var_dict = dict(name=var_name,
dtype=var_descriptor.dtype,
dims=var_descriptor.dims)
if var_descriptor.chunks is not None:
var_dict['chunks'] = var_descriptor.chunks
if var_descriptor.attrs:
for var_attr in var_attrs:
if var_attr in var_descriptor.attrs:
var_dict[var_attr] = var_descriptor.attrs.get(var_attr)
metadata[vars_key].append(var_dict)
return metadata
def get_info_string_from_data_descriptor(descriptor: xcube_store.DataDescriptor) -> str:
meta_info = get_metadata_from_descriptor(descriptor)
max_len = 0
for name in meta_info.keys():
max_len = max(max_len, len(name))
info_lines = []
for name, value in meta_info.items():
if name not in ('data_vars', 'coords'):
info_lines.append('%s:%s %s' % (name,
(1 + max_len - len(name)) * ' ',
value))
return '\n'.join(info_lines)
def find_data_store(ds_id: str) -> Tuple[Optional[str], Optional[xcube_store.DataStore]]:
"""
Find the data store that includes the given *ds_id*.
This will raise an exception if the *ds_id* is given in more than one data store.
:param ds_id: A data source identifier.
    :return: A tuple of the data store instance identifier and the data store that provides
             *ds_id*, or ``(None, None)`` if no such store exists.
"""
results = []
for store_instance_id in DATA_STORE_POOL.store_instance_ids:
data_store = DATA_STORE_POOL.get_store(store_instance_id)
if data_store.has_data(ds_id):
results.append((store_instance_id, data_store))
if len(results) > 1:
raise ValidationError(f'{len(results)} data sources found for the given ID {ds_id!r}')
if len(results) == 1:
return results[0]
return None, None
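# --- Illustrative sketch, not part of the original module ---
# Looks up the single store that provides a data source; 'example.dataset.id' is a
# hypothetical identifier standing in for any id known to one of the configured stores.
def _example_find_data_store():
    store_instance_id, data_store = find_data_store('example.dataset.id')
    if data_store is None:
        print('no configured data store provides this id')
    else:
        print(f'found in store instance {store_instance_id!r}')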
def get_data_store_notices(datastore_id: str) -> Sequence[dict]:
store_id = DATA_STORE_POOL.get_store_config(datastore_id).store_id
def name_is(extension):
return store_id == extension.name
extensions = xcube_store.find_data_store_extensions(predicate=name_is)
if len(extensions) == 0:
_LOG.warning(f'Found no extension for data store {datastore_id}')
return []
return extensions[0].metadata.get('data_store_notices', [])
def get_data_descriptor(ds_id: str) -> Optional[xcube_store.DataDescriptor]:
data_store_id, data_store = find_data_store(ds_id)
if data_store:
return data_store.describe_data(ds_id)
def open_dataset(dataset_id: str,
time_range: TimeRangeLike.TYPE = None,
region: PolygonLike.TYPE = None,
var_names: VarNamesLike.TYPE = None,
data_store_id: str = None,
force_local: bool = False,
local_ds_id: str = None,
monitor: Monitor = Monitor.NONE) -> Tuple[Any, str]:
"""
Open a dataset from a data source.
:param dataset_id: The identifier of an ECV dataset. Must not be empty.
:param time_range: An optional time constraint comprising start and end date.
If given, it must be a :py:class:`TimeRangeLike`.
:param region: An optional region constraint.
If given, it must be a :py:class:`PolygonLike`.
:param var_names: Optional names of variables to be included.
If given, it must be a :py:class:`VarNamesLike`.
:param data_store_id: Optional data store identifier. If given, *ds_id* will only be
looked up from the specified data store.
    :param force_local: Optional flag for remote data sources only.
           Whether to make a local copy of the data source if it is not already present locally.
:param local_ds_id: Optional ID for newly created copy of remote data
:param monitor: A progress monitor
:return: A tuple consisting of a new dataset instance and its id
"""
if not dataset_id:
raise ValidationError('No data source given')
if data_store_id:
data_store = DATA_STORE_POOL.get_store(data_store_id)
else:
data_store_id, data_store = find_data_store(ds_id=dataset_id)
if not data_store:
raise ValidationError(f"No data store found that contains the ID '{dataset_id}'")
data_type = None
potential_data_types = data_store.get_data_types_for_data(dataset_id)
for potential_data_type in potential_data_types:
if DATASET_TYPE.is_super_type_of(potential_data_type):
data_type = potential_data_type
break
if data_type is None:
raise ValidationError(f"Could not open '{dataset_id}' as dataset.")
openers = data_store.get_data_opener_ids(dataset_id, data_type)<|fim▁hole|> open_work = 10
cache_work = 10 if force_local else 0
subset_work = 0
open_schema = data_store.get_open_data_params_schema(dataset_id, opener_id)
open_args = {}
subset_args = {}
if var_names:
var_names_list = VarNamesLike.convert(var_names)
if 'variable_names' in open_schema.properties:
open_args['variable_names'] = var_names_list
elif 'drop_variables' in open_schema.properties:
data_desc = data_store.describe_data(dataset_id, data_type)
if hasattr(data_desc, 'data_vars') \
and isinstance(getattr(data_desc, 'data_vars'), dict):
open_args['drop_variables'] = [var_name
for var_name in data_desc.data_vars.keys()
if var_name not in var_names_list]
else:
subset_args['var_names'] = var_names_list
subset_work += 1
if time_range:
time_range = TimeRangeLike.convert(time_range)
time_range = [datetime.datetime.strftime(time_range[0], '%Y-%m-%d'),
datetime.datetime.strftime(time_range[1], '%Y-%m-%d')]
if 'time_range' in open_schema.properties:
open_args['time_range'] = time_range
else:
subset_args['time_range'] = time_range
subset_work += 1
if region:
bbox = list(PolygonLike.convert(region).bounds)
if 'bbox' in open_schema.properties:
open_args['bbox'] = bbox
else:
subset_args['bbox'] = bbox
subset_work += 1
with monitor.starting('Open dataset', open_work + subset_work + cache_work):
with add_progress_observers(XcubeProgressObserver(ChildMonitor(monitor, open_work))):
dataset = data_store.open_data(data_id=dataset_id, opener_id=opener_id, **open_args)
dataset = select_subset(dataset, **subset_args)
monitor.progress(subset_work)
if force_local:
with add_progress_observers(XcubeProgressObserver(ChildMonitor(monitor, cache_work))):
dataset, dataset_id = make_local(data=dataset,
local_name=local_ds_id,
orig_dataset_name=dataset_id)
return dataset, dataset_id
def make_local(data: Any,
*,
local_name: Optional[str] = None,
orig_dataset_name: Optional[str] = None) -> Tuple[Any, str]:
local_data_store_id = 'local'
local_store = DATA_STORE_POOL.get_store(local_data_store_id)
if local_store is None:
raise ValueError(f'Cannot find data store {local_data_store_id!r}')
if not isinstance(local_store, MutableDataStore):
raise ValueError(f'Data store {local_data_store_id!r} is not writable')
if isinstance(data, xr.Dataset):
extension = '.zarr'
elif isinstance(data, gpd.GeoDataFrame):
extension = '.geojson'
else:
raise DataAccessError(f'Unsupported data type {type(data)}')
if local_name is not None and not local_name.endswith(extension):
local_name = local_name + extension
if not local_name and orig_dataset_name is not None:
i = 1
local_name = f'local.{orig_dataset_name}.{i}{extension}'
while local_store.has_data(local_name):
i += 1
local_name = f'local.{orig_dataset_name}.{i}{extension}'
local_data_id = local_store.write_data(data=data, data_id=local_name)
return local_store.open_data(data_id=local_data_id), local_data_id
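# --- Illustrative sketch, not part of the original module ---
# Assumes a writable 'local' store is configured in DATA_STORE_POOL and that `dataset`
# is an xarray.Dataset opened elsewhere; for datasets the '.zarr' extension is appended
# to the chosen name automatically.
def _example_make_local(dataset: xr.Dataset) -> Tuple[Any, str]:
    local_ds, local_id = make_local(dataset, local_name='my_copy')
    return local_ds, local_id  # local_id is typically 'my_copy.zarr'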
def add_as_local(data_source_id: str, paths: Union[str, Sequence[str]] = None) -> Tuple[Any, str]:
paths = _resolve_input_paths(paths)
if not paths:
raise ValueError("No paths found")
# todo also support geodataframes
if len(paths) == 1:
ds = xr.open_dataset(paths[0])
else:
ds = xr.open_mfdataset(paths=paths)
return make_local(ds, local_name=data_source_id)
def _resolve_input_paths(paths: Union[str, Sequence[str]]) -> Sequence[str]:
# very similar code is used in nc2zarr
resolved_input_files = []
if isinstance(paths, str):
resolved_input_files.extend(glob.glob(paths, recursive=True))
elif paths is not None and len(paths):
for file in paths:
resolved_input_files.extend(glob.glob(file, recursive=True))
# Get rid of doubles, but preserve order
seen_input_files = set()
unique_input_files = []
for input_file in resolved_input_files:
if input_file not in seen_input_files:
unique_input_files.append(input_file)
seen_input_files.add(input_file)
return unique_input_files
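# --- Illustrative sketch, not part of the original module ---
# Glob patterns are expanded recursively and duplicates are removed while keeping the
# order in which files were first seen; the patterns below are hypothetical.
def _example_resolve_input_paths() -> Sequence[str]:
    return _resolve_input_paths(['data/2010/*.nc', 'data/**/*.nc'])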
def get_spatial_ext_chunk_sizes(ds_or_path: Union[xr.Dataset, str]) -> Dict[str, int]:
"""
Get the spatial, external chunk sizes for the latitude and longitude dimensions
of a dataset as provided in a variable's encoding object.
    :param ds_or_path: An xarray dataset or a path to a file that can be opened by xarray.
:return: A mapping from dimension name to external chunk sizes.
"""
if isinstance(ds_or_path, str):
ds = xr.open_dataset(ds_or_path, decode_times=False)
else:
ds = ds_or_path
lon_name = get_lon_dim_name(ds)
lat_name = get_lat_dim_name(ds)
if lon_name and lat_name:
chunk_sizes = get_ext_chunk_sizes(ds, {lat_name, lon_name})
else:
chunk_sizes = None
if isinstance(ds_or_path, str):
ds.close()
return chunk_sizes
def get_ext_chunk_sizes(ds: xr.Dataset, dim_names: Set[str] = None,
init_value=0, map_fn=max, reduce_fn=None) -> Dict[str, int]:
"""
Get the external chunk sizes for each dimension of a dataset as provided in a variable's encoding object.
:param ds: The dataset.
:param dim_names: The names of dimensions of data variables whose external chunking should be collected.
:param init_value: The initial value (not necessarily a chunk size) for mapping multiple different chunk sizes.
:param map_fn: The mapper function that maps a chunk size from a previous (initial) value.
    :param reduce_fn: The reducer function that reduces multiple mapped chunk sizes to a single one.
:return: A mapping from dimension name to external chunk sizes.
"""
agg_chunk_sizes = None
for var_name in ds.variables:
var = ds[var_name]
if var.encoding:
chunk_sizes = var.encoding.get('chunksizes')
if chunk_sizes \
and len(chunk_sizes) == len(var.dims) \
and (not dim_names or dim_names.issubset(set(var.dims))):
for dim_name, size in zip(var.dims, chunk_sizes):
if not dim_names or dim_name in dim_names:
if agg_chunk_sizes is None:
agg_chunk_sizes = dict()
old_value = agg_chunk_sizes.get(dim_name)
agg_chunk_sizes[dim_name] = map_fn(size, init_value if old_value is None else old_value)
if agg_chunk_sizes and reduce_fn:
agg_chunk_sizes = {k: reduce_fn(v) for k, v in agg_chunk_sizes.items()}
return agg_chunk_sizes
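# --- Illustrative sketch, not part of the original module ---
# Demonstrates how chunk sizes recorded in a variable's encoding are collected per
# dimension; the tiny in-memory dataset below is an assumption made for illustration.
def _example_ext_chunk_sizes() -> Dict[str, int]:
    data = xr.DataArray([[0.0] * 8] * 4, dims=('lat', 'lon'))
    ds = xr.Dataset({'sst': data})
    ds.sst.encoding['chunksizes'] = (2, 4)
    # With the default map_fn=max, the largest chunk size seen per dimension is kept.
    return get_ext_chunk_sizes(ds, {'lat', 'lon'})  # -> {'lat': 2, 'lon': 4}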
def format_variables_info_string(descriptor: xcube_store.DataDescriptor):
"""
Return some textual information about the variables described by this DataDescriptor.
Useful for CLI / REPL applications.
:param descriptor: data descriptor
:return: a string describing the variables in the dataset
"""
meta_info = get_metadata_from_descriptor(descriptor)
variables = meta_info.get('data_vars', [])
if len(variables) == 0:
return 'No variables information available.'
info_lines = []
for variable in variables:
info_lines.append('%s (%s):' % (variable.get('name', '?'), variable.get('units', '-')))
info_lines.append(' Long name: %s' % variable.get('long_name', '?'))
info_lines.append(' CF standard name: %s' % variable.get('standard_name', '?'))
info_lines.append('')
return '\n'.join(info_lines)
def format_cached_datasets_coverage_string(cache_coverage: dict) -> str:
"""
Return a textual representation of information about cached, locally available data sets.
Useful for CLI / REPL applications.
    :param cache_coverage: A mapping from start date to end date of locally cached time ranges.
    :return: A string with one line per cached time range.
"""
if not cache_coverage:
return 'No information about cached datasets available.'
info_lines = []
for date_from, date_to in sorted(cache_coverage.items()):
info_lines.append('{date_from} to {date_to}'
.format(date_from=date_from.strftime('%Y-%m-%d'),
date_to=date_to.strftime('%Y-%m-%d')))
return '\n'.join(info_lines)<|fim▁end|>
|
if len(openers) == 0:
raise DataAccessError(f'Could not find an opener for "{dataset_id}".')
opener_id = openers[0]
|
<|file_name|>core.js<|end_file_name|><|fim▁begin|>/**
* The main AWS namespace
*/
var AWS = { util: require('./util') };
/**
* @api private
* @!macro [new] nobrowser
* @note This feature is not supported in the browser environment of the SDK.
*/
var _hidden = {}; _hidden.toString(); // hack to parse macro
module.exports = AWS;
AWS.util.update(AWS, {
/**
* @constant
*/
VERSION: '2.2.9',
/**
* @api private
*/
Signers: {},
/**
* @api private
*/
Protocol: {
Json: require('./protocol/json'),
Query: require('./protocol/query'),
Rest: require('./protocol/rest'),
RestJson: require('./protocol/rest_json'),
RestXml: require('./protocol/rest_xml')
},
/**
* @api private
*/
XML: {
Builder: require('./xml/builder'),
Parser: null // conditionally set based on environment
},
/**
* @api private
*/
JSON: {
Builder: require('./json/builder'),
Parser: require('./json/parser')
},
/**
* @api private
*/
Model: {
Api: require('./model/api'),
Operation: require('./model/operation'),
Shape: require('./model/shape'),
Paginator: require('./model/paginator'),
ResourceWaiter: require('./model/resource_waiter')
},
util: require('./util'),
/**
* @api private
*/
apiLoader: function() { throw new Error('No API loader set'); }
});
require('./service');
require('./credentials');<|fim▁hole|>require('./credentials/saml_credentials');
require('./config');
require('./http');
require('./sequential_executor');
require('./event_listeners');
require('./request');
require('./response');
require('./resource_waiter');
require('./signers/request_signer');
require('./param_validator');
/**
* @readonly
* @return [AWS.SequentialExecutor] a collection of global event listeners that
* are attached to every sent request.
* @see AWS.Request AWS.Request for a list of events to listen for
* @example Logging the time taken to send a request
* AWS.events.on('send', function startSend(resp) {
* resp.startTime = new Date().getTime();
* }).on('complete', function calculateTime(resp) {
* var time = (new Date().getTime() - resp.startTime) / 1000;
* console.log('Request took ' + time + ' seconds');
* });
*
* new AWS.S3().listBuckets(); // prints 'Request took 0.285 seconds'
*/
AWS.events = new AWS.SequentialExecutor();<|fim▁end|>
|
require('./credentials/credential_provider_chain');
require('./credentials/temporary_credentials');
require('./credentials/web_identity_credentials');
require('./credentials/cognito_identity_credentials');
|
<|file_name|>TL.Point.js<|end_file_name|><|fim▁begin|>/* TL.Point
Inspired by Leaflet
TL.Point represents a point with x and y coordinates.
================================================== */
TL.Point = function (/*Number*/ x, /*Number*/ y, /*Boolean*/ round) {
this.x = (round ? Math.round(x) : x);
this.y = (round ? Math.round(y) : y);
};
TL.Point.prototype = {
add: function (point) {
return this.clone()._add(point);
},
_add: function (point) {
this.x += point.x;
this.y += point.y;
return this;
},
subtract: function (point) {
return this.clone()._subtract(point);
},
// destructive subtract (faster)
_subtract: function (point) {
this.x -= point.x;
this.y -= point.y;
return this;
},
divideBy: function (num, round) {
return new TL.Point(this.x / num, this.y / num, round);
},
<|fim▁hole|>
distanceTo: function (point) {
var x = point.x - this.x,
y = point.y - this.y;
return Math.sqrt(x * x + y * y);
},
round: function () {
return this.clone()._round();
},
// destructive round
_round: function () {
this.x = Math.round(this.x);
this.y = Math.round(this.y);
return this;
},
clone: function () {
return new TL.Point(this.x, this.y);
},
toString: function () {
return 'Point(' +
TL.Util.formatNum(this.x) + ', ' +
TL.Util.formatNum(this.y) + ')';
}
};<|fim▁end|>
|
multiplyBy: function (num) {
return new TL.Point(this.x * num, this.y * num);
},
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
from .error_detail import ErrorDetail
from .error_response import ErrorResponse
from .error_response_wrapper import ErrorResponseWrapper, ErrorResponseWrapperException
from .storage_account_properties import StorageAccountProperties
from .container_registry_properties import ContainerRegistryProperties
from .service_principal_properties import ServicePrincipalProperties
from .kubernetes_cluster_properties import KubernetesClusterProperties
from .system_service import SystemService
from .acs_cluster_properties import AcsClusterProperties
from .app_insights_properties import AppInsightsProperties
from .ssl_configuration import SslConfiguration
from .service_auth_configuration import ServiceAuthConfiguration
from .auto_scale_configuration import AutoScaleConfiguration
from .global_service_configuration import GlobalServiceConfiguration
from .operationalization_cluster import OperationalizationCluster
from .operationalization_cluster_update_parameters import OperationalizationClusterUpdateParameters
from .storage_account_credentials import StorageAccountCredentials
from .container_registry_credentials import ContainerRegistryCredentials
from .container_service_credentials import ContainerServiceCredentials
from .app_insights_credentials import AppInsightsCredentials
from .operationalization_cluster_credentials import OperationalizationClusterCredentials
from .check_system_services_updates_available_response import CheckSystemServicesUpdatesAvailableResponse
from .update_system_services_response import UpdateSystemServicesResponse
from .resource_operation_display import ResourceOperationDisplay
from .resource_operation import ResourceOperation
from .available_operations import AvailableOperations
from .operationalization_cluster_paged import OperationalizationClusterPaged
from .machine_learning_compute_management_client_enums import (
OperationStatus,
ClusterType,
OrchestratorType,
SystemServiceType,
AgentVMSizeTypes,
Status,
UpdatesAvailable,
)
__all__ = [
'Resource',
'ErrorDetail',
'ErrorResponse',
'ErrorResponseWrapper', 'ErrorResponseWrapperException',
'StorageAccountProperties',
'ContainerRegistryProperties',
'ServicePrincipalProperties',
'KubernetesClusterProperties',
'SystemService',
'AcsClusterProperties',
'AppInsightsProperties',
'SslConfiguration',
'ServiceAuthConfiguration',
'AutoScaleConfiguration',
'GlobalServiceConfiguration',
'OperationalizationCluster',
'OperationalizationClusterUpdateParameters',
'StorageAccountCredentials',
'ContainerRegistryCredentials',
'ContainerServiceCredentials',
'AppInsightsCredentials',
'OperationalizationClusterCredentials',
'CheckSystemServicesUpdatesAvailableResponse',
'UpdateSystemServicesResponse',
'ResourceOperationDisplay',
'ResourceOperation',
'AvailableOperations',<|fim▁hole|> 'SystemServiceType',
'AgentVMSizeTypes',
'Status',
'UpdatesAvailable',
]<|fim▁end|>
|
'OperationalizationClusterPaged',
'OperationStatus',
'ClusterType',
'OrchestratorType',
|
<|file_name|>internet.py<|end_file_name|><|fim▁begin|># coding=utf-8
from __future__ import unicode_literals
from ..internet import Provider as InternetProvider
class Provider(InternetProvider):
safe_email_tlds = ('com', 'net', 'fr', 'fr')
free_email_domains = (
'voila.fr', 'gmail.com', 'hotmail.fr', 'yahoo.fr', 'laposte.net', 'free.fr', 'sfr.fr', 'orange.fr', 'bouygtel.fr',
'club-internet.fr', 'dbmail.com', 'live.com', 'ifrance.com', 'noos.fr', 'tele2.fr', 'tiscali.fr', 'wanadoo.fr')
tlds = ('com', 'com', 'com', 'net', 'org', 'fr', 'fr', 'fr')
@staticmethod
def _to_ascii(string):
replacements = (
('à', 'a'), ('À', 'A'), ('ç', 'c'), ('Ç', 'c'), ('é', 'e'), ('É', 'E'), ('è', 'e'),
('È', 'E'), ('ë', 'e'), ('Ë', 'E'), ('ï', 'i'), ('Ï', 'I'), ('î', 'i'), ('Î', 'I'),
            ('ô', 'o'), ('Ô', 'O'), ('ù', 'u'), ('Ù', 'U'),
)
for search, replace in replacements:
string = string.replace(search, replace)
return string
def user_name(self):
pattern = self.random_element(self.user_name_formats)
return self._to_ascii(self.bothify(self.generator.parse(pattern))).lower()
def domain_word(self):
company = self.generator.format('company')
company_elements = company.split(' ')<|fim▁hole|><|fim▁end|>
|
company = company_elements[0]
company = company.replace(" ", "")
return self._to_ascii(company).lower()
|
<|file_name|>hashmap.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! An unordered map and set type implemented as hash tables
//!
//! The tables use a keyed hash with new random keys generated for each container, so the ordering
//! of a set of keys in a hash table is randomized.
#[mutable_doc];
use container::{Container, Mutable, Map, MutableMap, Set, MutableSet};
use clone::Clone;
use cmp::{Eq, Equiv};
use hash::Hash;
use iterator::{Iterator, IteratorUtil, FromIterator, ChainIterator};
use num;
use option::{None, Option, Some};
use rand::RngUtil;
use rand;
use uint;
use vec;
use vec::{ImmutableVector, MutableVector, OwnedVector};
use kinds::Copy;
use util::{replace, unreachable};
static INITIAL_CAPACITY: uint = 32u; // 2^5
struct Bucket<K,V> {
hash: uint,
key: K,
value: V,
}
/// A hash map implementation which uses linear probing along with the SipHash
/// hash function for internal state. This means that the order of all hash maps
/// is randomized by keying each hash map randomly on creation.
///
/// It is required that the keys implement the `Eq` and `Hash` traits, although
/// this can frequently be achieved by just implementing the `Eq` and
/// `IterBytes` traits as `Hash` is automatically implemented for types that
/// implement `IterBytes`.
pub struct HashMap<K,V> {
priv k0: u64,
priv k1: u64,
priv resize_at: uint,
priv size: uint,
priv buckets: ~[Option<Bucket<K, V>>],
}
// We could rewrite FoundEntry to have type Option<&Bucket<K, V>>
// which would be nifty
enum SearchResult {
FoundEntry(uint), FoundHole(uint), TableFull
}
#[inline]
fn resize_at(capacity: uint) -> uint {
((capacity as float) * 3. / 4.) as uint
}
/// Creates a new hash map with the specified capacity.
pub fn linear_map_with_capacity<K:Eq + Hash,V>(
initial_capacity: uint) -> HashMap<K, V> {
let mut r = rand::task_rng();
linear_map_with_capacity_and_keys(r.gen(), r.gen(),
initial_capacity)
}
fn linear_map_with_capacity_and_keys<K:Eq + Hash,V>(
k0: u64, k1: u64,
initial_capacity: uint) -> HashMap<K, V> {
let cap = num::max(INITIAL_CAPACITY, initial_capacity);
HashMap {
k0: k0, k1: k1,
resize_at: resize_at(cap),
size: 0,
buckets: vec::from_fn(cap, |_| None)
}
}
impl<K:Hash + Eq,V> HashMap<K, V> {
#[inline]
fn to_bucket(&self, h: uint) -> uint {
// A good hash function with entropy spread over all of the
// bits is assumed. SipHash is more than good enough.
h % self.buckets.len()
}
#[inline]
fn next_bucket(&self, idx: uint, len_buckets: uint) -> uint {
(idx + 1) % len_buckets
}
#[inline]
fn bucket_sequence(&self, hash: uint,
op: &fn(uint) -> bool) -> bool {
let start_idx = self.to_bucket(hash);
let len_buckets = self.buckets.len();
let mut idx = start_idx;
loop {
if !op(idx) { return false; }
idx = self.next_bucket(idx, len_buckets);
if idx == start_idx {
return true;
}
}
}
#[inline]
fn bucket_for_key(&self, k: &K) -> SearchResult {
let hash = k.hash_keyed(self.k0, self.k1) as uint;
self.bucket_for_key_with_hash(hash, k)
}
#[inline]
fn bucket_for_key_equiv<Q:Hash + Equiv<K>>(&self, k: &Q)
-> SearchResult {
let hash = k.hash_keyed(self.k0, self.k1) as uint;
self.bucket_for_key_with_hash_equiv(hash, k)
}
#[inline]
fn bucket_for_key_with_hash(&self,
hash: uint,
k: &K)
-> SearchResult {
for self.bucket_sequence(hash) |i| {
match self.buckets[i] {
Some(ref bkt) => if bkt.hash == hash && *k == bkt.key {
return FoundEntry(i);
},
None => return FoundHole(i)
}
}
TableFull
}
#[inline]
fn bucket_for_key_with_hash_equiv<Q:Equiv<K>>(&self,
hash: uint,
k: &Q)
-> SearchResult {
for self.bucket_sequence(hash) |i| {
match self.buckets[i] {
Some(ref bkt) => {
if bkt.hash == hash && k.equiv(&bkt.key) {
return FoundEntry(i);
}
},
None => return FoundHole(i)
}
}
TableFull
}
/// Expand the capacity of the array to the next power of two
/// and re-insert each of the existing buckets.
#[inline]
fn expand(&mut self) {
let new_capacity = self.buckets.len() * 2;
self.resize(new_capacity);
}
/// Expands the capacity of the array and re-insert each of the
/// existing buckets.
fn resize(&mut self, new_capacity: uint) {
self.resize_at = resize_at(new_capacity);
let old_buckets = replace(&mut self.buckets,
vec::from_fn(new_capacity, |_| None));
self.size = 0;
// consume_rev_iter is more efficient
for old_buckets.consume_rev_iter().advance |bucket| {
self.insert_opt_bucket(bucket);
}
}
fn insert_opt_bucket(&mut self, bucket: Option<Bucket<K, V>>) {
match bucket {
Some(Bucket{hash: hash, key: key, value: value}) => {
self.insert_internal(hash, key, value);
}
None => {}
}
}
#[inline]
fn value_for_bucket<'a>(&'a self, idx: uint) -> &'a V {
match self.buckets[idx] {
Some(ref bkt) => &bkt.value,
None => fail!("HashMap::find: internal logic error"),
}
}
#[inline]
fn mut_value_for_bucket<'a>(&'a mut self, idx: uint) -> &'a mut V {
match self.buckets[idx] {
Some(ref mut bkt) => &mut bkt.value,
None => unreachable()
}
}
/// Inserts the key value pair into the buckets.
/// Assumes that there will be a bucket.
/// True if there was no previous entry with that key
fn insert_internal(&mut self, hash: uint, k: K, v: V) -> Option<V> {
match self.bucket_for_key_with_hash(hash, &k) {
TableFull => { fail!("Internal logic error"); }
FoundHole(idx) => {
self.buckets[idx] = Some(Bucket{hash: hash, key: k,
value: v});
self.size += 1;
None
}
FoundEntry(idx) => {
match self.buckets[idx] {
None => { fail!("insert_internal: Internal logic error") }
Some(ref mut b) => {
b.hash = hash;
b.key = k;
Some(replace(&mut b.value, v))
}
}
}
}
}
fn pop_internal(&mut self, hash: uint, k: &K) -> Option<V> {
// Removing from an open-addressed hashtable
// is, well, painful. The problem is that
// the entry may lie on the probe path for other
// entries, so removing it would make you think that
// those probe paths are empty.
//
// To address this we basically have to keep walking,
// re-inserting entries we find until we reach an empty
// bucket. We know we will eventually reach one because
// we insert one ourselves at the beginning (the removed
// entry).
//
// I found this explanation elucidating:
// http://www.maths.lse.ac.uk/Courses/MA407/del-hash.pdf
let mut idx = match self.bucket_for_key_with_hash(hash, k) {
TableFull | FoundHole(_) => return None,
FoundEntry(idx) => idx
};
let len_buckets = self.buckets.len();
let bucket = replace(&mut self.buckets[idx], None);
let value = match bucket {
None => None,
Some(Bucket{value, _}) => {
Some(value)
},
};
/* re-inserting buckets may cause changes in size, so remember
what our new size is ahead of time before we start insertions */
let size = self.size - 1;
idx = self.next_bucket(idx, len_buckets);
while self.buckets[idx].is_some() {
let bucket = replace(&mut self.buckets[idx], None);
self.insert_opt_bucket(bucket);
idx = self.next_bucket(idx, len_buckets);
}
self.size = size;
value
}
fn search(&self, hash: uint,
op: &fn(x: &Option<Bucket<K, V>>) -> bool) {
let _ = self.bucket_sequence(hash, |i| op(&self.buckets[i]));
}
}
impl<K:Hash + Eq,V> Container for HashMap<K, V> {
/// Return the number of elements in the map
fn len(&self) -> uint { self.size }
/// Return true if the map contains no elements
fn is_empty(&self) -> bool { self.len() == 0 }
}
impl<K:Hash + Eq,V> Mutable for HashMap<K, V> {
/// Clear the map, removing all key-value pairs.
fn clear(&mut self) {
for uint::range(0, self.buckets.len()) |idx| {
self.buckets[idx] = None;
}
self.size = 0;
}
}
impl<K:Hash + Eq,V> Map<K, V> for HashMap<K, V> {
/// Return true if the map contains a value for the specified key
fn contains_key(&self, k: &K) -> bool {
match self.bucket_for_key(k) {
FoundEntry(_) => {true}
TableFull | FoundHole(_) => {false}
}
}
/// Return a reference to the value corresponding to the key
fn find<'a>(&'a self, k: &K) -> Option<&'a V> {
match self.bucket_for_key(k) {
FoundEntry(idx) => Some(self.value_for_bucket(idx)),
TableFull | FoundHole(_) => None,
}
}
}
impl<K:Hash + Eq,V> MutableMap<K, V> for HashMap<K, V> {
/// Return a mutable reference to the value corresponding to the key
fn find_mut<'a>(&'a mut self, k: &K) -> Option<&'a mut V> {
let idx = match self.bucket_for_key(k) {
FoundEntry(idx) => idx,
TableFull | FoundHole(_) => return None
};
Some(self.mut_value_for_bucket(idx))
}
/// Insert a key-value pair into the map. An existing value for a
/// key is replaced by the new value. Return true if the key did
/// not already exist in the map.
fn insert(&mut self, k: K, v: V) -> bool {
self.swap(k, v).is_none()
}
/// Remove a key-value pair from the map. Return true if the key
/// was present in the map, otherwise false.
fn remove(&mut self, k: &K) -> bool {
self.pop(k).is_some()
}
    /// Insert a key-value pair into the map. If the key already had a value
/// present in the map, that value is returned. Otherwise None is returned.
fn swap(&mut self, k: K, v: V) -> Option<V> {
// this could be faster.
if self.size >= self.resize_at {
// n.b.: We could also do this after searching, so
// that we do not resize if this call to insert is
// simply going to update a key in place. My sense
// though is that it's worse to have to search through
// buckets to find the right spot twice than to just
// resize in this corner case.
self.expand();
}
let hash = k.hash_keyed(self.k0, self.k1) as uint;
self.insert_internal(hash, k, v)
}
/// Removes a key from the map, returning the value at the key if the key
/// was previously in the map.
fn pop(&mut self, k: &K) -> Option<V> {
let hash = k.hash_keyed(self.k0, self.k1) as uint;
self.pop_internal(hash, k)
}
}
impl<K: Hash + Eq, V> HashMap<K, V> {
/// Create an empty HashMap
pub fn new() -> HashMap<K, V> {
HashMap::with_capacity(INITIAL_CAPACITY)
}
/// Create an empty HashMap with space for at least `n` elements in
/// the hash table.
pub fn with_capacity(capacity: uint) -> HashMap<K, V> {
linear_map_with_capacity(capacity)
}
/// Reserve space for at least `n` elements in the hash table.
pub fn reserve_at_least(&mut self, n: uint) {
if n > self.buckets.len() {
let buckets = n * 4 / 3 + 1;
self.resize(uint::next_power_of_two(buckets));
}
}
/// Modify and return the value corresponding to the key in the map, or
/// insert and return a new value if it doesn't exist.
pub fn mangle<'a,A>(&'a mut self, k: K, a: A, not_found: &fn(&K, A) -> V,
found: &fn(&K, &mut V, A)) -> &'a mut V {
if self.size >= self.resize_at {
// n.b.: We could also do this after searching, so
// that we do not resize if this call to insert is
// simply going to update a key in place. My sense
// though is that it's worse to have to search through
// buckets to find the right spot twice than to just
// resize in this corner case.
self.expand();
}
let hash = k.hash_keyed(self.k0, self.k1) as uint;
let idx = match self.bucket_for_key_with_hash(hash, &k) {
TableFull => fail!("Internal logic error"),
FoundEntry(idx) => { found(&k, self.mut_value_for_bucket(idx), a); idx }
FoundHole(idx) => {
let v = not_found(&k, a);
self.buckets[idx] = Some(Bucket{hash: hash, key: k, value: v});
self.size += 1;
idx
}
};
self.mut_value_for_bucket(idx)
}
/// Return the value corresponding to the key in the map, or insert
/// and return the value if it doesn't exist.
pub fn find_or_insert<'a>(&'a mut self, k: K, v: V) -> &'a mut V {
self.mangle(k, v, |_k, a| a, |_k,_v,_a| ())
}
/// Return the value corresponding to the key in the map, or create,
/// insert, and return a new value if it doesn't exist.
pub fn find_or_insert_with<'a>(&'a mut self, k: K, f: &fn(&K) -> V)
-> &'a mut V {
self.mangle(k, (), |k,_a| f(k), |_k,_v,_a| ())
}
/// Insert a key-value pair into the map if the key is not already present.
/// Otherwise, modify the existing value for the key.
/// Returns the new or modified value for the key.
pub fn insert_or_update_with<'a>(&'a mut self, k: K, v: V,
f: &fn(&K, &mut V)) -> &'a mut V {
self.mangle(k, v, |_k,a| a, |k,v,_a| f(k,v))
}
/// Calls a function on each element of a hash map, destroying the hash
/// map in the process.
pub fn consume(&mut self, f: &fn(K, V)) {
let buckets = replace(&mut self.buckets,
vec::from_fn(INITIAL_CAPACITY, |_| None));
self.size = 0;
for buckets.consume_iter().advance |bucket| {
match bucket {
None => {},
Some(Bucket{key, value, _}) => {
f(key, value)
}
}
}
}
/// Creates a consuming iterator, that is, one that moves each key-value
/// pair out of the map in arbitrary order. The map cannot be used after
/// calling this.
pub fn consume_iter(self) -> HashMapConsumeIterator<K, V> {
// `consume_rev_iter` is more efficient than `consume_iter` for vectors
HashMapConsumeIterator {iter: self.buckets.consume_rev_iter()}
}
/// Retrieves a value for the given key, failing if the key is not
/// present.
pub fn get<'a>(&'a self, k: &K) -> &'a V {
match self.find(k) {
Some(v) => v,
None => fail!("No entry found for key: %?", k),
}
}
/// Retrieves a (mutable) value for the given key, failing if the key
/// is not present.
pub fn get_mut<'a>(&'a mut self, k: &K) -> &'a mut V {
match self.find_mut(k) {
Some(v) => v,
None => fail!("No entry found for key: %?", k),
}
}
/// Return true if the map contains a value for the specified key,
/// using equivalence
pub fn contains_key_equiv<Q:Hash + Equiv<K>>(&self, key: &Q) -> bool {
match self.bucket_for_key_equiv(key) {
FoundEntry(_) => {true}
TableFull | FoundHole(_) => {false}
}
}
/// Return the value corresponding to the key in the map, using
/// equivalence
pub fn find_equiv<'a, Q:Hash + Equiv<K>>(&'a self, k: &Q)
-> Option<&'a V> {
match self.bucket_for_key_equiv(k) {
FoundEntry(idx) => Some(self.value_for_bucket(idx)),
TableFull | FoundHole(_) => None,
}
}
/// Visit all keys
pub fn each_key(&self, blk: &fn(k: &K) -> bool) -> bool {
self.iter().advance(|(k, _)| blk(k))
}
/// Visit all values
pub fn each_value<'a>(&'a self, blk: &fn(v: &'a V) -> bool) -> bool {
self.iter().advance(|(_, v)| blk(v))
}
/// An iterator visiting all key-value pairs in arbitrary order.
/// Iterator element type is (&'a K, &'a V).
pub fn iter<'a>(&'a self) -> HashMapIterator<'a, K, V> {
HashMapIterator { iter: self.buckets.iter() }
}
/// An iterator visiting all key-value pairs in arbitrary order,
/// with mutable references to the values.
/// Iterator element type is (&'a K, &'a mut V).
pub fn mut_iter<'a>(&'a mut self) -> HashMapMutIterator<'a, K, V> {
HashMapMutIterator { iter: self.buckets.mut_iter() }
}
}
impl<K: Hash + Eq, V: Copy> HashMap<K, V> {
/// Like `find`, but returns a copy of the value.
pub fn find_copy(&self, k: &K) -> Option<V> {
self.find(k).map_consume(|v| copy *v)
}
/// Like `get`, but returns a copy of the value.
pub fn get_copy(&self, k: &K) -> V {
copy *self.get(k)
}
}
impl<K:Hash + Eq,V:Eq> Eq for HashMap<K, V> {
fn eq(&self, other: &HashMap<K, V>) -> bool {
if self.len() != other.len() { return false; }
for self.iter().advance |(key, value)| {
match other.find(key) {
None => return false,
Some(v) => if value != v { return false },
}
}
true
}
fn ne(&self, other: &HashMap<K, V>) -> bool { !self.eq(other) }
}
/// HashMap iterator
pub struct HashMapIterator<'self, K, V> {
priv iter: vec::VecIterator<'self, Option<Bucket<K, V>>>,
}
/// HashMap mutable values iterator
pub struct HashMapMutIterator<'self, K, V> {
priv iter: vec::VecMutIterator<'self, Option<Bucket<K, V>>>,
}
/// HashMap consume iterator
pub struct HashMapConsumeIterator<K, V> {
priv iter: vec::VecConsumeRevIterator<Option<Bucket<K, V>>>,
}
/// HashSet iterator
pub struct HashSetIterator<'self, K> {
priv iter: vec::VecIterator<'self, Option<Bucket<K, ()>>>,
}
/// HashSet consume iterator
pub struct HashSetConsumeIterator<K> {
priv iter: vec::VecConsumeRevIterator<Option<Bucket<K, ()>>>,
}
impl<'self, K, V> Iterator<(&'self K, &'self V)> for HashMapIterator<'self, K, V> {
#[inline]
fn next(&mut self) -> Option<(&'self K, &'self V)> {
for self.iter.advance |elt| {
match elt {
&Some(ref bucket) => return Some((&bucket.key, &bucket.value)),
&None => {},
}
}
None
}
}
impl<'self, K, V> Iterator<(&'self K, &'self mut V)> for HashMapMutIterator<'self, K, V> {
#[inline]
fn next(&mut self) -> Option<(&'self K, &'self mut V)> {
for self.iter.advance |elt| {
match elt {
&Some(ref mut bucket) => return Some((&bucket.key, &mut bucket.value)),
&None => {},
}
}
None
}
}
impl<K, V> Iterator<(K, V)> for HashMapConsumeIterator<K, V> {
#[inline]
fn next(&mut self) -> Option<(K, V)> {
for self.iter.advance |elt| {
match elt {
Some(Bucket {key, value, _}) => return Some((key, value)),
None => {},
}
}
None
}
}
impl<'self, K> Iterator<&'self K> for HashSetIterator<'self, K> {
#[inline]
fn next(&mut self) -> Option<&'self K> {
for self.iter.advance |elt| {
match elt {
&Some(ref bucket) => return Some(&bucket.key),
&None => {},
}
}
None
}
}
impl<K> Iterator<K> for HashSetConsumeIterator<K> {
#[inline]
fn next(&mut self) -> Option<K> {
for self.iter.advance |elt| {
match elt {
Some(bucket) => return Some(bucket.key),
None => {},
}
}
None
}
}
impl<K: Eq + Hash, V, T: Iterator<(K, V)>> FromIterator<(K, V), T> for HashMap<K, V> {
pub fn from_iterator(iter: &mut T) -> HashMap<K, V> {
let (lower, _) = iter.size_hint();
let mut map = HashMap::with_capacity(lower);
for iter.advance |(k, v)| {
map.insert(k, v);
}
map
}
}
/// An implementation of a hash set using the underlying representation of a
/// HashMap where the value is (). As with the `HashMap` type, a `HashSet`
/// requires that the elements implement the `Eq` and `Hash` traits.
pub struct HashSet<T> {
priv map: HashMap<T, ()>
}
impl<T:Hash + Eq> Eq for HashSet<T> {
fn eq(&self, other: &HashSet<T>) -> bool { self.map == other.map }
fn ne(&self, other: &HashSet<T>) -> bool { self.map != other.map }
}
impl<T:Hash + Eq> Container for HashSet<T> {
/// Return the number of elements in the set
fn len(&self) -> uint { self.map.len() }
/// Return true if the set contains no elements
fn is_empty(&self) -> bool { self.map.is_empty() }
}
impl<T:Hash + Eq> Mutable for HashSet<T> {
/// Clear the set, removing all values.
fn clear(&mut self) { self.map.clear() }
}
impl<T:Hash + Eq> Set<T> for HashSet<T> {
/// Return true if the set contains a value
fn contains(&self, value: &T) -> bool { self.map.contains_key(value) }
/// Return true if the set has no elements in common with `other`.
/// This is equivalent to checking for an empty intersection.
fn is_disjoint(&self, other: &HashSet<T>) -> bool {
self.iter().all(|v| !other.contains(v))
}
/// Return true if the set is a subset of another
fn is_subset(&self, other: &HashSet<T>) -> bool {
self.iter().all(|v| other.contains(v))
}
/// Return true if the set is a superset of another
fn is_superset(&self, other: &HashSet<T>) -> bool {
other.is_subset(self)
}
/// Visit the values representing the difference
fn difference(&self, other: &HashSet<T>, f: &fn(&T) -> bool) -> bool {
self.difference_iter(other).advance(f)
}
/// Visit the values representing the symmetric difference
fn symmetric_difference(&self,
other: &HashSet<T>,
f: &fn(&T) -> bool) -> bool {
self.symmetric_difference_iter(other).advance(f)
}
/// Visit the values representing the intersection
fn intersection(&self, other: &HashSet<T>, f: &fn(&T) -> bool) -> bool {
self.intersection_iter(other).advance(f)
}
/// Visit the values representing the union
fn union(&self, other: &HashSet<T>, f: &fn(&T) -> bool) -> bool {
self.union_iter(other).advance(f)
}
}
impl<T:Hash + Eq> MutableSet<T> for HashSet<T> {
/// Add a value to the set. Return true if the value was not already
/// present in the set.
fn insert(&mut self, value: T) -> bool { self.map.insert(value, ()) }
/// Remove a value from the set. Return true if the value was
/// present in the set.
fn remove(&mut self, value: &T) -> bool { self.map.remove(value) }
}
impl<T:Hash + Eq> HashSet<T> {
/// Create an empty HashSet
pub fn new() -> HashSet<T> {
HashSet::with_capacity(INITIAL_CAPACITY)
}
/// Create an empty HashSet with space for at least `n` elements in
/// the hash table.
pub fn with_capacity(capacity: uint) -> HashSet<T> {
HashSet { map: HashMap::with_capacity(capacity) }
}
/// Reserve space for at least `n` elements in the hash table.
pub fn reserve_at_least(&mut self, n: uint) {
self.map.reserve_at_least(n)
}
/// Consumes all of the elements in the set, emptying it out
pub fn consume(&mut self, f: &fn(T)) {
self.map.consume(|k, _| f(k))
}
/// Creates a consuming iterator, that is, one that moves each value out
/// of the set in arbitrary order. The set cannot be used after calling
/// this.
pub fn consume_iter(self) -> HashSetConsumeIterator<T> {
// `consume_rev_iter` is more efficient than `consume_iter` for vectors
HashSetConsumeIterator {iter: self.map.buckets.consume_rev_iter()}
}
/// Returns true if the hash set contains a value equivalent to the
/// given query value.
pub fn contains_equiv<Q:Hash + Equiv<T>>(&self, value: &Q) -> bool {
self.map.contains_key_equiv(value)
}
/// An iterator visiting all elements in arbitrary order.
/// Iterator element type is &'a T.
pub fn iter<'a>(&'a self) -> HashSetIterator<'a, T> {
HashSetIterator { iter: self.map.buckets.iter() }
}
/// Visit the values representing the difference
pub fn difference_iter<'a>(&'a self, other: &'a HashSet<T>)
-> SetAlgebraIter<'a, T> {<|fim▁hole|> }
/// Visit the values representing the symmetric difference
pub fn symmetric_difference_iter<'a>(&'a self, other: &'a HashSet<T>)
-> ChainIterator<&'a T, SetAlgebraIter<'a, T>, SetAlgebraIter<'a, T>> {
self.difference_iter(other).chain_(other.difference_iter(self))
}
/// Visit the values representing the intersection
pub fn intersection_iter<'a>(&'a self, other: &'a HashSet<T>)
-> SetAlgebraIter<'a, T> {
EnvFilterIterator{iter: self.iter(), env: other,
filter: |elt, other| other.contains(elt) }
}
/// Visit the values representing the union
pub fn union_iter<'a>(&'a self, other: &'a HashSet<T>)
-> ChainIterator<&'a T, HashSetIterator<'a, T>, SetAlgebraIter<'a, T>> {
self.iter().chain_(other.difference_iter(self))
}
}
impl<K: Eq + Hash, T: Iterator<K>> FromIterator<K, T> for HashSet<K> {
pub fn from_iterator(iter: &mut T) -> HashSet<K> {
let (lower, _) = iter.size_hint();
let mut set = HashSet::with_capacity(lower);
for iter.advance |k| {
set.insert(k);
}
set
}
}
// FIXME #7814: use std::iterator::FilterIterator
/// Building block for Set operation iterators
pub struct EnvFilterIterator<A, Env, I> {
priv env: Env,
priv filter: &'static fn(&A, Env) -> bool,
priv iter: I,
}
impl<'self, A, Env: Clone, I: Iterator<&'self A>> Iterator<&'self A>
for EnvFilterIterator<A, Env, I> {
#[inline]
fn next(&mut self) -> Option<&'self A> {
loop {
match self.iter.next() {
Some(elt) => if (self.filter)(elt, self.env.clone()) {
return Some(elt)
},
None => return None,
}
}
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
let (_, upper) = self.iter.size_hint();
(0, upper)
}
}
/// Set operations iterator
pub type SetAlgebraIter<'self, T> =
EnvFilterIterator<T, &'self HashSet<T>, HashSetIterator<'self, T>>;
#[cfg(test)]
mod test_map {
use container::{Container, Map, Set};
use option::{None, Some};
use super::*;
use uint;
#[test]
fn test_create_capacity_zero() {
let mut m = HashMap::with_capacity(0);
assert!(m.insert(1, 1));
}
#[test]
fn test_insert() {
let mut m = HashMap::new();
assert!(m.insert(1, 2));
assert!(m.insert(2, 4));
assert_eq!(*m.get(&1), 2);
assert_eq!(*m.get(&2), 4);
}
#[test]
fn test_find_mut() {
let mut m = HashMap::new();
assert!(m.insert(1, 12));
assert!(m.insert(2, 8));
assert!(m.insert(5, 14));
let new = 100;
match m.find_mut(&5) {
None => fail!(), Some(x) => *x = new
}
assert_eq!(m.find(&5), Some(&new));
}
#[test]
fn test_insert_overwrite() {
let mut m = HashMap::new();
assert!(m.insert(1, 2));
assert_eq!(*m.get(&1), 2);
assert!(!m.insert(1, 3));
assert_eq!(*m.get(&1), 3);
}
#[test]
fn test_insert_conflicts() {
let mut m = linear_map_with_capacity(4);
assert!(m.insert(1, 2));
assert!(m.insert(5, 3));
assert!(m.insert(9, 4));
assert_eq!(*m.get(&9), 4);
assert_eq!(*m.get(&5), 3);
assert_eq!(*m.get(&1), 2);
}
#[test]
fn test_conflict_remove() {
let mut m = linear_map_with_capacity(4);
assert!(m.insert(1, 2));
assert!(m.insert(5, 3));
assert!(m.insert(9, 4));
assert!(m.remove(&1));
assert_eq!(*m.get(&9), 4);
assert_eq!(*m.get(&5), 3);
}
#[test]
fn test_is_empty() {
let mut m = linear_map_with_capacity(4);
assert!(m.insert(1, 2));
assert!(!m.is_empty());
assert!(m.remove(&1));
assert!(m.is_empty());
}
#[test]
fn test_pop() {
let mut m = HashMap::new();
m.insert(1, 2);
assert_eq!(m.pop(&1), Some(2));
assert_eq!(m.pop(&1), None);
}
#[test]
fn test_swap() {
let mut m = HashMap::new();
assert_eq!(m.swap(1, 2), None);
assert_eq!(m.swap(1, 3), Some(2));
assert_eq!(m.swap(1, 4), Some(3));
}
#[test]
fn test_find_or_insert() {
let mut m = HashMap::new::<int, int>();
assert_eq!(*m.find_or_insert(1, 2), 2);
assert_eq!(*m.find_or_insert(1, 3), 2);
}
#[test]
fn test_find_or_insert_with() {
let mut m = HashMap::new::<int, int>();
assert_eq!(*m.find_or_insert_with(1, |_| 2), 2);
assert_eq!(*m.find_or_insert_with(1, |_| 3), 2);
}
#[test]
fn test_insert_or_update_with() {
let mut m = HashMap::new::<int, int>();
assert_eq!(*m.insert_or_update_with(1, 2, |_,x| *x+=1), 2);
assert_eq!(*m.insert_or_update_with(1, 2, |_,x| *x+=1), 3);
}
#[test]
fn test_consume() {
let mut m = HashMap::new();
assert!(m.insert(1, 2));
assert!(m.insert(2, 3));
let mut m2 = HashMap::new();
do m.consume |k, v| {
m2.insert(k, v);
}
assert_eq!(m.len(), 0);
assert_eq!(m2.len(), 2);
assert_eq!(m2.get(&1), &2);
assert_eq!(m2.get(&2), &3);
}
#[test]
fn test_consume_still_usable() {
let mut m = HashMap::new();
assert!(m.insert(1, 2));
do m.consume |_, _| {}
assert!(m.insert(1, 2));
}
#[test]
fn test_consume_iter() {
let hm = {
let mut hm = HashMap::new();
hm.insert('a', 1);
hm.insert('b', 2);
hm
};
let v = hm.consume_iter().collect::<~[(char, int)]>();
assert!([('a', 1), ('b', 2)] == v || [('b', 2), ('a', 1)] == v);
}
#[test]
fn test_iterate() {
let mut m = linear_map_with_capacity(4);
for uint::range(0, 32) |i| {
assert!(m.insert(i, i*2));
}
let mut observed = 0;
for m.iter().advance |(k, v)| {
assert_eq!(*v, *k * 2);
observed |= (1 << *k);
}
assert_eq!(observed, 0xFFFF_FFFF);
}
#[test]
fn test_find() {
let mut m = HashMap::new();
assert!(m.find(&1).is_none());
m.insert(1, 2);
match m.find(&1) {
None => fail!(),
Some(v) => assert!(*v == 2)
}
}
#[test]
fn test_eq() {
let mut m1 = HashMap::new();
m1.insert(1, 2);
m1.insert(2, 3);
m1.insert(3, 4);
let mut m2 = HashMap::new();
m2.insert(1, 2);
m2.insert(2, 3);
assert!(m1 != m2);
m2.insert(3, 4);
assert_eq!(m1, m2);
}
#[test]
fn test_expand() {
let mut m = HashMap::new();
assert_eq!(m.len(), 0);
assert!(m.is_empty());
let mut i = 0u;
let old_resize_at = m.resize_at;
while old_resize_at == m.resize_at {
m.insert(i, i);
i += 1;
}
assert_eq!(m.len(), i);
assert!(!m.is_empty());
}
#[test]
fn test_find_equiv() {
let mut m = HashMap::new();
let (foo, bar, baz) = (1,2,3);
m.insert(~"foo", foo);
m.insert(~"bar", bar);
m.insert(~"baz", baz);
assert_eq!(m.find_equiv(&("foo")), Some(&foo));
assert_eq!(m.find_equiv(&("bar")), Some(&bar));
assert_eq!(m.find_equiv(&("baz")), Some(&baz));
assert_eq!(m.find_equiv(&("qux")), None);
}
#[test]
fn test_from_iter() {
let xs = ~[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let map: HashMap<int, int> = xs.iter().transform(|&x| x).collect();
for xs.iter().advance |&(k, v)| {
assert_eq!(map.find(&k), Some(&v));
}
}
}
#[cfg(test)]
mod test_set {
use super::*;
use container::{Container, Map, Set};
use vec::ImmutableEqVector;
use uint;
#[test]
fn test_disjoint() {
let mut xs = HashSet::new();
let mut ys = HashSet::new();
assert!(xs.is_disjoint(&ys));
assert!(ys.is_disjoint(&xs));
assert!(xs.insert(5));
assert!(ys.insert(11));
assert!(xs.is_disjoint(&ys));
assert!(ys.is_disjoint(&xs));
assert!(xs.insert(7));
assert!(xs.insert(19));
assert!(xs.insert(4));
assert!(ys.insert(2));
assert!(ys.insert(-11));
assert!(xs.is_disjoint(&ys));
assert!(ys.is_disjoint(&xs));
assert!(ys.insert(7));
assert!(!xs.is_disjoint(&ys));
assert!(!ys.is_disjoint(&xs));
}
#[test]
fn test_subset_and_superset() {
let mut a = HashSet::new();
assert!(a.insert(0));
assert!(a.insert(5));
assert!(a.insert(11));
assert!(a.insert(7));
let mut b = HashSet::new();
assert!(b.insert(0));
assert!(b.insert(7));
assert!(b.insert(19));
assert!(b.insert(250));
assert!(b.insert(11));
assert!(b.insert(200));
assert!(!a.is_subset(&b));
assert!(!a.is_superset(&b));
assert!(!b.is_subset(&a));
assert!(!b.is_superset(&a));
assert!(b.insert(5));
assert!(a.is_subset(&b));
assert!(!a.is_superset(&b));
assert!(!b.is_subset(&a));
assert!(b.is_superset(&a));
}
#[test]
fn test_iterate() {
let mut a = HashSet::new();
for uint::range(0, 32) |i| {
assert!(a.insert(i));
}
let mut observed = 0;
for a.iter().advance |k| {
observed |= (1 << *k);
}
assert_eq!(observed, 0xFFFF_FFFF);
}
#[test]
fn test_intersection() {
let mut a = HashSet::new();
let mut b = HashSet::new();
assert!(a.insert(11));
assert!(a.insert(1));
assert!(a.insert(3));
assert!(a.insert(77));
assert!(a.insert(103));
assert!(a.insert(5));
assert!(a.insert(-5));
assert!(b.insert(2));
assert!(b.insert(11));
assert!(b.insert(77));
assert!(b.insert(-9));
assert!(b.insert(-42));
assert!(b.insert(5));
assert!(b.insert(3));
let mut i = 0;
let expected = [3, 5, 11, 77];
for a.intersection_iter(&b).advance |x| {
assert!(expected.contains(x));
i += 1
}
assert_eq!(i, expected.len());
}
#[test]
fn test_difference() {
let mut a = HashSet::new();
let mut b = HashSet::new();
assert!(a.insert(1));
assert!(a.insert(3));
assert!(a.insert(5));
assert!(a.insert(9));
assert!(a.insert(11));
assert!(b.insert(3));
assert!(b.insert(9));
let mut i = 0;
let expected = [1, 5, 11];
for a.difference_iter(&b).advance |x| {
assert!(expected.contains(x));
i += 1
}
assert_eq!(i, expected.len());
}
#[test]
fn test_symmetric_difference() {
let mut a = HashSet::new();
let mut b = HashSet::new();
assert!(a.insert(1));
assert!(a.insert(3));
assert!(a.insert(5));
assert!(a.insert(9));
assert!(a.insert(11));
assert!(b.insert(-2));
assert!(b.insert(3));
assert!(b.insert(9));
assert!(b.insert(14));
assert!(b.insert(22));
let mut i = 0;
let expected = [-2, 1, 5, 11, 14, 22];
for a.symmetric_difference_iter(&b).advance |x| {
assert!(expected.contains(x));
i += 1
}
assert_eq!(i, expected.len());
}
#[test]
fn test_union() {
let mut a = HashSet::new();
let mut b = HashSet::new();
assert!(a.insert(1));
assert!(a.insert(3));
assert!(a.insert(5));
assert!(a.insert(9));
assert!(a.insert(11));
assert!(a.insert(16));
assert!(a.insert(19));
assert!(a.insert(24));
assert!(b.insert(-2));
assert!(b.insert(1));
assert!(b.insert(5));
assert!(b.insert(9));
assert!(b.insert(13));
assert!(b.insert(19));
let mut i = 0;
let expected = [-2, 1, 3, 5, 9, 11, 13, 16, 19, 24];
for a.union_iter(&b).advance |x| {
assert!(expected.contains(x));
i += 1
}
assert_eq!(i, expected.len());
}
#[test]
fn test_from_iter() {
let xs = ~[1, 2, 3, 4, 5, 6, 7, 8, 9];
let set: HashSet<int> = xs.iter().transform(|&x| x).collect();
for xs.iter().advance |x: &int| {
assert!(set.contains(x));
}
}
#[test]
fn test_consume_iter() {
let hs = {
let mut hs = HashSet::new();
hs.insert('a');
hs.insert('b');
hs
};
let v = hs.consume_iter().collect::<~[char]>();
assert!(['a', 'b'] == v || ['b', 'a'] == v);
}
}<|fim▁end|>
|
EnvFilterIterator{iter: self.iter(), env: other,
filter: |elt, other| !other.contains(elt) }
|
<|file_name|>actions.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
from pisi.actionsapi import shelltools, get, cmaketools, pisitools
def setup():<|fim▁hole|>
def build():
cmaketools.make()
def install():
cmaketools.install()
pisitools.dodoc ("AUTHORS", "ChangeLog", "COPYING")<|fim▁end|>
|
cmaketools.configure()
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import application
import platform
import exceptions
from ctypes import c_char_p
from libloader import load_library
import paths
if platform.architecture()[0][:2] == "32":
lib = load_library("api_keys32", x86_path=paths.app_path("keys/lib"))
else:
lib = load_library("api_keys64", x64_path=paths.app_path("keys/lib"))
# import linuxKeys
# lib = linuxKeys
keyring = None
def setup():
global keyring
if keyring == None:
keyring = Keyring()
class Keyring(object):
def __init__(self):
super(Keyring, self).__init__()
def _call_method(self, function):
result = getattr(lib, function)
result = c_char_p(result.__call__())<|fim▁hole|> def get(self, func):
if hasattr(application,func+"_override"):
return getattr(application,func+'_override')
return getattr(self, "_call_method")("get_"+func)<|fim▁end|>
|
return result.value
|
<|file_name|>wifi_hopping.go<|end_file_name|><|fim▁begin|>package wifi
import (
"time"
"github.com/bettercap/bettercap/network"
)
func (mod *WiFiModule) onChannel(channel int, cb func()) {
mod.chanLock.Lock()
defer mod.chanLock.Unlock()
prev := mod.stickChan
mod.stickChan = channel
if err := network.SetInterfaceChannel(mod.iface.Name(), channel); err != nil {
mod.Warning("error while hopping to channel %d: %s", channel, err)
} else {
mod.Debug("hopped on channel %d", channel)
}
cb()
mod.stickChan = prev
}
func (mod *WiFiModule) channelHopper() {
mod.reads.Add(1)
defer mod.reads.Done()
mod.Info("channel hopper started.")
for mod.Running() {
delay := mod.hopPeriod
// if we have both 2.4 and 5ghz capabilities, we have
// more channels, therefore we need to increase the time
// we hop on each one otherwise me lose information
if len(mod.frequencies) > 14 {
delay = delay * 2
}
frequencies := mod.frequencies
loopCurrentChannels:
for _, frequency := range frequencies {
channel := network.Dot11Freq2Chan(frequency)
// stick to the access point channel as long as it's selected
// or as long as we're deauthing on it
if mod.stickChan != 0 {
channel = mod.stickChan
}
mod.Debug("hopping on channel %d", channel)
mod.chanLock.Lock()
if err := network.SetInterfaceChannel(mod.iface.Name(), channel); err != nil {
mod.Warning("error while hopping to channel %d: %s", channel, err)
}
mod.chanLock.Unlock()<|fim▁hole|> select {
case _ = <-mod.hopChanges:
mod.Debug("hop changed")
break loopCurrentChannels
case <-time.After(delay):
if !mod.Running() {
return
}
}
}
}
}<|fim▁end|>
| |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""<|fim▁hole|><|fim▁end|>
|
This package contains a number of utilities that are used inside of openmdao.
It does not depend on any other openmdao package.
"""
|
<|file_name|>boss_erekem.cpp<|end_file_name|><|fim▁begin|>/* This file is part of the ScriptDev2 Project. See AUTHORS file for Copyright information
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.<|fim▁hole|> * This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
/* ScriptData
SDName: boss_erekem
SD%Complete: 90
SDComment: Timers may need adjustments
SDCategory: Violet Hold
EndScriptData */
#include "precompiled.h"
#include "violet_hold.h"
enum
{
SAY_AGGRO = -1608012,
SAY_ADD_DIE_1 = -1608013,
SAY_ADD_DIE_2 = -1608014,
SAY_DEATH = -1608018,
// A few Sound IDs on SLAY, if there _is_ text related, fields -1608015 to -1608017 are free
SOUND_ID_SLAY_1 = 14222,
SOUND_ID_SLAY_2 = 14223,
SOUND_ID_SLAY_3 = 14224,
SPELL_BLOODLUST = 54516,
SPELL_BREAK_BONDS_H = 59463,
SPELL_CHAIN_HEAL = 54481,
SPELL_CHAIN_HEAL_H = 59473,
SPELL_EARTH_SHIELD = 54479,
SPELL_EARTH_SHIELD_H = 59471,
SPELL_EARTH_SHOCK = 54511,
SPELL_LIGHTNING_BOLT = 53044,
SPELL_STORMSTRIKE = 51876,
// Spells of adds
SPELL_GUSHING_WOUND = 39215,
SPELL_HOWLING_SCREECH = 54463,
SPELL_STRIKE = 14516
};
struct boss_erekemAI : public ScriptedAI
{
boss_erekemAI(Creature* pCreature) : ScriptedAI(pCreature)
{
m_pInstance = (instance_violet_hold*)pCreature->GetInstanceData();
m_bIsRegularMode = pCreature->GetMap()->IsRegularDifficulty();
Reset();
}
instance_violet_hold* m_pInstance;
bool m_bIsRegularMode;
uint32 m_uiBreakBondsTimer;
uint32 m_uiChainHealTimer;
uint32 m_uiEarthShieldTimer;
uint32 m_uiEarthShockTimer;
uint32 m_uiSpecialSpellTimer;
uint8 m_uiGuardiansDead;
void Reset() override
{
m_uiSpecialSpellTimer = 0;
m_uiEarthShieldTimer = urand(2000, 3000);
m_uiEarthShockTimer = urand(4000, 9000);
m_uiChainHealTimer = urand(5000, 15000);
m_uiBreakBondsTimer = urand(25000, 30000);
m_uiGuardiansDead = 0;
}
void Aggro(Unit* /*pWho*/) override
{
DoScriptText(SAY_AGGRO, m_creature);
}
void JustDied(Unit* /*pKiller*/) override
{
DoScriptText(SAY_DEATH, m_creature);
}
void KilledUnit(Unit* /*pVictim*/) override
{
switch (urand(0, 2))
{
case 0: DoPlaySoundToSet(m_creature, SOUND_ID_SLAY_1); break;
case 1: DoPlaySoundToSet(m_creature, SOUND_ID_SLAY_2); break;
case 2: DoPlaySoundToSet(m_creature, SOUND_ID_SLAY_3); break;
}
}
void GuardianJustDied()
{
DoScriptText(!m_uiGuardiansDead ? SAY_ADD_DIE_1 : SAY_ADD_DIE_2, m_creature);
++m_uiGuardiansDead;
// cast bloodlust if both guards are dead
if (m_uiGuardiansDead == 2)
DoCastSpellIfCan(m_creature, SPELL_BLOODLUST, CAST_INTERRUPT_PREVIOUS);
}
void UpdateAI(const uint32 uiDiff) override
{
if (!m_creature->SelectHostileTarget() || !m_creature->getVictim())
return;
if (m_uiEarthShieldTimer < uiDiff)
{
if (DoCastSpellIfCan(m_creature, m_bIsRegularMode ? SPELL_EARTH_SHIELD : SPELL_EARTH_SHIELD_H, CAST_AURA_NOT_PRESENT) == CAST_OK)
m_uiEarthShieldTimer = urand(25000, 30000);
}
else
m_uiEarthShieldTimer -= uiDiff;
if (m_uiEarthShockTimer < uiDiff)
{
if (Unit* pTarget = m_creature->SelectAttackingTarget(ATTACKING_TARGET_RANDOM, 0))
{
if (DoCastSpellIfCan(pTarget, SPELL_EARTH_SHOCK) == CAST_OK)
m_uiEarthShockTimer = urand(8000, 13000);
}
}
else
m_uiEarthShockTimer -= uiDiff;
if (m_uiChainHealTimer < uiDiff)
{
if (DoCastSpellIfCan(m_creature, m_bIsRegularMode ? SPELL_CHAIN_HEAL : SPELL_CHAIN_HEAL_H) == CAST_OK)
m_uiChainHealTimer = urand(15000, 25000);
}
else
m_uiChainHealTimer -= uiDiff;
// Cast Stormstrike only if both guards are down
if (m_uiSpecialSpellTimer < uiDiff)
{
if (DoCastSpellIfCan(m_creature->getVictim(), m_uiGuardiansDead == 2 ? SPELL_STORMSTRIKE : SPELL_LIGHTNING_BOLT) == CAST_OK)
m_uiSpecialSpellTimer = urand(2000, 3000);
}
else
m_uiSpecialSpellTimer -= uiDiff;
// Break bonds only on heroic
if (!m_bIsRegularMode)
{
if (m_uiBreakBondsTimer < uiDiff)
{
if (DoCastSpellIfCan(m_creature, SPELL_BREAK_BONDS_H) == CAST_OK)
m_uiBreakBondsTimer = urand(25000, 30000);
}
else
m_uiBreakBondsTimer -= uiDiff;
}
DoMeleeAttackIfReady();
}
};
CreatureAI* GetAI_boss_erekem(Creature* pCreature)
{
return new boss_erekemAI(pCreature);
}
struct npc_erekem_guardAI : public ScriptedAI
{
npc_erekem_guardAI(Creature* pCreature) : ScriptedAI(pCreature)
{
m_pInstance = ((instance_violet_hold*)pCreature->GetInstanceData());
Reset();
}
instance_violet_hold* m_pInstance;
uint32 m_uiGushingWoundTimer;
uint32 m_uiHowlingScreechTimer;
uint32 m_uiStrikeTimer;
void Reset() override
{
m_uiGushingWoundTimer = urand(9000, 14000);
m_uiHowlingScreechTimer = urand(8000, 12000);
m_uiStrikeTimer = urand(5000, 7000);
}
void JustDied(Unit* /*pKiller*/) override
{
if (!m_pInstance)
return;
if (Creature* pBoss = m_pInstance->GetSingleCreatureFromStorage(m_pInstance->GetData(TYPE_EREKEM) != DONE ? NPC_EREKEM : NPC_ARAKKOA))
{
if (!pBoss->isAlive())
return;
((boss_erekemAI*)pBoss->AI())->GuardianJustDied();
}
}
void UpdateAI(const uint32 uiDiff) override
{
if (!m_creature->SelectHostileTarget() || !m_creature->getVictim())
return;
if (m_uiGushingWoundTimer < uiDiff)
{
if (DoCastSpellIfCan(m_creature->getVictim(), SPELL_GUSHING_WOUND) == CAST_OK)
m_uiGushingWoundTimer = urand(25000, 30000);
}
else
m_uiGushingWoundTimer -= uiDiff;
if (m_uiHowlingScreechTimer < uiDiff)
{
if (DoCastSpellIfCan(m_creature, SPELL_HOWLING_SCREECH) == CAST_OK)
m_uiHowlingScreechTimer = urand(10000, 16000);
}
else
m_uiHowlingScreechTimer -= uiDiff;
if (m_uiStrikeTimer < uiDiff)
{
if (DoCastSpellIfCan(m_creature->getVictim(), SPELL_STRIKE) == CAST_OK)
m_uiStrikeTimer = urand(5000, 7000);
}
else
m_uiStrikeTimer -= uiDiff;
DoMeleeAttackIfReady();
}
};
CreatureAI* GetAI_npc_erekem_guard(Creature* pCreature)
{
return new npc_erekem_guardAI(pCreature);
}
void AddSC_boss_erekem()
{
Script* pNewScript;
pNewScript = new Script;
pNewScript->Name = "boss_erekem";
pNewScript->GetAI = &GetAI_boss_erekem;
pNewScript->RegisterSelf();
pNewScript = new Script;
pNewScript->Name = "npc_erekem_guard";
pNewScript->GetAI = &GetAI_npc_erekem_guard;
pNewScript->RegisterSelf();
}<|fim▁end|>
|
*
|
<|file_name|>compare.go<|end_file_name|><|fim▁begin|>// Copyright 2014 The Go Authors. All rights reserved.<|fim▁hole|>//
// https://tools.ietf.org/html/rfc4511
//
// CompareRequest ::= [APPLICATION 14] SEQUENCE {
// entry LDAPDN,
// ava AttributeValueAssertion }
//
// AttributeValueAssertion ::= SEQUENCE {
// attributeDesc AttributeDescription,
// assertionValue AssertionValue }
//
// AttributeDescription ::= LDAPString
// -- Constrained to <attributedescription>
// -- [RFC4512]
//
// AttributeValue ::= OCTET STRING
//
package ldap
import (
"errors"
"fmt"
"gopkg.in/asn1-ber.v1"
)
// Compare checks whether the attribute of the dn matches value. It returns true if it does,
// otherwise false, along with any error that occurred.
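//
// Illustrative usage sketch (not from the original source; assumes l is an established *Conn and
// the DN, attribute and value are placeholders):
//
//	matched, err := l.Compare("uid=jdoe,dc=example,dc=com", "mail", "jdoe@example.com")
//	if err != nil {
//		// handle network or protocol error
//	}
//	_ = matched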
func (l *Conn) Compare(dn, attribute, value string) (bool, error) {
packet := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "LDAP Request")
packet.AppendChild(ber.NewInteger(ber.ClassUniversal, ber.TypePrimitive, ber.TagInteger, l.nextMessageID(), "MessageID"))
request := ber.Encode(ber.ClassApplication, ber.TypeConstructed, ApplicationCompareRequest, nil, "Compare Request")
request.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, dn, "DN"))
ava := ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagSequence, nil, "AttributeValueAssertion")
ava.AppendChild(ber.NewString(ber.ClassUniversal, ber.TypePrimitive, ber.TagOctetString, attribute, "AttributeDesc"))
ava.AppendChild(ber.Encode(ber.ClassUniversal, ber.TypeConstructed, ber.TagOctetString, value, "AssertionValue"))
request.AppendChild(ava)
packet.AppendChild(request)
l.Debug.PrintPacket(packet)
msgCtx, err := l.sendMessage(packet)
if err != nil {
return false, err
}
defer l.finishMessage(msgCtx)
l.Debug.Printf("%d: waiting for response", msgCtx.id)
packetResponse, ok := <-msgCtx.responses
if !ok {
return false, NewError(ErrorNetwork, errors.New("ldap: response channel closed"))
}
packet, err = packetResponse.ReadPacket()
l.Debug.Printf("%d: got response %p", msgCtx.id, packet)
if err != nil {
return false, err
}
if l.Debug {
if err := addLDAPDescriptions(packet); err != nil {
return false, err
}
ber.PrintPacket(packet)
}
if packet.Children[1].Tag == ApplicationCompareResponse {
resultCode, resultDescription := getLDAPResultCode(packet)
if resultCode == LDAPResultCompareTrue {
return true, nil
} else if resultCode == LDAPResultCompareFalse {
return false, nil
} else {
return false, NewError(resultCode, errors.New(resultDescription))
}
}
return false, fmt.Errorf("Unexpected Response: %d", packet.Children[1].Tag)
}<|fim▁end|>
|
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//
// File contains Compare functionality
|
<|file_name|>test-load.js<|end_file_name|><|fim▁begin|>/*global describe, beforeEach, it*/
'use strict';<|fim▁hole|>var assert = require('assert');
describe('frontender generator', function () {
it('can be imported without blowing up', function () {
var app = require('../app');
assert(app !== undefined);
});
});<|fim▁end|>
| |
<|file_name|>de.js<|end_file_name|><|fim▁begin|>CKEDITOR.plugins.setLang( 'html5audio', 'de', {
button: 'HTML5 Audio einfügen',
title: 'HTML5 Audio',
infoLabel: 'Audio Infos',<|fim▁hole|> upload: 'Hochladen',
btnUpload: 'Zum Server senden',
advanced: 'Erweitert',
autoplay: 'Autoplay?',
allowdownload: 'Download zulassen?',
yes: 'Ja',
no: 'Nein'
} );<|fim▁end|>
|
urlMissing: 'Sie haben keine URL zur Audio-Datei angegeben.',
audioProperties: 'Audio-Einstellungen',
|
<|file_name|>scene_node.rs<|end_file_name|><|fim▁begin|>use std::rc::Rc;
use std::cell::{Ref, RefMut, RefCell};
use std::mem;
use std::path::{Path, PathBuf};
use na;
use na::{Iso3, Pnt2, Vec3, Pnt3, Transformation, Rotation, Translation, RotationWithTranslation};
use resource::{Mesh, MeshManager, Texture, TextureManager, Material, MaterialManager};
use ncollide_procedural::TriMesh3;
use ncollide_procedural as procedural;
use scene::Object;
use camera::Camera;
use light::Light;
// XXX: once something like `fn foo(self: Rc<RefCell<SceneNode>>)` is allowed, this extra struct
// will not be needed any more.
/// The data contained by a `SceneNode`.
pub struct SceneNodeData {
local_scale: Vec3<f32>,
local_transform: Iso3<f32>,
world_scale: Vec3<f32>,
world_transform: Iso3<f32>,
visible: bool,
up_to_date: bool,
children: Vec<SceneNode>,
object: Option<Object>,
// FIXME: use Weak pointers instead of the raw pointer.
parent: Option<*const RefCell<SceneNodeData>>
}
/// A node of the scene graph.
///
/// This may represent a group of other nodes, and/or contain an object that can be rendered.
#[derive(Clone)]
pub struct SceneNode {
data: Rc<RefCell<SceneNodeData>>,
}
impl SceneNodeData {
// XXX: Because `node.borrow_mut().parent = Some(self.data.downgrade())`
// causes a weird compiler error:
//
// ```
// error: mismatched types: expected `&std::cell::RefCell<scene::scene_node::SceneNodeData>`
// but found
// `std::option::Option<std::rc::Weak<std::cell::RefCell<scene::scene_node::SceneNodeData>>>`
// (expe cted &-ptr but found enum std::option::Option)
// ```
fn set_parent(&mut self, parent: *const RefCell<SceneNodeData>) {
self.parent = Some(parent);
}
// XXX: this exists because of a similar bug as `set_parent`.
fn remove_from_parent(&mut self, to_remove: &SceneNode) {
let _ = self.parent.as_ref().map(|p| {
unsafe {
let mut bp = (**p).borrow_mut();
bp.remove(to_remove)
}
});
}
fn remove(&mut self, o: &SceneNode) {
match self.children.iter().rposition(|e| &*o.data as *const RefCell<SceneNodeData> as usize ==
&*e.data as *const RefCell<SceneNodeData> as usize ) {
Some(i) => {
let _ = self.children.swap_remove(i);
},
None => { }
}
}
/// Whether this node contains an `Object`.
#[inline]
pub fn has_object(&self) -> bool {
self.object.is_some()
}
/// Whether this node has no parent.
#[inline]
pub fn is_root(&self) -> bool {
self.parent.is_none()
}
/// Render the scene graph rooted by this node.
pub fn render(&mut self, pass: usize, camera: &mut Camera, light: &Light) {
if self.visible {
self.do_render(&na::one(), &na::one(), pass, camera, light)
}
}
fn do_render(&mut self,
transform: &Iso3<f32>,
scale: &Vec3<f32>,
pass: usize,
camera: &mut Camera,
light: &Light) {
if !self.up_to_date {
self.up_to_date = true;
self.world_transform = *transform * self.local_transform;
self.world_scale = *scale * self.local_scale;
}
match self.object {
Some(ref o) => o.render(&self.world_transform, &self.world_scale, pass, camera, light),
None => { }
}
for c in self.children.iter_mut() {
let mut bc = c.data_mut();
if bc.visible {
bc.do_render(&self.world_transform, &self.world_scale, pass, camera, light)
}
}
}
/// A reference to the object possibly contained by this node.
#[inline]
pub fn object<'a>(&'a self) -> Option<&'a Object> {
self.object.as_ref()
}
/// A mutable reference to the object possibly contained by this node.
#[inline]
pub fn object_mut<'a>(&'a mut self) -> Option<&'a mut Object> {
self.object.as_mut()
}
/// A reference to the object possibly contained by this node.
///
/// # Failure
 /// Fails if this node does not contain an object.
#[inline]
pub fn get_object<'a>(&'a self) -> &'a Object {
self.object().expect("This scene node does not contain an Object.")
}
/// A mutable reference to the object possibly contained by this node.
///
/// # Failure
 /// Fails if this node does not contain an object.
#[inline]
pub fn get_object_mut<'a>(&'a mut self) -> &'a mut Object {
self.object_mut().expect("This scene node does not contain an Object.")
}
/* FIXME: the ~Any is kind of problematic here…
/// Attaches user-defined data to the objects contained by this node and its children.
#[inline]
pub fn set_user_data(&mut self, user_data: ~Any) {
self.apply_to_objects_mut(&mut |o| o.set_user_data(user_data))
}
*/
 // FIXME: for all those set_stuff, would it be more performant to add a special case for when
// we are on a leaf? (to avoid the call to a closure required by the apply_to_*).
/// Sets the material of the objects contained by this node and its children.
#[inline]
pub fn set_material(&mut self, material: Rc<RefCell<Box<Material + 'static>>>) {
self.apply_to_objects_mut(&mut |o| o.set_material(material.clone()))
}
/// Sets the material of the objects contained by this node and its children.
///
/// The material must already have been registered as `name`.
#[inline]
pub fn set_material_with_name(&mut self, name: &str) {
let material = MaterialManager::get_global_manager(|tm| tm.get(name).unwrap_or_else(
|| panic!("Invalid attempt to use the unregistered material: {}", name)));
self.set_material(material)
}
/// Sets the width of the lines drawn for the objects contained by this node and its children.
#[inline]
pub fn set_lines_width(&mut self, width: f32) {
self.apply_to_objects_mut(&mut |o| o.set_lines_width(width))
}
/// Sets the size of the points drawn for the objects contained by this node and its children.
#[inline]
pub fn set_points_size(&mut self, size: f32) {
self.apply_to_objects_mut(&mut |o| o.set_points_size(size))
}
/// Activates or deactivates the rendering of the surfaces of the objects contained by this node and its
/// children.
#[inline]
pub fn set_surface_rendering_activation(&mut self, active: bool) {
self.apply_to_objects_mut(&mut |o| o.set_surface_rendering_activation(active))
}
/// Activates or deactivates backface culling for the objects contained by this node and its
/// children.
#[inline]
pub fn enable_backface_culling(&mut self, active: bool) {
self.apply_to_objects_mut(&mut |o| o.enable_backface_culling(active))
}
/// Mutably accesses the vertices of the objects contained by this node and its children.
///
/// The provided closure is called once per object.
#[inline(always)]
pub fn modify_vertices<F: FnMut(&mut Vec<Pnt3<f32>>) -> ()>(&mut self, f: &mut F) {
self.apply_to_objects_mut(&mut |o| o.modify_vertices(f))
}
/// Accesses the vertices of the objects contained by this node and its children.
///
/// The provided closure is called once per object.
#[inline(always)]
pub fn read_vertices<F: FnMut(&[Pnt3<f32>]) -> ()>(&self, f: &mut F) {
self.apply_to_objects(&mut |o| o.read_vertices(f))
}
/// Recomputes the normals of the meshes of the objects contained by this node and its
/// children.
#[inline]
pub fn recompute_normals(&mut self) {
self.apply_to_objects_mut(&mut |o| o.recompute_normals())
}
/// Mutably accesses the normals of the objects contained by this node and its children.
///
/// The provided closure is called once per object.
#[inline(always)]
pub fn modify_normals<F: FnMut(&mut Vec<Vec3<f32>>) -> ()>(&mut self, f: &mut F) {
self.apply_to_objects_mut(&mut |o| o.modify_normals(f))
}
/// Accesses the normals of the objects contained by this node and its children.
///
/// The provided closure is called once per object.
#[inline(always)]
pub fn read_normals<F: FnMut(&[Vec3<f32>]) -> ()>(&self, f: &mut F) {
self.apply_to_objects(&mut |o| o.read_normals(f))
}
/// Mutably accesses the faces of the objects contained by this node and its children.
///
/// The provided closure is called once per object.
#[inline(always)]
pub fn modify_faces<F: FnMut(&mut Vec<Pnt3<u32>>) -> ()>(&mut self, f: &mut F) {
self.apply_to_objects_mut(&mut |o| o.modify_faces(f))
}
/// Accesses the faces of the objects contained by this node and its children.
///
/// The provided closure is called once per object.
#[inline(always)]
pub fn read_faces<F: FnMut(&[Pnt3<u32>]) -> ()>(&self, f: &mut F) {
self.apply_to_objects(&mut |o| o.read_faces(f))
}
/// Mutably accesses the texture coordinates of the objects contained by this node and its
/// children.
///
/// The provided closure is called once per object.
#[inline(always)]
pub fn modify_uvs<F: FnMut(&mut Vec<Pnt2<f32>>) -> ()>(&mut self, f: &mut F) {
self.apply_to_objects_mut(&mut |o| o.modify_uvs(f))
}
/// Accesses the texture coordinates of the objects contained by this node and its children.
///
/// The provided closure is called once per object.
#[inline(always)]
pub fn read_uvs<F: FnMut(&[Pnt2<f32>]) -> ()>(&self, f: &mut F) {
self.apply_to_objects(&mut |o| o.read_uvs(f))
}
/// Sets the color of the objects contained by this node and its children.
///
/// Colors components must be on the range `[0.0, 1.0]`.
#[inline]
pub fn set_color(&mut self, r: f32, g: f32, b: f32) {
self.apply_to_objects_mut(&mut |o| o.set_color(r, g, b))
}
/// Sets the texture of the objects contained by this node and its children.
///
/// The texture is loaded from a file and registered by the global `TextureManager`.
///
/// # Arguments
/// * `path` - relative path of the texture on the disk
#[inline]
pub fn set_texture_from_file(&mut self, path: &Path, name: &str) {
let texture = TextureManager::get_global_manager(|tm| tm.add(path, name));
self.set_texture(texture)
}
/// Sets the texture of the objects contained by this node and its children.
///
/// The texture must already have been registered as `name`.
#[inline]
pub fn set_texture_with_name(&mut self, name: &str) {
let texture = TextureManager::get_global_manager(|tm| tm.get(name).unwrap_or_else(
|| panic!("Invalid attempt to use the unregistered texture: {}", name)));
self.set_texture(texture)
}
/// Sets the texture of the objects contained by this node and its children.
pub fn set_texture(&mut self, texture: Rc<Texture>) {
self.apply_to_objects_mut(&mut |o| o.set_texture(texture.clone()))
}
/// Applies a closure to each object contained by this node and its children.
#[inline]
pub fn apply_to_objects_mut<F: FnMut(&mut Object) -> ()>(&mut self, f: &mut F) {
match self.object {
Some(ref mut o) => f(o),
None => { }
}
for c in self.children.iter_mut() {
c.data_mut().apply_to_objects_mut(f)
}
}
/// Applies a closure to each object contained by this node and its children.
#[inline]
pub fn apply_to_objects<F: FnMut(&Object) -> ()>(&self, f: &mut F) {
match self.object {
Some(ref o) => f(o),
None => { }
}
for c in self.children.iter() {
c.data().apply_to_objects(f)
}
}
// FIXME: add folding?
/// Sets the local scaling factors of the object.
#[inline]
pub fn set_local_scale(&mut self, sx: f32, sy: f32, sz: f32) {
self.invalidate();
self.local_scale = Vec3::new(sx, sy, sz)
}
/// Returns the scaling factors of the object.
#[inline]
pub fn local_scale(&self) -> Vec3<f32> {
self.local_scale
}
/// Move and orient the object such that it is placed at the point `eye` and have its `x` axis
/// oriented toward `at`.
#[inline]
pub fn look_at(&mut self, eye: &Pnt3<f32>, at: &Pnt3<f32>, up: &Vec3<f32>) {
self.invalidate();
// FIXME: multiply by the parent's world transform?
self.local_transform = Iso3::look_at(eye, at, up)
}
/// Move and orient the object such that it is placed at the point `eye` and have its `z` axis
/// oriented toward `at`.
#[inline]
pub fn look_at_z(&mut self, eye: &Pnt3<f32>, at: &Pnt3<f32>, up: &Vec3<f32>) {
self.invalidate();
// FIXME: multiply by the parent's world transform?
self.local_transform = Iso3::look_at_z(eye, at, up)
}
/// This node local transformation.
#[inline]
pub fn local_transformation(&self) -> Iso3<f32> {
self.local_transform.clone()
}
/// Inverse of this node local transformation.
#[inline]
pub fn inv_local_transformation(&self) -> Iso3<f32> {
self.local_transform.inv_transformation()
}
/// This node world transformation.
///
/// This will force an update of the world transformation of its parents if they have been
/// invalidated.
#[inline]
#[allow(mutable_transmutes)]
pub fn world_transformation(&self) -> Iso3<f32> {
// NOTE: this is to have some kind of laziness without a `&mut self`.
unsafe {
let mself: &mut SceneNodeData = mem::transmute(self);
mself.update();
}
self.world_transform.clone()
}
/// The inverse of this node world transformation.
///
/// This will force an update of the world transformation of its parents if they have been
/// invalidated.
#[inline]
#[allow(mutable_transmutes)]
pub fn inv_world_transformation(&self) -> Iso3<f32> {
// NOTE: this is to have some kind of laziness without a `&mut self`.
unsafe {
let mself: &mut SceneNodeData = mem::transmute(self);
mself.update();
}
self.local_transform.inv_transformation()
}
/// Appends a transformation to this node local transformation.
#[inline]
pub fn append_transformation(&mut self, t: &Iso3<f32>) {
self.invalidate();
self.local_transform.append_transformation_mut(t)
}
/// Prepends a transformation to this node local transformation.
#[inline]
pub fn prepend_to_local_transformation(&mut self, t: &Iso3<f32>) {
self.invalidate();
self.local_transform.prepend_transformation_mut(t)
}
/// Set this node local transformation.
#[inline]
pub fn set_local_transformation(&mut self, t: Iso3<f32>) {
self.invalidate();
self.local_transform.set_transformation(t)
}
/// This node local translation.
#[inline]
pub fn local_translation(&self) -> Vec3<f32> {
self.local_transform.translation()
}
/// The inverse of this node local translation.
#[inline]
pub fn inv_local_translation(&self) -> Vec3<f32> {
self.local_transform.inv_translation()
}
/// Appends a translation to this node local transformation.
#[inline]
pub fn append_translation(&mut self, t: &Vec3<f32>) {
self.invalidate();
self.local_transform.append_translation_mut(t)
}
/// Prepends a translation to this node local transformation.
#[inline]
pub fn prepend_to_local_translation(&mut self, t: &Vec3<f32>) {
self.invalidate();
self.local_transform.prepend_translation_mut(t)
}
/// Sets the local translation of this node.
#[inline]
pub fn set_local_translation(&mut self, t: Vec3<f32>) {
self.invalidate();
self.local_transform.set_translation(t)
}
/// This node local rotation.
#[inline]
pub fn local_rotation(&self) -> Vec3<f32> {
self.local_transform.rotation()
}
/// The inverse of this node local rotation.
#[inline]
pub fn inv_local_rotation(&self) -> Vec3<f32> {
self.local_transform.inv_rotation()
}
/// Appends a rotation to this node local transformation.
#[inline]
pub fn append_rotation(&mut self, r: &Vec3<f32>) {
self.invalidate();
self.local_transform.append_rotation_mut(r)
}
/// Appends a rotation to this node local transformation.
#[inline]
pub fn append_rotation_wrt_center(&mut self, r: &Vec3<f32>) {
self.invalidate();
self.local_transform.append_rotation_wrt_center_mut(r)
}
/// Prepends a rotation to this node local transformation.
#[inline]
pub fn prepend_to_local_rotation(&mut self, r: &Vec3<f32>) {
self.invalidate();
self.local_transform.prepend_rotation_mut(r)
}
/// Sets the local rotation of this node.
#[inline]
pub fn set_local_rotation(&mut self, r: Vec3<f32>) {
self.invalidate();
self.local_transform.set_rotation(r)
}
fn invalidate(&mut self) {
self.up_to_date = false;
for c in self.children.iter_mut() {
let mut dm = c.data_mut();
if dm.up_to_date {
dm.invalidate()
}
}
}
// FIXME: make this public?
fn update(&mut self) {
 // NOTE: checking this flag first avoids recomputing transforms that are already up to date.
if !self.up_to_date {
match self.parent {
Some(ref mut p) => {
unsafe {
let mut dp = (**p).borrow_mut();
dp.update();
self.world_transform = self.local_transform * dp.world_transform;
self.world_scale = self.local_scale * dp.local_scale;
self.up_to_date = true;
return;
}
},
None => { }
}
// no parent
self.world_transform = self.local_transform;
self.world_scale = self.local_scale;
self.up_to_date = true;
}
}
}
impl SceneNode {
/// Creates a new scene node that is not rooted.
pub fn new(local_scale: Vec3<f32>,
local_transform: Iso3<f32>,
object: Option<Object>)
-> SceneNode {
let data = SceneNodeData {
local_scale: local_scale,
local_transform: local_transform,
world_transform: local_transform,
world_scale: local_scale,
visible: true,
up_to_date: false,
children: Vec::new(),
object: object,
parent: None
};
SceneNode {
data: Rc::new(RefCell::new(data)),
}
}
/// Creates a new empty, not rooted, node with identity transformations.
pub fn new_empty() -> SceneNode {
SceneNode::new(na::one(), na::one(), None)
}
/// Removes this node from its parent.
pub fn unlink(&mut self) {
let self_self = self.clone();
self.data_mut().remove_from_parent(&self_self)
}
/// The data of this scene node.
pub fn data<'a>(&'a self) -> Ref<'a, SceneNodeData> {
self.data.borrow()
}
/// The data of this scene node.
pub fn data_mut<'a>(&'a mut self) -> RefMut<'a, SceneNodeData> {
self.data.borrow_mut()
}
/*
*
* Methods to add objects.
*
*/
 /// Adds a node without an object to this node's children.
pub fn add_group(&mut self) -> SceneNode {
let node = SceneNode::new_empty();
self.add_child(node.clone());
node
}
/// Adds a node as a child of `parent`.
///
/// # Failures:
/// Fails if `node` already has a parent.
pub fn add_child(&mut self, node: SceneNode) {
assert!(node.data().is_root(), "The added node must not have a parent yet.");
let mut node = node;
node.data_mut().set_parent(&*self.data);
self.data_mut().children.push(node)
}
 /// Adds a node containing an object to this node's children.
pub fn add_object(&mut self, local_scale: Vec3<f32>, local_transform: Iso3<f32>, object: Object) -> SceneNode {
let node = SceneNode::new(local_scale, local_transform, Some(object));
self.add_child(node.clone());
node
}
 /// Adds a cube as a child of this node. The cube is initially axis-aligned and centered
/// at (0, 0, 0).
///
/// # Arguments
 /// * `wx` - the cube extent along the x axis
/// * `wy` - the cube extent along the y axis
/// * `wz` - the cube extent along the z axis
pub fn add_cube(&mut self, wx: f32, wy: f32, wz: f32) -> SceneNode {
let res = self.add_geom_with_name("cube", Vec3::new(wx, wy, wz));
res.expect("Unable to load the default cube geometry.")
}
 /// Adds a sphere as a child of this node. The sphere is initially centered at (0, 0, 0).
///
/// # Arguments
/// * `r` - the sphere radius
pub fn add_sphere(&mut self, r: f32) -> SceneNode {
let res = self.add_geom_with_name("sphere", Vec3::new(r * 2.0, r * 2.0, r * 2.0));
res.expect("Unable to load the default sphere geometry.")
}
/// Adds a cone to the scene. The cone is initially centered at (0, 0, 0) and points toward the
/// positive `y` axis.
///
/// # Arguments
/// * `h` - the cone height
/// * `r` - the cone base radius
pub fn add_cone(&mut self, r: f32, h: f32) -> SceneNode {
let res = self.add_geom_with_name("cone", Vec3::new(r * 2.0, h, r * 2.0));
res.expect("Unable to load the default cone geometry.")
}
 /// Adds a cylinder to this node's children. The cylinder is initially centered at (0, 0, 0)
/// and has its principal axis aligned with the `y` axis.
///
/// # Arguments
/// * `h` - the cylinder height
/// * `r` - the cylinder base radius
pub fn add_cylinder(&mut self, r: f32, h: f32) -> SceneNode {
let res = self.add_geom_with_name("cylinder", Vec3::new(r * 2.0, h, r * 2.0));
res.expect("Unable to load the default cylinder geometry.")
}
 /// Adds a capsule to this node's children. The capsule is initially centered at (0, 0, 0) and
/// has its principal axis aligned with the `y` axis.
///
/// # Arguments
/// * `h` - the capsule height
/// * `r` - the capsule caps radius
pub fn add_capsule(&mut self, r: f32, h: f32) -> SceneNode {
self.add_trimesh(procedural::capsule(&(r * 2.0), &h, 50, 50), na::one())
}
 /// Adds a double-sided quad to this node's children. The quad is initially centered at (0, 0,
/// 0). The quad itself is composed of a user-defined number of triangles regularly spaced on a
/// grid. This is the main way to draw height maps.
///
/// # Arguments
/// * `w` - the quad width.
/// * `h` - the quad height.
 /// * `usubdivs` - number of horizontal subdivisions. This corresponds to the number of squares
 /// which will be placed horizontally on each line. Must not be `0`.
 /// * `vsubdivs` - number of vertical subdivisions. This corresponds to the number of squares
 /// which will be placed vertically on each line. Must not be `0`.
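 ///
 /// Illustrative usage sketch (not from the original source; assumes `root` is a `SceneNode`
 /// used as the scene root and the dimensions are placeholders):
 ///
 /// ```ignore
 /// let mut quad = root.add_quad(10.0, 10.0, 100, 100);
 /// quad.set_color(0.2, 0.6, 0.2);
 /// ```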
pub fn add_quad(&mut self, w: f32, h: f32, usubdivs: usize, vsubdivs: usize) -> SceneNode {
let mut node = self.add_trimesh(procedural::quad(w, h, usubdivs, vsubdivs), na::one());
node.enable_backface_culling(false);
node
}
/// Adds a double-sided quad with the specified vertices.
pub fn add_quad_with_vertices(&mut self, vertices: &[Pnt3<f32>], nhpoints: usize, nvpoints: usize) -> SceneNode {
let geom = procedural::quad_with_vertices(vertices, nhpoints, nvpoints);
let mut node = self.add_trimesh(geom, na::one());
node.enable_backface_culling(false);
node
}
/// Creates and adds a new object using the geometry registered as `geometry_name`.
pub fn add_geom_with_name(&mut self, geometry_name: &str, scale: Vec3<f32>) -> Option<SceneNode> {
MeshManager::get_global_manager(|mm| mm.get(geometry_name)).map(|g| self.add_mesh(g, scale))
}
 /// Creates and adds a new object to this node's children using a mesh.
pub fn add_mesh(&mut self, mesh: Rc<RefCell<Mesh>>, scale: Vec3<f32>) -> SceneNode {
let tex = TextureManager::get_global_manager(|tm| tm.get_default());
let mat = MaterialManager::get_global_manager(|mm| mm.get_default());
let object = Object::new(mesh, 1.0, 1.0, 1.0, tex, mat);
self.add_object(scale, na::one(), object)
}
/// Creates and adds a new object using a mesh descriptor.
pub fn add_trimesh(&mut self, descr: TriMesh3<f32>, scale: Vec3<f32>) -> SceneNode {
self.add_mesh(Rc::new(RefCell::new(Mesh::from_trimesh(descr, false))), scale)
}
/// Creates and adds multiple nodes created from an obj file.
///
/// This will create a new node serving as a root of the scene described by the obj file. This
/// newly created node is added to this node's children.
pub fn add_obj(&mut self, path: &Path, mtl_dir: &Path, scale: Vec3<f32>) -> SceneNode {
let tex = TextureManager::get_global_manager(|tm| tm.get_default());
let mat = MaterialManager::get_global_manager(|mm| mm.get_default());
// FIXME: is there some error-handling stuff to do here instead of the `let _`.
let result = MeshManager::load_obj(path, mtl_dir, path.to_str().unwrap()).map(|objs| {
let mut root;
let self_root = objs.len() == 1;
let child_scale;
if self_root {
root = self.clone();
child_scale = scale;
}
else {
root = SceneNode::new(scale, na::one(), None);
self.add_child(root.clone());
child_scale = na::one();
}
for (_, mesh, mtl) in objs.into_iter() {
let mut object = Object::new(mesh, 1.0, 1.0, 1.0, tex.clone(), mat.clone());
match mtl {
None => { },
Some(mtl) => {
object.set_color(mtl.diffuse.x, mtl.diffuse.y, mtl.diffuse.z);
for t in mtl.diffuse_texture.iter() {
let mut tpath = PathBuf::new();
tpath.push(mtl_dir);
tpath.push(&t[..]);
object.set_texture_from_file(&tpath, tpath.to_str().unwrap())
}
for t in mtl.ambiant_texture.iter() {
let mut tpath = PathBuf::new();
tpath.push(mtl_dir);
tpath.push(&t[..]);
object.set_texture_from_file(&tpath, tpath.to_str().unwrap())
}
}
}
let _ = root.add_object(child_scale, na::one(), object);
}
if self_root {
root.data().children.last().expect("There was nothing on this obj file.").clone()
}
else {
root
}
});
result.unwrap()
}
/// Applies a closure to each object contained by this node and its children.
#[inline]
pub fn apply_to_scene_nodes_mut<F: FnMut(&mut SceneNode) -> ()>(&mut self, f: &mut F) {
f(self);
for c in self.data_mut().children.iter_mut() {
c.apply_to_scene_nodes_mut(f)
}
}
/// Applies a closure to each object contained by this node and its children.
#[inline]
pub fn apply_to_scene_nodes<F: FnMut(&SceneNode) -> ()>(&self, f: &mut F) {
f(self);
for c in self.data().children.iter() {
c.apply_to_scene_nodes(f)
}
}
//
//
// fwd
//
//
/// Render the scene graph rooted by this node.
pub fn render(&mut self, pass: usize, camera: &mut Camera, light: &Light) {
self.data_mut().render(pass, camera, light)
}
/// Sets the material of the objects contained by this node and its children.
#[inline]
pub fn set_material(&mut self, material: Rc<RefCell<Box<Material + 'static>>>) {
self.data_mut().set_material(material)
}
/// Sets the material of the objects contained by this node and its children.
#[inline]
pub fn set_material_with_name(&mut self, name: &str) {
self.data_mut().set_material_with_name(name)
}
/// Sets the width of the lines drawn for the objects contained by this node and its children.
#[inline]
pub fn set_lines_width(&mut self, width: f32) {
self.data_mut().set_lines_width(width)
}
/// Sets the size of the points drawn for the objects contained by this node and its children.
#[inline]
pub fn set_points_size(&mut self, size: f32) {
self.data_mut().set_points_size(size)
}
/// Activates or deactivates the rendering of the surfaces of the objects contained by this node and its
/// children.
#[inline]
pub fn set_surface_rendering_activation(&mut self, active: bool) {
self.data_mut().set_surface_rendering_activation(active)
}
/// Activates or deactivates backface culling for the objects contained by this node and its
/// children.
#[inline]
pub fn enable_backface_culling(&mut self, active: bool) {
self.data_mut().enable_backface_culling(active)
}
/// Mutably accesses the vertices of the objects contained by this node and its children.
///
/// The provided closure is called once per object.
#[inline(always)]
pub fn modify_vertices<F: FnMut(&mut Vec<Pnt3<f32>>) -> ()>(&mut self, f: &mut F) {
self.data_mut().modify_vertices(f)
}
/// Accesses the vertices of the objects contained by this node and its children.
///
/// The provided closure is called once per object.
#[inline(always)]
pub fn read_vertices<F: FnMut(&[Pnt3<f32>]) -> ()>(&self, f: &mut F) {
self.data().read_vertices(f)
}
/// Recomputes the normals of the meshes of the objects contained by this node and its
/// children.
#[inline]
pub fn recompute_normals(&mut self) {
self.data_mut().recompute_normals()
}
/// Mutably accesses the normals of the objects contained by this node and its children.
///
/// The provided closure is called once per object.
#[inline(always)]
pub fn modify_normals<F: FnMut(&mut Vec<Vec3<f32>>) -> ()>(&mut self, f: &mut F) {
self.data_mut().modify_normals(f)
}
/// Accesses the normals of the objects contained by this node and its children.
///
/// The provided closure is called once per object.
#[inline(always)]
pub fn read_normals<F: FnMut(&[Vec3<f32>]) -> ()>(&self, f: &mut F) {
self.data().read_normals(f)
}
/// Mutably accesses the faces of the objects contained by this node and its children.
///
/// The provided closure is called once per object.
#[inline(always)]
pub fn modify_faces<F: FnMut(&mut Vec<Pnt3<u32>>) -> ()>(&mut self, f: &mut F) {
self.data_mut().modify_faces(f)
}
/// Accesses the faces of the objects contained by this node and its children.
///
/// The provided closure is called once per object.
#[inline(always)]
pub fn read_faces<F: FnMut(&[Pnt3<u32>]) -> ()>(&self, f: &mut F) {
self.data().read_faces(f)
}
/// Mutably accesses the texture coordinates of the objects contained by this node and its
/// children.
///
/// The provided closure is called once per object.
#[inline(always)]
pub fn modify_uvs<F: FnMut(&mut Vec<Pnt2<f32>>) -> ()>(&mut self, f: &mut F) {
self.data_mut().modify_uvs(f)
}
/// Accesses the texture coordinates of the objects contained by this node and its children.<|fim▁hole|> #[inline(always)]
pub fn read_uvs<F: FnMut(&[Pnt2<f32>]) -> ()>(&self, f: &mut F) {
self.data().read_uvs(f)
}
/// Sets the color of the objects contained by this node and its children.
///
/// Colors components must be on the range `[0.0, 1.0]`.
#[inline]
pub fn set_color(&mut self, r: f32, g: f32, b: f32) {
self.data_mut().set_color(r, g, b)
}
/// Sets the texture of the objects contained by this node and its children.
///
/// The texture is loaded from a file and registered by the global `TextureManager`.
///
/// # Arguments
/// * `path` - relative path of the texture on the disk
#[inline]
pub fn set_texture_from_file(&mut self, path: &Path, name: &str) {
self.data_mut().set_texture_from_file(path, name)
}
/// Sets the texture of the objects contained by this node and its children.
///
/// The texture must already have been registered as `name`.
#[inline]
pub fn set_texture_with_name(&mut self, name: &str) {
self.data_mut().set_texture_with_name(name)
}
/// Sets the texture of the objects contained by this node and its children.
pub fn set_texture(&mut self, texture: Rc<Texture>) {
self.data_mut().set_texture(texture)
}
/// Sets the local scaling factors of the object.
#[inline]
pub fn set_local_scale(&mut self, sx: f32, sy: f32, sz: f32) {
self.data_mut().set_local_scale(sx, sy, sz)
}
/// Move and orient the object such that it is placed at the point `eye` and have its `x` axis
/// oriented toward `at`.
#[inline]
pub fn look_at(&mut self, eye: &Pnt3<f32>, at: &Pnt3<f32>, up: &Vec3<f32>) {
self.data_mut().look_at(eye, at, up)
}
/// Move and orient the object such that it is placed at the point `eye` and have its `z` axis
/// oriented toward `at`.
#[inline]
pub fn look_at_z(&mut self, eye: &Pnt3<f32>, at: &Pnt3<f32>, up: &Vec3<f32>) {
self.data_mut().look_at_z(eye, at, up)
}
/// Appends a transformation to this node local transformation.
#[inline]
pub fn append_transformation(&mut self, t: &Iso3<f32>) {
self.data_mut().append_transformation(t)
}
/// Prepends a transformation to this node local transformation.
#[inline]
pub fn prepend_to_local_transformation(&mut self, t: &Iso3<f32>) {
self.data_mut().prepend_to_local_transformation(t)
}
/// Set this node local transformation.
#[inline]
pub fn set_local_transformation(&mut self, t: Iso3<f32>) {
self.data_mut().set_local_transformation(t)
}
/// Appends a translation to this node local transformation.
#[inline]
pub fn append_translation(&mut self, t: &Vec3<f32>) {
self.data_mut().append_translation(t)
}
/// Prepends a translation to this node local transformation.
#[inline]
pub fn prepend_to_local_translation(&mut self, t: &Vec3<f32>) {
self.data_mut().prepend_to_local_translation(t)
}
/// Sets the local translation of this node.
#[inline]
pub fn set_local_translation(&mut self, t: Vec3<f32>) {
self.data_mut().set_local_translation(t)
}
/// Appends a rotation to this node local transformation.
#[inline]
pub fn append_rotation(&mut self, r: &Vec3<f32>) {
self.data_mut().append_rotation(r)
}
/// Appends a rotation to this node local transformation.
#[inline]
pub fn append_rotation_wrt_center(&mut self, r: &Vec3<f32>) {
(*self.data_mut()).append_rotation_wrt_center(r)
}
/// Prepends a rotation to this node local transformation.
#[inline]
pub fn prepend_to_local_rotation(&mut self, r: &Vec3<f32>) {
self.data_mut().prepend_to_local_rotation(r)
}
/// Sets the local rotation of this node.
#[inline]
pub fn set_local_rotation(&mut self, r: Vec3<f32>) {
self.data_mut().set_local_rotation(r)
}
}<|fim▁end|>
|
///
/// The provided closure is called once per object.
|
<|file_name|>users_gpg_keys.go<|end_file_name|><|fim▁begin|>// Copyright 2016 The go-github AUTHORS. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package github
import (
"fmt"
"time"
)
// GPGKey represents a GitHub user's public GPG key used to verify GPG signed commits and tags.
//
// https://developer.github.com/changes/2016-04-04-git-signing-api-preview/
type GPGKey struct {
ID *int `json:"id,omitempty"`
PrimaryKeyID *int `json:"primary_key_id,omitempty"`
KeyID *string `json:"key_id,omitempty"`
PublicKey *string `json:"public_key,omitempty"`
Emails []GPGEmail `json:"emails,omitempty"`
Subkeys []GPGKey `json:"subkeys,omitempty"`
CanSign *bool `json:"can_sign,omitempty"`
CanEncryptComms *bool `json:"can_encrypt_comms,omitempty"`
CanEncryptStorage *bool `json:"can_encrypt_storage,omitempty"`
CanCertify *bool `json:"can_certify,omitempty"`
CreatedAt *time.Time `json:"created_at,omitempty"`
ExpiresAt *time.Time `json:"expires_at,omitempty"`
}
// String stringifies a GPGKey.
func (k GPGKey) String() string {
return Stringify(k)
}
// GPGEmail represents an email address associated to a GPG key.
type GPGEmail struct {
Email *string `json:"email,omitempty"`
Verified *bool `json:"verified,omitempty"`
}
// ListGPGKeys lists the current user's GPG keys. It requires authentication
// via Basic Auth or via OAuth with at least read:gpg_key scope.
//
// GitHub API docs: https://developer.github.com/v3/users/gpg_keys/#list-your-gpg-keys
func (s *UsersService) ListGPGKeys() ([]GPGKey, *Response, error) {
req, err := s.client.NewRequest("GET", "user/gpg_keys", nil)
if err != nil {
return nil, nil, err
}
// TODO: remove custom Accept header when this API fully launches.
req.Header.Set("Accept", mediaTypeGitSigningPreview)
var keys []GPGKey
resp, err := s.client.Do(req, &keys)
if err != nil {
return nil, resp, err
}
return keys, resp, err
}
// GetGPGKey gets extended details for a single GPG key. It requires authentication
// via Basic Auth or via OAuth with at least read:gpg_key scope.
//
// GitHub API docs: https://developer.github.com/v3/users/gpg_keys/#get-a-single-gpg-key
func (s *UsersService) GetGPGKey(id int) (*GPGKey, *Response, error) {
u := fmt.Sprintf("user/gpg_keys/%v", id)
req, err := s.client.NewRequest("GET", u, nil)
if err != nil {
return nil, nil, err<|fim▁hole|>
// TODO: remove custom Accept header when this API fully launches.
req.Header.Set("Accept", mediaTypeGitSigningPreview)
key := &GPGKey{}
resp, err := s.client.Do(req, key)
if err != nil {
return nil, resp, err
}
return key, resp, err
}
// CreateGPGKey creates a GPG key. It requires authentication via Basic Auth
// or OAuth with at least write:gpg_key scope.
//
// GitHub API docs: https://developer.github.com/v3/users/gpg_keys/#create-a-gpg-key
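//
// Illustrative usage sketch (not from the original source; assumes client is a configured
// *github.Client and armored holds an ASCII-armored public key string):
//
//	key, _, err := client.Users.CreateGPGKey(armored)
//	if err != nil {
//		// handle the API error
//	}
//	_ = key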
func (s *UsersService) CreateGPGKey(armoredPublicKey string) (*GPGKey, *Response, error) {
gpgKey := &struct {
ArmoredPublicKey *string `json:"armored_public_key,omitempty"`
}{
ArmoredPublicKey: String(armoredPublicKey),
}
req, err := s.client.NewRequest("POST", "user/gpg_keys", gpgKey)
if err != nil {
return nil, nil, err
}
// TODO: remove custom Accept header when this API fully launches.
req.Header.Set("Accept", mediaTypeGitSigningPreview)
key := &GPGKey{}
resp, err := s.client.Do(req, key)
if err != nil {
return nil, resp, err
}
return key, resp, err
}
// DeleteGPGKey deletes a GPG key. It requires authentication via Basic Auth or
// via OAuth with at least admin:gpg_key scope.
//
// GitHub API docs: https://developer.github.com/v3/users/gpg_keys/#delete-a-gpg-key
func (s *UsersService) DeleteGPGKey(id int) (*Response, error) {
u := fmt.Sprintf("user/gpg_keys/%v", id)
req, err := s.client.NewRequest("DELETE", u, nil)
if err != nil {
return nil, err
}
// TODO: remove custom Accept header when this API fully launches.
req.Header.Set("Accept", mediaTypeGitSigningPreview)
return s.client.Do(req, nil)
}<|fim▁end|>
|
}
|
<|file_name|>csi_attacher.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package csi
import (
"crypto/sha256"
"errors"
"fmt"
"strings"
"time"
"github.com/golang/glog"
"k8s.io/api/core/v1"
storage "k8s.io/api/storage/v1alpha1"
apierrs "k8s.io/apimachinery/pkg/api/errors"
meta "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/types"
"k8s.io/client-go/kubernetes"<|fim▁hole|>type csiAttacher struct {
plugin *csiPlugin
k8s kubernetes.Interface
waitSleepTime time.Duration
}
// volume.Attacher methods
var _ volume.Attacher = &csiAttacher{}
func (c *csiAttacher) Attach(spec *volume.Spec, nodeName types.NodeName) (string, error) {
if spec == nil {
glog.Error(log("attacher.Attach missing volume.Spec"))
return "", errors.New("missing spec")
}
csiSource, err := getCSISourceFromSpec(spec)
if err != nil {
glog.Error(log("attacher.Attach failed to get CSI persistent source: %v", err))
return "", err
}
node := string(nodeName)
pvName := spec.PersistentVolume.GetName()
attachID := getAttachmentName(csiSource.VolumeHandle, csiSource.Driver, node)
attachment := &storage.VolumeAttachment{
ObjectMeta: meta.ObjectMeta{
Name: attachID,
},
Spec: storage.VolumeAttachmentSpec{
NodeName: node,
Attacher: csiSource.Driver,
Source: storage.VolumeAttachmentSource{
PersistentVolumeName: &pvName,
},
},
Status: storage.VolumeAttachmentStatus{Attached: false},
}
_, err = c.k8s.StorageV1alpha1().VolumeAttachments().Create(attachment)
alreadyExist := false
if err != nil {
if !apierrs.IsAlreadyExists(err) {
glog.Error(log("attacher.Attach failed: %v", err))
return "", err
}
alreadyExist = true
}
if alreadyExist {
glog.V(4).Info(log("attachment [%v] for volume [%v] already exists (will not be recreated)", attachID, csiSource.VolumeHandle))
} else {
glog.V(4).Info(log("attachment [%v] for volume [%v] created successfully", attachID, csiSource.VolumeHandle))
}
// probe for attachment update here
// NOTE: any error from waiting for attachment is logged only. This is because
	// the primary intent of the enclosing method is to create VolumeAttachment.
	// Do not return that error here as it is mitigated in attacher.WaitForAttach.
volAttachmentOK := true
if _, err := c.waitForVolumeAttachment(csiSource.VolumeHandle, attachID, csiTimeout); err != nil {
volAttachmentOK = false
glog.Error(log("attacher.Attach attempted to wait for attachment to be ready, but failed with: %v", err))
}
glog.V(4).Info(log("attacher.Attach finished OK with VolumeAttachment verified=%t: attachment object [%s]", volAttachmentOK, attachID))
return attachID, nil
}
func (c *csiAttacher) WaitForAttach(spec *volume.Spec, attachID string, pod *v1.Pod, timeout time.Duration) (string, error) {
source, err := getCSISourceFromSpec(spec)
if err != nil {
glog.Error(log("attacher.WaitForAttach failed to extract CSI volume source: %v", err))
return "", err
}
return c.waitForVolumeAttachment(source.VolumeHandle, attachID, timeout)
}
func (c *csiAttacher) waitForVolumeAttachment(volumeHandle, attachID string, timeout time.Duration) (string, error) {
glog.V(4).Info(log("probing for updates from CSI driver for [attachment.ID=%v]", attachID))
ticker := time.NewTicker(c.waitSleepTime)
defer ticker.Stop()
timer := time.NewTimer(timeout) // TODO (vladimirvivien) investigate making this configurable
defer timer.Stop()
	// TODO (vladimirvivien) instead of polling the api-server, change to an api-server watch
for {
select {
case <-ticker.C:
glog.V(4).Info(log("probing VolumeAttachment [id=%v]", attachID))
attach, err := c.k8s.StorageV1alpha1().VolumeAttachments().Get(attachID, meta.GetOptions{})
if err != nil {
glog.Error(log("attacher.WaitForAttach failed (will continue to try): %v", err))
continue
}
// if being deleted, fail fast
if attach.GetDeletionTimestamp() != nil {
glog.Error(log("VolumeAttachment [%s] has deletion timestamp, will not continue to wait for attachment", attachID))
return "", errors.New("volume attachment is being deleted")
}
// attachment OK
if attach.Status.Attached {
return attachID, nil
}
// driver reports attach error
attachErr := attach.Status.AttachError
if attachErr != nil {
glog.Error(log("attachment for %v failed: %v", volumeHandle, attachErr.Message))
return "", errors.New(attachErr.Message)
}
case <-timer.C:
glog.Error(log("attacher.WaitForAttach timeout after %v [volume=%v; attachment.ID=%v]", timeout, volumeHandle, attachID))
return "", fmt.Errorf("attachment timeout for volume %v", volumeHandle)
}
}
}
func (c *csiAttacher) VolumesAreAttached(specs []*volume.Spec, nodeName types.NodeName) (map[*volume.Spec]bool, error) {
glog.V(4).Info(log("probing attachment status for %d volume(s) ", len(specs)))
attached := make(map[*volume.Spec]bool)
for _, spec := range specs {
if spec == nil {
glog.Error(log("attacher.VolumesAreAttached missing volume.Spec"))
return nil, errors.New("missing spec")
}
source, err := getCSISourceFromSpec(spec)
if err != nil {
glog.Error(log("attacher.VolumesAreAttached failed: %v", err))
continue
}
attachID := getAttachmentName(source.VolumeHandle, source.Driver, string(nodeName))
glog.V(4).Info(log("probing attachment status for VolumeAttachment %v", attachID))
attach, err := c.k8s.StorageV1alpha1().VolumeAttachments().Get(attachID, meta.GetOptions{})
if err != nil {
glog.Error(log("attacher.VolumesAreAttached failed for attach.ID=%v: %v", attachID, err))
continue
}
glog.V(4).Info(log("attacher.VolumesAreAttached attachment [%v] has status.attached=%t", attachID, attach.Status.Attached))
attached[spec] = attach.Status.Attached
}
return attached, nil
}
func (c *csiAttacher) GetDeviceMountPath(spec *volume.Spec) (string, error) {
glog.V(4).Info(log("attacher.GetDeviceMountPath is not implemented"))
return "", nil
}
func (c *csiAttacher) MountDevice(spec *volume.Spec, devicePath string, deviceMountPath string) error {
glog.V(4).Info(log("attacher.MountDevice is not implemented"))
return nil
}
var _ volume.Detacher = &csiAttacher{}
func (c *csiAttacher) Detach(volumeName string, nodeName types.NodeName) error {
// volumeName in format driverName<SEP>volumeHandle generated by plugin.GetVolumeName()
if volumeName == "" {
glog.Error(log("detacher.Detach missing value for parameter volumeName"))
return errors.New("missing exepected parameter volumeName")
}
parts := strings.Split(volumeName, volNameSep)
if len(parts) != 2 {
glog.Error(log("detacher.Detach insufficient info encoded in volumeName"))
return errors.New("volumeName missing expected data")
}
driverName := parts[0]
volID := parts[1]
attachID := getAttachmentName(volID, driverName, string(nodeName))
if err := c.k8s.StorageV1alpha1().VolumeAttachments().Delete(attachID, nil); err != nil {
glog.Error(log("detacher.Detach failed to delete VolumeAttachment [%s]: %v", attachID, err))
return err
}
glog.V(4).Info(log("detacher deleted ok VolumeAttachment.ID=%s", attachID))
return c.waitForVolumeDetachment(volID, attachID)
}
func (c *csiAttacher) waitForVolumeDetachment(volumeHandle, attachID string) error {
glog.V(4).Info(log("probing for updates from CSI driver for [attachment.ID=%v]", attachID))
ticker := time.NewTicker(c.waitSleepTime)
defer ticker.Stop()
timeout := c.waitSleepTime * 10
timer := time.NewTimer(timeout) // TODO (vladimirvivien) investigate making this configurable
defer timer.Stop()
	// TODO (vladimirvivien) instead of polling the api-server, change to an api-server watch
for {
select {
case <-ticker.C:
glog.V(4).Info(log("probing VolumeAttachment [id=%v]", attachID))
attach, err := c.k8s.StorageV1alpha1().VolumeAttachments().Get(attachID, meta.GetOptions{})
if err != nil {
if apierrs.IsNotFound(err) {
//object deleted or never existed, done
glog.V(4).Info(log("VolumeAttachment object [%v] for volume [%v] not found, object deleted", attachID, volumeHandle))
return nil
}
glog.Error(log("detacher.WaitForDetach failed for volume [%s] (will continue to try): %v", volumeHandle, err))
continue
}
// driver reports attach error
detachErr := attach.Status.DetachError
if detachErr != nil {
glog.Error(log("detachment for VolumeAttachment [%v] for volume [%s] failed: %v", attachID, volumeHandle, detachErr.Message))
return errors.New(detachErr.Message)
}
case <-timer.C:
glog.Error(log("detacher.WaitForDetach timeout after %v [volume=%v; attachment.ID=%v]", timeout, volumeHandle, attachID))
return fmt.Errorf("detachment timed out for volume %v", volumeHandle)
}
}
}
func (c *csiAttacher) UnmountDevice(deviceMountPath string) error {
glog.V(4).Info(log("detacher.UnmountDevice is not implemented"))
return nil
}
// getAttachmentName returns csi-<sha256(volName,csiDriverName,nodeName)>
func getAttachmentName(volName, csiDriverName, nodeName string) string {
result := sha256.Sum256([]byte(fmt.Sprintf("%s%s%s", volName, csiDriverName, nodeName)))
return fmt.Sprintf("csi-%x", result)
}<|fim▁end|>
|
"k8s.io/kubernetes/pkg/volume"
)
|
<|file_name|>rust_base58.rs<|end_file_name|><|fim▁begin|>extern crate rust_base58;
use errors::prelude::*;
use self::rust_base58::{FromBase58, ToBase58};
pub fn encode(doc: &[u8]) -> String {
doc.to_base58()
}
pub fn decode(doc: &str) -> Result<Vec<u8>, IndyError> {
doc.from_base58()<|fim▁hole|> .map_err(|err| err_msg(IndyErrorKind::InvalidStructure, format!("Invalid base58 sequence: {:}", err)))
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn encode_works() {
let result = encode(&[1, 2, 3]);
assert_eq!("Ldp", &result, "Got unexpected data");
}
#[test]
fn decode_works() {
let result = decode("Ldp");
assert!(result.is_ok(), "Got error");
assert_eq!(&[1, 2, 3], &result.unwrap()[..], "Get unexpected data");
}
}<|fim▁end|>
| |
<|file_name|>provider.py<|end_file_name|><|fim▁begin|># Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines the available providers."""
__author__ = '[email protected] (Jason Stredwick)'
class Provider(object):
"""Define available providers."""<|fim▁hole|> DATASTORE = 'datastore'
ISSUETRACKER = 'issuetracker'<|fim▁end|>
| |
<|file_name|>long_tests_client.rs<|end_file_name|><|fim▁begin|>use grpc::ClientStubExt;
use long_tests::long_tests_pb::*;
use long_tests::long_tests_pb_grpc::*;
use futures::executor;
use std::env;
fn single_num_arg_or(cmd_args: &[String], or: u64) -> u64 {
if cmd_args.len() == 0 {
or
} else if cmd_args.len() == 1 {
cmd_args[0].parse().expect("failed to parse as u64")
} else {
panic!("too many args");
}
}
fn run_echo(client: LongTestsClient, cmd_args: &[String]) {
let count = single_num_arg_or(cmd_args, 1);
println!("running {} iterations of echo", count);
for i in 0..count {
let payload = format!("payload {}", i);
let mut req = EchoRequest::new();
req.set_payload(payload.clone());
let r = executor::block_on(
client
.echo(grpc::RequestOptions::new(), req)
.drop_metadata(),
)
.expect("failed to get echo response");
assert!(payload == r.get_payload());
}
println!("done");
}
fn main() {<|fim▁hole|> panic!("too few args")
}
let client = LongTestsClient::new_plain("localhost", 23432, Default::default()).expect("init");
let cmd = &args[1];
let cmd_args = &args[2..];
if cmd == "echo" {
run_echo(client, cmd_args);
} else {
panic!("unknown command: {}", cmd);
}
}<|fim▁end|>
|
env_logger::init();
let args: Vec<String> = env::args().collect();
if args.len() < 2 {
|
<|file_name|>user_managers_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from grr.lib import access_control
from grr.lib import aff4
from grr.lib import rdfvalue
from grr.lib import test_lib
from grr.lib.aff4_objects import user_managers
class GRRUserTest(test_lib.AFF4ObjectTest):
def testUserPasswords(self):
with aff4.FACTORY.Create("aff4:/users/test", "GRRUser",
token=self.token) as user:
user.SetPassword("hello")
user = aff4.FACTORY.Open(user.urn, token=self.token)
self.assertFalse(user.CheckPassword("goodbye"))
self.assertTrue(user.CheckPassword("hello"))
def testLabels(self):
with aff4.FACTORY.Create("aff4:/users/test", "GRRUser",
token=self.token) as user:
user.SetLabels("hello", "world", owner="GRR")
user = aff4.FACTORY.Open(user.urn, token=self.token)
self.assertListEqual(["hello", "world"], user.GetLabelsNames())
class CheckAccessHelperTest(test_lib.AFF4ObjectTest):
def setUp(self):
super(CheckAccessHelperTest, self).setUp()
self.helper = user_managers.CheckAccessHelper("test")
self.subject = rdfvalue.RDFURN("aff4:/some/path")
def testReturnsFalseByDefault(self):
self.assertRaises(access_control.UnauthorizedAccess,
self.helper.CheckAccess, self.subject, self.token)
def testReturnsFalseOnFailedMatch(self):
self.helper.Allow("aff4:/some/otherpath")
self.assertRaises(access_control.UnauthorizedAccess,
self.helper.CheckAccess, self.subject, self.token)
def testReturnsTrueOnMatch(self):
self.helper.Allow("aff4:/some/path")
self.assertTrue(self.helper.CheckAccess(self.subject, self.token))
def testReturnsTrueIfOneMatchFails1(self):
self.helper.Allow("aff4:/some/otherpath")
self.helper.Allow("aff4:/some/path")
self.assertTrue(self.helper.CheckAccess(self.subject, self.token))
def testReturnsTrueIfOneMatchFails2(self):
self.helper.Allow("aff4:/some/path")
self.helper.Allow("aff4:/some/otherpath")
self.assertTrue(self.helper.CheckAccess(self.subject, self.token))
def testFnmatchFormatIsUsedByDefault1(self):
self.helper.Allow("aff4:/some/*")
self.assertTrue(self.helper.CheckAccess(self.subject, self.token))
def testFnmatchFormatIsUsedByDefault2(self):
self.helper.Allow("aff4:/some*")
self.assertTrue(self.helper.CheckAccess(self.subject, self.token))
def testFnmatchPatternCorrectlyMatchesFilesBelowDirectory(self):
self.helper.Allow("aff4:/some/*")
self.assertTrue(self.helper.CheckAccess(self.subject, self.token))
self.assertRaises(access_control.UnauthorizedAccess,
self.helper.CheckAccess,
rdfvalue.RDFURN("aff4:/some"), self.token)
def testCustomCheckWorksCorrectly(self):
def CustomCheck(unused_subject, unused_token):
return True
self.helper.Allow("aff4:/some/path", CustomCheck)
self.assertTrue(self.helper.CheckAccess(self.subject, self.token))
def testCustomCheckFailsCorrectly(self):
def CustomCheck(unused_subject, unused_token):
raise access_control.UnauthorizedAccess("Problem")
self.helper.Allow("aff4:/some/path", CustomCheck)
self.assertRaises(access_control.UnauthorizedAccess,
self.helper.CheckAccess, self.subject, self.token)
def testCustomCheckAcceptsAdditionalArguments(self):
def CustomCheck(subject, unused_token, another_subject):
if subject == another_subject:
return True
else:
raise access_control.UnauthorizedAccess("Problem")
self.helper.Allow("aff4:/*", CustomCheck, self.subject)
self.assertRaises(access_control.UnauthorizedAccess,
self.helper.CheckAccess,
rdfvalue.RDFURN("aff4:/some/other/path"),
self.token)
self.assertTrue(self.helper.CheckAccess(self.subject, self.token))
def Ok(self, subject, access="r"):
self.assertTrue(
self.access_manager.CheckDataStoreAccess(self.token, [subject], access))<|fim▁hole|>
def NotOk(self, subject, access="r"):
self.assertRaises(
access_control.UnauthorizedAccess,
self.access_manager.CheckDataStoreAccess,
self.token, [subject], access)
def testReadSomePaths(self):
"""Tests some real world paths."""
self.access_manager = user_managers.FullAccessControlManager()
access = "r"
self.Ok("aff4:/", access)
self.Ok("aff4:/users", access)
self.NotOk("aff4:/users/randomuser", access)
self.Ok("aff4:/blobs", access)
self.Ok("aff4:/blobs/12345678", access)
self.Ok("aff4:/FP", access)
self.Ok("aff4:/FP/12345678", access)
self.Ok("aff4:/files", access)
self.Ok("aff4:/files/12345678", access)
self.Ok("aff4:/ACL", access)
self.Ok("aff4:/ACL/randomuser", access)
self.Ok("aff4:/stats", access)
self.Ok("aff4:/stats/FileStoreStats", access)
self.Ok("aff4:/config", access)
self.Ok("aff4:/config/drivers", access)
self.Ok("aff4:/config/drivers/windows/memory/winpmem.amd64.sys", access)
self.Ok("aff4:/flows", access)
self.Ok("aff4:/flows/W:12345678", access)
self.Ok("aff4:/hunts", access)
self.Ok("aff4:/hunts/W:12345678/C.1234567890123456", access)
self.Ok("aff4:/hunts/W:12345678/C.1234567890123456/W:AAAAAAAA", access)
self.Ok("aff4:/cron", access)
self.Ok("aff4:/cron/OSBreakDown", access)
self.Ok("aff4:/crashes", access)
self.Ok("aff4:/crashes/Stream", access)
self.Ok("aff4:/audit", access)
self.Ok("aff4:/audit/log", access)
self.Ok("aff4:/C.0000000000000001", access)
self.NotOk("aff4:/C.0000000000000001/fs/os", access)
self.NotOk("aff4:/C.0000000000000001/flows/W:12345678", access)
self.Ok("aff4:/tmp", access)
self.Ok("aff4:/tmp/C8FAFC0F", access)
def testQuerySomePaths(self):
"""Tests some real world paths."""
self.access_manager = user_managers.FullAccessControlManager()
access = "rq"
self.NotOk("aff4:/", access)
self.NotOk("aff4:/users", access)
self.NotOk("aff4:/users/randomuser", access)
self.NotOk("aff4:/blobs", access)
self.NotOk("aff4:/FP", access)
self.NotOk("aff4:/files", access)
self.Ok("aff4:/files/hash/generic/sha256/" + "a" * 64, access)
self.Ok("aff4:/ACL", access)
self.Ok("aff4:/ACL/randomuser", access)
self.NotOk("aff4:/stats", access)
self.Ok("aff4:/config", access)
self.Ok("aff4:/config/drivers", access)
self.Ok("aff4:/config/drivers/windows/memory/winpmem.amd64.sys", access)
self.NotOk("aff4:/flows", access)
self.Ok("aff4:/flows/W:12345678", access)
self.Ok("aff4:/hunts", access)
self.Ok("aff4:/hunts/W:12345678/C.1234567890123456", access)
self.Ok("aff4:/hunts/W:12345678/C.1234567890123456/W:AAAAAAAA", access)
self.Ok("aff4:/cron", access)
self.Ok("aff4:/cron/OSBreakDown", access)
self.NotOk("aff4:/crashes", access)
self.NotOk("aff4:/audit", access)
self.Ok("aff4:/C.0000000000000001", access)
self.NotOk("aff4:/C.0000000000000001/fs/os", access)
self.NotOk("aff4:/C.0000000000000001/flows", access)
self.NotOk("aff4:/tmp", access)<|fim▁end|>
| |
<|file_name|>index_jsp.java<|end_file_name|><|fim▁begin|>/*
* Generated by the Jasper component of Apache Tomcat
* Version: Apache Tomcat/8.0.15
* Generated at: 2017-09-18 08:01:06 UTC
* Note: The last modified time of this file was set to
* the last modified time of the source file after
* generation to assist with modification tracking.
*/
package org.apache.jsp;
import javax.servlet.*;
import javax.servlet.http.*;
import javax.servlet.jsp.*;
public final class index_jsp extends org.apache.jasper.runtime.HttpJspBase
implements org.apache.jasper.runtime.JspSourceDependent {
private static final javax.servlet.jsp.JspFactory _jspxFactory =
javax.servlet.jsp.JspFactory.getDefaultFactory();
private static java.util.Map<java.lang.String,java.lang.Long> _jspx_dependants;
private javax.el.ExpressionFactory _el_expressionfactory;
private org.apache.tomcat.InstanceManager _jsp_instancemanager;
public java.util.Map<java.lang.String,java.lang.Long> getDependants() {
return _jspx_dependants;
}
public void _jspInit() {
_el_expressionfactory = _jspxFactory.getJspApplicationContext(getServletConfig().getServletContext()).getExpressionFactory();
_jsp_instancemanager = org.apache.jasper.runtime.InstanceManagerFactory.getInstanceManager(getServletConfig());
}
public void _jspDestroy() {
}
public void _jspService(final javax.servlet.http.HttpServletRequest request, final javax.servlet.http.HttpServletResponse response)
throws java.io.IOException, javax.servlet.ServletException {
final java.lang.String _jspx_method = request.getMethod();
if (!"GET".equals(_jspx_method) && !"POST".equals(_jspx_method) && !"HEAD".equals(_jspx_method) && !javax.servlet.DispatcherType.ERROR.equals(request.getDispatcherType())) {
response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, "JSPs only permit GET POST or HEAD");
return;
}
final javax.servlet.jsp.PageContext pageContext;
javax.servlet.http.HttpSession session = null;
final javax.servlet.ServletContext application;
final javax.servlet.ServletConfig config;<|fim▁hole|> javax.servlet.jsp.JspWriter out = null;
final java.lang.Object page = this;
javax.servlet.jsp.JspWriter _jspx_out = null;
javax.servlet.jsp.PageContext _jspx_page_context = null;
try {
response.setContentType("text/html");
pageContext = _jspxFactory.getPageContext(this, request, response,
null, true, 8192, true);
_jspx_page_context = pageContext;
application = pageContext.getServletContext();
config = pageContext.getServletConfig();
session = pageContext.getSession();
out = pageContext.getOut();
_jspx_out = out;
if (true) {
_jspx_page_context.forward("index");
return;
}
out.write('\n');
} catch (java.lang.Throwable t) {
if (!(t instanceof javax.servlet.jsp.SkipPageException)){
out = _jspx_out;
if (out != null && out.getBufferSize() != 0)
try {
if (response.isCommitted()) {
out.flush();
} else {
out.clearBuffer();
}
} catch (java.io.IOException e) {}
if (_jspx_page_context != null) _jspx_page_context.handlePageException(t);
else throw new ServletException(t);
}
} finally {
_jspxFactory.releasePageContext(_jspx_page_context);
}
}
}<|fim▁end|>
| |
<|file_name|>schemas.py<|end_file_name|><|fim▁begin|>"""pblite message schemas and related enums."""
# Stop pylint from complaining about enums:
# pylint: disable=too-few-public-methods
import enum
from hangups.pblite import Message, Field, RepeatedField, EnumField
##############################################################################
# Enums
##############################################################################
class TypingStatus(enum.Enum):
"""Typing statuses."""
TYPING = 1 # The user started typing
PAUSED = 2 # The user stopped typing with inputted text
STOPPED = 3 # The user stopped typing with no inputted text
class FocusStatus(enum.Enum):
"""Focus statuses."""
FOCUSED = 1
UNFOCUSED = 2
class FocusDevice(enum.Enum):
"""Focus devices."""
DESKTOP = 20
MOBILE = 300
UNSPECIFIED = None
class ConversationType(enum.Enum):
"""Conversation type."""
STICKY_ONE_TO_ONE = 1
GROUP = 2
class ClientConversationView(enum.Enum):
"""Conversation view."""
UNKNOWN_CONVERSATION_VIEW = 0
INBOX_VIEW = 1
ARCHIVED_VIEW = 2
class ClientNotificationLevel(enum.Enum):
"""Notification level."""
UNKNOWN = None
QUIET = 10
RING = 30
class ClientConversationStatus(enum.Enum):
"""Conversation status."""
UNKNOWN_CONVERSATION_STATUS = 0
INVITED = 1
ACTIVE = 2
LEFT = 3
class SegmentType(enum.Enum):
"""Message content segment type."""
TEXT = 0
LINE_BREAK = 1
LINK = 2
class MembershipChangeType(enum.Enum):
"""Conversation membership change type."""
JOIN = 1
LEAVE = 2
class ClientHangoutEventType(enum.Enum):
"""Hangout event type."""
# Not sure all of these are correct
START_HANGOUT = 1
END_HANGOUT = 2
JOIN_HANGOUT = 3
LEAVE_HANGOUT = 4
HANGOUT_COMING_SOON = 5
ONGOING_HANGOUT = 6
class OffTheRecordStatus(enum.Enum):
"""Off-the-record status."""
OFF_THE_RECORD = 1
ON_THE_RECORD = 2
UNKNOWN = None
class ClientOffTheRecordToggle(enum.Enum):
"""Off-the-record toggle status."""
ENABLED = 0
DISABLED = 1
class ActiveClientState(enum.Enum):
"""Active client state."""
NO_ACTIVE_CLIENT = 0
IS_ACTIVE_CLIENT = 1
OTHER_CLIENT_IS_ACTIVE = 2
##############################################################################
# pblite Messages
##############################################################################
CONVERSATION_ID = Message(
('id_', Field()),
)
USER_ID = Message(
('gaia_id', Field()),
('chat_id', Field()),
)
OPTIONAL_USER_ID = Message(
('gaia_id', Field()),
('chat_id', Field()),
is_optional=True,
)
CLIENT_SET_TYPING_NOTIFICATION = Message(
('conversation_id', CONVERSATION_ID),
('user_id', USER_ID),
('timestamp', Field()),
('status', EnumField(TypingStatus)),
is_optional=True,
)
CLIENT_SET_FOCUS_NOTIFICATION = Message(
('conversation_id', CONVERSATION_ID),
('user_id', USER_ID),
('timestamp', Field()),
('status', EnumField(FocusStatus)),
('device', EnumField(FocusDevice)),
is_optional=True,
)
CLIENT_CONVERSATION = Message(
('conversation_id', CONVERSATION_ID),
('type_', EnumField(ConversationType)),
('name', Field(is_optional=True)),
('self_conversation_state', Message(
(None, Field(is_optional=True)),
(None, Field(is_optional=True)),
(None, Field(is_optional=True)),
(None, Field(is_optional=True)),
(None, Field(is_optional=True)),
(None, Field(is_optional=True)),
('self_read_state', Message(
('participant_id', USER_ID),
('latest_read_timestamp', Field()),
)),
('status', EnumField(ClientConversationStatus)),
('notification_level', EnumField(ClientNotificationLevel)),
('view', RepeatedField(
EnumField(ClientConversationView)
)),
('inviter_id', USER_ID),
('invite_timestamp', Field()),
('sort_timestamp', Field(is_optional=True)),
('active_timestamp', Field(is_optional=True)),
(None, Field(is_optional=True)),
(None, Field(is_optional=True)),
(None, Field()),
(None, Field()),
)),
(None, Field()),
(None, Field()),
(None, Field(is_optional=True)),
('read_state', RepeatedField(
Message(
('participant_id', USER_ID),
('last_read_timestamp', Field()),
)
)),
(None, Field()),
('otr_status', EnumField(OffTheRecordStatus)),
(None, Field()),
(None, Field()),
('current_participant', RepeatedField(USER_ID)),
('participant_data', RepeatedField(
Message(
('id_', USER_ID),
('fallback_name', Field(is_optional=True)),
(None, Field(is_optional=True)),
)
)),
(None, Field(is_optional=True)),
(None, Field(is_optional=True)),
(None, Field(is_optional=True)),
(None, Field()),
(None, Field()),
is_optional=True,
)
MESSAGE_SEGMENT = Message(
('type_', EnumField(SegmentType)),
('text', Field(is_optional=True)), # Can be None for linebreaks
('formatting', Message(
('bold', Field(is_optional=True)),
('italic', Field(is_optional=True)),
('strikethrough', Field(is_optional=True)),
('underline', Field(is_optional=True)),
is_optional=True,
)),
('link_data', Message(
('link_target', Field(is_optional=True)),
is_optional=True,
)),
)
MESSAGE_ATTACHMENT = Message(
('embed_item', Message(
# 249 (PLUS_PHOTO), 340, 335, 0
('type_', RepeatedField(Field())),
('data_', Field()),
('data', Field(is_optional=True)), # can be a dict
)),
)
CLIENT_CHAT_MESSAGE = Message(
(None, Field(is_optional=True)), # always None?
('annotation', RepeatedField(Field(), is_optional=True)),
('message_content', Message(
('segment', RepeatedField(MESSAGE_SEGMENT, is_optional=True)),
('attachment', RepeatedField(MESSAGE_ATTACHMENT, is_optional=True)),
)),
is_optional=True,
)
CLIENT_CONVERSATION_RENAME = Message(
('new_name', Field()),
('old_name', Field()),
is_optional=True,
)
CLIENT_HANGOUT_EVENT = Message(
('event_type', EnumField(ClientHangoutEventType)),
('participant_id', RepeatedField(USER_ID)),
('hangout_duration_secs', Field(is_optional=True)),
('transferred_conversation_id', Field(is_optional=True)), # always None?
('refresh_timeout_secs', Field(is_optional=True)),
('is_periodic_refresh', Field(is_optional=True)),
(None, Field(is_optional=True)), # always 1?
is_optional=True,
)
CLIENT_OTR_MODIFICATION = Message(
('old_otr_status', EnumField(OffTheRecordStatus)),
('new_otr_status', EnumField(OffTheRecordStatus)),
('old_otr_toggle', EnumField(ClientOffTheRecordToggle)),
('new_otr_toggle', EnumField(ClientOffTheRecordToggle)),
is_optional=True,
)
CLIENT_MEMBERSHIP_CHANGE = Message(
('type_', EnumField(MembershipChangeType)),
(None, RepeatedField(Field())),
('participant_ids', RepeatedField(USER_ID)),
(None, Field()),
is_optional=True,
)
CLIENT_EVENT = Message(
('conversation_id', CONVERSATION_ID),
('sender_id', OPTIONAL_USER_ID),
('timestamp', Field()),
('self_event_state', Message(
('user_id', USER_ID),
('client_generated_id', Field(is_optional=True)),
('notification_level', EnumField(ClientNotificationLevel)),
is_optional=True,
)),
(None, Field(is_optional=True)), # always None?
(None, Field(is_optional=True)), # always 0? (expiration_timestamp?)
('chat_message', CLIENT_CHAT_MESSAGE),
(None, Field(is_optional=True)), # always None?<|fim▁hole|> ('advances_sort_timestamp', Field(is_optional=True)),
('otr_modification', CLIENT_OTR_MODIFICATION),
(None, Field(is_optional=True)), # 0, 1 or None? related to notifications?
('event_otr', EnumField(OffTheRecordStatus)),
(None, Field()), # always 1? (advances_sort_timestamp?)
)
CLIENT_EVENT_NOTIFICATION = Message(
('event', CLIENT_EVENT),
is_optional=True,
)
CLIENT_WATERMARK_NOTIFICATION = Message(
('participant_id', USER_ID),
('conversation_id', CONVERSATION_ID),
('latest_read_timestamp', Field()),
is_optional=True,
)
CLIENT_STATE_UPDATE_HEADER = Message(
('active_client_state', EnumField(ActiveClientState)),
(None, Field(is_optional=True)),
('request_trace_id', Field()),
(None, Field(is_optional=True)),
('current_server_time', Field()),
(None, Field(is_optional=True)),
(None, Field(is_optional=True)),
# optional ID of the client causing the update?
(None, Field(is_optional=True)),
)
CLIENT_STATE_UPDATE = Message(
('state_update_header', CLIENT_STATE_UPDATE_HEADER),
('conversation_notification', Field(is_optional=True)), # always None?
('event_notification', CLIENT_EVENT_NOTIFICATION),
('focus_notification', CLIENT_SET_FOCUS_NOTIFICATION),
('typing_notification', CLIENT_SET_TYPING_NOTIFICATION),
('notification_level_notification', Field(is_optional=True)),
('reply_to_invite_notification', Field(is_optional=True)),
('watermark_notification', CLIENT_WATERMARK_NOTIFICATION),
(None, Field(is_optional=True)),
('settings_notification', Field(is_optional=True)),
('view_modification', Field(is_optional=True)),
('easter_egg_notification', Field(is_optional=True)),
('client_conversation', CLIENT_CONVERSATION),
('self_presence_notification', Field(is_optional=True)),
('delete_notification', Field(is_optional=True)),
('presence_notification', Field(is_optional=True)),
('block_notification', Field(is_optional=True)),
('invitation_watermark_notification', Field(is_optional=True)),
)
CLIENT_EVENT_CONTINUATION_TOKEN = Message(
('event_id', Field(is_optional=True)),
('storage_continuation_token', Field()),
('event_timestamp', Field()),
is_optional=True,
)
CLIENT_CONVERSATION_STATE = Message(
('conversation_id', CONVERSATION_ID),
('conversation', CLIENT_CONVERSATION),
('event', RepeatedField(CLIENT_EVENT)),
(None, Field(is_optional=True)),
('event_continuation_token', CLIENT_EVENT_CONTINUATION_TOKEN),
(None, Field(is_optional=True)),
(None, RepeatedField(Field())),
)
CLIENT_CONVERSATION_STATE_LIST = RepeatedField(CLIENT_CONVERSATION_STATE)
CLIENT_ENTITY = Message(
(None, Field(is_optional=True)),
(None, Field(is_optional=True)),
(None, Field(is_optional=True)),
(None, Field(is_optional=True)),
(None, Field(is_optional=True)),
(None, Field(is_optional=True)),
(None, Field(is_optional=True)),
(None, Field(is_optional=True)),
('id_', USER_ID),
('properties', Message(
('type_', Field(is_optional=True)), # 0, 1, or None
('display_name', Field(is_optional=True)),
('first_name', Field(is_optional=True)),
('photo_url', Field(is_optional=True)),
('emails', RepeatedField(Field())),
)),
)
ENTITY_GROUP = Message(
(None, Field()), # always 0?
(None, Field()), # some sort of ID
('entity', RepeatedField(Message(
('entity', CLIENT_ENTITY),
(None, Field()), # always 0?
))),
)
INITIAL_CLIENT_ENTITIES = Message(
(None, Field()), # 'cgserp'
(None, Field()), # a header
('entities', RepeatedField(CLIENT_ENTITY)),
(None, Field(is_optional=True)), # always None?
('group1', ENTITY_GROUP),
('group2', ENTITY_GROUP),
('group3', ENTITY_GROUP),
('group4', ENTITY_GROUP),
('group5', ENTITY_GROUP),
)
CLIENT_GET_SELF_INFO_RESPONSE = Message(
(None, Field()), # 'cgsirp'
(None, Field()), # response header
('self_entity', CLIENT_ENTITY),
)
CLIENT_RESPONSE_HEADER = Message(
('status', Field()), # 1 => success
(None, Field(is_optional=True)),
(None, Field(is_optional=True)),
('request_trace_id', Field()),
('current_server_time', Field()),
)
CLIENT_SYNC_ALL_NEW_EVENTS_RESPONSE = Message(
(None, Field()), # 'csanerp'
('response_header', CLIENT_RESPONSE_HEADER),
('sync_timestamp', Field()),
('conversation_state', RepeatedField(CLIENT_CONVERSATION_STATE)),
)
CLIENT_GET_CONVERSATION_RESPONSE = Message(
(None, Field()), # 'cgcrp'
('response_header', CLIENT_RESPONSE_HEADER),
('conversation_state', CLIENT_CONVERSATION_STATE),
)
CLIENT_GET_ENTITY_BY_ID_RESPONSE = Message(
(None, Field()), # 'cgebirp'
('response_header', CLIENT_RESPONSE_HEADER),
('entities', RepeatedField(CLIENT_ENTITY)),
)<|fim▁end|>
|
('membership_change', CLIENT_MEMBERSHIP_CHANGE),
('conversation_rename', CLIENT_CONVERSATION_RENAME),
('hangout_event', CLIENT_HANGOUT_EVENT),
('event_id', Field(is_optional=True)),
|
<|file_name|>plain.rs<|end_file_name|><|fim▁begin|>use text::style::{Style, StyleCommand, Color, PaletteColor};
use std::fmt::{self, Display};
use std::str::FromStr;
use serde::de::{Deserializer, Deserialize, Error, Visitor};
use serde::{Serializer, Serialize};
use std::iter::Peekable;
use std::slice;
// [PlainBuf] Overhead: 48 bytes for collections, 4 bytes per descriptor, random access
// [Encoded] Overhead: 24 bytes for collections, 2 to 12 bytes per descriptor
/// A buffer storing an unstyled string annotated with styles in a descriptor buffer.
#[derive(Debug)]
pub struct PlainBuf {
/// The unstyled string
string: String,
/// The descriptors holding the styles.
descriptors: Vec<(u8, Style)>
}
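// Informal sketch of the layout (names are illustrative): after
// `buf.push("Hello", style_a)` and `buf.push("!", style_b)`, the buffer holds
// `string = "Hello!"` and `descriptors = [(5, style_a), (1, style_b)]`;
// iteration re-slices the string by those byte lengths to recover (&str, Style) pairs.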
impl PlainBuf {
/// Creates a new, empty PlainBuf.
pub fn new() -> Self {
PlainBuf {
string: String::new(),
descriptors: Vec::new()
}
}
/// Creates a new PlainBuf where each buffer has a certain capacity.
pub fn with_capacity(string: usize, descriptors: usize) -> Self {
PlainBuf {
string: String::with_capacity(string),
descriptors: Vec::with_capacity(descriptors)
}
}
pub fn capacity(&self) -> (usize, usize) {
(self.string.capacity(), self.descriptors.capacity())
}
pub fn reserve(&mut self, string: usize, descriptors: usize) {
self.string.reserve(string);
        self.descriptors.reserve(descriptors);
}
pub fn reserve_exact(&mut self, string: usize, descriptors: usize) {
self.string.reserve_exact(string);
        self.descriptors.reserve_exact(descriptors);
}
pub fn shrink_to_fit(&mut self) {
self.string.shrink_to_fit();
self.descriptors.shrink_to_fit();
}
pub fn push(&mut self, string: &str, style: Style) {
self.string.push_str(string);
self.break_descriptor(string.len(), style);
}
    // Splits the last `len` bytes into descriptors of at most 255 bytes, each ending on a char boundary.
    fn break_descriptor(&mut self, len: usize, style: Style) {
let total_len = self.string.len();
let mut string = &self.string[total_len - len ..];
while string.len() > 0 {
let mut chunk_len = u8::max_value();
while !string.is_char_boundary(chunk_len as usize) {
chunk_len -= 1;
}
self.descriptors.push((chunk_len, style));
string = &string[chunk_len as usize ..];
}
}
pub fn unstyled(&self) -> &str {
&self.string
}
pub fn iter(&self) -> Iter {
Iter {
head: &self.string,
descriptors: self.descriptors.iter().peekable()
}
}
// TODO: Pop, Truncate
}
impl FromStr for PlainBuf {
type Err = ();
fn from_str(s: &str) -> Result<Self, ()> {
let mut reader = FormatReader::new();
reader.append(s);
Ok(reader.finish())
}
}
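// Usage sketch (assuming the '§'-prefixed code mapping implemented by
// `StyleCommand::from_code`): `"§ltext".parse::<PlainBuf>()` consumes the
// marker and code and stores "text" with the corresponding style; the
// `Display` impl below re-emits the codes on style transitions.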
impl Display for PlainBuf {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut writer = FormatWriter::new(f);
let mut head = &self.string as &str;
let mut offset = 0;
for &(len, style) in &self.descriptors {
let (part, rest) = head.split_at(len as usize);
head = rest;
writer.write(part, style)?;
}
Ok(())
}
}
pub struct Iter<'a, 'b> {
head: &'a str,
descriptors: Peekable<slice::Iter<'b, (u8, Style)>>
}
impl<'a, 'b> Iter<'a, 'b> {
pub fn empty() -> Self {
Iter {
head: "",
descriptors: [].iter().peekable()
}
}
}
impl<'a, 'b> Iterator for Iter<'a, 'b> {
type Item = (&'a str, Style);
fn next(&mut self) -> Option<Self::Item> {
if let Some(&(len, style)) = self.descriptors.next() {
let mut len = len as usize;
// Try to merge descriptors of the same style together.
while let Some(&&(next_len, next_style)) = self.descriptors.peek() {
if next_style != style {
break;
}
len += next_len as usize;
self.descriptors.next();
}
let (part, rest) = self.head.split_at(len);
self.head = rest;
Some((part, style))
} else {
None
}
}
}
pub struct FormatReader {
target: PlainBuf,
marker: char,
expect_code: bool,
style: Style,
current_len: usize
}
impl FormatReader {
pub fn new() -> Self {
Self::with_marker('§')
}
pub fn with_marker(marker: char) -> Self {
FormatReader {
target: PlainBuf::new(),
marker: marker,
expect_code: false,
style: Style::new(),
current_len: 0
}
}
pub fn extend(target: PlainBuf, marker: char) -> Self {
FormatReader {
target: target,
marker: marker,
expect_code: false,
style: Style::new(),
current_len: 0
}
}
fn flush(&mut self) {
if self.current_len != 0 {
self.target.break_descriptor(self.current_len, self.style);
self.current_len = 0;
}
}
pub fn append(&mut self, string: &str) {
let mut start = 0;
for (index, char) in string.char_indices() {
if self.expect_code {
self.target.string.push_str(&string[start..start+self.current_len]);
self.flush();
self.style.process(&StyleCommand::from_code(char).unwrap_or(StyleCommand::Color(PaletteColor::White)));
self.expect_code = false;
} else if char == self.marker {
self.expect_code = true;
} else {
if self.current_len == 0 {
start = index;
}
self.current_len += utf8_len(char);
}<|fim▁hole|>
self.target.string.push_str(&string[start..start+self.current_len]);
}
pub fn finish(mut self) -> PlainBuf {
self.flush();
self.target
}
}
// TODO: The stdlib's `char::len_utf8()` computes the same value; consider using it instead.
fn utf8_len(c: char) -> usize {
let c = c as u32;
if c <= 0x7F {
1
} else if c <= 0x7FF {
2
} else if c <= 0xFFFF {
3
} else {
4
}
}
pub struct FormatWriter<'w, W> where W: fmt::Write, W: 'w {
target: &'w mut W,
current_style: Style,
marker: char
}
impl<'w, W> FormatWriter<'w, W> where W: fmt::Write {
pub fn new(target: &'w mut W) -> Self {
FormatWriter { target, current_style: Style::new(), marker: '§' }
}
pub fn with_marker(target: &'w mut W, marker: char) -> Self {
FormatWriter { target, current_style: Style::new(), marker }
}
pub fn write(&mut self, string: &str, style: Style) -> fmt::Result {
for command in self.current_style.transition(style) {
self.target.write_char(self.marker)?;
self.target.write_char(command.as_code())?;
}
self.current_style = style;
self.target.write_str(string)
}
}
impl<'de> Deserialize<'de> for PlainBuf {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de> {
struct PlainVisitor;
impl<'de> Visitor<'de> for PlainVisitor {
type Value = PlainBuf;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("a string containing Minecraft formatting codes")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: Error {
Ok(v.parse::<PlainBuf>().expect("Parsing a PlainBuf should never return an error!"))
}
}
deserializer.deserialize_str(PlainVisitor)
}
}
impl Serialize for PlainBuf {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer
{
serializer.serialize_str(&self.to_string())
}
}<|fim▁end|>
|
}
|
<|file_name|>test_coraid.py<|end_file_name|><|fim▁begin|># Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import math
import mock
import mox
from oslo_config import cfg
from oslo_serialization import jsonutils
from oslo_utils import units
from cinder.brick.initiator import connector
from cinder import exception
from cinder.image import image_utils
from cinder.openstack.common import log as logging
from cinder import test
from cinder import utils
from cinder.volume import configuration as conf
from cinder.volume.drivers import coraid
from cinder.volume import volume_types
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
def to_coraid_kb(gb):
return math.ceil(float(gb) * units.Gi / 1000)
def coraid_volume_size(gb):
return '{0}K'.format(to_coraid_kb(gb))
fake_esm_ipaddress = "192.168.0.1"
fake_esm_username = "darmok"
fake_esm_group = "tanagra"
fake_esm_group_id = 1
fake_esm_password = "12345678"
fake_coraid_repository_key = 'repository_key'
fake_volume_name = "volume-12345678-1234-1234-1234-1234567890ab"
fake_clone_name = "volume-ffffffff-1234-1234-1234-1234567890ab"
fake_volume_size = 10
fake_repository_name = "A-B:C:D"
fake_pool_name = "FakePool"
fake_aoetarget = 4081
fake_shelf = 16
fake_lun = 241
fake_str_aoetarget = str(fake_aoetarget)
fake_lun_addr = {"shelf": fake_shelf, "lun": fake_lun}
fake_volume_type = {'id': 1}
fake_volume = {"id": fake_volume_name,
"name": fake_volume_name,
"size": fake_volume_size,
"volume_type": fake_volume_type}
fake_clone_volume = {"name": fake_clone_name,
"size": fake_volume_size,
"volume_type": fake_volume_type}
fake_big_clone_volume = {"name": fake_clone_name,
"size": fake_volume_size + 1,
"volume_type": fake_volume_type}
fake_volume_info = {"pool": fake_pool_name,
"repo": fake_repository_name,
"vsxidx": fake_aoetarget,
"index": fake_lun,
"shelf": fake_shelf}
fake_lun_info = {"shelf": fake_shelf, "lun": fake_lun}
fake_snapshot_name = "snapshot-12345678-8888-8888-1234-1234567890ab"
fake_snapshot_id = "12345678-8888-8888-1234-1234567890ab"
fake_volume_id = "12345678-1234-1234-1234-1234567890ab"
fake_snapshot = {"id": fake_snapshot_id,
"name": fake_snapshot_name,
"volume_id": fake_volume_id,
"volume_name": fake_volume_name,
"volume_size": int(fake_volume_size) - 1,
"volume": fake_volume}
fake_configure_data = [{"addr": "cms", "data": "FAKE"}]
fake_esm_fetch = [[
{"command": "super_fake_command"},
{"reply": [
{"lv":
{"containingPool": fake_pool_name,
"lunIndex": fake_aoetarget,
"name": fake_volume_name,
"lvStatus":
{"exportedLun":
{"lun": fake_lun,
"shelf": fake_shelf}}
},
"repoName": fake_repository_name}]}]]
fake_esm_fetch_no_volume = [[
{"command": "super_fake_command"},
{"reply": []}]]
fake_esm_success = {"category": "provider",
"tracking": False,
"configState": "completedSuccessfully",
"heldPending": False,
"metaCROp": "noAction",
"message": None}
fake_group_fullpath = "admin group:%s" % (fake_esm_group)
fake_group_id = 4
fake_login_reply = {"values": [
{"fullPath": fake_group_fullpath,
"groupId": fake_group_id}],
"message": "",
"state": "adminSucceed",
"metaCROp": "noAction"}
fake_group_fail_fullpath = "fail group:%s" % (fake_esm_group)
fake_group_fail_id = 5
fake_login_reply_group_fail = {"values": [
{"fullPath": fake_group_fail_fullpath,
"groupId": fake_group_fail_id}],
"message": "",
"state": "adminSucceed",
"metaCROp": "noAction"}
def compare(a, b):
if type(a) != type(b):
return False
if type(a) == list or type(a) == tuple:
if len(a) != len(b):
return False
return all(map(lambda t: compare(t[0], t[1]), zip(a, b)))
elif type(a) == dict:
if len(a) != len(b):
return False
for k, v in a.items():
if not compare(v, b[k]):
return False
return True
else:
return a == b
def pack_data(request):
request['data'] = jsonutils.dumps(request['data'])
class FakeRpcBadRequest(Exception):
pass
class FakeRpcIsNotCalled(Exception):
def __init__(self, handle, url_params, data):
self.handle = handle
self.url_params = url_params
self.data = data
def __str__(self):
return 'Fake Rpc handle for {0}/{1}/{2} not found'.format(
self.handle, self.url_params, self.data)
class FakeRpcHandle(object):
def __init__(self, handle, url_params, data, result):
self.handle = handle
self.url_params = url_params
self.data = data
self.result = result
self._is_called = False
def set_called(self):
self._is_called = True
def __call__(self, handle, url_params, data,
allow_empty_response=False):
if handle != self.handle:
raise FakeRpcBadRequest(
'Unexpected handle name {0}. Expected {1}.'
.format(handle, self.handle))
if not compare(url_params, self.url_params):
raise FakeRpcBadRequest('Unexpected url params: {0} / {1}'
.format(url_params, self.url_params))
if not compare(data, self.data):
raise FakeRpcBadRequest('Unexpected data: {0}/{1}'
.format(data, self.data))
if callable(self.result):
return self.result()
else:
return self.result
class FakeRpc(object):
def __init__(self):
self._handles = []
def handle(self, handle, url_params, data, result):
self._handles.append(FakeRpcHandle(handle, url_params, data, result))
def __call__(self, handle_name, url_params, data,
allow_empty_response=False):
for handle in self._handles:
if (handle.handle == handle_name and
compare(handle.url_params, url_params) and
compare(handle.data, handle.data)):
handle.set_called()
return handle(handle_name, url_params, data,
allow_empty_response)
raise FakeRpcIsNotCalled(handle_name, url_params, data)
class CoraidDriverTestCase(test.TestCase):
def setUp(self):
super(CoraidDriverTestCase, self).setUp()
configuration = mox.MockObject(conf.Configuration)
configuration.append_config_values(mox.IgnoreArg())
configuration.coraid_default_repository = 'default_repository'
configuration.coraid_esm_address = fake_esm_ipaddress
configuration.coraid_user = fake_esm_username
configuration.coraid_group = fake_esm_group
configuration.coraid_password = fake_esm_password
configuration.volume_name_template = "volume-%s"
configuration.snapshot_name_template = "snapshot-%s"
configuration.coraid_repository_key = fake_coraid_repository_key
configuration.use_multipath_for_image_xfer = False
configuration.enforce_multipath_for_image_xfer = False
configuration.num_volume_device_scan_tries = 3
configuration.volume_dd_blocksize = '1M'
self.fake_rpc = FakeRpc()
self.stubs.Set(coraid.CoraidRESTClient, 'rpc', self.fake_rpc)
self.driver = coraid.CoraidDriver(configuration=configuration)
self.driver.do_setup({})
def mock_volume_types(self, repositories=None):
if not repositories:
repositories = [fake_repository_name]
self.mox.StubOutWithMock(volume_types, 'get_volume_type_extra_specs')
for repository in repositories:
(volume_types
.get_volume_type_extra_specs(fake_volume_type['id'],
fake_coraid_repository_key)
.AndReturn('<in> {0}'.format(repository)))
class CoraidDriverLoginSuccessTestCase(CoraidDriverTestCase):
def setUp(self):
super(CoraidDriverLoginSuccessTestCase, self).setUp()
login_results = {'state': 'adminSucceed',
'values': [
{'fullPath':
'admin group:{0}'.format(fake_esm_group),
'groupId': fake_esm_group_id
}]}
self.fake_rpc.handle('admin', {'op': 'login',
'username': fake_esm_username,
'password': fake_esm_password},
'Login', login_results)
self.fake_rpc.handle('admin', {'op': 'setRbacGroup',
'groupId': fake_esm_group_id},
'Group', {'state': 'adminSucceed'})
class CoraidDriverApplianceTestCase(CoraidDriverLoginSuccessTestCase):
def test_resize_volume(self):
new_volume_size = int(fake_volume_size) + 1
fetch_request = {'shelf': 'cms',
'orchStrRepo': '',
'lv': fake_volume_name}
self.fake_rpc.handle('fetch', fetch_request, None,
fake_esm_fetch)
reply = {'configState': 'completedSuccessfully'}
resize_volume_request = {'addr': 'cms',
'data': {
'lvName': fake_volume_name,
'newLvName': fake_volume_name + '-resize',
'size':
coraid_volume_size(new_volume_size),
'repoName': fake_repository_name},
'op': 'orchStrLunMods',
'args': 'resize'}
pack_data(resize_volume_request)
self.fake_rpc.handle('configure', {}, [resize_volume_request],
reply)
real_reply = self.driver.appliance.resize_volume(fake_volume_name,
new_volume_size)
self.assertEqual(reply['configState'], real_reply['configState'])
class CoraidDriverIntegrationalTestCase(CoraidDriverLoginSuccessTestCase):
def setUp(self):
super(CoraidDriverIntegrationalTestCase, self).setUp()
self.appliance = self.driver.appliance
# NOTE(nsobolevsky) prevent re-creation esm appliance
self.stubs.Set(coraid.CoraidDriver, 'appliance', self.appliance)
def test_create_volume(self):
self.mock_volume_types()
create_volume_request = {'addr': 'cms',
'data': {
'servers': [],
'size':
coraid_volume_size(fake_volume_size),
'repoName': fake_repository_name,
'lvName': fake_volume_name},
'op': 'orchStrLun',
'args': 'add'}
pack_data(create_volume_request)
self.fake_rpc.handle('configure', {}, [create_volume_request],
{'configState': 'completedSuccessfully',
'firstParam': 'fake_first_param'})
self.mox.ReplayAll()
self.driver.create_volume(fake_volume)
self.mox.VerifyAll()
@mock.patch.object(volume_types, 'get_volume_type_extra_specs')
def test_create_volume_volume_type_no_repo_key(self, volume_specs_mock):
"""Test volume creation without repo specified in volume type."""
volume_specs_mock.return_value = None
create_volume_request = {'addr': 'cms',
'data': {
'servers': [],
'size':
coraid_volume_size(fake_volume_size),
'repoName': 'default_repository',
'lvName': fake_volume_name},
'op': 'orchStrLun',
'args': 'add'}
pack_data(create_volume_request)
self.fake_rpc.handle('configure', {}, [create_volume_request],
{'configState': 'completedSuccessfully',
'firstParam': 'fake_first_param'})
self.driver.create_volume(fake_volume)
@mock.patch.object(volume_types, 'get_volume_type_extra_specs')
def test_create_volume_volume_type_no_repo_data(self, volume_specs_mock):
"""Test volume creation w/o repo in volume type nor config."""
volume_specs_mock.return_value = None
self.driver.configuration.coraid_default_repository = None
create_volume_request = {'addr': 'cms',
'data': {
'servers': [],
'size':
coraid_volume_size(fake_volume_size),
'repoName': 'default_repository',
'lvName': fake_volume_name},
'op': 'orchStrLun',
'args': 'add'}
pack_data(create_volume_request)
self.fake_rpc.handle('configure', {}, [create_volume_request],
{'configState': 'completedSuccessfully',
'firstParam': 'fake_first_param'})
self.assertRaises(exception.CoraidException,
self.driver.create_volume, fake_volume)
def test_delete_volume(self):
delete_volume_request = {'addr': 'cms',
'data': {
'repoName': fake_repository_name,
'lvName': fake_volume_name},
'op': 'orchStrLun/verified',
'args': 'delete'}
pack_data(delete_volume_request)
self.fake_rpc.handle('configure', {}, [delete_volume_request],
{'configState': 'completedSuccessfully'})
self.fake_rpc.handle('fetch', {'orchStrRepo': '',
'shelf': 'cms',
'lv': fake_volume_name},
None,
fake_esm_fetch)
self.mox.ReplayAll()
self.driver.delete_volume(fake_volume)
self.mox.VerifyAll()
def test_ping_ok(self):
self.fake_rpc.handle('fetch', {}, None, '')
self.mox.ReplayAll()
self.driver.appliance.ping()
self.mox.VerifyAll()
def test_ping_failed(self):
def rpc(handle, url_params, data,
allow_empty_response=True):
raise test.TestingException("Some exception")
self.stubs.Set(self.driver.appliance, 'rpc', rpc)
self.mox.ReplayAll()
self.assertRaises(exception.CoraidESMNotAvailable,
self.driver.appliance.ping)
self.mox.VerifyAll()
def test_delete_not_existing_lun(self):
delete_volume_request = {'addr': 'cms',
'data': {
'repoName': fake_repository_name,
'lvName': fake_volume_name},
'op': 'orchStrLun/verified',
'args': 'delete'}
pack_data(delete_volume_request)
self.fake_rpc.handle('configure', {}, [delete_volume_request],
{'configState': 'completedSuccessfully'})
self.fake_rpc.handle('fetch', {'orchStrRepo': '',
'shelf': 'cms',
'lv': fake_volume_name},
None,
fake_esm_fetch_no_volume)
self.mox.ReplayAll()
self.assertRaises(
exception.VolumeNotFound,
self.driver.appliance.delete_lun,
fake_volume['name'])
self.mox.VerifyAll()
    def test_delete_not_existing_volume_appliance_is_ok(self):
def delete_lun(volume_name):
raise exception.VolumeNotFound(volume_id=fake_volume['name'])
self.stubs.Set(self.driver.appliance, 'delete_lun', delete_lun)
def ping():
pass
self.stubs.Set(self.driver.appliance, 'ping', ping)
self.mox.ReplayAll()
self.driver.delete_volume(fake_volume)
self.mox.VerifyAll()
    def test_delete_not_existing_volume_sleeping_appliance(self):
def delete_lun(volume_name):
raise exception.VolumeNotFound(volume_id=fake_volume['name'])
self.stubs.Set(self.driver.appliance, 'delete_lun', delete_lun)
def ping():
raise exception.CoraidESMNotAvailable(reason="Any reason")
self.stubs.Set(self.driver.appliance, 'ping', ping)
self.driver.appliance.ping = ping
self.mox.ReplayAll()
self.assertRaises(exception.CoraidESMNotAvailable,
self.driver.delete_volume,
fake_volume)
self.mox.VerifyAll()
def test_create_snapshot(self):
fetch_request = {'shelf': 'cms',
'orchStrRepo': '',
'lv': fake_volume_name}
self.fake_rpc.handle('fetch', fetch_request, None,
fake_esm_fetch)
create_snapshot_request = {'addr': 'cms',
'data': {
'repoName': fake_repository_name,
'lvName': fake_volume_name,
'newLvName': fake_snapshot_name},
'op': 'orchStrLunMods',
'args': 'addClSnap'}
pack_data(create_snapshot_request)
self.fake_rpc.handle('configure', {}, [create_snapshot_request],
{'configState': 'completedSuccessfully'})
self.mox.ReplayAll()
self.driver.create_snapshot(fake_snapshot)
self.mox.VerifyAll()
def test_delete_snapshot(self):
fetch_request = {'shelf': 'cms',
'orchStrRepo': '',
'lv': fake_snapshot_name}
self.fake_rpc.handle('fetch', fetch_request, None,
fake_esm_fetch)
delete_snapshot_request = {'addr': 'cms',
'data': {
'repoName': fake_repository_name,
'lvName': fake_snapshot_name,
'newLvName': 'noop'},
'op': 'orchStrLunMods',
'args': 'delClSnap'}
pack_data(delete_snapshot_request)
self.fake_rpc.handle('configure', {}, [delete_snapshot_request],
{'configState': 'completedSuccessfully'})
self.mox.ReplayAll()
self.driver.delete_snapshot(fake_snapshot)
self.mox.VerifyAll()
def test_create_volume_from_snapshot(self):
self.mock_volume_types()
self.mox.StubOutWithMock(self.driver.appliance, 'resize_volume')
self.driver.appliance.resize_volume(fake_volume_name,
fake_volume['size'])\
.AndReturn(None)
fetch_request = {'shelf': 'cms',
'orchStrRepo': '',
'lv': fake_snapshot_name}
self.fake_rpc.handle('fetch', fetch_request, None,
fake_esm_fetch)
create_clone_request = {'addr': 'cms',
'data': {
'lvName': fake_snapshot_name,
'repoName': fake_repository_name,
'newLvName': fake_volume_name,
'newRepoName': fake_repository_name},
'op': 'orchStrLunMods',
'args': 'addClone'}
pack_data(create_clone_request)
self.fake_rpc.handle('configure', {}, [create_clone_request],
{'configState': 'completedSuccessfully'})
self.mox.ReplayAll()
self.driver.create_volume_from_snapshot(fake_volume, fake_snapshot)
self.mox.VerifyAll()
def test_initialize_connection(self):
fetch_request = {'shelf': 'cms',
'orchStrRepo': '',
'lv': fake_volume_name}
self.fake_rpc.handle('fetch', fetch_request, None,
fake_esm_fetch)
self.mox.ReplayAll()
connection = self.driver.initialize_connection(fake_volume, {})
self.mox.VerifyAll()
self.assertEqual(connection['driver_volume_type'], 'aoe')
self.assertEqual(connection['data']['target_shelf'], fake_shelf)
self.assertEqual(connection['data']['target_lun'], fake_lun)
def test_get_repository_capabilities(self):
reply = [[{}, {'reply': [
{'name': 'repo1',
'profile':
{'fullName': 'Bronze-Bronze:Profile1'}},
{'name': 'repo2',
'profile':
{'fullName': 'Bronze-Bronze:Profile2'}}]}]]
self.fake_rpc.handle('fetch', {'orchStrRepo': ''}, None,
reply)
self.mox.ReplayAll()
capabilities = self.driver.get_volume_stats(refresh=True)
self.mox.VerifyAll()
self.assertEqual(
capabilities[fake_coraid_repository_key],
'Bronze-Bronze:Profile1:repo1 Bronze-Bronze:Profile2:repo2')
def test_create_cloned_volume(self):
self.mock_volume_types([fake_repository_name])
fetch_request = {'shelf': 'cms',
'orchStrRepo': '',
'lv': fake_volume_name}
self.fake_rpc.handle('fetch', fetch_request, None,
fake_esm_fetch)
shelf_lun = '{0}.{1}'.format(fake_shelf, fake_lun)
create_clone_request = {'addr': 'cms',
'data': {
'shelfLun': shelf_lun,
'lvName': fake_volume_name,
'repoName': fake_repository_name,
'newLvName': fake_clone_name,
'newRepoName': fake_repository_name},
'op': 'orchStrLunMods',
'args': 'addClone'}
pack_data(create_clone_request)
self.fake_rpc.handle('configure', {}, [create_clone_request],
{'configState': 'completedSuccessfully'})
self.mox.ReplayAll()
self.driver.create_cloned_volume(fake_clone_volume, fake_volume)
self.mox.VerifyAll()
def test_create_cloned_volume_with_resize(self):
self.mock_volume_types([fake_repository_name])
self.mox.StubOutWithMock(self.driver.appliance, 'resize_volume')
self.driver.appliance.resize_volume(fake_big_clone_volume['name'],
fake_big_clone_volume['size'])\
.AndReturn(None)
fetch_request = {'shelf': 'cms',
'orchStrRepo': '',
'lv': fake_volume_name}
self.fake_rpc.handle('fetch', fetch_request, None,
fake_esm_fetch)
shelf_lun = '{0}.{1}'.format(fake_shelf, fake_lun)
create_clone_request = {'addr': 'cms',
'data': {
'shelfLun': shelf_lun,
'lvName': fake_volume_name,
'repoName': fake_repository_name,
'newLvName': fake_clone_name,
'newRepoName': fake_repository_name},
'op': 'orchStrLunMods',
'args': 'addClone'}
pack_data(create_clone_request)
self.fake_rpc.handle('configure', {}, [create_clone_request],
{'configState': 'completedSuccessfully'})
self.mox.ReplayAll()
self.driver.create_cloned_volume(fake_big_clone_volume, fake_volume)
self.mox.VerifyAll()
def test_create_cloned_volume_in_different_repository(self):
self.mock_volume_types([fake_repository_name + '_another'])
fetch_request = {'shelf': 'cms',
'orchStrRepo': '',
'lv': fake_volume_name}
self.fake_rpc.handle('fetch', fetch_request, None,
fake_esm_fetch)
self.mox.ReplayAll()
self.assertRaises(
exception.CoraidException,
self.driver.create_cloned_volume,
fake_clone_volume,
fake_volume)
self.mox.VerifyAll()
def test_extend_volume(self):
self.mox.StubOutWithMock(self.driver.appliance, 'resize_volume')
self.driver.appliance.resize_volume(fake_volume_name, 10)\
.AndReturn(None)
self.mox.ReplayAll()
self.driver.extend_volume(fake_volume, 10)
self.mox.VerifyAll()
class AutoReloginCoraidTestCase(test.TestCase):
def setUp(self):
super(AutoReloginCoraidTestCase, self).setUp()
self.rest_client = coraid.CoraidRESTClient('https://fake')
self.appliance = coraid.CoraidAppliance(self.rest_client,
'fake_username',
'fake_password',
'fake_group')
def _test_auto_relogin_fail(self, state):
self.mox.StubOutWithMock(self.rest_client, 'rpc')
self.rest_client.rpc('fake_handle', {}, None, False).\
AndReturn({'state': state,
'metaCROp': 'reboot'})
self.rest_client.rpc('fake_handle', {}, None, False).\
AndReturn({'state': state,
'metaCROp': 'reboot'})
self.rest_client.rpc('fake_handle', {}, None, False).\
AndReturn({'state': state,
'metaCROp': 'reboot'})
self.mox.StubOutWithMock(self.appliance, '_ensure_session')
self.appliance._ensure_session().AndReturn(None)
self.mox.StubOutWithMock(self.appliance, '_relogin')
self.appliance._relogin().AndReturn(None)
self.appliance._relogin().AndReturn(None)
self.mox.ReplayAll()
self.assertRaises(exception.CoraidESMReloginFailed,
self.appliance.rpc,
'fake_handle', {}, None, False)
self.mox.VerifyAll()
def test_auto_relogin_fail_admin(self):
self._test_auto_relogin_fail('GeneralAdminFailure')
def test_auto_relogin_fail_inactivity(self):
self._test_auto_relogin_fail('passwordInactivityTimeout')
def test_auto_relogin_fail_absolute(self):
self._test_auto_relogin_fail('passwordAbsoluteTimeout')
def test_auto_relogin_success(self):
self.mox.StubOutWithMock(self.rest_client, 'rpc')
self.rest_client.rpc('fake_handle', {}, None, False).\
AndReturn({'state': 'GeneralAdminFailure',
'metaCROp': 'reboot'})
self.rest_client.rpc('fake_handle', {}, None, False).\
AndReturn({'state': 'ok'})
self.mox.StubOutWithMock(self.appliance, '_ensure_session')
self.appliance._ensure_session().AndReturn(None)
self.mox.StubOutWithMock(self.appliance, '_relogin')
self.appliance._relogin().AndReturn(None)
self.mox.ReplayAll()
reply = self.appliance.rpc('fake_handle', {}, None, False)
self.mox.VerifyAll()
self.assertEqual(reply['state'], 'ok')
class CoraidDriverImageTestCases(CoraidDriverTestCase):
def setUp(self):
super(CoraidDriverImageTestCases, self).setUp()
self.fake_dev_path = '/dev/ether/fake_dev'
self.fake_connection = {'driver_volume_type': 'aoe',
'data': {'target_shelf': fake_shelf,
'target_lun': fake_lun}}
self.fake_volume_info = {
'shelf': self.fake_connection['data']['target_shelf'],
'lun': self.fake_connection['data']['target_lun']}
self.mox.StubOutWithMock(self.driver, 'initialize_connection')
self.driver.initialize_connection(fake_volume, {})\
.AndReturn(self.fake_connection)
self.mox.StubOutWithMock(self.driver, 'terminate_connection')
self.driver.terminate_connection(fake_volume, mox.IgnoreArg(),
force=False).AndReturn(None)
root_helper = 'sudo cinder-rootwrap /etc/cinder/rootwrap.conf'
self.mox.StubOutWithMock(connector, 'get_connector_properties')
connector.get_connector_properties(root_helper,
CONF.my_ip, False, False).\
AndReturn({})
self.mox.StubOutWithMock(utils, 'brick_get_connector')
aoe_initiator = self.mox.CreateMockAnything()
utils.brick_get_connector('aoe',
device_scan_attempts=3,
use_multipath=False,
conn=mox.IgnoreArg()).\
AndReturn(aoe_initiator)
aoe_initiator\
.connect_volume(self.fake_connection['data'])\
.AndReturn({'path': self.fake_dev_path})
aoe_initiator.check_valid_device(self.fake_dev_path, mox.IgnoreArg())\
.AndReturn(True)
aoe_initiator.disconnect_volume(
{'target_shelf': self.fake_volume_info['shelf'],
'target_lun': self.fake_volume_info['lun']}, mox.IgnoreArg())
def test_copy_volume_to_image(self):
fake_image_service = 'fake-image-service'
fake_image_meta = 'fake-image-meta'
self.mox.StubOutWithMock(image_utils, 'upload_volume')
image_utils.upload_volume({},
fake_image_service,
fake_image_meta,
self.fake_dev_path)
self.mox.ReplayAll()
self.driver.copy_volume_to_image({},
fake_volume,
fake_image_service,
fake_image_meta)
self.mox.VerifyAll()
def test_copy_image_to_volume(self):
fake_image_service = 'fake-image-service'
fake_image_id = 'fake-image-id;'
self.mox.StubOutWithMock(image_utils, 'fetch_to_raw')
image_utils.fetch_to_raw({},
fake_image_service,
fake_image_id,
self.fake_dev_path,
mox.IgnoreArg(),
size=fake_volume_size)
self.mox.ReplayAll()
self.driver.copy_image_to_volume({},
fake_volume,
fake_image_service,
fake_image_id)<|fim▁hole|> self.mox.VerifyAll()
class CoraidResetConnectionTestCase(CoraidDriverTestCase):
def test_create_new_appliance_for_every_request(self):
self.mox.StubOutWithMock(coraid, 'CoraidRESTClient')
self.mox.StubOutWithMock(coraid, 'CoraidAppliance')
coraid.CoraidRESTClient(mox.IgnoreArg())
coraid.CoraidRESTClient(mox.IgnoreArg())
coraid.CoraidAppliance(mox.IgnoreArg(),
mox.IgnoreArg(),
mox.IgnoreArg(),
mox.IgnoreArg()).AndReturn('fake_app1')
coraid.CoraidAppliance(mox.IgnoreArg(),
mox.IgnoreArg(),
mox.IgnoreArg(),
mox.IgnoreArg()).AndReturn('fake_app2')
self.mox.ReplayAll()
self.assertEqual(self.driver.appliance, 'fake_app1')
self.assertEqual(self.driver.appliance, 'fake_app2')
self.mox.VerifyAll()<|fim▁end|>
| |
<|file_name|>function-pointer-comparison-issue-54685.rs<|end_file_name|><|fim▁begin|>// min-llvm-version: 12.0
// compile-flags: -C opt-level=3
// run-pass
fn foo(_i: i32) -> i32 {
1
}
fn bar(_i: i32) -> i32 {
1
}
fn main() {
let x: fn(i32) -> i32 = foo;
let y: fn(i32) -> i32 = bar;
let s1;
if x == y {
s1 = "same".to_string();
} else {<|fim▁hole|> s1 = format!("{:?}, {:?}", x, y);
}
let s2;
if x == y {
s2 = "same".to_string();
} else {
s2 = format!("{:?}, {:?}", x, y);
}
assert_eq!(s1, s2);
}<|fim▁end|>
| |
<|file_name|>new.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from outwiker.gui.baseaction import BaseAction
from outwiker.core.commands import createNewWiki
<|fim▁hole|>class NewAction (BaseAction):
"""
    Create a new notes tree
"""
stringId = u"NewTree"
def __init__(self, application):
self._application = application
@property
def title(self):
return _(u"New…")
@property
def description(self):
return _(u"Create a new tree notes")
def run(self, params):
createNewWiki(self._application.mainWindow)<|fim▁end|>
| |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use async_trait::async_trait;
use yaml_rust::Yaml;
mod assert;
mod assign;
mod delay;
mod exec;
mod request;
pub use self::assert::Assert;
pub use self::assign::Assign;
pub use self::delay::Delay;
pub use self::exec::Exec;
pub use self::request::Request;
use crate::benchmark::{Context, Pool, Reports};
use crate::config::Config;
use std::fmt;
#[async_trait]
pub trait Runnable {
async fn execute(&self, context: &mut Context, reports: &mut Reports, pool: &Pool, config: &Config);
}
#[derive(Clone)]
pub struct Report {
pub name: String,
pub duration: f64,
pub status: u16,
}
impl fmt::Debug for Report {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "\n- name: {}\n duration: {}\n", self.name, self.duration)
}
}
impl fmt::Display for Report {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "\n- name: {}\n duration: {}\n status: {}\n", self.name, self.duration, self.status)
}<|fim▁hole|>pub fn extract_optional<'a>(item: &'a Yaml, attr: &'a str) -> Option<&'a str> {
if let Some(s) = item[attr].as_str() {
Some(s)
} else {
if item[attr].as_hash().is_some() {
panic!("`{}` needs to be a string. Try adding quotes", attr);
} else {
None
}
}
}
pub fn extract<'a>(item: &'a Yaml, attr: &'a str) -> &'a str {
if let Some(s) = item[attr].as_str() {
s
} else {
if item[attr].as_hash().is_some() {
panic!("`{}` is required needs to be a string. Try adding quotes", attr);
} else {
panic!("Unknown node `{}` => {:?}", attr, item[attr]);
}
}
}<|fim▁end|>
|
}
|
<|file_name|>keyring.go<|end_file_name|><|fim▁begin|>package keyring
import (
"errors"
"sync"
)
var (
// ErrNotFound means the requested password was not found
ErrNotFound = errors.New("keyring: Password not found")
// ErrNoDefault means that no default keyring provider has been found
ErrNoDefault = errors.New("keyring: No suitable keyring provider found (check your build flags)")
providerInitOnce sync.Once
defaultProvider provider
providerInitError error
)
// provider provides a simple interface to a keychain service
type provider interface {
Get(service, username string) (string, error)
Set(service, username, password string) error
}
func setupProvider() (provider, error) {
providerInitOnce.Do(func() {
defaultProvider, providerInitError = initializeProvider()
})
if providerInitError != nil {
return nil, providerInitError
} else if defaultProvider == nil {
return nil, ErrNoDefault
}
return defaultProvider, nil
}
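// Usage sketch from a client package (service/user names are illustrative):
//
//	if err := keyring.Set("myapp", "alice", "s3cret"); err != nil {
//		// handle the error
//	}
//	pw, err := keyring.Get("myapp", "alice")
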
// Get gets the password for a particular Service and Username using the
// default keyring provider.
func Get(service, username string) (string, error) {
p, err := setupProvider()<|fim▁hole|> }
return p.Get(service, username)
}
// Set sets the password for a particular Service and Username using the
// default keyring provider.
func Set(service, username, password string) error {
p, err := setupProvider()
if err != nil {
return err
}
return p.Set(service, username, password)
}<|fim▁end|>
|
if err != nil {
return "", err
|
<|file_name|>singleton.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
<|fim▁hole|>
# Mute logger
from pyknyx.services.logger import logging
logger = logging.getLogger(__name__)
logging.getLogger("pyknyx").setLevel(logging.ERROR)
@six.add_metaclass(Singleton)
class SingletonTest(object):
pass
class SingletonTestCase(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_constructor(self):
s1 = SingletonTest()
s2 = SingletonTest()
self.assertIs(s1, s2)<|fim▁end|>
|
from pyknyx.common.singleton import *
import unittest
|
<|file_name|>EnvironmentAccessor.java<|end_file_name|><|fim▁begin|>/*
* Twidere - Twitter client for Android
*
* Copyright (C) 2012 Mariotaku Lee <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.mariotaku.twidere.util;
import java.io.File;
import android.annotation.TargetApi;
import android.content.Context;
import android.os.Build;
import android.os.Environment;
public final class EnvironmentAccessor {
public static File getExternalCacheDir(final Context context) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.FROYO)
return GetExternalCacheDirAccessorFroyo.getExternalCacheDir(context);
final File ext_storage_dir = Environment.getExternalStorageDirectory();
if (ext_storage_dir != null && ext_storage_dir.isDirectory()) {
final String ext_cache_path = ext_storage_dir.getAbsolutePath() + "/Android/data/"
+ context.getPackageName() + "/cache/";
final File ext_cache_dir = new File(ext_cache_path);
if (ext_cache_dir.isDirectory() || ext_cache_dir.mkdirs()) return ext_cache_dir;
}
return null;
}
@TargetApi(Build.VERSION_CODES.FROYO)
private static class GetExternalCacheDirAccessorFroyo {<|fim▁hole|> private static File getExternalCacheDir(final Context context) {
return context.getExternalCacheDir();
}
}
}<|fim▁end|>
|
@TargetApi(Build.VERSION_CODES.FROYO)
|
<|file_name|>reader_range_iterator_test.go<|end_file_name|><|fim▁begin|>package tsm1
import (
"fmt"
"os"
"testing"
"github.com/google/go-cmp/cmp"
"github.com/influxdata/influxdb"
"github.com/influxdata/influxdb/models"
"github.com/influxdata/influxdb/tsdb"
"github.com/influxdata/influxdb/tsdb/cursors"
)
func TestTimeRangeIterator(t *testing.T) {
tsm := mustWriteTSM(
bucket{
org: 0x50,
bucket: 0x60,
w: writes(
mw("cpu",
kw("tag0=val0",
vals(tvi(1000, 1), tvi(1010, 2), tvi(1020, 3)),
vals(tvi(2000, 1), tvi(2010, 2), tvi(2020, 3)),
),
kw("tag0=val1",
vals(tvi(2000, 1), tvi(2010, 2), tvi(2020, 3)),
vals(tvi(3000, 1), tvi(3010, 2), tvi(3020, 3)),
),
),
),
},
bucket{
org: 0x51,
bucket: 0x61,
w: writes(
mw("mem",
kw("tag0=val0",
vals(tvi(1000, 1), tvi(1010, 2), tvi(1020, 3)),
vals(tvi(2000, 1), tvi(2010, 2), tvi(2020, 3)),
),
kw("tag0=val1",
vals(tvi(1000, 1), tvi(1010, 2), tvi(1020, 3)),
vals(tvi(2000, 1)),
),
kw("tag0=val2",
vals(tvi(2000, 1), tvi(2010, 2), tvi(2020, 3)),
vals(tvi(3000, 1), tvi(3010, 2), tvi(3020, 3)),
),
),
),
},
)
defer tsm.RemoveAll()
orgBucket := func(org, bucket uint) []byte {
n := tsdb.EncodeName(influxdb.ID(org), influxdb.ID(bucket))
return n[:]
}
type args struct {
min int64
max int64
}
type res struct {
k string
hasData bool
}
EXP := func(r ...interface{}) (rr []res) {
for i := 0; i+1 < len(r); i += 2 {
rr = append(rr, res{k: r[i].(string), hasData: r[i+1].(bool)})
}
return
}
type test struct {
name string
args args
exp []res
expStats cursors.CursorStats
}
type bucketTest struct {
org, bucket uint
m string
tests []test
}
r := tsm.TSMReader()
runTests := func(name string, tests []bucketTest) {
t.Run(name, func(t *testing.T) {
for _, bt := range tests {
key := orgBucket(bt.org, bt.bucket)
t.Run(fmt.Sprintf("0x%x-0x%x", bt.org, bt.bucket), func(t *testing.T) {
for _, tt := range bt.tests {
t.Run(tt.name, func(t *testing.T) {
iter := r.TimeRangeIterator(key, tt.args.min, tt.args.max)
count := 0
for i, exp := range tt.exp {
if !iter.Next() {
t.Errorf("Next(%d): expected true", i)
}
expKey := makeKey(influxdb.ID(bt.org), influxdb.ID(bt.bucket), bt.m, exp.k)
if got := iter.Key(); !cmp.Equal(got, expKey) {
t.Errorf("Key(%d): -got/+exp\n%v", i, cmp.Diff(got, expKey))
}
if got := iter.HasData(); got != exp.hasData {
t.Errorf("HasData(%d): -got/+exp\n%v", i, cmp.Diff(got, exp.hasData))
}
count++
}
if count != len(tt.exp) {
t.Errorf("count: -got/+exp\n%v", cmp.Diff(count, len(tt.exp)))
}
if got := iter.Stats(); !cmp.Equal(got, tt.expStats) {
t.Errorf("Stats: -got/+exp\n%v", cmp.Diff(got, tt.expStats))
}
})
}
})
}
})
}
runTests("before delete", []bucketTest{
{
org: 0x50,
bucket: 0x60,
m: "cpu",
tests: []test{
{
name: "cover file",
args: args{
min: 900,
max: 10000,
},
exp: EXP("tag0=val0", true, "tag0=val1", true),
expStats: cursors.CursorStats{ScannedValues: 0, ScannedBytes: 0},
},
{
name: "within block",
args: args{
min: 2001,
max: 2011,
},
exp: EXP("tag0=val0", true, "tag0=val1", true),
expStats: cursors.CursorStats{ScannedValues: 6, ScannedBytes: 48},
},
{
name: "to_2999",
args: args{
min: 0,
max: 2999,
},
exp: EXP("tag0=val0", true, "tag0=val1", true),
expStats: cursors.CursorStats{ScannedValues: 0, ScannedBytes: 0},
},
{
name: "intersects block",
args: args{
min: 1500,
max: 2500,
},
exp: EXP("tag0=val0", true, "tag0=val1", true),
expStats: cursors.CursorStats{ScannedValues: 0, ScannedBytes: 0},
},
},
},
{
org: 0x51,
bucket: 0x61,
m: "mem",
tests: []test{
{
name: "cover file",
args: args{
min: 900,
max: 10000,
},
exp: EXP("tag0=val0", true, "tag0=val1", true, "tag0=val2", true),
expStats: cursors.CursorStats{ScannedValues: 0, ScannedBytes: 0},
},
{
name: "within block",
args: args{
min: 2001,
max: 2011,
},
exp: EXP("tag0=val0", true, "tag0=val1", false, "tag0=val2", true),
expStats: cursors.CursorStats{ScannedValues: 6, ScannedBytes: 48},
},
{
name: "1000_2999",
args: args{
min: 1000,
max: 2500,
},
exp: EXP("tag0=val0", true, "tag0=val1", true, "tag0=val2", true),
expStats: cursors.CursorStats{ScannedValues: 0, ScannedBytes: 0},
},
},
},
})
tsm.MustDeletePrefix(orgBucket(0x50, 0x60), 0, 2999)
tsm.MustDelete(makeKey(0x51, 0x61, "mem", "tag0=val0"))
tsm.MustDeleteRange(2000, 2999,
makeKey(0x51, 0x61, "mem", "tag0=val1"),
makeKey(0x51, 0x61, "mem", "tag0=val2"),
)
runTests("after delete", []bucketTest{
{
org: 0x50,
bucket: 0x60,
m: "cpu",
tests: []test{
{
name: "cover file",
args: args{
min: 900,
max: 10000,
},
exp: EXP("tag0=val1", true),
expStats: cursors.CursorStats{ScannedValues: 6, ScannedBytes: 48},
},
{
name: "within block",
args: args{
min: 2001,
max: 2011,
},
exp: nil,
expStats: cursors.CursorStats{ScannedValues: 0, ScannedBytes: 0},
},
{
name: "to_2999",
args: args{
min: 0,
max: 2999,
},
exp: EXP("tag0=val1", false),
expStats: cursors.CursorStats{ScannedValues: 3, ScannedBytes: 24},
},
{
name: "intersects block",
args: args{
min: 1500,
max: 2500,
},
exp: EXP("tag0=val1", false),
expStats: cursors.CursorStats{ScannedValues: 3, ScannedBytes: 24},
},
{
name: "beyond all tombstones",
args: args{
min: 3000,
max: 4000,
},
exp: EXP("tag0=val1", true),
expStats: cursors.CursorStats{ScannedValues: 0, ScannedBytes: 0},
},
},
},
{
org: 0x51,
bucket: 0x61,
m: "mem",
tests: []test{
{
name: "cover file",
args: args{
min: 900,
max: 10000,
},
exp: EXP("tag0=val1", true, "tag0=val2", true),
expStats: cursors.CursorStats{ScannedValues: 9, ScannedBytes: 72},
},
{
name: "within block",
args: args{
min: 2001,
max: 2011,
},
exp: EXP("tag0=val1", false, "tag0=val2", false),
expStats: cursors.CursorStats{ScannedValues: 3, ScannedBytes: 24},
},
{
name: "1000_2500",
args: args{
min: 1000,
max: 2500,
},
exp: EXP("tag0=val1", true, "tag0=val2", false),
expStats: cursors.CursorStats{ScannedValues: 6, ScannedBytes: 48},
},
},
},
})
}
func TestExcludeEntries(t *testing.T) {
entries := func(ts ...int64) (e []IndexEntry) {
for i := 0; i+1 < len(ts); i += 2 {
e = append(e, IndexEntry{MinTime: ts[i], MaxTime: ts[i+1]})
}
return
}
eq := func(a, b []IndexEntry) bool {
if len(a) == 0 && len(b) == 0 {
return true
}
return cmp.Equal(a, b)
}
type args struct {
e []IndexEntry
min int64
max int64
}
tests := []struct {
name string
args args
exp []IndexEntry
}{
{
args: args{
e: entries(0, 10, 12, 15, 19, 21),
min: 11,
max: 13,
},
exp: entries(12, 15),
},
{
args: args{
e: entries(0, 10, 12, 15, 19, 21),
min: 10,
max: 13,
},
exp: entries(0, 10, 12, 15),
},
{
args: args{
e: entries(0, 10, 12, 15, 19, 21),
min: 12,
max: 30,
},
exp: entries(12, 15, 19, 21),
},
{
args: args{
e: entries(0, 10, 12, 15, 19, 21),
min: 0,
max: 100,
},
exp: entries(0, 10, 12, 15, 19, 21),
},
{
args: args{
e: entries(0, 10, 13, 15, 19, 21),
min: 11,
max: 12,
},
exp: entries(),
},
{
args: args{
e: entries(12, 15, 19, 21),
min: 0,
max: 9,
},
exp: entries(),
},
{
args: args{
e: entries(12, 15, 19, 21),
min: 22,
max: 30,
},
exp: entries(),
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := excludeEntries(tt.args.e, TimeRange{tt.args.min, tt.args.max}); !cmp.Equal(got, tt.exp, cmp.Comparer(eq)) {
t.Errorf("excludeEntries() -got/+exp\n%v", cmp.Diff(got, tt.exp))
}
})
}
}
func TestExcludeTimeRanges(t *testing.T) {
entries := func(ts ...int64) (e []TimeRange) {
for i := 0; i+1 < len(ts); i += 2 {
e = append(e, TimeRange{Min: ts[i], Max: ts[i+1]})
}
return
}
eq := func(a, b []TimeRange) bool {
if len(a) == 0 && len(b) == 0 {
return true
}
return cmp.Equal(a, b)
}
type args struct {
e []TimeRange
min int64
max int64
}
tests := []struct {
name string
args args
exp []TimeRange
}{
{
args: args{
e: entries(0, 10, 12, 15, 19, 21),
min: 11,
max: 13,
},
exp: entries(12, 15),
},
{
args: args{
e: entries(0, 10, 12, 15, 19, 21),
min: 10,
max: 13,
},
exp: entries(0, 10, 12, 15),
},
{
args: args{
e: entries(0, 10, 12, 15, 19, 21),
min: 12,
max: 30,
},
exp: entries(12, 15, 19, 21),
},
{
args: args{
e: entries(0, 10, 12, 15, 19, 21),
min: 0,
max: 100,
},
exp: entries(0, 10, 12, 15, 19, 21),
},
{
args: args{
e: entries(0, 10, 13, 15, 19, 21),
min: 11,
max: 12,
},
exp: entries(),
},
{
args: args{
e: entries(12, 15, 19, 21),
min: 0,
max: 9,
},
exp: entries(),
},
{
args: args{
e: entries(12, 15, 19, 21),
min: 22,
max: 30,
},
exp: entries(),
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := excludeTimeRanges(tt.args.e, TimeRange{tt.args.min, tt.args.max}); !cmp.Equal(got, tt.exp, cmp.Comparer(eq)) {
t.Errorf("excludeEntries() -got/+exp\n%v", cmp.Diff(got, tt.exp))
}
})
}
}
func TestIntersectsEntries(t *testing.T) {
entries := func(ts ...int64) (e []IndexEntry) {
for i := 0; i+1 < len(ts); i += 2 {
e = append(e, IndexEntry{MinTime: ts[i], MaxTime: ts[i+1]})
}
return
}
type args struct {
e []IndexEntry
tr TimeRange
}
tests := []struct {
name string
args args
exp bool
}{
{
name: "",
args: args{
e: entries(5, 10, 13, 15, 19, 21, 22, 27),
tr: TimeRange{6, 9},
},
exp: false,
},
{
args: args{
e: entries(5, 10, 13, 15, 19, 21, 22, 27),
tr: TimeRange{11, 12},
},
exp: false,
},
{
args: args{
e: entries(5, 10, 13, 15, 19, 21, 22, 27),
tr: TimeRange{2, 4},
},
exp: false,
},
{
args: args{
e: entries(5, 10, 13, 15, 19, 21, 22, 27),
tr: TimeRange{28, 40},
},
exp: false,
},
{
args: args{
e: entries(5, 10, 13, 15, 19, 21, 22, 27),
tr: TimeRange{3, 11},
},
exp: true,
},
{
args: args{
e: entries(5, 10, 13, 15, 19, 21, 22, 27),
tr: TimeRange{5, 27},
},
exp: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := intersectsEntry(tt.args.e, tt.args.tr); got != tt.exp {
t.Errorf("excludeEntries() -got/+exp\n%v", cmp.Diff(got, tt.exp))
}
})
}
}
type bucket struct {
org, bucket influxdb.ID
w []measurementWrite
}
func writes(w ...measurementWrite) []measurementWrite {
return w
}
type measurementWrite struct {
m string
w []keyWrite
}
func mw(m string, w ...keyWrite) measurementWrite {
return measurementWrite{m, w}
}
type keyWrite struct {
k string
w []Values
}
func kw(k string, w ...Values) keyWrite { return keyWrite{k, w} }
func vals(tv ...Value) Values { return tv }
func tvi(ts int64, v int64) Value { return NewIntegerValue(ts, v) }
type tsmState struct {
dir string
file string
r *TSMReader
}
const fieldName = "v"
func makeKey(org, bucket influxdb.ID, m string, k string) []byte {
name := tsdb.EncodeName(org, bucket)
line := string(m) + "," + k
tags := make(models.Tags, 1)
tags[0] = models.NewTag(models.MeasurementTagKeyBytes, []byte(m))
tags = append(tags, models.ParseTags([]byte(line))...)
tags = append(tags, models.NewTag(models.FieldKeyTagKeyBytes, []byte(fieldName)))
return SeriesFieldKeyBytes(string(models.MakeKey(name[:], tags)), fieldName)
}
func mustWriteTSM(writes ...bucket) (s *tsmState) {
dir := mustTempDir()
defer func() {
if s == nil {
_ = os.RemoveAll(dir)
}
}()
f := mustTempFile(dir)
w, err := NewTSMWriter(f)
if err != nil {
panic(fmt.Sprintf("unexpected error creating writer: %v", err))
}
for _, ob := range writes {
for _, mw := range ob.w {
for _, kw := range mw.w {
key := makeKey(ob.org, ob.bucket, mw.m, kw.k)
for _, vw := range kw.w {
if err := w.Write(key, vw); err != nil {
panic(fmt.Sprintf("Write failed: %v", err))
}
}
}
}
}
if err := w.WriteIndex(); err != nil {
panic(fmt.Sprintf("WriteIndex: %v", err))
}
if err := w.Close(); err != nil {
panic(fmt.Sprintf("Close: %v", err))
}
fd, err := os.Open(f.Name())
if err != nil {
panic(fmt.Sprintf("os.Open: %v", err))
}
r, err := NewTSMReader(fd)
if err != nil {
panic(fmt.Sprintf("NewTSMReader: %v", err))
}
return &tsmState{
dir: dir,
file: f.Name(),
r: r,
}
}
func (s *tsmState) TSMReader() *TSMReader {
return s.r
}
func (s *tsmState) RemoveAll() {
_ = os.RemoveAll(s.dir)
}
func (s *tsmState) MustDeletePrefix(key []byte, min, max int64) {
err := s.r.DeletePrefix(key, min, max, nil, nil)
if err != nil {
panic(fmt.Sprintf("DeletePrefix: %v", err))
}
}
func (s *tsmState) MustDelete(keys ...[]byte) {
err := s.r.Delete(keys)
if err != nil {
panic(fmt.Sprintf("Delete: %v", err))
}
}
func (s *tsmState) MustDeleteRange(min, max int64, keys ...[]byte) {<|fim▁hole|> err := s.r.DeleteRange(keys, min, max)
if err != nil {
panic(fmt.Sprintf("DeleteRange: %v", err))
}
}<|fim▁end|>
| |
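The table-driven cases above pin down what excludeEntries is expected to return without showing its implementation; a small sketch of the predicate those expectations imply, with the type shapes assumed from the entries(...) helper rather than taken from the tsm1 package:

type timeRange struct{ min, max int64 }
type indexEntry struct{ minTime, maxTime int64 }

// keepOverlapping keeps an entry iff it overlaps the closed range [tr.min, tr.max],
// which reproduces every expected result in the excludeEntries cases above.
func keepOverlapping(entries []indexEntry, tr timeRange) []indexEntry {
    var out []indexEntry
    for _, e := range entries {
        if e.minTime <= tr.max && e.maxTime >= tr.min {
            out = append(out, e)
        }
    }
    return out
}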
<|file_name|>i18n.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import gettext<|fim▁hole|>
from obozrenie.global_settings import *
current_locale, encoding = locale.getdefaultlocale()
t = gettext.translation(APPLICATION_ID, localedir=LOCALE_DIR, languages=[
current_locale], codeset=encoding, fallback=True)
_ = t.gettext<|fim▁end|>
|
import locale
|
<|file_name|>PyConsole.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015 Aaron Kehrer
# Licensed under the terms of the MIT License
# (see fiddle/__init__.py for details)
import os
import unicodedata
from io import StringIO
from PyQt4 import QtCore, QtGui
from fiddle.config import EDITOR_FONT, EDITOR_FONT_SIZE
class PyConsoleTextBrowser(QtGui.QTextBrowser):
def __init__(self, parent=None, process=None):
super(PyConsoleTextBrowser, self).__init__(parent)
self.process = process
# The start position in the QTextBrowser document where new user input will be inserted
self._input_insert_pos = -1
self.history = []
self.history_idx = 0
self.setLineWrapMode(QtGui.QTextEdit.NoWrap)
self.setAcceptRichText(False)
self.setReadOnly(False)
self.setOpenExternalLinks(False)
self.setOpenLinks(False)
self.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByMouse | QtCore.Qt.TextEditorInteraction)
def keyPressEvent(self, event):
if self.process is not None:
# Skip keys modified with Ctrl or Alt
if event.modifiers() != QtCore.Qt.ControlModifier and event.modifiers() != QtCore.Qt.AltModifier:
# Get the insert cursor and make sure it's at the end of the console
cursor = self.textCursor()
cursor.movePosition(QtGui.QTextCursor.End)
if self._input_insert_pos < 0:
self._input_insert_pos = cursor.position()
# Scroll view to end of console
self.setTextCursor(cursor)
self.ensureCursorVisible()
# Process the key event
if event.key() == QtCore.Qt.Key_Up:
# Clear any previous input
self._clear_insert_line(cursor)
# Get the history
if len(self.history) > 0:
self.history_idx -= 1
try:
cursor.insertText(self.history[self.history_idx])
except IndexError:
self.history_idx += 1
cursor.insertText('')
elif event.key() == QtCore.Qt.Key_Down:
# Clear any previous input
self._clear_insert_line(cursor)
# Get the history
if len(self.history) > 0 >= self.history_idx:
self.history_idx += 1
try:
cursor.insertText(self.history[self.history_idx])
except IndexError:
self.history_idx -= 1
cursor.insertText('')
elif event.key() == QtCore.Qt.Key_Return:
txt = self._select_insert_line(cursor)
self.process.write('{0}\n'.format(txt).encode('utf-8'))
# Reset the insert position
self._input_insert_pos = -1
# Update the history
self.history.append(txt)
self.history_idx = 0
# Pass the event on to the parent for handling
return QtGui.QTextBrowser.keyPressEvent(self, event)
def _clear_insert_line(self, cursor):
"""
Remove all the displayed text from the input insert line and clear the input buffer
"""
cursor.setPosition(self._input_insert_pos, QtGui.QTextCursor.KeepAnchor)
cursor.removeSelectedText()
def _select_insert_line(self, cursor):
cursor.setPosition(self._input_insert_pos, QtGui.QTextCursor.KeepAnchor)
txt = cursor.selectedText()
cursor.clearSelection()
return txt
class PyConsoleLineEdit(QtGui.QLineEdit):
"""
https://wiki.python.org/moin/PyQt/Adding%20tab-completion%20to%20a%20QLineEdit
http://www.saltycrane.com/blog/2008/01/how-to-capture-tab-key-press-event-with/
"""
def __init__(self):
super(PyConsoleLineEdit, self).__init__()
line_font = QtGui.QFont()
line_font.setFamily(EDITOR_FONT)
line_font.setPointSize(EDITOR_FONT_SIZE)
self.setFont(line_font)
self.history = []
self.history_idx = -1
def event(self, event):
if event.type() == QtCore.QEvent.KeyPress:
if event.key() == QtCore.Qt.Key_Tab:
if self.text().strip() == '':
self.setText(self.text() + ' ')
return True
elif event.key() == QtCore.Qt.Key_Up:
if len(self.history) > 0 and self.history_idx > 0:
self.history_idx -= 1
self.setText(self.history[self.history_idx])
return True
elif event.key() == QtCore.Qt.Key_Down:
if 0 < len(self.history) > self.history_idx:
self.history_idx += 1
try:
self.setText(self.history[self.history_idx])
except IndexError:
self.setText('')
return True
elif event.key() == QtCore.Qt.Key_Return:
try:
if self.history[-1] != self.text():
self.history.append(self.text())
except IndexError:
self.history.append(self.text())<|fim▁hole|><|fim▁end|>
|
self.history_idx = len(self.history)
return QtGui.QLineEdit.event(self, event)
return QtGui.QLineEdit.event(self, event)
|
<|file_name|>inputMgr.py<|end_file_name|><|fim▁begin|># ========================== Start Copyright Notice ========================== #
# #
# Copyright 2014 F.D.I.S. #
# This file is part of Kinetic Gunner: Gunner of Angst #
# #
# For the latest version, please visit: #
# https://github.com/CertainlyUncertain/Kinetic-Gunner-Gunner-of-Angst #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
# =========================== End Copyright Notice =========================== #
# Input Manager -------------------------------------------------------------- #
import ogre.renderer.OGRE as ogre
import ogre.io.OIS as OIS
from vector import Vector3
import os
import time
class InputMgr(OIS.KeyListener, OIS.MouseListener, OIS.JoyStickListener):
''' Manages keyboard and mouse, with buffered and unbuffered input. '''
def __init__(self, engine):
''' Creates Input Listeners and Initializes Variables. '''
self.engine = engine
OIS.KeyListener.__init__(self)
OIS.MouseListener.__init__(self)
OIS.JoyStickListener.__init__(self)
self.move = 1000
self.rotate = 25
self.selectionRadius = 100
self.MB_Left_Down = False
self.MB_Right_Down = False
print "Input Manager Created."
def init(self):
''' Sets the Window and Creates Input System and Objects. '''
windowHandle = 0
renderWindow = self.engine.gfxMgr.root.getAutoCreatedWindow()<|fim▁hole|> paramList = [("WINDOW", str(windowHandle))]
if os.name == "nt":
#t = [("w32_mouse","DISCL_FOREGROUND"), ("w32_mouse", "DISCL_NONEXCLUSIVE")]
t = [("w32_mouse","DISCL_FOREGROUND"), ("w32_mouse", "DISCL_EXCLUSIVE")]
else:
t = [("x11_mouse_grab", "true"), ("x11_mouse_hide", "true")]
#t = [("x11_mouse_grab", "false"), ("x11_mouse_hide", "true")]
paramList.extend(t)
self.inputManager = OIS.createPythonInputSystem(paramList)
# Now InputManager is initialized for use. Keyboard and Mouse objects
# must still be initialized separately
self.keyboard = None
self.mouse = None
try:
self.keyboard = self.inputManager.createInputObjectKeyboard(OIS.OISKeyboard, True)
self.mouse = self.inputManager.createInputObjectMouse(OIS.OISMouse, True)
#Joystick
except Exception, e:
print "No Keyboard or mouse!!!!"
raise e
if self.keyboard:
self.keyboard.setEventCallback(self)
if self.mouse:
self.mouse.setEventCallback(self)
self.windowResized( renderWindow )
print "Input Manager Initialized."
def crosslink(self):
''' Links to other Managers. '''
pass
def tick(self, dtime):
''' Update keyboard and mouse. '''
self.keyboard.capture()
self.mouse.capture()
self.handleCamera(dtime)
self.handleModifiers(dtime)
# Quit
if self.keyboard.isKeyDown(OIS.KC_ESCAPE):
self.engine.keepRunning = False
pass
def stop(self):
''' Destory Input Objects and System. '''
self.inputManager.destroyInputObjectKeyboard(self.keyboard)
self.inputManager.destroyInputObjectMouse(self.mouse)
OIS.InputManager.destroyInputSystem(self.inputManager)
self.inputManager = None
print "Input Manager Stopped."
# Keyboard Listener ----------------------------------------------------- #
def keyPressed(self, evt):
'''Handles Toggleable Key Presses'''
# Swap Cameras (Between First-Person and Debug Views)
if self.keyboard.isKeyDown(OIS.KC_G):
self.engine.camMgr.swap()
# Pause ------------------------DEBUG-----------------------------------
if self.keyboard.isKeyDown(OIS.KC_SPACE):
time.sleep(10)
return True
def keyReleased(self, evt):
return True
def handleModifiers(self, dtime):
self.leftShiftDown = self.keyboard.isKeyDown(OIS.KC_LSHIFT)
self.leftCtrlDown = self.keyboard.isKeyDown(OIS.KC_LCONTROL)
pass
def handleCamera(self, dtime):
'''Move the camera using keyboard input.'''
# Forward
if self.keyboard.isKeyDown(OIS.KC_W):
self.engine.camMgr.transVector.z -= self.move
# Backward
if self.keyboard.isKeyDown(OIS.KC_S):
self.engine.camMgr.transVector.z += self.move
# Left
if self.keyboard.isKeyDown(OIS.KC_A):
self.engine.camMgr.transVector.x -= self.move
# Right
if self.keyboard.isKeyDown(OIS.KC_D):
self.engine.camMgr.transVector.x += self.move
# Up
if self.keyboard.isKeyDown(OIS.KC_3):
self.engine.camMgr.transVector.y += self.move
# Down
if self.keyboard.isKeyDown(OIS.KC_4):
self.engine.camMgr.transVector.y -= self.move
# Yaw
if self.keyboard.isKeyDown(OIS.KC_Q):
self.engine.camMgr.yawRot = -self.rotate
# Yaw
if self.keyboard.isKeyDown(OIS.KC_E):
self.engine.camMgr.yawRot = self.rotate
# Pitch
if self.keyboard.isKeyDown(OIS.KC_Z):
self.engine.camMgr.pitchRot = -self.rotate
# Pitch
if self.keyboard.isKeyDown(OIS.KC_X):
self.engine.camMgr.pitchRot = self.rotate
# Roll
if self.keyboard.isKeyDown(OIS.KC_R):
self.engine.camMgr.rollRot = self.rotate
# Roll
if self.keyboard.isKeyDown(OIS.KC_V):
self.engine.camMgr.rollRot = -self.rotate
pass
# MouseListener --------------------------------------------------------- #
def mouseMoved(self, evt):
currMouse = self.mouse.getMouseState()
self.engine.camMgr.yawRot += currMouse.X.rel
self.engine.camMgr.pitchRot += currMouse.Y.rel
return True
def mousePressed(self, evt, id):
#self.mouse.capture()
#self.ms = self.mouse.getMouseState()
#self.ms.width = self.engine.gfxMgr.viewPort.actualWidth
#self.ms.height = self.engine.gfxMgr.viewPort.actualHeight
#self.mousePos = (self.ms.X.abs/float(self.ms.width), self.ms.Y.abs/float(self.ms.height))
if id == OIS.MB_Left:
self.MB_Left_Down = True
elif id == OIS.MB_Right:
self.MB_Right_Down = True
return True
def mouseReleased(self, evt, id):
if id == OIS.MB_Left:
self.MB_Left_Down = False
elif id == OIS.MB_Right:
self.MB_Right_Down = False
return True
# JoystickListener ------------------------------------------------------ #
def buttonPressed(self, evt, button):
return True
def buttonReleased(self, evt, button):
return True
def axisMoved(self, evt, axis):
return True
def windowResized (self, rw):
temp = 0
width, height, depth, left, top= rw.getMetrics(temp,temp,temp, temp, temp) # Note the wrapped function as default needs unsigned int's
ms = self.mouse.getMouseState()
ms.width = width
ms.height = height
# Input Manager -------------------------------------------------------------- #<|fim▁end|>
|
windowHandle = renderWindow.getCustomAttributeUnsignedLong("WINDOW")
|
<|file_name|>Sanity.py<|end_file_name|><|fim▁begin|>#===============================================================================
#
# Sanity.py
#
# This file is part of ANNarchy.
#
# Copyright (C) 2013-2016 Julien Vitay <[email protected]>,
# Helge Uelo Dinkelbach <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ANNarchy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#===============================================================================
import re
from ANNarchy.core import Global
from ANNarchy.core.PopulationView import PopulationView
from ANNarchy.models.Synapses import DefaultSpikingSynapse, DefaultRateCodedSynapse
# No variable can have these names
reserved_variables = [
't',
'dt',
't_pre',
't_post',
't_last',
'last_spike',
'rk_post',
'rk_pre',
'i',
'j',
'active',
'refractory',
'size',
]
def check_structure(populations, projections):
"""
Checks the structure before compilation to display more useful error messages.
"""
from ANNarchy.extensions.convolution.Transpose import Transpose
# Check variable names
_check_reserved_names(populations, projections)
# Check that projections are created before compile
for proj in projections:
if isinstance(proj, Transpose):
continue
if not proj._connection_method:
Global._error('The projection between populations', proj.pre.id, 'and', proj.post.id, 'has not been connected.',
' Call a connector method before compiling the network.')
# Check if the storage formats are valid for the selected paradigm
_check_storage_formats(projections)
# Check that synapses access existing variables in the pre or post neurons
_check_prepost(populations, projections)
# Check locality of variable is respected
_check_locality(populations, projections)
def check_experimental_features(populations, projections):
"""
The idea behind this method is to check whether new experimental features are used. This
should also help make the user aware of changes.
"""
# CPU-related formats
if Global.config['paradigm'] == "openmp":
for proj in projections:
if proj._storage_format == "csr" and proj._storage_order == "pre_to_post":
Global._warning("Compressed sparse row (CSR) and pre_to_post ordering representation is an experimental feature, we greatly appreciate bug reports.")
break
for proj in projections:
if proj._storage_format == "bsr":
Global._warning("Blocked sparse row (BSR) representation is an experimental feature, we greatly appreciate bug reports.")
break
for proj in projections:
if proj._storage_format == "coo":
Global._warning("Coordinate (COO) representation is an experimental feature, we greatly appreciate bug reports.")
break
for proj in projections:
if proj._storage_format == "ellr":
Global._warning("ELLPACK-R (ELLR) representation is an experimental feature, we greatly appreciate bug reports.")
break
for proj in projections:
if proj._storage_format == "ell":
Global._warning("ELLPACK (ELL) representation is an experimental feature, we greatly appreciate bug reports.")
break
for proj in projections:
if proj._storage_format == "hyb":
Global._warning("Hybrid (ELL + COO) representation is an experimental feature, we greatly appreciate bug reports.")
break
# GPU-related formats
elif Global.config['paradigm'] == "cuda":
for pop in populations:
if pop.neuron_type.description['type'] == "spike":
Global._warning('Spiking neurons on GPUs is an experimental feature. We greatly appreciate bug reports.')
break
for proj in projections:
if proj._storage_format == "ellr":
Global._warning("ELLPACK-R (ELLR) representation is an experimental feature, we greatly appreciate bug reports.")
break
for proj in projections:
if proj._storage_format == "bsr":
Global._warning("Blocked sparse row (BSR) representation is an experimental feature, we greatly appreciate bug reports.")
break
for proj in projections:
if proj._storage_format == "coo":
Global._warning("Coordinate (COO) representation is an experimental feature, we greatly appreciate bug reports.")
break
for proj in projections:
if proj._storage_format == "hyb":
Global._warning("Hybrid (ELL + COO) representation is an experimental feature, we greatly appreciate bug reports.")
break
else:
pass
def _check_reserved_names(populations, projections):
"""
Checks that no reserved variable name is redefined
"""
# Check populations
for pop in populations:
# Reserved variable names
for term in reserved_variables:
if term in pop.attributes:
Global._print(pop.neuron_type.parameters)
Global._print(pop.neuron_type.equations)
Global._error(term + ' is a reserved variable name')
# Check projections
for proj in projections:
# Reserved variable names
for term in reserved_variables:
if term in proj.attributes:<|fim▁hole|>
def _check_storage_formats(projections):
"""
ANNarchy 4.7 introduced a set of sparse matrix formats. Some of them are not implemented for
all paradigms or might not support specific optimizations.
"""
for proj in projections:
# Most of the sparse matrix formats are not trivially invertible and therefore we cannot implement
# spiking models with them
if proj.synapse_type.type == "spike" and proj._storage_format in ["ell", "ellr", "coo", "hyb"]:
raise Global.ANNarchyException("Using 'storage_format="+ proj._storage_format + "' is not allowed for spiking synapses.", True)
# For some of the sparse matrix formats we haven't implemented plasticity yet.
if proj.synapse_type.type == "spike" and proj._storage_format in ["dense"] and not isinstance(proj.synapse_type, DefaultSpikingSynapse):
raise Global.ANNarchyException("Using 'storage_format="+ proj._storage_format + "' is only allowed for default spiking synapses yet.", True)
# For some of the sparse matrix formats we haven't implemented plasticity yet.
if proj.synapse_type.type == "rate" and proj._storage_format in ["coo", "hyb"] and not isinstance(proj.synapse_type, DefaultRateCodedSynapse):
raise Global.ANNarchyException("Using 'storage_format="+ proj._storage_format + "' is only allowed for default rate-coded synapses yet.", True)
# OpenMP disabled?
if proj._storage_format in ["bsr"] and Global.config["num_threads"]>1:
raise Global.ANNarchyException("Using 'storage_format="+ proj._storage_format + "' is not available for OpenMP yet.", True)
# Single weight optimization available?
if proj._has_single_weight() and proj._storage_format in ["dense"]:
raise Global.ANNarchyException("Using 'storage_format="+ proj._storage_format + "' is not allowed for single weight projections.", True)
# Slicing available?
if isinstance(proj.post, PopulationView) and proj._storage_format in ["dense"]:
raise Global.ANNarchyException("Using 'storage_format="+ proj._storage_format + "' is not allowed for PopulationViews as target.", True)
# In some cases we don't allow the usage of non-unifom delay
if (proj.max_delay > 1 and proj.uniform_delay == -1):
if Global._check_paradigm("cuda"):
raise Global.ANNarchyException("Using non-uniform delays is not available for CUDA devices.", True)
else:
if proj._storage_format == "ellr":
raise Global.ANNarchyException("Using 'storage_format="+ proj._storage_format + "' is and non-uniform delays is not implemented.", True)
if Global._check_paradigm("cuda") and proj._storage_format == "lil":
proj._storage_format = "csr"
Global._info("LIL-type projections are not available for GPU devices ... default to CSR")
if Global._check_paradigm("cuda") and proj._storage_format == "ell":
Global._info("We would recommend to use ELLPACK-R (format=ellr) on GPUs.")
def _check_prepost(populations, projections):
"""
Checks that when a synapse uses pre.x or post.x, the variable x exists in the corresponding neuron
"""
for proj in projections:
for dep in proj.synapse_type.description['dependencies']['pre']:
if dep.startswith('sum('):
target = re.findall(r'\(([\s\w]+)\)', dep)[0].strip()
if not target in proj.pre.targets:
Global._print(proj.synapse_type.equations)
Global._error('The pre-synaptic population ' + proj.pre.name + ' receives no projection with the type ' + target)
continue
if not dep in proj.pre.attributes:
Global._print(proj.synapse_type.equations)
Global._error('The pre-synaptic population ' + proj.pre.name + ' has no variable called ' + dep)
for dep in proj.synapse_type.description['dependencies']['post']:
if dep.startswith('sum('):
target = re.findall(r'\(([\s\w]+)\)', dep)[0].strip()
if not target in proj.post.targets:
Global._print(proj.synapse_type.equations)
Global._error('The post-synaptic population ' + proj.post.name + ' receives no projection with the type ' + target)
continue
if not dep in proj.post.attributes:
Global._print(proj.synapse_type.equations)
Global._error('The post-synaptic population ' + proj.post.name + ' has no variable called ' + dep)
def _check_locality(populations, projections):
"""
Checks that a global variable does not depend on local ones.
"""
for proj in projections:
for var in proj.synapse_type.description['variables']:
if var['locality'] == 'global': # cannot depend on local or semiglobal variables
# Inside the equation
for v in var['dependencies']:
if _get_locality(v, proj.synapse_type.description) in ['local', 'semiglobal']:
Global._print(var['eq'])
Global._error('The global variable', var['name'], 'cannot depend on a synapse-specific/post-synaptic one:', v)
# As pre/post dependencies
deps = var['prepost_dependencies']
if len(deps['pre']) > 0 or len(deps['post']) > 0 :
Global._print(proj.synapse_type.equations)
Global._error('The global variable', var['name'], 'cannot depend on pre- or post-synaptic variables.')
if var['locality'] == 'semiglobal': # cannot depend on pre-synaptic variables
# Inside the equation
for v in var['dependencies']:
if _get_locality(v, proj.synapse_type.description) == 'local':
Global._print(var['eq'])
Global._error('The postsynaptic variable', var['name'], 'cannot depend on a synapse-specific one:', v)
# As pre/post dependencies
deps = var['prepost_dependencies']
if len(deps['pre']) > 0 :
Global._print(proj.synapse_type.equations)
Global._error('The postsynaptic variable', var['name'], 'cannot depend on pre-synaptic ones (e.g. pre.r).')
def _get_locality(name, description):
"Returns the locality of an attribute based on its name"
for var in description['variables'] + description['parameters']:
if var['name'] == name:
return var['locality']
return 'local'<|fim▁end|>
|
Global._print(proj.synapse_type.parameters)
Global._print(proj.synapse_type.equations)
Global._error(term + ' is a reserved variable name')
|
<|file_name|>gamma.rs<|end_file_name|><|fim▁begin|>use err::ErrMsg;
use RGSLRng;
#[derive(Debug, Clone)]
pub struct Gamma {
a: f64,
b: f64,
}
impl Gamma {
pub fn new(a: f64, b: f64) -> Result<Gamma, ()> {
if a <= 0.0 || b <= 0.0 {
return Err(());
}
Ok(Gamma { a, b })
}
#[inline]
pub fn pdf(&self, x: f64) -> f64 {
use rgsl::randist::gamma::gamma_pdf;
gamma_pdf(x, self.a, self.b)
}
}
use super::{Sample, CDF};
impl Sample for Gamma {
#[inline]
fn sample(&self, rng: &mut RGSLRng) -> f64 {
use rgsl::randist::gamma::gamma;
gamma(rng.get_gen(), self.a, self.b)
}
}
<|fim▁hole|> use rgsl::randist::gamma::gamma_P;
if x.is_sign_negative() {
panic!(ErrMsg::PositiveReal.panic_msg_with_arg(&self));
}
gamma_P(x, self.a, self.b)
}
#[inline]
fn inverse_cdf(&self, x: f64) -> f64 {
use rgsl::randist::gamma::gamma_Pinv;
if x.is_sign_negative() {
panic!(ErrMsg::PositiveReal.panic_msg_with_arg(&self));
}
gamma_Pinv(x, self.a, self.b)
}
}<|fim▁end|>
|
impl CDF for Gamma {
#[inline]
fn cdf(&self, x: f64) -> f64 {
|
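The Gamma wrapper above pairs a direct sampler with cdf/inverse_cdf; a short sketch of the generic inverse-transform idea such a pair enables, using an Exponential(1) inverse CDF as a stand-in for gamma_Pinv:

package main

import (
    "fmt"
    "math"
    "math/rand"
)

// sampleViaInverseCDF draws u ~ Uniform(0,1) and maps it through an inverse CDF.
func sampleViaInverseCDF(invCDF func(float64) float64, rng *rand.Rand) float64 {
    return invCDF(rng.Float64())
}

func main() {
    rng := rand.New(rand.NewSource(1))
    invExp := func(p float64) float64 { return -math.Log(1 - p) } // Exponential(1) inverse CDF
    fmt.Println(sampleViaInverseCDF(invExp, rng))
}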
<|file_name|>CoffeeBeans.java<|end_file_name|><|fim▁begin|><|fim▁hole|>public abstract class CoffeeBeans {
public abstract String scent();
}<|fim▁end|>
|
package be.swsb.productivity.chapter5.beans;
|
<|file_name|>archive_viewer.py<|end_file_name|><|fim▁begin|>#-----------------------------------------------------------------------------
# Copyright (c) 2013-2020, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License (version 2
# or later) with exception for distributing the bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
#-----------------------------------------------------------------------------
"""
Viewer for archives packaged by archive.py
"""
from __future__ import print_function
import argparse
import os
import pprint
import sys
import tempfile
import zlib
from PyInstaller.loader import pyimod02_archive
from PyInstaller.archive.readers import CArchiveReader, NotAnArchiveError
from PyInstaller.compat import stdin_input
import PyInstaller.log
stack = []
cleanup = []
def main(name, brief, debug, rec_debug, **unused_options):
global stack
if not os.path.isfile(name):
print(name, "is an invalid file name!", file=sys.stderr)
return 1
arch = get_archive(name)
stack.append((name, arch))
if debug or brief:
show_log(arch, rec_debug, brief)
raise SystemExit(0)
else:
show(name, arch)
while 1:
try:
toks = stdin_input('? ').split(None, 1)
except EOFError:
# Ctrl-D
print(file=sys.stderr) # Clear line.
break
if not toks:
usage()
continue
if len(toks) == 1:
cmd = toks[0]
arg = ''
else:
cmd, arg = toks
cmd = cmd.upper()
if cmd == 'U':
if len(stack) > 1:
arch = stack[-1][1]
arch.lib.close()
del stack[-1]
name, arch = stack[-1]
show(name, arch)
elif cmd == 'O':
if not arg:
arg = stdin_input('open name? ')
arg = arg.strip()
try:
arch = get_archive(arg)
except NotAnArchiveError as e:
print(e, file=sys.stderr)
continue
if arch is None:
print(arg, "not found", file=sys.stderr)
continue
stack.append((arg, arch))
show(arg, arch)
elif cmd == 'X':
if not arg:
arg = stdin_input('extract name? ')
arg = arg.strip()
data = get_data(arg, arch)
if data is None:
print("Not found", file=sys.stderr)
continue
filename = stdin_input('to filename? ')
if not filename:
print(repr(data))
else:
with open(filename, 'wb') as fp:
fp.write(data)
elif cmd == 'Q':
break
else:
usage()
do_cleanup()
def do_cleanup():
global stack, cleanup
for (name, arch) in stack:
arch.lib.close()
stack = []
for filename in cleanup:
try:
os.remove(filename)
except Exception as e:
print("couldn't delete", filename, e.args, file=sys.stderr)
cleanup = []
def usage():
print("U: go Up one level", file=sys.stderr)
print("O <name>: open embedded archive name", file=sys.stderr)
print("X <name>: extract name", file=sys.stderr)
print("Q: quit", file=sys.stderr)
def get_archive(name):
if not stack:
if name[-4:].lower() == '.pyz':
return ZlibArchive(name)
return CArchiveReader(name)
parent = stack[-1][1]
try:
return parent.openEmbedded(name)
except KeyError:
return None
except (ValueError, RuntimeError):
ndx = parent.toc.find(name)
dpos, dlen, ulen, flag, typcd, name = parent.toc[ndx]
x, data = parent.extract(ndx)
tempfilename = tempfile.mktemp()
cleanup.append(tempfilename)
with open(tempfilename, 'wb') as fp:
fp.write(data)
if typcd == 'z':
return ZlibArchive(tempfilename)
else:
return CArchiveReader(tempfilename)
def get_data(name, arch):
if isinstance(arch.toc, dict):
(ispkg, pos, length) = arch.toc.get(name, (0, None, 0))
if pos is None:
return None
with arch.lib:
arch.lib.seek(arch.start + pos)
return zlib.decompress(arch.lib.read(length))
ndx = arch.toc.find(name)
dpos, dlen, ulen, flag, typcd, name = arch.toc[ndx]
x, data = arch.extract(ndx)
return data
def show(name, arch):
if isinstance(arch.toc, dict):
print(" Name: (ispkg, pos, len)")
toc = arch.toc
else:
print(" pos, length, uncompressed, iscompressed, type, name")
toc = arch.toc.data
pprint.pprint(toc)
def get_content(arch, recursive, brief, output):
if isinstance(arch.toc, dict):
toc = arch.toc
if brief:
for name, _ in toc.items():
output.append(name)
else:
output.append(toc)
else:
toc = arch.toc.data
for el in toc:
if brief:
output.append(el[5])
else:
output.append(el)
if recursive:
if el[4] in ('z', 'a'):
get_content(get_archive(el[5]), recursive, brief, output)
stack.pop()
def show_log(arch, recursive, brief):
output = []
get_content(arch, recursive, brief, output)
# first print all TOCs
for out in output:
if isinstance(out, dict):
pprint.pprint(out)
# then print the other entries
pprint.pprint([out for out in output if not isinstance(out, dict)])
def get_archive_content(filename):
"""
Get a list of the (recursive) content of archive `filename`.
This function is primarily meant to be used by runtests.
"""
archive = get_archive(filename)
stack.append((filename, archive))
output = []
get_content(archive, recursive=True, brief=True, output=output)
do_cleanup()
return output
class ZlibArchive(pyimod02_archive.ZlibArchiveReader):
def checkmagic(self):
""" Overridable.
Check to see if the file object self.lib actually has a file
we understand.
"""
self.lib.seek(self.start) # default - magic is at start of file.
if self.lib.read(len(self.MAGIC)) != self.MAGIC:
raise RuntimeError("%s is not a valid %s archive file"
% (self.path, self.__class__.__name__))<|fim▁hole|> if self.lib.read(len(self.pymagic)) != self.pymagic:
print("Warning: pyz is from a different Python version",
file=sys.stderr)
self.lib.read(4)
def run():
parser = argparse.ArgumentParser()
parser.add_argument('-l', '--log',
default=False,
action='store_true',
dest='debug',
help='Print an archive log (default: %(default)s)')
parser.add_argument('-r', '--recursive',
default=False,
action='store_true',
dest='rec_debug',
help='Recursively print an archive log (default: %(default)s). '
'Can be combined with -r')
parser.add_argument('-b', '--brief',
default=False,
action='store_true',
dest='brief',
help='Print only file name. (default: %(default)s). '
'Can be combined with -r')
PyInstaller.log.__add_options(parser)
parser.add_argument('name', metavar='pyi_archive',
help="pyinstaller archive to show content of")
args = parser.parse_args()
PyInstaller.log.__process_options(parser, args)
try:
raise SystemExit(main(**vars(args)))
except KeyboardInterrupt:
raise SystemExit("Aborted by user request.")
if __name__ == '__main__':
run()<|fim▁end|>
| |
<|file_name|>InitializeOAuth.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
###############################################################################
#
# InitializeOAuth
# Generates an authorization URL that an application can use to complete the first step in the OAuth process.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class InitializeOAuth(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the InitializeOAuth Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(InitializeOAuth, self).__init__(temboo_session, '/Library/Bitly/OAuth/InitializeOAuth')
def new_input_set(self):
return InitializeOAuthInputSet()
def _make_result_set(self, result, path):
return InitializeOAuthResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return InitializeOAuthChoreographyExecution(session, exec_id, path)
class InitializeOAuthInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the InitializeOAuth
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AccountName(self, value):
"""
Set the value of the AccountName input for this Choreo. ((optional, string) Deprecated (retained for backward compatibility only).)
"""
super(InitializeOAuthInputSet, self)._set_input('AccountName', value)
def set_AppKeyName(self, value):
"""
Set the value of the AppKeyName input for this Choreo. ((optional, string) Deprecated (retained for backward compatibility only).)
"""
super(InitializeOAuthInputSet, self)._set_input('AppKeyName', value)
def set_AppKeyValue(self, value):
"""
Set the value of the AppKeyValue input for this Choreo. ((optional, string) Deprecated (retained for backward compatibility only).)
"""
super(InitializeOAuthInputSet, self)._set_input('AppKeyValue', value)
def set_ClientID(self, value):
"""
Set the value of the ClientID input for this Choreo. ((required, string) The Client ID provided by Bitly after registering your application.)
"""
super(InitializeOAuthInputSet, self)._set_input('ClientID', value)
def set_CustomCallbackID(self, value):
"""
Set the value of the CustomCallbackID input for this Choreo. ((optional, string) A unique identifier that you can pass to eliminate the need to wait for a Temboo generated CallbackID. Callback identifiers may only contain numbers, letters, periods, and hyphens.)
"""
super(InitializeOAuthInputSet, self)._set_input('CustomCallbackID', value)
def set_ForwardingURL(self, value):
"""
Set the value of the ForwardingURL input for this Choreo. ((optional, string) The URL that Temboo will redirect your users to after they grant access to your application. This should include the "https://" or "http://" prefix and be a fully qualified URL.)
"""<|fim▁hole|> """
A ResultSet with methods tailored to the values returned by the InitializeOAuth Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_AuthorizationURL(self):
"""
Retrieve the value for the "AuthorizationURL" output from this Choreo execution. ((string) The authorization URL that the application's user needs to go to in order to grant access to your application.)
"""
return self._output.get('AuthorizationURL', None)
def get_CallbackID(self):
"""
Retrieve the value for the "CallbackID" output from this Choreo execution. ((string) An ID used to retrieve the callback data that Temboo stores once your application's user authorizes.)
"""
return self._output.get('CallbackID', None)
class InitializeOAuthChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return InitializeOAuthResultSet(response, path)<|fim▁end|>
|
super(InitializeOAuthInputSet, self)._set_input('ForwardingURL', value)
class InitializeOAuthResultSet(ResultSet):
|
<|file_name|>GeoLocation.js<|end_file_name|><|fim▁begin|>(function() {
var myPromise = new Promise((resolve, reject) => {
navigator.geolocation.getCurrentPosition((pos) => {
resolve(pos);
})
});
function parsePosition(pos) {
return {
lat: pos.coords.latitude,
lon: pos.coords.longitude
}
}
function displayMap(pos) {
let img = document.getElementById('theImg');
img.src = "http://maps.googleapis.com/maps/api/staticmap?center=" + pos.lat + "," + pos.lon + "&zoom=13&size=500x500&sensor=false";
}
myPromise
.then(parsePosition)<|fim▁hole|>}());<|fim▁end|>
|
.then(console.log)
|
<|file_name|>implementation.js<|end_file_name|><|fim▁begin|>import { remove, attempt, isError } from 'lodash';
import uuid from 'uuid/v4';
import { fileExtension } from 'Lib/pathHelper'
import AuthenticationPage from './AuthenticationPage';
window.repoFiles = window.repoFiles || {};
function getFile(path) {
const segments = path.split('/');
let obj = window.repoFiles;
while (obj && segments.length) {
obj = obj[segments.shift()];
}
return obj || {};
}
export default class TestRepo {
constructor(config) {
this.config = config;
this.assets = [];
}
authComponent() {
return AuthenticationPage;
}
restoreUser(user) {
return this.authenticate(user);
}<|fim▁hole|> }
logout() {
return null;
}
getToken() {
return Promise.resolve('');
}
entriesByFolder(collection, extension) {
const entries = [];
const folder = collection.get('folder');
if (folder) {
for (const path in window.repoFiles[folder]) {
if (fileExtension(path) !== extension) {
continue;
}
const file = { path: `${ folder }/${ path }` };
entries.push(
{
file,
data: window.repoFiles[folder][path].content,
}
);
}
}
return Promise.resolve(entries);
}
entriesByFiles(collection) {
const files = collection.get('files').map(collectionFile => ({
path: collectionFile.get('file'),
label: collectionFile.get('label'),
}));
return Promise.all(files.map(file => ({
file,
data: getFile(file.path).content,
})));
}
getEntry(collection, slug, path) {
return Promise.resolve({
file: { path },
data: getFile(path).content,
});
}
persistEntry(entry, mediaFiles = [], options) {
const newEntry = options.newEntry || false;
const folder = entry.path.substring(0, entry.path.lastIndexOf('/'));
const fileName = entry.path.substring(entry.path.lastIndexOf('/') + 1);
window.repoFiles[folder] = window.repoFiles[folder] || {};
window.repoFiles[folder][fileName] = window.repoFiles[folder][fileName] || {};
if (newEntry) {
window.repoFiles[folder][fileName] = { content: entry.raw };
} else {
window.repoFiles[folder][fileName].content = entry.raw;
}
return Promise.resolve();
}
getMedia() {
return Promise.resolve(this.assets);
}
persistMedia({ fileObj }) {
const { name, size } = fileObj;
const objectUrl = attempt(window.URL.createObjectURL, fileObj);
const url = isError(objectUrl) ? '' : objectUrl;
const normalizedAsset = { id: uuid(), name, size, path: url, url };
this.assets.push(normalizedAsset);
return Promise.resolve(normalizedAsset);
}
deleteFile(path, commitMessage) {
const assetIndex = this.assets.findIndex(asset => asset.path === path);
if (assetIndex > -1) {
this.assets.splice(assetIndex, 1);
}
else {
const folder = path.substring(0, path.lastIndexOf('/'));
const fileName = path.substring(path.lastIndexOf('/') + 1);
delete window.repoFiles[folder][fileName];
}
return Promise.resolve();
}
}<|fim▁end|>
|
authenticate() {
return Promise.resolve();
|
<|file_name|>seed.go<|end_file_name|><|fim▁begin|>package wallet
import (
"bytes"
"crypto/rand"
"errors"
"path/filepath"
"github.com/NebulousLabs/Sia/build"
"github.com/NebulousLabs/Sia/crypto"
"github.com/NebulousLabs/Sia/modules"
"github.com/NebulousLabs/Sia/persist"
"github.com/NebulousLabs/Sia/types"
)
const (
seedFilePrefix = "Sia Wallet Encrypted Backup Seed - "
seedFileSuffix = ".seed"
)
var (
errAddressExhaustion = errors.New("current seed has used all available addresses")
errKnownSeed = errors.New("seed is already known")
)
type (
// UniqueID is a unique id randomly generated and put at the front of every
// persistence object. It is used to make sure that a different encryption
// key can be used for every persistence object.
UniqueID [crypto.EntropySize]byte
// SeedFile stores an encrypted wallet seed on disk.
SeedFile struct {
UID UniqueID
EncryptionVerification crypto.Ciphertext
Seed crypto.Ciphertext
}
)
// generateUnlockConditions provides the unlock conditions that would be
// automatically generated from the input public key.
func generateUnlockConditions(pk crypto.PublicKey) types.UnlockConditions {
return types.UnlockConditions{
PublicKeys: []types.SiaPublicKey{{
Algorithm: types.SignatureEd25519,
Key: pk[:],
}},
SignaturesRequired: 1,
}
}
// generateSpendableKey creates the keys and unlock conditions for a given index of a
// seed.
func generateSpendableKey(seed modules.Seed, index uint64) spendableKey {
// Generate the keys and unlock conditions.
entropy := crypto.HashAll(seed, index)
sk, pk := crypto.GenerateKeyPairDeterministic(entropy)
return spendableKey{
UnlockConditions: generateUnlockConditions(pk),
SecretKeys: []crypto.SecretKey{sk},
}
}
// encryptAndSaveSeedFile encrypts and saves a seed file.
func (w *Wallet) encryptAndSaveSeedFile(masterKey crypto.TwofishKey, seed modules.Seed) (SeedFile, error) {
var sf SeedFile
_, err := rand.Read(sf.UID[:])
if err != nil {
return SeedFile{}, err
}
sek := uidEncryptionKey(masterKey, sf.UID)
plaintextVerification := make([]byte, encryptionVerificationLen)
sf.EncryptionVerification, err = sek.EncryptBytes(plaintextVerification)
if err != nil {
return SeedFile{}, err
}
sf.Seed, err = sek.EncryptBytes(seed[:])
if err != nil {
return SeedFile{}, err
}
seedFilename := filepath.Join(w.persistDir, seedFilePrefix+persist.RandomSuffix()+seedFileSuffix)
err = persist.SaveFile(seedMetadata, sf, seedFilename)
if err != nil {
return SeedFile{}, err
}
return sf, nil
}
// decryptSeedFile decrypts a seed file using the encryption key.
func decryptSeedFile(masterKey crypto.TwofishKey, sf SeedFile) (seed modules.Seed, err error) {
// Verify that the provided master key is the correct key.
decryptionKey := uidEncryptionKey(masterKey, sf.UID)
expectedDecryptedVerification := make([]byte, crypto.EntropySize)
decryptedVerification, err := decryptionKey.DecryptBytes(sf.EncryptionVerification)
if err != nil {
return modules.Seed{}, err
}
if !bytes.Equal(expectedDecryptedVerification, decryptedVerification) {
return modules.Seed{}, modules.ErrBadEncryptionKey
}
// Decrypt and return the seed.
plainSeed, err := decryptionKey.DecryptBytes(sf.Seed)
if err != nil {
return modules.Seed{}, err
}
copy(seed[:], plainSeed)
return seed, nil
}
// integrateSeed takes an address seed as input and from that generates
// 'publicKeysPerSeed' addresses that the wallet is able to spend.
// integrateSeed should not be called with the primary seed.
func (w *Wallet) integrateSeed(seed modules.Seed) {
for i := uint64(0); i < modules.PublicKeysPerSeed; i++ {
// Generate the key and check it is new to the wallet.
spendableKey := generateSpendableKey(seed, i)
w.keys[spendableKey.UnlockConditions.UnlockHash()] = spendableKey
}
w.seeds = append(w.seeds, seed)
}
// recoverSeed integrates a recovery seed into the wallet.
func (w *Wallet) recoverSeed(masterKey crypto.TwofishKey, seed modules.Seed) error {
// Because the recovery seed does not have a UID, duplication must be
// prevented by comparing with the list of decrypted seeds. This can only
// occur while the wallet is unlocked.
if !w.unlocked {
return modules.ErrLockedWallet
}
// Check that the seed is not already known.
for _, wSeed := range w.seeds {
if seed == wSeed {
return errKnownSeed
}
}
if seed == w.primarySeed {
return errKnownSeed
}
seedFile, err := w.encryptAndSaveSeedFile(masterKey, seed)
if err != nil {
return err
}
// Add the seed file to the wallet's set of tracked seeds and save the
// wallet settings.
w.persist.AuxiliarySeedFiles = append(w.persist.AuxiliarySeedFiles, seedFile)
err = w.saveSettings()
if err != nil {
return err
}
w.integrateSeed(seed)
return nil
}
// createSeed creates a wallet seed and encrypts it using a key derived from
// the master key, then adds it to the wallet as the primary seed, while
// making a disk backup.
func (w *Wallet) createSeed(masterKey crypto.TwofishKey, seed modules.Seed) error {
seedFile, err := w.encryptAndSaveSeedFile(masterKey, seed)
if err != nil {
return err
}
w.primarySeed = seed
w.persist.PrimarySeedFile = seedFile
w.persist.PrimarySeedProgress = 0
// The wallet preloads keys to prevent confusion for people using the same
// seed/wallet file in multiple places.
for i := uint64(0); i < modules.WalletSeedPreloadDepth; i++ {
spendableKey := generateSpendableKey(seed, i)
w.keys[spendableKey.UnlockConditions.UnlockHash()] = spendableKey
}
return w.saveSettings()
}
// initPrimarySeed loads the primary seed into the wallet.
func (w *Wallet) initPrimarySeed(masterKey crypto.TwofishKey) error {
seed, err := decryptSeedFile(masterKey, w.persist.PrimarySeedFile)
if err != nil {
return err
}
// The wallet preloads keys to prevent confusion when using the same wallet
// in multiple places.
for i := uint64(0); i < w.persist.PrimarySeedProgress+modules.WalletSeedPreloadDepth; i++ {
spendableKey := generateSpendableKey(seed, i)
w.keys[spendableKey.UnlockConditions.UnlockHash()] = spendableKey
}
w.primarySeed = seed
w.seeds = append(w.seeds, seed)
return nil
}
// initAuxiliarySeeds scans the wallet folder for wallet seeds.
func (w *Wallet) initAuxiliarySeeds(masterKey crypto.TwofishKey) error {<|fim▁hole|> }
if err != nil {
w.log.Println("UNLOCK: failed to load an auxiliary seed:", err)
continue
}
w.integrateSeed(seed)
}
return nil
}
// nextPrimarySeedAddress fetches the next address from the primary seed.
func (w *Wallet) nextPrimarySeedAddress() (types.UnlockConditions, error) {
// Check that the wallet has been unlocked.
if !w.unlocked {
return types.UnlockConditions{}, modules.ErrLockedWallet
}
// Integrate the next key into the wallet, and return the unlock
// conditions. Because the wallet preloads keys, the progress used is
// 'PrimarySeedProgress+modules.WalletSeedPreloadDepth'.
spendableKey := generateSpendableKey(w.primarySeed, w.persist.PrimarySeedProgress+modules.WalletSeedPreloadDepth)
w.keys[spendableKey.UnlockConditions.UnlockHash()] = spendableKey
w.persist.PrimarySeedProgress++
err := w.saveSettings()
if err != nil {
return types.UnlockConditions{}, err
}
return spendableKey.UnlockConditions, nil
}
// AllSeeds returns a list of all seeds known to and used by the wallet.
func (w *Wallet) AllSeeds() ([]modules.Seed, error) {
w.mu.Lock()
defer w.mu.Unlock()
if !w.unlocked {
return nil, modules.ErrLockedWallet
}
return w.seeds, nil
}
// PrimarySeed returns the decrypted primary seed of the wallet.
func (w *Wallet) PrimarySeed() (modules.Seed, uint64, error) {
w.mu.Lock()
defer w.mu.Unlock()
if !w.unlocked {
return modules.Seed{}, 0, modules.ErrLockedWallet
}
return w.primarySeed, w.persist.PrimarySeedProgress, nil
}
// NextAddress returns an unlock hash that is ready to receive siacoins or
// siafunds. The address is generated using the primary address seed.
func (w *Wallet) NextAddress() (types.UnlockConditions, error) {
w.mu.Lock()
defer w.mu.Unlock()
return w.nextPrimarySeedAddress()
}
// LoadSeed will track all of the addresses generated by the input seed,
// reclaiming any funds that were lost due to a deleted file or lost encryption
// key. An error will be returned if the seed has already been integrated with
// the wallet.
func (w *Wallet) LoadSeed(masterKey crypto.TwofishKey, seed modules.Seed) error {
w.mu.Lock()
defer w.mu.Unlock()
err := w.checkMasterKey(masterKey)
if err != nil {
return err
}
return w.recoverSeed(masterKey, seed)
}<|fim▁end|>
|
for _, seedFile := range w.persist.AuxiliarySeedFiles {
seed, err := decryptSeedFile(masterKey, seedFile)
if build.DEBUG && err != nil {
panic(err)
|