Dataset columns: prompt (large_string, lengths 70 to 991k) and completion (large_string, lengths 0 to 1.02k).
File: css.js

var fs = require('fs'),
path = require('path'),
cleanCSS = require('clean-css'),
rjs = require('requirejs');
module.exports = function(grunt) {
// The **css** task works pretty much the same as grunt's min task. The task
// target is the destination, data is an array of glob patterns. These
// files are concatenated and run through the requirejs optimizer to handle
// @import inlining in CSS files.
grunt.task.registerMultiTask('css', 'Concats, replaces @imports and minifies the CSS files', function() {
this.requiresConfig('staging');
// if defined, files get prepended by the output config value
var files = this.data;
// subtarget name is the output destination
var target = this.target;
// async task
var cb = this.async();
// concat prior to rjs optimize css, and before min max info
grunt.log.write('Writing css files to ' + target + '...');
var out = grunt.helper('mincss', files);
// only go through if there are files to process
if(!out) {
return cb();
}
// write minified file before going through rjs:optimize to possibly inline
// @imports (that are not handled by compass within .scss or .sass files)
grunt.file.write(target, out);
// replace @import statements
//
// XXX no error handling in this helper so far.
// Check that rjs returns an error when something goes wrong (if it throws...)
// and if it does, bubble the error back up here.
grunt.helper('rjs:optimize:css', target, function() {
// do the minification once inline imports are done
grunt.log.ok();
cb();
});
});
//
// **mincss** basic utility to concat CSS files and run them through
// [cleanCSS](https://github.com/GoalSmashers/clean-css), might opt to use
// [https://github.com/jzaefferer/grunt-css] plugin.
//
grunt.registerHelper('mincss', function(files, o) {
o = o || {};
files = grunt.file.expandFiles(files);
return files.map(function(filepath) {
var content = grunt.file.read(filepath);
return o.nocompress ? content : cleanCSS.process(content);
}).join('');
});
// **rjs:optimize:css** is a helper using rjs to optimize a single file,
// mainly to properly import multi-level of @import statements, which can be
// tricky with all the url rewrites.
//
// file - Path to the css file to optimize
// options - (optional) rjs configuration
// cb - callback function to call on completion
grunt.registerHelper('rjs:optimize:css', function(file, options, cb) {
if(!cb) { cb = options; options = {}; }
options.cssIn = file;
options.out = options.out || file;
options.optimizeCss = 'standard.keepComments.keepLines';
var before = grunt.file.read(file);
rjs.optimize(options, function() {
grunt.helper('min_max_info', grunt.file.read(file), before);
cb();
});
});
};
File: bracket-push.rs

extern crate bracket_push;
use bracket_push::*;
#[test]
fn paired_square_brackets() {
assert!(Brackets::from("[]").are_balanced());
}
#[test]
fn empty_string() {
assert!(Brackets::from("").are_balanced());
}
#[test]
fn unpaired_brackets() {
assert!(!Brackets::from("[[").are_balanced());
}
#[test]
fn wrong_ordered_brackets() {
assert!(!Brackets::from("}{").are_balanced());
}
#[test]
fn wrong_closing_bracket() {
assert!(!Brackets::from("{]").are_balanced());
}
#[test]
fn paired_with_whitespace() {
assert!(Brackets::from("{ }").are_balanced());
}
#[test]
fn simple_nested_brackets() {
assert!(Brackets::from("{[]}").are_balanced());
}
#[test]
fn several_paired_brackets() {
assert!(Brackets::from("{}[]").are_balanced());
}
#[test]
fn paired_and_nested_brackets() {
assert!(Brackets::from("([{}({}[])])").are_balanced());
}
#[test]
fn unopened_closing_brackets() {
assert!(!Brackets::from("{[)][]}").are_balanced());
}
#[test]
fn unpaired_and_nested_brackets() {
assert!(!Brackets::from("([{])").are_balanced());
}
#[test]
fn paired_and_wrong_nested_brackets() {
assert!(!Brackets::from("[({]})").are_balanced());
}
#[test]
fn math_expression() {
assert!(Brackets::from("(((185 + 223.85) * 15) - 543)/2").are_balanced());
}
#[test]
fn complex_latex_expression() {
let input = "\\left(\\begin{array}{cc} \\frac{1}{3} & x\\\\ \\mathrm{e}^{x} &... x^2 \
\\end{array}\\right)";
assert!(Brackets::from(input).are_balanced());
}
File: interfaces.go

package storageapi
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/azure-sdk-for-go/services/storage/mgmt/2021-08-01/storage"
"github.com/Azure/go-autorest/autorest"
)
// OperationsClientAPI contains the set of methods on the OperationsClient type.
type OperationsClientAPI interface {
List(ctx context.Context) (result storage.OperationListResult, err error)
}
var _ OperationsClientAPI = (*storage.OperationsClient)(nil)
// SkusClientAPI contains the set of methods on the SkusClient type.
type SkusClientAPI interface {
List(ctx context.Context) (result storage.SkuListResult, err error)
}
var _ SkusClientAPI = (*storage.SkusClient)(nil)
// AccountsClientAPI contains the set of methods on the AccountsClient type.
type AccountsClientAPI interface {
AbortHierarchicalNamespaceMigration(ctx context.Context, resourceGroupName string, accountName string) (result storage.AccountsAbortHierarchicalNamespaceMigrationFuture, err error)
CheckNameAvailability(ctx context.Context, accountName storage.AccountCheckNameAvailabilityParameters) (result storage.CheckNameAvailabilityResult, err error)
Create(ctx context.Context, resourceGroupName string, accountName string, parameters storage.AccountCreateParameters) (result storage.AccountsCreateFuture, err error)
Delete(ctx context.Context, resourceGroupName string, accountName string) (result autorest.Response, err error)
Failover(ctx context.Context, resourceGroupName string, accountName string) (result storage.AccountsFailoverFuture, err error)
GetProperties(ctx context.Context, resourceGroupName string, accountName string, expand storage.AccountExpand) (result storage.Account, err error)
HierarchicalNamespaceMigration(ctx context.Context, resourceGroupName string, accountName string, requestType string) (result storage.AccountsHierarchicalNamespaceMigrationFuture, err error)
List(ctx context.Context) (result storage.AccountListResultPage, err error)
ListComplete(ctx context.Context) (result storage.AccountListResultIterator, err error)
ListAccountSAS(ctx context.Context, resourceGroupName string, accountName string, parameters storage.AccountSasParameters) (result storage.ListAccountSasResponse, err error)
ListByResourceGroup(ctx context.Context, resourceGroupName string) (result storage.AccountListResultPage, err error)
ListByResourceGroupComplete(ctx context.Context, resourceGroupName string) (result storage.AccountListResultIterator, err error)
ListKeys(ctx context.Context, resourceGroupName string, accountName string, expand storage.ListKeyExpand) (result storage.AccountListKeysResult, err error)
ListServiceSAS(ctx context.Context, resourceGroupName string, accountName string, parameters storage.ServiceSasParameters) (result storage.ListServiceSasResponse, err error)
RegenerateKey(ctx context.Context, resourceGroupName string, accountName string, regenerateKey storage.AccountRegenerateKeyParameters) (result storage.AccountListKeysResult, err error)
RestoreBlobRanges(ctx context.Context, resourceGroupName string, accountName string, parameters storage.BlobRestoreParameters) (result storage.AccountsRestoreBlobRangesFuture, err error)
RevokeUserDelegationKeys(ctx context.Context, resourceGroupName string, accountName string) (result autorest.Response, err error)
Update(ctx context.Context, resourceGroupName string, accountName string, parameters storage.AccountUpdateParameters) (result storage.Account, err error)
}
var _ AccountsClientAPI = (*storage.AccountsClient)(nil)
// DeletedAccountsClientAPI contains the set of methods on the DeletedAccountsClient type.
type DeletedAccountsClientAPI interface {
Get(ctx context.Context, deletedAccountName string, location string) (result storage.DeletedAccount, err error)
List(ctx context.Context) (result storage.DeletedAccountListResultPage, err error)
ListComplete(ctx context.Context) (result storage.DeletedAccountListResultIterator, err error)
}
var _ DeletedAccountsClientAPI = (*storage.DeletedAccountsClient)(nil)
// UsagesClientAPI contains the set of methods on the UsagesClient type.
type UsagesClientAPI interface {
ListByLocation(ctx context.Context, location string) (result storage.UsageListResult, err error)
}
var _ UsagesClientAPI = (*storage.UsagesClient)(nil)
// ManagementPoliciesClientAPI contains the set of methods on the ManagementPoliciesClient type.
type ManagementPoliciesClientAPI interface {
CreateOrUpdate(ctx context.Context, resourceGroupName string, accountName string, properties storage.ManagementPolicy) (result storage.ManagementPolicy, err error)
Delete(ctx context.Context, resourceGroupName string, accountName string) (result autorest.Response, err error)
Get(ctx context.Context, resourceGroupName string, accountName string) (result storage.ManagementPolicy, err error)
}
var _ ManagementPoliciesClientAPI = (*storage.ManagementPoliciesClient)(nil)
// BlobInventoryPoliciesClientAPI contains the set of methods on the BlobInventoryPoliciesClient type.
type BlobInventoryPoliciesClientAPI interface {
CreateOrUpdate(ctx context.Context, resourceGroupName string, accountName string, properties storage.BlobInventoryPolicy) (result storage.BlobInventoryPolicy, err error)
Delete(ctx context.Context, resourceGroupName string, accountName string) (result autorest.Response, err error)
Get(ctx context.Context, resourceGroupName string, accountName string) (result storage.BlobInventoryPolicy, err error)
List(ctx context.Context, resourceGroupName string, accountName string) (result storage.ListBlobInventoryPolicy, err error)
}
var _ BlobInventoryPoliciesClientAPI = (*storage.BlobInventoryPoliciesClient)(nil)
// PrivateEndpointConnectionsClientAPI contains the set of methods on the PrivateEndpointConnectionsClient type.
type PrivateEndpointConnectionsClientAPI interface {
Delete(ctx context.Context, resourceGroupName string, accountName string, privateEndpointConnectionName string) (result autorest.Response, err error)
Get(ctx context.Context, resourceGroupName string, accountName string, privateEndpointConnectionName string) (result storage.PrivateEndpointConnection, err error)
List(ctx context.Context, resourceGroupName string, accountName string) (result storage.PrivateEndpointConnectionListResult, err error)
Put(ctx context.Context, resourceGroupName string, accountName string, privateEndpointConnectionName string, properties storage.PrivateEndpointConnection) (result storage.PrivateEndpointConnection, err error)
}
var _ PrivateEndpointConnectionsClientAPI = (*storage.PrivateEndpointConnectionsClient)(nil)
// PrivateLinkResourcesClientAPI contains the set of methods on the PrivateLinkResourcesClient type.
type PrivateLinkResourcesClientAPI interface {
ListByStorageAccount(ctx context.Context, resourceGroupName string, accountName string) (result storage.PrivateLinkResourceListResult, err error)
}
var _ PrivateLinkResourcesClientAPI = (*storage.PrivateLinkResourcesClient)(nil)
// ObjectReplicationPoliciesClientAPI contains the set of methods on the ObjectReplicationPoliciesClient type.
type ObjectReplicationPoliciesClientAPI interface {
CreateOrUpdate(ctx context.Context, resourceGroupName string, accountName string, objectReplicationPolicyID string, properties storage.ObjectReplicationPolicy) (result storage.ObjectReplicationPolicy, err error)
Delete(ctx context.Context, resourceGroupName string, accountName string, objectReplicationPolicyID string) (result autorest.Response, err error)
Get(ctx context.Context, resourceGroupName string, accountName string, objectReplicationPolicyID string) (result storage.ObjectReplicationPolicy, err error)
List(ctx context.Context, resourceGroupName string, accountName string) (result storage.ObjectReplicationPolicies, err error)
}
var _ ObjectReplicationPoliciesClientAPI = (*storage.ObjectReplicationPoliciesClient)(nil)
// LocalUsersClientAPI contains the set of methods on the LocalUsersClient type.
type LocalUsersClientAPI interface {
CreateOrUpdate(ctx context.Context, resourceGroupName string, accountName string, username string, properties storage.LocalUser) (result storage.LocalUser, err error)
Delete(ctx context.Context, resourceGroupName string, accountName string, username string) (result autorest.Response, err error)
Get(ctx context.Context, resourceGroupName string, accountName string, username string) (result storage.LocalUser, err error)
List(ctx context.Context, resourceGroupName string, accountName string) (result storage.LocalUsers, err error)
ListKeys(ctx context.Context, resourceGroupName string, accountName string, username string) (result storage.LocalUserKeys, err error)
RegeneratePassword(ctx context.Context, resourceGroupName string, accountName string, username string) (result storage.LocalUserRegeneratePasswordResult, err error)
}
var _ LocalUsersClientAPI = (*storage.LocalUsersClient)(nil)
// EncryptionScopesClientAPI contains the set of methods on the EncryptionScopesClient type.
type EncryptionScopesClientAPI interface {
Get(ctx context.Context, resourceGroupName string, accountName string, encryptionScopeName string) (result storage.EncryptionScope, err error)
List(ctx context.Context, resourceGroupName string, accountName string) (result storage.EncryptionScopeListResultPage, err error)
ListComplete(ctx context.Context, resourceGroupName string, accountName string) (result storage.EncryptionScopeListResultIterator, err error)
Patch(ctx context.Context, resourceGroupName string, accountName string, encryptionScopeName string, encryptionScope storage.EncryptionScope) (result storage.EncryptionScope, err error)
Put(ctx context.Context, resourceGroupName string, accountName string, encryptionScopeName string, encryptionScope storage.EncryptionScope) (result storage.EncryptionScope, err error)
}
var _ EncryptionScopesClientAPI = (*storage.EncryptionScopesClient)(nil)
// BlobServicesClientAPI contains the set of methods on the BlobServicesClient type.
type BlobServicesClientAPI interface {
GetServiceProperties(ctx context.Context, resourceGroupName string, accountName string) (result storage.BlobServiceProperties, err error)
List(ctx context.Context, resourceGroupName string, accountName string) (result storage.BlobServiceItems, err error)
SetServiceProperties(ctx context.Context, resourceGroupName string, accountName string, parameters storage.BlobServiceProperties) (result storage.BlobServiceProperties, err error)
}
var _ BlobServicesClientAPI = (*storage.BlobServicesClient)(nil)
// BlobContainersClientAPI contains the set of methods on the BlobContainersClient type.
type BlobContainersClientAPI interface {
ClearLegalHold(ctx context.Context, resourceGroupName string, accountName string, containerName string, legalHold storage.LegalHold) (result storage.LegalHold, err error)
Create(ctx context.Context, resourceGroupName string, accountName string, containerName string, blobContainer storage.BlobContainer) (result storage.BlobContainer, err error)
CreateOrUpdateImmutabilityPolicy(ctx context.Context, resourceGroupName string, accountName string, containerName string, parameters *storage.ImmutabilityPolicy, ifMatch string) (result storage.ImmutabilityPolicy, err error)
Delete(ctx context.Context, resourceGroupName string, accountName string, containerName string) (result autorest.Response, err error)
DeleteImmutabilityPolicy(ctx context.Context, resourceGroupName string, accountName string, containerName string, ifMatch string) (result storage.ImmutabilityPolicy, err error)
ExtendImmutabilityPolicy(ctx context.Context, resourceGroupName string, accountName string, containerName string, ifMatch string, parameters *storage.ImmutabilityPolicy) (result storage.ImmutabilityPolicy, err error)
Get(ctx context.Context, resourceGroupName string, accountName string, containerName string) (result storage.BlobContainer, err error)
GetImmutabilityPolicy(ctx context.Context, resourceGroupName string, accountName string, containerName string, ifMatch string) (result storage.ImmutabilityPolicy, err error)
Lease(ctx context.Context, resourceGroupName string, accountName string, containerName string, parameters *storage.LeaseContainerRequest) (result storage.LeaseContainerResponse, err error)
List(ctx context.Context, resourceGroupName string, accountName string, maxpagesize string, filter string, include storage.ListContainersInclude) (result storage.ListContainerItemsPage, err error)
ListComplete(ctx context.Context, resourceGroupName string, accountName string, maxpagesize string, filter string, include storage.ListContainersInclude) (result storage.ListContainerItemsIterator, err error)
LockImmutabilityPolicy(ctx context.Context, resourceGroupName string, accountName string, containerName string, ifMatch string) (result storage.ImmutabilityPolicy, err error)
ObjectLevelWorm(ctx context.Context, resourceGroupName string, accountName string, containerName string) (result storage.BlobContainersObjectLevelWormFuture, err error)
SetLegalHold(ctx context.Context, resourceGroupName string, accountName string, containerName string, legalHold storage.LegalHold) (result storage.LegalHold, err error)
Update(ctx context.Context, resourceGroupName string, accountName string, containerName string, blobContainer storage.BlobContainer) (result storage.BlobContainer, err error)
}
var _ BlobContainersClientAPI = (*storage.BlobContainersClient)(nil)
// FileServicesClientAPI contains the set of methods on the FileServicesClient type.
type FileServicesClientAPI interface {
GetServiceProperties(ctx context.Context, resourceGroupName string, accountName string) (result storage.FileServiceProperties, err error)
List(ctx context.Context, resourceGroupName string, accountName string) (result storage.FileServiceItems, err error)
SetServiceProperties(ctx context.Context, resourceGroupName string, accountName string, parameters storage.FileServiceProperties) (result storage.FileServiceProperties, err error)
}
var _ FileServicesClientAPI = (*storage.FileServicesClient)(nil)
// FileSharesClientAPI contains the set of methods on the FileSharesClient type.
type FileSharesClientAPI interface {
Create(ctx context.Context, resourceGroupName string, accountName string, shareName string, fileShare storage.FileShare, expand string) (result storage.FileShare, err error)
Delete(ctx context.Context, resourceGroupName string, accountName string, shareName string, xMsSnapshot string, include string) (result autorest.Response, err error)
Get(ctx context.Context, resourceGroupName string, accountName string, shareName string, expand string, xMsSnapshot string) (result storage.FileShare, err error)
Lease(ctx context.Context, resourceGroupName string, accountName string, shareName string, parameters *storage.LeaseShareRequest, xMsSnapshot string) (result storage.LeaseShareResponse, err error)
List(ctx context.Context, resourceGroupName string, accountName string, maxpagesize string, filter string, expand string) (result storage.FileShareItemsPage, err error)
ListComplete(ctx context.Context, resourceGroupName string, accountName string, maxpagesize string, filter string, expand string) (result storage.FileShareItemsIterator, err error)
Restore(ctx context.Context, resourceGroupName string, accountName string, shareName string, deletedShare storage.DeletedShare) (result autorest.Response, err error)
Update(ctx context.Context, resourceGroupName string, accountName string, shareName string, fileShare storage.FileShare) (result storage.FileShare, err error)
}
var _ FileSharesClientAPI = (*storage.FileSharesClient)(nil)
// QueueServicesClientAPI contains the set of methods on the QueueServicesClient type.
type QueueServicesClientAPI interface {
GetServiceProperties(ctx context.Context, resourceGroupName string, accountName string) (result storage.QueueServiceProperties, err error)
List(ctx context.Context, resourceGroupName string, accountName string) (result storage.ListQueueServices, err error)
SetServiceProperties(ctx context.Context, resourceGroupName string, accountName string, parameters storage.QueueServiceProperties) (result storage.QueueServiceProperties, err error)
}
var _ QueueServicesClientAPI = (*storage.QueueServicesClient)(nil)
// QueueClientAPI contains the set of methods on the QueueClient type.
type QueueClientAPI interface {
Create(ctx context.Context, resourceGroupName string, accountName string, queueName string, queue storage.Queue) (result storage.Queue, err error)
Delete(ctx context.Context, resourceGroupName string, accountName string, queueName string) (result autorest.Response, err error)
Get(ctx context.Context, resourceGroupName string, accountName string, queueName string) (result storage.Queue, err error)
List(ctx context.Context, resourceGroupName string, accountName string, maxpagesize string, filter string) (result storage.ListQueueResourcePage, err error)
ListComplete(ctx context.Context, resourceGroupName string, accountName string, maxpagesize string, filter string) (result storage.ListQueueResourceIterator, err error)
Update(ctx context.Context, resourceGroupName string, accountName string, queueName string, queue storage.Queue) (result storage.Queue, err error)
}
var _ QueueClientAPI = (*storage.QueueClient)(nil)
// TableServicesClientAPI contains the set of methods on the TableServicesClient type.
type TableServicesClientAPI interface {
GetServiceProperties(ctx context.Context, resourceGroupName string, accountName string) (result storage.TableServiceProperties, err error)
List(ctx context.Context, resourceGroupName string, accountName string) (result storage.ListTableServices, err error)
SetServiceProperties(ctx context.Context, resourceGroupName string, accountName string, parameters storage.TableServiceProperties) (result storage.TableServiceProperties, err error)
}
var _ TableServicesClientAPI = (*storage.TableServicesClient)(nil)
// TableClientAPI contains the set of methods on the TableClient type.
type TableClientAPI interface {
Create(ctx context.Context, resourceGroupName string, accountName string, tableName string) (result storage.Table, err error)
Delete(ctx context.Context, resourceGroupName string, accountName string, tableName string) (result autorest.Response, err error)
Get(ctx context.Context, resourceGroupName string, accountName string, tableName string) (result storage.Table, err error)
List(ctx context.Context, resourceGroupName string, accountName string) (result storage.ListTableResourcePage, err error)
ListComplete(ctx context.Context, resourceGroupName string, accountName string) (result storage.ListTableResourceIterator, err error)
Update(ctx context.Context, resourceGroupName string, accountName string, tableName string) (result storage.Table, err error)
}
var _ TableClientAPI = (*storage.TableClient)(nil)
File: 0066_auto_20210428_0912.py

# Generated by Django 2.2.19 on 2021-04-28 09:12
from django.db import migrations, models
import image_cropping.fields
class Migration(migrations.Migration):
dependencies = [
('markets', '0065_auto_20190426_1255'),
]
operations = [
migrations.AlterField(
model_name="logo",
name="cropping",
field=image_cropping.fields.ImageRatioField(
"image",
"400x302",
adapt_rotation=False,
allow_fullsize=False,
free_crop=False,
help_text="Use cropping tool to cut the image to the right format. Always leave enough white space around the edges and try to keep the largest possible size for good image quality.", # noqa
hide_image_field=False,
size_warning=False,
verbose_name="cropping",
),
),
migrations.AlterField(
model_name="logo",
name="image",
field=models.ImageField(
help_text="After choosing an image to upload click 'Save' to access the 'Cropping' tool and edit the image", # noqa
null=True,
upload_to="",
),
),
]
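For orientation, the two AlterField operations above imply a markets Logo model roughly like the sketch below. This is reconstructed from the migration alone, not taken from the project's actual models.py: the field arguments are copied from the operations, while everything else is an assumption.

```python
# Hypothetical markets/models.py implied by this migration; reconstructed
# from the AlterField operations above, not taken from the real project.
from django.db import models
from image_cropping import ImageRatioField


class Logo(models.Model):
    image = models.ImageField(
        help_text="After choosing an image to upload click 'Save' to access "
                  "the 'Cropping' tool and edit the image",
        null=True,
        upload_to="",
    )
    cropping = ImageRatioField("image", "400x302", verbose_name="cropping")
```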
File: OptionalWeightUnitOfMeasure.java

package com.elcuk.jaxb;

import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlType;

@XmlType(name="OptionalWeightUnitOfMeasure")
@XmlEnum
public enum OptionalWeightUnitOfMeasure
{
GR,
KG,
OZ,
LB,
MG;

public String value() {
return name();
}

public static OptionalWeightUnitOfMeasure fromValue(String v) {
return valueOf(v);
}
}
/* Location: /Users/mac/Desktop/jaxb/
 * Qualified Name: com.elcuk.jaxb.OptionalWeightUnitOfMeasure
 * JD-Core Version: 0.6.2
 */
File: GoingToZeroOrInfinity.rs

fn going(n: i32) -> f64 {
let mut result = 0.0;
for i in 1..n+1 {
result += i as f64;
result /= i as f64;
}
result
}
fn assert_fuzzy_equals(actual: f64, expected: f64) {
let merr = 1.0e-6;
let inrange =
if expected == 0.0 {
(actual.abs() <= merr)
} else {
((actual - expected).abs() / expected <= merr)
};
if inrange == false {
println!("Expected value must be near: {:e} but was:{:e}",
expected, actual);
} else {
//println!("....... GOOD\n");
}
assert_eq!(true, inrange);
}
fn dotest(n: i32, exp: f64) -> () {
assert_fuzzy_equals(going(n), exp);
}
fn main() {
dotest(5, 1.275);
dotest(6, 1.2125);
dotest(7, 1.173214);
dotest(8, 1.146651);
}
File: calcplusplus.py

#!/usr/bin/python3
# -*- coding: utf-8 -*-
import sys
import calcoo
import calcoohija
import csv
if __name__ == "__main__":
calc = calcoohija.CalculadoraHija()
with open(sys.argv[1]) as fichero:
reader = csv.reader(fichero)
for operandos in reader:
operacion = operandos[0]
if operacion == "suma":
resultado = calc.suma(int(operandos[1]), int(operandos[2]))
for numero in operandos[3:]:
resultado = calc.suma(int(resultado), int(numero))
print(resultado)
elif operacion == "resta":
resultado = calc.resta(int(operandos[1]), int(operandos[2]))
for numero in operandos[3:]:
resultado = calc.resta(int(resultado), int(numero))
print(resultado)
elif operacion == "multiplica":
resultado = calc.producto(int(operandos[1]), int(operandos[2]))
for numero in operandos[3:]:
resultado = calc.producto(int(resultado), int(numero))
print (resultado)
elif operacion == "divide":
resultado = calc.division(int(operandos[1]), int(operandos[2]))
for numero in operandos[3:]:
resultado = calc.division(int(resultado), int(numero))
print(resultado)
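The script above drives a calcoohija.CalculadoraHija object whose module is imported but never shown in this row. Below is a minimal sketch of what that class could look like, inferred only from the methods the script calls (suma, resta, producto, division); the bodies are assumptions, not the original source.

```python
# Hypothetical calcoo.py / calcoohija.py, inferred from the calls above.
# The real modules are not shown in this row; all bodies are assumptions.
class Calculadora:
    def suma(self, op1, op2):
        return op1 + op2

    def resta(self, op1, op2):
        return op1 - op2


class CalculadoraHija(Calculadora):
    def producto(self, op1, op2):
        return op1 * op2

    def division(self, op1, op2):
        if op2 == 0:
            raise ZeroDivisionError("division by zero")
        return op1 / op2
```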
File: SphinxTranscriptionService.d.ts

/**
* Implements a TranscriptionService for a Sphinx4 http server
*/
export default class SphinxService extends AbstractTranscriptionService {
url: string;
}
import AbstractTranscriptionService from "./AbstractTranscriptionService";
File: _management_link_client.py

# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from copy import deepcopy
from typing import Any, Optional, TYPE_CHECKING
from azure.core.rest import HttpRequest, HttpResponse
from azure.mgmt.core import ARMPipelineClient
from msrest import Deserializer, Serializer
from . import models
from ._configuration import ManagementLinkClientConfiguration
from .operations import Operations, ResourceLinksOperations
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials import TokenCredential
class ManagementLinkClient:
"""Azure resources can be linked together to form logical relationships. You can establish links between resources belonging to different resource groups. However, all the linked resources must belong to the same subscription. Each resource can be linked to 50 other resources. If any of the linked resources are deleted or moved, the link owner must clean up the remaining link.
:ivar operations: Operations operations
:vartype operations: azure.mgmt.resource.links.v2016_09_01.operations.Operations
:ivar resource_links: ResourceLinksOperations operations
:vartype resource_links:
azure.mgmt.resource.links.v2016_09_01.operations.ResourceLinksOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: The ID of the target subscription.
:type subscription_id: str
:param base_url: Service URL. Default value is 'https://management.azure.com'.
:type base_url: str
"""
def __init__(
self,
credential: "TokenCredential",
subscription_id: str,
base_url: str = "https://management.azure.com",
**kwargs: Any
) -> None:
self._config = ManagementLinkClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs)
self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
self.operations = Operations(self._client, self._config, self._serialize, self._deserialize)
self.resource_links = ResourceLinksOperations(self._client, self._config, self._serialize, self._deserialize)
def _send_request(
self,
request, # type: HttpRequest
**kwargs: Any
) -> HttpResponse:
"""Runs the network request through the client's chained policies.
>>> from azure.core.rest import HttpRequest
>>> request = HttpRequest("GET", "https://www.example.org/")
<HttpRequest [GET], url: 'https://www.example.org/'>
>>> response = client._send_request(request)
<HttpResponse: 200 OK>
For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart
:param request: The network request you want to make. Required.
:type request: ~azure.core.rest.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to False.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.HttpResponse
"""
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
return self._client.send_request(request_copy, **kwargs)
def close(self):
# type: () -> None
self._client.close()
def __enter__(self):
# type: () -> ManagementLinkClient
self._client.__enter__()
return self
def __exit__(self, *exc_details):
# type: (Any) -> None
self._client.__exit__(*exc_details)
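A short usage sketch for this client follows. The subscription ID is a placeholder, DefaultAzureCredential comes from the separate azure-identity package, and list_at_subscription is assumed from the ResourceLinksOperations group rather than shown in this file.

```python
# Hypothetical usage of ManagementLinkClient; the subscription ID is a
# placeholder and azure-identity is assumed to be installed.
from azure.identity import DefaultAzureCredential

credential = DefaultAzureCredential()
with ManagementLinkClient(credential, "00000000-0000-0000-0000-000000000000") as client:
    # list_at_subscription is assumed to exist on the resource_links group
    for link in client.resource_links.list_at_subscription():
        print(link.name)
```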
File: webhooks.py

from flask import request
from structlog import get_logger
from ghinbox import app
from ghinbox.tasks import create_issue
logger = get_logger()
@app.route('/hooks/postmark', methods=['POST'])
def postmark_incomming_hook():
# TODO #2 HTTP Basic Auth
inbound = request.json
if not inbound:
return 'ERR', 400
logger.debug('postmark', data=inbound)
title = inbound['Subject']
body = inbound['TextBody']
logger.debug('creating issue', title=title)
create_issue.delay(title, body)
return 'OK'
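One way to exercise this hook locally is Flask's built-in test client, as in the sketch below. The patch target ghinbox.webhooks.create_issue is an assumption about the module path, and only the two payload fields the handler reads are included; real Postmark inbound payloads carry many more.

```python
# Hypothetical smoke test for the Postmark hook; the Celery task is patched
# so no broker is needed, and the module path is an assumption.
from unittest import mock

from ghinbox import app


def test_postmark_hook():
    client = app.test_client()
    payload = {"Subject": "Bug report", "TextBody": "Something broke."}
    with mock.patch("ghinbox.webhooks.create_issue") as task:
        response = client.post('/hooks/postmark', json=payload)
    task.delay.assert_called_once_with("Bug report", "Something broke.")
    assert response.data == b'OK'
```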
File: AsyncTask.java

package org.wikipedia.concurrency;
// Copied from Android 4.4.2_r2 source
// so we can use executeOnExecutor :P
//
// https://android.googlesource.com/platform/frameworks/base/+/android-4.4.2_r2/core/java/android/os/AsyncTask.java
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.os.Handler;
import android.os.Message;
import android.os.Process;
import android.support.annotation.NonNull;
import java.util.ArrayDeque;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.FutureTask;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
/**
* <p>AsyncTask enables proper and easy use of the UI thread. This class allows you to
* perform background operations and publish results on the UI thread without
* having to manipulate threads and/or handlers.</p>
*
* <p>AsyncTask is designed to be a helper class around {@link Thread} and {@link Handler}
* and does not constitute a generic threading framework. AsyncTasks should ideally be
* used for short operations (a few seconds at the most.) If you need to keep threads
* running for long periods of time, it is highly recommended you use the various APIs
* provided by the <code>java.util.concurrent</code> package such as {@link Executor},
* {@link ThreadPoolExecutor} and {@link FutureTask}.</p>
*
* <p>An asynchronous task is defined by a computation that runs on a background thread and
* whose result is published on the UI thread. An asynchronous task is defined by 3 generic
* types, called <code>Params</code>, <code>Progress</code> and <code>Result</code>,
* and 4 steps, called <code>onPreExecute</code>, <code>doInBackground</code>,
* <code>onProgressUpdate</code> and <code>onPostExecute</code>.</p>
*
* <div class="special reference">
* <h3>Developer Guides</h3>
* <p>For more information about using tasks and threads, read the
* <a href="{@docRoot}guide/topics/fundamentals/processes-and-threads.html">Processes and
* Threads</a> developer guide.</p>
* </div>
*
* <h2>Usage</h2>
* <p>AsyncTask must be subclassed to be used. The subclass will override at least
* one method ({@link #doInBackground}), and most often will override a
* second one ({@link #onPostExecute}.)</p>
*
* <p>Here is an example of subclassing:</p>
* <pre class="prettyprint">
* private class DownloadFilesTask extends AsyncTask<URL, Integer, Long> {
* protected Long doInBackground(URL... urls) {
* int count = urls.length;
* long totalSize = 0;
* for (int i = 0; i < count; i++) {
* totalSize += Downloader.downloadFile(urls[i]);
* publishProgress((int) ((i / (float) count) * 100));
* // Escape early if cancel() is called
* if (isCancelled()) break;
* }
* return totalSize;
* }
*
* protected void onProgressUpdate(Integer... progress) {
* setProgressPercent(progress[0]);
* }
*
* protected void onPostExecute(Long result) {
* showDialog("Downloaded " + result + " bytes");
* }
* }
* </pre>
*
* <p>Once created, a task is executed very simply:</p>
* <pre class="prettyprint">
* new DownloadFilesTask().execute(url1, url2, url3);
* </pre>
*
* <h2>AsyncTask's generic types</h2>
* <p>The three types used by an asynchronous task are the following:</p>
* <ol>
* <li><code>Params</code>, the type of the parameters sent to the task upon
* execution.</li>
* <li><code>Progress</code>, the type of the progress units published during
* the background computation.</li>
* <li><code>Result</code>, the type of the result of the background
* computation.</li>
* </ol>
* <p>Not all types are always used by an asynchronous task. To mark a type as unused,
* simply use the type {@link Void}:</p>
* <pre>
* private class MyTask extends AsyncTask<Void, Void, Void> { ... }
* </pre>
*
* <h2>The 4 steps</h2>
* <p>When an asynchronous task is executed, the task goes through 4 steps:</p>
* <ol>
* <li>{@link #onPreExecute()}, invoked on the UI thread before the task
* is executed. This step is normally used to setup the task, for instance by
* showing a progress bar in the user interface.</li>
* <li>{@link #doInBackground}, invoked on the background thread
* immediately after {@link #onPreExecute()} finishes executing. This step is used
* to perform background computation that can take a long time. The parameters
* of the asynchronous task are passed to this step. The result of the computation must
* be returned by this step and will be passed back to the last step. This step
* can also use {@link #publishProgress} to publish one or more units
* of progress. These values are published on the UI thread, in the
* {@link #onProgressUpdate} step.</li>
* <li>{@link #onProgressUpdate}, invoked on the UI thread after a
* call to {@link #publishProgress}. The timing of the execution is
* undefined. This method is used to display any form of progress in the user
* interface while the background computation is still executing. For instance,
* it can be used to animate a progress bar or show logs in a text field.</li>
* <li>{@link #onPostExecute}, invoked on the UI thread after the background
* computation finishes. The result of the background computation is passed to
* this step as a parameter.</li>
* </ol>
*
* <h2>Cancelling a task</h2>
* <p>A task can be cancelled at any time by invoking {@link #cancel(boolean)}. Invoking
* this method will cause subsequent calls to {@link #isCancelled()} to return true.
* After invoking this method, {@link #onCancelled(Object)}, instead of
* {@link #onPostExecute(Object)} will be invoked after {@link #doInBackground(Object[])}
* returns. To ensure that a task is cancelled as quickly as possible, you should always
* check the return value of {@link #isCancelled()} periodically from
* {@link #doInBackground(Object[])}, if possible (inside a loop for instance.)</p>
*
* <h2>Threading rules</h2>
* <p>There are a few threading rules that must be followed for this class to
* work properly:</p>
* <ul>
* <li>The AsyncTask class must be loaded on the UI thread. This is done
* automatically as of {@link android.os.Build.VERSION_CODES#JELLY_BEAN}.</li>
* <li>The task instance must be created on the UI thread.</li>
* <li>{@link #execute} must be invoked on the UI thread.</li>
* <li>Do not call {@link #onPreExecute()}, {@link #onPostExecute},
* {@link #doInBackground}, {@link #onProgressUpdate} manually.</li>
* <li>The task can be executed only once (an exception will be thrown if
* a second execution is attempted.)</li>
* </ul>
*
* <h2>Memory observability</h2>
* <p>AsyncTask guarantees that all callback calls are synchronized in such a way that the following
* operations are safe without explicit synchronizations.</p>
* <ul>
* <li>Set member fields in the constructor or {@link #onPreExecute}, and refer to them
* in {@link #doInBackground}.
* <li>Set member fields in {@link #doInBackground}, and refer to them in
* {@link #onProgressUpdate} and {@link #onPostExecute}.
* </ul>
*
* <h2>Order of execution</h2>
* <p>When first introduced, AsyncTasks were executed serially on a single background
* thread. Starting with {@link android.os.Build.VERSION_CODES#DONUT}, this was changed
* to a pool of threads allowing multiple tasks to operate in parallel. Starting with
* {@link android.os.Build.VERSION_CODES#HONEYCOMB}, tasks are executed on a single
* thread to avoid common application errors caused by parallel execution.</p>
* <p>If you truly want parallel execution, you can invoke
* {@link #executeOnExecutor(java.util.concurrent.Executor, Object[])} with
* {@link #THREAD_POOL_EXECUTOR}.</p>
*/
public abstract class AsyncTask<Params, Progress, Result> {
private static final String LOG_TAG = "AsyncTask";
private static final int CPU_COUNT = Runtime.getRuntime().availableProcessors();
private static final int CORE_POOL_SIZE = CPU_COUNT + 1;
private static final int MAXIMUM_POOL_SIZE = CPU_COUNT * 2 + 1;
private static final int KEEP_ALIVE = 1;
private static final ThreadFactory sThreadFactory = new ThreadFactory() {
private final AtomicInteger mCount = new AtomicInteger(1);
public Thread newThread(@NonNull Runnable r) {
return new Thread(r, "AsyncTask #" + mCount.getAndIncrement());
}
};
private static final BlockingQueue<Runnable> sPoolWorkQueue = new LinkedBlockingQueue<>(128);
/**
* An {@link Executor} that can be used to execute tasks in parallel.
*/
public static final Executor THREAD_POOL_EXECUTOR
= new ThreadPoolExecutor(CORE_POOL_SIZE, MAXIMUM_POOL_SIZE, KEEP_ALIVE,
TimeUnit.SECONDS, sPoolWorkQueue, sThreadFactory);
/**
* An {@link Executor} that executes tasks one at a time in serial
* order. This serialization is global to a particular process.
*/
public static final Executor SERIAL_EXECUTOR = new SerialExecutor();
private static final int MESSAGE_POST_RESULT = 0x1;
private static final int MESSAGE_POST_PROGRESS = 0x2;
private static final InternalHandler sHandler = new InternalHandler();
private static volatile Executor sDefaultExecutor = SERIAL_EXECUTOR;
private final WorkerRunnable<Params, Result> mWorker;
private final FutureTask<Result> mFuture;
private volatile Status mStatus = Status.PENDING;
private final AtomicBoolean mCancelled = new AtomicBoolean();
private final AtomicBoolean mTaskInvoked = new AtomicBoolean();
private static class SerialExecutor implements Executor {
final ArrayDeque<Runnable> mTasks = new ArrayDeque<>();
Runnable mActive;
public synchronized void execute(@NonNull final Runnable r) {
mTasks.offer(new Runnable() {
public void run() {
try {
r.run();
} finally {
scheduleNext();
}
}
});
if (mActive == null) {
scheduleNext();
}
}
protected synchronized void scheduleNext() {
if ((mActive = mTasks.poll()) != null) {
THREAD_POOL_EXECUTOR.execute(mActive);
}
}
}
/**
* Indicates the current status of the task. Each status will be set only once
* during the lifetime of a task.
*/
public enum Status {
/**
* Indicates that the task has not been executed yet.
*/
PENDING,
/**
* Indicates that the task is running.
*/
RUNNING,
/**
* Indicates that {@link AsyncTask#onPostExecute} has finished.
*/
FINISHED,
}
/** @hide Used to force static handler to be created. */
public static void init() {
sHandler.getLooper();
}
/** @hide */
public static void setDefaultExecutor(Executor exec) {
sDefaultExecutor = exec;
}
/**
* Creates a new asynchronous task. This constructor must be invoked on the UI thread.
*/
public AsyncTask() {
mWorker = new WorkerRunnable<Params, Result>() {
public Result call() throws Exception {
mTaskInvoked.set(true);
Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND);
//noinspection unchecked
return postResult(doInBackground(mParams));
}
};
mFuture = new FutureTask<Result>(mWorker) {
@Override
protected void done() {
try {
postResultIfNotInvoked(get());
} catch (InterruptedException e) {
android.util.Log.w(LOG_TAG, e);
} catch (ExecutionException e) {
throw new RuntimeException("An error occured while executing doInBackground()",
e.getCause());
} catch (CancellationException e) {
postResultIfNotInvoked(null);
}
}
};
}
private void postResultIfNotInvoked(Result result) {
final boolean wasTaskInvoked = mTaskInvoked.get();
if (!wasTaskInvoked) {
postResult(result);
}
}
private Result postResult(Result result) {
@SuppressWarnings("unchecked")
Message message = sHandler.obtainMessage(MESSAGE_POST_RESULT,
new AsyncTaskResult<>(this, result));
message.sendToTarget();
return result;
}
/**
* Returns the current status of this task.
*
* @return The current status.
*/
public final Status getStatus() {
return mStatus;
}
/**
* Override this method to perform a computation on a background thread. The
* specified parameters are the parameters passed to {@link #execute}
* by the caller of this task.
*
* This method can call {@link #publishProgress} to publish updates
* on the UI thread.
*
* @param params The parameters of the task.
*
* @return A result, defined by the subclass of this task.
*
* @see #onPreExecute()
* @see #onPostExecute
* @see #publishProgress
*/
protected abstract Result doInBackground(Params... params);
/**
* Runs on the UI thread before {@link #doInBackground}.
*
* @see #onPostExecute
* @see #doInBackground
*/
protected void onPreExecute() {
}
/**
* <p>Runs on the UI thread after {@link #doInBackground}. The
* specified result is the value returned by {@link #doInBackground}.</p>
*
* <p>This method won't be invoked if the task was cancelled.</p>
*
* @param result The result of the operation computed by {@link #doInBackground}.
*
* @see #onPreExecute
* @see #doInBackground
* @see #onCancelled(Object)
*/
@SuppressWarnings({"UnusedDeclaration"})
protected void onPostExecute(Result result) {
}
/**
* Runs on the UI thread after {@link #publishProgress} is invoked.
* The specified values are the values passed to {@link #publishProgress}.
*
* @param values The values indicating progress.
*
* @see #publishProgress
* @see #doInBackground
*/
@SuppressWarnings({"UnusedDeclaration"})
protected void onProgressUpdate(Progress... values) {
}
/**
* <p>Runs on the UI thread after {@link #cancel(boolean)} is invoked and
* {@link #doInBackground(Object[])} has finished.</p>
*
* <p>The default implementation simply invokes {@link #onCancelled()} and
* ignores the result. If you write your own implementation, do not call
* <code>super.onCancelled(result)</code>.</p>
*
* @param result The result, if any, computed in
* {@link #doInBackground(Object[])}, can be null
*
* @see #cancel(boolean)
* @see #isCancelled()
*/
@SuppressWarnings({"UnusedParameters"})
protected void onCancelled(Result result) {
onCancelled();
}
/**
* <p>Applications should preferably override {@link #onCancelled(Object)}.
* This method is invoked by the default implementation of
* {@link #onCancelled(Object)}.</p>
*
* <p>Runs on the UI thread after {@link #cancel(boolean)} is invoked and
* {@link #doInBackground(Object[])} has finished.</p>
*
* @see #onCancelled(Object)
* @see #cancel(boolean)
* @see #isCancelled()
*/
protected void onCancelled() {
}
/**
* Returns <tt>true</tt> if this task was cancelled before it completed
* normally. If you are calling {@link #cancel(boolean)} on the task,
* the value returned by this method should be checked periodically from
* {@link #doInBackground(Object[])} to end the task as soon as possible.
*
* @return <tt>true</tt> if task was cancelled before it completed
*
* @see #cancel(boolean)
*/
public final boolean isCancelled() {
return mCancelled.get();
}
/**
* <p>Attempts to cancel execution of this task. This attempt will
* fail if the task has already completed, already been cancelled,
* or could not be cancelled for some other reason. If successful,
* and this task has not started when <tt>cancel</tt> is called,
* this task should never run. If the task has already started,
* then the <tt>mayInterruptIfRunning</tt> parameter determines
* whether the thread executing this task should be interrupted in
* an attempt to stop the task.</p>
*
* <p>Calling this method will result in {@link #onCancelled(Object)} being
* invoked on the UI thread after {@link #doInBackground(Object[])}
* returns. Calling this method guarantees that {@link #onPostExecute(Object)}
* is never invoked. After invoking this method, you should check the
* value returned by {@link #isCancelled()} periodically from
* {@link #doInBackground(Object[])} to finish the task as early as
* possible.</p>
*
* @param mayInterruptIfRunning <tt>true</tt> if the thread executing this
* task should be interrupted; otherwise, in-progress tasks are allowed
* to complete.
*
* @return <tt>false</tt> if the task could not be cancelled,
* typically because it has already completed normally;
* <tt>true</tt> otherwise
*
* @see #isCancelled()
* @see #onCancelled(Object)
*/
public final boolean cancel(boolean mayInterruptIfRunning) {
mCancelled.set(true);
return mFuture.cancel(mayInterruptIfRunning);
}
/**
* Waits if necessary for the computation to complete, and then
* retrieves its result.
*
* @return The computed result.
*
* @throws CancellationException If the computation was cancelled.
* @throws ExecutionException If the computation threw an exception.
* @throws InterruptedException If the current thread was interrupted
* while waiting.
*/
public final Result get() throws InterruptedException, ExecutionException {
return mFuture.get();
}
/**
* Waits if necessary for at most the given time for the computation
* to complete, and then retrieves its result.
*
* @param timeout Time to wait before cancelling the operation.
* @param unit The time unit for the timeout.
*
* @return The computed result.
*
* @throws CancellationException If the computation was cancelled.
* @throws ExecutionException If the computation threw an exception.
* @throws InterruptedException If the current thread was interrupted
* while waiting.
* @throws TimeoutException If the wait timed out.
*/
public final Result get(long timeout, TimeUnit unit) throws InterruptedException,
ExecutionException, TimeoutException {
return mFuture.get(timeout, unit);
}
/**
* Executes the task with the specified parameters. The task returns
* itself (this) so that the caller can keep a reference to it.
*
* <p>Note: this function schedules the task on a queue for a single background
* thread or pool of threads depending on the platform version. When first
* introduced, AsyncTasks were executed serially on a single background thread.
* Starting with {@link android.os.Build.VERSION_CODES#DONUT}, this was changed
* to a pool of threads allowing multiple tasks to operate in parallel. Starting
* {@link android.os.Build.VERSION_CODES#HONEYCOMB}, tasks are back to being
* executed on a single thread to avoid common application errors caused
* by parallel execution. If you truly want parallel execution, you can use
* the {@link #executeOnExecutor} version of this method
* with {@link #THREAD_POOL_EXECUTOR}; however, see commentary there for warnings
* on its use.
*
* <p>This method must be invoked on the UI thread.
*
* @param params The parameters of the task.
*
* @return This instance of AsyncTask.
*
* @throws IllegalStateException If {@link #getStatus()} returns either
* {@link AsyncTask.Status#RUNNING} or {@link AsyncTask.Status#FINISHED}.
*
* @see #executeOnExecutor(java.util.concurrent.Executor, Object[])
* @see #execute(Runnable)
*/
public final AsyncTask<Params, Progress, Result> execute(Params... params) {
return executeOnExecutor(sDefaultExecutor, params);
}
/**
* Executes the task with the specified parameters. The task returns
* itself (this) so that the caller can keep a reference to it.
*
* <p>This method is typically used with {@link #THREAD_POOL_EXECUTOR} to
* allow multiple tasks to run in parallel on a pool of threads managed by
* AsyncTask, however you can also use your own {@link Executor} for custom
* behavior.
*
* <p><em>Warning:</em> Allowing multiple tasks to run in parallel from
* a thread pool is generally <em>not</em> what one wants, because the order
* of their operation is not defined. For example, if these tasks are used
* to modify any state in common (such as writing a file due to a button click),
* there are no guarantees on the order of the modifications.
* Without careful work it is possible in rare cases for the newer version
* of the data to be over-written by an older one, leading to obscure data
* loss and stability issues. Such changes are best
* executed in serial; to guarantee such work is serialized regardless of
* platform version you can use this function with {@link #SERIAL_EXECUTOR}.
*
* <p>This method must be invoked on the UI thread.
*
* @param exec The executor to use. {@link #THREAD_POOL_EXECUTOR} is available as a
* convenient process-wide thread pool for tasks that are loosely coupled.
* @param params The parameters of the task.
*
* @return This instance of AsyncTask.
*
* @throws IllegalStateException If {@link #getStatus()} returns either
* {@link AsyncTask.Status#RUNNING} or {@link AsyncTask.Status#FINISHED}.
*
* @see #execute(Object[])
*/
public final AsyncTask<Params, Progress, Result> executeOnExecutor(Executor exec,
Params... params) {
if (mStatus != Status.PENDING) {
switch (mStatus) {
case RUNNING:
throw new IllegalStateException("Cannot execute task:"
+ " the task is already running.");
case FINISHED:
throw new IllegalStateException("Cannot execute task:"
+ " the task has already been executed "
+ "(a task can be executed only once)");
}
}
mStatus = Status.RUNNING;
onPreExecute();
mWorker.mParams = params;
exec.execute(mFuture);
return this;
}
/**
* Convenience version of {@link #execute(Object...)} for use with
* a simple Runnable object. See {@link #execute(Object[])} for more
* information on the order of execution.
*
* @see #execute(Object[])
* @see #executeOnExecutor(java.util.concurrent.Executor, Object[])
*/
public static void execute(Runnable runnable) {
sDefaultExecutor.execute(runnable);
}
/**
* This method can be invoked from {@link #doInBackground} to
* publish updates on the UI thread while the background computation is
* still running. Each call to this method will trigger the execution of
* {@link #onProgressUpdate} on the UI thread.
*
* {@link #onProgressUpdate} will not be called if the task has been
* canceled.
*
* @param values The progress values to update the UI with.
*
* @see #onProgressUpdate
* @see #doInBackground
*/
protected final void publishProgress(Progress... values) {
if (!isCancelled()) {
sHandler.obtainMessage(MESSAGE_POST_PROGRESS,
new AsyncTaskResult<>(this, values)).sendToTarget();
}
}
private void finish(Result result) {
if (isCancelled()) {
onCancelled(result);
} else {
onPostExecute(result);
}
mStatus = Status.FINISHED;
}
private static class InternalHandler extends Handler {
@SuppressWarnings({"unchecked", "RawUseOfParameterizedType"})
@Override
public void handleMessage(Message msg) {
AsyncTaskResult result = (AsyncTaskResult) msg.obj;
switch (msg.what) {
case MESSAGE_POST_RESULT:
// There is only one result
result.mTask.finish(result.mData[0]);
break;
case MESSAGE_POST_PROGRESS:
result.mTask.onProgressUpdate(result.mData);
break;
}
}
}
private static abstract class WorkerRunnable<Params, Result> implements Callable<Result> {
Params[] mParams;
}
@SuppressWarnings({"RawUseOfParameterizedType"})
private static class AsyncTaskResult<Data> {
final AsyncTask mTask;
final Data[] mData;
AsyncTaskResult(AsyncTask task, Data... data) {
mTask = task;
mData = data;
}
}
}
File: import3DS.py

#***************************************************************************
#* *
#* Copyright (c) 2016 Yorik van Havre <[email protected]> *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************
from __future__ import print_function
import os,FreeCAD,Mesh
__title__="FreeCAD 3DS importer"
__author__ = "Yorik van Havre"
__url__ = "http://www.freecadweb.org"
DEBUG = True
## @package import3DS
# \ingroup ARCH
# \brief 3DS file format importer
#
# This module provides tools to import 3DS files.
def check3DS():
"checks if Dice3DS is available"
global dom3ds
dom3ds = None
try:
from Dice3DS import dom3ds
except ImportError:
FreeCAD.Console.PrintError("Dice3DS not found, 3DS support is disabled.\n")
return False
else:
return True
def open(filename):
"called when freecad wants to open a file"
if not check3DS():
return
docname = (os.path.splitext(os.path.basename(filename))[0]).encode("utf8")
doc = FreeCAD.newDocument(docname)
doc.Label = decode(docname)
FreeCAD.ActiveDocument = doc
read(filename)
return doc
def insert(filename,docname):
"called when freecad wants to import a file"
if not check3DS():
return
try:
doc = FreeCAD.getDocument(docname)
except NameError:
doc = FreeCAD.newDocument(docname)
FreeCAD.ActiveDocument = doc
read(filename)
return doc
def decode(name):
"decodes encoded strings"
try:
decodedName = (name.decode("utf8"))
except UnicodeDecodeError:
try:
decodedName = (name.decode("latin1"))
except UnicodeDecodeError:
FreeCAD.Console.PrintError(translate("Arch","Error: Couldn't determine character encoding"))
decodedName = name
return decodedName
def read(filename):
dom = dom3ds.read_3ds_file(filename,tight=False)
for j,d_nobj in enumerate(dom.mdata.objects):
if type(d_nobj.obj) != dom3ds.N_TRI_OBJECT:
continue
verts = []
if d_nobj.obj.points:
for d_point in d_nobj.obj.points.array:
verts.append([d_point[0],d_point[1],d_point[2]])
meshdata = []
for d_face in d_nobj.obj.faces.array:
meshdata.append([verts[int(d_face[i])] for i in xrange(3)])<|fim▁hole|> obj = FreeCAD.ActiveDocument.addObject("Mesh::Feature","Mesh")
obj.Mesh = mesh
obj.Placement = placement
else:
print("Skipping object without vertices array: ",d_nobj.obj)<|fim▁end|> | m = [tuple(r) for r in d_nobj.obj.matrix.array]
m = m[0] + m[1] + m[2] + m[3]
placement = FreeCAD.Placement(FreeCAD.Matrix(*m))
mesh = Mesh.Mesh(meshdata) |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
from Players.models import Player
@admin.register(Player)
class PlayerAdmin(admin.ModelAdmin):
view_on_site = True
list_display = ('pk', 'first_name', 'last_name', 'number', 'team', 'position', 'age', 'height', 'weight')
list_filter = ['team', 'position']
search_fields = ['first_name', 'last_name']
# Disable delete when team has 5 players
def has_delete_permission(self, request, obj=None):
try:
            return Player.objects.filter(team=obj.team).count() != 5
except AttributeError:
pass<|fim▁hole|> actions = super(PlayerAdmin, self).get_actions(request)
del actions['delete_selected']
return actions<|fim▁end|> |
    # Disable the delete action from the list; not ideal, as it disables delete for all players
def get_actions(self, request): |
<|file_name|>042_ReverseWordsInSequence.java<|end_file_name|><|fim▁begin|>/**
* Created by Administrator on 2017/3/11.
*/
public class ReverseWordsInSequence {
public void reverseSequence(String str) {
if (str == null) return;
String[] strArray = str.split(" ");
StringBuilder sb = new StringBuilder();
for (int i = strArray.length-1; i >= 0; --i)
sb.append(strArray[i] + " ");
System.out.println(sb);
}
public static void main(String[] args) {
ReverseWordsInSequence r = new ReverseWordsInSequence();
String str = "I am a Students.";
r.reverseSequence(str);
}
}
/**
* Created by Administrator on 2017/3/11.
*/
public class ReverseWordsInSequence {
public String reverseSequence(String str) {
if (str == null || str.length() <= 1)
return str;
StringBuilder sb = new StringBuilder(str);
reverse(sb, 0, str.length() - 1);
<|fim▁hole|> int begin = 0, end = 0;
while (end < str.length()) {
while (end < str.length() && sb.charAt(end) != ' ')
end++;
reverse(sb, begin, end - 1);
begin = end + 1;
end = begin;
}
return sb.toString();
}
public void reverse(StringBuilder sb, int start, int end) {
if (sb == null || end <= start)
return;
while (start < end) {
char temp = sb.charAt(start);
sb.setCharAt(start, sb.charAt(end));
sb.setCharAt(end, temp);
start++;
end--;
}
}
public static void main(String[] args) {
ReverseWordsInSequence r = new ReverseWordsInSequence();
String str = "i am a students.";
System.out.print(r.reverseSequence(str));
}
}<|fim▁end|> | |
<|file_name|>main.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# encoding: utf-8
"""
main.py
The entry point for the book reader application.
"""
__version_info__ = (0, 0, 1)
__version__ = '.'.join(map(str, __version_info__))
__author__ = "[email protected]"
import time
import sqlite3
import pdb
import signal
import sys, os
import rfid
import config
import RPi.GPIO as GPIO
from player import Player
from status_light import StatusLight
from threading import Thread
class BookReader(object):
"""The main class that controls the player, the GPIO pins and the RFID reader"""
def __init__(self):
"""Initialize all the things"""
self.rfid_reader = rfid.Reader(**config.serial)
# setup signal handlers. SIGINT for KeyboardInterrupt
# and SIGTERM for when running from supervisord
signal.signal(signal.SIGINT, self.signal_handler)
signal.signal(signal.SIGTERM, self.signal_handler)
self.status_light = StatusLight(config.status_light_pin)
thread = Thread(target=self.status_light.start)
thread.start()
self.setup_db()
self.player = Player(config.mpd_conn, self.status_light)
self.setup_gpio()
def setup_db(self):
"""Setup a connection to the SQLite db"""
self.db_conn = sqlite3.connect(config.db_file)
self.db_cursor = self.db_conn.cursor()
def setup_gpio(self):
"""Setup all GPIO pins"""
GPIO.setmode(GPIO.BCM)
# input pins for buttons
for pin in config.gpio_pins:
GPIO.setup(pin['pin_id'], GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.add_event_detect(pin['pin_id'], GPIO.FALLING, callback=getattr(self.player, pin['callback']), bouncetime=pin['bounce_time'])
<|fim▁hole|> def signal_handler(self, signal, frame):
"""When quiting, stop playback, close the player and release GPIO pins"""
self.player.close()
self.status_light.exit()
GPIO.cleanup()
sys.exit(0)
def loop(self):
"""The main event loop. This is where we look for new RFID cards on the RFID reader. If one is
        present and different from the book that's currently playing, then:
        1. Stop playback of the current book if one is playing
        2. Start playing the new book
"""
while True:
if self.player.is_playing():
self.on_playing()
elif self.player.finished_book():
# when at the end of a book, delete its progress from the db
# so we can listen to it again
                self.db_cursor.execute(
                    'DELETE FROM progress WHERE book_id = ?', (self.player.book.book_id,))
self.db_conn.commit()
self.player.book.reset()
rfid_card = self.rfid_reader.read()
if not rfid_card:
continue
book_id = rfid_card.get_id()
if book_id and book_id != self.player.book.book_id: # a change in book id
                progress = self.db_cursor.execute(
                    'SELECT * FROM progress WHERE book_id = ?', (book_id,)).fetchone()
self.player.play(book_id, progress)
def on_playing(self):
"""Executed for each loop execution. Here we update self.player.book with the latest known position
        and save the progress to db"""
status = self.player.get_status()
self.player.book.elapsed = float(status['elapsed'])
self.player.book.part = int(status['song']) + 1
#print "%s second of part %s" % (self.player.book.elapsed, self.player.book.part)
        self.db_cursor.execute(
            'INSERT OR REPLACE INTO progress (book_id, part, elapsed) VALUES (?, ?, ?)',
            (self.player.book.book_id, self.player.book.part, self.player.book.elapsed))
self.db_conn.commit()
if __name__ == '__main__':
reader = BookReader()
reader.loop()<|fim▁end|> | |
<|file_name|>basic_auth.py<|end_file_name|><|fim▁begin|># Copyright 2013 Gert Kremer
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
from django.http import HttpResponse
from django.contrib.auth import authenticate, login
#############################################################################
#
def view_or_basicauth(view, request, test_func, realm = "", *args, **kwargs):
"""
This is a helper function used by both 'logged_in_or_basicauth' and
    'has_perm_or_basicauth' that does the nitty-gritty of determining whether they
are already logged in or if they have provided proper http-authorization
and returning the view if all goes well, otherwise responding with a 401.
"""
if test_func(request.user):
# Already logged in, just return the view.
#
return view(request, *args, **kwargs)
# They are not logged in. See if they provided login credentials
#
if 'HTTP_AUTHORIZATION' in request.META:
auth = request.META['HTTP_AUTHORIZATION'].split()
if len(auth) == 2:
            # NOTE: We only support basic authentication for now.
#
if auth[0].lower() == "basic":
uname, passwd = base64.b64decode(auth[1]).split(':')
user = authenticate(username=uname, password=passwd)
if user is not None:
if user.is_active:
login(request, user)
request.user = user
return view(request, *args, **kwargs)
# Either they did not provide an authorization header or
# something in the authorization attempt failed. Send a 401
# back to them to ask them to authenticate.
#
response = HttpResponse()
response.status_code = 401
response['WWW-Authenticate'] = 'Basic realm="%s"' % realm
return response
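# For reference, a request that passes the check above carries a header of the
# form (hypothetical credentials, not taken from any real deployment):
#
#     Authorization: Basic dXNlcjpwYXNz
#
# where "dXNlcjpwYXNz" is the base64 encoding of "user:pass".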
#############################################################################
#
def logged_in_or_basicauth(realm = ""):
"""
A simple decorator that requires a user to be logged in. If they are not
logged in the request is examined for a 'authorization' header.
If the header is present it is tested for basic authentication and
the user is logged in with the provided credentials.
If the header is not present a http 401 is sent back to the
requestor to provide credentials.
The purpose of this is that in several django projects I have needed
several specific views that need to support basic authentication, yet the
web site as a whole used django's provided authentication.
The uses for this are for urls that are access programmatically such as
by rss feed readers, yet the view requires a user to be logged in. Many rss
readers support supplying the authentication credentials via http basic
auth (and they do NOT support a redirect to a form where they post a
username/password.)
Use is simple:
    @logged_in_or_basicauth()
    def your_view(request):
...
You can provide the name of the realm to ask for authentication within.
"""
def view_decorator(func):
def wrapper(request, *args, **kwargs):
return view_or_basicauth(func, request,
lambda u: u.is_authenticated(),
realm, *args, **kwargs)
return wrapper
return view_decorator
#############################################################################
#
def has_perm_or_basicauth(perm, realm = ""):
"""
This is similar to the above decorator 'logged_in_or_basicauth'
except that it requires the logged in user to have a specific
permission.
Use:
    @has_perm_or_basicauth('asforums.view_forumcollection')
    def your_view(request):
...
"""
def view_decorator(func):
def wrapper(request, *args, **kwargs):
return view_or_basicauth(func, request,
lambda u: u.has_perm(perm),
realm, *args, **kwargs)
return wrapper
<|fim▁hole|><|fim▁end|> | return view_decorator |
<|file_name|>ports_list.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
# Copyright (C) 2016 Nicolargo <[email protected]>
#
# Glances is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Glances is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Manage the Glances ports list (Ports plugin)."""
from glances.compat import range
from glances.logger import logger
from glances.globals import BSD
# XXX *BSDs: Segmentation fault (core dumped)
# -- https://bitbucket.org/al45tair/netifaces/issues/15
# Also used in the glances_ip plugin
if not BSD:
try:
import netifaces
netifaces_tag = True
except ImportError:
netifaces_tag = False
else:
netifaces_tag = False
class GlancesPortsList(object):
"""Manage the ports list for the ports plugin."""
_section = "ports"
_default_refresh = 60
_default_timeout = 3
def __init__(self, config=None, args=None):
        # ports_list is a list of dicts (JSON compliant)
        # [ {'host': 'www.google.fr', 'port': 443, 'refresh': 30, 'description': 'Internet', 'status': True} ... ]
# Load the configuration file
self._ports_list = self.load(config)
def load(self, config):
"""Load the ports list from the configuration file."""
ports_list = []
if config is None:
logger.debug("No configuration file available. Cannot load ports list.")
elif not config.has_section(self._section):
logger.debug("No [%s] section in the configuration file. Cannot load ports list." % self._section)
else:
logger.debug("Start reading the [%s] section in the configuration file" % self._section)
refresh = int(config.get_value(self._section, 'refresh', default=self._default_refresh))
timeout = int(config.get_value(self._section, 'timeout', default=self._default_timeout))
            # Add the default gateway on top of the ports_list list
default_gateway = config.get_value(self._section, 'port_default_gateway', default='False')
if default_gateway.lower().startswith('true') and netifaces_tag:
new_port = {}
new_port['host'] = netifaces.gateways()['default'][netifaces.AF_INET][0]
# ICMP
new_port['port'] = 0
new_port['description'] = 'DefaultGateway'
new_port['refresh'] = refresh
new_port['timeout'] = timeout
new_port['status'] = None
new_port['rtt_warning'] = None
logger.debug("Add default gateway %s to the static list" % (new_port['host']))
ports_list.append(new_port)
# Read the scan list
for i in range(1, 256):
new_port = {}
postfix = 'port_%s_' % str(i)
                # Read the mandatory configuration key: host
new_port['host'] = config.get_value(self._section, '%s%s' % (postfix, 'host'))
if new_port['host'] is None:
continue
                # Read optional configuration keys
                # Port is set to 0 by default. 0 means ICMP check instead of TCP check
new_port['port'] = config.get_value(self._section,
'%s%s' % (postfix, 'port'),
0)
new_port['description'] = config.get_value(self._section,
'%sdescription' % postfix,
default="%s:%s" % (new_port['host'], new_port['port']))
# Default status
new_port['status'] = None
# Refresh rate in second
new_port['refresh'] = refresh
# Timeout in second
new_port['timeout'] = int(config.get_value(self._section,
'%stimeout' % postfix,
default=timeout))
# RTT warning<|fim▁hole|> new_port['rtt_warning'] = config.get_value(self._section,
'%srtt_warning' % postfix,
default=None)
if new_port['rtt_warning'] is not None:
# Convert to second
new_port['rtt_warning'] = int(new_port['rtt_warning']) / 1000.0
# Add the server to the list
logger.debug("Add port %s:%s to the static list" % (new_port['host'], new_port['port']))
ports_list.append(new_port)
# Ports list loaded
logger.debug("Ports list loaded: %s" % ports_list)
return ports_list
def get_ports_list(self):
"""Return the current server list (dict of dict)."""
return self._ports_list
def set_server(self, pos, key, value):
"""Set the key to the value for the pos (position in the list)."""
self._ports_list[pos][key] = value<|fim▁end|> | |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>" Settings for tests. "
from settings.project import *
# Databases
DATABASES = {
'default': {<|fim▁hole|> 'NAME': ':memory:',
'USER': '',
'PASSWORD': '',
'TEST_CHARSET': 'utf8',
}}
# Caches
CACHES['default']['BACKEND'] = 'django.core.cache.backends.locmem.LocMemCache'
CACHES['default']['KEY_PREFIX'] = '_'.join((PROJECT_NAME, 'TST'))
# pymode:lint_ignore=W404<|fim▁end|> | 'ENGINE': 'django.db.backends.sqlite3', |
<|file_name|>subst.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Type substitutions.
use middle::ty;
use middle::ty_fold;
use middle::ty_fold::{TypeFoldable, TypeFolder};
use util::ppaux::Repr;
use std::fmt;
use std::mem;
use std::raw;
use std::slice::{Items, MutItems};
use std::vec::Vec;
use syntax::codemap::{Span, DUMMY_SP};
///////////////////////////////////////////////////////////////////////////
// HomogeneousTuple3 trait
//
// This could be moved into standard library at some point.
trait HomogeneousTuple3<T> {
fn len(&self) -> uint;
fn as_slice<'a>(&'a self) -> &'a [T];
fn as_mut_slice<'a>(&'a mut self) -> &'a mut [T];
fn iter<'a>(&'a self) -> Items<'a, T>;
fn iter_mut<'a>(&'a mut self) -> MutItems<'a, T>;
fn get<'a>(&'a self, index: uint) -> Option<&'a T>;
fn get_mut<'a>(&'a mut self, index: uint) -> Option<&'a mut T>;
}
impl<T> HomogeneousTuple3<T> for (T, T, T) {
fn len(&self) -> uint {
3
}
fn as_slice<'a>(&'a self) -> &'a [T] {
unsafe {
let ptr: *const T = mem::transmute(self);
let slice = raw::Slice { data: ptr, len: 3 };
mem::transmute(slice)
}
}
fn as_mut_slice<'a>(&'a mut self) -> &'a mut [T] {
unsafe {
let ptr: *const T = mem::transmute(self);
let slice = raw::Slice { data: ptr, len: 3 };
mem::transmute(slice)
}
}
fn iter<'a>(&'a self) -> Items<'a, T> {
let slice: &'a [T] = self.as_slice();
slice.iter()
}
fn iter_mut<'a>(&'a mut self) -> MutItems<'a, T> {
self.as_mut_slice().iter_mut()
}
fn get<'a>(&'a self, index: uint) -> Option<&'a T> {
self.as_slice().get(index)
}
fn get_mut<'a>(&'a mut self, index: uint) -> Option<&'a mut T> {
        // bounds-checked so that out-of-range indices yield None
        if index < self.len() { Some(&mut self.as_mut_slice()[index]) } else { None }
}
}
///////////////////////////////////////////////////////////////////////////
/**
* A substitution mapping type/region parameters to new values. We
* identify each in-scope parameter by an *index* and a *parameter
 * space* (which indicates where the parameter is defined; see
* `ParamSpace`).
*/
#[deriving(Clone, PartialEq, Eq, Hash, Show)]
pub struct Substs {
pub types: VecPerParamSpace<ty::t>,
pub regions: RegionSubsts,
}
/**
* Represents the values to use when substituting lifetime parameters.
* If the value is `ErasedRegions`, then this subst is occurring during
* trans, and all region parameters will be replaced with `ty::ReStatic`. */
#[deriving(Clone, PartialEq, Eq, Hash, Show)]
pub enum RegionSubsts {
ErasedRegions,
NonerasedRegions(VecPerParamSpace<ty::Region>)
}
impl Substs {
pub fn new(t: VecPerParamSpace<ty::t>,
r: VecPerParamSpace<ty::Region>)
-> Substs
{
Substs { types: t, regions: NonerasedRegions(r) }
}
pub fn new_type(t: Vec<ty::t>,
r: Vec<ty::Region>)
-> Substs
{
Substs::new(VecPerParamSpace::new(t, Vec::new(), Vec::new()),
VecPerParamSpace::new(r, Vec::new(), Vec::new()))
}
pub fn new_trait(t: Vec<ty::t>,
r: Vec<ty::Region>,
s: ty::t)
-> Substs
{
Substs::new(VecPerParamSpace::new(t, vec!(s), Vec::new()),
VecPerParamSpace::new(r, Vec::new(), Vec::new()))
}
pub fn erased(t: VecPerParamSpace<ty::t>) -> Substs
{
Substs { types: t, regions: ErasedRegions }
}
pub fn empty() -> Substs {
Substs {
types: VecPerParamSpace::empty(),
regions: NonerasedRegions(VecPerParamSpace::empty()),
}
}
pub fn trans_empty() -> Substs {
Substs {
types: VecPerParamSpace::empty(),
regions: ErasedRegions
}
}
pub fn is_noop(&self) -> bool {
let regions_is_noop = match self.regions {
ErasedRegions => false, // may be used to canonicalize
NonerasedRegions(ref regions) => regions.is_empty(),
};
regions_is_noop && self.types.is_empty()
}
pub fn self_ty(&self) -> Option<ty::t> {
self.types.get_self().map(|&t| t)
}
pub fn with_self_ty(&self, self_ty: ty::t) -> Substs {
assert!(self.self_ty().is_none());
let mut s = (*self).clone();
s.types.push(SelfSpace, self_ty);
s
}
pub fn erase_regions(self) -> Substs {
let Substs { types: types, regions: _ } = self;
Substs { types: types, regions: ErasedRegions }
}
pub fn regions<'a>(&'a self) -> &'a VecPerParamSpace<ty::Region> {
/*!
* Since ErasedRegions are only to be used in trans, most of
* the compiler can use this method to easily access the set
* of region substitutions.
*/
match self.regions {
ErasedRegions => fail!("Erased regions only expected in trans"),
NonerasedRegions(ref r) => r
}
}
pub fn mut_regions<'a>(&'a mut self) -> &'a mut VecPerParamSpace<ty::Region> {
/*!
* Since ErasedRegions are only to be used in trans, most of
* the compiler can use this method to easily access the set
* of region substitutions.
*/
match self.regions {
ErasedRegions => fail!("Erased regions only expected in trans"),
NonerasedRegions(ref mut r) => r
}
}
pub fn with_method(self,
m_types: Vec<ty::t>,
m_regions: Vec<ty::Region>)
-> Substs
{
let Substs { types, regions } = self;
let types = types.with_vec(FnSpace, m_types);
let regions = regions.map(m_regions,
|r, m_regions| r.with_vec(FnSpace, m_regions));
Substs { types: types, regions: regions }
}
}
impl RegionSubsts {
fn map<A>(self,
a: A,
op: |VecPerParamSpace<ty::Region>, A| -> VecPerParamSpace<ty::Region>)
-> RegionSubsts {
match self {
ErasedRegions => ErasedRegions,
NonerasedRegions(r) => NonerasedRegions(op(r, a))
}
}
}
///////////////////////////////////////////////////////////////////////////
// ParamSpace
#[deriving(PartialOrd, Ord, PartialEq, Eq,
Clone, Hash, Encodable, Decodable, Show)]
pub enum ParamSpace {
TypeSpace, // Type parameters attached to a type definition, trait, or impl
SelfSpace, // Self parameter on a trait
FnSpace, // Type parameters attached to a method or fn
}
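// As an illustrative (non-normative) example: for a hypothetical
// `trait Foo<A> { fn bar<B>(&self); }`, `A` lives in `TypeSpace`, the
// implicit `Self` type lives in `SelfSpace`, and `B` lives in `FnSpace`.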
impl ParamSpace {
pub fn all() -> [ParamSpace, ..3] {
[TypeSpace, SelfSpace, FnSpace]
}
pub fn to_uint(self) -> uint {
match self {
TypeSpace => 0,
SelfSpace => 1,
FnSpace => 2,
}
}
pub fn from_uint(u: uint) -> ParamSpace {
match u {
0 => TypeSpace,
1 => SelfSpace,
2 => FnSpace,
_ => fail!("Invalid ParamSpace: {}", u)
}
}
}
/**
* Vector of things sorted by param space. Used to keep
* the set of things declared on the type, self, or method
* distinct.
*/
#[deriving(PartialEq, Eq, Clone, Hash, Encodable, Decodable)]
pub struct VecPerParamSpace<T> {
// This was originally represented as a tuple with one Vec<T> for
// each variant of ParamSpace, and that remains the abstraction
// that it provides to its clients.
//
// Here is how the representation corresponds to the abstraction
// i.e. the "abstraction function" AF:
//
// AF(self) = (self.content.slice_to(self.type_limit),
// self.content.slice(self.type_limit, self.self_limit),
// self.content.slice_from(self.self_limit))
type_limit: uint,
self_limit: uint,
content: Vec<T>,
}
impl<T:fmt::Show> fmt::Show for VecPerParamSpace<T> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
try!(write!(fmt, "VecPerParamSpace {{"));
for space in ParamSpace::all().iter() {
try!(write!(fmt, "{}: {}, ", *space, self.get_slice(*space)));
}
try!(write!(fmt, "}}"));
Ok(())
}
}
impl<T> VecPerParamSpace<T> {
fn limits(&self, space: ParamSpace) -> (uint, uint) {
match space {
TypeSpace => (0, self.type_limit),
SelfSpace => (self.type_limit, self.self_limit),
FnSpace => (self.self_limit, self.content.len()),
}
}
pub fn empty() -> VecPerParamSpace<T> {
VecPerParamSpace {
type_limit: 0,
self_limit: 0,
content: Vec::new()
}
}
pub fn params_from_type(types: Vec<T>) -> VecPerParamSpace<T> {
VecPerParamSpace::empty().with_vec(TypeSpace, types)
}<|fim▁hole|> /// `t` is the type space.
/// `s` is the self space.
/// `f` is the fn space.
pub fn new(t: Vec<T>, s: Vec<T>, f: Vec<T>) -> VecPerParamSpace<T> {
let type_limit = t.len();
let self_limit = t.len() + s.len();
let mut content = t;
content.push_all_move(s);
content.push_all_move(f);
VecPerParamSpace {
type_limit: type_limit,
self_limit: self_limit,
content: content,
}
}
fn new_internal(content: Vec<T>, type_limit: uint, self_limit: uint)
-> VecPerParamSpace<T>
{
VecPerParamSpace {
type_limit: type_limit,
self_limit: self_limit,
content: content,
}
}
/// Appends `value` to the vector associated with `space`.
///
/// Unlike the `push` method in `Vec`, this should not be assumed
/// to be a cheap operation (even when amortized over many calls).
pub fn push(&mut self, space: ParamSpace, value: T) {
let (_, limit) = self.limits(space);
match space {
TypeSpace => { self.type_limit += 1; self.self_limit += 1; }
SelfSpace => { self.self_limit += 1; }
FnSpace => {}
}
self.content.insert(limit, value);
}
pub fn pop(&mut self, space: ParamSpace) -> Option<T> {
let (start, limit) = self.limits(space);
if start == limit {
None
} else {
match space {
TypeSpace => { self.type_limit -= 1; self.self_limit -= 1; }
SelfSpace => { self.self_limit -= 1; }
FnSpace => {}
}
self.content.remove(limit - 1)
}
}
pub fn truncate(&mut self, space: ParamSpace, len: uint) {
// FIXME (#15435): slow; O(n^2); could enhance vec to make it O(n).
while self.len(space) > len {
self.pop(space);
}
}
pub fn replace(&mut self, space: ParamSpace, elems: Vec<T>) {
// FIXME (#15435): slow; O(n^2); could enhance vec to make it O(n).
self.truncate(space, 0);
for t in elems.into_iter() {
self.push(space, t);
}
}
pub fn get_self<'a>(&'a self) -> Option<&'a T> {
let v = self.get_slice(SelfSpace);
assert!(v.len() <= 1);
if v.len() == 0 { None } else { Some(&v[0]) }
}
pub fn len(&self, space: ParamSpace) -> uint {
self.get_slice(space).len()
}
pub fn is_empty_in(&self, space: ParamSpace) -> bool {
self.len(space) == 0
}
pub fn get_slice<'a>(&'a self, space: ParamSpace) -> &'a [T] {
let (start, limit) = self.limits(space);
self.content.slice(start, limit)
}
pub fn get_mut_slice<'a>(&'a mut self, space: ParamSpace) -> &'a mut [T] {
let (start, limit) = self.limits(space);
self.content.slice_mut(start, limit)
}
pub fn opt_get<'a>(&'a self,
space: ParamSpace,
index: uint)
-> Option<&'a T> {
let v = self.get_slice(space);
if index < v.len() { Some(&v[index]) } else { None }
}
pub fn get<'a>(&'a self, space: ParamSpace, index: uint) -> &'a T {
&self.get_slice(space)[index]
}
pub fn iter<'a>(&'a self) -> Items<'a,T> {
self.content.iter()
}
pub fn as_slice(&self) -> &[T] {
self.content.as_slice()
}
pub fn all_vecs(&self, pred: |&[T]| -> bool) -> bool {
let spaces = [TypeSpace, SelfSpace, FnSpace];
spaces.iter().all(|&space| { pred(self.get_slice(space)) })
}
pub fn all(&self, pred: |&T| -> bool) -> bool {
self.iter().all(pred)
}
pub fn any(&self, pred: |&T| -> bool) -> bool {
self.iter().any(pred)
}
pub fn is_empty(&self) -> bool {
self.all_vecs(|v| v.is_empty())
}
pub fn map<U>(&self, pred: |&T| -> U) -> VecPerParamSpace<U> {
let result = self.iter().map(pred).collect();
VecPerParamSpace::new_internal(result,
self.type_limit,
self.self_limit)
}
pub fn map_move<U>(self, pred: |T| -> U) -> VecPerParamSpace<U> {
let (t, s, f) = self.split();
VecPerParamSpace::new(t.into_iter().map(|p| pred(p)).collect(),
s.into_iter().map(|p| pred(p)).collect(),
f.into_iter().map(|p| pred(p)).collect())
}
pub fn split(self) -> (Vec<T>, Vec<T>, Vec<T>) {
// FIXME (#15418): this does two traversals when in principle
// one would suffice. i.e. change to use `move_iter`.
let VecPerParamSpace { type_limit, self_limit, content } = self;
let mut i = 0;
let (prefix, fn_vec) = content.partition(|_| {
let on_left = i < self_limit;
i += 1;
on_left
});
let mut i = 0;
let (type_vec, self_vec) = prefix.partition(|_| {
let on_left = i < type_limit;
i += 1;
on_left
});
(type_vec, self_vec, fn_vec)
}
pub fn with_vec(mut self, space: ParamSpace, vec: Vec<T>)
-> VecPerParamSpace<T>
{
assert!(self.is_empty_in(space));
self.replace(space, vec);
self
}
}
///////////////////////////////////////////////////////////////////////////
// Public trait `Subst`
//
// Just call `foo.subst(tcx, substs)` to perform a substitution across
// `foo`. Or use `foo.subst_spanned(tcx, substs, Some(span))` when
// there is more information available (for better errors).
pub trait Subst {
fn subst(&self, tcx: &ty::ctxt, substs: &Substs) -> Self {
self.subst_spanned(tcx, substs, None)
}
fn subst_spanned(&self, tcx: &ty::ctxt,
substs: &Substs,
span: Option<Span>)
-> Self;
}
impl<T:TypeFoldable> Subst for T {
fn subst_spanned(&self,
tcx: &ty::ctxt,
substs: &Substs,
span: Option<Span>)
-> T
{
let mut folder = SubstFolder { tcx: tcx,
substs: substs,
span: span,
root_ty: None,
ty_stack_depth: 0 };
(*self).fold_with(&mut folder)
}
}
///////////////////////////////////////////////////////////////////////////
// The actual substitution engine itself is a type folder.
struct SubstFolder<'a, 'tcx: 'a> {
tcx: &'a ty::ctxt<'tcx>,
substs: &'a Substs,
// The location for which the substitution is performed, if available.
span: Option<Span>,
// The root type that is being substituted, if available.
root_ty: Option<ty::t>,
// Depth of type stack
ty_stack_depth: uint,
}
impl<'a, 'tcx> TypeFolder<'tcx> for SubstFolder<'a, 'tcx> {
fn tcx<'a>(&'a self) -> &'a ty::ctxt<'tcx> { self.tcx }
fn fold_region(&mut self, r: ty::Region) -> ty::Region {
// Note: This routine only handles regions that are bound on
// type declarations and other outer declarations, not those
// bound in *fn types*. Region substitution of the bound
// regions that appear in a function signature is done using
// the specialized routine
// `middle::typeck::check::regionmanip::replace_late_regions_in_fn_sig()`.
match r {
ty::ReEarlyBound(_, space, i, region_name) => {
match self.substs.regions {
ErasedRegions => ty::ReStatic,
NonerasedRegions(ref regions) =>
match regions.opt_get(space, i) {
Some(t) => *t,
None => {
let span = self.span.unwrap_or(DUMMY_SP);
self.tcx().sess.span_bug(
span,
format!("Type parameter out of range \
when substituting in region {} (root type={}) \
(space={}, index={})",
region_name.as_str(),
self.root_ty.repr(self.tcx()),
space, i).as_slice());
}
}
}
}
_ => r
}
}
fn fold_ty(&mut self, t: ty::t) -> ty::t {
if !ty::type_needs_subst(t) {
return t;
}
// track the root type we were asked to substitute
let depth = self.ty_stack_depth;
if depth == 0 {
self.root_ty = Some(t);
}
self.ty_stack_depth += 1;
let t1 = match ty::get(t).sty {
ty::ty_param(p) => {
check(self,
p,
t,
self.substs.types.opt_get(p.space, p.idx),
p.space,
p.idx)
}
_ => {
ty_fold::super_fold_ty(self, t)
}
};
assert_eq!(depth + 1, self.ty_stack_depth);
self.ty_stack_depth -= 1;
if depth == 0 {
self.root_ty = None;
}
return t1;
fn check(this: &SubstFolder,
p: ty::ParamTy,
source_ty: ty::t,
opt_ty: Option<&ty::t>,
space: ParamSpace,
index: uint)
-> ty::t {
match opt_ty {
Some(t) => *t,
None => {
let span = this.span.unwrap_or(DUMMY_SP);
this.tcx().sess.span_bug(
span,
format!("Type parameter `{}` ({}/{}/{}) out of range \
when substituting (root type={})",
p.repr(this.tcx()),
source_ty.repr(this.tcx()),
space,
index,
this.root_ty.repr(this.tcx())).as_slice());
}
}
}
}
}<|fim▁end|> | |
<|file_name|>Invocation.java<|end_file_name|><|fim▁begin|>package org.jruby.ext.ffi.jna;
import java.util.ArrayList;
import org.jruby.runtime.ThreadContext;
/**
* An invocation session.
* This provides post-invoke cleanup.
*/
final class Invocation {
private final ThreadContext context;
private ArrayList<Runnable> postInvokeList;
Invocation(ThreadContext context) {
this.context = context;<|fim▁hole|> if (postInvokeList != null) {
for (Runnable r : postInvokeList) {
r.run();
}
}
}
void addPostInvoke(Runnable postInvoke) {
if (postInvokeList == null) {
postInvokeList = new ArrayList<Runnable>();
}
postInvokeList.add(postInvoke);
}
ThreadContext getThreadContext() {
return context;
}
}<|fim▁end|> | }
void finish() { |
<|file_name|>submit_util.py<|end_file_name|><|fim▁begin|>'''
Created on Aug 21, 2014
@author: Dean4Devil
'''
import mysql.connector
from pycore.sql_util import MySQLHelper
class SubmitTree():
'A tree of all submits to that standard. I.e. OpenDriver is a tree, OpenDriver 0.2 is a submit.'
def __init__(self, identifier):
'Create a new Tree in memory.'
self.sql_helper = MySQLHelper("oetf_submits")
if self.sql_helper.check_exists(identifier):
self.tree = self.sql_helper.query_data(identifier, "*", delimiter="", order="id", row_num=0)
else:
# First submit in that tree. Table does not exist yet.
            # Build a single CREATE TABLE statement via implicit string concatenation
            table = (
                "CREATE TABLE IF NOT EXISTS `{}` ("
                "`id` int(11) NOT NULL AUTO_INCREMENT,"
                "`version` varchar(32) COLLATE utf8mb4_bin NOT NULL,"
                "`comment` text COLLATE utf8mb4_bin NOT NULL,"
                "`content` text COLLATE utf8mb4_bin NOT NULL,"
                "`published_date` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,"
                "PRIMARY KEY (`id`)"
                ") ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin AUTO_INCREMENT=1 ;").format(identifier)
con = self.sql_helper.return_con()
cur = con.cursor()
cur.execute(table)
self.tree = []<|fim▁hole|>
class Submit():
'Submit element'<|fim▁end|> | cur.close()
con.close() |
<|file_name|>version.ts<|end_file_name|><|fim▁begin|>/**
* Version Service
*/
import semver = require('semver');
interface VersionUtility {
normalizeName?: (name: string) => string;
compare?: (v1, v2) => number;
}<|fim▁hole|>
// Normalize version name
VersionUtility.normalizeName = function (name) {
if (name[0] == 'v') name = name.slice(1);
return name;
};
// Compare two versions
VersionUtility.compare = function (v1, v2) {
if (semver.gt(v1.tag, v2.tag)) {
return -1;
}
if (semver.lt(v1.tag, v2.tag)) {
return 1;
}
return 0;
};
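// Illustrative usage (hypothetical release objects): sorting with this
// comparator orders the newest tag first.
//   [{ tag: '1.0.0' }, { tag: '2.1.0' }].sort(VersionUtility.compare)
//   // -> [{ tag: '2.1.0' }, { tag: '1.0.0' }]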
export default VersionUtility;<|fim▁end|> | var VersionUtility: VersionUtility = {}; |
<|file_name|>instr_shld.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
use ::test::run_test;
#[test]
fn shld_1() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(Direct(CX)), operand2: Some(Direct(DI)), operand3: Some(Literal8(53)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 164, 249, 53], OperandSize::Word)
}
#[test]
fn shld_2() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(IndirectScaledIndexedDisplaced(BP, DI, One, 23, Some(OperandSize::Word), None)), operand2: Some(Direct(BP)), operand3: Some(Literal8(81)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 164, 107, 23, 81], OperandSize::Word)
}
#[test]
fn shld_3() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(Direct(BX)), operand2: Some(Direct(BP)), operand3: Some(Literal8(118)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 164, 235, 118], OperandSize::Dword)
}
#[test]
fn shld_4() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(IndirectScaledIndexed(EAX, ESI, Four, Some(OperandSize::Word), None)), operand2: Some(Direct(DX)), operand3: Some(Literal8(49)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 164, 20, 176, 49], OperandSize::Dword)
}
#[test]
fn shld_5() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(Direct(BP)), operand2: Some(Direct(CX)), operand3: Some(Literal8(115)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 164, 205, 115], OperandSize::Qword)
}
#[test]
fn shld_6() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(IndirectDisplaced(RDX, 2042607570, Some(OperandSize::Word), None)), operand2: Some(Direct(BP)), operand3: Some(Literal8(95)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 164, 170, 210, 183, 191, 121, 95], OperandSize::Qword)
}
#[test]
fn shld_7() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(Direct(EBX)), operand2: Some(Direct(ECX)), operand3: Some(Literal8(25)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 164, 203, 25], OperandSize::Word)
}
#[test]
fn shld_8() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(IndirectDisplaced(BX, 225, Some(OperandSize::Dword), None)), operand2: Some(Direct(EDX)), operand3: Some(Literal8(115)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 164, 151, 225, 0, 115], OperandSize::Word)
}
#[test]
fn shld_9() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(Direct(EDI)), operand2: Some(Direct(ECX)), operand3: Some(Literal8(54)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 164, 207, 54], OperandSize::Dword)
}
#[test]
fn shld_10() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(IndirectDisplaced(EAX, 299914685, Some(OperandSize::Dword), None)), operand2: Some(Direct(EBX)), operand3: Some(Literal8(39)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 164, 152, 189, 85, 224, 17, 39], OperandSize::Dword)
}
#[test]
fn shld_11() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(Direct(ESI)), operand2: Some(Direct(ECX)), operand3: Some(Literal8(67)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 164, 206, 67], OperandSize::Qword)
}
#[test]
fn shld_12() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(IndirectScaledIndexed(RCX, RCX, Eight, Some(OperandSize::Dword), None)), operand2: Some(Direct(ECX)), operand3: Some(Literal8(76)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 164, 12, 201, 76], OperandSize::Qword)
}
#[test]
fn shld_13() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(Direct(RBX)), operand2: Some(Direct(RSI)), operand3: Some(Literal8(18)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[72, 15, 164, 243, 18], OperandSize::Qword)
}
#[test]
fn shld_14() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(IndirectScaledDisplaced(RDX, Eight, 1759335901, Some(OperandSize::Qword), None)), operand2: Some(Direct(RSI)), operand3: Some(Literal8(25)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[72, 15, 164, 52, 213, 221, 85, 221, 104, 25], OperandSize::Qword)
}
#[test]
fn shld_15() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(Direct(DX)), operand2: Some(Direct(DI)), operand3: Some(Direct(CL)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 165, 250], OperandSize::Word)
}
#[test]
fn shld_16() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(Indirect(DI, Some(OperandSize::Word), None)), operand2: Some(Direct(BX)), operand3: Some(Direct(CL)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 165, 29], OperandSize::Word)
}
#[test]
fn shld_17() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(Direct(DI)), operand2: Some(Direct(BX)), operand3: Some(Direct(CL)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 165, 223], OperandSize::Dword)
}
#[test]
fn shld_18() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(IndirectScaledDisplaced(EAX, Eight, 1920455266, Some(OperandSize::Word), None)), operand2: Some(Direct(DI)), operand3: Some(Direct(CL)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 165, 60, 197, 98, 210, 119, 114], OperandSize::Dword)
}
#[test]
fn shld_19() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(Direct(CX)), operand2: Some(Direct(DI)), operand3: Some(Direct(CL)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 165, 249], OperandSize::Qword)
}
#[test]
fn shld_20() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(Indirect(RBX, Some(OperandSize::Word), None)), operand2: Some(Direct(BP)), operand3: Some(Direct(CL)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 165, 43], OperandSize::Qword)
}
#[test]<|fim▁hole|>fn shld_21() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(Direct(EBP)), operand2: Some(Direct(EDI)), operand3: Some(Direct(CL)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 165, 253], OperandSize::Word)
}
#[test]
fn shld_22() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(IndirectDisplaced(SI, 167, Some(OperandSize::Dword), None)), operand2: Some(Direct(ECX)), operand3: Some(Direct(CL)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 165, 140, 167, 0], OperandSize::Word)
}
#[test]
fn shld_23() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(Direct(EBX)), operand2: Some(Direct(ESP)), operand3: Some(Direct(CL)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 165, 227], OperandSize::Dword)
}
#[test]
fn shld_24() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(IndirectScaledIndexedDisplaced(EDI, EDX, Two, 763862891, Some(OperandSize::Dword), None)), operand2: Some(Direct(EDX)), operand3: Some(Direct(CL)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 165, 148, 87, 107, 159, 135, 45], OperandSize::Dword)
}
#[test]
fn shld_25() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(Direct(EBP)), operand2: Some(Direct(EDX)), operand3: Some(Direct(CL)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 165, 213], OperandSize::Qword)
}
#[test]
fn shld_26() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(IndirectScaledDisplaced(RBX, Eight, 792003927, Some(OperandSize::Dword), None)), operand2: Some(Direct(EDI)), operand3: Some(Direct(CL)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 165, 60, 221, 87, 5, 53, 47], OperandSize::Qword)
}
#[test]
fn shld_27() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(Direct(RSI)), operand2: Some(Direct(RDX)), operand3: Some(Direct(CL)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[72, 15, 165, 214], OperandSize::Qword)
}
#[test]
fn shld_28() {
run_test(&Instruction { mnemonic: Mnemonic::SHLD, operand1: Some(IndirectDisplaced(RCX, 1750215584, Some(OperandSize::Qword), None)), operand2: Some(Direct(RSP)), operand3: Some(Direct(CL)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[72, 15, 165, 161, 160, 43, 82, 104], OperandSize::Qword)
}<|fim▁end|> | |
<|file_name|>config_flow.py<|end_file_name|><|fim▁begin|>"""Config flow for Mailgun."""
from homeassistant.helpers import config_entry_flow
from .const import DOMAIN
config_entry_flow.register_webhook_flow(
DOMAIN,<|fim▁hole|> {
"mailgun_url": "https://documentation.mailgun.com/en/latest/user_manual.html#webhooks", # noqa: E501 pylint: disable=line-too-long
"docs_url": "https://www.home-assistant.io/components/mailgun/",
},
)<|fim▁end|> | "Mailgun Webhook", |
<|file_name|>audit.js<|end_file_name|><|fim▁begin|>// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/**
* @fileOverview WebAudio layout test utility library. Built around W3C's
* testharness.js. Includes asynchronous test task manager,
* assertion utilities.
* @dependency testharness.js
*/
(function() {
'use strict';
// Selected methods from testharness.js.
let testharnessProperties = [
'test', 'async_test', 'promise_test', 'promise_rejects', 'generate_tests',
'setup', 'done', 'assert_true', 'assert_false'
];
// Check if testharness.js is properly loaded. Throw otherwise.
for (let name in testharnessProperties) {
if (!self.hasOwnProperty(testharnessProperties[name]))
throw new Error('Cannot proceed. testharness.js is not loaded.');
}
})();
window.Audit = (function() {
'use strict';
// NOTE: Moving this method (or any other code above) will change the location
// of 'CONSOLE ERROR...' message in the expected text files.
function _logError(message) {
console.error('[audit.js] ' + message);
}
function _logPassed(message) {
test(function(arg) {
assert_true(true);
}, message);
}
function _logFailed(message, detail) {
test(function() {
assert_true(false, detail);
}, message);
}
function _throwException(message) {
throw new Error(message);
}
// TODO(hongchan): remove this hack after confirming all the tests are
// finished correctly. (crbug.com/708817)
const _testharnessDone = window.done;
window.done = () => {
_throwException('Do NOT call done() method from the test code.');
};
// Generate a descriptive string from a target value in various types.
function _generateDescription(target, options) {
let targetString;
switch (typeof target) {
case 'object':
// Handle Arrays.
if (target instanceof Array || target instanceof Float32Array ||
target instanceof Float64Array || target instanceof Uint8Array) {
let arrayElements = target.length < options.numberOfArrayElements ?
String(target) :
String(target.slice(0, options.numberOfArrayElements)) + '...';
targetString = '[' + arrayElements + ']';
} else if (target === null) {
targetString = String(target);
} else {
targetString = '' + String(target).split(/[\s\]]/)[1];
}
break;
case 'function':
if (Error.isPrototypeOf(target)) {
targetString = "EcmaScript error " + target.name;
} else {
targetString = String(target);
}
break;
default:
targetString = String(target);
break;
}
return targetString;
}
// Return a string suitable for printing one failed element in
// |beCloseToArray|.
function _formatFailureEntry(index, actual, expected, abserr, threshold) {
return '\t[' + index + ']\t' + actual.toExponential(16) + '\t' +
expected.toExponential(16) + '\t' + abserr.toExponential(16) + '\t' +
(abserr / Math.abs(expected)).toExponential(16) + '\t' +
threshold.toExponential(16);
}
// Compute the error threshold criterion for |beCloseToArray|
function _closeToThreshold(abserr, relerr, expected) {
return Math.max(abserr, relerr * Math.abs(expected));
}
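// For instance (illustrative numbers only): with abserr = 1e-7,
// relerr = 1e-5 and expected = 0.5, the threshold is
// Math.max(1e-7, 1e-5 * 0.5) === 5e-6, so the relative criterion governs.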
/**
* @class Should
* @description Assertion subtask for the Audit task.
* @param {Task} parentTask Associated Task object.
* @param {Any} actual Target value to be tested.
* @param {String} actualDescription String description of the test target.
*/
class Should {
constructor(parentTask, actual, actualDescription) {
this._task = parentTask;
this._actual = actual;
this._actualDescription = (actualDescription || null);
this._expected = null;
this._expectedDescription = null;
this._detail = '';
// If true and the test failed, print the actual value at the
// end of the message.
this._printActualForFailure = true;
this._result = null;
/**
* @param {Number} numberOfErrors Number of errors to be printed.
* @param {Number} numberOfArrayElements Number of array elements to be
* printed in the test log.
* @param {Boolean} verbose Verbose output from the assertion.
*/
this._options = {
numberOfErrors: 4,
numberOfArrayElements: 16,
verbose: false
};
}
_processArguments(args) {
      if (args.length === 0)
        return;
      this._expected = args[0];
if (typeof args[1] === 'string') {
// case 1: (expected, description, options)
this._expectedDescription = args[1];
Object.assign(this._options, args[2]);
} else if (typeof args[1] === 'object') {
// case 2: (expected, options)
Object.assign(this._options, args[1]);
}
}
_buildResultText() {
if (this._result === null)
_throwException('Illegal invocation: the assertion is not finished.');
let actualString = _generateDescription(this._actual, this._options);
// Use generated text when the description is not provided.
if (!this._actualDescription)
this._actualDescription = actualString;
if (!this._expectedDescription) {
this._expectedDescription =
_generateDescription(this._expected, this._options);
}
// For the assertion with a single operand.
this._detail =
this._detail.replace(/\$\{actual\}/g, this._actualDescription);
// If there is a second operand (i.e. expected value), we have to build
// the string for it as well.
this._detail =
this._detail.replace(/\$\{expected\}/g, this._expectedDescription);
// If there is any property in |_options|, replace the property name
// with the value.
for (let name in this._options) {
if (name === 'numberOfErrors' || name === 'numberOfArrayElements' ||
name === 'verbose') {
continue;
}
// The RegExp key string contains special character. Take care of it.
let re = '\$\{' + name + '\}';
re = re.replace(/([.*+?^=!:${}()|\[\]\/\\])/g, '\\$1');
this._detail = this._detail.replace(
new RegExp(re, 'g'), _generateDescription(this._options[name]));
}
// If the test failed, add the actual value at the end.
if (this._result === false && this._printActualForFailure === true) {
this._detail += ' Got ' + actualString + '.';
}
}
_finalize() {<|fim▁hole|> if (this._result) {
_logPassed(' ' + this._detail);
} else {
_logFailed('X ' + this._detail);
}
// This assertion is finished, so update the parent task accordingly.
this._task.update(this);
// TODO(hongchan): configurable 'detail' message.
}
_assert(condition, passDetail, failDetail) {
this._result = Boolean(condition);
this._detail = this._result ? passDetail : failDetail;
this._buildResultText();
this._finalize();
return this._result;
}
get result() {
return this._result;
}
get detail() {
return this._detail;
}
/**
* should() assertions.
*
* @example All the assertions can have 1, 2 or 3 arguments:
* should().doAssert(expected);
* should().doAssert(expected, options);
* should().doAssert(expected, expectedDescription, options);
*
* @param {Any} expected Expected value of the assertion.
* @param {String} expectedDescription Description of expected value.
* @param {Object} options Options for assertion.
* @param {Number} options.numberOfErrors Number of errors to be printed.
* (if applicable)
* @param {Number} options.numberOfArrayElements Number of array elements
* to be printed. (if
* applicable)
* @notes Some assertions can have additional options for their specific
* testing.
*/
/**
* Check if |actual| exists.
*
* @example
* should({}, 'An empty object').exist();
* @result
* "PASS An empty object does exist."
*/
exist() {
return this._assert(
this._actual !== null && this._actual !== undefined,
'${actual} does exist.', '${actual} does not exist.');
}
/**
* Check if |actual| operation wrapped in a function throws an exception
* with a expected error type correctly. |expected| is optional. If it is an
* instance of DOMException, then the description (second argument) can be
* provided to be more strict about the expected exception type. |expected|
* also can be other generic error types such as TypeError, RangeError or
* etc.
*
* @example
* should(() => { let a = b; }, 'A bad code').throw();
* should(() => { new SomeConstructor(); }, 'A bad construction')
* .throw(DOMException, 'NotSupportedError');
* should(() => { let c = d; }, 'Assigning d to c')
* .throw(ReferenceError);
* should(() => { let e = f; }, 'Assigning e to f')
* .throw(ReferenceError, { omitErrorMessage: true });
*
* @result
* "PASS A bad code threw an exception of ReferenceError: b is not
* defined."
* "PASS A bad construction threw DOMException:NotSupportedError."
* "PASS Assigning d to c threw ReferenceError: d is not defined."
* "PASS Assigning e to f threw ReferenceError: [error message
* omitted]."
*/
throw() {
this._processArguments(arguments);
this._printActualForFailure = false;
let didThrowCorrectly = false;
let passDetail, failDetail;
try {
// This should throw.
this._actual();
// Catch did not happen, so the test is failed.
failDetail = '${actual} did not throw an exception.';
} catch (error) {
let errorMessage = this._options.omitErrorMessage ?
': [error message omitted]' :
': "' + error.message + '"';
if (this._expected === null || this._expected === undefined) {
// The expected error type was not given.
didThrowCorrectly = true;
passDetail = '${actual} threw ' + error.name + errorMessage + '.';
} else if (this._expected === DOMException &&
(this._expectedDescription === undefined ||
this._expectedDescription === error.name)) {
// Handles DOMException with the associated name.
didThrowCorrectly = true;
passDetail = '${actual} threw ${expected}' + errorMessage + '.';
} else if (this._expected == error.constructor) {
// Handler other error types.
didThrowCorrectly = true;
passDetail = '${actual} threw ' + error.name + errorMessage + '.';
} else {
didThrowCorrectly = false;
failDetail =
'${actual} threw "' + error.name + '" instead of ${expected}.';
}
}
return this._assert(didThrowCorrectly, passDetail, failDetail);
}
/**
* Check if |actual| operation wrapped in a function does not throws an
* exception correctly.
*
* @example
* should(() => { let foo = 'bar'; }, 'let foo = "bar"').notThrow();
*
* @result
* "PASS let foo = "bar" did not throw an exception."
*/
notThrow() {
this._printActualForFailure = false;
let didThrowCorrectly = false;
let passDetail, failDetail;
try {
this._actual();
passDetail = '${actual} did not throw an exception.';
} catch (error) {
didThrowCorrectly = true;
failDetail = '${actual} incorrectly threw ' + error.name + ': "' +
error.message + '".';
}
return this._assert(!didThrowCorrectly, passDetail, failDetail);
}
/**
* Check if |actual| promise is resolved correctly. Note that the returned
* result from promise object will be passed to the following then()
* function.
*
* @example
   * should(promise, 'My promise').beResolved().then((result) => {
* log(result);
* });
*
* @result
* "PASS My promise resolved correctly."
* "FAIL X My promise rejected *INCORRECTLY* with _ERROR_."
*/
beResolved() {
return this._actual.then(
function(result) {
this._assert(true, '${actual} resolved correctly.', null);
return result;
}.bind(this),
function(error) {
this._assert(
false, null,
'${actual} rejected incorrectly with ' + error + '.');
}.bind(this));
}
/**
* Check if |actual| promise is rejected correctly.
*
* @example
   * should(promise, 'My promise').beRejected().then(nextStuff);
*
* @result
* "PASS My promise rejected correctly (with _ERROR_)."
* "FAIL X My promise resolved *INCORRECTLY*."
*/
beRejected() {
return this._actual.then(
function() {
this._assert(false, null, '${actual} resolved incorrectly.');
}.bind(this),
function(error) {
this._assert(
true, '${actual} rejected correctly with ' + error + '.', null);
}.bind(this));
}
/**
* Check if |actual| promise is rejected correctly.
*
* @example
* should(promise, 'My promise').beRejectedWith('_ERROR_').then();
*
* @result
* "PASS My promise rejected correctly with _ERROR_."
* "FAIL X My promise rejected correctly but got _ACTUAL_ERROR instead of
* _EXPECTED_ERROR_."
* "FAIL X My promise resolved incorrectly."
*/
beRejectedWith() {
this._processArguments(arguments);
return this._actual.then(
function() {
this._assert(false, null, '${actual} resolved incorrectly.');
}.bind(this),
function(error) {
if (this._expected !== error.name) {
this._assert(
false, null,
'${actual} rejected correctly but got ' + error.name +
' instead of ' + this._expected + '.');
} else {
this._assert(
true,
'${actual} rejected correctly with ' + this._expected + '.',
null);
}
}.bind(this));
}
/**
* Check if |actual| is a boolean true.
*
* @example
* should(3 < 5, '3 < 5').beTrue();
*
* @result
* "PASS 3 < 5 is true."
*/
beTrue() {
return this._assert(
this._actual === true, '${actual} is true.',
'${actual} is not true.');
}
/**
* Check if |actual| is a boolean false.
*
* @example
* should(3 > 5, '3 > 5').beFalse();
*
* @result
* "PASS 3 > 5 is false."
*/
beFalse() {
return this._assert(
this._actual === false, '${actual} is false.',
'${actual} is not false.');
}
/**
* Check if |actual| is strictly equal to |expected|. (no type coercion)
*
* @example
* should(1).beEqualTo(1);
*
* @result
* "PASS 1 is equal to 1."
*/
beEqualTo() {
this._processArguments(arguments);
return this._assert(
this._actual === this._expected, '${actual} is equal to ${expected}.',
'${actual} is not equal to ${expected}.');
}
/**
* Check if |actual| is not equal to |expected|.
*
* @example
* should(1).notBeEqualTo(2);
*
* @result
* "PASS 1 is not equal to 2."
*/
notBeEqualTo() {
this._processArguments(arguments);
return this._assert(
this._actual !== this._expected,
'${actual} is not equal to ${expected}.',
'${actual} should not be equal to ${expected}.');
}
/**
   * Check if |actual| is NaN.
*
* @example
* should(NaN).beNaN();
*
* @result
* "PASS NaN is NaN"
*
*/
beNaN() {
this._processArguments(arguments);
return this._assert(
isNaN(this._actual),
'${actual} is NaN.',
'${actual} is not NaN but should be.');
}
/**
   * Check if |actual| is not NaN.
*
* @example
* should(42).notBeNaN();
*
* @result
* "PASS 42 is not NaN"
*
*/
notBeNaN() {
this._processArguments(arguments);
return this._assert(
!isNaN(this._actual),
'${actual} is not NaN.',
'${actual} is NaN but should not be.');
}
/**
* Check if |actual| is greater than |expected|.
*
* @example
   *   should(2).beGreaterThan(1);
*
* @result
* "PASS 2 is greater than or equal to 2."
*/
beGreaterThan() {
this._processArguments(arguments);
return this._assert(
this._actual > this._expected,
'${actual} is greater than ${expected}.',
'${actual} is not greater than ${expected}.');
}
/**
* Check if |actual| is greater than or equal to |expected|.
*
* @example
   *   should(2).beGreaterThanOrEqualTo(2);
*
* @result
* "PASS 2 is greater than 1."
*/
beGreaterThanOrEqualTo() {
this._processArguments(arguments);
return this._assert(
this._actual >= this._expected,
'${actual} is greater than or equal to ${expected}.',
'${actual} is not greater than or equal to ${expected}.');
}
/**
* Check if |actual| is less than |expected|.
*
* @example
* should(1).beLessThan(2);
*
* @result
* "PASS 1 is less than 2."
*/
beLessThan() {
this._processArguments(arguments);
return this._assert(
this._actual < this._expected, '${actual} is less than ${expected}.',
'${actual} is not less than ${expected}.');
}
/**
* Check if |actual| is less than or equal to |expected|.
*
* @example
* should(1).beLessThanOrEqualTo(1);
*
* @result
* "PASS 1 is less than or equal to 1."
*/
beLessThanOrEqualTo() {
this._processArguments(arguments);
return this._assert(
this._actual <= this._expected,
'${actual} is less than or equal to ${expected}.',
'${actual} is not less than or equal to ${expected}.');
}
/**
* Check if |actual| array is filled with a constant |expected| value.
*
* @example
* should([1, 1, 1]).beConstantValueOf(1);
*
* @result
* "PASS [1,1,1] contains only the constant 1."
*/
beConstantValueOf() {
this._processArguments(arguments);
this._printActualForFailure = false;
let passed = true;
let passDetail, failDetail;
let errors = {};
let actual = this._actual;
let expected = this._expected;
for (let index = 0; index < actual.length; ++index) {
if (actual[index] !== expected)
errors[index] = actual[index];
}
let numberOfErrors = Object.keys(errors).length;
passed = numberOfErrors === 0;
if (passed) {
passDetail = '${actual} contains only the constant ${expected}.';
} else {
let counter = 0;
failDetail =
'${actual}: Expected ${expected} for all values but found ' +
numberOfErrors + ' unexpected values: ';
failDetail += '\n\tIndex\tActual';
for (let errorIndex in errors) {
failDetail += '\n\t[' + errorIndex + ']' +
'\t' + errors[errorIndex];
if (++counter >= this._options.numberOfErrors) {
failDetail +=
'\n\t...and ' + (numberOfErrors - counter) + ' more errors.';
break;
}
}
}
return this._assert(passed, passDetail, failDetail);
}
/**
* Check if |actual| array is not filled with a constant |expected| value.
*
* @example
* should([1, 0, 1]).notBeConstantValueOf(1);
* should([0, 0, 0]).notBeConstantValueOf(0);
*
* @result
* "PASS [1,0,1] is not constantly 1 (contains 1 different value)."
* "FAIL X [0,0,0] should have contain at least one value different
* from 0."
*/
notBeConstantValueOf() {
this._processArguments(arguments);
this._printActualForFailure = false;
let passed = true;
let passDetail;
let failDetail;
let differences = {};
let actual = this._actual;
let expected = this._expected;
for (let index = 0; index < actual.length; ++index) {
if (actual[index] !== expected)
differences[index] = actual[index];
}
let numberOfDifferences = Object.keys(differences).length;
passed = numberOfDifferences > 0;
if (passed) {
let valueString = numberOfDifferences > 1 ? 'values' : 'value';
passDetail = '${actual} is not constantly ${expected} (contains ' +
numberOfDifferences + ' different ' + valueString + ').';
} else {
      failDetail = '${actual} should contain at least one value ' +
          'different from ${expected}.';
}
return this._assert(passed, passDetail, failDetail);
}
/**
* Check if |actual| array is identical to |expected| array element-wise.
*
* @example
* should([1, 2, 3]).beEqualToArray([1, 2, 3]);
*
* @result
* "[1,2,3] is identical to the array [1,2,3]."
*/
beEqualToArray() {
this._processArguments(arguments);
this._printActualForFailure = false;
let passed = true;
let passDetail, failDetail;
let errorIndices = [];
if (this._actual.length !== this._expected.length) {
passed = false;
failDetail = 'The array length does not match.';
return this._assert(passed, passDetail, failDetail);
}
let actual = this._actual;
let expected = this._expected;
for (let index = 0; index < actual.length; ++index) {
if (actual[index] !== expected[index])
errorIndices.push(index);
}
passed = errorIndices.length === 0;
if (passed) {
passDetail = '${actual} is identical to the array ${expected}.';
} else {
let counter = 0;
failDetail =
'${actual} expected to be equal to the array ${expected} ' +
'but differs in ' + errorIndices.length + ' places:' +
'\n\tIndex\tActual\t\t\tExpected';
for (let index of errorIndices) {
failDetail += '\n\t[' + index + ']' +
'\t' + this._actual[index].toExponential(16) + '\t' +
this._expected[index].toExponential(16);
if (++counter >= this._options.numberOfErrors) {
failDetail += '\n\t...and ' + (errorIndices.length - counter) +
' more errors.';
break;
}
}
}
return this._assert(passed, passDetail, failDetail);
}
/**
* Check if |actual| array contains only the values in |expected| in the
* order of values in |expected|.
*
* @example
   *   should([1, 1, 3, 3, 2], 'My random array').containValues([1, 3, 2]);
*
* @result
* "PASS [1,1,3,3,2] contains all the expected values in the correct
   *   order: [1,3,2]."
*/
containValues() {
this._processArguments(arguments);
this._printActualForFailure = false;
let passed = true;
let indexedActual = [];
let firstErrorIndex = null;
// Collect the unique value sequence from the actual.
for (let i = 0, prev = null; i < this._actual.length; i++) {
if (this._actual[i] !== prev) {
indexedActual.push({index: i, value: this._actual[i]});
prev = this._actual[i];
}
}
// Compare against the expected sequence.
let failMessage =
'${actual} expected to have the value sequence of ${expected} but ' +
'got ';
if (this._expected.length === indexedActual.length) {
for (let j = 0; j < this._expected.length; j++) {
if (this._expected[j] !== indexedActual[j].value) {
firstErrorIndex = indexedActual[j].index;
passed = false;
failMessage += this._actual[firstErrorIndex] + ' at index ' +
firstErrorIndex + '.';
break;
}
}
} else {
passed = false;
let indexedValues = indexedActual.map(x => x.value);
failMessage += `${indexedActual.length} values, [${
indexedValues}], instead of ${this._expected.length}.`;
}
return this._assert(
passed,
'${actual} contains all the expected values in the correct order: ' +
'${expected}.',
failMessage);
}
/**
   * Check if |actual| array does not have any glitches. Note that |threshold|
   * is required and defines the maximum allowed difference between adjacent
   * samples.
*
* @example
* should([0.5, 0.5, 0.55, 0.5, 0.45, 0.5]).notGlitch(0.06);
*
* @result
* "PASS [0.5,0.5,0.55,0.5,0.45,0.5] has no glitch above the threshold
* of 0.06."
*
*/
notGlitch() {
this._processArguments(arguments);
this._printActualForFailure = false;
let passed = true;
let passDetail, failDetail;
let actual = this._actual;
let expected = this._expected;
    for (let index = 1; index < actual.length; ++index) {
let diff = Math.abs(actual[index - 1] - actual[index]);
if (diff >= expected) {
passed = false;
failDetail = '${actual} has a glitch at index ' + index +
' of size ' + diff + '.';
}
}
passDetail =
'${actual} has no glitch above the threshold of ${expected}.';
return this._assert(passed, passDetail, failDetail);
}
/**
* Check if |actual| is close to |expected| using the given relative error
* |threshold|.
*
* @example
* should(2.3).beCloseTo(2, { threshold: 0.3 });
*
* @result
* "PASS 2.3 is 2 within an error of 0.3."
* @param {Object} options Options for assertion.
* @param {Number} options.threshold Threshold value for the comparison.
*/
beCloseTo() {
this._processArguments(arguments);
// The threshold is relative except when |expected| is zero, in which case
// it is absolute.
let absExpected = this._expected ? Math.abs(this._expected) : 1;
let error = Math.abs(this._actual - this._expected) / absExpected;
return this._assert(
error <= this._options.threshold,
'${actual} is ${expected} within an error of ${threshold}.',
'${actual} is not close to ${expected} within a relative error of ' +
'${threshold} (RelErr=' + error + ').');
}
/**
* Check if |target| array is close to |expected| array element-wise within
* a certain error bound given by the |options|.
*
* The error criterion is:
* abs(actual[k] - expected[k]) < max(absErr, relErr * abs(expected))
*
* If nothing is given for |options|, then absErr = relErr = 0. If
* absErr = 0, then the error criterion is a relative error. A non-zero
* absErr value produces a mix intended to handle the case where the
* expected value is 0, allowing the target value to differ by absErr from
* the expected.
*
* @param {Number} options.absoluteThreshold Absolute threshold.
* @param {Number} options.relativeThreshold Relative threshold.
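   *
   * @example
   *   // Illustrative sketch (values are hypothetical): each element must
   *   // match within max(absoluteThreshold, relativeThreshold * |expected[k]|).
   *   should([1.0, 2.000001], 'My array').beCloseToArray(
   *       [1, 2], {absoluteThreshold: 1e-7, relativeThreshold: 1e-5});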
*/
beCloseToArray() {
this._processArguments(arguments);
this._printActualForFailure = false;
let passed = true;
let passDetail, failDetail;
// Parsing options.
let absErrorThreshold = (this._options.absoluteThreshold || 0);
let relErrorThreshold = (this._options.relativeThreshold || 0);
// A collection of all of the values that satisfy the error criterion.
// This holds the absolute difference between the target element and the
// expected element.
let errors = {};
// Keep track of the max absolute error found.
let maxAbsError = -Infinity, maxAbsErrorIndex = -1;
// Keep track of the max relative error found, ignoring cases where the
// relative error is Infinity because the expected value is 0.
let maxRelError = -Infinity, maxRelErrorIndex = -1;
let actual = this._actual;
let expected = this._expected;
for (let index = 0; index < expected.length; ++index) {
let diff = Math.abs(actual[index] - expected[index]);
let absExpected = Math.abs(expected[index]);
let relError = diff / absExpected;
if (diff >
Math.max(absErrorThreshold, relErrorThreshold * absExpected)) {
if (diff > maxAbsError) {
maxAbsErrorIndex = index;
maxAbsError = diff;
}
if (!isNaN(relError) && relError > maxRelError) {
maxRelErrorIndex = index;
maxRelError = relError;
}
errors[index] = diff;
}
}
let numberOfErrors = Object.keys(errors).length;
let maxAllowedErrorDetail = JSON.stringify({
absoluteThreshold: absErrorThreshold,
relativeThreshold: relErrorThreshold
});
if (numberOfErrors === 0) {
// The assertion was successful.
passDetail = '${actual} equals ${expected} with an element-wise ' +
'tolerance of ' + maxAllowedErrorDetail + '.';
} else {
// Failed. Prepare the detailed failure log.
passed = false;
failDetail = '${actual} does not equal ${expected} with an ' +
'element-wise tolerance of ' + maxAllowedErrorDetail + '.\n';
// Print out actual, expected, absolute error, and relative error.
let counter = 0;
failDetail += '\tIndex\tActual\t\t\tExpected\t\tAbsError' +
'\t\tRelError\t\tTest threshold';
let printedIndices = [];
for (let index in errors) {
failDetail +=
'\n' +
_formatFailureEntry(
index, actual[index], expected[index], errors[index],
_closeToThreshold(
absErrorThreshold, relErrorThreshold, expected[index]));
printedIndices.push(index);
if (++counter > this._options.numberOfErrors) {
failDetail +=
'\n\t...and ' + (numberOfErrors - counter) + ' more errors.';
break;
}
}
// Finalize the error log: print out the location of both the maxAbs
// error and the maxRel error so we can adjust thresholds appropriately
// in the test.
failDetail += '\n' +
'\tMax AbsError of ' + maxAbsError.toExponential(16) +
' at index of ' + maxAbsErrorIndex + '.\n';
if (printedIndices.find(element => {
return element == maxAbsErrorIndex;
}) === undefined) {
// Print an entry for this index if we haven't already.
failDetail +=
_formatFailureEntry(
maxAbsErrorIndex, actual[maxAbsErrorIndex],
expected[maxAbsErrorIndex], errors[maxAbsErrorIndex],
_closeToThreshold(
absErrorThreshold, relErrorThreshold,
expected[maxAbsErrorIndex])) +
'\n';
}
failDetail += '\tMax RelError of ' + maxRelError.toExponential(16) +
' at index of ' + maxRelErrorIndex + '.\n';
if (printedIndices.find(element => {
return element == maxRelErrorIndex;
}) === undefined) {
// Print an entry for this index if we haven't already.
failDetail +=
_formatFailureEntry(
maxRelErrorIndex, actual[maxRelErrorIndex],
expected[maxRelErrorIndex], errors[maxRelErrorIndex],
_closeToThreshold(
absErrorThreshold, relErrorThreshold,
expected[maxRelErrorIndex])) +
'\n';
}
}
return this._assert(passed, passDetail, failDetail);
}
/**
   * A temporary escape hatch for printing an in-task message. The description
* for the |actual| is required to get the message printed properly.
*
* TODO(hongchan): remove this method when the transition from the old Audit
* to the new Audit is completed.
* @example
* should(true, 'The message is').message('truthful!', 'false!');
*
* @result
* "PASS The message is truthful!"
*/
message(passDetail, failDetail) {
return this._assert(
this._actual, '${actual} ' + passDetail, '${actual} ' + failDetail);
}
/**
* Check if |expected| property is truly owned by |actual| object.
*
* @example
* should(BaseAudioContext.prototype,
* 'BaseAudioContext.prototype').haveOwnProperty('createGain');
*
* @result
* "PASS BaseAudioContext.prototype has an own property of
* 'createGain'."
*/
haveOwnProperty() {
this._processArguments(arguments);
return this._assert(
this._actual.hasOwnProperty(this._expected),
'${actual} has an own property of "${expected}".',
'${actual} does not own the property of "${expected}".');
}
/**
* Check if |expected| property is not owned by |actual| object.
*
* @example
* should(BaseAudioContext.prototype,
* 'BaseAudioContext.prototype')
* .notHaveOwnProperty('startRendering');
*
* @result
* "PASS BaseAudioContext.prototype does not have an own property of
* 'startRendering'."
*/
notHaveOwnProperty() {
this._processArguments(arguments);
return this._assert(
!this._actual.hasOwnProperty(this._expected),
'${actual} does not have an own property of "${expected}".',
        '${actual} has an own property of "${expected}".');
}
/**
* Check if an object is inherited from a class. This looks up the entire
* prototype chain of a given object and tries to find a match.
*
* @example
* should(sourceNode, 'A buffer source node')
* .inheritFrom('AudioScheduledSourceNode');
*
* @result
* "PASS A buffer source node inherits from 'AudioScheduledSourceNode'."
*/
inheritFrom() {
this._processArguments(arguments);
let prototypes = [];
let currentPrototype = Object.getPrototypeOf(this._actual);
while (currentPrototype) {
prototypes.push(currentPrototype.constructor.name);
currentPrototype = Object.getPrototypeOf(currentPrototype);
}
return this._assert(
prototypes.includes(this._expected),
'${actual} inherits from "${expected}".',
'${actual} does not inherit from "${expected}".');
}
}
// Task Class state enum.
const TaskState = {PENDING: 0, STARTED: 1, FINISHED: 2};
/**
* @class Task
* @description WebAudio testing task. Managed by TaskRunner.
*/
class Task {
/**
* Task constructor.
* @param {Object} taskRunner Reference of associated task runner.
* @param {String||Object} taskLabel Task label if a string is given. This
* parameter can be a dictionary with the
* following fields.
* @param {String} taskLabel.label Task label.
* @param {String} taskLabel.description Description of task.
* @param {Function} taskFunction Task function to be performed.
* @return {Object} Task object.
*/
constructor(taskRunner, taskLabel, taskFunction) {
this._taskRunner = taskRunner;
this._taskFunction = taskFunction;
if (typeof taskLabel === 'string') {
this._label = taskLabel;
this._description = null;
} else if (typeof taskLabel === 'object') {
if (typeof taskLabel.label !== 'string') {
        _throwException('Task.constructor:: task label must be a string.');
}
this._label = taskLabel.label;
this._description = (typeof taskLabel.description === 'string') ?
taskLabel.description :
null;
} else {
_throwException(
'Task.constructor:: task label must be a string or ' +
'a dictionary.');
}
this._state = TaskState.PENDING;
this._result = true;
this._totalAssertions = 0;
this._failedAssertions = 0;
}
get label() {
return this._label;
}
get state() {
return this._state;
}
get result() {
return this._result;
}
// Start the assertion chain.
should(actual, actualDescription) {
// If no argument is given, we cannot proceed. Halt.
if (arguments.length === 0)
_throwException('Task.should:: requires at least 1 argument.');
return new Should(this, actual, actualDescription);
}
// Run this task. |this| task will be passed into the user-supplied test
// task function.
run(harnessTest) {
this._state = TaskState.STARTED;
this._harnessTest = harnessTest;
// Print out the task entry with label and description.
_logPassed(
'> [' + this._label + '] ' +
(this._description ? this._description : ''));
return new Promise((resolve, reject) => {
this._resolve = resolve;
this._reject = reject;
let result = this._taskFunction(this, this.should.bind(this));
if (result && typeof result.then === "function") {
result.then(() => this.done()).catch(reject);
}
});
}
// Update the task success based on the individual assertion/test inside.
update(subTask) {
// After one of tests fails within a task, the result is irreversible.
if (subTask.result === false) {
this._result = false;
this._failedAssertions++;
}
this._totalAssertions++;
}
// Finish the current task and start the next one if available.
done() {
    assert_equals(this._state, TaskState.STARTED);
this._state = TaskState.FINISHED;
let message = '< [' + this._label + '] ';
if (this._result) {
message += 'All assertions passed. (total ' + this._totalAssertions +
' assertions)';
_logPassed(message);
} else {
      message += this._failedAssertions + ' out of ' + this._totalAssertions +
          ' assertions failed.';
_logFailed(message);
}
this._resolve();
}
// Runs |subTask| |time| milliseconds later. |setTimeout| is not allowed in
// WPT linter, so a thin wrapper around the harness's |step_timeout| is
// used here. Returns a Promise which is resolved after |subTask| runs.
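  // Example (hypothetical): task.timeout(() => should(x).beTrue(), 100);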
timeout(subTask, time) {
return new Promise(resolve => {
this._harnessTest.step_timeout(() => {
let result = subTask();
if (result && typeof result.then === "function") {
// Chain rejection directly to the harness test Promise, to report
// the rejection against the subtest even when the caller of
// timeout does not handle the rejection.
          result.then(resolve, this._reject);
} else {
resolve();
}
}, time);
});
}
isPassed() {
return this._state === TaskState.FINISHED && this._result;
}
toString() {
return '"' + this._label + '": ' + this._description;
}
}
/**
* @class TaskRunner
* @description WebAudio testing task runner. Manages tasks.
*/
class TaskRunner {
constructor() {
this._tasks = {};
this._taskSequence = [];
// Configure testharness.js for the async operation.
setup(new Function(), {explicit_done: true});
}
_finish() {
let numberOfFailures = 0;
for (let taskIndex in this._taskSequence) {
let task = this._tasks[this._taskSequence[taskIndex]];
numberOfFailures += task.result ? 0 : 1;
}
let prefix = '# AUDIT TASK RUNNER FINISHED: ';
if (numberOfFailures > 0) {
_logFailed(
prefix + numberOfFailures + ' out of ' + this._taskSequence.length +
            ' tasks failed.');
} else {
_logPassed(
prefix + this._taskSequence.length + ' tasks ran successfully.');
}
return Promise.resolve();
}
// |taskLabel| can be either a string or a dictionary. See Task constructor
// for the detail. If |taskFunction| returns a thenable, then the task
// is considered complete when the thenable is fulfilled; otherwise the
// task must be completed with an explicit call to |task.done()|.
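    // e.g. audit.define('my-task', (task, should) => {
    //   should(1 + 1).beEqualTo(2);
    //   task.done();
    // });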
define(taskLabel, taskFunction) {
let task = new Task(this, taskLabel, taskFunction);
if (this._tasks.hasOwnProperty(task.label)) {
_throwException('Audit.define:: Duplicate task definition.');
return;
}
this._tasks[task.label] = task;
this._taskSequence.push(task.label);
}
    // Start running all the scheduled tasks. Multiple task names can be
    // passed to execute only those, sequentially. With no arguments, all
    // defined tasks run in the order of definition.
run() {
// Display the beginning of the test suite.
_logPassed('# AUDIT TASK RUNNER STARTED.');
// If the argument is specified, override the default task sequence with
// the specified one.
if (arguments.length > 0) {
this._taskSequence = [];
for (let i = 0; i < arguments.length; i++) {
let taskLabel = arguments[i];
if (!this._tasks.hasOwnProperty(taskLabel)) {
_throwException('Audit.run:: undefined task.');
} else if (this._taskSequence.includes(taskLabel)) {
_throwException('Audit.run:: duplicate task request.');
} else {
this._taskSequence.push(taskLabel);
}
}
}
if (this._taskSequence.length === 0) {
_throwException('Audit.run:: no task to run.');
return;
}
for (let taskIndex in this._taskSequence) {
let task = this._tasks[this._taskSequence[taskIndex]];
// Some tests assume that tasks run in sequence, which is provided by
// promise_test().
promise_test((t) => task.run(t), `Executing "${task.label}"`);
}
// Schedule a summary report on completion.
promise_test(() => this._finish(), "Audit report");
// From testharness.js. The harness now need not wait for more subtests
// to be added.
_testharnessDone();
}
}
/**
* Load file from a given URL and pass ArrayBuffer to the following promise.
* @param {String} fileUrl file URL.
* @return {Promise}
*
* @example
* Audit.loadFileFromUrl('resources/my-sound.ogg').then((response) => {
* audioContext.decodeAudioData(response).then((audioBuffer) => {
* // Do something with AudioBuffer.
* });
* });
*/
function loadFileFromUrl(fileUrl) {
return new Promise((resolve, reject) => {
let xhr = new XMLHttpRequest();
xhr.open('GET', fileUrl, true);
xhr.responseType = 'arraybuffer';
xhr.onload = () => {
        // |status = 0| is a workaround for the run_web_test.py server. We
        // suspect the server quits the transaction prematurely without
        // completing the request.
if (xhr.status === 200 || xhr.status === 0) {
resolve(xhr.response);
} else {
let errorMessage = 'loadFile: Request failed when loading ' +
fileUrl + '. ' + xhr.statusText + '. (status = ' + xhr.status +
')';
if (reject) {
reject(errorMessage);
} else {
          throw new Error(errorMessage);
}
}
};
xhr.onerror = (event) => {
let errorMessage =
'loadFile: Network failure when loading ' + fileUrl + '.';
if (reject) {
reject(errorMessage);
} else {
        throw new Error(errorMessage);
}
};
xhr.send();
});
}
/**
* @class Audit
* @description A WebAudio layout test task manager.
* @example
* let audit = Audit.createTaskRunner();
* audit.define('first-task', function (task, should) {
* should(someValue).beEqualTo(someValue);
* task.done();
* });
* audit.run();
*/
return {
/**
* Creates an instance of Audit task runner.
* @param {Object} options Options for task runner.
* @param {Boolean} options.requireResultFile True if the test suite
* requires explicit text
* comparison with the expected
* result file.
*/
createTaskRunner: function(options) {
      if (options && options.requireResultFile === true) {
_logError(
'this test requires the explicit comparison with the ' +
'expected result when it runs with run_web_tests.py.');
}
return new TaskRunner();
},
/**
* Load file from a given URL and pass ArrayBuffer to the following promise.
* See |loadFileFromUrl| method for the detail.
*/
loadFileFromUrl: loadFileFromUrl
};
})();<|fim▁end|> | |
<|file_name|>notify.rs<|end_file_name|><|fim▁begin|>use notify_rust::{Notification, NotificationHint, NotificationUrgency};
use rusthub::notifications;
use std::process::Command;
use std::thread;
use std::path::Path;
const APP_NAME: &'static str = "gh-notify";
pub fn show_notification(notification: ¬ifications::Notification) {
let subject = format!("{} - {}", notification.subject.subject_type, notification.repository.name);
let body = notification.subject.title.clone();
let url = notification.subject.url.clone();
let url = url.replace("api.", "").replace("repos/", "").replace("/pulls/", "/pull/");
thread::spawn(move || {
notify_action(
&subject,
&body,
"Open in Browser",
120,
|action| {
match action {
"default" | "clicked" => {
open_link(&url);
},
"__closed" => (),
_ => ()
}
}
).unwrap_or_else(|err| error!("While showing notification: {}", err));
});
}
pub fn open_link(url: &str) {
debug!("Opening browser link: {}", url);
let _ = Command::new("sh")
.arg("-c")
.arg(format!("xdg-open '{}'", url))
.output()
.expect("Failed to open web browser instance.");
}
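// Displays a notification with a single action button; `action` is invoked
// with the identifier the user triggered ("default", "clicked", or "__closed").
// Hypothetical sketch: notify_action("Title", "Body", "Open", 5000, |a| debug!("{}", a));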
pub fn notify_action<F>(summary: &str, body: &str, button_text: &str, timeout: i32, action: F) -> Result<(), String> where F: FnOnce(&str) {
let icon = match Path::new("./icon.png").canonicalize() {
Ok(path) => path.to_string_lossy().into_owned(),
Err(_) => "clock".to_string()
};
let handle = try!(Notification::new()
.appname(APP_NAME)
.summary(&summary)
.icon(&icon)
.body(&body)
.action("default", &button_text) // IDENTIFIER, LABEL
.action("clicked", &button_text) // IDENTIFIER, LABEL
.hint(NotificationHint::Urgency(NotificationUrgency::Normal))
.timeout(timeout)
.show().map_err(|err| err.to_string()));
handle.wait_for_action(action);
Ok(())<|fim▁hole|><|fim▁end|> | } |
<|file_name|>client.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: latin-1 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 3, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTIBILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
"Pythonic simple SOAP Client implementation"
__author__ = "Mariano Reingart ([email protected])"
__copyright__ = "Copyright (C) 2008 Mariano Reingart"
__license__ = "LGPL 3.0"
__version__ = "1.07a"
TIMEOUT = 60
import cPickle as pickle
import hashlib
import logging
import os
import tempfile
import urllib2
from urlparse import urlsplit
from simplexml import SimpleXMLElement, TYPE_MAP, REVERSE_TYPE_MAP, OrderedDict
from transport import get_http_wrapper, set_http_wrapper, get_Http
log = logging.getLogger(__name__)
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.WARNING)
class SoapFault(RuntimeError):
def __init__(self,faultcode,faultstring):
self.faultcode = faultcode
self.faultstring = faultstring
RuntimeError.__init__(self, faultcode, faultstring)
def __str__(self):
return self.__unicode__().encode("ascii", "ignore")
def __unicode__(self):
return u'%s: %s' % (self.faultcode, self.faultstring)
def __repr__(self):
return u"SoapFault(%s, %s)" % (repr(self.faultcode),
repr(self.faultstring))
# soap protocol specification & namespace
soap_namespaces = dict(
soap11="http://schemas.xmlsoap.org/soap/envelope/",
soap="http://schemas.xmlsoap.org/soap/envelope/",
soapenv="http://schemas.xmlsoap.org/soap/envelope/",
soap12="http://www.w3.org/2003/05/soap-env",
)
_USE_GLOBAL_DEFAULT = object()
class SoapClient(object):
"Simple SOAP Client (simil PHP)"
def __init__(self, location = None, action = None, namespace = None,
cert = None, trace = False, exceptions = True, proxy = None, ns=False,
soap_ns=None, wsdl = None, cache = False, cacert=None,
sessions=False, soap_server=None, timeout=_USE_GLOBAL_DEFAULT,
http_headers={}
):
"""
:param http_headers: Additional HTTP Headers; example: {'Host': 'ipsec.example.com'}
"""
self.certssl = cert
self.keyssl = None
self.location = location # server location (url)
self.action = action # SOAP base action
self.namespace = namespace # message
self.trace = trace # show debug messages
        self.exceptions = exceptions # raise exceptions? (SOAP Faults)
self.xml_request = self.xml_response = ''
self.http_headers = http_headers
if not soap_ns and not ns:
self.__soap_ns = 'soap' # 1.1
elif not soap_ns and ns:
self.__soap_ns = 'soapenv' # 1.2
else:
self.__soap_ns = soap_ns
# SOAP Server (special cases like oracle or jbossas6)
self.__soap_server = soap_server
# SOAP Header support
self.__headers = {} # general headers
self.__call_headers = None # OrderedDict to be marshalled for RPC Call
# check if the Certification Authority Cert is a string and store it
if cacert and cacert.startswith("-----BEGIN CERTIFICATE-----"):
fd, filename = tempfile.mkstemp()
f = os.fdopen(fd, 'w+b', -1)
if self.trace: log.info(u"Saving CA certificate to %s" % filename)
f.write(cacert)
cacert = filename
f.close()
self.cacert = cacert
        if timeout is _USE_GLOBAL_DEFAULT:
            timeout = TIMEOUT
<|fim▁hole|> self.http = Http(timeout=timeout, cacert=cacert, proxy=proxy, sessions=sessions)
self.__ns = ns # namespace prefix or False to not use it
if not ns:
self.__xml = """<?xml version="1.0" encoding="UTF-8"?>
<%(soap_ns)s:Envelope xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:xsd="http://www.w3.org/2001/XMLSchema"
xmlns:%(soap_ns)s="%(soap_uri)s">
<%(soap_ns)s:Header/>
<%(soap_ns)s:Body>
<%(method)s xmlns="%(namespace)s">
</%(method)s>
</%(soap_ns)s:Body>
</%(soap_ns)s:Envelope>"""
else:
self.__xml = """<?xml version="1.0" encoding="UTF-8"?>
<%(soap_ns)s:Envelope xmlns:%(soap_ns)s="%(soap_uri)s" xmlns:%(ns)s="%(namespace)s">
<%(soap_ns)s:Header/>
<%(soap_ns)s:Body>
<%(ns)s:%(method)s>
</%(ns)s:%(method)s>
</%(soap_ns)s:Body>
</%(soap_ns)s:Envelope>"""
# parse wsdl url
self.services = wsdl and self.wsdl_parse(wsdl, debug=trace, cache=cache)
self.service_port = None # service port for late binding
def __getattr__(self, attr):
"Return a pseudo-method that can be called"
if not self.services: # not using WSDL?
return lambda self=self, *args, **kwargs: self.call(attr,*args,**kwargs)
else: # using WSDL:
return lambda *args, **kwargs: self.wsdl_call(attr,*args,**kwargs)
def call(self, method, *args, **kwargs):
"""Prepare xml request and make SOAP call, returning a SimpleXMLElement.
If a keyword argument called "headers" is passed with a value of a
SimpleXMLElement object, then these headers will be inserted into the
request.
"""
#TODO: method != input_message
# Basic SOAP request:
xml = self.__xml % dict(method=method, namespace=self.namespace, ns=self.__ns,
soap_ns=self.__soap_ns, soap_uri=soap_namespaces[self.__soap_ns])
request = SimpleXMLElement(xml,namespace=self.__ns and self.namespace, prefix=self.__ns)
try:
request_headers = kwargs.pop('headers')
except KeyError:
request_headers = None
# serialize parameters
if kwargs:
parameters = kwargs.items()
else:
parameters = args
if parameters and isinstance(parameters[0], SimpleXMLElement):
# merge xmlelement parameter ("raw" - already marshalled)
if parameters[0].children() is not None:
for param in parameters[0].children():
getattr(request,method).import_node(param)
elif parameters:
# marshall parameters:
            for k,v in parameters: # dict: tag=value
getattr(request,method).marshall(k,v)
        elif self.__soap_server not in ('oracle',) or self.__soap_server in ('jbossas6',):
# JBossAS-6 requires no empty method parameters!
delattr(request("Body", ns=soap_namespaces.values(),), method)
# construct header and parameters (if not wsdl given) except wsse
if self.__headers and not self.services:
self.__call_headers = dict([(k, v) for k, v in self.__headers.items()
if not k.startswith("wsse:")])
# always extract WS Security header and send it
if 'wsse:Security' in self.__headers:
#TODO: namespaces too hardwired, clean-up...
header = request('Header' , ns=soap_namespaces.values(),)
k = 'wsse:Security'
v = self.__headers[k]
header.marshall(k, v, ns=False, add_children_ns=False)
header(k)['xmlns:wsse'] = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd'
#<wsse:UsernameToken xmlns:wsu='http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd'>
if self.__call_headers:
header = request('Header' , ns=soap_namespaces.values(),)
for k, v in self.__call_headers.items():
##if not self.__ns:
## header['xmlns']
header.marshall(k, v, ns=self.__ns, add_children_ns=False)
if request_headers:
header = request('Header' , ns=soap_namespaces.values(),)
for subheader in request_headers.children():
header.import_node(subheader)
self.xml_request = request.as_xml()
self.xml_response = self.send(method, self.xml_request)
response = SimpleXMLElement(self.xml_response, namespace=self.namespace)
if self.exceptions and response("Fault", ns=soap_namespaces.values(), error=False):
raise SoapFault(unicode(response.faultcode), unicode(response.faultstring))
return response
def send(self, method, xml):
"Send SOAP request using HTTP"
if self.location == 'test': return
# location = "%s" % self.location #?op=%s" % (self.location, method)
location = self.location
if self.services:
soap_action = self.action
else:
soap_action = self.action + method
headers={
'Content-type': 'text/xml; charset="UTF-8"',
'Content-length': str(len(xml)),
"SOAPAction": "\"%s\"" % (soap_action)
}
headers.update(self.http_headers)
log.info("POST %s" % location)
log.info("Headers: %s" % headers)
if self.trace:
print "-"*80
print "POST %s" % location
print '\n'.join(["%s: %s" % (k,v) for k,v in headers.items()])
print u"\n%s" % xml.decode("utf8","ignore")
response, content = self.http.request(
location, "POST", body=xml, headers=headers)
self.response = response
self.content = content
if self.trace:
print
print '\n'.join(["%s: %s" % (k,v) for k,v in response.items()])
print content#.decode("utf8","ignore")
print "="*80
return content
def get_operation(self, method):
# try to find operation in wsdl file
soap_ver = self.__soap_ns == 'soap12' and 'soap12' or 'soap11'
if not self.service_port:
for service_name, service in self.services.items():
for port_name, port in [port for port in service['ports'].items()]:
if port['soap_ver'] == soap_ver:
self.service_port = service_name, port_name
break
else:
raise RuntimeError("Cannot determine service in WSDL: "
"SOAP version: %s" % soap_ver)
else:
port = self.services[self.service_port[0]]['ports'][self.service_port[1]]
self.location = port['location']
operation = port['operations'].get(unicode(method))
if not operation:
raise RuntimeError("Operation %s not found in WSDL: "
"Service/Port Type: %s" %
(method, self.service_port))
return operation
def wsdl_call(self, method, *args, **kwargs):
"Pre and post process SOAP call, input and output parameters using WSDL"
soap_uri = soap_namespaces[self.__soap_ns]
operation = self.get_operation(method)
# get i/o type declarations:
input = operation['input']
output = operation['output']
header = operation.get('header')
if 'action' in operation:
self.action = operation['action']
# sort parameters (same order as xsd:sequence)
def sort_dict(od, d):
if isinstance(od, dict):
ret = OrderedDict()
for k in od.keys():
v = d.get(k)
# don't append null tags!
if v is not None:
if isinstance(v, dict):
v = sort_dict(od[k], v)
elif isinstance(v, list):
v = [sort_dict(od[k][0], v1)
for v1 in v]
ret[str(k)] = v
return ret
else:
return d
# construct header and parameters
if header:
self.__call_headers = sort_dict(header, self.__headers)
if input and args:
# convert positional parameters to named parameters:
d = [(k, arg) for k, arg in zip(input.values()[0].keys(), args)]
kwargs.update(dict(d))
if input and kwargs:
params = sort_dict(input.values()[0], kwargs).items()
if self.__soap_server == "axis":
                # use the operation name as-is
                pass
else:
# use the message (element) name
method = input.keys()[0]
#elif not input:
#TODO: no message! (see wsmtxca.dummy)
else:
params = kwargs and kwargs.items()
# call remote procedure
response = self.call(method, *params)
# parse results:
resp = response('Body',ns=soap_uri).children().unmarshall(output)
return resp and resp.values()[0] # pass Response tag children
def help(self, method):
"Return operation documentation and invocation/returned value example"
operation = self.get_operation(method)
input = operation.get('input')
input = input and input.values() and input.values()[0]
if isinstance(input, dict):
input = ", ".join("%s=%s" % (k,repr(v)) for k,v
in input.items())
elif isinstance(input, list):
input = repr(input)
output = operation.get('output')
if output:
output = operation['output'].values()[0]
headers = operation.get('headers') or None
return u"%s(%s)\n -> %s:\n\n%s\nHeaders: %s" % (
method,
input or "",
output and output or "",
operation.get("documentation",""),
headers,
)
def wsdl_parse(self, url, debug=False, cache=False):
"Parse Web Service Description v1.1"
log.debug("wsdl url: %s" % url)
# Try to load a previously parsed wsdl:
force_download = False
if cache:
# make md5 hash of the url for caching...
filename_pkl = "%s.pkl" % hashlib.md5(url).hexdigest()
if isinstance(cache, basestring):
filename_pkl = os.path.join(cache, filename_pkl)
if os.path.exists(filename_pkl):
log.debug("Unpickle file %s" % (filename_pkl, ))
f = open(filename_pkl, "r")
pkl = pickle.load(f)
f.close()
# sanity check:
if pkl['version'][:-1] != __version__.split(" ")[0][:-1] or pkl['url'] != url:
import warnings
warnings.warn('version or url mismatch! discarding cached wsdl', RuntimeWarning)
if debug:
log.debug('Version: %s %s' % (pkl['version'], __version__))
log.debug('URL: %s %s' % (pkl['url'], url))
force_download = True
else:
self.namespace = pkl['namespace']
self.documentation = pkl['documentation']
return pkl['services']
soap_ns = {
"http://schemas.xmlsoap.org/wsdl/soap/": 'soap11',
"http://schemas.xmlsoap.org/wsdl/soap12/": 'soap12',
}
wsdl_uri="http://schemas.xmlsoap.org/wsdl/"
xsd_uri="http://www.w3.org/2001/XMLSchema"
xsi_uri="http://www.w3.org/2001/XMLSchema-instance"
get_local_name = lambda s: s and str((':' in s) and s.split(':')[1] or s)
get_namespace_prefix = lambda s: s and str((':' in s) and s.split(':')[0] or None)
# always return an unicode object:
REVERSE_TYPE_MAP[u'string'] = unicode
def fetch(url):
"Download a document from a URL, save it locally if cache enabled"
# check / append a valid schema if not given:
url_scheme, netloc, path, query, fragment = urlsplit(url)
if not url_scheme in ('http','https', 'file'):
for scheme in ('http','https', 'file'):
try:
if not url.startswith("/") and scheme in ('http', 'https'):
tmp_url = "%s://%s" % (scheme, url)
else:
tmp_url = "%s:%s" % (scheme, url)
if debug: log.debug("Scheme not found, trying %s" % scheme)
return fetch(tmp_url)
except Exception, e:
log.error(e)
raise RuntimeError("No scheme given for url: %s" % url)
# make md5 hash of the url for caching...
filename = "%s.xml" % hashlib.md5(url).hexdigest()
if isinstance(cache, basestring):
filename = os.path.join(cache, filename)
if cache and os.path.exists(filename) and not force_download:
log.info("Reading file %s" % (filename, ))
f = open(filename, "r")
xml = f.read()
f.close()
else:
if url_scheme == 'file':
log.info("Fetching url %s using urllib2" % (url, ))
f = urllib2.urlopen(url)
xml = f.read()
else:
log.info("GET %s using %s" % (url, self.http._wrapper_version))
response, xml = self.http.request(url, "GET", None, {})
if cache:
log.info("Writing file %s" % (filename, ))
if not os.path.isdir(cache):
os.makedirs(cache)
f = open(filename, "w")
f.write(xml)
f.close()
return xml
# Open uri and read xml:
xml = fetch(url)
# Parse WSDL XML:
wsdl = SimpleXMLElement(xml, namespace=wsdl_uri)
# detect soap prefix and uri (xmlns attributes of <definitions>)
xsd_ns = None
soap_uris = {}
for k, v in wsdl[:]:
if v in soap_ns and k.startswith("xmlns:"):
soap_uris[get_local_name(k)] = v
if v== xsd_uri and k.startswith("xmlns:"):
xsd_ns = get_local_name(k)
# Extract useful data:
self.namespace = wsdl['targetNamespace']
self.documentation = unicode(wsdl('documentation', error=False) or '')
services = {}
bindings = {} # binding_name: binding
operations = {} # operation_name: operation
port_type_bindings = {} # port_type_name: binding
messages = {} # message: element
elements = {} # element: type def
for service in wsdl.service:
service_name=service['name']
if not service_name:
continue # empty service?
if debug: log.debug("Processing service %s" % service_name)
serv = services.setdefault(service_name, {'ports': {}})
serv['documentation']=service['documentation'] or ''
for port in service.port:
binding_name = get_local_name(port['binding'])
address = port('address', ns=soap_uris.values(), error=False)
location = address and address['location'] or None
soap_uri = address and soap_uris.get(address.get_prefix())
soap_ver = soap_uri and soap_ns.get(soap_uri)
bindings[binding_name] = {'service_name': service_name,
'location': location,
'soap_uri': soap_uri, 'soap_ver': soap_ver,
}
serv['ports'][port['name']] = bindings[binding_name]
for binding in wsdl.binding:
binding_name = binding['name']
if debug: log.debug("Processing binding %s" % service_name)
soap_binding = binding('binding', ns=soap_uris.values(), error=False)
transport = soap_binding and soap_binding['transport'] or None
port_type_name = get_local_name(binding['type'])
bindings[binding_name].update({
'port_type_name': port_type_name,
'transport': transport, 'operations': {},
})
port_type_bindings[port_type_name] = bindings[binding_name]
for operation in binding.operation:
op_name = operation['name']
op = operation('operation',ns=soap_uris.values(), error=False)
action = op and op['soapAction']
d = operations.setdefault(op_name, {})
bindings[binding_name]['operations'][op_name] = d
d.update({'name': op_name})
d['parts'] = {}
                # input and/or output may be absent!
input = operation('input', error=False)
body = input and input('body', ns=soap_uris.values(), error=False)
d['parts']['input_body'] = body and body['parts'] or None
output = operation('output', error=False)
body = output and output('body', ns=soap_uris.values(), error=False)
d['parts']['output_body'] = body and body['parts'] or None
header = input and input('header', ns=soap_uris.values(), error=False)
d['parts']['input_header'] = header and {'message': header['message'], 'part': header['part']} or None
headers = output and output('header', ns=soap_uris.values(), error=False)
                d['parts']['output_header'] = headers and {'message': headers['message'], 'part': headers['part']} or None
                #if action: #TODO: separate operation_binding from operation
if action:
d["action"] = action
def make_key(element_name, element_type):
"return a suitable key for elements"
# only distinguish 'element' vs other types
if element_type in ('complexType', 'simpleType'):
eltype = 'complexType'
else:
eltype = element_type
if eltype not in ('element', 'complexType', 'simpleType'):
raise RuntimeError("Unknown element type %s = %s" % (unicode(element_name), eltype))
return (unicode(element_name), eltype)
#TODO: cleanup element/schema/types parsing:
def process_element(element_name, node, element_type):
"Parse and define simple element types"
if debug:
log.debug("Processing element %s %s" % (element_name, element_type))
for tag in node:
if tag.get_local_name() in ("annotation", "documentation"):
continue
elif tag.get_local_name() in ('element', 'restriction'):
if debug: log.debug("%s has not children! %s" % (element_name,tag))
children = tag # element "alias"?
alias = True
elif tag.children():
children = tag.children()
alias = False
else:
if debug: log.debug("%s has not children! %s" % (element_name,tag))
continue #TODO: abstract?
d = OrderedDict()
for e in children:
t = e['type']
if not t:
t = e['base'] # complexContent (extension)!
if not t:
t = 'anyType' # no type given!
t = t.split(":")
if len(t)>1:
ns, type_name = t
else:
ns, type_name = None, t[0]
if element_name == type_name:
                        pass ## warning: possible infinite recursion
uri = ns and e.get_namespace_uri(ns) or xsd_uri
if uri==xsd_uri:
# look for the type, None == any
fn = REVERSE_TYPE_MAP.get(unicode(type_name), None)
else:
fn = None
if not fn:
# simple / complex type, postprocess later
fn = elements.setdefault(make_key(type_name, "complexType"), OrderedDict())
if e['name'] is not None and not alias:
e_name = unicode(e['name'])
d[e_name] = fn
else:
if debug: log.debug("complexConent/simpleType/element %s = %s" % (element_name, type_name))
d[None] = fn
if e['maxOccurs']=="unbounded" or (ns == 'SOAP-ENC' and type_name == 'Array'):
# it's an array... TODO: compound arrays?
d.array = True
if e is not None and e.get_local_name() == 'extension' and e.children():
# extend base element:
process_element(element_name, e.children(), element_type)
elements.setdefault(make_key(element_name, element_type), OrderedDict()).update(d)
# check axis2 namespace at schema types attributes
self.namespace = dict(wsdl.types("schema", ns=xsd_uri)[:]).get('targetNamespace', self.namespace)
imported_schemas = {}
def preprocess_schema(schema):
"Find schema elements and complex types"
for element in schema.children() or []:
if element.get_local_name() in ('import', ):
schema_namespace = element['namespace']
schema_location = element['schemaLocation']
if schema_location is None:
if debug: log.debug("Schema location not provided for %s!" % (schema_namespace, ))
continue
if schema_location in imported_schemas:
if debug: log.debug("Schema %s already imported!" % (schema_location, ))
continue
imported_schemas[schema_location] = schema_namespace
if debug: print "Importing schema %s from %s" % (schema_namespace, schema_location)
# Open uri and read xml:
xml = fetch(schema_location)
# Parse imported XML schema (recursively):
imported_schema = SimpleXMLElement(xml, namespace=xsd_uri)
preprocess_schema(imported_schema)
element_type = element.get_local_name()
if element_type in ('element', 'complexType', "simpleType"):
element_name = unicode(element['name'])
if debug: log.debug("Parsing Element %s: %s" % (element_type, element_name))
if element.get_local_name() == 'complexType':
children = element.children()
elif element.get_local_name() == 'simpleType':
children = element("restriction", ns=xsd_uri)
elif element.get_local_name() == 'element' and element['type']:
children = element
else:
children = element.children()
if children:
children = children.children()
elif element.get_local_name() == 'element':
children = element
if children:
process_element(element_name, children, element_type)
def postprocess_element(elements):
"Fix unresolved references (elements referenced before its definition, thanks .net)"
for k,v in elements.items():
if isinstance(v, OrderedDict):
if v.array:
elements[k] = [v] # convert arrays to python lists
if v!=elements: #TODO: fix recursive elements
postprocess_element(v)
if None in v and v[None]: # extension base?
if isinstance(v[None], dict):
for i, kk in enumerate(v[None]):
                            # extend base -keep original order-
if v[None] is not None:
elements[k].insert(kk, v[None][kk], i)
del v[None]
else: # "alias", just replace
if debug: log.debug("Replacing %s = %s" % (k, v[None]))
elements[k] = v[None]
#break
if isinstance(v, list):
for n in v: # recurse list
postprocess_element(n)
# process current wsdl schema:
for schema in wsdl.types("schema", ns=xsd_uri):
preprocess_schema(schema)
postprocess_element(elements)
for message in wsdl.message:
if debug: log.debug("Processing message %s" % message['name'])
for part in message('part', error=False) or []:
element = {}
element_name = part['element']
if not element_name:
# some implementations (axis) uses type instead
element_name = part['type']
type_ns = get_namespace_prefix(element_name)
type_uri = wsdl.get_namespace_uri(type_ns)
if type_uri == xsd_uri:
element_name = get_local_name(element_name)
fn = REVERSE_TYPE_MAP.get(unicode(element_name), None)
element = {part['name']: fn}
# emulate a true Element (complexType)
messages.setdefault((message['name'], None), {message['name']: OrderedDict()}).values()[0].update(element)
else:
element_name = get_local_name(element_name)
fn = elements.get(make_key(element_name, 'element'))
if not fn:
# some axis servers uses complexType for part messages
fn = elements.get(make_key(element_name, 'complexType'))
element = {message['name']: {part['name']: fn}}
else:
element = {element_name: fn}
messages[(message['name'], part['name'])] = element
def get_message(message_name, part_name):
if part_name:
# get the specific part of the message:
return messages.get((message_name, part_name))
else:
# get the first part for the specified message:
for (message_name_key, part_name_key), message in messages.items():
if message_name_key == message_name:
return message
for port_type in wsdl.portType:
port_type_name = port_type['name']
if debug: log.debug("Processing port type %s" % port_type_name)
binding = port_type_bindings[port_type_name]
for operation in port_type.operation:
op_name = operation['name']
op = operations[op_name]
op['documentation'] = unicode(operation('documentation', error=False) or '')
if binding['soap_ver']:
                    #TODO: separate operation_binding from operation (non SOAP?)
if operation("input", error=False):
input_msg = get_local_name(operation.input['message'])
input_header = op['parts'].get('input_header')
if input_header:
header_msg = get_local_name(input_header.get('message'))
header_part = get_local_name(input_header.get('part'))
# warning: some implementations use a separate message!
header = get_message(header_msg or input_msg, header_part)
else:
                        header = None # not enough info to find the header message
op['input'] = get_message(input_msg, op['parts'].get('input_body'))
op['header'] = header
else:
op['input'] = None
op['header'] = None
if operation("output", error=False):
output_msg = get_local_name(operation.output['message'])
op['output'] = get_message(output_msg, op['parts'].get('output_body'))
else:
op['output'] = None
if debug:
import pprint
log.debug(pprint.pformat(services))
# Save parsed wsdl (cache)
if cache:
f = open(filename_pkl, "wb")
pkl = {
'version': __version__.split(" ")[0],
'url': url,
'namespace': self.namespace,
'documentation': self.documentation,
'services': services,
}
pickle.dump(pkl, f)
f.close()
return services
def __setitem__(self, item, value):
"Set SOAP Header value - this header will be sent for every request."
self.__headers[item] = value
def close(self):
"Finish the connection and remove temp files"
self.http.close()
        if self.cacert and self.cacert.startswith(tempfile.gettempdir()):
if self.trace: log.info("removing %s" % self.cacert)
os.unlink(self.cacert)
def parse_proxy(proxy_str):
"Parses proxy address user:pass@host:port into a dict suitable for httplib2"
if isinstance(proxy_str, unicode):
proxy_str = proxy_str.encode("utf8")
proxy_dict = {}
if proxy_str is None:
return
if "@" in proxy_str:
user_pass, host_port = proxy_str.split("@")
else:
user_pass, host_port = "", proxy_str
if ":" in host_port:
host, port = host_port.split(":")
proxy_dict['proxy_host'], proxy_dict['proxy_port'] = host, int(port)
if ":" in user_pass:
proxy_dict['proxy_user'], proxy_dict['proxy_pass'] = user_pass.split(":")
return proxy_dict
if __name__ == "__main__":
pass<|fim▁end|> | # Create HTTP wrapper
Http = get_Http()
|
<|file_name|>findBarcode.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Code128 Barcode Detection & Analysis
(c) Charles Shiflett 2011
Finds Code128 barcodes in documents scanned in Grayscale at 300 dpi.
Usage:
Each page of the PDF must be converted to a grayscale PNG image, and should
be ordered as follows:
1001/1001-001.png
1001/1001-002.png
1001/1001-003.png
.
.
.
1099/1099-001.png
1099/1099-002.png
This program will find & enhance barcodes in those pages, and save its
progress to a file of the same name, except with an extension of barcode.png.
"""
DEBUG=False
from PIL import Image
from PIL import ImageOps
import PIL.ImageDraw as draw
from glob import glob
import os
import re
import pdb
import sys
import numpy
import scipy.signal as ss
import math
import scipy.ndimage.interpolation
import scipy.weave
import logging
log = logging.getLogger('findBarcodes')
if DEBUG:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
import filter
unAliasFilter = numpy.array( [ [ 0, 1, 0], [1, 4, 1], [ 0, 1, 0] ], numpy.int )
if DEBUG:
def debugger(type, value, tb):
pdb.pm()
sys.excepthook = debugger
sys.setrecursionlimit(32768)
filWidth= 102 # / 25
filHeight= 110 # / 30
def calcBarLength(length):
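  # Map a run length in pixels (at the expected 300 dpi scan resolution) to a
  # Code 128 module width of 1-4 units.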
if length < 6:
return 1
elif length < 10:
return 2
elif length < 13:
return 3
else:
return 4
def convolve( im, filt, reshape ):
height, stride = im.shape
fh,fw = filt.shape
im = im.reshape( height * stride )
filt = filt.reshape( fh*fw )
newIm = numpy.zeros ( (height * stride), numpy.int )
code = """
int sum=0, pos;
int ys=0, fys=0;
for (int y=0; y < (height-(fh/2)); y++) {
for (int x=0; x < (stride-(fw/2)); x++) {
fys=sum=0;
pos=ys+x;
int th = ((height-y) < fh ) ? height-y : fh;
int tw = ((stride-x) < fw ) ? stride-x : fw;
for (int fy=0; fy < th; fy++) {
for (int fx=0; fx < tw; fx++) {
sum+=im[pos+fx]*filt[fys+fx];
}
fys+=fw;
pos+=stride;
}
newIm[ys+x] = sum;
}
ys+=stride;
}
"""
scipy.weave.inline(code,['height','stride','fh','fw','im','filt','newIm'])
if reshape:
return newIm.reshape(height,stride )
else:
return newIm
class barImage (object):
def __init__ ( self, im ):
self.im = numpy.array ( im.getdata() )
self.stride, self.height = im.size
self.im = self.im.reshape(self.height,self.stride)
    # Note: im is indexed as [y][x], not [x][y].
def printImg( self, l=[], offset=0):
l = [ (i[1], i[2]) for i in l ]
print l
for y in range( 0, self.height-1):
output = []
for x in range( 5+offset, self.stride-1):
if x > 115+offset:
continue
i = self.im[y][x]
if (x,y) in l:
output.append("B")
elif i < 20:
output.append(".")
elif i < 64:
output.append("+")
elif i < 128:
output.append("*")
elif i < 196:
output.append("x")
else:
output.append("X")
print "%03d" % y, "".join(output)
print " 56789 123456789 123456789 123456789 123456789 123456789 123456789 123456789 123456789 123456789"
def applyFilter ( self, f, reshape=True ):
value = 0
filt = getattr( self, f, False)
if type(filt) == type(False):
filt = numpy.array( getattr(filter, f, False), dtype=numpy.int )
setattr( self, f, filt )
if type(filt) == type(False):
raise ValueError("Error: filter %s was not found in filter.py" % f)
return convolve( self.im, filt, reshape )
def findBarcode( self ):
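    # Convolve the page with the barcode-shaped filter and keep the 20
    # strongest responses as (score, x, y) candidates.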
results = self.applyFilter("scaledFilter", reshape=False)
list = [ (x[1], int(x[0] % self.stride), int(x[0] / self.stride)) for x in enumerate(results) if x[1] > 1000 ]
list.sort(reverse=True)
return list[0:20]
def unAlias(s):
"Remove dithering. "
#s.im= ss.convolve2d( s.im, unAliasFilter, mode="same" )
s.im=convolve( s.im, unAliasFilter, reshape=True )
s.im=numpy.piecewise(s.im, [ s.im > 1000 ], [255, 0])
return
""" Convolve operator does the following:
for y in range(1, s.height-1):
for x in range(1, s.stride-1):
if s.im[y][x-1] == s.im[y][x+1] == s.im[y+1][x] == s.im[y-1][x]:
s.im[y][x] = s.im[y][x+1]
return
"""
def bw( self, whitePoint=64):
self.im=numpy.piecewise(self.im, [self.im < whitePoint, self.im >= whitePoint], [255, 0])
#self.im=self.vApplyBW( self.im, whitePoint )
def virtualLine(self, x1, y1, x2, y2, ox=0, oy=0):
totalLength = math.sqrt(math.pow(x2-x1,2) + math.pow(y2-y1,2))
if totalLength < 300:
return []
if x1 < x2:
sx,sy,ex,ey=(x1,y1,x2,y2)
else:
sx,sy,ex,ey=(x2,y2,x1,y1)
xgain = float(ex-sx)/totalLength
ygain = float(ey-sy)/totalLength
if ex - sx < 150:
# Skip vertical codes, save them for the next run.
return []
if sx < 1 or (ex+ox) >= self.stride or sx > self.stride:
return []
if not (1< sy <self.height) or not (1< sy+ygain*totalLength <self.height):
return []
#slope = float(h2-h1)/(w2-w1)
newLine = numpy.zeros( shape=(totalLength), dtype=int )
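    # Step along the line one unit at a time, bilinearly interpolating the
    # four pixels around each fractional (x, y) sample position.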
code = """
float x=sx, y=sy;
for ( int i=1; i < int(totalLength); i++ ) {
int top = stride*int(y) + int(x),
bot = stride*int(y+1) + int(x);
float xr = x-int(x),
xl = 1-xr,
yt = y-int(y),
yb = 1-yt;
newLine[i]= im[top]*xr*yt +
im[top-1]*xl*yt +
im[bot]*xr*yb +
im[bot-1]*xl*yb;
x+=xgain;
y+=ygain;
}
"""
stride, im = self.stride, self.im
scipy.weave.inline(code,['im', 'stride', \
'newLine', 'totalLength', 'ygain', 'xgain', 'sx', 'sy'])
if DEBUG:
log.debug( "".join(
[ chr( 0x2e + int(x/6.07142857142857142857) ) for x in list(newLine) ] ) )
return newLine
def checkLineCharacteristics( self, line ):
whiteCount= blackCount= 0
if 300 < len(line) < 475:
for i in line:
if int(i) < 128:
whiteCount+=1
else:
blackCount+=1
if whiteCount >= 18:
return False
if blackCount > 1:
whiteCount=0
blackCount=0
else:
return False
return True
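    # Heuristic: a plausible barcode scanline is 300-475 samples long and
    # never accumulates 18+ samples below the 128 threshold before at least
    # two samples above it reset the counters, i.e. it keeps crossing bars.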
def getValidPoint ( self, point, possible ):
for endpoint in possible:
#print point, endpoint
found = True
for i in range ( 8, 50, 10 ):
if not found:
continue
#print point, endpoint, i
line = self.virtualLine(point[0]+2, point[1]+i, endpoint[0], endpoint[1]+i)
if not self.checkLineCharacteristics(line):
found = False
#print "False"
#print "True"
if found:
return endpoint
return False
def getValidPair ( self, l, r ):
"""Returns the first pair that is a barcode and is located at the top
edges of a barcode. """
if not l or not r:
return False
l.sort( key=lambda x: x[1] )
r.sort( key=lambda x: x[1] )
if l[0][1] > r[0][1]:
r.sort( key=lambda x: x[0], reverse=True )
res = self.getValidPoint( l[0], r )
if not res:
return self.getValidPair( l[1:], r)
return l[0], res
else:
l.sort( key=lambda x: x[0], reverse=False )
res = self.getValidPoint( r[0], l )
if not res:
return self.getValidPair( l, r[1:] )
return res, r[0]
def removeNeighbors ( self, l, rev ):
l.sort( key= lambda x: x[0], reverse=rev )
restart = False
sizeOfArray = len(l)-1
for i in range (1, sizeOfArray):
for j in range(i, sizeOfArray):
if abs( l[i-1][1] - l[j][1] ) < 5:
restart = True
l[j] = False
if restart==True:
return self.removeNeighbors ([ x for x in l if x], rev)
return l
def getCode ( self, barcode ):
"""
Return a single code from a code 128 barcode.
"""
code=[]
start = False
trend = 1
for pos, c in enumerate(barcode):
if (pos+1) >= len(barcode):
continue
if not start:
if c > int(10*250): # Ignore leading white space
start=True
level = barcode[pos+1]
code.append(pos)
continue
if abs(level - c) > 1250 and abs(level-barcode[pos+1]) > 1250:
if (trend<0 and (level-c)>0) or (trend>0 and (level-c)<0):
# Trend is in the same direction we are going, ignore.
continue
code.append(pos)
if trend > 0:
trend=-1
else:
trend=1
level = c
if trend > 0:
level = max(c, level)
else:
level = min(c, level)
if len(code) >= 7:
return code, barcode[pos:]
return False
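    # getCode() collects edge positions where the level jumps by more than
    # 1250 against the current trend; once seven positions are gathered they
    # delimit one symbol and the rest of the scanline is returned for the
    # next call.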
def applyHeuristics ( self, barcode=[5,] ):
"""
Try to determine the numerical values of barcode image.
@barcode: list to prepend to output. (defaults to [5,])
@return: barcode weights (i.e. 211214... prepended with pre)
"""
rotated = numpy.rot90(self.im, 3)
values = [ int(sum( list(line)[:30] )) for line in rotated ]
characters=[]
codes=True
while (codes):
codes = self.getCode(values)
if codes:
if DEBUG:
print codes[0][0], codes[0][-1]
print "".join([ "%c" % int(v/255+0x5f) for v in values[codes[0][0]:codes[0][-1]] ])
print codes[0]
characters.append(values[codes[0][0]:codes[0][-1]])
values=codes[1]
return False
def findBarcodeLimits( self, barType ):
#origImg = self.im
"""
find the edges of a barcode.
@return: left and upper-right corner or right & upper-left corner of barcode
"""
filterName = "%sEdgeFilter%s" % ("left", "Hard")
result = self.applyFilter(filterName, reshape=False)
leftSide, rightSide = [], []
lSideLim, rSideLim = (self.stride / 2), ((self.stride/2)+1)
h,w=self.height,self.stride
filterCutoff = 18000
lx = numpy.zeros( len(result), numpy.int )
ly = numpy.zeros( len(result), numpy.int )
ry = numpy.zeros( len(result), numpy.int )
rx = numpy.zeros( len(result), numpy.int )
rets = numpy.zeros ( 2, numpy.int )
l,r = 0,0
filterlen= len(result)
code = """
int l=0, r=0; /* This code is surprisingly slow in python */
for (int i=0; i < filterlen; i++) {
if (result[i] < filterCutoff)
continue;
if (i%w < lSideLim) {
ly[l] = i/w;
lx[l++] = i%w;
}
if (i%w > rSideLim) {
ry[r] = i/w;
rx[r++] = i%w;
}
rets[0] = l;
rets[1] = r;
}
"""
scipy.weave.inline(code,['lx','rx','ry','ly','filterCutoff','filterlen','result', 'w', 'rSideLim', 'lSideLim','rets'])
rx = rx[:rets[1]]
lx = lx[:rets[0]]
leftSide = zip(lx, ly)
rightSide= zip(rx, ry)
# We need to check the lists we generated to make sure we really have
# the furthest block for a specific height range... We don't want to
# be affected by artifacting which results in minor height variation.
leftSide.sort (key = lambda x: x[0] )
rightSide.sort(key = lambda x: x[0] )
leftSide = self.removeNeighbors( leftSide, False )
#print "LEFT: ", leftSide
#print "RIGHT: ", rightSide
validPair = self.getValidPair ( leftSide, rightSide )
if not validPair:
return False
return ( (validPair[0][0]+2,validPair[0][1]+2), (validPair[1][0]+8, validPair[1][1]+2) )
hh=0
def straightenBarcode( im, filterName="Soft", prefix="" ):
global hh, newImage
hh+=1
# Find the barcode, and straighten it.
im.bw()
im.unAlias()
limits = im.findBarcodeLimits(filterName)
if limits:
if DEBUG:
newImage.putdata(im.im.reshape(im.stride*im.height))
newImage = ImageOps.invert(newImage)
d = draw.Draw(newImage)
d.line((limits[0][0], limits[0][1], limits[1][0], limits[1][1]), fill=0)
newImage.save("%s.barcode.line.%05d.png" % (prefix, hh) )
angle= ( float(limits[1][1] - limits[0][1]) /
float(limits[1][0] - limits[0][0]) )
angle= numpy.arctan(angle) * (180/math.pi)
else:
return False
im.im = scipy.ndimage.interpolation.rotate( im.im, angle, reshape=False )
return True
def createBarcode( ar, nb ):
ar=numpy.rot90(ar, 3)
b,pos=1,0
lastColor=False
if not nb:
return
for bars in nb:
if b % 2:
fill=255
else:
fill=0
b+=1
if pos > len(ar)-16:
continue
for i in range(0, bars*3):
ar[pos].fill(fill)
pos+=1
for i in range(pos, len(ar)):
ar[pos].fill(255)
pos+=1
return numpy.rot90(ar)
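# createBarcode() re-renders the decoded bar widths as a clean synthetic
# barcode, drawing three columns per unit of width and padding the
# remainder with 255, so the normalized image can be saved for inspection.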
def doPostBarcodeAnalysis(image, prefix):
image.save("%s.barcode.post.png" % prefix )
bar = barImage( image )
bar.bw()
nb = bar.applyHeuristics()
bar.im = createBarcode( bar.im, nb )
image.putdata(bar.im.reshape(bar.stride*bar.height))
#image = image.crop((1,2,450,88))
image.save("%s.barcode.heur.png" % prefix )
newImage = False
def startRecognition( infile, rotation=False, z=0 ):
global newImage
prefix = infile[:8]
im = Image.open(infile)
if rotation:
im = im.rotate(rotation)
width, height = im.size
resized = im.resize( ( width/25, height/30 ), Image.BICUBIC )
resized = ImageOps.invert(resized)
imgData = barImage( resized )
foundBarcode, newImage, newBar = False, False, False
for probable_barcode in imgData.findBarcode():
z+=1
# Try the first 20 barcodes, and see if one of them is legit.
if foundBarcode:
continue
try:
x1, y1 = (probable_barcode[1]-3)*25, (probable_barcode[2]) * 30
x2, y2 = x1+635, y1+265
if x2 > im.size[0] or y2 > im.size[1]:
x2,y2 = im.size[0], im.size[1]
x1,y1 = im.size[0]-800, im.size[1]-265
newImage = im.crop((x1,y1,x2,y2))
newBar = barImage(newImage)
foundBarcode = straightenBarcode ( newBar, "Hard", prefix=prefix )
if DEBUG and not foundBarcode:
smoo = im.crop( (x1,y1,x2,y2) )
smoo.save("%s.fail.%03d.barcode.png" % (prefix, z) )
print "Z: ", z
except:
foundBarcode = False<|fim▁hole|>
if foundBarcode:
log.info("Found barcode for %s." % prefix )
newImage.putdata(newBar.im.reshape(newBar.stride*newBar.height))
newImage = ImageOps.invert(newImage)
newImage.save("%s.barcode.pre.png" % prefix )
try:
(x1, y1),(x2,y2) = newBar.findBarcodeLimits("Hard")
doPostBarcodeAnalysis(newImage.crop((x1-40,y1+1,x1+520,y1+90)), prefix )
except:
pass
elif not rotation:
startRecognition( infile, rotation=90, z=z )
else:
log.info("No barcode found for %s.", prefix)
validImage = re.compile(r'[0-9]{4}-[0-9]{3}\.png')
didCommandLine = False
for infile in sys.argv:
if validImage.match(infile):
didCommandLine = True
startRecognition( infile )
if not didCommandLine:
for infile in glob("????-???.png"):
startRecognition( infile )<|fim▁end|> | raise |
<|file_name|>init.py<|end_file_name|><|fim▁begin|>''' Initialize a grader course directory by writing its grader.yml config.
'''
import logging
import uuid
from grader.models import Grader
from grader.utils.config import is_grader_dir
logger = logging.getLogger(__name__)
help = 'Initialize grader by creating grader.yml'
def setup_parser(parser):
parser.add_argument('--course-id', default=str(uuid.uuid4()),
help='Unique course ID (for docker\'s sake)')
parser.add_argument('--force', action='store_true',
help='Overwrite an existing grader.yml')
parser.add_argument('--canvas-host', default=None,
                        help='Canvas server to use (will prompt for a Canvas token)')
parser.add_argument('name', help='Name of the course')
parser.set_defaults(run=run)
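    # Example invocation (assuming this module is wired up as the "init"
    # subcommand of the grader CLI):
    #   grader init --canvas-host https://canvas.example.edu "CS 101"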
def run(args):
logger.debug("Setting up grader in {}".format(args.path))
# Check for existing config
if is_grader_dir(args.path) and not args.force:
logger.critical(
"grader already configured in {}. Abort!".format(args.path)
)
raise SystemExit(1)
if args.canvas_host:
canvas_token = input("Canvas access token (from {}/profile/settings): ".format(args.canvas_host))
else:
canvas_token = None<|fim▁hole|>
# Create the new grader
g = Grader.new(args.path, args.name, args.course_id, args.canvas_host, canvas_token)
logger.info("Wrote {}".format(g.config.file_path))<|fim▁end|> | |
<|file_name|>categories.js<|end_file_name|><|fim▁begin|>var _ = require('underscore');
var express = require('express');
var router = express.Router();
var Autocomplete = require('autocomplete');
var autocomplete = Autocomplete.connectAutocomplete();
var categoryNames = [];<|fim▁hole|> categoryNames.push(category.name.toLowerCase());
})
autocomplete.initialize(function(onReady) {
onReady(categoryNames);
});
/* GET categories listing. */
router.get('/autocomplete/', function(req, res, next){
var category = req.query["term"].toLowerCase();
var results = autocomplete.search(category);
var categoryResults = _.map(results, function(result){
var found = _.find(categories,function(category){
if( category.name!=undefined || category.name!=null )
return category.name.toLowerCase() === result;
else
return false
});
return {label:found.name, value:found.id};
})
res.send(categoryResults);
});
router.get('/', function(req, res, next) {
res.setHeader('Content-Type', 'application/json');
res.send(JSON.stringify(categories));
});
module.exports = router;<|fim▁end|> | var categories = require('../data/categories.json');
_.each(categories, function (category) {
    if( category.name != null )
<|file_name|>chrDesktopNotificationsView.js<|end_file_name|><|fim▁begin|>var chrDesktopNotificationsView = new function () {
'use strict';
var that = this;
this.ID = 'chrDesktopNotificationsView';
this.ns = {};
this.showChrRichNotificationProgress = function (title, body, sourceUrl, noturl, iconUrl) {
try {
var opt = {
type: 'progress',
title: title,
message: body,
iconUrl: iconUrl,
contextMessage: sourceUrl,
progress: 0
};
chrome.notifications.create(opt, function (nid) {
try {
that.ns[nid] = { clickUrl: noturl };
that.startUpdateChrNotificationProgress(nid);
} catch (err) {
console.error(err.stack);
}
});
} catch (err) {
console.error(err.stack);
}
};
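    // The notification is created at 0% progress; the interval set up in
    // startUpdateChrNotificationProgress below advances it by 15 percentage
    // points per second, clamped to 100, after which the interval is cleared.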
this.startUpdateChrNotificationProgress = function (nid) {
try {
var value = 0;
var idInterval = setInterval(function () {
value += 15;
if (value > 100) {
chrome.notifications.update(nid, { progress: 100 }, function (wasUpdated) {});
that.stopUpdateChrNotificationProgress(nid);
}
else {
chrome.notifications.update(nid, { progress: value }, function (wasUpdated) {});
}
}, 1000);
this.ns[nid].updateIdInterval = idInterval;
} catch (err) {
console.error(err.stack);
}
};
<|fim▁hole|> if (this.ns[nid] && this.ns[nid].updateIdInterval) {
clearInterval(this.ns[nid].updateIdInterval);
}
} catch (err) {
console.error(err.stack);
}
};
this.onClosedNotification = function (nid) {
try {
that.stopUpdateChrNotificationProgress(nid);
delete that.ns[nid];
} catch (err) {
console.error(err.stack);
}
};
this.onClickedNotification = function (nid) {
try {
if (that.ns[nid]) {
var url = that.ns[nid].clickUrl;
var urlToSearch = sourceController.removePostMessageNumber(url);
mediator.showTab(url, true, true, urlToSearch);
chrome.notifications.clear(nid, function (wasCleared) {});
}
else {
chrome.notifications.clear(nid, function (wasCleared) {});
}
} catch (err) {
console.error(err.stack);
}
};
(function init() {
try {
chrome.notifications.onClosed.addListener(that.onClosedNotification);
chrome.notifications.onClicked.addListener(that.onClickedNotification);
} catch (err) {
console.error(err.stack);
}
}());
};<|fim▁end|> | this.stopUpdateChrNotificationProgress = function (nid) {
try { |
<|file_name|>jquery.tosrus.html.min.js<|end_file_name|><|fim▁begin|>/* <|fim▁hole|> * jQuery Touch Optimized Sliders "R"Us
* HTML media
*
* Copyright (c) Fred Heusschen
* www.frebsite.nl
*/
!function(i){var n="tosrus",e="html";i[n].media[e]={filterAnchors:function(n){return"#"==n.slice(0,1)&&i(n).is("div")},initAnchors:function(e,t){i('<div class="'+i[n]._c("html")+'" />').append(i(t)).appendTo(e),e.removeClass(i[n]._c.loading).trigger(i[n]._e.loaded)},filterSlides:function(i){return i.is("div")},initSlides:function(){}}}(jQuery);<|fim▁end|> | |
<|file_name|>builds-correctly.js<|end_file_name|><|fim▁begin|>//// [/lib/initial-buildOutput.txt]
/lib/tsc --b /src/src/main /src/src/other
exitCode:: ExitStatus.Success
//// [/src/dist/main/a.d.ts]
export {};
//// [/src/dist/main/a.js]
"use strict";
exports.__esModule = true;
var b_1 = require("./b");
var a = b_1.b;
//// [/src/dist/main/b.d.ts]
export declare const b = 0;
//// [/src/dist/main/b.js]
"use strict";
exports.__esModule = true;
exports.b = void 0;
exports.b = 0;
//// [/src/dist/main/tsconfig.tsbuildinfo]
{
"program": {
"fileInfos": {
"../../../lib/lib.d.ts": {
"version": "3858781397-/// <reference no-default-lib=\"true\"/>\ninterface Boolean {}\ninterface Function {}\ninterface CallableFunction {}\ninterface NewableFunction {}\ninterface IArguments {}\ninterface Number { toExponential: any; }\ninterface Object {}\ninterface RegExp {}\ninterface String { charAt: any; }\ninterface Array<T> { length: number; [n: number]: T; }\ninterface ReadonlyArray<T> {}\ndeclare const console: { log(msg: any): void; };",
"signature": "3858781397-/// <reference no-default-lib=\"true\"/>\ninterface Boolean {}\ninterface Function {}\ninterface CallableFunction {}\ninterface NewableFunction {}\ninterface IArguments {}\ninterface Number { toExponential: any; }\ninterface Object {}\ninterface RegExp {}\ninterface String { charAt: any; }\ninterface Array<T> { length: number; [n: number]: T; }\ninterface ReadonlyArray<T> {}\ndeclare const console: { log(msg: any): void; };",
"affectsGlobalScope": true
},
"../../src/main/b.ts": {
"version": "-11678562673-export const b = 0;\r\n",
"signature": "-3829176033-export declare const b = 0;\r\n",
"affectsGlobalScope": false
},
"../../src/main/a.ts": {
"version": "-17071184049-import { b } from './b';\r\nconst a = b;",
"signature": "-4882119183-export {};\r\n",
"affectsGlobalScope": false
}
},
"options": {
"composite": true,
"declaration": true,
"rootDir": "../../src",
"outDir": "..",
"skipDefaultLibCheck": true,
"configFilePath": "../../src/main/tsconfig.json"
},
"referencedMap": {<|fim▁hole|> },
"exportedModulesMap": {},
"semanticDiagnosticsPerFile": [
"../../../lib/lib.d.ts",
"../../src/main/a.ts",
"../../src/main/b.ts"
]
},
"version": "FakeTSVersion"
}
//// [/src/dist/other/other.d.ts]
export declare const Other = 0;
//// [/src/dist/other/other.js]
"use strict";
exports.__esModule = true;
exports.Other = void 0;
exports.Other = 0;
//// [/src/dist/other/tsconfig.tsbuildinfo]
{
"program": {
"fileInfos": {
"../../../lib/lib.d.ts": {
"version": "3858781397-/// <reference no-default-lib=\"true\"/>\ninterface Boolean {}\ninterface Function {}\ninterface CallableFunction {}\ninterface NewableFunction {}\ninterface IArguments {}\ninterface Number { toExponential: any; }\ninterface Object {}\ninterface RegExp {}\ninterface String { charAt: any; }\ninterface Array<T> { length: number; [n: number]: T; }\ninterface ReadonlyArray<T> {}\ndeclare const console: { log(msg: any): void; };",
"signature": "3858781397-/// <reference no-default-lib=\"true\"/>\ninterface Boolean {}\ninterface Function {}\ninterface CallableFunction {}\ninterface NewableFunction {}\ninterface IArguments {}\ninterface Number { toExponential: any; }\ninterface Object {}\ninterface RegExp {}\ninterface String { charAt: any; }\ninterface Array<T> { length: number; [n: number]: T; }\ninterface ReadonlyArray<T> {}\ndeclare const console: { log(msg: any): void; };",
"affectsGlobalScope": true
},
"../../src/other/other.ts": {
"version": "-2951227185-export const Other = 0;\r\n",
"signature": "-7996259489-export declare const Other = 0;\r\n",
"affectsGlobalScope": false
}
},
"options": {
"composite": true,
"declaration": true,
"rootDir": "../../src",
"outDir": "..",
"skipDefaultLibCheck": true,
"configFilePath": "../../src/other/tsconfig.json"
},
"referencedMap": {},
"exportedModulesMap": {},
"semanticDiagnosticsPerFile": [
"../../../lib/lib.d.ts",
"../../src/other/other.ts"
]
},
"version": "FakeTSVersion"
}<|fim▁end|> | "../../src/main/a.ts": [
"../../src/main/b.ts"
] |
<|file_name|>apps.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
class ScheduleConfig(AppConfig):
name = 'schedule'<|fim▁end|> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig |
<|file_name|>networktreeitem.cpp<|end_file_name|><|fim▁begin|>//=============================================================================================================
/**
* @file networktreeitem.cpp
* @author Lorenz Esch <[email protected]>;
* Matti Hamalainen <[email protected]>
* @version 1.0
* @date January, 2016
*
* @section LICENSE
*
* Copyright (C) 2016, Lorenz Esch and Matti Hamalainen. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that
* the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this list of conditions and the
* following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
* the following disclaimer in the documentation and/or other materials provided with the distribution.
* * Neither the name of MNE-CPP authors nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
*
* @brief NetworkTreeItem class definition.
*
*/
//*************************************************************************************************************
//=============================================================================================================
// INCLUDES
//=============================================================================================================
#include "networktreeitem.h"
#include "../../workers/rtSourceLoc/rtsourcelocdataworker.h"
#include "../common/metatreeitem.h"
#include "../../3dhelpers/renderable3Dentity.h"
#include "../../materials/networkmaterial.h"
#include "../../3dhelpers/custommesh.h"
#include "../../3dhelpers/geometrymultiplier.h"
#include "../../materials/geometrymultipliermaterial.h"
#include <connectivity/network/networknode.h>
#include <connectivity/network/networkedge.h>
#include <fiff/fiff_types.h>
#include <mne/mne_sourceestimate.h>
#include <mne/mne_forwardsolution.h>
//*************************************************************************************************************
//=============================================================================================================
// Qt INCLUDES
//=============================================================================================================
#include <Qt3DExtras/QSphereGeometry>
#include <Qt3DCore/QTransform>
//*************************************************************************************************************
//=============================================================================================================
// Eigen INCLUDES
//=============================================================================================================
#include <Eigen/Core>
//*************************************************************************************************************
//=============================================================================================================
// USED NAMESPACES
//=============================================================================================================
using namespace Eigen;
using namespace MNELIB;
using namespace DISP3DLIB;
using namespace CONNECTIVITYLIB;
//*************************************************************************************************************
//=============================================================================================================
// DEFINE MEMBER METHODS
//=============================================================================================================
NetworkTreeItem::NetworkTreeItem(Qt3DCore::QEntity *p3DEntityParent, int iType, const QString &text)
: AbstractMeshTreeItem(p3DEntityParent, iType, text)
, m_bNodesPlotted(false)
{
initItem();
}
//*************************************************************************************************************
void NetworkTreeItem::initItem()
{
this->setEditable(false);
this->setCheckable(true);
this->setCheckState(Qt::Checked);
this->setToolTip("Network item");
//Add meta information as item children
QList<QStandardItem*> list;
QVariant data;
    QVector3D vecEdgeThreshold(0,5,10);
if(!m_pItemNetworkThreshold) {
m_pItemNetworkThreshold = new MetaTreeItem(MetaTreeItemTypes::NetworkThreshold,
QString("%1,%2,%3").arg(vecEdgeTrehshold.x()).arg(vecEdgeTrehshold.y()).arg(vecEdgeTrehshold.z()));
}
list << m_pItemNetworkThreshold;
list << new QStandardItem(m_pItemNetworkThreshold->toolTip());
this->appendRow(list);
    data.setValue(vecEdgeThreshold);
m_pItemNetworkThreshold->setData(data, MetaTreeItemRoles::NetworkThreshold);
connect(m_pItemNetworkThreshold.data(), &MetaTreeItem::dataChanged,
this, &NetworkTreeItem::onNetworkThresholdChanged);
list.clear();
MetaTreeItem* pItemNetworkMatrix = new MetaTreeItem(MetaTreeItemTypes::NetworkMatrix, "Show network matrix");
list << pItemNetworkMatrix;
list << new QStandardItem(pItemNetworkMatrix->toolTip());
this->appendRow(list);
//Set shaders
this->removeComponent(m_pMaterial);
this->removeComponent(m_pTessMaterial);
this->removeComponent(m_pNormalMaterial);
NetworkMaterial* pNetworkMaterial = new NetworkMaterial();
this->addComponent(pNetworkMaterial);
}
//*************************************************************************************************************
void NetworkTreeItem::addData(const Network& tNetworkData)
{
//Add data which is held by this NetworkTreeItem<|fim▁hole|> QVariant data;
data.setValue(tNetworkData);
this->setData(data, Data3DTreeModelItemRoles::NetworkData);
MatrixXd matDist = tNetworkData.getConnectivityMatrix();
data.setValue(matDist);
this->setData(data, Data3DTreeModelItemRoles::NetworkDataMatrix);
//Plot network
if(m_pItemNetworkThreshold) {
plotNetwork(tNetworkData,
m_pItemNetworkThreshold->data(MetaTreeItemRoles::NetworkThreshold).value<QVector3D>());
}
}
//*************************************************************************************************************
void NetworkTreeItem::onNetworkThresholdChanged(const QVariant& vecThresholds)
{
if(vecThresholds.canConvert<QVector3D>()) {
Network tNetwork = this->data(Data3DTreeModelItemRoles::NetworkData).value<Network>();
plotNetwork(tNetwork, vecThresholds.value<QVector3D>());
}
}
//*************************************************************************************************************
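// Draws one sphere per network node (only on the first call; see the
// m_bNodesPlotted workaround below) and one line per edge whose absolute
// weight reaches vecThreshold.x(); the y/z threshold components are not
// used here.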
void NetworkTreeItem::plotNetwork(const Network& tNetworkData, const QVector3D& vecThreshold)
{
//Create network vertices and normals
QList<NetworkNode::SPtr> lNetworkNodes = tNetworkData.getNodes();
MatrixX3f tMatVert(lNetworkNodes.size(), 3);
for(int i = 0; i < lNetworkNodes.size(); ++i) {
tMatVert(i,0) = lNetworkNodes.at(i)->getVert()(0);
tMatVert(i,1) = lNetworkNodes.at(i)->getVert()(1);
tMatVert(i,2) = lNetworkNodes.at(i)->getVert()(2);
}
MatrixX3f tMatNorm(lNetworkNodes.size(), 3);
tMatNorm.setZero();
//Draw network nodes
//TODO: Dirty hack using m_bNodesPlotted flag to get rid of memory leakage problem when putting parent to the nodes entities. Internal Qt3D problem?
if(!m_bNodesPlotted) {
Renderable3DEntity* pSourceSphereEntity = new Renderable3DEntity(this);
//create geometry
QSharedPointer<Qt3DExtras::QSphereGeometry> pSourceSphereGeometry = QSharedPointer<Qt3DExtras::QSphereGeometry>::create();
pSourceSphereGeometry->setRadius(0.001f);
//create instanced renderer
GeometryMultiplier *pSphereMesh = new GeometryMultiplier(pSourceSphereGeometry);
//Create transform matrix for each sphere instance
QVector<QMatrix4x4> vTransforms;
vTransforms.reserve(tMatVert.rows());
QVector3D tempPos;
for(int i = 0; i < tMatVert.rows(); ++i) {
QMatrix4x4 tempTransform;
tempPos.setX(tMatVert(i, 0));
tempPos.setY(tMatVert(i, 1));
tempPos.setZ(tMatVert(i, 2));
//Set position
tempTransform.translate(tempPos);
vTransforms.push_back(tempTransform);
}
//Set instance Transform
pSphereMesh->setTransforms(vTransforms);
pSourceSphereEntity->addComponent(pSphereMesh);
//Add material
GeometryMultiplierMaterial* pMaterial = new GeometryMultiplierMaterial(true);
pMaterial->setAmbient(Qt::blue);
pMaterial->setAlpha(1.0f);
pSourceSphereEntity->addComponent(pMaterial);
m_bNodesPlotted = true;
}
//Generate connection indices for Qt3D buffer
MatrixXi tMatLines;
int count = 0;
int start, end;
for(int i = 0; i < lNetworkNodes.size(); ++i) {
//Plot in edges
for(int j = 0; j < lNetworkNodes.at(i)->getEdgesIn().size(); ++j) {
start = lNetworkNodes.at(i)->getEdgesIn().at(j)->getStartNode()->getId();
end = lNetworkNodes.at(i)->getEdgesIn().at(j)->getEndNode()->getId();
if(std::fabs(lNetworkNodes.at(i)->getEdgesIn().at(j)->getWeight()) >= vecThreshold.x() &&
start != end) {
tMatLines.conservativeResize(count+1,2);
tMatLines(count,0) = start;
tMatLines(count,1) = end;
++count;
}
}
//Plot out edges
for(int j = 0; j < lNetworkNodes.at(i)->getEdgesOut().size(); ++j) {
start = lNetworkNodes.at(i)->getEdgesOut().at(j)->getStartNode()->getId();
end = lNetworkNodes.at(i)->getEdgesOut().at(j)->getEndNode()->getId();
if(std::fabs(lNetworkNodes.at(i)->getEdgesOut().at(j)->getWeight()) >= vecThreshold.x() &&
start != end) {
tMatLines.conservativeResize(count+1,2);
tMatLines(count,0) = start;
tMatLines(count,1) = end;
++count;
}
}
}
//Generate colors for Qt3D buffer
MatrixX3f matLineColor(tMatVert.rows(),3);
for(int i = 0; i < matLineColor.rows(); ++i) {
matLineColor(i,0) = 0.0f;
matLineColor(i,1) = 0.0f;
matLineColor(i,2) = 1.0f;
}
m_pCustomMesh->setMeshData(tMatVert,
tMatNorm,
tMatLines,
matLineColor,
Qt3DRender::QGeometryRenderer::Lines);
}<|fim▁end|> | |
<|file_name|>rpc_server.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from tornado.ioloop import IOLoop
from stormed import Connection, Message
def fib(n):
if n == 0:
return 0
elif n == 1:
return 1
else:
return fib(n-1) + fib(n-2)
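# Note: the doubly-recursive fib is exponential in n; presumably kept that
# way (as in the classic RabbitMQ RPC tutorial) so that long requests make
# the effect of qos(prefetch_count=1) on work distribution visible.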
def on_connect():
global ch
ch = conn.channel()
ch.queue_declare(queue='rpc_queue', durable=True)
ch.qos(prefetch_count=1)
ch.consume('rpc_queue', on_request)
def on_request(msg):
n = int(msg.body)
print " [.] fib(%s)" % n
response = str(fib(n))
response_msg = Message(response, delivery_mode=2,
correlation_id=msg.correlation_id)
ch.publish(response_msg, exchange='', routing_key=msg.reply_to)
msg.ack()
logging.basicConfig()
ch = None
conn = Connection(host='localhost')
conn.connect(on_connect)
io_loop = IOLoop.instance()
print ' [*] Waiting for messages. To exit press CTRL+C'
try:
io_loop.start()
except KeyboardInterrupt:
conn.close(io_loop.stop)<|fim▁end|> | #!/usr/bin/env python
import logging |
<|file_name|>Tabs.Skeleton.js<|end_file_name|><|fim▁begin|>import React from 'react';
export default class TabsSkeleton extends React.Component {
render() {
const tab = (
<li className="wfp--tabs__nav-item">
<div className="wfp--tabs__nav-link"> </div><|fim▁hole|> </li>
);
return (
<nav className="wfp--tabs wfp--skeleton">
<div className="wfp--tabs-trigger">
<div className="wfp--tabs-trigger-text"> </div>
<svg width="10" height="5" viewBox="0 0 10 5" fill-rule="evenodd">
<path d="M10 0L5 5 0 0z" />
</svg>
</div>
<ul className="wfp--tabs__nav wfp--tabs__nav--hidden">
<li className="wfp--tabs__nav-item wfp--tabs__nav-item--selected">
<div className="wfp--tabs__nav-link"> </div>
</li>
{tab}
{tab}
{tab}
</ul>
</nav>
);
}
}<|fim▁end|> | |
<|file_name|>structofp13__meter__band__dscp__remark.js<|end_file_name|><|fim▁begin|>var structofp13__meter__band__dscp__remark =
[
[ "burst_size", "structofp13__meter__band__dscp__remark.html#a65ae2e874c730303bbc5d780640f7fc9", null ],
[ "len", "structofp13__meter__band__dscp__remark.html#ad56aaad8bcc7a48f6f2328ff81b0d67f", null ],
[ "pad", "structofp13__meter__band__dscp__remark.html#a5e621372646568aa14544869bbbdcf46", null ],
[ "prec_level", "structofp13__meter__band__dscp__remark.html#a753ecc92ac6a4685c02baa954acee8aa", null ],<|fim▁hole|><|fim▁end|> | [ "rate", "structofp13__meter__band__dscp__remark.html#a1087876e1f2f5eacbb785424dad28347", null ],
[ "type", "structofp13__meter__band__dscp__remark.html#a6837249197a1667e26a848d4870ebaff", null ]
]; |
<|file_name|>generate_playlist.py<|end_file_name|><|fim▁begin|>"""
Playlist Generation
"""
from os import path
from random import choice
import string
import pafy
from .. import content, g, playlists, screen, util, listview
from ..playlist import Playlist
from . import command, search, album_search
@command(r'mkp\s*(.{1,100})')
def generate_playlist(sourcefile):
"""Generate a playlist from video titles in sourcefile"""
    # If --description/-d was passed, build the playlist from a video's
    # description instead of a source file.
if "--description" in sourcefile or "-d" in sourcefile:
description_generator(sourcefile)
return<|fim▁hole|>
expanded_sourcefile = path.expanduser(sourcefile)
if not check_sourcefile(expanded_sourcefile):
g.message = util.F('mkp empty') % expanded_sourcefile
else:
queries = read_sourcefile(expanded_sourcefile)
g.message = util.F('mkp parsed') % (len(queries), sourcefile)
if queries:
create_playlist(queries)
g.message = util.F('pl help')
g.content = content.playlists_display()
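# Usage sketch from the player's command prompt (the source file holds one
# search query per line):
#   mkp ~/my-songs.txt
#   mkp 3 --description    (parse tracks out of the description of item 3)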
def read_sourcefile(filename):
"""Read each line as a query from filename"""
with open(filename) as srcfl:
queries = list()
for item in srcfl.readlines():
clean_item = str(item).strip()
if not clean_item:
continue
queries.append(clean_item)
return queries
def check_sourcefile(filename):
"""Check if filename exists and has a non-zero size"""
return path.isfile(filename) and path.getsize(filename) > 0
def create_playlist(queries, title=None):
"""Add a new playlist
Create playlist with a random name, get the first
match for each title in queries and append it to the playlist
"""
    if title is not None:
        plname = title.replace(" ", "-")
    else:
        plname = random_plname()
if not g.userpl.get(plname):
g.userpl[plname] = Playlist(plname)
for query in queries:
g.message = util.F('mkp finding') % query
screen.update()
qresult = find_best_match(query)
if qresult:
g.userpl[plname].songs.append(qresult)
if g.userpl[plname]:
playlists.save()
def find_best_match(query):
"""Find the best(first)"""
# This assumes that the first match is the best one
qs = search.generate_search_qs(query)
wdata = pafy.call_gdata('search', qs)
results = search.get_tracks_from_json(wdata)
if results:
res, score = album_search._best_song_match(
results, query, 0.1, 1.0, 0.0)
return res
def random_plname():
"""Generates a random alphanumeric string of 6 characters"""
n_chars = 6
return ''.join(choice(string.ascii_lowercase + string.digits)
for _ in range(n_chars))
def description_generator(text):
""" Fetches a videos description and parses it for
<artist> - <track> combinations
"""
if not isinstance(g.model, Playlist):
g.message = util.F("mkp desc unknown")
return
# Use only the first result, for now
num = text.replace("--description", "")
num = num.replace("-d", "")
num = util.number_string_to_list(num)[0]
query = {}
query['id'] = g.model[num].ytid
query['part'] = 'snippet'
query['maxResults'] = '1'
data = pafy.call_gdata('videos', query)['items'][0]['snippet']
title = "mkp %s" % data['title']
data = util.fetch_songs(data['description'], data['title'])
columns = [
{"name": "idx", "size": 3, "heading": "Num"},
{"name": "artist", "size": 30, "heading": "Artist"},
{"name": "title", "size": "remaining", "heading": "Title"},
]
def run_m(idx):
""" Create playlist based on the
results selected
"""
create_playlist(idx, title)
if data:
data = [listview.ListSongtitle(x) for x in data]
g.content = listview.ListView(columns, data, run_m)
g.message = util.F("mkp desc which data")
else:
g.message = util.F("mkp no valid")
return<|fim▁end|> | |
<|file_name|>dns_oa.py<|end_file_name|><|fim▁begin|>import logging
import os
import json
import shutil
import sys
import datetime
import csv, math
from tld import get_tld
from collections import OrderedDict
from utils import Util
from components.data.data import Data
from components.iana.iana_transform import IanaTransform
from components.nc.network_context import NetworkContext
from multiprocessing import Process
import pandas as pd
import time
class OA(object):
def __init__(self,date,limit=500,logger=None):
self._initialize_members(date,limit,logger)
def _initialize_members(self,date,limit,logger):
# get logger if exists. if not, create new instance.
self._logger = logging.getLogger('OA.DNS') if logger else Util.get_logger('OA.DNS',create_file=False)
# initialize required parameters.
        self._script_path = os.path.dirname(os.path.abspath(__file__))
self._date = date
self._table_name = "dns"
self._dns_results = []
self._limit = limit
self._data_path = None
self._ipynb_path = None
self._ingest_summary_path = None
self._dns_scores = []
self._dns_scores_headers = []
self._results_delimiter = '\t'
self._details_limit = 250
# get app configuration.
self._spot_conf = Util.get_spot_conf()
# get scores fields conf
conf_file = "{0}/dns_conf.json".format(self._scrtip_path)
self._conf = json.loads(open (conf_file).read(),object_pairs_hook=OrderedDict)
# initialize data engine
self._db = self._spot_conf.get('conf', 'DBNAME').replace("'", "").replace('"', '')
self._engine = Data(self._db,self._table_name ,self._logger)
def start(self):
####################
start = time.time()
####################
self._create_folder_structure()
self._add_ipynb()
self._get_dns_results()
self._add_tld_column()
self._add_reputation()
self._add_hh_and_severity()
self._add_iana()
self._add_network_context()
self._create_dns_scores_csv()
self._get_oa_details()
self._ingest_summary()
##################
end = time.time()
print(end - start)
##################
def _create_folder_structure(self):
# create date folder structure if it does not exist.
self._logger.info("Creating folder structure for OA (data and ipynb)")
self._data_path,self._ingest_summary_path,self._ipynb_path = Util.create_oa_folders("dns",self._date)
def _add_ipynb(self):
if os.path.isdir(self._ipynb_path):
self._logger.info("Adding edge investigation IPython Notebook")
shutil.copy("{0}/ipynb_templates/Edge_Investigation_master.ipynb".format(self._scrtip_path),"{0}/Edge_Investigation.ipynb".format(self._ipynb_path))
self._logger.info("Adding threat investigation IPython Notebook")
shutil.copy("{0}/ipynb_templates/Threat_Investigation_master.ipynb".format(self._scrtip_path),"{0}/Threat_Investigation.ipynb".format(self._ipynb_path))
else:
self._logger.error("There was a problem adding the IPython Notebooks, please check the directory exists.")
def _get_dns_results(self):
self._logger.info("Getting {0} Machine Learning Results from HDFS".format(self._date))
dns_results = "{0}/dns_results.csv".format(self._data_path)
# get hdfs path from conf file.
HUSER = self._spot_conf.get('conf', 'HUSER').replace("'", "").replace('"', '')
hdfs_path = "{0}/dns/scored_results/{1}/scores/dns_results.csv".format(HUSER,self._date)
# get results file from hdfs.
get_command = Util.get_ml_results_form_hdfs(hdfs_path,self._data_path)
self._logger.info("{0}".format(get_command))
# validate files exists
if os.path.isfile(dns_results):
# read number of results based in the limit specified.
self._logger.info("Reading {0} dns results file: {1}".format(self._date,dns_results))
self._dns_results = Util.read_results(dns_results,self._limit,self._results_delimiter)[:]
            if len(self._dns_results) == 0: self._logger.error("There are no DNS results.");sys.exit(1)
else:
self._logger.error("There was an error getting ML results from HDFS")
sys.exit(1)
# add headers.
self._logger.info("Adding headers")
self._dns_scores_headers = [ str(key) for (key,value) in self._conf['dns_score_fields'].items() ]
# add dns content.
self._dns_scores = [ conn[:] for conn in self._dns_results][:]
def _move_time_stamp(self,dns_data):
for dns in dns_data:
time_stamp = dns[1]
dns.remove(time_stamp)
dns.append(time_stamp)
return dns_data
def _create_dns_scores_csv(self):
dns_scores_csv = "{0}/dns_scores.csv".format(self._data_path)
dns_scores_final = self._move_time_stamp(self._dns_scores)
dns_scores_final.insert(0,self._dns_scores_headers)
Util.create_csv_file(dns_scores_csv,dns_scores_final)
        # create a backup copy of the scores file
dns_scores_bu_csv = "{0}/dns_scores_bu.csv".format(self._data_path)
Util.create_csv_file(dns_scores_bu_csv,dns_scores_final)
def _add_tld_column(self):
qry_name_col = self._conf['dns_results_fields']['dns_qry_name']
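        # get_tld() expects a full URL, so bare query names are prefixed
        # with "http://" before the top-level domain is extracted.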
self._dns_scores = [conn + [ get_tld("http://" + str(conn[qry_name_col]), fail_silently=True) if "http://" not in str(conn[qry_name_col]) else get_tld(str(conn[qry_name_col]), fail_silently=True)] for conn in self._dns_scores ]
def _add_reputation(self):
# read configuration.
reputation_conf_file = "{0}/components/reputation/reputation_config.json".format(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
self._logger.info("Reading reputation configuration file: {0}".format(reputation_conf_file))
rep_conf = json.loads(open(reputation_conf_file).read())
# initialize reputation services.
self._rep_services = []
self._logger.info("Initializing reputation services.")
for service in rep_conf:
config = rep_conf[service]
module = __import__("components.reputation.{0}.{0}".format(service), fromlist=['Reputation'])
self._rep_services.append(module.Reputation(config,self._logger))
# get columns for reputation.
rep_cols = {}
indexes = [ int(value) for key, value in self._conf["add_reputation"].items()]
self._logger.info("Getting columns to add reputation based on config file: dns_conf.json".format())
for index in indexes:
col_list = []
for conn in self._dns_scores:
col_list.append(conn[index])
rep_cols[index] = list(set(col_list))
# get reputation per column.
self._logger.info("Getting reputation for each service in config")
rep_services_results = []
if self._rep_services :
for key,value in rep_cols.items():
rep_services_results = [ rep_service.check(None,value) for rep_service in self._rep_services]
rep_results = {}
for result in rep_services_results:
rep_results = {k: "{0}::{1}".format(rep_results.get(k, ""), result.get(k, "")).strip('::') for k in set(rep_results) | set(result)}
self._dns_scores = [ conn + [ rep_results[conn[key]] ] for conn in self._dns_scores ]
else:
self._dns_scores = [ conn + [""] for conn in self._dns_scores ]
def _add_hh_and_severity(self):
# add hh value and sev columns.
dns_date_index = self._conf["dns_results_fields"]["frame_time"]
self._dns_scores = [conn + [ filter(None,conn[dns_date_index].split(" "))[3].split(":")[0]] + [0] + [0] for conn in self._dns_scores ]
def _add_iana(self):
iana_conf_file = "{0}/components/iana/iana_config.json".format(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
if os.path.isfile(iana_conf_file):
iana_config = json.loads(open(iana_conf_file).read())
dns_iana = IanaTransform(iana_config["IANA"])
dns_qry_class_index = self._conf["dns_results_fields"]["dns_qry_class"]
dns_qry_type_index = self._conf["dns_results_fields"]["dns_qry_type"]
dns_qry_rcode_index = self._conf["dns_results_fields"]["dns_qry_rcode"]
self._dns_scores = [ conn + [ dns_iana.get_name(conn[dns_qry_class_index],"dns_qry_class")] + [dns_iana.get_name(conn[dns_qry_type_index],"dns_qry_type")] + [ dns_iana.get_name(conn[dns_qry_rcode_index],"dns_qry_rcode") ] for conn in self._dns_scores ]
else:
self._dns_scores = [ conn + ["","",""] for conn in self._dns_scores ]
def _add_network_context(self):
nc_conf_file = "{0}/components/nc/nc_config.json".format(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
if os.path.isfile(nc_conf_file):
nc_conf = json.loads(open(nc_conf_file).read())["NC"]
dns_nc = NetworkContext(nc_conf,self._logger)
ip_dst_index = self._conf["dns_results_fields"]["ip_dst"]
self._dns_scores = [ conn + [dns_nc.get_nc(conn[ip_dst_index])] for conn in self._dns_scores ]
else:
self._dns_scores = [ conn + [""] for conn in self._dns_scores ]
def _get_oa_details(self):
self._logger.info("Getting OA DNS suspicious details/chord diagram")
# start suspicious connects details process.
p_sp = Process(target=self._get_suspicious_details)
p_sp.start()
# start chord diagram process.
p_dn = Process(target=self._get_dns_dendrogram)
p_dn.start()
p_sp.join()
p_dn.join()
def _get_suspicious_details(self):
iana_conf_file = "{0}/components/iana/iana_config.json".format(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
if os.path.isfile(iana_conf_file):
iana_config = json.loads(open(iana_conf_file).read())
dns_iana = IanaTransform(iana_config["IANA"])
for conn in self._dns_scores:
# get data to query
date=conn[self._conf["dns_score_fields"]["frame_time"]].split(" ")
date = filter(None,date)
if len(date) == 5:
year=date[2]
month=datetime.datetime.strptime(date[0], '%b').strftime('%m')
day=date[1]
hh=conn[self._conf["dns_score_fields"]["hh"]]
dns_qry_name = conn[self._conf["dns_score_fields"]["dns_qry_name"]]
self._get_dns_details(dns_qry_name,year,month,day,hh,dns_iana)
def _get_dns_details(self,dns_qry_name,year,month,day,hh,dns_iana):
limit = self._details_limit<|fim▁hole|> edge_tmp ="{0}/edge-{1}_{2}_00.tmp".format(self._data_path,dns_qry_name.replace("/","-"),hh)
if not os.path.isfile(edge_file):
dns_qry = ("SELECT frame_time,frame_len,ip_dst,ip_src,dns_qry_name,dns_qry_class,dns_qry_type,dns_qry_rcode,dns_a FROM {0}.{1} WHERE y={2} AND m={3} AND d={4} AND dns_qry_name LIKE '%{5}%' AND h={6} LIMIT {7};").format(self._db,self._table_name,year,month,day,dns_qry_name,hh,limit)
# execute query
try:
self._engine.query(dns_qry,edge_tmp)
except:
self._logger.error("ERROR. Edge file couldn't be created for {0}, skipping this step".format(dns_qry_name))
else:
# add IANA to results.
if dns_iana:
update_rows = []
self._logger.info("Adding IANA translation to details results")
with open(edge_tmp) as dns_details_csv:
rows = csv.reader(dns_details_csv, delimiter=',', quotechar='|')
try:
next(rows)
update_rows = [[conn[0]] + [conn[1]] + [conn[2]] + [conn[3]] + [conn[4]] + [dns_iana.get_name(conn[5],"dns_qry_class")] + [dns_iana.get_name(conn[6],"dns_qry_type")] + [dns_iana.get_name(conn[7],"dns_qry_rcode")] + [conn[8]] for conn in rows]
update_rows = filter(None, update_rows)
header = [ "frame_time", "frame_len", "ip_dst","ip_src","dns_qry_name","dns_qry_class_name","dns_qry_type_name","dns_qry_rcode_name","dns_a" ]
update_rows.insert(0,header)
except IndexError:
pass
else:
self._logger.info("WARNING: NO IANA configured.")
# create edge file.
self._logger.info("Creating edge file:{0}".format(edge_file))
with open(edge_file,'wb') as dns_details_edge:
writer = csv.writer(dns_details_edge, quoting=csv.QUOTE_ALL)
if update_rows:
writer.writerows(update_rows)
else:
shutil.copy(edge_tmp,edge_file)
os.remove(edge_tmp)
def _get_dns_dendrogram(self):
limit = self._details_limit
for conn in self._dns_scores:
date=conn[self._conf["dns_score_fields"]["frame_time"]].split(" ")
date = filter(None,date)
if len(date) == 5:
year=date[2]
month=datetime.datetime.strptime(date[0], '%b').strftime('%m')
day=date[1]
ip_dst=conn[self._conf["dns_score_fields"]["ip_dst"]]
self._get_dendro(self._db,self._table_name,ip_dst,year,month,day, limit)
def _get_dendro(self,db,table,ip_dst,year,month,day,limit):
dendro_file = "{0}/dendro-{1}.csv".format(self._data_path,ip_dst)
if not os.path.isfile(dendro_file):
dndro_qry = ("SELECT dns_a, dns_qry_name, ip_dst FROM (SELECT susp.ip_dst, susp.dns_qry_name, susp.dns_a FROM {0}.{1} as susp WHERE susp.y={2} AND susp.m={3} AND susp.d={4} AND susp.ip_dst='{5}' LIMIT {6}) AS tmp GROUP BY dns_a, dns_qry_name, ip_dst").format(db,table,year,month,day,ip_dst,limit)
# execute query
self._engine.query(dndro_qry,dendro_file)
def _ingest_summary(self):
# get date parameters.
yr = self._date[:4]
mn = self._date[4:6]
dy = self._date[6:]
self._logger.info("Getting ingest summary data for the day")
ingest_summary_cols = ["date","total"]
result_rows = []
df_filtered = pd.DataFrame()
ingest_summary_file = "{0}/is_{1}{2}.csv".format(self._ingest_summary_path,yr,mn)
ingest_summary_tmp = "{0}.tmp".format(ingest_summary_file)
if os.path.isfile(ingest_summary_file):
df = pd.read_csv(ingest_summary_file, delimiter=',')
#discards previous rows from the same date
df_filtered = df[df['date'].str.contains("{0}-{1}-{2}".format(yr, mn, dy)) == False]
else:
df = pd.DataFrame()
# get ingest summary.
ingest_summary_qry = ("SELECT frame_time, COUNT(*) as total "
" FROM {0}.{1}"
" WHERE y={2} AND m={3} AND d={4} "
" AND unix_tstamp IS NOT NULL AND frame_time IS NOT NULL"
" AND frame_len IS NOT NULL AND dns_qry_name IS NOT NULL"
" AND ip_src IS NOT NULL "
" AND (dns_qry_class IS NOT NULL AND dns_qry_type IS NOT NULL AND dns_qry_rcode IS NOT NULL ) "
" GROUP BY frame_time;")
ingest_summary_qry = ingest_summary_qry.format(self._db,self._table_name, yr, mn, dy)
results_file = "{0}/results_{1}.csv".format(self._ingest_summary_path,self._date)
self._engine.query(ingest_summary_qry,output_file=results_file,delimiter=",")
if os.path.isfile(results_file):
df_results = pd.read_csv(results_file, delimiter=',')
# Forms a new dataframe splitting the minutes from the time column
df_new = pd.DataFrame([["{0}-{1}-{2} {3}:{4}".format(yr, mn, dy,val['frame_time'].split(" ")[3].split(":")[0].zfill(2),val['frame_time'].split(" ")[3].split(":")[1].zfill(2)), int(val['total']) if not math.isnan(val['total']) else 0 ] for key,val in df_results.iterrows()],columns = ingest_summary_cols)
#Groups the data by minute
sf = df_new.groupby(by=['date'])['total'].sum()
df_per_min = pd.DataFrame({'date':sf.index, 'total':sf.values})
df_final = df_filtered.append(df_per_min, ignore_index=True)
df_final.to_csv(ingest_summary_tmp,sep=',', index=False)
os.remove(results_file)
os.rename(ingest_summary_tmp,ingest_summary_file)
else:
self._logger.info("No data found for the ingest summary")<|fim▁end|> | edge_file ="{0}/edge-{1}_{2}_00.csv".format(self._data_path,dns_qry_name.replace("/","-"),hh) |
<|file_name|>managed_value_store_cache.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/extensions/api/storage/managed_value_store_cache.h"
#include <set>
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/callback.h"
#include "base/file_util.h"
#include "base/logging.h"
#include "base/message_loop_proxy.h"
#include "chrome/browser/extensions/api/storage/policy_value_store.h"
#include "chrome/browser/extensions/api/storage/settings_storage_factory.h"
#include "chrome/browser/extensions/event_names.h"
#include "chrome/browser/extensions/extension_service.h"
#include "chrome/browser/value_store/value_store_change.h"
#include "chrome/common/extensions/extension.h"
#include "content/public/browser/browser_thread.h"
using content::BrowserThread;
namespace extensions {
ManagedValueStoreCache::ManagedValueStoreCache(
policy::PolicyService* policy_service,
EventRouter* event_router,
const scoped_refptr<SettingsStorageFactory>& factory,
const scoped_refptr<SettingsObserverList>& observers,
const FilePath& profile_path)
: ALLOW_THIS_IN_INITIALIZER_LIST(weak_factory_(this)),
weak_this_on_ui_(weak_factory_.GetWeakPtr()),
policy_service_(policy_service),
event_router_(event_router),
storage_factory_(factory),
observers_(observers),
base_path_(profile_path.AppendASCII(
ExtensionService::kManagedSettingsDirectoryName)) {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
// |event_router| can be NULL on unit_tests.
if (event_router_)
event_router_->RegisterObserver(this, event_names::kOnSettingsChanged);
policy_service_->AddObserver(policy::POLICY_DOMAIN_EXTENSIONS, this);
}
ManagedValueStoreCache::~ManagedValueStoreCache() {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::FILE));
DCHECK(!event_router_);
// Delete the PolicyValueStores on FILE.
store_map_.clear();
}
void ManagedValueStoreCache::ShutdownOnUI() {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
policy_service_->RemoveObserver(policy::POLICY_DOMAIN_EXTENSIONS, this);
policy_service_ = NULL;
if (event_router_)
event_router_->UnregisterObserver(this);
event_router_ = NULL;
weak_factory_.InvalidateWeakPtrs();
}
void ManagedValueStoreCache::RunWithValueStoreForExtension(
const StorageCallback& callback,
scoped_refptr<const Extension> extension) {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::FILE));
PolicyValueStore* store = GetStoreFor(extension->id());
if (store) {
callback.Run(store);
} else {
// First time that an extension calls storage.managed.get(). Create the
// store and load it with the current policy, and don't send event
// notifications.
CreateStoreFor(
extension->id(),
false,
base::Bind(&ManagedValueStoreCache::RunWithValueStoreForExtension,
base::Unretained(this),
callback,
extension));
}
}
void ManagedValueStoreCache::DeleteStorageSoon(
const std::string& extension_id) {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::FILE));
PolicyValueStore* store = GetStoreFor(extension_id);
if (!store) {
// It's possible that the store exists, but hasn't been loaded yet
// (because the extension is unloaded, for example). Open the database to
// clear it if it exists.
// TODO(joaodasilva): move this check to a ValueStore method.
if (file_util::DirectoryExists(base_path_.AppendASCII(extension_id))) {
CreateStoreFor(
extension_id,
false,
base::Bind(&ManagedValueStoreCache::DeleteStorageSoon,
base::Unretained(this),
extension_id));
}
} else {
store->DeleteStorage();
store_map_.erase(extension_id);
}
}
void ManagedValueStoreCache::OnPolicyUpdated(policy::PolicyDomain domain,
const std::string& component_id,
const policy::PolicyMap& previous,
const policy::PolicyMap& current) {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
BrowserThread::PostTask(
BrowserThread::FILE, FROM_HERE,
base::Bind(&ManagedValueStoreCache::UpdatePolicyOnFILE,
base::Unretained(this),
std::string(component_id),
base::Passed(current.DeepCopy())));
}
void ManagedValueStoreCache::UpdatePolicyOnFILE(
const std::string& extension_id,
scoped_ptr<policy::PolicyMap> current_policy) {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::FILE));
PolicyValueStore* store = GetStoreFor(extension_id);
if (!store) {
// The extension hasn't executed any storage.managed.* calls, and isn't
// listening for onChanged() either. Ignore this notification in that case.
return;
}
// Update the policy on the backing store, and fire notifications if it
// changed.
store->SetCurrentPolicy(*current_policy, true);
}
void ManagedValueStoreCache::OnListenerAdded(
const EventListenerInfo& details) {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
DCHECK_EQ(std::string(event_names::kOnSettingsChanged), details.event_name);
// This is invoked on several occasions:
//
// 1. when an extension first registers to observe storage.onChanged; in this
// case the backend doesn't have any previous data persisted, and it won't
// trigger a notification.
//
// 2. when the browser starts up and all existing extensions re-register for
// the onChanged event. In this case, if the current policy differs from
// the persisted version then a notification will be sent.
//
// 3. a policy update just occurred and sent a notification, and an extension
  //    with EventPages that is observing onChanged just woke up and registered
// again. In this case the policy update already persisted the current
// policy version, and |store| already exists.
BrowserThread::PostTask(
BrowserThread::FILE, FROM_HERE,
base::Bind(&ManagedValueStoreCache::CreateForExtensionOnFILE,
base::Unretained(this),
details.extension_id));
}
void ManagedValueStoreCache::CreateForExtensionOnFILE(
const std::string& extension_id) {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::FILE));
PolicyValueStore* store = GetStoreFor(extension_id);
if (!store)
CreateStoreFor(extension_id, true, base::Closure());
}
PolicyValueStore* ManagedValueStoreCache::GetStoreFor(
const std::string& extension_id) {<|fim▁hole|> return it->second.get();
}
void ManagedValueStoreCache::CreateStoreFor(
const std::string& extension_id,
bool notify_if_changed,
const base::Closure& continuation) {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::FILE));
DCHECK(!GetStoreFor(extension_id));
// Creating or loading an existing database requires an immediate update
// with the current policy for the corresponding extension, which must be
// retrieved on UI.
BrowserThread::PostTask(
BrowserThread::UI, FROM_HERE,
base::Bind(&ManagedValueStoreCache::GetInitialPolicy,
weak_this_on_ui_,
extension_id,
notify_if_changed,
continuation));
}
void ManagedValueStoreCache::GetInitialPolicy(
const std::string& extension_id,
bool notify_if_changed,
const base::Closure& continuation) {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI));
const policy::PolicyMap& policy = policy_service_->GetPolicies(
policy::POLICY_DOMAIN_EXTENSIONS, extension_id);
// Now post back to FILE to create the database.
BrowserThread::PostTask(
BrowserThread::FILE, FROM_HERE,
base::Bind(&ManagedValueStoreCache::CreateStoreWithInitialPolicy,
base::Unretained(this),
extension_id,
notify_if_changed,
base::Passed(policy.DeepCopy()),
continuation));
}
void ManagedValueStoreCache::CreateStoreWithInitialPolicy(
const std::string& extension_id,
bool notify_if_changed,
scoped_ptr<policy::PolicyMap> initial_policy,
const base::Closure& continuation) {
DCHECK(BrowserThread::CurrentlyOn(BrowserThread::FILE));
// If a 2nd call to CreateStoreFor() is issued before the 1st gets to execute
// its UI task, then the 2nd will enter this function but the store has
// already been created. Check for that.
PolicyValueStore* store = GetStoreFor(extension_id);
if (!store) {
// Create it now.
// If the database doesn't exist yet then this is the initial install,
// and no notifications should be issued in that case.
// TODO(joaodasilva): move this check to a ValueStore method.
if (!file_util::DirectoryExists(base_path_.AppendASCII(extension_id)))
notify_if_changed = false;
store = new PolicyValueStore(
extension_id,
observers_,
make_scoped_ptr(storage_factory_->Create(base_path_, extension_id)));
store_map_[extension_id] = make_linked_ptr(store);
}
// Send the latest policy to the store.
store->SetCurrentPolicy(*initial_policy, notify_if_changed);
// And finally resume from where this process started.
if (!continuation.is_null())
continuation.Run();
}
} // namespace extensions<|fim▁end|> | DCHECK(BrowserThread::CurrentlyOn(BrowserThread::FILE));
PolicyValueStoreMap::iterator it = store_map_.find(extension_id);
if (it == store_map_.end())
return NULL; |
<|file_name|>gulpfile.js<|end_file_name|><|fim▁begin|>var gulp = require("gulp"),
concat = require("gulp-concat"),
karma = require("karma").server,
mocha = require("gulp-mocha"),
nodemon = require("gulp-nodemon"),
notify = require("gulp-notify"),
size = require("gulp-filesize"),
sourcemaps = require("gulp-sourcemaps"),
uglify = require("gulp-uglify"),
typescript = require("gulp-tsc");
gulp.task("default", function () {
gulp.start("server");
gulp.start("watch");
});
gulp.task("watch", function () {
gulp.watch("assets/js/**/*.ts", ["typescript-client"]);
gulp.watch("assets/dist/js/*.js", ["test-client", "client"]);
});
gulp.task("client", function () {
gulp.src([
"bower_components/jquery/dist/jquery.min.js",
"bower_components/lodash/lodash.min.js",
"bower_components/rxjs/dist/rx.all.min.js",
"bower_components/Rx-jQuery/rx.jquery.js",
"assets/dist/js/lib.js"
])
.pipe(sourcemaps.init())
.pipe(concat("app.js"))
.pipe(sourcemaps.write())
.pipe(gulp.dest("assets/dist/js"))
.pipe(notify("app.js successfully compiled"))
.pipe(size());
});
gulp.task("server", function () {
nodemon({
script: "app/boot.js",
ext: "ts html",
ignore: ["assets/**/*", "README"],
env: {"NODE_ENV": "development"},
tasks: ["typescript", "test"]
}).on("restart", function () {
//console.log("Server restarted!");
});
});
gulp.task("typescript-client", function () {
tsc("assets/js", "assets/dist/js", "client");
tscClientTest("assets/js");
});
gulp.task("typescript", function () {
tsc("app", "app", "server");
});
gulp.task("test-client", function (done) {
karma.start({
configFile: process.cwd() + "/assets/js/test/karma.conf.js"
}, done);
});
gulp.task("test", function () {
return gulp.src("app/**/*.spec.js", {read: false})
.pipe(mocha({reporter: "list"}));
});
function tsc(path, out, type) {
var src = gulp.src([
path + "/**/*.ts",
// Ignore specs and typings
"!" + path + "/**/*.spec.ts",
"!" + path + "/typings/**/*"
], {base: path});
var dest;
if (type == "client") {
dest = src.pipe(typescript({
target: "ES5",
sortOutput: true,
sourceMap: false,
removeComments: true
}))
.pipe(concat("lib.js"));
//.pipe(uglify());
} else {
dest = src.pipe(typescript(
{
target: "ES5",
sourcemap: true,
declarationFiles: true,
removeComments: true
}
));
}
dest
.pipe(gulp.dest(out))
.pipe(notify(path + " tsc compiled into JavaScript"))
.pipe(size());
}
function tscClientTest(path) {
var src = gulp.src([
path + "/**/*.spec.ts"
], {base: path});
src.pipe(typescript({
target: "ES5",
sortOutput: true,
sourceMap: false,
removeComments: true
}))
.pipe(concat("all.js"))
.pipe(gulp.dest(path + "/test"))<|fim▁hole|><|fim▁end|> | .pipe(notify(path + " tscTest compiled into JavaScript"))
.pipe(size());
} |
<|file_name|>UploadSslCertCmd.java<|end_file_name|><|fim▁begin|>// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,<|fim▁hole|>// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.api.command.user.loadbalancer;
import javax.inject.Inject;
import org.apache.log4j.Logger;
import org.apache.cloudstack.api.APICommand;
import org.apache.cloudstack.api.ApiConstants;
import org.apache.cloudstack.api.ApiErrorCode;
import org.apache.cloudstack.api.BaseCmd;
import org.apache.cloudstack.api.Parameter;
import org.apache.cloudstack.api.ServerApiException;
import org.apache.cloudstack.api.response.DomainResponse;
import org.apache.cloudstack.api.response.ProjectResponse;
import org.apache.cloudstack.api.response.SslCertResponse;
import org.apache.cloudstack.context.CallContext;
import com.cloud.exception.ConcurrentOperationException;
import com.cloud.exception.InsufficientCapacityException;
import com.cloud.exception.NetworkRuleConflictException;
import com.cloud.exception.ResourceAllocationException;
import com.cloud.exception.ResourceUnavailableException;
import org.apache.cloudstack.network.tls.CertService;
@APICommand(name = "uploadSslCert", description = "Upload a certificate to CloudStack", responseObject = SslCertResponse.class,
requestHasSensitiveInfo = false, responseHasSensitiveInfo = false)
public class UploadSslCertCmd extends BaseCmd {
public static final Logger s_logger = Logger.getLogger(UploadSslCertCmd.class.getName());
private static final String s_name = "uploadsslcertresponse";
@Inject
CertService _certService;
/////////////////////////////////////////////////////
//////////////// API parameters /////////////////////
/////////////////////////////////////////////////////
@Parameter(name = ApiConstants.CERTIFICATE, type = CommandType.STRING, required = true, description = "SSL certificate", length = 16384)
private String cert;
@Parameter(name = ApiConstants.PRIVATE_KEY, type = CommandType.STRING, required = true, description = "Private key", length = 16384)
private String key;
@Parameter(name = ApiConstants.CERTIFICATE_CHAIN, type = CommandType.STRING, description = "Certificate chain of trust", length = 2097152)
private String chain;
@Parameter(name = ApiConstants.PASSWORD, type = CommandType.STRING, description = "Password for the private key")
private String password;
@Parameter(name = ApiConstants.ACCOUNT, type = CommandType.STRING, description = "account that will own the SSL certificate")
private String accountName;
@Parameter(name = ApiConstants.PROJECT_ID, type = CommandType.UUID, entityType = ProjectResponse.class, description = "an optional project for the SSL certificate")
private Long projectId;
@Parameter(name = ApiConstants.DOMAIN_ID, type = CommandType.UUID, entityType = DomainResponse.class, description = "domain ID of the account owning the SSL certificate")
private Long domainId;
/////////////////////////////////////////////////////
/////////////////// Accessors ///////////////////////
/////////////////////////////////////////////////////
public String getCert() {
return cert;
}
public String getKey() {
return key;
}
public String getChain() {
return chain;
}
public String getPassword() {
return password;
}
public String getAccountName() {
return accountName;
}
public Long getDomainId() {
return domainId;
}
public Long getProjectId() {
return projectId;
}
/////////////////////////////////////////////////////
/////////////// API Implementation///////////////////
/////////////////////////////////////////////////////
@Override
public void execute() throws ResourceUnavailableException, InsufficientCapacityException, ServerApiException, ConcurrentOperationException,
ResourceAllocationException, NetworkRuleConflictException {
try {
SslCertResponse response = _certService.uploadSslCert(this);
setResponseObject(response);
response.setResponseName(getCommandName());
} catch (Exception e) {
throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, e.getMessage());
}
}
@Override
public String getCommandName() {
return s_name;
}
@Override
public long getEntityOwnerId() {
return CallContext.current().getCallingAccount().getId();
}
}<|fim▁end|> | // software distributed under the License is distributed on an |
<|file_name|>return_statements_test.py<|end_file_name|><|fim▁begin|># Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for return_statements module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.autograph.converters import functions
from tensorflow.python.autograph.converters import return_statements
from tensorflow.python.autograph.core import converter_testing
from tensorflow.python.framework import ops
from tensorflow.python.platform import test
class SingleReturnTest(converter_testing.TestCase):
def assertTransformedEquivalent(self, f, *inputs):
tr = self.transform(f, (functions, return_statements))
self.assertEqual(f(*inputs), tr(*inputs))
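# The helper above runs `f` through the `functions` and `return_statements`
# converters and checks that the transformed function produces the same
# outputs as the original for the given inputs.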
def test_straightline(self):
def f(x):
return x * x
self.assertTransformedEquivalent(f, 2)
def test_superfluous_returns(self):
def f():
retval = 1
return retval
retval = 2 # pylint:disable=unreachable
return retval
self.assertTransformedEquivalent(f)
def test_superfluous_returns_adjacent(self):
def f():
return 1
return 2 # pylint:disable=unreachable
self.assertTransformedEquivalent(f)
def test_conditional(self):
def f(x):
if x > 0:
return x
else:
return x * x
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, -2)
def test_conditional_missing_else(self):
def f(x):
if x > 0:
return x
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, -2)
def test_conditional_missing_else_then_default(self):
def f(x):
if x > 0:
return x
return x * x
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, -2)
def test_conditional_else_only_then_default(self):
def f(x):
if x < 0:
x *= x
else:
return x
return x
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, -2)
def test_conditional_nested(self):
def f(x):
if x > 0:
if x < 5:
return x
else:
return x * x
else:
return x * x * x
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, -2)
self.assertTransformedEquivalent(f, 5)
def test_context_manager(self):
def f(x):
with ops.name_scope(''):
return x * x
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, -2)
def test_context_manager_in_conditional(self):
def f(x):
if x > 0:
with ops.name_scope(''):
return x * x
else:
return x
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, -2)
def test_conditional_in_context_manager(self):
def f(x):
with ops.name_scope(''):
if x > 0:
return x * x
else:
return x
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, -2)
def test_no_return(self):<|fim▁hole|>
def f(x):
x *= x
self.assertTransformedEquivalent(f, 2)
def test_nested_function(self):
def f(x):
def inner_fn(y):
if y > 0:
return y * y
else:
return y
return inner_fn(x)
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, -2)
def test_nested_function_in_control_flow(self):
def f(x):
if x:
def inner_fn(y):
return y
inner_fn(x)
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, -2)
def test_for_loop(self):
def f(n):
for _ in range(n):
return 1
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, 0)
def test_while_loop(self):
def f(n):
i = 0
s = 0
while i < n:
i += 1
s += i
if s > 4:
return s
return -1
self.assertTransformedEquivalent(f, 0)
self.assertTransformedEquivalent(f, 2)
self.assertTransformedEquivalent(f, 4)
def test_null_return(self):
def f(n):
if n > 4:
return
return
self.assertTransformedEquivalent(f, 4)
self.assertTransformedEquivalent(f, 5)
def test_nested_multiple_withs(self):
def f(x):
v = []
while x > 0:
x -= 1
with ops.name_scope(''):
if x % 2 == 0:
return v
with ops.name_scope(''):
v.append(x)
v.append(x)
return v
self.assertTransformedEquivalent(f, 0)
self.assertTransformedEquivalent(f, 1)
self.assertTransformedEquivalent(f, 3)
self.assertTransformedEquivalent(f, 4)
def test_multiple_returns_in_nested_scope(self):
def f(a):
v = []
for x in a:
x -= 1
if x > 100:
return v
try:
raise ValueError('intentional')
except ValueError:
return v
v.append(x)
return v
self.assertTransformedEquivalent(f, [])
self.assertTransformedEquivalent(f, [1])
self.assertTransformedEquivalent(f, [2])
self.assertTransformedEquivalent(f, [1, 2, 3])
if __name__ == '__main__':
test.main()<|fim▁end|> | |
<|file_name|>main_player.js<|end_file_name|><|fim▁begin|>function mainPlayer() {
this.x = 100;
this.y = 200;
this.d = "u"; // "d", "l", "r", and combinations?
this.sword = false;
this.walking = false;
this.bomb = false;
this.bow = false;
this.keyFrameRow = 0;
this.keyFrame = 0;
this.keyFrameN = 4;
this.updateFrameN = 2;
this.updateFrameDelay = 0;
this.swordType = "eagle";
this.speaking = false;
this.text = "";
this.displayq = [];
this.walkq = [];
//this.walkQueue = 0;
this.walkFlag = {
"left" : false, "right" : false, "up":false, "down":false,
"leftq":0, "rightq":0, "upq":0, "downq":0
};
this.walkKey = ["up", "right", "down", "left"];
this.walkLookup = { "up" : 0, "right" : 1, "down":2, "left":3 };
this.walkTransitionDelay = 2;
this.walkDisplayQ = [];
this.dx = 12;
this.dy = 12;
this.img_w = 16;
this.img_h = 16;
this.img_x = 0;
this.img_y = 0;
this.world_w = 64;
this.world_h = 64;
//this.swordReady = true;
this.bombReady = true;
this.bowReady = true;
this.swordKeyState = "idle"; // "fire", "warm"
this.bombKeyState = "idle"; // "fire", "warm"
this.bowKeyState = "idle"; // "fire", "warm"
this.swordKeyEvent = false;
this.swordDelayN = 3;
this.swordDelay = 0;
this.bombEvent = false;
// 0 is right. past StepN/2 is below
// so ccw
//
this.bowStep = 0;
//this.bowStepN = 32;
this.bowStepN = 16;
this.bow_da = 2.0*Math.PI/this.bowStepN;
this.bowActive = false;
this.bowEvent = "idle";
this.bowTurnDelay = 0;
this.bowTurnDelayN = 1;
this.displayq.push({ "d":"down", "t":-1 });
this.inputEvent = {};
this.state = "idle";
//SWORD STATE
this.swordKeyUp = true;
this.swordJitterX = 0;
this.swordJitterY = 0;
}
mainPlayer.prototype.init = function(x, y, d) {
this.x = x;
this.y = y;
this.d = d;
}
mainPlayer.prototype.actualDirection = function() {
var n = this.displayq.length;
return this.displayq[n-1].d;
}
mainPlayer.prototype.resetDisplayDirection = function(d) {
this.displayq = [{"d":d, "t":5 }];
}
mainPlayer.prototype.addToWalkq = function(d) {
for (var i=0; i<this.walkq.length; i++) {
if (this.walkq[i].d == d) { return; }
}
this.walkq.push({ "d":d, "t":5 });
}
mainPlayer.prototype.displayDirectionTick = function() {
if (this.displayq.length>1) {
this.displayq[0].t--;
if (this.displayq[0].t<0) { this.displayq.shift(); }
}
}
mainPlayer.prototype.updateDisplayDirection = function() {
var curdir = this.currentWalkDirection();
if (curdir == "stop") {
this.displayDirectionTick();
console.log(">>>", this.displayq);
return;
}
var n = this.displayq.length;
if (curdir != this.displayq[n-1].d) {
var old_dir = this.displayq[n-1].d;
this.displayq = [];
var dl = this.walkTransitionDelay;
// 'up' just looks better to me, don't know why...
//
if ((old_dir == "left") && (curdir == "right")) {
this.displayq.push({ "d":"up", "t":dl });
}
else if ((old_dir == "right") && (curdir == "left")) {
this.displayq.push({ "d":"up", "t":dl });
}
else if ((old_dir == "up") && (curdir == "down")) {
this.displayq.push({ "d":"right", "t":dl });
}
else if ((old_dir == "down") && (curdir == "up")) {
this.displayq.push({ "d":"left", "t":dl });
}
this.displayq.push({ "d":curdir, "t":-1 });
}
this.displayDirectionTick();
}
mainPlayer.prototype.updateWalkingFrame = function() {
if (this.updateFrameDelay==0) {
this.keyFrame++;
if (this.keyFrame >= this.keyFrameN) { this.keyFrame=0; }
this.updateFrameDelay = this.updateFrameN-1;
} else {
this.updateFrameDelay--;
}
}
// walking, bow and sword are mutually exclusive
// bomb is a modifier to walking.
//
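// Informal state sketch (derived from update() below; the names are the
// values actually assigned to this.state):
// "idle" -> "walking" while a movement flag is held;
// "idle"/"walking" -> "swordAttack" for swordDelayN frames, then back to
// "walking" or "idle" depending on which movement flags are still held.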
mainPlayer.prototype.update = function() {
g_painter.dirty_flag = true;
for (var ev in this.inputEvent) {
//if (!this.inputEvent[ev]) { continue; }
if (ev == "swordKeyDown") {
if ((this.state == "swordAttack") ||
(this.state == "bow")) { continue; }
if (!this.swordReady()) { continue; }
if (!this.swordKeyUp) { continue; }
console.log("attack!");
this.swordAttack();
//var curdir = this.currentWalkDirection();
var curdir = this.actualDirection();
this.resetDisplayDirection(curdir);
console.log("??", curdir);
continue;
}
if (ev == "swordKeyUp") {
this.swordKeyUp = true;
continue;
}
if (ev == "bowKeyDown") {
continue;
}
if (ev == "bowKeyUp") {
continue;
}
if (ev == "upKeyDown") {
if (!this.walkFlag["up"]) { this.addToWalkq("up"); }
this.walkFlag["up"] = true;
continue;
}
if (ev == "upKeyUp") {
this.walkFlag["up"] = false;
continue;
}
if (ev == "downKeyDown") {
if (!this.walkFlag["down"]) { this.addToWalkq("down"); }
this.walkFlag["down"] = true;
continue;
}
if (ev == "downKeyUp") {
this.walkFlag["down"] = false;
continue;
}
if (ev == "leftKeyDown") {
if (!this.walkFlag["left"]) { this.addToWalkq("left"); }
this.walkFlag["left"] = true;
continue;
}
if (ev == "leftKeyUp") {
this.walkFlag["left"] = false;
continue;
}
if (ev == "rightKeyDown") {
if (!this.walkFlag["right"]) { this.addToWalkq("right"); }
this.walkFlag["right"] = true;
continue;
}
if (ev == "rightKeyUp") {
this.walkFlag["right"] = false;
continue;
}
console.log("ev", ev);
}
//console.log("state:", this.state);
this.inputEvent = {};
// initial processing of input is done.
//
if (this.state == "idle") {
if (this.walkFlag["up"] || this.walkFlag["down"] || this.walkFlag["left"] || this.walkFlag["right"]) {
this.state = "walking";
this._updateWalkQueue();
this.updateWalkingFrame();
}
this.updateDisplayDirection();
return;
}
if (this.state == "walking") {
var xy = this.dxdy();
this.x += xy[0];
this.y += xy[1];
this.updateWalkingFrame();
if (this.walkFlag["up"] || this.walkFlag["down"] || this.walkFlag["left"] || this.walkFlag["right"]) {
this.state = "walking";
this._updateWalkQueue();
} else {
this.state = "idle";
}
this.updateDisplayDirection();
return;
}
if (this.state == "swordAttack") {
if (this.swordDelay==0) {
this.swordRetract();
if (this.walkFlag["up"] || this.walkFlag["down"] || this.walkFlag["left"] || this.walkFlag["right"]) {
this.state = "walking";
this._updateWalkQueue();
} else {
this.state = "idle";
}
} else {
this.swordDelay--;
}
return;
}
}
mainPlayer.prototype.swordAttack = function() {
//DEBUG
console.log("sword attack");
this.sword = true;
this.swordDelay = this.swordDelayN-1;
this.state = "swordAttack";
this.swordKeyUp = false;
var jx = 5;
var jy = 5;
var curdir = this.actualDirection();
if (curdir == "up") {
this.swordJitterX = Math.floor((Math.random()-0.3)*jx)
this.swordJitterY = Math.floor((Math.random()+1)*jy)
} else if (curdir == "down") {
this.swordJitterX = Math.floor((Math.random()-0.5)*jx)
this.swordJitterY = Math.floor((Math.random()-1)*jy)
} else if (curdir == "right") {
this.swordJitterX = Math.floor((Math.random()-1)*jx)
this.swordJitterY = Math.floor((Math.random()-0.5)*jy)
} else if (curdir == "left") {
this.swordJitterX = Math.floor((Math.random()+1)*jx)
this.swordJitterY = Math.floor((Math.random()-0.5)*jy)
}
}
mainPlayer.prototype.swordRetract = function() {
this.sword = false;
}
mainPlayer.prototype.bombPrepare = function() {
console.log("bomb prepare");
this.bomb = true;
}
mainPlayer.prototype.bombThrow = function() {
console.log("bomb throw");
this.bomb = false;
}
mainPlayer.prototype.keyDownTransition = function(s) {
if (s == "idle") { return "fire"; }
if (s == "fire") { return "warm"; }
if (s == "warm") { return "warm"; }
return "fire";
}
// keyDown events only set flags that will be polled
// by the 'update' function. No state other than
// setting the flags should happen here.
//
mainPlayer.prototype.keyDown = function(code) {
// 'z', bomb
//
if (code == 90) {
this.inputEvent["bombKeyDown"] = true;
}
// 'x', sword
//
if (code == 88) {
this.inputEvent["swordKeyDown"] = true;
}
// 'c', bow
//
if (code == 67) {
//this.bowActive = true;
this.inputEvent["bowKeyDown"] = true;
}
// left
//
if (code==37) {
this.inputEvent["leftKeyDown"] = true;
}
// up
//
else if (code == 38) {
this.inputEvent["upKeyDown"] = true;
}
// right
//
else if (code == 39) {
this.inputEvent["rightKeyDown"] = true;
}
// down
//
else if (code == 40) {
this.inputEvent["downKeyDown"] = true;
}
}
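// Example wiring (a minimal sketch, not part of the original file; assumes
// a global `player` instance and standard DOM key events):
// document.addEventListener("keydown", function (e) { player.keyDown(e.keyCode); });
// document.addEventListener("keyup", function (e) { player.keyUp(e.keyCode); });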
// keyUp events only set flags that will be polled
// by the 'update' function. No state other than
// setting the flags should happen here.
//
mainPlayer.prototype.keyUp = function(code) {
// 'x', sword
//
if (code == 88) {
this.inputEvent["swordKeyUp"] = true;
}
// 'z', bomb
//
if (code == 90) {
this.inputEvent["bombKeyUp"] = true;
}
// 'c', bow
//
if (code == 67) {
this.inputEvent["bowKeyUp"] = true;
}
// left
//
if (code==37) {
this.inputEvent["leftKeyUp"] = true;
}
// up
//
else if (code == 38) {
this.inputEvent["upKeyUp"] = true;
}
// right
//
else if (code == 39) {
this.inputEvent["rightKeyUp"] = true;
}
// down
//
else if (code == 40) {
this.inputEvent["downKeyUp"] = true;
}
}
mainPlayer.prototype._updateWalkQueue= function() {
var newq = [];
for (var i=0; i<this.walkq.length; i++) {
var key = this.walkq[i].d;
if (this.walkFlag[key]) { newq.push(this.walkq[i]); }
}
this.walkq = newq;
}
// returns text direction
//
mainPlayer.prototype.currentWalkDirection = function() {
var kr = -1;
var n = this.walkq.length-1;
if (n>=0) {
kr = this.walkLookup[this.walkq[n].d];
}
if (kr<0) { return "stop"; }
return this.walkKey[kr];
}
// returns the [dx, dy] movement delta for the current walk direction
//
mainPlayer.prototype.dxdy = function() {
var dx = this.dx;
var dy = this.dy;
var xy = { "up":[0,-dy], "right":[dx,0], "down":[0,dy], "left":[-dx,0], "stop":[0,0] };
var d = this.currentWalkDirection();
return xy[d];
}
// returns the [dx, dy] jitter offset for the sword swing
//
mainPlayer.prototype.sword_dxdy = function() {
var dx = this.swordJitterX;
var dy = this.swordJitterY;
return [dx,dy];
var xy = { "up":[dx,-dy], "right":[dx,dy], "down":[dx,dy], "left":[-dx,dy], "stop":[0,0] };
var d = this.currentWalkDirection();
return xy[d];
}
mainPlayer.prototype.stopWalking = function() {
this.walking = false;
}
mainPlayer.prototype.swordReady = function() {
if (this.swordDelay==0) {return true; }
return false;
}
mainPlayer.prototype.swordUpdate = function() {
if (this.swordDelay>0) {
this.swordDelay--;
} else {
this.swordDelay=0;
}
}
mainPlayer.prototype.currentDisplayDirection = function() {
var a = "up";
if (this.displayq.length>0) { a = this.displayq[0].d; }
return a;
}
mainPlayer.prototype.draw = function() {
var kf = this.keyFrame;
var d = this.currentDisplayDirection();
var kr = this.walkLookup[d];
var imgx = kf*16;
var imgy = kr*16;
if (this.sword) {
imgy = 4*16;
imgx = kr*16;
}
if (!this.bow) {
//g_imgcache.draw_s("noether", imgx, imgy, 16, 16, this.x, this.y, this.world_w, this.world_h);
}
if ((this.state == "idle") || (this.state == "walking")) {
g_imgcache.draw_s("noether", imgx, imgy, 16, 16, this.x, this.y, this.world_w, this.world_h);
}
if (this.sword) {
var a = 0.0;
var ix = 0;
var iy = 0;
var di = this.currentDisplayDirection();
if (di == "up") {
a = -Math.PI/2.0;
iy=-16;
}
else if (di == "right") {
a = 0.0;
ix = 16;
}
else if (di == "down") {
a = Math.PI/2.0;
iy = 16;
//ix = 16;
}
else if (di == "left") {
a = Math.PI;
ix = -16;
}
ix *= 4;
iy *= 4;
var s_dxy = this.sword_dxdy();
ix += s_dxy[0];
iy += s_dxy[1];
// tree sword
//g_imgcache.draw_s("item", 0, 0, 16, 16, this.x+ix, this.y+iy, this.world_w, this.world_h, a);
// zephyr sword
//g_imgcache.draw_s("item", 16, 0, 16, 16, this.x+ix, this.y+iy, this.world_w, this.world_h, a);
// falcon sword
//g_imgcache.draw_s("item", 32, 0, 16, 16, this.x+ix, this.y+iy, this.world_w, this.world_h, a);
// eagle sword
g_imgcache.draw_s("item", 48, 0, 16, 16, this.x+ix, this.y+iy, this.world_w, this.world_h, a);
// hammer
//g_imgcache.draw_s("item", 64 , 0, 16, 16, this.x+ix, this.y+iy, this.world_w, this.world_h, a);
// amethyst wand
//g_imgcache.draw_s("item", 112, 0, 16, 16, this.x+ix, this.y+iy, this.world_w, this.world_h, a);
// emerald wand
//g_imgcache.draw_s("item", 0, 16, 16, 16, this.x+ix, this.y+iy, this.world_w, this.world_h, a);
// ank wand
//g_imgcache.draw_s("item", 16, 16, 16, 16, this.x+ix, this.y+iy, this.world_w, this.world_h, a);
// bomb
//ix = 0;
//iy = -4*8;
//a = 0;
//g_imgcache.draw_s("item", 80, 16, 16, 16, this.x+ix, this.y+iy, this.world_w, this.world_h, a);
g_imgcache.draw_s("noether", imgx, imgy, 16, 16, this.x, this.y, this.world_w, this.world_h);
}
if (this.bomb) {
var ix = 0, iy = -8;
var di = this.currentDisplayDirection();
if (di == "up") {
ix = 0;
iy = -8;
}
else if (di == "right") {
ix = -2;
}
else if (di == "down") {
ix = -1;
iy = -10;
}
else if (di == "left") {<|fim▁hole|>
g_imgcache.draw_s("item", 80, 16, 16, 16, this.x+ix, this.y+iy, this.world_w, this.world_h);
}
if (this.bow) {
var imx = this.bowStep*2*16;
g_imgcache.draw_s("rotbow", imx, 0, 16, 16, this.x, this.y, this.world_w, this.world_h);
} else {
//var imx = kr *8 *16;
//g_imgcache.draw_s("rotbow", imx, 0, 16, 16, this.x, this.y, this.world_w, this.world_h);
}
}<|fim▁end|> | ix = 2;
}
ix *= 4;
iy *= 4; |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use std::fmt;
use std::error;
use std::marker;
#[derive(Debug)]
pub enum BencodeError
{
ParseError(&'static str)
}
impl fmt::Display for BencodeError
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
BencodeError::ParseError(err) => write!(f, "oxidation_bencode parse error: {}", err),
}
}
}
impl error::Error for BencodeError
{
fn description(&self) -> &str {
match *self {
BencodeError::ParseError(err) => err
}
}
fn cause(&self) -> Option<&error::Error> {
match *self {
BencodeError::ParseError(_) => None
}
}
}
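// Usage sketch (hypothetical, not part of this module):
// let err = BencodeError::ParseError("unexpected end of input");
// println!("{}", err); // oxidation_bencode parse error: unexpected end of input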
pub trait Bencodable<T> {
fn to_bencode_string(&self) -> String;
fn from_bencode_string(str: &'static str) -> Result<Self,BencodeError> <|fim▁hole|> fn extract_content(&self) -> T;
}<|fim▁end|> | where Self: marker::Sized; |
<|file_name|>observable-util.ts<|end_file_name|><|fim▁begin|>/*
* Copyright 2015 Trim-marks Inc.
*
* This file is part of Vivliostyle UI.
*
* Vivliostyle UI is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Vivliostyle UI is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Vivliostyle UI. If not, see <http://www.gnu.org/licenses/>.
*/
import ko, { PureComputed, Observable } from "knockout";
export type ReadonlyObservable<T> = {
getter: PureComputed<T>;<|fim▁hole|> readonlyObservable<T>(value: T): ReadonlyObservable<T> {
const obs = ko.observable(value);
return {
getter: ko.pureComputed(() => obs()),
value: obs,
};
},
};
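// Usage sketch (hypothetical): keep `value` private to the owner and hand out
// only `getter` for a read-only view.
// const count = util.readonlyObservable(0);
// count.value(5);
// count.getter(); // 5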
export default util;<|fim▁end|> | value: Observable<T>;
};
const util = { |
<|file_name|>EN.ts<|end_file_name|><|fim▁begin|>// tslint:disable:max-line-length
export default {
About: {
About: "ABOUT THE PROGRAM",
AppCoreVersion: "The kernel version of the application:",
AppVersion: "Version",
CSPVersion: "CryptoPro CSP:",
Compatible: "(compatibility with Electron 1.6.6, OpenSSL 1.0.2k)",
Contacts: "Contacts",
CspVersion: "CryptoProviders",
FeedBack: "FEEDBACK",
Info: "Information",
about: "About the program",
about_programm: "Application Trusted eSign is designed to create electronic signature and file encryption using digital certificates and cryptographic algorithms",
address: "Petrova St, 1, Yoshkar-Ola, Mari El Republic, Russia, 424033",
build_number: "Build number",
company_name: "Company Cifrovie Technologii",
copyright: "© 2016-2019",
developer: "Company-developer",
email: "Email",
error_message_send: "Message failed to send",
feedback_description: "Report any problems to the developers or suggest ideas for improving the program",
info: "[email protected]",
info_about_certificate: "Centralized management of local and cloud-based certificate storages",
info_about_encrypt: "Protect your documents and any other content by encrypting them for one or more recipients",
info_about_product: "Trusted eSign is a desktop app that allows you to sign documents and encrypt files using digital certificates and cryptographic algorithms.",
info_about_sign: "Sign documents of any type and other content",
link_facebook: "https://www.facebook.com/cryptoarm/",
link_trusred: "http://www.trusted.ru/",
link_twitter: "https://twitter.com/cryptoarm",
link_vk: "http://vk.com/cryptoarm",
message: "Message",
message_send: "The message is sent",
phone: {
number_one: "+7 (8362) 33-70-50",
number_three: "+7 (800) 555-65-81",
number_two: "+7 (499) 705-91-10",
},
product_NAME: "Trusted eSign",
product_name: "Trusted eSign",
send: "Send",
username: "Name",
version: "1.5.2",
version_app: "Application version",
version_full: "Version: 1.5.2",
},
Agreement: {
Agreement: "AGREEMENT",
agreement: "Agreement",
},
Algorithm: {
id_Gost28147_89: "GOST 28147-89",
id_GostR3410_2001: "GOST R 34.10-2001",
id_GostR3410_94: "GOST R 34.10-94",
id_GostR3411_94: "GOST R 34.11-94",
id_GostR3411_94_with_GostR3410_2001: "GOST R 34.11/34.10-2001",
id_GostR3411_94_with_GostR3410_94: "GOST R 34.11/34.10-94",
id_tc26_gost3410_12_256: "GOST R 34.10-2012 256 bit",
id_tc26_gost3410_12_512: "GOST R 34.10-2012 512 bit",
id_tc26_gost3411_12_256: "GOST R 34.11-2012 256 bit",
id_tc26_gost3411_12_512: "GOST R 34.11-2012 512 bit",
id_tc26_signwithdigest_gost3410_12_256: "GOST R 34.11-2012/34.10-2012 256 bit",
id_tc26_signwithdigest_gost3410_12_512: "GOST R 34.11-2012/34.10-2012 512 bit",
},
CRL: {
authorityKeyid: "Authority key id",
crlNumber: "CRL number",
crl_delete_failed: "The CRL delete failed",
crl_delete_ok: "The CRL is successfully deleted",
crl_export: "Export",
crl_export_cancel: "Cancel the CRL export",
crl_export_failed: "The CRL export failed",
crl_export_ok: "The CRL is successfully exported",
crl_import_failed: "The CRL import failed",
crl_import_ok: "The CRL is successfully imported",
crls: "CRLs",
delete_crl: "Delete CRL",
export_crl: "Export the CRL",
lastUpdate: "Last update",
nextUpdate: "Next update",
realy_delete_crl: "Do you really want to delete the CRL?",
},
CSR: {
algorithm: "Algorithm",
common_name: "CN",
container: "Сontainer (will generated)",
country: "Country",
create_request: "Create request",
create_request_created: "Certificate request was successfully created",
create_selfSigned: "Create self-signed certificate",
csp_label: "Cryptography service provider",
csp_microsoft_base: "Microsoft Base Cryptographic Provider v1.0",
csp_openssl: "OpenSSL RSA",
eku_clientAuth: "Client authentication",
eku_codeSigning: "Code signing",
eku_emailProtection: "Email protection",
eku_serverAuth: "Server authentication",
email_address: "Email Address",
exportable_key: "Mark private keys as exportable",
extKeyUsage: "Extended key usage",
fill_required_fields: "Fill out the required fields",
generate_new_key: "Create new key pair",
go_to_csr_folder: "Go to folder",
inn: "INN",
key_generation_error: "Error generating key pair",
key_length: "Key length",
key_usage: "Key usage",
key_usage_cRLSign: "CRL sign",
key_usage_dataEncipherment: "Data encipherment",
key_usage_decipherOnly: "Decipher only",
key_usage_digitalSignature: "Digital signature",
key_usage_encipherOnly: "Encipher only",
key_usage_encrypt: "Data encipherment",
key_usage_group: "Key usage group",
key_usage_keyAgreement: "Key agreement",
key_usage_keyCertSign: "Certificate sign",
key_usage_keyEncipherment: "Key encipherment",
key_usage_nonRepudiation: "Nonrepudiation",
key_usage_sign: "Digital signature",
key_usage_sign_encrypt: "Digital signature and data encipherment",
keys_params: "Key parameters",
locality_name: "Locality Name (eg, city)",
not_after: "Valid not after",
ogrnip: "OGRNIP",
organization_name: "Organization Name (eg, company)",
organizational_unit_name: "Organizational unit name",
province_name: "State or Province Name (full name)",
snils: "SNILS",
subject_params: "Subject parameters",
template_additional_fields: "Template with additional fields",
template_default: "Default template",
template_kep_fiz: "Qualified electronic signature certificate for an individual",
template_kep_ip: "Qualified electronic signature certificate for an individual entrepreneur",
template_label: "Certificate template",
title: "Position",
},
Certificate: {
Certificate: "CERTIFICATES",
FCertificates: "MANAGMENT",
Select_Cert_Encrypt: "SELECT THE RECIPIENT'S CERTIFICATE",
Select_Cert_Sign: "SELECT THE SIGNATURE CERTIFICATE",
absent: "Private key is absent",
cert: "Certificates",
cert_chain: "Certificate hierarchy",
cert_chain_info: "Certificate chain info",
cert_chain_status: "Certificate chain status",
cert_chain_status_false: "not valid",
cert_chain_status_true: "valid",
cert_delete_failed: "The certificate delete failed",
cert_delete_ok: "The certificate is successfully deleted",
cert_export: "Export the certificate",
cert_export_cancel: "Cancel the certificate export",
cert_export_failed: "The certificate export failed",
cert_export_ok: "The certificate is successfully exported",
cert_import: "Import the certificate",
cert_import_failed: "The certificate import failed",
cert_import_from_file: "Import from file",
cert_import_ok: "The certificate is successfully imported",
cert_imported: "The certificate is already imported",
cert_info: "Certificate information",
cert_load_failed: "The certificate file cannot be read",
cert_not_found: "The certificate is not found",
cert_not_select: "A certificate is not selected",
cert_status_false: "not valid",
cert_status_true: "valid",
cert_trusted_import_failed: "The trusted certificate import failed",
cert_trusted_import_ok: "The trusted certificate is successfully imported",
cert_valid: "Is valid until",
certificate: "A certificate",
certs: "Certificates",
certs_encrypt: "Encryption certificates",
certs_getters: "The recipients' certificates",
certs_intermediate: "Intermediate certificates",
certs_my: "Personal Certificates",
certs_other: "Other user certificates",
certs_request: "Certificate Enrollment Request",
certs_root: "Trusted Root Certificates",
certs_token: "External token/card certificates",
delete_certificate: "Delete certificate",
export_cert: "Export the certificate",
import_key: "Import the key",
issuer: "Certificate Authority",
issuer_name: "Issuer",
key_import_failed: "Key import failed",
key_import_ok: "The key is attached to the certificate",
organization: "Organization",
present: "Private key is present",
priv_key: "Private key",
public_key_algorithm: "Public key algorithm",
realy_delete_certificate: "Do you really want to delete the certificate?",
serialNumber: "Serial number",
signature_digest_algorithm: "Signature digest algorithm",
subject: "Certificate owner",
thumbprint: "Thumbprint",
version: "Version",
},
CloudCSP: {
auth: "Authorization server",
cert_import_from_cloudCSP: "Import from DSS",
certificates_import_fail: "One or more certificates from DSS were not imported",
certificates_import_success: "Certificates from DSS successfully imported",
cloudCSP: "Cloud CSP",
no_installed_csp5: "Only for CryptoPro CSP 5",
request_error: "Request error",
rest: "DSS server",
},
Common: {
Back: "Back",
add_all_files: "Add files from all the subfolders?",
add_files: "Add files",
apply: "Apply",
back: "Back",
cancel: "Cancel",
clear: "Clear",
close: "Close",
copy: "Copy",
delete: "Delete",
en: "English",
error: "ERROR",
exit: "EXIT",
files_not_found: "Files are not found: perhaps they were removed or renamed",
goOver: "Go over",
next: "Next",
no: "No",
or: "or",
product: "Product",
read_file_error: "Reading from the file error",
ready: "Ready",
reset: "Reset",
ru: "Russian",
subject: "Owner",
update: "Update",
update_list: "Update list",
write_file_error: "Error: write file",
yes: "Yes",
},
Containers: {
Containers: "CONTAINERS",
certificateInfo: "Certificate information",
contNotSelected: "Container not selected",
container_delete_failed: "The container delete failed",
container_delete_ok: "The container is successfully deleted",
containers: "Containers",
containersNotFound: "Containers not found",
delete_container: "Delete container by certificate (not recommended)",
installCertificate: "Install certificate",
invalid_character: "Invalid character",
},
Csp: {
cpcspPKZIVersion: "CryptoPro CSP product version",
cpcspSKZIVersion: "CryptoPro CSP SKZI version",
cspErr: "Error checking CSP parameters",
libcapi: "Cannot find libcapi. Install CryptoPro CSP",
noCPLicense: "Error checking the CryptoPro CSP license",
noProvider2001: "GOST 2001 provider not available",
noProvider2012: "GOST 2012 provider not available",
},
Diagnostic: {
close: "Close",
header: "Application Diagnostics",
problem_header: "Problems detected",
resolve_header: "Solutions",
},
Documents: {
Documents: "DOCUMENTS",
date: "Date",
delete_documents: "Delete documents",
docmenu_arhiver: "Archiver",
docmenu_dectypt: "Decrypt",
docmenu_enctypt: "Encrypt",
docmenu_remove: "Remove",
docmenu_removesign: "Remove sign",
docmenu_sign: "Sign",
docmenu_verifysign: "Verify sign",
documents: "Documents",
documents_arhive: "Created an archive of documents named ",
documents_deleted1: "Removed documents ( files: ",
documents_deleted2: " )",
encrypted_files: "Encrypted files",
filename: "Filename",
filesize: "Size",
filesize_from: "File size from",
filesize_to: "File size to",
go_to_documents_folder: "Go to folder",
mdate: "Modified date",
realy_delete_documents: "Do you really want to delete the selected documents?",
save_to_documents: "Save to Documents",
selected_all: "Selected all",
signed_files: "Signed files",
type: "Type",
},
Encrypt: {
Encrypt: "ENCRYPT",
Encryption: "FILES",
archive_files_before: "Archive files before encryption",
archive_name: "encrypt_files.zip",
decrypt: "Decrypt",
decrypt_key_failed: "The private key is not found",
delete_files_after: "Delete files after encryption",
encrypt: "Encrypt",
encrypt_and_decrypt: "Encrypt / Decrypt",
encrypt_setting: "Encryption settings",
files_archived: "The files are archived",
files_archived_failed: "File archiving failed",
files_decrypt: "The files are decrypted",
files_decrypt_failed: "Error loading the key",
files_encrypt: "The files are encrypted",
files_encrypt_failed: "File encryption failed",
search_decrypt_cert_failed: "The decryption certificate is not found",
},
Events: {
Operations_log: "OPERATIONS LOG",
operations_log: "Operations log",
},
EventsFilters: {
all: "All",
certificate_generation: "Certificate generation",
certificate_import: "Certificate import",
crl_import: "CRL import",
csr_generation: "CSR generation",
date: "Date",
decrypt: "Decrypt",
delete_certificate: "Remove certificate",
delete_container: "Remove container",
encrypt: "Encrypt",
level_all: "All",
level_error: "Error",
level_info: "Info",
no_found_events: "No found events",
pkcs12_import: "Import PKCS12",
sign: "Sign",
unsign: "Unsign",
write_object_for_filter: "Enter an object to filter by",
write_user_name: "Enter a user name",
},
EventsTable: {
date_and_time: "Date and time",
goto_current_logfile: "Go to the current log",
load_archive_logfile: "Load archive operations log",
no_rows: "No rows",
operation: "Operation",
operation_object: "Operation object",
operation_result: "Operation result",
search_in_doclist: "Search in documents list",
search_in_table: "Search in operations log",
status: "Status",
user_name: "User",
},
Export: {
export: "Export",
export_certificate: "Export certificate",
export_crl_format_base64: "Base-64 encoded X509_CRL (.CRL)",
export_crl_format_der: "DER encoded binary X509_CRL (.CRL)",
export_format: "Export file format",<|fim▁hole|> export_format_base64: "Base-64 encoded X509 (.CER)",
export_format_der: "DER encoded binary X509 (.CER)",
export_format_pkcs12: "Personal information exchange PKCS#12 (PFX)",
export_params: "Export parameters",
export_private_key: "Export private key",
export_private_key_with_certificate: "Export the private key with the certificate",
export_set_encoding: "Choose the encoding to use in the output file",
export_set_password: "Protect the private key by using a password",
no_export_private_key: "Do not export the private key",
},
Filters: {
filters_settings: "Filters settings",
},
Help: {
Header1: "REFERENCE HELP BY THE TRUSTED ESIGN PROGRAM",
Header2: "CONTENT OF THE REFERENCE HELP",
Header3: "1. GETTING STARTED WITH THE APPLICATION. INTERFACE OF THE MAIN WINDOW",
Header4: "2. MASTER OF LICENSE INSTALLATION",
Header5: "3. MASTER OF CREATION / ELECTRONIC SIGNATURE CHECKS",
Header6: "4. MASTER OF ENCRYPTION / DISCRIPTION OF FILES",
Header7: "5. MASTER OF CERTIFICATES MANAGEMENT AND KEYS",
Help: "HELP",
Important: "Important!",
Paragraf_1_1: "This help is a short reference on the interface of the Trusted eSign application and does not cover all the features of installation, configuration and operation.",
Paragraf_1_2a: "The full version of the documentation for the software product can be obtained from the link",
Paragraf_1_2b: "КриптоАРМ ГОСТ Руководство пользователя.pdf",
Paragraf_1_3a: "Application Trusted eSign is a commercial product and for its full-fledged work it is necessary to purchase a license. You can get acquainted with the conditions of license acquisition in the online store ",
Paragraf_1_3b: "КриптоАРМ.RU",
Paragraf_2_1: "1. Getting started with the application. Main window interface",
Paragraf_2_2: "2. The License Installation Wizard",
Paragraf_2_3: "3. Wizard for creating / verifying an electronic signature",
Paragraf_2_4: "4. Encryption / Encryption Wizard",
Paragraf_2_5: "5. Certificate Management Wizard",
Paragraf_3_1: "When you start the Trusted eSign application, the main application window appears, as shown in the figure below. In the main window there are buttons for navigating to the main application wizards: the wizard for creating / verifying an electronic signature, the encryption / decryption wizard, and the wizard for managing certificates of connected storage.",
Paragraf_3_2: "In the title bar of the application there is a button for calling the side menu (the menu is shown in the figure to the right) and a button for switching the interface language. Using the application menu, you navigate to the main wizards and you can additionally access the page containing information about the program, the license installation wizard, and quick reference help.",
Paragraf_3_3: "The main application window has a fixed size and cannot be resized. The application saves all global settings made by the user; saving occurs when the application is closed. The actions performed in the wizards (selection of certificates, selection of files, etc.) are also saved, so when you switch to another wizard (application page) and return, you do not have to repeat the same operations.",
Paragraf_4_1: "For the correct operation of the application, you must install a license for this software product. If, at the application startup, there are pop-up messages about the absence of a license expired in the license period, the impossibility of reading the license file, this indicates that the application will not correctly perform the operations of signing and decrypting files. The presence of a valid license for the application should be given special attention.",
Paragraf_4_2: "To install the license and get information about it, the application provides a special wizard (shown in the figure below), which allows you to install a license from a file or from the clipboard. In either case, the wizard saves the license as a license.lic file and places it in the user's directory.",
Paragraf_4_3a: "After the license has been successfully installed, the program must be restarted. If, when entering a license key, you receive messages such as: the license key is not correct, the license is not suitable for this program, or the license has expired, contact the technical support of the developer company (",
Paragraf_4_3b: ").",
Paragraf_5_1: "In the Trusted eSign application, there is a wizard in which the operations for creating an electronic signature for one or multiple files are performed. In the same wizard, you perform the operations for creating a file signature and checking signed files. The wizard page is shown in the figure below.",
Paragraf_5_2: "The wizard page is divided into three areas. On the left is the selection area of the subscriber certificate. The certificate is selected from the dialog with the display of certificates from the repositories.",
Paragraf_5_3: "In the dialog for selecting a subscriber certificate, only certificates from the Personal Certificates section that have a private key binding are displayed. Other certificates are not displayed and are not available for use in this wizard.",
Paragraf_5_4: "To verify the signature, it is enough to select the files to be scanned - files with the extension .sig, which contain an electronic signature and click on the Check button. No additional manipulation is necessary when verifying the signature.",
Paragraf_5_5: "If the source file is not found automatically when checking the signature file separated from the signature file, its selection will be offered. The result of checking signatures is displayed as a message.",
Paragraf_5_6: "To view the files before creating a signature, you can use the context menu (see the figure below). The signature is checked for the selected files. The figure shows the result of checking the signature of one of the files. Additionally, you can remove the signature from the signed files. In this case, the original file is saved, and the signature is deleted.",
Paragraf_6_1: "In the application Trusted eSign there is a wizard in which the operations of encryption and decryption of files are performed. To perform the encryption operation, the recipients of encrypted files are used as a list of certificates. Those users of the application who have a private key associated with one of the recipient certificates will be able to decrypt the files.",
Paragraf_6_2: "The Encryption / Decryption Wizard is shown in the figure below. The wizard page is divided into three areas: on the left is the selection area of the recipient certificates, below are the settings for performing the operations and on the right is the selection area of the source files and the result display.",
Paragraf_6_3: "When selecting the mode of deleting files after encryption, pay attention to providing access to encrypted files. To do this, you can add a personal certificate to the list of recipients, linked to a private key.",
Paragraf_6_4: "The figure at the right shows the dialog for selecting recipient certificates. The list of available certificates displays all certificates from the connected stores. The certificates are grouped by tabs. In the dialog, a certificate search operation is available.",
Paragraf_6_5: "In the wizard, you can modify the list of recipients and files by adding them. The encryption operation is performed only on a set of selected files. To view files before encryption, you can use the context menu (see the figure below).",
Paragraf_6_6: "Decrypt files by clicking the Decrypt button. To successfully perform the operation, one of the connected storages must contain a private key that is linked to the recipient's certificate. Otherwise an error will be issued.",
Paragraf_6_7: "If the files are successfully decrypted, the result is displayed in the same wizard and the files themselves are saved either next to the encrypted files or in the specified directory.",
Paragraf_7_1: "To manage certificates and keys, a separate wizard has been added to the application. The wizard page displays tabs with certificates grouped into them. When you first start the application, you must have a test certificate and a key attached to it in the Personal Certificates tab.",
Paragraf_7_2: "In order to import the new certificate into the repository, you can use the context menu - select the Import Certificate operation. In the dialog box that appears, select the certificate (BASE64 and DER encodings are supported) and the certificate is placed in the repository. ",
Paragraf_7_3: "Import of certificates from the repositories of supported providers is carried out at the level of using additional utilities. The most common situations are explained in the User's Guide.",
Work_App: "HOW THE APPLICATION WORKS",
help: "Help",
link_shop: "https://cryptoarm.ru/shop/trusted-esign",
link_shop_name: " КриптоАРМ.RU",
link_user_guide: "https://cryptoarm.ru/upload/docs/userguide-trusted-esign.pdf",
link_user_guide_name: " Trusted eSign v.1.2.11 Руководство пользователя.pdf",
video_failed: "This video cannot be played. Please check your Internet connection",
},
Key: {
key_load_failed: "Error loading the key",
},
License: {
About_License: "LICENSE INFORMATION",
About_License_CSP: "CRYPTOPRO CSP LICENSE INFORMATION",
Buy_license: "BUY LICENSE",
Enter_Key: "ENTER THE KEY",
Enter_license: "INSTALL LICENSE",
Entered: "ENTER",
License: "LICENSE",
License_overtimes: "INTERIM LICENSE",
License_reques_send: "SEND REQUEST",
License_request: "OBTAIN A TEMPORARY LICENSE",
enter_key: "Enter the license",
enter_key_csp: "Enter the license CryptoPro CSP",
entered_the_key: "Enter the key",
failed_key: "The license key is incorrect",
failed_key_find: "The license key is not found",
failed_match_key: "The license key file does not match the application",
failed_validity_key: "The license key has expired",
get_overtimes_license: "Obtaining a temporary license",
jwtErrorCode: "Unknown error code",
jwtErrorExpired: "The license has expired",
jwtErrorInternal: "Internal error while checking the license",
jwtErrorLoad: "Loading the license failed",
jwtErrorNoLicenseInStore: "No valid license found for this operation",
jwtErrorOperation: "An unknown operation was requested",
jwtErrorParsing: "Cannot parse the license token info",
jwtErrorProduct: "The license does not match the application",
jwtErrorSign: "The license has an invalid signature",
jwtErrorStarted: "The license period has not started yet",
jwtErrorStoreIsLocked: "The license store is locked",
jwtErrorStructure: "The license has an invalid token info structure",
jwtErrorTokenFormat: "The license token has an invalid format",
jwtGetLicense: "To buy a license go to:",
key: "The key",
key_file_name: "desktopkey.lic",
lic_file_choose: "Choose the license file",
lic_file_not_found: "The license file is not found",
lic_file_uncorrect: "The license file is not correct",
lic_key_correct: "The license key is valid (Days left: ",
lic_key_correct_days: "Error: license key is not correct",
lic_key_setup: "The license key is successfully set up",
lic_key_setup_fail: "The license key is not set up",
lic_key_uncorrect: "The license key is not correct",
lic_notafter: "Expiration date of the license",
lic_notbefore: "Issue date of the license",
lic_status: "The licence status",
lic_unlimited: "Unlimited",
license: "License",
license_correct: "Correct",
license_incorrect: "Incorrect",
license_key: "The license key",
license_request: "Request for a temporary license",
link_buy_license: "https://cryptoarm.ru/shop/cryptoarm-gost",
link_buy_license_csp: "https://cryptoarm.ru/shop/skzi-cryptopro-csp-4-0",
overtimes_license: "In the absence of a permanent license, you can request a temporary one. The temporary license is issued for a period of 14 days and provides full functionality of the application.",
overtimes_license_confirm: "To send a request for a 14-day temporary license for the Trusted eSign product, fill out the form below. A temporary license is issued once per workstation so that you can familiarize yourself with all the features of the software product.",
overtimes_license_error: "The request for a temporary license for the Trusted eSign product was rejected. Perhaps a license has already been issued for this email address. To resolve this issue, please contact technical support at [email protected].",
overtimes_license_ok: "A temporary license for the Trusted eSign product has been obtained. You can install it on the workstation. After installing the license, you must restart the application.",
serial_number: "Serial number",
Сlose: "CLOSE",
},
Problems: {
problem_1: "There is no crypto provider CryptoPro CSP",
problem_2: "There is no license for crypto provider CryptoPro CSP",
problem_3: "There is no license for Trusted eSign",
problem_4: "Could not check the parameters of crypto provider CryptoPro CSP",
problem_5: "Certificates not linked to a key container",
problem_6: "Trusted Crypto module is not loaded",
resolve_1_1: "The system does not have the CryptoPro CSP crypto provider installed. The further operation of the application is impossible and the application will be closed.",
resolve_1_2: "To solve this problem, install CryptoPro CSP version 5.0 and run the application again.",
resolve_1_3: "Distribution of the crypto provider CryptoPro CSP can be downloaded from the official CryptoPro website",
resolve_1_4: "Detailed instructions for installing CryptoPRO CSP on various platforms can be found in the product documentation, which is available at the link",
resolve_2_1: "The system does not have the correct license for CryptoPro CSP, which is necessary for full-function operation of the application.",
resolve_2_2: "To solve this problem, you can purchase a license for the product in the online store, available at the link ",
resolve_2_3: "After purchasing a license, you can install it through the application interface",
resolve_3_1: "In the application there is no installed license for the software product Crypto ARM GOST.",
resolve_3_2: "Without a license, operations related to access to the key medium will not be performed. For example, the operation of signing and decrypting files. This problem is not critical and the application is functional with limited functionality.",
resolve_3_3: "To solve this problem, you can purchase a license for the product in the online store, available at the link",
resolve_3_4: "After purchasing a license, you can install it through the application interface",
resolve_4_1: "When the application started, there were problems with reading the parameters of the detected CryptoPro CSP. The further operation of the application is impossible and the application will be closed.",
resolve_4_2: "To solve this problem, you need to install the required version of the CryptoPro CSP crypto provider and start the application again.",
resolve_4_3: "The CryptoPro installation procedure is described in the product documentation, which is available at the link ",
resolve_5_1: "In the application there are no certificates linked to the key container. For full work in the mode of signing and decrypting files, you need to install certificates.",
resolve_5_2: "To resolve this issue, install personal certificates following the steps described in the product documentation, available at the link ",
resolve_5_3: "If you have certificates on a token, you can set them with a link to the key container on the page ",
resolve_5_4: "It is also possible to install certificates from DSS or generate self-signed certificates ones on the page ",
resolve_5_4_1: "It is also possible generate self-signed certificates ones on the page ",
resolve_6_1: "When you run Crypto ARM GOST, the Trusted Crypto module is not loaded.",
resolve_6_2: "To solve this problem, open the application control panel and forward error messages displayed on the Console tab to the technical support.",
resolve_6_3: "Detailed instructions on how to open the control panel under the required platform can be found in the product documentation, available at the link ",
},
SVS: {
application_name: "Application Name",
hostname: "Hostname",
},
Services: {
SERVICES: "SERVICES",
Services: "Services",
add: "Add",
add_new_service: "Service",
already_started: "Please wait for the other transaction to finish",
change: "Change",
connect: "Connect",
cryptopro_dss: "CryptoPro DSS",
cryptopro_svs: "CryptoPro SVS 2.0",
delete: "Delete",
delete_service: "Delete service",
description: "Description",
displayed_text: "Displayed text",
empty_services_list: "List of services is empty",
get_sertificates: "Get certificates",
info: "Information about service",
invalid_character: "Invalid character",
invalid_phone_number: "Invalid phone number",
megafon: "Megafon",
mobile_number: "Mobile number (Megafon)",
mobile_number_already_exists: "Mobile number already exists",
name: "Name",
pin_code_for_container: "PIN code for container",
realy_delete_service: "Do you really want to delete the service?",
service_certificates: "Certificates",
service_delete_ok: "Service was deleted",
service_not_selected: "Service is not selected",
service_settings: "Settings",
service_type: "Type of service",
services_list: "List of services",
text: "Text",
write_mobile_number: "Enter the mobile number in the format +7XXXXXXXXXX",
write_pin: "Enter the PIN code for the container (leave empty if not required)",
write_service_description: "Enter the service description",
write_service_name: "Enter the service name",
write_text: "Enter the text to show on the mobile phone",
},
Settings: {
BASE: "BASE-64",
Control: "CERTIFICATES",
DER: "DER",
Datas: "ENCRYPTION",
Digital: "DIGITAL",
add_files: "Add files",
choose: "Choose",
choose_files: "Choose files",
delete_file: "Delete the file from the list",
directory_file_save: "Save the file to",
drag_drop: "Drag and drop the files",
email_error: "Email is not valid",
encoding: "Encoding type",
failed_find_directory: "The directory is not found",
field_empty: "This field cannot be empty!",
go_to_file: "Go to the file",
open_file: "Open the file",
pass_enter: "Enter the password",
password: "Password",
password_confirm: "Password confirm",
pattern_failed: "Only latin letters, numbers or a special characters",
print: "Print",
remove_all_files: "Remove all listed files",
remove_list: "Clear the list",
remove_selected: "Delete selection",
selected_all: "Select all",
setting_file: "settings.json",
settings: "Settings",
wait: "Please wait",
write_file_failed: "File saving error",
write_file_ok: "The settings saved",
},
Sign: {
Sign: "SIGN",
Signature: "SIGNATURE",
alg: "Signature algorithm",
build_chain_failed: "Chain building failed",
digest_alg: "Hash algorithm",
    documents_reviewed: "Documents were reviewed before signing",
files_resigned: "The file is re-signed",
files_resigned_exist: "The file is already re-signed. Select another certificate",
files_resigned_failed: "File re-signing failed",
files_signed: "The files are signed",
files_signed_failed: "File signing failed",
files_unsigned_detached: "Cannot unsign a detached signature",
files_unsigned_failed: "Signature removal error",
files_unsigned_ok: "The signature is successfully removed",
key_not_found: "The key is not found",
    load_sign_failed: "Error loading the signature",
resign: "Re-sign",
set_content_failed: "Error: content not found",
sign: "Sign",
sign_and_verify: "Sign / Verify the signature",
sign_content_file: "Sign content file",
sign_detached: "Save the signature as detached",
sign_error: "The signature is not valid",
sign_info: "Signature information",
sign_ok: "The signature is valid",
sign_setting: "Signature settings",
sign_time: "Add time of signature",
signercert_not_found: "Error: signer cert not found",
signingTime: "Signing time",
status: "The status",
unsign: "Remove a signature",
verify: "Verify",
verify_get_content_failed: "The original file is not found",
verify_sign_failed: "Signature verification failed",
verify_sign_founds_errors: "Errors were detected while verifying",
verify_sign_ok: "The signature is successfully verified",
verify_signercontent_founds_errors: "Error: signed content not found",
    verify_signers_failed: "Error verifying signers",
},
};<|fim▁end|> | |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|># coding: utf-8
from django.contrib import admin
from hub.models import ExtraUserDetail
from .models import AuthorizedApplication
<|fim▁hole|># Register your models here.
admin.site.register(AuthorizedApplication)
admin.site.register(ExtraUserDetail)<|fim▁end|> | |
<|file_name|>list_funds.js<|end_file_name|><|fim▁begin|>dojo.require("dijit.Dialog");
dojo.require("dijit.form.FilteringSelect");
dojo.require('dijit.form.Button');
dojo.require('dijit.TooltipDialog');
dojo.require('dijit.form.DropDownButton');
dojo.require('dijit.form.CheckBox');
dojo.require('dojox.grid.DataGrid');
dojo.require('dojo.data.ItemFileWriteStore');
dojo.require('openils.widget.OrgUnitFilteringSelect');
dojo.require('openils.acq.CurrencyType');
dojo.require('openils.Event');
dojo.require('openils.Util');
dojo.require('openils.User');
dojo.require('openils.CGI');
dojo.require('openils.PermaCrud');
dojo.require('openils.widget.AutoGrid');
dojo.require('openils.widget.ProgressDialog');
dojo.require('fieldmapper.OrgUtils');
dojo.requireLocalization('openils.acq', 'acq');
var localeStrings = dojo.i18n.getLocalization('openils.acq', 'acq');
var contextOrg;
var rolloverResponses;
var rolloverMode = false;
var fundFleshFields = [
'spent_balance',
'combined_balance',
'spent_total',
'encumbrance_total',
'debit_total',
'allocation_total'
];
var adminPermOrgs = [];
var cachedFunds = [];
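// cachedFunds mirrors the rows currently loaded into the grid so that cell
// formatters (e.g. getBalanceInfo below) can reach the fleshed fund objects
// without extra server round-trips.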
function initPage() {
contextOrg = openils.User.user.ws_ou();
/* Reveal controls for rollover without money if org units say ok.
* Actual ability to do the operation is controlled in the database, of
* course. */
var ouSettings = fieldmapper.aou.fetchOrgSettingBatch(
openils.User.user.ws_ou(), ["acq.fund.allow_rollover_without_money"]
);
if (
ouSettings["acq.fund.allow_rollover_without_money"] &&
ouSettings["acq.fund.allow_rollover_without_money"].value
) {
dojo.query(".encumb_only").forEach(
function(o) { openils.Util.show(o, "table-row"); }
);
}
var connect = function() {
dojo.connect(contextOrgSelector, 'onChange',
function() {
contextOrg = this.attr('value');
dojo.byId('oils-acq-rollover-ctxt-org').innerHTML =
fieldmapper.aou.findOrgUnit(contextOrg).shortname();
rolloverMode = false;
gridDataLoader();
}
);
};
dojo.connect(refreshButton, 'onClick',
function() { rolloverMode = false; gridDataLoader(); });
new openils.User().buildPermOrgSelector(
['ADMIN_ACQ_FUND', 'VIEW_FUND'],
contextOrgSelector, contextOrg, connect);
dojo.byId('oils-acq-rollover-ctxt-org').innerHTML =
fieldmapper.aou.findOrgUnit(contextOrg).shortname();
loadYearSelector();
lfGrid.onItemReceived = function(item) {cachedFunds.push(item)};
new openils.User().getPermOrgList(
'ADMIN_ACQ_FUND',
function(list) {
adminPermOrgs = list;
loadFundGrid(
new openils.CGI().param('year')
|| new Date().getFullYear().toString());
},
true, true
);
}
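// Repopulates the grid: in rollover mode the rows are paged locally from the
// cached rollover responses; otherwise they are re-fetched from the server.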
function gridDataLoader() {
lfGrid.resetStore();
if(rolloverMode) {
var offset = lfGrid.displayOffset;
for(var i = offset; i < (offset + lfGrid.displayLimit - 1); i++) {
var fund = rolloverResponses[i];
if(!fund) break;
lfGrid.store.newItem(fieldmapper.acqf.toStoreItem(fund));
}
} else {
loadFundGrid();
}
}
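// Grid cell formatter: resolves the fund for the current row from the local
// cache and renders its combined balance ('this' is bound to the grid cell).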
function getBalanceInfo(rowIdx, item) {
if (!item) return '';
var fundId = this.grid.store.getValue(item, 'id');
var fund = cachedFunds.filter(function(f) { return f.id() == fundId })[0];
var cb = fund.combined_balance();
return cb ? cb.amount() : '0';
}
function loadFundGrid(year) {
openils.Util.hide('acq-fund-list-rollover-summary');
year = year || fundFilterYearSelect.attr('value');
cachedFunds = [];
lfGrid.loadAll(
{
flesh : 1,
flesh_fields : {acqf : fundFleshFields},
// by default, sort funds I can edit to the front
order_by : [
{ 'class' : 'acqf',
field : 'org',
compare : {'in' : adminPermOrgs},
direction : 'desc'
},
{ 'class' : 'acqf',
field : 'name'
}
]
}, {
year : year,
org : fieldmapper.aou.descendantNodeList(contextOrg, true)
}
);
}
function loadYearSelector() {
fieldmapper.standardRequest(
['open-ils.acq', 'open-ils.acq.fund.org.years.retrieve'],
{ async : true,
params : [openils.User.authtoken, {}, {limit_perm : 'VIEW_FUND'}],
oncomplete : function(r) {
var yearList = openils.Util.readResponse(r);
if(!yearList) return;
yearList = yearList.map(function(year){return {year:year+''};}); // dojo wants strings
var yearStore = {identifier:'year', name:'year', items:yearList};
yearStore.items = yearStore.items.sort().reverse();
fundFilterYearSelect.store = new dojo.data.ItemFileWriteStore({data:yearStore});
// default to this year
fundFilterYearSelect.setValue(new Date().getFullYear().toString());
dojo.connect(
fundFilterYearSelect,
'onChange',
function() {
rolloverMode = false;
gridDataLoader();
}
);
}
}
);
}
function performRollover(args) {
rolloverMode = true;
progressDialog.show(true, "Processing...");
rolloverResponses = [];
var method = 'open-ils.acq.fiscal_rollover';
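    // Build the API method name: '.combined' also migrates unspent money,
    // '.propagate' only creates next year's funds, and '.dry_run' previews
    // the result without saving anything.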
if(args.rollover[0] == 'on') {
method += '.combined';
} else {
method += '.propagate';
}
var dryRun = args.dry_run[0] == 'on';
if(dryRun) method += '.dry_run';
var encumbOnly = args.encumb_only[0] == 'on';
var count = 0;
var amount_rolled = 0;
var year = fundFilterYearSelect.attr('value'); // TODO alternate selector?
fieldmapper.standardRequest(
['open-ils.acq', method],
{
async : true,
params : [
openils.User.authtoken,
year,
contextOrg,
(args.child_orgs[0] == 'on'),
{ encumb_only : encumbOnly }
],
onresponse : function(r) {
var resp = openils.Util.readResponse(r);
rolloverResponses.push(resp.fund);
count += 1;
amount_rolled += Number(resp.rollover_amount);
},
oncomplete : function() {
var nextYear = Number(year) + 1;
rolloverResponses = rolloverResponses.sort(
function(a, b) {
if(a.code() > b.code())
return 1;
return -1;
}
)
// add the new, rolled funds to the cache. Note that in dry-run
// mode, these are ephemeral and no longer exist on the server.
cachedFunds = cachedFunds.concat(rolloverResponses);
dojo.byId('acq-fund-list-rollover-summary-header').innerHTML =
dojo.string.substitute(
localeStrings.FUND_LIST_ROLLOVER_SUMMARY,
[nextYear]
);
dojo.byId('acq-fund-list-rollover-summary-funds').innerHTML =
dojo.string.substitute(
localeStrings.FUND_LIST_ROLLOVER_SUMMARY_FUNDS,
[nextYear, count]
);
dojo.byId('acq-fund-list-rollover-summary-rollover-amount').innerHTML =
dojo.string.substitute(
localeStrings.FUND_LIST_ROLLOVER_SUMMARY_ROLLOVER_AMOUNT,
[nextYear, amount_rolled]
);
if(!dryRun) {
openils.Util.hide('acq-fund-list-rollover-summary-dry-run');
// add the new year to the year selector if it's not already there
fundFilterYearSelect.store.fetch({
query : {year : nextYear},
onComplete:
function(list) {
if(list && list.length > 0) return;
fundFilterYearSelect.store.newItem({year : nextYear});
}
});
}
openils.Util.show('acq-fund-list-rollover-summary');
progressDialog.hide();
gridDataLoader();
}
}
);<|fim▁hole|>
openils.Util.addOnLoad(initPage);<|fim▁end|> | } |
<|file_name|>i2c_display.rs<|end_file_name|><|fim▁begin|>#![allow(dead_code)]
extern crate i2cdev;
use super::*;
use self::i2cdev::core::I2CDevice;
use self::i2cdev::linux::{LinuxI2CDevice, LinuxI2CError};
const MODE_REGISTER: u8 = 0x00;
const FRAME_REGISTER: u8 = 0x01;
const AUTOPLAY1_REGISTER: u8 = 0x02;
const AUTOPLAY2_REGISTER: u8 = 0x03;
const BLINK_REGISTER: u8 = 0x05;
const AUDIOSYNC_REGISTER: u8 = 0x06;
const BREATH1_REGISTER: u8 = 0x08;
const BREATH2_REGISTER: u8 = 0x09;
const SHUTDOWN_REGISTER: u8 = 0x0A;
const GAIN_REGISTER: u8 = 0x0B;
const ADC_REGISTER: u8 = 0x0C;
const CONFIG_BANK: u8 = 0x0B;
const BANK_ADDRESS: u8 = 0xFD;
const PICTURE_MODE: u8 = 0x00;
const AUTOPLAY_MODE: u8 = 0x08;
const AUDIOPLAY_MODE: u8 = 0x18;
const ENABLE_OFFSET: u8 = 0x00;
const BLINK_OFFSET: u8 = 0x12;
const COLOR_OFFSET: u8 = 0x24;
const ADDRESS: u16 = 0x74;
/// A Scroll pHAT HD device connected over I2C bus (e.g. on a Raspberry Pi).
pub struct I2CDisplay {
device: LinuxI2CDevice,
frame: u8,
}
impl I2CDisplay {
/// Creates a new I2CDisplay device using the I2C device identified by the provided
/// `device_id` (normally 1 or 2).
pub fn new(device_id: u8) -> I2CDisplay {
let device_path = format!("/dev/i2c-{}", device_id);
let d = LinuxI2CDevice::new(device_path, ADDRESS).unwrap();
let mut display = I2CDisplay {
device: d,
frame: 0,
};
display.init_display().unwrap();
display
}
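    /// Selects the active register bank (a frame bank or the config bank).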
fn bank(&mut self, bank: u8) -> Result<(), LinuxI2CError> {
self.write_data(BANK_ADDRESS, &[bank])
}
fn register(&mut self, bank: u8, register: u8, value: u8) -> Result<(), LinuxI2CError> {
self.bank(bank)?;
self.write_data(register, &[value])
}
fn frame(&mut self, frame: u8) -> Result<(), LinuxI2CError> {
self.register(CONFIG_BANK, FRAME_REGISTER, frame)
}
fn write_data(&mut self, base_address: u8, data: &[u8]) -> Result<(), LinuxI2CError> {
const CHUNK_SIZE: usize = 32;
for (i, chunk) in data.chunks(CHUNK_SIZE).enumerate() {
self.device
.smbus_process_block(base_address + (i * CHUNK_SIZE) as u8, chunk)?;
}
Ok(())
}
fn reset_display(&mut self) -> Result<(), LinuxI2CError> {
self.sleep(true)?;
std::thread::sleep(std::time::Duration::from_millis(10));
self.sleep(false)?;
Ok(())
}
fn init_display(&mut self) -> Result<(), LinuxI2CError> {
self.reset_display()?;
// Switch to Picture Mode.
self.register(CONFIG_BANK, MODE_REGISTER, PICTURE_MODE)?;
// Disable audio sync.
self.register(CONFIG_BANK, AUDIOSYNC_REGISTER, 0)?;
// Initialize frames 0 and 1.
for frame in 0..2 {
self.write_data(BANK_ADDRESS, &[frame])?;
// Turn off blinking for all LEDs.
self.write_data(BLINK_OFFSET, &[0; LED_COLUMNS * LED_ROWS])?;
// Set the PWM duty cycle for all LEDs to 0%.
self.write_data(COLOR_OFFSET, &[0; LED_COLUMNS * LED_ROWS])?;
// Turn all LEDs "on".
self.write_data(ENABLE_OFFSET, &[127; LED_COLUMNS * LED_ROWS])?;<|fim▁hole|> }
fn sleep(&mut self, value: bool) -> Result<(), LinuxI2CError> {
self.register(CONFIG_BANK, SHUTDOWN_REGISTER, if value { 0 } else { 1 })
}
}
impl Display for I2CDisplay {
fn show(&mut self, buffer: &[Column]) -> Result<(), Error> {
// Double buffering with frames 0 and 1.
let new_frame = (self.frame + 1) % 2;
self.bank(new_frame)?;
for y in 0..DISPLAY_HEIGHT {
for x in 0..DISPLAY_WIDTH {
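                // Map the (x, y) pixel to its LED register offset: the right
                // half of the matrix is addressed row-major, while the left
                // half is mirrored and shifted by the display's wiring.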
let offset = if x >= 8 {
(x - 8) * 16 + y
} else {
(8 - x) * 16 - (y + 2)
};
let value = match buffer.get(x as usize) {
Some(column) => column[y as usize],
None => 0,
};
self.write_data(COLOR_OFFSET + offset as u8, &[value])?;
}
}
self.frame(new_frame)?;
self.frame = new_frame;
Ok(())
}
}<|fim▁end|> | }
Ok(()) |
<|file_name|>QTSSPrefs.cpp<|end_file_name|><|fim▁begin|>/*
*
* @APPLE_LICENSE_HEADER_START@
*
* Copyright (c) 1999-2008 Apple Inc. All Rights Reserved.
*
* This file contains Original Code and/or Modifications of Original Code
* as defined in and that are subject to the Apple Public Source License
* Version 2.0 (the 'License'). You may not use this file except in
* compliance with the License. Please obtain a copy of the License at
* http://www.opensource.apple.com/apsl/ and read it before using this
* file.
*
* The Original Code and all software distributed under the License are
* distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
* EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
* INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
* Please see the License for the specific language governing rights and
* limitations under the License.
*
* @APPLE_LICENSE_HEADER_END@
*
*/
/*
File: QTSSPrefs.cpp
Contains: Implements class defined in QTSSPrefs.h.
Change History (most recent first):
*/
#include "QTSSPrefs.h"
#include "MyAssert.h"
#include "OSMemory.h"
#include "QTSSDataConverter.h"
#include "OSArrayObjectDeleter.h"
QTSSPrefs::QTSSPrefs(XMLPrefsParser* inPrefsSource, StrPtrLen* inModuleName, QTSSDictionaryMap* inMap,
Bool16 areInstanceAttrsAllowed, QTSSPrefs* parentDictionary )
: QTSSDictionary(inMap, &fPrefsMutex),
fPrefsSource(inPrefsSource),
fPrefName(NULL),
fParentDictionary(parentDictionary)
{
if (inModuleName != NULL)
fPrefName = inModuleName->GetAsCString();
}
QTSSDictionary* QTSSPrefs::CreateNewDictionary(QTSSDictionaryMap* inMap, OSMutex* /* inMutex */)
{
return NEW QTSSPrefs(fPrefsSource, NULL, inMap, true, this );
}
void QTSSPrefs::RereadPreferences()
{
RereadObjectPreferences(GetContainerRef());
}
void QTSSPrefs::RereadObjectPreferences(ContainerRef container)
{
QTSS_Error theErr = QTSS_NoErr;
//
// Keep track of which pref attributes should remain. All others
// will be removed.
// This routine uses names because it adds and deletes attributes. This means attribute indexes,positions and counts are constantly changing.
UInt32 initialNumAttrs = 0;
if (this->GetInstanceDictMap() != NULL)
{
initialNumAttrs = this->GetInstanceDictMap()->GetNumAttrs();
};
char** modulePrefInServer;
if (initialNumAttrs > 0)
{
modulePrefInServer = NEW char*[initialNumAttrs ];
::memset(modulePrefInServer, 0, sizeof(char*) * initialNumAttrs);
}
else
{
modulePrefInServer = NULL;
}
OSMutexLocker locker(&fPrefsMutex);
UInt32 theNumPrefs = fPrefsSource->GetNumPrefsByContainer(container);
for (UInt32 i = 0; i < initialNumAttrs;i++) // pull out all the names in the server
{
QTSSAttrInfoDict* theAttrInfoPtr = NULL;
theErr = this->GetInstanceDictMap()->GetAttrInfoByIndex(i, &theAttrInfoPtr);
if (theErr != QTSS_NoErr)
continue;
UInt32 nameLen = 0;
theErr = theAttrInfoPtr->GetValuePtr(qtssAttrName,0, (void **) &modulePrefInServer[i], &nameLen);
Assert(theErr == QTSS_NoErr);
//qtss_printf("QTSSPrefs::RereadPreferences modulePrefInServer in server=%s\n",modulePrefInServer[i]);
}
// Use the names of the attributes in the attribute map as the key values for
// finding preferences in the config file.
for (UInt32 x = 0; x < theNumPrefs; x++)
{
char* thePrefTypeStr = NULL;
char* thePrefName = NULL;
(void)fPrefsSource->GetPrefValueByIndex(container, x, 0, &thePrefName, &thePrefTypeStr);
// What type is this data type?
QTSS_AttrDataType thePrefType = QTSSDataConverter::TypeStringToType(thePrefTypeStr);
//
// Check to see if there is an attribute with this name already in the
// instance map. If one matches, then we don't need to add this attribute.
QTSSAttrInfoDict* theAttrInfo = NULL;
if (this->GetInstanceDictMap() != NULL)
(void)this->GetInstanceDictMap()->GetAttrInfoByName(thePrefName,
&theAttrInfo,
false ); // false=don't return info on deleted attributes
UInt32 theLen = sizeof(QTSS_AttrDataType);
QTSS_AttributeID theAttrID = qtssIllegalAttrID;
for (UInt32 i = 0; i < initialNumAttrs;i++) // see if this name is in the server
{ if (modulePrefInServer[i] != NULL && thePrefName != NULL && 0 == ::strcmp(modulePrefInServer[i],thePrefName))
{ modulePrefInServer[i] = NULL; // in the server so don't delete later
//qtss_printf("QTSSPrefs::RereadPreferences modulePrefInServer in file and in server=%s\n",thePrefName);
}
}
if ( theAttrInfo == NULL )
{
theAttrID = this->AddPrefAttribute(thePrefName, thePrefType); // not present or deleted
this->SetPrefValuesFromFile(container, x, theAttrID, 0); // will add another or replace a deleted attribute
}
else
{
QTSS_AttrDataType theAttrType = qtssAttrDataTypeUnknown;
theErr = theAttrInfo->GetValue(qtssAttrDataType, 0, &theAttrType, &theLen);
Assert(theErr == QTSS_NoErr);
theLen = sizeof(theAttrID);
theErr = theAttrInfo->GetValue(qtssAttrID, 0, &theAttrID, &theLen);
Assert(theErr == QTSS_NoErr);
if (theAttrType != thePrefType)
{
//
// This is not the same pref as before, because the data types
// are different. Remove the old one from the map, add the new one.
(void)this->RemoveInstanceAttribute(theAttrID);
theAttrID = this->AddPrefAttribute(thePrefName, thePrefType);
}
else
{
//
// This pref already exists
}
//
// Set the values
this->SetPrefValuesFromFile(container, x, theAttrID, 0);
// Mark this pref as found.
SInt32 theIndex = this->GetInstanceDictMap()->ConvertAttrIDToArrayIndex(theAttrID);
Assert(theIndex >= 0);
}
}
// Remove all attributes that no longer apply
if (this->GetInstanceDictMap() != NULL && initialNumAttrs > 0)
{
for (UInt32 a = 0; a < initialNumAttrs; a++)
{
if (NULL != modulePrefInServer[a]) // found a pref in the server that wasn't in the file
{
QTSSAttrInfoDict* theAttrInfoPtr = NULL;
theErr = this->GetInstanceDictMap()->GetAttrInfoByName(modulePrefInServer[a], &theAttrInfoPtr);
Assert(theErr == QTSS_NoErr);
if (theErr != QTSS_NoErr) continue;
QTSS_AttributeID theAttrID = qtssIllegalAttrID;
UInt32 theLen = sizeof(theAttrID);
theErr = theAttrInfoPtr->GetValue(qtssAttrID, 0, &theAttrID, &theLen);
Assert(theErr == QTSS_NoErr);
if (theErr != QTSS_NoErr) continue;
if (0)
{ char* theName = NULL;
UInt32 nameLen = 0;
theAttrInfoPtr->GetValuePtr(qtssAttrName,0, (void **) &theName, &nameLen);
qtss_printf("QTSSPrefs::RereadPreferences about to delete modulePrefInServer=%s attr=%s id=%"_U32BITARG_"\n",modulePrefInServer[a], theName,theAttrID);
}
this->GetInstanceDictMap()->RemoveAttribute(theAttrID);
modulePrefInServer[a] = NULL;
}
}
}
    delete [] modulePrefInServer; // allocated with NEW char*[initialNumAttrs], so array delete is required
}
void QTSSPrefs::SetPrefValuesFromFile(ContainerRef container, UInt32 inPrefIndex, QTSS_AttributeID inAttrID, UInt32 inNumValues)
{
ContainerRef pref = fPrefsSource->GetPrefRefByIndex(container, inPrefIndex);
SetPrefValuesFromFileWithRef(pref, inAttrID, inNumValues);
}
void QTSSPrefs::SetPrefValuesFromFileWithRef(ContainerRef pref, QTSS_AttributeID inAttrID, UInt32 inNumValues)
{
//
// We have an attribute ID for this pref, it is in the map and everything.
// Now, let's add all the values that are in the pref file.
if (pref == 0)
return;
UInt32 numPrefValues = inNumValues;
if (inNumValues == 0)
numPrefValues = fPrefsSource->GetNumPrefValues(pref);
char* thePrefName = NULL;
char* thePrefValue = NULL;
char* thePrefTypeStr = NULL;
QTSS_AttrDataType thePrefType = qtssAttrDataTypeUnknown;
// find the type. If this is a QTSSObject, then we need to call a different routine
thePrefValue = fPrefsSource->GetPrefValueByRef( pref, 0, &thePrefName, &thePrefTypeStr);
thePrefType = QTSSDataConverter::TypeStringToType(thePrefTypeStr);
if (thePrefType == qtssAttrDataTypeQTSS_Object)
{
SetObjectValuesFromFile(pref, inAttrID, numPrefValues, thePrefName);
return;
}
UInt32 maxPrefValueSize = 0;
QTSS_Error theErr = QTSS_NoErr;
//
// We have to loop through all the values associated with this pref twice:
// first, to figure out the length (in bytes) of the longest value, secondly
// to actually copy these values into the dictionary.
for (UInt32 y = 0; y < numPrefValues; y++)
{
UInt32 tempMaxPrefValueSize = 0;
thePrefValue = fPrefsSource->GetPrefValueByRef( pref, y, &thePrefName, &thePrefTypeStr);
theErr = QTSSDataConverter::StringToValue( thePrefValue, thePrefType,
NULL, &tempMaxPrefValueSize );
Assert(theErr == QTSS_NotEnoughSpace);
if (tempMaxPrefValueSize > maxPrefValueSize)
maxPrefValueSize = tempMaxPrefValueSize;
}
for (UInt32 z = 0; z < numPrefValues; z++)
{
thePrefValue = fPrefsSource->GetPrefValueByRef( pref, z, &thePrefName, &thePrefTypeStr);
this->SetPrefValue(inAttrID, z, thePrefValue, thePrefType, maxPrefValueSize);
}
//
// Make sure the dictionary knows exactly how many values are associated with
// this pref
this->SetNumValues(inAttrID, numPrefValues);
}
void QTSSPrefs::SetObjectValuesFromFile(ContainerRef pref, QTSS_AttributeID inAttrID, UInt32 inNumValues, char* prefName)
{
for (UInt32 z = 0; z < inNumValues; z++)
{
ContainerRef object = fPrefsSource->GetObjectValue( pref, z );
QTSSPrefs* prefObject;
UInt32 len = sizeof(QTSSPrefs*);
QTSS_Error err = this->GetValue(inAttrID, z, &prefObject, &len);
if (err != QTSS_NoErr)
{
UInt32 tempIndex;
err = CreateObjectValue(inAttrID, &tempIndex, (QTSSDictionary**)&prefObject, NULL, QTSSDictionary::kDontObeyReadOnly | QTSSDictionary::kDontCallCompletionRoutine);
Assert(err == QTSS_NoErr);<|fim▁hole|> Assert(tempIndex == z);
if (err != QTSS_NoErr) // this shouldn't happen
return;
StrPtrLen temp(prefName);
prefObject->fPrefName = temp.GetAsCString();
}
prefObject->RereadObjectPreferences(object);
}
//
// Make sure the dictionary knows exactly how many values are associated with
// this pref
this->SetNumValues(inAttrID, inNumValues);
}
void QTSSPrefs::SetPrefValue(QTSS_AttributeID inAttrID, UInt32 inAttrIndex,
char* inPrefValue, QTSS_AttrDataType inPrefType, UInt32 inValueSize)
{
static const UInt32 kMaxPrefValueSize = 1024;
char convertedPrefValue[kMaxPrefValueSize];
::memset(convertedPrefValue, 0, kMaxPrefValueSize);
Assert(inValueSize < kMaxPrefValueSize);
UInt32 convertedBufSize = kMaxPrefValueSize;
QTSS_Error theErr = QTSSDataConverter::StringToValue
(inPrefValue, inPrefType, convertedPrefValue, &convertedBufSize );
Assert(theErr == QTSS_NoErr);
if (inValueSize == 0)
inValueSize = convertedBufSize;
this->SetValue(inAttrID, inAttrIndex, convertedPrefValue, inValueSize, QTSSDictionary::kDontObeyReadOnly | QTSSDictionary::kDontCallCompletionRoutine);
}
QTSS_AttributeID QTSSPrefs::AddPrefAttribute(const char* inAttrName, QTSS_AttrDataType inDataType)
{
QTSS_Error theErr = this->AddInstanceAttribute( inAttrName, NULL, inDataType, qtssAttrModeRead | qtssAttrModeWrite | qtssAttrModeDelete);
Assert(theErr == QTSS_NoErr);
QTSS_AttributeID theID = qtssIllegalAttrID;
theErr = this->GetInstanceDictMap()->GetAttrID( inAttrName, &theID);
Assert(theErr == QTSS_NoErr);
return theID;
}
void QTSSPrefs::RemoveValueComplete(UInt32 inAttrIndex, QTSSDictionaryMap* inMap,
UInt32 inValueIndex)
{
ContainerRef objectRef = GetContainerRef();
ContainerRef pref = fPrefsSource->GetPrefRefByName( objectRef, inMap->GetAttrName(inAttrIndex));
Assert(pref != NULL);
if (pref != NULL)
fPrefsSource->RemovePrefValue( pref, inValueIndex);
if (fPrefsSource->WritePrefsFile())
QTSSModuleUtils::LogError(qtssWarningVerbosity, qtssMsgCantWriteFile, 0);
}
void QTSSPrefs::RemoveInstanceAttrComplete(UInt32 inAttrIndex, QTSSDictionaryMap* inMap)
{
ContainerRef objectRef = GetContainerRef();
ContainerRef pref = fPrefsSource->GetPrefRefByName( objectRef, inMap->GetAttrName(inAttrIndex));
Assert(pref != NULL);
if (pref != NULL)
{
fPrefsSource->RemovePref(pref);
}
if (fPrefsSource->WritePrefsFile())
QTSSModuleUtils::LogError(qtssWarningVerbosity, qtssMsgCantWriteFile, 0);
}
void QTSSPrefs::SetValueComplete(UInt32 inAttrIndex, QTSSDictionaryMap* inMap,
UInt32 inValueIndex, void* inNewValue, UInt32 inNewValueLen)
{
ContainerRef objectRef = GetContainerRef();
ContainerRef pref = fPrefsSource->AddPref(objectRef, inMap->GetAttrName(inAttrIndex), QTSSDataConverter::TypeToTypeString(inMap->GetAttrType(inAttrIndex)));
if (inMap->GetAttrType(inAttrIndex) == qtssAttrDataTypeQTSS_Object)
{
QTSSPrefs* object = *(QTSSPrefs**)inNewValue; // value is a pointer to a QTSSPrefs object
StrPtrLen temp(inMap->GetAttrName(inAttrIndex));
object->fPrefName = temp.GetAsCString();
if (inValueIndex == fPrefsSource->GetNumPrefValues(pref))
fPrefsSource->AddNewObject(pref);
}
else
{
OSCharArrayDeleter theValueAsString(QTSSDataConverter::ValueToString(inNewValue, inNewValueLen, inMap->GetAttrType(inAttrIndex)));
fPrefsSource->SetPrefValue(pref, inValueIndex, theValueAsString.GetObject());
}
if (fPrefsSource->WritePrefsFile())
QTSSModuleUtils::LogError(qtssWarningVerbosity, qtssMsgCantWriteFile, 0);
}
ContainerRef QTSSPrefs::GetContainerRefForObject(QTSSPrefs* object)
{
ContainerRef thisContainer = GetContainerRef();
ContainerRef pref = fPrefsSource->GetPrefRefByName(thisContainer, object->fPrefName);
if (pref == NULL)
return NULL;
if (fPrefsSource->GetNumPrefValues(pref) <= 1)
return fPrefsSource->GetObjectValue(pref, 0);
QTSSAttrInfoDict* theAttrInfoPtr = NULL;
QTSS_Error theErr = this->GetInstanceDictMap()->GetAttrInfoByName(object->fPrefName, &theAttrInfoPtr);
Assert(theErr == QTSS_NoErr);
if (theErr != QTSS_NoErr) return NULL;
QTSS_AttributeID theAttrID = qtssIllegalAttrID;
UInt32 len = sizeof(theAttrID);
theErr = theAttrInfoPtr->GetValue(qtssAttrID, 0, &theAttrID, &len);
Assert(theErr == QTSS_NoErr);
if (theErr != QTSS_NoErr) return NULL;
UInt32 index = 0;
QTSSPrefs* prefObject;
len = sizeof(prefObject);
    while (this->GetValue(theAttrID, index, &prefObject, &len) == QTSS_NoErr)
    {
        if (prefObject == object)
        {
            return fPrefsSource->GetObjectValue(pref, index);
        }
        index++; // advance the index, otherwise this loop never terminates when the first value is not a match
    }
return NULL;
}
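// Walks up the dictionary hierarchy: a top-level pref resolves to its module's
// container, while a nested object pref asks its parent for the container ref.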
ContainerRef QTSSPrefs::GetContainerRef()
{
if (fParentDictionary == NULL) // this is a top level Pref, so it must be a module
return fPrefsSource->GetRefForModule(fPrefName);
else
return fParentDictionary->GetContainerRefForObject(this);
}<|fim▁end|> | |
<|file_name|>ConsultationRequest.java<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2001-2002. Department of Family Medicine, McMaster University. All Rights Reserved.
* This software is published under the GPL GNU General Public License.
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*
* This software was written for the
* Department of Family Medicine
* McMaster University
* Hamilton
* Ontario, Canada
*/
package org.oscarehr.common.model;
import java.io.Serializable;
import java.util.Date;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.PrePersist;
import javax.persistence.PreUpdate;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.Transient;
import org.apache.commons.lang.StringUtils;
@Entity
@Table(name = "consultationRequests")
public class ConsultationRequest extends AbstractModel<Integer> implements Serializable {
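    /** Status value marking a request as active; the default for new requests. */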
private static final String ACTIVE_MARKER = "1";
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Column(name = "requestId")
private Integer id;
@Column(name = "referalDate")
@Temporal(TemporalType.DATE)
private Date referralDate;
private Integer serviceId;
@ManyToOne(fetch=FetchType.EAGER, cascade=CascadeType.ALL)
@JoinColumn(name="specId")
private ProfessionalSpecialist professionalSpecialist;
@Temporal(TemporalType.DATE)
private Date appointmentDate;
@Temporal(TemporalType.TIME)
private Date appointmentTime;
@Column(name = "reason")
private String reasonForReferral;
private String clinicalInfo;
private String currentMeds;
private String allergies;
private String providerNo;
@Column(name = "demographicNo")
private Integer demographicId;
private String status = ACTIVE_MARKER;
private String statusText;
private String sendTo;
private String concurrentProblems;
private String urgency;
private String appointmentInstructions;
private boolean patientWillBook;
@Column(name = "site_name")
private String siteName;
@Temporal(TemporalType.DATE)
private Date followUpDate;
@Column(name = "signature_img")
private String signatureImg;
private String letterheadName;
private String letterheadAddress;
private String letterheadPhone;
private String letterheadFax;
@Temporal(TemporalType.TIMESTAMP)
private Date lastUpdateDate;
private Integer fdid = null;
private String source;
@ManyToOne(fetch=FetchType.EAGER, targetEntity=LookupListItem.class)
@JoinColumn(name="appointmentInstructions", referencedColumnName="value", insertable = false, updatable = false)
private LookupListItem lookupListItem;
@Override
public Integer getId() {
return(id);
}
public Date getReferralDate() {
return referralDate;
}
public void setReferralDate(Date referralDate) {
this.referralDate = referralDate;
}
public Integer getServiceId() {
return serviceId;
}
public void setServiceId(Integer serviceId) {
this.serviceId = serviceId;
}
public Date getAppointmentDate() {
return appointmentDate;
}
public void setAppointmentDate(Date appointmentDate) {
this.appointmentDate = appointmentDate;
}
public Date getAppointmentTime() {
return appointmentTime;
}
public void setAppointmentTime(Date appointmentTime) {
this.appointmentTime = appointmentTime;
}
public String getReasonForReferral() {
return reasonForReferral;
}
public void setReasonForReferral(String reasonForReferral) {
this.reasonForReferral = StringUtils.trimToNull(reasonForReferral);
}
public String getClinicalInfo() {
return clinicalInfo;
}
public void setClinicalInfo(String clinicalInfo) {
this.clinicalInfo = StringUtils.trimToNull(clinicalInfo);
}
public String getCurrentMeds() {
return currentMeds;
}
public void setCurrentMeds(String currentMeds) {
this.currentMeds = StringUtils.trimToNull(currentMeds);
}
public String getAllergies() {
return allergies;
}
public void setAllergies(String allergies) {
this.allergies = StringUtils.trimToNull(allergies);
}
public String getProviderNo() {
return providerNo;
}
public void setProviderNo(String providerNo) {
this.providerNo = StringUtils.trimToNull(providerNo);
}<|fim▁hole|> public Integer getDemographicId() {
return demographicId;
}
public void setDemographicId(Integer demographicId) {
this.demographicId = demographicId;
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = StringUtils.trimToNull(status);
}
public String getStatusText() {
return statusText;
}
public void setStatusText(String statusText) {
this.statusText = StringUtils.trimToNull(statusText);
}
public String getSendTo() {
return sendTo;
}
public void setSendTo(String sendTo) {
this.sendTo = StringUtils.trimToNull(sendTo);
}
public String getConcurrentProblems() {
return concurrentProblems;
}
public void setConcurrentProblems(String concurrentProblems) {
this.concurrentProblems = StringUtils.trimToNull(concurrentProblems);
}
public String getUrgency() {
return urgency;
}
public void setUrgency(String urgency) {
this.urgency = StringUtils.trimToNull(urgency);
}
public String getSiteName() {
return siteName;
}
public void setSiteName(String siteName) {
this.siteName = siteName;
}
public boolean isPatientWillBook() {
return patientWillBook;
}
public void setPatientWillBook(boolean patientWillBook) {
this.patientWillBook = patientWillBook;
}
/**
* @return the followUpDate
*/
public Date getFollowUpDate() {
return followUpDate;
}
/**
* @param followUpDate the followUpDate to set
*/
public void setFollowUpDate(Date followUpDate) {
this.followUpDate = followUpDate;
}
/**
* @return the professionalSpecialist
*/
public ProfessionalSpecialist getProfessionalSpecialist() {
return professionalSpecialist;
}
/**
* @param professionalSpecialist the professionalSpecialist to set
*/
public void setProfessionalSpecialist(ProfessionalSpecialist professionalSpecialist) {
this.professionalSpecialist = professionalSpecialist;
}
public Integer getSpecialistId() {
if(professionalSpecialist != null)
return this.professionalSpecialist.getId();
else
return null;
}
public String getSignatureImg() {
return signatureImg;
}
public void setSignatureImg(String signatureImg) {
this.signatureImg = signatureImg;
}
public String getLetterheadName() {
return letterheadName;
}
public void setLetterheadName(String letterheadName) {
this.letterheadName = letterheadName;
}
public String getLetterheadAddress() {
return letterheadAddress;
}
public void setLetterheadAddress(String letterheadAddress) {
this.letterheadAddress = letterheadAddress;
}
public String getLetterheadPhone() {
return letterheadPhone;
}
public void setLetterheadPhone(String letterheadPhone) {
this.letterheadPhone = letterheadPhone;
}
public String getLetterheadFax() {
return letterheadFax;
}
public void setLetterheadFax(String letterheadFax) {
this.letterheadFax = letterheadFax;
}
public Integer getFdid() {
return fdid;
}
public void setFdid(Integer fdid) {
this.fdid = fdid;
}
public String getSource() {
return source;
}
public void setSource(String source) {
this.source = source;
}
@PrePersist
@PreUpdate
protected void jpa_updateLastDateUpdated() {
lastUpdateDate = new Date();
}
/**
     * Returns the appointment instructions value.
     * This can be a plain display value, or a select-list value when the
     * Lookup List interface is used.
     * If the value contains a hash key, it is most likely a primary-key
     * association into the LookupListItem table.
*/
public String getAppointmentInstructions() {
return appointmentInstructions;
}
public void setAppointmentInstructions(String appointmentInstructions) {
this.appointmentInstructions = appointmentInstructions;
}
/**
* Returns the display label of the Appointment Instruction if
* the Lookup List interface is being used.
* Empty string otherwise.
*/
@Transient
public String getAppointmentInstructionsLabel() {
if( lookupListItem != null ) {
return lookupListItem.getLabel();
}
return "";
}
/**
* This will be bound if the Appointment Instructions
* value is found as a unique match in the LookupListItem
* table.
*/
public LookupListItem getLookupListItem() {
return lookupListItem;
}
public void setLookupListItem(LookupListItem lookupListItem) {
this.lookupListItem = lookupListItem;
}
public Date getLastUpdateDate() {
return lastUpdateDate;
}
public void setLastUpdateDate(Date lastUpdateDate) {
this.lastUpdateDate = lastUpdateDate;
}
}<|fim▁end|> | |
<|file_name|>tool_test.go<|end_file_name|><|fim▁begin|>package tool_test
import (
"bytes"
"strings"
"testing"
"github.com/matryer/codeform/source"
"github.com/matryer/codeform/tool"
"github.com/matryer/is"
)
func TestExecute(t *testing.T) {
is := is.New(t)
srcCode := source.Reader("source.go", strings.NewReader(`package something
type Inter1 interface {
Inter1Method1(a, b int) error
Inter1Method2(c, d int) error
}
type Inter2 interface {
Inter2Method1(a, b int) error
Inter2Method2(c, d int) error
}`))
srcTmpl := source.Reader("template.tpl", strings.NewReader(<|fim▁hole|> ))
j := tool.Job{
Code: srcCode,
Template: srcTmpl,
}
var buf bytes.Buffer
err := j.Execute(&buf)
is.NoErr(err)
is.Equal(buf.String(), `Inter1 Inter2 `)
}<|fim▁end|> | `{{ range .Packages }}{{ range .Interfaces }}{{ .Name }} {{ end }}{{ end }}`, |
<|file_name|>ExampleApp.C<|end_file_name|><|fim▁begin|>/****************************************************************/
/* DO NOT MODIFY THIS HEADER */
/* MOOSE - Multiphysics Object Oriented Simulation Environment */
/* */
/* (c) 2010 Battelle Energy Alliance, LLC */
/* ALL RIGHTS RESERVED */
/* */
/* Prepared by Battelle Energy Alliance, LLC */
/* Under Contract No. DE-AC07-05ID14517 */
/* With the U. S. Department of Energy */
/* */
/* See COPYRIGHT for full restrictions */
/****************************************************************/
#include "ExampleApp.h"
#include "Moose.h"
#include "AppFactory.h"
#include "MooseSyntax.h"
// Example 13 Includes
#include "ExampleFunction.h"
template<>
InputParameters validParams<ExampleApp>()
{
InputParameters params = validParams<MooseApp>();
params.set<bool>("use_legacy_uo_initialization") = false;
params.set<bool>("use_legacy_uo_aux_computation") = false;
return params;
}
ExampleApp::ExampleApp(InputParameters parameters) :
MooseApp(parameters)
{
srand(processor_id());
Moose::registerObjects(_factory);
ExampleApp::registerObjects(_factory);
Moose::associateSyntax(_syntax, _action_factory);
ExampleApp::associateSyntax(_syntax, _action_factory);
}
ExampleApp::~ExampleApp()
{
}
void
ExampleApp::registerApps()
{
registerApp(ExampleApp);<|fim▁hole|>void
ExampleApp::registerObjects(Factory & factory)
{
registerFunction(ExampleFunction);
}
void
ExampleApp::associateSyntax(Syntax & /*syntax*/, ActionFactory & /*action_factory*/)
{
}<|fim▁end|> | }
|
<|file_name|>car.py<|end_file_name|><|fim▁begin|>"""
Created on 11.09.2014
@author: [email protected]
"""
from abc import ABCMeta, abstractmethod
import random<|fim▁hole|>
class BaseCar(metaclass=ABCMeta):
"""
Represents the fundamentals of a car
"""
def __init__(self, env, tank_size):
"""
Constructor
:type tank_size: int
:type env: simulation.environment.SimulationEnvironment
"""
env.car = self
self.env = env
self._tankSize = float(tank_size)
self._tankFilling = BaseCar._random_tank_filling(self._tankSize)
self._current_position = None
self._fuel_type = 'e5'
self._driven_distance = float(0)
# self.log = logging.getLogger('spritsim.Car' + commuter_id)
@staticmethod
def _random_tank_filling(maximum):
"""
Returns a random tank filling in litre
Method for initializing a cars with a random tank filling between 10 and maximum litres
:param maximum: maximum tank capacity
:return: A random filling
:rtype: float
"""
return random.uniform(10, maximum)
@property
def current_position(self):
"""Returns the nodes target ID
:rtype: int
"""
return self._current_position
@property
def driven_distance(self):
"""
The car's odometer
:return: The total distance the car has traveled
:rtype: float
"""
return self._driven_distance
@property
def fuel_type(self):
"""
The car's fuel type
:return: Type of fuel (e5|diesel)
:rtype: str
"""
return self._fuel_type
@property
def tank_size(self):
"""
:return: Size of the car's tank in litre
:rtype: float
"""
return self._tankSize
@property
def current_filling(self):
"""
:return: Current filling of the car's tank
:rtype: float
"""
return self._tankFilling
def consume_fuel(self, speed, distance, road_type):
"""
:param int speed: Maximum allowed speed
:param float distance: Length of the segment
:param simulation.routing.route.RouteClazz road_type: The type of the road
:return:
"""
self._tankFilling -= self.consumption_per_km * distance
@property
@abstractmethod
def consumption_per_km(self):
"""
:return: The fuel consumption of the car per km
:rtype: float
"""
pass
@property
def km_left(self):
"""
Returns the remaining km the car can drive
:return: Distance car is able to drive
:rtype: float
"""
return self.current_filling / self.consumption_per_km
def refilled(self):
"""Car has been refilled at a filling station"""
self._tankFilling = self._tankSize
def drive(self, ignore_refill_warning=False):
"""Lets the car drive the given route
On arrival at the destination the a CommuterAction for the route is returned or if the car needs refilling
the action to search for a refilling station is returned.
:param ignore_refill_warning: Tells the function not to raise a RefillWarning (default: False)
:type ignore_refill_warning: bool
:raises RefillWarning: If the tank filling is less or equal 5.0 liter
"""
for segment in self.env.route:
self._do_driving(segment)
self.env.consume_time(datetime.timedelta(seconds=segment.travel_time))
# check if driving the segment has
if self._tankFilling <= 5.0 and not ignore_refill_warning:
raise RefillWarning()
def _do_driving(self, segment):
"""
Drives the given route segment
Uses the segment data to simulate the driving of the car. Thereby fuel is consumed to the amount calculated
by the consume_fuel method.
:param segment: a single fragment of the route
:type segment: simulation.routing.route.RouteFragment
"""
self.consume_fuel(segment.speed_limit, segment.length, segment.road_type)
self._driven_distance += segment.length
self._current_position = segment.target
class PetrolCar(BaseCar):
def __init__(self, env):
super().__init__(env, 50)
self._fuel_type = 'e5'
@property
def consumption_per_km(self):
"""
Consumes standard of 10 Liter per 100km, an equivalent of 0.1 L/km
:return: fuel consumption per 1 km in liter
:rtype: float
"""
return 0.1
class DieselCar(BaseCar):
def __init__(self, env):
super().__init__(env, 50)
self._fuel_type = 'diesel'
@property
def consumption_per_km(self):
"""
Consumes standard of 8 litre per 100km, an equivalent of 0.08 L/km
:return: fuel consumption per 1 km in liter
:rtype: float
"""
return 0.08
class RefillWarning(Exception):
pass<|fim▁end|> | import datetime |
<|file_name|>controlflow.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use llvm::*;
use driver::config::FullDebugInfo;
use middle::def;
use middle::lang_items::{FailFnLangItem, FailBoundsCheckFnLangItem};
use middle::trans::_match;
use middle::trans::adt;
use middle::trans::base::*;
use middle::trans::build::*;
use middle::trans::callee;
use middle::trans::cleanup::CleanupMethods;
use middle::trans::cleanup;
use middle::trans::common::*;
use middle::trans::consts;
use middle::trans::datum;
use middle::trans::expr;
use middle::trans::meth;
use middle::trans::type_::Type;
use middle::trans;
use middle::ty;
use middle::typeck::MethodCall;
use util::ppaux::Repr;
use util::ppaux;
use syntax::ast;
use syntax::ast::Ident;
use syntax::ast_util;
use syntax::codemap::Span;
use syntax::parse::token::InternedString;
use syntax::parse::token;
use syntax::visit::Visitor;
use std::gc::Gc;
pub fn trans_stmt<'a>(cx: &'a Block<'a>,
s: &ast::Stmt)
-> &'a Block<'a> {
let _icx = push_ctxt("trans_stmt");
let fcx = cx.fcx;
debug!("trans_stmt({})", s.repr(cx.tcx()));
if cx.sess().asm_comments() {
add_span_comment(cx, s.span, s.repr(cx.tcx()).as_slice());
}
let mut bcx = cx;
let id = ast_util::stmt_id(s);
fcx.push_ast_cleanup_scope(id);
match s.node {
ast::StmtExpr(ref e, _) | ast::StmtSemi(ref e, _) => {
bcx = trans_stmt_semi(bcx, &**e);
}
ast::StmtDecl(d, _) => {
match d.node {
ast::DeclLocal(ref local) => {
bcx = init_local(bcx, &**local);
if cx.sess().opts.debuginfo == FullDebugInfo {
trans::debuginfo::create_local_var_metadata(bcx,
&**local);
}
}
// Inner items are visited by `trans_item`/`trans_meth`.
ast::DeclItem(_) => {},
}
}
ast::StmtMac(..) => cx.tcx().sess.bug("unexpanded macro")
}
bcx = fcx.pop_and_trans_ast_cleanup_scope(
bcx, ast_util::stmt_id(s));
return bcx;
}
pub fn trans_stmt_semi<'a>(cx: &'a Block<'a>, e: &ast::Expr) -> &'a Block<'a> {
let _icx = push_ctxt("trans_stmt_semi");
let ty = expr_ty(cx, e);
if ty::type_needs_drop(cx.tcx(), ty) {
expr::trans_to_lvalue(cx, e, "stmt").bcx
} else {
expr::trans_into(cx, e, expr::Ignore)
}
}
pub fn trans_block<'a>(bcx: &'a Block<'a>,
b: &ast::Block,
mut dest: expr::Dest)
-> &'a Block<'a> {
let _icx = push_ctxt("trans_block");
let fcx = bcx.fcx;
let mut bcx = bcx;
fcx.push_ast_cleanup_scope(b.id);
for s in b.stmts.iter() {
bcx = trans_stmt(bcx, &**s);
}
if dest != expr::Ignore {
let block_ty = node_id_type(bcx, b.id);
if b.expr.is_none() || type_is_zero_size(bcx.ccx(), block_ty) {
dest = expr::Ignore;
}
}
match b.expr {
Some(ref e) => {
bcx = expr::trans_into(bcx, &**e, dest);
}
None => {
assert!(dest == expr::Ignore || bcx.unreachable.get());
}
}
bcx = fcx.pop_and_trans_ast_cleanup_scope(bcx, b.id);
return bcx;
}
pub fn trans_if<'a>(bcx: &'a Block<'a>,
if_id: ast::NodeId,
cond: &ast::Expr,
thn: ast::P<ast::Block>,
els: Option<Gc<ast::Expr>>,
dest: expr::Dest)
-> &'a Block<'a> {
debug!("trans_if(bcx={}, if_id={}, cond={}, thn={:?}, dest={})",
bcx.to_str(), if_id, bcx.expr_to_string(cond), thn.id,
dest.to_string(bcx.ccx()));
let _icx = push_ctxt("trans_if");
let mut bcx = bcx;
let cond_val = unpack_result!(bcx, expr::trans(bcx, cond).to_llbool());
// Drop branches that are known to be impossible
if is_const(cond_val) && !is_undef(cond_val) {
if const_to_uint(cond_val) == 1 {
match els {
Some(elexpr) => {
let mut trans = TransItemVisitor { ccx: bcx.fcx.ccx };
trans.visit_expr(&*elexpr, ());
}
None => {}
}
// if true { .. } [else { .. }]
bcx = trans_block(bcx, &*thn, dest);
trans::debuginfo::clear_source_location(bcx.fcx);
} else {
let mut trans = TransItemVisitor { ccx: bcx.fcx.ccx } ;
trans.visit_block(&*thn, ());
match els {
// if false { .. } else { .. }
Some(elexpr) => {
bcx = expr::trans_into(bcx, &*elexpr, dest);
trans::debuginfo::clear_source_location(bcx.fcx);
}
// if false { .. }
None => { }
}
}
return bcx;
}
let name = format!("then-block-{}-", thn.id);
let then_bcx_in = bcx.fcx.new_id_block(name.as_slice(), thn.id);
let then_bcx_out = trans_block(then_bcx_in, &*thn, dest);
trans::debuginfo::clear_source_location(bcx.fcx);
let next_bcx;
match els {
Some(elexpr) => {
let else_bcx_in = bcx.fcx.new_id_block("else-block", elexpr.id);
let else_bcx_out = expr::trans_into(else_bcx_in, &*elexpr, dest);
next_bcx = bcx.fcx.join_blocks(if_id,
[then_bcx_out, else_bcx_out]);
CondBr(bcx, cond_val, then_bcx_in.llbb, else_bcx_in.llbb);
}
None => {
next_bcx = bcx.fcx.new_id_block("next-block", if_id);
Br(then_bcx_out, next_bcx.llbb);
CondBr(bcx, cond_val, then_bcx_in.llbb, next_bcx.llbb);
}
}
// Clear the source location because it is still set to whatever has been translated
// right before.
trans::debuginfo::clear_source_location(next_bcx.fcx);
next_bcx
}
pub fn trans_while<'a>(bcx: &'a Block<'a>,
loop_id: ast::NodeId,
cond: &ast::Expr,
body: &ast::Block)
-> &'a Block<'a> {
let _icx = push_ctxt("trans_while");
let fcx = bcx.fcx;
// bcx
// |
// cond_bcx_in <--------+
// | |
// cond_bcx_out |
// | | |
// | body_bcx_in |
// cleanup_blk | |
// | body_bcx_out --+
// next_bcx_in
let next_bcx_in = fcx.new_id_block("while_exit", loop_id);
let cond_bcx_in = fcx.new_id_block("while_cond", cond.id);
let body_bcx_in = fcx.new_id_block("while_body", body.id);
fcx.push_loop_cleanup_scope(loop_id, [next_bcx_in, cond_bcx_in]);
Br(bcx, cond_bcx_in.llbb);
// compile the block where we will handle loop cleanups
let cleanup_llbb = fcx.normal_exit_block(loop_id, cleanup::EXIT_BREAK);
// compile the condition
let Result {bcx: cond_bcx_out, val: cond_val} =
expr::trans(cond_bcx_in, cond).to_llbool();
CondBr(cond_bcx_out, cond_val, body_bcx_in.llbb, cleanup_llbb);
// loop body:
let body_bcx_out = trans_block(body_bcx_in, body, expr::Ignore);
Br(body_bcx_out, cond_bcx_in.llbb);
fcx.pop_loop_cleanup_scope(loop_id);
return next_bcx_in;
}
/// Translates a `for` loop.
pub fn trans_for<'a>(
mut bcx: &'a Block<'a>,
loop_info: NodeInfo,
pat: Gc<ast::Pat>,
head: &ast::Expr,
body: &ast::Block)
-> &'a Block<'a> {
let _icx = push_ctxt("trans_for");
// bcx
// |
// loopback_bcx_in <-------+
// | |
// loopback_bcx_out |
// | | |
// | body_bcx_in |
// cleanup_blk | |
// | body_bcx_out --+
// next_bcx_in
// Codegen the head to create the iterator value.
let iterator_datum =
unpack_datum!(bcx, expr::trans_to_lvalue(bcx, head, "for_head"));
let iterator_type = node_id_type(bcx, head.id);
debug!("iterator type is {}, datum type is {}",
ppaux::ty_to_string(bcx.tcx(), iterator_type),
ppaux::ty_to_string(bcx.tcx(), iterator_datum.ty));
let lliterator = load_ty(bcx, iterator_datum.val, iterator_datum.ty);
// Create our basic blocks and set up our loop cleanups.
let next_bcx_in = bcx.fcx.new_id_block("for_exit", loop_info.id);
let loopback_bcx_in = bcx.fcx.new_id_block("for_loopback", head.id);
let body_bcx_in = bcx.fcx.new_id_block("for_body", body.id);
bcx.fcx.push_loop_cleanup_scope(loop_info.id,
[next_bcx_in, loopback_bcx_in]);
Br(bcx, loopback_bcx_in.llbb);
let cleanup_llbb = bcx.fcx.normal_exit_block(loop_info.id,
cleanup::EXIT_BREAK);
// Set up the method call (to `.next()`).
let method_call = MethodCall::expr(loop_info.id);
let method_type = loopback_bcx_in.tcx()
.method_map
.borrow()
.get(&method_call)
.ty;
let method_type = monomorphize_type(loopback_bcx_in, method_type);
let method_result_type = ty::ty_fn_ret(method_type);
let option_cleanup_scope = body_bcx_in.fcx.push_custom_cleanup_scope();
let option_cleanup_scope_id = cleanup::CustomScope(option_cleanup_scope);
// Compile the method call (to `.next()`).
let mut loopback_bcx_out = loopback_bcx_in;
let option_datum =
unpack_datum!(loopback_bcx_out,
datum::lvalue_scratch_datum(loopback_bcx_out,
method_result_type,
"loop_option",
false,
option_cleanup_scope_id,
(),
|(), bcx, lloption| {
let Result {
bcx: bcx,
val: _
} = callee::trans_call_inner(bcx,
Some(loop_info),
method_type,
|bcx, arg_cleanup_scope| {
meth::trans_method_callee(
bcx,
method_call,
None,
arg_cleanup_scope)
},
callee::ArgVals([lliterator]),
Some(expr::SaveIn(lloption)));
bcx
}));
// Check the discriminant; if the `None` case, exit the loop.
let option_representation = adt::represent_type(loopback_bcx_out.ccx(),
method_result_type);
let lldiscriminant = adt::trans_get_discr(loopback_bcx_out,
&*option_representation,
option_datum.val,
None);
let i1_type = Type::i1(loopback_bcx_out.ccx());
let llcondition = Trunc(loopback_bcx_out, lldiscriminant, i1_type);
CondBr(loopback_bcx_out, llcondition, body_bcx_in.llbb, cleanup_llbb);
// Now we're in the body. Unpack the `Option` value into the programmer-
// supplied pattern.
let llpayload = adt::trans_field_ptr(body_bcx_in,
&*option_representation,
option_datum.val,
1,
0);
let binding_cleanup_scope = body_bcx_in.fcx.push_custom_cleanup_scope();
let binding_cleanup_scope_id =
cleanup::CustomScope(binding_cleanup_scope);
let mut body_bcx_out =
_match::store_for_loop_binding(body_bcx_in,
pat,
llpayload,
binding_cleanup_scope_id);
// Codegen the body.
body_bcx_out = trans_block(body_bcx_out, body, expr::Ignore);
body_bcx_out.fcx.pop_custom_cleanup_scope(binding_cleanup_scope);
body_bcx_out =
body_bcx_out.fcx
.pop_and_trans_custom_cleanup_scope(body_bcx_out,
option_cleanup_scope);
Br(body_bcx_out, loopback_bcx_in.llbb);
// Codegen cleanups and leave.
next_bcx_in.fcx.pop_loop_cleanup_scope(loop_info.id);
next_bcx_in
}
pub fn trans_loop<'a>(bcx:&'a Block<'a>,
loop_id: ast::NodeId,
body: &ast::Block)
-> &'a Block<'a> {
let _icx = push_ctxt("trans_loop");
let fcx = bcx.fcx;
// bcx
// |
// body_bcx_in
// |
// body_bcx_out
//
// next_bcx
//
// Links between body_bcx_in and next_bcx are created by
// break statements.
let next_bcx_in = bcx.fcx.new_id_block("loop_exit", loop_id);
let body_bcx_in = bcx.fcx.new_id_block("loop_body", body.id);
fcx.push_loop_cleanup_scope(loop_id, [next_bcx_in, body_bcx_in]);
Br(bcx, body_bcx_in.llbb);
let body_bcx_out = trans_block(body_bcx_in, body, expr::Ignore);
Br(body_bcx_out, body_bcx_in.llbb);
fcx.pop_loop_cleanup_scope(loop_id);
if ty::type_is_bot(node_id_type(bcx, loop_id)) {
Unreachable(next_bcx_in);
}
return next_bcx_in;
}
pub fn trans_break_cont<'a>(bcx: &'a Block<'a>,
expr_id: ast::NodeId,
opt_label: Option<Ident>,
exit: uint)
-> &'a Block<'a> {
let _icx = push_ctxt("trans_break_cont");
let fcx = bcx.fcx;
if bcx.unreachable.get() {
return bcx;
}
// Locate loop that we will break to
let loop_id = match opt_label {
None => fcx.top_loop_scope(),
Some(_) => {
match bcx.tcx().def_map.borrow().find(&expr_id) {
Some(&def::DefLabel(loop_id)) => loop_id,
ref r => {
bcx.tcx().sess.bug(format!("{:?} in def-map for label",
r).as_slice())
}
}
}
};
// Generate appropriate cleanup code and branch
let cleanup_llbb = fcx.normal_exit_block(loop_id, exit);
Br(bcx, cleanup_llbb);
Unreachable(bcx); // anything afterwards should be ignored
return bcx;
}
pub fn trans_break<'a>(bcx: &'a Block<'a>,
expr_id: ast::NodeId,
label_opt: Option<Ident>)
-> &'a Block<'a> {
return trans_break_cont(bcx, expr_id, label_opt, cleanup::EXIT_BREAK);
}
pub fn trans_cont<'a>(bcx: &'a Block<'a>,
expr_id: ast::NodeId,
label_opt: Option<Ident>)
-> &'a Block<'a> {
return trans_break_cont(bcx, expr_id, label_opt, cleanup::EXIT_LOOP);<|fim▁hole|>
pub fn trans_ret<'a>(bcx: &'a Block<'a>,
e: Option<Gc<ast::Expr>>)
-> &'a Block<'a> {
let _icx = push_ctxt("trans_ret");
let fcx = bcx.fcx;
let mut bcx = bcx;
let dest = match (fcx.llretslotptr.get(), e) {
(Some(_), Some(e)) => {
let ret_ty = expr_ty(bcx, &*e);
expr::SaveIn(fcx.get_ret_slot(bcx, ret_ty, "ret_slot"))
}
_ => expr::Ignore,
};
match e {
Some(x) => {
bcx = expr::trans_into(bcx, &*x, dest);
match dest {
expr::SaveIn(slot) if fcx.needs_ret_allocas => {
Store(bcx, slot, fcx.llretslotptr.get().unwrap());
}
_ => {}
}
}
_ => {}
}
let cleanup_llbb = fcx.return_exit_block();
Br(bcx, cleanup_llbb);
Unreachable(bcx);
return bcx;
}
pub fn trans_fail<'a>(
bcx: &'a Block<'a>,
sp: Span,
fail_str: InternedString)
-> &'a Block<'a> {
let ccx = bcx.ccx();
let _icx = push_ctxt("trans_fail_value");
let v_str = C_str_slice(ccx, fail_str);
let loc = bcx.sess().codemap().lookup_char_pos(sp.lo);
let filename = token::intern_and_get_ident(loc.file.name.as_slice());
let filename = C_str_slice(ccx, filename);
let line = C_int(ccx, loc.line as int);
let expr_file_line_const = C_struct(ccx, &[v_str, filename, line], false);
let expr_file_line = consts::const_addr_of(ccx, expr_file_line_const, ast::MutImmutable);
let args = vec!(expr_file_line);
let did = langcall(bcx, Some(sp), "", FailFnLangItem);
let bcx = callee::trans_lang_call(bcx,
did,
args.as_slice(),
Some(expr::Ignore)).bcx;
Unreachable(bcx);
return bcx;
}
pub fn trans_fail_bounds_check<'a>(
bcx: &'a Block<'a>,
sp: Span,
index: ValueRef,
len: ValueRef)
-> &'a Block<'a> {
let ccx = bcx.ccx();
let _icx = push_ctxt("trans_fail_bounds_check");
// Extract the file/line from the span
let loc = bcx.sess().codemap().lookup_char_pos(sp.lo);
let filename = token::intern_and_get_ident(loc.file.name.as_slice());
// Invoke the lang item
let filename = C_str_slice(ccx, filename);
let line = C_int(ccx, loc.line as int);
let file_line_const = C_struct(ccx, &[filename, line], false);
let file_line = consts::const_addr_of(ccx, file_line_const, ast::MutImmutable);
let args = vec!(file_line, index, len);
let did = langcall(bcx, Some(sp), "", FailBoundsCheckFnLangItem);
let bcx = callee::trans_lang_call(bcx,
did,
args.as_slice(),
Some(expr::Ignore)).bcx;
Unreachable(bcx);
return bcx;
}<|fim▁end|> | } |
<|file_name|>data_api.py<|end_file_name|><|fim▁begin|>from colab.plugins.utils.proxy_data_api import ProxyDataAPI
<|fim▁hole|>class JenkinsDataAPI(ProxyDataAPI):
def fetch_data(self):
pass<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#Made by Kerb
import sys
from com.l2scoria import Config
from com.l2scoria.gameserver.model.quest import State
from com.l2scoria.gameserver.model.quest import QuestState
from com.l2scoria.gameserver.model.quest.jython import QuestJython as JQuest
qn = "644_GraveRobberAnnihilation"
#Drop rate
DROP_CHANCE = 75
#Npc
KARUDA = 32017<|fim▁hole|>ORC_GOODS = 8088
#Rewards
REWARDS = {
"1" : [1865 , 30], #Varnish
"2" : [1867 , 40], #Animal Skin
"3" : [1872 , 40], #Animal Bone
"4" : [1871 , 30], #Charcoal
"5" : [1870 , 30], #Coal
"6" : [1869 , 30], #Iron Ore
}
#Mobs
MOBS = [ 22003,22004,22005,22006,22008 ]
class Quest (JQuest) :
def onEvent (self,event,st) :
cond = st.getInt("cond")
htmltext = event
if event == "32017-03.htm" :
if st.getPlayer().getLevel() < 20 :
htmltext = "32017-02.htm"
st.exitQuest(1)
else :
st.set("cond","1")
st.setState(STARTED)
st.playSound("ItemSound.quest_accept")
elif event in REWARDS.keys() :
item, amount = REWARDS[event]
st.takeItems(ORC_GOODS,-1)
st.giveItems(item, amount)
st.playSound("ItemSound.quest_finish")
st.exitQuest(1)
return
return htmltext
def onTalk (self,npc,player):
htmltext = "<html><body>You are either not carrying out your quest or don't meet the criteria.</body></html>"
st = player.getQuestState(qn)
if st :
npcId = npc.getNpcId()
id = st.getState()
cond = st.getInt("cond")
if cond == 0 :
htmltext = "32017-01.htm"
elif cond == 1 :
htmltext = "32017-04.htm"
elif cond == 2 :
if st.getQuestItemsCount(ORC_GOODS) >= 120 :
htmltext = "32017-05.htm"
else :
htmltext = "32017-04.htm"
return htmltext
def onKill(self,npc,player,isPet):
partyMember = self.getRandomPartyMember(player,"1")
if not partyMember: return
st = partyMember.getQuestState(qn)
if st :
if st.getState() == STARTED :
count = st.getQuestItemsCount(ORC_GOODS)
if st.getInt("cond") == 1 and count < 120 :
chance = DROP_CHANCE * Config.RATE_DROP_QUEST
numItems, chance = divmod(chance,100)
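                # Worked example (illustrative): DROP_CHANCE 75 with a x2 quest
                # drop rate gives chance = 150, and divmod(150,100) -> (1, 50):
                # one guaranteed drop plus a 50% roll below for one more.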
if st.getRandom(100) < chance :
numItems += 1
if numItems :
if count + numItems >= 120 :
numItems = 120 - count
st.playSound("ItemSound.quest_middle")
st.set("cond","2")
else:
st.playSound("ItemSound.quest_itemget")
st.giveItems(ORC_GOODS,int(numItems))
return
QUEST = Quest(644, qn, "Grave Robber Annihilation")
CREATED = State('Start', QUEST)
STARTED = State('Started', QUEST)
QUEST.setInitialState(CREATED)
QUEST.addStartNpc(KARUDA)
QUEST.addTalkId(KARUDA)
for i in MOBS :
QUEST.addKillId(i)
STARTED.addQuestDrop(KARUDA,ORC_GOODS,1)<|fim▁end|> | #Items |
<|file_name|>FilterByID_dict_parse.py<|end_file_name|><|fim▁begin|>#-------------------------------------------------------------------------------
# Name: module1
# Purpose:
#
# Author: Eli
#
# Created: 06/04/2014
# Copyright: (c) Eli 2014
# Licence: <your licence>
#-------------------------------------------------------------------------------
def main():
pass
if __name__ == '__main__':
main()
import sys
#This script filters a data file by id's listed one per line in another file
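# Illustrative (assumed) input layout for both files: tab-separated rows with
# the ID in the first column followed by value columns (e.g. logFC stats):
#   cin-miR-216 <tab> -1.25 <tab> 0.003 <tab> up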
ids = open("C:/rnaseq/mirna_data/clusters/10rep_redo_deseq-edger/DEseq2_1cpm3redo_nopara2_logFCall.txt", "r")
#Take header from ID file & initialize empty dict
head_ids = ids.readline().strip("\n")
idlist1 = {}
#id_count = 0
#Make dict of ID's (key) & selected variables/annotations (values)
for line in ids:
name = line.strip('\n').split('\t')[0]
#name = name[4:]
#if len(name.split('-')) > 3:
# name = '-'.join(name.split('-')[1:])
#arm = name.split('-')[-1]
#name = '-'.join(['-'.join(name.split('-')[0:2]), arm])
    name = name[4:] if name.startswith('cin-') else name  # strip('cin-') removes any c/i/n/- chars, not the prefix
#print name
#name = name[-5:]
#values = '\t'.join(line.strip('\n').split('\t')[1:3])
values = '\t'.join(line.strip('\n').split('\t')[1:4])
#if "ENSCINP" in values:
# values2 = values[7:]
# values = "ENSCINT" + values2
#values = '\t'.join(line.strip('\n').split('\t')[2:])
#values = values[0:-3]
if name in idlist1 and len(name) > 0:
if values in idlist1[name]:
continue
else:
idlist1[name].append(values)
elif len(name) > 0:
idlist1[name] = [values]
#id_count+=1
#if id_count%1000==0:
# print id_count
ids.close()  # parentheses added: bare "ids.close" was a no-op and never closed the file
#Debugging code below:
#print 'idlist1:', len(idlist1)
#sorted(idlist1)
#print idlist1
idlist1 = ['miR-216']
data = open("C:/rnaseq/coexpression/mirna-mrna/logfc_pearson/1cpm3_5rpkm3_redo2_edger_logfcValues_pearson_targetscan_deseq2logfc_mirs2.txt", "r")
#Output merged header & initialize retrieved list + row counter
#sys.stdout.write("LogFC.consensus" + '\t' + data.readline())
#sys.stdout.write("LogFC.consensus" + '\t' + '\t'.join(data.readline().split('\t')[0:3]) + '\n')
#sys.stdout.write(data.readline())
#data.readline()
matched = 0
idlist2 = {}
out = 0
#Match ID's between lists and return associated variables
for line in data:
#print line
name = line.strip('\n').split('\t')[6]
#print name
#name = name.split('|')[3].split('.')[0] # for first ID from BLAST target
#name = name[0:7]
#if name[-1].isalpha():
# name = name[0:-1]
#print name
#variables = line.strip('\n').split('\t')[5,9,10]
#idlist2[name] = line.split('\t')[1]
descr = line.strip('\n').split('\t')[1]
#if "," in descr:
# descr = descr.split(',')[0]
#name = line[1:20] # for trimmed encin gene name
#kh = '.'.join(line.split('\t')[1].split(':')[1].split('.')[0:4])
#Loop through input dict ID's and search for "name" in associated variables
#for item in idlist1: #Loop through keys (refseq)
if name in idlist1: #match primary ID's
#for item in idlist1[name].split(' '):
        sys.stdout.write('\t'.join(idlist1) + '\t' + line)  # join the list; joining idlist1[0] would tab-separate the characters of 'miR-216'
#EXCHANGE ID'S BUT KEEP REST OF LINE/DESCRIPTION
# sys.stdout.write(descr + '\t' + '\t'.join(idlist1[name]) + '\n')
#else:
# sys.stdout.write(descr + '\t' + name + '\n')
#print idlist1[name]
#sys.stdout.write(line.strip('\n') + '\t' + '\t'.join(idlist1[name]) + '\n')
#continue
#matched +=1
else:
sys.stdout.write(line)
#if name in idlist1[item]: #Check for each ID in the name variable
# idlist2[name] = variables
# values = idlist1[item]
# stop = 1
#while stop <= len(values):
# if descr in idlist1[name]:
# sys.stdout.write(line)
# out+=1
#print out
#Return items in matched list (idlist2) using associations from idlist1
#for mir in idlist1:
# if mir in idlist2:
# sys.stdout.write(mir + '\t' + '\t'.join(idlist2[mir]) + '\n')
# for mrna in idlist1[mir]:
# if mrna in idlist2:
# sys.stdout.write(mrna+ '\t' + '\t'.join(idlist2[mrna]) + '\n')
#if len(idlist1[name]) > 1:
# for value in idlist1[name]: #Print all values on separate lines
# sys.stdout.write(value + '\t' + line)
#sys.stdout.write(descr + '\t' + value + '\t' + name + '\t' + '\t'.join(variables) + '\n')
# sys.stdout.write(value + '\t' + '\t'.join(line.split('\t')[0:]))
#sys.stdout.write(value + '\t' + '\t'.join(line.split('\t')[0:3]) + '\n')
# out+=1
#else:
# sys.stdout.write('\t'.join(idlist1[name]) + '\t' + line)
#sys.stdout.write(descr + '\t' + ".\t".join(idlist1[name]) + '\t' + name + '\t' + '\t'.join(variables) + '\n')
#print idlist1[name]
# sys.stdout.write(('\t'.join(idlist1[name]) + '\t' + '\t'.join(line.split('\t')[0:])))
#sys.stdout.write(name + '\t' + '\t'.join(idlist1[name]) + '\t' + '\t'.join(line.split('\t')[2:]))
# out+=1
#print matched, out
#print gene
#print idlist1[item]
# sys.stdout.write(value + "\t" + name + '\t' + line)#'\t' + '\t'.join(line.split('\t')[2:]))
# stop+=1
#continue
#if name in idlist1:
# if descr in idlist1[name]:
# sys.stdout.write(line)
# descr = idlist1[name]
<|fim▁hole|> #sys.stdout.write('\t'.join(line.split('\t')[0:2]) + '\t' + descr + '\n')
#del idlist1[name]
#else:
# pass
#sys.stdout.write(line + '\n')
#if name in idlist2:
# pass
#else:
#idlist2.append(name)
#idlist1.remove(name)
#print line
#count+=1
#Code for checking remaining values in ID list
#for item in idlist1:
# print "bakow!"
# sys.stdout.write(item + '\t' + idlist2[item] + '\t' + idlist1[item] + '\n')
#else:
# print line.split('\t')[0]
#print len(idlist1), len(idlist2)
#print len(idlist1)-len(idlist2)
#print len(idlist1)
#sorted(idlist2)
#print idlist1
#for item in idlist2:
# if item in idlist1:
# idlist1.remove(item)
#print 'idlist1-idlist2', len(idlist1)
#for item in idlist1:
# print item
#cross check input and output lists
#idlist3= []
#for thing in idlist1:
# if thing in idlist2:
# pass
# else:
# idlist3.append(thing)
#print len(idlist3)
#print len(idlist4)
#idlist4 = [x for x in idlist1 if x not in idlist2]<|fim▁end|> | # sys.stdout.write('\t'.join(idlist1[name]) + '\t' + '\t'.join(line.split('\t')[2:]))
|
<|file_name|>htmldialogelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::HTMLDialogElementBinding;
use dom::bindings::codegen::Bindings::HTMLDialogElementBinding::HTMLDialogElementMethods;
use dom::bindings::js::Root;
use dom::document::Document;
use dom::htmlelement::HTMLElement;
use dom::node::Node;
use string_cache::Atom;
use util::str::DOMString;
#[dom_struct]
pub struct HTMLDialogElement {
htmlelement: HTMLElement,
return_value: DOMRefCell<DOMString>,
}
impl HTMLDialogElement {
fn new_inherited(localName: Atom,
prefix: Option<DOMString>,
document: &Document) -> HTMLDialogElement {
HTMLDialogElement {
htmlelement:<|fim▁hole|> HTMLElement::new_inherited(localName, prefix, document),
return_value: DOMRefCell::new(DOMString::new()),
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: Atom,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLDialogElement> {
let element = HTMLDialogElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLDialogElementBinding::Wrap)
}
}
impl HTMLDialogElementMethods for HTMLDialogElement {
// https://html.spec.whatwg.org/multipage/#dom-dialog-open
make_bool_getter!(Open, "open");
// https://html.spec.whatwg.org/multipage/#dom-dialog-open
make_bool_setter!(SetOpen, "open");
// https://html.spec.whatwg.org/multipage/#dom-dialog-returnvalue
fn ReturnValue(&self) -> DOMString {
let return_value = self.return_value.borrow();
return_value.clone()
}
// https://html.spec.whatwg.org/multipage/#dom-dialog-returnvalue
fn SetReturnValue(&self, return_value: DOMString) {
*self.return_value.borrow_mut() = return_value;
}
}<|fim▁end|> | |
<|file_name|>model_describe.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Provides textual descriptions for :mod:`behave.model` elements.
"""
from behave.textutil import indent
# -----------------------------------------------------------------------------
# FUNCTIONS:<|fim▁hole|># -----------------------------------------------------------------------------
def escape_cell(cell):
"""
Escape table cell contents.
:param cell: Table cell (as unicode string).
:return: Escaped cell (as unicode string).
"""
cell = cell.replace(u'\\', u'\\\\')
cell = cell.replace(u'\n', u'\\n')
cell = cell.replace(u'|', u'\\|')
return cell
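# Illustrative: escape_cell(u'a|b\nc') -> u'a\\|b\\nc', i.e. the pipe and the
# newline are backslash-escaped so the cell renders on a single table line.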
def escape_triple_quotes(text):
"""
Escape triple-quotes, used for multi-line text/doc-strings.
"""
return text.replace(u'"""', u'\\"\\"\\"')
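# Illustrative: escape_triple_quotes(u'x = """y"""') -> u'x = \\"\\"\\"y\\"\\"\\"'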
# -----------------------------------------------------------------------------
# CLASS:
# -----------------------------------------------------------------------------
class ModelDescriptor(object):
@staticmethod
def describe_table(table, indentation=None):
"""
Provide a textual description of the table (as used w/ Gherkin).
:param table: Table to use (as :class:`behave.model.Table`)
:param indentation: Line prefix to use (as string, if any).
:return: Textual table description (as unicode string).
"""
# -- STEP: Determine output size of all cells.
cell_lengths = []
all_rows = [table.headings] + table.rows
for row in all_rows:
lengths = [len(escape_cell(c)) for c in row]
cell_lengths.append(lengths)
# -- STEP: Determine max. output size for each column.
max_lengths = []
for col in range(0, len(cell_lengths[0])):
max_lengths.append(max([c[col] for c in cell_lengths]))
# -- STEP: Build textual table description.
lines = []
for r, row in enumerate(all_rows):
line = u"|"
for c, (cell, max_length) in enumerate(zip(row, max_lengths)):
pad_size = max_length - cell_lengths[r][c]
line += u" %s%s |" % (escape_cell(cell), " " * pad_size)
line += u"\n"
lines.append(line)
if indentation:
return indent(lines, indentation)
# -- OTHERWISE:
return u"".join(lines)
@staticmethod
def describe_docstring(doc_string, indentation=None):
"""
Provide a textual description of the multi-line text/triple-quoted
doc-string (as used w/ Gherkin).
:param doc_string: Multi-line text to use.
:param indentation: Line prefix to use (as string, if any).
:return: Textual table description (as unicode string).
"""
text = escape_triple_quotes(doc_string)
text = u'"""\n' + text + '\n"""\n'
if indentation:
text = indent(text, indentation)
return text
class ModelPrinter(ModelDescriptor):
def __init__(self, stream):
super(ModelPrinter, self).__init__()
self.stream = stream
def print_table(self, table, indentation=None):
self.stream.write(self.describe_table(table, indentation))
self.stream.flush()
def print_docstring(self, text, indentation=None):
self.stream.write(self.describe_docstring(text, indentation))
self.stream.flush()<|fim▁end|> | |
<|file_name|>release_stages.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Module containing the various stages that a builder runs."""
import json
import logging
import os
from chromite.cbuildbot import commands
from chromite.cbuildbot import failures_lib
from chromite.cbuildbot import cbuildbot_run
from chromite.cbuildbot.stages import artifact_stages
from chromite.lib import cros_build_lib
from chromite.lib import gs
from chromite.lib import osutils
from chromite.lib import parallel
from chromite.lib import timeout_util
class InvalidTestConditionException(Exception):
"""Raised when pre-conditions for a test aren't met."""
class SignerTestStage(artifact_stages.ArchivingStage):
"""Run signer related tests."""
option_name = 'tests'
config_name = 'signer_tests'
# If the signer tests take longer than 30 minutes, abort. They usually take
# five minutes to run.
SIGNER_TEST_TIMEOUT = 1800
def PerformStage(self):
if not self.archive_stage.WaitForRecoveryImage():
raise InvalidTestConditionException('Missing recovery image.')
with timeout_util.Timeout(self.SIGNER_TEST_TIMEOUT):
commands.RunSignerTests(self._build_root, self._current_board)
class SignerResultsTimeout(failures_lib.StepFailure):
"""The signer did not produce any results inside the expected time."""
class SignerFailure(failures_lib.StepFailure):
"""The signer returned an error result."""
class MissingInstructionException(failures_lib.StepFailure):
"""We didn't receive the list of signing instructions PushImage uploaded."""
class MalformedResultsException(failures_lib.StepFailure):
"""The Signer results aren't formatted as we expect."""
class PaygenSigningRequirementsError(failures_lib.StepFailure):
"""Paygen stage can't run if signing failed."""
class PaygenCrostoolsNotAvailableError(failures_lib.StepFailure):
"""Paygen stage can't run if signing failed."""
class PaygenNoPaygenConfigForBoard(failures_lib.StepFailure):
"""Paygen can't run with a release.conf config for the board."""
class PaygenStage(artifact_stages.ArchivingStage):
"""Stage that generates release payloads.
If this stage is created with a 'channels' argument, it can run
  independently. Otherwise, it's dependent on values queued up by
the ArchiveStage (push_image).
"""
option_name = 'paygen'
config_name = 'paygen'
# Poll for new results every 30 seconds.
SIGNING_PERIOD = 30
# Timeout for PushImage to finish uploading images. 2 hours in seconds.
PUSHIMAGE_TIMEOUT = 2 * 60 * 60
# Timeout for the signing process. 2 hours in seconds.
SIGNING_TIMEOUT = 2 * 60 * 60
FINISHED = 'finished'
def __init__(self, builder_run, board, archive_stage, channels=None,
**kwargs):
"""Init that accepts the channels argument, if present.
Args:
builder_run: See builder_run on ArchivingStage.
board: See board on ArchivingStage.
archive_stage: See archive_stage on ArchivingStage.
channels: Explicit list of channels to generate payloads for.
If empty, will instead wait on values from push_image.
Channels is normally None in release builds, and normally set
for trybot 'payloads' builds.
"""
super(PaygenStage, self).__init__(builder_run, board, archive_stage,
**kwargs)
self.signing_results = {}
self.channels = channels
def _HandleStageException(self, exc_info):
"""Override and don't set status to FAIL but FORGIVEN instead."""
exc_type, exc_value, _exc_tb = exc_info
# If Paygen fails to find anything needed in release.conf, treat it
# as a warning, not a failure. This is common during new board bring up.
if issubclass(exc_type, PaygenNoPaygenConfigForBoard):
return self._HandleExceptionAsWarning(exc_info)
# If the exception is a TestLabFailure that means we couldn't schedule the
# test. We don't fail the build for that. We do the CompoundFailure dance,
# because that's how we'll get failures from background processes returned
# to us.
if (issubclass(exc_type, failures_lib.TestLabFailure) or
(issubclass(exc_type, failures_lib.CompoundFailure) and
exc_value.MatchesFailureType(failures_lib.TestLabFailure))):
return self._HandleExceptionAsWarning(exc_info)
return super(PaygenStage, self)._HandleStageException(exc_info)
def _JsonFromUrl(self, gs_ctx, url):
"""Fetch a GS Url, and parse it as Json.
Args:
gs_ctx: GS Context.<|fim▁hole|>
Returns:
None if the Url doesn't exist.
Parsed Json structure if it did.
Raises:
MalformedResultsException if it failed to parse.
"""
try:
signer_txt = gs_ctx.Cat(url).output
except gs.GSNoSuchKey:
return None
try:
return json.loads(signer_txt)
except ValueError:
# We should never see malformed Json, even for intermediate statuses.
raise MalformedResultsException(signer_txt)
def _SigningStatusFromJson(self, signer_json):
"""Extract a signing status from a signer result Json DOM.
Args:
signer_json: The parsed json status from a signer operation.
Returns:
string with a simple status: 'passed', 'failed', 'downloading', etc,
or '' if the json doesn't contain a status.
"""
return (signer_json or {}).get('status', {}).get('status', '')
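  # Illustrative signer result this reads (shape inferred from the accessors
  # above): {'status': {'status': 'passed'}} -> 'passed'; a missing or
  # partial structure falls through to ''.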
def _CheckForResults(self, gs_ctx, instruction_urls_per_channel,
channel_notifier):
"""timeout_util.WaitForSuccess func to check a list of signer results.
Args:
gs_ctx: Google Storage Context.
instruction_urls_per_channel: Urls of the signer result files
we're expecting.
channel_notifier: BackgroundTaskRunner into which we push channels for
processing.
    Returns:
      True once results for all expected signing instructions have been
      collected; False while any are still outstanding.
"""
COMPLETED_STATUS = ('passed', 'failed')
# Assume we are done, then try to prove otherwise.
results_completed = True
for channel in instruction_urls_per_channel.keys():
self.signing_results.setdefault(channel, {})
if (len(self.signing_results[channel]) ==
len(instruction_urls_per_channel[channel])):
continue
for url in instruction_urls_per_channel[channel]:
# Convert from instructions URL to instructions result URL.
url += '.json'
# We already have a result for this URL.
if url in self.signing_results[channel]:
continue
signer_json = self._JsonFromUrl(gs_ctx, url)
if self._SigningStatusFromJson(signer_json) in COMPLETED_STATUS:
# If we find a completed result, remember it.
self.signing_results[channel][url] = signer_json
# If we don't have full results for this channel, we aren't done
# waiting.
if (len(self.signing_results[channel]) !=
len(instruction_urls_per_channel[channel])):
results_completed = False
continue
# If we reach here, the channel has just been completed for the first
# time.
# If all results 'passed' the channel was successfully signed.
channel_success = True
for signer_result in self.signing_results[channel].values():
if self._SigningStatusFromJson(signer_result) != 'passed':
channel_success = False
# If we successfully completed the channel, inform paygen.
if channel_success:
channel_notifier(channel)
return results_completed
def _WaitForPushImage(self):
"""Block until push_image data is ready.
Returns:
Push_image results, expected to be of the form:
{ 'channel': ['gs://instruction_uri1', 'gs://signer_instruction_uri2'] }
Raises:
MissingInstructionException: If push_image sent us an error, or timed out.
"""
try:
instruction_urls_per_channel = self.board_runattrs.GetParallel(
'instruction_urls_per_channel', timeout=self.PUSHIMAGE_TIMEOUT)
except cbuildbot_run.AttrTimeoutError:
instruction_urls_per_channel = None
# A value of None signals an error, either in PushImage, or a timeout.
if instruction_urls_per_channel is None:
raise MissingInstructionException('PushImage results not available.')
return instruction_urls_per_channel
def _WaitForSigningResults(self,
instruction_urls_per_channel,
channel_notifier):
"""Do the work of waiting for signer results and logging them.
Args:
instruction_urls_per_channel: push_image data (see _WaitForPushImage).
channel_notifier: BackgroundTaskRunner into which we push channels for
processing.
Raises:
ValueError: If the signer result isn't valid json.
RunCommandError: If we are unable to download signer results.
"""
gs_ctx = gs.GSContext(dry_run=self._run.debug)
try:
cros_build_lib.Info('Waiting for signer results.')
timeout_util.WaitForReturnTrue(
self._CheckForResults,
func_args=(gs_ctx, instruction_urls_per_channel, channel_notifier),
timeout=self.SIGNING_TIMEOUT, period=self.SIGNING_PERIOD)
except timeout_util.TimeoutError:
msg = 'Image signing timed out.'
cros_build_lib.Error(msg)
cros_build_lib.PrintBuildbotStepText(msg)
raise SignerResultsTimeout(msg)
# Log all signer results, then handle any signing failures.
failures = []
for url_results in self.signing_results.values():
for url, signer_result in url_results.iteritems():
result_description = os.path.basename(url)
cros_build_lib.PrintBuildbotStepText(result_description)
cros_build_lib.Info('Received results for: %s', result_description)
cros_build_lib.Info(json.dumps(signer_result, indent=4))
status = self._SigningStatusFromJson(signer_result)
if status != 'passed':
failures.append(result_description)
cros_build_lib.Error('Signing failed for: %s', result_description)
if failures:
cros_build_lib.Error('Failure summary:')
for failure in failures:
cros_build_lib.Error(' %s', failure)
raise SignerFailure(failures)
def PerformStage(self):
"""Do the work of generating our release payloads."""
# Convert to release tools naming for boards.
board = self._current_board.replace('_', '-')
version = self._run.attrs.release_tag
assert version, "We can't generate payloads without a release_tag."
logging.info("Generating payloads for: %s, %s", board, version)
# Test to see if the current board has a Paygen configuration. We do
    # this here, not in the sub-process, so we don't have to pass back a
# failure reason.
try:
from crostools.lib import paygen_build_lib
paygen_build_lib.ValidateBoardConfig(board)
except paygen_build_lib.BoardNotConfigured:
raise PaygenNoPaygenConfigForBoard(
'No release.conf entry was found for board %s. Get a TPM to fix.' %
board)
except ImportError:
raise PaygenCrostoolsNotAvailableError()
with parallel.BackgroundTaskRunner(self._RunPaygenInProcess) as per_channel:
def channel_notifier(channel):
per_channel.put((channel, board, version, self._run.debug,
self._run.config.paygen_skip_testing,
self._run.config.paygen_skip_delta_payloads))
if self.channels:
logging.info("Using explicit channels: %s", self.channels)
# If we have an explicit list of channels, use it.
for channel in self.channels:
channel_notifier(channel)
else:
instruction_urls_per_channel = self._WaitForPushImage()
self._WaitForSigningResults(instruction_urls_per_channel,
channel_notifier)
def _RunPaygenInProcess(self, channel, board, version, debug,
skip_test_payloads, skip_delta_payloads):
"""Helper for PaygenStage that invokes payload generation.
This method is intended to be safe to invoke inside a process.
Args:
channel: Channel of payloads to generate ('stable', 'beta', etc)
board: Board of payloads to generate ('x86-mario', 'x86-alex-he', etc)
version: Version of payloads to generate.
debug: Flag telling if this is a real run, or a test run.
skip_test_payloads: Skip generating test payloads, and auto tests.
skip_delta_payloads: Skip generating delta payloads.
"""
# TODO(dgarrett): Remove when crbug.com/341152 is fixed.
# These modules are imported here because they aren't always available at
# cbuildbot startup.
# pylint: disable=F0401
try:
from crostools.lib import gspaths
from crostools.lib import paygen_build_lib
except ImportError:
# We can't generate payloads without crostools.
raise PaygenCrostoolsNotAvailableError()
# Convert to release tools naming for channels.
if not channel.endswith('-channel'):
channel += '-channel'
with osutils.TempDir(sudo_rm=True) as tempdir:
# Create the definition of the build to generate payloads for.
build = gspaths.Build(channel=channel,
board=board,
version=version)
try:
# Generate the payloads.
self._PrintLoudly('Starting %s, %s, %s' % (channel, version, board))
paygen_build_lib.CreatePayloads(build,
work_dir=tempdir,
dry_run=debug,
run_parallel=True,
run_on_builder=True,
skip_delta_payloads=skip_delta_payloads,
skip_test_payloads=skip_test_payloads,
skip_autotest=skip_test_payloads)
except (paygen_build_lib.BuildFinished,
paygen_build_lib.BuildLocked,
paygen_build_lib.BuildSkip) as e:
# These errors are normal if it's possible for another process to
# work on the same build. This process could be a Paygen server, or
# another builder (perhaps by a trybot generating payloads on request).
#
# This means the build was finished by the other process, is already
# being processed (so the build is locked), or that it's been marked
# to skip (probably done manually).
cros_build_lib.Info('Paygen skipped because: %s', e)<|fim▁end|> | url: Url to fetch and parse. |
<|file_name|>starter_imagelist.cpp<|end_file_name|><|fim▁begin|>/*
* starter_imagelist.cpp
*
* Created on: Nov 23, 2010
* Author: Ethan Rublee
*
* A starter sample for using opencv, load up an imagelist
* that was generated with imagelist_creator.cpp
* easy as CV_PI right?<|fim▁hole|> */
#include "opencv2/imgcodecs.hpp"
#include "opencv2/highgui.hpp"
#include <iostream>
#include <vector>
using namespace cv;
using namespace std;
//hide the local functions in an unnamed namespace
namespace
{
void help(char** av)
{
cout << "\nThis program gets you started being able to read images from a list in a file\n"
"Usage:\n./" << av[0] << " image_list.yaml\n"
<< "\tThis is a starter sample, to get you up and going in a copy pasta fashion.\n"
<< "\tThe program reads in an list of images from a yaml or xml file and displays\n"
<< "one at a time\n"
<< "\tTry running imagelist_creator to generate a list of images.\n"
"Using OpenCV version %s\n" << CV_VERSION << "\n" << endl;
}
bool readStringList(const string& filename, vector<string>& l)
{
l.resize(0);
FileStorage fs(filename, FileStorage::READ);
if (!fs.isOpened())
return false;
FileNode n = fs.getFirstTopLevelNode();
if (n.type() != FileNode::SEQ)
return false;
FileNodeIterator it = n.begin(), it_end = n.end();
for (; it != it_end; ++it)
l.push_back((string)*it);
return true;
}
int process(vector<string> images)
{
namedWindow("image", WINDOW_KEEPRATIO); //resizable window;
for (size_t i = 0; i < images.size(); i++)
{
Mat image = imread(images[i], IMREAD_GRAYSCALE); // do grayscale processing?
imshow("image",image);
cout << "Press a key to see the next image in the list." << endl;
waitKey(); // wait indefinitely for a key to be pressed
}
return 0;
}
}
int main(int ac, char** av)
{
cv::CommandLineParser parser(ac, av, "{help h||}{@input||}");
if (parser.has("help"))
{
help(av);
return 0;
}
std::string arg = parser.get<std::string>("@input");
if (arg.empty())
{
help(av);
return 1;
}
vector<string> imagelist;
if (!readStringList(arg,imagelist))
{
cerr << "Failed to read image list\n" << endl;
help(av);
return 1;
}
return process(imagelist);
}<|fim▁end|> | |
<|file_name|>docfix.js<|end_file_name|><|fim▁begin|>(function (w) {
var $ = w.$,
d = document,
e = d.documentElement,
g = d.getElementsByTagName('body')[0],
my = w.ilm,
contid = 0;
function fixCharts(width, fn) {
$(fn).css("width", width);
$(d).ready(function () {
var inner = $(fn).width();
setTimeout(function () {
$.each(w.ilm.charts, function (i, obj) {
obj.setSize($(fn).width() - 6, obj.containerHeight, false);
});
}, 500);
});
}
function setWidth() {
console.log(w.ilm.getWidth());
var inner = ((w.ilm.getWidth() < 1024) ? "100" : "50") + "%";
$('.float').each(function () {
fixCharts(inner, this);
});
}
w.ilm.popup="";
w.ilm.Options = function(state){
var t = this, f = $("#lingid"), g = $("#sl");
g.html("Seaded (klikk varjamiseks)");
my.settingTemplate(f);
return false;
};
w.ilm.Lingid = function (state) {
var t = this, f = $("#lingid"), g = $("#sl");
g.html("Lingid (klikk varjamiseks)");
f.html(w.ilm.lingid.process(w.ilm.lingid.JSON));
return false;
};
w.ilm.Popup = function(name, cb) {
var v = $("#popup");
if(!v) return false;
var b = $("#bghide"), hh = $('.navbar').height(), y = w.innerHeight || e.clientHeight || g.clientHeight,
act = v.attr("name"),swp = 0;
if (act) $("#ilm-" + act).parent().removeClass("active");
if(name && (!act || (act && act !== name))) {
b.css({height : $(d).height(), position : 'absolute', left : 0, top : 0}).show();
v.attr("name", name);
$("#ilm-" + name).parent().addClass("active");
if(cb) cb.call(this, name);
swp = ((y/2) - (v.height()/2)) + $(w).scrollTop();
v.css({top : (swp > 0 ? swp : hh)}).show();
}
else if(v.is(":visible")) {
v.hide();
b.hide();
v.attr("name", "");
}
return false;
};
$(d).ready(function () {
$("#pagelogo").html(ilm.logo);
//setWidth();
$("#ilm-viited").click(function(e){
//ilm.showLinks();
var b = $(e.target);
if(w.ilm.linksasmenu) {
b.attr({"data-toggle":"dropdown"});
b.addClass("dropdown-toggle");
var a = $(".ilm-viited-dropdown");
a.html(w.ilm.lingid.process(w.ilm.lingid.JSON));
a.height(w.innerHeight-(w.innerHeight/3));
} else {
b.removeClass("dropdown-toggle");
b.removeAttr("data-toggle");
w.ilm.Popup("viited",w.ilm.Lingid);
}
//return false;
});
$("#ilm-seaded").click(function(e){
my.settingTemplate("#ilm-seaded-dropdown");
//w.ilm.Popup("seaded",w.ilm.Options);
//return false;
});
$("#fctitle").on("click",function(){
w.ilm.setEstPlace(w.ilm.nextPlace());
//w.ilm.reloadest();
return false;
});
$("#datepicker").datepicker({
dateFormat: 'yy-mm-dd',
timezone: "+0"+(((my.addDst)?1:0)+2)+"00",
onSelect: function(dateText, inst) {
w.ilm.setDate(dateText);
//w.ilm.reload();
}
});
$("#curtime").on("click",function(){
$("#datepicker").datepicker('show');
});
$("#curplace").on("click",function(){
w.ilm.setCurPlace(w.ilm.nextCurPlace());
//w.ilm.reload();
return false;
});
w.ilm.loadBase();
w.ilm.loadInt(1000 * 60); // 1min
w.ilm.loadEstInt(1000 * 60 * 10); // 10min
$('#backgr').css({"display" : "block"});
$(w).on("keydown", function (e) {
//w.console.log("pressed" + e.keyCode);
var obj = $("#popup");
if(!obj) return;
if (e.keyCode === 27 || e.keyCode === 13 ) {
w.ilm.Popup("lingid", w.ilm.Lingid);
}
/*if (e.keyCode === 27 && obj.style.display === "block") {<|fim▁hole|> w.ilm.showLinks();
}*/
});
$(w).on('hashchange', function() {
console.log("hash changed " + w.location.hash);
w.ilm.hash_data();
});
});
})(window);<|fim▁end|> | w.ilm.showLinks();
}
else if (e.keyCode === 13 && obj.style.display === "none") { |
<|file_name|>markdownx-widget.js<|end_file_name|><|fim▁begin|>let widget = document.getElementsByClassName('markdownx-widget')[0];
let element = document.getElementsByClassName('markdownx');
let element_divs = element[0].getElementsByTagName('div');
let div_editor = element_divs[0];
let div_preview = element_divs[1];
let navbar_bar = document.getElementsByClassName('markdownx-toolbar')[0].getElementsByTagName('li');
let btn_preview = navbar_bar[0];
let btn_fullscreen = navbar_bar[1];
var turn_active = function(element) {
    value = element.className;  // fixed: 'classname' is undefined; the DOM property is className
classval = element.getAttribute('class');
if (value.indexOf('active') >= 0) {
        element.classList.remove('active');<|fim▁hole|>
        value += ' active';
}
}
var refresh_pretty = function() {
    // re-render the prettify syntax highlighting after every update
PR.prettyPrint();
};
var enable_preview = function() {
var class_btn_preview = btn_preview.getAttribute('class');
var index = class_btn_preview.indexOf('active');
if (index >= 0) {
btn_preview.setAttribute('class', '');
div_editor.setAttribute('class', 'col-md-12 child-left');
div_preview.style.display = 'none';
}
else {
btn_preview.setAttribute('class', 'active');
div_editor.setAttribute('class', 'col-md-6 child-left');
div_preview.style.display = 'block';
}
};
var enable_fullscreen = function() {
var class_btn_fullscreen = btn_fullscreen.getAttribute('class');
var index = class_btn_fullscreen.indexOf('active');
if (index >= 0) {
btn_fullscreen.setAttribute('class', '');
widget.setAttribute('class', 'markup-widget');
}
else{
btn_fullscreen.setAttribute('class', 'active');
widget.setAttribute('class', 'markup-widget fullscreen');
}
}
Object.keys(element).map(key =>
element[key].addEventListener('markdownx.update', refresh_pretty)
);
btn_preview.addEventListener('click', enable_preview);
btn_fullscreen.addEventListener('click', enable_fullscreen);<|fim▁end|> | } |
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>from rest_framework import serializers
from workers.models import (TaskConfig,
Task,
Job,
TaskProducer)
from grabbers.serializers import (MapperSerializer,
SequenceSerializer)
from grabbers.models import Sequence
from drivers.serializers import DriverSerializer
from drivers.models import Driver
from django.core.exceptions import ObjectDoesNotExist
# == helpers ==
from delphi.utils.lizers import _required_fields, _get_or_instance
class TaskConfigDetailSerializer(serializers.ModelSerializer):
'''
'''
driver=DriverSerializer()
sequence=SequenceSerializer()
class Meta:
model=TaskConfig
#no proxy by api yet - missing fields::proxy,network_cap
fields=('name','driver','sequence','mapper','round_limit')
def create(self, validated_data):
'''
'''
name=validated_data['name']
try:
task_config=TaskConfig.objects.get(name=name)
print("[-] We already this guy in db")
return task_config
except TaskConfig.DoesNotExist:
task_config=TaskConfig(name=name)
<|fim▁hole|> driver=_get_or_instance(Driver,'name',
validated_data['driver'],DriverSerializer)
sequence=_get_or_instance(Sequence,'name',
validated_data['sequence'],
SequenceSerializer)
task_config.driver=driver
task_config.sequence=sequence
return task_config
class TaskConfigListSerializer(serializers.HyperlinkedModelSerializer):
'''
'''
class Meta:
model=TaskConfig
fields=('url', 'name', 'sequence', 'driver', 'mapper','round_limit')
extra_kwargs = {
'url': {'view_name': 'api:task_config-detail', 'lookup_field':'name'},
'driver': {'view_name': 'api:driver-detail', 'lookup_field':'name'},
'sequence':{'view_name': 'api:sequence-detail', 'lookup_field':'name'},
'mapper':{'view_name':'api:mapper-detail', 'lookup_field':'name'},
}
class JobSerializer(serializers.ModelSerializer):
'''
'''
class Meta:
model=Job
fields=('status','name')
class TaskSerializer(serializers.ModelSerializer):
'''
'''
config=TaskConfigDetailSerializer()
job=JobSerializer()
class Meta:
model=Task
fields=('target_url', 'config', 'status', 'job')<|fim▁end|> | |
<|file_name|>disable.py<|end_file_name|><|fim▁begin|>#
# Copyright 2013, 2018, 2019 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
""" Disable blocks module """
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import os
import re
import sys
import logging
from ..tools import CMakeFileEditor
from .base import ModTool, ModToolException
logger = logging.getLogger(__name__)
class ModToolDisable(ModTool):
""" Disable block (comments out CMake entries for files) """
name = 'disable'
description = 'Disable selected block in module.'
def __init__(self, blockname=None, **kwargs):
ModTool.__init__(self, blockname, **kwargs)
self.info['pattern'] = blockname
def validate(self):
""" Validates the arguments """
ModTool._validate(self)
if not self.info['pattern'] or self.info['pattern'].isspace():
raise ModToolException("Invalid pattern!")
def run(self):
""" Go, go, go! """
def _handle_py_qa(cmake, fname):
""" Do stuff for py qa """
cmake.comment_out_lines('GR_ADD_TEST.*'+fname)
self.scm.mark_file_updated(cmake.filename)
return True
def _handle_py_mod(cmake, fname):
""" Do stuff for py extra files """
try:
with open(self._file['pyinit']) as f:
initfile = f.read()
except IOError:
logger.warning("Could not edit __init__.py, that might be a problem.")
return False
pymodname = os.path.splitext(fname)[0]
initfile = re.sub(r'((from|import)\s+\b'+pymodname+r'\b)', r'#\1', initfile)
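            # e.g. with fname 'top_block.py' this turns both 'import top_block'
            # and 'from top_block import ...' in __init__.py into comments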
with open(self._file['pyinit'], 'w') as f:
f.write(initfile)
self.scm.mark_file_updated(self._file['pyinit'])
return False
def _handle_cc_qa(cmake, fname):
""" Do stuff for cc qa """
if self.info['version'] == '37':
cmake.comment_out_lines(r'\$\{CMAKE_CURRENT_SOURCE_DIR\}/'+fname)
fname_base = os.path.splitext(fname)[0]
ed = CMakeFileEditor(self._file['qalib']) # Abusing the CMakeFileEditor...
ed.comment_out_lines(r'#include\s+"{}.h"'.format(fname_base), comment_str='//')
ed.comment_out_lines(r'{}::suite\(\)'.format(fname_base), comment_str='//')
ed.write()
self.scm.mark_file_updated(self._file['qalib'])
elif self.info['version'] == '38':
fname_qa_cc = 'qa_{}.cc'.format(self.info['blockname'])
cmake.comment_out_lines(fname_qa_cc)
elif self.info['version'] == '36':
cmake.comment_out_lines('add_executable.*'+fname)
cmake.comment_out_lines('target_link_libraries.*'+os.path.splitext(fname)[0])
cmake.comment_out_lines('GR_ADD_TEST.*'+os.path.splitext(fname)[0])
self.scm.mark_file_updated(cmake.filename)
return True
def _handle_h_swig(cmake, fname):
""" Comment out include files from the SWIG file,
as well as the block magic """
with open(self._file['swig']) as f:
swigfile = f.read()
(swigfile, nsubs) = re.subn(r'(.include\s+"({}/)?{}")'.format(
self.info['modname'], fname),
r'//\1', swigfile)
if nsubs > 0:
logger.info("Changing {}...".format(self._file['swig']))
if nsubs > 1: # Need to find a single BLOCK_MAGIC
blockname = os.path.splitext(fname[len(self.info['modname'])+1:])[0]
if self.info['version'] in ('37', '38'):
blockname = os.path.splitext(fname)[0]
(swigfile, nsubs) = re.subn('(GR_SWIG_BLOCK_MAGIC2?.+{}.+;)'.format(blockname), r'//\1', swigfile)
if nsubs > 1:
logger.warning("Hm, changed more then expected while editing {}.".format(self._file['swig']))
with open(self._file['swig'], 'w') as f:<|fim▁hole|> self.scm.mark_file_updated(self._file['swig'])
return False
def _handle_i_swig(cmake, fname):
""" Comment out include files from the SWIG file,
as well as the block magic """
with open(self._file['swig']) as f:
swigfile = f.read()
blockname = os.path.splitext(fname[len(self.info['modname'])+1:])[0]
if self.info['version'] in ('37', '38'):
blockname = os.path.splitext(fname)[0]
swigfile = re.sub(r'(%include\s+"'+fname+'")', r'//\1', swigfile)
logger.info("Changing {}...".format(self._file['swig']))
swigfile = re.sub('(GR_SWIG_BLOCK_MAGIC2?.+'+blockname+'.+;)', r'//\1', swigfile)
with open(self._file['swig'], 'w') as f:
f.write(swigfile)
self.scm.mark_file_updated(self._file['swig'])
return False
# This portion will be covered by the CLI
if not self.cli:
self.validate()
else:
from ..cli import cli_input
# List of special rules: 0: subdir, 1: filename re match, 2: callback
special_treatments = (
('python', r'qa.+py$', _handle_py_qa),
('python', r'^(?!qa).+py$', _handle_py_mod),
('lib', r'qa.+\.cc$', _handle_cc_qa),
('include/{}'.format(self.info['modname']), r'.+\.h$', _handle_h_swig),
('include', r'.+\.h$', _handle_h_swig),
('swig', r'.+\.i$', _handle_i_swig)
)
for subdir in self._subdirs:
if self.skip_subdirs[subdir]:
continue
if self.info['version'] in ('37', '38') and subdir == 'include':
subdir = 'include/{}'.format(self.info['modname'])
try:
cmake = CMakeFileEditor(os.path.join(subdir, 'CMakeLists.txt'))
except IOError:
continue
logger.info("Traversing {}...".format(subdir))
filenames = cmake.find_filenames_match(self.info['pattern'])
yes = self.info['yes']
for fname in filenames:
file_disabled = False
if not yes:
ans = cli_input("Really disable {}? [Y/n/a/q]: ".format(fname)).lower().strip()
if ans == 'a':
yes = True
if ans == 'q':
sys.exit(0)
if ans == 'n':
continue
for special_treatment in special_treatments:
if special_treatment[0] == subdir and re.match(special_treatment[1], fname):
file_disabled = special_treatment[2](cmake, fname)
if not file_disabled:
cmake.disable_file(fname)
cmake.write()
self.scm.mark_files_updated((os.path.join(subdir, 'CMakeLists.txt'),))
logger.warning("Careful: 'gr_modtool disable' does not resolve dependencies.")<|fim▁end|> | f.write(swigfile) |
<|file_name|>glut_viewer_gui.cpp<|end_file_name|><|fim▁begin|>/*
* _____ _ _ _ _____ _ _ _ _____ _ __ _____ _____
* / ___| | | | | | | |_ _| | | / / | | | ____| | | / / | ____| | _ \
* | | | | | | | | | | | | / / | | | |__ | | __ / / | |__ | |_| |
* | | _ | | | | | | | | | | / / | | | __| | | / | / / | __| | _ /
* | |_| | | |___ | |_| | | | | |/ / | | | |___ | |/ |/ / | |___ | | \ \
* \_____/ |_____| \_____/ |_| |___/ |_| |_____| |___/|___/ |_____| |_| \_\
*
* Version 0.9
* Bruno Levy, August 2006
* INRIA, Project ALICE
*
*/
#include "glut_viewer_gui.h"
#include <GLsdk/gl_stuff.h>
#include <GL/glut.h>
#include <iostream>
#include <stdarg.h>
#include <math.h>
#include <stdio.h>
namespace GlutViewerGUI {
// ------------------- Primitives for internal use --------------------------------------
static void printf_xy(GLfloat x, GLfloat y, const char *format, ...) {
va_list args;
char buffer[1024], *p;
va_start(args, format);
vsprintf(buffer, format, args);
va_end(args);
glPushMatrix();
glTranslatef(x, y, 0);
for (p = buffer; *p; p++) {
glutStrokeCharacter(GLUT_STROKE_MONO_ROMAN, *p);
}
glPopMatrix();
}
static void circle_arc_vertices(
GLfloat x, GLfloat y, GLfloat r1, GLfloat r2, GLfloat theta1, GLfloat theta2
) {
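        // Angles here are in gradian-like units: a full turn is 400, so
        // theta * pi / 200 below converts them to radians for cos/sin.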
const GLfloat delta_theta = 1.0f ;
if(theta2 > theta1) {
for(GLfloat theta = theta1; theta <= theta2; theta += delta_theta) {
GLfloat theta_rad = theta * 3.14159f / 200.0f ;
glVertex2f(x + r1 * cos(theta_rad), y + r2 * sin(theta_rad)) ;
}
} else {
for(GLfloat theta = theta1; theta >= theta2; theta -= delta_theta) {
GLfloat theta_rad = theta * 3.14159f / 200.0f ;
glVertex2f(x + r1 * cos(theta_rad), y + r2 * sin(theta_rad)) ;
}
}
}
static void circle_arc_vertices(
GLfloat x, GLfloat y, GLfloat r, GLfloat theta1, GLfloat theta2
) {
circle_arc_vertices(x,y,r,r,theta1,theta2) ;
}
static void circle(GLfloat x, GLfloat y, GLfloat r) {
glBegin(GL_LINE_LOOP) ;
circle_arc_vertices(x,y,r,0.0f,400.0f) ;
glEnd() ;
}
static void fill_circle(GLfloat x, GLfloat y, GLfloat r) {
glBegin(GL_POLYGON) ;<|fim▁hole|> glEnd() ;
}
static void round_rectangle_vertices(
GLfloat x1, GLfloat y1, GLfloat x2, GLfloat y2, GLfloat r
) {
glVertex2f(x1+r,y2) ;
glVertex2f(x2-r,y2) ;
circle_arc_vertices(x2-r, y2-r, r, 100.0f, 0.0f) ;
glVertex2f(x2,y2-r) ;
glVertex2f(x2,y1+r) ;
circle_arc_vertices(x2-r, y1+r, r, 0.0f, -100.0f) ;
glVertex2f(x2-r,y1) ;
glVertex2f(x1+r,y1) ;
circle_arc_vertices(x1+r, y1+r, r, -100.0f, -200.0f) ;
glVertex2f(x1,y1+r) ;
glVertex2f(x1,y2-r) ;
circle_arc_vertices(x1+r, y2-r, r, -200.0f, -300.0f) ;
}
static void round_rectangle(GLfloat x1, GLfloat y1, GLfloat x2, GLfloat y2, GLfloat r) {
glBegin(GL_LINE_LOOP) ;
round_rectangle_vertices(x1, y1, x2, y2, r) ;
glEnd() ;
}
static void fill_round_rectangle(GLfloat x1, GLfloat y1, GLfloat x2, GLfloat y2, GLfloat r) {
glBegin(GL_POLYGON) ;
round_rectangle_vertices(x1, y1, x2, y2, r) ;
glEnd() ;
}
static void arrow_vertices(Direction dir, GLfloat x1, GLfloat y1, GLfloat x2, GLfloat y2) {
GLfloat x12 = 0.5 * (x1 + x2) ;
GLfloat y12 = 0.5 * (y1 + y2) ;
switch(dir) {
case DOWN:
glVertex2f(x1,y2) ;
glVertex2f(x2,y2) ;
glVertex2f(x12,y1) ;
break ;
case UP:
glVertex2f(x1,y1) ;
glVertex2f(x2,y1) ;
glVertex2f(x12,y2) ;
break ;
case LEFT:
glVertex2f(x2,y2) ;
glVertex2f(x2,y1) ;
glVertex2f(x1,y12) ;
break ;
case RIGHT:
glVertex2f(x1,y2) ;
glVertex2f(x1,y1) ;
glVertex2f(x2,y12) ;
break ;
}
}
static void arrow(Direction dir, GLfloat x1, GLfloat y1, GLfloat x2, GLfloat y2) {
glBegin(GL_LINE_LOOP) ;
arrow_vertices(dir, x1, y1, x2, y2) ;
glEnd() ;
}
static void fill_arrow(Direction dir, GLfloat x1, GLfloat y1, GLfloat x2, GLfloat y2) {
glBegin(GL_POLYGON) ;
arrow_vertices(dir, x1, y1, x2, y2) ;
glEnd() ;
}
// ------------------- Widget class --------------------------------------
Widget::Widget(
GLfloat x1, GLfloat y1, GLfloat x2, GLfloat y2
) : style_(BlueStyle), visible_(true), highlight_(false) {
set_geometry(x1, y1, x2, y2) ;
r_ = 100.0f ;
}
Widget::~Widget() {
}
void Widget::glColor(ColorRole role_in) {
ColorRole role = role_in ;
if(highlight_) {
switch(role_in) {
case Background:
role = Foreground ; break ;
case Middleground:
role = Middleground ; break ;
case Foreground:
role = Foreground ; break ;
}
}
switch(style_) {
case RedStyle: {
switch(role) {
case Background:
glColor4f(0.5f, 0.0f, 0.0f, 0.5f) ;
break ;
case Middleground:
glColor4f(1.0f, 0.5f, 0.5f, 1.0f) ;
break ;
case Foreground:
glColor4f(5.0f, 5.0f, 5.0f, 1.0f) ;
break ;
}
} break ;
case GreenStyle: {
switch(role) {
case Background:
glColor4f(0.0f, 0.5f, 0.0f, 0.5f) ;
break ;
case Middleground:
glColor4f(0.5f, 1.0f, 0.5f, 1.0f) ;
break ;
case Foreground:
glColor4f(5.0f, 5.0f, 5.0f, 1.0f) ;
break ;
}
} break ;
case BlueStyle: {
switch(role) {
case Background:
glColor4f(0.0f, 0.0f, 0.5f, 0.5f) ;
break ;
case Middleground:
glColor4f(0.5f, 0.5f, 1.0f, 1.0f) ;
break ;
case Foreground:
glColor4f(5.0f, 5.0f, 5.0f, 1.0f) ;
break ;
}
} break ;
case BWStyle: {
switch(role) {
case Background:
glColor4f(5.0f, 5.0f, 5.0f, 0.5f) ;
break ;
case Middleground:
glColor4f(0.2f, 0.2f, 0.2f, 1.0f) ;
break ;
case Foreground:
glColor4f(0.0f, 0.0f, 0.0f, 1.0f) ;
break ;
}
} break ;
}
}
GLboolean Widget::process_mouse_event(float x, float y, int button, GlutViewerEvent event) {
return contains(int(x),int(y)) ;
}
void Widget::draw() {
if(!visible()) { return ; }
draw_background() ;
draw_border() ;
}
void Widget::draw_background() {
glEnable(GL_BLEND) ;
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA) ;
glColor(Background) ;
fill_round_rectangle(x1_, y1_, x2_, y2_, r_) ;
glDisable(GL_BLEND) ;
}
void Widget::draw_border() {
glColor(Foreground) ;
glLineWidth(2.0) ;
round_rectangle(x1_, y1_, x2_, y2_, r_) ;
}
//______________________________________________________________________________________________________
Container* Container::main_widget_ = NULL ;
Container::~Container() {
if(main_widget_ == this) {
main_widget_ = NULL ;
}
for(size_t i=0; i<children_.size(); i++) {
delete children_[i] ;
}
}
void Container::draw() {
if(!visible()) {
return ;
}
for(size_t i=0; i<children_.size(); i++) {
children_[i]->draw() ;
}
}
GLboolean Container::process_mouse_event(float x, float y, int button, GlutViewerEvent event) {
if(!visible()) { return GL_FALSE ; }
switch(event) {
case GLUT_VIEWER_DOWN: {
for(size_t i=0; i<children_.size(); i++) {
if(children_[i]->contains(x,y) && children_[i]->process_mouse_event(x, y, button, event)) {
active_child_ = children_[i] ;
return GL_TRUE ;
}
}
} break ;
case GLUT_VIEWER_MOVE: {
if(active_child_ != NULL) {
return active_child_->process_mouse_event(x, y, button, event) ;
}
} break ;
case GLUT_VIEWER_UP: {
if(active_child_ != NULL) {
Widget* w = active_child_ ;
active_child_ = NULL ;
return w->process_mouse_event(x, y, button, event) ;
}
} break ;
}
return GL_FALSE ;
}
void Container::draw_handler() {
if(main_widget_ != NULL) {
main_widget_->draw() ;
}
}
GLboolean Container::mouse_handler(float x, float y, int button, enum GlutViewerEvent event) {
if(main_widget_ != NULL) {
return main_widget_->process_mouse_event(x, y, button, event) ;
}
return GL_FALSE ;
}
void Container::set_as_main_widget() {
main_widget_ = this ;
glut_viewer_set_overlay_func(draw_handler) ;
glut_viewer_set_mouse_func(mouse_handler) ;
}
//______________________________________________________________________________________________________
void Panel::draw() {
Widget::draw() ;
Container::draw() ;
}
GLboolean Panel::process_mouse_event(float x, float y, int button, GlutViewerEvent event) {
if(!visible() || !contains(x,y)) {
return GL_FALSE ;
}
return Container::process_mouse_event(x,y,button,event) ;
}
//______________________________________________________________________________________________________
void Button::draw() {
Widget::draw() ;
}
GLboolean Button::process_mouse_event(float x, float y, int button, GlutViewerEvent event) {
if(visible() && contains(x,y) && event == GLUT_VIEWER_DOWN) {
pressed() ;
highlight_ = GL_TRUE ;
return GL_TRUE ;
}
if(visible() && contains(x,y) && event == GLUT_VIEWER_UP) {
highlight_ = GL_FALSE ;
return GL_TRUE ;
}
return GL_FALSE ;
}
void Button::pressed() {
if(callback_ != NULL) {
callback_(client_data_) ;
}
}
//______________________________________________________________________________________________________
void Checkbox::draw() {
if(!visible()) { return ; }
Button::draw() ;
glColor(Foreground) ;
GLfloat x = 0.5f * (x1_ + x2_) ;
GLfloat y = 0.5f * (y1_ + y2_) ;
if(toggle_) {
glColor(Middleground) ;
fill_circle(x,y,d_) ;
glColor(Foreground) ;
glLineWidth(1.0f) ;
circle(x,y,d_) ;
}
}
void Checkbox::pressed() {
toggle_ = !toggle_ ;
}
//______________________________________________________________________________________________________
ArrowButton::ArrowButton(
Direction dir, GLfloat x1, GLfloat y1, GLfloat x2, GLfloat y2
) : Button(x1, y1, x2, y2), direction_(dir) {
d_ /= 1.5 ;
r_ /= 2.0 ;
}
void ArrowButton::draw() {
Button::draw() ;
glColor(Middleground) ;
fill_arrow(direction_, x1_ + d_, y1_ + d_, x2_ - d_, y2_ - d_) ;
glColor(Foreground);
arrow(direction_, x1_ + d_, y1_ + d_, x2_ - d_, y2_ - d_) ;
}
//______________________________________________________________________________________________________
void Slider::set_value(GLfloat x, bool update) {
if(integer_) { x = GLfloat(GLint(x)) ; }
if(x < min_) { x = min_ ; }
if(x > max_) { x = max_ ; }
value_ = x ;
if(update && callback_ != NULL) { callback_(value_) ; }
}
void Slider::set_range(GLfloat x1, GLfloat x2) {
min_ = x1 ;
max_ = x2 ;
if(value_ < min_) { set_value(min_) ; }
if(value_ > max_) { set_value(max_) ; }
}
void Slider::draw() {
if(!visible()) { return ; }
Widget::draw() ;
glColor(Middleground) ;
glLineWidth(2.0f) ;
glBegin(GL_LINES) ;
glVertex2f(x1_+d_, 0.5f*(y1_+y2_)) ;
glVertex2f(x2_-d_, 0.5f*(y1_+y2_)) ;
glEnd() ;
GLfloat w = (value_ - min_) / (max_ - min_) ;
GLfloat x = w*(x2_ - d_) + (1.0f - w)*(x1_ + d_) ;
GLfloat y = 0.5f*(y1_+y2_) ;
glColor(Middleground) ;
fill_circle(x,y,d_) ;
glColor(Foreground) ;
glLineWidth(1.0f) ;
circle(x,y,d_) ;
}
GLboolean Slider::process_mouse_event(float x, float y, int button, GlutViewerEvent event) {
if(!visible()) { return GL_FALSE ; }
if(event == GLUT_VIEWER_DOWN || event == GLUT_VIEWER_MOVE) {
GLfloat w = GLfloat(x - x1_ - d_) / GLfloat(x2_ - x1_ - 2.0f * d_) ;
set_value((1.0f - w) * min_ + w * max_, continuous_update_ == GL_TRUE) ;
return GL_TRUE ;
} else if(event == GLUT_VIEWER_UP) {
set_value(value_) ;
}
return GL_FALSE ;
}
//______________________________________________________________________________________________________
void CurveEditor::draw() {
if(!visible()) { return ; }
draw_background() ;
// Draw grid
glColor(Middleground) ;
glLineWidth(1.0) ;
glBegin(GL_LINES) ;
for(unsigned int i=1; i<10; i++) {
float x = x1_ + (x2_ - x1_) * float(i) / 10.0f ;
glVertex2f(x, y1_) ;
glVertex2f(x, y2_) ;
}
for(unsigned int i=1; i<4; i++) {
float y = y1_ + (y2_ - y1_) * float(i) / 4.0f ;
glVertex2f(x1_, y) ;
glVertex2f(x2_, y) ;
}
glEnd() ;
// Draw curve
glColor(Foreground) ;
glLineWidth(2.0) ;
glBegin(GL_LINE_STRIP) ;
for(unsigned int i=0; i<CurveSize; i++) {
glVertex2f(
x1_ + (float)i * (x2_ - x1_) / (float)(CurveSize - 1),
y1_ + curve_[i] * (y2_ - y1_)
) ;
}
glEnd() ;
draw_border() ;
}
GLboolean CurveEditor::process_mouse_event(float x, float y, int button, GlutViewerEvent event) {
if(!visible()) {
return GL_FALSE ;
}
if(event == GLUT_VIEWER_DOWN && !contains(x,y)) {
return GL_FALSE ;
}
int i = int((x - x1_) * (CurveSize - 1) / (x2_ - x1_)) ;
GLfloat v = GLfloat(y - y1_) / GLfloat(y2_ - y1_) ;
if(v < 0.0) { v = 0.0 ; }
if(v > 1.0) { v = 1.0 ; }
if(i < 0) { i = 0 ; }
if(i >= CurveSize) { i = CurveSize - 1 ; }
if(event == GLUT_VIEWER_DOWN) {
last_i_ = i ;
last_v_ = v ;
return GL_TRUE ;
}
if(event == GLUT_VIEWER_UP) {
if(callback_ != NULL) {
callback_(curve_, CurveSize) ;
}
return GL_TRUE ;
}
if(event == GLUT_VIEWER_MOVE) {
if(i > last_i_) {
set_curve(last_i_, last_v_, i, v) ;
} else {
set_curve(i, v, last_i_, last_v_) ;
}
}
last_i_ = i ;
last_v_ = v ;
return GL_TRUE ;
}
void CurveEditor::set_curve(int i1, float val1, int i2, float val2) {
if(i1 == i2) {
curve_[i1] = val1 ;
} else {
for(int i=i1; i<=i2; i++) {
curve_[i] = val1 + (float)(i - i1) * (val2 - val1) / (float)(i2 - i1) ;
}
}
}
void CurveEditor::set_curve(GLfloat* curve, bool update) {
for(unsigned int i=0; i<CurveSize; i++) {
curve_[i] = curve[i] ;
}
if(update && callback_ != NULL) {
callback_(curve_, CurveSize) ;
}
}
void CurveEditor::reset(bool update) {
for(unsigned int i=0; i<CurveSize; i++) {
curve_[i] = 0.5f ;
}
if(update && callback_ != NULL) {
callback_(curve_, CurveSize) ;
}
}
void CurveEditor::reset_ramp(bool update) {
for(unsigned int i=0; i<CurveSize; i++) {
curve_[i] = float(i) / float(CurveSize - 1) ;
}
if(update && callback_ != NULL) {
callback_(curve_, CurveSize) ;
}
}
GLfloat CurveEditor::value(GLfloat x) const {
if(x < 0.0f) { x = 0.0f ; }
if(x > 1.0f) { x = 1.0f ; }
return curve_[int(x * (CurveSize - 1))] ;
}
//______________________________________________________________________________________________________
void ColormapEditor::draw() {
if(!visible()) { return ; }
draw_background() ;
// Draw curve
glColor(Foreground) ;
glLineWidth(2.0) ;
glBegin(GL_LINE_STRIP) ;
for(unsigned int i=0; i<ColormapSize; i++) {
glVertex2f(
x1_ + (float)i * (x2_ - x1_) / (float)(ColormapSize - 1),
y1_ + curve()[i] * (y2_ - y1_)
) ;
}
glEnd() ;
draw_border() ;
}
void ColormapEditor::draw_background() {
glEnable(GL_BLEND) ;
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA) ;
drawBackgroundCB_(curve(), ColormapSize) ;
glDisable(GL_BLEND) ;
}
void ColormapEditor::draw_border() {
glColor(Foreground) ;
glLineWidth(2.0) ;
glBegin(GL_LINE_LOOP) ;
glVertex2f(x1_, y1_) ;
glVertex2f(x1_, y2_) ;
glVertex2f(x2_, y2_) ;
glVertex2f(x2_, y1_) ;
glEnd() ;
}
void ColormapEditor::update(unsigned char* cmap_data, int size, int component) {
for(unsigned int i = 0; i < ColormapSize; ++i) {
int idx = (double(i) / double(ColormapSize)) * (size-1) ;
curve()[i] = double(cmap_data[4*idx + component]) / 255.0 ;
}
}
//______________________________________________________________________________________________________
void TextLabel::draw() {
if(!visible()) { return ; }
glLineWidth(textwidth_) ;
printf_xy(x1_+10, y1_+50, (char*)text_.c_str()) ;
}
//______________________________________________________________________________________________________
Spinbox::Spinbox(
GLfloat x, GLfloat y, GLenum& value, const std::vector<std::string>& labels
) : Container(x, y, x+3000, y+170), value_(value), labels_(labels) {
down_ = new ArrowButton(DOWN, x, y, x+170, y+170) ;
up_ = new ArrowButton(UP, x+200, y, x+370, y+170) ;
up_->set_callback(increment_CB, this) ;
down_->set_callback(decrement_CB, this) ;
if(value_ < 0) { value_ = 0 ; }
if(value_ >= int(labels_.size())) { value_ = (GLenum)(labels_.size() - 1) ; }
text_ = new TextLabel(x+450,y,labels_[value_]) ;
add_child(up_) ;
add_child(down_) ;
add_child(text_) ;
show() ;
}
void Spinbox::draw() {
Container::draw() ;
}
void Spinbox::increment() {
value_++ ;
if(value_ >= labels_.size()) { value_ = 0 ; }
text_->set_text(labels_[value_]) ;
}
void Spinbox::decrement() {
if(int(value_) - 1 < 0) {
value_ = (GLenum)(labels_.size() - 1) ;
} else {
value_-- ;
}
text_->set_text(labels_[value_]) ;
}
void Spinbox::increment_CB(void* spinbox) {
static_cast<Spinbox*>(spinbox)->increment() ;
}
void Spinbox::decrement_CB(void* spinbox) {
static_cast<Spinbox*>(spinbox)->decrement() ;
}
//______________________________________________________________________________________________________
void MessageBox::draw() {
if(!visible()) { return ; }
Panel::draw() ;
glLineWidth(2) ;
for(unsigned int i=0; i<message_.size(); i++) {
printf_xy(x1_+100, y2_-200-i*150, (char*)message_[i].c_str()) ;
}
}
//______________________________________________________________________________________________________
PropertyPage::PropertyPage(
GLfloat x_in, GLfloat y_in, const std::string& caption
) : Panel(x_in,y_in-10,x_in+Width,y_in) {
y_ = y2_ - 200 ;
x_caption_ = x1_ + 100 ;
x_widget_ = x1_ + 1300 ;
caption_ = add_separator(caption) ;
y1_ = y_ ;
}
TextLabel* PropertyPage::add_separator(const std::string& text) {
TextLabel* w = new TextLabel(x1_ + 400, y_, text, 2.0f) ;
add_child(w) ;
y_ -= 250 ;
y1_ = y_ ;
return w ;
}
TextLabel* PropertyPage::add_string(const std::string& text) {
TextLabel* w = new TextLabel(x1_ + 200, y_, text, 1.0f) ;
add_child(w) ;
y_ -= 150 ;
y1_ = y_ ;
return w ;
}
Slider* PropertyPage::add_slider(
const std::string& caption, GLfloat& value, GLfloat vmin, GLfloat vmax
) {
add_child(new TextLabel(x_caption_, y_, caption)) ;
Slider* w = new Slider(x_widget_, y_, x_widget_+800, y_+200, value) ;
w->set_range(vmin, vmax) ;
add_child(w) ;
y_ -= 250 ;
y1_ = y_ ;
return w ;
}
Checkbox* PropertyPage::add_toggle(
const std::string& caption, GLboolean& value
) {
add_child(new TextLabel(x_caption_, y_, caption)) ;
Checkbox* w = new Checkbox(x_widget_, y_, x_widget_+200, y_+200, value) ;
add_child(w) ;
y_ -= 250 ;
y1_ = y_ ;
return w ;
}
Spinbox* PropertyPage::add_enum(
const std::string& caption, GLenum& value, const std::vector<std::string>& labels) {
add_child(new TextLabel(x_caption_, y_, caption)) ;
Spinbox* w = new Spinbox(x_widget_, y_, value, labels) ;
add_child(w) ;
y_ -= 250 ;
y1_ = y_ ;
return w ;
}
//______________________________________________________________________________________________________
ViewerProperties::ViewerProperties(GLfloat x_left, GLfloat y_top) : PropertyPage(
x_left, y_top, "Viewer"
) {
add_toggle("Rot. light", *glut_viewer_is_enabled_ptr(GLUT_VIEWER_ROTATE_LIGHT)) ;
if(glut_viewer_is_enabled(GLUT_VIEWER_HDR)) {
add_slider("Exposure", *glut_viewer_float_ptr(GLUT_VIEWER_HDR_EXPOSURE), 0.001, 3.0) ;
add_slider("Gamma", *glut_viewer_float_ptr(GLUT_VIEWER_HDR_GAMMA), 0.2, 1.5) ;
add_toggle("Vignette", *glut_viewer_is_enabled_ptr(GLUT_VIEWER_HDR_VIGNETTE)) ;
add_slider("Blur amount", *glut_viewer_float_ptr(GLUT_VIEWER_HDR_BLUR_AMOUNT)) ;
add_slider("Blur width", *glut_viewer_float_ptr(GLUT_VIEWER_HDR_BLUR_WIDTH), 1.0, 20.0) ;
add_toggle("UnMsk.", *glut_viewer_is_enabled_ptr(GLUT_VIEWER_HDR_UNSHARP_MASKING)) ;
add_toggle("UnMsk.+", *glut_viewer_is_enabled_ptr(GLUT_VIEWER_HDR_POSITIVE_UNSHARP_MASKING)) ;
add_slider("UnMsk. Gamm", *glut_viewer_float_ptr(GLUT_VIEWER_HDR_UNSHARP_MASKING_GAMMA), 0.2, 1.5) ;
}
}
void ViewerProperties::draw() {
if(glut_viewer_is_enabled(GLUT_VIEWER_IDLE_REDRAW)) {
static char buff[256] ;
sprintf(buff, " [%4d FPS]", glut_viewer_fps()) ;
caption_->set_text("Viewer" + std::string(buff)) ;
} else {
caption_->set_text("Viewer") ;
}
PropertyPage::draw() ;
}
void ViewerProperties::apply() {
}
//______________________________________________________________________________________________________
Image::Image(
GLfloat x1, GLfloat y1, GLfloat x2, GLfloat y2, GLint texture, GLint target
) : Widget(x1, y1, x2, y2), texture_(texture), texture_target_(target) {
}
void Image::draw() {
if(texture_ == 0) { return ; }
glEnable(texture_target_) ;
glBindTexture(texture_target_, texture_) ;
glBegin(GL_QUADS) ;
glTexCoord2f(0.0, 0.0) ;
glVertex2f(x1_, y1_) ;
glTexCoord2f(1.0, 0.0) ;
glVertex2f(x2_, y1_) ;
glTexCoord2f(1.0, 1.0) ;
glVertex2f(x2_, y2_) ;
glTexCoord2f(0.0, 1.0) ;
glVertex2f(x1_, y2_) ;
glEnd() ;
glDisable(texture_target_) ;
}
//______________________________________________________________________________________________________
}<|fim▁end|> | circle_arc_vertices(x,y,r,0.0f,400.0f) ; |
<|file_name|>scope.js<|end_file_name|><|fim▁begin|>(function() {
var Scope, extend, last, _ref;
_ref = require('./helpers'), extend = _ref.extend, last = _ref.last;
exports.Scope = Scope = (function() {
Scope.root = null;
function Scope(parent, expressions, method) {
this.parent = parent;
this.expressions = expressions;
this.method = method;
this.variables = [
<|fim▁hole|> name: 'arguments',
type: 'arguments'
}
];
this.positions = {};
if (!this.parent) Scope.root = this;
}
Scope.prototype.add = function(name, type, immediate) {
if (this.shared && !immediate) return this.parent.add(name, type, immediate);
if (Object.prototype.hasOwnProperty.call(this.positions, name)) {
return this.variables[this.positions[name]].type = type;
} else {
return this.positions[name] = this.variables.push({
name: name,
type: type
}) - 1;
}
};
Scope.prototype.find = function(name, options) {
if (this.check(name, options)) return true;
this.add(name, 'var');
return false;
};
Scope.prototype.parameter = function(name) {
if (this.shared && this.parent.check(name, true)) return;
return this.add(name, 'param');
};
Scope.prototype.check = function(name, immediate) {
var found, _ref2;
found = !!this.type(name);
if (found || immediate) return found;
return !!((_ref2 = this.parent) != null ? _ref2.check(name) : void 0);
};
Scope.prototype.temporary = function(name, index) {
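      // Multi-character names yield "_name", "_name2", ...; single letters are
      // advanced alphabetically in base 36 (with digits remapped to "a"), so
      // "_i" is followed by "_j", "_k", and so on.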
if (name.length > 1) {
return '_' + name + (index > 1 ? index : '');
} else {
return '_' + (index + parseInt(name, 36)).toString(36).replace(/\d/g, 'a');
}
};
Scope.prototype.type = function(name) {
var v, _i, _len, _ref2;
_ref2 = this.variables;
for (_i = 0, _len = _ref2.length; _i < _len; _i++) {
v = _ref2[_i];
if (v.name === name) return v.type;
}
return null;
};
Scope.prototype.freeVariable = function(name, reserve) {
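      // Probe "_name", "_name2", ... until an identifier unused anywhere in
      // this scope chain is found; optionally reserve it as a declared var.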
var index, temp;
if (reserve == null) reserve = true;
index = 0;
while (this.check((temp = this.temporary(name, index)))) {
index++;
}
if (reserve) this.add(temp, 'var', true);
return temp;
};
Scope.prototype.assign = function(name, value) {
this.add(name, {
value: value,
assigned: true
}, true);
return this.hasAssignments = true;
};
Scope.prototype.hasDeclarations = function() {
return !!this.declaredVariables().length;
};
Scope.prototype.declaredVariables = function() {
var realVars, tempVars, v, _i, _len, _ref2;
realVars = [];
tempVars = [];
_ref2 = this.variables;
for (_i = 0, _len = _ref2.length; _i < _len; _i++) {
v = _ref2[_i];
if (v.type === 'var') {
(v.name.charAt(0) === '_' ? tempVars : realVars).push(v.name);
}
}
return realVars.sort().concat(tempVars.sort());
};
Scope.prototype.assignedVariables = function() {
var v, _i, _len, _ref2, _results;
_ref2 = this.variables;
_results = [];
for (_i = 0, _len = _ref2.length; _i < _len; _i++) {
v = _ref2[_i];
if (v.type.assigned) _results.push("" + v.name + " = " + v.type.value);
}
return _results;
};
return Scope;
})();
}).call(this);<|fim▁end|> | {
|
<|file_name|>spatialemis.go<|end_file_name|><|fim▁begin|>/*
Copyright © 2017 the InMAP authors.
This file is part of InMAP.
InMAP is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
InMAP is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with InMAP. If not, see <http://www.gnu.org/licenses/>.*/
package eieio
import (
"context"
"fmt"
"github.com/spatialmodel/inmap/emissions/slca"
"github.com/spatialmodel/inmap/emissions/slca/eieio/eieiorpc"
"github.com/spatialmodel/inmap/internal/hash"
"gonum.org/v1/gonum/mat"
)
type emissionsRequest struct {
demand *mat.VecDense
industries *Mask
pol slca.Pollutant
year Year
loc Location
aqm string
}
<|fim▁hole|>// If emitters == nil, combined emissions for all emitters are calculated.
func (e *SpatialEIO) Emissions(ctx context.Context, request *eieiorpc.EmissionsInput) (*eieiorpc.Vector, error) {
e.loadEmissionsOnce.Do(func() {
var c string
if e.EIEIOCache != "" {
c = e.EIEIOCache + "/individual"
}
e.emissionsCache = loadCacheOnce(func(ctx context.Context, request interface{}) (interface{}, error) {
r := request.(*emissionsRequest)
return e.emissions(ctx, r.demand, r.industries, r.aqm, r.pol, r.year, r.loc) // Actually calculate the emissions.
}, 1, e.MemCacheSize, c, vectorMarshal, vectorUnmarshal)
})
req := &emissionsRequest{
demand: rpc2vec(request.Demand),
industries: rpc2mask(request.Emitters),
pol: slca.Pollutant(request.Emission),
year: Year(request.Year),
loc: Location(request.Location),
aqm: request.AQM,
}
rr := e.emissionsCache.NewRequest(ctx, req, "emissions_"+hash.Hash(req))
resultI, err := rr.Result()
if err != nil {
return nil, err
}
return vec2rpc(resultI.(*mat.VecDense)), nil
}
// emissions returns spatially-explicit emissions caused by the
// specified economic demand. industries specifies the industries
// for which emissions should be calculated.
// If industries == nil, combined emissions for all industries are calculated.
func (e *SpatialEIO) emissions(ctx context.Context, demand *mat.VecDense, industries *Mask, aqm string, pol slca.Pollutant, year Year, loc Location) (*mat.VecDense, error) {
// Calculate emission factors. matrix dimension: [# grid cells, # industries]
ef, err := e.emissionFactors(ctx, aqm, pol, year)
if err != nil {
return nil, err
}
// Calculate economic activity. vector dimension: [# industries, 1]
activity, err := e.economicImpactsSCC(demand, year, loc)
if err != nil {
return nil, err
}
if industries != nil {
// Set activity in industries we're not interested in to zero.
industries.Mask(activity)
}
r, _ := ef.Dims()
emis := mat.NewVecDense(r, nil)
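	// emis = ef · activity: each grid cell's emissions are the sum over
	// industries of emissions-per-unit-output times economic output.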
emis.MulVec(ef, activity)
return emis, nil
}
// EmissionsMatrix returns spatially- and industry-explicit emissions caused by the
// specified economic demand. In the result matrix, the rows represent air quality
// model grid cells and the columns represent emitters.
func (e *SpatialEIO) EmissionsMatrix(ctx context.Context, request *eieiorpc.EmissionsMatrixInput) (*eieiorpc.Matrix, error) {
ef, err := e.emissionFactors(ctx, request.AQM, slca.Pollutant(request.Emission), Year(request.Year)) // rows = grid cells, cols = industries
if err != nil {
return nil, err
}
activity, err := e.economicImpactsSCC(array2vec(request.Demand.Data), Year(request.Year), Location(request.Location)) // rows = industries
if err != nil {
return nil, err
}
r, c := ef.Dims()
emis := mat.NewDense(r, c, nil)
emis.Apply(func(_, j int, v float64) float64 {
// Multiply each emissions factor column by the corresponding activity row.
return v * activity.At(j, 0)
}, ef)
return mat2rpc(emis), nil
}
// emissionFactors returns spatially-explicit emissions per unit of economic
// production for each industry. In the result matrix, the rows represent
// air quality model grid cells and the columns represent industries.
func (e *SpatialEIO) emissionFactors(ctx context.Context, aqm string, pol slca.Pollutant, year Year) (*mat.Dense, error) {
e.loadEFOnce.Do(func() {
e.emissionFactorCache = loadCacheOnce(e.emissionFactorsWorker, 1, 1, e.EIEIOCache,
matrixMarshal, matrixUnmarshal)
})
key := fmt.Sprintf("emissionFactors_%s_%v_%d", aqm, pol, year)
rr := e.emissionFactorCache.NewRequest(ctx, aqmPolYear{aqm: aqm, pol: pol, year: year}, key)
resultI, err := rr.Result()
if err != nil {
return nil, fmt.Errorf("eieio.emissionFactors: %s: %v", key, err)
}
return resultI.(*mat.Dense), nil
}
// emissionFactorsWorker computes spatially-explicit emissions per unit of economic
// production for each industry. In the result matrix, the rows represent
// air quality model grid cells and the columns represent industries.
func (e *SpatialEIO) emissionFactorsWorker(ctx context.Context, request interface{}) (interface{}, error) {
aqmpolyear := request.(aqmPolYear)
prod, err := e.domesticProductionSCC(aqmpolyear.year)
if err != nil {
return nil, err
}
var emisFac *mat.Dense
for i, refTemp := range e.SpatialRefs {
if len(refTemp.SCCs) == 0 {
return nil, fmt.Errorf("bea: industry %d; no SCCs", i)
}
ref := refTemp
ref.EmisYear = int(aqmpolyear.year)
ref.AQM = aqmpolyear.aqm
industryEmis, err := e.CSTConfig.EmissionsSurrogate(ctx, aqmpolyear.pol, &ref)
if err != nil {
return nil, err
}
if i == 0 {
emisFac = mat.NewDense(industryEmis.Shape[0], len(e.SpatialRefs), nil)
}
for r, v := range industryEmis.Elements {
// The emissions factor is the industry emissions divided by the
// industry economic production.
if p := prod.At(i, 0); p != 0 {
				emisFac.Set(r, i, v/p)
}
}
}
return emisFac, nil
}<|fim▁end|> | // Emissions returns spatially-explicit emissions caused by the
// specified economic demand. Emitters specifies the emitters for
// which emissions should be calculated.
<|file_name|>register-web-ui-test.js<|end_file_name|><|fim▁begin|>// register-web-ui-test.js
//
// Test the web UI for user registration
//
// Copyright 2012, E14N https://e14n.com/
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
var assert = require("assert"),
vows = require("vows"),
oauthutil = require("./lib/oauth"),
Browser = require("zombie"),
Step = require("step"),
setupApp = oauthutil.setupApp,
setupAppConfig = oauthutil.setupAppConfig;
var suite = vows.describe("register web UI test");
// A batch to test the basics of the registration page
suite.addBatch({
"When we set up the app": {
topic: function() {
setupAppConfig({site: "Test"}, this.callback);
},
teardown: function(app) {
if (app && app.close) {
app.close();
}
},<|fim▁hole|> "and we visit the root URL": {
topic: function() {
var browser,
callback = this.callback;
browser = new Browser();
browser.visit("http://localhost:4815/main/register", function(err, br) {
callback(err, br);
});
},
"it works": function(err, br) {
assert.ifError(err);
assert.isTrue(br.success);
},
"and we check the content": {
topic: function(br) {
var callback = this.callback;
callback(null, br);
},
"it includes a registration div": function(err, br) {
assert.ok(br.query("div#registerpage"));
},
"it includes a registration form": function(err, br) {
assert.ok(br.query("div#registerpage form"));
},
"the registration form has a nickname field": function(err, br) {
assert.ok(br.query("div#registerpage form input[name=\"nickname\"]"));
},
"the registration form has a password field": function(err, br) {
assert.ok(br.query("div#registerpage form input[name=\"password\"]"));
},
"the registration form has a password repeat field": function(err, br) {
assert.ok(br.query("div#registerpage form input[name=\"repeat\"]"));
},
"the registration form has a submit button": function(err, br) {
assert.ok(br.query("div#registerpage form button[type=\"submit\"]"));
},
"and we submit the form": {
topic: function() {
var callback = this.callback,
br = arguments[0];
Step(
function() {
br.fill("nickname", "sparks", this);
},
function(err) {
if (err) throw err;
br.fill("password", "redplainsrider1", this);
},
function(err) {
if (err) throw err;
br.fill("repeat", "redplainsrider1", this);
},
function(err) {
if (err) throw err;
br.pressButton("button[type=\"submit\"]", this);
},
function(err) {
if (err) {
callback(err, null);
} else {
callback(null, br);
}
}
);
},
"it works": function(err, br) {
assert.ifError(err);
assert.isTrue(br.success);
}
}
}
}
}
});
suite["export"](module);<|fim▁end|> | "it works": function(err, app) {
assert.ifError(err);
}, |
<|file_name|>socket.rs<|end_file_name|><|fim▁begin|>use std::net::{SocketAddr, UdpSocket};
use std::io;
use std::time::Duration;
pub trait SocketProvider<I,S> {
fn new_state() -> S;
fn bind(addr: &SocketAddr, state: &mut S) -> Result<I, io::Error>;
fn local_addr(&self) -> Result<SocketAddr, io::Error>;
fn set_recv_timeout(&mut self, duration: Option<Duration>) -> Result<(), io::Error>;
fn recv_from(&mut self, buf: &mut [u8]) -> Result<(usize, SocketAddr), io::Error>;
fn send_to(&mut self, buf: &[u8], addr: &SocketAddr) -> Result<usize, io::Error>;
}
impl SocketProvider<UdpSocket,()> for UdpSocket {
fn new_state() -> () {
()
}
fn bind(addr: &SocketAddr, _state: &mut ()) -> Result<UdpSocket, io::Error> {
let socket = UdpSocket::bind(addr)?;
socket.set_nonblocking(true)?;
Ok(socket)
}
fn local_addr(&self) -> Result<SocketAddr, io::Error> {
UdpSocket::local_addr(self)
}
fn set_recv_timeout(&mut self, duration: Option<Duration>) -> Result<(), io::Error> {
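        // Some(duration) puts the socket into blocking mode with a read
        // deadline; None restores fully non-blocking recv_from calls.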
match duration {
Some(duration) => {
self.set_read_timeout(Some(duration))?;
self.set_nonblocking(false)
},
None => {
self.set_nonblocking(true)
}
}
}
fn recv_from(&mut self, buf: &mut [u8]) -> Result<(usize, SocketAddr), io::Error> {
UdpSocket::recv_from(self, buf)
}
fn send_to(&mut self, buf: &[u8], addr: &SocketAddr) -> Result<usize, io::Error> {
UdpSocket::send_to(self, buf, addr)
}
}
#[cfg(test)]
pub mod capi_simulator {
use super::*;
use capi::*;
use std::rc::{Rc, Weak};
use std::cell::RefCell;
use std::ffi::{CString, CStr};
pub type SimulatorRef = Rc<RefCell<Simulator>>;
pub struct Simulator {
pub sim: *mut netcode_network_simulator_t
}
impl Drop for Simulator {
fn drop(&mut self) {
unsafe {
netcode_network_simulator_destroy(self.sim);
}
}
}
pub struct SimulatedSocket {
local_addr: SocketAddr,
sim: Weak<RefCell<Simulator>>
}
fn addr_to_naddr(addr: &SocketAddr) -> Result<netcode_address_t, io::Error> {
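        // Round-trip through netcode's own parser: format the SocketAddr as a
        // string and let netcode_parse_address fill in the C address struct.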
unsafe {
let mut naddr: netcode_address_t = ::std::mem::uninitialized();
let str_rep = CString::new(format!("{}", addr))
.map_err(|_| io::Error::new(io::ErrorKind::InvalidInput, "Invalid string address"))?;
match netcode_parse_address(str_rep.as_ptr(), &mut naddr) {
1 => Ok(naddr),
_ => Err(io::Error::new(io::ErrorKind::InvalidInput, "Unable to parse addr"))
}
}
}
fn naddr_to_addr(naddr: &netcode_address_t) -> Result<SocketAddr, io::Error> {
use std::str::FromStr;
unsafe {
let mut addr = [0; NETCODE_MAX_ADDRESS_STRING_LENGTH as usize];
netcode_address_to_string(::std::mem::transmute(naddr), addr.as_mut_ptr());
let cstr = CStr::from_ptr(addr.as_ptr());
SocketAddr::from_str(cstr.to_str().map_err(|_| io::Error::new(io::ErrorKind::InvalidInput, "Invalid UTF-8"))?)
.map_err(|_| io::Error::new(io::ErrorKind::InvalidData, "Unable to parse address"))
}
}
/*
impl SocketProvider<SimulatedSocket, SimulatorRef> for SimulatedSocket {
fn new_state() -> Rc<RefCell<Simulator>> {
Rc::new(RefCell::new(Simulator {
sim: unsafe { netcode_network_simulator_create() }
}))
}
<|fim▁hole|> sim: Rc::downgrade(state)
})
}
fn local_addr(&self) -> Result<SocketAddr, io::Error> {
Ok(self.local_addr)
}
fn set_recv_timeout(&mut self, _duration: Option<Duration>) -> Result<(), io::Error> {
Ok(())
}
fn recv_from(&mut self, buf: &mut [u8]) -> Result<(usize, SocketAddr), io::Error> {
unsafe {
let mut packet = [::std::ptr::null_mut(); 1];
let mut packet_len = 0;
let mut addr: netcode_address_t = ::std::mem::uninitialized();
let mut local_addr = addr_to_naddr(&self.local_addr)?;
match self.sim.upgrade() {
Some(simref) => {
let result = netcode_network_simulator_receive_packets(
simref.borrow_mut().sim,
&mut local_addr,
packet.len() as i32,
packet.as_mut_ptr(),
&mut packet_len,
&mut addr);
match result {
1 => {
let len = packet_len as usize;
buf[..len].copy_from_slice(::std::slice::from_raw_parts(packet[0], len));
free(::std::mem::transmute(packet[0]));
Ok((len as usize, naddr_to_addr(&addr)?))
},
_ => Err(io::Error::new(io::ErrorKind::WouldBlock, "No packets"))
}
}
None => Err(io::Error::new(io::ErrorKind::InvalidData, "Simulator released"))
}
}
}
fn send_to(&mut self, buf: &[u8], addr: &SocketAddr) -> Result<usize, io::Error> {
let mut from = addr_to_naddr(&self.local_addr)?;
let mut to = addr_to_naddr(addr)?;
unsafe {
match self.sim.upgrade() {
Some(simref) => {
netcode_network_simulator_send_packet(
simref.borrow_mut().sim,
&mut from,
&mut to,
::std::mem::transmute(buf.as_ptr()),
buf.len() as i32);
Ok(buf.len())
}
None => Err(io::Error::new(io::ErrorKind::InvalidData, "Simulator released"))
}
}
}
}
*/
}<|fim▁end|> | fn bind(addr: &SocketAddr, state: &mut SimulatorRef) -> Result<SimulatedSocket, io::Error> {
Ok(SimulatedSocket {
local_addr: addr.clone(), |
<|file_name|>test_utils.py<|end_file_name|><|fim▁begin|>from django_nose.tools import assert_false, assert_true
from pontoon.base.tests import TestCase
from pontoon.base.utils import extension_in
class UtilsTests(TestCase):
def test_extension_in(self):
assert_true(extension_in('filename.txt', ['bat', 'txt']))
assert_true(extension_in('filename.biff', ['biff']))
assert_true(extension_in('filename.tar.gz', ['gz']))
<|fim▁hole|> assert_false(extension_in('filename.txt', ['png', 'jpg']))
assert_false(extension_in('.dotfile', ['bat', 'txt']))
# Unintuitive, but that's how splitext works.
assert_false(extension_in('filename.tar.gz', ['tar.gz']))<|fim▁end|> | |
<|file_name|>gridrenderer.js<|end_file_name|><|fim▁begin|>GridRenderer.prototype.extend(
{
<|fim▁hole|><|fim▁end|> | }); |
<|file_name|>index.test.js<|end_file_name|><|fim▁begin|>import demand from 'must';
import { columnsParser, sortParser, filtersParser, filterParser, createFilterObject } from '../index';
import sinon from 'sinon';
describe('<List> query parsers', function () {
beforeEach(function () {
this.fields = {
name: {
path: 'name',
paths: {
first: 'name.first',
last: 'name.last',
full: 'name.full',
},
type: 'name',
label: 'Name',
size: 'full',
required: true,
hasFilterMethod: true,
defaultValue: {
first: '',
last: '',
},
},
email: {
path: 'email',
type: 'email',
label: 'Email',
size: 'full',
required: true,
defaultValue: {
email: '',
},
},
};
this.currentList = {
fields: this.fields,
defaultColumns: { columns: '__DEFAULT_COLUMNS__' },
defaultSort: { sort: '__DEFAULT_SORT__' },
expandColumns: sinon.spy(),
expandSort: sinon.spy(),
};
this.path = 'name';
this.value = { value: 'a', mode: 'contains', inverted: false };
this.filter = Object.assign({}, { path: this.path }, this.value);
});
describe('columnsParser()', function () {
describe('If an empty columns array is added', function () {
it('should call expandColumns with default columns', function () {
const columns = [];
columnsParser(columns, this.currentList);
const result = this.currentList.expandColumns.getCall(0).args[0];
demand(result).eql(this.currentList.defaultColumns);
});
});
describe('If the input columns are undefined', function () {
it('should call expandColumns with default columns', function () {
const columns = void 0;
columnsParser(columns, this.currentList);
const result = this.currentList.expandColumns.getCall(0).args[0];
demand(result).eql(this.currentList.defaultColumns);
});
});
describe('If currentList does not exist', function () {
it('throws an Error', function () {
const columns = ['name', 'email'];
let e = void 0;
try {
columnsParser(columns, null);
} catch (error) {
e = error;
}
demand(e.message).eql('No currentList selected');
});
});
});
describe('sortParser()', function () {
describe('If no path is specified', function () {
it('should return the default sort object', function () {
const path = void 0;
sortParser(path, this.currentList);
const result = this.currentList.expandSort.getCall(0).args[0];
demand(result).eql(this.currentList.defaultSort);
});
});
describe('If currentList does not exist', function () {
it('throws an Error', function () {
const path = 'email';
let e = void 0;
try {
sortParser(path, null);
} catch (error) {
e = error;
}
demand(e.message).eql('No currentList selected');
});
});
});
describe('createFilterObject()', function () {
		describe('If provided with a valid path and a valid currentList', function () {
			it('returns an object with the matching field and the passed-in value', function () {
const expectedFilter = this.currentList.fields[this.path];
const expectedResult = { field: expectedFilter, value: this.value };
demand(createFilterObject(this.path, this.value, this.currentList.fields)).eql(expectedResult);
});
});
describe('If provided with an invalid path', function () {
it('returns undefined', function () {
const expectedResult = void 0;
demand(createFilterObject(null, null, this.currentList)).eql(expectedResult);
});
});
describe('If provided with an invalid currentListFields', function () {
it('returns undefined', function () {
const expectedResult = void 0;
demand(createFilterObject(this.path, this.value, {})).eql(expectedResult);
});
});
describe('If provided with a null value for currentListFields', function () {
it('returns undefined', function () {
const expectedResult = void 0;
demand(createFilterObject(this.path, this.value, null)).eql(expectedResult);
});
});
describe('If provided with any value that is not a plain object', function () {
it('returns undefined', function () {
const expectedResult = void 0;
demand(createFilterObject(this.path, this.value, 'currentListFields')).eql(expectedResult);
});
});
});
describe('filtersParser()', function () {
describe('Given no matching fields are found', function () {
it('returns an empty array', function () {
const invalidFilter = 'jemena';
const expectedResult = [];
const filters = [invalidFilter];
demand(filtersParser(filters, this.currentList)).eql(expectedResult);
});
});
describe('Given no filters are passed into the function', function () {
it('returns an empty array', function () {
const expectedResult = [];
demand(filtersParser(null, this.currentList)).eql(expectedResult);
});
});<|fim▁hole|> describe('Given an array of filters', function () {
it('returns an array of filters', function () {
const filter = Object.assign(
{},
{ path: 'name' },
this.value
);
const filters = [filter];
const expectedResult = [{
field: this.currentList.fields[filter.path],
value: this.value,
}];
demand(filtersParser(filters, this.currentList)).eql(expectedResult);
});
});
describe('Given a valid stringified filters array', function () {
it('returns an array of filters', function () {
const filter = Object.assign(
{},
{ path: 'name' },
this.value
);
const filters = [filter];
const stringifiedFilters = JSON.stringify(filters);
const expectedResult = [{
field: this.currentList.fields[filter.path],
value: this.value,
}];
demand(filtersParser(stringifiedFilters, this.currentList)).eql(expectedResult);
});
});
describe('If provided with an invalid stringified filters array', function () {
it('returns an empty array', function () {
const stringifiedFilters = 'jemena';
const expectedResult = [];
demand(filtersParser(stringifiedFilters, this.currentList)).eql(expectedResult);
});
});
});
describe('filterParser()', function () {
beforeEach(function () {
const firstEntry = { field: this.currentList.fields[this.path], value: this.value };
const secondEntry = { field: this.currentList.fields.email, value: this.value };
this.activeFilters = [firstEntry, secondEntry];
this.addedFilter = { path: this.filter.path, value: this.value };
});
describe('Given a valid filter object with a path and value', function () {
it('returns an expanded filter object', function () {
const expectedResult = { field: this.currentList.fields[this.path], value: this.value };
demand(filterParser(this.addedFilter, this.activeFilters, this.currentList)).eql(expectedResult);
});
});
describe('Given that activeFilters is not an array', function () {
it('throws an error', function () {
const invalidActiveFilters = 'hello there';
let e = void 0;
try {
filterParser(this.addedFilter, invalidActiveFilters, this.currentList);
} catch (error) {
e = error;
}
demand(e.message).eql('activeFilters must be an array');
});
});
describe('Given that currentList is not a valid object', function () {
it('throws an error', function () {
let e = void 0;
const invalidList = void 0;
try {
filterParser({}, this.activeFilters, invalidList);
} catch (error) {
e = error;
}
demand(e.message).eql('No currentList selected');
});
});
describe('Given that the filter does not exist in activeFilters', function () {
describe('Given that currentList is not an object of the shape that we expect', function () {
it('returns undefined', function () {
const expectedResult = void 0;
const badList = {
someKey: 'some value',
someOtherKey: 'some other value',
};
demand(filterParser(this.addedFilter, [], badList)).eql(expectedResult);
});
});
});
});
});<|fim▁end|> | |
<|file_name|>import_spec.js<|end_file_name|><|fim▁begin|>describe('import.js', function() {
describe('ImportSetup', function() {
describe('#respondToPostMessages', function() {
var test = {
callback: function(uploadId, message) { }
};
beforeEach(function() {
spyOn(test, 'callback');
spyOn(window, 'miqSparkleOff');
spyOn(window, 'clearMessages');
spyOn(window, 'showWarningMessage');
spyOn(window, 'showErrorMessage');
});
context('when the import file upload id exists', function() {
beforeEach(function() {
var event = {
data: {
import_file_upload_id: 123,
message: 'the message'
}
};
ImportSetup.respondToPostMessages(event, test.callback);
});
it('turns the sparkle off', function() {
expect(window.miqSparkleOff).toHaveBeenCalled();
});
it('clears the messages', function() {
expect(window.clearMessages).toHaveBeenCalled();
});
it('triggers the callback', function() {
expect(test.callback).toHaveBeenCalledWith(123, 'the message');
});
});
context('when the import file upload id does not exist', function() {
var event = {data: {import_file_upload_id: ''}};
context('when the message level is warning', function() {
beforeEach(function() {
event.data.message = '{"message":"lol","level":"warning"}';
ImportSetup.respondToPostMessages(event, test.callback);
});
it('turns the sparkle off', function() {
expect(window.miqSparkleOff).toHaveBeenCalled();
});
it('clears the messages', function() {
expect(window.clearMessages).toHaveBeenCalled();
});
it('displays a warning message with the message', function() {
expect(window.showWarningMessage).toHaveBeenCalledWith('lol');
});
});
context('when the message level is not warning', function() {
beforeEach(function() {
event.data.message = '{"message":"lol2","level":"error"}';
ImportSetup.respondToPostMessages(event, test.callback);
});
it('turns the sparkle off', function() {
expect(window.miqSparkleOff).toHaveBeenCalled();
});
it('clears the messages', function() {
expect(window.clearMessages).toHaveBeenCalled();
});
it('displays an error message with the message', function() {
expect(window.showErrorMessage).toHaveBeenCalledWith('lol2');
});
});
});
});
<|fim▁hole|> spyOn(window, 'addEventListener').and.callFake(
function(_, callback) {
gitPostMessageCallback = callback;
}
);
});
it('sets up an event listener', function() {
ImportSetup.listenForGitPostMessages();
expect(window.addEventListener).toHaveBeenCalledWith('message', gitPostMessageCallback);
});
describe('post message callback', function() {
var event = {};
beforeEach(function() {
spyOn(window, 'miqSparkleOff');
});
context('when the message data level is an error', function() {
beforeEach(function() {
spyOn(window, 'showErrorMessage');
spyOn($.fn, 'prop');
event.data = {
message: {level: 'error', message: 'test'}
};
gitPostMessageCallback(event);
});
it('shows the error message', function() {
expect(window.showErrorMessage).toHaveBeenCalledWith('test');
});
it('disables the git-url-import', function() {
expect($.fn.prop).toHaveBeenCalledWith('disabled', null);
expect($.fn.prop.calls.mostRecent().object.selector).toEqual('#git-url-import');
});
it('turns the spinner off', function() {
expect(window.miqSparkleOff).toHaveBeenCalled();
});
});
context('when the message data level is not error', function() {
beforeEach(function() {
spyOn(Automate, 'renderGitImport');
event.data = {
message: '{"level": "success", "message": "test"}',
git_repo_id: 123
};
});
context('when the data has branches', function() {
beforeEach(function() {
event.data.git_branches = 'branches';
gitPostMessageCallback(event);
});
it('calls renderGitImport with the branches, tags, repo_id, and message', function() {
expect(Automate.renderGitImport).toHaveBeenCalledWith('branches', undefined, 123, event.data.message);
});
it('turns the spinner off', function() {
expect(window.miqSparkleOff).toHaveBeenCalled();
});
});
context('when the data has tags with no branches', function() {
beforeEach(function() {
event.data.git_tags = 'tags';
gitPostMessageCallback(event);
});
it('calls renderGitImport with the branches, tags, repo_id, and message', function() {
expect(Automate.renderGitImport).toHaveBeenCalledWith(undefined, 'tags', 123, event.data.message);
});
it('turns the spinner off', function() {
expect(window.miqSparkleOff).toHaveBeenCalled();
});
});
context('when the data has neither tags nor branches', function() {
beforeEach(function() {
gitPostMessageCallback(event);
});
it('does not call renderGitImport', function() {
expect(Automate.renderGitImport).not.toHaveBeenCalled();
});
it('turns the spinner off', function() {
expect(window.miqSparkleOff).toHaveBeenCalled();
});
});
});
});
});
describe('SettingUpImportButton', function() {
beforeEach(function() {
var html = '';
html += '<div class="col-md-6">'
html += ' <input id="upload_button" />';
html += '</div>';
html += '<div>';
html += ' <input id="upload_file" />';
html += '</div>';
setFixtures(html);
});
it('make upload button to not be disabled', function(){
$('#upload_button').prop('disabled', true);
$('#upload_file').prop('value', 'test_value');
ImportSetup.setUpUploadImportButton('#upload_button');
expect($('#upload_button').prop('disabled')).toEqual(false);
});
it('make upload button to be disabled', function(){
$('#upload_button').prop('disabled', false);
ImportSetup.setUpUploadImportButton('#upload_button');
expect($('#upload_button').prop('disabled')).toEqual(true);
});
});
});
describe('#clearMessages', function() {
beforeEach(function() {
var html = '';
html += '<div class="import-flash-message">';
html += ' <div class="alert alert-success alert-danger alert-warning"></div>';
html += '</div>';
html += '<div class="icon-placeholder pficon pficon-ok pficon-layered"></div>';
html += '<div id="error-circle-o" class="pficon-error-circle-o"></div>';
html += '<div id="error-exclamation" class="pficon-error-exclamation"></div>';
html += '<div id="warning-triangle" class="pficon-warning-triangle-o"></div>';
html += '<div id="warning-exclamation" class="pficon-warning-exclamation"></div>';
setFixtures(html);
clearMessages();
});
it('removes alert classes', function() {
expect($('.import-flash-message')).not.toHaveClass('alert-success');
expect($('.import-flash-message')).not.toHaveClass('alert-danger');
expect($('.import-flash-message')).not.toHaveClass('alert-warning');
});
it('removes pficon classes', function() {
expect($('.icon-placeholder')).not.toHaveClass('pficon');
expect($('.icon-placeholder')).not.toHaveClass('pficon-ok');
expect($('.icon-placeholder')).not.toHaveClass('pficon-layered');
});
it('removes pficon-error-circle-o class', function() {
expect($('#error-circle-o')).not.toHaveClass('pficon-error-circle-o');
});
it('removes pficon-error-exclamation class', function() {
expect($('#error-exclamation')).not.toHaveClass('pficon-error-exclamation');
});
it('removes pficon-warning-triangle class', function() {
expect($('#warning-triangle')).not.toHaveClass('pficon-warning-triangle-o');
});
it('removes pficon-warning-exclamation class', function() {
expect($('#warning-exclamation')).not.toHaveClass('pficon-warning-exclamation');
});
});
});<|fim▁end|> | describe('#listenForGitPostMessages', function() {
var gitPostMessageCallback;
beforeEach(function() { |
<|file_name|>DjangoProjectTemplate.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
""" Projy template for PythonPackage. """
# system
from datetime import date
from os import mkdir, rmdir
from shutil import move
from subprocess import call
# parent class
from projy.templates.ProjyTemplate import ProjyTemplate
# collectors
from projy.collectors.AuthorCollector import AuthorCollector
from projy.collectors.AuthorMailCollector import AuthorMailCollector
class DjangoProjectTemplate(ProjyTemplate):
""" Projy template class for PythonPackage. """
def __init__(self):
ProjyTemplate.__init__(self)
def directories(self):
""" Return the names of directories to be created. """
directories_description = [
self.project_name,
self.project_name + '/conf',
self.project_name + '/static',
]
return directories_description
def files(self):
""" Return the names of files to be created. """
files_description = [
# configuration
[ self.project_name,
'Makefile',
'DjangoMakefileTemplate' ],
[ self.project_name + '/conf',
'requirements_base.txt',
'DjangoRequirementsBaseTemplate' ],
[ self.project_name + '/conf',
'requirements_dev.txt',
'DjangoRequirementsDevTemplate' ],
[ self.project_name + '/conf',
'requirements_production.txt',
'DjangoRequirementsProdTemplate' ],
[ self.project_name + '/conf',
'nginx.conf',
'DjangoNginxConfTemplate' ],
[ self.project_name + '/conf',
'supervisord.conf',
'DjangoSupervisorConfTemplate' ],
[ self.project_name,
'fabfile.py',
'DjangoFabfileTemplate' ],
[ self.project_name,
'CHANGES.txt',
'PythonPackageCHANGESFileTemplate' ],
[ self.project_name,
'LICENSE.txt',
'GPL3FileTemplate' ],
[ self.project_name,
'README.txt',
'READMEReSTFileTemplate' ],
[ self.project_name,
'.gitignore',
'DjangoGitignoreTemplate' ],
# django files
[ self.project_name,
'dev.py',
'DjangoSettingsDevTemplate' ],
[ self.project_name,
'prod.py',
'DjangoSettingsProdTemplate' ],
]
return files_description
def substitutes(self):
""" Return the substitutions for the templating replacements. """
author_collector = AuthorCollector()<|fim▁hole|> 'date': date.today().isoformat(),
'author': author_collector.collect(),
'author_email': mail_collector.collect(),
}
return substitute_dict
def posthook(self):
# build the virtualenv
call(['make'])
# create the Django project
call(['./venv/bin/django-admin.py', 'startproject', self.project_name])
# transform original settings files into 3 files for different env
mkdir('{p}/settings'.format(p=self.project_name))
self.touch('{p}/settings/__init__.py'.format(p=self.project_name))
move('dev.py', '{p}/settings'.format(p=self.project_name))
move('prod.py', '{p}/settings'.format(p=self.project_name))
move('{p}/{p}/settings.py'.format(p=self.project_name), '{p}/settings/base.py'.format(p=self.project_name))
# organize files nicely
mkdir('{p}/templates'.format(p=self.project_name))
move('{p}/manage.py'.format(p=self.project_name), 'manage.py')
move('{p}/{p}/__init__.py'.format(p=self.project_name), '{p}/'.format(p=self.project_name))
move('{p}/{p}/urls.py'.format(p=self.project_name), '{p}/'.format(p=self.project_name))
move('{p}/{p}/wsgi.py'.format(p=self.project_name), '{p}/'.format(p=self.project_name))
rmdir('{p}/{p}'.format(p=self.project_name))
# create empty git repo
call(['git', 'init'])
# replace some lines
self.replace_in_file('{p}/wsgi.py'.format(p=self.project_name),
'"{p}.settings"'.format(p=self.project_name),
'"{p}.settings.production"'.format(p=self.project_name))
self.replace_in_file('{p}/settings/base.py'.format(p=self.project_name),
u" # ('Your Name', '[email protected]'),",
u" ('{}', '{}'),".format(self.substitutes()['author'],
self.substitutes()['author_email']))<|fim▁end|> | mail_collector = AuthorMailCollector()
substitute_dict = {
'project': self.project_name,
'project_lower': self.project_name.lower(), |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# Author: Joel Grand-Guillaume
# Copyright 2011-2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.<|fim▁hole|># along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import statement<|fim▁end|> | #
# You should have received a copy of the GNU Affero General Public License |
<|file_name|>_explanation.py<|end_file_name|><|fim▁begin|>import pandas as pd
import numpy as np
import scipy as sp
import sys
import warnings
import copy
import operator
import sklearn
from slicer import Slicer, Alias, Obj
# from ._order import Order
from .utils._general import OpChain
# slicer confuses pylint...
# pylint: disable=no-member
op_chain_root = OpChain("shap.Explanation")
class MetaExplanation(type):
""" This metaclass exposes the Explanation object's methods for creating template op chains.
"""
def __getitem__(cls, item):
return op_chain_root.__getitem__(item)
@property
def abs(cls):
""" Element-wize absolute value op.
"""
return op_chain_root.abs
@property
def identity(cls):
""" A no-op.
"""
return op_chain_root.identity
@property
def argsort(cls):
""" Numpy style argsort.
"""
return op_chain_root.argsort
@property
def sum(cls):
""" Numpy style sum.
"""
return op_chain_root.sum
@property
def max(cls):
""" Numpy style max.
"""
return op_chain_root.max
@property
def min(cls):
""" Numpy style min.
"""
return op_chain_root.min
@property
def mean(cls):
""" Numpy style mean.
"""
return op_chain_root.mean
@property
def sample(cls):
""" Numpy style sample.
"""
return op_chain_root.sample
@property
def hclust(cls):
""" Hierarchial clustering op.
"""
return op_chain_root.hclust
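    # Usage sketch (hypothetical): shap.Explanation.abs.mean(0) builds an
    # OpChain template that is applied to a concrete Explanation later,
    # e.g. inside __getitem__ via OpChain.apply.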
class Explanation(metaclass=MetaExplanation):
""" A slicable set of parallel arrays representing a SHAP explanation.
"""
def __init__( # pylint: disable=too-many-arguments
self,
values,
base_values=None,
data=None,
display_data=None,
instance_names=None,
feature_names=None,
output_names=None,
output_indexes=None,
lower_bounds=None,
upper_bounds=None,
error_std=None,
main_effects=None,
hierarchical_values=None,
clustering=None,
compute_time=None
):
self.op_history = []
self.compute_time = compute_time
# cloning. TODOsomeday: better cloning :)
if issubclass(type(values), Explanation):
e = values
values = e.values
base_values = e.base_values
data = e.data
self.output_dims = compute_output_dims(values, base_values, data, output_names)
values_shape = _compute_shape(values)
if output_names is None and len(self.output_dims) == 1:
output_names = [f"Output {i}" for i in range(values_shape[self.output_dims[0]])]
if len(_compute_shape(feature_names)) == 1: # TODOsomeday: should always be an alias once slicer supports per-row aliases
if len(values_shape) >= 1 and len(feature_names) == values_shape[0]:
feature_names = Alias(list(feature_names), 0)
elif len(values_shape) >= 2 and len(feature_names) == values_shape[1]:
feature_names = Alias(list(feature_names), 1)
if len(_compute_shape(output_names)) == 1: # TODOsomeday: should always be an alias once slicer supports per-row aliases
output_names = Alias(list(output_names), self.output_dims[0])
# if len(values_shape) >= 1 and len(output_names) == values_shape[0]:
# output_names = Alias(list(output_names), 0)
# elif len(values_shape) >= 2 and len(output_names) == values_shape[1]:
# output_names = Alias(list(output_names), 1)
if output_names is not None and not isinstance(output_names, Alias):
l = len(_compute_shape(output_names))
if l == 0:
pass
elif l == 1:
output_names = Obj(output_names, self.output_dims)
elif l == 2:
output_names = Obj(output_names, [0] + list(self.output_dims))
else:
raise ValueError("shap.Explanation does not yet support output_names of order greater than 3!")
if not hasattr(base_values, "__len__") or len(base_values) == 0:
pass
elif len(_compute_shape(base_values)) == len(self.output_dims):
base_values = Obj(base_values, list(self.output_dims))
else:
base_values = Obj(base_values, [0] + list(self.output_dims))
self._s = Slicer(
values=values,
base_values=base_values,
data=list_wrap(data),
display_data=list_wrap(display_data),
instance_names=None if instance_names is None else Alias(instance_names, 0),
feature_names=feature_names,
output_names=output_names,
output_indexes=None if output_indexes is None else (self.output_dims, output_indexes),
lower_bounds=list_wrap(lower_bounds),
upper_bounds=list_wrap(upper_bounds),
error_std=list_wrap(error_std),
main_effects=list_wrap(main_effects),
hierarchical_values=list_wrap(hierarchical_values),
clustering=None if clustering is None else Obj(clustering, [0])
)
@property
def shape(self):
""" Compute the shape over potentially complex data nesting.
"""
return _compute_shape(self._s.values)
@property
def values(self):
""" Pass-through from the underlying slicer object.
"""
return self._s.values
@values.setter
def values(self, new_values):
self._s.values = new_values
@property
def base_values(self):
""" Pass-through from the underlying slicer object.
"""
return self._s.base_values
@base_values.setter
def base_values(self, new_base_values):
self._s.base_values = new_base_values
@property
def data(self):
""" Pass-through from the underlying slicer object.
"""
return self._s.data
@data.setter
def data(self, new_data):
self._s.data = new_data<|fim▁hole|>
@property
def display_data(self):
""" Pass-through from the underlying slicer object.
"""
return self._s.display_data
@display_data.setter
def display_data(self, new_display_data):
if issubclass(type(new_display_data), pd.DataFrame):
new_display_data = new_display_data.values
self._s.display_data = new_display_data
@property
def instance_names(self):
""" Pass-through from the underlying slicer object.
"""
return self._s.instance_names
@property
def output_names(self):
""" Pass-through from the underlying slicer object.
"""
return self._s.output_names
@output_names.setter
def output_names(self, new_output_names):
self._s.output_names = new_output_names
@property
def output_indexes(self):
""" Pass-through from the underlying slicer object.
"""
return self._s.output_indexes
@property
def feature_names(self):
""" Pass-through from the underlying slicer object.
"""
return self._s.feature_names
@feature_names.setter
def feature_names(self, new_feature_names):
self._s.feature_names = new_feature_names
@property
def lower_bounds(self):
""" Pass-through from the underlying slicer object.
"""
return self._s.lower_bounds
@property
def upper_bounds(self):
""" Pass-through from the underlying slicer object.
"""
return self._s.upper_bounds
@property
def error_std(self):
""" Pass-through from the underlying slicer object.
"""
return self._s.error_std
@property
def main_effects(self):
""" Pass-through from the underlying slicer object.
"""
return self._s.main_effects
@main_effects.setter
def main_effects(self, new_main_effects):
self._s.main_effects = new_main_effects
@property
def hierarchical_values(self):
""" Pass-through from the underlying slicer object.
"""
return self._s.hierarchical_values
@hierarchical_values.setter
def hierarchical_values(self, new_hierarchical_values):
self._s.hierarchical_values = new_hierarchical_values
@property
def clustering(self):
""" Pass-through from the underlying slicer object.
"""
return self._s.clustering
@clustering.setter
def clustering(self, new_clustering):
self._s.clustering = new_clustering
def cohorts(self, cohorts):
""" Split this explanation into several cohorts.
Parameters
----------
cohorts : int or array
If this is an integer then we auto build that many cohorts using a decision tree. If this is
an array then we treat that as an array of cohort names/ids for each instance.
"""
if isinstance(cohorts, int):
return _auto_cohorts(self, max_cohorts=cohorts)
if isinstance(cohorts, (list, tuple, np.ndarray)):
cohorts = np.array(cohorts)
return Cohorts(**{name: self[cohorts == name] for name in np.unique(cohorts)})
raise Exception("The given set of cohort indicators is not recognized! Please give an array or int.")
def __repr__(self):
""" Display some basic printable info, but not everything.
"""
out = ".values =\n"+self.values.__repr__()
if self.base_values is not None:
out += "\n\n.base_values =\n"+self.base_values.__repr__()
if self.data is not None:
out += "\n\n.data =\n"+self.data.__repr__()
return out
def __getitem__(self, item):
""" This adds support for OpChain indexing.
"""
new_self = None
if not isinstance(item, tuple):
item = (item,)
# convert any OpChains or magic strings
pos = -1
for t in item: # pylint: disable=too-many-nested-blocks
pos += 1
# skip over Ellipsis
if t == Ellipsis:
pos += len(self.shape) - len(item)
continue
orig_t = t
if issubclass(type(t), OpChain):
t = t.apply(self)
if issubclass(type(t), (np.int64, np.int32)): # because slicer does not like numpy indexes
t = int(t)
elif issubclass(type(t), np.ndarray):
t = [int(v) for v in t] # slicer wants lists not numpy arrays for indexing
elif issubclass(type(t), Explanation):
t = t.values
elif isinstance(t, str):
# work around for 2D output_names since they are not yet slicer supported
output_names_dims = []
if "output_names" in self._s._objects:
output_names_dims = self._s._objects["output_names"].dim
elif "output_names" in self._s._aliases:
output_names_dims = self._s._aliases["output_names"].dim
if pos != 0 and pos in output_names_dims:
if len(output_names_dims) == 1:
t = np.argwhere(np.array(self.output_names) == t)[0][0]
elif len(output_names_dims) == 2:
new_values = []
new_base_values = []
new_data = []
new_self = copy.deepcopy(self)
for i, v in enumerate(self.values):
for j, s in enumerate(self.output_names[i]):
if s == t:
new_values.append(np.array(v[:,j]))
new_data.append(np.array(self.data[i]))
new_base_values.append(self.base_values[i][j])
new_self = Explanation(
np.array(new_values),
np.array(new_base_values),
np.array(new_data),
self.display_data,
self.instance_names,
np.array(new_data),
t, # output_names
self.output_indexes,
self.lower_bounds,
self.upper_bounds,
self.error_std,
self.main_effects,
self.hierarchical_values,
self.clustering
)
new_self.op_history = copy.copy(self.op_history)
# new_self = copy.deepcopy(self)
# new_self.values = np.array(new_values)
# new_self.base_values = np.array(new_base_values)
# new_self.data = np.array(new_data)
# new_self.output_names = t
# new_self.feature_names = np.array(new_data)
# new_self.clustering = None
# work around for 2D feature_names since they are not yet slicer supported
feature_names_dims = []
if "feature_names" in self._s._objects:
feature_names_dims = self._s._objects["feature_names"].dim
if pos != 0 and pos in feature_names_dims and len(feature_names_dims) == 2:
new_values = []
new_data = []
for i, val_i in enumerate(self.values):
for s,v,d in zip(self.feature_names[i], val_i, self.data[i]):
if s == t:
new_values.append(v)
new_data.append(d)
new_self = copy.deepcopy(self)
new_self.values = new_values
new_self.data = new_data
new_self.feature_names = t
new_self.clustering = None
# return new_self
if issubclass(type(t), (np.int8, np.int16, np.int32, np.int64)):
t = int(t)
if t is not orig_t:
tmp = list(item)
tmp[pos] = t
item = tuple(tmp)
# call slicer for the real work
item = tuple(v for v in item) # SML I cut out: `if not isinstance(v, str)`
if len(item) == 0:
return new_self
if new_self is None:
new_self = copy.copy(self)
new_self._s = new_self._s.__getitem__(item)
new_self.op_history.append({
"name": "__getitem__",
"args": (item,),
"prev_shape": self.shape
})
return new_self
def __len__(self):
return self.shape[0]
def __copy__(self):
new_exp = Explanation(
self.values,
self.base_values,
self.data,
self.display_data,
self.instance_names,
self.feature_names,
self.output_names,
self.output_indexes,
self.lower_bounds,
self.upper_bounds,
self.error_std,
self.main_effects,
self.hierarchical_values,
self.clustering
)
new_exp.op_history = copy.copy(self.op_history)
return new_exp
def _apply_binary_operator(self, other, binary_op, op_name):
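        # Clone this explanation, record the op in op_history, and apply the
        # operator to values (and to data/base_values when they are present).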
new_exp = self.__copy__()
new_exp.op_history = copy.copy(self.op_history)
new_exp.op_history.append({
"name": op_name,
"args": (other,),
"prev_shape": self.shape
})
if isinstance(other, Explanation):
new_exp.values = binary_op(new_exp.values, other.values)
if new_exp.data is not None:
new_exp.data = binary_op(new_exp.data, other.data)
if new_exp.base_values is not None:
new_exp.base_values = binary_op(new_exp.base_values, other.base_values)
else:
new_exp.values = binary_op(new_exp.values, other)
if new_exp.data is not None:
new_exp.data = binary_op(new_exp.data, other)
if new_exp.base_values is not None:
new_exp.base_values = binary_op(new_exp.base_values, other)
return new_exp
def __add__(self, other):
return self._apply_binary_operator(other, operator.add, "__add__")
def __radd__(self, other):
return self._apply_binary_operator(other, operator.add, "__add__")
def __sub__(self, other):
return self._apply_binary_operator(other, operator.sub, "__sub__")
def __rsub__(self, other):
return self._apply_binary_operator(other, operator.sub, "__sub__")
def __mul__(self, other):
return self._apply_binary_operator(other, operator.mul, "__mul__")
def __rmul__(self, other):
return self._apply_binary_operator(other, operator.mul, "__mul__")
def __truediv__(self, other):
return self._apply_binary_operator(other, operator.truediv, "__truediv__")
# @property
# def abs(self):
# """ Element-size absolute value operator.
# """
# new_self = copy.copy(self)
# new_self.values = np.abs(new_self.values)
# new_self.op_history.append({
# "name": "abs",
# "prev_shape": self.shape
# })
# return new_self
def _numpy_func(self, fname, **kwargs):
""" Apply a numpy-style function to this Explanation.
"""
new_self = copy.copy(self)
axis = kwargs.get("axis", None)
# collapse the slicer to right shape
if axis == 0:
new_self = new_self[0]
elif axis == 1:
new_self = new_self[1]
elif axis == 2:
new_self = new_self[2]
if axis in [0,1,2]:
new_self.op_history = new_self.op_history[:-1] # pop off the slicing operation we just used
if self.feature_names is not None and not is_1d(self.feature_names) and axis == 0:
new_values = self._flatten_feature_names()
new_self.feature_names = np.array(list(new_values.keys()))
new_self.values = np.array([getattr(np, fname)(v,0) for v in new_values.values()])
new_self.clustering = None
else:
new_self.values = getattr(np, fname)(np.array(self.values), **kwargs)
if new_self.data is not None:
try:
new_self.data = getattr(np, fname)(np.array(self.data), **kwargs)
except:
new_self.data = None
if new_self.base_values is not None and issubclass(type(axis), int) and len(self.base_values.shape) > axis:
new_self.base_values = getattr(np, fname)(self.base_values, **kwargs)
elif issubclass(type(axis), int):
new_self.base_values = None
if axis == 0 and self.clustering is not None and len(self.clustering.shape) == 3:
if self.clustering.std(0).sum() < 1e-8:
new_self.clustering = self.clustering[0]
else:
new_self.clustering = None
new_self.op_history.append({
"name": fname,
"kwargs": kwargs,
"prev_shape": self.shape,
"collapsed_instances": axis == 0
})
return new_self
def mean(self, axis):
""" Numpy-style mean function.
"""
return self._numpy_func("mean", axis=axis)
def max(self, axis):
""" Numpy-style mean function.
"""
return self._numpy_func("max", axis=axis)
def min(self, axis):
""" Numpy-style mean function.
"""
return self._numpy_func("min", axis=axis)
def sum(self, axis=None, grouping=None):
""" Numpy-style mean function.
"""
if grouping is None:
return self._numpy_func("sum", axis=axis)
elif axis == 1 or len(self.shape) == 1:
return group_features(self, grouping)
else:
raise Exception("Only axis = 1 is supported for grouping right now...")
def hstack(self, other):
""" Stack two explanations column-wise.
"""
assert self.shape[0] == other.shape[0], "Can't hstack explanations with different numbers of rows!"
assert np.max(np.abs(self.base_values - other.base_values)) < 1e-6, "Can't hstack explanations with different base values!"
new_exp = Explanation(
            np.hstack([self.values, other.values]),
self.base_values,
self.data,
self.display_data,
self.instance_names,
self.feature_names,
self.output_names,
self.output_indexes,
self.lower_bounds,
self.upper_bounds,
self.error_std,
self.main_effects,
self.hierarchical_values,
self.clustering
)
return self._numpy_func("min", axis=axis)
# def reshape(self, *args):
# return self._numpy_func("reshape", newshape=args)
@property
def abs(self):
return self._numpy_func("abs")
@property
def identity(self):
return self
@property
def argsort(self):
return self._numpy_func("argsort")
@property
def flip(self):
return self._numpy_func("flip")
def hclust(self, metric="sqeuclidean", axis=0):
""" Computes an optimal leaf ordering sort order using hclustering.
hclust(metric="sqeuclidean")
Parameters
----------
metric : string
A metric supported by scipy clustering.
axis : int
The axis to cluster along.
"""
values = self.values
if len(values.shape) != 2:
raise Exception("The hclust order only supports 2D arrays right now!")
if axis == 1:
values = values.T
# compute a hierarchical clustering and return the optimal leaf ordering
D = sp.spatial.distance.pdist(values, metric)
cluster_matrix = sp.cluster.hierarchy.complete(D)
inds = sp.cluster.hierarchy.leaves_list(sp.cluster.hierarchy.optimal_leaf_ordering(cluster_matrix, D))
return inds
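# Usage sketch: the returned index order can drive plots that accept an
# explicit instance ordering (heatmap API assumed available in shap.plots):
#   order = exp.hclust(metric="correlation", axis=0)
#   shap.plots.heatmap(exp, instance_order=order)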
def sample(self, max_samples, replace=False, random_state=0):
""" Randomly samples the instances (rows) of the Explanation object.
Parameters
----------
max_samples : int
The number of rows to sample. Note that if replace=False then fewer than
max_samples rows will be drawn when explanation.shape[0] < max_samples.
replace : bool
Sample with or without replacement.
"""
# np.random.seed() returns None, so save/restore the full RNG state instead
prev_state = np.random.get_state()
np.random.seed(random_state)
inds = np.random.choice(self.shape[0], min(max_samples, self.shape[0]), replace=replace)
np.random.set_state(prev_state)
return self[list(inds)]
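# Usage sketch: reproducibly downsample a large Explanation before an
# expensive plot; at most 1000 rows are kept.
#   small = exp.sample(1000, random_state=0)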
def _flatten_feature_names(self):
new_values = {}
for i in range(len(self.values)):
for s,v in zip(self.feature_names[i], self.values[i]):
if s not in new_values:
new_values[s] = []
new_values[s].append(v)
return new_values
def _use_data_as_feature_names(self):
new_values = {}
for i in range(len(self.values)):
for s,v in zip(self.data[i], self.values[i]):
if s not in new_values:
new_values[s] = []
new_values[s].append(v)
return new_values
def percentile(self, q, axis=None):
new_self = copy.deepcopy(self)
if self.feature_names is not None and not is_1d(self.feature_names) and axis == 0:
new_values = self._flatten_feature_names()
new_self.feature_names = np.array(list(new_values.keys()))
new_self.values = np.array([np.percentile(v, q) for v in new_values.values()])
new_self.clustering = None
else:
new_self.values = np.percentile(new_self.values, q, axis)
if new_self.data is not None:
    new_self.data = np.percentile(new_self.data, q, axis)
new_self.op_history.append({
"name": "percentile",
"args": (axis,),
"prev_shape": self.shape,
"collapsed_instances": axis == 0
})
return new_self
def group_features(shap_values, feature_map):
# TODO someday: support and deal with clusterings
reverse_map = {}
for name in feature_map:
reverse_map[feature_map[name]] = reverse_map.get(feature_map[name], []) + [name]
curr_names = shap_values.feature_names
sv_new = copy.deepcopy(shap_values)
found = {}
i = 0
rank1 = len(shap_values.shape) == 1
for name in curr_names:
new_name = feature_map.get(name, name)
if new_name in found:
continue
found[new_name] = True
cols_to_sum = reverse_map.get(new_name, [new_name])
old_inds = [curr_names.index(v) for v in cols_to_sum]
if rank1:
sv_new.values[i] = shap_values.values[old_inds].sum()
sv_new.data[i] = shap_values.data[old_inds].sum()
else:
sv_new.values[:,i] = shap_values.values[:,old_inds].sum(1)
sv_new.data[:,i] = shap_values.data[:,old_inds].sum(1)
sv_new.feature_names[i] = new_name
i += 1
return Explanation(
sv_new.values[:i] if rank1 else sv_new.values[:,:i],
base_values = sv_new.base_values,
data = sv_new.data[:i] if rank1 else sv_new.data[:,:i],
display_data = None if sv_new.display_data is None else (sv_new.display_data[:i] if rank1 else sv_new.display_data[:,:i]),
instance_names = None,
feature_names = None if sv_new.feature_names is None else sv_new.feature_names[:i],
output_names = None,
output_indexes = None,
lower_bounds = None,
upper_bounds = None,
error_std = None,
main_effects = None,
hierarchical_values = None,
clustering = None
)
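# Usage sketch (hypothetical feature names): collapse one-hot encoded columns
# into their parent feature by summing their attributions.
#   grouped = group_features(exp, {"sex_male": "sex", "sex_female": "sex"})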
def compute_output_dims(values, base_values, data, output_names):
""" Uses the passed data to infer which dimensions correspond to the model's output.
"""
values_shape = _compute_shape(values)
# input shape matches the data shape
if data is not None:
data_shape = _compute_shape(data)
# if we are not given any data we assume it would be the same shape as the given values
else:
data_shape = values_shape
# output shape is known from the base values or output names
if output_names is not None:
output_shape = _compute_shape(output_names)
# if our output_names are per sample then we need to drop the sample dimension here
if values_shape[-len(output_shape):] != output_shape and \
values_shape[-len(output_shape)+1:] == output_shape[1:] and values_shape[0] == output_shape[0]:
output_shape = output_shape[1:]
elif base_values is not None:
output_shape = _compute_shape(base_values)[1:]
else:
output_shape = tuple()
interaction_order = len(values_shape) - len(data_shape) - len(output_shape)
values_dims = list(range(len(values_shape)))
output_dims = range(len(data_shape) + interaction_order, len(values_shape))
return tuple(output_dims)
def is_1d(val):
return not (isinstance(val[0], list) or isinstance(val[0], np.ndarray))
class Op():
pass
class Percentile(Op):
def __init__(self, percentile):
self.percentile = percentile
def add_repr(self, s, verbose=False):
return "percentile("+s+", "+str(self.percentile)+")"
def _first_item(x):
for item in x:
return item
return None
def _compute_shape(x):
if not hasattr(x, "__len__") or isinstance(x, str):
return tuple()
elif not sp.sparse.issparse(x) and len(x) > 0 and isinstance(_first_item(x), str):
return (None,)
else:
if isinstance(x, dict):
return (len(x),) + _compute_shape(x[next(iter(x))])
# 2D arrays we just take their shape as-is
if len(getattr(x, "shape", tuple())) > 1:
return x.shape
# 1D arrays we need to look inside
if len(x) == 0:
return (0,)
elif len(x) == 1:
return (1,) + _compute_shape(_first_item(x))
else:
first_shape = _compute_shape(_first_item(x))
if first_shape == tuple():
return (len(x),)
else: # we have an array of arrays...
matches = np.ones(len(first_shape), dtype=bool)
for i in range(1, len(x)):
shape = _compute_shape(x[i])
assert len(shape) == len(first_shape), "Arrays in Explanation objects must have consistent inner dimensions!"
for j in range(0, len(shape)):
matches[j] &= shape[j] == first_shape[j]
return (len(x),) + tuple(first_shape[j] if match else None for j, match in enumerate(matches))
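# Illustrative expectations (ragged inner lengths collapse to None, and
# string-valued leaves report an unknown length):
#   _compute_shape([[1, 2], [3, 4]])    -> (2, 2)
#   _compute_shape([[1, 2], [3, 4, 5]]) -> (2, None)
#   _compute_shape(["a", "b"])          -> (None,)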
class Cohorts():
def __init__(self, **kwargs):
self.cohorts = kwargs
for k in self.cohorts:
assert isinstance(self.cohorts[k], Explanation), "All the arguments to a Cohorts set must be Explanation objects!"
def __getitem__(self, item):
new_cohorts = Cohorts()
for k in self.cohorts:
new_cohorts.cohorts[k] = self.cohorts[k].__getitem__(item)
return new_cohorts
def __getattr__(self, name):
new_cohorts = Cohorts()
for k in self.cohorts:
new_cohorts.cohorts[k] = getattr(self.cohorts[k], name)
return new_cohorts
def __call__(self, *args, **kwargs):
new_cohorts = Cohorts()
for k in self.cohorts:
new_cohorts.cohorts[k] = self.cohorts[k].__call__(*args, **kwargs)
return new_cohorts
def __repr__(self):
return f"<shap._explanation.Cohorts object with {len(self.cohorts)} cohorts of sizes: {[v.shape for v in self.cohorts.values()]}>"
def _auto_cohorts(shap_values, max_cohorts):
""" This uses a DecisionTreeRegressor to build a group of cohorts with similar SHAP values.
"""
# fit a decision tree that separates the SHAP values well
m = sklearn.tree.DecisionTreeRegressor(max_leaf_nodes=max_cohorts)
m.fit(shap_values.data, shap_values.values)
# group instances by their decision paths
paths = m.decision_path(shap_values.data).toarray()
unique_paths = np.unique(m.decision_path(shap_values.data).todense(), axis=0)
path_names = []
# mark each instance with a path name
for i in range(shap_values.shape[0]):
name = ""
for j in range(len(paths[i])):
if paths[i,j] > 0:
feature = m.tree_.feature[j]
threshold = m.tree_.threshold[j]
val = shap_values.data[i,feature]
if feature >= 0:
name += str(shap_values.feature_names[feature])
if val < threshold:
name += " < "
else:
name += " >= "
name += str(threshold) + " & "
path_names.append(name[:-3]) # the -3 strips off the last unneeded ' & '
path_names = np.array(path_names)
# split the instances into cohorts by their path names
cohorts = {}
for name in np.unique(path_names):
cohorts[name] = shap_values[path_names == name]
return Cohorts(**cohorts)
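# Usage sketch: split instances into at most 5 cohorts whose members received
# similar explanations, then inspect the cohort sizes via repr.
#   cohorts = _auto_cohorts(exp, max_cohorts=5)
#   print(cohorts)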
def list_wrap(x):
""" A helper to patch things since slicer doesn't handle arrays of arrays (it does handle lists of arrays)
"""
if isinstance(x, np.ndarray) and len(x.shape) == 1 and isinstance(x[0], np.ndarray):
return [v for v in x]
else:
return x<|fim▁end|> | |
<|file_name|>BackRightAutonomous.java<|end_file_name|><|fim▁begin|>/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
/**
*
* @author Bill
*/
public class BackRightAutonomous extends CommandBase {
public BackRightAutonomous() {
// Use requires() here to declare subsystem dependencies
// eg. requires(chassis);
}
<|fim▁hole|> protected void initialize() {
}
// Called repeatedly when this Command is scheduled to run
protected void execute() {
}
// Make this return true when this Command no longer needs to run execute()
protected boolean isFinished() {
return false;
}
// Called once after isFinished returns true
protected void end() {
}
// Called when another command which requires one or more of the same
// subsystems is scheduled to run
protected void interrupted() {
}
}<|fim▁end|> |
// Called just before this Command runs the first time
|
<|file_name|>sizelimit.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2012 OpenStack, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Request Body limiting middleware.
"""
import webob.dec
import webob.exc
from cinder import flags
from cinder.openstack.common import cfg
from cinder.openstack.common import log as logging
from cinder import wsgi<|fim▁hole|>
# default max request body size is 112k (114688 bytes)
max_request_body_size_opt = cfg.IntOpt('osapi_max_request_body_size',
default=114688,
help='Max size for body of a request')
FLAGS = flags.FLAGS
FLAGS.register_opt(max_request_body_size_opt)
LOG = logging.getLogger(__name__)
class RequestBodySizeLimiter(wsgi.Middleware):
"""Add a 'cinder.context' to WSGI environ."""
def __init__(self, *args, **kwargs):
super(RequestBodySizeLimiter, self).__init__(*args, **kwargs)
@webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, req):
if (req.content_length > FLAGS.osapi_max_request_body_size
or len(req.body) > FLAGS.osapi_max_request_body_size):
msg = _("Request is too large.")
raise webob.exc.HTTPBadRequest(explanation=msg)
else:
return self.application<|fim▁end|> | |
<|file_name|>test_ipminative.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2013 International Business Machines Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Test class for Native IPMI power driver module.
"""
import mock
from oslo_config import cfg
from pyghmi import exceptions as pyghmi_exception
from ironic.common import boot_devices
from ironic.common import driver_factory
from ironic.common import exception
from ironic.common import states
from ironic.common import utils
from ironic.conductor import task_manager
from ironic.drivers.modules import console_utils
from ironic.drivers.modules import ipminative
from ironic.tests.conductor import utils as mgr_utils
from ironic.tests.db import base as db_base
from ironic.tests.db import utils as db_utils
from ironic.tests.objects import utils as obj_utils
CONF = cfg.CONF
INFO_DICT = db_utils.get_test_ipmi_info()
class IPMINativePrivateMethodTestCase(db_base.DbTestCase):
"""Test cases for ipminative private methods."""
def setUp(self):
super(IPMINativePrivateMethodTestCase, self).setUp()
self.node = obj_utils.create_test_node(self.context,
driver='fake_ipminative',
driver_info=INFO_DICT)
self.info = ipminative._parse_driver_info(self.node)
def test__parse_driver_info(self):
# make sure we get back the expected things
self.assertIsNotNone(self.info.get('address'))
self.assertIsNotNone(self.info.get('username'))
self.assertIsNotNone(self.info.get('password'))
self.assertIsNotNone(self.info.get('uuid'))
# make sure error is raised when info, eg. username, is missing
info = dict(INFO_DICT)
del info['ipmi_username']
node = obj_utils.get_test_node(self.context, driver_info=info)
self.assertRaises(exception.MissingParameterValue,
ipminative._parse_driver_info,
node)
@mock.patch('pyghmi.ipmi.command.Command')
def test__power_status_on(self, ipmi_mock):
ipmicmd = ipmi_mock.return_value
ipmicmd.get_power.return_value = {'powerstate': 'on'}
state = ipminative._power_status(self.info)
ipmicmd.get_power.assert_called_once_with()
self.assertEqual(states.POWER_ON, state)
@mock.patch('pyghmi.ipmi.command.Command')
def test__power_status_off(self, ipmi_mock):
ipmicmd = ipmi_mock.return_value
ipmicmd.get_power.return_value = {'powerstate': 'off'}
state = ipminative._power_status(self.info)
ipmicmd.get_power.assert_called_once_with()
self.assertEqual(states.POWER_OFF, state)
@mock.patch('pyghmi.ipmi.command.Command')
def test__power_status_error(self, ipmi_mock):
ipmicmd = ipmi_mock.return_value
ipmicmd.get_power.return_value = {'powerstate': 'Error'}
state = ipminative._power_status(self.info)
ipmicmd.get_power.assert_called_once_with()
self.assertEqual(states.ERROR, state)
@mock.patch('pyghmi.ipmi.command.Command')
def test__power_on(self, ipmi_mock):
ipmicmd = ipmi_mock.return_value
ipmicmd.set_power.return_value = {'powerstate': 'on'}
self.config(retry_timeout=400, group='ipmi')
state = ipminative._power_on(self.info)
ipmicmd.set_power.assert_called_once_with('on', 400)
self.assertEqual(states.POWER_ON, state)
@mock.patch('pyghmi.ipmi.command.Command')
def test__power_off(self, ipmi_mock):
ipmicmd = ipmi_mock.return_value
ipmicmd.set_power.return_value = {'powerstate': 'off'}
self.config(retry_timeout=500, group='ipmi')
state = ipminative._power_off(self.info)
ipmicmd.set_power.assert_called_once_with('off', 500)
self.assertEqual(states.POWER_OFF, state)
@mock.patch('pyghmi.ipmi.command.Command')
def test__reboot(self, ipmi_mock):
ipmicmd = ipmi_mock.return_value
ipmicmd.set_power.return_value = {'powerstate': 'on'}
self.config(retry_timeout=600, group='ipmi')
state = ipminative._reboot(self.info)
ipmicmd.set_power.assert_called_once_with('boot', 600)
self.assertEqual(states.POWER_ON, state)
def _create_sensor_object(self, value, type_, name, states=None,
units='fake_units', health=0):
if states is None:
states = []
return type('Reading', (object, ), {'value': value, 'type': type_,
'name': name, 'states': states,
'units': units, 'health': health})()
@mock.patch('pyghmi.ipmi.command.Command')
def test__get_sensors_data(self, ipmi_mock):
reading_1 = self._create_sensor_object('fake_value1',
'fake_type_A',
'fake_name1')
reading_2 = self._create_sensor_object('fake_value2',
'fake_type_A',
'fake_name2')
reading_3 = self._create_sensor_object('fake_value3',
'fake_type_B',
'fake_name3')
readings = [reading_1, reading_2, reading_3]
ipmicmd = ipmi_mock.return_value
ipmicmd.get_sensor_data.return_value = readings
expected = {
'fake_type_A': {
'fake_name1': {
'Health': '0',
'Sensor ID': 'fake_name1',
'Sensor Reading': 'fake_value1 fake_units',
'States': '[]',
'Units': 'fake_units'
},
'fake_name2': {
'Health': '0',
'Sensor ID': 'fake_name2',
'Sensor Reading': 'fake_value2 fake_units',
'States': '[]',
'Units': 'fake_units'
}
},
'fake_type_B': {
'fake_name3': {
'Health': '0',
'Sensor ID': 'fake_name3',
'Sensor Reading': 'fake_value3 fake_units',
'States': '[]', 'Units': 'fake_units'
}
}
}
ret = ipminative._get_sensors_data(self.info)
self.assertEqual(expected, ret)
@mock.patch('pyghmi.ipmi.command.Command')
def test__get_sensors_data_missing_values(self, ipmi_mock):
reading_1 = self._create_sensor_object('fake_value1',
'fake_type_A',
'fake_name1')
reading_2 = self._create_sensor_object(None,
'fake_type_A',
'fake_name2')
reading_3 = self._create_sensor_object(None,
'fake_type_B',
'fake_name3')
readings = [reading_1, reading_2, reading_3]
ipmicmd = ipmi_mock.return_value
ipmicmd.get_sensor_data.return_value = readings
expected = {
'fake_type_A': {
'fake_name1': {
'Health': '0',
'Sensor ID': 'fake_name1',
'Sensor Reading': 'fake_value1 fake_units',
'States': '[]',
'Units': 'fake_units'
}
}
}
ret = ipminative._get_sensors_data(self.info)
self.assertEqual(expected, ret)
class IPMINativeDriverTestCase(db_base.DbTestCase):
"""Test cases for ipminative.NativeIPMIPower class functions."""
def setUp(self):
super(IPMINativeDriverTestCase, self).setUp()
mgr_utils.mock_the_extension_manager(driver="fake_ipminative")
self.driver = driver_factory.get_driver("fake_ipminative")
self.node = obj_utils.create_test_node(self.context,
driver='fake_ipminative',
driver_info=INFO_DICT)
self.info = ipminative._parse_driver_info(self.node)
def test_get_properties(self):
expected = ipminative.COMMON_PROPERTIES
self.assertEqual(expected, self.driver.power.get_properties())
self.assertEqual(expected, self.driver.management.get_properties())
expected = list(ipminative.COMMON_PROPERTIES)
expected += list(ipminative.CONSOLE_PROPERTIES)
self.assertEqual(sorted(expected),
sorted(self.driver.console.get_properties().keys()))
self.assertEqual(sorted(expected),
sorted(self.driver.get_properties().keys()))
<|fim▁hole|> @mock.patch('pyghmi.ipmi.command.Command')
def test_get_power_state(self, ipmi_mock):
# Getting the mocked command.
cmd_mock = ipmi_mock.return_value
# Getting the get power mock.
get_power_mock = cmd_mock.get_power
return_values = [{'powerstate': 'error'},
{'powerstate': 'on'},
{'powerstate': 'off'}]
get_power_mock.side_effect = lambda: return_values.pop()
with task_manager.acquire(self.context, self.node.uuid) as task:
pstate = self.driver.power.get_power_state(task)
self.assertEqual(states.POWER_OFF, pstate)
pstate = self.driver.power.get_power_state(task)
self.assertEqual(states.POWER_ON, pstate)
pstate = self.driver.power.get_power_state(task)
self.assertEqual(states.ERROR, pstate)
self.assertEqual(3, get_power_mock.call_count,
"pyghmi.ipmi.command.Command.get_power was not"
" called 3 times.")
@mock.patch.object(ipminative, '_power_on')
def test_set_power_on_ok(self, power_on_mock):
power_on_mock.return_value = states.POWER_ON
with task_manager.acquire(self.context,
self.node.uuid) as task:
self.driver.power.set_power_state(
task, states.POWER_ON)
power_on_mock.assert_called_once_with(self.info)
@mock.patch.object(ipminative, '_power_off')
def test_set_power_off_ok(self, power_off_mock):
power_off_mock.return_value = states.POWER_OFF
with task_manager.acquire(self.context,
self.node.uuid) as task:
self.driver.power.set_power_state(
task, states.POWER_OFF)
power_off_mock.assert_called_once_with(self.info)
@mock.patch('pyghmi.ipmi.command.Command')
def test_set_power_on_fail(self, ipmi_mock):
ipmicmd = ipmi_mock.return_value
ipmicmd.set_power.return_value = {'powerstate': 'error'}
self.config(retry_timeout=500, group='ipmi')
with task_manager.acquire(self.context,
self.node.uuid) as task:
self.assertRaises(exception.PowerStateFailure,
self.driver.power.set_power_state,
task,
states.POWER_ON)
ipmicmd.set_power.assert_called_once_with('on', 500)
@mock.patch('pyghmi.ipmi.command.Command')
def test_set_boot_device_ok(self, ipmi_mock):
ipmicmd = ipmi_mock.return_value
ipmicmd.set_bootdev.return_value = None
with task_manager.acquire(self.context,
self.node.uuid) as task:
self.driver.management.set_boot_device(task, boot_devices.PXE)
# PXE is converted to 'network' internally by ipminative
ipmicmd.set_bootdev.assert_called_once_with('network', persist=False)
def test_set_boot_device_bad_device(self):
with task_manager.acquire(self.context, self.node.uuid) as task:
self.assertRaises(exception.InvalidParameterValue,
self.driver.management.set_boot_device,
task,
'fake-device')
@mock.patch.object(ipminative, '_reboot')
def test_reboot_ok(self, reboot_mock):
reboot_mock.return_value = None
with task_manager.acquire(self.context,
self.node.uuid) as task:
self.driver.power.reboot(task)
reboot_mock.assert_called_once_with(self.info)
@mock.patch('pyghmi.ipmi.command.Command')
def test_reboot_fail(self, ipmi_mock):
ipmicmd = ipmi_mock.return_value
ipmicmd.set_power.return_value = {'powerstate': 'error'}
self.config(retry_timeout=500, group='ipmi')
with task_manager.acquire(self.context,
self.node.uuid) as task:
self.assertRaises(exception.PowerStateFailure,
self.driver.power.reboot,
task)
ipmicmd.set_power.assert_called_once_with('boot', 500)
def test_management_interface_get_supported_boot_devices(self):
with task_manager.acquire(self.context, self.node.uuid) as task:
expected = [boot_devices.PXE, boot_devices.DISK,
boot_devices.CDROM, boot_devices.BIOS]
self.assertEqual(sorted(expected), sorted(task.driver.management.
get_supported_boot_devices()))
@mock.patch('pyghmi.ipmi.command.Command')
def test_management_interface_get_boot_device_good(self, ipmi_mock):
ipmicmd = ipmi_mock.return_value
ipmicmd.get_bootdev.return_value = {'bootdev': 'hd'}
with task_manager.acquire(self.context, self.node.uuid) as task:
bootdev = self.driver.management.get_boot_device(task)
self.assertEqual(boot_devices.DISK, bootdev['boot_device'])
self.assertIsNone(bootdev['persistent'])
@mock.patch('pyghmi.ipmi.command.Command')
def test_management_interface_get_boot_device_persistent(self, ipmi_mock):
ipmicmd = ipmi_mock.return_value
ipmicmd.get_bootdev.return_value = {'bootdev': 'hd',
'persistent': True}
with task_manager.acquire(self.context, self.node.uuid) as task:
bootdev = self.driver.management.get_boot_device(task)
self.assertEqual(boot_devices.DISK, bootdev['boot_device'])
self.assertTrue(bootdev['persistent'])
@mock.patch('pyghmi.ipmi.command.Command')
def test_management_interface_get_boot_device_fail(self, ipmi_mock):
ipmicmd = ipmi_mock.return_value
ipmicmd.get_bootdev.side_effect = pyghmi_exception.IpmiException
with task_manager.acquire(self.context, self.node.uuid) as task:
self.assertRaises(exception.IPMIFailure,
self.driver.management.get_boot_device, task)
@mock.patch('pyghmi.ipmi.command.Command')
def test_management_interface_get_boot_device_fail_dict(self, ipmi_mock):
ipmicmd = ipmi_mock.return_value
ipmicmd.get_bootdev.return_value = {'error': 'boooom'}
with task_manager.acquire(self.context, self.node.uuid) as task:
self.assertRaises(exception.IPMIFailure,
self.driver.management.get_boot_device, task)
@mock.patch('pyghmi.ipmi.command.Command')
def test_management_interface_get_boot_device_unknown(self, ipmi_mock):
ipmicmd = ipmi_mock.return_value
ipmicmd.get_bootdev.return_value = {'bootdev': 'unknown'}
with task_manager.acquire(self.context, self.node.uuid) as task:
expected = {'boot_device': None, 'persistent': None}
self.assertEqual(expected,
self.driver.management.get_boot_device(task))
def test_management_interface_validate_good(self):
with task_manager.acquire(self.context, self.node.uuid) as task:
task.driver.management.validate(task)
def test_management_interface_validate_fail(self):
# Missing IPMI driver_info information
node = obj_utils.create_test_node(self.context,
uuid=utils.generate_uuid(),
driver='fake_ipminative')
with task_manager.acquire(self.context, node.uuid) as task:
self.assertRaises(exception.MissingParameterValue,
task.driver.management.validate, task)
@mock.patch('pyghmi.ipmi.command.Command')
def test_get_sensors_data(self, ipmi_mock):
ipmicmd = ipmi_mock.return_value
ipmicmd.get_sensor_data.return_value = None
with task_manager.acquire(self.context,
self.node.uuid) as task:
self.driver.management.get_sensors_data(task)
ipmicmd.get_sensor_data.assert_called_once_with()
@mock.patch.object(console_utils, 'start_shellinabox_console',
autospec=True)
def test_start_console(self, mock_exec):
mock_exec.return_value = None
with task_manager.acquire(self.context,
self.node.uuid) as task:
self.driver.console.start_console(task)
mock_exec.assert_called_once_with(self.info['uuid'],
self.info['port'],
mock.ANY)
self.assertTrue(mock_exec.called)
@mock.patch.object(console_utils, 'start_shellinabox_console',
autospec=True)
def test_start_console_fail(self, mock_exec):
mock_exec.side_effect = exception.ConsoleSubprocessFailed(
error='error')
with task_manager.acquire(self.context,
self.node.uuid) as task:
self.assertRaises(exception.ConsoleSubprocessFailed,
self.driver.console.start_console,
task)
@mock.patch.object(console_utils, 'stop_shellinabox_console',
autospec=True)
def test_stop_console(self, mock_exec):
mock_exec.return_value = None
with task_manager.acquire(self.context,
self.node['uuid']) as task:
self.driver.console.stop_console(task)
mock_exec.assert_called_once_with(self.info['uuid'])
self.assertTrue(mock_exec.called)
@mock.patch.object(console_utils, 'stop_shellinabox_console',
autospec=True)
def test_stop_console_fail(self, mock_stop):
mock_stop.side_effect = exception.ConsoleError()
with task_manager.acquire(self.context,
self.node.uuid) as task:
self.assertRaises(exception.ConsoleError,
self.driver.console.stop_console,
task)
mock_stop.assert_called_once_with(self.node.uuid)
@mock.patch.object(console_utils, 'get_shellinabox_console_url',
autospec=True)
def test_get_console(self, mock_exec):
url = 'http://localhost:4201'
mock_exec.return_value = url
expected = {'type': 'shellinabox', 'url': url}
with task_manager.acquire(self.context,
self.node.uuid) as task:
console_info = self.driver.console.get_console(task)
self.assertEqual(expected, console_info)
mock_exec.assert_called_once_with(self.info['port'])
self.assertTrue(mock_exec.called)<|fim▁end|> | |
<|file_name|>modifyvolumeresponse.cpp<|end_file_name|><|fim▁begin|>/*
Copyright 2013-2021 Paul Colby
This file is part of QtAws.
QtAws is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
QtAws is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with the QtAws. If not, see <http://www.gnu.org/licenses/>.
*/
#include "modifyvolumeresponse.h"
#include "modifyvolumeresponse_p.h"
#include <QDebug>
#include <QNetworkReply>
#include <QXmlStreamReader>
namespace QtAws {
namespace EC2 {
/*!
* \class QtAws::EC2::ModifyVolumeResponse
 * \brief The ModifyVolumeResponse class provides an interface for EC2 ModifyVolume responses.
*
* \inmodule QtAwsEC2
*
* <fullname>Amazon Elastic Compute Cloud</fullname>
*
* Amazon Elastic Compute Cloud (Amazon EC2) provides secure and resizable computing capacity in the AWS Cloud. Using
* Amazon EC2 eliminates the need to invest in hardware up front, so you can develop and deploy applications faster. Amazon
* Virtual Private Cloud (Amazon VPC) enables you to provision a logically isolated section of the AWS Cloud where you can
* launch AWS resources in a virtual network that you've defined. Amazon Elastic Block Store (Amazon EBS) provides block
* level storage volumes for use with EC2 instances. EBS volumes are highly available and reliable storage volumes that can
* be attached to any running instance and used like a hard
*
* drive>
*
* To learn more, see the following
*
* resources> <ul> <li>
*
* Amazon EC2: <a href="http://aws.amazon.com/ec2">AmazonEC2 product page</a>, <a
* href="http://aws.amazon.com/documentation/ec2">Amazon EC2 documentation</a>
*
* </p </li> <li>
*
* Amazon EBS: <a href="http://aws.amazon.com/ebs">Amazon EBS product page</a>, <a
* href="http://aws.amazon.com/documentation/ebs">Amazon EBS documentation</a>
*
* </p </li> <li>
*
* Amazon VPC: <a href="http://aws.amazon.com/vpc">Amazon VPC product page</a>, <a
* href="http://aws.amazon.com/documentation/vpc">Amazon VPC documentation</a>
*
* </p </li> <li>
*
* AWS VPN: <a href="http://aws.amazon.com/vpn">AWS VPN product page</a>, <a
* href="http://aws.amazon.com/documentation/vpn">AWS VPN documentation</a>
*
* \sa Ec2Client::modifyVolume
*/
/*!
* Constructs a ModifyVolumeResponse object for \a reply to \a request, with parent \a parent.
*/
ModifyVolumeResponse::ModifyVolumeResponse(
const ModifyVolumeRequest &request,
QNetworkReply * const reply,
QObject * const parent)
: Ec2Response(new ModifyVolumeResponsePrivate(this), parent)
{
setRequest(new ModifyVolumeRequest(request));
setReply(reply);
}
/*!
* \reimp
*/
const ModifyVolumeRequest * ModifyVolumeResponse::request() const
{
Q_D(const ModifyVolumeResponse);
return static_cast<const ModifyVolumeRequest *>(d->request);
}
/*!
* \reimp
* Parses a successful EC2 ModifyVolume \a response.
*/
void ModifyVolumeResponse::parseSuccess(QIODevice &response)<|fim▁hole|>{
//Q_D(ModifyVolumeResponse);
QXmlStreamReader xml(&response);
/// @todo
}
/*!
* \class QtAws::EC2::ModifyVolumeResponsePrivate
* \brief The ModifyVolumeResponsePrivate class provides private implementation for ModifyVolumeResponse.
* \internal
*
* \inmodule QtAwsEC2
*/
/*!
* Constructs a ModifyVolumeResponsePrivate object with public implementation \a q.
*/
ModifyVolumeResponsePrivate::ModifyVolumeResponsePrivate(
ModifyVolumeResponse * const q) : Ec2ResponsePrivate(q)
{
}
/*!
* Parses a EC2 ModifyVolume response element from \a xml.
*/
void ModifyVolumeResponsePrivate::parseModifyVolumeResponse(QXmlStreamReader &xml)
{
Q_ASSERT(xml.name() == QLatin1String("ModifyVolumeResponse"));
Q_UNUSED(xml) ///< @todo
}
} // namespace EC2
} // namespace QtAws<|fim▁end|> | |
<|file_name|>pens_test.py<|end_file_name|><|fim▁begin|>from __future__ import print_function, division, absolute_import
import unittest
from cu2qu.pens import Cu2QuPen, Cu2QuPointPen
from . import CUBIC_GLYPHS, QUAD_GLYPHS
from .utils import DummyGlyph, DummyPointGlyph
from .utils import DummyPen, DummyPointPen
from fontTools.misc.loggingTools import CapturingLogHandler
from textwrap import dedent
import logging
MAX_ERR = 1.0
class _TestPenMixin(object):
"""Collection of tests that are shared by both the SegmentPen and the
PointPen test cases, plus some helper methods.
"""
maxDiff = None
def diff(self, expected, actual):
import difflib
expected = str(self.Glyph(expected)).splitlines(True)
actual = str(self.Glyph(actual)).splitlines(True)
diff = difflib.unified_diff(
expected, actual, fromfile='expected', tofile='actual')
return "".join(diff)
def convert_glyph(self, glyph, **kwargs):
# draw source glyph onto a new glyph using a Cu2Qu pen and return it
converted = self.Glyph()
pen = getattr(converted, self.pen_getter_name)()
quadpen = self.Cu2QuPen(pen, MAX_ERR, **kwargs)
getattr(glyph, self.draw_method_name)(quadpen)
return converted
def expect_glyph(self, source, expected):
converted = self.convert_glyph(source)
self.assertNotEqual(converted, source)
if not converted.approx(expected):
print(self.diff(expected, converted))
self.fail("converted glyph is different from expected")<|fim▁hole|> self.expect_glyph(CUBIC_GLYPHS['A'], QUAD_GLYPHS['A'])
def test_convert_composite_glyph(self):
source = CUBIC_GLYPHS['Aacute']
converted = self.convert_glyph(source)
# components don't change after quadratic conversion
self.assertEqual(converted, source)
def test_convert_mixed_glyph(self):
# this contains a mix of contours and components
self.expect_glyph(CUBIC_GLYPHS['Eacute'], QUAD_GLYPHS['Eacute'])
def test_reverse_direction(self):
for name in ('a', 'A', 'Eacute'):
source = CUBIC_GLYPHS[name]
normal_glyph = self.convert_glyph(source)
reversed_glyph = self.convert_glyph(source, reverse_direction=True)
# the number of commands is the same, just their order is inverted
self.assertEqual(
len(normal_glyph.outline), len(reversed_glyph.outline))
self.assertNotEqual(normal_glyph, reversed_glyph)
def test_stats(self):
stats = {}
for name in CUBIC_GLYPHS.keys():
source = CUBIC_GLYPHS[name]
self.convert_glyph(source, stats=stats)
self.assertTrue(stats)
self.assertTrue('1' in stats)
self.assertEqual(type(stats['1']), int)
def test_addComponent(self):
pen = self.Pen()
quadpen = self.Cu2QuPen(pen, MAX_ERR)
quadpen.addComponent("a", (1, 2, 3, 4, 5.0, 6.0))
# components are passed through without changes
self.assertEqual(str(pen).splitlines(), [
"pen.addComponent('a', (1, 2, 3, 4, 5.0, 6.0))",
])
class TestCu2QuPen(unittest.TestCase, _TestPenMixin):
def __init__(self, *args, **kwargs):
super(TestCu2QuPen, self).__init__(*args, **kwargs)
self.Glyph = DummyGlyph
self.Pen = DummyPen
self.Cu2QuPen = Cu2QuPen
self.pen_getter_name = 'getPen'
self.draw_method_name = 'draw'
def test__check_contour_is_open(self):
msg = "moveTo is required"
quadpen = Cu2QuPen(DummyPen(), MAX_ERR)
with self.assertRaisesRegex(AssertionError, msg):
quadpen.lineTo((0, 0))
with self.assertRaisesRegex(AssertionError, msg):
quadpen.qCurveTo((0, 0), (1, 1))
with self.assertRaisesRegex(AssertionError, msg):
quadpen.curveTo((0, 0), (1, 1), (2, 2))
with self.assertRaisesRegex(AssertionError, msg):
quadpen.closePath()
with self.assertRaisesRegex(AssertionError, msg):
quadpen.endPath()
quadpen.moveTo((0, 0)) # now it works
quadpen.lineTo((1, 1))
quadpen.qCurveTo((2, 2), (3, 3))
quadpen.curveTo((4, 4), (5, 5), (6, 6))
quadpen.closePath()
def test__check_contour_closed(self):
msg = "closePath or endPath is required"
quadpen = Cu2QuPen(DummyPen(), MAX_ERR)
quadpen.moveTo((0, 0))
with self.assertRaisesRegex(AssertionError, msg):
quadpen.moveTo((1, 1))
with self.assertRaisesRegex(AssertionError, msg):
quadpen.addComponent("a", (1, 0, 0, 1, 0, 0))
# it works if contour is closed
quadpen.closePath()
quadpen.moveTo((1, 1))
quadpen.endPath()
quadpen.addComponent("a", (1, 0, 0, 1, 0, 0))
def test_qCurveTo_no_points(self):
quadpen = Cu2QuPen(DummyPen(), MAX_ERR)
quadpen.moveTo((0, 0))
with self.assertRaisesRegex(
AssertionError, "illegal qcurve segment point count: 0"):
quadpen.qCurveTo()
def test_qCurveTo_1_point(self):
pen = DummyPen()
quadpen = Cu2QuPen(pen, MAX_ERR)
quadpen.moveTo((0, 0))
quadpen.qCurveTo((1, 1))
self.assertEqual(str(pen).splitlines(), [
"pen.moveTo((0, 0))",
"pen.lineTo((1, 1))",
])
def test_qCurveTo_more_than_1_point(self):
pen = DummyPen()
quadpen = Cu2QuPen(pen, MAX_ERR)
quadpen.moveTo((0, 0))
quadpen.qCurveTo((1, 1), (2, 2))
self.assertEqual(str(pen).splitlines(), [
"pen.moveTo((0, 0))",
"pen.qCurveTo((1, 1), (2, 2))",
])
def test_curveTo_no_points(self):
quadpen = Cu2QuPen(DummyPen(), MAX_ERR)
quadpen.moveTo((0, 0))
with self.assertRaisesRegex(
AssertionError, "illegal curve segment point count: 0"):
quadpen.curveTo()
def test_curveTo_1_point(self):
pen = DummyPen()
quadpen = Cu2QuPen(pen, MAX_ERR)
quadpen.moveTo((0, 0))
quadpen.curveTo((1, 1))
self.assertEqual(str(pen).splitlines(), [
"pen.moveTo((0, 0))",
"pen.lineTo((1, 1))",
])
def test_curveTo_2_points(self):
pen = DummyPen()
quadpen = Cu2QuPen(pen, MAX_ERR)
quadpen.moveTo((0, 0))
quadpen.curveTo((1, 1), (2, 2))
self.assertEqual(str(pen).splitlines(), [
"pen.moveTo((0, 0))",
"pen.qCurveTo((1, 1), (2, 2))",
])
def test_curveTo_3_points(self):
pen = DummyPen()
quadpen = Cu2QuPen(pen, MAX_ERR)
quadpen.moveTo((0, 0))
quadpen.curveTo((1, 1), (2, 2), (3, 3))
self.assertEqual(str(pen).splitlines(), [
"pen.moveTo((0, 0))",
"pen.qCurveTo((0.75, 0.75), (2.25, 2.25), (3, 3))",
])
def test_curveTo_more_than_3_points(self):
# a 'SuperBezier' as described in fontTools.basePen.AbstractPen
pen = DummyPen()
quadpen = Cu2QuPen(pen, MAX_ERR)
quadpen.moveTo((0, 0))
quadpen.curveTo((1, 1), (2, 2), (3, 3), (4, 4))
self.assertEqual(str(pen).splitlines(), [
"pen.moveTo((0, 0))",
"pen.qCurveTo((0.75, 0.75), (1.625, 1.625), (2, 2))",
"pen.qCurveTo((2.375, 2.375), (3.25, 3.25), (4, 4))",
])
def test_addComponent(self):
pen = DummyPen()
quadpen = Cu2QuPen(pen, MAX_ERR)
quadpen.addComponent("a", (1, 2, 3, 4, 5.0, 6.0))
# components are passed through without changes
self.assertEqual(str(pen).splitlines(), [
"pen.addComponent('a', (1, 2, 3, 4, 5.0, 6.0))",
])
def test_ignore_single_points(self):
pen = DummyPen()
try:
logging.captureWarnings(True)
with CapturingLogHandler("py.warnings", level="WARNING") as log:
quadpen = Cu2QuPen(pen, MAX_ERR, ignore_single_points=True)
finally:
logging.captureWarnings(False)
quadpen.moveTo((0, 0))
quadpen.endPath()
quadpen.moveTo((1, 1))
quadpen.closePath()
self.assertGreaterEqual(len(log.records), 1)
self.assertIn("ignore_single_points is deprecated",
log.records[0].args[0])
# single-point contours were ignored, so the pen commands are empty
self.assertFalse(pen.commands)
# redraw without ignoring single points
quadpen.ignore_single_points = False
quadpen.moveTo((0, 0))
quadpen.endPath()
quadpen.moveTo((1, 1))
quadpen.closePath()
self.assertTrue(pen.commands)
self.assertEqual(str(pen).splitlines(), [
"pen.moveTo((0, 0))",
"pen.endPath()",
"pen.moveTo((1, 1))",
"pen.closePath()"
])
class TestCu2QuPointPen(unittest.TestCase, _TestPenMixin):
def __init__(self, *args, **kwargs):
super(TestCu2QuPointPen, self).__init__(*args, **kwargs)
self.Glyph = DummyPointGlyph
self.Pen = DummyPointPen
self.Cu2QuPen = Cu2QuPointPen
self.pen_getter_name = 'getPointPen'
self.draw_method_name = 'drawPoints'
def test_super_bezier_curve(self):
pen = DummyPointPen()
quadpen = Cu2QuPointPen(pen, MAX_ERR)
quadpen.beginPath()
quadpen.addPoint((0, 0), segmentType="move")
quadpen.addPoint((1, 1))
quadpen.addPoint((2, 2))
quadpen.addPoint((3, 3))
quadpen.addPoint(
(4, 4), segmentType="curve", smooth=False, name="up", selected=1)
quadpen.endPath()
self.assertEqual(str(pen).splitlines(), """\
pen.beginPath()
pen.addPoint((0, 0), name=None, segmentType='move', smooth=False)
pen.addPoint((0.75, 0.75), name=None, segmentType=None, smooth=False)
pen.addPoint((1.625, 1.625), name=None, segmentType=None, smooth=False)
pen.addPoint((2, 2), name=None, segmentType='qcurve', smooth=True)
pen.addPoint((2.375, 2.375), name=None, segmentType=None, smooth=False)
pen.addPoint((3.25, 3.25), name=None, segmentType=None, smooth=False)
pen.addPoint((4, 4), name='up', segmentType='qcurve', selected=1, smooth=False)
pen.endPath()""".splitlines())
def test__flushContour_restore_starting_point(self):
pen = DummyPointPen()
quadpen = Cu2QuPointPen(pen, MAX_ERR)
# collect the output of _flushContour before it's sent to _drawPoints
new_segments = []
def _drawPoints(segments):
new_segments.extend(segments)
Cu2QuPointPen._drawPoints(quadpen, segments)
quadpen._drawPoints = _drawPoints
# a closed path (ie. no "move" segmentType)
quadpen._flushContour([
("curve", [
((2, 2), False, None, {}),
((1, 1), False, None, {}),
((0, 0), False, None, {}),
]),
("curve", [
((1, 1), False, None, {}),
((2, 2), False, None, {}),
((3, 3), False, None, {}),
]),
])
# the original starting point is restored: the last segment has become
# the first
self.assertEqual(new_segments[0][1][-1][0], (3, 3))
self.assertEqual(new_segments[-1][1][-1][0], (0, 0))
new_segments = []
# an open path (ie. starting with "move")
quadpen._flushContour([
("move", [
((0, 0), False, None, {}),
]),
("curve", [
((1, 1), False, None, {}),
((2, 2), False, None, {}),
((3, 3), False, None, {}),
]),
])
# the segment order stays the same before and after _flushContour
self.assertEqual(new_segments[0][1][-1][0], (0, 0))
self.assertEqual(new_segments[-1][1][-1][0], (3, 3))
def test_quad_no_oncurve(self):
"""When passed a contour which has no on-curve points, the
Cu2QuPointPen will treat it as a special quadratic contour whose
first point has 'None' coordinates.
"""
self.maxDiff = None
pen = DummyPointPen()
quadpen = Cu2QuPointPen(pen, MAX_ERR)
quadpen.beginPath()
quadpen.addPoint((1, 1))
quadpen.addPoint((2, 2))
quadpen.addPoint((3, 3))
quadpen.endPath()
self.assertEqual(
str(pen),
dedent(
"""\
pen.beginPath()
pen.addPoint((1, 1), name=None, segmentType=None, smooth=False)
pen.addPoint((2, 2), name=None, segmentType=None, smooth=False)
pen.addPoint((3, 3), name=None, segmentType=None, smooth=False)
pen.endPath()"""
)
)
if __name__ == "__main__":
unittest.main()<|fim▁end|> |
def test_convert_simple_glyph(self):
self.expect_glyph(CUBIC_GLYPHS['a'], QUAD_GLYPHS['a']) |
<|file_name|>create_baseline_stubs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
"""Script to generate unannotated baseline stubs using stubgen.
Basic usage:
$ python3 scripts/create_baseline_stubs.py <project on PyPI>
Run with -h for more help.
"""
import argparse
import os
import re
import shutil
import subprocess
import sys
from typing import Optional, Tuple
PYRIGHT_CONFIG = "pyrightconfig.stricter.json"
def search_pip_freeze_output(project: str, output: str) -> Optional[Tuple[str, str]]:
# Look for lines such as "typed-ast==1.4.2". '-' matches '_' and
# '_' matches '-' in project name, so that "typed_ast" matches
# "typed-ast", and vice versa.
regex = "^(" + re.sub(r"[-_]", "[-_]", project) + ")==(.*)"
m = re.search(regex, output, flags=re.IGNORECASE | re.MULTILINE)
if not m:
return None
return m.group(1), m.group(2)
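# Illustrative behavior ('-' and '_' match each other, case-insensitively):
#   search_pip_freeze_output("typed_ast", "typed-ast==1.4.2\n") -> ("typed-ast", "1.4.2")
#   search_pip_freeze_output("flask", "requests==2.26.0\n")     -> None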
def get_installed_package_info(project: str) -> Optional[Tuple[str, str]]:
"""Find package information from pip freeze output.
Match project name somewhat fuzzily (case sensitive; '-' matches '_', and
vice versa).
Return (normalized project name, installed version) if successful.
"""
r = subprocess.run(["pip", "freeze"], capture_output=True, text=True, check=True)
return search_pip_freeze_output(project, r.stdout)
def run_stubgen(package: str) -> None:
print(f"Running stubgen: stubgen -p {package}")
subprocess.run(["python", "-m", "mypy.stubgen", "-p", package], check=True)
def copy_stubs(src_base_dir: str, package: str, stub_dir: str) -> None:
"""Copy generated stubs to the target directory under stub_dir/."""
print(f"Copying stubs to {stub_dir}")
if not os.path.isdir(stub_dir):
os.mkdir(stub_dir)
src_dir = os.path.join(src_base_dir, package)
if os.path.isdir(src_dir):
shutil.copytree(src_dir, os.path.join(stub_dir, package))
else:
src_file = os.path.join("out", package + ".pyi")
if not os.path.isfile(src_file):
sys.exit("Error: Cannot find generated stubs")
shutil.copy(src_file, stub_dir)
def run_black(stub_dir: str) -> None:
print(f"Running black: black {stub_dir}")
subprocess.run(["black", stub_dir])
def run_isort(stub_dir: str) -> None:
print(f"Running isort: isort {stub_dir}")
subprocess.run(["python3", "-m", "isort", stub_dir])
def create_metadata(stub_dir: str, version: str) -> None:
"""Create a METADATA.toml file."""
m = re.match(r"[0-9]+\.[0-9]+", version)
if m is None:
sys.exit(f"Error: Cannot parse version number: {version}")
fnam = os.path.join(stub_dir, "METADATA.toml")
version = m.group(0)
assert not os.path.exists(fnam)
print(f"Writing {fnam}")
with open(fnam, "w") as f:
f.write(f'version = "{version}.*"\n')
def add_pyright_exclusion(stub_dir: str) -> None:
"""Exclude stub_dir from strict pyright checks."""
with open(PYRIGHT_CONFIG) as f:
lines = f.readlines()
i = 0
while i < len(lines) and not lines[i].strip().startswith('"exclude": ['):
i += 1
assert i < len(lines), f"Error parsing {PYRIGHT_CONFIG}"
while not lines[i].strip().startswith("]"):
i += 1
line_to_add = f' "{stub_dir}",'
initial = i - 1
while lines[i].lower() > line_to_add.lower():
i -= 1
if lines[i + 1].strip().rstrip(",") == line_to_add.strip().rstrip(","):
print(f"{PYRIGHT_CONFIG} already up-to-date")
return
if i == initial:
# Special case: when adding to the end of the list, commas need tweaking
line_to_add = line_to_add.rstrip(",")
lines[i] = lines[i].rstrip() + ",\n"
lines.insert(i + 1, line_to_add + "\n")
print(f"Updating {PYRIGHT_CONFIG}")
with open(PYRIGHT_CONFIG, "w") as f:
f.writelines(lines)
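# Illustrative effect on pyrightconfig.stricter.json (the entry is inserted
# into the existing "exclude" list, keeping it case-insensitively sorted):
#   "exclude": [
#       "stubs/first",
#       "stubs/newproject",   # <- added by add_pyright_exclusion
#       "stubs/zlast"
#   ]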
def main() -> None:
parser = argparse.ArgumentParser(
description="""Generate baseline stubs automatically for an installed pip package
using stubgen. Also run black and isort. If the name of
the project is different from the runtime Python package name, you must
also use --package (example: --package yaml PyYAML)."""
)
parser.add_argument("project", help="name of PyPI project for which to generate stubs under stubs/")
parser.add_argument("--package", help="generate stubs for this Python package (defaults to project)")
args = parser.parse_args()
project = args.project
package = args.package
if not re.match(r"[a-zA-Z0-9-_.]+$", project):
sys.exit(f"Invalid character in project name: {project!r}")
if not package:
package = project # TODO: infer from installed files
if not os.path.isdir("stubs") or not os.path.isdir("stdlib"):
sys.exit("Error: Current working directory must be the root of typeshed repository")
# Get normalized project name and version of installed package.
info = get_installed_package_info(project)
if info is None:
print(f'Error: "{project}" is not installed', file=sys.stderr)
print("", file=sys.stderr)
print(f'Suggestion: Run "python3 -m pip install {project}" and try again', file=sys.stderr)
sys.exit(1)
project, version = info
stub_dir = os.path.join("stubs", project)
if os.path.exists(stub_dir):
sys.exit(f"Error: {stub_dir} already exists (delete it first)")
<|fim▁hole|> # Stubs were generated under out/. Copy them to stubs/.
copy_stubs("out", package, stub_dir)
run_isort(stub_dir)
run_black(stub_dir)
create_metadata(stub_dir, version)
# Since the generated stubs won't have many type annotations, we
# have to exclude them from strict pyright checks.
add_pyright_exclusion(stub_dir)
print("\nDone!\n\nSuggested next steps:")
print(f" 1. Manually review the generated stubs in {stub_dir}")
print(f' 2. Run "MYPYPATH={stub_dir} python3 -m mypy.stubtest {package}" to check the stubs against runtime')
print(f' 3. Run "mypy {stub_dir}" to check for errors')
print(f' 4. Run "black {stub_dir}" and "isort {stub_dir}" (if you\'ve made code changes)')
print(f' 5. Run "flake8 {stub_dir}" to check for e.g. unused imports')
print(" 6. Commit the changes on a new branch and create a typeshed PR")
if __name__ == "__main__":
main()<|fim▁end|> | run_stubgen(package)
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | from .file_logger import FileLogger |
<|file_name|>targetpool.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package gcetasks
import (
"fmt"
compute "google.golang.org/api/compute/v0.beta"
"k8s.io/klog"
"k8s.io/kops/upup/pkg/fi"
"k8s.io/kops/upup/pkg/fi/cloudup/gce"
"k8s.io/kops/upup/pkg/fi/cloudup/terraform"
)
// TargetPool represents a GCE TargetPool
//go:generate fitask -type=TargetPool
type TargetPool struct {
Name *string
Lifecycle *fi.Lifecycle
}
var _ fi.CompareWithID = &TargetPool{}
func (e *TargetPool) CompareWithID() *string {
return e.Name
}
func (e *TargetPool) Find(c *fi.Context) (*TargetPool, error) {
cloud := c.Cloud.(gce.GCECloud)
name := fi.StringValue(e.Name)
r, err := cloud.Compute().TargetPools.Get(cloud.Project(), cloud.Region(), name).Do()
if err != nil {
if gce.IsNotFound(err) {
return nil, nil
}
return nil, fmt.Errorf("error getting TargetPool %q: %v", name, err)
}
actual := &TargetPool{}
actual.Name = fi.String(r.Name)
return actual, nil
}
func (e *TargetPool) Run(c *fi.Context) error {
return fi.DefaultDeltaRunMethod(e, c)
}
func (_ *TargetPool) CheckChanges(a, e, changes *TargetPool) error {
if fi.StringValue(e.Name) == "" {
return fi.RequiredField("Name")
}
return nil
}
func (e *TargetPool) URL(cloud gce.GCECloud) string {
name := fi.StringValue(e.Name)
return fmt.Sprintf("https://www.googleapis.com/compute/v1/projects/%s/regions/%s/targetPools/%s", cloud.Project(), cloud.Region(), name)
}
func (_ *TargetPool) RenderGCE(t *gce.GCEAPITarget, a, e, changes *TargetPool) error {
name := fi.StringValue(e.Name)
o := &compute.TargetPool{
Name: name,
}
if a == nil {
klog.V(4).Infof("Creating TargetPool %q", o.Name)
op, err := t.Cloud.Compute().TargetPools.Insert(t.Cloud.Project(), t.Cloud.Region(), o).Do()
if err != nil {
return fmt.Errorf("error creating TargetPool %q: %v", name, err)
}
if err := t.Cloud.WaitForOp(op); err != nil {
return fmt.Errorf("error creating TargetPool: %v", err)
}
} else {
return fmt.Errorf("cannot apply changes to TargetPool: %v", changes)
}
return nil
}
type terraformTargetPool struct {
Name string `json:"name"`
Description string `json:"description,omitempty"`
HealthChecks []string `json:"health_checks,omitempty"`<|fim▁hole|>}
func (_ *TargetPool) RenderTerraform(t *terraform.TerraformTarget, a, e, changes *TargetPool) error {
name := fi.StringValue(e.Name)
tf := &terraformTargetPool{
Name: name,
}
return t.RenderResource("google_compute_target_pool", name, tf)
}
func (e *TargetPool) TerraformLink() *terraform.Literal {
name := fi.StringValue(e.Name)
return terraform.LiteralSelfLink("google_compute_target_pool", name)
}<|fim▁end|> | Instances []string `json:"instances,omitempty"`
SessionAffinity string `json:"session_affinity,omitempty"` |
<|file_name|>active.js<|end_file_name|><|fim▁begin|>$(document).ready(function(){
//Grabs url path
var url = window.location.pathname;
//Grabs current file name from URL
var url = url.substring(url.lastIndexOf('/')+1);
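// e.g. a pathname of "/docs/about.html" leaves url == "about.html" (illustrative path)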
// now grab every link from the navigation<|fim▁hole|>
$('#navigation a').each(function(){
//Grab the current elements href tag value
var link = $(this).attr("href");
//Test if the url value and element value matches
if(url === link){
//Adds class to the current item
$(this).parent('li').addClass('active');
}
});
});<|fim▁end|> | |
<|file_name|>irc.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2009-2010 Satoshi Nakamoto
// Copyright (c) 2009-2012 The Bitcoin developers
// Copyright (c) 2011-2012 Litecoin Developers
// Copyright (c) 2013 Fastcoin Developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "irc.h"
#include "net.h"
#include "strlcpy.h"
#include "base58.h"
using namespace std;
using namespace boost;
int nGotIRCAddresses = 0;
void ThreadIRCSeed2(void* parg);
#pragma pack(push, 1)
struct ircaddr
{
struct in_addr ip;
short port;
};
#pragma pack(pop)
string EncodeAddress(const CService& addr)
{
struct ircaddr tmp;
if (addr.GetInAddr(&tmp.ip))
{
tmp.port = htons(addr.GetPort());
vector<unsigned char> vch(UBEGIN(tmp), UEND(tmp));
return string("u") + EncodeBase58Check(vch);
}
return "";
}
bool DecodeAddress(string str, CService& addr)
{
vector<unsigned char> vch;
if (!DecodeBase58Check(str.substr(1), vch))
return false;
struct ircaddr tmp;
if (vch.size() != sizeof(tmp))
return false;
memcpy(&tmp, &vch[0], sizeof(tmp));
addr = CService(tmp.ip, ntohs(tmp.port));
return true;
}
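// Illustrative round trip (address and port are made up): EncodeAddress()
// packs the IPv4 address and port into a base58check string prefixed with
// 'u', and DecodeAddress() recovers the same CService:
//   CService in("1.2.3.4", 9333);
//   CService out;
//   DecodeAddress(EncodeAddress(in), out);  // out == in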
static bool Send(SOCKET hSocket, const char* pszSend)
{
if (strstr(pszSend, "PONG") != pszSend)
printf("IRC SENDING: %s\n", pszSend);
const char* psz = pszSend;
const char* pszEnd = psz + strlen(psz);
while (psz < pszEnd)
{
int ret = send(hSocket, psz, pszEnd - psz, MSG_NOSIGNAL);
if (ret < 0)
return false;
psz += ret;
}
return true;
}
bool RecvLineIRC(SOCKET hSocket, string& strLine)
{
loop
{
bool fRet = RecvLine(hSocket, strLine);
if (fRet)
{
if (fShutdown)
return false;
vector<string> vWords;
ParseString(strLine, ' ', vWords);
if (vWords.size() >= 1 && vWords[0] == "PING")
{
strLine[1] = 'O';
strLine += '\r';
Send(hSocket, strLine.c_str());
continue;
}
}
return fRet;
}
}
int RecvUntil(SOCKET hSocket, const char* psz1, const char* psz2=NULL, const char* psz3=NULL, const char* psz4=NULL)
{
loop
{
string strLine;
strLine.reserve(10000);
if (!RecvLineIRC(hSocket, strLine))
return 0;
printf("IRC %s\n", strLine.c_str());
if (psz1 && strLine.find(psz1) != string::npos)
return 1;
if (psz2 && strLine.find(psz2) != string::npos)
return 2;
if (psz3 && strLine.find(psz3) != string::npos)
return 3;
if (psz4 && strLine.find(psz4) != string::npos)
return 4;
}
}
bool Wait(int nSeconds)
{
if (fShutdown)
return false;
printf("IRC waiting %d seconds to reconnect\n", nSeconds);
for (int i = 0; i < nSeconds; i++)
{
if (fShutdown)
return false;
Sleep(1000);
}
return true;
}
bool RecvCodeLine(SOCKET hSocket, const char* psz1, string& strRet)
{
strRet.clear();
loop
{
string strLine;
if (!RecvLineIRC(hSocket, strLine))
return false;
vector<string> vWords;
ParseString(strLine, ' ', vWords);
if (vWords.size() < 2)
continue;
if (vWords[1] == psz1)
{
printf("IRC %s\n", strLine.c_str());
strRet = strLine;
return true;
}
}
}
bool GetIPFromIRC(SOCKET hSocket, string strMyName, CNetAddr& ipRet)
{
Send(hSocket, strprintf("USERHOST %s\r", strMyName.c_str()).c_str());
string strLine;
if (!RecvCodeLine(hSocket, "302", strLine))
return false;
vector<string> vWords;
ParseString(strLine, ' ', vWords);
if (vWords.size() < 4)
return false;
string str = vWords[3];
if (str.rfind("@") == string::npos)
return false;
string strHost = str.substr(str.rfind("@")+1);
// Hybrid IRC used by lfnet always returns IP when you userhost yourself,
// but in case another IRC is ever used this should work.
printf("GetIPFromIRC() got userhost %s\n", strHost.c_str());
CNetAddr addr(strHost, true);
if (!addr.IsValid())
return false;
ipRet = addr;
return true;
}
void ThreadIRCSeed(void* parg)
{
IMPLEMENT_RANDOMIZE_STACK(ThreadIRCSeed(parg));
// Make this thread recognisable as the IRC seeding thread
RenameThread("bitcoin-ircseed");
try
{
ThreadIRCSeed2(parg);
}
catch (std::exception& e) {
PrintExceptionContinue(&e, "ThreadIRCSeed()");
} catch (...) {
PrintExceptionContinue(NULL, "ThreadIRCSeed()");
}
printf("ThreadIRCSeed exited\n");
}
void ThreadIRCSeed2(void* parg)
{
/* Don't advertise on IRC if we don't allow incoming connections */
if (mapArgs.count("-connect") || fNoListen)
return;
if (!GetBoolArg("-irc", false))
return;
printf("ThreadIRCSeed started\n");
int nErrorWait = 10;
int nRetryWait = 10;
while (!fShutdown)
{
CService addrConnect("92.243.23.21", 6667); // irc.lfnet.org
CService addrIRC("irc.lfnet.org", 6667, true);
if (addrIRC.IsValid())
addrConnect = addrIRC;
SOCKET hSocket;
if (!ConnectSocket(addrConnect, hSocket))
{
printf("IRC connect failed\n");
nErrorWait = nErrorWait * 11 / 10;
if (Wait(nErrorWait += 60))
continue;
else
return;
}
if (!RecvUntil(hSocket, "Found your hostname", "using your IP address instead", "Couldn't look up your hostname", "ignoring hostname"))
{
closesocket(hSocket);
hSocket = INVALID_SOCKET;
nErrorWait = nErrorWait * 11 / 10;
if (Wait(nErrorWait += 60))
continue;
else
return;
}
CNetAddr addrIPv4("1.2.3.4"); // arbitrary IPv4 address to make GetLocal prefer IPv4 addresses
CService addrLocal;
string strMyName;
if (GetLocal(addrLocal, &addrIPv4))
strMyName = EncodeAddress(GetLocalAddress(&addrConnect));
if (strMyName == "")
strMyName = strprintf("x%u", GetRand(1000000000));
Send(hSocket, strprintf("NICK %s\r", strMyName.c_str()).c_str());
Send(hSocket, strprintf("USER %s 8 * : %s\r", strMyName.c_str(), strMyName.c_str()).c_str());
int nRet = RecvUntil(hSocket, " 004 ", " 433 ");
if (nRet != 1)
{
closesocket(hSocket);
hSocket = INVALID_SOCKET;
if (nRet == 2)
{
printf("IRC name already in use\n");
Wait(10);
continue;
}
nErrorWait = nErrorWait * 11 / 10;
if (Wait(nErrorWait += 60))
continue;
else
return;
}
Sleep(500);
// Get our external IP from the IRC server and re-nick before joining the channel
CNetAddr addrFromIRC;
if (GetIPFromIRC(hSocket, strMyName, addrFromIRC))
{
printf("GetIPFromIRC() returned %s\n", addrFromIRC.ToString().c_str());
if (addrFromIRC.IsRoutable())
{
// IRC lets you re-nick
AddLocal(addrFromIRC, LOCAL_IRC);
strMyName = EncodeAddress(GetLocalAddress(&addrConnect));
Send(hSocket, strprintf("NICK %s\r", strMyName.c_str()).c_str());
}
}
if (fTestNet) {
Send(hSocket, "JOIN #fastcoinTEST3\r");
Send(hSocket, "WHO #fastcoinTEST3\r");
} else {
// randomly join #fastcoin00-#fastcoin99
int channel_number = GetRandInt(100);
channel_number = 0; // Fastcoin: for now, just use one channel
Send(hSocket, strprintf("JOIN #fastcoin%02d\r", channel_number).c_str());
Send(hSocket, strprintf("WHO #fastcoin%02d\r", channel_number).c_str());
}
int64 nStart = GetTime();
string strLine;
strLine.reserve(10000);
while (!fShutdown && RecvLineIRC(hSocket, strLine))
{
if (strLine.empty() || strLine.size() > 900 || strLine[0] != ':')
continue;
vector<string> vWords;
ParseString(strLine, ' ', vWords);
if (vWords.size() < 2)
continue;
char pszName[10000];
pszName[0] = '\0';
if (vWords[1] == "352" && vWords.size() >= 8)
{
// index 7 is limited to 16 characters
// could get full length name at index 10, but would be different from join messages
strlcpy(pszName, vWords[7].c_str(), sizeof(pszName));
printf("IRC got who\n");
}
if (vWords[1] == "JOIN" && vWords[0].size() > 1)
{
// :[email protected] JOIN :#channelname
strlcpy(pszName, vWords[0].c_str() + 1, sizeof(pszName));
if (strchr(pszName, '!'))
*strchr(pszName, '!') = '\0';
printf("IRC got join\n");
}
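            // Encoded addresses always begin with 'u' (see EncodeAddress), so skip anything else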
if (pszName[0] == 'u')
{
CAddress addr;
if (DecodeAddress(pszName, addr))
{
addr.nTime = GetAdjustedTime();
if (addrman.Add(addr, addrConnect, 51 * 60))
printf("IRC got new address: %s\n", addr.ToString().c_str());
nGotIRCAddresses++;
}
else
{
printf("IRC decode failed\n");
}
}
}
closesocket(hSocket);
hSocket = INVALID_SOCKET;
if (GetTime() - nStart > 20 * 60)
{
nErrorWait /= 3;
nRetryWait /= 3;
}
nRetryWait = nRetryWait * 11 / 10;<|fim▁hole|>
#ifdef TEST
int main(int argc, char *argv[])
{
WSADATA wsadata;
if (WSAStartup(MAKEWORD(2,2), &wsadata) != NO_ERROR)
{
printf("Error at WSAStartup()\n");
return false;
}
ThreadIRCSeed(NULL);
WSACleanup();
return 0;
}
#endif<|fim▁end|> | if (!Wait(nRetryWait += 60))
return;
}
} |
<|file_name|>test_conditional.py<|end_file_name|><|fim▁begin|>from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat import unittest
from units.mock.loader import DictDataLoader
from mock import MagicMock<|fim▁hole|>
from ansible.template import Templar
from ansible import errors
from ansible.playbook import conditional
class TestConditional(unittest.TestCase):
def setUp(self):
self.loader = DictDataLoader({})
self.cond = conditional.Conditional(loader=self.loader)
self.templar = Templar(loader=self.loader, variables={})
def _eval_con(self, when=None, variables=None):
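        # Evaluate the given `when` list against `variables` using the shared templar.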
when = when or []
variables = variables or {}
self.cond.when = when
ret = self.cond.evaluate_conditional(self.templar, variables)
return ret
def test_false(self):
when = [u"False"]
ret = self._eval_con(when, {})
self.assertFalse(ret)
def test_true(self):
when = [u"True"]
ret = self._eval_con(when, {})
self.assertTrue(ret)
def test_true_boolean(self):
self.cond.when = [True]
m = MagicMock()
ret = self.cond.evaluate_conditional(m, {})
self.assertTrue(ret)
self.assertFalse(m.is_template.called)
def test_false_boolean(self):
self.cond.when = [False]
m = MagicMock()
ret = self.cond.evaluate_conditional(m, {})
self.assertFalse(ret)
self.assertFalse(m.is_template.called)
def test_undefined(self):
when = [u"{{ some_undefined_thing }}"]
self.assertRaisesRegex(errors.AnsibleError, "The conditional check '{{ some_undefined_thing }}' failed",
self._eval_con, when, {})
def test_defined(self):
variables = {'some_defined_thing': True}
when = [u"{{ some_defined_thing }}"]
ret = self._eval_con(when, variables)
self.assertTrue(ret)
def test_dict_defined_values(self):
variables = {'dict_value': 1,
'some_defined_dict': {'key1': 'value1',
'key2': '{{ dict_value }}'}}
when = [u"some_defined_dict"]
ret = self._eval_con(when, variables)
self.assertTrue(ret)
def test_dict_defined_values_is_defined(self):
variables = {'dict_value': 1,
'some_defined_dict': {'key1': 'value1',
'key2': '{{ dict_value }}'}}
when = [u"some_defined_dict.key1 is defined"]
ret = self._eval_con(when, variables)
self.assertTrue(ret)
def test_dict_defined_multiple_values_is_defined(self):
variables = {'dict_value': 1,
'some_defined_dict': {'key1': 'value1',
'key2': '{{ dict_value }}'}}
when = [u"some_defined_dict.key1 is defined",
u"some_defined_dict.key2 is not undefined"]
ret = self._eval_con(when, variables)
self.assertTrue(ret)
def test_nested_hostvars_undefined_values(self):
variables = {'dict_value': 1,
'hostvars': {'host1': {'key1': 'value1',
'key2': '{{ dict_value }}'},
'host2': '{{ dict_value }}',
'host3': '{{ undefined_dict_value }}',
# no host4
},
'some_dict': {'some_dict_key1': '{{ hostvars["host3"] }}'}
}
when = [u"some_dict.some_dict_key1 == hostvars['host3']"]
# self._eval_con(when, variables)
self.assertRaisesRegex(errors.AnsibleError,
r"The conditional check 'some_dict.some_dict_key1 == hostvars\['host3'\]' failed",
# "The conditional check 'some_dict.some_dict_key1 == hostvars['host3']' failed",
# "The conditional check 'some_dict.some_dict_key1 == hostvars['host3']' failed.",
self._eval_con,
when, variables)
def test_dict_undefined_values_bare(self):
variables = {'dict_value': 1,
'some_defined_dict_with_undefined_values': {'key1': 'value1',
'key2': '{{ dict_value }}',
'key3': '{{ undefined_dict_value }}'
}}
# raises an exception when a non-string conditional is passed to extract_defined_undefined()
when = [u"some_defined_dict_with_undefined_values"]
self.assertRaisesRegex(errors.AnsibleError,
"The conditional check 'some_defined_dict_with_undefined_values' failed.",
self._eval_con,
when, variables)
def test_dict_undefined_values_is_defined(self):
variables = {'dict_value': 1,
'some_defined_dict_with_undefined_values': {'key1': 'value1',
'key2': '{{ dict_value }}',
'key3': '{{ undefined_dict_value }}'
}}
when = [u"some_defined_dict_with_undefined_values is defined"]
self.assertRaisesRegex(errors.AnsibleError,
"The conditional check 'some_defined_dict_with_undefined_values is defined' failed.",
self._eval_con,
when, variables)
def test_is_defined(self):
variables = {'some_defined_thing': True}
when = [u"some_defined_thing is defined"]
ret = self._eval_con(when, variables)
self.assertTrue(ret)
def test_is_undefined(self):
variables = {'some_defined_thing': True}
when = [u"some_defined_thing is undefined"]
ret = self._eval_con(when, variables)
self.assertFalse(ret)
def test_is_undefined_and_defined(self):
variables = {'some_defined_thing': True}
when = [u"some_defined_thing is undefined", u"some_defined_thing is defined"]
ret = self._eval_con(when, variables)
self.assertFalse(ret)
def test_is_undefined_and_defined_reversed(self):
variables = {'some_defined_thing': True}
when = [u"some_defined_thing is defined", u"some_defined_thing is undefined"]
ret = self._eval_con(when, variables)
self.assertFalse(ret)
def test_is_not_undefined(self):
variables = {'some_defined_thing': True}
when = [u"some_defined_thing is not undefined"]
ret = self._eval_con(when, variables)
self.assertTrue(ret)
def test_is_not_defined(self):
variables = {'some_defined_thing': True}
when = [u"some_undefined_thing is not defined"]
ret = self._eval_con(when, variables)
self.assertTrue(ret)
def test_is_hostvars_quotes_is_defined(self):
variables = {'hostvars': {'some_host': {}},
'compare_targets_single': "hostvars['some_host']",
'compare_targets_double': 'hostvars["some_host"]',
'compare_targets': {'double': '{{ compare_targets_double }}',
'single': "{{ compare_targets_single }}"},
}
when = [u"hostvars['some_host'] is defined",
u'hostvars["some_host"] is defined',
u"{{ compare_targets.double }} is defined",
u"{{ compare_targets.single }} is defined"]
ret = self._eval_con(when, variables)
self.assertTrue(ret)
def test_is_hostvars_quotes_is_defined_but_is_not_defined(self):
variables = {'hostvars': {'some_host': {}},
'compare_targets_single': "hostvars['some_host']",
'compare_targets_double': 'hostvars["some_host"]',
'compare_targets': {'double': '{{ compare_targets_double }}',
'single': "{{ compare_targets_single }}"},
}
when = [u"hostvars['some_host'] is defined",
u'hostvars["some_host"] is defined',
u"{{ compare_targets.triple }} is defined",
u"{{ compare_targets.quadruple }} is defined"]
self.assertRaisesRegex(errors.AnsibleError,
"The conditional check '{{ compare_targets.triple }} is defined' failed",
self._eval_con,
when, variables)
def test_is_hostvars_host_is_defined(self):
variables = {'hostvars': {'some_host': {}, }}
when = [u"hostvars['some_host'] is defined"]
ret = self._eval_con(when, variables)
self.assertTrue(ret)
def test_is_hostvars_host_undefined_is_defined(self):
variables = {'hostvars': {'some_host': {}, }}
when = [u"hostvars['some_undefined_host'] is defined"]
ret = self._eval_con(when, variables)
self.assertFalse(ret)
def test_is_hostvars_host_undefined_is_undefined(self):
variables = {'hostvars': {'some_host': {}, }}
when = [u"hostvars['some_undefined_host'] is undefined"]
ret = self._eval_con(when, variables)
self.assertTrue(ret)
def test_is_hostvars_host_undefined_is_not_defined(self):
variables = {'hostvars': {'some_host': {}, }}
when = [u"hostvars['some_undefined_host'] is not defined"]
ret = self._eval_con(when, variables)
self.assertTrue(ret)<|fim▁end|> | |
<|file_name|>sys.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2015 T. Okubo
// This file is part of vlc-rs.
// Licensed under the MIT license, see the LICENSE file.
#![allow(non_camel_case_types, non_upper_case_globals)]
#[link(name = "vlc")]
extern "C" {}
use libc::{c_void, c_int, c_uint, c_char, c_float, uintptr_t, FILE};
pub type c_bool = u8;
pub type libvlc_event_type_t = c_int;
// From libvlc_structures.h
pub enum libvlc_instance_t {}
pub enum libvlc_log_iterator_t {}
pub type libvlc_time_t = i64;
#[repr(C)]
#[derive(Clone, Copy)]
pub struct libvlc_log_message_t {
pub i_severity: c_int,
pub psz_type: *const c_char,
pub psz_name: *const c_char,
pub psz_header: *const c_char,
pub psz_message: *const c_char,
}
// From libvlc.h
pub enum libvlc_event_manager_t {}
pub enum libvlc_log_t {}
pub enum vlc_log_t {}
pub type libvlc_callback_t = unsafe extern "C" fn(*const libvlc_event_t, *mut c_void);
pub type va_list = *mut c_void;
pub type libvlc_log_cb = unsafe extern "C" fn(*mut c_void, c_int, *const libvlc_log_t, *const c_char, va_list);
pub use crate::enums::LogLevel as libvlc_log_level;
#[repr(C)]
#[derive(Clone, Copy)]
pub struct libvlc_module_description_t
{
pub psz_name: *const c_char,
pub psz_shortname: *const c_char,
pub psz_longname: *const c_char,
pub psz_help: *const c_char,
pub p_next: *mut libvlc_module_description_t,
}
extern "C" {
pub fn libvlc_errmsg() -> *const c_char;
pub fn libvlc_clearerr();
pub fn libvlc_new(argc: c_int, argv: *const *const c_char) -> *mut libvlc_instance_t;
pub fn libvlc_release(p_instance: *mut libvlc_instance_t);
pub fn libvlc_retain(p_instance: *mut libvlc_instance_t);
pub fn libvlc_add_intf(p_instance: *mut libvlc_instance_t, name: *const c_char) -> c_int;
pub fn libvlc_set_exit_handler(
p_instance: *mut libvlc_instance_t,
cb: extern "C" fn(*mut c_void), opaque: *mut c_void);
pub fn libvlc_wait(p_instance: *mut libvlc_instance_t);
pub fn libvlc_set_user_agent(
p_instance: *mut libvlc_instance_t, name: *const c_char, http: *const c_char);
pub fn libvlc_set_app_id(
p_instance: *mut libvlc_instance_t, id: *const c_char, version: *const c_char,
icon: *const c_char);
pub fn libvlc_get_version() -> *const c_char;
pub fn libvlc_get_compiler() -> *const c_char;
pub fn libvlc_get_changeset() -> *const c_char;
pub fn libvlc_free(ptr: *mut c_void);
pub fn libvlc_event_attach(
p_event_manager: *mut libvlc_event_manager_t, i_event_type: libvlc_event_type_t,
f_callback: libvlc_callback_t, user_data: *mut c_void) -> c_int;
pub fn libvlc_event_type_name(event_type: libvlc_event_type_t) -> *const c_char;
pub fn libvlc_log_get_context(
ctx: *const libvlc_log_t, module: *const *const c_char, file: *const *const c_char,
line: *mut c_uint);
pub fn libvlc_log_get_object(
ctx: *const libvlc_log_t, name: *const *const c_char,
header: *const *const c_char, id: *mut uintptr_t);
pub fn libvlc_log_unset(_: *mut libvlc_instance_t);
pub fn libvlc_log_set(instance: *mut libvlc_instance_t, cb: libvlc_log_cb, data: *mut c_void);
pub fn libvlc_log_set_file(_: *mut libvlc_instance_t, stream: *mut FILE);
pub fn libvlc_module_description_list_release(p_list: *mut libvlc_module_description_t);
pub fn libvlc_audio_filter_list_get(
p_instance: *mut libvlc_instance_t) -> *mut libvlc_module_description_t;
pub fn libvlc_video_filter_list_get(
p_instance: *mut libvlc_instance_t) -> *mut libvlc_module_description_t;
pub fn libvlc_clock() -> i64;
}
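/// Microseconds to wait until timestamp `pts` (negative if it has already
/// passed); mirrors the inline `libvlc_delay()` helper from libvlc.h.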
pub unsafe fn libvlc_delay(pts: i64) -> i64 {
pts - libvlc_clock()
}
// From libvlc_media.h
pub enum libvlc_media_t {}
pub use crate::enums::Meta as libvlc_meta_t;
pub use crate::enums::State as libvlc_state_t;
pub const libvlc_media_option_trusted: u32 = 0x2;
pub const libvlc_media_option_unique: u32 = 0x100;
pub use crate::enums::TrackType as libvlc_track_type_t;
#[repr(C)]
#[derive(Clone, Copy)]
pub struct libvlc_media_stats_t {
/* Input */
pub i_read_bytes: c_int,
pub f_input_bitrate: c_float,
/* Demux */
pub i_demux_read_bytes: c_int,
pub f_demux_bitrate: c_float,
pub i_demux_corrupted: c_int,
pub i_demux_discontinuity: c_int,
/* Decoders */<|fim▁hole|> pub i_displayed_pictures: c_int,
pub i_lost_pictures: c_int,
/* Audio output */
pub i_played_abuffers: c_int,
pub i_lost_abuffers: c_int,
/* Stream output */
pub i_sent_packets: c_int,
pub i_sent_bytes: c_int,
pub f_send_bitrate: c_float,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct libvlc_media_track_info_t {
/* Codec fourcc */
pub i_codec: u32,
pub i_id: c_int,
pub i_type: libvlc_track_type_t,
/* Codec specific */
pub i_profile: c_int,
pub i_level: c_int,
pub u: libvlc_media_track_info_t_types::u,
}
pub mod libvlc_media_track_info_t_types {
use libc::c_uint;
#[repr(C)]
#[derive(Clone, Copy)]
pub union u {
pub audio: audio,
pub video: video,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct audio {
pub i_channels: c_uint,
pub i_rate: c_uint,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct video {
pub i_height: c_uint,
pub i_width: c_uint,
}
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct libvlc_audio_track_t
{
pub i_channels: c_uint,
pub i_rate: c_uint,
}
#[derive(Clone, Copy)]
#[repr(C)]
pub struct libvlc_video_track_t
{
pub i_height: c_uint,
pub i_width: c_uint,
pub i_sar_num: c_uint,
pub i_sar_den: c_uint,
pub i_frame_rate_num: c_uint,
pub i_frame_rate_den: c_uint,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct libvlc_subtitle_track_t {
pub psz_encoding: *const c_char,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct libvlc_media_track_t {
pub i_codec: u32,
pub i_original_fourcc: u32,
pub i_id: c_int,
pub i_type: libvlc_track_type_t,
pub i_profile: c_int,
pub i_level: c_int,
pub u: libvlc_media_track_t_types::u,
pub i_bitrate: c_uint,
pub psz_language: *mut c_char,
pub psz_description: *mut c_char,
}
pub mod libvlc_media_track_t_types {
use super::*;
#[repr(C)]
#[derive(Clone, Copy)]
pub union u {
pub audio: *mut libvlc_audio_track_t,
pub video: *mut libvlc_video_track_t,
pub subtitle: *mut libvlc_subtitle_track_t,
}
}
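// Convenience accessors for the track-type union; callers should check
// `i_type` first so the matching variant is read.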
impl libvlc_media_track_t {
pub unsafe fn audio(&self) -> *mut libvlc_audio_track_t {
self.u.audio
}
pub unsafe fn video(&self) -> *mut libvlc_video_track_t {
self.u.video
}
pub unsafe fn subtitle(&self) -> *mut libvlc_subtitle_track_t {
self.u.subtitle
}
}
extern "C" {
pub fn libvlc_media_new_location(p_instance: *mut libvlc_instance_t, psz_mrl: *const c_char)
-> *mut libvlc_media_t;
pub fn libvlc_media_new_path(p_instance: *mut libvlc_instance_t, path: *const c_char)
-> *mut libvlc_media_t;
pub fn libvlc_media_new_fd(p_instance: *mut libvlc_instance_t, fd: c_int)
-> *mut libvlc_media_t;
pub fn libvlc_media_as_node(p_instance: *mut libvlc_instance_t, psz_name: *const c_char)
-> *mut libvlc_media_t;
pub fn libvlc_media_add_option(p_md: *mut libvlc_media_t, psz_options: *const c_char);
pub fn libvlc_media_add_option_flag(
p_md: *mut libvlc_media_t, psz_options: *const c_char, i_flags: c_uint);
pub fn libvlc_media_retain(p_md: *mut libvlc_media_t);
pub fn libvlc_media_release(p_md: *mut libvlc_media_t);
pub fn libvlc_media_get_mrl(p_md: *mut libvlc_media_t) -> *mut c_char;
pub fn libvlc_media_duplicate(p_md: *mut libvlc_media_t) -> *mut libvlc_media_t;
pub fn libvlc_media_get_meta(p_md: *mut libvlc_media_t, e_meta: libvlc_meta_t) -> *mut c_char;
pub fn libvlc_media_set_meta(p_md: *mut libvlc_media_t, e_meta: libvlc_meta_t,
psz_value: *const c_char);
pub fn libvlc_media_save_meta(p_md: *mut libvlc_media_t) -> c_int;
pub fn libvlc_media_get_state(p_md: *mut libvlc_media_t) -> libvlc_state_t;
pub fn libvlc_media_get_stats(p_md: *mut libvlc_media_t, p_stats: *mut libvlc_media_stats_t)
-> c_int;
pub fn libvlc_media_subitems(p_md: *mut libvlc_media_t)
-> *mut libvlc_media_list_t;
pub fn libvlc_media_event_manager(p_md: *mut libvlc_media_t)
-> *mut libvlc_event_manager_t;
pub fn libvlc_media_get_duration(p_md: *mut libvlc_media_t)
-> libvlc_time_t;
pub fn libvlc_media_parse(p_md: *mut libvlc_media_t);
pub fn libvlc_media_parse_async(p_md: *mut libvlc_media_t);
pub fn libvlc_media_is_parsed(p_md: *mut libvlc_media_t) -> c_int;
pub fn libvlc_media_set_user_data(p_md: *mut libvlc_media_t,
p_new_user_data: *mut c_void);
pub fn libvlc_media_get_user_data(p_md: *mut libvlc_media_t) -> *mut c_void;
pub fn libvlc_media_tracks_get(p_md: *mut libvlc_media_t,
tracks: *mut *mut *mut libvlc_media_track_t) -> c_uint;
pub fn libvlc_media_tracks_release(p_tracks: *mut *mut libvlc_media_track_t, i_count: c_uint);
}
// From libvlc_media_player.h
pub enum libvlc_media_player_t {}
pub enum libvlc_equalizer_t {}
#[repr(C)]
#[derive(Clone, Copy, Debug)]
pub struct libvlc_track_description_t {
pub i_id: c_int,
pub psz_name: *mut c_char,
pub p_next: *mut libvlc_track_description_t,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct libvlc_audio_output_t {
pub psz_name: *mut c_char,
pub psz_description: *mut c_char,
pub p_next: *mut libvlc_audio_output_t,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct libvlc_audio_output_device_t {
pub p_next: *mut libvlc_audio_output_device_t,
pub psz_device: *mut c_char,
pub psz_description: *mut c_char,
}
#[repr(C)]
#[derive(Clone, Copy, Debug)]
pub struct libvlc_rectangle_t {
pub top: c_int, pub left: c_int, pub bottom: c_int, pub right: c_int,
}
#[repr(C)]
#[derive(Clone, Copy, Debug)]
pub enum libvlc_video_marquee_option_t {
libvlc_marquee_Enable = 0,
libvlc_marquee_Text,
libvlc_marquee_Color,
libvlc_marquee_Opacity,
libvlc_marquee_Position,
libvlc_marquee_Refresh,
libvlc_marquee_Size,
libvlc_marquee_Timeout,
libvlc_marquee_X,
libvlc_marquee_Y,
}
#[repr(C)]
#[derive(Clone, Copy, Debug)]
pub enum libvlc_navigate_mode_t {
libvlc_navigate_activate = 0,
libvlc_navigate_up,
libvlc_navigate_down,
libvlc_navigate_left,
libvlc_navigate_right,
}
pub use crate::enums::Position as libvlc_position_t;
pub use crate::enums::VideoAdjustOption as libvlc_video_adjust_option;
#[repr(C)]
#[derive(Clone, Copy, Debug)]
pub enum libvlc_video_logo_option_t {
libvlc_logo_enable,
libvlc_logo_file,
libvlc_logo_x,
libvlc_logo_y,
libvlc_logo_delay,
libvlc_logo_repeat,
libvlc_logo_opacity,
libvlc_logo_position
}
#[repr(C)]
#[derive(Clone, Copy, Debug)]
pub enum libvlc_audio_output_device_types_t {
libvlc_AudioOutputDevice_Error = -1,
libvlc_AudioOutputDevice_Mono = 1,
libvlc_AudioOutputDevice_Stereo = 2,
libvlc_AudioOutputDevice_2F2R = 4,
libvlc_AudioOutputDevice_3F2R = 5,
libvlc_AudioOutputDevice_5_1 = 6,
libvlc_AudioOutputDevice_6_1 = 7,
libvlc_AudioOutputDevice_7_1 = 8,
libvlc_AudioOutputDevice_SPDIF = 10
}
#[repr(C)]
#[derive(Clone, Copy, Debug)]
pub enum libvlc_audio_output_channel_t {
libvlc_AudioChannel_Error = -1,
libvlc_AudioChannel_Stereo = 1,
libvlc_AudioChannel_RStereo = 2,
libvlc_AudioChannel_Left = 3,
libvlc_AudioChannel_Right = 4,
libvlc_AudioChannel_Dolbys = 5
}
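// Callback type aliases; `Option<unsafe extern "C" fn ...>` maps to a nullable
// C function pointer.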
pub type libvlc_video_lock_cb = Option<unsafe extern "C" fn(*mut c_void, *mut c_void) -> *mut c_void>;
pub type libvlc_video_unlock_cb = Option<unsafe extern "C" fn(
*mut c_void, *mut c_void, *const *mut c_void)>;
pub type libvlc_video_display_cb = Option<unsafe extern "C" fn(*mut c_void, *mut c_void)>;
pub type libvlc_video_format_cb = Option<unsafe extern "C" fn(
*mut *mut c_void, *mut c_char, *mut c_uint, *mut c_uint, *mut c_uint, *mut c_uint)>;
pub type libvlc_video_cleanup_cb = Option<unsafe extern "C" fn(*mut c_void)>;
pub type libvlc_audio_play_cb = Option<unsafe extern "C" fn(*mut c_void, *const c_void, c_uint, i64)>;
pub type libvlc_audio_pause_cb = Option<unsafe extern "C" fn(*mut c_void, i64)>;
pub type libvlc_audio_resume_cb = Option<unsafe extern "C" fn(*mut c_void, i64)>;
pub type libvlc_audio_flush_cb = Option<unsafe extern "C" fn(*mut c_void, i64)>;
pub type libvlc_audio_drain_cb = Option<unsafe extern "C" fn(*mut c_void)>;
pub type libvlc_audio_set_volume_cb = Option<unsafe extern "C" fn(*mut c_void, c_float, c_bool)>;
pub type libvlc_audio_setup_cb = Option<unsafe extern "C" fn(
*mut *mut c_void, *mut c_char, *mut c_uint, *mut c_uint)>;
pub type libvlc_audio_cleanup_cb = Option<unsafe extern "C" fn(*mut c_void)>;
extern "C" {
pub fn libvlc_media_player_new(p_libvlc_instance: *mut libvlc_instance_t)
-> *mut libvlc_media_player_t;
pub fn libvlc_media_player_new_from_media(p_md: *mut libvlc_media_t)
-> *mut libvlc_media_player_t;
pub fn libvlc_media_player_release(p_mi: *mut libvlc_media_player_t);
pub fn libvlc_media_player_retain(p_mi: *mut libvlc_media_player_t);
pub fn libvlc_media_player_set_media(p_mi: *mut libvlc_media_player_t,
p_md: *mut libvlc_media_t);
pub fn libvlc_media_player_get_media(p_mi: *mut libvlc_media_player_t) -> *mut libvlc_media_t;
pub fn libvlc_media_player_event_manager(p_mi: *mut libvlc_media_player_t)
-> *mut libvlc_event_manager_t;
pub fn libvlc_media_player_is_playing(p_mi: *mut libvlc_media_player_t) -> c_int;
pub fn libvlc_media_player_play(p_mi: *mut libvlc_media_player_t) -> c_int;
pub fn libvlc_media_player_set_pause(mp: *mut libvlc_media_player_t, do_pause: c_int);
pub fn libvlc_media_player_pause(p_mi: *mut libvlc_media_player_t);
pub fn libvlc_media_player_stop(p_mi: *mut libvlc_media_player_t);
pub fn libvlc_video_set_callbacks(
mp: *mut libvlc_media_player_t, lock: libvlc_video_lock_cb,
unlock: libvlc_video_unlock_cb, display: libvlc_video_display_cb,
opaque: *mut c_void);
pub fn libvlc_video_set_format(
mp: *mut libvlc_media_player_t, chroma: *const c_char, width: c_uint, height: c_uint,
pitch: c_uint);
pub fn libvlc_video_set_format_callbacks(
mp: *mut libvlc_media_player_t, setup: libvlc_video_format_cb,
cleanup: libvlc_video_cleanup_cb);
pub fn libvlc_media_player_set_nsobject(
p_mi: *mut libvlc_media_player_t, drawable: *mut c_void);
pub fn libvlc_media_player_get_nsobject(p_mi: *mut libvlc_media_player_t) -> *mut c_void;
pub fn libvlc_media_player_set_xwindow(
p_mi: *mut libvlc_media_player_t, drawable: u32);
pub fn libvlc_media_player_get_xwindow(p_mi: *mut libvlc_media_player_t) -> u32;
pub fn libvlc_media_player_set_hwnd(
p_mi: *mut libvlc_media_player_t, drawable: *mut c_void);
pub fn libvlc_media_player_get_hwnd(p_mi: *mut libvlc_media_player_t) -> *mut c_void;
pub fn libvlc_audio_set_callbacks(
mp: *mut libvlc_media_player_t, play: libvlc_audio_play_cb, pause: libvlc_audio_pause_cb,
resume: libvlc_audio_resume_cb, flush: libvlc_audio_flush_cb,
drain: libvlc_audio_drain_cb, opaque: *mut c_void);
pub fn libvlc_audio_set_volume_callback(
mp: *mut libvlc_media_player_t, set_volume: libvlc_audio_set_volume_cb);
pub fn libvlc_audio_set_format_callbacks(
mp: *mut libvlc_media_player_t, setup: libvlc_audio_setup_cb,
cleanup: libvlc_audio_cleanup_cb);
pub fn libvlc_audio_set_format(
mp: *mut libvlc_media_player_t, format: *const c_char, rate: c_uint, channels: c_uint);
pub fn libvlc_media_player_get_length(p_mi: *mut libvlc_media_player_t) -> libvlc_time_t;
pub fn libvlc_media_player_get_time(p_mi: *mut libvlc_media_player_t) -> libvlc_time_t;
pub fn libvlc_media_player_set_time(
p_mi: *mut libvlc_media_player_t, i_time: libvlc_time_t);
pub fn libvlc_media_player_get_position(p_mi: *mut libvlc_media_player_t) -> c_float;
pub fn libvlc_media_player_set_position(p_mi: *mut libvlc_media_player_t, f_pos: c_float);
pub fn libvlc_media_player_set_chapter(p_mi: *mut libvlc_media_player_t, i_chapter: c_int);
pub fn libvlc_media_player_get_chapter(p_mi: *mut libvlc_media_player_t) -> c_int;
pub fn libvlc_media_player_get_chapter_count(p_mi: *mut libvlc_media_player_t) -> c_int;
pub fn libvlc_media_player_will_play(p_mi: *mut libvlc_media_player_t) -> c_int;
pub fn libvlc_media_player_set_title(p_mi: *mut libvlc_media_player_t, i_title: c_int);
pub fn libvlc_media_player_get_chapter_count_for_title(
p_mi: *mut libvlc_media_player_t, i_title: c_int) -> c_int;
pub fn libvlc_media_player_get_title(p_mi: *mut libvlc_media_player_t) -> c_int;
pub fn libvlc_media_player_get_title_count(p_mi: *mut libvlc_media_player_t) -> c_int;
pub fn libvlc_media_player_previous_chapter(p_mi: *mut libvlc_media_player_t);
pub fn libvlc_media_player_next_chapter(p_mi: *mut libvlc_media_player_t);
pub fn libvlc_media_player_get_rate(p_mi: *mut libvlc_media_player_t) -> c_float;
pub fn libvlc_media_player_set_rate(p_mi: *mut libvlc_media_player_t, rate: c_float) -> c_int;
pub fn libvlc_media_player_get_state(p_mi: *mut libvlc_media_player_t) -> libvlc_state_t;
pub fn libvlc_media_player_get_fps(p_mi: *mut libvlc_media_player_t) -> c_float;
pub fn libvlc_media_player_has_vout(p_mi: *mut libvlc_media_player_t) -> c_uint;
pub fn libvlc_media_player_is_seekable(p_mi: *mut libvlc_media_player_t) -> c_int;
pub fn libvlc_media_player_can_pause(p_mi: *mut libvlc_media_player_t) -> c_int;
pub fn libvlc_media_player_program_scrambled(p_mi: *mut libvlc_media_player_t) -> c_int;
pub fn libvlc_media_player_next_frame(p_mi: *mut libvlc_media_player_t);
pub fn libvlc_media_player_navigate(p_mi: *mut libvlc_media_player_t, navigate: c_uint);
pub fn libvlc_media_player_set_video_title_display(
p_mi: *mut libvlc_media_player_t, position: libvlc_position_t, timeout: c_uint);
pub fn libvlc_track_description_list_release(p_track_description: *mut libvlc_track_description_t);
pub fn libvlc_toggle_fullscreen(p_mi: *mut libvlc_media_player_t);
pub fn libvlc_set_fullscreen(p_mi: *mut libvlc_media_player_t, b_fullscreen: c_int);
pub fn libvlc_get_fullscreen(p_mi: *mut libvlc_media_player_t) -> c_int;
pub fn libvlc_video_set_key_input(p_mi: *mut libvlc_media_player_t, on: c_uint);
pub fn libvlc_video_set_mouse_input(p_mi: *mut libvlc_media_player_t, on: c_uint);
pub fn libvlc_video_get_size(
p_mi: *mut libvlc_media_player_t, num: c_uint, px: *mut c_uint, py: *mut c_uint) -> c_int;
pub fn libvlc_video_get_cursor(
p_mi: *mut libvlc_media_player_t, num: c_uint, px: *mut c_int, py: *mut c_int) -> c_int;
pub fn libvlc_video_get_scale(p_mi: *mut libvlc_media_player_t) -> c_float;
pub fn libvlc_video_set_scale(p_mi: *mut libvlc_media_player_t, f_factor: c_float);
pub fn libvlc_video_get_aspect_ratio(p_mi: *mut libvlc_media_player_t) -> *mut c_char;
pub fn libvlc_video_set_aspect_ratio(p_mi: *mut libvlc_media_player_t, psz_aspect: *const c_char);
pub fn libvlc_video_get_spu(p_mi: *mut libvlc_media_player_t) -> c_int;
pub fn libvlc_video_get_spu_count(p_mi: *mut libvlc_media_player_t) -> c_int;
pub fn libvlc_video_get_spu_description(p_mi: *mut libvlc_media_player_t)
-> *mut libvlc_track_description_t;
pub fn libvlc_video_set_spu(p_mi: *mut libvlc_media_player_t, i_spu: c_int) -> c_int;
pub fn libvlc_video_set_subtitle_file(
p_mi: *mut libvlc_media_player_t, psz_subtitle: *const c_char) -> c_int;
pub fn libvlc_video_get_spu_delay(p_mi: *mut libvlc_media_player_t) -> i64;
pub fn libvlc_video_set_spu_delay(
p_mi: *mut libvlc_media_player_t, i_delay: i64) -> c_int;
pub fn libvlc_video_get_title_description(
p_mi: *mut libvlc_media_player_t) -> *mut libvlc_track_description_t;
pub fn libvlc_video_get_chapter_description(
p_mi: *mut libvlc_media_player_t, i_title: c_int) -> *mut libvlc_track_description_t;
pub fn libvlc_video_get_crop_geometry(p_mi: *mut libvlc_media_player_t) -> *mut c_char;
pub fn libvlc_video_set_crop_geometry(
p_mi: *mut libvlc_media_player_t, psz_geometry: *const c_char);
pub fn libvlc_video_get_teletext(p_mi: *mut libvlc_media_player_t) -> c_int;
pub fn libvlc_video_set_teletext(p_mi: *mut libvlc_media_player_t, i_page: c_int);
pub fn libvlc_toggle_teletext(p_mi: *mut libvlc_media_player_t);
pub fn libvlc_video_get_track_count(p_mi: *mut libvlc_media_player_t) -> c_int;
pub fn libvlc_video_get_track_description(
p_mi: *mut libvlc_media_player_t) -> *mut libvlc_track_description_t;
pub fn libvlc_video_get_track(p_mi: *mut libvlc_media_player_t) -> c_int;
pub fn libvlc_video_set_track(p_mi: *mut libvlc_media_player_t, i_track: c_int) -> c_int;
pub fn libvlc_video_take_snapshot(
p_mi: *mut libvlc_media_player_t, num: c_uint, psz_filepath: *const c_char,
i_width: c_uint, i_height: c_uint) -> c_int;
pub fn libvlc_video_set_deinterlace(p_mi: *mut libvlc_media_player_t, psz_mode: *const c_char);
pub fn libvlc_video_get_marquee_int(p_mi: *mut libvlc_media_player_t, option: c_uint) -> c_int;
pub fn libvlc_video_get_marquee_string(
p_mi: *mut libvlc_media_player_t, option: c_uint) -> *mut c_char;
pub fn libvlc_video_set_marquee_int(
p_mi: *mut libvlc_media_player_t, option: c_uint, i_val: c_int);
pub fn libvlc_video_set_marquee_string(
p_mi: *mut libvlc_media_player_t, option: c_uint, psz_text: *const c_char);
pub fn libvlc_video_get_logo_int(p_mi: *mut libvlc_media_player_t, option: c_uint) -> c_int;
pub fn libvlc_video_set_logo_int(p_mi: *mut libvlc_media_player_t, option: c_uint, value: c_int);
pub fn libvlc_video_set_logo_string(
p_mi: *mut libvlc_media_player_t, option: c_uint, psz_value: *const c_char);
pub fn libvlc_video_get_adjust_int(
p_mi: *mut libvlc_media_player_t, option: c_uint) -> c_int;
pub fn libvlc_video_set_adjust_int(
p_mi: *mut libvlc_media_player_t, option: c_uint, value: c_int);
pub fn libvlc_video_get_adjust_float(
p_mi: *mut libvlc_media_player_t, option: c_uint) -> c_float;
pub fn libvlc_video_set_adjust_float(
p_mi: *mut libvlc_media_player_t, option: c_uint, value: c_float);
pub fn libvlc_audio_output_list_get(p_instance: *mut libvlc_instance_t)
-> *mut libvlc_audio_output_t;
pub fn libvlc_audio_output_list_release(p_list: *mut libvlc_audio_output_t);
pub fn libvlc_audio_output_set(p_mi: *mut libvlc_media_player_t, psz_name: *const c_char) -> c_int;
pub fn libvlc_audio_output_device_enum(
mp: *mut libvlc_media_player_t) -> *mut libvlc_audio_output_device_t;
pub fn libvlc_audio_output_device_list_get(
p_instance: *mut libvlc_instance_t, aout: *const c_char) -> *mut libvlc_audio_output_device_t;
pub fn libvlc_audio_output_device_list_release(p_list: *mut libvlc_audio_output_device_t);
pub fn libvlc_audio_output_device_set(
mp: *mut libvlc_media_player_t, module: *const c_char, device_id: *const c_char);
pub fn libvlc_audio_toggle_mute(p_mi: *mut libvlc_media_player_t);
pub fn libvlc_audio_get_mute(p_mi: *mut libvlc_media_player_t) -> c_int;
pub fn libvlc_audio_set_mute(p_mi: *mut libvlc_media_player_t, status: c_int);
pub fn libvlc_audio_get_volume(p_mi: *mut libvlc_media_player_t) -> c_int;
pub fn libvlc_audio_set_volume(p_mi: *mut libvlc_media_player_t, i_volume: c_int) -> c_int;
pub fn libvlc_audio_get_track_count(p_mi: *mut libvlc_media_player_t) -> c_int;
pub fn libvlc_audio_get_track_description(
p_mi: *mut libvlc_media_player_t) -> *mut libvlc_track_description_t;
pub fn libvlc_audio_get_track(p_mi: *mut libvlc_media_player_t) -> c_int;
pub fn libvlc_audio_set_track(p_mi: *mut libvlc_media_player_t, i_track: c_int) -> c_int;
pub fn libvlc_audio_get_channel(p_mi: *mut libvlc_media_player_t) -> c_int;
pub fn libvlc_audio_set_channel(p_mi: *mut libvlc_media_player_t, channel: c_int) -> c_int;
pub fn libvlc_audio_get_delay(p_mi: *mut libvlc_media_player_t) -> i64;
pub fn libvlc_audio_set_delay(p_mi: *mut libvlc_media_player_t, i_delay: i64) -> c_int;
pub fn libvlc_audio_equalizer_get_preset_count() -> c_uint;
pub fn libvlc_audio_equalizer_get_preset_name(u_index: c_uint) -> *const c_char;
pub fn libvlc_audio_equalizer_get_band_count() -> c_uint;
pub fn libvlc_audio_equalizer_get_band_frequency(u_index: c_uint) -> c_float;
pub fn libvlc_audio_equalizer_new() -> *mut libvlc_equalizer_t;
pub fn libvlc_audio_equalizer_new_from_preset(u_index: c_uint) -> *mut libvlc_equalizer_t;
pub fn libvlc_audio_equalizer_release(p_equalizer: *mut libvlc_equalizer_t);
pub fn libvlc_audio_equalizer_set_preamp(
p_equalizer: *mut libvlc_equalizer_t, f_preamp: c_float) -> c_int;
pub fn libvlc_audio_equalizer_get_preamp(p_equalizer: *mut libvlc_equalizer_t) -> c_float;
pub fn libvlc_audio_equalizer_set_amp_at_index(
p_equalizer: *mut libvlc_equalizer_t, f_amp: c_float, u_band: c_uint) -> c_int;
pub fn libvlc_audio_equalizer_get_amp_at_index(
p_equalizer: *mut libvlc_equalizer_t, u_band: c_uint) -> c_float;
pub fn libvlc_media_player_set_equalizer(
p_mi: *mut libvlc_media_player_t, p_equalizer: *mut libvlc_equalizer_t) -> c_int;
}
// From libvlc_events.h
pub use crate::enums::EventType as libvlc_event_e;
#[repr(C)]
#[derive(Clone, Copy)]
pub struct libvlc_event_t {
pub _type: c_int,
pub p_obj: *mut c_void,
pub u: libvlc_event_t_types::u,
}
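// Per-event payloads; the active union variant is selected by `_type`.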
pub mod libvlc_event_t_types {
use super::*;
use libc::{c_int, c_char, c_float};
#[repr(C)]
#[derive(Clone, Copy)]
pub union u {
pub media_meta_changed: media_meta_changed,
pub media_subitem_added: media_subitem_added,
pub media_duration_changed: media_duration_changed,
pub media_parsed_changed: media_parsed_changed,
pub media_freed: media_freed,
pub media_state_changed: media_state_changed,
pub media_subitemtree_added: media_subitemtree_added,
pub media_player_buffering: media_player_buffering,
pub media_player_position_changed: media_player_position_changed,
pub media_player_time_changed: media_player_time_changed,
pub media_player_title_changed: media_player_title_changed,
pub media_player_seekable_changed: media_player_seekable_changed,
pub media_player_pausable_changed: media_player_pausable_changed,
pub media_player_scrambled_changed: media_player_scrambled_changed,
pub media_player_vout: media_player_vout,
pub media_list_item_added: media_list_item_added,
pub media_list_will_add_item: media_list_will_add_item,
pub media_list_item_deleted: media_list_item_deleted,
pub media_list_will_delete_item: media_list_will_delete_item,
pub media_list_player_next_item_set: media_list_player_next_item_set,
pub media_player_snapshot_taken: media_player_snapshot_taken,
pub media_player_length_changed: media_player_length_changed,
pub vlm_media_event: vlm_media_event,
pub media_player_media_changed: media_player_media_changed,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_meta_changed {
pub meta_type: libvlc_meta_t,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_subitem_added {
pub new_child: *mut libvlc_media_t,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_duration_changed {
pub new_duration: i64,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_parsed_changed {
pub new_status: c_int,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_freed {
pub md: *mut libvlc_media_t,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_state_changed {
pub new_state: libvlc_state_t,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_subitemtree_added {
pub item: *mut libvlc_media_t,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_player_buffering {
pub new_cache: c_float,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_player_position_changed {
pub new_position: c_float,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_player_time_changed {
pub new_time: libvlc_time_t,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_player_title_changed {
        pub new_title: c_int,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_player_seekable_changed {
pub new_seekable: c_int,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_player_pausable_changed {
pub new_pausable: c_int,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_player_scrambled_changed {
pub new_scrambled: c_int,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_player_vout {
pub new_count: c_int,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_list_item_added {
pub item: *mut libvlc_media_t,
pub index: c_int,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_list_will_add_item {
pub item: *mut libvlc_media_t,
pub index: c_int,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_list_item_deleted {
pub item: *mut libvlc_media_t,
pub index: c_int,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_list_will_delete_item {
pub item: *mut libvlc_media_t,
pub index: c_int,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_list_player_next_item_set {
pub item: *mut libvlc_media_t,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_player_snapshot_taken {
pub psz_filename: *mut c_char,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_player_length_changed {
pub new_length: libvlc_time_t,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct vlm_media_event {
pub psz_media_name: *mut c_char,
pub psz_instance_name: *mut c_char,
}
#[repr(C)]
#[derive(Clone, Copy)]
pub struct media_player_media_changed {
pub new_media: *mut libvlc_media_t,
}
}
// From libvlc_media_list.h
pub enum libvlc_media_list_t {}
extern "C" {
pub fn libvlc_media_list_new(p_instance: *mut libvlc_instance_t) -> *mut libvlc_media_list_t;
pub fn libvlc_media_list_release(p_ml: *mut libvlc_media_list_t);
pub fn libvlc_media_list_retain(p_ml: *mut libvlc_media_list_t);
pub fn libvlc_media_list_set_media(p_ml: *mut libvlc_media_list_t, p_md: *mut libvlc_media_t);
pub fn libvlc_media_list_media(p_ml: *mut libvlc_media_list_t) -> *mut libvlc_media_t;
pub fn libvlc_media_list_add_media(
p_ml: *mut libvlc_media_list_t, p_md: *mut libvlc_media_t) -> c_int;
pub fn libvlc_media_list_insert_media(
p_ml: *mut libvlc_media_list_t, p_md: *mut libvlc_media_t, i_pos: c_int) -> c_int;
pub fn libvlc_media_list_remove_index(p_ml: *mut libvlc_media_list_t, i_pos: c_int) -> c_int;
pub fn libvlc_media_list_count(p_ml: *mut libvlc_media_list_t) -> c_int;
pub fn libvlc_media_list_item_at_index(
p_ml: *mut libvlc_media_list_t, i_pos: c_int) -> *mut libvlc_media_t;
pub fn libvlc_media_list_index_of_item(
p_ml: *mut libvlc_media_list_t, p_md: *mut libvlc_media_t) -> c_int;
pub fn libvlc_media_list_is_readonly(p_ml: *mut libvlc_media_list_t) -> c_int;
pub fn libvlc_media_list_lock(p_ml: *mut libvlc_media_list_t);
pub fn libvlc_media_list_unlock(p_ml: *mut libvlc_media_list_t);
pub fn libvlc_media_list_event_manager(
p_ml: *mut libvlc_media_list_t) -> *mut libvlc_event_manager_t;
}
// From libvlc_media_library.h
pub enum libvlc_media_library_t {}
extern "C" {
pub fn libvlc_media_library_new(p_instance: *mut libvlc_instance_t) -> *mut libvlc_media_library_t;
pub fn libvlc_media_library_release(p_mlib: *mut libvlc_media_library_t);
pub fn libvlc_media_library_retain(p_mlib: *mut libvlc_media_library_t);
pub fn libvlc_media_library_load(p_mlib: *mut libvlc_media_library_t) -> c_int;
pub fn libvlc_media_library_media_list(
p_mlib: *mut libvlc_media_library_t) -> *mut libvlc_media_list_t;
}
// From libvlc_media_discoverer.h
pub enum libvlc_media_discoverer_t {}
extern "C" {
pub fn libvlc_media_discoverer_new_from_name(
p_inst: *mut libvlc_instance_t, psz_name: *const c_char) -> *mut libvlc_media_discoverer_t;
pub fn libvlc_media_discoverer_release(p_mdis: *mut libvlc_media_discoverer_t);
pub fn libvlc_media_discoverer_localized_name(
p_mdis: *mut libvlc_media_discoverer_t) -> *mut c_char;
pub fn libvlc_media_discoverer_media_list(
p_mdis: *mut libvlc_media_discoverer_t) -> *mut libvlc_media_list_t;
pub fn libvlc_media_discoverer_event_manager(
p_mdis: *mut libvlc_media_discoverer_t) -> *mut libvlc_event_manager_t;
pub fn libvlc_media_discoverer_is_running(p_mdis: *mut libvlc_media_discoverer_t) -> c_int;
}
// From libvlc_vlm.h
extern "C" {
pub fn libvlc_vlm_release(p_instance: *mut libvlc_instance_t);
pub fn libvlc_vlm_add_broadcast(
p_instance: *mut libvlc_instance_t, psz_name: *const c_char, psz_input: *const c_char,
psz_output: *const c_char, i_options: c_int, ppsz_options: *const *const c_char,
b_enabled: c_int, b_loop: c_int) -> c_int;
pub fn libvlc_vlm_add_vod(
p_instance: *mut libvlc_instance_t, psz_name: *const c_char, psz_input: *const c_char,
i_options: c_int, ppsz_options: *const *const c_char, b_enabled: c_int,
psz_mux: *const c_char) -> c_int;
pub fn libvlc_vlm_del_media(
p_instance: *mut libvlc_instance_t, psz_name: *const c_char) -> c_int;
pub fn libvlc_vlm_set_enabled(
p_instance: *mut libvlc_instance_t, psz_name: *const c_char, b_enabled: c_int) -> c_int;
pub fn libvlc_vlm_set_output(
p_instance: *mut libvlc_instance_t, psz_name: *const c_char, psz_output: *const c_char) -> c_int;
pub fn libvlc_vlm_set_input(
p_instance: *mut libvlc_instance_t, psz_name: *const c_char, psz_input: *const c_char) -> c_int;
pub fn libvlc_vlm_add_input(
p_instance: *mut libvlc_instance_t, psz_name: *const c_char, psz_input: *const c_char) -> c_int;
pub fn libvlc_vlm_set_loop(
p_instance: *mut libvlc_instance_t, psz_name: *const c_char, b_loop: c_int) -> c_int;
pub fn libvlc_vlm_set_mux(
p_instance: *mut libvlc_instance_t, psz_name: *const c_char, psz_mux: *const c_char) -> c_int;
pub fn libvlc_vlm_change_media(
p_instance: *mut libvlc_instance_t, psz_name: *const c_char, psz_input: *const c_char,
psz_output: *const c_char, i_options: c_int, ppsz_options: *const *const c_char,
b_enabled: c_int, b_loop: c_int) -> c_int;
pub fn libvlc_vlm_play_media(
p_instance: *mut libvlc_instance_t, psz_name: *const c_char) -> c_int;
pub fn libvlc_vlm_stop_media(
p_instance: *mut libvlc_instance_t, psz_name: *const c_char) -> c_int;
pub fn libvlc_vlm_pause_media(
p_instance: *mut libvlc_instance_t, psz_name: *const c_char) -> c_int;
pub fn libvlc_vlm_seek_media(
p_instance: *mut libvlc_instance_t, psz_name: *const c_char, f_percentage: c_float) -> c_int;
pub fn libvlc_vlm_show_media(
p_instance: *mut libvlc_instance_t, psz_name: *const c_char) -> *const c_char;
pub fn libvlc_vlm_get_media_instance_position(
p_instance: *mut libvlc_instance_t, psz_name: *const c_char, i_instance: c_int) -> c_float;
pub fn libvlc_vlm_get_media_instance_time(
p_instance: *mut libvlc_instance_t, psz_name: *const c_char, i_instance: c_int) -> c_int;
pub fn libvlc_vlm_get_media_instance_length(
p_instance: *mut libvlc_instance_t, psz_name: *const c_char, i_instance: c_int) -> c_int;
pub fn libvlc_vlm_get_media_instance_rate(
p_instance: *mut libvlc_instance_t, psz_name: *const c_char, i_instance: c_int) -> c_int;
pub fn libvlc_vlm_get_event_manager(
p_instance: *mut libvlc_instance_t) -> *mut libvlc_event_manager_t;
}<|fim▁end|> | pub i_decoded_video: c_int,
pub i_decoded_audio: c_int,
/* Video Output */ |
<|file_name|>doctor_attentions_diseases_inherit.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# #############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from dateutil.relativedelta import *
from datetime import datetime, date
from openerp.osv import fields, osv
from openerp.tools.translate import _
import logging
_logger = logging.getLogger(__name__)
class doctor_attentions_diseases(osv.osv):
_name = "doctor.attentions.diseases"
_inherit = 'doctor.attentions.diseases'
_columns = {
}
def _check_main_disease(self, cr, uid, ids, context=None):
'''
verify there's only one main disease
'''
for r in self.browse(cr, uid, ids, context=context):
diseases_ids = self.search(cr,uid,[('attentiont_id','=',r.attentiont_id.id),('diseases_type','=','main')])
if len(diseases_ids) > 1:
return False
return True<|fim▁hole|>
def _check_duplicated_disease(self, cr, uid, ids, context=None):
'''
        verify there are no duplicated diseases
'''
for r in self.browse(cr, uid, ids, context=context):
diseases_ids = self.search(cr,uid,[('attentiont_id','=',r.attentiont_id.id),('diseases_id','=',r.diseases_id.id)])
if len(diseases_ids) > 1:
return False
return True
_constraints = [
        #(_check_main_disease, u'More than one diagnosis is selected as Main. Please select one as Main and the rest as Related.', [u'\n\nDiagnosis Type\n\n']),
        #(_check_duplicated_disease, u'There are one or more duplicated diagnoses.', [u'\n\nDiagnoses\n\n'])
]
doctor_attentions_diseases()<|fim▁end|> | |
<|file_name|>configuration.get.js<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2008-2010 Surevine Limited.
*
* Although intended for deployment and use alongside Alfresco this module should
 * be considered 'Not a Contribution' as defined in Alfresco's standard contribution agreement, see
* http://www.alfresco.org/resource/AlfrescoContributionAgreementv2.pdf
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.<|fim▁hole|>*/
// Get the change password url
model.changePasswordUrlNode = companyhome.childByNamePath('Data Dictionary/SV Theme/Configuration/Header/url_change_password.txt');
//Get the security label header
model.securityLabelHeaderNode = companyhome.childByNamePath('Data Dictionary/SV Theme/Configuration/Header/security_label.txt');
//Get the help url
model.helpLinkNode = companyhome.childByNamePath('Data Dictionary/SV Theme/Configuration/Header/url_help.txt');
model.launchChatUrlNode = companyhome.childByNamePath('Data Dictionary/SV Theme/Configuration/Chat Dashlet/url_launch_chat.txt');
//Get the logo node
model.appLogoNode = companyhome.childByNamePath('Data Dictionary/SV Theme/Configuration/Header/app_logo');
model.surevineLinkUrlNode = companyhome.childByNamePath('Data Dictionary/SV Theme/Configuration/Footer/url_surevine_link.txt');
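// Cache the response privately for up to 10 hours, without forced revalidation.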
cache.neverCache=false;
cache.isPublic=false;
cache.maxAge=36000; //10 hours
cache.mustRevalidate=false;
cache.ETag = 100;<|fim▁end|> | |
<|file_name|>qapi-visit.py<|end_file_name|><|fim▁begin|>#
# QAPI visitor generator
#
# Copyright IBM, Corp. 2011
#
# Authors:
# Anthony Liguori <[email protected]>
# Michael Roth <[email protected]>
#
# This work is licensed under the terms of the GNU GPLv2.
# See the COPYING.LIB file in the top-level directory.
from ordereddict import OrderedDict
from qapi import *
import sys
import os
import getopt
import errno
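# Emits visit_type_* calls for every struct member; optional members are
# wrapped in visit_start_optional()/visit_end_optional(), and nested dicts
# recurse as embedded structs.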
def generate_visit_struct_body(field_prefix, members):
ret = ""
if len(field_prefix):
field_prefix = field_prefix + "."
for argname, argentry, optional, structured in parse_args(members):
if optional:
ret += mcgen('''
visit_start_optional(m, (obj && *obj) ? &(*obj)->%(c_prefix)shas_%(c_name)s : NULL, "%(name)s", errp);
if ((*obj)->%(prefix)shas_%(c_name)s) {
''',
c_prefix=c_var(field_prefix), prefix=field_prefix,
c_name=c_var(argname), name=argname)
push_indent()
if structured:
ret += mcgen('''
visit_start_struct(m, NULL, "", "%(name)s", 0, errp);
''',
name=argname)
ret += generate_visit_struct_body(field_prefix + argname, argentry)
ret += mcgen('''
visit_end_struct(m, errp);
''')
else:
ret += mcgen('''
visit_type_%(type)s(m, (obj && *obj) ? &(*obj)->%(c_prefix)s%(c_name)s : NULL, "%(name)s", errp);
''',
c_prefix=c_var(field_prefix), prefix=field_prefix,
type=type_name(argentry), c_name=c_var(argname),
name=argname)
if optional:
pop_indent()
ret += mcgen('''
}
visit_end_optional(m, errp);
''')
return ret
def generate_visit_struct(name, members):
ret = mcgen('''
void visit_type_%(name)s(Visitor *m, %(name)s ** obj, const char *name, Error **errp)
{
visit_start_struct(m, (void **)obj, "%(name)s", name, sizeof(%(name)s), errp);
''',
name=name)
push_indent()
ret += generate_visit_struct_body("", members)
pop_indent()
ret += mcgen('''
visit_end_struct(m, errp);
}
''')
return ret
def generate_visit_list(name, members):
return mcgen('''
void visit_type_%(name)sList(Visitor *m, %(name)sList ** obj, const char *name, Error **errp)
{
GenericList *i, **head = (GenericList **)obj;
visit_start_list(m, name, errp);
for (*head = i = visit_next_list(m, head, errp); i; i = visit_next_list(m, &i, errp)) {
%(name)sList *native_i = (%(name)sList *)i;
visit_type_%(name)s(m, &native_i->value, NULL, errp);
}
visit_end_list(m, errp);
}
''',
name=name)
def generate_visit_enum(name, members):
return mcgen('''
void visit_type_%(name)s(Visitor *m, %(name)s * obj, const char *name, Error **errp)
{
visit_type_enum(m, (int *)obj, %(name)s_lookup, "%(name)s", name, errp);
}
''',
name=name)
def generate_visit_union(name, members):
ret = generate_visit_enum('%sKind' % name, members.keys())
ret += mcgen('''
void visit_type_%(name)s(Visitor *m, %(name)s ** obj, const char *name, Error **errp)
{
}
''',
name=name)
return ret
def generate_declaration(name, members, genlist=True):
ret = mcgen('''
void visit_type_%(name)s(Visitor *m, %(name)s ** obj, const char *name, Error **errp);
''',
name=name)
if genlist:
ret += mcgen('''
void visit_type_%(name)sList(Visitor *m, %(name)sList ** obj, const char *name, Error **errp);
''',
name=name)
return ret
def generate_decl_enum(name, members, genlist=True):
return mcgen('''
void visit_type_%(name)s(Visitor *m, %(name)s * obj, const char *name, Error **errp);
''',
name=name)
try:
opts, args = getopt.gnu_getopt(sys.argv[1:], "p:o:", ["prefix=", "output-dir="])
except getopt.GetoptError, err:
print str(err)
sys.exit(1)
output_dir = ""
prefix = ""
c_file = 'qapi-visit.c'
h_file = 'qapi-visit.h'
for o, a in opts:
if o in ("-p", "--prefix"):
prefix = a
elif o in ("-o", "--output-dir"):
output_dir = a + "/"
c_file = output_dir + prefix + c_file
h_file = output_dir + prefix + h_file
try:
os.makedirs(output_dir)
except os.error, e:
if e.errno != errno.EEXIST:
raise
fdef = open(c_file, 'w')
fdecl = open(h_file, 'w')
fdef.write(mcgen('''
/* THIS FILE IS AUTOMATICALLY GENERATED, DO NOT MODIFY */
/*
* schema-defined QAPI visitor functions
*
* Copyright IBM, Corp. 2011
*
* Authors:
* Anthony Liguori <[email protected]>
*
* This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
* See the COPYING.LIB file in the top-level directory.
*
*/
#include "%(header)s"
''',
header=basename(h_file)))
fdecl.write(mcgen('''
/* THIS FILE IS AUTOMATICALLY GENERATED, DO NOT MODIFY */
/*
* schema-defined QAPI visitor function
*
* Copyright IBM, Corp. 2011
*
* Authors:
* Anthony Liguori <[email protected]>
*
* This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
* See the COPYING.LIB file in the top-level directory.
*
*/
#ifndef %(guard)s
#define %(guard)s
#include "qapi/qapi-visit-core.h"
#include "%(prefix)sqapi-types.h"
''',
prefix=prefix, guard=guardname(h_file)))
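# Generate visitor implementations and declarations for each schema expression.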
exprs = parse_schema(sys.stdin)
for expr in exprs:
if expr.has_key('type'):
ret = generate_visit_struct(expr['type'], expr['data'])
ret += generate_visit_list(expr['type'], expr['data'])
fdef.write(ret)
ret = generate_declaration(expr['type'], expr['data'])
fdecl.write(ret)
elif expr.has_key('union'):
ret = generate_visit_union(expr['union'], expr['data'])<|fim▁hole|>
ret = generate_decl_enum('%sKind' % expr['union'], expr['data'].keys())
ret += generate_declaration(expr['union'], expr['data'])
fdecl.write(ret)
elif expr.has_key('enum'):
ret = generate_visit_enum(expr['enum'], expr['data'])
fdef.write(ret)
ret = generate_decl_enum(expr['enum'], expr['data'])
fdecl.write(ret)
fdecl.write('''
#endif
''')
fdecl.flush()
fdecl.close()
fdef.flush()
fdef.close()<|fim▁end|> | fdef.write(ret) |
<|file_name|>templates.js<|end_file_name|><|fim▁begin|>// # Templates
//
// Figure out which template should be used to render a request
// based on the templates which are allowed, and what is available in the theme
// TODO: consider where this should live as it deals with channels, singles, and errors
var _ = require('lodash'),
path = require('path'),
config = require('../../config'),
themes = require('../../themes'),
_private = {};
/**
* ## Get Error Template Hierarchy
*
* Fetch the ordered list of templates that can be used to render this error statusCode.
*
 * The default is the top-level 'error' template, used when nothing more specific matches.
*
* @param {integer} statusCode
* @returns {String[]}
*/
_private.getErrorTemplateHierarchy = function getErrorTemplateHierarchy(statusCode) {
var errorCode = _.toString(statusCode),
templateList = ['error'];
// Add error class template: E.g. error-4xx.hbs or error-5xx.hbs
templateList.unshift('error-' + errorCode[0] + 'xx');
// Add statusCode specific template: E.g. error-404.hbs
templateList.unshift('error-' + errorCode);
return templateList;
};
/**
* ## Get Channel Template Hierarchy
*
* Fetch the ordered list of templates that can be used to render this request.
* 'index' is the default / fallback
* For channels with slugs: [:channelName-:slug, :channelName, index]
* For channels without slugs: [:channelName, index]
* Channels can also have a front page template which is used if this is the first page of the channel, e.g. 'home.hbs'
*
* @param {Object} channelOpts
* @returns {String[]}
*/
_private.getChannelTemplateHierarchy = function getChannelTemplateHierarchy(channelOpts) {
var templateList = ['index'];
if (channelOpts.name && channelOpts.name !== 'index') {
templateList.unshift(channelOpts.name);
if (channelOpts.slugTemplate && channelOpts.slugParam) {
templateList.unshift(channelOpts.name + '-' + channelOpts.slugParam);
}
}
if (channelOpts.frontPageTemplate && channelOpts.postOptions.page === 1) {
templateList.unshift(channelOpts.frontPageTemplate);
}
return templateList;
};
/**
* ## Get Entry Template Hierarchy
*
* Fetch the ordered list of templates that can be used to render this request.
* 'post' is the default / fallback
* For posts: [post-:slug, custom-*, post]
* For pages: [page-:slug, custom-*, page, post]
*
* @param {Object} postObject
* @returns {String[]}
*/
_private.getEntryTemplateHierarchy = function getEntryTemplateHierarchy(postObject) {
var templateList = ['post'],<|fim▁hole|> templateList.unshift('page');
slugTemplate = 'page-' + postObject.slug;
}
if (postObject.custom_template) {
templateList.unshift(postObject.custom_template);
}
templateList.unshift(slugTemplate);
return templateList;
};
/**
* ## Pick Template
*
* Taking the ordered list of allowed templates for this request
* Cycle through and find the first one which has a match in the theme
*
* @param {Array|String} templateList
* @param {String} fallback - a fallback template
*/
_private.pickTemplate = function pickTemplate(templateList, fallback) {
var template;
if (!_.isArray(templateList)) {
templateList = [templateList];
}
if (!themes.getActive()) {
template = fallback;
} else {
template = _.find(templateList, function (template) {
return themes.getActive().hasTemplate(template);
});
}
if (!template) {
template = fallback;
}
return template;
};
_private.getTemplateForEntry = function getTemplateForEntry(postObject) {
var templateList = _private.getEntryTemplateHierarchy(postObject),
fallback = templateList[templateList.length - 1];
return _private.pickTemplate(templateList, fallback);
};
_private.getTemplateForChannel = function getTemplateForChannel(channelOpts) {
var templateList = _private.getChannelTemplateHierarchy(channelOpts),
fallback = templateList[templateList.length - 1];
return _private.pickTemplate(templateList, fallback);
};
_private.getTemplateForError = function getTemplateForError(statusCode) {
var templateList = _private.getErrorTemplateHierarchy(statusCode),
fallback = path.resolve(config.get('paths').defaultViews, 'error.hbs');
return _private.pickTemplate(templateList, fallback);
};
module.exports.setTemplate = function setTemplate(req, res, data) {
var routeConfig = res._route || {};
if (res._template && !req.err) {
return;
}
if (req.err) {
res._template = _private.getTemplateForError(res.statusCode);
return;
}
switch (routeConfig.type) {
case 'custom':
res._template = _private.pickTemplate(routeConfig.templateName, routeConfig.defaultTemplate);
break;
case 'channel':
res._template = _private.getTemplateForChannel(res.locals.channel);
break;
case 'entry':
res._template = _private.getTemplateForEntry(data.post);
break;
default:
res._template = 'index';
}
};<|fim▁end|> | slugTemplate = 'post-' + postObject.slug;
if (postObject.page) { |
<|file_name|>account_config.py<|end_file_name|><|fim▁begin|># Copyright 2018 Silvio Gregorini ([email protected])
# Copyright (c) 2018 Openforce Srls Unipersonale (www.openforce.it)
# Copyright (c) 2019 Matteo Bilotta
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).<|fim▁hole|>class ResConfigSettings(models.TransientModel):
_inherit = "res.config.settings"
sp_description = fields.Char(
related="company_id.sp_description",
string="Description for period end statements",
readonly=False,
)<|fim▁end|> |
from odoo import fields, models
|
<|file_name|>two_step_pipeline.py<|end_file_name|><|fim▁begin|># Copyright 2020 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pathlib
from kfp import components
from kfp import dsl
from kfp import compiler
component_op_1 = components.load_component_from_text("""
name: Write to GCS
inputs:
- {name: text, type: String, description: 'Content to be written to GCS'}
outputs:
- {name: output_gcs_path, type: GCSPath, description: 'GCS file path'}
implementation:
container:
image: google/cloud-sdk:slim
command:<|fim▁hole|> set -e -x
echo "$0" | gsutil cp - "$1"
- {inputValue: text}
- {outputUri: output_gcs_path}
""")
component_op_2 = components.load_component_from_text("""
name: Read from GCS
inputs:
- {name: input_gcs_path, type: GCSPath, description: 'GCS file path'}
implementation:
container:
image: google/cloud-sdk:slim
command:
- sh
- -c
- |
set -e -x
gsutil cat "$0"
- {inputUri: input_gcs_path}
""")
@dsl.pipeline(name='simple-two-step-pipeline', pipeline_root='dummy_root')
def my_pipeline(text: str = 'Hello world!'):
component_1 = component_op_1(text=text).set_display_name('Producer')
component_2 = component_op_2(
input_gcs_path=component_1.outputs['output_gcs_path'])
component_2.set_display_name('Consumer')
if __name__ == '__main__':
compiler.Compiler().compile(
pipeline_func=my_pipeline,
pipeline_parameters={'text': 'Hello KFP!'},
package_path=__file__.replace('.py', '.json'))<|fim▁end|> | - sh
- -c
- | |
<|file_name|>ItemTM1.java<|end_file_name|><|fim▁begin|>package com.houtekamert.testmod1.item;
import com.houtekamert.testmod1.creativetab.CreativeTabTM1;
import com.houtekamert.testmod1.reference.Reference;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
public class ItemTM1 extends Item
{
public ItemTM1()
{
super();
this.setCreativeTab(CreativeTabTM1.TMI_TAB);
}
@Override
public String getUnlocalizedName()
{<|fim▁hole|> return String.format("item.%s%s", Reference.MOD_ID.toLowerCase() + ":", getUnwrappedUnlocalizedName(super.getUnlocalizedName()));
}
@Override
public String getUnlocalizedName(ItemStack itemStack)
{
return String.format("item.%s%s", Reference.MOD_ID.toLowerCase() + ":", getUnwrappedUnlocalizedName(super.getUnlocalizedName()));
}
@Override
@SideOnly(Side.CLIENT)
public void registerIcons(IIconRegister iconRegister)
{
itemIcon = iconRegister.registerIcon(this.getUnlocalizedName().substring(this.getUnlocalizedName().indexOf(".") + 1));
}
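    // strips the leading "item." that vanilla prepends, so the name can be re-prefixed with the mod id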
protected String getUnwrappedUnlocalizedName(String unlocalizedName)
{
return unlocalizedName.substring(unlocalizedName.indexOf(".") + 1);
}
}<|fim▁end|> | |
<|file_name|>getMotionEventName.js<|end_file_name|><|fim▁begin|>// will this be needed?
var getMotionEventName = function(type) {
var t;
var el = document.createElement('fakeelement');
var map = {};
if (type == 'transition') {
map = {
'transition': 'transitionend',
'OTransition': 'oTransitionEnd',
'MozTransition': 'transitionend',
'WebkitTransition': 'webkitTransitionEnd'
};
} else if (type == 'animation') {
map = {
'animation': 'animationend',
'OAnimation': 'oAnimationEnd',
'MozAnimation': 'animationend',
'WebkitAnimation': 'webkitAnimationEnd'
};
    }
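    // return the end-event name for the first style property this browser supports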
for (t in map) {
<|fim▁hole|> if (el.style[t] !== undefined) {
return map[t];
}
}
};<|fim▁end|> |