prompt (large_string, lengths 70 to 991k) | completion (large_string, lengths 0 to 1.02k)
---|---
<|file_name|>s3.go<|end_file_name|><|fim▁begin|>package storage
import (
"fmt"
"io"
"path"
"github.com/goamz/goamz/aws"
"github.com/goamz/goamz/s3"
)
/*
Repository data is stored in Amazon S3.
Valid Params for KST_S3:
* "awsregion" The code for the AWS Region, for example "us-east-1"
* "bucket" The name of the AWS bucket
* "prefix" An optional prefix for the bucket contents, for example "pulldeploy"
*/
const KST_S3 AccessMethod = "s3"
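// Example (hypothetical usage; the bucket name is illustrative, the other
// values come from the Params doc above):
//
//	st := &stS3{}
//	err := st.init(Params{
//		"awsregion": "us-east-1",
//		"bucket":    "my-bucket",
//		"prefix":    "pulldeploy",
//	})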
// stS3 is used for PullDeploy repositories in Amazon S3.
type stS3 struct {
regionName string // Name of the AWS Region with our bucket
bucketName string // Name of the S3 bucket
pathPrefix string // Optional prefix to namespace our bucket
bucket *s3.Bucket // Handle to the S3 bucket
}
// Initialize the repository object.
func (st *stS3) init(params Params) error {
// Extract the AWS region name.
if regionName, ok := params["awsregion"]; ok {
st.regionName = regionName
}
// Extract the AWS bucket name.
if bucketName, ok := params["bucket"]; ok {
st.bucketName = bucketName
}
// Extract the optional prefix for our paths.
if pathPrefix, ok := params["prefix"]; ok {
st.pathPrefix = pathPrefix
}<|fim▁hole|> validSet := ""
for k := range aws.Regions {
validSet += " " + k
}
return fmt.Errorf("Invalid AWS region name: '%s' Valid values:%s",
st.regionName, validSet)
}
// Pull AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY out of the environment.
auth, err := aws.EnvAuth()
if err != nil {
return err
}
// Open a handle to the bucket.
s := s3.New(auth, region)
st.bucket = s.Bucket(st.bucketName)
return nil
}
// Get fetches the contents of a repository file into a byte array.
func (st *stS3) Get(repoPath string) ([]byte, error) {
return st.bucket.Get(st.makeS3Path(repoPath))
}
// Put writes the contents of a byte array into a repository file.
func (st *stS3) Put(repoPath string, data []byte) error {
options := s3.Options{}
return st.bucket.Put(
st.makeS3Path(repoPath),
data,
"application/octet-stream",
"authenticated-read",
options,
)
}
// GetReader returns a stream handle for reading a repository file.
func (st *stS3) GetReader(repoPath string) (io.ReadCloser, error) {
return st.bucket.GetReader(st.makeS3Path(repoPath))
}
// PutReader writes a stream to a repository file.
func (st *stS3) PutReader(repoPath string, rc io.ReadCloser, length int64) error {
options := s3.Options{}
return st.bucket.PutReader(
st.makeS3Path(repoPath),
rc,
length,
"application/octet-stream",
"authenticated-read",
options,
)
}
// Delete removes a repository file.
func (st *stS3) Delete(repoPath string) error {
return st.bucket.Del(st.makeS3Path(repoPath))
}
// Utility helper to generate a full S3 repository path.
func (st *stS3) makeS3Path(repoPath string) string {
if st.pathPrefix == "" {
return repoPath
}
return path.Join(st.pathPrefix, repoPath)
}<|fim▁end|>
|
// Validate the region.
region, ok := aws.Regions[st.regionName]
if !ok {
|
<|file_name|>constants.go<|end_file_name|><|fim▁begin|>package geoipfix
import (
"time"<|fim▁hole|>
// Version is the current application version
const Version = "0.1.0"
// DefaultPort is the default server port
const DefaultPort = 3001
// DatabaseURL is the full URL to download the MaxMind database
const DatabaseURL = "http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz"
// UpdateInterval is the default interval between database updates
const UpdateInterval = 24 * time.Hour
// RetryInterval is the default interval to wait before retrying a failed update
const RetryInterval = time.Hour
// compilation variables.
var (
Branch string
Revision string
BuildTime string
Compiler string
)<|fim▁end|>
|
)
|
<|file_name|>SchemaServiceUnitTest.java<|end_file_name|><|fim▁begin|>/**
* Copyright (C) Intersect 2012.
*
* This module contains Proprietary Information of Intersect,
* and should be treated as Confidential.
*/
package au.org.intersect.exsite9.service;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.stub;
import static org.mockito.Mockito.when;
import java.io.File;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import au.org.intersect.exsite9.dao.DAOTest;
import au.org.intersect.exsite9.dao.factory.MetadataAttributeDAOFactory;
import au.org.intersect.exsite9.dao.factory.MetadataCategoryDAOFactory;
import au.org.intersect.exsite9.dao.factory.SchemaDAOFactory;
import au.org.intersect.exsite9.domain.MetadataCategory;
import au.org.intersect.exsite9.domain.MetadataCategoryType;
import au.org.intersect.exsite9.domain.MetadataCategoryUse;
import au.org.intersect.exsite9.domain.MetadataValue;
import au.org.intersect.exsite9.domain.Schema;
/**
* Tests {@link SchemaService}
*/
public final class SchemaServiceUnitTest extends DAOTest
{
@Test
public void testCreateLocalSchema()
{
final EntityManagerFactory emf = mock(EntityManagerFactory.class);
when(emf.createEntityManager()).thenReturn(createEntityManager());
final File defaultSchemaDir = new File("defaultSchemaDir");
final File defaultSchemaFile = new File("defaultSchemaFile");
final File metadataSchemaSchema = new File("metadataSchemaSchema");
final SchemaDAOFactory schemaDAOFactory = new SchemaDAOFactory();
final MetadataCategoryDAOFactory metadataCategoryDAOFactory = new MetadataCategoryDAOFactory();
final MetadataAttributeDAOFactory metadataAttributeDAOFactory = new MetadataAttributeDAOFactory();
final SchemaService toTest = new SchemaService(defaultSchemaDir, defaultSchemaFile, metadataSchemaSchema, emf, schemaDAOFactory, metadataCategoryDAOFactory, metadataAttributeDAOFactory);
assertEquals(defaultSchemaFile, toTest.getDefaultSchema());
assertEquals(defaultSchemaDir, toTest.getDefaultSchemaDirectory());
final Schema schema = toTest.createLocalSchema("name", "description", "namespace url");
assertNotNull(schema.getId());
assertTrue(schema.getLocal());
assertEquals("name", schema.getName());
assertEquals("description", schema.getDescription());
assertEquals("namespace url", schema.getNamespaceURL());
}
@Test
public void testCreateImportedSchema()
{
final EntityManagerFactory emf = mock(EntityManagerFactory.class);
when(emf.createEntityManager()).thenReturn(createEntityManager());
final File defaultSchemaDir = new File("defaultSchemaDir");
final File defaultSchemaFile = new File("defaultSchemaFile");
final File metadataSchemaSchema = new File("metadataSchemaSchema");
final SchemaDAOFactory schemaDAOFactory = new SchemaDAOFactory();
final MetadataCategoryDAOFactory metadataCategoryDAOFactory = new MetadataCategoryDAOFactory();
final MetadataAttributeDAOFactory metadataAttributeDAOFactory = new MetadataAttributeDAOFactory();
final SchemaService toTest = new SchemaService(defaultSchemaDir, defaultSchemaFile, metadataSchemaSchema, emf, schemaDAOFactory, metadataCategoryDAOFactory, metadataAttributeDAOFactory);
final Schema importedSchema = new Schema("name", "desc", "namespace url", Boolean.FALSE);
final MetadataCategory mdc = new MetadataCategory("category", MetadataCategoryType.FREETEXT, MetadataCategoryUse.optional);
final MetadataValue mdv = new MetadataValue("metadata value");
mdc.getValues().add(mdv);
importedSchema.getMetadataCategories().add(mdc);
toTest.createImportedSchema(importedSchema);
assertNotNull(importedSchema.getId());
}
@Test
public void testUpdateSchema()
{
final EntityManagerFactory emf = mock(EntityManagerFactory.class);
stub(emf.createEntityManager()).toAnswer(new Answer<EntityManager>()
{
@Override
public EntityManager answer(final InvocationOnMock invocation) throws Throwable
{
return createEntityManager();
}
});
final File defaultSchemaDir = new File("defaultSchemaDir");
final File defaultSchemaFile = new File("defaultSchemaFile");
final File metadataSchemaSchema = new File("metadataSchemaSchema");
final SchemaDAOFactory schemaDAOFactory = new SchemaDAOFactory();
final MetadataCategoryDAOFactory metadataCategoryDAOFactory = new MetadataCategoryDAOFactory();
final MetadataAttributeDAOFactory metadataAttributeDAOFactory = new MetadataAttributeDAOFactory();
final SchemaService toTest = new SchemaService(defaultSchemaDir, defaultSchemaFile, metadataSchemaSchema, emf, schemaDAOFactory, metadataCategoryDAOFactory, metadataAttributeDAOFactory);<|fim▁hole|> assertTrue(schema.getLocal());
assertEquals("name", schema.getName());
assertEquals("description", schema.getDescription());
assertEquals("namespace url", schema.getNamespaceURL());
toTest.updateSchema(schema, "new name", "new description", "new namespace url");
final Schema updatedSchema = createEntityManager().find(Schema.class, schema.getId());
assertEquals("new name", updatedSchema.getName());
assertEquals("new description", updatedSchema.getDescription());
assertEquals("new namespace url", updatedSchema.getNamespaceURL());
}
@Test
public void testRemoveSchema()
{
final EntityManagerFactory emf = mock(EntityManagerFactory.class);
stub(emf.createEntityManager()).toAnswer(new Answer<EntityManager>()
{
@Override
public EntityManager answer(final InvocationOnMock invocation) throws Throwable
{
return createEntityManager();
}
});
final File defaultSchemaDir = new File("defaultSchemaDir");
final File defaultSchemaFile = new File("defaultSchemaFile");
final File metadataSchemaSchema = new File("metadataSchemaSchema");
final SchemaDAOFactory schemaDAOFactory = new SchemaDAOFactory();
final MetadataCategoryDAOFactory metadataCategoryDAOFactory = new MetadataCategoryDAOFactory();
final MetadataAttributeDAOFactory metadataAttributeDAOFactory = new MetadataAttributeDAOFactory();
final SchemaService toTest = new SchemaService(defaultSchemaDir, defaultSchemaFile, metadataSchemaSchema, emf, schemaDAOFactory, metadataCategoryDAOFactory, metadataAttributeDAOFactory);
final Schema importedSchema = new Schema("name", "desc", "namespace url", Boolean.FALSE);
final MetadataCategory mdc = new MetadataCategory("category", MetadataCategoryType.FREETEXT, MetadataCategoryUse.optional);
final MetadataValue mdv = new MetadataValue("metadata value");
mdc.getValues().add(mdv);
importedSchema.getMetadataCategories().add(mdc);
toTest.createImportedSchema(importedSchema);
assertNotNull(importedSchema.getId());
toTest.removeSchema(importedSchema);
assertNull(createEntityManager().find(Schema.class, importedSchema.getId()));
}
@Test
public void testAddRemoveMetadataCategory()
{
final EntityManagerFactory emf = mock(EntityManagerFactory.class);
stub(emf.createEntityManager()).toAnswer(new Answer<EntityManager>()
{
@Override
public EntityManager answer(final InvocationOnMock invocation) throws Throwable
{
return createEntityManager();
}
});
final File defaultSchemaDir = new File("defaultSchemaDir");
final File defaultSchemaFile = new File("defaultSchemaFile");
final File metadataSchemaSchema = new File("metadataSchemaSchema");
final SchemaDAOFactory schemaDAOFactory = new SchemaDAOFactory();
final MetadataCategoryDAOFactory metadataCategoryDAOFactory = new MetadataCategoryDAOFactory();
final MetadataAttributeDAOFactory metadataAttributeDAOFactory = new MetadataAttributeDAOFactory();
final SchemaService toTest = new SchemaService(defaultSchemaDir, defaultSchemaFile, metadataSchemaSchema, emf, schemaDAOFactory, metadataCategoryDAOFactory, metadataAttributeDAOFactory);
final Schema schema = toTest.createLocalSchema("name", "description", "namespace url");
assertNotNull(schema.getId());
final MetadataCategory mc = new MetadataCategory("mc", MetadataCategoryType.CONTROLLED_VOCABULARY, MetadataCategoryUse.required);
final MetadataValue mv = new MetadataValue("mv");
mc.getValues().add(mv);
toTest.addMetadataCategoryToSchema(schema, mc);
final Schema outSchema1 = createEntityManager().find(Schema.class, schema.getId());
assertEquals(1, outSchema1.getMetadataCategories().size());
toTest.removeMetadataCategoryFromSchema(schema, mc);
final Schema outSchema2 = createEntityManager().find(Schema.class, schema.getId());
assertEquals(0, outSchema2.getMetadataCategories().size());
}
}<|fim▁end|>
|
final Schema schema = toTest.createLocalSchema("name", "description", "namespace url");
assertNotNull(schema.getId());
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/*
* niepce - fwk/utils/mod.rs
*
* Copyright (C) 2017-2018 Hubert Figuière
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,<|fim▁hole|> *
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
pub mod exempi;
pub mod exiv2;
pub mod files;<|fim▁end|>
|
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>__author__ = 'Anatoli Kalysch'<|fim▁end|>
| |
<|file_name|>tests_basic.py<|end_file_name|><|fim▁begin|>from unittest import TestCase
from dirty_validators.basic import (BaseValidator, EqualTo, NotEqualTo, StringNotContaining, Length, NumberRange,
Regexp, Email, IPAddress, MacAddress, URL, UUID, AnyOf, NoneOf,
IsEmpty, NotEmpty, NotEmptyString, IsNone, NotNone)
import re
class TestBaseValidator(TestCase):
def setUp(self):
self.validator = BaseValidator()
def tearDown(self):
pass
def test_validate_any(self):
self.assertTrue(self.validator.is_valid(None))
self.assertDictEqual(self.validator.messages, {})
self.assertTrue(self.validator.is_valid(3))
self.assertDictEqual(self.validator.messages, {})
self.assertTrue(self.validator.is_valid('aaa'))
self.assertDictEqual(self.validator.messages, {})
self.assertTrue(self.validator.is_valid({}))
self.assertDictEqual(self.validator.messages, {})
def test_error_not_hidden_behaviour(self):
error_key = 'Test key'
error_message = "'$value' is the value error to test hidden feature"
self.validator.error_messages = {error_key: error_message}
self.validator.error(error_key, 'Not hidden')
self.assertEqual(self.validator.messages,
{error_key: "'Not hidden' is the value error to test hidden feature"})
def test_error_hidden_behaviour(self):
hidden_validator = BaseValidator(hidden=True)
error_key = 'Test key'
error_message = "'$value' is the value error to test hidden feature"
hidden_validator.error_messages = {error_key: error_message}
hidden_validator.error(error_key, 'Will it be hidden?')
self.assertEqual(hidden_validator.messages,
{error_key: "'**Hidden**' is the value error to test hidden feature"})
class TestEqualTo(TestCase):
def setUp(self):
self.validator = EqualTo(comp_value="aaa")
def tearDown(self):
pass
def test_validate_str_success(self):
self.assertTrue(self.validator.is_valid("aaa"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_fail(self):
self.assertFalse(self.validator.is_valid("aqaa"))
self.assertDictEqual(self.validator.messages, {EqualTo.NOT_EQUAL: "'aqaa' is not equal to 'aaa'"})
def test_validate_int_success(self):
self.validator = EqualTo(comp_value=3)
self.assertTrue(self.validator.is_valid(3))
self.assertDictEqual(self.validator.messages, {})
def test_validate_int_fail(self):
self.validator = EqualTo(comp_value=3)
self.assertFalse(self.validator.is_valid(4))
self.assertDictEqual(self.validator.messages, {EqualTo.NOT_EQUAL: "'4' is not equal to '3'"})
def test_validate_int_fail_custom_error_message(self):
self.validator = EqualTo(comp_value=3, error_messages={EqualTo.NOT_EQUAL: "$value $value aaa $comp_value"})
self.assertFalse(self.validator.is_valid(4))
self.assertDictEqual(self.validator.messages, {EqualTo.NOT_EQUAL: "4 4 aaa 3"})
def test_validate_int_fail_custom_error_code(self):
self.validator = EqualTo(comp_value=3, error_code_map={EqualTo.NOT_EQUAL: "newError"})
self.assertFalse(self.validator.is_valid(4))
self.assertDictEqual(self.validator.messages, {"newError": "'4' is not equal to '3'"})
def test_validate_int_fail_custom_error_code_and_error_message(self):
self.validator = EqualTo(comp_value=3,
error_code_map={EqualTo.NOT_EQUAL: "newError"},
error_messages={EqualTo.NOT_EQUAL: "$value $value aaa $comp_value"})
self.assertFalse(self.validator.is_valid(4))
self.assertDictEqual(self.validator.messages, {"newError": "4 4 aaa 3"})
def test_validate_int_fail_custom_error_code_error_message_and_custom_value(self):
self.validator = EqualTo(comp_value=3,
error_code_map={EqualTo.NOT_EQUAL: "newError"},
error_messages={EqualTo.NOT_EQUAL: "$value $value aaa $comp_value $value1 $value2"},
message_values={"value1": "aaaaaa1", "value2": "eeeeee1"})
self.assertFalse(self.validator.is_valid(4))
self.assertDictEqual(self.validator.messages, {"newError": "4 4 aaa 3 aaaaaa1 eeeeee1"})
class TestNotEqualTo(TestCase):
def setUp(self):
self.validator = NotEqualTo(comp_value="aaa")
def tearDown(self):
pass
def test_validate_str_success(self):
self.assertTrue(self.validator.is_valid("aqaa"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_fail(self):
self.assertFalse(self.validator.is_valid("aaa"))
self.assertDictEqual(self.validator.messages, {NotEqualTo.IS_EQUAL: "'aaa' is equal to 'aaa'"})
def test_validate_int_success(self):
self.validator = NotEqualTo(comp_value=3)
self.assertTrue(self.validator.is_valid(4))
self.assertDictEqual(self.validator.messages, {})
def test_validate_int_fail(self):
self.validator = NotEqualTo(comp_value=3)
self.assertFalse(self.validator.is_valid(3))
self.assertDictEqual(self.validator.messages, {NotEqualTo.IS_EQUAL: "'3' is equal to '3'"})
class TestStringNotContaining(TestCase):
def setUp(self):
self.validator = StringNotContaining(token='Test_TOKEN')
def test_validate_string_contains(self):
self.assertFalse(self.validator.is_valid('This string contains Test_TOKEN for sure'))
self.assertDictEqual(self.validator.messages,
{StringNotContaining.NOT_CONTAINS:
"'This string contains Test_TOKEN for sure' contains 'Test_TOKEN'"})
def test_validate_string_not_contains(self):
self.assertTrue(self.validator.is_valid('This string does not contain TESt_TOKEN for sensitive cases'))
def test_validate_string_contains_not_sensitive(self):
self.validator.case_sensitive = False
self.assertFalse(self.validator.is_valid('This string contains TESt_TOKEN for sensitive cases'))
class TestLength(TestCase):
def setUp(self):
self.validator = Length(min=3, max=6)
def tearDown(self):
pass
def test_validate_str_success(self):
self.assertTrue(self.validator.is_valid("aqaa"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_fail_short(self):
self.assertFalse(self.validator.is_valid("aa"))
self.assertDictEqual(self.validator.messages, {Length.TOO_SHORT: "'aa' is less than 3 unit length"})
def test_validate_str_fail_long(self):
self.assertFalse(self.validator.is_valid("aabbnnmm"))
self.assertDictEqual(self.validator.messages, {Length.TOO_LONG: "'aabbnnmm' is more than 6 unit length"})
def test_validate_int_fail(self):
self.assertFalse(self.validator.is_valid(5))
self.assertDictEqual(self.validator.messages, {Length.INVALID_TYPE: "'5' has no length"})
def test_validate_list_success(self):
self.assertTrue(self.validator.is_valid(["1a", "32d", "tr", "wq"]))
self.assertDictEqual(self.validator.messages, {})
def test_validate_list_fail_short(self):
self.assertFalse(self.validator.is_valid(["1a"]))
self.assertDictEqual(self.validator.messages, {Length.TOO_SHORT: "'['1a']' is less than 3 unit length"})
def test_validate_list_fail_long(self):
self.assertFalse(self.validator.is_valid(["1a", "32d", "tr", "wq", "qwqw", "dd", "as", "er"]))
self.assertDictEqual(self.validator.messages,
{Length.TOO_LONG:
"'['1a', '32d', 'tr', 'wq', 'qwqw', 'dd', 'as', 'er']' is more than 6 unit length"})
class TestNumberRange(TestCase):
def setUp(self):
self.validator = NumberRange(min=3, max=4)
def tearDown(self):
pass
def test_validate_int_success(self):
self.assertTrue(self.validator.is_valid(4))
self.assertDictEqual(self.validator.messages, {})
def test_validate_int_fail(self):
self.assertFalse(self.validator.is_valid(5))
self.assertDictEqual(self.validator.messages, {NumberRange.OUT_OF_RANGE: "'5' is out of range (3, 4)"})
def test_validate_int_no_min_success(self):
self.validator = NumberRange(max=4)
self.assertTrue(self.validator.is_valid(1))
self.assertDictEqual(self.validator.messages, {})
def test_validate_int_no_min_fail(self):
self.validator = NumberRange(max=4)
self.assertFalse(self.validator.is_valid(5))
self.assertDictEqual(self.validator.messages, {NumberRange.OUT_OF_RANGE: "'5' is out of range (None, 4)"})
def test_validate_int_no_max_success(self):
self.validator = NumberRange(min=4)
self.assertTrue(self.validator.is_valid(5))
self.assertDictEqual(self.validator.messages, {})
def test_validate_int_no_max_fail(self):
self.validator = NumberRange(min=4)
self.assertFalse(self.validator.is_valid(1))
self.assertDictEqual(self.validator.messages, {NumberRange.OUT_OF_RANGE: "'1' is out of range (4, None)"})
class TestRegexp(TestCase):
def setUp(self):
self.validator = Regexp(regex="^aa.+bb$")
def tearDown(self):
pass
def test_validate_str_success(self):
self.assertTrue(self.validator.is_valid("aarrbb"))<|fim▁hole|> self.assertFalse(self.validator.is_valid("aarrbbcc"))
self.assertDictEqual(self.validator.messages,
{Regexp.NOT_MATCH: "'aarrbbcc' does not match against pattern '^aa.+bb$'"})
def test_validate_str_case_sensitive_fail(self):
self.assertFalse(self.validator.is_valid("Aarrbb"))
self.assertDictEqual(self.validator.messages,
{Regexp.NOT_MATCH: "'Aarrbb' does not match against pattern '^aa.+bb$'"})
def test_validate_str_case_insensitive_success(self):
self.validator = Regexp(regex="^aa.+bb$", flags=re.IGNORECASE)
self.assertTrue(self.validator.is_valid("Aarrbb"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_int_fail(self):
self.assertFalse(self.validator.is_valid(6))
self.assertDictEqual(self.validator.messages,
{Regexp.NOT_MATCH: "'6' does not match against pattern '^aa.+bb$'"})
class TestEmail(TestCase):
def setUp(self):
self.validator = Email()
def tearDown(self):
pass
def test_validate_str_success(self):
self.assertTrue(self.validator.is_valid("[email protected]"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_fail(self):
self.assertFalse(self.validator.is_valid("[email protected]"))
self.assertDictEqual(self.validator.messages,
{Email.NOT_MAIL: "'[email protected]' is not a valid email address."})
def test_validate_int_fail(self):
self.assertFalse(self.validator.is_valid(4))
self.assertDictEqual(self.validator.messages, {Email.NOT_MAIL: "'4' is not a valid email address."})
class TestIPAddress(TestCase):
def setUp(self):
self.validator = IPAddress()
def tearDown(self):
pass
def test_validate_str_ipv4_success(self):
self.assertTrue(self.validator.is_valid("192.168.2.2"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_ipv4_fail(self):
self.assertFalse(self.validator.is_valid("192.168.2.277"))
self.assertDictEqual(self.validator.messages,
{IPAddress.NOT_IP_ADDRESS:
"'192.168.2.277' does not appear to be a valid IP address. Allowed ipv4"})
def test_validate_str_ipv6_not_allowed_fail(self):
self.assertFalse(self.validator.is_valid("2001:0db8:85a3:08d3:1319:8a2e:0370:7334"))
self.assertDictEqual(self.validator.messages,
{IPAddress.IPV6_NOT_ALLOWED:
"'2001:0db8:85a3:08d3:1319:8a2e:0370:7334' is " +
"an ipv6 address that is not allowed. Allowed ipv4"})
def test_validate_str_ipv6_success(self):
self.validator = IPAddress(ipv4=False, ipv6=True)
self.assertTrue(self.validator.is_valid("2001:0db8:85a3:08d3:1319:8a2e:0370:7334"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_ipv6_reduced_success(self):
self.validator = IPAddress(ipv4=False, ipv6=True)
self.assertTrue(self.validator.is_valid("2001:0db8:85a3::8a2e:0370:7334"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_ipv6_reduced_localhost_success(self):
self.validator = IPAddress(ipv4=False, ipv6=True)
self.assertTrue(self.validator.is_valid("::1"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_ipv6_fail(self):
self.validator = IPAddress(ipv4=False, ipv6=True)
self.assertFalse(self.validator.is_valid("2001:0db8:85a3:08d3:1319:8a2e:0370:733T"))
self.assertDictEqual(self.validator.messages,
{IPAddress.NOT_IP_ADDRESS:
"'2001:0db8:85a3:08d3:1319:8a2e:0370:733T' does " +
"not appear to be a valid IP address. Allowed ipv6"})
def test_validate_str_ipv6_too_large_fail(self):
self.validator = IPAddress(ipv4=False, ipv6=True)
self.assertFalse(self.validator.is_valid("2001:0db8:85a3:08d3:1319:8a2e:0370:7333:3333:3333"))
self.assertDictEqual(self.validator.messages,
{IPAddress.NOT_IP_ADDRESS:
"'2001:0db8:85a3:08d3:1319:8a2e:0370:7333:3333:3333' does " +
"not appear to be a valid IP address. Allowed ipv6"})
def test_validate_str_ipv6_too_big_fail(self):
self.validator = IPAddress(ipv4=False, ipv6=True)
self.assertFalse(self.validator.is_valid("2001:0db8:85a3:08d3:1319:8a2e:0370:7333FFF"))
self.assertDictEqual(self.validator.messages,
{IPAddress.NOT_IP_ADDRESS:
"'2001:0db8:85a3:08d3:1319:8a2e:0370:7333FFF' does " +
"not appear to be a valid IP address. Allowed ipv6"})
def test_validate_str_ipv6_bad_white_spaces_fail(self):
self.validator = IPAddress(ipv4=False, ipv6=True)
self.assertFalse(self.validator.is_valid(":0db8:"))
self.assertDictEqual(self.validator.messages,
{IPAddress.NOT_IP_ADDRESS:
"':0db8:' does " +
"not appear to be a valid IP address. Allowed ipv6"})
def test_validate_str_ipv4_not_allowed_fail(self):
self.validator = IPAddress(ipv4=False, ipv6=True)
self.assertFalse(self.validator.is_valid("192.168.2.233"))
self.assertDictEqual(self.validator.messages,
{IPAddress.IPV4_NOT_ALLOWED:
"'192.168.2.233' is an ipv4 address that is not allowed. Allowed ipv6"})
def test_validate_str_ipv4_ipv6_using_ipv4_success(self):
self.validator = IPAddress(ipv4=True, ipv6=True)
self.assertTrue(self.validator.is_valid("192.168.2.2"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_ipv4_ipv6_using_ipv6_success(self):
self.validator = IPAddress(ipv4=True, ipv6=True)
self.assertTrue(self.validator.is_valid("2001:0db8:85a3:08d3:1319:8a2e:0370:7334"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_ipv4_ipv6_using_ipv6_reduced_success(self):
self.validator = IPAddress(ipv4=True, ipv6=True)
self.assertTrue(self.validator.is_valid("2001:0db8:85a3::8a2e:0370:7334"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_ipv4_ipv6_using_wrong_ipv4_fail(self):
self.validator = IPAddress(ipv4=True, ipv6=True)
self.assertFalse(self.validator.is_valid("192.168.2.277"))
self.assertDictEqual(self.validator.messages,
{IPAddress.NOT_IP_ADDRESS:
"'192.168.2.277' does not appear to be a valid IP address. Allowed ipv4 and ipv6"})
def test_validate_str_ipv4_ipv6_using_wrong_ipv6_fail(self):
self.validator = IPAddress(ipv4=True, ipv6=True)
self.assertFalse(self.validator.is_valid("2001:0db8:85a3:08d3:1319:8a2e:0370:733T"))
self.assertDictEqual(self.validator.messages,
{IPAddress.NOT_IP_ADDRESS:
"'2001:0db8:85a3:08d3:1319:8a2e:0370:733T' does not " +
"appear to be a valid IP address. Allowed ipv4 and ipv6"})
def test_validate_int_fail(self):
self.validator = IPAddress(ipv4=True, ipv6=True)
self.assertFalse(self.validator.is_valid(2323))
self.assertDictEqual(self.validator.messages,
{IPAddress.NOT_IP_ADDRESS:
"'2323' does not appear to be a valid IP address. Allowed ipv4 and ipv6"})
def test_bad_definition(self):
with self.assertRaises(ValueError):
self.validator = IPAddress(ipv4=False, ipv6=False)
class TestMacAddress(TestCase):
def setUp(self):
self.validator = MacAddress()
def tearDown(self):
pass
def test_validate_str_success(self):
self.assertTrue(self.validator.is_valid("01:23:45:67:89:ab"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_fail(self):
self.assertFalse(self.validator.is_valid("[email protected]"))
self.assertDictEqual(self.validator.messages,
{MacAddress.INVALID_MAC_ADDRESS: "'[email protected]' is not a valid mac address."})
def test_validate_int_fail(self):
self.assertFalse(self.validator.is_valid(4))
self.assertDictEqual(self.validator.messages,
{MacAddress.INVALID_MAC_ADDRESS: "'4' is not a valid mac address."})
class TestURL(TestCase):
def setUp(self):
self.validator = URL()
def tearDown(self):
pass
def test_validate_str_required_tld_http_success(self):
self.assertTrue(self.validator.is_valid("http://www.google.com"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_required_tld_git_success(self):
self.assertTrue(self.validator.is_valid("git://github.com"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_no_protocol_fail(self):
self.assertFalse(self.validator.is_valid("google.com"))
self.assertDictEqual(self.validator.messages, {URL.INVALID_URL: "'google.com' is not a valid url."})
def test_validate_int_fail(self):
self.assertFalse(self.validator.is_valid(4))
self.assertDictEqual(self.validator.messages, {URL.INVALID_URL: "'4' is not a valid url."})
def test_validate_str_not_required_tld_http_success(self):
self.validator = URL(require_tld=False)
self.assertTrue(self.validator.is_valid("http://google"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_not_required_tld_git_success(self):
self.validator = URL(require_tld=False)
self.assertTrue(self.validator.is_valid("git://github"))
self.assertDictEqual(self.validator.messages, {})
class TestUUID(TestCase):
def setUp(self):
self.validator = UUID()
def tearDown(self):
pass
def test_validate_str_success(self):
self.assertTrue(self.validator.is_valid("550e8400-e29b-41d4-a716-446655440000"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_fail(self):
self.assertFalse(self.validator.is_valid("[email protected]"))
self.assertDictEqual(self.validator.messages, {UUID.INVALID_UUID: "'[email protected]' is not a valid UUID."})
def test_validate_int_fail(self):
self.assertFalse(self.validator.is_valid(4))
self.assertDictEqual(self.validator.messages, {UUID.INVALID_UUID: "'4' is not a valid UUID."})
class TestAnyOf(TestCase):
def setUp(self):
self.validator = AnyOf(values=[1, "2", "aaas", "ouch"])
def tearDown(self):
pass
def test_validate_str_success(self):
self.assertTrue(self.validator.is_valid("aaas"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_int_success(self):
self.assertTrue(self.validator.is_valid(1))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_fail(self):
self.assertFalse(self.validator.is_valid("lass"))
self.assertDictEqual(self.validator.messages, {AnyOf.NOT_IN_LIST: "'lass' is none of 1, '2', 'aaas', 'ouch'."})
def test_validate_int_as_str_fail(self):
self.assertFalse(self.validator.is_valid(2))
self.assertDictEqual(self.validator.messages, {AnyOf.NOT_IN_LIST: "'2' is none of 1, '2', 'aaas', 'ouch'."})
class TestNoneOf(TestCase):
def setUp(self):
self.validator = NoneOf(values=[1, "2", "aaas", "ouch"])
def tearDown(self):
pass
def test_validate_str_success(self):
self.assertTrue(self.validator.is_valid("aaaaaas"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_int_success(self):
self.assertTrue(self.validator.is_valid(9))
self.assertDictEqual(self.validator.messages, {})
def test_validate_int_as_str_success(self):
self.assertTrue(self.validator.is_valid(2))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_fail(self):
self.assertFalse(self.validator.is_valid("ouch"))
self.assertDictEqual(self.validator.messages, {NoneOf.IN_LIST: "'ouch' is one of 1, '2', 'aaas', 'ouch'."})
class TestEmpty(TestCase):
def setUp(self):
self.validator = IsEmpty()
def test_validate_str_empty(self):
self.assertTrue(self.validator.is_valid(""))
self.assertDictEqual(self.validator.messages, {})
def test_validate_class_empty(self):
class EmptyClass:
def __len__(self):
return 0
self.assertTrue(self.validator.is_valid(EmptyClass()))
self.assertDictEqual(self.validator.messages, {})
def test_validate_not_empty_class(self):
class NotEmptyClass:
def __repr__(self):
return "NotEmptyClass"
self.assertFalse(self.validator.is_valid(NotEmptyClass()))
self.assertDictEqual(self.validator.messages, {IsEmpty.EMPTY: "'NotEmptyClass' must be empty"})
def test_validate_none_ok(self):
self.assertTrue(self.validator.is_valid(None))
self.assertDictEqual(self.validator.messages, {})
def test_float_ok(self):
self.assertTrue(self.validator.is_valid(0.0))
class TestNotEmpty(TestCase):
def setUp(self):
self.validator = NotEmpty()
def test_validate_str_empty(self):
self.assertFalse(self.validator.is_valid(""))
self.assertDictEqual(self.validator.messages,
{NotEmpty.NOT_EMPTY: "Value can not be empty"})
def test_validate_class_empty(self):
class EmptyClass:
def __len__(self):
return 0
self.assertFalse(self.validator.is_valid(EmptyClass()))
def test_validate_not_empty_class(self):
class NotEmptyClass:
pass
self.assertTrue(self.validator.is_valid(NotEmptyClass()))
self.assertDictEqual(self.validator.messages, {})
def test_validate_none_raises(self):
self.assertFalse(self.validator.is_valid(None))
def test_float_raises(self):
self.assertFalse(self.validator.is_valid(0.0))
class TestNotEmptyString(TestCase):
def setUp(self):
self.validator = NotEmptyString()
def test_validate_str_empty(self):
self.assertFalse(self.validator.is_valid(""))
self.assertDictEqual(self.validator.messages,
{NotEmptyString.NOT_EMPTY: "Value can not be empty"})
def test_validate_str_more_whites_empty(self):
self.assertFalse(self.validator.is_valid(" "))
self.assertDictEqual(self.validator.messages,
{NotEmptyString.NOT_EMPTY: "Value can not be empty"})
def test_validate_not_str(self):
self.assertFalse(self.validator.is_valid(3))
self.assertDictEqual(self.validator.messages,
{NotEmptyString.NOT_STRING: "Value must be a string"})
def test_validate_not_empty(self):
self.assertTrue(self.validator.is_valid("Batman"))
class TestIsNone(TestCase):
def setUp(self):
self.validator = IsNone()
def test_validate_str_empty(self):
self.assertFalse(self.validator.is_valid(""))
self.assertDictEqual(self.validator.messages,
{IsNone.NONE: "'' must be None"})
def test_validate_class_empty(self):
class EmptyClass:
def __len__(self):
return 0
self.assertFalse(self.validator.is_valid(EmptyClass()))
def test_validate_none(self):
self.assertTrue(self.validator.is_valid(None))
def test_float_raises(self):
self.assertFalse(self.validator.is_valid(0.0))
class TestIsNotNone(TestCase):
def setUp(self):
self.validator = NotNone()
def test_validate_none_raises(self):
self.assertFalse(self.validator.is_valid(None))
self.assertDictEqual(self.validator.messages,
{NotNone.NOT_NONE: NotNone.error_messages[NotNone.NOT_NONE]})
def test_empty_class_ok(self):
class EmptyClass:
def __len__(self):
return 0
self.assertTrue(self.validator.is_valid(EmptyClass()))
self.assertDictEqual(self.validator.messages, {})<|fim▁end|>
|
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_fail(self):
|
<|file_name|>LMCONFIG.H<|end_file_name|><|fim▁begin|>/* Copyright (C) 1986-2001 by Digital Mars. */
#if __SC__ || __RCC__
#pragma once
#endif
#ifndef RC_INVOKED
#pragma pack(__DEFALIGN)<|fim▁hole|>#endif
#include <win32\scdefs.h>
#include <win32\LMCONFIG.H>
#ifndef RC_INVOKED
#pragma pack()
#endif<|fim▁end|>
| |
<|file_name|>auth.js<|end_file_name|><|fim▁begin|>var GoogleStrategy = require('passport-google-oauth').OAuth2Strategy;
var util = require('util');
var session = require('express-session');
var passport = require('passport');
module.exports = (app, url, appEnv, User) => {
app.use(session({
secret: process.env.SESSION_SECRET,
name: 'freelancalot',
proxy: true,
resave: true,
saveUninitialized: true
}));
app.use(passport.initialize());
app.use(passport.session());
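// Sessions store only the user id (serializeUser); the full user record is
// reloaded from the database on each request (deserializeUser).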
passport.serializeUser((user, done) => {
var id = user.get('id');
console.log('serializeUser: ' + id)
done(null, id);
});
passport.deserializeUser((id, done) => {
User.findById(id).then((user) => {
done(null, user);
})
});
var googleOAuth = appEnv.getService('googleOAuth'),
googleOAuthCreds = googleOAuth.credentials;
passport.use(new GoogleStrategy({
clientID: googleOAuthCreds.clientID,
clientSecret: googleOAuthCreds.clientSecret,
callbackURL: util.format("http://%s%s", url, googleOAuthCreds.callbackPath)
},
(token, refreshToken, profile, done) => {
process.nextTick(() => {
User.findOrCreate({
where: {
googleId: profile.id
},
defaults: {
name: profile.displayName,
email: profile.emails[0].value,
photo: profile.photos[0].value
}
})
.spread((user, created) => {
done(null, user);
})
});
}));
<|fim▁hole|>
app.get('/auth/google/callback',
passport.authenticate('google', {
successRedirect: '/'
}));
app.get('/logout', (req, res) => {
req.logout();
res.redirect('/');
});
}<|fim▁end|>
|
app.get('/auth/google', passport.authenticate('google', {
scope: ['profile', 'email']
}));
|
<|file_name|>chanvese.py<|end_file_name|><|fim▁begin|># http://www.creatis.insa-lyon.fr/~bernard/creaseg/
# http://ascratchpad.blogspot.com/2011/03/image-segmentation-using-active.html
#------------------------------------------------------------------------
# Region Based Active Contour Segmentation
#
# seg = region_seg(I,init_mask,max_its,alpha,display)
#
# Inputs: I 2D image
# init_mask Initialization (1 = foreground, 0 = bg)
# max_its Number of iterations to run segmentation for
# alpha (optional) Weight of smoothing term
# higer = smoother. default = 0.2
# display (optional) displays intermediate outputs
# default = true
#
# Outputs: seg Final segmentation mask (1=fg, 0=bg)
#
# Description: This code implements the paper: "Active Contours Without
# Edges" By Chan Vese. This is a nice way to segment images whose
# foregrounds and backgrounds are statistically different and homogeneous.
#
# Example:
# img = imread('tire.tif');
# m = zeros(size(img));
# m(33:33+117,44:44+128) = 1;
# seg = region_seg(img,m,500);
#
# Coded by: Shawn Lankton (www.shawnlankton.com)
#------------------------------------------------------------------------
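# A rough Python equivalent of the MATLAB example above (the file name and
# mask coordinates are copied from it; treat this as an illustrative sketch):
#   img = nd.imread('tire.tif')
#   m = np.zeros(img.shape)
#   m[33:33+117, 44:44+128] = 1
#   seg, phi, its = chanvese(img, m, max_its=500)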
import numpy as np
import scipy.ndimage as nd
import matplotlib.pyplot as plt
eps = np.finfo(np.float).eps
def chanvese(I,init_mask,max_its=200,alpha=0.2,thresh=0,color='r',display=False):
I = I.astype('float')
#-- Create a signed distance map (SDF) from mask
phi = mask2phi(init_mask)
if display:
plt.ion()
showCurveAndPhi(I, phi, color)
plt.savefig('levelset_start.pdf',bbox_inches='tight')
#--main loop
its = 0
stop = False
prev_mask = init_mask
c = 0
while (its < max_its and not stop):
# get the curve's narrow band
idx = np.flatnonzero( np.logical_and( phi <= 1.2, phi >= -1.2) )
if len(idx) > 0:
#-- intermediate output
if display:
if np.mod(its,50) == 0:
#set(ud.txtInfo1,'string',sprintf('iteration: %d',its),'color',[1 1 0]);
print 'iteration:', its
showCurveAndPhi(I, phi, color)
else:
if np.mod(its,10) == 0:
print 'iteration:', its
#set(ud.txtInfo1,'string',sprintf('iteration: %d',its),'color',[1 1 0]);
#drawnow;
#-- find interior and exterior mean
upts = np.flatnonzero(phi<=0) # interior points
vpts = np.flatnonzero(phi>0) # exterior points
u = np.sum(I.flat[upts])/(len(upts)+eps) # interior mean
v = np.sum(I.flat[vpts])/(len(vpts)+eps) # exterior mean
F = (I.flat[idx]-u)**2-(I.flat[idx]-v)**2 # force from image information
curvature = get_curvature(phi,idx) # force from curvature penalty
dphidt = F /np.max(np.abs(F)) + alpha*curvature # gradient descent to minimize energy
#-- maintain the CFL condition
dt = 0.45/(np.max(np.abs(dphidt))+eps)
#-- evolve the curve
phi.flat[idx] += dt*dphidt
#-- Keep SDF smooth
phi = sussman(phi, 0.5)
new_mask = phi<=0
c = convergence(prev_mask,new_mask,thresh,c)
if c <= 5:
its = its + 1
prev_mask = new_mask
else: stop = True
else:
break
#-- final output
if display:
showCurveAndPhi(I, phi, color)
#plt.savefig('levelset_end.pdf',bbox_inches='tight')
time.sleep(10)
#-- make mask from SDF
seg = phi<=0 #-- Get mask from levelset
return seg,phi,its
#---------------------------------------------------------------------
#---------------------------------------------------------------------
#-- AUXILIARY FUNCTIONS ----------------------------------------------
#---------------------------------------------------------------------
#---------------------------------------------------------------------
def bwdist(a):
"""
this is an intermediary function, 'a' has only True, False vals,
so we convert them into 0, 1 values -- in reverse. True is 0,
False is 1, distance_transform_edt wants it that way.
"""
return nd.distance_transform_edt(a == 0)
import time
#-- Displays the image with curve superimposed
def showCurveAndPhi(I, phi, color):
# subplot(numRows, numCols, plotNum)
#myplot = plt.subplot(121)
#fig, axes = plt.subplots()
#axes = myplot.axes
#axes.get_xaxis().set_visible(False)
#axes.get_yaxis().set_visible(False)
plt.clf()
plt.imshow(I, cmap='gray')
#plt.hold(True)
CS = plt.contour(phi, 0, colors=color)
plt.draw()
#plt.hold(False)
# myplot = plt.subplot(122)
# axes = myplot.axes
# axes.get_xaxis().set_visible(False)
# axes.get_yaxis().set_visible(False)
# plt.imshow(phi)
plt.draw()
#time.sleep(1)
def im2double(a):
a = a.astype('float')
a /= a.max()
return a
#-- converts a mask to a SDF
def mask2phi(init_a):
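    # phi is negative inside the mask and positive outside; the
    # "+ im2double(init_a) - 0.5" term shifts the zero level so the contour
    # sits between boundary pixels rather than on them.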
phi = bwdist(init_a)-bwdist(1-init_a)+im2double(init_a) -0.5
return phi
#-- compute curvature along SDF
def get_curvature(phi,idx):
dimy, dimx = phi.shape
yx = np.array([np.unravel_index(i, phi.shape) for i in idx]) # get subscripts
y = yx[:,0]
x = yx[:,1]
#-- get subscripts of neighbors
ym1 = y-1; xm1 = x-1; yp1 = y+1; xp1 = x+1;
#-- bounds checking
ym1[ym1<0] = 0; xm1[xm1<0] = 0;
yp1[yp1>=dimy]=dimy - 1; xp1[xp1>=dimx] = dimx - 1;
#-- get indexes for 8 neighbors
idup = np.ravel_multi_index( (yp1,x),phi.shape)
iddn = np.ravel_multi_index( (ym1,x),phi.shape)
idlt = np.ravel_multi_index( (y,xm1),phi.shape)
idrt = np.ravel_multi_index( (y,xp1),phi.shape)
idul = np.ravel_multi_index( (yp1,xm1),phi.shape)
idur = np.ravel_multi_index( (yp1,xp1),phi.shape)
iddl = np.ravel_multi_index( (ym1,xm1),phi.shape)
iddr = np.ravel_multi_index( (ym1,xp1),phi.shape)
#-- get central derivatives of SDF at x,y
phi_x = -phi.flat[idlt]+phi.flat[idrt]
phi_y = -phi.flat[iddn]+phi.flat[idup]<|fim▁hole|> phi_yy = phi.flat[iddn]-2*phi.flat[idx]+phi.flat[idup]
phi_xy = (-0.25*phi.flat[iddl]-0.25*phi.flat[idur]
+0.25*phi.flat[iddr]+0.25*phi.flat[idul])
phi_x2 = phi_x**2
phi_y2 = phi_y**2
#-- compute curvature (Kappa)
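    # kappa = (phi_x^2*phi_yy - 2*phi_x*phi_y*phi_xy + phi_y^2*phi_xx)
    #         / (phi_x^2 + phi_y^2)^(3/2), i.e. div(grad(phi)/|grad(phi)|),
    # scaled here by an extra factor of |grad(phi)|.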
curvature = ( ((phi_x2*phi_yy + phi_y2*phi_xx - 2*phi_x*phi_y*phi_xy)
/ (phi_x2 + phi_y2 + eps)**1.5)
*(phi_x2 + phi_y2)**0.5 ) # float exponents: 3/2 and 1/2 are integer division (1 and 0) in Python 2
return curvature
#-- level set re-initialization by the sussman method
def sussman(D, dt):
# forward/backward differences
a = D - shiftR(D) # backward
b = shiftL(D) - D # forward
c = D - shiftD(D) # backward
d = shiftU(D) - D # forward
a_p = a.copy(); a_n = a.copy(); # a+ and a-
b_p = b.copy(); b_n = b.copy();
c_p = c.copy(); c_n = c.copy();
d_p = d.copy(); d_n = d.copy();
a_p[a < 0] = 0
a_n[a > 0] = 0
b_p[b < 0] = 0
b_n[b > 0] = 0
c_p[c < 0] = 0
c_n[c > 0] = 0
d_p[d < 0] = 0
d_n[d > 0] = 0
dD = np.zeros(D.shape)
D_neg_ind = np.flatnonzero(D < 0)
D_pos_ind = np.flatnonzero(D > 0)
dD.flat[D_pos_ind] = np.sqrt( np.max( np.concatenate( ([a_p.flat[D_pos_ind]**2],
[b_n.flat[D_pos_ind]**2]) ),
axis=0
)
+ np.max( np.concatenate( ([c_p.flat[D_pos_ind]**2],
[d_n.flat[D_pos_ind]**2])),
axis=0
)
) - 1
dD.flat[D_neg_ind] = np.sqrt( np.max( np.concatenate( ([a_n.flat[D_neg_ind]**2],
[b_p.flat[D_neg_ind]**2])),
axis=0
)
+ np.max( np.concatenate( ([c_n.flat[D_neg_ind]**2],
[d_p.flat[D_neg_ind]**2]) ),
axis=0
)
) - 1
D = D - dt * sussman_sign(D) * dD
return D
#-- whole matrix derivatives
def shiftD(M):
return shiftR(M.transpose()).transpose()
def shiftL(M):
#shift = np.concatenate( (M[:,1:], np.zeros((M.shape[1],1))), axis=1 )
#shift = np.concatenate( (M[:,1:], M[:,-1]), axis=1 )
shift = M[:,range(1,M.shape[1])+[M.shape[1]-1]]
return shift
def shiftR(M):
#shift = np.concatenate( (np.zeros((M.shape[1],1)), M[:,:-1]), axis=1 )
#shift = np.concatenate( (M[:,0], M[:,:-1]), axis=1 )
shift = M[:,[0]+range(0,M.shape[1]-1)]
return shift
def shiftU(M):
return shiftL(M.transpose()).transpose()
def sussman_sign(D):
return D / np.sqrt(D**2 + 1)
# Convergence Test
def convergence(p_mask,n_mask,thresh,c):
diff = p_mask - n_mask
n_diff = np.sum(np.abs(diff))
if n_diff < thresh:
c = c + 1
else:
c = 0
return c
if __name__ == "__main__":
import cv2
img = cv2.imread("/home/kevin/Imperial/PhD/DATASETS/Training/positive/246_cropped_c/8.png_0022_0115_0117_0132_0132_0.png",0)
#img = nd.imread('sagittal8.png')
mask = np.zeros(img.shape)
mask[55:65,55:65] = 1
chanvese(img,mask,max_its=2000,display=True,alpha=0.1)<|fim▁end|>
|
phi_xx = phi.flat[idlt]-2*phi.flat[idx]+phi.flat[idrt]
|
<|file_name|>BannerRow.js<|end_file_name|><|fim▁begin|>import React from 'react';
import { BannerRow, H2, Button } from '@appbaseio/designkit';
import PropTypes from 'prop-types';
import { css } from 'react-emotion';
import { SecondaryLink } from '../styles';
const style = css`
p {
color: #ffffff;
font-weight: 300;
}
`;
const button = {
fontSize: '14px',
lineHeight: '19px',
fontWeight: 'bold',
};
const Banner = ({ config, theme, configName }) => (
<BannerRow>
{config.map((b, i) => (
<BannerRow.Column
key={
// eslint-disable-next-line
i
}
className={style}
style={{
backgroundColor: b.backgroundColor,
}}
>
<div>
<H2 light>{b.title}</H2>
<p>{b.description}</p>
<div className="button-row center">
<Button
href={b.button.href}
uppercase
big
primary={configName !== 'vue'}
bold
style={{
backgroundColor: theme.secondary,
...button,
}}
>
{b.button.title}
</Button>
<SecondaryLink href={b.link.href}>{b.link.title}</SecondaryLink>
</div>
</div>
</BannerRow.Column><|fim▁hole|> </BannerRow>
);
Banner.defaultProps = {
configName: 'web',
};
Banner.propTypes = {
// eslint-disable-next-line
theme: PropTypes.object,
configName: PropTypes.string,
config: PropTypes.arrayOf(PropTypes.shape({
backgroundColor: PropTypes.string,
title: PropTypes.string,
description: PropTypes.string,
button: PropTypes.shape({
title: PropTypes.string,
href: PropTypes.string,
}),
link: PropTypes.shape({
title: PropTypes.string,
href: PropTypes.string,
}),
})).isRequired,
};
export default Banner;<|fim▁end|>
|
))}
|
<|file_name|>MediaDisplayListener.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2010-2015 JPEXS
<|fim▁hole|> * This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.jpexs.decompiler.flash.gui.player;
/**
*
* @author JPEXS
*/
public interface MediaDisplayListener {
void mediaDisplayStateChanged(MediaDisplay source);
void playingFinished(MediaDisplay source);
}<|fim▁end|>
|
*
|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf import settings
from django.conf.urls import static
from django.urls import include, path, re_path
from django.contrib import admin
urlpatterns = [
path(r"admin/", admin.site.urls),
path(r"flickr/", include("ditto.flickr.urls")),
path(r"lastfm/", include("ditto.lastfm.urls")),
path(r"pinboard/", include("ditto.pinboard.urls")),
path(r"twitter/", include("ditto.twitter.urls")),
path(r"", include("ditto.core.urls")),
]
<|fim▁hole|> import debug_toolbar
urlpatterns += [
re_path(r"^__debug__/", include(debug_toolbar.urls)),
]
urlpatterns += static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += static.static(
settings.STATIC_URL, document_root=settings.STATIC_ROOT
)<|fim▁end|>
|
if settings.DEBUG:
|
<|file_name|>build.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>extern crate ispc;
fn main() {
// This build script shows how to target a specific of vector ISAs
// using the target_isas function. We can also compile for all ISAs,
// in which case ISPC will internally dispatch the function calls to
// the correct ISA for the host system
ispc::Config::new().file("src/ao.ispc").compile("ao");
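    // A hypothetical multi-ISA build using the target_isas function mentioned
    // above (the exact TargetISA variant names are assumptions; check the
    // ispc crate docs before using):
    //
    // ispc::Config::new()
    //     .file("src/ao.ispc")
    //     .target_isas(vec![ispc::opt::TargetISA::SSE4i32x4,
    //                       ispc::opt::TargetISA::AVX2i32x8])
    //     .compile("ao");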
}<|fim▁end|>
| |
<|file_name|>pas.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
Main command line script of the pas package.
The main function contained in this module is used ai main entry point for the
pas command line utility.
The script is automatically created by setuptool, but this file can be
directly invoked with `python path/to/pas.py` or directly if its executable
flag is set.
"""
import itertools
import logging
import logging.handlers
import os
import sys
# pylint: disable-msg=E0611
# I know relative imports are not the holy grail, but here we need them and
# it is a pylint bug not to recognized empty parent paths.
from .. import commands # Relative imports to avoid name clashing
from ..conf import settings # Relative imports to avoid name clashing
# pylint: enable-msg=E0611
# Reenable unknown name detection
from fabric.state import connections
# pylint: disable-msg=W0105
# Docstring for variables are not recognized by pylint, but epydoc parses them
LOGFILE = os.getenv('PAS_LOGFILE') or 'pas.log'
"""Logfile name, settable using the PAS_LOGFILE env variable"""
VERBOSITY = logging.INFO
"""Default verbosity for console output"""
def main():<|fim▁hole|> """
# Configure logging
file_formatter = logging.Formatter("%(asctime)s - %(levelname)10s - " \
"%(message)s (%(pathname)s:%(lineno)d)")
console_formatter = logging.Formatter("%(levelname)10s: %(message)s")
# All console output not explicitly directed to the user should be a log
# message instead
console_handler = logging.StreamHandler(sys.stdout)
console_handler.setFormatter(console_formatter)
console_handler.setLevel(20) # Don't show debug log messages until the
# verbosity is set
# Buffer the logging until no errors happen
buffered_handler = logging.handlers.MemoryHandler(9999, logging.CRITICAL)
# Capture all logging output and write it to the specified log file
file_handler = logging.FileHandler(LOGFILE, 'w', delay=True)
file_handler.setFormatter(file_formatter)
file_handler.setLevel(40)
logger = logging.getLogger()
logger.setLevel(1)
logger.addHandler(console_handler)
logger.addHandler(buffered_handler)
# Build base parser
parser = commands.build_mainparser()
arguments = itertools.takewhile(lambda x: x.startswith('-'), sys.argv[1:])
arguments = (arg for arg in arguments if arg not in ('-h', '--help'))
command_line = sys.argv[:1] + list(arguments)
# Parse the base arguments (verbosity and settings)
args, remaining = parser.parse_known_args(command_line)
buffered_handler.setTarget(file_handler)
# Get the verbosity level
verbosity = max(1, VERBOSITY - 10 * (len(args.verbose) - len(args.quiet)))
console_handler.setLevel(verbosity)
file_handler.setLevel(1)
paramiko_logger = logging.getLogger('paramiko.transport')
paramiko_logger.setLevel(verbosity + 10)
# Load settings
try:
settings.loadfrompath(path=args.settings)
nosettings = False
except ImportError:
from ..conf import basesettings
settings.load(basesettings)
nosettings = True
# Build complete parser
parser = commands.build_subparsers(parser)
# Parse arguments
command = args = parser.parse_args()
res = 0
# Check that settings where loaded if needed
if not getattr(command.execute, 'nosettings', False) and nosettings:
logger.critical("This command requires the settings module to be " \
"present on path or defined using the " \
"PAS_SETTINGS_MODULE environment variable.")
res = 1
# Execute command
if not res:
res = command.execute(args)
# Cleanup fabric connections if needed
for key in connections.keys():
connections[key].close()
del connections[key]
# Check execution result
if res:
# ...an error occurred, write the logfile
buffered_handler.flush()
print
print "pas exited with a non-zero exit status (%d). A complete log " \
"was stored in the %s file." % (res, LOGFILE)
print
else:
# ...no errors occurred, avoid to flush the buffer
buffered_handler.setTarget(None)
# Need to close the buffered handler before sysexit is called or it will
# result in an exception
buffered_handler.close()
return res
if __name__ == '__main__':
sys.exit(main())<|fim▁end|>
|
"""
First function called upon command line invocation. Builds the command
line parser, parses the arguments, configures logging and invokes the
command.
|
<|file_name|>QyweixinMessageClient.java<|end_file_name|><|fim▁begin|>/**
*
*/
package com.sqsoft.mars.qyweixin.client.cgibin.message;
import com.sqsoft.mars.qyweixin.client.QyweixinDefaultClient;
/**
* @author lenovo
*
*/
public class QyweixinMessageClient extends QyweixinDefaultClient {
/**
*
*/
private String access_token;
/**
*
* @param access_token
*/
public QyweixinMessageClient(String access_token) {
super();
this.access_token = access_token;
}<|fim▁hole|> * @see com.sqsoft.mars.qyweixin.client.DefaultQyweixinClient#getUrl()
*/
@Override
protected String getUrl() {
return "https://qyapi.weixin.qq.com/cgi-bin/message/{method}?access_token=" + access_token;
}
}<|fim▁end|>
|
/* (non-Javadoc)
|
<|file_name|>transaction.py<|end_file_name|><|fim▁begin|>import threading
from collections import defaultdict
from funcy import once, decorator<|fim▁hole|>from django.db import DEFAULT_DB_ALIAS, DatabaseError
from django.db.backends.utils import CursorWrapper
from django.db.transaction import Atomic, get_connection, on_commit
from .utils import monkey_mix
__all__ = ('queue_when_in_transaction', 'install_cacheops_transaction_support',
'transaction_states')
class TransactionState(list):
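    # A stack of open transaction/savepoint contexts for one DB alias.
    # Callbacks queued inside a savepoint bubble up to the enclosing context
    # on commit, and only run once the outermost transaction commits.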
def begin(self):
self.append({'cbs': [], 'dirty': False})
def commit(self):
context = self.pop()
if self:
# savepoint
self[-1]['cbs'].extend(context['cbs'])
self[-1]['dirty'] = self[-1]['dirty'] or context['dirty']
else:
# transaction
for func, args, kwargs in context['cbs']:
func(*args, **kwargs)
def rollback(self):
self.pop()
def push(self, item):
self[-1]['cbs'].append(item)
def mark_dirty(self):
self[-1]['dirty'] = True
def is_dirty(self):
return any(context['dirty'] for context in self)
class TransactionStates(threading.local):
def __init__(self):
super(TransactionStates, self).__init__()
self._states = defaultdict(TransactionState)
def __getitem__(self, key):
return self._states[key or DEFAULT_DB_ALIAS]
def is_dirty(self, dbs):
return any(self[db].is_dirty() for db in dbs)
transaction_states = TransactionStates()
@decorator
def queue_when_in_transaction(call):
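    # If a transaction is open on this DB alias, defer the call until the
    # outermost transaction commits; otherwise execute it immediately.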
if transaction_states[call.using]:
transaction_states[call.using].push((call, (), {}))
else:
return call()
class AtomicMixIn(object):
def __enter__(self):
entering = not transaction_states[self.using]
transaction_states[self.using].begin()
self._no_monkey.__enter__(self)
if entering:
on_commit(transaction_states[self.using].commit, self.using)
def __exit__(self, exc_type, exc_value, traceback):
connection = get_connection(self.using)
try:
self._no_monkey.__exit__(self, exc_type, exc_value, traceback)
except DatabaseError:
transaction_states[self.using].rollback()
else:
if not connection.closed_in_transaction and exc_type is None and \
not connection.needs_rollback:
if transaction_states[self.using]:
transaction_states[self.using].commit()
else:
transaction_states[self.using].rollback()
class CursorWrapperMixin(object):
def callproc(self, procname, params=None):
result = self._no_monkey.callproc(self, procname, params)
if transaction_states[self.db.alias]:
transaction_states[self.db.alias].mark_dirty()
return result
def execute(self, sql, params=None):
result = self._no_monkey.execute(self, sql, params)
if transaction_states[self.db.alias] and is_sql_dirty(sql):
transaction_states[self.db.alias].mark_dirty()
return result
def executemany(self, sql, param_list):
result = self._no_monkey.executemany(self, sql, param_list)
if transaction_states[self.db.alias] and is_sql_dirty(sql):
transaction_states[self.db.alias].mark_dirty()
return result
CHARS = set('abcdefghijklmnoprqstuvwxyz_')
def is_sql_dirty(sql):
# This should not happen as using bytes in Python 3 is against db protocol,
# but some people will pass it anyway
if isinstance(sql, bytes):
sql = sql.decode()
# NOTE: not using regex here for speed
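    # e.g. "UPDATE foo SET ..." is dirty, while "SELECT * FROM updates" is
    # not: the boundary checks below reject matches inside longer identifiers.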
sql = sql.lower()
for action in ('update', 'insert', 'delete'):
p = sql.find(action)
if p == -1:
continue
start, end = p - 1, p + len(action)
if (start < 0 or sql[start] not in CHARS) and (end >= len(sql) or sql[end] not in CHARS):
return True
else:
return False
@once
def install_cacheops_transaction_support():
monkey_mix(Atomic, AtomicMixIn)
monkey_mix(CursorWrapper, CursorWrapperMixin)<|fim▁end|>
| |
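A quick sketch of how the `queue_when_in_transaction` decorator above behaves at runtime. Only the decorator, `DEFAULT_DB_ALIAS` and `transaction_states` come from the module; `touch_cache` is a hypothetical function invented for this illustration.

# Hypothetical consumer of queue_when_in_transaction (sketch only).
@queue_when_in_transaction
def touch_cache(key, using=DEFAULT_DB_ALIAS):
    print('invalidating', key)

touch_cache('users')  # outside atomic(): executes immediately
# Inside an atomic() block the call is pushed onto the current
# TransactionState instead and only runs when the outermost transaction
# commits; a rollback silently drops the queued calls.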
<|file_name|>Compression.hpp<|end_file_name|><|fim▁begin|>/**
* @file
* @author Mamadou Babaei <[email protected]>
* @version 0.1.0
*
* @section LICENSE
*
* (The MIT License)
*
* Copyright (c) 2016 - 2021 Mamadou Babaei
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
* @section DESCRIPTION
*
 * Provides zlib, gzip and bzip2 compression / decompression algorithms.
*/
#ifndef CORELIB_COMPRESSION_HPP
#define CORELIB_COMPRESSION_HPP
#include <string>
#include <vector>
namespace CoreLib {
class Compression;
}
class CoreLib::Compression
{
public:
typedef std::vector<char> Buffer;
public:
enum class Algorithm : unsigned char {
Zlib,<|fim▁hole|>public:
static void Compress(const char *data, const size_t size,
Buffer &out_compressedBuffer,
const Algorithm &algorithm);
static void Compress(const std::string &dataString,
Buffer &out_compressedBuffer,
const Algorithm &algorithm);
static void Compress(const Buffer &dataBuffer,
Buffer &out_compressedBuffer,
const Algorithm &algorithm);
static void Decompress(const Buffer &dataBuffer,
std::string &out_uncompressedString,
const Algorithm &algorithm);
static void Decompress(const Buffer &dataBuffer,
Buffer &out_uncompressedBuffer,
const Algorithm &algorithm);
};
#endif /* CORELIB_COMPRESSION_HPP */<|fim▁end|>
|
Gzip,
Bzip2
};
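As an aside, the three algorithms named in the header's description map directly onto Python's standard-library modules, which gives a quick round-trip sanity check of what the C++ class is expected to do. This snippet is illustrative only and independent of the header itself:

# Round-trip sanity check for the three algorithms the header wraps.
import bz2
import gzip
import zlib

data = b'example payload ' * 64
for compress, decompress in ((zlib.compress, zlib.decompress),
                             (gzip.compress, gzip.decompress),
                             (bz2.compress, bz2.decompress)):
    assert decompress(compress(data)) == data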
|
<|file_name|>rest.py<|end_file_name|><|fim▁begin|>from mod_python import apache
from mod_python import util
import os.path
import urllib
import logging
debug = True
def handler(req):
"""
This is called by Apache and maps the request to a resource module.
Process of mapping:
1. Try to import a python script which handles this resource.
The name is determined by the *path_info* (see the mod_python or apache cgi docs for details), while the last path part is treated as the resource ID.
If no script was found, we return HTTP_NOT_FOUND
2. Check if the request method is in the allowedMethodes list of the imported script.
If not, we set the allowed methods and return HTTP_METHOD_NOT_ALLOWED
If the imported script does not define an allowedMethodes list, we return HTTP_NOT_FOUND,
assuming this is not a script to call, but something else.
3. Parse the form data.
#TODO: add support for JSON and XML. Currently only url-form-data is supported.
4. Call METHOD(req, id)
req is the request object,
id is the parsed resource ID (an empty string if none was given);
the parsed form data is available via req.form.
returns the return code from the function
if the method is not defined, we return HTTP_NOT_IMPLEMENTED
"""
#Set log level here. For production, disable both lines.
logging.basicConfig(level=logging.DEBUG) #Used for debugging; very verbose, not recommended for a simple error search.
#logging.basicConfig(level=logging.INFO) #Used for error search with config.
# 1.
try:
(mtype, mid) = req.path_info.lstrip('/').split('/',1)
except ValueError, err:
mtype = req.path_info.lstrip('/')
mid = ''
try:
resourceModule = apache.import_module(mtype.strip('/').replace('/','.'), path=os.path.dirname(__file__))<|fim▁hole|> return apache.HTTP_NOT_FOUND
# 2.
try:
allowedMethodes = resourceModule.allowedMethodes
except AttributeError, err:
if debug: raise
return apache.HTTP_NOT_FOUND
if not req.method in allowedMethodes:
req.allow_methods(resourceModule.allowedMethodes, 1)
return apache.HTTP_METHOD_NOT_ALLOWED
# 3.
if not 'form' in dir(req):
req.form = util.FieldStorage(req, True)
# 4.
try:
return getattr(resourceModule, req.method)(req, urllib.unquote(mid))
except AttributeError, err:
if debug: raise
return apache.HTTP_NOT_IMPLEMENTED
def writeError(req, error, message):
"""Send a error page to client. Replaces http error page."""
req.status = apache.HTTP_FORBIDDEN
req.content_type = 'text/plain'
req.write(message)
return apache.OK<|fim▁end|>
|
except Exception, err:
if debug: raise
|
<|file_name|>productSearch.spec.js<|end_file_name|><|fim▁begin|>describe('Component: Product Search', function(){
var scope,
q,
oc,
state,
_ocParameters,
parameters,
mockProductList
;
beforeEach(module(function($provide) {
$provide.value('Parameters', {searchTerm: null, page: null, pageSize: null, sortBy: null});
}));
beforeEach(module('orderCloud'));
beforeEach(module('orderCloud.sdk'));
beforeEach(inject(function($rootScope, $q, OrderCloud, ocParameters, $state, Parameters){
scope = $rootScope.$new();
q = $q;
oc = OrderCloud;
state = $state;
_ocParameters = ocParameters;
parameters = Parameters;
mockProductList = {
Items:['product1', 'product2'],
Meta:{
ItemRange:[1, 3],
TotalCount: 50
}
};
}));
describe('State: productSearchResults', function(){
var state;
beforeEach(inject(function($state){
state = $state.get('productSearchResults');
spyOn(_ocParameters, 'Get');
spyOn(oc.Me, 'ListProducts');
}));
it('should resolve Parameters', inject(function($injector){
$injector.invoke(state.resolve.Parameters);
expect(_ocParameters.Get).toHaveBeenCalled();
}));
it('should resolve ProductList', inject(function($injector){
parameters.filters = {ParentID:'12'};
$injector.invoke(state.resolve.ProductList);<|fim▁hole|> }));
});
describe('Controller: ProductSearchController', function(){
var productSearchCtrl;
beforeEach(inject(function($state, $controller){
var state = $state;
productSearchCtrl = $controller('ProductSearchCtrl', {
$state: state,
ocParameters: _ocParameters,
$scope: scope,
ProductList: mockProductList
});
spyOn(_ocParameters, 'Create');
spyOn(state, 'go');
}));
describe('filter', function(){
it('should reload state and call ocParameters.Create with any parameters', function(){
productSearchCtrl.parameters = {pageSize: 1};
productSearchCtrl.filter(true);
expect(state.go).toHaveBeenCalled();
expect(_ocParameters.Create).toHaveBeenCalledWith({pageSize:1}, true);
});
});
describe('updateSort', function(){
it('should reload page with value and sort order, if both are defined', function(){
productSearchCtrl.updateSort('!ID');
expect(state.go).toHaveBeenCalled();
expect(_ocParameters.Create).toHaveBeenCalledWith({searchTerm: null, page: null, pageSize: null, sortBy: '!ID'}, false);
});
it('should reload page with just value, if no order is defined', function(){
productSearchCtrl.updateSort('ID');
expect(state.go).toHaveBeenCalled();
expect(_ocParameters.Create).toHaveBeenCalledWith({searchTerm: null, page: null, pageSize: null, sortBy: 'ID'}, false);
});
});
describe('updatePageSize', function(){
it('should reload state with the new pageSize', function(){
productSearchCtrl.updatePageSize('25');
expect(state.go).toHaveBeenCalled();
expect(_ocParameters.Create).toHaveBeenCalledWith({searchTerm: null, page: null, pageSize: '25', sortBy: null}, true);
});
});
describe('pageChanged', function(){
it('should reload state with the new page', function(){
productSearchCtrl.pageChanged('newPage');
expect(state.go).toHaveBeenCalled();
expect(_ocParameters.Create).toHaveBeenCalledWith({searchTerm: null, page: 'newPage', pageSize: null, sortBy: null}, false);
});
});
describe('reverseSort', function(){
it('should reload state with a reverse sort call', function(){
productSearchCtrl.parameters.sortBy = 'ID';
productSearchCtrl.reverseSort();
expect(_ocParameters.Create).toHaveBeenCalledWith({searchTerm: null, page: null, pageSize: null, sortBy: '!ID'}, false);
});
});
});
describe('Component Directive: ordercloudProductSearch', function(){
var productSearchComponentCtrl,
timeout
;
beforeEach(inject(function($componentController, $timeout){
timeout = $timeout;
productSearchComponentCtrl = $componentController('ordercloudProductSearch', {
$state:state,
$timeout: timeout,
$scope: scope,
OrderCloud:oc
});
spyOn(state, 'go');
}));
describe('getSearchResults', function(){
beforeEach(function(){
var defer = q.defer();
defer.resolve();
spyOn(oc.Me, 'ListProducts').and.returnValue(defer.promise);
});
it('should call Me.ListProducts with given search term and max products', function(){
productSearchComponentCtrl.searchTerm = 'Product1';
productSearchComponentCtrl.maxProducts = 12;
productSearchComponentCtrl.getSearchResults();
expect(oc.Me.ListProducts).toHaveBeenCalledWith('Product1', 1, 12);
});
it('should default max products to five, if none is provided', function(){
productSearchComponentCtrl.searchTerm = 'Product1';
productSearchComponentCtrl.getSearchResults();
expect(oc.Me.ListProducts).toHaveBeenCalledWith('Product1', 1, 5);
});
});
describe('onSelect', function(){
it('should route user to productDetail state for the selected product id', function(){
productSearchComponentCtrl.onSelect(12);
expect(state.go).toHaveBeenCalledWith('productDetail', {productid:12});
});
});
describe('onHardEnter', function(){
it('should route user to search results page for the provided search term', function(){
productSearchComponentCtrl.onHardEnter('bikes');
expect(state.go).toHaveBeenCalledWith('productSearchResults', {searchTerm: 'bikes'});
});
});
});
});<|fim▁end|>
|
expect(oc.Me.ListProducts).toHaveBeenCalled();
|
<|file_name|>die-macro-pure.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your<|fim▁hole|>
fn f() {
panic!("test");
}
fn main() {
f();
}<|fim▁end|>
|
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern:test
|
<|file_name|>weather.service.spec.ts<|end_file_name|><|fim▁begin|>import { TestBed, inject } from '@angular/core/testing';
import { WeatherService } from './weather.service';
describe('WeatherService', () => {
beforeEach(() => {
TestBed.configureTestingModule({
providers: [WeatherService]
});
});
<|fim▁hole|> it('should be created', inject([WeatherService], (service: WeatherService) => {
expect(service).toBeTruthy();
}));
});<|fim▁end|>
| |
<|file_name|>twig.core.js<|end_file_name|><|fim▁begin|>// ## twig.core.js
//
// This file handles template level tokenizing, compiling and parsing.
module.exports = function (Twig) {
"use strict";
Twig.trace = false;
Twig.debug = false;
// Default caching to true for the improved performance it offers
Twig.cache = true;
Twig.placeholders = {
parent: "{{|PARENT|}}"
};
/**
* Fallback for Array.indexOf for IE8 et al
*/
Twig.indexOf = function (arr, searchElement /*, fromIndex */ ) {
if (Array.prototype.hasOwnProperty("indexOf")) {
return arr.indexOf(searchElement);
}
if (arr === void 0 || arr === null) {
throw new TypeError();
}
var t = Object(arr);
var len = t.length >>> 0;
if (len === 0) {
return -1;
}
var n = 0;
if (arguments.length > 0) {
n = Number(arguments[1]);
if (n !== n) { // shortcut for verifying if it's NaN
n = 0;
} else if (n !== 0 && n !== Infinity && n !== -Infinity) {
n = (n > 0 || -1) * Math.floor(Math.abs(n));
}
}
if (n >= len) {
// console.log("indexOf not found1 ", JSON.stringify(searchElement), JSON.stringify(arr));
return -1;
}
var k = n >= 0 ? n : Math.max(len - Math.abs(n), 0);
for (; k < len; k++) {
if (k in t && t[k] === searchElement) {
return k;
}
}
if (arr == searchElement) {
return 0;
}
// console.log("indexOf not found2 ", JSON.stringify(searchElement), JSON.stringify(arr));
return -1;
}
Twig.forEach = function (arr, callback, thisArg) {
if (Array.prototype.forEach ) {
return arr.forEach(callback, thisArg);
}
var T, k;
if ( arr == null ) {
throw new TypeError( " this is null or not defined" );
}
// 1. Let O be the result of calling ToObject passing the |this| value as the argument.
var O = Object(arr);
// 2. Let lenValue be the result of calling the Get internal method of O with the argument "length".
// 3. Let len be ToUint32(lenValue).
var len = O.length >>> 0; // Hack to convert O.length to a UInt32
// 4. If IsCallable(callback) is false, throw a TypeError exception.
// See: http://es5.github.com/#x9.11
if ( {}.toString.call(callback) != "[object Function]" ) {
throw new TypeError( callback + " is not a function" );
}
// 5. If thisArg was supplied, let T be thisArg; else let T be undefined.
if ( thisArg ) {
T = thisArg;
}
// 6. Let k be 0
k = 0;
// 7. Repeat, while k < len
while( k < len ) {
var kValue;
// a. Let Pk be ToString(k).
// This is implicit for LHS operands of the in operator
// b. Let kPresent be the result of calling the HasProperty internal method of O with argument Pk.
// This step can be combined with c
// c. If kPresent is true, then
if ( k in O ) {
// i. Let kValue be the result of calling the Get internal method of O with argument Pk.
kValue = O[ k ];
// ii. Call the Call internal method of callback with T as the this value and
// argument list containing kValue, k, and O.
callback.call( T, kValue, k, O );
}
// d. Increase k by 1.
k++;
}
// 8. return undefined
};
Twig.merge = function(target, source, onlyChanged) {
Twig.forEach(Object.keys(source), function (key) {
if (onlyChanged && !(key in target)) {
return;
}
target[key] = source[key]
});
return target;
};
/**
* Exception thrown by twig.js.
*/
Twig.Error = function(message) {
this.message = message;
this.name = "TwigException";
this.type = "TwigException";
};
/**
* Get the string representation of a Twig error.
*/
Twig.Error.prototype.toString = function() {
var output = this.name + ": " + this.message;
return output;
};
/**
* Wrapper for logging to the console.
*/
Twig.log = {
trace: function() {if (Twig.trace && console) {console.log(Array.prototype.slice.call(arguments));}},
debug: function() {if (Twig.debug && console) {console.log(Array.prototype.slice.call(arguments));}}
};
if (typeof console !== "undefined") {
if (typeof console.error !== "undefined") {
Twig.log.error = function() {
console.error.apply(console, arguments);
}
} else if (typeof console.log !== "undefined") {
Twig.log.error = function() {
console.log.apply(console, arguments);
}
}
} else {
Twig.log.error = function(){};
}
/**
* Wrapper for child context objects in Twig.
*
* @param {Object} context Values to initialize the context with.
*/
Twig.ChildContext = function(context) {
var ChildContext = function ChildContext() {};
ChildContext.prototype = context;
return new ChildContext();
};
/**
* Container for methods related to handling high level template tokens
* (for example: {{ expression }}, {% logic %}, {# comment #}, raw data)
*/
Twig.token = {};
/**
* Token types.
*/
Twig.token.type = {
output: 'output',
logic: 'logic',
comment: 'comment',
raw: 'raw',
output_whitespace_pre: 'output_whitespace_pre',
output_whitespace_post: 'output_whitespace_post',
output_whitespace_both: 'output_whitespace_both',
logic_whitespace_pre: 'logic_whitespace_pre',
logic_whitespace_post: 'logic_whitespace_post',
logic_whitespace_both: 'logic_whitespace_both'
};
/**
* Token syntax definitions.
*/
Twig.token.definitions = [
{
type: Twig.token.type.raw,
open: '{% raw %}',
close: '{% endraw %}'
},
{
type: Twig.token.type.raw,
open: '{% verbatim %}',
close: '{% endverbatim %}'
},
// *Whitespace type tokens*
//
// These typically take the form `{{- expression -}}` or `{{- expression }}` or `{{ expression -}}`.
{
type: Twig.token.type.output_whitespace_pre,
open: '{{-',
close: '}}'
},
{
type: Twig.token.type.output_whitespace_post,
open: '{{',
close: '-}}'
},
{
type: Twig.token.type.output_whitespace_both,
open: '{{-',
close: '-}}'
},
{
type: Twig.token.type.logic_whitespace_pre,
open: '{%-',
close: '%}'
},
{
type: Twig.token.type.logic_whitespace_post,
open: '{%',
close: '-%}'
},
{
type: Twig.token.type.logic_whitespace_both,
open: '{%-',
close: '-%}'
},
// *Output type tokens*
//
// These typically take the form `{{ expression }}`.
{
type: Twig.token.type.output,
open: '{{',
close: '}}'
},
// *Logic type tokens*
//
// These typically take a form like `{% if expression %}` or `{% endif %}`
{
type: Twig.token.type.logic,
open: '{%',
close: '%}'
},
// *Comment type tokens*
//
// These take the form `{# anything #}`
{
type: Twig.token.type.comment,
open: '{#',
close: '#}'
}
];
/**
* What characters start "strings" in token definitions. We need this to ignore token close
* strings inside an expression.
*/
Twig.token.strings = ['"', "'"];
Twig.token.findStart = function (template) {
var output = {
position: null,
close_position: null,
def: null
},
i,
token_template,
first_key_position,
close_key_position;
for (i=0;i<Twig.token.definitions.length;i++) {
token_template = Twig.token.definitions[i];
first_key_position = template.indexOf(token_template.open);
close_key_position = template.indexOf(token_template.close);
Twig.log.trace("Twig.token.findStart: ", "Searching for ", token_template.open, " found at ", first_key_position);
//Special handling for mismatched tokens
if (first_key_position >= 0) {
//This token matches the template
if (token_template.open.length !== token_template.close.length) {
//This token has mismatched closing and opening tags
if (close_key_position < 0) {
//This token's closing tag does not match the template
continue;
}
}
}
// Does this token occur before any other types?
if (first_key_position >= 0 && (output.position === null || first_key_position < output.position)) {
output.position = first_key_position;
output.def = token_template;
output.close_position = close_key_position;
} else if (first_key_position >= 0 && output.position !== null && first_key_position === output.position) {
/*This token exactly matches another token,
greedily match to check if this token has a greater specificity*/
if (token_template.open.length > output.def.open.length) {
//This token's opening tag is more specific than the previous match
output.position = first_key_position;
output.def = token_template;
output.close_position = close_key_position;
} else if (token_template.open.length === output.def.open.length) {
if (token_template.close.length > output.def.close.length) {
//This token's opening tag is as specific as the previous match,
//but the closing tag has greater specificity
if (close_key_position >= 0 && close_key_position < output.close_position) {
//This token's closing tag exists in the template,
//and it occurs sooner than the previous match
output.position = first_key_position;
output.def = token_template;
output.close_position = close_key_position;
}
} else if (close_key_position >= 0 && close_key_position < output.close_position) {
//This token's closing tag is not more specific than the previous match,
//but it occurs sooner than the previous match
output.position = first_key_position;
output.def = token_template;
output.close_position = close_key_position;
}
}
}
}
delete output['close_position'];
return output;
};
Twig.token.findEnd = function (template, token_def, start) {
var end = null,
found = false,
offset = 0,
// String position variables
str_pos = null,
str_found = null,
pos = null,
end_offset = null,
this_str_pos = null,
end_str_pos = null,
// For loop variables
i,
l;
while (!found) {
str_pos = null;
str_found = null;
pos = template.indexOf(token_def.close, offset);
if (pos >= 0) {
end = pos;
found = true;
} else {
// throw an exception
throw new Twig.Error("Unable to find closing bracket '" + token_def.close +
"'" + " opened near template position " + start);
}
// Ignore quotes within comments; just look for the next comment close sequence,
// regardless of what comes before it. https://github.com/justjohn/twig.js/issues/95
if (token_def.type === Twig.token.type.comment) {
break;
}
// Ignore quotes within raw tag
// Fixes #283
if (token_def.type === Twig.token.type.raw) {
break;
}
l = Twig.token.strings.length;
for (i = 0; i < l; i += 1) {
this_str_pos = template.indexOf(Twig.token.strings[i], offset);
if (this_str_pos > 0 && this_str_pos < pos &&
(str_pos === null || this_str_pos < str_pos)) {
str_pos = this_str_pos;
str_found = Twig.token.strings[i];
}
}
// We found a string before the end of the token, now find the string's end and set the search offset to it
if (str_pos !== null) {
end_offset = str_pos + 1;
end = null;
found = false;
while (true) {
end_str_pos = template.indexOf(str_found, end_offset);
if (end_str_pos < 0) {
throw "Unclosed string in template";
}
// Ignore escaped quotes
if (template.substr(end_str_pos - 1, 1) !== "\\") {
offset = end_str_pos + 1;
break;
} else {
end_offset = end_str_pos + 1;
}
}
}
}
return end;
};
/**
* Convert a template into high-level tokens.
*/
Twig.tokenize = function (template) {
var tokens = [],
// An offset for reporting errors locations in the template.
error_offset = 0,
// The start and type of the first token found in the template.
found_token = null,
// The end position of the matched token.
end = null;
while (template.length > 0) {
// Find the first occurrence of any token type in the template
found_token = Twig.token.findStart(template);
Twig.log.trace("Twig.tokenize: ", "Found token: ", found_token);
if (found_token.position !== null) {
// Add a raw type token for anything before the start of the token
if (found_token.position > 0) {
tokens.push({
type: Twig.token.type.raw,
value: template.substring(0, found_token.position)
});
}
template = template.substr(found_token.position + found_token.def.open.length);
error_offset += found_token.position + found_token.def.open.length;
// Find the end of the token
end = Twig.token.findEnd(template, found_token.def, error_offset);
Twig.log.trace("Twig.tokenize: ", "Token ends at ", end);
tokens.push({
type: found_token.def.type,
value: template.substring(0, end).trim()
});
if (template.substr( end + found_token.def.close.length, 1 ) === "\n") {
switch (found_token.def.type) {
case "logic_whitespace_pre":
case "logic_whitespace_post":
case "logic_whitespace_both":
case "logic":
// Newlines directly after logic tokens are ignored
end += 1;
break;
}
}
template = template.substr(end + found_token.def.close.length);
// Increment the position in the template
error_offset += end + found_token.def.close.length;
} else {
// No more tokens -> add the rest of the template as a raw-type token
tokens.push({
type: Twig.token.type.raw,
value: template
});
template = '';
}
}
return tokens;
};
Twig.compile = function (tokens) {
try {
// Output and intermediate stacks
var output = [],
stack = [],
// The tokens between open and close tags
intermediate_output = [],
token = null,
logic_token = null,
unclosed_token = null,
// Temporary previous token.
prev_token = null,
// Temporary previous output.
prev_output = null,
// Temporary previous intermediate output.
prev_intermediate_output = null,
// The previous token's template
prev_template = null,
// Token lookahead
next_token = null,
// The output token
tok_output = null,
// Logic Token values
type = null,
open = null,
next = null;
var compile_output = function(token) {
Twig.expression.compile.apply(this, [token]);
if (stack.length > 0) {
intermediate_output.push(token);
} else {
output.push(token);
}
};
var compile_logic = function(token) {
// Compile the logic token
logic_token = Twig.logic.compile.apply(this, [token]);
type = logic_token.type;
open = Twig.logic.handler[type].open;
next = Twig.logic.handler[type].next;
Twig.log.trace("Twig.compile: ", "Compiled logic token to ", logic_token,
" next is: ", next, " open is : ", open);
// Not a standalone token, check logic stack to see if this is expected
if (open !== undefined && !open) {
prev_token = stack.pop();
prev_template = Twig.logic.handler[prev_token.type];
if (Twig.indexOf(prev_template.next, type) < 0) {
throw new Error(type + " not expected after a " + prev_token.type);
}
prev_token.output = prev_token.output || [];
prev_token.output = prev_token.output.concat(intermediate_output);
intermediate_output = [];
tok_output = {
type: Twig.token.type.logic,
token: prev_token
};
if (stack.length > 0) {
intermediate_output.push(tok_output);
} else {
output.push(tok_output);
}
}
// This token requires additional tokens to complete the logic structure.
if (next !== undefined && next.length > 0) {
Twig.log.trace("Twig.compile: ", "Pushing ", logic_token, " to logic stack.");
if (stack.length > 0) {
// Put any currently held output into the output list of the logic operator
// currently at the head of the stack before we push a new one on.
prev_token = stack.pop();
prev_token.output = prev_token.output || [];
prev_token.output = prev_token.output.concat(intermediate_output);
stack.push(prev_token);
intermediate_output = [];
}
// Push the new logic token onto the logic stack
stack.push(logic_token);
} else if (open !== undefined && open) {
tok_output = {
type: Twig.token.type.logic,
token: logic_token
};
// Standalone token (like {% set ... %}
if (stack.length > 0) {
intermediate_output.push(tok_output);
} else {
output.push(tok_output);
}
}
};
while (tokens.length > 0) {
token = tokens.shift();
prev_output = output[output.length - 1];
prev_intermediate_output = intermediate_output[intermediate_output.length - 1];
next_token = tokens[0];
Twig.log.trace("Compiling token ", token);
switch (token.type) {
case Twig.token.type.raw:
if (stack.length > 0) {
intermediate_output.push(token);
} else {
output.push(token);
}
break;
case Twig.token.type.logic:
compile_logic.call(this, token);
break;
// Do nothing, comments should be ignored
case Twig.token.type.comment:
break;
case Twig.token.type.output:
compile_output.call(this, token);
break;
//Kill whitespace ahead and behind this token
case Twig.token.type.logic_whitespace_pre:
case Twig.token.type.logic_whitespace_post:
case Twig.token.type.logic_whitespace_both:
case Twig.token.type.output_whitespace_pre:
case Twig.token.type.output_whitespace_post:
case Twig.token.type.output_whitespace_both:
if (token.type !== Twig.token.type.output_whitespace_post && token.type !== Twig.token.type.logic_whitespace_post) {
if (prev_output) {
//If the previous output is raw, pop it off
if (prev_output.type === Twig.token.type.raw) {
output.pop();
//If the previous output is not just whitespace, trim it
if (prev_output.value.match(/^\s*$/) === null) {
prev_output.value = prev_output.value.trim();
//Repush the previous output
output.push(prev_output);
}
}
}
if (prev_intermediate_output) {
//If the previous intermediate output is raw, pop it off
if (prev_intermediate_output.type === Twig.token.type.raw) {
intermediate_output.pop();
//If the previous output is not just whitespace, trim it
if (prev_intermediate_output.value.match(/^\s*$/) === null) {
prev_intermediate_output.value = prev_intermediate_output.value.trim();
//Repush the previous intermediate output
intermediate_output.push(prev_intermediate_output);<|fim▁hole|> }
//Compile this token
switch (token.type) {
case Twig.token.type.output_whitespace_pre:
case Twig.token.type.output_whitespace_post:
case Twig.token.type.output_whitespace_both:
compile_output.call(this, token);
break;
case Twig.token.type.logic_whitespace_pre:
case Twig.token.type.logic_whitespace_post:
case Twig.token.type.logic_whitespace_both:
compile_logic.call(this, token);
break;
}
if (token.type !== Twig.token.type.output_whitespace_pre && token.type !== Twig.token.type.logic_whitespace_pre) {
if (next_token) {
//If the next token is raw, shift it out
if (next_token.type === Twig.token.type.raw) {
tokens.shift();
//If the next token is not just whitespace, trim it
if (next_token.value.match(/^\s*$/) === null) {
next_token.value = next_token.value.trim();
//Unshift the next token
tokens.unshift(next_token);
}
}
}
}
break;
}
Twig.log.trace("Twig.compile: ", " Output: ", output,
" Logic Stack: ", stack,
" Pending Output: ", intermediate_output );
}
// Verify that there are no logic tokens left in the stack.
if (stack.length > 0) {
unclosed_token = stack.pop();
throw new Error("Unable to find an end tag for " + unclosed_token.type +
", expecting one of " + unclosed_token.next);
}
return output;
} catch (ex) {
Twig.log.error("Error compiling twig template " + this.id + ": ");
if (ex.stack) {
Twig.log.error(ex.stack);
} else {
Twig.log.error(ex.toString());
}
if (this.options.rethrow) throw ex;
}
};
/**
* Parse a compiled template.
*
* @param {Array} tokens The compiled tokens.
* @param {Object} context The render context.
*
* @return {string} The parsed template.
*/
Twig.parse = function (tokens, context) {
try {
var output = [],
// Track logic chains
chain = true,
that = this;
Twig.forEach(tokens, function parseToken(token) {
Twig.log.debug("Twig.parse: ", "Parsing token: ", token);
switch (token.type) {
case Twig.token.type.raw:
output.push(Twig.filters.raw(token.value));
break;
case Twig.token.type.logic:
var logic_token = token.token,
logic = Twig.logic.parse.apply(that, [logic_token, context, chain]);
if (logic.chain !== undefined) {
chain = logic.chain;
}
if (logic.context !== undefined) {
context = logic.context;
}
if (logic.output !== undefined) {
output.push(logic.output);
}
break;
case Twig.token.type.comment:
// Do nothing, comments should be ignored
break;
//Fall through whitespace to output
case Twig.token.type.output_whitespace_pre:
case Twig.token.type.output_whitespace_post:
case Twig.token.type.output_whitespace_both:
case Twig.token.type.output:
Twig.log.debug("Twig.parse: ", "Output token: ", token.stack);
// Parse the given expression in the given context
output.push(Twig.expression.parse.apply(that, [token.stack, context]));
break;
}
});
return Twig.output.apply(this, [output]);
} catch (ex) {
Twig.log.error("Error parsing twig template " + this.id + ": ");
if (ex.stack) {
Twig.log.error(ex.stack);
} else {
Twig.log.error(ex.toString());
}
if (this.options.rethrow) throw ex;
if (Twig.debug) {
return ex.toString();
}
}
};
/**
* Tokenize and compile a string template.
*
* @param {string} data The template.
*
* @return {Array} The compiled tokens.
*/
Twig.prepare = function(data) {
var tokens, raw_tokens;
// Tokenize
Twig.log.debug("Twig.prepare: ", "Tokenizing ", data);
raw_tokens = Twig.tokenize.apply(this, [data]);
// Compile
Twig.log.debug("Twig.prepare: ", "Compiling ", raw_tokens);
tokens = Twig.compile.apply(this, [raw_tokens]);
Twig.log.debug("Twig.prepare: ", "Compiled ", tokens);
return tokens;
};
/**
* Join the output token's stack and escape it if needed
*
* @param {Array} Output token's stack
*
* @return {string|String} Autoescaped output
*/
Twig.output = function(output) {
if (!this.options.autoescape) {
return output.join("");
}
var strategy = 'html';
if(typeof this.options.autoescape == 'string')
strategy = this.options.autoescape;
// [].map would be better but it's not supported by IE8-
var escaped_output = [];
Twig.forEach(output, function (str) {
if (str && (str.twig_markup !== true && str.twig_markup != strategy)) {
str = Twig.filters.escape(str, [ strategy ]);
}
escaped_output.push(str);
});
return Twig.Markup(escaped_output.join(""));
}
// Namespace for template storage and retrieval
Twig.Templates = {
/**
* Registered template loaders - use Twig.Templates.registerLoader to add supported loaders
* @type {Object}
*/
loaders: {},
/**
* Registered template parsers - use Twig.Templates.registerParser to add supported parsers
* @type {Object}
*/
parsers: {},
/**
* Cached / loaded templates
* @type {Object}
*/
registry: {}
};
/**
* Is this id valid for a twig template?
*
* @param {string} id The ID to check.
*
* @throws {Twig.Error} If the ID is invalid or used.
* @return {boolean} True if the ID is valid.
*/
Twig.validateId = function(id) {
if (id === "prototype") {
throw new Twig.Error(id + " is not a valid twig identifier");
} else if (Twig.cache && Twig.Templates.registry.hasOwnProperty(id)) {
throw new Twig.Error("There is already a template with the ID " + id);
}
return true;
}
/**
* Register a template loader
*
* @example
* Twig.extend(function(Twig) {
* Twig.Templates.registerLoader('custom_loader', function(location, params, callback, error_callback) {
* // ... load the template ...
* params.data = loadedTemplateData;
* // create and return the template
* var template = new Twig.Template(params);
* if (typeof callback === 'function') {
* callback(template);
* }
* return template;
* });
* });
*
* @param {String} method_name The method this loader is intended for (ajax, fs)
* @param {Function} func The function to execute when loading the template
* @param {Object|undefined} scope Optional scope parameter to bind func to
*
* @throws Twig.Error
*
* @return {void}
*/
Twig.Templates.registerLoader = function(method_name, func, scope) {
if (typeof func !== 'function') {
throw new Twig.Error('Unable to add loader for ' + method_name + ': Invalid function reference given.');
}
if (scope) {
func = func.bind(scope);
}
this.loaders[method_name] = func;
};
/**
* Remove a registered loader
*
* @param {String} method_name The method name for the loader you wish to remove
*
* @return {void}
*/
Twig.Templates.unRegisterLoader = function(method_name) {
if (this.isRegisteredLoader(method_name)) {
delete this.loaders[method_name];
}
};
/**
* See if a loader is registered by its method name
*
* @param {String} method_name The name of the loader you are looking for
*
* @return {boolean}
*/
Twig.Templates.isRegisteredLoader = function(method_name) {
return this.loaders.hasOwnProperty(method_name);
};
/**
* Register a template parser
*
* @example
* Twig.extend(function(Twig) {
* Twig.Templates.registerParser('custom_parser', function(params) {
* // this template source can be accessed in params.data
* var template = params.data
*
* // ... custom process that modifies the template
*
* // return the parsed template
* return template;
* });
* });
*
* @param {String} method_name The method this parser is intended for (twig, source)
* @param {Function} func The function to execute when parsing the template
* @param {Object|undefined} scope Optional scope parameter to bind func to
*
* @throws Twig.Error
*
* @return {void}
*/
Twig.Templates.registerParser = function(method_name, func, scope) {
if (typeof func !== 'function') {
throw new Twig.Error('Unable to add parser for ' + method_name + ': Invalid function reference given.');
}
if (scope) {
func = func.bind(scope);
}
this.parsers[method_name] = func;
};
/**
* Remove a registered parser
*
* @param {String} method_name The method name for the parser you wish to remove
*
* @return {void}
*/
Twig.Templates.unRegisterParser = function(method_name) {
if (this.isRegisteredParser(method_name)) {
delete this.parsers[method_name];
}
};
/**
* See if a parser is registered by its method name
*
* @param {String} method_name The name of the parser you are looking for
*
* @return {boolean}
*/
Twig.Templates.isRegisteredParser = function(method_name) {
return this.parsers.hasOwnProperty(method_name);
};
/**
* Save a template object to the store.
*
* @param {Twig.Template} template The twig.js template to store.
*/
Twig.Templates.save = function(template) {
if (template.id === undefined) {
throw new Twig.Error("Unable to save template with no id");
}
Twig.Templates.registry[template.id] = template;
};
/**
* Load a previously saved template from the store.
*
* @param {string} id The ID of the template to load.
*
* @return {Twig.Template} A twig.js template stored with the provided ID.
*/
Twig.Templates.load = function(id) {
if (!Twig.Templates.registry.hasOwnProperty(id)) {
return null;
}
return Twig.Templates.registry[id];
};
/**
 * Load a template from a remote location using AJAX and save it with the given ID.
*
* Available parameters:
*
* async: Should the HTTP request be performed asynchronously.
* Defaults to true.
* method: What method should be used to load the template
* (fs or ajax)
* parser: What method should be used to parse the template
* (twig or source)
* precompiled: Has the template already been compiled.
*
* @param {string} location The remote URL to load as a template.
* @param {Object} params The template parameters.
* @param {function} callback A callback triggered when the template finishes loading.
* @param {function} error_callback A callback triggered if an error occurs loading the template.
*
*
*/
Twig.Templates.loadRemote = function(location, params, callback, error_callback) {
var loader;
// Default to async
if (params.async === undefined) {
params.async = true;
}
// Default to the URL so the template is cached.
if (params.id === undefined) {
params.id = location;
}
// Check for existing template
if (Twig.cache && Twig.Templates.registry.hasOwnProperty(params.id)) {
// A template is already saved with the given id.
if (typeof callback === 'function') {
callback(Twig.Templates.registry[params.id]);
}
// TODO: if async, return deferred promise
return Twig.Templates.registry[params.id];
}
//if the parser name hasn't been set, default it to twig
params.parser = params.parser || 'twig';
// Assume 'fs' if the loader is not defined
loader = this.loaders[params.method] || this.loaders.fs;
return loader.apply(this, arguments);
};
// Determine object type
function is(type, obj) {
var clas = Object.prototype.toString.call(obj).slice(8, -1);
return obj !== undefined && obj !== null && clas === type;
}
/**
* Create a new twig.js template.
*
* Parameters: {
* data: The template, either pre-compiled tokens or a string template
* id: The name of this template
* blocks: Any pre-existing block from a child template
* }
*
* @param {Object} params The template parameters.
*/
Twig.Template = function ( params ) {
var data = params.data,
id = params.id,
blocks = params.blocks,
macros = params.macros || {},
base = params.base,
path = params.path,
url = params.url,
name = params.name,
method = params.method,
// parser options
options = params.options;
// # What is stored in a Twig.Template
//
// The Twig Template holds several chunks of data.
//
// {
// id: The token ID (if any)
// tokens: The list of tokens that makes up this template.
// blocks: The list of block this template contains.
// base: The base template (if any)
// options: {
// Compiler/parser options
//
// strict_variables: true/false
// Should missing variable/keys emit an error message. If false, they default to null.
// }
// }
//
this.id = id;
this.method = method;
this.base = base;
this.path = path;
this.url = url;
this.name = name;
this.macros = macros;
this.options = options;
this.reset(blocks);
if (is('String', data)) {
this.tokens = Twig.prepare.apply(this, [data]);
} else {
this.tokens = data;
}
if (id !== undefined) {
Twig.Templates.save(this);
}
};
Twig.Template.prototype.reset = function(blocks) {
Twig.log.debug("Twig.Template.reset", "Reseting template " + this.id);
this.blocks = {};
this.importedBlocks = [];
this.originalBlockTokens = {};
this.child = {
blocks: blocks || {}
};
this.extend = null;
};
Twig.Template.prototype.render = function (context, params) {
params = params || {};
var output,
url;
this.context = context || {};
// Clear any previous state
this.reset();
if (params.blocks) {
this.blocks = params.blocks;
}
if (params.macros) {
this.macros = params.macros;
}
output = Twig.parse.apply(this, [this.tokens, this.context]);
// Does this template extend another
if (this.extend) {
var ext_template;
// check if the template is provided inline
if ( this.options.allowInlineIncludes ) {
ext_template = Twig.Templates.load(this.extend);
if ( ext_template ) {
ext_template.options = this.options;
}
}
// check for the template file via include
if (!ext_template) {
url = Twig.path.parsePath(this, this.extend);
ext_template = Twig.Templates.loadRemote(url, {
method: this.getLoaderMethod(),
base: this.base,
async: false,
id: url,
options: this.options
});
}
this.parent = ext_template;
return this.parent.render(this.context, {
blocks: this.blocks
});
}
if (params.output == 'blocks') {
return this.blocks;
} else if (params.output == 'macros') {
return this.macros;
} else {
return output;
}
};
Twig.Template.prototype.importFile = function(file) {
var url, sub_template;
if (!this.url && this.options.allowInlineIncludes) {
file = this.path ? this.path + '/' + file : file;
sub_template = Twig.Templates.load(file);
if (!sub_template) {
// `url` has not been assigned on this inline-includes path, so load by file name
sub_template = Twig.Templates.loadRemote(file, {
id: file,
method: this.getLoaderMethod(),
async: false,
options: this.options
});
if (!sub_template) {
throw new Twig.Error("Unable to find the template " + file);
}
}
sub_template.options = this.options;
return sub_template;
}
url = Twig.path.parsePath(this, file);
// Load blocks from an external file
sub_template = Twig.Templates.loadRemote(url, {
method: this.getLoaderMethod(),
base: this.base,
async: false,
options: this.options,
id: url
});
return sub_template;
};
Twig.Template.prototype.importBlocks = function(file, override) {
var sub_template = this.importFile(file),
context = this.context,
that = this,
key;
override = override || false;
sub_template.render(context);
// Mixin blocks
Twig.forEach(Object.keys(sub_template.blocks), function(key) {
if (override || that.blocks[key] === undefined) {
that.blocks[key] = sub_template.blocks[key];
that.importedBlocks.push(key);
}
});
};
Twig.Template.prototype.importMacros = function(file) {
var url = Twig.path.parsePath(this, file);
// load remote template
var remoteTemplate = Twig.Templates.loadRemote(url, {
method: this.getLoaderMethod(),
async: false,
id: url
});
return remoteTemplate;
};
Twig.Template.prototype.getLoaderMethod = function() {
if (this.path) {
return 'fs';
}
if (this.url) {
return 'ajax';
}
return this.method || 'fs';
};
Twig.Template.prototype.compile = function(options) {
// compile the template into raw JS
return Twig.compiler.compile(this, options);
};
/**
* Create safe output
*
* @param {string} Content safe to output
*
* @return {String} Content wrapped into a String
*/
Twig.Markup = function(content, strategy) {
if(typeof strategy == 'undefined') {
strategy = true;
}
if (typeof content === 'string' && content.length > 0) {
content = new String(content);
content.twig_markup = strategy;
}
return content;
};
return Twig;
};<|fim▁end|>
|
}
}
}
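The subtle part of `Twig.token.findStart` above is its tie-breaking: the earliest match in the template wins, and when two token types start at the same offset the longer (more specific) opening tag wins, which is how `{{-` is preferred over `{{`. Below is a compact model of that rule, written in Python for brevity; it deliberately omits the closing-tag specificity checks of the original.

# Simplified model of Twig.token.findStart's greedy matching.
OPEN_TAGS = ['{%-', '{{-', '{%', '{{', '{#']

def find_start(template):
    best = None  # (position, tag)
    for tag in OPEN_TAGS:
        pos = template.find(tag)
        if pos < 0:
            continue
        if (best is None or pos < best[0]
                or (pos == best[0] and len(tag) > len(best[1]))):
            best = (pos, tag)
    return best

print(find_start('Hello {{- name }}'))  # -> (6, '{{-')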
|
<|file_name|>PhotoViewController.java<|end_file_name|><|fim▁begin|>/**
* University of Campinas - Brazil
* Institute of Computing
* SED group
*
* date: February 2009
*
*/
package br.unicamp.ic.sed.mobilemedia.copyphoto.impl;
<|fim▁hole|>import javax.microedition.lcdui.Command;
import javax.microedition.lcdui.Display;
import javax.microedition.lcdui.Displayable;
import javax.microedition.midlet.MIDlet;
import br.unicamp.ic.sed.mobilemedia.copyphoto.spec.prov.IManager;
import br.unicamp.ic.sed.mobilemedia.copyphoto.spec.req.IFilesystem;
import br.unicamp.ic.sed.mobilemedia.main.spec.dt.IImageData;
import br.unicamp.ic.sed.mobilemedia.photo.spec.prov.IPhoto;
/**
* TODO This whole class must be aspectized
*/
class PhotoViewController extends AbstractController {
private AddPhotoToAlbum addPhotoToAlbum;
private static final Command backCommand = new Command("Back", Command.BACK, 0);
private Displayable lastScreen = null;
private void setAddPhotoToAlbum(AddPhotoToAlbum addPhotoToAlbum) {
this.addPhotoToAlbum = addPhotoToAlbum;
}
String imageName = "";
public PhotoViewController(MIDlet midlet, String imageName) {
super( midlet );
this.imageName = imageName;
}
private AddPhotoToAlbum getAddPhotoToAlbum() {
if( this.addPhotoToAlbum == null)
this.addPhotoToAlbum = new AddPhotoToAlbum("Copy Photo to Album");
return addPhotoToAlbum;
}
/* (non-Javadoc)
* @see ubc.midp.mobilephoto.core.ui.controller.ControllerInterface#handleCommand(javax.microedition.lcdui.Command, javax.microedition.lcdui.Displayable)
*/
public boolean handleCommand(Command c) {
String label = c.getLabel();
System.out.println( "<*"+this.getClass().getName()+".handleCommand() *> " + label);
/** Case: Copy photo to a different album */
if (label.equals("Copy")) {
this.initCopyPhotoToAlbum( );
return true;
}
/** Case: Save a copy in a new album */
else if (label.equals("Save Photo")) {
return this.savePhoto();
/* IManager manager = ComponentFactory.createInstance();
IPhoto photo = (IPhoto) manager.getRequiredInterface("IPhoto");
return photo.postCommand( listImagesCommand ); */
}else if( label.equals("Cancel")){
if( lastScreen != null ){
MIDlet midlet = this.getMidlet();
Display.getDisplay( midlet ).setCurrent( lastScreen );
return true;
}
}
return false;
}
private void initCopyPhotoToAlbum() {
String title = new String("Copy Photo to Album");
String labelPhotoPath = new String("Copy to Album:");
AddPhotoToAlbum addPhotoToAlbum = new AddPhotoToAlbum( title );
addPhotoToAlbum.setPhotoName( imageName );
addPhotoToAlbum.setLabelPhotoPath( labelPhotoPath );
this.setAddPhotoToAlbum( addPhotoToAlbum );
//Get all required interfaces for this method
MIDlet midlet = this.getMidlet();
//addPhotoToAlbum.setCommandListener( this );
lastScreen = Display.getDisplay( midlet ).getCurrent();
Display.getDisplay( midlet ).setCurrent( addPhotoToAlbum );
addPhotoToAlbum.setCommandListener(this);
}
private boolean savePhoto() {
System.out.println("[PhotoViewController:savePhoto()]");
IManager manager = ComponentFactory.createInstance();
IImageData imageData = null;
IFilesystem filesystem = (IFilesystem) manager.getRequiredInterface("IFilesystem");
System.out.println("[PhotoViewController:savePhoto()] filesystem="+filesystem);
imageData = filesystem.getImageInfo(imageName);
AddPhotoToAlbum addPhotoToAlbum = this.getAddPhotoToAlbum();
String photoName = addPhotoToAlbum.getPhotoName();
String albumName = addPhotoToAlbum.getPath();
filesystem.addImageData(photoName, imageData, albumName);
if( lastScreen != null ){
MIDlet midlet = this.getMidlet();
Display.getDisplay( midlet ).setCurrent( lastScreen );
}
return true;
}
}<|fim▁end|>
| |
<|file_name|>combinations.py<|end_file_name|><|fim▁begin|># Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Facilities for creating multiple test combinations.
Here is an example of testing various optimizers in Eager and Graph mode:
class AdditionExample(test.TestCase, parameterized.TestCase):
@combinations.generate(
combinations.combine(mode=["graph", "eager"],
optimizer=[AdamOptimizer(),
GradientDescentOptimizer()]))
def testOptimizer(self, optimizer):
... f(optimizer)...
This will run `testOptimizer` 4 times with the specified optimizers: 2 in
Eager and 2 in Graph mode.
The test will be provided with arguments that match the arguments of combine
by name. It is necessary to request all arguments, except for `mode`, which is
optional.
`combine()` function is available for creating a cross product of various
options. `times()` function exists for creating a product of N `combine()`-ed
results. See below.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import OrderedDict
import sys
import types
import unittest
from absl.testing import parameterized
import six
from tensorflow.contrib.cluster_resolver import TPUClusterResolver
from tensorflow.contrib.distribute.python import mirrored_strategy as mirrored_lib
from tensorflow.contrib.distribute.python import one_device_strategy as one_device_lib
from tensorflow.contrib.distribute.python import tpu_strategy as tpu_lib
from tensorflow.contrib.optimizer_v2 import adagrad as adagrad_v2
from tensorflow.contrib.optimizer_v2 import adam as adam_v2
from tensorflow.contrib.optimizer_v2 import gradient_descent as gradient_descent_v2
from tensorflow.python.distribute import distribution_strategy_context
from tensorflow.python.eager import context
from tensorflow.python.framework import ops
from tensorflow.python.training import adagrad
from tensorflow.python.training import adam
from tensorflow.python.training import gradient_descent
from tensorflow.python.training import rmsprop
from tensorflow.python.util import tf_inspect
GPU_TEST = "test_gpu" in sys.argv[0]
TPU_TEST = "test_tpu" in sys.argv[0]
def generate(combinations):
"""A decorator for generating test cases of a test method or a test class.
Args:
combinations: a list of dictionaries created using combine() and times().
Restrictions:
-- the "mode" argument can be either "eager" or "graph". It's "graph" by
default.
-- arguments of the test method must match by name to get the corresponding
value of the combination. Tests must accept all arguments except the
"mode", "required_tpu" and "required_gpus".
-- "distribution" argument is special and optional. It is meant for passing
instances of DistributionStrategy. Each instance is to be passed as via
`NamedDistribution`. If using "distribution", "required_gpus" and
"required_tpu" should be specified via the NamedDistribution instance,
rather than as separate arguments.
-- "required_tpu" argument is special and optional. If not `None`, then the
test will be skipped if TPUs aren't available.
-- "required_gpus" argument is special and optional. If not `None`, then the
test will be skipped if the specified number of GPUs aren't available.
Returns:
a decorator that will cause the test method or the test class to be run
under the specified conditions.
Raises:
ValueError - if "mode" argument wasn't either "eager" or "graph" or if other
arguments were not accepted by the test method.
"""
def decorator(test_method_or_class):
"""The decorator to be returned."""
# Generate good test names that can be used with --test_filter.
named_combinations = []
for combination in combinations:
# We use OrderedDicts in `combine()` and `times()` to ensure stable
# order of keys in each dictionary.
assert isinstance(combination, OrderedDict)
name = "".join([
"_{}_{}".format(
"".join(filter(str.isalnum, key)),
"".join(filter(str.isalnum, str(value))))
for key, value in combination.items()
])
named_combinations.append(
OrderedDict(
list(combination.items()) + [("testcase_name",
"_test{}".format(name))]))
if isinstance(test_method_or_class, type):
class_object = test_method_or_class
class_object._test_method_ids = test_method_ids = {}
for name, test_method in six.iteritems(class_object.__dict__.copy()):
if (name.startswith(unittest.TestLoader.testMethodPrefix) and
isinstance(test_method, types.FunctionType)):
delattr(class_object, name)
methods = {}
parameterized._update_class_dict_for_param_test_case(
class_object.__name__, methods, test_method_ids, name,
parameterized._ParameterizedTestIter(
_augment_with_special_arguments(test_method),
named_combinations, parameterized._NAMED, name))
for method_name, method in six.iteritems(methods):
setattr(class_object, method_name, method)
return class_object
else:
test_method = _augment_with_special_arguments(test_method_or_class)
return parameterized.named_parameters(*named_combinations)(test_method)
return decorator
def _augment_with_special_arguments(test_method):
def decorated(self, **kwargs):
"""A wrapped test method that treats some arguments in a special way."""
mode = kwargs.pop("mode", "graph")
distribution = kwargs.get("distribution", None)
required_tpu = kwargs.pop("required_tpu", False)
required_gpus = kwargs.pop("required_gpus", None)
if distribution:
assert required_gpus is None, (
"Do not use `required_gpus` and `distribution` together.")
assert required_tpu is False, (
"Do not use `required_tpu` and `distribution` together.")
required_gpus = distribution.required_gpus
required_tpu = distribution.required_tpu
if required_tpu and not TPU_TEST:
self.skipTest("Test requires a TPU, but it's not available.")
if not required_tpu and TPU_TEST:
self.skipTest("Test that doesn't require a TPU.")
if not required_gpus:
if GPU_TEST:
self.skipTest("Test that doesn't require GPUs.")
elif context.num_gpus() < required_gpus:
# TODO(priyag): Consider allowing tests in graph mode using soft
# placement.
self.skipTest(
"{} GPUs are not available for this test. {} GPUs are available".
format(required_gpus, context.num_gpus()))
# At this point, `kwargs` doesn't have `required_gpus` or `required_tpu`
# that the user might have specified. `kwargs` still has `mode`, which
# the test is allowed to accept or ignore.
requested_arguments = tf_inspect.getfullargspec(test_method).args
missing_arguments = set(list(kwargs.keys()) + ["self"]).difference(
set(requested_arguments + ["mode"]))
if missing_arguments:
raise ValueError("The test is missing arguments {} .".format(
missing_arguments))
kwargs_to_pass = {}
for arg in requested_arguments:
if arg == "self":
kwargs_to_pass[arg] = self
else:
kwargs_to_pass[arg] = kwargs[arg]
if mode == "eager":
with context.eager_mode():
if distribution:
kwargs_to_pass["distribution"] = distribution.strategy
test_method(**kwargs_to_pass)
elif mode == "graph":
with ops.Graph().as_default(), context.graph_mode():
if distribution:
kwargs_to_pass["distribution"] = distribution.strategy
test_method(**kwargs_to_pass)
else:
raise ValueError(
"'mode' has to be either 'eager' or 'graph' and not {}".format(
mode))
return decorated
def combine(**kwargs):
"""Generate combinations based on its keyword arguments.
Two sets of returned combinations can be concatenated using +. Their product
can be computed using `times()`.
Args:
**kwargs: keyword arguments of form `option=[possibilities, ...]`
or `option=the_only_possibility`.
Returns:
a list of dictionaries for each combination. Keys in the dictionaries are
the keyword argument names. Each key has one value - one of the
corresponding keyword argument values.
"""
if not kwargs:
return [OrderedDict()]
sort_by_key = lambda k: k[0][0]
kwargs = OrderedDict(sorted(kwargs.items(), key=sort_by_key))
first = list(kwargs.items())[0]
rest = dict(list(kwargs.items())[1:])
rest_combined = combine(**rest)
key = first[0]
values = first[1]
if not isinstance(values, list):
values = [values]
return [
OrderedDict(sorted(list(combined.items()) + [(key, v)], key=sort_by_key))
for v in values
for combined in rest_combined
]
def times(*combined):
"""Generate a product of N sets of combinations.
times(combine(a=[1,2]), combine(b=[3,4])) == combine(a=[1,2], b=[3,4])
Args:
*combined: N lists of dictionaries that specify combinations.
Returns:
a list of dictionaries for each combination.
Raises:
ValueError: if some of the inputs have overlapping keys.
"""
assert combined
if len(combined) == 1:
return combined[0]
first = combined[0]
rest_combined = times(*combined[1:])
combined_results = []
for a in first:
for b in rest_combined:
if set(a.keys()).intersection(set(b.keys())):
raise ValueError("Keys need to not overlap: {} vs {}".format(
a.keys(), b.keys()))
combined_results.append(OrderedDict(list(a.items()) + list(b.items())))
return combined_results
class NamedObject(object):
"""A class that translates an object into a good test name."""
def __init__(self, name, obj):
self._name = name
self._obj = obj
def __getattr__(self, name):
return getattr(self._obj, name)
def __call__(self, *args, **kwargs):
return self._obj(*args, **kwargs)
def __repr__(self):
return self._name
class NamedDistribution(object):
"""Translates DistributionStrategy and its data into a good name."""<|fim▁hole|> self._distribution_fn = distribution_fn
self._name = name
self._required_gpus = required_gpus
self._required_tpu = required_tpu
def __repr__(self):
return self._name
@property
def strategy(self):
return self._distribution_fn()
@property
def required_gpus(self):
return self._required_gpus
@property
def required_tpu(self):
return self._required_tpu
# pylint: disable=g-long-lambda
default_strategy = NamedDistribution(
"Default",
distribution_strategy_context._get_default_distribution_strategy, # pylint: disable=protected-access
required_gpus=None)
one_device_strategy = NamedDistribution(
"OneDeviceCPU", lambda: one_device_lib.OneDeviceStrategy("/cpu:0"),
required_gpus=None)
tpu_strategy = NamedDistribution(
"TPU", lambda: tpu_lib.TPUStrategy(
TPUClusterResolver(""), steps_per_run=2),
required_tpu=True)
tpu_strategy_one_step = NamedDistribution(
"TPUOneStep", lambda: tpu_lib.TPUStrategy(
TPUClusterResolver(""), steps_per_run=1),
required_tpu=True)
mirrored_strategy_with_one_cpu = NamedDistribution(
"Mirrored1CPU",
lambda: mirrored_lib.MirroredStrategy(["/cpu:0"]))
mirrored_strategy_with_one_gpu = NamedDistribution(
"Mirrored1GPU",
lambda: mirrored_lib.MirroredStrategy(["/gpu:0"]),
required_gpus=1)
mirrored_strategy_with_gpu_and_cpu = NamedDistribution(
"MirroredCPUAndGPU",
lambda: mirrored_lib.MirroredStrategy(["/gpu:0", "/cpu:0"]),
required_gpus=1)
mirrored_strategy_with_two_gpus = NamedDistribution(
"Mirrored2GPUs",
lambda: mirrored_lib.MirroredStrategy(["/gpu:0", "/gpu:1"]),
required_gpus=2)
core_mirrored_strategy_with_one_cpu = NamedDistribution(
"CoreMirrored1CPU",
lambda: mirrored_lib.CoreMirroredStrategy(["/cpu:0"]))
core_mirrored_strategy_with_one_gpu = NamedDistribution(
"CoreMirrored1GPU",
lambda: mirrored_lib.CoreMirroredStrategy(["/gpu:0"]),
required_gpus=1)
core_mirrored_strategy_with_gpu_and_cpu = NamedDistribution(
"CoreMirroredCPUAndGPU",
lambda: mirrored_lib.CoreMirroredStrategy(["/gpu:0", "/cpu:0"]),
required_gpus=1)
core_mirrored_strategy_with_two_gpus = NamedDistribution(
"CoreMirrored2GPUs",
lambda: mirrored_lib.CoreMirroredStrategy(["/gpu:0", "/gpu:1"]),
required_gpus=2)
gradient_descent_optimizer_v1_fn = NamedObject(
"GradientDescentV1", lambda: gradient_descent.GradientDescentOptimizer(0.2))
adagrad_optimizer_v1_fn = NamedObject(
"AdagradV1", lambda: adagrad.AdagradOptimizer(0.001))
adam_optimizer_v1_fn = NamedObject("AdamV1",
lambda: adam.AdamOptimizer(0.001, epsilon=1))
rmsprop_optimizer_v1_fn = NamedObject(
"RmsPropV1", lambda: rmsprop.RMSPropOptimizer(0.001))
optimizers_v1 = [gradient_descent_optimizer_v1_fn, adagrad_optimizer_v1_fn]
gradient_descent_optimizer_v2_fn = NamedObject(
"GradientDescentV2",
lambda: gradient_descent_v2.GradientDescentOptimizer(0.2))
adagrad_optimizer_v2_fn = NamedObject(
"AdagradV2", lambda: adagrad_v2.AdagradOptimizer(0.001))
adam_optimizer_v2_fn = NamedObject(
"AdamV2", lambda: adam_v2.AdamOptimizer(0.001, epsilon=1))
optimizers_v2 = [gradient_descent_optimizer_v2_fn, adagrad_optimizer_v2_fn]
graph_and_eager_modes = ["graph", "eager"]
def distributions_and_v1_optimizers():
"""A common set of combination with DistributionStrategies and Optimizers."""
return combine(
distribution=[
one_device_strategy,
mirrored_strategy_with_gpu_and_cpu,
mirrored_strategy_with_two_gpus,
core_mirrored_strategy_with_gpu_and_cpu,
core_mirrored_strategy_with_two_gpus,
],
optimizer_fn=optimizers_v1)
def distributions_and_v2_optimizers():
"""DistributionStrategies and V2 Optimizers."""
return combine(
distribution=[
one_device_strategy,
mirrored_strategy_with_gpu_and_cpu,
mirrored_strategy_with_two_gpus,
core_mirrored_strategy_with_gpu_and_cpu,
core_mirrored_strategy_with_two_gpus,
],
optimizer_fn=optimizers_v2)<|fim▁end|>
|
def __init__(self, name, distribution_fn, required_gpus=None,
required_tpu=False):
|
<|file_name|>directx.rs<|end_file_name|><|fim▁begin|>use ::{
ovrResult,
ovrSession,
ovrTextureSwapChainDesc,
ovrTextureSwapChain,
ovrMirrorTexture,
ovrMirrorTextureDesc
};
use ::libc::{
c_int,
c_void,
};
use ::winapi::guiddef::IID;
use ::winapi::unknwnbase::IUnknown;
//-----------------------------------------------------------------------------------
// ***** Direct3D Specific
extern "C" {
/// Create Texture Swap Chain suitable for use with Direct3D 11 and 12.
///
/// **in** `session` Specifies an `ovrSession` previously returned by `ovr_Create`.
///
/// **in** `d3dPtr` Specifies the application's `D3D11Device` to create resources with or the `D3D12CommandQueue`
/// which must be the same one the application renders to the eye textures with.
///
/// **in** `desc` Specifies requested texture properties. See notes for more info about texture format.
///
/// **in** `bindFlags` Specifies what `ovrTextureBindFlags` the application requires for this texture chain.
///
/// **out** `out_TextureSwapChain` Returns the created `ovrTextureSwapChain`, which will be valid upon a successful return value, else it will be NULL.
/// This texture chain must be eventually destroyed via `ovr_DestroyTextureSwapChain` before destroying the session with `ovr_Destroy`.
///
/// Returns an `ovrResult` indicating success or failure. In the case of failure, use
/// `ovr_GetLastErrorInfo` to get more information.
///
/// **Note**: The texture format provided in the desc should be thought of as the format the distortion-compositor will use for the
/// `ShaderResourceView` when reading the contents of the texture. To that end, it is highly recommended that the application
/// requests texture swapchain formats that are in sRGB-space (e.g. `OVR_FORMAT_R8G8B8A8_UNORM_SRGB`) as the compositor
/// does sRGB-correct rendering. As such, the compositor relies on the GPU's hardware sampler to do the sRGB-to-linear
/// conversion. If the application still prefers to render to a linear format (e.g. `OVR_FORMAT_R8G8B8A8_UNORM`) while handling the
/// linear-to-gamma conversion via HLSL code, then the application must still request the corresponding sRGB format and also use
/// the `ovrTextureMisc_DX_Typeless` flag in the `ovrTextureSwapChainDesc`'s Flag field. This will allow the application to create
/// a RenderTargetView that is the desired linear format while the compositor continues to treat it as sRGB. Failure to do so
/// will cause the compositor to apply unexpected gamma conversions leading to gamma-curve artifacts. The `ovrTextureMisc_DX_Typeless`
/// flag for depth buffer formats (e.g. `OVR_FORMAT_D32_FLOAT`) is ignored as they are always converted to be typeless.
///
/// see [`ovr_GetTextureSwapChainLength`](../fn.ovr_GetTextureSwapChainLength.html), [`ovr_GetTextureSwapChainCurrentIndex`](../fn.ovr_GetTextureSwapChainCurrentIndex.html), [`ovr_GetTextureSwapChainDesc`](../fn.ovr_GetTextureSwapChainDesc.html), [`ovr_GetTextureSwapChainBufferDX`](fn.ovr_GetTextureSwapChainBufferDX.html), [`ovr_DestroyTextureSwapChain`](../fn.ovr_DestroyTextureSwapChain.html)
///
pub fn ovr_CreateTextureSwapChainDX(session: ovrSession, d3dPtr: * mut IUnknown, desc: * const ovrTextureSwapChainDesc, out_TextureSwapChain: * mut ovrTextureSwapChain) -> ovrResult;
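    // Hypothetical usage sketch (the desc field names follow OVR_CAPI and are not
    // defined in this file; `session` and `d3d11_device_ptr` are assumed to exist):
    // request an sRGB format so the compositor's hardware sampler performs the
    // sRGB-to-linear conversion, per the note above.
    //
    //     let desc = ovrTextureSwapChainDesc {
    //         Type: ovrTexture_2D,
    //         Format: OVR_FORMAT_R8G8B8A8_UNORM_SRGB,
    //         ArraySize: 1,
    //         Width: 1024,
    //         Height: 1024,
    //         MipLevels: 1,
    //         SampleCount: 1,
    //         StaticImage: ovrFalse,
    //         MiscFlags: 0,
    //         BindFlags: 0,
    //     };
    //     let mut chain: ovrTextureSwapChain = std::ptr::null_mut();
    //     let result = unsafe {
    //         ovr_CreateTextureSwapChainDX(session, d3d11_device_ptr, &desc, &mut chain)
    //     };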
/// Get a specific buffer within the chain as any compatible COM interface (similar to `QueryInterface`)
///
/// **in** `session` Specifies an `ovrSession` previously returned by `ovr_Create`.
///
/// **in** `chain` Specifies an `ovrTextureSwapChain` previously returned by `ovr_CreateTextureSwapChainDX`
///
/// **in** `index` Specifies the index within the chain to retrieve. Must be between 0 and length (see `ovr_GetTextureSwapChainLength`),
/// or may pass -1 to get the buffer at the `CurrentIndex` location. (Saving a call to `GetTextureSwapChainCurrentIndex`)
///
/// **in** `iid` Specifies the interface ID of the interface pointer to query the buffer for.
///
/// **out** `out_Buffer` Returns the COM interface pointer retrieved.
///
/// Returns an `ovrResult` indicating success or failure. In the case of failure, use
/// `ovr_GetLastErrorInfo` to get more information.
///
/// **Example code, not translated from C**
///
/// ```ignore
/// ovr_GetTextureSwapChainBufferDX(session, chain, 0, IID_ID3D11Texture2D, &d3d11_texture);
/// ovr_GetTextureSwapChainBufferDX(session, chain, 1, IID_PPV_ARGS(&dxgi_resource));
/// ```
///
pub fn ovr_GetTextureSwapChainBufferDX(session: ovrSession, chain: ovrTextureSwapChain, index: c_int, iid: IID, out_Buffer: *mut *mut c_void) -> ovrResult;
/// Create Mirror Texture which is auto-refreshed to mirror Rift contents produced by this application.
///
/// A second call to `ovr_CreateMirrorTextureDX` for a given ovrSession before destroying the first one
/// is not supported and will result in an error return.
///
/// **in** `session` Specifies an `ovrSession` previously returned by `ovr_Create`.
///
/// **in** `d3dPtr` Specifies the application's `D3D11Device` to create resources with or the `D3D12CommandQueue`
/// which must be the same one the application renders to the textures with.
///
/// **in** `desc` Specifies requested texture properties. See notes for more info about texture format.
///
/// **out** `out_MirrorTexture` Returns the created `ovrMirrorTexture`, which will be valid upon a successful return value, else it will be NULL.
/// This texture must be eventually destroyed via `ovr_DestroyMirrorTexture` before destroying the session with `ovr_Destroy`.
///
/// Returns an `ovrResult` indicating success or failure. In the case of failure, use
/// `ovr_GetLastErrorInfo` to get more information.
///
/// **Note**: The texture format provided in the desc should be thought of as the format the compositor will use for the `RenderTargetView` when
/// writing into mirror texture. To that end, it is highly recommended that the application requests a mirror texture format that is
/// in sRGB-space (e.g. `OVR_FORMAT_R8G8B8A8_UNORM_SRGB`) as the compositor does sRGB-correct rendering. If however the application wants
/// to still read the mirror texture as a linear format (e.g. `OVR_FORMAT_R8G8B8A8_UNORM`) and handle the sRGB-to-linear conversion in
/// HLSL code, then it is recommended the application still requests an sRGB format and also use the `ovrTextureMisc_DX_Typeless` flag in the
/// `ovrMirrorTextureDesc`'s Flags field. This will allow the application to bind a `ShaderResourceView` that is a linear format while the
/// compositor continues to treat it as sRGB. Failure to do so will cause the compositor to apply unexpected gamma conversions leading to
/// gamma-curve artifacts.
///<|fim▁hole|> ///
/// **Example code, not translated from C**
///
/// ```ignore
/// ovrMirrorTexture mirrorTexture = nullptr;
/// ovrMirrorTextureDesc mirrorDesc = {};
/// mirrorDesc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;
/// mirrorDesc.Width = mirrorWindowWidth;
/// mirrorDesc.Height = mirrorWindowHeight;
/// ovrResult result = ovr_CreateMirrorTextureDX(session, d3d11Device, &mirrorDesc, &mirrorTexture);
/// [...]
/// // Destroy the texture when done with it.
/// ovr_DestroyMirrorTexture(session, mirrorTexture);
/// mirrorTexture = nullptr;
/// ```
///
/// see `ovr_GetMirrorTextureBufferDX`, `ovr_DestroyMirrorTexture`
///
pub fn ovr_CreateMirrorTextureDX(session: ovrSession, d3dPtr: *mut IUnknown, desc: *const ovrMirrorTextureDesc, out_MirrorTexture: *mut ovrMirrorTexture) -> ovrResult;
/// Get the underlying buffer as any compatible COM interface (similar to `QueryInterface`)
///
/// **in** `session` Specifies an `ovrSession` previously returned by `ovr_Create`.
///
/// **in** `mirrorTexture` Specifies an `ovrMirrorTexture` previously returned by `ovr_CreateMirrorTextureDX`
///
/// **in** `iid` Specifies the interface ID of the interface pointer to query the buffer for.
///
/// **out** `out_Buffer` Returns the COM interface pointer retrieved.
///
/// Returns an `ovrResult` indicating success or failure. In the case of failure, use
/// `ovr_GetLastErrorInfo` to get more information.
///
/// **Example code, not translated from C**
///
/// ```ignore
/// ID3D11Texture2D* d3d11Texture = nullptr;
/// ovr_GetMirrorTextureBufferDX(session, mirrorTexture, IID_PPV_ARGS(&d3d11Texture));
/// d3d11DeviceContext->CopyResource(d3d11TextureBackBuffer, d3d11Texture);
/// d3d11Texture->Release();
/// dxgiSwapChain->Present(0, 0);
/// ```
///
pub fn ovr_GetMirrorTextureBufferDX(session: ovrSession, mirrorTexture: ovrMirrorTexture, iid: IID, out_Buffer: *mut *mut c_void) -> ovrResult;
}<|fim▁end|>
| |
<|file_name|>grocery_test.go<|end_file_name|><|fim▁begin|>package qsim
import (
"math"
"math/rand"
"testing"
"time"<|fim▁hole|>// https://godoc.org/github.com/danslimmon/qsim#System
type GrocerySystem struct {
// The list of all queues in the system.
queues []*Queue
// The list of all processors in the system.
processors []*Processor
// The system's arrival process
arrProc ArrProc
// The system's arrival behavior
arrBeh ArrBeh
SumCustomers int
SumTotalTime int
// Holds the list of Jobs that have finished since the last tick. We
// use this to keep track of the total time spent by customers in the
// system.
FinishedJobs []*Job
NumFinishedJobs int
prevClock int
}
// Init runs before the simulation begins, and its job is to set up the
// queues, processors, and behaviors.
func (sys *GrocerySystem) Init() {
var i int
rand.Seed(time.Now().UnixNano())
// Customers arrive at the checkout line an average of every 30 seconds
// and the intervals between their arrivals are exponentially
// distributed.
sys.arrProc = NewPoissonArrProc(30000.0)
// The time taken to check a customer out is normally distributed, with
// a mean of 60 seconds and a standard deviation of 10 seconds.
procTimeGenerator := func(j *Job) int {
return int(rand.NormFloat64()*10000.0 + 60000.0)
}
// There are 3 registers and 3 queues.
sys.queues = make([]*Queue, 3)
sys.processors = make([]*Processor, 3)
for i = 0; i < 3; i++ {
sys.queues[i] = NewQueue()
sys.queues[i].QueueId = i
sys.processors[i] = NewProcessor(procTimeGenerator)
sys.processors[i].ProcessorId = i
sys.processors[i].AfterFinish(func(p *Processor, j *Job) {
sys.FinishedJobs = append(sys.FinishedJobs, j)
})
}
// When customers are ready to check out, they get in the shortest
// queue. Unless there's an empty register, in which case they go
// right ahead and start checking out.
sys.arrBeh = NewShortestQueueArrBeh(sys.queues, sys.processors, sys.arrProc)
// Customers stay in the queue they originally joined, and each queue
// leads to exactly one register.
NewOneToOneFIFODiscipline(sys.queues, sys.processors)
}
// ArrProc returns the system's arrival process.
func (sys *GrocerySystem) ArrProc() ArrProc {
return sys.arrProc
}
// ArrBeh returns the system's arrival behavior.
func (sys *GrocerySystem) ArrBeh() ArrBeh {
return sys.arrBeh
}
func (sys *GrocerySystem) BeforeFirstTick() {}
// BeforeEvents runs at every tick when a simulation event happens (a
// Job arrives in the system, or a Job finishes processing and leaves
// the system). BeforeEvents is called before any of the events for the tick
// in question have occurred.
//
// In this example, we use BeforeEvents to calculate stats about the
// system.
func (sys *GrocerySystem) BeforeEvents(clock int) {
// Ignore the initial tick.
if clock == 0 {
return
}
// Count the number of customers currently in the system: everyone
// waiting in a queue plus everyone at a register being served.
currentCustomers := 0
currentlyQueued := 0
for _, q := range sys.queues {
currentCustomers += q.Length()
currentlyQueued += q.Length()
}
for _, p := range sys.processors {
if !p.IsIdle() {
currentCustomers++
}
}
// Add the current number of customers in the system to SumCustomers.
// We are going to use this sum to generate the average at the end
// of the simulation, so we need to weight it by the amount of time
// elapsed since the last time we collected data.
sys.SumCustomers += (clock - sys.prevClock) * currentCustomers
sys.prevClock = clock
}
// Processors returns the list of Processors in the system.
func (sys *GrocerySystem) Processors() []*Processor {
return sys.processors
}
// AfterEvents runs at every tick when a simulation event happens, but
// in contrast with BeforeEvents, it runs after all the events for that
// tick have occurred.
//
// In this example we use it to keep track of the average time Jobs
// spend in the system (by calculating total Job-ticks and the number
// of Jobs finished).
func (sys *GrocerySystem) AfterEvents(clock int) {
var j *Job
if len(sys.FinishedJobs) != 0 {
for _, j = range sys.FinishedJobs {
sys.SumTotalTime += clock - j.ArrTime
sys.NumFinishedJobs++
}
sys.FinishedJobs = sys.FinishedJobs[:0]
}
}
// Simulates a small grocery store checkout line:
//
// - Customers arrive at the checkout line by a Poisson process (i.e. the
// distribution of the time between arrivals is exponential). This is
// probably a pretty good guess for real-world grocery stores.
// - There are 3 registers, each with its own queue. Once a customer enters
// a queue, they stay in it until that register is empty.
// - The time taken to check a customer out is drawn from a normal
// distribution.
// - Each tick is a millisecond (we use very small ticks to minimize the
// rounding error inherent in picking integer times from a continuous
// distribution).
func TestGrocery(t *testing.T) {
var finalTick, simTicks int
var avgOccupancy, avgArrivalRate, avgWait, precision float64
// Run the simulation for a week
simTicks = 7 * 86400 * 1000
// Satisfy Little's Law to within 1 part in 1000
precision = .001
sys := &GrocerySystem{}
finalTick = RunSimulation(sys, simTicks)
// Make sure the simulation ran as long as it should have
if finalTick < simTicks {
t.Log("Simulation was supposed to run for", simTicks, "ticks but only ran for", finalTick)
t.Fail()
}
// Make sure Little's Law holds.
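// Little's Law: L = λW, i.e. the average number of customers in the system
// equals the average arrival rate times the average time spent in the system.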
avgOccupancy = float64(sys.SumCustomers) / float64(finalTick)
avgWait = float64(sys.SumTotalTime) / float64(sys.NumFinishedJobs)
avgArrivalRate = float64(sys.NumFinishedJobs) / float64(finalTick)
if math.Abs(avgArrivalRate*avgWait-avgOccupancy) > precision*avgOccupancy {
t.Log("Little's law doesn't hold for GrocerySystem: average occupancy should be near", avgArrivalRate*avgWait, "but it is", avgOccupancy)
t.Fail()
}
}<|fim▁end|>
|
)
// To run a simulation, you have to implement the System interface:
|
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
import six
import logging
from collections import namedtuple
from symsynd.macho.arch import get_cpu_name
from symsynd.utils import parse_addr
from sentry.interfaces.contexts import DeviceContextType
logger = logging.getLogger(__name__)
APPLE_SDK_MAPPING = {
'iPhone OS': 'iOS',
'tvOS': 'tvOS',
'Mac OS': 'macOS',
'watchOS': 'watchOS',
}
KNOWN_DSYM_TYPES = {
'iOS': 'macho',
'tvOS': 'macho',
'macOS': 'macho',
'watchOS': 'macho',
}
AppInfo = namedtuple('AppInfo', ['id', 'version', 'build', 'name'])
def find_apple_crash_report_referenced_images(binary_images, threads):
"""Given some binary images from an apple crash report and a thread
list this returns a list of image UUIDs to load.
"""
image_map = {}
for image in binary_images:
image_map[image['image_addr']] = image['uuid']
to_load = set()
for thread in threads:
if 'backtrace' not in thread:
continue
for frame in thread['backtrace']['contents']:
img_uuid = image_map.get(frame['object_addr'])
if img_uuid is not None:
to_load.add(img_uuid)
return list(to_load)
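# Example (sketch): binary_images=[{'image_addr': '0x1000', 'uuid': 'abc'}] plus a
# thread whose backtrace frames reference object_addr '0x1000' yields ['abc'].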
def find_all_stacktraces(data):
"""Given a data dictionary from an event this returns all
    relevant stacktraces in a list. If a container has a raw_stacktrace
property it's preferred over the processed one.
"""
rv = []
def _probe_for_stacktrace(container):
raw = container.get('raw_stacktrace')
if raw is not None:
rv.append((raw, container))
else:
processed = container.get('stacktrace')
if processed is not None:
rv.append((processed, container))
exc_container = data.get('sentry.interfaces.Exception')
if exc_container:
for exc in exc_container['values']:
_probe_for_stacktrace(exc)
# The legacy stacktrace interface does not support raw stacktraces
stacktrace = data.get('sentry.interfaces.Stacktrace')
if stacktrace:
rv.append((stacktrace, None))
threads = data.get('threads')
if threads:
for thread in threads['values']:
_probe_for_stacktrace(thread)
return rv
def get_sdk_from_event(event):
sdk_info = (event.get('debug_meta') or {}).get('sdk_info')
if sdk_info:
return sdk_info
os = (event.get('contexts') or {}).get('os')
if os and os.get('type') == 'os':
return get_sdk_from_os(os)
def get_sdk_from_os(data):
if 'name' not in data or 'version' not in data:
return
dsym_type = KNOWN_DSYM_TYPES.get(data['name'])
if dsym_type is None:
return
try:
system_version = tuple(int(x) for x in (
data['version'] + '.0' * 3).split('.')[:3])
except ValueError:
return
return {
        'dsym_type': dsym_type,
'sdk_name': data['name'],
'version_major': system_version[0],
'version_minor': system_version[1],
'version_patchlevel': system_version[2],
'build': data.get('build'),
}
def get_sdk_from_apple_system_info(info):
if not info:
return None
try:
# Support newer mapping in old format.
if info['system_name'] in KNOWN_DSYM_TYPES:
sdk_name = info['system_name']
else:
sdk_name = APPLE_SDK_MAPPING[info['system_name']]
system_version = tuple(int(x) for x in (
info['system_version'] + '.0' * 3).split('.')[:3])
except (ValueError, LookupError):
return None
return {
'dsym_type': 'macho',
'sdk_name': sdk_name,
'version_major': system_version[0],
'version_minor': system_version[1],
'version_patchlevel': system_version[2],
}
def cpu_name_from_data(data):
"""Returns the CPU name from the given data if it exists."""
device = DeviceContextType.primary_value_for_data(data)
if device:
arch = device.get('arch')
if isinstance(arch, six.string_types):
return arch
# TODO: kill this here. we want to not support that going forward
unique_cpu_name = None
images = (data.get('debug_meta') or {}).get('images') or []
for img in images:
cpu_name = get_cpu_name(img['cpu_type'],
img['cpu_subtype'])
if unique_cpu_name is None:
unique_cpu_name = cpu_name
elif unique_cpu_name != cpu_name:
unique_cpu_name = None
break<|fim▁hole|>def version_build_from_data(data):
"""Returns release and build string from the given data if it exists."""
app_context = data.get('contexts', {}).get('app', {})
if app_context is not None:
if (app_context.get('app_identifier', None) and
app_context.get('app_version', None) and
app_context.get('app_build', None) and
app_context.get('app_name', None)):
return AppInfo(
app_context.get('app_identifier', None),
app_context.get('app_version', None),
app_context.get('app_build', None),
app_context.get('app_name', None),
)
return None
def rebase_addr(instr_addr, img):
return parse_addr(instr_addr) - parse_addr(img['image_addr'])
def sdk_info_to_sdk_id(sdk_info):
if sdk_info is None:
return None
rv = '%s_%d.%d.%d' % (
sdk_info['sdk_name'],
sdk_info['version_major'],
sdk_info['version_minor'],
sdk_info['version_patchlevel'],
)
build = sdk_info.get('build')
if build is not None:
rv = '%s_%s' % (rv, build)
return rv<|fim▁end|>
|
return unique_cpu_name
|
<|file_name|>elbv2.rs<|end_file_name|><|fim▁begin|>#![cfg(feature = "elbv2")]
extern crate env_logger;
extern crate rusoto_core;
extern crate rusoto_elbv2;
use rusoto_elbv2::{Elb, ElbClient, DescribeLoadBalancersInput};
use rusoto_core::{DefaultCredentialsProvider, Region};
use rusoto_core::default_tls_client;
<|fim▁hole|> let _ = env_logger::init();
let credentials = DefaultCredentialsProvider::new().unwrap();
let client = ElbClient::new(default_tls_client().unwrap(), credentials, Region::UsEast1);
let request = DescribeLoadBalancersInput::default();
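    // A default request (no names or ARNs) lists the load balancers in the region.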
let result = client.describe_load_balancers(&request);
println!("{:#?}", result);
}<|fim▁end|>
|
#[test]
fn should_describe_load_balancers() {
|
<|file_name|>pathFunctions.js<|end_file_name|><|fim▁begin|>function generalAttack(attacker, receiver, weaponBonus){
// Weapon bonus of 1 means the attacker gets a bonus, 0 is neutral, and -1 is a penalty
if(attacker.attack > receiver.defense){
if(weaponBonus == 1){
receiver.health = receiver.health - ((attacker.attack + 2) - receiver.defense);
}else if(weaponBonus == -1){
receiver.health = receiver.health - ((attacker.attack - 2) - receiver.defense);
}else{
receiver.health = receiver.health - (attacker.attack - receiver.defense);
}
}else {
receiver.health -= 2;
}
}
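// Example: generalAttack(hero, troll, 1) deals ((hero.attack + 2) - troll.defense)
// damage when hero.attack exceeds troll.defense; a bonus of 0 compares raw stats,
// and -1 subtracts 2 from the attack. An outmatched attacker always chips 2 health.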
function death(){
console.log("should be dead");
hero.alive = false;
}
// Global variables, we're all going to hell
var healthPlaceholder = 0;
var damageTaken = 0;
var totalDamageDealt = 0;
var totalDamageTaken = 0;
var totalKills = 0;
var totalTurns = 0;
function wolvesAttack() {
var wolf = new Character();
wolf.health = 30;
wolf.attack = 5;
wolf.defense = 5;
while(wolf.health > 0 && hero.health > 0)
{
var chance = Math.floor((Math.random() * 100) + 1);
if(chance < 20){
print_to_path(hero.name + " currently has " + hero.health + " health");
healthPlaceholder = hero.health;
generalAttack(wolf, hero, 0);
damageTaken = healthPlaceholder - hero.health;
totalDamageTaken += damageTaken;
if(chance % 2 == 0){
print_to_path("A wolf runs up and bites "+hero.name+" for " + damageTaken + " damage");
}else{
print_to_path("A wolf claww "+hero.name+" for " + damageTaken + " damage");
}
}
else {
healthPlaceholder = wolf.health;
generalAttack(hero, wolf,0);
totalDamageDealt += (healthPlaceholder - wolf.health);
print_to_path(hero.name+" attacks the wolf!");
print_to_path("The wolf's health falls to "+wolf.health);
}
totalTurns += 1;
}
if(wolf.health <= 0){
console.log("wolf dead");
totalKills += 1;
}
if(hero.health <= 0){
death();
}
}
function banditsAttack() {
var bandit = new Character();
bandit.health = 40;
bandit.attack = 10;
bandit.defense = 5;
while(bandit.health > 0 && hero.health > 0)
{
var chance = Math.floor((Math.random() * 100) + 1);
if(chance < 30){
print_to_path(hero.name + " currently has " + hero.health + " health");
healthPlaceholder = hero.health;
generalAttack(bandit, hero, 0);
damageTaken = healthPlaceholder - hero.health;
totalDamageTaken += damageTaken;
if(chance % 2 == 0){
print_to_path("A clan of bandits knocks "+hero.name+" to the ground dealing " + damageTaken + " Damage");
}else{
print_to_path("A bandit Seaks up from behind stabbs "+hero.name+" dealing " + damageTaken + " Damage");
}
}
else {
healthPlaceholder = bandit.health;
if(hero.weapon == "Sword"){
generalAttack(hero, bandit,1);
}else if(hero.weapon == "Bow"){
generalAttack(hero, bandit,-1);
}else{
generalAttack(hero, bandit,0);
}
totalDamageDealt += (healthPlaceholder - bandit.health);
print_to_path(hero.name+" attacks a bandit!");
print_to_path("The bandit's health falls to "+bandit.health);
}
totalTurns += 1;
}
if(bandit.health <= 0){
console.log("bandit dead");
totalKills += 1;
}
if(hero.health <= 0){
death();
}
}
function trollsAttack() {
var troll = new Character();
troll.health = 50;
troll.attack = 25;
troll.defense = 15;
while(troll.health > 0 && hero.health > 0)
{
var chance = Math.floor((Math.random() * 100) + 1);
if(chance < 35){
print_to_path(hero.name + " currently has " + hero.health + " health");
healthPlaceholder = hero.health;
generalAttack(troll, hero, 0);
damageTaken = healthPlaceholder - hero.health;
totalDamageTaken += damageTaken;
if(chance % 2 == 0){
print_to_path("A troll throws a small axe at "+hero.name+" dealing " + damageTaken + " damage");
}else{
print_to_path("A troll smashes "+hero.name+" with his club for " + damageTaken + " damage");
}
}
else {
healthPlaceholder = troll.health;
generalAttack(hero, troll);<|fim▁hole|> }
totalTurns += 1;
}
if(troll.health <= 0){
console.log("troll dead");
totalKills += 1;
}
if(hero.health<= 0){
death();
}
}
function golemsAttack() {
var golem = new Character();
golem.health = 60;
golem.attack = 10;
golem.defense = 50;
while(golem.health > 0 && hero.health > 0)
{
var chance = Math.floor((Math.random() * 100) + 1);
if(chance < 20){
print_to_path(hero.name + " currently has " + hero.health + " health");
healthPlaceholder = hero.health;
generalAttack(golem, hero, 0);
damageTaken = healthPlaceholder - hero.health;
totalDamageTaken += damageTaken;
if(chance % 2 == 0){
print_to_path("A golem flails its arms, smashing "+hero.name+" into the ground, dealing " + damageTaken + " damage");
}else{
print_to_path("A golem stomps its foot on the ground causing rocks to fall on "+hero.name+" from the nearby mountain. dealing " + damageTaken + " Damage");
}
}
else {
healthPlaceholder = golem.health;
if(hero.weapon == "Mace"){
generalAttack(hero, golem,1);
}else if(hero.weapon == "Sword"){
generalAttack(hero, golem,-1);
}else{
generalAttack(hero, golem,0);
}
totalDamageDealt += (healthPlaceholder - golem.health);
print_to_path(hero.name+" attacks the golem!");
print_to_path("The golem's health falls to "+golem.health);
}
totalTurns += 1;
}
if(golem.health <= 0){
console.log("golem dead");
totalKills += 1;
}
if(hero.health <= 0){
death();
}
}
function dragonAttack() {
// atk 30
var dragon = new Character();
dragon.health = 60;
dragon.attack = 30;
dragon.defense = 30;
while(dragon.health > 0 && hero.health > 0)
{
var chance = Math.floor((Math.random() * 100) + 1);
if(chance < 20){
print_to_path(hero.name + " currently has " + hero.health + " health");
healthPlaceholder = hero.health;
generalAttack(dragon, hero, 0);
damageTaken = healthPlaceholder - hero.health;
totalDamageTaken += damageTaken;
if(chance % 2 == 0){
print_to_path("A dragon breaths green flames at "+hero.name+" which inflicted a burn, dealing " + damageTaken + " damage");
}else{
print_to_path("A dragon wipes its tail along the floor flinging "+hero.name+" into the wall, dealing " + damageTaken + " damage");
}
}
else {
healthPlaceholder = dragon.health;
if(hero.weapon == "Bow"){
generalAttack(hero, dragon,1);
}else if(hero.weapon == "Mace"){
generalAttack(hero, dragon,-1);
}else{
generalAttack(hero, dragon,0);
}
totalDamageDealt += (healthPlaceholder - dragon.health);
print_to_path(hero.name+" attacks the dragon!");
print_to_path("The dragon's health falls to: "+dragon.health);
}
totalTurns += 1;
}
if(dragon.health <= 0){
console.log("dragon dead");
totalKills += 1;
}
if(hero.health <= 0){
death();
}
}
function blackSquirrelAttacks() {
// I has no Tail D:
}
function statistics() {
print_to_path("<b>Score:</b>");
print_to_path("Total kills: " + totalKills + " | " +
"Total turns: " + totalTurns + " | " +
"Total damage dealt: " + totalDamageDealt + " | " +
"Total damage taken: " + totalDamageTaken
);
}<|fim▁end|>
|
totalDamageDealt += (healthPlaceholder - troll.health);
print_to_path(hero.name+" attacks the troll!");
print_to_path("The troll's health falls to "+troll.health);
|
<|file_name|>updater.py<|end_file_name|><|fim▁begin|>import abc
import json
from future.utils import with_metaclass
from collections import defaultdict
import numpy as np
import tensorflow as tf
from dps import cfg
from dps.utils import Parameterized, Param
from dps.utils.tf import build_gradient_train_op, trainable_variables, get_scheduled_values, ScopedFunction
from dps.datasets.base import Dataset
class Updater(with_metaclass(abc.ABCMeta, Parameterized)):
build_saver = True
def __init__(self, env, scope=None, mpi_context=None, **kwargs):
self.scope = scope
self.env = env
self.mpi_context = mpi_context
self._n_experiences = 0
self.step = 0
self._saver = None
@property
def n_experiences(self):
return self._n_experiences
def build_graph(self):
# with tf.name_scope(self.scope or self.__class__.__name__) as scope:
# self._scope = scope
self._build_graph()
global_step = tf.train.get_or_create_global_step()
self.inc_global_step_op = tf.assign_add(global_step, 1)
global_step_input = tf.placeholder(tf.int64, ())
assign_global_step = tf.assign(global_step, global_step_input)
tf.get_default_session().run(assign_global_step, feed_dict={global_step_input: 0})
if self.build_saver:
updater_variables = {v.name: v for v in self.trainable_variables(for_opt=False)}
self.saver = tf.train.Saver(updater_variables)
@abc.abstractmethod
def _build_graph(self):
raise Exception("NotImplemented")
def update(self, batch_size, step):
update_result = self._update(batch_size)
sess = tf.get_default_session()
sess.run(self.inc_global_step_op)
self._n_experiences += batch_size
return update_result
@abc.abstractmethod
def _update(self, batch_size):
raise Exception("NotImplemented")
def evaluate(self, batch_size, step, mode="val"):
assert mode in "val test".split()
return self._evaluate(batch_size, mode)
@abc.abstractmethod
def _evaluate(self, batch_size, mode):
raise Exception("NotImplemented")
def trainable_variables(self, for_opt):
raise Exception("AbstractMethod")
def save(self, filename):
path = self.saver.save(tf.get_default_session(), filename)
return path
def restore(self, path):
self.saver.restore(tf.get_default_session(), path)
class DummyUpdater(Updater):
""" For when you just want to build datasets. Much faster than most normal updaters. """
build_saver = False
def trainable_variables(self, for_opt):
return []
def _build_graph(self):
pass
def _update(self, batch_size):
return dict()
def _evaluate(self, batch_size, mode):
return dict()
    def save(self, filename):
return ''
def restore(self, path):
pass
class DifferentiableUpdater(Updater):
""" Update parameters of a differentiable function `f` using gradient-based algorithm.
Must be used in context of a default graph, session and config.
Parameters
----------
env: gym Env
The environment we're trying to learn about.
f: An instance of ScopedFunction
Accepts a tensor (input), returns a tensor (inference).
"""
optimizer_spec = Param()
lr_schedule = Param()
noise_schedule = Param()
max_grad_norm = Param()
l2_weight = Param(None)
stopping_criteria = "loss,min"
def __init__(self, env, f, **kwargs):
assert hasattr(env, 'build'), (
"Environments used with DifferentiableUpdater must possess "
"a method called `build` which builds returns a dictionary of scalar tensors."
)
self.f = f
super(DifferentiableUpdater, self).__init__(env, **kwargs)
def trainable_variables(self, for_opt):
return trainable_variables(self.f.scope, for_opt=for_opt)
def _build_graph(self):
self.recorded_tensors = self.env.build(self.f)
self.loss = self.recorded_tensors['loss']
tvars = self.trainable_variables(for_opt=True)
if self.l2_weight is not None:
self.loss += self.l2_weight * sum(tf.nn.l2_loss(v) for v in tvars if 'weights' in v.name)
self.train_op, self.train_recorded_tensors = build_gradient_train_op(
self.loss, tvars, self.optimizer_spec, self.lr_schedule,
self.max_grad_norm, self.noise_schedule)
self.recorded_tensors.update(get_scheduled_values())
def _update(self, batch_size):
feed_dict = self.env.data_manager.do_train()
sess = tf.get_default_session()
_, record, train_record = sess.run(
[self.train_op, self.recorded_tensors, self.train_recorded_tensors], feed_dict=feed_dict)
record.update(train_record)
return record
def _evaluate(self, batch_size, mode):
if mode == "val":
feed_dict = self.env.data_manager.do_val()
elif mode == "test":
feed_dict = self.env.data_manager.do_test()
else:
raise Exception("Unknown evaluation mode: {}".format(mode))
sess = tf.get_default_session()
return sess.run(self.recorded_tensors, feed_dict=feed_dict)
class VideoUpdater(Updater):
optimizer_spec = Param()
lr_schedule = Param()
noise_schedule = Param()
max_grad_norm = Param()
grad_n_record_groups = Param(None)
def __init__(self, env, scope=None, **kwargs):
self.obs_shape = env.obs_shape
*other, self.image_height, self.image_width, self.image_depth = self.obs_shape
self.n_frames = other[0] if other else 0
self.network = cfg.build_network(env, self, scope="network")
super(VideoUpdater, self).__init__(env, scope=scope, **kwargs)
def trainable_variables(self, for_opt):
return self.network.trainable_variables(for_opt)
def _update(self, batch_size):
if cfg.get('no_gradient', False):
return dict()
feed_dict = self.data_manager.do_train()
sess = tf.get_default_session()
_, record, train_record = sess.run(
[self.train_op, self.recorded_tensors, self.train_records], feed_dict=feed_dict)
record.update(train_record)
return record
def _evaluate(self, _batch_size, mode):
return self.evaluator.eval(self.recorded_tensors, self.data_manager, mode)
def _build_graph(self):
self.data_manager = DataManager(datasets=self.env.datasets)
self.data_manager.build_graph()
data = self.data_manager.iterator.get_next()
self.inp = data["image"]
network_outputs = self.network(data, self.data_manager.is_training)
network_tensors = network_outputs["tensors"]
network_recorded_tensors = network_outputs["recorded_tensors"]
network_losses = network_outputs["losses"]
self.tensors = network_tensors
self.recorded_tensors = recorded_tensors = dict(global_step=tf.train.get_or_create_global_step())
# --- loss ---
self.loss = tf.constant(0., tf.float32)
for name, tensor in network_losses.items():
self.loss += tensor
recorded_tensors['loss_' + name] = tensor
recorded_tensors['loss'] = self.loss
# --- train op ---
if cfg.do_train and not cfg.get('no_gradient', False):
tvars = self.trainable_variables(for_opt=True)
self.train_op, self.train_records = build_gradient_train_op(
self.loss, tvars, self.optimizer_spec, self.lr_schedule,
self.max_grad_norm, self.noise_schedule, grad_n_record_groups=self.grad_n_record_groups)
sess = tf.get_default_session()
for k, v in getattr(sess, 'scheduled_values', None).items():<|fim▁hole|> else:
recorded_tensors[k] = v
# --- recorded values ---
intersection = recorded_tensors.keys() & network_recorded_tensors.keys()
assert not intersection, "Key sets have non-zero intersection: {}".format(intersection)
recorded_tensors.update(network_recorded_tensors)
intersection = recorded_tensors.keys() & self.network.eval_funcs.keys()
assert not intersection, "Key sets have non-zero intersection: {}".format(intersection)
if self.network.eval_funcs:
eval_funcs = self.network.eval_funcs
else:
eval_funcs = {}
        # For running functions during evaluation that are not implemented in tensorflow
self.evaluator = Evaluator(eval_funcs, network_tensors, self)
class TensorRecorder(ScopedFunction):
_recorded_tensors = None
def record_tensors(self, **kwargs):
for k, v in kwargs.items():
self.recorded_tensors[k] = tf.reduce_mean(tf.to_float(v))
@property
def recorded_tensors(self):
if self._recorded_tensors is None:
self._recorded_tensors = {}
return self._recorded_tensors
class DataManager(Parameterized):
""" Manages a collection of datasets (of type dps/datasets/base.py:Dataset) and iterators accessing them.
Datasets of type Dataset are passed into the constructor. At least one of those must be called
'train', 'val' or 'test'. When build_graph is called, iterators accessing those datasets
are created, and a special string-handle iterator is created. (Note: an iterator is a tensorflow
    operation which is used to stream data from a file stored on disk). The string-handle iterator
can switch between datasets; which dataset it accesses is controlled by the value of a string tensor.
This allows us to build a single model (i.e. a single tensorflow graph), but feed it different data.
For example, we can easily switch from feeding the model training data to feeding it evaluation data.
Note: all datasets collected under a single DataManager instance must return data with the same structure.
(i.e. they should have the same set of Features; see dps/datasets/base.py:Dataset).
Convenience functions do_train, do_val and do_test are provided. When called, they return feed_dicts
    which can be used to set the string handle to the appropriate value for the desired dataset.
Additional iterators can be provided by directly calling `build_iterator`, after `build_graph` has
been called. Indeed, this MUST be done in order to access datasets other than 'train', 'val', 'test',
as `build_graph` does not create iterators for these non-standard datasets.
Example use:
dm = DataManager(
train=MyTrainDataset(),
val=MyValDataset(),
test=MyTestDataset(),
)
input_data = dm.iterator.get_next()
    The form of input_data will depend on the Features of the datasets; most often it will be a dictionary of tensors.
"""
shuffle_buffer_size = Param()
prefetch_buffer_size_in_batches = Param(10)
prefetch_to_device = Param(False)
batch_size = Param()
train_initialized = False
def __init__(self, train=None, val=None, test=None, datasets=None, **kwargs):
self.datasets = {}
self.datasets.update(train=train, val=val, test=test)
        self.datasets.update(datasets or {})
assert (
self.datasets['train'] is not None
or self.datasets['val'] is not None
or self.datasets['test'] is not None), (
'Must provide at least one dataset with name "train", "val", or "test".')
self.iterators_and_handles = {}
def build_graph(self):
tf_dsets = []
train_dataset = self.datasets.get('train', None)
if train_dataset is not None:
train_dset, _, _ = self.build_iterator('train', 'train', self.batch_size, True, self.shuffle_buffer_size)
tf_dsets.append(train_dset)
val_dataset = self.datasets.get('val', None)
if val_dataset is not None:
val_dset, _, _ = self.build_iterator('val', 'val', self.batch_size, False, 0)
tf_dsets.append(val_dset)
test_dataset = self.datasets.get('test', None)
if test_dataset is not None:
test_dset, _, _ = self.build_iterator('test', 'test', self.batch_size, False, 0)
tf_dsets.append(test_dset)
# --- outputs ---
self.handle = tf.placeholder(tf.string, shape=(), name="dataset_handle")
tf_dset = tf_dsets[0]
if cfg.use_gpu and self.prefetch_to_device:
# In tensorflow 1.13 (at least), tf wants to put this op on CPU, not sure why. This results in an error like:
#
# InvalidArgumentError: Attempted create an iterator on device "/job:localhost/replica:0/task:0/device:CPU:0"
# from handle defined on device "/job:localhost/replica:0/task:0/device:GPU:0"
#
# And the error explicitly references IteratorFromStringHandleV2 built here. The reason is that the
# resources that are pointed to by self.handle are all on the GPU, but, unless we are explicit,
# the iterator created from that handle will be on the CPU, which is apparently not allowed.
with tf.device("/gpu:0"):
self.iterator = tf.data.Iterator.from_string_handle(
self.handle, tf_dset.output_types, tf_dset.output_shapes)
else:
self.iterator = tf.data.Iterator.from_string_handle(
self.handle, tf_dset.output_types, tf_dset.output_shapes)
self.is_training = tf.placeholder(tf.bool, shape=(), name="is_training")
def build_iterator(self, name, base_dataset_name, batch_size, repeat, shuffle_buffer_size):
base_dataset = self.datasets[base_dataset_name]
if batch_size is None:
batch_size = self.batch_size
if isinstance(base_dataset, tf.data.Dataset):
dset = base_dataset
elif isinstance(base_dataset, Dataset):
dset = tf.data.TFRecordDataset(base_dataset.filename)
else:
raise Exception("Unknown dataset type: {}.".format(base_dataset))
# --- possibly repeat and/or shuffle --
if repeat and shuffle_buffer_size > 0:
try:
shuffle_and_repeat_func = tf.data.experimental.shuffle_and_repeat
except AttributeError:
shuffle_and_repeat_func = tf.contrib.data.shuffle_and_repeat
shuffle_and_repeat = shuffle_and_repeat_func(self.shuffle_buffer_size)
dset = dset.apply(shuffle_and_repeat)
elif shuffle_buffer_size > 0:
dset = dset.shuffle(self.shuffle_buffer_size)
# --- batch and parse ---
dset = dset.batch(batch_size)
if hasattr(base_dataset, 'parse_example_batch'):
dset = dset.map(base_dataset.parse_example_batch)
# --- possibly prefetch to improve performance ---
if self.prefetch_buffer_size_in_batches > 0:
if cfg.use_gpu and self.prefetch_to_device:
# Suggested here: https://github.com/tensorflow/tensorflow/issues/18947#issuecomment-407778515
dset = (dset.apply(tf.data.experimental.copy_to_device('/gpu:0'))
.prefetch(self.prefetch_buffer_size_in_batches))
else:
dset = dset.prefetch(self.prefetch_buffer_size_in_batches)
# --- finalize ---
iterator = dset.make_initializable_iterator()
sess = tf.get_default_session()
handle = sess.run(iterator.string_handle(name="{}_string_handle".format(name)))
self.iterators_and_handles[name] = (iterator, handle)
return dset, iterator, handle
def do_train(self, is_training=True):
return self.do('train', is_training)
def do_val(self, is_training=False):
return self.do('val', is_training)
def do_test(self, is_training=False):
return self.do('test', is_training)
def do(self, name, is_training=False):
""" Initialize iterator (unless it's the `train` iterator, which is handled slightly differently)
and return a feed_dict populated with the appropriate handle for the requested iterator. """
iterator, handle = self.iterators_and_handles[name]
sess = tf.get_default_session()
if name == 'train':
if not self.train_initialized:
sess.run(iterator.initializer)
self.train_initialized = True
else:
sess.run(iterator.initializer)
return {self.handle: handle, self.is_training: is_training}
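# Sketch of the intended flow (assumes a default session and hypothetical datasets
# and model-building helper; none of these names are defined here):
#
#   dm = DataManager(train=train_dset, val=val_dset,
#                    batch_size=32, shuffle_buffer_size=1000)
#   dm.build_graph()
#   data = dm.iterator.get_next()
#   loss = build_model(data)                 # hypothetical model-building helper
#   sess.run(loss, feed_dict=dm.do_train())  # same graph, training data
#   sess.run(loss, feed_dict=dm.do_val())    # same graph, validation data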
class DummyFunc:
keys_accessed = ""
def __call__(self, fetched, updater):
return {}
class Evaluator:
""" A helper object for running a list of functions on a collection of evaluated tensors.
Parameters
----------
    functions: a dict (name -> function). Each function is assumed to have an attribute `keys_accessed`
listing the keys (into `tensors`) that will be accessed by that function.
tensors: a (possibly nested) dictionary of tensors which will provide the input to the functions
updater: the updater object, passed into the functions at eval time
"""
def __init__(self, functions, tensors, updater):
self._functions = functions
self._tensors = tensors
        # Force evaluation to happen with the default feed_dict
functions["dummy"] = DummyFunc()
self.updater = updater
self.functions = defaultdict(list)
self.feed_dicts = {}
fetch_keys = defaultdict(set)
for name, func in functions.items():
if hasattr(func, 'get_feed_dict'):
feed_dict = func.get_feed_dict(updater)
else:
feed_dict = {}
fd_key = {str(k): str(v) for k, v in feed_dict.items()}
fd_key = json.dumps(fd_key, default=str, indent=4, sort_keys=True)
self.functions[fd_key].append((name, func))
self.feed_dicts[fd_key] = feed_dict
            # Record which tensor keys this function will need fetched.
keys_accessed = func.keys_accessed
if isinstance(keys_accessed, str):
keys_accessed = keys_accessed.split()
for key in keys_accessed:
fetch_keys[fd_key].add(key)
self.fetches = {}
for fd_key, _fetch_keys in fetch_keys.items():
fetches = self.fetches[fd_key] = {}
for key in _fetch_keys:
dst = fetches
src = tensors
subkeys = key.split(":")
for i, _key in enumerate(subkeys):
if i == len(subkeys)-1:
dst[_key] = src[_key]
else:
if _key not in dst:
dst[_key] = dict()
dst = dst[_key]
src = src[_key]
def _check_continue(self, record):
return True
def eval(self, recorded_tensors, data_manager, mode):
final_record = {}
for key, functions in self.functions.items():
if mode == "val":
feed_dict = data_manager.do_val()
elif mode == "test":
feed_dict = data_manager.do_test()
else:
raise Exception("Unknown evaluation mode: {}".format(mode))
extra_feed_dict = self.feed_dicts[key]
feed_dict.update(extra_feed_dict)
sess = tf.get_default_session()
n_points = 0
record = defaultdict(float)
fetches = self.fetches.get(key, {})
while True:
try:
if extra_feed_dict:
_recorded_tensors = dict(batch_size=recorded_tensors['batch_size'])
_record, fetched = sess.run([_recorded_tensors, fetches], feed_dict=feed_dict)
else:
# Only get values from recorded_tensors when using the default feed dict.
_record, fetched = sess.run([recorded_tensors, fetches], feed_dict=feed_dict)
except tf.errors.OutOfRangeError:
break
for name, func in functions:
result = func(fetched, self.updater)
if isinstance(result, dict):
for k, v in result.items():
_record["{}:{}".format(name, k)] = np.mean(v)
else:
_record[name] = np.mean(result)
batch_size = _record['batch_size']
# Assumes that each record entry is an average over the batch
for k, v in _record.items():
record[k] += batch_size * v
n_points += batch_size
do_continue = self._check_continue(_record)
if not do_continue:
break
record = {k: v / n_points for k, v in record.items()}
intersection = record.keys() & final_record.keys() - set(['batch_size'])
assert not intersection, "Key sets have non-zero intersection: {}".format(intersection)
final_record.update(record)
return final_record<|fim▁end|>
|
if k in recorded_tensors:
recorded_tensors['scheduled_' + k] = v
|
<|file_name|>MultiClassSubjectNode.py<|end_file_name|><|fim▁begin|>from SubjectNodes import SubjectNode
class MultiClassSubjectNode(SubjectNode):
def __init__(self):
        SubjectNode.__init__(self, numClasses=1)
def __changeClassificationAttributes__(self,attributesList):<|fim▁hole|><|fim▁end|>
|
pass
|
<|file_name|>paper.test.js<|end_file_name|><|fim▁begin|>/* global describe, it, beforeEach */
import { expect } from '@open-wc/testing';
import * as components from '@lit-any/components-paper-elements';
import * as sinon from 'sinon';
import { pEvent } from '../async-tests';
import render from './helper/render';
describe('paper-elements', () => {
let opts;
describe('textbox', () => {
describe('single line', () => {
beforeEach(() => {
opts = {
type: 'single line',
};
});
it('should mark required when field is required', async () => {
// given
const field = {
required: true,
};
// when
const textbox = components.textbox(opts);
const el = await render(textbox, field);
// then
expect(el.getAttribute('required')).to.be.not.null;
});
it('should render a text textbox', async () => {
// given
const field = {
};
// when
const textbox = components.textbox(opts);
const el = await render(textbox, field);
// then
expect(el.tagName).to.match(/paper-input/i);
expect(el.getAttribute('type')).to.equal('text');
});
it('should set field title as label', async () => {
// given
const field = {
title: 'user name',
};
// when
const textbox = components.textbox(opts);
const el = await render(textbox, field);
// then
expect(el.label).to.equal('user name');
});
it('should be [auto-validate]', async () => {
// given
const field = {
title: 'user name',
};
// when
const textbox = components.textbox(opts);
const el = await render(textbox, field);
// then
expect(el.autoValidate).to.be.true;
});
it('should not set invalid initially when setting null value', async () => {
// given
const field = {
title: 'user name',
required: true,
};
// when
const textbox = components.textbox(opts);
const el = await render(textbox, field, 'id', null);
// then
expect(el.invalid).to.be.false;
});
});
describe('multi line', () => {
beforeEach(() => {
opts = {
type: 'multi line',
};
});
it('should render a textarea', async () => {
// given
const field = {
};
// when
const textbox = components.textbox(opts);
const el = await render(textbox, field);
// then
expect(el.tagName).to.match(/paper-textarea/i);
});
it('should be [auto-validate]', async () => {
// given
const field = {
title: 'user name',
};
// when
const textbox = components.textbox(opts);
const el = await render(textbox, field);
// then
expect(el.autoValidate).to.be.true;
});
it('should be required if field is required', async () => {
// given
const field = {
title: 'user name',
required: true,
};
// when
const textbox = components.textbox(opts);
const el = await render(textbox, field);
// then
expect(el.required).to.be.true;
});
it('should not set invalid initially when setting null value', async () => {
// given
const field = {
title: 'user name',
required: true,
};
// when
const textbox = components.textbox(opts);
const el = await render(textbox, field, 'id', null);
// then
expect(el.invalid).to.be.false;
});
});
});
describe('dropdown', () => {
beforeEach(() => {
opts = {
};
});
it('should be required if field is required', async () => {
// given
const field = {
title: 'user name',
required: true,
};
// when
const dropdown = components.dropdown(opts);
const el = await render(dropdown, field);
<|fim▁hole|> expect(el.required).to.be.true;
});
it('should fire validation when value is set', async () => {
// given
const field = {
title: 'user name',
};
const dropdown = components.dropdown(opts);
const el = await render(dropdown, field);
el.validate = sinon.spy();
const valueChangedToHappen = pEvent(el, 'value-changed');
// when
el.value = 'hello';
// then
await valueChangedToHappen;
expect(el.validate.called).to.be.true;
});
it('should accept items array', async () => {
// given
const field = {
title: 'user name',
};
opts.items = [{}, {}, {}];
// when
const dropdown = components.dropdown(opts);
const el = await render(dropdown, field);
// then
expect(el.querySelectorAll('paper-item').length).to.be.equal(3);
});
it('should accept items as function returning array', async () => {
// given
const field = {
title: 'abc',
};
opts.items = f => f.title.split('').map(l => ({ label: l, value: l }));
// when
const dropdown = components.dropdown(opts);
const el = await render(dropdown, field);
// then
const itemElements = el.querySelectorAll('paper-item');
expect(itemElements[0].value).to.be.equal('a');
expect(itemElements[1].value).to.be.equal('b');
expect(itemElements[2].value).to.be.equal('c');
});
it('should accept items as function returning promise', async () => {
// given
const field = {
title: 'abc',
};
opts.items = f => Promise.resolve(f.title.split('').map(l => ({ label: l, value: l })));
// when
const dropdown = components.dropdown(opts);
const el = await render(dropdown, field);
// then
const itemElements = el.querySelectorAll('paper-item');
expect(itemElements[0].value).to.be.equal('a');
expect(itemElements[1].value).to.be.equal('b');
expect(itemElements[2].value).to.be.equal('c');
});
});
});<|fim▁end|>
|
// then
|
<|file_name|>arguments.rs<|end_file_name|><|fim▁begin|>use thiserror::Error;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Comm {
Type1,
Type2,
None,
}
/// A type of paired token
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Field {
/// Processes (ex: `$(..)`)
Proc,
/// Literal array (ex: `[ 1 .. 3 ]`)
Array,
/// Brace expansion (ex: `{a,b,c,d}`)
Braces,
}
/// The depth of various paired structures
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Levels {
/// Parentheses
parens: u8,
/// Array literals
array: u8,
/// Braces
braces: u8,
}
/// Error with paired tokens
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Error)]
pub enum LevelsError {
/// Unmatched opening parenthesis
#[error("unmatched opening parenthesis")]
UnmatchedParen,
/// Unmatched opening bracket
#[error("unmatched opening bracket")]
UnmatchedBracket,
/// Unmatched opening brace
#[error("unmatched opening brace")]
UnmatchedBrace,
/// Extra closing parenthesis(es)
#[error("extra closing parenthesis(es)")]
ExtraParen,
/// Extra closing bracket(s)
#[error("extra closing bracket(s)")]
ExtraBracket,
/// Extra closing brace(s)
#[error("extra closing brace(s)")]
ExtraBrace,
}
impl Levels {
/// Add a new depth level
pub fn up(&mut self, field: Field) {
let level = match field {
Field::Proc => &mut self.parens,
Field::Array => &mut self.array,
Field::Braces => &mut self.braces,
};
*level += 1;
}
/// Close paired tokens
pub fn down(&mut self, field: Field) -> Result<(), LevelsError> {
let level = match field {
Field::Proc if self.parens > 0 => &mut self.parens,
Field::Array if self.array > 0 => &mut self.array,
Field::Braces if self.braces > 0 => &mut self.braces,
// errors
Field::Proc => return Err(LevelsError::ExtraParen),
Field::Array => return Err(LevelsError::ExtraBracket),
Field::Braces => return Err(LevelsError::ExtraBrace),
};
*level -= 1;
Ok(())
}
/// Check if all parens were matched
pub const fn are_rooted(self) -> bool {
self.parens == 0 && self.array == 0 && self.braces == 0
}
/// Check if all is ok
pub const fn check(self) -> Result<(), LevelsError> {
if self.parens > 0 {
Err(LevelsError::UnmatchedParen)
} else if self.array > 0 {
Err(LevelsError::UnmatchedBracket)
} else if self.braces > 0 {
Err(LevelsError::UnmatchedBrace)
} else {<|fim▁hole|> }
}
/// An efficient `Iterator` structure for splitting arguments
#[derive(Debug)]
pub struct ArgumentSplitter<'a> {
data: &'a str,
/// Number of bytes read
read: usize,
comm: Comm,
quotes: bool,
variab: bool,
array: bool,
method: bool,
}
impl<'a> ArgumentSplitter<'a> {
/// Create a new argument splitter based on the provided data
pub const fn new(data: &'a str) -> ArgumentSplitter<'a> {
ArgumentSplitter {
data,
read: 0,
comm: Comm::None,
quotes: false,
variab: false,
array: false,
method: false,
}
}
fn scan_singlequotes<B: Iterator<Item = u8>>(&mut self, bytes: &mut B) {
while let Some(character) = bytes.next() {
match character {
b'\\' => {
self.read += 2;
let _ = bytes.next();
continue;
}
b'\'' => break,
_ => (),
}
self.read += 1;
}
}
}
impl<'a> Iterator for ArgumentSplitter<'a> {
type Item = &'a str;
fn next(&mut self) -> Option<&'a str> {
let data = self.data.as_bytes();
while let Some(&b' ') = data.get(self.read) {
self.read += 1;
}
let start = self.read;
let mut levels = Levels::default();
let mut bytes = data.iter().skip(self.read).copied();
while let Some(character) = bytes.next() {
match character {
// Skip the next byte.
b'\\' => {
self.read += 2;
let _ = bytes.next();
continue;
}
// Disable COMM_1 and enable COMM_2 + ARRAY.
b'@' => {
self.array = true;
self.comm = Comm::Type2;
self.read += 1;
continue;
}
// Disable COMM_2 and enable COMM_1 + VARIAB.
b'$' => {
self.variab = true;
self.comm = Comm::Type1;
self.read += 1;
continue;
}
b'[' => levels.up(Field::Array),
b']' => {
let _ = levels.down(Field::Array);
}
b'{' => levels.up(Field::Braces),
b'}' => {
// TODO: handle errors here
let _ = levels.down(Field::Braces);
}
b'(' => {
// Disable VARIAB + ARRAY and enable METHOD.
// if variab or array are set
if self.array || self.variab {
self.array = false;
self.variab = false;
self.method = true;
}
levels.up(Field::Proc);
}
b')' => {
self.method = false;
let _ = levels.down(Field::Proc);
}
// Toggle double quote rules.
b'"' => {
self.quotes ^= true;
}
// Loop through characters until single quote rules are completed.
b'\'' if !self.quotes => {
self.scan_singlequotes(&mut bytes);
self.read += 2;
continue;
}
// Break from the loop once a root-level space is found.
b' ' => {
if !self.quotes && !self.method && levels.are_rooted() {
break;
}
}
_ => (),
}
self.read += 1;
// disable COMM_1 and COMM_2
self.comm = Comm::None;
}
if start == self.read {
None
} else {
Some(&self.data[start..self.read])
}
}
}
#[cfg(test)]
mod tests {
use super::*;
fn compare(input: &str, expected: Vec<&str>) {
let arguments = ArgumentSplitter::new(input).collect::<Vec<&str>>();
for (left, right) in expected.iter().zip(arguments.iter()) {
assert_eq!(left, right);
}
assert_eq!(expected.len(), arguments.len());
}
#[test]
fn methods() {
let input = "echo $join(array, ', ') @split(var, ', ')";
let expected = vec!["echo", "$join(array, ', ')", "@split(var, ', ')"];
compare(input, expected);
}
#[test]
fn processes() {
let input = "echo $(echo one $(echo two)) @[echo one @[echo two]]";
let expected = vec!["echo", "$(echo one $(echo two))", "@[echo one @[echo two]]"];
compare(input, expected);
}
#[test]
fn arrays() {
let input = "echo [ one two @[echo three four] five ] [ six seven ]";
let expected = vec!["echo", "[ one two @[echo three four] five ]", "[ six seven ]"];
compare(input, expected);
}
#[test]
fn quotes() {
let input = "echo 'one two \"three four\"' \"five six 'seven eight'\"";
let expected = vec!["echo", "'one two \"three four\"'", "\"five six 'seven eight'\""];
compare(input, expected);
}
}<|fim▁end|>
|
Ok(())
}
|
<|file_name|>02-discord_bot.rs<|end_file_name|><|fim▁begin|>#[macro_use] extern crate serenity;
extern crate gwent_api;
extern crate serde;<|fim▁hole|>
use gwent_api::client::gw_client;
use serenity::client::Client;
use serenity::model::Mentionable;
use std::env;
fn main() {
// Login with a bot token from the environment
let mut client = Client::new(&env::var("DISCORD_TOKEN").expect("token"));
client.with_framework(|f| f
.configure(|c| c.prefix("!"))
.on("card", card));
// start listening for events by starting a single shard
let _ = client.start();
}
command!(card(_context, message) {
let card_name = message.content.clone().split_off(6);
println!("{}", card_name);
match gw_client::Client::get_card_by_name(card_name.as_str()) {
Err(_) => {
let _ = message.reply("Card name is not recognized.");
}
Ok(card) => {
let art_uri = gw_client::Client::get_card_default_art(&card).unwrap().art.thumbnail_image;
let _ = message.channel_id.send_message(|m| m
.content(message.author.mention().as_str())
.embed(|e| e
.title(card.name.as_str())
.image(art_uri.as_str())
.description(card.info.as_str())));
}
}
});<|fim▁end|>
|
extern crate serde_json;
|
<|file_name|>customplot.py<|end_file_name|><|fim▁begin|>#
# First, let us create some utility functions for Plotting
#
def pd_centers(featuresUsed, centers):
from itertools import cycle, islice
from pandas.tools.plotting import parallel_coordinates
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
colNames = list(featuresUsed)
colNames.append('prediction')
# Zip with a column called 'prediction' (index)
Z = [np.append(A, index) for index, A in enumerate(centers)]
# Convert to pandas for plotting
P = pd.DataFrame(Z, columns=colNames)
P['prediction'] = P['prediction'].astype(int)<|fim▁hole|> from itertools import cycle, islice
from pandas.tools.plotting import parallel_coordinates
import matplotlib.pyplot as plt
my_colors = list(islice(cycle(['b', 'r', 'g', 'y', 'k']), None, len(data)))
plt.figure(figsize=(15,8)).gca().axes.set_ylim([-2.5,+2.5])
parallel_coordinates(data, 'prediction', color = my_colors, marker='o')<|fim▁end|>
|
return P
def parallel_plot(data):
|
<|file_name|>fields.py<|end_file_name|><|fim▁begin|>import datetime
import os
from django.db.models.fields.files import FileField
from django.core.files.storage import default_storage
from django.utils.encoding import force_unicode, smart_str
class ModelUploadFileField(FileField):
"""
Makes the upload_to parameter optional by using the name of the model
"""
def __init__(self, verbose_name=None, name=None, storage=None, **kwargs):
for arg in ('primary_key', 'unique'):
if arg in kwargs:
raise TypeError("'%s' is not a valid argument for %s." % (arg, self.__class__))
self.storage = storage or default_storage
upload_to = kwargs.pop('upload_to', '$$MODEL$$')
if not upload_to:
upload_to = '$$MODEL$$'
self.upload_to = upload_to
if callable(upload_to):
self.generate_filename = upload_to
kwargs['max_length'] = kwargs.get('max_length', 100)
super(FileField, self).__init__(verbose_name, name, **kwargs)
def get_directory_name(self):<|fim▁hole|> def generate_filename(self, instance, filename):
if self.upload_to == '$$MODEL$$':
self.upload_to = instance._meta.verbose_name
return os.path.join(self.get_directory_name(), self.get_filename(filename))<|fim▁end|>
|
return os.path.normpath(force_unicode(datetime.datetime.now().strftime(smart_str(self.upload_to))))
|
<|file_name|>rankings_helper.py<|end_file_name|><|fim▁begin|>from typing import List, Optional
from backend.common.consts.ranking_sort_orders import SORT_ORDER_INFO
from backend.common.models.event_details import EventDetails
from backend.common.models.event_ranking import EventRanking
from backend.common.models.event_team_status import WLTRecord
from backend.common.models.keys import TeamKey, Year
from backend.common.models.ranking_sort_order_info import RankingSortOrderInfo
class RankingsHelper:
NO_RECORD_YEARS = {2010, 2015, 2021}
QUAL_AVERAGE_YEARS = {2015}
@classmethod<|fim▁hole|> rank: int,
team_key: TeamKey,
wins: int,
losses: int,
ties: int,
qual_average: Optional[float],
matches_played: int,
dq: int,
sort_orders: List[float],
) -> EventRanking:
record: Optional[WLTRecord] = None
if year not in cls.NO_RECORD_YEARS:
record = {
"wins": int(wins),
"losses": int(losses),
"ties": int(ties),
}
if year not in cls.QUAL_AVERAGE_YEARS:
qual_average = None
sort_orders_sanitized = []
for so in sort_orders:
try:
sort_orders_sanitized.append(float(so))
except Exception:
sort_orders_sanitized.append(0.0)
return {
"rank": int(rank),
"team_key": team_key,
"record": record, # None if record doesn't affect rank (e.g. 2010, 2015)
"qual_average": qual_average, # None if qual_average doesn't affect rank (all years except 2015)
"matches_played": int(matches_played),
"dq": int(dq),
"sort_orders": sort_orders_sanitized,
}
@classmethod
def get_sort_order_info(
cls, event_details: EventDetails
) -> Optional[List[RankingSortOrderInfo]]:
return SORT_ORDER_INFO.get(event_details.game_year)<|fim▁end|>
|
def build_ranking(
cls,
year: Year,
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2020 DDN. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.<|fim▁hole|><|fim▁end|>
|
pub mod db;
pub mod error;
|
<|file_name|>RoleFuncService.java<|end_file_name|><|fim▁begin|>package com.yh.admin.roles.service;
/**<|fim▁hole|> * @version 1.0, 16/08/23
*/
public class RoleFuncService {
}<|fim▁end|>
|
*
* @author zhangqp
|
<|file_name|>setSolidaritySettings.ts<|end_file_name|><|fim▁begin|>import { SolidarityRunContext, SolidaritySettings } from '../../types'
module.exports = (settings: SolidaritySettings, context: SolidarityRunContext): void => {
const { filesystem } = context
if (settings.requirements) {
// Write file
filesystem.write('.solidarity', JSON.stringify(settings, null, 2), { atomic: true })
} else {<|fim▁hole|>}<|fim▁end|>
|
throw 'You must have a requirements key to be a valid solidarity file'
}
|
<|file_name|>safe-string.js<|end_file_name|><|fim▁begin|>var colors = require('../safe');
console.log(colors.yellow('First some yellow text'));
console.log(colors.yellow.underline('Underline that text'));
console.log(colors.red.bold('Make it bold and red'));
console.log(colors.rainbow('Double Raindows All Day Long'));
console.log(colors.trap('Drop the bass'));
console.log(colors.rainbow(colors.trap('DROP THE RAINBOW BASS')));
// styles not widely supported
console.log(colors.bold.italic.underline.red('Chains are also cool.'));
// styles not widely supported
console.log(colors.green('So ') + colors.underline('are') + ' '
+ colors.inverse('inverse') + colors.yellow.bold(' styles! '));
console.log(colors.zebra('Zebras are so fun!'));
console.log('This is ' + colors.strikethrough('not') + ' fun.');
console.log(colors.black.bgWhite('Background color attack!'));
console.log(colors.random('Use random styles on everything!'));
console.log(colors.america('America, Heck Yeah!'));
console.log(colors.brightCyan('Blindingly ') + colors.brightRed('bright? ') + colors.brightYellow('Why ') + colors.brightGreen('not?!'));
console.log('Setting themes is useful');
//
// Custom themes
//
// console.log('Generic logging theme as JSON'.green.bold.underline);
// Load theme with JSON literal
colors.setTheme({
silly: 'rainbow',
input: 'blue',
verbose: 'cyan',
prompt: 'grey',
info: 'green',
data: 'grey',
help: 'cyan',
warn: 'yellow',
debug: 'blue',
error: 'red',
});
// outputs red text
console.log(colors.error('this is an error'));
// outputs yellow text
console.log(colors.warn('this is a warning'));
// outputs blue text
console.log(colors.input('this is an input'));<|fim▁hole|>
// console.log('Generic logging theme as file'.green.bold.underline);
// Load a theme from file
colors.setTheme(require(__dirname + '/../themes/generic-logging.js'));
// outputs red text
console.log(colors.error('this is an error'));
// outputs yellow text
console.log(colors.warn('this is a warning'));
// outputs grey text
console.log(colors.input('this is an input'));
// console.log(colors.zalgo("Don't summon him"))<|fim▁end|>
| |
<|file_name|>[email protected]<|end_file_name|><|fim▁begin|>(function(window, factory) {
if (typeof define === 'function' && define.amd) {
define([], function() {
return factory();
});
} else if (typeof module === 'object' && typeof module.exports === 'object') {
module.exports = factory();
} else {
(window.LocaleData || (window.LocaleData = {}))['de_LU@euro'] = factory();
}
}(typeof window !== "undefined" ? window : this, function() {
return {
"LC_ADDRESS": {
"postal_fmt": "%f%N%a%N%d%N%b%N%s %h %e %r%N%z %T%N%c%N",
"country_name": "Luxemburg",
"country_post": null,
"country_ab2": "LU",
"country_ab3": "LUX",
"country_num": 442,
"country_car": "L",
"country_isbn": null,
"lang_name": "Deutsch",
"lang_ab": "de",
"lang_term": "deu",
"lang_lib": "ger"
},
"LC_MEASUREMENT": {
"measurement": 1
},
"LC_MESSAGES": {
"yesexpr": "^[+1jJyY]",
"noexpr": "^[-0nN]",
"yesstr": "ja",
"nostr": "nein"
},
"LC_MONETARY": {
"currency_symbol": "\u20ac",
"mon_decimal_point": ",",
"mon_thousands_sep": ".",
"mon_grouping": [
3,
3
],
"positive_sign": "",
"negative_sign": "-",
"frac_digits": 2,
"p_cs_precedes": 1,
"p_sep_by_space": 1,
"n_cs_precedes": 1,
"n_sep_by_space": 1,
"p_sign_posn": 4,
"n_sign_posn": 4,
"int_curr_symbol": "EUR ",
"int_frac_digits": 2,
"int_p_cs_precedes": null,
"int_p_sep_by_space": null,
"int_n_cs_precedes": null,
"int_n_sep_by_space": null,
"int_p_sign_posn": null,
"int_n_sign_posn": null
},
"LC_NAME": {
"name_fmt": "%d%t%g%t%m%t%f",
"name_gen": null,<|fim▁hole|> "name_mr": null,
"name_mrs": null,
"name_miss": null,
"name_ms": null
},
"LC_NUMERIC": {
"decimal_point": ",",
"thousands_sep": ".",
"grouping": [
3,
3
]
},
"LC_PAPER": {
"height": 297,
"width": 210
},
"LC_TELEPHONE": {
"tel_int_fmt": "+%c %a %l",
"tel_dom_fmt": null,
"int_select": "00",
"int_prefix": "352"
},
"LC_TIME": {
"date_fmt": "%a %b %e %H:%M:%S %Z %Y",
"abday": [
"So",
"Mo",
"Di",
"Mi",
"Do",
"Fr",
"Sa"
],
"day": [
"Sonntag",
"Montag",
"Dienstag",
"Mittwoch",
"Donnerstag",
"Freitag",
"Samstag"
],
"week": [
7,
19971130,
4
],
"abmon": [
"Jan",
"Feb",
"M\u00e4r",
"Apr",
"Mai",
"Jun",
"Jul",
"Aug",
"Sep",
"Okt",
"Nov",
"Dez"
],
"mon": [
"Januar",
"Februar",
"M\u00e4rz",
"April",
"Mai",
"Juni",
"Juli",
"August",
"September",
"Oktober",
"November",
"Dezember"
],
"d_t_fmt": "%a %d %b %Y %T %Z",
"d_fmt": "%Y-%m-%d",
"t_fmt": "%T",
"am_pm": [
"",
""
],
"t_fmt_ampm": "",
"era": null,
"era_year": null,
"era_d_t_fmt": null,
"era_d_fmt": null,
"era_t_fmt": null,
"alt_digits": null,
"first_weekday": 2,
"first_workday": null,
"cal_direction": null,
"timezone": null
}
};
}));<|fim▁end|>
| |
<|file_name|>racelib.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# coding:utf-8
# Author: ASU --<[email protected]>
# Purpose: Concurrent utility classes (name coming from RACEconditionLIBrary)
# Created: 11/26/2015
import time
__author__ = 'ASU'
class ContextLock():
def __init__(self, lock):
"""
:param lock:
:type lock: thread.LockType
"""
self.__lock = lock
def __enter__(self):
self.__lock.acquire()
def __exit__(self, exc_type, exc_value, traceback):
self.__lock.release()<|fim▁hole|>
class TimePerformanceLogger:
"""
Used to measure the performance of a code block run within a With Statement Context Manager
"""
def __init__(self, logger):
"""
:param logger: logger function that receives the elapsed time in seconds
:type logger: (float) -> None
"""
self._logger = logger
def __enter__(self):
self._t1 = time.time()
def __exit__(self, exc_type, exc_value, traceback):
self._logger(time.time() - self._t1)
if exc_type:
return False
return True<|fim▁end|>
|
return False
|
<|file_name|>route.go<|end_file_name|><|fim▁begin|>/**
* Copyright 2014 @ z3q.net.
* name :
* author : jarryliu
* date : 2014-02-05 21:53
* description :
* history :
*/
package ucenter
import (
"github.com/jrsix/gof/web"
"github.com/jrsix/gof/web/mvc"
"go2o/src/app/util"
)
var (
routes *mvc.Route = mvc.NewRoute(nil)
moRoutes *mvc.Route = mvc.NewRoute(nil)
)
func GetRouter() *mvc.Route {
return moRoutes
}
// Handle incoming requests
func Handle(ctx *web.Context) {
switch util.GetBrownerDevice(ctx) {
default:
case util.DevicePC:
ctx.Items["device_view_dir"] = "pc"
routes.Handle(ctx)
case util.DeviceTouchPad, util.DeviceMobile:
ctx.Items["device_view_dir"] = "touchpad"
moRoutes.Handle(ctx)
case util.DeviceAppEmbed:
ctx.Items["device_view_dir"] = "app_embed"
routes.Handle(ctx)
}
}
func registerRoutes() {
mc := &mainC{}
bc := &basicC{}
oc := &orderC{}
ac := &accountC{}
lc := &loginC{}
<|fim▁hole|> routes.Register("main", mc)
routes.Register("basic", bc)
routes.Register("order", oc)
routes.Register("account", ac)
routes.Register("login", lc)
routes.Add("/logout", mc.Logout)
routes.Add("/", mc.Index)
routes.Add("/static/*", util.HttpStaticFileHandler)
// 注册触屏版路由
moRoutes.Register("main", mc)
moRoutes.Register("basic", bc)
moRoutes.Register("order", oc)
moRoutes.Register("account", ac)
moRoutes.Register("login", lc)
moRoutes.Add("/logout", mc.Logout)
moRoutes.Add("/", mc.Index)
// 为了使用IconFont
moRoutes.Add("/static/*", util.HttpStaticFileHandler)
}
func init() {
registerRoutes()
}<|fim▁end|>
| |
<|file_name|>back.rs<|end_file_name|><|fim▁begin|>//! Back-end module for the task queue. The back-end is running
//! on a separate thread. All it does is listen to a command
//! channel and start new tasks when the time comes.
use std::sync::atomic::*;
use std::sync::{Arc, Mutex};
use std::sync::mpsc::{Sender, Receiver, channel};
use std::collections::HashMap;
use std::thread;
use bran;
use pulse::*;
use deque;
use num_cpus;
use {Wait, Schedule, FnBox};
use super::worker;
struct Inner {
index: usize,
stealers: HashMap<usize, deque::Stealer<ReadyTask>>,
workers: HashMap<usize, Sender<worker::Command>>,
joins: Vec<thread::JoinHandle<()>>
}
/// Task queue back-end.
pub struct Backend {
active: AtomicBool,
global_queue: Mutex<deque::Worker<ReadyTask>>,
workers: Mutex<Inner>,
pool: bran::StackPool
}
/// A ready task
pub struct ReadyTask(bran::Handle);
impl ReadyTask {
pub fn run(self) {
use bran::fiber::State;
let ReadyTask(task) = self;
match task.run() {
State::Pending(signal) => {
worker::requeue(task, signal);
}
State::PendingTimeout(_, _) => {
panic!("Timeouts are not supported")
}
State::Finished | State::Panicked => ()
}
}
}
impl Backend {
/// Create a new back-end.
pub fn new() -> Arc<Backend> {
let buffer = deque::BufferPool::new();
let (worker, stealer) = buffer.deque();
let mut map = HashMap::new();
map.insert(0, stealer);
let back = Arc::new(Backend {
active: AtomicBool::new(false),
global_queue: Mutex::new(worker),
workers: Mutex::new(Inner {
index: 1,
stealers: map,
workers: HashMap::new(),
joins: Vec::new()
}),
pool: bran::StackPool::new()
});
for _ in 0..num_cpus::get() {
worker::Worker::new(back.clone()).start();
}
back
}
/// Start a task on the global work queue
fn start_on_global_queue(&self, rt: ReadyTask) {
let guard = self.global_queue.lock().unwrap();
guard.push(rt);
}
/// Start a task that will run once all the `Handle`s have
/// been completed.
pub fn start(back: Arc<Backend>, task: Box<FnBox+Send>, mut after: Vec<Signal>) {
// Create the wait signal if needed
let signal = if after.len() == 0 {
Signal::pulsed()
} else if after.len() == 1 {
after.pop().unwrap()
} else {
Barrier::new(&after).signal()
};
signal.callback(move || {
if !back.active.load(Ordering::SeqCst) {
let fiber = bran::fiber::Fiber::spawn_with(move || {
task.call_box(&mut worker::FiberSchedule)
}, back.pool.clone());
let try_thread = worker::start(ReadyTask(fiber));
match try_thread {
Ok(b) => b,
Err(rt) => {
back.start_on_global_queue(rt);
true
}
};
}
});
}
/// Start a task that will run once all the `Handle`s have
/// been completed.
pub fn enqueue(back: Arc<Backend>, task: bran::Handle, after: Signal) {
after.callback(move || {
if !back.active.load(Ordering::SeqCst) {
let try_thread = worker::start(ReadyTask(task));
match try_thread {
Ok(b) => b,
Err(rt) => {
back.start_on_global_queue(rt);
true
}
};
}
});
}
/// Kill the backend, wait until the condition is satisfied.
pub fn exit(&self, wait: Wait) {
// read the current active count, OR in the BLOCK
// flag if needed for the wait
match wait {
Wait::None | Wait::Active => {
self.active.store(true, Ordering::SeqCst);
}
Wait::Pending => ()
};
let mut guard = self.workers.lock().unwrap();
for (_, send) in guard.workers.iter() {
let _ = send.send(worker::Command::Exit);
}
while let Some(join) = guard.joins.pop() {
join.join().unwrap();
}
}
/// Create a new deque
pub fn new_deque(&self) -> (usize,
deque::Worker<ReadyTask>,
Receiver<worker::Command>) {
let buffer = deque::BufferPool::new();
let (worker, stealer) = buffer.deque();
let (send, recv) = channel();
let mut guard = self.workers.lock().unwrap();
let index = guard.index;<|fim▁hole|> }
for (_, workers) in guard.workers.iter() {
workers.send(worker::Command::Add(index, stealer.clone())).unwrap();
}
guard.stealers.insert(index, stealer);
guard.workers.insert(index, send);
(index, worker, recv)
}
///
pub fn register_worker(&self, handle: thread::JoinHandle<()>) {
let mut guard = self.workers.lock().unwrap();
guard.joins.push(handle);
}
}
impl<'a> Schedule for Arc<Backend> {
fn add_task(&mut self, task: Box<FnBox+Send>, after: Vec<Signal>) {
Backend::start(self.clone(), task, after)
}
}<|fim▁end|>
|
guard.index += 1;
for (&key, stealer) in guard.stealers.iter() {
send.send(worker::Command::Add(key, stealer.clone())).unwrap();
|
<|file_name|>c2_w2.py<|end_file_name|><|fim▁begin|>########################### 1. Import required modules
import cherrypy
import os
########################### 2. Configure local and remote directories
# Determine the directory containing this script; on Windows the path ends with a backslash
_curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
# Set the data storage directories for cloud and local execution
if 'OPENSHIFT_REPO_DIR' in os.environ.keys():
# the program is running in the cloud
download_root_dir = os.environ['OPENSHIFT_DATA_DIR']
data_dir = os.environ['OPENSHIFT_DATA_DIR']
else:
# the program is running locally
download_root_dir = _curdir + "/local_data/"
data_dir = _curdir + "/local_data/"
########################### 3. Create the main application object
class HelloWorld(object):
_cp_config = {
# if there is no utf-8 encoding, no Chinese input available
'tools.encode.encoding': 'utf-8',
'tools.sessions.on' : True,
'tools.sessions.storage_type' : 'file',
'tools.sessions.locking' : 'explicit',
'tools.sessions.storage_path' : data_dir+'/tmp',
# session timeout is 60 minutes
'tools.sessions.timeout' : 60
}
@cherrypy.expose
def fileuploadform(self):
return '''<h1>file upload</h1>
<script src="/static/jquery.js" type="text/javascript"></script>
<script src="/static/axuploader.js" type="text/javascript"></script>
<script>
$(document).ready(function(){
$('.prova').axuploader({url:'/fileaxupload', allowExt:['jpg','png','gif','7z','pdf','zip','flv','stl','txt'],
finish:function(x,files)
{
alert('All files have been uploaded: '+files);
},
enable:true,
remotePath:function(){
return 'downloads/';
}
});
});
</script>
<div class="prova"></div>
<input type="button" onclick="$('.prova').axuploader('disable')" value="asd" />
<input type="button" onclick="$('.prova').axuploader('enable')" value="ok" />
</section></body></html>
'''
@cherrypy.expose
def brythonuploadform(self):
return '''<h1>file upload</h1>
<script type="text/javascript" src="/static/Brython2.0.0-20140209-164925/brython.js"></script>
<script type="text/javascript" >
function getradio(tagname){
var radios = document.getElementsByName(tagname);
for (var i = 0, length = radios.length; i < length; i++) {
if (radios[i].checked) {
// do whatever you want with the checked radio
return radios[i].value;
// only one radio can be logically checked, don't check the rest
break;
}
}
}
function run_js(){
var cons = document.getElementById("console")
var jscode = cons.value
var t0 = (new Date()).getTime()
eval(jscode)
var t1 = (new Date()).getTime()
console.log("Javascript code run in "+(t1-t0)+" ms")
}
</script>
<script type="text/python3" src="/static/editor.py"></script><|fim▁hole|>
<script type="text/python3">
from browser import doc
overwrite = 0
# add delete_program 1/7, seven steps to complete the ajax task, the last step is to add delete_program function on server
# delete1 and delete2 parameters are also added into save_program function.
delete1 = 0
delete2 = 0
def set_debug(ev):
if ev.target.checked:
__BRYTHON__.debug = 1
else:
__BRYTHON__.debug = 0
def set_overwrite(ev):
global overwrite
if ev.target.checked:
overwrite = 1
else:
overwrite = 0
# add delete_program 2/7, client side add set_delete1 and set_delete2 functions.
def set_delete1(ev):
global delete1
if ev.target.checked:
delete1 = 1
else:
delete1 = 0
def set_delete2(ev):
global delete2
if ev.target.checked:
delete2 = 1
else:
delete2 = 0
#### ajax process
from browser import ajax,doc
def on_complete(req):
print(req.readyState)
print('status',req.status)
if req.status==200 or req.status==0:
# show request text on id=result division
doc["result"].html = req.text
else:
doc["result"].html = "error "+req.text
def err_msg():
doc["result"].html = "server didn't reply after %s seconds" %timeout
timeout = 4
def go(url):
req = ajax.ajax()
req.bind('complete', on_complete)
req.set_timeout(timeout, err_msg)
req.open('GET', url, True)
req.send()
def post(url):
req = ajax.ajax()
req.bind('complete', on_complete)
req.set_timeout(timeout, err_msg)
req.open('POST', url, True)
req.set_header('content-type','application/x-www-form-urlencoded')
# doc["filename"].value is the id=filename input field's value
# editor.getValue() is the content on editor, need to send dictionary format data
# while post url, need to save editor content into local_storage to use the previous load javascripts
storage["py_src"] = editor.getValue()
# add delete_program 3/7, two parameters added, this will also affect save_program function on server.
req.send({'filename':doc["filename"].value, 'editor':editor.getValue(), 'overwrite':overwrite, 'delete1':delete1, 'delete2':delete2})
# get program from server
def get_prog(ev):
# ajax can only read data from server
_name = '/brython_programs/'+doc["filename"].value
try:
editor.setValue(open(_name, encoding="utf-8").read())
doc["result"].html = doc["filename"].value+" loaded!"
except:
doc["result"].html = "can not get "+doc["filename"].value+"!"
editor.scrollToRow(0)
editor.gotoLine(0)
reset_theme()
def get_radio(ev):
from javascript import JSObject
filename = JSObject(getradio)("filename")
# ajax can only read data from server
doc["filename"].value = filename
_name = '/brython_programs/'+filename
editor.setValue(open(_name, encoding="utf-8").read())
doc["result"].html = filename+" loaded!"
editor.scrollToRow(0)
editor.gotoLine(0)
reset_theme()
# bindings
doc['run_js'].bind('click',run_js)
doc['set_debug'].bind('change',set_debug)
doc['set_overwrite'].bind('change',set_overwrite)
# add delete_program 4/7, two associated binds added
doc['set_delete1'].bind('change',set_delete1)
doc['set_delete2'].bind('change',set_delete2)
# next functions are defined in editor.py
doc['show_js'].bind('click',show_js)
doc['run'].bind('click',run)
doc['show_console'].bind('click',show_console)
# get_prog and get _radio (working)
doc['get_prog'].bind('click', get_prog)
doc['get_radio'].bind('click', get_radio)
# reset_the_src and clear_console (working)
doc['reset_the_src'].bind('click',reset_the_src)
doc['clear_console'].bind('click',clear_console)
# clear_canvas and clear_src
doc['clear_canvas'].bind('click',clear_canvas)
doc['clear_src'].bind('click',clear_src)
# only admin can save program to server
doc['save_program'].bind('click',lambda ev:post('/save_program'))
# add delete_program 5/7, delete_program button bind to execute delete_program on server.
doc['delete_program'].bind('click',lambda ev:post('/delete_program'))
</script>
<script type="text/javascript">
window.onload=brython({debug:1, cache:'version'});
</script>
<div class="prova"></div>
<input type="button" onclick="$('.prova').axuploader('disable')" value="asd" />
<input type="button" onclick="$('.prova').axuploader('enable')" value="ok" />
</section></body></html>
'''
@cherrypy.expose
def fileaxupload(self, *args, **kwargs):
filename = kwargs["ax-file-name"]
flag = kwargs["start"]
# Finally found the bug: values taken from kwargs[] are strings, not numbers, so the earlier test flag == 0 was wrong
if flag == "0":
# sending from byte 0 means a new file should be opened
file = open(download_root_dir+"downloads/"+filename, "wb")
else:
file = open(download_root_dir+"downloads/"+filename, "ab")
file.write(cherrypy.request.body.read())
file.close()
return "files uploaded!"
@cherrypy.expose
def index(self, input1=None, input2=None):
return "Hello world!"+str(input1)+_curdir
@cherrypy.expose
def inputform(self, input1=None, input2=None):
return "input form"+str(input1)
#index.exposed = True
########################### 4. Arrange the startup configuration
# Configure static directories and files relative to the script directory
application_conf = {'/static':{
'tools.staticdir.on': True,
'tools.staticdir.dir': _curdir+"/static"},
'/downloads':{
'tools.staticdir.on': True,
'tools.staticdir.dir': data_dir+"/downloads"}
}
########################### 5. Start the program locally or in the cloud
# Instantiate the HelloWorld() class
root = HelloWorld()
# If 'OPENSHIFT_REPO_DIR' exists in the environment, the program is running on OpenShift
if 'OPENSHIFT_REPO_DIR' in os.environ.keys():
# cloud startup
application = cherrypy.Application(root, config = application_conf)
else:
# local startup
'''
cherrypy.server.socket_port = 8083
cherrypy.server.socket_host = '127.0.0.1'
'''
cherrypy.quickstart(root, config = application_conf)<|fim▁end|>
| |
<|file_name|>Vector2f.java<|end_file_name|><|fim▁begin|>package com.base.engine.math;
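/**
 * Simple 2D float vector with basic arithmetic helpers; the arithmetic
 * methods return new instances (descriptive comment added for clarity).
 */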
public class Vector2f
{
private float x, y;
public Vector2f(float x, float y)
{
this.x = x;
this.y = y;
}
public Vector2f normalized()
{
float len = length();
float x_ = x / len;
float y_ = y / len;
return new Vector2f(x_, y_);
}
public float length()
{
return (float)Math.sqrt(x * x + y * y);
}
public Vector2f add(Vector2f r)
{
return new Vector2f(x + r.getX(), y + r.getY());
}
public Vector2f add(float r)
{
return new Vector2f(x + r, y + r);
}
public Vector2f sub(Vector2f r)
{
return new Vector2f(x - r.getX(), y - r.getY());
}
public Vector2f sub(float r)
{
return new Vector2f(x - r, y - r);
}<|fim▁hole|> {
return new Vector2f(x * r.getX(), y * r.getY());
}
public Vector2f mul(float r)
{
return new Vector2f(x * r, y * r);
}
public Vector2f div(Vector2f r)
{
return new Vector2f(x / r.getX(), y / r.getY());
}
public Vector2f div(float r)
{
return new Vector2f(x / r, y / r);
}
public Vector2f abs()
{
return new Vector2f(Math.abs(x), Math.abs(y));
}
@Override
public String toString()
{
return "(" + x + ", " + y + ")";
}
public float getX()
{
return this.x;
}
public void setX(float x)
{
this.x = x;
}
public float getY()
{
return this.y;
}
public void setY(float y)
{
this.y = y;
}
}<|fim▁end|>
|
public Vector2f mul(Vector2f r)
|
<|file_name|>config.go<|end_file_name|><|fim▁begin|>package main
import (
"encoding/json"
"flag"
"fmt"
"io/ioutil"
"log"
"net/url"
"os"
"os/user"
"path/filepath"
"strings"
"crypto/tls"
"net/http"
)
type configCommand struct {
config *ClientConfig
}
// skipVerifyHTTPClient returns an *http.Client with InsecureSkipVerify set
// to true for its TLS config. This allows self-signed SSL certificates.
func skipVerifyHTTPClient(skipVerify bool) *http.Client {
if skipVerify {
tlsConfig := &tls.Config{InsecureSkipVerify: true}
transport := &http.Transport{TLSClientConfig: tlsConfig}
return &http.Client{Transport: transport}
}
return http.DefaultClient
}
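// Illustrative sketch (not part of the original file): the client pairs
// with the saved config, so self-signed test servers are reachable when
// -skip-verify was set.
//
//	client := skipVerifyHTTPClient(cfg.SkipVerify)
//	resp, err := client.Get(cfg.ServerURL)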
func (cmd *configCommand) Run(args []string) error {
if len(args) < 1 {
cmd.Usage()
os.Exit(1)
}
var config *ClientConfig
if cfg, err := LoadClientConfig(); err == nil {
config = cfg
} else {
config = new(ClientConfig)
}
var run func(*ClientConfig, []string) error
switch strings.ToLower(args[0]) {
case "set":
run = setCmd
case "print":
printConfig()
return nil
default:
cmd.Usage()
os.Exit(1)
}<|fim▁hole|>}
func printConfig() {
path, err := clientConfigPath()
if err != nil {
log.Fatal(err)
}
cfgData, err := ioutil.ReadFile(path)
if err != nil {
log.Fatal(err)
}
fmt.Println(string(cfgData))
}
func (cmd *configCommand) Usage() error {
const help = `
mdmctl config print
mdmctl config set -h
`
fmt.Println(help)
return nil
}
func setCmd(cfg *ClientConfig, args []string) error {
flagset := flag.NewFlagSet("set", flag.ExitOnError)
var (
flToken = flagset.String("api-token", "", "api token to connect to micromdm server")
flServerURL = flagset.String("server-url", "", "server url of micromdm server")
flSkipVerify = flagset.Bool("skip-verify", false, "skip verification of server certificate (insecure)")
)
flagset.Usage = usageFor(flagset, "mdmctl config set [flags]")
if err := flagset.Parse(args); err != nil {
return err
}
if *flToken != "" {
cfg.APIToken = *flToken
}
if *flServerURL != "" {
if !strings.HasPrefix(*flServerURL, "http") ||
!strings.HasPrefix(*flServerURL, "https") {
*flServerURL = "https://" + *flServerURL
}
u, err := url.Parse(*flServerURL)
if err != nil {
return err
}
u.Scheme = "https"
u.Path = "/"
cfg.ServerURL = u.String()
}
cfg.SkipVerify = *flSkipVerify
return SaveClientConfig(cfg)
}
func clientConfigPath() (string, error) {
usr, err := user.Current()
if err != nil {
return "", err
}
return filepath.Join(usr.HomeDir, ".micromdm", "default.json"), err
}
func SaveClientConfig(cfg *ClientConfig) error {
configPath, err := clientConfigPath()
if err != nil {
return err
}
if _, err := os.Stat(filepath.Dir(configPath)); os.IsNotExist(err) {
if err := os.MkdirAll(filepath.Dir(configPath), 0777); err != nil {
return err
}
}
f, err := os.OpenFile(configPath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)
if err != nil {
return err
}
defer f.Close()
if cfg == nil {
cfg = new(ClientConfig)
}
enc := json.NewEncoder(f)
enc.SetIndent("", " ")
return enc.Encode(cfg)
}
func LoadClientConfig() (*ClientConfig, error) {
path, err := clientConfigPath()
if err != nil {
return nil, err
}
cfgData, err := ioutil.ReadFile(path)
if err != nil {
return nil, fmt.Errorf("unable to load default config file: %s", err)
}
var cfg ClientConfig
err = json.Unmarshal(cfgData, &cfg)
if err != nil {
return nil, fmt.Errorf("failed to unmarshal %s : %s", path, err)
}
return &cfg, nil
}
type ClientConfig struct {
APIToken string `json:"api_token"`
ServerURL string `json:"server_url"`
SkipVerify bool `json:"skip_verify"`
}<|fim▁end|>
|
return run(config, args[1:])
|
<|file_name|>buggy_as_mut_slice.rs<|end_file_name|><|fim▁begin|>mod safe {
use std::slice::from_raw_parts_mut;
pub fn as_mut_slice<T>(self_: &Vec<T>) -> &mut [T] {
unsafe {
from_raw_parts_mut(self_.as_ptr() as *mut T, self_.len())
}
}
}
fn main() {
let v = vec![0,1,2];
let v1 = safe::as_mut_slice(&v);
let _v2 = safe::as_mut_slice(&v);
v1[1] = 5;<|fim▁hole|><|fim▁end|>
|
//~^ ERROR borrow stack
}
|
<|file_name|>androidqtversionfactory.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
**
** Copyright (C) 2016 BogDan Vatra <[email protected]><|fim▁hole|>**
** This file is part of Qt Creator.
**
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see https://www.qt.io/terms-conditions. For further
** information use the contact form at https://www.qt.io/contact-us.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3 as published by the Free Software
** Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
** included in the packaging of this file. Please review the following
** information to ensure the GNU General Public License requirements will
** be met: https://www.gnu.org/licenses/gpl-3.0.html.
**
****************************************************************************/
#include "androidqtversionfactory.h"
#include "androidqtversion.h"
#include "androidconstants.h"
#include <qtsupport/qtsupportconstants.h>
#include <utils/qtcassert.h>
#include <proparser/profileevaluator.h>
#include <QFileInfo>
namespace Android {
namespace Internal {
AndroidQtVersionFactory::AndroidQtVersionFactory(QObject *parent)
: QtSupport::QtVersionFactory(parent)
{
}
bool AndroidQtVersionFactory::canRestore(const QString &type)
{
return type == QLatin1String(Constants::ANDROIDQT);
}
QtSupport::BaseQtVersion *AndroidQtVersionFactory::restore(const QString &type,
const QVariantMap &data)
{
QTC_ASSERT(canRestore(type), return nullptr);
auto v = new AndroidQtVersion;
v->fromMap(data);
return v;
}
int AndroidQtVersionFactory::priority() const
{
return 90;
}
QtSupport::BaseQtVersion *AndroidQtVersionFactory::create(const Utils::FileName &qmakePath, ProFileEvaluator *evaluator, bool isAutoDetected, const QString &autoDetectionSource)
{
QFileInfo fi = qmakePath.toFileInfo();
if (!fi.exists() || !fi.isExecutable() || !fi.isFile())
return nullptr;
if (!evaluator->values(QLatin1String("CONFIG")).contains(QLatin1String("android"))
&& evaluator->value(QLatin1String("QMAKE_PLATFORM")) != QLatin1String("android"))
return nullptr;
if (evaluator->values(QLatin1String("CONFIG")).contains(QLatin1String("android-no-sdk")))
return nullptr;
return new AndroidQtVersion(qmakePath, isAutoDetected, autoDetectionSource);
}
} // Internal
} // Android<|fim▁end|>
|
** Contact: https://www.qt.io/licensing/
|
<|file_name|>message-view.cpp<|end_file_name|><|fim▁begin|>/***************************************************************************
* Copyright (C) 2012 by David Edmundson <[email protected]> *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License *
* along with this program; if not, write to the *
* Free Software Foundation, Inc., *
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA *
***************************************************************************/
#include "message-view.h"
#include "adium-theme-view.h"
#include "adium-theme-status-info.h"
#include <KTp/message-processor.h>
#include <KDebug>
#include <KIconLoader>
#include <QLabel>
#include <QResizeEvent>
#include <KTp/Logger/log-manager.h>
#include <KTp/Logger/pending-logger-logs.h>
#include <TelepathyQt/Account>
MessageView::MessageView(QWidget *parent) :
AdiumThemeView(parent),
m_infoLabel(new QLabel(this))
{
loadSettings();
QFont font = m_infoLabel->font();
font.setBold(true);
m_infoLabel->setFont(font);
m_infoLabel->setAlignment(Qt::AlignCenter);
connect(this, SIGNAL(loadFinished(bool)), SLOT(processStoredEvents()));
}
void MessageView::loadLog(const Tp::AccountPtr &account, const KTp::LogEntity &entity,
const Tp::ContactPtr &contact, const QDate &date,
const QPair< QDate, QDate > &nearestDates)
{
if (account.isNull() || !entity.isValid()) {
//note contact can be null
showInfoMessage(i18n("Unknown or invalid contact"));<|fim▁hole|> }
m_infoLabel->hide();
m_account = account;
// FIXME: Workaround for a bug, probably in QGlib which causes that
// m_entity = m_entity results in invalid m_entity->m_class being null
if (m_entity != entity) {
m_entity = entity;
}
m_contact = m_contact.dynamicCast<Tp::Contact>(contact);
m_date = date;
m_prev = nearestDates.first;
m_next = nearestDates.second;
if (entity.entityType() == Tp::HandleTypeRoom) {
load(AdiumThemeView::GroupChat);
} else {
load(AdiumThemeView::SingleUserChat);
}
Tp::Avatar avatar = m_account->avatar();
if (!avatar.avatarData.isEmpty()) {
m_accountAvatar = QString(QLatin1String("data:%1;base64,%2")).
arg(avatar.MIMEType.isEmpty() ? QLatin1String("image/*") : avatar.MIMEType).
arg(QString::fromLatin1(m_account->avatar().avatarData.toBase64().data()));
}
KTp::LogManager *logManager = KTp::LogManager::instance();
KTp::PendingLoggerLogs *pendingLogs = logManager->queryLogs(m_account, m_entity, m_date);
connect(pendingLogs, SIGNAL(finished(KTp::PendingLoggerOperation*)), SLOT(onEventsLoaded(KTp::PendingLoggerOperation*)));
}
void MessageView::showInfoMessage(const QString& message)
{
m_infoLabel->setText(message);
m_infoLabel->show();
m_infoLabel->raise();
m_infoLabel->setGeometry(0, 0, width(), height());
}
void MessageView::resizeEvent(QResizeEvent* e)
{
m_infoLabel->setGeometry(0, 0, e->size().width(), e->size().height());
QWebView::resizeEvent(e);
}
void MessageView::setHighlightText(const QString &text)
{
m_highlightedText = text;
}
void MessageView::clearHighlightText()
{
setHighlightText(QString());
}
void MessageView::onEventsLoaded(KTp::PendingLoggerOperation *po)
{
KTp::PendingLoggerLogs *pl = qobject_cast<KTp::PendingLoggerLogs*>(po);
m_events << pl->logs();
/* Wait with initialization for the first event so that we can know when the chat session started */
AdiumThemeHeaderInfo headerInfo;
headerInfo.setDestinationDisplayName(m_contact.isNull() ? m_entity.alias() : m_contact->alias());
headerInfo.setChatName(m_contact.isNull() ? m_entity.alias() : m_contact->alias());
headerInfo.setGroupChat(m_entity.entityType() == Tp::HandleTypeRoom);
headerInfo.setSourceName(m_account->displayName());
headerInfo.setIncomingIconPath(m_contact.isNull() ? QString() : m_contact->avatarData().fileName);
headerInfo.setService(m_account->serviceName());
// check iconPath docs for minus sign in -KIconLoader::SizeMedium
headerInfo.setServiceIconPath(KIconLoader::global()->iconPath(m_account->iconName(), -KIconLoader::SizeMedium));
if (pl->logs().count() > 0) {
headerInfo.setTimeOpened(pl->logs().first().time());
}
initialise(headerInfo);
}
bool logMessageOlderThan(const KTp::LogMessage &e1, const KTp::LogMessage &e2)
{
return e1.time() < e2.time();
}
bool logMessageNewerThan(const KTp::LogMessage &e1, const KTp::LogMessage &e2)
{
return e1.time() > e2.time();
}
void MessageView::processStoredEvents()
{
AdiumThemeStatusInfo prevConversation;
if (m_prev.isValid()) {
prevConversation = AdiumThemeStatusInfo(AdiumThemeMessageInfo::HistoryStatus);
prevConversation.setMessage(QString(QLatin1String("<a href=\"#x-prevConversation\"><<< %1</a>")).arg(i18n("Older conversation")));
prevConversation.setTime(QDateTime(m_prev));
}
AdiumThemeStatusInfo nextConversation;
if (m_next.isValid()) {
nextConversation = AdiumThemeStatusInfo(AdiumThemeMessageInfo::HistoryStatus);
nextConversation.setMessage(QString(QLatin1String("<a href=\"#x-nextConversation\">%1 >>></a>")).arg(i18n("Newer conversation")));
nextConversation.setTime(QDateTime(m_next));
}
if (m_sortMode == MessageView::SortOldestTop) {
if (m_prev.isValid()) {
addAdiumStatusMessage(nextConversation);
}
qSort(m_events.begin(), m_events.end(), logMessageOlderThan);
} else if (m_sortMode == MessageView::SortNewestTop) {
if (m_next.isValid()) {
addAdiumStatusMessage(prevConversation);
}
qSort(m_events.begin(), m_events.end(), logMessageNewerThan);
}
if (m_events.isEmpty()) {
showInfoMessage(i18n("There are no logs for this day"));
}
while (!m_events.isEmpty()) {
const KTp::LogMessage msg = m_events.takeFirst();
KTp::MessageContext ctx(m_account, Tp::TextChannelPtr());
KTp::Message message = KTp::MessageProcessor::instance()->processIncomingMessage(msg, ctx);
addMessage(message);
}
if (m_sortMode == MessageView::SortOldestTop && m_next.isValid()) {
addAdiumStatusMessage(prevConversation);
} else if (m_sortMode == MessageView::SortNewestTop && m_prev.isValid()) {
addAdiumStatusMessage(nextConversation);
}
/* Can't highlight the text directly, we need to wait for the JavaScript in
* AdiumThemeView to include the log messages into DOM. */
QTimer::singleShot(100, this, SLOT(doHighlightText()));
}
void MessageView::onLinkClicked(const QUrl &link)
{
// Don't emit the signal directly, KWebView does not like when we reload the
// page from an event handler (and then chain up) and we can't guarantee
// that everyone will use QueuedConnection when connecting to
// conversationSwitchRequested() slot
if (link.fragment() == QLatin1String("x-nextConversation")) {
// Q_EMIT conversationSwitchRequested(m_next)
QMetaObject::invokeMethod(this, "conversationSwitchRequested", Qt::QueuedConnection,
Q_ARG(QDate, m_next));
return;
}
if (link.fragment() == QLatin1String("x-prevConversation")) {
// Q_EMIT conversationSwitchRequested(m_prev)
QMetaObject::invokeMethod(this, "conversationSwitchRequested", Qt::QueuedConnection,
Q_ARG(QDate, m_prev));
return;
}
AdiumThemeView::onLinkClicked(link);
}
void MessageView::loadSettings()
{
const KConfig config(QLatin1String("ktelepathyrc"));
const KConfigGroup group = config.group("LogViewer");
m_sortMode = static_cast<SortMode>(group.readEntry("SortMode", static_cast<int>(SortOldestTop)));
}
void MessageView::reloadTheme()
{
loadSettings();
loadLog(m_account, m_entity, m_contact, m_date, qMakePair(m_prev, m_next));
}
void MessageView::doHighlightText()
{
findText(QString());
if (!m_highlightedText.isEmpty()) {
findText(m_highlightedText, QWebPage::HighlightAllOccurrences |
QWebPage::FindWrapsAroundDocument);
}
}<|fim▁end|>
|
return;
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import copy
import datetime
import decimal
import math
import warnings
from itertools import tee
from django.db import connection
from django.db.models.query_utils import QueryWrapper
from django.conf import settings
from django import forms
from django.core import exceptions, validators
from django.utils.datastructures import DictWrapper
from django.utils.dateparse import parse_date, parse_datetime, parse_time
from django.utils.functional import curry
from django.utils.text import capfirst
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import smart_unicode, force_unicode, smart_str
from django.utils.ipv6 import clean_ipv6_address
class NOT_PROVIDED:
pass
# The values to use for "blank" in SelectFields. Will be appended to the start
# of most "choices" lists.
BLANK_CHOICE_DASH = [("", "---------")]
BLANK_CHOICE_NONE = [("", "None")]
class FieldDoesNotExist(Exception):
pass
# A guide to Field parameters:
#
# * name: The name of the field specified in the model.
# * attname: The attribute to use on the model object. This is the same as
# "name", except in the case of ForeignKeys, where "_id" is
# appended.
# * db_column: The db_column specified in the model (or None).
# * column: The database column for this field. This is the same as
# "attname", except if db_column is specified.
#
# Code that introspects values, or does other dynamic things, should use
# attname. For example, this gets the primary key value of object "obj":
#
# getattr(obj, opts.pk.attname)
class Field(object):
"""Base class for all field types"""
# Designates whether empty strings fundamentally are allowed at the
# database level.
empty_strings_allowed = True
# These track each time a Field instance is created. Used to retain order.
# The auto_creation_counter is used for fields that Django implicitly
# creates, creation_counter is used for all user-specified fields.
creation_counter = 0
auto_creation_counter = -1
default_validators = [] # Default set of validators
default_error_messages = {
'invalid_choice': _(u'Value %r is not a valid choice.'),
'null': _(u'This field cannot be null.'),
'blank': _(u'This field cannot be blank.'),
'unique': _(u'%(model_name)s with this %(field_label)s '
u'already exists.'),
}
# Generic field type description, usually overridden by subclasses
def _description(self):
return _(u'Field of type: %(field_type)s') % {
'field_type': self.__class__.__name__
}
description = property(_description)
def __init__(self, verbose_name=None, name=None, primary_key=False,
max_length=None, unique=False, blank=False, null=False,
db_index=False, rel=None, default=NOT_PROVIDED, editable=True,
serialize=True, unique_for_date=None, unique_for_month=None,
unique_for_year=None, choices=None, help_text='', db_column=None,
db_tablespace=None, auto_created=False, validators=[],
error_messages=None):
self.name = name
self.verbose_name = verbose_name
self.primary_key = primary_key
self.max_length, self._unique = max_length, unique
self.blank, self.null = blank, null
# Oracle treats the empty string ('') as null, so coerce the null
# option whenever '' is a possible value.
if (self.empty_strings_allowed and
connection.features.interprets_empty_strings_as_nulls):
self.null = True
self.rel = rel
self.default = default
self.editable = editable
self.serialize = serialize
self.unique_for_date, self.unique_for_month = (unique_for_date,
unique_for_month)
self.unique_for_year = unique_for_year
self._choices = choices or []
self.help_text = help_text
self.db_column = db_column
self.db_tablespace = db_tablespace or settings.DEFAULT_INDEX_TABLESPACE
self.auto_created = auto_created
# Set db_index to True if the field has a relationship and doesn't
# explicitly set db_index.
self.db_index = db_index
# Adjust the appropriate creation counter, and save our local copy.
if auto_created:
self.creation_counter = Field.auto_creation_counter
Field.auto_creation_counter -= 1
else:
self.creation_counter = Field.creation_counter
Field.creation_counter += 1
self.validators = self.default_validators + validators
messages = {}
for c in reversed(self.__class__.__mro__):
messages.update(getattr(c, 'default_error_messages', {}))
messages.update(error_messages or {})
self.error_messages = messages
def __cmp__(self, other):
# This is needed because bisect does not take a comparison function.
return cmp(self.creation_counter, other.creation_counter)
def __deepcopy__(self, memodict):
# We don't have to deepcopy very much here, since most things are not
# intended to be altered after initial creation.
obj = copy.copy(self)
if self.rel:
obj.rel = copy.copy(self.rel)
memodict[id(self)] = obj
return obj
def to_python(self, value):
"""
Converts the input value into the expected Python data type, raising
django.core.exceptions.ValidationError if the data can't be converted.
Returns the converted value. Subclasses should override this.
"""
return value
def run_validators(self, value):
if value in validators.EMPTY_VALUES:
return
errors = []
for v in self.validators:
try:
v(value)
except exceptions.ValidationError, e:
if hasattr(e, 'code') and e.code in self.error_messages:
message = self.error_messages[e.code]
if e.params:
message = message % e.params
errors.append(message)
else:
errors.extend(e.messages)
if errors:
raise exceptions.ValidationError(errors)
def validate(self, value, model_instance):
"""
Validates value and throws ValidationError. Subclasses should override
this to provide validation logic.
"""
if not self.editable:
# Skip validation for non-editable fields.
return
if self._choices and value:
for option_key, option_value in self.choices:
if isinstance(option_value, (list, tuple)):
# This is an optgroup, so look inside the group for
# options.
for optgroup_key, optgroup_value in option_value:
if value == optgroup_key:
return
elif value == option_key:
return
msg = self.error_messages['invalid_choice'] % value
raise exceptions.ValidationError(msg)
if value is None and not self.null:
raise exceptions.ValidationError(self.error_messages['null'])
if not self.blank and value in validators.EMPTY_VALUES:
raise exceptions.ValidationError(self.error_messages['blank'])
def clean(self, value, model_instance):
"""
Convert the value's type and run validation. Validation errors
from to_python and validate are propagated. The correct value is
returned if no error is raised.
"""
value = self.to_python(value)
self.validate(value, model_instance)
self.run_validators(value)
return value
def db_type(self, connection):
"""
Returns the database column data type for this field, for the provided
connection.
"""
# The default implementation of this method looks at the
# backend-specific DATA_TYPES dictionary, looking up the field by its
# "internal type".
#
# A Field class can implement the get_internal_type() method to specify
# which *preexisting* Django Field class it's most similar to -- i.e.,
# a custom field might be represented by a TEXT column type, which is
# the same as the TextField Django field type, which means the custom
# field's get_internal_type() returns 'TextField'.
#
# But the limitation of the get_internal_type() / data_types approach
# is that it cannot handle database column types that aren't already
# mapped to one of the built-in Django field types. In this case, you
# can implement db_type() instead of get_internal_type() to specify
# exactly which wacky database column type you want to use.
data = DictWrapper(self.__dict__, connection.ops.quote_name, "qn_")
try:
return (connection.creation.data_types[self.get_internal_type()]
% data)
except KeyError:
return None
@property
def unique(self):
return self._unique or self.primary_key
def set_attributes_from_name(self, name):
if not self.name:
self.name = name
self.attname, self.column = self.get_attname_column()
if self.verbose_name is None and self.name:
self.verbose_name = self.name.replace('_', ' ')
def contribute_to_class(self, cls, name):
self.set_attributes_from_name(name)
self.model = cls
cls._meta.add_field(self)
if self.choices:
setattr(cls, 'get_%s_display' % self.name,
curry(cls._get_FIELD_display, field=self))
def get_attname(self):
return self.name
def get_attname_column(self):
attname = self.get_attname()
column = self.db_column or attname
return attname, column
def get_cache_name(self):
return '_%s_cache' % self.name
def get_internal_type(self):
return self.__class__.__name__
def pre_save(self, model_instance, add):
"""
Returns field's value just before saving.
"""
return getattr(model_instance, self.attname)
def get_prep_value(self, value):
"""
Perform preliminary non-db specific value checks and conversions.
"""
return value
def get_db_prep_value(self, value, connection, prepared=False):
"""Returns field's value prepared for interacting with the database
backend.
Used by the default implementations of ``get_db_prep_save``and
`get_db_prep_lookup```
"""
if not prepared:
value = self.get_prep_value(value)
return value
def get_db_prep_save(self, value, connection):
"""
Returns field's value prepared for saving into a database.
"""
return self.get_db_prep_value(value, connection=connection,
prepared=False)
def get_prep_lookup(self, lookup_type, value):
"""
Perform preliminary non-db specific lookup checks and conversions
"""
if hasattr(value, 'prepare'):
return value.prepare()
if hasattr(value, '_prepare'):
return value._prepare()
if lookup_type in (
'regex', 'iregex', 'month', 'day', 'week_day', 'search',
'contains', 'icontains', 'iexact', 'startswith', 'istartswith',
'endswith', 'iendswith', 'isnull'
):
return value
elif lookup_type in ('exact', 'gt', 'gte', 'lt', 'lte'):
return self.get_prep_value(value)
elif lookup_type in ('range', 'in'):
return [self.get_prep_value(v) for v in value]
elif lookup_type == 'year':
try:
return int(value)
except ValueError:
raise ValueError("The __year lookup type requires an integer "
"argument")
raise TypeError("Field has invalid lookup: %s" % lookup_type)
def get_db_prep_lookup(self, lookup_type, value, connection,
prepared=False):
"""
Returns field's value prepared for database lookup.
"""
if not prepared:
value = self.get_prep_lookup(lookup_type, value)
if hasattr(value, 'get_compiler'):
value = value.get_compiler(connection=connection)
if hasattr(value, 'as_sql') or hasattr(value, '_as_sql'):
# If the value has a relabel_aliases method, it will need to
# be invoked before the final SQL is evaluated
if hasattr(value, 'relabel_aliases'):
return value
if hasattr(value, 'as_sql'):
sql, params = value.as_sql()
else:
sql, params = value._as_sql(connection=connection)
return QueryWrapper(('(%s)' % sql), params)
if lookup_type in ('regex', 'iregex', 'month', 'day', 'week_day',
'search'):
return [value]
elif lookup_type in ('exact', 'gt', 'gte', 'lt', 'lte'):
return [self.get_db_prep_value(value, connection=connection,
prepared=prepared)]
elif lookup_type in ('range', 'in'):
return [self.get_db_prep_value(v, connection=connection,
prepared=prepared) for v in value]
elif lookup_type in ('contains', 'icontains'):
return ["%%%s%%" % connection.ops.prep_for_like_query(value)]
elif lookup_type == 'iexact':
return [connection.ops.prep_for_iexact_query(value)]
elif lookup_type in ('startswith', 'istartswith'):
return ["%s%%" % connection.ops.prep_for_like_query(value)]
elif lookup_type in ('endswith', 'iendswith'):
return ["%%%s" % connection.ops.prep_for_like_query(value)]
elif lookup_type == 'isnull':
return []
elif lookup_type == 'year':
if self.get_internal_type() == 'DateField':
return connection.ops.year_lookup_bounds_for_date_field(value)
else:
return connection.ops.year_lookup_bounds(value)
def has_default(self):
"""
Returns a boolean of whether this field has a default value.
"""
return self.default is not NOT_PROVIDED
def get_default(self):
"""
Returns the default value for this field.
"""
if self.has_default():
if callable(self.default):
return self.default()
return force_unicode(self.default, strings_only=True)
if (not self.empty_strings_allowed or (self.null and
not connection.features.interprets_empty_strings_as_nulls)):
return None
return ""
def get_validator_unique_lookup_type(self):
return '%s__exact' % self.name
def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH):
"""Returns choices with a default blank choices included, for use
as SelectField choices for this field."""
first_choice = include_blank and blank_choice or []
if self.choices:
return first_choice + list(self.choices)
rel_model = self.rel.to
if hasattr(self.rel, 'get_related_field'):
lst = [(getattr(x, self.rel.get_related_field().attname),
smart_unicode(x))
for x in rel_model._default_manager.complex_filter(
self.rel.limit_choices_to)]
else:
lst = [(x._get_pk_val(), smart_unicode(x))
for x in rel_model._default_manager.complex_filter(
self.rel.limit_choices_to)]
return first_choice + lst
def get_choices_default(self):
return self.get_choices()
def get_flatchoices(self, include_blank=True,
blank_choice=BLANK_CHOICE_DASH):
"""
Returns flattened choices with a default blank choice included.
"""
first_choice = include_blank and blank_choice or []
return first_choice + list(self.flatchoices)
def _get_val_from_obj(self, obj):
if obj is not None:
return getattr(obj, self.attname)
else:
return self.get_default()
def value_to_string(self, obj):
"""
Returns a string value of this field from the passed obj.
This is used by the serialization framework.
"""
return smart_unicode(self._get_val_from_obj(obj))
def bind(self, fieldmapping, original, bound_field_class):
return bound_field_class(self, fieldmapping, original)
def _get_choices(self):
if hasattr(self._choices, 'next'):
choices, self._choices = tee(self._choices)
return choices
else:
return self._choices
choices = property(_get_choices)
def _get_flatchoices(self):
"""Flattened version of choices tuple."""
flat = []
for choice, value in self.choices:
if isinstance(value, (list, tuple)):
flat.extend(value)
else:
                flat.append((choice, value))
return flat
flatchoices = property(_get_flatchoices)
def save_form_data(self, instance, data):
setattr(instance, self.name, data)
def formfield(self, form_class=forms.CharField, **kwargs):
"""
Returns a django.forms.Field instance for this database Field.
"""
defaults = {'required': not self.blank,
'label': capfirst(self.verbose_name),
'help_text': self.help_text}
if self.has_default():
if callable(self.default):
defaults['initial'] = self.default
defaults['show_hidden_initial'] = True
else:
defaults['initial'] = self.get_default()
if self.choices:
# Fields with choices get special treatment.
include_blank = (self.blank or
not (self.has_default() or 'initial' in kwargs))
defaults['choices'] = self.get_choices(include_blank=include_blank)
defaults['coerce'] = self.to_python
if self.null:
defaults['empty_value'] = None
form_class = forms.TypedChoiceField
# Many of the subclass-specific formfield arguments (min_value,
# max_value) don't apply for choice fields, so be sure to only pass
# the values that TypedChoiceField will understand.
for k in kwargs.keys():
if k not in ('coerce', 'empty_value', 'choices', 'required',
'widget', 'label', 'initial', 'help_text',
'error_messages', 'show_hidden_initial'):
del kwargs[k]
defaults.update(kwargs)
return form_class(**defaults)
def value_from_object(self, obj):
"""
Returns the value of this field in the given model instance.
"""
return getattr(obj, self.attname)
def __repr__(self):
"""
Displays the module, class and name of the field.
"""
path = '%s.%s' % (self.__class__.__module__, self.__class__.__name__)
name = getattr(self, 'name', None)
if name is not None:
return '<%s: %s>' % (path, name)
return '<%s>' % path
class AutoField(Field):
description = _("Automatic key")
empty_strings_allowed = False
def __init__(self, *args, **kwargs):
assert kwargs.get('primary_key', False) is True, \
"%ss must have primary_key=True." % self.__class__.__name__
kwargs['blank'] = True
Field.__init__(self, *args, **kwargs)
def get_internal_type(self):
return "AutoField"
def validate(self, value, model_instance):
pass
def get_db_prep_value(self, value, connection, prepared=False):
if value is None:
return value
return connection.ops.value_to_db_auto(value)
def contribute_to_class(self, cls, name):
assert not cls._meta.has_auto_field, \
"A model can't have more than one AutoField."
super(AutoField, self).contribute_to_class(cls, name)
cls._meta.has_auto_field = True
cls._meta.auto_field = self
def formfield(self, **kwargs):
return None
class BooleanField(Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _(u"'%s' value must be either True or False."),
}
description = _("Boolean (Either True or False)")
def __init__(self, *args, **kwargs):
kwargs['blank'] = True
if 'default' not in kwargs and not kwargs.get('null'):
kwargs['default'] = False
Field.__init__(self, *args, **kwargs)
def get_internal_type(self):
return "BooleanField"
def to_python(self, value):
if value in (True, False):
            # if value is 1 or 0 then it's equal to True or False, but we want
# to return a true bool for semantic reasons.
return bool(value)
if value in ('t', 'True', '1'):
return True
if value in ('f', 'False', '0'):
return False
msg = self.error_messages['invalid'] % str(value)
raise exceptions.ValidationError(msg)
def get_prep_lookup(self, lookup_type, value):
# Special-case handling for filters coming from a Web request (e.g. the
# admin interface). Only works for scalar values (not lists). If you're
# passing in a list, you might as well make things the right type when
# constructing the list.
if value in ('1', '0'):
value = bool(int(value))
return super(BooleanField, self).get_prep_lookup(lookup_type, value)
def get_prep_value(self, value):
if value is None:
return None
return bool(value)
def formfield(self, **kwargs):
# Unlike most fields, BooleanField figures out include_blank from
# self.null instead of self.blank.
if self.choices:
include_blank = (self.null or
not (self.has_default() or 'initial' in kwargs))
defaults = {'choices': self.get_choices(
include_blank=include_blank)}
else:
defaults = {'form_class': forms.BooleanField}
defaults.update(kwargs)
return super(BooleanField, self).formfield(**defaults)
class CharField(Field):
description = _("String (up to %(max_length)s)")
def __init__(self, *args, **kwargs):
super(CharField, self).__init__(*args, **kwargs)
self.validators.append(validators.MaxLengthValidator(self.max_length))
def get_internal_type(self):
return "CharField"
def to_python(self, value):
if isinstance(value, basestring) or value is None:
return value
return smart_unicode(value)
def get_prep_value(self, value):
return self.to_python(value)
def formfield(self, **kwargs):
# Passing max_length to forms.CharField means that the value's length
# will be validated twice. This is considered acceptable since we want
# the value in the form field (to pass into widget for example).
defaults = {'max_length': self.max_length}
defaults.update(kwargs)
return super(CharField, self).formfield(**defaults)
# TODO: Maybe move this into contrib, because it's specialized.
class CommaSeparatedIntegerField(CharField):
default_validators = [validators.validate_comma_separated_integer_list]
description = _("Comma-separated integers")
def formfield(self, **kwargs):
defaults = {
'error_messages': {
'invalid': _(u'Enter only digits separated by commas.'),
}
}
defaults.update(kwargs)
return super(CommaSeparatedIntegerField, self).formfield(**defaults)
class DateField(Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _(u"'%s' value has an invalid date format. It must be "
u"in YYYY-MM-DD format."),
'invalid_date': _(u"'%s' value has the correct format (YYYY-MM-DD) "
u"but it is an invalid date."),
}
description = _("Date (without time)")
def __init__(self, verbose_name=None, name=None, auto_now=False,
auto_now_add=False, **kwargs):
self.auto_now, self.auto_now_add = auto_now, auto_now_add
if auto_now or auto_now_add:
kwargs['editable'] = False
kwargs['blank'] = True
Field.__init__(self, verbose_name, name, **kwargs)
def get_internal_type(self):
return "DateField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, datetime.datetime):
return value.date()
if isinstance(value, datetime.date):
return value
value = smart_str(value)
try:
parsed = parse_date(value)
if parsed is not None:
return parsed
except ValueError:
msg = self.error_messages['invalid_date'] % value
raise exceptions.ValidationError(msg)
msg = self.error_messages['invalid'] % value
raise exceptions.ValidationError(msg)
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = datetime.date.today()
setattr(model_instance, self.attname, value)
return value
else:
return super(DateField, self).pre_save(model_instance, add)
def contribute_to_class(self, cls, name):
super(DateField,self).contribute_to_class(cls, name)
if not self.null:
setattr(cls, 'get_next_by_%s' % self.name,
curry(cls._get_next_or_previous_by_FIELD, field=self,
is_next=True))
setattr(cls, 'get_previous_by_%s' % self.name,
curry(cls._get_next_or_previous_by_FIELD, field=self,
is_next=False))
def get_prep_lookup(self, lookup_type, value):
# For "__month", "__day", and "__week_day" lookups, convert the value
# to an int so the database backend always sees a consistent type.
if lookup_type in ('month', 'day', 'week_day'):
return int(value)
return super(DateField, self).get_prep_lookup(lookup_type, value)
def get_prep_value(self, value):
return self.to_python(value)
def get_db_prep_value(self, value, connection, prepared=False):
# Casts dates into the format expected by the backend
if not prepared:
value = self.get_prep_value(value)
return connection.ops.value_to_db_date(value)
def value_to_string(self, obj):
val = self._get_val_from_obj(obj)
return '' if val is None else val.isoformat()
def formfield(self, **kwargs):<|fim▁hole|>class DateTimeField(DateField):
empty_strings_allowed = False
default_error_messages = {
'invalid': _(u"'%s' value has an invalid format. It must be in "
u"YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ] format."),
'invalid_date': _(u"'%s' value has the correct format "
u"(YYYY-MM-DD) but it is an invalid date."),
'invalid_datetime': _(u"'%s' value has the correct format "
u"(YYYY-MM-DD HH:MM[:ss[.uuuuuu]][TZ]) "
u"but it is an invalid date/time."),
}
description = _("Date (with time)")
# __init__ is inherited from DateField
def get_internal_type(self):
return "DateTimeField"
def to_python(self, value):
if value is None:
return value
if isinstance(value, datetime.datetime):
return value
if isinstance(value, datetime.date):
value = datetime.datetime(value.year, value.month, value.day)
if settings.USE_TZ:
# For backwards compatibility, interpret naive datetimes in
# local time. This won't work during DST change, but we can't
# do much about it, so we let the exceptions percolate up the
# call stack.
warnings.warn(u"DateTimeField received a naive datetime (%s)"
u" while time zone support is active." % value,
RuntimeWarning)
default_timezone = timezone.get_default_timezone()
value = timezone.make_aware(value, default_timezone)
return value
value = smart_str(value)
try:
parsed = parse_datetime(value)
if parsed is not None:
return parsed
except ValueError:
msg = self.error_messages['invalid_datetime'] % value
raise exceptions.ValidationError(msg)
try:
parsed = parse_date(value)
if parsed is not None:
return datetime.datetime(parsed.year, parsed.month, parsed.day)
except ValueError:
msg = self.error_messages['invalid_date'] % value
raise exceptions.ValidationError(msg)
msg = self.error_messages['invalid'] % value
raise exceptions.ValidationError(msg)
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = timezone.now()
setattr(model_instance, self.attname, value)
return value
else:
return super(DateTimeField, self).pre_save(model_instance, add)
# contribute_to_class is inherited from DateField, it registers
# get_next_by_FOO and get_prev_by_FOO
# get_prep_lookup is inherited from DateField
def get_prep_value(self, value):
value = self.to_python(value)
if value is not None and settings.USE_TZ and timezone.is_naive(value):
# For backwards compatibility, interpret naive datetimes in local
# time. This won't work during DST change, but we can't do much
# about it, so we let the exceptions percolate up the call stack.
warnings.warn(u"DateTimeField received a naive datetime (%s)"
u" while time zone support is active." % value,
RuntimeWarning)
default_timezone = timezone.get_default_timezone()
value = timezone.make_aware(value, default_timezone)
return value
def get_db_prep_value(self, value, connection, prepared=False):
# Casts datetimes into the format expected by the backend
if not prepared:
value = self.get_prep_value(value)
return connection.ops.value_to_db_datetime(value)
def value_to_string(self, obj):
val = self._get_val_from_obj(obj)
return '' if val is None else val.isoformat()
def formfield(self, **kwargs):
defaults = {'form_class': forms.DateTimeField}
defaults.update(kwargs)
return super(DateTimeField, self).formfield(**defaults)
class DecimalField(Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _(u"'%s' value must be a decimal number."),
}
description = _("Decimal number")
def __init__(self, verbose_name=None, name=None, max_digits=None,
decimal_places=None, **kwargs):
self.max_digits, self.decimal_places = max_digits, decimal_places
Field.__init__(self, verbose_name, name, **kwargs)
def get_internal_type(self):
return "DecimalField"
def to_python(self, value):
if value is None:
return value
try:
return decimal.Decimal(value)
except decimal.InvalidOperation:
msg = self.error_messages['invalid'] % str(value)
raise exceptions.ValidationError(msg)
def _format(self, value):
if isinstance(value, basestring) or value is None:
return value
else:
return self.format_number(value)
def format_number(self, value):
"""
Formats a number into a string with the requisite number of digits and
decimal places.
"""
# Method moved to django.db.backends.util.
#
# It is preserved because it is used by the oracle backend
# (django.db.backends.oracle.query), and also for
# backwards-compatibility with any external code which may have used
# this method.
from django.db.backends import util
return util.format_number(value, self.max_digits, self.decimal_places)
def get_db_prep_save(self, value, connection):
return connection.ops.value_to_db_decimal(self.to_python(value),
self.max_digits, self.decimal_places)
def get_prep_value(self, value):
return self.to_python(value)
def formfield(self, **kwargs):
defaults = {
'max_digits': self.max_digits,
'decimal_places': self.decimal_places,
'form_class': forms.DecimalField,
}
defaults.update(kwargs)
return super(DecimalField, self).formfield(**defaults)
class EmailField(CharField):
default_validators = [validators.validate_email]
description = _("E-mail address")
def __init__(self, *args, **kwargs):
kwargs['max_length'] = kwargs.get('max_length', 75)
CharField.__init__(self, *args, **kwargs)
def formfield(self, **kwargs):
# As with CharField, this will cause email validation to be performed
# twice.
defaults = {
'form_class': forms.EmailField,
}
defaults.update(kwargs)
return super(EmailField, self).formfield(**defaults)
class FilePathField(Field):
description = _("File path")
def __init__(self, verbose_name=None, name=None, path='', match=None,
recursive=False, **kwargs):
self.path, self.match, self.recursive = path, match, recursive
kwargs['max_length'] = kwargs.get('max_length', 100)
Field.__init__(self, verbose_name, name, **kwargs)
def get_prep_value(self, value):
value = super(FilePathField, self).get_prep_value(value)
if value is None:
return None
return smart_unicode(value)
def formfield(self, **kwargs):
defaults = {
'path': self.path,
'match': self.match,
'recursive': self.recursive,
'form_class': forms.FilePathField,
}
defaults.update(kwargs)
return super(FilePathField, self).formfield(**defaults)
def get_internal_type(self):
return "FilePathField"
class FloatField(Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _("'%s' value must be a float."),
}
description = _("Floating point number")
def get_prep_value(self, value):
if value is None:
return None
return float(value)
def get_internal_type(self):
return "FloatField"
def to_python(self, value):
if value is None:
return value
try:
return float(value)
except (TypeError, ValueError):
msg = self.error_messages['invalid'] % str(value)
raise exceptions.ValidationError(msg)
def formfield(self, **kwargs):
defaults = {'form_class': forms.FloatField}
defaults.update(kwargs)
return super(FloatField, self).formfield(**defaults)
class IntegerField(Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _("'%s' value must be an integer."),
}
description = _("Integer")
def get_prep_value(self, value):
if value is None:
return None
return int(value)
def get_prep_lookup(self, lookup_type, value):
if ((lookup_type == 'gte' or lookup_type == 'lt')
and isinstance(value, float)):
value = math.ceil(value)
return super(IntegerField, self).get_prep_lookup(lookup_type, value)
def get_internal_type(self):
return "IntegerField"
def to_python(self, value):
if value is None:
return value
try:
return int(value)
except (TypeError, ValueError):
msg = self.error_messages['invalid'] % str(value)
raise exceptions.ValidationError(msg)
def formfield(self, **kwargs):
defaults = {'form_class': forms.IntegerField}
defaults.update(kwargs)
return super(IntegerField, self).formfield(**defaults)
class BigIntegerField(IntegerField):
empty_strings_allowed = False
description = _("Big (8 byte) integer")
MAX_BIGINT = 9223372036854775807
def get_internal_type(self):
return "BigIntegerField"
def formfield(self, **kwargs):
defaults = {'min_value': -BigIntegerField.MAX_BIGINT - 1,
'max_value': BigIntegerField.MAX_BIGINT}
defaults.update(kwargs)
return super(BigIntegerField, self).formfield(**defaults)
class IPAddressField(Field):
empty_strings_allowed = False
description = _("IPv4 address")
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 15
Field.__init__(self, *args, **kwargs)
def get_prep_value(self, value):
value = super(IPAddressField, self).get_prep_value(value)
if value is None:
return None
return smart_unicode(value)
def get_internal_type(self):
return "IPAddressField"
def formfield(self, **kwargs):
defaults = {'form_class': forms.IPAddressField}
defaults.update(kwargs)
return super(IPAddressField, self).formfield(**defaults)
class GenericIPAddressField(Field):
empty_strings_allowed = True
description = _("IP address")
default_error_messages = {}
def __init__(self, verbose_name=None, name=None, protocol='both',
unpack_ipv4=False, *args, **kwargs):
self.unpack_ipv4 = unpack_ipv4
self.default_validators, invalid_error_message = \
validators.ip_address_validators(protocol, unpack_ipv4)
self.default_error_messages['invalid'] = invalid_error_message
kwargs['max_length'] = 39
Field.__init__(self, verbose_name, name, *args, **kwargs)
def get_internal_type(self):
return "GenericIPAddressField"
def to_python(self, value):
if value and ':' in value:
return clean_ipv6_address(value,
self.unpack_ipv4, self.error_messages['invalid'])
return value
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
return value or None
def get_prep_value(self, value):
if value is None:
return value
if value and ':' in value:
try:
return clean_ipv6_address(value, self.unpack_ipv4)
except exceptions.ValidationError:
pass
return smart_unicode(value)
def formfield(self, **kwargs):
defaults = {'form_class': forms.GenericIPAddressField}
defaults.update(kwargs)
return super(GenericIPAddressField, self).formfield(**defaults)
class NullBooleanField(Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _("'%s' value must be either None, True or False."),
}
description = _("Boolean (Either True, False or None)")
def __init__(self, *args, **kwargs):
kwargs['null'] = True
kwargs['blank'] = True
Field.__init__(self, *args, **kwargs)
def get_internal_type(self):
return "NullBooleanField"
def to_python(self, value):
if value is None:
return None
if value in (True, False):
return bool(value)
if value in ('None',):
return None
if value in ('t', 'True', '1'):
return True
if value in ('f', 'False', '0'):
return False
msg = self.error_messages['invalid'] % str(value)
raise exceptions.ValidationError(msg)
def get_prep_lookup(self, lookup_type, value):
# Special-case handling for filters coming from a Web request (e.g. the
# admin interface). Only works for scalar values (not lists). If you're
# passing in a list, you might as well make things the right type when
# constructing the list.
if value in ('1', '0'):
value = bool(int(value))
return super(NullBooleanField, self).get_prep_lookup(lookup_type,
value)
def get_prep_value(self, value):
if value is None:
return None
return bool(value)
def formfield(self, **kwargs):
defaults = {
'form_class': forms.NullBooleanField,
'required': not self.blank,
'label': capfirst(self.verbose_name),
'help_text': self.help_text}
defaults.update(kwargs)
return super(NullBooleanField, self).formfield(**defaults)
class PositiveIntegerField(IntegerField):
description = _("Positive integer")
def get_internal_type(self):
return "PositiveIntegerField"
def formfield(self, **kwargs):
defaults = {'min_value': 0}
defaults.update(kwargs)
return super(PositiveIntegerField, self).formfield(**defaults)
class PositiveSmallIntegerField(IntegerField):
description = _("Positive small integer")
def get_internal_type(self):
return "PositiveSmallIntegerField"
def formfield(self, **kwargs):
defaults = {'min_value': 0}
defaults.update(kwargs)
return super(PositiveSmallIntegerField, self).formfield(**defaults)
class SlugField(CharField):
description = _("Slug (up to %(max_length)s)")
def __init__(self, *args, **kwargs):
kwargs['max_length'] = kwargs.get('max_length', 50)
# Set db_index=True unless it's been set manually.
if 'db_index' not in kwargs:
kwargs['db_index'] = True
super(SlugField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return "SlugField"
def formfield(self, **kwargs):
defaults = {'form_class': forms.SlugField}
defaults.update(kwargs)
return super(SlugField, self).formfield(**defaults)
class SmallIntegerField(IntegerField):
description = _("Small integer")
def get_internal_type(self):
return "SmallIntegerField"
class TextField(Field):
description = _("Text")
def get_internal_type(self):
return "TextField"
def get_prep_value(self, value):
if isinstance(value, basestring) or value is None:
return value
return smart_unicode(value)
def formfield(self, **kwargs):
defaults = {'widget': forms.Textarea}
defaults.update(kwargs)
return super(TextField, self).formfield(**defaults)
class TimeField(Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _(u"'%s' value has an invalid format. It must be in "
u"HH:MM[:ss[.uuuuuu]] format."),
'invalid_time': _(u"'%s' value has the correct format "
u"(HH:MM[:ss[.uuuuuu]]) but it is an invalid time."),
}
description = _("Time")
def __init__(self, verbose_name=None, name=None, auto_now=False,
auto_now_add=False, **kwargs):
self.auto_now, self.auto_now_add = auto_now, auto_now_add
if auto_now or auto_now_add:
kwargs['editable'] = False
kwargs['blank'] = True
Field.__init__(self, verbose_name, name, **kwargs)
def get_internal_type(self):
return "TimeField"
def to_python(self, value):
if value is None:
return None
if isinstance(value, datetime.time):
return value
if isinstance(value, datetime.datetime):
# Not usually a good idea to pass in a datetime here (it loses
# information), but this can be a side-effect of interacting with a
# database backend (e.g. Oracle), so we'll be accommodating.
return value.time()
value = smart_str(value)
try:
parsed = parse_time(value)
if parsed is not None:
return parsed
except ValueError:
msg = self.error_messages['invalid_time'] % value
raise exceptions.ValidationError(msg)
msg = self.error_messages['invalid'] % value
raise exceptions.ValidationError(msg)
def pre_save(self, model_instance, add):
if self.auto_now or (self.auto_now_add and add):
value = datetime.datetime.now().time()
setattr(model_instance, self.attname, value)
return value
else:
return super(TimeField, self).pre_save(model_instance, add)
def get_prep_value(self, value):
return self.to_python(value)
def get_db_prep_value(self, value, connection, prepared=False):
# Casts times into the format expected by the backend
if not prepared:
value = self.get_prep_value(value)
return connection.ops.value_to_db_time(value)
def value_to_string(self, obj):
val = self._get_val_from_obj(obj)
return '' if val is None else val.isoformat()
def formfield(self, **kwargs):
defaults = {'form_class': forms.TimeField}
defaults.update(kwargs)
return super(TimeField, self).formfield(**defaults)
class URLField(CharField):
description = _("URL")
def __init__(self, verbose_name=None, name=None, verify_exists=False,
**kwargs):
kwargs['max_length'] = kwargs.get('max_length', 200)
CharField.__init__(self, verbose_name, name, **kwargs)
self.validators.append(
validators.URLValidator(verify_exists=verify_exists))
def formfield(self, **kwargs):
# As with CharField, this will cause URL validation to be performed
# twice.
defaults = {
'form_class': forms.URLField,
}
defaults.update(kwargs)
return super(URLField, self).formfield(**defaults)<|fim▁end|>
|
defaults = {'form_class': forms.DateField}
defaults.update(kwargs)
return super(DateField, self).formfield(**defaults)
|
<|file_name|>0013_persistentsubsectiongradeoverride.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django.db import migrations, models
class Migration(migrations.Migration):<|fim▁hole|> ]
operations = [
migrations.CreateModel(
name='PersistentSubsectionGradeOverride',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(auto_now_add=True, db_index=True)),
('modified', models.DateTimeField(auto_now=True, db_index=True)),
('earned_all_override', models.FloatField(null=True, blank=True)),
('possible_all_override', models.FloatField(null=True, blank=True)),
('earned_graded_override', models.FloatField(null=True, blank=True)),
('possible_graded_override', models.FloatField(null=True, blank=True)),
('grade', models.OneToOneField(related_name='override', to='grades.PersistentSubsectionGrade',
on_delete=models.CASCADE)),
],
),
]<|fim▁end|>
|
dependencies = [
('grades', '0012_computegradessetting'),
|
<|file_name|>borrowck-borrow-overloaded-deref-mut.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test how overloaded deref interacts with borrows when DerefMut
// is implemented.
use std::ops::{Deref, DerefMut};
struct Own<T> {
value: *mut T
}
impl<T> Deref for Own<T> {
type Target = T;
fn deref<'a>(&'a self) -> &'a T {
unsafe { &*self.value }
}
}
impl<T> DerefMut for Own<T> {
fn deref_mut<'a>(&'a mut self) -> &'a mut T {
unsafe { &mut *self.value }
}
}
fn deref_imm(x: Own<isize>) {
let _i = &*x;
}
fn deref_mut1(x: Own<isize>) {
let _i = &mut *x; //~ ERROR cannot borrow
}
fn deref_mut2(mut x: Own<isize>) {
let _i = &mut *x;
}
fn deref_extend<'a>(x: &'a Own<isize>) -> &'a isize {
&**x
}
fn deref_extend_mut1<'a>(x: &'a Own<isize>) -> &'a mut isize {
&mut **x //~ ERROR cannot borrow
}
fn deref_extend_mut2<'a>(x: &'a mut Own<isize>) -> &'a mut isize {
&mut **x
}
fn assign1<'a>(x: Own<isize>) {
*x = 3; //~ ERROR cannot borrow<|fim▁hole|> **x = 3; //~ ERROR cannot borrow
}
fn assign3<'a>(x: &'a mut Own<isize>) {
**x = 3;
}
pub fn main() {}<|fim▁end|>
|
}
fn assign2<'a>(x: &'a Own<isize>) {
|
<|file_name|>config.py<|end_file_name|><|fim▁begin|><|fim▁hole|>Upnp = True
BitcoindConfigPath = "/opt/bitcoin/bitcoindata/bitcoin.conf"
WORKER_STATUS_REFRESH_TIME = 10
dbService = {}
workerStatus = {}
NodeService = {
'authentication': 'http://127.0.0.1:8080/service/node/authentication.htm'
}
DbOptions = {
'type': 'sql',
'engine': 'mysql',
'dbopts': {
'host': '127.0.0.1',
'db': 'antpooldb',
'user': 'antpool',
'password': 'antpool',
}
}<|fim▁end|>
|
TestNet = False
Address = "1MjeEv3WDgycrEaaNeSESrWvRfkU6s81TX"
workerEndpoint = "3333"
DonationPercentage = 0.0
|
<|file_name|>TableTransferJob.java<|end_file_name|><|fim▁begin|>/**
* Copyright MaDgIK Group 2010 - 2015.
*/
package madgik.exareme.worker.art.container.job;
import madgik.exareme.worker.art.container.ContainerJob;
import madgik.exareme.worker.art.container.ContainerJobType;
import madgik.exareme.worker.art.executionEngine.session.PlanSessionReportID;
/**
* @author heraldkllapi
*/
public class TableTransferJob implements ContainerJob {
public final PlanSessionReportID sessionReportID;<|fim▁hole|>
public TableTransferJob(PlanSessionReportID sessionReportID) {
this.sessionReportID = sessionReportID;
}
@Override
public ContainerJobType getType() {
return ContainerJobType.dataTransfer;
}
}<|fim▁end|>
| |
<|file_name|>0031_merge.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
<|fim▁hole|> ('accounting', '0027_more_prbac_bootstrap'),
('accounting', '0030_remove_softwareplan_visibility_trial_internal'),
]
operations = [
]<|fim▁end|>
|
class Migration(migrations.Migration):
dependencies = [
|
<|file_name|>mia_client1.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import socket
import random
SERVERHOST = 'localhost'
SERVERPORT = 4080
LOCALIP = '127.0.0.2'
LOCALPORT = 4082
LOCALNAME = "30_PERCENT_SEE"
def higher(dice_a, dice_b):
ad1, ad2 = dice_a[0], dice_a[1]
bd1, bd2 = dice_b[0], dice_b[1]
if ad1 == bd1 and ad2 == bd2: return False
if ad1 == "2" and ad2 == "1": return True
if bd1 == "2" and bd2 == "1": return False<|fim▁hole|> if ad1 == ad2 and bd1 == bd2: return int(ad1) > int(bd1)
if ad1 == ad2: return True
if bd1 == bd2: return False
if ad1 == bd1: return int(ad2) > int(bd2)
return int(ad1) > int(bd1)
def one_higher(dice):
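    # Smallest announcement that beats `dice`, e.g. ("5","3") -> "5,4",
    # ("5","4") -> "6,1", ("3","3") -> "4,4", ("6","5") -> "1,1" (lowest
    # double) and ("6","6") -> "2,1" (Mia).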
    d1, d2 = dice[0], dice[1]
if d1 == "6" and d2 == "6":
return "2,1"
if d1 == d2:
return str(int(d1)+1)+","+str(int(d1)+1)
if d1 == "6" and d2 == "5":
return "1,1"
if int(d1) == int(d2)+1:
return str(int(d1)+1)+",1"
return d1+","+str(int(d2)+1)
def connect_to_miaserver(sock):
sock.settimeout(2)
while True:
sock.sendto("REGISTER;" + LOCALNAME, (SERVERHOST, SERVERPORT))
try:
data = sock.recv(1024)
if "REGISTERED" in data:
break
else:
print "Received '" + data + "'"
except socket.timeout:
print "MIA Server does not respond, retrying"
print "Registered at MIA Server"
sock.setblocking(1)
def play_mia(sock):
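    # Protocol loop: join every new round (ROUND STARTING), remember the
    # last announcement (ANNOUNCED), on our turn roll or, in roughly 30%
    # of cases, call SEE (YOUR TURN), and after rolling announce the truth
    # if it beats the last announcement, otherwise bluff with one_higher()
    # (ROLLED).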
announced = None
while True:
data = sock.recv(1024)
if data.startswith("ROUND STARTING;"):
_, _, token = data.strip().partition(";")
sock.sendto("JOIN;" + token, (SERVERHOST, SERVERPORT))
announced = None
elif data.startswith("ANNOUNCED;"):
d1, _, d2 = data.strip().split(";")[2].partition(",")
announced = (d1, d2)
elif data.startswith("YOUR TURN;"):
_, _, token = data.strip().partition(";")
if announced == None or random.uniform(0,100) > 30.0:
sock.sendto("ROLL;" + token, (SERVERHOST, SERVERPORT))
else:
sock.sendto("SEE;" + token, (SERVERHOST, SERVERPORT))
elif data.startswith("ROLLED;"):
token = data.split(";")[2]
d1, _, d2 = data.strip().split(";")[1].partition(",")
if announced == None or higher((d1,d2), announced):
sock.sendto("ANNOUNCE;"+d1+","+d2+";"+token, (SERVERHOST, SERVERPORT))
else:
sock.sendto("ANNOUNCE;"+one_higher(announced)+";"+token, (SERVERHOST, SERVERPORT))
def mia_client_start():
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind((LOCALIP, LOCALPORT))
connect_to_miaserver(sock)
play_mia(sock)
if __name__ == "__main__":
mia_client_start()<|fim▁end|>
| |
<|file_name|>run_metrics.py<|end_file_name|><|fim▁begin|>from django.core.management.base import BaseCommand, CommandError
from metricsapp.models import Metric, SprintMetric
class Command(BaseCommand):
help = 'Runs all available metrics.'
#parser.add_argument('poll_id', nargs='+', type=int)
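    # Example invocations (the metric names are hypothetical):
    #   ./manage.py run_metrics
    #   ./manage.py run_metrics -m velocity burndown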
def add_arguments(self, parser):
parser.add_argument("-m", "--metric-names", type=str, dest="explicit", nargs="+")
def handle(self, *args, **options):
all_metrics = Metric.objects.filter(active=True).select_subclasses()
explicit = options['explicit']
if explicit:
explicit_metrics = [all_metrics.get(name=name) for name in explicit]
self._run_metrics(explicit_metrics)
else:<|fim▁hole|> self._run_metrics(all_metrics)
def _run_metrics(self, lst):
for metric in lst:
self.stdout.write('Running "{}"'.format(metric))
metric.run()<|fim▁end|>
| |
<|file_name|>owners.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright 2014 ARM Limited
#
# Licensed under the Apache License, Version 2.0
# See LICENSE file for details.
# standard library modules, , ,
import unittest<|fim▁hole|>from yotta.lib.fsutils import rmRf
from . import cli
Test_Module_JSON = '''{
"name": "git-access-testing",
"version": "0.0.2",
"description": "Git Access Testing",
"author": "autopulated",
"homepage": "https://github.com/autopulated/git-access-testing",
"licenses": [
{
"url": "about:blank",
"type": ""
}
],
"dependencies": {
"testing-dummy": "[email protected]:autopulated/testing-dummy.git",
"other-testing-dummy": "[email protected]:autopulated/other-testing-dummy.git#0.0.2"
}
}
'''
class TestCLIOwners(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.test_dir = tempfile.mkdtemp()
with open(os.path.join(cls.test_dir, 'module.json'), 'w') as f:
f.write(Test_Module_JSON)
cls.saved_settings_dir = None
# override the settings directory, so that we can be sure we're not
# logged in
if 'YOTTA_USER_SETTINGS_DIR' in os.environ:
cls.saved_settings_dir = os.environ['YOTTA_USER_SETTINGS_DIR']
# use a directory called tmp_yotta_settings in the working directory:
os.environ['YOTTA_USER_SETTINGS_DIR'] = 'tmp_yotta_settings'
@classmethod
def tearDownClass(cls):
rmRf(cls.test_dir)
cls.test_dir = None
if cls.saved_settings_dir is not None:
os.environ['YOTTA_USER_SETTINGS_DIR'] = cls.saved_settings_dir
cls.saved_settings_dir = None
else:
del os.environ['YOTTA_USER_SETTINGS_DIR']
    # you have to be authenticated to list owners, so currently we only
# test that the commands fail correctly in noninteractive mode:
def test_listOwners(self):
stdout, stderr, statuscode = cli.run(['-n', 'owners', 'ls'], cwd=self.test_dir)
if statuscode != 0:
self.assertTrue((stdout+stderr).find('login required') != -1)
def test_addOwner(self):
stdout, stderr, statuscode = cli.run(['-n', 'owners', 'add', '[email protected]'], cwd=self.test_dir)
if statuscode != 0:
self.assertTrue((stdout+stderr).find('login required') != -1)
def test_rmOwner(self):
stdout, stderr, statuscode = cli.run(['-n', 'owners', 'rm', '[email protected]'], cwd=self.test_dir)
if statuscode != 0:
self.assertTrue((stdout+stderr).find('login required') != -1)
if __name__ == '__main__':
unittest.main()<|fim▁end|>
|
import os
import tempfile
# internal modules:
|
<|file_name|>bvr_import.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi. Copyright Camptocamp SA
# Financial contributors: Hasa SA, Open Net SA,
# Prisme Solutions Informatique SA, Quod SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base64
import time
import re
from openerp.tools.translate import _
from openerp.osv.orm import TransientModel, fields
from openerp.osv.osv import except_osv
from openerp.tools import mod10r
REF = re.compile('[^0-9]')
class BvrImporterWizard(TransientModel):
_name = 'bvr.import.wizard'
_columns = {'file': fields.binary('BVR File')}
def _reconstruct_invoice_ref(self, cursor, user, reference, context=None):
"""Try to get correct invoice/invoice line form ESV/BVR reference"""
id_invoice = False
        # First we search across all of the company's customer invoices
        # for one whose normalized number matches the reference
user_obj = self.pool['res.users']
user_current = user_obj.browse(cursor, user, user)
cursor.execute("SELECT inv.id, inv.number from account_invoice "
"AS inv where inv.company_id = %s and type='out_invoice'",
(user_current.company_id.id,))
result_invoice = cursor.fetchall()
for inv_id, inv_name in result_invoice:
inv_name = REF.sub('0', str(inv_name))
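            # REF replaces every non-digit character with '0' so the invoice
            # number can be compared to the purely numeric BVR reference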
if inv_name == reference:
id_invoice = inv_id
break
if id_invoice:
cursor.execute('SELECT l.id'
' FROM account_move_line l, account_invoice i'
' WHERE l.move_id = i.move_id AND l.reconcile_id is NULL '
' AND i.id IN %s', (tuple([id_invoice]),))
inv_line = []
for id_line in cursor.fetchall():
inv_line.append(id_line[0])
return inv_line
else:
return []
def _parse_lines(self, cursor, uid, inlines, context=None):
"""Parses raw v11 line and populate records list with dict"""
records = []
total_amount = 0
total_cost = 0
find_total = False
for lines in inlines:
            if not lines:  # skip the trailing empty line at the end of the file
continue
(line, lines) = (lines[:128], lines[128:])
record = {}
if line[0:3] in ('999', '995'):
if find_total:
raise except_osv(_('Error'),
                                     _('Too many total records found!'))
find_total = True
if lines:
raise except_osv(_('Error'),
_('Record found after total record!'))
amount = float(line[39:49]) + (float(line[49:51]) / 100)
cost = float(line[69:76]) + (float(line[76:78]) / 100)
if line[2] == '5':
amount *= -1
cost *= -1
<|fim▁hole|> or round(cost - total_cost, 2) >= 0.01:
raise except_osv(_('Error'),
                                         _('Total record differs from the computed totals!'))
if int(line[51:63]) != len(records):
raise except_osv(_('Error'),
                                         _('Record count differs from the computed count!'))
else:
record = {
'reference': line[12:39],
'amount': float(line[39:47]) + (float(line[47:49]) / 100),
'date': time.strftime('%Y-%m-%d', time.strptime(line[65:71], '%y%m%d')),
'cost': float(line[96:98]) + (float(line[98:100]) / 100),
}
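                # BVR references carry a recursive modulo-10 check digit:
                # recomputing it over all but the last digit must reproduce
                # the full reference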
if record['reference'] != mod10r(record['reference'][:-1]):
raise except_osv(_('Error'),
_('Recursive mod10 is invalid for reference: %s') % record['reference'])
if line[2] == '5':
record['amount'] *= -1
record['cost'] *= -1
total_amount += record['amount']
total_cost += record['cost']
records.append(record)
return records
#deprecated
def _create_voucher_from_record(self, cursor, uid, record,
statement, line_ids, context=None):
"""Create a voucher with voucher line"""
context.update({'move_line_ids': line_ids})
voucher_obj = self.pool.get('account.voucher')
move_line_obj = self.pool.get('account.move.line')
voucher_line_obj = self.pool.get('account.voucher.line')
line = move_line_obj.browse(cursor, uid, line_ids[0])
partner_id = line.partner_id and line.partner_id.id or False
if not partner_id:
return False
move_id = line.move_id.id
result = voucher_obj.onchange_partner_id(cursor, uid, [],
partner_id,
statement.journal_id.id,
abs(record['amount']),
statement.currency.id,
'receipt',
statement.date,
context=context)
voucher_res = {'type': 'receipt',
'name': record['reference'],
'partner_id': partner_id,
'journal_id': statement.journal_id.id,
'account_id': result.get('account_id', statement.journal_id.default_credit_account_id.id),
'company_id': statement.company_id.id,
'currency_id': statement.currency.id,
'date': record['date'] or time.strftime('%Y-%m-%d'),
'amount': abs(record['amount']),
'period_id': statement.period_id.id
}
voucher_id = voucher_obj.create(cursor, uid, voucher_res, context=context)
voucher_line_dict = False
if result['value']['line_cr_ids']:
for line_dict in result['value']['line_cr_ids']:
move_line = move_line_obj.browse(cursor, uid, line_dict['move_line_id'], context)
if move_id == move_line.move_id.id:
voucher_line_dict = line_dict
if voucher_line_dict:
voucher_line_dict.update({'voucher_id': voucher_id})
voucher_line_obj.create(cursor, uid, voucher_line_dict, context=context)
return voucher_id
def _get_account(self, cursor, uid, line_ids, record, context=None):
"""Get account from move line or from property"""
property_obj = self.pool.get('ir.property')
move_line_obj = self.pool.get('account.move.line')
account_id = False
if line_ids:
for line in move_line_obj.browse(cursor, uid, line_ids, context=context):
return line.account_id.id
if not account_id and not line_ids:
name = "property_account_receivable"
if record['amount'] < 0:
name = "property_account_payable"
account_id = property_obj.get(cursor, uid, name, 'res.partner', context=context).id
if not account_id:
raise except_osv(_('Error'),
_('The properties account payable account receivable are not set'))
return account_id
def _prepare_line_vals(self, cursor, uid, statement, record,
voucher_enabled, context=None):
        # Remove the first 11 chars because they can be the adherent number
# TODO check if 11 is the right number
move_line_obj = self.pool.get('account.move.line')
reference = record['reference']
values = {'name': '/',
'date': record['date'],
'amount': record['amount'],
'ref': reference,
'type': (record['amount'] >= 0 and 'customer') or 'supplier',
'statement_id': statement.id,
}
line_ids = move_line_obj.search(cursor, uid,
[('ref', '=', reference),
('reconcile_id', '=', False),
('account_id.type', 'in', ['receivable', 'payable']),
('journal_id.type', '=', 'sale')],
order='date desc', context=context)
#for multiple payments
if not line_ids:
line_ids = move_line_obj.search(cursor, uid,
[('transaction_ref', '=', reference),
('reconcile_id', '=', False),
('account_id.type', 'in', ['receivable', 'payable']),
('journal_id.type', '=', 'sale')],
order='date desc', context=context)
if not line_ids:
line_ids = self._reconstruct_invoice_ref(cursor, uid, reference, None)
if line_ids and voucher_enabled:
values['voucher_id'] = self._create_voucher_from_record(cursor, uid, record,
statement, line_ids,
context=context)
account_id = self._get_account(cursor, uid, line_ids,
record, context=context)
values['account_id'] = account_id
if line_ids:
line = move_line_obj.browse(cursor, uid, line_ids[0])
partner_id = line.partner_id.id
values['name'] = line.invoice and (_('Inv. no ') + line.invoice.number) or values['name']
values['partner_id'] = partner_id
return values
def import_v11(self, cursor, uid, ids, data, context=None):
"""Import v11 file and transfor it into statement lines"""
if context is None: context = {}
module_obj = self.pool['ir.module.module']
voucher_enabled = module_obj.search(cursor, uid, [('name', '=', 'account_voucher'),
('state', '=', 'installed')])
        # if the module is installed we check ir.config_parameter to allow force-disabling vouchers
if voucher_enabled:
para = self.pool['ir.config_parameter'].get_param(cursor,
uid,
'l10n_ch_payment_slip_voucher_disable',
default = '0')
if para.lower() not in ['0', 'false']: # if voucher is disabled
voucher_enabled = False
statement_line_obj = self.pool.get('account.bank.statement.line')
attachment_obj = self.pool.get('ir.attachment')
statement_obj = self.pool.get('account.bank.statement')
file = data['form']['file']
if not file:
raise except_osv(_('UserError'),
_('Please select a file first!'))
statement_id = data['id']
lines = base64.decodestring(file).split("\n")
records = self._parse_lines(cursor, uid, lines, context=context)
statement = statement_obj.browse(cursor, uid, statement_id, context=context)
for record in records:
values = self._prepare_line_vals(cursor, uid, statement,
record, voucher_enabled,
context=context)
statement_line_obj.create(cursor, uid, values, context=context)
attachment_obj.create(cursor, uid,
{'name': 'BVR %s' % time.strftime("%Y-%m-%d_%H:%M:%S", time.gmtime()),
'datas': file,
'datas_fname': 'BVR %s.txt' % time.strftime("%Y-%m-%d_%H:%M:%S", time.gmtime()),
'res_model': 'account.bank.statement',
'res_id': statement_id,
},
context=context)
return {}
def import_bvr(self, cursor, uid, ids, context=None):
data = {}
if context is None: context = {}
active_ids = context.get('active_ids', [])
active_id = context.get('active_id', False)
data['form'] = {}
data['ids'] = active_ids
data['id'] = active_id
data['form']['file'] = ''
res = self.read(cursor, uid, ids[0], ['file'])
if res:
data['form']['file'] = res['file']
self.import_v11(cursor, uid, ids, data, context=context)
return {}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|>
|
if round(amount - total_amount, 2) >= 0.01 \
|
<|file_name|>find_vms_by_deployment_parameters.go<|end_file_name|><|fim▁begin|>package vm
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"time"
"golang.org/x/net/context"
"github.com/go-openapi/errors"
"github.com/go-openapi/runtime"
cr "github.com/go-openapi/runtime/client"
"github.com/go-openapi/swag"
strfmt "github.com/go-openapi/strfmt"
)
// NewFindVmsByDeploymentParams creates a new FindVmsByDeploymentParams object
// with the default values initialized.
func NewFindVmsByDeploymentParams() *FindVmsByDeploymentParams {
var ()
return &FindVmsByDeploymentParams{
timeout: cr.DefaultTimeout,
}
}
// NewFindVmsByDeploymentParamsWithTimeout creates a new FindVmsByDeploymentParams object
// with the default values initialized, and the ability to set a timeout on a request
func NewFindVmsByDeploymentParamsWithTimeout(timeout time.Duration) *FindVmsByDeploymentParams {
var ()
return &FindVmsByDeploymentParams{
timeout: timeout,
}
}
// NewFindVmsByDeploymentParamsWithContext creates a new FindVmsByDeploymentParams object
// with the default values initialized, and the ability to set a context for a request
func NewFindVmsByDeploymentParamsWithContext(ctx context.Context) *FindVmsByDeploymentParams {
var ()
return &FindVmsByDeploymentParams{
Context: ctx,
}
}
/*FindVmsByDeploymentParams contains all the parameters to send to the API endpoint
for the find vms by deployment operation. Typically these are written to a http.Request.
*/
type FindVmsByDeploymentParams struct {
/*Deployment
deployment values that need to be considered for filter
*/
Deployment []string
timeout time.Duration
Context context.Context
}
// WithTimeout adds the timeout to the find vms by deployment params
func (o *FindVmsByDeploymentParams) WithTimeout(timeout time.Duration) *FindVmsByDeploymentParams {
o.SetTimeout(timeout)
return o
}
// SetTimeout adds the timeout to the find vms by deployment params
func (o *FindVmsByDeploymentParams) SetTimeout(timeout time.Duration) {
o.timeout = timeout
}
// WithContext adds the context to the find vms by deployment params
func (o *FindVmsByDeploymentParams) WithContext(ctx context.Context) *FindVmsByDeploymentParams {
o.SetContext(ctx)
return o
}
// SetContext adds the context to the find vms by deployment params<|fim▁hole|> o.Context = ctx
}
// WithDeployment adds the deployment to the find vms by deployment params
func (o *FindVmsByDeploymentParams) WithDeployment(deployment []string) *FindVmsByDeploymentParams {
o.SetDeployment(deployment)
return o
}
// SetDeployment adds the deployment to the find vms by deployment params
func (o *FindVmsByDeploymentParams) SetDeployment(deployment []string) {
o.Deployment = deployment
}
// WriteToRequest writes these params to a swagger request
func (o *FindVmsByDeploymentParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
r.SetTimeout(o.timeout)
var res []error
valuesDeployment := o.Deployment
joinedDeployment := swag.JoinByFormat(valuesDeployment, "multi")
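	// The "multi" collection format serializes each value as its own query
	// parameter, e.g. ?deployment=a&deployment=b.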
// query array param deployment
if err := r.SetQueryParam("deployment", joinedDeployment...); err != nil {
return err
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}<|fim▁end|>
|
func (o *FindVmsByDeploymentParams) SetContext(ctx context.Context) {
|
<|file_name|>db.go<|end_file_name|><|fim▁begin|>// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*
Package db contains functions related to database.
* db.go includes connecting to db, query and insertion.
* dbschema.go contains definitions of struct for db tables.
Currently it only contains Module struct.
*/
package db
import (
"database/sql"
"fmt"
"os"
"strconv"
"github.com/golang/glog"
// Go postgres driver for Go's database/sql package
_ "github.com/lib/pq"
)
// These are SQL statements used in this package.
// Query statements can be appended based on their query parameters.
const (
// $4 and $5 should be assigned with the same value (the JSON data of module).
insertModule = `INSERT INTO modules (orgName, name, version, data) VALUES($1, $2, $3, $4) on conflict (orgName, name, version) do update set data=$5`
selectModules = `select * from modules`
// We want to ensure that user has to provide all three inputs,
// instead of deleting too many modules by mistake with some fields missing.
deleteModule = `delete from modules where orgName = $1 and name = $2 and version = $3`
selectFeatureBundles = `select * from featureBundles`
// $4 and $5 should be assigned with the same value (the JSON data of feature-bundle).
insertFeatureBundle = `INSERT INTO featureBundles (orgName, name, version, data) VALUES($1, $2, $3, $4) on conflict (orgName, name, version) do update set data=$5`
deleteFeatureBundle = `delete from featurebundles where orgName = $1 and name = $2 and version = $3`
)
// db is the global variable holding the connection to the database.
// It is assigned when the *ConnectDB* function is called.
//
// We choose to use this global variable because *gqlgen* automatically generates much of the server-side code.
// Resolver functions generated by *gqlgen* are handler functions for graphQL queries and are methods of *resolver* struct.
// If we define a struct with field of db connection instead of using the global variable
// and change *Query* functions to methods of that struct,
// we need to initialize db connection while initializing *resolver* object in server codes
// such that *resolver* functions can call these *Query* functions.
// However, initialization function of *resolver* struct is automatically generated and overwritten every time.
// Thus, we cannot initialize a db connection inside the *resolver* objects.
//
// Another option is to establish a new db connection for each *Query* function and close it after query finishes.
// However, that would be too expensive to connect to db every time server receives a new query.
var db *sql.DB
// ConnectDB establishes connection to database, *db* variable is assigned when opening database.
// This should only be called once before any other database function is called.
//
// Users need to set environment variables for connection, including
// * DB_HOST: host address of target db instances, by default: localhost.
// * DB_PORT: port number of postgres db, by default: 5432.
// * DB_USERNAME: username of database, error would be returned if not set.
// * DB_PWD: password of target database, error would be returned if not set.
// * DB_NAME: name of database for connection, error would be returned if not set.
// * DB_SOCKET_DIR: directory of Unix socket in Cloud Run which serves as Cloud SQL
// Auth proxy to connect to postgres database.
// If service is deployed on Cloud Run, just use the default value.
// By default, it is set to `/cloudsql`.
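//
// A hypothetical local setup (all values illustrative only):
//
//	export DB_HOST=localhost
//	export DB_PORT=5432
//	export DB_USERNAME=dbuser
//	export DB_PWD=dbpassword
//	export DB_NAME=moduledb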
func ConnectDB() error {
// read db config from env
// port number of target database
port := 5432
if portStr, ok := os.LookupEnv("DB_PORT"); !ok {
glog.Infof("DB_PORT not set, setting port to %d", port)
} else {
var err error
if port, err = strconv.Atoi(portStr); err != nil {
return fmt.Errorf("DB_PORT in incorrect format: %v", err)
}
}
// username of target database
user, ok := os.LookupEnv("DB_USERNAME")
if !ok {
return fmt.Errorf("DB_USERNAME not set")
}
// password of target database
password, ok := os.LookupEnv("DB_PWD")
if !ok {
return fmt.Errorf("DB_PWD not set")
}
// name of target database
dbname, ok := os.LookupEnv("DB_NAME")
if !ok {
return fmt.Errorf("DB_NAME not set")
}
// (Cloud Run only) Directory of Unix socket
socketDir, ok := os.LookupEnv("DB_SOCKET_DIR")
if !ok {
socketDir = "/cloudsql"
}
	var psqlconn string // connection string used to connect to the target database
// host address of target database
host, ok := os.LookupEnv("DB_HOST")
switch {
case !ok:
glog.Infoln("DB_HOST not set, setting host to localhost")
host = "localhost"
fallthrough
case host == "localhost":
// This connection string is used if service is not deployed on Cloud Run,
// instead connection is made from localhost via Cloud SQL proxy.
psqlconn = fmt.Sprintf("host=%s port=%d user=%s password=%s dbname=%s sslmode=disable", host, port, user, password, dbname)
default:
psqlconn = fmt.Sprintf("host=%s/%s port=%d user=%s password=%s dbname=%s", socketDir, host, port, user, password, dbname)
}
// open database
var err error
db, err = sql.Open("postgres", psqlconn)
if err != nil {
return fmt.Errorf("open database failed: %v", err)
}
// see if connection is established successfully
if err := db.Ping(); err != nil {
return fmt.Errorf("ping database failed: %v", err)
}
return nil
}
// Close function closes db connection
func Close() error {
return db.Close()
}
// InsertModule inserts a module into the database given values for the four fields of the MODULE schema.
// If a module with the same key (orgName, name, version) already exists, its data field is updated.
// Error is returned when insertion failed.
func InsertModule(orgName string, name string, version string, data string) error {
if _, err := db.Exec(insertModule, orgName, name, version, data, data); err != nil {
return fmt.Errorf("insert/update module into db failed: %v", err)
}
return nil
}
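// A minimal sketch of a call, upserting one record (all values illustrative):
//
//	if err := InsertModule("openconfig", "openconfig-interfaces", "2.0.0", `{"summary": "..."}`); err != nil {
//		glog.Error(err)
//	}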
// ReadModulesByRow scans queried modules from rows one by one; rows are closed inside.
// Return slice of db Module struct each field of which corresponds to one column in db.
// Error is returned when scan rows failed.
func ReadModulesByRow(rows *sql.Rows) ([]Module, error) {
var modules []Module
defer rows.Close()
for rows.Next() {
var module Module
if err := rows.Scan(&module.OrgName, &module.Name, &module.Version, &module.Data); err != nil {
return nil, fmt.Errorf("scan db rows failure, %v", err)
}
modules = append(modules, module)
}
return modules, nil
}
// FormatQueryStr is used to generate query statement string based on query parameters.
// * parmNames is a list of names of all non-nil query parameters.
// * baseQuery is query statement without any query parameters.
func FormatQueryStr(parmNames []string, baseQuery string) string {
queryStmt := baseQuery
for i := 0; i < len(parmNames); i++ {
if i == 0 {
queryStmt += " where"
} else {
queryStmt += " and"
}
queryStmt += fmt.Sprintf(" %s=$%d", parmNames[i], i+1)
}
return queryStmt
}
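// For example, FormatQueryStr([]string{"name", "version"}, selectModules)
// returns "select * from modules where name=$1 and version=$2".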
// QueryModulesByOrgName queries modules of organization with *orgName* from database.
// If orgName is null then directly query all modules.
// Return slice of db Module struct each field of which corresponds to one column in db.
// Error is returned when query or reading data failed.
func QueryModulesByOrgName(orgName *string) ([]Module, error) {
var parms []interface{} // parms is used to store value of non-nil query parameters
parmNames := []string{} // parmNames is used to store name of non-nil query parameters
if orgName != nil {
parms = append(parms, *orgName)
parmNames = append(parmNames, "orgName")
}
// Format query statement string based on non-nil query parameters
queryStmt := FormatQueryStr(parmNames, selectModules)
rows, err := db.Query(queryStmt, parms...)
if err != nil {
return nil, fmt.Errorf("QueryModulesByOrgName failed: %v", err)
}
defer rows.Close()
return ReadModulesByRow(rows)
}
// QueryModulesByKey queries modules by its key (name, version), it is possible that parameters are null.
// If both parameters are null, this equals query for all modules.
// Return slice of db Module struct each field of which corresponds to one column in db.
// Error is returned when query or reading data failed.
func QueryModulesByKey(name *string, version *string) ([]Module, error) {
var parms []interface{} // parms is used to store value of non-nil query parameters
parmNames := []string{} // parmNames is used to store name of non-nil query parameters
if name != nil {
parms = append(parms, *name)
parmNames = append(parmNames, "name")
}
if version != nil {
parms = append(parms, *version)
parmNames = append(parmNames, "version")
}
// Format query statement string based on non-nil query parameters
queryStmt := FormatQueryStr(parmNames, selectModules)
rows, err := db.Query(queryStmt, parms...)
if err != nil {
return nil, fmt.Errorf("QueryModulesByOrgName failed: %v", err)
}
defer rows.Close()
return ReadModulesByRow(rows)
}
// DeleteModule takes three strings, orgName, name, version,
// whose combination is the key of one Module in DB's Module table.
// If deletion fails, a non-nil error is returned.
// If the number of rows affected by this deletion is not 1, an error is also returned.
func DeleteModule(orgName string, name string, version string) error {
result, err := db.Exec(deleteModule, orgName, name, version)
if err != nil {
return fmt.Errorf("DeleteModule failed: %v", err)
}
num, err := result.RowsAffected()
if err != nil {
return fmt.Errorf("DeleteModule, access rows affected in result failed: %v", err)
}
// delete should only affect one row
if num != 1 {
return fmt.Errorf("DeleteModule: affected row is not one, it affects %d rows", num)
}
return nil
}
// ReadFeatureBundlesByRow scans queried FeatureBundles from rows one by one; rows are closed inside.
// Return slice of db FeatureBundle struct each field of which corresponds to one column in db.
// Error is returned when scan rows failed.
func ReadFeatureBundlesByRow(rows *sql.Rows) ([]FeatureBundle, error) {
var featureBundles []FeatureBundle
defer rows.Close()
for rows.Next() {
var featureBundle FeatureBundle
if err := rows.Scan(&featureBundle.OrgName, &featureBundle.Name, &featureBundle.Version, &featureBundle.Data); err != nil {
return nil, fmt.Errorf("ReadFeatureBundlesByRow: scan db rows failure, %v", err)
}
featureBundles = append(featureBundles, featureBundle)
}
return featureBundles, nil
}
// QueryFeatureBundlesByOrgName queries feature-bundles of organization with *orgName* from database.
// If orgName is null then directly query all feature-bundles.
// Return slice of db FeatureBundle struct each field of which corresponds to one column in db.
// Error is returned when query or reading data failed.
func QueryFeatureBundlesByOrgName(orgName *string) ([]FeatureBundle, error) {
var parms []interface{} // parms is used to store value of non-nil query parameters
parmNames := []string{} // parmNames is used to store name of non-nil query parameters
if orgName != nil {
parms = append(parms, *orgName)
parmNames = append(parmNames, "orgName")
}
// Format query statement string based on non-nil query parameters
queryStmt := FormatQueryStr(parmNames, selectFeatureBundles)
rows, err := db.Query(queryStmt, parms...)
if err != nil {
return nil, fmt.Errorf("QueryFeatureBundlesByOrgName failed: %v", err)
}
return ReadFeatureBundlesByRow(rows)
}
// InsertFeatureBundle inserts a FeatureBundle into the database given values of the four fields of the FeatureBundle schema.
// If a FeatureBundle with the same key (orgName, name, version) already exists, its data field is updated.
// Error is returned when insertion failed.
func InsertFeatureBundle(orgName string, name string, version string, data string) error {
if _, err := db.Exec(insertFeatureBundle, orgName, name, version, data, data); err != nil {
return fmt.Errorf("insert/update FeatureBundle into db failed: %v", err)
}
return nil
}
// DeleteFeatureBundle takes three pointer of string, orgName, name, version,<|fim▁hole|> result, err := db.Exec(deleteFeatureBundle, orgName, name, version)
if err != nil {
return fmt.Errorf("DeleteFeatureBundle failed: %v", err)
}
num, err := result.RowsAffected()
if err != nil {
return fmt.Errorf("DeleteFeatureBundle, access rows affected in result failed: %v", err)
}
// delete should only affect one row
if num != 1 {
return fmt.Errorf("DeleteFeatureBundle: affected row is not one, it affects %d rows", num)
}
return nil
}
// QueryFeatureBundlesByKey queries feature-bundles by its key (name, version), it is possible that parameters are null.
// If both parameters are null, this equals query for all feature-bundles.
// Return slice of db FeatureBundle struct each field of which corresponds to one column in db.
// Error is returned when query or reading data failed.
func QueryFeatureBundlesByKey(name *string, version *string) ([]FeatureBundle, error) {
var parms []interface{} // parms is used to store value of non-nil query parameters
parmNames := []string{} // parmNames is used to store name of non-nil query parameters
if name != nil {
parms = append(parms, *name)
parmNames = append(parmNames, "name")
}
if version != nil {
parms = append(parms, *version)
parmNames = append(parmNames, "version")
}
// Format query statement string based on non-nil query parameters
queryStmt := FormatQueryStr(parmNames, selectFeatureBundles)
rows, err := db.Query(queryStmt, parms...)
if err != nil {
return nil, fmt.Errorf("QueryFeatureBundlesByKey failed: %v", err)
}
return ReadFeatureBundlesByRow(rows)
}<|fim▁end|>
|
// whose combination is the key of one FeatureBundle in DB's FeatureBundle table.
// If deletion fails, a non-nil error is returned.
// If the number of rows affected by this deletion is not 1, an error is also returned.
func DeleteFeatureBundle(orgName string, name string, version string) error {
|
<|file_name|>SVGTester.tsx<|end_file_name|><|fim▁begin|>import * as fs from "fs-extra"
import { csvParse } from "d3-dsv"
import md5 from "md5"
import { BAKED_GRAPHER_URL } from "../../settings/clientSettings"
import React from "react"
import { closeTypeOrmAndKnexConnections } from "../../db/db"
import {
bakeGrapherToSvg,
getPublishedGraphersBySlug,
} from "../../baker/GrapherImageBaker"
const header = `bakeOrder,timeToBake,slug,chartType,md5`
const sampleRow = `1,123,world-pop,LineChart,ee5a6312...`
interface BakedSvgInfo {
bakeOrder: number
timeToBake: number
slug: string
md5: string
}
const svgResultsPlaceholder = `${header}\n${sampleRow}\n`
const style = {
width: 600,
height: 300,
}
export const svgCompareFormPage = (
<form action="" method="post">
<div>Prod SVG Results CSV</div>
<textarea
name="prodResults"
placeholder={svgResultsPlaceholder}
style={style}
/>
<br />
<div>Local SVG Results CSV</div>
<textarea
name="localResults"
placeholder={svgResultsPlaceholder}
style={style}
/>
<br />
<button type="submit">Compare</button>
</form>
)
export async function bakeAndSaveResultsFile(
bakeLimit: number = 100000,
outDir: string = __dirname + "/bakedSvgs"
) {
if (!fs.existsSync(outDir)) fs.mkdirSync(outDir)
const { graphersBySlug } = await getPublishedGraphersBySlug()
const resultsPath = outDir + "/results.csv"
fs.writeFileSync(resultsPath, header + "\n")
// eslint-disable-next-line no-console
console.log(header)
let bakeOrder = 1
for (const [slug, config] of graphersBySlug) {
if (bakeOrder > bakeLimit) break
const startTime = Date.now()
const svg = await bakeGrapherToSvg(
config,
outDir,
slug,
undefined,
undefined,
true,
false
)
const row = {
bakeOrder,
timeToBake: Date.now() - startTime,
slug,
chartType: config.type,
md5: md5(svg!),
}
const line = `${bakeOrder},${row.timeToBake},${row.slug},${row.chartType},${row.md5}`
// eslint-disable-next-line no-console
console.log(line)<|fim▁hole|> fs.appendFileSync(resultsPath, line + "\n")
bakeOrder++
}
await closeTypeOrmAndKnexConnections()
}
const compareSets = (liveSvgs: BakedSvgInfo[], localSvgs: BakedSvgInfo[]) => {
const localSvgMap = new Map<string, BakedSvgInfo>()
localSvgs.forEach((svg) => {
localSvgMap.set(svg.slug, svg)
})
return liveSvgs.map((liveSvg) => {
const { slug } = liveSvg
const localSvg = localSvgMap.get(slug)
if (!localSvg)
return {
missing: slug,
}
const changed = liveSvg.md5 !== localSvg.md5
const devInteractiveUrl = `${BAKED_GRAPHER_URL}/${slug}`
const devSvgPath = `${BAKED_GRAPHER_URL}/exports/${slug}.svg`
const liveInteractiveUrl = `https://ourworldindata.org/grapher/${slug}`
const liveSvgUrl = `https://ourworldindata.org/grapher/exports/${slug}.svg`
return {
changed,
liveSvgUrl,
liveInteractiveUrl,
devSvgPath,
devInteractiveUrl,
}
})
}
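// Output shape sketch for compareSets (field values illustrative): a slug present in
// both sets yields { changed, liveSvgUrl, liveInteractiveUrl, devSvgPath, devInteractiveUrl };
// a slug on live that is missing locally yields { missing: slug }.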
export const getComparePage = async (liveRows: string, devRows: string) => {
const live = csvParse(liveRows)
const dev = csvParse(devRows)
const files = compareSets(live as any, dev as any)
const missing = files.filter((file) => file.missing)
const notMissing = files.filter((file) => !file.missing)
const changed = notMissing.filter((file) => file.changed)
const rows = changed.map((file) => (
<tr key={file.devSvgPath ?? file.devInteractiveUrl}>
<td>
<a href={file.liveSvgUrl}>
<img src={file.liveSvgUrl} />
</a>
<a href={file.liveInteractiveUrl}>{file.liveInteractiveUrl}</a>
</td>
<td>
<a href={file.devSvgPath}>
<img src={file.devSvgPath} />
</a>
<a href={file.devInteractiveUrl}>{file.devInteractiveUrl}</a>
</td>
</tr>
))
const summaryMessage = `${changed.length} (${Math.round(
(100 * changed.length) / notMissing.length
)}%) out of ${notMissing.length} are different. ${
notMissing.length - changed.length
} unchanged. ${missing.length} files on live missing locally.`
const missingDivs = missing.map((el) => (
<div key={el.devSvgPath ?? el.devInteractiveUrl}>${el.missing}</div>
))
return (
<div>
<div>{summaryMessage}</div>
<table>{rows}</table>
<div>{missing.length && <>{missingDivs}</>}</div>
</div>
)
}<|fim▁end|>
| |
<|file_name|>statusBar.py<|end_file_name|><|fim▁begin|># This file is part of DevParrot.
#
# Author: Matthieu Gautier <[email protected]>
#
# DevParrot is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DevParrot is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with DevParrot. If not, see <http://www.gnu.org/licenses/>.
#
#
# Copyright 2011-2013 Matthieu Gautier
import tkinter, tkinter.ttk
import logging
from devparrot.core import session, userLogging
class StatusBar(tkinter.Frame, logging.Handler):<|fim▁hole|> self['relief'] = 'sunken'
session.userLogger.addHandler(self)
self.label = tkinter.Label(self)
self.label.pack(side='left', fill=tkinter.BOTH, expand=True)
self.defaultColor = self['background']
self.label['anchor'] = 'nw'
separator = tkinter.ttk.Separator(self, orient="vertical")
separator.pack(side='left', fill='y')
self.insertLabel = tkinter.ttk.Label(self)
self.insertLabel.pack(side='right', expand=False, fill="none")
session.eventSystem.connect('mark_set', self.on_mark_set)
self.currentLevel = 0
self.callbackId = 0
def flush(self):
"""overide logging.Handler.flush"""
pass
def clear(self):
self.currentLevel = 0
self.label['text'] = ""
self.label['background'] = self.defaultColor
self.callbackId = 0
def emit(self,record):
"""overide logging.Handler.emit"""
if record.levelno >= self.currentLevel:
self.currentLevel = record.levelno
self.label['text'] = record.getMessage()
if self.currentLevel == userLogging.INFO:
self.label['background'] = session.config.get('ok_color')
if self.currentLevel == userLogging.ERROR:
self.label['background'] = session.config.get('error_color')
if self.currentLevel == userLogging.INVALID:
self.label['background'] = session.config.get('invalid_color')
if self.callbackId:
self.after_cancel(self.callbackId)
self.callbackId = self.after(5000, self.clear)
def on_mark_set(self, model, name, index):
if name == "insert":
if model.sel_isSelection():
self.insertLabel['text'] = "[%s:%s]"%(model.index("sel.first"), model.index("sel.last"))
else:
self.insertLabel['text'] = str(model.index("insert"))<|fim▁end|>
|
def __init__(self, parent):
tkinter.Frame.__init__(self, parent)
logging.Handler.__init__(self)
self.pack(side=tkinter.BOTTOM, fill=tkinter.X)
|
<|file_name|>vert_edges_kdtree_range.py<|end_file_name|><|fim▁begin|>def sv_main(num_verts=20, radius=5, num_rings=3, rotation=0.3, mdist=0.3):
# in boilerplate, could be less verbose
in_sockets = [
['s', 'num_verts', num_verts],
['s', 'radius', radius],
['s', 'num_rings', num_rings],
['s', 'rotation', rotation],
['s', 'distance', mdist]
]
from math import sin, cos, pi
import mathutils<|fim▁hole|> angle = TWO_PI / num_verts
v = []
e = []
# create vertices
for j in range(num_rings):
radial_offset = rotation * j
for i in range(num_verts):
theta = (angle * i) + radial_offset
tr = r + (0.5 * j)
v.append([cos(theta) * tr, sin(theta) * tr, 0])
# make kd tree
# documentation/blender_python_api_2_70_release/mathutils.kdtree.html
size = len(v)
kd = mathutils.kdtree.KDTree(size)
for i, vtx in enumerate(v):
kd.insert(Vector(vtx), i)
kd.balance()
# makes edges
for i, vtx in enumerate(v):
num_edges = 0
for (co, index, dist) in kd.find_range(vtx, mdist):
if i == index or (num_edges > 2):
continue
e.append([i, index])
num_edges += 1
# out boilerplate
out_sockets = [
['v', 'Vecs', [v]],
['s', 'Edges', e]
]
return in_sockets, out_sockets<|fim▁end|>
|
from mathutils import Vector
TWO_PI = 2 * pi
r = radius
|
<|file_name|>PluginManager.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8; tab-width: 4; indent-tabs-mode: t; python-indent: 4 -*-
"""
Role
====
The ``PluginManager`` loads plugins that enforce the `Plugin
Description Policy`_, and offers the most simple methods to activate
and deactivate the plugins once they are loaded.
.. note:: It may also classify the plugins in various categories, but
this behaviour is optional and, if not specified otherwise, all
plugins are stored in the same default category.
.. note:: It is often more useful to have the plugin manager behave
like a singleton; this functionality is provided by
``PluginManagerSingleton``
Plugin Description Policy
=========================
When creating a ``PluginManager`` instance, one should provide it with
a list of directories where plugins may be found. In each directory,
a plugin should contain the following elements:
For a *Standard* plugin:
``myplugin.yapsy-plugin``
A *plugin info file* identical to the one previously described.
``myplugin``
A directory containing an actual Python plugin (i.e. with a
``__init__.py`` file that makes it importable). The upper
namespace of the plugin should present a class inheriting the
``IPlugin`` interface (the same remarks apply here as in the
previous case).
For a *Single file* plugin:
``myplugin.yapsy-plugin``
A *plugin info file* which is identified thanks to its extension,
see the `Plugin Info File Format`_ to see what should be in this
file.
The extension is customisable at the ``PluginManager``'s
instantiation, since one may usually prefer the extension to bear
the application name.
``myplugin.py``
The source of the plugin. This file should at least define a class
inheriting the ``IPlugin`` interface. This class will be
instantiated at plugin loading and it will be notified of the
activation/deactivation events.
Plugin Info File Format
-----------------------
The plugin info file is a text file *encoded in ASCII or UTF-8* and
gathering, as its name suggests, some basic information about the
plugin.
- it gives crucial information needed to be able to load the plugin
- it provides some documentation-like information such as the plugin
  author's name and a short description of the plugin functionality.
Here is an example of what such a file should contain::
[Core]
Name = My plugin Name
Module = the_name_of_the_pluginto_load_with_no_py_ending
[Documentation]
Description = What my plugin broadly does
Author = My very own name
Version = the_version_number_of_the_plugin
Website = My very own website
.. note:: From such plugin descriptions, the ``PluginManager`` will
build its own representations of the plugins as instances of
the :doc:`PluginInfo` class.
Changing the default behaviour
==============================
The default behaviour for locating and loading plugins can be changed
using the various options exposed on the interface via getters.
The plugin detection, in particular, can be fully customized by
setting a custom plugin locator. See ``IPluginLocator`` for more
details on this.
Extensibility
=============
Several mechanisms have been put up to help extend the basic
functionalities of the provided classes.
A few *hints* to help you extend those classes:
If the new functionalities do not overlap the ones already
implemented, then they should be implemented as a Decorator class of the
base plugin. This should be done by inheriting the
``PluginManagerDecorator``.
If this previous way is not possible, then the functionalities should
be added as a subclass of ``PluginManager``.
.. note:: The first method is highly preferred since it makes it
possible to have a more flexible design where one can pick
several functionalities and literally *add* them to get an
object corresponding to one's precise needs.
API
===
"""
import sys
import os
import imp
from yapsy import log
from yapsy import NormalizePluginNameForModuleName
from yapsy.IPlugin import IPlugin
from yapsy.IPluginLocator import IPluginLocator
# The following two imports are used to implement the default behaviour
from yapsy.PluginFileLocator import PluginFileAnalyzerWithInfoFile
from yapsy.PluginFileLocator import PluginFileLocator
# imported for backward compatibility (this variable was defined here
# before 1.10)
from yapsy import PLUGIN_NAME_FORBIDEN_STRING
# imported for backward compatibility (this PluginInfo was imported
# here before 1.10)
from yapsy.PluginInfo import PluginInfo
class PluginManager(object):
"""
Manage several plugins by ordering them in categories.
The mechanism for searching and loading the plugins is already
implemented in this class so that it can be used directly (hence
it can be considered as a bit more than a mere interface)
The file describing a plugin must be written in the syntax
compatible with Python's ConfigParser module as in the
`Plugin Info File Format`_
"""
def __init__(self,
categories_filter=None,
directories_list=None,
plugin_info_ext=None,
plugin_locator=None):
"""
Initialize the mapping of the categories and set the list of<|fim▁hole|> - ``setPluginPlaces`` for ``directories_list``
- ``setPluginInfoExtension`` for ``plugin_info_ext``
You may look at these function's documentation for the meaning
of each corresponding arguments.
"""
# as a good practice we don't use mutable objects as default
# values (these objects would become like static variables)
# for function/method arguments, but rather use None.
if categories_filter is None:
categories_filter = {"Default":IPlugin}
self.setCategoriesFilter(categories_filter)
plugin_locator = self._locatorDecide(plugin_info_ext, plugin_locator)
# plugin_locator could be either a dict defining strategies, or directly
# an IPluginLocator object
self.setPluginLocator(plugin_locator, directories_list)
def _locatorDecide(self, plugin_info_ext, plugin_locator):
"""
For backward compatibility, we kept the *plugin_info_ext* argument.
Thus we may use it if provided. Returns the (possibly modified)
*plugin_locator*.
"""
specific_info_ext = plugin_info_ext is not None
specific_locator = plugin_locator is not None
if not specific_info_ext and not specific_locator:
# use the default behavior
res = PluginFileLocator()
elif not specific_info_ext and specific_locator:
# plugin_info_ext not used
res = plugin_locator
elif not specific_locator and specific_info_ext:
# plugin_locator not used, and plugin_info_ext provided
# -> compatibility mode
res = PluginFileLocator()
res.setAnalyzers([PluginFileAnalyzerWithInfoFile("info_ext",plugin_info_ext)])
elif specific_info_ext and specific_locator:
# both provided... raise an error since "plugin_info_ext"
# and "plugin_locator" cannot be combined
msg = ("Two incompatible arguments (%s) provided:",
"'plugin_info_ext' and 'plugin_locator'). Ignoring",
"'plugin_info_ext'.")
raise ValueError(" ".join(msg) % self.__class__.__name__)
return res
def setCategoriesFilter(self, categories_filter):
"""
Set the categories of plugins to be looked for as well as the
way to recognise them.
The ``categories_filter`` first defines the various categories
in which the plugins will be stored via its keys and it also
defines the interface that has to be inherited by the actual
plugin class belonging to each category.
"""
self.categories_interfaces = categories_filter.copy()
# prepare the mapping from categories to plugin lists
self.category_mapping = {}
# also maps the plugin info files (useful to avoid loading
# twice the same plugin...)
self._category_file_mapping = {}
for categ in categories_filter:
self.category_mapping[categ] = []
self._category_file_mapping[categ] = []
def setPluginPlaces(self, directories_list):
"""
DEPRECATED(>1.9): directly configure the IPluginLocator instance instead !
Convenience method (actually call the IPluginLocator method)
"""
self.getPluginLocator().setPluginPlaces(directories_list)
def updatePluginPlaces(self, directories_list):
"""
DEPRECATED(>1.9): directly configure the IPluginLocator instance instead !
Convenience method (actually call the IPluginLocator method)
"""
self.getPluginLocator().updatePluginPlaces(directories_list)
def setPluginInfoExtension(self, ext):
"""
DEPRECATED(>1.9): for backward compatibility. Directly configure the
IPluginLocator instance instead !
.. warning:: This will only work if the strategy "info_ext" is
active for locating plugins.
"""
try:
self.getPluginLocator().setPluginInfoExtension(ext)
except KeyError:
log.error("Current plugin locator doesn't support setting the plugin info extension.")
def setPluginInfoClass(self, picls, strategies=None):
"""
DEPRECATED(>1.9): directly configure the IPluginLocator instance instead !
Convenience method (actually call self.getPluginLocator().setPluginInfoClass)
When using a ``PluginFileLocator`` you may restrict the
strategies to which the change of PluginInfo class will occur
by just giving the list of strategy names in the argument
"strategies"
"""
if strategies:
for name in strategies:
self.getPluginLocator().setPluginInfoClass(picls, name)
else:
self.getPluginLocator().setPluginInfoClass(picls)
def getPluginInfoClass(self):
"""
DEPRECATED(>1.9): directly control that with the IPluginLocator
instance instead !
Get the class that holds PluginInfo.
"""
return self.getPluginLocator().getPluginInfoClass()
def setPluginLocator(self, plugin_locator, dir_list=None, picls=None):
"""
Sets the strategy used to locate the basic information.
See ``IPluginLocator`` for the policy that plugin_locator must enforce.
"""
if isinstance(plugin_locator, IPluginLocator):
self._plugin_locator = plugin_locator
if dir_list is not None:
self._plugin_locator.updatePluginPlaces(dir_list)
if picls is not None:
self.setPluginInfoClass(picls)
else:
raise TypeError("Unexpected format for plugin_locator ('%s' is not an instance of IPluginLocator)" % plugin_locator)
def getPluginLocator(self):
"""
Grant direct access to the plugin locator.
"""
return self._plugin_locator
def _gatherCorePluginInfo(self, directory, plugin_info_filename):
"""
DEPRECATED(>1.9): please use a specific plugin
locator if you need such information.
Gather the core information (name, and module to be loaded)
about a plugin described by it's info file (found at
'directory/filename').
Return an instance of ``PluginInfo`` and the
config_parser used to gather the core data *in a tuple*, if the
required info could be localised, else return ``(None,None)``.
.. note:: This is supposed to be used internally by subclasses
and decorators.
"""
return self.getPluginLocator().gatherCorePluginInfo(directory,plugin_info_filename)
def _getPluginNameAndModuleFromStream(self,infoFileObject,candidate_infofile="<buffered info>"):
"""
DEPRECATED(>1.9): please use a specific plugin
locator if you need such information.
Extract the name and module of a plugin from the
content of the info file that describes it and which
is stored in infoFileObject.
.. note:: Prefer using ``_gatherCorePluginInfo``
instead, whenever possible...
.. warning:: ``infoFileObject`` must be a file-like
object: either an opened file for instance or a string
buffer wrapped in a StringIO instance as another
example.
.. note:: ``candidate_infofile`` must be provided
whenever possible to get better error messages.
Return a 3-uple with the name of the plugin, its
module and the config_parser used to gather the core
data *in a tuple*, if the required info could be
localised, else return ``(None,None,None)``.
.. note:: This is supposed to be used internally by subclasses
and decorators.
"""
return self.getPluginLocator().getPluginNameAndModuleFromStream(infoFileObject, candidate_infofile)
def getCategories(self):
"""
Return the list of all categories.
"""
return list(self.category_mapping.keys())
def removePluginFromCategory(self, plugin,category_name):
"""
Remove a plugin from the category where it's assumed to belong.
"""
self.category_mapping[category_name].remove(plugin)
def appendPluginToCategory(self, plugin, category_name):
"""
Append a new plugin to the given category.
"""
self.category_mapping[category_name].append(plugin)
def getPluginsOfCategory(self, category_name):
"""
Return the list of all plugins belonging to a category.
"""
return self.category_mapping[category_name][:]
def getAllPlugins(self):
"""
Return the list of all plugins (belonging to all categories).
"""
allPlugins = set()
for pluginsOfOneCategory in self.category_mapping.values():
allPlugins.update(pluginsOfOneCategory)
return list(allPlugins)
def getPluginCandidates(self):
"""
Return the list of possible plugins.
Each possible plugin (ie a candidate) is described by a 3-uple:
(info file path, python file path, plugin info instance)
.. warning: locatePlugins must be called before !
"""
if not hasattr(self, '_candidates'):
raise RuntimeError("locatePlugins must be called before getPluginCandidates")
return self._candidates[:]
def removePluginCandidate(self,candidateTuple):
"""
Remove a given candidate from the list of plugins that should be loaded.
The candidate must be represented by the same tuple described
in ``getPluginCandidates``.
.. warning: locatePlugins must be called before !
"""
if not hasattr(self, '_candidates'):
raise ValueError("locatePlugins must be called before removePluginCandidate")
self._candidates.remove(candidateTuple)
def appendPluginCandidate(self, candidateTuple):
"""
Append a new candidate to the list of plugins that should be loaded.
The candidate must be represented by the same tuple described
in ``getPluginCandidates``.
.. warning: locatePlugins must be called before !
"""
if not hasattr(self, '_candidates'):
raise ValueError("locatePlugins must be called before removePluginCandidate")
self._candidates.append(candidateTuple)
def locatePlugins(self):
"""
Convenience method (actually call the IPluginLocator method)
"""
self._candidates, npc = self.getPluginLocator().locatePlugins()
def loadPlugins(self, callback=None):
"""
Load the candidate plugins that have been identified through a
previous call to locatePlugins. For each plugin candidate
look for its category, load it and store it in the appropriate
slot of the ``category_mapping``.
If a callback function is specified, call it before every load
attempt. The ``plugin_info`` instance is passed as an argument to
the callback.
"""
# print "%s.loadPlugins" % self.__class__
if not hasattr(self, '_candidates'):
raise ValueError("locatePlugins must be called before loadPlugins")
processed_plugins = []
for candidate_infofile, candidate_filepath, plugin_info in self._candidates:
# make sure to attribute a unique module name to the one
# that is about to be loaded
plugin_module_name_template = NormalizePluginNameForModuleName("yapsy_loaded_plugin_" + plugin_info.name) + "_%d"
for plugin_name_suffix in range(len(sys.modules)):
plugin_module_name = plugin_module_name_template % plugin_name_suffix
if plugin_module_name not in sys.modules:
break
# tolerance on the presence (or not) of the py extensions
if candidate_filepath.endswith(".py"):
candidate_filepath = candidate_filepath[:-3]
# if a callback exists, call it before attempting to load
# the plugin so that a message can be displayed to the
# user
if callback is not None:
callback(plugin_info)
# cover the case when the __init__ of a package has been
# explicitly indicated
if "__init__" in os.path.basename(candidate_filepath):
candidate_filepath = os.path.dirname(candidate_filepath)
try:
# use imp to correctly load the plugin as a module
if os.path.isdir(candidate_filepath):
candidate_module = imp.load_module(plugin_module_name,None,candidate_filepath,("py","r",imp.PKG_DIRECTORY))
else:
with open(candidate_filepath+".py","r") as plugin_file:
candidate_module = imp.load_module(plugin_module_name,plugin_file,candidate_filepath+".py",("py","r",imp.PY_SOURCE))
except Exception:
exc_info = sys.exc_info()
log.error("Unable to import plugin: %s" % candidate_filepath, exc_info=exc_info)
plugin_info.error = exc_info
processed_plugins.append(plugin_info)
continue
processed_plugins.append(plugin_info)
if "__init__" in os.path.basename(candidate_filepath):
sys.path.remove(plugin_info.path)
# now try to find and initialise the first subclass of the correct plugin interface
for element in (getattr(candidate_module,name) for name in dir(candidate_module)):
plugin_info_reference = None
for category_name in self.categories_interfaces:
try:
is_correct_subclass = issubclass(element, self.categories_interfaces[category_name])
except Exception:
continue
if is_correct_subclass and element is not self.categories_interfaces[category_name]:
current_category = category_name
if candidate_infofile not in self._category_file_mapping[current_category]:
# we found a new plugin: initialise it and search for the next one
if not plugin_info_reference:
plugin_info.plugin_object = element()
plugin_info_reference = plugin_info
plugin_info.categories.append(current_category)
self.category_mapping[current_category].append(plugin_info_reference)
self._category_file_mapping[current_category].append(candidate_infofile)
# Remove candidates list since we don't need them any more and
# don't need to take up the space
delattr(self, '_candidates')
return processed_plugins
def collectPlugins(self):
"""
Walk through the plugins' places and look for plugins. Then
for each plugin candidate look for its category, load it and
stores it in the appropriate slot of the category_mapping.
"""
# print "%s.collectPlugins" % self.__class__
self.locatePlugins()
self.loadPlugins()
def getPluginByName(self,name,category="Default"):
"""
Get the plugin corresponding to a given category and name
"""
if category in self.category_mapping:
for item in self.category_mapping[category]:
if item.name == name:
return item
return None
def activatePluginByName(self,name,category="Default"):
"""
Activate a plugin corresponding to a given category + name.
"""
pta_item = self.getPluginByName(name,category)
if pta_item is not None:
plugin_to_activate = pta_item.plugin_object
if plugin_to_activate is not None:
log.debug("Activating plugin: %s.%s"% (category,name))
plugin_to_activate.activate()
return plugin_to_activate
return None
def deactivatePluginByName(self,name,category="Default"):
"""
Deactivate a plugin corresponding to a given category + name.
"""
if category in self.category_mapping:
plugin_to_deactivate = None
for item in self.category_mapping[category]:
if item.name == name:
plugin_to_deactivate = item.plugin_object
break
if plugin_to_deactivate is not None:
log.debug("Deactivating plugin: %s.%s"% (category,name))
plugin_to_deactivate.deactivate()
return plugin_to_deactivate
return None
class PluginManagerSingleton(object):
"""
Singleton version of the most basic plugin manager.
Being a singleton, this class should not be initialised explicitly
and the ``get`` classmethod must be called instead.
To call one of this class's methods you have to use the ``get``
method in the following way:
``PluginManagerSingleton.get().themethodname(theargs)``
To set up the various configurable variables of the
PluginManager's behaviour please call explicitly the following
methods:
- ``setCategoriesFilter`` for ``categories_filter``
- ``setPluginPlaces`` for ``directories_list``
- ``setPluginInfoExtension`` for ``plugin_info_ext``
"""
__instance = None
__decoration_chain = None
def __init__(self):
"""
Initialisation: this class should not be initialised
explicitly and the ``get`` classmethod must be called instead.
To set up the various configurable variables of the
PluginManager's behaviour please call explicitly the following
methods:
- ``setCategoriesFilter`` for ``categories_filter``
- ``setPluginPlaces`` for ``directories_list``
- ``setPluginInfoExtension`` for ``plugin_info_ext``
"""
if self.__instance is not None:
raise Exception("Singleton can't be created twice !")
def setBehaviour(self,list_of_pmd):
"""
Set the functionalities handled by the plugin manager by
giving a list of ``PluginManager`` decorators.
This function shouldn't be called several times in the same
process, but if it is, only the first call will have an effect.
It also has an effect only if called before the initialisation
of the singleton.
In cases where the function is indeed going to change anything
the ``True`` value is returned; in all other cases, the ``False``
value is returned.
"""
if self.__decoration_chain is None and self.__instance is None:
log.debug("Setting up a specific behaviour for the PluginManagerSingleton")
self.__decoration_chain = list_of_pmd
return True
else:
log.debug("Useless call to setBehaviour: the singleton is already instanciated of already has a behaviour.")
return False
setBehaviour = classmethod(setBehaviour)
def get(self):
"""
Actually create an instance
"""
if self.__instance is None:
if self.__decoration_chain is not None:
# Get the object to be decorated
# print self.__decoration_chain
pm = self.__decoration_chain[0]()
for cls_item in self.__decoration_chain[1:]:
# print cls_item
pm = cls_item(decorated_manager=pm)
# Decorate the whole object
self.__instance = pm
else:
# initialise the 'inner' PluginManagerDecorator
self.__instance = PluginManager()
log.debug("PluginManagerSingleton initialised")
return self.__instance
get = classmethod(get)
# For backward compatibility import the most basic decorator (it changed
# place as of v1.8)
from yapsy.PluginManagerDecorator import PluginManagerDecorator<|fim▁end|>
|
directories where plugins may be. This can also be set by
direct call the methods:
- ``setCategoriesFilter`` for ``categories_filter``
|
<|file_name|>PersistentObjectDraggableEditPane.js<|end_file_name|><|fim▁begin|>/*
Copyright 2012 - $Date $ by PeopleWare n.v.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
define(["dojo/_base/declare",
"ppwcode-util-oddsAndEnds/ui/horizontalPanesContainer/DraggablePane",
"ppwcode-vernacular-persistence/ui/persistentObjectButtonEditPane/PersistentObjectButtonEditPane",
"dojo/keys", "dojo/Deferred",
"ppwcode-util-oddsAndEnds/log/logger!",
"dojo/aspect",
"dojo/text!./persistentObjectDraggableEditPane.html", "dojo/i18n!./nls/labels",
"module",
"dijit/layout/LayoutContainer", "dijit/layout/ContentPane",
"dojo/_base/lang", "dojox/mvc/at",
"xstyle/css!./persistentObjectDraggableEditPane.css"],
function(declare,
DraggablePane, PersistentObjectButtonEditPane,
keys, Deferred,
logger,
aspect,
template, labels,
module) {
//noinspection LocalVariableNamingConventionJS
var PersistentObjectDraggableEditPane = declare([PersistentObjectButtonEditPane, DraggablePane], {
// summary:
// A PersistentObjectDraggableEditPane is a PersistentObjectButtonEditPane with a different template.
templateString: template,
labels: labels,
constructor: function(kwargs) {
var self = this;
self.set("opener", function(po) {
return self.container.openPaneFor(po, /*after*/ self);
});
},
postCreate: function() {
this.inherited(arguments);
var self = this;
self.own(self.on("keypress", function(event) {
var presentationMode = self.get("presentationMode");
var target = self.get("target");
if (presentationMode === self.VIEW && event.keyChar === "e" && target && target.get("editable")) {
event.preventDefault();
event.stopPropagation();
self.edit();
}
else if (event.keyChar === "w" && presentationMode === self.VIEW) {
event.preventDefault();
event.stopPropagation();
self.close();
}
else if ((presentationMode === self.EDIT || presentationMode === self.WILD) &&
(event.ctrlKey || event.metaKey) &&
(event.keyChar === "s" || (event.keyChar === "w" && event.altKey))) {
event.preventDefault();
event.stopPropagation();
self.save(event);
}
}));
// TODO below function too complex; refactor
self.own(self.on("keydown", function(event) {
var presentationMode = self.get("presentationMode");
if ((presentationMode === self.EDIT || presentationMode === self.DELETE_ONLY || presentationMode === self.WILD) &&
event.keyCode === keys.ESCAPE) {
event.preventDefault();
event.stopPropagation();
self.cancel(event);
}
else if (
((event.keyCode === keys.LEFT_ARROW || event.keyCode === keys.RIGHT_ARROW) &&
(presentationMode === self.VIEW || presentationMode === self.BUSY)) ||
((event.keyCode === keys.PAGE_UP || event.keyCode === keys.PAGE_DOWN || event.keyCode === keys.HOME || event.keyCode === keys.END) &&
(presentationMode === self.EDIT || presentationMode === self.DELETE_ONLY ||
presentationMode === self.WILD || presentationMode === self.VIEW || presentationMode === self.BUSY) &&
event.metaKey)
) {
event.preventDefault();
event.stopPropagation();
if ((event.keyCode === keys.LEFT_ARROW || event.keyCode === keys.PAGE_UP) && self.previous !== self.getFirst()) {
self.previous.focus();
}
else if ((event.keyCode === keys.RIGHT_ARROW || event.keyCode === keys.PAGE_DOWN) && self.next !== self.getLast()) {
self.next.focus();
}
else if (event.keyCode === keys.HOME && self.getFirst().next !== self.getLast()) {
self.getFirst().next.focus();
}
else if (event.keyCode === keys.END && self.getLast().previous !== self.getFirst()) {
self.getLast().previous.focus();
}
// IDEA: with shift: move left, right
}
}));
self.own(aspect.after(
self._btnDelete,
"_onDropDownMouseDown",
function(/*Event*/ e) {
// we need to stopPropagation, or else the enclosing movable will think we are starting a drag, and it will eat the mouse up
e.stopPropagation();
},
true
));
},
isVisualizationOf: function(object) {
return this.get("target") === object;
},
_setButtonsStyles: function(stylePresentationMode) {
this.inherited(arguments);
this._setVisible(this._btnClose, stylePresentationMode === this.VIEW, stylePresentationMode === this.BUSY);
},
cancel: function(event) {
return this._closeOnAlt(event, this.inherited(arguments));
},
save: function(event) {
return this._closeOnAlt(event, this.inherited(arguments));
},
remove: function(event) {
return this._closeOnAlt(event, this.inherited(arguments));
},
_closeOnAlt: function(/*Event*/ event, /*Promise*/ promise) {
if (!event || !event.altKey) {
return promise;
}
// also close
var self = this;
return promise.then(
function(result) {
return self.close().then(function() {return result;});
}
);
},
<|fim▁hole|>
var self = this;
var inheritedResult = self.inherited(arguments);
return inheritedResult.then(
function(po) {
var removed = self.removeFromContainer(); // this is destroyed.
if (removed) {
return removed.then(function() {
return po;
});
}
else {
return new Deferred().resolve(po); // returns the promise
}
}
);
}
});
PersistentObjectDraggableEditPane.mid = module.id;
return PersistentObjectDraggableEditPane;
});<|fim▁end|>
|
close: function() {
// summary:
// Close and destroy.
|
<|file_name|>production.py<|end_file_name|><|fim▁begin|>from base import *
<|fim▁hole|><|fim▁end|>
|
DEBUG = False
|
<|file_name|>login.module.js<|end_file_name|><|fim▁begin|>"use strict";
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;<|fim▁hole|> * Created by skytzi on 15.4.17.
*/
var core_1 = require("@angular/core");
var common_1 = require("@angular/common");
var router_1 = require("@angular/router");
var login_component_1 = require("./login.component");
var LoginModule = (function () {
function LoginModule() {
}
return LoginModule;
}());
LoginModule = __decorate([
core_1.NgModule({
imports: [router_1.RouterModule, common_1.CommonModule],
declarations: [login_component_1.LoginComponent],
exports: [login_component_1.LoginComponent]
})
], LoginModule);
exports.LoginModule = LoginModule;
//# sourceMappingURL=authentication.module.js.map<|fim▁end|>
|
};
Object.defineProperty(exports, "__esModule", { value: true });
/**
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
from zipfile import ZipFile
import decimal
import datetime
from xml.dom.minidom import parseString
from . import ods_components
from .formula import Formula
# Basic compatibility setup for Python 2 and Python 3.
try:
long
except NameError:
long = int
try:
unicode
except NameError:
unicode = str
# End compatibility setup.
class ODSWriter(object):
"""
Utility for writing OpenDocument Spreadsheets. Can be used in simple 1 sheet mode (use writerow/writerows) or with
multiple sheets (use new_sheet). It is suggested that you use this object like a context manager.
"""
def __init__(self, odsfile):
self.zipf = ZipFile(odsfile, "w")
# Make the skeleton of an ODS.
self.dom = parseString(ods_components.content_xml)
self.zipf.writestr("mimetype",
ods_components.mimetype.encode("utf-8"))
self.zipf.writestr("META-INF/manifest.xml",
ods_components.manifest_xml.encode("utf-8"))
self.zipf.writestr("styles.xml",
ods_components.styles_xml.encode("utf-8"))
self.default_sheet = None
self.sheets = []
def __enter__(self):
return self
def __exit__(self, *args, **kwargs):
self.close()
def close(self):
"""
Finalises the compressed version of the spreadsheet. If you aren't using the context manager ('with' statement),
you must call this manually; it is not triggered automatically as it would be on a file object.
:return: Nothing.
"""
self.zipf.writestr("content.xml", self.dom.toxml().encode("utf-8"))
self.zipf.close()
def writerow(self, cells):
"""
Write a row of cells into the default sheet of the spreadsheet.
:param cells: A list of cells (most basic Python types supported).
:return: Nothing.
"""
if self.default_sheet is None:
self.default_sheet = self.new_sheet()
self.default_sheet.writerow(cells)
def writerows(self, rows):
"""
Write rows into the default sheet of the spreadsheet.
:param rows: A list of rows, rows are lists of cells - see writerow.
:return: Nothing.
"""
for row in rows:
self.writerow(row)
def new_sheet(self, name=None, cols=None):
"""
Create a new sheet in the spreadsheet and return it so content can be added.
:param name: Optional name for the sheet.
:param cols: Specify the number of columns, needed for compatibility in some cases
:return: Sheet object
"""
sheet = Sheet(self.dom, name, cols)
self.sheets.append(sheet)
return sheet
class Sheet(object):
def __init__(self, dom, name="Sheet 1", cols=None):
self.dom = dom
self.cols = cols
spreadsheet = self.dom.getElementsByTagName("office:spreadsheet")[0]
self.table = self.dom.createElement("table:table")
if name:
self.table.setAttribute("table:name", name)
self.table.setAttribute("table:style-name", "ta1")
<|fim▁hole|> col = self.dom.createElement("table:table-column")
col.setAttribute("table:number-columns-repeated", unicode(self.cols))
self.table.appendChild(col)
spreadsheet.appendChild(self.table)
def writerow(self, cells):
row = self.dom.createElement("table:table-row")
content_cells = len(cells)
if self.cols is not None:
padding_cells = self.cols - content_cells
if content_cells > self.cols:
raise Exception("More cells than cols.")
cells += [None]*padding_cells
for cell_data in cells:
cell = self.dom.createElement("table:table-cell")
text = None
if isinstance(cell_data, (datetime.date, datetime.datetime)):
cell.setAttribute("office:value-type", "date")
date_str = cell_data.isoformat()
cell.setAttribute("office:date-value", date_str)
cell.setAttribute("table:style-name", "cDateISO")
text = date_str
elif isinstance(cell_data, datetime.time):
cell.setAttribute("office:value-type", "time")
cell.setAttribute("office:time-value",
cell_data.strftime("PT%HH%MM%SS"))
cell.setAttribute("table:style-name", "cTime")
text = cell_data.strftime("%H:%M:%S")
elif isinstance(cell_data, bool):
# Bool condition must be checked before numeric because:
# isinstance(True, int): True
# isinstance(True, bool): True
cell.setAttribute("office:value-type", "boolean")
cell.setAttribute("office:boolean-value",
"true" if cell_data else "false")
cell.setAttribute("table:style-name", "cBool")
text = "TRUE" if cell_data else "FALSE"
elif isinstance(cell_data, (float, int, decimal.Decimal, long)):
cell.setAttribute("office:value-type", "float")
float_str = unicode(cell_data)
cell.setAttribute("office:value", float_str)
text = float_str
elif isinstance(cell_data, Formula):
cell.setAttribute("table:formula", str(cell_data))
elif cell_data is None:
pass # Empty element
else:
# String and unknown types become string cells
cell.setAttribute("office:value-type", "string")
text = unicode(cell_data)
if text:
p = self.dom.createElement("text:p")
p.appendChild(self.dom.createTextNode(text))
cell.appendChild(p)
row.appendChild(cell)
self.table.appendChild(row)
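# Cell type mapping sketch (row values illustrative): a row such as
#   [datetime.date(2020, 1, 1), True, 3.14, None, "text"]
# becomes date, boolean, float, empty and string cells respectively;
# Formula instances become formula cells.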
def writerows(self, rows):
for row in rows:
self.writerow(row)
def writer(odsfile, *args, **kwargs):
"""
Returns an ODSWriter object.
Python 3: Make sure that the file you pass is mode b:
f = open("spreadsheet.ods", "wb")
odswriter.writer(f)
...
Otherwise you will get "TypeError: must be str, not bytes"
"""
return ODSWriter(odsfile, *args, **kwargs)<|fim▁end|>
|
if self.cols is not None:
|
<|file_name|>snapUtils.ts<|end_file_name|><|fim▁begin|>import { Box, Point, Diff, BoundingBox } from "./types";
export const SNAP_DISTANCE = 15;
export const top = (box: Box) => box.y;
export const bottom = (box: Box) => box.y + box.height;
export const left = (box: Box) => box.x;
export const right = (box: Box) => box.x + box.width;
export const near = (a: number, b: number) => Math.abs(a - b) < SNAP_DISTANCE;
// http://stackoverflow.com/a/3269471/1263117
export const overlapX = (a: Box, b: Box) =>
left(a) <= right(b) + SNAP_DISTANCE && left(b) <= right(a) + SNAP_DISTANCE;
export const overlapY = (a: Box, b: Box) =>
top(a) <= bottom(b) + SNAP_DISTANCE && top(b) <= bottom(a) + SNAP_DISTANCE;
// Give a new position for `boxA` that snaps it to `boxB` if needed.
export const snap = (boxA: Box, boxB: Box) => {
let x, y;
// TODO: Refactor/simplify this code
if (overlapY(boxA, boxB)) {
if (near(left(boxA), right(boxB))) {
x = right(boxB);
} else if (near(right(boxA), left(boxB))) {
x = left(boxB) - boxA.width;
} else if (near(left(boxA), left(boxB))) {
x = left(boxB);
} else if (near(right(boxA), right(boxB))) {
x = right(boxB) - boxA.width;
}
}
if (overlapX(boxA, boxB)) {
if (near(top(boxA), bottom(boxB))) {
y = bottom(boxB);
} else if (near(bottom(boxA), top(boxB))) {
y = top(boxB) - boxA.height;
} else if (near(top(boxA), top(boxB))) {
y = top(boxB);
} else if (near(bottom(boxA), bottom(boxB))) {
y = bottom(boxB) - boxA.height;
}
}
return { x, y };
};
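// Worked example for snap (values assumed): box a sits 10px right of box b with
// tops aligned, so a's left edge snaps to b's right edge and the tops stay aligned:
//   const a = { x: 110, y: 0, width: 50, height: 50 };
//   const b = { x: 0, y: 0, width: 100, height: 50 };
//   snap(a, b); // => { x: 100, y: 0 }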
export const snapDiff = (a: Box, b: Box): Point => {
const newPos = snap(a, b);
return {
x: newPos.x === undefined ? 0 : newPos.x - a.x,
y: newPos.y === undefined ? 0 : newPos.y - a.y,
};
};
// TODO: Use the first x and y combo
export const snapDiffManyToMany = (as: Box[], bs: Box[]): Point => {
let x: number | undefined = 0;
let y: number | undefined = 0;
for (const a of as) {
for (const b of bs) {
const diff = snapDiff(a, b);
x = x || diff.x;
y = y || diff.y;
if (x !== undefined && x > 0 && y !== undefined && y > 0) {
break;
}
}
}
return { x, y };
};
export const snapToMany = (boxA: Box, otherBoxes: Box[]): Diff => {
let x: number | undefined;
let y: number | undefined;
otherBoxes.forEach((boxB) => {
const newPos = snap(boxA, boxB);
x = newPos.x || x;
y = newPos.y || y;
});
return { x, y };
};
export const snapWithin = (boxA: Box, boundingBox: BoundingBox): Diff => {
let x, y;
if (boxA.x - SNAP_DISTANCE < 0) {
x = 0;
} else if (boxA.x + boxA.width + SNAP_DISTANCE > boundingBox.width) {
x = boundingBox.width - boxA.width;
}
if (boxA.y - SNAP_DISTANCE < 0) {
y = 0;
} else if (boxA.y + boxA.height + SNAP_DISTANCE > boundingBox.height) {
y = boundingBox.height - boxA.height;
}
return { x, y };
};
export const snapWithinDiff = (a: Box, b: BoundingBox) => {
const newPos = snapWithin(a, b);
return {
x: newPos.x === undefined ? 0 : newPos.x - a.x,
y: newPos.y === undefined ? 0 : newPos.y - a.y,
};
};
export const applySnap = (original: Point, ...snaps: Diff[]) =>
snaps.reduce(
(previous, snapped) => ({
...previous,
x: typeof snapped.x !== "undefined" ? snapped.x : previous.x,
y: typeof snapped.y !== "undefined" ? snapped.y : previous.y,
}),
original
);
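// Illustration for applySnap (values assumed): a coordinate defined in a snap diff
// overrides the accumulated value on that axis, undefined leaves it untouched:
//   applySnap({ x: 5, y: 5 }, { x: 0 }, {}); // => { x: 0, y: 5 }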
export const boundingBox = (nodes: Box[]): Box => {
const boxes = nodes.slice();
const firstNode = boxes.pop();
if (firstNode == null) {
throw new Error("boundingBox must be called with at least one node");
}
const bounding = {
top: top(firstNode),
right: right(firstNode),
bottom: bottom(firstNode),
left: left(firstNode),
};
boxes.forEach((node) => {
bounding.top = Math.min(bounding.top, top(node));
bounding.right = Math.max(bounding.right, right(node));
bounding.bottom = Math.max(bounding.bottom, bottom(node));
bounding.left = Math.min(bounding.left, left(node));
});
return {
x: bounding.left,
y: bounding.top,
width: bounding.right - bounding.left,
height: bounding.bottom - bounding.top,
};
};
export function traceConnection<B extends Box>(
areConnected: (candidate: Box, n: Box) => boolean
) {
return (candidates: B[], node: B): Set<B> => {
const connected = new Set<B>();
const checkNode = (n: B) => {
for (const candidate of candidates) {
if (!connected.has(candidate) && areConnected(candidate, n)) {
connected.add(candidate);<|fim▁hole|> }
}
};
checkNode(node);
return connected;
};
}
export const applyDiff = (a: Point, b: Point) => ({
x: a.x + b.x,
y: a.y + b.y,
});
// TODO: This should not
export const applyMultipleDiffs = (initial: Point, ...diffs: Point[]) => {
const metaDiff = diffs.reduce((m, diff) => ({
// Use the smallest non-zero diff for each axis.
// TODO: Min should be the absolute value
x: m.x === 0 || diff.x === 0 ? m.x + diff.x : Math.min(m.x, diff.x),
y: m.y === 0 || diff.y === 0 ? m.y + diff.y : Math.min(m.y, diff.y),
}));
return applyDiff(initial, metaDiff);
};<|fim▁end|>
|
checkNode(candidate);
|
<|file_name|>event.py<|end_file_name|><|fim▁begin|>import logging
from atracker.models import Event
from atracker.util import create_event
from django.conf.urls import url
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from tastypie.authentication import (
MultiAuthentication,
Authentication,
SessionAuthentication,
ApiKeyAuthentication,
)
from tastypie.authorization import Authorization
from tastypie.http import HttpUnauthorized
from tastypie.resources import ModelResource
from tastypie.utils import trailing_slash
log = logging.getLogger(__name__)
class EventResource(ModelResource):
class Meta:
queryset = Event.objects.all()
list_allowed_methods = ["get"]
detail_allowed_methods = ["get"]
resource_name = "atracker/event"
include_resource_uri = False
# TODO: double-check for sensitive information
fields = ["created"]
authentication = MultiAuthentication(
SessionAuthentication(), ApiKeyAuthentication(), Authentication()
)
authorization = Authorization()
always_return_data = True
filtering = {}<|fim▁hole|>
def prepend_urls(self):
return [
url(
r"^(?P<resource_name>%s)/(?P<content_type>[\w.]+)/(?P<object_uuid>[\w.-]+)(?:/(?P<action>[\w-]+))?(?:/(?P<user_id>-?[0-9]+))?%s$"
% (self._meta.resource_name, trailing_slash()),
self.wrap_view("create_event_for_user"),
name="atracker-create-event-for-user",
)
]
# creante event in behalf of user
"""
call via curl
curl -i \
-H "Accept: application/json" \
-H "Authorization: ApiKey remote:d65b075c593f27a42c26e65be74c047e5b50d215" \
http://local.openbroadcast.org:8080/api/v1/atracker/event/alibrary.media/4faa159c-87f4-43eb-b2b7-a4de124a05e5/stream/1/?format=json
"""
def create_event_for_user(self, request, **kwargs):
self.method_check(request, allowed=["get"])
self.is_authenticated(request)
self.throttle_check(request)
object_uuid = kwargs.get("object_uuid", None)
content_type = kwargs.get("content_type", None)
orig_ct = content_type
action = kwargs.get("action", None)
user_id = kwargs.get("user_id", None)
if user_id:
user_id = int(user_id)
log.debug(
"create_event_for_user - content_type: %s - object_uuid: %s - action: %s - user_id: %s"
% (content_type, object_uuid, action, user_id)
)
if isinstance(content_type, basestring) and "." in content_type:
app, modelname = content_type.split(".")
content_type = ContentType.objects.get(
app_label=app, model__iexact=modelname
)
elif isinstance(content_type, basestring):
content_type = ContentType.objects.get(id=int(content_type))
else:
raise ValueError('content_type must a ct id or "app.modelname" string')
if user_id:
log.debug("creating event on _behalf_ of user with id: %s" % user_id)
if request.user.has_perm("atracker.track_for_user"):
user = get_user_model().objects.get(pk=user_id)
log.info("voting for user by id: %s" % user.username)
else:
log.warning(
"no permission for %s to vote in behalf of %s"
% (request.user, user_id)
)
user = None
elif request.user and request.user.is_authenticated():
user = request.user
log.info("creating event for user by request: %s" % user.username)
else:
log.debug("no authenticated user")
user = None
object = content_type.model_class().objects.get(uuid=object_uuid)
if action:
if not user:
return HttpUnauthorized("No permission to update this resource.")
create_event(user, object, None, action)
bundle = {
"object_id": object.id,
"object_uuid": object.uuid,
"ct": orig_ct,
"action": action,
}
self.log_throttled_access(request)
return self.create_response(request, bundle)<|fim▁end|>
|
def dehydrate(self, bundle):
return bundle
|
<|file_name|>BudgetTable.js<|end_file_name|><|fim▁begin|>import React from 'react';
import { getCategoryGroups } from '../selectors/categoryGroups';
import { getCategoriesByGroupId } from '../selectors/categories';
import { getSelectedMonthBudgetItemsByCategoryId, getBudgetItemsSumUpToSelectedMonthByCategoryId } from '../selectors/budgetItems';
import { getTransactionsSumUpToSelectedMonthByCategoryId, getSelectedMonthActivityByCategoryId } from '../selectors/transactions';
import {connect} from 'react-redux';
import CategoryRow from './CategoryRow';
import CategoryGroupRow from './CategoryGroupRow';
import ui from 'redux-ui';
@ui({
state: {
editingCategoryId: undefined
}
})
class BudgetTable extends React.Component {
static propTypes = {
categoryGroups: React.PropTypes.array.isRequired,
categoriesByGroupId: React.PropTypes.object.isRequired,
getSelectedMonthActivityByCategoryId: React.PropTypes.object.isRequired,
getSelectedMonthBudgetItemsByCategoryId: React.PropTypes.object.isRequired,
transactionsSumUpToSelectedMonthByCategoryId: React.PropTypes.object.isRequired,
budgetItemsSumUpToSelectedMonthByCategoryId: React.PropTypes.object.isRequired
}
render() {
const rows = [];
this.props.categoryGroups.forEach(cg => {
rows.push(<CategoryGroupRow key={"cg"+cg.id} name={cg.name} />);
if (this.props.categoriesByGroupId[cg.id]) {
this.props.categoriesByGroupId[cg.id].forEach(c => {
rows.push(<CategoryRow
key={"c"+c.id}
category={c}
budgetItem={this.props.getSelectedMonthBudgetItemsByCategoryId[c.id]}
activity={this.props.getSelectedMonthActivityByCategoryId[c.id]}
available={(this.props.budgetItemsSumUpToSelectedMonthByCategoryId[c.id] || 0) + (this.props.transactionsSumUpToSelectedMonthByCategoryId[c.id] || 0)} />);
});
}
});<|fim▁hole|>
return (
<table className="table">
<thead>
<tr>
<th>Category</th>
<th>Budgeted</th>
<th>Activity</th>
<th>Available</th>
</tr>
</thead>
<tbody>
{rows}
</tbody>
</table>
);
}
}
const mapStateToProps = (state) => ({
categoryGroups: getCategoryGroups(state),
categoriesByGroupId: getCategoriesByGroupId(state),
getSelectedMonthActivityByCategoryId: getSelectedMonthActivityByCategoryId(state),
getSelectedMonthBudgetItemsByCategoryId: getSelectedMonthBudgetItemsByCategoryId(state),
transactionsSumUpToSelectedMonthByCategoryId: getTransactionsSumUpToSelectedMonthByCategoryId(state),
budgetItemsSumUpToSelectedMonthByCategoryId: getBudgetItemsSumUpToSelectedMonthByCategoryId(state)
});
export default connect(mapStateToProps)(BudgetTable);<|fim▁end|>
| |
<|file_name|>JavaXImpl.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.<|fim▁hole|> public static void main(String[] args) {
new JavaXImpl();
}
}<|fim▁end|>
|
*/
package stubgenerator.traitStaticPropertiesStub;
public class JavaXImpl extends GroovyXImpl {
|
<|file_name|>RestEndpoint.java<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.rest;
import java.util.Map;
import java.util.Set;
import org.apache.camel.Component;
import org.apache.camel.Consumer;
import org.apache.camel.ExchangePattern;
import org.apache.camel.NoFactoryAvailableException;
import org.apache.camel.NoSuchBeanException;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.spi.FactoryFinder;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.RestConfiguration;
import org.apache.camel.spi.RestConsumerFactory;
import org.apache.camel.spi.RestProducerFactory;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriPath;
import org.apache.camel.support.DefaultEndpoint;
import org.apache.camel.util.HostUtils;
import org.apache.camel.util.ObjectHelper;
import static org.apache.camel.support.RestProducerFactoryHelper.setupComponent;<|fim▁hole|> */
@UriEndpoint(firstVersion = "2.14.0", scheme = "rest", title = "REST", syntax = "rest:method:path:uriTemplate", label = "core,rest", lenientProperties = true)
public class RestEndpoint extends DefaultEndpoint {
public static final String[] DEFAULT_REST_CONSUMER_COMPONENTS = new String[]{"coap", "netty-http", "netty4-http", "jetty", "restlet", "servlet", "spark-java", "undertow"};
public static final String[] DEFAULT_REST_PRODUCER_COMPONENTS = new String[]{"http", "http4", "netty4-http", "jetty", "restlet", "undertow"};
public static final String DEFAULT_API_COMPONENT_NAME = "swagger";
public static final String RESOURCE_PATH = "META-INF/services/org/apache/camel/rest/";
@UriPath(label = "common", enums = "get,post,put,delete,patch,head,trace,connect,options") @Metadata(required = true)
private String method;
@UriPath(label = "common") @Metadata(required = true)
private String path;
@UriPath(label = "common")
private String uriTemplate;
@UriParam(label = "common")
private String consumes;
@UriParam(label = "common")
private String produces;
@UriParam(label = "common")
private String componentName;
@UriParam(label = "common")
private String inType;
@UriParam(label = "common")
private String outType;
@UriParam(label = "common")
private String routeId;
@UriParam(label = "consumer")
private String description;
@UriParam(label = "producer")
private String apiDoc;
@UriParam(label = "producer")
private String host;
@UriParam(label = "producer", multiValue = true)
private String queryParameters;
@UriParam(label = "producer", enums = "auto,off,json,xml,json_xml")
private RestConfiguration.RestBindingMode bindingMode;
private Map<String, Object> parameters;
public RestEndpoint(String endpointUri, RestComponent component) {
super(endpointUri, component);
setExchangePattern(ExchangePattern.InOut);
}
@Override
public RestComponent getComponent() {
return (RestComponent) super.getComponent();
}
public String getMethod() {
return method;
}
/**
* HTTP method to use.
*/
public void setMethod(String method) {
this.method = method;
}
public String getPath() {
return path;
}
/**
* The base path
*/
public void setPath(String path) {
this.path = path;
}
public String getUriTemplate() {
return uriTemplate;
}
/**
* The uri template
*/
public void setUriTemplate(String uriTemplate) {
this.uriTemplate = uriTemplate;
}
public String getConsumes() {
return consumes;
}
/**
* Media type such as: 'text/xml', or 'application/json' this REST service accepts.
* By default we accept all kinds of types.
*/
public void setConsumes(String consumes) {
this.consumes = consumes;
}
public String getProduces() {
return produces;
}
/**
* Media type such as: 'text/xml', or 'application/json' this REST service returns.
*/
public void setProduces(String produces) {
this.produces = produces;
}
public String getComponentName() {
return componentName;
}
/**
* The Camel Rest component to use for the REST transport, such as restlet, spark-rest.
     * If no component has been explicitly configured, then Camel will look up whether there is a Camel component
     * that integrates with the Rest DSL, or whether an org.apache.camel.spi.RestConsumerFactory is registered in the registry.
     * If either one is found, then that is being used.
*/
public void setComponentName(String componentName) {
this.componentName = componentName;
}
public String getInType() {
return inType;
}
/**
* To declare the incoming POJO binding type as a FQN class name
*/
public void setInType(String inType) {
this.inType = inType;
}
public String getOutType() {
return outType;
}
/**
* To declare the outgoing POJO binding type as a FQN class name
*/
public void setOutType(String outType) {
this.outType = outType;
}
public String getRouteId() {
return routeId;
}
/**
     * Name of the route this REST service creates
*/
public void setRouteId(String routeId) {
this.routeId = routeId;
}
public String getDescription() {
return description;
}
/**
* Human description to document this REST service
*/
public void setDescription(String description) {
this.description = description;
}
public Map<String, Object> getParameters() {
return parameters;
}
/**
* Additional parameters to configure the consumer of the REST transport for this REST service
*/
public void setParameters(Map<String, Object> parameters) {
this.parameters = parameters;
}
public String getApiDoc() {
return apiDoc;
}
/**
* The swagger api doc resource to use.
     * The resource is loaded from classpath by default and must be in JSON format.
*/
public void setApiDoc(String apiDoc) {
this.apiDoc = apiDoc;
}
public String getHost() {
return host;
}
/**
* Host and port of HTTP service to use (override host in swagger schema)
*/
public void setHost(String host) {
this.host = host;
}
public String getQueryParameters() {
return queryParameters;
}
/**
* Query parameters for the HTTP service to call
*/
public void setQueryParameters(String queryParameters) {
this.queryParameters = queryParameters;
}
public RestConfiguration.RestBindingMode getBindingMode() {
return bindingMode;
}
/**
* Configures the binding mode for the producer. If set to anything
* other than 'off' the producer will try to convert the body of
* the incoming message from inType to the json or xml, and the
* response from json or xml to outType.
*/
public void setBindingMode(RestConfiguration.RestBindingMode bindingMode) {
this.bindingMode = bindingMode;
}
public void setBindingMode(String bindingMode) {
this.bindingMode = RestConfiguration.RestBindingMode.valueOf(bindingMode.toLowerCase());
}
@Override
public Producer createProducer() throws Exception {
if (ObjectHelper.isEmpty(host)) {
// hostname must be provided
throw new IllegalArgumentException("Hostname must be configured on either restConfiguration"
+ " or in the rest endpoint uri as a query parameter with name host, eg rest:" + method + ":" + path + "?host=someserver");
}
RestProducerFactory apiDocFactory = null;
RestProducerFactory factory = null;
if (apiDoc != null) {
log.debug("Discovering camel-swagger-java on classpath for using api-doc: {}", apiDoc);
            // lookup on classpath using factory finder to automatically find it (just add camel-swagger-java to classpath etc)
try {
FactoryFinder finder = getCamelContext().getFactoryFinder(RESOURCE_PATH);
Object instance = finder.newInstance(DEFAULT_API_COMPONENT_NAME);
if (instance instanceof RestProducerFactory) {
// this factory from camel-swagger-java will facade the http component in use
apiDocFactory = (RestProducerFactory) instance;
}
parameters.put("apiDoc", apiDoc);
} catch (NoFactoryAvailableException e) {
throw new IllegalStateException("Cannot find camel-swagger-java on classpath to use with api-doc: " + apiDoc);
}
}
String cname = getComponentName();
if (cname != null) {
Object comp = getCamelContext().getRegistry().lookupByName(getComponentName());
if (comp instanceof RestProducerFactory) {
factory = (RestProducerFactory) comp;
} else {
comp = setupComponent(getComponentName(), getCamelContext(), (Map<String, Object>) parameters.get("component"));
if (comp instanceof RestProducerFactory) {
factory = (RestProducerFactory) comp;
}
}
if (factory == null) {
if (comp != null) {
throw new IllegalArgumentException("Component " + getComponentName() + " is not a RestProducerFactory");
} else {
throw new NoSuchBeanException(getComponentName(), RestProducerFactory.class.getName());
}
}
cname = getComponentName();
}
// try all components
if (factory == null) {
for (String name : getCamelContext().getComponentNames()) {
Component comp = setupComponent(name, getCamelContext(), (Map<String, Object>) parameters.get("component"));
if (comp instanceof RestProducerFactory) {
factory = (RestProducerFactory) comp;
cname = name;
break;
}
}
}
parameters.put("componentName", cname);
// lookup in registry
if (factory == null) {
Set<RestProducerFactory> factories = getCamelContext().getRegistry().findByType(RestProducerFactory.class);
if (factories != null && factories.size() == 1) {
factory = factories.iterator().next();
}
}
        // no explicit factory found, so try the default rest producer components;
        // there must be exactly one on the classpath so we can safely pick it
if (factory == null) {
RestProducerFactory found = null;
String foundName = null;
for (String name : DEFAULT_REST_PRODUCER_COMPONENTS) {
                Object comp = setupComponent(name, getCamelContext(), (Map<String, Object>) parameters.get("component"));
if (comp instanceof RestProducerFactory) {
if (found == null) {
found = (RestProducerFactory) comp;
foundName = name;
} else {
throw new IllegalArgumentException("Multiple RestProducerFactory found on classpath. Configure explicit which component to use");
}
}
}
if (found != null) {
log.debug("Auto discovered {} as RestProducerFactory", foundName);
factory = found;
}
}
if (factory != null) {
log.debug("Using RestProducerFactory: {}", factory);
RestConfiguration config = getCamelContext().getRestConfiguration(cname, true);
Producer producer;
if (apiDocFactory != null) {
// wrap the factory using the api doc factory which will use the factory
parameters.put("restProducerFactory", factory);
producer = apiDocFactory.createProducer(getCamelContext(), host, method, path, uriTemplate, queryParameters, consumes, produces, config, parameters);
} else {
producer = factory.createProducer(getCamelContext(), host, method, path, uriTemplate, queryParameters, consumes, produces, config, parameters);
}
RestProducer answer = new RestProducer(this, producer, config);
answer.setOutType(outType);
answer.setType(inType);
answer.setBindingMode(bindingMode);
return answer;
} else {
throw new IllegalStateException("Cannot find RestProducerFactory in Registry or as a Component to use");
}
}
@Override
public Consumer createConsumer(Processor processor) throws Exception {
RestConsumerFactory factory = null;
String cname = null;
if (getComponentName() != null) {
Object comp = getCamelContext().getRegistry().lookupByName(getComponentName());
if (comp instanceof RestConsumerFactory) {
factory = (RestConsumerFactory) comp;
} else {
comp = getCamelContext().getComponent(getComponentName());
if (comp instanceof RestConsumerFactory) {
factory = (RestConsumerFactory) comp;
}
}
if (factory == null) {
if (comp != null) {
throw new IllegalArgumentException("Component " + getComponentName() + " is not a RestConsumerFactory");
} else {
throw new NoSuchBeanException(getComponentName(), RestConsumerFactory.class.getName());
}
}
cname = getComponentName();
}
// try all components
if (factory == null) {
for (String name : getCamelContext().getComponentNames()) {
Component comp = getCamelContext().getComponent(name);
if (comp instanceof RestConsumerFactory) {
factory = (RestConsumerFactory) comp;
cname = name;
break;
}
}
}
// lookup in registry
if (factory == null) {
Set<RestConsumerFactory> factories = getCamelContext().getRegistry().findByType(RestConsumerFactory.class);
if (factories != null && factories.size() == 1) {
factory = factories.iterator().next();
}
}
        // no explicit factory found, so try the default rest consumer components;
        // there must be exactly one on the classpath so we can safely pick it
if (factory == null) {
RestConsumerFactory found = null;
String foundName = null;
for (String name : DEFAULT_REST_CONSUMER_COMPONENTS) {
Object comp = getCamelContext().getComponent(name, true);
if (comp instanceof RestConsumerFactory) {
if (found == null) {
found = (RestConsumerFactory) comp;
foundName = name;
} else {
throw new IllegalArgumentException("Multiple RestConsumerFactory found on classpath. Configure explicit which component to use");
}
}
}
if (found != null) {
log.debug("Auto discovered {} as RestConsumerFactory", foundName);
factory = found;
}
}
if (factory != null) {
// if no explicit port/host configured, then use port from rest configuration
String scheme = "http";
String host = "";
int port = 80;
RestConfiguration config = getCamelContext().getRestConfiguration(cname, true);
if (config.getScheme() != null) {
scheme = config.getScheme();
}
if (config.getHost() != null) {
host = config.getHost();
}
int num = config.getPort();
if (num > 0) {
port = num;
}
// if no explicit hostname set then resolve the hostname
if (ObjectHelper.isEmpty(host)) {
if (config.getHostNameResolver() == RestConfiguration.RestHostNameResolver.allLocalIp) {
host = "0.0.0.0";
} else if (config.getHostNameResolver() == RestConfiguration.RestHostNameResolver.localHostName) {
host = HostUtils.getLocalHostName();
} else if (config.getHostNameResolver() == RestConfiguration.RestHostNameResolver.localIp) {
host = HostUtils.getLocalIp();
}
}
// calculate the url to the rest service
String path = getPath();
if (!path.startsWith("/")) {
path = "/" + path;
}
// there may be an optional context path configured to help Camel calculate the correct urls for the REST services
// this may be needed when using camel-servlet where we cannot get the actual context-path or port number of the servlet engine
// during init of the servlet
String contextPath = config.getContextPath();
if (contextPath != null) {
if (!contextPath.startsWith("/")) {
path = "/" + contextPath + path;
} else {
path = contextPath + path;
}
}
String baseUrl = scheme + "://" + host + (port != 80 ? ":" + port : "") + path;
String url = baseUrl;
if (uriTemplate != null) {
// make sure to avoid double slashes
if (uriTemplate.startsWith("/")) {
url = url + uriTemplate;
} else {
url = url + "/" + uriTemplate;
}
}
Consumer consumer = factory.createConsumer(getCamelContext(), processor, getMethod(), getPath(),
getUriTemplate(), getConsumes(), getProduces(), config, getParameters());
configureConsumer(consumer);
            // add to the rest registry so we can keep track of the REST services;
            // the registry automatically keeps track of the consumer and
            // un-registers the REST service when the consumer is removed
getCamelContext().getRestRegistry().addRestService(consumer, url, baseUrl, getPath(), getUriTemplate(), getMethod(),
getConsumes(), getProduces(), getInType(), getOutType(), getRouteId(), getDescription());
return consumer;
} else {
throw new IllegalStateException("Cannot find RestConsumerFactory in Registry or as a Component to use");
}
}
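    // Illustrative walk-through of the URL assembly above (all values are
    // hypothetical): scheme "http", host "myhost", port 8080, contextPath
    // "/camel", path "/say" and uriTemplate "hello/{name}" produce
    //   baseUrl = "http://myhost:8080/camel/say"
    //   url     = "http://myhost:8080/camel/say/hello/{name}"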
@Override
public boolean isSingleton() {
return true;
}
@Override
public boolean isLenientProperties() {
return true;
}
}<|fim▁end|>
|
/**
* The rest component is used for either hosting REST services (consumer) or calling external REST services (producer).
|
<|file_name|>0006_auto_20141203_0021.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('backend', '0005_organization_registered'),
]
operations = [
migrations.RemoveField(
model_name='organization',
name='lobbyists_with_access',
),
migrations.AlterField(
model_name='organization',
name='explore_url',
field=models.CharField(max_length=128, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='organization',
name='lobbyists',
field=models.IntegerField(null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(<|fim▁hole|> field=models.IntegerField(null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='organization',
name='name',
field=models.CharField(max_length=128, null=True, blank=True),
preserve_default=True,
),
]<|fim▁end|>
|
model_name='organization',
name='money',
|
<|file_name|>0010_remove_userstory_watchers.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import connection
from django.db import models, migrations<|fim▁hole|>from django.contrib.contenttypes.management import update_all_contenttypes
def create_notifications(apps, schema_editor):
update_all_contenttypes(verbosity=0)
sql="""
INSERT INTO notifications_watched (object_id, created_date, content_type_id, user_id, project_id)
SELECT userstory_id AS object_id, now() AS created_date, {content_type_id} AS content_type_id, user_id, project_id
FROM userstories_userstory_watchers INNER JOIN userstories_userstory ON userstories_userstory_watchers.userstory_id = userstories_userstory.id""".format(content_type_id=ContentType.objects.get(model='userstory').id)
cursor = connection.cursor()
cursor.execute(sql)
class Migration(migrations.Migration):
dependencies = [
('notifications', '0004_watched'),
('userstories', '0009_remove_userstory_is_archived'),
]
operations = [
migrations.RunPython(create_notifications),
migrations.RemoveField(
model_name='userstory',
name='watchers',
),
]<|fim▁end|>
|
from django.contrib.contenttypes.models import ContentType
|
<|file_name|>es7_utils.py<|end_file_name|><|fim▁begin|>import importlib
import inspect
from celery import task
from django.conf import settings
from elasticsearch import Elasticsearch
from elasticsearch.helpers import bulk as es7_bulk
from elasticsearch.helpers.errors import BulkIndexError
from elasticsearch_dsl import Document, UpdateByQuery, analyzer, char_filter, token_filter
from kitsune.search import config
def _insert_custom_filters(analyzer_name, filter_list, char=False):
"""
Takes a list containing in-built filters (as strings), and the settings for custom filters
(as dicts). Turns the dicts into instances of `token_filter` or `char_filter` depending
on the value of the `char` argument.
"""
def mapping_func(position_filter_tuple):
position, filter = position_filter_tuple
if type(filter) is dict:
prefix = analyzer_name
default_filters = config.ES_DEFAULT_ANALYZER["char_filter" if char else "filter"]
if filter in default_filters:
# detect if this filter exists in the default analyzer
# if it does use the same name as the default
# to avoid defining the same filter for each locale
prefix = config.ES_DEFAULT_ANALYZER_NAME
position = default_filters.index(filter)
name = f'{prefix}_{position}_{filter["type"]}'
if char:
return char_filter(name, **filter)
return token_filter(name, **filter)
return filter
return list(map(mapping_func, enumerate(filter_list)))
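# Illustrative sketch (not part of the original module): with a hypothetical
# locale analyzer config, the mapping above keeps built-in filter names as-is
# and names each dict-based filter after its analyzer and position, e.g.
#
#   _insert_custom_filters("fr", ["lowercase", {"type": "stemmer", "language": "french"}])
#   # -> ["lowercase", token_filter("fr_1_stemmer", type="stemmer", language="french")]
#
# (assuming the stemmer dict is not one of the default analyzer's filters).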
def _create_synonym_graph_filter(synonym_file_name):
filter_name = f"{synonym_file_name}_synonym_graph"
return token_filter(
filter_name,
type="synonym_graph",
synonyms_path=f"synonyms/{synonym_file_name}.txt",
# we must use "true" instead of True to work around an elastic-dsl bug
expand="true",
lenient="true",
updateable="true",
)
def es_analyzer_for_locale(locale, search_analyzer=False):
"""Pick an appropriate analyzer for a given locale.
If no analyzer is defined for `locale` or the locale analyzer uses a plugin
    but plugin use is turned off in settings, return an analyzer named "default_sumo".
"""
name = ""
analyzer_config = config.ES_LOCALE_ANALYZERS.get(locale)
if not analyzer_config or (analyzer_config.get("plugin") and not settings.ES_USE_PLUGINS):
name = config.ES_DEFAULT_ANALYZER_NAME
analyzer_config = {}
# use default values from ES_DEFAULT_ANALYZER if not overridden
# using python 3.9's dict union operator
analyzer_config = config.ES_DEFAULT_ANALYZER | analyzer_config
# turn dictionaries into `char_filter` and `token_filter` instances
filters = _insert_custom_filters(name or locale, analyzer_config["filter"])
char_filters = _insert_custom_filters(
name or locale, analyzer_config["char_filter"], char=True
)
if search_analyzer:
# create a locale-specific search analyzer, even if the index-time analyzer is
# `sumo_default`. we do this so that we can adjust the synonyms used in any locale,
# even if it doesn't have a custom analysis chain set up, without having to re-index
name = locale + "_search_analyzer"
filters.append(_create_synonym_graph_filter(config.ES_ALL_SYNONYMS_NAME))
filters.append(_create_synonym_graph_filter(locale))
return analyzer(
name or locale,
tokenizer=analyzer_config["tokenizer"],
filter=filters,
char_filter=char_filters,
)
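# Hedged usage sketch ("de" is just an example locale): the index-time and
# search-time analyzers can be built as below; the search-time variant appends
# the synonym_graph filters, which are marked updateable so synonyms can be
# changed without a re-index.
#
#   index_analyzer = es_analyzer_for_locale("de")
#   search_analyzer = es_analyzer_for_locale("de", search_analyzer=True)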
def es7_client(**kwargs):
"""Return an ES7 Elasticsearch client"""
# prefer a cloud_id if available
if es7_cloud_id := settings.ES7_CLOUD_ID:
kwargs.update({"cloud_id": es7_cloud_id, "http_auth": settings.ES7_HTTP_AUTH})
else:
kwargs.update({"hosts": settings.ES7_URLS})
return Elasticsearch(**kwargs)
def get_doc_types(paths=["kitsune.search.documents"]):
"""Return all registered document types"""
doc_types = []
modules = [importlib.import_module(path) for path in paths]
for module in modules:
for key in dir(module):
cls = getattr(module, key)
if (
inspect.isclass(cls)
and issubclass(cls, Document)
and cls != Document
and cls.__name__ != "SumoDocument"
):
doc_types.append(cls)
return doc_types
@task
def index_object(doc_type_name, obj_id):
"""Index an ORM object given an object id and a document type name."""
doc_type = next(cls for cls in get_doc_types() if cls.__name__ == doc_type_name)
model = doc_type.get_model()
try:
obj = model.objects.get(pk=obj_id)
except model.DoesNotExist:
# if the row doesn't exist in DB, it may have been deleted while this job
# was in the celery queue - this shouldn't be treated as a failure, so
# just return
return
if doc_type.update_document:
doc_type.prepare(obj).to_action("update", doc_as_upsert=True)
else:
doc_type.prepare(obj).to_action("index")
@task
def index_objects_bulk(
doc_type_name,
obj_ids,
timeout=settings.ES_BULK_DEFAULT_TIMEOUT,
elastic_chunk_size=settings.ES_DEFAULT_ELASTIC_CHUNK_SIZE,
):
"""Bulk index ORM objects given a list of object ids and a document type name."""
doc_type = next(cls for cls in get_doc_types() if cls.__name__ == doc_type_name)
db_objects = doc_type.get_queryset().filter(pk__in=obj_ids)
# prepare the docs for indexing
docs = [doc_type.prepare(obj) for obj in db_objects]
# set the appropriate action per document type
action = "index"
kwargs = {}
# If the `update_document` is true we are using update instead of index
if doc_type.update_document:
action = "update"
kwargs.update({"doc_as_upsert": True})
# if the request doesn't resolve within `timeout`,<|fim▁hole|> es7_client(
timeout=timeout,
retry_on_timeout=True,
initial_backoff=timeout,
max_retries=settings.ES_BULK_MAX_RETRIES,
),
(doc.to_action(action=action, is_bulk=True, **kwargs) for doc in docs),
chunk_size=elastic_chunk_size,
raise_on_error=False, # we'll raise the errors ourselves, so all the chunks get sent
)
errors = [
error
for error in errors
if not (error.get("delete") and error["delete"]["status"] in [400, 404])
]
if errors:
raise BulkIndexError(f"{len(errors)} document(s) failed to index.", errors)
@task
def remove_from_field(doc_type_name, field_name, field_value):
"""Remove a value from all documents in the doc_type's index."""
doc_type = next(cls for cls in get_doc_types() if cls.__name__ == doc_type_name)
script = (
f"if (ctx._source.{field_name}.contains(params.value)) {{"
f"ctx._source.{field_name}.remove(ctx._source.{field_name}.indexOf(params.value))"
f"}}"
)
update = UpdateByQuery(using=es7_client(), index=doc_type._index._name)
update = update.filter("term", **{field_name: field_value})
update = update.script(source=script, params={"value": field_value}, conflicts="proceed")
# refresh index to ensure search fetches all matches
doc_type._index.refresh()
update.execute()
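# Example sketch (the document type name and field are hypothetical):
#   remove_from_field("ProfileDocument", "group_ids", 42)
# runs an update-by-query over that document type's index which deletes the
# value 42 from every document whose group_ids field contains it.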
@task
def delete_object(doc_type_name, obj_id):
"""Unindex an ORM object given an object id and document type name."""
doc_type = next(cls for cls in get_doc_types() if cls.__name__ == doc_type_name)
doc = doc_type()
doc.meta.id = obj_id
doc.to_action("delete")<|fim▁end|>
|
# sleep for `timeout` then try again up to `settings.ES_BULK_MAX_RETRIES` times,
# before raising an exception:
success, errors = es7_bulk(
|
<|file_name|>regions-outlives-nominal-type-struct-region.rs<|end_file_name|><|fim▁begin|>// Test that a nominal type (like `Foo<'a>`) outlives `'b` if its
// arguments (like `'a`) outlive `'b`.
//
// Rule OutlivesNominalType from RFC 1214.<|fim▁hole|>#![allow(dead_code)]
mod variant_struct_region {
struct Foo<'a> {
x: &'a i32,
}
struct Bar<'a,'b> {
f: &'a Foo<'b>
}
}
fn main() { }<|fim▁end|>
|
// check-pass
#![feature(rustc_attrs)]
|
<|file_name|>messages.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2014 IBM Corporation
# Copyright 2015 Lenovo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module implements client/server messages emitted from plugins.
# Things are defined here to 'encourage' developers to coordinate information
# format. This is also how different data formats are supported
import confluent.exceptions as exc
import json
def _htmlify_structure(indict):
ret = "<ul>"
if isinstance(indict, dict):
for key in indict.iterkeys():
ret += "<li>{0}: ".format(key)
if type(indict[key]) in (str, unicode, float, int):
ret += str(indict[key])
else:
ret += _htmlify_structure(indict[key])
elif isinstance(indict, list):
if len(indict) > 0:
if type(indict[0]) in (str, unicode):
ret += ",".join(indict)
else:
for v in indict:
ret += _htmlify_structure(v)
return ret + '</ul>'
class ConfluentMessage(object):
readonly = False
defaultvalue = ''
defaulttype = 'text'
def __init__(self):
self.desc = ''
self.stripped = False
self.kvpairs = {}
raise NotImplementedError("Must be subclassed!")
def json(self):
# This will create the canonical json representation of this message
if hasattr(self, 'stripped') and self.stripped:
datasource = self.kvpairs
else:
datasource = {'databynode': self.kvpairs}
jsonsnippet = json.dumps(datasource, separators=(',', ':'))[1:-1]
return jsonsnippet
def raw(self):
"""Return pythonic representation of the response.<|fim▁hole|> if hasattr(self, 'stripped') and self.stripped:
return self.kvpairs
return {'databynode': self.kvpairs}
def strip_node(self, node):
self.stripped = True
if self.kvpairs is not None:
self.kvpairs = self.kvpairs[node]
def html(self, extension=''):
#this is used to facilitate the api explorer feature
if not hasattr(self, 'stripped'):
self.stripped = False
if not hasattr(self, 'notnode'):
self.notnode = False
if self.stripped or self.notnode:
return self._generic_html_value(self.kvpairs)
if not self.stripped:
htmlout = ''
for node in self.kvpairs.iterkeys():
htmlout += '{0}:{1}\n'.format(
node, self._generic_html_value(self.kvpairs[node]))
return htmlout
def _generic_html_value(self, pairs):
snippet = ""
for key in pairs.iterkeys():
val = pairs[key]
value = self.defaultvalue
valtype = self.defaulttype
notes = []
if val is not None and 'value' in val:
value = val['value']
if 'inheritedfrom' in val:
notes.append('Inherited from %s' % val['inheritedfrom'])
if 'expression' in val:
notes.append(
'Derived from expression "%s"' % val['expression'])
elif val is not None and 'expression' in val and 'broken' in val:
value = "*BROKEN*"
notes.append(
'Derived from expression "%s"' % val['expression'])
notes.append('Broken because of %s' % val['broken'])
elif val is not None and 'expression' in val:
value = val['expression']
if value is None:
value = ''
if val is not None and value == '' and 'isset' in val and val[
'isset'] is True:
# an encrypted value, put some *** to show it is set
# in the explorer
if 'inheritedfrom' in val:
notes.append('Inherited from %s' % val['inheritedfrom'])
value = '********'
if isinstance(val, list):
snippet += key + ":"
if len(val) == 0 and not self.readonly:
snippet += ('<input type="{0}" name="{1}" value="" '
' "title="{2}">'
).format(valtype, key, self.desc)
for v in val:
if self.readonly:
snippet += _htmlify_structure(v)
else:
snippet += ('<input type="{0}" name="{1}" value="{2}" '
' "title="{3}">'
).format(valtype, key, v, self.desc)
if not self.readonly:
snippet += (
'<input type="{0}" name="{1}" value="" title="{2}">'
'<input type="checkbox" name="restexplorerhonorkey" '
'value="{1}">').format(valtype, key, self.desc)
return snippet
if self.readonly:
snippet += "{0}: {1}".format(key, value)
else:
snippet += (key + ":" +
'<input type="{0}" name="{1}" value="{2}" '
'title="{3}"><input type="checkbox" '
'name="restexplorerhonorkey" value="{1}">'
).format(valtype, key, value, self.desc)
if len(notes) > 0:
snippet += '(' + ','.join(notes) + ')'
return snippet
class ConfluentNodeError(object):
def __init__(self, node, errorstr):
self.node = node
self.error = errorstr
def raw(self):
return {'databynode': {self.node: {'error': self.error}}}
def html(self):
return self.node + ":" + self.error
def strip_node(self, node):
#NOTE(jbjohnso): For single node errors, raise exception to
#trigger what a developer of that medium would expect
raise Exception(self.error)
class ConfluentTargetTimeout(ConfluentNodeError):
def __init__(self, node, errstr='timeout'):
self.node = node
self.error = errstr
def strip_node(self, node):
raise exc.TargetEndpointUnreachable(self.error)
class ConfluentTargetNotFound(ConfluentNodeError):
def __init__(self, node, errorstr='not found'):
self.node = node
self.error = errorstr
def strip_node(self, node):
raise exc.NotFoundException(self.error)
class ConfluentTargetInvalidCredentials(ConfluentNodeError):
def __init__(self, node):
self.node = node
self.error = 'bad credentials'
def strip_node(self, node):
raise exc.TargetEndpointBadCredentials
class DeletedResource(ConfluentMessage):
def __init__(self, resource):
self.kvpairs = {}
class ConfluentChoiceMessage(ConfluentMessage):
valid_values = set()
valid_paramset = {}
def __init__(self, node, state):
self.stripped = False
self.kvpairs = {
node: {
self.keyname: {'value': state},
}
}
def html(self, extension=''):
if hasattr(self, 'stripped') and self.stripped:
return self._create_option(self.kvpairs)
else:
htmlout = ''
for node in self.kvpairs.iterkeys():
htmlout += '{0}:{1}\n'.format(
node, self._create_option(self.kvpairs[node]))
return htmlout
def _create_option(self, pairdata):
snippet = ''
for key in pairdata.iterkeys():
val = pairdata[key]
snippet += key + ':<select name="%s">' % key
valid_values = self.valid_values
if key in self.valid_paramset:
valid_values = self.valid_paramset[key]
for opt in valid_values:
if opt == val['value']:
snippet += '<option value="%s" selected>%s</option>\r' % (
opt, opt)
else:
snippet += '<option value="%s">%s</option>\r' % (opt, opt)
snippet += '</select>'
snippet += '<input type="checkbox" name="restexplorerhonorkey" '
snippet += 'value="{0}"><br>\r'.format(key)
return snippet
class LinkRelation(ConfluentMessage):
kvpairs = None
def __init__(self):
self.href = ''
self.rel = ''
def json(self):
"""Provide json_hal style representation of the relation.
This currently only makes sense for the socket api.
"""
return {self.rel: '{ "href": "%s" }' % self.href}
def raw(self):
"""Provide python structure of the relation.
This currently is only sensible to consume from httpapi.
"""
return {self.rel: {"href": self.href}}
def html(self, extension=''):
"""Provide an html representation of the link relation.
This is used by the API explorer aspect of httpapi"""
return '<a href="{0}{2}" rel="{1}">{0}{2}</a>'.format(self.href,
self.rel,
extension)
# return '<a href="%s" rel="%s">%s</a><input type="submit"
# name="restexprerorop" value="delete:%s"' % (self.href, self.rel,
# self.href, self.href)
class ChildCollection(LinkRelation):
def __init__(self, collname, candelete=False):
self.rel = 'item'
self.href = collname
self.candelete = candelete
def html(self, extension=''):
if self.candelete:
return (
'<a href="{0}{2}" rel="{1}">{0}{2}</a> . . . . . . . . . . . . '
'<button type="submit" name="restexplorerop" '
'value="delete" formaction="{0}">delete'
'</button>').format(self.href, self.rel, extension)
else:
return '<a href="{0}{1}" rel="{0}">{0}{1}</a>'.format(self.href,
extension)
def get_input_message(path, operation, inputdata, nodes=None):
if path[0] == 'power' and path[1] == 'state' and operation != 'retrieve':
return InputPowerMessage(path, nodes, inputdata)
elif path[0] in ('attributes', 'users') and operation != 'retrieve':
return InputAttributes(path, inputdata, nodes)
elif path == ['boot', 'nextdevice'] and operation != 'retrieve':
return InputBootDevice(path, nodes, inputdata)
elif path == ['identify'] and operation != 'retrieve':
return InputIdentifyMessage(path, nodes, inputdata)
elif inputdata:
raise exc.InvalidArgumentException()
class InputAttributes(ConfluentMessage):
def __init__(self, path, inputdata, nodes=None):
self.nodeattribs = {}
nestedmode = False
if not inputdata:
raise exc.InvalidArgumentException('no request data provided')
if nodes is None:
self.attribs = inputdata
for attrib in self.attribs:
if type(self.attribs[attrib]) in (str, unicode):
try:
# ok, try to use format against the string
# store back result to the attribute to
# handle things like '{{' and '}}'
# if any weird sort of error should
# happen, it means the string has something
# that formatter is looking to fulfill, but
# is unable to do so, meaning it is an expression
tv = self.attribs[attrib].format()
self.attribs[attrib] = tv
except (KeyError, IndexError):
# this means format() actually thought there was work
# that suggested parameters, push it in as an
# expression
self.attribs[attrib] = {
'expression': self.attribs[attrib]}
return
for node in nodes:
if node in inputdata:
nestedmode = True
self.nodeattribs[node] = inputdata[node]
if nestedmode:
for key in inputdata:
if key not in nodes:
raise exc.InvalidArgumentException
else:
for node in nodes:
self.nodeattribs[node] = inputdata
def get_attributes(self, node):
if node not in self.nodeattribs:
return {}
nodeattr = self.nodeattribs[node]
for attr in nodeattr:
if type(nodeattr[attr]) in (str, unicode):
try:
# as above, use format() to see if string follows
# expression, store value back in case of escapes
tv = nodeattr[attr].format()
nodeattr[attr] = tv
except (KeyError, IndexError):
# an expression string will error if format() done
# use that as cue to put it into config as an expr
nodeattr[attr] = {'expression': nodeattr[attr]}
return nodeattr
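    # Hedged illustration of the format() probe used above: a literal string
    # such as 'static' survives 'static'.format() unchanged, while
    # '{n1}'.format() raises KeyError (and '{0}'.format() raises IndexError),
    # so such values are stored as {'expression': ...} for later evaluation.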
class ConfluentInputMessage(ConfluentMessage):
keyname = 'state'
def __init__(self, path, nodes, inputdata):
self.inputbynode = {}
self.stripped = False
if not inputdata:
raise exc.InvalidArgumentException('missing input data')
if self.keyname not in inputdata:
#assume we have nested information
for key in nodes:
if key not in inputdata:
raise exc.InvalidArgumentException(key + ' not in request')
datum = inputdata[key]
if self.keyname not in datum:
raise exc.InvalidArgumentException(
'missing {0} argument'.format(self.keyname))
elif datum[self.keyname] not in self.valid_values:
raise exc.InvalidArgumentException(
datum[self.keyname] + ' is not one of ' +
','.join(self.valid_values))
self.inputbynode[key] = datum[self.keyname]
else: # we have a state argument not by node
datum = inputdata
if self.keyname not in datum:
raise exc.InvalidArgumentException('missing {0} argument'.format(self.keyname))
elif datum[self.keyname] not in self.valid_values:
raise exc.InvalidArgumentException(datum[self.keyname] +
' is not one of ' +
','.join(self.valid_values))
for node in nodes:
self.inputbynode[node] = datum[self.keyname]
class InputIdentifyMessage(ConfluentInputMessage):
valid_values = set([
'on',
'off',
])
keyname = 'identify'
class InputPowerMessage(ConfluentInputMessage):
valid_values = set([
'on',
'off',
'reset',
'boot',
])
def powerstate(self, node):
return self.inputbynode[node]
class BootDevice(ConfluentChoiceMessage):
valid_values = set([
'network',
'hd',
'setup',
'default',
'cd',
])
valid_bootmodes = set([
'unspecified',
'bios',
'uefi',
])
valid_paramset = {
'bootmode': valid_bootmodes,
}
def __init__(self, node, device, bootmode='unspecified'):
if device not in self.valid_values:
raise Exception("Invalid boot device argument passed in:" +
repr(device))
if bootmode not in self.valid_bootmodes:
raise Exception("Invalid boot mode argument passed in:" +
repr(bootmode))
self.kvpairs = {
node: {
'nextdevice': {'value': device},
'bootmode': {'value': bootmode },
}
}
class InputBootDevice(BootDevice):
def __init__(self, path, nodes, inputdata):
self.bootdevbynode = {}
self.bootmodebynode = {}
if not inputdata:
raise exc.InvalidArgumentException()
if 'nextdevice' not in inputdata:
for key in nodes:
if key not in inputdata:
raise exc.InvalidArgumentException(key + ' not in request')
datum = inputdata[key]
if 'nextdevice' not in datum:
raise exc.InvalidArgumentException(
'missing nextdevice argument')
elif datum['nextdevice'] not in self.valid_values:
raise exc.InvalidArgumentException(
datum['nextdevice'] + ' is not one of ' +
','.join(self.valid_values))
self.bootdevbynode[key] = datum['nextdevice']
if 'bootmode' in datum:
if datum['bootmode'] not in self.valid_bootmodes:
raise exc.InvalidArgumentException(
datum['bootmode'] + ' is not one of ' +
','.join(self.valid_bootmodes))
self.bootmodebynode[key] = datum['bootmode']
else:
datum = inputdata
if 'nextdevice' not in datum:
raise exc.InvalidArgumentException(
'missing nextdevice argument')
elif datum['nextdevice'] not in self.valid_values:
raise exc.InvalidArgumentException(
datum['nextdevice'] + ' is not one of ' +
','.join(self.valid_values))
for node in nodes:
self.bootdevbynode[node] = datum['nextdevice']
if 'bootmode' in datum:
self.bootmodebynode[node] = datum['bootmode']
def bootdevice(self, node):
return self.bootdevbynode[node]
def bootmode(self, node):
return self.bootmodebynode.get(node, 'unspecified')
class IdentifyState(ConfluentChoiceMessage):
valid_values = set([
'', # allowed for output to indicate write-only support
'on',
'off',
])
keyname = 'identify'
class PowerState(ConfluentChoiceMessage):
valid_values = set([
'on',
'off',
'reset',
'boot',
])
keyname = 'state'
class SensorReadings(ConfluentMessage):
readonly = True
def __init__(self, sensors=(), name=None):
readings = []
self.notnode = name is None
for sensor in sensors:
sensordict = {'name': sensor['name']}
if 'value' in sensor:
sensordict['value'] = sensor['value']
if 'units' in sensor:
sensordict['units'] = sensor['units']
if 'states' in sensor:
sensordict['states'] = sensor['states']
if 'health' in sensor:
sensordict['health'] = sensor['health']
readings.append(sensordict)
if self.notnode:
self.kvpairs = {'sensors': readings}
else:
self.kvpairs = {name: {'sensors': readings}}
class HealthSummary(ConfluentMessage):
readonly = True
valid_values = set([
'ok',
'warning',
'critical',
'failed',
])
def __init__(self, health, name=None):
self.stripped = False
self.notnode = name is None
if health not in self.valid_values:
raise ValueError("%d is not a valid health state" % health)
if self.notnode:
self.kvpairs = {'health': {'value': health}}
else:
self.kvpairs = {name: {'health': {'value': health}}}
class Attributes(ConfluentMessage):
def __init__(self, name=None, kv=None, desc=''):
self.desc = desc
nkv = {}
self.notnode = name is None
for key in kv.iterkeys():
if type(kv[key]) in (str, unicode):
nkv[key] = {'value': kv[key]}
else:
nkv[key] = kv[key]
if self.notnode:
self.kvpairs = nkv
else:
self.kvpairs = {
name: nkv
}
class ListAttributes(ConfluentMessage):
def __init__(self, name=None, kv=None, desc=''):
self.desc = desc
self.notnode = name is None
if self.notnode:
self.kvpairs = kv
else:
self.kvpairs = {name: kv}
class CryptedAttributes(Attributes):
defaulttype = 'password'
def __init__(self, name=None, kv=None, desc=''):
# for now, just keep the dictionary keys and discard crypt value
self.desc = desc
nkv = {}
for key in kv.iterkeys():
nkv[key] = {'isset': False}
try:
if kv[key] is not None and kv[key]['cryptvalue'] != '':
nkv[key] = {'isset': True}
nkv[key]['inheritedfrom'] = kv[key]['inheritedfrom']
except KeyError:
pass
self.notnode = name is None
if self.notnode:
self.kvpairs = nkv
else:
self.kvpairs = {
name: nkv
}<|fim▁end|>
|
Used by httpapi while assembling data prior to json serialization"""
|
<|file_name|>handleBackoffice.go<|end_file_name|><|fim▁begin|>package server
import (
"fmt"
"math/big"
"net/http"
"strconv"
"time"
"github.com/san-lab/banketh-quorum/banketh/bots"
"github.com/san-lab/banketh-quorum/banketh/cryptobank"
"github.com/san-lab/banketh-quorum/banketh/data"
"github.com/san-lab/banketh-quorum/lib/bank/banktypes"
"github.com/san-lab/banketh-quorum/lib/db"
"github.com/san-lab/banketh-quorum/lib/ethapi"
)
func HandleBackoffice(w http.ResponseWriter, req *http.Request) {
if !logged(w, req) {
reload(w, req, "/")
return
}
req.ParseForm()
whatToShowA, ok := req.Form["whattoshow"]
if !ok || whatToShowA[0] == "Cashins" {
HandleCashins(w, req)
return
} else if whatToShowA[0] == "Cashouts" {
HandleCashouts(w, req)
return
} else if whatToShowA[0] == "PaymentTerminations" {
HandlePaymentTerminations(w, req)
return
}
showError(w, req, "Navigation error")
}
func HandleCashins(w http.ResponseWriter, req *http.Request) {
cashins, err := db.ReadTable(data.DBNAME, data.DBTABLECASHINS, &data.CashinT{}, "", "Time desc")
if err != nil {
showErrorf(w, req, "Unable to read cashin transactions table [%v]", err)
return
}
passdata := map[string]interface{}{
"Cashins": cashins,
"Currency": cryptobank.CURRENCY,
}
placeHeader(w, req)
templates.ExecuteTemplate(w, "cashins.html", passdata)
}
func HandleCashouts(w http.ResponseWriter, req *http.Request) {
cashouts, err := db.ReadTable(data.DBNAME, data.DBTABLECASHOUTS, &data.CashoutT{}, "", "Time desc")
if err != nil {
showErrorf(w, req, "Unable to read cashout transactions table [%v]", err)
return
}
passdata := map[string]interface{}{
"Cashouts": cashouts,
"Currency": cryptobank.CURRENCY,
}
placeHeader(w, req)
templates.ExecuteTemplate(w, "cashouts.html", passdata)
}
func HandlePaymentTerminations(w http.ResponseWriter, req *http.Request) {
paymentTerminations, err := db.ReadTable(data.DBNAME, data.DBTABLEPAYMENTTERMINATIONS, &data.PaymentTerminationT{}, "", "Time desc")
if err != nil {
showErrorf(w, req, "Unable to read payment terminations table [%v]", err)
return
}
passdata := map[string]interface{}{
"PaymentTerminations": paymentTerminations,
"Currency": cryptobank.CURRENCY,
}
placeHeader(w, req)
templates.ExecuteTemplate(w, "paymentterminations.html", passdata)
}
func HandleManualAddFunds(w http.ResponseWriter, req *http.Request) {
if !logged(w, req) {
reload(w, req, "/")
return
}
req.ParseForm()
bankaccountA, ok := req.Form["bankaccount"]
if !ok {
showError(w, req, "Form error")
return
}
banktridA, ok := req.Form["banktrid"]
if !ok {
showError(w, req, "Form error")
return
}
amountA, ok := req.Form["amount"]
if !ok {
showError(w, req, "Form error")
return
}
amount, err := strconv.ParseFloat(amountA[0], 64)
if err != nil {
pushAlertf(w, req, ALERT_DANGER, "Wrong amount argument %v [%v]", amountA[0], err)
reload(w, req, "/backoffice")
return
}
messageA, ok := req.Form["message"]
if !ok {
showError(w, req, "Form error")
return
}
bankethaccountA, ok := req.Form["bankethaccount"]
if !ok {
showError(w, req, "Form error")
return
}
bankethaccount, err := strconv.ParseUint(bankethaccountA[0], 0, 64)
if err != nil {
pushAlertf(w, req, ALERT_DANGER, "Wrong banketh account %v [%v]", bankethaccountA[0], err)
reload(w, req, "/backoffice")
return
}
	manyaccounts, err := cryptobank.Many_accounts(ethclient)
	if err != nil {
		showErrorf(w, req, "Error reading the number of accounts from ethereum node [%v]", err)
		return
	}
	if int64(bankethaccount) >= manyaccounts {
pushAlertf(w, req, ALERT_DANGER, "Account %v does not exist in banketh", bankethaccountA[0])
reload(w, req, "/backoffice")
return
}
account, err := cryptobank.Read_account(ethclient, bankethaccount)
if err != nil {
showErrorf(w, req, "Error reading account %v from ethereum node [%v]", bankethaccount, err)
return
}
bankethamount, _ := big.NewFloat(0).Mul(big.NewFloat(amount), big.NewFloat(cryptobank.PRECISION)).Int(nil)
txHash, err := cryptobank.Add_funds(ethclient, int64(bankethaccount), bankethamount)
if err != nil {
pushAlertf(w, req, ALERT_DANGER, "Add_funds method call failed! [%v]", err)
reload(w, req, "/backoffice")
return
}
newT := data.CashinT{
BankTrID: banktridA[0],
Time: db.MyTime(time.Now()),
BankAccount: bankaccountA[0],
BankAmount: amount,
Message: messageA[0],
ToAddress: account.Owner,
ToAccount: int64(bankethaccount),
BankethAmount: big.NewInt(0).Set(bankethamount),
AddFundsOrSubmitPaymentHash: txHash,
ReturnTrID: "",
ReturnMessage: "",
Status: data.CASHIN_STATUS_MANUALLY_FINISHED,
}
err = db.WriteEntry(data.DBNAME, data.DBTABLECASHINS, newT)
if err != nil {
errMsg := fmt.Sprintf("Sent an addFunds call (hash %v) but could not write it to DB! [%v]", err)
pushAlert(w, req, ALERT_DANGER, errMsg)
db.RegisterEvent(data.DBNAME, data.DBTABLEEVENTS, db.EVENT_MANUAL_INTERVENTION_NEEDED,
errMsg+fmt.Sprintf(" - Need manual intervention, we should record the transaction in the DB; projected transaction was %v", newT))
reload(w, req, "/backoffice")
return
}
}
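// Sketch of the fixed-point conversion used above. Assuming
// cryptobank.PRECISION is a power-of-ten scale factor (100 is used here
// purely for illustration), a bank amount of 12.34 becomes the on-chain
// integer 1234:
//
//	v, _ := big.NewFloat(0).Mul(big.NewFloat(12.34), big.NewFloat(100)).Int(nil)
//	// v.Int64() == 1234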
func HandleManualAddTransfer(w http.ResponseWriter, req *http.Request) {
if !logged(w, req) {
reload(w, req, "/")
return
}
req.ParseForm()
banktridA, ok := req.Form["banktrid"]
if !ok {
showError(w, req, "Form error")
return
}
bankaccountA, ok := req.Form["bankaccount"]
if !ok {
showError(w, req, "Form error")
return
}
amountA, ok := req.Form["amount"]
if !ok {
showError(w, req, "Form error")
return
}
amount, err := strconv.ParseFloat(amountA[0], 64)
if err != nil {
pushAlertf(w, req, ALERT_DANGER, "Wrong amount argument %v [%v]", amountA[0], err)
reload(w, req, "/backoffice")
return
}
typeA, ok := req.Form["type"]
if !ok {
showError(w, req, "Form error")
return
}
messageA, ok := req.Form["message"]
if !ok {
showError(w, req, "Form error")
return
}
newTransfer := banktypes.BankTransferT{
TransferID: banktridA[0],
Time: db.MyTime(time.Now()),
Account: bankaccountA[0],
Amount: amount,
Type: typeA[0],
Message: messageA[0],
}
d, err := db.ConnectDB(data.DBNAME)
if err != nil {
pushAlertf(w, req, ALERT_DANGER, "Error connecting to the database [%v]", err)
reload(w, req, "/backoffice")
return
}
not_ok := bots.Process_inbound_transfer(d, &newTransfer)
if not_ok {
pushAlertf(w, req, ALERT_DANGER, "Error processing manual inbound transfer - pls check the console log")
reload(w, req, "/backoffice")
return
}
}
func HandleManualRemoveFunds(w http.ResponseWriter, req *http.Request) {
if !logged(w, req) {
reload(w, req, "/")
return
}
req.ParseForm()
var redeemFundsHash ethapi.Hash
var err error
redeemFundsHashA, ok := req.Form["redeemfundshash"]
if ok && redeemFundsHashA[0] != "" {
redeemFundsHash, err = ethapi.String_to_hash(redeemFundsHashA[0])<|fim▁hole|> reload(w, req, "/backoffice")
return
}
}
bankethaccountA, ok := req.Form["bankethaccount"]
if !ok {
showError(w, req, "Form error")
return
}
bankethaccount, err := strconv.ParseUint(bankethaccountA[0], 0, 64)
if err != nil {
pushAlertf(w, req, ALERT_DANGER, "Wrong banketh account %v [%v]", bankethaccountA[0], err)
reload(w, req, "/backoffice")
return
}
	manyaccounts, err := cryptobank.Many_accounts(ethclient)
	if err != nil {
		showErrorf(w, req, "Error reading the number of accounts from ethereum node [%v]", err)
		return
	}
	if int64(bankethaccount) >= manyaccounts {
pushAlertf(w, req, ALERT_DANGER, "Account %v does not exist in banketh", bankethaccountA[0])
reload(w, req, "/backoffice")
return
}
account, err := cryptobank.Read_account(ethclient, bankethaccount)
if err != nil {
showErrorf(w, req, "Error reading account %v from ethereum node [%v]", bankethaccount, err)
return
}
amountA, ok := req.Form["amount"]
if !ok {
showError(w, req, "Form error")
return
}
amount, err := strconv.ParseFloat(amountA[0], 64)
if err != nil {
pushAlertf(w, req, ALERT_DANGER, "Wrong amount argument %v [%v]", amountA[0], err)
reload(w, req, "/backoffice")
return
}
bankethamount, _ := big.NewFloat(0).Mul(big.NewFloat(amount), big.NewFloat(cryptobank.PRECISION)).Int(nil)
redemptionModeA, ok := req.Form["redemptionmode"]
if !ok {
showError(w, req, "Form error")
return
}
redemptionMode, err := strconv.ParseUint(redemptionModeA[0], 0, 64)
if err != nil {
pushAlertf(w, req, ALERT_DANGER, "Wrong redemption mode %v [%v]", redemptionModeA[0], err)
reload(w, req, "/backoffice")
return
}
routingInfoA, given := req.Form["routinginfo"]
if given {
if len(routingInfoA[0]) > 32 {
pushAlertf(w, req, ALERT_DANGER, "Wrong routing info (%v)", routingInfoA[0])
reload(w, req, "/backoffice")
return
}
}
var errorCode int64
errorCodeA, given := req.Form["errorcode"]
if given {
errorCode, err = strconv.ParseInt(errorCodeA[0], 0, 64)
if err != nil {
pushAlertf(w, req, ALERT_DANGER, "Wrong redemption error code (%v)", errorCodeA[0])
reload(w, req, "/backoffice")
return
}
}
/*
redemptionCodeSent := big.NewInt(0)
redemptionCodeSentA, given := req.Form["redemptioncodesent"]
if given {
redemptionCodeSent, ok = redemptionCodeSent.SetString(redemptionCodeSentA[0], 0)
if !ok {
pushAlertf(w, req, ALERT_DANGER, "Wrong redemption code (%v)", redemptionCodeSentA[0])
reload(w, req, "/backoffice")
return
}
}
*/
bankaccountA, _ := req.Form["bankaccount"]
banktridA, _ := req.Form["banktrid"]
messageA, _ := req.Form["message"]
txHash, err := cryptobank.Remove_funds(ethclient, int64(bankethaccount), bankethamount, redeemFundsHash, errorCode)
/*
txHash, err := cryptobank.Remove_funds(ethclient, int64(bankethaccount), bankethamount, redemptionCodeSent)
*/
if err != nil {
pushAlertf(w, req, ALERT_DANGER, "Remove_funds method call failed! [%v]", err)
reload(w, req, "/backoffice")
return
}
newT := data.CashoutT{
RedeemFundsHash: redeemFundsHash,
Time: db.MyTime(time.Now()),
FromAccount: int64(bankethaccount),
FromAddress: account.Owner,
BankethAmount: big.NewInt(0).Set(bankethamount),
RedemptionMode: redemptionMode,
		RoutingInfo: routingInfo,
ErrorCode: errorCode,
RemoveFundsHash: txHash,
// MakeTransferHash: // Not neccessary, since this is a known cashout
BankAccount: bankaccountA[0],
BankAmount: amount,
BankTrID: banktridA[0],
Message: messageA[0],
Status: data.CASHOUT_STATUS_MANUALLY_FINISHED,
}
err = db.WriteEntry(data.DBNAME, data.DBTABLECASHOUTS, newT)
if err != nil {
errMsg := fmt.Sprintf("Sent an remove_funds call (hash %v) but could not write it to DB! [%v]", err)
pushAlert(w, req, ALERT_DANGER, errMsg)
db.RegisterEvent(data.DBNAME, data.DBTABLEEVENTS, db.EVENT_MANUAL_INTERVENTION_NEEDED,
errMsg+fmt.Sprintf(" - Need manual intervention, we should record the transaction in the DB; projected transaction was %v", newT))
reload(w, req, "/backoffice")
return
}
}<|fim▁end|>
|
if err != nil {
pushAlertf(w, req, ALERT_DANGER, "Bad hash %v [%v]", redeemFundsHashA[0], err)
|
<|file_name|>state.py<|end_file_name|><|fim▁begin|>import abc
from typing import Dict, List
class State(metaclass=abc.ABCMeta):
"""Base class for inheritable state objects, such as would represent an
input element's attributes."""
@abc.abstractproperty<|fim▁hole|> @abc.abstractproperty
def ref(self) -> str:
"""Return a string uniquely identifying this element."""
pass
@abc.abstractproperty
def keys(self) -> List[str]:
"""Return a list of attributes in this State class that will inherit
from a parent object."""
pass
def inherit(self, other: 'State') -> None:
"""Inherit properties from a parent State instance."""
for key in [k for k in other.keys if getattr(self, k) is None]:
setattr(self, key, getattr(other, key))
for src, dest in other.replacements.items():
if src not in self.replacements:
self.replacements[src] = dest
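    # Hypothetical sketch of the contract: for a concrete subclass whose keys
    # property returns ['copyable'], child.inherit(parent) copies
    # parent.copyable into child.copyable only while child.copyable is None,
    # and merges any replacement mappings the child does not already define.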
def __str__(self) -> str:
d = {}
for k in self.keys:
d[k] = getattr(self, k)
return str(d)<|fim▁end|>
|
def replacements(self) -> Dict[str, str]:
"""Return a dictionary mapping template replacements."""
pass
|
<|file_name|>BoardCell.ts<|end_file_name|><|fim▁begin|>module VirusGame {
export const enum CellState { Empty, Alive, Dead };
export class BoardCell extends Phaser.Image {
state: CellState = CellState.Empty;
player: BoardPlayer;
isPossibleToMoveTo: boolean = false;
<|fim▁hole|>
this.inputEnabled = true;
this.input.useHandCursor = true;
this.events.onInputOver.add(this.drawUnderPointer, this);
this.events.onInputOut.add(this.drawNormal, this);
this.events.onInputUp.add(function() {
if (this.board_game.current_player.is_local_player)
this.cellPlayed();
}, this);
}
setState(state, player) {
this.state = state;
this.player = player;
switch (this.state) {
case CellState.Alive:
this.frameName = this.player.color + '_boxCross';
break;
case CellState.Dead:
this.frameName = this.player.color + '_boxCheckmark';
break;
}
}
cellPlayed(opponentTurn?) {
if (this.board_game.isTurnLegal(this.row, this.col)) {
switch (this.state) {
case CellState.Empty:
this.frameName = this.board_game.current_player_color + '_boxCross';
this.state = CellState.Alive;
this.player = this.board_game.current_player;
this.board_game.endTurn();
if (!opponentTurn)
client.player_move(this.board_game.id,this.row,this.col,1,this.board_game.left_turn_cells,this.board_game.current_player_number,this.player.state,0);
break;
case CellState.Alive:
this.frameName = this.board_game.current_player_color + '_boxCheckmark';
this.state = CellState.Dead;
this.player = this.board_game.current_player;
this.board_game.endTurn();
if (!opponentTurn)
client.player_move(this.board_game.id,this.row,this.col,2,this.board_game.left_turn_cells,this.board_game.current_player_number,this.player.state,0);
break;
case CellState.Dead:
break;
}
}
}
drawNormal() {
if(this.isPossibleToMoveTo)
this.tint = 0xabcdef;
else
this.tint = 0xffffff;
}
drawUnderPointer() {
this.tint = 0xaaaaaa;
}
makePossibleToMoveTo() {
this.isPossibleToMoveTo = true;
this.drawNormal();
}
disablePossibleToMoveTo() {
this.isPossibleToMoveTo = false;
this.drawNormal();
}
}
}<|fim▁end|>
|
constructor(public row: number, public col: number, public board_game: BoardGame) {
super(board_game.game, 0, 0, 'board_cells', 'grey_box');
|
<|file_name|>not-panic-safe-6.rs<|end_file_name|><|fim▁begin|>#![allow(dead_code)]
use std::panic::UnwindSafe;<|fim▁hole|>fn assert<T: UnwindSafe + ?Sized>() {}
fn main() {
assert::<*mut RefCell<i32>>();
//~^ ERROR the type `UnsafeCell<i32>` may contain interior mutability and a
//~| ERROR the type `UnsafeCell<isize>` may contain interior mutability and a
}<|fim▁end|>
|
use std::cell::RefCell;
|
<|file_name|>app.js<|end_file_name|><|fim▁begin|>/**
* Module dependencies.
*/
var express = require('express');
var http = require('http');
var path = require('path');
var handlebars = require('express3-handlebars')
var index = require('./routes/index');
// Example route
// var user = require('./routes/user');
// below added by tommy
var login = require('./routes/login');
var messages = require('./routes/messages');
var app = express();
// all environments
app.set('port', process.env.PORT || 3000);
app.set('views', path.join(__dirname, 'views'));
app.engine('handlebars', handlebars());
app.set('view engine', 'handlebars');
app.use(express.favicon());
app.use(express.logger('dev'));
app.use(express.json());
app.use(express.urlencoded());
app.use(express.methodOverride());
app.use(express.cookieParser('Intro HCI secret key'));
app.use(express.session());
app.use(app.router);
app.use(express.static(path.join(__dirname, 'public')));
// development only
if ('development' == app.get('env')) {
app.use(express.errorHandler());
}
<|fim▁hole|>
//below added by tommy
app.get('/login', login.view);
app.get('/messages', messages.view);
http.createServer(app).listen(app.get('port'), function(){
console.log('Express server listening on port ' + app.get('port'));
});<|fim▁end|>
|
// Add routes here
app.get('/', index.view);
// Example route
// app.get('/users', user.list);
|
<|file_name|>crash_reporting.cpp<|end_file_name|><|fim▁begin|>// SuperTuxKart - a fun racing game with go-kart
//
// Copyright (C) 2013-2015 Lionel Fuentes
//
// This program is free software; you can redistribute it and/or
// modify it under the terms of the GNU General Public License
// as published by the Free Software Foundation; either version 3
// of the License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#include "crash_reporting.hpp"
#include "log.hpp"
#include <string.h>
#if defined(WIN32) && !defined(DEBUG) && !defined(__MINGW32__)
// --------------------- Windows version -----------------
#include <Windows.h>
#include <DbgHelp.h>
#include <stdlib.h>
#include <signal.h>
#include <new.h>
typedef BOOL (__stdcall *tSymCleanup)(
_In_ HANDLE hProcess
);
typedef PVOID (__stdcall *tSymFunctionTableAccess64)(
_In_ HANDLE hProcess,
_In_ DWORD64 AddrBase
);
typedef BOOL (__stdcall *tSymGetLineFromAddr64)(
_In_ HANDLE hProcess,
_In_ DWORD64 qwAddr,
_Out_ PDWORD pdwDisplacement,
_Out_ PIMAGEHLP_LINE64 Line64
);
typedef DWORD64 (__stdcall *tSymGetModuleBase64)(
_In_ HANDLE hProcess,
_In_ DWORD64 qwAddr
);
typedef BOOL (__stdcall *tSymGetSymFromAddr64)(
_In_ HANDLE hProcess,
_In_ DWORD64 qwAddr,
_Out_opt_ PDWORD64 pdwDisplacement,
_Inout_ PIMAGEHLP_SYMBOL64 Symbol
);
typedef BOOL (__stdcall *tSymInitialize)(
_In_ HANDLE hProcess,
_In_opt_ PCSTR UserSearchPath,
_In_ BOOL fInvadeProcess
);
typedef DWORD (__stdcall *tSymSetOptions)(
_In_ DWORD SymOptions
);
typedef BOOL (__stdcall *tStackWalk64)(
_In_ DWORD MachineType,
_In_ HANDLE hProcess,
_In_ HANDLE hThread,
_Inout_ LPSTACKFRAME64 StackFrame,
_Inout_ PVOID ContextRecord,
_In_opt_ PREAD_PROCESS_MEMORY_ROUTINE64 ReadMemoryRoutine,
_In_opt_ PFUNCTION_TABLE_ACCESS_ROUTINE64 FunctionTableAccessRoutine,
_In_opt_ PGET_MODULE_BASE_ROUTINE64 GetModuleBaseRoutine,
_In_opt_ PTRANSLATE_ADDRESS_ROUTINE64 TranslateAddress
);
typedef DWORD (__stdcall *tUnDecorateSymbolName)(
_In_ PCSTR name,
_Out_ PSTR outputString,
_In_ DWORD maxStringLength,
_In_ DWORD flags
);
<|fim▁hole|> namespace CrashReporting
{
void getCallStackWithContext(std::string& callstack, PCONTEXT pContext);
void winCrashHandler(PCONTEXT pContext=NULL)
{
std::string callstack;
if(pContext)
getCallStackWithContext(callstack, pContext);
else
getCallStack(callstack);
std::string msg = "SuperTuxKart crashed!\n"
"Please hit Ctrl+C to copy to clipboard and signal the problem\n"
"to the developers on our forum: http://forum.freegamedev.net/viewforum.php?f=16\n"
"\n"
"Call stack:\n";
msg += callstack;
MessageBoxA(NULL, msg.c_str(), "SuperTuxKart crashed :/", MB_OK);
}
LONG WINAPI sehHandler(_In_ struct _EXCEPTION_POINTERS *ExceptionInfo)
{
winCrashHandler(ExceptionInfo->ContextRecord);
return EXCEPTION_EXECUTE_HANDLER;
}
void pureCallHandler()
{
winCrashHandler();
}
int newHandler( size_t )
{
winCrashHandler();
return 0;
}
void invalidParameterHandler(const wchar_t *, const wchar_t *, const wchar_t *, unsigned int, uintptr_t)
{
winCrashHandler();
}
void signalHandler(int code)
{
winCrashHandler();
}
void installHandlers()
{
// ----- Per-process handlers -----
SetUnhandledExceptionFilter(sehHandler); // Top-level SEH handler
_set_purecall_handler(pureCallHandler); // Pure virtual function calls handler
// Catch new operator memory allocation exceptions
_set_new_mode(1); // Force malloc() to call new handler too
_set_new_handler(newHandler);
_set_invalid_parameter_handler(invalidParameterHandler); // Catch invalid parameter exceptions.
//_set_security_error_handler(securityHandler); // Catch buffer overrun exceptions
signal(SIGABRT, signalHandler);
signal(SIGINT, signalHandler);
signal(SIGTERM, signalHandler);
// ----- Per-thread handlers -----
// TODO
}
void getCallStackWithContext(std::string& callstack, PCONTEXT pContext)
{
HINSTANCE hImageHlpDll = LoadLibraryA("imagehlp.dll");
if(!hImageHlpDll)
{
Log::warn("CrashReporting", "Failed to load DLL imagehlp.dll");
callstack = "Crash reporting failed to load DLL imagehlp.dll";
return;
}
// Retrieve the DLL functions
#define GET_FUNC_PTR(FuncName) \
t##FuncName _##FuncName = (t##FuncName)GetProcAddress(hImageHlpDll, #FuncName); \
if(!_##FuncName) { \
Log::warn("CrashReporting", "Failed to import symbol " #FuncName " from imagehlp.dll"); \
FreeLibrary(hImageHlpDll); \
return; \
}
GET_FUNC_PTR(SymCleanup )
GET_FUNC_PTR(SymFunctionTableAccess64 )
GET_FUNC_PTR(SymGetLineFromAddr64 )
GET_FUNC_PTR(SymGetModuleBase64 )
GET_FUNC_PTR(SymGetSymFromAddr64 )
GET_FUNC_PTR(SymInitialize )
GET_FUNC_PTR(SymSetOptions )
GET_FUNC_PTR(StackWalk64 )
GET_FUNC_PTR(UnDecorateSymbolName )
#undef GET_FUNC_PTR
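// As an illustration (not part of the original code), GET_FUNC_PTR(SymCleanup)
// above expands to roughly:
//
// tSymCleanup _SymCleanup = (tSymCleanup)GetProcAddress(hImageHlpDll, "SymCleanup");
// if(!_SymCleanup) { /* warn, FreeLibrary, return */ }
//
// i.e. every DbgHelp entry point is resolved at runtime instead of being
// linked statically, so a missing export degrades gracefully.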
const HANDLE hProcess = GetCurrentProcess();
const HANDLE hThread = GetCurrentThread();
// Initialize the symbol handler for the process
{
// Get the file path of the executable
char filepath[512];
DWORD path_len = GetModuleFileNameA(NULL, filepath, sizeof(filepath));
if(path_len == 0)
{
Log::warn("CrashReporting", "GetModuleFileNameA failed");
FreeLibrary(hImageHlpDll);
return;
}
// Only keep the directory
char* last_separator = strrchr(filepath, '/');
if(!last_separator) last_separator = strrchr(filepath, '\\');
if(last_separator)
last_separator[0] = '\0';
// Since the stack trace can also be used for leak checks, don't
// initialise this all the time.
static bool first_time = true;
if (first_time)
{
// Finally initialize the symbol handler.
BOOL bOk = _SymInitialize(hProcess, filepath, TRUE);
if (!bOk)
{
Log::warn("CrashReporting", "SymInitialize() failed");
FreeLibrary(hImageHlpDll);
return;
}
_SymSetOptions(SYMOPT_LOAD_LINES);
first_time = false;
}
}
// Get the stack trace
{
// Initialize the IMAGEHLP_SYMBOL64 structure
const size_t MaxNameLength = 256;
IMAGEHLP_SYMBOL64* sym = (IMAGEHLP_SYMBOL64*)_malloca(sizeof(IMAGEHLP_SYMBOL64) + MaxNameLength);
sym->SizeOfStruct = sizeof(IMAGEHLP_SYMBOL64);
sym->MaxNameLength = MaxNameLength;
// Initialize the STACKFRAME structure so that it
// corresponds to the current function call
STACKFRAME64 stackframe;
memset(&stackframe, 0, sizeof(stackframe));
stackframe.AddrPC.Offset = pContext->Eip;
stackframe.AddrPC.Mode = AddrModeFlat;
stackframe.AddrStack.Offset = pContext->Esp;
stackframe.AddrStack.Mode = AddrModeFlat;
stackframe.AddrFrame.Offset = pContext->Ebp;
stackframe.AddrFrame.Mode = AddrModeFlat;
const DWORD machine_type = IMAGE_FILE_MACHINE_I386;
// Walk the stack
const int max_nb_calls = 32;
for(int i=0 ; i < max_nb_calls ; i++)
{
const BOOL stackframe_ok = _StackWalk64( machine_type,
hProcess,
hThread,
&stackframe,
pContext,
NULL,
_SymFunctionTableAccess64,
_SymGetModuleBase64,
NULL);
if(stackframe_ok)
{
// Decode the symbol and add it to the call stack
DWORD64 sym_displacement;
if(_SymGetSymFromAddr64( hProcess,
stackframe.AddrPC.Offset,
&sym_displacement,
sym))
{
IMAGEHLP_LINE64 line64;
DWORD dwDisplacement = (DWORD)sym_displacement;
if(_SymGetLineFromAddr64(hProcess, stackframe.AddrPC.Offset, &dwDisplacement, &line64))
{
callstack += "\n ";
// Directory + filename -> filename only
const char* filename = line64.FileName;
const char* ptr = line64.FileName;
while(*ptr)
{
if(*ptr == '\\' || *ptr == '/')
filename = ptr+1;
ptr++;
}
callstack += filename;
callstack += ":";
callstack += sym->Name;
char str[128];
_itoa(line64.LineNumber, str, 10);
callstack += ":";
callstack += str;
}
else
{
callstack += "\n ";
callstack += sym->Name;
}
}
else
callstack += "\n <no symbol available>";
}
else
break; // done
}
}
FreeLibrary(hImageHlpDll);
}
void getCallStack(std::string& callstack)
{
// Get the current CONTEXT
// NB: this code is ONLY VALID FOR X86 (32 bit)!
CONTEXT ctx;
memset(&ctx, '\0', sizeof(ctx));
ctx.ContextFlags = CONTEXT_FULL;
__asm call x
__asm x: pop eax // get eip (can't directly use mov)
__asm mov ctx.Eip, eax
__asm mov ctx.Ebp, ebp
__asm mov ctx.Esp, esp
getCallStackWithContext(callstack, &ctx);
}
} // end namespace CrashReporting
#else
// --------------------- Unix version -----------------------
namespace CrashReporting
{
void installHandlers()
{
// TODO!
}
void getCallStack(std::string& callstack)
{
// TODO!
}
} // end namespace CrashReporting
#endif<|fim▁end|>
| |
<|file_name|>s3.rs<|end_file_name|><|fim▁begin|>// Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.
use std::io;
use std::marker::PhantomData;
use futures_util::{
future::FutureExt,
io::{AsyncRead, AsyncReadExt},
stream::TryStreamExt,
};
use rusoto_core::{
request::DispatchSignedRequest,
{ByteStream, RusotoError},
};
use rusoto_s3::*;
use rusoto_util::new_client;
use super::{
util::{block_on_external_io, error_stream, retry, RetryError},
ExternalStorage,
};
use kvproto::backup::S3 as Config;
/// S3 compatible storage
#[derive(Clone)]
pub struct S3Storage {
config: Config,
client: S3Client,
// The current implementation (rusoto 0.43.0 + hyper 0.13.3) is not `Send`
// in practice. See https://github.com/tikv/tikv/issues/7236 for details.
// FIXME: remove it.
_not_send: PhantomData<*const ()>,
}
impl S3Storage {
/// Create a new S3 storage for the given config.
pub fn new(config: &Config) -> io::Result<S3Storage> {
Self::check_config(config)?;
let client = new_client!(S3Client, config);
Ok(S3Storage {
config: config.clone(),
client,
_not_send: PhantomData::default(),
})
}
pub fn with_request_dispatcher<D>(config: &Config, dispatcher: D) -> io::Result<S3Storage>
where
D: DispatchSignedRequest + Send + Sync + 'static,
{
Self::check_config(config)?;
let client = new_client!(S3Client, config, dispatcher);
Ok(S3Storage {
config: config.clone(),
client,
_not_send: PhantomData::default(),
})
}
fn check_config(config: &Config) -> io::Result<()> {
if config.bucket.is_empty() {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"missing bucket name",
));
}
Ok(())
}
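/// Prefixes the object key with the configured bucket prefix, if any; e.g.
/// with prefix "myprefix", key "mykey" becomes "myprefix/mykey" (matching
/// the "/mybucket/myprefix/mykey" path asserted in the tests below).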
fn maybe_prefix_key(&self, key: &str) -> String {
if !self.config.prefix.is_empty() {
return format!("{}/{}", self.config.prefix, key);
}
key.to_owned()
}
}
impl<E> RetryError for RusotoError<E> {
fn placeholder() -> Self {
Self::Blocking
}
fn is_retryable(&self) -> bool {
match self {
Self::HttpDispatch(_) => true,
Self::Unknown(resp) if resp.status.is_server_error() => true,
// FIXME: Retry NOT_READY & THROTTLED (403).
_ => false,
}
}
}
/// A helper for uploading a large files to S3 storage.
///
/// Note: this uploader does not support uploading files larger than 19.5 GiB.
struct S3Uploader<'client> {
client: &'client S3Client,
bucket: String,
key: String,
acl: Option<String>,
server_side_encryption: Option<String>,
ssekms_key_id: Option<String>,
storage_class: Option<String>,
upload_id: String,
parts: Vec<CompletedPart>,
}
/// Specifies the minimum size to use multi-part upload.
/// AWS S3 requires each part to be at least 5 MiB.
const MINIMUM_PART_SIZE: usize = 5 * 1024 * 1024;
impl<'client> S3Uploader<'client> {
/// Creates a new uploader with a given target location and upload configuration.
fn new(client: &'client S3Client, config: &Config, key: String) -> Self {
fn get_var(s: &str) -> Option<String> {
if s.is_empty() {
None
} else {
Some(s.to_owned())
}
}
Self {
client,
bucket: config.bucket.clone(),
key,
acl: get_var(&config.acl),
server_side_encryption: get_var(&config.sse),
ssekms_key_id: get_var(&config.sse_kms_key_id),
storage_class: get_var(&config.storage_class),
upload_id: "".to_owned(),
parts: Vec::new(),
}
}
/// Executes the upload process.
async fn run(
mut self,
reader: &mut (dyn AsyncRead + Unpin),
est_len: u64,
) -> Result<(), Box<dyn std::error::Error>> {
if est_len <= MINIMUM_PART_SIZE as u64 {
// For short files, execute one put_object to upload the entire thing.
let mut data = Vec::with_capacity(est_len as usize);
reader.read_to_end(&mut data).await?;
retry(|| self.upload(&data)).await?;
Ok(())
} else {
// Otherwise, use multipart upload to improve robustness.
self.upload_id = retry(|| self.begin()).await?;
let upload_res = async {
let mut buf = vec![0; MINIMUM_PART_SIZE];
let mut part_number = 1;
loop {
let data_size = reader.read(&mut buf).await?;
if data_size == 0 {
break;
}
let part = retry(|| self.upload_part(part_number, &buf[..data_size])).await?;
self.parts.push(part);
part_number += 1;
}
Ok(())
}
.await;
if upload_res.is_ok() {
retry(|| self.complete()).await?;
} else {
let _ = retry(|| self.abort()).await;
}
upload_res
}
}
/// Starts a multipart upload process.
async fn begin(&self) -> Result<String, RusotoError<CreateMultipartUploadError>> {
let output = self
.client
.create_multipart_upload(CreateMultipartUploadRequest {
bucket: self.bucket.clone(),
key: self.key.clone(),
acl: self.acl.clone(),
server_side_encryption: self.server_side_encryption.clone(),
ssekms_key_id: self.ssekms_key_id.clone(),
storage_class: self.storage_class.clone(),
..Default::default()
})
.await?;
output.upload_id.ok_or_else(|| {
RusotoError::ParseError("missing upload-id from create_multipart_upload()".to_owned())
})
}
/// Completes a multipart upload process, asking S3 to join all parts into a single file.
async fn complete(&self) -> Result<(), RusotoError<CompleteMultipartUploadError>> {
self.client
.complete_multipart_upload(CompleteMultipartUploadRequest {
bucket: self.bucket.clone(),
key: self.key.clone(),
upload_id: self.upload_id.clone(),
multipart_upload: Some(CompletedMultipartUpload {
parts: Some(self.parts.clone()),
}),
..Default::default()
})
.await?;
Ok(())
}
/// Aborts the multipart upload process, deletes all uploaded parts.
async fn abort(&self) -> Result<(), RusotoError<AbortMultipartUploadError>> {
self.client
.abort_multipart_upload(AbortMultipartUploadRequest {
bucket: self.bucket.clone(),
key: self.key.clone(),
upload_id: self.upload_id.clone(),
..Default::default()
})
.await?;
Ok(())
}
/// Uploads a part of the file.
///
/// The `part_number` must be between 1 to 10000.
async fn upload_part(
&self,
part_number: i64,
data: &[u8],
) -> Result<CompletedPart, RusotoError<UploadPartError>> {
let part = self
.client
.upload_part(UploadPartRequest {
bucket: self.bucket.clone(),
key: self.key.clone(),
upload_id: self.upload_id.clone(),
part_number,
content_length: Some(data.len() as i64),
body: Some(data.to_vec().into()),
..Default::default()
})
.await?;
Ok(CompletedPart {
e_tag: part.e_tag,
part_number: Some(part_number),
})
}
/// Uploads a file atomically.
///
/// This should be used only when the data is known to be short, and thus relatively cheap to
/// retry the entire upload.
async fn upload(&self, data: &[u8]) -> Result<(), RusotoError<PutObjectError>> {
self.client
.put_object(PutObjectRequest {
bucket: self.bucket.clone(),
key: self.key.clone(),
acl: self.acl.clone(),
server_side_encryption: self.server_side_encryption.clone(),
ssekms_key_id: self.ssekms_key_id.clone(),
storage_class: self.storage_class.clone(),
content_length: Some(data.len() as i64),
body: Some(data.to_vec().into()),
..Default::default()
})
.await?;
Ok(())
}
}
impl ExternalStorage for S3Storage {
fn write(
&self,
name: &str,
mut reader: Box<dyn AsyncRead + Send + Unpin>,
content_length: u64,
) -> io::Result<()> {
let key = self.maybe_prefix_key(name);
debug!("save file to s3 storage"; "key" => %key);
let uploader = S3Uploader::new(&self.client, &self.config, key);
block_on_external_io(uploader.run(&mut *reader, content_length)).map_err(|e| {
io::Error::new(io::ErrorKind::Other, format!("failed to put object {}", e))
})
}
fn read(&self, name: &str) -> Box<dyn AsyncRead + Unpin + '_> {
let key = self.maybe_prefix_key(name);
let bucket = self.config.bucket.clone();
debug!("read file from s3 storage"; "key" => %key);
let req = GetObjectRequest {
key,
bucket: bucket.clone(),
..Default::default()
};
Box::new(
self.client
.get_object(req)
.map(move |future| match future {
Ok(out) => out.body.unwrap(),
Err(RusotoError::Service(GetObjectError::NoSuchKey(key))) => {
ByteStream::new(error_stream(io::Error::new(
io::ErrorKind::NotFound,
format!("no key {} at bucket {}", key, bucket),
)))
}
Err(e) => ByteStream::new(error_stream(io::Error::new(
io::ErrorKind::Other,
format!("failed to get object {}", e),
))),
})
.flatten_stream()
.into_async_read(),
)
}
}
#[cfg(test)]
mod tests {
use super::*;
use futures::io::AsyncReadExt;
use rusoto_core::signature::SignedRequest;
use rusoto_mock::MockRequestDispatcher;
#[test]
fn test_s3_config() {
let config = Config {
region: "ap-southeast-2".to_string(),
bucket: "mybucket".to_string(),
prefix: "myprefix".to_string(),
access_key: "abc".to_string(),
secret_access_key: "xyz".to_string(),
..Default::default()
};
let cases = vec![
// bucket is empty
Config {
bucket: "".to_owned(),
..config.clone()
},
];
for case in cases {
let r = S3Storage::new(&case);
assert!(r.is_err());
}
assert!(S3Storage::new(&config).is_ok());
}
#[test]
fn test_s3_storage() {
let magic_contents = "5678";
let config = Config {
region: "ap-southeast-2".to_string(),
bucket: "mybucket".to_string(),
prefix: "myprefix".to_string(),
access_key: "abc".to_string(),
secret_access_key: "xyz".to_string(),
..Default::default()
};
let dispatcher = MockRequestDispatcher::with_status(200).with_request_checker(
move |req: &SignedRequest| {
assert_eq!(req.region.name(), "ap-southeast-2");
assert_eq!(req.path(), "/mybucket/myprefix/mykey");
// PutObject is translated to HTTP PUT.
assert_eq!(req.payload.is_some(), req.method() == "PUT");
},<|fim▁hole|> "mykey",
Box::new(magic_contents.as_bytes()),
magic_contents.len() as u64,
)
.unwrap();
let mut reader = s.read("mykey");
let mut buf = Vec::new();
let ret = block_on_external_io(reader.read_to_end(&mut buf));
assert!(ret.unwrap() == 0);
assert!(buf.is_empty());
}
#[test]
#[cfg(FALSE)]
// FIXME: enable this (or move this to an integration test) if we've got a
// reliable way to test s3 (rusoto_mock requires custom logic to verify the
// body stream which itself can have bug)
fn test_real_s3_storage() {
use std::f64::INFINITY;
use tikv_util::time::Limiter;
let mut s3 = Config::default();
s3.set_endpoint("http://127.0.0.1:9000".to_owned());
s3.set_bucket("bucket".to_owned());
s3.set_prefix("prefix".to_owned());
s3.set_access_key("93QZ01QRBYQQXC37XHZV".to_owned());
s3.set_secret_access_key("N2VcI4Emg0Nm7fDzGBMJvguHHUxLGpjfwt2y4+vJ".to_owned());
s3.set_force_path_style(true);
let limiter = Limiter::new(INFINITY);
let storage = S3Storage::new(&s3).unwrap();
const LEN: usize = 1024 * 1024 * 4;
static CONTENT: [u8; LEN] = [50_u8; LEN];
storage
.write(
"huge_file",
Box::new(limiter.limit(&CONTENT[..])),
LEN as u64,
)
.unwrap();
let mut reader = storage.read("huge_file");
let mut buf = Vec::new();
block_on_external_io(reader.read_to_end(&mut buf)).unwrap();
assert_eq!(buf.len(), LEN);
assert_eq!(buf.iter().position(|b| *b != 50_u8), None);
}
}<|fim▁end|>
|
);
let s = S3Storage::with_request_dispatcher(&config, dispatcher).unwrap();
s.write(
|
<|file_name|>UpgradeVersion.ts<|end_file_name|><|fim▁begin|>import {EditProject} from '@atomist/rug/operations/ProjectEditor'
import {Project, Xml} from '@atomist/rug/model/Core'
import { Pattern, RugOperation } from '@atomist/rug/operations/RugOperation'
import {PathExpression,PathExpressionEngine,TextTreeNode} from '@atomist/rug/tree/PathExpression'
import { Editor, Tags, Parameter } from '@atomist/rug/operations/Decorators'
/*
Return a path expression to match the version of a particular dependency, if found
<dependencies> ...
<dependency>
<groupId>io.cucumber</groupId>
<artifactId>gherkin</artifactId>
<version>4.0.0</version>
</dependency>
*/
export function versionOfDependency(group: string, artifact: string) {
return new PathExpression<TextTreeNode,TextTreeNode>(
`/*[@name='pom.xml']/XmlFile()/project/dependencies/dependency<|fim▁hole|> [/artifactId//TEXT[@value='${artifact}']]
/version//TEXT
`
)
}
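// Illustrative standalone use of the expression above (the new version
// string is hypothetical; the editor below does the same wiring):
//
// eng.with<TextTreeNode>(project, versionOfDependency('io.cucumber', 'gherkin'),
// v => v.update('4.1.0'))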
@Editor("UpgradeVersion", "Find and upgrade POM version")
export class UpgradeVersion implements EditProject {
@Parameter({pattern: Pattern.group_id, description: "Group to match"})
group: string
@Parameter({pattern: Pattern.artifact_id, description: "Artifact to match"})
artifact: string
@Parameter({pattern: Pattern.semantic_version, description: "Version to upgrade to"})
desiredVersion: string
edit(project: Project) {
let eng: PathExpressionEngine = project.context.pathExpressionEngine;
let search = versionOfDependency(this.group, this.artifact)
eng.with<TextTreeNode>(project, search, version => {
if (version.value() != this.desiredVersion) {
//console.log(`Updated to desired version ${this.desiredVersion}`)
version.update(this.desiredVersion)
}
})
}
}
export const uv = new UpgradeVersion();<|fim▁end|>
|
[/groupId//TEXT[@value='${group}']]
|
<|file_name|>job.rs<|end_file_name|><|fim▁begin|>use std::io::{self, BufRead, Read};
use std::marker::PhantomData;
use std::ops::Deref;
use std::ptr;
use crate::{raw, Error};
pub struct JobDriver<R> {
input: R,
job: Job,
input_ended: bool,
}
pub struct Job(pub *mut raw::rs_job_t);
// Wrapper around rs_buffers_t.
struct Buffers<'a> {
inner: raw::rs_buffers_t,
_phantom: PhantomData<&'a u8>,
}
impl<R: BufRead> JobDriver<R> {
pub fn new(input: R, job: Job) -> Self {
JobDriver {
input,
job,
input_ended: false,
}
}
pub fn into_inner(self) -> R {
self.input
}
/// Complete the job by working without an output buffer.
///
/// If the job needs to write some data, an `ErrorKind::WouldBlock` error is returned.
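///
/// A sketch of the intended call pattern (illustrative; assumes `job` wraps
/// a librsync job that produces no output, e.g. one created with
/// `rs_loadsig_begin`):
///
/// ```ignore
/// let mut driver = JobDriver::new(reader, job);
/// driver.consume_input()?; // drains `reader`; WouldBlock means output is pending
/// ```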
pub fn consume_input(&mut self) -> io::Result<()> {
loop {
let (res, read, cap) = {
let readbuf = self.input.fill_buf()?;
let cap = readbuf.len();
if cap == 0 {
self.input_ended = true;
}
// work
let mut buffers = Buffers::with_no_out(readbuf, self.input_ended);
let res = unsafe { raw::rs_job_iter(*self.job, buffers.as_raw()) };
let read = cap - buffers.available_input();
(res, read, cap - read)
};
// update read size<|fim▁hole|> self.input.consume(read);
// determine result
// NOTE: this should be done here, after the input buffer update, because we need to
// know if the possible RS_BLOCKED result is due to a full input, or to an empty output
// buffer
match res {
raw::RS_DONE => (),
raw::RS_BLOCKED => {
if cap > 0 {
// the block is due to a missing output buffer
return Err(io::Error::new(
io::ErrorKind::WouldBlock,
"cannot consume input without an output buffer",
));
}
}
_ => {
let err = Error::from(res);
return Err(io::Error::new(io::ErrorKind::Other, err));
}
};
if self.input_ended {
return Ok(());
}
}
}
}
impl<R: BufRead> Read for JobDriver<R> {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
let mut out_pos = 0;
let mut out_cap = buf.len();
loop {
let (res, read, written) = {
let readbuf = self.input.fill_buf()?;
let cap = readbuf.len();
if cap == 0 {
self.input_ended = true;
}
// work
let mut buffers = Buffers::new(readbuf, &mut buf[out_pos..], self.input_ended);
let res = unsafe { raw::rs_job_iter(*self.job, buffers.as_raw()) };
if res != raw::RS_DONE && res != raw::RS_BLOCKED {
let err = Error::from(res);
return Err(io::Error::new(io::ErrorKind::Other, err));
}
let read = cap - buffers.available_input();
let written = out_cap - buffers.available_output();
(res, read, written)
};
// update read size
self.input.consume(read);
// update write size
out_pos += written;
out_cap -= written;
if out_cap == 0 || res == raw::RS_DONE {
return Ok(out_pos);
}
}
}
}
unsafe impl Send for Job {}
impl Deref for Job {
type Target = *mut raw::rs_job_t;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl Drop for Job {
fn drop(&mut self) {
unsafe {
if !self.0.is_null() {
raw::rs_job_free(self.0);
}
}
}
}
impl<'a> Buffers<'a> {
pub fn new(in_buf: &'a [u8], out_buf: &'a mut [u8], eof_in: bool) -> Self {
Buffers {
inner: raw::rs_buffers_t {
next_in: in_buf.as_ptr() as *const i8,
avail_in: in_buf.len(),
eof_in: if eof_in { 1 } else { 0 },
next_out: out_buf.as_mut_ptr() as *mut i8,
avail_out: out_buf.len(),
},
_phantom: PhantomData,
}
}
pub fn with_no_out(in_buf: &'a [u8], eof_in: bool) -> Self {
Buffers {
inner: raw::rs_buffers_t {
next_in: in_buf.as_ptr() as *const i8,
avail_in: in_buf.len(),
eof_in: if eof_in { 1 } else { 0 },
next_out: ptr::null_mut(),
avail_out: 0,
},
_phantom: PhantomData,
}
}
pub fn as_raw(&mut self) -> *mut raw::rs_buffers_t {
&mut self.inner
}
pub fn available_input(&self) -> usize {
self.inner.avail_in
}
pub fn available_output(&self) -> usize {
self.inner.avail_out
}
}<|fim▁end|>
| |
<|file_name|>dimensions.go<|end_file_name|><|fim▁begin|>package pdf417
import "math"
const (
minCols = 2
maxCols = 30
maxRows = 30
minRows = 2
moduleHeight = 2
preferred_ratio = 3.0
)
func calculateNumberOfRows(m, k, c int) int {
r := ((m + 1 + k) / c) + 1
if c*r >= (m + 1 + k + c) {
r--
}
return r
}
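// For example (illustrative numbers): with m=30 data words, k=8 ECC words
// and c=5 columns, r = ((30+1+8)/5)+1 = 8; since c*r = 40 < m+1+k+c = 44,
// the row count stays at 8.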
func calcDimensions(dataWords, eccWords int) (cols, rows int) {
ratio := 0.0
cols = 0
rows = 0
for c := minCols; c <= maxCols; c++ {
r := calculateNumberOfRows(dataWords, eccWords, c)
if r < minRows {
break
}
if r > maxRows {
continue
}
newRatio := float64(17*c+69) / float64(r*moduleHeight)
if rows != 0 && math.Abs(newRatio-preferred_ratio) > math.Abs(ratio-preferred_ratio) {
continue
}
ratio = newRatio
cols = c
rows = r
}
<|fim▁hole|> rows = minRows
cols = minCols
}
}
return
}<|fim▁end|>
|
if rows == 0 {
r := calculateNumberOfRows(dataWords, eccWords, minCols)
if r < minRows {
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! stal-rs
//! ====
//!
//! Set algebra solver for Redis in Rust, based on
//! [Stal](https://github.com/soveran/stal).
//!
//! Description
//! -----------
//!
//! `stal-rs` provide set operations and resolves them in [Redis][redis].
//!
//! Usage
//! -----
//!
//! `stal-rs` has no dependencies. It produces a vector of Redis operations that
//! have to be run by the user.
//!
//! ```rust
//! extern crate stal;
//!
//! let foobar = stal::Set::Inter(vec![stal::Set::Key(b"foo".to_vec()), stal::Set::Key(b"bar".to_vec())]);
//! let foobar_nobaz = stal::Set::Diff(vec![foobar, stal::Set::Key(b"baz".to_vec())]);
//! let foobar_nobaz_andqux = stal::Set::Union(vec![stal::Set::Key(b"qux".to_vec()), foobar_nobaz]);
//!
//! assert_eq!(
//! stal::Stal::new("SMEMBERS".to_string(), foobar_nobaz_andqux).solve(),
//! (
//! vec![
//! vec![b"MULTI".to_vec()],
//! vec![b"SINTERSTORE".to_vec(), b"stal:2".to_vec(), b"foo".to_vec(), b"bar".to_vec()],
//! vec![b"SDIFFSTORE".to_vec(), b"stal:1".to_vec(), b"stal:2".to_vec(), b"baz".to_vec()],
//! vec![b"SUNIONSTORE".to_vec(), b"stal:0".to_vec(), b"qux".to_vec(), b"stal:1".to_vec()],
//! vec![b"SMEMBERS".to_vec(), b"stal:0".to_vec()],
//! vec![b"DEL".to_vec(), b"stal:0".to_vec(), b"stal:1".to_vec(), b"stal:2".to_vec()],
//! vec![b"EXEC".to_vec()],
//! ],
//! 4
//! ));
//! ```
//!
//! `stal-rs` translates the internal calls to `SUNION`, `SDIFF` and
//! `SINTER` into `SDIFFSTORE`, `SINTERSTORE` and `SUNIONSTORE` to
//! perform the underlying operations, and it takes care of generating
//! and deleting any temporary keys.
//!
//! The outmost command can be any set operation, for example:
//!
//! ```rust
//! extern crate stal;
//! let myset = stal::Set::Key(b"my set".to_vec());
//! stal::Stal::new("SCARD".to_string(), myset).solve();
//! ```
//!
//! If you want to preview the commands `Stal` will send to generate
//! the results, you can use `Stal.explain`:
//!
//! ```rust
//! extern crate stal;
//!
//! assert_eq!(
//! stal::Stal::new("SMEMBERS".to_string(),
//! stal::Set::Inter(vec![
//! stal::Set::Union(vec![
//! stal::Set::Key(b"foo".to_vec()),
//! stal::Set::Key(b"bar".to_vec()),
//! ]),
//! stal::Set::Key(b"baz".to_vec()),
//! ])
//! ).explain(),
//! vec![
//! vec![b"SUNIONSTORE".to_vec(), b"stal:1".to_vec(), b"foo".to_vec(), b"bar".to_vec()],
//! vec![b"SINTERSTORE".to_vec(), b"stal:0".to_vec(), b"stal:1".to_vec(), b"baz".to_vec()],
//! vec![b"SMEMBERS".to_vec(), b"stal:0".to_vec()],
//! ]
//! )
//! ```
//!
//! All commands are wrapped in a `MULTI/EXEC` transaction.
//!
//! [redis]: http://redis.io
#![crate_name = "stal"]
#![crate_type = "lib"]
/// A set of values. It can be generated from a Redis key or from a set
/// operation based on other sets.
#[derive(Debug, Clone)]
pub enum Set {
/// A key
Key(Vec<u8>),
/// All the elements in any of the provided sets
Union(Vec<Set>),
/// All the elements in all the sets
Inter(Vec<Set>),
/// All the elements in the first set that are not in the other sets
Diff(Vec<Set>),
}
use Set::*;
impl Set {
/// Gets the commands to get a list of ids for this set
pub fn into_ids(self) -> Stal {
let (op, sets) = match self {
Key(_) => return Stal::from_template(vec![b"SMEMBERS".to_vec(), vec![]], vec![(self, 1)]),
Union(sets) => ("SUNION", sets),
Inter(sets) => ("SINTER", sets),
Diff(sets) => ("SDIFF", sets),
};
let mut command = vec![op.as_bytes().to_vec()];
command.extend(sets.iter().map(|_| vec![]).collect::<Vec<_>>());
let mut setv = vec![];
let mut i = 1;
for set in sets.into_iter() {
setv.push((set, i));
i += 1;
}
Stal::from_template(command, setv)
}
/// Gets the commands to get a list of ids for this set
pub fn ids(&self) -> Stal {
let (op, sets) = match *self {
Key(_) => return Stal::from_template(vec![b"SMEMBERS".to_vec(), vec![]], vec![(self.clone(), 1)]),
Union(ref sets) => ("SUNION", sets),
Inter(ref sets) => ("SINTER", sets),
Diff(ref sets) => ("SDIFF", sets),
};
let mut command = vec![op.as_bytes().to_vec()];
command.extend(sets.iter().map(|_| vec![]).collect::<Vec<_>>());
let mut setv = vec![];
for i in 0..sets.len() {
setv.push((sets[i].clone(), i + 1));
}
Stal::from_template(command, setv)
}
/// Maps the operation to its Redis command name.
fn command(&self) -> &'static str {
match *self {
Key(_) => unreachable!(),
Union(_) => "SUNIONSTORE",
Inter(_) => "SINTERSTORE",
Diff(_) => "SDIFFSTORE",
}
}
/// Appends the operation to `ops` and any temporary id created to `ids`.
/// Returns the key representing the set.
pub fn convert(&self, ids: &mut Vec<String>, ops: &mut Vec<Vec<Vec<u8>>>) -> Vec<u8> {
let sets = match *self {
Key(ref k) => return k.clone(),
Union(ref sets) => sets,
Inter(ref sets) => sets,
Diff(ref sets) => sets,
};
let mut op = Vec::with_capacity(2 + sets.len());
let id = format!("stal:{}", ids.len());
let r = id.as_bytes().to_vec();
ids.push(id);
op.push(self.command().as_bytes().to_vec());
op.push(r.clone());
op.extend(sets.into_iter().map(|s| s.convert(ids, ops)));
ops.push(op);
r
}
}
/// An operation to be executed on a set
#[derive(Debug)]
pub struct Stal {
/// A Redis command
command: Vec<Vec<u8>>,
/// Set in which execute the operation
sets: Vec<(Set, usize)>,
}
impl Stal {
pub fn new(operation: String, set: Set) -> Self {
Stal {
command: vec![operation.as_bytes().to_vec(), vec![]],
sets: vec![(set, 1)],
}
}
/// Takes an arbitrary command that uses one or more sets. The `command`
/// must have placeholders where the set keys should go. Each element
/// in `sets` specifies the position in the `command`.
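///
/// For example (illustrative; `set_a` and `set_b` stand for any `Set`
/// values), to run `SINTERSTORE dest <a> <b>` with the two set keys filling
/// positions 2 and 3 of the command:
///
/// ```ignore
/// let cmd = vec![b"SINTERSTORE".to_vec(), b"dest".to_vec(), vec![], vec![]];
/// let op = Stal::from_template(cmd, vec![(set_a, 2), (set_b, 3)]);
/// ```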
pub fn from_template(command: Vec<Vec<u8>>, sets: Vec<(Set, usize)>) -> Self {
Stal {
command: command,
sets: sets,
}
}
fn add_ops(&self, ids: &mut Vec<String>, ops: &mut Vec<Vec<Vec<u8>>>) {
let mut command = self.command.clone();
for args in self.sets.iter() {
command.push(args.0.convert(ids, ops));
command.swap_remove(args.1);
}
ops.push(command);
}
/// Returns a list of operations to run. For debug only.
pub fn explain(&self) -> Vec<Vec<Vec<u8>>> {
let mut ids = vec![];
let mut ops = vec![];
self.add_ops(&mut ids, &mut ops);
ops
}
/// Returns a list of operations, wrapped in a MULTI/EXEC transaction.
/// The last operation is always EXEC, and the returned `usize` is the index
/// of the operation whose result is the value of the query.
pub fn solve(&self) -> (Vec<Vec<Vec<u8>>>, usize) {
pub fn solve(&self) -> (Vec<Vec<Vec<u8>>>, usize) {<|fim▁hole|> let pos = ops.len() - 1;
if ids.len() > 0 {
let mut del = vec![b"DEL".to_vec()];
del.extend(ids.into_iter().map(|x| x.as_bytes().to_vec()));
ops.push(del);
}
ops.push(vec![b"EXEC".to_vec()]);
(ops, pos)
}
}<|fim▁end|>
|
let mut ids = vec![];
let mut ops = vec![vec![b"MULTI".to_vec()]];
self.add_ops(&mut ids, &mut ops);
|
<|file_name|>profiler-plugin.js<|end_file_name|><|fim▁begin|>/* profiler-plugin.js is part of Aloha Editor project http://aloha-editor.org
*
* Aloha Editor is a WYSIWYG HTML5 inline editing library and editor.
* Copyright (c) 2010-2012 Gentics Software GmbH, Vienna, Austria.
* Contributors http://aloha-editor.org/contribution.php
*
* Aloha Editor is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or any later version.
*
* Aloha Editor is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* As an additional permission to the GNU GPL version 2, you may distribute
* non-source (e.g., minimized or compacted) forms of the Aloha-Editor
* source code without the copy of the GNU GPL normally required,
* provided you include this license notice and a URL through which
* recipients can access the Corresponding Source.
*/
/* Aloha Profiler
* --------------
* Provides a useful interface to profile some of Aloha components and their
* methods.
*
* Potentially process intensive methods:
* Aloha.Profiler.profileAlohaComponent('Markup.preProcessKeyStrokes')
* Aloha.Profiler.profileAlohaComponent('Selection._updateSelection')
*/
window.define( [
'aloha/core',
'aloha/plugin',
'aloha/editable',
// 'aloha/sidebar',
'aloha/selection',
'aloha/markup',
'aloha/contenthandlermanager',
'aloha/floatingmenu',
'aloha/console',
'css!profiler/css/profiler'
], function( Aloha, Plugin, /* Sidebar */ Editable, Selection, Markup,
ContentHandlerManager, FloatingMenu, console ) {
// 'caller', 'callee', and 'arguments' properties may not be accessed on
// strict mode functions or the arguments objects for calls to them
//
var jQuery = Aloha.jQuery,
profiledFunctions = [],
// get the arguments string literal of this function, and split it into
// an array of names
argsStr = ( /function[^\(]*\(([^\)]+)/g ).exec( arguments.callee.toString() ),
argNames = argsStr ? argsStr[1].replace( /^\s+|\s+$/g, '' ).split( /\,\s*/ ) : [],
args = Array.prototype.slice.call( arguments );
/**
* @param {String} path dot separated path to resolve inside a given
* object or browser window
* @param {?Object} object in which to resolve a path. If no object is
* passed, the browser window object will be used instead
* @return {?} Object
*/
function resolvePath(path, obj) {
if ( typeof path !== 'string' ) {
return path;
}
if ( !obj || typeof obj !== 'object' ) {
obj = window;
}
var parts = path.split( '.' ),
i = 0,
j = parts.length;
for ( ; i < j; ++i ) {
obj = obj[ parts[ i ] ];
if ( typeof obj === 'undefined' ) {
console.error(
'Aloha.Profiler',
'Property "' + parts[ i ] + '" does not exist' +
<|fim▁hole|> }
}
return obj;
};
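// Illustrative behaviour (not in the original source): a call such as
// resolvePath( 'Profiler.alohaComponents', Aloha ) walks one property per
// dot-separated segment and returns Aloha.Profiler.alohaComponents, while a
// missing segment logs an error and yields null.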
function parseObjectPath( path, obj ) {
if ( typeof path !== 'string' ) {
return null;
}
var parts = path.split( '.' ),
pathToProp = parts.slice( 0, Math.max( 1, parts.length - 1 ) ).join( '.' ),
prop;
obj = resolvePath( pathToProp, obj );
if ( !obj ) {
return null;
}
if ( parts.length > 1 ) {
var lastProp = parts[ parts.length - 1 ];
if ( typeof obj[ lastProp ] === 'undefined' ) {
console.error( 'Aloha.Profiler',
'Property "' + lastProp + '" does not exist in object ' +
pathToProp );
} else {
prop = lastProp;
}
}
return {
obj : obj[ prop ],
path : path,
parentObj : obj,
propName : prop
};
};
var panel;
function initSidebarPanel(sidebar) {
sidebar.addPanel( {
id : 'aloha-devtool-profiler-panel',
title : 'Aloha Profiler',
expanded : true,
activeOn : true,
content : '' +
'<div id="aloha-devtool-profiler-container">' +
'<input id="aloha-devtool-profiler-input" ' +
'value="Aloha.Profiler.profileAlohaComponent(\'Markup.preProcessKeyStrokes\')" />' +
'<ul id="aloha-devtool-profiler-console"></ul>' +
'</div>',
onInit : function() {
this.content.find( 'input#aloha-devtool-profiler-input' ).keydown( function( event ) {
// Handle ENTER
if ( event.keyCode === 13 ) {
var input = jQuery( this );
var value = input.val();
if ( value ) {
eval( value );
PanelConsole.log( value );
input.val( '' );
}
}
} );
}
} );
sidebar.show().open();
};
var PanelConsole = {
log: function() {
jQuery( '#aloha-devtool-profiler-console' )
.prepend( '<li>' +
Array.prototype.slice.call( arguments ).join( ' ' ) +
'</li>' );
}
}
Aloha.Profiler = Plugin.create( 'profiler', {
/**
* Expose all dependencies to allow easy access. eg:
* If the 5th dependency was Markup, then:
* Aloha.Profiler.profile(Aloha.Profiler.alohaObjects[4], 'preProcessKeyStrokes')
* would start profiling the Markup.preProcessKeyStrokes method.
*/
loadedDependencies: Array.prototype.slice.call( arguments ),
/**
* Provides a better interface to access various components of Aloha.
* eg: Aloha.Profiler.profile(Aloha.Profiler.alohaComponents[ 'Markup' ], 'preProcessKeyStrokes')
*/
alohaComponents: {},
panel: null,
/**
* Initializes Profiler plugin by populating alohaComponents with all
* arguments of our define function, mapping name, to object
*/
init: function() {
var j = argNames.length;
while ( --j >= 0 ) {
this.alohaComponents[ argNames[ j ] ] = args[ j ];
}
var that = this;
Aloha.ready( function() {
if ( Aloha.Sidebar && Aloha.Sidebar.right ) {
that.panel = initSidebarPanel( Aloha.Sidebar.right );
}
} );
},
log: function() {
PanelConsole.log.apply( PanelConsole, arguments );
},
/**
* Shortcut to profile one of the Aloha components that was required by
* Aloha Profiler.
*
* @param {String} path
* @param {String} fnName
*/
profileAlohaComponent: function( path, fnName ) {
var parts = parseObjectPath( path, this.alohaComponents );
return this.profile( parts.parentObj, fnName || parts.propName );
},
/**
* @param {(Object|String)} obj object or path to object that contains
* the function we want to profile. Or the path to the
* function itself
* @param {String} fnName name of function inside obj, which we want to
* profile
* @param {?Function(Function, Array):Boolean} intercept functiont to
* call each time this method is invoked
*/
profile: function( obj, fnName, intercept ) {
var path,
parts,
objIndex = -1,
i;
if ( typeof obj === 'string' ) {
parts = parseObjectPath( obj );
obj = parts.parentObj;
path = parts.path + ( fnName ? '.' + fnName : '' );
if ( parts.propName ) {
if ( typeof parts.obj === 'function' ) {
fnName = parts.propName;
} else if ( parts.obj === 'object' ) {
obj = parts.obj;
}
}
}
if ( !obj || !fnName || typeof obj[ fnName ] !== 'function' ) {
return;
}
for ( i = 0; i < profiledFunctions.length; ++i ) {
if ( profiledFunctions[ i ] === obj ) {
objIndex = i;
if ( profiledFunctions[ i ][ fnName ] ) {
return;
}
}
}
var fn = obj[ fnName ];
var that = this;
// In IE typeof window.console.log returns "object!!!"
if ( window.console && window.console.log ) {
if ( objIndex === -1 ) {
objIndex = profiledFunctions.push( obj ) - 1;
}
profiledFunctions[ objIndex ][ fnName ] = fn;
obj[ fnName ] = function() {
if ( typeof intercept === 'function' ) {
intercept( fn, arguments );
}
// window.console.time( fnName );
var start = +( new Date() );
var returnValue = fn.apply( obj, arguments );
// window.console.timeEnd( fnName );
that.log( ( path || fnName ) + ': ' +
( ( new Date() ) - start ) + 'ms' );
return returnValue;
};
}
},
/**
* @return {String} "Aloha.Profiler"
*/
toString: function() {
return 'Aloha.Profiler';
}
} );
return Aloha.Profiler;
} );<|fim▁end|>
|
( i ? ' in object ' + parts.slice( 0, i ).join( '.' ) : '' )
);
return null;
|
<|file_name|>swap_bytes.rs<|end_file_name|><|fim▁begin|>#![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
// macro_rules! int_impl {
// ($ActualT:ty, $UnsignedT:ty, $BITS:expr,
// $add_with_overflow:path,
// $sub_with_overflow:path,
// $mul_with_overflow:path) => {
// /// Returns the smallest value that can be represented by this integer type.
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn min_value() -> Self {
// (-1 as Self) << ($BITS - 1)
// }
//
// /// Returns the largest value that can be represented by this integer type.
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn max_value() -> Self {
// let min = Self::min_value(); !min
// }
//
// /// Converts a string slice in a given base to an integer.
// ///
// /// Leading and trailing whitespace represent an error.
// ///
// /// # Examples
// ///
// /// ```
// /// assert_eq!(u32::from_str_radix("A", 16), Ok(10));
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[allow(deprecated)]
// pub fn from_str_radix(src: &str, radix: u32) -> Result<Self, ParseIntError> {
// from_str_radix(src, radix)
// }
//
// /// Returns the number of ones in the binary representation of `self`.
// ///
// /// # Examples
// ///
// /// ```rust
// /// let n = 0b01001100u8;
// ///
// /// assert_eq!(n.count_ones(), 3);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn count_ones(self) -> u32 { (self as $UnsignedT).count_ones() }
//
// /// Returns the number of zeros in the binary representation of `self`.
// ///
// /// # Examples
// ///
// /// ```rust
// /// let n = 0b01001100u8;
// ///
// /// assert_eq!(n.count_zeros(), 5);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn count_zeros(self) -> u32 {
// (!self).count_ones()
// }
//
// /// Returns the number of leading zeros in the binary representation
// /// of `self`.
// ///
// /// # Examples
// ///
// /// ```rust
// /// let n = 0b0101000u16;
// ///
// /// assert_eq!(n.leading_zeros(), 10);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn leading_zeros(self) -> u32 {
// (self as $UnsignedT).leading_zeros()
// }
//
// /// Returns the number of trailing zeros in the binary representation
// /// of `self`.
// ///
// /// # Examples
// ///
// /// ```rust
// /// let n = 0b0101000u16;
// ///
// /// assert_eq!(n.trailing_zeros(), 3);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn trailing_zeros(self) -> u32 {
// (self as $UnsignedT).trailing_zeros()
// }
//
// /// Shifts the bits to the left by a specified amount, `n`,
// /// wrapping the truncated bits to the end of the resulting integer.
// ///
// /// # Examples
// ///
// /// ```rust
// /// let n = 0x0123456789ABCDEFu64;
// /// let m = 0x3456789ABCDEF012u64;
// ///
// /// assert_eq!(n.rotate_left(12), m);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn rotate_left(self, n: u32) -> Self {
// (self as $UnsignedT).rotate_left(n) as Self
// }
//
// /// Shifts the bits to the right by a specified amount, `n`,
// /// wrapping the truncated bits to the beginning of the resulting
// /// integer.
// ///
// /// # Examples
// ///
// /// ```rust
// /// let n = 0x0123456789ABCDEFu64;
// /// let m = 0xDEF0123456789ABCu64;
// ///
// /// assert_eq!(n.rotate_right(12), m);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn rotate_right(self, n: u32) -> Self {
// (self as $UnsignedT).rotate_right(n) as Self
// }
//
// /// Reverses the byte order of the integer.
// ///
// /// # Examples
// ///
// /// ```rust
// /// let n = 0x0123456789ABCDEFu64;
// /// let m = 0xEFCDAB8967452301u64;
// ///
// /// assert_eq!(n.swap_bytes(), m);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn swap_bytes(self) -> Self {
// (self as $UnsignedT).swap_bytes() as Self
// }
//
// /// Converts an integer from big endian to the target's endianness.
// ///
// /// On big endian this is a no-op. On little endian the bytes are
// /// swapped.
// ///
// /// # Examples
// ///
// /// ```rust
// /// let n = 0x0123456789ABCDEFu64;
// ///
// /// if cfg!(target_endian = "big") {
// /// assert_eq!(u64::from_be(n), n)
// /// } else {
// /// assert_eq!(u64::from_be(n), n.swap_bytes())
// /// }
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn from_be(x: Self) -> Self {
// if cfg!(target_endian = "big") { x } else { x.swap_bytes() }
// }
//
// /// Converts an integer from little endian to the target's endianness.
// ///
// /// On little endian this is a no-op. On big endian the bytes are
// /// swapped.
// ///
// /// # Examples
// ///
// /// ```rust
// /// let n = 0x0123456789ABCDEFu64;
// ///
// /// if cfg!(target_endian = "little") {
// /// assert_eq!(u64::from_le(n), n)
// /// } else {
// /// assert_eq!(u64::from_le(n), n.swap_bytes())
// /// }
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn from_le(x: Self) -> Self {
// if cfg!(target_endian = "little") { x } else { x.swap_bytes() }
// }
//
// /// Converts `self` to big endian from the target's endianness.
// ///
// /// On big endian this is a no-op. On little endian the bytes are
// /// swapped.
// ///
// /// # Examples
// ///
// /// ```rust
// /// let n = 0x0123456789ABCDEFu64;
// ///
// /// if cfg!(target_endian = "big") {
// /// assert_eq!(n.to_be(), n)
// /// } else {
// /// assert_eq!(n.to_be(), n.swap_bytes())
// /// }
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn to_be(self) -> Self { // or not to be?
// if cfg!(target_endian = "big") { self } else { self.swap_bytes() }
// }
//
// /// Converts `self` to little endian from the target's endianness.
// ///
// /// On little endian this is a no-op. On big endian the bytes are
// /// swapped.
// ///
// /// # Examples
// ///
// /// ```rust
// /// let n = 0x0123456789ABCDEFu64;
// ///
// /// if cfg!(target_endian = "little") {
// /// assert_eq!(n.to_le(), n)
// /// } else {
// /// assert_eq!(n.to_le(), n.swap_bytes())
// /// }
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn to_le(self) -> Self {
// if cfg!(target_endian = "little") { self } else { self.swap_bytes() }
// }
//
// /// Checked integer addition. Computes `self + other`, returning `None`
// /// if overflow occurred.
// ///
// /// # Examples
// ///
// /// ```rust
// /// assert_eq!(5u16.checked_add(65530), Some(65535));
// /// assert_eq!(6u16.checked_add(65530), None);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn checked_add(self, other: Self) -> Option<Self> {
// checked_op!($ActualT, $add_with_overflow, self, other)
// }
//
// /// Checked integer subtraction. Computes `self - other`, returning
// /// `None` if underflow occurred.
// ///
// /// # Examples
// ///
// /// ```rust
// /// assert_eq!((-127i8).checked_sub(1), Some(-128));
// /// assert_eq!((-128i8).checked_sub(1), None);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn checked_sub(self, other: Self) -> Option<Self> {
// checked_op!($ActualT, $sub_with_overflow, self, other)
// }
//
// /// Checked integer multiplication. Computes `self * other`, returning
// /// `None` if underflow or overflow occurred.
// ///
// /// # Examples
// ///
// /// ```rust<|fim▁hole|> // /// assert_eq!(5u8.checked_mul(52), None);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn checked_mul(self, other: Self) -> Option<Self> {
// checked_op!($ActualT, $mul_with_overflow, self, other)
// }
//
// /// Checked integer division. Computes `self / other`, returning `None`
// /// if `other == 0` or the operation results in underflow or overflow.
// ///
// /// # Examples
// ///
// /// ```rust
// /// assert_eq!((-127i8).checked_div(-1), Some(127));
// /// assert_eq!((-128i8).checked_div(-1), None);
// /// assert_eq!((1i8).checked_div(0), None);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn checked_div(self, v: Self) -> Option<Self> {
// match v {
// 0 => None,
// -1 if self == Self::min_value()
// => None,
// v => Some(self / v),
// }
// }
//
// /// Saturating integer addition. Computes `self + other`, saturating at
// /// the numeric bounds instead of overflowing.
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn saturating_add(self, other: Self) -> Self {
// match self.checked_add(other) {
// Some(x) => x,
// None if other >= Self::zero() => Self::max_value(),
// None => Self::min_value(),
// }
// }
//
// /// Saturating integer subtraction. Computes `self - other`, saturating
// /// at the numeric bounds instead of overflowing.
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn saturating_sub(self, other: Self) -> Self {
// match self.checked_sub(other) {
// Some(x) => x,
// None if other >= Self::zero() => Self::min_value(),
// None => Self::max_value(),
// }
// }
//
// /// Wrapping (modular) addition. Computes `self + other`,
// /// wrapping around at the boundary of the type.
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn wrapping_add(self, rhs: Self) -> Self {
// unsafe {
// intrinsics::overflowing_add(self, rhs)
// }
// }
//
// /// Wrapping (modular) subtraction. Computes `self - other`,
// /// wrapping around at the boundary of the type.
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn wrapping_sub(self, rhs: Self) -> Self {
// unsafe {
// intrinsics::overflowing_sub(self, rhs)
// }
// }
//
// /// Wrapping (modular) multiplication. Computes `self *
// /// other`, wrapping around at the boundary of the type.
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn wrapping_mul(self, rhs: Self) -> Self {
// unsafe {
// intrinsics::overflowing_mul(self, rhs)
// }
// }
//
// /// Wrapping (modular) division. Computes `floor(self / other)`,
// /// wrapping around at the boundary of the type.
// ///
// /// The only case where such wrapping can occur is when one
// /// divides `MIN / -1` on a signed type (where `MIN` is the
// /// negative minimal value for the type); this is equivalent
// /// to `-MIN`, a positive value that is too large to represent
// /// in the type. In such a case, this function returns `MIN`
// /// itself..
// #[unstable(feature = "core", since = "1.0.0")]
// #[inline(always)]
// pub fn wrapping_div(self, rhs: Self) -> Self {
// self.overflowing_div(rhs).0
// }
//
// /// Wrapping (modular) remainder. Computes `self % other`,
// /// wrapping around at the boundary of the type.
// ///
// /// Such wrap-around never actually occurs mathematically;
// /// implementation artifacts make `x % y` illegal for `MIN /
// /// -1` on a signed type illegal (where `MIN` is the negative
// /// minimal value). In such a case, this function returns `0`.
// #[unstable(feature = "core", since = "1.0.0")]
// #[inline(always)]
// pub fn wrapping_rem(self, rhs: Self) -> Self {
// self.overflowing_rem(rhs).0
// }
//
// /// Wrapping (modular) negation. Computes `-self`,
// /// wrapping around at the boundary of the type.
// ///
// /// The only case where such wrapping can occur is when one
// /// negates `MIN` on a signed type (where `MIN` is the
// /// negative minimal value for the type); this is a positive
// /// value that is too large to represent in the type. In such
// /// a case, this function returns `MIN` itself.
// #[unstable(feature = "core", since = "1.0.0")]
// #[inline(always)]
// pub fn wrapping_neg(self) -> Self {
// self.overflowing_neg().0
// }
//
// /// Panic-free bitwise shift-left; yields `self << mask(rhs)`,
// /// where `mask` removes any high-order bits of `rhs` that
// /// would cause the shift to exceed the bitwidth of the type.
// #[unstable(feature = "core", since = "1.0.0")]
// #[inline(always)]
// pub fn wrapping_shl(self, rhs: u32) -> Self {
// self.overflowing_shl(rhs).0
// }
//
// /// Panic-free bitwise shift-right; yields `self >> mask(rhs)`,
// /// where `mask` removes any high-order bits of `rhs` that
// /// would cause the shift to exceed the bitwidth of the type.
// #[unstable(feature = "core", since = "1.0.0")]
// #[inline(always)]
// pub fn wrapping_shr(self, rhs: u32) -> Self {
// self.overflowing_shr(rhs).0
// }
//
// /// Raises self to the power of `exp`, using exponentiation by squaring.
// ///
// /// # Examples
// ///
// /// ```
// /// let x: i32 = 2; // or any other integer type
// ///
// /// assert_eq!(x.pow(4), 16);
// /// ```
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn pow(self, mut exp: u32) -> Self {
// let mut base = self;
// let mut acc = Self::one();
//
// let mut prev_base = self;
// let mut base_oflo = false;
// while exp > 0 {
// if (exp & 1) == 1 {
// if base_oflo {
// // ensure overflow occurs in the same manner it
// // would have otherwise (i.e. signal any exception
// // it would have otherwise).
// acc = acc * (prev_base * prev_base);
// } else {
// acc = acc * base;
// }
// }
// prev_base = base;
// let (new_base, new_base_oflo) = base.overflowing_mul(base);
// base = new_base;
// base_oflo = new_base_oflo;
// exp /= 2;
// }
// acc
// }
//
// /// Computes the absolute value of `self`.
// ///
// /// # Overflow behavior
// ///
// /// The absolute value of `i32::min_value()` cannot be represented as an
// /// `i32`, and attempting to calculate it will cause an overflow. This
// /// means that code in debug mode will trigger a panic on this case and
// /// optimized code will return `i32::min_value()` without a panic.
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn abs(self) -> Self {
// if self.is_negative() {
// // Note that the #[inline] above means that the overflow
// // semantics of this negation depend on the crate we're being
// // inlined into.
// -self
// } else {
// self
// }
// }
//
// /// Returns a number representing sign of `self`.
// ///
// /// - `0` if the number is zero
// /// - `1` if the number is positive
// /// - `-1` if the number is negative
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn signum(self) -> Self {
// match self {
// n if n > 0 => 1,
// 0 => 0,
// _ => -1,
// }
// }
//
// /// Returns `true` if `self` is positive and `false` if the number
// /// is zero or negative.
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn is_positive(self) -> bool { self > 0 }
//
// /// Returns `true` if `self` is negative and `false` if the number
// /// is zero or positive.
// #[stable(feature = "rust1", since = "1.0.0")]
// #[inline]
// pub fn is_negative(self) -> bool { self < 0 }
// }
// }
// impl i32 {
// int_impl! { i32, u32, 32,
// intrinsics::i32_add_with_overflow,
// intrinsics::i32_sub_with_overflow,
// intrinsics::i32_mul_with_overflow }
// }
macro_rules! swap_bytes_test {
($value:expr, $reverse:expr) => ({
let value: i32 = $value;
let result: i32 = value.swap_bytes();
assert_eq!(result, $reverse);
})
}
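// For instance, swap_bytes_test!(0x00000001, 0x01000000) expands to:
//
// let value: i32 = 0x00000001;
// let result: i32 = value.swap_bytes();
// assert_eq!(result, 0x01000000);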
#[test]
#[allow(overflowing_literals)]
fn swap_bytes_test1() {
swap_bytes_test!( 0x00000001, 0x01000000 );
swap_bytes_test!( 0x00000002, 0x02000000 );
swap_bytes_test!( 0x00000004, 0x04000000 );
swap_bytes_test!( 0x00000008, 0x08000000 );
swap_bytes_test!( 0x00000010, 0x10000000 );
swap_bytes_test!( 0x00000020, 0x20000000 );
swap_bytes_test!( 0x00000040, 0x40000000 );
swap_bytes_test!( 0x00000080, 0x80000000 );
swap_bytes_test!( 0x00000100, 0x00010000 );
swap_bytes_test!( 0x00000200, 0x00020000 );
swap_bytes_test!( 0x00000400, 0x00040000 );
swap_bytes_test!( 0x00000800, 0x00080000 );
swap_bytes_test!( 0x00001000, 0x00100000 );
swap_bytes_test!( 0x00002000, 0x00200000 );
swap_bytes_test!( 0x00004000, 0x00400000 );
swap_bytes_test!( 0x00008000, 0x00800000 );
swap_bytes_test!( 0x00010000, 0x00000100 );
swap_bytes_test!( 0x00020000, 0x00000200 );
swap_bytes_test!( 0x00040000, 0x00000400 );
swap_bytes_test!( 0x00080000, 0x00000800 );
swap_bytes_test!( 0x00100000, 0x00001000 );
swap_bytes_test!( 0x00200000, 0x00002000 );
swap_bytes_test!( 0x00400000, 0x00004000 );
swap_bytes_test!( 0x00800000, 0x00008000 );
swap_bytes_test!( 0x01000000, 0x00000001 );
swap_bytes_test!( 0x02000000, 0x00000002 );
swap_bytes_test!( 0x04000000, 0x00000004 );
swap_bytes_test!( 0x08000000, 0x00000008 );
swap_bytes_test!( 0x10000000, 0x00000010 );
swap_bytes_test!( 0x20000000, 0x00000020 );
swap_bytes_test!( 0x40000000, 0x00000040 );
swap_bytes_test!( 0x80000000, 0x00000080 );
}
}<|fim▁end|>
|
// /// assert_eq!(5u8.checked_mul(51), Some(255));
|
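The test macro above exercises `i32::swap_bytes`, which reverses the order of a value's four bytes. A minimal standalone sketch of the semantics under test, using only stable `std` APIs (the `manual_swap` helper is ours, not part of the row):

```rust
// Standalone illustration of what swap_bytes_test1 above is checking:
// i32::swap_bytes reverses the order of the value's four bytes.
fn manual_swap(v: i32) -> i32 {
    // to_le_bytes yields the bytes LSB-first; reading that array back as
    // big-endian is exactly a byte-order reversal.
    i32::from_be_bytes(v.to_le_bytes())
}

fn main() {
    // Same fixture as the first case in the dataset row.
    assert_eq!(0x00000001_i32.swap_bytes(), 0x01000000);
    assert_eq!(manual_swap(0x00000001), 0x01000000);
    // swap_bytes is an involution: applying it twice is the identity.
    assert_eq!(0x12345678_i32.swap_bytes().swap_bytes(), 0x12345678);
    println!("byte-swap semantics verified");
}
```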
<|file_name|>test_pycparser.py<|end_file_name|><|fim▁begin|>import pycparser
<|fim▁hole|> j = p && r || q;
return j;
}
'''
t = parser.parse( buf, 'x.c' )
return t
if __name__ == "__main__":
t = main_eg()
t.show()<|fim▁end|>
|
def main_eg():
parser = pycparser.CParser()
buf = '''
int main( int argc, char** argv ) {
|
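Each row in this dump follows the same fill-in-the-middle layout: the prompt holds a `<|file_name|>` header, the file's prefix after `<|fim▁begin|>`, a `<|fim▁hole|>` marker, the suffix, and a closing `<|fim▁end|>`; the completion cell holds the missing middle. In the pycparser row above, the completion (`def main_eg():` through the opening of the C snippet) slots back between `import pycparser` and the `j = p && r || q;` line. A minimal sketch of that reassembly, assuming each marker occurs exactly once per row (the function name is ours):

```rust
/// Rebuild the original source file from one dataset row: splice the
/// completion (the "middle") back into the prompt's <|fim▁hole|> slot.
/// A sketch only: it assumes each marker occurs exactly once per row.
fn reassemble(prompt: &str, completion: &str) -> Option<String> {
    let body = prompt.split("<|fim▁begin|>").nth(1)?;   // drop the file-name header
    let body = body.strip_suffix("<|fim▁end|>")?;        // drop the end marker
    let (prefix, suffix) = body.split_once("<|fim▁hole|>")?;
    Some(format!("{prefix}{completion}{suffix}"))
}

fn main() {
    // Abridged stand-in for the pycparser row above.
    let prompt = "<|file_name|>eg.py<|end_file_name|><|fim▁begin|>import pycparser\n<|fim▁hole|>    t = parser.parse(buf, 'x.c')<|fim▁end|>";
    let middle = "def main_eg():\n    parser = pycparser.CParser()\n    buf = '...'\n";
    let full = reassemble(prompt, middle).expect("well-formed row");
    assert!(full.starts_with("import pycparser"));
    assert!(full.contains("def main_eg"));
}
```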
<|file_name|>Accra.py<|end_file_name|><|fim▁begin|>'''tzinfo timezone information for Africa/Accra.'''
from pytz.tzinfo import DstTzInfo
from pytz.tzinfo import memorized_datetime as d
from pytz.tzinfo import memorized_ttinfo as i
class Accra(DstTzInfo):
'''Africa/Accra timezone definition. See datetime.tzinfo for details'''
zone = 'Africa/Accra'
_utc_transition_times = [
d(1,1,1,0,0,0),
d(1918,1,1,0,0,52),
d(1936,9,1,0,0,0),
d(1936,12,30,23,40,0),
d(1937,9,1,0,0,0),
d(1937,12,30,23,40,0),
d(1938,9,1,0,0,0),
d(1938,12,30,23,40,0),
d(1939,9,1,0,0,0),
d(1939,12,30,23,40,0),
d(1940,9,1,0,0,0),
d(1940,12,30,23,40,0),
d(1941,9,1,0,0,0),
d(1941,12,30,23,40,0),
d(1942,9,1,0,0,0),
d(1942,12,30,23,40,0),
]
_transition_info = [
i(-60,0,'LMT'),
i(0,0,'GMT'),
i(1200,1200,'GHST'),
i(0,0,'GMT'),
i(1200,1200,'GHST'),
i(0,0,'GMT'),
i(1200,1200,'GHST'),
i(0,0,'GMT'),
i(1200,1200,'GHST'),
i(0,0,'GMT'),<|fim▁hole|>i(1200,1200,'GHST'),
i(0,0,'GMT'),
i(1200,1200,'GHST'),
i(0,0,'GMT'),
i(1200,1200,'GHST'),
i(0,0,'GMT'),
]
Accra = Accra()<|fim▁end|>
| |
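The Accra row stores the zone as two parallel lists: sorted UTC transition instants and the `(utcoffset, dst, tzname)` info that takes effect at each one; resolving a timestamp is a search for the last transition at or before it. A sketch of that lookup, with stand-in instants rather than the row's real epochs:

```rust
/// (utc_offset_secs, dst_secs, abbreviation) — mirrors the
/// memorized_ttinfo entries in the row above.
#[allow(dead_code)]
struct TtInfo(i64, i64, &'static str);

/// Info in force at `t`: the entry for the last transition <= t.
/// Assumes `transitions` is sorted ascending and parallel to `infos`.
fn info_at<'a>(transitions: &[i64], infos: &'a [TtInfo], t: i64) -> &'a TtInfo {
    let idx = match transitions.binary_search(&t) {
        Ok(i) => i,      // t is exactly a transition instant
        Err(0) => 0,     // before the first transition: local mean time
        Err(i) => i - 1, // between transitions i-1 and i
    };
    &infos[idx]
}

fn main() {
    // Stand-in instants (not real epochs) just to exercise the lookup:
    // LMT until 100, GMT until 200, GHST until 300, GMT afterwards.
    let transitions = [i64::MIN, 100, 200, 300];
    let infos = [
        TtInfo(-52, 0, "LMT"),
        TtInfo(0, 0, "GMT"),
        TtInfo(1200, 1200, "GHST"), // Ghana's historical +20 minute DST
        TtInfo(0, 0, "GMT"),
    ];
    assert_eq!(info_at(&transitions, &infos, 250).2, "GHST");
    assert_eq!(info_at(&transitions, &infos, 1_000).2, "GMT");
}
```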
<|file_name|>global.rs<|end_file_name|><|fim▁begin|>use iron::prelude::*;
use iron::status::Status;
use persistent::State;
use iron::typemap::Key;
use window_service::window::Window;
use rustorm::table::Table;
use std::collections::BTreeMap;
use rustorm::database::DbError;
use rustorm::database::Database;
use rustorm::database::DatabaseDev;
use std::sync::{Arc, RwLock};
use error::ServiceError;
use rustc_serialize::json;
use rustorm::platform::pool;
use rustorm::platform::pool::Platform;
pub struct GlobalPools {
pub cache_map: BTreeMap<String, Cache>, // caches indexed by db_url
}
impl Key for GlobalPools {
type Value = GlobalPools;
}
impl GlobalPools {
/// initialize the pools
pub fn new() -> Self {
GlobalPools { cache_map: BTreeMap::new() }
}
pub fn from_request(req: &mut Request) -> Arc<RwLock<Self>> {
let global = req.get::<State<GlobalPools>>().unwrap();
global
}
pub fn has_cache(&self, db_url: &str) -> bool {
self.cache_map.contains_key(db_url)
}
pub fn has_cached_tables(&self, db_url: &str) -> bool {
match self.get_cache(db_url) {
Some(cache) => cache.tables.is_some(),
None => false,
}
}
/// reset the cache with this url
fn reset_cache(&mut self, db_url: &str) -> Result<(), ServiceError> {
let cache = self.cache_map.remove(db_url);
if let Some(cache) = cache {
println!("removing cache: {:?}", cache.windows);
println!("removing cache: {:?}", cache.tables);
info!("removing cache: {:?}", cache.windows);
info!("removing cache: {:?}", cache.tables);
}
Ok(())
}
pub fn get_cache(&self, db_url: &str) -> Option<&Cache> {
self.cache_map.get(db_url)
}
pub fn get_cached_tables(&self, db_url: &str) -> Option<Vec<Table>> {
match self.get_cache(db_url) {
Some(cache) => {
match cache.tables {
Some(ref tables) => Some(tables.clone()),
None => None,
}
}
None => None,
}
}
pub fn has_cached_windows(&self, db_url: &str) -> bool {
match self.get_cache(db_url) {
Some(cache) => cache.windows.is_some(),
None => false,
}
}
pub fn get_cached_windows(&self, db_url: &str) -> Option<Vec<Window>> {
match self.get_cache(db_url) {
Some(cache) => {
match cache.windows {
Some(ref windows) => Some(windows.clone()),
None => None,
}
}
None => None,
}
}
/// cache this window values to this db_url
pub fn cache_windows(&mut self, db_url: &str, windows: Vec<Window>) -> Result<(), DbError> {
if self.has_cache(db_url) {
let mut cache = self.cache_map.remove(db_url).unwrap();
cache.set_windows(windows);
self.cache_map.insert(db_url.to_owned(), cache);
Ok(())
} else {
let mut cache = try!(Cache::new(db_url));
cache.set_windows(windows);
self.cache_map.insert(db_url.to_owned(), cache);
Ok(())
}
}
pub fn cache_tables(&mut self, db_url: &str, tables: Vec<Table>) -> Result<(), DbError> {
if self.has_cache(db_url) {
let mut cache = self.cache_map.remove(db_url).unwrap();
cache.set_tables(tables);
self.cache_map.insert(db_url.to_owned(), cache);
Ok(())
} else {
let mut cache = try!(Cache::new(db_url));
cache.set_tables(tables);
self.cache_map.insert(db_url.to_owned(), cache);
Ok(())
}
}
}
/// items cached, unique for each db_url connection
pub struct Cache {
/// windows extraction is an expensive operation and doesn't change very often
/// None indicates that nothing is cached yet; an empty Vec still counts as cached
pub windows: Option<Vec<Window>>,
/// tables extraction is an expensive operation and doesn't change very often
pub tables: Option<Vec<Table>>,
}
impl Cache {
fn new(db_url: &str) -> Result<Self, DbError> {
Ok(Cache {
windows: None,
tables: None,
})
}
fn set_windows(&mut self, windows: Vec<Window>) {
self.windows = Some(windows);
}
fn set_tables(&mut self, tables: Vec<Table>) {
self.tables = Some(tables);
}
}
// the db_url is stored in the headers
pub fn get_db_url(req: &Request) -> Option<String> {
let db_url: Option<&[Vec<u8>]> = req.headers.get_raw("db_url");
match db_url {
Some(db_url) => {
let first = &db_url[0];
let url = String::from_utf8(first.clone()).unwrap();
Some(url)
}
None => None,
}
}
pub struct Context {
pub db_url: String,
arc: Arc<RwLock<GlobalPools>>,<|fim▁hole|>}
impl Context {
pub fn new(req: &mut Request) -> Self {
let db_url = get_db_url(req).unwrap();
let globals = GlobalPools::from_request(req);
let context = Context {
db_url: db_url.into(),
arc: globals,
};
context
}
fn get_connection(&self) -> Result<Platform, DbError> {
pool::db_with_url(&self.db_url)
}
pub fn db(&self) -> Result<Platform, DbError> {
self.get_connection()
}
pub fn cache_tables(&self, tables: Vec<Table>) -> Result<(), DbError> {
let ref mut globals = *self.arc.write().unwrap();
globals.cache_tables(&self.db_url, tables)
}
pub fn has_cached_tables(&self) -> bool {
let ref globals = *self.arc.read().unwrap();
globals.has_cached_tables(&self.db_url)
}
pub fn get_cached_tables(&self) -> Option<Vec<Table>> {
let ref globals = *self.arc.read().unwrap();
globals.get_cached_tables(&self.db_url)
}
pub fn has_cached_windows(&self) -> bool {
let ref globals = *self.arc.read().unwrap();
globals.has_cached_windows(&self.db_url)
}
pub fn get_cached_windows(&self) -> Option<Vec<Window>> {
let ref globals = *self.arc.read().unwrap();
globals.get_cached_windows(&self.db_url)
}
pub fn cache_windows(&self, windows: Vec<Window>) {
let ref mut globals = *self.arc.write().unwrap();
globals.cache_windows(&self.db_url, windows);
}
pub fn reset_cache(&self) -> Result<(), ServiceError> {
let ref mut globals = *self.arc.write().unwrap();
try!(globals.reset_cache(&self.db_url));
Ok(())
}
}
pub fn http_reset_cache(req: &mut Request) -> IronResult<Response> {
let context = Context::new(req);
match context.reset_cache() {
Ok(()) => Ok(Response::with((Status::Ok, json::encode(&"OK").unwrap()))),
Err(_) => Ok(Response::with((Status::BadRequest, "Something went wrong"))),
}
}
pub fn http_can_db_url_connect(req: &mut Request) -> IronResult<Response> {
let context = Context::new(req);
let test = pool::test_connection(&context.db_url);
match test {
Ok(_) => Ok(Response::with((Status::Ok, json::encode(&"OK").unwrap()))),
Err(_) => Ok(Response::with((Status::BadRequest, json::encode(&"Unable to connect DB").unwrap()))),
}
}<|fim▁end|>
| |
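The global.rs row keeps its per-`db_url` caches in a `BTreeMap` shared across handler threads through `Arc<RwLock<...>>` (via iron's `persistent::State`): lookups take the read lock, cache updates take the write lock. A framework-free sketch of that pattern (the helper names and demo URL are ours):

```rust
use std::collections::BTreeMap;
use std::sync::{Arc, RwLock};
use std::thread;

// A reduction of GlobalPools: readers share the lock, writers take it
// exclusively, and the Arc lets every thread hold the same map.
type SharedCache = Arc<RwLock<BTreeMap<String, Vec<String>>>>;

fn cache_tables(cache: &SharedCache, db_url: &str, tables: Vec<String>) {
    cache.write().unwrap().insert(db_url.to_owned(), tables);
}

fn get_cached_tables(cache: &SharedCache, db_url: &str) -> Option<Vec<String>> {
    cache.read().unwrap().get(db_url).cloned()
}

fn main() {
    let cache: SharedCache = Arc::new(RwLock::new(BTreeMap::new()));
    let writer = {
        let cache = Arc::clone(&cache);
        thread::spawn(move || cache_tables(&cache, "pg://localhost/demo", vec!["users".into()]))
    };
    writer.join().unwrap();
    assert_eq!(
        get_cached_tables(&cache, "pg://localhost/demo"),
        Some(vec!["users".to_string()])
    );
}
```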
<|file_name|>handle.rs<|end_file_name|><|fim▁begin|>use ffi::h5i::{hid_t, H5I_type_t, H5Iget_type, H5Iis_valid, H5Iinc_ref, H5Idec_ref,
H5I_INVALID_HID};
use ffi::h5i::H5I_type_t::*;
use error::Result;
use object::Object;
use std::sync::{Arc, Mutex, RwLock};
use std::collections::HashMap;
pub fn get_id_type(id: hid_t) -> H5I_type_t {
h5lock_s!({
let tp = h5lock!(H5Iget_type(id));
let valid = id > 0 && tp > H5I_BADID && tp < H5I_NTYPES;
if valid { tp } else { H5I_BADID }
})<|fim▁hole|>pub fn is_valid_id(id: hid_t) -> bool {
h5lock_s!({
let tp = get_id_type(id);
tp > H5I_BADID && tp < H5I_NTYPES
})
}
pub fn is_valid_user_id(id: hid_t) -> bool {
h5lock!({
H5Iis_valid(id) == 1
})
}
pub trait ID {
fn id(&self) -> hid_t;
}
pub trait FromID {
fn from_id(id: hid_t) -> Result<Self>;
}
struct Registry {
registry: Mutex<HashMap<hid_t, Arc<RwLock<hid_t>>>>,
}
impl Registry {
pub fn new() -> Registry {
Registry { registry: Mutex::new(HashMap::new()) }
}
pub fn new_handle(&self, id: hid_t) -> Arc<RwLock<hid_t>> {
let mut registry = self.registry.lock().unwrap();
let handle = registry.entry(id).or_insert(Arc::new(RwLock::new(id)));
if *handle.read().unwrap() != id {
// an id may be left dangling by previous invalidation of a linked handle
*handle = Arc::new(RwLock::new(id));
}
handle.clone()
}
}
pub struct Handle {
id: Arc<RwLock<hid_t>>,
}
impl Handle {
pub fn new(id: hid_t) -> Result<Handle> {
lazy_static! {
static ref REGISTRY: Registry = Registry::new();
}
h5lock_s!({
if is_valid_user_id(id) {
Ok(Handle { id: REGISTRY.new_handle(id) })
} else {
Err(From::from(format!("Invalid handle id: {}", id)))
}
})
}
pub fn invalid() -> Handle {
Handle { id: Arc::new(RwLock::new(H5I_INVALID_HID)) }
}
pub fn id(&self) -> hid_t {
*self.id.read().unwrap()
}
pub fn invalidate(&self) {
*self.id.write().unwrap() = H5I_INVALID_HID;
}
#[allow(dead_code)] // FIXME: spurious rustc warning
pub fn incref(&self) {
if is_valid_user_id(self.id()) {
h5lock!(H5Iinc_ref(self.id()));
}
}
pub fn decref(&self) {
h5lock!({
if is_valid_user_id(self.id()) {
H5Idec_ref(self.id());
}
// must invalidate all linked IDs because the library reuses them internally
if !is_valid_user_id(self.id()) && !is_valid_id(self.id()) {
self.invalidate();
}
})
}
}
impl Drop for Handle {
fn drop(&mut self) {
h5lock_s!(self.decref());
}
}
impl ID for Handle {
fn id(&self) -> hid_t {
self.id()
}
}
impl FromID for Handle {
fn from_id(id: hid_t) -> Result<Handle> {
Handle::new(id)
}
}
impl Object for Handle {}<|fim▁end|>
|
}
|
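The handle.rs row's `Registry` hands out one shared `Arc<RwLock<hid_t>>` cell per raw id, so invalidating that cell once is observed by every linked `Handle`. A reduced sketch of that shape, with a plain integer standing in for `hid_t` (the row's re-validation of dangling cells is omitted):

```rust
use std::collections::HashMap;
use std::sync::{Arc, Mutex, RwLock};

type HidT = i32; // stand-in for ffi::h5i::hid_t
const INVALID_HID: HidT = -1;

// Reduction of the row's Registry: one shared RwLock cell per raw id,
// so a write to the cell is seen by every Handle cloned from it.
struct Registry {
    slots: Mutex<HashMap<HidT, Arc<RwLock<HidT>>>>,
}

impl Registry {
    fn new() -> Self {
        Registry { slots: Mutex::new(HashMap::new()) }
    }

    fn handle(&self, id: HidT) -> Arc<RwLock<HidT>> {
        self.slots
            .lock()
            .unwrap()
            .entry(id)
            .or_insert_with(|| Arc::new(RwLock::new(id)))
            .clone()
    }
}

fn main() {
    let registry = Registry::new();
    let a = registry.handle(42);
    let b = registry.handle(42); // linked: same cell as `a`
    *a.write().unwrap() = INVALID_HID; // the library reused the id -> invalidate
    assert_eq!(*b.read().unwrap(), INVALID_HID); // `b` observes it too
}
```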
<|file_name|>AuditApp.tsx<|end_file_name|><|fim▁begin|>/*
* SonarQube
* Copyright (C) 2009-2022 SonarSource SA
* mailto:info AT sonarsource DOT com
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
import * as React from 'react';
import { connect } from 'react-redux';
import { getGlobalSettingValue, Store } from '../../../store/rootReducer';
import { AdminPageExtension } from '../../../types/extension';
import { Extension } from '../../../types/types';
import { fetchValues } from '../../settings/store/actions';
import '../style.css';
import { HousekeepingPolicy, RangeOption } from '../utils';
import AuditAppRenderer from './AuditAppRenderer';
interface Props {
auditHousekeepingPolicy: HousekeepingPolicy;
fetchValues: typeof fetchValues;
adminPages: Extension[];<|fim▁hole|> hasGovernanceExtension?: boolean;
downloadStarted: boolean;
selection: RangeOption;
}
export class AuditApp extends React.PureComponent<Props, State> {
constructor(props: Props) {
super(props);
const hasGovernanceExtension = Boolean(
props.adminPages?.find(e => e.key === AdminPageExtension.GovernanceConsole)
);
this.state = {
downloadStarted: false,
selection: RangeOption.Today,
hasGovernanceExtension
};
}
componentDidMount() {
const { hasGovernanceExtension } = this.state;
if (hasGovernanceExtension) {
this.props.fetchValues(['sonar.dbcleaner.auditHousekeeping']);
}
}
componentDidUpdate(prevProps: Props) {
if (prevProps.adminPages !== this.props.adminPages) {
const hasGovernanceExtension = Boolean(
this.props.adminPages?.find(e => e.key === AdminPageExtension.GovernanceConsole)
);
this.setState({
hasGovernanceExtension
});
}
}
handleDateSelection = (dateRange: { from?: Date; to?: Date }) =>
this.setState({ dateRange, downloadStarted: false, selection: RangeOption.Custom });
handleOptionSelection = (selection: RangeOption) =>
this.setState({ dateRange: undefined, downloadStarted: false, selection });
handleStartDownload = () => {
setTimeout(() => {
this.setState({ downloadStarted: true });
}, 0);
};
render() {
const { hasGovernanceExtension, ...auditAppRendererProps } = this.state;
const { auditHousekeepingPolicy } = this.props;
if (!hasGovernanceExtension) {
return null;
}
return (
<AuditAppRenderer
handleDateSelection={this.handleDateSelection}
handleOptionSelection={this.handleOptionSelection}
handleStartDownload={this.handleStartDownload}
housekeepingPolicy={auditHousekeepingPolicy || HousekeepingPolicy.Monthly}
{...auditAppRendererProps}
/>
);
}
}
const mapDispatchToProps = { fetchValues };
const mapStateToProps = (state: Store) => {
const settingValue = getGlobalSettingValue(state, 'sonar.dbcleaner.auditHousekeeping');
return {
auditHousekeepingPolicy: settingValue?.value as HousekeepingPolicy
};
};
export default connect(mapStateToProps, mapDispatchToProps)(AuditApp);<|fim▁end|>
|
}
interface State {
dateRange?: { from?: Date; to?: Date };
|
<|file_name|>voicemailService.service.ts<|end_file_name|><|fim▁begin|>/* Copyright © 2017 BroadSoft Inc. */
import { Injectable, Inject, Component } from '@angular/core';
import { Http, Response, Request, Headers, RequestOptions } from '@angular/http';
import { Observable } from "rxjs/Observable";
import { HttpServices } from 'app/AppCommon/httpservices.service';
import { VoicemailServiceInput } from 'app/Voicemail/voicemailServiceInput.service';
@Injectable()
export class VoicemailService {
private res: Response;
private voicemailBody;
private headers: Headers = new Headers();
customizedTextJson = window['customizedTexts'];
constructor(private http: Http, private httpServices: HttpServices, private voicemailServiceInput: VoicemailServiceInput) { }<|fim▁hole|> this.httpServices.httpGetRequest(voicemailUrl)
.subscribe((res) => {
var voicemailParsedJson = res.json();
this.voicemailServiceInput.setActive(voicemailParsedJson["VoiceMessaging"]["active"]["$"] == "true");
this.voicemailServiceInput.setAlwaysRedirectToVoiceMail(voicemailParsedJson["VoiceMessaging"]["alwaysRedirectToVoiceMail"]["$"] == "true");
this.voicemailServiceInput.setBusyRedirectToVoiceMail(voicemailParsedJson["VoiceMessaging"]["busyRedirectToVoiceMail"]["$"] == "true");
this.voicemailServiceInput.setNoAnswerRedirectToVoiceMail(voicemailParsedJson["VoiceMessaging"]["noAnswerRedirectToVoiceMail"]["$"] == "true");
this.voicemailServiceInput.setProcessing(voicemailParsedJson["VoiceMessaging"]["processing"]["$"]);
this.voicemailServiceInput.setUnifiedMessagingChecked(voicemailParsedJson["VoiceMessaging"]["processing"]["$"] == "Unified Voice and Email Messaging");
this.voicemailServiceInput.setUsePhoneMessageWaitingIndicator(voicemailParsedJson["VoiceMessaging"]["usePhoneMessageWaitingIndicator"]["$"] == "true");
this.voicemailServiceInput.setVoiceMessageDeliveryEmailAddress(voicemailParsedJson["VoiceMessaging"]["voiceMessageDeliveryEmailAddress"]["$"]);
this.voicemailServiceInput.setSendVoiceMessageNotifyEmail(voicemailParsedJson["VoiceMessaging"]["sendVoiceMessageNotifyEmail"]["$"] == "true");
this.voicemailServiceInput.setNotifyEmailAddress(voicemailParsedJson["VoiceMessaging"]["voiceMessageNotifyEmailAddress"]["$"]);
this.voicemailServiceInput.setSendCarbonCopyVoiceMessage(voicemailParsedJson["VoiceMessaging"]["sendCarbonCopyVoiceMessage"]["$"] == "true");
this.voicemailServiceInput.setVoiceMessageCarbonCopyEmailAddress(voicemailParsedJson["VoiceMessaging"]["voiceMessageCarbonCopyEmailAddress"]["$"]);
this.voicemailServiceInput.setTransferOnZeroToPhoneNumber(voicemailParsedJson["VoiceMessaging"]["transferOnZeroToPhoneNumber"]["$"] == "true");
this.voicemailServiceInput.setTransferPhoneNumber(voicemailParsedJson["VoiceMessaging"]["transferPhoneNumber"]["$"]);
postVoicemailGet(voicemailParsedJson);
}, (err) => {
var voicemailParsedJson = null;
postVoicemailGet(voicemailParsedJson);
});
}
constructInitialBody() {
this.voicemailBody = '<?xml version="1.0" encoding="UTF-8"?><VoiceMessaging xmlns="http://schema.broadsoft.com/xsi">';
}
constructFinalBody() {
this.voicemailBody = this.voicemailBody + '</VoiceMessaging>';
}
putVoicemailService(voicemailUrl, isVoicemailChecked, postVoicemailPut) {
this.constructInitialBody();
this.voicemailBody = this.voicemailBody + '<active>' + isVoicemailChecked + '</active>';
this.constructFinalBody();
this.httpServices.httpPutRequest(voicemailUrl, this.voicemailBody)
.subscribe((res) => {
this.voicemailServiceInput.setActive(isVoicemailChecked);
postVoicemailPut(res);
}, (err) => {
postVoicemailPut(err);
});
}
putMessageArrivesService(voicemailUrl,processingType,deliveryEmailAddress,isMessageWaitingIndicatorChecked, postVoicemailPut) {
this.constructInitialBody();
this.voicemailBody = this.voicemailBody + '<processing>' + processingType + '</processing>';
if (deliveryEmailAddress) {
this.voicemailBody = this.voicemailBody + '<voiceMessageDeliveryEmailAddress>' + deliveryEmailAddress + '</voiceMessageDeliveryEmailAddress>';
}
else {
this.voicemailBody = this.voicemailBody + '<voiceMessageDeliveryEmailAddress xs:nil="true" xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"/>';
}
if (this.fetchUnifiedMessagingChecked()) {
this.voicemailBody = this.voicemailBody + '<usePhoneMessageWaitingIndicator>' + isMessageWaitingIndicatorChecked + '</usePhoneMessageWaitingIndicator>';
}
this.constructFinalBody();
this.httpServices.httpPutRequest(voicemailUrl, this.voicemailBody)
.subscribe((res) => {
this.voicemailServiceInput.setProcessing(processingType);
this.voicemailServiceInput.setVoiceMessageDeliveryEmailAddress(deliveryEmailAddress);
this.voicemailServiceInput.setUsePhoneMessageWaitingIndicator(isMessageWaitingIndicatorChecked);
postVoicemailPut(res);
}, (err) => {
postVoicemailPut(err);
});
}
putSendCallsToVoicemailService(voicemailUrl, isSendCallAlwaysSelected,isSendCallBusySelected, isSendCallNoAnswerSelected, postVoicemailPut) {
this.constructInitialBody();
this.voicemailBody = this.voicemailBody + '<alwaysRedirectToVoiceMail>' + isSendCallAlwaysSelected + '</alwaysRedirectToVoiceMail><busyRedirectToVoiceMail>' + isSendCallBusySelected + '</busyRedirectToVoiceMail><noAnswerRedirectToVoiceMail>' + isSendCallNoAnswerSelected + '</noAnswerRedirectToVoiceMail>';
this.constructFinalBody();
this.httpServices.httpPutRequest(voicemailUrl, this.voicemailBody)
.subscribe((res) => {
this.voicemailServiceInput.setAlwaysRedirectToVoiceMail(isSendCallAlwaysSelected);
this.voicemailServiceInput.setBusyRedirectToVoiceMail(isSendCallBusySelected);
this.voicemailServiceInput.setNoAnswerRedirectToVoiceMail(isSendCallNoAnswerSelected);
postVoicemailPut(res);
}, (err) => {
postVoicemailPut(err);
});
}
putEmailNotificationService(voicemailUrl, isEmailNotificationChecked, notifyEmailAddress, postVoicemailPut) {
this.constructInitialBody();
if (notifyEmailAddress) {
this.voicemailBody = this.voicemailBody + '<sendVoiceMessageNotifyEmail>' + isEmailNotificationChecked + '</sendVoiceMessageNotifyEmail><voiceMessageNotifyEmailAddress>' + notifyEmailAddress + '</voiceMessageNotifyEmailAddress>';
}
else {
this.voicemailBody = this.voicemailBody + '<sendVoiceMessageNotifyEmail>' + isEmailNotificationChecked + '</sendVoiceMessageNotifyEmail><voiceMessageNotifyEmailAddress xs:nil="true" xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"/>';
}
this.constructFinalBody();
this.httpServices.httpPutRequest(voicemailUrl, this.voicemailBody)
.subscribe((res) => {
this.voicemailServiceInput.setSendVoiceMessageNotifyEmail(isEmailNotificationChecked);
this.voicemailServiceInput.setNotifyEmailAddress(notifyEmailAddress);
postVoicemailPut(res);
}, (err) => {
postVoicemailPut(err);
});
}
putEmailCarbonCopyService(voicemailUrl, isEmailCarbonCopyChecked, carbonCopyEmailAddress, postVoicemailPut) {
this.constructInitialBody();
if (carbonCopyEmailAddress) {
this.voicemailBody = this.voicemailBody + '<sendCarbonCopyVoiceMessage>' + isEmailCarbonCopyChecked + '</sendCarbonCopyVoiceMessage><voiceMessageCarbonCopyEmailAddress>' + carbonCopyEmailAddress + '</voiceMessageCarbonCopyEmailAddress>';
}
else {
this.voicemailBody = this.voicemailBody + '<sendCarbonCopyVoiceMessage>' + isEmailCarbonCopyChecked + '</sendCarbonCopyVoiceMessage><voiceMessageCarbonCopyEmailAddress xs:nil="true" xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"/>';
}
this.constructFinalBody();
this.httpServices.httpPutRequest(voicemailUrl, this.voicemailBody)
.subscribe((res) => {
this.voicemailServiceInput.setSendCarbonCopyVoiceMessage(isEmailCarbonCopyChecked);
this.voicemailServiceInput.setVoiceMessageCarbonCopyEmailAddress(carbonCopyEmailAddress);
postVoicemailPut(res);
}, (err) => {
postVoicemailPut(err);
});
}
putTransferToNumberService(voicemailUrl, isTransferChecked, transferPhoneNumber, postVoicemailPut) {
this.constructInitialBody();
if (transferPhoneNumber) {
this.voicemailBody = this.voicemailBody + '<transferOnZeroToPhoneNumber>' + isTransferChecked + '</transferOnZeroToPhoneNumber><transferPhoneNumber>' + transferPhoneNumber + '</transferPhoneNumber>';
}
else {
this.voicemailBody = this.voicemailBody + '<transferOnZeroToPhoneNumber>' + isTransferChecked + '</transferOnZeroToPhoneNumber><transferPhoneNumber xs:nil="true" xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"/>';
}
this.constructFinalBody();
this.httpServices.httpPutRequest(voicemailUrl, this.voicemailBody)
.subscribe((res) => {
this.voicemailServiceInput.setTransferOnZeroToPhoneNumber(isTransferChecked);
this.voicemailServiceInput.setTransferPhoneNumber(transferPhoneNumber);
postVoicemailPut(res);
}, (err) => {
postVoicemailPut(err);
});
}
getRingsService(voicemailGreetingUrl, postVoicemailGreetingGet) {
this.httpServices.httpGetRequest(voicemailGreetingUrl)
.subscribe((res) => {
var voicemailGreetingsParsedJson = res.json();
this.voicemailServiceInput.setSendCallsNumberOfRings(voicemailGreetingsParsedJson["VoiceMessagingGreetings"]["noAnswerNumberOfRings"]["$"]);
postVoicemailGreetingGet(voicemailGreetingsParsedJson);
}, (err) => {
var voicemailGreetingsParsedJson = null;
postVoicemailGreetingGet(voicemailGreetingsParsedJson);
});
}
putRingsService(voicemailGreetingUrl, voicemailRingSelected, postVoicemailGreetingPut) {
if(voicemailRingSelected == this.customizedTextJson.voice_management.none){
voicemailRingSelected = '0';
}
var body = '<?xml version="1.0" encoding="ISO-8859-1"?> <VoiceMessagingGreetings xmlns="http://schema.broadsoft.com/xsi" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">';
body = body + '<noAnswerNumberOfRings>' + voicemailRingSelected + '</noAnswerNumberOfRings>';
body = body + '</VoiceMessagingGreetings>'
this.httpServices.httpPutRequest(voicemailGreetingUrl, body)
.subscribe((res) => {
this.voicemailServiceInput.setSendCallsNumberOfRings(voicemailRingSelected);
postVoicemailGreetingPut(res);
}, (err) => {
postVoicemailGreetingPut(err);
});
}
fetchIsVoicemailServiceActive() {
return (this.voicemailServiceInput.getIsActive());
}
fetchIsEmailNotificationChecked() {
return (this.voicemailServiceInput.getIsSendVoiceMessageNotifyEmail());
}
fetchNotifyEmailAddress() {
return (this.voicemailServiceInput.getNotifyEmailAddress());
}
fetchIsEmailCarbonCopyChecked() {
return (this.voicemailServiceInput.getIsSendCarbonCopyVoiceMessage());
}
fetchCarbonCopyEmailAddress() {
return (this.voicemailServiceInput.getVoiceMessageCarbonCopyEmailAddress());
}
fetchIsTransferToPhoneNumberChecked() {
return (this.voicemailServiceInput.getIsTransferOnZeroToPhoneNumber());
}
fetchTransferNumber() {
return (this.voicemailServiceInput.getTransferPhoneNumber());
}
fetchProcessing() {
return (this.voicemailServiceInput.getProcessing());
}
fetchUnifiedMessagingChecked() {
return (this.voicemailServiceInput.getUnifiedMessagingChecked());
}
fetchMessageWaitingIndicator() {
return (this.voicemailServiceInput.getUsePhoneMessageWaitingIndicator());
}
fetchDeliveryMailAddress() {
return (this.voicemailServiceInput.getVoiceMessageDeliveryEmailAddress());
}
fetchIsSendCallsAlwaysChecked() {
return (this.voicemailServiceInput.getIsAlwaysRedirectToVoiceMail());
}
fetchIsSendCallsBusyChecked() {
return (this.voicemailServiceInput.getIsBusyRedirectToVoiceMail());
}
fetchIsSendCallsNoAnswerChecked() {
return (this.voicemailServiceInput.getIsNoAnswerRedirectToVoiceMail());
}
fetchSendCallsRings() {
return (this.voicemailServiceInput.getSendCallsRings());
}
}<|fim▁end|>
|
getVoicemailService(voicemailUrl, postVoicemailGet) {
|
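The voicemail row assembles its XSI request bodies by raw string concatenation, which passes XML metacharacters in user-supplied addresses through unescaped. A hedged sketch of the same `<VoiceMessaging>` body construction with minimal text-node escaping (element names are copied from the row; the escaping set and function names are ours):

```rust
// Minimal escaping for XML text nodes — covers the five metacharacters
// that raw concatenation (as in the row above) would pass through.
fn escape_xml(s: &str) -> String {
    s.chars()
        .map(|c| match c {
            '&' => "&amp;".to_string(),
            '<' => "&lt;".to_string(),
            '>' => "&gt;".to_string(),
            '"' => "&quot;".to_string(),
            '\'' => "&apos;".to_string(),
            other => other.to_string(),
        })
        .collect()
}

fn voice_messaging_body(active: bool, notify_email: Option<&str>) -> String {
    let mut body = String::from(
        "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
         <VoiceMessaging xmlns=\"http://schema.broadsoft.com/xsi\">",
    );
    body.push_str(&format!("<active>{}</active>", active));
    match notify_email {
        Some(addr) => body.push_str(&format!(
            "<voiceMessageNotifyEmailAddress>{}</voiceMessageNotifyEmailAddress>",
            escape_xml(addr)
        )),
        // Mirror the row's nil-attribute convention for absent values.
        None => body.push_str(
            "<voiceMessageNotifyEmailAddress xs:nil=\"true\" \
             xmlns:xs=\"http://www.w3.org/2001/XMLSchema-instance\"/>",
        ),
    }
    body.push_str("</VoiceMessaging>");
    body
}

fn main() {
    let body = voice_messaging_body(true, Some("a&b@example.com"));
    assert!(body.contains("a&amp;b@example.com"));
}
```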