prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>session.js<|end_file_name|><|fim▁begin|>/** * Session Configuration * (sails.config.session) * * Sails session integration leans heavily on the great work already done by * Express, but also unifies Socket.io with the Connect session store. It uses * Connect's cookie parser to normalize configuration differences between Express * and Socket.io and hooks into Sails' middleware interpreter to allow you to access * and auto-save to `req.session` with Socket.io the same way you would with Express. * * For more information on configuring the session, check out: * http://sailsjs.org/#/documentation/reference/sails.config/sails.config.session.html */ module.exports.session = { /*************************************************************************** * * * Session secret is automatically generated when your new app is created * * Replace at your own risk in production-- you will invalidate the cookies * * of your users, forcing them to log in again. * * * ***************************************************************************/ secret: '0e12331bed5965a0443585ad3157def3', /*************************************************************************** * * * Set the session cookie expire time The maxAge is set by milliseconds, * * the example below is for 24 hours * * * ***************************************************************************/ // cookie: { // maxAge: 24 * 60 * 60 * 1000 // } /*************************************************************************** * * * In production, uncomment the following lines to set up a shared redis * * session store that can be shared across multiple Sails.js servers * ***************************************************************************/ // adapter: 'redis', /*************************************************************************** * * * The following values are optional, if no options are set a redis * * instance running on localhost is expected. 
Read more about options at: * * https://github.com/visionmedia/connect-redis * * * * * ***************************************************************************/ // host: 'localhost', // port: 6379, // ttl: <redis session TTL in seconds>, // db: 0, // pass: <redis auth password> // prefix: 'sess:' /*************************************************************************** * *<|fim▁hole|> // adapter: 'mongo', // host: 'localhost', // port: 27017, // db: 'sails', // collection: 'sessions', /*************************************************************************** * * * Optional Values: * * * * # Note: url will override other connection settings url: * * 'mongodb://user:pass@host:port/database/collection', * * * ***************************************************************************/ // username: '', // password: '', // auto_reconnect: false, // ssl: false, // stringify: true };<|fim▁end|>
* Uncomment the following lines to use your Mongo adapter as a session * * store * * * ***************************************************************************/
<|file_name|>test_netsnmp.py<|end_file_name|><|fim▁begin|># snapy - a python snmp library # # Copyright (C) 2009 ITA Software, Inc. # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # version 2 as published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. import time from twisted.trial import unittest from snapy.netsnmp.unittests import TestCase from snapy.netsnmp import Session, SnmpError, SnmpTimeout, OID class Result(object): """Container for async results""" value = None def set_result(value, result): result.value = value class TestSessionV1(TestCase): version = "1" bulk = False basics = [ (OID(".1.3.6.1.4.2.1.1"), 1), (OID(".1.3.6.1.4.2.1.2"), -1), (OID(".1.3.6.1.4.2.1.3"), 1), (OID(".1.3.6.1.4.2.1.4"), "test value"), ] def setUpSession(self, address): self.session = Session( version=self.version, community="public", peername=address, _use_bulk=self.bulk) self.session.open() def tearDownSession(self): self.session.close() def test_sget(self): result = self.session.sget([x for x,v in self.basics]) self.assertEquals(result, self.basics) return self.finishGet() def test_get_small(self): result = Result() self.session.get([x for x,v in self.basics], set_result, result) self.session.wait() self.assertEquals(result.value, self.basics) return self.finishGet() def test_get_big(self): oids = [] for i in xrange(1, 100): oids.append(OID((1,3,6,1,4,2,4,i))) result = Result() self.session.get(oids, set_result, result) self.session.wait() result = dict(result.value) for oid in oids: assert oid in result assert result[oid] == "data data data data" return self.finishGet() def test_walk_tree(self): result = Result() self.session.walk([".1.3.6.1.4.2.1"], set_result, result) 
self.session.wait() self.assertEquals(result.value, self.basics) return self.finishWalk() def test_walk_leaf(self): oid = OID(".1.3.6.1.4.2.1.1") result = Result() self.session.walk([oid], set_result, result) self.session.wait() self.assertEquals(result.value, [(oid, 1)]) return self.finishGet() def test_walk_strict(self): oid = OID(".1.3.6.1.4.2.1.1") result = Result() self.session.walk([oid], set_result, result, strict=True) self.session.wait() self.assertEquals(result.value, []) return self.finishStrictWalk() def test_sysDescr(self): result = self.session.sget([OID("SNMPv2-MIB::sysDescr.0")]) self.assert_(result) self.assertIsInstance(result[0][1], str)<|fim▁hole|> self.assert_(len(result[0][1]) > 0) return self.finishGet() class TestSessionV2c(TestSessionV1): version = "2c" def test_hrSystemDate(self): # This is a special string that gets formatted using the # MIB's DISPLAY-HINT value. Also, strip off everything # other than the date and hour to avoid a race condition. # And one more quirk, these dates are not zero padded # so we must format the date manually, whee... 
now = time.localtime() now = "%d-%d-%d,%d" % (now[0], now[1], now[2], now[3]) result = self.session.sget([OID(".1.3.6.1.2.1.25.1.2.0")]) self.assert_(result) value = result[0][1].split(':', 1)[0] self.assertEquals(value, now) return self.finishGet() class TestSessionV2cBulk(TestSessionV2c): bulk = True class TestTimeoutsV1(unittest.TestCase): version = "1" def setUp(self): self.session = Session( version=self.version, community="public", peername="udp:127.0.0.1:9", retries=0, timeout=0.1) self.session.open() def test_sget(self): self.assertRaises(SnmpError, self.session.sget, [".1.3.6.1.4.2.1.1"]) def test_get(self): result = Result() self.session.get([".1.3.6.1.4.2.1.1"], set_result, result) self.session.wait() assert isinstance(result.value, SnmpTimeout) def tearDown(self): self.session.close() class TestTimeoutsV2c(TestTimeoutsV1): version = "2c" class TestOID(unittest.TestCase): def test_oid_name(self): oid = OID("1.3.6.1.2.1.1.1.0") self.assertEquals(oid, OID("SNMPv2-MIB::sysDescr.0")) self.assertEquals(oid, OID("sysDescr.0"))<|fim▁end|>
<|file_name|>test3_steps.js<|end_file_name|><|fim▁begin|>CucumberJsBrowserRunner.StepDefinitions.test3(function () { var And = Given = When = Then = this.defineStep, runner; Given(/^test3$/, function(callback) { callback(); }); When(/^test3$/, function(callback) { callback(); }); Then(/^test3$/, function(callback) { callback(); }); <|fim▁hole|><|fim▁end|>
});
<|file_name|>arrays.rs<|end_file_name|><|fim▁begin|>// Lumol, an extensible molecular simulation engine // Copyright (C) Lumol's contributors — BSD license //! Multi-dimensional arrays based on ndarray use ndarray; use std::ops::{Deref, DerefMut, Index, IndexMut}; use types::Zero; /// Two dimensional tensors, based on ndarray. /// /// Most of the methods are simply forwarded to ndarray, so also look the doc /// for this crate. This array type mainly supports indexing using tuples as /// indices and is though as storage backend for multi-dimensional data. /// /// ``` /// # use lumol_core::types::Array2; /// let mut a = Array2::zeros((3, 5)); /// /// assert_eq!(a[(0, 4)], 0.0); /// /// a[(0, 4)] = 7.0; /// assert_eq!(a[(0, 4)], 7.0); /// ``` #[derive(Debug, Clone, PartialEq)] pub struct Array2<T>(ndarray::Array2<T>); impl<T: Zero + Clone> Array2<T> { /// Create a new `Array2` of the specified `size` filled with the /// `Zero::zero` return value. /// /// # Examples /// /// ``` /// # use lumol_core::types::Array2; /// let a: Array2<f64> = Array2::zeros((8, 5)); /// assert_eq!(a[(6, 2)], 0.0); /// ``` pub fn zeros(size: (usize, usize)) -> Array2<T> { Array2(ndarray::Array2::zeros(size)) } /// Resize the array if the current size is not `size`, and fill the /// new array with zeros. /// /// # Examples /// /// ``` /// # use lumol_core::types::Array2; /// let mut a = Array2::zeros((8, 5)); /// /// a[(3, 3)] = 42.0; /// /// // This does nothing /// a.resize_if_different((8, 5)); /// assert_eq!(a[(3, 3)], 42.0); /// /// // This allocates a new array /// a.resize_if_different((8, 9)); /// assert_eq!(a[(3, 3)], 0.0); /// ``` pub fn resize_if_different(&mut self, size: (usize, usize)) { if self.dim() != size { *self = Array2::zeros(size); } } } impl<T: Default> Array2<T> { /// Create a new `Array2` of the specified `size` filled with the /// `Default::default` return value. 
/// /// # Examples /// /// ``` /// # use lumol_core::types::Array2; /// let a: Array2<f64> = Array2::zeros((8, 5)); /// let b: Array2<f64> = Array2::default((8, 5)); /// /// assert_eq!(a, b); /// ``` pub fn default(size: (usize, usize)) -> Array2<T> { Array2(ndarray::Array2::default(size)) } } impl<T> Index<(usize, usize)> for Array2<T> { type Output = T; fn index(&self, index: (usize, usize)) -> &T { unsafe { // ndarray does the check for us in debug builds self.0.uget(index) } } } impl<T> IndexMut<(usize, usize)> for Array2<T> { fn index_mut(&mut self, index: (usize, usize)) -> &mut T { unsafe { // ndarray does the check for us in debug builds self.0.uget_mut(index) } } } impl<T> Deref for Array2<T> { type Target = ndarray::Array2<T>; fn deref(&self) -> &ndarray::Array2<T> { &self.0 } } impl<T> DerefMut for Array2<T> { fn deref_mut(&mut self) -> &mut ndarray::Array2<T> { &mut self.0 } } /// Three dimensional tensors, based on ndarray /// /// Most of the methods are simply forwarded to ndarray, so also look the doc /// for this crate. This array type mainly supports indexing using tuples as /// indices and is though as storage backend for multi-dimensional data. /// /// ``` /// # use lumol_core::types::Array3; /// let mut a = Array3::zeros((3, 5, 2)); /// /// assert_eq!(a[(0, 4, 1)], 0.0); /// /// a[(0, 4, 1)] = 7.0; /// assert_eq!(a[(0, 4, 1)], 7.0); /// ``` #[derive(Debug, Clone, PartialEq)] pub struct Array3<T>(ndarray::Array3<T>); impl<T> Array3<T> { /// Create a new `Array3` of the specified `size` filled with the /// `Zero::zero` return value. /// /// # Examples /// /// ``` /// # use lumol_core::types::Array3; /// let a: Array3<f64> = Array3::zeros((8, 5, 2)); /// assert_eq!(a[(6, 2, 0)], 0.0); /// ``` pub fn zeros(size: (usize, usize, usize)) -> Array3<T> where T: Zero + Clone, { Array3(ndarray::Array3::zeros(size)) } /// Resize the array if the current size is not `size`, and fill the /// new array with zeros. 
/// /// # Examples /// /// ``` /// # use lumol_core::types::Array3; /// let mut a = Array3::zeros((8, 5, 7)); /// /// a[(3, 3, 3)] = 42.0; /// /// // This does nothing /// a.resize_if_different((8, 5, 7)); /// assert_eq!(a[(3, 3, 3)], 42.0); /// /// // This allocates a new array /// a.resize_if_different((8, 5, 6)); /// assert_eq!(a[(3, 3, 3)], 0.0); /// ``` pub fn resize_if_different(&mut self, size: (usize, usize, usize)) where T: Zero + Clone, { if self.0.shape() != [size.0, size.1, size.2] { *self = Array3::zeros(size); } } /// Create a new `Array3` of the specified `size` filled with the /// `Default::default` return value. /// `Default::default` return value. /// /// # Examples /// /// ``` /// # use lumol_core::types::Array3; /// let a: Array3<f64> = Array3::zeros((8, 5, 2)); /// let b: Array3<f64> = Array3::default((8, 5, 2)); /// /// assert_eq!(a, b); /// ``` pub fn default(size: (usize, usize, usize)) -> Array3<T> where T: Default, { Array3(ndarray::Array3::default(size)) } } impl<T> Index<(usize, usize, usize)> for Array3<T> { type Output = T; fn index(&self, index: (usize, usize, usize)) -> &T { unsafe { // ndarray does the check for us in debug builds self.0.uget(index) } } } impl<T> IndexMut<(usize, usize, usize)> for Array3<T> { fn index_mut(&mut self, index: (usize, usize, usize)) -> &mut T { unsafe { // ndarray does the check for us in debug builds self.0.uget_mut(index) } } } impl<T> Deref for Array3<T> { type Target = ndarray::Array3<T>; fn deref(&self) -> &ndarray::Array3<T> { &self.0 } } impl<T> DerefMut for Array3<T> { fn deref_mut(&mut self) -> &mut ndarray::Array3<T> { &mut self.0 } } #[cfg(test)] mod tests { mod array2 { use super::super::Array2; #[test] fn zeros() { let a: Array2<f64> = Array2::zeros((3, 4)); for i in 0..3 { for j in 0..4 { assert_eq!(a[(i, j)], 0.0); } } } #[test] fn default() { #[derive(Clone)] struct F64(f64); impl Default for F64 { fn default() -> F64 { F64(42.0) } } let a: Array2<F64> = Array2::default((3, 4)); for i 
in 0..3 { for j in 0..4 { assert_eq!(a[(i, j)].0, 42.0); } } } #[test] fn resize() { let mut a: Array2<f64> = Array2::zeros((3, 4)); assert_eq!(a.dim(), (3, 4)); a[(1, 1)] = 42.0; a.resize_if_different((7, 90)); assert_eq!(a.dim(), (7, 90)); assert_eq!(a[(1, 1)], 0.0); a[(1, 1)] = 42.0; a.resize_if_different((7, 90)); assert_eq!(a[(1, 1)], 42.0); } #[test] fn index() { let mut a: Array2<f64> = Array2::zeros((3, 4)); assert_eq!(a[(1, 3)], 0.0); a[(1, 3)] = 45.0; assert_eq!(a[(1, 3)], 45.0); } #[test] #[should_panic] #[cfg(debug_assertions)] fn out_of_bound_1() { let a: Array2<f64> = Array2::zeros((3, 4)); let _ = a[(5, 1)]; } #[test] #[should_panic] #[cfg(debug_assertions)] fn out_of_bound_2() { let a: Array2<f64> = Array2::zeros((3, 4)); let _ = a[(2, 7)]; } } mod array3 { use super::super::Array3; #[test] fn zeros() { let a: Array3<f64> = Array3::zeros((3, 4, 8)); for i in 0..3 { for j in 0..4 { for k in 0..8 { assert_eq!(a[(i, j, k)], 0.0); } } } } #[test] fn default() { #[derive(Clone)] struct F64(f64); impl Default for F64 { fn default() -> F64 { F64(42.0) } } let a: Array3<F64> = Array3::default((3, 4, 8)); for i in 0..3 { for j in 0..4 { for k in 0..8 { assert_eq!(a[(i, j, k)].0, 42.0); } } } } #[test] fn resize() { let mut a: Array3<f64> = Array3::zeros((3, 4, 5)); assert_eq!(a.dim(), (3, 4, 5)); a[(1, 1, 1)] = 42.0; <|fim▁hole|> assert_eq!(a.dim(), (7, 90, 8)); assert_eq!(a[(1, 1, 1)], 0.0); a[(1, 1, 1)] = 42.0; a.resize_if_different((7, 90, 8)); assert_eq!(a[(1, 1, 1)], 42.0); } #[test] fn index() { let mut a: Array3<f64> = Array3::zeros((3, 4, 5)); assert_eq!(a[(1, 3, 2)], 0.0); a[(1, 3, 2)] = 45.0; assert_eq!(a[(1, 3, 2)], 45.0); } #[test] #[should_panic] #[cfg(debug_assertions)] fn out_of_bound_1() { let a: Array3<f64> = Array3::zeros((3, 4, 89)); let _ = a[(5, 1, 6)]; } #[test] #[should_panic] #[cfg(debug_assertions)] fn out_of_bound_2() { let a: Array3<f64> = Array3::zeros((3, 4, 89)); let _ = a[(2, 67, 6)]; } #[test] #[should_panic] 
#[cfg(debug_assertions)] fn out_of_bound_3() { let a: Array3<f64> = Array3::zeros((3, 4, 89)); let _ = a[(2, 1, 600)]; } } }<|fim▁end|>
a.resize_if_different((7, 90, 8));
<|file_name|>setup_helper.py<|end_file_name|><|fim▁begin|># Copyright 2018 Google LLC. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Provides the setup for the experiments.""" from pytorch_pretrained_bert import modeling from pytorch_pretrained_bert import tokenization import torch import embeddings_helper def setup_uncased(model_config): """Setup the uncased bert model. Args: model_config: The model configuration to be loaded. Returns: tokenizer: The tokenizer to be used to convert between tokens and ids.<|fim▁hole|> model: The model that has been initialized. device: The device to be used in this run. embedding_map: Holding all token embeddings. """ # Load pre-trained model tokenizer (vocabulary) tokenizer = tokenization.BertTokenizer.from_pretrained(model_config) # Load pre-trained model (weights) model = modeling.BertModel.from_pretrained(model_config) _ = model.eval() # Set up the device in use device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu') print('device : ', device) model = model.to(device) # Initialize the embedding map embedding_map = embeddings_helper.EmbeddingMap(device, model) return tokenizer, model, device, embedding_map def setup_bert_vanilla(model_config): """Setup the uncased bert model without embedding maps. Args: model_config: The model configuration to be loaded. 
Returns: tokenizer: The tokenizer to be used to convert between tokens and ids. model: The model that has been initialized. device: The device to be used in this run. """ # Load pre-trained model tokenizer (vocabulary) tokenizer = tokenization.BertTokenizer.from_pretrained(model_config) # Load pre-trained model (weights) model = modeling.BertModel.from_pretrained(model_config) _ = model.eval() # Set up the device in use device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu') print('device : ', device) model = model.to(device) return tokenizer, model, device def setup_bert_mlm(model_config): """Setup the uncased bert model with classification head. Args: model_config: The model configuration to be loaded. Returns: tokenizer: The tokenizer to be used to convert between tokens and ids. model: The model that has been initialized. device: The device to be used in this run. """ # Load pre-trained model tokenizer (vocabulary) tokenizer = tokenization.BertTokenizer.from_pretrained(model_config) # Load pre-trained model (weights) model = modeling.BertForMaskedLM.from_pretrained('bert-base-uncased') _ = model.eval() # Set up the device in use device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu') print('device : ', device) model = model.to(device) # Initialize the embedding map embedding_map = embeddings_helper.EmbeddingMap(device, model.bert) return tokenizer, model, device, embedding_map<|fim▁end|>
<|file_name|>memory_storage.go<|end_file_name|><|fim▁begin|>// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package storage import ( "context" "fmt" "io" "os" "path/filepath" "regexp" "time" "google.golang.org/grpc/codes" /* copybara-comment */ "google.golang.org/grpc/status" /* copybara-comment */ "github.com/golang/protobuf/jsonpb" /* copybara-comment */ "github.com/golang/protobuf/proto" /* copybara-comment */ ) const ( memStorageType = "memory" memStorageVersion = "v0" ) // MemoryStorage is designed as a single threading storage. Will throw exception if multiple TX request. 
type MemoryStorage struct { service string path string pathParts []string cache *StorageCache fs *FileStorage deleted map[string]bool wipedRealms map[string]bool lock chan bool lastLock time.Time } func NewMemoryStorage(service, path string) *MemoryStorage { return &MemoryStorage{ service: service, path: path, cache: NewStorageCache(), fs: NewFileStorage(service, path), deleted: make(map[string]bool), wipedRealms: make(map[string]bool), lock: make(chan bool, 1), lastLock: time.Unix(0, 0), } } func (m *MemoryStorage) Info() map[string]string { return map[string]string{ "type": memStorageType, "version": memStorageVersion, "service": m.service, "path": m.path, } } func (m *MemoryStorage) Exists(datatype, realm, user, id string, rev int64) (bool, error) { fname := m.fname(datatype, realm, user, id, rev) if _, ok := m.cache.GetEntity(fname); ok { return true, nil } if m.deleted[fname] || m.wipedRealms[realm] { return false, nil } return m.fs.Exists(datatype, realm, user, id, rev) } func (m *MemoryStorage) Read(datatype, realm, user, id string, rev int64, content proto.Message) error { return m.ReadTx(datatype, realm, user, id, rev, content, nil) } // ReadTx reads inside a transaction. 
func (m *MemoryStorage) ReadTx(datatype, realm, user, id string, rev int64, content proto.Message, tx Tx) (ferr error) { if tx == nil { var err error tx, err = m.Tx(false) if err != nil { return err } defer func() { err := tx.Finish() if ferr == nil { ferr = err } }() } fname := m.fname(datatype, realm, user, id, rev) if data, ok := m.cache.GetEntity(fname); ok { content.Reset() proto.Merge(content, data) return nil } if m.deleted[fname] || m.wipedRealms[realm] { return fmt.Errorf("not found: %q", fname) } if err := m.fs.ReadTx(datatype, realm, user, id, rev, content, tx); err != nil { return err } m.cache.PutEntity(fname, content) return nil } // MultiReadTx reads a set of objects matching the input parameters and filters func (m *MemoryStorage) MultiReadTx(datatype, realm, user, id string, filters [][]Filter, offset, pageSize int, typ proto.Message, tx Tx) (_ *Results, ferr error) { if tx == nil { var err error tx, err = m.fs.Tx(false) if err != nil { return nil, fmt.Errorf("file read lock error: %v", err) } defer func() { err := tx.Finish() if ferr == nil { ferr = err } }() } if pageSize > MaxPageSize { pageSize = MaxPageSize } results := NewResults() err := m.findPath(datatype, realm, user, id, typ, func(path, userMatch, idMatch string, p proto.Message) error { if m.deleted[m.fname(datatype, realm, userMatch, idMatch, LatestRev)] || m.wipedRealms[realm] { return nil } if id != MatchAllIDs && idMatch != id { return nil } if !MatchProtoFilters(filters, p) { return nil } if offset > 0 { offset-- return nil } if pageSize > results.MatchCount { results.Entries = append(results.Entries, &Entry{ Realm: realm, GroupID: userMatch, ItemID: idMatch, Item: p, }) } results.MatchCount++ return nil }) return results, err } func (m *MemoryStorage) findPath(datatype, realm, user, id string, typ proto.Message, fn func(string, string, string, proto.Message) error) error { searchUser := user if user == MatchAllUsers { searchUser = "(.*)" } else { searchUser = "(" + user + ")" } 
searchRealm := realm if realm == AllRealms { searchRealm = "(.*)" } searchID := id if id == MatchAllIDs { searchID = "(.*)" } else { searchID = "(" + id + ")" } extractID := m.fs.fname(datatype, searchRealm, searchUser, searchID, LatestRev) re, err := regexp.Compile(extractID) if err != nil { return fmt.Errorf("file extract ID %q regexp error: %v", extractID, err) } defaultUserID := m.fs.fname(datatype, realm, DefaultUser, searchID, LatestRev) dure, err := regexp.Compile(defaultUserID) if err != nil { return fmt.Errorf("file extract ID %q regexp error: %v", defaultUserID, err) } cached := m.cache.Entities() fileMatcher := func(path string, info os.FileInfo, err error) error { return extractFromPath(re, dure, user, path, info, err, typ, cached, fn) } if err = filepath.Walk(m.fs.path, fileMatcher); err != nil { return err } return extractFromCache(re, dure, user, cached, fn) } func extractFromPath(re, dure *regexp.Regexp, user, path string, info os.FileInfo, err error, typ proto.Message, cached map[string]proto.Message, fn func(string, string, string, proto.Message) error) error { if err != nil { return err } if info.IsDir() { return nil } if _, ok := cached[path]; ok { return nil } userMatch, idMatch := extractUserAndID(re, dure, user, path) if userMatch == "" && idMatch == "" { return nil } var p proto.Message if typ != nil { file, err := os.Open(path) if err != nil { return fmt.Errorf("file %q I/O error: %v", path, err) } defer file.Close() p = proto.Clone(typ) if err := jsonpb.Unmarshal(file, p); err != nil && err != io.EOF { return fmt.Errorf("file %q invalid JSON: %v", path, err) } } return fn(path, userMatch, idMatch, p) } func extractUserAndID(re, dure *regexp.Regexp, user, path string) (string, string) { matches := re.FindStringSubmatch(path) if len(matches) == 3 { return matches[1], matches[2] } if user == DefaultUser { matches = dure.FindStringSubmatch(path) if len(matches) == 2 { return DefaultUser, matches[1] } } return "", "" } func extractFromCache(re, 
dure *regexp.Regexp, user string, cached map[string]proto.Message, fn func(string, string, string, proto.Message) error) error { for path, content := range cached { if !re.MatchString(path) && !dure.MatchString(path) { continue } userMatch, idMatch := extractUserAndID(re, dure, user, path) if userMatch == "" && idMatch == "" { continue } if err := fn(path, userMatch, idMatch, content); err != nil { return err } } return nil } func (m *MemoryStorage) ReadHistory(datatype, realm, user, id string, content *[]proto.Message) error { return m.ReadHistoryTx(datatype, realm, user, id, content, nil) } // ReadHistoryTx reads history inside a transaction. func (m *MemoryStorage) ReadHistoryTx(datatype, realm, user, id string, content *[]proto.Message, tx Tx) (ferr error) { if tx == nil { var err error tx, err = m.Tx(false) if err != nil { return err } defer func() { err := tx.Finish() if ferr == nil { ferr = err } }() } hfname := m.historyName(datatype, realm, user, id) if data, ok := m.cache.GetHistory(hfname); ok { for _, he := range data { *content = append(*content, he) } return nil } if err := m.fs.ReadHistoryTx(datatype, realm, user, id, content, tx); err != nil { return err } m.cache.PutHistory(hfname, *content) return nil } func (m *MemoryStorage) Write(datatype, realm, user, id string, rev int64, content proto.Message, history proto.Message) error { return m.WriteTx(datatype, realm, user, id, rev, content, history, nil) } // WriteTx writes inside a transaction. 
func (m *MemoryStorage) WriteTx(datatype, realm, user, id string, rev int64, content proto.Message, history proto.Message, tx Tx) (ferr error) { if tx == nil { var err error tx, err = m.Tx(true) if err != nil { return err } defer func() { err := tx.Finish() if ferr == nil { ferr = err } }() } hlist := make([]proto.Message, 0) if err := m.ReadHistoryTx(datatype, realm, user, id, &hlist, tx); err != nil && !ErrNotFound(err) { return err } hlist = append(hlist, history) hfname := m.historyName(datatype, realm, user, id) m.cache.PutHistory(hfname, hlist) vname := m.fname(datatype, realm, user, id, rev) m.cache.PutEntity(vname, content) lname := m.fname(datatype, realm, user, id, LatestRev) m.cache.PutEntity(lname, content) if _, ok := m.deleted[vname]; ok { delete(m.deleted, vname) } if _, ok := m.deleted[lname]; ok { delete(m.deleted, lname) } return nil } // Delete a record. func (m *MemoryStorage) Delete(datatype, realm, user, id string, rev int64) error { return m.DeleteTx(datatype, realm, user, id, rev, nil) } // DeleteTx delete a record with transaction. func (m *MemoryStorage) DeleteTx(datatype, realm, user, id string, rev int64, tx Tx) (ferr error) { if tx == nil { var err error tx, err = m.Tx(true) if err != nil { return err } defer func() { err := tx.Finish() if ferr == nil { ferr = err } }() } exists, err := m.Exists(datatype, realm, user, id, rev) if err != nil { return err } lname := m.fname(datatype, realm, user, id, LatestRev) if !exists { return status.Errorf(codes.NotFound, "not found: %q", lname) } vname := m.fname(datatype, realm, user, id, rev) m.cache.DeleteEntity(vname) m.cache.DeleteEntity(lname) m.deleted[vname] = true m.deleted[lname] = true return nil } // MultiDeleteTx deletes all records of a certain data type within a realm. 
func (m *MemoryStorage) MultiDeleteTx(datatype, realm, user string, tx Tx) (ferr error) { if tx == nil { var err error tx, err = m.fs.Tx(false) if err != nil { return fmt.Errorf("file read lock error: %v", err) } defer func() { err := tx.Finish() if ferr == nil { ferr = err } }() } return m.findPath(datatype, realm, user, MatchAllIDs, nil, func(path, userMatch, idMatch string, p proto.Message) error { return m.DeleteTx(datatype, realm, userMatch, idMatch, LatestRev, tx) }) } func (m *MemoryStorage) Wipe(ctx context.Context, realm string, batchNum, maxEntries int) (int, error) { // Wipe everything, not just for the realm provided or the maxEntries. count := len(m.cache.entityCache) + len(m.cache.historyCache) m.cache = NewStorageCache() m.deleted = make(map[string]bool) m.wipedRealms[realm] = true return count, nil } func (m *MemoryStorage) Tx(update bool) (Tx, error) { select { case m.lock <- true: default: panic("MAYBE BUG: Requested a new TX without the existing TX release.") } m.cache.Backup() return &MemTx{ update: update, ms: m, }, nil } // LockTx returns a storage-wide lock by the given name. Only one such lock should // be requested at a time. If Tx is provided, it must be an update Tx. func (m *MemoryStorage) LockTx(lockName string, minFrequency time.Duration, tx Tx) Tx { now := time.Now() if now.Sub(m.lastLock) < minFrequency { return nil } if tx == nil { var err error tx, err = m.Tx(true) if err != nil { return nil } } m.lastLock = now return tx }<|fim▁hole|> update bool ms *MemoryStorage } // Finish attempts to commit a transaction. func (tx *MemTx) Finish() error { select { case <-tx.ms.lock: default: panic("MAYBE BUG: Releasing a released TX.") } return nil } // Rollback attempts to rollback a transaction. func (tx *MemTx) Rollback() error { tx.ms.cache.Restore() tx.ms.fs = NewFileStorage(tx.ms.service, tx.ms.path) return nil } // MakeUpdate will upgrade a read-only transaction to an update transaction. 
func (tx *MemTx) MakeUpdate() error { tx.update = true return nil } func (tx *MemTx) IsUpdate() bool { return tx.update } func (m *MemoryStorage) fname(datatype, realm, user, id string, rev int64) string { return m.fs.fname(datatype, realm, user, id, rev) } func (m *MemoryStorage) historyName(datatype, realm, user, id string) string { return m.fs.historyName(datatype, realm, user, id) }<|fim▁end|>
// MemTx is the in-memory Tx implementation; its field list continues on the
// far side of the fill-in-the-middle hole marker elsewhere in this chunk.
type MemTx struct {
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ flaskext.sqlalchemy ~~~~~~~~~~~~~~~~~~~ Adds basic SQLAlchemy support to your application. :copyright: (c) 2014 by Armin Ronacher, Daniel Neuhäuser. :license: BSD, see LICENSE for more details. """ from __future__ import with_statement, absolute_import import os import re import sys import time import functools import warnings import sqlalchemy from math import ceil from functools import partial from flask import _request_ctx_stack, abort, has_request_context, request from flask.signals import Namespace from operator import itemgetter from threading import Lock from sqlalchemy import orm, event, inspect from sqlalchemy.orm.exc import UnmappedClassError from sqlalchemy.orm.session import Session as SessionBase from sqlalchemy.engine.url import make_url from sqlalchemy.ext.declarative import declarative_base, DeclarativeMeta from flask_sqlalchemy._compat import iteritems, itervalues, xrange, \ string_types # the best timer function for the platform if sys.platform == 'win32': _timer = time.clock else: _timer = time.time try: from flask import _app_ctx_stack except ImportError: _app_ctx_stack = None __version__ = '2.0' # Which stack should we use? 
_app_ctx_stack is new in 0.9 connection_stack = _app_ctx_stack or _request_ctx_stack _camelcase_re = re.compile(r'([A-Z]+)(?=[a-z0-9])') _signals = Namespace() models_committed = _signals.signal('models-committed') before_models_committed = _signals.signal('before-models-committed') def _make_table(db): def _make_table(*args, **kwargs): if len(args) > 1 and isinstance(args[1], db.Column): args = (args[0], db.metadata) + args[1:] info = kwargs.pop('info', None) or {} info.setdefault('bind_key', None) kwargs['info'] = info return sqlalchemy.Table(*args, **kwargs) return _make_table def _set_default_query_class(d): if 'query_class' not in d: d['query_class'] = BaseQuery def _wrap_with_default_query_class(fn): @functools.wraps(fn) def newfn(*args, **kwargs): _set_default_query_class(kwargs) if "backref" in kwargs: backref = kwargs['backref'] if isinstance(backref, string_types): backref = (backref, {}) _set_default_query_class(backref[1]) return fn(*args, **kwargs) return newfn def _include_sqlalchemy(obj): for module in sqlalchemy, sqlalchemy.orm: for key in module.__all__: if not hasattr(obj, key): setattr(obj, key, getattr(module, key)) # Note: obj.Table does not attempt to be a SQLAlchemy Table class. 
obj.Table = _make_table(obj) obj.relationship = _wrap_with_default_query_class(obj.relationship) obj.relation = _wrap_with_default_query_class(obj.relation) obj.dynamic_loader = _wrap_with_default_query_class(obj.dynamic_loader) obj.event = event class _DebugQueryTuple(tuple): statement = property(itemgetter(0)) parameters = property(itemgetter(1)) start_time = property(itemgetter(2)) end_time = property(itemgetter(3)) context = property(itemgetter(4)) @property def duration(self): return self.end_time - self.start_time def __repr__(self): return '<query statement="%s" parameters=%r duration=%.03f>' % ( self.statement, self.parameters, self.duration ) def _calling_context(app_path): frm = sys._getframe(1) while frm.f_back is not None: name = frm.f_globals.get('__name__') if name and (name == app_path or name.startswith(app_path + '.')): funcname = frm.f_code.co_name return '%s:%s (%s)' % ( frm.f_code.co_filename, frm.f_lineno, funcname ) frm = frm.f_back return '<unknown>' class SignallingSession(SessionBase): """The signalling session is the default session that Flask-SQLAlchemy uses. It extends the default session system with bind selection and modification tracking. If you want to use a different session you can override the :meth:`SQLAlchemy.create_session` function. .. versionadded:: 2.0 """ def __init__(self, db, autocommit=False, autoflush=True, **options): #: The application that this session belongs to. 
self.app = app = db.get_app() track_modifications = app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] bind = options.pop('bind', None) or db.engine if track_modifications is None or track_modifications: _SessionSignalEvents.register(self) SessionBase.__init__( self, autocommit=autocommit, autoflush=autoflush, bind=bind, binds=db.get_binds(self.app), **options ) def get_bind(self, mapper=None, clause=None): # mapper is None if someone tries to just get a connection if mapper is not None: info = getattr(mapper.mapped_table, 'info', {}) bind_key = info.get('bind_key') if bind_key is not None: state = get_state(self.app) return state.db.get_engine(self.app, bind=bind_key) return SessionBase.get_bind(self, mapper, clause) class _SessionSignalEvents(object): @classmethod def register(cls, session): if not hasattr(session, '_model_changes'): session._model_changes = {} event.listen(session, 'before_flush', cls.record_ops) event.listen(session, 'before_commit', cls.record_ops) event.listen(session, 'before_commit', cls.before_commit) event.listen(session, 'after_commit', cls.after_commit) event.listen(session, 'after_rollback', cls.after_rollback) @classmethod def unregister(cls, session): if hasattr(session, '_model_changes'): del session._model_changes event.remove(session, 'before_flush', cls.record_ops) event.remove(session, 'before_commit', cls.record_ops) event.remove(session, 'before_commit', cls.before_commit) event.remove(session, 'after_commit', cls.after_commit) event.remove(session, 'after_rollback', cls.after_rollback) @staticmethod def record_ops(session, flush_context=None, instances=None): try: d = session._model_changes except AttributeError: return for targets, operation in ((session.new, 'insert'), (session.dirty, 'update'), (session.deleted, 'delete')): for target in targets: state = inspect(target) key = state.identity_key if state.has_identity else id(target) d[key] = (target, operation) @staticmethod def before_commit(session): try: d = 
session._model_changes except AttributeError: return if d: before_models_committed.send(session.app, changes=list(d.values())) @staticmethod def after_commit(session): try: d = session._model_changes except AttributeError: return if d: models_committed.send(session.app, changes=list(d.values())) d.clear() @staticmethod def after_rollback(session): try: d = session._model_changes except AttributeError: return d.clear() class _EngineDebuggingSignalEvents(object): """Sets up handlers for two events that let us track the execution time of queries.""" def __init__(self, engine, import_name): self.engine = engine self.app_package = import_name def register(self): event.listen(self.engine, 'before_cursor_execute', self.before_cursor_execute) event.listen(self.engine, 'after_cursor_execute', self.after_cursor_execute) def before_cursor_execute(self, conn, cursor, statement, parameters, context, executemany): if connection_stack.top is not None: context._query_start_time = _timer() def after_cursor_execute(self, conn, cursor, statement, parameters, context, executemany): ctx = connection_stack.top if ctx is not None: queries = getattr(ctx, 'sqlalchemy_queries', None) if queries is None: queries = [] setattr(ctx, 'sqlalchemy_queries', queries) queries.append(_DebugQueryTuple(( statement, parameters, context._query_start_time, _timer(), _calling_context(self.app_package)))) def get_debug_queries(): """In debug mode Flask-SQLAlchemy will log all the SQL queries sent to the database. This information is available until the end of request which makes it possible to easily ensure that the SQL generated is the one expected on errors or in unittesting. If you don't want to enable the DEBUG mode for your unittests you can also enable the query recording by setting the ``'SQLALCHEMY_RECORD_QUERIES'`` config variable to `True`. This is automatically enabled if Flask is in testing mode. 
The value returned will be a list of named tuples with the following attributes: `statement` The SQL statement issued `parameters` The parameters for the SQL statement `start_time` / `end_time` Time the query started / the results arrived. Please keep in mind that the timer function used depends on your platform. These values are only useful for sorting or comparing. They do not necessarily represent an absolute timestamp. `duration` Time the query took in seconds `context` A string giving a rough estimation of where in your application query was issued. The exact format is undefined so don't try to reconstruct filename or function name. """ return getattr(connection_stack.top, 'sqlalchemy_queries', []) class Pagination(object): """Internal helper class returned by :meth:`BaseQuery.paginate`. You can also construct it from any other SQLAlchemy query object if you are working with other libraries. Additionally it is possible to pass `None` as query object in which case the :meth:`prev` and :meth:`next` will no longer work. """ def __init__(self, query, page, per_page, total, items): #: the unlimited query object that was used to create this #: pagination object. self.query = query #: the current page number (1 indexed) self.page = page #: the number of items to be displayed on a page. 
self.per_page = per_page #: the total number of items matching the query self.total = total #: the items for the current page self.items = items @property def pages(self): """The total number of pages""" if self.per_page == 0: pages = 0 else: pages = int(ceil(self.total / float(self.per_page))) return pages def prev(self, error_out=False): """Returns a :class:`Pagination` object for the previous page.""" assert self.query is not None, 'a query object is required ' \ 'for this method to work' return self.query.paginate(self.page - 1, self.per_page, error_out) @property def prev_num(self): """Number of the previous page.""" return self.page - 1 @property def has_prev(self): """True if a previous page exists""" return self.page > 1 def next(self, error_out=False): """Returns a :class:`Pagination` object for the next page.""" assert self.query is not None, 'a query object is required ' \ 'for this method to work' return self.query.paginate(self.page + 1, self.per_page, error_out) @property def has_next(self): """True if a next page exists.""" return self.page < self.pages @property def next_num(self): """Number of the next page""" return self.page + 1 def iter_pages(self, left_edge=2, left_current=2, right_current=5, right_edge=2): """Iterates over the page numbers in the pagination. The four parameters control the thresholds how many numbers should be produced from the sides. Skipped page numbers are represented as `None`. This is how you could render such a pagination in the templates: .. 
sourcecode:: html+jinja {% macro render_pagination(pagination, endpoint) %} <div class=pagination> {%- for page in pagination.iter_pages() %} {% if page %} {% if page != pagination.page %} <a href="{{ url_for(endpoint, page=page) }}">{{ page }}</a> {% else %} <strong>{{ page }}</strong> {% endif %} {% else %} <span class=ellipsis>…</span> {% endif %} {%- endfor %} </div> {% endmacro %} """ last = 0 for num in xrange(1, self.pages + 1): if num <= left_edge or \ (num > self.page - left_current - 1 and \ num < self.page + right_current) or \ num > self.pages - right_edge: if last + 1 != num: yield None yield num last = num class BaseQuery(orm.Query): """The default query object used for models, and exposed as :attr:`~SQLAlchemy.Query`. This can be subclassed and replaced for individual models by setting the :attr:`~Model.query_class` attribute. This is a subclass of a standard SQLAlchemy :class:`~sqlalchemy.orm.query.Query` class and has all the methods of a standard query as well. """ def get_or_404(self, ident): """Like :meth:`get` but aborts with 404 if not found instead of returning `None`. """ rv = self.get(ident) if rv is None: abort(404) return rv def first_or_404(self): """Like :meth:`first` but aborts with 404 if not found instead of returning `None`. """ rv = self.first() if rv is None: abort(404) return rv def paginate(self, page=None, per_page=None, error_out=True): """Returns `per_page` items from page `page`. By default it will abort with 404 if no items were found and the page was larger than 1. This behavor can be disabled by setting `error_out` to `False`. If page or per_page are None, they will be retrieved from the request query. If the values are not ints and ``error_out`` is true, it will abort with 404. If there is no request or they aren't in the query, they default to page 1 and 20 respectively. Returns an :class:`Pagination` object. 
""" if has_request_context(): if page is None: try: page = int(request.args.get('page', 1)) except (TypeError, ValueError): if error_out: abort(404) page = 1 if per_page is None: try: per_page = int(request.args.get('per_page', 20)) except (TypeError, ValueError): if error_out: abort(404) per_page = 20 else: if page is None: page = 1 if per_page is None: per_page = 20 if error_out and page < 1: abort(404) items = self.limit(per_page).offset((page - 1) * per_page).all() if not items and page != 1 and error_out: abort(404) # No need to count if we're on the first page and there are fewer # items than we expected. if page == 1 and len(items) < per_page: total = len(items) else: total = self.order_by(None).count() return Pagination(self, page, per_page, total, items) class _QueryProperty(object): def __init__(self, sa): self.sa = sa def __get__(self, obj, type): try: mapper = orm.class_mapper(type) if mapper: return type.query_class(mapper, session=self.sa.session()) except UnmappedClassError: return None def _record_queries(app): if app.debug: return True rq = app.config['SQLALCHEMY_RECORD_QUERIES'] if rq is not None: return rq return bool(app.config.get('TESTING')) class _EngineConnector(object): def __init__(self, sa, app, bind=None): self._sa = sa self._app = app self._engine = None self._connected_for = None self._bind = bind self._lock = Lock() def get_uri(self): if self._bind is None: return self._app.config['SQLALCHEMY_DATABASE_URI'] binds = self._app.config.get('SQLALCHEMY_BINDS') or () assert self._bind in binds, \ 'Bind %r is not specified. 
Set it in the SQLALCHEMY_BINDS ' \ 'configuration variable' % self._bind return binds[self._bind] def get_engine(self): with self._lock: uri = self.get_uri() echo = self._app.config['SQLALCHEMY_ECHO'] if (uri, echo) == self._connected_for: return self._engine info = make_url(uri) options = {'convert_unicode': True} self._sa.apply_pool_defaults(self._app, options) self._sa.apply_driver_hacks(self._app, info, options) if echo: options['echo'] = True self._engine = rv = sqlalchemy.create_engine(info, **options) if _record_queries(self._app): _EngineDebuggingSignalEvents(self._engine, self._app.import_name).register() self._connected_for = (uri, echo) return rv def _should_set_tablename(bases, d): """Check what values are set by a class and its bases to determine if a tablename should be automatically generated. The class and its bases are checked in order of precedence: the class itself then each base in the order they were given at class definition. Abstract classes do not generate a tablename, although they may have set or inherited a tablename elsewhere. If a class defines a tablename or table, a new one will not be generated. Otherwise, if the class defines a primary key, a new name will be generated. This supports: * Joined table inheritance without explicitly naming sub-models. * Single table inheritance. * Inheriting from mixins or abstract models. 
:param bases: base classes of new class :param d: new class dict :return: True if tablename should be set """ if '__tablename__' in d or '__table__' in d or '__abstract__' in d: return False if any(v.primary_key for v in itervalues(d) if isinstance(v, sqlalchemy.Column)): return True for base in bases: if hasattr(base, '__tablename__') or hasattr(base, '__table__'): return False for name in dir(base): attr = getattr(base, name) if isinstance(attr, sqlalchemy.Column) and attr.primary_key: return True class _BoundDeclarativeMeta(DeclarativeMeta): def __new__(cls, name, bases, d): if _should_set_tablename(bases, d): def _join(match): word = match.group() if len(word) > 1: return ('_%s_%s' % (word[:-1], word[-1])).lower() return '_' + word.lower() d['__tablename__'] = _camelcase_re.sub(_join, name).lstrip('_') return DeclarativeMeta.__new__(cls, name, bases, d) def __init__(self, name, bases, d): bind_key = d.pop('__bind_key__', None) DeclarativeMeta.__init__(self, name, bases, d) if bind_key is not None: self.__table__.info['bind_key'] = bind_key def get_state(app): """Gets the state for the application""" assert 'sqlalchemy' in app.extensions, \ 'The sqlalchemy extension was not registered to the current ' \ 'application. Please make sure to call init_app() first.' return app.extensions['sqlalchemy'] class _SQLAlchemyState(object): """Remembers configuration for the (db, app) tuple.""" def __init__(self, db, app): self.db = db self.app = app self.connectors = {} class Model(object): """Baseclass for custom user models.""" #: the query class used. The :attr:`query` attribute is an instance #: of this class. By default a :class:`BaseQuery` is used. query_class = BaseQuery #: an instance of :attr:`query_class`. Can be used to query the #: database for instances of this model. query = None class SQLAlchemy(object): """This class is used to control the SQLAlchemy integration to one or more Flask applications. 
Depending on how you initialize the object it is usable right away or will attach as needed to a Flask application. There are two usage modes which work very similarly. One is binding the instance to a very specific Flask application:: app = Flask(__name__) db = SQLAlchemy(app) The second possibility is to create the object once and configure the application later to support it:: db = SQLAlchemy() def create_app(): app = Flask(__name__) db.init_app(app) return app The difference between the two is that in the first case methods like :meth:`create_all` and :meth:`drop_all` will work all the time but in the second case a :meth:`flask.Flask.app_context` has to exist. By default Flask-SQLAlchemy will apply some backend-specific settings to improve your experience with them. As of SQLAlchemy 0.6 SQLAlchemy will probe the library for native unicode support. If it detects unicode it will let the library handle that, otherwise do that itself. Sometimes this detection can fail in which case you might want to set `use_native_unicode` (or the ``SQLALCHEMY_NATIVE_UNICODE`` configuration key) to `False`. Note that the configuration key overrides the value you pass to the constructor. This class also provides access to all the SQLAlchemy functions and classes from the :mod:`sqlalchemy` and :mod:`sqlalchemy.orm` modules. So you can declare models like this:: class User(db.Model): username = db.Column(db.String(80), unique=True) pw_hash = db.Column(db.String(80)) You can still use :mod:`sqlalchemy` and :mod:`sqlalchemy.orm` directly, but note that Flask-SQLAlchemy customizations are available only through an instance of this :class:`SQLAlchemy` class. Query classes default to :class:`BaseQuery` for `db.Query`, `db.Model.query_class`, and the default query_class for `db.relationship` and `db.backref`. If you use these interfaces through :mod:`sqlalchemy` and :mod:`sqlalchemy.orm` directly, the default query class will be that of :mod:`sqlalchemy`. .. 
admonition:: Check types carefully Don't perform type or `isinstance` checks against `db.Table`, which emulates `Table` behavior but is not a class. `db.Table` exposes the `Table` interface, but is a function which allows omission of metadata. You may also define your own SessionExtension instances as well when defining your SQLAlchemy class instance. You may pass your custom instances to the `session_extensions` keyword. This can be either a single SessionExtension instance, or a list of SessionExtension instances. In the following use case we use the VersionedListener from the SQLAlchemy versioning examples.:: from history_meta import VersionedMeta, VersionedListener app = Flask(__name__) db = SQLAlchemy(app, session_extensions=[VersionedListener()]) class User(db.Model): __metaclass__ = VersionedMeta username = db.Column(db.String(80), unique=True) pw_hash = db.Column(db.String(80)) The `session_options` parameter can be used to override session options. If provided it's a dict of parameters passed to the session's constructor. .. versionadded:: 0.10 The `session_options` parameter was added. .. versionadded:: 0.16 `scopefunc` is now accepted on `session_options`. It allows specifying a custom function which will define the SQLAlchemy session's scoping. .. versionadded:: 2.1 The `metadata` parameter was added. This allows for setting custom naming conventions among other, non-trivial things. 
""" def __init__(self, app=None, use_native_unicode=True, session_options=None, metadata=None): if session_options is None: session_options = {} session_options.setdefault('scopefunc', connection_stack.__ident_func__) self.use_native_unicode = use_native_unicode self.session = self.create_scoped_session(session_options) self.Model = self.make_declarative_base(metadata) self.Query = BaseQuery self._engine_lock = Lock() self.app = app _include_sqlalchemy(self) if app is not None: self.init_app(app) @property def metadata(self): """Returns the metadata""" return self.Model.metadata def create_scoped_session(self, options=None): """Helper factory method that creates a scoped session. It internally calls :meth:`create_session`. """ if options is None: options = {} scopefunc = options.pop('scopefunc', None) return orm.scoped_session(partial(self.create_session, options), scopefunc=scopefunc) def create_session(self, options): """Creates the session. The default implementation returns a :class:`SignallingSession`. .. versionadded:: 2.0 """ return SignallingSession(self, **options) def make_declarative_base(self, metadata=None): """Creates the declarative base.""" base = declarative_base(cls=Model, name='Model', metadata=metadata, metaclass=_BoundDeclarativeMeta) base.query = _QueryProperty(self) return base def init_app(self, app): """This callback can be used to initialize an application for the use with this database setup. Never use a database in the context of an application not initialized that way or connections will leak. 
""" app.config.setdefault('SQLALCHEMY_DATABASE_URI', 'sqlite://') app.config.setdefault('SQLALCHEMY_BINDS', None) app.config.setdefault('SQLALCHEMY_NATIVE_UNICODE', None) app.config.setdefault('SQLALCHEMY_ECHO', False) app.config.setdefault('SQLALCHEMY_RECORD_QUERIES', None) app.config.setdefault('SQLALCHEMY_POOL_SIZE', None) app.config.setdefault('SQLALCHEMY_POOL_TIMEOUT', None) app.config.setdefault('SQLALCHEMY_POOL_RECYCLE', None) app.config.setdefault('SQLALCHEMY_MAX_OVERFLOW', None) app.config.setdefault('SQLALCHEMY_COMMIT_ON_TEARDOWN', False) track_modifications = app.config.setdefault('SQLALCHEMY_TRACK_MODIFICATIONS', None) if track_modifications is None: warnings.warn('SQLALCHEMY_TRACK_MODIFICATIONS adds significant overhead and will be disabled by default in the future. Set it to True to suppress this warning.') if not hasattr(app, 'extensions'): app.extensions = {} app.extensions['sqlalchemy'] = _SQLAlchemyState(self, app) # 0.9 and later if hasattr(app, 'teardown_appcontext'): teardown = app.teardown_appcontext # 0.7 to 0.8<|fim▁hole|> # Older Flask versions else: if app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN']: raise RuntimeError("Commit on teardown requires Flask >= 0.7") teardown = app.after_request @teardown def shutdown_session(response_or_exc): if app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN']: if response_or_exc is None: self.session.commit() self.session.remove() return response_or_exc def apply_pool_defaults(self, app, options): def _setdefault(optionkey, configkey): value = app.config[configkey] if value is not None: options[optionkey] = value _setdefault('pool_size', 'SQLALCHEMY_POOL_SIZE') _setdefault('pool_timeout', 'SQLALCHEMY_POOL_TIMEOUT') _setdefault('pool_recycle', 'SQLALCHEMY_POOL_RECYCLE') _setdefault('max_overflow', 'SQLALCHEMY_MAX_OVERFLOW') def apply_driver_hacks(self, app, info, options): """This method is called before engine creation and used to inject driver specific hacks into the options. 
The `options` parameter is a dictionary of keyword arguments that will then be used to call the :func:`sqlalchemy.create_engine` function. The default implementation provides some saner defaults for things like pool sizes for MySQL and sqlite. Also it injects the setting of `SQLALCHEMY_NATIVE_UNICODE`. """ if info.drivername.startswith('mysql'): info.query.setdefault('charset', 'utf8') if info.drivername != 'mysql+gaerdbms': options.setdefault('pool_size', 10) options.setdefault('pool_recycle', 7200) elif info.drivername == 'sqlite': pool_size = options.get('pool_size') detected_in_memory = False # we go to memory and the pool size was explicitly set to 0 # which is fail. Let the user know that if info.database in (None, '', ':memory:'): detected_in_memory = True from sqlalchemy.pool import StaticPool options['poolclass'] = StaticPool if 'connect_args' not in options: options['connect_args'] = {} options['connect_args']['check_same_thread'] = False if pool_size == 0: raise RuntimeError('SQLite in memory database with an ' 'empty queue not possible due to data ' 'loss.') # if pool size is None or explicitly set to 0 we assume the # user did not want a queue for this sqlite connection and # hook in the null pool. elif not pool_size: from sqlalchemy.pool import NullPool options['poolclass'] = NullPool # if it's not an in memory database we make the path absolute. if not detected_in_memory: info.database = os.path.join(app.root_path, info.database) unu = app.config['SQLALCHEMY_NATIVE_UNICODE'] if unu is None: unu = self.use_native_unicode if not unu: options['use_native_unicode'] = False @property def engine(self): """Gives access to the engine. If the database configuration is bound to a specific application (initialized with an application) this will always return a database connection. If however the current application is used this might raise a :exc:`RuntimeError` if no application is active at the moment. 
""" return self.get_engine(self.get_app()) def make_connector(self, app, bind=None): """Creates the connector for a given state and bind.""" return _EngineConnector(self, app, bind) def get_engine(self, app, bind=None): """Returns a specific engine. .. versionadded:: 0.12 """ with self._engine_lock: state = get_state(app) connector = state.connectors.get(bind) if connector is None: connector = self.make_connector(app, bind) state.connectors[bind] = connector return connector.get_engine() def get_app(self, reference_app=None): """Helper method that implements the logic to look up an application. """ if reference_app is not None: return reference_app if self.app is not None: return self.app ctx = connection_stack.top if ctx is not None: return ctx.app raise RuntimeError('application not registered on db ' 'instance and no application bound ' 'to current context') def get_tables_for_bind(self, bind=None): """Returns a list of all tables relevant for a bind.""" result = [] for table in itervalues(self.Model.metadata.tables): if table.info.get('bind_key') == bind: result.append(table) return result def get_binds(self, app=None): """Returns a dictionary with a table->engine mapping. This is suitable for use of sessionmaker(binds=db.get_binds(app)). 
""" app = self.get_app(app) binds = [None] + list(app.config.get('SQLALCHEMY_BINDS') or ()) retval = {} for bind in binds: engine = self.get_engine(app, bind) tables = self.get_tables_for_bind(bind) retval.update(dict((table, engine) for table in tables)) return retval def _execute_for_all_tables(self, app, bind, operation, skip_tables=False): app = self.get_app(app) if bind == '__all__': binds = [None] + list(app.config.get('SQLALCHEMY_BINDS') or ()) elif isinstance(bind, string_types) or bind is None: binds = [bind] else: binds = bind for bind in binds: extra = {} if not skip_tables: tables = self.get_tables_for_bind(bind) extra['tables'] = tables op = getattr(self.Model.metadata, operation) op(bind=self.get_engine(app, bind), **extra) def create_all(self, bind='__all__', app=None): """Creates all tables. .. versionchanged:: 0.12 Parameters were added """ self._execute_for_all_tables(app, bind, 'create_all') def drop_all(self, bind='__all__', app=None): """Drops all tables. .. versionchanged:: 0.12 Parameters were added """ self._execute_for_all_tables(app, bind, 'drop_all') def reflect(self, bind='__all__', app=None): """Reflects tables from the database. .. versionchanged:: 0.12 Parameters were added """ self._execute_for_all_tables(app, bind, 'reflect', skip_tables=True) def __repr__(self): app = None if self.app is not None: app = self.app else: ctx = connection_stack.top if ctx is not None: app = ctx.app return '<%s engine=%r>' % ( self.__class__.__name__, app and app.config['SQLALCHEMY_DATABASE_URI'] or None )<|fim▁end|>
        # NOTE(review): FIM-completion fragment of SQLAlchemy.init_app --
        # Flask 0.7-0.8 fallback: no app-context teardown yet, so hook the
        # per-request teardown instead.
        elif hasattr(app, 'teardown_request'):
            teardown = app.teardown_request
<|file_name|>build.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python

# Build a bunch of SRPMs
# (Python 2 codebase: print statements, dict.iteritems, builtin reduce.)

import argparse
import getopt
import sys
import os
import glob
import subprocess
import shutil
import rpm
import hashlib
import time

from planex.globals import (BUILD_ROOT_DIR, SRPMS_DIR, RPMS_DIR, BUILD_DIR,
                            SPECS_GLOB)
from planex.util import (bcolours, print_col, run, dump_cmds)

# Scratch directory where rpmbuild/mock drop their results.
TMP_RPM_PATH = "/tmp/RPMS"
RPM_TOP_DIR = os.path.join(os.getcwd(), BUILD_ROOT_DIR)
# Root of the build cache; overridable via --cache-dir.
CACHE_DIR = "rpmcache"
DEFAULT_ARCH = "x86_64"


def doexec(args, inputtext=None, check=True):
    """Execute a subprocess, then return its return code, stdout and stderr"""
    # HOME is overridden so rpm picks up the .rpmmacros written under
    # RPM_TOP_DIR instead of the real user's.
    myenv = os.environ.copy()
    myenv['HOME'] = RPM_TOP_DIR
    return run(args, check=check, env=myenv, inputtext=inputtext)


def get_srpm_info(srpm):
    """Install the SRPM and parse its spec file.

    Returns a dict with keys: deps (BuildRequires from the source header),
    arch (module DEFAULT_ARCH), packages (list of {'name': ...} per built
    subpackage), srcrpm (the SRPM path) and spec (raw spec file text).
    Side effect: removes any previously unpacked spec files matching
    SPECS_GLOB before installing the new one.
    """
    for spec_path in glob.glob(SPECS_GLOB):
        os.unlink(spec_path)
    doexec(["rpm", "-i", srpm])
    myspecfile = glob.glob(SPECS_GLOB)[0]
    spec = rpm.ts().parseSpec(myspecfile)
    info = {}
    info['deps'] = spec.sourceHeader["requires"]
    info['arch'] = DEFAULT_ARCH
    info['packages'] = [{'name':p.header['name']} for p in spec.packages]
    info['srcrpm'] = srpm
    content_file = open(myspecfile,'r')
    info['spec'] = content_file.read()
    content_file.close()
    return info


def extract_target(srpm_infos, srpm_filename):
    """
    Given a list of srpm_info and an srpm filename, return the target arch
    """
    # Returns None implicitly if the SRPM is not in the list.
    for srpm_info in srpm_infos:
        if srpm_info["srcrpm"] == srpm_filename:
            return srpm_info["arch"]


def get_package_to_srpm_map(srpm_info):
    """Map each binary subpackage name to the SRPM that builds it."""
    pkg_map = {}
    for srpm in srpm_info:
        for package in srpm['packages']:
            pkg_map[package['name']] = srpm['srcrpm']
    return pkg_map


def get_deps(srpm_infos):
    """Build the SRPM-level dependency graph.

    Returns {srpm filename: set of srpm filenames it depends on}; external
    (non-locally-built) requirements are dropped.
    """
    p_to_s_map = get_package_to_srpm_map(srpm_infos)
    deps = {}
    for srpm_info in srpm_infos:
        deps[srpm_info['srcrpm']] = set()
        for dep in srpm_info['deps']:
            if dep in p_to_s_map:
                deps[srpm_info['srcrpm']].add(p_to_s_map[dep])
    return deps


def toposort2(data):
    # Ignore self dependencies.
for key, val in data.items(): val.discard(key) # Find all items that don't depend on anything. extra_items_in_deps = reduce(set.union, data.itervalues()) - set(data.iterkeys()) # Add empty dependences where needed extra = {} for item in extra_items_in_deps: extra[item] = set() data.update(extra) result = [] while True: ordered = set(item for item, dep in data.iteritems() if not dep) if not ordered: break result.append(ordered) newdata = {} for item, dep in data.iteritems(): if item not in ordered: newdata[item] = (dep - ordered) data = newdata assert not data, ("Cyclic dependencies exist among these items:\n%s" % '\n'.join(repr(x) for x in data.iteritems())) return result def write_rpmmacros(): rpmmacros = open(os.path.join(RPM_TOP_DIR, '.rpmmacros'), 'w') rpmmacros.write('%%_topdir %s\n' % RPM_TOP_DIR) rpmmacros.write('%%_rpmdir %s\n' % TMP_RPM_PATH) rpmmacros.close() def find_pkg(srpm_infos, srpm): for srpm_info in srpm_infos: if srpm_info["srcrpm"] == srpm: return srpm_info def get_pkg_ddeps(deps, srpm): if srpm in deps: ddeps = [] for dep in deps[srpm]: ddeps.append(dep) for ddep in get_pkg_ddeps(deps, dep): ddeps.append(ddep) return ddeps else: return [] def get_srpm_hash(srpm_infos, external, deps, srpm): allpkgs = get_pkg_ddeps(deps, srpm) allpkgs.append(srpm) allpkgs.sort() srpm_hash = hashlib.md5() for mypkg in allpkgs: srpm_info = find_pkg(srpm_infos, mypkg) srpm_hash.update(srpm_info['spec']) srpm_hash.update(external) return srpm_hash.hexdigest() def get_external_hash(external_deps): external_deps.sort() external_hash = hashlib.md5() for dep in external_deps: with open(dep, "rb") as f: for block in iter(lambda: f.read(1024), ""): external_hash.update(block) return external_hash.hexdigest() def get_cache_dir(srpm_infos, external, deps, srpm): if not os.path.exists(CACHE_DIR): return None myhash = get_srpm_hash(srpm_infos, external, deps, srpm) dst_dir = os.path.join(CACHE_DIR, myhash) return dst_dir def need_to_build(srpm_infos, external, deps, srpm): 
dst_dir = get_cache_dir(srpm_infos, external, deps, srpm) if not dst_dir: return True return (not os.path.exists(dst_dir)) def get_new_number(srpm, cache_dir):<|fim▁hole|> if cache_dir == None: return 1 latest_path = os.path.join(CACHE_DIR, srpm, "latest") if os.path.exists(latest_path): latest = int(os.readlink(latest_path)) os.remove(latest_path) build_number = latest+1 else: try: os.makedirs(os.path.join(CACHE_DIR, srpm)) except os.error: pass build_number = 1 os.symlink("%d" % build_number, latest_path) num_file_path = os.path.join(CACHE_DIR, srpm, "%d" % build_number) num_file = open(num_file_path, 'w') num_file.write(cache_dir) num_file.close() return build_number def createrepo(): doexec(["createrepo", "--update", RPMS_DIR]) def do_build(srpm, target, build_number, use_mock, xs_build_sys): if xs_build_sys: mock = "/usr/bin/mock" else: mock = "mock" if use_mock: cmd = [mock, "--configdir=mock", "--resultdir=%s" % TMP_RPM_PATH, "--rebuild", "--target", target, # "--enable-plugin=tmpfs", "--define", "extrarelease .%d" % build_number, "-v", srpm] if not xs_build_sys: cmd = ["sudo"] + cmd + ["--disable-plugin=package_state"] else: cmd = ["rpmbuild", "--rebuild", "-v", "%s" % srpm, "--target", target, "--define", "_build_name_fmt %%{NAME}-%%{VERSION}-%%{RELEASE}.%%{ARCH}.rpm"] doexec(cmd) srpms = glob.glob(os.path.join(TMP_RPM_PATH, "*.src.rpm")) for srpm in srpms: print_col(bcolours.WARNING,"Removing SRPM %s" % srpm) os.unlink(srpm) return glob.glob(os.path.join(TMP_RPM_PATH, "*.rpm")) def build_srpm(srpm, srpm_infos, external, deps, use_mock, xs_build_sys): cache_dir = get_cache_dir(srpm_infos, external, deps, srpm) if(need_to_build(srpm_infos, external, deps, srpm)): target = extract_target(srpm_infos, srpm) build_number = get_new_number(srpm, cache_dir) print_col(bcolours.OKGREEN, "CACHE MISS: Building %s (%d)" % (srpm, build_number)) createrepo() pkgs = do_build(srpm, target, build_number, use_mock, xs_build_sys) if cache_dir: try: 
os.makedirs(cache_dir+".tmp") print "Archiving result in cache" for pkg in pkgs: shutil.copy(pkg, cache_dir+".tmp") os.rename(cache_dir+".tmp",cache_dir) except: print bgcolors.WARNING + "FAILED TO PUT BUILD RESULTS INTO CACHE" else: print_col(bcolours.OKGREEN,"CACHE HIT: Not building %s" % srpm) pkgs = glob.glob(os.path.join(cache_dir, "*.rpm")) for pkg in pkgs: shutil.copy(pkg, TMP_RPM_PATH) mytime=time.time() os.utime(cache_dir,(mytime,mytime)) pkgs = glob.glob(os.path.join(TMP_RPM_PATH, "*.rpm")) if not use_mock: result = doexec(["rpm", "-U", "--force", "--nodeps"] + pkgs, check=False) if result['rc'] != 0: print "Ignoring failure installing rpm batch: %s" % pkgs print result['stderr'] for pkg in pkgs: shutil.move(pkg, RPMS_DIR) def parse_cmdline(argv=None): """ Parse command line options """ parser = argparse.ArgumentParser() parser.add_argument( '--no-mock', help="Don't use mock", action='store_true') parser.add_argument( '--xs-build-sys', help='Assume XenServer build system', action='store_true') parser.add_argument('--i686', help='Build for i686', action='store_true') parser.add_argument('--external-dependencies', help='External dependencies to include in the package hash', metavar="file", nargs="+", default=[]) parser.add_argument('--cache-dir', help='Root directory of the RPM cache', metavar="directory", default=None) return parser.parse_args(argv) def main(): global DEFAULT_ARCH args = parse_cmdline() use_mock = not args.no_mock xs_build_sys = args.xs_build_sys if args.i686: DEFAULT_ARCH = "i686" if args.cache_dir: CACHE_DIR = args.cache_dir if not os.path.isdir(SRPMS_DIR) or not os.listdir(SRPMS_DIR): print ("Error: No srpms found in %s; First run configure.py." 
% SRPMS_DIR) sys.exit(1) packages = glob.glob(os.path.join(SRPMS_DIR, '*.src.rpm')) write_rpmmacros() srpm_infos = [get_srpm_info(pkg) for pkg in packages] deps = get_deps(srpm_infos) order = toposort2(deps) external = get_external_hash(args.external_dependencies) for path in (TMP_RPM_PATH, BUILD_DIR, RPMS_DIR): if os.path.exists(path): print "Cleaning out directory: %s" % path shutil.rmtree(path) os.makedirs(path) os.chmod(path, 0777) createrepo() for batch in order: for srpm in batch: build_srpm(srpm, srpm_infos, external, deps, use_mock, xs_build_sys) createrepo() if __name__ == '__main__': main()<|fim▁end|>
<|file_name|>sensor.py<|end_file_name|><|fim▁begin|>"""Sensor to collect the reference daily prices of electricity ('PVPC') in Spain.""" import logging from random import randint from typing import Optional from aiopvpc import PVPCData from homeassistant import config_entries from homeassistant.const import CONF_NAME, ENERGY_KILO_WATT_HOUR from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.event import async_call_later, async_track_time_change from homeassistant.helpers.restore_state import RestoreEntity import homeassistant.util.dt as dt_util from .const import ATTR_TARIFF _LOGGER = logging.getLogger(__name__) ATTR_PRICE = "price" ICON = "mdi:currency-eur" UNIT = f"€/{ENERGY_KILO_WATT_HOUR}" _DEFAULT_TIMEOUT = 10 async def async_setup_entry( hass: HomeAssistant, config_entry: config_entries.ConfigEntry, async_add_entities ): """Set up the electricity price sensor from config_entry.""" name = config_entry.data[CONF_NAME] pvpc_data_handler = PVPCData( tariff=config_entry.data[ATTR_TARIFF], local_timezone=hass.config.time_zone, websession=async_get_clientsession(hass), logger=_LOGGER, timeout=_DEFAULT_TIMEOUT, ) async_add_entities( [ElecPriceSensor(name, config_entry.unique_id, pvpc_data_handler)], False ) class ElecPriceSensor(RestoreEntity): """Class to hold the prices of electricity as a sensor.""" unit_of_measurement = UNIT icon = ICON should_poll = False def __init__(self, name, unique_id, pvpc_data_handler): """Initialize the sensor object.""" self._name = name self._unique_id = unique_id self._pvpc_data = pvpc_data_handler self._num_retries = 0 self._hourly_tracker = None self._price_tracker = None async def async_will_remove_from_hass(self) -> None: """Cancel listeners for sensor updates.""" self._hourly_tracker() self._price_tracker() async def async_added_to_hass(self): """Handle entity which will be added.""" await super().async_added_to_hass() state = 
await self.async_get_last_state() if state: self._pvpc_data.state = state.state # Update 'state' value in hour changes self._hourly_tracker = async_track_time_change( self.hass, self.update_current_price, second=[0], minute=[0] ) # Update prices at random time, 2 times/hour (don't want to upset API) random_minute = randint(1, 29) mins_update = [random_minute, random_minute + 30] self._price_tracker = async_track_time_change( self.hass, self.async_update_prices, second=[0], minute=mins_update ) _LOGGER.debug( "Setup of price sensor %s (%s) with tariff '%s', " "updating prices each hour at %s min", self.name, self.entity_id, self._pvpc_data.tariff, mins_update, ) await self.async_update_prices(dt_util.utcnow()) self.update_current_price(dt_util.utcnow()) @property def unique_id(self) -> Optional[str]: """Return a unique ID.""" return self._unique_id @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._pvpc_data.state @property def available(self) -> bool: """Return True if entity is available.""" return self._pvpc_data.state_available @property def device_state_attributes(self): """Return the state attributes.""" return self._pvpc_data.attributes @callback def update_current_price(self, now): """Update the sensor state, by selecting the current price for this hour.""" self._pvpc_data.process_state_and_attributes(now) self.async_write_ha_state() async def async_update_prices(self, now):<|fim▁hole|> """Update electricity prices from the ESIOS API.""" prices = await self._pvpc_data.async_update_prices(now) if not prices and self._pvpc_data.source_available: self._num_retries += 1 if self._num_retries > 2: _LOGGER.warning( "%s: repeated bad data update, mark component as unavailable source", self.entity_id, ) self._pvpc_data.source_available = False return retry_delay = 2 * self._num_retries * self._pvpc_data.timeout _LOGGER.debug( "%s: Bad update[retry:%d], will try 
again in %d s", self.entity_id, self._num_retries, retry_delay, ) async_call_later(self.hass, retry_delay, self.async_update_prices) return if not prices: _LOGGER.debug("%s: data source is not yet available", self.entity_id) return self._num_retries = 0 if not self._pvpc_data.source_available: self._pvpc_data.source_available = True _LOGGER.warning("%s: component has recovered data access", self.entity_id) self.update_current_price(now)<|fim▁end|>
<|file_name|>test_actor.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright (c) 2015 Ericsson AB # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import pytest from mock import Mock from calvin.tests import DummyNode from calvin.runtime.north.actormanager import ActorManager from calvin.runtime.south.endpoint import LocalOutEndpoint, LocalInEndpoint from calvin.actor.actor import Actor pytestmark = pytest.mark.unittest def create_actor(node): actor_manager = ActorManager(node) actor_id = actor_manager.new('std.Identity', {}) actor = actor_manager.actors[actor_id] actor._calvinsys = Mock() return actor @pytest.fixture def actor(): return create_actor(DummyNode()) @pytest.mark.parametrize("port_type,port_name,port_property,value,expected", [ ("invalid", "", "", "", False), ("in", "missing", "", "", False), ("out", "missing", "", "", False), ("out", "token", "missing", "", False), ("in", "token", "missing", "", False), ("out", "token", "name", "new_name", True), ("out", "token", "name", "new_name", True), ]) def test_set_port_property(port_type, port_name, port_property, value, expected): assert actor().set_port_property(port_type, port_name, port_property, value) is expected @pytest.mark.parametrize("inport_ret_val,outport_ret_val,expected", [ (False, False, False), (False, True, False), (True, False, False), (True, True, True), ]) def test_did_connect(actor, inport_ret_val, outport_ret_val, expected): for port in actor.inports.values(): port.is_connected = 
Mock(return_value=inport_ret_val) for port in actor.outports.values(): port.is_connected = Mock(return_value=outport_ret_val) actor.fsm = Mock() actor.did_connect(None) if expected: actor.fsm.transition_to.assert_called_with(Actor.STATUS.ENABLED) assert actor._calvinsys.scheduler_wakeup.called else: assert not actor.fsm.transition_to.called assert not actor._calvinsys.scheduler_wakeup.called @pytest.mark.parametrize("inport_ret_val,outport_ret_val,expected", [ (True, True, False), (True, False, False), (False, True, False), (False, False, True), ]) def test_did_disconnect(actor, inport_ret_val, outport_ret_val, expected): for port in actor.inports.values(): port.is_connected = Mock(return_value=inport_ret_val) for port in actor.outports.values(): port.is_connected = Mock(return_value=outport_ret_val) actor.fsm = Mock() actor.did_disconnect(None) if expected: actor.fsm.transition_to.assert_called_with(Actor.STATUS.READY) else: assert not actor.fsm.transition_to.called def test_enabled(actor): actor.enable() assert actor.enabled() actor.disable() assert not actor.enabled() def test_connections(): node = DummyNode() node.id = "node_id" actor = create_actor(node) inport = actor.inports['token'] outport = actor.outports['token'] port = Mock() port.id = "x" peer_port = Mock() peer_port.id = "y" inport.attach_endpoint(LocalInEndpoint(port, peer_port)) outport.attach_endpoint(LocalOutEndpoint(port, peer_port)) assert actor.connections(node) == { 'actor_id': actor.id, 'actor_name': actor.name, 'inports': {inport.id: (node, "y")}, 'outports': {outport.id: [(node, "y")]} } def test_state(actor): inport = actor.inports['token'] outport = actor.outports['token'] correct_state = { '_component_members': set([actor.id]), '_deployment_requirements': [],<|fim▁hole|> 'dump': False, 'id': actor.id, 'inports': {'token': {'fifo': {'N': 5, 'fifo': [{'data': 0, 'type': 'Token'}, {'data': 0, 'type': 'Token'}, {'data': 0, 'type': 'Token'}, {'data': 0, 'type': 'Token'}, {'data': 0, 'type': 
'Token'}], 'read_pos': {inport.id: 0}, 'readers': [inport.id], 'tentative_read_pos': {inport.id: 0}, 'write_pos': 0}, 'id': inport.id, 'name': 'token'}}, 'name': '', 'outports': {'token': {'fanout': 1, 'fifo': {'N': 5, 'fifo': [{'data': 0, 'type': 'Token'}, {'data': 0, 'type': 'Token'}, {'data': 0, 'type': 'Token'}, {'data': 0, 'type': 'Token'}, {'data': 0, 'type': 'Token'}], 'read_pos': {}, 'readers': [], 'tentative_read_pos': {}, 'write_pos': 0}, 'id': outport.id, 'name': 'token'}}} test_state = actor.state() for k, v in correct_state.iteritems(): # Read state use list to support JSON serialization if isinstance(v, set): assert set(test_state[k]) == v else: assert test_state[k] == v @pytest.mark.parametrize("prev_signature,new_signature,expected", [ (None, "new_val", "new_val"), ("old_val", "new_val", "old_val") ]) def test_set_signature(actor, prev_signature, new_signature, expected): actor.signature_set(prev_signature) actor.signature_set(new_signature) assert actor._signature == expected def test_component(actor): actor.component_add(1) assert 1 in actor.component_members() actor.component_add([2, 3]) assert 2 in actor.component_members() assert 3 in actor.component_members() actor.component_remove(1) assert 1 not in actor.component_members() actor.component_remove([2, 3]) assert 2 not in actor.component_members() assert 3 not in actor.component_members() def test_requirements(actor): assert actor.requirements_get() == [] actor.requirements_add([1, 2, 3]) assert actor.requirements_get() == [1, 2, 3] actor.requirements_add([4, 5]) assert actor.requirements_get() == [4, 5] actor.requirements_add([6, 7], extend=True) assert actor.requirements_get() == [4, 5, 6, 7]<|fim▁end|>
'_managed': set(['dump', '_signature', 'id', '_deployment_requirements', 'name', 'credentials']), '_signature': None,
<|file_name|>person.java<|end_file_name|><|fim▁begin|>package BubbleSort2D; public class person { String name; int age; boolean sortme = false; public person(String n, int a) { name = n; age = a; } public String toString(){ return name + " " + Integer.toString(age); <|fim▁hole|>}<|fim▁end|>
}
<|file_name|>StringEvent.java<|end_file_name|><|fim▁begin|>package com.code.constant; /** * Created by niu on 2017/8/17.<|fim▁hole|> */ public class StringEvent { //网络状态改变 public static String NET_STATE_CHANGE = "net_state_change"; }<|fim▁end|>
<|file_name|>widgets.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from django.forms import widgets from django.template.loader import render_to_string from django.utils.translation import gettext_lazy as _ from .conf import settings class PlacesWidget(widgets.MultiWidget): template_name = 'places/widgets/places.html' def __init__(self, attrs=None): _widgets = ( widgets.TextInput( attrs={'data-geo': 'formatted_address', 'data-id': 'map_place'} ), widgets.TextInput( attrs={ 'data-geo': 'lat', 'data-id': 'map_latitude', 'placeholder': _('Latitude'), } ), widgets.TextInput( attrs={ 'data-geo': 'lng', 'data-id': 'map_longitude', 'placeholder': _('Longitude'), } ), ) super(PlacesWidget, self).__init__(_widgets, attrs) def decompress(self, value): if isinstance(value, str): return value.rsplit(',') if value: return [value.place, value.latitude, value.longitude] return [None, None] def get_context(self, name, value, attrs): context = super(PlacesWidget, self).get_context(name, value, attrs) context['map_widget_height'] = settings.MAP_WIDGET_HEIGHT context['map_options'] = settings.MAP_OPTIONS context['marker_options'] = settings.MARKER_OPTIONS return context<|fim▁hole|> settings.MAPS_API_KEY ), 'places/places.js', ) css = {'all': ('places/places.css',)}<|fim▁end|>
class Media: js = ( '//maps.googleapis.com/maps/api/js?key={}&libraries=places'.format(
<|file_name|>r3msg.rs<|end_file_name|><|fim▁begin|>extern crate r3bar; extern crate i3ipc; use r3bar::r3ipc::{R3Msg}; use std::env; fn help() { println!("usage: msgtype <integer> msgtype number - see r3ipc documentation. payload [string] msg arguments if any."); } fn main() { let args: Vec<String> = env::args().collect(); let empty_payload = "".to_string(); match args.len() { // no arguments passed 1 => { help(); }, l @ 2...3 => { let cmd = &args[1]; let payload; if l == 3 { payload = &args[2]; } else { payload = &empty_payload; } match cmd.parse::<u32>() { Ok(msgtype) => send(msgtype, payload), Err(_) => return help(), }<|fim▁hole|> // all the other cases _ => { // show a help message help(); } } } fn send(msgtype: u32, payload: &str) { match R3Msg::new(None).unwrap().send_msg(msgtype, payload) { Ok(i3ipc::reply::Command{outcomes}) => println!("{:?}", outcomes), Err(e) => println!("{}", e) } }<|fim▁end|>
},
<|file_name|>authors.js<|end_file_name|><|fim▁begin|>$(function() { var resultCache = []; var allItems = $(".index .item"); $("#filter_text").keyup(function(){ var searchString = $(this).val(); allItems.addClass('visibility_hidden'); var items; if (resultCache[searchString] === undefined) { items = $(".index .item .text").textContains(searchString).parent().parent(); resultCache[searchString] = items; } else { items = resultCache[searchString]; } items.removeClass('visibility_hidden'); $("#numberFiltered").text(items.length); $("#numberFilteredText").text(items.length == 1 ? $T("author") : $T("authors")); }); $(".material_icon").each(function() { $(this).qtip({ style: { classes: 'material_tip'<|fim▁hole|> show: { event: 'click' }, hide: { event: 'unfocus' }, position: { my: 'top right', at: 'bottom left' } }); }); });<|fim▁end|>
}, content: { text: $(this).siblings('.material_list') },
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from .group_analysis import create_fsl_flame_wf, \ get_operation<|fim▁hole|> __all__ = ['create_fsl_flame_wf', \ 'get_operation']<|fim▁end|>
<|file_name|>update_tournaments.py<|end_file_name|><|fim▁begin|>from src.interfacing.ogs.connect import Authentication import codecs import sys import os from time import sleep def loadList(pNameFile): iList = [] with codecs.open(pNameFile, "r", "utf-8") as f: for line in f: iList.append(line) return iList if __name__ == "__main__": a = Authentication("Kuksu League", "", testing=False); iGroupNames = loadList("E:/Project/OGS/OGS-League/group_names.txt"); iGroupIDs = loadList("E:/Project/OGS/OGS-League/group_ids.txt"); nGroups = len(iGroupNames); for i in range(nGroups): iGroupNames[i] = iGroupNames[i].replace("\r\n", ""); iGroupNames[i] = iGroupNames[i].replace("\n", ""); iGroupIDs[i] = iGroupIDs[i].replace("\r\n", ""); iGroupIDs[i] = iGroupIDs[i].replace("\n", ""); iGroupIDs[i] = int(iGroupIDs[i]); iDescription = """ Kuksu Main Title Tournament 9th Cycle Group %s Title Holder: <a href='https://online-go.com/user/view/35184/vitality'>vitality (5d)</a> Previous cycles: <table style="text-align:center;" border='2'> <tr><th rowspan=2>Cycle</th><td colspan=3><b>Title Match</b></td><td colspan=3><b>Title Tournament</b></td></tr> <tr> <th>Winner</th><th>Score</th><th>Runner-up</th> <th>Winner<img src='https://a00ce0086bda2213e89f-570db0116da8eb5fdc3ce95006e46d28.ssl.cf1.rackcdn.com/4.2/img/trophies/gold_title_19.png' alt='Gold'></img></th> <th>Runner-up<img src='https://a00ce0086bda2213e89f-570db0116da8eb5fdc3ce95006e46d28.ssl.cf1.rackcdn.com/4.2/img/trophies/silver_title_19.png' alt='Silver'></img></th> <th>3rd Place<img src='https://a00ce0086bda2213e89f-570db0116da8eb5fdc3ce95006e46d28.ssl.cf1.rackcdn.com/4.2/img/trophies/bronze_title_19.png' alt='Bronze'></img></th> </tr> <tr> <td><a href='https://online-go.com/tournament/2375'>1</a></td> <td><b>luke</b></td><td></td><td></td> <td><b>luke (2d)</b></td><td>davos</td><td>gomad361</td> </tr> <tr> <td><a href='https://online-go.com/tournament/2384'>2</a></td> <td><b>gomad361</b></td><td>3-2</td><td>luke</td> <td><b>luke 
(2d)</b></td><td>gomad361</td><td>hotspur</td> </tr> <tr> <td><a href='https://online-go.com/tournament/2391'>3</a></td> <td><b>Uberdude</b></td><td>&lowast;</td><td>gomad361</td> <td><b>Uberdude (6d)</b></td><td>KyuT</td><td>marigo</td> </tr> <tr> <td><a href='https://online-go.com/tournament/2406'>4</a></td> <td><b>Uberdude</b></td><td>5-0</td><td>KyuT</td> <td><b>KyuT (4d)</b></td><td>quiller</td><td>morituri</td> </tr> <tr> <td><a href='https://online-go.com/tournament/2424'>5</a></td> <td><b>Uberdude</b></td><td>5-0</td><td>gomad361</td> <td><b>gomad361 (2d)</b></td><td>morituri</td><td>betterlife</td> </tr> <tr> <td><a href='https://online-go.com/tournament/2439'>6</a></td> <td><b>Uberdude</b></td><td>5-0</td><td>Elin</td> <td><b>Elin (3d)</b></td><td>gomad361</td><td>morituri</td> </tr> <tr> <td><a href='https://online-go.com/tournament/2460'>7</a></td> <td><b>Uberdude</b></td><td>3-2</td><td>vitality</td> <td><b>vitality (5d)</b></td><td>Elin</td><td>gomad361</td> </tr> <tr> <td><a href='https://online-go.com/tournament/2475'>8</a></td> <td><b>vitality</b></td><td>&lowast;</td><td>Uberdude</td> <td><b>vitality (5d)</b></td><td>nrx</td><td>gojohn</td> </tr> <tr> <td rowspan=5><a href='#'>9</a></td> <td rowspan=5 colspan=3></td> <td colspan=3> <a href='https://online-go.com/tournament/12653'>[A]</a> </td> </tr> <tr> <td colspan=3> <a href='https://online-go.com/tournament/12654'>[B1]</a> <a href='https://online-go.com/tournament/12655'>[B2]</a> </td> </tr> <tr> <td colspan=3> <a href='https://online-go.com/tournament/12656'>[C1]</a> <a href='https://online-go.com/tournament/12657'>[C2]</a> <a href='https://online-go.com/tournament/12658'>[C3]</a> <a href='https://online-go.com/tournament/12659'>[C4]</a> </td> </tr> <tr> <td colspan=3> <a href='https://online-go.com/tournament/12660'>[D1]</a> <a href='https://online-go.com/tournament/12661'>[D2]</a> <a href='https://online-go.com/tournament/12662'>[D3]</a> <a 
href='https://online-go.com/tournament/12663'>[D4]</a> <a href='https://online-go.com/tournament/12664'>[D5]</a> <a href='https://online-go.com/tournament/12665'>[D6]</a> <a href='https://online-go.com/tournament/12666'>[D7]</a> <a href='https://online-go.com/tournament/12667'>[D8]</a> </td> </tr> <tr> <td colspan=3> <a href='https://online-go.com/tournament/12668'>[E1]</a> <a href='https://online-go.com/tournament/12669'>[E2]</a> <a href='https://online-go.com/tournament/12670'>[E3]</a> <a href='https://online-go.com/tournament/12671'>[E4]</a> <a href='https://online-go.com/tournament/12672'>[E5]</a> <a href='https://online-go.com/tournament/12673'>[E6]</a> </td> </tr> </table> &lowast; means the games were finished by timeout or retiring. Rules could be found <a href='https://forums.online-go.com/t/league-format-kuksu-title-tournament-rules-and-discussion/5191'>here</a>. """ % iGroupNames[i]; a.put(['tournaments', iGroupIDs[i]], {"description": iDescription }); print("Tournament %s with id %d updated.\n" % (iGroupNames[i], iGroupIDs[i])); sleep(2); # tourney id 7370 """ iTournament = a.post(['tournaments'],{ "id":12650, "name":"Test Tournament 2", "group":515, "tournament_type":"roundrobin", "description":"<b>Test 3</b>", "board_size":19, "handicap":0, #default -1 for auto "time_start": "2015-12-01T00:00:00Z", "time_control_parameters":{ "time_control":"fischer", "initial_time":604800, "max_time":604800, "time_increment":86400 }, "rules": "korean", "exclusivity": "invite", # open, group. default "exclude_provisional": False, # default "auto_start_on_max": True, # default "analysis_enabled": True, #default "settings":{ "maximum_players":10, }, "players_start": 6, #default "first_pairing_method": "slide", #slaughter, random, slide, strength . default "subsequent_pairing_method": "slide", # default "min_ranking":0, "max_ranking":36 }); #print("Hello"); print(iTournament["id"]); """<|fim▁hole|>#print "Tournament %s is created." 
% iTournament["id"]; # r= a.post (['tournaments', 12642, 'players'], app_param= {"player_id":40318} ) # print (r)<|fim▁end|>
<|file_name|>test_textbox15.py<|end_file_name|><|fim▁begin|>############################################################################### # # Tests for XlsxWriter. # # SPDX-License-Identifier: BSD-2-Clause # Copyright (c), 2013-2022, John McNamara, [email protected] # from ..excel_comparison_test import ExcelComparisonTest from ...workbook import Workbook class TestCompareXLSXFiles(ExcelComparisonTest): """ Test file created by XlsxWriter against a file created by Excel. """ def setUp(self): self.set_filename('textbox15.xlsx') def test_create_file(self): """Test the creation of a simple XlsxWriter file with textbox(s).""" workbook = Workbook(self.got_filename) worksheet = workbook.add_worksheet() worksheet.insert_textbox('E9', 'This is some text', {'align': {'horizontal': 'center'}})<|fim▁hole|> workbook.close() self.assertExcelEqual()<|fim▁end|>
<|file_name|>opt_name.rs<|end_file_name|><|fim▁begin|>//! Test optional prefix. extern crate flame; extern crate flamer; use flamer::{flame, noflame}; #[flame("top")] fn a() { let l = Lower {}; l.a(); } #[flame] fn b() { a() } #[noflame] fn c() { b() } pub struct Lower; impl Lower { #[flame("lower")] pub fn a(self) { // nothing to do here } } <|fim▁hole|> c(); let spans = flame::spans(); assert_eq!(1, spans.len()); let roots = &spans[0]; println!("{:?}",roots); // if more than 2 roots, a() was flamed twice or c was flamed // main is missing because main isn't closed here assert_eq!("b", roots.name); assert_eq!(1, roots.children.len()); assert_eq!("top::a", roots.children[0].name); assert_eq!(1, roots.children[0].children.len()); assert_eq!("lower::a", roots.children[0].children[0].name); }<|fim▁end|>
#[test] fn main() {
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! This crate provides the `regex!` macro. Its use is documented in the //! `regex` crate. #![crate_name = "regex_macros"] #![crate_type = "dylib"] #![experimental] #![license = "MIT/ASL2"] #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/nightly/")] #![feature(plugin_registrar, quote)] extern crate regex; extern crate syntax; extern crate rustc; use std::rc::Rc; use syntax::ast; use syntax::codemap; use syntax::ext::build::AstBuilder; use syntax::ext::base::{ExtCtxt, MacResult, MacExpr, DummyResult}; use syntax::parse::token; use syntax::print::pprust; use syntax::fold::Folder; use syntax::ptr::P; use rustc::plugin::Registry; use regex::Regex; use regex::native::{ OneChar, CharClass, Any, Save, Jump, Split, Match, EmptyBegin, EmptyEnd, EmptyWordBoundary, Program, Dynamic, ExDynamic, Native, FLAG_NOCASE, FLAG_MULTI, FLAG_DOTNL, FLAG_NEGATED, }; /// For the `regex!` syntax extension. Do not use. #[plugin_registrar] #[doc(hidden)] pub fn plugin_registrar(reg: &mut Registry) { reg.register_macro("regex", native); } /// Generates specialized code for the Pike VM for a particular regular /// expression. /// /// There are two primary differences between the code generated here and the /// general code in vm.rs. /// /// 1. All heap allocation is removed. Sized vector types are used instead. 
/// Care must be taken to make sure that these vectors are not copied /// gratuitously. (If you're not sure, run the benchmarks. They will yell /// at you if you do.) /// 2. The main `match instruction { ... }` expressions are replaced with more /// direct `match pc { ... }`. The generators can be found in /// `step_insts` and `add_insts`. /// /// Other more minor changes include eliding code when possible (although this /// isn't completely thorough at the moment), and translating character class /// matching from using a binary search to a simple `match` expression (see /// `match_class`). /// /// It is strongly recommended to read the dynamic implementation in vm.rs /// first before trying to understand the code generator. The implementation /// strategy is identical and vm.rs has comments and will be easier to follow. #[allow(experimental)] fn native(cx: &mut ExtCtxt, sp: codemap::Span, tts: &[ast::TokenTree]) -> Box<MacResult+'static> { let regex = match parse(cx, tts) { Some(r) => r, // error is logged in 'parse' with cx.span_err None => return DummyResult::any(sp), }; let re = match Regex::new(regex.as_slice()) { Ok(re) => re, Err(err) => { cx.span_err(sp, err.to_string().as_slice()); return DummyResult::any(sp) } }; let prog = match re { Dynamic(ExDynamic { ref prog, .. }) => prog.clone(), Native(_) => unreachable!(), }; let mut gen = NfaGen { cx: &*cx, sp: sp, prog: prog, names: re.names_iter().collect(), original: re.as_str().to_string(), }; MacExpr::new(gen.code()) } struct NfaGen<'a> { cx: &'a ExtCtxt<'a>, sp: codemap::Span, prog: Program, names: Vec<Option<String>>, original: String, } impl<'a> NfaGen<'a> { fn code(&mut self) -> P<ast::Expr> { // Most or all of the following things are used in the quasiquoted // expression returned. 
let num_cap_locs = 2 * self.prog.num_captures(); let num_insts = self.prog.insts.len(); let cap_names = self.vec_expr(self.names.as_slice().iter(), |cx, name| match *name { Some(ref name) => { let name = name.as_slice(); quote_expr!(cx, Some($name)) } None => cx.expr_none(self.sp), } ); let prefix_anchor = match self.prog.insts.as_slice()[1] { EmptyBegin(flags) if flags & FLAG_MULTI == 0 => true, _ => false, }; let init_groups = self.vec_expr(range(0, num_cap_locs), |cx, _| cx.expr_none(self.sp)); let prefix_lit = Rc::new(self.prog.prefix.as_slice().as_bytes().to_vec()); let prefix_bytes = self.cx.expr_lit(self.sp, ast::LitBinary(prefix_lit)); let check_prefix = self.check_prefix(); let step_insts = self.step_insts(); let add_insts = self.add_insts(); let regex = self.original.as_slice(); quote_expr!(self.cx, { // When `regex!` is bound to a name that is not used, we have to make sure // that dead_code warnings don't bubble up to the user from the generated // code. Therefore, we suppress them by allowing dead_code. The effect is that // the user is only warned about *their* unused variable/code, and not the // unused code generated by regex!. See #14185 for an example. 
#[allow(dead_code)] static CAP_NAMES: &'static [Option<&'static str>] = &$cap_names; #[allow(dead_code)] fn exec<'t>(which: ::regex::native::MatchKind, input: &'t str, start: uint, end: uint) -> Vec<Option<uint>> { #![allow(unused_imports)] #![allow(unused_mut)] use regex::native::{ MatchKind, Exists, Location, Submatches, StepState, StepMatchEarlyReturn, StepMatch, StepContinue, CharReader, find_prefix, }; return Nfa { which: which, input: input, ic: 0, chars: CharReader::new(input), }.run(start, end); type Captures = [Option<uint>, ..$num_cap_locs]; struct Nfa<'t> { which: MatchKind, input: &'t str, ic: uint, chars: CharReader<'t>, } impl<'t> Nfa<'t> { #[allow(unused_variables)] fn run(&mut self, start: uint, end: uint) -> Vec<Option<uint>> { let mut matched = false; let prefix_bytes: &[u8] = $prefix_bytes; let mut clist = &mut Threads::new(self.which); let mut nlist = &mut Threads::new(self.which); let mut groups = $init_groups; self.ic = start; let mut next_ic = self.chars.set(start); while self.ic <= end { if clist.size == 0 { if matched { break } $check_prefix } if clist.size == 0 || (!$prefix_anchor && !matched) { self.add(clist, 0, &mut groups) } self.ic = next_ic; next_ic = self.chars.advance(); for i in range(0, clist.size) { let pc = clist.pc(i); let step_state = self.step(&mut groups, nlist, clist.groups(i), pc); match step_state { StepMatchEarlyReturn => return vec![Some(0u), Some(0u)], StepMatch => { matched = true; break }, StepContinue => {}, } } ::std::mem::swap(&mut clist, &mut nlist); nlist.empty(); } match self.which { Exists if matched => vec![Some(0u), Some(0u)], Exists => vec![None, None], Location | Submatches => groups.iter().map(|x| *x).collect(), } } // Sometimes `nlist` is never used (for empty regexes). 
#[allow(unused_variables)] #[inline] fn step(&self, groups: &mut Captures, nlist: &mut Threads, caps: &mut Captures, pc: uint) -> StepState { $step_insts StepContinue } fn add(&self, nlist: &mut Threads, pc: uint, groups: &mut Captures) { if nlist.contains(pc) { return } $add_insts } } struct Thread { pc: uint, groups: Captures, } struct Threads { which: MatchKind, queue: [Thread, ..$num_insts], sparse: [uint, ..$num_insts], size: uint, } impl Threads { fn new(which: MatchKind) -> Threads { Threads { which: which, // These unsafe blocks are used for performance reasons, as it // gives us a zero-cost initialization of a sparse set. The // trick is described in more detail here: // http://research.swtch.com/sparse // The idea here is to avoid initializing threads that never // need to be initialized, particularly for larger regexs with // a lot of instructions. queue: unsafe { ::std::mem::uninitialized() }, sparse: unsafe { ::std::mem::uninitialized() }, size: 0, } } #[inline] fn add(&mut self, pc: uint, groups: &Captures) { let t = &mut self.queue[self.size]; t.pc = pc; match self.which { Exists => {}, Location => { t.groups[0] = groups[0]; t.groups[1] = groups[1]; } Submatches => { for (slot, val) in t.groups.iter_mut().zip(groups.iter()) { *slot = *val; } } } self.sparse[pc] = self.size; self.size += 1; } #[inline] fn add_empty(&mut self, pc: uint) { self.queue[self.size].pc = pc; self.sparse[pc] = self.size; self.size += 1; } #[inline] fn contains(&self, pc: uint) -> bool { let s = self.sparse[pc]; s < self.size && self.queue[s].pc == pc } #[inline] fn empty(&mut self) { self.size = 0; } #[inline] fn pc(&self, i: uint) -> uint { self.queue[i].pc } #[inline] fn groups<'r>(&'r mut self, i: uint) -> &'r mut Captures { &mut self.queue[i].groups } } } ::regex::native::Native(::regex::native::ExNative { original: $regex, names: &CAP_NAMES, prog: exec, }) }) } // Generates code for the `add` method, which is responsible for adding // zero-width states to the next queue 
of states to visit. fn add_insts(&self) -> P<ast::Expr> { let arms = self.prog.insts.iter().enumerate().map(|(pc, inst)| { let nextpc = pc + 1; let body = match *inst { EmptyBegin(flags) => { let cond = if flags & FLAG_MULTI > 0 { quote_expr!(self.cx, self.chars.is_begin() || self.chars.prev == Some('\n') ) } else { quote_expr!(self.cx, self.chars.is_begin())<|fim▁hole|> }; quote_expr!(self.cx, { nlist.add_empty($pc); if $cond { self.add(nlist, $nextpc, &mut *groups) } }) } EmptyEnd(flags) => { let cond = if flags & FLAG_MULTI > 0 { quote_expr!(self.cx, self.chars.is_end() || self.chars.cur == Some('\n') ) } else { quote_expr!(self.cx, self.chars.is_end()) }; quote_expr!(self.cx, { nlist.add_empty($pc); if $cond { self.add(nlist, $nextpc, &mut *groups) } }) } EmptyWordBoundary(flags) => { let cond = if flags & FLAG_NEGATED > 0 { quote_expr!(self.cx, !self.chars.is_word_boundary()) } else { quote_expr!(self.cx, self.chars.is_word_boundary()) }; quote_expr!(self.cx, { nlist.add_empty($pc); if $cond { self.add(nlist, $nextpc, &mut *groups) } }) } Save(slot) => { let save = quote_expr!(self.cx, { let old = groups[$slot]; groups[$slot] = Some(self.ic); self.add(nlist, $nextpc, &mut *groups); groups[$slot] = old; }); let add = quote_expr!(self.cx, { self.add(nlist, $nextpc, &mut *groups); }); // If this is saving a submatch location but we request // existence or only full match location, then we can skip // right over it every time. 
if slot > 1 { quote_expr!(self.cx, { nlist.add_empty($pc); match self.which { Submatches => $save, Exists | Location => $add, } }) } else { quote_expr!(self.cx, { nlist.add_empty($pc); match self.which { Submatches | Location => $save, Exists => $add, } }) } } Jump(to) => { quote_expr!(self.cx, { nlist.add_empty($pc); self.add(nlist, $to, &mut *groups); }) } Split(x, y) => { quote_expr!(self.cx, { nlist.add_empty($pc); self.add(nlist, $x, &mut *groups); self.add(nlist, $y, &mut *groups); }) } // For Match, OneChar, CharClass, Any _ => quote_expr!(self.cx, nlist.add($pc, &*groups)), }; self.arm_inst(pc, body) }).collect::<Vec<ast::Arm>>(); self.match_insts(arms) } // Generates the code for the `step` method, which processes all states // in the current queue that consume a single character. fn step_insts(&self) -> P<ast::Expr> { let arms = self.prog.insts.iter().enumerate().map(|(pc, inst)| { let nextpc = pc + 1; let body = match *inst { Match => { quote_expr!(self.cx, { match self.which { Exists => { return StepMatchEarlyReturn } Location => { groups[0] = caps[0]; groups[1] = caps[1]; return StepMatch } Submatches => { for (slot, val) in groups.iter_mut().zip(caps.iter()) { *slot = *val; } return StepMatch } } }) } OneChar(c, flags) => { if flags & FLAG_NOCASE > 0 { let upc = c.to_uppercase(); quote_expr!(self.cx, { let upc = self.chars.prev.map(|c| c.to_uppercase()); if upc == Some($upc) { self.add(nlist, $nextpc, caps); } }) } else { quote_expr!(self.cx, { if self.chars.prev == Some($c) { self.add(nlist, $nextpc, caps); } }) } } CharClass(ref ranges, flags) => { let negate = flags & FLAG_NEGATED > 0; let casei = flags & FLAG_NOCASE > 0; let get_char = if casei { quote_expr!(self.cx, self.chars.prev.unwrap().to_uppercase()) } else { quote_expr!(self.cx, self.chars.prev.unwrap()) }; let negcond = if negate { quote_expr!(self.cx, !found) } else { quote_expr!(self.cx, found) }; let mranges = self.match_class(casei, ranges.as_slice()); quote_expr!(self.cx, { if 
self.chars.prev.is_some() { let c = $get_char; let found = $mranges; if $negcond { self.add(nlist, $nextpc, caps); } } }) } Any(flags) => { if flags & FLAG_DOTNL > 0 { quote_expr!(self.cx, self.add(nlist, $nextpc, caps)) } else { quote_expr!(self.cx, { if self.chars.prev != Some('\n') { self.add(nlist, $nextpc, caps) } () }) } } // EmptyBegin, EmptyEnd, EmptyWordBoundary, Save, Jump, Split _ => self.empty_block(), }; self.arm_inst(pc, body) }).collect::<Vec<ast::Arm>>(); self.match_insts(arms) } // Translates a character class into a match expression. // This avoids a binary search (and is hopefully replaced by a jump // table). fn match_class(&self, casei: bool, ranges: &[(char, char)]) -> P<ast::Expr> { let mut arms = ranges.iter().map(|&(mut start, mut end)| { if casei { start = start.to_uppercase(); end = end.to_uppercase(); } let pat = self.cx.pat(self.sp, ast::PatRange(quote_expr!(self.cx, $start), quote_expr!(self.cx, $end))); self.cx.arm(self.sp, vec!(pat), quote_expr!(self.cx, true)) }).collect::<Vec<ast::Arm>>(); arms.push(self.wild_arm_expr(quote_expr!(self.cx, false))); let match_on = quote_expr!(self.cx, c); self.cx.expr_match(self.sp, match_on, arms) } // Generates code for checking a literal prefix of the search string. // The code is only generated if the regex *has* a literal prefix. // Otherwise, a no-op is returned. fn check_prefix(&self) -> P<ast::Expr> { if self.prog.prefix.len() == 0 { self.empty_block() } else { quote_expr!(self.cx, if clist.size == 0 { let haystack = self.input.as_bytes()[self.ic..]; match find_prefix(prefix_bytes, haystack) { None => break, Some(i) => { self.ic += i; next_ic = self.chars.set(self.ic); } } } ) } } // Builds a `match pc { ... }` expression from a list of arms, specifically // for matching the current program counter with an instruction. // A wild-card arm is automatically added that executes a no-op. It will // never be used, but is added to satisfy the compiler complaining about // non-exhaustive patterns. 
fn match_insts(&self, mut arms: Vec<ast::Arm>) -> P<ast::Expr> { arms.push(self.wild_arm_expr(self.empty_block())); self.cx.expr_match(self.sp, quote_expr!(self.cx, pc), arms) } fn empty_block(&self) -> P<ast::Expr> { quote_expr!(self.cx, {}) } // Creates a match arm for the instruction at `pc` with the expression // `body`. fn arm_inst(&self, pc: uint, body: P<ast::Expr>) -> ast::Arm { let pc_pat = self.cx.pat_lit(self.sp, quote_expr!(self.cx, $pc)); self.cx.arm(self.sp, vec!(pc_pat), body) } // Creates a wild-card match arm with the expression `body`. fn wild_arm_expr(&self, body: P<ast::Expr>) -> ast::Arm { ast::Arm { attrs: vec!(), pats: vec!(P(ast::Pat{ id: ast::DUMMY_NODE_ID, span: self.sp, node: ast::PatWild(ast::PatWildSingle), })), guard: None, body: body, } } // Converts `xs` to a `[x1, x2, .., xN]` expression by calling `to_expr` // on each element in `xs`. fn vec_expr<T, It: Iterator<T>>(&self, xs: It, to_expr: |&ExtCtxt, T| -> P<ast::Expr>) -> P<ast::Expr> { let exprs = xs.map(|x| to_expr(self.cx, x)).collect(); self.cx.expr_vec(self.sp, exprs) } } /// Looks for a single string literal and returns it. /// Otherwise, logs an error with cx.span_err and returns None. fn parse(cx: &mut ExtCtxt, tts: &[ast::TokenTree]) -> Option<String> { let mut parser = cx.new_parser_from_tts(tts); let entry = cx.expander().fold_expr(parser.parse_expr()); let regex = match entry.node { ast::ExprLit(ref lit) => { match lit.node { ast::LitStr(ref s, _) => s.to_string(), _ => { cx.span_err(entry.span, format!( "expected string literal but got `{}`", pprust::lit_to_string(&**lit)).as_slice()); return None } } } _ => { cx.span_err(entry.span, format!( "expected string literal but got `{}`", pprust::expr_to_string(&*entry)).as_slice()); return None } }; if !parser.eat(&token::Eof) { cx.span_err(parser.span, "only one string literal allowed"); return None; } Some(regex) }<|fim▁end|>
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals from django.template.defaultfilters import slugify as django_slugify from django.utils.importlib import import_module from unidecode import unidecode # Timezone support with fallback. try: from django.utils.timezone import now except ImportError: from datetime import datetime now = datetime.now<|fim▁hole|> """ Translates unicode into closest possible ascii chars before slugifying. """ from future.builtins import str return django_slugify(unidecode(str(s))) def unique_slug(manager, slug_field, slug): """ Ensure slug is unique for the given manager, appending a digit if it isn't. """ i = 0 while True: if i > 0: if i > 1: slug = slug.rsplit("-", 1)[0] slug = "%s-%s" % (slug, i) if not manager.filter(**{slug_field: slug}): break i += 1 return slug def split_choices(choices_string): """ Convert a comma separated choices string to a list. """ return [x.strip() for x in choices_string.split(",") if x.strip()] def html5_field(name, base): """ Takes a Django form field class and returns a subclass of it with the given name as its input type. """ return type(str(""), (base,), {"input_type": name}) def import_attr(path): """ Given a a Python dotted path to a variable in a module, imports the module and returns the variable in it. """ module_path, attr_name = path.rsplit(".", 1) return getattr(import_module(module_path), attr_name)<|fim▁end|>
def slugify(s):
<|file_name|>ExplicitKualiDecimalFormatter.java<|end_file_name|><|fim▁begin|>/* * Copyright 2012 The Kuali Foundation. * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kfs.sys.businessobject.format; import java.math.BigDecimal; import org.apache.log4j.Logger; import org.kuali.rice.core.api.util.type.KualiDecimal; import org.kuali.rice.core.web.format.BigDecimalFormatter; /** * This class is used to format explicit decimal value to BigDecimal objects. */ public class ExplicitKualiDecimalFormatter extends BigDecimalFormatter { private static Logger LOG = Logger.getLogger(ExplicitKualiDecimalFormatter.class); /** * Converts the given String into a KualiDecimal with the final two characters being behind the decimal place */ @Override protected Object convertToObject(String target) { BigDecimal value = (BigDecimal)super.convertToObject(addDecimalPoint(target)); return new KualiDecimal(value); } /** * Adds the decimal point to the String<|fim▁hole|> if (!amount.contains(".")) { //have to add decimal point if it's missing int length = amount.length(); amount = amount.substring(0, length - 2) + "." + amount.substring(length - 2, length); } return amount; } }<|fim▁end|>
* @param amount the String representing the amount * @return a new String, with a decimal inserted in the third to last place */ private String addDecimalPoint (String amount) {
<|file_name|>fn-item-type-zero-sized.rs<|end_file_name|><|fim▁begin|>// run-pass // Test that fn item types are zero-sized. use std::mem::{size_of, size_of_val}; <|fim▁hole|> assert_eq!(size_of_val(&a), 0); assert_eq!(size_of_val(&b), 0); assert_eq!((a(), b()), (1, 2)); }<|fim▁end|>
fn main() { assert_eq!(size_of_val(&main), 0); let (a, b) = (size_of::<u8>, size_of::<u16>);
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! # Rocket - Code Generation //! //! This crate implements the code generation portions of Rocket. This includes //! custom derives, custom attributes, procedural macros, and lints. The //! documentation here is purely technical. The code generation facilities are //! documented thoroughly in the [Rocket programming //! guide](https://rocket.rs/guide). //! //! ## Custom Attributes //! //! This crate implements the following custom attributes: //! //! * **route** //! * **get** //! * **put** //! * **post** //! * **delete** //! * **head** //! * **patch** //! * **options** //! * **error** //! //! The grammar for all _route_ attributes, including **route**, **get**, //! **put**, **post**, **delete**, **head**, **patch**, and **options** is //! defined as: //! //! <pre> //! route := METHOD? '(' ('path' '=')? path (',' kv_param)* ')' //! //! path := URI_SEG //! | DYNAMIC_PARAM //! | '?' DYNAMIC_PARAM //! | path '/' path //! (string literal) //! //! kv_param := 'rank' '=' INTEGER //! | 'format' '=' STRING //! | 'data' '=' DYNAMIC_PARAM //! //! INTEGER := isize, as defined by Rust //! STRING := UTF-8 string literal, as defined by Rust //! IDENT := Valid identifier, as defined by Rust //! //! URI_SEG := Valid HTTP URI Segment //! DYNAMIC_PARAM := '<' IDENT '..'? '>' (string literal) //! </pre> //! //! Note that the **route** attribute takes a method as its first argument, //! while the remaining do not. That is, **route** looks like: //! //! #[route(GET, path = "/hello")] //! //! while the equivalent using **get** looks like: //! //! #[get("/hello")] //! //! The syntax for the **error** attribute is: //! //! <pre> //! error := INTEGER //! </pre> //! //! A use of the `error` attribute looks like: //! //! #[error(404)] //! //! ## Custom Derives //! //! This crate implements the following custom derives: //! //! * **FromForm** //! //! ## Procedural Macros //! //! This crate implements the following procedural macros: //! //! 
* **routes** //! * **errors** //! //! The syntax for both of these is defined as: //! //! <pre> //! macro := PATH (',' macro)* //! //! PATH := a path, as defined by Rust //! </pre> //! //! ## Lints //! //! This crate implements the following lints: //! //! * **unmounted_route**: defaults to _warn_ //! //! emits a warning when a declared route is not mounted //! //! * **unmanaged_state**: defaults to _warn_ //! //! emits a warning when a `State<T>` request guest is used in a mounted //! route without managing a value for `T` //! //! # Debugging Codegen //! //! When the `ROCKET_CODEGEN_DEBUG` environment variable is set, this crate logs //! the items it has generated to the console at compile-time. For example, you //! might run the following to build a Rocket application with codegen logging //! enabled: //! //! ``` //! ROCKET_CODEGEN_DEBUG=1 cargo build //! ``` #![crate_type = "dylib"] #![feature(quote, concat_idents, plugin_registrar, rustc_private, unicode)] #![feature(custom_attribute)] #![feature(i128_type)] #![allow(unused_attributes)] #![allow(deprecated)] #[macro_use] extern crate log; #[macro_use] extern crate rustc; extern crate syntax; extern crate syntax_ext; extern crate syntax_pos; extern crate rustc_plugin; extern crate rocket; #[macro_use] mod utils; mod parser; mod macros;<|fim▁hole|> use std::env; use rustc_plugin::Registry; use syntax::ext::base::SyntaxExtension; use syntax::symbol::Symbol; const DEBUG_ENV_VAR: &'static str = "ROCKET_CODEGEN_DEBUG"; const PARAM_PREFIX: &'static str = "rocket_param_"; const ROUTE_STRUCT_PREFIX: &'static str = "static_rocket_route_info_for_"; const CATCH_STRUCT_PREFIX: &'static str = "static_rocket_catch_info_for_"; const ROUTE_FN_PREFIX: &'static str = "rocket_route_fn_"; const CATCH_FN_PREFIX: &'static str = "rocket_catch_fn_"; const ROUTE_ATTR: &'static str = "rocket_route"; const ROUTE_INFO_ATTR: &'static str = "rocket_route_info"; const CATCHER_ATTR: &'static str = "rocket_catcher"; macro_rules! 
register_decorators { ($registry:expr, $($name:expr => $func:ident),+) => ( $($registry.register_syntax_extension(Symbol::intern($name), SyntaxExtension::MultiModifier(Box::new(decorators::$func))); )+ ) } macro_rules! register_derives { ($registry:expr, $($name:expr => $func:ident),+) => ( $($registry.register_custom_derive(Symbol::intern($name), SyntaxExtension::MultiDecorator(Box::new(decorators::$func))); )+ ) } macro_rules! register_lints { ($registry:expr, $($item:ident),+) => ($( $registry.register_late_lint_pass(Box::new(lints::$item::default())); )+) } /// Compiler hook for Rust to register plugins. #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { // Enable logging early if the DEBUG_ENV_VAR is set. if env::var(DEBUG_ENV_VAR).is_ok() { ::rocket::logger::init(::rocket::LoggingLevel::Debug); } reg.register_macro("routes", macros::routes); reg.register_macro("errors", macros::errors); register_derives!(reg, "derive_FromForm" => from_form_derive ); register_decorators!(reg, "error" => error_decorator, "route" => route_decorator, "get" => get_decorator, "put" => put_decorator, "post" => post_decorator, "delete" => delete_decorator, "head" => head_decorator, "patch" => patch_decorator, "options" => options_decorator ); register_lints!(reg, RocketLint); }<|fim▁end|>
mod decorators; mod lints;
<|file_name|>app.module.js<|end_file_name|><|fim▁begin|>"use strict"; var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) { var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; return c > 3 && r && Object.defineProperty(target, key, r), r; }; var __metadata = (this && this.__metadata) || function (k, v) { if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v); }; var core_1 = require('@angular/core');<|fim▁hole|>var forms_1 = require('@angular/forms'); var app_component_1 = require('./app.component'); var person_detail_component_1 = require('./person-detail.component'); var AppModule = (function () { function AppModule() { } AppModule = __decorate([ core_1.NgModule({ imports: [platform_browser_1.BrowserModule, forms_1.FormsModule], declarations: [app_component_1.AppComponent, person_detail_component_1.PersonDetailComponent], bootstrap: [app_component_1.AppComponent] }), __metadata('design:paramtypes', []) ], AppModule); return AppModule; }()); exports.AppModule = AppModule; //# sourceMappingURL=app.module.js.map<|fim▁end|>
var platform_browser_1 = require('@angular/platform-browser');
<|file_name|>GroupDetails.js<|end_file_name|><|fim▁begin|>import React, { Component } from 'react'; import { StyleSheet, Text, View, Navigator, ScrollView, ListView, } from 'react-native' import NavigationBar from 'react-native-navbar'; var REQUEST_URL = 'https://calm-garden-29993.herokuapp.com/index/groupsinfo/?'; class GroupDetails extends Component { constructor(props, context) { super(props, context); this.state = { loggedIn: true, loaded: false, rando: "a", }; this.fetchData(); } backOnePage () { this.props.navigator.pop(); } renderRide (ride) { return ( <View> <Text style={styles.title}>{ride.title}</Text> </View> ); } componentDidMount () { this.fetchData(); } toQueryString(obj) { return obj ? Object.keys(obj).sort().map(function (key) { var val = obj[key]; if (Array.isArray(val)) { return val.sort().map(function (val2) { return encodeURIComponent(key) + '=' + encodeURIComponent(val2); }).join('&'); } return encodeURIComponent(key) + '=' + encodeURIComponent(val); }).join('&') : ''; } fetchData() { console.log(this.props.group_info.pk); fetch(REQUEST_URL + this.toQueryString({"group": this.props.group_info.pk})) .then((response) => response.json()) .then((responseData) => { console.log(responseData); this.setState({ group_info: responseData, loaded: true, }); }) .done(); } render () { if (!this.state.loaded) { return (<View> <Text>Loading!</Text> </View>); } else if (this.state.loggedIn) { console.log(this.props.group_info.fields); console.log(this.state); console.log(this.state.group_info[0]); const backButton = { title: "Back", handler: () => this.backOnePage(), }; return ( <ScrollView> <NavigationBar style={{ backgroundColor: "white", }} leftButton={backButton} statusBar={{ tintColor: "white", }} /> <Text style={styles.headTitle}> Group Name: {this.state.group_info.name} </Text> <Text style={styles.headerOtherText}>Group Leader: {this.state.group_info.admin}</Text> <Text style={styles.headerOtherText}>{this.state.group_info.users} people in this 
group.</Text> </ScrollView> ); } else { this.props.navigator.push({id: "LoginPage", name:"Index"}) } } } var styles = StyleSheet.create({ headerOtherText : { color: 'black', fontSize: 15 , fontWeight: 'normal', fontFamily: 'Helvetica Neue', alignSelf: "center", }, headTitle: { color: 'black', fontSize: 30 , fontWeight: 'normal', fontFamily: 'Helvetica Neue', alignSelf: "center", }, header: { marginTop: 20, flex: 1, flexDirection: "column", justifyContent: "center", alignItems: "center", }, container: { flex: 1, flexDirection: 'row', justifyContent: 'center',<|fim▁hole|> alignItems: 'center', backgroundColor: '#ff7f50', }, rightContainer: { flex: 1, }, title: { fontSize: 20, marginBottom: 8, textAlign: 'center', }, year: { textAlign: 'center', }, thumbnail: { width: 53, height: 81, }, listView: { backgroundColor: '#0000ff', paddingBottom: 200, }, }); module.exports = GroupDetails;<|fim▁end|>
<|file_name|>PBObjcWrapper.hh<|end_file_name|><|fim▁begin|>// // PBObjcWrapper.h // AppBootstrap // // Created by Yaming on 10/31/14. // Copyright (c) 2014 whosbean.com. All rights reserved. // #import <Foundation/Foundation.h> #ifdef __cplusplus #import <google/protobuf/message.h> #endif @protocol PBObjcWrapper <NSObject> -(instancetype) initWithProtocolData:(NSData*) data;<|fim▁hole|> -(NSData*) getProtocolData; -(NSMutableDictionary*) asDict; @end<|fim▁end|>
#ifdef __cplusplus -(instancetype) initWithProtocolObj:(google::protobuf::Message *)pbobj; #endif
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 Phillip Oppermann, Calvin Lee and JJ Garzella. // See the README.md file at the top-level directory of this // distribution. // // Licensed under the MIT license <LICENSE or // http://opensource.org/licenses/MIT>, at your option. // This file may not be copied, modified, or distributed // except according to those terms. #![allow(non_snake_case)] #![feature(lang_items)] #![feature(alloc)] #![feature(const_fn)] #![feature(associated_type_defaults)] #![feature(asm, naked_functions, core_intrinsics)] #![feature(abi_x86_interrupt)] #![feature(ptr_internals)] #![no_std] // crates.io crates extern crate rlibc; /// Bare metal Mutex extern crate spin; /// Abstraction of the multiboot2 info structure extern crate multiboot2; extern crate x86_64; extern crate bit_field; #[macro_use] extern crate bitflags; /// A macro for running a function only once #[macro_use] extern crate once; // Features involving allocation /// Heap allocator for rust code extern crate hole_list_allocator; /// Higher-level data structures that use the heap extern crate alloc; #[macro_use] /// Abstraction of the VGA text buffer mod vga_buffer; /// Memory management mod memory; /// Interrupts code mod interrupts;<|fim▁hole|>mod scheduler; /// Utilities for multi-CPU processing mod smp; /// Testing #[cfg(feature = "test")] mod tap; extern "C" { /// The kernel exit point. It disables interrupts, enters an infinite loop, /// and halts the processor fn KEXIT() -> !; } /// The Rust entry point /// /// This clears the screen, initializes each module and enters an infinite /// loop. #[no_mangle] pub extern "C" fn rust_main(multiboot_info_address: usize) -> ! 
{ vga_buffer::clear_screen(); println!("Hello Rust log \x01"); let boot_info = unsafe { multiboot2::load(multiboot_info_address) }; for module in boot_info.module_tags() { if module.name() == "keyboard" { let addr = module.start_address() as usize + memory::KERNEL_BASE; unsafe { interrupts::KEYBOARD.lock() .change_kbmap(&*(addr as *const [u8; 128])); } } } // Initialize memory memory::init(&boot_info); // Initialize CPU local variables and the scheduler unsafe { smp::CpuLocal::init() }; // Initialize the IDT interrupts::init(); // Initialize the serial port cpuio::init(); println!("Try to write some things!"); vga_buffer::change_color(vga_buffer::Color::White, vga_buffer::Color::Black); #[cfg(feature = "test")] { run_tests(); shutdown(); } loop { // We are waiting for interrupts here, so don't bother doing anything unsafe { asm!("hlt" :::: "volatile") } } } #[cfg(feature = "test")] fn shutdown() -> ! { use cpuio::port::Port; let mut p: Port<u8> = unsafe { Port::new(0xf4) }; p.write(0x00); unreachable!(); } #[cfg(feature = "test")] fn run_tests() { memory::tests::run(); scheduler::tests::run(); smp::tests::run(); interrupts::tests::run(); cpuio::tests::run(); } #[allow(non_snake_case)] #[no_mangle] pub extern "C" fn _Unwind_Resume() -> ! { unsafe { KEXIT() } } /// Used for unwinding, unsupported #[lang = "eh_personality"] fn eh_personality() {} use core::alloc::Layout; /// Runs when the allocator is out of memory #[lang = "oom"] fn oom(_: Layout) -> ! { panic!("Error, out of memory"); } /// Runs during a `panic!()` #[panic_handler] extern "C" fn panic_fmt(pi: &core::panic::PanicInfo) -> ! { vga_buffer::change_color(vga_buffer::Color::Red, vga_buffer::Color::Black); println!("\n\nESALP {}", pi); #[cfg(feature = "test")] { serial_println!("Bail out! - {}", pi); shutdown(); } unsafe { KEXIT() } }<|fim▁end|>
/// IO abstractions in Rust #[macro_use] mod cpuio; mod sync;
<|file_name|>http.py<|end_file_name|><|fim▁begin|># pygopherd -- Gopher-based protocol server in Python # module: serve up gopherspace via http # $Id: http.py,v 1.21 2002/04/26 15:18:10 jgoerzen Exp $ # Copyright (C) 2002 John Goerzen # <[email protected]> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; version 2 of the License. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA import SocketServer import re, binascii import os, stat, os.path, mimetypes, urllib, time from pygopherd import handlers, protocols, GopherExceptions from pygopherd.protocols.base import BaseGopherProtocol import pygopherd.version import cgi class HTTPProtocol(BaseGopherProtocol): def canhandlerequest(self): self.requestparts = map(lambda arg: arg.strip(), self.request.split(" ")) return len(self.requestparts) == 3 and \ (self.requestparts[0] == 'GET' or self.requestparts[0] == 'HEAD') and \ self.requestparts[2][0:5] == 'HTTP/' def headerslurp(self): if hasattr(self.requesthandler, 'pygopherd_http_slurped'): # Already slurped.<|fim▁hole|> self.httpheaders = self.requesthandler.pygopherd_http_slurped return # Slurp up remaining lines. 
self.httpheaders = {} while 1: line = self.rfile.readline() if not len(line): break line = line.strip() if not len(line): break splitline = line.split(':', 1) if len(splitline) == 2: self.httpheaders[splitline[0].lower()] = splitline[1] self.requesthandler.pygopherd_http_slurped = self.httpheaders def handle(self): self.canhandlerequest() # To get self.requestparts self.iconmapping = eval(self.config.get("protocols.http.HTTPProtocol", "iconmapping")) self.headerslurp() splitted = self.requestparts[1].split('?') self.selector = splitted[0] self.selector = urllib.unquote(self.selector) self.selector = self.slashnormalize(self.selector) self.formvals = {} if len(splitted) >= 2: self.formvals = cgi.parse_qs(splitted[1]) if self.formvals.has_key('searchrequest'): self.searchrequest = self.formvals['searchrequest'][0] icon = re.match('/PYGOPHERD-HTTPPROTO-ICONS/(.+)$', self.selector) if icon: iconname = icon.group(1) if icons.has_key(iconname): self.wfile.write("HTTP/1.0 200 OK\r\n") self.wfile.write("Last-Modified: Fri, 14 Dec 2001 21:19:47 GMT\r\n") self.wfile.write("Content-Type: image/gif\r\n\r\n") if self.requestparts[0] == 'HEAD': return self.wfile.write(binascii.unhexlify(icons[iconname])) return try: handler = self.gethandler() self.log(handler) self.entry = handler.getentry() handler.prepare() self.wfile.write("HTTP/1.0 200 OK\r\n") if self.entry.getmtime() != None: gmtime = time.gmtime(self.entry.getmtime()) mtime = time.strftime("%a, %d %b %Y %H:%M:%S GMT", gmtime) self.wfile.write("Last-Modified: " + mtime + "\r\n") mimetype = self.entry.getmimetype() mimetype = self.adjustmimetype(mimetype) self.wfile.write("Content-Type: " + mimetype + "\r\n\r\n") if self.requestparts[0] == 'GET': if handler.isdir(): self.writedir(self.entry, handler.getdirlist()) else: self.handlerwrite(self.wfile) except GopherExceptions.FileNotFound, e: self.filenotfound(str(e)) except IOError, e: GopherExceptions.log(e, self, None) self.filenotfound(e[1]) def handlerwrite(self, wfile): 
self.handler.write(wfile) def adjustmimetype(self, mimetype): if mimetype == None: return 'text/plain' if mimetype == 'application/gopher-menu': return 'text/html' return mimetype def renderobjinfo(self, entry): url = None # Decision time.... if re.match('(/|)URL:', entry.getselector()): # It's a plain URL. Make it that. url = re.match('(/|)URL:(.+)$', entry.getselector()).group(2) elif (not entry.gethost()) and (not entry.getport()): # It's a link to our own server. Make it as such. (relative) url = urllib.quote(entry.getselector()) else: # Link to a different server. Make it a gopher URL. url = entry.geturl(self.server.server_name, 70) # OK. Render. return self.getrenderstr(entry, url) def getrenderstr(self, entry, url): retstr = '<TR><TD>' retstr += self.getimgtag(entry) retstr += "</TD>\n<TD>&nbsp;" if entry.gettype() != 'i' and entry.gettype() != '7': retstr += '<A HREF="%s">' % url retstr += "<TT>" if entry.getname() != None: retstr += cgi.escape(entry.getname()).replace(" ", " &nbsp;") else: retstr += cgi.escape(entry.getselector()).replace(" ", " &nbsp;") retstr += "</TT>" if entry.gettype() != 'i' and entry.gettype() != '7': retstr += '</A>' if (entry.gettype() == '7'): retstr += '<BR><FORM METHOD="GET" ACTION="%s">' % url retstr += '<INPUT TYPE="text" NAME="searchrequest" SIZE="30">' retstr += '<INPUT TYPE="submit" NAME="Submit" VALUE="Submit">' retstr += '</FORM>' retstr += '</TD><TD><FONT SIZE="-2">' if entry.getmimetype(): subtype = re.search('/.+$', entry.getmimetype()) if subtype: retstr += cgi.escape(subtype.group()[1:]) retstr += '</FONT></TD></TR>\n' return retstr def renderdirstart(self, entry): retstr ='<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN" "http://www.w3.org/TR/REC-html40/loose.dtd">' retstr += "\n<HTML><HEAD><TITLE>Gopher" if self.entry.getname(): retstr += ": " + cgi.escape(self.entry.getname()) retstr += "</TITLE></HEAD><BODY>" if self.config.has_option("protocols.http.HTTPProtocol", "pagetopper"): retstr += 
re.sub('GOPHERURL', self.entry.geturl(self.server.server_name, self.server.server_port), self.config.get("protocols.http.HTTPProtocol", "pagetopper")) retstr += "<H1>Gopher" if self.entry.getname(): retstr += ": " + cgi.escape(self.entry.getname()) retstr += '</H1><TABLE WIDTH="100%" CELLSPACING="1" CELLPADDING="0">' return retstr def renderdirend(self, entry): retstr = "</TABLE><HR>\n[<A HREF=\"/\">server top</A>]" retstr += " [<A HREF=\"%s\">view with gopher</A>]" % \ entry.geturl(self.server.server_name, self.server.server_port) retstr += '<BR>Generated by <A HREF="%s">%s</A>' % ( pygopherd.version.homepage, pygopherd.version.productname) return retstr + "\n</BODY></HTML>\n" def filenotfound(self, msg): self.wfile.write("HTTP/1.0 404 Not Found\r\n") self.wfile.write("Content-Type: text/html\r\n\r\n") self.wfile.write('<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN" "http://www.w3.org/TR/REC-html40/loose.dtd">') self.wfile.write("""\n<HTML><HEAD><TITLE>Selector Not Found</TITLE> <H1>Selector Not Found</H1> <TT>""") self.wfile.write(cgi.escape(msg)) self.wfile.write("</TT><HR>Pygopherd</BODY></HTML>\n") def getimgtag(self, entry): name = 'generic.gif' if self.iconmapping.has_key(entry.gettype()): name = self.iconmapping[entry.gettype()] return '<IMG ALT=" * " SRC="%s" WIDTH="20" HEIGHT="22" BORDER="0">' % \ ('/PYGOPHERD-HTTPPROTO-ICONS/' + name) icons = { 'binary.gif': '47494638396114001600c20000ffffffccffffcccccc99999933333300000000000000000021fe4e546869732061727420697320696e20746865207075626c696320646f6d61696e2e204b6576696e204875676865732c206b6576696e68406569742e636f6d2c2053657074656d62657220313939350021f90401000001002c000000001400160000036948babcf1301040ab9d24be590a105d210013a9715e07a8a509a16beab5ae14df6a41e8fc76839d5168e8b3182983e4a0e0038a6e1525d396931d97be2ad482a55a55c6eec429f484a7b4e339eb215fd138ebda1b7fb3eb73983bafee8b094a8182493b114387885309003b', 'binhex.gif': 
'47494638396114001600c20000ffffffccffff99999966666633333300000000000000000021fe4e546869732061727420697320696e20746865207075626c696320646f6d61696e2e204b6576696e204875676865732c206b6576696e68406569742e636f6d2c2053657074656d62657220313939350021f90401000001002c000000001400160000036948babcf1301040ab9d24be59baefc0146adce78555068914985e2b609e0551df9b3c17ba995b408a602828e48a2681856894f44cc1628e07a42e9b985d14ab1b7c9440a9131c0c733b229bb5222ecdb6bfd6da3cd5d29d688a1aee2c97db044482834336113b884d09003b', 'folder.gif': '47494638396114001600c20000ffffffffcc99ccffff99663333333300000000000000000021fe4e546869732061727420697320696e20746865207075626c696320646f6d61696e2e204b6576696e204875676865732c206b6576696e68406569742e636f6d2c2053657074656d62657220313939350021f90401000002002c000000001400160000035428badcfe30ca4959b9f8ce12baef45c47d64a629c5407a6a8906432cc72b1c8ef51a13579e0f3c9c8f05ec0d4945e171673cb2824e2234da495261569856c5ddc27882d46c3c2680c3e6b47acd232c4cf08c3b01003b', 'image3.gif': '47494638396114001600e30000ffffffff3333ccffff9999996600003333330099cc00993300336600000000000000000000000000000000000000000021fe4e546869732061727420697320696e20746865207075626c696320646f6d61696e2e204b6576696e204875676865732c206b6576696e68406569742e636f6d2c2053657074656d62657220313939350021f90401000002002c0000000014001600000479b0c849a7b85814c0bbdf45766d5e49861959762a3a76442c132ae0aa44a0ef49d1ff2f4e6ea74b188f892020c70c3007d04152b3aa46a7adcaa42355160ee0f041d5a572bee23017cb1abbbf6476d52a0720ee78fc5a8930f8ff06087b66768080832a7d8a81818873744a8f8805519596503e19489b9c5311003b', 'sound1.gif': 
'47494638396114001600c20000ffffffff3333ccffffcccccc99999966000033333300000021fe4e546869732061727420697320696e20746865207075626c696320646f6d61696e2e204b6576696e204875676865732c206b6576696e68406569742e636f6d2c2053657074656d62657220313939350021f90401000002002c000000001400160000036b28badcfe3036c34290ea1c61558f07b171170985c0687e0d9a729e77693401dc5bd7154148fcb6db6b77e1b984c20d4fb03406913866717a842aa7d22af22acd120cdf6fd2d49cd10e034354871518de06b43a17334de42a36243e187d4a7b1a762c7b140b8418898a0b09003b', 'text.gif': '47494638396114001600c20000ffffffccffff99999933333300000000000000000000000021fe4e546869732061727420697320696e20746865207075626c696320646f6d61696e2e204b6576696e204875676865732c206b6576696e68406569742e636f6d2c2053657074656d62657220313939350021f90401000001002c000000001400160000035838babcf1300c40ab9d23be693bcf11d75522b88dd7057144eb52c410cf270abb6e8db796e00b849aadf20b4a6ebb1705281c128daca412c03c3a7b50a4f4d9bc5645dae9f78aed6e975932baebfc0e7ef0b84f1691da8d09003b', 'generic.gif': '47494638396114001600c20000ffffffccffff99999933333300000000000000000000000021fe4e546869732061727420697320696e20746865207075626c696320646f6d61696e2e204b6576696e204875676865732c206b6576696e68406569742e636f6d2c2053657074656d62657220313939350021f90401000001002c000000001400160000035038babcf1300c40ab9d23be693bcf11d75522b88dd705892831b8f08952446d13f24c09bc804b3a4befc70a027c39e391a8ac2081cd65d2f82c06ab5129b4898d76b94c2f71d02b9b79afc86dcdfe2500003b', 'blank.gif': '47494638396114001600a10000ffffffccffff00000000000021fe4e546869732061727420697320696e20746865207075626c696320646f6d61696e2e204b6576696e204875676865732c206b6576696e68406569742e636f6d2c2053657074656d62657220313939350021f90401000001002c00000000140016000002138c8fa9cbed0fa39cb4da8bb3debcfb0f864901003b'}<|fim▁end|>
<|file_name|>test_jvm_dependency_usage.py<|end_file_name|><|fim▁begin|># coding=utf-8 # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) import os from collections import defaultdict from pants.backend.jvm.targets.java_library import JavaLibrary from pants.backend.jvm.tasks.classpath_products import ClasspathProducts from pants.backend.jvm.tasks.jvm_dependency_usage import JvmDependencyUsage from pants.goal.products import MultipleRootedProducts from pants.util.dirutil import safe_mkdir, touch from pants_test.tasks.task_test_base import TaskTestBase class TestJvmDependencyUsage(TaskTestBase): @classmethod def task_type(cls): return JvmDependencyUsage def _setup(self, target_classfiles): """Takes a dict mapping targets to lists of classfiles.""" context = self.context(target_roots=target_classfiles.keys()) # Create classfiles in a target-specific directory, and add it to the classpath for the target. 
classpath_products = context.products.get_data('runtime_classpath', ClasspathProducts) for target, classfiles in target_classfiles.items(): target_dir = os.path.join(self.test_workdir, target.id) safe_mkdir(target_dir) for classfile in classfiles: touch(os.path.join(target_dir, classfile)) classpath_products.add_for_target(target, [('default', target_dir)]) product_deps_by_src = context.products.get_data('product_deps_by_src', dict) return self.create_task(context), product_deps_by_src def make_java_target(self, *args, **kwargs): assert 'target_type' not in kwargs return self.make_target(target_type=JavaLibrary, *args, **kwargs) def _cover_output(self, graph): # coverage of the output code self.assertNotEqual(graph.to_json(), "") self.assertNotEqual(graph.to_summary(), "") def test_simple_dep_usage_graph(self): t1 = self.make_java_target(spec=':t1', sources=['a.java', 'b.java']) t2 = self.make_java_target(spec=':t2', sources=['c.java'], dependencies=[t1]) t3 = self.make_java_target(spec=':t3', sources=['d.java', 'e.java'], dependencies=[t1]) self.set_options(size_estimator='filecount') dep_usage, product_deps_by_src = self._setup({ t1: ['a.class', 'b.class'], t2: ['c.class'], t3: ['d.class', 'e.class'], }) product_deps_by_src[t1] = {} product_deps_by_src[t2] = {'c.java': ['a.class']} product_deps_by_src[t3] = {'d.java': ['a.class', 'b.class'], 'e.java': ['a.class', 'b.class']} graph = dep_usage.create_dep_usage_graph([t1, t2, t3], '') self.assertEqual(graph._nodes[t1].products_total, 2) self.assertEqual(graph._nodes[t2].products_total, 1) self.assertEqual(graph._nodes[t3].products_total, 2) <|fim▁hole|> self.assertEqual(graph._trans_cost(t1), 2) self.assertEqual(graph._trans_cost(t2), 3) self.assertEqual(graph._trans_cost(t3), 4) self._cover_output(graph) def test_dep_usage_graph_with_synthetic_targets(self): t1 = self.make_java_target(spec=':t1', sources=['t1.thrift']) t1_x = self.make_java_target(spec=':t1.x', derived_from=t1) t1_y = 
self.make_java_target(spec=':t1.y', derived_from=t1) t1_z = self.make_java_target(spec=':t1.z', derived_from=t1) t2 = self.make_java_target(spec=':t2', sources=['a.java', 'b.java'], dependencies=[t1, t1_x, t1_y, t1_z]) self.set_options(size_estimator='nosize') dep_usage, product_deps_by_src = self._setup({ t1_x: ['x1.class'], t1_y: ['y1.class'], t1_z: ['z1.class', 'z2.class', 'z3.class'], t2: ['a.class', 'b.class'], }) product_deps_by_src[t1] = {} product_deps_by_src[t1_x] = {} product_deps_by_src[t1_y] = {} product_deps_by_src[t1_z] = {} product_deps_by_src[t2] = {'a.java': ['x1.class'], 'b.java': ['z1.class', 'z2.class']} graph = dep_usage.create_dep_usage_graph([t1, t1_x, t1_y, t1_z, t2], '') self.assertEqual(graph._nodes[t1].products_total, 5) self.assertEqual(len(graph._nodes[t2].dep_edges[t1].products_used), 3) self._cover_output(graph)<|fim▁end|>
self.assertEqual(graph._nodes[t1].dep_edges, {}) self.assertEqual(len(graph._nodes[t2].dep_edges[t1].products_used), 1) self.assertEqual(len(graph._nodes[t3].dep_edges[t1].products_used), 2)
<|file_name|>0006_auto_20170701_2013.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.11 on 2017-07-01 20:13 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('flocks', '0015_auto_20170624_1312'), ('feeding', '0005_auto_20170625_1129'), ] operations = [ migrations.CreateModel( name='FeedingPeriodForFlock', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('start_date', models.DateField()), ('end_date', models.DateField(null=True)), ('feed_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='feeding.FeedType')), ('flock', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='flocks.Flock')), ], ), migrations.RemoveField( model_name='feedingperiodforroom', name='feed_type', ), migrations.RemoveField( model_name='feedingperiodforroom', name='room', ),<|fim▁hole|> name='FeedingPeriodForRoom', ), ]<|fim▁end|>
migrations.DeleteModel(
<|file_name|>002.py<|end_file_name|><|fim▁begin|>blocklevel = ["blockquote", "div", "form", "p", "table", "video", "h1", "h2", "h3", "h4", "h5", "h6", "hr", "details", "article", "header", "main"] def normalizeEnter(src): #Deletes all user defined for readability reason existing line breaks that are issues for the HTML output for elem in blocklevel: while src.find("\r<" + elem) > -1: src = src.replace("\r<" + elem, "<" + elem) while src.find("</" + elem + ">\r") > -1: src = src.replace("</" + elem + ">\r", "</" + elem + ">") while src.find(">\r") > -1: src = src.replace(">\r", ">") #It is really needed, it created some other bugs?! while src.find("\r</") > -1: src = src.replace("\r</", "</") ##It is really needed, it created some other bugs?! return src def main(islinput, inputfile, pluginData, globalData): currentIndex = 0<|fim▁hole|> currentIndex += 1 return islinput, pluginData, globalData<|fim▁end|>
for item in islinput: item = normalizeEnter(item) #Deletes not wanted line breaks in order to prevent the problem we have with Markdown. islinput[currentIndex] = item
<|file_name|>text.py<|end_file_name|><|fim▁begin|># Authors: # Jason Gerard DeRose <[email protected]> # # Copyright (C) 2009 Red Hat # see file 'COPYING' for use and warranty contextrmation # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """ Defers gettext translation till request time. IPA presents some tricky gettext challenges. On the one hand, most translatable message are defined as class attributes on the plugins, which means these get evaluated at module-load time. But on the other hand, each request to the server can be in a different locale, so the actual translation must not occur till request time. The `text` module provides a mechanism for for deferred gettext translation. It was designed to: 1. Allow translatable strings to be marked with the usual ``_()`` and ``ngettext()`` functions so that standard tools like xgettext can still be used 2. Allow programmers to mark strings in a natural way without burdening them with details of the deferred translation mechanism A typical plugin will use the deferred translation like this: <|fim▁hole|>... my_plural = ngettext('%(count)d goose', '%(count)d geese', 0) ... With normal gettext usage, the *my_string* and *my_plural* message would be translated at module-load-time when your ``my_plugin`` class is defined. This would mean that all message are translated in the locale of the server rather than the locale of the request. 
However, the ``_()`` function above is actually a `GettextFactory` instance, which when called returns a `Gettext` instance. A `Gettext` instance stores the message to be translated, and the gettext domain and localedir, but it doesn't perform the translation till `Gettext.__unicode__()` is called. For example: >>> my_plugin.my_string Gettext('Hello, %(name)s.', domain='ipa', localedir=None) >>> unicode(my_plugin.my_string) u'Hello, %(name)s.' Translation can also be performed via the `Gettext.__mod__()` convenience method. For example, these two are equivalent: >>> my_plugin.my_string % dict(name='Joe') u'Hello, Joe.' >>> unicode(my_plugin.my_string) % dict(name='Joe') # Long form u'Hello, Joe.' Similar to ``_()``, the ``ngettext()`` function above is actually an `NGettextFactory` instance, which when called returns an `NGettext` instance. An `NGettext` instance stores the singular and plural messages, and the gettext domain and localedir, but it doesn't perform the translation till `NGettext.__call__()` is called. For example: >>> my_plugin.my_plural NGettext('%(count)d goose', '%(count)d geese', domain='ipa', localedir=None) >>> my_plugin.my_plural(1) u'%(count)d goose' >>> my_plugin.my_plural(2) u'%(count)d geese' Translation can also be performed via the `NGettext.__mod__()` convenience method. For example, these two are equivalent: >>> my_plugin.my_plural % dict(count=1) u'1 goose' >>> my_plugin.my_plural(1) % dict(count=1) # Long form u'1 goose' Lastly, 3rd-party plugins can create factories bound to a different gettext domain. The default domain is ``'ipa'``, which is also the domain of the standard ``ipalib._()`` and ``ipalib.ngettext()`` factories. But 3rd-party plugins can create their own factories like this: >>> from ipalib import GettextFactory, NGettextFactory >>> _ = GettextFactory(domain='ipa_foo') >>> ngettext = NGettextFactory(domain='ipa_foo') >>> class foo(Command): ... msg1 = _('Foo!') ... 
msg2 = ngettext('%(count)d bar', '%(count)d bars', 0) ... Notice that these messages are bound to the ``'ipa_foo'`` domain: >>> foo.msg1 Gettext('Foo!', domain='ipa_foo', localedir=None) >>> foo.msg2 NGettext('%(count)d bar', '%(count)d bars', domain='ipa_foo', localedir=None) For additional details, see `GettextFactory` and `Gettext`, and for plural forms, see `NGettextFactory` and `NGettext`. """ import threading import locale import gettext import six from ipalib.request import context def create_translation(key): assert key not in context.__dict__ (domain, localedir) = key translation = gettext.translation(domain, localedir=localedir, languages=getattr(context, 'languages', None), fallback=True, ) context.__dict__[key] = translation return translation class LazyText(object): """ Base class for deferred translation. This class is not used directly. See the `Gettext` and `NGettext` subclasses. Concatenating LazyText objects with the + operator gives ConcatenatedLazyText objects. """ __slots__ = ('domain', 'localedir', 'key', 'args') def __init__(self, domain=None, localedir=None): """ Initialize. :param domain: The gettext domain in which this message will be translated, e.g. ``'ipa'`` or ``'ipa_3rd_party'``; default is ``None`` :param localedir: The directory containing the gettext translations, e.g. ``'/usr/share/locale/'``; default is ``None``, in which case gettext will use the default system locale directory. """ self.domain = domain self.localedir = localedir self.key = (domain, localedir) self.args = None def __eq__(self, other): """ Return ``True`` if this instances is equal to *other*. Note that this method cannot be used on the `LazyText` base class itself as subclasses must define an *args* instance attribute. """ if type(other) is not self.__class__: return False return self.args == other.args def __ne__(self, other): """ Return ``True`` if this instances is not equal to *other*. 
Note that this method cannot be used on the `LazyText` base class itself as subclasses must define an *args* instance attribute. """ return not self.__eq__(other) def __add__(self, other): return ConcatenatedLazyText(self) + other def __radd__(self, other): return other + ConcatenatedLazyText(self) @six.python_2_unicode_compatible class Gettext(LazyText): """ Deferred translation using ``gettext.ugettext()``. Normally the `Gettext` class isn't used directly and instead is created via a `GettextFactory` instance. However, for illustration, we can create one like this: >>> msg = Gettext('Hello, %(name)s.') When you create a `Gettext` instance, the message is stored on the *msg* attribute: >>> msg.msg 'Hello, %(name)s.' No translation is performed till `Gettext.__unicode__()` is called. This will translate *msg* using ``gettext.ugettext()``, which will return the translated string as a Python ``unicode`` instance. For example: >>> unicode(msg) u'Hello, %(name)s.' `Gettext.__unicode__()` should be called at request time, which in a nutshell means it should be called from within your plugin's ``Command.execute()`` method. `Gettext.__unicode__()` will perform the translation based on the locale of the current request. `Gettext.__mod__()` is a convenience method for Python "percent" string formatting. It will translate your message using `Gettext.__unicode__()` and then perform the string substitution on the translated message. For example, these two are equivalent: >>> msg % dict(name='Joe') u'Hello, Joe.' >>> unicode(msg) % dict(name='Joe') # Long form u'Hello, Joe.' See `GettextFactory` for additional details. If you need to pick between singular and plural form, use `NGettext` instances via the `NGettextFactory`. 
""" __slots__ = ('msg') def __init__(self, msg, domain=None, localedir=None): super(Gettext, self).__init__(domain, localedir) self.msg = msg self.args = (msg, domain, localedir) def __repr__(self): return '%s(%r, domain=%r, localedir=%r)' % (self.__class__.__name__, self.msg, self.domain, self.localedir) def __str__(self): """ Translate this message and return as a ``unicode`` instance. """ if self.key in context.__dict__: g = context.__dict__[self.key].ugettext else: g = create_translation(self.key).ugettext return g(self.msg) def __json__(self): return self.__unicode__() #pylint: disable=no-member def __mod__(self, kw): return self.__unicode__() % kw #pylint: disable=no-member @six.python_2_unicode_compatible class FixMe(Gettext): """ Non-translated place-holder for UI labels. `FixMe` is a subclass of `Gettext` and is used for automatically created place-holder labels. It generally behaves exactly like `Gettext` except no translation is ever performed. `FixMe` allows programmers to get plugins working without first filling in all the labels that will ultimately be required, while at the same time it creates conspicuous looking UI labels that remind the programmer to "fix-me!". For example, the typical usage would be something like this: >>> class Plugin(object): ... label = None ... def __init__(self): ... self.name = self.__class__.__name__ ... if self.label is None: ... self.label = FixMe(self.name + '.label') ... assert isinstance(self.label, Gettext) ... >>> class user(Plugin): ... pass # Oops, we didn't set user.label yet ... >>> u = user() >>> u.label FixMe('user.label') Note that as `FixMe` is a subclass of `Gettext`, is passes the above type check using ``isinstance()``. Calling `FixMe.__unicode__()` performs no translation, but instead returns said conspicuous looking label: >>> unicode(u.label) u'<user.label>' For more examples of how `FixMe` is used, see `ipalib.parameters`. 
""" __slots__ = tuple() def __repr__(self): return '%s(%r)' % (self.__class__.__name__, self.msg) def __str__(self): return u'<%s>' % self.msg class NGettext(LazyText): """ Deferred translation for plural forms using ``gettext.ungettext()``. Normally the `NGettext` class isn't used directly and instead is created via a `NGettextFactory` instance. However, for illustration, we can create one like this: >>> msg = NGettext('%(count)d goose', '%(count)d geese') When you create an `NGettext` instance, the singular and plural forms of your message are stored on the *singular* and *plural* instance attributes: >>> msg.singular '%(count)d goose' >>> msg.plural '%(count)d geese' The translation and number selection isn't performed till `NGettext.__call__()` is called. This will translate and pick the correct number using ``gettext.ungettext()``. As a callable, an `NGettext` instance takes a single argument, an integer specifying the count. For example: >>> msg(0) u'%(count)d geese' >>> msg(1) u'%(count)d goose' >>> msg(2) u'%(count)d geese' `NGettext.__mod__()` is a convenience method for Python "percent" string formatting. It can only be used if your substitution ``dict`` contains the count in a ``'count'`` item. For example: >>> msg % dict(count=0) u'0 geese' >>> msg % dict(count=1) u'1 goose' >>> msg % dict(count=2) u'2 geese' Alternatively, these longer forms have the same effect as the three examples above: >>> msg(0) % dict(count=0) u'0 geese' >>> msg(1) % dict(count=1) u'1 goose' >>> msg(2) % dict(count=2) u'2 geese' A ``KeyError`` is raised if your substitution ``dict`` doesn't have a ``'count'`` item. For example: >>> msg2 = NGettext('%(num)d goose', '%(num)d geese') >>> msg2 % dict(num=0) Traceback (most recent call last): ... KeyError: 'count' However, in this case you can still use the longer, explicit form for string substitution: >>> msg2(0) % dict(num=0) u'0 geese' See `NGettextFactory` for additional details. 
""" __slots__ = ('singular', 'plural') def __init__(self, singular, plural, domain=None, localedir=None): super(NGettext, self).__init__(domain, localedir) self.singular = singular self.plural = plural self.args = (singular, plural, domain, localedir) def __repr__(self): return '%s(%r, %r, domain=%r, localedir=%r)' % (self.__class__.__name__, self.singular, self.plural, self.domain, self.localedir) def __mod__(self, kw): count = kw['count'] return self(count) % kw def __call__(self, count): if self.key in context.__dict__: ng = context.__dict__[self.key].ungettext else: ng = create_translation(self.key).ungettext return ng(self.singular, self.plural, count) @six.python_2_unicode_compatible class ConcatenatedLazyText(object): """Concatenation of multiple strings, or any objects convertible to unicode Used to concatenate several LazyTexts together. This allows large strings like help text to be split, so translators do not have to re-translate the whole text when only a small part changes. Additional strings may be added to the end with the + or += operators. """ def __init__(self, *components): self.components = list(components) def __repr__(self): return '%s(%r)' % (self.__class__.__name__, self.components) def __str__(self): return u''.join(unicode(c) for c in self.components) def __json__(self): return unicode(self) def __mod__(self, kw): return unicode(self) % kw def __add__(self, other): if isinstance(other, ConcatenatedLazyText): return ConcatenatedLazyText(*self.components + other.components) else: return ConcatenatedLazyText(*self.components + [other]) def __radd__(self, other): if isinstance(other, ConcatenatedLazyText): return ConcatenatedLazyText(*other.components + self.components) else: return ConcatenatedLazyText(*[other] + self.components) class GettextFactory(object): """ Factory for creating ``_()`` functions. 
A `GettextFactory` allows you to mark translatable messages that are evaluated at initialization time, but deferred their actual translation till request time. When you create a `GettextFactory` you can provide a specific gettext *domain* and *localedir*. By default the *domain* will be ``'ipa'`` and the *localedir* will be ``None``. Both are available via instance attributes of the same name. For example: >>> _ = GettextFactory() >>> _.domain 'ipa' >>> _.localedir is None True When the *localedir* is ``None``, gettext will use the default system localedir (typically ``'/usr/share/locale/'``). In general, you should **not** provide a *localedir*... it is intended only to support in-tree testing. Third party plugins will most likely want to use a different gettext *domain*. For example: >>> _ = GettextFactory(domain='ipa_3rd_party') >>> _.domain 'ipa_3rd_party' When you call your `GettextFactory` instance, it will return a `Gettext` instance associated with the same *domain* and *localedir*. For example: >>> my_msg = _('Hello world') >>> my_msg.domain 'ipa_3rd_party' >>> my_msg.localedir is None True The message isn't translated till `Gettext.__unicode__()` is called, which should be done during each request. See the `Gettext` class for additional details. """ def __init__(self, domain='ipa', localedir=None): """ Initialize. :param domain: The gettext domain in which this message will be translated, e.g. ``'ipa'`` or ``'ipa_3rd_party'``; default is ``'ipa'`` :param localedir: The directory containing the gettext translations, e.g. ``'/usr/share/locale/'``; default is ``None``, in which case gettext will use the default system locale directory. 
""" self.domain = domain self.localedir = localedir def __repr__(self): return '%s(domain=%r, localedir=%r)' % (self.__class__.__name__, self.domain, self.localedir) def __call__(self, msg): return Gettext(msg, self.domain, self.localedir) class NGettextFactory(GettextFactory): """ Factory for creating ``ngettext()`` functions. `NGettextFactory` is similar to `GettextFactory`, except `NGettextFactory` is for plural forms. So that standard tools like xgettext can find your plural forms, you should reference your `NGettextFactory` instance using a variable named *ngettext*. For example: >>> ngettext = NGettextFactory() >>> ngettext NGettextFactory(domain='ipa', localedir=None) When you call your `NGettextFactory` instance to create a deferred translation, you provide the *singular* message, the *plural* message, and a dummy *count*. An `NGettext` instance will be returned. For example: >>> my_msg = ngettext('%(count)d goose', '%(count)d geese', 0) >>> my_msg NGettext('%(count)d goose', '%(count)d geese', domain='ipa', localedir=None) The *count* is ignored (because the translation is deferred), but you should still provide it so parsing tools aren't confused. For consistency, it is recommended to always provide ``0`` for the *count*. See `NGettext` for details on how the deferred translation is later performed. See `GettextFactory` for details on setting a different gettext *domain* (likely needed for 3rd-party plugins). """ def __call__(self, singular, plural, count): return NGettext(singular, plural, self.domain, self.localedir) # Process wide factories: _ = GettextFactory() ngettext = NGettextFactory() ugettext = _<|fim▁end|>
>>> from ipalib import Command, _, ngettext >>> class my_plugin(Command): ... my_string = _('Hello, %(name)s.')
<|file_name|>csc.py<|end_file_name|><|fim▁begin|>"""Compressed Sparse Column matrix format""" from __future__ import division, print_function, absolute_import __docformat__ = "restructuredtext en" __all__ = ['csc_matrix', 'isspmatrix_csc'] import numpy as np from scipy._lib.six import xrange from ._sparsetools import csc_tocsr from . import _sparsetools from .sputils import upcast, isintlike, IndexMixin, get_index_dtype from .compressed import _cs_matrix class csc_matrix(_cs_matrix, IndexMixin): """ Compressed Sparse Column matrix This can be instantiated in several ways: csc_matrix(D) with a dense matrix or rank-2 ndarray D csc_matrix(S)<|fim▁hole|> csc_matrix((M, N), [dtype]) to construct an empty matrix with shape (M, N) dtype is optional, defaulting to dtype='d'. csc_matrix((data, (row_ind, col_ind)), [shape=(M, N)]) where ``data``, ``row_ind`` and ``col_ind`` satisfy the relationship ``a[row_ind[k], col_ind[k]] = data[k]``. csc_matrix((data, indices, indptr), [shape=(M, N)]) is the standard CSC representation where the row indices for column i are stored in ``indices[indptr[i]:indptr[i+1]]`` and their corresponding values are stored in ``data[indptr[i]:indptr[i+1]]``. If the shape parameter is not supplied, the matrix dimensions are inferred from the index arrays. Attributes ---------- dtype : dtype Data type of the matrix shape : 2-tuple Shape of the matrix ndim : int Number of dimensions (this is always 2) nnz Number of nonzero elements data Data array of the matrix indices CSC format index array indptr CSC format index pointer array has_sorted_indices Whether indices are sorted Notes ----- Sparse matrices can be used in arithmetic operations: they support addition, subtraction, multiplication, division, and matrix power. Advantages of the CSC format - efficient arithmetic operations CSC + CSC, CSC * CSC, etc. 
- efficient column slicing - fast matrix vector products (CSR, BSR may be faster) Disadvantages of the CSC format - slow row slicing operations (consider CSR) - changes to the sparsity structure are expensive (consider LIL or DOK) Examples -------- >>> import numpy as np >>> from scipy.sparse import csc_matrix >>> csc_matrix((3, 4), dtype=np.int8).toarray() array([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]], dtype=int8) >>> row = np.array([0, 2, 2, 0, 1, 2]) >>> col = np.array([0, 0, 1, 2, 2, 2]) >>> data = np.array([1, 2, 3, 4, 5, 6]) >>> csc_matrix((data, (row, col)), shape=(3, 3)).toarray() array([[1, 0, 4], [0, 0, 5], [2, 3, 6]]) >>> indptr = np.array([0, 2, 3, 6]) >>> indices = np.array([0, 2, 2, 0, 1, 2]) >>> data = np.array([1, 2, 3, 4, 5, 6]) >>> csc_matrix((data, indices, indptr), shape=(3, 3)).toarray() array([[1, 0, 4], [0, 0, 5], [2, 3, 6]]) """ format = 'csc' def transpose(self, copy=False): from .csr import csr_matrix M,N = self.shape return csr_matrix((self.data,self.indices,self.indptr),(N,M),copy=copy) def __iter__(self): csr = self.tocsr() for r in xrange(self.shape[0]): yield csr[r,:] def tocsc(self, copy=False): if copy: return self.copy() else: return self def tocsr(self): M,N = self.shape idx_dtype = get_index_dtype((self.indptr, self.indices), maxval=max(self.nnz, N)) indptr = np.empty(M + 1, dtype=idx_dtype) indices = np.empty(self.nnz, dtype=idx_dtype) data = np.empty(self.nnz, dtype=upcast(self.dtype)) csc_tocsr(M, N, self.indptr.astype(idx_dtype), self.indices.astype(idx_dtype), self.data, indptr, indices, data) from .csr import csr_matrix A = csr_matrix((data, indices, indptr), shape=self.shape) A.has_sorted_indices = True return A def __getitem__(self, key): # Use CSR to implement fancy indexing. row, col = self._unpack_index(key) # Things that return submatrices. row or col is a int or slice. 
if (isinstance(row, slice) or isinstance(col, slice) or isintlike(row) or isintlike(col)): return self.T[col, row].T # Things that return a sequence of values. else: return self.T[col, row] def nonzero(self): # CSC can't use _cs_matrix's .nonzero method because it # returns the indices sorted for self transposed. # Get row and col indices, from _cs_matrix.tocoo major_dim, minor_dim = self._swap(self.shape) minor_indices = self.indices major_indices = np.empty(len(minor_indices), dtype=self.indptr.dtype) _sparsetools.expandptr(major_dim, self.indptr, major_indices) row, col = self._swap((major_indices, minor_indices)) # Sort them to be in C-style order ind = np.lexsort((col, row)) row = row[ind] col = col[ind] return row, col nonzero.__doc__ = _cs_matrix.nonzero.__doc__ def getrow(self, i): """Returns a copy of row i of the matrix, as a (1 x n) CSR matrix (row vector). """ # we convert to CSR to maintain compatibility with old impl. # in spmatrix.getrow() return self._get_submatrix(i, slice(None)).tocsr() def getcol(self, i): """Returns a copy of column i of the matrix, as a (m x 1) CSC matrix (column vector). """ return self._get_submatrix(slice(None), i) # these functions are used by the parent class (_cs_matrix) # to remove redudancy between csc_matrix and csr_matrix def _swap(self,x): """swap the members of x if this is a column-oriented matrix """ return (x[1],x[0]) def isspmatrix_csc(x): return isinstance(x, csc_matrix)<|fim▁end|>
with another sparse matrix S (equivalent to S.tocsc())
<|file_name|>check_models.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import from __future__ import print_function from keras.datasets import mnist from keras.models import Sequential from keras.layers.core import Dense, Activation, Merge from keras.utils import np_utils import numpy as np nb_classes = 10 batch_size = 128 nb_epoch = 1 max_train_samples = 5000 max_test_samples = 1000 np.random.seed(1337) # for reproducibility<|fim▁hole|># the data, shuffled and split between tran and test sets (X_train, y_train), (X_test, y_test) = mnist.load_data() X_train = X_train.reshape(60000,784)[:max_train_samples] X_test = X_test.reshape(10000,784)[:max_test_samples] X_train = X_train.astype("float32") X_test = X_test.astype("float32") X_train /= 255 X_test /= 255 # convert class vectors to binary class matrices Y_train = np_utils.to_categorical(y_train, nb_classes)[:max_train_samples] Y_test = np_utils.to_categorical(y_test, nb_classes)[:max_test_samples] ######################### # sequential model test # ######################### print('Test sequential') model = Sequential() model.add(Dense(784, 50)) model.add(Activation('relu')) model.add(Dense(50, 10)) model.add(Activation('softmax')) model.compile(loss='categorical_crossentropy', optimizer='rmsprop') model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_data=(X_test, Y_test)) model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_data=(X_test, Y_test)) model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_split=0.1) model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_split=0.1) model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0) model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0, shuffle=False) score = 
model.evaluate(X_train, Y_train, verbose=0) print('score:', score) if score < 0.25: raise Exception('Score too low, learning issue.') preds = model.predict(X_test, verbose=0) classes = model.predict_classes(X_test, verbose=0) model.get_config(verbose=1) ################### # merge test: sum # ################### print('Test merge: sum') left = Sequential() left.add(Dense(784, 50)) left.add(Activation('relu')) right = Sequential() right.add(Dense(784, 50)) right.add(Activation('relu')) model = Sequential() model.add(Merge([left, right], mode='sum')) model.add(Dense(50, 10)) model.add(Activation('softmax')) model.compile(loss='categorical_crossentropy', optimizer='rmsprop') model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_data=([X_test, X_test], Y_test)) model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_data=([X_test, X_test], Y_test)) model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_split=0.1) model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_split=0.1) model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0) model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0, shuffle=False) score = model.evaluate([X_train, X_train], Y_train, verbose=0) print('score:', score) if score < 0.22: raise Exception('Score too low, learning issue.') preds = model.predict([X_test, X_test], verbose=0) classes = model.predict_classes([X_test, X_test], verbose=0) model.get_config(verbose=1) ################### # merge test: concat # ################### print('Test merge: concat') left = Sequential() left.add(Dense(784, 50)) left.add(Activation('relu')) right = Sequential() right.add(Dense(784, 50)) right.add(Activation('relu')) 
model = Sequential() model.add(Merge([left, right], mode='concat')) model.add(Dense(50*2, 10)) model.add(Activation('softmax')) model.compile(loss='categorical_crossentropy', optimizer='rmsprop') model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_data=([X_test, X_test], Y_test)) model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_data=([X_test, X_test], Y_test)) model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_split=0.1) model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_split=0.1) model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0) model.fit([X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0, shuffle=False) score = model.evaluate([X_train, X_train], Y_train, verbose=0) print('score:', score) if score < 0.22: raise Exception('Score too low, learning issue.') preds = model.predict([X_test, X_test], verbose=0) classes = model.predict_classes([X_test, X_test], verbose=0) model.get_config(verbose=1) ########################## # test merge recursivity # ########################## print('Test merge recursivity') left = Sequential() left.add(Dense(784, 50)) left.add(Activation('relu')) right = Sequential() right.add(Dense(784, 50)) right.add(Activation('relu')) righter = Sequential() righter.add(Dense(784, 50)) righter.add(Activation('relu')) intermediate = Sequential() intermediate.add(Merge([left, right], mode='sum')) intermediate.add(Dense(50, 50)) intermediate.add(Activation('relu')) model = Sequential() model.add(Merge([intermediate, righter], mode='sum')) model.add(Dense(50, 10)) model.add(Activation('softmax')) model.compile(loss='categorical_crossentropy', optimizer='rmsprop') model.fit([X_train, X_train, 
X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_data=([X_test, X_test, X_test], Y_test)) model.fit([X_train, X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_data=([X_test, X_test, X_test], Y_test)) model.fit([X_train, X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_split=0.1) model.fit([X_train, X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_split=0.1) model.fit([X_train, X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0) model.fit([X_train, X_train, X_train], Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0, shuffle=False) score = model.evaluate([X_train, X_train, X_train], Y_train, verbose=0) print('score:', score) if score < 0.19: raise Exception('Score too low, learning issue.') preds = model.predict([X_test, X_test, X_test], verbose=0) classes = model.predict_classes([X_test, X_test, X_test], verbose=0) model.get_config(verbose=1) model.save_weights('temp.h5') model.load_weights('temp.h5') score = model.evaluate([X_train, X_train, X_train], Y_train, verbose=0) print('score:', score) ###################### # test merge overlap # ###################### print('Test merge overlap') left = Sequential() left.add(Dense(784, 50)) left.add(Activation('relu')) model = Sequential() model.add(Merge([left, left], mode='sum')) model.add(Dense(50, 10)) model.add(Activation('softmax')) model.compile(loss='categorical_crossentropy', optimizer='rmsprop') model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=0, validation_data=(X_test, Y_test)) model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_data=(X_test, Y_test)) model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, 
show_accuracy=True, verbose=0, validation_split=0.1) model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=False, verbose=0, validation_split=0.1) model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0) model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, verbose=0, shuffle=False) score = model.evaluate(X_train, Y_train, verbose=0) print('score:', score) if score < 0.22: raise Exception('Score too low, learning issue.') preds = model.predict(X_test, verbose=0) classes = model.predict_classes(X_test, verbose=0) model.get_config(verbose=1)<|fim▁end|>
<|file_name|>Floor.ts<|end_file_name|><|fim▁begin|>'use strict'; import * as models from './models'; export interface Floor { floorId?: string;<|fim▁hole|> name?: string; map?: string; }<|fim▁end|>
<|file_name|>LaravelRunConf.java<|end_file_name|><|fim▁begin|>package com.smartbit8.laravelstorm.run; import com.intellij.execution.ExecutionException; import com.intellij.execution.Executor; import com.intellij.execution.configurations.*; import com.intellij.execution.process.*; import com.intellij.execution.runners.ExecutionEnvironment; import com.intellij.ide.browsers.*; import com.intellij.openapi.options.SettingsEditor; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.InvalidDataException; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.WriteExternalException; import com.intellij.util.xmlb.SkipDefaultsSerializationFilter; import com.intellij.util.xmlb.XmlSerializer; import com.jetbrains.php.config.interpreters.PhpInterpreter; import com.jetbrains.php.config.interpreters.PhpInterpretersManagerImpl; import com.smartbit8.laravelstorm.ui.LaravelRunConfSettingsEditor; import org.jdom.Element; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.File; public class LaravelRunConf extends RunConfigurationBase { private Project project; private String host = "localhost"; private int port = 8000; private String route = "/"; private WebBrowser browser; private PhpInterpreter interpreter; LaravelRunConf(@NotNull Project project, @NotNull ConfigurationFactory factory, String name) { super(project, factory, name); this.project = project; } @Override public void createAdditionalTabComponents(AdditionalTabComponentManager manager, ProcessHandler startedProcess) { LogTab logTab = new LogTab(getProject()); manager.addAdditionalTabComponent(logTab, "Laravel.log"); startedProcess.addProcessListener(new ProcessAdapter() { @Override public void startNotified(ProcessEvent event) { logTab.start(); } @Override public void processTerminated(ProcessEvent event) { startedProcess.removeProcessListener(this); } }); } @Override public void readExternal(Element element) throws 
InvalidDataException { super.readExternal(element); Settings settings = XmlSerializer.deserialize(element, Settings.class); this.host = settings.host; this.port = settings.port; this.route = settings.route; this.browser = WebBrowserManager.getInstance().findBrowserById(settings.browser); this.interpreter = PhpInterpretersManagerImpl.getInstance(getProject()).findInterpreter(settings.interpreterName); } @Override public void writeExternal(Element element) throws WriteExternalException { Settings settings = new Settings(); settings.host = this.host; settings.port = this.port; settings.route = this.route; if (this.browser != null) settings.browser = this.browser.getId().toString(); else settings.browser = ""; if (this.interpreter != null) settings.interpreterName = this.interpreter.getName(); else settings.interpreterName = ""; XmlSerializer.serializeInto(settings, element, new SkipDefaultsSerializationFilter()); super.writeExternal(element); } @NotNull @Override public SettingsEditor<? extends RunConfiguration> getConfigurationEditor() { return new LaravelRunConfSettingsEditor(getProject()); } @Override public void checkConfiguration() throws RuntimeConfigurationException {} @Nullable @Override public RunProfileState getState(@NotNull Executor executor, @NotNull ExecutionEnvironment executionEnvironment) throws ExecutionException { return new CommandLineState(executionEnvironment) { @NotNull @Override protected ProcessHandler startProcess() throws ExecutionException { String phpExec = (interpreter != null? 
interpreter.getPathToPhpExecutable():"php"); GeneralCommandLine cmd = new GeneralCommandLine(phpExec, "artisan", "serve", "--host=" + host, "--port="+ port); cmd.setWorkDirectory(project.getBasePath()); OSProcessHandler handler = new OSProcessHandler(cmd); handler.addProcessListener(new ProcessAdapter() { @Override public void onTextAvailable(ProcessEvent event, Key outputType) { String text = event.getText(); if (text != null){ if (text.startsWith("Laravel development server started:")){ BrowserLauncher.getInstance().browse("http://" + host + ":" + port + (route.startsWith("/") ? route : "/" + route), browser); handler.removeProcessListener(this); } } } }); // new LaravelRunMgr(handler, new File(getProject().getBasePath()+("/storage/logs/laravel.log"))); return handler; } }; } public int getPort() { return port; } public String getHost() { return host; } public void setPort(int port) { this.port = port; } public void setHost(String host) { this.host = host; } public String getRoute() { return route; } public void setRoute(String route) { this.route = route; } public WebBrowser getBrowser() { return browser; } public void setBrowser(WebBrowser browser) { this.browser = browser; } public PhpInterpreter getInterpreter() { return interpreter; } public void setInterpreter(PhpInterpreter interpreter) {<|fim▁hole|> public static class Settings { public String host; public int port; public String route; public String browser; public String interpreterName; } }<|fim▁end|>
this.interpreter = interpreter; }
<|file_name|>flat.rs<|end_file_name|><|fim▁begin|>// Copyright 2015, 2016 Ethcore (UK) Ltd. // This file is part of Parity. // Parity is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // Parity is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with Parity. If not, see <http://www.gnu.org/licenses/>. //! Flat trace module use std::collections::VecDeque; use rlp::*; use util::HeapSizeOf; use basic_types::LogBloom; use super::trace::{Action, Res}; /// Trace localized in vector of traces produced by a single transaction. /// /// Parent and children indexes refer to positions in this vector. #[derive(Debug, PartialEq, Clone, Binary)] pub struct FlatTrace { /// Type of action performed by a transaction. pub action: Action, /// Result of this action. pub result: Res, /// Number of subtraces. pub subtraces: usize, /// Exact location of trace. /// /// [index in root, index in first CALL, index in second CALL, ...] pub trace_address: VecDeque<usize>, } impl FlatTrace { /// Returns bloom of the trace. 
pub fn bloom(&self) -> LogBloom { self.action.bloom() | self.result.bloom() } } impl HeapSizeOf for FlatTrace { fn heap_size_of_children(&self) -> usize { self.trace_address.heap_size_of_children() } } impl Encodable for FlatTrace { fn rlp_append(&self, s: &mut RlpStream) { s.begin_list(4); s.append(&self.action); s.append(&self.result); s.append(&self.subtraces); s.append(&self.trace_address.clone().into_iter().collect::<Vec<_>>()); } } impl Decodable for FlatTrace { fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { let d = decoder.as_rlp(); let v: Vec<usize> = try!(d.val_at(3)); let res = FlatTrace { action: try!(d.val_at(0)), result: try!(d.val_at(1)), subtraces: try!(d.val_at(2)), trace_address: v.into_iter().collect(), }; Ok(res) } } /// Represents all traces produced by a single transaction. #[derive(Debug, PartialEq, Clone)] pub struct FlatTransactionTraces(Vec<FlatTrace>); impl From<Vec<FlatTrace>> for FlatTransactionTraces { fn from(v: Vec<FlatTrace>) -> Self { FlatTransactionTraces(v) } } impl HeapSizeOf for FlatTransactionTraces { fn heap_size_of_children(&self) -> usize { self.0.heap_size_of_children() } } impl FlatTransactionTraces { /// Returns bloom of all traces in the collection. pub fn bloom(&self) -> LogBloom { self.0.iter().fold(Default::default(), | bloom, trace | bloom | trace.bloom()) } } impl Encodable for FlatTransactionTraces { fn rlp_append(&self, s: &mut RlpStream) { s.append(&self.0); } } impl Decodable for FlatTransactionTraces { fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { Ok(FlatTransactionTraces(try!(Decodable::decode(decoder)))) } } impl Into<Vec<FlatTrace>> for FlatTransactionTraces { fn into(self) -> Vec<FlatTrace> { self.0 } } /// Represents all traces produced by transactions in a single block. 
#[derive(Debug, PartialEq, Clone, Default)] pub struct FlatBlockTraces(Vec<FlatTransactionTraces>); impl HeapSizeOf for FlatBlockTraces { fn heap_size_of_children(&self) -> usize { self.0.heap_size_of_children() } } impl From<Vec<FlatTransactionTraces>> for FlatBlockTraces { fn from(v: Vec<FlatTransactionTraces>) -> Self { FlatBlockTraces(v) } } impl FlatBlockTraces { /// Returns bloom of all traces in the block. pub fn bloom(&self) -> LogBloom { self.0.iter().fold(Default::default(), | bloom, tx_traces | bloom | tx_traces.bloom()) } } impl Encodable for FlatBlockTraces { fn rlp_append(&self, s: &mut RlpStream) { s.append(&self.0); } } impl Decodable for FlatBlockTraces { fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { Ok(FlatBlockTraces(try!(Decodable::decode(decoder)))) } } impl Into<Vec<FlatTransactionTraces>> for FlatBlockTraces { fn into(self) -> Vec<FlatTransactionTraces> { self.0 } } #[cfg(test)] mod tests { use super::{FlatBlockTraces, FlatTransactionTraces, FlatTrace}; use trace::trace::{Action, Res, CallResult, Call, Suicide}; use types::executed::CallType; #[test] fn test_trace_serialization() { // block #51921 let flat_trace = FlatTrace { action: Action::Call(Call { from: "8dda5e016e674683241bf671cced51e7239ea2bc".parse().unwrap(), to: "37a5e19cc2d49f244805d5c268c0e6f321965ab9".parse().unwrap(), value: "3627e8f712373c0000".parse().unwrap(), gas: 0x03e8.into(), input: vec![], call_type: CallType::Call, }), result: Res::Call(CallResult { gas_used: 0.into(), output: vec![], }),<|fim▁hole|> trace_address: Default::default(), subtraces: 0, }; let flat_trace1 = FlatTrace { action: Action::Call(Call { from: "3d0768da09ce77d25e2d998e6a7b6ed4b9116c2d".parse().unwrap(), to: "412fda7643b37d436cb40628f6dbbb80a07267ed".parse().unwrap(), value: 0.into(), gas: 0x010c78.into(), input: vec![0x41, 0xc0, 0xe1, 0xb5], call_type: CallType::Call, }), result: Res::Call(CallResult { gas_used: 0x0127.into(), output: vec![], }), trace_address: 
Default::default(), subtraces: 1, }; let flat_trace2 = FlatTrace { action: Action::Suicide(Suicide { address: "412fda7643b37d436cb40628f6dbbb80a07267ed".parse().unwrap(), balance: 0.into(), refund_address: "3d0768da09ce77d25e2d998e6a7b6ed4b9116c2d".parse().unwrap(), }), result: Res::None, trace_address: vec![0].into_iter().collect(), subtraces: 0, }; let block_traces = FlatBlockTraces(vec![ FlatTransactionTraces(vec![flat_trace]), FlatTransactionTraces(vec![flat_trace1, flat_trace2]) ]); let encoded = ::rlp::encode(&block_traces); let decoded = ::rlp::decode(&encoded); assert_eq!(block_traces, decoded); } }<|fim▁end|>
<|file_name|>queries.ts<|end_file_name|><|fim▁begin|>export {Query, Dialect} from 'src/client' export interface TimeRange { lower: string upper?: string | null seconds?: number format?: string<|fim▁hole|>}<|fim▁end|>
label?: string duration?: string
<|file_name|>sentry.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import import mock import os from django.conf import settings from sentry_sdk import Hub TEST_ROOT = os.path.normpath( os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir, os.pardir, "tests") ) def pytest_configure(config): # HACK: Only needed for testing! os.environ.setdefault("_SENTRY_SKIP_CONFIGURATION", "1") os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sentry.conf.server") # override docs which are typically synchronized from an upstream server # to ensure tests are consistent os.environ.setdefault( "INTEGRATION_DOC_FOLDER", os.path.join(TEST_ROOT, "fixtures", "integration-docs") ) from sentry.utils import integrationdocs integrationdocs.DOC_FOLDER = os.environ["INTEGRATION_DOC_FOLDER"] if not settings.configured: # only configure the db if its not already done test_db = os.environ.get("DB", "postgres") if test_db == "postgres": settings.DATABASES["default"].update( { "ENGINE": "sentry.db.postgres", "USER": "postgres", "NAME": "sentry", "HOST": "127.0.0.1", } ) # postgres requires running full migration all the time # since it has to install stored functions which come from # an actual migration. else: raise RuntimeError("oops, wrong database: %r" % test_db) settings.TEMPLATE_DEBUG = True # Disable static compiling in tests settings.STATIC_BUNDLES = {} # override a few things with our test specifics settings.INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + ("tests",) # Need a predictable key for tests that involve checking signatures settings.SENTRY_PUBLIC = False if not settings.SENTRY_CACHE: settings.SENTRY_CACHE = "sentry.cache.django.DjangoCache" settings.SENTRY_CACHE_OPTIONS = {} # This speeds up the tests considerably, pbkdf2 is by design, slow. 
settings.PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"] settings.AUTH_PASSWORD_VALIDATORS = [] # Replace real sudo middleware with our mock sudo middleware # to assert that the user is always in sudo mode middleware = list(settings.MIDDLEWARE_CLASSES) sudo = middleware.index("sentry.middleware.sudo.SudoMiddleware") middleware[sudo] = "sentry.testutils.middleware.SudoMiddleware" settings.MIDDLEWARE_CLASSES = tuple(middleware) settings.SENTRY_OPTIONS["cloudflare.secret-key"] = "cloudflare-secret-key" # enable draft features settings.SENTRY_OPTIONS["mail.enable-replies"] = True settings.SENTRY_ALLOW_ORIGIN = "*" settings.SENTRY_TSDB = "sentry.tsdb.inmemory.InMemoryTSDB" settings.SENTRY_TSDB_OPTIONS = {} if settings.SENTRY_NEWSLETTER == "sentry.newsletter.base.Newsletter": settings.SENTRY_NEWSLETTER = "sentry.newsletter.dummy.DummyNewsletter" settings.SENTRY_NEWSLETTER_OPTIONS = {} settings.BROKER_BACKEND = "memory" settings.BROKER_URL = None settings.CELERY_ALWAYS_EAGER = False settings.CELERY_EAGER_PROPAGATES_EXCEPTIONS = True settings.DEBUG_VIEWS = True settings.SENTRY_ENCRYPTION_SCHEMES = () settings.DISABLE_RAVEN = True settings.CACHES = {"default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"}} if os.environ.get("USE_SNUBA", False): settings.SENTRY_SEARCH = "sentry.search.snuba.SnubaSearchBackend" settings.SENTRY_TAGSTORE = "sentry.tagstore.snuba.SnubaCompatibilityTagStorage" settings.SENTRY_TSDB = "sentry.tsdb.redissnuba.RedisSnubaTSDB" settings.SENTRY_EVENTSTREAM = "sentry.eventstream.snuba.SnubaEventStream" if not hasattr(settings, "SENTRY_OPTIONS"): settings.SENTRY_OPTIONS = {} settings.SENTRY_OPTIONS.update( { "redis.clusters": {"default": {"hosts": {0: {"db": 9}}}}, "mail.backend": "django.core.mail.backends.locmem.EmailBackend", "system.url-prefix": "http://testserver", "slack.client-id": "slack-client-id", "slack.client-secret": "slack-client-secret", "slack.verification-token": "slack-verification-token",<|fim▁hole|> 
"github-app.client-id": "github-client-id", "github-app.client-secret": "github-client-secret", "vsts.client-id": "vsts-client-id", "vsts.client-secret": "vsts-client-secret", } ) # django mail uses socket.getfqdn which doesn't play nice if our # networking isn't stable patcher = mock.patch("socket.getfqdn", return_value="localhost") patcher.start() if not settings.SOUTH_TESTS_MIGRATE: settings.INSTALLED_APPS = tuple(i for i in settings.INSTALLED_APPS if i != "south") from sentry.runner.initializer import ( bootstrap_options, configure_structlog, initialize_receivers, fix_south, bind_cache_to_option_store, setup_services, ) bootstrap_options(settings) configure_structlog() fix_south(settings) import django if hasattr(django, "setup"): django.setup() bind_cache_to_option_store() initialize_receivers() setup_services() register_extensions() from sentry.utils.redis import clusters with clusters.get("default").all() as client: client.flushdb() # force celery registration from sentry.celery import app # NOQA # disable DISALLOWED_IPS from sentry import http http.DISALLOWED_IPS = set() def register_extensions(): from sentry.plugins import plugins from sentry.plugins.utils import TestIssuePlugin2 plugins.register(TestIssuePlugin2) from sentry import integrations from sentry.integrations.bitbucket import BitbucketIntegrationProvider from sentry.integrations.example import ( ExampleIntegrationProvider, AliasedIntegrationProvider, ExampleRepositoryProvider, ) from sentry.integrations.github import GitHubIntegrationProvider from sentry.integrations.github_enterprise import GitHubEnterpriseIntegrationProvider from sentry.integrations.gitlab import GitlabIntegrationProvider from sentry.integrations.jira import JiraIntegrationProvider from sentry.integrations.jira_server import JiraServerIntegrationProvider from sentry.integrations.slack import SlackIntegrationProvider from sentry.integrations.vsts import VstsIntegrationProvider from sentry.integrations.vsts_extension import 
VstsExtensionIntegrationProvider integrations.register(BitbucketIntegrationProvider) integrations.register(ExampleIntegrationProvider) integrations.register(AliasedIntegrationProvider) integrations.register(GitHubIntegrationProvider) integrations.register(GitHubEnterpriseIntegrationProvider) integrations.register(GitlabIntegrationProvider) integrations.register(JiraIntegrationProvider) integrations.register(JiraServerIntegrationProvider) integrations.register(SlackIntegrationProvider) integrations.register(VstsIntegrationProvider) integrations.register(VstsExtensionIntegrationProvider) from sentry.plugins import bindings from sentry.plugins.providers.dummy import DummyRepositoryProvider bindings.add("repository.provider", DummyRepositoryProvider, id="dummy") bindings.add( "integration-repository.provider", ExampleRepositoryProvider, id="integrations:example" ) def pytest_runtest_teardown(item): if not os.environ.get("USE_SNUBA", False): from sentry import tsdb # TODO(dcramer): this only works if this is the correct tsdb backend tsdb.flush() # XXX(dcramer): only works with DummyNewsletter from sentry import newsletter if hasattr(newsletter.backend, "clear"): newsletter.backend.clear() from sentry.utils.redis import clusters with clusters.get("default").all() as client: client.flushdb() from celery.task.control import discard_all discard_all() from sentry.models import OrganizationOption, ProjectOption, UserOption for model in (OrganizationOption, ProjectOption, UserOption): model.objects.clear_local_cache() Hub.main.bind_client(None)<|fim▁end|>
"github-app.name": "sentry-test-app",
<|file_name|>ChangeSessionToken.ts<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1 oid sha256:de6a4f96b6914035c33c21f63dc7fcb03b9203aa911b580501b99fbe3ea4b096<|fim▁hole|><|fim▁end|>
size 138
<|file_name|>clutter-ui.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python import os import sys import urllib from gi.repository import Clutter, ClutterX11, Mx, Lomo, GObject, GLib class Cover(Clutter.Box): __gtype_name__ = 'GlosseCover' # # Member, setter and getter for the 'lomo' property # def _get_lomo_player_prop(self): return self._lomo_player_prop def _set_lomo_player_prop(self, lomo_player): self._lomo_player_prop = lomo_player if not lomo_player: raise Exception('No lomo-player') lomo_player.connect('notify::current', lambda f, t: self._sync_from_model()) self._sync_from_model() lomo_player = GObject.property (type = Lomo.Player, setter = _set_lomo_player_prop, getter = _get_lomo_player_prop, flags = GObject.PARAM_READWRITE) def __init__(self, *args, **kwargs): self._layout = Clutter.BinLayout() Clutter.Box.__init__(self, *args, layout_manager = self._layout, **kwargs) self.set_property('lomo-player', kwargs['lomo_player']) self._actors = [] self._timelines = [] self.set_from_file(os.path.join(os.path.dirname(__file__), 'cover-default.png')) def _sync_from_model(self): """ Sync data from model """ lomo_player = self.get_property('lomo-player') if not lomo_player: raise Exception('Missing model') stream = lomo_player.get_nth_stream(lomo_player.get_current()) art = stream.get_extended_metadata('art-data') if type(art) == str and art.startswith('file:///'): self.set_from_file(urllib.unquote(art[7:])) def set_from_file(self, filename): try: w = self.get_width() texture = Clutter.Texture( filename = filename, sync_size = True, keep_aspect_ratio = True, opacity = 0x00) except Exception as e: print repr(e) return timeline = Clutter.Timeline(duration = 1000) texture.animate_with_timelinev(Clutter.AnimationMode.LINEAR, timeline, ("opacity",), (0xff,)) timeline.connect('completed', self.timeline_completed_cb) if len(self._actors) > 0: t = self._actors[-1] t.animatev(Clutter.AnimationMode.LINEAR, 1000, ("opacity",) , (0x00,)) self._actors.append(texture) 
self._timelines.append(timeline) self._layout.add(texture, Clutter.BinAlignment.CENTER, Clutter.BinAlignment.CENTER) texture.show() def timeline_completed_cb(self, timeline): try: index = self._timelines.index(timeline) except ValueError: print "Invalid timeline" return if index == 0: return index = index - 1 actor = self._actors[index] self.remove_actor(actor) self._actors.pop(index) self._timelines.pop(index) class Seek(Clutter.Box): __gtype_name__ = 'GlosseSeek' def _get_lomo_player_prop(self): return self._lomo_player_prop def _set_lomo_player_prop(self, lomo_player): if not lomo_player: raise Exception('Missing lomo') self._lomo_player_prop = lomo_player lomo_player.connect('notify', lambda a, b: self._update_from_model()) self._update_from_model() lomo_player = GObject.property(type = Lomo.Player, getter = _get_lomo_player_prop, setter = _set_lomo_player_prop, flags = GObject.PARAM_READWRITE) def __init__(self, *args, **kwargs): self._updater_id = 0 self._inhibitor = False self._slider = None layout = Clutter.TableLayout() super(Seek, self).__init__(*args, layout_manager = layout, **kwargs) white = Clutter.Color() white.from_string('#ffffffff') self._curr = Clutter.Text(text = '1:23', color = white) layout.pack(self._curr, 0, 0) self._slider = Mx.Slider() layout.pack(self._slider, 1, 0) self._total = Clutter.Text(text = '5:14', color = white) layout.pack(self._total, 2, 0) self._slider.connect('notify::value', self._on_notify_value) def _on_notify_value(self, widget, prop): if self._inhibitor: return lomo = self.get_property('lomo-player') pos = lomo.get_length() * self._slider.get_value() lomo.set_position(pos) def _update_from_model(self): lomo = self.get_property('lomo-player') if not lomo: raise Exception('Missing model') self._inhibitor = True state = lomo.get_state() #print "State %s ID: %d" % (repr(Lomo.State.PLAY), self._updater_id ) if state == Lomo.State.PLAY and (self._updater_id == 0): self._updater_id = GLib.timeout_add(500, 
self._update_from_model_timeout_helper) elif state != Lomo.State.PLAY and self._updater_id > 0: GLib.source_remove(self._updater_id) self._updater_id = 0 pos = lomo.get_position() if pos == -1: self._slider.set_value(0) self._curr.set_text('-:--') self._total.set_text('-:--') else: print pos secs = pos / 1e9 total = lomo.get_length() / 1e9 self._slider.set_value(pos / float(lomo.get_length())) self._curr.set_text("%d:%02d" % (secs / 60, secs % 60)) self._total.set_text("%d:%02d" % (total / 60, total % 60)) self._inhibitor = False def _update_from_model_timeout_helper(self): self._update_from_model() return True class Controls(Clutter.Box): __gtype_name__ = 'GlosseControls' def _set_lomo_player_prop(self, lomo): self._lomo_prop = lomo if not lomo: raise Exception('No lomo-player') lomo.connect('notify::state', lambda l,state: self.sync_from_model()) self.sync_from_model()<|fim▁hole|> def _get_lomo_player_prop(self): return self._lomo_prop lomo_player = GObject.property (type = Lomo.Player, setter = _set_lomo_player_prop, getter = _get_lomo_player_prop, flags = GObject.PARAM_READWRITE) def __init__(self, *args, **kwargs): layout = Clutter.TableLayout() super(Controls, self).__init__(*args, layout_manager = layout, **kwargs) d = (('previous', 'media-skip-backward' ), ('playback', 'media-playback-start'), ('next', 'media-skip-forward' )) self._buttons = dict() for index, (id_, icon_name) in enumerate(d): button = Mx.Button() button.add_actor(Mx.Icon( icon_name = icon_name, icon_size = 32)) button.show_all() layout.pack(button, index, 0) self._buttons[id_] = button self._buttons[id_].connect('clicked', self._button_clicked_cb) self.set_property('lomo-player', kwargs['lomo_player']) def sync_from_model(self): if not hasattr(self, '_buttons'): return lomo = self.get_property('lomo-player') if not lomo: raise Exception('Missing model') state = lomo.get_state() if state == Lomo.State.PLAY: icon_name = 'media-playback-pause' elif state in (Lomo.State.STOP, Lomo.State.PAUSE): 
icon_name = 'media-playback-start' else: raise Exception('Unknow state') self._buttons['playback'].set_icon_name(icon_name) def _button_clicked_cb(self, w): lomo = self.get_property('lomo-player') if lomo is None: raise Exception('No lomo') if w == self._buttons['previous']: i = lomo.get_previous() if i < 0: return lomo.set_current(i) elif w == self._buttons['next']: i = lomo.get_next() if i < 0: return lomo.set_current(i) else: lomo.toggle_playback_state() class App(Clutter.Stage): __gtype_name__ = 'GlosseApp' _lomo = None _controls = None _cover = None def _set_lomo(self, lomo): self._lomo = lomo d = { 'controls' : self._controls, 'cover' : self._cover } for widget in (self._cover, self._controls): if widget: widget.set_property('lomo', lomo) def _get_lomo(self): return self._lomo lomo = GObject.property(type = Lomo.Player, setter = _set_lomo, getter = _get_lomo) def __init__(self, uris): Clutter.Stage.__init__(self, use_alpha = True, user_resizable = True, min_height = 200, min_width = 200) self.set_property('lomo', Lomo.Player(random = True, repeat = True)) self.insert_songs(uris, 0) bg_color = Clutter.Color() bg_color.from_string('#000000ff') self.set_color(bg_color) # Setup main container main_layout = Clutter.BinLayout() main_box = Clutter.Box(layout_manager = main_layout) main_box.add_constraint(Clutter.BindConstraint.new(self, Clutter.BindCoordinate.SIZE, 0.0)) main_box.show() self.add_actor(main_box) # Setup cover (or background) self._cover = Cover(lomo_player = self._get_lomo()) self._cover.show() main_layout.add(self._cover, Clutter.BinAlignment.FILL, Clutter.BinAlignment.FILL) bottom_layout = Clutter.TableLayout() self._bottom_box = Clutter.Box(opacity = 0x00, layout_manager = bottom_layout) # Setup controls self._controls = Controls(lomo_player = self._get_lomo()) bottom_layout.pack(self._controls, 0, 0) # Setup seek self._seek = Seek(lomo_player = self._get_lomo()) bottom_layout.pack(self._seek, 0, 1) # Add bottom_box 
main_layout.add(self._bottom_box, Clutter.BinAlignment.CENTER, Clutter.BinAlignment.END) self.connect('enter-event', self.fade_in) self.connect('leave-event', self.fade_out) def insert_songs(self, songs, index): model = self.get_property('lomo') for song in songs: model.insert_uri(Lomo.create_uri(song), index) def fade_in(self, actor, ev): self._bottom_box.animatev(Clutter.AnimationMode.EASE_OUT_EXPO, 500, ("opacity",), (0xff,)) def fade_out(self, actor, ev): self._bottom_box.animatev(Clutter.AnimationMode.EASE_OUT_EXPO, 500, ("opacity",), (0x00,)) if __name__ == '__main__': Lomo.init(0, "") ClutterX11.set_use_argb_visual(True) Clutter.init([]) app = App(sys.argv[1:]) app.connect('destroy', lambda w: Clutter.main_quit()) app.show() Clutter.main()<|fim▁end|>
<|file_name|>extint_toggle.py<|end_file_name|><|fim▁begin|>import pyb import micropython micropython.alloc_emergency_exception_buf(100) led1 = pyb.LED(4) # 4 = Blue led2 = pyb.LED(3) # 3 = Yellow pin = pyb.Pin('SW', pyb.Pin.IN, pull=pyb.Pin.PULL_UP) def callback(line):<|fim▁hole|> led2.off() else: led2.on() ext = pyb.ExtInt(pin, pyb.ExtInt.IRQ_RISING_FALLING, pyb.Pin.PULL_UP, callback)<|fim▁end|>
led1.toggle() if pin.value(): # 1 = not pressed
<|file_name|>template_funcs_test.go<|end_file_name|><|fim▁begin|>// Copyright 2016 The Hugo Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package tpl import ( "bytes" "encoding/base64" "errors" "fmt" "html/template" "image" "image/color" "image/png" "math/rand" "path" "path/filepath" "reflect" "runtime" "strings" "testing" "time" "github.com/spf13/hugo/helpers" "github.com/spf13/afero" "github.com/spf13/cast" "github.com/spf13/hugo/hugofs" "github.com/spf13/viper" "github.com/stretchr/testify/assert" ) type tstNoStringer struct { } type tstCompareType int const ( tstEq tstCompareType = iota tstNe tstGt tstGe tstLt tstLe ) func tstIsEq(tp tstCompareType) bool { return tp == tstEq || tp == tstGe || tp == tstLe } func tstIsGt(tp tstCompareType) bool { return tp == tstGt || tp == tstGe } func tstIsLt(tp tstCompareType) bool { return tp == tstLt || tp == tstLe } func tstInitTemplates() { viper.Set("CurrentContentLanguage", helpers.NewLanguage("en")) helpers.ResetConfigProvider() } func TestFuncsInTemplate(t *testing.T) { viper.Reset() defer viper.Reset() workingDir := "/home/hugo" viper.Set("workingDir", workingDir) viper.Set("currentContentLanguage", helpers.NewDefaultLanguage()) viper.Set("multilingual", true) fs := &afero.MemMapFs{} hugofs.InitFs(fs) afero.WriteFile(fs, filepath.Join(workingDir, "README.txt"), []byte("Hugo Rocks!"), 0755) // Add the examples from the docs: As a smoke test and to make sure the examples work. 
// TODO(bep): docs: fix title example in := `absLangURL: {{ "index.html" | absLangURL }} absURL: {{ "http://gohugo.io/" | absURL }} absURL: {{ "mystyle.css" | absURL }} absURL: {{ 42 | absURL }} add: {{add 1 2}} base64Decode 1: {{ "SGVsbG8gd29ybGQ=" | base64Decode }} base64Decode 2: {{ 42 | base64Encode | base64Decode }} base64Encode: {{ "Hello world" | base64Encode }} chomp: {{chomp "<p>Blockhead</p>\n" }} dateFormat: {{ dateFormat "Monday, Jan 2, 2006" "2015-01-21" }} delimit: {{ delimit (slice "A" "B" "C") ", " " and " }} div: {{div 6 3}} echoParam: {{ echoParam .Params "langCode" }} emojify: {{ "I :heart: Hugo" | emojify }} eq: {{ if eq .Section "blog" }}current{{ end }} findRE: {{ findRE "[G|g]o" "Hugo is a static side generator written in Go." 1 }} hasPrefix 1: {{ hasPrefix "Hugo" "Hu" }} hasPrefix 2: {{ hasPrefix "Hugo" "Fu" }} htmlEscape 1: {{ htmlEscape "Cathal Garvey & The Sunshine Band <[email protected]>" | safeHTML}} htmlEscape 2: {{ htmlEscape "Cathal Garvey & The Sunshine Band <[email protected]>"}} htmlUnescape 1: {{htmlUnescape "Cathal Garvey &amp; The Sunshine Band &lt;[email protected]&gt;" | safeHTML}} htmlUnescape 2: {{"Cathal Garvey &amp;amp; The Sunshine Band &amp;lt;[email protected]&amp;gt;" | htmlUnescape | htmlUnescape | safeHTML}} htmlUnescape 3: {{"Cathal Garvey &amp;amp; The Sunshine Band &amp;lt;[email protected]&amp;gt;" | htmlUnescape | htmlUnescape }} htmlUnescape 4: {{ htmlEscape "Cathal Garvey & The Sunshine Band <[email protected]>" | htmlUnescape | safeHTML }} htmlUnescape 5: {{ htmlUnescape "Cathal Garvey &amp; The Sunshine Band &lt;[email protected]&gt;" | htmlEscape | safeHTML }} humanize 1: {{ humanize "my-first-post" }} humanize 2: {{ humanize "myCamelPost" }} humanize 3: {{ humanize "52" }} humanize 4: {{ humanize 103 }} in: {{ if in "this string contains a substring" "substring" }}Substring found!{{ end }} jsonify: {{ (slice "A" "B" "C") | jsonify }} lower: {{lower "BatMan"}} markdownify: {{ .Title | markdownify}} md5: 
{{ md5 "Hello world, gophers!" }} mod: {{mod 15 3}} modBool: {{modBool 15 3}} mul: {{mul 2 3}} plainify: {{ plainify "Hello <strong>world</strong>, gophers!" }} pluralize: {{ "cat" | pluralize }} querify 1: {{ (querify "foo" 1 "bar" 2 "baz" "with spaces" "qux" "this&that=those") | safeHTML }} querify 2: <a href="https://www.google.com?{{ (querify "q" "test" "page" 3) | safeURL }}">Search</a> readDir: {{ range (readDir ".") }}{{ .Name }}{{ end }} readFile: {{ readFile "README.txt" }} relLangURL: {{ "index.html" | relLangURL }} relURL 1: {{ "http://gohugo.io/" | relURL }} relURL 2: {{ "mystyle.css" | relURL }} relURL 3: {{ mul 2 21 | relURL }} replace: {{ replace "Batman and Robin" "Robin" "Catwoman" }} replaceRE: {{ "http://gohugo.io/docs" | replaceRE "^https?://([^/]+).*" "$1" }} safeCSS: {{ "Bat&Man" | safeCSS | safeCSS }} safeHTML: {{ "Bat&Man" | safeHTML | safeHTML }} safeHTML: {{ "Bat&Man" | safeHTML }} safeJS: {{ "(1*2)" | safeJS | safeJS }} safeURL: {{ "http://gohugo.io" | safeURL | safeURL }} seq: {{ seq 3 }} sha1: {{ sha1 "Hello world, gophers!" }} sha256: {{ sha256 "Hello world, gophers!" }} singularize: {{ "cats" | singularize }} slicestr: {{slicestr "BatMan" 0 3}} slicestr: {{slicestr "BatMan" 3}} sort: {{ slice "B" "C" "A" | sort }} sub: {{sub 3 2}} substr: {{substr "BatMan" 0 -3}} substr: {{substr "BatMan" 3 3}} title: {{title "Bat man"}} time: {{ (time "2015-01-21").Year }} trim: {{ trim "++Batman--" "+-" }} truncate: {{ "this is a very long text" | truncate 10 " ..." }} truncate: {{ "With [Markdown](/markdown) inside." 
| markdownify | truncate 14 }} upper: {{upper "BatMan"}} urlize: {{ "Bat Man" | urlize }} ` expected := `absLangURL: http://mysite.com/hugo/en/index.html absURL: http://gohugo.io/ absURL: http://mysite.com/hugo/mystyle.css absURL: http://mysite.com/hugo/42 add: 3 base64Decode 1: Hello world base64Decode 2: 42 base64Encode: SGVsbG8gd29ybGQ= chomp: <p>Blockhead</p> dateFormat: Wednesday, Jan 21, 2015 delimit: A, B and C div: 2 echoParam: en emojify: I ❤️ Hugo eq: current findRE: [go] hasPrefix 1: true hasPrefix 2: false htmlEscape 1: Cathal Garvey &amp; The Sunshine Band &lt;[email protected]&gt; htmlEscape 2: Cathal Garvey &amp;amp; The Sunshine Band &amp;lt;[email protected]&amp;gt; htmlUnescape 1: Cathal Garvey & The Sunshine Band <[email protected]> htmlUnescape 2: Cathal Garvey & The Sunshine Band <[email protected]> htmlUnescape 3: Cathal Garvey &amp; The Sunshine Band &lt;[email protected]&gt; htmlUnescape 4: Cathal Garvey & The Sunshine Band <[email protected]> htmlUnescape 5: Cathal Garvey &amp; The Sunshine Band &lt;[email protected]&gt; humanize 1: My first post humanize 2: My camel post humanize 3: 52nd humanize 4: 103rd in: Substring found! jsonify: ["A","B","C"] lower: batman markdownify: <strong>BatMan</strong> md5: b3029f756f98f79e7f1b7f1d1f0dd53b mod: 0 modBool: true mul: 6 plainify: Hello world, gophers! pluralize: cats querify 1: bar=2&baz=with+spaces&foo=1&qux=this%26that%3Dthose querify 2: <a href="https://www.google.com?page=3&amp;q=test">Search</a> readDir: README.txt readFile: Hugo Rocks! 
relLangURL: /hugo/en/index.html relURL 1: http://gohugo.io/ relURL 2: /hugo/mystyle.css relURL 3: /hugo/42 replace: Batman and Catwoman replaceRE: gohugo.io safeCSS: Bat&amp;Man safeHTML: Bat&Man safeHTML: Bat&Man safeJS: (1*2) safeURL: http://gohugo.io seq: [1 2 3] sha1: c8b5b0e33d408246e30f53e32b8f7627a7a649d4 sha256: 6ec43b78da9669f50e4e422575c54bf87536954ccd58280219c393f2ce352b46 singularize: cat slicestr: Bat slicestr: Man sort: [A B C] sub: 1 substr: Bat substr: Man title: Bat Man time: 2015 trim: Batman truncate: this is a ... truncate: With <a href="/markdown">Markdown …</a> upper: BATMAN urlize: bat-man ` var b bytes.Buffer templ, err := New().New("test").Parse(in) var data struct { Title string Section string Params map[string]interface{} } data.Title = "**BatMan**" data.Section = "blog" data.Params = map[string]interface{}{"langCode": "en"} viper.Set("baseURL", "http://mysite.com/hugo/") tstInitTemplates() if err != nil { t.Fatal("Got error on parse", err) } err = templ.Execute(&b, &data) if err != nil { t.Fatal("Got error on execute", err) } if b.String() != expected { sl1 := strings.Split(b.String(), "\n") sl2 := strings.Split(expected, "\n") t.Errorf("Diff:\n%q", helpers.DiffStringSlices(sl1, sl2)) } } func TestCompare(t *testing.T) { for _, this := range []struct { tstCompareType funcUnderTest func(a, b interface{}) bool }{ {tstGt, gt}, {tstLt, lt}, {tstGe, ge}, {tstLe, le}, {tstEq, eq}, {tstNe, ne}, } { doTestCompare(t, this.tstCompareType, this.funcUnderTest) } } func doTestCompare(t *testing.T, tp tstCompareType, funcUnderTest func(a, b interface{}) bool) { for i, this := range []struct { left interface{} right interface{} expectIndicator int }{ {5, 8, -1}, {8, 5, 1}, {5, 5, 0}, {int(5), int64(5), 0}, {int32(5), int(5), 0}, {int16(4), int(5), -1}, {uint(15), uint64(15), 0}, {-2, 1, -1}, {2, -5, 1}, {0.0, 1.23, -1}, {1.1, 1.1, 0}, {float32(1.0), float64(1.0), 0}, {1.23, 0.0, 1}, {"5", "5", 0}, {"8", "5", 1}, {"5", "0001", 1}, {[]int{100, 99}, 
[]int{1, 2, 3, 4}, -1}, {cast.ToTime("2015-11-20"), cast.ToTime("2015-11-20"), 0}, {cast.ToTime("2015-11-19"), cast.ToTime("2015-11-20"), -1}, {cast.ToTime("2015-11-20"), cast.ToTime("2015-11-19"), 1}, } { result := funcUnderTest(this.left, this.right) success := false if this.expectIndicator == 0 { if tstIsEq(tp) { success = result } else { success = !result } } if this.expectIndicator < 0 { success = result && (tstIsLt(tp) || tp == tstNe) success = success || (!result && !tstIsLt(tp)) } if this.expectIndicator > 0 { success = result && (tstIsGt(tp) || tp == tstNe) success = success || (!result && (!tstIsGt(tp) || tp != tstNe)) } if !success { t.Errorf("[%d][%s] %v compared to %v: %t", i, path.Base(runtime.FuncForPC(reflect.ValueOf(funcUnderTest).Pointer()).Name()), this.left, this.right, result) } } } func TestMod(t *testing.T) { for i, this := range []struct { a interface{} b interface{} expect interface{} }{ {3, 2, int64(1)}, {3, 1, int64(0)}, {3, 0, false}, {0, 3, int64(0)}, {3.1, 2, false}, {3, 2.1, false}, {3.1, 2.1, false}, {int8(3), int8(2), int64(1)}, {int16(3), int16(2), int64(1)}, {int32(3), int32(2), int64(1)}, {int64(3), int64(2), int64(1)}, } { result, err := mod(this.a, this.b) if b, ok := this.expect.(bool); ok && !b { if err == nil { t.Errorf("[%d] modulo didn't return an expected error", i) } } else { if err != nil { t.Errorf("[%d] failed: %s", i, err) continue } if !reflect.DeepEqual(result, this.expect) { t.Errorf("[%d] modulo got %v but expected %v", i, result, this.expect) } } } } func TestModBool(t *testing.T) { for i, this := range []struct { a interface{} b interface{} expect interface{} }{ {3, 3, true}, {3, 2, false}, {3, 1, true}, {3, 0, nil}, {0, 3, true}, {3.1, 2, nil}, {3, 2.1, nil}, {3.1, 2.1, nil}, {int8(3), int8(3), true}, {int8(3), int8(2), false}, {int16(3), int16(3), true}, {int16(3), int16(2), false}, {int32(3), int32(3), true}, {int32(3), int32(2), false}, {int64(3), int64(3), true}, {int64(3), int64(2), false}, } { result, 
err := modBool(this.a, this.b) if this.expect == nil { if err == nil { t.Errorf("[%d] modulo didn't return an expected error", i) } } else { if err != nil { t.Errorf("[%d] failed: %s", i, err) continue } if !reflect.DeepEqual(result, this.expect) { t.Errorf("[%d] modulo got %v but expected %v", i, result, this.expect) } } } } func TestFirst(t *testing.T) { for i, this := range []struct { count interface{} sequence interface{} expect interface{} }{ {int(2), []string{"a", "b", "c"}, []string{"a", "b"}}, {int32(3), []string{"a", "b"}, []string{"a", "b"}}, {int64(2), []int{100, 200, 300}, []int{100, 200}}, {100, []int{100, 200}, []int{100, 200}}, {"1", []int{100, 200, 300}, []int{100}}, {int64(-1), []int{100, 200, 300}, false}, {"noint", []int{100, 200, 300}, false}, {1, nil, false}, {nil, []int{100}, false}, {1, t, false}, {1, (*string)(nil), false}, } { results, err := first(this.count, this.sequence) if b, ok := this.expect.(bool); ok && !b { if err == nil { t.Errorf("[%d] First didn't return an expected error", i) } } else { if err != nil { t.Errorf("[%d] failed: %s", i, err) continue } if !reflect.DeepEqual(results, this.expect) { t.Errorf("[%d] First %d items, got %v but expected %v", i, this.count, results, this.expect) } } } } func TestLast(t *testing.T) { for i, this := range []struct { count interface{} sequence interface{} expect interface{} }{ {int(2), []string{"a", "b", "c"}, []string{"b", "c"}}, {int32(3), []string{"a", "b"}, []string{"a", "b"}}, {int64(2), []int{100, 200, 300}, []int{200, 300}}, {100, []int{100, 200}, []int{100, 200}}, {"1", []int{100, 200, 300}, []int{300}}, {int64(-1), []int{100, 200, 300}, false}, {"noint", []int{100, 200, 300}, false}, {1, nil, false}, {nil, []int{100}, false}, {1, t, false}, {1, (*string)(nil), false}, } { results, err := last(this.count, this.sequence) if b, ok := this.expect.(bool); ok && !b { if err == nil { t.Errorf("[%d] First didn't return an expected error", i) } } else { if err != nil { t.Errorf("[%d] 
failed: %s", i, err) continue } if !reflect.DeepEqual(results, this.expect) { t.Errorf("[%d] First %d items, got %v but expected %v", i, this.count, results, this.expect) } } } } func TestAfter(t *testing.T) { for i, this := range []struct { count interface{} sequence interface{} expect interface{} }{ {int(2), []string{"a", "b", "c", "d"}, []string{"c", "d"}}, {int32(3), []string{"a", "b"}, false}, {int64(2), []int{100, 200, 300}, []int{300}}, {100, []int{100, 200}, false}, {"1", []int{100, 200, 300}, []int{200, 300}}, {int64(-1), []int{100, 200, 300}, false}, {"noint", []int{100, 200, 300}, false}, {1, nil, false}, {nil, []int{100}, false}, {1, t, false}, {1, (*string)(nil), false}, } { results, err := after(this.count, this.sequence) if b, ok := this.expect.(bool); ok && !b { if err == nil { t.Errorf("[%d] First didn't return an expected error", i) } } else { if err != nil { t.Errorf("[%d] failed: %s", i, err) continue } if !reflect.DeepEqual(results, this.expect) { t.Errorf("[%d] First %d items, got %v but expected %v", i, this.count, results, this.expect) } } } } func TestShuffleInputAndOutputFormat(t *testing.T) { for i, this := range []struct { sequence interface{} success bool }{ {[]string{"a", "b", "c", "d"}, true}, {[]int{100, 200, 300}, true}, {[]int{100, 200, 300}, true}, {[]int{100, 200}, true}, {[]string{"a", "b"}, true}, {[]int{100, 200, 300}, true}, {[]int{100, 200, 300}, true}, {[]int{100}, true}, {nil, false}, {t, false}, {(*string)(nil), false}, } { results, err := shuffle(this.sequence) if !this.success { if err == nil { t.Errorf("[%d] First didn't return an expected error", i) } } else { resultsv := reflect.ValueOf(results) sequencev := reflect.ValueOf(this.sequence) if err != nil { t.Errorf("[%d] failed: %s", i, err) continue } if resultsv.Len() != sequencev.Len() { t.Errorf("Expected %d items, got %d items", sequencev.Len(), resultsv.Len()) } } } } func TestShuffleRandomising(t *testing.T) { // Note that this test can fail with false negative 
result if the shuffle // of the sequence happens to be the same as the original sequence. However // the propability of the event is 10^-158 which is negligible. sequenceLength := 100 rand.Seed(time.Now().UTC().UnixNano()) for _, this := range []struct { sequence []int }{ {rand.Perm(sequenceLength)}, } { results, _ := shuffle(this.sequence) resultsv := reflect.ValueOf(results) allSame := true for index, value := range this.sequence { allSame = allSame && (resultsv.Index(index).Interface() == value) } if allSame { t.Error("Expected sequence to be shuffled but was in the same order") } } } func TestDictionary(t *testing.T) { for i, this := range []struct { v1 []interface{} expecterr bool expectedValue map[string]interface{} }{ {[]interface{}{"a", "b"}, false, map[string]interface{}{"a": "b"}}, {[]interface{}{5, "b"}, true, nil}, {[]interface{}{"a", 12, "b", []int{4}}, false, map[string]interface{}{"a": 12, "b": []int{4}}}, {[]interface{}{"a", "b", "c"}, true, nil}, } { r, e := dictionary(this.v1...) if (this.expecterr && e == nil) || (!this.expecterr && e != nil) { t.Errorf("[%d] got an unexpected error: %s", i, e) } else if !this.expecterr { if !reflect.DeepEqual(r, this.expectedValue) { t.Errorf("[%d] got %v but expected %v", i, r, this.expectedValue) } } } } func blankImage(width, height int) []byte { var buf bytes.Buffer img := image.NewRGBA(image.Rect(0, 0, width, height)) if err := png.Encode(&buf, img); err != nil { panic(err) } return buf.Bytes() } func TestImageConfig(t *testing.T) { viper.Reset() defer viper.Reset() workingDir := "/home/hugo" viper.Set("workingDir", workingDir) fs := &afero.MemMapFs{} hugofs.InitFs(fs) for i, this := range []struct { resetCache bool path string input []byte expected image.Config }{ // Make sure that the cache is initialized by default. 
{ resetCache: false, path: "a.png", input: blankImage(10, 10), expected: image.Config{ Width: 10, Height: 10, ColorModel: color.NRGBAModel, }, }, { resetCache: true, path: "a.png", input: blankImage(10, 10), expected: image.Config{ Width: 10, Height: 10, ColorModel: color.NRGBAModel, }, }, { resetCache: false, path: "b.png", input: blankImage(20, 15), expected: image.Config{ Width: 20, Height: 15, ColorModel: color.NRGBAModel, }, }, { resetCache: false, path: "a.png", input: blankImage(20, 15), expected: image.Config{ Width: 10, Height: 10, ColorModel: color.NRGBAModel, }, }, { resetCache: true, path: "a.png", input: blankImage(20, 15), expected: image.Config{ Width: 20, Height: 15, ColorModel: color.NRGBAModel, }, }, } { afero.WriteFile(fs, filepath.Join(workingDir, this.path), this.input, 0755) if this.resetCache { resetImageConfigCache() } result, err := imageConfig(this.path) if err != nil { t.Errorf("imageConfig returned error: %s", err) } if !reflect.DeepEqual(result, this.expected) { t.Errorf("[%d] imageConfig: expected '%v', got '%v'", i, this.expected, result) } if len(defaultImageConfigCache.config) == 0 { t.Error("defaultImageConfigCache should have at least 1 item") } } if _, err := imageConfig(t); err == nil { t.Error("Expected error from imageConfig when passed invalid path") } if _, err := imageConfig("non-existent.png"); err == nil { t.Error("Expected error from imageConfig when passed non-existent file") } if _, err := imageConfig(""); err == nil { t.Error("Expected error from imageConfig when passed empty path") } // test cache clearing ResetCaches() if len(defaultImageConfigCache.config) != 0 { t.Error("ResetCaches should have cleared defaultImageConfigCache") } } func TestIn(t *testing.T) { for i, this := range []struct { v1 interface{} v2 interface{} expect bool }{ {[]string{"a", "b", "c"}, "b", true}, {[]interface{}{"a", "b", "c"}, "b", true}, {[]interface{}{"a", "b", "c"}, "d", false}, {[]string{"a", "b", "c"}, "d", false}, {[]string{"a", 
"12", "c"}, 12, false}, {[]int{1, 2, 4}, 2, true}, {[]interface{}{1, 2, 4}, 2, true}, {[]interface{}{1, 2, 4}, nil, false}, {[]interface{}{nil}, nil, false}, {[]int{1, 2, 4}, 3, false}, {[]float64{1.23, 2.45, 4.67}, 1.23, true}, {[]float64{1.234567, 2.45, 4.67}, 1.234568, false}, {"this substring should be found", "substring", true}, {"this substring should not be found", "subseastring", false}, } { result := in(this.v1, this.v2) if result != this.expect { t.Errorf("[%d] got %v but expected %v", i, result, this.expect) } } } func TestSlicestr(t *testing.T) { var err error for i, this := range []struct { v1 interface{} v2 interface{} v3 interface{} expect interface{} }{ {"abc", 1, 2, "b"}, {"abc", 1, 3, "bc"}, {"abcdef", 1, int8(3), "bc"}, {"abcdef", 1, int16(3), "bc"}, {"abcdef", 1, int32(3), "bc"}, {"abcdef", 1, int64(3), "bc"}, {"abc", 0, 1, "a"}, {"abcdef", nil, nil, "abcdef"}, {"abcdef", 0, 6, "abcdef"}, {"abcdef", 0, 2, "ab"}, {"abcdef", 2, nil, "cdef"}, {"abcdef", int8(2), nil, "cdef"}, {"abcdef", int16(2), nil, "cdef"}, {"abcdef", int32(2), nil, "cdef"}, {"abcdef", int64(2), nil, "cdef"}, {123, 1, 3, "23"}, {"abcdef", 6, nil, false}, {"abcdef", 4, 7, false}, {"abcdef", -1, nil, false}, {"abcdef", -1, 7, false}, {"abcdef", 1, -1, false}, {tstNoStringer{}, 0, 1, false}, {"ĀĀĀ", 0, 1, "Ā"}, // issue #1333 {"a", t, nil, false}, {"a", 1, t, false}, } { var result string if this.v2 == nil { result, err = slicestr(this.v1) } else if this.v3 == nil { result, err = slicestr(this.v1, this.v2) } else { result, err = slicestr(this.v1, this.v2, this.v3) } if b, ok := this.expect.(bool); ok && !b { if err == nil { t.Errorf("[%d] Slice didn't return an expected error", i) } } else { if err != nil { t.Errorf("[%d] failed: %s", i, err) continue } if !reflect.DeepEqual(result, this.expect) { t.Errorf("[%d] got %s but expected %s", i, result, this.expect) } } } // Too many arguments _, err = slicestr("a", 1, 2, 3) if err == nil { t.Errorf("Should have errored") } } func 
TestHasPrefix(t *testing.T) { cases := []struct { s interface{} prefix interface{} want interface{} isErr bool }{ {"abcd", "ab", true, false}, {"abcd", "cd", false, false}, {template.HTML("abcd"), "ab", true, false}, {template.HTML("abcd"), "cd", false, false}, {template.HTML("1234"), 12, true, false}, {template.HTML("1234"), 34, false, false}, {[]byte("abcd"), "ab", true, false}, } for i, c := range cases { res, err := hasPrefix(c.s, c.prefix) if (err != nil) != c.isErr { t.Fatalf("[%d] unexpected isErr state: want %v, got %v, err = %v", i, c.isErr, err != nil, err) } if res != c.want { t.Errorf("[%d] want %v, got %v", i, c.want, res) } } } func TestSubstr(t *testing.T) { var err error var n int for i, this := range []struct { v1 interface{} v2 interface{} v3 interface{} expect interface{} }{ {"abc", 1, 2, "bc"}, {"abc", 0, 1, "a"}, {"abcdef", -1, 2, "ef"}, {"abcdef", -3, 3, "bcd"}, {"abcdef", 0, -1, "abcde"}, {"abcdef", 2, -1, "cde"}, {"abcdef", 4, -4, false}, {"abcdef", 7, 1, false}, {"abcdef", 1, 100, "bcdef"}, {"abcdef", -100, 3, "abc"}, {"abcdef", -3, -1, "de"}, {"abcdef", 2, nil, "cdef"}, {"abcdef", int8(2), nil, "cdef"}, {"abcdef", int16(2), nil, "cdef"}, {"abcdef", int32(2), nil, "cdef"}, {"abcdef", int64(2), nil, "cdef"}, {"abcdef", 2, int8(3), "cde"}, {"abcdef", 2, int16(3), "cde"}, {"abcdef", 2, int32(3), "cde"}, {"abcdef", 2, int64(3), "cde"}, {123, 1, 3, "23"}, {1.2e3, 0, 4, "1200"}, {tstNoStringer{}, 0, 1, false}, {"abcdef", 2.0, nil, "cdef"}, {"abcdef", 2.0, 2, "cd"}, {"abcdef", 2, 2.0, "cd"}, {"ĀĀĀ", 1, 2, "ĀĀ"}, // # issue 1333 {"abcdef", "doo", nil, false}, {"abcdef", "doo", "doo", false}, {"abcdef", 1, "doo", false}, } { var result string n = i if this.v3 == nil { result, err = substr(this.v1, this.v2) } else { result, err = substr(this.v1, this.v2, this.v3) } if b, ok := this.expect.(bool); ok && !b { if err == nil { t.Errorf("[%d] Substr didn't return an expected error", i) } } else { if err != nil { t.Errorf("[%d] failed: %s", i, err) 
continue } if !reflect.DeepEqual(result, this.expect) { t.Errorf("[%d] got %s but expected %s", i, result, this.expect) } } } n++ _, err = substr("abcdef") if err == nil { t.Errorf("[%d] Substr didn't return an expected error", n) } n++ _, err = substr("abcdef", 1, 2, 3) if err == nil { t.Errorf("[%d] Substr didn't return an expected error", n) } } func TestSplit(t *testing.T) { for i, this := range []struct { v1 interface{} v2 string expect interface{} }{ {"a, b", ", ", []string{"a", "b"}}, {"a & b & c", " & ", []string{"a", "b", "c"}}, {"http://example.com", "http://", []string{"", "example.com"}}, {123, "2", []string{"1", "3"}}, {tstNoStringer{}, ",", false}, } { result, err := split(this.v1, this.v2) if b, ok := this.expect.(bool); ok && !b { if err == nil { t.Errorf("[%d] Split didn't return an expected error", i) } } else { if err != nil { t.Errorf("[%d] failed: %s", i, err) continue } if !reflect.DeepEqual(result, this.expect) { t.Errorf("[%d] got %s but expected %s", i, result, this.expect) } } } } func TestIntersect(t *testing.T) { for i, this := range []struct { sequence1 interface{} sequence2 interface{} expect interface{} }{ {[]string{"a", "b", "c", "c"}, []string{"a", "b", "b"}, []string{"a", "b"}}, {[]string{"a", "b"}, []string{"a", "b", "c"}, []string{"a", "b"}}, {[]string{"a", "b", "c"}, []string{"d", "e"}, []string{}}, {[]string{}, []string{}, []string{}}, {[]string{"a", "b"}, nil, make([]interface{}, 0)}, {nil, []string{"a", "b"}, make([]interface{}, 0)}, {nil, nil, make([]interface{}, 0)}, {[]string{"1", "2"}, []int{1, 2}, []string{}}, {[]int{1, 2}, []string{"1", "2"}, []int{}}, {[]int{1, 2, 4}, []int{2, 4}, []int{2, 4}}, {[]int{2, 4}, []int{1, 2, 4}, []int{2, 4}}, {[]int{1, 2, 4}, []int{3, 6}, []int{}}, {[]float64{2.2, 4.4}, []float64{1.1, 2.2, 4.4}, []float64{2.2, 4.4}}, } { results, err := intersect(this.sequence1, this.sequence2) if err != nil { t.Errorf("[%d] failed: %s", i, err) continue } if !reflect.DeepEqual(results, this.expect) { 
t.Errorf("[%d] got %v but expected %v", i, results, this.expect) } } _, err1 := intersect("not an array or slice", []string{"a"}) if err1 == nil { t.Error("Expected error for non array as first arg") } _, err2 := intersect([]string{"a"}, "not an array or slice") if err2 == nil { t.Error("Expected error for non array as second arg") } } func TestIsSet(t *testing.T) { aSlice := []interface{}{1, 2, 3, 5} aMap := map[string]interface{}{"a": 1, "b": 2} assert.True(t, isSet(aSlice, 2)) assert.True(t, isSet(aMap, "b")) assert.False(t, isSet(aSlice, 22)) assert.False(t, isSet(aMap, "bc")) } func (x *TstX) TstRp() string { return "r" + x.A } func (x TstX) TstRv() string { return "r" + x.B } func (x TstX) unexportedMethod() string { return x.unexported } func (x TstX) MethodWithArg(s string) string { return s } func (x TstX) MethodReturnNothing() {} func (x TstX) MethodReturnErrorOnly() error { return errors.New("some error occurred") } func (x TstX) MethodReturnTwoValues() (string, string) { return "foo", "bar" } func (x TstX) MethodReturnValueWithError() (string, error) { return "", errors.New("some error occurred") } func (x TstX) String() string { return fmt.Sprintf("A: %s, B: %s", x.A, x.B) } type TstX struct { A, B string unexported string } func TestTimeUnix(t *testing.T) { var sec int64 = 1234567890 tv := reflect.ValueOf(time.Unix(sec, 0)) i := 1 res := toTimeUnix(tv) if sec != res { t.Errorf("[%d] timeUnix got %v but expected %v", i, res, sec) } i++ func(t *testing.T) { defer func() { if err := recover(); err == nil { t.Errorf("[%d] timeUnix didn't return an expected error", i) } }() iv := reflect.ValueOf(sec) toTimeUnix(iv) }(t) } func TestEvaluateSubElem(t *testing.T) { tstx := TstX{A: "foo", B: "bar"} var inner struct { S fmt.Stringer } inner.S = tstx interfaceValue := reflect.ValueOf(&inner).Elem().Field(0) for i, this := range []struct { value reflect.Value key string expect interface{} }{ {reflect.ValueOf(tstx), "A", "foo"}, {reflect.ValueOf(&tstx), "TstRp", 
"rfoo"}, {reflect.ValueOf(tstx), "TstRv", "rbar"}, //{reflect.ValueOf(map[int]string{1: "foo", 2: "bar"}), 1, "foo"}, {reflect.ValueOf(map[string]string{"key1": "foo", "key2": "bar"}), "key1", "foo"}, {interfaceValue, "String", "A: foo, B: bar"}, {reflect.Value{}, "foo", false}, //{reflect.ValueOf(map[int]string{1: "foo", 2: "bar"}), 1.2, false}, {reflect.ValueOf(tstx), "unexported", false}, {reflect.ValueOf(tstx), "unexportedMethod", false}, {reflect.ValueOf(tstx), "MethodWithArg", false}, {reflect.ValueOf(tstx), "MethodReturnNothing", false}, {reflect.ValueOf(tstx), "MethodReturnErrorOnly", false}, {reflect.ValueOf(tstx), "MethodReturnTwoValues", false}, {reflect.ValueOf(tstx), "MethodReturnValueWithError", false}, {reflect.ValueOf((*TstX)(nil)), "A", false}, {reflect.ValueOf(tstx), "C", false}, {reflect.ValueOf(map[int]string{1: "foo", 2: "bar"}), "1", false}, {reflect.ValueOf([]string{"foo", "bar"}), "1", false}, } { result, err := evaluateSubElem(this.value, this.key) if b, ok := this.expect.(bool); ok && !b { if err == nil { t.Errorf("[%d] evaluateSubElem didn't return an expected error", i) } } else { if err != nil { t.Errorf("[%d] failed: %s", i, err) continue } if result.Kind() != reflect.String || result.String() != this.expect { t.Errorf("[%d] evaluateSubElem with %v got %v but expected %v", i, this.key, result, this.expect) } } } } func TestCheckCondition(t *testing.T) { type expect struct { result bool isError bool } for i, this := range []struct { value reflect.Value match reflect.Value op string expect }{ {reflect.ValueOf(123), reflect.ValueOf(123), "", expect{true, false}}, {reflect.ValueOf("foo"), reflect.ValueOf("foo"), "", expect{true, false}}, { reflect.ValueOf(time.Date(2015, time.May, 26, 19, 18, 56, 12345, time.UTC)), reflect.ValueOf(time.Date(2015, time.May, 26, 19, 18, 56, 12345, time.UTC)), "", expect{true, false}, }, {reflect.ValueOf(true), reflect.ValueOf(true), "", expect{true, false}}, {reflect.ValueOf(nil), reflect.ValueOf(nil), "", 
expect{true, false}}, {reflect.ValueOf(123), reflect.ValueOf(456), "!=", expect{true, false}}, {reflect.ValueOf("foo"), reflect.ValueOf("bar"), "!=", expect{true, false}}, { reflect.ValueOf(time.Date(2015, time.May, 26, 19, 18, 56, 12345, time.UTC)), reflect.ValueOf(time.Date(2015, time.April, 26, 19, 18, 56, 12345, time.UTC)), "!=", expect{true, false}, }, {reflect.ValueOf(true), reflect.ValueOf(false), "!=", expect{true, false}}, {reflect.ValueOf(123), reflect.ValueOf(nil), "!=", expect{true, false}}, {reflect.ValueOf(456), reflect.ValueOf(123), ">=", expect{true, false}}, {reflect.ValueOf("foo"), reflect.ValueOf("bar"), ">=", expect{true, false}}, { reflect.ValueOf(time.Date(2015, time.May, 26, 19, 18, 56, 12345, time.UTC)), reflect.ValueOf(time.Date(2015, time.April, 26, 19, 18, 56, 12345, time.UTC)), ">=", expect{true, false}, }, {reflect.ValueOf(456), reflect.ValueOf(123), ">", expect{true, false}}, {reflect.ValueOf("foo"), reflect.ValueOf("bar"), ">", expect{true, false}}, { reflect.ValueOf(time.Date(2015, time.May, 26, 19, 18, 56, 12345, time.UTC)), reflect.ValueOf(time.Date(2015, time.April, 26, 19, 18, 56, 12345, time.UTC)), ">", expect{true, false}, }, {reflect.ValueOf(123), reflect.ValueOf(456), "<=", expect{true, false}}, {reflect.ValueOf("bar"), reflect.ValueOf("foo"), "<=", expect{true, false}}, { reflect.ValueOf(time.Date(2015, time.April, 26, 19, 18, 56, 12345, time.UTC)), reflect.ValueOf(time.Date(2015, time.May, 26, 19, 18, 56, 12345, time.UTC)), "<=", expect{true, false}, }, {reflect.ValueOf(123), reflect.ValueOf(456), "<", expect{true, false}}, {reflect.ValueOf("bar"), reflect.ValueOf("foo"), "<", expect{true, false}}, { reflect.ValueOf(time.Date(2015, time.April, 26, 19, 18, 56, 12345, time.UTC)), reflect.ValueOf(time.Date(2015, time.May, 26, 19, 18, 56, 12345, time.UTC)), "<", expect{true, false}, }, {reflect.ValueOf(123), reflect.ValueOf([]int{123, 45, 678}), "in", expect{true, false}}, {reflect.ValueOf("foo"), 
reflect.ValueOf([]string{"foo", "bar", "baz"}), "in", expect{true, false}}, { reflect.ValueOf(time.Date(2015, time.May, 26, 19, 18, 56, 12345, time.UTC)), reflect.ValueOf([]time.Time{ time.Date(2015, time.April, 26, 19, 18, 56, 12345, time.UTC), time.Date(2015, time.May, 26, 19, 18, 56, 12345, time.UTC), time.Date(2015, time.June, 26, 19, 18, 56, 12345, time.UTC), }), "in", expect{true, false}, }, {reflect.ValueOf(123), reflect.ValueOf([]int{45, 678}), "not in", expect{true, false}}, {reflect.ValueOf("foo"), reflect.ValueOf([]string{"bar", "baz"}), "not in", expect{true, false}}, { reflect.ValueOf(time.Date(2015, time.May, 26, 19, 18, 56, 12345, time.UTC)), reflect.ValueOf([]time.Time{ time.Date(2015, time.February, 26, 19, 18, 56, 12345, time.UTC), time.Date(2015, time.March, 26, 19, 18, 56, 12345, time.UTC), time.Date(2015, time.April, 26, 19, 18, 56, 12345, time.UTC), }), "not in", expect{true, false}, }, {reflect.ValueOf("foo"), reflect.ValueOf("bar-foo-baz"), "in", expect{true, false}}, {reflect.ValueOf("foo"), reflect.ValueOf("bar--baz"), "not in", expect{true, false}}, {reflect.Value{}, reflect.ValueOf("foo"), "", expect{false, false}}, {reflect.ValueOf("foo"), reflect.Value{}, "", expect{false, false}}, {reflect.ValueOf((*TstX)(nil)), reflect.ValueOf("foo"), "", expect{false, false}}, {reflect.ValueOf("foo"), reflect.ValueOf((*TstX)(nil)), "", expect{false, false}}, {reflect.ValueOf(true), reflect.ValueOf("foo"), "", expect{false, false}}, {reflect.ValueOf("foo"), reflect.ValueOf(true), "", expect{false, false}}, {reflect.ValueOf("foo"), reflect.ValueOf(map[int]string{}), "", expect{false, false}}, {reflect.ValueOf("foo"), reflect.ValueOf([]int{1, 2}), "", expect{false, false}}, {reflect.ValueOf((*TstX)(nil)), reflect.ValueOf((*TstX)(nil)), ">", expect{false, false}}, {reflect.ValueOf(true), reflect.ValueOf(false), ">", expect{false, false}}, {reflect.ValueOf(123), reflect.ValueOf([]int{}), "in", expect{false, false}}, {reflect.ValueOf(123), 
reflect.ValueOf(123), "op", expect{false, true}}, } { result, err := checkCondition(this.value, this.match, this.op) if this.expect.isError { if err == nil { t.Errorf("[%d] checkCondition didn't return an expected error", i) } } else { if err != nil { t.Errorf("[%d] failed: %s", i, err) continue } if result != this.expect.result { t.Errorf("[%d] check condition %v %s %v, got %v but expected %v", i, this.value, this.op, this.match, result, this.expect.result) } } } } func TestWhere(t *testing.T) { type Mid struct { Tst TstX } d1 := time.Now() d2 := d1.Add(1 * time.Hour) d3 := d2.Add(1 * time.Hour) d4 := d3.Add(1 * time.Hour) d5 := d4.Add(1 * time.Hour) d6 := d5.Add(1 * time.Hour) for i, this := range []struct { sequence interface{} key interface{} op string match interface{} expect interface{} }{ { sequence: []map[int]string{ {1: "a", 2: "m"}, {1: "c", 2: "d"}, {1: "e", 3: "m"}, }, key: 2, match: "m", expect: []map[int]string{ {1: "a", 2: "m"}, }, }, { sequence: []map[string]int{ {"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "x": 4}, }, key: "b", match: 4, expect: []map[string]int{ {"a": 3, "b": 4}, }, }, { sequence: []TstX{ {A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "f"}, }, key: "B", match: "f", expect: []TstX{ {A: "e", B: "f"}, }, }, { sequence: []*map[int]string{ {1: "a", 2: "m"}, {1: "c", 2: "d"}, {1: "e", 3: "m"}, }, key: 2, match: "m", expect: []*map[int]string{ {1: "a", 2: "m"}, }, }, { sequence: []*TstX{ {A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "f"}, }, key: "B", match: "f", expect: []*TstX{ {A: "e", B: "f"}, }, }, { sequence: []*TstX{ {A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "c"}, }, key: "TstRp", match: "rc", expect: []*TstX{ {A: "c", B: "d"}, }, }, { sequence: []TstX{ {A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "c"}, }, key: "TstRv", match: "rc", expect: []TstX{ {A: "e", B: "c"}, }, }, { sequence: []map[string]TstX{ {"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}, {"foo": TstX{A: "e", B: "f"}}, }, key: "foo.B", match: 
"d", expect: []map[string]TstX{ {"foo": TstX{A: "c", B: "d"}}, }, }, { sequence: []map[string]TstX{ {"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}, {"foo": TstX{A: "e", B: "f"}}, }, key: ".foo.B", match: "d", expect: []map[string]TstX{ {"foo": TstX{A: "c", B: "d"}}, }, }, { sequence: []map[string]TstX{ {"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}, {"foo": TstX{A: "e", B: "f"}}, }, key: "foo.TstRv", match: "rd", expect: []map[string]TstX{ {"foo": TstX{A: "c", B: "d"}}, }, }, { sequence: []map[string]*TstX{ {"foo": &TstX{A: "a", B: "b"}}, {"foo": &TstX{A: "c", B: "d"}}, {"foo": &TstX{A: "e", B: "f"}}, }, key: "foo.TstRp", match: "rc", expect: []map[string]*TstX{ {"foo": &TstX{A: "c", B: "d"}}, }, }, { sequence: []map[string]Mid{ {"foo": Mid{Tst: TstX{A: "a", B: "b"}}}, {"foo": Mid{Tst: TstX{A: "c", B: "d"}}}, {"foo": Mid{Tst: TstX{A: "e", B: "f"}}}, }, key: "foo.Tst.B", match: "d", expect: []map[string]Mid{ {"foo": Mid{Tst: TstX{A: "c", B: "d"}}}, }, }, { sequence: []map[string]Mid{ {"foo": Mid{Tst: TstX{A: "a", B: "b"}}}, {"foo": Mid{Tst: TstX{A: "c", B: "d"}}}, {"foo": Mid{Tst: TstX{A: "e", B: "f"}}}, }, key: "foo.Tst.TstRv", match: "rd", expect: []map[string]Mid{ {"foo": Mid{Tst: TstX{A: "c", B: "d"}}}, }, }, { sequence: []map[string]*Mid{ {"foo": &Mid{Tst: TstX{A: "a", B: "b"}}}, {"foo": &Mid{Tst: TstX{A: "c", B: "d"}}}, {"foo": &Mid{Tst: TstX{A: "e", B: "f"}}}, }, key: "foo.Tst.TstRp", match: "rc", expect: []map[string]*Mid{ {"foo": &Mid{Tst: TstX{A: "c", B: "d"}}}, }, }, { sequence: []map[string]int{ {"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "b": 6}, }, key: "b", op: ">", match: 3, expect: []map[string]int{ {"a": 3, "b": 4}, {"a": 5, "b": 6}, }, }, { sequence: []TstX{ {A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "f"}, }, key: "B", op: "!=", match: "f", expect: []TstX{ {A: "a", B: "b"}, {A: "c", B: "d"}, }, }, { sequence: []map[string]int{ {"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "b": 6}, }, key: "b", op: "in", match: 
[]int{3, 4, 5}, expect: []map[string]int{ {"a": 3, "b": 4}, }, }, { sequence: []map[string][]string{ {"a": []string{"A", "B", "C"}, "b": []string{"D", "E", "F"}}, {"a": []string{"G", "H", "I"}, "b": []string{"J", "K", "L"}}, {"a": []string{"M", "N", "O"}, "b": []string{"P", "Q", "R"}}, }, key: "b", op: "intersect", match: []string{"D", "P", "Q"}, expect: []map[string][]string{ {"a": []string{"A", "B", "C"}, "b": []string{"D", "E", "F"}}, {"a": []string{"M", "N", "O"}, "b": []string{"P", "Q", "R"}}, }, }, { sequence: []map[string][]int{ {"a": []int{1, 2, 3}, "b": []int{4, 5, 6}}, {"a": []int{7, 8, 9}, "b": []int{10, 11, 12}}, {"a": []int{13, 14, 15}, "b": []int{16, 17, 18}}, }, key: "b", op: "intersect", match: []int{4, 10, 12}, expect: []map[string][]int{ {"a": []int{1, 2, 3}, "b": []int{4, 5, 6}}, {"a": []int{7, 8, 9}, "b": []int{10, 11, 12}}, }, }, { sequence: []map[string][]int8{ {"a": []int8{1, 2, 3}, "b": []int8{4, 5, 6}}, {"a": []int8{7, 8, 9}, "b": []int8{10, 11, 12}}, {"a": []int8{13, 14, 15}, "b": []int8{16, 17, 18}}, }, key: "b", op: "intersect", match: []int8{4, 10, 12}, expect: []map[string][]int8{ {"a": []int8{1, 2, 3}, "b": []int8{4, 5, 6}}, {"a": []int8{7, 8, 9}, "b": []int8{10, 11, 12}}, }, }, { sequence: []map[string][]int16{ {"a": []int16{1, 2, 3}, "b": []int16{4, 5, 6}}, {"a": []int16{7, 8, 9}, "b": []int16{10, 11, 12}}, {"a": []int16{13, 14, 15}, "b": []int16{16, 17, 18}}, }, key: "b", op: "intersect", match: []int16{4, 10, 12}, expect: []map[string][]int16{ {"a": []int16{1, 2, 3}, "b": []int16{4, 5, 6}}, {"a": []int16{7, 8, 9}, "b": []int16{10, 11, 12}}, }, }, { sequence: []map[string][]int32{ {"a": []int32{1, 2, 3}, "b": []int32{4, 5, 6}}, {"a": []int32{7, 8, 9}, "b": []int32{10, 11, 12}}, {"a": []int32{13, 14, 15}, "b": []int32{16, 17, 18}}, }, key: "b", op: "intersect", match: []int32{4, 10, 12}, expect: []map[string][]int32{ {"a": []int32{1, 2, 3}, "b": []int32{4, 5, 6}}, {"a": []int32{7, 8, 9}, "b": []int32{10, 11, 12}}, }, }, { sequence: 
[]map[string][]int64{ {"a": []int64{1, 2, 3}, "b": []int64{4, 5, 6}}, {"a": []int64{7, 8, 9}, "b": []int64{10, 11, 12}}, {"a": []int64{13, 14, 15}, "b": []int64{16, 17, 18}}, }, key: "b", op: "intersect", match: []int64{4, 10, 12}, expect: []map[string][]int64{<|fim▁hole|> { sequence: []map[string][]float32{ {"a": []float32{1.0, 2.0, 3.0}, "b": []float32{4.0, 5.0, 6.0}}, {"a": []float32{7.0, 8.0, 9.0}, "b": []float32{10.0, 11.0, 12.0}}, {"a": []float32{13.0, 14.0, 15.0}, "b": []float32{16.0, 17.0, 18.0}}, }, key: "b", op: "intersect", match: []float32{4, 10, 12}, expect: []map[string][]float32{ {"a": []float32{1.0, 2.0, 3.0}, "b": []float32{4.0, 5.0, 6.0}}, {"a": []float32{7.0, 8.0, 9.0}, "b": []float32{10.0, 11.0, 12.0}}, }, }, { sequence: []map[string][]float64{ {"a": []float64{1.0, 2.0, 3.0}, "b": []float64{4.0, 5.0, 6.0}}, {"a": []float64{7.0, 8.0, 9.0}, "b": []float64{10.0, 11.0, 12.0}}, {"a": []float64{13.0, 14.0, 15.0}, "b": []float64{16.0, 17.0, 18.0}}, }, key: "b", op: "intersect", match: []float64{4, 10, 12}, expect: []map[string][]float64{ {"a": []float64{1.0, 2.0, 3.0}, "b": []float64{4.0, 5.0, 6.0}}, {"a": []float64{7.0, 8.0, 9.0}, "b": []float64{10.0, 11.0, 12.0}}, }, }, { sequence: []map[string]int{ {"a": 1, "b": 2}, {"a": 3, "b": 4}, {"a": 5, "b": 6}, }, key: "b", op: "in", match: slice(3, 4, 5), expect: []map[string]int{ {"a": 3, "b": 4}, }, }, { sequence: []map[string]time.Time{ {"a": d1, "b": d2}, {"a": d3, "b": d4}, {"a": d5, "b": d6}, }, key: "b", op: "in", match: slice(d3, d4, d5), expect: []map[string]time.Time{ {"a": d3, "b": d4}, }, }, { sequence: []TstX{ {A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "f"}, }, key: "B", op: "not in", match: []string{"c", "d", "e"}, expect: []TstX{ {A: "a", B: "b"}, {A: "e", B: "f"}, }, }, { sequence: []TstX{ {A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "f"}, }, key: "B", op: "not in", match: slice("c", t, "d", "e"), expect: []TstX{ {A: "a", B: "b"}, {A: "e", B: "f"}, }, }, { sequence: []map[string]int{ 
{"a": 1, "b": 2}, {"a": 3}, {"a": 5, "b": 6}, }, key: "b", op: "", match: nil, expect: []map[string]int{ {"a": 3}, }, }, { sequence: []map[string]int{ {"a": 1, "b": 2}, {"a": 3}, {"a": 5, "b": 6}, }, key: "b", op: "!=", match: nil, expect: []map[string]int{ {"a": 1, "b": 2}, {"a": 5, "b": 6}, }, }, { sequence: []map[string]int{ {"a": 1, "b": 2}, {"a": 3}, {"a": 5, "b": 6}, }, key: "b", op: ">", match: nil, expect: []map[string]int{}, }, { sequence: []map[string]bool{ {"a": true, "b": false}, {"c": true, "b": true}, {"d": true, "b": false}, }, key: "b", op: "", match: true, expect: []map[string]bool{ {"c": true, "b": true}, }, }, { sequence: []map[string]bool{ {"a": true, "b": false}, {"c": true, "b": true}, {"d": true, "b": false}, }, key: "b", op: "!=", match: true, expect: []map[string]bool{ {"a": true, "b": false}, {"d": true, "b": false}, }, }, { sequence: []map[string]bool{ {"a": true, "b": false}, {"c": true, "b": true}, {"d": true, "b": false}, }, key: "b", op: ">", match: false, expect: []map[string]bool{}, }, {sequence: (*[]TstX)(nil), key: "A", match: "a", expect: false}, {sequence: TstX{A: "a", B: "b"}, key: "A", match: "a", expect: false}, {sequence: []map[string]*TstX{{"foo": nil}}, key: "foo.B", match: "d", expect: false}, { sequence: []TstX{ {A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "f"}, }, key: "B", op: "op", match: "f", expect: false, }, { sequence: map[string]interface{}{ "foo": []interface{}{map[interface{}]interface{}{"a": 1, "b": 2}}, "bar": []interface{}{map[interface{}]interface{}{"a": 3, "b": 4}}, "zap": []interface{}{map[interface{}]interface{}{"a": 5, "b": 6}}, }, key: "b", op: "in", match: slice(3, 4, 5), expect: map[string]interface{}{ "bar": []interface{}{map[interface{}]interface{}{"a": 3, "b": 4}}, }, }, { sequence: map[string]interface{}{ "foo": []interface{}{map[interface{}]interface{}{"a": 1, "b": 2}}, "bar": []interface{}{map[interface{}]interface{}{"a": 3, "b": 4}}, "zap": []interface{}{map[interface{}]interface{}{"a": 5, 
"b": 6}}, }, key: "b", op: ">", match: 3, expect: map[string]interface{}{ "bar": []interface{}{map[interface{}]interface{}{"a": 3, "b": 4}}, "zap": []interface{}{map[interface{}]interface{}{"a": 5, "b": 6}}, }, }, } { var results interface{} var err error if len(this.op) > 0 { results, err = where(this.sequence, this.key, this.op, this.match) } else { results, err = where(this.sequence, this.key, this.match) } if b, ok := this.expect.(bool); ok && !b { if err == nil { t.Errorf("[%d] Where didn't return an expected error", i) } } else { if err != nil { t.Errorf("[%d] failed: %s", i, err) continue } if !reflect.DeepEqual(results, this.expect) { t.Errorf("[%d] Where clause matching %v with %v, got %v but expected %v", i, this.key, this.match, results, this.expect) } } } var err error _, err = where(map[string]int{"a": 1, "b": 2}, "a", []byte("="), 1) if err == nil { t.Errorf("Where called with none string op value didn't return an expected error") } _, err = where(map[string]int{"a": 1, "b": 2}, "a", []byte("="), 1, 2) if err == nil { t.Errorf("Where called with more than two variable arguments didn't return an expected error") } _, err = where(map[string]int{"a": 1, "b": 2}, "a") if err == nil { t.Errorf("Where called with no variable arguments didn't return an expected error") } } func TestDelimit(t *testing.T) { for i, this := range []struct { sequence interface{} delimiter interface{} last interface{} expect template.HTML }{ {[]string{"class1", "class2", "class3"}, " ", nil, "class1 class2 class3"}, {[]int{1, 2, 3, 4, 5}, ",", nil, "1,2,3,4,5"}, {[]int{1, 2, 3, 4, 5}, ", ", nil, "1, 2, 3, 4, 5"}, {[]string{"class1", "class2", "class3"}, " ", " and ", "class1 class2 and class3"}, {[]int{1, 2, 3, 4, 5}, ",", ",", "1,2,3,4,5"}, {[]int{1, 2, 3, 4, 5}, ", ", ", and ", "1, 2, 3, 4, and 5"}, // test maps with and without sorting required {map[string]int{"1": 10, "2": 20, "3": 30, "4": 40, "5": 50}, "--", nil, "10--20--30--40--50"}, {map[string]int{"3": 10, "2": 20, "1": 
30, "4": 40, "5": 50}, "--", nil, "30--20--10--40--50"}, {map[string]string{"1": "10", "2": "20", "3": "30", "4": "40", "5": "50"}, "--", nil, "10--20--30--40--50"}, {map[string]string{"3": "10", "2": "20", "1": "30", "4": "40", "5": "50"}, "--", nil, "30--20--10--40--50"}, {map[string]string{"one": "10", "two": "20", "three": "30", "four": "40", "five": "50"}, "--", nil, "50--40--10--30--20"}, {map[int]string{1: "10", 2: "20", 3: "30", 4: "40", 5: "50"}, "--", nil, "10--20--30--40--50"}, {map[int]string{3: "10", 2: "20", 1: "30", 4: "40", 5: "50"}, "--", nil, "30--20--10--40--50"}, {map[float64]string{3.3: "10", 2.3: "20", 1.3: "30", 4.3: "40", 5.3: "50"}, "--", nil, "30--20--10--40--50"}, // test maps with a last delimiter {map[string]int{"1": 10, "2": 20, "3": 30, "4": 40, "5": 50}, "--", "--and--", "10--20--30--40--and--50"}, {map[string]int{"3": 10, "2": 20, "1": 30, "4": 40, "5": 50}, "--", "--and--", "30--20--10--40--and--50"}, {map[string]string{"1": "10", "2": "20", "3": "30", "4": "40", "5": "50"}, "--", "--and--", "10--20--30--40--and--50"}, {map[string]string{"3": "10", "2": "20", "1": "30", "4": "40", "5": "50"}, "--", "--and--", "30--20--10--40--and--50"}, {map[string]string{"one": "10", "two": "20", "three": "30", "four": "40", "five": "50"}, "--", "--and--", "50--40--10--30--and--20"}, {map[int]string{1: "10", 2: "20", 3: "30", 4: "40", 5: "50"}, "--", "--and--", "10--20--30--40--and--50"}, {map[int]string{3: "10", 2: "20", 1: "30", 4: "40", 5: "50"}, "--", "--and--", "30--20--10--40--and--50"}, {map[float64]string{3.5: "10", 2.5: "20", 1.5: "30", 4.5: "40", 5.5: "50"}, "--", "--and--", "30--20--10--40--and--50"}, } { var result template.HTML var err error if this.last == nil { result, err = delimit(this.sequence, this.delimiter) } else { result, err = delimit(this.sequence, this.delimiter, this.last) } if err != nil { t.Errorf("[%d] failed: %s", i, err) continue } if !reflect.DeepEqual(result, this.expect) { t.Errorf("[%d] Delimit called on 
sequence: %v | delimiter: `%v` | last: `%v`, got %v but expected %v", i, this.sequence, this.delimiter, this.last, result, this.expect) } } } func TestSort(t *testing.T) { type ts struct { MyInt int MyFloat float64 MyString string } type mid struct { Tst TstX } for i, this := range []struct { sequence interface{} sortByField interface{} sortAsc string expect interface{} }{ {[]string{"class1", "class2", "class3"}, nil, "asc", []string{"class1", "class2", "class3"}}, {[]string{"class3", "class1", "class2"}, nil, "asc", []string{"class1", "class2", "class3"}}, {[]int{1, 2, 3, 4, 5}, nil, "asc", []int{1, 2, 3, 4, 5}}, {[]int{5, 4, 3, 1, 2}, nil, "asc", []int{1, 2, 3, 4, 5}}, // test sort key parameter is focibly set empty {[]string{"class3", "class1", "class2"}, map[int]string{1: "a"}, "asc", []string{"class1", "class2", "class3"}}, // test map sorting by keys {map[string]int{"1": 10, "2": 20, "3": 30, "4": 40, "5": 50}, nil, "asc", []int{10, 20, 30, 40, 50}}, {map[string]int{"3": 10, "2": 20, "1": 30, "4": 40, "5": 50}, nil, "asc", []int{30, 20, 10, 40, 50}}, {map[string]string{"1": "10", "2": "20", "3": "30", "4": "40", "5": "50"}, nil, "asc", []string{"10", "20", "30", "40", "50"}}, {map[string]string{"3": "10", "2": "20", "1": "30", "4": "40", "5": "50"}, nil, "asc", []string{"30", "20", "10", "40", "50"}}, {map[string]string{"one": "10", "two": "20", "three": "30", "four": "40", "five": "50"}, nil, "asc", []string{"50", "40", "10", "30", "20"}}, {map[int]string{1: "10", 2: "20", 3: "30", 4: "40", 5: "50"}, nil, "asc", []string{"10", "20", "30", "40", "50"}}, {map[int]string{3: "10", 2: "20", 1: "30", 4: "40", 5: "50"}, nil, "asc", []string{"30", "20", "10", "40", "50"}}, {map[float64]string{3.3: "10", 2.3: "20", 1.3: "30", 4.3: "40", 5.3: "50"}, nil, "asc", []string{"30", "20", "10", "40", "50"}}, // test map sorting by value {map[string]int{"1": 10, "2": 20, "3": 30, "4": 40, "5": 50}, "value", "asc", []int{10, 20, 30, 40, 50}}, {map[string]int{"3": 10, "2": 20, 
"1": 30, "4": 40, "5": 50}, "value", "asc", []int{10, 20, 30, 40, 50}}, // test map sorting by field value { map[string]ts{"1": {10, 10.5, "ten"}, "2": {20, 20.5, "twenty"}, "3": {30, 30.5, "thirty"}, "4": {40, 40.5, "forty"}, "5": {50, 50.5, "fifty"}}, "MyInt", "asc", []ts{{10, 10.5, "ten"}, {20, 20.5, "twenty"}, {30, 30.5, "thirty"}, {40, 40.5, "forty"}, {50, 50.5, "fifty"}}, }, { map[string]ts{"1": {10, 10.5, "ten"}, "2": {20, 20.5, "twenty"}, "3": {30, 30.5, "thirty"}, "4": {40, 40.5, "forty"}, "5": {50, 50.5, "fifty"}}, "MyFloat", "asc", []ts{{10, 10.5, "ten"}, {20, 20.5, "twenty"}, {30, 30.5, "thirty"}, {40, 40.5, "forty"}, {50, 50.5, "fifty"}}, }, { map[string]ts{"1": {10, 10.5, "ten"}, "2": {20, 20.5, "twenty"}, "3": {30, 30.5, "thirty"}, "4": {40, 40.5, "forty"}, "5": {50, 50.5, "fifty"}}, "MyString", "asc", []ts{{50, 50.5, "fifty"}, {40, 40.5, "forty"}, {10, 10.5, "ten"}, {30, 30.5, "thirty"}, {20, 20.5, "twenty"}}, }, // test sort desc {[]string{"class1", "class2", "class3"}, "value", "desc", []string{"class3", "class2", "class1"}}, {[]string{"class3", "class1", "class2"}, "value", "desc", []string{"class3", "class2", "class1"}}, // test sort by struct's method { []TstX{{A: "i", B: "j"}, {A: "e", B: "f"}, {A: "c", B: "d"}, {A: "g", B: "h"}, {A: "a", B: "b"}}, "TstRv", "asc", []TstX{{A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "f"}, {A: "g", B: "h"}, {A: "i", B: "j"}}, }, { []*TstX{{A: "i", B: "j"}, {A: "e", B: "f"}, {A: "c", B: "d"}, {A: "g", B: "h"}, {A: "a", B: "b"}}, "TstRp", "asc", []*TstX{{A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "f"}, {A: "g", B: "h"}, {A: "i", B: "j"}}, }, // test map sorting by struct's method { map[string]TstX{"1": {A: "i", B: "j"}, "2": {A: "e", B: "f"}, "3": {A: "c", B: "d"}, "4": {A: "g", B: "h"}, "5": {A: "a", B: "b"}}, "TstRv", "asc", []TstX{{A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "f"}, {A: "g", B: "h"}, {A: "i", B: "j"}}, }, { map[string]*TstX{"1": {A: "i", B: "j"}, "2": {A: "e", B: "f"}, "3": {A: "c", B: 
"d"}, "4": {A: "g", B: "h"}, "5": {A: "a", B: "b"}}, "TstRp", "asc", []*TstX{{A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "f"}, {A: "g", B: "h"}, {A: "i", B: "j"}}, }, // test sort by dot chaining key argument { []map[string]TstX{{"foo": TstX{A: "e", B: "f"}}, {"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}}, "foo.A", "asc", []map[string]TstX{{"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}, {"foo": TstX{A: "e", B: "f"}}}, }, { []map[string]TstX{{"foo": TstX{A: "e", B: "f"}}, {"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}}, ".foo.A", "asc", []map[string]TstX{{"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}, {"foo": TstX{A: "e", B: "f"}}}, }, { []map[string]TstX{{"foo": TstX{A: "e", B: "f"}}, {"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}}, "foo.TstRv", "asc", []map[string]TstX{{"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}, {"foo": TstX{A: "e", B: "f"}}}, }, { []map[string]*TstX{{"foo": &TstX{A: "e", B: "f"}}, {"foo": &TstX{A: "a", B: "b"}}, {"foo": &TstX{A: "c", B: "d"}}}, "foo.TstRp", "asc", []map[string]*TstX{{"foo": &TstX{A: "a", B: "b"}}, {"foo": &TstX{A: "c", B: "d"}}, {"foo": &TstX{A: "e", B: "f"}}}, }, { []map[string]mid{{"foo": mid{Tst: TstX{A: "e", B: "f"}}}, {"foo": mid{Tst: TstX{A: "a", B: "b"}}}, {"foo": mid{Tst: TstX{A: "c", B: "d"}}}}, "foo.Tst.A", "asc", []map[string]mid{{"foo": mid{Tst: TstX{A: "a", B: "b"}}}, {"foo": mid{Tst: TstX{A: "c", B: "d"}}}, {"foo": mid{Tst: TstX{A: "e", B: "f"}}}}, }, { []map[string]mid{{"foo": mid{Tst: TstX{A: "e", B: "f"}}}, {"foo": mid{Tst: TstX{A: "a", B: "b"}}}, {"foo": mid{Tst: TstX{A: "c", B: "d"}}}}, "foo.Tst.TstRv", "asc", []map[string]mid{{"foo": mid{Tst: TstX{A: "a", B: "b"}}}, {"foo": mid{Tst: TstX{A: "c", B: "d"}}}, {"foo": mid{Tst: TstX{A: "e", B: "f"}}}}, }, // test map sorting by dot chaining key argument { map[string]map[string]TstX{"1": {"foo": TstX{A: "e", B: "f"}}, "2": {"foo": TstX{A: "a", B: "b"}}, "3": {"foo": TstX{A: 
"c", B: "d"}}}, "foo.A", "asc", []map[string]TstX{{"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}, {"foo": TstX{A: "e", B: "f"}}}, }, { map[string]map[string]TstX{"1": {"foo": TstX{A: "e", B: "f"}}, "2": {"foo": TstX{A: "a", B: "b"}}, "3": {"foo": TstX{A: "c", B: "d"}}}, ".foo.A", "asc", []map[string]TstX{{"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}, {"foo": TstX{A: "e", B: "f"}}}, }, { map[string]map[string]TstX{"1": {"foo": TstX{A: "e", B: "f"}}, "2": {"foo": TstX{A: "a", B: "b"}}, "3": {"foo": TstX{A: "c", B: "d"}}}, "foo.TstRv", "asc", []map[string]TstX{{"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}, {"foo": TstX{A: "e", B: "f"}}}, }, { map[string]map[string]*TstX{"1": {"foo": &TstX{A: "e", B: "f"}}, "2": {"foo": &TstX{A: "a", B: "b"}}, "3": {"foo": &TstX{A: "c", B: "d"}}}, "foo.TstRp", "asc", []map[string]*TstX{{"foo": &TstX{A: "a", B: "b"}}, {"foo": &TstX{A: "c", B: "d"}}, {"foo": &TstX{A: "e", B: "f"}}}, }, { map[string]map[string]mid{"1": {"foo": mid{Tst: TstX{A: "e", B: "f"}}}, "2": {"foo": mid{Tst: TstX{A: "a", B: "b"}}}, "3": {"foo": mid{Tst: TstX{A: "c", B: "d"}}}}, "foo.Tst.A", "asc", []map[string]mid{{"foo": mid{Tst: TstX{A: "a", B: "b"}}}, {"foo": mid{Tst: TstX{A: "c", B: "d"}}}, {"foo": mid{Tst: TstX{A: "e", B: "f"}}}}, }, { map[string]map[string]mid{"1": {"foo": mid{Tst: TstX{A: "e", B: "f"}}}, "2": {"foo": mid{Tst: TstX{A: "a", B: "b"}}}, "3": {"foo": mid{Tst: TstX{A: "c", B: "d"}}}}, "foo.Tst.TstRv", "asc", []map[string]mid{{"foo": mid{Tst: TstX{A: "a", B: "b"}}}, {"foo": mid{Tst: TstX{A: "c", B: "d"}}}, {"foo": mid{Tst: TstX{A: "e", B: "f"}}}}, }, // interface slice with missing elements { []interface{}{ map[interface{}]interface{}{"Title": "Foo", "Weight": 10}, map[interface{}]interface{}{"Title": "Bar"}, map[interface{}]interface{}{"Title": "Zap", "Weight": 5}, }, "Weight", "asc", []interface{}{ map[interface{}]interface{}{"Title": "Bar"}, map[interface{}]interface{}{"Title": "Zap", "Weight": 5}, 
map[interface{}]interface{}{"Title": "Foo", "Weight": 10}, }, }, // test error cases {(*[]TstX)(nil), nil, "asc", false}, {TstX{A: "a", B: "b"}, nil, "asc", false}, { []map[string]TstX{{"foo": TstX{A: "e", B: "f"}}, {"foo": TstX{A: "a", B: "b"}}, {"foo": TstX{A: "c", B: "d"}}}, "foo.NotAvailable", "asc", false, }, { map[string]map[string]TstX{"1": {"foo": TstX{A: "e", B: "f"}}, "2": {"foo": TstX{A: "a", B: "b"}}, "3": {"foo": TstX{A: "c", B: "d"}}}, "foo.NotAvailable", "asc", false, }, {nil, nil, "asc", false}, } { var result interface{} var err error if this.sortByField == nil { result, err = sortSeq(this.sequence) } else { result, err = sortSeq(this.sequence, this.sortByField, this.sortAsc) } if b, ok := this.expect.(bool); ok && !b { if err == nil { t.Errorf("[%d] Sort didn't return an expected error", i) } } else { if err != nil { t.Errorf("[%d] failed: %s", i, err) continue } if !reflect.DeepEqual(result, this.expect) { t.Errorf("[%d] Sort called on sequence: %v | sortByField: `%v` | got %v but expected %v", i, this.sequence, this.sortByField, result, this.expect) } } } } func TestReturnWhenSet(t *testing.T) { for i, this := range []struct { data interface{} key interface{} expect interface{} }{ {[]int{1, 2, 3}, 1, int64(2)}, {[]uint{1, 2, 3}, 1, uint64(2)}, {[]float64{1.1, 2.2, 3.3}, 1, float64(2.2)}, {[]string{"foo", "bar", "baz"}, 1, "bar"}, {[]TstX{{A: "a", B: "b"}, {A: "c", B: "d"}, {A: "e", B: "f"}}, 1, ""}, {map[string]int{"foo": 1, "bar": 2, "baz": 3}, "bar", int64(2)}, {map[string]uint{"foo": 1, "bar": 2, "baz": 3}, "bar", uint64(2)}, {map[string]float64{"foo": 1.1, "bar": 2.2, "baz": 3.3}, "bar", float64(2.2)}, {map[string]string{"foo": "FOO", "bar": "BAR", "baz": "BAZ"}, "bar", "BAR"}, {map[string]TstX{"foo": {A: "a", B: "b"}, "bar": {A: "c", B: "d"}, "baz": {A: "e", B: "f"}}, "bar", ""}, {(*[]string)(nil), "bar", ""}, } { result := returnWhenSet(this.data, this.key) if !reflect.DeepEqual(result, this.expect) { t.Errorf("[%d] ReturnWhenSet got %v 
(type %v) but expected %v (type %v)", i, result, reflect.TypeOf(result), this.expect, reflect.TypeOf(this.expect)) } } } func TestMarkdownify(t *testing.T) { viper.Set("currentContentLanguage", helpers.NewDefaultLanguage()) for i, this := range []struct { in interface{} expect interface{} }{ {"Hello **World!**", template.HTML("Hello <strong>World!</strong>")}, {[]byte("Hello Bytes **World!**"), template.HTML("Hello Bytes <strong>World!</strong>")}, } { result, err := markdownify(this.in) if err != nil { t.Fatalf("[%d] unexpected error in markdownify: %s", i, err) } if !reflect.DeepEqual(result, this.expect) { t.Errorf("[%d] markdownify got %v (type %v) but expected %v (type %v)", i, result, reflect.TypeOf(result), this.expect, reflect.TypeOf(this.expect)) } } if _, err := markdownify(t); err == nil { t.Fatalf("markdownify should have errored") } } func TestApply(t *testing.T) { strings := []interface{}{"a\n", "b\n"} noStringers := []interface{}{tstNoStringer{}, tstNoStringer{}} chomped, _ := apply(strings, "chomp", ".") assert.Equal(t, []interface{}{template.HTML("a"), template.HTML("b")}, chomped) chomped, _ = apply(strings, "chomp", "c\n") assert.Equal(t, []interface{}{template.HTML("c"), template.HTML("c")}, chomped) chomped, _ = apply(nil, "chomp", ".") assert.Equal(t, []interface{}{}, chomped) _, err := apply(strings, "apply", ".") if err == nil { t.Errorf("apply with apply should fail") } var nilErr *error _, err = apply(nilErr, "chomp", ".") if err == nil { t.Errorf("apply with nil in seq should fail") } _, err = apply(strings, "dobedobedo", ".") if err == nil { t.Errorf("apply with unknown func should fail") } _, err = apply(noStringers, "chomp", ".") if err == nil { t.Errorf("apply when func fails should fail") } _, err = apply(tstNoStringer{}, "chomp", ".") if err == nil { t.Errorf("apply with non-sequence should fail") } } func TestChomp(t *testing.T) { base := "\n This is\na story " for i, item := range []string{ "\n", "\n\n", "\r", "\r\r", "\r\n", 
"\r\n\r\n", } { c, _ := chomp(base + item) chomped := string(c) if chomped != base { t.Errorf("[%d] Chomp failed, got '%v'", i, chomped) } _, err := chomp(tstNoStringer{}) if err == nil { t.Errorf("Chomp should fail") } } } func TestLower(t *testing.T) { cases := []struct { s interface{} want string isErr bool }{ {"TEST", "test", false}, {template.HTML("LoWeR"), "lower", false}, {[]byte("BYTES"), "bytes", false}, } for i, c := range cases { res, err := lower(c.s) if (err != nil) != c.isErr { t.Fatalf("[%d] unexpected isErr state: want %v, got %v, err = %v", i, c.want, (err != nil), err) } if res != c.want { t.Errorf("[%d] lower failed: want %v, got %v", i, c.want, res) } } } func TestTitle(t *testing.T) { cases := []struct { s interface{} want string isErr bool }{ {"test", "Test", false}, {template.HTML("hypertext"), "Hypertext", false}, {[]byte("bytes"), "Bytes", false}, } for i, c := range cases { res, err := title(c.s) if (err != nil) != c.isErr { t.Fatalf("[%d] unexpected isErr state: want %v, got %v, err = %v", i, c.want, (err != nil), err) } if res != c.want { t.Errorf("[%d] title failed: want %v, got %v", i, c.want, res) } } } func TestUpper(t *testing.T) { cases := []struct { s interface{} want string isErr bool }{ {"test", "TEST", false}, {template.HTML("UpPeR"), "UPPER", false}, {[]byte("bytes"), "BYTES", false}, } for i, c := range cases { res, err := upper(c.s) if (err != nil) != c.isErr { t.Fatalf("[%d] unexpected isErr state: want %v, got %v, err = %v", i, c.want, (err != nil), err) } if res != c.want { t.Errorf("[%d] upper failed: want %v, got %v", i, c.want, res) } } } func TestHighlight(t *testing.T) { code := "func boo() {}" highlighted, err := highlight(code, "go", "") if err != nil { t.Fatal("Highlight returned error:", err) } // this depends on a Pygments installation, but will always contain the function name. 
if !strings.Contains(string(highlighted), "boo") { t.Errorf("Highlight mismatch, got %v", highlighted) } _, err = highlight(t, "go", "") if err == nil { t.Error("Expected highlight error") } } func TestInflect(t *testing.T) { for i, this := range []struct { inflectFunc func(i interface{}) (string, error) in interface{} expected string }{ {humanize, "MyCamel", "My camel"}, {humanize, "", ""}, {humanize, "103", "103rd"}, {humanize, "41", "41st"}, {humanize, 103, "103rd"}, {humanize, int64(92), "92nd"}, {humanize, "5.5", "5.5"}, {pluralize, "cat", "cats"}, {pluralize, "", ""}, {singularize, "cats", "cat"}, {singularize, "", ""}, } { result, err := this.inflectFunc(this.in) if err != nil { t.Errorf("[%d] Unexpected Inflect error: %s", i, err) } else if result != this.expected { t.Errorf("[%d] Inflect method error, got %v expected %v", i, result, this.expected) } _, err = this.inflectFunc(t) if err == nil { t.Errorf("[%d] Expected Inflect error", i) } } } func TestCounterFuncs(t *testing.T) { for i, this := range []struct { countFunc func(i interface{}) (int, error) in string expected int }{ {countWords, "Do Be Do Be Do", 5}, {countWords, "旁边", 2}, {countRunes, "旁边", 2}, } { result, err := this.countFunc(this.in) if err != nil { t.Errorf("[%d] Unexpected counter error: %s", i, err) } else if result != this.expected { t.Errorf("[%d] Count method error, got %v expected %v", i, result, this.expected) } _, err = this.countFunc(t) if err == nil { t.Errorf("[%d] Expected Count error", i) } } } func TestReplace(t *testing.T) { v, _ := replace("aab", "a", "b") assert.Equal(t, "bbb", v) v, _ = replace("11a11", 1, 2) assert.Equal(t, "22a22", v) v, _ = replace(12345, 1, 2) assert.Equal(t, "22345", v) _, e := replace(tstNoStringer{}, "a", "b") assert.NotNil(t, e, "tstNoStringer isn't trimmable") _, e = replace("a", tstNoStringer{}, "b") assert.NotNil(t, e, "tstNoStringer cannot be converted to string") _, e = replace("a", "b", tstNoStringer{}) assert.NotNil(t, e, "tstNoStringer 
cannot be converted to string") } func TestReplaceRE(t *testing.T) { for i, val := range []struct { pattern interface{} repl interface{} src interface{} expect string ok bool }{ {"^https?://([^/]+).*", "$1", "http://gohugo.io/docs", "gohugo.io", true}, {"^https?://([^/]+).*", "$2", "http://gohugo.io/docs", "", true}, {tstNoStringer{}, "$2", "http://gohugo.io/docs", "", false}, {"^https?://([^/]+).*", tstNoStringer{}, "http://gohugo.io/docs", "", false}, {"^https?://([^/]+).*", "$2", tstNoStringer{}, "", false}, {"(ab)", "AB", "aabbaab", "aABbaAB", true}, {"(ab", "AB", "aabb", "", false}, // invalid re } { v, err := replaceRE(val.pattern, val.repl, val.src) if (err == nil) != val.ok { t.Errorf("[%d] %s", i, err) } assert.Equal(t, val.expect, v) } } func TestFindRE(t *testing.T) { for i, this := range []struct { expr string content interface{} limit int expect []string ok bool }{ {"[G|g]o", "Hugo is a static site generator written in Go.", 2, []string{"go", "Go"}, true}, {"[G|g]o", "Hugo is a static site generator written in Go.", -1, []string{"go", "Go"}, true}, {"[G|g]o", "Hugo is a static site generator written in Go.", 1, []string{"go"}, true}, {"[G|g]o", "Hugo is a static site generator written in Go.", 0, []string(nil), true}, {"[G|go", "Hugo is a static site generator written in Go.", 0, []string(nil), false}, {"[G|g]o", t, 0, []string(nil), false}, } { var ( res []string err error ) if this.limit >= 0 { res, err = findRE(this.expr, this.content, this.limit) } else { res, err = findRE(this.expr, this.content) } if err != nil && this.ok { t.Errorf("[%d] returned an unexpected error: %s", i, err) } assert.Equal(t, this.expect, res) } } func TestTrim(t *testing.T) { for i, this := range []struct { v1 interface{} v2 string expect interface{} }{ {"1234 my way 13", "123 ", "4 my way"}, {" my way ", " ", "my way"}, {1234, "14", "23"}, {tstNoStringer{}, " ", false}, } { result, err := trim(this.v1, this.v2) if b, ok := this.expect.(bool); ok && !b { if err == nil { 
t.Errorf("[%d] trim didn't return an expected error", i) } } else { if err != nil { t.Errorf("[%d] failed: %s", i, err) continue } if !reflect.DeepEqual(result, this.expect) { t.Errorf("[%d] got '%s' but expected %s", i, result, this.expect) } } } } func TestDateFormat(t *testing.T) { for i, this := range []struct { layout string value interface{} expect interface{} }{ {"Monday, Jan 2, 2006", "2015-01-21", "Wednesday, Jan 21, 2015"}, {"Monday, Jan 2, 2006", time.Date(2015, time.January, 21, 0, 0, 0, 0, time.UTC), "Wednesday, Jan 21, 2015"}, {"This isn't a date layout string", "2015-01-21", "This isn't a date layout string"}, // The following test case gives either "Tuesday, Jan 20, 2015" or "Monday, Jan 19, 2015" depending on the local time zone {"Monday, Jan 2, 2006", 1421733600, time.Unix(1421733600, 0).Format("Monday, Jan 2, 2006")}, {"Monday, Jan 2, 2006", 1421733600.123, false}, {time.RFC3339, time.Date(2016, time.March, 3, 4, 5, 0, 0, time.UTC), "2016-03-03T04:05:00Z"}, {time.RFC1123, time.Date(2016, time.March, 3, 4, 5, 0, 0, time.UTC), "Thu, 03 Mar 2016 04:05:00 UTC"}, {time.RFC3339, "Thu, 03 Mar 2016 04:05:00 UTC", "2016-03-03T04:05:00Z"}, {time.RFC1123, "2016-03-03T04:05:00Z", "Thu, 03 Mar 2016 04:05:00 UTC"}, } { result, err := dateFormat(this.layout, this.value) if b, ok := this.expect.(bool); ok && !b { if err == nil { t.Errorf("[%d] DateFormat didn't return an expected error, got %v", i, result) } } else { if err != nil { t.Errorf("[%d] DateFormat failed: %s", i, err) continue } if result != this.expect { t.Errorf("[%d] DateFormat got %v but expected %v", i, result, this.expect) } } } } func TestDefaultFunc(t *testing.T) { then := time.Now() now := time.Now() for i, this := range []struct { dflt interface{} given interface{} expected interface{} }{ {true, false, false}, {"5", 0, "5"}, {"test1", "set", "set"}, {"test2", "", "test2"}, {"test3", nil, "test3"}, {[2]int{10, 20}, [2]int{1, 2}, [2]int{1, 2}}, {[2]int{10, 20}, [0]int{}, [2]int{10, 20}}, 
{[2]int{100, 200}, nil, [2]int{100, 200}}, {[]string{"one"}, []string{"uno"}, []string{"uno"}}, {[]string{"two"}, []string{}, []string{"two"}}, {[]string{"three"}, nil, []string{"three"}}, {map[string]int{"one": 1}, map[string]int{"uno": 1}, map[string]int{"uno": 1}}, {map[string]int{"one": 1}, map[string]int{}, map[string]int{"one": 1}}, {map[string]int{"two": 2}, nil, map[string]int{"two": 2}}, {10, 1, 1}, {10, 0, 10}, {20, nil, 20}, {float32(10), float32(1), float32(1)}, {float32(10), 0, float32(10)}, {float32(20), nil, float32(20)}, {complex(2, -2), complex(1, -1), complex(1, -1)}, {complex(2, -2), complex(0, 0), complex(2, -2)}, {complex(3, -3), nil, complex(3, -3)}, {struct{ f string }{f: "one"}, struct{ f string }{}, struct{ f string }{}}, {struct{ f string }{f: "two"}, nil, struct{ f string }{f: "two"}}, {then, now, now}, {then, time.Time{}, then}, } { res, err := dfault(this.dflt, this.given) if err != nil { t.Errorf("[%d] default returned an error: %s", i, err) continue } if !reflect.DeepEqual(this.expected, res) { t.Errorf("[%d] default returned %v, but expected %v", i, res, this.expected) } } } func TestDefault(t *testing.T) { for i, this := range []struct { input interface{} tpl string expected string ok bool }{ {map[string]string{"foo": "bar"}, `{{ index . "foo" | default "nope" }}`, `bar`, true}, {map[string]string{"foo": "pop"}, `{{ index . 
"bar" | default "nada" }}`, `nada`, true}, {map[string]string{"foo": "cat"}, `{{ default "nope" .foo }}`, `cat`, true}, {map[string]string{"foo": "dog"}, `{{ default "nope" .foo "extra" }}`, ``, false}, {map[string]interface{}{"images": []string{}}, `{{ default "default.jpg" (index .images 0) }}`, `default.jpg`, true}, } { tmpl, err := New().New("test").Parse(this.tpl) if err != nil { t.Errorf("[%d] unable to create new html template %q: %s", i, this.tpl, err) continue } buf := new(bytes.Buffer) err = tmpl.Execute(buf, this.input) if (err == nil) != this.ok { t.Errorf("[%d] execute template returned unexpected error: %s", i, err) continue } if buf.String() != this.expected { t.Errorf("[%d] execute template got %v, but expected %v", i, buf.String(), this.expected) } } } func TestSafeHTML(t *testing.T) { for i, this := range []struct { str string tmplStr string expectWithoutEscape string expectWithEscape string }{ {`<div></div>`, `{{ . }}`, `&lt;div&gt;&lt;/div&gt;`, `<div></div>`}, } { tmpl, err := template.New("test").Parse(this.tmplStr) if err != nil { t.Errorf("[%d] unable to create new html template %q: %s", i, this.tmplStr, err) continue } buf := new(bytes.Buffer) err = tmpl.Execute(buf, this.str) if err != nil { t.Errorf("[%d] execute template with a raw string value returns unexpected error: %s", i, err) } if buf.String() != this.expectWithoutEscape { t.Errorf("[%d] execute template with a raw string value, got %v but expected %v", i, buf.String(), this.expectWithoutEscape) } buf.Reset() v, err := safeHTML(this.str) if err != nil { t.Fatalf("[%d] unexpected error in safeHTML: %s", i, err) } err = tmpl.Execute(buf, v) if err != nil { t.Errorf("[%d] execute template with an escaped string value by safeHTML returns unexpected error: %s", i, err) } if buf.String() != this.expectWithEscape { t.Errorf("[%d] execute template with an escaped string value by safeHTML, got %v but expected %v", i, buf.String(), this.expectWithEscape) } } } func TestSafeHTMLAttr(t 
*testing.T) { for i, this := range []struct { str string tmplStr string expectWithoutEscape string expectWithEscape string }{ {`href="irc://irc.freenode.net/#golang"`, `<a {{ . }}>irc</a>`, `<a ZgotmplZ>irc</a>`, `<a href="irc://irc.freenode.net/#golang">irc</a>`}, } { tmpl, err := template.New("test").Parse(this.tmplStr) if err != nil { t.Errorf("[%d] unable to create new html template %q: %s", i, this.tmplStr, err) continue } buf := new(bytes.Buffer) err = tmpl.Execute(buf, this.str) if err != nil { t.Errorf("[%d] execute template with a raw string value returns unexpected error: %s", i, err) } if buf.String() != this.expectWithoutEscape { t.Errorf("[%d] execute template with a raw string value, got %v but expected %v", i, buf.String(), this.expectWithoutEscape) } buf.Reset() v, err := safeHTMLAttr(this.str) if err != nil { t.Fatalf("[%d] unexpected error in safeHTMLAttr: %s", i, err) } err = tmpl.Execute(buf, v) if err != nil { t.Errorf("[%d] execute template with an escaped string value by safeHTMLAttr returns unexpected error: %s", i, err) } if buf.String() != this.expectWithEscape { t.Errorf("[%d] execute template with an escaped string value by safeHTMLAttr, got %v but expected %v", i, buf.String(), this.expectWithEscape) } } } func TestSafeCSS(t *testing.T) { for i, this := range []struct { str string tmplStr string expectWithoutEscape string expectWithEscape string }{ {`width: 60px;`, `<div style="{{ . 
}}"></div>`, `<div style="ZgotmplZ"></div>`, `<div style="width: 60px;"></div>`}, } { tmpl, err := template.New("test").Parse(this.tmplStr) if err != nil { t.Errorf("[%d] unable to create new html template %q: %s", i, this.tmplStr, err) continue } buf := new(bytes.Buffer) err = tmpl.Execute(buf, this.str) if err != nil { t.Errorf("[%d] execute template with a raw string value returns unexpected error: %s", i, err) } if buf.String() != this.expectWithoutEscape { t.Errorf("[%d] execute template with a raw string value, got %v but expected %v", i, buf.String(), this.expectWithoutEscape) } buf.Reset() v, err := safeCSS(this.str) if err != nil { t.Fatalf("[%d] unexpected error in safeCSS: %s", i, err) } err = tmpl.Execute(buf, v) if err != nil { t.Errorf("[%d] execute template with an escaped string value by safeCSS returns unexpected error: %s", i, err) } if buf.String() != this.expectWithEscape { t.Errorf("[%d] execute template with an escaped string value by safeCSS, got %v but expected %v", i, buf.String(), this.expectWithEscape) } } } func TestSafeJS(t *testing.T) { for i, this := range []struct { str string tmplStr string expectWithoutEscape string expectWithEscape string }{ {`619c16f`, `<script>var x{{ . 
}};</script>`, `<script>var x"619c16f";</script>`, `<script>var x619c16f;</script>`}, } { tmpl, err := template.New("test").Parse(this.tmplStr) if err != nil { t.Errorf("[%d] unable to create new html template %q: %s", i, this.tmplStr, err) continue } buf := new(bytes.Buffer) err = tmpl.Execute(buf, this.str) if err != nil { t.Errorf("[%d] execute template with a raw string value returns unexpected error: %s", i, err) } if buf.String() != this.expectWithoutEscape { t.Errorf("[%d] execute template with a raw string value, got %v but expected %v", i, buf.String(), this.expectWithoutEscape) } buf.Reset() v, err := safeJS(this.str) if err != nil { t.Fatalf("[%d] unexpected error in safeJS: %s", i, err) } err = tmpl.Execute(buf, v) if err != nil { t.Errorf("[%d] execute template with an escaped string value by safeJS returns unexpected error: %s", i, err) } if buf.String() != this.expectWithEscape { t.Errorf("[%d] execute template with an escaped string value by safeJS, got %v but expected %v", i, buf.String(), this.expectWithEscape) } } } func TestSafeURL(t *testing.T) { for i, this := range []struct { str string tmplStr string expectWithoutEscape string expectWithEscape string }{ {`irc://irc.freenode.net/#golang`, `<a href="{{ . 
}}">IRC</a>`, `<a href="#ZgotmplZ">IRC</a>`, `<a href="irc://irc.freenode.net/#golang">IRC</a>`}, } { tmpl, err := template.New("test").Parse(this.tmplStr) if err != nil { t.Errorf("[%d] unable to create new html template %q: %s", i, this.tmplStr, err) continue } buf := new(bytes.Buffer) err = tmpl.Execute(buf, this.str) if err != nil { t.Errorf("[%d] execute template with a raw string value returns unexpected error: %s", i, err) } if buf.String() != this.expectWithoutEscape { t.Errorf("[%d] execute template with a raw string value, got %v but expected %v", i, buf.String(), this.expectWithoutEscape) } buf.Reset() v, err := safeURL(this.str) if err != nil { t.Fatalf("[%d] unexpected error in safeURL: %s", i, err) } err = tmpl.Execute(buf, v) if err != nil { t.Errorf("[%d] execute template with an escaped string value by safeURL returns unexpected error: %s", i, err) } if buf.String() != this.expectWithEscape { t.Errorf("[%d] execute template with an escaped string value by safeURL, got %v but expected %v", i, buf.String(), this.expectWithEscape) } } } func TestBase64Decode(t *testing.T) { testStr := "abc123!?$*&()'-=@~" enc := base64.StdEncoding.EncodeToString([]byte(testStr)) result, err := base64Decode(enc) if err != nil { t.Error("base64Decode returned error:", err) } if result != testStr { t.Errorf("base64Decode: got '%s', expected '%s'", result, testStr) } _, err = base64Decode(t) if err == nil { t.Error("Expected error from base64Decode") } } func TestBase64Encode(t *testing.T) { testStr := "YWJjMTIzIT8kKiYoKSctPUB+" dec, err := base64.StdEncoding.DecodeString(testStr) if err != nil { t.Error("base64Encode: the DecodeString function of the base64 package returned an error:", err) } result, err := base64Encode(string(dec)) if err != nil { t.Errorf("base64Encode: Can't cast arg '%s' into a string:", testStr) } if result != testStr { t.Errorf("base64Encode: got '%s', expected '%s'", result, testStr) } _, err = base64Encode(t) if err == nil { t.Error("Expected 
error from base64Encode") } } func TestMD5(t *testing.T) { for i, this := range []struct { input string expectedHash string }{ {"Hello world, gophers!", "b3029f756f98f79e7f1b7f1d1f0dd53b"}, {"Lorem ipsum dolor", "06ce65ac476fc656bea3fca5d02cfd81"}, } { result, err := md5(this.input) if err != nil { t.Errorf("md5 returned error: %s", err) } if result != this.expectedHash { t.Errorf("[%d] md5: expected '%s', got '%s'", i, this.expectedHash, result) } } _, err := md5(t) if err == nil { t.Error("Expected error from md5") } } func TestSHA1(t *testing.T) { for i, this := range []struct { input string expectedHash string }{ {"Hello world, gophers!", "c8b5b0e33d408246e30f53e32b8f7627a7a649d4"}, {"Lorem ipsum dolor", "45f75b844be4d17b3394c6701768daf39419c99b"}, } { result, err := sha1(this.input) if err != nil { t.Errorf("sha1 returned error: %s", err) } if result != this.expectedHash { t.Errorf("[%d] sha1: expected '%s', got '%s'", i, this.expectedHash, result) } } _, err := sha1(t) if err == nil { t.Error("Expected error from sha1") } } func TestSHA256(t *testing.T) { for i, this := range []struct { input string expectedHash string }{ {"Hello world, gophers!", "6ec43b78da9669f50e4e422575c54bf87536954ccd58280219c393f2ce352b46"}, {"Lorem ipsum dolor", "9b3e1beb7053e0f900a674dd1c99aca3355e1275e1b03d3cb1bc977f5154e196"}, } { result, err := sha256(this.input) if err != nil { t.Errorf("sha256 returned error: %s", err) } if result != this.expectedHash { t.Errorf("[%d] sha256: expected '%s', got '%s'", i, this.expectedHash, result) } } _, err := sha256(t) if err == nil { t.Error("Expected error from sha256") } } func TestReadFile(t *testing.T) { viper.Reset() defer viper.Reset() workingDir := "/home/hugo" viper.Set("workingDir", workingDir) fs := &afero.MemMapFs{} hugofs.InitFs(fs) afero.WriteFile(fs, filepath.Join(workingDir, "/f/f1.txt"), []byte("f1-content"), 0755) afero.WriteFile(fs, filepath.Join("/home", "f2.txt"), []byte("f2-content"), 0755) for i, this := range []struct { 
filename string expect interface{} }{ {"", false}, {"b", false}, {filepath.FromSlash("/f/f1.txt"), "f1-content"}, {filepath.FromSlash("f/f1.txt"), "f1-content"}, {filepath.FromSlash("../f2.txt"), false}, } { result, err := readFileFromWorkingDir(this.filename) if b, ok := this.expect.(bool); ok && !b { if err == nil { t.Errorf("[%d] readFile didn't return an expected error", i) } } else { if err != nil { t.Errorf("[%d] readFile failed: %s", i, err) continue } if result != this.expect { t.Errorf("[%d] readFile got %q but expected %q", i, result, this.expect) } } } } func TestPartialCached(t *testing.T) { testCases := []struct { name string partial string tmpl string variant string }{ // name and partial should match between test cases. {"test1", "{{ .Title }} seq: {{ shuffle (seq 1 20) }}", `{{ partialCached "test1" . }}`, ""}, {"test1", "{{ .Title }} seq: {{ shuffle (seq 1 20) }}", `{{ partialCached "test1" . "%s" }}`, "header"}, {"test1", "{{ .Title }} seq: {{ shuffle (seq 1 20) }}", `{{ partialCached "test1" . "%s" }}`, "footer"}, {"test1", "{{ .Title }} seq: {{ shuffle (seq 1 20) }}", `{{ partialCached "test1" . 
"%s" }}`, "header"}, } results := make(map[string]string, len(testCases)) var data struct { Title string Section string Params map[string]interface{} } data.Title = "**BatMan**" data.Section = "blog" data.Params = map[string]interface{}{"langCode": "en"} tstInitTemplates() InitializeT() for i, tc := range testCases { var tmp string if tc.variant != "" { tmp = fmt.Sprintf(tc.tmpl, tc.variant) } else { tmp = tc.tmpl } tmpl, err := New().New("testroot").Parse(tmp) if err != nil { t.Fatalf("[%d] unable to create new html template: %s", i, err) } if tmpl == nil { t.Fatalf("[%d] tmpl should not be nil!", i) } tmpl.New("partials/" + tc.name).Parse(tc.partial) buf := new(bytes.Buffer) err = tmpl.Execute(buf, &data) if err != nil { t.Fatalf("[%d] error executing template: %s", i, err) } for j := 0; j < 10; j++ { buf2 := new(bytes.Buffer) err = tmpl.Execute(buf2, nil) if err != nil { t.Fatalf("[%d] error executing template 2nd time: %s", i, err) } if !reflect.DeepEqual(buf, buf2) { t.Fatalf("[%d] cached results do not match:\nResult 1:\n%q\nResult 2:\n%q", i, buf, buf2) } } // double-check against previous test cases of the same variant previous, ok := results[tc.name+tc.variant] if !ok { results[tc.name+tc.variant] = buf.String() } else { if previous != buf.String() { t.Errorf("[%d] cached variant differs from previous rendering; got:\n%q\nwant:\n%q", i, buf.String(), previous) } } } } func BenchmarkPartial(b *testing.B) { InitializeT() tmpl, err := New().New("testroot").Parse(`{{ partial "bench1" . }}`) if err != nil { b.Fatalf("unable to create new html template: %s", err) } tmpl.New("partials/bench1").Parse(`{{ shuffle (seq 1 10) }}`) buf := new(bytes.Buffer) b.ReportAllocs() b.ResetTimer() for i := 0; i < b.N; i++ { if err = tmpl.Execute(buf, nil); err != nil { b.Fatalf("error executing template: %s", err) } buf.Reset() } } func BenchmarkPartialCached(b *testing.B) { InitializeT() tmpl, err := New().New("testroot").Parse(`{{ partialCached "bench1" . 
}}`) if err != nil { b.Fatalf("unable to create new html template: %s", err) } tmpl.New("partials/bench1").Parse(`{{ shuffle (seq 1 10) }}`) buf := new(bytes.Buffer) b.ReportAllocs() b.ResetTimer() for i := 0; i < b.N; i++ { if err = tmpl.Execute(buf, nil); err != nil { b.Fatalf("error executing template: %s", err) } buf.Reset() } } func BenchmarkPartialCachedVariants(b *testing.B) { InitializeT() tmpl, err := New().New("testroot").Parse(`{{ partialCached "bench1" . "header" }}`) if err != nil { b.Fatalf("unable to create new html template: %s", err) } tmpl.New("partials/bench1").Parse(`{{ shuffle (seq 1 10) }}`) buf := new(bytes.Buffer) b.ReportAllocs() b.ResetTimer() for i := 0; i < b.N; i++ { if err = tmpl.Execute(buf, nil); err != nil { b.Fatalf("error executing template: %s", err) } buf.Reset() } }<|fim▁end|>
{"a": []int64{1, 2, 3}, "b": []int64{4, 5, 6}}, {"a": []int64{7, 8, 9}, "b": []int64{10, 11, 12}}, }, },
<|file_name|>FPV_resnet_fullycoupled.py<|end_file_name|><|fim▁begin|>import os import numpy as np import pandas as pd import matplotlib.pyplot as plt from sklearn.model_selection import train_test_split import tensorflow as tf from keras.models import Model from keras.layers import Dense, Input from keras.callbacks import ModelCheckpoint from resBlock import res_block_org from data_reader import read_hdf_data, read_hdf_data_psi from writeANNProperties import writeANNProperties from keras import backend as K from keras.models import load_model import ast ########################## # Parameters n_neuron = 500 branches = 3 scale = 3 batch_size = 1024*4 epochs = 2000 vsplit = 0.1 batch_norm = False # define the type of scaler: MinMax or Standard scaler = 'Standard' # 'Standard' 'MinMax' ########################## labels = [] with open('GRI_species_order_reduced', 'r') as f: species = f.readlines() for line in species: # remove linebreak which is the last character of the string current_place = line[:-1] # add item to the list labels.append(current_place) # append other fields: heatrelease, T, PVs #labels.append('heatRelease') labels.append('T') labels.append('PVs') # tabulate psi, mu, alpha labels.append('psi') labels.append('mu') labels.append('alpha') # DO NOT CHANGE THIS ORDER!! 
input_features=['f','zeta','pv'] # read in the data X, y, df, in_scaler, out_scaler = read_hdf_data_psi('./tables_of_fgm.H5',key='of_tables', in_labels=input_features, labels = labels,scaler=scaler) # split into train and test data X_train, X_test, y_train, y_test = train_test_split(X,y, test_size=0.01) # %% print('set up ANN') # ANN parameters dim_input = X_train.shape[1] dim_label = y_train.shape[1] # This returns a tensor inputs = Input(shape=(dim_input,))#,name='input_1') # a layer instance is callable on a tensor, and returns a tensor x = Dense(n_neuron, activation='relu')(inputs) # # x = res_block(x, scale, n_neuron, stage=1, block='a', bn=batch_norm,branches=branches) # x = res_block(x, scale, n_neuron, stage=1, block='b', bn=batch_norm,branches=branches) # x = res_block(x, scale, n_neuron, stage=1, block='c', bn=batch_norm,branches=branches) x = res_block_org(x, n_neuron, stage=1, block='a', bn=batch_norm) x = res_block_org(x, n_neuron, stage=1, block='b', bn=batch_norm) x = res_block_org(x, n_neuron, stage=1, block='c', bn=batch_norm) #x = res_block(x, n_neuron, stage=1, block='d', bn=batch_norm) predictions = Dense(dim_label, activation='linear')(x) model = Model(inputs=inputs, outputs=predictions) model.compile(loss='mse', optimizer='adam', metrics=['accuracy']) # get the model summary model.summary() # checkpoint (save the best model based validate loss) filepath = "./tmp/weights.best.cntk.hdf5" checkpoint = ModelCheckpoint(filepath, monitor='val_loss', verbose=1, save_best_only=True, mode='min', period=10) callbacks_list = [checkpoint] # fit the model history = model.fit( X_train, y_train, epochs=epochs, batch_size=batch_size, validation_split=vsplit, verbose=2, callbacks=callbacks_list, shuffle=True) #%% model.load_weights("./tmp/weights.best.cntk.hdf5") # cntk.combine(model.outputs).save('mayerTest.dnn') # # %% # ref = df.loc[df['p'] == 40] # x_test = in_scaler.transform(ref[['p', 'he']]) predict_val = model.predict(X_test) X_test_df = 
pd.DataFrame(in_scaler.inverse_transform(X_test),columns=input_features) y_test_df = pd.DataFrame(out_scaler.inverse_transform(y_test),columns=labels) sp='PVs' # loss fig = plt.figure() plt.semilogy(history.history['loss']) if vsplit: plt.semilogy(history.history['val_loss']) plt.title('mse') plt.ylabel('loss') plt.xlabel('epoch') plt.legend(['train', 'test'], loc='upper right') plt.savefig('./exported/Loss_%s_%s_%i.eps' % (sp,scaler,n_neuron),format='eps') plt.show(block=False) predict_df = pd.DataFrame(out_scaler.inverse_transform(predict_val), columns=labels) plt.figure()<|fim▁hole|>plt.title(sp) plt.savefig('./exported/Error_%s_%s_%i.eps' % (sp,scaler,n_neuron),format='eps') plt.show(block=False) plt.figure() plt.scatter(predict_df[sp],y_test_df[sp],s=1) plt.title('R2 for '+sp) plt.savefig('./exported/R2_%s_%s_%i.eps' % (sp,scaler,n_neuron),format='eps') plt.show(block=False) # %% a=(y_test_df[sp] - predict_df[sp]) / y_test_df[sp] test_data=pd.concat([X_test_df,y_test_df],axis=1) pred_data=pd.concat([X_test_df,predict_df],axis=1) test_data.to_hdf('sim_check.H5',key='test') pred_data.to_hdf('sim_check.H5',key='pred') # Save model sess = K.get_session() saver = tf.train.Saver(tf.global_variables()) saver.save(sess, './exported/my_model') model.save('FPV_ANN_tabulated_%s.H5' % scaler) # write the OpenFOAM ANNProperties file writeANNProperties(in_scaler,out_scaler,scaler) # Convert the model to #run -i k2tf.py --input_model='FPV_ANN_tabulated_Standard.H5' --output_model='exported/FPV_ANN_tabulated_Standard.pb'<|fim▁end|>
plt.title('Error of %s ' % sp) plt.plot((y_test_df[sp] - predict_df[sp]) / y_test_df[sp])
<|file_name|>getLibrary.py<|end_file_name|><|fim▁begin|>#!/usr/bin/ipython library_file = open('/srv/http/.config/cmus/lib.pl');<|fim▁hole|>for x in tracks: print('<li>' + x + '</li>')<|fim▁end|>
tracks = library_file.readlines()
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import unittest, tempfile, uuid, os, shutil, sys class Test_CreateRemoveTempLocation(unittest.TestCase): def __init__(self, *args, **kwargs): super(Test_CreateRemoveTempLocation, self).__init__(*args, **kwargs) self._tmpTestFolder = None def setUp(self): self._tmpTestFolder = os.path.join(tempfile.gettempdir(), "unittest_%s_%s" % (self.__class__.__name__, str( uuid.uuid4() )[:8])) os.mkdir(self._tmpTestFolder) sys.path.append(self._tmpTestFolder) def tearDown(self):<|fim▁hole|> if os.path.isdir(self._tmpTestFolder): print "removing test folder: '%s'" %self._tmpTestFolder shutil.rmtree(self._tmpTestFolder) self._tmpTestFolder = None if self._tmpTestFolder in sys.path: sys.path.remove(self._tmpTestFolder)<|fim▁end|>
if not self._tmpTestFolder: return
<|file_name|>getting_started_test.py<|end_file_name|><|fim▁begin|># Copyright 2015, Google, Inc. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and<|fim▁hole|> from getting_started import main def test_main(cloud_config, capsys): main(cloud_config.project) out, _ = capsys.readouterr() assert re.search(re.compile( r'Query Results:.hamlet', re.DOTALL), out)<|fim▁end|>
# limitations under the License. import re
<|file_name|>debug_warp_ocmlogistic.py<|end_file_name|><|fim▁begin|>import numpy as np import fdasrsf as fs from scipy.integrate import cumtrapz from scipy.linalg import norm, expm import h5py fun = h5py.File('/home/dtucker/fdasrsf/debug_data_oc_mlogit.h5') q = fun['q'][:] y = fun['y'][:] alpha = fun['alpha'][:] nu = fun['nu'][:] max_itr = 8000 # 4000 tol = 1e-4 deltag = .05 deltaO = .08 display = 1 alpha = alpha/norm(alpha) q, scale = fs.scale_curve(q) # q/norm(q) for ii in range(0, nu.shape[2]): nu[:, :, ii], scale = fs.scale_curve(nu[:, :, ii]) # nu/norm(nu) # python code n = q.shape[0] TT = q.shape[1] m = nu.shape[2] time = np.linspace(0, 1, TT) binsize = 1. / (TT - 1) gam = np.linspace(0, 1, TT) O = np.eye(n) O_old = O.copy() gam_old = gam.copy() qtilde = q.copy() # rotation basis (Skew Symmetric) # E = np.array([[0, -1.], [1., 0]]) # warping basis (Fourier) p = 20 f_basis = np.zeros((TT, p)) for i in range(0, int(p/2)): f_basis[:, 2*i] = 1/np.sqrt(np.pi) * np.sin(2*np.pi*(i+1)*time) f_basis[:, 2*i + 1] = 1/np.sqrt(np.pi) * np.cos(2*np.pi*(i+1)*time) itr = 0 max_val = np.zeros(max_itr+1) while itr <= max_itr: # inner product value A = np.zeros(m) for i in range(0, m): A[i] = fs.innerprod_q2(qtilde, nu[:, :, i]) # form gradient for rotation # B = np.zeros((n, n, m)) # for i in range(0, m): # B[:, :, i] = cf.innerprod_q2(E.dot(qtilde), nu[:, :, i]) * E<|fim▁hole|> # O_new = O_old.dot(expm(deltaO * hO)) theta = np.arccos(O_old[0, 0]) Ograd = np.array([(-1*np.sin(theta), -1*np.cos(theta)), (np.cos(theta), -1*np.sin(theta))]) B = np.zeros(m) for i in range(0, m): B[i] = fs.innerprod_q2(Ograd.dot(qtilde), nu[:, :, i]) tmp1 = np.sum(np.exp(alpha + A)) tmp2 = np.sum(np.exp(alpha + A) * B) hO = np.sum(y * B) - (tmp2 / tmp1) O_new = fs.rot_mat(theta+deltaO*hO) # form gradient for warping qtilde_diff = np.gradient(qtilde, binsize) qtilde_diff = qtilde_diff[1] c = np.zeros((TT, m)) for i in range(0, m): tmp3 = np.zeros((TT, p)) for j in range(0, p): cbar = 
cumtrapz(f_basis[:, j], time, initial=0) ctmp = 2*qtilde_diff*cbar + qtilde*f_basis[:, j] tmp3[:, j] = fs.innerprod_q2(ctmp, nu[:, :, i]) * f_basis[:, j] c[:, i] = np.sum(tmp3, axis=1) tmp2 = np.sum(np.exp(alpha + A) * c, axis=1) hpsi = np.sum(y * c, axis=1) - (tmp2 / tmp1) vecnorm = norm(hpsi) costmp = np.cos(deltag * vecnorm) * np.ones(TT) sintmp = np.sin(deltag * vecnorm) * (hpsi / vecnorm) psi_new = costmp + sintmp gam_tmp = cumtrapz(psi_new * psi_new, time, initial=0) gam_tmp = (gam_tmp - gam_tmp[0]) / (gam_tmp[-1] - gam_tmp[0]) gam_new = np.interp(gam_tmp, time, gam_old) max_val[itr] = np.sum(y * (alpha + A)) - np.log(tmp1) if display == 1: print("Iteration %d : Cost %f" % (itr+1, max_val[itr])) gam_old = gam_new.copy() O_old = O_new.copy() qtilde = fs.group_action_by_gamma(O_old.dot(q), gam_old) if vecnorm < tol and hO < tol: break itr += 1<|fim▁end|>
# tmp1 = np.sum(np.exp(alpha + A)) # tmp2 = np.sum(np.exp(alpha + A) * B, axis=2) # hO = np.sum(y * B, axis=2) - (tmp2 / tmp1)
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*- # # Copyright (C) 2015-2018 Canonical Ltd # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import contextlib import logging import os import stat from unittest import mock import fixtures import http.server import progressbar import threading import testscenarios import testtools import snapcraft from snapcraft.internal import common, elf, steps from snapcraft.internal.project_loader import grammar_processing from tests import fake_servers, fixture_setup from tests.file_utils import get_snapcraft_path class ContainsList(list): def __eq__(self, other): return all([i[0] in i[1] for i in zip(self, other)]) class MockOptions: def __init__( self, source=None, source_type=None, source_branch=None, source_tag=None, source_subdir=None, source_depth=None, source_commit=None, source_checksum=None, disable_parallel=False, ): self.source = source self.source_type = source_type self.source_depth = source_depth self.source_branch = source_branch self.source_commit = source_commit self.source_tag = source_tag self.source_subdir = source_subdir self.disable_parallel = disable_parallel class IsExecutable: """Match if a file path is executable.""" def __str__(self): return "IsExecutable()" def match(self, file_path): if not os.stat(file_path).st_mode & stat.S_IEXEC: return testtools.matchers.Mismatch( "Expected {!r} to be executable, but it was 
not".format(file_path) ) return None<|fim▁hole|> class LinkExists: """Match if a file path is a symlink.""" def __init__(self, expected_target=None): self._expected_target = expected_target def __str__(self): return "LinkExists()" def match(self, file_path): if not os.path.exists(file_path): return testtools.matchers.Mismatch( "Expected {!r} to be a symlink, but it doesn't exist".format(file_path) ) if not os.path.islink(file_path): return testtools.matchers.Mismatch( "Expected {!r} to be a symlink, but it was not".format(file_path) ) target = os.readlink(file_path) if target != self._expected_target: return testtools.matchers.Mismatch( "Expected {!r} to be a symlink pointing to {!r}, but it was " "pointing to {!r}".format(file_path, self._expected_target, target) ) return None class TestCase(testscenarios.WithScenarios, testtools.TestCase): def setUp(self): super().setUp() temp_cwd_fixture = fixture_setup.TempCWD() self.useFixture(temp_cwd_fixture) self.path = temp_cwd_fixture.path # Use a separate path for XDG dirs, or changes there may be detected as # source changes. self.xdg_path = self.useFixture(fixtures.TempDir()).path self.useFixture(fixture_setup.TempXDG(self.xdg_path)) self.fake_terminal = fixture_setup.FakeTerminal() self.useFixture(self.fake_terminal) self.useFixture(fixture_setup.SilentSnapProgress()) # Some tests will directly or indirectly change the plugindir, which # is a module variable. Make sure that it is returned to the original # value when a test ends. 
self.addCleanup(common.set_plugindir, common.get_plugindir()) self.addCleanup(common.set_schemadir, common.get_schemadir()) self.addCleanup(common.set_librariesdir, common.get_librariesdir()) self.addCleanup(common.set_extensionsdir, common.get_extensionsdir()) self.addCleanup(common.reset_env) common.set_schemadir(os.path.join(get_snapcraft_path(), "schema")) self.fake_logger = fixtures.FakeLogger(level=logging.ERROR) self.useFixture(self.fake_logger) patcher = mock.patch("multiprocessing.cpu_count") self.cpu_count = patcher.start() self.cpu_count.return_value = 2 self.addCleanup(patcher.stop) # We do not want the paths to affect every test we have. patcher = mock.patch( "snapcraft.file_utils.get_tool_path", side_effect=lambda x: x ) patcher.start() self.addCleanup(patcher.stop) patcher = mock.patch( "snapcraft.internal.indicators.ProgressBar", new=SilentProgressBar ) patcher.start() self.addCleanup(patcher.stop) # These are what we expect by default self.snap_dir = os.path.join(os.getcwd(), "snap") self.prime_dir = os.path.join(os.getcwd(), "prime") self.stage_dir = os.path.join(os.getcwd(), "stage") self.parts_dir = os.path.join(os.getcwd(), "parts") self.local_plugins_dir = os.path.join(self.snap_dir, "plugins") # Avoid installing patchelf in the tests self.useFixture(fixtures.EnvironmentVariable("SNAPCRAFT_NO_PATCHELF", "1")) # Disable Sentry reporting for tests, otherwise they'll hang waiting # for input self.useFixture( fixtures.EnvironmentVariable("SNAPCRAFT_ENABLE_ERROR_REPORTING", "false") ) # Don't let the managed host variable leak into tests self.useFixture(fixtures.EnvironmentVariable("SNAPCRAFT_MANAGED_HOST")) machine = os.environ.get("SNAPCRAFT_TEST_MOCK_MACHINE", None) self.base_environment = fixture_setup.FakeBaseEnvironment(machine=machine) self.useFixture(self.base_environment) # Make sure "SNAPCRAFT_ENABLE_DEVELOPER_DEBUG" is reset between tests self.useFixture( fixtures.EnvironmentVariable("SNAPCRAFT_ENABLE_DEVELOPER_DEBUG") ) 
self.useFixture(fixture_setup.FakeSnapcraftctl()) def make_snapcraft_yaml(self, content, encoding="utf-8"): with contextlib.suppress(FileExistsError): os.mkdir("snap") snapcraft_yaml = os.path.join("snap", "snapcraft.yaml") with open(snapcraft_yaml, "w", encoding=encoding) as fp: fp.write(content) return snapcraft_yaml def verify_state(self, part_name, state_dir, expected_step_name): self.assertTrue( os.path.isdir(state_dir), "Expected state directory for {}".format(part_name), ) # Expect every step up to and including the specified one to be run step = steps.get_step_by_name(expected_step_name) for step in step.previous_steps() + [step]: self.assertTrue( os.path.exists(os.path.join(state_dir, step.name)), "Expected {!r} to be run for {}".format(step.name, part_name), ) def load_part( self, part_name, plugin_name=None, part_properties=None, project_options=None, stage_packages_repo=None, base="core", confinement="strict", snap_type="app", ): if not plugin_name: plugin_name = "nil" properties = {"plugin": plugin_name} if part_properties: properties.update(part_properties) if not project_options: project_options = snapcraft.ProjectOptions() validator = snapcraft.internal.project_loader.Validator() schema = validator.part_schema definitions_schema = validator.definitions_schema plugin = snapcraft.internal.pluginhandler.load_plugin( part_name=part_name, plugin_name=plugin_name, properties=properties, project_options=project_options, part_schema=schema, definitions_schema=definitions_schema, ) if not stage_packages_repo: stage_packages_repo = mock.Mock() grammar_processor = grammar_processing.PartGrammarProcessor( plugin=plugin, properties=properties, project=project_options, repo=stage_packages_repo, ) return snapcraft.internal.pluginhandler.PluginHandler( plugin=plugin, part_properties=properties, project_options=project_options, part_schema=schema, definitions_schema=definitions_schema, grammar_processor=grammar_processor, stage_packages_repo=stage_packages_repo, 
snap_base_path="/snap/fake-name/current", base=base, confinement=confinement, snap_type=snap_type, soname_cache=elf.SonameCache(), ) class TestWithFakeRemoteParts(TestCase): def setUp(self): super().setUp() self.useFixture(fixture_setup.FakeParts()) class FakeFileHTTPServerBasedTestCase(TestCase): def setUp(self): super().setUp() self.useFixture(fixtures.EnvironmentVariable("no_proxy", "localhost,127.0.0.1")) self.server = http.server.HTTPServer( ("127.0.0.1", 0), fake_servers.FakeFileHTTPRequestHandler ) server_thread = threading.Thread(target=self.server.serve_forever) self.addCleanup(server_thread.join) self.addCleanup(self.server.server_close) self.addCleanup(self.server.shutdown) server_thread.start() class SilentProgressBar(progressbar.ProgressBar): """A progress bar causing no spurious output during tests.""" def start(self): pass def update(self, value=None): pass def finish(self): pass<|fim▁end|>
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
pub mod back; pub mod front; pub mod worker;
<|file_name|>findandreplacedlg.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # Copyright (c) 2008-9 Qtrac Ltd. All rights reserved. # This program or module is free software: you can redistribute it and/or # modify it under the terms of the GNU General Public License as published # by the Free Software Foundation, either version 2 of the License, or # version 3 of the License, or (at your option) any later version. It is # provided for educational purposes and is distributed in the hope that # it will be useful, but WITHOUT ANY WARRANTY; without even the implied # warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See # the GNU General Public License for more details. import re from PyQt4.QtCore import (Qt, SIGNAL, pyqtSignature) from PyQt4.QtGui import (QApplication, QDialog) import ui_findandreplacedlg MAC = True try: from PyQt4.QtGui import qt_mac_set_native_menubar except ImportError: MAC = False class FindAndReplaceDlg(QDialog, ui_findandreplacedlg.Ui_FindAndReplaceDlg): def __init__(self, text, parent=None): super(FindAndReplaceDlg, self).__init__(parent) self.__text = str(text) self.__index = 0 self.setupUi(self) if not MAC: self.findButton.setFocusPolicy(Qt.NoFocus) self.replaceButton.setFocusPolicy(Qt.NoFocus) self.replaceAllButton.setFocusPolicy(Qt.NoFocus) self.closeButton.setFocusPolicy(Qt.NoFocus) self.updateUi() @pyqtSignature("QString") def on_findLineEdit_textEdited(self, text): self.__index = 0 self.updateUi() def makeRegex(self): findText = str(self.findLineEdit.text()) if str(self.syntaxComboBox.currentText()) == "Literal": findText = re.escape(findText) flags = re.MULTILINE|re.DOTALL|re.UNICODE if not self.caseCheckBox.isChecked(): flags |= re.IGNORECASE if self.wholeCheckBox.isChecked(): findText = r"\b{0}\b".format(findText) return re.compile(findText, flags) @pyqtSignature("") def on_findButton_clicked(self): regex = self.makeRegex() match = regex.search(self.__text, self.__index) if match is not None: self.__index = 
match.end() self.emit(SIGNAL("found"), match.start()) else: self.emit(SIGNAL("notfound")) @pyqtSignature("") def on_replaceButton_clicked(self): regex = self.makeRegex() self.__text = regex.sub(str(self.replaceLineEdit.text()), self.__text, 1) @pyqtSignature("") def on_replaceAllButton_clicked(self): regex = self.makeRegex() self.__text = regex.sub(str(self.replaceLineEdit.text()), self.__text) def updateUi(self): enable = not self.findLineEdit.text().isEmpty() self.findButton.setEnabled(enable) self.replaceButton.setEnabled(enable) self.replaceAllButton.setEnabled(enable) def text(self): return self.__text if __name__ == "__main__": import sys<|fim▁hole|> text = """US experience shows that, unlike traditional patents, software patents do not encourage innovation and R&D, quite the contrary. In particular they hurt small and medium-sized enterprises and generally newcomers in the market. They will just weaken the market and increase spending on patents and litigation, at the expense of technological innovation and research. Especially dangerous are attempts to abuse the patent system by preventing interoperability as a means of avoiding competition with technological ability. --- Extract quoted from Linus Torvalds and Alan Cox's letter to the President of the European Parliament http://www.effi.org/patentit/patents_torvalds_cox.html""" def found(where): print("Found at {0}".format(where)) def nomore(): print("No more found") app = QApplication(sys.argv) form = FindAndReplaceDlg(text) form.connect(form, SIGNAL("found"), found) form.connect(form, SIGNAL("notfound"), nomore) form.show() app.exec_() print(form.text())<|fim▁end|>
<|file_name|>texture_browser.py<|end_file_name|><|fim▁begin|># ##### BEGIN GPL LICENSE BLOCK ##### # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software Foundation, # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # # ##### END GPL LICENSE BLOCK ##### import asyncio import logging import threading import os import bpy import bgl import blf import pillarsdk from . import async_loop, pillar, cache, blender, utils REQUIRED_ROLES_FOR_TEXTURE_BROWSER = {'subscriber', 'demo'} MOUSE_SCROLL_PIXELS_PER_TICK = 50 ICON_WIDTH = 128 ICON_HEIGHT = 128 TARGET_ITEM_WIDTH = 400 TARGET_ITEM_HEIGHT = 128 ITEM_MARGIN_X = 5 ITEM_MARGIN_Y = 5 ITEM_PADDING_X = 5 library_path = '/tmp' library_icons_path = os.path.join(os.path.dirname(__file__), "icons") log = logging.getLogger(__name__) class SpecialFolderNode(pillarsdk.Node): NODE_TYPE = 'SPECIAL' class UpNode(SpecialFolderNode): NODE_TYPE = 'UP' def __init__(self): super().__init__() self['_id'] = 'UP' self['node_type'] = self.NODE_TYPE class ProjectNode(SpecialFolderNode): NODE_TYPE = 'PROJECT' def __init__(self, project): super().__init__() assert isinstance(project, pillarsdk.Project), 'wrong type for project: %r' % type(project) self.merge(project.to_dict()) self['node_type'] = self.NODE_TYPE class MenuItem: """GUI menu item for the 3D View GUI.""" icon_margin_x = 4 icon_margin_y = 4 text_margin_x = 6 text_height = 16 text_width = 72 DEFAULT_ICONS = { 
'FOLDER': os.path.join(library_icons_path, 'folder.png'), 'SPINNER': os.path.join(library_icons_path, 'spinner.png'), } FOLDER_NODE_TYPES = {'group_texture', 'group_hdri', UpNode.NODE_TYPE, ProjectNode.NODE_TYPE} SUPPORTED_NODE_TYPES = {'texture', 'hdri'}.union(FOLDER_NODE_TYPES) def __init__(self, node, file_desc, thumb_path: str, label_text): self.log = logging.getLogger('%s.MenuItem' % __name__) if node['node_type'] not in self.SUPPORTED_NODE_TYPES: self.log.info('Invalid node type in node: %s', node) raise TypeError('Node of type %r not supported; supported are %r.' % ( node['node_type'], self.SUPPORTED_NODE_TYPES)) assert isinstance(node, pillarsdk.Node), 'wrong type for node: %r' % type(node) assert isinstance(node['_id'], str), 'wrong type for node["_id"]: %r' % type(node['_id']) self.node = node # pillarsdk.Node, contains 'node_type' key to indicate type self.file_desc = file_desc # pillarsdk.File object, or None if a 'folder' node. self.label_text = label_text self._thumb_path = '' self.icon = None self._is_folder = node['node_type'] in self.FOLDER_NODE_TYPES self._is_spinning = False # Determine sorting order. # by default, sort all the way at the end and folders first. 
self._order = 0 if self._is_folder else 10000 if node and node.properties and node.properties.order is not None: self._order = node.properties.order self.thumb_path = thumb_path # Updated when drawing the image self.x = 0 self.y = 0 self.width = 0 self.height = 0 def sort_key(self): """Key for sorting lists of MenuItems.""" return self._order, self.label_text @property def thumb_path(self) -> str: return self._thumb_path @thumb_path.setter def thumb_path(self, new_thumb_path: str): self._is_spinning = new_thumb_path == 'SPINNER' self._thumb_path = self.DEFAULT_ICONS.get(new_thumb_path, new_thumb_path) if self._thumb_path: self.icon = bpy.data.images.load(filepath=self._thumb_path) else: self.icon = None @property def node_uuid(self) -> str: return self.node['_id'] def represents(self, node) -> bool: """Returns True iff this MenuItem represents the given node.""" node_uuid = node['_id'] return self.node_uuid == node_uuid def update(self, node, file_desc, thumb_path: str, label_text=None): # We can get updated information about our Node, but a MenuItem should # always represent one node, and it shouldn't be shared between nodes. if self.node_uuid != node['_id']: raise ValueError("Don't change the node ID this MenuItem reflects, " "just create a new one.") self.node = node self.file_desc = file_desc # pillarsdk.File object, or None if a 'folder' node. 
self.thumb_path = thumb_path if label_text is not None: self.label_text = label_text @property def is_folder(self) -> bool: return self._is_folder @property def is_spinning(self) -> bool: return self._is_spinning def update_placement(self, x, y, width, height): """Use OpenGL to draw this one menu item.""" self.x = x self.y = y self.width = width self.height = height def draw(self, highlighted: bool): bgl.glEnable(bgl.GL_BLEND) if highlighted: bgl.glColor4f(0.555, 0.555, 0.555, 0.8) else: bgl.glColor4f(0.447, 0.447, 0.447, 0.8) bgl.glRectf(self.x, self.y, self.x + self.width, self.y + self.height) texture = self.icon err = texture.gl_load(filter=bgl.GL_NEAREST, mag=bgl.GL_NEAREST) assert not err, 'OpenGL error: %i' % err bgl.glColor4f(0.0, 0.0, 1.0, 0.5) # bgl.glLineWidth(1.5) # ------ TEXTURE ---------# bgl.glBindTexture(bgl.GL_TEXTURE_2D, texture.bindcode[0]) bgl.glEnable(bgl.GL_TEXTURE_2D) bgl.glBlendFunc(bgl.GL_SRC_ALPHA, bgl.GL_ONE_MINUS_SRC_ALPHA) bgl.glColor4f(1, 1, 1, 1) bgl.glBegin(bgl.GL_QUADS) bgl.glTexCoord2d(0, 0) bgl.glVertex2d(self.x + self.icon_margin_x, self.y) bgl.glTexCoord2d(0, 1) bgl.glVertex2d(self.x + self.icon_margin_x, self.y + ICON_HEIGHT) bgl.glTexCoord2d(1, 1) bgl.glVertex2d(self.x + self.icon_margin_x + ICON_WIDTH, self.y + ICON_HEIGHT) bgl.glTexCoord2d(1, 0) bgl.glVertex2d(self.x + self.icon_margin_x + ICON_WIDTH, self.y) bgl.glEnd() bgl.glDisable(bgl.GL_TEXTURE_2D) bgl.glDisable(bgl.GL_BLEND) texture.gl_free() # draw some text font_id = 0 blf.position(font_id, self.x + self.icon_margin_x + ICON_WIDTH + self.text_margin_x, self.y + ICON_HEIGHT * 0.5 - 0.25 * self.text_height, 0) blf.size(font_id, self.text_height, self.text_width) blf.draw(font_id, self.label_text) def hits(self, mouse_x: int, mouse_y: int) -> bool: return self.x < mouse_x < self.x + self.width and self.y < mouse_y < self.y + self.height class BlenderCloudBrowser(pillar.PillarOperatorMixin, async_loop.AsyncModalOperatorMixin, bpy.types.Operator): bl_idname = 
'pillar.browser' bl_label = 'Blender Cloud Texture Browser' _draw_handle = None current_path = pillar.CloudPath('/') project_name = '' # This contains a stack of Node objects that lead up to the currently browsed node. path_stack = [] # This contains a stack of MenuItem objects that lead up to the currently browsed node. menu_item_stack = [] timer = None log = logging.getLogger('%s.BlenderCloudBrowser' % __name__) _menu_item_lock = threading.Lock() current_display_content = [] # list of MenuItems currently displayed loaded_images = set()<|fim▁hole|> thumbnails_cache = '' maximized_area = False mouse_x = 0 mouse_y = 0 scroll_offset = 0 scroll_offset_target = 0 scroll_offset_max = 0 scroll_offset_space_left = 0 def invoke(self, context, event): # Refuse to start if the file hasn't been saved. It's okay if # it's dirty, we just need to know where '//' points to. if not os.path.exists(context.blend_data.filepath): self.report({'ERROR'}, 'Please save your Blend file before using ' 'the Blender Cloud addon.') return {'CANCELLED'} wm = context.window_manager self.current_path = pillar.CloudPath(wm.last_blender_cloud_location) self.path_stack = [] # list of nodes that make up the current path. 
self.thumbnails_cache = cache.cache_directory('thumbnails') self.mouse_x = event.mouse_x self.mouse_y = event.mouse_y # See if we have to maximize the current area if not context.screen.show_fullscreen: self.maximized_area = True bpy.ops.screen.screen_full_area(use_hide_panels=True) # Add the region OpenGL drawing callback # draw in view space with 'POST_VIEW' and 'PRE_VIEW' self._draw_handle = context.space_data.draw_handler_add( self.draw_menu, (context,), 'WINDOW', 'POST_PIXEL') self.current_display_content = [] self.loaded_images = set() self._scroll_reset() context.window.cursor_modal_set('DEFAULT') return async_loop.AsyncModalOperatorMixin.invoke(self, context, event) def modal(self, context, event): result = async_loop.AsyncModalOperatorMixin.modal(self, context, event) if not {'PASS_THROUGH', 'RUNNING_MODAL'}.intersection(result): return result if event.type == 'TAB' and event.value == 'RELEASE': self.log.info('Ensuring async loop is running') async_loop.ensure_async_loop() if event.type == 'TIMER': self._scroll_smooth() context.area.tag_redraw() return {'RUNNING_MODAL'} if 'MOUSE' in event.type: context.area.tag_redraw() self.mouse_x = event.mouse_x self.mouse_y = event.mouse_y left_mouse_release = event.type == 'LEFTMOUSE' and event.value == 'RELEASE' if self._state == 'PLEASE_SUBSCRIBE' and left_mouse_release: self.open_browser_subscribe() self._finish(context) return {'FINISHED'} if self._state == 'BROWSING': selected = self.get_clicked() if selected: if selected.is_spinning: context.window.cursor_set('WAIT') else: context.window.cursor_set('HAND') else: context.window.cursor_set('DEFAULT') # Scrolling if event.type == 'WHEELUPMOUSE': self._scroll_by(MOUSE_SCROLL_PIXELS_PER_TICK) context.area.tag_redraw() elif event.type == 'WHEELDOWNMOUSE': self._scroll_by(-MOUSE_SCROLL_PIXELS_PER_TICK) context.area.tag_redraw() elif event.type == 'TRACKPADPAN': self._scroll_by(event.mouse_prev_y - event.mouse_y, smooth=False) context.area.tag_redraw() if 
left_mouse_release: if selected is None: # No item clicked, ignore it. return {'RUNNING_MODAL'} if selected.is_spinning: # This can happen when the thumbnail information isn't loaded yet. return {'RUNNING_MODAL'} if selected.is_folder: self.descend_node(selected) else: self.handle_item_selection(context, selected) if event.type in {'RIGHTMOUSE', 'ESC'}: self._finish(context) return {'CANCELLED'} return {'RUNNING_MODAL'} async def async_execute(self, context): self._state = 'CHECKING_CREDENTIALS' self.log.debug('Checking credentials') try: db_user = await self.check_credentials(context, REQUIRED_ROLES_FOR_TEXTURE_BROWSER) except pillar.NotSubscribedToCloudError: self.log.info('User not subscribed to Blender Cloud.') self._show_subscribe_screen() return None if db_user is None: raise pillar.UserNotLoggedInError() await self.async_download_previews() def _show_subscribe_screen(self): """Shows the "You need to subscribe" screen.""" self._state = 'PLEASE_SUBSCRIBE' bpy.context.window.cursor_set('HAND') def descend_node(self, menu_item: MenuItem): """Descends the node hierarchy by visiting this menu item's node. Also keeps track of the current node, so that we know where the "up" button should go. """ node = menu_item.node assert isinstance(node, pillarsdk.Node), 'Wrong type %s' % node if isinstance(node, UpNode): # Going up. 
self.log.debug('Going up to %r', self.current_path) self.current_path = self.current_path.parent if self.path_stack: self.path_stack.pop() if self.menu_item_stack: self.menu_item_stack.pop() if not self.path_stack: self.project_name = '' else: # Going down, keep track of where we were if isinstance(node, ProjectNode): self.project_name = node['name'] self.current_path /= node['_id'] self.log.debug('Going down to %r', self.current_path) self.path_stack.append(node) self.menu_item_stack.append(menu_item) self.browse_assets() @property def node(self): if not self.path_stack: return None return self.path_stack[-1] def _finish(self, context): self.log.debug('Finishing the modal operator') async_loop.AsyncModalOperatorMixin._finish(self, context) self.clear_images() context.space_data.draw_handler_remove(self._draw_handle, 'WINDOW') context.window.cursor_modal_restore() if self.maximized_area: bpy.ops.screen.screen_full_area(use_hide_panels=True) context.area.tag_redraw() self.log.debug('Modal operator finished') def clear_images(self): """Removes all images we loaded from Blender's memory.""" for image in bpy.data.images: if image.filepath_raw not in self.loaded_images: continue image.user_clear() bpy.data.images.remove(image) self.loaded_images.clear() self.current_display_content.clear() def add_menu_item(self, *args) -> MenuItem: menu_item = MenuItem(*args) # Just make this thread-safe to be on the safe side. with self._menu_item_lock: self.current_display_content.append(menu_item) self.loaded_images.add(menu_item.icon.filepath_raw) self.sort_menu() return menu_item def update_menu_item(self, node, *args) -> MenuItem: node_uuid = node['_id'] # Just make this thread-safe to be on the safe side. 
with self._menu_item_lock: for menu_item in self.current_display_content: if menu_item.represents(node): menu_item.update(node, *args) self.loaded_images.add(menu_item.icon.filepath_raw) break else: raise ValueError('Unable to find MenuItem(node_uuid=%r)' % node_uuid) self.sort_menu() def sort_menu(self): """Sorts the self.current_display_content list.""" if not self.current_display_content: return with self._menu_item_lock: self.current_display_content.sort(key=MenuItem.sort_key) async def async_download_previews(self): self._state = 'BROWSING' thumbnails_directory = self.thumbnails_cache self.log.info('Asynchronously downloading previews to %r', thumbnails_directory) self.log.info('Current BCloud path is %r', self.current_path) self.clear_images() self._scroll_reset() project_uuid = self.current_path.project_uuid node_uuid = self.current_path.node_uuid if node_uuid: # Query for sub-nodes of this node. self.log.debug('Getting subnodes for parent node %r', node_uuid) children = await pillar.get_nodes(parent_node_uuid=node_uuid, node_type={'group_texture', 'group_hdri'}) elif project_uuid: # Query for top-level nodes. self.log.debug('Getting subnodes for project node %r', project_uuid) children = await pillar.get_nodes(project_uuid=project_uuid, parent_node_uuid='', node_type={'group_texture', 'group_hdri'}) else: # Query for projects self.log.debug('No node UUID and no project UUID, listing available projects') children = await pillar.get_texture_projects() for proj_dict in children: self.add_menu_item(ProjectNode(proj_dict), None, 'FOLDER', proj_dict['name']) return # Make sure we can go up again. self.add_menu_item(UpNode(), None, 'FOLDER', '.. 
up ..') # Download all child nodes self.log.debug('Iterating over child nodes of %r', self.current_path) for child in children: # print(' - %(_id)s = %(name)s' % child) if child['node_type'] not in MenuItem.SUPPORTED_NODE_TYPES: self.log.debug('Skipping node of type %r', child['node_type']) continue self.add_menu_item(child, None, 'FOLDER', child['name']) # There are only sub-nodes at the project level, no texture nodes, # so we won't have to bother looking for textures. if not node_uuid: return directory = os.path.join(thumbnails_directory, project_uuid, node_uuid) os.makedirs(directory, exist_ok=True) self.log.debug('Fetching texture thumbnails for node %r', node_uuid) def thumbnail_loading(node, texture_node): self.add_menu_item(node, None, 'SPINNER', texture_node['name']) def thumbnail_loaded(node, file_desc, thumb_path): self.update_menu_item(node, file_desc, thumb_path) await pillar.fetch_texture_thumbs(node_uuid, 's', directory, thumbnail_loading=thumbnail_loading, thumbnail_loaded=thumbnail_loaded, future=self.signalling_future) def browse_assets(self): self.log.debug('Browsing assets at %r', self.current_path) self._new_async_task(self.async_download_previews()) def draw_menu(self, context): """Draws the GUI with OpenGL.""" drawers = { 'CHECKING_CREDENTIALS': self._draw_checking_credentials, 'BROWSING': self._draw_browser, 'DOWNLOADING_TEXTURE': self._draw_downloading, 'EXCEPTION': self._draw_exception, 'PLEASE_SUBSCRIBE': self._draw_subscribe, } if self._state in drawers: drawer = drawers[self._state] drawer(context) # For debugging: draw the state font_id = 0 bgl.glColor4f(1.0, 1.0, 1.0, 1.0) blf.size(font_id, 20, 72) blf.position(font_id, 5, 5, 0) blf.draw(font_id, '%s %s' % (self._state, self.project_name)) bgl.glDisable(bgl.GL_BLEND) @staticmethod def _window_region(context): window_regions = [region for region in context.area.regions if region.type == 'WINDOW'] return window_regions[0] def _draw_browser(self, context): """OpenGL drawing code for the 
BROWSING state.""" window_region = self._window_region(context) content_width = window_region.width - ITEM_MARGIN_X * 2 content_height = window_region.height - ITEM_MARGIN_Y * 2 content_x = ITEM_MARGIN_X content_y = context.area.height - ITEM_MARGIN_Y - TARGET_ITEM_HEIGHT col_count = content_width // TARGET_ITEM_WIDTH item_width = (content_width - (col_count * ITEM_PADDING_X)) / col_count item_height = TARGET_ITEM_HEIGHT block_width = item_width + ITEM_PADDING_X block_height = item_height + ITEM_MARGIN_Y bgl.glEnable(bgl.GL_BLEND) bgl.glColor4f(0.0, 0.0, 0.0, 0.6) bgl.glRectf(0, 0, window_region.width, window_region.height) if self.current_display_content: bottom_y = float('inf') # The -1 / +2 are for extra rows that are drawn only half at the top/bottom. first_item_idx = max(0, int(-self.scroll_offset // block_height - 1) * col_count) items_per_page = int(content_height // item_height + 2) * col_count last_item_idx = first_item_idx + items_per_page for item_idx, item in enumerate(self.current_display_content): x = content_x + (item_idx % col_count) * block_width y = content_y - (item_idx // col_count) * block_height - self.scroll_offset item.update_placement(x, y, item_width, item_height) if first_item_idx <= item_idx < last_item_idx: # Only draw if the item is actually on screen. 
item.draw(highlighted=item.hits(self.mouse_x, self.mouse_y)) bottom_y = min(y, bottom_y) self.scroll_offset_space_left = window_region.height - bottom_y self.scroll_offset_max = (self.scroll_offset - self.scroll_offset_space_left + 0.25 * block_height) else: font_id = 0 text = "Communicating with Blender Cloud" bgl.glColor4f(1.0, 1.0, 1.0, 1.0) blf.size(font_id, 20, 72) text_width, text_height = blf.dimensions(font_id, text) blf.position(font_id, content_x + content_width * 0.5 - text_width * 0.5, content_y - content_height * 0.3 + text_height * 0.5, 0) blf.draw(font_id, text) bgl.glDisable(bgl.GL_BLEND) # bgl.glColor4f(0.0, 0.0, 0.0, 1.0) def _draw_downloading(self, context): """OpenGL drawing code for the DOWNLOADING_TEXTURE state.""" self._draw_text_on_colour(context, 'Downloading texture from Blender Cloud', (0.0, 0.0, 0.2, 0.6)) def _draw_checking_credentials(self, context): """OpenGL drawing code for the CHECKING_CREDENTIALS state.""" self._draw_text_on_colour(context, 'Checking login credentials', (0.0, 0.0, 0.2, 0.6)) def _draw_text_on_colour(self, context, text, bgcolour): content_height, content_width = self._window_size(context) bgl.glEnable(bgl.GL_BLEND) bgl.glColor4f(*bgcolour) bgl.glRectf(0, 0, content_width, content_height) font_id = 0 bgl.glColor4f(1.0, 1.0, 1.0, 1.0) blf.size(font_id, 20, 72) text_width, text_height = blf.dimensions(font_id, text) blf.position(font_id, content_width * 0.5 - text_width * 0.5, content_height * 0.7 + text_height * 0.5, 0) blf.draw(font_id, text) bgl.glDisable(bgl.GL_BLEND) def _window_size(self, context): window_region = self._window_region(context) content_width = window_region.width content_height = window_region.height return content_height, content_width def _draw_exception(self, context): """OpenGL drawing code for the EXCEPTION state.""" import textwrap content_height, content_width = self._window_size(context) bgl.glEnable(bgl.GL_BLEND) bgl.glColor4f(0.2, 0.0, 0.0, 0.6) bgl.glRectf(0, 0, content_width, 
content_height) font_id = 0 ex = self.async_task.exception() if isinstance(ex, pillar.UserNotLoggedInError): ex_msg = 'You are not logged in on Blender ID. Please log in at User Preferences, ' \ 'System, Blender ID.' else: ex_msg = str(ex) if not ex_msg: ex_msg = str(type(ex)) text = "An error occurred:\n%s" % ex_msg lines = textwrap.wrap(text) bgl.glColor4f(1.0, 1.0, 1.0, 1.0) blf.size(font_id, 20, 72) _, text_height = blf.dimensions(font_id, 'yhBp') def position(line_nr): blf.position(font_id, content_width * 0.1, content_height * 0.8 - line_nr * text_height, 0) for line_idx, line in enumerate(lines): position(line_idx) blf.draw(font_id, line) bgl.glDisable(bgl.GL_BLEND) def _draw_subscribe(self, context): self._draw_text_on_colour(context, 'Click to subscribe to the Blender Cloud', (0.0, 0.0, 0.2, 0.6)) def get_clicked(self) -> MenuItem: for item in self.current_display_content: if item.hits(self.mouse_x, self.mouse_y): return item return None def handle_item_selection(self, context, item: MenuItem): """Called when the user clicks on a menu item that doesn't represent a folder.""" from pillarsdk.utils import sanitize_filename self.clear_images() self._state = 'DOWNLOADING_TEXTURE' node_path_components = (node['name'] for node in self.path_stack if node is not None) local_path_components = [sanitize_filename(comp) for comp in node_path_components] top_texture_directory = bpy.path.abspath(context.scene.local_texture_dir) local_path = os.path.join(top_texture_directory, *local_path_components) meta_path = os.path.join(top_texture_directory, '.blender_cloud') self.log.info('Downloading texture %r to %s', item.node_uuid, local_path) self.log.debug('Metadata will be stored at %s', meta_path) file_paths = [] select_dblock = None node = item.node def texture_downloading(file_path, *_): self.log.info('Texture downloading to %s', file_path) def texture_downloaded(file_path, file_desc, map_type): nonlocal select_dblock self.log.info('Texture downloaded to %r.', file_path) 
if context.scene.local_texture_dir.startswith('//'): file_path = bpy.path.relpath(file_path) image_dblock = bpy.data.images.load(filepath=file_path) image_dblock['bcloud_file_uuid'] = file_desc['_id'] image_dblock['bcloud_node_uuid'] = node['_id'] image_dblock['bcloud_node_type'] = node['node_type'] image_dblock['bcloud_node'] = pillar.node_to_id(node) if node['node_type'] == 'hdri': # All HDRi variations should use the same image datablock, hence once name. image_dblock.name = node['name'] else: # All texture variations are loaded at once, and thus need the map type in the name. image_dblock.name = '%s-%s' % (node['name'], map_type) # Select the image in the image editor (if the context is right). # Just set the first image we download, if context.area.type == 'IMAGE_EDITOR': if select_dblock is None or file_desc.map_type == 'color': select_dblock = image_dblock context.space_data.image = select_dblock file_paths.append(file_path) def texture_download_completed(_): self.log.info('Texture download complete, inspect:\n%s', '\n'.join(file_paths)) self._state = 'QUIT' # For HDRi nodes: only download the first file. 
download_node = pillarsdk.Node.new(node) if node['node_type'] == 'hdri': download_node.properties.files = [download_node.properties.files[0]] signalling_future = asyncio.Future() self._new_async_task(pillar.download_texture(download_node, local_path, metadata_directory=meta_path, texture_loading=texture_downloading, texture_loaded=texture_downloaded, future=signalling_future)) self.async_task.add_done_callback(texture_download_completed) def open_browser_subscribe(self): import webbrowser webbrowser.open_new_tab('https://cloud.blender.org/join') self.report({'INFO'}, 'We just started a browser for you.') def _scroll_smooth(self): diff = self.scroll_offset_target - self.scroll_offset if diff == 0: return if abs(round(diff)) < 1: self.scroll_offset = self.scroll_offset_target return self.scroll_offset += diff * 0.5 def _scroll_by(self, amount, *, smooth=True): # Slow down scrolling up if smooth and amount < 0 and -amount > self.scroll_offset_space_left / 4: amount = -self.scroll_offset_space_left / 4 self.scroll_offset_target = min(0, max(self.scroll_offset_max, self.scroll_offset_target + amount)) if not smooth: self._scroll_offset = self.scroll_offset_target def _scroll_reset(self): self.scroll_offset_target = self.scroll_offset = 0 class PILLAR_OT_switch_hdri(pillar.PillarOperatorMixin, async_loop.AsyncModalOperatorMixin, bpy.types.Operator): bl_idname = 'pillar.switch_hdri' bl_label = 'Switch with another variation' bl_description = 'Downloads the selected variation of an HDRi, ' \ 'replacing the current image' log = logging.getLogger('bpy.ops.%s' % bl_idname) image_name = bpy.props.StringProperty(name='image_name', description='Name of the image block to replace') file_uuid = bpy.props.StringProperty(name='file_uuid', description='File ID to download') async def async_execute(self, context): """Entry point of the asynchronous operator.""" self.report({'INFO'}, 'Communicating with Blender Cloud') try: try: db_user = await self.check_credentials(context, 
REQUIRED_ROLES_FOR_TEXTURE_BROWSER) user_id = db_user['_id'] except pillar.NotSubscribedToCloudError: self.log.exception('User not subscribed to cloud.') self.report({'ERROR'}, 'Please subscribe to the Blender Cloud.') self._state = 'QUIT' return except pillar.UserNotLoggedInError: self.log.exception('Error checking/refreshing credentials.') self.report({'ERROR'}, 'Please log in on Blender ID first.') self._state = 'QUIT' return if not user_id: raise pillar.UserNotLoggedInError() await self.download_and_replace(context) except Exception as ex: self.log.exception('Unexpected exception caught.') self.report({'ERROR'}, 'Unexpected error %s: %s' % (type(ex), ex)) self._state = 'QUIT' async def download_and_replace(self, context): from .pillar import sanitize_filename self._state = 'DOWNLOADING_TEXTURE' current_image = bpy.data.images[self.image_name] node = current_image['bcloud_node'] filename = '%s.taken_from_file' % sanitize_filename(node['name']) local_path = os.path.dirname(bpy.path.abspath(current_image.filepath)) top_texture_directory = bpy.path.abspath(context.scene.local_texture_dir) meta_path = os.path.join(top_texture_directory, '.blender_cloud') file_uuid = self.file_uuid resolution = next(file_ref['resolution'] for file_ref in node['properties']['files'] if file_ref['file'] == file_uuid) self.log.info('Downloading file %r-%s to %s', file_uuid, resolution, local_path) self.log.debug('Metadata will be stored at %s', meta_path) def file_loading(file_path, file_desc, map_type): self.log.info('Texture downloading to %s (%s)', file_path, utils.sizeof_fmt(file_desc['length'])) async def file_loaded(file_path, file_desc, map_type): if context.scene.local_texture_dir.startswith('//'): file_path = bpy.path.relpath(file_path) self.log.info('Texture downloaded to %s', file_path) current_image['bcloud_file_uuid'] = file_uuid current_image.filepath = file_path # This automatically reloads the image from disk. 
await pillar.download_file_by_uuid(file_uuid, local_path, meta_path, filename=filename, map_type=resolution, file_loading=file_loading, file_loaded_sync=file_loaded, future=self.signalling_future) self.report({'INFO'}, 'Image download complete') # store keymaps here to access after registration addon_keymaps = [] def image_editor_menu(self, context): self.layout.operator(BlenderCloudBrowser.bl_idname, text='Get image from Blender Cloud', icon_value=blender.icon('CLOUD')) def hdri_download_panel__image_editor(self, context): _hdri_download_panel(self, context.edit_image) def hdri_download_panel__node_editor(self, context): if context.active_node.type not in {'TEX_ENVIRONMENT', 'TEX_IMAGE'}: return _hdri_download_panel(self, context.active_node.image) def _hdri_download_panel(self, current_image): if not current_image: return if 'bcloud_node_type' not in current_image: return if current_image['bcloud_node_type'] != 'hdri': return try: current_variation = current_image['bcloud_file_uuid'] except KeyError: log.warning('Image %r has a bcloud_node_type but no bcloud_file_uuid property.', current_image.name) return row = self.layout.row(align=True).split(0.3) row.label('HDRi', icon_value=blender.icon('CLOUD')) row.prop(current_image, 'hdri_variation', text='') if current_image.hdri_variation != current_variation: props = row.operator(PILLAR_OT_switch_hdri.bl_idname, text='Replace', icon='FILE_REFRESH') props.image_name = current_image.name props.file_uuid = current_image.hdri_variation # Storage for variation labels, as the strings in EnumProperty items # MUST be kept in Python memory. 
variation_label_storage = {} def hdri_variation_choices(self, context): if context.area.type == 'IMAGE_EDITOR': image = context.edit_image elif context.area.type == 'NODE_EDITOR': image = context.active_node.image else: return [] if 'bcloud_node' not in image: return [] choices = [] for file_doc in image['bcloud_node']['properties']['files']: label = file_doc['resolution'] variation_label_storage[label] = label choices.append((file_doc['file'], label, '')) return choices def register(): bpy.utils.register_class(BlenderCloudBrowser) bpy.utils.register_class(PILLAR_OT_switch_hdri) bpy.types.IMAGE_MT_image.prepend(image_editor_menu) bpy.types.IMAGE_PT_image_properties.append(hdri_download_panel__image_editor) bpy.types.NODE_PT_active_node_properties.append(hdri_download_panel__node_editor) # HDRi resolution switcher/chooser. # TODO: when an image is selected, switch this property to its current resolution. bpy.types.Image.hdri_variation = bpy.props.EnumProperty( name='HDRi variations', items=hdri_variation_choices, description='Select a variation with which to replace this image' ) # handle the keymap wm = bpy.context.window_manager kc = wm.keyconfigs.addon if not kc: print('No addon key configuration space found, so no custom hotkeys added.') return km = kc.keymaps.new(name='Screen') kmi = km.keymap_items.new('pillar.browser', 'A', 'PRESS', ctrl=True, shift=True, alt=True) addon_keymaps.append((km, kmi)) def unregister(): # handle the keymap for km, kmi in addon_keymaps: km.keymap_items.remove(kmi) addon_keymaps.clear() if hasattr(bpy.types.Image, 'hdri_variation'): del bpy.types.Image.hdri_variation bpy.types.IMAGE_MT_image.remove(image_editor_menu) bpy.types.IMAGE_PT_image_properties.remove(hdri_download_panel__image_editor) bpy.types.NODE_PT_active_node_properties.remove(hdri_download_panel__node_editor) bpy.utils.unregister_class(BlenderCloudBrowser) bpy.utils.unregister_class(PILLAR_OT_switch_hdri)<|fim▁end|>
<|file_name|>test_distribute.py<|end_file_name|><|fim▁begin|>from pathlib import Path from subprocess import (PIPE, Popen) import fnmatch import shutil import os def test_distribute(tmp_path): """ Check that the scripts to compute a trajectory are generated correctly """ cmd1 = "distribute_jobs.py -i test/test_files/input_test_distribute_derivative_couplings.yml" cmd2 = "distribute_jobs.py -i test/test_files/input_test_distribute_absorption_spectrum.yml" for cmd in [cmd1, cmd2]: print("testing: ", cmd) call_distribute(tmp_path, cmd) def call_distribute(tmp_path, cmd): """ Execute the distribute script and check that if finish succesfully. """ try: p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, shell=True) out, err = p.communicate() if err: raise RuntimeError(err) check_scripts() finally: remove_chunk_folder() <|fim▁hole|> paths = fnmatch.filter(os.listdir('.'), "chunk*") # Check that the files are created correctly files = ["launch.sh", "chunk_xyz*", "input.yml"] for p in paths: p = Path(p) for f in files: try: next(p.glob(f)) except StopIteration: msg = f"There is not file: {f}" print(msg) raise RuntimeError(msg) def remove_chunk_folder(): """ Remove resulting scripts """ for path in fnmatch.filter(os.listdir('.'), "chunk*"): shutil.rmtree(path)<|fim▁end|>
def check_scripts(): """ Check that the distribution scripts were created correctly """
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin from holidays.models import (Holiday, StaticHoliday, NthXDayHoliday, NthXDayAfterHoliday, CustomHoliday) class HolidayAdmin(admin.ModelAdmin): pass class StaticHolidayAdmin(admin.ModelAdmin): pass class NthXDayHolidayAdmin(admin.ModelAdmin): pass <|fim▁hole|>class NthXDayAfterHolidayAdmin(admin.ModelAdmin): pass class CustomHolidayAdmin(admin.ModelAdmin): pass admin.site.register(Holiday, HolidayAdmin) admin.site.register(StaticHoliday, StaticHolidayAdmin) admin.site.register(NthXDayHoliday, NthXDayHolidayAdmin) admin.site.register(NthXDayAfterHoliday, NthXDayAfterHolidayAdmin) admin.site.register(CustomHoliday, CustomHolidayAdmin)<|fim▁end|>
<|file_name|>Subject.js<|end_file_name|><|fim▁begin|>import Subject from "parsers/Subject"; describe("parsers/Subject", () => { it("should split valid subject lines into object hash", () => { let subject = "type(scope): summary summary summary"; let pull = { commits: [{ commit: { message: subject } }] }; <|fim▁hole|> expect((new Subject()).parse(pull)).toEqual({ type: "type", scope: "scope", summary: "summary summary summary" }); }); it("should return object with null values on invalid message", () => { let subject = "type(scope) summary summary summary"; let pull = { commits: [{ commit: { message: subject } }] }; expect((new Subject()).parse(pull)).toEqual({ type: null, scope: null, summary: null }); }); it("should parse subjects with special characters", () => { let subject = "type($state!): summary summary summary"; let pull = { commits: [{ commit: { message: subject } }] }; expect((new Subject()).parse(pull).scope).toBe("$state!"); }); });<|fim▁end|>
<|file_name|>task_2.py<|end_file_name|><|fim▁begin|>#task_2<|fim▁hole|><|fim▁end|>
a = int(input()) print(sum(list(map(int, list((bin(a))[2:])))))
<|file_name|>upgrade_ng1_adapter.ts<|end_file_name|><|fim▁begin|>import { Directive, DoCheck, ElementRef, EventEmitter, Inject, OnInit, OnChanges, SimpleChange, Type } from 'angular2/core'; import { NG1_COMPILE, NG1_SCOPE, NG1_HTTP_BACKEND, NG1_TEMPLATE_CACHE, NG1_CONTROLLER } from './constants'; import {controllerKey} from './util'; import * as angular from './angular_js'; const CAMEL_CASE = /([A-Z])/g; const INITIAL_VALUE = { __UNINITIALIZED__: true }; const NOT_SUPPORTED: any = 'NOT_SUPPORTED'; export class UpgradeNg1ComponentAdapterBuilder { type: Type; inputs: string[] = []; inputsRename: string[] = []; outputs: string[] = []; outputsRename: string[] = []; propertyOutputs: string[] = []; checkProperties: string[] = []; propertyMap: {[name: string]: string} = {}; linkFn: angular.ILinkFn = null; directive: angular.IDirective = null; $controller: angular.IControllerService = null; constructor(public name: string) { var selector = name.replace(CAMEL_CASE, (all, next: string) => '-' + next.toLowerCase()); var self = this; this.type = Directive({selector: selector, inputs: this.inputsRename, outputs: this.outputsRename}) .Class({ constructor: [ new Inject(NG1_SCOPE), ElementRef, function(scope: angular.IScope, elementRef: ElementRef) { return new UpgradeNg1ComponentAdapter( self.linkFn, scope, self.directive, elementRef, self.$controller, self.inputs, self.outputs, self.propertyOutputs, self.checkProperties, self.propertyMap); } ], ngOnInit: function() { /* needs to be here for ng2 to properly detect it */ }, ngOnChanges: function() { /* needs to be here for ng2 to properly detect it */ }, ngDoCheck: function() { /* needs to be here for ng2 to properly detect it */ } }); } extractDirective(injector: angular.IInjectorService): angular.IDirective { var directives: angular.IDirective[] = injector.get(this.name + 'Directive'); if (directives.length > 1) { throw new Error('Only support single directive definition for: ' + this.name); } var directive = directives[0]; if 
(directive.replace) this.notSupported('replace'); if (directive.terminal) this.notSupported('terminal'); var link = directive.link; if (typeof link == 'object') { if ((<angular.IDirectivePrePost>link).post) this.notSupported('link.post'); } return directive; } private notSupported(feature: string) { throw new Error(`Upgraded directive '${this.name}' does not support '${feature}'.`); } extractBindings() { var btcIsObject = typeof this.directive.bindToController === 'object'; if (btcIsObject && Object.keys(this.directive.scope).length) { throw new Error( `Binding definitions on scope and controller at the same time are not supported.`); } var context = (btcIsObject) ? this.directive.bindToController : this.directive.scope; if (typeof context == 'object') { for (var name in context) { if ((<any>context).hasOwnProperty(name)) { var localName = context[name]; var type = localName.charAt(0); localName = localName.substr(1) || name; var outputName = 'output_' + name; var outputNameRename = outputName + ': ' + name; var outputNameRenameChange = outputName + ': ' + name + 'Change'; var inputName = 'input_' + name; var inputNameRename = inputName + ': ' + name; switch (type) { case '=': this.propertyOutputs.push(outputName); this.checkProperties.push(localName); this.outputs.push(outputName); this.outputsRename.push(outputNameRenameChange); this.propertyMap[outputName] = localName; // don't break; let it fall through to '@' case '@': // handle the '<' binding of angular 1.5 components case '<': this.inputs.push(inputName); this.inputsRename.push(inputNameRename); this.propertyMap[inputName] = localName; break; case '&': this.outputs.push(outputName); this.outputsRename.push(outputNameRename); this.propertyMap[outputName] = localName; break; default: var json = JSON.stringify(context); throw new Error( `Unexpected mapping '${type}' in '${json}' in '${this.name}' directive.`); } } } } } compileTemplate(compile: angular.ICompileService, templateCache: 
angular.ITemplateCacheService, httpBackend: angular.IHttpBackendService): Promise<any> { if (this.directive.template !== undefined) { this.linkFn = compileHtml(this.directive.template); } else if (this.directive.templateUrl) { var url = this.directive.templateUrl; var html = templateCache.get(url); if (html !== undefined) { this.linkFn = compileHtml(html); } else { return new Promise((resolve, err) => { httpBackend('GET', url, null, (status, response) => { if (status == 200) { resolve(this.linkFn = compileHtml(templateCache.put(url, response))); } else { err(`GET ${url} returned ${status}: ${response}`); } }); }); } } else { throw new Error(`Directive '${this.name}' is not a component, it is missing template.`); } return null; function compileHtml(html): angular.ILinkFn { var div = document.createElement('div'); div.innerHTML = html; return compile(div.childNodes); } } static resolve(exportedComponents: {[name: string]: UpgradeNg1ComponentAdapterBuilder}, injector: angular.IInjectorService): Promise<any> { var promises = []; var compile: angular.ICompileService = injector.get(NG1_COMPILE); var templateCache: angular.ITemplateCacheService = injector.get(NG1_TEMPLATE_CACHE); var httpBackend: angular.IHttpBackendService = injector.get(NG1_HTTP_BACKEND); var $controller: angular.IControllerService = injector.get(NG1_CONTROLLER); for (var name in exportedComponents) { if ((<any>exportedComponents).hasOwnProperty(name)) { var exportedComponent = exportedComponents[name]; exportedComponent.directive = exportedComponent.extractDirective(injector); exportedComponent.$controller = $controller; exportedComponent.extractBindings(); var promise = exportedComponent.compileTemplate(compile, templateCache, httpBackend); if (promise) promises.push(promise); } } return Promise.all(promises); } } <|fim▁hole|> checkLastValues: any[] = []; componentScope: angular.IScope; element: Element; constructor(private linkFn: angular.ILinkFn, scope: angular.IScope, private directive: 
angular.IDirective, elementRef: ElementRef, $controller: angular.IControllerService, private inputs: string[], private outputs: string[], private propOuts: string[], private checkProperties: string[], private propertyMap: {[key: string]: string}) { this.element = elementRef.nativeElement; this.componentScope = scope.$new(!!directive.scope); var $element = angular.element(this.element); var controllerType = directive.controller; var controller: any = null; if (controllerType) { var locals = {$scope: this.componentScope, $element: $element}; controller = $controller(controllerType, locals, null, directive.controllerAs); $element.data(controllerKey(directive.name), controller); } var link = directive.link; if (typeof link == 'object') link = (<angular.IDirectivePrePost>link).pre; if (link) { var attrs: angular.IAttributes = NOT_SUPPORTED; var transcludeFn: angular.ITranscludeFunction = NOT_SUPPORTED; var linkController = this.resolveRequired($element, directive.require); (<angular.IDirectiveLinkFn>directive.link)(this.componentScope, $element, attrs, linkController, transcludeFn); } this.destinationObj = directive.bindToController && controller ? 
controller : this.componentScope; for (var i = 0; i < inputs.length; i++) { this[inputs[i]] = null; } for (var j = 0; j < outputs.length; j++) { var emitter = this[outputs[j]] = new EventEmitter(); this.setComponentProperty(outputs[j], ((emitter) => (value) => emitter.emit(value))(emitter)); } for (var k = 0; k < propOuts.length; k++) { this[propOuts[k]] = new EventEmitter(); this.checkLastValues.push(INITIAL_VALUE); } } ngOnInit() { var childNodes: Node[] = []; var childNode; while (childNode = this.element.firstChild) { this.element.removeChild(childNode); childNodes.push(childNode); } this.linkFn(this.componentScope, (clonedElement: Node[], scope: angular.IScope) => { for (var i = 0, ii = clonedElement.length; i < ii; i++) { this.element.appendChild(clonedElement[i]); } }, {parentBoundTranscludeFn: (scope, cloneAttach) => { cloneAttach(childNodes); }}); if (this.destinationObj.$onInit) { this.destinationObj.$onInit(); } } ngOnChanges(changes: {[name: string]: SimpleChange}) { for (var name in changes) { if ((<Object>changes).hasOwnProperty(name)) { var change: SimpleChange = changes[name]; this.setComponentProperty(name, change.currentValue); } } } ngDoCheck(): number { var count = 0; var destinationObj = this.destinationObj; var lastValues = this.checkLastValues; var checkProperties = this.checkProperties; for (var i = 0; i < checkProperties.length; i++) { var value = destinationObj[checkProperties[i]]; var last = lastValues[i]; if (value !== last) { if (typeof value == 'number' && isNaN(value) && typeof last == 'number' && isNaN(last)) { // ignore because NaN != NaN } else { var eventEmitter: EventEmitter<any> = this[this.propOuts[i]]; eventEmitter.emit(lastValues[i] = value); } } } return count; } setComponentProperty(name: string, value: any) { this.destinationObj[this.propertyMap[name]] = value; } private resolveRequired($element: angular.IAugmentedJQuery, require: string | string[]): any { if (!require) { return undefined; } else if (typeof require == 
'string') { var name: string = <string>require; var isOptional = false; var startParent = false; var searchParents = false; var ch: string; if (name.charAt(0) == '?') { isOptional = true; name = name.substr(1); } if (name.charAt(0) == '^') { searchParents = true; name = name.substr(1); } if (name.charAt(0) == '^') { startParent = true; name = name.substr(1); } var key = controllerKey(name); if (startParent) $element = $element.parent(); var dep = searchParents ? $element.inheritedData(key) : $element.data(key); if (!dep && !isOptional) { throw new Error(`Can not locate '${require}' in '${this.directive.name}'.`); } return dep; } else if (require instanceof Array) { var deps = []; for (var i = 0; i < require.length; i++) { deps.push(this.resolveRequired($element, require[i])); } return deps; } throw new Error( `Directive '${this.directive.name}' require syntax unrecognized: ${this.directive.require}`); } }<|fim▁end|>
class UpgradeNg1ComponentAdapter implements OnInit, OnChanges, DoCheck { destinationObj: any = null;
<|file_name|>aggregate-to-folders.js<|end_file_name|><|fim▁begin|>/* eslint-disable security/detect-object-injection */ const path = require("path").posix; const { calculateInstability, metricsAreCalculable } = require("../module-utl"); const { getAfferentCouplings, getEfferentCouplings, getParentFolders, object2Array, } = require("./utl"); function upsertCouplings(pAllDependents, pNewDependents) { pNewDependents.forEach((pNewDependent) => { pAllDependents[pNewDependent] = pAllDependents[pNewDependent] || { count: 0, }; pAllDependents[pNewDependent].count += 1; }); } function upsertFolderAttributes(pAllMetrics, pModule, pDirname) { pAllMetrics[pDirname] = pAllMetrics[pDirname] || { dependencies: {}, dependents: {}, moduleCount: 0, }; upsertCouplings( pAllMetrics[pDirname].dependents, getAfferentCouplings(pModule, pDirname) ); upsertCouplings( pAllMetrics[pDirname].dependencies, getEfferentCouplings(pModule, pDirname).map( (pDependency) => pDependency.resolved ) ); pAllMetrics[pDirname].moduleCount += 1; return pAllMetrics; } function aggregateToFolder(pAllFolders, pModule) { getParentFolders(path.dirname(pModule.source)).forEach((pParentDirectory) => upsertFolderAttributes(pAllFolders, pModule, pParentDirectory) ); return pAllFolders; } function sumCounts(pAll, pCurrent) { return pAll + pCurrent.count; } function getFolderLevelCouplings(pCouplingArray) { return Array.from( new Set( pCouplingArray.map((pCoupling) => path.dirname(pCoupling.name) === "." ? 
pCoupling.name : path.dirname(pCoupling.name) ) ) ).map((pCoupling) => ({ name: pCoupling })); } function calculateFolderMetrics(pFolder) {<|fim▁hole|> // and dependencies?), but it isn't because there can be > 1 relation between // two folders const lAfferentCouplings = lModuleDependents.reduce(sumCounts, 0); const lEfferentCouplings = lModuleDependencies.reduce(sumCounts, 0); return { ...pFolder, afferentCouplings: lAfferentCouplings, efferentCouplings: lEfferentCouplings, instability: calculateInstability(lEfferentCouplings, lAfferentCouplings), dependents: getFolderLevelCouplings(lModuleDependents), dependencies: getFolderLevelCouplings(lModuleDependencies), }; } function findFolderByName(pAllFolders, pName) { return pAllFolders.find((pFolder) => pFolder.name === pName); } function denormalizeInstability(pFolder, _, pAllFolders) { return { ...pFolder, dependencies: pFolder.dependencies.map((pDependency) => { const lFolder = findFolderByName(pAllFolders, pDependency.name) || {}; return { ...pDependency, instability: lFolder.instability >= 0 ? lFolder.instability : 0, }; }), }; } module.exports = function aggregateToFolders(pModules) { const lFolders = object2Array( pModules.filter(metricsAreCalculable).reduce(aggregateToFolder, {}) ).map(calculateFolderMetrics); return lFolders.map(denormalizeInstability); };<|fim▁end|>
const lModuleDependents = object2Array(pFolder.dependents); const lModuleDependencies = object2Array(pFolder.dependencies); // this calculation might look superfluous (why not just .length the dependents
<|file_name|>generic.go<|end_file_name|><|fim▁begin|>package populator import ( "encoding/base64" "fmt" "io/ioutil" "os" "path/filepath" "sync/atomic" v1payload "github.com/nerdalize/nerd/nerd/client/auth/v1/payload" "github.com/pkg/errors" "k8s.io/client-go/tools/clientcmd/api" // this blank import is necessary to load the oidc plugin for client-go _ "k8s.io/client-go/plugin/pkg/client/auth/oidc" ) //GenericPopulator is an implementation of the P interface using on Open ID Connect credentials. type GenericPopulator struct { // kubeConfigFile is the path where the kube config is stored // Only access this with atomic ops kubeConfigFile atomic.Value cluster *v1payload.GetClusterOutput } func newGeneric(kubeConfigFile string, cluster *v1payload.GetClusterOutput) *GenericPopulator { o := &GenericPopulator{ cluster: cluster, } o.kubeConfigFile.Store(kubeConfigFile) return o } //GetKubeConfigFile returns the path where the kube config is stored. func (o *GenericPopulator) GetKubeConfigFile() string { return o.kubeConfigFile.Load().(string) } //RemoveConfig deletes the precised cluster context and cluster info. func (o *GenericPopulator) RemoveConfig(cluster string) error { cluster = fmt.Sprintf("%s-%s", Prefix, cluster) // read existing config or create new if does not exist kubecfg, err := ReadConfigOrNew(o.GetKubeConfigFile()) if err != nil { return err } delete(kubecfg.Clusters, cluster) delete(kubecfg.AuthInfos, cluster) delete(kubecfg.Contexts, cluster) kubecfg.CurrentContext = "" // write back to disk if err := WriteConfig(kubecfg, o.GetKubeConfigFile()); err != nil { return errors.Wrap(err, "could not write kubeconfig") } return nil } // PopulateKubeConfig populates an api.Config object and set the current context to the provided cluster. 
func (o *GenericPopulator) PopulateKubeConfig(namespace string) error { c := api.NewCluster() if o.cluster == nil { return errors.New("Cannot use an empty cluster") } if o.cluster.ServiceType == "public-kubernetes" { if o.cluster.CaCertificate == "" { c.InsecureSkipTLSVerify = true } else { data, err := base64.StdEncoding.DecodeString(o.cluster.CaCertificate) if err != nil { return err } c.CertificateAuthorityData = data } } c.Server = o.cluster.ServiceURL auth := api.NewAuthInfo() if o.cluster.KubeConfigUser.Token != "" { auth.Token = o.cluster.KubeConfigUser.Token } else { auth.AuthProvider = &api.AuthProviderConfig{ Name: "oidc", Config: map[string]string{ "client-id": o.cluster.KubeConfigUser.AuthProvider.Config.ClientID, "id-token": o.cluster.KubeConfigUser.AuthProvider.Config.IDToken, "idp-issuer-url": o.cluster.KubeConfigUser.AuthProvider.Config.IdpIssuerURL, "refresh-token": o.cluster.KubeConfigUser.AuthProvider.Config.RefreshToken, }, } } if namespace == "" { if len(o.cluster.Namespaces) != 0 { namespace = o.cluster.Namespaces[0].Name } else { namespace = "default" } } // context context := api.NewContext() clusterName := fmt.Sprintf("%s-%s", Prefix, o.cluster.ShortName) context.Cluster = clusterName context.AuthInfo = clusterName context.Namespace = namespace // read existing config or create new if does not exist kubecfg, err := ReadConfigOrNew(o.GetKubeConfigFile()) if err != nil { return err } kubecfg.Clusters[clusterName] = c kubecfg.CurrentContext = clusterName kubecfg.AuthInfos[clusterName] = auth kubecfg.Contexts[clusterName] = context // write back to disk if err := WriteConfig(kubecfg, o.GetKubeConfigFile()); err != nil { return errors.Wrap(err, "could not write kubeconfig") } return nil } func (o *GenericPopulator) createCertificate(data, cluster, homedir string) (string, error) { if data == "" { return "", nil } dir := filepath.Join(homedir, ".nerd", "certs") filename := filepath.Join(dir, cluster+".cert") _, err := os.Stat(dir) if err != nil { 
if !os.IsNotExist(err) { return "", errors.Errorf("'%v' is not a path", dir) } err = os.MkdirAll(dir, DirPermissions) if err != nil { return "", errors.Wrap(err, fmt.Sprintf("The provided path '%s' does not exist and could not be created.", dir)) } _, err = os.Stat(dir) if err != nil { return "", err }<|fim▁hole|> return "", err } err = ioutil.WriteFile(filename, d, 0644) if err != nil { return "", err } return filename, nil }<|fim▁end|>
} d, err := base64.StdEncoding.DecodeString(data) if err != nil {
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.9.1 on 2016-02-06 16:27 from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Logs', fields=[ ('id', models.AutoField(primary_key=True, serialize=False)), ('message', models.CharField(max_length=200)), ('date', models.DateField(auto_now=True)), ], ), migrations.CreateModel( name='Member', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=100)), ('value', models.IntegerField()), ('last_consomation', models.DateField()), ('user_Acount_inside_club_Nix', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( name='Product', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=30)), ('description', models.CharField(max_length=200)), ('price', models.IntegerField()), ('hide', models.BooleanField(default=False)), ('image', models.ImageField(upload_to='static')), ], ), migrations.AddField( model_name='logs', name='product', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='snackix.Product'), ), migrations.AddField( model_name='logs',<|fim▁hole|> field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='snackix.Member'), ), ]<|fim▁end|>
name='user',
<|file_name|>template.js<|end_file_name|><|fim▁begin|>define( ({ viewer: { loading: { step1: "CARGANDO APLICACIÓN", step2: "CARGANDO DATOS", step3: "INICIALIZANDO", fail: "La carga de la comparativa de mapas ha fallado", loadBuilder: "CAMBIANDO A MODO DE BUILDER", redirectSignIn: "REDIRIGIENDO A LA PÁGINA DE INICIO DE SESIÓN", redirectSignIn2: "(se te redirigirá aquí después del inicio de sesión)", failButton: "Reintentar" }, errors: { boxTitle: "Se ha producido un error", portalSelf: "Error muy grave: no se ha podido obtener la configuración del portal", invalidConfig: "Error muy grave: configuración no válida", invalidConfigNoWebmap: "Error muy grave: configuración no válida (no se ha especificado mapa Web)", createMap: "No se puede crear el mapa", invalidApp: "Error muy grave: la aplicación no se puede cargar", initMobile: "Bienvenido a la aplicación Web para la comparativa. La aplicación no está configurada. El builder interactivo no es compatible con dispositivos móviles.", noBuilderIE8: "El builder interactivo de comparativas no es compatible con las versiones anteriores a Internet Explorer 9.", noLayerView: "Bienvenido a la aplicación Web para la comparativa.<br />La aplicación aún no está configurada.", appSave: "Error al guardar la aplicación web", mapSave: "Error al guardar el mapa web", notAuthorized: "No tienes autorización para acceder a esta aplicación", conflictingProjectionsTitle: "Conflicto de proyecciones", conflictingProjections: "La comparativa de mapas no admite el uso de dos mapas web con distintas proyecciones. 
Abre los ajustes y utiliza un mapa web que use la misma proyección que el primer mapa.", cpButton: "Cerrar" }, mobileView: { hideIntro: "OCULTAR INTRODUCCIÓN", navLeft: "Leyenda", navMap: "Mapa", navRight: "Datos" }, desktopView: { storymapsText: "Un mapa de historias", builderButton: "Cambiar a modo de builder", bitlyTooltip: "Consigue un enlace corto a la aplicación" } }, builder: { builder: { panelHeader: "CONFIGURACIÓN DE LA APLICACIÓN", buttonSave: "GUARDAR", buttonHelp: "Ayuda", buttonShare: "Compartir", buttonDiscard: "CANCELAR", buttonSettings: "Configuración", buttonView: "Modo Vista", buttonItem: "Abre el elemento de la aplicación web", noPendingChange: "Sin cambios pendientes", unSavedChangeSingular: "1 cambio sin guardar", unSavedChangePlural: "cambios no guardados", popoverDiscard: "¿Estás seguro de que deseas descartar los cambios no guardados?", yes: "Sí", no: "No", popoverOpenViewExplain: "Al abrir el visor, perderás los cambios no guardados", popoverOpenViewOk: "Aceptar", popoverOpenViewCancel: "Cancelar", popoverSaveWhenDone: "No olvides guardar los cambios cuando hayas terminado", closeWithPendingChange: "¿Estás seguro de que deseas confirmar la acción? Tus cambios se perderán.", gotIt: "Aceptar", savingApplication: "Guardando la aplicación", <|fim▁hole|> saveError: "Error al guardar. Inténtalo de nuevo", saveError2: "Error al guardar a causa de una etiqueta HTML no válida en un nombre o una descripción", saveError3: "El título no puede estar vacío", signIn: "Inicia sesión con una cuenta en", signInTwo: "para guardar la aplicación." 
}, header:{ editMe: "¡Modifícame!", templateTitle: "Establecer título de plantilla", templateSubtitle: "Establecer subtítulo de plantilla" }, settings: { settingsHeader: "Ajustes de la aplicación", modalCancel: "Cancelar", modalApply: "Aplicar" }, settingsColors: { settingsTabColor: "Tema", settingsColorExplain: "Elige un tema para la aplicación o define tus propios colores.", settingsLabelColor: "Colores de fondo del encabezado y el panel lateral" }, settingsHeader: { settingsTabLogo: "Encabezado", settingsLogoExplain: "Personaliza el logotipo del encabezado (el valor máximo es 250 x 50px).", settingsLogoEsri: "Logotipo de Esri", settingsLogoNone: "Sin logotipo", settingsLogoCustom: "Logotipo personalizado", settingsLogoCustomPlaceholder: "URL de imagen", settingsLogoCustomTargetPlaceholder: "Enlace click-through", settingsLogoSocialExplain: "Personaliza el enlace superior derecho del encabezado.", settingsLogoSocialText: "Texto", settingsLogoSocialLink: "Vínculo", settingsLogoSocialDisabled: "El administrador ha deshabilitado esta entidad" }, settingsExtent: { settingsTabExtent: "Extensión", settingsExtentExplain: "Establecer la extensión inicial mediante el mapa interactivo siguiente.", settingsExtentExplainBottom: "La extensión que definas modificará la extensión inicial del mapa web. 
Ten en cuenta que si estás llevando a cabo una serie comparativa no se usará esa extensión.", settingsExtentDateLineError: "La extensión no puede atravesar el meridiano de longitud 180�", settingsExtentDateLineError2: "Error al calcular la extensión", settingsExtentDrawBtn: "Dibuja una nueva extensión", settingsExtentModifyBtn: "Edita la extensión actual", settingsExtentApplyBtn: "Aplica en el mapa principal", settingsExtentUseMainMap: "Usa la extensión del mapa principal" } }, swipe: { mobileData: { noData: "No hay datos para mostrar", noDataExplain: "Puntea el mapa para seleccionar una entidad y volver aquí", noDataMap: "No hay datos para este mapa", noPopup: "No se han encontrado ventanas emergentes para esta entidad" }, mobileLegend: { noLegend: "No hay leyenda para mostrar." }, swipeSidePanel: { editTooltip: "Establecer la descripción del panel lateral", editMe: "¡Modifícame!", legendTitle: "Leyenda" }, infoWindow: { noFeature: "No hay datos que mostrar", noFeatureExplain: "Puntea el mapa para seleccionar una entidad" }, settingsLayout: { settingsTabLayout: "Cambiar estilo", settingsLayoutExplain: "Elige un estilo para la comparativa de mapas.", settingsLayoutSwipe: "Barra vertical", settingsLayoutSpyGlass: "Lupa", settingsLayoutSelected: "Diseño seleccionado", settingsLayoutSelect: "Selecciona este diseño", settingsSaveConfirm: "Algunos de tus cambios requieren que guardes y vuelvas a cargar la aplicación" }, settingsDataModel: { settingsTabDataModel: "Tipo de comparación", settingsDataModelExplainSwipe: "¿Qué quieres que comparen los usuarios?", settingsDataModelExplainSwipe2: "", settingsDataModelExplainSpyGlass: "Elige la capa o el mapa Web que aparecerá en la lupa.", settingsDataModelOneMap: "Una capa en un mapa web", settingsDataModel1Explain: "Selecciona la capa que quieras comparar", settingsDataModel1Warning: "Si la capa está oculta por capas superiores, la comparativa de mapas no tendrá ningún efecto.", settingsDataModel1SpyGlassExplain: "Selecciona 
la capa que aparecerá en la lupa.", settingsDataModelTwoMaps: "Dos mapas Web", settingsDataModelLayerIds: "ID de capa de mapa Web", settingsDataModelSelected: "Tipo seleccionado", settingsDataModelWebmapSwipeId1: "ID del mapa Web derecho", settingsDataModelWebmapSwipeId2: "ID del mapa Web izquierdo", settingsDataModelWebmapGlassId1: "ID del mapa Web principal", settingsDataModelWebmapGlassId2: "ID del mapa Web de la lupa", settingsDataModelSelect: "Selecciona este tipo", settingsDataModel2Explain: "Comparar con otro mapa Web.", settingsDataModel2SpyGlassExplain: "Deja al descubierto otro mapa Web.", settingsDataModel2HelpTitle: "¿Cómo puedo encontrar el ID de un mapa Web?", settingsDataModel2HelpContent: "Copia y pega los dígitos que hay tras el signo \"=\" en la URL del mapa Web", switchMaps: "Intercambiar mapas", browseWebMaps: "Examinar mapas web" }, settingsLegend: { settingsTabLegend: "Diseño de la aplicación", settingsLegendExplain: "Selecciona los ajustes de diseño de la aplicación.", settingsLegendEnable: "Activar leyenda", settingsDescriptionEnable: "Activar descripción", settingsBookmarksEnable: "Activar series de comparativas", settingsPopupDisable: "Habilitar ventana emergente", settingsLocationSearchEnable: "Habilitar la búsqueda del localizador", settingsGeolocatorEnable: "Habilitar geolocalizador", settingsLegendHelpContent: "Utiliza la tabla de contenido del visor de mapas web de ArcGIS.com (ocultar en leyenda) para delimitar el contenido de la leyenda.", settingsSeriesHelpContent: "Las series comparativas es una opción de navegación por pestañas que guía al usuario a una extensión concreta y muestra un título y un texto descriptivo en el panel lateral. En el momento de la primera activación, los marcadores de mapas web se importarán y usarán para rellenar la barra de series. 
Si deshabilitas esta opción, la barra de series se desactivará, pero la configuración de las series se conservará para usarse de nuevo.", settingsSeriesHelpContent2: "Las series de comparativas te permiten crear y editar una selección de ubicaciones junto con títulos y texto. Si tu mapa Web tiene marcadores de posición, se mostrarán. Puedes desactivar las series, pero la configuración se mantendrá para su uso futuro.", settingsSeriesHelpLink: "Mira un ejemplo de una aplicación con una serie de comparativas aquí", preview: "Vista previa de la interfaz de usuario", settingsLocateButtonExplain: "Esta funcionalidad es compatible con la mayoría de dispositivos móviles y navegadores de escritorio (incluido Internet Explorer 9+).", settingsLocateButton: "Habilitar un botón \'Localizar\' en los navegadores compatibles", settingsAddressSearch: "Habilitar una herramienta de búsqueda de direcciones" }, settingsSwipePopup: { settingsSwipePopup: "Ventana emergente", settingsSwipePopupExplain: "Personaliza la apariencia de los encabezados emergentes para ayudar al usuario a asociar las ventanas emergentes con las capas de mapas.", settingsSwipePopupSwipe1: "Mapa izquierdo", settingsSwipePopupSwipe2: "Mapa derecho", settingsSwipePopupGlass1: "Mapa principal", settingsSwipePopupGlass2: "Mapa de la lupa", settingsSwipePopupTitle: "Título del encabezado", settingsSwipePopupColor: "Color del encabezado" }, initPopup: { initHeader: "Bienvenido al builder de Comparativa/Lupa", modalNext: "Siguiente", modalPrev: "Anterior", modalApply: "Abrir la aplicación" }, seriesPanel: { title: "Título", descr: "Descripción", discard: "Descartar marcadores", saveExtent: "Configurar extensión de marcadores", discardDisabled: "No puedes eliminar ese marcador. Las series comparativas pueden deshabilitarse en la Configuración." 
}, helpPopup: { title: "Ayuda", close: "Cerrar", tab1: { div1: "La plantilla Comparativa/Lupa se ha diseñado para comparar dos mapas web o dos capas de un mismo mapa web en una aplicación web atractiva y fácil de usar que se puede utilizar en cualquier navegador web o dispositivo, incluidos los smartphones y las tablets.", div2: "Si quieres obtener información adicional sobre la plantilla Comparativa/Lupa, incluidos algunos ejemplos creados por los usuarios, <a href='http://storymaps.arcgis.com/en/app-list/swipe/' target='_blank'> visita el sitio web de Story Maps</a>. También puedes seguirnos en Twitter en <a href='https://twitter.com/EsriStoryMaps' target='_blank'>@EsriStoryMaps</a>.", div3: "Nos gusta mucho tener noticias tuyas. Tanto si tienes alguna pregunta, si deseas solicitar un nueva característica o si crees que has encontrado un error, visita el <a href='http://links.esri.com/storymaps/forum' target='_blank'>foro de usuarios de Story Maps</a>." } }, share: { firstSaveTitle: "La aplicación se ha guardado correctamente", firstSaveHeader: "La aplicación se ha guardado en ArcGIS Online. Lee las siguientes respuestas a las preguntas frecuentes.", firstSaveA1: "Si no estás familiarizado con el uso de ArcGIS Online o necesitas un acceso directo a la interfaz de creación, puedes guardar el siguiente enlace: %LINK1%", firstSaveA1bis: "También puedes encontrar la aplicación en tu <a href='%LINK2%' target='_blank'>carpeta de contenido de ArcGIS Online</a>.", firstSaveQ2: "¿Se comparte mi aplicación?", firstSaveA2: "Actualmente, tu aplicación no se comparte. Para compartirla, usa el botón COMPARTIR.", shareTitle: "Compartir la aplicación", sharePrivateHeader: "Tu aplicación no se comparte. ¿Deseas compartirla?", sharePrivateBtn1: "Compartir públicamente", sharePrivateBtn2: "Compartir con mi organización", sharePrivateProgress: "Uso compartido en curso...", sharePrivateErr: "Error del uso compartido. 
Inténtalo de nuevo o", sharePrivateOk: "Uso compartido actualizado correctamente, cargando...", shareStatus1: "La aplicación no se ha guardado", shareStatus2: "La aplicación se comparte públicamente", shareStatus3: "La aplicación se comparte dentro de la organización", shareStatus4: "La aplicación no se comparte", sharePreviewAsUser: "Presentación preliminar", shareHeader1: "Tu aplicación está <strong>disponible públicamente</strong>.", shareHeader2: "Tu aplicación está disponible para los miembros de tu organización (se requiere inicio de sesión).", shareLinkHeader: "Comparte la aplicación con tu audiencia", shareLinkOpen: "ABRIR", learnMore: "Más información", shareQ1Opt1: "¿Qué debo hacer para que la aplicación siga siendo privada?", shareQ1Opt2: "¿Qué debo hacer para que la aplicación siga siendo privada o para compartirla públicamente?", shareA1: "Usa %SHAREIMG% en <a href='%LINK1%' target='_blank'>la página de elemento de la aplicación</a>. Si también quieres dejar de compartir el mapa web, usa <a href='%LINK2%' target='_blank'>la página de elemento del mapa web</a>.", shareA1bis: "Si también deseas dejar de compartir el servicio de entidades, utiliza la <a href='%LINK1%' target='_blank'>página de elementos del servicio de entidades</a>.", shareQ2: "¿Cómo puedo editar la aplicación más adelante?", shareQ2bis: "¿Cómo regreso a la interfaz de creación?", shareA2div1: "Guarda y vuelve a usar el siguiente vínculo %LINK1% o utiliza la <a href='%LINK2%' target='_blank'>página de elementos de la aplicación</a>.", shareA2div2: "Como propietario de la aplicación, cuando inicias sesión en ArcGIS.com, la aplicación incluye un botón para abrir el builder interactivo:", shareQ3: "¿Dónde se almacenan los datos?", shareA3: "La configuración de la aplicación se almacena en este elemento de aplicación web</a>.", shareWarning: "Se ha deshabilitado la opción de compartir %WITH% porque no eres el propietario del <a href='%LINK%' target='_blank'>mapa web</a>.", shareWarningWith1: 
"públicamente", shareWarningWith2: "públicamente y con la organización" }, directCreation: { header: "Bienvenido al builder de Comparativa/Lupa", mapPickHeader: "Para empezar, escribe un Id. de mapa web válido o usa el botón de búsqueda para examinar mapas web.", launchBuilder: "Iniciar Builder", chooseWebmapLbl: "Elegir mapa web...", explain2: "Si vas a crear un mapa de historia de Comparativa o Lupa, usa el botón siguiente para elegir el mapa web de ArcGIS Online que deseas utilizar. Si lo prefieres, puedes pegar el Id. del mapa web en el campo siguiente.", explain3: "Si deseas usar dos mapas web en tu mapa de historia, se te pedirá el segundo mapa web más adelante cuando elijas esa opción.", webmapPlaceholder: "Especificar un Id. de mapa web..." } }, configure: { mapdlg:{ items:{ organizationLabel: "Mi organización", onlineLabel: "ArcGIS Online", contentLabel: "Mi contenido", favoritesLabel: "Mis favoritos" }, title: "Seleccionar mapa web", searchTitle: "Buscar", ok: "Aceptar", cancel: "Cancelar", placeholder: "Introducir término de búsqueda" } } }) );<|fim▁end|>
saveSuccess: "La aplicación se ha guardado con éxito",
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- <|fim▁hole|><|fim▁end|>
__version__ = '0.0.1' __license__ = 'MIT'
<|file_name|>slow-stochastic.src.min.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
"use strict";import"../../Stock/Indicators/SlowStochastic/SlowStochasticIndicator.js";
<|file_name|>test_support.rs<|end_file_name|><|fim▁begin|>// -*- coding: utf-8 -*- // ------------------------------------------------------------------------------------------------ // Copyright © 2020, HST authors. // // Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except // in compliance with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software distributed under the // License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either // express or implied. See the License for the specific language governing permissions and // limitations under the License. // ------------------------------------------------------------------------------------------------ use std::collections::HashSet; use std::fmt::Debug; use std::fmt::Display; use bit_array::BitArray; use proptest::arbitrary::any; use proptest::arbitrary::Arbitrary; use proptest::collection::hash_set; use proptest::collection::vec; use proptest::strategy::BoxedStrategy; use proptest::strategy::Strategy; use crate::event::DisjointSum; use crate::event::EventSet; use crate::primitives::PrimitiveEvents; /// An event that is identified by a number. Makes it easy to construct distinct events in /// test cases. 
#[derive(Clone, Copy, Eq, Hash, PartialEq)] pub struct NumberedEvent(pub u16); impl From<u16> for NumberedEvent { fn from(from: u16) -> NumberedEvent { NumberedEvent(from) } } const SUBSCRIPT_DIGITS: [char; 10] = ['₀', '₁', '₂', '₃', '₄', '₅', '₆', '₇', '₈', '₉']; impl Debug for NumberedEvent { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { (self as &dyn Display).fmt(f) } } impl Display for NumberedEvent { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let digits: String = self .0 .to_string() .chars() .map(|ch| SUBSCRIPT_DIGITS[ch.to_digit(10).unwrap() as usize]) .collect(); write!(f, "E{}", digits) } } impl Arbitrary for NumberedEvent { type Parameters = (); type Strategy = BoxedStrategy<NumberedEvent>; fn arbitrary_with(_args: ()) -> Self::Strategy { any::<u16>().prop_map_into().boxed() } } #[test] fn can_display_events() { assert_eq!(NumberedEvent(0).to_string(), "E₀"); assert_eq!(NumberedEvent(10).to_string(), "E₁₀"); assert_eq!(NumberedEvent(01234).to_string(), "E₁₂₃₄"); } #[derive(Clone, Eq, Hash, PartialEq)] pub struct NumberedEvents(BitArray<usize, typenum::U65536>); impl NumberedEvents { pub fn add(&mut self, event: NumberedEvent) { let index = event.0 as usize; self.0.set(index, true); } pub fn contains(&self, event: NumberedEvent) -> bool { let index = event.0 as usize; self.0[index] } } impl Display for NumberedEvents { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.debug_set() .entries( self.0 .iter() .enumerate() .filter(|(_index, value)| *value) .map(|(index, _value)| index as u16) .map(NumberedEvent::from), ) .finish() } } impl Debug for NumberedEvents { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "NumberedEvents ")?; f.debug_set() .entries( self.0 .iter() .enumerate() .filter(|(_index, value)| *value) .map(|(index, _value)| index as u16) .map(NumberedEvent::from), ) .finish() } } impl From<NumberedEvent> for NumberedEvents { fn from(event: NumberedEvent) -> 
NumberedEvents { let mut events = NumberedEvents::empty(); events.add(event); events } } impl From<HashSet<NumberedEvent>> for NumberedEvents { fn from(set: HashSet<NumberedEvent>) -> NumberedEvents { let mut events = NumberedEvents::empty(); for event in set { events.add(event); } events } } impl EventSet for NumberedEvents { fn empty() -> Self { NumberedEvents(BitArray::from_elem(false)) } fn intersect(&mut self, other: &Self) { self.0.intersect(&other.0); } fn is_empty(&self) -> bool { self.0.none() } fn negate(&mut self) { self.0.negate(); } fn subtract(&mut self, other: &Self) { self.0.difference(&other.0); } fn union(&mut self, other: &Self) { self.0.union(&other.0); } fn universe() -> Self { NumberedEvents(BitArray::from_elem(true)) } } impl IntoIterator for NumberedEvents { type Item = NumberedEvents; type IntoIter = Box<dyn Iterator<Item = NumberedEvents>>; fn into_iter(self) -> Self::IntoIter { Box::new( self.0 .into_iter() .enumerate() .filter(|(_index, value)| *value) .map(|(index, _value)| index as u16) .map(NumberedEvent::from) .map(NumberedEvents::from), ) } } impl Arbitrary for NumberedEvents { type Parameters = (); type Strategy = BoxedStrategy<NumberedEvents>; fn arbitrary_with(_args: ()) -> Self::Strategy { hash_set(any::<NumberedEvent>(), 0..32) .prop_map_into() .boxed() } } #[derive(Debug)] pub struct NonemptyNumberedEvents(NumberedEvents); impl From<NonemptyNumberedEvents> for NumberedEvents { fn from(events: NonemptyNumberedEvents) -> NumberedEvents { events.0 } } impl Arbitrary for NonemptyNumberedEvents { type Parameters = (); type Strategy = BoxedStrategy<NonemptyNumberedEvents>; fn arbitrary_with(_args: ()) -> Self::Strategy { hash_set(any::<NumberedEvent>(), 1..32) .prop_map_into() .prop_map(NonemptyNumberedEvents) .boxed() } } #[cfg(test)] mod numbered_events_tests { use proptest_attr_macro::proptest; use super::*; #[proptest] fn can_intersect(a: NumberedEvents, b: NumberedEvents, event: NumberedEvent) { let mut intersection = 
a.clone(); intersection.intersect(&b); assert_eq!( intersection.contains(event), a.contains(event) && b.contains(event) ); } #[proptest] fn intersection_is_commutative(a: NumberedEvents, b: NumberedEvents) { let mut i1 = a.clone(); i1.intersect(&b); let mut i2 = b.clone(); i2.intersect(&a);<|fim▁hole|> } #[proptest] fn can_negate(a: NumberedEvents, event: NumberedEvent) { let mut negation = a.clone(); negation.negate(); assert_eq!(negation.contains(event), !a.contains(event)); } #[proptest] fn negation_is_reversible(a: NumberedEvents) { let mut negated_twice = a.clone(); negated_twice.negate(); negated_twice.negate(); assert_eq!(a, negated_twice); } #[proptest] fn can_subtract(a: NumberedEvents, b: NumberedEvents, event: NumberedEvent) { let mut difference = a.clone(); difference.subtract(&b); assert_eq!( difference.contains(event), a.contains(event) && !b.contains(event) ); } #[proptest] fn can_union(a: NumberedEvents, b: NumberedEvents, event: NumberedEvent) { let mut union = a.clone(); union.union(&b); assert_eq!( union.contains(event), a.contains(event) || b.contains(event) ); } #[proptest] fn union_is_commutative(a: NumberedEvents, b: NumberedEvents) { let mut u1 = a.clone(); u1.union(&b); let mut u2 = b.clone(); u2.union(&a); assert_eq!(u1, u2); } } /// An event type that is useful in test cases. It can be a NumberedEvent or any of the /// built-in event types. pub type TestEvents = DisjointSum<PrimitiveEvents, NumberedEvents>; impl From<NumberedEvent> for TestEvents { fn from(event: NumberedEvent) -> TestEvents { TestEvents::from_b(event.into()) } } impl From<NumberedEvents> for TestEvents { fn from(events: NumberedEvents) -> TestEvents { TestEvents::from_b(events) } } /// A proptest helper type that generates a non-empty vector of values. 
#[derive(Clone, Debug)] pub struct NonemptyVec<T> { pub vec: Vec<T>, } impl<T> Arbitrary for NonemptyVec<T> where T: Arbitrary + Clone + Debug + 'static, { type Parameters = (); type Strategy = BoxedStrategy<NonemptyVec<T>>; fn arbitrary_with(_args: ()) -> Self::Strategy { vec(any::<T>(), 1..16) .prop_map(|vec| NonemptyVec { vec }) .boxed() } }<|fim▁end|>
assert_eq!(i1, i2);
<|file_name|>routes.py<|end_file_name|><|fim▁begin|>from framework.routing import Rule, json_renderer from addons.s3 import views api_routes = { 'rules': [ Rule( [ '/settings/s3/accounts/', ], 'post', views.s3_add_user_account, json_renderer, ), Rule( [<|fim▁hole|> '/settings/s3/accounts/', ], 'get', views.s3_account_list, json_renderer, ), Rule( [ '/project/<pid>/s3/settings/', '/project/<pid>/node/<nid>/s3/settings/', ], 'put', views.s3_set_config, json_renderer, ), Rule( [ '/project/<pid>/s3/settings/', '/project/<pid>/node/<nid>/s3/settings/', ], 'get', views.s3_get_config, json_renderer, ), Rule( [ '/project/<pid>/s3/user-auth/', '/project/<pid>/node/<nid>/s3/user-auth/', ], 'put', views.s3_import_auth, json_renderer, ), Rule( [ '/project/<pid>/s3/user-auth/', '/project/<pid>/node/<nid>/s3/user-auth/', ], 'delete', views.s3_deauthorize_node, json_renderer, ), Rule( [ '/project/<pid>/s3/buckets/', '/project/<pid>/node/<nid>/s3/buckets/', ], 'get', views.s3_folder_list, json_renderer, ), Rule( [ '/project/<pid>/s3/newbucket/', '/project/<pid>/node/<nid>/s3/newbucket/', ], 'post', views.create_bucket, json_renderer ), ], 'prefix': '/api/v1', }<|fim▁end|>
<|file_name|>ExampleUnitTest.java<|end_file_name|><|fim▁begin|>package cat.foixench.test.parcelable; import org.junit.Test; import static org.junit.Assert.*; /** * Example local unit test, which will execute on the development machine (host). * * @see <a href="http://d.android.com/tools/testing">Testing documentation</a> */ public class ExampleUnitTest {<|fim▁hole|> @Test public void addition_isCorrect() throws Exception { assertEquals(4, 2 + 2); } }<|fim▁end|>
<|file_name|>components.ts<|end_file_name|><|fim▁begin|>import { Reflection, Type } from "../models"; import type { Serializer } from "./serializer"; import type { ModelToObject } from "./schema"; /** * Represents Serializer plugin component. * * Like {@link Converter} plugins each {@link Serializer} plugin defines a predicate that instructs if an * object can be serialized by it, this is done dynamically at runtime via a `supports` method. * * Additionally, each {@link Serializer} plugin must define a predicate that instructs the group * it belongs to. * * Serializers are grouped to improve performance when finding serializers that apply to a node, * this makes it possible to skip the `supports` calls for `Type`s when searching for a * `Reflection` and vise versa. */ export abstract class SerializerComponent<T> { /** * The priority this serializer should be executed with. * A higher priority means the {@link Serializer} will be applied earlier. */ static PRIORITY = 0; constructor(owner: Serializer) { this.owner = owner; } /** * Set when the SerializerComponent is added to the serializer. */ protected owner: Serializer; /** * A high-level predicate filtering which group this serializer belongs to. * This is a high-level filter before the {@link SerializerComponent.supports} predicate filter. * * For example, use the {@link Reflection} class class to group all reflection based serializers: * ```typescript * class ReflectionSerializer { * serializeGroup(instance) { return instance instanceof Reflection } * } * ``` * * Use the {@link Type} class to group all type based serializers: * ```typescript * class TypeSerializer { * serializeGroup(instance) { return instance instanceof Type } * } * ``` */ abstract serializeGroup(instance: unknown): boolean; /** * The priority this serializer should be executed with. * A higher priority means the {@link Serializer} will be applied earlier. 
*/ get priority(): number { return ( (this.constructor as typeof SerializerComponent)["PRIORITY"] || SerializerComponent.PRIORITY ); } abstract supports(item: unknown): boolean; abstract toObject(item: T, obj?: object): Partial<ModelToObject<T>>; } export abstract class ReflectionSerializerComponent< T extends Reflection<|fim▁hole|> /** * Filter for instances of {@link Reflection} */ serializeGroup(instance: unknown): boolean { return instance instanceof Reflection; } } export abstract class TypeSerializerComponent< T extends Type > extends SerializerComponent<T> { /** * Filter for instances of {@link Type} */ serializeGroup(instance: unknown): boolean { return instance instanceof Type; } }<|fim▁end|>
> extends SerializerComponent<T> {
<|file_name|>GuideActivity.java<|end_file_name|><|fim▁begin|>package android_testsuite; import android.app.Activity; import android.content.Intent; import android.os.Bundle; import android.view.View; import android.widget.Button; import android_testsuite.mytest.application_search.AppSearchActivity; import android_testsuite.mytest.application_search.UidActivity; import android_testsuite.mytest.camera.CameraActivity; import android_testsuite.mytest.camera.CameraIntentTestActivity; import android_testsuite.mytest.custom_loading.CustomLoadingActivity; import android_testsuite.mytest.media.MediaPlayerTestActivity; import android_testsuite.mytest.network_test.HttpActivity; import android_testsuite.mytest.network_test.SocketActivity; import android_testsuite.mytest.rsa.RsaActivity; import android_testsuite.mytest.seekbar.SeekBarActivity; /** * @author Ren Hui * @since 1.0.1.058 */ public class GuideActivity extends Activity { private Button mBtSelHttp; private Button mBtSelSocket; private Button mBtSearchApp; private Button mBtRSa; private Button mBtUid; private Button mBtMedia; private Button mCameraBt; private Button mCustomLoadingBt; private Button mCameraNewBtn; private Button mSeekBarBtn; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_guide); this.mBtSelHttp = (Button) findViewById(R.id.bt_selHttp); mBtSelHttp.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { Intent intent = new Intent(); intent.setClass(GuideActivity.this, HttpActivity.class); GuideActivity.this.startActivity(intent); } }); this.mBtSelSocket = (Button) findViewById(R.id.bt_selSocket); mBtSelSocket.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { Intent intent = new Intent(); intent.setClass(GuideActivity.this, SocketActivity.class); GuideActivity.this.startActivity(intent); } }); this.mBtSearchApp = (Button) 
findViewById(R.id.bt_searchApp); mBtSearchApp.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(); intent.setClass(GuideActivity.this, AppSearchActivity.class); GuideActivity.this.startActivity(intent); } }); this.mBtRSa = (Button) findViewById(R.id.RSA); mBtRSa.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(); intent.setClass(GuideActivity.this, RsaActivity.class); GuideActivity.this.startActivity(intent); } }); this.mBtUid = (Button) findViewById(R.id.uid); mBtUid.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { Intent intent = new Intent(); intent.setClass(GuideActivity.this, UidActivity.class); GuideActivity.this.startActivity(intent); } }); this.mBtMedia = (Button) findViewById(R.id.media);<|fim▁hole|> mBtMedia.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { Intent intent = new Intent(); intent.setClass(GuideActivity.this, MediaPlayerTestActivity.class); GuideActivity.this.startActivity(intent); } }); this.mCameraBt = (Button) findViewById(R.id.camera_intent); mCameraBt.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(); intent.setClass(GuideActivity.this, CameraIntentTestActivity.class); GuideActivity.this.startActivity(intent); } }); this.mCustomLoadingBt = (Button) findViewById(R.id.custom_loading); mCustomLoadingBt.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { startActivity(new Intent(GuideActivity.this, CustomLoadingActivity.class)); } }); mCameraNewBtn = (Button) findViewById(R.id.camera); mCameraNewBtn.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { startActivity(new Intent(GuideActivity.this, CameraActivity.class)); } }); mSeekBarBtn = (Button) findViewById(R.id.seek_bar); 
mSeekBarBtn.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { startActivity(new Intent(GuideActivity.this, SeekBarActivity.class)); } }); } }<|fim▁end|>
<|file_name|>performancetiming.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ use crate::dom::bindings::codegen::Bindings::PerformanceTimingBinding; use crate::dom::bindings::codegen::Bindings::PerformanceTimingBinding::PerformanceTimingMethods; use crate::dom::bindings::codegen::Bindings::WindowBinding::WindowMethods; use crate::dom::bindings::reflector::{reflect_dom_object, Reflector}; use crate::dom::bindings::root::{Dom, DomRoot}; use crate::dom::document::Document; use crate::dom::window::Window; use dom_struct::dom_struct; #[dom_struct] pub struct PerformanceTiming { reflector_: Reflector, navigation_start: u64, navigation_start_precise: u64, document: Dom<Document>, } impl PerformanceTiming { fn new_inherited( nav_start: u64, nav_start_precise: u64, document: &Document, ) -> PerformanceTiming { PerformanceTiming { reflector_: Reflector::new(), navigation_start: nav_start, navigation_start_precise: nav_start_precise, document: Dom::from_ref(document), } } #[allow(unrooted_must_root)]<|fim▁hole|> pub fn new( window: &Window, navigation_start: u64, navigation_start_precise: u64, ) -> DomRoot<PerformanceTiming> { let timing = PerformanceTiming::new_inherited( navigation_start, navigation_start_precise, &window.Document(), ); reflect_dom_object(Box::new(timing), window, PerformanceTimingBinding::Wrap) } } impl PerformanceTimingMethods for PerformanceTiming { // https://w3c.github.io/navigation-timing/#widl-PerformanceTiming-navigationStart fn NavigationStart(&self) -> u64 { self.navigation_start } // https://w3c.github.io/navigation-timing/#widl-PerformanceTiming-domLoading fn DomLoading(&self) -> u64 { self.document.get_dom_loading() } // https://w3c.github.io/navigation-timing/#widl-PerformanceTiming-domInteractive fn DomInteractive(&self) -> u64 { 
self.document.get_dom_interactive() } // https://w3c.github.io/navigation-timing/#widl-PerformanceTiming-domContentLoadedEventStart fn DomContentLoadedEventStart(&self) -> u64 { self.document.get_dom_content_loaded_event_start() } // https://w3c.github.io/navigation-timing/#widl-PerformanceTiming-domContentLoadedEventEnd fn DomContentLoadedEventEnd(&self) -> u64 { self.document.get_dom_content_loaded_event_end() } // https://w3c.github.io/navigation-timing/#widl-PerformanceTiming-domComplete fn DomComplete(&self) -> u64 { self.document.get_dom_complete() } // https://w3c.github.io/navigation-timing/#widl-PerformanceTiming-loadEventStart fn LoadEventStart(&self) -> u64 { self.document.get_load_event_start() } // https://w3c.github.io/navigation-timing/#widl-PerformanceTiming-loadEventEnd fn LoadEventEnd(&self) -> u64 { self.document.get_load_event_end() } // check-tidy: no specs after this line // Servo-only timing for when top-level content (not iframes) is complete fn TopLevelDomComplete(&self) -> u64 { self.document.get_top_level_dom_complete() } } impl PerformanceTiming { pub fn navigation_start_precise(&self) -> u64 { self.navigation_start_precise } }<|fim▁end|>
<|file_name|>Track.java<|end_file_name|><|fim▁begin|>package com.benjaminsproule.cloud.domain; import lombok.AccessLevel; import lombok.Data; import lombok.NoArgsConstructor; import lombok.Setter; @Data @Setter(AccessLevel.PACKAGE)<|fim▁hole|> private String path; private Long trackNumber; private String title; private String artist; private String album; private String genre; private String rating; private Long length; private boolean offline; private String encodingType; private ServiceName serviceName; private Long lastModified; }<|fim▁end|>
@NoArgsConstructor(access = AccessLevel.PACKAGE) public class Track { private String id;
<|file_name|>Ui_configdialog.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'configdialog.ui' # # by: PyQt4 UI code generator 4.5.4 # # WARNING! All changes made in this file will be lost! from PyQt4 import QtCore, QtGui<|fim▁hole|>class Ui_Dialog(object): def setupUi(self, Dialog): Dialog.setObjectName("Dialog") Dialog.resize(993, 455) icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(":/icons/kaddressbook.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off) Dialog.setWindowIcon(icon) self.verticalLayout_6 = QtGui.QVBoxLayout(Dialog) self.verticalLayout_6.setObjectName("verticalLayout_6") self.splitter = QtGui.QSplitter(Dialog) self.splitter.setOrientation(QtCore.Qt.Horizontal) self.splitter.setObjectName("splitter") self.pagelist = QtGui.QListWidget(self.splitter) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Expanding) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.pagelist.sizePolicy().hasHeightForWidth()) self.pagelist.setSizePolicy(sizePolicy) self.pagelist.setMaximumSize(QtCore.QSize(180, 16777215)) self.pagelist.setObjectName("pagelist") self.layoutWidget = QtGui.QWidget(self.splitter) self.layoutWidget.setObjectName("layoutWidget") self.verticalLayout = QtGui.QVBoxLayout(self.layoutWidget) self.verticalLayout.setObjectName("verticalLayout") self.container = QtGui.QScrollArea(self.layoutWidget) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding) sizePolicy.setHorizontalStretch(5) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.container.sizePolicy().hasHeightForWidth()) self.container.setSizePolicy(sizePolicy) self.container.setFrameShape(QtGui.QFrame.NoFrame) self.container.setWidgetResizable(True) self.container.setObjectName("container") self.scrollAreaWidgetContents = QtGui.QWidget(self.container) self.scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 
241, 399)) self.scrollAreaWidgetContents.setObjectName("scrollAreaWidgetContents") self.verticalLayout_3 = QtGui.QVBoxLayout(self.scrollAreaWidgetContents) self.verticalLayout_3.setMargin(0) self.verticalLayout_3.setObjectName("verticalLayout_3") self.layout = QtGui.QVBoxLayout() self.layout.setObjectName("layout") self.verticalLayout_3.addLayout(self.layout) self.container.setWidget(self.scrollAreaWidgetContents) self.verticalLayout.addWidget(self.container) self.horizontalLayout = QtGui.QHBoxLayout() self.horizontalLayout.setObjectName("horizontalLayout") spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum) self.horizontalLayout.addItem(spacerItem) self.pushButton_2 = QtGui.QPushButton(self.layoutWidget) self.pushButton_2.setObjectName("pushButton_2") self.horizontalLayout.addWidget(self.pushButton_2) self.verticalLayout.addLayout(self.horizontalLayout) self.tabWidget = QtGui.QTabWidget(self.splitter) self.tabWidget.setObjectName("tabWidget") self.tab = QtGui.QWidget() self.tab.setObjectName("tab") self.verticalLayout_2 = QtGui.QVBoxLayout(self.tab) self.verticalLayout_2.setMargin(0) self.verticalLayout_2.setObjectName("verticalLayout_2") self.scrollArea = QtGui.QScrollArea(self.tab) self.scrollArea.setWidgetResizable(True) self.scrollArea.setObjectName("scrollArea") self.scrollAreaWidgetContents_2 = QtGui.QWidget(self.scrollArea) self.scrollAreaWidgetContents_2.setGeometry(QtCore.QRect(0, 0, 532, 405)) self.scrollAreaWidgetContents_2.setObjectName("scrollAreaWidgetContents_2") self.verticalLayout_4 = QtGui.QVBoxLayout(self.scrollAreaWidgetContents_2) self.verticalLayout_4.setSpacing(3) self.verticalLayout_4.setContentsMargins(0, 3, 0, -1) self.verticalLayout_4.setObjectName("verticalLayout_4") self.horizontalLayout_4 = QtGui.QHBoxLayout() self.horizontalLayout_4.setObjectName("horizontalLayout_4") self.zoomin = QtGui.QToolButton(self.scrollAreaWidgetContents_2) icon1 = QtGui.QIcon() 
icon1.addPixmap(QtGui.QPixmap(":/icons/viewmag+.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.zoomin.setIcon(icon1) self.zoomin.setObjectName("zoomin") self.horizontalLayout_4.addWidget(self.zoomin) self.zoomout = QtGui.QToolButton(self.scrollAreaWidgetContents_2) icon2 = QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(":/icons/viewmag-.svg"), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.zoomout.setIcon(icon2) self.zoomout.setObjectName("zoomout") self.horizontalLayout_4.addWidget(self.zoomout) spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum) self.horizontalLayout_4.addItem(spacerItem1) self.verticalLayout_4.addLayout(self.horizontalLayout_4) self.preview = QtGui.QLabel(self.scrollAreaWidgetContents_2) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.preview.sizePolicy().hasHeightForWidth()) self.preview.setSizePolicy(sizePolicy) self.preview.setFrameShape(QtGui.QFrame.NoFrame) self.preview.setObjectName("preview") self.verticalLayout_4.addWidget(self.preview) spacerItem2 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding) self.verticalLayout_4.addItem(spacerItem2) self.scrollArea.setWidget(self.scrollAreaWidgetContents_2) self.verticalLayout_2.addWidget(self.scrollArea) self.tabWidget.addTab(self.tab, "") self.tab_2 = QtGui.QWidget() self.tab_2.setObjectName("tab_2") self.verticalLayout_5 = QtGui.QVBoxLayout(self.tab_2) self.verticalLayout_5.setMargin(0) self.verticalLayout_5.setObjectName("verticalLayout_5") self.snippet = QtGui.QTextBrowser(self.tab_2) self.snippet.setObjectName("snippet") self.verticalLayout_5.addWidget(self.snippet) self.tabWidget.addTab(self.tab_2, "") self.verticalLayout_6.addWidget(self.splitter) self.retranslateUi(Dialog) self.tabWidget.setCurrentIndex(0) QtCore.QObject.connect(self.pushButton_2, QtCore.SIGNAL("clicked()"), 
Dialog.accept) QtCore.QMetaObject.connectSlotsByName(Dialog) def retranslateUi(self, Dialog): Dialog.setWindowTitle(QtGui.QApplication.translate("Dialog", "Bookrest Settings", None, QtGui.QApplication.UnicodeUTF8)) self.pushButton_2.setText(QtGui.QApplication.translate("Dialog", "Close", None, QtGui.QApplication.UnicodeUTF8)) self.zoomin.setText(QtGui.QApplication.translate("Dialog", "...", None, QtGui.QApplication.UnicodeUTF8)) self.zoomout.setText(QtGui.QApplication.translate("Dialog", "...", None, QtGui.QApplication.UnicodeUTF8)) self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), QtGui.QApplication.translate("Dialog", "Preview", None, QtGui.QApplication.UnicodeUTF8)) self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), QtGui.QApplication.translate("Dialog", "Output", None, QtGui.QApplication.UnicodeUTF8)) import icons_rc if __name__ == "__main__": import sys app = QtGui.QApplication(sys.argv) Dialog = QtGui.QDialog() ui = Ui_Dialog() ui.setupUi(Dialog) Dialog.show() sys.exit(app.exec_())<|fim▁end|>
<|file_name|>async_utils.rs<|end_file_name|><|fim▁begin|>// Copyright 2021 The Chromium OS Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. //! Virtio device async helper functions. use std::cell::RefCell; use std::rc::Rc; use anyhow::{Context, Result}; use base::Event; use cros_async::{EventAsync, Executor}; use super::{Interrupt, SignalableInterrupt}; /// Async task that waits for a signal from `event`. Once this event is readable, exit. Exiting /// this future will cause the main loop to break and the worker thread to exit. pub async fn await_and_exit(ex: &Executor, event: Event) -> Result<()> { let event_async = EventAsync::new(event.0, ex).context("failed to create EventAsync")?; let _ = event_async.next_val().await; Ok(()) } /// Async task that resamples the status of the interrupt when the guest sends a request by /// signalling the resample event associated with the interrupt. pub async fn handle_irq_resample(ex: &Executor, interrupt: Rc<RefCell<Interrupt>>) -> Result<()> {<|fim▁hole|> // This is a separate block so that we do not hold a RefCell borrow across await. let resample_evt = if let Some(resample_evt) = interrupt.borrow().get_resample_evt() { let resample_evt = resample_evt .try_clone() .context("resample_evt.try_clone() failed")?; Some(EventAsync::new(resample_evt.0, ex).context("failed to create async resample event")?) } else { None }; if let Some(resample_evt) = resample_evt { loop { let _ = resample_evt .next_val() .await .context("failed to read resample event")?; interrupt.borrow().do_interrupt_resample(); } } else { // No resample event; park the future. let () = futures::future::pending().await; } Ok(()) }<|fim▁end|>
// Clone resample_evt if interrupt has one.
<|file_name|>MessagesPage.js<|end_file_name|><|fim▁begin|>/** * Created by Rory on 12/14/2015. */ Template.MessagesPage.helpers({ getMessages: function () { if (Meteor.user()) {<|fim▁hole|> getSender: function () { return {username:this.sender}; }, getTextbook: function () { return {_title:this.title}; }, isSellMessage: function () { if(this.offerType === "sell" && !this.acceptMessage) { return true; } else { return false; } }, isAcceptSellMessage: function () { if(this.offerType === "sell" && this.acceptMessage) { return true; } else { return false; } }, isBuyMessage: function () { if(this.offerType === "buy" && !this.acceptMessage) { return true; } else { return false; } }, isAcceptBuyMessage: function () { if(this.offerType === "buy" && this.acceptMessage) { return true; } else { return false; } } });<|fim▁end|>
return Messages.find({owner:Meteor.user().username}); } },
<|file_name|>ttlcontroller.go<|end_file_name|><|fim▁begin|>/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // The TTLController sets ttl annotations on nodes, based on cluster size. // The annotations are consumed by Kubelets as suggestions for how long // it can cache objects (e.g. secrets or config maps) before refetching // from apiserver again. // // TODO: This is a temporary workaround for the Kubelet not being able to // send "watch secrets attached to pods from my node" request. Once // sending such request will be possible, we will modify Kubelet to // use it and get rid of this controller completely. package ttl import ( "fmt" "math" "strconv" "sync" "time" "k8s.io/api/core/v1" apierrors "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/types" "k8s.io/apimachinery/pkg/util/json" utilruntime "k8s.io/apimachinery/pkg/util/runtime" "k8s.io/apimachinery/pkg/util/strategicpatch" "k8s.io/apimachinery/pkg/util/wait" informers "k8s.io/client-go/informers/core/v1" clientset "k8s.io/client-go/kubernetes" "k8s.io/client-go/kubernetes/scheme" listers "k8s.io/client-go/listers/core/v1" "k8s.io/client-go/tools/cache" "k8s.io/client-go/util/workqueue" "k8s.io/kubernetes/pkg/controller" "github.com/golang/glog" ) type TTLController struct { kubeClient clientset.Interface // nodeStore is a local cache of nodes. nodeStore listers.NodeLister // Nodes that need to be synced. 
queue workqueue.RateLimitingInterface // Returns true if all underlying informers are synced. hasSynced func() bool lock sync.RWMutex // Number of nodes in the cluster. nodeCount int // Desired TTL for all nodes in the cluster. desiredTTLSeconds int // In which interval of cluster size we currently are. boundaryStep int } func NewTTLController(nodeInformer informers.NodeInformer, kubeClient clientset.Interface) *TTLController { ttlc := &TTLController{ kubeClient: kubeClient, queue: workqueue.NewNamedRateLimitingQueue(workqueue.DefaultControllerRateLimiter(), "ttlcontroller"), } nodeInformer.Informer().AddEventHandler(cache.ResourceEventHandlerFuncs{ AddFunc: ttlc.addNode, UpdateFunc: ttlc.updateNode, DeleteFunc: ttlc.deleteNode, }) ttlc.nodeStore = listers.NewNodeLister(nodeInformer.Informer().GetIndexer()) ttlc.hasSynced = nodeInformer.Informer().HasSynced return ttlc } type ttlBoundary struct { sizeMin int sizeMax int ttlSeconds int } var ( ttlBoundaries = []ttlBoundary{ {sizeMin: 0, sizeMax: 100, ttlSeconds: 0}, {sizeMin: 90, sizeMax: 500, ttlSeconds: 15}, {sizeMin: 450, sizeMax: 1000, ttlSeconds: 30}, {sizeMin: 900, sizeMax: 2000, ttlSeconds: 60}, {sizeMin: 1800, sizeMax: 10000, ttlSeconds: 300}, {sizeMin: 9000, sizeMax: math.MaxInt32, ttlSeconds: 600}, } ) func (ttlc *TTLController) Run(workers int, stopCh <-chan struct{}) { defer utilruntime.HandleCrash()<|fim▁hole|> defer glog.Infof("Shutting down TTL controller") if !controller.WaitForCacheSync("TTL", stopCh, ttlc.hasSynced) { return } for i := 0; i < workers; i++ { go wait.Until(ttlc.worker, time.Second, stopCh) } <-stopCh } func (ttlc *TTLController) addNode(obj interface{}) { node, ok := obj.(*v1.Node) if !ok { utilruntime.HandleError(fmt.Errorf("unexpected object type: %v", obj)) return } func() { ttlc.lock.Lock() defer ttlc.lock.Unlock() ttlc.nodeCount++ if ttlc.nodeCount > ttlBoundaries[ttlc.boundaryStep].sizeMax { ttlc.boundaryStep++ ttlc.desiredTTLSeconds = 
ttlBoundaries[ttlc.boundaryStep].ttlSeconds } }() ttlc.enqueueNode(node) } func (ttlc *TTLController) updateNode(_, newObj interface{}) { node, ok := newObj.(*v1.Node) if !ok { utilruntime.HandleError(fmt.Errorf("unexpected object type: %v", newObj)) return } // Processing all updates of nodes guarantees that we will update // the ttl annotation, when cluster size changes. // We are relying on the fact that Kubelet is updating node status // every 10s (or generally every X seconds), which means that whenever // required, its ttl annotation should be updated within that period. ttlc.enqueueNode(node) } func (ttlc *TTLController) deleteNode(obj interface{}) { _, ok := obj.(*v1.Node) if !ok { tombstone, ok := obj.(cache.DeletedFinalStateUnknown) if !ok { utilruntime.HandleError(fmt.Errorf("unexpected object type: %v", obj)) return } _, ok = tombstone.Obj.(*v1.Node) if !ok { utilruntime.HandleError(fmt.Errorf("unexpected object types: %v", obj)) return } } func() { ttlc.lock.Lock() defer ttlc.lock.Unlock() ttlc.nodeCount-- if ttlc.nodeCount < ttlBoundaries[ttlc.boundaryStep].sizeMin { ttlc.boundaryStep-- ttlc.desiredTTLSeconds = ttlBoundaries[ttlc.boundaryStep].ttlSeconds } }() // We are not processing the node, as it no longer exists. 
} func (ttlc *TTLController) enqueueNode(node *v1.Node) { key, err := controller.KeyFunc(node) if err != nil { glog.Errorf("Couldn't get key for object %+v", node) return } ttlc.queue.Add(key) } func (ttlc *TTLController) worker() { for ttlc.processItem() { } } func (ttlc *TTLController) processItem() bool { key, quit := ttlc.queue.Get() if quit { return false } defer ttlc.queue.Done(key) err := ttlc.updateNodeIfNeeded(key.(string)) if err == nil { ttlc.queue.Forget(key) return true } ttlc.queue.AddRateLimited(key) utilruntime.HandleError(err) return true } func (ttlc *TTLController) getDesiredTTLSeconds() int { ttlc.lock.RLock() defer ttlc.lock.RUnlock() return ttlc.desiredTTLSeconds } func getIntFromAnnotation(node *v1.Node, annotationKey string) (int, bool) { if node.Annotations == nil { return 0, false } annotationValue, ok := node.Annotations[annotationKey] if !ok { return 0, false } intValue, err := strconv.Atoi(annotationValue) if err != nil { glog.Warningf("Cannot convert the value %q with annotation key %q for the node %q", annotationValue, annotationKey, node.Name) return 0, false } return intValue, true } func setIntAnnotation(node *v1.Node, annotationKey string, value int) { if node.Annotations == nil { node.Annotations = make(map[string]string) } node.Annotations[annotationKey] = strconv.Itoa(value) } func (ttlc *TTLController) patchNodeWithAnnotation(node *v1.Node, annotationKey string, value int) error { oldData, err := json.Marshal(node) if err != nil { return err } setIntAnnotation(node, annotationKey, value) newData, err := json.Marshal(node) if err != nil { return err } patchBytes, err := strategicpatch.CreateTwoWayMergePatch(oldData, newData, &v1.Node{}) if err != nil { return err } _, err = ttlc.kubeClient.Core().Nodes().Patch(node.Name, types.StrategicMergePatchType, patchBytes) if err != nil { glog.V(2).Infof("Failed to change ttl annotation for node %s: %v", node.Name, err) return err } glog.V(2).Infof("Changed ttl annotation for node %s to 
%d seconds", node.Name, value) return nil } func (ttlc *TTLController) updateNodeIfNeeded(key string) error { node, err := ttlc.nodeStore.Get(key) if err != nil { if apierrors.IsNotFound(err) { return nil } return err } desiredTTL := ttlc.getDesiredTTLSeconds() currentTTL, ok := getIntFromAnnotation(node, v1.ObjectTTLAnnotationKey) if ok && currentTTL == desiredTTL { return nil } objCopy, err := scheme.Scheme.DeepCopy(node) if err != nil { return err } return ttlc.patchNodeWithAnnotation(objCopy.(*v1.Node), v1.ObjectTTLAnnotationKey, desiredTTL) }<|fim▁end|>
defer ttlc.queue.ShutDown() glog.Infof("Starting TTL controller")
<|file_name|>issue-42234-unknown-receiver-type.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license<|fim▁hole|>// option. This file may not be copied, modified, or distributed // except according to those terms. // When the type of a method call's receiver is unknown, the span should point // to the receiver (and not the entire call, as was previously the case before // the fix of which this tests). fn shines_a_beacon_through_the_darkness() { let x: Option<_> = None; x.unwrap().method_that_could_exist_on_some_type(); //~^ ERROR 17:5: 17:15: type annotations needed } fn courier_to_des_moines_and_points_west(data: &[u32]) -> String { data.iter() //~ ERROR 22:5: 23:20: type annotations needed .sum::<_>() .to_string() } fn main() {}<|fim▁end|>
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
<|file_name|>interface.go<|end_file_name|><|fim▁begin|><|fim▁hole|>/* Copyright The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Code generated by main. DO NOT EDIT. package k3s import ( clientset "github.com/rancher/k3s/pkg/generated/clientset/versioned" v1 "github.com/rancher/k3s/pkg/generated/controllers/k3s.cattle.io/v1" informers "github.com/rancher/k3s/pkg/generated/informers/externalversions/k3s.cattle.io" "github.com/rancher/wrangler/pkg/generic" ) type Interface interface { V1() v1.Interface } type group struct { controllerManager *generic.ControllerManager informers informers.Interface client clientset.Interface } // New returns a new Interface. func New(controllerManager *generic.ControllerManager, informers informers.Interface, client clientset.Interface) Interface { return &group{ controllerManager: controllerManager, informers: informers, client: client, } } func (g *group) V1() v1.Interface { return v1.New(g.controllerManager, g.client.K3sV1(), g.informers.V1()) }<|fim▁end|>
<|file_name|>app.js<|end_file_name|><|fim▁begin|>// app.js /*jslint node: true */ 'use strict'; <|fim▁hole|>var mongoose = require('mongoose'); var flash = require('connect-flash'); var morgan = require('morgan'); var cookieParser = require('cookie-parser'); var bodyParser = require('body-parser'); var session = require('express-session'); var hbs = require('hbs'); // Handlebars templating engine var configDB = require('./config/database.js'); // Load our db configuration mongoose.connect(configDB.url); // Connect to our db. require('./config/passport')(passport); // Configure passport authentication var app = express(); // Set up express app app.use(compression({ threshold: 512 })); app.set('json spaces', 0); app.set('views', __dirname + '/views'); app.use(express.logger('dev')); app.use(express.errorHandler()); app.use(express.methodOverride()); app.use(express.cookieParser('flyingfish')); app.use(express.session()); app.set('view engine', 'html'); app.engine('html', hbs.__express); app.use(express.static(__dirname + '/public')); app.use(passport.initialize()); app.use(passport.session()); app.use(app.router); require('./routes')(app, passport); // Load our routes and pass in our app and passport app.listen(process.env.PORT || 3000);<|fim▁end|>
var compression = require('compression'); var express = require('express'); var passport = require('passport');
<|file_name|>url.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use dom::bindings::cell::DOMRefCell; use dom::bindings::codegen::Bindings::URLBinding::{self, URLMethods}; use dom::bindings::error::{Error, ErrorResult, Fallible}; use dom::bindings::global::GlobalRef; use dom::bindings::js::{JS, MutNullableHeap, Root}; use dom::bindings::reflector::{Reflectable, Reflector, reflect_dom_object}; use dom::bindings::str::USVString; use dom::urlhelper::UrlHelper; use dom::urlsearchparams::URLSearchParams; use std::borrow::ToOwned; use std::default::Default; use url::{Host, Url, UrlParser}; use util::str::DOMString; // https://url.spec.whatwg.org/#url<|fim▁hole|>#[dom_struct] pub struct URL { reflector_: Reflector, // https://url.spec.whatwg.org/#concept-url-url url: DOMRefCell<Url>, // https://url.spec.whatwg.org/#dom-url-searchparams search_params: MutNullableHeap<JS<URLSearchParams>>, } impl URL { fn new_inherited(url: Url) -> URL { URL { reflector_: Reflector::new(), url: DOMRefCell::new(url), search_params: Default::default(), } } pub fn new(global: GlobalRef, url: Url) -> Root<URL> { reflect_dom_object(box URL::new_inherited(url), global, URLBinding::Wrap) } pub fn set_query(&self, query: String) { self.url.borrow_mut().query = Some(query); } } impl URL { // https://url.spec.whatwg.org/#constructors pub fn Constructor(global: GlobalRef, url: USVString, base: Option<USVString>) -> Fallible<Root<URL>> { let parsed_base = match base { None => { // Step 1. None }, Some(base) => // Step 2.1. match Url::parse(&base.0) { Ok(base) => Some(base), Err(error) => { // Step 2.2. return Err(Error::Type(format!("could not parse base: {}", error))); } } }; // Step 3. 
let parsed_url = { let mut parser = UrlParser::new(); if let Some(parsed_base) = parsed_base.as_ref() { parser.base_url(parsed_base); } match parser.parse(&url.0) { Ok(url) => url, Err(error) => { // Step 4. return Err(Error::Type(format!("could not parse URL: {}", error))); } } }; // Step 5: Skip (see step 8 below). // Steps 6-7. let result = URL::new(global, parsed_url); // Step 8: Instead of construcing a new `URLSearchParams` object here, construct it // on-demand inside `URL::SearchParams`. // Step 9. Ok(result) } // https://url.spec.whatwg.org/#dom-url-domaintoasciidomain pub fn DomainToASCII(_: GlobalRef, origin: USVString) -> USVString { // Step 1. let ascii_domain = Host::parse(&origin.0); if let Ok(Host::Domain(string)) = ascii_domain { // Step 3. USVString(string.to_owned()) } else { // Step 2. USVString("".to_owned()) } } } impl URLMethods for URL { // https://url.spec.whatwg.org/#dom-url-hash fn Hash(&self) -> USVString { UrlHelper::Hash(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-hash fn SetHash(&self, value: USVString) { UrlHelper::SetHash(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-host fn Host(&self) -> USVString { UrlHelper::Host(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-host fn SetHost(&self, value: USVString) { UrlHelper::SetHost(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-hostname fn Hostname(&self) -> USVString { UrlHelper::Hostname(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-hostname fn SetHostname(&self, value: USVString) { UrlHelper::SetHostname(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-href fn Href(&self) -> USVString { UrlHelper::Href(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-href fn SetHref(&self, value: USVString) -> ErrorResult { match Url::parse(&value.0) { Ok(url) => { *self.url.borrow_mut() = url; Ok(()) }, Err(error) => { Err(Error::Type(format!("could 
not parse URL: {}", error))) }, } } // https://url.spec.whatwg.org/#dom-url-password fn Password(&self) -> USVString { UrlHelper::Password(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-password fn SetPassword(&self, value: USVString) { UrlHelper::SetPassword(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-pathname fn Pathname(&self) -> USVString { UrlHelper::Pathname(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-pathname fn SetPathname(&self, value: USVString) { UrlHelper::SetPathname(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-port fn Port(&self) -> USVString { UrlHelper::Port(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-port fn SetPort(&self, value: USVString) { UrlHelper::SetPort(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-protocol fn Protocol(&self) -> USVString { UrlHelper::Protocol(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-protocol fn SetProtocol(&self, value: USVString) { UrlHelper::SetProtocol(&mut self.url.borrow_mut(), value); } // https://url.spec.whatwg.org/#dom-url-origin fn Origin(&self) -> USVString { UrlHelper::Origin(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-search fn Search(&self) -> USVString { UrlHelper::Search(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-search fn SetSearch(&self, value: USVString) { UrlHelper::SetSearch(&mut self.url.borrow_mut(), value); if let Some(search_params) = self.search_params.get() { search_params.set_list(self.url.borrow().query_pairs().unwrap_or_else(|| vec![])); } } // https://url.spec.whatwg.org/#dom-url-searchparams fn SearchParams(&self) -> Root<URLSearchParams> { self.search_params.or_init(|| URLSearchParams::new(self.global().r(), Some(self))) } // https://url.spec.whatwg.org/#dom-url-href fn Stringifier(&self) -> DOMString { DOMString::from(self.Href().0) } // https://url.spec.whatwg.org/#dom-url-username fn 
Username(&self) -> USVString { UrlHelper::Username(&self.url.borrow()) } // https://url.spec.whatwg.org/#dom-url-username fn SetUsername(&self, value: USVString) { UrlHelper::SetUsername(&mut self.url.borrow_mut(), value); } }<|fim▁end|>
<|file_name|>hlo_to_ir_bindings.cc<|end_file_name|><|fim▁begin|>/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #include "tensorflow/compiler/xla/service/gpu/hlo_to_ir_bindings.h" #include "llvm/IR/BasicBlock.h" #include "llvm/IR/Function.h" #include "llvm/IR/Instructions.h" #include "tensorflow/compiler/xla/service/gpu/ir_emission_utils.h" #include "tensorflow/compiler/xla/service/hlo_opcode.h" #include "tensorflow/compiler/xla/service/llvm_ir/llvm_util.h" #include "tensorflow/compiler/xla/service/llvm_ir/tuple_ops.h" #include "tensorflow/core/lib/strings/str_util.h" #include "tensorflow/core/lib/strings/strcat.h" #include "tensorflow/core/platform/logging.h" #include "tensorflow/core/platform/types.h" namespace xla { namespace gpu { using tensorflow::strings::StrAppend; using tensorflow::strings::StrCat; void HloToIrBindings::EmitBasePointersForHlos( tensorflow::gtl::ArraySlice<const HloInstruction*> io_hlos, tensorflow::gtl::ArraySlice<const HloInstruction*> non_io_hlos) { // I/O HLOs are bound to the arguments of the current IR function. I.e., // // void IrFunction(io_0, io_1, ..., io_{m-1}, temp_buffer_base) { llvm::Function* function = ir_builder_->GetInsertBlock()->getParent(); CHECK_EQ(io_hlos.size() + 1, function->arg_size()); // An HLO can have duplicated operands. 
This data structure remembers which // operand HLOs are already bound to avoid rebinding the same HLO. std::set<const HloInstruction*> already_bound_for_this_function; auto arg_iter = function->arg_begin(); for (const HloInstruction* io_hlo : io_hlos) { if (!already_bound_for_this_function.count(io_hlo)) { if (!is_nested_ && io_hlo->opcode() == HloOpcode::kGetTupleElement) { BindHloToIrValue(*io_hlo, EmitGetTupleElement(io_hlo, &*arg_iter)); } else { BindHloToIrValue(*io_hlo, &*arg_iter); } already_bound_for_this_function.insert(io_hlo); } ++arg_iter; } temp_buffer_base_ = &*arg_iter; temp_buffer_base_->setName("temp_buffer"); for (const HloInstruction* non_io_hlo : non_io_hlos) { if (already_bound_for_this_function.count(non_io_hlo)) { continue; } already_bound_for_this_function.insert(non_io_hlo); if (non_io_hlo->opcode() == HloOpcode::kGetTupleElement) { if (!is_nested_) { // Lookup allocation GetTupleElement operand. const BufferAllocation::Slice slice = buffer_assignment_ ->GetUniqueTopLevelSlice(non_io_hlo->LatestNonGteAncestor()) .ConsumeValueOrDie(); // We are not in a nested context, so check non-thread-local allocation. CHECK(!slice.allocation()->is_thread_local()); const int64 offset = slice.offset(); CHECK_NE(nullptr, temp_buffer_base_); // Emit IR for GetTupleElement instruction and bind to emitted value. llvm::Value* base_ptr = ir_builder_->CreateInBoundsGEP( temp_buffer_base_, ir_builder_->getInt64(offset)); BindHloToIrValue(*non_io_hlo, EmitGetTupleElement(non_io_hlo, base_ptr)); } continue; } if (!buffer_assignment_->HasTopLevelAllocation(non_io_hlo)) { continue; } ShapeUtil::ForEachSubshape( non_io_hlo->shape(), [&](const Shape& /*subshape*/, const ShapeIndex& index) { // A non-IO HLO with a buffer is bound to // (1) an alloca if it is thread-local, or // (2) an internal pointer in temp_buffer_base according to its // offset. 
auto slice_result = buffer_assignment_->GetUniqueSlice(non_io_hlo, index); if (!slice_result.ok()) { return; } const BufferAllocation::Slice slice = slice_result.ConsumeValueOrDie(); if (slice.allocation()->is_thread_local()) { llvm::Type* pointee_type = llvm_ir::ShapeToIrType(non_io_hlo->shape(), module_); BindHloToIrValue(*non_io_hlo, ir_builder_->CreateAlloca(pointee_type), index); } else { const int64 offset = slice.offset(); CHECK_NE(nullptr, temp_buffer_base_); BindHloToIrValue( *non_io_hlo, ir_builder_->CreateInBoundsGEP(temp_buffer_base_, ir_builder_->getInt64(offset)), index); } }); } } llvm::Value* HloToIrBindings::EmitGetTupleElement(const HloInstruction* gte, llvm::Value* base_ptr) { // TODO(b/26344050): tighten the alignment based on the real element type. if (gte->operand(0)->opcode() != HloOpcode::kGetTupleElement) { return llvm_ir::EmitGetTupleElement( gte->shape(), gte->tuple_index(), /*alignment=*/1, GetTypedIrValue(*gte->operand(0), {}, base_ptr), ir_builder_, module_); } return llvm_ir::EmitGetTupleElement( gte->shape(), gte->tuple_index(), /*alignment=*/1, EmitGetTupleElement(gte->operand(0), base_ptr), ir_builder_, module_); } llvm::Value* HloToIrBindings::GetTypedIrValue(const HloInstruction& hlo, ShapeIndexView shape_index, llvm::Value* ir_value) { llvm::Type* pointee_type = llvm_ir::ShapeToIrType( ShapeUtil::GetSubshape(hlo.shape(), shape_index), module_); llvm::Type* dest_type = pointee_type->getPointerTo(); <|fim▁hole|> if (llvm::isa<llvm::GlobalVariable>(ir_value)) { typed_ir_value = llvm::ConstantExpr::getPointerBitCastOrAddrSpaceCast( llvm::cast<llvm::GlobalVariable>(ir_value), dest_type); } else { typed_ir_value = ir_builder_->CreateBitCast(ir_value, pointee_type->getPointerTo()); } ir_value->setName(llvm_ir::AsStringRef(llvm_ir::IrName(&hlo, "raw"))); typed_ir_value->setName(llvm_ir::AsStringRef(llvm_ir::IrName(&hlo, "typed"))); return typed_ir_value; } void HloToIrBindings::BindHloToIrValue(const HloInstruction& hlo, llvm::Value* 
ir_value, ShapeIndexView shape_index) { VLOG(2) << "Binding " << hlo.ToString(); const Shape& hlo_shape = hlo.shape(); llvm::Value* typed_ir_value = GetTypedIrValue(hlo, shape_index, ir_value); if (!BoundToIrValue(hlo)) { // Set the root of ShapeTree first before assigning the element ir value. InsertOrDie(&base_ptrs_, &hlo, ShapeTree<llvm::Value*>(hlo_shape, nullptr)); } *(base_ptrs_[&hlo].mutable_element(shape_index)) = typed_ir_value; } // Determines whether hlo's buffers are never modified within the execution of // consumer. static bool BuffersInvariantWithinConsumer( const HloInstruction& hlo, const HloInstruction& consumer, const BufferAssignment* buffer_assignment) { // Check if consumer is inside a fusion node -- if so, "dereference" it until // we get to a non-fusion node. const HloInstruction* c = &consumer; while (c->IsFused()) { c = c->parent()->FusionInstruction(); } // If, after dereferencing c, we end up with a node that's not inside our // module's top-level computation (say our node is inside a while loop), we // give up on marking array as invariant, because this HLO may be run multiple // times (e.g. multiple while loop iterations, or multiple invocations of a // reducer's computation). TODO(jlebar): We could relax this constraint if we // emitted an llvm.invariant.group.barrier at the end of the computation. 
return c->parent() == c->GetModule()->entry_computation() && buffer_assignment->HaveDisjointSlices(&hlo, &consumer); } llvm_ir::IrArray HloToIrBindings::GetIrArray(const HloInstruction& hlo, const HloInstruction& consumer, const ShapeIndex& shape_index) { llvm::Value* base_ptr = GetBasePointer(hlo, shape_index); CHECK_NE(base_ptr, nullptr) << "Buffer not assigned for shape_index " << shape_index.ToString() << " of " << hlo.ToString(); llvm_ir::IrArray ir_array(base_ptr, ShapeUtil::GetSubshape(hlo.shape(), shape_index)); alias_analysis_.AddAliasingInformationToIrArray(hlo, &ir_array, shape_index); // The GPU backend emits one kernel per top-level HLO, and LLVM views // execution of one kernel as the "whole program" executed on the GPU. // Therefore if hlo's output buffer is not modified within consumer, and if // consumer runs hlo only once (so that it doesn't create two different // outputs), then we can mark ir_array as invariant over the whole program. if (BuffersInvariantWithinConsumer(hlo, consumer, buffer_assignment_)) { VLOG(2) << "Marking " << hlo.name() << " as invariant within " << consumer.name(); ir_array.MarkInvariantOverWholeProgram(&module_->getContext()); } return ir_array; } void HloToIrBindings::UnbindAllLocalIrValues() { std::vector<const HloInstruction*> hlos_to_unbind; for (auto& key_value : base_ptrs_) { if (!llvm::isa<llvm::GlobalVariable>( (key_value.second.element({}))->stripPointerCasts())) { hlos_to_unbind.push_back(key_value.first); } } for (const HloInstruction* hlo_to_unbind : hlos_to_unbind) { VLOG(2) << "Unbinding " << hlo_to_unbind->ToString(); base_ptrs_.erase(hlo_to_unbind); } } string HloToIrBindings::ToString() const { string s = StrCat("** HloToIrBindings **\n"); StrAppend(&s, " is_nested_=", is_nested_, "\n"); StrAppend(&s, " temp_buffer_base_=", llvm_ir::DumpToString(*temp_buffer_base_), "\n"); if (base_ptrs_.empty()) { return s; } // Iterate over all computations in the module in topological order, and print // out the base 
pointers we have in each computation in topological order. for (const HloComputation* computation : base_ptrs_.begin()->first->GetModule()->MakeComputationPostOrder()) { bool is_first = true; for (const HloInstruction* instr : computation->MakeInstructionPostOrder()) { auto it = base_ptrs_.find(instr); if (it == base_ptrs_.end()) { continue; } if (is_first) { StrAppend(&s, " Base pointers for computation ", computation->name(), ":\n"); is_first = false; } StrAppend(&s, " ", instr->ToString()); const ShapeTree<llvm::Value*>& shape_tree = it->second; if (!ShapeUtil::IsTuple(instr->shape())) { const llvm::Value* val = shape_tree.begin()->second; StrAppend(&s, " -> ", llvm_ir::DumpToString(*val), "\n"); continue; } StrAppend(&s, "\n"); for (auto shape_it = shape_tree.begin(); shape_it != shape_tree.end(); ++shape_it) { llvm::Value* val = shape_it->second; StrAppend(&s, " ", shape_it->first.ToString(), " -> ", (val != nullptr ? llvm_ir::DumpToString(*val) : "null"), "\n"); } } } return s; } } // namespace gpu } // namespace xla<|fim▁end|>
llvm::Value* typed_ir_value;
<|file_name|>OnBehalfOfRequest.ts<|end_file_name|><|fim▁begin|>/* * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License.<|fim▁hole|>import { CommonOnBehalfOfRequest } from "@azure/msal-common"; /** * - scopes - Array of scopes the application is requesting access to. * - authority - URL of the authority, the security token service (STS) from which MSAL will acquire tokens. * - correlationId - Unique GUID set per request to trace a request end-to-end for telemetry purposes. * - oboAssertion - The access token that was sent to the middle-tier API. This token must have an audience of the app making this OBO request. * - skipCache - Skip token cache lookup and force request to authority to get a a new token. Defaults to false. * @public */ export type OnBehalfOfRequest = Partial<Omit<CommonOnBehalfOfRequest, "oboAssertion"|"scopes"|"resourceRequestMethod"|"resourceRequestUri"|"requestedClaimsHash">> & { oboAssertion: string; scopes: Array<string>; };<|fim▁end|>
*/
<|file_name|>configuration.py<|end_file_name|><|fim▁begin|><|fim▁hole|># coding:utf-8 """ Author : qbeenslee Created : 2014/12/12 """ import re # 客户端ID号 CLIENT_ID = "TR5kVmYeMEh9M" ''' 传输令牌格式 加密方式$迭代次数$盐$结果串 举个栗子: ====start==== md5$23$YUXQ_-2GfwhzVpt5IQWp$3ebb6e78bf7d0c1938578855982e2b1c ====end==== ''' MATCH_PWD = r"md5\$(\d\d)\$([a-zA-Z0-9_\-]{20})\$([a-f0-9]{32})" REMATCH_PWD = re.compile(MATCH_PWD) # 支持的上传文件格式 SUPPORT_IMAGE_TYPE_LIST = ['image/gif', 'image/jpeg', 'image/png', 'image/bmp', 'image/x-png', 'application/octet-stream'] # 最大上传大小 MAX_UPLOAD_FILE_SIZE = 10485760 # 10*1024*1024 =10M # 最小上传尺寸 MIN_IMAGE_SIZE = {'w': 10, 'h': 10} MAX_IMAGE_SIZE = {'w': 4000, 'h': 4000} # 图片裁剪的尺寸(THUMBNAIL) THUMB_SIZE_SMALL = {'w': 100, 'h': 100, 'thumb': 's'} THUMB_SIZE_NORMAL = {'w': 480, 'h': 480, 'thumb': 'n'} THUMB_SIZE_LARGE = {'w': 3000, 'h': 3000, 'thumb': 'l'} THUMB_SIZE_ORIGIN = {'w': 0, 'h': 0, 'thumb': 'r'} MAX_SHARE_DESCRIPTION_SIZE = 140 NOW_ANDROID_VERSION_CODE = 7 NOW_VERSION_DOWNLOAD_URL = "/static/download/nepenthes-beta0.9.3.apk" MAX_RAND_EMAIL_CODE = 99999 MIN_RAND_EMAIL_CODE = 10000 # 定位精度 PRECISION = 12 LOACTION_PRECISION = 4 PAGE_SIZE = 10<|fim▁end|>
<|file_name|>tcp.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![allow(deprecated)] use prelude::v1::*; use old_io::net::ip; use old_io::IoResult; use libc; use mem; use ptr; use super::{last_error, last_net_error, sock_t};<|fim▁hole|>use sys::{self, c, set_nonblocking, wouldblock, timer}; use sys_common::{timeout, eof, net}; pub use sys_common::net::TcpStream; pub struct Event(c::WSAEVENT); unsafe impl Send for Event {} unsafe impl Sync for Event {} impl Event { pub fn new() -> IoResult<Event> { let event = unsafe { c::WSACreateEvent() }; if event == c::WSA_INVALID_EVENT { Err(super::last_error()) } else { Ok(Event(event)) } } pub fn handle(&self) -> c::WSAEVENT { let Event(handle) = *self; handle } } impl Drop for Event { fn drop(&mut self) { unsafe { let _ = c::WSACloseEvent(self.handle()); } } } //////////////////////////////////////////////////////////////////////////////// // TCP listeners //////////////////////////////////////////////////////////////////////////////// pub struct TcpListener { sock: sock_t } unsafe impl Send for TcpListener {} unsafe impl Sync for TcpListener {} impl TcpListener { pub fn bind(addr: ip::SocketAddr) -> IoResult<TcpListener> { sys::init_net(); let sock = try!(net::socket(addr, libc::SOCK_STREAM)); let ret = TcpListener { sock: sock }; let mut storage = unsafe { mem::zeroed() }; let len = net::addr_to_sockaddr(addr, &mut storage); let addrp = &storage as *const _ as *const libc::sockaddr; match unsafe { libc::bind(sock, addrp, len) } { -1 => Err(last_net_error()), _ => Ok(ret), } } pub 
fn socket(&self) -> sock_t { self.sock } pub fn listen(self, backlog: int) -> IoResult<TcpAcceptor> { match unsafe { libc::listen(self.socket(), backlog as libc::c_int) } { -1 => Err(last_net_error()), _ => { let accept = try!(Event::new()); let ret = unsafe { c::WSAEventSelect(self.socket(), accept.handle(), c::FD_ACCEPT) }; if ret != 0 { return Err(last_net_error()) } Ok(TcpAcceptor { inner: Arc::new(AcceptorInner { listener: self, abort: try!(Event::new()), accept: accept, closed: AtomicBool::new(false), }), deadline: 0, }) } } } pub fn socket_name(&mut self) -> IoResult<ip::SocketAddr> { net::sockname(self.socket(), libc::getsockname) } } impl Drop for TcpListener { fn drop(&mut self) { unsafe { super::close_sock(self.sock); } } } pub struct TcpAcceptor { inner: Arc<AcceptorInner>, deadline: u64, } struct AcceptorInner { listener: TcpListener, abort: Event, accept: Event, closed: AtomicBool, } unsafe impl Sync for AcceptorInner {} impl TcpAcceptor { pub fn socket(&self) -> sock_t { self.inner.listener.socket() } pub fn accept(&mut self) -> IoResult<TcpStream> { // Unlink unix, windows cannot invoke `select` on arbitrary file // descriptors like pipes, only sockets. Consequently, windows cannot // use the same implementation as unix for accept() when close_accept() // is considered. // // In order to implement close_accept() and timeouts, windows uses // event handles. An acceptor-specific abort event is created which // will only get set in close_accept(), and it will never be un-set. // Additionally, another acceptor-specific event is associated with the // FD_ACCEPT network event. // // These two events are then passed to WaitForMultipleEvents to see // which one triggers first, and the timeout passed to this function is // the local timeout for the acceptor. // // If the wait times out, then the accept timed out. 
If the wait // succeeds with the abort event, then we were closed, and if the wait // succeeds otherwise, then we do a nonblocking poll via `accept` to // see if we can accept a connection. The connection is candidate to be // stolen, so we do all of this in a loop as well. let events = [self.inner.abort.handle(), self.inner.accept.handle()]; while !self.inner.closed.load(Ordering::SeqCst) { let ms = if self.deadline == 0 { c::WSA_INFINITE as u64 } else { let now = timer::now(); if self.deadline < now {0} else {self.deadline - now} }; let ret = unsafe { c::WSAWaitForMultipleEvents(2, events.as_ptr(), libc::FALSE, ms as libc::DWORD, libc::FALSE) }; match ret { c::WSA_WAIT_TIMEOUT => { return Err(timeout("accept timed out")) } c::WSA_WAIT_FAILED => return Err(last_net_error()), c::WSA_WAIT_EVENT_0 => break, n => assert_eq!(n, c::WSA_WAIT_EVENT_0 + 1), } let mut wsaevents: c::WSANETWORKEVENTS = unsafe { mem::zeroed() }; let ret = unsafe { c::WSAEnumNetworkEvents(self.socket(), events[1], &mut wsaevents) }; if ret != 0 { return Err(last_net_error()) } if wsaevents.lNetworkEvents & c::FD_ACCEPT == 0 { continue } match unsafe { libc::accept(self.socket(), ptr::null_mut(), ptr::null_mut()) } { -1 if wouldblock() => {} -1 => return Err(last_net_error()), // Accepted sockets inherit the same properties as the caller, // so we need to deregister our event and switch the socket back // to blocking mode socket => { let stream = TcpStream::new(socket); let ret = unsafe { c::WSAEventSelect(socket, events[1], 0) }; if ret != 0 { return Err(last_net_error()) } set_nonblocking(socket, false); return Ok(stream) } } } Err(eof()) } pub fn set_timeout(&mut self, timeout: Option<u64>) { self.deadline = timeout.map(|a| timer::now() + a).unwrap_or(0); } pub fn close_accept(&mut self) -> IoResult<()> { self.inner.closed.store(true, Ordering::SeqCst); let ret = unsafe { c::WSASetEvent(self.inner.abort.handle()) }; if ret == libc::TRUE { Ok(()) } else { Err(last_net_error()) } } } impl Clone 
for TcpAcceptor { fn clone(&self) -> TcpAcceptor { TcpAcceptor { inner: self.inner.clone(), deadline: 0, } } }<|fim▁end|>
use sync::Arc; use sync::atomic::{AtomicBool, Ordering};
<|file_name|>terraindisplay.py<|end_file_name|><|fim▁begin|>"""Module for displaying Terrain, both in 2D and 3D. (Not accessible outside of package; use display methods of Terrain instead.) """ from Tkinter import Tk, Canvas, Frame, BOTH from mpl_toolkits.mplot3d import Axes3D import matplotlib.pyplot as plt import numpy as np class Terrain2D(Frame): """2D graphical representation of a Terrain object. Consists of a 2D top-down image of terrain as a grid of greyscale squares. Each square corresponds to a height value, being on a scale from white if 1 to black if 0. """ SQUARE_SIDE = 3 """Length of one side of colored square.""" @classmethod def display_terrain(cls, terrain): """Display a Terrain in 2D. Args: terrain (Terrain): Terrain to display. """ root = Tk() dimensions = "{0}x{1}".format(terrain.width * Terrain2D.SQUARE_SIDE, terrain.length * Terrain2D.SQUARE_SIDE) root.geometry(dimensions) app = Terrain2D(root, terrain) root.mainloop() def __init__(self, parent, terrain): """Make self child of a TK parent, then initialize own UI. Args: parent (TK): Parent to attach self to. terrain (Terrain): Terrain to display. """ Frame.__init__(self, parent) self.terrain = terrain self.parent = parent self.init_ui() def init_ui(self): """Initialize UI of window.""" self.parent.title("Terrain (top-down)") self.pack(fill=BOTH, expand=1) self.draw_heights() def draw_heights(self): """Draw grid of height values on window. Heights are shown as squares, with greyscale colors becoming brighter for greater heights. 
""" canvas = Canvas(self) for x in range(self.terrain.width): for y in range(self.terrain.length): x_pos = x * Terrain2D.SQUARE_SIDE y_pos = y * Terrain2D.SQUARE_SIDE color = int(self.terrain[x, y] * 15) hex_color = "#" + "0123456789abcdef"[color] * 3 canvas.create_rectangle(x_pos, y_pos, x_pos + Terrain2D.SQUARE_SIDE, y_pos + Terrain2D.SQUARE_SIDE, outline=hex_color, fill=hex_color) canvas.pack(fill=BOTH, expand=1) <|fim▁hole|>class Terrain3D(object): """A 3D representation of a Terrain. Consists of a 3D surface mesh, shown at an angle. Can be seen at different angles. Uses matplotlib.mplot3d to display rudimentary 3D version of terrain. Notes: Is somewhat guaranteed to be slow. Not intended for use other than visualizing terrain during development. """ def __init__(self, terrain): self.terrain = terrain self.x_grid, self.y_grid = np.meshgrid(range(self.terrain.width), range(self.terrain.length)) z_vals = np.array([self.terrain[x, y] for x, y in zip(np.ravel(self.x_grid), np.ravel(self.y_grid))]) self.z_grid = z_vals.reshape(self.x_grid.shape) def display_terrain(self): """Display 3D surface of terrain.""" fig = plt.figure() ax = fig.add_subplot(111, projection='3d') ax.plot_surface(self.x_grid, self.y_grid, self.z_grid) ax.set_zlim(0.0, 1.0) plt.show()<|fim▁end|>
<|file_name|>classify.py<|end_file_name|><|fim▁begin|># This file is part of Bioy # # Bioy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Bioy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Bioy. If not, see <http://www.gnu.org/licenses/>. """DEPRECATED: use the classifier subcommand Classify sequences by grouping blast output by matching taxonomic names Optional grouping by specimen and query sequences """ import sys import logging from csv import DictReader, DictWriter from collections import defaultdict from math import ceil from operator import itemgetter from bioy_pkg import sequtils from bioy_pkg.utils import Opener, opener, Csv2Dict, groupbyl log = logging.getLogger(__name__) def build_parser(parser): parser.add_argument('blast_file', nargs = '?', default = sys.stdin, type = Opener('r'), help = 'CSV tabular blast file of query and subject hits') parser.add_argument('--all-one-group', dest = 'all_one_group', action = 'store_true', help = """If --map is not provided, the default behavior is to treat all reads as one group; use this option to treat each read as a separate group [%(default)s]""") parser.add_argument('-a', '--asterisk', default = 100, metavar='PERCENT', type = float, help = 'Next to any species above a certain threshold [%(default)s]') parser.add_argument('--copy-numbers', metavar = 'CSV', type = Opener(), help = 'columns: tax_id, median') parser.add_argument('-c', '--coverage', default = 95, metavar = 'PERCENT', type = float, help = 'percent of alignment coverage of blast result [%(default)s]') 
parser.add_argument('--details-identity', metavar = 'PERCENT', help = 'Minimum identity to include blast hits in details file', type = float, default = 90) parser.add_argument('--details-full', action = 'store_true', help = 'do not limit out_details to only larget cluster per assignment') parser.add_argument('--exclude-by-taxid', metavar = 'CSV', type = lambda f: set(e for e in DictReader(opener(f), fieldnames ='tax_id')), default = {}, help = 'column: tax_id') parser.add_argument('--group-def', metavar = 'INT', action = 'append', default = [], help = """define a group threshold for a particular rank overriding --target-max-group-size. example: genus:2""") parser.add_argument('--group-label', metavar = 'LABEL', default = 'all', help = 'Single group label for reads') parser.add_argument('-o', '--out', default = sys.stdout, type = Opener('w'), metavar = 'CSV', help = """columns: specimen, max_percent, min_percent, max_coverage, min_coverage, assignment_id, assignment, clusters, reads, pct_reads, corrected, pct_corrected, target_rank, hi, low, tax_ids""") parser.add_argument('-m', '--map', metavar = 'CSV', type = Opener(), default = {}, help = 'columns: name, specimen') parser.add_argument('--max-ambiguous', metavar = 'INT', default = 3, type = int, help = 'Maximum ambiguous count in reference sequences [%(default)s]') parser.add_argument('--max-identity', default = 100, metavar = 'PERCENT', type = float, help = 'maximum identity threshold for accepting matches [<= %(default)s]') parser.add_argument('--min-cluster-size', default = 0, metavar = 'INT', type = int, help = 'minimum cluster size to include in classification output') parser.add_argument('--min-identity', default = 99, metavar = 'PERCENT', type = float, help = 'minimum identity threshold for accepting matches [> %(default)s]') parser.add_argument('-s', '--seq-info', required = True, metavar = 'CSV', type = Opener(), help = 'seq info file(s) to match sequence ids to taxids [%(default)s]') 
parser.add_argument('-t', '--taxonomy', required = True, metavar = 'CSV', type = Csv2Dict('tax_id'), help = 'tax table of taxids and species names [%(default)s]') parser.add_argument('-O', '--out-detail', type = lambda f: DictWriter(opener(f, 'w'), extrasaction = 'ignore', fieldnames = [ 'specimen', 'assignment', 'assignment_id', 'qseqid', 'sseqid', 'pident', 'coverage', 'ambig_count', 'accession', 'tax_id', 'tax_name', 'target_rank', 'rank', 'hi', 'low' ]), metavar = 'CSV', help = """columns: specimen, assignment, assignment_id, qseqid, sseqid, pident, coverage, ambig_count, accession, tax_id, tax_name, target_rank, rank, hi, low""") parser.add_argument('--target-max-group-size', metavar = 'INTEGER', default = 3, type = int, help = """group multiple target-rank assignments that excede a threshold to a higher rank [%(default)s]""") parser.add_argument('--target-rank', metavar='RANK', help = 'Rank at which to classify. Default: "%(default)s"', default = 'species') parser.add_argument('-w', '--weights', metavar = 'CSV', type = Opener(), help = 'columns: name, weight') ### csv.Sniffer.has_header is *not* reliable enough parser.add_argument('--has-header', action = 'store_true', help = 'specify this if blast data has a header') def coverage(start, end, length): return (float(end) - float(start) + 1) / float(length) * 100 def mean(l): l = list(l) return float(sum(l)) / len(l) if len(l) > 0 else 0 def condense(queries, floor_rank, max_size, ranks, rank_thresholds, target_rank = None): target_rank = target_rank or ranks[0] groups = list(groupbyl(queries, key = itemgetter(target_rank))) num_groups = len(groups) if rank_thresholds.get(target_rank, max_size) < num_groups: return queries # assign where available target_rank_ids # groups without 'i' values remain assigned at previous (higher) rank for g in (g for i,g in groups if i): for q in g: q['target_rank_id'] = q[target_rank] # return if we hit the floor if target_rank == floor_rank: return queries # else move down a 
rank target_rank = ranks[ranks.index(target_rank) + 1] # recurse down the tax tree condensed = [] for _,g in groups: c = condense(g, floor_rank, max_size, ranks, rank_thresholds, target_rank) condensed.extend(c) return condensed def action(args): ### format format blast data and add additional available information fieldnames = None if args.has_header else sequtils.BLAST_HEADER_DEFAULT blast_results = DictReader(args.blast_file, fieldnames = fieldnames) blast_results = list(blast_results) sseqids = set(s['sseqid'] for s in blast_results) qseqids = set(s['qseqid'] for s in blast_results) # load seq_info and map file mapfile = DictReader(args.map, fieldnames = ['name', 'specimen']) mapfile = {m['name']:m['specimen'] for m in mapfile if m['name'] in qseqids} seq_info = DictReader(args.seq_info) seq_info = {s['seqname']:s for s in seq_info if s['seqname'] in sseqids} # pident def pident(b): return dict(b, pident = float(b['pident'])) if b['sseqid'] else b blast_results = (pident(b) for b in blast_results) # coverage def cov(b): if b['sseqid'] and b['qcovs']: b['coverage'] = float(b['qcovs']) return b elif b['sseqid']: c = coverage(b['qstart'], b['qend'], b['qlen']) return dict(b, coverage = c) else: return b blast_results = (cov(b) for b in blast_results) # seq info def info(b): return dict(seq_info[b['sseqid']], **b) if b['sseqid'] else b blast_results = (info(b) for b in blast_results) # tax info def tax_info(b): return dict(args.taxonomy[b['tax_id']], **b) if b['sseqid'] else b blast_results = (tax_info(b) for b in blast_results) ### output file headers fieldnames = ['specimen', 'max_percent', 'min_percent', 'max_coverage', 'min_coverage', 'assignment_id', 'assignment'] if args.weights: weights = DictReader(args.weights, fieldnames = ['name', 'weight']) weights = {d['name']:d['weight'] for d in weights if d['name'] in qseqids} fieldnames += ['clusters', 'reads', 'pct_reads'] else: weights = {} if args.copy_numbers: copy_numbers = DictReader(args.copy_numbers) 
copy_numbers = {d['tax_id']:float(d['median']) for d in copy_numbers} fieldnames += ['corrected', 'pct_corrected'] else: copy_numbers = {} # TODO: take out target_rank, hi, low and provide in pipeline using csvmod # TODO: option to include tax_ids (default no) fieldnames += ['target_rank', 'hi', 'low', 'tax_ids'] ### Columns out = DictWriter(args.out, extrasaction = 'ignore', fieldnames = fieldnames) out.writeheader() if args.out_detail: args.out_detail.writeheader() def blast_hit(hit, args): return hit['sseqid'] and \ hit[args.target_rank] and \ hit['coverage'] >= args.coverage and \ float(weights.get(hit['qseqid'], 1)) >= args.min_cluster_size and \ hit[args.target_rank] not in args.exclude_by_taxid and \ hit['qseqid'] != hit['sseqid'] and \ int(hit['ambig_count']) <= args.max_ambiguous ### Rows etc = '[no blast result]' # This row will hold all unmatched # groups have list position prioritization groups = [ ('> {}%'.format(args.max_identity), lambda h: blast_hit(h, args) and h['pident'] > args.max_identity), (None, lambda h: blast_hit(h, args) and args.max_identity >= h['pident'] > args.min_identity), ('<= {}%'.format(args.min_identity), lambda h: blast_hit(h, args) and h['pident'] <= args.min_identity), ] # used later for results output group_cats = map(itemgetter(0), groups) group_cats.append(etc) # assignment rank thresholds rank_thresholds = (d.split(':') for d in args.group_def) rank_thresholds = dict((k, int(v)) for k,v in rank_thresholds) # rt = {k: int(v) for k, v in (d.split(':') for d in args.group_def)} # group by specimen if args.map: specimen_grouper = lambda s: mapfile[s['qseqid']] elif args.all_one_group: specimen_grouper = lambda s: args.group_label else: specimen_grouper = lambda s: s['qseqid'] blast_results = groupbyl(blast_results, key = specimen_grouper) assignments = [] # assignment list for assignment ids for specimen, hits in blast_results: categories = defaultdict(list) # clusters will hold the query ids as hits are matched to categories 
clusters = set() # filter out categories for cat, fltr in groups: matches = filter(fltr, hits) if cat: categories[cat] = matches else: # create sets of tax_rank_id query_group = groupbyl(matches, key = itemgetter('qseqid')) target_cats = defaultdict(list) for _,queries in query_group: queries = condense( queries, args.target_rank, args.target_max_group_size, sequtils.RANKS, rank_thresholds) cat = map(itemgetter('target_rank_id'), queries) cat = frozenset(cat) target_cats[cat].extend(queries) categories = dict(categories, **target_cats) # add query ids that were matched to a filter clusters |= set(map(itemgetter('qseqid'), matches)) # remove all hits corresponding to a matched query id (cluster) hits = filter(lambda h: h['qseqid'] not in clusters, hits) # remaining hits go in the etc ('no match') category categories[etc] = hits # calculate read counts read_counts = dict() for k,v in categories.items(): qseqids = set(map(itemgetter('qseqid'), v)) weight = sum(float(weights.get(q, 1)) for q in qseqids) read_counts[k] = weight taxids = set() for k,v in categories.items(): if k is not etc: for h in v: taxids.add(h['tax_id']) ### list of assigned ids for count corrections assigned_ids = dict() for k,v in categories.items(): if k is not etc and v: assigned_ids[k] = set(map(itemgetter('tax_id'), v)) # correction counts corrected_counts = dict() for k,v in categories.items(): if k is not etc and v: av = mean(copy_numbers.get(t, 1) for t in assigned_ids[k]) corrected_counts[k] = ceil(read_counts[k] / av) # finally take the root value for the etc category corrected_counts[etc] = ceil(read_counts[etc] / copy_numbers.get('1', 1)) # totals for percent calculations later total_reads = sum(v for v in read_counts.values()) total_corrected = sum(v for v in corrected_counts.values()) # Print classifications per specimen sorted by # of reads in reverse (descending) order sort_by_reads_assign = lambda (c,h): corrected_counts.get(c, None) for cat, hits in sorted(categories.items(), key 
= sort_by_reads_assign, reverse = True): # continue if their are hits if hits: # for incrementing assignment id's if cat not in assignments: assignments.append(cat) assignment_id = assignments.index(cat) reads = read_counts[cat] reads_corrected = corrected_counts[cat] clusters = set(map(itemgetter('qseqid'), hits)) results = dict( hi = args.max_identity, low = args.min_identity, target_rank = args.target_rank, specimen = specimen, assignment_id = assignment_id, reads = int(reads), pct_reads = '{0:.2f}'.format(reads / total_reads * 100), corrected = int(reads_corrected), pct_corrected = '{0:.2f}'.format(reads_corrected / total_corrected * 100), clusters = len(clusters)) if cat is etc: assignment = etc results = dict(results, assignment = assignment) else: taxids = set(map(itemgetter('tax_id'), hits)) coverages = set(map(itemgetter('coverage'), hits)) percents = set(map(itemgetter('pident'), hits)) if cat in group_cats: assignment = cat else: names = [args.taxonomy[h['target_rank_id']]['tax_name'] for h in hits] selectors = [h['pident'] >= args.asterisk for h in hits] assignment = sequtils.format_taxonomy(names, selectors, '*') results = dict(results, assignment = assignment,<|fim▁hole|> tax_ids = ' '.join(taxids)) out.writerow(results) if args.out_detail: if not args.details_full: # drop the no_hits hits = [h for h in hits if 'tax_id' in h] # only report heaviest centroid clusters_and_sizes = [(float(weights.get(c, 1.0)), c) for c in clusters] _, largest = max(clusters_and_sizes) hits = (h for h in hits if h['qseqid'] == largest) for h in hits: args.out_detail.writerow(dict( specimen = specimen, assignment = assignment, assignment_id = assignment_id, hi = args.max_identity, low = args.min_identity, target_rank = args.target_rank, **h))<|fim▁end|>
max_percent = '{0:.2f}'.format(max(percents)), min_percent = '{0:.2f}'.format(min(percents)), max_coverage = '{0:.2f}'.format(max(coverages)), min_coverage = '{0:.2f}'.format(min(coverages)),
<|file_name|>CServer.cpp<|end_file_name|><|fim▁begin|>#include <net/http/CServer.hpp> namespace net { namespace http { CServer::CServer(void) : net::CServer(net::EProtocol::TCP)<|fim▁hole|><|fim▁end|>
{ } }}
<|file_name|>version.go<|end_file_name|><|fim▁begin|>package main // Heavily inspired by https://github.com/btcsuite/btcd/blob/master/version.go import ( "bytes" "fmt" "strings" ) // semanticAlphabet const semanticAlphabet = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-" // These constants define the application version and follow the semantic // versioning 2.0.0 spec (http://semver.org/). const ( appMajor uint = 0 appMinor uint = 1 appPatch uint = 0 // appPreRelease MUST only contain characters from semanticAlphabet // per the semantic versioning spec. appPreRelease = "alpha" ) // appBuild is defined as a variable so it can be overridden during the build // process with '-ldflags "-X main.appBuild foo' if needed. It MUST only // contain characters from semanticAlphabet per the semantic versioning spec. var appBuild string // version returns the application version as a properly formed string per the // semantic versioning 2.0.0 spec (http://semver.org/). func version() string { // Start with the major, minor, and patch versions. version := fmt.Sprintf("%d.%d.%d", appMajor, appMinor, appPatch) // Append pre-release version if there is one. The hyphen called for // by the semantic versioning spec is automatically appended and should // not be contained in the pre-release string. The pre-release version // is not appended if it contains invalid characters. preRelease := normalizeVerString(appPreRelease) if preRelease != "" { version = fmt.Sprintf("%s-%s", version, preRelease) } <|fim▁hole|> // string is not appended if it contains invalid characters. build := normalizeVerString(appBuild) if build != "" { version = fmt.Sprintf("%s+%s", version, build) } return version } // normalizeVerString returns the passed string stripped of all characters which // are not valid according to the semantic versioning guidelines for pre-release // version and build metadata strings. In particular they MUST only contain // characters in semanticAlphabet. 
func normalizeVerString(str string) string { var result bytes.Buffer for _, r := range str { if strings.ContainsRune(semanticAlphabet, r) { result.WriteRune(r) } } return result.String() }<|fim▁end|>
// Append build metadata if there is any. The plus called for // by the semantic versioning spec is automatically appended and should // not be contained in the build metadata string. The build metadata
<|file_name|>strings.py<|end_file_name|><|fim▁begin|>string = input()<|fim▁hole|><|fim▁end|>
string[0] = "a"
<|file_name|>test_hacking.py<|end_file_name|><|fim▁begin|># Copyright 2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import textwrap import mock import pep8 from nova.hacking import checks from nova import test class HackingTestCase(test.NoDBTestCase): """This class tests the hacking checks in nova.hacking.checks by passing strings to the check methods like the pep8/flake8 parser would. The parser loops over each line in the file and then passes the parameters to the check method. The parameter names in the check method dictate what type of object is passed to the check method. The parameter types are:: logical_line: A processed line with the following modifications: - Multi-line statements converted to a single line. - Stripped left and right. - Contents of strings replaced with "xxx" of same length. - Comments removed. physical_line: Raw line of text from the input file. 
lines: a list of the raw lines from the input file tokens: the tokens that contribute to this logical line line_number: line number in the input file total_lines: number of lines in the input file blank_lines: blank lines before this one indent_char: indentation character in this file (" " or "\t") indent_level: indentation (with tabs expanded to multiples of 8) previous_indent_level: indentation on previous line previous_logical: previous logical line filename: Path of the file being run through pep8 When running a test on a check method the return will be False/None if there is no violation in the sample input. If there is an error a tuple is returned with a position in the line, and a message. So to check the result just assertTrue if the check is expected to fail and assertFalse if it should pass. """ def test_virt_driver_imports(self): expect = (0, "N311: importing code from other virt drivers forbidden") self.assertEqual(expect, checks.import_no_virt_driver_import_deps( "from nova.virt.libvirt import utils as libvirt_utils", "./nova/virt/xenapi/driver.py")) self.assertEqual(expect, checks.import_no_virt_driver_import_deps( "import nova.virt.libvirt.utils as libvirt_utils", "./nova/virt/xenapi/driver.py")) self.assertIsNone(checks.import_no_virt_driver_import_deps( "from nova.virt.libvirt import utils as libvirt_utils", "./nova/virt/libvirt/driver.py")) self.assertIsNone(checks.import_no_virt_driver_import_deps( "import nova.virt.firewall", "./nova/virt/libvirt/firewall.py")) def test_virt_driver_config_vars(self): self.assertIsInstance(checks.import_no_virt_driver_config_deps( "CONF.import_opt('volume_drivers', " "'nova.virt.libvirt.driver', group='libvirt')", "./nova/virt/xenapi/driver.py"), tuple) self.assertIsNone(checks.import_no_virt_driver_config_deps( "CONF.import_opt('volume_drivers', " "'nova.virt.libvirt.driver', group='libvirt')", "./nova/virt/libvirt/volume.py")) def test_no_vi_headers(self): lines = ['Line 1\n', 'Line 2\n', 'Line 3\n', 'Line 
4\n', 'Line 5\n', 'Line 6\n', 'Line 7\n', 'Line 8\n', 'Line 9\n', 'Line 10\n', 'Line 11\n', 'Line 12\n', 'Line 13\n', 'Line14\n', 'Line15\n'] self.assertIsNone(checks.no_vi_headers( "Test string foo", 1, lines)) self.assertEqual(len(list(checks.no_vi_headers( "# vim: et tabstop=4 shiftwidth=4 softtabstop=4", 2, lines))), 2) self.assertIsNone(checks.no_vi_headers( "# vim: et tabstop=4 shiftwidth=4 softtabstop=4", 6, lines)) self.assertIsNone(checks.no_vi_headers( "# vim: et tabstop=4 shiftwidth=4 softtabstop=4", 9, lines)) self.assertEqual(len(list(checks.no_vi_headers( "# vim: et tabstop=4 shiftwidth=4 softtabstop=4", 14, lines))), 2) self.assertIsNone(checks.no_vi_headers( "Test end string for vi", 15, lines)) def test_assert_true_instance(self): self.assertEqual(len(list(checks.assert_true_instance( "self.assertTrue(isinstance(e, " "exception.BuildAbortException))"))), 1) self.assertEqual( len(list(checks.assert_true_instance("self.assertTrue()"))), 0) def test_assert_equal_type(self): self.assertEqual(len(list(checks.assert_equal_type( "self.assertEqual(type(als['QuicAssist']), list)"))), 1) self.assertEqual( len(list(checks.assert_equal_type("self.assertTrue()"))), 0) def test_assert_equal_in(self): self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual(a in b, True)"))), 1) self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual('str' in 'string', True)"))), 1) self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual(any(a==1 for a in b), True)"))), 0) self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual(True, a in b)"))), 1) self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual(True, 'str' in 'string')"))), 1) self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual(True, any(a==1 for a in b))"))), 0) self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual(a in b, False)"))), 1) self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual('str' in 'string', 
False)"))), 1) self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual(any(a==1 for a in b), False)"))), 0) self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual(False, a in b)"))), 1) self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual(False, 'str' in 'string')"))), 1) self.assertEqual(len(list(checks.assert_equal_in( "self.assertEqual(False, any(a==1 for a in b))"))), 0) def test_assert_equal_none(self): self.assertEqual(len(list(checks.assert_equal_none( "self.assertEqual(A, None)"))), 1) self.assertEqual(len(list(checks.assert_equal_none( "self.assertEqual(None, A)"))), 1) self.assertEqual( len(list(checks.assert_equal_none("self.assertIsNone()"))), 0) def test_assert_true_or_false_with_in_or_not_in(self): self.assertEqual(len(list(checks.assert_equal_none( "self.assertEqual(A, None)"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertTrue(A in B)"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertFalse(A in B)"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertTrue(A not in B)"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertFalse(A not in B)"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertTrue(A in B, 'some message')"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertFalse(A in B, 'some message')"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertTrue(A not in B, 'some message')"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertFalse(A not in B, 'some message')"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertTrue(A in 'some string with spaces')"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertTrue(A in 'some string with spaces')"))), 1) 
self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertTrue(A in ['1', '2', '3'])"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertTrue(A in [1, 2, 3])"))), 1) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertTrue(any(A > 5 for A in B))"))), 0) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertTrue(any(A > 5 for A in B), 'some message')"))), 0) self.assertEqual(len(list(checks.assert_true_or_false_with_in( "self.assertFalse(some in list1 and some2 in list2)"))), 0) def test_no_translate_debug_logs(self): self.assertEqual(len(list(checks.no_translate_debug_logs( "LOG.debug(_('foo'))", "nova/scheduler/foo.py"))), 1) self.assertEqual(len(list(checks.no_translate_debug_logs( "LOG.debug('foo')", "nova/scheduler/foo.py"))), 0) self.assertEqual(len(list(checks.no_translate_debug_logs( "LOG.info(_('foo'))", "nova/scheduler/foo.py"))), 0) def test_no_setting_conf_directly_in_tests(self): self.assertEqual(len(list(checks.no_setting_conf_directly_in_tests( "CONF.option = 1", "nova/tests/test_foo.py"))), 1) self.assertEqual(len(list(checks.no_setting_conf_directly_in_tests( "CONF.group.option = 1", "nova/tests/test_foo.py"))), 1) self.assertEqual(len(list(checks.no_setting_conf_directly_in_tests( "CONF.option = foo = 1", "nova/tests/test_foo.py"))), 1) # Shouldn't fail with comparisons self.assertEqual(len(list(checks.no_setting_conf_directly_in_tests( "CONF.option == 'foo'", "nova/tests/test_foo.py"))), 0) self.assertEqual(len(list(checks.no_setting_conf_directly_in_tests( "CONF.option != 1", "nova/tests/test_foo.py"))), 0) # Shouldn't fail since not in nova/tests/ self.assertEqual(len(list(checks.no_setting_conf_directly_in_tests( "CONF.option = 1", "nova/compute/foo.py"))), 0) def test_log_translations(self): logs = ['audit', 'error', 'info', 'warning', 'critical', 'warn', 'exception'] levels = ['_LI', '_LW', '_LE', '_LC'] debug = "LOG.debug('OK')" audit = 
"LOG.audit(_('OK'))" self.assertEqual( 0, len(list(checks.validate_log_translations(debug, debug, 'f')))) self.assertEqual( 0, len(list(checks.validate_log_translations(audit, audit, 'f')))) for log in logs: bad = 'LOG.%s("Bad")' % log self.assertEqual(1, len(list( checks.validate_log_translations(bad, bad, 'f')))) ok = "LOG.%s('OK') # noqa" % log self.assertEqual(0, len(list( checks.validate_log_translations(ok, ok, 'f')))) ok = "LOG.%s(variable)" % log self.assertEqual(0, len(list( checks.validate_log_translations(ok, ok, 'f')))) for level in levels: ok = "LOG.%s(%s('OK'))" % (log, level) self.assertEqual(0, len(list( checks.validate_log_translations(ok, ok, 'f')))) def test_no_mutable_default_args(self): self.assertEqual(1, len(list(checks.no_mutable_default_args( " def fake_suds_context(calls={}):")))) self.assertEqual(1, len(list(checks.no_mutable_default_args( "def get_info_from_bdm(virt_type, bdm, mapping=[])")))) self.assertEqual(0, len(list(checks.no_mutable_default_args( "defined = []")))) self.assertEqual(0, len(list(checks.no_mutable_default_args( "defined, undefined = [], {}")))) def test_check_explicit_underscore_import(self): self.assertEqual(len(list(checks.check_explicit_underscore_import( "LOG.info(_('My info message'))", "cinder/tests/other_files.py"))), 1) self.assertEqual(len(list(checks.check_explicit_underscore_import( "msg = _('My message')", "cinder/tests/other_files.py"))), 1) self.assertEqual(len(list(checks.check_explicit_underscore_import( "from cinder.i18n import _", "cinder/tests/other_files.py"))), 0) self.assertEqual(len(list(checks.check_explicit_underscore_import( "LOG.info(_('My info message'))", "cinder/tests/other_files.py"))), 0) self.assertEqual(len(list(checks.check_explicit_underscore_import( "msg = _('My message')", "cinder/tests/other_files.py"))), 0) self.assertEqual(len(list(checks.check_explicit_underscore_import( "from cinder.i18n import _, _LW", "cinder/tests/other_files2.py"))), 0) 
self.assertEqual(len(list(checks.check_explicit_underscore_import( "msg = _('My message')", "cinder/tests/other_files2.py"))), 0) self.assertEqual(len(list(checks.check_explicit_underscore_import( "_ = translations.ugettext", "cinder/tests/other_files3.py"))), 0) self.assertEqual(len(list(checks.check_explicit_underscore_import( "msg = _('My message')", "cinder/tests/other_files3.py"))), 0) def test_use_jsonutils(self): def __get_msg(fun): msg = ("N324: jsonutils.%(fun)s must be used instead of " "json.%(fun)s" % {'fun': fun}) return [(0, msg)] for method in ('dump', 'dumps', 'load', 'loads'): self.assertEqual( __get_msg(method), list(checks.use_jsonutils("json.%s(" % method, "./nova/virt/xenapi/driver.py"))) self.assertEqual(0, len(list(checks.use_jsonutils("json.%s(" % method, "./plugins/xenserver/script.py")))) self.assertEqual(0, len(list(checks.use_jsonutils("jsonx.%s(" % method, "./nova/virt/xenapi/driver.py")))) self.assertEqual(0, len(list(checks.use_jsonutils("json.dumb", "./nova/virt/xenapi/driver.py")))) # We are patching pep8 so that only the check under test is actually # installed. 
@mock.patch('pep8._checks', {'physical_line': {}, 'logical_line': {}, 'tree': {}}) def _run_check(self, code, checker, filename=None): pep8.register_check(checker) lines = textwrap.dedent(code).strip().splitlines(True) checker = pep8.Checker(filename=filename, lines=lines) checker.check_all() checker.report._deferred_print.sort() return checker.report._deferred_print def _assert_has_errors(self, code, checker, expected_errors=None, filename=None): actual_errors = [e[:3] for e in self._run_check(code, checker, filename)] self.assertEqual(expected_errors or [], actual_errors) def _assert_has_no_errors(self, code, checker, filename=None): self._assert_has_errors(code, checker, filename=filename) def test_str_unicode_exception(self): checker = checks.CheckForStrUnicodeExc code = """ def f(a, b): try: p = str(a) + str(b) except ValueError as e: p = str(e) return p """ errors = [(5, 16, 'N325')] self._assert_has_errors(code, checker, expected_errors=errors) code = """ def f(a, b): try: p = unicode(a) + str(b) except ValueError as e: p = e return p """ self._assert_has_no_errors(code, checker) code = """ def f(a, b): try: p = str(a) + str(b) except ValueError as e: p = unicode(e) return p """ errors = [(5, 20, 'N325')] self._assert_has_errors(code, checker, expected_errors=errors) code = """ def f(a, b): try: p = str(a) + str(b) except ValueError as e: try: p = unicode(a) + unicode(b) except ValueError as ve: p = str(e) + str(ve) p = e return p """ errors = [(8, 20, 'N325'), (8, 29, 'N325')] self._assert_has_errors(code, checker, expected_errors=errors) code = """ def f(a, b): try: p = str(a) + str(b) except ValueError as e: try: p = unicode(a) + unicode(b) except ValueError as ve: p = str(e) + unicode(ve) p = str(e) return p """ errors = [(8, 20, 'N325'), (8, 33, 'N325'), (9, 16, 'N325')] self._assert_has_errors(code, checker, expected_errors=errors) def test_api_version_decorator_check(self): code = """ @some_other_decorator @wsgi.api_version("2.5") def my_method(): 
pass """ self._assert_has_errors(code, checks.check_api_version_decorator, expected_errors=[(2, 0, "N332")]) def test_oslo_namespace_imports_check(self): code = """ from oslo.concurrency import processutils """ self._assert_has_errors(code, checks.check_oslo_namespace_imports, expected_errors=[(1, 0, "N333")]) def test_oslo_namespace_imports_check_2(self): code = """ from oslo import i18n """ self._assert_has_errors(code, checks.check_oslo_namespace_imports, expected_errors=[(1, 0, "N333")]) def test_oslo_namespace_imports_check_3(self): code = """ import oslo.messaging """ self._assert_has_errors(code, checks.check_oslo_namespace_imports, expected_errors=[(1, 0, "N333")]) def test_oslo_assert_raises_regexp(self): code = """ self.assertRaisesRegexp(ValueError, "invalid literal for.*XYZ'$", int, 'XYZ') """ self._assert_has_errors(code, checks.assert_raises_regexp, expected_errors=[(1, 0, "N335")]) def test_api_version_decorator_check_no_errors(self): code = """ class ControllerClass(): @wsgi.api_version("2.5") def my_method(): pass """ self._assert_has_no_errors(code, checks.check_api_version_decorator) def test_trans_add(self): checker = checks.CheckForTransAdd code = """ def fake_tran(msg): return msg _ = fake_tran _LI = _ _LW = _ _LE = _ _LC = _ def f(a, b): msg = _('test') + 'add me' msg = _LI('test') + 'add me' msg = _LW('test') + 'add me' msg = _LE('test') + 'add me' msg = _LC('test') + 'add me' msg = 'add to me' + _('test') return msg """ errors = [(13, 10, 'N326'), (14, 10, 'N326'), (15, 10, 'N326'), (16, 10, 'N326'), (17, 10, 'N326'), (18, 24, 'N326')] self._assert_has_errors(code, checker, expected_errors=errors) code = """ def f(a, b): msg = 'test' + 'add me'<|fim▁hole|> def test_dict_constructor_with_list_copy(self): self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " dict([(i, connect_info[i])")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " attrs = dict([(k, _from_json(v))")))) self.assertEqual(1, 
len(list(checks.dict_constructor_with_list_copy( " type_names = dict((value, key) for key, value in")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " dict((value, key) for key, value in")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( "foo(param=dict((k, v) for k, v in bar.items()))")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " dict([[i,i] for i in range(3)])")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " dd = dict([i,i] for i in range(3))")))) self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy( " create_kwargs = dict(snapshot=snapshot,")))) self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy( " self._render_dict(xml, data_el, data.__dict__)"))))<|fim▁end|>
return msg """ self._assert_has_no_errors(code, checker)
<|file_name|>process-spawn-with-unicode-params.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // no-prefer-dynamic // The test copies itself into a subdirectory with a non-ASCII name and then<|fim▁hole|>// runs it as a child process within the subdirectory. The parent process // also adds an environment variable and an argument, both containing // non-ASCII characters. The child process ensures all the strings are // intact. extern crate native; use std::io; use std::io::fs; use std::io::Command; use std::os; use std::path::Path; fn main() { let my_args = os::args(); let my_cwd = os::getcwd(); let my_env = os::env(); let my_path = Path::new(os::self_exe_name().unwrap()); let my_dir = my_path.dir_path(); let my_ext = my_path.extension_str().unwrap_or(""); // some non-ASCII characters let blah = "\u03c0\u042f\u97f3\u00e6\u221e"; let child_name = "child"; let child_dir = format!("process-spawn-with-unicode-params-{}", blah); // parameters sent to child / expected to be received from parent let arg = blah; let cwd = my_dir.join(Path::new(child_dir.clone())); let env = ("RUST_TEST_PROC_SPAWN_UNICODE".to_string(), blah.to_string()); // am I the parent or the child? 
if my_args.len() == 1 { // parent let child_filestem = Path::new(child_name); let child_filename = child_filestem.with_extension(my_ext); let child_path = cwd.join(child_filename.clone()); // make a separate directory for the child drop(fs::mkdir(&cwd, io::UserRWX).is_ok()); assert!(fs::copy(&my_path, &child_path).is_ok()); // run child let p = Command::new(&child_path) .arg(arg) .cwd(&cwd) .env_set_all(my_env.append_one(env).as_slice()) .spawn().unwrap().wait_with_output().unwrap(); // display the output assert!(io::stdout().write(p.output.as_slice()).is_ok()); assert!(io::stderr().write(p.error.as_slice()).is_ok()); // make sure the child succeeded assert!(p.status.success()); } else { // child // check working directory (don't try to compare with `cwd` here!) assert!(my_cwd.ends_with_path(&Path::new(child_dir))); // check arguments assert_eq!(my_args.get(1).as_slice(), arg); // check environment variable assert!(my_env.contains(&env)); }; }<|fim▁end|>
<|file_name|>struct-log-self.rs<|end_file_name|><|fim▁begin|>//! Example of how to implement `KV` for a struct //! to conveniently log data associated with it. #[macro_use] extern crate slog; use slog::*; mod common; struct Peer { host: String, port: u32, } impl Peer { fn new(host: String, port: u32) -> Self { Peer { host: host, port: port, } }<|fim▁hole|>impl KV for Peer { fn serialize(&self, _record: &Record, serializer: &mut Serializer) -> Result { serializer.emit_u32(Key::from("peer-port"), self.port)?; serializer.emit_str(Key::from("peer-host"), &self.host)?; Ok(()) } } struct Server { _host: String, _port: u32, // One approach is to create new `Logger` with struct data // and embedded it into struct itself. This works when struct is mostly // immutable. log: Logger, } impl Server { fn new(host: String, port: u32, log: Logger) -> Server { let log = log.new(o!("server-host" => host.clone(), "server-port" => port)); Server { _host: host, _port: port, log: log, } } fn connection(&self, peer: &Peer) { // Another approach is to add struct to a logging message when it's // necessary. This might be necessary when struct data can change // between different logging statements (not the case here for `Peer`). info!(self.log, "new connection"; peer); } } struct PeerCounter { count: usize, log: Logger, } impl PeerCounter { fn new(log: Logger) -> Self { PeerCounter { count: 0, log: log } } // A hybrid approach with `Logger` with parent logging-context embedded into // a `struct` and a helper function adding mutable fields. 
fn log_info(&self, msg: &str, kv: BorrowedKV) { info!(self.log, "{}", msg; "current-count" => self.count, kv); } fn count(&mut self, peer: &Peer) { self.count += 1; self.log_info("counted peer", b!(peer)); } } fn main() { let log = Logger::root(Fuse(common::PrintlnDrain), o!("build-id" => "7.3.3-abcdef")); let server = Server::new("localhost".into(), 12345, log.clone()); let peer = Peer::new("1.2.3.4".into(), 999); server.connection(&peer); let mut counter = PeerCounter::new(log); counter.count(&peer); }<|fim▁end|>
} // `KV` can be implemented for a struct
<|file_name|>xlate.js<|end_file_name|><|fim▁begin|>var translations = { 'es': { 'One moment while we<br>log you in': 'Espera un momento mientras<br>iniciamos tu sesión', 'You are now connected to the network': 'Ahora estás conectado a la red', 'Account signups/purchases are disabled in preview mode': 'La inscripciones de cuenta/compras están desactivadas en el modo de vista previa.', 'Notice': 'Aviso', 'Day': 'Día', 'Days': 'Días', 'Hour': 'Hora', 'Hours': 'Horas', 'Minutes': 'Minutos', 'Continue': 'Continuar', 'Thank You for Trying TWC WiFi': 'Gracias por probar TWC WiFi', 'Please purchase a TWC Access Pass to continue using WiFi': 'Adquiere un Pase de acceso (Access Pass) de TWC para continuar usando la red WiFi', 'Your TWC Access Pass has expired. Please select a new Access Pass Now.': 'Tu Access Pass (Pase de acceso) de TWC ha vencido. Selecciona un nuevo Access Pass (Pase de acceso) ahora.', 'Your account information has been pre-populated into the form. If you wish to change any information, you may edit the form before completing the order.':<|fim▁hole|> 'Proceed to Login': 'Proceder con el inicio de sesión', 'Payment portal is not available at this moment': '', 'Redirecting to Payment portal...': '', 'Could not log you into the network': 'No se pudo iniciar sesión en la red' } } function translate(text, language) { if (language == 'en') return text; if (!translations[language]) return text; if (!translations[language][text]) return text; return translations[language][text] || text; }<|fim▁end|>
'El formulario ha sido llenado con la información de tu cuenta. Si deseas modificar algún dato, puedes editar el formulario antes de completar la solicitud.', 'Your Password': 'Tu contraseña',
<|file_name|>files.rs<|end_file_name|><|fim▁begin|>use crate::utils::schemas; use defaults::Defaults; use events::publish; use formats::FormatSpec; use schemars::{schema::Schema, JsonSchema}; use serde::Serialize; use serde_with::skip_serializing_none; use std::{ collections::{btree_map::Entry, BTreeMap, BTreeSet}, path::{Path, PathBuf}, time::UNIX_EPOCH, }; use strum::Display; /// A file or directory within a `Project` #[skip_serializing_none] #[derive(Debug, Defaults, Clone, JsonSchema, Serialize)] #[serde(rename_all = "camelCase")] #[schemars(deny_unknown_fields)] pub struct File { /// The absolute path of the file or directory pub path: PathBuf, /// The name of the file or directory pub name: String, /// Time that the file was last modified (Unix Epoch timestamp) pub modified: Option<u64>, /// Size of the file in bytes pub size: Option<u64>, /// Format of the file /// /// Usually this is the lower cased filename extension (if any) /// but may also be normalized. May be more convenient, /// and usually more available, than the `media_type` property. #[def = "FormatSpec::unknown(\"unknown\")"] #[schemars(schema_with = "File::schema_format")] pub format: FormatSpec, /// The parent `File`, if any pub parent: Option<PathBuf>, /// If a directory, a list of the canonical paths of the files within it. /// Otherwise, `None`. /// /// A `BTreeSet` rather than a `Vec` so that paths are ordered without /// having to be resorted after insertions. Another option is `BinaryHeap` /// but `BinaryHeap::retain` is only on nightly and so is awkward to use. pub children: Option<BTreeSet<PathBuf>>, } impl File { /// Generate the JSON Schema for the `format` property to avoid nested type. 
fn schema_format(_generator: &mut schemars::gen::SchemaGenerator) -> Schema { schemas::typescript("Format", true) } /// Get a file's name from it's path pub fn name(path: &Path) -> String { path.file_name() .map(|os_str| os_str.to_string_lossy()) .unwrap_or_default() .into() } /// Get a file's parent from it's path pub fn parent(path: &Path) -> Option<PathBuf> { path.parent().map(|parent| parent.into()) } /// Load a file from a path /// /// Note: this function is infallible, in that it will always return a /// `File`. However, if there were errors obtaining a field it will be /// `None`, or possible erroneous (e.g. in the unlikely event that /// `path.canonicalize()` fails for example). Having this function return /// a `File`, instead of a `Result<File>` simplifies other code substantially. pub fn load(path: &Path) -> File { let path = path.canonicalize().unwrap_or_else(|_error| path.into()); let name = File::name(&path); let parent = File::parent(&path); let (modified, size) = match path.metadata() { Ok(metadata) => { #[allow(clippy::bind_instead_of_map)] let modified = metadata .modified() .ok() .and_then(|time| time.duration_since(UNIX_EPOCH).ok()) .and_then(|duration| Some(duration.as_secs())); let size = Some(metadata.len()); (modified, size) } Err(_) => (None, None), }; let (format, children) = if path.is_file() { (formats::match_path(&path).spec(), None) } else { (FormatSpec::directory(), Some(BTreeSet::new())) }; File { path, name, modified, size, format, parent, children, } } } #[derive(Display, JsonSchema, Serialize)] #[serde(rename_all = "lowercase")] pub enum FileEventType { Refreshed, Created, Removed, Renamed, Modified, } /// An event associated with a `File` or a set of `File`s /// /// These events published under the `projects:<project-path>:files` topic. 
#[derive(JsonSchema, Serialize)] #[schemars(deny_unknown_fields)] pub struct FileEvent { /// The path of the project (absolute) pub project: PathBuf, /// The path of the file (absolute) /// /// For `renamed` events this is the _old_ path. pub path: PathBuf, /// The type of event e.g. `Refreshed`, `Modified`, `Created` /// /// A `refreshed` event is emitted when the entire set of /// files is updated. #[serde(rename = "type")] pub type_: FileEventType, /// The updated file /// /// Will be `None` for for `refreshed` and `removed` events, /// or if for some reason it was not possible to fetch metadata /// about the file. #[schemars(schema_with = "FileEvent::schema_file")] pub file: Option<File>, /// The updated set of files in the project /// /// Represents the new state of the file tree after the /// event including updated `parent` and `children` /// properties of files affects by the event. #[schemars(schema_with = "FileEvent::schema_files")] pub files: BTreeMap<PathBuf, File>, } impl FileEvent { /// Generate the JSON Schema for the `file` property fn schema_file(_generator: &mut schemars::gen::SchemaGenerator) -> Schema { schemas::typescript("File", false) }<|fim▁hole|> /// Generate the JSON Schema for the `files` property fn schema_files(_generator: &mut schemars::gen::SchemaGenerator) -> Schema { schemas::typescript("Record<string, File>", true) } pub fn publish( project: &Path, path: &Path, type_: FileEventType, file: Option<File>, files: &BTreeMap<PathBuf, File>, ) { let topic = &format!( "projects:{}:files:{}:{}", project.display(), path.display(), type_ ); let event = FileEvent { project: project.into(), path: path.into(), type_, file, files: files.clone(), }; publish(topic, &event) } } /// A registry of `File`s within a `Project` #[derive(Clone, Debug, Default, JsonSchema, Serialize)] pub struct Files { /// The root path of the project #[serde(skip)] path: PathBuf, /// The map of files in the project #[serde(flatten)] pub files: BTreeMap<PathBuf, File>, /// 
The set of Git ignore style files in the project /// /// Used to avoid adding ignored file when notified /// of changes by the watcher thread. #[serde(skip)] ignore_files: BTreeSet<PathBuf>, /// The set of files that, according to `ignore_files` /// should be ignored. /// /// Used as a cache to avoid reading and processing /// ignore files when notified of changes by the /// watcher thread. #[serde(skip)] files_ignored: BTreeSet<PathBuf>, } impl Files { const GITIGNORE_NAMES: [&'static str; 2] = [".ignore", ".gitignore"]; pub fn new<P: AsRef<Path>>(path: P) -> Files { let (files, ignore_files) = Files::walk(&path); Files { path: path.as_ref().to_path_buf(), files, ignore_files, ..Default::default() } } /// Walk a path and collect file and Git ignore files from it pub fn walk<P: AsRef<Path>>(path: P) -> (BTreeMap<PathBuf, File>, BTreeSet<PathBuf>) { // Build walker let walker = ignore::WalkBuilder::new(&path) // Respect .ignore files .ignore(true) // Respect .gitignore files .git_ignore(true) .build_parallel(); // Collect files in parallel using a collector thread and several walker thread // (number of which is chosen by the `ignore` walker) let (sender, receiver) = crossbeam_channel::bounded(100); let join_handle = std::thread::spawn(move || -> BTreeMap<PathBuf, File> { receiver.iter().collect() }); walker.run(|| { let sender = sender.clone(); Box::new(move |result| { use ignore::WalkState::*; if let Ok(entry) = result { let path = entry.path(); let file = File::load(path); sender .send((file.path.clone(), file)) .expect("Unable to send to collector"); } Continue }) }); drop(sender); let mut files = join_handle.join().expect("Unable to join collector thread"); // Resolve `children` properties and `ignore_files` files let mut ignore_files = BTreeSet::new(); for path in files.keys().cloned().collect::<Vec<PathBuf>>() { if Files::is_ignore_file(&path) { ignore_files.insert(path.clone()); } if let Some(parent) = path.parent() { if let Entry::Occupied(mut parent) = 
files.entry(parent.into()) { let parent = parent.get_mut(); if let Some(children) = &mut parent.children { children.insert(path); } } } } (files, ignore_files) } /// Should the registry be refreshed in response to a change in a file /// /// For example if a `.gitignore` file is added, removed, moved or modified. fn should_refresh(&mut self, path: &Path) -> bool { Files::is_ignore_file(path) } /// Refresh the registry if it should be fn did_refresh(&mut self, path: &Path) -> bool { if self.should_refresh(path) { self.refresh(); true } else { false } } /// Is the file a Git ignore file? fn is_ignore_file(path: &Path) -> bool { let name = File::name(path); Files::GITIGNORE_NAMES.contains(&name.as_str()) } /// Should a path be ignored? /// /// Used by the following functions to decide whether to update a file /// in the registry. Tries to be consistent with the `ignore` crate (which /// is used to initially load all the files). /// /// Checks against any of the `ignore_files` that are "above" the file in /// the file tree. Caches result to minimize re-reading the ignore file. fn should_ignore(&mut self, path: &Path) -> bool { if self.files_ignored.contains(path) { return true; } for ignore_file_path in &self.ignore_files { if let Some(ignore_file_dir) = ignore_file_path.parent() { if path.starts_with(ignore_file_dir) { if let Ok(ignore_file) = gitignore::File::new(ignore_file_path) { if ignore_file.is_excluded(path).unwrap_or(false) { self.files_ignored.insert(path.into()); return true; } } } } } false } /// Get the parent `File` of a path, ensure that all it's /// ancestors exist, and add the path as a child. /// /// This is used to ensure that the ancestor `File`s of a path exists /// in the registry (e.g. when a new file is created or renamed in a sub folder) /// and that the current path is added as a child. 
/// It will return `None` if the path has no parent (i.e is outside of the root) fn ensure_ancestors(&mut self, path: &Path) -> Option<&mut File> { if let Some(parent) = path.parent() { if !parent.starts_with(&self.path) { return None; } self.ensure_ancestors(parent); let parent = self .files .entry(parent.into()) .or_insert_with(|| File::load(parent)); if let Some(children) = &mut parent.children { children.insert(path.into()); } Some(parent) } else { None } } /// Refresh the file registry fn refresh(&mut self) { *self = Files::new(self.path.as_path()); FileEvent::publish( &self.path, Path::new("*"), FileEventType::Refreshed, None, &self.files, ) } // Update the file registry when a file is created pub fn created(&mut self, path: &Path) { if self.should_ignore(path) || self.did_refresh(path) { return; } // Load the file, insert it and add it to it's parent's children let file = File::load(path); self.files.insert(path.into(), file.clone()); self.ensure_ancestors(path); if path.is_dir() { // If the path created is a directory with empty sub-directories // we only get an event for the top level. // e.g. for `mkdir -p a/b/c` we only get an event for `a` being created. // So we have to walk it. This is potentially wasteful because we may // already loaded files when getting individual file `created` events // or when walking subdirectories (e.g. when a zip file is extracted). // But there does not seem to be an easy, safe alternative. 
let (files, ignore_files) = &mut Files::walk(path); self.files.append(files); self.ignore_files.append(ignore_files); } else { // If it's a file, we may need to add it to the ignore files if Files::is_ignore_file(path) { self.ignore_files.insert(path.into()); } } FileEvent::publish( &self.path, path, FileEventType::Created, Some(file), &self.files, ) } // Update the file registry when a file is removed pub fn removed(&mut self, path: &Path) { if self.should_ignore(path) || self.did_refresh(path) { return; } // Remove the file and remove it from its parent's children self.files.remove(path); if let Some(parent) = self.files.get_mut(path) { if let Some(children) = &mut parent.children { children.remove(path); } } FileEvent::publish(&self.path, path, FileEventType::Removed, None, &self.files) } // Update the file registry when a file is renamed pub fn renamed(&mut self, old_path: &Path, new_path: &Path) { if self.should_refresh(old_path) || self.should_refresh(new_path) { return self.refresh(); } let ignore_old = self.should_ignore(old_path); let ignore_new = self.should_ignore(new_path); if ignore_old && ignore_new { return; } else if ignore_new { return self.removed(old_path); } else if ignore_old { return self.created(new_path); } // Move the file let file = match self.files.entry(old_path.into()) { Entry::Occupied(entry) => { // Update it's path, name etc, and the paths of // all it's descendants. Move the file from old to new path. 
let mut file = entry.remove(); file.path = new_path.into(); file.name = File::name(new_path); file.parent = File::parent(new_path); file.format = formats::match_path(&new_path).spec(); rename_children(&mut self.files, &mut file, old_path, new_path); self.files.insert(new_path.into(), file.clone()); file } Entry::Vacant(_) => { // The entry should not be empty, but in case it is, load the file from, // and insert it at, the new path let file = File::load(new_path); self.files.insert(new_path.into(), file.clone()); file } }; // Recursively rename children of a `File` (if it has `children` i.e. is a directory) fn rename_children( registry: &mut BTreeMap<PathBuf, File>, file: &mut File, old_path: &Path, new_path: &Path, ) { if let Some(children) = &mut file.children { let mut new_children = BTreeSet::new(); for child_old_path in children.iter() { let child_new_path = new_path.join( child_old_path .strip_prefix(old_path) .expect("Unable to strip old path"), ); if let Entry::Occupied(entry) = registry.entry(child_old_path.into()) { let mut file = entry.remove(); file.path = child_new_path.clone(); file.parent = child_new_path.parent().map(|parent| parent.into()); rename_children(registry, &mut file, child_old_path, &child_new_path); registry.insert(child_new_path.clone(), file); } new_children.insert(child_new_path); } file.children = Some(new_children); } } // Remove the old path from the old path's parent's children if let Some(parent_path) = old_path.parent() { if let Some(parent) = self.files.get_mut(parent_path) { if let Some(children) = &mut parent.children { children.remove(old_path); } } } // Insert the new path to the new parent's children self.ensure_ancestors(new_path); FileEvent::publish( &self.path, old_path, FileEventType::Renamed, Some(file), &self.files, ) } // Update the file registry when a file is modified pub fn modified(&mut self, path: &Path) { if self.should_ignore(path) || self.did_refresh(path) { return; } // Insert the file let file = 
File::load(path); self.files.insert(path.into(), file.clone()); FileEvent::publish( &self.path, path, FileEventType::Modified, Some(file), &self.files, ) } }<|fim▁end|>
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import * as pako from "pako" import {Point} from "./Utils" import {DrawHelper} from "./DrawHelper" import {InputManager} from "./InputManager" import {KeyBindings} from "./InputManager" import {Data, Entity} from "./Entity" import {Grid} from "./Grid" export const COLOR_SCHEME = { background: "#282c34", borders: "#4f5052", crosshair: "#e0e0e0", }; export const OPTIONS = { GRID_SIZE: 40, ENTITY_SCALEUP: 10, SQUARES_WIDE: 50, SQUARES_HIGH: 50, BORDER_WIDTH: 4, FONT_SIZE: 25, CAMERA_MOVE_SPEED: 10, CURRENT_SELECTED_ITEM_OPACITY: .5 } enum LINE_SNAP{ None, Vertical, Horizontal, } export class Editor{ /*static readonly GRID_SIZE = 40; static readonly ENTITY_SCALEUP = 10; static readonly SQUARES_WIDE = 30; static readonly SQUARES_HIGH = 30; static readonly BORDER_WIDTH = 4; static readonly FONT_SIZE = 25; static readonly CAMERA_MOVE_SPEED = 10; static readonly CURRENT_SELECTED_ITEM_OPACITY = .5;*/ private static _canvas: HTMLCanvasElement; private static _ctx: CanvasRenderingContext2D; private static _menu: HTMLDivElement; private static _menu_button: HTMLDivElement; private static _import_button: HTMLDivElement; private static _export_button: HTMLDivElement; private static _last_mouse_grid_position: Point = new Point(0,0); private static _mouse_grid_position: Point = new Point(0,0); static get mouse_grid_position(): Point{ return this._mouse_grid_position; } private static _current_selected_item: Entity; private static _line_snap_type: LINE_SNAP; private static _unused_ids: number[] = []; private static _entities: Entity[] = []; private static _global_animators: Animator[] = []; private static _grid: Grid; static grid: number[][]; static Init(){ this._canvas = document.getElementById("editorCanvas") as HTMLCanvasElement; this._ctx = this._canvas.getContext("2d"); this._menu_button = document.getElementById("menuButton") as HTMLDivElement; this._menu_button.onclick = function(){ Editor.ToggleMenu(); } //Setup any styling 
this._canvas.style.backgroundColor=COLOR_SCHEME.background; this._canvas.oncontextmenu = function (e) { e.preventDefault(); }; this._canvas.onclick = function(){ Editor.CloseMenu(); } //test var test = "0eNqV0ckKwjAQBuB3+c8p2LRUzKuISJdBAu0kJFFaSt7dLh4EA9LjbB/DzIymf5J1mgPUDN0a9lDXGV4/uO7XXJgsQUEHGiDA9bBGNFpH3mfB1eytcSFrqA+IApo7GqHyeBMgDjpo2sUtmO78HBpyS8M/S8Aav4wbXrdYyKwQmKBOMYofTR7X8o8m0GlH7V6SCbs4bCfpMkGXh+kiRVfrsbcHqa9/CrzI+b2hLGVVnWUuLzG+ARDGqi4="; this.LoadBlueprint(test); InputManager.AddKeyEvent(false, KeyBindings.DropItem, ()=>{ this._current_selected_item = undefined; }); InputManager.AddKeyEvent(false, KeyBindings.Rotate, ()=>{ if(this._current_selected_item){ this._current_selected_item.Rotate(); } }); this._grid = new Grid( OPTIONS.GRID_SIZE, new Point(OPTIONS.SQUARES_WIDE, OPTIONS.SQUARES_HIGH), OPTIONS.BORDER_WIDTH, COLOR_SCHEME.borders, COLOR_SCHEME.crosshair, COLOR_SCHEME.background, OPTIONS.FONT_SIZE ) this.CreateMenu(); this.Resize(); Data.LoadImages(); } static Update(){ //Handle line snap if(InputManager.IsKeyDown(KeyBindings.LineSnap)){ if(this._line_snap_type == LINE_SNAP.None){ let diff = this._mouse_grid_position.SubtractC(this._last_mouse_grid_position); if(diff.x != 0){ this._line_snap_type = LINE_SNAP.Horizontal; } if(diff.y != 0){ this._line_snap_type = LINE_SNAP.Vertical } } } else{ this._line_snap_type = LINE_SNAP.None; } this._last_mouse_grid_position = this._mouse_grid_position.Copy(); this._mouse_grid_position = this.ScreenToGridCoords(InputManager.mouse_position); this._mouse_grid_position.Clamp( {x: 1, y: 1}, {x: OPTIONS.SQUARES_WIDE, y: OPTIONS.SQUARES_HIGH} ) if(this._line_snap_type == LINE_SNAP.Horizontal){ this._mouse_grid_position.y = this._last_mouse_grid_position.y; } else if(this._line_snap_type == LINE_SNAP.Vertical){ this._mouse_grid_position.x = this._last_mouse_grid_position.x; } DrawHelper.ClearScreen(this._ctx); //Handle Camera Movement if(InputManager.IsKeyDown(KeyBindings.MoveRight)){ 
DrawHelper.camera_position.Add({x: OPTIONS.CAMERA_MOVE_SPEED, y:0}); } else if(InputManager.IsKeyDown(KeyBindings.MoveLeft)){ DrawHelper.camera_position.Add({x: -OPTIONS.CAMERA_MOVE_SPEED, y:0}); } if(InputManager.IsKeyDown(KeyBindings.MoveDown)){ DrawHelper.camera_position.Add({x: 0, y: OPTIONS.CAMERA_MOVE_SPEED}); } else if(InputManager.IsKeyDown(KeyBindings.MoveUp)){ DrawHelper.camera_position.Add({x: 0, y: -OPTIONS.CAMERA_MOVE_SPEED}); } DrawHelper.camera_position.Clamp( { x: 0, y: 0}, { x: OPTIONS.GRID_SIZE*OPTIONS.SQUARES_WIDE - this._canvas.width, y: OPTIONS.GRID_SIZE*OPTIONS.SQUARES_HIGH - this._canvas.height } ) //Handle Placement if(this._current_selected_item && InputManager.IsMouseDown(0)){ this.TryPlace(); } if(InputManager.IsMouseDown(2)){ this.TryRemove(); } if(InputManager.IsMouseDown(1)){ console.log(this._grid); } this._grid.DrawCrosshairs(this._ctx); this._grid.DrawGrid(this._ctx); for(let key in this._global_animators){ let animator = this._global_animators[key]; animator.Update(); } if(this._current_selected_item){ this._current_selected_item.position = this._mouse_grid_position.Copy(); this._current_selected_item.Draw(this._ctx, OPTIONS.CURRENT_SELECTED_ITEM_OPACITY); } this._grid.DrawRulers(this._ctx); } static TryRemove(){ this._grid.RemoveAtPos(this.mouse_grid_position.Copy()); } static TryPlace(){ if(this._menu.classList.contains("open")) return; // console.log("Trying place at "+ this.mouse_grid_position.x); // console.log(this.grid[this.mouse_grid_position.x][this.mouse_grid_position.y]); let is_clear = this._grid.IsClear(this.mouse_grid_position.Copy(), this._current_selected_item); if(is_clear.Empty){ console.log("-Space is empty"); this._grid.Place(this._current_selected_item, this._mouse_grid_position.Copy()); let entity_name = this._current_selected_item.properties.name; let direction = this._current_selected_item.GetDirection(); let grid_size = { x: this._current_selected_item.properties.grid_size.x, y: 
this._current_selected_item.properties.grid_size.y } let children = []; if(this._current_selected_item.properties.children){ children = this._current_selected_item.properties.children; } let new_id = this._grid.GetNextID(); this._current_selected_item = new Entity(new_id, this._mouse_grid_position.Copy()); this._current_selected_item.LoadFromData(entity_name); this._current_selected_item.SetDirection(direction); this._current_selected_item.properties.grid_size = new Point(grid_size.x, grid_size.y); for(let i = 0; i<children.length; i++){ this._current_selected_item.properties.children[i].offset = children[i].offset; } // console.log("placed"); // console.log(this.grid); } else if(is_clear.SameType){ console.log("same type"); this.TryRemove(); this.TryPlace(); } else{ console.log("-Space is FULL"); } //console.log(this.current_selected_item); } static SelectItem(value: string){ let id = this._grid.GetNextID(); let new_entity = new Entity(id, this._mouse_grid_position.Copy()); new_entity.LoadFromData(value); this._current_selected_item = new_entity; } static GetAnimator(anim: string): Animator{ return this._global_animators[anim]; } static AddAnimator(anim: Animator, name: string){ this._global_animators[name] = anim; } /* static DrawRulers(){ let text_centering_factor = (OPTIONS.GRID_SIZE - OPTIONS.FONT_SIZE)/2; DrawHelper.DrawRect( this._ctx, new Point(DrawHelper.camera_position.x,DrawHelper.camera_position.y), new Point(this._canvas.width, OPTIONS.GRID_SIZE), { color:COLOR_SCHEME.background } ) DrawHelper.DrawRect( this._ctx, new Point(DrawHelper.camera_position.x,DrawHelper.camera_position.y), new Point(OPTIONS.GRID_SIZE, this._canvas.height), { color:COLOR_SCHEME.background } ) //Draw Numbers horizontal for(let x = 1; x<OPTIONS.SQUARES_WIDE; x++){ let x_pos = x*OPTIONS.GRID_SIZE; DrawHelper.DrawText( this._ctx, new Point( x_pos+text_centering_factor, OPTIONS.FONT_SIZE+text_centering_factor - 5 + DrawHelper.camera_position.y ), ("0"+x).slice(-2), { color: 
COLOR_SCHEME.borders, font: "700 "+OPTIONS.FONT_SIZE+"px Share Tech Mono" } ); DrawHelper.DrawLine( this._ctx, new Point(x_pos+OPTIONS.GRID_SIZE, DrawHelper.camera_position.y), new Point(x_pos+OPTIONS.GRID_SIZE, DrawHelper.camera_position.y + OPTIONS.GRID_SIZE), { color: COLOR_SCHEME.borders, line_width: OPTIONS.BORDER_WIDTH } ) } //Draw Numbers vertical for(let y = 1; y<OPTIONS.SQUARES_HIGH; y++){ let y_pos = y*OPTIONS.GRID_SIZE; DrawHelper.DrawText( this._ctx, new Point( text_centering_factor + DrawHelper.camera_position.x, y_pos+OPTIONS.FONT_SIZE+text_centering_factor - 5 ), ("0"+y).slice(-2), { color: COLOR_SCHEME.borders, font: "700 "+OPTIONS.FONT_SIZE+"px Share Tech Mono" } ); DrawHelper.DrawLine( this._ctx, new Point(DrawHelper.camera_position.x, y_pos+OPTIONS.GRID_SIZE), new Point(DrawHelper.camera_position.x + OPTIONS.GRID_SIZE, y_pos+OPTIONS.GRID_SIZE), { color: COLOR_SCHEME.borders, line_width: OPTIONS.BORDER_WIDTH } ) } //Little square in top left to hide overlapping numbers DrawHelper.DrawRect( this._ctx, new Point(DrawHelper.camera_position.x,DrawHelper.camera_position.y), new Point(OPTIONS.GRID_SIZE, OPTIONS.GRID_SIZE), { color:COLOR_SCHEME.background } ) //Bottom border below numbers DrawHelper.DrawLine( this._ctx, new Point(DrawHelper.camera_position.x, DrawHelper.camera_position.y+OPTIONS.GRID_SIZE), new Point(DrawHelper.camera_position.x+this._canvas.width, DrawHelper.camera_position.y+OPTIONS.GRID_SIZE), { color:COLOR_SCHEME.borders, line_width: OPTIONS.BORDER_WIDTH } ) //Right border to the right of numbers DrawHelper.DrawLine( this._ctx,<|fim▁hole|> color:COLOR_SCHEME.borders, line_width: OPTIONS.BORDER_WIDTH } ) } */ static CreateMenu(){ let accent_counter = 1;//Fancy tree colors InputManager.AddKeyEvent(false, KeyBindings.ToggleMenu, function(){ Editor.ToggleMenu(); }) this._menu = document.getElementById("menu") as HTMLDivElement; //create new ul for each menu type for(let type of Data.menu_types){ let new_link = 
document.createElement("div"); new_link.innerHTML = type.split("-").join(" "); let new_ul = document.createElement("ul"); new_ul.id = type; //Fancy tree colors new_ul.classList.add("accent"+accent_counter); accent_counter++; new_link.onclick = function(){ if(new_ul.classList.contains("open")){ new_ul.classList.remove("open"); } else{ new_ul.classList.add("open"); } } this._menu.appendChild(new_link); this._menu.appendChild(new_ul); } for(let entity of Data.entities){ let new_li = document.createElement("li"); new_li.innerHTML = "<span>"+entity.name+"</span>"; let value = entity.name; new_li.onclick = ()=>{ Editor.SelectItem(value); } document.getElementById(entity.menu_type).appendChild(new_li); } } static ToggleMenu(){ if(this._menu.classList.contains("open")){ this.CloseMenu(); } else{ this._menu.classList.add("open"); } } static CloseMenu(){ this._menu.classList.remove("open"); } static Resize(){ this._canvas.width = window.innerWidth; this._canvas.height = window.innerHeight; this._canvas.style.height = window.innerHeight+"px"; this._canvas.style.width = window.innerWidth+"px"; this._grid.Resize(new Point( this._canvas.width, this._canvas.height )); } static ScreenToCameraCoords(p: Point): Point{ return p.AddC(DrawHelper.camera_position); } static CameraToGridCoords(p: Point): Point{ return new Point( Math.round(p.x / OPTIONS.GRID_SIZE - .5), Math.round(p.y / OPTIONS.GRID_SIZE - .5) ); } static ScreenToGridCoords(p: Point): Point{ return this.CameraToGridCoords(this.ScreenToCameraCoords(p)); } static DecodeString(b64: string): any{ try{ let str_data = atob(b64.substr(1)); let char_data = str_data.split('').map(function(x){return x.charCodeAt(0);}); let bin_data = new Uint8Array(char_data); let data = pako.inflate(bin_data); let str = String.fromCharCode.apply(null, new Uint16Array(data)); let json_data = JSON.parse(str); console.log(json_data); return json_data; } catch (e){ console.log("Oops... 
tis borked"); } } static LoadBlueprint(b64: string){ let blueprint = this.DecodeString(b64).blueprint; for(let entity of blueprint.entities){ console.log(entity); } } } export class Animator{ private current_frame: number; private frame_count: number; private current_tick: number; private ticks_per_frame:number; constructor(frame_count:number, ticks_per_frame:number){ this.current_frame = 0; this.frame_count = frame_count; this.current_tick = 0; this.ticks_per_frame = ticks_per_frame; } public Update(){ if(this.current_tick < this.ticks_per_frame){ this.current_tick++; } else{ this.current_tick = 0; if(this.ticks_per_frame < 0){ this.current_frame+=Math.abs(this.ticks_per_frame); } else{ this.current_frame++; } if(this.current_frame >= this.frame_count){ this.current_frame = 0; } } } public CurrentFrame(): number{ return this.current_frame; } } window.onload = function(){ Editor.Init(); UpdateLoop(); } window.onresize = function(){ Editor.Resize(); } function UpdateLoop(){ //Do this so Editor can still use 'this' within the update function Editor.Update(); window.requestAnimationFrame(UpdateLoop); }<|fim▁end|>
new Point(DrawHelper.camera_position.x+OPTIONS.GRID_SIZE, DrawHelper.camera_position.y), new Point(DrawHelper.camera_position.x+OPTIONS.GRID_SIZE, DrawHelper.camera_position.y+this._canvas.height), {
<|file_name|>serviceconsumermanagement-gen.go<|end_file_name|><|fim▁begin|>// Package serviceconsumermanagement provides access to the Service Consumer Management API. // // See https://cloud.google.com/service-consumer-management/docs/overview // // Usage example: // // import "google.golang.org/api/serviceconsumermanagement/v1" // ... // serviceconsumermanagementService, err := serviceconsumermanagement.New(oauthHttpClient) package serviceconsumermanagement // import "google.golang.org/api/serviceconsumermanagement/v1" import ( "bytes" "encoding/json" "errors" "fmt" context "golang.org/x/net/context" ctxhttp "golang.org/x/net/context/ctxhttp" gensupport "google.golang.org/api/gensupport" googleapi "google.golang.org/api/googleapi" "io" "net/http" "net/url" "strconv" "strings" ) // Always reference these packages, just in case the auto-generated code // below doesn't. var _ = bytes.NewBuffer var _ = strconv.Itoa var _ = fmt.Sprintf var _ = json.NewDecoder var _ = io.Copy var _ = url.Parse var _ = gensupport.MarshalJSON var _ = googleapi.Version var _ = errors.New var _ = strings.Replace var _ = context.Canceled var _ = ctxhttp.Do const apiId = "serviceconsumermanagement:v1" const apiName = "serviceconsumermanagement" const apiVersion = "v1" const basePath = "https://serviceconsumermanagement.googleapis.com/" // OAuth2 scopes used by this API. 
const ( // View and manage your data across Google Cloud Platform services CloudPlatformScope = "https://www.googleapis.com/auth/cloud-platform" ) func New(client *http.Client) (*APIService, error) { if client == nil { return nil, errors.New("client is nil") } s := &APIService{client: client, BasePath: basePath} s.Operations = NewOperationsService(s) s.Services = NewServicesService(s) return s, nil } type APIService struct { client *http.Client BasePath string // API endpoint base URL UserAgent string // optional additional User-Agent fragment Operations *OperationsService Services *ServicesService } func (s *APIService) userAgent() string { if s.UserAgent == "" { return googleapi.UserAgent } return googleapi.UserAgent + " " + s.UserAgent } func NewOperationsService(s *APIService) *OperationsService { rs := &OperationsService{s: s} return rs } type OperationsService struct { s *APIService } func NewServicesService(s *APIService) *ServicesService { rs := &ServicesService{s: s} rs.TenancyUnits = NewServicesTenancyUnitsService(s) return rs } type ServicesService struct { s *APIService TenancyUnits *ServicesTenancyUnitsService } func NewServicesTenancyUnitsService(s *APIService) *ServicesTenancyUnitsService { rs := &ServicesTenancyUnitsService{s: s} return rs } type ServicesTenancyUnitsService struct { s *APIService } // AddTenantProjectRequest: Request to add a newly created and // configured tenant project to a tenancy // unit. type AddTenantProjectRequest struct { // ProjectConfig: Configuration of the new tenant project that will be // added to tenancy unit // resources. ProjectConfig *TenantProjectConfig `json:"projectConfig,omitempty"` // Tag: Tag of the added project. Must be less than 128 characters. // Required. Tag string `json:"tag,omitempty"` // ForceSendFields is a list of field names (e.g. "ProjectConfig") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. 
However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "ProjectConfig") to include // in API requests with the JSON null value. By default, fields with // empty values are omitted from API requests. However, any field with // an empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *AddTenantProjectRequest) MarshalJSON() ([]byte, error) { type NoMethod AddTenantProjectRequest raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // Api: Api is a light-weight descriptor for an API // Interface. // // Interfaces are also described as "protocol buffer services" in some // contexts, // such as by the "service" keyword in a .proto file, but they are // different // from API Services, which represent a concrete implementation of an // interface // as opposed to simply a description of methods and bindings. They are // also // sometimes simply referred to as "APIs" in other contexts, such as the // name of // this message itself. See // https://cloud.google.com/apis/design/glossary for // detailed terminology. type Api struct { // Methods: The methods of this interface, in unspecified order. Methods []*Method `json:"methods,omitempty"` // Mixins: Included interfaces. See Mixin. Mixins []*Mixin `json:"mixins,omitempty"` // Name: The fully qualified name of this interface, including package // name // followed by the interface's simple name. Name string `json:"name,omitempty"` // Options: Any metadata attached to the interface. 
Options []*Option `json:"options,omitempty"` // SourceContext: Source context for the protocol buffer service // represented by this // message. SourceContext *SourceContext `json:"sourceContext,omitempty"` // Syntax: The source syntax of the service. // // Possible values: // "SYNTAX_PROTO2" - Syntax `proto2`. // "SYNTAX_PROTO3" - Syntax `proto3`. Syntax string `json:"syntax,omitempty"` // Version: A version string for this interface. If specified, must have // the form // `major-version.minor-version`, as in `1.10`. If the minor version // is // omitted, it defaults to zero. If the entire version field is empty, // the // major version is derived from the package name, as outlined below. If // the // field is not empty, the version in the package name will be verified // to be // consistent with what is provided here. // // The versioning schema uses [semantic // versioning](http://semver.org) where the major version // number // indicates a breaking change and the minor version an // additive, // non-breaking change. Both version numbers are signals to users // what to expect from different versions, and should be // carefully // chosen based on the product plan. // // The major version is also reflected in the package name of // the // interface, which must end in `v<major-version>`, as // in // `google.feature.v1`. For major versions 0 and 1, the suffix can // be omitted. Zero major versions must only be used for // experimental, non-GA interfaces. // Version string `json:"version,omitempty"` // ForceSendFields is a list of field names (e.g. "Methods") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. 
ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Methods") to include in // API requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *Api) MarshalJSON() ([]byte, error) { type NoMethod Api raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // AuthProvider: Configuration for an anthentication provider, including // support for // [JSON Web Token // (JWT)](https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32) // . type AuthProvider struct { // Audiences: The list of // JWT // [audiences](https://tools.ietf.org/html/draft-ietf-oauth-json-web- // token-32#section-4.1.3). // that are allowed to access. A JWT containing any of these audiences // will // be accepted. When this setting is absent, only JWTs with // audience // "https://Service_name/API_name" // will be accepted. For example, if no audiences are in the // setting, // LibraryService API will only accept JWTs with the following // audience // "https://library-example.googleapis.com/google.example.librar // y.v1.LibraryService". // // Example: // // audiences: bookstore_android.apps.googleusercontent.com, // bookstore_web.apps.googleusercontent.com Audiences string `json:"audiences,omitempty"` // AuthorizationUrl: Redirect URL if JWT token is required but no // present or is expired. // Implement authorizationUrl of securityDefinitions in OpenAPI spec. AuthorizationUrl string `json:"authorizationUrl,omitempty"` // Id: The unique identifier of the auth provider. It will be referred // to by // `AuthRequirement.provider_id`. // // Example: "bookstore_auth". 
Id string `json:"id,omitempty"` // Issuer: Identifies the principal that issued the JWT. // See // https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32#sec // tion-4.1.1 // Usually a URL or an email address. // // Example: https://securetoken.google.com // Example: [email protected] Issuer string `json:"issuer,omitempty"` // JwksUri: URL of the provider's public key set to validate signature // of the JWT. See // [OpenID // Discovery](https://openid.net/specs/openid-connect-discovery-1_0.html# // ProviderMetadata). // Optional if the key set document: // - can be retrieved from // [OpenID // Discovery](https://openid.net/specs/openid-connect-discovery-1_0.html // // of the issuer. // - can be inferred from the email domain of the issuer (e.g. a Google // service account). // // Example: https://www.googleapis.com/oauth2/v1/certs JwksUri string `json:"jwksUri,omitempty"` // ForceSendFields is a list of field names (e.g. "Audiences") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Audiences") to include in // API requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. 
NullFields []string `json:"-"` } func (s *AuthProvider) MarshalJSON() ([]byte, error) { type NoMethod AuthProvider raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // AuthRequirement: User-defined authentication requirements, including // support for // [JSON Web Token // (JWT)](https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32) // . type AuthRequirement struct { // Audiences: NOTE: This will be deprecated soon, once // AuthProvider.audiences is // implemented and accepted in all the runtime components. // // The list of // JWT // [audiences](https://tools.ietf.org/html/draft-ietf-oauth-json-web- // token-32#section-4.1.3). // that are allowed to access. A JWT containing any of these audiences // will // be accepted. When this setting is absent, only JWTs with // audience // "https://Service_name/API_name" // will be accepted. For example, if no audiences are in the // setting, // LibraryService API will only accept JWTs with the following // audience // "https://library-example.googleapis.com/google.example.librar // y.v1.LibraryService". // // Example: // // audiences: bookstore_android.apps.googleusercontent.com, // bookstore_web.apps.googleusercontent.com Audiences string `json:"audiences,omitempty"` // ProviderId: id from authentication provider. // // Example: // // provider_id: bookstore_auth ProviderId string `json:"providerId,omitempty"` // ForceSendFields is a list of field names (e.g. "Audiences") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Audiences") to include in // API requests with the JSON null value. 
By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *AuthRequirement) MarshalJSON() ([]byte, error) { type NoMethod AuthRequirement raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // Authentication: `Authentication` defines the authentication // configuration for an API. // // Example for an API targeted for external use: // // name: calendar.googleapis.com // authentication: // providers: // - id: google_calendar_auth // jwks_uri: https://www.googleapis.com/oauth2/v1/certs // issuer: https://securetoken.google.com // rules: // - selector: "*" // requirements: // provider_id: google_calendar_auth type Authentication struct { // Providers: Defines a set of authentication providers that a service // supports. Providers []*AuthProvider `json:"providers,omitempty"` // Rules: A list of authentication rules that apply to individual API // methods. // // **NOTE:** All service configuration rules follow "last one wins" // order. Rules []*AuthenticationRule `json:"rules,omitempty"` // ForceSendFields is a list of field names (e.g. "Providers") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Providers") to include in // API requests with the JSON null value. By default, fields with empty // values are omitted from API requests. 
However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *Authentication) MarshalJSON() ([]byte, error) { type NoMethod Authentication raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // AuthenticationRule: Authentication rules for the service. // // By default, if a method has any authentication requirements, every // request // must include a valid credential matching one of the // requirements. // It's an error to include more than one kind of credential in a // single // request. // // If a method doesn't have any auth requirements, request credentials // will be // ignored. type AuthenticationRule struct { // AllowWithoutCredential: If true, the service accepts API keys without // any other credential. AllowWithoutCredential bool `json:"allowWithoutCredential,omitempty"` // Oauth: The requirements for OAuth credentials. Oauth *OAuthRequirements `json:"oauth,omitempty"` // Requirements: Requirements for additional authentication providers. Requirements []*AuthRequirement `json:"requirements,omitempty"` // Selector: Selects the methods to which this rule applies. // // Refer to selector for syntax details. Selector string `json:"selector,omitempty"` // ForceSendFields is a list of field names (e.g. // "AllowWithoutCredential") to unconditionally include in API requests. // By default, fields with empty values are omitted from API requests. // However, any non-pointer, non-interface field appearing in // ForceSendFields will be sent to the server regardless of whether the // field is empty or not. This may be used to include empty fields in // Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. 
"AllowWithoutCredential") // to include in API requests with the JSON null value. By default, // fields with empty values are omitted from API requests. However, any // field with an empty value appearing in NullFields will be sent to the // server as null. It is an error if a field in this list has a // non-empty value. This may be used to include null fields in Patch // requests. NullFields []string `json:"-"` } func (s *AuthenticationRule) MarshalJSON() ([]byte, error) { type NoMethod AuthenticationRule raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // AuthorizationConfig: Configuration of authorization. // // This section determines the authorization provider, if unspecified, // then no // authorization check will be done. // // Example: // // experimental: // authorization: // provider: firebaserules.googleapis.com type AuthorizationConfig struct { // Provider: The name of the authorization provider, such // as // firebaserules.googleapis.com. Provider string `json:"provider,omitempty"` // ForceSendFields is a list of field names (e.g. "Provider") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Provider") to include in // API requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. 
NullFields []string `json:"-"` } func (s *AuthorizationConfig) MarshalJSON() ([]byte, error) { type NoMethod AuthorizationConfig raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // AuthorizationRule: Authorization rule for API services. // // It specifies the permission(s) required for an API element for the // overall // API request to succeed. It is typically used to mark request message // fields // that contain the name of the resource and indicates the permissions // that // will be checked on that resource. // // For example: // // package google.storage.v1; // // message CopyObjectRequest { // string source = 1 [ // (google.api.authz).permissions = "storage.objects.get"]; // // string destination = 2 [ // (google.api.authz).permissions = // "storage.objects.create,storage.objects.update"]; // } type AuthorizationRule struct { // Permissions: The required permissions. The acceptable values vary // depend on the // authorization system used. For Google APIs, it should be a // comma-separated // Google IAM permission values. When multiple permissions are listed, // the // semantics is not defined by the system. Additional documentation // must // be provided manually. Permissions string `json:"permissions,omitempty"` // Selector: Selects the API elements to which this rule applies. // // Refer to selector for syntax details. Selector string `json:"selector,omitempty"` // ForceSendFields is a list of field names (e.g. "Permissions") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Permissions") to include // in API requests with the JSON null value. 
By default, fields with // empty values are omitted from API requests. However, any field with // an empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *AuthorizationRule) MarshalJSON() ([]byte, error) { type NoMethod AuthorizationRule raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // Backend: `Backend` defines the backend configuration for a service. type Backend struct { // Rules: A list of API backend rules that apply to individual API // methods. // // **NOTE:** All service configuration rules follow "last one wins" // order. Rules []*BackendRule `json:"rules,omitempty"` // ForceSendFields is a list of field names (e.g. "Rules") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Rules") to include in API // requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *Backend) MarshalJSON() ([]byte, error) { type NoMethod Backend raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // BackendRule: A backend rule provides configuration for an individual // API element. 
type BackendRule struct { // Address: The address of the API backend. Address string `json:"address,omitempty"` // Deadline: The number of seconds to wait for a response from a // request. The default // deadline for gRPC is infinite (no deadline) and HTTP requests is 5 // seconds. Deadline float64 `json:"deadline,omitempty"` // MinDeadline: Minimum deadline in seconds needed for this method. // Calls having deadline // value lower than this will be rejected. MinDeadline float64 `json:"minDeadline,omitempty"` // Selector: Selects the methods to which this rule applies. // // Refer to selector for syntax details. Selector string `json:"selector,omitempty"` // ForceSendFields is a list of field names (e.g. "Address") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Address") to include in // API requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. 
NullFields []string `json:"-"` } func (s *BackendRule) MarshalJSON() ([]byte, error) { type NoMethod BackendRule raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } func (s *BackendRule) UnmarshalJSON(data []byte) error { type NoMethod BackendRule var s1 struct { Deadline gensupport.JSONFloat64 `json:"deadline"` MinDeadline gensupport.JSONFloat64 `json:"minDeadline"` *NoMethod } s1.NoMethod = (*NoMethod)(s) if err := json.Unmarshal(data, &s1); err != nil { return err } s.Deadline = float64(s1.Deadline) s.MinDeadline = float64(s1.MinDeadline) return nil } // Billing: Billing related configuration of the service. // // The following example shows how to configure monitored resources and // metrics // for billing: // // monitored_resources: // - type: library.googleapis.com/branch // labels: // - key: /city // description: The city where the library branch is located // in. // - key: /name // description: The name of the branch. // metrics: // - name: library.googleapis.com/book/borrowed_count // metric_kind: DELTA // value_type: INT64 // billing: // consumer_destinations: // - monitored_resource: library.googleapis.com/branch // metrics: // - library.googleapis.com/book/borrowed_count type Billing struct { // ConsumerDestinations: Billing configurations for sending metrics to // the consumer project. // There can be multiple consumer destinations per service, each one // must have // a different monitored resource type. A metric can be used in at // most // one consumer destination. ConsumerDestinations []*BillingDestination `json:"consumerDestinations,omitempty"` // ForceSendFields is a list of field names (e.g. // "ConsumerDestinations") to unconditionally include in API requests. // By default, fields with empty values are omitted from API requests. // However, any non-pointer, non-interface field appearing in // ForceSendFields will be sent to the server regardless of whether the // field is empty or not. 
This may be used to include empty fields in // Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "ConsumerDestinations") to // include in API requests with the JSON null value. By default, fields // with empty values are omitted from API requests. However, any field // with an empty value appearing in NullFields will be sent to the // server as null. It is an error if a field in this list has a // non-empty value. This may be used to include null fields in Patch // requests. NullFields []string `json:"-"` } func (s *Billing) MarshalJSON() ([]byte, error) { type NoMethod Billing raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // BillingConfig: Describes billing configuration for a new tenant // project. type BillingConfig struct { // BillingAccount: Name of the billing account. // For example `billingAccounts/012345-567890-ABCDEF`. BillingAccount string `json:"billingAccount,omitempty"` // ForceSendFields is a list of field names (e.g. "BillingAccount") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "BillingAccount") to // include in API requests with the JSON null value. By default, fields // with empty values are omitted from API requests. However, any field // with an empty value appearing in NullFields will be sent to the // server as null. It is an error if a field in this list has a // non-empty value. This may be used to include null fields in Patch // requests. 
NullFields []string `json:"-"` } func (s *BillingConfig) MarshalJSON() ([]byte, error) { type NoMethod BillingConfig raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // BillingDestination: Configuration of a specific billing destination // (Currently only support // bill against consumer project). type BillingDestination struct { // Metrics: Names of the metrics to report to this billing // destination. // Each name must be defined in Service.metrics section. Metrics []string `json:"metrics,omitempty"` // MonitoredResource: The monitored resource type. The type must be // defined in // Service.monitored_resources section. MonitoredResource string `json:"monitoredResource,omitempty"` // ForceSendFields is a list of field names (e.g. "Metrics") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Metrics") to include in // API requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *BillingDestination) MarshalJSON() ([]byte, error) { type NoMethod BillingDestination raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // CancelOperationRequest: The request message for // Operations.CancelOperation. type CancelOperationRequest struct { } // Context: `Context` defines which contexts an API // requests. 
// // Example: // // context: // rules: // - selector: "*" // requested: // - google.rpc.context.ProjectContext // - google.rpc.context.OriginContext // // The above specifies that all methods in the API // request // `google.rpc.context.ProjectContext` // and // `google.rpc.context.OriginContext`. // // Available context types are defined in // package // `google.rpc.context`. // // This also provides mechanism to whitelist any protobuf message // extension that // can be sent in grpc metadata using // “x-goog-ext-<extension_id>-bin” // and // “x-goog-ext-<extension_id>-jspb” format. For example, list any // service // specific protobuf types that can appear in grpc metadata as follows // in your // yaml file: // // Example: // // context: // rules: // - selector: // "google.example.library.v1.LibraryService.CreateBook" // allowed_request_extensions: // - google.foo.v1.NewExtension // allowed_response_extensions: // - google.foo.v1.NewExtension // // You can also specify extension ID instead of fully qualified // extension name // here. type Context struct { // Rules: A list of RPC context rules that apply to individual API // methods. // // **NOTE:** All service configuration rules follow "last one wins" // order. Rules []*ContextRule `json:"rules,omitempty"` // ForceSendFields is a list of field names (e.g. "Rules") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Rules") to include in API // requests with the JSON null value. By default, fields with empty // values are omitted from API requests. 
However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *Context) MarshalJSON() ([]byte, error) { type NoMethod Context raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // ContextRule: A context rule provides information about the context // for an individual API // element. type ContextRule struct { // AllowedRequestExtensions: A list of full type names or extension IDs // of extensions allowed in grpc // side channel from client to backend. AllowedRequestExtensions []string `json:"allowedRequestExtensions,omitempty"` // AllowedResponseExtensions: A list of full type names or extension IDs // of extensions allowed in grpc // side channel from backend to client. AllowedResponseExtensions []string `json:"allowedResponseExtensions,omitempty"` // Provided: A list of full type names of provided contexts. Provided []string `json:"provided,omitempty"` // Requested: A list of full type names of requested contexts. Requested []string `json:"requested,omitempty"` // Selector: Selects the methods to which this rule applies. // // Refer to selector for syntax details. Selector string `json:"selector,omitempty"` // ForceSendFields is a list of field names (e.g. // "AllowedRequestExtensions") to unconditionally include in API // requests. By default, fields with empty values are omitted from API // requests. However, any non-pointer, non-interface field appearing in // ForceSendFields will be sent to the server regardless of whether the // field is empty or not. This may be used to include empty fields in // Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "AllowedRequestExtensions") // to include in API requests with the JSON null value. 
By default, // fields with empty values are omitted from API requests. However, any // field with an empty value appearing in NullFields will be sent to the // server as null. It is an error if a field in this list has a // non-empty value. This may be used to include null fields in Patch // requests. NullFields []string `json:"-"` } func (s *ContextRule) MarshalJSON() ([]byte, error) { type NoMethod ContextRule raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // Control: Selects and configures the service controller used by the // service. The // service controller handles features like abuse, quota, billing, // logging, // monitoring, etc. type Control struct { // Environment: The service control environment to use. If empty, no // control plane // feature (like quota and billing) will be enabled. Environment string `json:"environment,omitempty"` // ForceSendFields is a list of field names (e.g. "Environment") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Environment") to include // in API requests with the JSON null value. By default, fields with // empty values are omitted from API requests. However, any field with // an empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. 
NullFields []string `json:"-"` } func (s *Control) MarshalJSON() ([]byte, error) { type NoMethod Control raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // CreateTenancyUnitRequest: Request to create a tenancy unit for a // consumer of a service. type CreateTenancyUnitRequest struct { // TenancyUnitId: Optional producer provided identifier of the tenancy // unit. // Must be no longer than 40 characters and preferably URI friendly. // If it is not provided, a UID for the tenancy unit will be auto // generated. // It must be unique across a service. // If the tenancy unit already exists for the service and consumer // pair, // `CreateTenancyUnit` will return the existing tenancy unit if the // provided // identifier is identical or empty, otherwise the call will fail. TenancyUnitId string `json:"tenancyUnitId,omitempty"` // ForceSendFields is a list of field names (e.g. "TenancyUnitId") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "TenancyUnitId") to include // in API requests with the JSON null value. By default, fields with // empty values are omitted from API requests. However, any field with // an empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. 
NullFields []string `json:"-"` } func (s *CreateTenancyUnitRequest) MarshalJSON() ([]byte, error) { type NoMethod CreateTenancyUnitRequest raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // CustomError: Customize service error responses. For example, list // any service // specific protobuf types that can appear in error detail lists // of // error responses. // // Example: // // custom_error: // types: // - google.foo.v1.CustomError // - google.foo.v1.AnotherError type CustomError struct { // Rules: The list of custom error rules that apply to individual API // messages. // // **NOTE:** All service configuration rules follow "last one wins" // order. Rules []*CustomErrorRule `json:"rules,omitempty"` // Types: The list of custom error detail types, e.g. // 'google.foo.v1.CustomError'. Types []string `json:"types,omitempty"` // ForceSendFields is a list of field names (e.g. "Rules") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Rules") to include in API // requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *CustomError) MarshalJSON() ([]byte, error) { type NoMethod CustomError raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // CustomErrorRule: A custom error rule. 
type CustomErrorRule struct { // IsErrorType: Mark this message as possible payload in error response. // Otherwise, // objects of this type will be filtered when they appear in error // payload. IsErrorType bool `json:"isErrorType,omitempty"` // Selector: Selects messages to which this rule applies. // // Refer to selector for syntax details. Selector string `json:"selector,omitempty"` // ForceSendFields is a list of field names (e.g. "IsErrorType") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "IsErrorType") to include // in API requests with the JSON null value. By default, fields with // empty values are omitted from API requests. However, any field with // an empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *CustomErrorRule) MarshalJSON() ([]byte, error) { type NoMethod CustomErrorRule raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // CustomHttpPattern: A custom pattern is used for defining custom HTTP // verb. type CustomHttpPattern struct { // Kind: The name of this custom HTTP verb. Kind string `json:"kind,omitempty"` // Path: The path matched by this custom verb. Path string `json:"path,omitempty"` // ForceSendFields is a list of field names (e.g. "Kind") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. 
However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Kind") to include in API // requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *CustomHttpPattern) MarshalJSON() ([]byte, error) { type NoMethod CustomHttpPattern raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // Documentation: `Documentation` provides the information for // describing a service. // // Example: // <pre><code>documentation: // summary: > // The Google Calendar API gives access // to most calendar features. // pages: // - name: Overview // content: &#40;== include google/foo/overview.md ==&#41; // - name: Tutorial // content: &#40;== include google/foo/tutorial.md ==&#41; // subpages; // - name: Java // content: &#40;== include google/foo/tutorial_java.md ==&#41; // rules: // - selector: google.calendar.Calendar.Get // description: > // ... // - selector: google.calendar.Calendar.Put // description: > // ... // </code></pre> // Documentation is provided in markdown syntax. In addition to // standard markdown features, definition lists, tables and fenced // code blocks are supported. Section headers can be provided and // are // interpreted relative to the section nesting of the context where // a documentation fragment is embedded. 
// // Documentation from the IDL is merged with documentation defined // via the config at normalization time, where documentation provided // by config rules overrides IDL provided. // // A number of constructs specific to the API platform are supported // in documentation text. // // In order to reference a proto element, the following // notation can be // used: // <pre><code>&#91;fully.qualified.proto.name]&#91;]</code></pre> // T // o override the display text used for the link, this can be // used: // <pre><code>&#91;display // text]&#91;fully.qualified.proto.name]</code></pre> // Text can be excluded from doc using the following // notation: // <pre><code>&#40;-- internal comment --&#41;</code></pre> // // A few directives are available in documentation. Note that // directives must appear on a single line to be properly // identified. The `include` directive includes a markdown file from // an external source: // <pre><code>&#40;== include path/to/file ==&#41;</code></pre> // The `resource_for` directive marks a message to be the resource of // a collection in REST view. If it is not specified, tools attempt // to infer the resource from the operations in a // collection: // <pre><code>&#40;== resource_for v1.shelves.books // ==&#41;</code></pre> // The directive `suppress_warning` does not directly affect // documentation // and is documented together with service config validation. type Documentation struct { // DocumentationRootUrl: The URL to the root of documentation. DocumentationRootUrl string `json:"documentationRootUrl,omitempty"` // Overview: Declares a single overview page. For // example: // <pre><code>documentation: // summary: ... // overview: &#40;== include overview.md ==&#41; // </code></pre> // This is a shortcut for the following declaration (using pages // style): // <pre><code>documentation: // summary: ... 
// pages: // - name: Overview // content: &#40;== include overview.md ==&#41; // </code></pre> // Note: you cannot specify both `overview` field and `pages` field. Overview string `json:"overview,omitempty"` // Pages: The top level pages for the documentation set. Pages []*Page `json:"pages,omitempty"` // Rules: A list of documentation rules that apply to individual API // elements. // // **NOTE:** All service configuration rules follow "last one wins" // order. Rules []*DocumentationRule `json:"rules,omitempty"` // Summary: A short summary of what the service does. Can only be // provided by // plain text. Summary string `json:"summary,omitempty"` // ForceSendFields is a list of field names (e.g. // "DocumentationRootUrl") to unconditionally include in API requests. // By default, fields with empty values are omitted from API requests. // However, any non-pointer, non-interface field appearing in // ForceSendFields will be sent to the server regardless of whether the // field is empty or not. This may be used to include empty fields in // Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "DocumentationRootUrl") to // include in API requests with the JSON null value. By default, fields // with empty values are omitted from API requests. However, any field // with an empty value appearing in NullFields will be sent to the // server as null. It is an error if a field in this list has a // non-empty value. This may be used to include null fields in Patch // requests. NullFields []string `json:"-"` } func (s *Documentation) MarshalJSON() ([]byte, error) { type NoMethod Documentation raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // DocumentationRule: A documentation rule provides information about // individual API elements. type DocumentationRule struct { // DeprecationDescription: Deprecation description of the selected // element(s). 
It can be provided if an // element is marked as `deprecated`. DeprecationDescription string `json:"deprecationDescription,omitempty"` // Description: Description of the selected API(s). Description string `json:"description,omitempty"` // Selector: The selector is a comma-separated list of patterns. Each // pattern is a // qualified name of the element which may end in "*", indicating a // wildcard. // Wildcards are only allowed at the end and for a whole component of // the // qualified name, i.e. "foo.*" is ok, but not "foo.b*" or "foo.*.bar". // To // specify a default for all applicable elements, the whole pattern // "*" // is used. Selector string `json:"selector,omitempty"` // ForceSendFields is a list of field names (e.g. // "DeprecationDescription") to unconditionally include in API requests. // By default, fields with empty values are omitted from API requests. // However, any non-pointer, non-interface field appearing in // ForceSendFields will be sent to the server regardless of whether the // field is empty or not. This may be used to include empty fields in // Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "DeprecationDescription") // to include in API requests with the JSON null value. By default, // fields with empty values are omitted from API requests. However, any // field with an empty value appearing in NullFields will be sent to the // server as null. It is an error if a field in this list has a // non-empty value. This may be used to include null fields in Patch // requests. NullFields []string `json:"-"` } func (s *DocumentationRule) MarshalJSON() ([]byte, error) { type NoMethod DocumentationRule raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // Empty: A generic empty message that you can re-use to avoid defining // duplicated // empty messages in your APIs. A typical example is to use it as the // request // or the response type of an API method. 
For instance: // // service Foo { // rpc Bar(google.protobuf.Empty) returns // (google.protobuf.Empty); // } // // The JSON representation for `Empty` is empty JSON object `{}`. type Empty struct { // ServerResponse contains the HTTP response code and headers from the // server. googleapi.ServerResponse `json:"-"` } // Endpoint: `Endpoint` describes a network endpoint that serves a set // of APIs. // A service may expose any number of endpoints, and all endpoints share // the // same service configuration, such as quota configuration and // monitoring // configuration. // // Example service configuration: // // name: library-example.googleapis.com // endpoints: // # Below entry makes 'google.example.library.v1.Library' // # API be served from endpoint address // library-example.googleapis.com. // # It also allows HTTP OPTIONS calls to be passed to the // backend, for // # it to decide whether the subsequent cross-origin request is // # allowed to proceed. // - name: library-example.googleapis.com // allow_cors: true type Endpoint struct { // Aliases: DEPRECATED: This field is no longer supported. Instead of // using aliases, // please specify multiple google.api.Endpoint for each of the // intended // aliases. // // Additional names that this endpoint will be hosted on. Aliases []string `json:"aliases,omitempty"` // AllowCors: // Allowing // [CORS](https://en.wikipedia.org/wiki/Cross-origin_resource_sh // aring), aka // cross-domain traffic, would allow the backends served from this // endpoint to // receive and respond to HTTP OPTIONS requests. The response will be // used by // the browser to determine whether the subsequent cross-origin request // is // allowed to proceed. AllowCors bool `json:"allowCors,omitempty"` // Features: The list of features enabled on this endpoint. Features []string `json:"features,omitempty"` // Name: The canonical name of this endpoint. 
Name string `json:"name,omitempty"` // Target: The specification of an Internet routable address of API // frontend that will // handle requests to this [API // Endpoint](https://cloud.google.com/apis/design/glossary). // It should be either a valid IPv4 address or a fully-qualified domain // name. // For example, "8.8.8.8" or "myservice.appspot.com". Target string `json:"target,omitempty"` // ForceSendFields is a list of field names (e.g. "Aliases") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Aliases") to include in // API requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *Endpoint) MarshalJSON() ([]byte, error) { type NoMethod Endpoint raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // Enum: Enum type definition. type Enum struct { // Enumvalue: Enum value definitions. Enumvalue []*EnumValue `json:"enumvalue,omitempty"` // Name: Enum type name. Name string `json:"name,omitempty"` // Options: Protocol buffer options. Options []*Option `json:"options,omitempty"` // SourceContext: The source context. SourceContext *SourceContext `json:"sourceContext,omitempty"` // Syntax: The source syntax. // // Possible values: // "SYNTAX_PROTO2" - Syntax `proto2`. // "SYNTAX_PROTO3" - Syntax `proto3`. 
Syntax string `json:"syntax,omitempty"` // ForceSendFields is a list of field names (e.g. "Enumvalue") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Enumvalue") to include in // API requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *Enum) MarshalJSON() ([]byte, error) { type NoMethod Enum raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // EnumValue: Enum value definition. type EnumValue struct { // Name: Enum value name. Name string `json:"name,omitempty"` // Number: Enum value number. Number int64 `json:"number,omitempty"` // Options: Protocol buffer options. Options []*Option `json:"options,omitempty"` // ForceSendFields is a list of field names (e.g. "Name") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Name") to include in API // requests with the JSON null value. By default, fields with empty // values are omitted from API requests. 
However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *EnumValue) MarshalJSON() ([]byte, error) { type NoMethod EnumValue raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // Experimental: Experimental service configuration. These configuration // options can // only be used by whitelisted users. type Experimental struct { // Authorization: Authorization configuration. Authorization *AuthorizationConfig `json:"authorization,omitempty"` // ForceSendFields is a list of field names (e.g. "Authorization") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Authorization") to include // in API requests with the JSON null value. By default, fields with // empty values are omitted from API requests. However, any field with // an empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *Experimental) MarshalJSON() ([]byte, error) { type NoMethod Experimental raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // Field: A single field of a message type. type Field struct { // Cardinality: The field cardinality. // // Possible values: // "CARDINALITY_UNKNOWN" - For fields with unknown cardinality. // "CARDINALITY_OPTIONAL" - For optional fields. 
// "CARDINALITY_REQUIRED" - For required fields. Proto2 syntax only. // "CARDINALITY_REPEATED" - For repeated fields. Cardinality string `json:"cardinality,omitempty"` // DefaultValue: The string value of the default value of this field. // Proto2 syntax only. DefaultValue string `json:"defaultValue,omitempty"` // JsonName: The field JSON name. JsonName string `json:"jsonName,omitempty"` // Kind: The field type. // // Possible values: // "TYPE_UNKNOWN" - Field type unknown. // "TYPE_DOUBLE" - Field type double. // "TYPE_FLOAT" - Field type float. // "TYPE_INT64" - Field type int64. // "TYPE_UINT64" - Field type uint64. // "TYPE_INT32" - Field type int32. // "TYPE_FIXED64" - Field type fixed64. // "TYPE_FIXED32" - Field type fixed32. // "TYPE_BOOL" - Field type bool. // "TYPE_STRING" - Field type string. // "TYPE_GROUP" - Field type group. Proto2 syntax only, and // deprecated. // "TYPE_MESSAGE" - Field type message. // "TYPE_BYTES" - Field type bytes. // "TYPE_UINT32" - Field type uint32. // "TYPE_ENUM" - Field type enum. // "TYPE_SFIXED32" - Field type sfixed32. // "TYPE_SFIXED64" - Field type sfixed64. // "TYPE_SINT32" - Field type sint32. // "TYPE_SINT64" - Field type sint64. Kind string `json:"kind,omitempty"` // Name: The field name. Name string `json:"name,omitempty"` // Number: The field number. Number int64 `json:"number,omitempty"` // OneofIndex: The index of the field type in `Type.oneofs`, for message // or enumeration // types. The first type has index 1; zero means the type is not in the // list. OneofIndex int64 `json:"oneofIndex,omitempty"` // Options: The protocol buffer options. Options []*Option `json:"options,omitempty"` // Packed: Whether to use alternative packed wire representation. Packed bool `json:"packed,omitempty"` // TypeUrl: The field type URL, without the scheme, for message or // enumeration // types. Example: "type.googleapis.com/google.protobuf.Timestamp". 
TypeUrl string `json:"typeUrl,omitempty"` // ForceSendFields is a list of field names (e.g. "Cardinality") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Cardinality") to include // in API requests with the JSON null value. By default, fields with // empty values are omitted from API requests. However, any field with // an empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *Field) MarshalJSON() ([]byte, error) { type NoMethod Field raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // Http: Defines the HTTP configuration for an API service. It contains // a list of // HttpRule, each specifying the mapping of an RPC method // to one or more HTTP REST API methods. type Http struct { // FullyDecodeReservedExpansion: When set to true, URL path parmeters // will be fully URI-decoded except in // cases of single segment matches in reserved expansion, where "%2F" // will be // left encoded. // // The default behavior is to not decode RFC 6570 reserved characters in // multi // segment matches. FullyDecodeReservedExpansion bool `json:"fullyDecodeReservedExpansion,omitempty"` // Rules: A list of HTTP configuration rules that apply to individual // API methods. // // **NOTE:** All service configuration rules follow "last one wins" // order. Rules []*HttpRule `json:"rules,omitempty"` // ForceSendFields is a list of field names (e.g. 
// "FullyDecodeReservedExpansion") to unconditionally include in API // requests. By default, fields with empty values are omitted from API // requests. However, any non-pointer, non-interface field appearing in // ForceSendFields will be sent to the server regardless of whether the // field is empty or not. This may be used to include empty fields in // Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. // "FullyDecodeReservedExpansion") to include in API requests with the // JSON null value. By default, fields with empty values are omitted // from API requests. However, any field with an empty value appearing // in NullFields will be sent to the server as null. It is an error if a // field in this list has a non-empty value. This may be used to include // null fields in Patch requests. NullFields []string `json:"-"` } func (s *Http) MarshalJSON() ([]byte, error) { type NoMethod Http raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // HttpRule: `HttpRule` defines the mapping of an RPC method to one or // more HTTP // REST API methods. The mapping specifies how different portions of the // RPC // request message are mapped to URL path, URL query parameters, // and // HTTP request body. The mapping is typically specified as // an // `google.api.http` annotation on the RPC method, // see "google/api/annotations.proto" for details. // // The mapping consists of a field specifying the path template // and // method kind. 
The path template can refer to fields in the // request // message, as in the example below which describes a REST GET // operation on a resource collection of messages: // // // service Messaging { // rpc GetMessage(GetMessageRequest) returns (Message) { // option (google.api.http).get = // "/v1/messages/{message_id}/{sub.subfield}"; // } // } // message GetMessageRequest { // message SubMessage { // string subfield = 1; // } // string message_id = 1; // mapped to the URL // SubMessage sub = 2; // `sub.subfield` is url-mapped // } // message Message { // string text = 1; // content of the resource // } // // The same http annotation can alternatively be expressed inside // the // `GRPC API Configuration` YAML file. // // http: // rules: // - selector: <proto_package_name>.Messaging.GetMessage // get: /v1/messages/{message_id}/{sub.subfield} // // This definition enables an automatic, bidirectional mapping of // HTTP // JSON to RPC. Example: // // HTTP | RPC // -----|----- // `GET /v1/messages/123456/foo` | `GetMessage(message_id: "123456" // sub: SubMessage(subfield: "foo"))` // // In general, not only fields but also field paths can be // referenced // from a path pattern. Fields mapped to the path pattern cannot // be // repeated and must have a primitive (non-message) type. // // Any fields in the request message which are not bound by the // path // pattern automatically become (optional) HTTP query // parameters. 
Assume the following definition of the request // message: // // // service Messaging { // rpc GetMessage(GetMessageRequest) returns (Message) { // option (google.api.http).get = "/v1/messages/{message_id}"; // } // } // message GetMessageRequest { // message SubMessage { // string subfield = 1; // } // string message_id = 1; // mapped to the URL // int64 revision = 2; // becomes a parameter // SubMessage sub = 3; // `sub.subfield` becomes a parameter // } // // // This enables a HTTP JSON to RPC mapping as below: // // HTTP | RPC // -----|----- // `GET /v1/messages/123456?revision=2&sub.subfield=foo` | // `GetMessage(message_id: "123456" revision: 2 sub: // SubMessage(subfield: "foo"))` // // Note that fields which are mapped to HTTP parameters must have // a // primitive type or a repeated primitive type. Message types are // not // allowed. In the case of a repeated type, the parameter can // be // repeated in the URL, as in `...?param=A&param=B`. // // For HTTP method kinds which allow a request body, the `body` // field // specifies the mapping. Consider a REST update method on the // message resource collection: // // // service Messaging { // rpc UpdateMessage(UpdateMessageRequest) returns (Message) { // option (google.api.http) = { // put: "/v1/messages/{message_id}" // body: "message" // }; // } // } // message UpdateMessageRequest { // string message_id = 1; // mapped to the URL // Message message = 2; // mapped to the body // } // // // The following HTTP JSON to RPC mapping is enabled, where // the // representation of the JSON in the request body is determined // by // protos JSON encoding: // // HTTP | RPC // -----|----- // `PUT /v1/messages/123456 { "text": "Hi!" }` | // `UpdateMessage(message_id: "123456" message { text: "Hi!" })` // // The special name `*` can be used in the body mapping to define // that // every field not bound by the path template should be mapped to // the // request body. 
This enables the following alternative definition // of // the update method: // // service Messaging { // rpc UpdateMessage(Message) returns (Message) { // option (google.api.http) = { // put: "/v1/messages/{message_id}" // body: "*" // }; // } // } // message Message { // string message_id = 1; // string text = 2; // } // // // The following HTTP JSON to RPC mapping is enabled: // // HTTP | RPC // -----|----- // `PUT /v1/messages/123456 { "text": "Hi!" }` | // `UpdateMessage(message_id: "123456" text: "Hi!")` // // Note that when using `*` in the body mapping, it is not possible // to // have HTTP parameters, as all fields not bound by the path end in // the body. This makes this option more rarely used in practice // of // defining REST APIs. The common usage of `*` is in custom // methods // which don't use the URL at all for transferring data. // // It is possible to define multiple HTTP methods for one RPC by // using // the `additional_bindings` option. Example: // // service Messaging { // rpc GetMessage(GetMessageRequest) returns (Message) { // option (google.api.http) = { // get: "/v1/messages/{message_id}" // additional_bindings { // get: "/v1/users/{user_id}/messages/{message_id}" // } // }; // } // } // message GetMessageRequest { // string message_id = 1; // string user_id = 2; // } // // // This enables the following two alternative HTTP JSON to // RPC // mappings: // // HTTP | RPC // -----|----- // `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` // `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" // message_id: "123456")` // // # Rules for HTTP mapping // // The rules for mapping HTTP path, query parameters, and body fields // to the request message are as follows: // // 1. The `body` field specifies either `*` or a field path, or is // omitted. If omitted, it indicates there is no HTTP request // body. // 2. 
Leaf fields (recursive expansion of nested messages in the // request) can be classified into three types: // (a) Matched in the URL template. // (b) Covered by body (if body is `*`, everything except (a) // fields; // else everything under the body field) // (c) All other fields. // 3. URL query parameters found in the HTTP request are mapped to (c) // fields. // 4. Any body sent with an HTTP request can contain only (b) // fields. // // The syntax of the path template is as follows: // // Template = "/" Segments [ Verb ] ; // Segments = Segment { "/" Segment } ; // Segment = "*" | "**" | LITERAL | Variable ; // Variable = "{" FieldPath [ "=" Segments ] "}" ; // FieldPath = IDENT { "." IDENT } ; // Verb = ":" LITERAL ; // // The syntax `*` matches a single path segment. The syntax `**` matches // zero // or more path segments, which must be the last part of the path except // the // `Verb`. The syntax `LITERAL` matches literal text in the path. // // The syntax `Variable` matches part of the URL path as specified by // its // template. A variable template must not contain other variables. If a // variable // matches a single path segment, its template may be omitted, e.g. // `{var}` // is equivalent to `{var=*}`. // // If a variable contains exactly one path segment, such as "{var}" // or // "{var=*}", when such a variable is expanded into a URL path, all // characters // except `[-_.~0-9a-zA-Z]` are percent-encoded. Such variables show up // in the // Discovery Document as `{var}`. // // If a variable contains one or more path segments, such as // "{var=foo/*}" // or "{var=**}", when such a variable is expanded into a URL path, // all // characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. Such // variables // show up in the Discovery Document as `{+var}`. 
// // NOTE: While the single segment variable matches the semantics of // [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 // Simple String Expansion, the multi segment variable **does not** // match // RFC 6570 Reserved Expansion. The reason is that the Reserved // Expansion // does not expand special characters like `?` and `#`, which would // lead // to invalid URLs. // // NOTE: the field paths in variables and in the `body` must not refer // to // repeated fields or map fields. type HttpRule struct { // AdditionalBindings: Additional HTTP bindings for the selector. Nested // bindings must // not contain an `additional_bindings` field themselves (that is, // the nesting may only be one level deep). AdditionalBindings []*HttpRule `json:"additionalBindings,omitempty"` // Authorizations: Specifies the permission(s) required for an API // element for the overall // API request to succeed. It is typically used to mark request message // fields // that contain the name of the resource and indicates the permissions // that // will be checked on that resource. Authorizations []*AuthorizationRule `json:"authorizations,omitempty"` // Body: The name of the request field whose value is mapped to the HTTP // body, or // `*` for mapping all fields not captured by the path pattern to the // HTTP // body. NOTE: the referred field must not be a repeated field and must // be // present at the top-level of request message type. Body string `json:"body,omitempty"` // Custom: The custom pattern is used for specifying an HTTP method that // is not // included in the `pattern` field, such as HEAD, or "*" to leave // the // HTTP method unspecified for this rule. The wild-card rule is // useful // for services that provide content to Web (HTML) clients. Custom *CustomHttpPattern `json:"custom,omitempty"` // Delete: Used for deleting a resource. Delete string `json:"delete,omitempty"` // Get: Used for listing and getting information about resources. 
Get string `json:"get,omitempty"` // MediaDownload: Use this only for Scotty Requests. Do not use this for // bytestream methods. // For media support, add instead [][google.bytestream.RestByteStream] // as an // API to your configuration. MediaDownload *MediaDownload `json:"mediaDownload,omitempty"` // MediaUpload: Use this only for Scotty Requests. Do not use this for // media support using // Bytestream, add instead // [][google.bytestream.RestByteStream] as an API to your // configuration for Bytestream methods. MediaUpload *MediaUpload `json:"mediaUpload,omitempty"` // Patch: Used for updating a resource. Patch string `json:"patch,omitempty"` // Post: Used for creating a resource. Post string `json:"post,omitempty"` // Put: Used for updating a resource. Put string `json:"put,omitempty"` // ResponseBody: Optional. The name of the response field whose value is // mapped to the HTTP // body of response. Other response fields are ignored. When // not set, the response message will be used as HTTP body of response. ResponseBody string `json:"responseBody,omitempty"` // RestCollection: DO NOT USE. This is an experimental field. // // Optional. The REST collection name is by default derived from the // URL // pattern. If specified, this field overrides the default collection // name. // Example: // // rpc AddressesAggregatedList(AddressesAggregatedListRequest) // returns (AddressesAggregatedListResponse) { // option (google.api.http) = { // get: "/v1/projects/{project_id}/aggregated/addresses" // rest_collection: "projects.addresses" // }; // } // // This method has the automatically derived collection // name // "projects.aggregated". Because, semantically, this rpc is actually // an // operation on the "projects.addresses" collection, the // `rest_collection` // field is configured to override the derived collection name. RestCollection string `json:"restCollection,omitempty"` // RestMethodName: DO NOT USE. This is an experimental field. // // Optional. 
The rest method name is by default derived from the // URL // pattern. If specified, this field overrides the default method // name. // Example: // // rpc CreateResource(CreateResourceRequest) // returns (CreateResourceResponse) { // option (google.api.http) = { // post: "/v1/resources", // body: "resource", // rest_method_name: "insert" // }; // } // // This method has the automatically derived rest method name // "create", but for backwards compatibility with apiary, it is // specified as // insert. RestMethodName string `json:"restMethodName,omitempty"` // Selector: Selects methods to which this rule applies. // // Refer to selector for syntax details. Selector string `json:"selector,omitempty"` // ForceSendFields is a list of field names (e.g. "AdditionalBindings") // to unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "AdditionalBindings") to // include in API requests with the JSON null value. By default, fields // with empty values are omitted from API requests. However, any field // with an empty value appearing in NullFields will be sent to the // server as null. It is an error if a field in this list has a // non-empty value. This may be used to include null fields in Patch // requests. NullFields []string `json:"-"` } func (s *HttpRule) MarshalJSON() ([]byte, error) { type NoMethod HttpRule raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // LabelDescriptor: A description of a label. type LabelDescriptor struct { // Description: A human-readable description for the label. Description string `json:"description,omitempty"` // Key: The label key. 
Key string `json:"key,omitempty"` // ValueType: The type of data that can be assigned to the label. // // Possible values: // "STRING" - A variable-length string. This is the default. // "BOOL" - Boolean; true or false. // "INT64" - A 64-bit signed integer. ValueType string `json:"valueType,omitempty"` // ForceSendFields is a list of field names (e.g. "Description") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Description") to include // in API requests with the JSON null value. By default, fields with // empty values are omitted from API requests. However, any field with // an empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *LabelDescriptor) MarshalJSON() ([]byte, error) { type NoMethod LabelDescriptor raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // ListOperationsResponse: The response message for // Operations.ListOperations. type ListOperationsResponse struct { // NextPageToken: The standard List next-page token. NextPageToken string `json:"nextPageToken,omitempty"` // Operations: A list of operations that matches the specified filter in // the request. Operations []*Operation `json:"operations,omitempty"` // ServerResponse contains the HTTP response code and headers from the // server. googleapi.ServerResponse `json:"-"` // ForceSendFields is a list of field names (e.g. "NextPageToken") to // unconditionally include in API requests. 
By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "NextPageToken") to include // in API requests with the JSON null value. By default, fields with // empty values are omitted from API requests. However, any field with // an empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *ListOperationsResponse) MarshalJSON() ([]byte, error) { type NoMethod ListOperationsResponse raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // ListTenancyUnitsResponse: Response for the list request. type ListTenancyUnitsResponse struct { // NextPageToken: Pagination token for large results. NextPageToken string `json:"nextPageToken,omitempty"` // TenancyUnits: Tenancy units matching the request. TenancyUnits []*TenancyUnit `json:"tenancyUnits,omitempty"` // ServerResponse contains the HTTP response code and headers from the // server. googleapi.ServerResponse `json:"-"` // ForceSendFields is a list of field names (e.g. "NextPageToken") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "NextPageToken") to include // in API requests with the JSON null value. 
By default, fields with // empty values are omitted from API requests. However, any field with // an empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *ListTenancyUnitsResponse) MarshalJSON() ([]byte, error) { type NoMethod ListTenancyUnitsResponse raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // LogDescriptor: A description of a log type. Example in YAML format: // // - name: library.googleapis.com/activity_history // description: The history of borrowing and returning library // items. // display_name: Activity // labels: // - key: /customer_id // description: Identifier of a library customer type LogDescriptor struct { // Description: A human-readable description of this log. This // information appears in // the documentation and can contain details. Description string `json:"description,omitempty"` // DisplayName: The human-readable name for this log. This information // appears on // the user interface and should be concise. DisplayName string `json:"displayName,omitempty"` // Labels: The set of labels that are available to describe a specific // log entry. // Runtime requests that contain labels not specified here // are // considered invalid. Labels []*LabelDescriptor `json:"labels,omitempty"` // Name: The name of the log. It must be less than 512 characters long // and can // include the following characters: upper- and lower-case // alphanumeric // characters [A-Za-z0-9], and punctuation characters including // slash, underscore, hyphen, period [/_-.]. Name string `json:"name,omitempty"` // ForceSendFields is a list of field names (e.g. "Description") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. 
However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Description") to include // in API requests with the JSON null value. By default, fields with // empty values are omitted from API requests. However, any field with // an empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *LogDescriptor) MarshalJSON() ([]byte, error) { type NoMethod LogDescriptor raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // Logging: Logging configuration of the service. // // The following example shows how to configure logs to be sent to // the // producer and consumer projects. In the example, the // `activity_history` // log is sent to both the producer and consumer projects, whereas // the // `purchase_history` log is only sent to the producer project. // // monitored_resources: // - type: library.googleapis.com/branch // labels: // - key: /city // description: The city where the library branch is located // in. // - key: /name // description: The name of the branch. // logs: // - name: activity_history // labels: // - key: /customer_id // - name: purchase_history // logging: // producer_destinations: // - monitored_resource: library.googleapis.com/branch // logs: // - activity_history // - purchase_history // consumer_destinations: // - monitored_resource: library.googleapis.com/branch // logs: // - activity_history type Logging struct { // ConsumerDestinations: Logging configurations for sending logs to the // consumer project. 
// There can be multiple consumer destinations, each one must have // a // different monitored resource type. A log can be used in at most // one consumer destination. ConsumerDestinations []*LoggingDestination `json:"consumerDestinations,omitempty"` // ProducerDestinations: Logging configurations for sending logs to the // producer project. // There can be multiple producer destinations, each one must have // a // different monitored resource type. A log can be used in at most // one producer destination. ProducerDestinations []*LoggingDestination `json:"producerDestinations,omitempty"` // ForceSendFields is a list of field names (e.g. // "ConsumerDestinations") to unconditionally include in API requests. // By default, fields with empty values are omitted from API requests. // However, any non-pointer, non-interface field appearing in // ForceSendFields will be sent to the server regardless of whether the // field is empty or not. This may be used to include empty fields in // Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "ConsumerDestinations") to // include in API requests with the JSON null value. By default, fields // with empty values are omitted from API requests. However, any field // with an empty value appearing in NullFields will be sent to the // server as null. It is an error if a field in this list has a // non-empty value. This may be used to include null fields in Patch // requests. NullFields []string `json:"-"` } func (s *Logging) MarshalJSON() ([]byte, error) { type NoMethod Logging raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // LoggingDestination: Configuration of a specific logging destination // (the producer project // or the consumer project). type LoggingDestination struct { // Logs: Names of the logs to be sent to this destination. Each name // must // be defined in the Service.logs section. 
If the log name is // not a domain scoped name, it will be automatically prefixed with // the service name followed by "/". Logs []string `json:"logs,omitempty"` // MonitoredResource: The monitored resource type. The type must be // defined in the // Service.monitored_resources section. MonitoredResource string `json:"monitoredResource,omitempty"` // ForceSendFields is a list of field names (e.g. "Logs") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Logs") to include in API // requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *LoggingDestination) MarshalJSON() ([]byte, error) { type NoMethod LoggingDestination raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // MediaDownload: Defines the Media configuration for a service in case // of a download. // Use this only for Scotty Requests. Do not use this for media support // using // Bytestream, add instead [][google.bytestream.RestByteStream] as an // API to // your configuration for Bytestream methods. type MediaDownload struct { // CompleteNotification: A boolean that determines whether a // notification for the completion of a // download should be sent to the backend. 
CompleteNotification bool `json:"completeNotification,omitempty"` // DownloadService: DO NOT USE FIELDS BELOW THIS LINE UNTIL THIS WARNING // IS REMOVED. // // Specify name of the download service if one is used for download. DownloadService string `json:"downloadService,omitempty"` // Dropzone: Name of the Scotty dropzone to use for the current API. Dropzone string `json:"dropzone,omitempty"` // Enabled: Whether download is enabled. Enabled bool `json:"enabled,omitempty"` // MaxDirectDownloadSize: Optional maximum acceptable size for direct // download. // The size is specified in bytes. MaxDirectDownloadSize int64 `json:"maxDirectDownloadSize,omitempty,string"` // UseDirectDownload: A boolean that determines if direct download from // ESF should be used for // download of this media. UseDirectDownload bool `json:"useDirectDownload,omitempty"` // ForceSendFields is a list of field names (e.g. // "CompleteNotification") to unconditionally include in API requests. // By default, fields with empty values are omitted from API requests. // However, any non-pointer, non-interface field appearing in // ForceSendFields will be sent to the server regardless of whether the // field is empty or not. This may be used to include empty fields in // Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "CompleteNotification") to // include in API requests with the JSON null value. By default, fields // with empty values are omitted from API requests. However, any field // with an empty value appearing in NullFields will be sent to the // server as null. It is an error if a field in this list has a // non-empty value. This may be used to include null fields in Patch // requests. 
NullFields []string `json:"-"` } func (s *MediaDownload) MarshalJSON() ([]byte, error) { type NoMethod MediaDownload raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // MediaUpload: Defines the Media configuration for a service in case of // an upload. // Use this only for Scotty Requests. Do not use this for media support // using // Bytestream, add instead [][google.bytestream.RestByteStream] as an // API to // your configuration for Bytestream methods. type MediaUpload struct { // CompleteNotification: A boolean that determines whether a // notification for the completion of an // upload should be sent to the backend. These notifications will not be // seen // by the client and will not consume quota. CompleteNotification bool `json:"completeNotification,omitempty"` // Dropzone: Name of the Scotty dropzone to use for the current API. Dropzone string `json:"dropzone,omitempty"` // Enabled: Whether upload is enabled. Enabled bool `json:"enabled,omitempty"` // MaxSize: Optional maximum acceptable size for an upload. // The size is specified in bytes. MaxSize int64 `json:"maxSize,omitempty,string"` // MimeTypes: An array of mimetype patterns. Esf will only accept // uploads that match one // of the given patterns. MimeTypes []string `json:"mimeTypes,omitempty"` // ProgressNotification: Whether to receive a notification for progress // changes of media upload. ProgressNotification bool `json:"progressNotification,omitempty"` // StartNotification: Whether to receive a notification on the start of // media upload. StartNotification bool `json:"startNotification,omitempty"` // UploadService: DO NOT USE FIELDS BELOW THIS LINE UNTIL THIS WARNING // IS REMOVED. // // Specify name of the upload service if one is used for upload. UploadService string `json:"uploadService,omitempty"` // ForceSendFields is a list of field names (e.g. // "CompleteNotification") to unconditionally include in API requests. 
// By default, fields with empty values are omitted from API requests. // However, any non-pointer, non-interface field appearing in // ForceSendFields will be sent to the server regardless of whether the // field is empty or not. This may be used to include empty fields in // Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "CompleteNotification") to // include in API requests with the JSON null value. By default, fields // with empty values are omitted from API requests. However, any field // with an empty value appearing in NullFields will be sent to the // server as null. It is an error if a field in this list has a // non-empty value. This may be used to include null fields in Patch // requests. NullFields []string `json:"-"` } func (s *MediaUpload) MarshalJSON() ([]byte, error) { type NoMethod MediaUpload raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // Method: Method represents a method of an API interface. type Method struct { // Name: The simple name of this method. Name string `json:"name,omitempty"` // Options: Any metadata attached to the method. Options []*Option `json:"options,omitempty"` // RequestStreaming: If true, the request is streamed. RequestStreaming bool `json:"requestStreaming,omitempty"` // RequestTypeUrl: A URL of the input message type. RequestTypeUrl string `json:"requestTypeUrl,omitempty"` // ResponseStreaming: If true, the response is streamed. ResponseStreaming bool `json:"responseStreaming,omitempty"` // ResponseTypeUrl: The URL of the output message type. ResponseTypeUrl string `json:"responseTypeUrl,omitempty"` // Syntax: The source syntax of this method. // // Possible values: // "SYNTAX_PROTO2" - Syntax `proto2`. // "SYNTAX_PROTO3" - Syntax `proto3`. Syntax string `json:"syntax,omitempty"` // ForceSendFields is a list of field names (e.g. "Name") to // unconditionally include in API requests. 
By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Name") to include in API // requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *Method) MarshalJSON() ([]byte, error) { type NoMethod Method raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // MetricDescriptor: Defines a metric type and its schema. Once a metric // descriptor is created, // deleting or altering it stops data collection and makes the metric // type's // existing data unusable. type MetricDescriptor struct { // Description: A detailed description of the metric, which can be used // in documentation. Description string `json:"description,omitempty"` // DisplayName: A concise name for the metric, which can be displayed in // user interfaces. // Use sentence case without an ending period, for example "Request // count". // This field is optional but it is recommended to be set for any // metrics // associated with user-visible concepts, such as Quota. DisplayName string `json:"displayName,omitempty"` // Labels: The set of labels that can be used to describe a // specific // instance of this metric type. 
For example, // the // `appengine.googleapis.com/http/server/response_latencies` metric // type has a label for the HTTP response code, `response_code`, so // you can look at latencies for successful responses or just // for responses that failed. Labels []*LabelDescriptor `json:"labels,omitempty"` // Metadata: Optional. Metadata which can be used to guide usage of the // metric. Metadata *MetricDescriptorMetadata `json:"metadata,omitempty"` // MetricKind: Whether the metric records instantaneous values, changes // to a value, etc. // Some combinations of `metric_kind` and `value_type` might not be // supported. // // Possible values: // "METRIC_KIND_UNSPECIFIED" - Do not use this default value. // "GAUGE" - An instantaneous measurement of a value. // "DELTA" - The change in a value during a time interval. // "CUMULATIVE" - A value accumulated over a time interval. // Cumulative // measurements in a time series should have the same start time // and increasing end times, until an event resets the cumulative // value to zero and sets a new start time for the following // points. MetricKind string `json:"metricKind,omitempty"` // Name: The resource name of the metric descriptor. Name string `json:"name,omitempty"` // Type: The metric type, including its DNS name prefix. The type is // not // URL-encoded. All user-defined metric types have the DNS // name // `custom.googleapis.com` or `external.googleapis.com`. Metric types // should // use a natural hierarchical grouping. For example: // // "custom.googleapis.com/invoice/paid/amount" // "external.googleapis.com/prometheus/up" // "appengine.googleapis.com/http/server/response_latencies" Type string `json:"type,omitempty"` // Unit: The unit in which the metric value is reported. It is only // applicable // if the `value_type` is `INT64`, `DOUBLE`, or `DISTRIBUTION`. 
// The // supported units are a subset of [The Unified Code for Units // of // Measure](http://unitsofmeasure.org/ucum.html) standard: // // **Basic units (UNIT)** // // * `bit` bit // * `By` byte // * `s` second // * `min` minute // * `h` hour // * `d` day // // **Prefixes (PREFIX)** // // * `k` kilo (10**3) // * `M` mega (10**6) // * `G` giga (10**9) // * `T` tera (10**12) // * `P` peta (10**15) // * `E` exa (10**18) // * `Z` zetta (10**21) // * `Y` yotta (10**24) // * `m` milli (10**-3) // * `u` micro (10**-6) // * `n` nano (10**-9) // * `p` pico (10**-12) // * `f` femto (10**-15) // * `a` atto (10**-18) // * `z` zepto (10**-21) // * `y` yocto (10**-24) // * `Ki` kibi (2**10) // * `Mi` mebi (2**20) // * `Gi` gibi (2**30) // * `Ti` tebi (2**40) // // **Grammar** // // The grammar also includes these connectors: // // * `/` division (as an infix operator, e.g. `1/s`). // * `.` multiplication (as an infix operator, e.g. `GBy.d`) // // The grammar for a unit is as follows: // // Expression = Component { "." Component } { "/" Component } ; // // Component = ( [ PREFIX ] UNIT | "%" ) [ Annotation ] // | Annotation // | "1" // ; // // Annotation = "{" NAME "}" ; // // Notes: // // * `Annotation` is just a comment if it follows a `UNIT` and is // equivalent to `1` if it is used alone. For examples, // `{requests}/s == 1/s`, `By{transmitted}/s == By/s`. // * `NAME` is a sequence of non-blank printable ASCII characters not // containing '{' or '}'. // * `1` represents dimensionless value 1, such as in `1/s`. // * `%` represents dimensionless value 1/100, and annotates values // giving // a percentage. Unit string `json:"unit,omitempty"` // ValueType: Whether the measurement is an integer, a floating-point // number, etc. // Some combinations of `metric_kind` and `value_type` might not be // supported. // // Possible values: // "VALUE_TYPE_UNSPECIFIED" - Do not use this default value. // "BOOL" - The value is a boolean. 
// This value type can be used only if the metric kind is `GAUGE`. // "INT64" - The value is a signed 64-bit integer. // "DOUBLE" - The value is a double precision floating point number. // "STRING" - The value is a text string. // This value type can be used only if the metric kind is `GAUGE`. // "DISTRIBUTION" - The value is a `Distribution`. // "MONEY" - The value is money. ValueType string `json:"valueType,omitempty"` // ForceSendFields is a list of field names (e.g. "Description") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Description") to include // in API requests with the JSON null value. By default, fields with // empty values are omitted from API requests. However, any field with // an empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *MetricDescriptor) MarshalJSON() ([]byte, error) { type NoMethod MetricDescriptor raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // MetricDescriptorMetadata: Additional annotations that can be used to // guide the usage of a metric. type MetricDescriptorMetadata struct { // IngestDelay: The delay of data points caused by ingestion. Data // points older than this // age are guaranteed to be ingested and available to be read, // excluding // data loss due to errors. IngestDelay string `json:"ingestDelay,omitempty"` // LaunchStage: The launch stage of the metric definition. 
// // Possible values: // "LAUNCH_STAGE_UNSPECIFIED" - Do not use this default value. // "EARLY_ACCESS" - Early Access features are limited to a closed // group of testers. To use // these features, you must sign up in advance and sign a Trusted // Tester // agreement (which includes confidentiality provisions). These features // may // be unstable, changed in backward-incompatible ways, and are // not // guaranteed to be released. // "ALPHA" - Alpha is a limited availability test for releases before // they are cleared // for widespread use. By Alpha, all significant design issues are // resolved // and we are in the process of verifying functionality. Alpha // customers // need to apply for access, agree to applicable terms, and have // their // projects whitelisted. Alpha releases don’t have to be feature // complete, // no SLAs are provided, and there are no technical support obligations, // but // they will be far enough along that customers can actually use them // in // test environments or for limited-use tests -- just like they would // in // normal production cases. // "BETA" - Beta is the point at which we are ready to open a release // for any // customer to use. There are no SLA or technical support obligations in // a // Beta release. Products will be complete from a feature perspective, // but // may have some open outstanding issues. Beta releases are suitable // for // limited production use cases. // "GA" - GA features are open to all developers and are considered // stable and // fully qualified for production use. // "DEPRECATED" - Deprecated features are scheduled to be shut down // and removed. For more // information, see the “Deprecation Policy” section of our [Terms // of // Service](https://cloud.google.com/terms/) // and the [Google Cloud Platform Subject to the // Deprecation // Policy](https://cloud.google.com/terms/deprecation) documentation. 
LaunchStage string `json:"launchStage,omitempty"` // SamplePeriod: The sampling period of metric data points. For metrics // which are written // periodically, consecutive data points are stored at this time // interval, // excluding data loss due to errors. Metrics with a higher granularity // have // a smaller sampling period. SamplePeriod string `json:"samplePeriod,omitempty"` // ForceSendFields is a list of field names (e.g. "IngestDelay") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "IngestDelay") to include // in API requests with the JSON null value. By default, fields with // empty values are omitted from API requests. However, any field with // an empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *MetricDescriptorMetadata) MarshalJSON() ([]byte, error) { type NoMethod MetricDescriptorMetadata raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // MetricRule: Bind API methods to metrics. Binding a method to a metric // causes that // metric's configured quota behaviors to apply to the method call. type MetricRule struct { // MetricCosts: Metrics to update when the selected methods are called, // and the associated // cost applied to each metric. // // The key of the map is the metric name, and the values are the // amount // increased for the metric against which the quota limits are // defined. // The value must not be negative. 
MetricCosts map[string]string `json:"metricCosts,omitempty"` // Selector: Selects the methods to which this rule applies. // // Refer to selector for syntax details. Selector string `json:"selector,omitempty"` // ForceSendFields is a list of field names (e.g. "MetricCosts") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "MetricCosts") to include // in API requests with the JSON null value. By default, fields with // empty values are omitted from API requests. However, any field with // an empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *MetricRule) MarshalJSON() ([]byte, error) { type NoMethod MetricRule raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // Mixin: Declares an API Interface to be included in this interface. // The including // interface must redeclare all the methods from the included interface, // but // documentation and options are inherited as follows: // // - If after comment and whitespace stripping, the documentation // string of the redeclared method is empty, it will be inherited // from the original method. // // - Each annotation belonging to the service config (http, // visibility) which is not set in the redeclared method will be // inherited. // // - If an http annotation is inherited, the path pattern will be // modified as follows. 
Any version prefix will be replaced by the // version of the including interface plus the root path if // specified. // // Example of a simple mixin: // // package google.acl.v1; // service AccessControl { // // Get the underlying ACL object. // rpc GetAcl(GetAclRequest) returns (Acl) { // option (google.api.http).get = "/v1/{resource=**}:getAcl"; // } // } // // package google.storage.v2; // service Storage { // // rpc GetAcl(GetAclRequest) returns (Acl); // // // Get a data record. // rpc GetData(GetDataRequest) returns (Data) { // option (google.api.http).get = "/v2/{resource=**}"; // } // } // // Example of a mixin configuration: // // apis: // - name: google.storage.v2.Storage // mixins: // - name: google.acl.v1.AccessControl // // The mixin construct implies that all methods in `AccessControl` // are // also declared with same name and request/response types in // `Storage`. A documentation generator or annotation processor will // see the effective `Storage.GetAcl` method after // inherting // documentation and annotations as follows: // // service Storage { // // Get the underlying ACL object. // rpc GetAcl(GetAclRequest) returns (Acl) { // option (google.api.http).get = "/v2/{resource=**}:getAcl"; // } // ... // } // // Note how the version in the path pattern changed from `v1` to // `v2`. // // If the `root` field in the mixin is specified, it should be // a // relative path under which inherited HTTP paths are placed. Example: // // apis: // - name: google.storage.v2.Storage // mixins: // - name: google.acl.v1.AccessControl // root: acls // // This implies the following inherited HTTP annotation: // // service Storage { // // Get the underlying ACL object. // rpc GetAcl(GetAclRequest) returns (Acl) { // option (google.api.http).get = // "/v2/acls/{resource=**}:getAcl"; // } // ... // } type Mixin struct { // Name: The fully qualified name of the interface which is included. 
Name string `json:"name,omitempty"`

	// Root: If non-empty specifies a path under which inherited HTTP
	// paths
	// are rooted.
	Root string `json:"root,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Name") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Name") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}

func (s *Mixin) MarshalJSON() ([]byte, error) {
	type NoMethod Mixin
	raw := NoMethod(*s)
	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}

// MonitoredResourceDescriptor: An object that describes the schema of a
// MonitoredResource object using a
// type name and a set of labels. For example, the monitored
// resource
// descriptor for Google Compute Engine VM instances has a type
// of
// "gce_instance" and specifies the use of the labels "instance_id"
// and
// "zone" to identify particular VM instances.
//
// Different APIs can support different monitored resource types. APIs
// generally
// provide a `list` method that returns the monitored resource
// descriptors used
// by the API.
type MonitoredResourceDescriptor struct {
	// Description: Optional. A detailed description of the monitored
	// resource type that might
	// be used in documentation.
	Description string `json:"description,omitempty"`

	// DisplayName: Optional. A concise name for the monitored resource type
	// that might be
	// displayed in user interfaces. It should be a Title Cased Noun
	// Phrase,
	// without any article or other determiners. For example,
	// "Google Cloud SQL Database".
	DisplayName string `json:"displayName,omitempty"`

	// Labels: Required. A set of labels used to describe instances of this
	// monitored
	// resource type. For example, an individual Google Cloud SQL database
	// is
	// identified by values for the labels "database_id" and "zone".
	Labels []*LabelDescriptor `json:"labels,omitempty"`

	// Name: Optional. The resource name of the monitored resource
	// descriptor:
	// "projects/{project_id}/monitoredResourceDescriptors/{type
	// }" where
	// {type} is the value of the `type` field in this object
	// and
	// {project_id} is a project ID that provides API-specific context
	// for
	// accessing the type. APIs that do not use project information can use
	// the
	// resource name format "monitoredResourceDescriptors/{type}".
	Name string `json:"name,omitempty"`

	// Type: Required. The monitored resource type. For example, the
	// type
	// "cloudsql_database" represents databases in Google Cloud SQL.
	// The maximum length of this value is 256 characters.
	Type string `json:"type,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Description") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Description") to include
	// in API requests with the JSON null value. By default, fields with
	// empty values are omitted from API requests. However, any field with
	// an empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}

func (s *MonitoredResourceDescriptor) MarshalJSON() ([]byte, error) {
	type NoMethod MonitoredResourceDescriptor
	raw := NoMethod(*s)
	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}

// Monitoring: Monitoring configuration of the service.
//
// The example below shows how to configure monitored resources and
// metrics
// for monitoring. In the example, a monitored resource and two metrics
// are
// defined. The `library.googleapis.com/book/returned_count` metric is
// sent
// to both producer and consumer projects, whereas
// the
// `library.googleapis.com/book/overdue_count` metric is only sent to
// the
// consumer project.
//
//     monitored_resources:
//     - type: library.googleapis.com/branch
//       labels:
//       - key: /city
//         description: The city where the library branch is located
// in.
//       - key: /name
//         description: The name of the branch.
//     metrics:
//     - name: library.googleapis.com/book/returned_count
//       metric_kind: DELTA
//       value_type: INT64
//       labels:
//       - key: /customer_id
//     - name: library.googleapis.com/book/overdue_count
//       metric_kind: GAUGE
//       value_type: INT64
//       labels:
//       - key: /customer_id
//     monitoring:
//       producer_destinations:
//       - monitored_resource: library.googleapis.com/branch
//         metrics:
//         - library.googleapis.com/book/returned_count
//       consumer_destinations:
//       - monitored_resource: library.googleapis.com/branch
//         metrics:
//         - library.googleapis.com/book/returned_count
//         - library.googleapis.com/book/overdue_count
type Monitoring struct {
	// ConsumerDestinations: Monitoring configurations for sending metrics
	// to the consumer project.
	// There can be multiple consumer destinations, each one must have
	// a
	// different monitored resource type. A metric can be used in at
	// most
	// one consumer destination.
ConsumerDestinations []*MonitoringDestination `json:"consumerDestinations,omitempty"` // ProducerDestinations: Monitoring configurations for sending metrics // to the producer project. // There can be multiple producer destinations, each one must have // a // different monitored resource type. A metric can be used in at // most // one producer destination. ProducerDestinations []*MonitoringDestination `json:"producerDestinations,omitempty"` // ForceSendFields is a list of field names (e.g. // "ConsumerDestinations") to unconditionally include in API requests. // By default, fields with empty values are omitted from API requests. // However, any non-pointer, non-interface field appearing in // ForceSendFields will be sent to the server regardless of whether the // field is empty or not. This may be used to include empty fields in // Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "ConsumerDestinations") to // include in API requests with the JSON null value. By default, fields // with empty values are omitted from API requests. However, any field // with an empty value appearing in NullFields will be sent to the // server as null. It is an error if a field in this list has a // non-empty value. This may be used to include null fields in Patch // requests. NullFields []string `json:"-"` } func (s *Monitoring) MarshalJSON() ([]byte, error) { type NoMethod Monitoring raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // MonitoringDestination: Configuration of a specific monitoring // destination (the producer project // or the consumer project). type MonitoringDestination struct { // Metrics: Names of the metrics to report to this monitoring // destination. // Each name must be defined in Service.metrics section. Metrics []string `json:"metrics,omitempty"` // MonitoredResource: The monitored resource type. The type must be // defined in // Service.monitored_resources section. 
MonitoredResource string `json:"monitoredResource,omitempty"` // ForceSendFields is a list of field names (e.g. "Metrics") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Metrics") to include in // API requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *MonitoringDestination) MarshalJSON() ([]byte, error) { type NoMethod MonitoringDestination raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // OAuthRequirements: OAuth scopes are a way to define data and // permissions on data. For example, // there are scopes defined for "Read-only access to Google Calendar" // and // "Access to Cloud Platform". Users can consent to a scope for an // application, // giving it permission to access that data on their behalf. // // OAuth scope specifications should be fairly coarse grained; a user // will need // to see and understand the text description of what your scope // means. // // In most cases: use one or at most two OAuth scopes for an entire // family of // products. If your product has multiple APIs, you should probably be // sharing // the OAuth scope across all of those APIs. // // When you need finer grained OAuth consent screens: talk with your // product // management about how developers will use them in practice. 
// // Please note that even though each of the canonical scopes is enough // for a // request to be accepted and passed to the backend, a request can still // fail // due to the backend requiring additional scopes or permissions. type OAuthRequirements struct { // CanonicalScopes: The list of publicly documented OAuth scopes that // are allowed access. An // OAuth token containing any of these scopes will be // accepted. // // Example: // // canonical_scopes: https://www.googleapis.com/auth/calendar, // https://www.googleapis.com/auth/calendar.read CanonicalScopes string `json:"canonicalScopes,omitempty"` // ForceSendFields is a list of field names (e.g. "CanonicalScopes") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "CanonicalScopes") to // include in API requests with the JSON null value. By default, fields // with empty values are omitted from API requests. However, any field // with an empty value appearing in NullFields will be sent to the // server as null. It is an error if a field in this list has a // non-empty value. This may be used to include null fields in Patch // requests. NullFields []string `json:"-"` } func (s *OAuthRequirements) MarshalJSON() ([]byte, error) { type NoMethod OAuthRequirements raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // Operation: This resource represents a long-running operation that is // the result of a // network API call. type Operation struct { // Done: If the value is `false`, it means the operation is still in // progress. 
// If `true`, the operation is completed, and either `error` or // `response` is // available. Done bool `json:"done,omitempty"` // Error: The error result of the operation in case of failure or // cancellation. Error *Status `json:"error,omitempty"` // Metadata: Service-specific metadata associated with the operation. // It typically // contains progress information and common metadata such as create // time. // Some services might not provide such metadata. Any method that // returns a // long-running operation should document the metadata type, if any. Metadata googleapi.RawMessage `json:"metadata,omitempty"` // Name: The server-assigned name, which is only unique within the same // service that // originally returns it. If you use the default HTTP mapping, // the // `name` should have the format of `operations/some/unique/name`. Name string `json:"name,omitempty"` // Response: The normal response of the operation in case of success. // If the original // method returns no data on success, such as `Delete`, the response // is // `google.protobuf.Empty`. If the original method is // standard // `Get`/`Create`/`Update`, the response should be the resource. For // other // methods, the response should have the type `XxxResponse`, where // `Xxx` // is the original method name. For example, if the original method // name // is `TakeSnapshot()`, the inferred response type // is // `TakeSnapshotResponse`. Response googleapi.RawMessage `json:"response,omitempty"` // ServerResponse contains the HTTP response code and headers from the // server. googleapi.ServerResponse `json:"-"` // ForceSendFields is a list of field names (e.g. "Done") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. 
ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Done") to include in API // requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *Operation) MarshalJSON() ([]byte, error) { type NoMethod Operation raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // Option: A protocol buffer option, which can be attached to a message, // field, // enumeration, etc. type Option struct { // Name: The option's name. For protobuf built-in options (options // defined in // descriptor.proto), this is the short name. For example, // "map_entry". // For custom options, it should be the fully-qualified name. For // example, // "google.api.http". Name string `json:"name,omitempty"` // Value: The option's value packed in an Any message. If the value is a // primitive, // the corresponding wrapper type defined in // google/protobuf/wrappers.proto // should be used. If the value is an enum, it should be stored as an // int32 // value using the google.protobuf.Int32Value type. Value googleapi.RawMessage `json:"value,omitempty"` // ForceSendFields is a list of field names (e.g. "Name") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Name") to include in API // requests with the JSON null value. 
By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *Option) MarshalJSON() ([]byte, error) { type NoMethod Option raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // Page: Represents a documentation page. A page can contain subpages to // represent // nested documentation set structure. type Page struct { // Content: The Markdown content of the page. You can use <code>&#40;== // include {path} ==&#41;</code> // to include content from a Markdown file. Content string `json:"content,omitempty"` // Name: The name of the page. It will be used as an identity of the // page to // generate URI of the page, text of the link to this page in // navigation, // etc. The full page name (start from the root page name to this // page // concatenated with `.`) can be used as reference to the page in // your // documentation. For example: // <pre><code>pages: // - name: Tutorial // content: &#40;== include tutorial.md ==&#41; // subpages: // - name: Java // content: &#40;== include tutorial_java.md // ==&#41; // </code></pre> // You can reference `Java` page using Markdown reference link // syntax: // `Java`. Name string `json:"name,omitempty"` // Subpages: Subpages of this page. The order of subpages specified here // will be // honored in the generated docset. Subpages []*Page `json:"subpages,omitempty"` // ForceSendFields is a list of field names (e.g. "Content") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. 
This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Content") to include in // API requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *Page) MarshalJSON() ([]byte, error) { type NoMethod Page raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // PolicyBinding: Translates to IAM Policy bindings (without auditing at // this level) type PolicyBinding struct { // Members: Uses the same format as in IAM policy. // `member` must include both prefix and ID. For example, // `user:{emailId}`, // `serviceAccount:{emailId}`, `group:{emailId}`. Members []string `json:"members,omitempty"` // Role: Role. // (https://cloud.google.com/iam/docs/understanding-roles) // For example, `roles/viewer`, `roles/editor`, or `roles/owner`. Role string `json:"role,omitempty"` // ForceSendFields is a list of field names (e.g. "Members") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Members") to include in // API requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. 
It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *PolicyBinding) MarshalJSON() ([]byte, error) { type NoMethod PolicyBinding raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // Quota: Quota configuration helps to achieve fairness and budgeting in // service // usage. // // The quota configuration works this way: // - The service configuration defines a set of metrics. // - For API calls, the quota.metric_rules maps methods to metrics with // corresponding costs. // - The quota.limits defines limits on the metrics, which will be used // for // quota checks at runtime. // // An example quota configuration in yaml format: // // quota: // // - name: apiWriteQpsPerProject // metric: library.googleapis.com/write_calls // unit: "1/min/{project}" # rate limit for consumer projects // values: // STANDARD: 10000 // // // # The metric rules bind all methods to the read_calls metric, // # except for the UpdateBook and DeleteBook methods. These two // methods // # are mapped to the write_calls metric, with the UpdateBook // method // # consuming at twice rate as the DeleteBook method. // metric_rules: // - selector: "*" // metric_costs: // library.googleapis.com/read_calls: 1 // - selector: google.example.library.v1.LibraryService.UpdateBook // metric_costs: // library.googleapis.com/write_calls: 2 // - selector: google.example.library.v1.LibraryService.DeleteBook // metric_costs: // library.googleapis.com/write_calls: 1 // // Corresponding Metric definition: // // metrics: // - name: library.googleapis.com/read_calls // display_name: Read requests // metric_kind: DELTA // value_type: INT64 // // - name: library.googleapis.com/write_calls // display_name: Write requests // metric_kind: DELTA // value_type: INT64 type Quota struct { // Limits: List of `QuotaLimit` definitions for the service. 
Limits []*QuotaLimit `json:"limits,omitempty"` // MetricRules: List of `MetricRule` definitions, each one mapping a // selected method to one // or more metrics. MetricRules []*MetricRule `json:"metricRules,omitempty"` // ForceSendFields is a list of field names (e.g. "Limits") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Limits") to include in API // requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *Quota) MarshalJSON() ([]byte, error) { type NoMethod Quota raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // QuotaLimit: `QuotaLimit` defines a specific limit that applies over a // specified duration // for a limit type. There can be at most one limit for a duration and // limit // type combination defined within a `QuotaGroup`. type QuotaLimit struct { // DefaultLimit: Default number of tokens that can be consumed during // the specified // duration. This is the number of tokens assigned when a // client // application developer activates the service for his/her // project. // // Specifying a value of 0 will block all requests. This can be used if // you // are provisioning quota to selected consumers and blocking // others. // Similarly, a value of -1 will indicate an unlimited quota. No // other // negative values are allowed. 
	//
	// Used by group-based quotas only.
	DefaultLimit int64 `json:"defaultLimit,omitempty,string"`

	// Description: Optional. User-visible, extended description for this
	// quota limit.
	// Should be used only when more context is needed to understand this
	// limit
	// than provided by the limit's display name (see: `display_name`).
	Description string `json:"description,omitempty"`

	// DisplayName: User-visible display name for this limit.
	// Optional. If not set, the UI will provide a default display name
	// based on
	// the quota configuration. This field can be used to override the
	// default
	// display name generated from the configuration.
	DisplayName string `json:"displayName,omitempty"`

	// Duration: Duration of this limit in textual notation. Example:
	// "100s", "24h", "1d".
	// For duration longer than a day, only multiple of days is supported.
	// We
	// support only "100s" and "1d" for now. Additional support will be
	// added in
	// the future. "0" indicates indefinite duration.
	//
	// Used by group-based quotas only.
	Duration string `json:"duration,omitempty"`

	// FreeTier: Free tier value displayed in the Developers Console for
	// this limit.
	// The free tier is the number of tokens that will be subtracted from
	// the
	// billed amount when billing is enabled.
	// This field can only be set on a limit with duration "1d", in a
	// billable
	// group; it is invalid on any other limit. If this field is not set,
	// it
	// defaults to 0, indicating that there is no free tier for this
	// service.
	//
	// Used by group-based quotas only.
	FreeTier int64 `json:"freeTier,omitempty,string"`

	// MaxLimit: Maximum number of tokens that can be consumed during the
	// specified
	// duration. Client application developers can override the default
	// limit up
	// to this maximum. If specified, this value cannot be set to a value
	// less
	// than the default limit. If not specified, it is set to the default
	// limit.
	//
	// To allow clients to apply overrides with no upper bound, set this to
	// -1,
	// indicating unlimited maximum quota.
	//
	// Used by group-based quotas only.
	MaxLimit int64 `json:"maxLimit,omitempty,string"`

	// Metric: The name of the metric this quota limit applies to. The quota
	// limits with
	// the same metric will be checked together during runtime. The metric
	// must be
	// defined within the service config.
	Metric string `json:"metric,omitempty"`

	// Name: Name of the quota limit.
	//
	// The name must be provided, and it must be unique within the service.
	// The
	// name can only include alphanumeric characters as well as '-'.
	//
	// The maximum length of the limit name is 64 characters.
	Name string `json:"name,omitempty"`

	// Unit: Specify the unit of the quota limit. It uses the same syntax
	// as
	// Metric.unit. The supported unit kinds are determined by the
	// quota
	// backend system.
	//
	// Here are some examples:
	// * "1/min/{project}" for quota per minute per project.
	//
	// Note: the order of unit components is insignificant.
	// The "1" at the beginning is required to follow the metric unit
	// syntax.
	Unit string `json:"unit,omitempty"`

	// Values: Tiered limit values. You must specify this as a key:value
	// pair, with an
	// integer value that is the maximum number of requests allowed for
	// the
	// specified unit. Currently only STANDARD is supported.
	Values map[string]string `json:"values,omitempty"`

	// ForceSendFields is a list of field names (e.g. "DefaultLimit") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "DefaultLimit") to include
	// in API requests with the JSON null value.
	// By default, fields with
	// empty values are omitted from API requests. However, any field with
	// an empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}

// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields via gensupport.MarshalJSON.
func (s *QuotaLimit) MarshalJSON() ([]byte, error) {
	type NoMethod QuotaLimit
	raw := NoMethod(*s)
	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}

// RemoveTenantProjectRequest: Request message to remove tenant project
// resource from the tenancy unit.
type RemoveTenantProjectRequest struct {
	// Tag: Tag of the resource within the tenancy unit.
	Tag string `json:"tag,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Tag") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Tag") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}

// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields via gensupport.MarshalJSON.
func (s *RemoveTenantProjectRequest) MarshalJSON() ([]byte, error) {
	type NoMethod RemoveTenantProjectRequest
	raw := NoMethod(*s)
	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}

// SearchTenancyUnitsResponse: Response for the search query.
type SearchTenancyUnitsResponse struct {
	// NextPageToken: Pagination token for large results.
	NextPageToken string `json:"nextPageToken,omitempty"`

	// TenancyUnits: Tenancy Units matching the request.
	TenancyUnits []*TenancyUnit `json:"tenancyUnits,omitempty"`

	// ServerResponse contains the HTTP response code and headers from the
	// server.
	googleapi.ServerResponse `json:"-"`

	// ForceSendFields is a list of field names (e.g. "NextPageToken") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "NextPageToken") to include
	// in API requests with the JSON null value. By default, fields with
	// empty values are omitted from API requests. However, any field with
	// an empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}

// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields via gensupport.MarshalJSON.
func (s *SearchTenancyUnitsResponse) MarshalJSON() ([]byte, error) {
	type NoMethod SearchTenancyUnitsResponse
	raw := NoMethod(*s)
	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}

// Service: `Service` is the root object of Google service configuration
// schema. It
// describes basic information about a service, such as the name and
// the
// title, and delegates other aspects to sub-sections. Each sub-section
// is
// either a proto message or a repeated proto message that configures
// a
// specific aspect, such as auth. See each proto message definition for
// details.
//
// Example:
//
//     type: google.api.Service
//     config_version: 3
//     name: calendar.googleapis.com
//     title: Google Calendar API
//     apis:
//     - name: google.calendar.v3.Calendar
//     authentication:
//       providers:
//       - id: google_calendar_auth
//         jwks_uri: https://www.googleapis.com/oauth2/v1/certs
//         issuer: https://securetoken.google.com
//       rules:
//       - selector: "*"
//         requirements:
//           provider_id: google_calendar_auth
type Service struct {
	// Apis: A list of API interfaces exported by this service. Only the
	// `name` field
	// of the google.protobuf.Api needs to be provided by the
	// configuration
	// author, as the remaining fields will be derived from the IDL during
	// the
	// normalization process. It is an error to specify an API interface
	// here
	// which cannot be resolved against the associated IDL files.
	Apis []*Api `json:"apis,omitempty"`

	// Authentication: Auth configuration.
	Authentication *Authentication `json:"authentication,omitempty"`

	// Backend: API backend configuration.
	Backend *Backend `json:"backend,omitempty"`

	// Billing: Billing configuration.
	Billing *Billing `json:"billing,omitempty"`

	// ConfigVersion: The semantic version of the service configuration. The
	// config version
	// affects the interpretation of the service configuration. For
	// example,
	// certain features are enabled by default for certain config
	// versions.
	// The latest config version is `3`.
	ConfigVersion int64 `json:"configVersion,omitempty"`

	// Context: Context configuration.
	Context *Context `json:"context,omitempty"`

	// Control: Configuration for the service control plane.
	Control *Control `json:"control,omitempty"`

	// CustomError: Custom error configuration.
	CustomError *CustomError `json:"customError,omitempty"`

	// Documentation: Additional API documentation.
	Documentation *Documentation `json:"documentation,omitempty"`

	// Endpoints: Configuration for network endpoints.
	// If this is empty,
	// then an endpoint
	// with the same name as the service is automatically generated to
	// service all
	// defined APIs.
	Endpoints []*Endpoint `json:"endpoints,omitempty"`

	// Enums: A list of all enum types included in this API service.
	// Enums
	// referenced directly or indirectly by the `apis` are
	// automatically
	// included. Enums which are not referenced but shall be
	// included
	// should be listed here by name. Example:
	//
	//     enums:
	//     - name: google.someapi.v1.SomeEnum
	Enums []*Enum `json:"enums,omitempty"`

	// Experimental: Experimental configuration.
	Experimental *Experimental `json:"experimental,omitempty"`

	// Http: HTTP configuration.
	Http *Http `json:"http,omitempty"`

	// Id: A unique ID for a specific instance of this message, typically
	// assigned
	// by the client for tracking purpose. If empty, the server may choose
	// to
	// generate one instead.
	Id string `json:"id,omitempty"`

	// Logging: Logging configuration.
	Logging *Logging `json:"logging,omitempty"`

	// Logs: Defines the logs used by this service.
	Logs []*LogDescriptor `json:"logs,omitempty"`

	// Metrics: Defines the metrics used by this service.
	Metrics []*MetricDescriptor `json:"metrics,omitempty"`

	// MonitoredResources: Defines the monitored resources used by this
	// service. This is required
	// by the Service.monitoring and Service.logging configurations.
	MonitoredResources []*MonitoredResourceDescriptor `json:"monitoredResources,omitempty"`

	// Monitoring: Monitoring configuration.
	Monitoring *Monitoring `json:"monitoring,omitempty"`

	// Name: The DNS address at which this service is available,
	// e.g. `calendar.googleapis.com`.
	Name string `json:"name,omitempty"`

	// ProducerProjectId: The Google project that owns this service.
	ProducerProjectId string `json:"producerProjectId,omitempty"`

	// Quota: Quota configuration.
	Quota *Quota `json:"quota,omitempty"`

	// SourceInfo: Output only. The source information for this
	// configuration if available.
	SourceInfo *SourceInfo `json:"sourceInfo,omitempty"`

	// SystemParameters: System parameter configuration.
	SystemParameters *SystemParameters `json:"systemParameters,omitempty"`

	// SystemTypes: A list of all proto message types included in this API
	// service.
	// It serves similar purpose as [google.api.Service.types], except
	// that
	// these types are not needed by user-defined APIs. Therefore, they will
	// not
	// show up in the generated discovery doc. This field should only be
	// used
	// to define system APIs in ESF.
	SystemTypes []*Type `json:"systemTypes,omitempty"`

	// Title: The product title for this service.
	Title string `json:"title,omitempty"`

	// Types: A list of all proto message types included in this API
	// service.
	// Types referenced directly or indirectly by the `apis`
	// are
	// automatically included. Messages which are not referenced but
	// shall be included, such as types used by the `google.protobuf.Any`
	// type,
	// should be listed here by name. Example:
	//
	//     types:
	//     - name: google.protobuf.Int32
	Types []*Type `json:"types,omitempty"`

	// Usage: Configuration controlling usage of this service.
	Usage *Usage `json:"usage,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Apis") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Apis") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}

// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields via gensupport.MarshalJSON.
func (s *Service) MarshalJSON() ([]byte, error) {
	type NoMethod Service
	raw := NoMethod(*s)
	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}

// ServiceAccountConfig: Describes service account configuration for the
// tenant project.
type ServiceAccountConfig struct {
	// AccountId: ID of the IAM service account to be created in tenant
	// project.
	// The email format of the service account will
	// be
	// "<account-id>@<tenant-project-id>.iam.gserviceaccount.com".
	// This account id has to be unique within tenant project and
	// producers
	// have to guarantee it. And it must be 6-30 characters long, and
	// matches the
	// regular expression `[a-z]([-a-z0-9]*[a-z0-9])`.
	AccountId string `json:"accountId,omitempty"`

	// TenantProjectRoles: Roles for the associated service account for the
	// tenant project.
	TenantProjectRoles []string `json:"tenantProjectRoles,omitempty"`

	// ForceSendFields is a list of field names (e.g. "AccountId") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "AccountId") to include in
	// API requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}

// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields via gensupport.MarshalJSON.
func (s *ServiceAccountConfig) MarshalJSON() ([]byte, error) {
	type NoMethod ServiceAccountConfig
	raw := NoMethod(*s)
	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}

// SourceContext: `SourceContext` represents information about the
// source of a
// protobuf element, like the file in which it is defined.
type SourceContext struct {
	// FileName: The path-qualified name of the .proto file that contained
	// the associated
	// protobuf element. For example:
	// "google/protobuf/source_context.proto".
	FileName string `json:"fileName,omitempty"`

	// ForceSendFields is a list of field names (e.g. "FileName") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "FileName") to include in
	// API requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}

// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields via gensupport.MarshalJSON.
func (s *SourceContext) MarshalJSON() ([]byte, error) {
	type NoMethod SourceContext
	raw := NoMethod(*s)
	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}

// SourceInfo: Source information used to create a Service Config
type SourceInfo struct {
	// SourceFiles: All files used during config generation.
	SourceFiles []googleapi.RawMessage `json:"sourceFiles,omitempty"`

	// ForceSendFields is a list of field names (e.g. "SourceFiles") to
	// unconditionally include in API requests.
	// By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "SourceFiles") to include
	// in API requests with the JSON null value. By default, fields with
	// empty values are omitted from API requests. However, any field with
	// an empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}

// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields via gensupport.MarshalJSON.
func (s *SourceInfo) MarshalJSON() ([]byte, error) {
	type NoMethod SourceInfo
	raw := NoMethod(*s)
	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}

// Status: The `Status` type defines a logical error model that is
// suitable for different
// programming environments, including REST APIs and RPC APIs. It is
// used by
// [gRPC](https://github.com/grpc). The error model is designed to
// be:
//
// - Simple to use and understand for most users
// - Flexible enough to meet unexpected needs
//
// # Overview
//
// The `Status` message contains three pieces of data: error code, error
// message,
// and error details. The error code should be an enum value
// of
// google.rpc.Code, but it may accept additional error codes if needed.
// The
// error message should be a developer-facing English message that
// helps
// developers *understand* and *resolve* the error. If a localized
// user-facing
// error message is needed, put the localized message in the error
// details or
// localize it in the client. The optional error details may contain
// arbitrary
// information about the error. There is a predefined set of error
// detail types
// in the package `google.rpc` that can be used for common error
// conditions.
//
// # Language mapping
//
// The `Status` message is the logical representation of the error
// model, but it
// is not necessarily the actual wire format. When the `Status` message
// is
// exposed in different client libraries and different wire protocols,
// it can be
// mapped differently. For example, it will likely be mapped to some
// exceptions
// in Java, but more likely mapped to some error codes in C.
//
// # Other uses
//
// The error model and the `Status` message can be used in a variety
// of
// environments, either with or without APIs, to provide a
// consistent developer experience across different
// environments.
//
// Example uses of this error model include:
//
// - Partial errors. If a service needs to return partial errors to the
// client,
//     it may embed the `Status` in the normal response to indicate the
// partial
//     errors.
//
// - Workflow errors. A typical workflow has multiple steps. Each step
// may
//     have a `Status` message for error reporting.
//
// - Batch operations. If a client uses batch request and batch
// response, the
//     `Status` message should be used directly inside batch response,
// one for
//     each error sub-response.
//
// - Asynchronous operations. If an API call embeds asynchronous
// operation
//     results in its response, the status of those operations should
// be
//     represented directly using the `Status` message.
//
// - Logging. If some API errors are stored in logs, the message
// `Status` could
//     be used directly after any stripping needed for security/privacy
// reasons.
type Status struct {
	// Code: The status code, which should be an enum value of
	// google.rpc.Code.
	Code int64 `json:"code,omitempty"`

	// Details: A list of messages that carry the error details. There is a
	// common set of
	// message types for APIs to use.
	Details []googleapi.RawMessage `json:"details,omitempty"`

	// Message: A developer-facing error message, which should be in
	// English. Any
	// user-facing error message should be localized and sent in
	// the
	// google.rpc.Status.details field, or localized by the client.
	Message string `json:"message,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Code") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Code") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}

// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields via gensupport.MarshalJSON.
func (s *Status) MarshalJSON() ([]byte, error) {
	type NoMethod Status
	raw := NoMethod(*s)
	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}

// SystemParameter: Define a parameter's name and location. The
// parameter may be passed as either
// an HTTP header or a URL query parameter, and if both are passed the
// behavior
// is implementation-dependent.
type SystemParameter struct {
	// HttpHeader: Define the HTTP header name to use for the parameter. It
	// is case
	// insensitive.
	HttpHeader string `json:"httpHeader,omitempty"`

	// Name: Define the name of the parameter, such as "api_key" . It is
	// case sensitive.
	Name string `json:"name,omitempty"`

	// UrlQueryParameter: Define the URL query parameter name to use for the
	// parameter. It is case
	// sensitive.
	UrlQueryParameter string `json:"urlQueryParameter,omitempty"`

	// ForceSendFields is a list of field names (e.g. "HttpHeader") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "HttpHeader") to include in
	// API requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}

// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields via gensupport.MarshalJSON.
func (s *SystemParameter) MarshalJSON() ([]byte, error) {
	type NoMethod SystemParameter
	raw := NoMethod(*s)
	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}

// SystemParameterRule: Define a system parameter rule mapping system
// parameter definitions to
// methods.
type SystemParameterRule struct {
	// Parameters: Define parameters. Multiple names may be defined for a
	// parameter.
	// For a given method call, only one of them should be used. If
	// multiple
	// names are used the behavior is implementation-dependent.
	// If none of the specified names are present the behavior
	// is
	// parameter-dependent.
	Parameters []*SystemParameter `json:"parameters,omitempty"`

	// Selector: Selects the methods to which this rule applies. Use '*' to
	// indicate all
	// methods in all APIs.
	//
	// Refer to selector for syntax details.
	Selector string `json:"selector,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Parameters") to
	// unconditionally include in API requests.
	// By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Parameters") to include in
	// API requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}

// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields via gensupport.MarshalJSON.
func (s *SystemParameterRule) MarshalJSON() ([]byte, error) {
	type NoMethod SystemParameterRule
	raw := NoMethod(*s)
	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}

// SystemParameters: ### System parameter configuration
//
// A system parameter is a special kind of parameter defined by the
// API
// system, not by an individual API. It is typically mapped to an HTTP
// header
// and/or a URL query parameter. This configuration specifies which
// methods
// change the names of the system parameters.
type SystemParameters struct {
	// Rules: Define system parameters.
	//
	// The parameters defined here will override the default
	// parameters
	// implemented by the system. If this field is missing from the
	// service
	// config, default system parameters will be used. Default system
	// parameters
	// and names is implementation-dependent.
	//
	// Example: define api key for all methods
	//
	//     system_parameters
	//       rules:
	//         - selector: "*"
	//           parameters:
	//             - name: api_key
	//               url_query_parameter: api_key
	//
	//
	// Example: define 2 api key names for a specific method.
	//
	//     system_parameters
	//       rules:
	//         - selector: "/ListShelves"
	//           parameters:
	//             - name: api_key
	//               http_header: Api-Key1
	//             - name: api_key
	//               http_header: Api-Key2
	//
	// **NOTE:** All service configuration rules follow "last one wins"
	// order.
	Rules []*SystemParameterRule `json:"rules,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Rules") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Rules") to include in API
	// requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}

// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields via gensupport.MarshalJSON.
func (s *SystemParameters) MarshalJSON() ([]byte, error) {
	type NoMethod SystemParameters
	raw := NoMethod(*s)
	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}

// TenancyUnit: Representation of a tenancy unit.
type TenancyUnit struct {
	// Consumer: @OutputOnly Cloud resource name of the consumer of this
	// service.
	// For example 'projects/123456'.
	Consumer string `json:"consumer,omitempty"`

	// CreateTime: @OutputOnly The time this tenancy unit was created.
	CreateTime string `json:"createTime,omitempty"`

	// Name: Globally unique identifier of this tenancy
	// unit
	// "services/{service}/{collection id}/{resource
	// id}/tenancyUnits/{unit}"
	Name string `json:"name,omitempty"`

	// Service: @OutputOnly Google Cloud API name of the service owning this
	// tenancy unit.
	// For example 'serviceconsumermanagement.googleapis.com'.
	Service string `json:"service,omitempty"`

	// TenantResources: Resources constituting the tenancy unit.
	// There can be at most 512 tenant resources in a tenancy unit.
	TenantResources []*TenantResource `json:"tenantResources,omitempty"`

	// ServerResponse contains the HTTP response code and headers from the
	// server.
	googleapi.ServerResponse `json:"-"`

	// ForceSendFields is a list of field names (e.g. "Consumer") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Consumer") to include in
	// API requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}

// MarshalJSON implements json.Marshaler, honoring ForceSendFields and
// NullFields via gensupport.MarshalJSON.
func (s *TenancyUnit) MarshalJSON() ([]byte, error) {
	type NoMethod TenancyUnit
	raw := NoMethod(*s)
	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}

// TenantProjectConfig: This structure defines a tenant project to be
// added to the specified tenancy
// unit and its initial configuration and properties. A project lien
// will be
// created for the tenant project to prevent the tenant project from
// being
// deleted accidentally. The lien will be deleted as part of tenant
// project
// removal.
type TenantProjectConfig struct {
	// BillingConfig: Billing account properties. Billing account must be
	// specified.
	BillingConfig *BillingConfig `json:"billingConfig,omitempty"`

	// Folder: Folder where project in this tenancy unit must be
	// located
	// This folder must have been previously created with proper
	// permissions for the caller to create and configure a project in
	// it.
	// Valid folder resource names have the format
	// `folders/{folder_number}`
	// (for example, `folders/123456`).
	Folder string `json:"folder,omitempty"`

	// Labels: Labels that will be applied to this project.
	Labels map[string]string `json:"labels,omitempty"`

	// ServiceAccountConfig: Configuration for IAM service account on tenant
	// project.
	ServiceAccountConfig *ServiceAccountConfig `json:"serviceAccountConfig,omitempty"`

	// Services: Google Cloud API names of services that will be activated
	// on this project
	// during provisioning. If any of these services can not be
	// activated,
	// request will fail.
	// For example: 'compute.googleapis.com','cloudfunctions.googleapis.com'
	Services []string `json:"services,omitempty"`

	// TenantProjectPolicy: Describes ownership and policies for the new
	// tenant project. Required.
	TenantProjectPolicy *TenantProjectPolicy `json:"tenantProjectPolicy,omitempty"`

	// ForceSendFields is a list of field names (e.g. "BillingConfig") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "BillingConfig") to include
	// in API requests with the JSON null value. By default, fields with
	// empty values are omitted from API requests. However, any field with
	// an empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
// This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *TenantProjectConfig) MarshalJSON() ([]byte, error) { type NoMethod TenantProjectConfig raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // TenantProjectPolicy: Describes policy settings that need to be // applied to a newly // created tenant project. type TenantProjectPolicy struct { // PolicyBindings: Policy bindings to be applied to the tenant project, // in addition to the // 'roles/owner' role granted to the Service Consumer Management // service // account. // At least one binding must have the role `roles/owner`. Among the list // of // members for `roles/owner`, at least one of them must be either `user` // or // `group` type. PolicyBindings []*PolicyBinding `json:"policyBindings,omitempty"` // ForceSendFields is a list of field names (e.g. "PolicyBindings") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "PolicyBindings") to // include in API requests with the JSON null value. By default, fields // with empty values are omitted from API requests. However, any field // with an empty value appearing in NullFields will be sent to the // server as null. It is an error if a field in this list has a // non-empty value. This may be used to include null fields in Patch // requests. NullFields []string `json:"-"` } func (s *TenantProjectPolicy) MarshalJSON() ([]byte, error) { type NoMethod TenantProjectPolicy raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // TenantResource: Resource constituting the TenancyUnit. 
type TenantResource struct {
	// Resource: @OutputOnly Identifier of the tenant resource.
	// For cloud projects, it is in the form 'projects/{number}'.
	// For example 'projects/123456'.
	Resource string `json:"resource,omitempty"`

	// Status: Status of tenant resource.
	//
	// Possible values:
	// "STATUS_UNSPECIFIED" - Unspecified status is the default unset
	// value.
	// "PENDING_CREATE" - Creation of the tenant resource is ongoing.
	// "ACTIVE" - Active resource.
	// "PENDING_DELETE" - Deletion of the resource is ongoing.
	// "FAILED" - Tenant resource creation or deletion has failed.
	Status string `json:"status,omitempty"`

	// Tag: Unique per single tenancy unit.
	Tag string `json:"tag,omitempty"`

	// ForceSendFields is a list of field names (e.g. "Resource") to
	// unconditionally include in API requests. By default, fields with
	// empty values are omitted from API requests. However, any non-pointer,
	// non-interface field appearing in ForceSendFields will be sent to the
	// server regardless of whether the field is empty or not. This may be
	// used to include empty fields in Patch requests.
	ForceSendFields []string `json:"-"`

	// NullFields is a list of field names (e.g. "Resource") to include in
	// API requests with the JSON null value. By default, fields with empty
	// values are omitted from API requests. However, any field with an
	// empty value appearing in NullFields will be sent to the server as
	// null. It is an error if a field in this list has a non-empty value.
	// This may be used to include null fields in Patch requests.
	NullFields []string `json:"-"`
}

// MarshalJSON implements json.Marshaler. The NoMethod alias is a defined
// type with the same fields but none of TenantResource's methods, so
// encoding it cannot recurse back into this MarshalJSON; gensupport then
// applies the ForceSendFields/NullFields serialization overrides.
func (s *TenantResource) MarshalJSON() ([]byte, error) {
	type NoMethod TenantResource
	raw := NoMethod(*s)
	return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields)
}

// Type: A protocol buffer message type.
type Type struct {
	// Fields: The list of fields.
	Fields []*Field `json:"fields,omitempty"`

	// Name: The fully qualified message name.
Name string `json:"name,omitempty"` // Oneofs: The list of types appearing in `oneof` definitions in this // type. Oneofs []string `json:"oneofs,omitempty"` // Options: The protocol buffer options. Options []*Option `json:"options,omitempty"` // SourceContext: The source context. SourceContext *SourceContext `json:"sourceContext,omitempty"` // Syntax: The source syntax. // // Possible values: // "SYNTAX_PROTO2" - Syntax `proto2`. // "SYNTAX_PROTO3" - Syntax `proto3`. Syntax string `json:"syntax,omitempty"` // ForceSendFields is a list of field names (e.g. "Fields") to // unconditionally include in API requests. By default, fields with // empty values are omitted from API requests. However, any non-pointer, // non-interface field appearing in ForceSendFields will be sent to the // server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "Fields") to include in API // requests with the JSON null value. By default, fields with empty // values are omitted from API requests. However, any field with an // empty value appearing in NullFields will be sent to the server as // null. It is an error if a field in this list has a non-empty value. // This may be used to include null fields in Patch requests. NullFields []string `json:"-"` } func (s *Type) MarshalJSON() ([]byte, error) { type NoMethod Type raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // Usage: Configuration controlling usage of a service. type Usage struct { // ProducerNotificationChannel: The full resource name of a channel used // for sending notifications to the // service producer. // // Google Service Management currently only supports // [Google Cloud Pub/Sub](https://cloud.google.com/pubsub) as a // notification // channel. 
To use Google Cloud Pub/Sub as the channel, this must be the // name // of a Cloud Pub/Sub topic that uses the Cloud Pub/Sub topic name // format // documented in https://cloud.google.com/pubsub/docs/overview. ProducerNotificationChannel string `json:"producerNotificationChannel,omitempty"` // Requirements: Requirements that must be satisfied before a consumer // project can use the // service. Each requirement is of the form // <service.name>/<requirement-id>; // for example 'serviceusage.googleapis.com/billing-enabled'. Requirements []string `json:"requirements,omitempty"` // Rules: A list of usage rules that apply to individual API // methods. // // **NOTE:** All service configuration rules follow "last one wins" // order. Rules []*UsageRule `json:"rules,omitempty"` // ForceSendFields is a list of field names (e.g. // "ProducerNotificationChannel") to unconditionally include in API // requests. By default, fields with empty values are omitted from API // requests. However, any non-pointer, non-interface field appearing in // ForceSendFields will be sent to the server regardless of whether the // field is empty or not. This may be used to include empty fields in // Patch requests. ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. // "ProducerNotificationChannel") to include in API requests with the // JSON null value. By default, fields with empty values are omitted // from API requests. However, any field with an empty value appearing // in NullFields will be sent to the server as null. It is an error if a // field in this list has a non-empty value. This may be used to include // null fields in Patch requests. NullFields []string `json:"-"` } func (s *Usage) MarshalJSON() ([]byte, error) { type NoMethod Usage raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // UsageRule: Usage configuration rules for the service. // // NOTE: Under development. 
// // // Use this rule to configure unregistered calls for the service. // Unregistered // calls are calls that do not contain consumer project // identity. // (Example: calls that do not contain an API key). // By default, API methods do not allow unregistered calls, and each // method call // must be identified by a consumer project identity. Use this rule // to // allow/disallow unregistered calls. // // Example of an API that wants to allow unregistered calls for entire // service. // // usage: // rules: // - selector: "*" // allow_unregistered_calls: true // // Example of a method that wants to allow unregistered calls. // // usage: // rules: // - selector: // "google.example.library.v1.LibraryService.CreateBook" // allow_unregistered_calls: true type UsageRule struct { // AllowUnregisteredCalls: If true, the selected method allows // unregistered calls, e.g. calls // that don't identify any user or application. AllowUnregisteredCalls bool `json:"allowUnregisteredCalls,omitempty"` // Selector: Selects the methods to which this rule applies. Use '*' to // indicate all // methods in all APIs. // // Refer to selector for syntax details. Selector string `json:"selector,omitempty"` // SkipServiceControl: If true, the selected method should skip service // control and the control // plane features, such as quota and billing, will not be // available. // This flag is used by Google Cloud Endpoints to bypass checks for // internal // methods, such as service health check methods. SkipServiceControl bool `json:"skipServiceControl,omitempty"` // ForceSendFields is a list of field names (e.g. // "AllowUnregisteredCalls") to unconditionally include in API requests. // By default, fields with empty values are omitted from API requests. // However, any non-pointer, non-interface field appearing in // ForceSendFields will be sent to the server regardless of whether the // field is empty or not. This may be used to include empty fields in // Patch requests. 
ForceSendFields []string `json:"-"` // NullFields is a list of field names (e.g. "AllowUnregisteredCalls") // to include in API requests with the JSON null value. By default, // fields with empty values are omitted from API requests. However, any // field with an empty value appearing in NullFields will be sent to the // server as null. It is an error if a field in this list has a // non-empty value. This may be used to include null fields in Patch // requests. NullFields []string `json:"-"` } func (s *UsageRule) MarshalJSON() ([]byte, error) { type NoMethod UsageRule raw := NoMethod(*s) return gensupport.MarshalJSON(raw, s.ForceSendFields, s.NullFields) } // method id "serviceconsumermanagement.operations.cancel": type OperationsCancelCall struct { s *APIService name string canceloperationrequest *CancelOperationRequest urlParams_ gensupport.URLParams ctx_ context.Context header_ http.Header } // Cancel: Starts asynchronous cancellation on a long-running operation. // The server // makes a best effort to cancel the operation, but success is // not // guaranteed. If the server doesn't support this method, it // returns // `google.rpc.Code.UNIMPLEMENTED`. Clients can // use // Operations.GetOperation or // other methods to check whether the cancellation succeeded or whether // the // operation completed despite cancellation. On successful // cancellation, // the operation is not deleted; instead, it becomes an operation // with // an Operation.error value with a google.rpc.Status.code of // 1, // corresponding to `Code.CANCELLED`. func (r *OperationsService) Cancel(name string, canceloperationrequest *CancelOperationRequest) *OperationsCancelCall { c := &OperationsCancelCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.name = name c.canceloperationrequest = canceloperationrequest return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. 
func (c *OperationsCancelCall) Fields(s ...googleapi.Field) *OperationsCancelCall {
	c.urlParams_.Set("fields", googleapi.CombineFields(s))
	return c
}

// Context sets the context to be used in this call's Do method. Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *OperationsCancelCall) Context(ctx context.Context) *OperationsCancelCall {
	c.ctx_ = ctx
	return c
}

// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *OperationsCancelCall) Header() http.Header {
	if c.header_ == nil {
		c.header_ = make(http.Header)
	}
	return c.header_
}

// doRequest builds and sends the HTTP POST for the cancel call: caller
// headers are copied in, the CancelOperationRequest is JSON-encoded as the
// request body, the {+name} path template is expanded with c.name, and the
// request is dispatched under c.ctx_.
func (c *OperationsCancelCall) doRequest(alt string) (*http.Response, error) {
	reqHeaders := make(http.Header)
	// Copy caller-supplied headers (from Header()) before setting our own.
	for k, v := range c.header_ {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	var body io.Reader = nil
	body, err := googleapi.WithoutDataWrapper.JSONReader(c.canceloperationrequest)
	if err != nil {
		return nil, err
	}
	reqHeaders.Set("Content-Type", "application/json")
	// alt selects the response wire format; Do always passes "json".
	c.urlParams_.Set("alt", alt)
	urls := googleapi.ResolveRelative(c.s.BasePath, "v1/{+name}:cancel")
	urls += "?" + c.urlParams_.Encode()
	req, _ := http.NewRequest("POST", urls, body)
	req.Header = reqHeaders
	googleapi.Expand(req.URL, map[string]string{
		"name": c.name,
	})
	return gensupport.SendRequest(c.ctx_, c.s.client, req)
}

// Do executes the "serviceconsumermanagement.operations.cancel" call.
// Exactly one of *Empty or error will be non-nil. Any non-2xx status
// code is an error. Response headers are in either
// *Empty.ServerResponse.Header or (if a response was returned at all)
// in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to
// check whether the returned error was because http.StatusNotModified
// was returned.
func (c *OperationsCancelCall) Do(opts ...googleapi.CallOption) (*Empty, error) {
	gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &Empty{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := gensupport.DecodeResponse(target, res); err != nil { return nil, err } return ret, nil // { // "description": "Starts asynchronous cancellation on a long-running operation. The server\nmakes a best effort to cancel the operation, but success is not\nguaranteed. If the server doesn't support this method, it returns\n`google.rpc.Code.UNIMPLEMENTED`. Clients can use\nOperations.GetOperation or\nother methods to check whether the cancellation succeeded or whether the\noperation completed despite cancellation. On successful cancellation,\nthe operation is not deleted; instead, it becomes an operation with\nan Operation.error value with a google.rpc.Status.code of 1,\ncorresponding to `Code.CANCELLED`.", // "flatPath": "v1/operations/{operationsId}:cancel", // "httpMethod": "POST", // "id": "serviceconsumermanagement.operations.cancel", // "parameterOrder": [ // "name" // ], // "parameters": { // "name": { // "description": "The name of the operation resource to be cancelled.", // "location": "path", // "pattern": "^operations/.+$", // "required": true, // "type": "string" // } // }, // "path": "v1/{+name}:cancel", // "request": { // "$ref": "CancelOperationRequest" // }, // "response": { // "$ref": "Empty" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform" // ] // } } // method id "serviceconsumermanagement.operations.delete": type OperationsDeleteCall struct { s *APIService name string urlParams_ gensupport.URLParams ctx_ context.Context header_ http.Header } // Delete: 
Deletes a long-running operation. This method indicates that // the client is // no longer interested in the operation result. It does not cancel // the // operation. If the server doesn't support this method, it // returns // `google.rpc.Code.UNIMPLEMENTED`. func (r *OperationsService) Delete(name string) *OperationsDeleteCall { c := &OperationsDeleteCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.name = name return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *OperationsDeleteCall) Fields(s ...googleapi.Field) *OperationsDeleteCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *OperationsDeleteCall) Context(ctx context.Context) *OperationsDeleteCall { c.ctx_ = ctx return c } // Header returns an http.Header that can be modified by the caller to // add HTTP headers to the request. func (c *OperationsDeleteCall) Header() http.Header { if c.header_ == nil { c.header_ = make(http.Header) } return c.header_ } func (c *OperationsDeleteCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) for k, v := range c.header_ { reqHeaders[k] = v } reqHeaders.Set("User-Agent", c.s.userAgent()) var body io.Reader = nil c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1/{+name}") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("DELETE", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "name": c.name, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "serviceconsumermanagement.operations.delete" call. // Exactly one of *Empty or error will be non-nil. Any non-2xx status // code is an error. 
Response headers are in either // *Empty.ServerResponse.Header or (if a response was returned at all) // in error.(*googleapi.Error).Header. Use googleapi.IsNotModified to // check whether the returned error was because http.StatusNotModified // was returned. func (c *OperationsDeleteCall) Do(opts ...googleapi.CallOption) (*Empty, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &Empty{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := gensupport.DecodeResponse(target, res); err != nil { return nil, err } return ret, nil // { // "description": "Deletes a long-running operation. This method indicates that the client is\nno longer interested in the operation result. It does not cancel the\noperation. 
If the server doesn't support this method, it returns\n`google.rpc.Code.UNIMPLEMENTED`.", // "flatPath": "v1/operations/{operationsId}", // "httpMethod": "DELETE", // "id": "serviceconsumermanagement.operations.delete", // "parameterOrder": [ // "name" // ], // "parameters": { // "name": { // "description": "The name of the operation resource to be deleted.", // "location": "path", // "pattern": "^operations/.+$", // "required": true, // "type": "string" // } // }, // "path": "v1/{+name}", // "response": { // "$ref": "Empty" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform" // ] // } } // method id "serviceconsumermanagement.operations.get": type OperationsGetCall struct { s *APIService name string urlParams_ gensupport.URLParams ifNoneMatch_ string ctx_ context.Context header_ http.Header } // Get: Gets the latest state of a long-running operation. Clients can // use this // method to poll the operation result at intervals as recommended by // the API // service. func (r *OperationsService) Get(name string) *OperationsGetCall { c := &OperationsGetCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.name = name return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *OperationsGetCall) Fields(s ...googleapi.Field) *OperationsGetCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // IfNoneMatch sets the optional parameter which makes the operation // fail if the object's ETag matches the given value. This is useful for // getting updates only after the object has changed since the last // request. Use googleapi.IsNotModified to check whether the response // error from Do is the result of In-None-Match. func (c *OperationsGetCall) IfNoneMatch(entityTag string) *OperationsGetCall { c.ifNoneMatch_ = entityTag return c } // Context sets the context to be used in this call's Do method. 
Any
// pending HTTP request will be aborted if the provided context is
// canceled.
func (c *OperationsGetCall) Context(ctx context.Context) *OperationsGetCall {
	c.ctx_ = ctx
	return c
}

// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *OperationsGetCall) Header() http.Header {
	if c.header_ == nil {
		c.header_ = make(http.Header)
	}
	return c.header_
}

// doRequest builds and sends the HTTP GET for the get call: caller headers
// are copied in, an If-None-Match header is added when an entity tag was
// set (making this a conditional GET), the {+name} path template is
// expanded with c.name, and the request is dispatched under c.ctx_.
func (c *OperationsGetCall) doRequest(alt string) (*http.Response, error) {
	reqHeaders := make(http.Header)
	// Copy caller-supplied headers (from Header()) before setting our own.
	for k, v := range c.header_ {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	if c.ifNoneMatch_ != "" {
		reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
	}
	var body io.Reader = nil
	// alt selects the response wire format; Do always passes "json".
	c.urlParams_.Set("alt", alt)
	urls := googleapi.ResolveRelative(c.s.BasePath, "v1/{+name}")
	urls += "?" + c.urlParams_.Encode()
	req, _ := http.NewRequest("GET", urls, body)
	req.Header = reqHeaders
	googleapi.Expand(req.URL, map[string]string{
		"name": c.name,
	})
	return gensupport.SendRequest(c.ctx_, c.s.client, req)
}

// Do executes the "serviceconsumermanagement.operations.get" call.
// Exactly one of *Operation or error will be non-nil. Any non-2xx
// status code is an error. Response headers are in either
// *Operation.ServerResponse.Header or (if a response was returned at
// all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified
// to check whether the returned error was because
// http.StatusNotModified was returned.
func (c *OperationsGetCall) Do(opts ...googleapi.CallOption) (*Operation, error) {
	gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &Operation{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := gensupport.DecodeResponse(target, res); err != nil { return nil, err } return ret, nil // { // "description": "Gets the latest state of a long-running operation. Clients can use this\nmethod to poll the operation result at intervals as recommended by the API\nservice.", // "flatPath": "v1/operations/{operationsId}", // "httpMethod": "GET", // "id": "serviceconsumermanagement.operations.get", // "parameterOrder": [ // "name" // ], // "parameters": { // "name": { // "description": "The name of the operation resource.", // "location": "path", // "pattern": "^operations/[^/]+$", // "required": true, // "type": "string" // } // }, // "path": "v1/{+name}", // "response": { // "$ref": "Operation" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform" // ] // } } // method id "serviceconsumermanagement.operations.list": type OperationsListCall struct { s *APIService name string urlParams_ gensupport.URLParams ifNoneMatch_ string ctx_ context.Context header_ http.Header } // List: Lists operations that match the specified filter in the // request. If the // server doesn't support this method, it returns // `UNIMPLEMENTED`. // // NOTE: the `name` binding allows API services to override the // binding // to use different resource name schemes, such as `users/*/operations`. // To // override the binding, API services can add a binding such // as // "/v1/{name=users/*}/operations" to their service configuration. 
// For backwards compatibility, the default name includes the // operations // collection id, however overriding users must ensure the name // binding // is the parent resource, without the operations collection id. func (r *OperationsService) List(name string) *OperationsListCall { c := &OperationsListCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.name = name return c } // Filter sets the optional parameter "filter": The standard list // filter. func (c *OperationsListCall) Filter(filter string) *OperationsListCall { c.urlParams_.Set("filter", filter) return c } // PageSize sets the optional parameter "pageSize": The standard list // page size. func (c *OperationsListCall) PageSize(pageSize int64) *OperationsListCall { c.urlParams_.Set("pageSize", fmt.Sprint(pageSize)) return c } // PageToken sets the optional parameter "pageToken": The standard list // page token. func (c *OperationsListCall) PageToken(pageToken string) *OperationsListCall { c.urlParams_.Set("pageToken", pageToken) return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *OperationsListCall) Fields(s ...googleapi.Field) *OperationsListCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // IfNoneMatch sets the optional parameter which makes the operation // fail if the object's ETag matches the given value. This is useful for // getting updates only after the object has changed since the last // request. Use googleapi.IsNotModified to check whether the response // error from Do is the result of In-None-Match. func (c *OperationsListCall) IfNoneMatch(entityTag string) *OperationsListCall { c.ifNoneMatch_ = entityTag return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. 
func (c *OperationsListCall) Context(ctx context.Context) *OperationsListCall {
	c.ctx_ = ctx
	return c
}

// Header returns an http.Header that can be modified by the caller to
// add HTTP headers to the request.
func (c *OperationsListCall) Header() http.Header {
	if c.header_ == nil {
		c.header_ = make(http.Header)
	}
	return c.header_
}

// doRequest builds and sends the HTTP GET for the list call: caller
// headers are copied in, an If-None-Match header is added when an entity
// tag was set (making this a conditional GET), the {+name} path template
// is expanded with c.name, and the request is dispatched under c.ctx_.
// List parameters (filter, pageSize, pageToken) travel in c.urlParams_.
func (c *OperationsListCall) doRequest(alt string) (*http.Response, error) {
	reqHeaders := make(http.Header)
	// Copy caller-supplied headers (from Header()) before setting our own.
	for k, v := range c.header_ {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	if c.ifNoneMatch_ != "" {
		reqHeaders.Set("If-None-Match", c.ifNoneMatch_)
	}
	var body io.Reader = nil
	// alt selects the response wire format; Do always passes "json".
	c.urlParams_.Set("alt", alt)
	urls := googleapi.ResolveRelative(c.s.BasePath, "v1/{+name}")
	urls += "?" + c.urlParams_.Encode()
	req, _ := http.NewRequest("GET", urls, body)
	req.Header = reqHeaders
	googleapi.Expand(req.URL, map[string]string{
		"name": c.name,
	})
	return gensupport.SendRequest(c.ctx_, c.s.client, req)
}

// Do executes the "serviceconsumermanagement.operations.list" call.
// Exactly one of *ListOperationsResponse or error will be non-nil. Any
// non-2xx status code is an error. Response headers are in either
// *ListOperationsResponse.ServerResponse.Header or (if a response was
// returned at all) in error.(*googleapi.Error).Header. Use
// googleapi.IsNotModified to check whether the returned error was
// because http.StatusNotModified was returned.
func (c *OperationsListCall) Do(opts ...googleapi.CallOption) (*ListOperationsResponse, error) {
	gensupport.SetOptions(c.urlParams_, opts...)
res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &ListOperationsResponse{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := gensupport.DecodeResponse(target, res); err != nil { return nil, err } return ret, nil // { // "description": "Lists operations that match the specified filter in the request. If the\nserver doesn't support this method, it returns `UNIMPLEMENTED`.\n\nNOTE: the `name` binding allows API services to override the binding\nto use different resource name schemes, such as `users/*/operations`. To\noverride the binding, API services can add a binding such as\n`\"/v1/{name=users/*}/operations\"` to their service configuration.\nFor backwards compatibility, the default name includes the operations\ncollection id, however overriding users must ensure the name binding\nis the parent resource, without the operations collection id.", // "flatPath": "v1/operations", // "httpMethod": "GET", // "id": "serviceconsumermanagement.operations.list", // "parameterOrder": [ // "name" // ], // "parameters": { // "filter": { // "description": "The standard list filter.", // "location": "query", // "type": "string" // }, // "name": { // "description": "The name of the operation's parent resource.", // "location": "path", // "pattern": "^operations$", // "required": true, // "type": "string" // }, // "pageSize": { // "description": "The standard list page size.", // "format": "int32", // "location": "query", // "type": "integer" // }, // "pageToken": { // "description": "The standard list page token.", // "location": "query", // "type": "string" // } // }, // "path": "v1/{+name}", // "response": { // 
"$ref": "ListOperationsResponse" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform" // ] // } } // Pages invokes f for each page of results. // A non-nil error returned from f will halt the iteration. // The provided context supersedes any context provided to the Context method. func (c *OperationsListCall) Pages(ctx context.Context, f func(*ListOperationsResponse) error) error { c.ctx_ = ctx defer c.PageToken(c.urlParams_.Get("pageToken")) // reset paging to original point for { x, err := c.Do() if err != nil { return err } if err := f(x); err != nil { return err } if x.NextPageToken == "" { return nil } c.PageToken(x.NextPageToken) } } // method id "serviceconsumermanagement.services.search": type ServicesSearchCall struct { s *APIService parent string urlParams_ gensupport.URLParams ifNoneMatch_ string ctx_ context.Context header_ http.Header } // Search: Search tenancy units for a service. func (r *ServicesService) Search(parent string) *ServicesSearchCall { c := &ServicesSearchCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.parent = parent return c } // PageSize sets the optional parameter "pageSize": The maximum number // of results returned by this request. Currently, the // default maximum is set to 1000. If page_size is not provided or the // size // provided is a number larger than 1000, it will be automatically set // to // 1000. func (c *ServicesSearchCall) PageSize(pageSize int64) *ServicesSearchCall { c.urlParams_.Set("pageSize", fmt.Sprint(pageSize)) return c } // PageToken sets the optional parameter "pageToken": The continuation // token, which is used to page through large result sets. // To get the next page of results, set this parameter to the value // of // `nextPageToken` from the previous response. 
func (c *ServicesSearchCall) PageToken(pageToken string) *ServicesSearchCall { c.urlParams_.Set("pageToken", pageToken) return c } // Query sets the optional parameter "query": Set a query `{expression}` // for querying tenancy units. Your `{expression}` // must be in the format: `field_name=literal_string`. The `field_name` // is the // name of the field you want to compare. Supported fields // are // `tenant_resources.tag` and `tenant_resources.resource`. // // For example, to search tenancy units that contain at least one // tenant // resource with given tag 'xyz', use query // `tenant_resources.tag=xyz`. // To search tenancy units that contain at least one tenant resource // with // given resource name 'projects/123456', use // query // `tenant_resources.resource=projects/123456`. // // Multiple expressions can be joined with `AND`s. Tenancy units must // match // all expressions to be included in the result set. For // example, // `tenant_resources.tag=xyz AND // tenant_resources.resource=projects/123456` func (c *ServicesSearchCall) Query(query string) *ServicesSearchCall { c.urlParams_.Set("query", query) return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ServicesSearchCall) Fields(s ...googleapi.Field) *ServicesSearchCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // IfNoneMatch sets the optional parameter which makes the operation // fail if the object's ETag matches the given value. This is useful for // getting updates only after the object has changed since the last // request. Use googleapi.IsNotModified to check whether the response // error from Do is the result of In-None-Match. func (c *ServicesSearchCall) IfNoneMatch(entityTag string) *ServicesSearchCall { c.ifNoneMatch_ = entityTag return c } // Context sets the context to be used in this call's Do method. 
Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ServicesSearchCall) Context(ctx context.Context) *ServicesSearchCall { c.ctx_ = ctx return c } // Header returns an http.Header that can be modified by the caller to // add HTTP headers to the request. func (c *ServicesSearchCall) Header() http.Header { if c.header_ == nil { c.header_ = make(http.Header) } return c.header_ } func (c *ServicesSearchCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) for k, v := range c.header_ { reqHeaders[k] = v } reqHeaders.Set("User-Agent", c.s.userAgent()) if c.ifNoneMatch_ != "" { reqHeaders.Set("If-None-Match", c.ifNoneMatch_) } var body io.Reader = nil c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1/{+parent}:search") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("GET", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "parent": c.parent, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "serviceconsumermanagement.services.search" call. // Exactly one of *SearchTenancyUnitsResponse or error will be non-nil. // Any non-2xx status code is an error. Response headers are in either // *SearchTenancyUnitsResponse.ServerResponse.Header or (if a response // was returned at all) in error.(*googleapi.Error).Header. Use // googleapi.IsNotModified to check whether the returned error was // because http.StatusNotModified was returned. func (c *ServicesSearchCall) Do(opts ...googleapi.CallOption) (*SearchTenancyUnitsResponse, error) { gensupport.SetOptions(c.urlParams_, opts...) 
res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &SearchTenancyUnitsResponse{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := gensupport.DecodeResponse(target, res); err != nil { return nil, err } return ret, nil // { // "description": "Search tenancy units for a service.", // "flatPath": "v1/services/{servicesId}:search", // "httpMethod": "GET", // "id": "serviceconsumermanagement.services.search", // "parameterOrder": [ // "parent" // ], // "parameters": { // "pageSize": { // "description": "The maximum number of results returned by this request. Currently, the\ndefault maximum is set to 1000. If page_size is not provided or the size\nprovided is a number larger than 1000, it will be automatically set to\n1000.\n\nOptional.", // "format": "int32", // "location": "query", // "type": "integer" // }, // "pageToken": { // "description": "The continuation token, which is used to page through large result sets.\nTo get the next page of results, set this parameter to the value of\n`nextPageToken` from the previous response.\n\nOptional.", // "location": "query", // "type": "string" // }, // "parent": { // "description": "Service for which search is performed.\nservices/{service}\n{service} the name of a service, for example 'service.googleapis.com'.", // "location": "path", // "pattern": "^services/[^/]+$", // "required": true, // "type": "string" // }, // "query": { // "description": "Set a query `{expression}` for querying tenancy units. Your `{expression}`\nmust be in the format: `field_name=literal_string`. The `field_name` is the\nname of the field you want to compare. 
Supported fields are\n`tenant_resources.tag` and `tenant_resources.resource`.\n\nFor example, to search tenancy units that contain at least one tenant\nresource with given tag 'xyz', use query `tenant_resources.tag=xyz`.\nTo search tenancy units that contain at least one tenant resource with\ngiven resource name 'projects/123456', use query\n`tenant_resources.resource=projects/123456`.\n\nMultiple expressions can be joined with `AND`s. Tenancy units must match\nall expressions to be included in the result set. For example,\n`tenant_resources.tag=xyz AND tenant_resources.resource=projects/123456`\n\nOptional.", // "location": "query", // "type": "string" // } // }, // "path": "v1/{+parent}:search", // "response": { // "$ref": "SearchTenancyUnitsResponse" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform" // ] // } } // Pages invokes f for each page of results. // A non-nil error returned from f will halt the iteration. // The provided context supersedes any context provided to the Context method. func (c *ServicesSearchCall) Pages(ctx context.Context, f func(*SearchTenancyUnitsResponse) error) error { c.ctx_ = ctx defer c.PageToken(c.urlParams_.Get("pageToken")) // reset paging to original point for { x, err := c.Do() if err != nil { return err } if err := f(x); err != nil { return err } if x.NextPageToken == "" { return nil } c.PageToken(x.NextPageToken) } } // method id "serviceconsumermanagement.services.tenancyUnits.addProject": type ServicesTenancyUnitsAddProjectCall struct { s *APIService parent string addtenantprojectrequest *AddTenantProjectRequest urlParams_ gensupport.URLParams ctx_ context.Context header_ http.Header } // AddProject: Add a new tenant project to the tenancy unit. // There can be at most 512 tenant projects in a tenancy unit. 
// If there are previously failed `AddTenantProject` calls, you might // need to // call `RemoveTenantProject` first to clean them before you can make // another // `AddTenantProject` with the same tag. // Operation<response: Empty>. func (r *ServicesTenancyUnitsService) AddProject(parent string, addtenantprojectrequest *AddTenantProjectRequest) *ServicesTenancyUnitsAddProjectCall { c := &ServicesTenancyUnitsAddProjectCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.parent = parent c.addtenantprojectrequest = addtenantprojectrequest return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ServicesTenancyUnitsAddProjectCall) Fields(s ...googleapi.Field) *ServicesTenancyUnitsAddProjectCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ServicesTenancyUnitsAddProjectCall) Context(ctx context.Context) *ServicesTenancyUnitsAddProjectCall { c.ctx_ = ctx return c } // Header returns an http.Header that can be modified by the caller to // add HTTP headers to the request. func (c *ServicesTenancyUnitsAddProjectCall) Header() http.Header { if c.header_ == nil { c.header_ = make(http.Header) } return c.header_ } func (c *ServicesTenancyUnitsAddProjectCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) for k, v := range c.header_ { reqHeaders[k] = v } reqHeaders.Set("User-Agent", c.s.userAgent()) var body io.Reader = nil body, err := googleapi.WithoutDataWrapper.JSONReader(c.addtenantprojectrequest) if err != nil { return nil, err } reqHeaders.Set("Content-Type", "application/json") c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1/{+parent}:addProject") urls += "?" 
+ c.urlParams_.Encode() req, _ := http.NewRequest("POST", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "parent": c.parent, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "serviceconsumermanagement.services.tenancyUnits.addProject" call. // Exactly one of *Operation or error will be non-nil. Any non-2xx // status code is an error. Response headers are in either // *Operation.ServerResponse.Header or (if a response was returned at // all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified // to check whether the returned error was because // http.StatusNotModified was returned. func (c *ServicesTenancyUnitsAddProjectCall) Do(opts ...googleapi.CallOption) (*Operation, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &Operation{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := gensupport.DecodeResponse(target, res); err != nil { return nil, err } return ret, nil // { // "description": "Add a new tenant project to the tenancy unit.\nThere can be at most 512 tenant projects in a tenancy unit.\nIf there are previously failed `AddTenantProject` calls, you might need to\ncall `RemoveTenantProject` first to clean them before you can make another\n`AddTenantProject` with the same tag.\nOperation\u003cresponse: Empty\u003e.", // "flatPath": "v1/services/{servicesId}/{servicesId1}/{servicesId2}/tenancyUnits/{tenancyUnitsId}:addProject", // "httpMethod": "POST", // "id": "serviceconsumermanagement.services.tenancyUnits.addProject", // "parameterOrder": [ // "parent" // ], // 
"parameters": { // "parent": { // "description": "Name of the tenancy unit.", // "location": "path", // "pattern": "^services/[^/]+/[^/]+/[^/]+/tenancyUnits/[^/]+$", // "required": true, // "type": "string" // } // }, // "path": "v1/{+parent}:addProject", // "request": { // "$ref": "AddTenantProjectRequest" // }, // "response": { // "$ref": "Operation" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform" // ] // } } // method id "serviceconsumermanagement.services.tenancyUnits.create": type ServicesTenancyUnitsCreateCall struct { s *APIService parent string createtenancyunitrequest *CreateTenancyUnitRequest urlParams_ gensupport.URLParams ctx_ context.Context header_ http.Header } // Create: Creates a tenancy unit with no tenant resources. func (r *ServicesTenancyUnitsService) Create(parent string, createtenancyunitrequest *CreateTenancyUnitRequest) *ServicesTenancyUnitsCreateCall { c := &ServicesTenancyUnitsCreateCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.parent = parent c.createtenancyunitrequest = createtenancyunitrequest return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ServicesTenancyUnitsCreateCall) Fields(s ...googleapi.Field) *ServicesTenancyUnitsCreateCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ServicesTenancyUnitsCreateCall) Context(ctx context.Context) *ServicesTenancyUnitsCreateCall { c.ctx_ = ctx return c } // Header returns an http.Header that can be modified by the caller to // add HTTP headers to the request. 
func (c *ServicesTenancyUnitsCreateCall) Header() http.Header { if c.header_ == nil { c.header_ = make(http.Header) } return c.header_ } func (c *ServicesTenancyUnitsCreateCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) for k, v := range c.header_ { reqHeaders[k] = v } reqHeaders.Set("User-Agent", c.s.userAgent()) var body io.Reader = nil body, err := googleapi.WithoutDataWrapper.JSONReader(c.createtenancyunitrequest) if err != nil { return nil, err } reqHeaders.Set("Content-Type", "application/json") c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1/{+parent}/tenancyUnits") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("POST", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "parent": c.parent, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "serviceconsumermanagement.services.tenancyUnits.create" call. // Exactly one of *TenancyUnit or error will be non-nil. Any non-2xx // status code is an error. Response headers are in either // *TenancyUnit.ServerResponse.Header or (if a response was returned at // all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified // to check whether the returned error was because // http.StatusNotModified was returned. func (c *ServicesTenancyUnitsCreateCall) Do(opts ...googleapi.CallOption) (*TenancyUnit, error) { gensupport.SetOptions(c.urlParams_, opts...) 
res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &TenancyUnit{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := gensupport.DecodeResponse(target, res); err != nil { return nil, err } return ret, nil // { // "description": "Creates a tenancy unit with no tenant resources.", // "flatPath": "v1/services/{servicesId}/{servicesId1}/{servicesId2}/tenancyUnits", // "httpMethod": "POST", // "id": "serviceconsumermanagement.services.tenancyUnits.create", // "parameterOrder": [ // "parent" // ], // "parameters": { // "parent": { // "description": "services/{service}/{collection id}/{resource id}\n{collection id} is the cloud resource collection type representing the\nservice consumer, for example 'projects', or 'organizations'.\n{resource id} is the consumer numeric id, such as project number: '123456'.\n{service} the name of a service, for example 'service.googleapis.com'.\nEnabled service binding using the new tenancy unit.", // "location": "path", // "pattern": "^services/[^/]+/[^/]+/[^/]+$", // "required": true, // "type": "string" // } // }, // "path": "v1/{+parent}/tenancyUnits", // "request": { // "$ref": "CreateTenancyUnitRequest" // }, // "response": { // "$ref": "TenancyUnit" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform" // ] // } } // method id "serviceconsumermanagement.services.tenancyUnits.delete": type ServicesTenancyUnitsDeleteCall struct { s *APIService name string urlParams_ gensupport.URLParams ctx_ context.Context header_ http.Header } // Delete: Delete a tenancy unit. Before the tenancy unit is deleted, // there should be // no tenant resources in it. 
// Operation<response: Empty>. func (r *ServicesTenancyUnitsService) Delete(name string) *ServicesTenancyUnitsDeleteCall { c := &ServicesTenancyUnitsDeleteCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.name = name return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ServicesTenancyUnitsDeleteCall) Fields(s ...googleapi.Field) *ServicesTenancyUnitsDeleteCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ServicesTenancyUnitsDeleteCall) Context(ctx context.Context) *ServicesTenancyUnitsDeleteCall { c.ctx_ = ctx return c } // Header returns an http.Header that can be modified by the caller to // add HTTP headers to the request. func (c *ServicesTenancyUnitsDeleteCall) Header() http.Header { if c.header_ == nil { c.header_ = make(http.Header) } return c.header_ } func (c *ServicesTenancyUnitsDeleteCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) for k, v := range c.header_ { reqHeaders[k] = v } reqHeaders.Set("User-Agent", c.s.userAgent()) var body io.Reader = nil c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1/{+name}") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("DELETE", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "name": c.name, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "serviceconsumermanagement.services.tenancyUnits.delete" call. // Exactly one of *Operation or error will be non-nil. Any non-2xx // status code is an error. Response headers are in either // *Operation.ServerResponse.Header or (if a response was returned at // all) in error.(*googleapi.Error).Header. 
Use googleapi.IsNotModified // to check whether the returned error was because // http.StatusNotModified was returned. func (c *ServicesTenancyUnitsDeleteCall) Do(opts ...googleapi.CallOption) (*Operation, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &Operation{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := gensupport.DecodeResponse(target, res); err != nil { return nil, err } return ret, nil // { // "description": "Delete a tenancy unit. Before the tenancy unit is deleted, there should be\nno tenant resources in it.\nOperation\u003cresponse: Empty\u003e.", // "flatPath": "v1/services/{servicesId}/{servicesId1}/{servicesId2}/tenancyUnits/{tenancyUnitsId}", // "httpMethod": "DELETE", // "id": "serviceconsumermanagement.services.tenancyUnits.delete", // "parameterOrder": [ // "name" // ], // "parameters": { // "name": { // "description": "Name of the tenancy unit to be deleted.", // "location": "path", // "pattern": "^services/[^/]+/[^/]+/[^/]+/tenancyUnits/[^/]+$", // "required": true, // "type": "string" // } // }, // "path": "v1/{+name}", // "response": { // "$ref": "Operation" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform" // ] // } } // method id "serviceconsumermanagement.services.tenancyUnits.list": type ServicesTenancyUnitsListCall struct { s *APIService parent string urlParams_ gensupport.URLParams ifNoneMatch_ string ctx_ context.Context header_ http.Header } // List: Find the tenancy unit for a service and consumer. 
// This method should not be used in producers' runtime path, for // example // finding the tenant project number when creating VMs. Producers // should // persist the tenant project information after the project is created. func (r *ServicesTenancyUnitsService) List(parent string) *ServicesTenancyUnitsListCall { c := &ServicesTenancyUnitsListCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.parent = parent return c } // Filter sets the optional parameter "filter": Filter expression over // tenancy resources field. func (c *ServicesTenancyUnitsListCall) Filter(filter string) *ServicesTenancyUnitsListCall { c.urlParams_.Set("filter", filter) return c } // PageSize sets the optional parameter "pageSize": The maximum number // of results returned by this request. func (c *ServicesTenancyUnitsListCall) PageSize(pageSize int64) *ServicesTenancyUnitsListCall { c.urlParams_.Set("pageSize", fmt.Sprint(pageSize)) return c } // PageToken sets the optional parameter "pageToken": The continuation // token, which is used to page through large result sets. // To get the next page of results, set this parameter to the value // of // `nextPageToken` from the previous response. func (c *ServicesTenancyUnitsListCall) PageToken(pageToken string) *ServicesTenancyUnitsListCall { c.urlParams_.Set("pageToken", pageToken) return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ServicesTenancyUnitsListCall) Fields(s ...googleapi.Field) *ServicesTenancyUnitsListCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // IfNoneMatch sets the optional parameter which makes the operation // fail if the object's ETag matches the given value. This is useful for // getting updates only after the object has changed since the last // request. Use googleapi.IsNotModified to check whether the response // error from Do is the result of In-None-Match. 
func (c *ServicesTenancyUnitsListCall) IfNoneMatch(entityTag string) *ServicesTenancyUnitsListCall { c.ifNoneMatch_ = entityTag return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ServicesTenancyUnitsListCall) Context(ctx context.Context) *ServicesTenancyUnitsListCall { c.ctx_ = ctx return c } // Header returns an http.Header that can be modified by the caller to // add HTTP headers to the request. func (c *ServicesTenancyUnitsListCall) Header() http.Header { if c.header_ == nil { c.header_ = make(http.Header) } return c.header_ } func (c *ServicesTenancyUnitsListCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) for k, v := range c.header_ { reqHeaders[k] = v } reqHeaders.Set("User-Agent", c.s.userAgent()) if c.ifNoneMatch_ != "" { reqHeaders.Set("If-None-Match", c.ifNoneMatch_) } var body io.Reader = nil c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1/{+parent}/tenancyUnits") urls += "?" + c.urlParams_.Encode() req, _ := http.NewRequest("GET", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "parent": c.parent, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "serviceconsumermanagement.services.tenancyUnits.list" call. // Exactly one of *ListTenancyUnitsResponse or error will be non-nil. // Any non-2xx status code is an error. Response headers are in either // *ListTenancyUnitsResponse.ServerResponse.Header or (if a response was // returned at all) in error.(*googleapi.Error).Header. Use // googleapi.IsNotModified to check whether the returned error was // because http.StatusNotModified was returned. func (c *ServicesTenancyUnitsListCall) Do(opts ...googleapi.CallOption) (*ListTenancyUnitsResponse, error) { gensupport.SetOptions(c.urlParams_, opts...) 
res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &ListTenancyUnitsResponse{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := gensupport.DecodeResponse(target, res); err != nil { return nil, err } return ret, nil // { // "description": "Find the tenancy unit for a service and consumer.\nThis method should not be used in producers' runtime path, for example\nfinding the tenant project number when creating VMs. Producers should\npersist the tenant project information after the project is created.", // "flatPath": "v1/services/{servicesId}/{servicesId1}/{servicesId2}/tenancyUnits", // "httpMethod": "GET", // "id": "serviceconsumermanagement.services.tenancyUnits.list", // "parameterOrder": [ // "parent" // ], // "parameters": { // "filter": { // "description": "Filter expression over tenancy resources field. Optional.", // "location": "query", // "type": "string" // }, // "pageSize": { // "description": "The maximum number of results returned by this request.", // "format": "int32", // "location": "query", // "type": "integer" // }, // "pageToken": { // "description": "The continuation token, which is used to page through large result sets.\nTo get the next page of results, set this parameter to the value of\n`nextPageToken` from the previous response.", // "location": "query", // "type": "string" // }, // "parent": { // "description": "Service and consumer. 
Required.\nservices/{service}/{collection id}/{resource id}\n{collection id} is the cloud resource collection type representing the\nservice consumer, for example 'projects', or 'organizations'.\n{resource id} is the consumer numeric id, such as project number: '123456'.\n{service} the name of a service, for example 'service.googleapis.com'.", // "location": "path", // "pattern": "^services/[^/]+/[^/]+/[^/]+$", // "required": true, // "type": "string" // } // }, // "path": "v1/{+parent}/tenancyUnits", // "response": { // "$ref": "ListTenancyUnitsResponse" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform" // ] // } } // Pages invokes f for each page of results. // A non-nil error returned from f will halt the iteration. // The provided context supersedes any context provided to the Context method. func (c *ServicesTenancyUnitsListCall) Pages(ctx context.Context, f func(*ListTenancyUnitsResponse) error) error { c.ctx_ = ctx defer c.PageToken(c.urlParams_.Get("pageToken")) // reset paging to original point for { x, err := c.Do() if err != nil { return err } if err := f(x); err != nil { return err } if x.NextPageToken == "" { return nil } c.PageToken(x.NextPageToken) } } // method id "serviceconsumermanagement.services.tenancyUnits.removeProject": type ServicesTenancyUnitsRemoveProjectCall struct { s *APIService name string removetenantprojectrequest *RemoveTenantProjectRequest urlParams_ gensupport.URLParams ctx_ context.Context header_ http.Header } // RemoveProject: Removes specified project resource identified by // tenant resource tag. // It will remove project lien with 'TenantManager' origin if that was // added. // It will then attempt to delete the project. // If that operation fails, this method fails. // Operation<response: Empty>. 
func (r *ServicesTenancyUnitsService) RemoveProject(name string, removetenantprojectrequest *RemoveTenantProjectRequest) *ServicesTenancyUnitsRemoveProjectCall { c := &ServicesTenancyUnitsRemoveProjectCall{s: r.s, urlParams_: make(gensupport.URLParams)} c.name = name c.removetenantprojectrequest = removetenantprojectrequest return c } // Fields allows partial responses to be retrieved. See // https://developers.google.com/gdata/docs/2.0/basics#PartialResponse // for more information. func (c *ServicesTenancyUnitsRemoveProjectCall) Fields(s ...googleapi.Field) *ServicesTenancyUnitsRemoveProjectCall { c.urlParams_.Set("fields", googleapi.CombineFields(s)) return c } // Context sets the context to be used in this call's Do method. Any // pending HTTP request will be aborted if the provided context is // canceled. func (c *ServicesTenancyUnitsRemoveProjectCall) Context(ctx context.Context) *ServicesTenancyUnitsRemoveProjectCall { c.ctx_ = ctx return c } // Header returns an http.Header that can be modified by the caller to // add HTTP headers to the request. func (c *ServicesTenancyUnitsRemoveProjectCall) Header() http.Header { if c.header_ == nil { c.header_ = make(http.Header) } return c.header_ } func (c *ServicesTenancyUnitsRemoveProjectCall) doRequest(alt string) (*http.Response, error) { reqHeaders := make(http.Header) for k, v := range c.header_ { reqHeaders[k] = v } reqHeaders.Set("User-Agent", c.s.userAgent()) var body io.Reader = nil body, err := googleapi.WithoutDataWrapper.JSONReader(c.removetenantprojectrequest) if err != nil { return nil, err } reqHeaders.Set("Content-Type", "application/json") c.urlParams_.Set("alt", alt) urls := googleapi.ResolveRelative(c.s.BasePath, "v1/{+name}:removeProject") urls += "?" 
+ c.urlParams_.Encode() req, _ := http.NewRequest("POST", urls, body) req.Header = reqHeaders googleapi.Expand(req.URL, map[string]string{ "name": c.name, }) return gensupport.SendRequest(c.ctx_, c.s.client, req) } // Do executes the "serviceconsumermanagement.services.tenancyUnits.removeProject" call. // Exactly one of *Operation or error will be non-nil. Any non-2xx // status code is an error. Response headers are in either // *Operation.ServerResponse.Header or (if a response was returned at // all) in error.(*googleapi.Error).Header. Use googleapi.IsNotModified // to check whether the returned error was because // http.StatusNotModified was returned. func (c *ServicesTenancyUnitsRemoveProjectCall) Do(opts ...googleapi.CallOption) (*Operation, error) { gensupport.SetOptions(c.urlParams_, opts...) res, err := c.doRequest("json") if res != nil && res.StatusCode == http.StatusNotModified { if res.Body != nil { res.Body.Close() } return nil, &googleapi.Error{ Code: res.StatusCode, Header: res.Header, } } if err != nil { return nil, err } defer googleapi.CloseBody(res) if err := googleapi.CheckResponse(res); err != nil { return nil, err } ret := &Operation{ ServerResponse: googleapi.ServerResponse{ Header: res.Header, HTTPStatusCode: res.StatusCode, }, } target := &ret if err := gensupport.DecodeResponse(target, res); err != nil { return nil, err } return ret, nil // { // "description": "Removes specified project resource identified by tenant resource tag.\nIt will remove project lien with 'TenantManager' origin if that was added.\nIt will then attempt to delete the project.\nIf that operation fails, this method fails.\nOperation\u003cresponse: Empty\u003e.", // "flatPath": "v1/services/{servicesId}/{servicesId1}/{servicesId2}/tenancyUnits/{tenancyUnitsId}:removeProject", // "httpMethod": "POST", // "id": "serviceconsumermanagement.services.tenancyUnits.removeProject", // "parameterOrder": [ // "name" // ], // "parameters": { // "name": { // "description": "Name of 
the tenancy unit.\nSuch as 'services/service.googleapis.com/projects/12345/tenancyUnits/abcd'.", // "location": "path", // "pattern": "^services/[^/]+/[^/]+/[^/]+/tenancyUnits/[^/]+$", // "required": true, // "type": "string" // } // }, // "path": "v1/{+name}:removeProject", // "request": { // "$ref": "RemoveTenantProjectRequest" // }, // "response": { // "$ref": "Operation" // }, // "scopes": [ // "https://www.googleapis.com/auth/cloud-platform" // ] // } }<|fim▁end|>
// server regardless of whether the field is empty or not. This may be // used to include empty fields in Patch requests. ForceSendFields []string `json:"-"`
<|file_name|>gnome.py<|end_file_name|><|fim▁begin|># Copyright 2015 The Meson development team # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. '''This module provides helper functions for Gnome/GLib related functionality such as gobject-introspection and gresources.''' import build import os, sys import subprocess from coredata import MesonException import mlog class GnomeModule: def compile_resources(self, state, args, kwargs): cmd = ['glib-compile-resources', '@INPUT@', '--generate'] if 'source_dir' in kwargs: d = os.path.join(state.build_to_src, state.subdir, kwargs.pop('source_dir')) cmd += ['--sourcedir', d] if 'c_name' in kwargs: cmd += ['--c-name', kwargs.pop('c_name')] cmd += ['--target', '@OUTPUT@'] kwargs['command'] = cmd output_c = args[0] + '.c' output_h = args[0] + '.h' kwargs['input'] = args[1] kwargs['output'] = output_c target_c = build.CustomTarget(args[0]+'_c', state.subdir, kwargs) kwargs['output'] = output_h target_h = build.CustomTarget(args[0] + '_h', state.subdir, kwargs) return [target_c, target_h] def generate_gir(self, state, args, kwargs): if len(args) != 1: raise MesonException('Gir takes one argument') girtarget = args[0] while hasattr(girtarget, 'held_object'): girtarget = girtarget.held_object if not isinstance(girtarget, (build.Executable, build.SharedLibrary)): raise MesonException('Gir target must be an executable or shared library') pkgstr = subprocess.check_output(['pkg-config', '--cflags', 'gobject-introspection-1.0']) pkgargs = 
pkgstr.decode().strip().split() ns = kwargs.pop('namespace') nsversion = kwargs.pop('nsversion') libsources = kwargs.pop('sources') girfile = '%s-%s.gir' % (ns, nsversion) depends = [girtarget] scan_command = ['g-ir-scanner', '@INPUT@'] scan_command += pkgargs scan_command += ['--namespace='+ns, '--nsversion=' + nsversion, '--warn-all', '--output', '@OUTPUT@'] for incdirs in girtarget.include_dirs: for incdir in incdirs.get_incdirs(): scan_command += ['-I%s' % os.path.join(state.environment.get_source_dir(), incdir)] if 'link_with' in kwargs: link_with = kwargs.pop('link_with') for link in link_with: lib = link.held_object scan_command += ['-l%s' % lib.name] if isinstance(lib, build.SharedLibrary): scan_command += ['-L%s' % os.path.join(state.environment.get_build_dir(), lib.subdir)] depends.append(lib) if 'includes' in kwargs: includes = kwargs.pop('includes') if isinstance(includes, str): scan_command += ['--include=%s' % includes] elif isinstance(includes, list): scan_command += ['--include=%s' % inc for inc in includes] else: raise MesonException('Gir includes must be str or list') if state.global_args.get('c'): scan_command += ['--cflags-begin'] scan_command += state.global_args['c'] scan_command += ['--cflags-end'] if kwargs.get('symbol_prefix'): sym_prefix = kwargs.pop('symbol_prefix') if not isinstance(sym_prefix, str): raise MesonException('Gir symbol prefix must be str') scan_command += ['--symbol-prefix=%s' % sym_prefix] if kwargs.get('identifier_prefix'): identifier_prefix = kwargs.pop('identifier_prefix') if not isinstance(identifier_prefix, str): raise MesonException('Gir identifier prefix must be str') scan_command += ['--identifier-prefix=%s' % identifier_prefix] if kwargs.get('export_packages'): pkgs = kwargs.pop('export_packages') if isinstance(pkgs, str): scan_command += ['--pkg-export=%s' % pkgs] elif isinstance(pkgs, list): scan_command += ['--pkg-export=%s' % pkg for pkg in pkgs] else: raise MesonException('Gir export packages must be str or 
list') deps = None if 'dependencies' in kwargs: deps = kwargs.pop('dependencies') if not isinstance (deps, list): deps = [deps] for dep in deps: girdir = dep.held_object.get_variable ("girdir") if girdir: scan_command += ["--add-include-path=%s" % girdir] inc_dirs = None if kwargs.get('include_directories'): inc_dirs = kwargs.pop('include_directories') if isinstance(inc_dirs.held_object, build.IncludeDirs): scan_command += ['--add-include-path=%s' % inc for inc in inc_dirs.held_object.get_incdirs()] else: raise MesonException('Gir include dirs should be include_directories()') if isinstance(girtarget, build.Executable): scan_command += ['--program', girtarget] elif isinstance(girtarget, build.SharedLibrary): scan_command += ["-L", os.path.join (state.environment.get_build_dir(), girtarget.subdir)] libname = girtarget.get_basename() scan_command += ['--library', libname] scankwargs = {'output' : girfile,<|fim▁hole|> 'command' : scan_command, 'depends' : depends, } if kwargs.get('install'): scankwargs['install'] = kwargs['install'] scankwargs['install_dir'] = os.path.join(state.environment.get_datadir(), 'gir-1.0') scan_target = GirTarget(girfile, state.subdir, scankwargs) typelib_output = '%s-%s.typelib' % (ns, nsversion) typelib_cmd = ['g-ir-compiler', scan_target, '--output', '@OUTPUT@'] if inc_dirs: typelib_cmd += ['--includedir=%s' % inc for inc in inc_dirs.held_object.get_incdirs()] if deps: for dep in deps: girdir = dep.held_object.get_variable ("girdir") if girdir: typelib_cmd += ["--includedir=%s" % girdir] kwargs['output'] = typelib_output kwargs['command'] = typelib_cmd # Note that this can't be libdir, because e.g. on Debian it points to # lib/x86_64-linux-gnu but the girepo dir is always under lib. 
kwargs['install_dir'] = 'lib/girepository-1.0' typelib_target = TypelibTarget(typelib_output, state.subdir, kwargs) return [scan_target, typelib_target] def compile_schemas(self, state, args, kwargs): if len(args) != 0: raise MesonException('Compile_schemas does not take positional arguments.') srcdir = os.path.join(state.build_to_src, state.subdir) outdir = state.subdir cmd = ['glib-compile-schemas', '--targetdir', outdir, srcdir] kwargs['command'] = cmd kwargs['input'] = [] kwargs['output'] = 'gschemas.compiled' if state.subdir == '': targetname = 'gsettings-compile' else: targetname = 'gsettings-compile-' + state.subdir target_g = build.CustomTarget(targetname, state.subdir, kwargs) return target_g def gtkdoc(self, state, args, kwargs): if len(args) != 1: raise MesonException('Gtkdoc must have one positional argument.') modulename = args[0] if not isinstance(modulename, str): raise MesonException('Gtkdoc arg must be string.') if not 'src_dir' in kwargs: raise MesonException('Keyword argument src_dir missing.') main_file = kwargs.get('main_sgml', '') if not isinstance(main_file, str): raise MesonException('Main sgml keyword argument must be a string.') main_xml = kwargs.get('main_xml', '') if not isinstance(main_xml, str): raise MesonException('Main xml keyword argument must be a string.') if main_xml != '': if main_file != '': raise MesonException('You can only specify main_xml or main_sgml, not both.') main_file = main_xml src_dir = kwargs['src_dir'] targetname = modulename + '-doc' command = os.path.normpath(os.path.join(os.path.split(__file__)[0], "../gtkdochelper.py")) args = [state.environment.get_source_dir(), state.environment.get_build_dir(), state.subdir, os.path.normpath(os.path.join(state.subdir, src_dir)), main_file, modulename] res = [build.RunTarget(targetname, command, args, state.subdir)] if kwargs.get('install', True): res.append(build.InstallScript([command] + args)) return res def gdbus_codegen(self, state, args, kwargs): if len(args) != 2: 
raise MesonException('Gdbus_codegen takes two arguments, name and xml file.') namebase = args[0] xml_file = args[1] cmd = ['gdbus-codegen'] if 'interface_prefix' in kwargs: cmd += ['--interface-prefix', kwargs.pop('interface_prefix')] if 'namespace' in kwargs: cmd += ['--c-namespace', kwargs.pop('namespace')] cmd += ['--generate-c-code', os.path.join(state.subdir, namebase), '@INPUT@'] outputs = [namebase + '.c', namebase + '.h'] custom_kwargs = {'input' : xml_file, 'output' : outputs, 'command' : cmd } return build.CustomTarget(namebase + '-gdbus', state.subdir, custom_kwargs) def initialize(): mlog.log('Warning, glib compiled dependencies will not work until this upstream issue is fixed:', mlog.bold('https://bugzilla.gnome.org/show_bug.cgi?id=745754')) return GnomeModule() class GirTarget(build.CustomTarget): def __init__(self, name, subdir, kwargs): super().__init__(name, subdir, kwargs) class TypelibTarget(build.CustomTarget): def __init__(self, name, subdir, kwargs): super().__init__(name, subdir, kwargs)<|fim▁end|>
'input' : libsources,
<|file_name|>fc_2015_05_04.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # imports go here import sched import time # # Free Coding session for 2015-05-04 # Written by Matt Warren # scheduler = sched.scheduler(time.time, time.sleep) def print_time(): print(time.time()) return True <|fim▁hole|>scheduler.enter(5, 1, print_time) print(scheduler.queue) scheduler.run() # blocking until all scheduled things finish print("done")<|fim▁end|>
scheduler.enter(3, 1, print_time)
<|file_name|>CapsWarningTooltip.js<|end_file_name|><|fim▁begin|>/** * * Modelo de Login usando MCV * Desenvolvido por Ricardo Hirashiki * Publicado em: http://www.sitedoricardo.com.br * Data: Ago/2011 * * Baseado na extensao criada por Wemerson Januario * http://code.google.com/p/login-window/ * */ Ext.define('Siccad.view.authentication.CapsWarningTooltip', { extend : 'Ext.tip.QuickTip', alias : 'widget.capswarningtooltip', target : 'authentication-login', id : 'toolcaps', <|fim▁hole|> anchorOffset : 60, width : 305, dismissDelay : 0, autoHide : false, disabled : false, title : '<b>Caps Lock est&aacute; ativada</b>', html : '<div>Se Caps lock estiver ativado, isso pode fazer com que voc&ecirc;</div>' + '<div>digite a senha incorretamente.</div><br/>' + '<div>Voc&ecirc; deve pressionar a tecla Caps lock para desativ&aacute;-la</div>' + '<div>antes de digitar a senha.</div>' });<|fim▁end|>
anchor : 'left',
<|file_name|>models.py<|end_file_name|><|fim▁begin|># coding=utf-8 from django.db import models from main.models import BaseModel STAT_TYPES = ( ('user_count', 'User count'), ('host_count', 'Host count'), ('ip_update_count', 'IP update count'), ) class StatisticsEntry(BaseModel): class Meta: verbose_name_plural = 'Statistics Entries'<|fim▁hole|> stat_type = models.CharField(max_length=32, choices=STAT_TYPES) value = models.IntegerField() def __unicode__(self): return "%s: %d (%s)" % (self.stat_type, self.value, self.created)<|fim▁end|>
<|file_name|>fi_system.hh<|end_file_name|><|fim▁begin|><|fim▁hole|>#ifndef _FI_SYSTEM #include <map> #include <utility> #include <iostream> #include <fstream> #include "config/full_system.hh" #include "config/the_isa.hh" #include "base/types.hh" #include "arch/types.hh" #include "base/trace.hh" #include "debug/FaultInjection.hh" class Fi_System; extern Fi_System *fi_system; class Fi_System : public MemObject{ public : ofstream ouput; ifstream input; string in_name; string out_name; InjectedFaultQueue mainInjectedFaultQueue("Main Fault Queue"); InjectedFaultQueue fetchStageInjectedFaultQueue("Fetch Stage Fault Queue"); InjectedFaultQueue decodeStageInjectedFaultQueue("Decode Stage Fault Queue"); InjectedFaultQueue iewStageInjectedFaultQueue("IEW Stage Fault Queue"); std::map<Addr, int> fi_activation; std::map<Addr, int>::iterator fi_activation_iter; int fi_active; int vectorpos; Addr MagicInstVirtualAddr; uint64_t MagicInstInstCnt; int64_t MagicInstTickCnt; std::vector <ThreadEnabledFault * > threadList; std::vector <cpuExecutedTicks * > coresCount; int cores; public: Fi_system(Params *p); ~Fi_system(); int increase_fi_counters(std :: string curCpu , ThreadEnabledFault *curThread , int64_t ticks); int increase_instr_executed(std:: string curCpu , ThreadEnabledFault *curThread); int get_fi_counters(InjectedFault *p , ThreadEnabledFault &thread,std::string curCpu , int64_t *exec_time , uint64_t *exec_instr ); void storeToFile(std::ofstream &os); void getFromFile(Params *p, std::ifstream &os); } #endif //_FI_SYSTEM<|fim▁end|>