repo_name | path | copies | size | content | license
---|---|---|---|---|---|
alvations/Sensible-SemEval | CWI-data/evaluate_system_original.py | 2 | 1946 | import sys
import argparse
def evaluateIdentifier(gold, pred):
    """
    Performs an intrinsic evaluation of a Complex Word Identification approach.
    @param gold: A vector containing gold-standard labels.
    @param pred: A vector containing predicted labels.
    @return: Precision, Recall and F-1.
    """
    #Initialize variables:
    precisionc = 0
    precisiont = 0
    recallc = 0
    recallt = 0
    #Calculate measures:
    for i in range(0, len(gold)):
        gold_label = gold[i]
        predicted_label = pred[i]
        if gold_label==predicted_label:
            precisionc += 1
            if gold_label==1:
                recallc += 1
        if gold_label==1:
            recallt += 1
        precisiont += 1
    precision = float(precisionc)/float(precisiont)
    recall = float(recallc)/float(recallt)
    fmean = 0.0
    if precision==0.0 and recall==0.0:
        fmean = 0.0
    else:
        fmean = 2*(precision*recall)/(precision+recall)
    #Return measures:
    return precision, recall, fmean
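#Worked example (hypothetical data): gold=[1,0,1], pred=[1,1,1] gives
#precision = 2/3 (matching labels over all items), recall = 2/2 = 1.0
#(matches among gold==1 items), and F1 = 2*(2/3*1.0)/(2/3+1.0) = 0.8.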
if __name__=='__main__':
    #Parse arguments:
    description = 'Evaluation script for Task 11: Complex Word Identification.'
    description += ' The gold-standard file is a dataset with labels in the format provided by the task organizers.'
    description += ' The predicted labels file must contain one label 0 or 1 per line, and must have the same number of lines as the gold-standard.'
    epilog = 'Returns: Precision, Recall and F1.'
    parser = argparse.ArgumentParser(description=description, epilog=epilog)
    parser.add_argument('--gold', required=True, help='File containing dataset with gold-standard labels.')
    parser.add_argument('--pred', required=True, help='File containing predicted labels.')
    args = vars(parser.parse_args())
    #Retrieve labels:
    gold = [int(line.strip().split('\t')[3]) for line in open(args['gold'])]
    pred = [int(line.strip()) for line in open(args['pred'])]
    #Calculate scores:
    p, r, f = evaluateIdentifier(gold, pred)
    #Present scores:
    print('Precision: ' + str(p))
    print('Recall: ' + str(r))
    print('F1: ' + str(f))
| mit |
lanyuwen/openthread | tools/harness-automation/cases_R140/router_5_3_4.py | 18 | 1876 | #!/usr/bin/env python
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
from autothreadharness.harness_case import HarnessCase
import unittest
class Router_5_3_4(HarnessCase):
    role = HarnessCase.ROLE_ROUTER
    case = '5 3 4'
    golden_devices_required = 6

    def on_dialog(self, dialog, title):
        pass


if __name__ == '__main__':
    unittest.main()
| bsd-3-clause |
chuan9/chromium-crosswalk | tools/perf/page_sets/key_idle_power_cases.py | 6 | 2704 | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from page_sets import android_screen_restoration_shared_state
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
from telemetry import story
class KeyIdlePowerPage(page_module.Page):

    def __init__(self, url, page_set, turn_screen_off,
                 shared_page_state_class=shared_page_state.SharedMobilePageState):
        super(KeyIdlePowerPage, self).__init__(
            url=url,
            page_set=page_set,
            shared_page_state_class=(android_screen_restoration_shared_state
                                     .AndroidScreenRestorationSharedState))
        self._turn_screen_off = turn_screen_off

    def RunNavigateSteps(self, action_runner):
        super(KeyIdlePowerPage, self).RunNavigateSteps(action_runner)
        action_runner.Wait(2)
        if self._turn_screen_off:
            # TODO(jdduke): Remove this API violation after the shared page state is
            # exposed here, crbug.com/470147.
            # pylint: disable=protected-access
            action_runner._tab.browser.platform.android_action_runner.TurnScreenOff()
            # We're not interested in tracking activity that occurs immediately after
            # the screen is turned off. Several seconds should be enough time for the
            # browser to "settle down" into an idle state.
            action_runner.Wait(2)

    def RunPageInteractions(self, action_runner):
        # The page interaction is simply waiting in an idle state.
        with action_runner.CreateInteraction('IdleWaiting'):
            action_runner.Wait(20)


class KeyIdlePowerCasesPageSet(story.StorySet):
    """ Key idle power cases """

    def __init__(self):
        super(KeyIdlePowerCasesPageSet, self).__init__()

        foreground_urls_list = [
            # Why: Ensure minimal activity for static, empty pages in the foreground.
            'file://key_idle_power_cases/blank.html',
        ]

        for url in foreground_urls_list:
            self.AddStory(KeyIdlePowerPage(url, self, False))

        background_urls_list = [
            # Why: Ensure animated GIFs aren't processed when Chrome is backgrounded.
            'file://key_idle_power_cases/animated-gif.html',
            # Why: Ensure CSS animations aren't processed when Chrome is backgrounded.
            'file://key_idle_power_cases/css-animation.html',
            # Why: Ensure rAF is suppressed when Chrome is backgrounded.
            'file://key_idle_power_cases/request-animation-frame.html',
            # Why: Ensure setTimeout is throttled when Chrome is backgrounded.
            'file://key_idle_power_cases/set-timeout.html',
        ]

        for url in background_urls_list:
            self.AddStory(KeyIdlePowerPage(url, self, True))
| bsd-3-clause |
Thraxis/pymedusa | lib/unidecode/x00b.py | 252 | 4132 | data = (
'[?]', # 0x00
'N', # 0x01
'N', # 0x02
'H', # 0x03
'[?]', # 0x04
'a', # 0x05
'aa', # 0x06
'i', # 0x07
'ii', # 0x08
'u', # 0x09
'uu', # 0x0a
'R', # 0x0b
'L', # 0x0c
'[?]', # 0x0d
'[?]', # 0x0e
'e', # 0x0f
'ai', # 0x10
'[?]', # 0x11
'[?]', # 0x12
'o', # 0x13
'au', # 0x14
'k', # 0x15
'kh', # 0x16
'g', # 0x17
'gh', # 0x18
'ng', # 0x19
'c', # 0x1a
'ch', # 0x1b
'j', # 0x1c
'jh', # 0x1d
'ny', # 0x1e
'tt', # 0x1f
'tth', # 0x20
'dd', # 0x21
'ddh', # 0x22
'nn', # 0x23
't', # 0x24
'th', # 0x25
'd', # 0x26
'dh', # 0x27
'n', # 0x28
'[?]', # 0x29
'p', # 0x2a
'ph', # 0x2b
'b', # 0x2c
'bh', # 0x2d
'm', # 0x2e
'y', # 0x2f
'r', # 0x30
'[?]', # 0x31
'l', # 0x32
'll', # 0x33
'[?]', # 0x34
'', # 0x35
'sh', # 0x36
'ss', # 0x37
's', # 0x38
'h', # 0x39
'[?]', # 0x3a
'[?]', # 0x3b
'\'', # 0x3c
'\'', # 0x3d
'aa', # 0x3e
'i', # 0x3f
'ii', # 0x40
'u', # 0x41
'uu', # 0x42
'R', # 0x43
'[?]', # 0x44
'[?]', # 0x45
'[?]', # 0x46
'e', # 0x47
'ai', # 0x48
'[?]', # 0x49
'[?]', # 0x4a
'o', # 0x4b
'au', # 0x4c
'', # 0x4d
'[?]', # 0x4e
'[?]', # 0x4f
'[?]', # 0x50
'[?]', # 0x51
'[?]', # 0x52
'[?]', # 0x53
'[?]', # 0x54
'[?]', # 0x55
'+', # 0x56
'+', # 0x57
'[?]', # 0x58
'[?]', # 0x59
'[?]', # 0x5a
'[?]', # 0x5b
'rr', # 0x5c
'rh', # 0x5d
'[?]', # 0x5e
'yy', # 0x5f
'RR', # 0x60
'LL', # 0x61
'[?]', # 0x62
'[?]', # 0x63
'[?]', # 0x64
'[?]', # 0x65
'0', # 0x66
'1', # 0x67
'2', # 0x68
'3', # 0x69
'4', # 0x6a
'5', # 0x6b
'6', # 0x6c
'7', # 0x6d
'8', # 0x6e
'9', # 0x6f
'', # 0x70
'[?]', # 0x71
'[?]', # 0x72
'[?]', # 0x73
'[?]', # 0x74
'[?]', # 0x75
'[?]', # 0x76
'[?]', # 0x77
'[?]', # 0x78
'[?]', # 0x79
'[?]', # 0x7a
'[?]', # 0x7b
'[?]', # 0x7c
'[?]', # 0x7d
'[?]', # 0x7e
'[?]', # 0x7f
'[?]', # 0x80
'[?]', # 0x81
'N', # 0x82
'H', # 0x83
'[?]', # 0x84
'a', # 0x85
'aa', # 0x86
'i', # 0x87
'ii', # 0x88
'u', # 0x89
'uu', # 0x8a
'[?]', # 0x8b
'[?]', # 0x8c
'[?]', # 0x8d
'e', # 0x8e
'ee', # 0x8f
'ai', # 0x90
'[?]', # 0x91
'o', # 0x92
'oo', # 0x93
'au', # 0x94
'k', # 0x95
'[?]', # 0x96
'[?]', # 0x97
'[?]', # 0x98
'ng', # 0x99
'c', # 0x9a
'[?]', # 0x9b
'j', # 0x9c
'[?]', # 0x9d
'ny', # 0x9e
'tt', # 0x9f
'[?]', # 0xa0
'[?]', # 0xa1
'[?]', # 0xa2
'nn', # 0xa3
't', # 0xa4
'[?]', # 0xa5
'[?]', # 0xa6
'[?]', # 0xa7
'n', # 0xa8
'nnn', # 0xa9
'p', # 0xaa
'[?]', # 0xab
'[?]', # 0xac
'[?]', # 0xad
'm', # 0xae
'y', # 0xaf
'r', # 0xb0
'rr', # 0xb1
'l', # 0xb2
'll', # 0xb3
'lll', # 0xb4
'v', # 0xb5
'[?]', # 0xb6
'ss', # 0xb7
's', # 0xb8
'h', # 0xb9
'[?]', # 0xba
'[?]', # 0xbb
'[?]', # 0xbc
'[?]', # 0xbd
'aa', # 0xbe
'i', # 0xbf
'ii', # 0xc0
'u', # 0xc1
'uu', # 0xc2
'[?]', # 0xc3
'[?]', # 0xc4
'[?]', # 0xc5
'e', # 0xc6
'ee', # 0xc7
'ai', # 0xc8
'[?]', # 0xc9
'o', # 0xca
'oo', # 0xcb
'au', # 0xcc
'', # 0xcd
'[?]', # 0xce
'[?]', # 0xcf
'[?]', # 0xd0
'[?]', # 0xd1
'[?]', # 0xd2
'[?]', # 0xd3
'[?]', # 0xd4
'[?]', # 0xd5
'[?]', # 0xd6
'+', # 0xd7
'[?]', # 0xd8
'[?]', # 0xd9
'[?]', # 0xda
'[?]', # 0xdb
'[?]', # 0xdc
'[?]', # 0xdd
'[?]', # 0xde
'[?]', # 0xdf
'[?]', # 0xe0
'[?]', # 0xe1
'[?]', # 0xe2
'[?]', # 0xe3
'[?]', # 0xe4
'[?]', # 0xe5
'0', # 0xe6
'1', # 0xe7
'2', # 0xe8
'3', # 0xe9
'4', # 0xea
'5', # 0xeb
'6', # 0xec
'7', # 0xed
'8', # 0xee
'9', # 0xef
'+10+', # 0xf0
'+100+', # 0xf1
'+1000+', # 0xf2
'[?]', # 0xf3
'[?]', # 0xf4
'[?]', # 0xf5
'[?]', # 0xf6
'[?]', # 0xf7
'[?]', # 0xf8
'[?]', # 0xf9
'[?]', # 0xfa
'[?]', # 0xfb
'[?]', # 0xfc
'[?]', # 0xfd
'[?]', # 0xfe
)
| gpl-3.0 |
xuru/pyvisdk | pyvisdk/do/vmfs_datastore_base_option.py | 1 | 1045 |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def VmfsDatastoreBaseOption(vim, *args, **kwargs):
    '''Base class that describes a VMFS datastore provisioning option.'''

    obj = vim.client.factory.create('ns0:VmfsDatastoreBaseOption')

    # do some validation checking...
    if (len(args) + len(kwargs)) < 1:
        raise IndexError('Expected at least 1 argument, got: %d' % len(args))

    required = [ 'layout' ]
    optional = [ 'partitionFormatChange', 'dynamicProperty', 'dynamicType' ]

    for name, arg in zip(required+optional, args):
        setattr(obj, name, arg)

    for name, value in kwargs.items():
        if name in required + optional:
            setattr(obj, name, value)
        else:
            raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))

    return obj
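# Illustrative call (hypothetical 'vim' connection and layout object):
#   opt = VmfsDatastoreBaseOption(vim, layout, partitionFormatChange=True)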
| mit |
stendarr/searchengine-imagescraper | bingscraperv3.py | 1 | 2359 | from bs4 import BeautifulSoup, SoupStrainer
from html.parser import *
import http.client
import urllib.request
from urllib.request import urlopen, Request
#99 questions
yes = ['y','ye','yes']
search_term = str(input('Bing Image Search: ')).replace(" ", "+")
link_limit = int(input("Enter link limit (1-100): "))
save_links_yn = str(input("Write links to a file? (y/n) ")).lower()
if save_links_yn in yes:
    filename_links = str(input("How should the file be named? "))

download_pictures_yn = str(input("Download pictures? (y/n) ")).lower()
if download_pictures_yn in yes:
    filename_pictures = str(input("How should the image files be named? "))
    filepath_pictures = filename_pictures+'/'+filename_pictures
#sets bing url according to input
bing_url = 'http://www.bing.com/images/search?q='+search_term
#just checking the search url for mistakes
print("Checking following URL:\n"+bing_url+"\n")
#adding headers to fool bing
req = Request(bing_url, headers={'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36'})
soup = BeautifulSoup(urllib.request.urlopen(req), 'html.parser')
# for debugging and reverse engineering purposes
#open('souptest1.html', 'w').write(str(soup.encode("utf-8")))
#open('souptest2.txt', 'w').write(str(soup))
#find all a tags with attribute m because that's where the links are
divs = soup.findAll("a", attrs={"m": True})
link_counter = 0
exception_counter = 0
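#illustrative (hypothetical) shape of the "m" attribute parsed below:
#   m='{...,imgurl:"http://example.com/pic.jpg",tid:"..."}'
#the partition/rpartition calls cut out the imgurl value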
for div in divs:
    try:
        #stripping elements of unnecessary characters
        div = str(div).partition('",imgurl:"')[-1]
        div = div.rpartition('",tid:"')[0]
        div = str(div)
        #writing links to a file
        if save_links_yn in yes:
            open(filename_links+'.txt', 'a').write(div+"\n")
        #downloading the images
        if download_pictures_yn in yes:
            urllib.request.urlretrieve(div, filename_pictures+str(link_counter+1)+".jpg")
        #if counter's limit reached, stop
        link_counter += 1
        if link_counter == link_limit:
            break
    except IOError:
        print("Error with:",div)
        exception_counter += 1
        link_counter -= 1
print("\nlinks found:", link_counter)
print("\nexceptions thrown:", exception_counter)
input("\n\n-----------------------\n EOP")
| mit |
Whisper-Cao/802.15.4-revision | docs/doxygen/swig_doc.py | 6 | 8675 | #
# Copyright 2010,2011 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
"""
Creates the swig_doc.i SWIG interface file.
Execute using: python swig_doc.py xml_path outputfilename
The file instructs SWIG to transfer the doxygen comments into the
python docstrings.
"""
import sys
try:
    from doxyxml import DoxyIndex, DoxyClass, DoxyFriend, DoxyFunction, DoxyFile, base
except ImportError:
    from gnuradio.doxyxml import DoxyIndex, DoxyClass, DoxyFriend, DoxyFunction, DoxyFile, base

def py_name(name):
    bits = name.split('_')
    return '_'.join(bits[1:])

def make_name(name):
    bits = name.split('_')
    return bits[0] + '_make_' + '_'.join(bits[1:])


class Block(object):
    """
    Checks if doxyxml produced objects correspond to a gnuradio block.
    """

    @classmethod
    def includes(cls, item):
        if not isinstance(item, DoxyClass):
            return False
        # Check for a parsing error.
        if item.error():
            return False
        return item.has_member(make_name(item.name()), DoxyFriend)


def utoascii(text):
    """
    Convert unicode text into ascii and escape quotes.
    """
    if text is None:
        return ''
    out = text.encode('ascii', 'replace')
    out = out.replace('"', '\\"')
    return out


def combine_descriptions(obj):
    """
    Combines the brief and detailed descriptions of an object together.
    """
    description = []
    bd = obj.brief_description.strip()
    dd = obj.detailed_description.strip()
    if bd:
        description.append(bd)
    if dd:
        description.append(dd)
    return utoascii('\n\n'.join(description)).strip()


entry_templ = '%feature("docstring") {name} "{docstring}"'

def make_entry(obj, name=None, templ="{description}", description=None):
    """
    Create a docstring entry for a swig interface file.

    obj - a doxyxml object from which documentation will be extracted.
    name - the name of the C object (defaults to obj.name())
    templ - an optional template for the docstring containing only one
            variable named 'description'.
    description - if this optional variable is set then it's value is
            used as the description instead of extracting it from obj.
    """
    if name is None:
        name = obj.name()
    if "operator " in name:
        return ''
    if description is None:
        description = combine_descriptions(obj)
    docstring = templ.format(description=description)
    if not docstring:
        return ''
    return entry_templ.format(
        name=name,
        docstring=docstring,
    )
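# Illustrative entry emitted by make_entry (hypothetical name and description):
#   %feature("docstring") gr_head "Copies the first N items to the output."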
def make_func_entry(func, name=None, description=None, params=None):
    """
    Create a function docstring entry for a swig interface file.

    func - a doxyxml object from which documentation will be extracted.
    name - the name of the C object (defaults to func.name())
    description - if this optional variable is set then it's value is
            used as the description instead of extracting it from func.
    params - a parameter list that overrides using func.params.
    """
    if params is None:
        params = func.params
    params = [prm.declname for prm in params]
    if params:
        sig = "Params: (%s)" % ", ".join(params)
    else:
        sig = "Params: (NONE)"
    templ = "{description}\n\n" + sig
    return make_entry(func, name=name, templ=utoascii(templ),
                      description=description)


def make_class_entry(klass, description=None):
    """
    Create a class docstring for a swig interface file.
    """
    output = []
    output.append(make_entry(klass, description=description))
    for func in klass.in_category(DoxyFunction):
        name = klass.name() + '::' + func.name()
        output.append(make_func_entry(func, name=name))
    return "\n\n".join(output)


def make_block_entry(di, block):
    """
    Create class and function docstrings of a gnuradio block for a
    swig interface file.
    """
    descriptions = []
    # Get the documentation associated with the class.
    class_desc = combine_descriptions(block)
    if class_desc:
        descriptions.append(class_desc)
    # Get the documentation associated with the make function
    make_func = di.get_member(make_name(block.name()), DoxyFunction)
    make_func_desc = combine_descriptions(make_func)
    if make_func_desc:
        descriptions.append(make_func_desc)
    # Get the documentation associated with the file
    try:
        block_file = di.get_member(block.name() + ".h", DoxyFile)
        file_desc = combine_descriptions(block_file)
        if file_desc:
            descriptions.append(file_desc)
    except base.Base.NoSuchMember:
        # Don't worry if we can't find a matching file.
        pass
    # And join them all together to make a super duper description.
    super_description = "\n\n".join(descriptions)
    # Associate the combined description with the class and
    # the make function.
    output = []
    output.append(make_class_entry(block, description=super_description))
    creator = block.get_member(block.name(), DoxyFunction)
    output.append(make_func_entry(make_func, description=super_description,
                                  params=creator.params))
    return "\n\n".join(output)


def make_swig_interface_file(di, swigdocfilename, custom_output=None):

    output = ["""
/*
 * This file was automatically generated using swig_doc.py.
 *
 * Any changes to it will be lost next time it is regenerated.
 */
"""]

    if custom_output is not None:
        output.append(custom_output)

    # Create docstrings for the blocks.
    blocks = di.in_category(Block)
    make_funcs = set([])
    for block in blocks:
        try:
            make_func = di.get_member(make_name(block.name()), DoxyFunction)
            make_funcs.add(make_func.name())
            output.append(make_block_entry(di, block))
        except block.ParsingError:
            print('Parsing error for block %s' % block.name())

    # Create docstrings for functions
    # Don't include the make functions since they have already been dealt with.
    funcs = [f for f in di.in_category(DoxyFunction) if f.name() not in make_funcs]
    for f in funcs:
        try:
            output.append(make_func_entry(f))
        except f.ParsingError:
            print('Parsing error for function %s' % f.name())

    # Create docstrings for classes
    block_names = [block.name() for block in blocks]
    klasses = [k for k in di.in_category(DoxyClass) if k.name() not in block_names]
    for k in klasses:
        try:
            output.append(make_class_entry(k))
        except k.ParsingError:
            print('Parsing error for class %s' % k.name())

    # Docstrings are not created for anything that is not a function or a class.
    # If this excludes anything important please add it here.

    output = "\n\n".join(output)

    swig_doc = file(swigdocfilename, 'w')
    swig_doc.write(output)
    swig_doc.close()


if __name__ == "__main__":
    # Parse command line options and set up doxyxml.
    err_msg = "Execute using: python swig_doc.py xml_path outputfilename"
    if len(sys.argv) != 3:
        raise StandardError(err_msg)
    xml_path = sys.argv[1]
    swigdocfilename = sys.argv[2]
    di = DoxyIndex(xml_path)

    # gnuradio.gr.msq_queue.insert_tail and delete_head create errors unless docstrings are defined!
    # This is presumably a bug in SWIG.
    #msg_q = di.get_member(u'gr_msg_queue', DoxyClass)
    #insert_tail = msg_q.get_member(u'insert_tail', DoxyFunction)
    #delete_head = msg_q.get_member(u'delete_head', DoxyFunction)
    output = []
    #output.append(make_func_entry(insert_tail, name='gr_py_msg_queue__insert_tail'))
    #output.append(make_func_entry(delete_head, name='gr_py_msg_queue__delete_head'))
    custom_output = "\n\n".join(output)

    # Generate the docstrings interface file.
    make_swig_interface_file(di, swigdocfilename, custom_output=custom_output)
| gpl-3.0 |
igordejanovic/textX | tests/functional/test_scoping/test_model_repository.py | 1 | 4915 | from __future__ import unicode_literals
from os.path import dirname, abspath, join
import textx.scoping.providers as scoping_providers
from textx import metamodel_from_file
from textx.scoping import is_file_included
def test_inclusion_check_1():
    """
    Test to demonstrate how to check if a file is used by a model.
    This can be used by an IDE to determine, if a model has to be
    updated/reloaded.
    """
    #################################
    # META MODEL DEF
    #################################
    my_meta_model = metamodel_from_file(
        join(abspath(dirname(__file__)), 'issue66', 'task_specification.tx'))
    search_path = [
        join(abspath(dirname(__file__)), 'issue66', 'somewhere1'),  # assembly
        join(abspath(dirname(__file__)), 'issue66', 'somewhere2')   # position
    ]
    my_meta_model.register_scope_providers(
        {"*.*": scoping_providers.PlainNameImportURI(search_path=search_path)})

    #################################
    # MODEL PARSING
    #################################
    # This model loads two files
    # * one file exists locally and in a search path --> the local one should
    #   be preferred.
    # * one only exists locally.
    m = my_meta_model.model_from_file(
        join(abspath(dirname(__file__)),
             'issue66', 'assembly_car1.prog'))

    # the model file itself is "included" (special case)
    assert is_file_included(
        join(abspath(dirname(__file__)), 'issue66',
             'assembly_car1.prog'),
        m
    )
    # another model file
    assert not is_file_included(
        join(abspath(dirname(__file__)), 'issue66', 'local',
             'assembly_car3.prog'),
        m
    )
    # file in folder "local"
    assert not is_file_included(
        join(abspath(dirname(__file__)), 'issue66', 'local',
             'mylib', 'local.tasks'),
        m
    )
    # file in folder "local"
    assert not is_file_included(
        join(abspath(dirname(__file__)), 'issue66', 'local',
             'mylib', 'position.tasks'),
        m
    )
    # distant file (part of search path)
    assert is_file_included(
        join(abspath(dirname(__file__)), 'issue66', 'somewhere1',
             'mylib', 'assembly.tasks'),
        m
    )
    # distant file (part of search path)
    assert is_file_included(
        join(abspath(dirname(__file__)), 'issue66', 'somewhere2',
             'mylib', 'position.tasks'),
        m
    )

    #################################
    # END
    #################################


def test_inclusion_check_2():
    """
    Test to demonstrate how to check if a file is used by a model.
    This can be used by an IDE to determine, if a model has to be
    updated/reloaded.
    """
    #################################
    # META MODEL DEF
    #################################
    my_meta_model = metamodel_from_file(
        join(abspath(dirname(__file__)), 'issue66', 'task_specification.tx'))
    search_path = [
        join(abspath(dirname(__file__)), 'issue66', 'somewhere1'),  # assembly
        join(abspath(dirname(__file__)), 'issue66', 'somewhere2')   # position
    ]
    my_meta_model.register_scope_providers(
        {"*.*": scoping_providers.PlainNameImportURI(search_path=search_path)})

    #################################
    # MODEL PARSING
    #################################
    # This model loads two files
    # * one file exists locally and in a search path --> the local one should
    #   be preferred.
    # * one only exists locally.
    m = my_meta_model.model_from_file(
        join(abspath(dirname(__file__)),
             'issue66', 'local', 'assembly_car3.prog'))

    # the model file itself is "included" (special case)
    assert is_file_included(
        join(abspath(dirname(__file__)), 'issue66', 'local',
             'assembly_car3.prog'),
        m
    )
    # local file
    assert is_file_included(
        join(abspath(dirname(__file__)), 'issue66', 'local',
             'mylib', 'local.tasks'),
        m
    )
    # local file
    assert is_file_included(
        join(abspath(dirname(__file__)), 'issue66', 'local',
             'mylib', 'position.tasks'),
        m
    )
    # distant file
    assert not is_file_included(
        join(abspath(dirname(__file__)), 'issue66', 'somewhere1',
             'mylib', 'assembly.tasks'),
        m
    )
    # distant file
    assert not is_file_included(
        join(abspath(dirname(__file__)), 'issue66', 'somewhere2',
             'mylib', 'position.tasks'),
        m
    )

    #################################
    # END
    #################################


def test_no_tx_model_repos():
    from textx import metamodel_from_str
    mm = metamodel_from_str("Model: 'A';")
    m = mm.model_from_str("A")

    assert not is_file_included(
        join(abspath(dirname(__file__)), 'issue66', 'local',
             'mylib', 'position.tasks'),
        m
    )
| mit |
medspx/QGIS | python/plugins/processing/gui/wrappers_map_theme.py | 30 | 1829 | # -*- coding: utf-8 -*-
"""
***************************************************************************
wrappers_map_theme.py - Map theme widget wrappers
---------------------
Date : August 2017
Copyright : (C) 2017 by OPENGIS.ch
Email : [email protected]
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
from qgis.core import QgsProject
from qgis.PyQt.QtWidgets import QComboBox
from processing.gui.wrappers import (
    BasicWidgetWrapper
)


class MapThemeWrapper(BasicWidgetWrapper):
    """
    WidgetWrapper for ParameterString that creates a combobox widget
    with the existing map themes.
    """

    def createWidget(self):
        self._combo = QComboBox()
        self._combo.addItem('', '')
        for item in self.items():
            self._combo.addItem(item, item)
        self._combo.currentIndexChanged.connect(lambda:
                                                self.widgetValueHasChanged.emit(self))
        return self._combo

    def items(self):
        return QgsProject.instance().mapThemeCollection().mapThemes()

    def setValue(self, value):
        self.setComboValue(value, self._combo)

    def value(self):
        return self.comboValue(combobox=self._combo)
| gpl-2.0 |
appsembler/edx-platform | cms/djangoapps/contentstore/management/commands/delete_course.py | 16 | 3568 | from __future__ import print_function
from six import text_type
from django.core.management.base import BaseCommand, CommandError
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from contentstore.utils import delete_course
from xmodule.contentstore.django import contentstore
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from .prompt import query_yes_no
class Command(BaseCommand):
    """
    Delete a MongoDB backed course

    Example usage:
        $ ./manage.py cms delete_course 'course-v1:edX+DemoX+Demo_Course' --settings=devstack
        $ ./manage.py cms delete_course 'course-v1:edX+DemoX+Demo_Course' --keep-instructors --settings=devstack
        $ ./manage.py cms delete_course 'course-v1:edX+DemoX+Demo_Course' --remove-assets --settings=devstack

    Note:
        The keep-instructors option is useful for resolving issues that arise when a course run's ID is duplicated
        in a case-insensitive manner. MongoDB is case-sensitive, but MySQL is case-insensitive. This results in
        course-v1:edX+DemoX+1t2017 being treated differently in MongoDB from course-v1:edX+DemoX+1T2017 (capital 'T').
        If you need to remove a duplicate that has resulted from casing issues, use the --keep-instructors flag
        to ensure that permissions for the remaining course run are not deleted.

        Use the remove-assets option to ensure all assets are deleted. This is especially relevant to users of the
        split Mongo modulestore.
    """
    help = 'Delete a MongoDB backed course'

    def add_arguments(self, parser):
        parser.add_argument(
            'course_key',
            help='ID of the course to delete.',
        )
        parser.add_argument(
            '--keep-instructors',
            action='store_true',
            default=False,
            help='Do not remove permissions of users and groups for course',
        )
        parser.add_argument(
            '--remove-assets',
            action='store_true',
            help='Remove all assets associated with the course. '
                 'Be careful! These assets may be associated with another course',
        )

    def handle(self, *args, **options):
        try:
            # a course key may have unicode chars in it
            try:
                course_key = text_type(options['course_key'], 'utf8')
            # May already be decoded to unicode if coming in through tests, this is ok.
            except TypeError:
                course_key = text_type(options['course_key'])

            course_key = CourseKey.from_string(course_key)
        except InvalidKeyError:
            raise CommandError('Invalid course_key: {}'.format(options['course_key']))

        if not modulestore().get_course(course_key):
            raise CommandError('Course not found: {}'.format(options['course_key']))

        print('Preparing to delete course %s from module store....' % options['course_key'])

        if query_yes_no('Are you sure you want to delete course {}?'.format(course_key), default='no'):
            if query_yes_no('Are you sure? This action cannot be undone!', default='no'):
                delete_course(course_key, ModuleStoreEnum.UserID.mgmt_command, options['keep_instructors'])
                if options['remove_assets']:
                    contentstore().delete_all_course_assets(course_key)
                    print('Deleted assets for course {}'.format(course_key))
                print('Deleted course {}'.format(course_key))
| agpl-3.0 |
katstalk/android_external_chromium_org | third_party/re2/re2/make_unicode_groups.py | 219 | 2849 | #!/usr/bin/python
# Copyright 2008 The RE2 Authors. All Rights Reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
"""Generate C++ tables for Unicode Script and Category groups."""
import sys
import unicode
_header = """
// GENERATED BY make_unicode_groups.py; DO NOT EDIT.
// make_unicode_groups.py >unicode_groups.cc
#include "re2/unicode_groups.h"
namespace re2 {
"""
_trailer = """
} // namespace re2
"""
n16 = 0
n32 = 0
def MakeRanges(codes):
    """Turn a list like [1,2,3,7,8,9] into a range list [[1,3], [7,9]]"""
    ranges = []
    last = -100
    for c in codes:
        if c == last+1:
            ranges[-1][1] = c
        else:
            ranges.append([c, c])
        last = c
    return ranges

def PrintRanges(type, name, ranges):
    """Print the ranges as an array of type named name."""
    print "static %s %s[] = {" % (type, name,)
    for lo, hi in ranges:
        print "\t{ %d, %d }," % (lo, hi)
    print "};"

# def PrintCodes(type, name, codes):
#     """Print the codes as an array of type named name."""
#     print "static %s %s[] = {" % (type, name,)
#     for c in codes:
#         print "\t%d," % (c,)
#     print "};"

def PrintGroup(name, codes):
    """Print the data structures for the group of codes.
    Return a UGroup literal for the group."""

    # See unicode_groups.h for a description of the data structure.

    # Split codes into 16-bit ranges and 32-bit ranges.
    range16 = MakeRanges([c for c in codes if c < 65536])
    range32 = MakeRanges([c for c in codes if c >= 65536])

    # Pull singleton ranges out of range16.
    # code16 = [lo for lo, hi in range16 if lo == hi]
    # range16 = [[lo, hi] for lo, hi in range16 if lo != hi]

    global n16
    global n32
    n16 += len(range16)
    n32 += len(range32)

    ugroup = "{ \"%s\", +1" % (name,)
    # if len(code16) > 0:
    #     PrintCodes("uint16", name+"_code16", code16)
    #     ugroup += ", %s_code16, %d" % (name, len(code16))
    # else:
    #     ugroup += ", 0, 0"
    if len(range16) > 0:
        PrintRanges("URange16", name+"_range16", range16)
        ugroup += ", %s_range16, %d" % (name, len(range16))
    else:
        ugroup += ", 0, 0"
    if len(range32) > 0:
        PrintRanges("URange32", name+"_range32", range32)
        ugroup += ", %s_range32, %d" % (name, len(range32))
    else:
        ugroup += ", 0, 0"
    ugroup += " }"
    return ugroup
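# Illustrative literal returned by PrintGroup (hypothetical name and counts):
#   { "Lu", +1, Lu_range16, 12, Lu_range32, 3 }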
def main():
    print _header
    ugroups = []
    for name, codes in unicode.Categories().iteritems():
        ugroups.append(PrintGroup(name, codes))
    for name, codes in unicode.Scripts().iteritems():
        ugroups.append(PrintGroup(name, codes))
    print "// %d 16-bit ranges, %d 32-bit ranges" % (n16, n32)
    print "UGroup unicode_groups[] = {";
    ugroups.sort()
    for ug in ugroups:
        print "\t%s," % (ug,)
    print "};"
    print "int num_unicode_groups = %d;" % (len(ugroups),)
    print _trailer

if __name__ == '__main__':
    main()
| bsd-3-clause |
rchekaluk/cloudbiolinux | cloudbio/package/cpan.py | 9 | 3266 | """Install perl packages using CPAN and cpanminus (cpanm).
"""
import os
from fabric.api import cd, settings
from cloudbio.flavor.config import get_config_file
from cloudbio.fabutils import find_cmd
from cloudbio.package.shared import _yaml_to_packages
from cloudbio.custom import shared as cshared
def install_packages(env):
    config_file = get_config_file(env, "perl-libs.yaml")
    (packages, _) = _yaml_to_packages(config_file.base, subs_yaml_file=config_file.dist, namesort=False)
    cpanm_cmd = find_cmd(env, "cpanm", "--version")
    for package in packages:
        if package.count("==") > 1:
            _install_from_url(env, cpanm_cmd, package)
        else:
            _install_from_cpan(env, cpanm_cmd, package)
def _install_from_cpan(env, cpanm_cmd, package):
    """Install from CPAN using cpanm, handling special arguments.

    The simplest input string is just a package to install (like XML::Simple) but
    users can also specify build arguments and exports as additional items separated
    by ';'
    """
    parts = package.split(";")
    if len(parts) == 1:
        perl_lib = parts[0]
        args = ""
        exports = []
    elif len(parts) == 2:
        perl_lib, args = parts
        exports = []
    else:
        perl_lib, args = parts[:2]
        exports = parts[2:]
    export_strs = []
    for export in exports:
        export_strs.append("export " + export.format(system_install=env.system_install))
    export = " && ".join(export_strs) + " && " if export_strs else ""
    build_args = ("--build-args='%s'" % args) if args else ""
    env.safe_run("%s %s -i --notest --local-lib=%s %s '%s'" % (export, cpanm_cmd, env.system_install,
                                                               build_args, perl_lib))

def _install_from_url(env, cpanm_cmd, package):
    """Check version of a dependency and download and install with cpanm if not up to date.

    Packages installed via URL have the package name, target version and URL separated
    with '=='. They can also optionally have a build directory or dependency to remove.
    """
    parts = package.split("==")
    package, target_version, url = parts[:3]
    args = {}
    if len(parts) > 3:
        for key, value in (x.split("=") for x in parts[3:]):
            args[key] = value
    with settings(warn_only=True):
        cur_version = env.safe_run_output("export PERL5LIB=%s/lib/perl5:${PERL5LIB} && " % env.system_install +
                                          """perl -le 'eval "require $ARGV[0]" and print $ARGV[0]->VERSION' %s"""
                                          % package)
    if cur_version != target_version:
        with cshared._make_tmp_dir() as work_dir:
            with cd(work_dir):
                dl_dir = cshared._fetch_and_unpack(url)
                if args.get("build"):
                    dl_dir = os.path.join(dl_dir, args["build"])
                with cd(dl_dir):
                    if args.get("depremove"):
                        for fname in ["Makefile.PL", "MYMETA.json", "MYMETA.yml"]:
                            env.safe_run(r"""sed -i.bak -e '/^.*%s.*/s/^/#/' %s""" % (args["depremove"], fname))
                    env.safe_run("%s -i --notest --local-lib=%s ." % (cpanm_cmd, env.system_install))
| mit |
SimonSickle/android_kernel_htc_primou | tools/perf/scripts/python/netdev-times.py | 11271 | 15048 | # Display a process of packets and processed time.
# It helps us to investigate networking or network device.
#
# options
# tx: show only tx chart
# rx: show only rx chart
# dev=: show only thing related to specified device
# debug: work with debug mode. It shows buffer status.
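#
# Example invocation (illustrative; assumes a trace recorded with the
# matching net/irq/napi/skb tracepoints):
#   perf script -s netdev-times.py dev=eth0 debug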
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
all_event_list = []; # insert all tracepoint event related with this script
irq_dic = {}; # key is cpu and value is a list which stacks irqs
# which raise NET_RX softirq
net_rx_dic = {}; # key is cpu and value include time of NET_RX softirq-entry
# and a list which stacks receive
receive_hunk_list = []; # a list which include a sequence of receive events
rx_skb_list = []; # received packet list for matching
# skb_copy_datagram_iovec
buffer_budget = 65536; # the budget of rx_skb_list, tx_queue_list and
# tx_xmit_list
of_count_rx_skb_list = 0; # overflow count
tx_queue_list = []; # list of packets which pass through dev_queue_xmit
of_count_tx_queue_list = 0; # overflow count
tx_xmit_list = []; # list of packets which pass through dev_hard_start_xmit
of_count_tx_xmit_list = 0; # overflow count
tx_free_list = []; # list of packets which is freed
# options
show_tx = 0;
show_rx = 0;
dev = 0; # store a name of device specified by option "dev="
debug = 0;
# indices of event_info tuple
EINFO_IDX_NAME= 0
EINFO_IDX_CONTEXT=1
EINFO_IDX_CPU= 2
EINFO_IDX_TIME= 3
EINFO_IDX_PID= 4
EINFO_IDX_COMM= 5
# Calculate a time interval(msec) from src(nsec) to dst(nsec)
def diff_msec(src, dst):
    return (dst - src) / 1000000.0

# Display a process of transmitting a packet
def print_transmit(hunk):
    if dev != 0 and hunk['dev'].find(dev) < 0:
        return
    print "%7s %5d %6d.%06dsec %12.3fmsec %12.3fmsec" % \
        (hunk['dev'], hunk['len'],
         nsecs_secs(hunk['queue_t']),
         nsecs_nsecs(hunk['queue_t'])/1000,
         diff_msec(hunk['queue_t'], hunk['xmit_t']),
         diff_msec(hunk['xmit_t'], hunk['free_t']))
# Format for displaying rx packet processing
PF_IRQ_ENTRY= " irq_entry(+%.3fmsec irq=%d:%s)"
PF_SOFT_ENTRY=" softirq_entry(+%.3fmsec)"
PF_NAPI_POLL= " napi_poll_exit(+%.3fmsec %s)"
PF_JOINT= " |"
PF_WJOINT= " | |"
PF_NET_RECV= " |---netif_receive_skb(+%.3fmsec skb=%x len=%d)"
PF_NET_RX= " |---netif_rx(+%.3fmsec skb=%x)"
PF_CPY_DGRAM= " | skb_copy_datagram_iovec(+%.3fmsec %d:%s)"
PF_KFREE_SKB= " | kfree_skb(+%.3fmsec location=%x)"
PF_CONS_SKB= " | consume_skb(+%.3fmsec)"
# Display a process of received packets and interrputs associated with
# a NET_RX softirq
def print_receive(hunk):
    show_hunk = 0
    irq_list = hunk['irq_list']
    cpu = irq_list[0]['cpu']
    base_t = irq_list[0]['irq_ent_t']
    # check if this hunk should be showed
    if dev != 0:
        for i in range(len(irq_list)):
            if irq_list[i]['name'].find(dev) >= 0:
                show_hunk = 1
                break
    else:
        show_hunk = 1
    if show_hunk == 0:
        return
    print "%d.%06dsec cpu=%d" % \
        (nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu)
    for i in range(len(irq_list)):
        print PF_IRQ_ENTRY % \
            (diff_msec(base_t, irq_list[i]['irq_ent_t']),
             irq_list[i]['irq'], irq_list[i]['name'])
        print PF_JOINT
        irq_event_list = irq_list[i]['event_list']
        for j in range(len(irq_event_list)):
            irq_event = irq_event_list[j]
            if irq_event['event'] == 'netif_rx':
                print PF_NET_RX % \
                    (diff_msec(base_t, irq_event['time']),
                     irq_event['skbaddr'])
                print PF_JOINT
    print PF_SOFT_ENTRY % \
        diff_msec(base_t, hunk['sirq_ent_t'])
    print PF_JOINT
    event_list = hunk['event_list']
    for i in range(len(event_list)):
        event = event_list[i]
        if event['event_name'] == 'napi_poll':
            print PF_NAPI_POLL % \
                (diff_msec(base_t, event['event_t']), event['dev'])
            if i == len(event_list) - 1:
                print ""
            else:
                print PF_JOINT
        else:
            print PF_NET_RECV % \
                (diff_msec(base_t, event['event_t']), event['skbaddr'],
                 event['len'])
            if 'comm' in event.keys():
                print PF_WJOINT
                print PF_CPY_DGRAM % \
                    (diff_msec(base_t, event['comm_t']),
                     event['pid'], event['comm'])
            elif 'handle' in event.keys():
                print PF_WJOINT
                if event['handle'] == "kfree_skb":
                    print PF_KFREE_SKB % \
                        (diff_msec(base_t,
                                   event['comm_t']),
                         event['location'])
                elif event['handle'] == "consume_skb":
                    print PF_CONS_SKB % \
                        diff_msec(base_t,
                                  event['comm_t'])
            print PF_JOINT
def trace_begin():
    global show_tx
    global show_rx
    global dev
    global debug

    for i in range(len(sys.argv)):
        if i == 0:
            continue
        arg = sys.argv[i]
        if arg == 'tx':
            show_tx = 1
        elif arg == 'rx':
            show_rx = 1
        elif arg.find('dev=', 0, 4) >= 0:
            dev = arg[4:]
        elif arg == 'debug':
            debug = 1
    if show_tx == 0 and show_rx == 0:
        show_tx = 1
        show_rx = 1

def trace_end():
    # order all events in time
    all_event_list.sort(lambda a, b: cmp(a[EINFO_IDX_TIME],
                                         b[EINFO_IDX_TIME]))
    # process all events
    for i in range(len(all_event_list)):
        event_info = all_event_list[i]
        name = event_info[EINFO_IDX_NAME]
        if name == 'irq__softirq_exit':
            handle_irq_softirq_exit(event_info)
        elif name == 'irq__softirq_entry':
            handle_irq_softirq_entry(event_info)
        elif name == 'irq__softirq_raise':
            handle_irq_softirq_raise(event_info)
        elif name == 'irq__irq_handler_entry':
            handle_irq_handler_entry(event_info)
        elif name == 'irq__irq_handler_exit':
            handle_irq_handler_exit(event_info)
        elif name == 'napi__napi_poll':
            handle_napi_poll(event_info)
        elif name == 'net__netif_receive_skb':
            handle_netif_receive_skb(event_info)
        elif name == 'net__netif_rx':
            handle_netif_rx(event_info)
        elif name == 'skb__skb_copy_datagram_iovec':
            handle_skb_copy_datagram_iovec(event_info)
        elif name == 'net__net_dev_queue':
            handle_net_dev_queue(event_info)
        elif name == 'net__net_dev_xmit':
            handle_net_dev_xmit(event_info)
        elif name == 'skb__kfree_skb':
            handle_kfree_skb(event_info)
        elif name == 'skb__consume_skb':
            handle_consume_skb(event_info)
    # display receive hunks
    if show_rx:
        for i in range(len(receive_hunk_list)):
            print_receive(receive_hunk_list[i])
    # display transmit hunks
    if show_tx:
        print " dev len Qdisc " \
              " netdevice free"
        for i in range(len(tx_free_list)):
            print_transmit(tx_free_list[i])
    if debug:
        print "debug buffer status"
        print "----------------------------"
        print "xmit Qdisc:remain:%d overflow:%d" % \
            (len(tx_queue_list), of_count_tx_queue_list)
        print "xmit netdevice:remain:%d overflow:%d" % \
            (len(tx_xmit_list), of_count_tx_xmit_list)
        print "receive:remain:%d overflow:%d" % \
            (len(rx_skb_list), of_count_rx_skb_list)
# called from perf, when it finds a corresponding event
def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, vec):
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)

def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, vec):
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)

def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, vec):
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)

def irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm,
                           irq, irq_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  irq, irq_name)
    all_event_list.append(event_info)

def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, irq, ret):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret)
    all_event_list.append(event_info)

def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, napi, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  napi, dev_name)
    all_event_list.append(event_info)

def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr,
                           skblen, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, dev_name)
    all_event_list.append(event_info)

def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, skbaddr,
                  skblen, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, dev_name)
    all_event_list.append(event_info)

def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm,
                       skbaddr, skblen, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, dev_name)
    all_event_list.append(event_info)

def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm,
                      skbaddr, skblen, rc, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, rc, dev_name)
    all_event_list.append(event_info)

def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
                   skbaddr, protocol, location):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, protocol, location)
    all_event_list.append(event_info)

def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr)
    all_event_list.append(event_info)

def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm,
                                 skbaddr, skblen):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen)
    all_event_list.append(event_info)

def handle_irq_handler_entry(event_info):
    (name, context, cpu, time, pid, comm, irq, irq_name) = event_info
    if cpu not in irq_dic.keys():
        irq_dic[cpu] = []
    irq_record = {'irq':irq, 'name':irq_name, 'cpu':cpu, 'irq_ent_t':time}
    irq_dic[cpu].append(irq_record)

def handle_irq_handler_exit(event_info):
    (name, context, cpu, time, pid, comm, irq, ret) = event_info
    if cpu not in irq_dic.keys():
        return
    irq_record = irq_dic[cpu].pop()
    if irq != irq_record['irq']:
        return
    irq_record.update({'irq_ext_t':time})
    # if an irq doesn't include NET_RX softirq, drop.
    if 'event_list' in irq_record.keys():
        irq_dic[cpu].append(irq_record)

def handle_irq_softirq_raise(event_info):
    (name, context, cpu, time, pid, comm, vec) = event_info
    if cpu not in irq_dic.keys() \
       or len(irq_dic[cpu]) == 0:
        return
    irq_record = irq_dic[cpu].pop()
    if 'event_list' in irq_record.keys():
        irq_event_list = irq_record['event_list']
    else:
        irq_event_list = []
    irq_event_list.append({'time':time, 'event':'sirq_raise'})
    irq_record.update({'event_list':irq_event_list})
    irq_dic[cpu].append(irq_record)

def handle_irq_softirq_entry(event_info):
    (name, context, cpu, time, pid, comm, vec) = event_info
    net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]}

def handle_irq_softirq_exit(event_info):
    (name, context, cpu, time, pid, comm, vec) = event_info
    irq_list = []
    event_list = 0
    if cpu in irq_dic.keys():
        irq_list = irq_dic[cpu]
        del irq_dic[cpu]
    if cpu in net_rx_dic.keys():
        sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t']
        event_list = net_rx_dic[cpu]['event_list']
        del net_rx_dic[cpu]
    if irq_list == [] or event_list == 0:
        return
    rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time,
                'irq_list':irq_list, 'event_list':event_list}
    # merge information related to a NET_RX softirq
    receive_hunk_list.append(rec_data)

def handle_napi_poll(event_info):
    (name, context, cpu, time, pid, comm, napi, dev_name) = event_info
    if cpu in net_rx_dic.keys():
        event_list = net_rx_dic[cpu]['event_list']
        rec_data = {'event_name':'napi_poll',
                    'dev':dev_name, 'event_t':time}
        event_list.append(rec_data)

def handle_netif_rx(event_info):
    (name, context, cpu, time, pid, comm,
     skbaddr, skblen, dev_name) = event_info
    if cpu not in irq_dic.keys() \
       or len(irq_dic[cpu]) == 0:
        return
    irq_record = irq_dic[cpu].pop()
    if 'event_list' in irq_record.keys():
        irq_event_list = irq_record['event_list']
    else:
        irq_event_list = []
    irq_event_list.append({'time':time, 'event':'netif_rx',
                           'skbaddr':skbaddr, 'skblen':skblen, 'dev_name':dev_name})
    irq_record.update({'event_list':irq_event_list})
    irq_dic[cpu].append(irq_record)

def handle_netif_receive_skb(event_info):
    global of_count_rx_skb_list

    (name, context, cpu, time, pid, comm,
     skbaddr, skblen, dev_name) = event_info
    if cpu in net_rx_dic.keys():
        rec_data = {'event_name':'netif_receive_skb',
                    'event_t':time, 'skbaddr':skbaddr, 'len':skblen}
        event_list = net_rx_dic[cpu]['event_list']
        event_list.append(rec_data)
        rx_skb_list.insert(0, rec_data)
        if len(rx_skb_list) > buffer_budget:
            rx_skb_list.pop()
            of_count_rx_skb_list += 1

def handle_net_dev_queue(event_info):
    global of_count_tx_queue_list

    (name, context, cpu, time, pid, comm,
     skbaddr, skblen, dev_name) = event_info
    skb = {'dev':dev_name, 'skbaddr':skbaddr, 'len':skblen, 'queue_t':time}
    tx_queue_list.insert(0, skb)
    if len(tx_queue_list) > buffer_budget:
        tx_queue_list.pop()
        of_count_tx_queue_list += 1

def handle_net_dev_xmit(event_info):
    global of_count_tx_xmit_list

    (name, context, cpu, time, pid, comm,
     skbaddr, skblen, rc, dev_name) = event_info
    if rc == 0: # NETDEV_TX_OK
        for i in range(len(tx_queue_list)):
            skb = tx_queue_list[i]
            if skb['skbaddr'] == skbaddr:
                skb['xmit_t'] = time
                tx_xmit_list.insert(0, skb)
                del tx_queue_list[i]
                if len(tx_xmit_list) > buffer_budget:
                    tx_xmit_list.pop()
                    of_count_tx_xmit_list += 1
                return

def handle_kfree_skb(event_info):
    (name, context, cpu, time, pid, comm,
     skbaddr, protocol, location) = event_info
    for i in range(len(tx_queue_list)):
        skb = tx_queue_list[i]
        if skb['skbaddr'] == skbaddr:
            del tx_queue_list[i]
            return
    for i in range(len(tx_xmit_list)):
        skb = tx_xmit_list[i]
        if skb['skbaddr'] == skbaddr:
            skb['free_t'] = time
            tx_free_list.append(skb)
            del tx_xmit_list[i]
            return
    for i in range(len(rx_skb_list)):
        rec_data = rx_skb_list[i]
        if rec_data['skbaddr'] == skbaddr:
            rec_data.update({'handle':"kfree_skb",
                             'comm':comm, 'pid':pid, 'comm_t':time})
            del rx_skb_list[i]
            return

def handle_consume_skb(event_info):
    (name, context, cpu, time, pid, comm, skbaddr) = event_info
    for i in range(len(tx_xmit_list)):
        skb = tx_xmit_list[i]
        if skb['skbaddr'] == skbaddr:
            skb['free_t'] = time
            tx_free_list.append(skb)
            del tx_xmit_list[i]
            return

def handle_skb_copy_datagram_iovec(event_info):
    (name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info
    for i in range(len(rx_skb_list)):
        rec_data = rx_skb_list[i]
        if skbaddr == rec_data['skbaddr']:
            rec_data.update({'handle':"skb_copy_datagram_iovec",
                             'comm':comm, 'pid':pid, 'comm_t':time})
            del rx_skb_list[i]
            return
| gpl-2.0 |
cesargtz/YecoraOdoo | addons/l10n_bo/__init__.py | 2120 | 1456 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2011 Cubic ERP - Teradata SAC. (http://cubicerp.com).
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
azumimuo/family-xbmc-addon | script.module.liveresolver/lib/liveresolver/resolvers/zoomtv.py | 1 | 1639 | # -*- coding: utf-8 -*-
import re,urlparse,urllib
from liveresolver.modules import client,decryptionUtils
from liveresolver.modules.log_utils import log
def resolve(url):
    try:
        referer = urlparse.parse_qs(urlparse.urlparse(url).query)['referer'][0]
        headers = {'referer': referer,
                   'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
                   'Content-Type': 'application/x-www-form-urlencoded',
                   'Connection': 'keep-alive',
                   'Host': 'www.zoomtv.me',
                   'Origin': urlparse.urlparse(referer).netloc,
                   'User-Agent': client.agent()
                   }

        fid = urlparse.parse_qs(urlparse.urlparse(url).query)['v'][0]
        pid = urlparse.parse_qs(urlparse.urlparse(url).query)['pid'][0]
        url = 'http://www.zoomtv.me/embed.php?v=%s&vw=660&vh=450' % fid
        page = url
        post_data = 'uagent=uagent&pid=' + pid
        result = client.request(url, post=post_data, headers=headers)
        result = decryptionUtils.doDemystify(result)
        var = re.compile('var\s(.+?)\s*=\s*\'(.+?)\'').findall(result)
        for v in var:
            if 'm3u8' in v[1]:
                m3u8 = v[1]
            if 'file' in v[1]:
                file = v[1]
        url = m3u8 + file
        url += '|%s' % urllib.urlencode({'User-Agent': client.agent(), 'Referer': page, 'X-Requested-With': 'ShockwaveFlash/20.0.0.286'})
        return url
    except:
        return
return | gpl-2.0 |
tarballs-are-good/sympy | sympy/physics/quantum/tests/test_matrixutils.py | 2 | 3418 | from sympy import Matrix, zeros, ones, Integer
from sympy.physics.quantum.matrixutils import (
    to_sympy, to_numpy, to_scipy_sparse, matrix_tensor_product,
    matrix_to_zero
)

m = Matrix([[1,2],[3,4]])

def test_sympy_to_sympy():
    assert to_sympy(m) == m

def test_matrix_to_zero():
    assert matrix_to_zero(m) == m
    assert matrix_to_zero(Matrix([[0,0],[0,0]])) == Integer(0)

try:
    import numpy as np
except ImportError:
    pass
else:
    def test_to_numpy():
        result = np.matrix([[1,2],[3,4]], dtype='complex')
        assert (to_numpy(m) == result).all()

    def test_matrix_tensor_product():
        l1 = zeros(4)
        for i in range(16):
            l1[i] = 2**i
        l2 = zeros(4)
        for i in range(16):
            l2[i] = i
        l3 = zeros(2)
        for i in range(4):
            l3[i] = i
        vec = Matrix([1,2,3])

        #test for Matrix known 4x4 matrices
        numpyl1 = np.matrix(l1.tolist())
        numpyl2 = np.matrix(l2.tolist())
        numpy_product = np.kron(numpyl1,numpyl2)
        args = [l1, l2]
        sympy_product = matrix_tensor_product(*args)
        assert numpy_product.tolist() == sympy_product.tolist()
        numpy_product = np.kron(numpyl2,numpyl1)
        args = [l2, l1]
        sympy_product = matrix_tensor_product(*args)
        assert numpy_product.tolist() == sympy_product.tolist()

        #test for other known matrix of different dimensions
        numpyl2 = np.matrix(l3.tolist())
        numpy_product = np.kron(numpyl1,numpyl2)
        args = [l1, l3]
        sympy_product = matrix_tensor_product(*args)
        assert numpy_product.tolist() == sympy_product.tolist()
        numpy_product = np.kron(numpyl2,numpyl1)
        args = [l3, l1]
        sympy_product = matrix_tensor_product(*args)
        assert numpy_product.tolist() == sympy_product.tolist()

        #test for non square matrix
        numpyl2 = np.matrix(vec.tolist())
        numpy_product = np.kron(numpyl1,numpyl2)
        args = [l1, vec]
        sympy_product = matrix_tensor_product(*args)
        assert numpy_product.tolist() == sympy_product.tolist()
        numpy_product = np.kron(numpyl2,numpyl1)
        args = [vec, l1]
        sympy_product = matrix_tensor_product(*args)
        assert numpy_product.tolist() == sympy_product.tolist()

        #test for random matrix with random values that are floats
        #(dimensions must be ints, so the random sizes are truncated)
        random_matrix1 = np.random.rand(int(np.random.rand()*5+1), int(np.random.rand()*5+1))
        random_matrix2 = np.random.rand(int(np.random.rand()*5+1), int(np.random.rand()*5+1))
        numpy_product = np.kron(random_matrix1,random_matrix2)
        args = [Matrix(random_matrix1.tolist()),Matrix(random_matrix2.tolist())]
        sympy_product = matrix_tensor_product(*args)
        assert not (sympy_product - Matrix(numpy_product.tolist())).tolist() > \
            (ones((sympy_product.rows,sympy_product.cols))*epsilon).tolist()

        #test for three matrix kronecker
        sympy_product = matrix_tensor_product(l1,vec,l2)
        numpy_product = np.kron(l1,np.kron(vec,l2))
        assert numpy_product.tolist() == sympy_product.tolist()

try:
    import numpy as np
    from scipy import sparse
except ImportError:
    pass
else:
    def test_to_scipy_sparse():
        result = sparse.csr_matrix([[1,2],[3,4]], dtype='complex')
        assert np.linalg.norm((to_scipy_sparse(m) - result).todense()) == 0.0

epsilon = .000001
| bsd-3-clause |
ah391/sc-python | datalook.py | 1 | 1722 |
# coding: utf-8
import sys
import numpy
import matplotlib.pyplot
def analyse(filename, outfile=None):
"""Load data and create plots.
Subplots with placeholders, with set lables, layout tight
"""
data = numpy.loadtxt(fname=filename, delimiter=',')
# Create a wide figure to hold the subplots
fig = matplotlib.pyplot.figure(figsize=(10.3, 3.0))
# create placeholders for plots
subplot1 = fig.add_subplot (1,3,1)
subplot2 = fig.add_subplot (1,3,2)
subplot3 = fig.add_subplot (1,3,3)
subplot1.set_ylabel('average')
subplot1.plot(numpy.mean(data, axis=0))
subplot2.set_ylabel('maximum')
subplot2.plot(numpy.max(data, axis=0))
subplot3.set_ylabel('minimum')
subplot3.plot(numpy.min(data, axis=0))
fig.tight_layout()
if outfile is None:
matplotlib.pyplot.show()
else:
matplotlib.pyplot.savefig(outfile)
def detect_problems(filename):
"""Some of our temperature files have problems, check for these
This function reads a file (filename argument) and reports on odd looking maxima and minima that add up to 0.
This seems to happen when the sensors break.
The function does not return any data.
"""
data = numpy.loadtxt(fname=filename, delimiter=',')
if numpy.max(data, axis=0)[0] ==0 and numpy.max(data, axis=0)[20]==20:
print("Suspicious looking maxima")
elif numpy.sum(numpy.min(data, axis=0))==0:
print("Minima add up to zero")
else:
print("Data looks OK")
if __name__ == "__main__":
print("Running",sys.argv[0])
print(sys.argv[1])
analyse(sys.argv[1], outfile=sys.argv[2])
detect_problems(sys.argv[1])
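# Example invocation (hypothetical file names):
#   python datalook.py inflammation-01.csv plot.png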
| mit |
halfcrazy/sqlalchemy | test/orm/inheritance/test_basic.py | 20 | 85967 | import warnings
from sqlalchemy.testing import eq_, assert_raises, assert_raises_message
from sqlalchemy import *
from sqlalchemy import exc as sa_exc, util, event
from sqlalchemy.orm import *
from sqlalchemy.orm.util import instance_str
from sqlalchemy.orm import exc as orm_exc, attributes
from sqlalchemy.testing.assertsql import AllOf, CompiledSQL, Or
from sqlalchemy.sql import table, column
from sqlalchemy import testing
from sqlalchemy.testing import engines
from sqlalchemy.testing import fixtures
from test.orm import _fixtures
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy import inspect
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.testing.util import gc_collect
class O2MTest(fixtures.MappedTest):
"""deals with inheritance and one-to-many relationships"""
@classmethod
def define_tables(cls, metadata):
global foo, bar, blub
foo = Table('foo', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('data', String(20)))
bar = Table('bar', metadata,
Column('id', Integer, ForeignKey('foo.id'), primary_key=True),
Column('bar_data', String(20)))
blub = Table('blub', metadata,
Column('id', Integer, ForeignKey('bar.id'), primary_key=True),
Column('foo_id', Integer, ForeignKey('foo.id'), nullable=False),
Column('blub_data', String(20)))
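# Schema summary: "foo" is the base table; "bar" extends it joined-table
# style via a shared primary key; "blub" extends "bar" the same way and
# additionally carries its own foreign key back to "foo", used by the
# many-to-one "parent_foo" relationship mapped below.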
def test_basic(self):
class Foo(object):
def __init__(self, data=None):
self.data = data
def __repr__(self):
return "Foo id %d, data %s" % (self.id, self.data)
mapper(Foo, foo)
class Bar(Foo):
def __repr__(self):
return "Bar id %d, data %s" % (self.id, self.data)
mapper(Bar, bar, inherits=Foo)
class Blub(Bar):
def __repr__(self):
return "Blub id %d, data %s" % (self.id, self.data)
mapper(Blub, blub, inherits=Bar, properties={
'parent_foo':relationship(Foo)
})
sess = create_session()
b1 = Blub("blub #1")
b2 = Blub("blub #2")
f = Foo("foo #1")
sess.add(b1)
sess.add(b2)
sess.add(f)
b1.parent_foo = f
b2.parent_foo = f
sess.flush()
compare = ','.join([repr(b1), repr(b2), repr(b1.parent_foo),
repr(b2.parent_foo)])
sess.expunge_all()
l = sess.query(Blub).all()
result = ','.join([repr(l[0]), repr(l[1]),
repr(l[0].parent_foo), repr(l[1].parent_foo)])
eq_(compare, result)
eq_(l[0].parent_foo.data, 'foo #1')
eq_(l[1].parent_foo.data, 'foo #1')
class PolymorphicResolutionMultiLevel(fixtures.DeclarativeMappedTest,
testing.AssertsCompiledSQL):
run_setup_mappers = 'once'
__dialect__ = 'default'
@classmethod
def setup_classes(cls):
Base = cls.DeclarativeBasic
class A(Base):
__tablename__ = 'a'
id = Column(Integer, primary_key=True)
class B(A):
__tablename__ = 'b'
id = Column(Integer, ForeignKey('a.id'), primary_key=True)
class C(A):
__tablename__ = 'c'
id = Column(Integer, ForeignKey('a.id'), primary_key=True)
class D(B):
__tablename__ = 'd'
id = Column(Integer, ForeignKey('b.id'), primary_key=True)
def test_ordered_b_d(self):
a_mapper = inspect(self.classes.A)
eq_(
a_mapper._mappers_from_spec(
[self.classes.B, self.classes.D], None),
[a_mapper, inspect(self.classes.B), inspect(self.classes.D)]
)
def test_a(self):
a_mapper = inspect(self.classes.A)
eq_(
a_mapper._mappers_from_spec(
[self.classes.A], None),
[a_mapper]
)
def test_b_d_selectable(self):
a_mapper = inspect(self.classes.A)
spec = [self.classes.D, self.classes.B]
eq_(
a_mapper._mappers_from_spec(
spec,
self.classes.B.__table__.join(self.classes.D.__table__)
),
[inspect(self.classes.B), inspect(self.classes.D)]
)
def test_d_selectable(self):
a_mapper = inspect(self.classes.A)
spec = [self.classes.D]
eq_(
a_mapper._mappers_from_spec(
spec,
self.classes.B.__table__.join(self.classes.D.__table__)
),
[inspect(self.classes.D)]
)
def test_reverse_d_b(self):
a_mapper = inspect(self.classes.A)
spec = [self.classes.D, self.classes.B]
eq_(
a_mapper._mappers_from_spec(
spec, None),
[a_mapper, inspect(self.classes.B), inspect(self.classes.D)]
)
mappers, selectable = a_mapper._with_polymorphic_args(spec=spec)
self.assert_compile(selectable,
"a LEFT OUTER JOIN b ON a.id = b.id "
"LEFT OUTER JOIN d ON b.id = d.id")
def test_d_b_missing(self):
a_mapper = inspect(self.classes.A)
spec = [self.classes.D]
eq_(
a_mapper._mappers_from_spec(
spec, None),
[a_mapper, inspect(self.classes.B), inspect(self.classes.D)]
)
mappers, selectable = a_mapper._with_polymorphic_args(spec=spec)
self.assert_compile(selectable,
"a LEFT OUTER JOIN b ON a.id = b.id "
"LEFT OUTER JOIN d ON b.id = d.id")
def test_d_c_b(self):
a_mapper = inspect(self.classes.A)
spec = [self.classes.D, self.classes.C, self.classes.B]
ms = a_mapper._mappers_from_spec(spec, None)
eq_(
ms[-1], inspect(self.classes.D)
)
eq_(ms[0], a_mapper)
eq_(
set(ms[1:3]), set(a_mapper._inheriting_mappers)
)
class PolymorphicOnNotLocalTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
t1 = Table('t1', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('x', String(10)),
Column('q', String(10)))
t2 = Table('t2', metadata,
Column('t2id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('y', String(10)),
Column('xid', ForeignKey('t1.id')))
@classmethod
def setup_classes(cls):
class Parent(cls.Comparable):
pass
class Child(Parent):
pass
def test_non_col_polymorphic_on(self):
Parent = self.classes.Parent
t2 = self.tables.t2
assert_raises_message(
sa_exc.ArgumentError,
"Can't determine polymorphic_on "
"value 'im not a column' - no "
"attribute is mapped to this name.",
mapper,
Parent, t2, polymorphic_on="im not a column"
)
def test_polymorphic_on_non_expr_prop(self):
t2, t1 = self.tables.t2, self.tables.t1
Parent = self.classes.Parent
t1t2_join = select([t1.c.x], from_obj=[t1.join(t2)]).alias()
def go():
interface_m = mapper(Parent, t2,
polymorphic_on=lambda:"hi",
polymorphic_identity=0)
assert_raises_message(
sa_exc.ArgumentError,
"Only direct column-mapped property or "
"SQL expression can be passed for polymorphic_on",
go
)
def test_polymorphic_on_not_present_col(self):
t2, t1 = self.tables.t2, self.tables.t1
Parent = self.classes.Parent
t1t2_join = select([t1.c.x], from_obj=[t1.join(t2)]).alias()
def go():
t1t2_join_2 = select([t1.c.q], from_obj=[t1.join(t2)]).alias()
interface_m = mapper(Parent, t2,
polymorphic_on=t1t2_join.c.x,
with_polymorphic=('*', t1t2_join_2),
polymorphic_identity=0)
assert_raises_message(
sa_exc.InvalidRequestError,
"Could not map polymorphic_on column 'x' to the mapped table - "
"polymorphic loads will not function properly",
go
)
def test_polymorphic_on_only_in_with_poly(self):
t2, t1 = self.tables.t2, self.tables.t1
Parent = self.classes.Parent
t1t2_join = select([t1.c.x], from_obj=[t1.join(t2)]).alias()
# if its in the with_polymorphic, then its OK
mapper(Parent, t2,
polymorphic_on=t1t2_join.c.x,
with_polymorphic=('*', t1t2_join),
polymorphic_identity=0)
def test_polymorphic_on_not_in_with_poly(self):
t2, t1 = self.tables.t2, self.tables.t1
Parent = self.classes.Parent
t1t2_join = select([t1.c.x], from_obj=[t1.join(t2)]).alias()
# if with_polymorphic, but its not present, not OK
def go():
t1t2_join_2 = select([t1.c.q], from_obj=[t1.join(t2)]).alias()
interface_m = mapper(Parent, t2,
polymorphic_on=t1t2_join.c.x,
with_polymorphic=('*', t1t2_join_2),
polymorphic_identity=0)
assert_raises_message(
sa_exc.InvalidRequestError,
"Could not map polymorphic_on column 'x' "
"to the mapped table - "
"polymorphic loads will not function properly",
go
)
def test_polymorphic_on_expr_explicit_map(self):
t2, t1 = self.tables.t2, self.tables.t1
Parent, Child = self.classes.Parent, self.classes.Child
expr = case([
(t1.c.x=="p", "parent"),
(t1.c.x=="c", "child"),
])
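# the standalone case() expression maps the stored 'p'/'c' values in t1.x
# to the polymorphic identities 'parent'/'child'; mapping it both as a
# column_property and as polymorphic_on lets the discriminator be an
# expression rather than a plain table column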
mapper(Parent, t1, properties={
"discriminator":column_property(expr)
}, polymorphic_identity="parent",
polymorphic_on=expr)
mapper(Child, t2, inherits=Parent,
polymorphic_identity="child")
self._roundtrip(parent_ident='p', child_ident='c')
def test_polymorphic_on_expr_implicit_map_no_label_joined(self):
t2, t1 = self.tables.t2, self.tables.t1
Parent, Child = self.classes.Parent, self.classes.Child
expr = case([
(t1.c.x=="p", "parent"),
(t1.c.x=="c", "child"),
])
mapper(Parent, t1, polymorphic_identity="parent",
polymorphic_on=expr)
mapper(Child, t2, inherits=Parent, polymorphic_identity="child")
self._roundtrip(parent_ident='p', child_ident='c')
def test_polymorphic_on_expr_implicit_map_w_label_joined(self):
t2, t1 = self.tables.t2, self.tables.t1
Parent, Child = self.classes.Parent, self.classes.Child
expr = case([
(t1.c.x=="p", "parent"),
(t1.c.x=="c", "child"),
]).label(None)
mapper(Parent, t1, polymorphic_identity="parent",
polymorphic_on=expr)
mapper(Child, t2, inherits=Parent, polymorphic_identity="child")
self._roundtrip(parent_ident='p', child_ident='c')
def test_polymorphic_on_expr_implicit_map_no_label_single(self):
"""test that single_table_criterion is propagated
with a standalone expr"""
t2, t1 = self.tables.t2, self.tables.t1
Parent, Child = self.classes.Parent, self.classes.Child
expr = case([
(t1.c.x=="p", "parent"),
(t1.c.x=="c", "child"),
])
mapper(Parent, t1, polymorphic_identity="parent",
polymorphic_on=expr)
mapper(Child, inherits=Parent, polymorphic_identity="child")
self._roundtrip(parent_ident='p', child_ident='c')
def test_polymorphic_on_expr_implicit_map_w_label_single(self):
"""test that single_table_criterion is propagated
with a standalone expr"""
t2, t1 = self.tables.t2, self.tables.t1
Parent, Child = self.classes.Parent, self.classes.Child
expr = case([
(t1.c.x=="p", "parent"),
(t1.c.x=="c", "child"),
]).label(None)
mapper(Parent, t1, polymorphic_identity="parent",
polymorphic_on=expr)
mapper(Child, inherits=Parent, polymorphic_identity="child")
self._roundtrip(parent_ident='p', child_ident='c')
def test_polymorphic_on_column_prop(self):
t2, t1 = self.tables.t2, self.tables.t1
Parent, Child = self.classes.Parent, self.classes.Child
expr = case([
(t1.c.x=="p", "parent"),
(t1.c.x=="c", "child"),
])
cprop = column_property(expr)
mapper(Parent, t1, properties={
"discriminator":cprop
}, polymorphic_identity="parent",
polymorphic_on=cprop)
mapper(Child, t2, inherits=Parent,
polymorphic_identity="child")
self._roundtrip(parent_ident='p', child_ident='c')
def test_polymorphic_on_column_str_prop(self):
t2, t1 = self.tables.t2, self.tables.t1
Parent, Child = self.classes.Parent, self.classes.Child
expr = case([
(t1.c.x=="p", "parent"),
(t1.c.x=="c", "child"),
])
cprop = column_property(expr)
mapper(Parent, t1, properties={
"discriminator":cprop
}, polymorphic_identity="parent",
polymorphic_on="discriminator")
mapper(Child, t2, inherits=Parent,
polymorphic_identity="child")
self._roundtrip(parent_ident='p', child_ident='c')
def test_polymorphic_on_synonym(self):
t2, t1 = self.tables.t2, self.tables.t1
Parent, Child = self.classes.Parent, self.classes.Child
cprop = column_property(t1.c.x)
assert_raises_message(
sa_exc.ArgumentError,
"Only direct column-mapped property or "
"SQL expression can be passed for polymorphic_on",
mapper, Parent, t1, properties={
"discriminator":cprop,
"discrim_syn":synonym(cprop)
}, polymorphic_identity="parent",
polymorphic_on="discrim_syn")
def _roundtrip(self, set_event=True, parent_ident='parent', child_ident='child'):
Parent, Child = self.classes.Parent, self.classes.Child
if set_event:
@event.listens_for(Parent, "init", propagate=True)
def set_identity(instance, *arg, **kw):
ident = object_mapper(instance).polymorphic_identity
if ident == 'parent':
instance.x = parent_ident
elif ident == 'child':
instance.x = child_ident
else:
assert False, "Got unexpected identity %r" % ident
s = Session(testing.db)
s.add_all([
Parent(q="p1"),
Child(q="c1", y="c1"),
Parent(q="p2"),
])
s.commit()
s.close()
eq_(
[type(t) for t in s.query(Parent).order_by(Parent.id)],
[Parent, Child, Parent]
)
eq_(
[type(t) for t in s.query(Child).all()],
[Child]
)
class SortOnlyOnImportantFKsTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('a', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('b_id', Integer,
ForeignKey('b.id', use_alter=True, name='b'))
)
Table('b', metadata,
Column('id', Integer, ForeignKey('a.id'), primary_key=True)
)
@classmethod
def setup_classes(cls):
Base = declarative_base()
class A(Base):
__tablename__ = "a"
id = Column(Integer, primary_key=True,
test_needs_autoincrement=True)
b_id = Column(Integer, ForeignKey('b.id'))
class B(A):
__tablename__ = "b"
id = Column(Integer, ForeignKey('a.id'), primary_key=True)
__mapper_args__ = {'inherit_condition': id == A.id}
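# the explicit inherit_condition is needed because there are two candidate
# join paths between "a" and "b" (a.b_id -> b.id as well as b.id -> a.id);
# only the latter is the inheritance join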
cls.classes.A = A
cls.classes.B = B
def test_flush(self):
s = Session(testing.db)
s.add(self.classes.B())
s.flush()
class FalseDiscriminatorTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global t1
t1 = Table('t1', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('type', Boolean, nullable=False))
def test_false_on_sub(self):
class Foo(object):
pass
class Bar(Foo):
pass
mapper(Foo, t1, polymorphic_on=t1.c.type, polymorphic_identity=True)
mapper(Bar, inherits=Foo, polymorphic_identity=False)
sess = create_session()
b1 = Bar()
sess.add(b1)
sess.flush()
assert b1.type is False
sess.expunge_all()
assert isinstance(sess.query(Foo).one(), Bar)
def test_false_on_base(self):
class Ding(object):pass
class Bat(Ding):pass
mapper(Ding, t1, polymorphic_on=t1.c.type, polymorphic_identity=False)
mapper(Bat, inherits=Ding, polymorphic_identity=True)
sess = create_session()
d1 = Ding()
sess.add(d1)
sess.flush()
assert d1.type is False
sess.expunge_all()
assert sess.query(Ding).one() is not None
class PolymorphicSynonymTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global t1, t2
t1 = Table('t1', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('type', String(10), nullable=False),
Column('info', String(255)))
t2 = Table('t2', metadata,
Column('id', Integer, ForeignKey('t1.id'),
primary_key=True),
Column('data', String(10), nullable=False))
def test_polymorphic_synonym(self):
class T1(fixtures.ComparableEntity):
def info(self):
return "THE INFO IS:" + self._info
def _set_info(self, x):
self._info = x
info = property(info, _set_info)
class T2(T1):pass
mapper(T1, t1, polymorphic_on=t1.c.type, polymorphic_identity='t1',
properties={
'info':synonym('_info', map_column=True)
})
mapper(T2, t2, inherits=T1, polymorphic_identity='t2')
sess = create_session()
at1 = T1(info='at1')
at2 = T2(info='at2', data='t2 data')
sess.add(at1)
sess.add(at2)
sess.flush()
sess.expunge_all()
eq_(sess.query(T2).filter(T2.info=='at2').one(), at2)
eq_(at2.info, "THE INFO IS:at2")
class PolymorphicAttributeManagementTest(fixtures.MappedTest):
"""Test polymorphic_on can be assigned, can be mirrored, etc."""
run_setup_mappers = 'once'
@classmethod
def define_tables(cls, metadata):
Table('table_a', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('class_name', String(50))
)
Table('table_b', metadata,
Column('id', Integer, ForeignKey('table_a.id'),
primary_key=True),
Column('class_name', String(50)),
)
Table('table_c', metadata,
Column('id', Integer, ForeignKey('table_b.id'),
primary_key=True),
Column('data', String(10))
)
@classmethod
def setup_classes(cls):
table_b, table_c, table_a = (cls.tables.table_b,
cls.tables.table_c,
cls.tables.table_a)
class A(cls.Basic):
pass
class B(A):
pass
class C(B):
pass
class D(B):
pass
mapper(A, table_a,
polymorphic_on=table_a.c.class_name,
polymorphic_identity='a')
mapper(B, table_b, inherits=A,
polymorphic_on=table_b.c.class_name,
polymorphic_identity='b',
properties=dict(class_name=[table_a.c.class_name, table_b.c.class_name]))
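# mapping class_name to both table_a.class_name and table_b.class_name
# keeps the two discriminator columns mirrored whenever one is assigned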
mapper(C, table_c, inherits=B,
polymorphic_identity='c')
mapper(D, inherits=B,
polymorphic_identity='d')
def test_poly_configured_immediate(self):
A, C, B = (self.classes.A,
self.classes.C,
self.classes.B)
a = A()
b = B()
c = C()
eq_(a.class_name, 'a')
eq_(b.class_name, 'b')
eq_(c.class_name, 'c')
def test_base_class(self):
A, C, B = (self.classes.A,
self.classes.C,
self.classes.B)
sess = Session()
c1 = C()
sess.add(c1)
sess.commit()
assert isinstance(sess.query(B).first(), C)
sess.close()
assert isinstance(sess.query(A).first(), C)
def test_valid_assignment_upwards(self):
"""test that we can assign 'd' to a B, since B/D
both involve the same set of tables.
"""
D, B = self.classes.D, self.classes.B
sess = Session()
b1 = B()
b1.class_name = 'd'
sess.add(b1)
sess.commit()
sess.close()
assert isinstance(sess.query(B).first(), D)
def test_invalid_assignment_downwards(self):
"""test that we warn on assign of 'b' to a C, since this adds
a row to the C table we'd never load.
"""
C = self.classes.C
sess = Session()
c1 = C()
c1.class_name = 'b'
sess.add(c1)
assert_raises_message(
sa_exc.SAWarning,
"Flushing object %s with incompatible "
"polymorphic identity 'b'; the object may not "
"refresh and/or load correctly" % instance_str(c1),
sess.flush
)
def test_invalid_assignment_upwards(self):
"""test that we warn on assign of 'c' to a B, since we will have a
"C" row that has no joined row, which will cause object
deleted errors.
"""
B = self.classes.B
sess = Session()
b1 = B()
b1.class_name = 'c'
sess.add(b1)
assert_raises_message(
sa_exc.SAWarning,
"Flushing object %s with incompatible "
"polymorphic identity 'c'; the object may not "
"refresh and/or load correctly" % instance_str(b1),
sess.flush
)
def test_entirely_oob_assignment(self):
"""test warn on an unknown polymorphic identity.
"""
B = self.classes.B
sess = Session()
b1 = B()
b1.class_name = 'xyz'
sess.add(b1)
assert_raises_message(
sa_exc.SAWarning,
"Flushing object %s with incompatible "
"polymorphic identity 'xyz'; the object may not "
"refresh and/or load correctly" % instance_str(b1),
sess.flush
)
def test_not_set_on_update(self):
C = self.classes.C
sess = Session()
c1 = C()
sess.add(c1)
sess.commit()
sess.expire(c1)
c1.data = 'foo'
sess.flush()
def test_validate_on_update(self):
C = self.classes.C
sess = Session()
c1 = C()
sess.add(c1)
sess.commit()
sess.expire(c1)
c1.class_name = 'b'
assert_raises_message(
sa_exc.SAWarning,
"Flushing object %s with incompatible "
"polymorphic identity 'b'; the object may not "
"refresh and/or load correctly" % instance_str(c1),
sess.flush
)
class CascadeTest(fixtures.MappedTest):
"""that cascades on polymorphic relationships continue
cascading along the path of the instance's mapper, not
the base mapper."""
@classmethod
def define_tables(cls, metadata):
global t1, t2, t3, t4
t1= Table('t1', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('data', String(30))
)
t2 = Table('t2', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('t1id', Integer, ForeignKey('t1.id')),
Column('type', String(30)),
Column('data', String(30))
)
t3 = Table('t3', metadata,
Column('id', Integer, ForeignKey('t2.id'),
primary_key=True),
Column('moredata', String(30)))
t4 = Table('t4', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('t3id', Integer, ForeignKey('t3.id')),
Column('data', String(30)))
def test_cascade(self):
class T1(fixtures.BasicEntity):
pass
class T2(fixtures.BasicEntity):
pass
class T3(T2):
pass
class T4(fixtures.BasicEntity):
pass
mapper(T1, t1, properties={
't2s':relationship(T2, cascade="all")
})
mapper(T2, t2, polymorphic_on=t2.c.type, polymorphic_identity='t2')
mapper(T3, t3, inherits=T2, polymorphic_identity='t3', properties={
't4s':relationship(T4, cascade="all")
})
mapper(T4, t4)
sess = create_session()
t1_1 = T1(data='t1')
t3_1 = T3(data ='t3', moredata='t3')
t2_1 = T2(data='t2')
t1_1.t2s.append(t2_1)
t1_1.t2s.append(t3_1)
t4_1 = T4(data='t4')
t3_1.t4s.append(t4_1)
sess.add(t1_1)
assert t4_1 in sess.new
sess.flush()
sess.delete(t1_1)
assert t4_1 in sess.deleted
sess.flush()
class M2OUseGetTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('base', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('type', String(30))
)
Table('sub', metadata,
Column('id', Integer, ForeignKey('base.id'), primary_key=True),
)
Table('related', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('sub_id', Integer, ForeignKey('sub.id')),
)
def test_use_get(self):
base, sub, related = (self.tables.base,
self.tables.sub,
self.tables.related)
# test [ticket:1186]
class Base(fixtures.BasicEntity):
pass
class Sub(Base):
pass
class Related(Base):
pass
mapper(Base, base, polymorphic_on=base.c.type, polymorphic_identity='b')
mapper(Sub, sub, inherits=Base, polymorphic_identity='s')
mapper(Related, related, properties={
# previously, an explicit configuration was needed for the comparison
# to occur: the 'primaryjoin' looks just like "Sub"'s "get" clause
# (based on the Base id), and foreign_keys was required since that join
# condition doesn't actually have any fks in it:
#'sub':relationship(Sub, primaryjoin=base.c.id==related.c.sub_id, foreign_keys=related.c.sub_id)
# now we can use this:
'sub':relationship(Sub)
})
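# with the simple form, the lazy loader can use the identity-map "get"
# optimization; the use_get flag and the zero-SQL assertion below verify it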
assert class_mapper(Related).get_property('sub').strategy.use_get
sess = create_session()
s1 = Sub()
r1 = Related(sub=s1)
sess.add(r1)
sess.flush()
sess.expunge_all()
r1 = sess.query(Related).first()
s1 = sess.query(Sub).first()
def go():
assert r1.sub
self.assert_sql_count(testing.db, go, 0)
class GetTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
global foo, bar, blub
foo = Table('foo', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('type', String(30)),
Column('data', String(20)))
bar = Table('bar', metadata,
Column('id', Integer, ForeignKey('foo.id'), primary_key=True),
Column('bar_data', String(20)))
blub = Table('blub', metadata,
Column('blub_id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('foo_id', Integer, ForeignKey('foo.id')),
Column('bar_id', Integer, ForeignKey('bar.id')),
Column('blub_data', String(20)))
@classmethod
def setup_classes(cls):
class Foo(cls.Basic):
pass
class Bar(Foo):
pass
class Blub(Bar):
pass
def test_get_polymorphic(self):
self._do_get_test(True)
def test_get_nonpolymorphic(self):
self._do_get_test(False)
def _do_get_test(self, polymorphic):
foo, Bar, Blub, blub, bar, Foo = (self.tables.foo,
self.classes.Bar,
self.classes.Blub,
self.tables.blub,
self.tables.bar,
self.classes.Foo)
if polymorphic:
mapper(Foo, foo, polymorphic_on=foo.c.type, polymorphic_identity='foo')
mapper(Bar, bar, inherits=Foo, polymorphic_identity='bar')
mapper(Blub, blub, inherits=Bar, polymorphic_identity='blub')
else:
mapper(Foo, foo)
mapper(Bar, bar, inherits=Foo)
mapper(Blub, blub, inherits=Bar)
sess = create_session()
f = Foo()
b = Bar()
bl = Blub()
sess.add(f)
sess.add(b)
sess.add(bl)
sess.flush()
if polymorphic:
def go():
assert sess.query(Foo).get(f.id) is f
assert sess.query(Foo).get(b.id) is b
assert sess.query(Foo).get(bl.id) is bl
assert sess.query(Bar).get(b.id) is b
assert sess.query(Bar).get(bl.id) is bl
assert sess.query(Blub).get(bl.id) is bl
# test class mismatches - item is present
# in the identity map but we requested a subclass
assert sess.query(Blub).get(f.id) is None
assert sess.query(Blub).get(b.id) is None
assert sess.query(Bar).get(f.id) is None
self.assert_sql_count(testing.db, go, 0)
else:
# this is testing the 'wrong' behavior of using get()
# polymorphically with mappers that are not configured to be
# polymorphic. the important part being that get() always
# returns an instance of the query's type.
def go():
assert sess.query(Foo).get(f.id) is f
bb = sess.query(Foo).get(b.id)
assert isinstance(bb, Foo) and bb.id == b.id
bll = sess.query(Foo).get(bl.id)
assert isinstance(bll, Foo) and bll.id==bl.id
assert sess.query(Bar).get(b.id) is b
bll = sess.query(Bar).get(bl.id)
assert isinstance(bll, Bar) and bll.id == bl.id
assert sess.query(Blub).get(bl.id) is bl
self.assert_sql_count(testing.db, go, 3)
class EagerLazyTest(fixtures.MappedTest):
"""tests eager load/lazy load of child items off inheritance mappers, tests that
LazyLoader constructs the right query condition."""
@classmethod
def define_tables(cls, metadata):
global foo, bar, bar_foo
foo = Table('foo', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('data', String(30)))
bar = Table('bar', metadata,
Column('id', Integer, ForeignKey('foo.id'), primary_key=True),
Column('bar_data', String(30)))
bar_foo = Table('bar_foo', metadata,
Column('bar_id', Integer, ForeignKey('bar.id')),
Column('foo_id', Integer, ForeignKey('foo.id'))
)
def test_basic(self):
class Foo(object): pass
class Bar(Foo): pass
foos = mapper(Foo, foo)
bars = mapper(Bar, bar, inherits=foos)
bars.add_property('lazy', relationship(foos, bar_foo, lazy='select'))
bars.add_property('eager', relationship(foos, bar_foo, lazy='joined'))
foo.insert().execute(data='foo1')
bar.insert().execute(id=1, data='bar1')
foo.insert().execute(data='foo2')
bar.insert().execute(id=2, data='bar2')
foo.insert().execute(data='foo3') #3
foo.insert().execute(data='foo4') #4
bar_foo.insert().execute(bar_id=1, foo_id=3)
bar_foo.insert().execute(bar_id=2, foo_id=4)
sess = create_session()
q = sess.query(Bar)
self.assert_(len(q.first().lazy) == 1)
self.assert_(len(q.first().eager) == 1)
class EagerTargetingTest(fixtures.MappedTest):
"""test a scenario where joined table inheritance might be
confused as an eagerly loaded joined table."""
@classmethod
def define_tables(cls, metadata):
Table('a_table', metadata,
Column('id', Integer, primary_key=True),
Column('name', String(50)),
Column('type', String(30), nullable=False),
Column('parent_id', Integer, ForeignKey('a_table.id'))
)
Table('b_table', metadata,
Column('id', Integer, ForeignKey('a_table.id'), primary_key=True),
Column('b_data', String(50)),
)
def test_adapt_stringency(self):
b_table, a_table = self.tables.b_table, self.tables.a_table
class A(fixtures.ComparableEntity):
pass
class B(A):
pass
mapper(A, a_table, polymorphic_on=a_table.c.type, polymorphic_identity='A',
properties={
'children': relationship(A, order_by=a_table.c.name)
})
mapper(B, b_table, inherits=A, polymorphic_identity='B', properties={
'b_derived':column_property(b_table.c.b_data + "DATA")
})
sess=create_session()
b1=B(id=1, name='b1',b_data='i')
sess.add(b1)
sess.flush()
b2=B(id=2, name='b2', b_data='l', parent_id=1)
sess.add(b2)
sess.flush()
bid=b1.id
sess.expunge_all()
node = sess.query(B).filter(B.id==bid).all()[0]
eq_(node, B(id=1, name='b1',b_data='i'))
eq_(node.children[0], B(id=2, name='b2',b_data='l'))
sess.expunge_all()
node = sess.query(B).options(joinedload(B.children)).filter(B.id==bid).all()[0]
eq_(node, B(id=1, name='b1',b_data='i'))
eq_(node.children[0], B(id=2, name='b2',b_data='l'))
class FlushTest(fixtures.MappedTest):
"""test dependency sorting among inheriting mappers"""
@classmethod
def define_tables(cls, metadata):
Table('users', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('email', String(128)),
Column('password', String(16)),
)
Table('roles', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('description', String(32))
)
Table('user_roles', metadata,
Column('user_id', Integer, ForeignKey('users.id'), primary_key=True),
Column('role_id', Integer, ForeignKey('roles.id'), primary_key=True)
)
Table('admins', metadata,
Column('admin_id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('user_id', Integer, ForeignKey('users.id'))
)
def test_one(self):
admins, users, roles, user_roles = (self.tables.admins,
self.tables.users,
self.tables.roles,
self.tables.user_roles)
class User(object):pass
class Role(object):pass
class Admin(User):pass
role_mapper = mapper(Role, roles)
user_mapper = mapper(User, users, properties = {
'roles' : relationship(Role, secondary=user_roles, lazy='joined')
}
)
admin_mapper = mapper(Admin, admins, inherits=user_mapper)
sess = create_session()
adminrole = Role()
sess.add(adminrole)
sess.flush()
# create an Admin, and append a Role. the dependency processors
# corresponding to the "roles" attribute for the Admin mapper and the
# User mapper both apply here; they have to coordinate so that the
# many-to-many row doesn't get inserted twice.
a = Admin()
a.roles.append(adminrole)
a.password = 'admin'
sess.add(a)
sess.flush()
assert user_roles.count().scalar() == 1
def test_two(self):
admins, users, roles, user_roles = (self.tables.admins,
self.tables.users,
self.tables.roles,
self.tables.user_roles)
class User(object):
def __init__(self, email=None, password=None):
self.email = email
self.password = password
class Role(object):
def __init__(self, description=None):
self.description = description
class Admin(User):pass
role_mapper = mapper(Role, roles)
user_mapper = mapper(User, users, properties = {
'roles' : relationship(Role, secondary=user_roles, lazy='joined')
}
)
admin_mapper = mapper(Admin, admins, inherits=user_mapper)
# create roles
adminrole = Role('admin')
sess = create_session()
sess.add(adminrole)
sess.flush()
# create admin user
a = Admin(email='tim', password='admin')
a.roles.append(adminrole)
sess.add(a)
sess.flush()
a.password = 'sadmin'
sess.flush()
assert user_roles.count().scalar() == 1
class OptimizedGetOnDeferredTest(fixtures.MappedTest):
"""test that the 'optimized get' path accommodates deferred columns."""
@classmethod
def define_tables(cls, metadata):
Table(
"a", metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True)
)
Table(
"b", metadata,
Column('id', Integer, ForeignKey('a.id'), primary_key=True),
Column('data', String(10))
)
@classmethod
def setup_classes(cls):
class A(cls.Basic):
pass
class B(A):
pass
@classmethod
def setup_mappers(cls):
A, B = cls.classes("A", "B")
a, b = cls.tables("a", "b")
mapper(A, a)
mapper(B, b, inherits=A, properties={
'data': deferred(b.c.data),
'expr': column_property(b.c.data + 'q', deferred=True)
})
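# "data" is a plain deferred column and "expr" a deferred column_property;
# the "optimized get" path must still be able to fetch both on first
# access, which the two tests below exercise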
def test_column_property(self):
A, B = self.classes("A", "B")
sess = Session()
b1 = B(data='x')
sess.add(b1)
sess.flush()
eq_(b1.expr, 'xq')
def test_expired_column(self):
A, B = self.classes("A", "B")
sess = Session()
b1 = B(data='x')
sess.add(b1)
sess.flush()
sess.expire(b1, ['data'])
eq_(b1.data, 'x')
class JoinedNoFKSortingTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table("a", metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True)
)
Table("b", metadata,
Column('id', Integer, primary_key=True)
)
Table("c", metadata,
Column('id', Integer, primary_key=True)
)
@classmethod
def setup_classes(cls):
class A(cls.Basic):
pass
class B(A):
pass
class C(A):
pass
@classmethod
def setup_mappers(cls):
A, B, C = cls.classes.A, cls.classes.B, cls.classes.C
mapper(A, cls.tables.a)
mapper(B, cls.tables.b, inherits=A,
inherit_condition=cls.tables.a.c.id == cls.tables.b.c.id,
inherit_foreign_keys=cls.tables.b.c.id)
mapper(C, cls.tables.c, inherits=A,
inherit_condition=cls.tables.a.c.id == cls.tables.c.c.id,
inherit_foreign_keys=cls.tables.c.c.id)
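# since "b" and "c" have no real ForeignKey to "a", the explicit
# inherit_condition plus inherit_foreign_keys tell the unit of work which
# side is dependent, so the flush in test_ordering still inserts all "a"
# rows before any "b" or "c" rows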
def test_ordering(self):
B, C = self.classes.B, self.classes.C
sess = Session()
sess.add_all([B(), C(), B(), C()])
self.assert_sql_execution(
testing.db,
sess.flush,
CompiledSQL(
"INSERT INTO a () VALUES ()",
{}
),
CompiledSQL(
"INSERT INTO a () VALUES ()",
{}
),
CompiledSQL(
"INSERT INTO a () VALUES ()",
{}
),
CompiledSQL(
"INSERT INTO a () VALUES ()",
{}
),
AllOf(
CompiledSQL(
"INSERT INTO b (id) VALUES (:id)",
[{"id": 1}, {"id": 3}]
),
CompiledSQL(
"INSERT INTO c (id) VALUES (:id)",
[{"id": 2}, {"id": 4}]
)
)
)
class VersioningTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('base', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('version_id', Integer, nullable=False),
Column('value', String(40)),
Column('discriminator', Integer, nullable=False)
)
Table('subtable', metadata,
Column('id', None, ForeignKey('base.id'), primary_key=True),
Column('subdata', String(50))
)
Table('stuff', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('parent', Integer, ForeignKey('base.id'))
)
@testing.emits_warning(r".*updated rowcount")
@engines.close_open_connections
def test_save_update(self):
subtable, base, stuff = (self.tables.subtable,
self.tables.base,
self.tables.stuff)
class Base(fixtures.BasicEntity):
pass
class Sub(Base):
pass
class Stuff(Base):
pass
mapper(Stuff, stuff)
mapper(Base, base,
polymorphic_on=base.c.discriminator,
version_id_col=base.c.version_id,
polymorphic_identity=1, properties={
'stuff':relationship(Stuff)
})
mapper(Sub, subtable, inherits=Base, polymorphic_identity=2)
sess = create_session()
b1 = Base(value='b1')
s1 = Sub(value='sub1', subdata='some subdata')
sess.add(b1)
sess.add(s1)
sess.flush()
sess2 = create_session()
s2 = sess2.query(Base).get(s1.id)
s2.subdata = 'sess2 subdata'
s1.subdata = 'sess1 subdata'
sess.flush()
assert_raises(orm_exc.StaleDataError,
sess2.query(Base).with_lockmode('read').get,
s1.id)
if not testing.db.dialect.supports_sane_rowcount:
sess2.flush()
else:
assert_raises(orm_exc.StaleDataError, sess2.flush)
sess2.refresh(s2)
if testing.db.dialect.supports_sane_rowcount:
assert s2.subdata == 'sess1 subdata'
s2.subdata = 'sess2 subdata'
sess2.flush()
@testing.emits_warning(r".*(update|delete)d rowcount")
def test_delete(self):
subtable, base = self.tables.subtable, self.tables.base
class Base(fixtures.BasicEntity):
pass
class Sub(Base):
pass
mapper(Base, base,
polymorphic_on=base.c.discriminator,
version_id_col=base.c.version_id, polymorphic_identity=1)
mapper(Sub, subtable, inherits=Base, polymorphic_identity=2)
sess = create_session()
b1 = Base(value='b1')
s1 = Sub(value='sub1', subdata='some subdata')
s2 = Sub(value='sub2', subdata='some other subdata')
sess.add(b1)
sess.add(s1)
sess.add(s2)
sess.flush()
sess2 = create_session()
s3 = sess2.query(Base).get(s1.id)
sess2.delete(s3)
sess2.flush()
s2.subdata = 'some new subdata'
sess.flush()
s1.subdata = 'some new subdata'
if testing.db.dialect.supports_sane_rowcount:
assert_raises(
orm_exc.StaleDataError,
sess.flush
)
else:
sess.flush()
class DistinctPKTest(fixtures.MappedTest):
"""test the construction of mapper.primary_key when an inheriting relationship
joins on a column other than primary key column."""
run_inserts = 'once'
run_deletes = None
@classmethod
def define_tables(cls, metadata):
global person_table, employee_table, Person, Employee
person_table = Table("persons", metadata,
Column("id", Integer, primary_key=True, test_needs_autoincrement=True),
Column("name", String(80)),
)
employee_table = Table("employees", metadata,
Column("eid", Integer, primary_key=True, test_needs_autoincrement=True),
Column("salary", Integer),
Column("person_id", Integer, ForeignKey("persons.id")),
)
class Person(object):
def __init__(self, name):
self.name = name
class Employee(Person): pass
@classmethod
def insert_data(cls):
person_insert = person_table.insert()
person_insert.execute(id=1, name='alice')
person_insert.execute(id=2, name='bob')
employee_insert = employee_table.insert()
employee_insert.execute(eid=2, salary=250, person_id=1) # alice
employee_insert.execute(eid=3, salary=200, person_id=2) # bob
def test_implicit(self):
person_mapper = mapper(Person, person_table)
mapper(Employee, employee_table, inherits=person_mapper)
assert list(class_mapper(Employee).primary_key) == [person_table.c.id]
def test_explicit_props(self):
person_mapper = mapper(Person, person_table)
mapper(Employee, employee_table, inherits=person_mapper,
properties={'pid':person_table.c.id,
'eid':employee_table.c.eid})
self._do_test(False)
def test_explicit_composite_pk(self):
person_mapper = mapper(Person, person_table)
mapper(Employee, employee_table,
inherits=person_mapper,
properties=dict(id=[employee_table.c.eid, person_table.c.id]),
primary_key=[person_table.c.id, employee_table.c.eid])
assert_raises_message(sa_exc.SAWarning,
r"On mapper Mapper\|Employee\|employees, "
"primary key column 'persons.id' is being "
"combined with distinct primary key column 'employees.eid' "
"in attribute 'id'. Use explicit properties to give "
"each column its own mapped attribute name.",
self._do_test, True
)
def test_explicit_pk(self):
person_mapper = mapper(Person, person_table)
mapper(Employee, employee_table, inherits=person_mapper, primary_key=[person_table.c.id])
self._do_test(False)
def _do_test(self, composite):
session = create_session()
query = session.query(Employee)
if composite:
alice1 = query.get([1,2])
bob = query.get([2,3])
alice2 = query.get([1,2])
else:
alice1 = query.get(1)
bob = query.get(2)
alice2 = query.get(1)
assert alice1.name == alice2.name == 'alice'
assert bob.name == 'bob'
class SyncCompileTest(fixtures.MappedTest):
"""test that syncrules compile properly on custom inherit conds"""
@classmethod
def define_tables(cls, metadata):
global _a_table, _b_table, _c_table
_a_table = Table('a', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('data1', String(128))
)
_b_table = Table('b', metadata,
Column('a_id', Integer, ForeignKey('a.id'), primary_key=True),
Column('data2', String(128))
)
_c_table = Table('c', metadata,
# Column('a_id', Integer, ForeignKey('b.a_id'), primary_key=True), #works
Column('b_a_id', Integer, ForeignKey('b.a_id'), primary_key=True),
Column('data3', String(128))
)
def test_joins(self):
for j1 in (None, _b_table.c.a_id==_a_table.c.id, _a_table.c.id==_b_table.c.a_id):
for j2 in (None, _b_table.c.a_id==_c_table.c.b_a_id,
_c_table.c.b_a_id==_b_table.c.a_id):
self._do_test(j1, j2)
for t in reversed(_a_table.metadata.sorted_tables):
t.delete().execute().close()
def _do_test(self, j1, j2):
class A(object):
def __init__(self, **kwargs):
for key, value in list(kwargs.items()):
setattr(self, key, value)
class B(A):
pass
class C(B):
pass
mapper(A, _a_table)
mapper(B, _b_table, inherits=A,
inherit_condition=j1
)
mapper(C, _c_table, inherits=B,
inherit_condition=j2
)
session = create_session()
a = A(data1='a1')
session.add(a)
b = B(data1='b1', data2='b2')
session.add(b)
c = C(data1='c1', data2='c2', data3='c3')
session.add(c)
session.flush()
session.expunge_all()
assert len(session.query(A).all()) == 3
assert len(session.query(B).all()) == 2
assert len(session.query(C).all()) == 1
class OverrideColKeyTest(fixtures.MappedTest):
"""test overriding of column attributes."""
@classmethod
def define_tables(cls, metadata):
global base, subtable, subtable_two
base = Table('base', metadata,
Column('base_id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('data', String(255)),
Column('sqlite_fixer', String(10))
)
subtable = Table('subtable', metadata,
Column('base_id', Integer, ForeignKey('base.base_id'), primary_key=True),
Column('subdata', String(255))
)
subtable_two = Table('subtable_two', metadata,
Column('base_id', Integer, primary_key=True),
Column('fk_base_id', Integer, ForeignKey('base.base_id')),
Column('subdata', String(255))
)
def test_plain(self):
# control case
class Base(object):
pass
class Sub(Base):
pass
mapper(Base, base)
mapper(Sub, subtable, inherits=Base)
# Sub gets a "base_id" property using the "base_id"
# column of both tables.
eq_(
class_mapper(Sub).get_property('base_id').columns,
[subtable.c.base_id, base.c.base_id]
)
def test_override_explicit(self):
# this pattern is what you see when using declarative
# in particular, here we do a "manual" version of
# what we'd like the mapper to do.
class Base(object):
pass
class Sub(Base):
pass
mapper(Base, base, properties={
'id':base.c.base_id
})
mapper(Sub, subtable, inherits=Base, properties={
# this is the manual way to do it, is not really
# possible in declarative
'id':[base.c.base_id, subtable.c.base_id]
})
eq_(
class_mapper(Sub).get_property('id').columns,
[base.c.base_id, subtable.c.base_id]
)
s1 = Sub()
s1.id = 10
sess = create_session()
sess.add(s1)
sess.flush()
assert sess.query(Sub).get(10) is s1
def test_override_onlyinparent(self):
class Base(object):
pass
class Sub(Base):
pass
mapper(Base, base, properties={
'id':base.c.base_id
})
mapper(Sub, subtable, inherits=Base)
eq_(
class_mapper(Sub).get_property('id').columns,
[base.c.base_id]
)
eq_(
class_mapper(Sub).get_property('base_id').columns,
[subtable.c.base_id]
)
s1 = Sub()
s1.id = 10
s2 = Sub()
s2.base_id = 15
sess = create_session()
sess.add_all([s1, s2])
sess.flush()
# s1 gets '10'
assert sess.query(Sub).get(10) is s1
# s2 gets a new id, base_id is overwritten by the ultimate
# PK col
assert s2.id == s2.base_id != 15
def test_override_implicit(self):
# this is originally [ticket:1111].
# the pattern here is now disallowed by [ticket:1892]
class Base(object):
pass
class Sub(Base):
pass
mapper(Base, base, properties={
'id':base.c.base_id
})
def go():
mapper(Sub, subtable, inherits=Base, properties={
'id':subtable.c.base_id
})
# Sub mapper compilation needs to detect that "base.c.base_id"
# is renamed in the inherited mapper as "id", even though
# it has its own "id" property. It then generates
# an exception in 0.7 due to the implicit conflict.
assert_raises(sa_exc.InvalidRequestError, go)
def test_pk_fk_different(self):
class Base(object):
pass
class Sub(Base):
pass
mapper(Base, base)
def go():
mapper(Sub, subtable_two, inherits=Base)
assert_raises_message(
sa_exc.SAWarning,
"Implicitly combining column base.base_id with "
"column subtable_two.base_id under attribute 'base_id'",
go
)
def test_plain_descriptor(self):
"""test that descriptors prevent inheritance from propigating properties to subclasses."""
class Base(object):
pass
class Sub(Base):
@property
def data(self):
return "im the data"
mapper(Base, base)
mapper(Sub, subtable, inherits=Base)
s1 = Sub()
sess = create_session()
sess.add(s1)
sess.flush()
assert sess.query(Sub).one().data == "im the data"
def test_custom_descriptor(self):
"""test that descriptors prevent inheritance from propigating properties to subclasses."""
class MyDesc(object):
def __get__(self, instance, owner):
if instance is None:
return self
return "im the data"
class Base(object):
pass
class Sub(Base):
data = MyDesc()
mapper(Base, base)
mapper(Sub, subtable, inherits=Base)
s1 = Sub()
sess = create_session()
sess.add(s1)
sess.flush()
assert sess.query(Sub).one().data == "im the data"
def test_sub_columns_over_base_descriptors(self):
class Base(object):
@property
def subdata(self):
return "this is base"
class Sub(Base):
pass
mapper(Base, base)
mapper(Sub, subtable, inherits=Base)
sess = create_session()
b1 = Base()
assert b1.subdata == "this is base"
s1 = Sub()
s1.subdata = "this is sub"
assert s1.subdata == "this is sub"
sess.add_all([s1, b1])
sess.flush()
sess.expunge_all()
assert sess.query(Base).get(b1.base_id).subdata == "this is base"
assert sess.query(Sub).get(s1.base_id).subdata == "this is sub"
def test_base_descriptors_over_base_cols(self):
class Base(object):
@property
def data(self):
return "this is base"
class Sub(Base):
pass
mapper(Base, base)
mapper(Sub, subtable, inherits=Base)
sess = create_session()
b1 = Base()
assert b1.data == "this is base"
s1 = Sub()
assert s1.data == "this is base"
sess.add_all([s1, b1])
sess.flush()
sess.expunge_all()
assert sess.query(Base).get(b1.base_id).data == "this is base"
assert sess.query(Sub).get(s1.base_id).data == "this is base"
class OptimizedLoadTest(fixtures.MappedTest):
"""tests for the "optimized load" routine."""
@classmethod
def define_tables(cls, metadata):
Table('base', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('data', String(50)),
Column('type', String(50)),
Column('counter', Integer, server_default="1")
)
Table('sub', metadata,
Column('id', Integer, ForeignKey('base.id'), primary_key=True),
Column('sub', String(50)),
Column('subcounter', Integer, server_default="1"),
Column('subcounter2', Integer, server_default="1")
)
Table('subsub', metadata,
Column('id', Integer, ForeignKey('sub.id'), primary_key=True),
Column('subsubcounter2', Integer, server_default="1")
)
Table('with_comp', metadata,
Column('id', Integer, ForeignKey('base.id'), primary_key=True),
Column('a', String(10)),
Column('b', String(10))
)
def test_no_optimize_on_map_to_join(self):
base, sub = self.tables.base, self.tables.sub
class Base(fixtures.ComparableEntity):
pass
class JoinBase(fixtures.ComparableEntity):
pass
class SubJoinBase(JoinBase):
pass
mapper(Base, base)
mapper(JoinBase, base.outerjoin(sub), properties=util.OrderedDict(
[('id', [base.c.id, sub.c.id]),
('counter', [base.c.counter, sub.c.subcounter])])
)
mapper(SubJoinBase, inherits=JoinBase)
sess = Session()
sess.add(Base(data='data'))
sess.commit()
sjb = sess.query(SubJoinBase).one()
sjb_id = sjb.id
sess.expire(sjb)
# this should not use the optimized load,
# which assumes discrete tables
def go():
eq_(sjb.data, 'data')
self.assert_sql_execution(
testing.db,
go,
CompiledSQL(
"SELECT base.id AS base_id, sub.id AS sub_id, "
"base.counter AS base_counter, sub.subcounter AS sub_subcounter, "
"base.data AS base_data, base.type AS base_type, "
"sub.sub AS sub_sub, sub.subcounter2 AS sub_subcounter2 "
"FROM base LEFT OUTER JOIN sub ON base.id = sub.id "
"WHERE base.id = :param_1",
{'param_1': sjb_id}
),
)
def test_optimized_passes(self):
""""test that the 'optimized load' routine doesn't crash when
a column in the join condition is not available."""
base, sub = self.tables.base, self.tables.sub
class Base(fixtures.ComparableEntity):
pass
class Sub(Base):
pass
mapper(Base, base, polymorphic_on=base.c.type, polymorphic_identity='base')
# redefine Sub's "id" to favor the "id" col in the subtable.
# "id" is also part of the primary join condition
mapper(Sub, sub, inherits=Base,
polymorphic_identity='sub',
properties={'id':[sub.c.id, base.c.id]})
sess = sessionmaker()()
s1 = Sub(data='s1data', sub='s1sub')
sess.add(s1)
sess.commit()
sess.expunge_all()
# load s1 via Base. s1.id won't populate since it's relative to
# the "sub" table. The optimized load kicks in and tries to
# generate on the primary join, but cannot since "id" is itself unloaded.
# The optimized load needs to return None so that regular full-row loading proceeds.
s1 = sess.query(Base).first()
assert s1.sub == 's1sub'
def test_column_expression(self):
base, sub = self.tables.base, self.tables.sub
class Base(fixtures.ComparableEntity):
pass
class Sub(Base):
pass
mapper(Base, base, polymorphic_on=base.c.type, polymorphic_identity='base')
mapper(Sub, sub, inherits=Base, polymorphic_identity='sub', properties={
'concat': column_property(sub.c.sub + "|" + sub.c.sub)
})
sess = sessionmaker()()
s1 = Sub(data='s1data', sub='s1sub')
sess.add(s1)
sess.commit()
sess.expunge_all()
s1 = sess.query(Base).first()
assert s1.concat == 's1sub|s1sub'
def test_column_expression_joined(self):
base, sub = self.tables.base, self.tables.sub
class Base(fixtures.ComparableEntity):
pass
class Sub(Base):
pass
mapper(Base, base, polymorphic_on=base.c.type, polymorphic_identity='base')
mapper(Sub, sub, inherits=Base, polymorphic_identity='sub', properties={
'concat': column_property(base.c.data + "|" + sub.c.sub)
})
sess = sessionmaker()()
s1 = Sub(data='s1data', sub='s1sub')
s2 = Sub(data='s2data', sub='s2sub')
s3 = Sub(data='s3data', sub='s3sub')
sess.add_all([s1, s2, s3])
sess.commit()
sess.expunge_all()
# query a bunch of rows to ensure there's no cartesian
# product against "base" occurring, it is in fact
# detecting that "base" needs to be in the join
# criterion
eq_(
sess.query(Base).order_by(Base.id).all(),
[
Sub(data='s1data', sub='s1sub', concat='s1data|s1sub'),
Sub(data='s2data', sub='s2sub', concat='s2data|s2sub'),
Sub(data='s3data', sub='s3sub', concat='s3data|s3sub')
]
)
def test_composite_column_joined(self):
base, with_comp = self.tables.base, self.tables.with_comp
class Base(fixtures.BasicEntity):
pass
class WithComp(Base):
pass
class Comp(object):
def __init__(self, a, b):
self.a = a
self.b = b
def __composite_values__(self):
return self.a, self.b
def __eq__(self, other):
return (self.a == other.a) and (self.b == other.b)
mapper(Base, base, polymorphic_on=base.c.type, polymorphic_identity='base')
mapper(WithComp, with_comp, inherits=Base, polymorphic_identity='wc', properties={
'comp': composite(Comp, with_comp.c.a, with_comp.c.b)
})
sess = sessionmaker()()
s1 = WithComp(data='s1data', comp=Comp('ham', 'cheese'))
s2 = WithComp(data='s2data', comp=Comp('bacon', 'eggs'))
sess.add_all([s1, s2])
sess.commit()
sess.expunge_all()
s1test, s2test = sess.query(Base).order_by(Base.id).all()
assert s1test.comp
assert s2test.comp
eq_(s1test.comp, Comp('ham', 'cheese'))
eq_(s2test.comp, Comp('bacon', 'eggs'))
def test_load_expired_on_pending(self):
base, sub = self.tables.base, self.tables.sub
class Base(fixtures.BasicEntity):
pass
class Sub(Base):
pass
mapper(Base, base, polymorphic_on=base.c.type, polymorphic_identity='base')
mapper(Sub, sub, inherits=Base, polymorphic_identity='sub')
sess = Session()
s1 = Sub(data='s1')
sess.add(s1)
self.assert_sql_execution(
testing.db,
sess.flush,
CompiledSQL(
"INSERT INTO base (data, type) VALUES (:data, :type)",
[{'data':'s1','type':'sub'}]
),
CompiledSQL(
"INSERT INTO sub (id, sub) VALUES (:id, :sub)",
lambda ctx:{'id':s1.id, 'sub':None}
),
)
def go():
eq_( s1.subcounter2, 1 )
self.assert_sql_execution(
testing.db,
go,
CompiledSQL(
"SELECT base.counter AS base_counter, sub.subcounter AS sub_subcounter, "
"sub.subcounter2 AS sub_subcounter2 FROM base JOIN sub "
"ON base.id = sub.id WHERE base.id = :param_1",
lambda ctx:{'param_1': s1.id}
),
)
def test_dont_generate_on_none(self):
base, sub = self.tables.base, self.tables.sub
class Base(fixtures.BasicEntity):
pass
class Sub(Base):
pass
mapper(Base, base, polymorphic_on=base.c.type,
polymorphic_identity='base')
m = mapper(Sub, sub, inherits=Base, polymorphic_identity='sub')
s1 = Sub()
assert m._optimized_get_statement(attributes.instance_state(s1),
['subcounter2']) is None
# loads s1.id as None
eq_(s1.id, None)
# this now will come up with a value of None for id - should reject
assert m._optimized_get_statement(attributes.instance_state(s1),
['subcounter2']) is None
s1.id = 1
attributes.instance_state(s1)._commit_all(s1.__dict__, None)
assert m._optimized_get_statement(attributes.instance_state(s1),
['subcounter2']) is not None
def test_load_expired_on_pending_twolevel(self):
base, sub, subsub = (self.tables.base,
self.tables.sub,
self.tables.subsub)
class Base(fixtures.BasicEntity):
pass
class Sub(Base):
pass
class SubSub(Sub):
pass
mapper(Base, base, polymorphic_on=base.c.type,
polymorphic_identity='base')
mapper(Sub, sub, inherits=Base, polymorphic_identity='sub')
mapper(SubSub, subsub, inherits=Sub, polymorphic_identity='subsub')
sess = Session()
s1 = SubSub(data='s1', counter=1, subcounter=2)
sess.add(s1)
self.assert_sql_execution(
testing.db,
sess.flush,
CompiledSQL(
"INSERT INTO base (data, type, counter) VALUES "
"(:data, :type, :counter)",
[{'data':'s1','type':'subsub','counter':1}]
),
CompiledSQL(
"INSERT INTO sub (id, sub, subcounter) VALUES "
"(:id, :sub, :subcounter)",
lambda ctx:[{'subcounter': 2, 'sub': None, 'id': s1.id}]
),
CompiledSQL(
"INSERT INTO subsub (id) VALUES (:id)",
lambda ctx:{'id':s1.id}
),
)
def go():
eq_(
s1.subcounter2, 1
)
self.assert_sql_execution(
testing.db,
go,
Or(
CompiledSQL(
"SELECT subsub.subsubcounter2 AS subsub_subsubcounter2, "
"sub.subcounter2 AS sub_subcounter2 FROM subsub, sub "
"WHERE :param_1 = sub.id AND sub.id = subsub.id",
lambda ctx: {'param_1': s1.id}
),
CompiledSQL(
"SELECT sub.subcounter2 AS sub_subcounter2, "
"subsub.subsubcounter2 AS subsub_subsubcounter2 "
"FROM sub, subsub "
"WHERE :param_1 = sub.id AND sub.id = subsub.id",
lambda ctx: {'param_1': s1.id}
),
)
)
class TransientInheritingGCTest(fixtures.TestBase):
__requires__ = ('cpython',)
def _fixture(self):
Base = declarative_base()
class A(Base):
__tablename__ = 'a'
id = Column(Integer, primary_key=True,
test_needs_autoincrement=True)
data = Column(String(10))
self.A = A
return Base
def setUp(self):
self.Base = self._fixture()
def tearDown(self):
self.Base.metadata.drop_all(testing.db)
#clear_mappers()
self.Base = None
def _do_test(self, go):
B = go()
self.Base.metadata.create_all(testing.db)
sess = Session(testing.db)
sess.add(B(data='some b'))
sess.commit()
b1 = sess.query(B).one()
assert isinstance(b1, B)
sess.close()
del sess
del b1
del B
gc_collect()
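# with the session, the instance and the class itself all dereferenced, a
# full gc pass should reclaim the transient subclass; an empty
# __subclasses__() list shows the mapper registry did not leak it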
eq_(
len(self.A.__subclasses__()),
0)
def test_single(self):
def go():
class B(self.A):
pass
return B
self._do_test(go)
@testing.fails_if(lambda: True,
"not supported for joined inh right now.")
def test_joined(self):
def go():
class B(self.A):
__tablename__ = 'b'
id = Column(Integer, ForeignKey('a.id'),
primary_key=True)
return B
self._do_test(go)
class NoPKOnSubTableWarningTest(fixtures.TestBase):
def _fixture(self):
metadata = MetaData()
parent = Table('parent', metadata,
Column('id', Integer, primary_key=True)
)
child = Table('child', metadata,
Column('id', Integer, ForeignKey('parent.id'))
)
return parent, child
def tearDown(self):
clear_mappers()
def test_warning_on_sub(self):
parent, child = self._fixture()
class P(object):
pass
class C(P):
pass
mapper(P, parent)
assert_raises_message(
sa_exc.SAWarning,
"Could not assemble any primary keys for locally mapped "
"table 'child' - no rows will be persisted in this Table.",
mapper, C, child, inherits=P
)
def test_no_warning_with_explicit(self):
parent, child = self._fixture()
class P(object):
pass
class C(P):
pass
mapper(P, parent)
mc = mapper(C, child, inherits=P, primary_key=[parent.c.id])
eq_(mc.primary_key, (parent.c.id,))
class InhCondTest(fixtures.TestBase):
def test_inh_cond_nonexistent_table_unrelated(self):
metadata = MetaData()
base_table = Table("base", metadata,
Column("id", Integer, primary_key=True)
)
derived_table = Table("derived", metadata,
Column("id", Integer, ForeignKey("base.id"), primary_key=True),
Column("owner_id", Integer, ForeignKey("owner.owner_id"))
)
class Base(object):
pass
class Derived(Base):
pass
mapper(Base, base_table)
# succeeds, despite "owner" table not configured yet
m2 = mapper(Derived, derived_table,
inherits=Base)
assert m2.inherit_condition.compare(
base_table.c.id==derived_table.c.id
)
def test_inh_cond_nonexistent_col_unrelated(self):
m = MetaData()
base_table = Table("base", m,
Column("id", Integer, primary_key=True)
)
derived_table = Table("derived", m,
Column("id", Integer, ForeignKey('base.id'),
primary_key=True),
Column('order_id', Integer, ForeignKey('order.foo'))
)
order_table = Table('order', m, Column('id', Integer, primary_key=True))
class Base(object):
pass
class Derived(Base):
pass
mapper(Base, base_table)
# succeeds, despite "order.foo" doesn't exist
m2 = mapper(Derived, derived_table, inherits=Base)
assert m2.inherit_condition.compare(
base_table.c.id==derived_table.c.id
)
def test_inh_cond_no_fk(self):
metadata = MetaData()
base_table = Table("base", metadata,
Column("id", Integer, primary_key=True)
)
derived_table = Table("derived", metadata,
Column("id", Integer, primary_key=True),
)
class Base(object):
pass
class Derived(Base):
pass
mapper(Base, base_table)
assert_raises_message(
sa_exc.ArgumentError,
"Can't find any foreign key relationships between "
"'base' and 'derived'.",
mapper,
Derived, derived_table, inherits=Base
)
def test_inh_cond_nonexistent_table_related(self):
m1 = MetaData()
m2 = MetaData()
base_table = Table("base", m1,
Column("id", Integer, primary_key=True)
)
derived_table = Table("derived", m2,
Column("id", Integer, ForeignKey('base.id'),
primary_key=True),
)
class Base(object):
pass
class Derived(Base):
pass
mapper(Base, base_table)
# the ForeignKey def is correct, but the two tables belong to two
# different MetaData collections. We'd like the traditional
# "no referenced table" error to raise so that the
# user is directed towards the FK definition in question.
assert_raises_message(
sa_exc.NoReferencedTableError,
"Foreign key associated with column 'derived.id' "
"could not find table 'base' with which to generate "
"a foreign key to target column 'id'",
mapper,
Derived, derived_table, inherits=Base
)
def test_inh_cond_nonexistent_col_related(self):
m = MetaData()
base_table = Table("base", m,
Column("id", Integer, primary_key=True)
)
derived_table = Table("derived", m,
Column("id", Integer, ForeignKey('base.q'),
primary_key=True),
)
class Base(object):
pass
class Derived(Base):
pass
mapper(Base, base_table)
assert_raises_message(
sa_exc.NoReferencedColumnError,
"Could not initialize target column for ForeignKey "
"'base.q' on table "
"'derived': table 'base' has no column named 'q'",
mapper,
Derived, derived_table, inherits=Base
)
class PKDiscriminatorTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
parents = Table('parents', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('name', String(60)))
children = Table('children', metadata,
Column('id', Integer, ForeignKey('parents.id'),
primary_key=True),
Column('type', Integer,primary_key=True),
Column('name', String(60)))
def test_pk_as_discriminator(self):
parents, children = self.tables.parents, self.tables.children
class Parent(object):
def __init__(self, name=None):
self.name = name
class Child(object):
def __init__(self, name=None):
self.name = name
class A(Child):
pass
mapper(Parent, parents, properties={
'children': relationship(Child, backref='parent'),
})
mapper(Child, children, polymorphic_on=children.c.type,
polymorphic_identity=1)
mapper(A, inherits=Child, polymorphic_identity=2)
s = create_session()
p = Parent('p1')
a = A('a1')
p.children.append(a)
s.add(p)
s.flush()
assert a.id
assert a.type == 2
p.name='p1new'
a.name='a1new'
s.flush()
s.expire_all()
assert a.name=='a1new'
assert p.name=='p1new'
class NoPolyIdentInMiddleTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('base', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('type', String(50), nullable=False),
)
@classmethod
def setup_classes(cls):
class A(cls.Comparable):
pass
class B(A):
pass
class C(B):
pass
class D(B):
pass
class E(A):
pass
@classmethod
def setup_mappers(cls):
A, C, B, E, D, base = (cls.classes.A,
cls.classes.C,
cls.classes.B,
cls.classes.E,
cls.classes.D,
cls.tables.base)
mapper(A, base, polymorphic_on=base.c.type)
mapper(B, inherits=A, )
mapper(C, inherits=B, polymorphic_identity='c')
mapper(D, inherits=B, polymorphic_identity='d')
mapper(E, inherits=A, polymorphic_identity='e')
def test_load_from_middle(self):
C, B = self.classes.C, self.classes.B
s = Session()
s.add(C())
o = s.query(B).first()
eq_(o.type, 'c')
assert isinstance(o, C)
def test_load_from_base(self):
A, C = self.classes.A, self.classes.C
s = Session()
s.add(C())
o = s.query(A).first()
eq_(o.type, 'c')
assert isinstance(o, C)
def test_discriminator(self):
C, B, base = (self.classes.C,
self.classes.B,
self.tables.base)
assert class_mapper(B).polymorphic_on is base.c.type
assert class_mapper(C).polymorphic_on is base.c.type
def test_load_multiple_from_middle(self):
C, B, E, D, base = (self.classes.C,
self.classes.B,
self.classes.E,
self.classes.D,
self.tables.base)
s = Session()
s.add_all([C(), D(), E()])
eq_(
s.query(B).order_by(base.c.type).all(),
[C(), D()]
)
class DeleteOrphanTest(fixtures.MappedTest):
"""Test the fairly obvious, that an error is raised
when attempting to insert an orphan.
Previous SQLA versions would check this constraint
in memory which is the original rationale for this test.
"""
@classmethod
def define_tables(cls, metadata):
global single, parent
single = Table('single', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('type', String(50), nullable=False),
Column('data', String(50)),
Column('parent_id', Integer, ForeignKey('parent.id'), nullable=False),
)
parent = Table('parent', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('data', String(50))
)
def test_orphan_message(self):
class Base(fixtures.BasicEntity):
pass
class SubClass(Base):
pass
class Parent(fixtures.BasicEntity):
pass
mapper(Base, single, polymorphic_on=single.c.type, polymorphic_identity='base')
mapper(SubClass, inherits=Base, polymorphic_identity='sub')
mapper(Parent, parent, properties={
'related':relationship(Base, cascade="all, delete-orphan")
})
sess = create_session()
s1 = SubClass(data='s1')
sess.add(s1)
assert_raises(sa_exc.DBAPIError, sess.flush)
class PolymorphicUnionTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
def _fixture(self):
t1 = table('t1', column('c1', Integer),
column('c2', Integer),
column('c3', Integer))
t2 = table('t2', column('c1', Integer), column('c2', Integer),
column('c3', Integer),
column('c4', Integer))
t3 = table('t3', column('c1', Integer),
column('c3', Integer),
column('c5', Integer))
return t1, t2, t3
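    # The three tables deliberately expose different column sets; polymorphic_union
    # pads the missing columns (c2, c4, c5) with typed NULLs so the per-table
    # SELECTs can be UNIONed, as the expected SQL in the tests below shows.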
def test_type_col_present(self):
t1, t2, t3 = self._fixture()
self.assert_compile(
polymorphic_union(
util.OrderedDict([("a", t1), ("b", t2), ("c", t3)]),
'q1'
),
"SELECT t1.c1, t1.c2, t1.c3, CAST(NULL AS INTEGER) AS c4, "
"CAST(NULL AS INTEGER) AS c5, 'a' AS q1 FROM t1 UNION ALL "
"SELECT t2.c1, t2.c2, t2.c3, t2.c4, CAST(NULL AS INTEGER) AS c5, "
"'b' AS q1 FROM t2 UNION ALL SELECT t3.c1, "
"CAST(NULL AS INTEGER) AS c2, t3.c3, CAST(NULL AS INTEGER) AS c4, "
"t3.c5, 'c' AS q1 FROM t3"
)
def test_type_col_non_present(self):
t1, t2, t3 = self._fixture()
self.assert_compile(
polymorphic_union(
util.OrderedDict([("a", t1), ("b", t2), ("c", t3)]),
None
),
"SELECT t1.c1, t1.c2, t1.c3, CAST(NULL AS INTEGER) AS c4, "
"CAST(NULL AS INTEGER) AS c5 FROM t1 UNION ALL SELECT t2.c1, "
"t2.c2, t2.c3, t2.c4, CAST(NULL AS INTEGER) AS c5 FROM t2 "
"UNION ALL SELECT t3.c1, CAST(NULL AS INTEGER) AS c2, t3.c3, "
"CAST(NULL AS INTEGER) AS c4, t3.c5 FROM t3"
)
def test_no_cast_null(self):
t1, t2, t3 = self._fixture()
self.assert_compile(
polymorphic_union(
util.OrderedDict([("a", t1), ("b", t2), ("c", t3)]),
'q1', cast_nulls=False
),
"SELECT t1.c1, t1.c2, t1.c3, NULL AS c4, NULL AS c5, 'a' AS q1 "
"FROM t1 UNION ALL SELECT t2.c1, t2.c2, t2.c3, t2.c4, NULL AS c5, "
"'b' AS q1 FROM t2 UNION ALL SELECT t3.c1, NULL AS c2, t3.c3, "
"NULL AS c4, t3.c5, 'c' AS q1 FROM t3"
)
class NameConflictTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
content = Table('content', metadata,
Column('id', Integer, primary_key=True,
test_needs_autoincrement=True),
Column('type', String(30))
)
foo = Table('foo', metadata,
Column('id', Integer, ForeignKey('content.id'),
primary_key=True),
Column('content_type', String(30))
)
def test_name_conflict(self):
class Content(object):
pass
class Foo(Content):
pass
mapper(Content, self.tables.content,
polymorphic_on=self.tables.content.c.type)
mapper(Foo, self.tables.foo, inherits=Content,
polymorphic_identity='foo')
sess = create_session()
f = Foo()
f.content_type = 'bar'
sess.add(f)
sess.flush()
f_id = f.id
sess.expunge_all()
assert sess.query(Content).get(f_id).content_type == 'bar'
| mit |
albertomurillo/ansible | test/units/modules/network/netact/test_netact_cm_command.py | 45 | 6140 | # -*- coding: utf-8 -*-
"""
netact_cm_command unit tests
"""
# (c) 2017, Nokia
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# pylint: disable=invalid-name,protected-access,function-redefined,unused-argument
# pylint: disable=unused-import,redundant-unittest-assert
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from units.compat import unittest
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes
from ansible.modules.network.netact import netact_cm_command
from units.compat.mock import patch
from units.modules.utils import set_module_args, AnsibleExitJson, AnsibleFailJson, ModuleTestCase
class AnsibleExitJson(Exception):
"""Exception class to be raised by module.exit_json and caught by the test case"""
pass
class AnsibleFailJson(Exception):
"""Exception class to be raised by module.fail_json and caught by the test case"""
pass
def exit_json(*args, **kwargs):
"""function to patch over exit_json; package return data into an exception"""
if 'changed' not in kwargs:
kwargs['changed'] = False
raise AnsibleExitJson(kwargs)
def fail_json(*args, **kwargs):
"""function to patch over fail_json; package return data into an exception"""
kwargs['failed'] = True
raise AnsibleFailJson(kwargs)
def get_bin_path(self, arg, required=False):
"""Mock AnsibleModule.get_bin_path"""
if arg.endswith('netact_cm_command'):
return '/usr/bin/my_command'
else:
if required:
fail_json(msg='%r not found !' % arg)
class TestClass(unittest.TestCase):
"""
Test cases
"""
def setUp(self):
self.mock_module_helper = patch.multiple(basic.AnsibleModule,
exit_json=exit_json,
fail_json=fail_json,
get_bin_path=get_bin_path)
self.mock_module_helper.start()
self.addCleanup(self.mock_module_helper.stop)
def test_module_fail_when_required_args_missing(self):
"""
        Testing that the command fails if required args are missing
:return:
"""
with self.assertRaises(AnsibleFailJson):
set_module_args({})
netact_cm_command.main()
self.assertTrue(False)
def test_ensure_command_called(self):
"""
Testing that command is executed with correct args
:return:
"""
set_module_args({
'operation': "Upload",
'opsName': 'Uploading_testi',
'DN': "PLMN-PLMN/MRBTS-746",
})
with patch.object(basic.AnsibleModule, 'run_command') as mock_run_command:
stdout = 'configuration updated'
stderr = ''
return_code = 0
mock_run_command.return_value = return_code, stdout, stderr # successful execution
with self.assertRaises(AnsibleExitJson) as result:
netact_cm_command.main()
print(result.exception.args)
self.assertTrue(result.exception.args[0]['changed']) # ensure result is changed
mock_run_command.assert_called_once_with(
['/opt/oss/bin/racclimx.sh', '-op', 'Upload', '-opsName', 'Uploading_testi',
'-DN', 'PLMN-PLMN/MRBTS-746'],
check_rc=True)
def test_ensure_backupPlanName_outputs_correctly(self):
"""
        Testing that createBackupPlan and backupPlanName are passed to the command correctly
:return:
"""
set_module_args({
'operation': "Provision",
'opsName': 'Provision_test',
'WS': "PLMN-PLMN/MRBTS-746",
'createBackupPlan': "Yes",
'backupPlanName': "backupPlanName"
})
with patch.object(basic.AnsibleModule, 'run_command') as mock_run_command:
stdout = 'configuration updated'
stderr = ''
return_code = 0
mock_run_command.return_value = return_code, stdout, stderr # successful execution
with self.assertRaises(AnsibleExitJson) as result:
netact_cm_command.main()
print(result.exception.args)
self.assertTrue(result.exception.args[0]['changed']) # ensure result is changed
mock_run_command.assert_called_once_with(
['/opt/oss/bin/racclimx.sh', '-op', 'Provision', '-opsName', 'Provision_test',
'-WS', 'PLMN-PLMN/MRBTS-746', '-createBackupPlan', 'true', '-backupPlanName', 'backupPlanName'],
check_rc=True)
def test_withwrongargs(self):
"""
        Testing that a wrong attribute causes an error
:return:
"""
set_module_args({
'operation': "Upload",
'opsName': 'Uploading_testi',
'MR': "PLMN-PLMN/MRBTS-746",
'abc': 'abc'
})
with self.assertRaises(AnsibleFailJson):
with patch.object(basic.AnsibleModule, 'run_command') as mock_run_command:
stdout = 'configuration updated'
stderr = ''
return_code = 0
mock_run_command.return_value = return_code, stdout, stderr # successful execution
with self.assertRaises(AnsibleExitJson) as result:
netact_cm_command.main()
self.assertTrue(result.exception.args[0]['changed']) # ensure result is changed
            self.assertFalse(True)  # should never be reached
| gpl-3.0 |
google-research/google-research | widget_caption/widget_caption_input.py | 1 | 12750 | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Widget captioning input pipeline."""
from absl import flags
import tensorflow as tf
# Constants for embeddings.
PADDING = 0
EOS = 1
UKN = 2
START = 3
FLAGS = flags.FLAGS
def _produce_target_phrase(phrases):
"""Randomly selects one phrase as the target phrase for training."""
with tf.variable_scope('produce_output'):
# Find indices for valid phrases with meaningful tokens.
valid_phrase_indices = tf.reshape(
tf.where(tf.reduce_any(tf.greater(phrases, EOS), -1)), [-1])
# If indices is empty (no valid tokens/annotations), just use the index 0,
# otherwise random shuffle the indices and select one.
index = tf.cond(
tf.greater(tf.shape(valid_phrase_indices)[0], 0),
lambda: tf.cast(tf.random.shuffle(valid_phrase_indices)[0], tf.int32),
lambda: 0)
phrase = phrases[index]
# Append EOS to the end of phrase.
phrase = tf.boolean_mask(phrase, mask=tf.greater(phrase, PADDING))
phrase = tf.concat([phrase, [EOS]], axis=0)
# Pad the phrase to length of 11 (10 words + EOS).
phrase = tf.pad(phrase, [[0, 11 - tf.shape(phrase)[-1]]])
return phrase
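# Shape sketch (illustrative, not from the original module): with 4 worker
# captions of up to 10 tokens each, `phrases` has shape [4, 10] and the
# returned target has shape [11] -- the sampled caption's tokens, one EOS,
# then PADDING up to length 11, matching the tf.pad call above.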
def _select_phrases(dense_features):
"""Selects phrases from the workers."""
with tf.variable_scope('select_phrases'):
# Sample one phrase for each node.
output_phrase = tf.map_fn(_produce_target_phrase,
dense_features['caption_token_id'])
# Output shape: [N, seq_len]
output_phrase = tf.reshape(output_phrase, [-1, 11])
return output_phrase
def _extract_image(dense_features, num_ui_objects, target_node=None):
"""Extracts image features."""
with tf.variable_scope('extract_image'):
visible = dense_features['visibility_seq'] * dense_features[
'visibility_to_user_seq']
obj_pixels = tf.reshape(dense_features['obj_img_mat'],
[num_ui_objects, 64, 64, 3])
if target_node is not None:
obj_pixels = tf.image.rgb_to_grayscale(tf.gather(obj_pixels, target_node))
else:
obj_pixels = tf.image.rgb_to_grayscale(obj_pixels)
w = (
dense_features['cord_x_seq'][:, 1] -
dense_features['cord_x_seq'][:, 0])
h = (
dense_features['cord_y_seq'][:, 1] -
dense_features['cord_y_seq'][:, 0])
obj_visible = tf.logical_and(
tf.equal(visible, 1),
tf.logical_or(tf.greater(w, 0.005), tf.greater(h, 0.005)))
obj_pixels = tf.where(obj_visible, obj_pixels, tf.zeros_like(obj_pixels))
return tf.cast(obj_pixels, tf.float32) / 255.0, obj_visible
def filter_empty_mturk():
"""Creates a filtering function."""
def _has_mturk_captions(dense_features):
"""Check whether it has nodes with MTurk captions."""
num_nodes = tf.shape(dense_features['label_flag'])[0]
token_ids = tf.reshape(dense_features['caption_token_id'],
[num_nodes, 4, 10])
nodes_with_annotations = tf.reduce_any(
tf.reduce_any(tf.greater(token_ids, EOS), -1), -1)
original_worker_node_mask = tf.equal(dense_features['label_flag'], 0)
worker_node_mask = tf.logical_and(original_worker_node_mask,
nodes_with_annotations)
return tf.reduce_any(worker_node_mask)
return _has_mturk_captions
def parse_tf_example(serialized_example):
"""Parses a single tf example."""
keys_to_features = {
'developer_token_id': tf.VarLenFeature(tf.int64),
'resource_token_id': tf.VarLenFeature(tf.int64),
'caption_token_id': tf.VarLenFeature(tf.int64),
'caption_phrase_id': tf.VarLenFeature(tf.int64),
'gold_caption': tf.VarLenFeature(tf.string),
'clickable_seq': tf.VarLenFeature(tf.int64),
'v_distance_seq': tf.VarLenFeature(tf.float32),
'h_distance_seq': tf.VarLenFeature(tf.float32),
'type_id_seq': tf.VarLenFeature(tf.int64),
'cord_x_seq': tf.VarLenFeature(tf.float32),
'cord_y_seq': tf.VarLenFeature(tf.float32),
'visibility_to_user_seq': tf.VarLenFeature(tf.int64),
'visibility_seq': tf.VarLenFeature(tf.int64),
'label_flag': tf.VarLenFeature(tf.int64), # 0: worker 1: developer
'parent_child_seq': tf.VarLenFeature(tf.int64),
'obj_img_mat': tf.VarLenFeature(tf.int64),
'obj_dom_pos': tf.VarLenFeature(tf.int64),
'is_leaf': tf.VarLenFeature(tf.int64),
}
parsed = tf.parse_single_example(serialized_example, keys_to_features)
dense_features = {}
for key in keys_to_features:
if key in ['gold_caption']:
default_value = ''
else:
default_value = 0
dense_features[key] = tf.sparse_tensor_to_dense(
parsed[key], default_value=default_value)
return dense_features
def create_parser(word_vocab_size,
phrase_vocab_size,
max_pixel_pos=100,
max_dom_pos=500,
is_inference=False):
"""Creates a parser for tf.Example."""
def process_tf_example(dense_features):
"""Parses a single tf example."""
# Reshape the features
num_ui_objects = tf.shape(dense_features['clickable_seq'])[0]
dense_features['caption_token_id'] = tf.reshape(
dense_features['caption_token_id'], [num_ui_objects, 4, 10])
dense_features['developer_token_id'] = tf.reshape(
dense_features['developer_token_id'], [num_ui_objects, 10])
dense_features['resource_token_id'] = tf.reshape(
dense_features['resource_token_id'], [num_ui_objects, 10])
dense_features['caption_token_id'] = tf.where(
tf.greater_equal(dense_features['caption_token_id'], word_vocab_size),
tf.cast(
tf.fill(tf.shape(dense_features['caption_token_id']), UKN),
dtype=tf.int64), dense_features['caption_token_id'])
dense_features['developer_token_id'] = tf.where(
tf.greater_equal(dense_features['developer_token_id'], word_vocab_size),
tf.cast(
tf.fill(tf.shape(dense_features['developer_token_id']), UKN),
dtype=tf.int64), dense_features['developer_token_id'])
dense_features['resource_token_id'] = tf.where(
tf.greater_equal(dense_features['resource_token_id'], word_vocab_size),
tf.cast(
tf.fill(tf.shape(dense_features['resource_token_id']), UKN),
dtype=tf.int64), dense_features['resource_token_id'])
dense_features['caption_phrase_id'] = tf.where(
tf.greater_equal(dense_features['caption_phrase_id'],
phrase_vocab_size),
tf.cast(
tf.fill(tf.shape(dense_features['caption_phrase_id']), UKN),
dtype=tf.int64), dense_features['caption_phrase_id'])
dense_features['v_distance_seq'] = tf.reshape(
dense_features['v_distance_seq'], [num_ui_objects, num_ui_objects],
name='v_distance_seq')
dense_features['h_distance_seq'] = tf.reshape(
dense_features['h_distance_seq'], [num_ui_objects, num_ui_objects],
name='h_distance_seq')
dense_features['cord_x_seq'] = tf.reshape(
dense_features['cord_x_seq'], [num_ui_objects, 2], name='cord_x_seq')
dense_features['cord_y_seq'] = tf.reshape(
dense_features['cord_y_seq'], [num_ui_objects, 2], name='cord_y_seq')
dense_features['parent_child_seq'] = tf.reshape(
tf.to_int32(dense_features['parent_child_seq']), [-1, num_ui_objects],
name='parent_child_seq')
dense_features['obj_dom_pos'] = tf.where(
tf.greater_equal(dense_features['obj_dom_pos'], max_dom_pos),
tf.cast(
tf.fill(tf.shape(dense_features['obj_dom_pos']), 0),
dtype=tf.int64), dense_features['obj_dom_pos'])
feature_dict = {}
if not is_inference:
output_phrase = _select_phrases(dense_features)
feature_dict['caption_token_id'] = output_phrase
feature_dict['caption_phrase_id'] = dense_features['caption_phrase_id']
feature_dict['developer_token_id'] = dense_features['developer_token_id']
feature_dict['resource_token_id'] = dense_features['resource_token_id']
feature_dict['reference'] = dense_features['gold_caption']
# feature_dict['obj_str_seq'] = dense_features['obj_str_seq']
feature_dict['label_flag'] = dense_features['label_flag']
feature_dict['obj_is_leaf'] = dense_features['is_leaf']
obj_pixels, obj_visible = _extract_image(dense_features, num_ui_objects)
feature_dict['obj_pixels'] = obj_pixels
feature_dict['obj_visible'] = obj_visible
feature_dict['obj_screen_pos'] = tf.concat(
[dense_features['cord_x_seq'], dense_features['cord_y_seq']], -1)
feature_dict['obj_screen_pos'] = tf.to_int32(
feature_dict['obj_screen_pos'] * (max_pixel_pos - 1))
feature_dict['obj_clickable'] = dense_features['clickable_seq']
feature_dict['obj_type'] = dense_features['type_id_seq']
feature_dict['obj_adjacency'] = dense_features['parent_child_seq']
feature_dict['obj_dom_pos'] = tf.reshape(dense_features['obj_dom_pos'],
[num_ui_objects, 3])
    feature_dict['obj_is_padding'] = tf.zeros([num_ui_objects])
for key in [
'obj_adjacency',
'obj_type',
'obj_clickable',
'obj_screen_pos',
'obj_dom_pos',
'developer_token_id',
'resource_token_id',
]:
# Add the auxiliary step dimension.
feature_dict[key] = tf.expand_dims(feature_dict[key], 0)
for key in [
'caption_token_id',
'caption_phrase_id',
'developer_token_id',
'resource_token_id',
'label_flag',
'obj_adjacency',
'obj_type',
'obj_clickable',
'obj_visible',
'obj_is_leaf',
'icon_label',
'obj_dom_pos',
'obj_is_padding',
]:
if key in feature_dict:
feature_dict[key] = tf.cast(feature_dict[key], tf.int32)
return feature_dict
return process_tf_example
def input_fn(pattern,
batch_size,
word_vocab_size,
phrase_vocab_size,
max_pixel_pos=100,
max_dom_pos=500,
epoches=1,
buffer_size=1):
"""Retrieves batches of data for training."""
# files = tf.data.Dataset.list_files(pattern)
filepaths = tf.io.gfile.glob(pattern)
  dataset = tf.data.TFRecordDataset(filepaths)
dataset = dataset.map(
parse_tf_example, num_parallel_calls=tf.data.experimental.AUTOTUNE)
dataset = dataset.filter(filter_empty_mturk())
dataset = dataset.map(
create_parser(word_vocab_size, phrase_vocab_size, max_pixel_pos,
max_dom_pos),
num_parallel_calls=tf.data.experimental.AUTOTUNE)
dataset = dataset.shuffle(buffer_size=buffer_size)
dataset = dataset.repeat(count=epoches)
padding_value_int = tf.cast(0, tf.int32)
anchor_padding_value_int = tf.cast(-1, tf.int32)
padding_info = [
('caption_token_id', [None, 11], padding_value_int),
('caption_phrase_id', [None], padding_value_int),
('developer_token_id', [1, None, 10], padding_value_int),
('resource_token_id', [1, None, 10], padding_value_int),
('reference', [None], tf.cast('', tf.string)),
('label_flag', [None], anchor_padding_value_int),
('icon_label', [None], padding_value_int),
('icon_iou', [None], 0.0),
('obj_pixels', [None, 64, 64, 1], tf.cast(0, tf.float32)),
('obj_adjacency', [1, None, None], padding_value_int),
('obj_type', [1, None], anchor_padding_value_int),
('obj_clickable', [1, None], padding_value_int),
('obj_screen_pos', [1, None, 4], padding_value_int),
('obj_dom_pos', [1, None, 3], padding_value_int),
('obj_visible', [None], padding_value_int),
('obj_is_leaf', [None], padding_value_int),
('obj_is_padding', [None], 1),
]
padded_shapes = {}
padded_values = {}
for (key, padding_shape, padding_value) in padding_info:
padded_shapes[key] = padding_shape
padded_values[key] = padding_value
dataset = dataset.padded_batch(
batch_size, padded_shapes=padded_shapes, padding_values=padded_values)
dataset = dataset.prefetch(buffer_size=1024)
return dataset
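# Usage sketch (the file pattern and sizes are hypothetical, not part of the
# original API):
#
#   dataset = input_fn('/tmp/widget_captions*.tfrecord', batch_size=8,
#                      word_vocab_size=10000, phrase_vocab_size=10000)
#   features = tf.data.make_one_shot_iterator(dataset).get_next()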
| apache-2.0 |
jbedorf/tensorflow | tensorflow/python/platform/gfile.py | 16 | 3104 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Import router for file_io."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import
from tensorflow.python.lib.io.file_io import copy as Copy
from tensorflow.python.lib.io.file_io import create_dir as MkDir
from tensorflow.python.lib.io.file_io import delete_file as Remove
from tensorflow.python.lib.io.file_io import delete_recursively as DeleteRecursively
from tensorflow.python.lib.io.file_io import file_exists as Exists
from tensorflow.python.lib.io.file_io import FileIO as _FileIO
from tensorflow.python.lib.io.file_io import get_matching_files as Glob
from tensorflow.python.lib.io.file_io import is_directory as IsDirectory
from tensorflow.python.lib.io.file_io import list_directory as ListDirectory
from tensorflow.python.lib.io.file_io import recursive_create_dir as MakeDirs
from tensorflow.python.lib.io.file_io import rename as Rename
from tensorflow.python.lib.io.file_io import stat as Stat
from tensorflow.python.lib.io.file_io import walk as Walk
# pylint: enable=unused-import
from tensorflow.python.util.deprecation import deprecated
from tensorflow.python.util.tf_export import tf_export
@tf_export('io.gfile.GFile', v1=['gfile.GFile', 'gfile.Open', 'io.gfile.GFile'])
class GFile(_FileIO):
"""File I/O wrappers without thread locking.
  Note that this is somewhat like builtin Python file I/O, but
there are semantic differences to make it more efficient for
some backing filesystems. For example, a write mode file will
not be opened until the first write call (to minimize RPC
invocations in network filesystems).
"""
def __init__(self, name, mode='r'):
super(GFile, self).__init__(name=name, mode=mode)
@tf_export(v1=['gfile.FastGFile'])
class FastGFile(_FileIO):
"""File I/O wrappers without thread locking.
  Note that this is somewhat like builtin Python file I/O, but
there are semantic differences to make it more efficient for
some backing filesystems. For example, a write mode file will
not be opened until the first write call (to minimize RPC
invocations in network filesystems).
"""
@deprecated(None, 'Use tf.gfile.GFile.')
def __init__(self, name, mode='r'):
super(FastGFile, self).__init__(name=name, mode=mode)
# Does not alias to Open so that we use our version of GFile to strip
# 'b' mode.
Open = GFile
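# Minimal usage sketch (the path is illustrative; GFile also accepts
# filesystem-specific URIs such as 'gs://bucket/file'):
#
#   with GFile('/tmp/example.txt', 'w') as f:
#       f.write('hello')
#   with GFile('/tmp/example.txt') as f:
#       contents = f.read()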
| apache-2.0 |
dhenyjarasandy/scrapy | scrapy/settings/__init__.py | 124 | 6388 | import six
import json
import copy
import warnings
from collections import MutableMapping
from importlib import import_module
from scrapy.utils.deprecate import create_deprecated_class
from scrapy.exceptions import ScrapyDeprecationWarning
from . import default_settings
SETTINGS_PRIORITIES = {
'default': 0,
'command': 10,
'project': 20,
'spider': 30,
'cmdline': 40,
}
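# Priority resolution sketch (setting name and values are illustrative):
# a later set() only wins when its priority is >= the current one.
#
#   settings = Settings()
#   settings.set('DOWNLOAD_DELAY', 2, priority='project')   # priority 20
#   settings.set('DOWNLOAD_DELAY', 9, priority='default')   # priority 0, ignored
#   settings['DOWNLOAD_DELAY']                              # -> 2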
class SettingsAttribute(object):
"""Class for storing data related to settings attributes.
This class is intended for internal usage, you should try Settings class
for settings configuration, not this one.
"""
def __init__(self, value, priority):
self.value = value
self.priority = priority
def set(self, value, priority):
"""Sets value if priority is higher or equal than current priority."""
if priority >= self.priority:
self.value = value
self.priority = priority
def __str__(self):
return "<SettingsAttribute value={self.value!r} " \
"priority={self.priority}>".format(self=self)
__repr__ = __str__
class Settings(object):
def __init__(self, values=None, priority='project'):
self.frozen = False
self.attributes = {}
self.setmodule(default_settings, priority='default')
if values is not None:
self.setdict(values, priority)
def __getitem__(self, opt_name):
value = None
if opt_name in self.attributes:
value = self.attributes[opt_name].value
return value
def get(self, name, default=None):
return self[name] if self[name] is not None else default
def getbool(self, name, default=False):
"""
True is: 1, '1', True
False is: 0, '0', False, None
"""
return bool(int(self.get(name, default)))
def getint(self, name, default=0):
return int(self.get(name, default))
def getfloat(self, name, default=0.0):
return float(self.get(name, default))
def getlist(self, name, default=None):
value = self.get(name, default or [])
if isinstance(value, six.string_types):
value = value.split(',')
return list(value)
def getdict(self, name, default=None):
value = self.get(name, default or {})
if isinstance(value, six.string_types):
value = json.loads(value)
return dict(value)
def set(self, name, value, priority='project'):
self._assert_mutability()
if isinstance(priority, six.string_types):
priority = SETTINGS_PRIORITIES[priority]
if name not in self.attributes:
self.attributes[name] = SettingsAttribute(value, priority)
else:
self.attributes[name].set(value, priority)
def setdict(self, values, priority='project'):
self._assert_mutability()
for name, value in six.iteritems(values):
self.set(name, value, priority)
def setmodule(self, module, priority='project'):
self._assert_mutability()
if isinstance(module, six.string_types):
module = import_module(module)
for key in dir(module):
if key.isupper():
self.set(key, getattr(module, key), priority)
def _assert_mutability(self):
if self.frozen:
raise TypeError("Trying to modify an immutable Settings object")
def copy(self):
return copy.deepcopy(self)
def freeze(self):
self.frozen = True
def frozencopy(self):
copy = self.copy()
copy.freeze()
return copy
@property
def overrides(self):
warnings.warn("`Settings.overrides` attribute is deprecated and won't "
"be supported in Scrapy 0.26, use "
"`Settings.set(name, value, priority='cmdline')` instead",
category=ScrapyDeprecationWarning, stacklevel=2)
try:
o = self._overrides
except AttributeError:
self._overrides = o = _DictProxy(self, 'cmdline')
return o
@property
def defaults(self):
warnings.warn("`Settings.defaults` attribute is deprecated and won't "
"be supported in Scrapy 0.26, use "
"`Settings.set(name, value, priority='default')` instead",
category=ScrapyDeprecationWarning, stacklevel=2)
try:
o = self._defaults
except AttributeError:
self._defaults = o = _DictProxy(self, 'default')
return o
class _DictProxy(MutableMapping):
def __init__(self, settings, priority):
self.o = {}
self.settings = settings
self.priority = priority
def __len__(self):
return len(self.o)
def __getitem__(self, k):
return self.o[k]
def __setitem__(self, k, v):
self.settings.set(k, v, priority=self.priority)
self.o[k] = v
def __delitem__(self, k):
del self.o[k]
    def __iter__(self):
        return iter(self.o)
class CrawlerSettings(Settings):
def __init__(self, settings_module=None, **kw):
Settings.__init__(self, **kw)
self.settings_module = settings_module
def __getitem__(self, opt_name):
if opt_name in self.overrides:
return self.overrides[opt_name]
if self.settings_module and hasattr(self.settings_module, opt_name):
return getattr(self.settings_module, opt_name)
if opt_name in self.defaults:
return self.defaults[opt_name]
return Settings.__getitem__(self, opt_name)
def __str__(self):
return "<CrawlerSettings module=%r>" % self.settings_module
CrawlerSettings = create_deprecated_class(
'CrawlerSettings', CrawlerSettings,
new_class_path='scrapy.settings.Settings')
def iter_default_settings():
"""Return the default settings as an iterator of (name, value) tuples"""
for name in dir(default_settings):
if name.isupper():
yield name, getattr(default_settings, name)
def overridden_settings(settings):
"""Return a dict of the settings that have been overridden"""
for name, defvalue in iter_default_settings():
value = settings[name]
if not isinstance(defvalue, dict) and value != defvalue:
yield name, value
| bsd-3-clause |
richardcs/ansible | lib/ansible/utils/listify.py | 100 | 1497 | # (c) 2014 Michael DeHaan, <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.six import string_types
from ansible.module_utils.common._collections_compat import Iterable
from ansible.template.safe_eval import safe_eval
__all__ = ['listify_lookup_plugin_terms']
def listify_lookup_plugin_terms(terms, templar, loader, fail_on_undefined=True, convert_bare=False):
if isinstance(terms, string_types):
terms = templar.template(terms.strip(), convert_bare=convert_bare, fail_on_undefined=fail_on_undefined)
else:
terms = templar.template(terms, fail_on_undefined=fail_on_undefined)
if isinstance(terms, string_types) or not isinstance(terms, Iterable):
terms = [terms]
return terms
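# Behaviour sketch (hedged; values are illustrative): after templating, a
# string result such as 'eth0' is wrapped as ['eth0'], a non-iterable scalar
# such as 42 as [42], while a templated list like ['a', 'b'] is returned as-is.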
| gpl-3.0 |
bmya/odoo_addons | smile_account_voucher_group/models/__init__.py | 3 | 1087 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 Smile (<http://www.smile.fr>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import payment_method
import account_move_line
import account_invoice
import account_voucher
import res_partner | agpl-3.0 |
kvar/ansible | contrib/inventory/vmware.py | 37 | 18538 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
VMware Inventory Script
=======================
Retrieve information about virtual machines from a vCenter server or
standalone ESX host. When `guests_only = false` (in the INI file), host systems
are also returned in addition to VMs.
This script will attempt to read configuration from an INI file with the same
base filename if present, or `vmware.ini` if not. It is possible to create
symlinks to the inventory script to support multiple configurations, e.g.:
* `vmware.py` (this script)
* `vmware.ini` (default configuration, will be read by `vmware.py`)
* `vmware_test.py` (symlink to `vmware.py`)
* `vmware_test.ini` (test configuration, will be read by `vmware_test.py`)
* `vmware_other.py` (symlink to `vmware.py`, will read `vmware.ini` since no
`vmware_other.ini` exists)
The path to an INI file may also be specified via the `VMWARE_INI` environment
variable, in which case the filename matching rules above will not apply.
Host and authentication parameters may be specified via the `VMWARE_HOST`,
`VMWARE_USER` and `VMWARE_PASSWORD` environment variables; these options will
take precedence over options present in the INI file. An INI file is not
required if these options are specified using environment variables.
'''
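# Example INI layout (a sketch assembled from the options this script reads;
# the [auth] values may instead come from the VMWARE_* environment variables):
#
#   [auth]
#   host = vcenter.example.com
#   user = ansible
#   password = secret
#   sslcheck = yes
#
#   [defaults]
#   guests_only = yes
#   clusters = cluster1, cluster2
#   cache_dir = ~/.cache/vmware
#   cache_max_age = 300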
from __future__ import print_function
import json
import logging
import optparse
import os
import ssl
import sys
import time
from ansible.module_utils.common._collections_compat import MutableMapping
from ansible.module_utils.six import integer_types, text_type, string_types
from ansible.module_utils.six.moves import configparser
# Disable logging messages triggered by pSphere/suds.
try:
from logging import NullHandler
except ImportError:
from logging import Handler
class NullHandler(Handler):
def emit(self, record):
pass
logging.getLogger('psphere').addHandler(NullHandler())
logging.getLogger('suds').addHandler(NullHandler())
from psphere.client import Client
from psphere.errors import ObjectNotFoundError
from psphere.managedobjects import HostSystem, VirtualMachine, ManagedObject, Network, ClusterComputeResource
from suds.sudsobject import Object as SudsObject
class VMwareInventory(object):
def __init__(self, guests_only=None):
self.config = configparser.SafeConfigParser()
if os.environ.get('VMWARE_INI', ''):
config_files = [os.environ['VMWARE_INI']]
else:
            config_files = [os.path.splitext(os.path.abspath(sys.argv[0]))[0] + '.ini', 'vmware.ini']
for config_file in config_files:
if os.path.exists(config_file):
self.config.read(config_file)
break
# Retrieve only guest VMs, or include host systems?
if guests_only is not None:
self.guests_only = guests_only
elif self.config.has_option('defaults', 'guests_only'):
self.guests_only = self.config.getboolean('defaults', 'guests_only')
else:
self.guests_only = True
# Read authentication information from VMware environment variables
# (if set), otherwise from INI file.
auth_host = os.environ.get('VMWARE_HOST')
if not auth_host and self.config.has_option('auth', 'host'):
auth_host = self.config.get('auth', 'host')
auth_user = os.environ.get('VMWARE_USER')
if not auth_user and self.config.has_option('auth', 'user'):
auth_user = self.config.get('auth', 'user')
auth_password = os.environ.get('VMWARE_PASSWORD')
if not auth_password and self.config.has_option('auth', 'password'):
auth_password = self.config.get('auth', 'password')
sslcheck = os.environ.get('VMWARE_SSLCHECK')
if not sslcheck and self.config.has_option('auth', 'sslcheck'):
sslcheck = self.config.get('auth', 'sslcheck')
if not sslcheck:
sslcheck = True
else:
if sslcheck.lower() in ['no', 'false']:
sslcheck = False
else:
sslcheck = True
# Limit the clusters being scanned
self.filter_clusters = os.environ.get('VMWARE_CLUSTERS')
if not self.filter_clusters and self.config.has_option('defaults', 'clusters'):
self.filter_clusters = self.config.get('defaults', 'clusters')
if self.filter_clusters:
self.filter_clusters = [x.strip() for x in self.filter_clusters.split(',') if x.strip()]
# Override certificate checks
if not sslcheck:
if hasattr(ssl, '_create_unverified_context'):
ssl._create_default_https_context = ssl._create_unverified_context
# Create the VMware client connection.
self.client = Client(auth_host, auth_user, auth_password)
def _put_cache(self, name, value):
'''
Saves the value to cache with the name given.
'''
if self.config.has_option('defaults', 'cache_dir'):
cache_dir = os.path.expanduser(self.config.get('defaults', 'cache_dir'))
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
cache_file = os.path.join(cache_dir, name)
with open(cache_file, 'w') as cache:
json.dump(value, cache)
def _get_cache(self, name, default=None):
'''
Retrieves the value from cache for the given name.
'''
if self.config.has_option('defaults', 'cache_dir'):
cache_dir = self.config.get('defaults', 'cache_dir')
cache_file = os.path.join(cache_dir, name)
if os.path.exists(cache_file):
if self.config.has_option('defaults', 'cache_max_age'):
cache_max_age = self.config.getint('defaults', 'cache_max_age')
else:
cache_max_age = 0
cache_stat = os.stat(cache_file)
if (cache_stat.st_mtime + cache_max_age) >= time.time():
with open(cache_file) as cache:
return json.load(cache)
return default
def _flatten_dict(self, d, parent_key='', sep='_'):
'''
Flatten nested dicts by combining keys with a separator. Lists with
only string items are included as is; any other lists are discarded.
'''
items = []
for k, v in d.items():
if k.startswith('_'):
continue
new_key = parent_key + sep + k if parent_key else k
if isinstance(v, MutableMapping):
items.extend(self._flatten_dict(v, new_key, sep).items())
elif isinstance(v, (list, tuple)):
if all([isinstance(x, string_types) for x in v]):
items.append((new_key, v))
            else:
                items.append((new_key, v))
return dict(items)
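    # Flattening sketch (illustrative input): {'a': {'b': 1}, 'c': ['x', 'y']}
    # with parent_key='vmware' becomes {'vmware_a_b': 1, 'vmware_c': ['x', 'y']},
    # while a list holding non-string items is dropped.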
def _get_obj_info(self, obj, depth=99, seen=None):
'''
Recursively build a data structure for the given pSphere object (depth
only applies to ManagedObject instances).
'''
seen = seen or set()
if isinstance(obj, ManagedObject):
try:
obj_unicode = text_type(getattr(obj, 'name'))
except AttributeError:
obj_unicode = ()
if obj in seen:
return obj_unicode
seen.add(obj)
if depth <= 0:
return obj_unicode
d = {}
for attr in dir(obj):
if attr.startswith('_'):
continue
try:
val = getattr(obj, attr)
obj_info = self._get_obj_info(val, depth - 1, seen)
if obj_info != ():
d[attr] = obj_info
except Exception as e:
pass
return d
elif isinstance(obj, SudsObject):
d = {}
for key, val in iter(obj):
obj_info = self._get_obj_info(val, depth, seen)
if obj_info != ():
d[key] = obj_info
return d
elif isinstance(obj, (list, tuple)):
            items = []
            for val in iter(obj):
                obj_info = self._get_obj_info(val, depth, seen)
                if obj_info != ():
                    items.append(obj_info)
            return items
elif isinstance(obj, (type(None), bool, float) + string_types + integer_types):
return obj
else:
return ()
def _get_host_info(self, host, prefix='vmware'):
'''
Return a flattened dict with info about the given host system.
'''
host_info = {
'name': host.name,
}
for attr in ('datastore', 'network', 'vm'):
try:
value = getattr(host, attr)
host_info['%ss' % attr] = self._get_obj_info(value, depth=0)
except AttributeError:
host_info['%ss' % attr] = []
for k, v in self._get_obj_info(host.summary, depth=0).items():
if isinstance(v, MutableMapping):
for k2, v2 in v.items():
host_info[k2] = v2
elif k != 'host':
host_info[k] = v
try:
host_info['ipAddress'] = host.config.network.vnic[0].spec.ip.ipAddress
except Exception as e:
print(e, file=sys.stderr)
host_info = self._flatten_dict(host_info, prefix)
if ('%s_ipAddress' % prefix) in host_info:
host_info['ansible_ssh_host'] = host_info['%s_ipAddress' % prefix]
return host_info
def _get_vm_info(self, vm, prefix='vmware'):
'''
Return a flattened dict with info about the given virtual machine.
'''
vm_info = {
'name': vm.name,
}
for attr in ('datastore', 'network'):
try:
value = getattr(vm, attr)
vm_info['%ss' % attr] = self._get_obj_info(value, depth=0)
except AttributeError:
vm_info['%ss' % attr] = []
try:
vm_info['resourcePool'] = self._get_obj_info(vm.resourcePool, depth=0)
except AttributeError:
vm_info['resourcePool'] = ''
try:
vm_info['guestState'] = vm.guest.guestState
except AttributeError:
vm_info['guestState'] = ''
for k, v in self._get_obj_info(vm.summary, depth=0).items():
if isinstance(v, MutableMapping):
for k2, v2 in v.items():
if k2 == 'host':
k2 = 'hostSystem'
vm_info[k2] = v2
elif k != 'vm':
vm_info[k] = v
vm_info = self._flatten_dict(vm_info, prefix)
if ('%s_ipAddress' % prefix) in vm_info:
vm_info['ansible_ssh_host'] = vm_info['%s_ipAddress' % prefix]
return vm_info
def _add_host(self, inv, parent_group, host_name):
'''
Add the host to the parent group in the given inventory.
'''
p_group = inv.setdefault(parent_group, [])
if isinstance(p_group, dict):
group_hosts = p_group.setdefault('hosts', [])
else:
group_hosts = p_group
if host_name not in group_hosts:
group_hosts.append(host_name)
def _add_child(self, inv, parent_group, child_group):
'''
Add a child group to a parent group in the given inventory.
'''
if parent_group != 'all':
p_group = inv.setdefault(parent_group, {})
if not isinstance(p_group, dict):
inv[parent_group] = {'hosts': p_group}
p_group = inv[parent_group]
group_children = p_group.setdefault('children', [])
if child_group not in group_children:
group_children.append(child_group)
inv.setdefault(child_group, [])
def get_inventory(self, meta_hostvars=True):
'''
Reads the inventory from cache or VMware API via pSphere.
'''
# Use different cache names for guests only vs. all hosts.
if self.guests_only:
cache_name = '__inventory_guests__'
else:
cache_name = '__inventory_all__'
inv = self._get_cache(cache_name, None)
if inv is not None:
return inv
inv = {'all': {'hosts': []}}
if meta_hostvars:
inv['_meta'] = {'hostvars': {}}
        default_group = os.path.splitext(os.path.basename(sys.argv[0]))[0]
if not self.guests_only:
if self.config.has_option('defaults', 'hw_group'):
hw_group = self.config.get('defaults', 'hw_group')
else:
hw_group = default_group + '_hw'
if self.config.has_option('defaults', 'vm_group'):
vm_group = self.config.get('defaults', 'vm_group')
else:
vm_group = default_group + '_vm'
if self.config.has_option('defaults', 'prefix_filter'):
prefix_filter = self.config.get('defaults', 'prefix_filter')
else:
prefix_filter = None
if self.filter_clusters:
# Loop through clusters and find hosts:
hosts = []
for cluster in ClusterComputeResource.all(self.client):
if cluster.name in self.filter_clusters:
for host in cluster.host:
hosts.append(host)
else:
# Get list of all physical hosts
hosts = HostSystem.all(self.client)
# Loop through physical hosts:
for host in hosts:
if not self.guests_only:
self._add_host(inv, 'all', host.name)
self._add_host(inv, hw_group, host.name)
host_info = self._get_host_info(host)
if meta_hostvars:
inv['_meta']['hostvars'][host.name] = host_info
self._put_cache(host.name, host_info)
# Loop through all VMs on physical host.
for vm in host.vm:
if prefix_filter:
if vm.name.startswith(prefix_filter):
continue
self._add_host(inv, 'all', vm.name)
self._add_host(inv, vm_group, vm.name)
vm_info = self._get_vm_info(vm)
if meta_hostvars:
inv['_meta']['hostvars'][vm.name] = vm_info
self._put_cache(vm.name, vm_info)
# Group by resource pool.
vm_resourcePool = vm_info.get('vmware_resourcePool', None)
if vm_resourcePool:
self._add_child(inv, vm_group, 'resource_pools')
self._add_child(inv, 'resource_pools', vm_resourcePool)
self._add_host(inv, vm_resourcePool, vm.name)
# Group by datastore.
for vm_datastore in vm_info.get('vmware_datastores', []):
self._add_child(inv, vm_group, 'datastores')
self._add_child(inv, 'datastores', vm_datastore)
self._add_host(inv, vm_datastore, vm.name)
# Group by network.
for vm_network in vm_info.get('vmware_networks', []):
self._add_child(inv, vm_group, 'networks')
self._add_child(inv, 'networks', vm_network)
self._add_host(inv, vm_network, vm.name)
# Group by guest OS.
vm_guestId = vm_info.get('vmware_guestId', None)
if vm_guestId:
self._add_child(inv, vm_group, 'guests')
self._add_child(inv, 'guests', vm_guestId)
self._add_host(inv, vm_guestId, vm.name)
# Group all VM templates.
vm_template = vm_info.get('vmware_template', False)
if vm_template:
self._add_child(inv, vm_group, 'templates')
self._add_host(inv, 'templates', vm.name)
self._put_cache(cache_name, inv)
return inv
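    # Output sketch (group and host names are illustrative):
    #   {'all': {'hosts': ['esx01', 'vm01']},
    #    'vmware_hw': ['esx01'],
    #    'vmware_vm': {'hosts': ['vm01'], 'children': ['datastores', ...]},
    #    '_meta': {'hostvars': {'vm01': {...}}}}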
def get_host(self, hostname):
'''
Read info about a specific host or VM from cache or VMware API.
'''
inv = self._get_cache(hostname, None)
if inv is not None:
return inv
if not self.guests_only:
try:
host = HostSystem.get(self.client, name=hostname)
inv = self._get_host_info(host)
except ObjectNotFoundError:
pass
if inv is None:
try:
vm = VirtualMachine.get(self.client, name=hostname)
inv = self._get_vm_info(vm)
except ObjectNotFoundError:
pass
if inv is not None:
self._put_cache(hostname, inv)
return inv or {}
def main():
parser = optparse.OptionParser()
parser.add_option('--list', action='store_true', dest='list',
default=False, help='Output inventory groups and hosts')
parser.add_option('--host', dest='host', default=None, metavar='HOST',
help='Output variables only for the given hostname')
# Additional options for use when running the script standalone, but never
# used by Ansible.
parser.add_option('--pretty', action='store_true', dest='pretty',
default=False, help='Output nicely-formatted JSON')
parser.add_option('--include-host-systems', action='store_true',
dest='include_host_systems', default=False,
help='Include host systems in addition to VMs')
parser.add_option('--no-meta-hostvars', action='store_false',
dest='meta_hostvars', default=True,
help='Exclude [\'_meta\'][\'hostvars\'] with --list')
options, args = parser.parse_args()
if options.include_host_systems:
vmware_inventory = VMwareInventory(guests_only=False)
else:
vmware_inventory = VMwareInventory()
if options.host is not None:
inventory = vmware_inventory.get_host(options.host)
else:
inventory = vmware_inventory.get_inventory(options.meta_hostvars)
json_kwargs = {}
if options.pretty:
json_kwargs.update({'indent': 4, 'sort_keys': True})
json.dump(inventory, sys.stdout, **json_kwargs)
if __name__ == '__main__':
main()
| gpl-3.0 |
trondeau/gnuradio-old | gr-filter/python/filter/design/fir_design.py | 11 | 15888 | # Copyright 2012 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import scipy.signal
from gnuradio import filter
from PyQt4 import QtGui
# Filter design functions using a window
def design_win_lpf(fs, gain, wintype, mainwin):
ret = True
pb,r = mainwin.gui.endofLpfPassBandEdit.text().toDouble()
ret = r and ret
sb,r = mainwin.gui.startofLpfStopBandEdit.text().toDouble()
ret = r and ret
atten,r = mainwin.gui.lpfStopBandAttenEdit.text().toDouble()
ret = r and ret
if(ret):
tb = sb - pb
try:
taps = filter.firdes.low_pass_2(gain, fs, pb, tb,
atten, wintype)
except RuntimeError, e:
reply = QtGui.QMessageBox.information(mainwin, "Runtime Error",
e.args[0], "&Ok")
return ([], [], ret)
else:
params = {"fs": fs, "gain": gain, "wintype": wintype,
"filttype": "lpf", "pbend": pb, "sbstart": sb,
"atten": atten, "ntaps": len(taps)}
return (taps, params, ret)
else:
return ([], [], ret)
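# Underlying call sketch (GUI-independent; the numbers are made up): a 48 kHz
# low-pass with a 10 kHz passband edge, 12 kHz stopband start (2 kHz
# transition width) and 60 dB attenuation reduces to
#
#   taps = filter.firdes.low_pass_2(1.0, 48000.0, 10000.0, 2000.0, 60.0,
#                                   filter.firdes.WIN_HAMMING)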
def design_win_bpf(fs, gain, wintype, mainwin):
ret = True
pb1,r = mainwin.gui.startofBpfPassBandEdit.text().toDouble()
ret = r and ret
pb2,r = mainwin.gui.endofBpfPassBandEdit.text().toDouble()
ret = r and ret
tb,r = mainwin.gui.bpfTransitionEdit.text().toDouble()
ret = r and ret
atten,r = mainwin.gui.bpfStopBandAttenEdit.text().toDouble()
ret = r and ret
if(ret):
try:
taps = filter.firdes.band_pass_2(gain, fs, pb1, pb2, tb,
atten, wintype)
except RuntimeError, e:
reply = QtGui.QMessageBox.information(mainwin, "Runtime Error",
e.args[0], "&Ok")
return ([], [], ret)
else:
params = {"fs": fs, "gain": gain, "wintype": wintype,
"filttype": "bpf", "pbstart": pb1, "pbend": pb2,
"tb": tb, "atten": atten, "ntaps": len(taps)}
return (taps,params,r)
else:
return ([],[],ret)
def design_win_cbpf(fs, gain, wintype, mainwin):
ret = True
pb1,r = mainwin.gui.startofBpfPassBandEdit.text().toDouble()
ret = r and ret
pb2,r = mainwin.gui.endofBpfPassBandEdit.text().toDouble()
ret = r and ret
tb,r = mainwin.gui.bpfTransitionEdit.text().toDouble()
ret = r and ret
atten,r = mainwin.gui.bpfStopBandAttenEdit.text().toDouble()
ret = r and ret
if(ret):
try:
taps = filter.firdes.complex_band_pass_2(gain, fs, pb1, pb2, tb,
atten, wintype)
except RuntimeError, e:
reply = QtGui.QMessageBox.information(mainwin, "Runtime Error",
e.args[0], "&Ok")
return ([], [], ret)
else:
params = {"fs": fs, "gain": gain, "wintype": wintype,
"filttype": "cbpf", "pbstart": pb1, "pbend": pb2,
"tb": tb, "atten": atten, "ntaps": len(taps)}
return (taps,params,r)
else:
return ([],[],ret)
def design_win_bnf(fs, gain, wintype, mainwin):
ret = True
pb1,r = mainwin.gui.startofBnfStopBandEdit.text().toDouble()
ret = r and ret
pb2,r = mainwin.gui.endofBnfStopBandEdit.text().toDouble()
ret = r and ret
tb,r = mainwin.gui.bnfTransitionEdit.text().toDouble()
ret = r and ret
atten,r = mainwin.gui.bnfStopBandAttenEdit.text().toDouble()
ret = r and ret
if(ret):
try:
taps = filter.firdes.band_reject_2(gain, fs, pb1, pb2, tb,
atten, wintype)
except RuntimeError, e:
reply = QtGui.QMessageBox.information(mainwin, "Runtime Error",
e.args[0], "&Ok")
return ([], [], ret)
else:
params = {"fs": fs, "gain": gain, "wintype": wintype,
"filttype": "bnf", "sbstart": pb1, "sbend": pb2,
"tb": tb, "atten": atten, "ntaps": len(taps)}
return (taps,params,r)
else:
return ([],[],ret)
def design_win_hpf(fs, gain, wintype, mainwin):
ret = True
sb,r = mainwin.gui.endofHpfStopBandEdit.text().toDouble()
ret = r and ret
pb,r = mainwin.gui.startofHpfPassBandEdit.text().toDouble()
ret = r and ret
atten,r = mainwin.gui.hpfStopBandAttenEdit.text().toDouble()
ret = r and ret
if(ret):
tb = pb - sb
try:
taps = filter.firdes.high_pass_2(gain, fs, pb, tb,
atten, wintype)
except RuntimeError, e:
            reply = QtGui.QMessageBox.information(mainwin, "Runtime Error",
                                                  e.args[0], "&Ok")
            return ([], [], ret)
else:
params = {"fs": fs, "gain": gain, "wintype": wintype,
"filttype": "hpf", "sbend": sb, "pbstart": pb,
"atten": atten, "ntaps": len(taps)}
return (taps,params,ret)
else:
return ([],[],ret)
def design_win_hb(fs, gain, wintype, mainwin):
ret = True
filtord,r = mainwin.gui.firhbordEdit.text().toDouble()
ret = r and ret
trwidth,r = mainwin.gui.firhbtrEdit.text().toDouble()
ret = r and ret
filtwin = { filter.firdes.WIN_HAMMING : 'hamming',
filter.firdes.WIN_HANN : 'hanning',
filter.firdes.WIN_BLACKMAN : 'blackman',
filter.firdes.WIN_RECTANGULAR: 'boxcar',
filter.firdes.WIN_KAISER: ('kaiser', 4.0),
filter.firdes.WIN_BLACKMAN_hARRIS: 'blackmanharris'}
if int(filtord) & 1:
reply = QtGui.QMessageBox.information(mainwin, "Filter order should be even",
"Filter order should be even","&Ok")
return ([],[],False)
if(ret):
taps = scipy.signal.firwin(int(filtord)+1, 0.5, window = filtwin[wintype])
taps[abs(taps) <= 1e-6] = 0.
params = {"fs": fs, "gain": gain, "wintype": wintype,
"filttype": "hb","ntaps": len(taps)}
return (taps,params,ret)
else:
return ([],[],ret)
def design_win_rrc(fs, gain, wintype, mainwin):
ret = True
sr,r = mainwin.gui.rrcSymbolRateEdit.text().toDouble()
ret = r and ret
alpha,r = mainwin.gui.rrcAlphaEdit.text().toDouble()
ret = r and ret
ntaps,r = mainwin.gui.rrcNumTapsEdit.text().toInt()
ret = r and ret
if(ret):
try:
taps = filter.firdes.root_raised_cosine(gain, fs, sr,
alpha, ntaps)
except RuntimeError, e:
            reply = QtGui.QMessageBox.information(mainwin, "Runtime Error",
                                                  e.args[0], "&Ok")
            return ([], [], ret)
else:
params = {"fs": fs, "gain": gain, "wintype": wintype,
"filttype": "rrc", "srate": sr, "alpha": alpha,
"ntaps": ntaps}
return (taps,params,ret)
else:
return ([],[],ret)
def design_win_gaus(fs, gain, wintype, mainwin):
ret = True
sr,r = mainwin.gui.gausSymbolRateEdit.text().toDouble()
ret = r and ret
bt,r = mainwin.gui.gausBTEdit.text().toDouble()
ret = r and ret
ntaps,r = mainwin.gui.gausNumTapsEdit.text().toInt()
ret = r and ret
if(ret):
spb = fs / sr
try:
taps = filter.firdes.gaussian(gain, spb, bt, ntaps)
except RuntimeError, e:
            reply = QtGui.QMessageBox.information(mainwin, "Runtime Error",
                                                  e.args[0], "&Ok")
            return ([], [], ret)
else:
params = {"fs": fs, "gain": gain, "wintype": wintype,
"filttype": "gaus", "srate": sr, "bt": bt,
"ntaps": ntaps}
return (taps,params,ret)
else:
return ([],[],ret)
# Design Functions for Equiripple Filters
def design_opt_lpf(fs, gain, mainwin):
ret = True
pb,r = mainwin.gui.endofLpfPassBandEdit.text().toDouble()
ret = r and ret
sb,r = mainwin.gui.startofLpfStopBandEdit.text().toDouble()
ret = r and ret
atten,r = mainwin.gui.lpfStopBandAttenEdit.text().toDouble()
ret = r and ret
ripple,r = mainwin.gui.lpfPassBandRippleEdit.text().toDouble()
ret = r and ret
if(ret):
try:
taps = filter.optfir.low_pass(gain, fs, pb, sb,
ripple, atten)
except RuntimeError, e:
reply = QtGui.QMessageBox.information(mainwin, "Filter did not converge",
e.args[0], "&Ok")
return ([],[],False)
else:
params = {"fs": fs, "gain": gain, "wintype": mainwin.EQUIRIPPLE_FILT,
"filttype": "lpf", "pbend": pb, "sbstart": sb,
"atten": atten, "ripple": ripple, "ntaps": len(taps)}
return (taps, params, ret)
else:
return ([], [], ret)
def design_opt_bpf(fs, gain, mainwin):
ret = True
pb1,r = mainwin.gui.startofBpfPassBandEdit.text().toDouble()
ret = r and ret
pb2,r = mainwin.gui.endofBpfPassBandEdit.text().toDouble()
ret = r and ret
tb,r = mainwin.gui.bpfTransitionEdit.text().toDouble()
ret = r and ret
atten,r = mainwin.gui.bpfStopBandAttenEdit.text().toDouble()
ret = r and ret
ripple,r = mainwin.gui.bpfPassBandRippleEdit.text().toDouble()
ret = r and ret
if(r):
sb1 = pb1 - tb
sb2 = pb2 + tb
try:
taps = filter.optfir.band_pass(gain, fs, sb1, pb1, pb2, sb2,
ripple, atten)
except RuntimeError, e:
reply = QtGui.QMessageBox.information(mainwin, "Filter did not converge",
e.args[0], "&Ok")
return ([],[],False)
else:
params = {"fs": fs, "gain": gain, "wintype": mainwin.EQUIRIPPLE_FILT,
"filttype": "bpf", "pbstart": pb1, "pbend": pb2,
"tb": tb, "atten": atten, "ripple": ripple,
"ntaps": len(taps)}
return (taps,params,r)
else:
return ([],[],r)
def design_opt_cbpf(fs, gain, mainwin):
ret = True
pb1,r = mainwin.gui.startofBpfPassBandEdit.text().toDouble()
ret = r and ret
pb2,r = mainwin.gui.endofBpfPassBandEdit.text().toDouble()
ret = r and ret
tb,r = mainwin.gui.bpfTransitionEdit.text().toDouble()
ret = r and ret
atten,r = mainwin.gui.bpfStopBandAttenEdit.text().toDouble()
ret = r and ret
ripple,r = mainwin.gui.bpfPassBandRippleEdit.text().toDouble()
ret = r and ret
if(r):
sb1 = pb1 - tb
sb2 = pb2 + tb
try:
taps = filter.optfir.complex_band_pass(gain, fs, sb1, pb1, pb2, sb2,
ripple, atten)
except RuntimeError, e:
reply = QtGui.QMessageBox.information(mainwin, "Filter did not converge",
e.args[0], "&Ok")
return ([],[],False)
else:
params = {"fs": fs, "gain": gain, "wintype": self.EQUIRIPPLE_FILT,
"filttype": "cbpf", "pbstart": pb1, "pbend": pb2,
"tb": tb, "atten": atten, "ripple": ripple,
"ntaps": len(taps)}
return (taps,params,r)
else:
return ([],[],r)
def design_opt_bnf(fs, gain, mainwin):
ret = True
sb1,r = mainwin.gui.startofBnfStopBandEdit.text().toDouble()
ret = r and ret
sb2,r = mainwin.gui.endofBnfStopBandEdit.text().toDouble()
ret = r and ret
tb,r = mainwin.gui.bnfTransitionEdit.text().toDouble()
ret = r and ret
atten,r = mainwin.gui.bnfStopBandAttenEdit.text().toDouble()
ret = r and ret
ripple,r = mainwin.gui.bnfPassBandRippleEdit.text().toDouble()
ret = r and ret
if(ret):
pb1 = sb1 - tb
pb2 = sb2 + tb
try:
taps = filter.optfir.band_reject(gain, fs, pb1, sb1, sb2, pb2,
ripple, atten)
except RuntimeError, e:
reply = QtGui.QMessageBox.information(mainwin, "Filter did not converge",
e.args[0], "&Ok")
return ([],[],False)
else:
params = {"fs": fs, "gain": gain, "wintype": mainwin.EQUIRIPPLE_FILT,
"filttype": "bnf", "sbstart": pb1, "sbend": pb2,
"tb": tb, "atten": atten, "ripple": ripple,
"ntaps": len(taps)}
return (taps,params,ret)
else:
return ([],[],ret)
def design_opt_hb(fs, gain, mainwin):
ret = True
filtord,r = mainwin.gui.firhbordEdit.text().toDouble()
ret = r and ret
trwidth,r = mainwin.gui.firhbtrEdit.text().toDouble()
ret = r and ret
if int(filtord) & 1:
reply = QtGui.QMessageBox.information(mainwin, "Filter order should be even",
"Filter order should be even","&Ok")
return ([],[],False)
if(ret):
try:
bands = [0,.25 - (trwidth/fs), .25 + (trwidth/fs), 0.5]
taps = scipy.signal.remez(int(filtord)+1, bands, [1,0], [1,1])
taps[abs(taps) <= 1e-6] = 0.
except RuntimeError, e:
reply = QtGui.QMessageBox.information(mainwin, "Filter Design Error",
e.args[0], "&Ok")
return ([],[],False)
else:
            params = {"fs": fs, "gain": gain, "wintype": mainwin.EQUIRIPPLE_FILT,
"filttype": "hb", "ntaps": len(taps)}
return (taps,params,ret)
else:
return ([],[],ret)
def design_opt_hpf(fs, gain, mainwin):
ret = True
sb,r = mainwin.gui.endofHpfStopBandEdit.text().toDouble()
ret = r and ret
pb,r = mainwin.gui.startofHpfPassBandEdit.text().toDouble()
ret = r and ret
atten,r = mainwin.gui.hpfStopBandAttenEdit.text().toDouble()
ret = r and ret
ripple,r = mainwin.gui.hpfPassBandRippleEdit.text().toDouble()
ret = r and ret
if(ret):
try:
taps = filter.optfir.high_pass(gain, fs, sb, pb,
atten, ripple)
except RuntimeError, e:
reply = QtGui.QMessageBox.information(mainwin, "Filter did not converge",
e.args[0], "&Ok")
return ([],[],False)
else:
            params = {"fs": fs, "gain": gain, "wintype": mainwin.EQUIRIPPLE_FILT,
"filttype": "hpf", "sbend": sb, "pbstart": pb,
"atten": atten, "ripple": ripple,
"ntaps": len(taps)}
return (taps,params,ret)
else:
return ([],[],ret)
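# A minimal, GUI-free sketch of the optfir call the designers above wrap
# (illustrative numbers only; assumes gnuradio's filter module is imported
# as it is elsewhere in this file):
#
#     taps = filter.optfir.low_pass(1.0,      # gain
#                                   32000.0,  # sampling rate (Hz)
#                                   4000.0,   # end of passband (Hz)
#                                   6000.0,   # start of stopband (Hz)
#                                   0.1,      # passband ripple (dB)
#                                   60.0)     # stopband attenuation (dB)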
| gpl-3.0 |
wimmuskee/mangrove | mangrove_libs/common.py | 1 | 3122 | # -*- coding: utf-8 -*-
"""
This module contains common functions
for the mangrove crawler.
Wim Muskee, 2013-2018
[email protected]
License: GPL-3
"""
def getConfig(configfile,section):
import json
with open(configfile, "r") as f:
configdata = json.loads(f.read())
config = {}
config.update(configdata["common"])
config.update(configdata[section])
config["configuration"] = section
return config
""" Dynamically import a method """
def import_from(module, name):
import importlib
module = __import__(module, fromlist=[name])
return getattr(module, name)
""" Download a file using chunks to deal with large files. Disable default compression handling. """
def downloadFile(httpProxy,source,dest):
import requests
headers = {"Accept-Encoding": "identity"}
r = requests.get(source, stream=True, proxies=httpProxy, headers=headers)
with open(dest, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
def gzUnpack(source,dest):
import gzip
f = gzip.open(source,'rb')
output = open(dest,'wb')
output.write(f.read())
output.close()
def bz2Unpack(source,dest):
from bz2 import BZ2File
f = BZ2File( source, 'r')
output = open(dest,'wb')
output.write(f.read())
output.close()
def checkLocal():
	from os import path, getcwd
	if path.isdir( getcwd() + "/share" ):
		return True
	else:
		return False
def getHttplib2Proxy(proxy_host,proxy_port):
import httplib2
import socks
return httplib2.Http(proxy_info = httplib2.ProxyInfo(socks.PROXY_TYPE_HTTP, proxy_host, int(proxy_port), False))
# not actively used, keeping it however, just in case ...
def getUrllib2Proxy(proxy_host,proxy_port):
import urllib2
return urllib2.ProxyHandler({"http": proxy_host + ":" + proxy_port})
def getRequestsProxy(proxy_host,proxy_port):
return { "http": proxy_host + ":" + proxy_port }
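# Illustrative: getRequestsProxy("10.0.0.1", "8080") returns
# {"http": "10.0.0.1:8080"}, the mapping expected by requests' proxies= argument.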
""" Return path of program if exists, http://stackoverflow.com/a/377028/426990 """
def which(program):
import os
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
""" Quit when one of the programs is not found """
def checkPrograms(programlist):
for p in programlist:
if not which(p):
raise RuntimeError( "executable does not exist: " + p )
""" return simple logger object """
def getLogger(application):
import logging
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.DEBUG)
return logging.getLogger(application)
""" from for instance 2012-10-23T16:39:06Z """
def getTimestampFromZuluDT(dt):
from datetime import datetime
return int((datetime.strptime( dt, "%Y-%m-%dT%H:%M:%SZ") - datetime(1970, 1, 1)).total_seconds())
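# Illustrative: getTimestampFromZuluDT("2012-10-23T16:39:06Z") -> 1351010346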
""" pretty printer for debug """
def prettyPrint(data):
import pprint
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(data)
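if __name__ == "__main__":
	# Minimal self-check sketch (illustrative; not used by the crawler itself).
	prettyPrint({
		"python": which("python"),
		"ts": getTimestampFromZuluDT("2012-10-23T16:39:06Z")})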
| gpl-3.0 |
akd001/RPi | testRainbow.py | 1 | 1361 | import os, sys, subprocess, time
from rainbowhatwrapper.handlers import *
# Button state flags (currently unused placeholders)
BUTTON_A_STATE = False
BUTTON_B_STATE = False
BUTTON_C_STATE = False
def showUptime():
while True:
test = subprocess.Popen(["uptime"], stdout=subprocess.PIPE)
output = test.communicate()[0].split()[0].split(':')
hour = output[0]
mins = output[1]
RhDisplayHandler.printOnDisplay(hour + mins)
time.sleep(15)
def main():
RhPixelHandler.setPixel(0, 1, 1, 1)
RhPixelHandler.setPixel(1, 1, 1, 1)
RhPixelHandler.setPixel(2, 1, 1, 1)
RhPixelHandler.setPixel(3, 1, 1, 1)
RhPixelHandler.setPixel(4, 1, 1, 1)
RhPixelHandler.setPixel(5, 1, 1, 1)
RhPixelHandler.setPixel(6, 1, 1, 1)
RhBuzzerHandler.playBeginning()
# showUptime()
# song = [68, 68, 68, 69, 70, 70, 69, 70, 71, 72]
# for note in song:
# RhBuzzerHandler.playMidi(note, 0.5)
# time.sleep(1)
# RhBuzzerHandler.play(261, 1)
# print (RhWeatherHandler.getTemperature())
# print (RhWeatherHandler.getPressure())
RhDisplayHandler.printOnDisplay("hello.world.")
if __name__ == "__main__":
try:
main()
except KeyboardInterrupt:
print ('Interrupted!')
try:
RhDisplayHandler.clear()
sys.exit(0)
except SystemExit:
os._exit(0)
| mit |
IntelLabs/numba | numba/tests/test_sort.py | 7 | 33874 | import copy
import itertools
import math
import random
import sys
import numpy as np
from numba.core.compiler import compile_isolated, Flags
from numba import jit, njit
from numba.core import types, utils, errors
import unittest
from numba import testing
from numba.tests.support import TestCase, MemoryLeakMixin, tag
from numba.misc.quicksort import make_py_quicksort, make_jit_quicksort
from numba.misc.mergesort import make_jit_mergesort
from numba.misc.timsort import make_py_timsort, make_jit_timsort, MergeRun
def make_temp_list(keys, n):
return [keys[0]] * n
def make_temp_array(keys, n):
return np.empty(n, keys.dtype)
py_list_timsort = make_py_timsort(make_temp_list)
py_array_timsort = make_py_timsort(make_temp_array)
jit_list_timsort = make_jit_timsort(make_temp_list)
jit_array_timsort = make_jit_timsort(make_temp_array)
py_quicksort = make_py_quicksort()
jit_quicksort = make_jit_quicksort()
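# Illustrative use of the factory products above (mirroring the tests below):
#   keys = np.array([3, 1, 2], dtype=np.int32)
#   jit_array_timsort.run_timsort(keys)    # sorts in place, stable
#   jit_quicksort.run_quicksort(keys)      # sorts in place, not stable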
def sort_usecase(val):
val.sort()
def argsort_usecase(val):
return val.argsort()
def argsort_kind_usecase(val, is_stable=False):
if is_stable:
return val.argsort(kind='mergesort')
else:
return val.argsort(kind='quicksort')
def sorted_usecase(val):
return sorted(val)
def sorted_reverse_usecase(val, b):
return sorted(val, reverse=b)
def np_sort_usecase(val):
return np.sort(val)
def np_argsort_usecase(val):
return np.argsort(val)
def np_argsort_kind_usecase(val, is_stable=False):
if is_stable:
return np.argsort(val, kind='mergesort')
else:
return np.argsort(val, kind='quicksort')
def list_sort_usecase(n):
np.random.seed(42)
l = []
for i in range(n):
l.append(np.random.random())
ll = l[:]
ll.sort()
return l, ll
def list_sort_reverse_usecase(n, b):
np.random.seed(42)
l = []
for i in range(n):
l.append(np.random.random())
ll = l[:]
ll.sort(reverse=b)
return l, ll
class BaseSortingTest(object):
def random_list(self, n, offset=10):
random.seed(42)
l = list(range(offset, offset + n))
random.shuffle(l)
return l
def sorted_list(self, n, offset=10):
return list(range(offset, offset + n))
def revsorted_list(self, n, offset=10):
return list(range(offset, offset + n))[::-1]
def initially_sorted_list(self, n, m=None, offset=10):
if m is None:
m = n // 2
l = self.sorted_list(m, offset)
l += self.random_list(n - m, offset=l[-1] + offset)
return l
def duprandom_list(self, n, factor=None, offset=10):
random.seed(42)
if factor is None:
factor = int(math.sqrt(n))
l = (list(range(offset, offset + (n // factor) + 1)) * (factor + 1))[:n]
assert len(l) == n
random.shuffle(l)
return l
def dupsorted_list(self, n, factor=None, offset=10):
if factor is None:
factor = int(math.sqrt(n))
l = (list(range(offset, offset + (n // factor) + 1)) * (factor + 1))[:n]
assert len(l) == n, (len(l), n)
l.sort()
return l
def assertSorted(self, orig, result):
self.assertEqual(len(result), len(orig))
# sorted() returns a list, so make sure we compare to another list
self.assertEqual(list(result), sorted(orig))
def assertSortedValues(self, orig, orig_values, result, result_values):
self.assertEqual(len(result), len(orig))
self.assertEqual(list(result), sorted(orig))
zip_sorted = sorted(zip(orig, orig_values), key=lambda x: x[0])
zip_result = list(zip(result, result_values))
self.assertEqual(zip_sorted, zip_result)
# Check stability
for i in range(len(zip_result) - 1):
(k1, v1), (k2, v2) = zip_result[i], zip_result[i + 1]
if k1 == k2:
# Assuming values are unique, which is enforced by the tests
self.assertLess(orig_values.index(v1), orig_values.index(v2))
def fibo(self):
a = 1
b = 1
while True:
yield a
a, b = b, a + b
def make_sample_sorted_lists(self, n):
lists = []
for offset in (20, 120):
lists.append(self.sorted_list(n, offset))
lists.append(self.dupsorted_list(n, offset))
return lists
def make_sample_lists(self, n):
lists = []
for offset in (20, 120):
lists.append(self.sorted_list(n, offset))
lists.append(self.dupsorted_list(n, offset))
lists.append(self.revsorted_list(n, offset))
lists.append(self.duprandom_list(n, offset))
return lists
class BaseTimsortTest(BaseSortingTest):
def merge_init(self, keys):
f = self.timsort.merge_init
return f(keys)
def test_binarysort(self):
n = 20
def check(l, n, start=0):
res = self.array_factory(l)
f(res, res, 0, n, start)
self.assertSorted(l, res)
f = self.timsort.binarysort
l = self.sorted_list(n)
check(l, n)
check(l, n, n//2)
l = self.revsorted_list(n)
check(l, n)
l = self.initially_sorted_list(n, n//2)
check(l, n)
check(l, n, n//2)
l = self.revsorted_list(n)
check(l, n)
l = self.random_list(n)
check(l, n)
l = self.duprandom_list(n)
check(l, n)
def test_binarysort_with_values(self):
n = 20
v = list(range(100, 100+n))
def check(l, n, start=0):
res = self.array_factory(l)
res_v = self.array_factory(v)
f(res, res_v, 0, n, start)
self.assertSortedValues(l, v, res, res_v)
f = self.timsort.binarysort
l = self.sorted_list(n)
check(l, n)
check(l, n, n//2)
l = self.revsorted_list(n)
check(l, n)
l = self.initially_sorted_list(n, n//2)
check(l, n)
check(l, n, n//2)
l = self.revsorted_list(n)
check(l, n)
l = self.random_list(n)
check(l, n)
l = self.duprandom_list(n)
check(l, n)
def test_count_run(self):
n = 16
f = self.timsort.count_run
def check(l, lo, hi):
n, desc = f(self.array_factory(l), lo, hi)
# Fully check invariants
if desc:
for k in range(lo, lo + n - 1):
a, b = l[k], l[k + 1]
self.assertGreater(a, b)
if lo + n < hi:
self.assertLessEqual(l[lo + n - 1], l[lo + n])
else:
for k in range(lo, lo + n - 1):
a, b = l[k], l[k + 1]
self.assertLessEqual(a, b)
if lo + n < hi:
self.assertGreater(l[lo + n - 1], l[lo + n], l)
l = self.sorted_list(n, offset=100)
check(l, 0, n)
check(l, 1, n - 1)
check(l, 1, 2)
l = self.revsorted_list(n, offset=100)
check(l, 0, n)
check(l, 1, n - 1)
check(l, 1, 2)
l = self.random_list(n, offset=100)
for i in range(len(l) - 1):
check(l, i, n)
l = self.duprandom_list(n, offset=100)
for i in range(len(l) - 1):
check(l, i, n)
def test_gallop_left(self):
n = 20
f = self.timsort.gallop_left
def check(l, key, start, stop, hint):
k = f(key, l, start, stop, hint)
# Fully check invariants
self.assertGreaterEqual(k, start)
self.assertLessEqual(k, stop)
if k > start:
self.assertLess(l[k - 1], key)
if k < stop:
self.assertGreaterEqual(l[k], key)
def check_all_hints(l, key, start, stop):
for hint in range(start, stop):
check(l, key, start, stop, hint)
def check_sorted_list(l):
l = self.array_factory(l)
for key in (l[5], l[15], l[0], -1000, l[-1], 1000):
check_all_hints(l, key, 0, n)
check_all_hints(l, key, 1, n - 1)
check_all_hints(l, key, 8, n - 8)
l = self.sorted_list(n, offset=100)
check_sorted_list(l)
l = self.dupsorted_list(n, offset=100)
check_sorted_list(l)
def test_gallop_right(self):
n = 20
f = self.timsort.gallop_right
def check(l, key, start, stop, hint):
k = f(key, l, start, stop, hint)
# Fully check invariants
self.assertGreaterEqual(k, start)
self.assertLessEqual(k, stop)
if k > start:
self.assertLessEqual(l[k - 1], key)
if k < stop:
self.assertGreater(l[k], key)
def check_all_hints(l, key, start, stop):
for hint in range(start, stop):
check(l, key, start, stop, hint)
def check_sorted_list(l):
l = self.array_factory(l)
for key in (l[5], l[15], l[0], -1000, l[-1], 1000):
check_all_hints(l, key, 0, n)
check_all_hints(l, key, 1, n - 1)
check_all_hints(l, key, 8, n - 8)
l = self.sorted_list(n, offset=100)
check_sorted_list(l)
l = self.dupsorted_list(n, offset=100)
check_sorted_list(l)
def test_merge_compute_minrun(self):
f = self.timsort.merge_compute_minrun
for i in range(0, 64):
self.assertEqual(f(i), i)
for i in range(6, 63):
if 2**i > sys.maxsize:
break
self.assertEqual(f(2**i), 32)
for i in self.fibo():
if i < 64:
continue
if i >= sys.maxsize:
break
k = f(i)
self.assertGreaterEqual(k, 32)
self.assertLessEqual(k, 64)
if i > 500:
# i/k is close to, but strictly less than, an exact power of 2
quot = i // k
p = 2 ** utils.bit_length(quot)
self.assertLess(quot, p)
self.assertGreaterEqual(quot, 0.9 * p)
def check_merge_lo_hi(self, func, a, b):
na = len(a)
nb = len(b)
# Add sentinels at start and end, to check they weren't moved
orig_keys = [42] + a + b + [-42]
keys = self.array_factory(orig_keys)
ms = self.merge_init(keys)
ssa = 1
ssb = ssa + na
#new_ms = func(ms, keys, [], ssa, na, ssb, nb)
new_ms = func(ms, keys, keys, ssa, na, ssb, nb)
self.assertEqual(keys[0], orig_keys[0])
self.assertEqual(keys[-1], orig_keys[-1])
self.assertSorted(orig_keys[1:-1], keys[1:-1])
# Check the MergeState result
self.assertGreaterEqual(len(new_ms.keys), len(ms.keys))
self.assertGreaterEqual(len(new_ms.values), len(ms.values))
self.assertIs(new_ms.pending, ms.pending)
self.assertGreaterEqual(new_ms.min_gallop, 1)
def test_merge_lo_hi(self):
f_lo = self.timsort.merge_lo
f_hi = self.timsort.merge_hi
# The larger sizes exercise galloping
for (na, nb) in [(12, 16), (40, 40), (100, 110), (1000, 1100)]:
for a, b in itertools.product(self.make_sample_sorted_lists(na),
self.make_sample_sorted_lists(nb)):
self.check_merge_lo_hi(f_lo, a, b)
self.check_merge_lo_hi(f_hi, b, a)
def check_merge_at(self, a, b):
f = self.timsort.merge_at
# Prepare the array to be sorted
na = len(a)
nb = len(b)
# Add sentinels at start and end, to check they weren't moved
orig_keys = [42] + a + b + [-42]
ssa = 1
ssb = ssa + na
stack_sentinel = MergeRun(-42, -42)
def run_merge_at(ms, keys, i):
new_ms = f(ms, keys, keys, i)
self.assertEqual(keys[0], orig_keys[0])
self.assertEqual(keys[-1], orig_keys[-1])
self.assertSorted(orig_keys[1:-1], keys[1:-1])
# Check stack state
self.assertIs(new_ms.pending, ms.pending)
self.assertEqual(ms.pending[i], (ssa, na + nb))
self.assertEqual(ms.pending[0], stack_sentinel)
return new_ms
# First check with i == len(stack) - 2
keys = self.array_factory(orig_keys)
ms = self.merge_init(keys)
        # Push sentinel on stack, to check it wasn't touched
ms = self.timsort.merge_append(ms, stack_sentinel)
i = ms.n
ms = self.timsort.merge_append(ms, MergeRun(ssa, na))
ms = self.timsort.merge_append(ms, MergeRun(ssb, nb))
ms = run_merge_at(ms, keys, i)
self.assertEqual(ms.n, i + 1)
# Now check with i == len(stack) - 3
keys = self.array_factory(orig_keys)
ms = self.merge_init(keys)
        # Push sentinel on stack, to check it wasn't touched
ms = self.timsort.merge_append(ms, stack_sentinel)
i = ms.n
ms = self.timsort.merge_append(ms, MergeRun(ssa, na))
ms = self.timsort.merge_append(ms, MergeRun(ssb, nb))
# A last run (trivial here)
last_run = MergeRun(ssb + nb, 1)
ms = self.timsort.merge_append(ms, last_run)
ms = run_merge_at(ms, keys, i)
self.assertEqual(ms.n, i + 2)
self.assertEqual(ms.pending[ms.n - 1], last_run)
def test_merge_at(self):
# The larger sizes exercise galloping
for (na, nb) in [(12, 16), (40, 40), (100, 110), (500, 510)]:
for a, b in itertools.product(self.make_sample_sorted_lists(na),
self.make_sample_sorted_lists(nb)):
self.check_merge_at(a, b)
self.check_merge_at(b, a)
def test_merge_force_collapse(self):
f = self.timsort.merge_force_collapse
# Test with runs of ascending sizes, then descending sizes
sizes_list = [(8, 10, 15, 20)]
sizes_list.append(sizes_list[0][::-1])
for sizes in sizes_list:
for chunks in itertools.product(*(self.make_sample_sorted_lists(n)
for n in sizes)):
# Create runs of the given sizes
orig_keys = sum(chunks, [])
keys = self.array_factory(orig_keys)
ms = self.merge_init(keys)
pos = 0
for c in chunks:
ms = self.timsort.merge_append(ms, MergeRun(pos, len(c)))
pos += len(c)
# Sanity check
self.assertEqual(sum(ms.pending[ms.n - 1]), len(keys))
# Now merge the runs
ms = f(ms, keys, keys)
# Remaining run is the whole list
self.assertEqual(ms.n, 1)
self.assertEqual(ms.pending[0], MergeRun(0, len(keys)))
# The list is now sorted
self.assertSorted(orig_keys, keys)
def test_run_timsort(self):
f = self.timsort.run_timsort
for size_factor in (1, 10):
# Make lists to be sorted from three chunks of different kinds.
sizes = (15, 30, 20)
all_lists = [self.make_sample_lists(n * size_factor) for n in sizes]
for chunks in itertools.product(*all_lists):
orig_keys = sum(chunks, [])
keys = self.array_factory(orig_keys)
f(keys)
# The list is now sorted
self.assertSorted(orig_keys, keys)
def test_run_timsort_with_values(self):
# Run timsort, but also with a values array
f = self.timsort.run_timsort_with_values
for size_factor in (1, 5):
chunk_size = 80 * size_factor
a = self.dupsorted_list(chunk_size)
b = self.duprandom_list(chunk_size)
c = self.revsorted_list(chunk_size)
orig_keys = a + b + c
orig_values = list(range(1000, 1000 + len(orig_keys)))
keys = self.array_factory(orig_keys)
values = self.array_factory(orig_values)
f(keys, values)
# This checks sort stability
self.assertSortedValues(orig_keys, orig_values, keys, values)
class TestTimsortPurePython(BaseTimsortTest, TestCase):
timsort = py_list_timsort
# Much faster than a Numpy array in pure Python
array_factory = list
class TestTimsortArraysPurePython(BaseTimsortTest, TestCase):
timsort = py_array_timsort
def array_factory(self, lst):
return np.array(lst, dtype=np.int32)
class JITTimsortMixin(object):
timsort = jit_array_timsort
test_merge_at = None
test_merge_force_collapse = None
def wrap_with_mergestate(self, timsort, func, _cache={}):
"""
Wrap *func* into another compiled function inserting a runtime-created
mergestate as the first function argument.
"""
key = timsort, func
if key in _cache:
return _cache[key]
merge_init = timsort.merge_init
@timsort.compile
def wrapper(keys, values, *args):
ms = merge_init(keys)
res = func(ms, keys, values, *args)
return res
_cache[key] = wrapper
return wrapper
class TestTimsortArrays(JITTimsortMixin, BaseTimsortTest, TestCase):
def array_factory(self, lst):
return np.array(lst, dtype=np.int32)
def check_merge_lo_hi(self, func, a, b):
na = len(a)
nb = len(b)
func = self.wrap_with_mergestate(self.timsort, func)
# Add sentinels at start and end, to check they weren't moved
orig_keys = [42] + a + b + [-42]
keys = self.array_factory(orig_keys)
ssa = 1
ssb = ssa + na
new_ms = func(keys, keys, ssa, na, ssb, nb)
self.assertEqual(keys[0], orig_keys[0])
self.assertEqual(keys[-1], orig_keys[-1])
self.assertSorted(orig_keys[1:-1], keys[1:-1])
class BaseQuicksortTest(BaseSortingTest):
# NOTE these tests assume a non-argsort quicksort.
def test_insertion_sort(self):
n = 20
def check(l, n):
res = self.array_factory([9999] + l + [-9999])
f(res, res, 1, n)
self.assertEqual(res[0], 9999)
self.assertEqual(res[-1], -9999)
self.assertSorted(l, res[1:-1])
f = self.quicksort.insertion_sort
l = self.sorted_list(n)
check(l, n)
l = self.revsorted_list(n)
check(l, n)
l = self.initially_sorted_list(n, n//2)
check(l, n)
l = self.revsorted_list(n)
check(l, n)
l = self.random_list(n)
check(l, n)
l = self.duprandom_list(n)
check(l, n)
def test_partition(self):
n = 20
def check(l, n):
res = self.array_factory([9999] + l + [-9999])
index = f(res, res, 1, n)
self.assertEqual(res[0], 9999)
self.assertEqual(res[-1], -9999)
pivot = res[index]
for i in range(1, index):
self.assertLessEqual(res[i], pivot)
for i in range(index + 1, n):
self.assertGreaterEqual(res[i], pivot)
f = self.quicksort.partition
l = self.sorted_list(n)
check(l, n)
l = self.revsorted_list(n)
check(l, n)
l = self.initially_sorted_list(n, n//2)
check(l, n)
l = self.revsorted_list(n)
check(l, n)
l = self.random_list(n)
check(l, n)
l = self.duprandom_list(n)
check(l, n)
def test_partition3(self):
# Test the unused partition3() function
n = 20
def check(l, n):
res = self.array_factory([9999] + l + [-9999])
lt, gt = f(res, 1, n)
self.assertEqual(res[0], 9999)
self.assertEqual(res[-1], -9999)
pivot = res[lt]
for i in range(1, lt):
self.assertLessEqual(res[i], pivot)
for i in range(lt, gt + 1):
self.assertEqual(res[i], pivot)
for i in range(gt + 1, n):
self.assertGreater(res[i], pivot)
f = self.quicksort.partition3
l = self.sorted_list(n)
check(l, n)
l = self.revsorted_list(n)
check(l, n)
l = self.initially_sorted_list(n, n//2)
check(l, n)
l = self.revsorted_list(n)
check(l, n)
l = self.random_list(n)
check(l, n)
l = self.duprandom_list(n)
check(l, n)
def test_run_quicksort(self):
f = self.quicksort.run_quicksort
for size_factor in (1, 5):
# Make lists to be sorted from two chunks of different kinds.
sizes = (15, 20)
all_lists = [self.make_sample_lists(n * size_factor) for n in sizes]
for chunks in itertools.product(*all_lists):
orig_keys = sum(chunks, [])
keys = self.array_factory(orig_keys)
f(keys)
# The list is now sorted
self.assertSorted(orig_keys, keys)
def test_run_quicksort_lt(self):
def lt(a, b):
return a > b
f = self.make_quicksort(lt=lt).run_quicksort
for size_factor in (1, 5):
# Make lists to be sorted from two chunks of different kinds.
sizes = (15, 20)
all_lists = [self.make_sample_lists(n * size_factor) for n in sizes]
for chunks in itertools.product(*all_lists):
orig_keys = sum(chunks, [])
keys = self.array_factory(orig_keys)
f(keys)
# The list is now rev-sorted
self.assertSorted(orig_keys, keys[::-1])
# An imperfect comparison function, as LT(a, b) does not imply not LT(b, a).
# The sort should handle it gracefully.
def lt_floats(a, b):
return math.isnan(b) or a < b
f = self.make_quicksort(lt=lt_floats).run_quicksort
np.random.seed(42)
for size in (5, 20, 50, 500):
orig = np.random.random(size=size) * 100
orig[np.random.random(size=size) < 0.1] = float('nan')
orig_keys = list(orig)
keys = self.array_factory(orig_keys)
f(keys)
non_nans = orig[~np.isnan(orig)]
# Non-NaNs are sorted at the front
self.assertSorted(non_nans, keys[:len(non_nans)])
class TestQuicksortPurePython(BaseQuicksortTest, TestCase):
quicksort = py_quicksort
make_quicksort = staticmethod(make_py_quicksort)
# Much faster than a Numpy array in pure Python
array_factory = list
class TestQuicksortArrays(BaseQuicksortTest, TestCase):
quicksort = jit_quicksort
make_quicksort = staticmethod(make_jit_quicksort)
def array_factory(self, lst):
return np.array(lst, dtype=np.float64)
class TestNumpySort(TestCase):
def setUp(self):
np.random.seed(42)
def int_arrays(self):
for size in (5, 20, 50, 500):
yield np.random.randint(99, size=size)
def float_arrays(self):
for size in (5, 20, 50, 500):
yield np.random.random(size=size) * 100
# Now with NaNs. Numpy sorts them at the end.
for size in (5, 20, 50, 500):
orig = np.random.random(size=size) * 100
orig[np.random.random(size=size) < 0.1] = float('nan')
yield orig
def has_duplicates(self, arr):
"""
Whether the array has duplicates. Takes NaNs into account.
"""
if np.count_nonzero(np.isnan(arr)) > 1:
return True
if np.unique(arr).size < arr.size:
return True
return False
def check_sort_inplace(self, pyfunc, cfunc, val):
expected = copy.copy(val)
got = copy.copy(val)
pyfunc(expected)
cfunc(got)
self.assertPreciseEqual(got, expected)
def check_sort_copy(self, pyfunc, cfunc, val):
orig = copy.copy(val)
expected = pyfunc(val)
got = cfunc(val)
self.assertPreciseEqual(got, expected)
# The original wasn't mutated
self.assertPreciseEqual(val, orig)
def check_argsort(self, pyfunc, cfunc, val, kwargs={}):
orig = copy.copy(val)
expected = pyfunc(val, **kwargs)
got = cfunc(val, **kwargs)
self.assertPreciseEqual(orig[got], np.sort(orig),
msg="the array wasn't argsorted")
# Numba and Numpy results may differ if there are duplicates
# in the array
if not self.has_duplicates(orig):
self.assertPreciseEqual(got, expected)
# The original wasn't mutated
self.assertPreciseEqual(val, orig)
def test_array_sort_int(self):
pyfunc = sort_usecase
cfunc = jit(nopython=True)(pyfunc)
for orig in self.int_arrays():
self.check_sort_inplace(pyfunc, cfunc, orig)
def test_array_sort_float(self):
pyfunc = sort_usecase
cfunc = jit(nopython=True)(pyfunc)
for orig in self.float_arrays():
self.check_sort_inplace(pyfunc, cfunc, orig)
def test_np_sort_int(self):
pyfunc = np_sort_usecase
cfunc = jit(nopython=True)(pyfunc)
for orig in self.int_arrays():
self.check_sort_copy(pyfunc, cfunc, orig)
def test_np_sort_float(self):
pyfunc = np_sort_usecase
cfunc = jit(nopython=True)(pyfunc)
for size in (5, 20, 50, 500):
orig = np.random.random(size=size) * 100
orig[np.random.random(size=size) < 0.1] = float('nan')
self.check_sort_copy(pyfunc, cfunc, orig)
def test_argsort_int(self):
def check(pyfunc):
cfunc = jit(nopython=True)(pyfunc)
for orig in self.int_arrays():
self.check_argsort(pyfunc, cfunc, orig)
check(argsort_usecase)
check(np_argsort_usecase)
def test_argsort_kind_int(self):
def check(pyfunc, is_stable):
cfunc = jit(nopython=True)(pyfunc)
for orig in self.int_arrays():
self.check_argsort(pyfunc, cfunc, orig,
dict(is_stable=is_stable))
check(argsort_kind_usecase, is_stable=True)
check(np_argsort_kind_usecase, is_stable=True)
check(argsort_kind_usecase, is_stable=False)
check(np_argsort_kind_usecase, is_stable=False)
def test_argsort_float(self):
def check(pyfunc):
cfunc = jit(nopython=True)(pyfunc)
for orig in self.float_arrays():
self.check_argsort(pyfunc, cfunc, orig)
check(argsort_usecase)
check(np_argsort_usecase)
    def test_argsort_kind_float(self):
def check(pyfunc, is_stable):
cfunc = jit(nopython=True)(pyfunc)
for orig in self.float_arrays():
self.check_argsort(pyfunc, cfunc, orig,
dict(is_stable=is_stable))
check(argsort_kind_usecase, is_stable=True)
check(np_argsort_kind_usecase, is_stable=True)
check(argsort_kind_usecase, is_stable=False)
check(np_argsort_kind_usecase, is_stable=False)
class TestPythonSort(TestCase):
def test_list_sort(self):
pyfunc = list_sort_usecase
cfunc = jit(nopython=True)(pyfunc)
for size in (20, 50, 500):
orig, ret = cfunc(size)
self.assertEqual(sorted(orig), ret)
self.assertNotEqual(orig, ret) # sanity check
def test_list_sort_reverse(self):
pyfunc = list_sort_reverse_usecase
cfunc = jit(nopython=True)(pyfunc)
for size in (20, 50, 500):
for b in (False, True):
orig, ret = cfunc(size, b)
self.assertEqual(sorted(orig, reverse=b), ret)
self.assertNotEqual(orig, ret) # sanity check
def test_sorted(self):
pyfunc = sorted_usecase
cfunc = jit(nopython=True)(pyfunc)
for size in (20, 50, 500):
orig = np.random.random(size=size) * 100
expected = sorted(orig)
got = cfunc(orig)
self.assertPreciseEqual(got, expected)
self.assertNotEqual(list(orig), got) # sanity check
def test_sorted_reverse(self):
pyfunc = sorted_reverse_usecase
cfunc = jit(nopython=True)(pyfunc)
size = 20
orig = np.random.random(size=size) * 100
for b in (False, True):
expected = sorted(orig, reverse=b)
got = cfunc(orig, b)
self.assertPreciseEqual(got, expected)
self.assertNotEqual(list(orig), got) # sanity check
class TestMergeSort(TestCase):
def setUp(self):
np.random.seed(321)
def check_argsort_stable(self, sorter, low, high, count):
# make data with high possibility of duplicated key
data = np.random.randint(low, high, count)
expect = np.argsort(data, kind='mergesort')
got = sorter(data)
np.testing.assert_equal(expect, got)
def test_argsort_stable(self):
arglist = [
(-2, 2, 5),
(-5, 5, 10),
(0, 10, 101),
(0, 100, 1003),
]
imp = make_jit_mergesort(is_argsort=True)
toplevel = imp.run_mergesort
sorter = njit(lambda arr: toplevel(arr))
for args in arglist:
self.check_argsort_stable(sorter, *args)
nop_compiler = lambda x:x
class TestSortSlashSortedWithKey(MemoryLeakMixin, TestCase):
def test_01(self):
a = [3, 1, 4, 1, 5, 9]
@njit
def external_key(z):
return 1. / z
@njit
def foo(x, key=None):
new_x = x[:]
new_x.sort(key=key)
return sorted(x[:], key=key), new_x
self.assertPreciseEqual(foo(a[:]), foo.py_func(a[:]))
self.assertPreciseEqual(foo(a[:], external_key),
foo.py_func(a[:], external_key))
def test_02(self):
a = [3, 1, 4, 1, 5, 9]
@njit
def foo(x):
def closure_key(z):
return 1. / z
new_x = x[:]
new_x.sort(key=closure_key)
return sorted(x[:], key=closure_key), new_x
self.assertPreciseEqual(foo(a[:]), foo.py_func(a[:]))
def test_03(self):
a = [3, 1, 4, 1, 5, 9]
def gen(compiler):
@compiler
def bar(x, func):
new_x = x[:]
new_x.sort(key=func)
return sorted(x[:], key=func), new_x
@compiler
def foo(x):
def closure_escapee_key(z):
return 1. / z
return bar(x, closure_escapee_key)
return foo
self.assertPreciseEqual(gen(njit)(a[:]), gen(nop_compiler)(a[:]))
def test_04(self):
a = ['a','b','B','b','C','A']
@njit
def external_key(z):
return z.upper()
@njit
def foo(x, key=None):
new_x = x[:]
new_x.sort(key=key)
return sorted(x[:], key=key), new_x
self.assertPreciseEqual(foo(a[:]), foo.py_func(a[:]))
self.assertPreciseEqual(foo(a[:], external_key),
foo.py_func(a[:], external_key))
def test_05(self):
a = ['a','b','B','b','C','A']
@njit
def external_key(z):
return z.upper()
@njit
def foo(x, key=None, reverse=False):
new_x = x[:]
new_x.sort(key=key, reverse=reverse)
return (sorted(x[:], key=key, reverse=reverse), new_x)
for key, rev in itertools.product((None, external_key),
(True, False, 1, -12, 0)):
self.assertPreciseEqual(foo(a[:], key, rev),
foo.py_func(a[:], key, rev))
def test_optional_on_key(self):
a = [3, 1, 4, 1, 5, 9]
@njit
def foo(x, predicate):
if predicate:
def closure_key(z):
return 1. / z
else:
closure_key = None
new_x = x[:]
new_x.sort(key=closure_key)
return (sorted(x[:], key=closure_key), new_x)
with self.assertRaises(errors.TypingError) as raises:
TF = True
foo(a[:], TF)
msg = "Key must concretely be None or a Numba JIT compiled function"
self.assertIn(msg, str(raises.exception))
def test_exceptions_sorted(self):
@njit
def foo_sorted(x, key=None, reverse=False):
return sorted(x[:], key=key, reverse=reverse)
@njit
def foo_sort(x, key=None, reverse=False):
new_x = x[:]
new_x.sort(key=key, reverse=reverse)
return new_x
@njit
def external_key(z):
return 1. / z
a = [3, 1, 4, 1, 5, 9]
for impl in (foo_sort, foo_sorted):
# check illegal key
with self.assertRaises(errors.TypingError) as raises:
impl(a, key="illegal")
expect = "Key must be None or a Numba JIT compiled function"
self.assertIn(expect, str(raises.exception))
# check illegal reverse
with self.assertRaises(errors.TypingError) as raises:
impl(a, key=external_key, reverse="go backwards")
expect = "an integer is required for 'reverse'"
self.assertIn(expect, str(raises.exception))
if __name__ == '__main__':
unittest.main()
| bsd-2-clause |
FireballDWF/cloud-custodian | tools/c7n_salactus/setup.py | 5 | 1310 | # Copyright 2015-2017 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup, find_packages
import os
description = ""
if os.path.exists('README.md'):
description = open('README.md').read()
setup(
name="c7n_salactus",
version='0.3.0',
description="Cloud Custodian - Salactus S3",
long_description=description,
classifiers=[
"Topic :: System :: Systems Administration",
"Topic :: System :: Distributed Computing"
],
url="https://github.com/cloud-custodian/cloud-custodian",
author="Kapil Thangavelu",
license="Apache-2.0",
packages=find_packages(),
entry_points={
'console_scripts': [
'c7n-salactus = c7n_salactus.cli:cli']},
install_requires=["c7n", "click", "rq", "redis"],
)
| apache-2.0 |
trungnt13/scikit-learn | sklearn/decomposition/tests/test_dict_learning.py | 47 | 8095 | import numpy as np
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import ignore_warnings
from sklearn.decomposition import DictionaryLearning
from sklearn.decomposition import MiniBatchDictionaryLearning
from sklearn.decomposition import SparseCoder
from sklearn.decomposition import dict_learning_online
from sklearn.decomposition import sparse_encode
rng_global = np.random.RandomState(0)
n_samples, n_features = 10, 8
X = rng_global.randn(n_samples, n_features)
def test_dict_learning_shapes():
n_components = 5
dico = DictionaryLearning(n_components, random_state=0).fit(X)
assert_true(dico.components_.shape == (n_components, n_features))
def test_dict_learning_overcomplete():
n_components = 12
dico = DictionaryLearning(n_components, random_state=0).fit(X)
assert_true(dico.components_.shape == (n_components, n_features))
def test_dict_learning_reconstruction():
n_components = 12
dico = DictionaryLearning(n_components, transform_algorithm='omp',
transform_alpha=0.001, random_state=0)
code = dico.fit(X).transform(X)
assert_array_almost_equal(np.dot(code, dico.components_), X)
dico.set_params(transform_algorithm='lasso_lars')
code = dico.transform(X)
assert_array_almost_equal(np.dot(code, dico.components_), X, decimal=2)
# used to test lars here too, but there's no guarantee the number of
# nonzero atoms is right.
def test_dict_learning_reconstruction_parallel():
# regression test that parallel reconstruction works with n_jobs=-1
n_components = 12
dico = DictionaryLearning(n_components, transform_algorithm='omp',
transform_alpha=0.001, random_state=0, n_jobs=-1)
code = dico.fit(X).transform(X)
assert_array_almost_equal(np.dot(code, dico.components_), X)
dico.set_params(transform_algorithm='lasso_lars')
code = dico.transform(X)
assert_array_almost_equal(np.dot(code, dico.components_), X, decimal=2)
def test_dict_learning_nonzero_coefs():
n_components = 4
dico = DictionaryLearning(n_components, transform_algorithm='lars',
transform_n_nonzero_coefs=3, random_state=0)
code = dico.fit(X).transform(X[1])
assert_true(len(np.flatnonzero(code)) == 3)
dico.set_params(transform_algorithm='omp')
code = dico.transform(X[1])
assert_equal(len(np.flatnonzero(code)), 3)
def test_dict_learning_unknown_fit_algorithm():
n_components = 5
dico = DictionaryLearning(n_components, fit_algorithm='<unknown>')
assert_raises(ValueError, dico.fit, X)
def test_dict_learning_split():
n_components = 5
dico = DictionaryLearning(n_components, transform_algorithm='threshold',
random_state=0)
code = dico.fit(X).transform(X)
dico.split_sign = True
split_code = dico.transform(X)
assert_array_equal(split_code[:, :n_components] -
split_code[:, n_components:], code)
def test_dict_learning_online_shapes():
rng = np.random.RandomState(0)
n_components = 8
code, dictionary = dict_learning_online(X, n_components=n_components,
alpha=1, random_state=rng)
assert_equal(code.shape, (n_samples, n_components))
assert_equal(dictionary.shape, (n_components, n_features))
assert_equal(np.dot(code, dictionary).shape, X.shape)
def test_dict_learning_online_verbosity():
n_components = 5
# test verbosity
from sklearn.externals.six.moves import cStringIO as StringIO
import sys
old_stdout = sys.stdout
try:
sys.stdout = StringIO()
dico = MiniBatchDictionaryLearning(n_components, n_iter=20, verbose=1,
random_state=0)
dico.fit(X)
dico = MiniBatchDictionaryLearning(n_components, n_iter=20, verbose=2,
random_state=0)
dico.fit(X)
dict_learning_online(X, n_components=n_components, alpha=1, verbose=1,
random_state=0)
dict_learning_online(X, n_components=n_components, alpha=1, verbose=2,
random_state=0)
finally:
sys.stdout = old_stdout
assert_true(dico.components_.shape == (n_components, n_features))
def test_dict_learning_online_estimator_shapes():
n_components = 5
dico = MiniBatchDictionaryLearning(n_components, n_iter=20, random_state=0)
dico.fit(X)
assert_true(dico.components_.shape == (n_components, n_features))
def test_dict_learning_online_overcomplete():
n_components = 12
dico = MiniBatchDictionaryLearning(n_components, n_iter=20,
random_state=0).fit(X)
assert_true(dico.components_.shape == (n_components, n_features))
def test_dict_learning_online_initialization():
n_components = 12
rng = np.random.RandomState(0)
V = rng.randn(n_components, n_features)
dico = MiniBatchDictionaryLearning(n_components, n_iter=0,
dict_init=V, random_state=0).fit(X)
assert_array_equal(dico.components_, V)
def test_dict_learning_online_partial_fit():
n_components = 12
rng = np.random.RandomState(0)
V = rng.randn(n_components, n_features) # random init
V /= np.sum(V ** 2, axis=1)[:, np.newaxis]
dict1 = MiniBatchDictionaryLearning(n_components, n_iter=10 * len(X),
batch_size=1,
alpha=1, shuffle=False, dict_init=V,
random_state=0).fit(X)
dict2 = MiniBatchDictionaryLearning(n_components, alpha=1,
n_iter=1, dict_init=V,
random_state=0)
for i in range(10):
for sample in X:
dict2.partial_fit(sample)
assert_true(not np.all(sparse_encode(X, dict1.components_, alpha=1) ==
0))
assert_array_almost_equal(dict1.components_, dict2.components_,
decimal=2)
def test_sparse_encode_shapes():
n_components = 12
rng = np.random.RandomState(0)
V = rng.randn(n_components, n_features) # random init
V /= np.sum(V ** 2, axis=1)[:, np.newaxis]
for algo in ('lasso_lars', 'lasso_cd', 'lars', 'omp', 'threshold'):
code = sparse_encode(X, V, algorithm=algo)
assert_equal(code.shape, (n_samples, n_components))
def test_sparse_encode_error():
n_components = 12
rng = np.random.RandomState(0)
V = rng.randn(n_components, n_features) # random init
V /= np.sum(V ** 2, axis=1)[:, np.newaxis]
code = sparse_encode(X, V, alpha=0.001)
assert_true(not np.all(code == 0))
assert_less(np.sqrt(np.sum((np.dot(code, V) - X) ** 2)), 0.1)
def test_sparse_encode_error_default_sparsity():
rng = np.random.RandomState(0)
X = rng.randn(100, 64)
D = rng.randn(2, 64)
code = ignore_warnings(sparse_encode)(X, D, algorithm='omp',
n_nonzero_coefs=None)
assert_equal(code.shape, (100, 2))
def test_unknown_method():
n_components = 12
rng = np.random.RandomState(0)
V = rng.randn(n_components, n_features) # random init
assert_raises(ValueError, sparse_encode, X, V, algorithm="<unknown>")
def test_sparse_coder_estimator():
n_components = 12
rng = np.random.RandomState(0)
V = rng.randn(n_components, n_features) # random init
V /= np.sum(V ** 2, axis=1)[:, np.newaxis]
code = SparseCoder(dictionary=V, transform_algorithm='lasso_lars',
transform_alpha=0.001).transform(X)
assert_true(not np.all(code == 0))
assert_less(np.sqrt(np.sum((np.dot(code, V) - X) ** 2)), 0.1)
| bsd-3-clause |
SaschaMester/delicium | third_party/mojo/src/mojo/public/tools/bindings/generators/mojom_dart_generator.py | 1 | 17908 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates dart source files from a mojom.Module."""
import os
import re
import shutil
import sys
import mojom.generate.constant_resolver as resolver
import mojom.generate.generator as generator
import mojom.generate.module as mojom
import mojom.generate.pack as pack
from mojom.generate.template_expander import UseJinja
GENERATOR_PREFIX = 'dart'
_kind_to_dart_default_value = {
mojom.BOOL: "false",
mojom.INT8: "0",
mojom.UINT8: "0",
mojom.INT16: "0",
mojom.UINT16: "0",
mojom.INT32: "0",
mojom.UINT32: "0",
mojom.FLOAT: "0.0",
mojom.HANDLE: "null",
mojom.DCPIPE: "null",
mojom.DPPIPE: "null",
mojom.MSGPIPE: "null",
mojom.SHAREDBUFFER: "null",
mojom.NULLABLE_HANDLE: "null",
mojom.NULLABLE_DCPIPE: "null",
mojom.NULLABLE_DPPIPE: "null",
mojom.NULLABLE_MSGPIPE: "null",
mojom.NULLABLE_SHAREDBUFFER: "null",
mojom.INT64: "0",
mojom.UINT64: "0",
mojom.DOUBLE: "0.0",
mojom.STRING: "null",
mojom.NULLABLE_STRING: "null"
}
_kind_to_dart_decl_type = {
mojom.BOOL: "bool",
mojom.INT8: "int",
mojom.UINT8: "int",
mojom.INT16: "int",
mojom.UINT16: "int",
mojom.INT32: "int",
mojom.UINT32: "int",
mojom.FLOAT: "double",
mojom.HANDLE: "core.MojoHandle",
mojom.DCPIPE: "core.MojoDataPipeConsumer",
mojom.DPPIPE: "core.MojoDataPipeProducer",
mojom.MSGPIPE: "core.MojoMessagePipeEndpoint",
mojom.SHAREDBUFFER: "core.MojoSharedBuffer",
mojom.NULLABLE_HANDLE: "core.MojoHandle",
mojom.NULLABLE_DCPIPE: "core.MojoDataPipeConsumer",
mojom.NULLABLE_DPPIPE: "core.MojoDataPipeProducer",
mojom.NULLABLE_MSGPIPE: "core.MojoMessagePipeEndpoint",
mojom.NULLABLE_SHAREDBUFFER: "core.MojoSharedBuffer",
mojom.INT64: "int",
mojom.UINT64: "int",
mojom.DOUBLE: "double",
mojom.STRING: "String",
mojom.NULLABLE_STRING: "String"
}
_spec_to_decode_method = {
mojom.BOOL.spec: 'decodeBool',
mojom.DCPIPE.spec: 'decodeConsumerHandle',
mojom.DOUBLE.spec: 'decodeDouble',
mojom.DPPIPE.spec: 'decodeProducerHandle',
mojom.FLOAT.spec: 'decodeFloat',
mojom.HANDLE.spec: 'decodeHandle',
mojom.INT16.spec: 'decodeInt16',
mojom.INT32.spec: 'decodeInt32',
mojom.INT64.spec: 'decodeInt64',
mojom.INT8.spec: 'decodeInt8',
mojom.MSGPIPE.spec: 'decodeMessagePipeHandle',
mojom.NULLABLE_DCPIPE.spec: 'decodeConsumerHandle',
mojom.NULLABLE_DPPIPE.spec: 'decodeProducerHandle',
mojom.NULLABLE_HANDLE.spec: 'decodeHandle',
mojom.NULLABLE_MSGPIPE.spec: 'decodeMessagePipeHandle',
mojom.NULLABLE_SHAREDBUFFER.spec: 'decodeSharedBufferHandle',
mojom.NULLABLE_STRING.spec: 'decodeString',
mojom.SHAREDBUFFER.spec: 'decodeSharedBufferHandle',
mojom.STRING.spec: 'decodeString',
mojom.UINT16.spec: 'decodeUint16',
mojom.UINT32.spec: 'decodeUint32',
mojom.UINT64.spec: 'decodeUint64',
mojom.UINT8.spec: 'decodeUint8',
}
_spec_to_encode_method = {
mojom.BOOL.spec: 'encodeBool',
mojom.DCPIPE.spec: 'encodeConsumerHandle',
mojom.DOUBLE.spec: 'encodeDouble',
mojom.DPPIPE.spec: 'encodeProducerHandle',
mojom.FLOAT.spec: 'encodeFloat',
mojom.HANDLE.spec: 'encodeHandle',
mojom.INT16.spec: 'encodeInt16',
mojom.INT32.spec: 'encodeInt32',
mojom.INT64.spec: 'encodeInt64',
mojom.INT8.spec: 'encodeInt8',
mojom.MSGPIPE.spec: 'encodeMessagePipeHandle',
mojom.NULLABLE_DCPIPE.spec: 'encodeConsumerHandle',
mojom.NULLABLE_DPPIPE.spec: 'encodeProducerHandle',
mojom.NULLABLE_HANDLE.spec: 'encodeHandle',
mojom.NULLABLE_MSGPIPE.spec: 'encodeMessagePipeHandle',
mojom.NULLABLE_SHAREDBUFFER.spec: 'encodeSharedBufferHandle',
mojom.NULLABLE_STRING.spec: 'encodeString',
mojom.SHAREDBUFFER.spec: 'encodeSharedBufferHandle',
mojom.STRING.spec: 'encodeString',
mojom.UINT16.spec: 'encodeUint16',
mojom.UINT32.spec: 'encodeUint32',
mojom.UINT64.spec: 'encodeUint64',
mojom.UINT8.spec: 'encodeUint8',
}
def GetDartType(kind):
if kind.imported_from:
return kind.imported_from["unique_name"] + "." + GetNameForElement(kind)
return GetNameForElement(kind)
def DartDefaultValue(field):
if field.default:
if mojom.IsStructKind(field.kind):
assert field.default == "default"
return "new %s()" % GetDartType(field.kind)
return ExpressionToText(field.default)
if field.kind in mojom.PRIMITIVES:
return _kind_to_dart_default_value[field.kind]
if mojom.IsStructKind(field.kind):
return "null"
if mojom.IsUnionKind(field.kind):
return "null"
if mojom.IsArrayKind(field.kind):
return "null"
if mojom.IsMapKind(field.kind):
return "null"
if mojom.IsInterfaceKind(field.kind) or \
mojom.IsInterfaceRequestKind(field.kind):
return "null"
if mojom.IsEnumKind(field.kind):
return "0"
def DartDeclType(kind):
if kind in mojom.PRIMITIVES:
return _kind_to_dart_decl_type[kind]
if mojom.IsStructKind(kind):
return GetDartType(kind)
if mojom.IsUnionKind(kind):
return "%sWrapper" % GetDartType(kind)
if mojom.IsArrayKind(kind):
array_type = DartDeclType(kind.kind)
return "List<" + array_type + ">"
if mojom.IsMapKind(kind):
key_type = DartDeclType(kind.key_kind)
value_type = DartDeclType(kind.value_kind)
return "Map<"+ key_type + ", " + value_type + ">"
if mojom.IsInterfaceKind(kind) or \
mojom.IsInterfaceRequestKind(kind):
return "Object"
if mojom.IsEnumKind(kind):
return "int"
def NameToComponent(name):
# insert '_' between anything and a Title name (e.g, HTTPEntry2FooBar ->
# HTTP_Entry2_FooBar)
name = re.sub('([^_])([A-Z][^A-Z_]+)', r'\1_\2', name)
# insert '_' between non upper and start of upper blocks (e.g.,
# HTTP_Entry2_FooBar -> HTTP_Entry2_Foo_Bar)
name = re.sub('([^A-Z_])([A-Z])', r'\1_\2', name)
return [x.lower() for x in name.split('_')]
def UpperCamelCase(name):
return ''.join([x.capitalize() for x in NameToComponent(name)])
def CamelCase(name):
uccc = UpperCamelCase(name)
return uccc[0].lower() + uccc[1:]
def ConstantStyle(name):
components = NameToComponent(name)
if components[0] == 'k' and len(components) > 1:
components = components[1:]
# variable cannot starts with a digit.
if components[0][0].isdigit():
components[0] = '_' + components[0]
return '_'.join([x.upper() for x in components])
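# Illustrative: ConstantStyle('kFooBar') -> 'FOO_BAR'; a leading digit is
# shielded, so ConstantStyle('2d') -> '_2D'.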
def DotToUnderscore(name):
return name.replace('.', '_')
def GetNameForElement(element):
if (mojom.IsEnumKind(element) or mojom.IsInterfaceKind(element) or
mojom.IsStructKind(element) or mojom.IsUnionKind(element)):
return UpperCamelCase(element.name)
if mojom.IsInterfaceRequestKind(element):
return GetNameForElement(element.kind)
if isinstance(element, (mojom.Method,
mojom.Parameter,
mojom.StructField)):
return CamelCase(element.name)
if isinstance(element, mojom.UnionField):
return "f%s" % UpperCamelCase(element.name)
if isinstance(element, mojom.EnumValue):
return (GetNameForElement(element.enum) + '.' +
ConstantStyle(element.name))
if isinstance(element, (mojom.NamedValue,
mojom.Constant,
mojom.EnumField)):
return ConstantStyle(element.name)
raise Exception('Unexpected element: %s' % element)
def GetUnionFieldTagName(element):
if not isinstance(element, mojom.UnionField):
raise Exception('Unexpected element: %s is not a union field.' % element)
return 'tag%s' % UpperCamelCase(element.name)
def GetInterfaceResponseName(method):
return UpperCamelCase(method.name + 'Response')
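# Illustrative: a method named 'doIt' yields 'DoItResponse'.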
def GetDartTrueFalse(value):
return 'true' if value else 'false'
def GetArrayNullabilityFlags(kind):
"""Returns nullability flags for an array type, see codec.dart.
As we have dedicated decoding functions for arrays, we have to pass
  nullability information about both the array itself and its element type.
"""
assert mojom.IsArrayKind(kind)
ARRAY_NULLABLE = 'bindings.kArrayNullable'
ELEMENT_NULLABLE = 'bindings.kElementNullable'
NOTHING_NULLABLE = 'bindings.kNothingNullable'
flags_to_set = []
if mojom.IsNullableKind(kind):
flags_to_set.append(ARRAY_NULLABLE)
if mojom.IsNullableKind(kind.kind):
flags_to_set.append(ELEMENT_NULLABLE)
if not flags_to_set:
flags_to_set = [NOTHING_NULLABLE]
return ' | '.join(flags_to_set)
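# Illustrative: a nullable array of nullable elements composes
# 'bindings.kArrayNullable | bindings.kElementNullable', while a plain array
# of plain elements collapses to 'bindings.kNothingNullable'.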
def AppendDecodeParams(initial_params, kind, bit):
""" Appends standard parameters for decode calls. """
params = list(initial_params)
if (kind == mojom.BOOL):
params.append(str(bit))
if mojom.IsReferenceKind(kind):
if mojom.IsArrayKind(kind):
params.append(GetArrayNullabilityFlags(kind))
else:
params.append(GetDartTrueFalse(mojom.IsNullableKind(kind)))
if mojom.IsInterfaceKind(kind):
params.append('%sProxy.newFromEndpoint' % GetDartType(kind))
if mojom.IsArrayKind(kind) and mojom.IsInterfaceKind(kind.kind):
params.append('%sProxy.newFromEndpoint' % GetDartType(kind.kind))
if mojom.IsInterfaceRequestKind(kind):
params.append('%sStub.newFromEndpoint' % GetDartType(kind.kind))
if mojom.IsArrayKind(kind) and mojom.IsInterfaceRequestKind(kind.kind):
params.append('%sStub.newFromEndpoint' % GetDartType(kind.kind.kind))
if mojom.IsArrayKind(kind):
params.append(GetArrayExpectedLength(kind))
return params
def AppendEncodeParams(initial_params, kind, bit):
""" Appends standard parameters shared between encode and decode calls. """
params = list(initial_params)
if (kind == mojom.BOOL):
params.append(str(bit))
if mojom.IsReferenceKind(kind):
if mojom.IsArrayKind(kind):
params.append(GetArrayNullabilityFlags(kind))
else:
params.append(GetDartTrueFalse(mojom.IsNullableKind(kind)))
if mojom.IsArrayKind(kind):
params.append(GetArrayExpectedLength(kind))
return params
def DecodeMethod(kind, offset, bit):
def _DecodeMethodName(kind):
if mojom.IsArrayKind(kind):
return _DecodeMethodName(kind.kind) + 'Array'
if mojom.IsEnumKind(kind):
return _DecodeMethodName(mojom.INT32)
if mojom.IsInterfaceRequestKind(kind):
return 'decodeInterfaceRequest'
if mojom.IsInterfaceKind(kind):
return 'decodeServiceInterface'
return _spec_to_decode_method[kind.spec]
methodName = _DecodeMethodName(kind)
params = AppendDecodeParams([ str(offset) ], kind, bit)
return '%s(%s)' % (methodName, ', '.join(params))
def EncodeMethod(kind, variable, offset, bit):
def _EncodeMethodName(kind):
if mojom.IsStructKind(kind):
return 'encodeStruct'
if mojom.IsUnionKind(kind):
return 'encodeUnion'
if mojom.IsArrayKind(kind):
return _EncodeMethodName(kind.kind) + 'Array'
if mojom.IsEnumKind(kind):
return _EncodeMethodName(mojom.INT32)
if mojom.IsInterfaceRequestKind(kind):
return 'encodeInterfaceRequest'
if mojom.IsInterfaceKind(kind):
return 'encodeInterface'
return _spec_to_encode_method[kind.spec]
methodName = _EncodeMethodName(kind)
params = AppendEncodeParams([ variable, str(offset) ], kind, bit)
return '%s(%s)' % (methodName, ', '.join(params))
def TranslateConstants(token):
if isinstance(token, (mojom.EnumValue, mojom.NamedValue)):
# Both variable and enum constants are constructed like:
# NamespaceUid.Struct.Enum_CONSTANT_NAME
name = ""
if token.imported_from:
name = token.imported_from["unique_name"] + "."
if token.parent_kind:
name = name + token.parent_kind.name + "."
if isinstance(token, mojom.EnumValue):
name = name + token.enum.name + "_"
return name + token.name
if isinstance(token, mojom.BuiltinValue):
    if token.value == "double.INFINITY" or token.value == "float.INFINITY":
      return "double.INFINITY"
    if token.value == "double.NEGATIVE_INFINITY" or \
       token.value == "float.NEGATIVE_INFINITY":
      return "double.NEGATIVE_INFINITY"
    if token.value == "double.NAN" or token.value == "float.NAN":
      return "double.NAN"
# Strip leading '+'.
if token[0] == '+':
token = token[1:]
return token
def ExpressionToText(token):
if isinstance(token, (mojom.EnumValue, mojom.NamedValue)):
return str(token.resolved_value)
return TranslateConstants(token)
def GetArrayKind(kind, size = None):
if size is None:
return mojom.Array(kind)
else:
array = mojom.Array(kind, 0)
array.dart_map_size = size
return array
def GetArrayExpectedLength(kind):
if mojom.IsArrayKind(kind) and kind.length is not None:
return getattr(kind, 'dart_map_size', str(kind.length))
else:
return 'bindings.kUnspecifiedArrayLength'
def IsPointerArrayKind(kind):
if not mojom.IsArrayKind(kind):
return False
sub_kind = kind.kind
return mojom.IsObjectKind(sub_kind)
def GetImportUri(module):
elements = module.namespace.split('.')
elements.append("%s" % module.name)
return os.path.join("mojom", *elements)
class Generator(generator.Generator):
dart_filters = {
'array_expected_length': GetArrayExpectedLength,
'array': GetArrayKind,
'decode_method': DecodeMethod,
'default_value': DartDefaultValue,
'encode_method': EncodeMethod,
'expression_to_text': ExpressionToText,
'is_map_kind': mojom.IsMapKind,
'is_nullable_kind': mojom.IsNullableKind,
'is_pointer_array_kind': IsPointerArrayKind,
'is_struct_kind': mojom.IsStructKind,
'is_union_kind': mojom.IsUnionKind,
'dart_true_false': GetDartTrueFalse,
'dart_type': DartDeclType,
'name': GetNameForElement,
'tag_name': GetUnionFieldTagName,
'interface_response_name': GetInterfaceResponseName,
'dot_to_underscore': DotToUnderscore,
}
def GetParameters(self, args):
return {
"namespace": self.module.namespace,
"imports": self.GetImports(args),
"kinds": self.module.kinds,
"enums": self.module.enums,
"module": resolver.ResolveConstants(self.module, ExpressionToText),
"structs": self.GetStructs() + self.GetStructsFromMethods(),
"unions": self.GetUnions(),
"interfaces": self.GetInterfaces(),
"imported_interfaces": self.GetImportedInterfaces(),
"imported_from": self.ImportedFrom(),
}
@UseJinja("dart_templates/module.lib.tmpl", filters=dart_filters)
def GenerateLibModule(self, args):
return self.GetParameters(args)
def GenerateFiles(self, args):
elements = self.module.namespace.split('.')
elements.append("%s.dart" % self.module.name)
path = os.path.join("dart-pkg", "mojom/lib", *elements)
self.Write(self.GenerateLibModule(args), path)
path = os.path.join("dart-gen", "mojom/lib", *elements)
self.Write(self.GenerateLibModule(args), path)
link = self.MatchMojomFilePath("%s.dart" % self.module.name)
if os.path.exists(os.path.join(self.output_dir, link)):
os.unlink(os.path.join(self.output_dir, link))
try:
if sys.platform == "win32":
shutil.copy(os.path.join(self.output_dir, path),
os.path.join(self.output_dir, link))
else:
os.symlink(os.path.join(self.output_dir, path),
os.path.join(self.output_dir, link))
except OSError as e:
# Errno 17 is file already exists. If the link fails because file already
# exists assume another instance of this script tried to create the same
# file and continue on.
if e.errno != 17:
raise e
def GetImports(self, args):
used_names = set()
for each_import in self.module.imports:
simple_name = each_import["module_name"].split(".")[0]
# Since each import is assigned a library in Dart, they need to have
# unique names.
unique_name = simple_name
counter = 0
while unique_name in used_names:
counter += 1
unique_name = simple_name + str(counter)
used_names.add(unique_name)
each_import["unique_name"] = unique_name + '_mojom'
each_import["rebased_path"] = GetImportUri(each_import['module'])
return self.module.imports
def GetImportedInterfaces(self):
interface_to_import = {}
for each_import in self.module.imports:
for each_interface in each_import["module"].interfaces:
name = each_interface.name
interface_to_import[name] = each_import["unique_name"] + "." + name
return interface_to_import
def ImportedFrom(self):
interface_to_import = {}
for each_import in self.module.imports:
for each_interface in each_import["module"].interfaces:
name = each_interface.name
interface_to_import[name] = each_import["unique_name"] + "."
return interface_to_import
| bsd-3-clause |
thinkopensolutions/odoo-saas-tools | saas_server_backup_ftp/models/saas_server.py | 1 | 2303 | # -*- coding: utf-8 -*-
import tempfile
from odoo import api, models
import logging
_logger = logging.getLogger(__name__)
try:
import pysftp
except ImportError:
_logger.debug('saas_server_backup_ftp requires the python library pysftp which is not found on your installation')
class SaasServerClient(models.Model):
_inherit = 'saas_server.client'
@api.model
def _transport_backup(self, dump_db, filename=None):
server = self.env['ir.config_parameter'].get_param('saas_server.sftp_server', None)
username = self.env['ir.config_parameter'].get_param('saas_server.sftp_username', None)
password = self.env['ir.config_parameter'].get_param('saas_server.sftp_password', None)
path = self.env['ir.config_parameter'].get_param('saas_server.sftp_path', None)
sftp_rsa_key_path = self.env['ir.config_parameter'].get_param(
'saas_server.sftp_rsa_key_path', None)
if sftp_rsa_key_path:
srv = pysftp.Connection(host=server, username=username,
private_key=sftp_rsa_key_path,
private_key_pass=password)
else:
srv = pysftp.Connection(host=server, username=username,
password=password)
# set keepalive to prevent socket closed / connection dropped error
srv._transport.set_keepalive(30)
try:
srv.chdir(path)
except IOError:
# Create directory and subdirs if they do not exist.
currentDir = ''
for dirElement in path.split('/'):
currentDir += dirElement + '/'
try:
srv.chdir(currentDir)
except:
print('(Part of the) path didn\'t exist. Creating it now at ' + currentDir)
# Make directory and then navigate into it
srv.mkdir(currentDir, mode=777)
srv.chdir(currentDir)
srv.chdir(path)
with tempfile.TemporaryFile() as t:
dump_db(t)
t.seek(0)
srv.putfo(t, filename)
srv.close()
@api.model
def schedule_saas_databases_backup(self):
self.search([('state', '!=', 'deleted')]).backup_database()
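# Editor's note: a minimal, hypothetical refactoring sketch of the "create
# missing remote directories" pattern used in _transport_backup() above.
# `srv` is assumed to expose pysftp-style chdir()/mkdir(); the helper is an
# illustration only and is not called anywhere in this module.
def _ensure_remote_path(srv, path):
    """Walk `path` one component at a time, creating directories as needed."""
    current_dir = ''
    for dir_element in path.split('/'):
        current_dir += dir_element + '/'
        try:
            srv.chdir(current_dir)
        except IOError:
            # pysftp treats `mode` as an octal-style int, i.e. 777 -> 0o777
            srv.mkdir(current_dir, mode=777)
            srv.chdir(current_dir)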
| lgpl-3.0 |
uclouvain/osis | base/tests/utils/test_operator.py | 1 | 2143 | # ############################################################################
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2020 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
# ############################################################################
from django.test import SimpleTestCase
from base.utils import operator
class TestIsYearLower(SimpleTestCase):
def test_should_return_false_when_base_year_is_none(self):
self.assertFalse(
operator.is_year_lower(None, 2025)
)
def test_should_return_true_when_year_to_compare_to_is_none(self):
self.assertTrue(
operator.is_year_lower(2029, None)
)
def test_should_return_true_when_base_year_is_inferior_to_other_year(self):
self.assertTrue(
operator.is_year_lower(2017, 2029)
)
def test_should_return_false_when_base_year_is_equal_to_other_year(self):
self.assertFalse(
operator.is_year_lower(2017, 2017)
)
def test_should_return_false_when_base_year_is_greater_to_other_year(self):
self.assertFalse(
operator.is_year_lower(2019, 2017)
)
| agpl-3.0 |
NewpTone/stacklab-nova | debian/tmp/usr/lib/python2.7/dist-packages/nova/tests/test_configdrive2.py | 7 | 3462 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Michael Still and Canonical Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mox
import os
import tempfile
from nova import test
from nova import flags
from nova.openstack.common import log
from nova import utils
from nova.virt import configdrive
from nova.virt.libvirt import utils as virtutils
FLAGS = flags.FLAGS
LOG = log.getLogger(__name__)
class ConfigDriveTestCase(test.TestCase):
def test_create_configdrive_iso(self):
imagefile = None
try:
self.mox.StubOutWithMock(utils, 'execute')
utils.execute('genisoimage', '-o', mox.IgnoreArg(), '-ldots',
'-allow-lowercase', '-allow-multidot', '-l',
'-publisher', mox.IgnoreArg(), '-quiet', '-J', '-r',
'-V', 'config-2', mox.IgnoreArg(), attempts=1,
run_as_root=False).AndReturn(None)
self.mox.ReplayAll()
c = configdrive.ConfigDriveBuilder()
c._add_file('this/is/a/path/hello', 'This is some content')
(fd, imagefile) = tempfile.mkstemp(prefix='cd_iso_')
os.close(fd)
c._make_iso9660(imagefile)
c.cleanup()
# Check cleanup
self.assertFalse(os.path.exists(c.tempdir))
finally:
if imagefile:
utils.delete_if_exists(imagefile)
def test_create_configdrive_vfat(self):
imagefile = None
try:
self.mox.StubOutWithMock(virtutils, 'mkfs')
self.mox.StubOutWithMock(utils, 'execute')
self.mox.StubOutWithMock(utils, 'trycmd')
virtutils.mkfs('vfat', mox.IgnoreArg(),
label='config-2').AndReturn(None)
utils.trycmd('mount', '-o', 'loop', mox.IgnoreArg(),
mox.IgnoreArg(),
run_as_root=True).AndReturn((None, None))
utils.trycmd('chown', mox.IgnoreArg(), mox.IgnoreArg(),
run_as_root=True).AndReturn((None, None))
utils.execute('umount', mox.IgnoreArg(),
run_as_root=True).AndReturn(None)
self.mox.ReplayAll()
c = configdrive.ConfigDriveBuilder()
c._add_file('this/is/a/path/hello', 'This is some content')
(fd, imagefile) = tempfile.mkstemp(prefix='cd_vfat_')
os.close(fd)
c._make_vfat(imagefile)
c.cleanup()
# Check cleanup
self.assertFalse(os.path.exists(c.tempdir))
# NOTE(mikal): we can't check for a VFAT output here because the
# filesystem creation stuff has been mocked out because it
# requires root permissions
finally:
if imagefile:
utils.delete_if_exists(imagefile)
| apache-2.0 |
colognecoin/colognecoin | contrib/bitrpc/bitrpc.py | 239 | 7836 | from jsonrpc import ServiceProxy
import sys
import string
# ===== BEGIN USER SETTINGS =====
# if you do not set these you will be prompted for a password for every command
rpcuser = ""
rpcpass = ""
# ====== END USER SETTINGS ======
if rpcpass == "":
access = ServiceProxy("http://127.0.0.1:9332")
else:
access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:9332")
cmd = sys.argv[1].lower()
if cmd == "backupwallet":
try:
path = raw_input("Enter destination path/filename: ")
print access.backupwallet(path)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccount":
try:
addr = raw_input("Enter a Bitcoin address: ")
print access.getaccount(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccountaddress":
try:
acct = raw_input("Enter an account name: ")
print access.getaccountaddress(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getaddressesbyaccount":
try:
acct = raw_input("Enter an account name: ")
print access.getaddressesbyaccount(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getbalance":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getbalance(acct, mc)
except:
print access.getbalance()
except:
print "\n---An error occurred---\n"
elif cmd == "getblockbycount":
try:
height = raw_input("Height: ")
print access.getblockbycount(height)
except:
print "\n---An error occurred---\n"
elif cmd == "getblockcount":
try:
print access.getblockcount()
except:
print "\n---An error occurred---\n"
elif cmd == "getblocknumber":
try:
print access.getblocknumber()
except:
print "\n---An error occurred---\n"
elif cmd == "getconnectioncount":
try:
print access.getconnectioncount()
except:
print "\n---An error occurred---\n"
elif cmd == "getdifficulty":
try:
print access.getdifficulty()
except:
print "\n---An error occurred---\n"
elif cmd == "getgenerate":
try:
print access.getgenerate()
except:
print "\n---An error occurred---\n"
elif cmd == "gethashespersec":
try:
print access.gethashespersec()
except:
print "\n---An error occurred---\n"
elif cmd == "getinfo":
try:
print access.getinfo()
except:
print "\n---An error occurred---\n"
elif cmd == "getnewaddress":
try:
acct = raw_input("Enter an account name: ")
try:
print access.getnewaddress(acct)
except:
print access.getnewaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaccount":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaccount(acct, mc)
except:
print access.getreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaddress":
try:
addr = raw_input("Enter a Bitcoin address (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaddress(addr, mc)
except:
print access.getreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "gettransaction":
try:
txid = raw_input("Enter a transaction ID: ")
print access.gettransaction(txid)
except:
print "\n---An error occurred---\n"
elif cmd == "getwork":
try:
data = raw_input("Data (optional): ")
		try:
			print access.getwork(data)
		except:
			print access.getwork()
except:
print "\n---An error occurred---\n"
elif cmd == "help":
try:
cmd = raw_input("Command (optional): ")
try:
print access.help(cmd)
except:
print access.help()
except:
print "\n---An error occurred---\n"
elif cmd == "listaccounts":
try:
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.listaccounts(mc)
except:
print access.listaccounts()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaccount":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaccount(mc, incemp)
except:
print access.listreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaddress":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaddress(mc, incemp)
except:
print access.listreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "listtransactions":
try:
acct = raw_input("Account (optional): ")
count = raw_input("Number of transactions (optional): ")
frm = raw_input("Skip (optional):")
try:
print access.listtransactions(acct, count, frm)
except:
print access.listtransactions()
except:
print "\n---An error occurred---\n"
elif cmd == "move":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.move(frm, to, amt, mc, comment)
except:
print access.move(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendfrom":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendfrom(frm, to, amt, mc, comment, commentto)
except:
print access.sendfrom(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendmany":
try:
frm = raw_input("From: ")
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.sendmany(frm,to,mc,comment)
except:
print access.sendmany(frm,to)
except:
print "\n---An error occurred---\n"
elif cmd == "sendtoaddress":
try:
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
amt = raw_input("Amount:")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendtoaddress(to,amt,comment,commentto)
except:
print access.sendtoaddress(to,amt)
except:
print "\n---An error occurred---\n"
elif cmd == "setaccount":
try:
addr = raw_input("Address: ")
acct = raw_input("Account:")
print access.setaccount(addr,acct)
except:
print "\n---An error occurred---\n"
elif cmd == "setgenerate":
try:
gen= raw_input("Generate? (true/false): ")
cpus = raw_input("Max processors/cores (-1 for unlimited, optional):")
try:
print access.setgenerate(gen, cpus)
except:
print access.setgenerate(gen)
except:
print "\n---An error occurred---\n"
elif cmd == "settxfee":
try:
amt = raw_input("Amount:")
print access.settxfee(amt)
except:
print "\n---An error occurred---\n"
elif cmd == "stop":
try:
print access.stop()
except:
print "\n---An error occurred---\n"
elif cmd == "validateaddress":
try:
addr = raw_input("Address: ")
print access.validateaddress(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrase":
try:
pwd = raw_input("Enter wallet passphrase: ")
access.walletpassphrase(pwd, 60)
print "\n---Wallet unlocked---\n"
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrasechange":
try:
pwd = raw_input("Enter old wallet passphrase: ")
pwd2 = raw_input("Enter new wallet passphrase: ")
access.walletpassphrasechange(pwd, pwd2)
print
print "\n---Passphrase changed---\n"
except:
print
print "\n---An error occurred---\n"
print
else:
print "Command not found or not supported"
| mit |
tortib/nzbToMedia | libs/requests/models.py | 16 | 26200 | # -*- coding: utf-8 -*-
"""
requests.models
~~~~~~~~~~~~~~~
This module contains the primary objects that power Requests.
"""
import collections
import datetime
from io import BytesIO, UnsupportedOperation
from .hooks import default_hooks
from .structures import CaseInsensitiveDict
from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header
from .packages.urllib3.fields import RequestField
from .packages.urllib3.filepost import encode_multipart_formdata
from .packages.urllib3.util import parse_url
from .packages.urllib3.exceptions import DecodeError
from .exceptions import (
HTTPError, RequestException, MissingSchema, InvalidURL,
ChunkedEncodingError, ContentDecodingError)
from .utils import (
guess_filename, get_auth_from_url, requote_uri,
stream_decode_response_unicode, to_key_val_list, parse_header_links,
iter_slices, guess_json_utf, super_len, to_native_string)
from .compat import (
cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
is_py2, chardet, json, builtin_str, basestring, IncompleteRead)
from .status_codes import codes
#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
codes.moved, # 301
codes.found, # 302
codes.other, # 303
codes.temporary_moved, # 307
)
DEFAULT_REDIRECT_LIMIT = 30
CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512
class RequestEncodingMixin(object):
@property
def path_url(self):
"""Build the path URL to use."""
url = []
p = urlsplit(self.url)
path = p.path
if not path:
path = '/'
url.append(path)
query = p.query
if query:
url.append('?')
url.append(query)
return ''.join(url)
@staticmethod
def _encode_params(data):
"""Encode parameters in a piece of data.
Will successfully encode parameters when passed as a dict or a list of
2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
if parameters are supplied as a dict.
"""
if isinstance(data, (str, bytes)):
return data
elif hasattr(data, 'read'):
return data
elif hasattr(data, '__iter__'):
result = []
for k, vs in to_key_val_list(data):
if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
vs = [vs]
for v in vs:
if v is not None:
result.append(
(k.encode('utf-8') if isinstance(k, str) else k,
v.encode('utf-8') if isinstance(v, str) else v))
return urlencode(result, doseq=True)
else:
return data
@staticmethod
def _encode_files(files, data):
"""Build the body for a multipart/form-data request.
Will successfully encode files when passed as a dict or a list of
2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
if parameters are supplied as a dict.
"""
if (not files):
raise ValueError("Files must be provided.")
elif isinstance(data, basestring):
raise ValueError("Data must not be a string.")
new_fields = []
fields = to_key_val_list(data or {})
files = to_key_val_list(files or {})
for field, val in fields:
if isinstance(val, basestring) or not hasattr(val, '__iter__'):
val = [val]
for v in val:
if v is not None:
# Don't call str() on bytestrings: in Py3 it all goes wrong.
if not isinstance(v, bytes):
v = str(v)
new_fields.append(
(field.decode('utf-8') if isinstance(field, bytes) else field,
v.encode('utf-8') if isinstance(v, str) else v))
for (k, v) in files:
# support for explicit filename
ft = None
fh = None
if isinstance(v, (tuple, list)):
if len(v) == 2:
fn, fp = v
elif len(v) == 3:
fn, fp, ft = v
else:
fn, fp, ft, fh = v
else:
fn = guess_filename(v) or k
fp = v
if isinstance(fp, str):
fp = StringIO(fp)
if isinstance(fp, bytes):
fp = BytesIO(fp)
rf = RequestField(name=k, data=fp.read(),
filename=fn, headers=fh)
rf.make_multipart(content_type=ft)
new_fields.append(rf)
body, content_type = encode_multipart_formdata(new_fields)
return body, content_type
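    # Editor's note: illustrative shapes for the `files` values consumed by
    # the loop above -- 2-, 3- and 4-tuples are all accepted:
    #   {'f': open('report.csv', 'rb')}                      # filename guessed
    #   {'f': ('report.csv', open('report.csv', 'rb'))}      # (name, fileobj)
    #   {'f': ('report.csv', 'col1,col2', 'text/csv')}       # + content type
    #   {'f': ('report.csv', 'col1,col2', 'text/csv',
    #          {'Expires': '0'})}                            # + custom headers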
class RequestHooksMixin(object):
def register_hook(self, event, hook):
"""Properly register a hook."""
if event not in self.hooks:
raise ValueError('Unsupported event specified, with event name "%s"' % (event))
if isinstance(hook, collections.Callable):
self.hooks[event].append(hook)
elif hasattr(hook, '__iter__'):
self.hooks[event].extend(h for h in hook if isinstance(h, collections.Callable))
def deregister_hook(self, event, hook):
"""Deregister a previously registered hook.
Returns True if the hook existed, False if not.
"""
try:
self.hooks[event].remove(hook)
return True
except ValueError:
return False
class Request(RequestHooksMixin):
"""A user-created :class:`Request <Request>` object.
Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.
:param method: HTTP method to use.
:param url: URL to send.
:param headers: dictionary of headers to send.
:param files: dictionary of {filename: fileobject} files to multipart upload.
:param data: the body to attach the request. If a dictionary is provided, form-encoding will take place.
:param params: dictionary of URL parameters to append to the URL.
:param auth: Auth handler or (user, pass) tuple.
:param cookies: dictionary or CookieJar of cookies to attach to this request.
:param hooks: dictionary of callback hooks, for internal usage.
Usage::
>>> import requests
>>> req = requests.Request('GET', 'http://httpbin.org/get')
>>> req.prepare()
<PreparedRequest [GET]>
"""
def __init__(self,
method=None,
url=None,
headers=None,
files=None,
data=None,
params=None,
auth=None,
cookies=None,
hooks=None):
# Default empty dicts for dict params.
data = [] if data is None else data
files = [] if files is None else files
headers = {} if headers is None else headers
params = {} if params is None else params
hooks = {} if hooks is None else hooks
self.hooks = default_hooks()
for (k, v) in list(hooks.items()):
self.register_hook(event=k, hook=v)
self.method = method
self.url = url
self.headers = headers
self.files = files
self.data = data
self.params = params
self.auth = auth
self.cookies = cookies
def __repr__(self):
return '<Request [%s]>' % (self.method)
def prepare(self):
"""Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
p = PreparedRequest()
p.prepare(
method=self.method,
url=self.url,
headers=self.headers,
files=self.files,
data=self.data,
params=self.params,
auth=self.auth,
cookies=self.cookies,
hooks=self.hooks,
)
return p
class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
"""The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
containing the exact bytes that will be sent to the server.
Generated from either a :class:`Request <Request>` object or manually.
Usage::
>>> import requests
>>> req = requests.Request('GET', 'http://httpbin.org/get')
>>> r = req.prepare()
<PreparedRequest [GET]>
>>> s = requests.Session()
>>> s.send(r)
<Response [200]>
"""
def __init__(self):
#: HTTP verb to send to the server.
self.method = None
#: HTTP URL to send the request to.
self.url = None
#: dictionary of HTTP headers.
self.headers = None
# The `CookieJar` used to create the Cookie header will be stored here
# after prepare_cookies is called
self._cookies = None
#: request body to send to the server.
self.body = None
#: dictionary of callback hooks, for internal usage.
self.hooks = default_hooks()
def prepare(self, method=None, url=None, headers=None, files=None,
data=None, params=None, auth=None, cookies=None, hooks=None):
"""Prepares the entire request with the given parameters."""
self.prepare_method(method)
self.prepare_url(url, params)
self.prepare_headers(headers)
self.prepare_cookies(cookies)
self.prepare_body(data, files)
self.prepare_auth(auth, url)
# Note that prepare_auth must be last to enable authentication schemes
# such as OAuth to work on a fully prepared request.
# This MUST go after prepare_auth. Authenticators could add a hook
self.prepare_hooks(hooks)
def __repr__(self):
return '<PreparedRequest [%s]>' % (self.method)
def copy(self):
p = PreparedRequest()
p.method = self.method
p.url = self.url
p.headers = self.headers.copy()
p._cookies = self._cookies.copy()
p.body = self.body
p.hooks = self.hooks
return p
def prepare_method(self, method):
"""Prepares the given HTTP method."""
self.method = method
if self.method is not None:
self.method = self.method.upper()
def prepare_url(self, url, params):
"""Prepares the given HTTP URL."""
#: Accept objects that have string representations.
try:
url = unicode(url)
except NameError:
# We're on Python 3.
url = str(url)
except UnicodeDecodeError:
pass
# Don't do any URL preparation for oddball schemes
if ':' in url and not url.lower().startswith('http'):
self.url = url
return
# Support for unicode domain names and paths.
scheme, auth, host, port, path, query, fragment = parse_url(url)
if not scheme:
raise MissingSchema("Invalid URL {0!r}: No schema supplied. "
"Perhaps you meant http://{0}?".format(url))
if not host:
raise InvalidURL("Invalid URL %r: No host supplied" % url)
# Only want to apply IDNA to the hostname
try:
host = host.encode('idna').decode('utf-8')
except UnicodeError:
raise InvalidURL('URL has an invalid label.')
# Carefully reconstruct the network location
netloc = auth or ''
if netloc:
netloc += '@'
netloc += host
if port:
netloc += ':' + str(port)
# Bare domains aren't valid URLs.
if not path:
path = '/'
if is_py2:
if isinstance(scheme, str):
scheme = scheme.encode('utf-8')
if isinstance(netloc, str):
netloc = netloc.encode('utf-8')
if isinstance(path, str):
path = path.encode('utf-8')
if isinstance(query, str):
query = query.encode('utf-8')
if isinstance(fragment, str):
fragment = fragment.encode('utf-8')
enc_params = self._encode_params(params)
if enc_params:
if query:
query = '%s&%s' % (query, enc_params)
else:
query = enc_params
url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
self.url = url
def prepare_headers(self, headers):
"""Prepares the given HTTP headers."""
if headers:
self.headers = CaseInsensitiveDict((to_native_string(name), value) for name, value in headers.items())
else:
self.headers = CaseInsensitiveDict()
def prepare_body(self, data, files):
"""Prepares the given HTTP body data."""
# Check if file, fo, generator, iterator.
# If not, run through normal process.
# Nottin' on you.
body = None
content_type = None
length = None
is_stream = all([
hasattr(data, '__iter__'),
not isinstance(data, (basestring, list, tuple, dict))
])
try:
length = super_len(data)
except (TypeError, AttributeError, UnsupportedOperation):
length = None
if is_stream:
body = data
if files:
raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
if length is not None:
self.headers['Content-Length'] = builtin_str(length)
else:
self.headers['Transfer-Encoding'] = 'chunked'
else:
# Multi-part file uploads.
if files:
(body, content_type) = self._encode_files(files, data)
else:
if data:
body = self._encode_params(data)
if isinstance(data, str) or isinstance(data, builtin_str) or hasattr(data, 'read'):
content_type = None
else:
content_type = 'application/x-www-form-urlencoded'
self.prepare_content_length(body)
# Add content-type if it wasn't explicitly provided.
if (content_type) and (not 'content-type' in self.headers):
self.headers['Content-Type'] = content_type
self.body = body
def prepare_content_length(self, body):
if hasattr(body, 'seek') and hasattr(body, 'tell'):
body.seek(0, 2)
self.headers['Content-Length'] = builtin_str(body.tell())
body.seek(0, 0)
elif body is not None:
l = super_len(body)
if l:
self.headers['Content-Length'] = builtin_str(l)
elif self.method not in ('GET', 'HEAD'):
self.headers['Content-Length'] = '0'
def prepare_auth(self, auth, url=''):
"""Prepares the given HTTP auth data."""
# If no Auth is explicitly provided, extract it from the URL first.
if auth is None:
url_auth = get_auth_from_url(self.url)
auth = url_auth if any(url_auth) else None
if auth:
if isinstance(auth, tuple) and len(auth) == 2:
# special-case basic HTTP auth
auth = HTTPBasicAuth(*auth)
# Allow auth to make its changes.
r = auth(self)
# Update self to reflect the auth changes.
self.__dict__.update(r.__dict__)
# Recompute Content-Length
self.prepare_content_length(self.body)
def prepare_cookies(self, cookies):
"""Prepares the given HTTP cookie data."""
if isinstance(cookies, cookielib.CookieJar):
self._cookies = cookies
else:
self._cookies = cookiejar_from_dict(cookies)
cookie_header = get_cookie_header(self._cookies, self)
if cookie_header is not None:
self.headers['Cookie'] = cookie_header
def prepare_hooks(self, hooks):
"""Prepares the given hooks."""
for event in hooks:
self.register_hook(event, hooks[event])
class Response(object):
"""The :class:`Response <Response>` object, which contains a
server's response to an HTTP request.
"""
__attrs__ = [
'_content',
'status_code',
'headers',
'url',
'history',
'encoding',
'reason',
'cookies',
'elapsed',
'request',
]
def __init__(self):
super(Response, self).__init__()
self._content = False
self._content_consumed = False
#: Integer Code of responded HTTP Status, e.g. 404 or 200.
self.status_code = None
#: Case-insensitive Dictionary of Response Headers.
#: For example, ``headers['content-encoding']`` will return the
#: value of a ``'Content-Encoding'`` response header.
self.headers = CaseInsensitiveDict()
#: File-like object representation of response (for advanced usage).
#: Use of ``raw`` requires that ``stream=True`` be set on the request.
# This requirement does not apply for use internally to Requests.
self.raw = None
#: Final URL location of Response.
self.url = None
#: Encoding to decode with when accessing r.text.
self.encoding = None
#: A list of :class:`Response <Response>` objects from
#: the history of the Request. Any redirect responses will end
#: up here. The list is sorted from the oldest to the most recent request.
self.history = []
#: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
self.reason = None
#: A CookieJar of Cookies the server sent back.
self.cookies = cookiejar_from_dict({})
#: The amount of time elapsed between sending the request
#: and the arrival of the response (as a timedelta)
self.elapsed = datetime.timedelta(0)
def __getstate__(self):
# Consume everything; accessing the content attribute makes
# sure the content has been fully read.
if not self._content_consumed:
self.content
return dict(
(attr, getattr(self, attr, None))
for attr in self.__attrs__
)
def __setstate__(self, state):
for name, value in state.items():
setattr(self, name, value)
# pickled objects do not have .raw
setattr(self, '_content_consumed', True)
setattr(self, 'raw', None)
def __repr__(self):
return '<Response [%s]>' % (self.status_code)
def __bool__(self):
"""Returns true if :attr:`status_code` is 'OK'."""
return self.ok
def __nonzero__(self):
"""Returns true if :attr:`status_code` is 'OK'."""
return self.ok
def __iter__(self):
"""Allows you to use a response as an iterator."""
return self.iter_content(128)
@property
def ok(self):
try:
self.raise_for_status()
except RequestException:
return False
return True
@property
def is_redirect(self):
"""True if this Response is a well-formed HTTP redirect that could have
been processed automatically (by :meth:`Session.resolve_redirects`).
"""
return ('location' in self.headers and self.status_code in REDIRECT_STATI)
@property
def apparent_encoding(self):
"""The apparent encoding, provided by the chardet library"""
return chardet.detect(self.content)['encoding']
def iter_content(self, chunk_size=1, decode_unicode=False):
"""Iterates over the response data. When stream=True is set on the
request, this avoids reading the content at once into memory for
large responses. The chunk size is the number of bytes it should
read into memory. This is not necessarily the length of each item
returned as decoding can take place.
"""
if self._content_consumed:
# simulate reading small chunks of the content
return iter_slices(self._content, chunk_size)
def generate():
try:
# Special case for urllib3.
try:
for chunk in self.raw.stream(chunk_size, decode_content=True):
yield chunk
except IncompleteRead as e:
raise ChunkedEncodingError(e)
except DecodeError as e:
raise ContentDecodingError(e)
except AttributeError:
# Standard file-like object.
while True:
chunk = self.raw.read(chunk_size)
if not chunk:
break
yield chunk
self._content_consumed = True
gen = generate()
if decode_unicode:
gen = stream_decode_response_unicode(gen, self)
return gen
def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None):
"""Iterates over the response data, one line at a time. When
stream=True is set on the request, this avoids reading the
content at once into memory for large responses.
"""
pending = None
for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
if pending is not None:
chunk = pending + chunk
lines = chunk.splitlines()
if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
pending = lines.pop()
else:
pending = None
for line in lines:
yield line
if pending is not None:
yield pending
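    # Editor's note: an illustrative trace of the `pending` buffer above.
    # Given chunks [b"ab\ncd", b"ef\n"]: the first chunk splits into
    # [b"ab", b"cd"]; b"ab" is yielded, while b"cd" does not end on a line
    # boundary and is held back in `pending`, then prepended to the next
    # chunk, which yields b"cdef". Any final remainder is flushed after the
    # loop.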
@property
def content(self):
"""Content of the response, in bytes."""
if self._content is False:
# Read the contents.
try:
if self._content_consumed:
raise RuntimeError(
'The content for this response was already consumed')
if self.status_code == 0:
self._content = None
else:
self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes()
except AttributeError:
self._content = None
self._content_consumed = True
# don't need to release the connection; that's been handled by urllib3
# since we exhausted the data.
return self._content
@property
def text(self):
"""Content of the response, in unicode.
If Response.encoding is None, encoding will be guessed using
``chardet``.
The encoding of the response content is determined based solely on HTTP
headers, following RFC 2616 to the letter. If you can take advantage of
non-HTTP knowledge to make a better guess at the encoding, you should
set ``r.encoding`` appropriately before accessing this property.
"""
# Try charset from content-type
content = None
encoding = self.encoding
if not self.content:
return str('')
# Fallback to auto-detected encoding.
if self.encoding is None:
encoding = self.apparent_encoding
# Decode unicode from given encoding.
try:
content = str(self.content, encoding, errors='replace')
except (LookupError, TypeError):
# A LookupError is raised if the encoding was not found which could
# indicate a misspelling or similar mistake.
#
# A TypeError can be raised if encoding is None
#
# So we try blindly encoding.
content = str(self.content, errors='replace')
return content
def json(self, **kwargs):
"""Returns the json-encoded content of a response, if any.
:param \*\*kwargs: Optional arguments that ``json.loads`` takes.
"""
if not self.encoding and len(self.content) > 3:
# No encoding set. JSON RFC 4627 section 3 states we should expect
# UTF-8, -16 or -32. Detect which one to use; If the detection or
# decoding fails, fall back to `self.text` (using chardet to make
# a best guess).
encoding = guess_json_utf(self.content)
if encoding is not None:
try:
return json.loads(self.content.decode(encoding), **kwargs)
except UnicodeDecodeError:
# Wrong UTF codec detected; usually because it's not UTF-8
# but some other 8-bit codec. This is an RFC violation,
# and the server didn't bother to tell us what codec *was*
# used.
pass
return json.loads(self.text, **kwargs)
@property
def links(self):
"""Returns the parsed header links of the response, if any."""
header = self.headers.get('link')
# l = MultiDict()
l = {}
if header:
links = parse_header_links(header)
for link in links:
key = link.get('rel') or link.get('url')
l[key] = link
return l
def raise_for_status(self):
"""Raises stored :class:`HTTPError`, if one occurred."""
http_error_msg = ''
if 400 <= self.status_code < 500:
http_error_msg = '%s Client Error: %s' % (self.status_code, self.reason)
elif 500 <= self.status_code < 600:
http_error_msg = '%s Server Error: %s' % (self.status_code, self.reason)
if http_error_msg:
raise HTTPError(http_error_msg, response=self)
def close(self):
"""Closes the underlying file descriptor and releases the connection
back to the pool.
*Note: Should not normally need to be called explicitly.*
"""
return self.raw.release_conn()
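# Editor's note: a small usage sketch tying the three classes above together,
# mirroring the doctest snippets in the class docstrings. It assumes a
# reachable httpbin instance and is illustrative only.
def _demo_prepare_flow():
    import requests
    req = Request('GET', 'http://httpbin.org/get', params={'q': 'models'})
    prepped = req.prepare()        # PreparedRequest with the encoded URL
    session = requests.Session()
    resp = session.send(prepped)   # returns a Response
    return resp.status_code, resp.json()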
| gpl-3.0 |
j-carl/ansible | hacking/build_library/build_ansible/command_plugins/release_announcement.py | 55 | 2905 | # coding: utf-8
# Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
from collections import UserString
from distutils.version import LooseVersion
# Pylint doesn't understand Python3 namespace modules.
from ..commands import Command # pylint: disable=relative-beyond-top-level
from .. import errors # pylint: disable=relative-beyond-top-level
class VersionStr(UserString):
def __init__(self, string):
super().__init__(string.strip())
self.ver_obj = LooseVersion(string)
def transform_args(args):
# Make it possible to sort versions in the jinja2 templates
new_versions = []
for version in args.versions:
new_versions.append(VersionStr(version))
args.versions = new_versions
return args
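# Editor's note: an illustration of why VersionStr exists. Plain string
# sorting is lexicographic, so "2.10.0" sorts before "2.9.0"; sorting on the
# attached LooseVersion object orders releases numerically:
#
#   sorted(["2.10.0", "2.9.0"]) == ["2.10.0", "2.9.0"]
#   sorted([VersionStr("2.10.0"), VersionStr("2.9.0")],
#          key=lambda v: v.ver_obj)  # -> ["2.9.0", "2.10.0"]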
def write_message(filename, message):
if filename != '-':
with open(filename, 'w') as out_file:
out_file.write(message)
else:
sys.stdout.write('\n\n')
sys.stdout.write(message)
class ReleaseAnnouncementCommand(Command):
name = 'release-announcement'
@classmethod
def init_parser(cls, add_parser):
parser = add_parser(cls.name,
description="Generate email and twitter announcements from template")
parser.add_argument("--version", dest="versions", type=str, required=True, action='append',
help="Versions of Ansible to announce")
parser.add_argument("--name", type=str, required=True, help="Real name to use on emails")
parser.add_argument("--email-out", type=str, default="-",
help="Filename to place the email announcement into")
parser.add_argument("--twitter-out", type=str, default="-",
help="Filename to place the twitter announcement into")
@classmethod
def main(cls, args):
if sys.version_info < (3, 6):
raise errors.DependencyError('The {0} subcommand needs Python-3.6+'
' to run'.format(cls.name))
# Import here because these functions are invalid on Python-3.5 and the command plugins and
# init_parser() method need to be compatible with Python-3.4+ for now.
# Pylint doesn't understand Python3 namespace modules.
from .. announce import create_short_message, create_long_message # pylint: disable=relative-beyond-top-level
args = transform_args(args)
twitter_message = create_short_message(args.versions)
email_message = create_long_message(args.versions, args.name)
write_message(args.twitter_out, twitter_message)
write_message(args.email_out, email_message)
return 0
| gpl-3.0 |
eugena/django | django/contrib/sessions/backends/signed_cookies.py | 383 | 2895 | from django.conf import settings
from django.contrib.sessions.backends.base import SessionBase
from django.core import signing
class SessionStore(SessionBase):
def load(self):
"""
We load the data from the key itself instead of fetching from
some external data store. Opposite of _get_session_key(),
raises BadSignature if signature fails.
"""
try:
return signing.loads(self.session_key,
serializer=self.serializer,
# This doesn't handle non-default expiry dates, see #19201
max_age=settings.SESSION_COOKIE_AGE,
salt='django.contrib.sessions.backends.signed_cookies')
except Exception:
# BadSignature, ValueError, or unpickling exceptions. If any of
# these happen, reset the session.
self.create()
return {}
def create(self):
"""
To create a new key, we simply make sure that the modified flag is set
so that the cookie is set on the client for the current request.
"""
self.modified = True
def save(self, must_create=False):
"""
To save, we get the session key as a securely signed string and then
set the modified flag so that the cookie is set on the client for the
current request.
"""
self._session_key = self._get_session_key()
self.modified = True
def exists(self, session_key=None):
"""
This method makes sense when you're talking to a shared resource, but
it doesn't matter when you're storing the information in the client's
cookie.
"""
return False
def delete(self, session_key=None):
"""
To delete, we clear the session key and the underlying data structure
and set the modified flag so that the cookie is set on the client for
the current request.
"""
self._session_key = ''
self._session_cache = {}
self.modified = True
def cycle_key(self):
"""
Keeps the same data but with a new key. To do this, we just have to
call ``save()`` and it will automatically save a cookie with a new key
at the end of the request.
"""
self.save()
def _get_session_key(self):
"""
Most session backends don't need to override this method, but we do,
because instead of generating a random string, we want to actually
generate a secure url-safe Base64-encoded string of data as our
session key.
"""
session_cache = getattr(self, '_session_cache', {})
return signing.dumps(session_cache, compress=True,
salt='django.contrib.sessions.backends.signed_cookies',
serializer=self.serializer)
@classmethod
def clear_expired(cls):
pass
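# Editor's note: a hedged sketch of the signing round trip this backend relies
# on, using only documented django.core.signing calls and the same salt as
# above. Running it requires configured Django settings.
def _demo_signed_payload():
    data = {'cart': [1, 2, 3]}
    payload = signing.dumps(
        data, compress=True,
        salt='django.contrib.sessions.backends.signed_cookies')
    # loads() verifies the signature and raises BadSignature on tampering.
    return signing.loads(
        payload, salt='django.contrib.sessions.backends.signed_cookies')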
| bsd-3-clause |
fubecka/f5-dashboard | flask/lib/python2.6/site-packages/dns/rdtypes/txtbase.py | 100 | 2994 | # Copyright (C) 2006, 2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""TXT-like base class."""
import dns.exception
import dns.rdata
import dns.tokenizer
class TXTBase(dns.rdata.Rdata):
"""Base class for rdata that is like a TXT record
@ivar strings: the text strings
@type strings: list of string
@see: RFC 1035"""
__slots__ = ['strings']
def __init__(self, rdclass, rdtype, strings):
super(TXTBase, self).__init__(rdclass, rdtype)
if isinstance(strings, str):
strings = [ strings ]
self.strings = strings[:]
def to_text(self, origin=None, relativize=True, **kw):
txt = ''
prefix = ''
for s in self.strings:
txt += '%s"%s"' % (prefix, dns.rdata._escapify(s))
prefix = ' '
return txt
def from_text(cls, rdclass, rdtype, tok, origin = None, relativize = True):
strings = []
while 1:
token = tok.get().unescape()
if token.is_eol_or_eof():
break
if not (token.is_quoted_string() or token.is_identifier()):
raise dns.exception.SyntaxError("expected a string")
if len(token.value) > 255:
raise dns.exception.SyntaxError("string too long")
strings.append(token.value)
if len(strings) == 0:
raise dns.exception.UnexpectedEnd
return cls(rdclass, rdtype, strings)
from_text = classmethod(from_text)
def to_wire(self, file, compress = None, origin = None):
for s in self.strings:
l = len(s)
assert l < 256
byte = chr(l)
file.write(byte)
file.write(s)
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin = None):
strings = []
while rdlen > 0:
l = ord(wire[current])
current += 1
rdlen -= 1
if l > rdlen:
raise dns.exception.FormError
s = wire[current : current + l].unwrap()
current += l
rdlen -= l
strings.append(s)
return cls(rdclass, rdtype, strings)
from_wire = classmethod(from_wire)
def _cmp(self, other):
return cmp(self.strings, other.strings)
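# Editor's note: an illustrative round trip of the <length><bytes> wire layout
# implemented by to_wire()/from_wire() above, kept Python 2 style to match the
# module. It is a standalone sketch, not used by dnspython itself.
def _demo_txt_wire_roundtrip(strings):
    wire = ''.join(chr(len(s)) + s for s in strings)    # encode
    decoded, pos = [], 0
    while pos < len(wire):                              # decode
        l = ord(wire[pos])
        decoded.append(wire[pos + 1:pos + 1 + l])
        pos += 1 + l
    return wire, decoded
# _demo_txt_wire_roundtrip(['hello', 'world'])[1] == ['hello', 'world']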
| apache-2.0 |
guschmue/tensorflow | tensorflow/python/kernel_tests/concatenate_dataset_op_test.py | 42 | 5552 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the experimental input pipeline ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.util import nest
from tensorflow.python.framework import errors
from tensorflow.python.framework import tensor_shape
from tensorflow.python.platform import test
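# Editor's note: an illustrative summary (not part of the test) of the
# behaviour exercised below. `a.concatenate(b)` yields every element of `a`
# followed by every element of `b`; both datasets must agree in structure and
# dtype, and any axis whose static sizes differ (e.g. 4 vs 15) is reported as
# None in `output_shapes`, as the second test asserts.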
class ConcatenateDatasetTest(test.TestCase):
def testConcatenateDataset(self):
input_components = (
np.tile(np.array([[1], [2], [3], [4]]), 20),
np.tile(np.array([[12], [13], [14], [15]]), 15),
np.array([37.0, 38.0, 39.0, 40.0]))
to_concatenate_components = (
np.tile(np.array([[1], [2], [3], [4], [5]]), 20),
np.tile(np.array([[12], [13], [14], [15], [16]]), 15),
np.array([37.0, 38.0, 39.0, 40.0, 41.0]))
input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
dataset_to_concatenate = dataset_ops.Dataset.from_tensor_slices(
to_concatenate_components)
concatenated = input_dataset.concatenate(dataset_to_concatenate)
self.assertEqual(concatenated.output_shapes, (tensor_shape.TensorShape(
[20]), tensor_shape.TensorShape([15]), tensor_shape.TensorShape([])))
iterator = concatenated.make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op)
for i in range(9):
result = sess.run(get_next)
if i < 4:
for component, result_component in zip(input_components, result):
self.assertAllEqual(component[i], result_component)
else:
for component, result_component in zip(to_concatenate_components,
result):
self.assertAllEqual(component[i - 4], result_component)
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testConcatenateDatasetDifferentShape(self):
input_components = (
np.tile(np.array([[1], [2], [3], [4]]), 20),
np.tile(np.array([[12], [13], [14], [15]]), 4))
to_concatenate_components = (
np.tile(np.array([[1], [2], [3], [4], [5]]), 20),
np.tile(np.array([[12], [13], [14], [15], [16]]), 15))
input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
dataset_to_concatenate = dataset_ops.Dataset.from_tensor_slices(
to_concatenate_components)
concatenated = input_dataset.concatenate(dataset_to_concatenate)
self.assertEqual(
[ts.as_list()
for ts in nest.flatten(concatenated.output_shapes)], [[20], [None]])
iterator = concatenated.make_initializable_iterator()
init_op = iterator.initializer
get_next = iterator.get_next()
with self.test_session() as sess:
sess.run(init_op)
for i in range(9):
result = sess.run(get_next)
if i < 4:
for component, result_component in zip(input_components, result):
self.assertAllEqual(component[i], result_component)
else:
for component, result_component in zip(to_concatenate_components,
result):
self.assertAllEqual(component[i - 4], result_component)
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testConcatenateDatasetDifferentStructure(self):
input_components = (
np.tile(np.array([[1], [2], [3], [4]]), 5),
np.tile(np.array([[12], [13], [14], [15]]), 4))
to_concatenate_components = (
np.tile(np.array([[1], [2], [3], [4], [5]]), 20),
np.tile(np.array([[12], [13], [14], [15], [16]]), 15),
np.array([37.0, 38.0, 39.0, 40.0, 41.0]))
input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
dataset_to_concatenate = dataset_ops.Dataset.from_tensor_slices(
to_concatenate_components)
with self.assertRaisesRegexp(ValueError,
"don't have the same number of elements"):
input_dataset.concatenate(dataset_to_concatenate)
def testConcatenateDatasetDifferentType(self):
input_components = (
np.tile(np.array([[1], [2], [3], [4]]), 5),
np.tile(np.array([[12], [13], [14], [15]]), 4))
to_concatenate_components = (
np.tile(np.array([[1.0], [2.0], [3.0], [4.0]]), 5),
np.tile(np.array([[12], [13], [14], [15]]), 15))
input_dataset = dataset_ops.Dataset.from_tensor_slices(input_components)
dataset_to_concatenate = dataset_ops.Dataset.from_tensor_slices(
to_concatenate_components)
with self.assertRaisesRegexp(TypeError, "have different types"):
input_dataset.concatenate(dataset_to_concatenate)
if __name__ == "__main__":
test.main()
| apache-2.0 |
eneldoserrata/marcos_openerp | addons/product_container/product_container.py | 1 | 1581 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class product_ul(osv.osv):
_inherit = "product.ul"
_columns = {
'container_id' : fields.many2one('product.product', 'Container Product', domain=[('container_ok','=',True)]),
}
product_ul()
class product_product(osv.Model):
_inherit = 'product.product'
_columns = {
'container_ok': fields.boolean('Container', help='Select this if the product will act as a container to carry other products.'),
'container_id': fields.many2one('product.product', 'Packed In', domain=[('container_ok','=',True)])
}
product_product() | agpl-3.0 |
bioinformatics-ua/montra | emif/questionnaire/migrations/0006_auto__chg_field_choice_value__chg_field_choice_text_en__chg_field_ques.py | 2 | 12721 | # -*- coding: utf-8 -*-
# Copyright (C) 2014 Universidade de Aveiro, DETI/IEETA, Bioinformatics Group - http://bioinformatics.ua.pt/
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Choice.value'
db.alter_column('questionnaire_choice', 'value', self.gf('django.db.models.fields.CharField')(max_length=1000))
# Changing field 'Choice.text_en'
db.alter_column('questionnaire_choice', 'text_en', self.gf('django.db.models.fields.CharField')(max_length=2000))
# Changing field 'Question.number'
db.alter_column('questionnaire_question', 'number', self.gf('django.db.models.fields.CharField')(max_length=255))
# Changing field 'Question.help_text'
db.alter_column('questionnaire_question', 'help_text', self.gf('django.db.models.fields.CharField')(max_length=2255))
# Changing field 'QuestionSet.heading'
db.alter_column('questionnaire_questionset', 'heading', self.gf('django.db.models.fields.CharField')(max_length=255))
def backwards(self, orm):
# Changing field 'Choice.value'
db.alter_column('questionnaire_choice', 'value', self.gf('django.db.models.fields.CharField')(max_length=64))
# Changing field 'Choice.text_en'
db.alter_column('questionnaire_choice', 'text_en', self.gf('django.db.models.fields.CharField')(max_length=200))
# Changing field 'Question.number'
db.alter_column('questionnaire_question', 'number', self.gf('django.db.models.fields.CharField')(max_length=8))
# Changing field 'Question.help_text'
db.alter_column('questionnaire_question', 'help_text', self.gf('django.db.models.fields.CharField')(max_length=255))
# Changing field 'QuestionSet.heading'
db.alter_column('questionnaire_questionset', 'heading', self.gf('django.db.models.fields.CharField')(max_length=64))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'questionnaire.answer': {
'Meta': {'object_name': 'Answer'},
'answer': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['questionnaire.Question']"}),
'runid': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'subject': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['questionnaire.Subject']"})
},
'questionnaire.choice': {
'Meta': {'object_name': 'Choice'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['questionnaire.Question']"}),
'sortid': ('django.db.models.fields.IntegerField', [], {}),
'text_en': ('django.db.models.fields.CharField', [], {'max_length': '2000'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '1000'})
},
'questionnaire.question': {
'Meta': {'object_name': 'Question'},
'checks': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'extra_en': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'footer_en': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'help_text': ('django.db.models.fields.CharField', [], {'max_length': '2255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'questionset': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['questionnaire.QuestionSet']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'stats': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'text_en': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '32'})
},
'questionnaire.questionnaire': {
'Meta': {'object_name': 'Questionnaire'},
'disable': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'redirect_url': ('django.db.models.fields.CharField', [], {'default': "'/static/complete.html'", 'max_length': '128'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'questionnaire.questionset': {
'Meta': {'object_name': 'QuestionSet'},
'checks': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'heading': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'questionnaire': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['questionnaire.Questionnaire']"}),
'sortid': ('django.db.models.fields.IntegerField', [], {}),
'text_en': ('django.db.models.fields.TextField', [], {})
},
'questionnaire.runinfo': {
'Meta': {'object_name': 'RunInfo'},
'cookies': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'emailcount': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'emailsent': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lastemailerror': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'questionset': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['questionnaire.QuestionSet']", 'null': 'True', 'blank': 'True'}),
'random': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'runid': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'skipped': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
'subject': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['questionnaire.Subject']"}),
'tags': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'questionnaire.runinfohistory': {
'Meta': {'object_name': 'RunInfoHistory'},
'completed': ('django.db.models.fields.DateField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'questionnaire': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['questionnaire.Questionnaire']"}),
'runid': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'skipped': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'subject': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['questionnaire.Subject']"}),
'tags': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'questionnaire.subject': {
'Meta': {'object_name': 'Subject'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'formtype': ('django.db.models.fields.CharField', [], {'default': "'email'", 'max_length': '16'}),
'gender': ('django.db.models.fields.CharField', [], {'default': "'unset'", 'max_length': '8', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'default': "'en'", 'max_length': '2'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'nextrun': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'default': "'inactive'", 'max_length': '16'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
}
}
complete_apps = ['questionnaire']
| gpl-3.0 |
cmdunkers/DeeperMind | PythonEnv/lib/python2.7/site-packages/scipy/weave/examples/fibonacci.py | 100 | 3980 | # Typical run:
# C:\home\eric\wrk\scipy\weave\examples>python fibonacci.py
# Recursively computing the first 30 fibonacci numbers:
# speed in python: 4.31599998474
# speed in c: 0.0499999523163
# speed up: 86.32
# Looping to compute the first 30 fibonacci numbers:
# speed in python: 0.000520999908447
# speed in c: 5.00000715256e-005
# speed up: 10.42
# fib(30) 832040 832040 832040 832040
from __future__ import absolute_import, print_function
import sys
sys.path.insert(0,'..')
import ext_tools
def build_fibonacci():
""" Builds an extension module with fibonacci calculators.
"""
mod = ext_tools.ext_module('fibonacci_ext')
a = 1 # this is effectively a type declaration
# recursive fibonacci in C
fib_code = """
int fib1(int a)
{
if(a <= 2)
return 1;
else
return fib1(a-2) + fib1(a-1);
}
"""
ext_code = """
return_val = fib1(a);
"""
fib = ext_tools.ext_function('c_fib1',ext_code,['a'])
fib.customize.add_support_code(fib_code)
mod.add_function(fib)
# looping fibonacci in C
fib_code = """
int fib2( int a )
{
int last, next_to_last, result;
if( a <= 2 )
return 1;
last = next_to_last = 1;
for(int i = 2; i < a; i++ )
{
result = last + next_to_last;
next_to_last = last;
last = result;
}
return result;
}
"""
ext_code = """
return_val = fib2(a);
"""
fib = ext_tools.ext_function('c_fib2',ext_code,['a'])
fib.customize.add_support_code(fib_code)
mod.add_function(fib)
mod.compile()
try:
import fibonacci_ext
except ImportError:
build_fibonacci()
import fibonacci_ext
c_fib1 = fibonacci_ext.c_fib1
c_fib2 = fibonacci_ext.c_fib2
#################################################################
# This is where it might normally end, but we've added some timings
# below. Recursive solutions are much slower, and C is 10-50x faster
# than the equivalent Python for this simple little routine.
#
#################################################################
def py_fib1(a):
if a <= 2:
return 1
else:
return py_fib1(a-2) + py_fib1(a-1)
def py_fib2(a):
if a <= 2:
return 1
last = next_to_last = 1
for i in range(2,a):
result = last + next_to_last
next_to_last = last
last = result
return result
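# Quick sanity check (illustrative): py_fib1(10) == py_fib2(10) == 55.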
import time
def recurse_compare(n):
print('Recursively computing the first %d fibonacci numbers:' % n)
t1 = time.time()
for i in range(n):
py_fib1(i)
t2 = time.time()
py = t2 - t1
print(' speed in python:', t2 - t1)
# load into cache
c_fib1(i)
t1 = time.time()
for i in range(n):
c_fib1(i)
t2 = time.time()
print(' speed in c:',t2 - t1)
print(' speed up: %3.2f' % (py/(t2-t1)))
def loop_compare(m,n):
print('Looping to compute the first %d fibonacci numbers:' % n)
t1 = time.time()
for i in range(m):
for i in range(n):
py_fib2(i)
t2 = time.time()
py = (t2-t1)
print(' speed in python:', (t2 - t1)/m)
# load into cache
c_fib2(i)
t1 = time.time()
for i in range(m):
for i in range(n):
c_fib2(i)
t2 = time.time()
print(' speed in c:',(t2 - t1) / m)
print(' speed up: %3.2f' % (py/(t2-t1)))
if __name__ == "__main__":
n = 30
recurse_compare(n)
m = 1000
loop_compare(m,n)
print('fib(30)', c_fib1(30),py_fib1(30),c_fib2(30),py_fib2(30))
| bsd-3-clause |
SGenheden/lammps | tools/moltemplate/src/postprocess_input_script.py | 19 | 5958 | #!/usr/bin/env python
"""
Reorder the integer arguments to the commands in a LAMMPS input
file if these arguments violate LAMMPS order requirements.
We have to do this because the moltemplate.sh script will automatically
assign these integers in a way which may violate these restrictions
and the user has little control over this.
This script:
swaps the I and J integers in "pair_coeff I J ..." commands when I > J
Other features may be added later
"""
import sys
lines_orig = []
f = None
fname = None
num_lines_ignore = 0
# Lines from files passed as arguments are read and processed silently.
# (Why? Sometimes it's necessary to read the contents of previous input scripts
# in order to be able to understand a script command which appears later.
# I'm assuming these files will be processed by lammps in the same order. So I
# must ensure that moltemplate.sh passes them to this program in that order.
# I'm too lazy to read the "include" commands in input scripts correctly.)
if len(sys.argv) > 1:
for fname in sys.argv[1:]:
f = open(fname, 'r')
in_stream = f
        new_lines = in_stream.readlines()
        lines_orig += new_lines
        num_lines_ignore += len(new_lines)
f.close()
# Lines read from the standard input are read, processed, and printed to stdout
in_stream = sys.stdin
lines_orig += in_stream.readlines()
pair_style_list=[]
swap_occured = False
warn_wildcard = False
i=0
while i < len(lines_orig):
# Read the next logical line
# Any lines ending in '&' should be merged with the next line before breaking
line_orig = ''
while i < len(lines_orig):
line_counter = 1 + i - num_lines_ignore
line_orig += lines_orig[i]
if ((len(line_orig) < 2) or (line_orig[-2:] != '&\n')):
break
i += 1
line = line_orig.replace('&\n','\n').rstrip('\n')
comment = ''
if '#' in line_orig:
ic = line.find('#')
line = line_orig[:ic]
comment = line_orig[ic:] # keep track of comments (put them back later)
tokens = line.strip().split()
if ((len(tokens) >= 2) and (tokens[0] == 'pair_style')):
pair_style_list = tokens[1:]
if ((len(tokens) >= 3) and (tokens[0] == 'pair_coeff')):
if ((tokens[1].isdigit() and (tokens[2].isdigit())) and
(int(tokens[1]) > int(tokens[2]))):
swap_occured = True
tmp = tokens[2]
tokens[2] = tokens[1]
tokens[1] = tmp
if i >= num_lines_ignore:
# polite warning:
sys.stderr.write('swapped pair_coeff order on line '+str(line_counter))
#if (fname != None):
# sys.stderr.write(' of file \"'+fname+'\"')
sys.stderr.write('\n')
# Deal with the "hbond/" pair coeffs.
#
# The hbond/dreiding pair style designates one of the two atom types
# as a donor, and the other as an acceptor (using the 'i','j' flags)
            # If we swapped the atom types earlier, we also need to swap 'i' with 'j'.
#
# If "hbond/dreiding.." pair style is used with "hybrid" or
# "hybrid/overlay" then tokens[3] is the name of the pair style
# and tokens[5] is either 'i' or 'j'.
if len(pair_style_list) > 0:
if ((pair_style_list[0] == 'hybrid') or
(pair_style_list[0] == 'hybrid/overlay')):
if ((len(tokens) > 5) and (tokens[5] == 'i') and (tokens[3][0:6]=='hbond/')):
tokens[5] = 'j'
sys.stderr.write(' (and replaced \"i\" with \"j\")\n')
elif ((len(tokens) > 5) and (tokens[5] == 'j') and (tokens[3][0:6]=='hbond/')):
tokens[5] = 'i'
sys.stderr.write(' (and replaced \"j\" with \"i\")\n')
elif (pair_style_list[0][0:6] == 'hbond/'):
if ((len(tokens) > 4) and (tokens[4] == 'i')):
tokens[4] = 'j'
sys.stderr.write(' (and replaced \"i\" with \"j\")\n')
elif ((len(tokens) > 4) and (tokens[4] == 'j')):
tokens[4] = 'i'
sys.stderr.write(' (and replaced \"j\" with \"i\")\n')
sys.stdout.write((' '.join(tokens)+comment).replace('\n','&\n')+'\n')
else:
if ((('*' in tokens[1]) or ('*' in tokens[2]))
and
(not (('*' == tokens[1]) and ('*' == tokens[2])))):
warn_wildcard = True
if i >= num_lines_ignore:
sys.stdout.write(line_orig)
else:
if i >= num_lines_ignore:
sys.stdout.write(line_orig)
i += 1
if swap_occured:
sys.stderr.write('\n'
' WARNING: Atom order in some pair_coeff commands was swapped to pacify LAMMPS.\n'
' For some exotic pair_styles such as hbond/dreiding, this is not enough. If you\n'
' use exotic pair_styles, please verify the \"pair_coeff\" commands are correct.\n')
if warn_wildcard:
sys.stderr.write('\n'
' WARNING: The use of wildcard characters (\"*\") in your \"pair_coeff\"\n'
' commands is not recommended.\n'
' (It is safer to specify each interaction pair manually.\n'
' Check every pair_coeff command. Make sure that every atom type in\n'
' the first group is <= atom types in the second group.\n'
' Moltemplate does NOT do this when wildcards are used.)\n'
' If you are using a many-body pair style then ignore this warning.\n')
| gpl-2.0 |
marionleborgne/cloudbrain-websocket-server | src/cbws/server.py | 2 | 12783 | import pika
import json
import logging
from collections import defaultdict
from sockjs.tornado.conn import SockJSConnection
from sockjs.tornado import SockJSRouter
from tornado.ioloop import IOLoop
from tornado.web import Application
from uuid import uuid4
from cloudbrain.core.auth import CloudbrainAuth
_LOGGER = logging.getLogger()
_LOGGER.setLevel(logging.INFO)
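# Infinitely-nestable dict: missing keys auto-create another recursivedict,
# so lookups like d[exchange][routing_key]['total_records'] work without
# any intermediate setup.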
recursivedict = lambda: defaultdict(recursivedict)
def _rt_stream_connection_factory(rabbitmq_address, rabbit_auth_url):
"""
RtStreamConnection class factory.
:param rabbitmq_address: RabbitMQ server address.
:param rabbit_auth_url: RabbitMQ authentication server address.
:return: RtStreamConnection
"""
class RtStreamConnection(SockJSConnection):
"""RtStreamConnection connection implementation"""
clients = set()
def __init__(self, session):
super(self.__class__, self).__init__(session)
self.subscribers = recursivedict()
self.total_records = recursivedict()
def send_probe_factory(self, exchange_name, routing_key):
def send_probe(body):
logging.debug("GOT: " + body)
buffer_content = json.loads(body)
# FIXME: Keep old buffer parsing for backward compatibility.
if type(buffer_content) == list:
for record in buffer_content:
self.subscribers[exchange_name][routing_key] \
["total_records"] += 1
record["exchangeName"] = exchange_name
record["routingKey"] = routing_key
self.send(json.dumps(record))
# FIXME: This is the new data format. Keep this parsing.
elif type(buffer_content) == dict:
for record in buffer_content['chunk']:
self.subscribers[exchange_name][routing_key] \
["total_records"] += 1
record["exchangeName"] = exchange_name
record["routingKey"] = routing_key
self.send(json.dumps(record))
return send_probe
def on_open(self, info):
logging.info("Got a new connection...")
self.clients.add(self)
def on_message(self, message):
"""
This will receive instructions from the client to change the
stream. After the connection is established we expect to receive a
JSON with exchangeName, routingKey, token; then we subscribe to
RabbitMQ and start streaming the data.
NOTE: it's not possible to open multiple connections from the same
            client, so if we need to stream different devices/metrics/etc.
            at the same time, we need a solution similar to the
multiplexing in the sockjs-tornado examples folder.
:param message: subscription message to process
"""
logging.info("Got a new subscription message: " + message)
msg_dict = json.loads(message)
if msg_dict['type'] == 'subscription':
self.handle_channel_subscription(msg_dict)
elif msg_dict['type'] == 'unsubscription':
self.handle_channel_unsubscription(msg_dict)
def handle_channel_subscription(self, stream_configuration):
exchange_name = stream_configuration['exchangeName']
routing_key = stream_configuration['routingKey']
queue_name = 'websocket-client-%s' % str(uuid4())
# Look for a token.
token = (stream_configuration['token']
if 'token' in stream_configuration else None)
# If there is a token, use it in place of rabbitmq user and pwd.
if token:
rabbitmq_user = token
rabbitmq_pwd = ""
# Otherwise, look for a username and password.
elif ('user' in stream_configuration
and 'password' in stream_configuration):
rabbitmq_user = stream_configuration['user']
rabbitmq_pwd = stream_configuration['password']
else:
                raise ValueError('Missing parameters in subscribe message '
                                 'to websocket server. You must either pass '
                                 'the param "token" OR two params: '
                                 '"user" and "password".')
if not self.routing_key_exists(exchange_name, routing_key):
self.subscribers[exchange_name][routing_key] = {
"subscriber": TornadoSubscriber(
callback=self.send_probe_factory(
exchange_name, routing_key),
exchange_name=exchange_name,
routing_key=routing_key,
rabbit_auth_url=rabbit_auth_url,
rabbitmq_address=rabbitmq_address,
rabbitmq_user=rabbitmq_user,
rabbitmq_pwd=rabbitmq_pwd,
queue_name=queue_name,
token=token
),
"total_records": 0
}
self.subscribers[exchange_name] \
[routing_key]["subscriber"].connect()
def handle_channel_unsubscription(self, unsubscription_msg):
exchange_name = unsubscription_msg['exchangeName']
routing_key = unsubscription_msg['routingKey']
logging.info("Unsubscription received for "
"exchange_name: %s, routing_key: %s"
% (exchange_name, routing_key))
if self.routing_key_exists(exchange_name, routing_key):
self.subscribers[exchange_name][routing_key] \
["subscriber"].disconnect()
def on_close(self):
logging.info("Disconnecting client...")
for exchange_name in self.subscribers:
for routing_key in self.subscribers[exchange_name]:
subscriber = self.subscribers[exchange_name] \
[routing_key]["subscriber"]
if subscriber is not None:
logging.info(
"Disconnecting subscriber for exchange_name: %s, "
"routing_key: %s" % (exchange_name, routing_key))
subscriber.disconnect()
self.subscribers = {}
self.clients.remove(self)
logging.info("Client disconnection complete!")
def send_heartbeat(self):
self.broadcast(self.clients, 'message')
def routing_key_exists(self, exchange_name, routing_key):
            return (exchange_name in self.subscribers
                    and routing_key in self.subscribers[exchange_name])
return RtStreamConnection
class TornadoSubscriber(object):
"""
See: https://pika.readthedocs.org/en/0.9.14/examples/tornado_consumer.html
"""
def __init__(self, callback, exchange_name, routing_key, rabbit_auth_url,
rabbitmq_address, rabbitmq_user, rabbitmq_pwd,
queue_name, token=None):
self.callback = callback
self.exchange_name = exchange_name
self.routing_key = routing_key
self.rabbit_auth_url = rabbit_auth_url
self.rabbitmq_address = rabbitmq_address
self.rabbitmq_user = rabbitmq_user
self.rabbitmq_pwd = rabbitmq_pwd
self.queue_name = queue_name
self.token = token
self.connection = None
self.channel = None
self.consumer_tag = None
def connect(self):
auth = CloudbrainAuth(self.rabbit_auth_url)
if self.token:
credentials = pika.PlainCredentials(self.token, '')
vhost = auth.get_vhost_by_token(self.token)
connection_params = pika.ConnectionParameters(
host=self.rabbitmq_address, virtual_host=vhost,
credentials=credentials)
else:
credentials = pika.PlainCredentials(self.rabbitmq_user,
self.rabbitmq_pwd)
vhost = getattr(self, 'rabbitmq_vhost',
auth.get_vhost_by_username(self.rabbitmq_user))
connection_params = pika.ConnectionParameters(
host=self.rabbitmq_address, virtual_host=vhost,
credentials=credentials)
self.connection = pika.adapters.tornado_connection.TornadoConnection(
connection_params,
self.on_connected,
stop_ioloop_on_close=False,
custom_ioloop=IOLoop.instance())
def disconnect(self):
if self.connection is not None:
self.connection.close()
def on_connected(self, connection):
self.connection = connection
self.connection.add_on_close_callback(self.on_connection_closed)
self.connection.add_backpressure_callback(self.on_backpressure_callback)
self.open_channel()
def on_connection_closed(self, connection, reply_code, reply_text):
self.connection = None
self.channel = None
def on_backpressure_callback(self, connection):
logging.info("******** Backpressure detected for exchange %s and "
"routing key %s" % (self.exchange_name, self.routing_key))
def open_channel(self):
self.connection.channel(self.on_channel_open)
def on_channel_open(self, channel):
self.channel = channel
self.channel.add_on_close_callback(self.on_channel_closed)
logging.info("Declaring exchange: %s" % self.exchange_name)
if self.exchange_name == 'amq.topic':
            # Note: this is the reserved exchange name for MQTT. Therefore,
# "type" must be "topic" and "durable" must be set to "True".
self.channel.exchange_declare(self.on_exchange_declareok,
exchange=self.exchange_name,
type='topic',
durable=True)
else:
self.channel.exchange_declare(self.on_exchange_declareok,
exchange=self.exchange_name,
type='direct',
passive=True)
def on_channel_closed(self, channel, reply_code, reply_text):
self.connection.close()
def on_exchange_declareok(self, unused_frame):
self.channel.queue_declare(self.on_queue_declareok,
self.queue_name,
exclusive=True)
def on_queue_declareok(self, unused_frame):
logging.info("Binding queue. Exchange name: %s. Routing key: %s"
% (self.exchange_name, self.routing_key))
self.channel.queue_bind(
self.on_bindok,
exchange=self.exchange_name,
queue=self.queue_name,
routing_key=self.routing_key)
def on_bindok(self, unused_frame):
self.channel.add_on_cancel_callback(self.on_consumer_cancelled)
self.consumer_tag = self.channel.basic_consume(self.on_message,
self.queue_name,
exclusive=True,
no_ack=True)
def on_consumer_cancelled(self, method_frame):
if self.channel:
self.channel.close()
def on_message(self, unused_channel, basic_deliver, properties, body):
self.callback(body)
class WebsocketServer(object):
def __init__(self, ws_server_port, rabbitmq_address, rabbit_auth_url):
self.rabbitmq_address = rabbitmq_address
self.ws_server_port = ws_server_port
self.rabbit_auth_url = rabbit_auth_url
def start(self):
RtStreamConnection = _rt_stream_connection_factory(
self.rabbitmq_address, self.rabbit_auth_url)
# 1. Create chat router
RtStreamRouter = SockJSRouter(RtStreamConnection, '/rt-stream')
# 2. Create Tornado application
app = Application(RtStreamRouter.urls)
        # 3. Make Tornado app listen on the given port
app.listen(self.ws_server_port)
print("Real-time data server running at "
"http://localhost:%s" % self.ws_server_port)
# 4. Start IOLoop
IOLoop.instance().start()
def stop(self):
IOLoop.instance().stop()
| agpl-3.0 |
andrewyoung1991/scons | test/option/h.py | 5 | 2083 | #!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import TestSCons
test = TestSCons.TestSCons()
test.run(arguments = '-h')
test.must_contain_all_lines(test.stdout(), ['-h, --help'])
test.run(arguments = '-u -h')
test.must_contain_all_lines(test.stdout(), ['-h, --help'])
test.run(arguments = '-U -h')
test.must_contain_all_lines(test.stdout(), ['-h, --help'])
test.run(arguments = '-D -h')
test.must_contain_all_lines(test.stdout(), ['-h, --help'])
test.write('SConstruct', "")
test.run(arguments = '-h')
test.must_contain_all_lines(test.stdout(), ['-h, --help'])
test.run(arguments = '-u -h')
test.must_contain_all_lines(test.stdout(), ['-h, --help'])
test.run(arguments = '-U -h')
test.must_contain_all_lines(test.stdout(), ['-h, --help'])
test.run(arguments = '-D -h')
test.must_contain_all_lines(test.stdout(), ['-h, --help'])
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| mit |
larsbutler/coveragepy | tests/helpers.py | 2 | 1599 | # Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt
"""Helpers for coverage.py tests."""
import subprocess
def run_command(cmd):
"""Run a command in a sub-process.
Returns the exit status code and the combined stdout and stderr.
"""
proc = subprocess.Popen(
cmd, shell=True,
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT
)
output, _ = proc.communicate()
status = proc.returncode
# Get the output, and canonicalize it to strings with newlines.
if not isinstance(output, str):
output = output.decode('utf-8')
output = output.replace('\r', '')
return status, output
class CheckUniqueFilenames(object):
"""Asserts the uniqueness of filenames passed to a function."""
def __init__(self, wrapped):
self.filenames = set()
self.wrapped = wrapped
@classmethod
def hook(cls, cov, method_name):
"""Replace a method with our checking wrapper."""
method = getattr(cov, method_name)
hook = cls(method)
setattr(cov, method_name, hook.wrapper)
return hook
def wrapper(self, filename, *args, **kwargs):
"""The replacement method. Check that we don't have dupes."""
assert filename not in self.filenames, (
"Filename %r passed to %r twice" % (filename, self.wrapped)
)
self.filenames.add(filename)
ret = self.wrapped(filename, *args, **kwargs)
return ret
| apache-2.0 |
wskplho/sl4a | python/python-twitter/simplejson/decoder.py | 135 | 12032 | """Implementation of JSONDecoder
"""
import re
import sys
import struct
from simplejson.scanner import make_scanner
try:
from simplejson._speedups import scanstring as c_scanstring
except ImportError:
c_scanstring = None
__all__ = ['JSONDecoder']
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
def _floatconstants():
_BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
if sys.byteorder != 'big':
_BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
nan, inf = struct.unpack('dd', _BYTES)
return nan, inf, -inf
NaN, PosInf, NegInf = _floatconstants()
def linecol(doc, pos):
lineno = doc.count('\n', 0, pos) + 1
if lineno == 1:
colno = pos
else:
colno = pos - doc.rindex('\n', 0, pos)
return lineno, colno
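# Worked example: linecol('ab\ncd', 4) -> (2, 2); for positions on the first
# line the raw offset itself is returned as the column.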
def errmsg(msg, doc, pos, end=None):
# Note that this function is called from _speedups
lineno, colno = linecol(doc, pos)
if end is None:
return '%s: line %d column %d (char %d)' % (msg, lineno, colno, pos)
endlineno, endcolno = linecol(doc, end)
return '%s: line %d column %d - line %d column %d (char %d - %d)' % (
msg, lineno, colno, endlineno, endcolno, pos, end)
_CONSTANTS = {
'-Infinity': NegInf,
'Infinity': PosInf,
'NaN': NaN,
}
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
BACKSLASH = {
'"': u'"', '\\': u'\\', '/': u'/',
'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
}
DEFAULT_ENCODING = "utf-8"
def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match):
"""Scan the string s for a JSON string. End is the index of the
character in s after the quote that started the JSON string.
Unescapes all valid JSON string escape sequences and raises ValueError
on attempt to decode an invalid string. If strict is False then literal
control characters are allowed in the string.
Returns a tuple of the decoded string and the index of the character in s
after the end quote."""
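    # Illustrative call: py_scanstring('"abc" :', 1) returns (u'abc', 5),
    # i.e. the decoded text plus the index just past the closing quote.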
if encoding is None:
encoding = DEFAULT_ENCODING
chunks = []
_append = chunks.append
begin = end - 1
while 1:
chunk = _m(s, end)
if chunk is None:
raise ValueError(
errmsg("Unterminated string starting at", s, begin))
end = chunk.end()
content, terminator = chunk.groups()
        # Content contains zero or more unescaped string characters
if content:
if not isinstance(content, unicode):
content = unicode(content, encoding)
_append(content)
# Terminator is the end of string, a literal control character,
# or a backslash denoting that an escape sequence follows
if terminator == '"':
break
elif terminator != '\\':
if strict:
msg = "Invalid control character %r at" % (terminator,)
                raise ValueError(errmsg(msg, s, end))
else:
_append(terminator)
continue
try:
esc = s[end]
except IndexError:
raise ValueError(
errmsg("Unterminated string starting at", s, begin))
# If not a unicode escape sequence, must be in the lookup table
if esc != 'u':
try:
char = _b[esc]
except KeyError:
raise ValueError(
errmsg("Invalid \\escape: %r" % (esc,), s, end))
end += 1
else:
# Unicode escape sequence
esc = s[end + 1:end + 5]
next_end = end + 5
if len(esc) != 4:
msg = "Invalid \\uXXXX escape"
raise ValueError(errmsg(msg, s, end))
uni = int(esc, 16)
# Check for surrogate pair on UCS-4 systems
if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
if not s[end + 5:end + 7] == '\\u':
raise ValueError(errmsg(msg, s, end))
esc2 = s[end + 7:end + 11]
if len(esc2) != 4:
raise ValueError(errmsg(msg, s, end))
uni2 = int(esc2, 16)
uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
next_end += 6
char = unichr(uni)
end = next_end
# Append the unescaped character
_append(char)
return u''.join(chunks), end
# Use speedup if available
scanstring = c_scanstring or py_scanstring
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
pairs = {}
    # Use a slice to prevent IndexError from being raised; the following
    # check will raise a more specific ValueError if the string is empty
nextchar = s[end:end + 1]
# Normally we expect nextchar == '"'
if nextchar != '"':
if nextchar in _ws:
end = _w(s, end).end()
nextchar = s[end:end + 1]
# Trivial empty object
if nextchar == '}':
return pairs, end + 1
elif nextchar != '"':
raise ValueError(errmsg("Expecting property name", s, end))
end += 1
while True:
key, end = scanstring(s, end, encoding, strict)
# To skip some function call overhead we optimize the fast paths where
# the JSON key separator is ": " or just ":".
if s[end:end + 1] != ':':
end = _w(s, end).end()
if s[end:end + 1] != ':':
raise ValueError(errmsg("Expecting : delimiter", s, end))
end += 1
try:
if s[end] in _ws:
end += 1
if s[end] in _ws:
end = _w(s, end + 1).end()
except IndexError:
pass
try:
value, end = scan_once(s, end)
except StopIteration:
raise ValueError(errmsg("Expecting object", s, end))
pairs[key] = value
try:
nextchar = s[end]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end]
except IndexError:
nextchar = ''
end += 1
if nextchar == '}':
break
elif nextchar != ',':
raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
try:
nextchar = s[end]
if nextchar in _ws:
end += 1
nextchar = s[end]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end]
except IndexError:
nextchar = ''
end += 1
if nextchar != '"':
raise ValueError(errmsg("Expecting property name", s, end - 1))
if object_hook is not None:
pairs = object_hook(pairs)
return pairs, end
def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
values = []
nextchar = s[end:end + 1]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end:end + 1]
# Look-ahead for trivial empty array
if nextchar == ']':
return values, end + 1
_append = values.append
while True:
try:
value, end = scan_once(s, end)
except StopIteration:
raise ValueError(errmsg("Expecting object", s, end))
_append(value)
nextchar = s[end:end + 1]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end:end + 1]
end += 1
if nextchar == ']':
break
elif nextchar != ',':
raise ValueError(errmsg("Expecting , delimiter", s, end))
try:
if s[end] in _ws:
end += 1
if s[end] in _ws:
end = _w(s, end + 1).end()
except IndexError:
pass
return values, end
class JSONDecoder(object):
"""Simple JSON <http://json.org> decoder
Performs the following translations in decoding by default:
+---------------+-------------------+
| JSON | Python |
+===============+===================+
| object | dict |
+---------------+-------------------+
| array | list |
+---------------+-------------------+
| string | unicode |
+---------------+-------------------+
| number (int) | int, long |
+---------------+-------------------+
| number (real) | float |
+---------------+-------------------+
| true | True |
+---------------+-------------------+
| false | False |
+---------------+-------------------+
| null | None |
+---------------+-------------------+
It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
their corresponding ``float`` values, which is outside the JSON spec.
"""
def __init__(self, encoding=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, strict=True):
"""``encoding`` determines the encoding used to interpret any ``str``
objects decoded by this instance (utf-8 by default). It has no
effect when decoding ``unicode`` objects.
Note that currently only encodings that are a superset of ASCII work,
strings of other encodings should be passed in as ``unicode``.
``object_hook``, if specified, will be called with the result
of every JSON object decoded and its return value will be used in
place of the given ``dict``. This can be used to provide custom
deserializations (e.g. to support JSON-RPC class hinting).
``parse_float``, if specified, will be called with the string
of every JSON float to be decoded. By default this is equivalent to
float(num_str). This can be used to use another datatype or parser
for JSON floats (e.g. decimal.Decimal).
``parse_int``, if specified, will be called with the string
of every JSON int to be decoded. By default this is equivalent to
int(num_str). This can be used to use another datatype or parser
for JSON integers (e.g. float).
``parse_constant``, if specified, will be called with one of the
following strings: -Infinity, Infinity, NaN.
This can be used to raise an exception if invalid JSON numbers
are encountered.
"""
self.encoding = encoding
self.object_hook = object_hook
self.parse_float = parse_float or float
self.parse_int = parse_int or int
self.parse_constant = parse_constant or _CONSTANTS.__getitem__
self.strict = strict
self.parse_object = JSONObject
self.parse_array = JSONArray
self.parse_string = scanstring
self.scan_once = make_scanner(self)
def decode(self, s, _w=WHITESPACE.match):
"""Return the Python representation of ``s`` (a ``str`` or ``unicode``
instance containing a JSON document)
"""
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
end = _w(s, end).end()
if end != len(s):
raise ValueError(errmsg("Extra data", s, end, len(s)))
return obj
def raw_decode(self, s, idx=0):
"""Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
with a JSON document) and return a 2-tuple of the Python
representation and the index in ``s`` where the document ended.
This can be used to decode a JSON document from a string that may
have extraneous data at the end.
"""
try:
obj, end = self.scan_once(s, idx)
except StopIteration:
raise ValueError("No JSON object could be decoded")
return obj, end
| apache-2.0 |
BhallaLab/moose-examples | traub_2005/py/fig_a3.py | 1 | 3700 | # fig_a3.py ---
#
# Filename: fig_a3.py
# Description:
# Author:
# Maintainer:
# Created: Wed Jun 26 17:07:59 2013 (+0530)
# Version:
# Last-Updated: Sun Jun 25 15:09:46 2017 (-0400)
# By: subha
# Update #: 162
# URL:
# Keywords:
# Compatibility:
#
#
# Commentary:
#
#
#
#
# Change log:
#
#
#
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street, Fifth
# Floor, Boston, MA 02110-1301, USA.
#
#
# Code:
"""This script is intended for reproducing figure A3 of the Traub et
al 2005 paper. It serves as a test for the spiny stellate cell."""
import numpy as np
import pylab
import moose
from moose import utils
from cells import SpinyStellate
import config
simtime = 500e-3
simdt = 2e-5
plotdt = 1e-4
def setup_model(root='/', hsolve=True):
moose.setCwe(root)
model = moose.Neutral('model')
data = moose.Neutral('data')
cell = SpinyStellate('%s/spinystellate' % (model.path))
    p = '%s/comp_1' % cell.path
soma = moose.element(p) if moose.exists(p) else moose.Compartment(p)
if hsolve:
solver = moose.HSolve('%s/solve' % (cell.path))
solver.dt = simdt
solver.target = model.path
pulse = moose.PulseGen('%s/stimulus' % (model.path))
moose.connect(pulse, 'output', soma, 'injectMsg')
tab_vm = moose.Table('%s/spinystellate_soma_Vm' % (data.path))
moose.connect(tab_vm, 'requestOut', soma, 'getVm')
tab_stim = moose.Table('%s/spinystellate_soma_inject' % (data.path))
moose.connect(tab_stim, 'requestOut', pulse, 'getOutputValue')
utils.setDefaultDt(elecdt=simdt, plotdt2=plotdt)
utils.assignDefaultTicks(model, data)
return {'stimulus': pulse,
'tab_vm': tab_vm,
'tab_stim': tab_stim}
def do_sim(pulsegen, amp):
pulsegen.level[0] = amp
pulsegen.delay[0] = 50e-3
pulsegen.width[0] = 400e-3
moose.reinit()
utils.stepRun(simtime, 10000*simdt, logger=config.logger)
def main():
amps = [0.167e-9, 0.25e-9, 0.333e-9]
model_dict = setup_model()
for ii, a in enumerate(amps):
do_sim(model_dict['stimulus'], a)
config.logger.info('##### %d' % (model_dict['tab_vm'].size))
vm = model_dict['tab_vm'].vector
inject = model_dict['tab_stim'].vector.copy()
t = np.linspace(0, simtime, len(vm))
fname = 'data_fig_a3_%s.txt' % (chr(ord('A')+ii))
np.savetxt(fname,
np.vstack((t, inject, vm)).transpose())
msg = 'Saved data for %g A current pulse in %s' % (a, fname)
config.logger.info(msg)
print(msg)
pylab.subplot(3,1,ii+1)
pylab.title('%g nA' % (a*1e9))
pylab.plot(t, vm, label='soma-Vm (mV)')
stim_boundary = np.flatnonzero(np.diff(inject))
pylab.plot((t[stim_boundary[0]]), (vm.min()), 'r^', label='stimulus start')
pylab.plot((t[stim_boundary[-1]]), (vm.min()), 'gv', label='stimulus end')
pylab.legend()
pylab.savefig('fig_a3.png')
pylab.show()
if __name__ == '__main__':
main()
#
# fig_a3.py ends here
| gpl-2.0 |
alsrgv/tensorflow | tensorflow/contrib/periodic_resample/__init__.py | 56 | 1176 | # =============================================================================
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Custom op used by periodic_resample."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.periodic_resample.python.ops.periodic_resample_op import periodic_resample
from tensorflow.python.util.all_util import remove_undocumented
_allowed_symbols = ["periodic_resample"]
remove_undocumented(__name__, _allowed_symbols)
| apache-2.0 |
vanda/DigitalLabels | labels/management/commands/build.py | 1 | 2432 | import codecs
import os
from distutils.dir_util import copy_tree
from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
from django.test.client import Client
from django.template.defaultfilters import slugify
from labels.models import DigitalLabel, Portal
class Command(BaseCommand):
args = "<digitallabel_id digitallabel_id>"
help = "Creates a static bundle of HTML, media and images for the labels"
option_list = BaseCommand.option_list + (
make_option('-o',
'--out',
default=".",
help="Where to put them?"
),
)
def handle(self, *args, **options):
destination = options['out']
for dl in DigitalLabel.objects.all():
self.save_html(dl, destination)
for pt in Portal.objects.all():
self.save_html(pt, destination)
# handle static media: JS, IMG, CSS, etc.
# SOURCE DIRS
media_abspath = os.path.abspath(settings.MEDIA_ROOT)
static_abspath = os.path.abspath(settings.STATIC_ROOT)
# DESTINATION DIRS
static_build_dir = os.path.join(destination,
os.path.basename(static_abspath))
media_build_dir = os.path.join(destination,
os.path.basename(media_abspath))
# COPY FILES
copy_tree(settings.STATIC_ROOT, static_build_dir)
copy_tree(os.path.join(settings.MEDIA_CACHE_ROOT), media_build_dir)
def save_html(self, screen, destination):
cl = Client()
page_html = cl.get('/%s/%d/' % (screen.model_name, screen.pk)).content
# make img, css and js links relative
page_html = page_html.replace('data-img-l="/media/cache/', 'data-img-l="./media/'
).replace('src="/', 'src="./'
).replace('src="./media/cache/', 'src="./media/'
).replace('href="/', 'href="./')
dest_abspath = os.path.abspath(destination)
if not os.path.exists(dest_abspath):
print 'Making %s' % (dest_abspath)
os.mkdir(dest_abspath)
filename = os.path.join(destination,
'%s.html' % (slugify(screen.name)))
f = codecs.open(filename, 'w', 'UTF-8')
unicode_html = unicode(page_html, 'UTF-8')
        f.write(unicode_html)
        f.close()
| bsd-3-clause |
kpingul/shoppingWidget | node_modules/gulp-sass/node_modules/node-sass/node_modules/pangyp/gyp/pylib/gyp/generator/ninja.py | 372 | 89149 | # Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import hashlib
import json
import multiprocessing
import os.path
import re
import signal
import subprocess
import sys
import gyp
import gyp.common
import gyp.msvs_emulation
import gyp.MSVSUtil as MSVSUtil
import gyp.xcode_emulation
from cStringIO import StringIO
from gyp.common import GetEnvironFallback
import gyp.ninja_syntax as ninja_syntax
generator_default_variables = {
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '',
'STATIC_LIB_PREFIX': 'lib',
'STATIC_LIB_SUFFIX': '.a',
'SHARED_LIB_PREFIX': 'lib',
# Gyp expects the following variables to be expandable by the build
# system to the appropriate locations. Ninja prefers paths to be
# known at gyp time. To resolve this, introduce special
# variables starting with $! and $| (which begin with a $ so gyp knows it
# should be treated specially, but is otherwise an invalid
# ninja/shell variable) that are passed to gyp here but expanded
# before writing out into the target .ninja files; see
# ExpandSpecial.
# $! is used for variables that represent a path and that can only appear at
# the start of a string, while $| is used for variables that can appear
# anywhere in a string.
'INTERMEDIATE_DIR': '$!INTERMEDIATE_DIR',
'SHARED_INTERMEDIATE_DIR': '$!PRODUCT_DIR/gen',
'PRODUCT_DIR': '$!PRODUCT_DIR',
'CONFIGURATION_NAME': '$|CONFIGURATION_NAME',
# Special variables that may be used by gyp 'rule' targets.
# We generate definitions for these variables on the fly when processing a
# rule.
'RULE_INPUT_ROOT': '${root}',
'RULE_INPUT_DIRNAME': '${dirname}',
'RULE_INPUT_PATH': '${source}',
'RULE_INPUT_EXT': '${ext}',
'RULE_INPUT_NAME': '${name}',
}
# Placates pylint.
generator_additional_non_configuration_keys = []
generator_additional_path_sections = []
generator_extra_sources_for_rules = []
generator_filelist_paths = None
# TODO: figure out how to not build extra host objects in the non-cross-compile
# case when this is enabled, and enable unconditionally.
generator_supports_multiple_toolsets = (
os.environ.get('GYP_CROSSCOMPILE') or
os.environ.get('AR_host') or
os.environ.get('CC_host') or
os.environ.get('CXX_host') or
os.environ.get('AR_target') or
os.environ.get('CC_target') or
os.environ.get('CXX_target'))
def StripPrefix(arg, prefix):
if arg.startswith(prefix):
return arg[len(prefix):]
return arg
def QuoteShellArgument(arg, flavor):
"""Quote a string such that it will be interpreted as a single argument
by the shell."""
# Rather than attempting to enumerate the bad shell characters, just
# whitelist common OK ones and quote anything else.
if re.match(r'^[a-zA-Z0-9_=.\\/-]+$', arg):
return arg # No quoting necessary.
if flavor == 'win':
return gyp.msvs_emulation.QuoteForRspFile(arg)
return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'"
def Define(d, flavor):
"""Takes a preprocessor define and returns a -D parameter that's ninja- and
shell-escaped."""
if flavor == 'win':
    # cl.exe replaces literal # characters with = in preprocessor definitions for
# some reason. Octal-encode to work around that.
d = d.replace('#', '\\%03o' % ord('#'))
return QuoteShellArgument(ninja_syntax.escape('-D' + d), flavor)
def AddArch(output, arch):
"""Adds an arch string to an output path."""
output, extension = os.path.splitext(output)
return '%s.%s%s' % (output, arch, extension)
class Target:
"""Target represents the paths used within a single gyp target.
Conceptually, building a single target A is a series of steps:
1) actions/rules/copies generates source/resources/etc.
2) compiles generates .o files
3) link generates a binary (library/executable)
4) bundle merges the above in a mac bundle
(Any of these steps can be optional.)
From a build ordering perspective, a dependent target B could just
depend on the last output of this series of steps.
But some dependent commands sometimes need to reach inside the box.
For example, when linking B it needs to get the path to the static
library generated by A.
This object stores those paths. To keep things simple, member
variables only store concrete paths to single files, while methods
compute derived values like "the last output of the target".
"""
def __init__(self, type):
# Gyp type ("static_library", etc.) of this target.
self.type = type
# File representing whether any input dependencies necessary for
# dependent actions have completed.
self.preaction_stamp = None
# File representing whether any input dependencies necessary for
# dependent compiles have completed.
self.precompile_stamp = None
# File representing the completion of actions/rules/copies, if any.
self.actions_stamp = None
# Path to the output of the link step, if any.
self.binary = None
# Path to the file representing the completion of building the bundle,
# if any.
self.bundle = None
# On Windows, incremental linking requires linking against all the .objs
# that compose a .lib (rather than the .lib itself). That list is stored
# here.
self.component_objs = None
# Windows only. The import .lib is the output of a build step, but
# because dependents only link against the lib (not both the lib and the
# dll) we keep track of the import library here.
self.import_lib = None
def Linkable(self):
"""Return true if this is a target that can be linked against."""
return self.type in ('static_library', 'shared_library')
def UsesToc(self, flavor):
"""Return true if the target should produce a restat rule based on a TOC
file."""
# For bundles, the .TOC should be produced for the binary, not for
# FinalOutput(). But the naive approach would put the TOC file into the
# bundle, so don't do this for bundles for now.
if flavor == 'win' or self.bundle:
return False
return self.type in ('shared_library', 'loadable_module')
def PreActionInput(self, flavor):
"""Return the path, if any, that should be used as a dependency of
any dependent action step."""
if self.UsesToc(flavor):
return self.FinalOutput() + '.TOC'
return self.FinalOutput() or self.preaction_stamp
def PreCompileInput(self):
"""Return the path, if any, that should be used as a dependency of
any dependent compile step."""
return self.actions_stamp or self.precompile_stamp
def FinalOutput(self):
"""Return the last output of the target, which depends on all prior
steps."""
return self.bundle or self.binary or self.actions_stamp
# A small discourse on paths as used within the Ninja build:
# All files we produce (both at gyp and at build time) appear in the
# build directory (e.g. out/Debug).
#
# Paths within a given .gyp file are always relative to the directory
# containing the .gyp file. Call these "gyp paths". This includes
# sources as well as the starting directory a given gyp rule/action
# expects to be run from. We call the path from the source root to
# the gyp file the "base directory" within the per-.gyp-file
# NinjaWriter code.
#
# All paths as written into the .ninja files are relative to the build
# directory. Call these paths "ninja paths".
#
# We translate between these two notions of paths with two helper
# functions:
#
# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file)
# into the equivalent ninja path.
#
# - GypPathToUniqueOutput translates a gyp path into a ninja path to write
# an output file; the result can be namespaced such that it is unique
# to the input file name as well as the output target name.
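# Illustrative mapping under this scheme (names hypothetical): for a target
# 'targ' in foo/bar.gyp building into out/Debug, the gyp path 'baz/quux.cc'
# becomes the ninja path '../../foo/baz/quux.cc', while
# GypPathToUniqueOutput('quux.o') yields 'obj/foo/targ.quux.o'.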
class NinjaWriter:
def __init__(self, qualified_target, target_outputs, base_dir, build_dir,
output_file, toplevel_build, output_file_name, flavor,
toplevel_dir=None):
"""
base_dir: path from source root to directory containing this gyp file,
by gyp semantics, all input paths are relative to this
build_dir: path from source root to build output
toplevel_dir: path to the toplevel directory
"""
self.qualified_target = qualified_target
self.target_outputs = target_outputs
self.base_dir = base_dir
self.build_dir = build_dir
self.ninja = ninja_syntax.Writer(output_file)
self.toplevel_build = toplevel_build
self.output_file_name = output_file_name
self.flavor = flavor
self.abs_build_dir = None
if toplevel_dir is not None:
self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir,
build_dir))
self.obj_ext = '.obj' if flavor == 'win' else '.o'
if flavor == 'win':
# See docstring of msvs_emulation.GenerateEnvironmentFiles().
self.win_env = {}
for arch in ('x86', 'x64'):
self.win_env[arch] = 'environment.' + arch
# Relative path from build output dir to base dir.
build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir)
self.build_to_base = os.path.join(build_to_top, base_dir)
# Relative path from base dir to build dir.
base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir)
self.base_to_build = os.path.join(base_to_top, build_dir)
def ExpandSpecial(self, path, product_dir=None):
"""Expand specials like $!PRODUCT_DIR in |path|.
If |product_dir| is None, assumes the cwd is already the product
dir. Otherwise, |product_dir| is the relative path to the product
dir.
"""
PRODUCT_DIR = '$!PRODUCT_DIR'
if PRODUCT_DIR in path:
if product_dir:
path = path.replace(PRODUCT_DIR, product_dir)
else:
path = path.replace(PRODUCT_DIR + '/', '')
path = path.replace(PRODUCT_DIR + '\\', '')
path = path.replace(PRODUCT_DIR, '.')
INTERMEDIATE_DIR = '$!INTERMEDIATE_DIR'
if INTERMEDIATE_DIR in path:
int_dir = self.GypPathToUniqueOutput('gen')
# GypPathToUniqueOutput generates a path relative to the product dir,
# so insert product_dir in front if it is provided.
path = path.replace(INTERMEDIATE_DIR,
os.path.join(product_dir or '', int_dir))
CONFIGURATION_NAME = '$|CONFIGURATION_NAME'
path = path.replace(CONFIGURATION_NAME, self.config_name)
return path
def ExpandRuleVariables(self, path, root, dirname, source, ext, name):
if self.flavor == 'win':
path = self.msvs_settings.ConvertVSMacros(
path, config=self.config_name)
path = path.replace(generator_default_variables['RULE_INPUT_ROOT'], root)
path = path.replace(generator_default_variables['RULE_INPUT_DIRNAME'],
dirname)
path = path.replace(generator_default_variables['RULE_INPUT_PATH'], source)
path = path.replace(generator_default_variables['RULE_INPUT_EXT'], ext)
path = path.replace(generator_default_variables['RULE_INPUT_NAME'], name)
return path
def GypPathToNinja(self, path, env=None):
"""Translate a gyp path to a ninja path, optionally expanding environment
variable references in |path| with |env|.
See the above discourse on path conversions."""
if env:
if self.flavor == 'mac':
path = gyp.xcode_emulation.ExpandEnvVars(path, env)
elif self.flavor == 'win':
path = gyp.msvs_emulation.ExpandMacros(path, env)
if path.startswith('$!'):
expanded = self.ExpandSpecial(path)
if self.flavor == 'win':
expanded = os.path.normpath(expanded)
return expanded
if '$|' in path:
path = self.ExpandSpecial(path)
assert '$' not in path, path
return os.path.normpath(os.path.join(self.build_to_base, path))
def GypPathToUniqueOutput(self, path, qualified=True):
"""Translate a gyp path to a ninja path for writing output.
If qualified is True, qualify the resulting filename with the name
of the target. This is necessary when e.g. compiling the same
path twice for two separate output targets.
See the above discourse on path conversions."""
path = self.ExpandSpecial(path)
assert not path.startswith('$'), path
# Translate the path following this scheme:
# Input: foo/bar.gyp, target targ, references baz/out.o
# Output: obj/foo/baz/targ.out.o (if qualified)
# obj/foo/baz/out.o (otherwise)
# (and obj.host instead of obj for cross-compiles)
#
# Why this scheme and not some other one?
# 1) for a given input, you can compute all derived outputs by matching
# its path, even if the input is brought via a gyp file with '..'.
# 2) simple files like libraries and stamps have a simple filename.
obj = 'obj'
if self.toolset != 'target':
obj += '.' + self.toolset
path_dir, path_basename = os.path.split(path)
if qualified:
path_basename = self.name + '.' + path_basename
return os.path.normpath(os.path.join(obj, self.base_dir, path_dir,
path_basename))
def WriteCollapsedDependencies(self, name, targets):
"""Given a list of targets, return a path for a single file
representing the result of building all the targets or None.
Uses a stamp file if necessary."""
assert targets == filter(None, targets), targets
if len(targets) == 0:
return None
if len(targets) > 1:
stamp = self.GypPathToUniqueOutput(name + '.stamp')
targets = self.ninja.build(stamp, 'stamp', targets)
self.ninja.newline()
return targets[0]
def _SubninjaNameForArch(self, arch):
output_file_base = os.path.splitext(self.output_file_name)[0]
return '%s.%s.ninja' % (output_file_base, arch)
def WriteSpec(self, spec, config_name, generator_flags):
"""The main entry point for NinjaWriter: write the build rules for a spec.
Returns a Target object, which represents the output paths for this spec.
Returns None if there are no outputs (e.g. a settings-only 'none' type
target)."""
self.config_name = config_name
self.name = spec['target_name']
self.toolset = spec['toolset']
config = spec['configurations'][config_name]
self.target = Target(spec['type'])
self.is_standalone_static_library = bool(
spec.get('standalone_static_library', 0))
# Track if this target contains any C++ files, to decide if gcc or g++
# should be used for linking.
self.uses_cpp = False
self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
self.xcode_settings = self.msvs_settings = None
if self.flavor == 'mac':
self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
if self.flavor == 'win':
self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec,
generator_flags)
arch = self.msvs_settings.GetArch(config_name)
self.ninja.variable('arch', self.win_env[arch])
self.ninja.variable('cc', '$cl_' + arch)
self.ninja.variable('cxx', '$cl_' + arch)
if self.flavor == 'mac':
self.archs = self.xcode_settings.GetActiveArchs(config_name)
if len(self.archs) > 1:
self.arch_subninjas = dict(
(arch, ninja_syntax.Writer(
OpenOutput(os.path.join(self.toplevel_build,
self._SubninjaNameForArch(arch)),
'w')))
for arch in self.archs)
# Compute predepends for all rules.
# actions_depends is the dependencies this target depends on before running
# any of its action/rule/copy steps.
# compile_depends is the dependencies this target depends on before running
# any of its compile steps.
actions_depends = []
compile_depends = []
# TODO(evan): it is rather confusing which things are lists and which
# are strings. Fix these.
if 'dependencies' in spec:
for dep in spec['dependencies']:
if dep in self.target_outputs:
target = self.target_outputs[dep]
actions_depends.append(target.PreActionInput(self.flavor))
compile_depends.append(target.PreCompileInput())
actions_depends = filter(None, actions_depends)
compile_depends = filter(None, compile_depends)
actions_depends = self.WriteCollapsedDependencies('actions_depends',
actions_depends)
compile_depends = self.WriteCollapsedDependencies('compile_depends',
compile_depends)
self.target.preaction_stamp = actions_depends
self.target.precompile_stamp = compile_depends
# Write out actions, rules, and copies. These must happen before we
# compile any sources, so compute a list of predependencies for sources
# while we do it.
extra_sources = []
mac_bundle_depends = []
self.target.actions_stamp = self.WriteActionsRulesCopies(
spec, extra_sources, actions_depends, mac_bundle_depends)
# If we have actions/rules/copies, we depend directly on those, but
# otherwise we depend on dependent target's actions/rules/copies etc.
# We never need to explicitly depend on previous target's link steps,
# because no compile ever depends on them.
compile_depends_stamp = (self.target.actions_stamp or compile_depends)
# Write out the compilation steps, if any.
link_deps = []
sources = extra_sources + spec.get('sources', [])
if sources:
if self.flavor == 'mac' and len(self.archs) > 1:
# Write subninja file containing compile and link commands scoped to
# a single arch if a fat binary is being built.
for arch in self.archs:
self.ninja.subninja(self._SubninjaNameForArch(arch))
pch = None
if self.flavor == 'win':
gyp.msvs_emulation.VerifyMissingSources(
sources, self.abs_build_dir, generator_flags, self.GypPathToNinja)
pch = gyp.msvs_emulation.PrecompiledHeader(
self.msvs_settings, config_name, self.GypPathToNinja,
self.GypPathToUniqueOutput, self.obj_ext)
else:
pch = gyp.xcode_emulation.MacPrefixHeader(
self.xcode_settings, self.GypPathToNinja,
lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang))
link_deps = self.WriteSources(
self.ninja, config_name, config, sources, compile_depends_stamp, pch,
spec)
# Some actions/rules output 'sources' that are already object files.
obj_outputs = [f for f in sources if f.endswith(self.obj_ext)]
if obj_outputs:
if self.flavor != 'mac' or len(self.archs) == 1:
link_deps += [self.GypPathToNinja(o) for o in obj_outputs]
else:
print "Warning: Actions/rules writing object files don't work with " \
"multiarch targets, dropping. (target %s)" % spec['target_name']
if self.flavor == 'win' and self.target.type == 'static_library':
self.target.component_objs = link_deps
# Write out a link step, if needed.
output = None
is_empty_bundle = not link_deps and not mac_bundle_depends
if link_deps or self.target.actions_stamp or actions_depends:
output = self.WriteTarget(spec, config_name, config, link_deps,
self.target.actions_stamp or actions_depends)
if self.is_mac_bundle:
mac_bundle_depends.append(output)
# Bundle all of the above together, if needed.
if self.is_mac_bundle:
output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle)
if not output:
return None
assert self.target.FinalOutput(), output
return self.target
def _WinIdlRule(self, source, prebuild, outputs):
"""Handle the implicit VS .idl rule for one source file. Fills |outputs|
with files that are generated."""
outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData(
source, self.config_name)
outdir = self.GypPathToNinja(outdir)
def fix_path(path, rel=None):
path = os.path.join(outdir, path)
dirname, basename = os.path.split(source)
root, ext = os.path.splitext(basename)
path = self.ExpandRuleVariables(
path, root, dirname, source, ext, basename)
if rel:
path = os.path.relpath(path, rel)
return path
vars = [(name, fix_path(value, outdir)) for name, value in vars]
output = [fix_path(p) for p in output]
vars.append(('outdir', outdir))
vars.append(('idlflags', flags))
input = self.GypPathToNinja(source)
self.ninja.build(output, 'idl', input,
variables=vars, order_only=prebuild)
outputs.extend(output)
def WriteWinIdlFiles(self, spec, prebuild):
"""Writes rules to match MSVS's implicit idl handling."""
assert self.flavor == 'win'
if self.msvs_settings.HasExplicitIdlRules(spec):
return []
outputs = []
for source in filter(lambda x: x.endswith('.idl'), spec['sources']):
self._WinIdlRule(source, prebuild, outputs)
return outputs
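# Illustrative (hypothetical defaults, not verbatim gyp output): for a source
# 'foo.idl', GetIdlBuildData() typically reports outputs such as '${root}.h'
# and '${root}_i.c', which _WinIdlRule() expands to '$outdir/foo.h' and
# '$outdir/foo_i.c' before emitting a single 'idl' build edge for all of them.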
def WriteActionsRulesCopies(self, spec, extra_sources, prebuild,
mac_bundle_depends):
"""Write out the Actions, Rules, and Copies steps. Return a path
representing the outputs of these steps."""
outputs = []
if self.is_mac_bundle:
mac_bundle_resources = spec.get('mac_bundle_resources', [])[:]
else:
mac_bundle_resources = []
extra_mac_bundle_resources = []
if 'actions' in spec:
outputs += self.WriteActions(spec['actions'], extra_sources, prebuild,
extra_mac_bundle_resources)
if 'rules' in spec:
outputs += self.WriteRules(spec['rules'], extra_sources, prebuild,
mac_bundle_resources,
extra_mac_bundle_resources)
if 'copies' in spec:
outputs += self.WriteCopies(spec['copies'], prebuild, mac_bundle_depends)
if 'sources' in spec and self.flavor == 'win':
outputs += self.WriteWinIdlFiles(spec, prebuild)
stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)
if self.is_mac_bundle:
self.WriteMacBundleResources(
extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends)
self.WriteMacInfoPlist(mac_bundle_depends)
return stamp
def GenerateDescription(self, verb, message, fallback):
"""Generate and return a description of a build step.
|verb| is the short summary, e.g. ACTION or RULE.
|message| is a hand-written description, or None if not available.
|fallback| is the gyp-level name of the step, usable as a fallback.
"""
if self.toolset != 'target':
verb += '(%s)' % self.toolset
if message:
return '%s %s' % (verb, self.ExpandSpecial(message))
else:
return '%s %s: %s' % (verb, self.name, fallback)
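# Illustrative sketch (not part of gyp): for a target named 'base' using the
# default 'target' toolset, the two forms look roughly like:
#   GenerateDescription('ACTION', 'Generating foo', 'foo_action')
#     -> 'ACTION Generating foo'
#   GenerateDescription('ACTION', None, 'foo_action')
#     -> 'ACTION base: foo_action'
# A non-default toolset such as 'host' turns the verb into 'ACTION(host)'.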
def WriteActions(self, actions, extra_sources, prebuild,
extra_mac_bundle_resources):
# Actions cd into the base directory.
env = self.GetSortedXcodeEnv()
if self.flavor == 'win':
env = self.msvs_settings.GetVSMacroEnv(
'$!PRODUCT_DIR', config=self.config_name)
all_outputs = []
for action in actions:
# First write out a rule for the action.
name = '%s_%s' % (action['action_name'],
hashlib.md5(self.qualified_target).hexdigest())
description = self.GenerateDescription('ACTION',
action.get('message', None),
name)
is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action)
if self.flavor == 'win' else False)
args = action['action']
rule_name, _ = self.WriteNewNinjaRule(name, args, description,
is_cygwin, env=env)
inputs = [self.GypPathToNinja(i, env) for i in action['inputs']]
if int(action.get('process_outputs_as_sources', False)):
extra_sources += action['outputs']
if int(action.get('process_outputs_as_mac_bundle_resources', False)):
extra_mac_bundle_resources += action['outputs']
outputs = [self.GypPathToNinja(o, env) for o in action['outputs']]
# Then write out an edge using the rule.
self.ninja.build(outputs, rule_name, inputs,
order_only=prebuild)
all_outputs += outputs
self.ninja.newline()
return all_outputs
def WriteRules(self, rules, extra_sources, prebuild,
mac_bundle_resources, extra_mac_bundle_resources):
env = self.GetSortedXcodeEnv()
all_outputs = []
for rule in rules:
# First write out a rule for the rule action.
name = '%s_%s' % (rule['rule_name'],
hashlib.md5(self.qualified_target).hexdigest())
# Skip a rule with no action and no inputs.
if 'action' not in rule and not rule.get('rule_sources', []):
continue
args = rule['action']
description = self.GenerateDescription(
'RULE',
rule.get('message', None),
('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name)
is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule)
if self.flavor == 'win' else False)
rule_name, args = self.WriteNewNinjaRule(
name, args, description, is_cygwin, env=env)
# TODO: if the command references the outputs directly, we should
# simplify it to just use $out.
# Rules can potentially make use of some special variables which
# must vary per source file.
# Compute the list of variables we'll need to provide.
special_locals = ('source', 'root', 'dirname', 'ext', 'name')
needed_variables = set(['source'])
for argument in args:
for var in special_locals:
if ('${%s}' % var) in argument:
needed_variables.add(var)
def cygwin_munge(path):
if is_cygwin:
return path.replace('\\', '/')
return path
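# Illustrative: under cygwin, cygwin_munge(r'sub\dir') -> 'sub/dir'; outside
# cygwin the path is returned unchanged.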
# For each source file, write an edge that generates all the outputs.
for source in rule.get('rule_sources', []):
source = os.path.normpath(source)
dirname, basename = os.path.split(source)
root, ext = os.path.splitext(basename)
# Gather the list of inputs and outputs, expanding $vars if possible.
outputs = [self.ExpandRuleVariables(o, root, dirname,
source, ext, basename)
for o in rule['outputs']]
inputs = [self.ExpandRuleVariables(i, root, dirname,
source, ext, basename)
for i in rule.get('inputs', [])]
if int(rule.get('process_outputs_as_sources', False)):
extra_sources += outputs
was_mac_bundle_resource = source in mac_bundle_resources
if was_mac_bundle_resource or \
int(rule.get('process_outputs_as_mac_bundle_resources', False)):
extra_mac_bundle_resources += outputs
# Note: This is n_resources * n_outputs_in_rule. Put to-be-removed
# items in a set and remove them all in a single pass if this becomes
# a performance issue.
if was_mac_bundle_resource:
mac_bundle_resources.remove(source)
extra_bindings = []
for var in needed_variables:
if var == 'root':
extra_bindings.append(('root', cygwin_munge(root)))
elif var == 'dirname':
# '$dirname' is a parameter to the rule action, which means
# it shouldn't be converted to a Ninja path. But we don't
# want $!PRODUCT_DIR in there either.
dirname_expanded = self.ExpandSpecial(dirname, self.base_to_build)
extra_bindings.append(('dirname', cygwin_munge(dirname_expanded)))
elif var == 'source':
# '$source' is a parameter to the rule action, which means
# it shouldn't be converted to a Ninja path. But we don't
# want $!PRODUCT_DIR in there either.
source_expanded = self.ExpandSpecial(source, self.base_to_build)
extra_bindings.append(('source', cygwin_munge(source_expanded)))
elif var == 'ext':
extra_bindings.append(('ext', ext))
elif var == 'name':
extra_bindings.append(('name', cygwin_munge(basename)))
else:
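# Unreachable in practice: needed_variables is drawn from special_locals
# above, so every case is handled; the assert only flags a future mismatch.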
assert var == None, repr(var)
inputs = [self.GypPathToNinja(i, env) for i in inputs]
outputs = [self.GypPathToNinja(o, env) for o in outputs]
extra_bindings.append(('unique_name',
hashlib.md5(outputs[0]).hexdigest()))
self.ninja.build(outputs, rule_name, self.GypPathToNinja(source),
implicit=inputs,
order_only=prebuild,
variables=extra_bindings)
all_outputs.extend(outputs)
return all_outputs
def WriteCopies(self, copies, prebuild, mac_bundle_depends):
outputs = []
env = self.GetSortedXcodeEnv()
for copy in copies:
for path in copy['files']:
# Normalize the path so trailing slashes don't confuse us.
path = os.path.normpath(path)
basename = os.path.split(path)[1]
src = self.GypPathToNinja(path, env)
dst = self.GypPathToNinja(os.path.join(copy['destination'], basename),
env)
outputs += self.ninja.build(dst, 'copy', src, order_only=prebuild)
if self.is_mac_bundle:
# gyp has mac_bundle_resources to copy things into a bundle's
# Resources folder, but there's no built-in way to copy files to other
# places in the bundle. Hence, some targets use copies for this. Check
# if this file is copied into the current bundle, and if so add it to
# the bundle depends so that dependent targets get rebuilt if the copy
# input changes.
if dst.startswith(self.xcode_settings.GetBundleContentsFolderPath()):
mac_bundle_depends.append(dst)
return outputs
def WriteMacBundleResources(self, resources, bundle_depends):
"""Writes ninja edges for 'mac_bundle_resources'."""
for output, res in gyp.xcode_emulation.GetMacBundleResources(
generator_default_variables['PRODUCT_DIR'],
self.xcode_settings, map(self.GypPathToNinja, resources)):
output = self.ExpandSpecial(output)
self.ninja.build(output, 'mac_tool', res,
variables=[('mactool_cmd', 'copy-bundle-resource')])
bundle_depends.append(output)
def WriteMacInfoPlist(self, bundle_depends):
"""Write build rules for bundle Info.plist files."""
info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
generator_default_variables['PRODUCT_DIR'],
self.xcode_settings, self.GypPathToNinja)
if not info_plist:
return
out = self.ExpandSpecial(out)
if defines:
# Create an intermediate file to store preprocessed results.
intermediate_plist = self.GypPathToUniqueOutput(
os.path.basename(info_plist))
defines = ' '.join([Define(d, self.flavor) for d in defines])
info_plist = self.ninja.build(
intermediate_plist, 'preprocess_infoplist', info_plist,
variables=[('defines', defines)])
env = self.GetSortedXcodeEnv(additional_settings=extra_env)
env = self.ComputeExportEnvString(env)
keys = self.xcode_settings.GetExtraPlistItems(self.config_name)
keys = QuoteShellArgument(json.dumps(keys), self.flavor)
self.ninja.build(out, 'copy_infoplist', info_plist,
variables=[('env', env), ('keys', keys)])
bundle_depends.append(out)
def WriteSources(self, ninja_file, config_name, config, sources, predepends,
precompiled_header, spec):
"""Write build rules to compile all of |sources|."""
if self.toolset == 'host':
self.ninja.variable('ar', '$ar_host')
self.ninja.variable('cc', '$cc_host')
self.ninja.variable('cxx', '$cxx_host')
self.ninja.variable('ld', '$ld_host')
self.ninja.variable('ldxx', '$ldxx_host')
if self.flavor != 'mac' or len(self.archs) == 1:
return self.WriteSourcesForArch(
self.ninja, config_name, config, sources, predepends,
precompiled_header, spec)
else:
return dict((arch, self.WriteSourcesForArch(
self.arch_subninjas[arch], config_name, config, sources, predepends,
precompiled_header, spec, arch=arch))
for arch in self.archs)
def WriteSourcesForArch(self, ninja_file, config_name, config, sources,
predepends, precompiled_header, spec, arch=None):
"""Write build rules to compile all of |sources|."""
extra_defines = []
if self.flavor == 'mac':
cflags = self.xcode_settings.GetCflags(config_name, arch=arch)
cflags_c = self.xcode_settings.GetCflagsC(config_name)
cflags_cc = self.xcode_settings.GetCflagsCC(config_name)
cflags_objc = ['$cflags_c'] + \
self.xcode_settings.GetCflagsObjC(config_name)
cflags_objcc = ['$cflags_cc'] + \
self.xcode_settings.GetCflagsObjCC(config_name)
elif self.flavor == 'win':
cflags = self.msvs_settings.GetCflags(config_name)
cflags_c = self.msvs_settings.GetCflagsC(config_name)
cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
extra_defines = self.msvs_settings.GetComputedDefines(config_name)
# See comment at cc_command for why there are two .pdb files.
pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName(
config_name, self.ExpandSpecial)
if not pdbpath_c:
obj = 'obj'
if self.toolset != 'target':
obj += '.' + self.toolset
pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name))
pdbpath_c = pdbpath + '.c.pdb'
pdbpath_cc = pdbpath + '.cc.pdb'
self.WriteVariableList(ninja_file, 'pdbname_c', [pdbpath_c])
self.WriteVariableList(ninja_file, 'pdbname_cc', [pdbpath_cc])
self.WriteVariableList(ninja_file, 'pchprefix', [self.name])
else:
cflags = config.get('cflags', [])
cflags_c = config.get('cflags_c', [])
cflags_cc = config.get('cflags_cc', [])
# Respect environment variables related to build, but target-specific
# flags can still override them.
if self.toolset == 'target':
cflags_c = (os.environ.get('CPPFLAGS', '').split() +
os.environ.get('CFLAGS', '').split() + cflags_c)
cflags_cc = (os.environ.get('CPPFLAGS', '').split() +
os.environ.get('CXXFLAGS', '').split() + cflags_cc)
defines = config.get('defines', []) + extra_defines
self.WriteVariableList(ninja_file, 'defines',
[Define(d, self.flavor) for d in defines])
if self.flavor == 'win':
self.WriteVariableList(ninja_file, 'rcflags',
[QuoteShellArgument(self.ExpandSpecial(f), self.flavor)
for f in self.msvs_settings.GetRcflags(config_name,
self.GypPathToNinja)])
include_dirs = config.get('include_dirs', [])
env = self.GetSortedXcodeEnv()
if self.flavor == 'win':
env = self.msvs_settings.GetVSMacroEnv('$!PRODUCT_DIR',
config=config_name)
include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs,
config_name)
self.WriteVariableList(ninja_file, 'includes',
[QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
for i in include_dirs])
pch_commands = precompiled_header.GetPchBuildCommands(arch)
if self.flavor == 'mac':
# Most targets use no precompiled headers, so only write these if needed.
for ext, var in [('c', 'cflags_pch_c'), ('cc', 'cflags_pch_cc'),
('m', 'cflags_pch_objc'), ('mm', 'cflags_pch_objcc')]:
include = precompiled_header.GetInclude(ext, arch)
if include: ninja_file.variable(var, include)
self.WriteVariableList(ninja_file, 'cflags',
map(self.ExpandSpecial, cflags))
self.WriteVariableList(ninja_file, 'cflags_c',
map(self.ExpandSpecial, cflags_c))
self.WriteVariableList(ninja_file, 'cflags_cc',
map(self.ExpandSpecial, cflags_cc))
if self.flavor == 'mac':
self.WriteVariableList(ninja_file, 'cflags_objc',
map(self.ExpandSpecial, cflags_objc))
self.WriteVariableList(ninja_file, 'cflags_objcc',
map(self.ExpandSpecial, cflags_objcc))
ninja_file.newline()
outputs = []
has_rc_source = False
for source in sources:
filename, ext = os.path.splitext(source)
ext = ext[1:]
obj_ext = self.obj_ext
if ext in ('cc', 'cpp', 'cxx'):
command = 'cxx'
self.uses_cpp = True
elif ext == 'c' or (ext == 'S' and self.flavor != 'win'):
command = 'cc'
elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files.
command = 'cc_s'
elif (self.flavor == 'win' and ext == 'asm' and
self.msvs_settings.GetArch(config_name) == 'x86' and
not self.msvs_settings.HasExplicitAsmRules(spec)):
# Asm files only get auto assembled for x86 (not x64).
command = 'asm'
# Add the _asm suffix: msvs can handle .cc and .asm files of the same
# name without collision, so keep the ninja object files distinct too.
obj_ext = '_asm.obj'
elif self.flavor == 'mac' and ext == 'm':
command = 'objc'
elif self.flavor == 'mac' and ext == 'mm':
command = 'objcxx'
self.uses_cpp = True
elif self.flavor == 'win' and ext == 'rc':
command = 'rc'
obj_ext = '.res'
has_rc_source = True
else:
# Ignore unhandled extensions.
continue
input = self.GypPathToNinja(source)
output = self.GypPathToUniqueOutput(filename + obj_ext)
if arch is not None:
output = AddArch(output, arch)
implicit = precompiled_header.GetObjDependencies([input], [output], arch)
variables = []
if self.flavor == 'win':
variables, output, implicit = precompiled_header.GetFlagsModifications(
input, output, implicit, command, cflags_c, cflags_cc,
self.ExpandSpecial)
ninja_file.build(output, command, input,
implicit=[gch for _, _, gch in implicit],
order_only=predepends, variables=variables)
outputs.append(output)
if has_rc_source:
resource_include_dirs = config.get('resource_include_dirs', include_dirs)
self.WriteVariableList(ninja_file, 'resource_includes',
[QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
for i in resource_include_dirs])
self.WritePchTargets(ninja_file, pch_commands)
ninja_file.newline()
return outputs
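# Illustrative recap of the extension dispatch above: 'foo.cc' maps to the
# 'cxx' rule, 'foo.c' to 'cc', 'foo.s' (non-win) to 'cc_s', 'foo.mm' (mac)
# to 'objcxx', and 'foo.rc' (win) to 'rc' with a '.res' object; unhandled
# extensions are skipped entirely.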
def WritePchTargets(self, ninja_file, pch_commands):
"""Writes ninja rules to compile prefix headers."""
if not pch_commands:
return
for gch, lang_flag, lang, input in pch_commands:
var_name = {
'c': 'cflags_pch_c',
'cc': 'cflags_pch_cc',
'm': 'cflags_pch_objc',
'mm': 'cflags_pch_objcc',
}[lang]
map = { 'c': 'cc', 'cc': 'cxx', 'm': 'objc', 'mm': 'objcxx', }
cmd = map.get(lang)
ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)])
def WriteLink(self, spec, config_name, config, link_deps):
"""Write out a link step. Fills out target.binary. """
if self.flavor != 'mac' or len(self.archs) == 1:
return self.WriteLinkForArch(
self.ninja, spec, config_name, config, link_deps)
else:
output = self.ComputeOutput(spec)
inputs = [self.WriteLinkForArch(self.arch_subninjas[arch], spec,
config_name, config, link_deps[arch],
arch=arch)
for arch in self.archs]
extra_bindings = []
if not self.is_mac_bundle:
self.AppendPostbuildVariable(extra_bindings, spec, output, output)
self.ninja.build(output, 'lipo', inputs, variables=extra_bindings)
return output
def WriteLinkForArch(self, ninja_file, spec, config_name, config,
link_deps, arch=None):
"""Write out a link step. Fills out target.binary. """
command = {
'executable': 'link',
'loadable_module': 'solink_module',
'shared_library': 'solink',
}[spec['type']]
command_suffix = ''
implicit_deps = set()
solibs = set()
if 'dependencies' in spec:
# Two kinds of dependencies:
# - Linkable dependencies (like a .a or a .so): add them to the link line.
# - Non-linkable dependencies (like a rule that generates a file
# and writes a stamp file): add them to implicit_deps
extra_link_deps = set()
for dep in spec['dependencies']:
target = self.target_outputs.get(dep)
if not target:
continue
linkable = target.Linkable()
if linkable:
new_deps = []
if (self.flavor == 'win' and
target.component_objs and
self.msvs_settings.IsUseLibraryDependencyInputs(config_name)):
new_deps = target.component_objs
elif self.flavor == 'win' and target.import_lib:
new_deps = [target.import_lib]
elif target.UsesToc(self.flavor):
solibs.add(target.binary)
implicit_deps.add(target.binary + '.TOC')
else:
new_deps = [target.binary]
for new_dep in new_deps:
if new_dep not in extra_link_deps:
extra_link_deps.add(new_dep)
link_deps.append(new_dep)
final_output = target.FinalOutput()
if not linkable or final_output != target.binary:
implicit_deps.add(final_output)
extra_bindings = []
if self.uses_cpp and self.flavor != 'win':
extra_bindings.append(('ld', '$ldxx'))
output = self.ComputeOutput(spec, arch)
if arch is None and not self.is_mac_bundle:
self.AppendPostbuildVariable(extra_bindings, spec, output, output)
is_executable = spec['type'] == 'executable'
# The ldflags config key is not used on mac or win. On those platforms
# linker flags are set via xcode_settings and msvs_settings, respectively.
env_ldflags = os.environ.get('LDFLAGS', '').split()
if self.flavor == 'mac':
ldflags = self.xcode_settings.GetLdflags(config_name,
self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
self.GypPathToNinja, arch)
ldflags = env_ldflags + ldflags
elif self.flavor == 'win':
manifest_base_name = self.GypPathToUniqueOutput(
self.ComputeOutputFileName(spec))
ldflags, intermediate_manifest, manifest_files = \
self.msvs_settings.GetLdflags(config_name, self.GypPathToNinja,
self.ExpandSpecial, manifest_base_name,
output, is_executable,
self.toplevel_build)
ldflags = env_ldflags + ldflags
self.WriteVariableList(ninja_file, 'manifests', manifest_files)
implicit_deps = implicit_deps.union(manifest_files)
if intermediate_manifest:
self.WriteVariableList(
ninja_file, 'intermediatemanifest', [intermediate_manifest])
command_suffix = _GetWinLinkRuleNameSuffix(
self.msvs_settings.IsEmbedManifest(config_name))
def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja)
if def_file:
implicit_deps.add(def_file)
else:
# Respect environment variables related to build, but target-specific
# flags can still override them.
ldflags = env_ldflags + config.get('ldflags', [])
if is_executable and len(solibs):
rpath = 'lib/'
if self.toolset != 'target':
rpath += self.toolset
ldflags.append('-Wl,-rpath=\$$ORIGIN/%s' % rpath)
ldflags.append('-Wl,-rpath-link=%s' % rpath)
self.WriteVariableList(ninja_file, 'ldflags',
gyp.common.uniquer(map(self.ExpandSpecial, ldflags)))
library_dirs = config.get('library_dirs', [])
if self.flavor == 'win':
library_dirs = [self.msvs_settings.ConvertVSMacros(l, config_name)
for l in library_dirs]
library_dirs = ['/LIBPATH:' + QuoteShellArgument(self.GypPathToNinja(l),
self.flavor)
for l in library_dirs]
else:
library_dirs = [QuoteShellArgument('-L' + self.GypPathToNinja(l),
self.flavor)
for l in library_dirs]
libraries = gyp.common.uniquer(map(self.ExpandSpecial,
spec.get('libraries', [])))
if self.flavor == 'mac':
libraries = self.xcode_settings.AdjustLibraries(libraries, config_name)
elif self.flavor == 'win':
libraries = self.msvs_settings.AdjustLibraries(libraries)
self.WriteVariableList(ninja_file, 'libs', library_dirs + libraries)
linked_binary = output
if command in ('solink', 'solink_module'):
extra_bindings.append(('soname', os.path.split(output)[1]))
extra_bindings.append(('lib',
gyp.common.EncodePOSIXShellArgument(output)))
if self.flavor == 'win':
extra_bindings.append(('binary', output))
if '/NOENTRY' not in ldflags:
self.target.import_lib = output + '.lib'
extra_bindings.append(('implibflag',
'/IMPLIB:%s' % self.target.import_lib))
pdbname = self.msvs_settings.GetPDBName(
config_name, self.ExpandSpecial, output + '.pdb')
output = [output, self.target.import_lib]
if pdbname:
output.append(pdbname)
elif not self.is_mac_bundle:
output = [output, output + '.TOC']
else:
command = command + '_notoc'
elif self.flavor == 'win':
extra_bindings.append(('binary', output))
pdbname = self.msvs_settings.GetPDBName(
config_name, self.ExpandSpecial, output + '.pdb')
if pdbname:
output = [output, pdbname]
if len(solibs):
extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(solibs)))
ninja_file.build(output, command + command_suffix, link_deps,
implicit=list(implicit_deps),
variables=extra_bindings)
return linked_binary
def WriteTarget(self, spec, config_name, config, link_deps, compile_deps):
extra_link_deps = any(self.target_outputs.get(dep).Linkable()
for dep in spec.get('dependencies', [])
if dep in self.target_outputs)
if spec['type'] == 'none' or (not link_deps and not extra_link_deps):
# TODO(evan): don't call this function for 'none' target types, as
# it doesn't do anything, and we fake out a 'binary' with a stamp file.
self.target.binary = compile_deps
self.target.type = 'none'
elif spec['type'] == 'static_library':
self.target.binary = self.ComputeOutput(spec)
if (self.flavor not in ('mac', 'openbsd', 'win') and not
self.is_standalone_static_library):
self.ninja.build(self.target.binary, 'alink_thin', link_deps,
order_only=compile_deps)
else:
variables = []
if self.xcode_settings:
libtool_flags = self.xcode_settings.GetLibtoolflags(config_name)
if libtool_flags:
variables.append(('libtool_flags', libtool_flags))
if self.msvs_settings:
libflags = self.msvs_settings.GetLibFlags(config_name,
self.GypPathToNinja)
variables.append(('libflags', libflags))
if self.flavor != 'mac' or len(self.archs) == 1:
self.AppendPostbuildVariable(variables, spec,
self.target.binary, self.target.binary)
self.ninja.build(self.target.binary, 'alink', link_deps,
order_only=compile_deps, variables=variables)
else:
inputs = []
for arch in self.archs:
output = self.ComputeOutput(spec, arch)
self.arch_subninjas[arch].build(output, 'alink', link_deps[arch],
order_only=compile_deps,
variables=variables)
inputs.append(output)
# TODO: It's not clear if libtool_flags should be passed to the alink
# call that combines single-arch .a files into a fat .a file.
self.AppendPostbuildVariable(variables, spec,
self.target.binary, self.target.binary)
self.ninja.build(self.target.binary, 'alink', inputs,
# FIXME: test proving order_only=compile_deps isn't
# needed.
variables=variables)
else:
self.target.binary = self.WriteLink(spec, config_name, config, link_deps)
return self.target.binary
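# Illustrative: on linux, a non-standalone 'static_library' uses the thin
# archive rule 'alink_thin'; a multi-arch mac build writes one 'alink' per
# arch and a final 'alink' that merges the per-arch archives.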
def WriteMacBundle(self, spec, mac_bundle_depends, is_empty):
assert self.is_mac_bundle
package_framework = spec['type'] in ('shared_library', 'loadable_module')
output = self.ComputeMacBundleOutput()
if is_empty:
output += '.stamp'
variables = []
self.AppendPostbuildVariable(variables, spec, output, self.target.binary,
is_command_start=not package_framework)
if package_framework and not is_empty:
variables.append(('version', self.xcode_settings.GetFrameworkVersion()))
self.ninja.build(output, 'package_framework', mac_bundle_depends,
variables=variables)
else:
self.ninja.build(output, 'stamp', mac_bundle_depends,
variables=variables)
self.target.bundle = output
return output
def GetSortedXcodeEnv(self, additional_settings=None):
"""Returns the variables Xcode would set for build steps."""
assert self.abs_build_dir
abs_build_dir = self.abs_build_dir
return gyp.xcode_emulation.GetSortedXcodeEnv(
self.xcode_settings, abs_build_dir,
os.path.join(abs_build_dir, self.build_to_base), self.config_name,
additional_settings)
def GetSortedXcodePostbuildEnv(self):
"""Returns the variables Xcode would set for postbuild steps."""
postbuild_settings = {}
# CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
# TODO(thakis): It would be nice to have some general mechanism instead.
strip_save_file = self.xcode_settings.GetPerTargetSetting(
'CHROMIUM_STRIP_SAVE_FILE')
if strip_save_file:
postbuild_settings['CHROMIUM_STRIP_SAVE_FILE'] = strip_save_file
return self.GetSortedXcodeEnv(additional_settings=postbuild_settings)
def AppendPostbuildVariable(self, variables, spec, output, binary,
is_command_start=False):
"""Adds a 'postbuild' variable if there is a postbuild for |output|."""
postbuild = self.GetPostbuildCommand(spec, output, binary, is_command_start)
if postbuild:
variables.append(('postbuilds', postbuild))
def GetPostbuildCommand(self, spec, output, output_binary, is_command_start):
"""Returns a shell command that runs all the postbuilds, and removes
|output| if any of them fails. If |is_command_start| is False, then the
returned string will start with ' && '."""
if not self.xcode_settings or spec['type'] == 'none' or not output:
return ''
output = QuoteShellArgument(output, self.flavor)
postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True)
if output_binary is not None:
postbuilds = self.xcode_settings.AddImplicitPostbuilds(
self.config_name,
os.path.normpath(os.path.join(self.base_to_build, output)),
QuoteShellArgument(
os.path.normpath(os.path.join(self.base_to_build, output_binary)),
self.flavor),
postbuilds, quiet=True)
if not postbuilds:
return ''
# Postbuilds expect to be run in the gyp file's directory, so insert an
# implicit postbuild to cd to there.
postbuilds.insert(0, gyp.common.EncodePOSIXShellList(
['cd', self.build_to_base]))
env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv())
# G will be non-zero if any postbuild fails. Run all postbuilds in a
# subshell.
commands = env + ' (' + \
' && '.join([ninja_syntax.escape(command) for command in postbuilds])
command_string = (commands + '); G=$$?; '
# Remove the final output if any postbuild failed.
'((exit $$G) || rm -rf %s) ' % output + '&& exit $$G)')
if is_command_start:
return '(' + command_string + ' && '
else:
return '$ && (' + command_string
def ComputeExportEnvString(self, env):
"""Given an environment, returns a string looking like
'export FOO=foo; export BAR="${FOO} bar";'
that exports |env| to the shell."""
export_str = []
for k, v in env:
export_str.append('export %s=%s;' %
(k, ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(v))))
return ' '.join(export_str)
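# Illustrative sketch (assumed inputs; relies on gyp.common's POSIX quoting
# and on ninja_syntax.escape() doubling every '$'):
#   ComputeExportEnvString([('FOO', 'foo'), ('BAR', '$FOO bar')])
#     -> 'export FOO=foo; export BAR="$$FOO bar";'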
def ComputeMacBundleOutput(self):
"""Return the 'output' (full output path) to a bundle output directory."""
assert self.is_mac_bundle
path = generator_default_variables['PRODUCT_DIR']
return self.ExpandSpecial(
os.path.join(path, self.xcode_settings.GetWrapperName()))
def ComputeOutputFileName(self, spec, type=None):
"""Compute the filename of the final output for the current target."""
if not type:
type = spec['type']
default_variables = copy.copy(generator_default_variables)
CalculateVariables(default_variables, {'flavor': self.flavor})
# Compute filename prefix: the product prefix, or a default for
# the product type.
DEFAULT_PREFIX = {
'loadable_module': default_variables['SHARED_LIB_PREFIX'],
'shared_library': default_variables['SHARED_LIB_PREFIX'],
'static_library': default_variables['STATIC_LIB_PREFIX'],
'executable': default_variables['EXECUTABLE_PREFIX'],
}
prefix = spec.get('product_prefix', DEFAULT_PREFIX.get(type, ''))
# Compute filename extension: the product extension, or a default
# for the product type.
DEFAULT_EXTENSION = {
'loadable_module': default_variables['SHARED_LIB_SUFFIX'],
'shared_library': default_variables['SHARED_LIB_SUFFIX'],
'static_library': default_variables['STATIC_LIB_SUFFIX'],
'executable': default_variables['EXECUTABLE_SUFFIX'],
}
extension = spec.get('product_extension')
if extension:
extension = '.' + extension
else:
extension = DEFAULT_EXTENSION.get(type, '')
if 'product_name' in spec:
# If we were given an explicit name, use that.
target = spec['product_name']
else:
# Otherwise, derive a name from the target name.
target = spec['target_name']
if prefix == 'lib':
# Snip out an extra 'lib' from libs if appropriate.
target = StripPrefix(target, 'lib')
if type in ('static_library', 'loadable_module', 'shared_library',
'executable'):
return '%s%s%s' % (prefix, target, extension)
elif type == 'none':
return '%s.stamp' % target
else:
raise Exception('Unhandled output type %s' % type)
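# Illustrative sketch (not in gyp): on linux, a spec of
#   {'type': 'shared_library', 'target_name': 'libfoo'}
# yields 'libfoo.so' (the redundant 'lib' is snipped before the 'lib' prefix
# and '.so' suffix are applied), while type 'none' yields 'libfoo.stamp'.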
def ComputeOutput(self, spec, arch=None):
"""Compute the path for the final output of the spec."""
type = spec['type']
if self.flavor == 'win':
override = self.msvs_settings.GetOutputName(self.config_name,
self.ExpandSpecial)
if override:
return override
if arch is None and self.flavor == 'mac' and type in (
'static_library', 'executable', 'shared_library', 'loadable_module'):
filename = self.xcode_settings.GetExecutablePath()
else:
filename = self.ComputeOutputFileName(spec, type)
if arch is None and 'product_dir' in spec:
path = os.path.join(spec['product_dir'], filename)
return self.ExpandSpecial(path)
# Some products go into the output root, libraries go into the shared
# library dir, and everything else goes into the normal place.
type_in_output_root = ['executable', 'loadable_module']
if self.flavor == 'mac' and self.toolset == 'target':
type_in_output_root += ['shared_library', 'static_library']
elif self.flavor == 'win' and self.toolset == 'target':
type_in_output_root += ['shared_library']
if arch is not None:
# Make sure partial executables don't end up in a bundle or the regular
# output directory.
archdir = 'arch'
if self.toolset != 'target':
archdir = os.path.join('arch', '%s' % self.toolset)
return os.path.join(archdir, AddArch(filename, arch))
elif type in type_in_output_root or self.is_standalone_static_library:
return filename
elif type == 'shared_library':
libdir = 'lib'
if self.toolset != 'target':
libdir = os.path.join('lib', '%s' % self.toolset)
return os.path.join(libdir, filename)
else:
return self.GypPathToUniqueOutput(filename, qualified=False)
def WriteVariableList(self, ninja_file, var, values):
assert not isinstance(values, str)
if values is None:
values = []
ninja_file.variable(var, ' '.join(values))
def WriteNewNinjaRule(self, name, args, description, is_cygwin, env):
"""Write out a new ninja "rule" statement for a given command.
Returns the name of the new rule, and a copy of |args| with variables
expanded."""
if self.flavor == 'win':
args = [self.msvs_settings.ConvertVSMacros(
arg, self.base_to_build, config=self.config_name)
for arg in args]
description = self.msvs_settings.ConvertVSMacros(
description, config=self.config_name)
elif self.flavor == 'mac':
# |env| is an empty list on non-mac.
args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args]
description = gyp.xcode_emulation.ExpandEnvVars(description, env)
# TODO: we shouldn't need to qualify names; we do it because
# currently the ninja rule namespace is global, but it really
# should be scoped to the subninja.
rule_name = self.name
if self.toolset == 'target':
rule_name += '.' + self.toolset
rule_name += '.' + name
rule_name = re.sub('[^a-zA-Z0-9_]', '_', rule_name)
# Remove variable references, but not if they refer to the magic rule
# variables. This is not quite right, as it also protects these for
# actions, not just for rules where they are valid. Good enough.
protect = [ '${root}', '${dirname}', '${source}', '${ext}', '${name}' ]
protect = '(?!' + '|'.join(map(re.escape, protect)) + ')'
description = re.sub(protect + r'\$', '_', description)
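# Illustrative: a description of 'Processing $out' becomes 'Processing _out'
# here, while protected references such as '${source}' survive intact.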
# gyp dictates that commands are run from the base directory.
# cd into the directory before running, and adjust paths in
# the arguments to point to the proper locations.
rspfile = None
rspfile_content = None
args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args]
if self.flavor == 'win':
rspfile = rule_name + '.$unique_name.rsp'
# The cygwin case handles this inside the bash sub-shell.
run_in = '' if is_cygwin else ' ' + self.build_to_base
if is_cygwin:
rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine(
args, self.build_to_base)
else:
rspfile_content = gyp.msvs_emulation.EncodeRspFileList(args)
command = ('%s gyp-win-tool action-wrapper $arch ' % sys.executable +
rspfile + run_in)
else:
env = self.ComputeExportEnvString(env)
command = gyp.common.EncodePOSIXShellList(args)
command = 'cd %s; ' % self.build_to_base + env + command
# GYP rules/actions express being no-ops by not touching their outputs.
# Avoid executing downstream dependencies in this case by specifying
# restat=1 to ninja.
self.ninja.rule(rule_name, command, description, restat=True,
rspfile=rspfile, rspfile_content=rspfile_content)
self.ninja.newline()
return rule_name, args
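# Illustrative sketch (hypothetical target): for a target 'my-app' (toolset
# 'target') and an action named 'gen_version', the qualified rule name is
# 'my-app.target.gen_version', which sanitizes to 'my_app_target_gen_version'.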
def CalculateVariables(default_variables, params):
"""Calculate additional variables for use in the build (called by gyp)."""
global generator_additional_non_configuration_keys
global generator_additional_path_sections
flavor = gyp.common.GetFlavor(params)
if flavor == 'mac':
default_variables.setdefault('OS', 'mac')
default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib')
default_variables.setdefault('SHARED_LIB_DIR',
generator_default_variables['PRODUCT_DIR'])
default_variables.setdefault('LIB_DIR',
generator_default_variables['PRODUCT_DIR'])
# Copy additional generator configuration data from Xcode, which is shared
# by the Mac Ninja generator.
import gyp.generator.xcode as xcode_generator
generator_additional_non_configuration_keys = getattr(xcode_generator,
'generator_additional_non_configuration_keys', [])
generator_additional_path_sections = getattr(xcode_generator,
'generator_additional_path_sections', [])
global generator_extra_sources_for_rules
generator_extra_sources_for_rules = getattr(xcode_generator,
'generator_extra_sources_for_rules', [])
elif flavor == 'win':
default_variables.setdefault('OS', 'win')
default_variables['EXECUTABLE_SUFFIX'] = '.exe'
default_variables['STATIC_LIB_PREFIX'] = ''
default_variables['STATIC_LIB_SUFFIX'] = '.lib'
default_variables['SHARED_LIB_PREFIX'] = ''
default_variables['SHARED_LIB_SUFFIX'] = '.dll'
# Copy additional generator configuration data from VS, which is shared
# by the Windows Ninja generator.
import gyp.generator.msvs as msvs_generator
generator_additional_non_configuration_keys = getattr(msvs_generator,
'generator_additional_non_configuration_keys', [])
generator_additional_path_sections = getattr(msvs_generator,
'generator_additional_path_sections', [])
gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
else:
operating_system = flavor
if flavor == 'android':
operating_system = 'linux' # Keep this legacy behavior for now.
default_variables.setdefault('OS', operating_system)
default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
default_variables.setdefault('SHARED_LIB_DIR',
os.path.join('$!PRODUCT_DIR', 'lib'))
default_variables.setdefault('LIB_DIR',
os.path.join('$!PRODUCT_DIR', 'obj'))
def ComputeOutputDir(params):
"""Returns the path from the toplevel_dir to the build output directory."""
# generator_dir: relative path from pwd to where make puts build files.
# It makes migrating from make to ninja easier; ninja itself puts nothing
# here.
generator_dir = os.path.relpath(params['options'].generator_output or '.')
# output_dir: relative path from generator_dir to the build directory.
output_dir = params.get('generator_flags', {}).get('output_dir', 'out')
# Relative path from source root to our output files. e.g. "out"
return os.path.normpath(os.path.join(generator_dir, output_dir))
def CalculateGeneratorInputInfo(params):
"""Called by __init__ to initialize generator values based on params."""
# E.g. "out/gypfiles"
toplevel = params['options'].toplevel_dir
qualified_out_dir = os.path.normpath(os.path.join(
toplevel, ComputeOutputDir(params), 'gypfiles'))
global generator_filelist_paths
generator_filelist_paths = {
'toplevel': toplevel,
'qualified_out_dir': qualified_out_dir,
}
def OpenOutput(path, mode='w'):
"""Open |path| for writing, creating directories if necessary."""
gyp.common.EnsureDirExists(path)
return open(path, mode)
def CommandWithWrapper(cmd, wrappers, prog):
wrapper = wrappers.get(cmd, '')
if wrapper:
return wrapper + ' ' + prog
return prog
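# Illustrative (hypothetical wrapper map): with wrappers = {'CC': 'ccache'},
#   CommandWithWrapper('CC', wrappers, 'gcc')   -> 'ccache gcc'
#   CommandWithWrapper('LINK', wrappers, 'g++') -> 'g++'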
def GetDefaultConcurrentLinks():
"""Returns a best-guess for a number of concurrent links."""
if sys.platform in ('win32', 'cygwin'):
import ctypes
class MEMORYSTATUSEX(ctypes.Structure):
_fields_ = [
("dwLength", ctypes.c_ulong),
("dwMemoryLoad", ctypes.c_ulong),
("ullTotalPhys", ctypes.c_ulonglong),
("ullAvailPhys", ctypes.c_ulonglong),
("ullTotalPageFile", ctypes.c_ulonglong),
("ullAvailPageFile", ctypes.c_ulonglong),
("ullTotalVirtual", ctypes.c_ulonglong),
("ullAvailVirtual", ctypes.c_ulonglong),
("sullAvailExtendedVirtual", ctypes.c_ulonglong),
]
stat = MEMORYSTATUSEX()
stat.dwLength = ctypes.sizeof(stat)
ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
mem_limit = max(1, stat.ullTotalPhys / (4 * (2 ** 30))) # total / 4GB
hard_cap = max(1, int(os.getenv('GYP_LINK_CONCURRENCY_MAX', 2**32)))
# return min(mem_limit, hard_cap)
# TODO(scottmg): Temporary speculative fix for OOM on builders
# See http://crbug.com/333000.
return 2
elif sys.platform.startswith('linux'):
with open("/proc/meminfo") as meminfo:
memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
for line in meminfo:
match = memtotal_re.match(line)
if not match:
continue
# Allow 8 GiB per link on Linux because Gold is quite memory hungry.
return max(1, int(match.group(1)) / (8 * (2 ** 20)))
return 1
elif sys.platform == 'darwin':
try:
avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
# A static library debug build of Chromium's unit_tests takes ~2.7GB, so
# 4GB per ld process allows for some more bloat.
return max(1, avail_bytes / (4 * (2 ** 30))) # total / 4GB
except:
return 1
else:
# TODO(scottmg): Implement this for other platforms.
return 1
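# Rough arithmetic behind the heuristics above (illustrative): a Linux box
# reporting MemTotal of 16777216 kB (16 GiB) gets
# max(1, 16777216 / (8 * 2 ** 20)) = 2 concurrent links; machines at or
# below 8 GiB get 1.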
def _GetWinLinkRuleNameSuffix(embed_manifest):
"""Returns the suffix used to select an appropriate linking rule depending on
whether the manifest embedding is enabled."""
return '_embed' if embed_manifest else ''
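# e.g. _GetWinLinkRuleNameSuffix(True) -> '_embed', so the rules written
# below come in pairs such as 'link'/'link_embed' and 'solink'/'solink_embed'.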
def _AddWinLinkRules(master_ninja, embed_manifest):
"""Adds link rules for Windows platform to |master_ninja|."""
def FullLinkCommand(ldcmd, out, binary_type):
resource_name = {
'exe': '1',
'dll': '2',
}[binary_type]
return '%(python)s gyp-win-tool link-with-manifests $arch %(embed)s ' \
'%(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" ' \
'$manifests' % {
'python': sys.executable,
'out': out,
'ldcmd': ldcmd,
'resname': resource_name,
'embed': embed_manifest }
rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest)
use_separate_mspdbsrv = (
int(os.environ.get('GYP_USE_SEPARATE_MSPDBSRV', '0')) != 0)
dlldesc = 'LINK%s(DLL) $binary' % rule_name_suffix.upper()
dllcmd = ('%s gyp-win-tool link-wrapper $arch %s '
'$ld /nologo $implibflag /DLL /OUT:$binary '
'@$binary.rsp' % (sys.executable, use_separate_mspdbsrv))
dllcmd = FullLinkCommand(dllcmd, '$binary', 'dll')
master_ninja.rule('solink' + rule_name_suffix,
description=dlldesc, command=dllcmd,
rspfile='$binary.rsp',
rspfile_content='$libs $in_newline $ldflags',
restat=True,
pool='link_pool')
master_ninja.rule('solink_module' + rule_name_suffix,
description=dlldesc, command=dllcmd,
rspfile='$binary.rsp',
rspfile_content='$libs $in_newline $ldflags',
restat=True,
pool='link_pool')
# Note that ldflags goes at the end so that it has the option of
# overriding default settings earlier in the command line.
exe_cmd = ('%s gyp-win-tool link-wrapper $arch %s '
'$ld /nologo /OUT:$binary @$binary.rsp' %
(sys.executable, use_separate_mspdbsrv))
exe_cmd = FullLinkCommand(exe_cmd, '$binary', 'exe')
master_ninja.rule('link' + rule_name_suffix,
description='LINK%s $binary' % rule_name_suffix.upper(),
command=exe_cmd,
rspfile='$binary.rsp',
rspfile_content='$in_newline $libs $ldflags',
pool='link_pool')
def GenerateOutputForConfig(target_list, target_dicts, data, params,
config_name):
options = params['options']
flavor = gyp.common.GetFlavor(params)
generator_flags = params.get('generator_flags', {})
# build_dir: relative path from source root to our output files.
# e.g. "out/Debug"
build_dir = os.path.normpath(
os.path.join(ComputeOutputDir(params), config_name))
toplevel_build = os.path.join(options.toplevel_dir, build_dir)
master_ninja_file = OpenOutput(os.path.join(toplevel_build, 'build.ninja'))
master_ninja = ninja_syntax.Writer(master_ninja_file, width=120)
# Put build-time support tools in out/{config_name}.
gyp.common.CopyTool(flavor, toplevel_build)
# Grab make settings for CC/CXX.
# The rules are:
# - Priority, from lowest to highest: the gcc/g++ defaults, then
#   'make_global_settings' in gyp, then the environment variables.
# - If there is no 'make_global_settings' for CC.host/CXX.host and no
#   'CC_host'/'CXX_host' environment variable, cc_host/cxx_host fall back
#   to cc/cxx.
if flavor == 'win':
# Overridden by local arch choice in the use_deps case.
# Chromium's ffmpeg c99conv.py currently looks for a 'cc =' line in
# build.ninja so needs something valid here. http://crbug.com/233985
cc = 'cl.exe'
cxx = 'cl.exe'
ld = 'link.exe'
ld_host = '$ld'
else:
cc = 'cc'
cxx = 'c++'
ld = '$cc'
ldxx = '$cxx'
ld_host = '$cc_host'
ldxx_host = '$cxx_host'
cc_host = None
cxx_host = None
cc_host_global_setting = None
cxx_host_global_setting = None
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
make_global_settings = data[build_file].get('make_global_settings', [])
build_to_root = gyp.common.InvertRelativePath(build_dir,
options.toplevel_dir)
wrappers = {}
for key, value in make_global_settings:
if key == 'CC':
cc = os.path.join(build_to_root, value)
if key == 'CXX':
cxx = os.path.join(build_to_root, value)
if key == 'CC.host':
cc_host = os.path.join(build_to_root, value)
cc_host_global_setting = value
if key == 'CXX.host':
cxx_host = os.path.join(build_to_root, value)
cxx_host_global_setting = value
if key.endswith('_wrapper'):
wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value)
# Support wrappers from environment variables too.
for key, value in os.environ.iteritems():
if key.lower().endswith('_wrapper'):
key_prefix = key[:-len('_wrapper')]
key_prefix = re.sub(r'\.HOST$', '.host', key_prefix)
wrappers[key_prefix] = os.path.join(build_to_root, value)
if flavor == 'win':
cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
toplevel_build, generator_flags, OpenOutput)
for arch, path in cl_paths.iteritems():
master_ninja.variable(
'cl_' + arch, CommandWithWrapper('CC', wrappers,
QuoteShellArgument(path, flavor)))
cc = GetEnvironFallback(['CC_target', 'CC'], cc)
master_ninja.variable('cc', CommandWithWrapper('CC', wrappers, cc))
cxx = GetEnvironFallback(['CXX_target', 'CXX'], cxx)
master_ninja.variable('cxx', CommandWithWrapper('CXX', wrappers, cxx))
if flavor == 'win':
master_ninja.variable('ld', ld)
master_ninja.variable('idl', 'midl.exe')
master_ninja.variable('ar', 'lib.exe')
master_ninja.variable('rc', 'rc.exe')
master_ninja.variable('asm', 'ml.exe')
master_ninja.variable('mt', 'mt.exe')
else:
master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld))
master_ninja.variable('ldxx', CommandWithWrapper('LINK', wrappers, ldxx))
master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], 'ar'))
if generator_supports_multiple_toolsets:
if not cc_host:
cc_host = cc
if not cxx_host:
cxx_host = cxx
master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], 'ar'))
cc_host = GetEnvironFallback(['CC_host'], cc_host)
cxx_host = GetEnvironFallback(['CXX_host'], cxx_host)
# The environment variable could be used in 'make_global_settings', like
# ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)']; transform them here.
if '$(CC)' in cc_host and cc_host_global_setting:
cc_host = cc_host_global_setting.replace('$(CC)', cc)
if '$(CXX)' in cxx_host and cxx_host_global_setting:
cxx_host = cxx_host_global_setting.replace('$(CXX)', cxx)
master_ninja.variable('cc_host',
CommandWithWrapper('CC.host', wrappers, cc_host))
master_ninja.variable('cxx_host',
CommandWithWrapper('CXX.host', wrappers, cxx_host))
if flavor == 'win':
master_ninja.variable('ld_host', ld_host)
else:
master_ninja.variable('ld_host', CommandWithWrapper(
'LINK', wrappers, ld_host))
master_ninja.variable('ldxx_host', CommandWithWrapper(
'LINK', wrappers, ldxx_host))
master_ninja.newline()
master_ninja.pool('link_pool', depth=GetDefaultConcurrentLinks())
master_ninja.newline()
deps = 'msvc' if flavor == 'win' else 'gcc'
if flavor != 'win':
master_ninja.rule(
'cc',
description='CC $out',
command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c '
'$cflags_pch_c -c $in -o $out'),
depfile='$out.d',
deps=deps)
master_ninja.rule(
'cc_s',
description='CC $out',
command=('$cc $defines $includes $cflags $cflags_c '
'$cflags_pch_c -c $in -o $out'))
master_ninja.rule(
'cxx',
description='CXX $out',
command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc '
'$cflags_pch_cc -c $in -o $out'),
depfile='$out.d',
deps=deps)
else:
# TODO(scottmg) Separate pdb names is a test to see if it works around
# http://crbug.com/142362. It seems there's a race between the creation of
# the .pdb by the precompiled header step for .cc and the compilation of
# .c files. This should be handled by mspdbsrv, but rarely errors out with
# c1xx : fatal error C1033: cannot open program database
# By making the rules target separate pdb files this might be avoided.
cc_command = ('ninja -t msvc -e $arch ' +
'-- '
'$cc /nologo /showIncludes /FC '
'@$out.rsp /c $in /Fo$out /Fd$pdbname_c ')
cxx_command = ('ninja -t msvc -e $arch ' +
'-- '
'$cxx /nologo /showIncludes /FC '
'@$out.rsp /c $in /Fo$out /Fd$pdbname_cc ')
master_ninja.rule(
'cc',
description='CC $out',
command=cc_command,
rspfile='$out.rsp',
rspfile_content='$defines $includes $cflags $cflags_c',
deps=deps)
master_ninja.rule(
'cxx',
description='CXX $out',
command=cxx_command,
rspfile='$out.rsp',
rspfile_content='$defines $includes $cflags $cflags_cc',
deps=deps)
master_ninja.rule(
'idl',
description='IDL $in',
command=('%s gyp-win-tool midl-wrapper $arch $outdir '
'$tlb $h $dlldata $iid $proxy $in '
'$idlflags' % sys.executable))
master_ninja.rule(
'rc',
description='RC $in',
# Note: $in must be last otherwise rc.exe complains.
command=('%s gyp-win-tool rc-wrapper '
'$arch $rc $defines $resource_includes $rcflags /fo$out $in' %
sys.executable))
master_ninja.rule(
'asm',
description='ASM $in',
command=('%s gyp-win-tool asm-wrapper '
'$arch $asm $defines $includes /c /Fo $out $in' %
sys.executable))
if flavor != 'mac' and flavor != 'win':
master_ninja.rule(
'alink',
description='AR $out',
command='rm -f $out && $ar rcs $out $in')
master_ninja.rule(
'alink_thin',
description='AR $out',
command='rm -f $out && $ar rcsT $out $in')
# This allows targets that only need to depend on $lib's API to declare an
# order-only dependency on $lib.TOC and avoid relinking such downstream
# dependencies when $lib changes only in non-public ways.
# The resulting string leaves an uninterpolated %(suffix)s placeholder,
# which is filled in by the final substitutions below.
mtime_preserving_solink_base = (
'if [ ! -e $lib -o ! -e ${lib}.TOC ]; then '
'%(solink)s && %(extract_toc)s > ${lib}.TOC; else '
'%(solink)s && %(extract_toc)s > ${lib}.tmp && '
'if ! cmp -s ${lib}.tmp ${lib}.TOC; then mv ${lib}.tmp ${lib}.TOC ; '
'fi; fi'
% { 'solink':
'$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s',
'extract_toc':
('{ readelf -d ${lib} | grep SONAME ; '
'nm -gD -f p ${lib} | cut -f1-2 -d\' \'; }')})
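# Illustrative: after the substitution above, the command still contains a
# literal '%(suffix)s' (carried in through the 'solink' value); the 'solink'
# and 'solink_module' rules below fill it in with their own input lists.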
master_ninja.rule(
'solink',
description='SOLINK $lib',
restat=True,
command=(mtime_preserving_solink_base % {
'suffix': '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive '
'$libs'}),
pool='link_pool')
master_ninja.rule(
'solink_module',
description='SOLINK(module) $lib',
restat=True,
command=(mtime_preserving_solink_base % {
'suffix': '-Wl,--start-group $in $solibs -Wl,--end-group '
'$libs'}),
pool='link_pool')
master_ninja.rule(
'link',
description='LINK $out',
command=('$ld $ldflags -o $out '
'-Wl,--start-group $in $solibs -Wl,--end-group $libs'),
pool='link_pool')
elif flavor == 'win':
master_ninja.rule(
'alink',
description='LIB $out',
command=('%s gyp-win-tool link-wrapper $arch False '
'$ar /nologo /ignore:4221 /OUT:$out @$out.rsp' %
sys.executable),
rspfile='$out.rsp',
rspfile_content='$in_newline $libflags')
_AddWinLinkRules(master_ninja, embed_manifest=True)
_AddWinLinkRules(master_ninja, embed_manifest=False)
else:
master_ninja.rule(
'objc',
description='OBJC $out',
command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc '
'$cflags_pch_objc -c $in -o $out'),
depfile='$out.d',
deps=deps)
master_ninja.rule(
'objcxx',
description='OBJCXX $out',
command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc '
'$cflags_pch_objcc -c $in -o $out'),
depfile='$out.d',
deps=deps)
master_ninja.rule(
'alink',
description='LIBTOOL-STATIC $out, POSTBUILDS',
command='rm -f $out && '
'./gyp-mac-tool filter-libtool libtool $libtool_flags '
'-static -o $out $in'
'$postbuilds')
master_ninja.rule(
'lipo',
description='LIPO $out, POSTBUILDS',
command='rm -f $out && lipo -create $in -output $out$postbuilds')
# Record the public interface of $lib in $lib.TOC. See the corresponding
# comment in the posix section above for details.
solink_base = '$ld %(type)s $ldflags -o $lib %(suffix)s'
mtime_preserving_solink_base = (
'if [ ! -e $lib -o ! -e ${lib}.TOC ] || '
# Always force dependent targets to relink if this library
# reexports something. Handling this correctly would require
# recursive TOC dumping but this is rare in practice, so punt.
'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then '
'%(solink)s && %(extract_toc)s > ${lib}.TOC; '
'else '
'%(solink)s && %(extract_toc)s > ${lib}.tmp && '
'if ! cmp -s ${lib}.tmp ${lib}.TOC; then '
'mv ${lib}.tmp ${lib}.TOC ; '
'fi; '
'fi'
% { 'solink': solink_base,
'extract_toc':
'{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'})
solink_suffix = '$in $solibs $libs$postbuilds'
master_ninja.rule(
'solink',
description='SOLINK $lib, POSTBUILDS',
restat=True,
command=mtime_preserving_solink_base % {'suffix': solink_suffix,
'type': '-shared'},
pool='link_pool')
master_ninja.rule(
'solink_notoc',
description='SOLINK $lib, POSTBUILDS',
restat=True,
command=solink_base % {'suffix': solink_suffix, 'type': '-shared'},
pool='link_pool')
solink_module_suffix = '$in $solibs $libs$postbuilds'
master_ninja.rule(
'solink_module',
description='SOLINK(module) $lib, POSTBUILDS',
restat=True,
command=mtime_preserving_solink_base % {'suffix': solink_module_suffix,
'type': '-bundle'},
pool='link_pool')
master_ninja.rule(
'solink_module_notoc',
description='SOLINK(module) $lib, POSTBUILDS',
restat=True,
command=solink_base % {'suffix': solink_module_suffix, 'type': '-bundle'},
pool='link_pool')
master_ninja.rule(
'link',
description='LINK $out, POSTBUILDS',
command=('$ld $ldflags -o $out '
'$in $solibs $libs$postbuilds'),
pool='link_pool')
master_ninja.rule(
'preprocess_infoplist',
description='PREPROCESS INFOPLIST $out',
command=('$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && '
'plutil -convert xml1 $out $out'))
master_ninja.rule(
'copy_infoplist',
description='COPY INFOPLIST $in',
command='$env ./gyp-mac-tool copy-info-plist $in $out $keys')
master_ninja.rule(
'mac_tool',
description='MACTOOL $mactool_cmd $in',
command='$env ./gyp-mac-tool $mactool_cmd $in $out')
master_ninja.rule(
'package_framework',
description='PACKAGE FRAMEWORK $out, POSTBUILDS',
command='./gyp-mac-tool package-framework $out $version$postbuilds '
'&& touch $out')
if flavor == 'win':
master_ninja.rule(
'stamp',
description='STAMP $out',
command='%s gyp-win-tool stamp $out' % sys.executable)
master_ninja.rule(
'copy',
description='COPY $in $out',
command='%s gyp-win-tool recursive-mirror $in $out' % sys.executable)
else:
master_ninja.rule(
'stamp',
description='STAMP $out',
command='${postbuilds}touch $out')
master_ninja.rule(
'copy',
description='COPY $in $out',
command='rm -rf $out && cp -af $in $out')
master_ninja.newline()
all_targets = set()
for build_file in params['build_files']:
for target in gyp.common.AllTargets(target_list,
target_dicts,
os.path.normpath(build_file)):
all_targets.add(target)
all_outputs = set()
# target_outputs is a map from qualified target name to a Target object.
target_outputs = {}
# target_short_names is a map from target short name to a list of Target
# objects.
target_short_names = {}
for qualified_target in target_list:
# qualified_target is like: third_party/icu/icu.gyp:icui18n#target
build_file, name, toolset = \
gyp.common.ParseQualifiedTarget(qualified_target)
this_make_global_settings = data[build_file].get('make_global_settings', [])
assert make_global_settings == this_make_global_settings, (
"make_global_settings needs to be the same for all targets. %s vs. %s" %
(this_make_global_settings, make_global_settings))
spec = target_dicts[qualified_target]
if flavor == 'mac':
gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)
build_file = gyp.common.RelativePath(build_file, options.toplevel_dir)
base_path = os.path.dirname(build_file)
obj = 'obj'
if toolset != 'target':
obj += '.' + toolset
output_file = os.path.join(obj, base_path, name + '.ninja')
ninja_output = StringIO()
writer = NinjaWriter(qualified_target, target_outputs, base_path, build_dir,
ninja_output,
toplevel_build, output_file,
flavor, toplevel_dir=options.toplevel_dir)
target = writer.WriteSpec(spec, config_name, generator_flags)
if ninja_output.tell() > 0:
# Only create files for ninja files that actually have contents.
with OpenOutput(os.path.join(toplevel_build, output_file)) as ninja_file:
ninja_file.write(ninja_output.getvalue())
ninja_output.close()
master_ninja.subninja(output_file)
if target:
if name != target.FinalOutput() and spec['toolset'] == 'target':
target_short_names.setdefault(name, []).append(target)
target_outputs[qualified_target] = target
if qualified_target in all_targets:
all_outputs.add(target.FinalOutput())
if target_short_names:
# Write a short name to build this target. This benefits both the
# "build chrome" case as well as the gyp tests, which expect to be
# able to run actions and build libraries by their short name.
master_ninja.newline()
master_ninja.comment('Short names for targets.')
for short_name in target_short_names:
master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in
target_short_names[short_name]])
if all_outputs:
master_ninja.newline()
master_ninja.build('all', 'phony', list(all_outputs))
master_ninja.default(generator_flags.get('default_target', 'all'))
master_ninja_file.close()
def PerformBuild(data, configurations, params):
options = params['options']
for config in configurations:
builddir = os.path.join(options.toplevel_dir, 'out', config)
arguments = ['ninja', '-C', builddir]
print 'Building [%s]: %s' % (config, arguments)
subprocess.check_call(arguments)
def CallGenerateOutputForConfig(arglist):
# Ignore the interrupt signal so that the parent process catches it and
# kills all multiprocessing children.
signal.signal(signal.SIGINT, signal.SIG_IGN)
(target_list, target_dicts, data, params, config_name) = arglist
GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
def GenerateOutput(target_list, target_dicts, data, params):
# Update target_dicts for iOS device builds.
target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator(
target_dicts)
user_config = params.get('generator_flags', {}).get('config', None)
if gyp.common.GetFlavor(params) == 'win':
target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts)
target_list, target_dicts = MSVSUtil.InsertLargePdbShims(
target_list, target_dicts, generator_default_variables)
if user_config:
GenerateOutputForConfig(target_list, target_dicts, data, params,
user_config)
else:
config_names = target_dicts[target_list[0]]['configurations'].keys()
if params['parallel']:
try:
pool = multiprocessing.Pool(len(config_names))
arglists = []
for config_name in config_names:
arglists.append(
(target_list, target_dicts, data, params, config_name))
pool.map(CallGenerateOutputForConfig, arglists)
except KeyboardInterrupt, e:
pool.terminate()
raise e
else:
for config_name in config_names:
GenerateOutputForConfig(target_list, target_dicts, data, params,
config_name)
| mit |
finfish/scrapy | scrapy/spidermiddlewares/offsite.py | 7 | 2563 | """
Offsite Spider Middleware
See documentation in docs/topics/spider-middleware.rst
"""
import re
import logging
import warnings
from scrapy import signals
from scrapy.http import Request
from scrapy.utils.httpobj import urlparse_cached
logger = logging.getLogger(__name__)
class OffsiteMiddleware(object):
def __init__(self, stats):
self.stats = stats
@classmethod
def from_crawler(cls, crawler):
o = cls(crawler.stats)
crawler.signals.connect(o.spider_opened, signal=signals.spider_opened)
return o
def process_spider_output(self, response, result, spider):
for x in result:
if isinstance(x, Request):
if x.dont_filter or self.should_follow(x, spider):
yield x
else:
domain = urlparse_cached(x).hostname
if domain and domain not in self.domains_seen:
self.domains_seen.add(domain)
logger.debug(
"Filtered offsite request to %(domain)r: %(request)s",
{'domain': domain, 'request': x}, extra={'spider': spider})
self.stats.inc_value('offsite/domains', spider=spider)
self.stats.inc_value('offsite/filtered', spider=spider)
else:
yield x
def should_follow(self, request, spider):
regex = self.host_regex
# hostname can be None for wrong urls (like javascript links)
host = urlparse_cached(request).hostname or ''
return bool(regex.search(host))
def get_host_regex(self, spider):
"""Override this method to implement a different offsite policy"""
allowed_domains = getattr(spider, 'allowed_domains', None)
if not allowed_domains:
return re.compile('') # allow all by default
url_pattern = re.compile("^https?://.*$")
for domain in allowed_domains:
if url_pattern.match(domain):
message = ("allowed_domains accepts only domains, not URLs. "
"Ignoring URL entry %s in allowed_domains." % domain)
warnings.warn(message, URLWarning)
domains = [re.escape(d) for d in allowed_domains if d is not None]
regex = r'^(.*\.)?(%s)$' % '|'.join(domains)
return re.compile(regex)
def spider_opened(self, spider):
self.host_regex = self.get_host_regex(spider)
self.domains_seen = set()
class URLWarning(Warning):
pass
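# Illustrative sketch (not part of the original middleware): how
# get_host_regex() maps allowed_domains to a host-matching regex; the
# domain 'example.com' is a hypothetical value.
def _example_host_regex():
    """Hypothetical demo of the offsite filtering regex."""
    domains = [re.escape(d) for d in ['example.com']]
    regex = re.compile(r'^(.*\.)?(%s)$' % '|'.join(domains))
    assert regex.search('sub.example.com')
    assert not regex.search('example.org')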
| bsd-3-clause |
Theer108/invenio | invenio/utils/remote_debugger/__init__.py | 5 | 10582 | # This file is part of Invenio.
# Copyright (C) 2011, 2013, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
Module for debugging mod_python and mod_wsgi applications that run inside
the Apache webserver (or any other webserver). This is a utility module
that makes remote debugging possible and easy.
"""
import warnings
from invenio.utils.deprecation import RemovedInInvenio21Warning
warnings.warn("Remote debugger is going to be removed. "
"Please use native Werkzeug debugger.",
RemovedInInvenio21Warning)
# Debug mode is activated by passing debug=[debugger_id] in the url, when
# you try to load a webpage using such a url, the execution will stop (if
# breakpoints are set, or automatically depending on the debugger you are
# using). This module is only a helper utility, the actual debugging is
# done by others.
#
# Each debugger has its own number:
#
# local winpdb: debug=1
# remote winpdb: debug=2
# remote pydev: debug=3
#
# If the debug parameter is not present, the code is executed normally
# (without stopping).
#
#
# Each debugger has its own parameters that can be set via url parameters,
# you can also create your own debugging functions, and assign a new
# number to them. Please see get_debugger() function for more information on
# how to create a new call, and see the individual debuggers for information
# what parameters they accept.
#
# Important: Remember to set WSGIDaemonProcess processes=1 threads=1 in Apache
# ----------------------------- CONFIGURATION -----------------------------------------
from .config import CFG_REMOTE_DEBUGGER_ENABLED, \
CFG_REMOTE_DEBUGGER_IMPORT, CFG_REMOTE_DEBUGGER_WINPDB_PASSWORD, \
CFG_REMOTE_DEBUGGER_PYDEV_REMOTE_IP, CFG_REMOTE_DEBUGGER_PYDEV_REMOTE_PORT, \
CFG_REMOTE_DEBUGGER_PYDEV_PATHS, CFG_REMOTE_DEBUGGER_WSGI_RELOAD, \
CFG_PYDEV_DEBUG, CFG_REMOTE_DEBUGGER_TYPE, CFG_REMOTE_DEBUGGER_NAME
# -------------------------------------------------------------------------------------
# --------------------------- no config past this point -------------------------------
# -------------------------------------------------------------------------------------
from invenio import config
import os
import glob
import traceback
import sys
from six import StringIO
def start_file_changes_monitor():
from invenio.utils import remote_debugger_wsgi_reload as monitor
monitor.start(interval=1.0)
for pattern in CFG_REMOTE_DEBUGGER_WSGI_RELOAD:
for f in glob.glob(os.path.join(config.CFG_PREFIX, pattern)):
monitor.track(f)
# -------------------------------------------------------------------------------------
# ----------------------------- DEBUGGER PART LOADING --------------------------
# -------------------------------------------------------------------------------------
normcase = os.path.normcase
# raise exception so that this module is not loaded (this module is always imported
# in a try...except manner)
if not CFG_REMOTE_DEBUGGER_ENABLED:
raise Exception('Remote debugger is disabled')
# import modules that are configured for this debugger, at least for Eclipse, this
# MUST HAPPEN before other stuff gets loaded
for path, name in CFG_REMOTE_DEBUGGER_IMPORT.get(CFG_REMOTE_DEBUGGER_TYPE, {}).items():
try:
if '.' in path:
globals()[name] = __import__(path, globals(), locals(), path.split('.'))
else:
globals()[name] = __import__(path)
except Exception:
traceback.print_exc()
sys.stderr.write("Error in remote_debugger, import of the %s failed" % path)
def error_msg(debugger_args):
"""Error has been caught and we were given chance to report it"""
debug_no, params = parse_args(debugger_args)
if debug_no == '3':
exc_info = sys.exc_info()
if exc_info[0]:
exception_data = StringIO()
traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], None, exception_data)
exception_data = exception_data.getvalue()
if exception_data.endswith('\n'):
exception_data = exception_data[:-1]
            # pydev truncates the data (printing it in a loop does not help)
sys.stderr.write('\n\n...')
sys.stderr.write(exception_data[-600:])
sys.stderr.write('\n\n')
def start():
"""
Switch into a debugger mode manualy - to be called fromt the command line scripts mostly
@var debugger_args: string, eg. "3|ip:192.168.31.1|port:9999"
"""
debug_starter = get_debugger()
if debug_starter is None:
raise Exception("Requested debugger not found or not initalized properly.")
debug_starter()
def get_debugger():
"""
    Returns a function that will initialize the configured debugger.
    @return: function call, or None if no debugger matches the configuration
"""
params = {}
if 'winpdb-local' == CFG_REMOTE_DEBUGGER_NAME:
func = start_embedded_winpdb_debugger
elif 'winpdb-remote' == CFG_REMOTE_DEBUGGER_NAME:
func = start_remote_winpdb_debugger
elif 'pydev-remote' == CFG_REMOTE_DEBUGGER_NAME:
func = start_remote_pydev_debugger
else:
return None
# we could determine the function signature and check arguments
# func.func_code.co_varnames[:func.func_code.co_argcount]
    # but I don't do that intentionally (to raise an error if something wrong is
    # submitted)
#raise(str(params))
return lambda: func(**params)
def parse_args(arg):
"""Parses arguments supplied through url param debug=xcxv
@return: tuple of debuggper_no, additional_params
"""
debug_no = ''
params = {}
# parse the passed-in arg
if '|' in arg[0]:
        # it will raise an error if something goes wrong
a = arg[0].split('|')
debug_no = a[0]
for k, v in map(lambda x: x.split(':'), a[1:]):
try:
v = int(v)
except:
if v == 'False':
v = False
elif v == 'True':
v = True
params[k] = v
else:
debug_no = arg[0]
return (debug_no, params)
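# Illustrative sketch (not part of the original module): what parse_args()
# returns for a url-encoded debugger argument; the values are hypothetical.
def _example_parse_args():
    """Hypothetical demo: int-like and bool-like values are converted."""
    debug_no, params = parse_args(['3|ip:192.168.31.1|port:9999|suspend:True'])
    assert debug_no == '3'
    assert params == {'ip': '192.168.31.1', 'port': 9999, 'suspend': True}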
def start_embedded_winpdb_debugger(passwd=None):
"""
Winpdb debugger, rpdb2 must be enabled in the
CFG_REMOTE_DEBUGGER_IMPORT
Change the call to suit your needs
"""
p = passwd or CFG_REMOTE_DEBUGGER_WINPDB_PASSWORD
rpdb2.start_embedded_debugger(p)
def start_remote_winpdb_debugger(passwd=None):
"""
Winpdb remote debugger, change the call to suit your needs
"""
p = passwd or CFG_REMOTE_DEBUGGER_WINPDB_PASSWORD
rpdb2.start_embedded_debugger(p, fAllowRemote=True)
def start_remote_pydev_debugger(ip=None, port=None, suspend=False, stderr=True, stdout=True, path=None):
"""
    remote eclipse/pydev debugger; the pydev and putils modules should be available
    in the CFG_REMOTE_DEBUGGER_IMPORT
If you want to change behaviour of the debugger interactively, you can
pass arguments encoded in the url, example:
http://someurl/collection/X?debug=3|ip:192.168.31.1|port:9999|stderr:0
@keyword ip: (str) the machine where the Pydev debugger is listening for incoming connections
@keyword port: (str) the port of the remote machine
@keyword suspend: (bool) whether to stop execution right after the debugger was activated
@keyword stderr: (bool) redirect the stderr to the remote debugging machine console
@keyword stdout: (bool) redirect the stdout to the remote debugging machine console
@keyword path: (str) list of mappings of <source> -> <target> paths separated by '#'
"""
# to see the translation
if CFG_PYDEV_DEBUG:
sys.stderr.write("We set the pydev to be verbose")
putils.DEBUG_CLIENT_SERVER_TRANSLATION = True
if hasattr(pydevd, "MYDEBUG"):
pydevd.MYDEBUG = False
i = ip or CFG_REMOTE_DEBUGGER_PYDEV_REMOTE_IP
p = port or CFG_REMOTE_DEBUGGER_PYDEV_REMOTE_PORT
_pydev_paths = None
    if hasattr(putils, 'PATHS_FROM_ECLIPSE_TO_PYTHON'):  # newer versions of Pydev
_pydev_paths = getattr(putils, 'PATHS_FROM_ECLIPSE_TO_PYTHON')
elif hasattr(putils, 'PATHS_FROM_CLIENT_TO_SERVER'): # pydev 1.5
_pydev_paths = getattr(putils, 'PATHS_FROM_CLIENT_TO_SERVER')
# Eclipse needs to know how to map the file from the remote server
if CFG_REMOTE_DEBUGGER_PYDEV_PATHS:
xpaths = map(lambda x: (normcase(x[0]), normcase(x[1])), CFG_REMOTE_DEBUGGER_PYDEV_PATHS)
for couple in xpaths:
if couple not in _pydev_paths:
_pydev_paths.append(couple)
# paths set through the url parameter
if path:
elements = path.split('#')
if len(elements) % 2 == 1:
elements.pop(-1)
i = 0
xpaths = []
while len(elements):
xpaths.append((normcase(elements.pop(0)), normcase(elements.pop(0))))
for couple in xpaths:
if couple not in _pydev_paths:
_pydev_paths.append(couple)
# the first argument is the IP of the (remote) machine where Eclipse Pydev
# is listening, we send suspend=False to not bother with stopping the code executing when
# pydev is initialized, set your own breakpoints inside Eclipse to stop execution
    # this is a HACK!!! we basically try to reconnect to another IP as requested on the url param
    # I don't know whether it breaks pydev internals at some point
if (ip is not None) and hasattr(pydevd, 'oldxxxip') and pydevd.oldxxxip != ip:
pydevd.connected = False
pydevd.settrace(i,
stdoutToServer=stdout,
stderrToServer=stderr,
port=p,
suspend=suspend)
pydevd.oldxxxip = ip
if CFG_PYDEV_DEBUG:
sys.stderr.write("These are the mapping paths\n")
sys.stderr.write(str(_pydev_paths) + "\n")
| gpl-2.0 |
rodrigods/keystone | keystone/catalog/backends/sql.py | 1 | 12218 | # Copyright 2012 OpenStack Foundation
# Copyright 2012 Canonical Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
import sqlalchemy
from keystone import catalog
from keystone.catalog import core
from keystone.common import sql
from keystone import config
from keystone import exception
CONF = config.CONF
class Region(sql.ModelBase, sql.DictBase):
__tablename__ = 'region'
attributes = ['id', 'description', 'parent_region_id', 'url']
id = sql.Column(sql.String(64), primary_key=True)
description = sql.Column(sql.String(255), nullable=False)
url = sql.Column(sql.String(255), nullable=True)
# NOTE(jaypipes): Right now, using an adjacency list model for
# storing the hierarchy of regions is fine, since
# the API does not support any kind of querying for
# more complex hierarchical queries such as "get me only
# the regions that are subchildren of this region", etc.
# If, in the future, such queries are needed, then it
# would be possible to add in columns to this model for
# "left" and "right" and provide support for a nested set
# model.
parent_region_id = sql.Column(sql.String(64), nullable=True)
# TODO(jaypipes): I think it's absolutely stupid that every single model
# is required to have an "extra" column because of the
# DictBase in the keystone.common.sql.core module. Forcing
# tables to have pointless columns in the database is just
# bad. Remove all of this extra JSON blob stuff.
# See: https://bugs.launchpad.net/keystone/+bug/1265071
extra = sql.Column(sql.JsonBlob())
class Service(sql.ModelBase, sql.DictBase):
__tablename__ = 'service'
attributes = ['id', 'type', 'enabled']
id = sql.Column(sql.String(64), primary_key=True)
type = sql.Column(sql.String(255))
enabled = sql.Column(sql.Boolean, nullable=False, default=True,
server_default=sqlalchemy.sql.expression.true())
extra = sql.Column(sql.JsonBlob())
endpoints = sqlalchemy.orm.relationship("Endpoint", backref="service")
class Endpoint(sql.ModelBase, sql.DictBase):
__tablename__ = 'endpoint'
attributes = ['id', 'interface', 'region', 'service_id', 'url',
'legacy_endpoint_id', 'enabled']
id = sql.Column(sql.String(64), primary_key=True)
legacy_endpoint_id = sql.Column(sql.String(64))
interface = sql.Column(sql.String(8), nullable=False)
region = sql.Column(sql.String(255))
service_id = sql.Column(sql.String(64),
sql.ForeignKey('service.id'),
nullable=False)
url = sql.Column(sql.Text(), nullable=False)
enabled = sql.Column(sql.Boolean, nullable=False, default=True,
server_default=sqlalchemy.sql.expression.true())
extra = sql.Column(sql.JsonBlob())
class Catalog(catalog.Driver):
# Regions
def list_regions(self, hints):
session = sql.get_session()
regions = session.query(Region)
regions = sql.filter_limit_query(Region, regions, hints)
return [s.to_dict() for s in list(regions)]
def _get_region(self, session, region_id):
ref = session.query(Region).get(region_id)
if not ref:
raise exception.RegionNotFound(region_id=region_id)
return ref
def _delete_child_regions(self, session, region_id):
"""Delete all child regions.
Recursively delete any region that has the supplied region
as its parent.
"""
children = session.query(Region).filter_by(parent_region_id=region_id)
for child in children:
self._delete_child_regions(session, child.id)
session.delete(child)
def _check_parent_region(self, session, region_ref):
"""Raise a NotFound if the parent region does not exist.
If the region_ref has a specified parent_region_id, check that
the parent exists, otherwise, raise a NotFound.
"""
parent_region_id = region_ref.get('parent_region_id')
if parent_region_id is not None:
# This will raise NotFound if the parent doesn't exist,
# which is the behavior we want.
self._get_region(session, parent_region_id)
def get_region(self, region_id):
session = sql.get_session()
return self._get_region(session, region_id).to_dict()
def delete_region(self, region_id):
session = sql.get_session()
with session.begin():
ref = self._get_region(session, region_id)
self._delete_child_regions(session, region_id)
session.delete(ref)
@sql.handle_conflicts(conflict_type='region')
def create_region(self, region_ref):
session = sql.get_session()
with session.begin():
self._check_parent_region(session, region_ref)
region = Region.from_dict(region_ref)
session.add(region)
return region.to_dict()
def update_region(self, region_id, region_ref):
session = sql.get_session()
with session.begin():
self._check_parent_region(session, region_ref)
ref = self._get_region(session, region_id)
old_dict = ref.to_dict()
old_dict.update(region_ref)
new_region = Region.from_dict(old_dict)
for attr in Region.attributes:
if attr != 'id':
setattr(ref, attr, getattr(new_region, attr))
return ref.to_dict()
# Services
@sql.truncated
def list_services(self, hints):
session = sql.get_session()
services = session.query(Service)
services = sql.filter_limit_query(Service, services, hints)
return [s.to_dict() for s in list(services)]
def _get_service(self, session, service_id):
ref = session.query(Service).get(service_id)
if not ref:
raise exception.ServiceNotFound(service_id=service_id)
return ref
def get_service(self, service_id):
session = sql.get_session()
return self._get_service(session, service_id).to_dict()
def delete_service(self, service_id):
session = sql.get_session()
with session.begin():
ref = self._get_service(session, service_id)
session.query(Endpoint).filter_by(service_id=service_id).delete()
session.delete(ref)
def create_service(self, service_id, service_ref):
session = sql.get_session()
with session.begin():
service = Service.from_dict(service_ref)
session.add(service)
return service.to_dict()
def update_service(self, service_id, service_ref):
session = sql.get_session()
with session.begin():
ref = self._get_service(session, service_id)
old_dict = ref.to_dict()
old_dict.update(service_ref)
new_service = Service.from_dict(old_dict)
for attr in Service.attributes:
if attr != 'id':
setattr(ref, attr, getattr(new_service, attr))
ref.extra = new_service.extra
return ref.to_dict()
# Endpoints
def create_endpoint(self, endpoint_id, endpoint_ref):
session = sql.get_session()
self.get_service(endpoint_ref['service_id'])
new_endpoint = Endpoint.from_dict(endpoint_ref)
with session.begin():
session.add(new_endpoint)
return new_endpoint.to_dict()
def delete_endpoint(self, endpoint_id):
session = sql.get_session()
with session.begin():
ref = self._get_endpoint(session, endpoint_id)
session.delete(ref)
def _get_endpoint(self, session, endpoint_id):
try:
return session.query(Endpoint).filter_by(id=endpoint_id).one()
except sql.NotFound:
raise exception.EndpointNotFound(endpoint_id=endpoint_id)
def get_endpoint(self, endpoint_id):
session = sql.get_session()
return self._get_endpoint(session, endpoint_id).to_dict()
@sql.truncated
def list_endpoints(self, hints):
session = sql.get_session()
endpoints = session.query(Endpoint)
endpoints = sql.filter_limit_query(Endpoint, endpoints, hints)
return [e.to_dict() for e in list(endpoints)]
def update_endpoint(self, endpoint_id, endpoint_ref):
session = sql.get_session()
with session.begin():
ref = self._get_endpoint(session, endpoint_id)
old_dict = ref.to_dict()
old_dict.update(endpoint_ref)
new_endpoint = Endpoint.from_dict(old_dict)
for attr in Endpoint.attributes:
if attr != 'id':
setattr(ref, attr, getattr(new_endpoint, attr))
ref.extra = new_endpoint.extra
return ref.to_dict()
def get_catalog(self, user_id, tenant_id, metadata=None):
substitutions = dict(six.iteritems(CONF))
substitutions.update({'tenant_id': tenant_id, 'user_id': user_id})
session = sql.get_session()
t = True # variable for singleton for PEP8, E712.
endpoints = (session.query(Endpoint).
options(sql.joinedload(Endpoint.service)).
filter(Endpoint.enabled == t).all())
catalog = {}
for endpoint in endpoints:
if not endpoint.service['enabled']:
continue
try:
url = core.format_url(endpoint['url'], substitutions)
except exception.MalformedEndpoint:
continue # this failure is already logged in format_url()
region = endpoint['region']
service_type = endpoint.service['type']
default_service = {
'id': endpoint['id'],
'name': endpoint.service['name'],
'publicURL': ''
}
catalog.setdefault(region, {})
catalog[region].setdefault(service_type, default_service)
interface_url = '%sURL' % endpoint['interface']
catalog[region][service_type][interface_url] = url
return catalog
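    # Illustrative sketch (assumption, not from the original source): for a
    # single enabled public endpoint of an 'identity' service in RegionOne,
    # get_catalog() returns roughly:
    #
    #   {'RegionOne': {'identity': {'id': '...',
    #                               'name': 'keystone',
    #                               'publicURL': 'http://host:5000/v2.0'}}}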
def get_v3_catalog(self, user_id, tenant_id, metadata=None):
d = dict(six.iteritems(CONF))
d.update({'tenant_id': tenant_id,
'user_id': user_id})
session = sql.get_session()
t = True # variable for singleton for PEP8, E712.
services = (session.query(Service).filter(Service.enabled == t).
options(sql.joinedload(Service.endpoints)).
all())
def make_v3_endpoints(endpoints):
for endpoint in (ep.to_dict() for ep in endpoints if ep.enabled):
del endpoint['service_id']
del endpoint['legacy_endpoint_id']
del endpoint['enabled']
try:
endpoint['url'] = core.format_url(endpoint['url'], d)
except exception.MalformedEndpoint:
continue # this failure is already logged in format_url()
yield endpoint
def make_v3_service(svc):
eps = list(make_v3_endpoints(svc.endpoints))
service = {'endpoints': eps, 'id': svc.id, 'type': svc.type}
name = svc.extra.get('name')
if name:
service['name'] = name
return service
return [make_v3_service(svc) for svc in services]
| apache-2.0 |
GeorgePlukov/FloodWatch | XBee-2.2.3/xbee/tests/test_zigbee.py | 27 | 9957 | """
test_zigbee.py
By Paul Malmsten, 2010
[email protected]
Tests the XBee ZB (ZigBee) implementation class for API compliance
"""
import unittest
from xbee.zigbee import ZigBee
class TestZigBee(unittest.TestCase):
"""
Tests ZigBee-specific features
"""
def setUp(self):
self.zigbee = ZigBee(None)
def test_null_terminated_field(self):
"""
Packets with null-terminated fields
should be properly parsed
"""
expected_data = b'\x01\x02\x03\x04'
terminator = b'\x00'
node_identifier = b'\x95' + b'\x00' * 21 + expected_data + terminator + b'\x00' * 8
data = self.zigbee._split_response(node_identifier)
self.assertEqual(data['node_id'], expected_data)
def test_split_node_identification_identifier(self):
data = b'\x95\x00\x13\xa2\x00\x40\x52\x2b\xaa\x7d\x84\x02\x7d\x84\x00\x13\xa2\x00\x40\x52\x2b\xaa\x20\x00\xff\xfe\x01\x01\xc1\x05\x10\x1e'
info = self.zigbee._split_response(data)
expected_info = {
'id': 'node_id_indicator',
'sender_addr_long': b'\x00\x13\xa2\x00\x40\x52\x2b\xaa',
'sender_addr': b'\x7d\x84',
'options': b'\x02',
'source_addr': b'\x7d\x84',
'source_addr_long': b'\x00\x13\xa2\x00\x40\x52\x2b\xaa',
'node_id': b' ',
'parent_source_addr': b'\xff\xfe',
'device_type': b'\x01',
'source_event': b'\x01',
'digi_profile_id': b'\xc1\x05',
'manufacturer_id': b'\x10\x1e',
}
self.assertEqual(info, expected_info)
def test_split_node_identification_identifier2(self):
data = b'\x95\x00\x13\xa2\x00\x40\x52\x2b\xaa\x7d\x84\x02\x7d\x84\x00\x13\xa2\x00\x40\x52\x2b\xaaCoordinator\x00\xff\xfe\x01\x01\xc1\x05\x10\x1e'
info = self.zigbee._split_response(data)
expected_info = {
'id': 'node_id_indicator',
'sender_addr_long': b'\x00\x13\xa2\x00\x40\x52\x2b\xaa',
'sender_addr': b'\x7d\x84',
'options': b'\x02',
'source_addr': b'\x7d\x84',
'source_addr_long': b'\x00\x13\xa2\x00\x40\x52\x2b\xaa',
'node_id': b'Coordinator',
'parent_source_addr': b'\xff\xfe',
'device_type': b'\x01',
'source_event': b'\x01',
'digi_profile_id': b'\xc1\x05',
'manufacturer_id': b'\x10\x1e',
}
self.assertEqual(info, expected_info)
def test_is_remote_at_response_parameter_parsed_as_io_samples(self):
"""
A remote AT command of IS, to take a sample immediately and respond
with the results, must be appropriately parsed for IO data.
"""
data = b'\x97A\x00\x13\xa2\x00@oG\xe4v\x1aIS\x00\x01\x1c\xc0\x06\x18\x00\x02\x8c\x03\x96'
info = self.zigbee._split_response(data)
expected_info = {
'id': 'remote_at_response',
'frame_id': b'A',
'source_addr_long': b'\x00\x13\xa2\x00@oG\xe4',
'source_addr': b'v\x1a',
'command': b'IS',
'status': b'\x00',
'parameter': [{'adc-1': 652,
'adc-2': 918,
'dio-10': False,
'dio-11': True,
'dio-12': True,
'dio-6': False,
'dio-7': False
}]
}
self.assertEqual(info, expected_info)
def test_lowercase_is_remote_at_response_parameter_parsed_as_io_samples(self):
"""
A remote AT command of lowercase is, to take a sample immediately and respond
with the results, must be appropriately parsed for IO data.
"""
data = b'\x97A\x00\x13\xa2\x00@oG\xe4v\x1ais\x00\x01\x1c\xc0\x06\x18\x00\x02\x8c\x03\x96'
info = self.zigbee._split_response(data)
expected_info = {
'id': 'remote_at_response',
'frame_id': b'A',
'source_addr_long': b'\x00\x13\xa2\x00@oG\xe4',
'source_addr': b'v\x1a',
'command': b'is',
'status': b'\x00',
'parameter': [{'adc-1': 652,
'adc-2': 918,
'dio-10': False,
'dio-11': True,
'dio-12': True,
'dio-6': False,
'dio-7': False
}]
}
self.assertEqual(info, expected_info)
def test_parsing_may_encounter_field_which_does_not_exist(self):
"""
Some fields are optional and may not exist; parsing should not crash
if/when they are not available.
"""
data = b'\x97A\x00\x13\xa2\x00@oG\xe4v\x1aIS\x01'
info = self.zigbee._split_response(data)
expected_info = {
'id': 'remote_at_response',
'frame_id': b'A',
'source_addr_long': b'\x00\x13\xa2\x00@oG\xe4',
'source_addr': b'v\x1a',
'command': b'IS',
'status': b'\x01',
}
self.assertEqual(info, expected_info)
def test_nd_at_response_parameter_parsed(self):
"""
An at_response for an ND command must be parsed.
"""
data = b'\x88AND\x00v\x1a\x00\x13\xa2\x00@oG\xe4ENDPOINT-1\x00\xff\xfe\x01\x00\xc1\x05\x10\x1e'
info = self.zigbee._split_response(data)
expected_info = {
'id': 'at_response',
'frame_id': b'A',
'command': b'ND',
'status': b'\x00',
'parameter': {'source_addr': b'\x76\x1a',
'source_addr_long': b'\x00\x13\xa2\x00\x40\x6f\x47\xe4',
'node_identifier': b'ENDPOINT-1',
'parent_address': b'\xff\xfe',
'device_type': b'\x01',
'status': b'\x00',
'profile_id': b'\xc1\x05',
'manufacturer': b'\x10\x1e',
}
}
self.assertEqual(info, expected_info)
def test_lowercase_nd_at_response_parameter_parsed(self):
"""
An at_response for a lowercase nd command must be parsed.
"""
data = b'\x88And\x00v\x1a\x00\x13\xa2\x00@oG\xe4ENDPOINT-1\x00\xff\xfe\x01\x00\xc1\x05\x10\x1e'
info = self.zigbee._split_response(data)
expected_info = {
'id': 'at_response',
'frame_id': b'A',
'command': b'nd',
'status': b'\x00',
'parameter': {'source_addr': b'\x76\x1a',
'source_addr_long': b'\x00\x13\xa2\x00\x40\x6f\x47\xe4',
'node_identifier': b'ENDPOINT-1',
'parent_address': b'\xff\xfe',
'device_type': b'\x01',
'status': b'\x00',
'profile_id': b'\xc1\x05',
'manufacturer': b'\x10\x1e',
}
}
self.assertEqual(info, expected_info)
class TestParseZigBeeIOData(unittest.TestCase):
"""
Test parsing ZigBee specific IO data
"""
def setUp(self):
self.zigbee = ZigBee(None)
def test_parse_dio_adc(self):
data = b'\x01\x08\x00\x0e\x08\x00\x00\x00\x02P\x02\x06'
expected_results = [{'dio-11': True,
'adc-1': 0,
'adc-2': 592,
'adc-3': 518}]
results = self.zigbee._parse_samples(data)
self.assertEqual(results, expected_results)
def test_parse_samples_ticket_44(self):
"""
This example is from ticket 44 on Google Code:
https://code.google.com/p/python-xbee/issues/detail?id=44
The author claims the given data is generated by an
Xbee Pro 900HP module, but I could only find a definition
for packet with a response type of 0x92 in the XBee ZB
specification.
"""
data = (b'\x01' + # Number of samples
b'\x10\x00' + # Digital I/O mask (CD/DIO12 enabled)
b'\x0E' + # ADC 1,2,3 enabled
b'\x10\x00' + # DIO12 is high
b'\x03\xA4' + # ADC1 = 932
b'\x01\x31' + # ADC2 = 305
b'\x03\x31') # ADC3 = 817
expected_results = [{'dio-12': True,
'adc-1': 932,
'adc-2': 305,
'adc-3': 817}]
results = self.zigbee._parse_samples(data)
self.assertEqual(results, expected_results)
def test_parse_dio_adc_supply_voltage_not_clamped(self):
"""
When bit 7 on the ADC mask is set, the supply voltage is included
in the ADC I/O sample section. This sample may exceed 10 bits of
precision, even though all other ADC channels are limited to a
range of 0-1.2v with 10 bits of precision. I assume that a voltage
        divider and the firmware are used internally to compute the actual
Vcc voltage.
Therefore, the I/O sampling routine must not clamp this ADC
channel to 10 bits of precision.
"""
data = b'\x01\x00\x00\x80\x0D\x18'
expected_results = [{'adc-7':0xD18}]
results = self.zigbee._parse_samples(data)
self.assertEqual(results, expected_results)
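# Hedged addition (not in the original file): conventional entry point so the
# module can also be run directly with `python test_zigbee.py`.
if __name__ == '__main__':
    unittest.main()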
| mit |
megamanfx/grit-i18n | grit/format/policy_templates/writers/android_policy_writer.py | 17 | 3492 | #!/usr/bin/env python
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from grit.format.policy_templates.writers import xml_formatted_writer
from xml.dom import minidom
from xml.sax import saxutils as xml_escape
def GetWriter(config):
'''Factory method for creating AndroidPolicyWriter objects.
See the constructor of TemplateWriter for description of
arguments.
'''
return AndroidPolicyWriter(['android'], config)
def _EscapeResource(resource):
'''Escape the resource for usage in an Android resource XML file.
This includes standard XML escaping as well as those specific to Android.
'''
if type(resource) == int:
return str(resource)
return xml_escape.escape(resource, {"'": "\\'", '"': '\\"', '\\': '\\\\'})
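# Illustrative sketch (not part of the original writer): the escaping rules
# applied by _EscapeResource, demonstrated on hypothetical inputs.
def _ExampleEscapeResource():
  '''Hypothetical demo: Android quoting on top of standard XML escaping.'''
  assert _EscapeResource(42) == '42'
  assert _EscapeResource("it's") == "it\\'s"
  assert _EscapeResource('a < b') == 'a &lt; b'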
class AndroidPolicyWriter(xml_formatted_writer.XMLFormattedWriter):
'''Outputs localized Android Resource XML files.
The policy strings are localized and exposed as string resources for
consumption through Android's App restriction Schema.
'''
# DOM root node of the generated XML document.
_doc = None
# The resources node contains all resource 'string' and 'string-array'
# elements.
_resources = None
def AddStringResource(self, name, string):
'''Add a string resource of the given name.
'''
string_node = self._doc.createElement('string')
string_node.setAttribute('name', name)
string_node.appendChild(self._doc.createTextNode(_EscapeResource(string)))
self._resources.appendChild(string_node)
def AddStringArrayResource(self, name, string_items):
'''Add a string-array resource of the given name and
elements from string_items.
'''
string_array_node = self._doc.createElement('string-array')
string_array_node.setAttribute('name', name)
self._resources.appendChild(string_array_node)
for item in string_items:
string_node = self._doc.createElement('item')
string_node.appendChild(self._doc.createTextNode(_EscapeResource(item)))
string_array_node.appendChild(string_node)
def PreprocessPolicies(self, policy_list):
return self.FlattenGroupsAndSortPolicies(policy_list)
def CanBeRecommended(self, policy):
return False
def IsDeprecatedPolicySupported(self, policy):
return True
def IsFuturePolicySupported(self, policy):
return True
def WritePolicy(self, policy):
name = policy['name']
self.AddStringResource(name + 'Title', policy['caption'])
# Get the first line of the policy description.
description = policy['desc'].split('\n', 1)[0]
self.AddStringResource(name + 'Desc', description)
items = policy.get('items')
if items is not None:
entries = [ item['caption'] for item in items ]
values = [ item['value'] for item in items ]
self.AddStringArrayResource(name + 'Entries', entries)
self.AddStringArrayResource(name + 'Values', values)
def BeginTemplate(self):
comment_text = 'DO NOT MODIFY THIS FILE DIRECTLY!\n' \
'IT IS GENERATED FROM policy_templates.json.'
comment_node = self._doc.createComment(comment_text)
self._doc.insertBefore(comment_node, self._resources)
def Init(self):
impl = minidom.getDOMImplementation()
self._doc = impl.createDocument(None, 'resources', None)
self._resources = self._doc.documentElement
def GetTemplateText(self):
return self.ToPrettyXml(self._doc)
| bsd-2-clause |
lduarte1991/edx-platform | common/djangoapps/terrain/stubs/tests/test_youtube_stub.py | 172 | 2639 | """
Unit test for stub YouTube implementation.
"""
import unittest
import requests
from ..youtube import StubYouTubeService
class StubYouTubeServiceTest(unittest.TestCase):
def setUp(self):
super(StubYouTubeServiceTest, self).setUp()
self.server = StubYouTubeService()
self.url = "http://127.0.0.1:{0}/".format(self.server.port)
self.server.config['time_to_response'] = 0.0
self.addCleanup(self.server.shutdown)
def test_unused_url(self):
response = requests.get(self.url + 'unused_url')
self.assertEqual("Unused url", response.content)
@unittest.skip('Failing intermittently due to inconsistent responses from YT. See TE-871')
def test_video_url(self):
response = requests.get(
self.url + 'test_youtube/OEoXaMPEzfM?v=2&alt=jsonc&callback=callback_func'
)
# YouTube metadata for video `OEoXaMPEzfM` states that duration is 116.
self.assertEqual(
'callback_func({"data": {"duration": 116, "message": "I\'m youtube.", "id": "OEoXaMPEzfM"}})',
response.content
)
def test_transcript_url_equal(self):
response = requests.get(
self.url + 'test_transcripts_youtube/t__eq_exist'
)
self.assertEqual(
"".join([
'<?xml version="1.0" encoding="utf-8" ?>',
'<transcript><text start="1.0" dur="1.0">',
'Equal transcripts</text></transcript>'
]), response.content
)
def test_transcript_url_not_equal(self):
response = requests.get(
self.url + 'test_transcripts_youtube/t_neq_exist',
)
self.assertEqual(
"".join([
'<?xml version="1.0" encoding="utf-8" ?>',
'<transcript><text start="1.1" dur="5.5">',
'Transcripts sample, different that on server',
'</text></transcript>'
]), response.content
)
def test_transcript_not_found(self):
response = requests.get(self.url + 'test_transcripts_youtube/some_id')
self.assertEqual(404, response.status_code)
def test_reset_configuration(self):
reset_config_url = self.url + 'del_config'
# add some configuration data
self.server.config['test_reset'] = 'This is a reset config test'
# reset server configuration
response = requests.delete(reset_config_url)
self.assertEqual(response.status_code, 200)
# ensure that server config dict is empty after successful reset
self.assertEqual(self.server.config, {})
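# Hedged addition (not in the original file): conventional entry point so the
# module can also be run directly with `python test_youtube_stub.py`.
if __name__ == '__main__':
    unittest.main()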
| agpl-3.0 |
gregoil/rotest | src/rotest/management/common/messages.py | 1 | 6686 | """Holds the common resource management messages."""
from __future__ import absolute_import
from abc import ABCMeta
from basicstruct import BasicStruct
import six
def slots_extender(new_slots):
"""Extender decorator to add new slots to the wrapped class.
Arguments:
new_slots (tuple): new slots names.
Returns:
        func. a method that decorates a class.
"""
def decorator(origin_class):
"""Decorate a class and add the given slots to it.
Actually, it creates a new class that derives from the given class and
        adds the new slots to it; it also copies the documentation.
Arguments:
origin_class (class): the class to be wrapped.
Returns:
class. the new class.
"""
new_class = type(origin_class.__name__, (origin_class,), {})
new_class.__slots__ = origin_class.__slots__ + new_slots
new_class.__doc__ = origin_class.__doc__
return new_class
return decorator
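# Illustrative sketch (not part of the original module): the effect of
# slots_extender on a plain BasicStruct subclass; 'Demo' is hypothetical.
def _example_slots_extender():
    """Hypothetical demo: the decorated class gains the new slot names."""
    @slots_extender(('extra_field',))
    class Demo(BasicStruct):
        __slots__ = ('base_field',)
    assert Demo.__slots__ == ('base_field', 'extra_field')
    return Demo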
@slots_extender(('msg_id',))
class AbstractMessage(six.with_metaclass(ABCMeta, BasicStruct)):
"""Basic message class.
Holds the common data for resource management messages.
Attributes:
msg_id (number): sending side unique message identifier.
"""
@slots_extender(('reason',))
class ParsingFailure(AbstractMessage):
"""Reply message on a request that failed to parse."""
pass
@slots_extender(('request_id',))
class AbstractReply(six.with_metaclass(ABCMeta, AbstractMessage)):
"""Abstract reply message for parsed request.
Attributes:
request_id (number): msg_id of the requested operation.
"""
class SuccessReply(AbstractReply):
"""Success reply message, answer on successful request."""
pass
@slots_extender(('should_skip',))
class ShouldSkipReply(AbstractReply):
"""Reply message to the 'should_skip' remote query."""
pass
@slots_extender(('code', 'content'))
class ErrorReply(AbstractReply):
"""Error reply message, answer on unsuccessful request.
Attributes:
code (number): error code.
content (str): content describing the failure.
"""
pass
@slots_extender(('resources',))
class ResourcesReply(AbstractReply):
"""Resources reply message.
Sent as an answer to a successful 'LockResources' request.
Attributes:
resources (list): list of
:class:'rotest.common.models.base_resource.BaseResource'.
"""
pass
@slots_extender(('descriptors',))
class QueryResources(AbstractMessage):
"""Query resources request message.
Attributes:
        descriptors (dict): descriptors to query, in the format
            {'type': resource_type_name, 'properties': {'key': value}}
"""
pass
@slots_extender(('descriptors', 'timeout'))
class LockResources(AbstractMessage):
"""Lock resources request message.
Attributes:
        descriptors (list): descriptors of resources - a list of dictionaries of
{'type': resource_type_name, 'properties': {'key': value}}
timeout (number): seconds to wait for resources if they're unavailable.
"""
pass
@slots_extender(('requests',))
class ReleaseResources(AbstractMessage):
"""Release resources request message.
Attributes:
requests (list): list of resources names.
"""
pass
@slots_extender(('user_name',))
class CleanupUser(AbstractMessage):
"""Clean user's resources request message.
Attributes:
user_name (str): name of the user to be cleaned.
"""
pass
@slots_extender(('tests', 'run_data'))
class StartTestRun(AbstractMessage):
"""Start the run of the test message.
Attributes:
tests (dict): structure and data of the tests to run.
run_data (dict): additional data relevant to the run.
"""
pass
class RunFinished(AbstractMessage):
"""Signals the end of the run.
Note:
        This message is used in the multiprocess runner to inform the manager of
the end of a worker's run.
"""
pass
@slots_extender(('run_data',))
class UpdateRunData(AbstractMessage):
"""Update the run data message.
Attributes:
run_data (dict): run data fields and values.
"""
pass
@slots_extender(('model', 'filter', 'kwargs'))
class UpdateFields(AbstractMessage):
"""Request to update content in the server's DB.
Attributes:
model (type): Django model to apply changes on.
filter (dict): arguments to filter by.
kwargs (dict): changes to apply on the filtered instances.
"""
pass
@slots_extender(('test_id',))
class AbstractTestEventMessage(AbstractMessage):
"""Abstract test event message.
Attributes:
test_id (number): identifier of the test.
"""
pass
class StartTest(AbstractTestEventMessage):
"""Start the run of a test message."""
pass
class SetupFinished(AbstractTestEventMessage):
"""Finished the setup of a test message."""
pass
class StartTeardown(AbstractTestEventMessage):
"""Start the teardown of a test message."""
pass
class ShouldSkip(AbstractTestEventMessage):
"""Check if the test should be skipped message."""
pass
class StopTest(AbstractTestEventMessage):
"""End the run of a test message."""
pass
@slots_extender(('resources',))
class UpdateResources(AbstractTestEventMessage):
"""Update the resources list of the test's locked resources.
Attributes:
resources (list): list of resource descriptor of the test.
"""
pass
@slots_extender(('resources',))
class CloneResources(AbstractTestEventMessage):
"""Update the resources list of the test's locked resources.
Attributes:
resources (dict): dict of the locked resources of the test.
Note:
        This message is used in the multiprocess runner to inform the manager of
the test's 'locked_resources' dict content.
"""
pass
class StartComposite(AbstractTestEventMessage):
"""Start the run of a composite test message."""
pass
class StopComposite(AbstractTestEventMessage):
"""End the run of a composite test message."""
pass
@slots_extender(('code', 'info'))
class AddResult(AbstractTestEventMessage):
"""Update a test result message.
Attributes:
code (number): TestOutcome result code.
info (str): additional data about the result (traceback, reason, etc.).
"""
pass
@slots_extender(('info',))
class AddInfo(AbstractTestEventMessage):
"""Register a success message.
Attributes:
info (str): success message.
"""
pass
| mit |
totalspectrum/binutils-propeller | gdb/testsuite/gdb.perf/lib/perftest/reporter.py | 7 | 2912 | # Copyright (C) 2013-2017 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Text reports are written here.
# This is the perftest counterpart to gdb.sum.
SUM_FILE_NAME = "perftest.sum"
# Raw data that went into the report is written here.
# This is the perftest counterpart to gdb.log.
LOG_FILE_NAME = "perftest.log"
class Reporter(object):
"""Base class of reporter to report test results in a certain format.
Subclass, which is specific to a report format, should overwrite
methods report, start and end.
"""
def __init__(self, append):
"""Constructor of Reporter.
        Attribute append is used to determine whether to append to or
        overwrite the log file.
"""
self.append = append
def report(self, *args):
raise NotImplementedError("Abstract Method:report.")
def start(self):
"""Invoked when reporting is started."""
raise NotImplementedError("Abstract Method:start.")
def end(self):
"""Invoked when reporting is done.
It must be overridden to do some cleanups, such as closing file
descriptors.
"""
raise NotImplementedError("Abstract Method:end.")
class TextReporter(Reporter):
"""Report results in a plain text file 'perftest.log'."""
def __init__(self, append):
        super(TextReporter, self).__init__(append)
self.txt_sum = None
self.txt_log = None
def report(self, test_name, measurement_name, data_points):
if len(data_points) == 0:
self.txt_sum.write("%s %s *no data recorded*\n" % (
test_name, measurement_name))
return
average = sum(data_points) / len(data_points)
data_min = min(data_points)
data_max = max(data_points)
self.txt_sum.write("%s %s %s\n" % (
test_name, measurement_name, average))
self.txt_log.write("%s %s %s, min %s, max %s, data %s\n" % (
test_name, measurement_name, average, data_min, data_max,
data_points))
def start(self):
mode = "a+" if self.append else "w"
        self.txt_sum = open(SUM_FILE_NAME, mode)
        self.txt_log = open(LOG_FILE_NAME, mode)
def end(self):
self.txt_sum.close ()
self.txt_log.close ()
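# Illustrative sketch (not part of the original reporter): the lifecycle the
# perftest framework drives; the test and measurement names are hypothetical.
def _example_text_reporter():
    """Hypothetical demo: record one measurement in perftest.sum/.log."""
    reporter = TextReporter(append=False)
    reporter.start()
    reporter.report("my_test", "elapsed_time", [1.0, 2.0, 3.0])
    reporter.end()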
| gpl-2.0 |
nitzmahone/ansible | lib/ansible/module_utils/aws/waf.py | 71 | 7415 | # Copyright (c) 2017 Will Thames
#
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""
This module adds shared support for Web Application Firewall modules
"""
from ansible.module_utils.ec2 import camel_dict_to_snake_dict, AWSRetry
from ansible.module_utils.aws.waiters import get_waiter
try:
import botocore
except ImportError:
pass # caught by imported HAS_BOTO3
MATCH_LOOKUP = {
'byte': {
'method': 'byte_match_set',
'conditionset': 'ByteMatchSet',
'conditiontuple': 'ByteMatchTuple',
'type': 'ByteMatch'
},
'geo': {
'method': 'geo_match_set',
'conditionset': 'GeoMatchSet',
'conditiontuple': 'GeoMatchConstraint',
'type': 'GeoMatch'
},
'ip': {
'method': 'ip_set',
'conditionset': 'IPSet',
'conditiontuple': 'IPSetDescriptor',
'type': 'IPMatch'
},
'regex': {
'method': 'regex_match_set',
'conditionset': 'RegexMatchSet',
'conditiontuple': 'RegexMatchTuple',
'type': 'RegexMatch'
},
'size': {
'method': 'size_constraint_set',
'conditionset': 'SizeConstraintSet',
'conditiontuple': 'SizeConstraint',
'type': 'SizeConstraint'
},
'sql': {
'method': 'sql_injection_match_set',
'conditionset': 'SqlInjectionMatchSet',
'conditiontuple': 'SqlInjectionMatchTuple',
'type': 'SqlInjectionMatch',
},
'xss': {
'method': 'xss_match_set',
'conditionset': 'XssMatchSet',
'conditiontuple': 'XssMatchTuple',
'type': 'XssMatch'
},
}
@AWSRetry.backoff(tries=5, delay=5, backoff=2.0)
def get_rule_with_backoff(client, rule_id):
return client.get_rule(RuleId=rule_id)['Rule']
@AWSRetry.backoff(tries=5, delay=5, backoff=2.0)
def get_byte_match_set_with_backoff(client, byte_match_set_id):
return client.get_byte_match_set(ByteMatchSetId=byte_match_set_id)['ByteMatchSet']
@AWSRetry.backoff(tries=5, delay=5, backoff=2.0)
def get_ip_set_with_backoff(client, ip_set_id):
return client.get_ip_set(IPSetId=ip_set_id)['IPSet']
@AWSRetry.backoff(tries=5, delay=5, backoff=2.0)
def get_size_constraint_set_with_backoff(client, size_constraint_set_id):
return client.get_size_constraint_set(SizeConstraintSetId=size_constraint_set_id)['SizeConstraintSet']
@AWSRetry.backoff(tries=5, delay=5, backoff=2.0)
def get_sql_injection_match_set_with_backoff(client, sql_injection_match_set_id):
return client.get_sql_injection_match_set(SqlInjectionMatchSetId=sql_injection_match_set_id)['SqlInjectionMatchSet']
@AWSRetry.backoff(tries=5, delay=5, backoff=2.0)
def get_xss_match_set_with_backoff(client, xss_match_set_id):
return client.get_xss_match_set(XssMatchSetId=xss_match_set_id)['XssMatchSet']
def get_rule(client, module, rule_id):
try:
rule = get_rule_with_backoff(client, rule_id)
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json_aws(e, msg="Couldn't obtain waf rule")
match_sets = {
'ByteMatch': get_byte_match_set_with_backoff,
'IPMatch': get_ip_set_with_backoff,
'SizeConstraint': get_size_constraint_set_with_backoff,
'SqlInjectionMatch': get_sql_injection_match_set_with_backoff,
'XssMatch': get_xss_match_set_with_backoff
}
if 'Predicates' in rule:
for predicate in rule['Predicates']:
if predicate['Type'] in match_sets:
predicate.update(match_sets[predicate['Type']](client, predicate['DataId']))
# replaced by Id from the relevant MatchSet
del(predicate['DataId'])
return rule
@AWSRetry.backoff(tries=5, delay=5, backoff=2.0)
def get_web_acl_with_backoff(client, web_acl_id):
return client.get_web_acl(WebACLId=web_acl_id)['WebACL']
def get_web_acl(client, module, web_acl_id):
try:
web_acl = get_web_acl_with_backoff(client, web_acl_id)
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json_aws(e, msg="Couldn't obtain web acl")
if web_acl:
try:
for rule in web_acl['Rules']:
rule.update(get_rule(client, module, rule['RuleId']))
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json_aws(e, msg="Couldn't obtain web acl rule")
return camel_dict_to_snake_dict(web_acl)
@AWSRetry.backoff(tries=5, delay=5, backoff=2.0)
def list_rules_with_backoff(client):
paginator = client.get_paginator('list_rules')
return paginator.paginate().build_full_result()['Rules']
@AWSRetry.backoff(tries=5, delay=5, backoff=2.0)
def list_web_acls_with_backoff(client):
paginator = client.get_paginator('list_web_acls')
return paginator.paginate().build_full_result()['WebACLs']
def list_web_acls(client, module):
try:
return list_web_acls_with_backoff(client)
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json_aws(e, msg="Couldn't obtain web acls")
def get_change_token(client, module):
try:
token = client.get_change_token()
return token['ChangeToken']
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json_aws(e, msg="Couldn't obtain change token")
@AWSRetry.backoff(tries=10, delay=2, backoff=2.0, catch_extra_error_codes=['WAFStaleDataException'])
def run_func_with_change_token_backoff(client, module, params, func, wait=False):
params['ChangeToken'] = get_change_token(client, module)
result = func(**params)
if wait:
get_waiter(
client, 'change_token_in_sync',
).wait(
ChangeToken=result['ChangeToken']
)
return result
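# Illustrative sketch (assumption, not from the original source): a module
# would typically wrap a mutating WAF call so that a fresh ChangeToken is
# injected and WAFStaleDataException is retried, e.g.:
#
#   params = {'Name': 'my-rule', 'MetricName': 'MyRule'}
#   result = run_func_with_change_token_backoff(client, module, params,
#                                               client.create_rule)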
| gpl-3.0 |
werbk/task-6.19 | conftest.py | 1 | 2403 | import pytest
import logging
import json
import jsonpickle
import os.path
import importlib
from fixture.TestBase import BaseClass
from fixture.variables import UserLogin
fixture = None
target = None
@pytest.fixture
def app(request):
global fixture
global target
browser = request.config.getoption('--browser')
if target is None:
config_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), request.config.getoption('--target'))
with open(config_file) as file:
target = json.load(file)
#url = request.config.getoption('--baseUrl')
#login_user = request.config.getoption('--login_user')
#login_password = request.config.getoption('--login_password')
if fixture is None or not fixture.is_valid():
fixture = BaseClass(browser=browser, base_url=target['baseUrl'])
fixture.session.ensure_login(user_name=target['username'], password=target['password'])
return fixture
@pytest.fixture(scope='session', autouse=True)
def stop(request):
def fin():
fixture.session.ensure_logout()
fixture.restore()
request.addfinalizer(fin)
return fixture
def pytest_addoption(parser):
default_login_user = [UserLogin.name, UserLogin.password]
parser.addoption('--browser', action='store', default='firefox')
parser.addoption('--target', action='store', default='target.json') #'http://localhost/addressbook/')
    # I believe it is possible to do this in one line, but I don't know how to pass two login parameters at the same time
#parser.addoption('--loginu', action='store', default=default_login_user[0])
#parser.addoption('--loginp', action='store', default=default_login_user[1])
def load_from_module(module):
return importlib.import_module("data.%s" % module).test_data
def load_from_json(file):
with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data/%s.json' % file)) as f:
return jsonpickle.decode(f.read())
def pytest_generate_tests(metafunc):
for fixture in metafunc.fixturenames:
if fixture.startswith("data_"):
testdata = load_from_module(fixture[5:])
metafunc.parametrize(fixture, testdata, ids=[str(x) for x in testdata])
elif fixture.startswith("json_"):
testdata = load_from_json(fixture[5:])
metafunc.parametrize(fixture, testdata, ids=[str(x) for x in testdata])
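# Illustrative sketch (assumption about the project layout, not from the
# original file): a test argument named 'data_contacts' would be parametrized
# from the 'test_data' attribute of data/contacts.py, while 'json_contacts'
# would be parametrized from data/contacts.json, one test case per element.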
| apache-2.0 |
gerrive/horizon | openstack_dashboard/test/test_data/heat_data.py | 6 | 15719 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from heatclient.v1 import resource_types
from heatclient.v1 import resources
from heatclient.v1 import services
from heatclient.v1 import stacks
from openstack_dashboard.test.test_data import utils
# suppress warnings about our use of object comparisons in heatclient
logging.getLogger('heatclient.openstack.common.apiclient.base') \
.setLevel('ERROR')
# A slightly hacked up copy of a sample cloudformation template for testing.
TEMPLATE = """
{
"AWSTemplateFormatVersion": "2010-09-09",
"Description": "AWS CloudFormation Sample Template.",
"Parameters": {
"KeyName": {
"Description": "Name of an EC2 Key Pair to enable SSH access to the instances",
"Type": "String"
},
"InstanceType": {
"Description": "WebServer EC2 instance type",
"Type": "String",
"Default": "m1.small",
"AllowedValues": [
"m1.tiny",
"m1.small",
"m1.medium",
"m1.large",
"m1.xlarge"
],
"ConstraintDescription": "must be a valid EC2 instance type."
},
"DBName": {
"Default": "wordpress",
"Description": "The WordPress database name",
"Type": "String",
"MinLength": "1",
"MaxLength": "64",
"AllowedPattern": "[a-zA-Z][a-zA-Z0-9]*",
"ConstraintDescription": "must begin with a letter and..."
},
"DBUsername": {
"Default": "admin",
"NoEcho": "true",
"Description": "The WordPress database admin account username",
"Type": "String",
"MinLength": "1",
"MaxLength": "16",
"AllowedPattern": "[a-zA-Z][a-zA-Z0-9]*",
"ConstraintDescription": "must begin with a letter and..."
},
"DBPassword": {
"Default": "admin",
"NoEcho": "true",
"Description": "The WordPress database admin account password",
"Type": "String",
"MinLength": "1",
"MaxLength": "41",
"AllowedPattern": "[a-zA-Z0-9]*",
"ConstraintDescription": "must contain only alphanumeric characters."
},
"DBRootPassword": {
"Default": "admin",
"NoEcho": "true",
"Description": "Root password for MySQL",
"Type": "String",
"MinLength": "1",
"MaxLength": "41",
"AllowedPattern": "[a-zA-Z0-9]*",
"ConstraintDescription": "must contain only alphanumeric characters."
},
"LinuxDistribution": {
"Default": "F17",
"Description": "Distribution of choice",
"Type": "String",
"AllowedValues": [
"F18",
"F17",
"U10",
"RHEL-6.1",
"RHEL-6.2",
"RHEL-6.3"
]
},
"Network": {
"Type": "String",
"CustomConstraint": "neutron.network"
}
},
"Mappings": {
"AWSInstanceType2Arch": {
"m1.tiny": {
"Arch": "32"
},
"m1.small": {
"Arch": "64"
},
"m1.medium": {
"Arch": "64"
},
"m1.large": {
"Arch": "64"
},
"m1.xlarge": {
"Arch": "64"
}
},
"DistroArch2AMI": {
"F18": {
"32": "F18-i386-cfntools",
"64": "F18-x86_64-cfntools"
},
"F17": {
"32": "F17-i386-cfntools",
"64": "F17-x86_64-cfntools"
},
"U10": {
"32": "U10-i386-cfntools",
"64": "U10-x86_64-cfntools"
},
"RHEL-6.1": {
"32": "rhel61-i386-cfntools",
"64": "rhel61-x86_64-cfntools"
},
"RHEL-6.2": {
"32": "rhel62-i386-cfntools",
"64": "rhel62-x86_64-cfntools"
},
"RHEL-6.3": {
"32": "rhel63-i386-cfntools",
"64": "rhel63-x86_64-cfntools"
}
}
},
"Resources": {
"WikiDatabase": {
"Type": "AWS::EC2::Instance",
"Metadata": {
"AWS::CloudFormation::Init": {
"config": {
"packages": {
"yum": {
"mysql": [],
"mysql-server": [],
"httpd": [],
"wordpress": []
}
},
"services": {
"systemd": {
"mysqld": {
"enabled": "true",
"ensureRunning": "true"
},
"httpd": {
"enabled": "true",
"ensureRunning": "true"
}
}
}
}
}
},
"Properties": {
"ImageId": {
"Fn::FindInMap": [
"DistroArch2AMI",
{
"Ref": "LinuxDistribution"
},
{
"Fn::FindInMap": [
"AWSInstanceType2Arch",
{
"Ref": "InstanceType"
},
"Arch"
]
}
]
},
"InstanceType": {
"Ref": "InstanceType"
},
"KeyName": {
"Ref": "KeyName"
},
"UserData": {
"Fn::Base64": {
"Fn::Join": [
"",
[
"#!/bin/bash -v\\n",
"/opt/aws/bin/cfn-init\\n"
]
]
}
}
}
}
},
"Outputs": {
"WebsiteURL": {
"Value": {
"Fn::Join": [
"",
[
"http://",
{
"Fn::GetAtt": [
"WikiDatabase",
"PublicIp"
]
},
"/wordpress"
]
]
},
"Description": "URL for Wordpress wiki"
}
}
}
"""
VALIDATE = """
{
"Description": "AWS CloudFormation Sample Template.",
"Parameters": {
"DBUsername": {
"Type": "String",
"Description": "The WordPress database admin account username",
"Default": "admin",
"MinLength": "1",
"AllowedPattern": "[a-zA-Z][a-zA-Z0-9]*",
"NoEcho": "true",
"MaxLength": "16",
"ConstraintDescription": "must begin with a letter and..."
},
"LinuxDistribution": {
"Default": "F17",
"Type": "String",
"Description": "Distribution of choice",
"AllowedValues": [
"F18",
"F17",
"U10",
"RHEL-6.1",
"RHEL-6.2",
"RHEL-6.3"
]
},
"DBRootPassword": {
"Type": "String",
"Description": "Root password for MySQL",
"Default": "admin",
"MinLength": "1",
"AllowedPattern": "[a-zA-Z0-9]*",
"NoEcho": "true",
"MaxLength": "41",
"ConstraintDescription": "must contain only alphanumeric characters."
},
"KeyName": {
"Type": "String",
"Description": "Name of an EC2 Key Pair to enable SSH access to the instances"
},
"DBName": {
"Type": "String",
"Description": "The WordPress database name",
"Default": "wordpress",
"MinLength": "1",
"AllowedPattern": "[a-zA-Z][a-zA-Z0-9]*",
"MaxLength": "64",
"ConstraintDescription": "must begin with a letter and..."
},
"DBPassword": {
"Type": "String",
"Description": "The WordPress database admin account password",
"Default": "admin",
"MinLength": "1",
"AllowedPattern": "[a-zA-Z0-9]*",
"NoEcho": "true",
"MaxLength": "41",
"ConstraintDescription": "must contain only alphanumeric characters."
},
"InstanceType": {
"Default": "m1.small",
"Type": "String",
"ConstraintDescription": "must be a valid EC2 instance type.",
"Description": "WebServer EC2 instance type",
"AllowedValues": [
"m1.tiny",
"m1.small",
"m1.medium",
"m1.large",
"m1.xlarge"
]
},
"Network": {
"Type": "String",
"CustomConstraint": "neutron.network"
}
}
}
"""
ENVIRONMENT = """
parameters:
InstanceType: m1.xsmall
db_password: verybadpass
KeyName: heat_key
"""
SNAPSHOT_CREATE = """
{
"status": "IN_PROGRESS",
"name": "None",
"data": "None",
"creation_time": "2016-02-19T07:25:23.494152",
"status_reason": "None",
"id": "8af90c07-b788-44ee-a8ab-5990197f5e32"
}
"""
class Environment(object):
def __init__(self, data):
self.data = data
class Template(object):
def __init__(self, data, validate):
self.data = data
self.validate = validate
class Snapshot(object):
def __init__(self, data):
self.data = data
def data(TEST):
TEST.stacks = utils.TestDataContainer()
TEST.stack_templates = utils.TestDataContainer()
TEST.stack_environments = utils.TestDataContainer()
TEST.stack_snapshot_create = utils.TestDataContainer()
TEST.stack_snapshot = utils.TestDataContainer()
TEST.resource_types = utils.TestDataContainer()
TEST.heat_resources = utils.TestDataContainer()
TEST.heat_services = utils.TestDataContainer()
# Services
service_1 = services.Service(services.ServiceManager(None), {
"status": "up",
"binary": "heat-engine",
"report_interval": 60,
"engine_id": "2f7b5a9b-c50b-4b01-8248-f89f5fb338d1",
"created_at": "2015-02-06T03:23:32.000000",
"hostname": "mrkanag",
"updated_at": "2015-02-20T09:49:52.000000",
"topic": "engine",
"host": "engine-1",
"deleted_at": None,
"id": "1efd7015-5016-4caa-b5c8-12438af7b100"
})
service_2 = services.Service(services.ServiceManager(None), {
"status": "up",
"binary": "heat-engine",
"report_interval": 60,
"engine_id": "2f7b5a9b-c50b-4b01-8248-f89f5fb338d2",
"created_at": "2015-02-06T03:23:32.000000",
"hostname": "mrkanag",
"updated_at": "2015-02-20T09:49:52.000000",
"topic": "engine",
"host": "engine-2",
"deleted_at": None,
"id": "1efd7015-5016-4caa-b5c8-12438af7b100"
})
TEST.heat_services.add(service_1)
TEST.heat_services.add(service_2)
# Data return by heatclient.
TEST.api_resource_types = utils.TestDataContainer()
for i in range(10):
stack_data = {
"description": "No description",
"links": [{
"href": "http://192.168.1.70:8004/v1/"
"051c727ee67040d6a7b7812708485a97/"
"stacks/stack-test{0}/"
"05b4f39f-ea96-4d91-910c-e758c078a089{0}".format(i),
"rel": "self"
}],
"parameters": {
'DBUsername': '******',
'InstanceType': 'm1.small',
'AWS::StackId': (
'arn:openstack:heat::2ce287:stacks/teststack/88553ec'),
'DBRootPassword': '******',
'AWS::StackName': "teststack{0}".format(i),
'DBPassword': '******',
'AWS::Region': 'ap-southeast-1',
'DBName': u'wordpress'
},
"stack_status_reason": "Stack successfully created",
"stack_name": "stack-test{0}".format(i),
"creation_time": "2013-04-22T00:11:39Z",
"updated_time": "2013-04-22T00:11:39Z",
"stack_status": "CREATE_COMPLETE",
"id": "05b4f39f-ea96-4d91-910c-e758c078a089{0}".format(i)
}
stack = stacks.Stack(stacks.StackManager(None), stack_data)
TEST.stacks.add(stack)
for i in range(10):
snapshot_data = {
"status": "COMPLETE",
"name": 'null',
"data": {
"files": {},
"status": "COMPLETE",
"name": "zhao3",
"tags": ["a", " 123", " b", " 456"],
"stack_user_project_id": "3cba4460875444049a2a7cc5420ccddb",
"environment": {
"encrypted_param_names": [],
"parameter_defaults": {},
"event_sinks": [],
"parameters": {},
"resource_registry": {
"resources": {}
}
},
"template": {
"heat_template_version": "2013-05-23",
"description":
"HOT template for Test.",
"resources": {
"private_subnet": {
"type": "OS::Neutron::Subnet",
"properties": {
"network_id": {"get_resource": "private_net"},
"cidr": "172.16.2.0/24",
"gateway_ip": "172.16.2.1"
}
},
"private_net": {
"type": "OS::Neutron::Net",
"properties": {"name": "private-net"}
}
}
},
"action": "SNAPSHOT",
"project_id": "1acd0026829f4d28bb2eff912d7aad0d",
"id": "70650725-bdbd-419f-b53f-5707767bfe0e",
"resources": {
"private_subnet": {
"status": "COMPLETE",
"name": "private_subnet",
"resource_data": {},
"resource_id": "9c7211b3-31c7-41f6-b92a-442ad3f71ef0",
"action": "SNAPSHOT",
"type": "OS::Neutron::Subnet",
"metadata": {}
},
"private_net": {
"status": "COMPLETE",
"name": "private_net",
"resource_data": {},
"resource_id": "ff4fd287-31b2-4d00-bc96-c409bc1db027",
"action": "SNAPSHOT",
"type": "OS::Neutron::Net",
"metadata": {}
}
}
},
"creation_time": "2016-02-21T04:02:54",
"status_reason": "Stack SNAPSHOT completed successfully",
"id": "01558a3b-ba05-4427-bbb4-1e4ab71cfcad"
}
TEST.stack_snapshot.add(snapshot_data)
TEST.stack_templates.add(Template(TEMPLATE, VALIDATE))
TEST.stack_environments.add(Environment(ENVIRONMENT))
TEST.stack_snapshot_create.add(Snapshot(SNAPSHOT_CREATE))
# Resource types list
r_type_1 = {
"resource_type": "AWS::CloudFormation::Stack",
"attributes": {},
"properties": {
"Parameters": {
"description":
"The set of parameters passed to this nested stack.",
"immutable": False,
"required": False,
"type": "map",
"update_allowed": True},
"TemplateURL": {
"description": "The URL of a template that specifies"
" the stack to be created as a resource.",
"immutable": False,
"required": True,
"type": "string",
"update_allowed": True},
"TimeoutInMinutes": {
"description": "The length of time, in minutes,"
" to wait for the nested stack creation.",
"immutable": False,
"required": False,
"type": "number",
"update_allowed": True}
}
}
r_type_2 = {
"resource_type": "OS::Heat::CloudConfig",
"attributes": {
"config": {
"description": "The config value of the software config."}
},
"properties": {
"cloud_config": {
"description": "Map representing the cloud-config data"
" structure which will be formatted as YAML.",
"immutable": False,
"required": False,
"type": "map",
"update_allowed": False}
}
}
r_types_list = [r_type_1, r_type_2]
for rt in r_types_list:
r_type = resource_types.ResourceType(
resource_types.ResourceTypeManager(None), rt['resource_type'])
TEST.resource_types.add(r_type)
TEST.api_resource_types.add(rt)
# Resources
resource_1 = resources.Resource(resources.ResourceManager(None), {
"logical_resource_id": "my_resource",
"physical_resource_id": "7b5e29b1-c94d-402d-b69c-df9ac6dfc0ce",
"resource_name": "my_resource",
"links": [
{
"href": "http://192.168.1.70:8004/v1/"
"051c727ee67040d6a7b7812708485a97/"
"stacks/%s/%s/resources/my_resource" %
(TEST.stacks.first().stack_name,
TEST.stacks.first().id),
"rel": "self"
},
{
"href": "http://192.168.1.70:8004/v1/"
"051c727ee67040d6a7b7812708485a97/"
"stacks/%s/%s" %
(TEST.stacks.first().stack_name,
TEST.stacks.first().id),
"rel": "stack"
}
],
"attributes": {
"metadata": {}
}
})
TEST.heat_resources.add(resource_1)
| apache-2.0 |
Jumpscale/jumpscale6_core | apps/portalbase/wiki/Help/.macros/wiki/menuadmin_jdoc/1_menuadmin.py | 2 | 1685 |
def main(j, args, params, tags, tasklet):
params.merge(args)
doc = params.doc
tags = params.tags
params.result = ""
# spaces = sorted(j.core.portal.active.getSpaces())
# spacestxt=""
# for item in spaces:
# if item[0] != "_" and item.strip() != "" and item.find("space_system")==-1 and item.find("test")==-1 and item.find("gridlogs")==-1:
# spacestxt += "%s:/%s\n" % (item, item.lower().strip("/"))
C = """
{{menudropdown: name:Doc
Edit:/system/edit?space=$$space&page=$$page
--------------
Logout:/system/login?user_logoff_=1
Access:/system/OverviewAccess?space=$$space
System:/system
--------------
Doc Core:/doc_jumpscale_core
Doc Devel:/doc_jumpscale_devel
Doc Grid:/doc_jumpscale_grid
Doc Howto:/doc_jumpscale_howto
Doc Portal:/doc_jumpscale_portal
"""
# C+=spacestxt
C += '}}'
# Entries that were previously inside the dropdown:
#Reload:javascript:$.ajax({'url': '/system/ReloadSpace?name=$$space'}).done(function(){location.reload()});void(0);
#ShowLogs:/system/ShowSpaceAccessLog?space=$$space
#ResetLogs:/system/ResetAccessLog?space=$$space
#Spaces:/system/Spaces
#Pages:/system/Pages?space=$$space
#ReloadAll:javascript:(function loadAll() {$.ajax({'url': '/system/ReloadApplication'});(function checkSpaceIsUp(trials) {if (trials <= 0) return;setTimeout(function() {$.ajax({'url': '/system/'}).done(function(){location.reload();console.log('Reloaded');}).error(function(){checkSpaceIsUp(trials - 1)});}, 1000);})(10);})();void(0);
if j.core.portal.active.isAdminFromCTX(params.requestContext):
params.result = C
params.result = (params.result, doc)
return params
def match(j, args, params, tags, tasklet):
return True
| bsd-2-clause |
anthonypdawson/LazyLibrarian | lazylibrarian/providers.py | 1 | 4430 | import time, threading, urllib, urllib2, re
from xml.etree import ElementTree
import lazylibrarian
from lazylibrarian import logger, SimpleCache
def NewzNab(book=None):
HOST = lazylibrarian.NEWZNAB_HOST
results = []
logger.info('Searching for %s.' % book['searchterm'])
if lazylibrarian.EBOOK_TYPE is None:
params = {
"t": "book",
"apikey": lazylibrarian.NEWZNAB_API,
#"cat": 7020,
"author": book['searchterm']
}
else:
params = {
"t": "search",
"apikey": lazylibrarian.NEWZNAB_API,
"cat": 7020,
"q": book['searchterm'],
"extended": 1,
}
if not str(HOST).startswith("http"):
HOST = 'http://' + HOST
URL = HOST + '/api?' + urllib.urlencode(params)
try:
request = urllib2.Request(URL)
request.add_header('User-Agent', 'lazylibrary/0.0 +https://github.com/LibrarianMike/LazyLibrarian')
opener = urllib2.build_opener(SimpleCache.CacheHandler(".urllib2cache"), SimpleCache.ThrottlingProcessor(5))
resp = opener.open(request)
try:
data = ElementTree.parse(resp)
except (urllib2.URLError, IOError, EOFError), e:
logger.warn('Error fetching data from %s: %s' % (lazylibrarian.NEWZNAB_HOST, e))
data = None
except Exception, e:
logger.error("Error 403 openning url")
data = None
if data:
# Log the request URL to help debug API responses
logger.debug(u'Parsing results from <a href="%s">%s</a>' % (URL, lazylibrarian.NEWZNAB_HOST))
rootxml = data.getroot()
resultxml = rootxml.getiterator('item')
nzbcount = 0
for nzb in resultxml:
try:
nzbcount += 1
results.append({
'bookid': book['bookid'],
'nzbprov': "NewzNab",
'nzbtitle': nzb[0].text,
'nzburl': nzb[2].text,
'nzbdate': nzb[4].text,
'nzbsize': nzb[7].attrib.get('length')
})
except IndexError:
logger.debug('No results')
if nzbcount:
logger.debug('Found %s nzb for: %s' % (nzbcount, book['searchterm']))
else:
logger.debug(u'Newznab returned 0 results for: ' + book['searchterm'] + '. Adding book to queue.')
return results
def NZBMatrix(book=None):
results = []
if lazylibrarian.EBOOK_TYPE is None or lazylibrarian.EBOOK_TYPE == "":
params = {
"page": "download",
"username": lazylibrarian.NZBMATRIX_USER,
"apikey": lazylibrarian.NZBMATRIX_API,
"subcat": 36,
"age": lazylibrarian.USENET_RETENTION,
"term": book['searchterm']
}
else:
params = {
"page": "download",
"username": lazylibrarian.NZBMATRIX_USER,
"apikey": lazylibrarian.NZBMATRIX_API,
"subcat": 36,
"age": lazylibrarian.USENET_RETENTION,
"term": book['searchterm']
}
logger.debug('Searching for: ' + book['searchterm'])
URL = "http://rss.nzbmatrix.com/rss.php?" + urllib.urlencode(params)
# Log the request URL to help debug API responses
logger.debug(u'Parsing results from <a href="%s">NZBMatrix</a>' % (URL))
try:
data = ElementTree.parse(urllib2.urlopen(URL, timeout=30))
except (urllib2.URLError, IOError, EOFError), e:
logger.warn('Error fetching data from NZBMatrix: %s' % e)
data = None
if data:
rootxml = data.getroot()
resultxml = rootxml.getiterator('item')
nzbcount = 0
for nzb in resultxml:
try:
results.append({
'bookid': book['bookid'],
'nzbprov': "NZBMatrix",
'nzbtitle': nzb[0].text,
'nzburl': nzb[2].text,
'nzbsize': nzb[7].attrib.get('length')
})
nzbcount += 1
except IndexError:
logger.debug('No results')
if nzbcount:
logger.debug('Found %s nzb for: %s' % (nzbcount, book['searchterm']))
else:
logger.debug('NZBMatrix returned 0 results for: ' + book['searchterm'] + '. Adding book to queue.')
return results
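# --- Hedged usage sketch (not part of the upstream module) ------------------
# Both providers expect a plain dict carrying at least 'bookid' and
# 'searchterm' keys, as read from the append() calls above. The host and API
# key below are illustrative assumptions, not real settings.
def _provider_demo():
    lazylibrarian.NEWZNAB_HOST = 'nzb.example.com'  # hypothetical host
    lazylibrarian.NEWZNAB_API = 'myapikey'          # hypothetical API key
    book = {'bookid': '42', 'searchterm': 'Dickens Great Expectations'}
    for nzb in NewzNab(book=book):
        logger.info('%s -> %s' % (nzb['nzbtitle'], nzb['nzburl']))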
| gpl-3.0 |
geodrinx/gearthview | ext-libs/twisted/internet/_utilspy3.py | 4 | 1916 | # -*- test-case-name: twisted.internet.test.test_utilspy3 -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Utility methods, ported to Python 3.
"""
from __future__ import division, absolute_import
import sys, warnings
from functools import wraps
from twisted.python.compat import reraise
from twisted.internet import defer
def _resetWarningFilters(passthrough, addedFilters):
for f in addedFilters:
try:
warnings.filters.remove(f)
except ValueError:
pass
return passthrough
def runWithWarningsSuppressed(suppressedWarnings, f, *a, **kw):
"""Run the function C{f}, but with some warnings suppressed.
@param suppressedWarnings: A list of arguments to pass to filterwarnings.
Must be a sequence of 2-tuples (args, kwargs).
@param f: A callable, followed by its arguments and keyword arguments
"""
for args, kwargs in suppressedWarnings:
warnings.filterwarnings(*args, **kwargs)
addedFilters = warnings.filters[:len(suppressedWarnings)]
try:
result = f(*a, **kw)
except:
exc_info = sys.exc_info()
_resetWarningFilters(None, addedFilters)
reraise(exc_info[1], exc_info[2])
else:
if isinstance(result, defer.Deferred):
result.addBoth(_resetWarningFilters, addedFilters)
else:
_resetWarningFilters(None, addedFilters)
return result
def suppressWarnings(f, *suppressedWarnings):
"""
Wrap C{f} in a callable which suppresses the indicated warnings before
invoking C{f} and unsuppresses them afterwards. If f returns a Deferred,
warnings will remain suppressed until the Deferred fires.
"""
@wraps(f)
def warningSuppressingWrapper(*a, **kw):
return runWithWarningsSuppressed(suppressedWarnings, f, *a, **kw)
return warningSuppressingWrapper
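# --- Hedged usage sketch (not part of the upstream module) ------------------
# suppressWarnings() wraps a callable so that the given filterwarnings()
# argument tuples are applied around each call; the noisy() function below is
# invented purely for illustration.
def _suppressWarningsDemo():
    def noisy():
        warnings.warn("old API", DeprecationWarning)
        return 42
    quiet = suppressWarnings(
        noisy, (('ignore',), {'category': DeprecationWarning}))
    return quiet()  # returns 42 without emitting the DeprecationWarning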
| gpl-3.0 |
maestro-hybrid-cloud/horizon | openstack_dashboard/dashboards/identity/domains/views.py | 55 | 3604 | # Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import messages
from horizon import tables
from horizon import workflows
from openstack_dashboard import api
from openstack_dashboard import policy
from openstack_dashboard.dashboards.identity.domains import constants
from openstack_dashboard.dashboards.identity.domains \
import tables as project_tables
from openstack_dashboard.dashboards.identity.domains \
import workflows as project_workflows
class IndexView(tables.DataTableView):
table_class = project_tables.DomainsTable
template_name = constants.DOMAINS_INDEX_VIEW_TEMPLATE
page_title = _("Domains")
def get_data(self):
domains = []
domain_context = self.request.session.get('domain_context', None)
if policy.check((("identity", "identity:list_domains"),),
self.request):
try:
if domain_context:
domain = api.keystone.domain_get(self.request,
domain_context)
domains.append(domain)
else:
domains = api.keystone.domain_list(self.request)
except Exception:
exceptions.handle(self.request,
_('Unable to retrieve domain list.'))
elif policy.check((("identity", "identity:get_domain"),),
self.request):
try:
domain = api.keystone.domain_get(self.request,
self.request.user.domain_id)
domains.append(domain)
except Exception:
exceptions.handle(self.request,
_('Unable to retrieve domain information.'))
else:
msg = _("Insufficient privilege level to view domain information.")
messages.info(self.request, msg)
return domains
class CreateDomainView(workflows.WorkflowView):
workflow_class = project_workflows.CreateDomain
class UpdateDomainView(workflows.WorkflowView):
workflow_class = project_workflows.UpdateDomain
def get_initial(self):
initial = super(UpdateDomainView, self).get_initial()
domain_id = self.kwargs['domain_id']
initial['domain_id'] = domain_id
try:
# get initial domain info
domain_info = api.keystone.domain_get(self.request,
domain_id)
for field in constants.DOMAIN_INFO_FIELDS:
initial[field] = getattr(domain_info, field, None)
except Exception:
exceptions.handle(self.request,
_('Unable to retrieve domain details.'),
redirect=reverse(constants.DOMAINS_INDEX_URL))
return initial
| apache-2.0 |
abhattad4/Digi-Menu | digimenu2/tests/foreign_object/tests.py | 113 | 18071 | import datetime
from operator import attrgetter
from django import forms
from django.core.exceptions import FieldError
from django.test import TestCase, skipUnlessDBFeature
from django.utils import translation
from .models import (
Article, ArticleIdea, ArticleTag, ArticleTranslation, Country, Friendship,
Group, Membership, NewsArticle, Person,
)
# Note that these tests are testing internal implementation details.
# ForeignObject is not part of public API.
class MultiColumnFKTests(TestCase):
def setUp(self):
# Creating countries
self.usa = Country.objects.create(name="United States of America")
self.soviet_union = Country.objects.create(name="Soviet Union")
Person()
# Creating People
self.bob = Person()
self.bob.name = 'Bob'
self.bob.person_country = self.usa
self.bob.save()
self.jim = Person.objects.create(name='Jim', person_country=self.usa)
self.george = Person.objects.create(name='George', person_country=self.usa)
self.jane = Person.objects.create(name='Jane', person_country=self.soviet_union)
self.mark = Person.objects.create(name='Mark', person_country=self.soviet_union)
self.sam = Person.objects.create(name='Sam', person_country=self.soviet_union)
# Creating Groups
self.kgb = Group.objects.create(name='KGB', group_country=self.soviet_union)
self.cia = Group.objects.create(name='CIA', group_country=self.usa)
self.republican = Group.objects.create(name='Republican', group_country=self.usa)
self.democrat = Group.objects.create(name='Democrat', group_country=self.usa)
def test_get_succeeds_on_multicolumn_match(self):
# Membership objects have access to their related Person if both
# country_ids match between them
membership = Membership.objects.create(
membership_country_id=self.usa.id, person_id=self.bob.id, group_id=self.cia.id)
person = membership.person
self.assertEqual((person.id, person.name), (self.bob.id, "Bob"))
def test_get_fails_on_multicolumn_mismatch(self):
# Membership objects raise DoesNotExist when there is no
# Person with the same id and country_id
membership = Membership.objects.create(
membership_country_id=self.usa.id, person_id=self.jane.id, group_id=self.cia.id)
self.assertRaises(Person.DoesNotExist, getattr, membership, 'person')
def test_reverse_query_returns_correct_result(self):
# Creating a valid membership because it has the same country as the person
Membership.objects.create(
membership_country_id=self.usa.id, person_id=self.bob.id, group_id=self.cia.id)
# Creating an invalid membership because it has a different country than the person
Membership.objects.create(
membership_country_id=self.soviet_union.id, person_id=self.bob.id,
group_id=self.republican.id)
self.assertQuerysetEqual(
self.bob.membership_set.all(), [
self.cia.id
],
attrgetter("group_id")
)
def test_query_filters_correctly(self):
# Creating two valid memberships
Membership.objects.create(
membership_country_id=self.usa.id, person_id=self.bob.id, group_id=self.cia.id)
Membership.objects.create(
membership_country_id=self.usa.id, person_id=self.jim.id,
group_id=self.cia.id)
# Creating an invalid membership
Membership.objects.create(membership_country_id=self.soviet_union.id,
person_id=self.george.id, group_id=self.cia.id)
self.assertQuerysetEqual(
Membership.objects.filter(person__name__contains='o'), [
self.bob.id
],
attrgetter("person_id")
)
def test_reverse_query_filters_correctly(self):
timemark = datetime.datetime.utcnow()
timedelta = datetime.timedelta(days=1)
# Creating two valid memberships
Membership.objects.create(
membership_country_id=self.usa.id, person_id=self.bob.id,
group_id=self.cia.id, date_joined=timemark - timedelta)
Membership.objects.create(
membership_country_id=self.usa.id, person_id=self.jim.id,
group_id=self.cia.id, date_joined=timemark + timedelta)
# Creating an invalid membership
Membership.objects.create(
membership_country_id=self.soviet_union.id, person_id=self.george.id,
group_id=self.cia.id, date_joined=timemark + timedelta)
self.assertQuerysetEqual(
Person.objects.filter(membership__date_joined__gte=timemark), [
'Jim'
],
attrgetter('name')
)
def test_forward_in_lookup_filters_correctly(self):
Membership.objects.create(membership_country_id=self.usa.id, person_id=self.bob.id,
group_id=self.cia.id)
Membership.objects.create(membership_country_id=self.usa.id, person_id=self.jim.id,
group_id=self.cia.id)
# Creating an invalid membership
Membership.objects.create(
membership_country_id=self.soviet_union.id, person_id=self.george.id,
group_id=self.cia.id)
self.assertQuerysetEqual(
Membership.objects.filter(person__in=[self.george, self.jim]), [
self.jim.id,
],
attrgetter('person_id')
)
self.assertQuerysetEqual(
Membership.objects.filter(person__in=Person.objects.filter(name='Jim')), [
self.jim.id,
],
attrgetter('person_id')
)
def test_double_nested_query(self):
m1 = Membership.objects.create(membership_country_id=self.usa.id, person_id=self.bob.id,
group_id=self.cia.id)
m2 = Membership.objects.create(membership_country_id=self.usa.id, person_id=self.jim.id,
group_id=self.cia.id)
Friendship.objects.create(from_friend_country_id=self.usa.id, from_friend_id=self.bob.id,
to_friend_country_id=self.usa.id, to_friend_id=self.jim.id)
self.assertQuerysetEqual(Membership.objects.filter(
person__in=Person.objects.filter(
from_friend__in=Friendship.objects.filter(
to_friend__in=Person.objects.all()))),
[m1], lambda x: x)
self.assertQuerysetEqual(Membership.objects.exclude(
person__in=Person.objects.filter(
from_friend__in=Friendship.objects.filter(
to_friend__in=Person.objects.all()))),
[m2], lambda x: x)
def test_select_related_foreignkey_forward_works(self):
Membership.objects.create(membership_country=self.usa, person=self.bob, group=self.cia)
Membership.objects.create(membership_country=self.usa, person=self.jim, group=self.democrat)
with self.assertNumQueries(1):
people = [m.person for m in Membership.objects.select_related('person').order_by('pk')]
normal_people = [m.person for m in Membership.objects.all().order_by('pk')]
self.assertEqual(people, normal_people)
def test_prefetch_foreignkey_forward_works(self):
Membership.objects.create(membership_country=self.usa, person=self.bob, group=self.cia)
Membership.objects.create(membership_country=self.usa, person=self.jim, group=self.democrat)
with self.assertNumQueries(2):
people = [
m.person for m in Membership.objects.prefetch_related('person').order_by('pk')]
normal_people = [m.person for m in Membership.objects.order_by('pk')]
self.assertEqual(people, normal_people)
def test_prefetch_foreignkey_reverse_works(self):
Membership.objects.create(membership_country=self.usa, person=self.bob, group=self.cia)
Membership.objects.create(membership_country=self.usa, person=self.jim, group=self.democrat)
with self.assertNumQueries(2):
membership_sets = [
list(p.membership_set.all())
for p in Person.objects.prefetch_related('membership_set').order_by('pk')]
normal_membership_sets = [list(p.membership_set.all())
for p in Person.objects.order_by('pk')]
self.assertEqual(membership_sets, normal_membership_sets)
def test_m2m_through_forward_returns_valid_members(self):
# We start out by making sure that the Group 'CIA' has no members.
self.assertQuerysetEqual(
self.cia.members.all(),
[]
)
Membership.objects.create(membership_country=self.usa, person=self.bob, group=self.cia)
Membership.objects.create(membership_country=self.usa, person=self.jim, group=self.cia)
# Let's check to make sure that it worked. Bob and Jim should be members of the CIA.
self.assertQuerysetEqual(
self.cia.members.all(), [
'Bob',
'Jim'
], attrgetter("name")
)
def test_m2m_through_reverse_returns_valid_members(self):
# We start out by making sure that Bob is in no groups.
self.assertQuerysetEqual(
self.bob.groups.all(),
[]
)
Membership.objects.create(membership_country=self.usa, person=self.bob, group=self.cia)
Membership.objects.create(membership_country=self.usa, person=self.bob,
group=self.republican)
# Bob should be in the CIA and a Republican
self.assertQuerysetEqual(
self.bob.groups.all(), [
'CIA',
'Republican'
], attrgetter("name")
)
def test_m2m_through_forward_ignores_invalid_members(self):
# We start out by making sure that the Group 'CIA' has no members.
self.assertQuerysetEqual(
self.cia.members.all(),
[]
)
# Something adds jane to group CIA but Jane is in Soviet Union which isn't CIA's country
Membership.objects.create(membership_country=self.usa, person=self.jane, group=self.cia)
# There should still be no members in CIA
self.assertQuerysetEqual(
self.cia.members.all(),
[]
)
def test_m2m_through_reverse_ignores_invalid_members(self):
# We start out by making sure that Jane has no groups.
self.assertQuerysetEqual(
self.jane.groups.all(),
[]
)
# Something adds jane to group CIA but Jane is in Soviet Union which isn't CIA's country
Membership.objects.create(membership_country=self.usa, person=self.jane, group=self.cia)
# Jane should still not be in any groups
self.assertQuerysetEqual(
self.jane.groups.all(),
[]
)
def test_m2m_through_on_self_works(self):
self.assertQuerysetEqual(
self.jane.friends.all(),
[]
)
Friendship.objects.create(
from_friend_country=self.jane.person_country, from_friend=self.jane,
to_friend_country=self.george.person_country, to_friend=self.george)
self.assertQuerysetEqual(
self.jane.friends.all(),
['George'], attrgetter("name")
)
def test_m2m_through_on_self_ignores_mismatch_columns(self):
self.assertQuerysetEqual(self.jane.friends.all(), [])
# Note that we use ids instead of instances. This is because instances on ForeignObject
# properties will set all related fields off of the given instance
Friendship.objects.create(
from_friend_id=self.jane.id, to_friend_id=self.george.id,
to_friend_country_id=self.jane.person_country_id,
from_friend_country_id=self.george.person_country_id)
self.assertQuerysetEqual(self.jane.friends.all(), [])
def test_prefetch_related_m2m_forward_works(self):
Membership.objects.create(membership_country=self.usa, person=self.bob, group=self.cia)
Membership.objects.create(membership_country=self.usa, person=self.jim, group=self.democrat)
with self.assertNumQueries(2):
members_lists = [list(g.members.all())
for g in Group.objects.prefetch_related('members')]
normal_members_lists = [list(g.members.all()) for g in Group.objects.all()]
self.assertEqual(members_lists, normal_members_lists)
def test_prefetch_related_m2m_reverse_works(self):
Membership.objects.create(membership_country=self.usa, person=self.bob, group=self.cia)
Membership.objects.create(membership_country=self.usa, person=self.jim, group=self.democrat)
with self.assertNumQueries(2):
groups_lists = [list(p.groups.all()) for p in Person.objects.prefetch_related('groups')]
normal_groups_lists = [list(p.groups.all()) for p in Person.objects.all()]
self.assertEqual(groups_lists, normal_groups_lists)
@translation.override('fi')
def test_translations(self):
a1 = Article.objects.create(pub_date=datetime.date.today())
at1_fi = ArticleTranslation(article=a1, lang='fi', title='Otsikko', body='Diipadaapa')
at1_fi.save()
at2_en = ArticleTranslation(article=a1, lang='en', title='Title', body='Lalalalala')
at2_en.save()
with self.assertNumQueries(1):
fetched = Article.objects.select_related('active_translation').get(
active_translation__title='Otsikko')
self.assertEqual(fetched.active_translation.title, 'Otsikko')
a2 = Article.objects.create(pub_date=datetime.date.today())
at2_fi = ArticleTranslation(article=a2, lang='fi', title='Atsikko', body='Diipadaapa',
abstract='dipad')
at2_fi.save()
a3 = Article.objects.create(pub_date=datetime.date.today())
at3_en = ArticleTranslation(article=a3, lang='en', title='A title', body='lalalalala',
abstract='lala')
at3_en.save()
# Test model initialization with active_translation field.
a3 = Article(id=a3.id, pub_date=a3.pub_date, active_translation=at3_en)
a3.save()
self.assertEqual(
list(Article.objects.filter(active_translation__abstract=None)),
[a1, a3])
self.assertEqual(
list(Article.objects.filter(active_translation__abstract=None,
active_translation__pk__isnull=False)),
[a1])
with translation.override('en'):
self.assertEqual(
list(Article.objects.filter(active_translation__abstract=None)),
[a1, a2])
def test_foreign_key_raises_informative_does_not_exist(self):
referrer = ArticleTranslation()
with self.assertRaisesMessage(Article.DoesNotExist, 'ArticleTranslation has no article'):
referrer.article
def test_foreign_key_related_query_name(self):
a1 = Article.objects.create(pub_date=datetime.date.today())
ArticleTag.objects.create(article=a1, name="foo")
self.assertEqual(Article.objects.filter(tag__name="foo").count(), 1)
self.assertEqual(Article.objects.filter(tag__name="bar").count(), 0)
with self.assertRaises(FieldError):
Article.objects.filter(tags__name="foo")
def test_many_to_many_related_query_name(self):
a1 = Article.objects.create(pub_date=datetime.date.today())
i1 = ArticleIdea.objects.create(name="idea1")
a1.ideas.add(i1)
self.assertEqual(Article.objects.filter(idea_things__name="idea1").count(), 1)
self.assertEqual(Article.objects.filter(idea_things__name="idea2").count(), 0)
with self.assertRaises(FieldError):
Article.objects.filter(ideas__name="idea1")
@translation.override('fi')
def test_inheritance(self):
na = NewsArticle.objects.create(pub_date=datetime.date.today())
ArticleTranslation.objects.create(
article=na, lang="fi", title="foo", body="bar")
self.assertQuerysetEqual(
NewsArticle.objects.select_related('active_translation'),
[na], lambda x: x
)
with self.assertNumQueries(1):
self.assertEqual(
NewsArticle.objects.select_related(
'active_translation')[0].active_translation.title,
"foo")
@skipUnlessDBFeature('has_bulk_insert')
def test_batch_create_foreign_object(self):
""" See: https://code.djangoproject.com/ticket/21566 """
objs = [Person(name="abcd_%s" % i, person_country=self.usa) for i in range(0, 5)]
Person.objects.bulk_create(objs, 10)
class FormsTests(TestCase):
# ForeignObjects should not have any form fields, currently the user needs
# to manually deal with the foreignobject relation.
class ArticleForm(forms.ModelForm):
class Meta:
model = Article
fields = '__all__'
def test_foreign_object_form(self):
# A very crude test checking that the non-concrete fields do not get form fields.
form = FormsTests.ArticleForm()
self.assertIn('id_pub_date', form.as_table())
self.assertNotIn('active_translation', form.as_table())
form = FormsTests.ArticleForm(data={'pub_date': str(datetime.date.today())})
self.assertTrue(form.is_valid())
a = form.save()
self.assertEqual(a.pub_date, datetime.date.today())
form = FormsTests.ArticleForm(instance=a, data={'pub_date': '2013-01-01'})
a2 = form.save()
self.assertEqual(a.pk, a2.pk)
self.assertEqual(a2.pub_date, datetime.date(2013, 1, 1))
| bsd-3-clause |
geminy/aidear | oss/qt/qt-everywhere-opensource-src-5.9.0/qtwebengine/src/3rdparty/chromium/v8/tools/testrunner/objects/testcase.py | 5 | 4522 | # Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from . import output
class TestCase(object):
def __init__(self, suite, path, variant=None, flags=None,
override_shell=None):
self.suite = suite # TestSuite object
self.path = path # string, e.g. 'div-mod', 'test-api/foo'
self.flags = flags or [] # list of strings, flags specific to this test
self.variant = variant # name of the used testing variant
self.override_shell = override_shell
self.outcomes = frozenset([])
self.output = None
self.id = None # int, used to map result back to TestCase instance
self.duration = None # assigned during execution
self.run = 1 # The nth time this test is executed.
def CopyAddingFlags(self, variant, flags):
copy = TestCase(self.suite, self.path, variant, self.flags + flags,
self.override_shell)
copy.outcomes = self.outcomes
return copy
def PackTask(self):
"""
Extracts those parts of this object that are required to run the test
and returns them as a JSON serializable object.
"""
assert self.id is not None
return [self.suitename(), self.path, self.variant, self.flags,
self.override_shell, list(self.outcomes or []),
self.id]
@staticmethod
def UnpackTask(task):
"""Creates a new TestCase object based on packed task data."""
# For the order of the fields, refer to PackTask() above.
test = TestCase(str(task[0]), task[1], task[2], task[3], task[4])
test.outcomes = frozenset(task[5])
test.id = task[6]
test.run = 1
return test
def SetSuiteObject(self, suites):
self.suite = suites[self.suite]
def PackResult(self):
"""Serializes the output of the TestCase after it has run."""
self.suite.StripOutputForTransmit(self)
return [self.id, self.output.Pack(), self.duration]
def MergeResult(self, result):
"""Applies the contents of a Result to this object."""
assert result[0] == self.id
self.output = output.Output.Unpack(result[1])
self.duration = result[2]
def suitename(self):
return self.suite.name
def GetLabel(self):
return self.suitename() + "/" + self.suite.CommonTestName(self)
def shell(self):
if self.override_shell:
return self.override_shell
return self.suite.shell()
def __getstate__(self):
"""Representation to pickle test cases.
The original suite won't be sent beyond process boundaries. Instead
send the name only and retrieve a process-local suite later.
"""
return dict(self.__dict__, suite=self.suite.name)
def __cmp__(self, other):
# Make sure that test cases are sorted correctly if sorted without
# key function. But using a key function is preferred for speed.
return cmp(
(self.suite.name, self.path, self.flags),
(other.suite.name, other.path, other.flags),
)
def __str__(self):
return "[%s/%s %s]" % (self.suite.name, self.path, self.flags)
| gpl-3.0 |
uglyboxer/linear_neuron | net-p3/lib/python3.5/site-packages/_pytest/helpconfig.py | 180 | 5120 | """ version info, help messages, tracing configuration. """
import py
import pytest
import os, sys
def pytest_addoption(parser):
group = parser.getgroup('debugconfig')
group.addoption('--version', action="store_true",
help="display pytest lib version and import information.")
group._addoption("-h", "--help", action="store_true", dest="help",
help="show help message and configuration info")
group._addoption('-p', action="append", dest="plugins", default = [],
metavar="name",
help="early-load given plugin (multi-allowed). "
"To avoid loading of plugins, use the `no:` prefix, e.g. "
"`no:doctest`.")
group.addoption('--traceconfig', '--trace-config',
action="store_true", default=False,
help="trace considerations of conftest.py files."),
group.addoption('--debug',
action="store_true", dest="debug", default=False,
help="store internal tracing debug information in 'pytestdebug.log'.")
@pytest.hookimpl(hookwrapper=True)
def pytest_cmdline_parse():
outcome = yield
config = outcome.get_result()
if config.option.debug:
path = os.path.abspath("pytestdebug.log")
debugfile = open(path, 'w')
debugfile.write("versions pytest-%s, py-%s, "
"python-%s\ncwd=%s\nargs=%s\n\n" %(
pytest.__version__, py.__version__,
".".join(map(str, sys.version_info)),
os.getcwd(), config._origargs))
config.trace.root.setwriter(debugfile.write)
undo_tracing = config.pluginmanager.enable_tracing()
sys.stderr.write("writing pytestdebug information to %s\n" % path)
def unset_tracing():
debugfile.close()
sys.stderr.write("wrote pytestdebug information to %s\n" %
debugfile.name)
config.trace.root.setwriter(None)
undo_tracing()
config.add_cleanup(unset_tracing)
def pytest_cmdline_main(config):
if config.option.version:
p = py.path.local(pytest.__file__)
sys.stderr.write("This is pytest version %s, imported from %s\n" %
(pytest.__version__, p))
plugininfo = getpluginversioninfo(config)
if plugininfo:
for line in plugininfo:
sys.stderr.write(line + "\n")
return 0
elif config.option.help:
config._do_configure()
showhelp(config)
config._ensure_unconfigure()
return 0
def showhelp(config):
reporter = config.pluginmanager.get_plugin('terminalreporter')
tw = reporter._tw
tw.write(config._parser.optparser.format_help())
tw.line()
tw.line()
#tw.sep( "=", "config file settings")
tw.line("[pytest] ini-options in the next "
"pytest.ini|tox.ini|setup.cfg file:")
tw.line()
for name in config._parser._ininames:
help, type, default = config._parser._inidict[name]
if type is None:
type = "string"
spec = "%s (%s)" % (name, type)
line = " %-24s %s" %(spec, help)
tw.line(line[:tw.fullwidth])
tw.line()
tw.line("environment variables:")
vars = [
("PYTEST_ADDOPTS", "extra command line options"),
("PYTEST_PLUGINS", "comma-separated plugins to load during startup"),
("PYTEST_DEBUG", "set to enable debug tracing of pytest's internals")
]
for name, help in vars:
tw.line(" %-24s %s" % (name, help))
tw.line()
tw.line()
tw.line("to see available markers type: py.test --markers")
tw.line("to see available fixtures type: py.test --fixtures")
tw.line("(shown according to specified file_or_dir or current dir "
"if not specified)")
for warningreport in reporter.stats.get('warnings', []):
tw.line("warning : " + warningreport.message, red=True)
return
conftest_options = [
('pytest_plugins', 'list of plugin names to load'),
]
def getpluginversioninfo(config):
lines = []
plugininfo = config.pluginmanager.list_plugin_distinfo()
if plugininfo:
lines.append("setuptools registered plugins:")
for plugin, dist in plugininfo:
loc = getattr(plugin, '__file__', repr(plugin))
content = "%s-%s at %s" % (dist.project_name, dist.version, loc)
lines.append(" " + content)
return lines
def pytest_report_header(config):
lines = []
if config.option.debug or config.option.traceconfig:
lines.append("using: pytest-%s pylib-%s" %
(pytest.__version__,py.__version__))
verinfo = getpluginversioninfo(config)
if verinfo:
lines.extend(verinfo)
if config.option.traceconfig:
lines.append("active plugins:")
items = config.pluginmanager.list_name_plugin()
for name, plugin in items:
if hasattr(plugin, '__file__'):
r = plugin.__file__
else:
r = repr(plugin)
lines.append(" %-20s: %s" %(name, r))
return lines
| mit |
anu7495/airmozilla | airmozilla/manage/tests/test_helpers.py | 5 | 2690 | import datetime
import time
import jinja2
from nose.tools import ok_, eq_
from django.test import TestCase
from airmozilla.main.models import Event
from airmozilla.manage.helpers import (
almost_equal,
event_status_to_css_label,
format_message,
formatduration,
)
class TestAlmostEqual(TestCase):
def test_almost_equal(self):
date1 = datetime.datetime.now()
time.sleep(0.001)
date2 = datetime.datetime.now()
assert date1 != date2
ok_(almost_equal(date1, date2))
ok_(almost_equal(date2, date1))
def test_almost_equal_different_days(self):
date1 = date2 = datetime.datetime.now()
date2 += datetime.timedelta(days=1)
ok_(not almost_equal(date1, date2))
ok_(not almost_equal(date2, date1))
def test_not_equal_but_close(self):
date1 = date2 = datetime.datetime.now()
date2 += datetime.timedelta(seconds=1)
ok_(not almost_equal(date1, date2))
ok_(not almost_equal(date2, date1))
class MiscTests(TestCase):
def test_event_status_to_css_label(self):
label = event_status_to_css_label(Event.STATUS_REMOVED)
eq_(label, 'label-danger')
label = event_status_to_css_label(Event.STATUS_INITIATED)
eq_(label, 'label-default')
label = event_status_to_css_label(Event.STATUS_SCHEDULED)
eq_(label, 'label-success')
label = event_status_to_css_label(Event.STATUS_PENDING)
eq_(label, 'label-primary')
def test_format_message(self):
result = format_message('bla')
eq_(result, 'bla')
ok_(not isinstance(result, jinja2.Markup))
# or it's an object
class M(object):
message = 'ble'
m = M()
eq_(format_message(m), 'ble')
# or a message containing a markdown style relative link
result = format_message("Go [to](/page.html)")
eq_(
result,
'Go <a href="/page.html" class="message-inline">to</a>'
)
ok_(isinstance(result, jinja2.Markup))
# or if it contains a balanced <code> tag
result = format_message("<code>Code</code>")
eq_(
result,
'<code>Code</code>'
)
ok_(isinstance(result, jinja2.Markup))
def test_formatduration(self):
output = formatduration(10)
eq_(output, '10s')
output = formatduration(60)
eq_(output, u'1m\xa00s')
output = formatduration(70)
eq_(output, u'1m\xa010s')
output = formatduration(60 * 60)
eq_(output, u'1h\xa00m\xa00s')
output = formatduration(60 * 60 + 61)
eq_(output, u'1h\xa01m\xa01s')
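# --- Hedged reference sketch (not part of the upstream test module) ---------
# An implementation consistent with the expectations above; the real helper
# lives in airmozilla.manage.helpers. Note the u'\xa0' non-breaking-space
# separators the assertions rely on.
def _formatduration_sketch(seconds):
    hours, rem = divmod(seconds, 3600)
    minutes, secs = divmod(rem, 60)
    if hours:
        return u'%dh\xa0%dm\xa0%ds' % (hours, minutes, secs)
    if minutes:
        return u'%dm\xa0%ds' % (minutes, secs)
    return u'%ds' % secs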
| bsd-3-clause |
DigiThinkIT/stem | stem/response/authchallenge.py | 5 | 1910 | # Copyright 2012-2014, Damian Johnson and The Tor Project
# See LICENSE for licensing information
import binascii
import stem.response
import stem.socket
import stem.util.str_tools
import stem.util.tor_tools
class AuthChallengeResponse(stem.response.ControlMessage):
"""
AUTHCHALLENGE query response.
:var str server_hash: server hash provided by tor
:var str server_nonce: server nonce provided by tor
"""
def _parse_message(self):
# Example:
# 250 AUTHCHALLENGE SERVERHASH=680A73C9836C4F557314EA1C4EDE54C285DB9DC89C83627401AEF9D7D27A95D5 SERVERNONCE=F8EA4B1F2C8B40EF1AF68860171605B910E3BBCABADF6FC3DB1FA064F4690E85
self.server_hash = None
self.server_nonce = None
if not self.is_ok():
raise stem.ProtocolError("AUTHCHALLENGE response didn't have an OK status:\n%s" % self)
elif len(self) > 1:
raise stem.ProtocolError('Received multiline AUTHCHALLENGE response:\n%s' % self)
line = self[0]
# sanity check that we're a AUTHCHALLENGE response
if not line.pop() == 'AUTHCHALLENGE':
raise stem.ProtocolError('Message is not an AUTHCHALLENGE response (%s)' % self)
if line.is_next_mapping('SERVERHASH'):
value = line.pop_mapping()[1]
if not stem.util.tor_tools.is_hex_digits(value, 64):
raise stem.ProtocolError('SERVERHASH has an invalid value: %s' % value)
self.server_hash = binascii.a2b_hex(stem.util.str_tools._to_bytes(value))
else:
raise stem.ProtocolError('Missing SERVERHASH mapping: %s' % line)
if line.is_next_mapping('SERVERNONCE'):
value = line.pop_mapping()[1]
if not stem.util.tor_tools.is_hex_digits(value, 64):
raise stem.ProtocolError('SERVERNONCE has an invalid value: %s' % value)
self.server_nonce = binascii.a2b_hex(stem.util.str_tools._to_bytes(value))
else:
raise stem.ProtocolError('Missing SERVERNONCE mapping: %s' % line)
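# --- Hedged usage sketch (not part of the upstream module) ------------------
# With SAFECOOKIE authentication the controller should verify server_hash
# before trusting the response. A verification sketch, assuming 'cookie' and
# 'client_nonce' are bytes the controller already holds; the HMAC key is the
# server-to-controller constant from the tor control-spec.
def _verify_server_hash(response, cookie, client_nonce):
  import hashlib
  import hmac
  expected = hmac.new(
      b'Tor safe cookie authentication server-to-controller hash',
      cookie + client_nonce + response.server_nonce,
      hashlib.sha256).digest()
  # a constant-time compare (hmac.compare_digest) is preferable where available
  return expected == response.server_hash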
| lgpl-3.0 |
w1ll1am23/home-assistant | homeassistant/components/vesync/switch.py | 5 | 3299 | """Support for VeSync switches."""
import logging
from homeassistant.components.switch import SwitchEntity
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .common import VeSyncDevice
from .const import DOMAIN, VS_DISCOVERY, VS_DISPATCHERS, VS_SWITCHES
_LOGGER = logging.getLogger(__name__)
DEV_TYPE_TO_HA = {
"wifi-switch-1.3": "outlet",
"ESW03-USA": "outlet",
"ESW01-EU": "outlet",
"ESW15-USA": "outlet",
"ESWL01": "switch",
"ESWL03": "switch",
"ESO15-TB": "outlet",
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up switches."""
async def async_discover(devices):
"""Add new devices to platform."""
_async_setup_entities(devices, async_add_entities)
disp = async_dispatcher_connect(
hass, VS_DISCOVERY.format(VS_SWITCHES), async_discover
)
hass.data[DOMAIN][VS_DISPATCHERS].append(disp)
_async_setup_entities(hass.data[DOMAIN][VS_SWITCHES], async_add_entities)
return True
@callback
def _async_setup_entities(devices, async_add_entities):
"""Check if device is online and add entity."""
dev_list = []
for dev in devices:
if DEV_TYPE_TO_HA.get(dev.device_type) == "outlet":
dev_list.append(VeSyncSwitchHA(dev))
elif DEV_TYPE_TO_HA.get(dev.device_type) == "switch":
dev_list.append(VeSyncLightSwitch(dev))
else:
_LOGGER.warning(
"%s - Unknown device type - %s", dev.device_name, dev.device_type
)
continue
async_add_entities(dev_list, update_before_add=True)
class VeSyncBaseSwitch(VeSyncDevice, SwitchEntity):
"""Base class for VeSync switch Device Representations."""
def turn_on(self, **kwargs):
"""Turn the device on."""
self.device.turn_on()
class VeSyncSwitchHA(VeSyncBaseSwitch, SwitchEntity):
"""Representation of a VeSync switch."""
def __init__(self, plug):
"""Initialize the VeSync switch device."""
super().__init__(plug)
self.smartplug = plug
@property
def extra_state_attributes(self):
"""Return the state attributes of the device."""
if not hasattr(self.smartplug, "weekly_energy_total"):
return {}
return {
"voltage": self.smartplug.voltage,
"weekly_energy_total": self.smartplug.weekly_energy_total,
"monthly_energy_total": self.smartplug.monthly_energy_total,
"yearly_energy_total": self.smartplug.yearly_energy_total,
}
@property
def current_power_w(self):
"""Return the current power usage in W."""
return self.smartplug.power
@property
def today_energy_kwh(self):
"""Return the today total energy usage in kWh."""
return self.smartplug.energy_today
def update(self):
"""Update outlet details and energy usage."""
self.smartplug.update()
self.smartplug.update_energy()
class VeSyncLightSwitch(VeSyncBaseSwitch, SwitchEntity):
"""Handle representation of VeSync Light Switch."""
def __init__(self, switch):
"""Initialize Light Switch device class."""
super().__init__(switch)
self.switch = switch
| apache-2.0 |
KaranToor/MA450 | google-cloud-sdk/.install/.backup/lib/googlecloudsdk/third_party/apis/compute/alpha/compute_alpha_client.py | 2 | 425195 | """Generated client library for compute version alpha."""
# NOTE: This file is autogenerated and should not be edited by hand.
from apitools.base.py import base_api
from googlecloudsdk.third_party.apis.compute.alpha import compute_alpha_messages as messages
class ComputeAlpha(base_api.BaseApiClient):
"""Generated client library for service compute version alpha."""
MESSAGES_MODULE = messages
BASE_URL = u'https://www.googleapis.com/compute/alpha/'
_PACKAGE = u'compute'
_SCOPES = [u'https://www.googleapis.com/auth/cloud-platform', u'https://www.googleapis.com/auth/compute', u'https://www.googleapis.com/auth/compute.readonly', u'https://www.googleapis.com/auth/devstorage.full_control', u'https://www.googleapis.com/auth/devstorage.read_only', u'https://www.googleapis.com/auth/devstorage.read_write']
_VERSION = u'alpha'
_CLIENT_ID = '1042881264118.apps.googleusercontent.com'
_CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b'
_USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b'
_CLIENT_CLASS_NAME = u'ComputeAlpha'
_URL_VERSION = u'alpha'
_API_KEY = None
def __init__(self, url='', credentials=None,
get_credentials=True, http=None, model=None,
log_request=False, log_response=False,
credentials_args=None, default_global_params=None,
additional_http_headers=None):
"""Create a new compute handle."""
url = url or self.BASE_URL
super(ComputeAlpha, self).__init__(
url, credentials=credentials,
get_credentials=get_credentials, http=http, model=model,
log_request=log_request, log_response=log_response,
credentials_args=credentials_args,
default_global_params=default_global_params,
additional_http_headers=additional_http_headers)
self.acceleratorTypes = self.AcceleratorTypesService(self)
self.addresses = self.AddressesService(self)
self.autoscalers = self.AutoscalersService(self)
self.backendBuckets = self.BackendBucketsService(self)
self.backendServices = self.BackendServicesService(self)
self.clientSslPolicies = self.ClientSslPoliciesService(self)
self.commitments = self.CommitmentsService(self)
self.diskTypes = self.DiskTypesService(self)
self.disks = self.DisksService(self)
self.firewalls = self.FirewallsService(self)
self.forwardingRules = self.ForwardingRulesService(self)
self.globalAddresses = self.GlobalAddressesService(self)
self.globalForwardingRules = self.GlobalForwardingRulesService(self)
self.globalOperations = self.GlobalOperationsService(self)
self.healthChecks = self.HealthChecksService(self)
self.hostTypes = self.HostTypesService(self)
self.hosts = self.HostsService(self)
self.httpHealthChecks = self.HttpHealthChecksService(self)
self.httpsHealthChecks = self.HttpsHealthChecksService(self)
self.images = self.ImagesService(self)
self.instanceGroupManagers = self.InstanceGroupManagersService(self)
self.instanceGroups = self.InstanceGroupsService(self)
self.instanceTemplates = self.InstanceTemplatesService(self)
self.instances = self.InstancesService(self)
self.licenses = self.LicensesService(self)
self.machineTypes = self.MachineTypesService(self)
self.networks = self.NetworksService(self)
self.projects = self.ProjectsService(self)
self.regionAutoscalers = self.RegionAutoscalersService(self)
self.regionBackendServices = self.RegionBackendServicesService(self)
self.regionDiskTypes = self.RegionDiskTypesService(self)
self.regionDisks = self.RegionDisksService(self)
self.regionInstanceGroupManagers = self.RegionInstanceGroupManagersService(self)
self.regionInstanceGroups = self.RegionInstanceGroupsService(self)
self.regionOperations = self.RegionOperationsService(self)
self.regions = self.RegionsService(self)
self.routers = self.RoutersService(self)
self.routes = self.RoutesService(self)
self.snapshots = self.SnapshotsService(self)
self.sslCertificates = self.SslCertificatesService(self)
self.subnetworks = self.SubnetworksService(self)
self.targetHttpProxies = self.TargetHttpProxiesService(self)
self.targetHttpsProxies = self.TargetHttpsProxiesService(self)
self.targetInstances = self.TargetInstancesService(self)
self.targetPools = self.TargetPoolsService(self)
self.targetSslProxies = self.TargetSslProxiesService(self)
self.targetTcpProxies = self.TargetTcpProxiesService(self)
self.targetVpnGateways = self.TargetVpnGatewaysService(self)
self.urlMaps = self.UrlMapsService(self)
self.vpnTunnels = self.VpnTunnelsService(self)
self.zoneOperations = self.ZoneOperationsService(self)
self.zones = self.ZonesService(self)
class AcceleratorTypesService(base_api.BaseApiService):
"""Service class for the acceleratorTypes resource."""
_NAME = u'acceleratorTypes'
def __init__(self, client):
super(ComputeAlpha.AcceleratorTypesService, self).__init__(client)
self._upload_configs = {
}
def AggregatedList(self, request, global_params=None):
"""Retrieves an aggregated list of accelerator types.
Args:
request: (ComputeAcceleratorTypesAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(AcceleratorTypeAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.acceleratorTypes.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/acceleratorTypes',
request_field='',
request_type_name=u'ComputeAcceleratorTypesAggregatedListRequest',
response_type_name=u'AcceleratorTypeAggregatedList',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified accelerator type. Get a list of available accelerator types by making a list() request.
Args:
request: (ComputeAcceleratorTypesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(AcceleratorType) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.acceleratorTypes.get',
ordered_params=[u'project', u'zone', u'acceleratorType'],
path_params=[u'acceleratorType', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/acceleratorTypes/{acceleratorType}',
request_field='',
request_type_name=u'ComputeAcceleratorTypesGetRequest',
response_type_name=u'AcceleratorType',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of accelerator types available to the specified project.
Args:
request: (ComputeAcceleratorTypesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(AcceleratorTypeList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.acceleratorTypes.list',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/zones/{zone}/acceleratorTypes',
request_field='',
request_type_name=u'ComputeAcceleratorTypesListRequest',
response_type_name=u'AcceleratorTypeList',
supports_download=False,
)
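# --- Hedged usage sketch (not part of the generated client) -----------------
# Each service method takes a request message from the generated messages
# module and returns the response type named in its docstring. The project
# and zone values below are illustrative assumptions:
#
#   client = ComputeAlpha()
#   request = messages.ComputeAcceleratorTypesListRequest(
#       project='my-project', zone='us-central1-a')
#   response = client.acceleratorTypes.List(request)
#   for accelerator_type in response.items:
#       print(accelerator_type.name)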
class AddressesService(base_api.BaseApiService):
"""Service class for the addresses resource."""
_NAME = u'addresses'
def __init__(self, client):
super(ComputeAlpha.AddressesService, self).__init__(client)
self._upload_configs = {
}
def AggregatedList(self, request, global_params=None):
"""Retrieves an aggregated list of addresses.
Args:
request: (ComputeAddressesAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(AddressAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.addresses.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/addresses',
request_field='',
request_type_name=u'ComputeAddressesAggregatedListRequest',
response_type_name=u'AddressAggregatedList',
supports_download=False,
)
def Delete(self, request, global_params=None):
"""Deletes the specified address resource.
Args:
request: (ComputeAddressesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.addresses.delete',
ordered_params=[u'project', u'region', u'address'],
path_params=[u'address', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/addresses/{address}',
request_field='',
request_type_name=u'ComputeAddressesDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified address resource.
Args:
request: (ComputeAddressesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Address) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.addresses.get',
ordered_params=[u'project', u'region', u'address'],
path_params=[u'address', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/addresses/{address}',
request_field='',
request_type_name=u'ComputeAddressesGetRequest',
response_type_name=u'Address',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates an address resource in the specified project using the data included in the request.
Args:
request: (ComputeAddressesInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.addresses.insert',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/addresses',
request_field=u'address',
request_type_name=u'ComputeAddressesInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
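# Usage sketch (illustrative): because request_field above is u'address', the
# Address body rides inside the wrapper request type; client/messages as in
# the earlier sketch, and all names are placeholders:
#
#   request = messages.ComputeAddressesInsertRequest(
#       project='my-project', region='us-central1',
#       address=messages.Address(name='my-static-ip'))
#   operation = client.addresses.Insert(request)  # an Operation to poll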
def List(self, request, global_params=None):
"""Retrieves a list of addresses contained within the specified region.
Args:
request: (ComputeAddressesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(AddressList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.addresses.list',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/addresses',
request_field='',
request_type_name=u'ComputeAddressesListRequest',
response_type_name=u'AddressList',
supports_download=False,
)
def SetLabels(self, request, global_params=None):
"""Sets the labels on an Address. To learn more about labels, read the Labeling or Tagging Resources documentation.
Args:
request: (ComputeAddressesSetLabelsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetLabels')
return self._RunMethod(
config, request, global_params=global_params)
SetLabels.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.addresses.setLabels',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/addresses/{resource}/setLabels',
request_field=u'regionSetLabelsRequest',
request_type_name=u'ComputeAddressesSetLabelsRequest',
response_type_name=u'Operation',
supports_download=False,
)
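# Usage sketch (illustrative): apitools renders a labels map as a nested
# LabelsValue message of AdditionalProperty entries, and the current label
# fingerprint must be echoed back for optimistic concurrency; the nested
# shape below follows the usual apitools convention and is not verified
# against the messages module, with placeholder names throughout:
#
#   addr = client.addresses.Get(messages.ComputeAddressesGetRequest(
#       project='my-project', region='us-central1', address='my-static-ip'))
#   labels = messages.RegionSetLabelsRequest.LabelsValue(additionalProperties=[
#       messages.RegionSetLabelsRequest.LabelsValue.AdditionalProperty(
#           key='env', value='prod')])
#   client.addresses.SetLabels(messages.ComputeAddressesSetLabelsRequest(
#       project='my-project', region='us-central1', resource='my-static-ip',
#       regionSetLabelsRequest=messages.RegionSetLabelsRequest(
#           labels=labels, labelFingerprint=addr.labelFingerprint)))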
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeAddressesTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.addresses.testIamPermissions',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/addresses/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeAddressesTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
class AutoscalersService(base_api.BaseApiService):
"""Service class for the autoscalers resource."""
_NAME = u'autoscalers'
def __init__(self, client):
super(ComputeAlpha.AutoscalersService, self).__init__(client)
self._upload_configs = {
}
def AggregatedList(self, request, global_params=None):
"""Retrieves an aggregated list of autoscalers.
Args:
request: (ComputeAutoscalersAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(AutoscalerAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.autoscalers.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/autoscalers',
request_field='',
request_type_name=u'ComputeAutoscalersAggregatedListRequest',
response_type_name=u'AutoscalerAggregatedList',
supports_download=False,
)
def Delete(self, request, global_params=None):
"""Deletes the specified autoscaler.
Args:
request: (ComputeAutoscalersDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.autoscalers.delete',
ordered_params=[u'project', u'zone', u'autoscaler'],
path_params=[u'autoscaler', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/autoscalers/{autoscaler}',
request_field='',
request_type_name=u'ComputeAutoscalersDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified autoscaler resource. Get a list of available autoscalers by making a list() request.
Args:
request: (ComputeAutoscalersGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Autoscaler) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.autoscalers.get',
ordered_params=[u'project', u'zone', u'autoscaler'],
path_params=[u'autoscaler', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/autoscalers/{autoscaler}',
request_field='',
request_type_name=u'ComputeAutoscalersGetRequest',
response_type_name=u'Autoscaler',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates an autoscaler in the specified project using the data included in the request.
Args:
request: (ComputeAutoscalersInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.autoscalers.insert',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/autoscalers',
request_field=u'autoscaler',
request_type_name=u'ComputeAutoscalersInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of autoscalers contained within the specified zone.
Args:
request: (ComputeAutoscalersListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(AutoscalerList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.autoscalers.list',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/zones/{zone}/autoscalers',
request_field='',
request_type_name=u'ComputeAutoscalersListRequest',
response_type_name=u'AutoscalerList',
supports_download=False,
)
def Patch(self, request, global_params=None):
"""Updates an autoscaler in the specified project using the data included in the request. This method supports patch semantics.
Args:
request: (ComputeAutoscalersPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PATCH',
method_id=u'compute.autoscalers.patch',
ordered_params=[u'project', u'zone', u'autoscaler'],
path_params=[u'project', u'zone'],
query_params=[u'autoscaler'],
relative_path=u'projects/{project}/zones/{zone}/autoscalers',
request_field=u'autoscalerResource',
request_type_name=u'ComputeAutoscalersPatchRequest',
response_type_name=u'Operation',
supports_download=False,
)
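# Usage sketch (illustrative): note that this Patch config sends `autoscaler`
# as a query parameter rather than a path parameter, while the Autoscaler
# body travels in the autoscalerResource field; values are placeholders:
#
#   request = messages.ComputeAutoscalersPatchRequest(
#       project='my-project', zone='us-central1-a',
#       autoscaler='my-autoscaler',  # query parameter naming the target
#       autoscalerResource=messages.Autoscaler(name='my-autoscaler'))
#   operation = client.autoscalers.Patch(request)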
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeAutoscalersTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.autoscalers.testIamPermissions',
ordered_params=[u'project', u'zone', u'resource'],
path_params=[u'project', u'resource', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/autoscalers/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeAutoscalersTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
def Update(self, request, global_params=None):
"""Updates an autoscaler in the specified project using the data included in the request.
Args:
request: (ComputeAutoscalersUpdateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PUT',
method_id=u'compute.autoscalers.update',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[u'autoscaler'],
relative_path=u'projects/{project}/zones/{zone}/autoscalers',
request_field=u'autoscalerResource',
request_type_name=u'ComputeAutoscalersUpdateRequest',
response_type_name=u'Operation',
supports_download=False,
)
class BackendBucketsService(base_api.BaseApiService):
"""Service class for the backendBuckets resource."""
_NAME = u'backendBuckets'
def __init__(self, client):
super(ComputeAlpha.BackendBucketsService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified BackendBucket resource.
Args:
request: (ComputeBackendBucketsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.backendBuckets.delete',
ordered_params=[u'project', u'backendBucket'],
path_params=[u'backendBucket', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/backendBuckets/{backendBucket}',
request_field='',
request_type_name=u'ComputeBackendBucketsDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified BackendBucket resource. Get a list of available backend buckets by making a list() request.
Args:
request: (ComputeBackendBucketsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(BackendBucket) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.backendBuckets.get',
ordered_params=[u'project', u'backendBucket'],
path_params=[u'backendBucket', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/backendBuckets/{backendBucket}',
request_field='',
request_type_name=u'ComputeBackendBucketsGetRequest',
response_type_name=u'BackendBucket',
supports_download=False,
)
def GetIamPolicy(self, request, global_params=None):
"""Gets the access control policy for a resource. May be empty if no such policy or resource exists.
Args:
request: (ComputeBackendBucketsGetIamPolicyRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Policy) The response message.
"""
config = self.GetMethodConfig('GetIamPolicy')
return self._RunMethod(
config, request, global_params=global_params)
GetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.backendBuckets.getIamPolicy',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/backendBuckets/{resource}/getIamPolicy',
request_field='',
request_type_name=u'ComputeBackendBucketsGetIamPolicyRequest',
response_type_name=u'Policy',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a BackendBucket resource in the specified project using the data included in the request.
Args:
request: (ComputeBackendBucketsInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.backendBuckets.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/global/backendBuckets',
request_field=u'backendBucket',
request_type_name=u'ComputeBackendBucketsInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of BackendBucket resources available to the specified project.
Args:
request: (ComputeBackendBucketsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(BackendBucketList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.backendBuckets.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/backendBuckets',
request_field='',
request_type_name=u'ComputeBackendBucketsListRequest',
response_type_name=u'BackendBucketList',
supports_download=False,
)
def Patch(self, request, global_params=None):
"""Updates the specified BackendBucket resource with the data included in the request. This method supports patch semantics.
Args:
request: (ComputeBackendBucketsPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PATCH',
method_id=u'compute.backendBuckets.patch',
ordered_params=[u'project', u'backendBucket'],
path_params=[u'backendBucket', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/backendBuckets/{backendBucket}',
request_field=u'backendBucketResource',
request_type_name=u'ComputeBackendBucketsPatchRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetIamPolicy(self, request, global_params=None):
"""Sets the access control policy on the specified resource. Replaces any existing policy.
Args:
request: (ComputeBackendBucketsSetIamPolicyRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Policy) The response message.
"""
config = self.GetMethodConfig('SetIamPolicy')
return self._RunMethod(
config, request, global_params=global_params)
SetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.backendBuckets.setIamPolicy',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/backendBuckets/{resource}/setIamPolicy',
request_field=u'policy',
request_type_name=u'ComputeBackendBucketsSetIamPolicyRequest',
response_type_name=u'Policy',
supports_download=False,
)
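# Usage sketch (illustrative): GetIamPolicy/SetIamPolicy pair naturally into
# a read-modify-write cycle on the resource's Policy; editing the bindings is
# elided and resource names are placeholders:
#
#   policy = client.backendBuckets.GetIamPolicy(
#       messages.ComputeBackendBucketsGetIamPolicyRequest(
#           project='my-project', resource='my-backend-bucket'))
#   # ...adjust policy.bindings as needed...
#   client.backendBuckets.SetIamPolicy(
#       messages.ComputeBackendBucketsSetIamPolicyRequest(
#           project='my-project', resource='my-backend-bucket', policy=policy))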
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeBackendBucketsTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.backendBuckets.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/backendBuckets/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeBackendBucketsTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
def Update(self, request, global_params=None):
"""Updates the specified BackendBucket resource with the data included in the request.
Args:
request: (ComputeBackendBucketsUpdateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PUT',
method_id=u'compute.backendBuckets.update',
ordered_params=[u'project', u'backendBucket'],
path_params=[u'backendBucket', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/backendBuckets/{backendBucket}',
request_field=u'backendBucketResource',
request_type_name=u'ComputeBackendBucketsUpdateRequest',
response_type_name=u'Operation',
supports_download=False,
)
class BackendServicesService(base_api.BaseApiService):
"""Service class for the backendServices resource."""
_NAME = u'backendServices'
def __init__(self, client):
super(ComputeAlpha.BackendServicesService, self).__init__(client)
self._upload_configs = {
}
def AggregatedList(self, request, global_params=None):
"""Retrieves the list of all BackendService resources, regional and global, available to the specified project.
Args:
request: (ComputeBackendServicesAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(BackendServiceAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.backendServices.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/backendServices',
request_field='',
request_type_name=u'ComputeBackendServicesAggregatedListRequest',
response_type_name=u'BackendServiceAggregatedList',
supports_download=False,
)
def Delete(self, request, global_params=None):
"""Deletes the specified BackendService resource.
Args:
request: (ComputeBackendServicesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.backendServices.delete',
ordered_params=[u'project', u'backendService'],
path_params=[u'backendService', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/backendServices/{backendService}',
request_field='',
request_type_name=u'ComputeBackendServicesDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified BackendService resource. Get a list of available backend services by making a list() request.
Args:
request: (ComputeBackendServicesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(BackendService) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.backendServices.get',
ordered_params=[u'project', u'backendService'],
path_params=[u'backendService', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/backendServices/{backendService}',
request_field='',
request_type_name=u'ComputeBackendServicesGetRequest',
response_type_name=u'BackendService',
supports_download=False,
)
def GetHealth(self, request, global_params=None):
"""Gets the most recent health check results for this BackendService.
Args:
request: (ComputeBackendServicesGetHealthRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(BackendServiceGroupHealth) The response message.
"""
config = self.GetMethodConfig('GetHealth')
return self._RunMethod(
config, request, global_params=global_params)
GetHealth.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.backendServices.getHealth',
ordered_params=[u'project', u'backendService'],
path_params=[u'backendService', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/backendServices/{backendService}/getHealth',
request_field=u'resourceGroupReference',
request_type_name=u'ComputeBackendServicesGetHealthRequest',
response_type_name=u'BackendServiceGroupHealth',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a BackendService resource in the specified project using the data included in the request. There are several restrictions and guidelines to keep in mind when creating a backend service. Read Restrictions and Guidelines for more information.
Args:
request: (ComputeBackendServicesInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.backendServices.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/global/backendServices',
request_field=u'backendService',
request_type_name=u'ComputeBackendServicesInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of BackendService resources available to the specified project.
Args:
request: (ComputeBackendServicesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(BackendServiceList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.backendServices.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/backendServices',
request_field='',
request_type_name=u'ComputeBackendServicesListRequest',
response_type_name=u'BackendServiceList',
supports_download=False,
)
def Patch(self, request, global_params=None):
"""Updates the specified BackendService resource with the data included in the request. There are several restrictions and guidelines to keep in mind when updating a backend service. Read Restrictions and Guidelines for more information. This method supports patch semantics.
Args:
request: (ComputeBackendServicesPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PATCH',
method_id=u'compute.backendServices.patch',
ordered_params=[u'project', u'backendService'],
path_params=[u'backendService', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/backendServices/{backendService}',
request_field=u'backendServiceResource',
request_type_name=u'ComputeBackendServicesPatchRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeBackendServicesTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.backendServices.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/backendServices/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeBackendServicesTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
def Update(self, request, global_params=None):
"""Updates the specified BackendService resource with the data included in the request. There are several restrictions and guidelines to keep in mind when updating a backend service. Read Restrictions and Guidelines for more information.
Args:
request: (ComputeBackendServicesUpdateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PUT',
method_id=u'compute.backendServices.update',
ordered_params=[u'project', u'backendService'],
path_params=[u'backendService', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/backendServices/{backendService}',
request_field=u'backendServiceResource',
request_type_name=u'ComputeBackendServicesUpdateRequest',
response_type_name=u'Operation',
supports_download=False,
)
class ClientSslPoliciesService(base_api.BaseApiService):
"""Service class for the clientSslPolicies resource."""
_NAME = u'clientSslPolicies'
def __init__(self, client):
super(ComputeAlpha.ClientSslPoliciesService, self).__init__(client)
self._upload_configs = {
}
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeClientSslPoliciesTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.clientSslPolicies.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/clientSslPolicies/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeClientSslPoliciesTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
class CommitmentsService(base_api.BaseApiService):
"""Service class for the commitments resource."""
_NAME = u'commitments'
def __init__(self, client):
super(ComputeAlpha.CommitmentsService, self).__init__(client)
self._upload_configs = {
}
def AggregatedList(self, request, global_params=None):
"""Retrieves an aggregated list of commitments.
Args:
request: (ComputeCommitmentsAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(CommitmentAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.commitments.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/commitments',
request_field='',
request_type_name=u'ComputeCommitmentsAggregatedListRequest',
response_type_name=u'CommitmentAggregatedList',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified commitment resource. Get a list of available commitments by making a list() request.
Args:
request: (ComputeCommitmentsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Commitment) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.commitments.get',
ordered_params=[u'project', u'zone', u'commitment'],
path_params=[u'commitment', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/commitments/{commitment}',
request_field='',
request_type_name=u'ComputeCommitmentsGetRequest',
response_type_name=u'Commitment',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates an commitment in the specified project using the data included in the request.
Args:
request: (ComputeCommitmentsInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.commitments.insert',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/commitments',
request_field=u'commitment',
request_type_name=u'ComputeCommitmentsInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of commitments contained within the specified zone.
Args:
request: (ComputeCommitmentsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(CommitmentList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.commitments.list',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/zones/{zone}/commitments',
request_field='',
request_type_name=u'ComputeCommitmentsListRequest',
response_type_name=u'CommitmentList',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeCommitmentsTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.commitments.testIamPermissions',
ordered_params=[u'project', u'zone', u'resource'],
path_params=[u'project', u'resource', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/commitments/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeCommitmentsTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
class DiskTypesService(base_api.BaseApiService):
"""Service class for the diskTypes resource."""
_NAME = u'diskTypes'
def __init__(self, client):
super(ComputeAlpha.DiskTypesService, self).__init__(client)
self._upload_configs = {
}
def AggregatedList(self, request, global_params=None):
"""Retrieves an aggregated list of disk types.
Args:
request: (ComputeDiskTypesAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(DiskTypeAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.diskTypes.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/diskTypes',
request_field='',
request_type_name=u'ComputeDiskTypesAggregatedListRequest',
response_type_name=u'DiskTypeAggregatedList',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified disk type. Get a list of available disk types by making a list() request.
Args:
request: (ComputeDiskTypesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(DiskType) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.diskTypes.get',
ordered_params=[u'project', u'zone', u'diskType'],
path_params=[u'diskType', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/diskTypes/{diskType}',
request_field='',
request_type_name=u'ComputeDiskTypesGetRequest',
response_type_name=u'DiskType',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of disk types available to the specified project.
Args:
request: (ComputeDiskTypesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(DiskTypeList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.diskTypes.list',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/zones/{zone}/diskTypes',
request_field='',
request_type_name=u'ComputeDiskTypesListRequest',
response_type_name=u'DiskTypeList',
supports_download=False,
)
class DisksService(base_api.BaseApiService):
"""Service class for the disks resource."""
_NAME = u'disks'
def __init__(self, client):
super(ComputeAlpha.DisksService, self).__init__(client)
self._upload_configs = {
}
def AggregatedList(self, request, global_params=None):
"""Retrieves an aggregated list of persistent disks.
Args:
request: (ComputeDisksAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(DiskAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.disks.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/disks',
request_field='',
request_type_name=u'ComputeDisksAggregatedListRequest',
response_type_name=u'DiskAggregatedList',
supports_download=False,
)
def CreateSnapshot(self, request, global_params=None):
"""Creates a snapshot of a specified persistent disk.
Args:
request: (ComputeDisksCreateSnapshotRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('CreateSnapshot')
return self._RunMethod(
config, request, global_params=global_params)
CreateSnapshot.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.disks.createSnapshot',
ordered_params=[u'project', u'zone', u'disk'],
path_params=[u'disk', u'project', u'zone'],
query_params=[u'guestFlush', u'requestId'],
relative_path=u'projects/{project}/zones/{zone}/disks/{disk}/createSnapshot',
request_field=u'snapshot',
request_type_name=u'ComputeDisksCreateSnapshotRequest',
response_type_name=u'Operation',
supports_download=False,
)
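# Usage sketch (illustrative): the Snapshot body is carried in the `snapshot`
# request field while guestFlush/requestId ride as query parameters; values
# are placeholders:
#
#   request = messages.ComputeDisksCreateSnapshotRequest(
#       project='my-project', zone='us-central1-a', disk='my-disk',
#       guestFlush=True,  # ask for an application-consistent snapshot
#       snapshot=messages.Snapshot(name='my-disk-snap'))
#   operation = client.disks.CreateSnapshot(request)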
def Delete(self, request, global_params=None):
"""Deletes the specified persistent disk. Deleting a disk removes its data permanently and is irreversible. However, deleting a disk does not delete any snapshots previously made from the disk. You must separately delete snapshots.
Args:
request: (ComputeDisksDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.disks.delete',
ordered_params=[u'project', u'zone', u'disk'],
path_params=[u'disk', u'project', u'zone'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/zones/{zone}/disks/{disk}',
request_field='',
request_type_name=u'ComputeDisksDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns a specified persistent disk. Get a list of available persistent disks by making a list() request.
Args:
request: (ComputeDisksGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Disk) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.disks.get',
ordered_params=[u'project', u'zone', u'disk'],
path_params=[u'disk', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/disks/{disk}',
request_field='',
request_type_name=u'ComputeDisksGetRequest',
response_type_name=u'Disk',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a persistent disk in the specified project using the data in the request. You can create a disk with a sourceImage, a sourceSnapshot, or create an empty 500 GB data disk by omitting all properties. You can also create a disk that is larger than the default size by specifying the sizeGb property.
Args:
request: (ComputeDisksInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.disks.insert',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[u'requestId', u'sourceImage'],
relative_path=u'projects/{project}/zones/{zone}/disks',
request_field=u'disk',
request_type_name=u'ComputeDisksInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
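# Usage sketch (illustrative): sourceImage is accepted here as a query
# parameter, while sizing lives on the Disk body; the image path and size are
# placeholders:
#
#   request = messages.ComputeDisksInsertRequest(
#       project='my-project', zone='us-central1-a',
#       sourceImage='projects/debian-cloud/global/images/family/debian-9',
#       disk=messages.Disk(name='my-disk', sizeGb=200))
#   operation = client.disks.Insert(request)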
def List(self, request, global_params=None):
"""Retrieves a list of persistent disks contained within the specified zone.
Args:
request: (ComputeDisksListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(DiskList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.disks.list',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/zones/{zone}/disks',
request_field='',
request_type_name=u'ComputeDisksListRequest',
response_type_name=u'DiskList',
supports_download=False,
)
def Resize(self, request, global_params=None):
"""Resizes the specified persistent disk.
Args:
request: (ComputeDisksResizeRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Resize')
return self._RunMethod(
config, request, global_params=global_params)
Resize.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.disks.resize',
ordered_params=[u'project', u'zone', u'disk'],
path_params=[u'disk', u'project', u'zone'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/zones/{zone}/disks/{disk}/resize',
request_field=u'disksResizeRequest',
request_type_name=u'ComputeDisksResizeRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetLabels(self, request, global_params=None):
"""Sets the labels on a disk. To learn more about labels, read the Labeling or Tagging Resources documentation.
Args:
request: (ComputeDisksSetLabelsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetLabels')
return self._RunMethod(
config, request, global_params=global_params)
SetLabels.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.disks.setLabels',
ordered_params=[u'project', u'zone', u'resource'],
path_params=[u'project', u'resource', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/disks/{resource}/setLabels',
request_field=u'zoneSetLabelsRequest',
request_type_name=u'ComputeDisksSetLabelsRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeDisksTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.disks.testIamPermissions',
ordered_params=[u'project', u'zone', u'resource'],
path_params=[u'project', u'resource', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/disks/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeDisksTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
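# Usage sketch (illustrative): TestIamPermissions reports which of the
# submitted permission strings the caller holds on the resource; the
# permission names are placeholders from the compute.disks.* family:
#
#   request = messages.ComputeDisksTestIamPermissionsRequest(
#       project='my-project', zone='us-central1-a', resource='my-disk',
#       testPermissionsRequest=messages.TestPermissionsRequest(
#           permissions=['compute.disks.get', 'compute.disks.delete']))
#   granted = client.disks.TestIamPermissions(request).permissions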
class FirewallsService(base_api.BaseApiService):
"""Service class for the firewalls resource."""
_NAME = u'firewalls'
def __init__(self, client):
super(ComputeAlpha.FirewallsService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified firewall.
Args:
request: (ComputeFirewallsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.firewalls.delete',
ordered_params=[u'project', u'firewall'],
path_params=[u'firewall', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/firewalls/{firewall}',
request_field='',
request_type_name=u'ComputeFirewallsDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified firewall.
Args:
request: (ComputeFirewallsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Firewall) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.firewalls.get',
ordered_params=[u'project', u'firewall'],
path_params=[u'firewall', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/firewalls/{firewall}',
request_field='',
request_type_name=u'ComputeFirewallsGetRequest',
response_type_name=u'Firewall',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a firewall rule in the specified project using the data included in the request.
Args:
request: (ComputeFirewallsInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.firewalls.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/global/firewalls',
request_field=u'firewall',
request_type_name=u'ComputeFirewallsInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of firewall rules available to the specified project.
Args:
request: (ComputeFirewallsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(FirewallList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.firewalls.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/firewalls',
request_field='',
request_type_name=u'ComputeFirewallsListRequest',
response_type_name=u'FirewallList',
supports_download=False,
)
def Patch(self, request, global_params=None):
"""Updates the specified firewall rule with the data included in the request. This method supports patch semantics.
Args:
request: (ComputeFirewallsPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PATCH',
method_id=u'compute.firewalls.patch',
ordered_params=[u'project', u'firewall'],
path_params=[u'firewall', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/firewalls/{firewall}',
request_field=u'firewallResource',
request_type_name=u'ComputeFirewallsPatchRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeFirewallsTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.firewalls.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/firewalls/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeFirewallsTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
def Update(self, request, global_params=None):
"""Updates the specified firewall rule with the data included in the request.
Args:
request: (ComputeFirewallsUpdateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PUT',
method_id=u'compute.firewalls.update',
ordered_params=[u'project', u'firewall'],
path_params=[u'firewall', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/firewalls/{firewall}',
request_field=u'firewallResource',
request_type_name=u'ComputeFirewallsUpdateRequest',
response_type_name=u'Operation',
supports_download=False,
)
class ForwardingRulesService(base_api.BaseApiService):
"""Service class for the forwardingRules resource."""
_NAME = u'forwardingRules'
def __init__(self, client):
super(ComputeAlpha.ForwardingRulesService, self).__init__(client)
self._upload_configs = {
}
def AggregatedList(self, request, global_params=None):
"""Retrieves an aggregated list of forwarding rules.
Args:
request: (ComputeForwardingRulesAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ForwardingRuleAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.forwardingRules.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/forwardingRules',
request_field='',
request_type_name=u'ComputeForwardingRulesAggregatedListRequest',
response_type_name=u'ForwardingRuleAggregatedList',
supports_download=False,
)
def Delete(self, request, global_params=None):
"""Deletes the specified ForwardingRule resource.
Args:
request: (ComputeForwardingRulesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.forwardingRules.delete',
ordered_params=[u'project', u'region', u'forwardingRule'],
path_params=[u'forwardingRule', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/forwardingRules/{forwardingRule}',
request_field='',
request_type_name=u'ComputeForwardingRulesDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified ForwardingRule resource.
Args:
request: (ComputeForwardingRulesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ForwardingRule) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.forwardingRules.get',
ordered_params=[u'project', u'region', u'forwardingRule'],
path_params=[u'forwardingRule', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/forwardingRules/{forwardingRule}',
request_field='',
request_type_name=u'ComputeForwardingRulesGetRequest',
response_type_name=u'ForwardingRule',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a ForwardingRule resource in the specified project and region using the data included in the request.
Args:
request: (ComputeForwardingRulesInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.forwardingRules.insert',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/forwardingRules',
request_field=u'forwardingRule',
request_type_name=u'ComputeForwardingRulesInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of ForwardingRule resources available to the specified project and region.
Args:
request: (ComputeForwardingRulesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ForwardingRuleList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.forwardingRules.list',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/forwardingRules',
request_field='',
request_type_name=u'ComputeForwardingRulesListRequest',
response_type_name=u'ForwardingRuleList',
supports_download=False,
)
def SetLabels(self, request, global_params=None):
"""Sets the labels on the specified resource. To learn more about labels, read the Labeling or Tagging Resources documentation.
Args:
request: (ComputeForwardingRulesSetLabelsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetLabels')
return self._RunMethod(
config, request, global_params=global_params)
SetLabels.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.forwardingRules.setLabels',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/forwardingRules/{resource}/setLabels',
request_field=u'regionSetLabelsRequest',
request_type_name=u'ComputeForwardingRulesSetLabelsRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetTarget(self, request, global_params=None):
"""Changes target URL for forwarding rule. The new target should be of the same type as the old target.
Args:
request: (ComputeForwardingRulesSetTargetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetTarget')
return self._RunMethod(
config, request, global_params=global_params)
SetTarget.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.forwardingRules.setTarget',
ordered_params=[u'project', u'region', u'forwardingRule'],
path_params=[u'forwardingRule', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/forwardingRules/{forwardingRule}/setTarget',
request_field=u'targetReference',
request_type_name=u'ComputeForwardingRulesSetTargetRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeForwardingRulesTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.forwardingRules.testIamPermissions',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/forwardingRules/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeForwardingRulesTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
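# Sketch: retargeting a regional forwarding rule via SetTarget (commented
# out). `client` and `messages` are illustrative names for an initialized
# ComputeAlpha client and its generated messages module; the TargetReference
# message and its `target` field are assumed from the `targetReference`
# request_field above, and `new_target_url` is a placeholder.
#
#   request = messages.ComputeForwardingRulesSetTargetRequest(
#       project='my-project', region='us-central1',
#       forwardingRule='my-rule',
#       targetReference=messages.TargetReference(target=new_target_url))
#   operation = client.forwardingRules.SetTarget(request)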
class GlobalAddressesService(base_api.BaseApiService):
"""Service class for the globalAddresses resource."""
_NAME = u'globalAddresses'
def __init__(self, client):
super(ComputeAlpha.GlobalAddressesService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified address resource.
Args:
request: (ComputeGlobalAddressesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.globalAddresses.delete',
ordered_params=[u'project', u'address'],
path_params=[u'address', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/addresses/{address}',
request_field='',
request_type_name=u'ComputeGlobalAddressesDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified address resource. Get a list of available addresses by making a list() request.
Args:
request: (ComputeGlobalAddressesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Address) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.globalAddresses.get',
ordered_params=[u'project', u'address'],
path_params=[u'address', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/addresses/{address}',
request_field='',
request_type_name=u'ComputeGlobalAddressesGetRequest',
response_type_name=u'Address',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates an address resource in the specified project using the data included in the request.
Args:
request: (ComputeGlobalAddressesInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.globalAddresses.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/global/addresses',
request_field=u'address',
request_type_name=u'ComputeGlobalAddressesInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of global addresses.
Args:
request: (ComputeGlobalAddressesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(AddressList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.globalAddresses.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/addresses',
request_field='',
request_type_name=u'ComputeGlobalAddressesListRequest',
response_type_name=u'AddressList',
supports_download=False,
)
def SetLabels(self, request, global_params=None):
"""Sets the labels on a GlobalAddress. To learn more about labels, read the Labeling or Tagging Resources documentation.
Args:
request: (ComputeGlobalAddressesSetLabelsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetLabels')
return self._RunMethod(
config, request, global_params=global_params)
SetLabels.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.globalAddresses.setLabels',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/addresses/{resource}/setLabels',
request_field=u'globalSetLabelsRequest',
request_type_name=u'ComputeGlobalAddressesSetLabelsRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeGlobalAddressesTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.globalAddresses.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/addresses/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeGlobalAddressesTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
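# Sketch: reserving a global address with Insert (commented out). Per the
# method_config above, the Address body is passed in the `address` request
# field; `client`/`messages` are illustrative names and the Address fields
# shown are assumptions.
#
#   request = messages.ComputeGlobalAddressesInsertRequest(
#       project='my-project',
#       address=messages.Address(name='my-global-ip'))
#   operation = client.globalAddresses.Insert(request)  # returns an Operation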
class GlobalForwardingRulesService(base_api.BaseApiService):
"""Service class for the globalForwardingRules resource."""
_NAME = u'globalForwardingRules'
def __init__(self, client):
super(ComputeAlpha.GlobalForwardingRulesService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified ForwardingRule resource.
Args:
request: (ComputeGlobalForwardingRulesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.globalForwardingRules.delete',
ordered_params=[u'project', u'forwardingRule'],
path_params=[u'forwardingRule', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/forwardingRules/{forwardingRule}',
request_field='',
request_type_name=u'ComputeGlobalForwardingRulesDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified ForwardingRule resource. Get a list of available forwarding rules by making a list() request.
Args:
request: (ComputeGlobalForwardingRulesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ForwardingRule) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.globalForwardingRules.get',
ordered_params=[u'project', u'forwardingRule'],
path_params=[u'forwardingRule', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/forwardingRules/{forwardingRule}',
request_field='',
request_type_name=u'ComputeGlobalForwardingRulesGetRequest',
response_type_name=u'ForwardingRule',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a ForwardingRule resource in the specified project and region using the data included in the request.
Args:
request: (ComputeGlobalForwardingRulesInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.globalForwardingRules.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/global/forwardingRules',
request_field=u'forwardingRule',
request_type_name=u'ComputeGlobalForwardingRulesInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of ForwardingRule resources available to the specified project.
Args:
request: (ComputeGlobalForwardingRulesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ForwardingRuleList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.globalForwardingRules.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/forwardingRules',
request_field='',
request_type_name=u'ComputeGlobalForwardingRulesListRequest',
response_type_name=u'ForwardingRuleList',
supports_download=False,
)
def SetLabels(self, request, global_params=None):
"""Sets the labels on the specified resource. To learn more about labels, read the Labeling or Tagging Resources documentation.
Args:
request: (ComputeGlobalForwardingRulesSetLabelsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetLabels')
return self._RunMethod(
config, request, global_params=global_params)
SetLabels.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.globalForwardingRules.setLabels',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/forwardingRules/{resource}/setLabels',
request_field=u'globalSetLabelsRequest',
request_type_name=u'ComputeGlobalForwardingRulesSetLabelsRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetTarget(self, request, global_params=None):
"""Changes target URL for forwarding rule. The new target should be of the same type as the old target.
Args:
request: (ComputeGlobalForwardingRulesSetTargetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetTarget')
return self._RunMethod(
config, request, global_params=global_params)
SetTarget.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.globalForwardingRules.setTarget',
ordered_params=[u'project', u'forwardingRule'],
path_params=[u'forwardingRule', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/forwardingRules/{forwardingRule}/setTarget',
request_field=u'targetReference',
request_type_name=u'ComputeGlobalForwardingRulesSetTargetRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeGlobalForwardingRulesTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.globalForwardingRules.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/forwardingRules/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeGlobalForwardingRulesTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
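# Sketch: checking caller permissions with TestIamPermissions (commented
# out). The permission string and the `client`/`messages` names are
# illustrative assumptions; the `testPermissionsRequest` request_field and
# request/response type names match the method_config above.
#
#   request = messages.ComputeGlobalForwardingRulesTestIamPermissionsRequest(
#       project='my-project', resource='my-rule',
#       testPermissionsRequest=messages.TestPermissionsRequest(
#           permissions=['compute.forwardingRules.get']))
#   response = client.globalForwardingRules.TestIamPermissions(request)
#   granted = set(response.permissions or [])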
class GlobalOperationsService(base_api.BaseApiService):
"""Service class for the globalOperations resource."""
_NAME = u'globalOperations'
def __init__(self, client):
super(ComputeAlpha.GlobalOperationsService, self).__init__(client)
self._upload_configs = {
}
def AggregatedList(self, request, global_params=None):
"""Retrieves an aggregated list of all operations.
Args:
request: (ComputeGlobalOperationsAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(OperationAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.globalOperations.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/operations',
request_field='',
request_type_name=u'ComputeGlobalOperationsAggregatedListRequest',
response_type_name=u'OperationAggregatedList',
supports_download=False,
)
def Delete(self, request, global_params=None):
"""Deletes the specified Operations resource.
Args:
request: (ComputeGlobalOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ComputeGlobalOperationsDeleteResponse) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.globalOperations.delete',
ordered_params=[u'project', u'operation'],
path_params=[u'operation', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/operations/{operation}',
request_field='',
request_type_name=u'ComputeGlobalOperationsDeleteRequest',
response_type_name=u'ComputeGlobalOperationsDeleteResponse',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Retrieves the specified Operations resource. Get a list of operations by making a list() request.
Args:
request: (ComputeGlobalOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.globalOperations.get',
ordered_params=[u'project', u'operation'],
path_params=[u'operation', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/operations/{operation}',
request_field='',
request_type_name=u'ComputeGlobalOperationsGetRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of Operation resources contained within the specified project.
Args:
request: (ComputeGlobalOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(OperationList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.globalOperations.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/operations',
request_field='',
request_type_name=u'ComputeGlobalOperationsListRequest',
response_type_name=u'OperationList',
supports_download=False,
)
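# Sketch: polling a global Operation until it completes (commented out).
# Mutating methods in this client return Operation messages; the 'DONE'
# status value and the field access below follow the usual Operation shape
# and are assumptions, and `client`/`messages` are illustrative names.
#
#   import time
#   request = messages.ComputeGlobalOperationsGetRequest(
#       project='my-project', operation=operation.name)
#   while True:
#       op = client.globalOperations.Get(request)
#       if str(op.status) == 'DONE':
#           break
#       time.sleep(2)  # simple fixed backoff for the sketch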
class HealthChecksService(base_api.BaseApiService):
"""Service class for the healthChecks resource."""
_NAME = u'healthChecks'
def __init__(self, client):
super(ComputeAlpha.HealthChecksService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified HealthCheck resource.
Args:
request: (ComputeHealthChecksDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.healthChecks.delete',
ordered_params=[u'project', u'healthCheck'],
path_params=[u'healthCheck', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/healthChecks/{healthCheck}',
request_field='',
request_type_name=u'ComputeHealthChecksDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified HealthCheck resource. Get a list of available health checks by making a list() request.
Args:
request: (ComputeHealthChecksGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(HealthCheck) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.healthChecks.get',
ordered_params=[u'project', u'healthCheck'],
path_params=[u'healthCheck', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/healthChecks/{healthCheck}',
request_field='',
request_type_name=u'ComputeHealthChecksGetRequest',
response_type_name=u'HealthCheck',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a HealthCheck resource in the specified project using the data included in the request.
Args:
request: (ComputeHealthChecksInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.healthChecks.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/global/healthChecks',
request_field=u'healthCheck',
request_type_name=u'ComputeHealthChecksInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of HealthCheck resources available to the specified project.
Args:
request: (ComputeHealthChecksListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(HealthCheckList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.healthChecks.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/healthChecks',
request_field='',
request_type_name=u'ComputeHealthChecksListRequest',
response_type_name=u'HealthCheckList',
supports_download=False,
)
def Patch(self, request, global_params=None):
"""Updates a HealthCheck resource in the specified project using the data included in the request. This method supports patch semantics.
Args:
request: (ComputeHealthChecksPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PATCH',
method_id=u'compute.healthChecks.patch',
ordered_params=[u'project', u'healthCheck'],
path_params=[u'healthCheck', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/healthChecks/{healthCheck}',
request_field=u'healthCheckResource',
request_type_name=u'ComputeHealthChecksPatchRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeHealthChecksTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.healthChecks.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/healthChecks/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeHealthChecksTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
def Update(self, request, global_params=None):
"""Updates a HealthCheck resource in the specified project using the data included in the request.
Args:
request: (ComputeHealthChecksUpdateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PUT',
method_id=u'compute.healthChecks.update',
ordered_params=[u'project', u'healthCheck'],
path_params=[u'healthCheck', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/healthChecks/{healthCheck}',
request_field=u'healthCheckResource',
request_type_name=u'ComputeHealthChecksUpdateRequest',
response_type_name=u'Operation',
supports_download=False,
)
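# Sketch contrasting Patch and Update (commented out). Per the
# method_configs above, Patch issues an HTTP PATCH and supports partial
# updates, while Update issues a PUT and replaces the resource.
# `client`/`messages` are illustrative names and the checkIntervalSec field
# is an assumption about the HealthCheck message.
#
#   request = messages.ComputeHealthChecksPatchRequest(
#       project='my-project', healthCheck='my-check',
#       healthCheckResource=messages.HealthCheck(checkIntervalSec=10))
#   operation = client.healthChecks.Patch(request)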
class HostTypesService(base_api.BaseApiService):
"""Service class for the hostTypes resource."""
_NAME = u'hostTypes'
def __init__(self, client):
super(ComputeAlpha.HostTypesService, self).__init__(client)
self._upload_configs = {
}
def AggregatedList(self, request, global_params=None):
"""Retrieves an aggregated list of host types.
Args:
request: (ComputeHostTypesAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(HostTypeAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.hostTypes.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/hostTypes',
request_field='',
request_type_name=u'ComputeHostTypesAggregatedListRequest',
response_type_name=u'HostTypeAggregatedList',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified host type. Get a list of available host types by making a list() request.
Args:
request: (ComputeHostTypesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(HostType) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.hostTypes.get',
ordered_params=[u'project', u'zone', u'hostType'],
path_params=[u'hostType', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/hostTypes/{hostType}',
request_field='',
request_type_name=u'ComputeHostTypesGetRequest',
response_type_name=u'HostType',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of host types available to the specified project.
Args:
request: (ComputeHostTypesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(HostTypeList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.hostTypes.list',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/zones/{zone}/hostTypes',
request_field='',
request_type_name=u'ComputeHostTypesListRequest',
response_type_name=u'HostTypeList',
supports_download=False,
)
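# Sketch: fetching a single host type (commented out). Host types are
# zonal, so Get requires project, zone and hostType, per the ordered_params
# above. `client`/`messages` are illustrative names and the hostType value
# is a placeholder.
#
#   request = messages.ComputeHostTypesGetRequest(
#       project='my-project', zone='us-central1-a',
#       hostType='example-host-type')
#   host_type = client.hostTypes.Get(request)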
class HostsService(base_api.BaseApiService):
"""Service class for the hosts resource."""
_NAME = u'hosts'
def __init__(self, client):
super(ComputeAlpha.HostsService, self).__init__(client)
self._upload_configs = {
}
def AggregatedList(self, request, global_params=None):
"""Retrieves an aggregated list of hosts.
Args:
request: (ComputeHostsAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(HostAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.hosts.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/hosts',
request_field='',
request_type_name=u'ComputeHostsAggregatedListRequest',
response_type_name=u'HostAggregatedList',
supports_download=False,
)
def Delete(self, request, global_params=None):
"""Deletes the specified Host resource.
Args:
request: (ComputeHostsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.hosts.delete',
ordered_params=[u'project', u'zone', u'host'],
path_params=[u'host', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/hosts/{host}',
request_field='',
request_type_name=u'ComputeHostsDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified host. Get a list of available hosts by making a list() request.
Args:
request: (ComputeHostsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Host) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.hosts.get',
ordered_params=[u'project', u'zone', u'host'],
path_params=[u'host', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/hosts/{host}',
request_field='',
request_type_name=u'ComputeHostsGetRequest',
response_type_name=u'Host',
supports_download=False,
)
def GetIamPolicy(self, request, global_params=None):
"""Gets the access control policy for a resource. May be empty if no such policy or resource exists.
Args:
request: (ComputeHostsGetIamPolicyRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Policy) The response message.
"""
config = self.GetMethodConfig('GetIamPolicy')
return self._RunMethod(
config, request, global_params=global_params)
GetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.hosts.getIamPolicy',
ordered_params=[u'project', u'zone', u'resource'],
path_params=[u'project', u'resource', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/hosts/{resource}/getIamPolicy',
request_field='',
request_type_name=u'ComputeHostsGetIamPolicyRequest',
response_type_name=u'Policy',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a host resource in the specified project using the data included in the request.
Args:
request: (ComputeHostsInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.hosts.insert',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/hosts',
request_field=u'host',
request_type_name=u'ComputeHostsInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of hosts available to the specified project.
Args:
request: (ComputeHostsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(HostList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.hosts.list',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/zones/{zone}/hosts',
request_field='',
request_type_name=u'ComputeHostsListRequest',
response_type_name=u'HostList',
supports_download=False,
)
def SetIamPolicy(self, request, global_params=None):
"""Sets the access control policy on the specified resource. Replaces any existing policy.
Args:
request: (ComputeHostsSetIamPolicyRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Policy) The response message.
"""
config = self.GetMethodConfig('SetIamPolicy')
return self._RunMethod(
config, request, global_params=global_params)
SetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.hosts.setIamPolicy',
ordered_params=[u'project', u'zone', u'resource'],
path_params=[u'project', u'resource', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/hosts/{resource}/setIamPolicy',
request_field=u'policy',
request_type_name=u'ComputeHostsSetIamPolicyRequest',
response_type_name=u'Policy',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeHostsTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.hosts.testIamPermissions',
ordered_params=[u'project', u'zone', u'resource'],
path_params=[u'project', u'resource', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/hosts/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeHostsTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
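# Sketch: a read-modify-write of a host's IAM policy (commented out).
# GetIamPolicy returns a Policy and SetIamPolicy replaces any existing
# policy, per the docstrings above; the SetIamPolicy `policy` request_field
# matches the method_config, while the binding structure is left as an
# assumption. `client`/`messages` are illustrative names.
#
#   get_req = messages.ComputeHostsGetIamPolicyRequest(
#       project='my-project', zone='us-central1-a', resource='my-host')
#   policy = client.hosts.GetIamPolicy(get_req)
#   # ... modify policy.bindings here before writing it back ...
#   set_req = messages.ComputeHostsSetIamPolicyRequest(
#       project='my-project', zone='us-central1-a', resource='my-host',
#       policy=policy)
#   policy = client.hosts.SetIamPolicy(set_req)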
class HttpHealthChecksService(base_api.BaseApiService):
"""Service class for the httpHealthChecks resource."""
_NAME = u'httpHealthChecks'
def __init__(self, client):
super(ComputeAlpha.HttpHealthChecksService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified HttpHealthCheck resource.
Args:
request: (ComputeHttpHealthChecksDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.httpHealthChecks.delete',
ordered_params=[u'project', u'httpHealthCheck'],
path_params=[u'httpHealthCheck', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/httpHealthChecks/{httpHealthCheck}',
request_field='',
request_type_name=u'ComputeHttpHealthChecksDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified HttpHealthCheck resource. Get a list of available HTTP health checks by making a list() request.
Args:
request: (ComputeHttpHealthChecksGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(HttpHealthCheck) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.httpHealthChecks.get',
ordered_params=[u'project', u'httpHealthCheck'],
path_params=[u'httpHealthCheck', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/httpHealthChecks/{httpHealthCheck}',
request_field='',
request_type_name=u'ComputeHttpHealthChecksGetRequest',
response_type_name=u'HttpHealthCheck',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a HttpHealthCheck resource in the specified project using the data included in the request.
Args:
request: (ComputeHttpHealthChecksInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.httpHealthChecks.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/global/httpHealthChecks',
request_field=u'httpHealthCheck',
request_type_name=u'ComputeHttpHealthChecksInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of HttpHealthCheck resources available to the specified project.
Args:
request: (ComputeHttpHealthChecksListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(HttpHealthCheckList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.httpHealthChecks.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/httpHealthChecks',
request_field='',
request_type_name=u'ComputeHttpHealthChecksListRequest',
response_type_name=u'HttpHealthCheckList',
supports_download=False,
)
def Patch(self, request, global_params=None):
"""Updates a HttpHealthCheck resource in the specified project using the data included in the request. This method supports patch semantics.
Args:
request: (ComputeHttpHealthChecksPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PATCH',
method_id=u'compute.httpHealthChecks.patch',
ordered_params=[u'project', u'httpHealthCheck'],
path_params=[u'httpHealthCheck', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/httpHealthChecks/{httpHealthCheck}',
request_field=u'httpHealthCheckResource',
request_type_name=u'ComputeHttpHealthChecksPatchRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeHttpHealthChecksTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.httpHealthChecks.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/httpHealthChecks/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeHttpHealthChecksTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
def Update(self, request, global_params=None):
"""Updates a HttpHealthCheck resource in the specified project using the data included in the request.
Args:
request: (ComputeHttpHealthChecksUpdateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PUT',
method_id=u'compute.httpHealthChecks.update',
ordered_params=[u'project', u'httpHealthCheck'],
path_params=[u'httpHealthCheck', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/httpHealthChecks/{httpHealthCheck}',
request_field=u'httpHealthCheckResource',
request_type_name=u'ComputeHttpHealthChecksUpdateRequest',
response_type_name=u'Operation',
supports_download=False,
)
class HttpsHealthChecksService(base_api.BaseApiService):
"""Service class for the httpsHealthChecks resource."""
_NAME = u'httpsHealthChecks'
def __init__(self, client):
super(ComputeAlpha.HttpsHealthChecksService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified HttpsHealthCheck resource.
Args:
request: (ComputeHttpsHealthChecksDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.httpsHealthChecks.delete',
ordered_params=[u'project', u'httpsHealthCheck'],
path_params=[u'httpsHealthCheck', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/httpsHealthChecks/{httpsHealthCheck}',
request_field='',
request_type_name=u'ComputeHttpsHealthChecksDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified HttpsHealthCheck resource. Get a list of available HTTPS health checks by making a list() request.
Args:
request: (ComputeHttpsHealthChecksGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(HttpsHealthCheck) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.httpsHealthChecks.get',
ordered_params=[u'project', u'httpsHealthCheck'],
path_params=[u'httpsHealthCheck', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/httpsHealthChecks/{httpsHealthCheck}',
request_field='',
request_type_name=u'ComputeHttpsHealthChecksGetRequest',
response_type_name=u'HttpsHealthCheck',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a HttpsHealthCheck resource in the specified project using the data included in the request.
Args:
request: (ComputeHttpsHealthChecksInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.httpsHealthChecks.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/global/httpsHealthChecks',
request_field=u'httpsHealthCheck',
request_type_name=u'ComputeHttpsHealthChecksInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of HttpsHealthCheck resources available to the specified project.
Args:
request: (ComputeHttpsHealthChecksListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(HttpsHealthCheckList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.httpsHealthChecks.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/httpsHealthChecks',
request_field='',
request_type_name=u'ComputeHttpsHealthChecksListRequest',
response_type_name=u'HttpsHealthCheckList',
supports_download=False,
)
def Patch(self, request, global_params=None):
"""Updates a HttpsHealthCheck resource in the specified project using the data included in the request. This method supports patch semantics.
Args:
request: (ComputeHttpsHealthChecksPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PATCH',
method_id=u'compute.httpsHealthChecks.patch',
ordered_params=[u'project', u'httpsHealthCheck'],
path_params=[u'httpsHealthCheck', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/httpsHealthChecks/{httpsHealthCheck}',
request_field=u'httpsHealthCheckResource',
request_type_name=u'ComputeHttpsHealthChecksPatchRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeHttpsHealthChecksTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.httpsHealthChecks.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/httpsHealthChecks/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeHttpsHealthChecksTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
def Update(self, request, global_params=None):
"""Updates a HttpsHealthCheck resource in the specified project using the data included in the request.
Args:
request: (ComputeHttpsHealthChecksUpdateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PUT',
method_id=u'compute.httpsHealthChecks.update',
ordered_params=[u'project', u'httpsHealthCheck'],
path_params=[u'httpsHealthCheck', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/httpsHealthChecks/{httpsHealthCheck}',
request_field=u'httpsHealthCheckResource',
request_type_name=u'ComputeHttpsHealthChecksUpdateRequest',
response_type_name=u'Operation',
supports_download=False,
)
class ImagesService(base_api.BaseApiService):
"""Service class for the images resource."""
_NAME = u'images'
def __init__(self, client):
super(ComputeAlpha.ImagesService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified image.
Args:
request: (ComputeImagesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.images.delete',
ordered_params=[u'project', u'image'],
path_params=[u'image', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/images/{image}',
request_field='',
request_type_name=u'ComputeImagesDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Deprecate(self, request, global_params=None):
"""Sets the deprecation status of an image.
If an empty request body is given, clears the deprecation status instead.
Args:
request: (ComputeImagesDeprecateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Deprecate')
return self._RunMethod(
config, request, global_params=global_params)
Deprecate.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.images.deprecate',
ordered_params=[u'project', u'image'],
path_params=[u'image', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/images/{image}/deprecate',
request_field=u'deprecationStatus',
request_type_name=u'ComputeImagesDeprecateRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified image. Get a list of available images by making a list() request.
Args:
request: (ComputeImagesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Image) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.images.get',
ordered_params=[u'project', u'image'],
path_params=[u'image', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/images/{image}',
request_field='',
request_type_name=u'ComputeImagesGetRequest',
response_type_name=u'Image',
supports_download=False,
)
def GetFromFamily(self, request, global_params=None):
"""Returns the latest image that is part of an image family and is not deprecated.
Args:
request: (ComputeImagesGetFromFamilyRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Image) The response message.
"""
config = self.GetMethodConfig('GetFromFamily')
return self._RunMethod(
config, request, global_params=global_params)
GetFromFamily.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.images.getFromFamily',
ordered_params=[u'project', u'family'],
path_params=[u'family', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/images/family/{family}',
request_field='',
request_type_name=u'ComputeImagesGetFromFamilyRequest',
response_type_name=u'Image',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates an image in the specified project using the data included in the request.
Args:
request: (ComputeImagesInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.images.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'forceCreation'],
relative_path=u'projects/{project}/global/images',
request_field=u'image',
request_type_name=u'ComputeImagesInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of private images available to the specified project. Private images are images you create that belong to your project. This method does not get any images that belong to other projects, including publicly-available images, like Debian 8. If you want to get a list of publicly-available images, use this method to make a request to the respective image project, such as debian-cloud or windows-cloud.
Args:
request: (ComputeImagesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ImageList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.images.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/images',
request_field='',
request_type_name=u'ComputeImagesListRequest',
response_type_name=u'ImageList',
supports_download=False,
)
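# Sketch: listing public images by querying the image project directly
# (commented out), as the List docstring above describes for projects such
# as debian-cloud. `client`/`messages` are illustrative names; the items
# field follows the usual list-response shape and is an assumption.
#
#   request = messages.ComputeImagesListRequest(
#       project='debian-cloud', maxResults=500)
#   response = client.images.List(request)
#   for image in response.items or []:
#       print(image.name)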
def SetLabels(self, request, global_params=None):
"""Sets the labels on an image. To learn more about labels, read the Labeling or Tagging Resources documentation.
Args:
request: (ComputeImagesSetLabelsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetLabels')
return self._RunMethod(
config, request, global_params=global_params)
SetLabels.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.images.setLabels',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/images/{resource}/setLabels',
request_field=u'globalSetLabelsRequest',
request_type_name=u'ComputeImagesSetLabelsRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeImagesTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.images.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/images/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeImagesTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
class InstanceGroupManagersService(base_api.BaseApiService):
"""Service class for the instanceGroupManagers resource."""
_NAME = u'instanceGroupManagers'
def __init__(self, client):
super(ComputeAlpha.InstanceGroupManagersService, self).__init__(client)
self._upload_configs = {
}
def AbandonInstances(self, request, global_params=None):
"""Schedules a group action to remove the specified instances from the managed instance group. Abandoning an instance does not delete the instance, but it does remove the instance from any target pools that are applied by the managed instance group. This method reduces the targetSize of the managed instance group by the number of instances that you abandon. This operation is marked as DONE when the action is scheduled even if the instances have not yet been removed from the group. You must separately verify the status of the abandoning action with the listmanagedinstances method.
Args:
request: (ComputeInstanceGroupManagersAbandonInstancesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('AbandonInstances')
return self._RunMethod(
config, request, global_params=global_params)
AbandonInstances.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instanceGroupManagers.abandonInstances',
ordered_params=[u'project', u'zone', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/abandonInstances',
request_field=u'instanceGroupManagersAbandonInstancesRequest',
request_type_name=u'ComputeInstanceGroupManagersAbandonInstancesRequest',
response_type_name=u'Operation',
supports_download=False,
)
def AggregatedList(self, request, global_params=None):
"""Retrieves the list of managed instance groups and groups them by zone.
Args:
request: (ComputeInstanceGroupManagersAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(InstanceGroupManagerAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.instanceGroupManagers.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/instanceGroupManagers',
request_field='',
request_type_name=u'ComputeInstanceGroupManagersAggregatedListRequest',
response_type_name=u'InstanceGroupManagerAggregatedList',
supports_download=False,
)
def Delete(self, request, global_params=None):
"""Deletes the specified managed instance group and all of the instances in that group. Note that the instance group must not belong to a backend service. Read Deleting an instance group for more information.
Args:
request: (ComputeInstanceGroupManagersDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.instanceGroupManagers.delete',
ordered_params=[u'project', u'zone', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}',
request_field='',
request_type_name=u'ComputeInstanceGroupManagersDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def DeleteInstances(self, request, global_params=None):
"""Schedules a group action to delete the specified instances in the managed instance group. The instances are also removed from any target pools of which they were a member. This method reduces the targetSize of the managed instance group by the number of instances that you delete. This operation is marked as DONE when the action is scheduled even if the instances are still being deleted. You must separately verify the status of the deleting action with the listmanagedinstances method.
Args:
request: (ComputeInstanceGroupManagersDeleteInstancesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('DeleteInstances')
return self._RunMethod(
config, request, global_params=global_params)
DeleteInstances.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instanceGroupManagers.deleteInstances',
ordered_params=[u'project', u'zone', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/deleteInstances',
request_field=u'instanceGroupManagersDeleteInstancesRequest',
request_type_name=u'ComputeInstanceGroupManagersDeleteInstancesRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns all of the details about the specified managed instance group. Get a list of available managed instance groups by making a list() request.
Args:
request: (ComputeInstanceGroupManagersGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(InstanceGroupManager) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.instanceGroupManagers.get',
ordered_params=[u'project', u'zone', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}',
request_field='',
request_type_name=u'ComputeInstanceGroupManagersGetRequest',
response_type_name=u'InstanceGroupManager',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a managed instance group using the information that you specify in the request. After the group is created, it schedules an action to create instances in the group using the specified instance template. This operation is marked as DONE when the group is created even if the instances in the group have not yet been created. You must separately verify the status of the individual instances with the listmanagedinstances method.
Args:
request: (ComputeInstanceGroupManagersInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instanceGroupManagers.insert',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers',
request_field=u'instanceGroupManager',
request_type_name=u'ComputeInstanceGroupManagersInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of managed instance groups that are contained within the specified project and zone.
Args:
request: (ComputeInstanceGroupManagersListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(InstanceGroupManagerList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.instanceGroupManagers.list',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers',
request_field='',
request_type_name=u'ComputeInstanceGroupManagersListRequest',
response_type_name=u'InstanceGroupManagerList',
supports_download=False,
)
def ListManagedInstances(self, request, global_params=None):
"""Lists all of the instances in the managed instance group. Each instance in the list has a currentAction, which indicates the action that the managed instance group is performing on the instance. For example, if the group is still creating an instance, the currentAction is CREATING. If a previous action failed, the list displays the errors for that failed action.
Args:
request: (ComputeInstanceGroupManagersListManagedInstancesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(InstanceGroupManagersListManagedInstancesResponse) The response message.
"""
config = self.GetMethodConfig('ListManagedInstances')
return self._RunMethod(
config, request, global_params=global_params)
ListManagedInstances.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instanceGroupManagers.listManagedInstances',
ordered_params=[u'project', u'zone', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'zone'],
query_params=[u'filter', u'maxResults', u'order_by', u'pageToken'],
relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/listManagedInstances',
request_field='',
request_type_name=u'ComputeInstanceGroupManagersListManagedInstancesRequest',
response_type_name=u'InstanceGroupManagersListManagedInstancesResponse',
supports_download=False,
)
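# Sketch of the verification pattern the docstrings above call out: group
# actions (abandon/delete/recreate) report DONE once *scheduled*, so
# per-instance progress has to be read back with ListManagedInstances.
# Hypothetical helper; same client/messages assumptions as the earlier sketch.
def _example_poll_managed_instances(client, project, zone, igm_name):
  request = messages.ComputeInstanceGroupManagersListManagedInstancesRequest(
      project=project, zone=zone, instanceGroupManager=igm_name)
  response = client.instanceGroupManagers.ListManagedInstances(request)
  for managed in response.managedInstances or []:
    # currentAction is e.g. CREATING, RECREATING, DELETING, or NONE.
    print(managed.instance, managed.currentAction)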
def Patch(self, request, global_params=None):
"""Updates a managed instance group using the information that you specify in the request. The field statefulPolicy is updated using PATCH semantics. This operation is marked as DONE when the group is updated even if the instances in the group have not yet been updated. You must separately verify the status of the individual instances with the listmanagedinstances method. This method supports patch semantics.
Args:
request: (ComputeInstanceGroupManagersPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PATCH',
method_id=u'compute.instanceGroupManagers.patch',
ordered_params=[u'project', u'zone', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}',
request_field=u'instanceGroupManagerResource',
request_type_name=u'ComputeInstanceGroupManagersPatchRequest',
response_type_name=u'Operation',
supports_download=False,
)
def RecreateInstances(self, request, global_params=None):
"""Schedules a group action to recreate the specified instances in the managed instance group. The instances are deleted and recreated using the current instance template for the managed instance group. This operation is marked as DONE when the action is scheduled even if the instances have not yet been recreated. You must separately verify the status of the recreating action with the listmanagedinstances method.
Args:
request: (ComputeInstanceGroupManagersRecreateInstancesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('RecreateInstances')
return self._RunMethod(
config, request, global_params=global_params)
RecreateInstances.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instanceGroupManagers.recreateInstances',
ordered_params=[u'project', u'zone', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/recreateInstances',
request_field=u'instanceGroupManagersRecreateInstancesRequest',
request_type_name=u'ComputeInstanceGroupManagersRecreateInstancesRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Resize(self, request, global_params=None):
"""Resizes the managed instance group. If you increase the size, the group creates new instances using the current instance template. If you decrease the size, the group deletes instances. The resize operation is marked DONE when the resize actions are scheduled even if the group has not yet added or deleted any instances. You must separately verify the status of the creating or deleting actions with the listmanagedinstances method.
Args:
request: (ComputeInstanceGroupManagersResizeRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Resize')
return self._RunMethod(
config, request, global_params=global_params)
Resize.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instanceGroupManagers.resize',
ordered_params=[u'project', u'zone', u'instanceGroupManager', u'size'],
path_params=[u'instanceGroupManager', u'project', u'zone'],
query_params=[u'size'],
relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/resize',
request_field='',
request_type_name=u'ComputeInstanceGroupManagersResizeRequest',
response_type_name=u'Operation',
supports_download=False,
)
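# Minimal sketch of Resize: note from the config above that `size` travels as
# a query parameter rather than in a request body, and the returned Operation
# is DONE once the resize is scheduled, before any instances change.
# Hypothetical helper; same client/messages assumptions as above.
def _example_resize_igm(client, project, zone, igm_name, new_size):
  request = messages.ComputeInstanceGroupManagersResizeRequest(
      project=project, zone=zone, instanceGroupManager=igm_name, size=new_size)
  return client.instanceGroupManagers.Resize(request)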
def ResizeAdvanced(self, request, global_params=None):
"""Resizes the managed instance group with advanced configuration options like disabling creation retries. This is an extended version of the resize method.
If you increase the size of the instance group, the group creates new instances using the current instance template. If you decrease the size, the group deletes instances. The resize operation is marked DONE when the resize actions are scheduled even if the group has not yet added or deleted any instances. You must separately verify the status of the creating, creatingWithoutRetries, or deleting actions with the get or listmanagedinstances method.
Args:
request: (ComputeInstanceGroupManagersResizeAdvancedRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('ResizeAdvanced')
return self._RunMethod(
config, request, global_params=global_params)
ResizeAdvanced.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instanceGroupManagers.resizeAdvanced',
ordered_params=[u'project', u'zone', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/resizeAdvanced',
request_field=u'instanceGroupManagersResizeAdvancedRequest',
request_type_name=u'ComputeInstanceGroupManagersResizeAdvancedRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetAutoHealingPolicies(self, request, global_params=None):
"""Modifies the autohealing policies.
Args:
request: (ComputeInstanceGroupManagersSetAutoHealingPoliciesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetAutoHealingPolicies')
return self._RunMethod(
config, request, global_params=global_params)
SetAutoHealingPolicies.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instanceGroupManagers.setAutoHealingPolicies',
ordered_params=[u'project', u'zone', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/setAutoHealingPolicies',
request_field=u'instanceGroupManagersSetAutoHealingRequest',
request_type_name=u'ComputeInstanceGroupManagersSetAutoHealingPoliciesRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetInstanceTemplate(self, request, global_params=None):
"""Specifies the instance template to use when creating new instances in this group. The templates for existing instances in the group do not change unless you recreate them.
Args:
request: (ComputeInstanceGroupManagersSetInstanceTemplateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetInstanceTemplate')
return self._RunMethod(
config, request, global_params=global_params)
SetInstanceTemplate.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instanceGroupManagers.setInstanceTemplate',
ordered_params=[u'project', u'zone', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/setInstanceTemplate',
request_field=u'instanceGroupManagersSetInstanceTemplateRequest',
request_type_name=u'ComputeInstanceGroupManagersSetInstanceTemplateRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetTargetPools(self, request, global_params=None):
"""Modifies the target pools to which all instances in this managed instance group are assigned. The target pools automatically apply to all of the instances in the managed instance group. This operation is marked DONE when you make the request even if the instances have not yet been added to their target pools. The change might take some time to apply to all of the instances in the group depending on the size of the group.
Args:
request: (ComputeInstanceGroupManagersSetTargetPoolsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetTargetPools')
return self._RunMethod(
config, request, global_params=global_params)
SetTargetPools.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instanceGroupManagers.setTargetPools',
ordered_params=[u'project', u'zone', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}/setTargetPools',
request_field=u'instanceGroupManagersSetTargetPoolsRequest',
request_type_name=u'ComputeInstanceGroupManagersSetTargetPoolsRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeInstanceGroupManagersTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instanceGroupManagers.testIamPermissions',
ordered_params=[u'project', u'zone', u'resource'],
path_params=[u'project', u'resource', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeInstanceGroupManagersTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
def Update(self, request, global_params=None):
"""Updates a managed instance group using the information that you specify in the request. The field statefulPolicy is updated using PATCH semantics. This operation is marked as DONE when the group is updated even if the instances in the group have not yet been updated. You must separately verify the status of the individual instances with the listmanagedinstances method.
Args:
request: (ComputeInstanceGroupManagersUpdateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PUT',
method_id=u'compute.instanceGroupManagers.update',
ordered_params=[u'project', u'zone', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instanceGroupManagers/{instanceGroupManager}',
request_field=u'instanceGroupManagerResource',
request_type_name=u'ComputeInstanceGroupManagersUpdateRequest',
response_type_name=u'Operation',
supports_download=False,
)
class InstanceGroupsService(base_api.BaseApiService):
"""Service class for the instanceGroups resource."""
_NAME = u'instanceGroups'
def __init__(self, client):
super(ComputeAlpha.InstanceGroupsService, self).__init__(client)
self._upload_configs = {
}
def AddInstances(self, request, global_params=None):
"""Adds a list of instances to the specified instance group. All of the instances in the instance group must be in the same network/subnetwork. Read Adding instances for more information.
Args:
request: (ComputeInstanceGroupsAddInstancesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('AddInstances')
return self._RunMethod(
config, request, global_params=global_params)
AddInstances.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instanceGroups.addInstances',
ordered_params=[u'project', u'zone', u'instanceGroup'],
path_params=[u'instanceGroup', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instanceGroups/{instanceGroup}/addInstances',
request_field=u'instanceGroupsAddInstancesRequest',
request_type_name=u'ComputeInstanceGroupsAddInstancesRequest',
response_type_name=u'Operation',
supports_download=False,
)
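# Sketch of the nested wrapper AddInstances expects (request_field above is
# u'instanceGroupsAddInstancesRequest'): instances are passed by URL inside an
# InstanceGroupsAddInstancesRequest message. Hypothetical helper; same
# client/messages assumptions as above.
def _example_add_instances(client, project, zone, group_name, instance_urls):
  body = messages.InstanceGroupsAddInstancesRequest(
      instances=[messages.InstanceReference(instance=url)
                 for url in instance_urls])
  request = messages.ComputeInstanceGroupsAddInstancesRequest(
      project=project, zone=zone, instanceGroup=group_name,
      instanceGroupsAddInstancesRequest=body)
  return client.instanceGroups.AddInstances(request)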
def AggregatedList(self, request, global_params=None):
"""Retrieves the list of instance groups and sorts them by zone.
Args:
request: (ComputeInstanceGroupsAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(InstanceGroupAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.instanceGroups.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/instanceGroups',
request_field='',
request_type_name=u'ComputeInstanceGroupsAggregatedListRequest',
response_type_name=u'InstanceGroupAggregatedList',
supports_download=False,
)
def Delete(self, request, global_params=None):
"""Deletes the specified instance group. The instances in the group are not deleted. Note that instance group must not belong to a backend service. Read Deleting an instance group for more information.
Args:
request: (ComputeInstanceGroupsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.instanceGroups.delete',
ordered_params=[u'project', u'zone', u'instanceGroup'],
path_params=[u'instanceGroup', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instanceGroups/{instanceGroup}',
request_field='',
request_type_name=u'ComputeInstanceGroupsDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified instance group. Get a list of available instance groups by making a list() request.
Args:
request: (ComputeInstanceGroupsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(InstanceGroup) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.instanceGroups.get',
ordered_params=[u'project', u'zone', u'instanceGroup'],
path_params=[u'instanceGroup', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instanceGroups/{instanceGroup}',
request_field='',
request_type_name=u'ComputeInstanceGroupsGetRequest',
response_type_name=u'InstanceGroup',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates an instance group in the specified project using the parameters that are included in the request.
Args:
request: (ComputeInstanceGroupsInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instanceGroups.insert',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instanceGroups',
request_field=u'instanceGroup',
request_type_name=u'ComputeInstanceGroupsInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of instance groups that are located in the specified project and zone.
Args:
request: (ComputeInstanceGroupsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(InstanceGroupList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.instanceGroups.list',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/zones/{zone}/instanceGroups',
request_field='',
request_type_name=u'ComputeInstanceGroupsListRequest',
response_type_name=u'InstanceGroupList',
supports_download=False,
)
def ListInstances(self, request, global_params=None):
"""Lists the instances in the specified instance group.
Args:
request: (ComputeInstanceGroupsListInstancesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(InstanceGroupsListInstances) The response message.
"""
config = self.GetMethodConfig('ListInstances')
return self._RunMethod(
config, request, global_params=global_params)
ListInstances.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instanceGroups.listInstances',
ordered_params=[u'project', u'zone', u'instanceGroup'],
path_params=[u'instanceGroup', u'project', u'zone'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/zones/{zone}/instanceGroups/{instanceGroup}/listInstances',
request_field=u'instanceGroupsListInstancesRequest',
request_type_name=u'ComputeInstanceGroupsListInstancesRequest',
response_type_name=u'InstanceGroupsListInstances',
supports_download=False,
)
def RemoveInstances(self, request, global_params=None):
"""Removes one or more instances from the specified instance group, but does not delete those instances.
Args:
request: (ComputeInstanceGroupsRemoveInstancesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('RemoveInstances')
return self._RunMethod(
config, request, global_params=global_params)
RemoveInstances.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instanceGroups.removeInstances',
ordered_params=[u'project', u'zone', u'instanceGroup'],
path_params=[u'instanceGroup', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instanceGroups/{instanceGroup}/removeInstances',
request_field=u'instanceGroupsRemoveInstancesRequest',
request_type_name=u'ComputeInstanceGroupsRemoveInstancesRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetNamedPorts(self, request, global_params=None):
"""Sets the named ports for the specified instance group.
Args:
request: (ComputeInstanceGroupsSetNamedPortsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetNamedPorts')
return self._RunMethod(
config, request, global_params=global_params)
SetNamedPorts.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instanceGroups.setNamedPorts',
ordered_params=[u'project', u'zone', u'instanceGroup'],
path_params=[u'instanceGroup', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instanceGroups/{instanceGroup}/setNamedPorts',
request_field=u'instanceGroupsSetNamedPortsRequest',
request_type_name=u'ComputeInstanceGroupsSetNamedPortsRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeInstanceGroupsTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instanceGroups.testIamPermissions',
ordered_params=[u'project', u'zone', u'resource'],
path_params=[u'project', u'resource', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instanceGroups/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeInstanceGroupsTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
class InstanceTemplatesService(base_api.BaseApiService):
"""Service class for the instanceTemplates resource."""
_NAME = u'instanceTemplates'
def __init__(self, client):
super(ComputeAlpha.InstanceTemplatesService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified instance template. If you delete an instance template that is being referenced from another instance group, the instance group will not be able to create or recreate virtual machine instances. Deleting an instance template is permanent and cannot be undone.
Args:
request: (ComputeInstanceTemplatesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.instanceTemplates.delete',
ordered_params=[u'project', u'instanceTemplate'],
path_params=[u'instanceTemplate', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/instanceTemplates/{instanceTemplate}',
request_field='',
request_type_name=u'ComputeInstanceTemplatesDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified instance template. Get a list of available instance templates by making a list() request.
Args:
request: (ComputeInstanceTemplatesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(InstanceTemplate) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.instanceTemplates.get',
ordered_params=[u'project', u'instanceTemplate'],
path_params=[u'instanceTemplate', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/instanceTemplates/{instanceTemplate}',
request_field='',
request_type_name=u'ComputeInstanceTemplatesGetRequest',
response_type_name=u'InstanceTemplate',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates an instance template in the specified project using the data that is included in the request. If you are creating a new template to update an existing instance group, your new instance template must use the same network or, if applicable, the same subnetwork as the original template.
Args:
request: (ComputeInstanceTemplatesInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instanceTemplates.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/global/instanceTemplates',
request_field=u'instanceTemplate',
request_type_name=u'ComputeInstanceTemplatesInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
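# Minimal sketch of Insert against the global instanceTemplates collection. A
# usable template also needs disks and network interfaces under `properties`;
# this only shows how the request_field (u'instanceTemplate') is wired.
# Hypothetical helper; same client/messages assumptions as above.
def _example_insert_template(client, project):
  template = messages.InstanceTemplate(
      name='example-template',  # hypothetical name
      properties=messages.InstanceProperties(machineType='n1-standard-1'))
  request = messages.ComputeInstanceTemplatesInsertRequest(
      project=project, instanceTemplate=template)
  return client.instanceTemplates.Insert(request)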
def List(self, request, global_params=None):
"""Retrieves a list of instance templates that are contained within the specified project and zone.
Args:
request: (ComputeInstanceTemplatesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(InstanceTemplateList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.instanceTemplates.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/instanceTemplates',
request_field='',
request_type_name=u'ComputeInstanceTemplatesListRequest',
response_type_name=u'InstanceTemplateList',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeInstanceTemplatesTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instanceTemplates.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/instanceTemplates/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeInstanceTemplatesTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
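# The TestIamPermissions methods on these services share one shape: a
# TestPermissionsRequest carrying candidate permission strings, answered with
# the subset the caller actually holds. A sketch against instanceTemplates;
# hypothetical helper, same client/messages assumptions as above.
def _example_test_permissions(client, project, template_name):
  request = messages.ComputeInstanceTemplatesTestIamPermissionsRequest(
      project=project, resource=template_name,
      testPermissionsRequest=messages.TestPermissionsRequest(
          permissions=[u'compute.instanceTemplates.get']))
  response = client.instanceTemplates.TestIamPermissions(request)
  return response.permissions  # the granted subset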
class InstancesService(base_api.BaseApiService):
"""Service class for the instances resource."""
_NAME = u'instances'
def __init__(self, client):
super(ComputeAlpha.InstancesService, self).__init__(client)
self._upload_configs = {
}
def AddAccessConfig(self, request, global_params=None):
"""Adds an access config to an instance's network interface.
Args:
request: (ComputeInstancesAddAccessConfigRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('AddAccessConfig')
return self._RunMethod(
config, request, global_params=global_params)
AddAccessConfig.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instances.addAccessConfig',
ordered_params=[u'project', u'zone', u'instance', u'networkInterface'],
path_params=[u'instance', u'project', u'zone'],
query_params=[u'networkInterface'],
relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/addAccessConfig',
request_field=u'accessConfig',
request_type_name=u'ComputeInstancesAddAccessConfigRequest',
response_type_name=u'Operation',
supports_download=False,
)
def AggregatedList(self, request, global_params=None):
"""Retrieves aggregated list of instances.
Args:
request: (ComputeInstancesAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(InstanceAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.instances.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/instances',
request_field='',
request_type_name=u'ComputeInstancesAggregatedListRequest',
response_type_name=u'InstanceAggregatedList',
supports_download=False,
)
def AttachDisk(self, request, global_params=None):
"""Attaches a Disk resource to an instance.
Args:
request: (ComputeInstancesAttachDiskRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('AttachDisk')
return self._RunMethod(
config, request, global_params=global_params)
AttachDisk.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instances.attachDisk',
ordered_params=[u'project', u'zone', u'instance'],
path_params=[u'instance', u'project', u'zone'],
query_params=[u'forceAttach'],
relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/attachDisk',
request_field=u'attachedDisk',
request_type_name=u'ComputeInstancesAttachDiskRequest',
response_type_name=u'Operation',
supports_download=False,
)
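# Sketch of AttachDisk: the disk is described by an AttachedDisk message in
# the request body, while forceAttach (see query_params above) is a query
# parameter. Hypothetical helper; same client/messages assumptions as above.
def _example_attach_disk(client, project, zone, instance_name, disk_url):
  request = messages.ComputeInstancesAttachDiskRequest(
      project=project, zone=zone, instance=instance_name,
      attachedDisk=messages.AttachedDisk(source=disk_url, autoDelete=False))
  return client.instances.AttachDisk(request)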
def Delete(self, request, global_params=None):
"""Deletes the specified Instance resource. For more information, see Stopping or Deleting an Instance.
Args:
request: (ComputeInstancesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.instances.delete',
ordered_params=[u'project', u'zone', u'instance'],
path_params=[u'instance', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instances/{instance}',
request_field='',
request_type_name=u'ComputeInstancesDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def DeleteAccessConfig(self, request, global_params=None):
"""Deletes an access config from an instance's network interface.
Args:
request: (ComputeInstancesDeleteAccessConfigRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('DeleteAccessConfig')
return self._RunMethod(
config, request, global_params=global_params)
DeleteAccessConfig.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instances.deleteAccessConfig',
ordered_params=[u'project', u'zone', u'instance', u'accessConfig', u'networkInterface'],
path_params=[u'instance', u'project', u'zone'],
query_params=[u'accessConfig', u'networkInterface'],
relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/deleteAccessConfig',
request_field='',
request_type_name=u'ComputeInstancesDeleteAccessConfigRequest',
response_type_name=u'Operation',
supports_download=False,
)
def DetachDisk(self, request, global_params=None):
"""Detaches a disk from an instance.
Args:
request: (ComputeInstancesDetachDiskRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('DetachDisk')
return self._RunMethod(
config, request, global_params=global_params)
DetachDisk.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instances.detachDisk',
ordered_params=[u'project', u'zone', u'instance', u'deviceName'],
path_params=[u'instance', u'project', u'zone'],
query_params=[u'deviceName'],
relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/detachDisk',
request_field='',
request_type_name=u'ComputeInstancesDetachDiskRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified Instance resource. Get a list of available instances by making a list() request.
Args:
request: (ComputeInstancesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Instance) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.instances.get',
ordered_params=[u'project', u'zone', u'instance'],
path_params=[u'instance', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instances/{instance}',
request_field='',
request_type_name=u'ComputeInstancesGetRequest',
response_type_name=u'Instance',
supports_download=False,
)
def GetIamPolicy(self, request, global_params=None):
"""Gets the access control policy for a resource. May be empty if no such policy or resource exists.
Args:
request: (ComputeInstancesGetIamPolicyRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Policy) The response message.
"""
config = self.GetMethodConfig('GetIamPolicy')
return self._RunMethod(
config, request, global_params=global_params)
GetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.instances.getIamPolicy',
ordered_params=[u'project', u'zone', u'resource'],
path_params=[u'project', u'resource', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instances/{resource}/getIamPolicy',
request_field='',
request_type_name=u'ComputeInstancesGetIamPolicyRequest',
response_type_name=u'Policy',
supports_download=False,
)
def GetSerialPortOutput(self, request, global_params=None):
"""Returns the specified instance's serial port output.
Args:
request: (ComputeInstancesGetSerialPortOutputRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(SerialPortOutput) The response message.
"""
config = self.GetMethodConfig('GetSerialPortOutput')
return self._RunMethod(
config, request, global_params=global_params)
GetSerialPortOutput.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.instances.getSerialPortOutput',
ordered_params=[u'project', u'zone', u'instance'],
path_params=[u'instance', u'project', u'zone'],
query_params=[u'port', u'start'],
relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/serialPort',
request_field='',
request_type_name=u'ComputeInstancesGetSerialPortOutputRequest',
response_type_name=u'SerialPortOutput',
supports_download=False,
)
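# Sketch of incremental serial-console reads: `start` is a byte offset, and
# the response's `next` field (assumed here from the SerialPortOutput message)
# says where the following call should resume. Hypothetical helper; same
# client/messages assumptions as above.
def _example_tail_serial_output(client, project, zone, instance_name):
  offset = 0
  for _ in range(3):  # a few incremental reads, for illustration
    request = messages.ComputeInstancesGetSerialPortOutputRequest(
        project=project, zone=zone, instance=instance_name,
        port=1, start=offset)
    response = client.instances.GetSerialPortOutput(request)
    print(response.contents)
    offset = response.next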
def Insert(self, request, global_params=None):
"""Creates an instance resource in the specified project using the data included in the request.
Args:
request: (ComputeInstancesInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instances.insert',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instances',
request_field=u'instance',
request_type_name=u'ComputeInstancesInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of instances contained within the specified zone.
Args:
request: (ComputeInstancesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(InstanceList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.instances.list',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/zones/{zone}/instances',
request_field='',
request_type_name=u'ComputeInstancesListRequest',
response_type_name=u'InstanceList',
supports_download=False,
)
def Reset(self, request, global_params=None):
"""Performs a hard reset on the instance.
Args:
request: (ComputeInstancesResetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Reset')
return self._RunMethod(
config, request, global_params=global_params)
Reset.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instances.reset',
ordered_params=[u'project', u'zone', u'instance'],
path_params=[u'instance', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/reset',
request_field='',
request_type_name=u'ComputeInstancesResetRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetDiskAutoDelete(self, request, global_params=None):
"""Sets the auto-delete flag for a disk attached to an instance.
Args:
request: (ComputeInstancesSetDiskAutoDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetDiskAutoDelete')
return self._RunMethod(
config, request, global_params=global_params)
SetDiskAutoDelete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instances.setDiskAutoDelete',
ordered_params=[u'project', u'zone', u'instance', u'autoDelete', u'deviceName'],
path_params=[u'instance', u'project', u'zone'],
query_params=[u'autoDelete', u'deviceName'],
relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setDiskAutoDelete',
request_field='',
request_type_name=u'ComputeInstancesSetDiskAutoDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetIamPolicy(self, request, global_params=None):
"""Sets the access control policy on the specified resource. Replaces any existing policy.
Args:
request: (ComputeInstancesSetIamPolicyRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Policy) The response message.
"""
config = self.GetMethodConfig('SetIamPolicy')
return self._RunMethod(
config, request, global_params=global_params)
SetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instances.setIamPolicy',
ordered_params=[u'project', u'zone', u'resource'],
path_params=[u'project', u'resource', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instances/{resource}/setIamPolicy',
request_field=u'policy',
request_type_name=u'ComputeInstancesSetIamPolicyRequest',
response_type_name=u'Policy',
supports_download=False,
)
def SetLabels(self, request, global_params=None):
"""Sets labels on an instance. To learn more about labels, read the Labeling or Tagging Resources documentation.
Args:
request: (ComputeInstancesSetLabelsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetLabels')
return self._RunMethod(
config, request, global_params=global_params)
SetLabels.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instances.setLabels',
ordered_params=[u'project', u'zone', u'instance'],
path_params=[u'instance', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setLabels',
request_field=u'instancesSetLabelsRequest',
request_type_name=u'ComputeInstancesSetLabelsRequest',
response_type_name=u'Operation',
supports_download=False,
)
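# Sketch of SetLabels. Labels are a map field, which apitools encodes through
# the nested LabelsValue/AdditionalProperty types, and the instance's current
# labelFingerprint must be echoed back for optimistic concurrency (both
# assumed here from the usual generated message layout). Hypothetical helper;
# same client/messages assumptions as above.
def _example_set_instance_labels(client, project, zone, instance_name):
  get_req = messages.ComputeInstancesGetRequest(
      project=project, zone=zone, instance=instance_name)
  fingerprint = client.instances.Get(get_req).labelFingerprint
  labels = messages.InstancesSetLabelsRequest.LabelsValue(
      additionalProperties=[
          messages.InstancesSetLabelsRequest.LabelsValue.AdditionalProperty(
              key='env', value='dev')])
  request = messages.ComputeInstancesSetLabelsRequest(
      project=project, zone=zone, instance=instance_name,
      instancesSetLabelsRequest=messages.InstancesSetLabelsRequest(
          labels=labels, labelFingerprint=fingerprint))
  return client.instances.SetLabels(request)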
def SetMachineResources(self, request, global_params=None):
"""Changes the number and/or type of accelerator for a stopped instance to the values specified in the request.
Args:
request: (ComputeInstancesSetMachineResourcesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetMachineResources')
return self._RunMethod(
config, request, global_params=global_params)
SetMachineResources.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instances.setMachineResources',
ordered_params=[u'project', u'zone', u'instance'],
path_params=[u'instance', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setMachineResources',
request_field=u'instancesSetMachineResourcesRequest',
request_type_name=u'ComputeInstancesSetMachineResourcesRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetMachineType(self, request, global_params=None):
"""Changes the machine type for a stopped instance to the machine type specified in the request.
Args:
request: (ComputeInstancesSetMachineTypeRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetMachineType')
return self._RunMethod(
config, request, global_params=global_params)
SetMachineType.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instances.setMachineType',
ordered_params=[u'project', u'zone', u'instance'],
path_params=[u'instance', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setMachineType',
request_field=u'instancesSetMachineTypeRequest',
request_type_name=u'ComputeInstancesSetMachineTypeRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetMetadata(self, request, global_params=None):
"""Sets metadata for the specified instance to the data included in the request.
Args:
request: (ComputeInstancesSetMetadataRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetMetadata')
return self._RunMethod(
config, request, global_params=global_params)
SetMetadata.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instances.setMetadata',
ordered_params=[u'project', u'zone', u'instance'],
path_params=[u'instance', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setMetadata',
request_field=u'metadata',
request_type_name=u'ComputeInstancesSetMetadataRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetMinCpuPlatform(self, request, global_params=None):
"""Changes the minimum cpu/platform that this instance should be started as. This is called on a stopped instance.
Args:
request: (ComputeInstancesSetMinCpuPlatformRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetMinCpuPlatform')
return self._RunMethod(
config, request, global_params=global_params)
SetMinCpuPlatform.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instances.setMinCpuPlatform',
ordered_params=[u'project', u'zone', u'instance'],
path_params=[u'instance', u'project', u'zone'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setMinCpuPlatform',
request_field=u'instancesSetMinCpuPlatformRequest',
request_type_name=u'ComputeInstancesSetMinCpuPlatformRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetScheduling(self, request, global_params=None):
"""Sets an instance's scheduling options.
Args:
request: (ComputeInstancesSetSchedulingRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetScheduling')
return self._RunMethod(
config, request, global_params=global_params)
SetScheduling.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instances.setScheduling',
ordered_params=[u'project', u'zone', u'instance'],
path_params=[u'instance', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setScheduling',
request_field=u'scheduling',
request_type_name=u'ComputeInstancesSetSchedulingRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetServiceAccount(self, request, global_params=None):
"""Sets the service account on the instance.
Args:
request: (ComputeInstancesSetServiceAccountRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetServiceAccount')
return self._RunMethod(
config, request, global_params=global_params)
SetServiceAccount.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instances.setServiceAccount',
ordered_params=[u'project', u'zone', u'instance'],
path_params=[u'instance', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setServiceAccount',
request_field=u'instancesSetServiceAccountRequest',
request_type_name=u'ComputeInstancesSetServiceAccountRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetTags(self, request, global_params=None):
"""Sets tags for the specified instance to the data included in the request.
Args:
request: (ComputeInstancesSetTagsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetTags')
return self._RunMethod(
config, request, global_params=global_params)
SetTags.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instances.setTags',
ordered_params=[u'project', u'zone', u'instance'],
path_params=[u'instance', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/setTags',
request_field=u'tags',
request_type_name=u'ComputeInstancesSetTagsRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Start(self, request, global_params=None):
"""Starts an instance that was stopped using the using the instances().stop method. For more information, see Restart an instance.
Args:
request: (ComputeInstancesStartRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Start')
return self._RunMethod(
config, request, global_params=global_params)
Start.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instances.start',
ordered_params=[u'project', u'zone', u'instance'],
path_params=[u'instance', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/start',
request_field='',
request_type_name=u'ComputeInstancesStartRequest',
response_type_name=u'Operation',
supports_download=False,
)
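    # Usage sketch (hypothetical names; `client` and `messages` as in the
    # earlier sketch). Start takes no request body, since request_field is
    # empty above, so only the path parameters are set:
    #
    #   req = messages.ComputeInstancesStartRequest(
    #       project='my-project', zone='us-central1-a', instance='my-vm')
    #   op = client.instances.Start(req)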
def StartWithEncryptionKey(self, request, global_params=None):
"""Starts an instance that was stopped using the using the instances().stop method. For more information, see Restart an instance.
Args:
request: (ComputeInstancesStartWithEncryptionKeyRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('StartWithEncryptionKey')
return self._RunMethod(
config, request, global_params=global_params)
StartWithEncryptionKey.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instances.startWithEncryptionKey',
ordered_params=[u'project', u'zone', u'instance'],
path_params=[u'instance', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/startWithEncryptionKey',
request_field=u'instancesStartWithEncryptionKeyRequest',
request_type_name=u'ComputeInstancesStartWithEncryptionKeyRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Stop(self, request, global_params=None):
"""Stops a running instance, shutting it down cleanly, and allows you to restart the instance at a later time. Stopped instances do not incur per-minute, virtual machine usage charges while they are stopped, but any resources that the virtual machine is using, such as persistent disks and static IP addresses, will continue to be charged until they are deleted. For more information, see Stopping an instance.
Args:
request: (ComputeInstancesStopRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Stop')
return self._RunMethod(
config, request, global_params=global_params)
Stop.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instances.stop',
ordered_params=[u'project', u'zone', u'instance'],
path_params=[u'instance', u'project', u'zone'],
query_params=[u'discardLocalSsd'],
relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/stop',
request_field='',
request_type_name=u'ComputeInstancesStopRequest',
response_type_name=u'Operation',
supports_download=False,
)
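    # Usage sketch (hypothetical names). `discardLocalSsd` is the single
    # query parameter this method accepts, per query_params above:
    #
    #   req = messages.ComputeInstancesStopRequest(
    #       project='my-project', zone='us-central1-a', instance='my-vm',
    #       discardLocalSsd=True)
    #   op = client.instances.Stop(req)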
def Suspend(self, request, global_params=None):
"""This method suspends a running instance, saving its state to persistent storage, and allows you to resume the instance at a later time. Suspended instances incur reduced per-minute, virtual machine usage charges while they are suspended. Any resources the virtual machine is using, such as persistent disks and static IP addresses, will continue to be charged until they are deleted.
Args:
request: (ComputeInstancesSuspendRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Suspend')
return self._RunMethod(
config, request, global_params=global_params)
Suspend.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instances.suspend',
ordered_params=[u'project', u'zone', u'instance'],
path_params=[u'instance', u'project', u'zone'],
query_params=[u'discardLocalSsd'],
relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/suspend',
request_field='',
request_type_name=u'ComputeInstancesSuspendRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeInstancesTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instances.testIamPermissions',
ordered_params=[u'project', u'zone', u'resource'],
path_params=[u'project', u'resource', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/instances/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeInstancesTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
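    # Usage sketch (hypothetical permission names). The body travels in the
    # `testPermissionsRequest` field, and the instance is addressed via the
    # `resource` parameter rather than `instance` for this method:
    #
    #   req = messages.ComputeInstancesTestIamPermissionsRequest(
    #       project='my-project', zone='us-central1-a', resource='my-vm',
    #       testPermissionsRequest=messages.TestPermissionsRequest(
    #           permissions=['compute.instances.start']))
    #   resp = client.instances.TestIamPermissions(req)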
def UpdateAccessConfig(self, request, global_params=None):
"""Updates the specified access config from an instance's network interface with the data included in the request.
Args:
request: (ComputeInstancesUpdateAccessConfigRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('UpdateAccessConfig')
return self._RunMethod(
config, request, global_params=global_params)
UpdateAccessConfig.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.instances.updateAccessConfig',
ordered_params=[u'project', u'zone', u'instance', u'networkInterface'],
path_params=[u'instance', u'project', u'zone'],
query_params=[u'networkInterface'],
relative_path=u'projects/{project}/zones/{zone}/instances/{instance}/updateAccessConfig',
request_field=u'accessConfig',
request_type_name=u'ComputeInstancesUpdateAccessConfigRequest',
response_type_name=u'Operation',
supports_download=False,
)
class LicensesService(base_api.BaseApiService):
"""Service class for the licenses resource."""
_NAME = u'licenses'
def __init__(self, client):
super(ComputeAlpha.LicensesService, self).__init__(client)
self._upload_configs = {
}
def Get(self, request, global_params=None):
"""Returns the specified License resource. Get a list of available licenses by making a list() request.
Args:
request: (ComputeLicensesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(License) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.licenses.get',
ordered_params=[u'project', u'license'],
path_params=[u'license', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/licenses/{license}',
request_field='',
request_type_name=u'ComputeLicensesGetRequest',
response_type_name=u'License',
supports_download=False,
)
class MachineTypesService(base_api.BaseApiService):
"""Service class for the machineTypes resource."""
_NAME = u'machineTypes'
def __init__(self, client):
super(ComputeAlpha.MachineTypesService, self).__init__(client)
self._upload_configs = {
}
def AggregatedList(self, request, global_params=None):
"""Retrieves an aggregated list of machine types.
Args:
request: (ComputeMachineTypesAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(MachineTypeAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.machineTypes.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/machineTypes',
request_field='',
request_type_name=u'ComputeMachineTypesAggregatedListRequest',
response_type_name=u'MachineTypeAggregatedList',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified machine type. Get a list of available machine types by making a list() request.
Args:
request: (ComputeMachineTypesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(MachineType) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.machineTypes.get',
ordered_params=[u'project', u'zone', u'machineType'],
path_params=[u'machineType', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/machineTypes/{machineType}',
request_field='',
request_type_name=u'ComputeMachineTypesGetRequest',
response_type_name=u'MachineType',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of machine types available to the specified project.
Args:
request: (ComputeMachineTypesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(MachineTypeList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.machineTypes.list',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/zones/{zone}/machineTypes',
request_field='',
request_type_name=u'ComputeMachineTypesListRequest',
response_type_name=u'MachineTypeList',
supports_download=False,
)
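    # Pagination sketch (hypothetical names; assumes the MachineTypeList
    # response carries `items` and `nextPageToken` fields, as these list
    # responses conventionally do):
    #
    #   req = messages.ComputeMachineTypesListRequest(
    #       project='my-project', zone='us-central1-a', maxResults=100)
    #   while True:
    #     resp = client.machineTypes.List(req)
    #     for machine_type in resp.items or []:
    #       print(machine_type.name)
    #     if not resp.nextPageToken:
    #       break
    #     req.pageToken = resp.nextPageToken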
class NetworksService(base_api.BaseApiService):
"""Service class for the networks resource."""
_NAME = u'networks'
def __init__(self, client):
super(ComputeAlpha.NetworksService, self).__init__(client)
self._upload_configs = {
}
def AddPeering(self, request, global_params=None):
"""Adds a peering to the specified network.
Args:
request: (ComputeNetworksAddPeeringRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('AddPeering')
return self._RunMethod(
config, request, global_params=global_params)
AddPeering.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.networks.addPeering',
ordered_params=[u'project', u'network'],
path_params=[u'network', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/networks/{network}/addPeering',
request_field=u'networksAddPeeringRequest',
request_type_name=u'ComputeNetworksAddPeeringRequest',
response_type_name=u'Operation',
supports_download=False,
)
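    # Usage sketch (hypothetical networks; assumes NetworksAddPeeringRequest
    # exposes `name`, `peerNetwork`, and `autoCreateRoutes`). The body
    # travels in the `networksAddPeeringRequest` field, per request_field
    # above:
    #
    #   req = messages.ComputeNetworksAddPeeringRequest(
    #       project='my-project', network='net-a',
    #       networksAddPeeringRequest=messages.NetworksAddPeeringRequest(
    #           name='net-a-to-net-b',
    #           peerNetwork='projects/my-project/global/networks/net-b',
    #           autoCreateRoutes=True))
    #   op = client.networks.AddPeering(req)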
def Delete(self, request, global_params=None):
"""Deletes the specified network.
Args:
request: (ComputeNetworksDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.networks.delete',
ordered_params=[u'project', u'network'],
path_params=[u'network', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/networks/{network}',
request_field='',
request_type_name=u'ComputeNetworksDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified network. Get a list of available networks by making a list() request.
Args:
request: (ComputeNetworksGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Network) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.networks.get',
ordered_params=[u'project', u'network'],
path_params=[u'network', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/networks/{network}',
request_field='',
request_type_name=u'ComputeNetworksGetRequest',
response_type_name=u'Network',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a network in the specified project using the data included in the request.
Args:
request: (ComputeNetworksInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.networks.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/global/networks',
request_field=u'network',
request_type_name=u'ComputeNetworksInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
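    # Usage sketch (hypothetical; assumes the Network message exposes
    # `autoCreateSubnetworks`, set to False here to request a custom-mode
    # network):
    #
    #   req = messages.ComputeNetworksInsertRequest(
    #       project='my-project',
    #       network=messages.Network(name='net-a',
    #                                autoCreateSubnetworks=False))
    #   op = client.networks.Insert(req)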
def List(self, request, global_params=None):
"""Retrieves the list of networks available to the specified project.
Args:
request: (ComputeNetworksListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(NetworkList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.networks.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/networks',
request_field='',
request_type_name=u'ComputeNetworksListRequest',
response_type_name=u'NetworkList',
supports_download=False,
)
def RemovePeering(self, request, global_params=None):
"""Removes a peering from the specified network.
Args:
request: (ComputeNetworksRemovePeeringRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('RemovePeering')
return self._RunMethod(
config, request, global_params=global_params)
RemovePeering.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.networks.removePeering',
ordered_params=[u'project', u'network'],
path_params=[u'network', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/networks/{network}/removePeering',
request_field=u'networksRemovePeeringRequest',
request_type_name=u'ComputeNetworksRemovePeeringRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SwitchToCustomMode(self, request, global_params=None):
"""Switches the network mode from auto subnet mode to custom subnet mode.
Args:
request: (ComputeNetworksSwitchToCustomModeRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SwitchToCustomMode')
return self._RunMethod(
config, request, global_params=global_params)
SwitchToCustomMode.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.networks.switchToCustomMode',
ordered_params=[u'project', u'network'],
path_params=[u'network', u'project'],
query_params=[],
relative_path=u'projects/{project}/global/networks/{network}/switchToCustomMode',
request_field='',
request_type_name=u'ComputeNetworksSwitchToCustomModeRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeNetworksTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.networks.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/networks/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeNetworksTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
class ProjectsService(base_api.BaseApiService):
"""Service class for the projects resource."""
_NAME = u'projects'
def __init__(self, client):
super(ComputeAlpha.ProjectsService, self).__init__(client)
self._upload_configs = {
}
def DisableXpnHost(self, request, global_params=None):
"""Disable this project as an XPN host project.
Args:
request: (ComputeProjectsDisableXpnHostRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('DisableXpnHost')
return self._RunMethod(
config, request, global_params=global_params)
DisableXpnHost.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.projects.disableXpnHost',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/disableXpnHost',
request_field='',
request_type_name=u'ComputeProjectsDisableXpnHostRequest',
response_type_name=u'Operation',
supports_download=False,
)
def DisableXpnResource(self, request, global_params=None):
"""Disable an XPN resource associated with this host project.
Args:
request: (ComputeProjectsDisableXpnResourceRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('DisableXpnResource')
return self._RunMethod(
config, request, global_params=global_params)
DisableXpnResource.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.projects.disableXpnResource',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/disableXpnResource',
request_field=u'projectsDisableXpnResourceRequest',
request_type_name=u'ComputeProjectsDisableXpnResourceRequest',
response_type_name=u'Operation',
supports_download=False,
)
def EnableXpnHost(self, request, global_params=None):
"""Enable this project as an XPN host project.
Args:
request: (ComputeProjectsEnableXpnHostRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('EnableXpnHost')
return self._RunMethod(
config, request, global_params=global_params)
EnableXpnHost.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.projects.enableXpnHost',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/enableXpnHost',
request_field='',
request_type_name=u'ComputeProjectsEnableXpnHostRequest',
response_type_name=u'Operation',
supports_download=False,
)
def EnableXpnResource(self, request, global_params=None):
"""Enable XPN resource (a.k.a service project or service folder in the future) for a host project, so that subnetworks in the host project can be used by instances in the service project or folder.
Args:
request: (ComputeProjectsEnableXpnResourceRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('EnableXpnResource')
return self._RunMethod(
config, request, global_params=global_params)
EnableXpnResource.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.projects.enableXpnResource',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/enableXpnResource',
request_field=u'projectsEnableXpnResourceRequest',
request_type_name=u'ComputeProjectsEnableXpnResourceRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified Project resource.
Args:
request: (ComputeProjectsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Project) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.projects.get',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}',
request_field='',
request_type_name=u'ComputeProjectsGetRequest',
response_type_name=u'Project',
supports_download=False,
)
def GetXpnHost(self, request, global_params=None):
"""Get the XPN host project that this project links to. May be empty if no link exists.
Args:
request: (ComputeProjectsGetXpnHostRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Project) The response message.
"""
config = self.GetMethodConfig('GetXpnHost')
return self._RunMethod(
config, request, global_params=global_params)
GetXpnHost.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.projects.getXpnHost',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/getXpnHost',
request_field='',
request_type_name=u'ComputeProjectsGetXpnHostRequest',
response_type_name=u'Project',
supports_download=False,
)
def GetXpnResources(self, request, global_params=None):
"""Get XPN resources associated with this host project.
Args:
request: (ComputeProjectsGetXpnResourcesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ProjectsGetXpnResources) The response message.
"""
config = self.GetMethodConfig('GetXpnResources')
return self._RunMethod(
config, request, global_params=global_params)
GetXpnResources.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.projects.getXpnResources',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'order_by', u'pageToken'],
relative_path=u'projects/{project}/getXpnResources',
request_field='',
request_type_name=u'ComputeProjectsGetXpnResourcesRequest',
response_type_name=u'ProjectsGetXpnResources',
supports_download=False,
)
def ListXpnHosts(self, request, global_params=None):
"""List all XPN host projects visible to the user in an organization.
Args:
request: (ComputeProjectsListXpnHostsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(XpnHostList) The response message.
"""
config = self.GetMethodConfig('ListXpnHosts')
return self._RunMethod(
config, request, global_params=global_params)
ListXpnHosts.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.projects.listXpnHosts',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'order_by', u'pageToken'],
relative_path=u'projects/{project}/listXpnHosts',
request_field=u'projectsListXpnHostsRequest',
request_type_name=u'ComputeProjectsListXpnHostsRequest',
response_type_name=u'XpnHostList',
supports_download=False,
)
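    # Usage sketch (hypothetical; assumes ProjectsListXpnHostsRequest exposes
    # an `organization` field). Note this list call is a POST whose body
    # travels in `projectsListXpnHostsRequest`, unlike the GET-based lists:
    #
    #   req = messages.ComputeProjectsListXpnHostsRequest(
    #       project='my-project',
    #       projectsListXpnHostsRequest=messages.ProjectsListXpnHostsRequest(
    #           organization='12345'))
    #   hosts = client.projects.ListXpnHosts(req)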
def MoveDisk(self, request, global_params=None):
"""Moves a persistent disk from one zone to another.
Args:
request: (ComputeProjectsMoveDiskRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('MoveDisk')
return self._RunMethod(
config, request, global_params=global_params)
MoveDisk.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.projects.moveDisk',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/moveDisk',
request_field=u'diskMoveRequest',
request_type_name=u'ComputeProjectsMoveDiskRequest',
response_type_name=u'Operation',
supports_download=False,
)
def MoveInstance(self, request, global_params=None):
"""Moves an instance and its attached persistent disks from one zone to another.
Args:
request: (ComputeProjectsMoveInstanceRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('MoveInstance')
return self._RunMethod(
config, request, global_params=global_params)
MoveInstance.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.projects.moveInstance',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/moveInstance',
request_field=u'instanceMoveRequest',
request_type_name=u'ComputeProjectsMoveInstanceRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetCommonInstanceMetadata(self, request, global_params=None):
"""Sets metadata common to all instances within the specified project using the data included in the request.
Args:
request: (ComputeProjectsSetCommonInstanceMetadataRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetCommonInstanceMetadata')
return self._RunMethod(
config, request, global_params=global_params)
SetCommonInstanceMetadata.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.projects.setCommonInstanceMetadata',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/setCommonInstanceMetadata',
request_field=u'metadata',
request_type_name=u'ComputeProjectsSetCommonInstanceMetadataRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetDefaultServiceAccount(self, request, global_params=None):
"""Sets the default service account of the project. The default service account is used when a VM instance is created with the service account email address set to "default".
Args:
request: (ComputeProjectsSetDefaultServiceAccountRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetDefaultServiceAccount')
return self._RunMethod(
config, request, global_params=global_params)
SetDefaultServiceAccount.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.projects.setDefaultServiceAccount',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/setDefaultServiceAccount',
request_field=u'projectsSetDefaultServiceAccountRequest',
request_type_name=u'ComputeProjectsSetDefaultServiceAccountRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetUsageExportBucket(self, request, global_params=None):
"""Enables the usage export feature and sets the usage export bucket where reports are stored. If you provide an empty request body using this method, the usage export feature will be disabled.
Args:
request: (ComputeProjectsSetUsageExportBucketRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetUsageExportBucket')
return self._RunMethod(
config, request, global_params=global_params)
SetUsageExportBucket.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.projects.setUsageExportBucket',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/setUsageExportBucket',
request_field=u'usageExportLocation',
request_type_name=u'ComputeProjectsSetUsageExportBucketRequest',
response_type_name=u'Operation',
supports_download=False,
)
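    # Usage sketch (hypothetical bucket; assumes UsageExportLocation exposes
    # `bucketName` and `reportNamePrefix`). Sending an empty body instead
    # disables the feature, as the docstring notes:
    #
    #   req = messages.ComputeProjectsSetUsageExportBucketRequest(
    #       project='my-project',
    #       usageExportLocation=messages.UsageExportLocation(
    #           bucketName='gs://my-usage-bucket',
    #           reportNamePrefix='usage'))
    #   op = client.projects.SetUsageExportBucket(req)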
class RegionAutoscalersService(base_api.BaseApiService):
"""Service class for the regionAutoscalers resource."""
_NAME = u'regionAutoscalers'
def __init__(self, client):
super(ComputeAlpha.RegionAutoscalersService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified autoscaler.
Args:
request: (ComputeRegionAutoscalersDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.regionAutoscalers.delete',
ordered_params=[u'project', u'region', u'autoscaler'],
path_params=[u'autoscaler', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/autoscalers/{autoscaler}',
request_field='',
request_type_name=u'ComputeRegionAutoscalersDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified autoscaler.
Args:
request: (ComputeRegionAutoscalersGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Autoscaler) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionAutoscalers.get',
ordered_params=[u'project', u'region', u'autoscaler'],
path_params=[u'autoscaler', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/autoscalers/{autoscaler}',
request_field='',
request_type_name=u'ComputeRegionAutoscalersGetRequest',
response_type_name=u'Autoscaler',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates an autoscaler in the specified project using the data included in the request.
Args:
request: (ComputeRegionAutoscalersInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionAutoscalers.insert',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/autoscalers',
request_field=u'autoscaler',
request_type_name=u'ComputeRegionAutoscalersInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of autoscalers contained within the specified region.
Args:
request: (ComputeRegionAutoscalersListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(RegionAutoscalerList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionAutoscalers.list',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/autoscalers',
request_field='',
request_type_name=u'ComputeRegionAutoscalersListRequest',
response_type_name=u'RegionAutoscalerList',
supports_download=False,
)
def Patch(self, request, global_params=None):
"""Updates an autoscaler in the specified project using the data included in the request. This method supports patch semantics.
Args:
request: (ComputeRegionAutoscalersPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PATCH',
method_id=u'compute.regionAutoscalers.patch',
ordered_params=[u'project', u'region', u'autoscaler'],
path_params=[u'project', u'region'],
query_params=[u'autoscaler'],
relative_path=u'projects/{project}/regions/{region}/autoscalers',
request_field=u'autoscalerResource',
request_type_name=u'ComputeRegionAutoscalersPatchRequest',
response_type_name=u'Operation',
supports_download=False,
)
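    # Usage sketch (hypothetical autoscaler; the target URL is illustrative).
    # For this method the autoscaler name is a query parameter, not a path
    # parameter, and the body travels in `autoscalerResource`:
    #
    #   req = messages.ComputeRegionAutoscalersPatchRequest(
    #       project='my-project', region='us-central1', autoscaler='my-as',
    #       autoscalerResource=messages.Autoscaler(
    #           name='my-as',
    #           target='projects/my-project/regions/us-central1/'
    #                  'instanceGroupManagers/my-igm'))
    #   op = client.regionAutoscalers.Patch(req)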
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeRegionAutoscalersTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionAutoscalers.testIamPermissions',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/autoscalers/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeRegionAutoscalersTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
def Update(self, request, global_params=None):
"""Updates an autoscaler in the specified project using the data included in the request.
Args:
request: (ComputeRegionAutoscalersUpdateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PUT',
method_id=u'compute.regionAutoscalers.update',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'autoscaler'],
relative_path=u'projects/{project}/regions/{region}/autoscalers',
request_field=u'autoscalerResource',
request_type_name=u'ComputeRegionAutoscalersUpdateRequest',
response_type_name=u'Operation',
supports_download=False,
)
class RegionBackendServicesService(base_api.BaseApiService):
"""Service class for the regionBackendServices resource."""
_NAME = u'regionBackendServices'
def __init__(self, client):
super(ComputeAlpha.RegionBackendServicesService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified regional BackendService resource.
Args:
request: (ComputeRegionBackendServicesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.regionBackendServices.delete',
ordered_params=[u'project', u'region', u'backendService'],
path_params=[u'backendService', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/backendServices/{backendService}',
request_field='',
request_type_name=u'ComputeRegionBackendServicesDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified regional BackendService resource.
Args:
request: (ComputeRegionBackendServicesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(BackendService) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionBackendServices.get',
ordered_params=[u'project', u'region', u'backendService'],
path_params=[u'backendService', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/backendServices/{backendService}',
request_field='',
request_type_name=u'ComputeRegionBackendServicesGetRequest',
response_type_name=u'BackendService',
supports_download=False,
)
def GetHealth(self, request, global_params=None):
"""Gets the most recent health check results for this regional BackendService.
Args:
request: (ComputeRegionBackendServicesGetHealthRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(BackendServiceGroupHealth) The response message.
"""
config = self.GetMethodConfig('GetHealth')
return self._RunMethod(
config, request, global_params=global_params)
GetHealth.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionBackendServices.getHealth',
ordered_params=[u'project', u'region', u'backendService'],
path_params=[u'backendService', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/backendServices/{backendService}/getHealth',
request_field=u'resourceGroupReference',
request_type_name=u'ComputeRegionBackendServicesGetHealthRequest',
response_type_name=u'BackendServiceGroupHealth',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a regional BackendService resource in the specified project using the data included in the request. There are several restrictions and guidelines to keep in mind when creating a regional backend service. Read Restrictions and Guidelines for more information.
Args:
request: (ComputeRegionBackendServicesInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionBackendServices.insert',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/backendServices',
request_field=u'backendService',
request_type_name=u'ComputeRegionBackendServicesInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of regional BackendService resources available to the specified project in the given region.
Args:
request: (ComputeRegionBackendServicesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(BackendServiceList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionBackendServices.list',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/backendServices',
request_field='',
request_type_name=u'ComputeRegionBackendServicesListRequest',
response_type_name=u'BackendServiceList',
supports_download=False,
)
def Patch(self, request, global_params=None):
"""Updates the specified regional BackendService resource with the data included in the request. There are several restrictions and guidelines to keep in mind when updating a backend service. Read Restrictions and Guidelines for more information. This method supports patch semantics.
Args:
request: (ComputeRegionBackendServicesPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PATCH',
method_id=u'compute.regionBackendServices.patch',
ordered_params=[u'project', u'region', u'backendService'],
path_params=[u'backendService', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/backendServices/{backendService}',
request_field=u'backendServiceResource',
request_type_name=u'ComputeRegionBackendServicesPatchRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeRegionBackendServicesTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionBackendServices.testIamPermissions',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/backendServices/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeRegionBackendServicesTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
def Update(self, request, global_params=None):
"""Updates the specified regional BackendService resource with the data included in the request. There are several restrictions and guidelines to keep in mind when updating a backend service. Read Restrictions and Guidelines for more information.
Args:
request: (ComputeRegionBackendServicesUpdateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PUT',
method_id=u'compute.regionBackendServices.update',
ordered_params=[u'project', u'region', u'backendService'],
path_params=[u'backendService', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/backendServices/{backendService}',
request_field=u'backendServiceResource',
request_type_name=u'ComputeRegionBackendServicesUpdateRequest',
response_type_name=u'Operation',
supports_download=False,
)
class RegionDiskTypesService(base_api.BaseApiService):
"""Service class for the regionDiskTypes resource."""
_NAME = u'regionDiskTypes'
def __init__(self, client):
super(ComputeAlpha.RegionDiskTypesService, self).__init__(client)
self._upload_configs = {
}
def Get(self, request, global_params=None):
"""Returns the specified regional disk type. Get a list of available disk types by making a list() request.
Args:
request: (ComputeRegionDiskTypesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(DiskType) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionDiskTypes.get',
ordered_params=[u'project', u'region', u'diskType'],
path_params=[u'diskType', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/diskTypes/{diskType}',
request_field='',
request_type_name=u'ComputeRegionDiskTypesGetRequest',
response_type_name=u'DiskType',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of regional disk types available to the specified project.
Args:
request: (ComputeRegionDiskTypesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(RegionDiskTypeList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionDiskTypes.list',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/diskTypes',
request_field='',
request_type_name=u'ComputeRegionDiskTypesListRequest',
response_type_name=u'RegionDiskTypeList',
supports_download=False,
)
class RegionDisksService(base_api.BaseApiService):
"""Service class for the regionDisks resource."""
_NAME = u'regionDisks'
def __init__(self, client):
super(ComputeAlpha.RegionDisksService, self).__init__(client)
self._upload_configs = {
}
def CreateSnapshot(self, request, global_params=None):
"""Creates a snapshot of this regional disk.
Args:
request: (ComputeRegionDisksCreateSnapshotRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('CreateSnapshot')
return self._RunMethod(
config, request, global_params=global_params)
CreateSnapshot.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionDisks.createSnapshot',
ordered_params=[u'project', u'region', u'disk'],
path_params=[u'disk', u'project', u'region'],
query_params=[u'guestFlush', u'requestId'],
relative_path=u'projects/{project}/regions/{region}/disks/{disk}/createSnapshot',
request_field=u'snapshot',
request_type_name=u'ComputeRegionDisksCreateSnapshotRequest',
response_type_name=u'Operation',
supports_download=False,
)
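    # Usage sketch (hypothetical disk; `guestFlush` and `requestId` are the
    # query parameters per the config above, and the Snapshot body travels
    # in the `snapshot` field):
    #
    #   req = messages.ComputeRegionDisksCreateSnapshotRequest(
    #       project='my-project', region='us-central1', disk='my-disk',
    #       snapshot=messages.Snapshot(name='my-disk-snap'),
    #       guestFlush=True)
    #   op = client.regionDisks.CreateSnapshot(req)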
def Delete(self, request, global_params=None):
"""Deletes the specified regional persistent disk. Deleting a regional disk removes all the replicas of its data permanently and is irreversible. However, deleting a disk does not delete any snapshots previously made from the disk. You must separately delete snapshots.
Args:
request: (ComputeRegionDisksDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.regionDisks.delete',
ordered_params=[u'project', u'region', u'disk'],
path_params=[u'disk', u'project', u'region'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/regions/{region}/disks/{disk}',
request_field='',
request_type_name=u'ComputeRegionDisksDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns a specified regional persistent disk.
Args:
request: (ComputeRegionDisksGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Disk) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionDisks.get',
ordered_params=[u'project', u'region', u'disk'],
path_params=[u'disk', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/disks/{disk}',
request_field='',
request_type_name=u'ComputeRegionDisksGetRequest',
response_type_name=u'Disk',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a persistent regional disk in the specified project using the data included in the request.
Args:
request: (ComputeRegionDisksInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionDisks.insert',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'requestId', u'sourceImage'],
relative_path=u'projects/{project}/regions/{region}/disks',
request_field=u'disk',
request_type_name=u'ComputeRegionDisksInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of persistent disks contained within the specified region.
Args:
request: (ComputeRegionDisksListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(DiskList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionDisks.list',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/disks',
request_field='',
request_type_name=u'ComputeRegionDisksListRequest',
response_type_name=u'DiskList',
supports_download=False,
)
def Resize(self, request, global_params=None):
"""Resizes the specified regional persistent disk.
Args:
request: (ComputeRegionDisksResizeRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Resize')
return self._RunMethod(
config, request, global_params=global_params)
Resize.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionDisks.resize',
ordered_params=[u'project', u'region', u'disk'],
path_params=[u'disk', u'project', u'region'],
query_params=[u'requestId'],
relative_path=u'projects/{project}/regions/{region}/disks/{disk}/resize',
request_field=u'regionDisksResizeRequest',
request_type_name=u'ComputeRegionDisksResizeRequest',
response_type_name=u'Operation',
supports_download=False,
)
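    # Usage sketch (hypothetical; assumes RegionDisksResizeRequest carries a
    # `sizeGb` field for the new size):
    #
    #   req = messages.ComputeRegionDisksResizeRequest(
    #       project='my-project', region='us-central1', disk='my-disk',
    #       regionDisksResizeRequest=messages.RegionDisksResizeRequest(
    #           sizeGb=500))
    #   op = client.regionDisks.Resize(req)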
def SetLabels(self, request, global_params=None):
"""Sets the labels on the target regional disk.
Args:
request: (ComputeRegionDisksSetLabelsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetLabels')
return self._RunMethod(
config, request, global_params=global_params)
SetLabels.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionDisks.setLabels',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/disks/{resource}/setLabels',
request_field=u'regionSetLabelsRequest',
request_type_name=u'ComputeRegionDisksSetLabelsRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeRegionDisksTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionDisks.testIamPermissions',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/disks/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeRegionDisksTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
class RegionInstanceGroupManagersService(base_api.BaseApiService):
"""Service class for the regionInstanceGroupManagers resource."""
_NAME = u'regionInstanceGroupManagers'
def __init__(self, client):
super(ComputeAlpha.RegionInstanceGroupManagersService, self).__init__(client)
self._upload_configs = {
}
def AbandonInstances(self, request, global_params=None):
"""Schedules a group action to remove the specified instances from the managed instance group. Abandoning an instance does not delete the instance, but it does remove the instance from any target pools that are applied by the managed instance group. This method reduces the targetSize of the managed instance group by the number of instances that you abandon. This operation is marked as DONE when the action is scheduled even if the instances have not yet been removed from the group. You must separately verify the status of the abandoning action with the listmanagedinstances method.
Args:
request: (ComputeRegionInstanceGroupManagersAbandonInstancesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('AbandonInstances')
return self._RunMethod(
config, request, global_params=global_params)
AbandonInstances.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionInstanceGroupManagers.abandonInstances',
ordered_params=[u'project', u'region', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/abandonInstances',
request_field=u'regionInstanceGroupManagersAbandonInstancesRequest',
request_type_name=u'ComputeRegionInstanceGroupManagersAbandonInstancesRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Delete(self, request, global_params=None):
"""Deletes the specified managed instance group and all of the instances in that group.
Args:
request: (ComputeRegionInstanceGroupManagersDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.regionInstanceGroupManagers.delete',
ordered_params=[u'project', u'region', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}',
request_field='',
request_type_name=u'ComputeRegionInstanceGroupManagersDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def DeleteInstances(self, request, global_params=None):
"""Schedules a group action to delete the specified instances in the managed instance group. The instances are also removed from any target pools of which they were a member. This method reduces the targetSize of the managed instance group by the number of instances that you delete. This operation is marked as DONE when the action is scheduled even if the instances are still being deleted. You must separately verify the status of the deleting action with the listmanagedinstances method.
Args:
request: (ComputeRegionInstanceGroupManagersDeleteInstancesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('DeleteInstances')
return self._RunMethod(
config, request, global_params=global_params)
DeleteInstances.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionInstanceGroupManagers.deleteInstances',
ordered_params=[u'project', u'region', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/deleteInstances',
request_field=u'regionInstanceGroupManagersDeleteInstancesRequest',
request_type_name=u'ComputeRegionInstanceGroupManagersDeleteInstancesRequest',
response_type_name=u'Operation',
supports_download=False,
)
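    # Usage sketch (hypothetical instance URL). Because the returned
    # Operation is DONE once the action is merely scheduled, progress must be
    # checked separately with ListManagedInstances, as the docstring notes:
    #
    #   body = messages.RegionInstanceGroupManagersDeleteInstancesRequest(
    #       instances=['zones/us-central1-a/instances/my-vm-0'])
    #   req = messages.ComputeRegionInstanceGroupManagersDeleteInstancesRequest(
    #       project='my-project', region='us-central1',
    #       instanceGroupManager='my-igm',
    #       regionInstanceGroupManagersDeleteInstancesRequest=body)
    #   op = client.regionInstanceGroupManagers.DeleteInstances(req)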
def Get(self, request, global_params=None):
"""Returns all of the details about the specified managed instance group.
Args:
request: (ComputeRegionInstanceGroupManagersGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(InstanceGroupManager) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionInstanceGroupManagers.get',
ordered_params=[u'project', u'region', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}',
request_field='',
request_type_name=u'ComputeRegionInstanceGroupManagersGetRequest',
response_type_name=u'InstanceGroupManager',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a managed instance group using the information that you specify in the request. After the group is created, it schedules an action to create instances in the group using the specified instance template. This operation is marked as DONE when the group is created even if the instances in the group have not yet been created. You must separately verify the status of the individual instances with the listmanagedinstances method.
Args:
request: (ComputeRegionInstanceGroupManagersInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionInstanceGroupManagers.insert',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers',
request_field=u'instanceGroupManager',
request_type_name=u'ComputeRegionInstanceGroupManagersInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
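# Illustrative usage (editor's sketch): creating a regional managed instance
# group. The InstanceGroupManager fields shown (name, baseInstanceName,
# instanceTemplate, targetSize) are assumed to be the usual minimum for an
# insert; all names and URLs are placeholders.
#
#   op = client.regionInstanceGroupManagers.Insert(
#       messages.ComputeRegionInstanceGroupManagersInsertRequest(
#           project='my-project',
#           region='us-central1',
#           instanceGroupManager=messages.InstanceGroupManager(
#               name='my-group',
#               baseInstanceName='my-group',
#               instanceTemplate='global/instanceTemplates/my-template',
#               targetSize=3)))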
def List(self, request, global_params=None):
"""Retrieves the list of managed instance groups that are contained within the specified region.
Args:
request: (ComputeRegionInstanceGroupManagersListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(RegionInstanceGroupManagerList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionInstanceGroupManagers.list',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers',
request_field='',
request_type_name=u'ComputeRegionInstanceGroupManagersListRequest',
response_type_name=u'RegionInstanceGroupManagerList',
supports_download=False,
)
def ListManagedInstances(self, request, global_params=None):
"""Lists the instances in the managed instance group and instances that are scheduled to be created. The list includes any current actions that the group has scheduled for its instances.
Args:
request: (ComputeRegionInstanceGroupManagersListManagedInstancesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(RegionInstanceGroupManagersListInstancesResponse) The response message.
"""
config = self.GetMethodConfig('ListManagedInstances')
return self._RunMethod(
config, request, global_params=global_params)
ListManagedInstances.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionInstanceGroupManagers.listManagedInstances',
ordered_params=[u'project', u'region', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'region'],
query_params=[u'filter', u'maxResults', u'order_by', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/listManagedInstances',
request_field='',
request_type_name=u'ComputeRegionInstanceGroupManagersListManagedInstancesRequest',
response_type_name=u'RegionInstanceGroupManagersListInstancesResponse',
supports_download=False,
)
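# Illustrative usage (editor's sketch): the mutating methods above return as
# soon as the action is *scheduled*, so per-instance progress is checked
# separately with ListManagedInstances. The `managedInstances`,
# `instance`, and `currentAction` field names are assumptions about the
# response message, hedged accordingly.
#
#   resp = client.regionInstanceGroupManagers.ListManagedInstances(
#       messages.ComputeRegionInstanceGroupManagersListManagedInstancesRequest(
#           project='my-project',
#           region='us-central1',
#           instanceGroupManager='my-group'))
#   for mi in resp.managedInstances:
#       print(mi.instance, mi.currentAction)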
def Patch(self, request, global_params=None):
"""Updates a managed instance group using the information that you specify in the request. This operation is marked as DONE when the group is updated even if the instances in the group have not yet been updated. You must separately verify the status of the individual instances with the listmanagedinstances method. This method supports patch semantics.
Args:
request: (ComputeRegionInstanceGroupManagersPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PATCH',
method_id=u'compute.regionInstanceGroupManagers.patch',
ordered_params=[u'project', u'region', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}',
request_field=u'instanceGroupManagerResource',
request_type_name=u'ComputeRegionInstanceGroupManagersPatchRequest',
response_type_name=u'Operation',
supports_download=False,
)
def RecreateInstances(self, request, global_params=None):
"""Schedules a group action to recreate the specified instances in the managed instance group. The instances are deleted and recreated using the current instance template for the managed instance group. This operation is marked as DONE when the action is scheduled even if the instances have not yet been recreated. You must separately verify the status of the recreating action with the listmanagedinstances method.
Args:
request: (ComputeRegionInstanceGroupManagersRecreateInstancesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('RecreateInstances')
return self._RunMethod(
config, request, global_params=global_params)
RecreateInstances.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionInstanceGroupManagers.recreateInstances',
ordered_params=[u'project', u'region', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/recreateInstances',
request_field=u'regionInstanceGroupManagersRecreateRequest',
request_type_name=u'ComputeRegionInstanceGroupManagersRecreateInstancesRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Resize(self, request, global_params=None):
"""Changes the intended size for the managed instance group. If you increase the size, the group schedules actions to create new instances using the current instance template. If you decrease the size, the group schedules delete actions on one or more instances. The resize operation is marked DONE when the resize actions are scheduled even if the group has not yet added or deleted any instances. You must separately verify the status of the creating or deleting actions with the listmanagedinstances method.
Args:
request: (ComputeRegionInstanceGroupManagersResizeRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Resize')
return self._RunMethod(
config, request, global_params=global_params)
Resize.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionInstanceGroupManagers.resize',
ordered_params=[u'project', u'region', u'instanceGroupManager', u'size'],
path_params=[u'instanceGroupManager', u'project', u'region'],
query_params=[u'size'],
relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/resize',
request_field='',
request_type_name=u'ComputeRegionInstanceGroupManagersResizeRequest',
response_type_name=u'Operation',
supports_download=False,
)
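# Illustrative usage (editor's sketch): `size` travels as a query parameter
# (see ordered_params/query_params above), but in these generated bindings it
# is simply another field on the request message; placeholders as before.
#
#   op = client.regionInstanceGroupManagers.Resize(
#       messages.ComputeRegionInstanceGroupManagersResizeRequest(
#           project='my-project',
#           region='us-central1',
#           instanceGroupManager='my-group',
#           size=5))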
def SetAutoHealingPolicies(self, request, global_params=None):
"""Modifies the autohealing policy for the instances in this managed instance group.
Args:
request: (ComputeRegionInstanceGroupManagersSetAutoHealingPoliciesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetAutoHealingPolicies')
return self._RunMethod(
config, request, global_params=global_params)
SetAutoHealingPolicies.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionInstanceGroupManagers.setAutoHealingPolicies',
ordered_params=[u'project', u'region', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/setAutoHealingPolicies',
request_field=u'regionInstanceGroupManagersSetAutoHealingRequest',
request_type_name=u'ComputeRegionInstanceGroupManagersSetAutoHealingPoliciesRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetInstanceTemplate(self, request, global_params=None):
"""Sets the instance template to use when creating new instances or recreating instances in this group. Existing instances are not affected.
Args:
request: (ComputeRegionInstanceGroupManagersSetInstanceTemplateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetInstanceTemplate')
return self._RunMethod(
config, request, global_params=global_params)
SetInstanceTemplate.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionInstanceGroupManagers.setInstanceTemplate',
ordered_params=[u'project', u'region', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/setInstanceTemplate',
request_field=u'regionInstanceGroupManagersSetTemplateRequest',
request_type_name=u'ComputeRegionInstanceGroupManagersSetInstanceTemplateRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetTargetPools(self, request, global_params=None):
"""Modifies the target pools to which all new instances in this group are assigned. Existing instances in the group are not affected.
Args:
request: (ComputeRegionInstanceGroupManagersSetTargetPoolsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetTargetPools')
return self._RunMethod(
config, request, global_params=global_params)
SetTargetPools.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionInstanceGroupManagers.setTargetPools',
ordered_params=[u'project', u'region', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}/setTargetPools',
request_field=u'regionInstanceGroupManagersSetTargetPoolsRequest',
request_type_name=u'ComputeRegionInstanceGroupManagersSetTargetPoolsRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeRegionInstanceGroupManagersTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionInstanceGroupManagers.testIamPermissions',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeRegionInstanceGroupManagersTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
def Update(self, request, global_params=None):
"""Updates a managed instance group using the information that you specify in the request. This operation is marked as DONE when the group is updated even if the instances in the group have not yet been updated. You must separately verify the status of the individual instances with the listmanagedinstances method.
Args:
request: (ComputeRegionInstanceGroupManagersUpdateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PUT',
method_id=u'compute.regionInstanceGroupManagers.update',
ordered_params=[u'project', u'region', u'instanceGroupManager'],
path_params=[u'instanceGroupManager', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/instanceGroupManagers/{instanceGroupManager}',
request_field=u'instanceGroupManagerResource',
request_type_name=u'ComputeRegionInstanceGroupManagersUpdateRequest',
response_type_name=u'Operation',
supports_download=False,
)
class RegionInstanceGroupsService(base_api.BaseApiService):
"""Service class for the regionInstanceGroups resource."""
_NAME = u'regionInstanceGroups'
def __init__(self, client):
super(ComputeAlpha.RegionInstanceGroupsService, self).__init__(client)
self._upload_configs = {
}
def Get(self, request, global_params=None):
"""Returns the specified instance group resource.
Args:
request: (ComputeRegionInstanceGroupsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(InstanceGroup) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionInstanceGroups.get',
ordered_params=[u'project', u'region', u'instanceGroup'],
path_params=[u'instanceGroup', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/instanceGroups/{instanceGroup}',
request_field='',
request_type_name=u'ComputeRegionInstanceGroupsGetRequest',
response_type_name=u'InstanceGroup',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of instance group resources contained within the specified region.
Args:
request: (ComputeRegionInstanceGroupsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(RegionInstanceGroupList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionInstanceGroups.list',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/instanceGroups',
request_field='',
request_type_name=u'ComputeRegionInstanceGroupsListRequest',
response_type_name=u'RegionInstanceGroupList',
supports_download=False,
)
def ListInstances(self, request, global_params=None):
"""Lists the instances in the specified instance group and displays information about the named ports. Depending on the specified options, this method can list all instances or only the instances that are running.
Args:
request: (ComputeRegionInstanceGroupsListInstancesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(RegionInstanceGroupsListInstances) The response message.
"""
config = self.GetMethodConfig('ListInstances')
return self._RunMethod(
config, request, global_params=global_params)
ListInstances.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionInstanceGroups.listInstances',
ordered_params=[u'project', u'region', u'instanceGroup'],
path_params=[u'instanceGroup', u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/instanceGroups/{instanceGroup}/listInstances',
request_field=u'regionInstanceGroupsListInstancesRequest',
request_type_name=u'ComputeRegionInstanceGroupsListInstancesRequest',
response_type_name=u'RegionInstanceGroupsListInstances',
supports_download=False,
)
def SetNamedPorts(self, request, global_params=None):
"""Sets the named ports for the specified regional instance group.
Args:
request: (ComputeRegionInstanceGroupsSetNamedPortsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetNamedPorts')
return self._RunMethod(
config, request, global_params=global_params)
SetNamedPorts.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionInstanceGroups.setNamedPorts',
ordered_params=[u'project', u'region', u'instanceGroup'],
path_params=[u'instanceGroup', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/instanceGroups/{instanceGroup}/setNamedPorts',
request_field=u'regionInstanceGroupsSetNamedPortsRequest',
request_type_name=u'ComputeRegionInstanceGroupsSetNamedPortsRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeRegionInstanceGroupsTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.regionInstanceGroups.testIamPermissions',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/instanceGroups/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeRegionInstanceGroupsTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
class RegionOperationsService(base_api.BaseApiService):
"""Service class for the regionOperations resource."""
_NAME = u'regionOperations'
def __init__(self, client):
super(ComputeAlpha.RegionOperationsService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified region-specific Operations resource.
Args:
request: (ComputeRegionOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ComputeRegionOperationsDeleteResponse) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.regionOperations.delete',
ordered_params=[u'project', u'region', u'operation'],
path_params=[u'operation', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/operations/{operation}',
request_field='',
request_type_name=u'ComputeRegionOperationsDeleteRequest',
response_type_name=u'ComputeRegionOperationsDeleteResponse',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Retrieves the specified region-specific Operations resource.
Args:
request: (ComputeRegionOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionOperations.get',
ordered_params=[u'project', u'region', u'operation'],
path_params=[u'operation', u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/operations/{operation}',
request_field='',
request_type_name=u'ComputeRegionOperationsGetRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of Operation resources contained within the specified region.
Args:
request: (ComputeRegionOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(OperationList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regionOperations.list',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/operations',
request_field='',
request_type_name=u'ComputeRegionOperationsListRequest',
response_type_name=u'OperationList',
supports_download=False,
)
class RegionsService(base_api.BaseApiService):
"""Service class for the regions resource."""
_NAME = u'regions'
def __init__(self, client):
super(ComputeAlpha.RegionsService, self).__init__(client)
self._upload_configs = {
}
def Get(self, request, global_params=None):
"""Returns the specified Region resource. Get a list of available regions by making a list() request.
Args:
request: (ComputeRegionsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Region) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regions.get',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}',
request_field='',
request_type_name=u'ComputeRegionsGetRequest',
response_type_name=u'Region',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of region resources available to the specified project.
Args:
request: (ComputeRegionsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(RegionList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.regions.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions',
request_field='',
request_type_name=u'ComputeRegionsListRequest',
response_type_name=u'RegionList',
supports_download=False,
)
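# Illustrative usage (editor's sketch): the List methods in this file share
# the filter/maxResults/orderBy/pageToken query parameters, so paging through
# regions looks like the loop below, and the same pattern applies to the
# other List methods. `items` and `nextPageToken` on RegionList are the
# standard list-response fields; the project name is a placeholder.
#
#   request = messages.ComputeRegionsListRequest(project='my-project',
#                                                maxResults=50)
#   while True:
#       page = client.regions.List(request)
#       for region in page.items or []:
#           print(region.name)
#       if not page.nextPageToken:
#           break
#       request.pageToken = page.nextPageToken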
class RoutersService(base_api.BaseApiService):
"""Service class for the routers resource."""
_NAME = u'routers'
def __init__(self, client):
super(ComputeAlpha.RoutersService, self).__init__(client)
self._upload_configs = {
}
def AggregatedList(self, request, global_params=None):
"""Retrieves an aggregated list of routers.
Args:
request: (ComputeRoutersAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(RouterAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.routers.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/routers',
request_field='',
request_type_name=u'ComputeRoutersAggregatedListRequest',
response_type_name=u'RouterAggregatedList',
supports_download=False,
)
def Delete(self, request, global_params=None):
"""Deletes the specified Router resource.
Args:
request: (ComputeRoutersDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.routers.delete',
ordered_params=[u'project', u'region', u'router'],
path_params=[u'project', u'region', u'router'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/routers/{router}',
request_field='',
request_type_name=u'ComputeRoutersDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified Router resource. Get a list of available routers by making a list() request.
Args:
request: (ComputeRoutersGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Router) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.routers.get',
ordered_params=[u'project', u'region', u'router'],
path_params=[u'project', u'region', u'router'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/routers/{router}',
request_field='',
request_type_name=u'ComputeRoutersGetRequest',
response_type_name=u'Router',
supports_download=False,
)
def GetRouterStatus(self, request, global_params=None):
"""Retrieves runtime information of the specified router.
Args:
request: (ComputeRoutersGetRouterStatusRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(RouterStatusResponse) The response message.
"""
config = self.GetMethodConfig('GetRouterStatus')
return self._RunMethod(
config, request, global_params=global_params)
GetRouterStatus.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.routers.getRouterStatus',
ordered_params=[u'project', u'region', u'router'],
path_params=[u'project', u'region', u'router'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/routers/{router}/getRouterStatus',
request_field='',
request_type_name=u'ComputeRoutersGetRouterStatusRequest',
response_type_name=u'RouterStatusResponse',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a Router resource in the specified project and region using the data included in the request.
Args:
request: (ComputeRoutersInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.routers.insert',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/routers',
request_field=u'router',
request_type_name=u'ComputeRoutersInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of Router resources available to the specified project.
Args:
request: (ComputeRoutersListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(RouterList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.routers.list',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/routers',
request_field='',
request_type_name=u'ComputeRoutersListRequest',
response_type_name=u'RouterList',
supports_download=False,
)
def Patch(self, request, global_params=None):
"""Updates the specified Router resource with the data included in the request. This method supports patch semantics.
Args:
request: (ComputeRoutersPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PATCH',
method_id=u'compute.routers.patch',
ordered_params=[u'project', u'region', u'router'],
path_params=[u'project', u'region', u'router'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/routers/{router}',
request_field=u'routerResource',
request_type_name=u'ComputeRoutersPatchRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Preview(self, request, global_params=None):
"""Preview fields auto-generated during router create and update operations. Calling this method does NOT create or update the router.
Args:
request: (ComputeRoutersPreviewRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(RoutersPreviewResponse) The response message.
"""
config = self.GetMethodConfig('Preview')
return self._RunMethod(
config, request, global_params=global_params)
Preview.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.routers.preview',
ordered_params=[u'project', u'region', u'router'],
path_params=[u'project', u'region', u'router'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/routers/{router}/preview',
request_field=u'routerResource',
request_type_name=u'ComputeRoutersPreviewRequest',
response_type_name=u'RoutersPreviewResponse',
supports_download=False,
)
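# Illustrative usage (editor's sketch): Preview takes the same router body as
# Update (via the routerResource request_field above) but only reports what
# *would* be applied; nothing is created or modified. The Router/RouterBgp
# fields below are assumptions for illustration.
#
#   preview = client.routers.Preview(
#       messages.ComputeRoutersPreviewRequest(
#           project='my-project',
#           region='us-central1',
#           router='my-router',
#           routerResource=messages.Router(
#               name='my-router',
#               bgp=messages.RouterBgp(asn=64512))))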
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeRoutersTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.routers.testIamPermissions',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/routers/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeRoutersTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
def Update(self, request, global_params=None):
"""Updates the specified Router resource with the data included in the request.
Args:
request: (ComputeRoutersUpdateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PUT',
method_id=u'compute.routers.update',
ordered_params=[u'project', u'region', u'router'],
path_params=[u'project', u'region', u'router'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/routers/{router}',
request_field=u'routerResource',
request_type_name=u'ComputeRoutersUpdateRequest',
response_type_name=u'Operation',
supports_download=False,
)
class RoutesService(base_api.BaseApiService):
"""Service class for the routes resource."""
_NAME = u'routes'
def __init__(self, client):
super(ComputeAlpha.RoutesService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified Route resource.
Args:
request: (ComputeRoutesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.routes.delete',
ordered_params=[u'project', u'route'],
path_params=[u'project', u'route'],
query_params=[],
relative_path=u'projects/{project}/global/routes/{route}',
request_field='',
request_type_name=u'ComputeRoutesDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified Route resource. Get a list of available routes by making a list() request.
Args:
request: (ComputeRoutesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Route) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.routes.get',
ordered_params=[u'project', u'route'],
path_params=[u'project', u'route'],
query_params=[],
relative_path=u'projects/{project}/global/routes/{route}',
request_field='',
request_type_name=u'ComputeRoutesGetRequest',
response_type_name=u'Route',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a Route resource in the specified project using the data included in the request.
Args:
request: (ComputeRoutesInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.routes.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/global/routes',
request_field=u'route',
request_type_name=u'ComputeRoutesInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of Route resources available to the specified project.
Args:
request: (ComputeRoutesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(RouteList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.routes.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/routes',
request_field='',
request_type_name=u'ComputeRoutesListRequest',
response_type_name=u'RouteList',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeRoutesTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.routes.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/routes/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeRoutesTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
class SnapshotsService(base_api.BaseApiService):
"""Service class for the snapshots resource."""
_NAME = u'snapshots'
def __init__(self, client):
super(ComputeAlpha.SnapshotsService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified Snapshot resource. Keep in mind that deleting a single snapshot might not necessarily delete all the data on that snapshot. If any data on the snapshot that is marked for deletion is needed for subsequent snapshots, the data will be moved to the next corresponding snapshot.
For more information, see Deleting snapshots.
Args:
request: (ComputeSnapshotsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.snapshots.delete',
ordered_params=[u'project', u'snapshot'],
path_params=[u'project', u'snapshot'],
query_params=[],
relative_path=u'projects/{project}/global/snapshots/{snapshot}',
request_field='',
request_type_name=u'ComputeSnapshotsDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified Snapshot resource. Get a list of available snapshots by making a list() request.
Args:
request: (ComputeSnapshotsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Snapshot) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.snapshots.get',
ordered_params=[u'project', u'snapshot'],
path_params=[u'project', u'snapshot'],
query_params=[],
relative_path=u'projects/{project}/global/snapshots/{snapshot}',
request_field='',
request_type_name=u'ComputeSnapshotsGetRequest',
response_type_name=u'Snapshot',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of Snapshot resources contained within the specified project.
Args:
request: (ComputeSnapshotsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(SnapshotList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.snapshots.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/snapshots',
request_field='',
request_type_name=u'ComputeSnapshotsListRequest',
response_type_name=u'SnapshotList',
supports_download=False,
)
def SetLabels(self, request, global_params=None):
"""Sets the labels on a snapshot. To learn more about labels, read the Labeling or Tagging Resources documentation.
Args:
request: (ComputeSnapshotsSetLabelsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetLabels')
return self._RunMethod(
config, request, global_params=global_params)
SetLabels.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.snapshots.setLabels',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/snapshots/{resource}/setLabels',
request_field=u'globalSetLabelsRequest',
request_type_name=u'ComputeSnapshotsSetLabelsRequest',
response_type_name=u'Operation',
supports_download=False,
)
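# Illustrative usage (editor's sketch): labels travel as a
# GlobalSetLabelsRequest whose LabelsValue is the usual apitools
# additionalProperties map, and the snapshot's current labelFingerprint
# (read via Get) is assumed to be required alongside the new labels. Names
# are placeholders.
#
#   snap = client.snapshots.Get(messages.ComputeSnapshotsGetRequest(
#       project='my-project', snapshot='my-snapshot'))
#   labels = messages.GlobalSetLabelsRequest.LabelsValue(additionalProperties=[
#       messages.GlobalSetLabelsRequest.LabelsValue.AdditionalProperty(
#           key='env', value='prod')])
#   op = client.snapshots.SetLabels(messages.ComputeSnapshotsSetLabelsRequest(
#       project='my-project', resource='my-snapshot',
#       globalSetLabelsRequest=messages.GlobalSetLabelsRequest(
#           labelFingerprint=snap.labelFingerprint, labels=labels)))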
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeSnapshotsTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.snapshots.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/snapshots/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeSnapshotsTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
class SslCertificatesService(base_api.BaseApiService):
"""Service class for the sslCertificates resource."""
_NAME = u'sslCertificates'
def __init__(self, client):
super(ComputeAlpha.SslCertificatesService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified SslCertificate resource.
Args:
request: (ComputeSslCertificatesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.sslCertificates.delete',
ordered_params=[u'project', u'sslCertificate'],
path_params=[u'project', u'sslCertificate'],
query_params=[],
relative_path=u'projects/{project}/global/sslCertificates/{sslCertificate}',
request_field='',
request_type_name=u'ComputeSslCertificatesDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified SslCertificate resource. Get a list of available SSL certificates by making a list() request.
Args:
request: (ComputeSslCertificatesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(SslCertificate) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.sslCertificates.get',
ordered_params=[u'project', u'sslCertificate'],
path_params=[u'project', u'sslCertificate'],
query_params=[],
relative_path=u'projects/{project}/global/sslCertificates/{sslCertificate}',
request_field='',
request_type_name=u'ComputeSslCertificatesGetRequest',
response_type_name=u'SslCertificate',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a SslCertificate resource in the specified project using the data included in the request.
Args:
request: (ComputeSslCertificatesInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.sslCertificates.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/global/sslCertificates',
request_field=u'sslCertificate',
request_type_name=u'ComputeSslCertificatesInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of SslCertificate resources available to the specified project.
Args:
request: (ComputeSslCertificatesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(SslCertificateList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.sslCertificates.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/sslCertificates',
request_field='',
request_type_name=u'ComputeSslCertificatesListRequest',
response_type_name=u'SslCertificateList',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeSslCertificatesTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.sslCertificates.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/sslCertificates/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeSslCertificatesTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
class SubnetworksService(base_api.BaseApiService):
"""Service class for the subnetworks resource."""
_NAME = u'subnetworks'
def __init__(self, client):
super(ComputeAlpha.SubnetworksService, self).__init__(client)
self._upload_configs = {
}
def AggregatedList(self, request, global_params=None):
"""Retrieves an aggregated list of subnetworks.
Args:
request: (ComputeSubnetworksAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(SubnetworkAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.subnetworks.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/subnetworks',
request_field='',
request_type_name=u'ComputeSubnetworksAggregatedListRequest',
response_type_name=u'SubnetworkAggregatedList',
supports_download=False,
)
def Delete(self, request, global_params=None):
"""Deletes the specified subnetwork.
Args:
request: (ComputeSubnetworksDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.subnetworks.delete',
ordered_params=[u'project', u'region', u'subnetwork'],
path_params=[u'project', u'region', u'subnetwork'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/subnetworks/{subnetwork}',
request_field='',
request_type_name=u'ComputeSubnetworksDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def ExpandIpCidrRange(self, request, global_params=None):
"""Expands the IP CIDR range of the subnetwork to a specified value.
Args:
request: (ComputeSubnetworksExpandIpCidrRangeRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('ExpandIpCidrRange')
return self._RunMethod(
config, request, global_params=global_params)
ExpandIpCidrRange.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.subnetworks.expandIpCidrRange',
ordered_params=[u'project', u'region', u'subnetwork'],
path_params=[u'project', u'region', u'subnetwork'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/subnetworks/{subnetwork}/expandIpCidrRange',
request_field=u'subnetworksExpandIpCidrRangeRequest',
request_type_name=u'ComputeSubnetworksExpandIpCidrRangeRequest',
response_type_name=u'Operation',
supports_download=False,
)
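# Illustrative usage (editor's sketch): the new range must contain the
# subnetwork's existing range; the CIDR and resource names below are
# placeholders, and `ipCidrRange` is the assumed body field.
#
#   op = client.subnetworks.ExpandIpCidrRange(
#       messages.ComputeSubnetworksExpandIpCidrRangeRequest(
#           project='my-project',
#           region='us-central1',
#           subnetwork='my-subnet',
#           subnetworksExpandIpCidrRangeRequest=(
#               messages.SubnetworksExpandIpCidrRangeRequest(
#                   ipCidrRange='10.0.0.0/16'))))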
def Get(self, request, global_params=None):
"""Returns the specified subnetwork. Get a list of available subnetworks list() request.
Args:
request: (ComputeSubnetworksGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Subnetwork) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.subnetworks.get',
ordered_params=[u'project', u'region', u'subnetwork'],
path_params=[u'project', u'region', u'subnetwork'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/subnetworks/{subnetwork}',
request_field='',
request_type_name=u'ComputeSubnetworksGetRequest',
response_type_name=u'Subnetwork',
supports_download=False,
)
def GetIamPolicy(self, request, global_params=None):
"""Gets the access control policy for a resource. May be empty if no such policy or resource exists.
Args:
request: (ComputeSubnetworksGetIamPolicyRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Policy) The response message.
"""
config = self.GetMethodConfig('GetIamPolicy')
return self._RunMethod(
config, request, global_params=global_params)
GetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.subnetworks.getIamPolicy',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/subnetworks/{resource}/getIamPolicy',
request_field='',
request_type_name=u'ComputeSubnetworksGetIamPolicyRequest',
response_type_name=u'Policy',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a subnetwork in the specified project using the data included in the request.
Args:
request: (ComputeSubnetworksInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.subnetworks.insert',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/subnetworks',
request_field=u'subnetwork',
request_type_name=u'ComputeSubnetworksInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of subnetworks available to the specified project.
Args:
request: (ComputeSubnetworksListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(SubnetworkList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.subnetworks.list',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/subnetworks',
request_field='',
request_type_name=u'ComputeSubnetworksListRequest',
response_type_name=u'SubnetworkList',
supports_download=False,
)
def SetIamPolicy(self, request, global_params=None):
"""Sets the access control policy on the specified resource. Replaces any existing policy.
Args:
request: (ComputeSubnetworksSetIamPolicyRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Policy) The response message.
"""
config = self.GetMethodConfig('SetIamPolicy')
return self._RunMethod(
config, request, global_params=global_params)
SetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.subnetworks.setIamPolicy',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/subnetworks/{resource}/setIamPolicy',
request_field=u'policy',
request_type_name=u'ComputeSubnetworksSetIamPolicyRequest',
response_type_name=u'Policy',
supports_download=False,
)
def SetPrivateIpGoogleAccess(self, request, global_params=None):
"""Set whether VMs in this subnet can access Google services without assigning external IP addresses through Cloudpath.
Args:
request: (ComputeSubnetworksSetPrivateIpGoogleAccessRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetPrivateIpGoogleAccess')
return self._RunMethod(
config, request, global_params=global_params)
SetPrivateIpGoogleAccess.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.subnetworks.setPrivateIpGoogleAccess',
ordered_params=[u'project', u'region', u'subnetwork'],
path_params=[u'project', u'region', u'subnetwork'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/subnetworks/{subnetwork}/setPrivateIpGoogleAccess',
request_field=u'subnetworksSetPrivateIpGoogleAccessRequest',
request_type_name=u'ComputeSubnetworksSetPrivateIpGoogleAccessRequest',
response_type_name=u'Operation',
supports_download=False,
)
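# Illustrative usage (editor's sketch): toggling the flag for an existing
# subnetwork. The wrapper message matches the request_field above;
# `privateIpGoogleAccess` is the assumed boolean field, names are
# placeholders.
#
#   op = client.subnetworks.SetPrivateIpGoogleAccess(
#       messages.ComputeSubnetworksSetPrivateIpGoogleAccessRequest(
#           project='my-project',
#           region='us-central1',
#           subnetwork='my-subnet',
#           subnetworksSetPrivateIpGoogleAccessRequest=(
#               messages.SubnetworksSetPrivateIpGoogleAccessRequest(
#                   privateIpGoogleAccess=True))))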
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeSubnetworksTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.subnetworks.testIamPermissions',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/subnetworks/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeSubnetworksTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
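# Illustrative usage (editor's sketch): TestIamPermissions is read-only and
# returns the subset of the supplied permissions that the caller actually
# holds; the permission strings below are examples, and the same pattern
# applies to the other TestIamPermissions methods in this file.
#
#   resp = client.subnetworks.TestIamPermissions(
#       messages.ComputeSubnetworksTestIamPermissionsRequest(
#           project='my-project',
#           region='us-central1',
#           resource='my-subnet',
#           testPermissionsRequest=messages.TestPermissionsRequest(
#               permissions=['compute.subnetworks.get',
#                            'compute.subnetworks.use'])))
#   print(resp.permissions)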
class TargetHttpProxiesService(base_api.BaseApiService):
"""Service class for the targetHttpProxies resource."""
_NAME = u'targetHttpProxies'
def __init__(self, client):
super(ComputeAlpha.TargetHttpProxiesService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified TargetHttpProxy resource.
Args:
request: (ComputeTargetHttpProxiesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.targetHttpProxies.delete',
ordered_params=[u'project', u'targetHttpProxy'],
path_params=[u'project', u'targetHttpProxy'],
query_params=[],
relative_path=u'projects/{project}/global/targetHttpProxies/{targetHttpProxy}',
request_field='',
request_type_name=u'ComputeTargetHttpProxiesDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified TargetHttpProxy resource. Get a list of available target HTTP proxies by making a list() request.
Args:
request: (ComputeTargetHttpProxiesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TargetHttpProxy) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.targetHttpProxies.get',
ordered_params=[u'project', u'targetHttpProxy'],
path_params=[u'project', u'targetHttpProxy'],
query_params=[],
relative_path=u'projects/{project}/global/targetHttpProxies/{targetHttpProxy}',
request_field='',
request_type_name=u'ComputeTargetHttpProxiesGetRequest',
response_type_name=u'TargetHttpProxy',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a TargetHttpProxy resource in the specified project using the data included in the request.
Args:
request: (ComputeTargetHttpProxiesInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetHttpProxies.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/global/targetHttpProxies',
request_field=u'targetHttpProxy',
request_type_name=u'ComputeTargetHttpProxiesInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of TargetHttpProxy resources available to the specified project.
Args:
request: (ComputeTargetHttpProxiesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TargetHttpProxyList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.targetHttpProxies.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/targetHttpProxies',
request_field='',
request_type_name=u'ComputeTargetHttpProxiesListRequest',
response_type_name=u'TargetHttpProxyList',
supports_download=False,
)
def SetUrlMap(self, request, global_params=None):
"""Changes the URL map for TargetHttpProxy.
Args:
request: (ComputeTargetHttpProxiesSetUrlMapRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetUrlMap')
return self._RunMethod(
config, request, global_params=global_params)
SetUrlMap.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetHttpProxies.setUrlMap',
ordered_params=[u'project', u'targetHttpProxy'],
path_params=[u'project', u'targetHttpProxy'],
query_params=[],
relative_path=u'projects/{project}/targetHttpProxies/{targetHttpProxy}/setUrlMap',
request_field=u'urlMapReference',
request_type_name=u'ComputeTargetHttpProxiesSetUrlMapRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeTargetHttpProxiesTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetHttpProxies.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/targetHttpProxies/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeTargetHttpProxiesTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
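    # --- Usage sketch (illustrative; not part of the generated client) ---
    # Pointing a target HTTP proxy at a different URL map via SetUrlMap,
    # assuming `client` and `messages` as in the earlier sketch. The
    # UrlMapReference `urlMap` field name is an assumption from the API's
    # message definitions; resource paths are placeholders.
    #
    #   request = messages.ComputeTargetHttpProxiesSetUrlMapRequest(
    #       project='my-project', targetHttpProxy='my-proxy',
    #       urlMapReference=messages.UrlMapReference(
    #           urlMap='global/urlMaps/my-url-map'))
    #   operation = client.targetHttpProxies.SetUrlMap(request)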
class TargetHttpsProxiesService(base_api.BaseApiService):
"""Service class for the targetHttpsProxies resource."""
_NAME = u'targetHttpsProxies'
def __init__(self, client):
super(ComputeAlpha.TargetHttpsProxiesService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified TargetHttpsProxy resource.
Args:
request: (ComputeTargetHttpsProxiesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.targetHttpsProxies.delete',
ordered_params=[u'project', u'targetHttpsProxy'],
path_params=[u'project', u'targetHttpsProxy'],
query_params=[],
relative_path=u'projects/{project}/global/targetHttpsProxies/{targetHttpsProxy}',
request_field='',
request_type_name=u'ComputeTargetHttpsProxiesDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified TargetHttpsProxy resource. Get a list of available target HTTPS proxies by making a list() request.
Args:
request: (ComputeTargetHttpsProxiesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TargetHttpsProxy) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.targetHttpsProxies.get',
ordered_params=[u'project', u'targetHttpsProxy'],
path_params=[u'project', u'targetHttpsProxy'],
query_params=[],
relative_path=u'projects/{project}/global/targetHttpsProxies/{targetHttpsProxy}',
request_field='',
request_type_name=u'ComputeTargetHttpsProxiesGetRequest',
response_type_name=u'TargetHttpsProxy',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a TargetHttpsProxy resource in the specified project using the data included in the request.
Args:
request: (ComputeTargetHttpsProxiesInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetHttpsProxies.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/global/targetHttpsProxies',
request_field=u'targetHttpsProxy',
request_type_name=u'ComputeTargetHttpsProxiesInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of TargetHttpsProxy resources available to the specified project.
Args:
request: (ComputeTargetHttpsProxiesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TargetHttpsProxyList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.targetHttpsProxies.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/targetHttpsProxies',
request_field='',
request_type_name=u'ComputeTargetHttpsProxiesListRequest',
response_type_name=u'TargetHttpsProxyList',
supports_download=False,
)
def SetSslCertificates(self, request, global_params=None):
"""Replaces SslCertificates for TargetHttpsProxy.
Args:
request: (ComputeTargetHttpsProxiesSetSslCertificatesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetSslCertificates')
return self._RunMethod(
config, request, global_params=global_params)
SetSslCertificates.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetHttpsProxies.setSslCertificates',
ordered_params=[u'project', u'targetHttpsProxy'],
path_params=[u'project', u'targetHttpsProxy'],
query_params=[],
relative_path=u'projects/{project}/targetHttpsProxies/{targetHttpsProxy}/setSslCertificates',
request_field=u'targetHttpsProxiesSetSslCertificatesRequest',
request_type_name=u'ComputeTargetHttpsProxiesSetSslCertificatesRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetUrlMap(self, request, global_params=None):
"""Changes the URL map for TargetHttpsProxy.
Args:
request: (ComputeTargetHttpsProxiesSetUrlMapRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetUrlMap')
return self._RunMethod(
config, request, global_params=global_params)
SetUrlMap.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetHttpsProxies.setUrlMap',
ordered_params=[u'project', u'targetHttpsProxy'],
path_params=[u'project', u'targetHttpsProxy'],
query_params=[],
relative_path=u'projects/{project}/targetHttpsProxies/{targetHttpsProxy}/setUrlMap',
request_field=u'urlMapReference',
request_type_name=u'ComputeTargetHttpsProxiesSetUrlMapRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeTargetHttpsProxiesTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetHttpsProxies.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/targetHttpsProxies/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeTargetHttpsProxiesTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
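    # --- Usage sketch (illustrative; not part of the generated client) ---
    # Rotating certificates with SetSslCertificates, assuming `client` and
    # `messages` as above. The `sslCertificates` list field is an assumption
    # from the API's message definitions.
    #
    #   request = messages.ComputeTargetHttpsProxiesSetSslCertificatesRequest(
    #       project='my-project', targetHttpsProxy='my-https-proxy',
    #       targetHttpsProxiesSetSslCertificatesRequest=(
    #           messages.TargetHttpsProxiesSetSslCertificatesRequest(
    #               sslCertificates=['global/sslCertificates/my-cert'])))
    #   operation = client.targetHttpsProxies.SetSslCertificates(request)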
class TargetInstancesService(base_api.BaseApiService):
"""Service class for the targetInstances resource."""
_NAME = u'targetInstances'
def __init__(self, client):
super(ComputeAlpha.TargetInstancesService, self).__init__(client)
self._upload_configs = {
}
def AggregatedList(self, request, global_params=None):
"""Retrieves an aggregated list of target instances.
Args:
request: (ComputeTargetInstancesAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TargetInstanceAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.targetInstances.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/targetInstances',
request_field='',
request_type_name=u'ComputeTargetInstancesAggregatedListRequest',
response_type_name=u'TargetInstanceAggregatedList',
supports_download=False,
)
def Delete(self, request, global_params=None):
"""Deletes the specified TargetInstance resource.
Args:
request: (ComputeTargetInstancesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.targetInstances.delete',
ordered_params=[u'project', u'zone', u'targetInstance'],
path_params=[u'project', u'targetInstance', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/targetInstances/{targetInstance}',
request_field='',
request_type_name=u'ComputeTargetInstancesDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified TargetInstance resource. Get a list of available target instances by making a list() request.
Args:
request: (ComputeTargetInstancesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TargetInstance) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.targetInstances.get',
ordered_params=[u'project', u'zone', u'targetInstance'],
path_params=[u'project', u'targetInstance', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/targetInstances/{targetInstance}',
request_field='',
request_type_name=u'ComputeTargetInstancesGetRequest',
response_type_name=u'TargetInstance',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a TargetInstance resource in the specified project and zone using the data included in the request.
Args:
request: (ComputeTargetInstancesInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetInstances.insert',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/targetInstances',
request_field=u'targetInstance',
request_type_name=u'ComputeTargetInstancesInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of TargetInstance resources available to the specified project and zone.
Args:
request: (ComputeTargetInstancesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TargetInstanceList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.targetInstances.list',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/zones/{zone}/targetInstances',
request_field='',
request_type_name=u'ComputeTargetInstancesListRequest',
response_type_name=u'TargetInstanceList',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeTargetInstancesTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetInstances.testIamPermissions',
ordered_params=[u'project', u'zone', u'resource'],
path_params=[u'project', u'resource', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/targetInstances/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeTargetInstancesTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
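    # --- Usage sketch (illustrative; not part of the generated client) ---
    # Paging through AggregatedList results, assuming `client` as above. The
    # `items`/`nextPageToken` response fields follow the usual Compute list
    # conventions and are assumptions here.
    #
    #   request = client.MESSAGES_MODULE.ComputeTargetInstancesAggregatedListRequest(
    #       project='my-project', maxResults=100)
    #   while True:
    #     response = client.targetInstances.AggregatedList(request)
    #     # ... inspect response.items ...
    #     if not response.nextPageToken:
    #       break
    #     request.pageToken = response.nextPageToken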
class TargetPoolsService(base_api.BaseApiService):
"""Service class for the targetPools resource."""
_NAME = u'targetPools'
def __init__(self, client):
super(ComputeAlpha.TargetPoolsService, self).__init__(client)
self._upload_configs = {
}
def AddHealthCheck(self, request, global_params=None):
"""Adds health check URLs to a target pool.
Args:
request: (ComputeTargetPoolsAddHealthCheckRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('AddHealthCheck')
return self._RunMethod(
config, request, global_params=global_params)
AddHealthCheck.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetPools.addHealthCheck',
ordered_params=[u'project', u'region', u'targetPool'],
path_params=[u'project', u'region', u'targetPool'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}/addHealthCheck',
request_field=u'targetPoolsAddHealthCheckRequest',
request_type_name=u'ComputeTargetPoolsAddHealthCheckRequest',
response_type_name=u'Operation',
supports_download=False,
)
def AddInstance(self, request, global_params=None):
"""Adds an instance to a target pool.
Args:
request: (ComputeTargetPoolsAddInstanceRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('AddInstance')
return self._RunMethod(
config, request, global_params=global_params)
AddInstance.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetPools.addInstance',
ordered_params=[u'project', u'region', u'targetPool'],
path_params=[u'project', u'region', u'targetPool'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}/addInstance',
request_field=u'targetPoolsAddInstanceRequest',
request_type_name=u'ComputeTargetPoolsAddInstanceRequest',
response_type_name=u'Operation',
supports_download=False,
)
def AggregatedList(self, request, global_params=None):
"""Retrieves an aggregated list of target pools.
Args:
request: (ComputeTargetPoolsAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TargetPoolAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.targetPools.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/targetPools',
request_field='',
request_type_name=u'ComputeTargetPoolsAggregatedListRequest',
response_type_name=u'TargetPoolAggregatedList',
supports_download=False,
)
def Delete(self, request, global_params=None):
"""Deletes the specified target pool.
Args:
request: (ComputeTargetPoolsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.targetPools.delete',
ordered_params=[u'project', u'region', u'targetPool'],
path_params=[u'project', u'region', u'targetPool'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}',
request_field='',
request_type_name=u'ComputeTargetPoolsDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified target pool. Get a list of available target pools by making a list() request.
Args:
request: (ComputeTargetPoolsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TargetPool) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.targetPools.get',
ordered_params=[u'project', u'region', u'targetPool'],
path_params=[u'project', u'region', u'targetPool'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}',
request_field='',
request_type_name=u'ComputeTargetPoolsGetRequest',
response_type_name=u'TargetPool',
supports_download=False,
)
def GetHealth(self, request, global_params=None):
"""Gets the most recent health check results for each IP for the instance that is referenced by the given target pool.
Args:
request: (ComputeTargetPoolsGetHealthRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TargetPoolInstanceHealth) The response message.
"""
config = self.GetMethodConfig('GetHealth')
return self._RunMethod(
config, request, global_params=global_params)
GetHealth.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetPools.getHealth',
ordered_params=[u'project', u'region', u'targetPool'],
path_params=[u'project', u'region', u'targetPool'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}/getHealth',
request_field=u'instanceReference',
request_type_name=u'ComputeTargetPoolsGetHealthRequest',
response_type_name=u'TargetPoolInstanceHealth',
supports_download=False,
)
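    # --- Usage sketch (illustrative; not part of the generated client) ---
    # GetHealth is a POST whose body is the `instanceReference` field, per the
    # method config above. Assuming `client`/`messages` as in earlier sketches
    # and that InstanceReference carries an `instance` URL field (assumption):
    #
    #   request = messages.ComputeTargetPoolsGetHealthRequest(
    #       project='my-project', region='us-central1', targetPool='my-pool',
    #       instanceReference=messages.InstanceReference(
    #           instance='zones/us-central1-a/instances/my-instance'))
    #   health = client.targetPools.GetHealth(request)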
def Insert(self, request, global_params=None):
"""Creates a target pool in the specified project and region using the data included in the request.
Args:
request: (ComputeTargetPoolsInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetPools.insert',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/targetPools',
request_field=u'targetPool',
request_type_name=u'ComputeTargetPoolsInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of target pools available to the specified project and region.
Args:
request: (ComputeTargetPoolsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TargetPoolList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.targetPools.list',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/targetPools',
request_field='',
request_type_name=u'ComputeTargetPoolsListRequest',
response_type_name=u'TargetPoolList',
supports_download=False,
)
def RemoveHealthCheck(self, request, global_params=None):
"""Removes health check URL from a target pool.
Args:
request: (ComputeTargetPoolsRemoveHealthCheckRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('RemoveHealthCheck')
return self._RunMethod(
config, request, global_params=global_params)
RemoveHealthCheck.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetPools.removeHealthCheck',
ordered_params=[u'project', u'region', u'targetPool'],
path_params=[u'project', u'region', u'targetPool'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}/removeHealthCheck',
request_field=u'targetPoolsRemoveHealthCheckRequest',
request_type_name=u'ComputeTargetPoolsRemoveHealthCheckRequest',
response_type_name=u'Operation',
supports_download=False,
)
def RemoveInstance(self, request, global_params=None):
"""Removes instance URL from a target pool.
Args:
request: (ComputeTargetPoolsRemoveInstanceRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('RemoveInstance')
return self._RunMethod(
config, request, global_params=global_params)
RemoveInstance.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetPools.removeInstance',
ordered_params=[u'project', u'region', u'targetPool'],
path_params=[u'project', u'region', u'targetPool'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}/removeInstance',
request_field=u'targetPoolsRemoveInstanceRequest',
request_type_name=u'ComputeTargetPoolsRemoveInstanceRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetBackup(self, request, global_params=None):
"""Changes a backup target pool's configurations.
Args:
request: (ComputeTargetPoolsSetBackupRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetBackup')
return self._RunMethod(
config, request, global_params=global_params)
SetBackup.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetPools.setBackup',
ordered_params=[u'project', u'region', u'targetPool'],
path_params=[u'project', u'region', u'targetPool'],
query_params=[u'failoverRatio'],
relative_path=u'projects/{project}/regions/{region}/targetPools/{targetPool}/setBackup',
request_field=u'targetReference',
request_type_name=u'ComputeTargetPoolsSetBackupRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeTargetPoolsTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetPools.testIamPermissions',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/targetPools/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeTargetPoolsTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
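    # --- Usage sketch (illustrative; not part of the generated client) ---
    # SetBackup passes the backup pool through the `targetReference` body
    # field and `failoverRatio` as a query parameter, per its method config.
    # `client`/`messages` as in the other sketches; TargetReference's `target`
    # field name is an assumption.
    #
    #   request = messages.ComputeTargetPoolsSetBackupRequest(
    #       project='my-project', region='us-central1', targetPool='my-pool',
    #       failoverRatio=0.1,
    #       targetReference=messages.TargetReference(
    #           target='regions/us-central1/targetPools/backup-pool'))
    #   operation = client.targetPools.SetBackup(request)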
class TargetSslProxiesService(base_api.BaseApiService):
"""Service class for the targetSslProxies resource."""
_NAME = u'targetSslProxies'
def __init__(self, client):
super(ComputeAlpha.TargetSslProxiesService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified TargetSslProxy resource.
Args:
request: (ComputeTargetSslProxiesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.targetSslProxies.delete',
ordered_params=[u'project', u'targetSslProxy'],
path_params=[u'project', u'targetSslProxy'],
query_params=[],
relative_path=u'projects/{project}/global/targetSslProxies/{targetSslProxy}',
request_field='',
request_type_name=u'ComputeTargetSslProxiesDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified TargetSslProxy resource. Get a list of available target SSL proxies by making a list() request.
Args:
request: (ComputeTargetSslProxiesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TargetSslProxy) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.targetSslProxies.get',
ordered_params=[u'project', u'targetSslProxy'],
path_params=[u'project', u'targetSslProxy'],
query_params=[],
relative_path=u'projects/{project}/global/targetSslProxies/{targetSslProxy}',
request_field='',
request_type_name=u'ComputeTargetSslProxiesGetRequest',
response_type_name=u'TargetSslProxy',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a TargetSslProxy resource in the specified project using the data included in the request.
Args:
request: (ComputeTargetSslProxiesInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetSslProxies.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/global/targetSslProxies',
request_field=u'targetSslProxy',
request_type_name=u'ComputeTargetSslProxiesInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of TargetSslProxy resources available to the specified project.
Args:
request: (ComputeTargetSslProxiesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TargetSslProxyList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.targetSslProxies.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/targetSslProxies',
request_field='',
request_type_name=u'ComputeTargetSslProxiesListRequest',
response_type_name=u'TargetSslProxyList',
supports_download=False,
)
def SetBackendService(self, request, global_params=None):
"""Changes the BackendService for TargetSslProxy.
Args:
request: (ComputeTargetSslProxiesSetBackendServiceRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetBackendService')
return self._RunMethod(
config, request, global_params=global_params)
SetBackendService.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetSslProxies.setBackendService',
ordered_params=[u'project', u'targetSslProxy'],
path_params=[u'project', u'targetSslProxy'],
query_params=[],
relative_path=u'projects/{project}/global/targetSslProxies/{targetSslProxy}/setBackendService',
request_field=u'targetSslProxiesSetBackendServiceRequest',
request_type_name=u'ComputeTargetSslProxiesSetBackendServiceRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetProxyHeader(self, request, global_params=None):
"""Changes the ProxyHeaderType for TargetSslProxy.
Args:
request: (ComputeTargetSslProxiesSetProxyHeaderRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetProxyHeader')
return self._RunMethod(
config, request, global_params=global_params)
SetProxyHeader.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetSslProxies.setProxyHeader',
ordered_params=[u'project', u'targetSslProxy'],
path_params=[u'project', u'targetSslProxy'],
query_params=[],
relative_path=u'projects/{project}/global/targetSslProxies/{targetSslProxy}/setProxyHeader',
request_field=u'targetSslProxiesSetProxyHeaderRequest',
request_type_name=u'ComputeTargetSslProxiesSetProxyHeaderRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetSslCertificates(self, request, global_params=None):
"""Changes SslCertificates for TargetSslProxy.
Args:
request: (ComputeTargetSslProxiesSetSslCertificatesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetSslCertificates')
return self._RunMethod(
config, request, global_params=global_params)
SetSslCertificates.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetSslProxies.setSslCertificates',
ordered_params=[u'project', u'targetSslProxy'],
path_params=[u'project', u'targetSslProxy'],
query_params=[],
relative_path=u'projects/{project}/global/targetSslProxies/{targetSslProxy}/setSslCertificates',
request_field=u'targetSslProxiesSetSslCertificatesRequest',
request_type_name=u'ComputeTargetSslProxiesSetSslCertificatesRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeTargetSslProxiesTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetSslProxies.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/targetSslProxies/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeTargetSslProxiesTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
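    # --- Usage sketch (illustrative; not part of the generated client) ---
    # Switching the PROXY protocol header with SetProxyHeader, assuming
    # `client`/`messages` as above. The enum class name follows apitools'
    # <Field>ValueValuesEnum convention and is an assumption here.
    #
    #   body = messages.TargetSslProxiesSetProxyHeaderRequest(
    #       proxyHeader=messages.TargetSslProxiesSetProxyHeaderRequest
    #           .ProxyHeaderValueValuesEnum.PROXY_V1)
    #   request = messages.ComputeTargetSslProxiesSetProxyHeaderRequest(
    #       project='my-project', targetSslProxy='my-ssl-proxy',
    #       targetSslProxiesSetProxyHeaderRequest=body)
    #   operation = client.targetSslProxies.SetProxyHeader(request)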
class TargetTcpProxiesService(base_api.BaseApiService):
"""Service class for the targetTcpProxies resource."""
_NAME = u'targetTcpProxies'
def __init__(self, client):
super(ComputeAlpha.TargetTcpProxiesService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified TargetTcpProxy resource.
Args:
request: (ComputeTargetTcpProxiesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.targetTcpProxies.delete',
ordered_params=[u'project', u'targetTcpProxy'],
path_params=[u'project', u'targetTcpProxy'],
query_params=[],
relative_path=u'projects/{project}/global/targetTcpProxies/{targetTcpProxy}',
request_field='',
request_type_name=u'ComputeTargetTcpProxiesDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified TargetTcpProxy resource. Get a list of available target TCP proxies by making a list() request.
Args:
request: (ComputeTargetTcpProxiesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TargetTcpProxy) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.targetTcpProxies.get',
ordered_params=[u'project', u'targetTcpProxy'],
path_params=[u'project', u'targetTcpProxy'],
query_params=[],
relative_path=u'projects/{project}/global/targetTcpProxies/{targetTcpProxy}',
request_field='',
request_type_name=u'ComputeTargetTcpProxiesGetRequest',
response_type_name=u'TargetTcpProxy',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a TargetTcpProxy resource in the specified project using the data included in the request.
Args:
request: (ComputeTargetTcpProxiesInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetTcpProxies.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/global/targetTcpProxies',
request_field=u'targetTcpProxy',
request_type_name=u'ComputeTargetTcpProxiesInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of TargetTcpProxy resources available to the specified project.
Args:
request: (ComputeTargetTcpProxiesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TargetTcpProxyList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.targetTcpProxies.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/targetTcpProxies',
request_field='',
request_type_name=u'ComputeTargetTcpProxiesListRequest',
response_type_name=u'TargetTcpProxyList',
supports_download=False,
)
def SetBackendService(self, request, global_params=None):
"""Changes the BackendService for TargetTcpProxy.
Args:
request: (ComputeTargetTcpProxiesSetBackendServiceRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetBackendService')
return self._RunMethod(
config, request, global_params=global_params)
SetBackendService.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetTcpProxies.setBackendService',
ordered_params=[u'project', u'targetTcpProxy'],
path_params=[u'project', u'targetTcpProxy'],
query_params=[],
relative_path=u'projects/{project}/global/targetTcpProxies/{targetTcpProxy}/setBackendService',
request_field=u'targetTcpProxiesSetBackendServiceRequest',
request_type_name=u'ComputeTargetTcpProxiesSetBackendServiceRequest',
response_type_name=u'Operation',
supports_download=False,
)
def SetProxyHeader(self, request, global_params=None):
"""Changes the ProxyHeaderType for TargetTcpProxy.
Args:
request: (ComputeTargetTcpProxiesSetProxyHeaderRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetProxyHeader')
return self._RunMethod(
config, request, global_params=global_params)
SetProxyHeader.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetTcpProxies.setProxyHeader',
ordered_params=[u'project', u'targetTcpProxy'],
path_params=[u'project', u'targetTcpProxy'],
query_params=[],
relative_path=u'projects/{project}/global/targetTcpProxies/{targetTcpProxy}/setProxyHeader',
request_field=u'targetTcpProxiesSetProxyHeaderRequest',
request_type_name=u'ComputeTargetTcpProxiesSetProxyHeaderRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeTargetTcpProxiesTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetTcpProxies.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/targetTcpProxies/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeTargetTcpProxiesTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
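    # --- Usage sketch (illustrative; not part of the generated client) ---
    # Creating a TCP proxy with Insert, assuming `client`/`messages` as above.
    # The TargetTcpProxy `name`/`service` fields are assumptions from the
    # API's message definitions.
    #
    #   request = messages.ComputeTargetTcpProxiesInsertRequest(
    #       project='my-project',
    #       targetTcpProxy=messages.TargetTcpProxy(
    #           name='my-tcp-proxy',
    #           service='global/backendServices/my-backend-service'))
    #   operation = client.targetTcpProxies.Insert(request)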
class TargetVpnGatewaysService(base_api.BaseApiService):
"""Service class for the targetVpnGateways resource."""
_NAME = u'targetVpnGateways'
def __init__(self, client):
super(ComputeAlpha.TargetVpnGatewaysService, self).__init__(client)
self._upload_configs = {
}
def AggregatedList(self, request, global_params=None):
"""Retrieves an aggregated list of target VPN gateways.
Args:
request: (ComputeTargetVpnGatewaysAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TargetVpnGatewayAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.targetVpnGateways.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/targetVpnGateways',
request_field='',
request_type_name=u'ComputeTargetVpnGatewaysAggregatedListRequest',
response_type_name=u'TargetVpnGatewayAggregatedList',
supports_download=False,
)
def Delete(self, request, global_params=None):
"""Deletes the specified target VPN gateway.
Args:
request: (ComputeTargetVpnGatewaysDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.targetVpnGateways.delete',
ordered_params=[u'project', u'region', u'targetVpnGateway'],
path_params=[u'project', u'region', u'targetVpnGateway'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/targetVpnGateways/{targetVpnGateway}',
request_field='',
request_type_name=u'ComputeTargetVpnGatewaysDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified target VPN gateway. Get a list of available target VPN gateways by making a list() request.
Args:
request: (ComputeTargetVpnGatewaysGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TargetVpnGateway) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.targetVpnGateways.get',
ordered_params=[u'project', u'region', u'targetVpnGateway'],
path_params=[u'project', u'region', u'targetVpnGateway'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/targetVpnGateways/{targetVpnGateway}',
request_field='',
request_type_name=u'ComputeTargetVpnGatewaysGetRequest',
response_type_name=u'TargetVpnGateway',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a target VPN gateway in the specified project and region using the data included in the request.
Args:
request: (ComputeTargetVpnGatewaysInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetVpnGateways.insert',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/targetVpnGateways',
request_field=u'targetVpnGateway',
request_type_name=u'ComputeTargetVpnGatewaysInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of target VPN gateways available to the specified project and region.
Args:
request: (ComputeTargetVpnGatewaysListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TargetVpnGatewayList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.targetVpnGateways.list',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/targetVpnGateways',
request_field='',
request_type_name=u'ComputeTargetVpnGatewaysListRequest',
response_type_name=u'TargetVpnGatewayList',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeTargetVpnGatewaysTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.targetVpnGateways.testIamPermissions',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/targetVpnGateways/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeTargetVpnGatewaysTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
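    # --- Usage sketch (illustrative; not part of the generated client) ---
    # Listing gateways in one region with the `filter` query parameter,
    # assuming `client`/`messages` as above; the filter expression syntax
    # shown is an assumption.
    #
    #   request = messages.ComputeTargetVpnGatewaysListRequest(
    #       project='my-project', region='us-central1',
    #       filter='name eq my-gateway.*')
    #   response = client.targetVpnGateways.List(request)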
class UrlMapsService(base_api.BaseApiService):
"""Service class for the urlMaps resource."""
_NAME = u'urlMaps'
def __init__(self, client):
super(ComputeAlpha.UrlMapsService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified UrlMap resource.
Args:
request: (ComputeUrlMapsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.urlMaps.delete',
ordered_params=[u'project', u'urlMap'],
path_params=[u'project', u'urlMap'],
query_params=[],
relative_path=u'projects/{project}/global/urlMaps/{urlMap}',
request_field='',
request_type_name=u'ComputeUrlMapsDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified UrlMap resource. Get a list of available URL maps by making a list() request.
Args:
request: (ComputeUrlMapsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(UrlMap) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.urlMaps.get',
ordered_params=[u'project', u'urlMap'],
path_params=[u'project', u'urlMap'],
query_params=[],
relative_path=u'projects/{project}/global/urlMaps/{urlMap}',
request_field='',
request_type_name=u'ComputeUrlMapsGetRequest',
response_type_name=u'UrlMap',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a UrlMap resource in the specified project using the data included in the request.
Args:
request: (ComputeUrlMapsInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.urlMaps.insert',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[],
relative_path=u'projects/{project}/global/urlMaps',
request_field=u'urlMap',
request_type_name=u'ComputeUrlMapsInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def InvalidateCache(self, request, global_params=None):
"""Initiates a cache invalidation operation, invalidating the specified path, scoped to the specified UrlMap.
Args:
request: (ComputeUrlMapsInvalidateCacheRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('InvalidateCache')
return self._RunMethod(
config, request, global_params=global_params)
InvalidateCache.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.urlMaps.invalidateCache',
ordered_params=[u'project', u'urlMap'],
path_params=[u'project', u'urlMap'],
query_params=[],
relative_path=u'projects/{project}/global/urlMaps/{urlMap}/invalidateCache',
request_field=u'cacheInvalidationRule',
request_type_name=u'ComputeUrlMapsInvalidateCacheRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of UrlMap resources available to the specified project.
Args:
request: (ComputeUrlMapsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(UrlMapList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.urlMaps.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/global/urlMaps',
request_field='',
request_type_name=u'ComputeUrlMapsListRequest',
response_type_name=u'UrlMapList',
supports_download=False,
)
def Patch(self, request, global_params=None):
"""Updates the specified UrlMap resource with the data included in the request. This method supports patch semantics.
Args:
request: (ComputeUrlMapsPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PATCH',
method_id=u'compute.urlMaps.patch',
ordered_params=[u'project', u'urlMap'],
path_params=[u'project', u'urlMap'],
query_params=[],
relative_path=u'projects/{project}/global/urlMaps/{urlMap}',
request_field=u'urlMapResource',
request_type_name=u'ComputeUrlMapsPatchRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeUrlMapsTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.urlMaps.testIamPermissions',
ordered_params=[u'project', u'resource'],
path_params=[u'project', u'resource'],
query_params=[],
relative_path=u'projects/{project}/global/urlMaps/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeUrlMapsTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
def Update(self, request, global_params=None):
"""Updates the specified UrlMap resource with the data included in the request.
Args:
request: (ComputeUrlMapsUpdateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Update')
return self._RunMethod(
config, request, global_params=global_params)
Update.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'PUT',
method_id=u'compute.urlMaps.update',
ordered_params=[u'project', u'urlMap'],
path_params=[u'project', u'urlMap'],
query_params=[],
relative_path=u'projects/{project}/global/urlMaps/{urlMap}',
request_field=u'urlMapResource',
request_type_name=u'ComputeUrlMapsUpdateRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Validate(self, request, global_params=None):
"""Runs static validation for the UrlMap. In particular, the tests of the provided UrlMap will be run. Calling this method does NOT create the UrlMap.
Args:
request: (ComputeUrlMapsValidateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(UrlMapsValidateResponse) The response message.
"""
config = self.GetMethodConfig('Validate')
return self._RunMethod(
config, request, global_params=global_params)
Validate.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.urlMaps.validate',
ordered_params=[u'project', u'urlMap'],
path_params=[u'project', u'urlMap'],
query_params=[],
relative_path=u'projects/{project}/global/urlMaps/{urlMap}/validate',
request_field=u'urlMapsValidateRequest',
request_type_name=u'ComputeUrlMapsValidateRequest',
response_type_name=u'UrlMapsValidateResponse',
supports_download=False,
)
class VpnTunnelsService(base_api.BaseApiService):
"""Service class for the vpnTunnels resource."""
_NAME = u'vpnTunnels'
def __init__(self, client):
super(ComputeAlpha.VpnTunnelsService, self).__init__(client)
self._upload_configs = {
}
def AggregatedList(self, request, global_params=None):
"""Retrieves an aggregated list of VPN tunnels.
Args:
request: (ComputeVpnTunnelsAggregatedListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(VpnTunnelAggregatedList) The response message.
"""
config = self.GetMethodConfig('AggregatedList')
return self._RunMethod(
config, request, global_params=global_params)
AggregatedList.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.vpnTunnels.aggregatedList',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/aggregated/vpnTunnels',
request_field='',
request_type_name=u'ComputeVpnTunnelsAggregatedListRequest',
response_type_name=u'VpnTunnelAggregatedList',
supports_download=False,
)
def Delete(self, request, global_params=None):
"""Deletes the specified VpnTunnel resource.
Args:
request: (ComputeVpnTunnelsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.vpnTunnels.delete',
ordered_params=[u'project', u'region', u'vpnTunnel'],
path_params=[u'project', u'region', u'vpnTunnel'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/vpnTunnels/{vpnTunnel}',
request_field='',
request_type_name=u'ComputeVpnTunnelsDeleteRequest',
response_type_name=u'Operation',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Returns the specified VpnTunnel resource. Get a list of available VPN tunnels by making a list() request.
Args:
request: (ComputeVpnTunnelsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(VpnTunnel) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.vpnTunnels.get',
ordered_params=[u'project', u'region', u'vpnTunnel'],
path_params=[u'project', u'region', u'vpnTunnel'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/vpnTunnels/{vpnTunnel}',
request_field='',
request_type_name=u'ComputeVpnTunnelsGetRequest',
response_type_name=u'VpnTunnel',
supports_download=False,
)
def Insert(self, request, global_params=None):
"""Creates a VpnTunnel resource in the specified project and region using the data included in the request.
Args:
request: (ComputeVpnTunnelsInsertRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Insert')
return self._RunMethod(
config, request, global_params=global_params)
Insert.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.vpnTunnels.insert',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/vpnTunnels',
request_field=u'vpnTunnel',
request_type_name=u'ComputeVpnTunnelsInsertRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of VpnTunnel resources contained in the specified project and region.
Args:
request: (ComputeVpnTunnelsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(VpnTunnelList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.vpnTunnels.list',
ordered_params=[u'project', u'region'],
path_params=[u'project', u'region'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/regions/{region}/vpnTunnels',
request_field='',
request_type_name=u'ComputeVpnTunnelsListRequest',
response_type_name=u'VpnTunnelList',
supports_download=False,
)
def SetLabels(self, request, global_params=None):
"""Sets the labels on a VpnTunnel. To learn more about labels, read the Labeling or Tagging Resources documentation.
Args:
request: (ComputeVpnTunnelsSetLabelsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('SetLabels')
return self._RunMethod(
config, request, global_params=global_params)
SetLabels.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.vpnTunnels.setLabels',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/vpnTunnels/{resource}/setLabels',
request_field=u'regionSetLabelsRequest',
request_type_name=u'ComputeVpnTunnelsSetLabelsRequest',
response_type_name=u'Operation',
supports_download=False,
)
def TestIamPermissions(self, request, global_params=None):
"""Returns permissions that a caller has on the specified resource.
Args:
request: (ComputeVpnTunnelsTestIamPermissionsRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(TestPermissionsResponse) The response message.
"""
config = self.GetMethodConfig('TestIamPermissions')
return self._RunMethod(
config, request, global_params=global_params)
TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'POST',
method_id=u'compute.vpnTunnels.testIamPermissions',
ordered_params=[u'project', u'region', u'resource'],
path_params=[u'project', u'region', u'resource'],
query_params=[],
relative_path=u'projects/{project}/regions/{region}/vpnTunnels/{resource}/testIamPermissions',
request_field=u'testPermissionsRequest',
request_type_name=u'ComputeVpnTunnelsTestIamPermissionsRequest',
response_type_name=u'TestPermissionsResponse',
supports_download=False,
)
class ZoneOperationsService(base_api.BaseApiService):
"""Service class for the zoneOperations resource."""
_NAME = u'zoneOperations'
def __init__(self, client):
super(ComputeAlpha.ZoneOperationsService, self).__init__(client)
self._upload_configs = {
}
def Delete(self, request, global_params=None):
"""Deletes the specified zone-specific Operations resource.
Args:
request: (ComputeZoneOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ComputeZoneOperationsDeleteResponse) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'DELETE',
method_id=u'compute.zoneOperations.delete',
ordered_params=[u'project', u'zone', u'operation'],
path_params=[u'operation', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/operations/{operation}',
request_field='',
request_type_name=u'ComputeZoneOperationsDeleteRequest',
response_type_name=u'ComputeZoneOperationsDeleteResponse',
supports_download=False,
)
def Get(self, request, global_params=None):
"""Retrieves the specified zone-specific Operations resource.
Args:
request: (ComputeZoneOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.zoneOperations.get',
ordered_params=[u'project', u'zone', u'operation'],
path_params=[u'operation', u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}/operations/{operation}',
request_field='',
request_type_name=u'ComputeZoneOperationsGetRequest',
response_type_name=u'Operation',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves a list of Operation resources contained within the specified zone.
Args:
request: (ComputeZoneOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(OperationList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.zoneOperations.list',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/zones/{zone}/operations',
request_field='',
request_type_name=u'ComputeZoneOperationsListRequest',
response_type_name=u'OperationList',
supports_download=False,
)
class ZonesService(base_api.BaseApiService):
"""Service class for the zones resource."""
_NAME = u'zones'
def __init__(self, client):
super(ComputeAlpha.ZonesService, self).__init__(client)
self._upload_configs = {
}
def Get(self, request, global_params=None):
"""Returns the specified Zone resource. Get a list of available zones by making a list() request.
Args:
request: (ComputeZonesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Zone) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.zones.get',
ordered_params=[u'project', u'zone'],
path_params=[u'project', u'zone'],
query_params=[],
relative_path=u'projects/{project}/zones/{zone}',
request_field='',
request_type_name=u'ComputeZonesGetRequest',
response_type_name=u'Zone',
supports_download=False,
)
def List(self, request, global_params=None):
"""Retrieves the list of Zone resources available to the specified project.
Args:
request: (ComputeZonesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ZoneList) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
http_method=u'GET',
method_id=u'compute.zones.list',
ordered_params=[u'project'],
path_params=[u'project'],
query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
relative_path=u'projects/{project}/zones',
request_field='',
request_type_name=u'ComputeZonesListRequest',
response_type_name=u'ZoneList',
supports_download=False,
)
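# Hedged usage sketch (added; not part of the generated client). A typical
# call builds a request message and invokes the service method, e.g.:
#   client = ComputeAlpha()  # assumes default credentials are configured
#   req = client.MESSAGES_MODULE.ComputeZonesListRequest(project='my-project')
#   for zone in client.zones.List(req).items:
#     print(zone.name)
# MESSAGES_MODULE and the 'items'/'name' fields are assumptions based on
# apitools conventions and the method_configs above.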
| apache-2.0 |
pgmillon/ansible | lib/ansible/modules/crypto/acme/acme_inspect.py | 20 | 12317 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2018 Felix Fontein (@felixfontein)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: acme_inspect
author: "Felix Fontein (@felixfontein)"
version_added: "2.8"
short_description: Send direct requests to an ACME server
description:
- "Allows to send direct requests to an ACME server with the
L(ACME protocol,https://tools.ietf.org/html/rfc8555),
which is supported by CAs such as L(Let's Encrypt,https://letsencrypt.org/)."
- "This module can be used to debug failed certificate request attempts,
for example when M(acme_certificate) fails or encounters a problem which
you wish to investigate."
- "The module can also be used to directly access features of an ACME servers
which are not yet supported by the Ansible ACME modules."
notes:
- "The I(account_uri) option must be specified for properly authenticated
ACME v2 requests (except a C(new-account) request)."
- "Using the C(ansible) tool, M(acme_inspect) can be used to directly execute
ACME requests without the need of writing a playbook. For example, the
following command retrieves the ACME account with ID 1 from Let's Encrypt
(assuming C(/path/to/key) is the correct private account key):
C(ansible localhost -m acme_inspect -a \"account_key_src=/path/to/key
acme_directory=https://acme-v02.api.letsencrypt.org/directory acme_version=2
account_uri=https://acme-v02.api.letsencrypt.org/acme/acct/1 method=get
url=https://acme-v02.api.letsencrypt.org/acme/acct/1\")"
seealso:
- name: Automatic Certificate Management Environment (ACME)
description: The specification of the ACME protocol (RFC 8555).
link: https://tools.ietf.org/html/rfc8555
- name: ACME TLS ALPN Challenge Extension
description: The current draft specification of the C(tls-alpn-01) challenge.
link: https://tools.ietf.org/html/draft-ietf-acme-tls-alpn-05
extends_documentation_fragment:
- acme
options:
url:
description:
- "The URL to send the request to."
- "Must be specified if I(method) is not C(directory-only)."
type: str
method:
description:
- "The method to use to access the given URL on the ACME server."
- "The value C(post) executes an authenticated POST request. The content
must be specified in the I(content) option."
- "The value C(get) executes an authenticated POST-as-GET request for ACME v2,
and a regular GET request for ACME v1."
- "The value C(directory-only) only retrieves the directory, without doing
a request."
type: str
default: get
choices:
- get
- post
- directory-only
content:
description:
- "An encoded JSON object which will be sent as the content if I(method)
is C(post)."
- "Required when I(method) is C(post), and not allowed otherwise."
type: str
fail_on_acme_error:
description:
- "If I(method) is C(post) or C(get), make the module fail in case an ACME
error is returned."
type: bool
default: yes
'''
EXAMPLES = r'''
- name: Get directory
acme_inspect:
acme_directory: https://acme-staging-v02.api.letsencrypt.org/directory
acme_version: 2
method: directory-only
register: directory
- name: Create an account
acme_inspect:
acme_directory: https://acme-staging-v02.api.letsencrypt.org/directory
acme_version: 2
account_key_src: /etc/pki/cert/private/account.key
url: "{{ directory.newAccount}}"
method: post
content: '{"termsOfServiceAgreed":true}'
register: account_creation
# account_creation.headers.location contains the account URI
# if creation was successful
- name: Get account information
acme_inspect:
acme_directory: https://acme-staging-v02.api.letsencrypt.org/directory
acme_version: 2
account_key_src: /etc/pki/cert/private/account.key
account_uri: "{{ account_creation.headers.location }}"
url: "{{ account_creation.headers.location }}"
method: get
- name: Update account contacts
acme_inspect:
acme_directory: https://acme-staging-v02.api.letsencrypt.org/directory
acme_version: 2
account_key_src: /etc/pki/cert/private/account.key
account_uri: "{{ account_creation.headers.location }}"
url: "{{ account_creation.headers.location }}"
method: post
content: '{{ account_info | to_json }}'
vars:
account_info:
# For valid values, see
# https://tools.ietf.org/html/rfc8555#section-7.3
contact:
- mailto:[email protected]
- name: Create certificate order
acme_certificate:
acme_directory: https://acme-staging-v02.api.letsencrypt.org/directory
acme_version: 2
account_key_src: /etc/pki/cert/private/account.key
account_uri: "{{ account_creation.headers.location }}"
csr: /etc/pki/cert/csr/sample.com.csr
fullchain_dest: /etc/httpd/ssl/sample.com-fullchain.crt
challenge: http-01
register: certificate_request
# Assume something went wrong. certificate_request.order_uri contains
# the order URI.
- name: Get order information
acme_inspect:
acme_directory: https://acme-staging-v02.api.letsencrypt.org/directory
acme_version: 2
account_key_src: /etc/pki/cert/private/account.key
account_uri: "{{ account_creation.headers.location }}"
url: "{{ certificate_request.order_uri }}"
method: get
register: order
- name: Get first authz for order
acme_inspect:
acme_directory: https://acme-staging-v02.api.letsencrypt.org/directory
acme_version: 2
account_key_src: /etc/pki/cert/private/account.key
account_uri: "{{ account_creation.headers.location }}"
url: "{{ order.output_json.authorizations[0] }}"
method: get
register: authz
- name: Get HTTP-01 challenge for authz
acme_inspect:
acme_directory: https://acme-staging-v02.api.letsencrypt.org/directory
acme_version: 2
account_key_src: /etc/pki/cert/private/account.key
account_uri: "{{ account_creation.headers.location }}"
url: "{{ authz.output_json.challenges | selectattr('type', 'equalto', 'http-01') }}"
method: get
register: http01challenge
- name: Activate HTTP-01 challenge manually
acme_inspect:
acme_directory: https://acme-staging-v02.api.letsencrypt.org/directory
acme_version: 2
account_key_src: /etc/pki/cert/private/account.key
account_uri: "{{ account_creation.headers.location }}"
url: "{{ http01challenge.url }}"
method: post
content: '{}'
'''
RETURN = '''
directory:
description: The ACME directory's content
returned: always
type: dict
sample: |
{
"a85k3x9f91A4": "https://community.letsencrypt.org/t/adding-random-entries-to-the-directory/33417",
"keyChange": "https://acme-v02.api.letsencrypt.org/acme/key-change",
"meta": {
"caaIdentities": [
"letsencrypt.org"
],
"termsOfService": "https://letsencrypt.org/documents/LE-SA-v1.2-November-15-2017.pdf",
"website": "https://letsencrypt.org"
},
"newAccount": "https://acme-v02.api.letsencrypt.org/acme/new-acct",
"newNonce": "https://acme-v02.api.letsencrypt.org/acme/new-nonce",
"newOrder": "https://acme-v02.api.letsencrypt.org/acme/new-order",
"revokeCert": "https://acme-v02.api.letsencrypt.org/acme/revoke-cert"
}
headers:
description: The request's HTTP headers (with lowercase keys)
returned: always
type: dict
sample: |
{
"boulder-requester": "12345",
"cache-control": "max-age=0, no-cache, no-store",
"connection": "close",
"content-length": "904",
"content-type": "application/json",
"cookies": {},
"cookies_string": "",
"date": "Wed, 07 Nov 2018 12:34:56 GMT",
"expires": "Wed, 07 Nov 2018 12:44:56 GMT",
"link": "<https://letsencrypt.org/documents/LE-SA-v1.2-November-15-2017.pdf>;rel=\"terms-of-service\"",
"msg": "OK (904 bytes)",
"pragma": "no-cache",
"replay-nonce": "1234567890abcdefghijklmnopqrstuvwxyzABCDEFGH",
"server": "nginx",
"status": 200,
"strict-transport-security": "max-age=604800",
"url": "https://acme-v02.api.letsencrypt.org/acme/acct/46161",
"x-frame-options": "DENY"
}
output_text:
description: The raw text output
returned: always
type: str
sample: "{\\n \\\"id\\\": 12345,\\n \\\"key\\\": {\\n \\\"kty\\\": \\\"RSA\\\",\\n ..."
output_json:
description: The output parsed as JSON
returned: if output can be parsed as JSON
type: dict
sample:
- id: 12345
- key:
- kty: RSA
- ...
'''
from ansible.module_utils.acme import (
ModuleFailException, ACMEAccount, set_crypto_backend,
)
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native, to_bytes
import json
def main():
module = AnsibleModule(
argument_spec=dict(
account_key_src=dict(type='path', aliases=['account_key']),
account_key_content=dict(type='str', no_log=True),
account_uri=dict(type='str'),
acme_directory=dict(type='str', default='https://acme-staging.api.letsencrypt.org/directory'),
acme_version=dict(type='int', default=1, choices=[1, 2]),
validate_certs=dict(type='bool', default=True),
url=dict(type='str'),
method=dict(type='str', choices=['get', 'post', 'directory-only'], default='get'),
content=dict(type='str'),
fail_on_acme_error=dict(type='bool', default=True),
select_crypto_backend=dict(type='str', default='auto', choices=['auto', 'openssl', 'cryptography']),
),
mutually_exclusive=(
['account_key_src', 'account_key_content'],
),
required_if=(
['method', 'get', ['url']],
['method', 'post', ['url', 'content']],
['method', 'get', ['account_key_src', 'account_key_content'], True],
['method', 'post', ['account_key_src', 'account_key_content'], True],
),
)
set_crypto_backend(module)
if not module.params.get('validate_certs'):
module.warn(warning='Disabling certificate validation for communications with ACME endpoint. ' +
'This should only be done for testing against a local ACME server for ' +
'development purposes, but *never* for production purposes.')
result = dict()
changed = False
try:
# Get hold of ACMEAccount object (includes directory)
account = ACMEAccount(module)
method = module.params['method']
result['directory'] = account.directory.directory
# Do we have to do more requests?
if method != 'directory-only':
url = module.params['url']
fail_on_acme_error = module.params['fail_on_acme_error']
# Do request
if method == 'get':
data, info = account.get_request(url, parse_json_result=False, fail_on_error=False)
elif method == 'post':
changed = True # only POSTs can change
data, info = account.send_signed_request(url, to_bytes(module.params['content']), parse_json_result=False, encode_payload=False)
# Update results
result.update(dict(
headers=info,
output_text=to_native(data),
))
# See if we can parse the result as JSON
try:
result['output_json'] = json.loads(data)
except Exception as dummy:
pass
# Fail if error was returned
if fail_on_acme_error and info['status'] >= 400:
raise ModuleFailException("ACME request failed: CODE: {0} RESULT: {1}".format(info['status'], data))
# Done!
module.exit_json(changed=changed, **result)
except ModuleFailException as e:
e.do_fail(module, **result)
if __name__ == '__main__':
main()
| gpl-3.0 |
antamb/google-personal-assistant | src/actionbase.py | 2 | 2216 | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handle voice commands locally.
This code lets you link keywords to actions. The actions are declared in
action.py.
"""
class Actor(object):
"""Passes commands on to a list of action handlers."""
def __init__(self):
self.handlers = []
def add_keyword(self, keyword, action):
self.handlers.append(KeywordHandler(keyword, action))
def get_phrases(self):
"""Get a list of all phrases that are expected by the handlers."""
return [phrase for h in self.handlers for phrase in h.get_phrases()]
def can_handle(self, command):
"""Check if command is handled without running the handlers.
Returns True if the command would be handled."""
for handler in self.handlers:
if handler.can_handle(command):
return True
return False
def handle(self, command):
"""Pass command to handlers, stopping after one has handled the command.
Returns True if the command was handled."""
for handler in self.handlers:
if handler.handle(command):
return True
return False
class KeywordHandler(object):
"""Perform the action when the given keyword is in the command."""
def __init__(self, keyword, action):
self.keyword = keyword.lower()
self.action = action
def get_phrases(self):
return [self.keyword]
def can_handle(self, command):
return self.keyword in command.lower()
def handle(self, command):
if self.can_handle(command):
self.action.run(command)
return True
return False
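if __name__ == '__main__':
    # Minimal usage sketch (added; not part of the original module). The
    # _EchoAction class below is hypothetical -- real actions are declared
    # in action.py, as noted in the module docstring.
    class _EchoAction(object):
        def run(self, command):
            print('heard: %s' % command)

    actor = Actor()
    actor.add_keyword('light', _EchoAction())
    assert actor.can_handle('Turn on the LIGHT')  # matching is case-insensitive
    actor.handle('turn on the light')  # prints 'heard: turn on the light'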
| apache-2.0 |
mffrench/fabric | bddtests/steps/docgen.py | 5 | 13747 | # Copyright IBM Corp. 2016 All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from StringIO import StringIO
from itertools import chain
from google.protobuf.message import Message
from b3j0f.aop import weave, unweave, is_intercepted, weave_on
from jinja2 import Environment, PackageLoader, select_autoescape, FileSystemLoader, Template
env = Environment(
loader=FileSystemLoader(searchpath="templates"),
autoescape=select_autoescape(['html', 'xml']),
trim_blocks=True,
lstrip_blocks=True
)
from bootstrap_util import getDirectory
class DocumentGenerator:
def __init__(self, contextHelper, scenario):
self.contextHelper = contextHelper
self.directory = getDirectory(contextHelper.context)
self.output = StringIO()
self.currentStep = 0
self.composition = None
        # Weave advices into contextHelper
weave(target=self.contextHelper.before_step, advices=self.beforeStepAdvice)
weave(target=self.contextHelper.after_step, advices=self.afterStepAdvice)
weave(target=self.contextHelper.after_scenario, advices=self.afterScenarioAdvice)
weave(target=self.contextHelper.getBootrapHelper, advices=self.getBootstrapHelperAdvice)
weave(target=self.contextHelper.registerComposition, advices=self.registerCompositionAdvice)
# Weave advices into Directory
weave(target=self.directory._registerOrg, advices=self.registerOrgAdvice)
weave(target=self.directory._registerUser, advices=self.registerUserAdvice)
weave(target=self.directory.registerOrdererAdminTuple, advices=self.registerNamedNodeAdminTupleAdvice)
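        # Hedged note (added for clarity): with b3j0f.aop, weave() wraps each
        # target callable so the advice runs in its place; the advice receives
        # a joinpoint whose kwargs carry the original call's arguments, and
        # joinpoint.proceed() invokes the wrapped callable. Minimal shape:
        #   def advice(joinpoint):
        #       ...                           # act before the call
        #       result = joinpoint.proceed()  # run the original callable
        #       return result                 # act after, then pass through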
def beforeStepAdvice(self, joinpoint):
self.currentStep += 1
step = joinpoint.kwargs['step']
# Now the jinja template
self.output.write(env.get_template("html/step.html").render(step_id="Step {0}".format(self.currentStep), step=step))
return joinpoint.proceed()
def afterStepAdvice(self, joinpoint):
step = joinpoint.kwargs['step']
# Now the jinja template
if step.status=="failed":
self.output.write(env.get_template("html/error.html").render(err=step.error_message))
return joinpoint.proceed()
def compositionCallCLIAdvice(self, joinpoint):
        "This advice is called around the composition's usage of the CLI."
result = joinpoint.proceed()
# Create table for environment
composition = joinpoint.kwargs['self']
envAdditions = composition.getEnvAdditions()
keys = envAdditions.keys()
keys.sort()
envPreamble = " ".join(["{0}={1}".format(key,envAdditions[key]) for key in keys])
args= " ".join(joinpoint.kwargs['argList'])
self.output.write(env.get_template("html/cli.html").render(command="{0} {1}".format(envPreamble, args)))
return result
def _getNetworkGroup(self, serviceName):
groups = {"peer" : 1, "orderer" : 2, "kafka" : 7, "zookeeper" : 8, "couchdb" : 9}
groupId = 0
for group, id in groups.iteritems():
if serviceName.lower().startswith(group):
groupId = id
return groupId
def _getNetworkForConfig(self, configAsYaml):
import yaml
config = yaml.load(configAsYaml)
assert "services" in config, "Expected config from docker-compose config to have services key at top level: \n{0}".format(config)
network = {"nodes": [], "links" : []}
for serviceName in config['services'].keys():
network['nodes'].append({"id" : serviceName, "group" : self._getNetworkGroup(serviceName), "type" : "node"})
# Now get links
if "depends_on" in config['services'][serviceName]:
for dependedOnServiceName in config['services'][serviceName]['depends_on']:
network['links'].append({"source": serviceName, "target": dependedOnServiceName, "value" : 1})
return network
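    # Illustrative input/output sketch (added; the service names are assumed
    # examples): for a compose config such as
    #   services:
    #     peer0:    {depends_on: [orderer0]}
    #     orderer0: {}
    # _getNetworkForConfig returns
    #   {'nodes': [{'id': 'peer0', 'group': 1, 'type': 'node'},
    #              {'id': 'orderer0', 'group': 2, 'type': 'node'}],
    #    'links': [{'source': 'peer0', 'target': 'orderer0', 'value': 1}]}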
def _getNetworkForDirectory(self):
network = {"nodes":[], "links": []}
for orgName, org in self.directory.getOrganizations().iteritems():
network['nodes'].append({"id" : orgName, "group" : 3, "type" : "org"})
for userName, user in self.directory.getUsers().iteritems():
network['nodes'].append({"id" : userName, "group" : 4, "type" : "user"})
# Now get links
for nct, cert in self.directory.getNamedCtxTuples().iteritems():
nctId = "{0}-{1}-{2}".format(nct.user, nct.nodeName, nct.organization)
network['nodes'].append({"id" : nctId, "group" : 5, "type" : "cert"})
network['links'].append({"source": nctId, "target": nct.organization, "value" : 1})
network['links'].append({"source": nctId, "target": nct.user, "value" : 1})
# Only add the context link if it is a compose service, else the target may not exist.
if nct.nodeName in self.composition.getServiceNames():
network['links'].append({"source": nctId, "target": nct.nodeName, "value" : 1})
return network
def _writeNetworkJson(self):
if self.composition:
import json
configNetwork = self._getNetworkForConfig(configAsYaml=self.composition.getConfig())
directoryNetwork = self._getNetworkForDirectory()
# Join the network info together
fullNetwork = dict(chain([(key, configNetwork[key] + directoryNetwork[key]) for key in configNetwork.keys()]))
(fileName, fileExists) = self.contextHelper.getTmpPathForName("network", extension="json")
with open(fileName, "w") as f:
f.write(json.dumps(fullNetwork))
def registerCompositionAdvice(self, joinpoint):
composition = joinpoint.kwargs['composition']
weave(target=composition._callCLI, advices=self.compositionCallCLIAdvice)
result = joinpoint.proceed()
if composition:
#Now get the config for the composition and dump out.
self.composition = composition
configAsYaml = composition.getConfig()
self.output.write(env.get_template("html/header.html").render(text="Configuration", level=4))
self.output.write(env.get_template("html/cli.html").render(command=configAsYaml))
#Inject the graph
self.output.write(env.get_template("html/header.html").render(text="Network Graph", level=4))
self.output.write(env.get_template("html/graph.html").render())
return result
def _addLinkToFile(self, fileName ,linkText):
import ntpath
baseName = ntpath.basename(fileName)
# self.markdownWriter.addLink(linkUrl="./{0}".format(baseName), linkText=linkText, linkTitle=baseName)
def _getLinkInfoForFile(self, fileName):
import ntpath
return "./{0}".format(ntpath.basename(fileName))
def registerOrgAdvice(self, joinpoint):
orgName = joinpoint.kwargs['orgName']
newlyRegisteredOrg = joinpoint.proceed()
orgCert = newlyRegisteredOrg.getCertAsPEM()
#Write out key material
(fileName, fileExists) = self.contextHelper.getTmpPathForName(name="dir-org-{0}-cert".format(orgName), extension="pem")
with open(fileName, 'w') as f:
f.write(orgCert)
self._addLinkToFile(fileName=fileName, linkText="Public cert for Organization")
#Now the jinja output
self.output.write(env.get_template("html/org.html").render(org=newlyRegisteredOrg, cert_href=self._getLinkInfoForFile(fileName), path_to_cert=fileName))
return newlyRegisteredOrg
def registerUserAdvice(self, joinpoint):
userName = joinpoint.kwargs['userName']
newlyRegisteredUser = joinpoint.proceed()
#Write out key material
privateKeyAsPem = newlyRegisteredUser.getPrivateKeyAsPEM()
(fileName, fileExists) = self.contextHelper.getTmpPathForName(name="dir-user-{0}-privatekey".format(userName), extension="pem")
with open(fileName, 'w') as f:
f.write(privateKeyAsPem)
#Weave into user tags setting
weave(target=newlyRegisteredUser.setTagValue, advices=self.userSetTagValueAdvice)
#Now the jinja output
self.output.write(env.get_template("html/user.html").render(user=newlyRegisteredUser, private_key_href=self._getLinkInfoForFile(fileName)))
return newlyRegisteredUser
def afterScenarioAdvice(self, joinpoint):
scenario = joinpoint.kwargs['scenario']
#Render with jinja
header = env.get_template("html/scenario.html").render(scenario=scenario, steps=scenario.steps)
main = env.get_template("html/main.html").render(header=header, body=self.output.getvalue())
(fileName, fileExists) = self.contextHelper.getTmpPathForName("scenario", extension="html")
with open(fileName, 'w') as f:
f.write(main.encode("utf-8"))
self._writeNetworkJson()
return joinpoint.proceed()
def registerNamedNodeAdminTupleAdvice(self, joinpoint):
namedNodeAdminTuple = joinpoint.proceed()
directory = joinpoint.kwargs['self']
#jinja
newCertAsPEM = directory.getCertAsPEM(namedNodeAdminTuple)
self.output.write(env.get_template("html/header.html").render(text="Created new named node admin tuple: {0}".format(namedNodeAdminTuple), level=4))
self.output.write(env.get_template("html/cli.html").render(command=newCertAsPEM))
#Write cert out
fileNameTocheck = "dir-user-{0}-cert-{1}-{2}".format(namedNodeAdminTuple.user, namedNodeAdminTuple.nodeName, namedNodeAdminTuple.organization)
(fileName, fileExists) = self.contextHelper.getTmpPathForName(fileNameTocheck, extension="pem")
with open(fileName, 'w') as f:
f.write(newCertAsPEM)
return namedNodeAdminTuple
def bootstrapHelperSignConfigItemAdvice(self, joinpoint):
configItem = joinpoint.kwargs['configItem']
#jinja
self.output.write(env.get_template("html/header.html").render(text="Dumping signed config item...", level=4))
self.output.write(env.get_template("html/protobuf.html").render(msg=configItem, msgLength=len(str(configItem))))
signedConfigItem = joinpoint.proceed()
return signedConfigItem
def getBootstrapHelperAdvice(self, joinpoint):
bootstrapHelper = joinpoint.proceed()
weave(target=bootstrapHelper.signConfigItem, advices=self.bootstrapHelperSignConfigItemAdvice)
return bootstrapHelper
def _isProtobufMessage(self, target):
return isinstance(target, Message)
def _isListOfProtobufMessages(self, target):
result = False
if isinstance(target, list):
messageList = [item for item in target if self._isProtobufMessage(item)]
result = len(messageList) == len(target)
return result
def _isDictOfProtobufMessages(self, target):
result = False
if isinstance(target, dict):
messageList = [item for item in target.values() if self._isProtobufMessage(item)]
result = len(messageList) == len(target)
return result
def _writeProtobuf(self, fileName, msg):
import ntpath
baseName = ntpath.basename(fileName)
dataToWrite = msg.SerializeToString()
with open("{0}".format(fileName), 'wb') as f:
f.write(dataToWrite)
self.output.write(env.get_template("html/protobuf.html").render(id=baseName, msg=msg, path_to_protobuf=fileName, msgLength=len(dataToWrite),linkUrl="./{0}".format(baseName), linkText="Protobuf message in binary form", linkTitle=baseName))
def userSetTagValueAdvice(self, joinpoint):
result = joinpoint.proceed()
user = joinpoint.kwargs['self']
tagKey = joinpoint.kwargs['tagKey']
tagValue = joinpoint.kwargs['tagValue']
#jinja invoke
self.output.write(env.get_template("html/tag.html").render(user=user, tag_key=tagKey))
# If protobuf message, write out in binary form
if self._isProtobufMessage(tagValue):
import ntpath
(fileName, fileExists) = self.contextHelper.getTmpPathForName("{0}-{1}".format(user.getUserName(), tagKey), extension="protobuf")
self._writeProtobuf(fileName=fileName, msg=tagValue)
# If protobuf message, write out in binary form
elif self._isListOfProtobufMessages(tagValue):
index = 0
for msg in tagValue:
(fileName, fileExists) = self.contextHelper.getTmpPathForName("{0}-{1}-{2:0>4}".format(user.getUserName(), tagKey, index), extension="protobuf")
self._writeProtobuf(fileName=fileName, msg=msg)
index += 1
elif self._isDictOfProtobufMessages(tagValue):
for key,msg in tagValue.iteritems():
(fileName, fileExists) = self.contextHelper.getTmpPathForName("{0}-{1}-{2}".format(user.getUserName(), tagKey, key), extension="protobuf")
self._writeProtobuf(fileName=fileName, msg=msg)
else:
self.output.write(env.get_template("html/cli.html").render(command=str(tagValue)))
return result | apache-2.0 |
martynovp/edx-platform | common/test/acceptance/tests/studio/test_studio_acid_xblock.py | 130 | 6909 | """
Acceptance tests for Studio related to the acid xblock.
"""
from bok_choy.web_app_test import WebAppTest
from ...pages.studio.auto_auth import AutoAuthPage
from ...pages.studio.overview import CourseOutlinePage
from ...pages.xblock.acid import AcidView
from ...fixtures.course import CourseFixture, XBlockFixtureDesc
class XBlockAcidBase(WebAppTest):
"""
Base class for tests that verify that XBlock integration is working correctly
"""
__test__ = False
def setUp(self):
"""
Create a unique identifier for the course used in this test.
"""
# Ensure that the superclass sets up
super(XBlockAcidBase, self).setUp()
# Define a unique course identifier
self.course_info = {
'org': 'test_org',
'number': 'course_' + self.unique_id[:5],
'run': 'test_' + self.unique_id,
'display_name': 'Test Course ' + self.unique_id
}
self.outline = CourseOutlinePage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
self.course_id = '{org}.{number}.{run}'.format(**self.course_info)
self.setup_fixtures()
self.auth_page = AutoAuthPage(
self.browser,
staff=False,
username=self.user.get('username'),
email=self.user.get('email'),
password=self.user.get('password')
)
self.auth_page.visit()
def validate_acid_block_preview(self, acid_block):
"""
Validate the Acid Block's preview
"""
self.assertTrue(acid_block.init_fn_passed)
self.assertTrue(acid_block.resource_url_passed)
self.assertTrue(acid_block.scope_passed('user_state'))
self.assertTrue(acid_block.scope_passed('user_state_summary'))
self.assertTrue(acid_block.scope_passed('preferences'))
self.assertTrue(acid_block.scope_passed('user_info'))
def test_acid_block_preview(self):
"""
Verify that all expected acid block tests pass in studio preview
"""
self.outline.visit()
subsection = self.outline.section('Test Section').subsection('Test Subsection')
unit = subsection.expand_subsection().unit('Test Unit').go_to()
acid_block = AcidView(self.browser, unit.xblocks[0].preview_selector)
self.validate_acid_block_preview(acid_block)
def test_acid_block_editor(self):
"""
Verify that all expected acid block tests pass in studio editor
"""
self.outline.visit()
subsection = self.outline.section('Test Section').subsection('Test Subsection')
unit = subsection.expand_subsection().unit('Test Unit').go_to()
acid_block = AcidView(self.browser, unit.xblocks[0].edit().editor_selector)
self.assertTrue(acid_block.init_fn_passed)
self.assertTrue(acid_block.resource_url_passed)
class XBlockAcidNoChildTest(XBlockAcidBase):
"""
Tests of an AcidBlock with no children
"""
__test__ = True
def setup_fixtures(self):
course_fix = CourseFixture(
self.course_info['org'],
self.course_info['number'],
self.course_info['run'],
self.course_info['display_name']
)
course_fix.add_children(
XBlockFixtureDesc('chapter', 'Test Section').add_children(
XBlockFixtureDesc('sequential', 'Test Subsection').add_children(
XBlockFixtureDesc('vertical', 'Test Unit').add_children(
XBlockFixtureDesc('acid', 'Acid Block')
)
)
)
).install()
self.user = course_fix.user
class XBlockAcidParentBase(XBlockAcidBase):
"""
Base class for tests that verify that parent XBlock integration is working correctly
"""
__test__ = False
def validate_acid_block_preview(self, acid_block):
super(XBlockAcidParentBase, self).validate_acid_block_preview(acid_block)
self.assertTrue(acid_block.child_tests_passed)
def test_acid_block_preview(self):
"""
Verify that all expected acid block tests pass in studio preview
"""
self.outline.visit()
subsection = self.outline.section('Test Section').subsection('Test Subsection')
unit = subsection.expand_subsection().unit('Test Unit').go_to()
container = unit.xblocks[0].go_to_container()
acid_block = AcidView(self.browser, container.xblocks[0].preview_selector)
self.validate_acid_block_preview(acid_block)
class XBlockAcidEmptyParentTest(XBlockAcidParentBase):
"""
Tests of an AcidBlock with children
"""
__test__ = True
def setup_fixtures(self):
course_fix = CourseFixture(
self.course_info['org'],
self.course_info['number'],
self.course_info['run'],
self.course_info['display_name']
)
course_fix.add_children(
XBlockFixtureDesc('chapter', 'Test Section').add_children(
XBlockFixtureDesc('sequential', 'Test Subsection').add_children(
XBlockFixtureDesc('vertical', 'Test Unit').add_children(
XBlockFixtureDesc('acid_parent', 'Acid Parent Block').add_children(
)
)
)
)
).install()
self.user = course_fix.user
class XBlockAcidChildTest(XBlockAcidParentBase):
"""
Tests of an AcidBlock with children
"""
__test__ = True
def setup_fixtures(self):
course_fix = CourseFixture(
self.course_info['org'],
self.course_info['number'],
self.course_info['run'],
self.course_info['display_name']
)
course_fix.add_children(
XBlockFixtureDesc('chapter', 'Test Section').add_children(
XBlockFixtureDesc('sequential', 'Test Subsection').add_children(
XBlockFixtureDesc('vertical', 'Test Unit').add_children(
XBlockFixtureDesc('acid_parent', 'Acid Parent Block').add_children(
XBlockFixtureDesc('acid', 'First Acid Child', metadata={'name': 'first'}),
XBlockFixtureDesc('acid', 'Second Acid Child', metadata={'name': 'second'}),
XBlockFixtureDesc('html', 'Html Child', data="<html>Contents</html>"),
)
)
)
)
).install()
self.user = course_fix.user
def test_acid_block_preview(self):
super(XBlockAcidChildTest, self).test_acid_block_preview()
def test_acid_block_editor(self):
super(XBlockAcidChildTest, self).test_acid_block_editor()
| agpl-3.0 |
rasata/ansible | lib/ansible/plugins/filter/core.py | 10 | 9635 | # (c) 2012, Jeroen Hoekx <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
import sys
import base64
import itertools
import json
import os.path
import ntpath
import types
import pipes
import glob
import re
import crypt
import hashlib
import string
from functools import partial
import operator as py_operator
from random import SystemRandom, shuffle
import uuid
import yaml
from jinja2.filters import environmentfilter
from distutils.version import LooseVersion, StrictVersion
from six import iteritems
from ansible import errors
from ansible.parsing.yaml.dumper import AnsibleDumper
from ansible.utils.hashing import md5s, checksum_s
from ansible.utils.unicode import unicode_wrap, to_unicode
from ansible.utils.vars import merge_hash
try:
import passlib.hash
HAS_PASSLIB = True
except:
HAS_PASSLIB = False
UUID_NAMESPACE_ANSIBLE = uuid.UUID('361E6D51-FAEC-444A-9079-341386DA8E2E')
def to_yaml(a, *args, **kw):
'''Make verbose, human readable yaml'''
transformed = yaml.dump(a, Dumper=AnsibleDumper, allow_unicode=True, **kw)
return to_unicode(transformed)
def to_nice_yaml(a, *args, **kw):
'''Make verbose, human readable yaml'''
transformed = yaml.dump(a, Dumper=AnsibleDumper, indent=4, allow_unicode=True, default_flow_style=False, **kw)
return to_unicode(transformed)
def to_json(a, *args, **kw):
''' Convert the value to JSON '''
return json.dumps(a, *args, **kw)
def to_nice_json(a, *args, **kw):
'''Make verbose, human readable JSON'''
# python-2.6's json encoder is buggy (can't encode hostvars)
if sys.version_info < (2, 7):
try:
import simplejson
except ImportError:
pass
else:
try:
major = int(simplejson.__version__.split('.')[0])
except:
pass
else:
if major >= 2:
return simplejson.dumps(a, indent=4, sort_keys=True, *args, **kw)
# Fallback to the to_json filter
return to_json(a, *args, **kw)
return json.dumps(a, indent=4, sort_keys=True, *args, **kw)
def bool(a):
''' return a bool for the arg '''
if a is None or type(a) == bool:
return a
if type(a) in types.StringTypes:
a = a.lower()
if a in ['yes', 'on', '1', 'true', 1]:
return True
else:
return False
def quote(a):
''' return its argument quoted for shell usage '''
return pipes.quote(a)
def fileglob(pathname):
''' return list of matched files for glob '''
return glob.glob(pathname)
def regex_replace(value='', pattern='', replacement='', ignorecase=False):
''' Perform a `re.sub` returning a string '''
if not isinstance(value, basestring):
value = str(value)
if ignorecase:
flags = re.I
else:
flags = 0
_re = re.compile(pattern, flags=flags)
return _re.sub(replacement, value)
def ternary(value, true_val, false_val):
''' value ? true_val : false_val '''
if value:
return true_val
else:
return false_val
def version_compare(value, version, operator='eq', strict=False):
''' Perform a version comparison on a value '''
op_map = {
'==': 'eq', '=': 'eq', 'eq': 'eq',
'<': 'lt', 'lt': 'lt',
'<=': 'le', 'le': 'le',
'>': 'gt', 'gt': 'gt',
'>=': 'ge', 'ge': 'ge',
'!=': 'ne', '<>': 'ne', 'ne': 'ne'
}
if strict:
Version = StrictVersion
else:
Version = LooseVersion
if operator in op_map:
operator = op_map[operator]
else:
raise errors.AnsibleFilterError('Invalid operator type')
try:
method = getattr(py_operator, operator)
return method(Version(str(value)), Version(str(version)))
except Exception as e:
raise errors.AnsibleFilterError('Version comparison: %s' % e)
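# Hedged usage note (added): in a template or when-clause, e.g.
#   {{ ansible_distribution_version | version_compare('7.0', operator='ge') }}
# is True for versions 7.0 and above; comparisons use LooseVersion semantics
# unless strict=True selects StrictVersion.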
def regex_escape(string):
'''Escape all regular expressions special characters from STRING.'''
return re.escape(string)
@environmentfilter
def rand(environment, end, start=None, step=None):
r = SystemRandom()
if isinstance(end, (int, long)):
if not start:
start = 0
if not step:
step = 1
return r.randrange(start, end, step)
elif hasattr(end, '__iter__'):
if start or step:
raise errors.AnsibleFilterError('start and step can only be used with integer values')
return r.choice(end)
else:
raise errors.AnsibleFilterError('random can only be used on sequences and integers')
def randomize_list(mylist):
try:
mylist = list(mylist)
shuffle(mylist)
except:
pass
return mylist
def get_hash(data, hashtype='sha1'):
try: # see if hash is supported
h = hashlib.new(hashtype)
except:
return None
h.update(data)
return h.hexdigest()
def get_encrypted_password(password, hashtype='sha512', salt=None):
# TODO: find a way to construct dynamically from system
cryptmethod= {
'md5': '1',
'blowfish': '2a',
'sha256': '5',
'sha512': '6',
}
    hashtype = hashtype.lower()
if hashtype in cryptmethod:
if salt is None:
r = SystemRandom()
salt = ''.join([r.choice(string.ascii_letters + string.digits) for _ in range(16)])
if not HAS_PASSLIB:
if sys.platform.startswith('darwin'):
raise errors.AnsibleFilterError('|password_hash requires the passlib python module to generate password hashes on Mac OS X/Darwin')
saltstring = "$%s$%s" % (cryptmethod[hashtype],salt)
encrypted = crypt.crypt(password, saltstring)
else:
cls = getattr(passlib.hash, '%s_crypt' % hashtype)
encrypted = cls.encrypt(password, salt=salt)
return encrypted
return None
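# Hedged example (added): {{ 'pass123' | password_hash('sha256', 'mysalt') }}
# yields a crypt(3)-style string such as '$5$mysalt$<digest>'; omitting the
# salt generates a random 16-character one as implemented above.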
def to_uuid(string):
return str(uuid.uuid5(UUID_NAMESPACE_ANSIBLE, str(string)))
def mandatory(a):
    ''' Make a variable mandatory '''
    from jinja2.runtime import Undefined
if isinstance(a, Undefined):
raise errors.AnsibleFilterError('Mandatory variable not defined.')
return a
def combine(*terms, **kwargs):
recursive = kwargs.get('recursive', False)
if len(kwargs) > 1 or (len(kwargs) == 1 and 'recursive' not in kwargs):
raise errors.AnsibleFilterError("'recursive' is the only valid keyword argument")
for t in terms:
if not isinstance(t, dict):
raise errors.AnsibleFilterError("|combine expects dictionaries, got " + repr(t))
if recursive:
return reduce(merge_hash, terms)
else:
return dict(itertools.chain(*map(iteritems, terms)))
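# Hedged example (added): {{ {'a': {'x': 1}} | combine({'a': {'y': 2}}) }}
# -> {'a': {'y': 2}}, since later terms win wholesale; with recursive=True
# the nested dicts are merged, yielding {'a': {'x': 1, 'y': 2}}.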
class FilterModule(object):
''' Ansible core jinja2 filters '''
def filters(self):
return {
# base 64
'b64decode': partial(unicode_wrap, base64.b64decode),
'b64encode': partial(unicode_wrap, base64.b64encode),
# uuid
'to_uuid': to_uuid,
# json
'to_json': to_json,
'to_nice_json': to_nice_json,
'from_json': json.loads,
# yaml
'to_yaml': to_yaml,
'to_nice_yaml': to_nice_yaml,
'from_yaml': yaml.safe_load,
# path
'basename': partial(unicode_wrap, os.path.basename),
'dirname': partial(unicode_wrap, os.path.dirname),
'expanduser': partial(unicode_wrap, os.path.expanduser),
'realpath': partial(unicode_wrap, os.path.realpath),
'relpath': partial(unicode_wrap, os.path.relpath),
'splitext': partial(unicode_wrap, os.path.splitext),
'win_basename': partial(unicode_wrap, ntpath.basename),
'win_dirname': partial(unicode_wrap, ntpath.dirname),
'win_splitdrive': partial(unicode_wrap, ntpath.splitdrive),
# value as boolean
'bool': bool,
# quote string for shell usage
'quote': quote,
# hash filters
# md5 hex digest of string
'md5': md5s,
# sha1 hex digeset of string
'sha1': checksum_s,
# checksum of string as used by ansible for checksuming files
'checksum': checksum_s,
# generic hashing
'password_hash': get_encrypted_password,
'hash': get_hash,
# file glob
'fileglob': fileglob,
# regex
'regex_replace': regex_replace,
'regex_escape': regex_escape,
# ? : ;
'ternary': ternary,
# list
# version comparison
'version_compare': version_compare,
# random stuff
'random': rand,
'shuffle': randomize_list,
# undefined
'mandatory': mandatory,
# merge dicts
'combine': combine,
}
| gpl-3.0 |
apark263/tensorflow | tensorflow/python/keras/optimizer_v2/ftrl_test.py | 13 | 17873 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for Ftrl operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.keras.optimizer_v2 import ftrl
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import adagrad
from tensorflow.python.training import gradient_descent
class FtrlOptimizerTest(test.TestCase):
def doTestFtrlwithoutRegularization(self, use_resource=False):
for dtype in [dtypes.half, dtypes.float32]:
with self.cached_session() as sess:
if use_resource:
var0 = resource_variable_ops.ResourceVariable([0.0, 0.0], dtype=dtype)
var1 = resource_variable_ops.ResourceVariable([0.0, 0.0], dtype=dtype)
else:
var0 = variables.Variable([0.0, 0.0], dtype=dtype)
var1 = variables.Variable([0.0, 0.0], dtype=dtype)
grads0 = constant_op.constant([0.1, 0.2], dtype=dtype)
grads1 = constant_op.constant([0.01, 0.02], dtype=dtype)
opt = ftrl.Ftrl(
3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
v0_val, v1_val = self.evaluate([var0, var1])
self.assertAllClose([0.0, 0.0], v0_val)
self.assertAllClose([0.0, 0.0], v1_val)
# Run 3 steps FTRL
for _ in range(3):
update.run()
v0_val, v1_val = self.evaluate([var0, var1])
self.assertAllCloseAccordingToType(
np.array([-2.60260963, -4.29698515]), v0_val)
self.assertAllCloseAccordingToType(
np.array([-0.28432083, -0.56694895]), v1_val)
@test_util.run_deprecated_v1
def testFtrlWithoutRegularization(self):
self.doTestFtrlwithoutRegularization(use_resource=False)
@test_util.run_deprecated_v1
def testResourceFtrlWithoutRegularization(self):
self.doTestFtrlwithoutRegularization(use_resource=True)
@test_util.run_deprecated_v1
def testFtrlwithoutRegularization2(self):
for dtype in [dtypes.half, dtypes.float32]:
with self.cached_session() as sess:
var0 = variables.Variable([1.0, 2.0], dtype=dtype)
var1 = variables.Variable([4.0, 3.0], dtype=dtype)
grads0 = constant_op.constant([0.1, 0.2], dtype=dtype)
grads1 = constant_op.constant([0.01, 0.02], dtype=dtype)
opt = ftrl.Ftrl(
3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
v0_val, v1_val = self.evaluate([var0, var1])
self.assertAllCloseAccordingToType([1.0, 2.0], v0_val)
self.assertAllCloseAccordingToType([4.0, 3.0], v1_val)
# Run 3 steps FTRL
for _ in range(3):
update.run()
v0_val, v1_val = self.evaluate([var0, var1])
self.assertAllCloseAccordingToType(
np.array([-2.55607247, -3.98729396]), v0_val)
self.assertAllCloseAccordingToType(
np.array([-0.28232238, -0.56096673]), v1_val)
@test_util.run_deprecated_v1
def testMinimizeSparseResourceVariable(self):
for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
with self.cached_session():
var0 = resource_variable_ops.ResourceVariable([[1.0, 2.0]], dtype=dtype)
x = constant_op.constant([[4.0], [5.0]], dtype=dtype)
def loss():
pred = math_ops.matmul(embedding_ops.embedding_lookup([var0], [0]), x) # pylint: disable=cell-var-from-loop
return pred * pred
sgd_op = ftrl.Ftrl(1.0).minimize(loss, var_list=[var0])
variables.global_variables_initializer().run()
# Fetch params to validate initial values
self.assertAllCloseAccordingToType([[1.0, 2.0]], self.evaluate(var0))
# Run 1 step of sgd
sgd_op.run()
# Validate updated params
self.assertAllCloseAccordingToType([[0, 1]],
self.evaluate(var0),
atol=0.01)
@test_util.run_deprecated_v1
def testFtrlWithL1(self):
for dtype in [dtypes.half, dtypes.float32]:
with self.cached_session() as sess:
var0 = variables.Variable([1.0, 2.0], dtype=dtype)
var1 = variables.Variable([4.0, 3.0], dtype=dtype)
grads0 = constant_op.constant([0.1, 0.2], dtype=dtype)
grads1 = constant_op.constant([0.01, 0.02], dtype=dtype)
opt = ftrl.Ftrl(
3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.001,
l2_regularization_strength=0.0)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
v0_val, v1_val = self.evaluate([var0, var1])
self.assertAllCloseAccordingToType([1.0, 2.0], v0_val)
self.assertAllCloseAccordingToType([4.0, 3.0], v1_val)
# Run 10 steps FTRL
for _ in range(10):
update.run()
v0_val, v1_val = self.evaluate([var0, var1])
self.assertAllCloseAccordingToType(
np.array([-7.66718769, -10.91273689]), v0_val)
self.assertAllCloseAccordingToType(
np.array([-0.93460727, -1.86147261]), v1_val)
@test_util.run_deprecated_v1
def testFtrlWithL1_L2(self):
for dtype in [dtypes.half, dtypes.float32]:
with self.cached_session() as sess:
var0 = variables.Variable([1.0, 2.0], dtype=dtype)
var1 = variables.Variable([4.0, 3.0], dtype=dtype)
grads0 = constant_op.constant([0.1, 0.2], dtype=dtype)
grads1 = constant_op.constant([0.01, 0.02], dtype=dtype)
opt = ftrl.Ftrl(
3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.001,
l2_regularization_strength=2.0)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
v0_val, v1_val = self.evaluate([var0, var1])
self.assertAllCloseAccordingToType([1.0, 2.0], v0_val)
self.assertAllCloseAccordingToType([4.0, 3.0], v1_val)
# Run 10 steps FTRL
for _ in range(10):
update.run()
v0_val, v1_val = self.evaluate([var0, var1])
self.assertAllCloseAccordingToType(
np.array([-0.24059935, -0.46829352]), v0_val)
self.assertAllCloseAccordingToType(
np.array([-0.02406147, -0.04830509]), v1_val)
@test_util.run_deprecated_v1
def testFtrlWithL1_L2_L2Shrinkage(self):
"""Test the new FTRL op with support for l2 shrinkage.
    The addition of this parameter, which places a constant pressure on weights
    towards the origin, causes the gradient descent trajectory to differ. The
weights will tend to have smaller magnitudes with this parameter set.
"""
for dtype in [dtypes.half, dtypes.float32]:
with self.cached_session() as sess:
var0 = variables.Variable([1.0, 2.0], dtype=dtype)
var1 = variables.Variable([4.0, 3.0], dtype=dtype)
grads0 = constant_op.constant([0.1, 0.2], dtype=dtype)
grads1 = constant_op.constant([0.01, 0.02], dtype=dtype)
opt = ftrl.Ftrl(
3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.001,
l2_regularization_strength=2.0,
l2_shrinkage_regularization_strength=0.1)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
v0_val, v1_val = self.evaluate([var0, var1])
self.assertAllCloseAccordingToType([1.0, 2.0], v0_val)
self.assertAllCloseAccordingToType([4.0, 3.0], v1_val)
# Run 10 steps FTRL
for _ in range(10):
update.run()
v0_val, v1_val = self.evaluate([var0, var1])
self.assertAllCloseAccordingToType(
np.array([-0.22578995, -0.44345796]), v0_val)
self.assertAllCloseAccordingToType(
np.array([-0.14378493, -0.13229476]), v1_val)
@test_util.run_deprecated_v1
def testFtrlWithL1_L2_L2ShrinkageSparse(self):
"""Tests the new FTRL op with support for l2 shrinkage on sparse grads."""
for dtype in [dtypes.half, dtypes.float32]:
with self.cached_session() as sess:
var0 = variables.Variable([[1.0], [2.0]], dtype=dtype)
var1 = variables.Variable([[4.0], [3.0]], dtype=dtype)
grads0 = ops.IndexedSlices(
constant_op.constant([0.1], shape=[1, 1], dtype=dtype),
constant_op.constant([0]), constant_op.constant([2, 1]))
grads1 = ops.IndexedSlices(
constant_op.constant([0.02], shape=[1, 1], dtype=dtype),
constant_op.constant([1]), constant_op.constant([2, 1]))
opt = ftrl.Ftrl(
3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.001,
l2_regularization_strength=2.0,
l2_shrinkage_regularization_strength=0.1)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
v0_val, v1_val = self.evaluate([var0, var1])
self.assertAllCloseAccordingToType([[1.0], [2.0]], v0_val)
self.assertAllCloseAccordingToType([[4.0], [3.0]], v1_val)
# Run 10 steps FTRL
for _ in range(10):
update.run()
v0_val, v1_val = self.evaluate([var0, var1])
self.assertAllCloseAccordingToType([[-0.22578995], [2.]], v0_val)
self.assertAllCloseAccordingToType([[4.], [-0.13229476]], v1_val)
@test_util.run_deprecated_v1
def testFtrlWithL2ShrinkageDoesNotChangeLrSchedule(self):
"""Verifies that l2 shrinkage in FTRL does not change lr schedule."""
for dtype in [dtypes.half, dtypes.float32]:
with self.cached_session() as sess:
var0 = variables.Variable([1.0, 2.0], dtype=dtype)
var1 = variables.Variable([1.0, 2.0], dtype=dtype)
grads0 = constant_op.constant([0.1, 0.2], dtype=dtype)
grads1 = constant_op.constant([0.1, 0.2], dtype=dtype)
opt0 = ftrl.Ftrl(
3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.001,
l2_regularization_strength=2.0,
l2_shrinkage_regularization_strength=0.1)
opt1 = ftrl.Ftrl(
3.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.001,
l2_regularization_strength=2.0)
update0 = opt0.apply_gradients([(grads0, var0)])
update1 = opt1.apply_gradients([(grads1, var1)])
variables.global_variables_initializer().run()
v0_val, v1_val = self.evaluate([var0, var1])
self.assertAllCloseAccordingToType([1.0, 2.0], v0_val)
self.assertAllCloseAccordingToType([1.0, 2.0], v1_val)
# Run 10 steps FTRL
for _ in range(10):
update0.run()
update1.run()
v0_val, v1_val = self.evaluate([var0, var1])
# var0 is experiencing L2 shrinkage so it should be smaller than var1
# in magnitude.
self.assertTrue((v0_val**2 < v1_val**2).all())
accum0 = sess.run(opt0.get_slot(var0, "accumulator"))
accum1 = sess.run(opt1.get_slot(var1, "accumulator"))
# L2 shrinkage should not change how we update grad accumulator.
self.assertAllCloseAccordingToType(accum0, accum1)
def applyOptimizer(self, opt, dtype, steps=5, is_sparse=False):
if is_sparse:
var0 = variables.Variable([[0.0], [0.0]], dtype=dtype)
var1 = variables.Variable([[0.0], [0.0]], dtype=dtype)
grads0 = ops.IndexedSlices(
constant_op.constant([0.1], shape=[1, 1], dtype=dtype),
constant_op.constant([0]), constant_op.constant([2, 1]))
grads1 = ops.IndexedSlices(
constant_op.constant([0.02], shape=[1, 1], dtype=dtype),
constant_op.constant([1]), constant_op.constant([2, 1]))
else:
var0 = variables.Variable([0.0, 0.0], dtype=dtype)
var1 = variables.Variable([0.0, 0.0], dtype=dtype)
grads0 = constant_op.constant([0.1, 0.2], dtype=dtype)
grads1 = constant_op.constant([0.01, 0.02], dtype=dtype)
update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
sess = ops.get_default_session()
v0_val, v1_val = self.evaluate([var0, var1])
if is_sparse:
self.assertAllCloseAccordingToType([[0.0], [0.0]], v0_val)
self.assertAllCloseAccordingToType([[0.0], [0.0]], v1_val)
else:
self.assertAllCloseAccordingToType([0.0, 0.0], v0_val)
self.assertAllCloseAccordingToType([0.0, 0.0], v1_val)
# Run Ftrl for a few steps
for _ in range(steps):
update.run()
v0_val, v1_val = self.evaluate([var0, var1])
return v0_val, v1_val
  # When variables are initialized with zero, FTRL-Proximal has two properties:
  # 1. Without L1 & L2 regularization but with a fixed learning rate,
  #    FTRL-Proximal is identical to GradientDescent.
  # 2. Without L1 & L2 regularization but with an adaptive learning rate,
  #    FTRL-Proximal is identical to Adagrad.
  # So, based on these two properties, we test whether our implementation of
  # FTRL-Proximal performs the same updates as Adagrad or GradientDescent.
@test_util.run_deprecated_v1
def testEquivAdagradwithoutRegularization(self):
for dtype in [dtypes.half, dtypes.float32]:
with self.cached_session():
val0, val1 = self.applyOptimizer(
ftrl.Ftrl(
3.0,
# Adagrad learning rate
learning_rate_power=-0.5,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0),
dtype)
with self.cached_session():
val2, val3 = self.applyOptimizer(
adagrad.AdagradOptimizer(3.0, initial_accumulator_value=0.1), dtype)
self.assertAllCloseAccordingToType(val0, val2)
self.assertAllCloseAccordingToType(val1, val3)
@test_util.run_deprecated_v1
def testEquivSparseAdagradwithoutRegularization(self):
for dtype in [dtypes.half, dtypes.float32]:
with self.cached_session():
val0, val1 = self.applyOptimizer(
ftrl.Ftrl(
3.0,
# Adagrad learning rate
learning_rate_power=-0.5,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0),
dtype,
is_sparse=True)
with self.cached_session():
val2, val3 = self.applyOptimizer(
adagrad.AdagradOptimizer(3.0, initial_accumulator_value=0.1),
dtype,
is_sparse=True)
self.assertAllCloseAccordingToType(val0, val2)
self.assertAllCloseAccordingToType(val1, val3)
@test_util.run_deprecated_v1
def testEquivSparseGradientDescentwithoutRegularization(self):
for dtype in [dtypes.half, dtypes.float32]:
with self.cached_session():
val0, val1 = self.applyOptimizer(
ftrl.Ftrl(
3.0,
# Fixed learning rate
learning_rate_power=-0.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0),
dtype,
is_sparse=True)
with self.cached_session():
val2, val3 = self.applyOptimizer(
gradient_descent.GradientDescentOptimizer(3.0),
dtype,
is_sparse=True)
self.assertAllCloseAccordingToType(val0, val2)
self.assertAllCloseAccordingToType(val1, val3)
@test_util.run_deprecated_v1
def testEquivGradientDescentwithoutRegularization(self):
for dtype in [dtypes.half, dtypes.float32]:
with self.cached_session():
val0, val1 = self.applyOptimizer(
ftrl.Ftrl(
3.0,
# Fixed learning rate
learning_rate_power=-0.0,
initial_accumulator_value=0.1,
l1_regularization_strength=0.0,
l2_regularization_strength=0.0),
dtype)
with self.cached_session():
val2, val3 = self.applyOptimizer(
gradient_descent.GradientDescentOptimizer(3.0), dtype)
self.assertAllCloseAccordingToType(val0, val2)
self.assertAllCloseAccordingToType(val1, val3)
if __name__ == "__main__":
test.main()
| apache-2.0 |
muffinresearch/olympia | scripts/siege.py | 24 | 2862 | """
A script for generating siege files with a bunch of URL variations.
"""
import re
import sys
part_re = re.compile(r'\{([-\w]+)\}')
AMO_LANGUAGES = (
'af', 'ar', 'ca', 'cs', 'da', 'de', 'el', 'en-US', 'es', 'eu', 'fa', 'fi',
'fr', 'ga-IE', 'he', 'hu', 'id', 'it', 'ja', 'ko', 'mn', 'nl', 'pl',
'pt-BR', 'pt-PT', 'ro', 'ru', 'sk', 'sq', 'sr', 'sv-SE', 'uk', 'vi',
'zh-CN', 'zh-TW',
)
config = {
'base': [],
'locale': AMO_LANGUAGES,
'app': ['firefox'],
'extension-slug': [''] + """
alerts-and-updates appearance bookmarks download-management
feeds-news-blogging language-support photos-music-videos
privacy-security social-communication tabs toolbars web-development
other""".split(),
'theme-slug': [''] + """
animals compact large miscellaneous modern nature os-integration retro
sports""".split(),
'theme-sort': 'name updated created downloads rating'.split(),
'page': '1 2'.split(),
'exp': 'on off'.split(),
'personas-slug': [''] + """
abstract causes fashion firefox foxkeh holiday music nature other
scenery seasonal solid sports websites""".split(),
'personas-sort': """up-and-coming created popular rating""".split()
}
root = '{base}/{locale}/{app}'
templates = t = {
'root': '/',
'extensions': '/extensions/{extension-slug}/',
'language-tools': '/language-tools',
'themes': '/themes/{theme-slug}?sort={theme-sort}&page={page}',
'personas': '/personas/{personas-slug}',
}
t['themes-unreviewed'] = t['themes'] + '&unreviewed={exp}'
t['personas-sort'] = t['personas'] + '?sort={personas-sort}'
t['extensions-sort'] = t['extensions'] + '?sort={theme-sort}'
t['extensions-featured'] = t['extensions'] + 'featured'
for key, value in templates.items():
templates[key] = root + value
def combos(s, parts):
def _rec(s, parts, kw):
key, rest = parts[0], parts[1:]
rv = []
for opt in config[key]:
kw[key] = opt
if not rest:
rv.append(s.format(**kw))
else:
rv.extend(_rec(s, rest, kw))
return rv
return _rec(s, parts, {})
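# Illustrative expansion (hypothetical base URL): with
# config['base'] = ['http://localhost:8000/z'], the 'personas' template
# expands to one URL per (locale, app, personas-slug) combination, e.g.
#   http://localhost:8000/z/en-US/firefox/personas/nature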
def gen(choices=templates):
rv = []
for template in choices:
parts = part_re.findall(template)
rv.extend(combos(template, parts))
return rv
def main():
args = sys.argv
try:
base, choices = sys.argv[1], args[2:] or templates.keys()
except IndexError:
print 'Usage: python siege.py <BASE> [%s]' % (', '.join(templates))
print '\nBASE should be something like "http://localhost:8000/z".'
print 'The remaining arguments are names of url templates.'
sys.exit(1)
config['base'] = [base.rstrip('/')]
print '\n'.join(gen(templates[k] for k in choices))
if __name__ == '__main__':
main()
| bsd-3-clause |
fredericgermain/linux-sunxi | tools/perf/util/setup.py | 989 | 1543 | #!/usr/bin/python2
from distutils.core import setup, Extension
from os import getenv
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib
class build_ext(_build_ext):
def finalize_options(self):
_build_ext.finalize_options(self)
self.build_lib = build_lib
self.build_temp = build_tmp
class install_lib(_install_lib):
def finalize_options(self):
_install_lib.finalize_options(self)
self.build_dir = build_lib
cflags = getenv('CFLAGS', '').split()
# switch off several checks (need to be at the end of cflags list)
cflags += ['-fno-strict-aliasing', '-Wno-write-strings', '-Wno-unused-parameter' ]
build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')
libtraceevent = getenv('LIBTRACEEVENT')
libapikfs = getenv('LIBAPIKFS')
ext_sources = [f.strip() for f in file('util/python-ext-sources')
if len(f.strip()) > 0 and f[0] != '#']
perf = Extension('perf',
sources = ext_sources,
include_dirs = ['util/include'],
extra_compile_args = cflags,
extra_objects = [libtraceevent, libapikfs],
)
setup(name='perf',
version='0.1',
description='Interface with the Linux profiling infrastructure',
author='Arnaldo Carvalho de Melo',
author_email='[email protected]',
license='GPLv2',
url='http://perf.wiki.kernel.org',
ext_modules=[perf],
cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
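# Invocation sketch (assumed from the getenv() calls above, not shown in this
# file): the caller, normally the perf Makefile, is expected to export these
# variables before running this script, roughly:
#   PYTHON_EXTBUILD_LIB=... PYTHON_EXTBUILD_TMP=... \
#   LIBTRACEEVENT=... LIBAPIKFS=... python util/setup.py build_ext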
| gpl-2.0 |
ritchiewilson/majormajor | tests/majormajor_tests/test_majormajor_helpers.py | 1 | 1120 | # MajorMajor - Collaborative Document Editing Library
# Copyright (C) 2013 Ritchie Wilson
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from majormajor.majormajor import MajorMajor
from majormajor.document import Document
class TestMajorMajorHelpers:
def setup_method(self, method):
self.collab0 = MajorMajor()
def test_new_document(self):
# leaving nothing specified
doc = self.collab0.new_document()
assert isinstance(doc, Document)
assert doc.get_snapshot() == {}
| gpl-3.0 |
yilei0620/3D_Conditional_Gan | lib/data_utils.py | 1 | 1596 | import numpy as np
from sklearn import utils as skutils
from rng import np_rng, py_rng
def center_crop(x, ph, pw=None):
if pw is None:
pw = ph
h, w = x.shape[:2]
j = int(round((h - ph)/2.))
i = int(round((w - pw)/2.))
return x[j:j+ph, i:i+pw]
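# Illustrative example: for an array x of shape (100, 120, 3),
# center_crop(x, 64) returns the central 64x64 region, i.e. x[18:82, 28:92].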
def patch(x, ph, pw=None):
if pw is None:
pw = ph
h, w = x.shape[:2]
j = py_rng.randint(0, h-ph)
i = py_rng.randint(0, w-pw)
x = x[j:j+ph, i:i+pw]
return x
def list_shuffle(*data):
idxs = np_rng.permutation(np.arange(len(data[0])))
if len(data) == 1:
return [data[0][idx] for idx in idxs]
else:
return [[d[idx] for idx in idxs] for d in data]
def shuffle(*arrays, **options):
if isinstance(arrays[0][0], basestring):
return list_shuffle(*arrays)
else:
return skutils.shuffle(*arrays, random_state=np_rng)
def OneHot(X, n=None, negative_class=0.):
X = np.asarray(X).flatten()
if n is None:
n = np.max(X) + 1
Xoh = np.ones((len(X), n)) * negative_class
Xoh[np.arange(len(X)), X] = 1.
return Xoh
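# Illustrative example: OneHot([0, 2], n=3) returns
#   [[1., 0., 0.],
#    [0., 0., 1.]]
# and with negative_class=-1. the zero entries above become -1 instead.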
def iter_data(*data, **kwargs):
size = kwargs.get('size', 128)
n = kwargs.get('ndata',0)
sIndex = kwargs.get('shuffle_index',[])
batches = n / size
if n % size != 0:
batches += 1
for b in range(batches):
start = b * size
end = (b + 1) * size
if end > n:
end = n
if len(data) == 1:
yield data[0][start:end]
else:
# print sIndex[start:end]
yield tuple([d[sIndex[start:end]] for d in data])
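# Usage sketch (variable names are assumed, only the signature above is
# given): iterate paired numpy arrays in minibatches through a precomputed
# shuffle index:
#   idx = np_rng.permutation(len(X))
#   for xb, yb in iter_data(X, Y, size=128, ndata=len(X), shuffle_index=idx):
#       train_step(xb, yb)  # train_step is a hypothetical training function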
| mit |
Changaco/oh-mainline | vendor/packages/scrapy/scrapy/utils/project.py | 19 | 1474 | from os.path import join, dirname, abspath, isabs, exists
from os import makedirs, environ
import warnings
from scrapy.utils.conf import closest_scrapy_cfg, get_config
from scrapy.utils.python import is_writable
from scrapy.exceptions import NotConfigured
DATADIR_CFG_SECTION = 'datadir'
def inside_project():
scrapy_module = environ.get('SCRAPY_SETTINGS_MODULE')
if scrapy_module is not None:
try:
__import__(scrapy_module)
except ImportError:
warnings.warn("Cannot import scrapy settings module %s" % scrapy_module)
else:
return True
return bool(closest_scrapy_cfg())
def project_data_dir(project='default'):
"""Return the current project data dir, creating it if it doesn't exist"""
if not inside_project():
raise NotConfigured("Not inside a project")
cfg = get_config()
if cfg.has_option(DATADIR_CFG_SECTION, project):
d = cfg.get(DATADIR_CFG_SECTION, project)
else:
scrapy_cfg = closest_scrapy_cfg()
if not scrapy_cfg:
raise NotConfigured("Unable to find scrapy.cfg file to infer project data dir")
d = abspath(join(dirname(scrapy_cfg), '.scrapy'))
if not exists(d):
makedirs(d)
return d
def data_path(path):
"""If path is relative, return the given path inside the project data dir,
otherwise return the path unmodified
"""
return path if isabs(path) else join(project_data_dir(), path)
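# Illustrative example (hypothetical paths): inside a project whose scrapy.cfg
# lives at /home/user/myproject/scrapy.cfg, data_path('httpcache') returns
# '/home/user/myproject/.scrapy/httpcache' (creating the .scrapy directory if
# needed), while an absolute argument such as data_path('/tmp/cache') is
# returned unchanged.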
| agpl-3.0 |
Worldify/Worldify | worldify/config.py | 1 | 1709 | import os
from ConfigParser import ConfigParser
from .exceptions import WorldifyConfigException
class WorldifyConfig(object):
def __init__(self):
self._config_path = os.path.expanduser("~/.worldify")
        self._check_config_exists()
        self.conf = ConfigParser()
        self.conf.read(self._config_path)
        self._check_config_contents()
self._create_config_objects()
    def _check_config_exists(self):
if not os.path.exists(self._config_path):
raise WorldifyConfigException("No config file found at {0}".format(self._config_path))
return True
def _check_config_contents(self):
expected_config = {
"twitter": ['customer_key', 'customer_secret', 'access_key', 'access_secret'],
"recptiviti": ['api_key', 'api_secret'],
"spotify": ['user_id', 'user_oath', 'client_id', 'client_secret']
}
for key in expected_config:
if not self.conf.has_section(key):
raise WorldifyConfigException("Could not find the {} section in the worldify "
"config file.".format(key))
for option in expected_config[key]:
if not self.conf.has_option(key, option):
raise WorldifyConfigException("Could not find the {0}.{1} option in the "
"worldify config file".format(key, option))
def _create_config_objects(self):
self.twitter = {item[0]: item[1] for item in self.conf.items("twitter")}
self.recptiviti = {item[0]: item[1] for item in self.conf.items("recptiviti")}
self.spotify = {item[0]: item[1] for item in self.conf.items("spotify")}
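# A minimal ~/.worldify sketch that satisfies the checks above (all values are
# placeholders, not real credentials):
#
#   [twitter]
#   customer_key = ...
#   customer_secret = ...
#   access_key = ...
#   access_secret = ...
#
#   [recptiviti]
#   api_key = ...
#   api_secret = ...
#
#   [spotify]
#   user_id = ...
#   user_oath = ...
#   client_id = ...
#   client_secret = ...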
| gpl-3.0 |
dkodnik/arp | addons/portal_project/tests/__init__.py | 170 | 1124 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2012-TODAY OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_access_rights
checks = [
test_access_rights,
]
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
agrista/odoo-saas | addons/base_geolocalize/models/res_partner.py | 239 | 3743 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013_Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
try:
import simplejson as json
except ImportError:
import json # noqa
import urllib
from openerp.osv import osv, fields
from openerp import tools
from openerp.tools.translate import _
def geo_find(addr):
url = 'https://maps.googleapis.com/maps/api/geocode/json?sensor=false&address='
url += urllib.quote(addr.encode('utf8'))
try:
result = json.load(urllib.urlopen(url))
except Exception, e:
raise osv.except_osv(_('Network error'),
_('Cannot contact geolocation servers. Please make sure that your internet connection is up and running (%s).') % e)
if result['status'] != 'OK':
return None
try:
geo = result['results'][0]['geometry']['location']
return float(geo['lat']), float(geo['lng'])
except (KeyError, ValueError):
return None
def geo_query_address(street=None, zip=None, city=None, state=None, country=None):
if country and ',' in country and (country.endswith(' of') or country.endswith(' of the')):
# put country qualifier in front, otherwise GMap gives wrong results,
# e.g. 'Congo, Democratic Republic of the' => 'Democratic Republic of the Congo'
country = '{1} {0}'.format(*country.split(',', 1))
return tools.ustr(', '.join(filter(None, [street,
("%s %s" % (zip or '', city or '')).strip(),
state,
country])))
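# Illustrative example (hypothetical address): geo_query_address(
#     street='Rue de la Loi 16', zip='1000', city='Brussels',
#     country='Belgium')
# returns u'Rue de la Loi 16, 1000 Brussels, Belgium'; geo_find() then asks
# the Google geocoder for that string and returns a (lat, lng) tuple of
# floats, or None if no result is found.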
class res_partner(osv.osv):
_inherit = "res.partner"
_columns = {
'partner_latitude': fields.float('Geo Latitude', digits=(16, 5)),
'partner_longitude': fields.float('Geo Longitude', digits=(16, 5)),
'date_localization': fields.date('Geo Localization Date'),
}
def geo_localize(self, cr, uid, ids, context=None):
        # Don't pass context to browse()! We need country names in English below
for partner in self.browse(cr, uid, ids):
if not partner:
continue
result = geo_find(geo_query_address(street=partner.street,
zip=partner.zip,
city=partner.city,
state=partner.state_id.name,
country=partner.country_id.name))
if result:
self.write(cr, uid, [partner.id], {
'partner_latitude': result[0],
'partner_longitude': result[1],
'date_localization': fields.date.context_today(self, cr, uid, context=context)
}, context=context)
return True
| agpl-3.0 |
sogis/Quantum-GIS | python/ext-libs/pygments/lexers/_clbuiltins.py | 370 | 14015 | # -*- coding: utf-8 -*-
"""
pygments.lexers._clbuiltins
~~~~~~~~~~~~~~~~~~~~~~~~~~~
ANSI Common Lisp builtins.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
BUILTIN_FUNCTIONS = [ # 638 functions
'<', '<=', '=', '>', '>=', '-', '/', '/=', '*', '+', '1-', '1+',
'abort', 'abs', 'acons', 'acos', 'acosh', 'add-method', 'adjoin',
'adjustable-array-p', 'adjust-array', 'allocate-instance',
'alpha-char-p', 'alphanumericp', 'append', 'apply', 'apropos',
'apropos-list', 'aref', 'arithmetic-error-operands',
'arithmetic-error-operation', 'array-dimension', 'array-dimensions',
'array-displacement', 'array-element-type', 'array-has-fill-pointer-p',
'array-in-bounds-p', 'arrayp', 'array-rank', 'array-row-major-index',
'array-total-size', 'ash', 'asin', 'asinh', 'assoc', 'assoc-if',
'assoc-if-not', 'atan', 'atanh', 'atom', 'bit', 'bit-and', 'bit-andc1',
'bit-andc2', 'bit-eqv', 'bit-ior', 'bit-nand', 'bit-nor', 'bit-not',
'bit-orc1', 'bit-orc2', 'bit-vector-p', 'bit-xor', 'boole',
'both-case-p', 'boundp', 'break', 'broadcast-stream-streams',
'butlast', 'byte', 'byte-position', 'byte-size', 'caaaar', 'caaadr',
'caaar', 'caadar', 'caaddr', 'caadr', 'caar', 'cadaar', 'cadadr',
'cadar', 'caddar', 'cadddr', 'caddr', 'cadr', 'call-next-method', 'car',
'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr', 'cdr',
'ceiling', 'cell-error-name', 'cerror', 'change-class', 'char', 'char<',
'char<=', 'char=', 'char>', 'char>=', 'char/=', 'character',
'characterp', 'char-code', 'char-downcase', 'char-equal',
'char-greaterp', 'char-int', 'char-lessp', 'char-name',
'char-not-equal', 'char-not-greaterp', 'char-not-lessp', 'char-upcase',
'cis', 'class-name', 'class-of', 'clear-input', 'clear-output',
'close', 'clrhash', 'code-char', 'coerce', 'compile',
'compiled-function-p', 'compile-file', 'compile-file-pathname',
'compiler-macro-function', 'complement', 'complex', 'complexp',
'compute-applicable-methods', 'compute-restarts', 'concatenate',
'concatenated-stream-streams', 'conjugate', 'cons', 'consp',
'constantly', 'constantp', 'continue', 'copy-alist', 'copy-list',
'copy-pprint-dispatch', 'copy-readtable', 'copy-seq', 'copy-structure',
'copy-symbol', 'copy-tree', 'cos', 'cosh', 'count', 'count-if',
'count-if-not', 'decode-float', 'decode-universal-time', 'delete',
'delete-duplicates', 'delete-file', 'delete-if', 'delete-if-not',
'delete-package', 'denominator', 'deposit-field', 'describe',
'describe-object', 'digit-char', 'digit-char-p', 'directory',
'directory-namestring', 'disassemble', 'documentation', 'dpb',
'dribble', 'echo-stream-input-stream', 'echo-stream-output-stream',
'ed', 'eighth', 'elt', 'encode-universal-time', 'endp',
'enough-namestring', 'ensure-directories-exist',
'ensure-generic-function', 'eq', 'eql', 'equal', 'equalp', 'error',
'eval', 'evenp', 'every', 'exp', 'export', 'expt', 'fboundp',
'fceiling', 'fdefinition', 'ffloor', 'fifth', 'file-author',
'file-error-pathname', 'file-length', 'file-namestring',
'file-position', 'file-string-length', 'file-write-date',
'fill', 'fill-pointer', 'find', 'find-all-symbols', 'find-class',
'find-if', 'find-if-not', 'find-method', 'find-package', 'find-restart',
'find-symbol', 'finish-output', 'first', 'float', 'float-digits',
'floatp', 'float-precision', 'float-radix', 'float-sign', 'floor',
'fmakunbound', 'force-output', 'format', 'fourth', 'fresh-line',
'fround', 'ftruncate', 'funcall', 'function-keywords',
'function-lambda-expression', 'functionp', 'gcd', 'gensym', 'gentemp',
'get', 'get-decoded-time', 'get-dispatch-macro-character', 'getf',
'gethash', 'get-internal-real-time', 'get-internal-run-time',
'get-macro-character', 'get-output-stream-string', 'get-properties',
'get-setf-expansion', 'get-universal-time', 'graphic-char-p',
'hash-table-count', 'hash-table-p', 'hash-table-rehash-size',
'hash-table-rehash-threshold', 'hash-table-size', 'hash-table-test',
'host-namestring', 'identity', 'imagpart', 'import',
'initialize-instance', 'input-stream-p', 'inspect',
'integer-decode-float', 'integer-length', 'integerp',
'interactive-stream-p', 'intern', 'intersection',
'invalid-method-error', 'invoke-debugger', 'invoke-restart',
'invoke-restart-interactively', 'isqrt', 'keywordp', 'last', 'lcm',
'ldb', 'ldb-test', 'ldiff', 'length', 'lisp-implementation-type',
'lisp-implementation-version', 'list', 'list*', 'list-all-packages',
'listen', 'list-length', 'listp', 'load',
'load-logical-pathname-translations', 'log', 'logand', 'logandc1',
'logandc2', 'logbitp', 'logcount', 'logeqv', 'logical-pathname',
'logical-pathname-translations', 'logior', 'lognand', 'lognor',
'lognot', 'logorc1', 'logorc2', 'logtest', 'logxor', 'long-site-name',
'lower-case-p', 'machine-instance', 'machine-type', 'machine-version',
'macroexpand', 'macroexpand-1', 'macro-function', 'make-array',
'make-broadcast-stream', 'make-concatenated-stream', 'make-condition',
'make-dispatch-macro-character', 'make-echo-stream', 'make-hash-table',
'make-instance', 'make-instances-obsolete', 'make-list',
'make-load-form', 'make-load-form-saving-slots', 'make-package',
'make-pathname', 'make-random-state', 'make-sequence', 'make-string',
'make-string-input-stream', 'make-string-output-stream', 'make-symbol',
'make-synonym-stream', 'make-two-way-stream', 'makunbound', 'map',
'mapc', 'mapcan', 'mapcar', 'mapcon', 'maphash', 'map-into', 'mapl',
'maplist', 'mask-field', 'max', 'member', 'member-if', 'member-if-not',
'merge', 'merge-pathnames', 'method-combination-error',
'method-qualifiers', 'min', 'minusp', 'mismatch', 'mod',
'muffle-warning', 'name-char', 'namestring', 'nbutlast', 'nconc',
'next-method-p', 'nintersection', 'ninth', 'no-applicable-method',
'no-next-method', 'not', 'notany', 'notevery', 'nreconc', 'nreverse',
'nset-difference', 'nset-exclusive-or', 'nstring-capitalize',
'nstring-downcase', 'nstring-upcase', 'nsublis', 'nsubst', 'nsubst-if',
'nsubst-if-not', 'nsubstitute', 'nsubstitute-if', 'nsubstitute-if-not',
'nth', 'nthcdr', 'null', 'numberp', 'numerator', 'nunion', 'oddp',
'open', 'open-stream-p', 'output-stream-p', 'package-error-package',
'package-name', 'package-nicknames', 'packagep',
'package-shadowing-symbols', 'package-used-by-list', 'package-use-list',
'pairlis', 'parse-integer', 'parse-namestring', 'pathname',
'pathname-device', 'pathname-directory', 'pathname-host',
'pathname-match-p', 'pathname-name', 'pathnamep', 'pathname-type',
'pathname-version', 'peek-char', 'phase', 'plusp', 'position',
'position-if', 'position-if-not', 'pprint', 'pprint-dispatch',
'pprint-fill', 'pprint-indent', 'pprint-linear', 'pprint-newline',
'pprint-tab', 'pprint-tabular', 'prin1', 'prin1-to-string', 'princ',
'princ-to-string', 'print', 'print-object', 'probe-file', 'proclaim',
'provide', 'random', 'random-state-p', 'rassoc', 'rassoc-if',
'rassoc-if-not', 'rational', 'rationalize', 'rationalp', 'read',
'read-byte', 'read-char', 'read-char-no-hang', 'read-delimited-list',
'read-from-string', 'read-line', 'read-preserving-whitespace',
'read-sequence', 'readtable-case', 'readtablep', 'realp', 'realpart',
'reduce', 'reinitialize-instance', 'rem', 'remhash', 'remove',
'remove-duplicates', 'remove-if', 'remove-if-not', 'remove-method',
'remprop', 'rename-file', 'rename-package', 'replace', 'require',
'rest', 'restart-name', 'revappend', 'reverse', 'room', 'round',
'row-major-aref', 'rplaca', 'rplacd', 'sbit', 'scale-float', 'schar',
'search', 'second', 'set', 'set-difference',
'set-dispatch-macro-character', 'set-exclusive-or',
'set-macro-character', 'set-pprint-dispatch', 'set-syntax-from-char',
'seventh', 'shadow', 'shadowing-import', 'shared-initialize',
'short-site-name', 'signal', 'signum', 'simple-bit-vector-p',
'simple-condition-format-arguments', 'simple-condition-format-control',
'simple-string-p', 'simple-vector-p', 'sin', 'sinh', 'sixth', 'sleep',
'slot-boundp', 'slot-exists-p', 'slot-makunbound', 'slot-missing',
'slot-unbound', 'slot-value', 'software-type', 'software-version',
'some', 'sort', 'special-operator-p', 'sqrt', 'stable-sort',
'standard-char-p', 'store-value', 'stream-element-type',
'stream-error-stream', 'stream-external-format', 'streamp', 'string',
'string<', 'string<=', 'string=', 'string>', 'string>=', 'string/=',
'string-capitalize', 'string-downcase', 'string-equal',
'string-greaterp', 'string-left-trim', 'string-lessp',
'string-not-equal', 'string-not-greaterp', 'string-not-lessp',
'stringp', 'string-right-trim', 'string-trim', 'string-upcase',
'sublis', 'subseq', 'subsetp', 'subst', 'subst-if', 'subst-if-not',
    'substitute', 'substitute-if', 'substitute-if-not', 'subtypep', 'svref',
'sxhash', 'symbol-function', 'symbol-name', 'symbolp', 'symbol-package',
'symbol-plist', 'symbol-value', 'synonym-stream-symbol', 'syntax:',
'tailp', 'tan', 'tanh', 'tenth', 'terpri', 'third',
'translate-logical-pathname', 'translate-pathname', 'tree-equal',
'truename', 'truncate', 'two-way-stream-input-stream',
'two-way-stream-output-stream', 'type-error-datum',
'type-error-expected-type', 'type-of', 'typep', 'unbound-slot-instance',
'unexport', 'unintern', 'union', 'unread-char', 'unuse-package',
'update-instance-for-different-class',
'update-instance-for-redefined-class', 'upgraded-array-element-type',
'upgraded-complex-part-type', 'upper-case-p', 'use-package',
'user-homedir-pathname', 'use-value', 'values', 'values-list', 'vector',
'vectorp', 'vector-pop', 'vector-push', 'vector-push-extend', 'warn',
'wild-pathname-p', 'write', 'write-byte', 'write-char', 'write-line',
'write-sequence', 'write-string', 'write-to-string', 'yes-or-no-p',
'y-or-n-p', 'zerop',
]
SPECIAL_FORMS = [
'block', 'catch', 'declare', 'eval-when', 'flet', 'function', 'go', 'if',
'labels', 'lambda', 'let', 'let*', 'load-time-value', 'locally', 'macrolet',
'multiple-value-call', 'multiple-value-prog1', 'progn', 'progv', 'quote',
'return-from', 'setq', 'symbol-macrolet', 'tagbody', 'the', 'throw',
'unwind-protect',
]
MACROS = [
'and', 'assert', 'call-method', 'case', 'ccase', 'check-type', 'cond',
'ctypecase', 'decf', 'declaim', 'defclass', 'defconstant', 'defgeneric',
'define-compiler-macro', 'define-condition', 'define-method-combination',
'define-modify-macro', 'define-setf-expander', 'define-symbol-macro',
'defmacro', 'defmethod', 'defpackage', 'defparameter', 'defsetf',
'defstruct', 'deftype', 'defun', 'defvar', 'destructuring-bind', 'do',
'do*', 'do-all-symbols', 'do-external-symbols', 'dolist', 'do-symbols',
'dotimes', 'ecase', 'etypecase', 'formatter', 'handler-bind',
'handler-case', 'ignore-errors', 'incf', 'in-package', 'lambda', 'loop',
'loop-finish', 'make-method', 'multiple-value-bind', 'multiple-value-list',
'multiple-value-setq', 'nth-value', 'or', 'pop',
'pprint-exit-if-list-exhausted', 'pprint-logical-block', 'pprint-pop',
'print-unreadable-object', 'prog', 'prog*', 'prog1', 'prog2', 'psetf',
'psetq', 'push', 'pushnew', 'remf', 'restart-bind', 'restart-case',
'return', 'rotatef', 'setf', 'shiftf', 'step', 'time', 'trace', 'typecase',
'unless', 'untrace', 'when', 'with-accessors', 'with-compilation-unit',
'with-condition-restarts', 'with-hash-table-iterator',
'with-input-from-string', 'with-open-file', 'with-open-stream',
'with-output-to-string', 'with-package-iterator', 'with-simple-restart',
'with-slots', 'with-standard-io-syntax',
]
LAMBDA_LIST_KEYWORDS = [
'&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
'&rest', '&whole',
]
DECLARATIONS = [
'dynamic-extent', 'ignore', 'optimize', 'ftype', 'inline', 'special',
'ignorable', 'notinline', 'type',
]
BUILTIN_TYPES = [
'atom', 'boolean', 'base-char', 'base-string', 'bignum', 'bit',
'compiled-function', 'extended-char', 'fixnum', 'keyword', 'nil',
'signed-byte', 'short-float', 'single-float', 'double-float', 'long-float',
'simple-array', 'simple-base-string', 'simple-bit-vector', 'simple-string',
'simple-vector', 'standard-char', 'unsigned-byte',
# Condition Types
'arithmetic-error', 'cell-error', 'condition', 'control-error',
'division-by-zero', 'end-of-file', 'error', 'file-error',
'floating-point-inexact', 'floating-point-overflow',
'floating-point-underflow', 'floating-point-invalid-operation',
'parse-error', 'package-error', 'print-not-readable', 'program-error',
'reader-error', 'serious-condition', 'simple-condition', 'simple-error',
'simple-type-error', 'simple-warning', 'stream-error', 'storage-condition',
'style-warning', 'type-error', 'unbound-variable', 'unbound-slot',
'undefined-function', 'warning',
]
BUILTIN_CLASSES = [
'array', 'broadcast-stream', 'bit-vector', 'built-in-class', 'character',
'class', 'complex', 'concatenated-stream', 'cons', 'echo-stream',
'file-stream', 'float', 'function', 'generic-function', 'hash-table',
'integer', 'list', 'logical-pathname', 'method-combination', 'method',
'null', 'number', 'package', 'pathname', 'ratio', 'rational', 'readtable',
'real', 'random-state', 'restart', 'sequence', 'standard-class',
'standard-generic-function', 'standard-method', 'standard-object',
'string-stream', 'stream', 'string', 'structure-class', 'structure-object',
'symbol', 'synonym-stream', 't', 'two-way-stream', 'vector',
]
| gpl-2.0 |
Softmotions/edx-platform | lms/djangoapps/survey/models.py | 89 | 8631 | """
Models to support Course Surveys feature
"""
import logging
from lxml import etree
from collections import OrderedDict
from django.db import models
from student.models import User
from django.core.exceptions import ValidationError
from model_utils.models import TimeStampedModel
from survey.exceptions import SurveyFormNameAlreadyExists, SurveyFormNotFound
from xmodule_django.models import CourseKeyField
log = logging.getLogger("edx.survey")
class SurveyForm(TimeStampedModel):
"""
Model to define a Survey Form that contains the HTML form data
that is presented to the end user. A SurveyForm is not tied to
a particular run of a course, to allow for sharing of Surveys
across courses
"""
name = models.CharField(max_length=255, db_index=True, unique=True)
form = models.TextField()
def __unicode__(self):
return self.name
def save(self, *args, **kwargs):
"""
Override save method so we can validate that the form HTML is
actually parseable
"""
self.validate_form_html(self.form)
# now call the actual save method
super(SurveyForm, self).save(*args, **kwargs)
@classmethod
def validate_form_html(cls, html):
"""
Makes sure that the html that is contained in the form field is valid
"""
try:
fields = cls.get_field_names_from_html(html)
except Exception as ex:
log.exception("Cannot parse SurveyForm html: {}".format(ex))
raise ValidationError("Cannot parse SurveyForm as HTML: {}".format(ex))
if not len(fields):
raise ValidationError("SurveyForms must contain at least one form input field")
@classmethod
def create(cls, name, form, update_if_exists=False):
"""
Helper class method to create a new Survey Form.
update_if_exists=True means that if a form already exists with that name, then update it.
        Otherwise raise a SurveyFormNameAlreadyExists exception.
"""
survey = cls.get(name, throw_if_not_found=False)
if not survey:
survey = SurveyForm(name=name, form=form)
else:
if update_if_exists:
survey.form = form
else:
raise SurveyFormNameAlreadyExists()
survey.save()
return survey
@classmethod
def get(cls, name, throw_if_not_found=True):
"""
Helper class method to look up a Survey Form, throw FormItemNotFound if it does not exists
in the database, unless throw_if_not_found=False then we return None
"""
survey = None
exists = SurveyForm.objects.filter(name=name).exists()
if exists:
survey = SurveyForm.objects.get(name=name)
elif throw_if_not_found:
raise SurveyFormNotFound()
return survey
def get_answers(self, user=None, limit_num_users=10000):
"""
Returns all answers for all users for this Survey
"""
return SurveyAnswer.get_answers(self, user, limit_num_users=limit_num_users)
def has_user_answered_survey(self, user):
"""
Returns whether a given user has supplied answers to this
survey
"""
return SurveyAnswer.do_survey_answers_exist(self, user)
def save_user_answers(self, user, answers, course_key):
"""
Store answers to the form for a given user. Answers is a dict of simple
name/value pairs
        IMPORTANT: There is no validation of form answers at this point. All data
supplied to this method is presumed to be previously validated
"""
# first remove any answer the user might have done before
self.clear_user_answers(user)
SurveyAnswer.save_answers(self, user, answers, course_key)
def clear_user_answers(self, user):
"""
Removes all answers that a user has submitted
"""
SurveyAnswer.objects.filter(form=self, user=user).delete()
def get_field_names(self):
"""
Returns a list of defined field names for all answers in a survey. This can be
        helpful for reporting-like features, e.g. adding headers to the reports.
This is taken from the set of <input> fields inside the form.
"""
return SurveyForm.get_field_names_from_html(self.form)
@classmethod
def get_field_names_from_html(cls, html):
"""
Returns a list of defined field names from a block of HTML
"""
names = []
        # make sure the form is wrapped in some outer single element
# otherwise lxml can't parse it
# NOTE: This wrapping doesn't change the ability to query it
tree = etree.fromstring(u'<div>{}</div>'.format(html))
input_fields = (
tree.findall('.//input') + tree.findall('.//select') +
tree.findall('.//textarea')
)
for input_field in input_fields:
if 'name' in input_field.keys() and input_field.attrib['name'] not in names:
names.append(input_field.attrib['name'])
return names
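# Illustrative example: for html of
#   '<input name="name"/><select name="color"></select>'
#   '<textarea name="bio"></textarea>'
# get_field_names_from_html() returns ['name', 'color', 'bio'] (all inputs
# first, then selects, then textareas), skipping duplicate names.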
class SurveyAnswer(TimeStampedModel):
"""
Model for the answers that a user gives for a particular form in a course
"""
user = models.ForeignKey(User, db_index=True)
form = models.ForeignKey(SurveyForm, db_index=True)
field_name = models.CharField(max_length=255, db_index=True)
field_value = models.CharField(max_length=1024)
# adding the course_id where the end-user answered the survey question
# since it didn't exist in the beginning, it is nullable
course_key = CourseKeyField(max_length=255, db_index=True, null=True)
@classmethod
def do_survey_answers_exist(cls, form, user):
"""
Returns whether a user has any answers for a given SurveyForm for a course
This can be used to determine if a user has taken a CourseSurvey.
"""
return SurveyAnswer.objects.filter(form=form, user=user).exists()
@classmethod
def get_answers(cls, form, user=None, limit_num_users=10000):
"""
Returns all answers a user (or all users, when user=None) has given to an instance of a SurveyForm
Return is a nested dict which are simple name/value pairs with an outer key which is the
user id. For example (where 'field3' is an optional field):
results = {
'1': {
'field1': 'value1',
'field2': 'value2',
},
'2': {
'field1': 'value3',
'field2': 'value4',
'field3': 'value5',
}
:
:
}
limit_num_users is to prevent an unintentional huge, in-memory dictionary.
"""
if user:
answers = SurveyAnswer.objects.filter(form=form, user=user)
else:
answers = SurveyAnswer.objects.filter(form=form)
results = OrderedDict()
num_users = 0
for answer in answers:
user_id = answer.user.id
if user_id not in results and num_users < limit_num_users:
results[user_id] = OrderedDict()
num_users = num_users + 1
if user_id in results:
results[user_id][answer.field_name] = answer.field_value
return results
@classmethod
def save_answers(cls, form, user, answers, course_key):
"""
Store answers to the form for a given user. Answers is a dict of simple
name/value pairs
        IMPORTANT: There is no validation of form answers at this point. All data
supplied to this method is presumed to be previously validated
"""
for name in answers.keys():
value = answers[name]
# See if there is an answer stored for this user, form, field_name pair or not
# this will allow for update cases. This does include an additional lookup,
# but write operations will be relatively infrequent
defaults = {"field_value": value}
if course_key:
defaults['course_key'] = course_key
answer, created = SurveyAnswer.objects.get_or_create(
user=user,
form=form,
field_name=name,
defaults=defaults
)
if not created:
# Allow for update cases.
answer.field_value = value
answer.course_key = course_key
answer.save()
| agpl-3.0 |
ebrelsford/v2v | vacant_to_vibrant/steward/migrations/0007_auto__add_field_stewardproject_external_id.py | 1 | 10857 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'StewardProject.external_id'
db.add_column(u'steward_stewardproject', 'external_id',
self.gf('django.db.models.fields.CharField')(max_length=100, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'StewardProject.external_id'
db.delete_column(u'steward_stewardproject', 'external_id')
models = {
u'actstream.action': {
'Meta': {'ordering': "('-timestamp',)", 'object_name': 'Action'},
'action_object_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'action_object'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'action_object_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'actor_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'actor'", 'to': u"orm['contenttypes.ContentType']"}),
'actor_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'data': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'place': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'blank': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'target_content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'target'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'target_object_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'verb': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'lots.use': {
'Meta': {'object_name': 'Use'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '200'}),
'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
u'organize.organizertype': {
'Meta': {'object_name': 'OrganizerType'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_group': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
u'phillyorganize.organizer': {
'Meta': {'object_name': 'Organizer'},
'added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'added_by': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'email_hash': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
'facebook_page': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'receive_text_messages': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['organize.OrganizerType']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
u'steward.optedinstewardprojectmanager': {
'Meta': {'object_name': 'OptedInStewardProjectManager'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'steward.stewardnotification': {
'Meta': {'object_name': 'StewardNotification'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'facebook_page': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'include_on_map': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'land_tenure_status': ('django.db.models.fields.CharField', [], {'default': "u'not sure'", 'max_length': '50'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'support_organization': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['organize.OrganizerType']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'use': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lots.Use']"})
},
u'steward.stewardproject': {
'Meta': {'object_name': 'StewardProject'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'external_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'include_on_map': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'land_tenure_status': ('django.db.models.fields.CharField', [], {'default': "u'not sure'", 'max_length': '50'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'organizer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['phillyorganize.Organizer']", 'null': 'True', 'blank': 'True'}),
'support_organization': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'use': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lots.Use']"})
}
}
complete_apps = ['steward'] | gpl-3.0 |
win0x86/Lab | mitm/libmproxy/proxy.py | 1 | 24823 | import sys, os, string, socket, time
import shutil, tempfile, threading
import SocketServer
from OpenSSL import SSL
from netlib import odict, tcp, http, wsgi, certutils, http_status, http_auth
import utils, flow, version, platform, controller
KILL = 0
class ProxyError(Exception):
def __init__(self, code, msg, headers=None):
self.code, self.msg, self.headers = code, msg, headers
def __str__(self):
return "ProxyError(%s, %s)"%(self.code, self.msg)
class Log:
def __init__(self, msg):
self.msg = msg
class ProxyConfig:
def __init__(self, certfile = None, cacert = None, clientcerts = None, no_upstream_cert=False, body_size_limit = None, reverse_proxy=None, transparent_proxy=None, authenticator=None):
self.certfile = certfile
self.cacert = cacert
self.clientcerts = clientcerts
self.no_upstream_cert = no_upstream_cert
self.body_size_limit = body_size_limit
self.reverse_proxy = reverse_proxy
self.transparent_proxy = transparent_proxy
self.authenticator = authenticator
self.certstore = certutils.CertStore()
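# Usage sketch (illustrative; the file name is a placeholder): a plain
# intercepting proxy with a response size cap could be configured as
#   config = ProxyConfig(cacert="mitmproxy-ca.pem",
#                        body_size_limit=100*1024*1024)
# As used later in this module, reverse_proxy is a (scheme, host, port) tuple
# and transparent_proxy is a dict with "resolver" and "sslports" keys.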
class ServerConnection(tcp.TCPClient):
def __init__(self, config, scheme, host, port, sni):
tcp.TCPClient.__init__(self, host, port)
self.config = config
self.scheme, self.sni = scheme, sni
self.requestcount = 0
self.tcp_setup_timestamp = None
self.ssl_setup_timestamp = None
def connect(self):
tcp.TCPClient.connect(self)
self.tcp_setup_timestamp = time.time()
if self.scheme == "https":
clientcert = None
if self.config.clientcerts:
path = os.path.join(self.config.clientcerts, self.host.encode("idna")) + ".pem"
if os.path.exists(path):
clientcert = path
try:
self.convert_to_ssl(cert=clientcert, sni=self.sni)
self.ssl_setup_timestamp = time.time()
except tcp.NetLibError, v:
raise ProxyError(400, str(v))
def send(self, request):
self.requestcount += 1
d = request._assemble()
if not d:
raise ProxyError(502, "Cannot transmit an incomplete request.")
self.wfile.write(d)
self.wfile.flush()
def terminate(self):
if self.connection:
try:
self.wfile.flush()
except tcp.NetLibDisconnect: # pragma: no cover
pass
self.connection.close()
class RequestReplayThread(threading.Thread):
def __init__(self, config, flow, masterq):
self.config, self.flow, self.channel = config, flow, controller.Channel(masterq)
threading.Thread.__init__(self)
def run(self):
try:
r = self.flow.request
server = ServerConnection(self.config, r.scheme, r.host, r.port, r.host)
server.connect()
server.send(r)
tsstart = utils.timestamp()
httpversion, code, msg, headers, content = http.read_response(
server.rfile, r.method, self.config.body_size_limit
)
response = flow.Response(
self.flow.request, httpversion, code, msg, headers, content, server.cert,
server.rfile.first_byte_timestamp
)
self.channel.ask(response)
except (ProxyError, http.HttpError, tcp.NetLibError), v:
err = flow.Error(self.flow.request, str(v))
self.channel.ask(err)
class HandleSNI:
def __init__(self, handler, client_conn, host, port, cert, key):
self.handler, self.client_conn, self.host, self.port = handler, client_conn, host, port
self.cert, self.key = cert, key
def __call__(self, connection):
try:
sn = connection.get_servername()
if sn:
self.handler.get_server_connection(self.client_conn, "https", self.host, self.port, sn)
new_context = SSL.Context(SSL.TLSv1_METHOD)
new_context.use_privatekey_file(self.key)
new_context.use_certificate(self.cert.x509)
connection.set_context(new_context)
self.handler.sni = sn.decode("utf8").encode("idna")
# An unhandled exception in this method will core dump PyOpenSSL, so
# make dang sure it doesn't happen.
except Exception, e: # pragma: no cover
pass
class ProxyHandler(tcp.BaseHandler):
def __init__(self, config, connection, client_address, server, channel, server_version):
self.channel, self.server_version = channel, server_version
self.config = config
self.proxy_connect_state = None
self.sni = None
self.server_conn = None
tcp.BaseHandler.__init__(self, connection, client_address, server)
def get_server_connection(self, cc, scheme, host, port, sni):
"""
When SNI is in play, this means we have an SSL-encrypted
connection, which means that the entire handler is dedicated to a
single server connection - no multiplexing. If this assumption ever
breaks, we'll have to do something different with the SNI host
variable on the handler object.
"""
sc = self.server_conn
if not sni:
sni = host
if sc and (scheme, host, port, sni) != (sc.scheme, sc.host, sc.port, sc.sni):
sc.terminate()
self.server_conn = None
self.log(
cc,
"switching connection", [
"%s://%s:%s (sni=%s) -> %s://%s:%s (sni=%s)"%(
scheme, host, port, sni,
sc.scheme, sc.host, sc.port, sc.sni
)
]
)
if not self.server_conn:
try:
self.server_conn = ServerConnection(self.config, scheme, host, port, sni)
self.server_conn.connect()
except tcp.NetLibError, v:
raise ProxyError(502, v)
return self.server_conn
def del_server_connection(self):
if self.server_conn:
self.server_conn.terminate()
self.server_conn = None
def handle(self):
cc = flow.ClientConnect(self.client_address)
self.log(cc, "connect")
self.channel.ask(cc)
while self.handle_request(cc) and not cc.close:
pass
cc.close = True
self.del_server_connection()
cd = flow.ClientDisconnect(cc)
        self.log(
            cc, "disconnect",
            ["handled %s requests" % cc.requestcount]
        )
self.channel.tell(cd)
def handle_request(self, cc):
try:
request, err = None, None
request = self.read_request(cc)
if request is None:
return
cc.requestcount += 1
app = self.server.apps.get(request)
if app:
err = app.serve(request, self.wfile)
if err:
self.log(cc, "Error in wsgi app.", err.split("\n"))
return
else:
request_reply = self.channel.ask(request)
if request_reply is None or request_reply == KILL:
return
elif isinstance(request_reply, flow.Response):
request = False
response = request_reply
response_reply = self.channel.ask(response)
else:
request = request_reply
if self.config.reverse_proxy:
scheme, host, port = self.config.reverse_proxy
else:
scheme, host, port = request.scheme, request.host, request.port
# If we've already pumped a request over this connection,
# it's possible that the server has timed out. If this is
# the case, we want to reconnect without sending an error
# to the client.
while 1:
sc = self.get_server_connection(cc, scheme, host, port, self.sni)
sc.send(request)
if sc.requestcount == 1: # add timestamps only for first request (others are not directly affected)
request.tcp_setup_timestamp = sc.tcp_setup_timestamp
request.ssl_setup_timestamp = sc.ssl_setup_timestamp
sc.rfile.reset_timestamps()
try:
tsstart = utils.timestamp()
httpversion, code, msg, headers, content = http.read_response(
sc.rfile,
request.method,
self.config.body_size_limit
)
except http.HttpErrorConnClosed, v:
self.del_server_connection()
if sc.requestcount > 1:
continue
else:
raise
except http.HttpError, v:
raise ProxyError(502, "Invalid server response.")
else:
break
response = flow.Response(
request, httpversion, code, msg, headers, content, sc.cert,
sc.rfile.first_byte_timestamp
)
response_reply = self.channel.ask(response)
# Not replying to the server invalidates the server
# connection, so we terminate.
if response_reply == KILL:
sc.terminate()
if response_reply == KILL:
return
else:
response = response_reply
self.send_response(response)
if request and http.request_connection_close(request.httpversion, request.headers):
return
# We could keep the client connection when the server
# connection needs to go away. However, we want to mimic
# behaviour as closely as possible to the client, so we
# disconnect.
if http.response_connection_close(response.httpversion, response.headers):
return
except (IOError, ProxyError, http.HttpError, tcp.NetLibError), e:
if hasattr(e, "code"):
cc.error = "%s: %s"%(e.code, e.msg)
else:
cc.error = str(e)
if request:
err = flow.Error(request, cc.error)
self.channel.ask(err)
self.log(
cc, cc.error,
["url: %s"%request.get_url()]
)
else:
self.log(cc, cc.error)
if isinstance(e, ProxyError):
self.send_error(e.code, e.msg, e.headers)
else:
return True
def log(self, cc, msg, subs=()):
msg = [
"%s:%s: "%cc.address + msg
]
for i in subs:
msg.append(" -> "+i)
msg = "\n".join(msg)
l = Log(msg)
self.channel.tell(l)
def find_cert(self, cc, host, port, sni):
if self.config.certfile:
return certutils.SSLCert.from_pem(file(self.config.certfile, "r").read())
else:
sans = []
if not self.config.no_upstream_cert:
conn = self.get_server_connection(cc, "https", host, port, sni)
sans = conn.cert.altnames
host = conn.cert.cn.decode("utf8").encode("idna")
ret = self.config.certstore.get_cert(host, sans, self.config.cacert)
if not ret:
raise ProxyError(502, "Unable to generate dummy cert.")
return ret
def get_line(self, fp):
"""
Get a line, possibly preceded by a blank.
"""
line = fp.readline()
if line == "\r\n" or line == "\n": # Possible leftover from previous message
line = fp.readline()
return line
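    # Sketch of the blank-line case handled above (illustrative only; a
    # Python 2 StringIO stands in for the real rfile):
    #
    #   from StringIO import StringIO
    #   line = self.get_line(StringIO("\r\nGET / HTTP/1.1\r\n"))
    #   assert line == "GET / HTTP/1.1\r\n"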
def read_request_transparent(self, client_conn):
orig = self.config.transparent_proxy["resolver"].original_addr(self.connection)
if not orig:
raise ProxyError(502, "Transparent mode failure: could not resolve original destination.")
self.log(client_conn, "transparent to %s:%s"%orig)
host, port = orig
if port in self.config.transparent_proxy["sslports"]:
scheme = "https"
if not self.ssl_established:
dummycert = self.find_cert(client_conn, host, port, host)
sni = HandleSNI(
self, client_conn, host, port,
dummycert, self.config.certfile or self.config.cacert
)
try:
self.convert_to_ssl(dummycert, self.config.certfile or self.config.cacert, handle_sni=sni)
except tcp.NetLibError, v:
raise ProxyError(400, str(v))
else:
scheme = "http"
line = self.get_line(self.rfile)
if line == "":
return None
r = http.parse_init_http(line)
if not r:
raise ProxyError(400, "Bad HTTP request line: %s"%repr(line))
method, path, httpversion = r
headers = self.read_headers(authenticate=False)
content = http.read_http_body_request(
self.rfile, self.wfile, headers, httpversion, self.config.body_size_limit
)
return flow.Request(
            client_conn, httpversion, host, port, scheme, method, path, headers, content,
self.rfile.first_byte_timestamp, utils.timestamp()
)
def read_request_proxy(self, client_conn):
line = self.get_line(self.rfile)
if line == "":
return None
if not self.proxy_connect_state:
connparts = http.parse_init_connect(line)
if connparts:
host, port, httpversion = connparts
headers = self.read_headers(authenticate=True)
self.wfile.write(
'HTTP/1.1 200 Connection established\r\n' +
('Proxy-agent: %s\r\n'%self.server_version) +
'\r\n'
)
self.wfile.flush()
dummycert = self.find_cert(client_conn, host, port, host)
sni = HandleSNI(
self, client_conn, host, port,
dummycert, self.config.certfile or self.config.cacert
)
try:
self.convert_to_ssl(dummycert, self.config.certfile or self.config.cacert, handle_sni=sni)
except tcp.NetLibError, v:
raise ProxyError(400, str(v))
self.proxy_connect_state = (host, port, httpversion)
                line = self.rfile.readline()
if self.proxy_connect_state:
r = http.parse_init_http(line)
if not r:
raise ProxyError(400, "Bad HTTP request line: %s"%repr(line))
method, path, httpversion = r
headers = self.read_headers(authenticate=False)
host, port, _ = self.proxy_connect_state
content = http.read_http_body_request(
self.rfile, self.wfile, headers, httpversion, self.config.body_size_limit
)
return flow.Request(
client_conn, httpversion, host, port, "https", method, path, headers, content,
self.rfile.first_byte_timestamp, utils.timestamp()
)
else:
r = http.parse_init_proxy(line)
if not r:
raise ProxyError(400, "Bad HTTP request line: %s"%repr(line))
method, scheme, host, port, path, httpversion = r
headers = self.read_headers(authenticate=True)
content = http.read_http_body_request(
self.rfile, self.wfile, headers, httpversion, self.config.body_size_limit
)
return flow.Request(
client_conn, httpversion, host, port, scheme, method, path, headers, content,
self.rfile.first_byte_timestamp, utils.timestamp()
)
def read_request_reverse(self, client_conn):
line = self.get_line(self.rfile)
if line == "":
return None
scheme, host, port = self.config.reverse_proxy
r = http.parse_init_http(line)
if not r:
raise ProxyError(400, "Bad HTTP request line: %s"%repr(line))
method, path, httpversion = r
headers = self.read_headers(authenticate=False)
content = http.read_http_body_request(
self.rfile, self.wfile, headers, httpversion, self.config.body_size_limit
)
return flow.Request(
client_conn, httpversion, host, port, "http", method, path, headers, content,
self.rfile.first_byte_timestamp, utils.timestamp()
)
def read_request(self, client_conn):
self.rfile.reset_timestamps()
if self.config.transparent_proxy:
return self.read_request_transparent(client_conn)
elif self.config.reverse_proxy:
return self.read_request_reverse(client_conn)
else:
return self.read_request_proxy(client_conn)
def read_headers(self, authenticate=False):
headers = http.read_headers(self.rfile)
if headers is None:
raise ProxyError(400, "Invalid headers")
if authenticate and self.config.authenticator:
if self.config.authenticator.authenticate(headers):
self.config.authenticator.clean(headers)
else:
raise ProxyError(
407,
"Proxy Authentication Required",
self.config.authenticator.auth_challenge_headers()
)
return headers
def send_response(self, response):
d = response._assemble()
if not d:
raise ProxyError(502, "Cannot transmit an incomplete response.")
self.wfile.write(d)
self.wfile.flush()
def send_error(self, code, body, headers):
try:
response = http_status.RESPONSES.get(code, "Unknown")
html_content = '<html><head>\n<title>%d %s</title>\n</head>\n<body>\n%s\n</body>\n</html>'%(code, response, body)
self.wfile.write("HTTP/1.1 %s %s\r\n" % (code, response))
self.wfile.write("Server: %s\r\n"%self.server_version)
self.wfile.write("Content-type: text/html\r\n")
self.wfile.write("Content-Length: %d\r\n"%len(html_content))
for key, value in headers.items():
self.wfile.write("%s: %s\r\n"%(key, value))
self.wfile.write("Connection: close\r\n")
self.wfile.write("\r\n")
self.wfile.write(html_content)
self.wfile.flush()
except:
pass
class ProxyServerError(Exception): pass
class ProxyServer(tcp.TCPServer):
allow_reuse_address = True
bound = True
def __init__(self, config, port, address='', server_version=version.NAMEVERSION):
"""
Raises ProxyServerError if there's a startup problem.
"""
self.config, self.port, self.address = config, port, address
self.server_version = server_version
try:
tcp.TCPServer.__init__(self, (address, port))
except socket.error, v:
raise ProxyServerError('Error starting proxy server: ' + v.strerror)
self.channel = None
self.apps = AppRegistry()
def start_slave(self, klass, channel):
slave = klass(channel, self)
slave.start()
def set_channel(self, channel):
self.channel = channel
def handle_connection(self, request, client_address):
h = ProxyHandler(self.config, request, client_address, self, self.channel, self.server_version)
h.handle()
h.finish()
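# Illustrative startup sketch, assuming the inherited netlib TCPServer
# exposes a serve_forever() loop; config construction is elided (see
# process_proxy_options below):
#
#   server = ProxyServer(config, 8080)
#   server.set_channel(channel)
#   server.serve_forever()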
class AppRegistry:
def __init__(self):
self.apps = {}
def add(self, app, domain, port):
"""
Add a WSGI app to the registry, to be served for requests to the
specified domain, on the specified port.
"""
self.apps[(domain, port)] = wsgi.WSGIAdaptor(app, domain, port, version.NAMEVERSION)
def get(self, request):
"""
        Returns a WSGIAdaptor instance if request matches an app, or None.
"""
if (request.host, request.port) in self.apps:
return self.apps[(request.host, request.port)]
if "host" in request.headers:
host = request.headers["host"][0]
return self.apps.get((host, request.port), None)
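# Registration sketch ("hello_app" is a hypothetical WSGI callable): requests
# whose host and port match a registered app are answered in-process instead
# of being proxied upstream.
#
#   registry = AppRegistry()
#   registry.add(hello_app, "hello.local", 80)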
class DummyServer:
bound = False
def __init__(self, config):
self.config = config
def start_slave(self, *args):
pass
def shutdown(self):
pass
# Command-line utils
def certificate_option_group(parser):
group = parser.add_argument_group("SSL")
group.add_argument(
"--cert", action="store",
type = str, dest="cert", default=None,
help = "User-created SSL certificate file."
)
group.add_argument(
"--client-certs", action="store",
type = str, dest = "clientcerts", default=None,
help = "Client certificate directory."
)
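# Usage sketch (assumed argparse wiring): attach the SSL option group to a
# parser before parsing proxy arguments.
#
#   parser = argparse.ArgumentParser()
#   certificate_option_group(parser)
#   options = parser.parse_args(["--cert", "~/mitmproxy.pem"])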
TRANSPARENT_SSL_PORTS = [443, 8443]
def process_proxy_options(parser, options):
if options.cert:
options.cert = os.path.expanduser(options.cert)
if not os.path.exists(options.cert):
return parser.error("Manually created certificate does not exist: %s"%options.cert)
cacert = os.path.join(options.confdir, "mitmproxy-ca.pem")
cacert = os.path.expanduser(cacert)
if not os.path.exists(cacert):
certutils.dummy_ca(cacert)
body_size_limit = utils.parse_size(options.body_size_limit)
if options.reverse_proxy and options.transparent_proxy:
return parser.error("Can't set both reverse proxy and transparent proxy.")
if options.transparent_proxy:
if not platform.resolver:
return parser.error("Transparent mode not supported on this platform.")
trans = dict(
resolver = platform.resolver(),
sslports = TRANSPARENT_SSL_PORTS
)
else:
trans = None
if options.reverse_proxy:
rp = utils.parse_proxy_spec(options.reverse_proxy)
if not rp:
return parser.error("Invalid reverse proxy specification: %s"%options.reverse_proxy)
else:
rp = None
if options.clientcerts:
options.clientcerts = os.path.expanduser(options.clientcerts)
if not os.path.exists(options.clientcerts) or not os.path.isdir(options.clientcerts):
return parser.error("Client certificate directory does not exist or is not a directory: %s"%options.clientcerts)
if (options.auth_nonanonymous or options.auth_singleuser or options.auth_htpasswd):
if options.auth_singleuser:
if len(options.auth_singleuser.split(':')) != 2:
return parser.error("Invalid single-user specification. Please use the format username:password")
username, password = options.auth_singleuser.split(':')
password_manager = http_auth.PassManSingleUser(username, password)
elif options.auth_nonanonymous:
password_manager = http_auth.PassManNonAnon()
elif options.auth_htpasswd:
try:
password_manager = http_auth.PassManHtpasswd(options.auth_htpasswd)
except ValueError, v:
return parser.error(v.message)
authenticator = http_auth.BasicProxyAuth(password_manager, "mitmproxy")
else:
authenticator = http_auth.NullProxyAuth(None)
return ProxyConfig(
certfile = options.cert,
cacert = cacert,
clientcerts = options.clientcerts,
body_size_limit = body_size_limit,
no_upstream_cert = options.no_upstream_cert,
reverse_proxy = rp,
transparent_proxy = trans,
authenticator = authenticator
)
| gpl-3.0 |
yesbox/ansible | lib/ansible/cli/playbook.py | 21 | 9325 | #!/usr/bin/env python
# (c) 2012, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
########################################################
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import stat
from ansible.cli import CLI
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.inventory import Inventory
from ansible.parsing.dataloader import DataLoader
from ansible.playbook.block import Block
from ansible.playbook.play_context import PlayContext
from ansible.utils.vars import load_extra_vars
from ansible.utils.vars import load_options_vars
from ansible.vars import VariableManager
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
#---------------------------------------------------------------------------------------------------
class PlaybookCLI(CLI):
''' code behind ansible playbook cli'''
def parse(self):
# create parser for CLI options
parser = CLI.base_parser(
usage = "%prog playbook.yml",
connect_opts=True,
meta_opts=True,
runas_opts=True,
subset_opts=True,
check_opts=True,
inventory_opts=True,
runtask_opts=True,
vault_opts=True,
fork_opts=True,
module_opts=True,
)
# ansible playbook specific opts
parser.add_option('--list-tasks', dest='listtasks', action='store_true',
help="list all tasks that would be executed")
parser.add_option('--list-tags', dest='listtags', action='store_true',
help="list all available tags")
parser.add_option('--step', dest='step', action='store_true',
help="one-step-at-a-time: confirm each task before running")
parser.add_option('--start-at-task', dest='start_at_task',
help="start the playbook at the task matching this name")
self.options, self.args = parser.parse_args(self.args[1:])
self.parser = parser
if len(self.args) == 0:
raise AnsibleOptionsError("You must specify a playbook file to run")
display.verbosity = self.options.verbosity
self.validate_conflicts(runas_opts=True, vault_opts=True, fork_opts=True)
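    # Illustrative driver sketch (mirrors how the ansible-playbook entry
    # point is assumed to invoke this class; the argument list is made up):
    #
    #   cli = PlaybookCLI(['ansible-playbook', 'site.yml', '-i', 'hosts'])
    #   cli.parse()
    #   exit_code = cli.run()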
def run(self):
super(PlaybookCLI, self).run()
# Manage passwords
sshpass = None
becomepass = None
vault_pass = None
passwords = {}
# don't deal with privilege escalation or passwords when we don't need to
if not self.options.listhosts and not self.options.listtasks and not self.options.listtags and not self.options.syntax:
self.normalize_become_options()
(sshpass, becomepass) = self.ask_passwords()
passwords = { 'conn_pass': sshpass, 'become_pass': becomepass }
loader = DataLoader()
if self.options.vault_password_file:
# read vault_pass from a file
vault_pass = CLI.read_vault_password_file(self.options.vault_password_file, loader=loader)
loader.set_vault_password(vault_pass)
elif self.options.ask_vault_pass:
vault_pass = self.ask_vault_passwords()[0]
loader.set_vault_password(vault_pass)
# initial error check, to make sure all specified playbooks are accessible
# before we start running anything through the playbook executor
for playbook in self.args:
if not os.path.exists(playbook):
raise AnsibleError("the playbook: %s could not be found" % playbook)
if not (os.path.isfile(playbook) or stat.S_ISFIFO(os.stat(playbook).st_mode)):
raise AnsibleError("the playbook: %s does not appear to be a file" % playbook)
# create the variable manager, which will be shared throughout
# the code, ensuring a consistent view of global variables
variable_manager = VariableManager()
variable_manager.extra_vars = load_extra_vars(loader=loader, options=self.options)
variable_manager.options_vars = load_options_vars(self.options)
# create the inventory, and filter it based on the subset specified (if any)
inventory = Inventory(loader=loader, variable_manager=variable_manager, host_list=self.options.inventory)
variable_manager.set_inventory(inventory)
        # Note: slightly wrong, this is written so that implicit localhost
        # (which is not returned in list_hosts()) is taken into account for
# warning if inventory is empty. But it can't be taken into account for
# checking if limit doesn't match any hosts. Instead we don't worry about
# limit if only implicit localhost was in inventory to start with.
#
# Fix this when we rewrite inventory by making localhost a real host (and thus show up in list_hosts())
no_hosts = False
if len(inventory.list_hosts()) == 0:
# Empty inventory
display.warning("provided hosts list is empty, only localhost is available")
no_hosts = True
inventory.subset(self.options.subset)
if len(inventory.list_hosts()) == 0 and no_hosts is False:
# Invalid limit
raise AnsibleError("Specified --limit does not match any hosts")
# create the playbook executor, which manages running the plays via a task queue manager
pbex = PlaybookExecutor(playbooks=self.args, inventory=inventory, variable_manager=variable_manager, loader=loader, options=self.options, passwords=passwords)
results = pbex.run()
if isinstance(results, list):
for p in results:
display.display('\nplaybook: %s' % p['playbook'])
for idx, play in enumerate(p['plays']):
msg = "\n play #%d (%s): %s" % (idx + 1, ','.join(play.hosts), play.name)
mytags = set(play.tags)
msg += '\tTAGS: [%s]' % (','.join(mytags))
if self.options.listhosts:
playhosts = set(inventory.get_hosts(play.hosts))
msg += "\n pattern: %s\n hosts (%d):" % (play.hosts, len(playhosts))
for host in playhosts:
msg += "\n %s" % host
display.display(msg)
all_tags = set()
if self.options.listtags or self.options.listtasks:
taskmsg = ''
if self.options.listtasks:
taskmsg = ' tasks:\n'
def _process_block(b):
taskmsg = ''
for task in b.block:
if isinstance(task, Block):
taskmsg += _process_block(task)
else:
if task.action == 'meta':
continue
all_tags.update(task.tags)
if self.options.listtasks:
cur_tags = list(mytags.union(set(task.tags)))
cur_tags.sort()
if task.name:
taskmsg += " %s" % task.get_name()
else:
taskmsg += " %s" % task.action
taskmsg += "\tTAGS: [%s]\n" % ', '.join(cur_tags)
return taskmsg
all_vars = variable_manager.get_vars(loader=loader, play=play)
play_context = PlayContext(play=play, options=self.options)
for block in play.compile():
block = block.filter_tagged_tasks(play_context, all_vars)
if not block.has_tasks():
continue
taskmsg += _process_block(block)
if self.options.listtags:
cur_tags = list(mytags.union(all_tags))
cur_tags.sort()
taskmsg += " TASK TAGS: [%s]\n" % ', '.join(cur_tags)
display.display(taskmsg)
return 0
else:
return results
| gpl-3.0 |
dimagi/rapidsms-contrib-apps-dev | handlers/tests.py | 1 | 1655 | #!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
from nose.tools import assert_equal
from rapidsms.conf import settings
from .utils import get_handlers
def test_get_handlers():
# store current settings.
_settings = (
settings.INSTALLED_APPS,
settings.INSTALLED_HANDLERS,
settings.EXCLUDED_HANDLERS)
# clear current settings, to test in a predictable environment.
settings.INSTALLED_APPS = []
settings.INSTALLED_HANDLERS = None
settings.EXCLUDED_HANDLERS = None
assert_equal(get_handlers(), [])
# this crappy test depends upon the ``echo`` contrib app, which
# defines exactly two handlers. i don't have a cleaner solution.
settings.INSTALLED_APPS = ['rapidsms.contrib.echo']
from rapidsms.contrib.echo.handlers.echo import EchoHandler
from rapidsms.contrib.echo.handlers.ping import PingHandler
# check that both handlers were found as default
assert_equal(get_handlers(), [EchoHandler, PingHandler])
# exclude no handlers, explicitly include a single handler
settings.INSTALLED_HANDLERS = ['rapidsms.contrib.echo.handlers.ping']
assert_equal(get_handlers(), [PingHandler])
settings.INSTALLED_HANDLERS = []
# exclude a single handler
settings.EXCLUDED_HANDLERS = ['rapidsms.contrib.echo.handlers.ping']
assert_equal(get_handlers(), [EchoHandler])
# exclude all handlers from the echo app
settings.EXCLUDED_HANDLERS = ['rapidsms.contrib.echo']
assert_equal(get_handlers(), [])
# restore pre-test settings.
    (settings.INSTALLED_APPS,
     settings.INSTALLED_HANDLERS,
     settings.EXCLUDED_HANDLERS) = _settings
| bsd-3-clause |
martatolos/DemandAnalysis | visualizations.py | 1 | 5518 | # Plots
import matplotlib.pyplot as plt
import matplotlib
import numpy as np
import re
import random
def plot_several_countries(df, ylabel, title, country_list="", save=False, num="", xticks_hourly=False, kind='bar', linestyle='-', color='mbygcr', marker='o', linewidth=4.0, fontsize=16, legend=True):
"""
This function plots a dataframe with several countries
@param df: data frame
@param ylabel: label for y axis
@param title: graphic title
@param kind: graphic type ex: bar or line
@param linestyle: lines style
@param color: color to use
@param marker: shape of point on a line
@param linewidth: line width
@param fontsize: font size
@return: n/a
"""
# Plotting
font = {'family' : 'normal',
'weight' : 'bold',
'size' : 12}
matplotlib.rc('font', **font)
if xticks_hourly:
xticks_hourly = range(0,24)
else:
xticks_hourly = None
### PLOT FINAL
if kind == 'line':
graphic = df.plot(title=title, kind=kind, fontsize=fontsize, linestyle=linestyle, color=color,
linewidth=linewidth, marker=marker, xticks=xticks_hourly, figsize=(18,9))
else:
graphic = df.plot(title=title, kind=kind, fontsize=fontsize, color=color,
xticks=xticks_hourly, figsize=(18,9))
if legend == False:
graphic.legend_.remove()
graphic.set_ylabel(ylabel)
graphic.legend(prop={'size': 12})
if save==True and country_list!="":
namefile= re.sub("[\'\",\[\]]", "", str(country_list))
namefile= re.sub("[\s+]", "-", namefile)
if num=="":
num = random.randrange(1,100)
plt.savefig(namefile+str(num))
else:
plt.show()
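# Usage sketch (the dataframe below is made-up example data):
#
#   import pandas as pd
#   df = pd.DataFrame({'ES': [245, 250], 'DE': [530, 525]}, index=[2013, 2014])
#   plot_several_countries(df, 'GWh', 'Yearly consumption', kind='line')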
def plot_yearly_consumption(df, country, kind='bar', linestyle='-', color='blue', marker='o', linewidth=4.0,fontsize=16):
"""
This function plots the yearly data from a monthlypowerconsumptions data frame
@param df: monthlypowerconsumptions data frame
    @param country: country name to add on the title of the plot
@return: n/a
"""
# Plotting
font = {'family' : 'normal',
'weight' : 'bold',
'size' : 12}
matplotlib.rc('font', **font)
### PLOT FINAL
if kind == 'line':
graphic = df.plot(x='year', y='Sum', title='Evolution of electricity consumption in '+ country, kind=kind, fontsize=fontsize, linestyle=linestyle, color=color , marker=marker)
else:
graphic = df.plot(x='year', y='Sum', title='Evolution of electricity consumption in '+ country, kind=kind, fontsize=fontsize, color=color)
graphic.set_ylabel('GWh')
plt.show()
def plot_monthly_average_consumption(mpc, country_list, ylabel='normalized', title='', kind='bar', linestyle='-', color='mbygcr', marker='o', linewidth=4.0, fontsize=16, legend=True):
"""
    This function plots the monthly average consumption from a monthlypowerconsumptions object
    @param mpc: monthlypowerconsumptions object
@param country_list: country names to add on the title of the plot
@param ylabel: label for y axis
@param title: graphic title
@param kind: graphic type ex: bar or line
@param linestyle: lines style
@param color: color to use
@param marker: shape of point on a line
@param linewidth: line width
@param fontsize: font size
@return: n/a
"""
# Plotting
font = {'family' : 'normal',
'weight' : 'bold',
'size' : 12}
matplotlib.rc('font', **font)
df = mpc.data_normalization(year=False)
df = df.groupby('country').mean()
del df['year']
del df['Sum']
df = df.T
plot_several_countries(df[country_list], ylabel, title, kind=kind, linestyle=linestyle, color=color, marker=marker, linewidth=linewidth, fontsize=fontsize, legend=legend)
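# Usage sketch ("mpc" is assumed to be a monthlypowerconsumptions object
# exposing data_normalization(), as used above):
#
#   plot_monthly_average_consumption(mpc, ['ES', 'DE'],
#                                    title='Average monthly profile')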
def plot_average_week(df, ylabel='Normalized', title="Normalized average weekday consumption", kind='bar', color='rbbbbgg', rotation=50, legend=True):
# Plotting
"""
@param df: Data frame with the values to plot
@param ylabel: Label for the y axis
@param title: Title for the graphic
@param kind: Type of graphic: bar, line,...
@param color: color values
@param rotation: degrees for the ylabel rotation
@param legend: True or False legend on or off
"""
font = {'family' : 'normal',
'weight' : 'bold',
'size' : 12}
matplotlib.rc('font', **font)
#create a dictionary for the week days
    dayDict={0:'Monday', 1:'Tuesday', 2:'Wednesday', 3:'Thursday', 4:'Friday', 5:'Saturday', 6:'Sunday'}
df = df[['Country', 'weekday', 'daily']]
df = df.pivot(index='weekday', columns='Country')
df = df.rename(index=dayDict)
df.columns = df.columns.droplevel()
# normalized
df = df/df.mean()
graphic = df.plot(title=title, kind=kind, color=color, legend=legend)
graphic.set_ylabel(ylabel)
graphic.legend(prop={'size': 12})
plt.xticks(rotation=rotation)
plt.show()
# #### PLOT FINAL
# # Plot the infaltion with the spanish consumption
# ES_info_year = ES_info[['year','Sum','inflation']]
# ES_info_year.set_index('year')
# plt.figure()
# ax = ES_info_year.plot(x='year', title='Consumption and Inflation in Spain', y='Sum', kind='bar',fontsize=16)
# ax.set_ylabel('Consumption - GWh')
# ax2 = ax.twinx()
# ax2.plot(ES_info_year['inflation'].values, linestyle='-', color='red', marker='o', linewidth=4.0)
# ax2.set_ylabel('Inflation - Annual Change [%]')
# plt.show() | gpl-2.0 |
MrSenko/Nitrate | tcms/testruns/urls/run_urls.py | 1 | 1045 | # -*- coding: utf-8 -*-
from django.conf.urls import url, patterns
from tcms.testruns.views import TestRunReportView
from tcms.testruns.views import AddCasesToRunView
urlpatterns = patterns(
'tcms.testruns.views',
url(r'^new/$', 'new'),
url(r'^(?P<run_id>\d+)/$', 'get'),
url(r'^(?P<run_id>\d+)/clone/$', 'new_run_with_caseruns'),
url(r'^(?P<run_id>\d+)/delete/$', 'delete'),
url(r'^(?P<run_id>\d+)/edit/$', 'edit'),
url(r'^(?P<run_id>\d+)/report/$', TestRunReportView.as_view(),
name='run-report'),
url(r'^(?P<run_id>\d+)/ordercase/$', 'order_case'),
url(r'^(?P<run_id>\d+)/changestatus/$', 'change_status'),
url(r'^(?P<run_id>\d+)/ordercaserun/$', 'order_case'),
url(r'^(?P<run_id>\d+)/removecaserun/$', 'remove_case_run'),
url(r'^(?P<run_id>\d+)/assigncase/$', AddCasesToRunView.as_view(),
name='add-cases-to-run'),
url(r'^(?P<run_id>\d+)/cc/$', 'cc'),
url(r'^(?P<run_id>\d+)/update/$', 'update_case_run_text'),
url(r'^(?P<run_id>\d+)/export/$', 'export'),
)
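# Resolution sketch (the run id is hypothetical): the named routes above can
# be reversed with Django's reverse(), e.g.
#
#   from django.core.urlresolvers import reverse
#   reverse('run-report', args=[101])  # -> '.../101/report/'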
| gpl-2.0 |
Shaps/ansible | test/units/module_utils/common/test_collections.py | 39 | 5024 | # -*- coding: utf-8 -*-
# Copyright (c) 2018–2019, Sviatoslav Sydorenko <[email protected]>
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
"""Test low-level utility functions from ``module_utils.common.collections``."""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from ansible.module_utils.six import Iterator
from ansible.module_utils.common._collections_compat import Sequence
from ansible.module_utils.common.collections import ImmutableDict, is_iterable, is_sequence
class SeqStub:
"""Stub emulating a sequence type.
>>> from collections.abc import Sequence
>>> assert issubclass(SeqStub, Sequence)
>>> assert isinstance(SeqStub(), Sequence)
"""
Sequence.register(SeqStub)
class IteratorStub(Iterator):
def __next__(self):
raise StopIteration
class IterableStub:
def __iter__(self):
return IteratorStub()
TEST_STRINGS = u'he', u'Україна', u'Česká republika'
TEST_STRINGS = TEST_STRINGS + tuple(s.encode('utf-8') for s in TEST_STRINGS)
TEST_ITEMS_NON_SEQUENCES = (
{}, object(), frozenset(),
4, 0.,
) + TEST_STRINGS
TEST_ITEMS_SEQUENCES = (
[], (),
SeqStub(),
)
TEST_ITEMS_SEQUENCES = TEST_ITEMS_SEQUENCES + (
# Iterable effectively containing nested random data:
TEST_ITEMS_NON_SEQUENCES,
)
@pytest.mark.parametrize('sequence_input', TEST_ITEMS_SEQUENCES)
def test_sequence_positive(sequence_input):
"""Test that non-string item sequences are identified correctly."""
assert is_sequence(sequence_input)
assert is_sequence(sequence_input, include_strings=False)
@pytest.mark.parametrize('non_sequence_input', TEST_ITEMS_NON_SEQUENCES)
def test_sequence_negative(non_sequence_input):
"""Test that non-sequences are identified correctly."""
assert not is_sequence(non_sequence_input)
@pytest.mark.parametrize('string_input', TEST_STRINGS)
def test_sequence_string_types_with_strings(string_input):
"""Test that ``is_sequence`` can separate string and non-string."""
assert is_sequence(string_input, include_strings=True)
@pytest.mark.parametrize('string_input', TEST_STRINGS)
def test_sequence_string_types_without_strings(string_input):
"""Test that ``is_sequence`` can separate string and non-string."""
assert not is_sequence(string_input, include_strings=False)
@pytest.mark.parametrize(
'seq',
([], (), {}, set(), frozenset(), IterableStub()),
)
def test_iterable_positive(seq):
assert is_iterable(seq)
@pytest.mark.parametrize(
'seq', (IteratorStub(), object(), 5, 9.)
)
def test_iterable_negative(seq):
assert not is_iterable(seq)
@pytest.mark.parametrize('string_input', TEST_STRINGS)
def test_iterable_including_strings(string_input):
assert is_iterable(string_input, include_strings=True)
@pytest.mark.parametrize('string_input', TEST_STRINGS)
def test_iterable_excluding_strings(string_input):
assert not is_iterable(string_input, include_strings=False)
class TestImmutableDict:
def test_scalar(self):
imdict = ImmutableDict({1: 2})
assert imdict[1] == 2
def test_string(self):
imdict = ImmutableDict({u'café': u'くらとみ'})
assert imdict[u'café'] == u'くらとみ'
def test_container(self):
imdict = ImmutableDict({(1, 2): ['1', '2']})
assert imdict[(1, 2)] == ['1', '2']
def test_from_tuples(self):
imdict = ImmutableDict((('a', 1), ('b', 2)))
assert frozenset(imdict.items()) == frozenset((('a', 1), ('b', 2)))
def test_from_kwargs(self):
imdict = ImmutableDict(a=1, b=2)
assert frozenset(imdict.items()) == frozenset((('a', 1), ('b', 2)))
def test_immutable(self):
imdict = ImmutableDict({1: 2})
expected_reason = r"^'ImmutableDict' object does not support item assignment$"
with pytest.raises(TypeError, match=expected_reason):
imdict[1] = 3
with pytest.raises(TypeError, match=expected_reason):
imdict[5] = 3
def test_hashable(self):
# ImmutableDict is hashable when all of its values are hashable
imdict = ImmutableDict({u'café': u'くらとみ'})
assert hash(imdict)
def test_nonhashable(self):
# ImmutableDict is unhashable when one of its values is unhashable
imdict = ImmutableDict({u'café': u'くらとみ', 1: [1, 2]})
expected_reason = r"^unhashable type: 'list'$"
with pytest.raises(TypeError, match=expected_reason):
hash(imdict)
def test_len(self):
imdict = ImmutableDict({1: 2, 'a': 'b'})
assert len(imdict) == 2
def test_repr(self):
initial_data = {1: 2, 'a': 'b'}
initial_data_repr = repr(initial_data)
imdict = ImmutableDict(initial_data)
actual_repr = repr(imdict)
expected_repr = "ImmutableDict({0})".format(initial_data_repr)
assert actual_repr == expected_repr
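    def test_mapping_get(self):
        # Sketch assuming dict-style get() semantics inherited from the
        # Mapping ABC (not exercised elsewhere in this file)
        imdict = ImmutableDict({1: 2})
        assert imdict.get(1) == 2
        assert imdict.get(3, 'default') == 'default'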
| gpl-3.0 |
gqwest-erp/server | openerp/addons/purchase_analytic_plans/__init__.py | 441 | 1220 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#----------------------------------------------------------
# Init Sales
#----------------------------------------------------------
import purchase_analytic_plans
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
mxia/engine | build/android/gyp/create_java_binary_script.py | 26 | 2379 | #!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Creates a simple script to run a java "binary".
This creates a script that sets up the java command line for running a java
jar. This includes correctly setting the classpath and the main class.
"""
import optparse
import os
import sys
from util import build_utils
# The java command must be executed in the current directory because there may
# be user-supplied paths in the args. The script receives the classpath relative
# to the directory that the script is written in and then, when run, must
# recalculate the paths relative to the current directory.
script_template = """\
#!/usr/bin/env python
#
# This file was generated by build/android/gyp/create_java_binary_script.py
import os
import sys
self_dir = os.path.dirname(__file__)
classpath = [{classpath}]
if os.getcwd() != self_dir:
offset = os.path.relpath(self_dir, os.getcwd())
classpath = [os.path.join(offset, p) for p in classpath]
java_args = [
"java",
"-classpath", ":".join(classpath),
"-enableassertions",
\"{main_class}\"] + sys.argv[1:]
os.execvp("java", java_args)
"""
def main(argv):
argv = build_utils.ExpandFileArgs(argv)
parser = optparse.OptionParser()
build_utils.AddDepfileOption(parser)
parser.add_option('--output', help='Output path for executable script.')
parser.add_option('--jar-path', help='Path to the main jar.')
parser.add_option('--main-class',
help='Name of the java class with the "main" entry point.')
parser.add_option('--classpath', action='append',
help='Classpath for running the jar.')
options, _ = parser.parse_args(argv)
classpath = [options.jar_path]
for cp_arg in options.classpath:
classpath += build_utils.ParseGypList(cp_arg)
run_dir = os.path.dirname(options.output)
classpath = [os.path.relpath(p, run_dir) for p in classpath]
with open(options.output, 'w') as script:
script.write(script_template.format(
classpath=('"%s"' % '", "'.join(classpath)),
main_class=options.main_class))
os.chmod(options.output, 0750)
if options.depfile:
build_utils.WriteDepfile(
options.depfile,
build_utils.GetPythonDependencies())
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| bsd-3-clause |