blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
09624c254fd814d60f06f3e974c3b11e8a555ac6 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /MxNcFpABB68JCxSwA_11.py | 9d14b1e736cb8251be6b631017bdd3a50318fe68 | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 102 | py |
def legendre(p, n):
    """Return the exponent of the prime p in the factorization of n!
    (Legendre's formula): sum of floor(n / p**k) for all k with p**k <= n.
    """
    total = 0
    power = p
    while power <= n:
        total += n // power
        power *= p
    return total
| [
"[email protected]"
] | |
7efa9975210f4ac91ff681aafebfb83b62f65fe5 | 79359f4814c53a09460cd7c257d17901ba665f7b | /adress/models.py | eab690f225d2bd8ddbf10507db3f22156be3d793 | [] | no_license | idelfrides/API_django_REST_framework | 77e784bc8d5fd0e282273bb401acae3e7c2dc6e3 | 06429c9e394a40925850504f6fe233296b0d303a | refs/heads/master | 2020-08-12T22:59:12.775204 | 2019-10-13T17:11:00 | 2019-10-13T17:11:00 | 214,859,685 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 545 | py | from django.db import models
class Adress(models.Model):
    """Postal address record.

    The class keeps the original 'Adress' spelling since other modules
    (admin, migrations, foreign keys) may reference it by name.
    """

    # First street line is required; the second is optional.
    line1 = models.CharField(max_length=150)
    line2 = models.CharField(max_length=150, null=True, blank=True)
    city = models.CharField(max_length=50)
    state = models.CharField(max_length=50)
    country = models.CharField(max_length=50)
    # NOTE(review): IntegerField cannot hold fractional coordinates —
    # presumably FloatField/DecimalField was intended; confirm before
    # relying on geographic precision.
    latitude = models.IntegerField(null=True, blank=True)
    longitude = models.IntegerField(null=True, blank=True)
    # auto_now=True: refreshed on every save(), not only at creation.
    update = models.DateTimeField(auto_now=True, auto_now_add=False)

    def __str__(self):
        # Human-readable representation used in admin/listings.
        return self.line1
| [
"[email protected]"
] | |
bf49b30027e8e76c3e2969b93abaf9b1c89d9e40 | beb4d7c16ea8d8da9747b94298891cf01d9466f6 | /users/forms.py | 8e58828a265c591a8f885ad7435e82918ab2af63 | [] | no_license | Chirag-Django/nonstopio_assignment | 48985445a19f8d01c1f0565e8058cd032942d903 | b1f1561e841857ea64d9a5170974704a347cc0e3 | refs/heads/master | 2023-03-05T07:05:49.963018 | 2021-02-20T18:35:29 | 2021-02-20T18:35:29 | 340,347,626 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 429 | py | from .models import Profile
from django import forms
from django.contrib.auth.models import User
class UserForm(forms.ModelForm):
    """ModelForm over django.contrib.auth User exposing only credentials.

    NOTE(review): nothing here hashes the password — confirm the view
    calls set_password() before saving.
    """

    class Meta:
        model = User
        fields = ('username','password')
        widgets = {
            # Mask the password while it is typed.
            'password': forms.PasswordInput(),
        }
class ProfileForm(forms.ModelForm):
    """ModelForm for Profile (age + address).

    `age` is redeclared as a plain IntegerField, overriding whatever
    form field the model's own `age` field would generate.
    """

    age = forms.IntegerField()

    class Meta:
        model = Profile
        fields = ('age', 'address')
"[email protected]"
] | |
2b629008ebebe1c82a9798ac82abd5a380f666de | 8d02b867eaa5d7aedb80ae31cec5dfe7b0201d1f | /Ch_06 - Functions/ants_solution.py | c1117f5a2df92b0bc9247374d9ddcc7f38d2443a | [] | no_license | capncrockett/beedle_book | df17f632990edf4dfae82ccedb5f8d2d07385c00 | d65315ddff20fb0ef666c610dbe4634dff0a621a | refs/heads/main | 2023-07-23T08:33:17.275029 | 2021-09-01T02:47:08 | 2021-09-01T02:47:08 | 401,894,762 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,193 | py | # c06ex02.py
# Prints lyrics for "The Ants Go Marching".
def verse(number, action):
    """Print one full verse: three marching lines, the little one's
    action line, then the shared refrain."""
    marching = march(number)
    for _ in range(2):
        print(marching, hurrah())
    print(marching)
    print(littleOne(action))
    refrain()
def march(number):
    """Return the 'ants go marching N by N' line for the given count word."""
    return "The ants go marching {0} by {0},".format(number)
def hurrah():
    """Return the repeated cheer appended to each marching line."""
    return " ".join(["hurrah!"] * 2)
def littleOne(action):
    """Return the line describing what the little one stops to do."""
    return "The little one stops to {},".format(action)
def refrain():
    """Print the refrain shared by every verse."""
    for line in ("And they all go marching down...",
                 "In the ground...",
                 "To get out...",
                 "Of the rain."):
        print(line)
    print("Boom! " * 3)
def main():
    """Print every verse of the song, then wait for Enter before exiting."""
    verses = [
        ("one", "suck his thumb"),
        ("two", "tie his shoe"),
        ("three", "climb a tree"),
        ("four", "shut the door"),
        ("five", "take a dive"),
        ("six", "pick up sticks"),
        ("seven", "talk to Kevin"),
        ("eight", "jump the gate"),
        ("nine", "swing on a vine"),
        ("ten", "say 'The End'"),
    ]
    for number, action in verses:
        verse(number, action)
        print()
    input("Press <Enter> to Quit")
main()
| [
"[email protected]"
] | |
4e083f58b4e80f2947c7cd47ac00d60a37371e07 | 8be2df0c4508cc5254887b8cccb044032aea5c21 | /interview/first/tests/test_first.py | 97b686ec7178ed3528336ed4a0d8dfa6763ad96e | [] | no_license | ezhk/python-learning | 2d3dad2190ac9ce9299534f0f303e8b76a8eeab2 | 424ec9ca08541273f9ec39ff25f75a3b78d9dcb7 | refs/heads/master | 2023-01-05T16:50:08.829169 | 2020-06-02T18:03:05 | 2020-06-02T18:03:05 | 165,482,083 | 0 | 1 | null | 2023-01-04T04:59:43 | 2019-01-13T08:21:44 | Python | UTF-8 | Python | false | false | 572 | py | #!/usr/bin/env python
import sys
import unittest
sys.path.append(".")
from first import *
class TestFirst(unittest.TestCase):
def test_multiplication_table(self):
result = """- -- -- -- -- -- -- -- -- --
1 2 3 4 5 6 7 8 9 10
2 4 6 8 10 12 14 16 18 20
3 6 9 12 15 18 21 24 27 30
4 8 12 16 20 24 28 32 36 40
5 10 15 20 25 30 35 40 45 50
- -- -- -- -- -- -- -- -- --"""
self.assertEqual(multiplication_table(10, 5), result)
if __name__ == "__main__":
    # Allow running this test module directly: python test_first.py
    unittest.main()
| [
"[email protected]"
] | |
6db4923a1304a510d7f7b79194c89fae3d3e433d | f7b05ca511d923822ae8519de4c3f35b24a76f5f | /stubs/beancount/core/data.pyi | 58f15af63d5704d692508807380a0e2130b72883 | [
"MIT"
] | permissive | iEverX/fava | 50d4c1214afbc80a01c60841ecd33bc366d2b44b | 2c5508038b886b42e13648e3fb8a50bf9ac484cf | refs/heads/main | 2023-04-26T23:32:10.516227 | 2021-05-23T07:08:45 | 2021-05-23T07:08:45 | 369,947,837 | 0 | 0 | MIT | 2021-05-23T02:52:55 | 2021-05-23T02:52:54 | null | UTF-8 | Python | false | false | 3,760 | pyi | # pylint: disable=all
# flake8: noqa
import datetime
import enum
from typing import Any
from typing import Dict
from typing import FrozenSet
from typing import List
from typing import NamedTuple
from typing import Optional
from typing import Set
from typing import Tuple
from typing import Type
from typing import Union
from beancount.core.amount import Amount
from beancount.core.number import Decimal
from beancount.core.number import MISSING
from beancount.core.position import Cost
from beancount.core.position import CostSpec
# ---------------------------------------------------------------------------
# Type aliases used throughout the beancount core data model.
# ---------------------------------------------------------------------------
Account = str
Currency = str
Flag = str
Meta = Dict[str, Any]
Tags = Union[Set[str], FrozenSet[str]]
Links = Tags
EMPTY_SET: Any

class Booking(enum.Enum):
    # Inventory booking methods used when reducing lots.
    STRICT: str = ...
    NONE: str = ...
    AVERAGE: str = ...
    FIFO: str = ...
    LIFO: str = ...

# ---------------------------------------------------------------------------
# Directive types. Each is an immutable NamedTuple carrying its source
# metadata dict and the date it applies to.
# ---------------------------------------------------------------------------
class Close(NamedTuple):
    meta: Meta
    date: datetime.date
    account: Account

class Commodity(NamedTuple):
    meta: Meta
    date: datetime.date
    currency: Currency

class Open(NamedTuple):
    meta: Meta
    date: datetime.date
    account: Account
    currencies: List[Currency]
    booking: Booking

class Pad(NamedTuple):
    meta: Meta
    date: datetime.date
    account: Account
    source_account: Account

class Balance(NamedTuple):
    meta: Meta
    date: datetime.date
    account: Account
    amount: Amount
    tolerance: Optional[Decimal]
    diff_amount: Optional[Decimal]

class Posting(NamedTuple):
    # A single leg of a Transaction; `units` may be MISSING before
    # interpolation.
    account: Account
    units: Union[Amount, Type[MISSING]]
    cost: Optional[Union[Cost, CostSpec]]
    price: Optional[Amount]
    flag: Optional[Flag]
    meta: Optional[Meta]

class Transaction(NamedTuple):
    meta: Meta
    date: datetime.date
    flag: Flag
    payee: Optional[str]
    narration: str
    tags: Tags
    links: Links
    postings: List[Posting]

class TxnPosting(NamedTuple):
    # A (transaction, posting) pair, convenient for posting-level iteration.
    txn: Transaction
    posting: Posting

class Note(NamedTuple):
    meta: Meta
    date: datetime.date
    account: Account
    comment: str

class Event(NamedTuple):
    meta: Meta
    date: datetime.date
    type: str
    description: str

class Query(NamedTuple):
    meta: Meta
    date: datetime.date
    name: str
    query_string: str

class Price(NamedTuple):
    meta: Meta
    date: datetime.date
    currency: Currency
    amount: Amount

class Document(NamedTuple):
    meta: Meta
    date: datetime.date
    account: Account
    filename: str
    tags: Optional[Tags]
    links: Optional[Links]

class Custom(NamedTuple):
    meta: Meta
    date: datetime.date
    type: str
    values: List

# ALL_DIRECTIVES: Any
# Union of every directive type; Entries is the parsed-ledger list type.
Directive = Union[
    Open,
    Close,
    Commodity,
    Pad,
    Balance,
    Transaction,
    Note,
    Event,
    Query,
    Price,
    Document,
    Custom,
]
Entries = List[Directive]

# ---------------------------------------------------------------------------
# Function stubs mirroring beancount.core.data's public helpers.
# ---------------------------------------------------------------------------
def new_metadata(filename: Any, lineno: Any, kvlist: Optional[Any] = ...): ...
def create_simple_posting(
    entry: Any, account: Any, number: Any, currency: Any
): ...
def create_simple_posting_with_cost(
    entry: Any,
    account: Any,
    number: Any,
    currency: Any,
    cost_number: Any,
    cost_currency: Any,
): ...

NoneType: Any

def sanity_check_types(
    entry: Any, allow_none_for_tags_and_links: bool = ...
) -> None: ...
def posting_has_conversion(posting: Any): ...
def transaction_has_conversion(transaction: Any): ...
def get_entry(posting_or_entry: Any): ...

SORT_ORDER: Any

def entry_sortkey(entry: Any): ...
# NOTE: `sorted` deliberately shadows the builtin, matching the runtime module.
def sorted(entries: Any): ...
def posting_sortkey(entry: Any): ...
def filter_txns(entries: Any) -> None: ...
def has_entry_account_component(entry: Any, component: Any): ...
def find_closest(entries: Any, filename: Any, lineno: Any): ...
def remove_account_postings(account: Any, entries: Any): ...
def iter_entry_dates(entries: Any, date_begin: Any, date_end: Any): ...
| [
"[email protected]"
] | |
429bb3f927f5e101a0070ec3957c352f1b2b81ec | d308fffe3db53b034132fb1ea6242a509f966630 | /pirates/chat/PChatInputTyped.py | f5bd65337bfac2d93587b1771aa561f8438e1070 | [
"BSD-3-Clause"
] | permissive | rasheelprogrammer/pirates | 83caac204965b77a1b9c630426588faa01a13391 | 6ca1e7d571c670b0d976f65e608235707b5737e3 | refs/heads/master | 2020-03-18T20:03:28.687123 | 2018-05-28T18:05:25 | 2018-05-28T18:05:25 | 135,193,362 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 12,284 | py | # uncompyle6 version 3.2.0
# Python bytecode 2.4 (62061)
# Decompiled from: Python 2.7.14 (v2.7.14:84471935ed, Sep 16 2017, 20:19:30) [MSC v.1500 32 bit (Intel)]
# Embedded file name: pirates.chat.PChatInputTyped
from direct.fsm import FSM
from otp.otpbase import OTPGlobals
import sys
from direct.directnotify import DirectNotifyGlobal
from direct.gui.DirectGui import *
from pandac.PandaModules import *
from otp.otpbase import OTPLocalizer
class PChatInputTyped(FSM.FSM, DirectEntry):
    """Typed-chat input field (Python 2 / Panda3D, decompiled source).

    Combines an FSM (which chat channel the text goes to) with a
    DirectEntry widget. Implements a "sliding window" so text longer than
    the visible entry is kept in savedStringLeft/savedStringRight, plus a
    command history navigated with ctrl+up/down.

    NOTE(review): this file is decompiler output; some nesting below was
    reconstructed from the bytecode-style `else:`-ladders — verify against
    the original before refactoring.
    """
    __module__ = __name__
    notify = DirectNotifyGlobal.directNotify.newCategory('PChatInputTyped')
    # Shared namespace for the developer `>`-prefixed exec/eval console.
    ExecNamespace = None

    def __init__(self, parent=None, **kw):
        FSM.FSM.__init__(self, 'PChatInputTyped')
        # Widget configuration; 'command' fires sendChat on Enter.
        optiondefs = (('parent', parent, None), ('relief', DGG.SUNKEN, None), ('scale', 0.03, None), ('frameSize', (-0.2, 25.3, -0.5, 1.2), None), ('borderWidth', (0.1, 0.1), None), ('frameColor', (0.9, 0.9, 0.85, 0.8), None), ('entryFont', OTPGlobals.getInterfaceFont(), None), ('width', 25, None), ('numLines', 1, None), ('cursorKeys', 1, None), ('backgroundFocus', 0, None), ('suppressKeys', 1, None), ('suppressMouse', 1, None), ('command', self.sendChat, None), ('focus', 0, None), ('text', '', None))
        self.defineoptions(kw, optiondefs)
        DirectEntry.__init__(self, parent=parent, **kw)
        self.initialiseoptions(PChatInputTyped)
        self.whisperId = None
        self.bind(DGG.OVERFLOW, self.chatOverflow)
        self.bind(DGG.ERASE, self.chatErased)
        # History is enabled for devs, GMs, magic-word users, or via config.
        wantHistory = 0
        if __dev__:
            wantHistory = 1
        self.wantHistory = __dev__ or base.config.GetBool('exec-chat', 0) or localAvatar.isGM() or base.cr.wantMagicWords or base.config.GetBool('want-chat-history', wantHistory)
        self.history = [
            '']
        self.historySize = base.config.GetInt('chat-history-size', 10)
        self.historyIndex = 0
        # Sliding-window state: overflow text stored left/right of the view.
        self.wantSlidingWindow = base.config.GetBool('want-sliding-chat', 1)
        self.maxSavedLength = 100
        self.slideDistance = 10
        self.savedStringLeft = ''
        self.savedStringRight = ''
        self.fillToLength = 0
        return

    def delete(self):
        # Drop history hotkeys; other accepts are released in deactivate().
        self.ignore('uber-control-arrow_up')
        self.ignore('uber-control-arrow_down')

    def requestMode(self, mode, *args):
        # Thin wrapper over FSM.request.
        self.request(mode, *args)

    def defaultFilter(self, request, *args):
        # Veto a state change when the player is not allowed to use that
        # chat channel; fall through to the base filter otherwise.
        if request == 'AllChat':
            if not base.talkAssistant.checkOpenTypedChat():
                messenger.send('Chat-Failed open typed chat test')
                return
        else:
            if request == 'PlayerWhisper':
                whisperId = args[0][0]
                if not base.talkAssistant.checkWhisperTypedChatPlayer(whisperId):
                    messenger.send('Chat-Failed player typed chat test')
                    return
            else:
                if request == 'AvatarWhisper':
                    whisperId = args[0][0]
                    if not base.talkAssistant.checkWhisperTypedChatAvatar(whisperId):
                        messenger.send('Chat-Failed avatar typed chat test')
                        return
        return FSM.FSM.defaultFilter(self, request, *args)

    # --- FSM state handlers ------------------------------------------------
    def enterOff(self):
        self.deactivate()

    def exitOff(self):
        self.activate()

    def enterAllChat(self):
        self['focus'] = 1
        self.show()

    def exitAllChat(self):
        pass

    def enterGuildChat(self):
        self['focus'] = 1
        self.show()

    def exitGuildChat(self):
        pass

    def enterShipPVPChat(self):
        self['focus'] = 1
        self.show()

    def exitShipPVPChat(self):
        pass

    def enterCrewChat(self):
        self['focus'] = 1
        self.show()

    def exitCrewChat(self):
        pass

    def enterPlayerWhisper(self, whisperId):
        # Stash current text so it can be restored on exit.
        self.tempText = self.get()
        self.activate()
        self.whisperId = whisperId

    def exitPlayerWhisper(self):
        self.set(self.tempText)
        self.whisperId = None
        return

    def enterAvatarWhisper(self, whisperId):
        self.tempText = self.get()
        self.activate()
        self.whisperId = whisperId

    def exitAvatarWhisper(self):
        self.set(self.tempText)
        self.whisperId = None
        return

    # --- activation / key bindings -----------------------------------------
    def activate(self):
        # Clear, focus, and hook escape / history / sliding-window keys.
        self.set('')
        self['focus'] = 1
        self.accept('uber-escape', self.handleEscape)
        if self.wantHistory:
            self.accept('uber-control-arrow_up', self.setPrevHistory)
            self.accept('uber-control-arrow_down', self.setNextHistory)
            self.historyIndex = None
        if self.wantSlidingWindow:
            self.accept('uber-arrow_right', self.movingRight)
            self.accept('uber-arrow_left', self.movingLeft)
            self.accept('uber-backspace', self.movingLeft)
            self.accept('uber-home', self.fullSlideLeft)
            self.accept('uber-end', self.fullSlideRight)
        self.show()
        return

    def handleEscape(self):
        localAvatar.chatMgr.deactivateChat()

    def deactivate(self):
        # Clear all text (including the off-screen saved halves) and unhook.
        self.set('')
        self.savedStringLeft = ''
        self.savedStringRight = ''
        self['focus'] = 0
        self.ignore('uber-escape')
        self.ignore('uber-control-arrow_up')
        self.ignore('uber-control-arrow_down')
        self.ignore('uber-arrow_right')
        self.ignore('uber-arrow_left')
        self.ignore('uber-backspace')
        self.ignore('uber-home')
        self.ignore('uber-end')
        self.hide()

    # --- sending ------------------------------------------------------------
    def sendChat(self, text, overflow=False):
        # Reassemble the full message from the sliding-window halves, then
        # dispatch by prefix: '>' dev exec, '/' slash command, '`' GM
        # command, otherwise normal chat for the current FSM state.
        text = self.savedStringLeft + text + self.savedStringRight
        self.savedStringLeft = self.savedStringRight = ''
        if text:
            self.set('')
            if base.config.GetBool('exec-chat', 0):
                if text[0] == '>':
                    text = self.__execMessage(text[1:])
                    base.localAvatar.setChatAbsolute(text, CFSpeech | CFTimeout)
                    return
            else:
                if base.config.GetBool('want-slash-commands', 1):
                    # `x and f()` is decompiled shorthand for `if x: f()`.
                    text[0] == '/' and base.talkAssistant.executeSlashCommand(text)
                else:
                    if localAvatar.isGM() or base.cr.wantMagicWords:
                        text[0] == '`' and base.talkAssistant.executeGMCommand(text)
                    else:
                        self.sendChatByMode(text)
                self.wantHistory and self.addToHistory(text)
        else:
            localAvatar.chatMgr.deactivateChat()
        if not overflow:
            self.hide()
            localAvatar.chatMgr.messageSent()

    def sendChatByMode(self, text):
        # Route the message to the channel implied by the current FSM state.
        messenger.send('sentRegularChat')
        state = self.getCurrentOrNextState()
        if state == 'PlayerWhisper':
            base.talkAssistant.sendAccountTalk(text, self.whisperId)
        else:
            if state == 'AvatarWhisper':
                base.talkAssistant.sendWhisperTalk(text, self.whisperId)
            else:
                if state == 'GuildChat':
                    base.talkAssistant.sendGuildTalk(text)
                else:
                    if state == 'CrewChat':
                        base.talkAssistant.sendPartyTalk(text)
                    else:
                        if state == 'ShipPVPChat':
                            base.talkAssistant.sendShipPVPCrewTalk(text)
                        else:
                            base.talkAssistant.sendOpenTalk(text)

    def checkKey(self, key):
        # Debug helper (appears unused by the class itself).
        print 'key typed: %s' % key.getKeycode()

    # --- sliding-window cursor handling -------------------------------------
    def movingRight(self):
        # At the right edge with hidden text on the right: slide the view.
        if self.guiItem.getCursorPosition() == self.guiItem.getNumCharacters():
            if len(self.savedStringRight) > 0:
                self.slideBack(self.get())

    def movingLeft(self):
        # At the left edge with hidden text on the left: slide the view.
        if self.guiItem.getCursorPosition() == 0:
            if len(self.savedStringLeft) > 0:
                self.slideFront(self.get())

    def fullSlideLeft(self):
        # Home key: reveal the very beginning of the message.
        while len(self.savedStringLeft) > 0:
            self.slideFront(self.get())
        self.guiItem.setCursorPosition(0)

    def fullSlideRight(self):
        # End key: reveal the very end of the message.
        while len(self.savedStringRight) > 0:
            self.slideBack(self.get())
        self.guiItem.setCursorPosition(self.guiItem.getNumCharacters())

    def chatOverflow(self, overflowText):
        # Entry is full: either send immediately (no sliding window) or
        # shift text into the hidden left buffer to make room.
        if overflowText.hasKeycode():
            newText = self.get() + chr(overflowText.getKeycode())
            if not self.wantSlidingWindow:
                self.sendChat(newText, overflow=True)
            else:
                self.fillToLength = self.guiItem.getNumCharacters() - 3
                if len(self.savedStringLeft) + len(self.savedStringRight) + self.slideDistance <= self.maxSavedLength:
                    self.slideBack(newText)

    def chatErased(self, key):
        # After deletions, pull hidden characters back into view until the
        # entry is refilled to fillToLength (right side first).
        if not self.wantSlidingWindow:
            return
        while self.guiItem.getNumCharacters() < self.fillToLength and (len(self.savedStringRight) > 0 or len(self.savedStringLeft) > 0):
            if len(self.savedStringRight) > 0:
                self.set(self.get() + self.savedStringRight[0])
                self.savedStringRight = self.savedStringRight[1:]
            elif len(self.savedStringLeft) > 0:
                self.set(self.savedStringLeft[-1] + self.get())
                self.savedStringLeft = self.savedStringLeft[0:-1]
                self.guiItem.setCursorPosition(self.guiItem.getCursorPosition() + 1)

    def slideBack(self, inputText):
        # Shift slideDistance chars out of view on the left, revealing the
        # same amount from the hidden right buffer (if any).
        if len(self.savedStringRight) < 1:
            self.savedStringLeft += inputText[0:self.slideDistance]
            self.set(inputText[self.slideDistance:] + self.savedStringRight[0:self.slideDistance])
        else:
            self.savedStringLeft += inputText[0:self.slideDistance]
            self.set(inputText[self.slideDistance:] + self.savedStringRight[0:self.slideDistance])
            self.savedStringRight = self.savedStringRight[self.slideDistance:]
            self.guiItem.setCursorPosition(self.guiItem.getNumCharacters() - self.slideDistance)
        # Debug trace of the three text segments.
        print '%s + %s + %s' % (self.savedStringLeft, self.get(), self.savedStringRight)

    def slideFront(self, inputText):
        # Shift slideDistance chars out of view on the right, revealing the
        # same amount from the hidden left buffer.
        self.savedStringRight = inputText[-1 * self.slideDistance:] + self.savedStringRight
        self.set(self.savedStringLeft[-1 * self.slideDistance:] + inputText[:-1 * self.slideDistance])
        self.savedStringLeft = self.savedStringLeft[0:-1 * self.slideDistance]
        self.guiItem.setCursorPosition(self.slideDistance)
        print '%s + %s + %s' % (self.savedStringLeft, self.get(), self.savedStringRight)

    # --- history ------------------------------------------------------------
    def addToHistory(self, text):
        # Prepend the newest message, trimming to historySize entries.
        self.history = [
            text] + self.history[:self.historySize - 1]
        self.historyIndex = 0

    def setPrevHistory(self):
        if self.historyIndex is None:
            self.historyIndex = -1
        self.historyIndex += 1
        self.historyIndex %= len(self.history)
        self.set(self.history[self.historyIndex])
        return

    def setNextHistory(self):
        if self.historyIndex is None:
            self.historyIndex = len(self.history) - 1
        self.historyIndex -= 1
        self.historyIndex %= len(self.history)
        self.set(self.history[self.historyIndex])
        return

    def importExecNamespace(self):
        # Hook for subclasses to extend the exec console namespace.
        pass

    def __execMessage(self, message):
        # Developer console: eval the message (or exec it if eval raises
        # SyntaxError) and return the result/error as a chat string.
        # SECURITY NOTE: eval/exec of chat text — gated by the 'exec-chat'
        # config flag, dev-only by design.
        if not PChatInputTyped.ExecNamespace:
            PChatInputTyped.ExecNamespace = {}
            exec 'from pandac.PandaModules import *' in globals(), self.ExecNamespace
            self.importExecNamespace()
        try:
            return str(eval(message, globals(), PChatInputTyped.ExecNamespace))
        except SyntaxError:
            try:
                exec message in globals(), PChatInputTyped.ExecNamespace
                return 'ok'
            except:
                exception = sys.exc_info()[0]
                extraInfo = sys.exc_info()[1]
                if extraInfo:
                    return str(extraInfo)
                else:
                    return str(exception)
        except:
            exception = sys.exc_info()[0]
            extraInfo = sys.exc_info()[1]
            if extraInfo:
                return str(extraInfo)
            else:
                return str(exception)
"[email protected]"
] | |
db7e7c16b31a09e8b6ca45c25861d6d291100c75 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /v34oCTbkrceCZjgRE_13.py | a9172c7f75c4daa5b8039320bd29cd908f79cc2b | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,286 | py | """
The **right shift** operation is similar to **floor division by powers of
two** , thus, the process is _repetitive_ and can be done _recursively_.
Sample calculation using the right shift operator ( `>>` ):
80 >> 3 = floor(80/2^3) = floor(80/8) = 10
-24 >> 2 = floor(-24/2^2) = floor(-24/4) = -6
-5 >> 1 = floor(-5/2^1) = floor(-5/2) = -3
Write a function that **mimics** (without the use of **> >**) the right shift
operator and returns the result from the two given integers.
### Examples
shift_to_right(80, 3) ➞ 10
shift_to_right(-24, 2) ➞ -6
shift_to_right(-5, 1) ➞ -3
shift_to_right(4666, 6) ➞ 72
shift_to_right(3777, 6) ➞ 59
shift_to_right(-512, 10) ➞ -1
### Notes
* There will be no negative values for the second parameter `y`.
* This challenge is more like recreating of the **right shift** operation, thus, **the use of the operator directly** is **prohibited**.
* You are expected to solve this challenge via **recursion**.
* An **iterative** version of this challenge can be found via this [link](https://edabit.com/challenge/noqQNSr5o9qzvXWzL).
"""
def shift_to_right(x, y):
    """Recursively mimic `x >> y`: floor-divide x by 2, y times.

    The challenge requires a recursive solution, so no `>>` is used.
    """
    return x if y == 0 else shift_to_right(x // 2, y - 1)
| [
"[email protected]"
] | |
c8cad4bbe0e65284571d2098ad9eac09162e0c47 | fe6e0a2cfb00d34b58f64f164a747e3df08e8a9d | /client/application/controller/quyuxiaoshoutongji.py | bf2221e525b580d1453b72118bd22302a81d672e | [] | no_license | huboqiao/kmvip | c141814666631c35b8adeec3d3beb5aca0d2d1cd | 11ae7e1f78943c8425516c4f06acf043a99acdcc | refs/heads/master | 2020-02-26T14:58:31.573602 | 2016-08-03T06:29:41 | 2016-08-03T06:29:41 | 64,809,269 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,712 | py | #coding:utf-8
from application.lib.Commethods import *
from application.view.quyuxiaoshoutongji import Ui_Dialog
class QuYuScale(ControllerAction,Ui_Dialog,PrintAction):
def __init__(self,parent = None):
ControllerAction.__init__(self, parent)
PrintAction.__init__(self,u"区域销售统计表")
self.setTable()
self.setStyleSheet("""QGroupBox{font-size:18px;margin-top:10px;padding:14px;border:2px solid #6cb479;border-radius:10px;}
QGroupBox::indicator{width:130px;height:130px;}
QGroupBox::title{font-size:20px;left:15px;margin-top:-15px;}
QTableWidget{border:2px solid #6cb479;border-radius:5px;}
""")
self.tableWidget.setAlternatingRowColors(True);
self.tableWidget.setSelectionBehavior(QAbstractItemView.SelectRows); #选择整行 【推荐】
self.tableWidget.verticalHeader().hide()
# self.connect(self.table, SIGNAL("cellPressed(int,int)"),self.test)
self.connect(self.pushButton, SIGNAL("clicked()"),self.testdd)
self.connect(self.pushButton_2, SIGNAL("clicked()"),self.generateExcel)
self.connect(self.pushButton_3, SIGNAL("clicked()"),self.printTo)
self.connect(self.pushButton_5, SIGNAL("clicked()"),self.prePrint)
self.connect(self.pushButton_4, SIGNAL("clicked()"),self.configColumn)
self.tableWidget.horizontalHeader().setResizeMode(QHeaderView.Stretch)
# self.table.horizontalHeader().setResizeMode(QHeaderView.Stretch)#【推荐】
def testdd(self):
dlg = KDialog(self)
dlg.exec_()
def setTable(self):
self.tableWidget.setRowCount(10)
# alignment,color,format,count
self.table_fmt_list = []
self.table_fmt_list.append({"alignment":"left","color":"black","format":"general","count":False})
self.table_fmt_list.append({"alignment":"left","color":"black","format":"general","count":False})
self.table_fmt_list.append({"alignment":"left","color":"black","format":"0","count":True})
self.table_fmt_list.append({"alignment":"right","color":"black","format":"#,##0.00","count":True})
self.table_fmt_list.append({"alignment":"right","color":"black","format":"#,##0.00","count":True})
self.table_data_list = ["苹果","水果",11,123.2,123434321]
#
countColumn = [key for key,value in enumerate(self.table_fmt_list) if value['count'] == True]
print countColumn
countList = {}
for i in countColumn:
countList[str(i)] = 0
for i in range(10):
for j in range(5):
item = QTableWidgetItem(unicode(str(self.table_data_list[j])))
self.formatTableItem(item,self.table_fmt_list[j])
self.tableWidget.setItem(i,j,item)
if j in countColumn:
countList[str(j)] += self.table_data_list[j]
if len(countColumn)>0:
rowCount = self.tableWidget.rowCount()
self.tableWidget.setRowCount(rowCount+1)
self.tableWidget.setItem(rowCount,0,QTableWidgetItem(u"共计:"))
for key,value in countList.items():
item = QTableWidgetItem(str(value))
self.tableWidget.setItem(rowCount,int(key),item)
self.formatTableItem(item,self.table_fmt_list[int(key)])
def test(self,x,y):
print x,y
# self.verticalLayout.addWidget()
| [
"[email protected]"
] | |
4c5b927c53c82fc293f4290aa24ae57772b12da7 | 930309163b930559929323647b8d82238724f392 | /typical90_i.py | ea4c1a1584c4ad62fa2c06098d162e2a63c7da57 | [] | no_license | GINK03/atcoder-solvers | 874251dffc9f23b187faa77c439b445e53f8dfe1 | b1e7ac6e9d67938de9a85df4a2f9780fb1fbcee7 | refs/heads/master | 2021-11-07T14:16:52.138894 | 2021-09-12T13:32:29 | 2021-09-12T13:32:29 | 11,724,396 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,077 | py | import itertools
import numpy as np
import bisect
def angle(c0, c1):
    """Return the angle in degrees between vectors c0 and c1."""
    cos_theta = np.dot(c0, c1) / (np.linalg.norm(c0) * np.linalg.norm(c1))
    return np.degrees(np.arccos(cos_theta))
def slow():
    """Brute-force debug helper: read N points from stdin and print the
    angle at c for every ordered triple (a, b, c)."""
    n = int(input())
    points = []
    for _ in range(n):
        px, py = map(int, input().split())
        points.append([px, py])
    points = np.array(points)
    for a, b, c in itertools.permutations(points, 3):
        print(a, b, c, angle(a - c, b - c))
# slow()
def main():
    # Contest scratch solution (AtCoder Typical 90 #009): for each center
    # point c, compute angles to every other point, sort them, and search
    # for the pair closest to 180 degrees apart.
    # NOTE(review): angle(x, c) measures the angle BETWEEN vectors x and c,
    # not the bearing of x as seen from c, and angle(c, c) divides by a
    # zero norm — combined with the debug prints this looks like an
    # unfinished attempt; verify before reuse.
    N=int(input())
    XY = []
    for _ in range(N):
        x, y = map(int,input().split())
        XY.append( [x,y] )
    XY=np.array(XY)
    ans = 0
    for c_pos in range(N):
        c = XY[c_pos]
        # Angles from every point to the current center candidate.
        xy = [angle(x,c) for x in XY]
        print(c, xy)
        xy.sort()
        for i in range(len(xy)):
            a = xy[i]
            # Candidate angle directly opposite `a` (wrap at 360).
            can_b = a + 180
            if can_b >= 360:
                can_b -= 360
            # Nearest stored angle >= can_b (wrapping to index 0 past the end).
            i0 = bisect.bisect_left(xy, can_b)%len(xy)
            print( can_b, i0, xy[i0] - a)
            ans = max(xy[i0] - a, ans)
    print(ans)
main()
| [
"[email protected]"
] | |
d93e485b32559ed9bbeb9095906afc77c3b293b1 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/19/usersdata/102/7266/submittedfiles/jogoDaVelha.py | ca7bac2dab81b9c471ab886a8d0b50f2746278a9 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 900 | py | # -*- coding: utf-8 -*-
from __future__ import division
import math
# Read the nine board cells (prompts match the original: "Digite xN: ").
cells = [input('Digite x%d: ' % n) for n in range(1, 10)]
x1, x2, x3, x4, x5, x6, x7, x8, x9 = cells

# Winning lines, checked in the same order as the original if/elif chain:
# row 1, column 1, row 2, row 3, column 2, column 3, both diagonals.
lines = [
    (x1, x2, x3),
    (x1, x4, x7),
    (x4, x5, x6),
    (x7, x8, x9),
    (x2, x5, x8),
    (x3, x6, x9),
    (x1, x5, x9),
    (x3, x5, x7),
]

for a, b, c in lines:
    if a == b == c == 0:
        print('0')
        break
    elif a == b == c == 1:
        print('1')
        break
else:
    # No completed line for either player: tie/undecided.
    print('e')
| [
"[email protected]"
] | |
ea1f690bb16b6f3b7c4f574beb17f9754aa3dfa2 | 3fad7381b03607e908dc06a7f91ae60f10e5be16 | /01_tests/05_andrei_repository/2017.08.23_RaportMnist/knn_slider_experiment/slider.py | 549c9c6779a84f95aa31a5ece4f596f64525af76 | [] | no_license | Cloudifier/CLOUDIFIER_WORK | ea5efe0f8e75315313db5ee145f4cc8092b542fa | e8ce18fad97b1207545e933ed0947347ed09c536 | refs/heads/master | 2021-12-23T16:41:03.149554 | 2021-12-13T13:16:51 | 2021-12-13T13:16:51 | 108,911,842 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,605 | py | import numpy as np
import pandas as pd
from utils import sigmoid, softmax, min_max_scaler
from sklearn.metrics import pairwise_distances
from scipy.stats import mode
import multiprocessing
from tqdm import tqdm, trange
import time
import gc
import inspect
import sys
class Slider():
    def __init__(self, df_files, sizes, window_size, step_size, classifier, epsilon, logger):
        # df_files: paths to pickled DataFrames of test scenes.
        # sizes: per-df scene dimensions (height, width) to reshape rows into.
        # window_size: (width, height) of the sliding crop.
        # epsilon: max per-axis pixel offset for a predicted position to
        #          still count as correct.
        # classifier/logger are project objects with predict()/log().
        self.df_files = df_files
        self.sizes = sizes
        self.window_size = window_size
        self.step_size = step_size
        self.epsilon = epsilon
        self.num_df = len(df_files)
        self.crt_df = None
        self.crt_idx = 0
        self.logger = logger
        self.classifier = classifier
        self.X = None
        self.y = None
        self.img_pos = None
        # Per-dataframe [corrects, wrongs] tallies.
        self.results = [[0, 0] for i in range(self.num_df)]
        np.set_printoptions(precision = 2, suppress = True)
def sliding_window(self, image, step_size):
for y in range(0, image.shape[0] - self.window_size[1], step_size):
for x in range(0, image.shape[1] - self.window_size[0], step_size):
yield (x, y, image[y:y + self.window_size[1], x:x + self.window_size[0]])
    def slide_over_image(self, image):
        """Slide over one scene, classify every window in a single batch,
        and return (predicted_val, predicted_pos) from the classifier."""
        start_time = time.time()
        windows = []
        positions = []
        # Keep the 2-D crops keyed by (row, col) for later inspection.
        self.dict_windows = {}
        for (x, y, window) in self.sliding_window(image, step_size = self.step_size):
            self.logger.log("\tTaking window at pos = ({},{})".format(y,x), verbosity_level = 0)
            self.dict_windows[(y,x)] = window
            # Flatten each crop into a feature row for the classifier.
            window = window.flatten()
            windows.append(window)
            positions.append((x, y))
        self.windows = np.array(windows)
        self.positions = np.array(positions)
        # Batch prediction over all windows; k is the kNN neighbor count.
        predicted_val, predicted_pos, vals, counts, top_k_sums = self.classifier.predict(self.windows, self.positions, k = 5)
        # Stash the full prediction breakdown for debugging/analysis.
        self.predicted_val = predicted_val
        self.predicted_pos = predicted_pos
        self.vals = vals
        self.counts = counts
        self.top_k_sums = top_k_sums
        self.logger.log("\tScene slided in {:.2f}s".format(time.time()-start_time),
                        verbosity_level = 2)
        return predicted_val, predicted_pos
    def read_df(self):
        """Load the current pickled DataFrame and split it into features,
        targets and ground-truth positions.

        Expected column layout (per row): [target, pos_a, pos_b, pixels...].
        """
        self.crt_df = pd.read_pickle(self.df_files[self.crt_idx])
        self.X = np.array(self.crt_df.iloc[:, 3:].values, dtype = float)
        self.y = np.array(self.crt_df.iloc[:, 0].values, dtype = int)
        self.img_pos = np.array(self.crt_df.iloc[:, 1:3].values, dtype = int)
        # Scale pixel features to [0, 1] (project helper from utils).
        self.X = min_max_scaler(self.X)
def check_position(self, i, predicted_pos):
return (abs(self.img_pos[i][1] - predicted_pos[1]) < self.epsilon and
abs(self.img_pos[i][0] - predicted_pos[0]) < self.epsilon)
def slide_over_df(self):
    """Scan every scene of the current DataFrame, scoring target + position hits."""
    self.read_df()
    self.logger.log("Sliding {} test scenes of size {}x{} with {}x{} windows and step_size={}".format(self.X.shape[0], self.sizes[self.crt_idx][0], self.sizes[self.crt_idx][1],
                    self.window_size[0], self.window_size[1], self.step_size), verbosity_level = 2)
    # Route print() through tqdm.write so log output does not break the progress
    # bar; the real print is restored after the loop.
    old_print = print
    inspect.builtins.print = tqdm.write
    t = trange(self.X.shape[0], desc='Slider', leave=True)
    for i in range(self.X.shape[0]):
        # Running accuracy for the progress-bar caption.
        if self.results[self.crt_idx][0] + self.results[self.crt_idx][1] == 0:
            crt_accuracy = 0
        else:
            crt_accuracy = float(self.results[self.crt_idx][0]) / (self.results[self.crt_idx][0] +
                                                                   self.results[self.crt_idx][1])
        t.set_description("Target {} -- Position ({}, {}) -- corrects = {}, wrongs = {} -- accuracy = {:.2f} %".format(self.y[i], self.img_pos[i][1], self.img_pos[i][0],
                          self.results[self.crt_idx][0], self.results[self.crt_idx][1], crt_accuracy * 100))
        t.refresh()
        t.update(1)
        sys.stdout.flush()
        self.logger.log("Start sliding scene #{}; position of the image with target = {} in the scene = ({}, {})".format(i, self.y[i], self.img_pos[i][1], self.img_pos[i][0]), verbosity_level = 2)
        image = self.X[i].reshape(self.sizes[self.crt_idx][0], self.sizes[self.crt_idx][1])
        predicted_val, predicted_pos = self.slide_over_image(image)
        # A hit requires BOTH the right target and a position within epsilon.
        if predicted_val == self.y[i]:
            if self.check_position(i, predicted_pos):
                self.results[self.crt_idx][0] += 1
                self.logger.log("\tFound {} at pos ({}, {}) ... correct target, correct position"
                                .format(predicted_val, predicted_pos[0], predicted_pos[1]),
                                verbosity_level = 2)
            else:
                self.logger.log("\tFound {} at pos ({}, {}) ... correct target, wrong position"
                                .format(predicted_val, predicted_pos[0], predicted_pos[1]),
                                verbosity_level = 2)
                self.results[self.crt_idx][1] += 1
        else:
            # predicted_val == -1 is logged as "no window matched"; both cases
            # count as a wrong result below.
            if predicted_val == -1:
                self.logger.log("\tCould not match a window .. ", verbosity_level = 2)
            else:
                self.logger.log("\tFound {} at pos ({}, {}) ... incorrect target"
                                .format(predicted_val, predicted_pos[0], predicted_pos[1]),
                                verbosity_level = 2)
            self.results[self.crt_idx][1] += 1
        self.logger.log("Finished sliding scene #{}".format(i), verbosity_level = 2)
    inspect.builtins.print = old_print
def slide(self):
    """Run the scan over the test DataFrames, logging per-size accuracy."""
    # NOTE(review): range(1) only processes the first DataFrame even though
    # crt_idx is advanced — presumably this should be range(self.num_df); confirm.
    for i in range(1):
        start_time = time.time()
        self.slide_over_df()
        self.logger.log("Test scenes of size {}x{} slided in {:.2f}s; corrects={}, wrongs={}"
                        .format(self.sizes[i][0], self.sizes[i][1], time.time() - start_time,
                                self.results[i][0], self.results[i][1]))
        self.crt_idx += 1
        # Free the large DataFrame before loading the next one.
        del self.crt_df
        gc.collect()
if __name__=='__main__':
print("Library module. No main function") | [
"[email protected]"
] | |
923ca287952b81c4d4382b7af028fdc1393fab6e | 5a9a28b79e01a71dae8c92d1c1feaee139a92510 | /2022/2022-02/02-07/1405.py | 48c8814e0ea1a5ed2f0f7c36808ffbe89f92fc37 | [] | no_license | ez4lionky/Leetcode-practices | b81854e0ab9a9b39b6a26df6faf99bcf89860c39 | 0c28803043ea8196e564dacdbb231f6bb1693226 | refs/heads/master | 2023-02-08T03:33:42.756691 | 2023-01-21T03:31:09 | 2023-01-21T03:31:09 | 196,699,617 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,061 | py | import heapq
class Solution:
    def longestDiverseString(self, a: int, b: int, c: int) -> str:
        """Build the longest 'happy' string (no 'aaa'/'bbb'/'ccc') from the budgets.

        Greedy with a max-heap (stored as negated counts): always extend with
        the most plentiful letter; when that would create a triple, borrow one
        character from the runner-up instead.
        """
        heap = []
        for count, ch in ((a, 'a'), (b, 'b'), (c, 'c')):
            if count > 0:
                heapq.heappush(heap, (-count, ch))
        out = ''
        while heap:
            neg, ch = heapq.heappop(heap)
            remaining = -neg
            if len(out) < 2 or not (out[-2] == out[-1] == ch):
                out += ch
                remaining -= 1
                if remaining:
                    heapq.heappush(heap, (-remaining, ch))
            else:
                # Two in a row already; take one from the next-best letter.
                if not heap:
                    break
                neg2, alt = heapq.heappop(heap)
                out += alt
                alt_remaining = -neg2 - 1
                if alt_remaining:
                    heapq.heappush(heap, (-alt_remaining, alt))
                heapq.heappush(heap, (-remaining, ch))
        return out
if __name__ == "__main__":
sol = Solution()
a = 1
b = 1
c = 7
print(sol.longestDiverseString(a, b, c))
| [
"[email protected]"
] | |
a0aa5f38758c6fb400aeb788892f86ccbf5513d2 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02778/s359431231.py | 05bef0f491391dffa6e151fc8c6564e36229bb6a | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 168 | py | import sys
input = lambda: sys.stdin.readline().rstrip()
def solve():
n = len(input())
ans = 'x' * n
print(ans)
if __name__ == '__main__':
solve()
| [
"[email protected]"
] | |
81a1fc018ec8c711319f15d08beb753759612447 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03573/s699758897.py | 68dc4cc90a48839d7a5ebf77da31afb0a341b1ff | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 112 | py | a, b, c = (int(x) for x in input().split())
# Exactly two of a, b, c coincide; print the odd one out.
if a == b:
    print(c)
elif b == c:
    print(a)
else:
print(b) | [
"[email protected]"
] | |
35587ddb86a9f14e7a0f7fb2e1d56c72f3e4c63b | bc441bb06b8948288f110af63feda4e798f30225 | /resource_package_tools_sdk/model/topology/view_pb2.pyi | 87c922d2bad9ae781ab1970c8f62c712ab95c8b8 | [
"Apache-2.0"
] | permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,522 | pyi | # @generated by generate_proto_mypy_stubs.py. Do not edit!
import sys
from google.protobuf.descriptor import (
Descriptor as google___protobuf___descriptor___Descriptor,
)
from google.protobuf.internal.containers import (
RepeatedCompositeFieldContainer as google___protobuf___internal___containers___RepeatedCompositeFieldContainer,
RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer,
)
from google.protobuf.message import (
Message as google___protobuf___message___Message,
)
from resource_package_tools_sdk.model.topology.area_pb2 import (
Area as resource_package_tools_sdk___model___topology___area_pb2___Area,
)
from resource_package_tools_sdk.model.topology.link_pb2 import (
Link as resource_package_tools_sdk___model___topology___link_pb2___Link,
)
from resource_package_tools_sdk.model.topology.node_pb2 import (
Node as resource_package_tools_sdk___model___topology___node_pb2___Node,
)
from resource_package_tools_sdk.model.topology.note_pb2 import (
Note as resource_package_tools_sdk___model___topology___note_pb2___Note,
)
from typing import (
Iterable as typing___Iterable,
Optional as typing___Optional,
Text as typing___Text,
Union as typing___Union,
)
from typing_extensions import (
Literal as typing_extensions___Literal,
)
builtin___bool = bool
builtin___bytes = bytes
builtin___float = float
builtin___int = int
if sys.version_info < (3,):
builtin___buffer = buffer
builtin___unicode = unicode
class View(google___protobuf___message___Message):
    # Generated protobuf message stub (see file header: "Do not edit!").
    DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
    class Diff(google___protobuf___message___Message):
        # Incremental change set: nodes/links added to or removed from the view.
        DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
        @property
        def addNodes(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[resource_package_tools_sdk___model___topology___node_pb2___Node]: ...
        @property
        def removeNodes(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[resource_package_tools_sdk___model___topology___node_pb2___Node]: ...
        @property
        def addLinks(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[resource_package_tools_sdk___model___topology___link_pb2___Link]: ...
        @property
        def removeLinks(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[resource_package_tools_sdk___model___topology___link_pb2___Link]: ...
        def __init__(self,
            *,
            addNodes : typing___Optional[typing___Iterable[resource_package_tools_sdk___model___topology___node_pb2___Node]] = None,
            removeNodes : typing___Optional[typing___Iterable[resource_package_tools_sdk___model___topology___node_pb2___Node]] = None,
            addLinks : typing___Optional[typing___Iterable[resource_package_tools_sdk___model___topology___link_pb2___Link]] = None,
            removeLinks : typing___Optional[typing___Iterable[resource_package_tools_sdk___model___topology___link_pb2___Link]] = None,
            ) -> None: ...
        # Python 2 accepted buffer/unicode payloads; Python 3 only bytes.
        if sys.version_info >= (3,):
            @classmethod
            def FromString(cls, s: builtin___bytes) -> View.Diff: ...
        else:
            @classmethod
            def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> View.Diff: ...
        def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
        def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
        def ClearField(self, field_name: typing_extensions___Literal[u"addLinks",b"addLinks",u"addNodes",b"addNodes",u"removeLinks",b"removeLinks",u"removeNodes",b"removeNodes"]) -> None: ...
    # Scalar metadata fields of the topology view.
    id = ... # type: typing___Text
    name = ... # type: typing___Text
    creator = ... # type: typing___Text
    modifier = ... # type: typing___Text
    readAuthorizers = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text]
    writeAuthorizers = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text]
    version = ... # type: typing___Text
    ctime = ... # type: builtin___int
    mtime = ... # type: builtin___int
    @property
    def rootNode(self) -> resource_package_tools_sdk___model___topology___node_pb2___Node: ...
    @property
    def nodes(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[resource_package_tools_sdk___model___topology___node_pb2___Node]: ...
    @property
    def links(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[resource_package_tools_sdk___model___topology___link_pb2___Link]: ...
    @property
    def areas(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[resource_package_tools_sdk___model___topology___area_pb2___Area]: ...
    @property
    def notes(self) -> google___protobuf___internal___containers___RepeatedCompositeFieldContainer[resource_package_tools_sdk___model___topology___note_pb2___Note]: ...
    @property
    def diff(self) -> View.Diff: ...
    def __init__(self,
        *,
        id : typing___Optional[typing___Text] = None,
        name : typing___Optional[typing___Text] = None,
        creator : typing___Optional[typing___Text] = None,
        modifier : typing___Optional[typing___Text] = None,
        readAuthorizers : typing___Optional[typing___Iterable[typing___Text]] = None,
        writeAuthorizers : typing___Optional[typing___Iterable[typing___Text]] = None,
        version : typing___Optional[typing___Text] = None,
        ctime : typing___Optional[builtin___int] = None,
        mtime : typing___Optional[builtin___int] = None,
        rootNode : typing___Optional[resource_package_tools_sdk___model___topology___node_pb2___Node] = None,
        nodes : typing___Optional[typing___Iterable[resource_package_tools_sdk___model___topology___node_pb2___Node]] = None,
        links : typing___Optional[typing___Iterable[resource_package_tools_sdk___model___topology___link_pb2___Link]] = None,
        areas : typing___Optional[typing___Iterable[resource_package_tools_sdk___model___topology___area_pb2___Area]] = None,
        notes : typing___Optional[typing___Iterable[resource_package_tools_sdk___model___topology___note_pb2___Note]] = None,
        diff : typing___Optional[View.Diff] = None,
        ) -> None: ...
    if sys.version_info >= (3,):
        @classmethod
        def FromString(cls, s: builtin___bytes) -> View: ...
    else:
        @classmethod
        def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> View: ...
    def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
    def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
    def HasField(self, field_name: typing_extensions___Literal[u"diff",b"diff",u"rootNode",b"rootNode"]) -> builtin___bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"areas",b"areas",u"creator",b"creator",u"ctime",b"ctime",u"diff",b"diff",u"id",b"id",u"links",b"links",u"modifier",b"modifier",u"mtime",b"mtime",u"name",b"name",u"nodes",b"nodes",u"notes",b"notes",u"readAuthorizers",b"readAuthorizers",u"rootNode",b"rootNode",u"version",b"version",u"writeAuthorizers",b"writeAuthorizers"]) -> None: ...
| [
"[email protected]"
] | |
0ecd372f87cdeb4bbb342d2f31042ecb2e05bd41 | 683876019cad0b0d562ac7f9da8c679cb310cfb2 | /2016/day19/part1_ll.py | 4a08c2e641a6605382f82513fdc09f18e30d97da | [] | no_license | CoachEd/advent-of-code | d028bc8c21235361ad31ea55922625adf743b5c8 | 10850d5d477c0946ef73756bfeb3a6db241cc4b2 | refs/heads/master | 2023-05-11T05:20:26.951224 | 2023-05-09T18:54:16 | 2023-05-09T18:54:16 | 160,375,311 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,337 | py | """
AoC
"""
import time
import sys
# One element of a singly linked list.
class Node:
    def __init__(self, data=None, next=None):
        self.data = data    # payload stored at this position
        self.next = next    # successor node, or None at the tail
# Singly linked list tracked only by its head node.
class LinkedList:
    def __init__(self):
        self.head = None

    def insert(self, data):
        """Append *data* at the tail and return the freshly created Node."""
        node = Node(data)
        if self.head is None:
            self.head = node
        else:
            tail = self.head
            while tail.next:
                tail = tail.next
            tail.next = node
        return node

    def printLL(self):
        """Print one payload per line; stops if the chain loops back to head."""
        node = self.head
        while node:
            print(node.data)
            node = node.next
            if node == self.head:
                break
        print()
# main
#num_elves = 3014603 # does not scale well!
num_elves = 5 # test data
# Build a circular list of [elf_number, presents_held] cells.
ll = LinkedList()
elf_num = 1
for i in range(num_elves):
    n = ll.insert([elf_num,1])
    elf_num += 1
n.next = ll.head  # close the ring: tail points back to head
curr = ll.head
# Josephus-style elimination: the current elf takes the left neighbour's
# presents and unlinks it; the winner ends up holding all num_elves presents.
while True:
    arr = curr.data
    if arr[1] == num_elves:
        print('Elf ' + str(arr[0]) + ' wins!')
        break
    next_elf = curr.next
    arr[1] += next_elf.data[1]
    curr.next = next_elf.next
    curr = curr.next
# NOTE(review): both timestamps are taken *after* the simulation, so the
# printed duration only brackets the empty print below — likely misplaced.
start_secs = time.time()
print('')
end_secs = time.time()
print('--- ' + str(end_secs-start_secs) + ' secs ---')
| [
"[email protected]"
] | |
de63c3e5d529ce92b9c76636a9c9546bef4fc10b | 34263dd7d7cb038bd62b50239ba5f7a88d0430ce | /extra_apps/xadmin/plugins/ueditor.py | c95778d810b957813b85d7268919b96c31d85c15 | [] | no_license | vevoly/StudyOnline2 | c8da53ccc4d58e10d7f58639ffe4a0944757aef1 | 224a9d21a8600e26945e09ae4240b67267392173 | refs/heads/master | 2020-04-21T08:01:44.043799 | 2019-02-12T15:17:46 | 2019-02-12T15:17:46 | 169,407,451 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,394 | py | # _*_ coding: utf-8 _*_
__author__ = 'jevoly'
__date__ = '2018/12/13 0013 上午 10:15'
import xadmin
from xadmin.views import BaseAdminPlugin, CreateAdminView, UpdateAdminView
from DjangoUeditor.models import UEditorField
from DjangoUeditor.widgets import UEditorWidget
from django.conf import settings
class XadminUEditorWidget(UEditorWidget):
    """UEditorWidget variant for xadmin: keeps the raw options, drops Media.js."""
    def __init__(self, **kwargs):
        # Keep the raw option dict so plugin code can inspect it later.
        self.ueditor_options = kwargs
        # NOTE(review): this writes through to the shared Media *class* attribute,
        # disabling the widget's own JS includes (xadmin injects the scripts via
        # UeditorPlugin.block_extrahead instead) — confirm intent.
        self.Media.js = None
        # Parent constructor expects the options as one positional dict.
        super(XadminUEditorWidget, self).__init__(kwargs)
class UeditorPlugin(BaseAdminPlugin):
    """xadmin plugin that renders UEditorField model fields with the UEditor widget."""
    def get_field_style(self, attrs, db_field, style, **kwargs):
        # Only take over fields explicitly styled as 'ueditor'.
        if style == 'ueditor':
            if isinstance(db_field, UEditorField):
                widget = db_field.formfield().widget
                # Merge the widget's UEditor settings with its HTML attrs.
                param = {}
                param.update(widget.ueditor_settings)
                param.update(widget.attrs)
                return {'widget': XadminUEditorWidget(**param)}
        return attrs
    def block_extrahead(self, context, nodes):
        # Inject UEditor's config + bundle scripts into the page head.
        js = '<script type="text/javascript" src="%s"></script>' %(settings.STATIC_URL + "ueditor/ueditor.config.js")
        js += '<script type="text/javascript" src="%s"></script>' %(settings.STATIC_URL + "ueditor/ueditor.all.min.js")
        nodes.append(js)
# Activate the plugin for the add (Create) admin view; the Update view follows.
xadmin.site.register_plugin(UeditorPlugin, CreateAdminView)
xadmin.site.register_plugin(UeditorPlugin, UpdateAdminView)
| [
"[email protected]"
] | |
4fb1a32acd37b6b3c8a78123ef3c9402f520f7b1 | 3c868540c8f5b0b9b46440e9b8e9160de9e8988f | /ch04/items.py | 7acd94f1c49dc7b2622e2f46537cd18c9af52490 | [] | no_license | sarte3/python | cc8f41b8b22b0a980252d6546358dd212324e2cd | 15d984e5df03387950692092b6b5569adab845bb | refs/heads/master | 2023-01-18T18:37:40.720326 | 2020-11-17T08:43:27 | 2020-11-17T08:43:27 | 304,824,911 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 354 | py | example_dictionary = {
    '키A': '값A',
    '키B': '값B',
    '키C': '값C'
}
# dict.items() returns a view of (key, value) pairs.
print('# 딕셔너리의 items() 함수')
print('items() : ', example_dictionary.items())
print()
# Unpack each (key, value) pair directly in the for-statement header.
print('# 딕셔너리의 items() 함수와 반복문 조합하기')
for key, element in example_dictionary.items():
print('dictionary[{}] = {}'.format(key, element)) | [
"[email protected]"
] | |
0761f644ba1f7580cfe8081b9a120d644abca30f | 4ebdc7053d9341ce7ad45f1e859ff86ef1455177 | /56_simple_interest.py | 4ada91727b9d65ad66eee49e2e59748636e9322d | [] | no_license | daikiante/python | 1f4d55e1fd04eef22702b364148b8e1a2beea2d3 | 9d604b8dcd9e3cbe8b4db24ef16c5c969f6f894f | refs/heads/master | 2020-09-17T00:14:24.034179 | 2019-12-02T09:03:25 | 2019-12-02T09:03:25 | 223,928,994 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 279 | py | # 単利計算
# Simple interest = principal x (rate / 100) x holding period.
def simple_interest(p, t, r):
    """Return the simple interest on principal p at r percent over t periods."""
    rate = r / 100
    return p * rate * t
# Gather principal, holding period (years) and rate (%) from stdin.
p = int(input('Enter the amount (/Rs):'))
t = int(input('Enter the hold span (/Year):'))
r = int(input('Enter the interest (/%):'))
print(simple_interest(p,t,r)) | [
"[email protected]"
] | |
151e343e9c8d0f2441e111d73315a7c1519ef256 | 4cdb92fddb5082070ea8e25df63a7d43e9259c81 | /pubsubpull/migrations/0005_auto_20150520_1121.py | e0bcf42821b5225682cacbac07cad406faa9d580 | [
"MIT"
] | permissive | WFP-BKK/django-pubsubpull | 50b22604eef82379124f8377188d956e4965a146 | ae4341ff9fdbe7493399adb7f2ecb43024cb7961 | refs/heads/master | 2021-01-20T14:53:32.616023 | 2016-10-26T03:12:29 | 2016-10-26T03:12:29 | 82,781,454 | 0 | 0 | null | 2017-02-22T08:46:10 | 2017-02-22T08:46:10 | null | UTF-8 | Python | false | false | 698 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
class Migration(migrations.Migration):
    """Auto-generated Django migration: add request-timing fields to Request."""
    dependencies = [
        ('pubsubpull', '0004_auto_20150520_1041'),
    ]
    operations = [
        migrations.AddField(
            model_name='request',
            name='duration',
            field=models.FloatField(null=True, blank=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='request',
            name='started',
            # datetime.date(1970, 1, 1) is only the one-off backfill default;
            # auto_now_add stamps new rows at creation time.
            field=models.DateTimeField(default=datetime.date(1970, 1, 1), auto_now_add=True),
            preserve_default=False,
        ),
]
| [
"[email protected]"
] | |
b1613ee7db44819afd8e61048612766198986df2 | 1dd89e25ae58eded2fc3a5756a706fd654c9b40a | /test/test_analyze.py | 652880fcf500bc53bbb28c8567a8a18605915ec9 | [
"Apache-2.0"
] | permissive | jiev/chan | 9672bd3ac3146248da1da60660a6123ab1fa34b1 | a6c57a9f64e37c9cb05cf498f16719dbc2757a4c | refs/heads/master | 2022-06-17T21:31:19.680827 | 2022-06-10T01:52:36 | 2022-06-10T01:52:36 | 247,880,355 | 2 | 0 | MIT | 2020-03-17T04:38:18 | 2020-03-17T04:38:17 | null | UTF-8 | Python | false | false | 4,209 | py | # coding: utf-8
import zipfile
from tqdm import tqdm
import pandas as pd
from czsc.analyze import *
from czsc.enum import Freq
from czsc.signals.signals import get_default_signals, get_s_three_bi, get_s_d0_bi
cur_path = os.path.split(os.path.realpath(__file__))[0]
def read_1min():
    """Load 1-minute RawBar fixtures of index 000001.XSHG from the zipped CSV."""
    with zipfile.ZipFile(os.path.join(cur_path, 'data/000001.XSHG_1min.zip'), 'r') as z:
        f = z.open('000001.XSHG_1min.csv')
        data = pd.read_csv(f, encoding='utf-8')
    data['dt'] = pd.to_datetime(data['dt'])
    records = data.to_dict('records')
    bars = []
    for row in tqdm(records, desc='read_1min'):
        bar = RawBar(**row)
        # Frequency is not stored in the CSV; tag every bar as 1-minute.
        bar.freq = Freq.F1
        bars.append(bar)
    return bars
def read_daily():
    """Load daily RawBar fixtures of index 000001.SH from the CSV file."""
    file_kline = os.path.join(cur_path, "data/000001.SH_D.csv")
    kline = pd.read_csv(file_kline, encoding="utf-8")
    kline.loc[:, "dt"] = pd.to_datetime(kline.dt)
    # The row index doubles as the bar id.
    bars = [RawBar(symbol=row['symbol'], id=i, freq=Freq.D, open=row['open'], dt=row['dt'],
                   close=row['close'], high=row['high'], low=row['low'], vol=row['vol'])
            for i, row in kline.iterrows()]
    return bars
def test_find_bi():
    """Exercise the low-level merge (remove_include) and fractal (check_fx) steps."""
    bars = read_daily()
    # Merge k-line inclusion relations (CZSC pre-processing).
    bars1 = []
    for bar in bars:
        if len(bars1) < 2:
            bars1.append(NewBar(symbol=bar.symbol, id=bar.id, freq=bar.freq,
                                dt=bar.dt, open=bar.open,
                                close=bar.close, high=bar.high, low=bar.low,
                                vol=bar.vol, elements=[bar]))
        else:
            k1, k2 = bars1[-2:]
            has_include, k3 = remove_include(k1, k2, bar)
            if has_include:
                # New bar was absorbed: replace the last merged bar.
                bars1[-1] = k3
            else:
                bars1.append(k3)
    # Collect fractal points (FX) over the merged bars.
    fxs = []
    for i in range(1, len(bars1) - 1):
        fx = check_fx(bars1[i - 1], bars1[i], bars1[i + 1])
        if isinstance(fx, FX):
            fxs.append(fx)
def get_user_signals(c: CZSC) -> OrderedDict:
    """Compute signals on a CZSC object (standard helper, mainly for research).

    For live trading, customise which signals are computed as needed.
    :param c: CZSC object
    :return: ordered dict of signal values
    """
    s = OrderedDict({"symbol": c.symbol, "dt": c.bars_raw[-1].dt, "close": c.bars_raw[-1].close})
    # "倒0" is the still-unconfirmed (forming) stroke; "倒1" the last completed
    # stroke (N), "倒2" the one before it (N-1), and so on.
    for i in range(1, 3):
        s.update(get_s_three_bi(c, i))
    s.update(get_s_d0_bi(c))
    return s
def test_czsc_update():
    """End-to-end CZSC construction: no signals, default signals, custom signals."""
    bars = read_daily()
    # Without a signal function no signals are produced.
    c = CZSC(bars)
    assert not c.signals
    # With the default signal function.
    c = CZSC(bars, get_signals=get_default_signals)
    assert len(c.bi_list) == 50 and not c.last_bi_extend
    assert isinstance(c.signals, OrderedDict) and len(c.signals) == 38
    last_bi = c.bi_list[-1]
    assert len(last_bi.raw_bars) == 32 and last_bi.power_price == last_bi.power
    assert len(last_bi.fake_bis) == 11
    assert last_bi.fake_bis[0].direction == last_bi.fake_bis[-1].direction == last_bi.direction
    # With a user-defined signal function and a bounded signal history.
    c = CZSC(bars, get_signals=get_user_signals, signals_n=20)
    assert len(c.signals) == 11
    assert len(c.signals_list) == 20
    assert c.signals_list[-1] == c.signals
    # Render the chart to a throwaway HTML file to exercise kline_pro.
    kline = [x.__dict__ for x in c.bars_raw]
    bi = [{'dt': x.fx_a.dt, "bi": x.fx_a.fx} for x in c.bi_list] + \
         [{'dt': c.bi_list[-1].fx_b.dt, "bi": c.bi_list[-1].fx_b.fx}]
    chart = kline_pro(kline, bi=bi, title="{} - {}".format(c.symbol, c.freq))
    file_html = "x.html"
    chart.render(file_html)
    os.remove(file_html)
def test_get_signals():
    """Pin two known signal values produced by a minimal custom signal function."""
    def get_test_signals(c: CZSC) -> OrderedDict:
        # Only the unfinished-stroke (倒0) signals, plus the standard header keys.
        s = OrderedDict({"symbol": c.symbol, "dt": c.bars_raw[-1].dt, "close": c.bars_raw[-1].close})
        s.update(get_s_d0_bi(c))
        return s
    bars = read_daily()
    # Build a CZSC with the custom signal function and check two known values.
    c = CZSC(bars, get_signals=get_test_signals)
    assert c.signals['日线_倒0笔_方向'] == '向下_任意_任意_0'
assert c.signals['日线_倒0笔_长度'] == '5到9根K线_任意_任意_0'
| [
"[email protected]"
] | |
acf74262f60d7d295e91526be0a91ace5f16d7bc | db14241eca00e2bcbf03924106c377ccb2b2aec8 | /lte/gateway/python/magma/pipelined/bridge_util.py | 2efc3bb2f1fb9a55a423fcaffd22af6c2e1ac7ae | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | bdryja/magma | 24a893abaf65284b9ce721455d70564b2447b547 | 7d8e019a082b88f63d22313abffdb98257160c99 | refs/heads/master | 2022-04-19T14:08:14.365184 | 2020-03-26T15:59:44 | 2020-03-26T16:03:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,059 | py | """
Copyright (c) 2018-present, Facebook, Inc.
All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree. An additional grant
of patent rights can be found in the PATENTS file in the same directory.
"""
import binascii
from collections import defaultdict
import re
import subprocess
from typing import Optional, Dict, List, TYPE_CHECKING
# Prevent circular import
if TYPE_CHECKING:
from magma.pipelined.service_manager import Tables
class DatapathLookupError(Exception):
    """Raised when an ovs-vsctl bridge/interface lookup fails."""
class BridgeTools:
    """
    BridgeTools
    Use ovs-vsctl commands to get bridge info and setup bridges for testing.
    """
    TABLE_NUM_REGEX = r'table=(\d+)'
    @staticmethod
    def get_datapath_id(bridge_name):
        """
        Gets the datapath_id by bridge_name
        Hacky, call vsctl, decode output to str, strip '\n', remove '' around
        the output, convert to int.
        This gives the integer datapath_id that we want to run apps on, this is
        needed when 2 bridges are setup, gtp_br0(main bridge) and testing_br)
        """
        try:
            output = subprocess.check_output(["ovs-vsctl", "get", "bridge",
                                              bridge_name, "datapath_id"])
            # ovs-vsctl prints the id as a quoted hex string, e.g. "00001234..."
            output_str = str(output, 'utf-8').strip()[1:-1]
            output_hex = int(output_str, 16)
        except subprocess.CalledProcessError as e:
            raise DatapathLookupError(
                'Error: ovs-vsctl bridge({}) datapath id lookup: {}'.format(
                    bridge_name, e
                )
            )
        return output_hex
    @staticmethod
    def get_ofport(interface_name):
        """
        Gets the ofport name ofport number of a interface
        """
        try:
            port_num = subprocess.check_output(["ovs-vsctl", "get", "interface",
                                                interface_name, "ofport"])
        except subprocess.CalledProcessError as e:
            raise DatapathLookupError(
                'Error: ovs-vsctl interface({}) of port lookup: {}'.format(
                    interface_name, e
                )
            )
        return int(port_num)
    @staticmethod
    def create_bridge(bridge_name, iface_name):
        """
        Creates a simple bridge, sets up an interface.
        Used when running unit tests
        """
        subprocess.Popen(["ovs-vsctl", "add-br", bridge_name]).wait()
        subprocess.Popen(["ovs-vsctl", "set", "bridge", bridge_name,
                          "protocols=OpenFlow10,OpenFlow13,OpenFlow14",
                          "other-config:disable-in-band=true"]).wait()
        subprocess.Popen(["ovs-vsctl", "set-controller", bridge_name,
                          "tcp:127.0.0.1:6633", "tcp:127.0.0.1:6654"]).wait()
        subprocess.Popen(["ifconfig", iface_name, "192.168.1.1/24"]).wait()
    @staticmethod
    def destroy_bridge(bridge_name):
        """
        Removes the bridge.
        Used when unit test finishes
        """
        subprocess.Popen(["ovs-vsctl", "del-br", bridge_name]).wait()
    @staticmethod
    def get_controllers_for_bridge(bridge_name):
        """Return the bridge's controller addresses as a list of non-empty strings."""
        curr_controllers = subprocess.check_output(
            ["ovs-vsctl", "get-controller", bridge_name],
        ).decode("utf-8").replace(' ', '').split('\n')
        return list(filter(None, curr_controllers))
    @staticmethod
    def add_controller_to_bridge(bridge_name, port_num):
        """Add a localhost TCP controller on port_num (no-op if already present)."""
        curr_controllers = BridgeTools.get_controllers_for_bridge(bridge_name)
        ctlr_ip = "tcp:127.0.0.1:{}".format(port_num)
        if ctlr_ip in curr_controllers:
            return
        curr_controllers.append(ctlr_ip)
        BridgeTools.set_controllers_for_bridge(bridge_name, curr_controllers)
    @staticmethod
    def remove_controller_from_bridge(bridge_name, port_num):
        """Remove the localhost TCP controller on port_num from the bridge."""
        curr_controllers = BridgeTools.get_controllers_for_bridge(bridge_name)
        ctlr_ip = 'tcp:127.0.0.1:{}'.format(port_num)
        curr_controllers.remove(ctlr_ip)
        BridgeTools.set_controllers_for_bridge(bridge_name, curr_controllers)
    @staticmethod
    def set_controllers_for_bridge(bridge_name, ctlr_list):
        """Replace the bridge's controller list with ctlr_list via ovs-vsctl."""
        set_cmd = ["ovs-vsctl", "set-controller", bridge_name]
        set_cmd.extend(ctlr_list)
        subprocess.Popen(set_cmd).wait()
    @staticmethod
    def get_flows_for_bridge(bridge_name, table_num=None, include_stats=True):
        """
        Returns a flow dump of the given bridge from ovs-ofctl. If table_num is
        specified, then only the flows for the table will be returned.
        """
        if include_stats:
            set_cmd = ["ovs-ofctl", "dump-flows", bridge_name]
        else:
            set_cmd = ["ovs-ofctl", "dump-flows", bridge_name, "--no-stats"]
        if table_num:
            set_cmd.append("table=%s" % table_num)
        # Drop the ovs-ofctl banner line and the trailing empty line.
        flows = \
            subprocess.check_output(set_cmd).decode('utf-8').split('\n')[1:-1]
        return flows
    @staticmethod
    def _get_annotated_name_by_table_num(
            table_assignments: 'Dict[str, Tables]') -> Dict[int, str]:
        """Map each table number to a human-readable 'app(main_table/scratch)' label."""
        annotated_tables = {}
        # A main table may be used by multiple apps
        apps_by_main_table_num = defaultdict(list)
        for name in table_assignments:
            apps_by_main_table_num[table_assignments[name].main_table].append(
                name)
            # Scratch tables are used for only one app
            for ind, scratch_num in enumerate(
                    table_assignments[name].scratch_tables):
                annotated_tables[scratch_num] = '{}(scratch_table_{})'.format(
                    name,
                    ind)
        for table, apps in apps_by_main_table_num.items():
            annotated_tables[table] = '{}(main_table)'.format(
                '/'.join(sorted(apps)))
        return annotated_tables
    @classmethod
    def get_annotated_flows_for_bridge(cls, bridge_name: str,
                                       table_assignments: 'Dict[str, Tables]',
                                       apps: Optional[List[str]] = None,
                                       include_stats: bool = True
                                       ) -> List[str]:
        """
        Returns an annotated flow dump of the given bridge from ovs-ofctl.
        table_assignments is used to annotate table number with its
        corresponding app. If a note exists, the note will be decoded.
        If apps is not None, then only the flows for the given apps will be
        returned.
        """
        annotated_tables = cls._get_annotated_name_by_table_num(
            table_assignments)
        def annotated_table_num(num):
            # Fall back to the raw number when no annotation is known.
            if int(num) in annotated_tables:
                return annotated_tables[int(num)]
            return num
        def parse_resubmit_action(match):
            """
            resubmit(port,1) => resubmit(port,app_name(main_table))
            """
            ret = ''
            # We can have more than one resubmit per flow
            actions = [a for a in match.group().split('resubmit') if a]
            for action in actions:
                resubmit_tokens = re.search(r'\((.*?)\)', action)\
                    .group(1).split(',')
                in_port, table = resubmit_tokens[0], resubmit_tokens[1]
                if ret:
                    ret += ','
                ret += 'resubmit({},{})'.format(in_port,
                                                annotated_table_num(table))
            return ret
        def parse_flow(flow):
            sub_rules = [
                # Annotate table number with app name
                (cls.TABLE_NUM_REGEX,
                 lambda match: 'table={}'.format(annotated_table_num(
                     match.group(1)))),
                (r'resubmit\((.*)\)', parse_resubmit_action),
                # Decode the note
                (r'note:([\d\.a-fA-F]*)',
                 lambda match: 'note:{}'.format(
                     str(binascii.unhexlify(match.group(1)
                                            .replace('00', '')
                                            .replace('.', ''))))),
            ]
            for rule in sub_rules:
                flow = re.sub(rule[0], rule[1], flow)
            return flow
        def filter_apps(flows):
            # Generator: pass everything through when no app filter was given.
            if apps is None:
                yield from flows
                return
            selected_tables = []
            for app in apps:
                selected_tables.append(table_assignments[app].main_table)
                selected_tables.extend(table_assignments[app].scratch_tables)
            for flow in flows:
                table_num = int(re.search(cls.TABLE_NUM_REGEX, flow).group(1))
                if table_num in selected_tables:
                    yield flow
        return [parse_flow(flow) for flow in
                filter_apps(cls.get_flows_for_bridge(bridge_name,
include_stats=include_stats))]
| [
"[email protected]"
] | |
4edd2ce054246103a8e3827a9b3be861c706652a | 09379c13eea9c6b806c43fca12ebf182684ea11f | /Day12/04装饰器进阶.py | af26a492d965e1d4d3728d61152eea7cccc77e28 | [] | no_license | ExplorerX/PythonProjects | 0d97348dc9712c622e4f20e9c33b3904e2810e1d | de23e5f7a9415bf983f9c99ba5c0bd12dc7b6a99 | refs/heads/master | 2020-04-19T10:39:12.788123 | 2019-02-26T05:38:47 | 2019-02-26T05:38:47 | 168,146,465 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,700 | py | # 带参数的装饰器
# A decorator is, at bottom, a closure.
# A closure means the inner function uses the outer function's variables, which therefore stay alive in memory.
# import time
# FLAG = True
#
#
# def wrapper(flag):
# def tim(f):
# def inner(*args, **kwargs):
# if not flag:
# start = time.time()
# ret = f(*args, **kwargs)
# end = time.time()
# print(end - start)
# return ret
# else:
# ret = f(*args, **kwargs)
# return ret
# return inner
# return tim
#
#
# @wrapper(FLAG)
# def func(*args):
# time.sleep(0.1)
# sum1 = 0
# for i in args:
# sum1 += i
# return sum1
#
#
# # li = [1, 2, 3, 4]
# result = func(1, 2, 3, 4)
# print(result)
# Multiple decorators on one function nest like Russian dolls.
def wrapper1(f):
    """Decorator: print an entry/exit marker (inner1) around calls to *f*."""
    def inner1(*args, **kwargs):
        print('This is front string in inner1!')
        result = f(*args, **kwargs)
        print('This is behind string in inner1!')
        return result
    return inner1
def wrapper2(f):
    """Decorator: print an entry/exit marker (inner2) around calls to *f*."""
    def inner2(*args, **kwargs):
        print('This is front string in inner2!')
        result = f(*args, **kwargs)
        print('This is behind string in inner2!')
        return result
    return inner2
@wrapper1
@wrapper2
def func(*args):
    # Decorators apply bottom-up: wrapper2 wraps func, wrapper1 wraps that.
    print('This is a sum function!')
    return sum(args)
# Demo run: the prints fire in inner1 -> inner2 -> func -> inner2 -> inner1 order.
result = func(1, 2, 3, 4, 5, 6)
print(result)
# Expected output:
"""
This is front string in inner1!
This is front string in inner2!
This is a sum function!
This is behind string in inner2!
This is behind string in inner1!
21
"""
| [
"[email protected]"
] | |
40b395494ca5039218e435761d3aac136f825bd5 | ebf723d6066e157ad5cedf94099deb62e6191390 | /py/yahoo_procon2019_qual_b.py | ec66b16507bbf79dc272e9e4cf3a9ffbaa169713 | [] | no_license | PeterGrainfield/AltCoder | a960bd2642887502829bb4078a35712c492a8d54 | 378cebdf1cf713310a83575b4dc1e39834e76e59 | refs/heads/master | 2023-05-08T16:32:53.653689 | 2021-05-31T06:10:55 | 2021-05-31T06:10:55 | 368,459,650 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 230 | py | lab = [list(map(int, input().split())) for _ in range(3)]
# Degree count per town (1-4); 3 roads contribute two endpoints each.
road = [0]*4
for ab in lab:
    road[ab[0]-1] += 1
    road[ab[1]-1] += 1
# A valid route needs every town touched (min > 0) and no town on all
# three roads (max < 3).
if min(road) == 0:
    print("NO")
elif max(road) == 3:
    print("NO")
else:
print("YES")
| [
"[email protected]"
] | |
05b39c43cf82306a1e921f4a016fbda0bddbc0d2 | 24e843a90a3b3a37cc4d76a207f41d1fc628c2e7 | /python3/solved/P674. Longest Continuous Increasing Subsequence.py | e9b047b175727dd2037cea06f93d22535d18a00e | [] | no_license | erikliu0801/leetcode | c595ea786716f7df86bd352c1e8d691f1870ec70 | 1de7bfe192324f9de28afa06b9539331c87d1346 | refs/heads/master | 2023-08-07T14:47:19.074076 | 2021-09-05T09:46:35 | 2021-09-05T09:46:35 | 224,321,259 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,720 | py | # ToDo:
"""
674. Longest Continuous Increasing Subsequence
Easy
Given an unsorted array of integers, find the length of longest continuous increasing subsequence (subarray).
Note: Length of the array will not exceed 10,000.
"""
# Conditions & Concepts
"""
"""
# Code
## submit part
class Solution:
def findLengthOfLCIS(self, nums: List[int]) -> int:
## test part
def findLengthOfLCIS(nums):
    """
    nums: List[int]
    rtype: int
    """
    ## code here
    # NOTE(review): scratch stub left over from the solution template; the
    # working implementation with the same name appears later in this file
    # and shadows this definition.
#1
"""
Runtime Error
Last executed input: [1,3,5,7]
Runtime Error
Last executed input: [1]
Success
Runtime: 72 ms, faster than 93.67% of Python3 online submissions for Longest Continuous Increasing Subsequence.
Memory Usage: 13.9 MB, less than 95.65% of Python3 online submissions for Longest Continuous Increasing Subsequence.
"""
def findLengthOfLCIS(nums):
    """Return the length of the longest strictly increasing contiguous run.

    Empty input yields 0; a single element yields 1.
    """
    if len(nums) < 2:
        return len(nums)
    best = 1
    run = 1
    # Walk adjacent pairs, extending the current run on each increase and
    # restarting it otherwise; track the best run seen so far.
    for prev, cur in zip(nums, nums[1:]):
        if cur > prev:
            run += 1
        else:
            run = 1
        if run > best:
            best = run
    return best
# Test
## Functional Test
"""
# Conditions & Concepts
"""
if __name__ == '__main__':
    # Quick functional check against the LeetCode sample cases.
    input_nums = [[1,3,5,4,7], [2,2,2,2,2], [1,3,5,7], [1]]
    expected_output = [3, 1, 4, 1]
    for i in range(len(input_nums)):
        if findLengthOfLCIS(input_nums[i]) != expected_output[i]:
            print("Wrong!!!", ' Output:', findLengthOfLCIS(input_nums[i]), '; Expected Output:', expected_output[i])
        else:
            print("Right")
    # print(findLengthOfLCIS(input_nums[-1]))
## Performance Test
import cProfile
# NOTE(review): the command string is empty, so nothing is profiled yet;
# fill in e.g. 'findLengthOfLCIS(list(range(10000)))' to get timings.
cProfile.run('')
## Unit Test
import unittest
class Test(unittest.TestCase):
    # Placeholder test case -- no assertions implemented yet.
    def test(self):
        pass
if __name__ == '__main__':
    unittest.main()
"[email protected]"
] | |
e24fedc5e7ee860538e5849e1ecaa5874d393e8f | 88e3ae7a21301c56f25fb7561d354cb169139932 | /likebee/core/migrations/0002_auto_20190113_1657.py | af66823781f4b2dd875b0cc34fbdeefbeaeeada6 | [
"MIT"
] | permissive | ow7/likebee | 9cf54a3c45d031cc92e9a9cdebe11e7db11acf6a | 0a0dd6368ef43b53fb8315eb5eb14663067ef07c | refs/heads/master | 2020-04-16T11:13:24.643022 | 2019-04-23T07:34:20 | 2019-04-23T07:34:20 | 165,528,187 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 794 | py | # Generated by Django 2.1.5 on 2019-01-13 18:57
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration for the ``core`` app.

    Adds optional ``start_on``/``finish_on`` datetime fields to the
    ``Sprint`` model and gives ``Sprint.name`` a default value.
    """

    dependencies = [
        ('core', '0001_initial'),
    ]
    # All three operations below alter the core.Sprint model.
    operations = [
        migrations.AddField(
            model_name='sprint',
            name='finish_on',
            field=models.DateTimeField(blank=True, null=True, verbose_name='Termina em'),
        ),
        migrations.AddField(
            model_name='sprint',
            name='start_on',
            field=models.DateTimeField(blank=True, null=True, verbose_name='Inicia em'),
        ),
        migrations.AlterField(
            model_name='sprint',
            name='name',
            field=models.CharField(default='Sprint', max_length=200, verbose_name='Nome'),
        ),
    ]
| [
"[email protected]"
] | |
06ddd7dcca2fe0eb55e52f4f2f0c3f73f995e301 | 0a0cc1d675d57704e6fd15e60f7ef752fe296f6d | /code/data_utils.py | 9a62de4d851bd06e2846755e5a801a3137677d43 | [] | no_license | RichardcLee/SRGAN-LYH | 83660ff4f92fe6ae61a030cfd7ba71d0b080d2fd | 726e5b172fa3790976ff17efb5401f52efd23816 | refs/heads/master | 2021-07-08T17:26:33.109918 | 2021-04-20T07:30:12 | 2021-04-20T07:30:12 | 239,928,994 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,598 | py | from os import listdir
from os.path import join
from PIL import Image
from torch.utils.data.dataset import Dataset
from torchvision.transforms import Compose, RandomCrop, ToTensor, ToPILImage, CenterCrop, Resize
def is_image_file(filename):
    """True when *filename* has one of the supported (case-sensitive) image extensions."""
    return filename.endswith(('.png', '.jpg', '.jpeg', '.PNG', '.JPG', '.JPEG'))
def calculate_valid_crop_size(crop_size, upscale_factor):
    """Largest size <= crop_size that is an exact multiple of upscale_factor."""
    return upscale_factor * (crop_size // upscale_factor)
def train_hr_transform(crop_size):
    """Transform for HR training patches: random crop, then tensor conversion."""
    steps = [
        RandomCrop(crop_size),
        ToTensor(),
    ]
    return Compose(steps)
def train_lr_transform(crop_size, upscale_factor):
    """Transform deriving the LR patch: bicubic downscale by upscale_factor."""
    steps = [
        ToPILImage(),
        Resize(crop_size // upscale_factor, interpolation=Image.BICUBIC),
        ToTensor(),
    ]
    return Compose(steps)
def display_transform():
    """Transform used for visualisation: 400px resize + center crop, as tensor."""
    steps = [ToPILImage(), Resize(400), CenterCrop(400), ToTensor()]
    return Compose(steps)
class TrainDatasetFromFolder(Dataset):
    """Training dataset yielding (LR, HR) patch pairs from one image folder."""

    def __init__(self, dataset_dir, crop_size, upscale_factor):
        super(TrainDatasetFromFolder, self).__init__()
        # Keep only recognised image files from the directory.
        self.image_filenames = [
            join(dataset_dir, name)
            for name in listdir(dataset_dir)
            if is_image_file(name)
        ]
        # Crop size must be divisible by the upscale factor.
        valid_crop = calculate_valid_crop_size(crop_size, upscale_factor)
        self.hr_transform = train_hr_transform(valid_crop)
        self.lr_transform = train_lr_transform(valid_crop, upscale_factor)

    def __getitem__(self, index):
        hr_patch = self.hr_transform(Image.open(self.image_filenames[index]))
        lr_patch = self.lr_transform(hr_patch)
        return lr_patch, hr_patch

    def __len__(self):
        return len(self.image_filenames)
class ValDatasetFromFolder(Dataset):
    """Validation dataset yielding (LR, bicubic-restored, HR) image triples."""

    def __init__(self, dataset_dir, upscale_factor):
        super(ValDatasetFromFolder, self).__init__()
        self.upscale_factor = upscale_factor
        self.image_filenames = [
            join(dataset_dir, name)
            for name in listdir(dataset_dir)
            if is_image_file(name)
        ]

    def __getitem__(self, index):
        hr_image = Image.open(self.image_filenames[index])
        w, h = hr_image.size
        # Center-crop to the largest square divisible by the upscale factor.
        crop_size = calculate_valid_crop_size(min(w, h), self.upscale_factor)
        downscale = Resize(crop_size // self.upscale_factor, interpolation=Image.BICUBIC)
        upscale = Resize(crop_size, interpolation=Image.BICUBIC)
        hr_image = CenterCrop(crop_size)(hr_image)
        lr_image = downscale(hr_image)
        hr_restore_img = upscale(lr_image)
        return ToTensor()(lr_image), ToTensor()(hr_restore_img), ToTensor()(hr_image)

    def __len__(self):
        return len(self.image_filenames)
class TestDatasetFromFolder(Dataset):
    """Test dataset pairing LR images with their HR ground truth.

    BUGFIX: both file lists are now sorted. ``os.listdir`` returns entries
    in arbitrary order, so ``lr_filenames[i]`` and ``hr_filenames[i]`` were
    not guaranteed to refer to the same image; sorting both directories
    (which are assumed to contain identically named files -- TODO confirm)
    keeps them index-aligned.
    """

    def __init__(self, lr_path, hr_path, upscale_factor):
        super(TestDatasetFromFolder, self).__init__()
        self.lr_path = lr_path
        self.hr_path = hr_path
        self.upscale_factor = upscale_factor
        self.lr_filenames = sorted(join(self.lr_path, x) for x in listdir(self.lr_path) if is_image_file(x))
        self.hr_filenames = sorted(join(self.hr_path, x) for x in listdir(self.hr_path) if is_image_file(x))

    def __getitem__(self, index):
        # Bare file name identifies the sample in the reported results.
        image_name = self.lr_filenames[index].split('/')[-1]
        lr_image = Image.open(self.lr_filenames[index])
        w, h = lr_image.size
        hr_image = Image.open(self.hr_filenames[index])
        # Bicubic upscale of the LR input serves as the restoration baseline.
        hr_scale = Resize((self.upscale_factor * h, self.upscale_factor * w), interpolation=Image.BICUBIC)
        hr_restore_img = hr_scale(lr_image)
        return image_name, ToTensor()(lr_image), ToTensor()(hr_restore_img), ToTensor()(hr_image)

    def __len__(self):
        return len(self.lr_filenames)
| [
"[email protected]"
] | |
ccbd80416743a01517fc733d6407b812ed9d7c4c | 93713f46f16f1e29b725f263da164fed24ebf8a8 | /Library/lib/python3.7/site-packages/astropy-4.0-py3.7-macosx-10.9-x86_64.egg/astropy/units/astrophys.py | 9a4a5f2f26913ab6e4eb9c7e050137ab68f603e3 | [
"BSD-3-Clause"
] | permissive | holzschu/Carnets | b83d15136d25db640cea023abb5c280b26a9620e | 1ad7ec05fb1e3676ac879585296c513c3ee50ef9 | refs/heads/master | 2023-02-20T12:05:14.980685 | 2023-02-13T15:59:23 | 2023-02-13T15:59:23 | 167,671,526 | 541 | 36 | BSD-3-Clause | 2022-11-29T03:08:22 | 2019-01-26T09:26:46 | Python | UTF-8 | Python | false | false | 7,282 | py | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This package defines the astrophysics-specific units. They are also
available in the `astropy.units` namespace.
"""
from . import si
from astropy.constants import si as _si
from .core import (UnitBase, def_unit, si_prefixes, binary_prefixes,
                   set_enabled_units)
# To ensure si units of the constants can be interpreted.
set_enabled_units([si])
import numpy as _numpy
# Namespace dict that each def_unit() call below populates with its units.
_ns = globals()
###########################################################################
# LENGTH
def_unit((['AU', 'au'], ['astronomical_unit']), _si.au, namespace=_ns, prefixes=True,
         doc="astronomical unit: approximately the mean Earth--Sun "
         "distance.")
def_unit(['pc', 'parsec'], _si.pc, namespace=_ns, prefixes=True,
         doc="parsec: approximately 3.26 light-years.")
def_unit(['solRad', 'R_sun', 'Rsun'], _si.R_sun, namespace=_ns,
         doc="Solar radius", prefixes=False,
         format={'latex': r'R_{\odot}', 'unicode': 'R⊙'})
def_unit(['jupiterRad', 'R_jup', 'Rjup', 'R_jupiter', 'Rjupiter'],
         _si.R_jup, namespace=_ns, prefixes=False, doc="Jupiter radius",
         # LaTeX jupiter symbol requires wasysym
         format={'latex': r'R_{\rm J}', 'unicode': 'R♃'})
def_unit(['earthRad', 'R_earth', 'Rearth'], _si.R_earth, namespace=_ns,
         prefixes=False, doc="Earth radius",
         # LaTeX earth symbol requires wasysym
         format={'latex': r'R_{\oplus}', 'unicode': 'R⊕'})
def_unit(['lyr', 'lightyear'], (_si.c * si.yr).to(si.m),
         namespace=_ns, prefixes=True, doc="Light year")
###########################################################################
# AREAS
def_unit(['barn', 'barn'], 10 ** -28 * si.m ** 2, namespace=_ns, prefixes=True,
         doc="barn: unit of area used in HEP")
###########################################################################
# ANGULAR MEASUREMENTS
def_unit(['cycle', 'cy'], 2.0 * _numpy.pi * si.rad,
         namespace=_ns, prefixes=False,
         doc="cycle: angular measurement, a full turn or rotation")
###########################################################################
# MASS
def_unit(['solMass', 'M_sun', 'Msun'], _si.M_sun, namespace=_ns,
         prefixes=False, doc="Solar mass",
         format={'latex': r'M_{\odot}', 'unicode': 'M⊙'})
def_unit(['jupiterMass', 'M_jup', 'Mjup', 'M_jupiter', 'Mjupiter'],
         _si.M_jup, namespace=_ns, prefixes=False, doc="Jupiter mass",
         # LaTeX jupiter symbol requires wasysym
         format={'latex': r'M_{\rm J}', 'unicode': 'M♃'})
def_unit(['earthMass', 'M_earth', 'Mearth'], _si.M_earth, namespace=_ns,
         prefixes=False, doc="Earth mass",
         # LaTeX earth symbol requires wasysym
         format={'latex': r'M_{\oplus}', 'unicode': 'M⊕'})
def_unit(['M_p'], _si.m_p, namespace=_ns, doc="Proton mass",
         format={'latex': r'M_{p}', 'unicode': 'Mₚ'})
def_unit(['M_e'], _si.m_e, namespace=_ns, doc="Electron mass",
         format={'latex': r'M_{e}', 'unicode': 'Mₑ'})
# Unified atomic mass unit
def_unit(['u', 'Da', 'Dalton'], _si.u, namespace=_ns,
         prefixes=True, exclude_prefixes=['a', 'da'],
         doc="Unified atomic mass unit")
##########################################################################
# ENERGY
# Here, explicitly convert the planck constant to 'eV s' since the constant
# can override that to give a more precise value that takes into account
# covariances between e and h. Eventually, this may also be replaced with
# just `_si.Ryd.to(eV)`.
def_unit(['Ry', 'rydberg'],
         (_si.Ryd * _si.c * _si.h.to(si.eV * si.s)).to(si.eV),
         namespace=_ns, prefixes=True,
         doc="Rydberg: Energy of a photon whose wavenumber is the Rydberg "
         "constant",
         format={'latex': r'R_{\infty}', 'unicode': 'R∞'})
###########################################################################
# ILLUMINATION
def_unit(['solLum', 'L_sun', 'Lsun'], _si.L_sun, namespace=_ns,
         prefixes=False, doc="Solar luminance",
         format={'latex': r'L_{\odot}', 'unicode': 'L⊙'})
###########################################################################
# SPECTRAL DENSITY
def_unit((['ph', 'photon'], ['photon']),
         format={'ogip': 'photon', 'vounit': 'photon'},
         namespace=_ns, prefixes=True)
def_unit(['Jy', 'Jansky', 'jansky'], 1e-26 * si.W / si.m ** 2 / si.Hz,
         namespace=_ns, prefixes=True,
         doc="Jansky: spectral flux density")
def_unit(['R', 'Rayleigh', 'rayleigh'],
         (1e10 / (4 * _numpy.pi)) *
         ph * si.m ** -2 * si.s ** -1 * si.sr ** -1,
         namespace=_ns, prefixes=True,
         doc="Rayleigh: photon flux")
###########################################################################
# MISCELLANEOUS
# Some of these are very FITS-specific and perhaps considered a mistake.
# Maybe they should be moved into the FITS format class?
# TODO: This is defined by the FITS standard as "relative to the sun".
# Is that mass, volume, what?
def_unit(['Sun'], namespace=_ns)
###########################################################################
# EVENTS
def_unit((['ct', 'count'], ['count']),
         format={'fits': 'count', 'ogip': 'count', 'vounit': 'count'},
         namespace=_ns, prefixes=True, exclude_prefixes=['p'])
def_unit((['pix', 'pixel'], ['pixel']),
         format={'ogip': 'pixel', 'vounit': 'pixel'},
         namespace=_ns, prefixes=True)
###########################################################################
# MISCELLANEOUS
def_unit(['chan'], namespace=_ns, prefixes=True)
def_unit(['bin'], namespace=_ns, prefixes=True)
def_unit((['vox', 'voxel'], ['voxel']),
         format={'fits': 'voxel', 'ogip': 'voxel', 'vounit': 'voxel'},
         namespace=_ns, prefixes=True)
def_unit((['bit', 'b'], ['bit']), namespace=_ns,
         prefixes=si_prefixes + binary_prefixes)
def_unit((['byte', 'B'], ['byte']), 8 * bit, namespace=_ns,
         format={'vounit': 'byte'},
         prefixes=si_prefixes + binary_prefixes,
         exclude_prefixes=['d'])
def_unit(['adu'], namespace=_ns, prefixes=True)
def_unit(['beam'], namespace=_ns, prefixes=True)
def_unit(['electron'], doc="Number of electrons", namespace=_ns,
         format={'latex': r'e^{-}', 'unicode': 'e⁻'})
# This is not formally a unit, but is used in that way in many contexts, and
# an appropriate equivalency is only possible if it's treated as a unit (see
# https://arxiv.org/pdf/1308.4150.pdf for more)
# Also note that h or h100 or h_100 would be a better name, but they either
# conflict or have numbers in them, which is apparently disallowed
def_unit(['littleh'], namespace=_ns, prefixes=False,
         doc="Reduced/\"dimensionless\" Hubble constant",
         format={'latex': r'h_{100}'})
###########################################################################
# CLEANUP
# Remove helper names so they do not leak into the public unit namespace.
del UnitBase
del def_unit
del si
###########################################################################
# DOCSTRING
# This generates a docstring for this module that describes all of the
# standard units defined here.
from .utils import generate_unit_summary as _generate_unit_summary
if __doc__ is not None:
    __doc__ += _generate_unit_summary(globals())
| [
"[email protected]"
] | |
238fb1753a99b26170c07e8c5f7b6c37dcdaec9d | a6ea3ddd3592f83b6d4b38d9e803d8ad387975d1 | /districts/ak/districts.py | 76eb48ad290e7b4b7ea15712a6cabbc1abd47c0b | [] | no_license | CTCL/bip-data | 80a9cbde0057e551fe8b85091f2be891b669a803 | 39cf25545937db19daf17aeb3f0b86fb202fe74c | refs/heads/new_nat | 2020-12-25T10:36:22.906223 | 2013-02-06T03:00:50 | 2013-02-06T03:00:50 | 50,055,835 | 1 | 0 | null | 2016-01-20T19:48:51 | 2016-01-20T19:48:51 | null | UTF-8 | Python | false | false | 1,029 | py | state_senate_district = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T']
school_district = []
state = ['AK']
judicial_district = []
congressional_district = ['001']
county_id = ['ALEUTIANS EAST BOROUGH', 'BRISTOL BAY BOROUGH', 'CITY & BOROUGH OF JUNEAU', 'CITY & BOROUGH OF SITKA', 'CITY & BOROUGH OF YAKUTAT', 'CITY AND BOROUGH OF WRANGELL', 'DENALI BOROUGH', 'FAIRBANKS NORTH STAR BOROUGH', 'HAINES BOROUGH', 'KENAI PENINSULA BOROUGH', 'KETCHIKAN GATEWAY BOROUGH', 'KODIAK ISLAND BOROUGH', 'LAKE & PENINSULA BOROUGH', 'MATANUSKA-SUSITNA BOROUGH', 'MUNICIPALITY OF ANCHORAGE', 'NORTH SLOPE BOROUGH', 'NORTHWEST ARCTIC BOROUGH', 'UNORGANIZED']
county_council = []
state_representative_district = ['001', '002', '003', '004', '005', '006', '007', '008', '009', '010', '011', '012', '013', '014', '015', '016', '017', '018', '019', '020', '021', '022', '023', '024', '025', '026', '027', '028', '029', '030', '031', '032', '033', '034', '035', '036', '037', '038', '039', '040']
| [
"gaertner@gaertner-lap.(none)"
] | gaertner@gaertner-lap.(none) |
e61020e2fbd20892008b21a3e1c7a11a32a01765 | e714dfd95cb74f4e357af8d085e4dcaf8b7ecdf3 | /0x0A-python-inheritance/2-is_same_class.py | aa4b57fcbf256d54c6bd85d4a1fca0b859008f73 | [] | no_license | MCavigli/holbertonschool-higher_level_programming_classic | 2cea769dc1fd39e90f6ef74cdb3191e2472b0282 | 870548f964a3deac4a41918e9c3d0bad6cd732b4 | refs/heads/master | 2022-03-06T09:33:56.839118 | 2019-09-27T06:04:34 | 2019-09-27T06:04:34 | 184,122,977 | 3 | 4 | null | null | null | null | UTF-8 | Python | false | false | 399 | py | #!/usr/bin/python3
"""This module holds a function that checks if an object is an instance
or a specified class
"""
def is_same_class(obj, a_class):
    """Check whether ``obj`` is exactly an instance of ``a_class``.

    Args:
        obj: the object to check
        a_class: the class to check against

    Returns:
        bool: True only for an exact type match (subclasses do not count).
    """
    return type(obj) is a_class
| [
"[email protected]"
] | |
19313e15aa82c9fa0f64105caffdbd82c478fe72 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p3BR/R1/benchmark/startQiskit_QC239.py | 5f958a1bc26ad324bb6b43561bf565c7887f736f | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,430 | py | # qubit number=3
# total number=45
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def bitwise_xor(s: str, t: str) -> str:
    """Character-wise XOR of two bit strings.

    Note: the result is returned reversed relative to the inputs, matching
    the original implementation's ``[::-1]`` convention.
    """
    bits = [str(int(s[i]) ^ int(t[i])) for i in range(len(s))]
    return ''.join(reversed(bits))
def bitwise_dot(s: str, t: str) -> str:
    """Inner product of two bit strings over GF(2), returned as '0' or '1'."""
    total = sum(int(s[i]) * int(t[i]) for i in range(len(s)))
    return str(total % 2)
def build_oracle(n: int, f: Callable[[str], str]) -> QuantumCircuit:
    """Build the bit-flip oracle O_f over ``n`` control qubits plus 1 target.

    For every n-bit string ``rep`` with f(rep) == "1", the target qubit is
    flipped via a multi-controlled Toffoli; X gates temporarily map the
    '0' positions of ``rep`` onto the control pattern and are undone
    afterwards, so the oracle flips the target exactly on inputs where f
    evaluates to "1".
    """
    # implement the oracle O_f
    # NOTE: use multi_control_toffoli_gate ('noancilla' mode)
    # https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
    # https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
    # https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
    controls = QuantumRegister(n, "ofc")
    target = QuantumRegister(1, "oft")
    oracle = QuantumCircuit(controls, target, name="Of")
    for i in range(2 ** n):
        rep = np.binary_repr(i, n)
        if f(rep) == "1":
            for j in range(n):
                if rep[j] == "0":
                    oracle.x(controls[j])
            oracle.mct(controls, target[0], None, mode='noancilla')
            for j in range(n):
                if rep[j] == "0":
                    oracle.x(controls[j])
    # oracle.barrier()
    # oracle.draw('mpl', filename=(kernel + '-oracle.png'))
    return oracle
def build_circuit(n: int, f: Callable[[str], str]) -> QuantumCircuit:
    """Assemble the Bernstein-Vazirani circuit on n+1 qubits for oracle ``f``.

    Uses ``n`` input qubits plus one ancilla (index n) prepared in |1>.
    The extra single-qubit gates tagged '# number=...' near the start
    appear to be auto-inserted benchmark mutations -- kept as-is.
    """
    # implement the Bernstein-Vazirani circuit
    zero = np.binary_repr(0, n)
    b = f(zero)
    # initial n + 1 bits
    input_qubit = QuantumRegister(n+1, "qc")
    classicals = ClassicalRegister(n, "qm")
    prog = QuantumCircuit(input_qubit, classicals)
    # inverse last one (can be omitted if using O_f^\pm)
    prog.x(input_qubit[n])
    # circuit begin
    prog.h(input_qubit[1]) # number=1
    prog.rx(-0.09738937226128368,input_qubit[2]) # number=2
    prog.h(input_qubit[1]) # number=33
    prog.cz(input_qubit[2],input_qubit[1]) # number=34
    prog.h(input_qubit[1]) # number=35
    prog.h(input_qubit[1]) # number=3
    # apply H to get superposition
    for i in range(n):
        prog.h(input_qubit[i])
    prog.h(input_qubit[n])
    prog.barrier()
    # apply oracle O_f
    oracle = build_oracle(n, f)
    prog.append(
        oracle.to_gate(),
        [input_qubit[i] for i in range(n)] + [input_qubit[n]])
    # apply H back (QFT on Z_2^n)
    for i in range(n):
        prog.h(input_qubit[i])
    prog.barrier()
    # measure
    return prog
def get_statevector(prog: QuantumCircuit) -> Any:
    """Simulate ``prog`` and map every basis state '|bits>' to its amplitude."""
    simulator = Aer.get_backend('statevector_simulator')
    amplitudes = execute(prog, simulator).result().get_statevector()
    n_qubits = round(log2(len(amplitudes)))
    return {
        "|" + np.binary_repr(index, n_qubits) + ">": amplitudes[index]
        for index in range(2 ** n_qubits)
    }
def evaluate(backend_str: str, prog: QuantumCircuit, shots: int, b: str) -> Any:
    """Simulate ``prog`` on the named Aer backend and summarise the outcome.

    Returns a dict with the raw measurement counts, the exact statevector,
    the most frequent measured bitstring ``a`` (bit-reversed) and the
    expected constant ``b`` passed through unchanged.
    """
    # Q: which backend should we use?
    # get state vector
    quantum_state = get_statevector(prog)
    # get simulate results
    # provider = IBMQ.load_account()
    # backend = provider.get_backend(backend_str)
    # qobj = compile(prog, backend, shots)
    # job = backend.run(qobj)
    # job.result()
    backend = Aer.get_backend(backend_str)
    # transpile/schedule -> assemble -> backend.run
    results = execute(prog, backend, shots=shots).result()
    counts = results.get_counts()
    # Most common outcome, reversed to undo qiskit's little-endian ordering.
    a = Counter(counts).most_common(1)[0][0][::-1]
    return {
        "measurements": counts,
        # "state": statevec,
        "quantum_state": quantum_state,
        "a": a,
        "b": b
    }
def bernstein_test_1(rep: str):
    """Hidden function 011 . x + 1 (inner product mod 2, then bit-flip)."""
    return bitwise_xor(bitwise_dot("011", rep), "1")
def bernstein_test_2(rep: str):
    """Hidden function 000 . x + 0 (constant zero)."""
    return bitwise_xor(bitwise_dot("000", rep), "0")
def bernstein_test_3(rep: str):
    """Hidden function 111 . x + 1 (inner product mod 2, then bit-flip)."""
    return bitwise_xor(bitwise_dot("111", rep), "1")
if __name__ == "__main__":
n = 2
a = "11"
b = "1"
f = lambda rep: \
bitwise_xor(bitwise_dot(a, rep), b)
prog = build_circuit(n, f)
sample_shot =4000
writefile = open("../data/startQiskit_QC239.csv", "w")
# prog.draw('mpl', filename=(kernel + '.png'))
IBMQ.load_account()
provider = IBMQ.get_provider(hub='ibm-q')
provider.backends()
backend = provider.get_backend("ibmq_belem")
circuit1 = transpile(prog, FakeYorktown())
circuit1.h(qubit=2)
circuit1.x(qubit=3)
circuit1.measure_all()
info = execute(circuit1,backend=backend, shots=sample_shot).result().get_counts()
print(info, file=writefile)
print("results end", file=writefile)
print(circuit1.depth(), file=writefile)
print(circuit1, file=writefile)
writefile.close()
| [
"[email protected]"
] | |
7b1d972987a309a0f4c5c451712b01728c5e99c4 | a9fbbfc990ad79f412d8078d27b8937e5ef00bde | /inheritance/exercise/problem_02/reptile.py | b485963519f9b88bc3f05e703e0f4304972141f5 | [
"MIT"
] | permissive | BoyanPeychinov/object_oriented_programming | e2d23ec0ff681ca2c6cf1805e581af3d601aafee | a960721c7c17710bd7b151a9025647e953435962 | refs/heads/main | 2023-03-31T16:19:20.239216 | 2021-03-30T19:43:42 | 2021-03-30T19:43:42 | 342,281,483 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 135 | py | from problem_02.animal import Animal
class Reptile(Animal):
    """A reptile; inherits all behaviour from Animal unchanged.

    The commented-out pass-through ``__init__`` was dead code and has been
    removed -- Animal's constructor is inherited automatically.
    """
"[email protected]"
] | |
0e34820d502392826161fb5b3bcd150e9b63c521 | 80831d77ef6fc3b485be80501b73ccb30ce5e444 | /networkapi/plugins/F5/lb.py | c9ab8665c63964bffc6932c9cf03eb11aba63fb8 | [
"Apache-2.0",
"BSD-3-Clause",
"MIT",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause"
] | permissive | globocom/GloboNetworkAPI | e2fdf5a9e6070359e90801bf3e45c2d499f199c5 | eb27e1d977a1c4bb1fee8fb51b8d8050c64696d9 | refs/heads/master | 2023-06-25T21:34:04.923940 | 2023-05-29T12:07:20 | 2023-05-29T12:07:20 | 22,734,387 | 86 | 74 | Apache-2.0 | 2023-05-29T12:07:21 | 2014-08-07T19:47:43 | Python | UTF-8 | Python | false | false | 2,601 | py | # -*- coding: utf-8 -*-
import logging
from time import sleep
import bigsuds
from networkapi.plugins import exceptions as base_exceptions
from networkapi.system.facade import get_value as get_variable
log = logging.getLogger(__name__)
class Lb(object):
def __init__(self, hostname, username, password, session=True):
self._hostname = hostname
self._username = username
self._password = password
self._time_reconn = 10
try:
self._channel = bigsuds.BIGIP(
hostname=self._hostname,
username=self._username,
password=self._password
)
except Exception, e:
logging.critical('Unable to connect to BIG-IP. Details: %s' % (e))
raise base_exceptions.CommandErrorException(e)
else:
log.info('Connected in hostname:%s' % hostname)
try:
self._version = self._channel.System.SystemInfo.get_version()
if self._version[8:len(self._version)].split('.')[0] <= 10:
raise base_exceptions.UnsupportedVersion(
'This plugin only supports BIG-IP v11 or above')
else:
if session:
log.info('Try get new session')
session_cur = self._channel.System.Session.get_session_timeout()
log.info('Session Timeout Current: %s' % session_cur)
session_timeout = get_variable(
'set_session_timeout_plugin_f5', '60')
if int(session_cur) > int(session_timeout):
self._channel.System.Session.set_session_timeout(
session_timeout)
self._channel = self.get_session()
except Exception, e:
log.error(e)
raise base_exceptions.CommandErrorException(e)
def get_session(self):
try:
channel = self._channel.with_session_id()
log.info('Session %s', channel)
except Exception, e:
if 'There are too many existing user sessions.'.lower() in str(e).lower():
self._time_reconn *= 2
log.warning(
'There are too many existing user sessions. '
'Trying again in %s seconds' % self._time_reconn)
sleep(self._time_reconn)
self.get_session()
else:
raise e
else:
return channel
| [
"[email protected]"
] | |
8f09cbf72c7a7c1685667c667533b247017eefc2 | 9a4de72aab094c87cfee62380e7f2613545eecfb | /git_python/views_api.py | 594be88299c49b034c5904e620c87bf81a9cb19c | [] | no_license | jamesduan/asset | ed75765c30a5288aaf4f6c56bbf2c9a059105f29 | f71cb623b5ba376309cb728ad5c291ced2ee8bfc | refs/heads/master | 2021-01-10T00:06:41.120678 | 2017-05-27T11:40:48 | 2017-05-27T11:40:48 | 92,730,581 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 27,342 | py | # -*- coding: utf-8 -*-
from rest_framework.decorators import api_view, permission_classes
from rest_framework.response import Response
from rest_framework.permissions import AllowAny
from rest_framework import status, generics, filters, viewsets
from assetv2.settingsapi import *
from git import Repo
from git.exc import *
from git_python.utils.Utils import *
from git_python.serializers import *
from git_python.permissions import BootShPermission
from git_python.filters import *
from util.httplib import httpcall2
from urlparse import urlunparse
import json
import re
import redis
import shutil
@api_view(['GET', 'POST', 'PUT', 'DELETE'])
@permission_classes((BootShPermission,))
def crud(request):
working_tree_dir = request.REQUEST.get('working_tree_dir', request.data.get('working_tree_dir'))
path = request.REQUEST.get('path', request.data.get('path'))
content = request.REQUEST.get('content', request.data.get('content'))
file_name = os.path.join(working_tree_dir, path)
if request.method == 'GET':
if os.path.isfile(file_name):
with open(file_name) as f:
content = f.read()
return Response(status=status.HTTP_200_OK, data={'msg': content})
elif request.method == 'POST':
if not os.path.isfile(file_name):
os.makedirs(os.path.dirname(file_name))
else:
return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': '文件已经存在,不允许创建'})
with open(file_name, 'w') as f:
try:
f.write(content)
except IOError, e:
return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': e.args})
repo = Repo(working_tree_dir)
repo.git.add(path)
return Response(status=status.HTTP_201_CREATED, data={'msg': '创建文件成功'})
elif request.method == 'PUT':
if not os.path.isfile(file_name):
return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': '文件不存在,无法更新'})
with open(file_name, 'w') as f:
try:
f.write(content)
except IOError, e:
return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': e.args})
repo = Repo(working_tree_dir)
repo.git.add(path)
return Response(status=status.HTTP_200_OK, data={'msg': '更新文件成功'})
elif request.method == 'DELETE':
if not os.path.isfile(file_name):
return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': '文件不存在,无法删除'})
repo = Repo(working_tree_dir)
repo.git.rm('-f', path)
# shutil.rmtree(os.path.dirname(path), True)
return Response(status=status.HTTP_200_OK, data={'msg': '删除文件成功'})
@api_view(['GET'])
@permission_classes((AllowAny,))
def boot_sh_tree(request):
    """Build a folder/leaf tree of staged boot.sh changes for the UI.

    GET params: ``working_tree_dir`` (repo root) and ``letter_list`` (a
    JSON list of git status letters, e.g. ["A", "M"], used as a filter).
    Returns a flat entry for the repo-root boot.sh plus one expanded
    folder per site containing per-app folders with their boot.sh leaves.
    """
    working_tree_dir = request.GET['working_tree_dir']
    letter_list = request.GET['letter_list']
    # --cached: diff the index against HEAD, i.e. staged changes only.
    default_args = ('--cached', '--abbrev=40', '--full-index', '--raw')
    # default_args = ('--abbrev=40', '--full-index', '--raw')
    repo = Repo(working_tree_dir)
    dirty_list = []
    dirty_dict = dict()
    for entry in repo.git.diff(*default_args).splitlines():
        entry_list = entry.split()
        # Raw diff line layout: mode mode sha sha letter path
        letter, path = entry_list[4:6]
        if letter not in json.loads(letter_list):
            continue
        if path == 'boot.sh':
            dirty_list.append({
                'title': path,
                'path': path
            })
            continue
        # Only paths shaped like <site>/<app>/boot.sh are of interest.
        try:
            site_name, app_name, file_name = path.split('/')
        except ValueError:
            continue
        if file_name != 'boot.sh':
            continue
        dirty_dict[site_name] = dirty_dict.get(site_name, dict())
        dirty_dict[site_name][app_name] = dirty_dict[site_name].get(app_name, [])
        dirty_dict[site_name][app_name].append({'path': path, 'title': file_name})
    # Flatten the nested site->app->files dict into the node list the UI expects.
    for site_name in dirty_dict:
        site_dict = {'title': site_name, 'folder': True, 'expanded': True, 'children': []}
        for app_name in dirty_dict[site_name]:
            app_dict = {'title': app_name, 'folder': True, 'expanded': True, 'children': []}
            for file_dict in dirty_dict[site_name][app_name]:
                app_dict['children'].append({
                    'title': file_dict['title'],
                    'path': file_dict['path']
                })
            site_dict['children'].append(app_dict)
        dirty_list.append(site_dict)
    return Response(status=status.HTTP_200_OK, data=dirty_list)
@api_view(['POST'])
@permission_classes((BootShPermission,))
def commit(request):
    """Commit all staged changes and push them to the remote.

    Flow: refuse when nothing is staged or conflicts exist, stash the
    staged work around a pull, restage additions/deletions, commit with
    ``msg`` and push. Returns 201 on success, 400 on conflict or push
    failure, 200 when there is nothing to commit.
    """
    working_tree_dir = request.POST['working_tree_dir']
    msg = request.POST['msg']
    repo = Repo(working_tree_dir)
    if not repo.is_dirty(index=True, working_tree=False, untracked_files=False):
        return Response(status=status.HTTP_200_OK, data={'msg': '没有需要提交的内容'})
    if repo.index.unmerged_blobs():
        return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': '存在冲突的文件,禁止提交'})
    # Stash the staged work, pull the remote, then re-apply the stash.
    repo.git.stash('save')
    # repo.remotes.origin.pull()
    repo.git.pull()
    try:
        repo.git.stash('pop')
    except GitCommandError:
        # The stash no longer applies cleanly -> a conflict was produced.
        repo.git.stash('clear')
        return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': '生成冲突的文件,禁止提交'})
    # Re-stage every pending blob: add files that still exist, drop deleted ones.
    for blob in changes_to_be_committed(repo):
        if os.path.isfile(os.path.join(working_tree_dir, blob)):
            repo.git.add(blob)
        else:
            repo.git.rm('--cached', blob)
    # dirty_dict = changes_to_be_committed(repo)
    # for letter in dirty_dict:
    #     if letter in ['A', 'M']:
    #         for path in dirty_dict[letter]:
    #             repo.git.add(path)
    #     elif letter in ['D']:
    #         for path in dirty_dict[letter]:
    #             repo.git.rm('--cached', path)
    repo.index.commit(msg)
    # repo.remotes.origin.push()
    try:
        repo.git.push()
    except GitCommandError:
        return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': '提交失败'})
    return Response(status=status.HTTP_201_CREATED, data={'msg': '提交成功'})
# @api_view(['POST'])
# @permission_classes((AllowAny, ))
# def log(request):
# working_tree_dir = request.POST['working_tree_dir']
# number = request.POST['number']
# blob = request.POST.get('blob')
# repo = Repo(working_tree_dir)
# origin_log_list = repo.heads.master.log()[-1 * int(number):]
# origin_log_list.reverse()
# formatted_log_list = [
# {
# 'hash': entry.newhexsha,
# 'author': entry.actor.name,
# 'message': entry.message,
# 'date_time': stamp2str(entry.time[0])
# } for entry in origin_log_list]
# return Response(status=status.HTTP_200_OK, data=formatted_log_list)
@api_view(['GET'])
@permission_classes((AllowAny,))
def log(request):
    """Return one page of the git log, optionally limited to ``path``.

    GET params: ``working_tree_dir``, optional ``path``, ``page`` (1-based)
    and ``page_size``. Response: {'count': total entries, 'results':
    [{path, hash, author, message, date}]}.
    """
    working_tree_dir = request.GET['working_tree_dir']
    path = request.GET.get('path')
    page = int(request.GET.get('page'))
    page_size = int(request.GET.get('page_size'))
    repo = Repo(working_tree_dir)
    # One line per commit: '<sha> <author> <iso date> <subject>'.
    default_args = ['--date=iso', '--pretty=format:%H %an %ad %s']
    if path:
        default_args.append(path)
    origin_log_list = repo.git.log(*default_args).splitlines()
    formatted_log_list = []
    for origin_log in origin_log_list[(page - 1) * page_size: (page - 1) * page_size + page_size]:
        # NOTE(review): whitespace splitting assumes the author name has no
        # spaces -- multi-word author names will shift the parsed fields.
        try:
            hash, author, date, time, timezone, message = re.split('\s', origin_log, 5)
        except ValueError:
            return Response(status=status.HTTP_400_BAD_REQUEST, data=origin_log)
        formatted_log_list.append({
            'path': path,
            'hash': hash,
            'author': author,
            'message': message,
            'date': '{0} {1}'.format(date, time)
        })
    return Response(status=status.HTTP_200_OK, data={'count': len(origin_log_list), 'results': formatted_log_list})
@api_view(['GET'])
@permission_classes((AllowAny,))
def unmerged_blobs(request):
    """Return {stage: raw blob content} for the conflicted file at ``path``."""
    tree_dir = request.REQUEST.get('working_tree_dir')
    path = request.REQUEST.get('path')
    conflict_entries = Repo(tree_dir).index.unmerged_blobs().get(path)
    stage_contents = {
        stage: blob.data_stream.read() for stage, blob in conflict_entries
    }
    return Response(status=status.HTTP_200_OK, data=stage_contents)
@api_view(['PUT'])
@permission_classes((BootShPermission,))
def checkout(request):
    """Resolve a merge conflict for ``path`` by keeping one side.

    ``stage`` '2' keeps our version (--ours), '3' keeps theirs (--theirs);
    the chosen version is then staged with ``git add``.
    """
    # git index stage number -> git-checkout conflict-side flag
    stage_dict = {'2': '--ours', '3': '--theirs'}
    working_tree_dir = request.data.get('working_tree_dir')
    path = request.data.get('path')
    stage = request.data.get('stage')
    repo = Repo(working_tree_dir)
    try:
        repo.git.checkout(stage_dict[stage], path)
        repo.git.add(path)
        return Response(status=status.HTTP_200_OK, data={'msg': '解决冲突成功'})
    except GitCommandError:
        return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': '解决冲突失败'})
@api_view(['PUT'])
@permission_classes((BootShPermission,))
def reset(request):
    """Discard staged and working-tree changes for ``path``.

    First un-stages the path (``git reset HEAD``), then restores the
    working-tree copy (``git checkout --``).
    """
    repository = Repo(request.data.get('working_tree_dir'))
    target = request.data.get('path')
    # Un-staging may legitimately fail (e.g. nothing staged); ignore that.
    try:
        repository.git.reset('HEAD', target)
    except GitCommandError:
        pass
    # Restoring the working-tree copy is the part that must succeed.
    try:
        repository.git.checkout('--', target)
    except GitCommandError:
        return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': '还原失败'})
    return Response(status=status.HTTP_200_OK, data={'msg': '还原成功'})
@api_view(['GET'])
@permission_classes((AllowAny,))
def diff(request):
    """Return the content of ``path`` at ``revision`` and at its single parent.

    Root commits (no parent) and merge commits (several parents) are
    rejected because there is no unique previous version to compare with.
    """
    working_tree_dir = request.GET.get('working_tree_dir')
    path = request.GET.get('path')
    revision = request.GET.get('revision')
    repo = Repo(working_tree_dir)
    commit = repo.commit(revision)
    git_blob = git_blob_from_commit(commit, path)
    parents = commit.parents
    if len(parents) == 0:
        # Root commit: nothing to diff against.
        return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': '该文件没有历史修改记录,不作比较'})
    elif len(parents) > 1:
        # Merge commit: the "previous" version is ambiguous.
        return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': '该文件通过合并而成,不作比较'})
    else:
        last_git_blob = git_blob_from_commit(parents[0], path)
        # A missing blob (file absent in that commit) is reported as None.
        return Response(status=status.HTTP_200_OK, data={
            'commit': git_blob.data_stream.read() if git_blob else None,
            'last_commit': last_git_blob.data_stream.read() if last_git_blob else None
        })
@api_view(['GET'])
@permission_classes((AllowAny,))
def diff_cached(request):
    """Compare the staged (index) content of ``path`` with the HEAD commit."""
    repo = Repo(request.GET.get('working_tree_dir'))
    target_path = request.GET.get('path')
    staged_blob = git_blob_from_index(repo.index, target_path)
    head_blob = git_blob_from_commit(repo.commit('HEAD'), target_path)
    # A side where the file does not exist is reported as None.
    payload = {
        'index': staged_blob.data_stream.read() if staged_blob else None,
        'commit': head_blob.data_stream.read() if head_blob else None,
    }
    return Response(status=status.HTTP_200_OK, data=payload)
@api_view(['GET'])
@permission_classes((AllowAny,))
def boot_sh_puppet(request):
    """Map app ids to their boot.sh path; id 0 is the repository-root boot.sh."""
    puppet_dict = {0: 'boot.sh'}
    for entry in GitBootShApp.objects.all():
        pool = entry.app
        puppet_dict[pool.id] = os.path.join(pool.site.name, pool.name, 'boot.sh')
    return Response(status=status.HTTP_200_OK, data=puppet_dict)
@api_view(['POST'])
@permission_classes((AllowAny,))
def boot_sh_web_hook_puppet(request):
    """Git web-hook: ping every boot.sh puppet master and record the outcome.

    Each master's HTTP result is merged into the incoming hook payload, and
    the whole payload is stored in redis under the pushed commit hash
    (``after``) so it can be polled later via ``boot_sh_puppet_result``.
    """
    web_hook_dict = request.data
    for puppet_master in GIT['BOOT_SH']['PUPPET_MASTER_LIST']:
        code, response = httpcall2('http://%s%s' % (puppet_master, GIT['BOOT_SH']['PUPPET_URL']), method='POST')
        web_hook_dict['puppet'] = web_hook_dict.get('puppet', dict())
        # Only parse the body for successful (non-4xx/5xx) responses.
        web_hook_dict['puppet'][puppet_master] = json.loads(response) if code is not None and code < 400 else dict()
        web_hook_dict['puppet'][puppet_master]['code'] = code
    r = redis.Redis(host=REDIS["HOST"], port=REDIS["PORT"], db=3)
    r.set(web_hook_dict['after'], json.dumps(web_hook_dict))
    return Response(status=status.HTTP_200_OK, data=True)
@api_view(['GET'])
@permission_classes((AllowAny,))
def boot_sh_puppet_result(request):
    """Look up the stored web-hook/puppet result for a commit ``hash``."""
    redis_client = redis.Redis(host=REDIS["HOST"], port=REDIS["PORT"], db=3)
    payload = redis_client.get(request.GET.get('hash'))
    if not payload:
        return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': '未查到相关信息'})
    return Response(status=status.HTTP_200_OK, data=json.loads(payload))
class GitBootShAppList(generics.ListCreateAPIView):
    """List all ``GitBootShApp`` records, or register a new one.

    NOTE(review): the previous docstring described an unrelated
    deployment-request endpoint (depid/status fields) and did not match
    this view; it has been replaced.
    """
    queryset = GitBootShApp.objects.all()
    serializer_class = GitBootShAppSerializer
    permission_classes = (AllowAny, )
class GitBootShAppDetail(generics.RetrieveDestroyAPIView):
    """Retrieve or delete one ``GitBootShApp`` record, looked up by ``app_id``.

    NOTE(review): the previous docstring described an unrelated
    deployment-request endpoint (depid/status fields) and did not match
    this view; it has been replaced.
    """
    queryset = GitBootShApp.objects.all()
    serializer_class = GitBootShAppSerializer
    permission_classes = (BootShPermission,)
    lookup_field = 'app_id'
class GitAppViewSet(viewsets.ModelViewSet):
    """CRUD for ``GitApp`` records plus the backing file in the user's git clone.

    Creating a record also creates the corresponding file in the per-user
    working tree and stages it; destroying a record removes and un-stages it.
    """
    permission_classes = (BootShPermission,)
    queryset = GitApp.objects.all()
    serializer_class = GitAppSerializer
    filter_backends = (filters.SearchFilter, filters.DjangoFilterBackend)
    filter_class = GitAppFilter
    search_fields = ('app__name',)

    def _git_path(self, instance):
        """Return the repo-relative path of the file backing ``instance``.

        Room-scoped files are prefixed with the room name ("ROOM_type");
        app-bound files live under "<site>/<app>/".
        """
        file_name = (instance.room.name + '_' if instance.room else '') + instance.type.name
        return '/'.join([instance.app.site.name, instance.app.name, file_name]) if instance.app else file_name

    def perform_create(self, serializer):
        """Save the record, then create and stage its backing file."""
        instance = serializer.save(created_by=self.request.user)
        content = self.request.DATA.get('content')
        if content is None:
            return
        working_tree_dir = os.path.join(GIT['COMMON']['LOCAL_DIR'], self.request.user.username)
        git_path = self._git_path(instance)
        file_path = os.path.join(working_tree_dir, git_path)
        try:
            if not os.path.isfile(file_path):
                file_dir = os.path.dirname(file_path)
                if not os.path.isdir(file_dir):
                    os.makedirs(file_dir)
            else:
                raise Exception(u'文件已经存在,不允许创建')
            with open(file_path, 'w') as f:
                f.write(content)
            repo = Repo(working_tree_dir)
            repo.git.add(git_path)
        except Exception as e:
            # FIX: "except Exception, e" was Python-2-only syntax.
            # Roll back the DB record so a failed file write does not leave
            # an orphaned GitApp row behind.
            instance.delete()
            raise Exception(e.args[0])

    def perform_destroy(self, instance):
        """Remove the backing file from git, then delete the record.

        BUG FIX: the record was previously deleted *before* checking that
        the file exists, so a missing file aborted the request but still
        dropped the database row. Validate first, then delete.
        """
        working_tree_dir = os.path.join(GIT['COMMON']['LOCAL_DIR'], self.request.user.username)
        git_path = self._git_path(instance)
        file_path = os.path.join(working_tree_dir, git_path)
        if not os.path.isfile(file_path):
            raise Exception(u'文件不存在,无法删除')
        instance.delete()
        repo = Repo(working_tree_dir)
        repo.git.rm('-f', git_path)
@api_view(['GET', 'PUT'])
@permission_classes((BootShPermission,))
def git_app_file(request, pk):
    """Read (GET) or overwrite and stage (PUT) the file backing a ``GitApp``."""
    git_app_obj = GitApp.objects.get(id=pk)
    working_tree_dir = os.path.join(GIT['COMMON']['LOCAL_DIR'], request.user.username)
    # Room-scoped files are prefixed with the room name ("ROOM_type").
    file_name = (git_app_obj.room.name + '_' if git_app_obj.room else '') + git_app_obj.type.name
    git_path = '/'.join([git_app_obj.app.site.name, git_app_obj.app.name, file_name]) if git_app_obj.app else file_name
    content = request.DATA.get('content')
    file_path = os.path.join(working_tree_dir, git_path)
    if request.method == 'GET':
        if os.path.isfile(file_path):
            with open(file_path) as f:
                content = f.read()
        # NOTE(review): when the file is missing, GET echoes the request's
        # own 'content' parameter (usually None) instead of erroring --
        # confirm this fallback is intentional.
        return Response(status=status.HTTP_200_OK, data={'detail': content})
    elif request.method == 'PUT':
        if not os.path.isfile(file_path):
            return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': '文件不存在,无法更新'})
        with open(file_path, 'w') as f:
            try:
                f.write(content)
            except IOError, e:
                return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': e.args[0]})
        repo = Repo(working_tree_dir)
        # Stage the updated file so it is part of the next commit.
        repo.git.add(git_path)
        return Response(status=status.HTTP_200_OK, data={'detail': '更新文件成功'})
@api_view(['GET'])
@permission_classes((AllowAny,))
def git_app_tree(request):
    """Return the user's staged changes with status ``letter`` as a nested tree.

    ``letter`` is a git status letter as emitted by ``git diff --cached
    --raw`` (e.g. A/M/D); only entries matching it are included.
    """
    working_tree_dir = os.path.join(GIT['COMMON']['LOCAL_DIR'], request.user.username)
    letter = request.GET.get('letter')
    default_args = ('--cached', '--abbrev=40', '--full-index', '--raw')
    repo = Repo(working_tree_dir)
    # Fold every matching path into a nested dict: directory names map to
    # sub-dicts, file names map to None.
    dirty_dict = dict()
    for entry in repo.git.diff(*default_args).splitlines():
        entry_list = entry.split()
        # After split(), the status letter and the path are fields 4 and 5.
        real_letter, path = entry_list[4:6]
        if real_letter != letter:
            continue
        my_dirty_dict = dirty_dict
        while True:
            # Peel off one path component per iteration, descending (and
            # creating) one dict level at a time.
            path_list = path.split('/', 1)
            if len(path_list) > 1:
                p1, p2 = path_list
                my_dirty_dict[p1] = my_dirty_dict.get(p1, dict())
                path = p2
                my_dirty_dict = my_dirty_dict[p1]
            else:
                # Final component: mark it as a leaf (file).
                my_dirty_dict[path_list[0]] = None
                break
    # Convert the nested dict into the node format the tree widget expects.
    dirty_list = [get_recursive_node_dict(title, dirty_dict[title]) for title in dirty_dict]
    return Response(status=status.HTTP_200_OK, data=dirty_list)
@api_view(['POST'])
@permission_classes((BootShPermission,))
def commit_v2(request):
    """Commit and push every staged change in the caller's personal clone.

    Flow: refuse empty or conflicted states, stash local work, pull,
    unstash (aborting if the pop conflicts), re-stage adds/deletes, commit
    with ``msg`` and push. On success, the caller's pending ``GitApp`` rows
    are flipped to valid.
    """
    working_tree_dir = os.path.join(GIT['COMMON']['LOCAL_DIR'], request.user.username)
    msg = request.POST['msg']
    repo = Repo(working_tree_dir)
    if not repo.is_dirty(index=True, working_tree=True, untracked_files=False):
        return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': '没有需要提交的内容'})
    if repo.index.unmerged_blobs():
        return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': '存在冲突的文件,禁止提交'})
    # stash -> pull -> pop replays local edits on top of the remote state.
    repo.git.stash('save')
    repo.git.pull()
    try:
        repo.git.stash('pop')
    except GitCommandError:
        # The pop produced conflicts; drop the stash and make the caller retry.
        repo.git.stash('clear')
        return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': '生成冲突的文件,禁止提交'})
    default_args = ('--abbrev=40', '--full-index', '--raw')
    # Re-stage everything that differs: files still on disk are added,
    # removed files are dropped from the index.
    dirty_list = [entry.split()[-1] for entry in repo.git.diff(*default_args).splitlines()]
    for blob in dirty_list:
        if os.path.isfile(os.path.join(working_tree_dir, blob)):
            repo.git.add(blob)
        else:
            repo.git.rm('--cached', blob)
    repo.index.commit(msg)
    try:
        repo.git.push()
    except GitCommandError:
        return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': '提交失败'})
    GitApp.objects.filter(created_by=request.user, valid=False).update(valid=True)
    return Response(status=status.HTTP_201_CREATED, data={'detail': '提交成功'})
@api_view(['PUT'])
@permission_classes((BootShPermission,))
def checkout_v2(request):
    """Resolve a conflict in the caller's personal clone by keeping one side.

    ``stage`` '2' keeps our side, '3' keeps theirs; the resolved file is
    staged immediately afterwards.
    """
    stage_flags = {
        '2': '--ours',
        '3': '--theirs'
    }
    repo = Repo(os.path.join(GIT['COMMON']['LOCAL_DIR'], request.user.username))
    conflicted_path = request.data.get('path')
    try:
        repo.git.checkout(stage_flags[request.data.get('stage')], conflicted_path)
        repo.git.add(conflicted_path)
    except GitCommandError:
        return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': '解决冲突失败'})
    return Response(status=status.HTTP_200_OK, data={'detail': '解决冲突成功'})
@api_view(['GET'])
@permission_classes((AllowAny,))
def unmerged_blobs_v2(request):
    """Return conflicting blob contents for ``path`` in the caller's clone, keyed by stage.

    Stages 2 and 3 correspond to the ``--ours`` / ``--theirs`` sides used
    by ``checkout_v2`` when resolving the conflict.
    """
    working_tree_dir = os.path.join(GIT['COMMON']['LOCAL_DIR'], request.user.username)
    path = request.REQUEST.get('path')
    repo = Repo(working_tree_dir)
    # BUG FIX: ``.get(path)`` returns None for a conflict-free path, which
    # made the loop below raise TypeError; default to an empty list so the
    # response is simply empty instead of a 500.
    unmerged_blobs_list = repo.index.unmerged_blobs().get(path, [])
    unmerged_blobs_dict = dict()
    for stage, blob in unmerged_blobs_list:
        unmerged_blobs_dict[stage] = blob.data_stream.read()
    return Response(status=status.HTTP_200_OK, data=unmerged_blobs_dict)
@api_view(['GET'])
@permission_classes((AllowAny,))
def log_v2(request):
    """Paged git log of the caller's personal clone, optionally scoped to ``path``.

    Unlike ``log``, the pretty format separates fields with newlines
    (``%n``) so author names containing spaces parse correctly; every four
    consecutive output lines form one record.
    """
    working_tree_dir = os.path.join(GIT['COMMON']['LOCAL_DIR'], request.user.username)
    path = request.GET.get('path')
    page = int(request.GET.get('page'))
    page_size = int(request.GET.get('page_size'))
    repo = Repo(working_tree_dir)
    default_args = ['--date=iso', '--pretty=format:%H%n%an%n%ad%n%s']
    if path:
        default_args.append(path)
    origin_log_list = repo.git.log(*default_args).splitlines()
    formatted_log_list = []
    formatted_log_dict = dict()
    # Maps a line's position within the 4-line record to its field name.
    item_dict = {
        0: 'hash',
        1: 'author',
        2: 'date',
        3: 'message'
    }
    i = 0
    for origin_log in origin_log_list:
        formatted_log_dict[item_dict[i]] = origin_log
        i += 1
        if i > len(item_dict) - 1:
            # Record complete: attach the path and start the next one.
            formatted_log_dict['path'] = path
            formatted_log_list.append(formatted_log_dict)
            formatted_log_dict = dict()
            i = 0
    # NOTE(review): a commit with an empty subject would emit only three
    # lines and desynchronise this 4-line parser -- verify against real data.
    return Response(status=status.HTTP_200_OK, data={'count': len(formatted_log_list), 'results': formatted_log_list[(page - 1) * page_size: (page - 1) * page_size + page_size]})
@api_view(['GET'])
@permission_classes((AllowAny,))
def diff_v2(request):
    """Diff ``path`` at ``revision`` against the previous revision touching it.

    The "previous" revision is the next entry in ``git log`` restricted to
    ``path``; for the oldest revision the previous side is returned empty.
    """
    working_tree_dir = os.path.join(GIT['COMMON']['LOCAL_DIR'], request.user.username)
    path = request.GET.get('path')
    revision = request.GET.get('revision')
    repo = Repo(working_tree_dir)
    default_args = ['--pretty=format:%H', path]
    revision_list = repo.git.log(*default_args).splitlines()
    try:
        revision_position = revision_list.index(revision)
    except ValueError:
        # BUG FIX: an unknown revision previously raised ValueError out of
        # the view and produced an HTTP 500; report a client error instead.
        return Response(status=status.HTTP_400_BAD_REQUEST,
                        data={'detail': 'revision %s not found for %s' % (revision, path)})
    last_revision = None if revision_position == len(revision_list) - 1 else revision_list[revision_position + 1]
    git_blob = git_blob_from_commit(repo.commit(revision), path)
    last_git_blob = git_blob_from_commit(repo.commit(last_revision), path) if last_revision else None
    return Response(status=status.HTTP_200_OK, data={
        'commit': git_blob.data_stream.read() if git_blob else '',
        'last_commit': last_git_blob.data_stream.read() if last_git_blob else ''
    })
@api_view(['POST'])
@permission_classes((AllowAny,))
def web_hook_puppet(request):
    """Git web-hook: push the full GitApp file map to every puppet master.

    Builds ``{type_name: {app_id: git_path}}`` (or, for room-scoped types,
    ``{type_name: {app_id: {room_name: git_path}}}``) from all valid
    ``GitApp`` rows -- ``app_id`` 0 means "no app". The map is POSTed to
    each configured master, the per-master result is merged into the hook
    payload, and the payload is stored in redis under the pushed commit
    hash (``after``).
    """
    web_hook_dict = request.data
    puppet_dict = dict()
    for git_app_obj in GitApp.objects.filter(valid=True):
        app = git_app_obj.app
        type = git_app_obj.type
        room = git_app_obj.room
        app_id = app.id if app else 0
        if type.room_property:
            # Room-scoped file types fan out one path per room.
            type_name = '_'.join([room.name, type.name])
            git_path = '/'.join([app.site.name, app.name, type_name]) if app else type_name
            puppet_dict[type.name] = puppet_dict.get(type.name, dict())
            puppet_dict[type.name][app_id] = puppet_dict[type.name].get(app_id, dict())
            puppet_dict[type.name][app_id][room.name] = git_path
        else:
            git_path = '/'.join([app.site.name, app.name, type.name]) if app else type.name
            puppet_dict[type.name] = puppet_dict.get(type.name, dict())
            puppet_dict[type.name][app_id] = git_path
    for puppet_master in GIT['COMMON']['PUPPET_MASTER_LIST']:
        url = urlunparse(('http', puppet_master, GIT['COMMON']['PUPPET_URL'], '', '', ''))
        code, response = httpcall2(url, method='POST', body={'data': json.dumps(puppet_dict)})
        web_hook_dict['puppet'] = web_hook_dict.get('puppet', dict())
        # Only parse the body for successful (non-4xx/5xx) responses.
        web_hook_dict['puppet'][puppet_master] = json.loads(response) if code is not None and code < 400 else dict()
        web_hook_dict['puppet'][puppet_master]['code'] = code
    r = redis.Redis(host=REDIS["HOST"], port=REDIS["PORT"], db=3)
    r.set(web_hook_dict['after'], json.dumps(web_hook_dict))
    return Response(status=status.HTTP_200_OK, data=web_hook_dict)
@api_view(['GET'])
@permission_classes((AllowAny,))
def diff_cached_v2(request):
    """Compare the staged (index) content of ``path`` with HEAD in the caller's clone."""
    clone_dir = os.path.join(GIT['COMMON']['LOCAL_DIR'], request.user.username)
    target_path = request.GET.get('path')
    repo = Repo(clone_dir)
    staged_blob = git_blob_from_index(repo.index, target_path)
    head_blob = git_blob_from_commit(repo.commit('HEAD'), target_path)
    # A side where the file does not exist is reported as None.
    payload = {
        'index': staged_blob.data_stream.read() if staged_blob else None,
        'commit': head_blob.data_stream.read() if head_blob else None,
    }
    return Response(status=status.HTTP_200_OK, data=payload)
@api_view(['PUT'])
@permission_classes((BootShPermission,))
def reset_v2(request):
    """Restore ``path`` in the caller's clone; optionally re-register its GitApp row.

    First un-stages and checks out the path. If ``git_app`` is supplied the
    path is parsed as "<site>/<app>/<[room_]type>" and a matching ``GitApp``
    record is recreated.
    """
    working_tree_dir = os.path.join(GIT['COMMON']['LOCAL_DIR'], request.user.username)
    path = request.data.get('path')
    git_app = request.data.get('git_app')
    repo = Repo(working_tree_dir)
    # Un-staging may legitimately fail (nothing staged); ignore that.
    try:
        repo.git.reset('HEAD', path)
    except GitCommandError:
        pass
    try:
        repo.git.checkout('--', path)
    except GitCommandError:
        return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': '还原失败'})
    if git_app is not None:
        try:
            site_name, app_name, file_name = path.split('/')
        except Exception:
            # FIX: "except Exception, e" was Python-2-only syntax and the
            # bound exception was never used.
            return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': '路径%s无法解析' % path})
        app_obj = None
        for app in App.objects.filter(name=app_name):
            if app.site.name == site_name:
                app_obj = app
                break
        if app_obj is None:
            return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': '路径%s找不到对应的Pool' % path})
        file_name_list = file_name.split('_', 1)
        if len(file_name_list) == 1:
            file_type = file_name
            room_obj = None
        else:
            room_name, file_type = file_name_list
            # BUG FIX: a QuerySet is never None, so the old "is None" check
            # could not fire and an unknown room silently became room=None.
            room_obj = Room.objects.filter(name=room_name).first()
            if room_obj is None:
                return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': '机房名称%s无法识别' % room_name})
        # Same fix as above for the file-type lookup.
        file_type_obj = GitFileType.objects.filter(name=file_type).first()
        if file_type_obj is None:
            return Response(status=status.HTTP_400_BAD_REQUEST, data={'detail': '文件类型%s无法识别' % file_type})
        GitApp.objects.create(app=app_obj, type=file_type_obj, room=room_obj)
    return Response(status=status.HTTP_200_OK, data={'detail': '还原成功'})
| [
"[email protected]"
] | |
968e3135fb1bfbcd2d8917579b2fb4b0c066cec9 | 3fc8c5588252ce522d8d0b7fdda398397475abd7 | /enigma/rotor/__init__.py | 0a341da2b84f2c9f997739721df1b37e879ff132 | [
"MIT"
] | permissive | lukeshiner/enigma | 242362a37b7c82fabef237af424a5eca7bc6cdc9 | 51b9dcb9ec8190b780775135dc88b95fbba120fe | refs/heads/master | 2023-07-23T12:10:11.397318 | 2023-07-19T10:34:37 | 2023-07-19T10:34:37 | 137,946,565 | 0 | 0 | MIT | 2023-09-11T08:43:50 | 2018-06-19T21:16:36 | Python | UTF-8 | Python | false | false | 185 | py | """Enigma's rotors."""
from .reflector import Reflector # NOQA
from .rotor import Rotor # NOQA
from .rotor_mechanism import RotorMechanism # NOQA
from .wiring import Wiring # NOQA
| [
"[email protected]"
] | |
47a1b00a4c65511b02ad861524c25a8f5032850e | d9e0016dce735cf1d72b8f922733c1aa2eb0ac69 | /tests/test_dicom2nifti.py | d3dc3e3dc00f2f6bb02fa808f0d14a1c55f07931 | [
"MIT"
] | permissive | fmljr/dicom2nifti | 4228af03d00342fd3ab6b2fbf1846b5af2dc7790 | 807dbf5d8d16f9cb1fc681344de4f1eeed04b0dd | refs/heads/master | 2021-01-12T10:16:17.640273 | 2016-10-26T11:39:22 | 2016-10-26T11:39:22 | 76,402,823 | 0 | 0 | null | 2016-12-13T22:11:56 | 2016-12-13T22:11:56 | null | UTF-8 | Python | false | false | 4,839 | py | # -*- coding: utf-8 -*-
"""
dicom2nifti
@author: abrys
"""
import unittest
import tempfile
import shutil
import os
import dicom2nifti
import tests.test_data as test_data
from tests.test_tools import compare_nifti, ground_thruth_filenames
class TestConversionDicom(unittest.TestCase):
    """Regression tests for the top-level dicom2nifti conversion helpers."""

    # Every dataset is converted twice: without reorientation (ground-truth
    # index 0) and with reorientation (ground-truth index 1). The previous
    # version copy-pasted the same call/compare pair ten times and used
    # bare "assert ... == True", which is stripped under "python -O".
    DATASETS = (
        test_data.SIEMENS_ANATOMICAL,
        test_data.SIEMENS_ANATOMICAL_IMPLICIT,
        test_data.GENERIC_ANATOMICAL,
        test_data.GENERIC_ANATOMICAL_IMPLICIT,
        test_data.GENERIC_COMPRESSED,
    )

    def test_main_function(self):
        """dicom_series_to_nifti must reproduce the stored ground-truth files."""
        tmp_output_dir = tempfile.mkdtemp()
        try:
            for dataset in self.DATASETS:
                for reorient, truth_index in ((False, 0), (True, 1)):
                    results = dicom2nifti.dicom_series_to_nifti(dataset,
                                                                os.path.join(tmp_output_dir, 'test.nii.gz'),
                                                                reorient)
                    self.assertTrue(compare_nifti(results['NII_FILE'],
                                                  ground_thruth_filenames(dataset)[truth_index]),
                                    'mismatch for %s (reorient=%s)' % (dataset, reorient))
        finally:
            shutil.rmtree(tmp_output_dir)

    def test_convert_directory(self):
        """convert_directory must run cleanly on a whole DICOM directory."""
        tmp_output_dir = tempfile.mkdtemp()
        try:
            dicom2nifti.convert_directory(test_data.GENERIC_ANATOMICAL, tmp_output_dir)
        finally:
            shutil.rmtree(tmp_output_dir)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
df57c2e1baa7e2f56403f2f29ca0b10aee646060 | be8bb4346a3043c62d914ccc9865cc561218158b | /Interfaces/API/NewInterface/Applications/OverlayInput.py | 23482473bd27e56295ef1a278928dc399ef00e13 | [] | no_license | LCDsoft/ILCDIRAC | 72afc0aa3089ba1c9e9bef092c027403c2bbe2cf | 9c366957fdd680a284df675c318989cb88e5959c | refs/heads/Rel-v29r2 | 2021-05-24T03:00:01.211717 | 2020-05-25T14:36:01 | 2020-05-25T14:36:50 | 9,239,327 | 1 | 5 | null | 2020-05-18T10:01:19 | 2013-04-05T11:01:20 | Python | UTF-8 | Python | false | false | 15,440 | py | """
OverlayInput : Helper call to define Overlay processor/driver inputs
"""
# pylint: disable=expression-not-assigned
import types
from collections import defaultdict
from ILCDIRAC.Interfaces.API.NewInterface.LCUtilityApplication import LCUtilityApplication
from ILCDIRAC.Workflow.Modules.OverlayInput import allowedBkg
from ILCDIRAC.Core.Utilities.OverlayFiles import energyWithLowerCaseUnit
from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
from DIRAC.Resources.Catalog.FileCatalogClient import FileCatalogClient
from DIRAC.Core.Workflow.Parameter import Parameter
from DIRAC import S_OK, S_ERROR, gLogger, gConfig
LOG = gLogger.getSubLogger(__name__)
__RCSID__ = "$Id$"
class OverlayInput(LCUtilityApplication):
  """ Helper call to define Overlay processor/driver inputs.

  Example:

  >>> over = OverlayInput()
  >>> over.setBXOverlay(300)
  >>> over.setGGToHadInt(3.2)
  >>> over.setNumberOfSignalEventsPerJob(10)
  >>> over.setBackgroundType("gghad")
  >>> over.setMachine("clic_opt")
  >>> over.setDetectorModel("CLICdet_o3_v14")
  >>> over.setEnergy("3000")

  See list of available options:

  >>> over = OverlayInput()
  >>> over.printAvailableOptions()
  """

  def __init__(self, paramdict=None):
    self._ops = Operations()
    # Bunch crossings to overlay per signal event.
    self.bxToOverlay = None
    # Overlay (e.g. gamma gamma -> hadrons) interactions per bunch crossing.
    self.numberOfGGToHadronInteractions = 0
    self.numberOfSignalEventsPerJob = 0
    self.backgroundEventType = ''
    # Restrict the background file lookup to one production ID (experts only).
    self.prodID = 0
    self.machine = 'clic_cdr'
    self.detectorModel = ''
    # Whether the energy metadata is used when searching background files.
    self.useEnergyForFileLookup = True
    super(OverlayInput, self).__init__(paramdict)
    self.version = '1'
    self._modulename = "OverlayInput"
    self.appname = self._modulename
    self._moduledescription = 'Helper call to define Overlay processor/driver inputs'
    self.accountInProduction = False
    self._paramsToExclude.append('_ops')
    # When set, this LFN path overrides every other lookup parameter.
    self.pathToOverlayFiles = ''
    self.processorName = ''

  def setMachine(self, machine):
    """Define the machine to use."""
    self._checkArgs({'machine': types.StringTypes})
    self.machine = machine

  def setProdID(self, pid):
    """Define the prodID to use as input, experts only."""
    self._checkArgs({'pid': types.IntType})
    self.prodID = pid
    return S_OK()

  def setUseEnergyForFileLookup(self, useEnergyForFileLookup):
    """Set the flag to use the energy meta data in the search of the background files.

    Disable the energy when you want to use files created for a different energy than the signal events

    :param bool useEnergyForFileLookup: Use the Energy in the metadata search or not
    """
    self._checkArgs({'useEnergyForFileLookup': types.BooleanType})
    self.useEnergyForFileLookup = useEnergyForFileLookup
    return S_OK()

  def setOverlayBXPerSigEvt(self, bxoverlay):
    """ Define number bunch crossings to overlay for each signal event.

    This is used to determine the number of required overlay events.
    It does not modify any of the actual application parameters using the overly input.
    Alias for :func:`setBXOverlay`

    :param int bxoverlay: Bunch crossings to overlay.
    """
    self._checkArgs({'bxoverlay': types.IntType})
    self.bxToOverlay = bxoverlay
    return S_OK()

  def setBXOverlay(self, bxoverlay):
    """ Define number bunch crossings to overlay for each signal event.

    This is used to determine the number of required overlay events.
    It does not modify any of the actual application parameters using the overly input.

    :param int bxoverlay: Bunch crossings to overlay.
    """
    return self.setOverlayBXPerSigEvt(bxoverlay)

  def setOverlayEvtsPerBX(self, ggtohadint):
    """ Define the number of overlay events per bunch crossing.

    This is used to determine the number of required overlay events.
    It does not modify any of the actual application parameters using the overly input.

    :param float ggtohadint: optional number of overlay events interactions per bunch crossing
    """
    self._checkArgs({'ggtohadint': types.FloatType})
    self.numberOfGGToHadronInteractions = ggtohadint
    return S_OK()

  def setGGToHadInt(self, ggtohadint):
    """Define the number of overlay events per bunch crossing.

    This is used to determine the number of required overlay events.
    It does not modify any of the actual application parameters using the overly input.
    Alias for :func:`setOverlayEvtsPerBX`

    :param float ggtohadint: optional number of overlay events interactions per bunch crossing
    """
    return self.setOverlayEvtsPerBX(ggtohadint)

  def setNbSigEvtsPerJob(self, nbsigevtsperjob):
    """Set the number of signal events per job.

    This is used to determine the number of required overlay events.
    It does not modify any of the actual application parameters using the overly input.

    :param int nbsigevtsperjob: Number of signal events per job
    """
    self._checkArgs({'nbsigevtsperjob': types.IntType})
    self.numberOfSignalEventsPerJob = nbsigevtsperjob
    return S_OK()

  def setDetectorModel(self, detectormodel):
    """ Set the detector type for the background files.

    Files are defined in the ConfigurationSystem: Operations/Overlay/<Accelerator>/<energy>/<Detector>

    :param str detectormodel: Detector type
    """
    self._checkArgs({'detectormodel': types.StringTypes})
    self.detectorModel = detectormodel
    return S_OK()

  def setPathToFiles(self, path):
    """Set the path to where the overlay files are located.

    Setting this option will ignore all other settings!

    :param str path: LFN path to the folder containing the overlay files
    """
    self._checkArgs({'path': types.StringTypes})
    self.pathToOverlayFiles = path
    return S_OK()

  def setBkgEvtType(self, backgroundEventType):
    """ Define the background type.

    .. deprecated:: 23r0
       Use :func:`setBackgroundType` instead

    :param str backgroundEventType: Background type.
    """
    self._checkArgs({'backgroundEventType': types.StringTypes})
    self.backgroundEventType = backgroundEventType
    return S_OK()

  def setBackgroundType(self, backgroundType):
    """Define the background type

    :param str backgroundType: Background type.
    """
    return self.setBkgEvtType(backgroundType)

  def setProcessorName(self, processorName):
    """Set the processorName to set the input files for.

    Necessary if multiple invocations of the overlay processor happen in marlin for example.
    Different processors must use different background types

    :param str processorName: Name of the Processor these input files are for
    """
    self._checkArgs({'processorName': types.StringTypes})
    self.processorName = processorName
    return S_OK()

  def setNumberOfSignalEventsPerJob(self, numberSignalEvents):
    """Alternative to :func:`setNbSigEvtsPerJob`

    Number used to determine the number of background files needed.

    :param int numberSignalEvents: Number of signal events per job
    """
    return self.setNbSigEvtsPerJob(numberSignalEvents)

  def _applicationModule(self):
    """Build the workflow module definition and declare its parameters."""
    m1 = self._createModuleDefinition()
    m1.addParameter(Parameter("BXOverlay", 0, "float", "", "", False, False,
                              "Bunch crossings to overlay"))
    m1.addParameter(Parameter("ggtohadint", 0, "float", "", "", False, False,
                              "Optional number of gamma gamma -> hadrons interactions per bunch crossing, default is 3.2"))
    m1.addParameter(Parameter("NbSigEvtsPerJob", 0, "int", "", "", False, False,
                              "Number of signal events per job"))
    m1.addParameter(Parameter("prodid", 0, "int", "", "", False, False,
                              "ProdID to use"))
    m1.addParameter(Parameter("BkgEvtType", "", "string", "", "", False, False,
                              "Background type."))
    m1.addParameter(Parameter("detectormodel", "", "string", "", "", False, False,
                              "Detector type."))
    m1.addParameter(Parameter("machine", "", "string", "", "", False, False,
                              "machine: clic_cdr or ilc_dbd"))
    m1.addParameter(Parameter("useEnergyForFileLookup", True, "bool", "", "", False, False,
                              "useEnergy to look for background files: True or False"))
    m1.addParameter(Parameter("pathToOverlayFiles", "", "string", "", "", False, False,
                              "use overlay files from this path"))
    m1.addParameter(Parameter("processorName", "", "string", "", "", False, False,
                              "Processor Name"))
    m1.addParameter(Parameter("debug", False, "bool", "", "", False, False, "debug mode"))
    return m1

  def _applicationModuleValues(self, moduleinstance):
    """Copy this application's settings onto the workflow module instance."""
    moduleinstance.setValue("BXOverlay", self.bxToOverlay)
    moduleinstance.setValue('ggtohadint', self.numberOfGGToHadronInteractions)
    moduleinstance.setValue('NbSigEvtsPerJob', self.numberOfSignalEventsPerJob)
    moduleinstance.setValue('prodid', self.prodID)
    moduleinstance.setValue('BkgEvtType', self.backgroundEventType)
    moduleinstance.setValue('detectormodel', self.detectorModel)
    moduleinstance.setValue('debug', self.debug)
    moduleinstance.setValue('machine', self.machine)
    moduleinstance.setValue('useEnergyForFileLookup', self.useEnergyForFileLookup)
    moduleinstance.setValue('pathToOverlayFiles', self.pathToOverlayFiles)
    moduleinstance.setValue('processorName', self.processorName)

  def _userjobmodules(self, stepdefinition):
    """Attach the application module to a user-job step."""
    res1 = self._setApplicationModuleAndParameters(stepdefinition)
    if not res1["OK"]:
      return S_ERROR('userjobmodules failed')
    return S_OK()

  def _prodjobmodules(self, stepdefinition):
    """Attach the application module to a production-job step."""
    res1 = self._setApplicationModuleAndParameters(stepdefinition)
    if not res1["OK"]:
      return S_ERROR('prodjobmodules failed')
    return S_OK()

  def _addParametersToStep(self, stepdefinition):
    """Add only the base parameters to the step (no application-specific ones)."""
    res = self._addBaseParameters(stepdefinition)
    if not res["OK"]:
      return S_ERROR("Failed to set base parameters")
    return S_OK()

  def _checkConsistency(self, job=None):
    """ Checks that all needed parameters are set
    """
    if self.pathToOverlayFiles:
      # Explicit path given: just verify it contains files.
      res = FileCatalogClient().findFilesByMetadata({}, self.pathToOverlayFiles)
      if not res['OK']:
        return res
      LOG.notice("Found %i files in path %s" % (len(res['Value']), self.pathToOverlayFiles))
      if not res['Value']:
        return S_ERROR("OverlayInput: PathToFiles is specified, but there are no files in that path")
    if not self.bxToOverlay:
      return S_ERROR("Number of overlay bunch crossings not defined")
    if not self.numberOfGGToHadronInteractions:
      return S_ERROR("Number of background events per bunch crossing is not defined")
    if not self.backgroundEventType:
      return S_ERROR("Background event type is not defined: Chose one gghad, aa_lowpt, ...")
    if self._jobtype == 'User':
      if not self.numberOfSignalEventsPerJob:
        return S_ERROR("Number of signal event per job is not defined")
    else:
      # Production jobs record the overlay settings as production parameters.
      self.prodparameters['detectorModel'] = self.detectorModel
      self.prodparameters['BXOverlay'] = self.bxToOverlay
      self.prodparameters['GGtoHadInt'] = self.numberOfGGToHadronInteractions
    return S_OK()

  @staticmethod
  def printAvailableOptions(machine=None, energy=None, detModel=None):
    """Print a list of the available options for each machine, energy and detector model.

    The parameters can be used to filter the output

    :param str machine: only list options for this machine
    :param str detModel: only list options for this detector model
    :param energy: only list options for this energy
    :type energy: int or float
    """
    overTree = gConfig.getConfigurationTree('/Operations/Defaults/Overlay')
    if not overTree['OK']:
      LOG.error('Could not find the configuration section "/Operations/Defaults/Overlay"')
      return
    if energy:
      energy = energyWithLowerCaseUnit(energy)
    # Dictionary of machines, energy, detModel, backgroundType
    availableOptions = defaultdict(lambda: defaultdict(lambda: defaultdict(set)))
    for value in overTree['Value']:
      # CS paths look like /Operations/Defaults/Overlay/<machine>/<energy>/<detModel>/<bkg>...
      values = value.strip('/').split('/')
      if len(values) <= 6:
        continue
      theMachine = values[3]
      theEnergy = values[4]
      theDetModel = values[5]
      backgroundType = values[6]
      if machine and machine != theMachine:
        continue
      if energy and energy != theEnergy:
        continue
      if detModel and detModel != theDetModel:
        continue
      availableOptions[theMachine][theEnergy][theDetModel].add(backgroundType)
    if machine or energy or detModel:
      if availableOptions:
        LOG.notice('Printing options compatible with')
      else:
        LOG.notice('No overlay options compatible with your selection')
      LOG.notice(' * Machine = %s' % machine) if machine else False
      LOG.notice(' * Energy = %s' % energy) if energy else False
      LOG.notice(' * DetModel = %s' % detModel) if detModel else False
    else:
      LOG.notice('All available overlay combinations')
    for theMachine, energies in availableOptions.items():
      LOG.notice('Machine: %s' % theMachine)
      for theEnergy, detModels in energies.items():
        LOG.notice(' %s' % theEnergy)
        for theDetModel, backgrounds in detModels.items():
          LOG.notice(' %s: %s' % (theDetModel, ', '.join(backgrounds)))

  def _checkFinalConsistency(self):
    """Check consistency before submission.

    The overlay files for the specifed energy must exist. Print all available overlay options on error
    """
    res = self.__checkFinalConsistency()
    if res['OK']:
      return res
    # Help the user by listing what WOULD be compatible with each setting.
    self.printAvailableOptions(machine=self.machine)
    self.printAvailableOptions(energy=self.energy)
    self.printAvailableOptions(detModel=self.detectorModel)
    return res

  def __checkFinalConsistency(self):
    """Check consistency of overlay options."""
    if self.pathToOverlayFiles:
      return S_OK()  # can ignore other parameter
    if not self.energy:
      return S_ERROR("Energy MUST be specified for the overlay")
    res = self._ops.getSections('/Overlay')
    if not res['OK']:
      return S_ERROR("Could not resolve the CS path to the overlay specifications")
    sections = res['Value']
    if self.machine not in sections:
      return S_ERROR('Machine %r does not have overlay data' % self.machine)
    energytouse = energyWithLowerCaseUnit(self.energy)
    res = self._ops.getSections("/Overlay/%s" % self.machine)
    if energytouse not in res['Value']:
      return S_ERROR("No overlay files corresponding to %s" % energytouse)
    res = self._ops.getSections("/Overlay/%s/%s" % (self.machine, energytouse))
    if not res['OK']:
      return S_ERROR("Could not find the detector models")
    if self.detectorModel not in res['Value']:
      return S_ERROR('Detector model %r has no overlay data with energy %r and %r' %
                     (self.detectorModel, self.energy, self.machine))
    res = allowedBkg(self.backgroundEventType, energytouse, detectormodel=self.detectorModel, machine=self.machine)
    if not res['OK']:
      return res
    if res['Value'] < 0:
      return S_ERROR("No proper production ID found")
    return S_OK()
| [
"[email protected]"
] | |
56c2ff039bc08f1a07ec735e680f16991d49556a | d4f1bd5e52fe8d85d3d0263ede936928d5811bff | /Python/Problem Solving/AlgorithmBook/9-1-1 Future City.py | fe41df785080e0f3bb92f7720429ed5e7b550f36 | [] | no_license | ambosing/PlayGround | 37f7d071c4402599995a50cac1e7f1a85c6d10dd | 0d5262dbb2fa2128ecb3fd969244fa647b104928 | refs/heads/master | 2023-04-08T04:53:31.747838 | 2023-03-23T06:32:47 | 2023-03-23T06:32:47 | 143,112,370 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 588 | py | import sys
INF = int(1e9)
n, m = map(int, sys.stdin.readline().split())
graph = [[INF] * (n + 1) for _ in range(n + 1)]
for a in range(1, n + 1):
graph[a][a] = 0
for _ in range(m):
a, b = map(int, sys.stdin.readline().split())
graph[a][b] = 1
graph[b][a] = 1
x, k = map(int, sys.stdin.readline().split())
for k in range(1, n + 1):
for a in range(1, n + 1):
for b in range(1, n + 1):
graph[a][b] = min(graph[a][b], graph[a][k] + graph[k][b])
if graph[1][k] == INF or graph[k][x] == INF:
print(-1)
else:
print(graph[1][k] + graph[k][x])
| [
"[email protected]"
] | |
7748936c73223f57680a54fc92426bd279181b0e | af32685025305eb77125b6a0a544522422be3b2f | /basics/croping.py | 772a3a71b2adc2d835a6db1f5293d8fbb5c57577 | [] | no_license | Mohammed-abdelawal/py-opencv-practice | 5e913f48f7ad875b00ce8a5e7bb692b2aa2efb35 | 876df49d54a0083650f3f9682c5e3dc226e31b76 | refs/heads/master | 2022-10-10T04:19:27.300790 | 2020-06-09T20:05:22 | 2020-06-09T20:05:22 | 270,372,834 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 334 | py | from cv2 import cv2
import numpy as np
img = cv2.imread('data/home.png')
print(img.item(100,100,0))
print(img.shape)
print(img.size)
print(img.dtype)
ball = img[280:340, 330:390]
cv2.imshow('old',img)
cv2.imshow('cropped area',ball)
img[273:333, 100:160] = ball
cv2.imshow('new',img)
cv2.waitKey(0)
cv2.destroyAllWindows()
| [
"[email protected]"
] | |
b54648dead31dbe6de0c6b4821ea81cd6fefb3aa | c8c1c8b3327ae93ceaa671681d5f9eac0933e738 | /mrgpylinux/femcalcadv/gls/rotateypr.py | deb111704561ac9583192469d8ab27a4ccfd7a90 | [] | no_license | gpspelle/acoustic-pollution | bbb2a6492b3d02d046cb533470affabcacb38409 | ad80f1fd582f47ce679748bb6ac93ff3149fd445 | refs/heads/master | 2022-07-28T06:49:20.493083 | 2019-11-15T00:27:52 | 2019-11-15T00:27:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,091 | py | __pyarmor__(__name__, __file__, b'\x50\x59\x41\x52\x4d\x4f\x52\x00\x00\x03\x06\x00\x33\x0d\x0d\x0a\x01\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x40\x00\x00\x00\xa3\x06\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xe3\x50\x8c\x64\x26\x42\xd6\x01\xc1\x57\xe0\xf8\x31\x41\xaf\xc1\x30\x25\xfc\x16\xb4\xae\x13\xb6\xa7\x63\xc3\x3e\x9a\x2e\xae\x80\xb2\x75\x67\x81\x5d\x1e\xaf\x43\x1c\x91\x92\x83\x72\x2d\xec\x1a\x52\xe2\x4e\x40\x60\xc5\x61\x6c\x9c\x9e\x7a\xe9\x6f\xab\xf8\x3a\x24\xfa\x8e\x2f\x91\x53\x3c\xeb\x4b\x16\xee\x1d\x17\x4f\xf0\xe5\xb0\xc9\x7a\xb7\x40\x38\x3c\x40\xa1\x4d\x51\x04\xe9\x49\x6d\x26\xa4\x2b\x95\x87\x28\xa1\xab\x6a\xbd\x7d\x96\x67\xc2\x45\xa4\xab\x40\xed\x62\x10\x30\x31\x9b\xa4\x2f\x22\xfd\xda\xa6\x16\x64\xce\x6f\x81\xa1\x5f\x41\x99\x58\x0d\x0a\x25\x1a\x98\x9b\xc7\x8b\xed\x6e\xd8\x35\x6b\x56\xcd\xdf\x40\xb9\xf3\xa9\x20\xa8\x73\xbc\x6d\x45\x54\x81\x33\x6b\xb5\xfa\x0d\x92\x42\x6a\x70\x9b\x96\x00\xec\xa1\x3f\x39\x54\x66\x77\xd9\xc0\xf2\x87\x7e\x27\x72\xc4\x62\x9e\xf4\x0b\x8f\xdb\xda\x9f\x0f\xaa\x19\x22\x89\x70\x8e\x94\x40\x6d\x55\x26\xf5\xea\xee\xab\x0b\x14\xff\x7b\x40\x1a\x7e\xfd\xb8\xf0\x3b\x4c\x72\x3d\xf6\x6b\x5c\x18\xe3\xea\x2f\xe8\x62\x19\x73\xaf\xb1\x5a\xd3\x76\x0b\x78\xc9\x44\x1e\x8b\x2f\xf3\xdc\x13\x08\xfd\xe3\x69\x7a\x30\x8c\x2b\x30\xa5\x8f\x84\xf4\xca\x7a\x11\x3d\xee\xf6\xe6\x38\x2a\x53\xda\xf1\x6c\x5e\xad\x36\xda\xeb\x7e\xc2\x1d\x7a\xa7\x53\x15\xf3\x2b\x9f\x27\xd5\xff\x94\xcc\x67\x1a\xb0\xc8\xb2\xb
c\xa0\x43\x71\x8c\x74\xf7\x7d\x6c\x8e\xf2\xf1\xc2\x55\xd4\xcd\x69\x94\xf8\x73\x25\xf5\x7d\xf0\x19\x0a\x81\xf4\x64\xca\xa8\xfc\xd6\x8f\x23\x9f\xa4\x6c\x2e\x6c\xe2\xaf\x25\xae\xba\xfa\x2b\xf6\x0c\xce\xbd\xd4\xee\x3e\x3e\x5d\x7b\xfc\x15\xd7\xed\x86\x49\xcc\x9d\x4b\x03\x33\x6f\x4f\x96\xfa\x6d\x4a\x08\x7c\x68\xd0\xb4\xa2\x8d\xec\x32\xbd\x2b\x28\xe1\x02\xdc\x1d\x02\x0f\xe8\xd8\xae\x9b\x41\x1b\x37\x26\x74\xd4\xb5\x32\xc0\x94\x5c\x0d\x55\x61\x58\xde\x83\x8b\x39\x2b\xb2\xf6\x3d\x43\xa3\x4d\x7b\xae\xde\x7f\xde\x85\x9e\x64\x5a\xc6\x0d\xf2\x3d\xba\x3b\x16\x53\xe4\xa8\x25\x97\xd8\xf5\x25\xae\xe5\x6f\x6e\xe3\x67\x59\x48\x62\x58\x5a\x3a\x48\x0b\xa2\xe2\xfd\x22\xe5\x51\x31\xbd\x9f\x4b\x4d\x7e\x8b\x29\x72\x9b\xe6\x5c\x6c\xcc\x1f\xed\xfa\xf2\x73\xb6\x9e\x0e\x36\x07\x80\x1a\x3f\x6c\xb2\x42\x87\x13\xee\x9c\x81\xe5\x55\x71\x59\xac\xca\x85\xb6\x58\xb3\x7d\x4e\xbd\xfc\x83\xcf\xcd\xd8\x6f\x5c\x0c\x29\xdf\x19\xea\x08\xbb\x29\xed\xbf\xb1\xcf\x7e\xbe\x8a\x94\x6a\x27\x3d\x2e\xbc\x03\x84\x66\x43\xbd\x34\x58\x78\xc5\x8f\xae\x95\xf2\x69\xea\xda\xa2\x6b\x73\x75\xac\xf6\x66\x3e\x79\x9c\x72\x3a\xb3\x9e\x1c\xe9\x18\x39\xc6\x03\xbe\xbc\xa3\x2d\x8a\xc5\x53\xd4\x9b\x4d\x28\x31\x61\xd7\xac\x78\x6d\x23\xba\x91\x1e\xd8\x38\x5d\xfa\xf3\xb7\x8a\x49\x3a\x3b\xef\x19\xb9\x8f\x30\xf1\xe4\x0d\xdf\x0d\xf9\xb1\x55\xb0\xe6\x1d\xda\xd2\xc2\x75\xe2\x1b\x4f\x90\x37\x0e\xb3\xaa\x62\x1d\x67\xe8\xa6\x0a\x89\x05\xb5\x8d\x7a\x7d\x92\xa0\x1d\x61\x72\xad\xa8\x8d\x9e\x38\x09\x86\xd5\x8f\xe3\x9e\x40\x8c\x23\x3a\x16\x4a\x95\x6d\x10\x01\xd9\xcc\x31\x11\x30\x56\x65\xe7\xe9\xa9\x46\xd3\x23\x6c\xcb\xba\xda\xb0\xba\x00\x5e\xe5\xd5\x40\x09\x06\xb9\xcd\x5d\xca\xae\x37\x42\xfe\xae\xdf\x9f\x7c\x60\x53\x95\x86\xb5\xb6\xe9\x5f\xa7\x6c\xa6\x6a\x68\x6a\x9c\xab\xf2\x27\xa9\x0d\xe5\x18\x55\xfb\xdb\x8d\xe2\x49\xc8\x3c\x6b\x4b\x17\x66\xd1\x8d\xb8\xa1\xa8\x40\x3c\xbd\x28\x73\x40\x24\xb7\xfd\x21\xe8\x33\xc7\x22\x61\x2d\x10\x00\xf7\x14\x35\x08\x7a\xd3\xe8\x3d\x56\xda\xff\x5e\xc7\xef\x1e\x6a\x57\x25\x2c\x82\x47\x1c\x28\x9d\x08\x8f\x63\x93\xce\x4f\xa
3\x1b\x4d\x45\xc3\x03\xc3\x41\xb7\x87\x81\x22\xa0\x7d\xb6\x07\xcf\xff\xba\xfd\x45\xde\x95\xe5\xdd\xc9\x97\x0b\x3b\xf0\x3a\x46\x8d\xe9\x64\x4f\x25\x76\x3f\xad\x4b\x5c\x67\xf4\x8b\x00\xab\x61\xa2\xab\x84\x5f\x69\x30\x8f\x79\x99\xad\x1d\xd7\x77\x1e\x71\x1f\x9e\xab\x45\x9a\x9d\x5d\x74\x1d\xe6\x0e\x86\xa1\x2c\x8d\x89\xda\x45\x0e\x17\xd7\x7b\x41\x00\x98\x7c\xd1\x57\xca\xa2\xc9\xd3\x13\x11\x7c\x66\x1d\x8b\x9c\x31\x06\x59\x4b\x98\x6e\x74\x96\xb3\xd4\xb2\x53\xa5\x91\x58\xf2\x3d\xd0\xe4\xa8\x51\x6c\xd7\x21\x00\xa2\x6f\x2f\xce\x0e\xb8\xf3\x6e\x44\xa2\x8c\x48\xd0\x25\xbe\x81\x74\xfe\x11\x2e\x75\x18\x01\x99\x30\xd7\x36\xed\xc7\xdf\xf8\x1f\x2e\x69\x8e\xe4\xbe\xe2\xc9\x31\x3d\xe3\xab\xf6\x09\x12\xd0\xe3\x5f\x56\xb3\x1e\x26\x36\x87\xa8\x69\xd2\xe3\x77\xb1\xc1\x86\x9f\x18\xda\x2b\xd5\xc9\x0f\x29\xe0\x91\x6d\x81\x21\x46\xe4\xf4\xe5\x9c\xdb\x87\xf3\x33\x87\x78\x4b\x9b\xd8\x3c\xa7\x0a\x43\x8e\x83\x4f\x83\x3b\x8c\x2e\xcb\x02\x42\x4e\xac\x43\xdf\xba\x64\xd0\x3c\x2f\xd5\x53\xa6\xc0\xef\xc1\x79\xd9\xc1\x9d\x78\xdd\xc3\xf5\xa8\xa7\x61\x9b\xc4\xce\xf5\xae\x5b\xfe\x2b\x90\x60\x37\x23\x6f\x1d\x67\xfa\xbe\x4d\x43\xca\x42\xc3\xbf\x2c\xee\xcf\x85\x8e\x41\x1e\xb7\x4a\xad\xe2\x5a\xa5\x1c\x27\x18\x7a\x35\xa9\xbb\xcb\x5a\xee\x46\x09\x9b\xc9\xf2\x91\x5d\xaa\x64\xdf\xc0\x4c\x30\x43\x08\x58\x6f\xe5\x54\x7c\xe3\xd6\x9a\xe0\x9c\x23\x02\xf3\x81\xc9\x6f\x8f\xa2\x1f\xe9\xa1\x15\xa7\x53\x1c\x73\x97\x5f\x50\x92\xd0\x81\x0d\x05\x3b\x63\x5e\x10\xdf\xd1\x45\x91\x0d\x0a\x83\xac\x77\xb2\xd1\xbc\x5f\x78\xa4\x06\x4d\xcc\x59\xda\x22\xc9\x51\xaa\x71\x7b\xa9\x71\x1e\x49\xca\xc7\xa4\x7e\x6f\xb2\x9e\x60\xe9\x2d\xa6\xa6\x02\x2c\x77\x57\xd9\x66\x1c\x42\x63\x41\xab\xf1\xb3\xfd\x62\xcc\xcf\xaa\x0b\xbb\x9a\xe0\xc0\x0d\x8d\xd2\x73\xf4\xdf\x40\xb1\x61\xe8\x93\xe3\xb3\x01\xab\x79\x6b\x0d\xe0\x51\xa8\x64\x0c\x4b\xef\x22\xf3\xfa\x9e\x0d\x6d\x1e\x1a\xa6\x7f\x33\x5c\x99\xf5\x6f\x53\x52\x6c\xe8\xc6\xdc\x07\xdf\x00\x51\x7f\x20\x7f\x77\xb4\x62\x57\xe5\xad\x51\x80\x13\xa1\x94\x24\xf1\x2c\x05\x4c\x3a\x18\x57\x71\x2c\xc9\xe1\x9d\x67\x6e\x9
6\xdc\xd6\xd9\x72\x7c\x5e\x89\x90\x63\x67\x33\x05\xef\xf9\xcc\xbe\x56\xdb\xe5\x39\xf7\xd9\xd6\xe6\x8c\x62\x2d\xf5\x99\x39\x86\x8d\xb2\xb6\xef\x2f\x23\x85\xcd\x48\xdb\x9c\x32\x93\xea\xe1\x47\xec\x7d\x1b\x58\x33\x47\xb3\x3d\x94\xcc\x36\xb3\x40\x09\xb5\x5c\xbf\xb4\x53\x39\xef\x98\x9a\x8f\x7c\x38\x90\xf3\xbf\x4d\xbb\xad\x20\xa9\x86\x6d\x50\xc2\xf5\x7c\x1b\x85\x97\x37\xa7\x11\x97\x77\x9d\x04\x92\x03\x2c\xe6\xa5\xc2\x61\xb3\x31\x32\xda\x39\x47\xde\xf9\x73\xcb\x55\x47\x5f\x04\x3d\x72\xba\xa3\x4a\x88\x1c\x70\x7b\x94\xfd\x15\xef\xf9\x4c\xa8\x48\x8a\xc3\xc3\x11\x3d\xad\x1e\x7a\x79\xf1\x6b\x0a\x68\xa7\x3d\xc7\xee\x50\x38\xff\x80\x2d\x31\x17\x31\x82\x3f\x95\x2b\xaf\x4c\x86\xed\x41\xb5\xe7\x64\xc2\x4b\xe1\xe5\xd3\xcd\x11\x17\xb9\xfc\x31\x92\xef\x04\x13\xd0\xc1\xd7\x99\xe3\x63\xc4\xc0\xbc\x65\xbc\x57\xc6\xe3\x0b\x3d\x19\x3b\x78\x7d\xc8\x68\xd8\x86\x6a\xc9\x1f\xfd\x07\x82\x8a\x4b\x5c\x12\x26\x5c\x35\xb2\x13\x07\x9c\x2f\x00\xb4\x7a\x7a\xb6\x03\x0e\x98\xbb\xf2\x12\x0b\x2d\xbc\xb6\x46\x5c\x8a\xa3\xfa\x9c\x01\x0e\x9d\x03\x36\x28\xc7\x94\x4a\x37\xdc\x7c\x02\xce\x98\x6a\xfe\xff\xdd\x1c\xee\xa9\x0a\x24\x98\x87\x3c\x04\xda\x85\xfb\xb6\x68\x61\x35\xb5\x51\x1d\x27\xd2\xe3\x72\x16\x3d\xe8\x33\x7a\xa9\x2e\xa0\x59\xdd\xdd\x3b\x04\x63\x34\x00\x53\x15\x82\x09\xc9\x89\x98\x3d\xab\x68\x46\xd8\xb6\xa7\xaa\x5c\x53\xef\x7c\x59\x1f\x40\xd0\x02\xab\xb5\x90\x00\x5f\x1b\x47\x02\x86\x51\x28\xa3\x85\x83\x28\x3a\xee\xfc\x8d\x29\x1b\x3b\xad\x3c\x1e\x15\x09\x14\xa9\x67\x60\x6f\x5e\x39\x89\x7b\x87\xf4\xfd\x22\xab\xdc\xc9\x90\x41\x05\xf6\xe0\xcb\x6a\xd0\x78\xd1\x61\xa9\xd0\x31\xca\x4d\xc9\x35', 1) | [
"[email protected]"
] | |
61f52032ebbb4a63f30604d312a1fa77475aa7fe | b4e4399f6d18ee83760604fc67c90d3f5eac52dd | /Python/11.PythonFunctionals/75.ValidatingEmail.py | ecefafd0f2b944edb0a95f31372d5b79104984b9 | [] | no_license | angelvv/HackerRankSolution | 88415c3ace68ddc10c76ae8df43ab5193aa921d4 | 8b2c323507f9a1826b4156aeab94815f41b6fc84 | refs/heads/master | 2021-07-17T20:51:50.758364 | 2020-05-26T17:25:05 | 2020-05-26T17:25:05 | 167,896,187 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 686 | py | def fun(s):
# return True if s is a valid email, else return False
try:
username, url = s.split("@")
website, extension = url.split(".")
except ValueError:
return False
if not username.replace("-", "").replace("_", "").isalnum():
return False
if not website.isalnum():
return False
if not 1<= len(extension) <= 3:
return False
return True
def filter_mail(emails):
return list(filter(fun, emails))
if __name__ == '__main__':
n = int(input())
emails = []
for _ in range(n):
emails.append(input())
filtered_emails = filter_mail(emails)
filtered_emails.sort()
print(filtered_emails) | [
"[email protected]"
] | |
42fed85bf89726b3b133b5d263eceb2d46e6ef2d | 6874015cb6043d1803b61f8978627ddce64963b4 | /django/contrib/gis/utils/wkt.py | 626eab9d6e36dd5f62cab03c5178868563ee66ad | [
"BSD-3-Clause",
"Python-2.0"
] | permissive | yephper/django | 25fbfb4147211d08ec87c41e08a695ac016454c6 | cdd1689fb354886362487107156978ae84e71453 | refs/heads/master | 2021-01-21T12:59:14.443153 | 2016-04-27T09:51:41 | 2016-04-27T09:51:41 | 56,134,291 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,982 | py | """
Utilities for manipulating Geometry WKT.
"""
from django.utils import six
def precision_wkt(geom, prec):
"""
Returns WKT text of the geometry according to the given precision (an
integer or a string). If the precision is an integer, then the decimal
places of coordinates WKT will be truncated to that number:
>>> from django.contrib.gis.geos import Point
>>> pnt = Point(5, 23)
>>> pnt.wkt
'POINT (5.0000000000000000 23.0000000000000000)'
>>> precision_wkt(pnt, 1)
'POINT (5.0 23.0)'
If the precision is a string, it must be valid Python format string
(e.g., '%20.7f') -- thus, you should know what you're doing.
"""
if isinstance(prec, int):
num_fmt = '%%.%df' % prec
elif isinstance(prec, six.string_types):
num_fmt = prec
else:
raise TypeError
# TODO: Support 3D geometries.
coord_fmt = ' '.join([num_fmt, num_fmt])
def formatted_coords(coords):
return ','.join(coord_fmt % c[:2] for c in coords)
def formatted_poly(poly):
return ','.join('(%s)' % formatted_coords(r) for r in poly)
def formatted_geom(g):
gtype = str(g.geom_type).upper()
yield '%s(' % gtype
if gtype == 'POINT':
yield formatted_coords((g.coords,))
elif gtype in ('LINESTRING', 'LINEARRING'):
yield formatted_coords(g.coords)
elif gtype in ('POLYGON', 'MULTILINESTRING'):
yield formatted_poly(g)
elif gtype == 'MULTIPOINT':
yield formatted_coords(g.coords)
elif gtype == 'MULTIPOLYGON':
yield ','.join('(%s)' % formatted_poly(p) for p in g)
elif gtype == 'GEOMETRYCOLLECTION':
yield ','.join(''.join(wkt for wkt in formatted_geom(child)) for child in g)
else:
raise TypeError
yield ')'
return ''.join(wkt for wkt in formatted_geom(geom))
| [
"[email protected]"
] | |
3409b6ddd08874520206f0046806448740c5c21e | 9830360802428854384d6b27a172102de0e59c8f | /2776.py | 737239add71ab1b31f319104e5d8d171bb4e8a89 | [] | no_license | banje/acmicpc | d4009535ec31892f706333d812c92fddead08aa1 | 69d44a3b60d2a559563b5a1055bcc2290090e35c | refs/heads/master | 2022-07-20T20:01:56.623346 | 2020-05-16T11:30:17 | 2020-05-16T11:30:17 | 260,843,463 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 245 | py | a=int(input())
for i in range(a):
b=int(input())
c=set(map(int,input().split()))
d=int(input())
e=list(map(int,input().split()))
for j in range(d):
if e[j] in c:
print(1)
else:
print(0) | [
"[email protected]"
] | |
8533b4b408672ace1a5fb2af100207eb14c91acc | cb94a4cdd7a9df17f9c6f1a03f8f4ff12c916cf3 | /Python_Essential_Training/Exercise Files/Chap06/for.2.py | 7a77df333bfaa16c51725a084f1e21a1691e55a8 | [] | no_license | sedstan/LinkedIn-Learning-Python-Course | 2b936d0f00703a6e66a872220ed47572123dc7fd | b4584218355bf07aa3d2939b950911eae67adb0b | refs/heads/master | 2021-10-11T10:19:13.675662 | 2019-01-24T17:55:20 | 2019-01-24T17:55:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 188 | py | #!/usr/bin/env python3
# Copyright 2009-2017 BHG http://bw.org/
animals = ( 'bear', 'bunny', 'dog', 'cat', 'velociraptor' )
for pet in animals:
if pet == 'dog': continue
print(pet)
| [
"[email protected]"
] | |
c46892506c1b9dcdb921b6d830ec352ff9d8c096 | fdf531435b0a4d771083bab78f5a2f91b2ec1b28 | /Hashing/Hashing II/2. Hashing + Sliding Window/2. Window String.py | e0b5714f9d7256422a8e49cf6a0f1275ba856489 | [] | no_license | srajsonu/100DaysOfCode | d556cf4c8491d2bea2bf6c17cc4410f64ae71829 | b25ff694a04a16bd2bdd33cf5bb84f9cbe5f3af6 | refs/heads/main | 2023-03-22T22:48:11.030576 | 2021-03-18T18:50:00 | 2021-03-18T18:50:00 | 325,747,085 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 631 | py | class Solution:
def minWindow(self, A, B):
m = len(A)
n = len(B)
if m < n:
return ''
freqA = {}
freqB = {}
for i in B:
if i not in freqB:
freqB[i] = 1
else:
freqB[i] += 1
cnt = 0
i, j = 0, 0
while j < m:
if A[j] not in freqA:
freqA[A[j]] = 1
else:
freqA[A[j]] += 1
j += 1
return freqA, freqB
if __name__ == '__main__':
A = "ADOBECODEBANC"
B = "ABC"
C = Solution()
print(C.minWindow(A, B))
| [
"[email protected]"
] | |
26446c43755f56300804abd5f5b9d97113d69cb9 | d52413173437ba73ecdf822ca895e659f00a8ce7 | /kiwibackend/application/website/messages/http/packageUse_request.py | 1e9eebdd11e508f0afbc3bb02fb584f586d612a3 | [] | no_license | whiteprism/mywork | 2329b3459c967c079d6185c5acabd6df80cab8ea | a8e568e89744ca7acbc59e4744aff2a0756d7252 | refs/heads/master | 2021-01-21T11:15:49.090408 | 2017-03-31T03:28:13 | 2017-03-31T03:28:13 | 83,540,646 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 196 | py | # -*- encoding:utf8 -*-
from messages.http import BaseHttp
class PackageUseRequest(BaseHttp):
def __init__(self):
super(self.__class__, self).__init__()
self.packageCode = ""
| [
"[email protected]"
] | |
6ee1c27a32d25079040da61af38f1c0d53ab849f | 8f6cc0e8bd15067f1d9161a4b178383e62377bc7 | /__OLD_CODE_STORAGE/reinforcement_learning/a3c/from_internet/playground-ikostrikov-pytorch-a3c/envs.py | 84224c825875b4c885430cd14323acae0c470d82 | [
"MIT"
] | permissive | humorbeing/python_github | 9c4dfc61a3cefbb266fefff335f6b28d05797e5e | e4b4b49bee7e7e3843c6874717779ce8d619bd02 | refs/heads/master | 2023-01-22T21:51:20.193131 | 2020-01-26T21:47:23 | 2020-01-26T21:47:23 | 163,707,778 | 0 | 0 | null | 2022-12-27T15:37:48 | 2019-01-01T01:58:18 | Python | UTF-8 | Python | false | false | 2,163 | py | import cv2
import gym
import numpy as np
from gym.spaces.box import Box
from matplotlib import pyplot as plt
def si(data):
plt.imshow(data, interpolation='nearest')
plt.show()
# Taken from https://github.com/openai/universe-starter-agent
def create_atari_env(env_id):
# print('2')
env = gym.make(env_id)
env = AtariRescale42x42(env)
env = NormalizedEnv(env)
return env
def _process_frame42(frame):
# si(frame)
frame = frame[34:34 + 160, :160]
# si(frame)
# Resize by half, then down to 42x42 (essentially mipmapping). If
# we resize directly we lose pixels that, when mapped to 42x42,
# aren't close enough to the pixel boundary.
frame = cv2.resize(frame, (80, 80))
# si(frame)
frame = cv2.resize(frame, (42, 42))
# si(frame)
# print(frame.shape)
frame = frame.mean(2, keepdims=True)
# si(frame)
# print(frame.shape)
frame = frame.astype(np.float32)
# si(frame)
frame *= (1.0 / 255.0)
# si(frame)
# print(frame.shape)
frame = np.moveaxis(frame, -1, 0)
# print(frame.shape)
# si(frame)
return frame
class AtariRescale42x42(gym.ObservationWrapper):
def __init__(self, env=None):
super(AtariRescale42x42, self).__init__(env)
self.observation_space = Box(0.0, 1.0, [1, 42, 42])
def _observation(self, observation):
return _process_frame42(observation)
class NormalizedEnv(gym.ObservationWrapper):
def __init__(self, env=None):
super(NormalizedEnv, self).__init__(env)
self.state_mean = 0
self.state_std = 0
self.alpha = 0.9999
self.num_steps = 0
def _observation(self, observation):
self.num_steps += 1
self.state_mean = self.state_mean * self.alpha + \
observation.mean() * (1 - self.alpha)
self.state_std = self.state_std * self.alpha + \
observation.std() * (1 - self.alpha)
unbiased_mean = self.state_mean / (1 - pow(self.alpha, self.num_steps))
unbiased_std = self.state_std / (1 - pow(self.alpha, self.num_steps))
return (observation - unbiased_mean) / (unbiased_std + 1e-8)
| [
"[email protected]"
] | |
8d0ba3fffd49ea3eea2c0d620f4f2610304025b1 | 48c0e32b803b59ed695e9d8f1dcdb877a2124252 | /virtual/bin/confusable_homoglyphs | 899edb47b6e39425a22faa0ecada590b2de2579c | [
"MIT"
] | permissive | willyowi/my-hood | 512604dc189527c7872125daf45b1cf8173448a8 | 8643b901b6234fc9c5ac50727d1789893cebc3b4 | refs/heads/master | 2022-12-15T03:52:57.961655 | 2019-09-17T05:54:01 | 2019-09-17T05:54:01 | 208,743,454 | 0 | 0 | null | 2022-11-22T04:15:05 | 2019-09-16T08:01:29 | Python | UTF-8 | Python | false | false | 262 | #!/home/moringa/Django/InstaInsta/virtual/bin/python3.6
# -*- coding: utf-8 -*-
import re
import sys
from confusable_homoglyphs.cli import cli
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(cli())
| [
"[email protected]"
] | ||
caf56dfe7e0c49a264c81da81d9ea914a84447b2 | b29acb2e230b3cf2f8be070850c34ed5d62dc80c | /Python/YPS/11/Sample6.py | 4b1cb2312995ea62fe3367f82ff96ca26700c936 | [] | no_license | MasatakaShibataSS/lesson | be6e3557c52c6157b303be268822cad613a7e0f7 | 4f3f81ba0161b820410e2a481b63a999d0d4338c | refs/heads/master | 2020-06-17T13:42:08.383167 | 2019-11-11T07:23:14 | 2019-11-11T07:23:14 | 195,940,605 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 140 | py | import urllib.request
page = urllib.request.urlopen("https://www.python.org/")
html = page.read()
str = html.decode()
print(str)
| [
"[email protected]"
] | |
c7002a8a4fd72fba25c69325e8430ad8ced27ca8 | 9eee1566e436a883fa3bd0266c6a7375e34ab4d7 | /core/cli/commands/__init__.py | 44633d5b9ef2ce1a47eab7ce8d55f2271b91fcce | [] | no_license | gropax/qiq-django | aa87fa070bf2083aba9043ebc96c2287be2cf7e5 | 31f8c6ad717994bd9b37fcdde3fec8549be5aec1 | refs/heads/master | 2020-07-09T05:19:13.117991 | 2017-01-10T16:54:52 | 2017-01-10T16:54:52 | 65,904,082 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 62 | py | from .manage import ManageCommand
from .qiq import QiqCommand
| [
"[email protected]"
] | |
6bdf85290bd663dbcce0fcc6c25afc13b3ec49b3 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03076/s440054219.py | 3147caa674c3d4a08d5b4d7bfac6bdbff79f2610 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 395 | py | from itertools import permutations
Order = [int(input()) for _ in range(5)]
ans = []
for orders in permutations(Order):
# print(orders)
tmp = 0
cnt = 1
for order in orders:
tmp += order
if cnt == 5:
ans.append(tmp)
for _ in range(11):
if tmp % 10 == 0:
break
tmp += 1
cnt += 1
print(min(ans)) | [
"[email protected]"
] | |
f9c248c90a84e28028bd38121a1b2c64c0646d36 | 277d4ee56616bb5930c57a57c68a202bf5085501 | /stubs/pytorch_transformers/modeling_transfo_xl_utilities.pyi | 690e9c0e6bf58fb17eaeff7e7fa883231a190cb1 | [
"MIT"
] | permissive | miskolc/spacy-pytorch-transformers | fc502523644eb25cb293e0796b46535ba581a169 | ab132b674c5a91510eb8cc472cdbdf5877d24145 | refs/heads/master | 2020-07-22T09:47:17.905850 | 2019-09-04T15:12:09 | 2019-09-04T15:12:09 | 207,156,566 | 1 | 0 | MIT | 2019-09-08T18:37:55 | 2019-09-08T18:37:55 | null | UTF-8 | Python | false | false | 1,205 | pyi | # Stubs for pytorch_transformers.modeling_transfo_xl_utilities (Python 3)
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
import torch.nn.functional as nn
from typing import Any, Optional
class ProjectedAdaptiveLogSoftmax(nn.Module):
n_token: Any = ...
d_embed: Any = ...
d_proj: Any = ...
cutoffs: Any = ...
cutoff_ends: Any = ...
div_val: Any = ...
shortlist_size: Any = ...
n_clusters: Any = ...
head_size: Any = ...
cluster_weight: Any = ...
cluster_bias: Any = ...
out_layers: Any = ...
out_projs: Any = ...
keep_order: Any = ...
def __init__(self, n_token: Any, d_embed: Any, d_proj: Any, cutoffs: Any, div_val: int = ..., keep_order: bool = ...) -> None: ...
def forward(self, hidden: Any, labels: Optional[Any] = ..., keep_order: bool = ...): ...
def log_prob(self, hidden: Any): ...
class LogUniformSampler:
range_max: Any = ...
dist: Any = ...
log_q: Any = ...
n_sample: Any = ...
def __init__(self, range_max: Any, n_sample: Any) -> None: ...
def sample(self, labels: Any): ...
def sample_logits(embedding: Any, bias: Any, labels: Any, inputs: Any, sampler: Any): ...
| [
"[email protected]"
] | |
26b8d5c8ca06054ba4ceacb635d29f047733406b | cedf275d9d0a9034f4d1227605b2f869098a4322 | /guize/migrations/0002_rule.py | c2a52efa781f4067da427bec40d04527405c7710 | [] | no_license | thorDemo/WagtailPointsShop | bb019dfef781d843d1d9e78fb24142f67a0d178a | f069bcb66514067197a59ffe25b68b47dea282e3 | refs/heads/master | 2020-06-02T23:34:47.100993 | 2019-08-30T10:08:43 | 2019-08-30T10:08:43 | 185,065,560 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 841 | py | # Generated by Django 2.2.1 on 2019-05-15 09:05
from django.db import migrations, models
import django.db.models.deletion
import wagtail.core.fields
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0041_group_collection_permissions_verbose_name_plural'),
('guize', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Rule',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('description', wagtail.core.fields.RichTextField()),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
]
| [
"[email protected]"
] | |
0a847c650ed4c43f4378921035fc98866e7c64ca | 8441f156e53afcc6c2b5190de2439c68eb40f218 | /scripts/test_pdl2resources.py | 4a1a86f180bd2d2d6030e297f64ca2bb8c9e882d | [] | no_license | usnistgov/oar-metadata | 99436a84d32d623d77310e75eee834c683ea1d5b | 2190bfc79d97f81d52dd24df0d4e9dc844065b67 | refs/heads/integration | 2023-07-08T16:06:23.258608 | 2023-04-22T21:00:09 | 2023-04-22T21:00:09 | 82,972,531 | 4 | 7 | null | 2023-06-30T18:27:38 | 2017-02-23T21:20:34 | Python | UTF-8 | Python | false | false | 2,595 | py | #!/usr/bin/env python
#
import os, pdb, sys, shutil, json
import unittest as test
import ejsonschema as ejs
datadir = os.path.join(os.path.dirname(os.path.dirname(
os.path.abspath(__file__))),
"jq", "tests", "data")
pdlfile = os.path.join(datadir, "nist-pdl-oct2016.json")
tmpname = "_test"
basedir = os.getcwd()
tmpdir = os.path.join(basedir, tmpname)
outdir = os.path.join(tmpdir, "nerdmrecs")
errdir = os.path.join(tmpdir, "errors")
scriptdir = os.path.dirname(__file__)
cvtscript = os.path.join(scriptdir, "pdl2resources.py")
schemadir = os.path.join(os.path.dirname(scriptdir), "model")
class TestConvert(test.TestCase):
@classmethod
def setUpClass(cls):
if not os.path.isdir(outdir):
if not os.path.isdir(tmpdir):
os.mkdir(tmpdir)
os.mkdir(outdir)
os.makedirs(errdir)
@classmethod
def tearDownClass(cls):
if os.path.exists(tmpdir):
shutil.rmtree(tmpdir)
def setUp(self):
self.val = ejs.ExtValidator.with_schema_dir(schemadir, ejsprefix='_')
def test_convert(self):
script = "python3 {0} -d {1} -T {2}".format(cvtscript, outdir, pdlfile)
self.assertEqual(os.system(script), 0)
files = [f for f in os.listdir(outdir) if f.endswith(".json")]
failed = []
passed = 0
for f in files:
nf = os.path.join(outdir, f)
errs = self.val.validate_file(nf, raiseex=False)
if len(errs) > 0:
failed.append(f)
with open(os.path.join(errdir, f), 'w') as fd:
for err in errs:
print(str(err), file=fd)
else:
sys.stderr.write(".")
passed += 1
with open(nf) as fd:
nerd = json.load(fd)
if 'theme' in nerd:
self.assertEqual(len(nerd['topic']), len(nerd['theme']))
if nerd['ediid'] == 'EBC9DB05EDEE5B0EE043065706812DF85':
self.assertIn('theme', nerd)
self.assertEqual(nerd['theme'][0], "Physics: Spectroscopy")
self.assertEqual(nerd['topic'][0]['tag'], "Physics: Spectroscopy")
self.assertTrue(all([':' in t for t in nerd['theme']]))
sys.stderr.write("\nValidated {0} files".format(str(passed)))
self.assertEqual(len(failed), 0,
"{0} converted file(s) failed validation".format(str(len(failed))))
if __name__ == '__main__':
test.main()
| [
"[email protected]"
] | |
84e6c18f5c63e7aa8929734ce272fa3a09eeb159 | 15ae6a6ca3a202e50e1905fb8f1bf8461d14e525 | /bender_behaviors/src/python_old/entrevista1.py | 184866f75c8f0ce575384b10e486eb55729d6c29 | [] | no_license | uchile-robotics-graveyard/code_graveyard | 5f8fa65ce0dc4698598ee4df00e27172cfd5be36 | 09feb04d2303456723542b95257e3ef3c86e263e | refs/heads/master | 2021-01-21T08:24:24.254179 | 2016-12-13T03:23:19 | 2016-12-13T03:23:19 | 91,625,843 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 628 | py | from bender_msgs.msg import *
from bender_srvs.srv import *
# Messages
from std_msgs.msg import *
# Services
from std_srvs.srv import *
rospy.init_node("ceremonia")
face = rospy.Publisher('/head',Emotion)
client_speech_synth = rospy.ServiceProxy('/speech_synthesizer/synthesize',synthesize)
print "Presione Enter"
a = raw_input() # Espera un 'enter' para comenzar
client_speech_synth("Pero senior rector que esta diciendo ")
face.publish("changeFace", "sad1", 0)
time.sleep(3)
client_speech_synth("creo que esta equivocado")
face.publish("changeFace", "angry1", 0)
time.sleep(6)
client_speech_synth("Muchas gracias")
| [
"[email protected]"
] | |
5627b2e467cd0e1614f8d47a99d2bf22066b9b4e | fb3caa66ac0b2254b422303d670a70e597067758 | /201911_AI_Sec_Baidu/core-attack-codes/a_04.py | 9f9cf0803ac57b036a01e238785d504de381f628 | [] | no_license | dyngq/Competitions | 065ec9f153919950b161aaa9fff6a9de9e29ba32 | e9b7ff8fbe038e148bc61b21b077f35cdc5368a9 | refs/heads/master | 2021-06-13T13:55:11.352531 | 2021-05-08T09:49:24 | 2021-05-08T09:49:24 | 186,392,400 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,022 | py | #coding=utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import functools
import numpy as np
import paddle.fluid as fluid
#加载自定义文件
import models
from attack.attack_pp import FGSM, PGD
from utils import init_prog, save_adv_image, process_img, tensor2img, calc_mse, add_arguments, print_arguments
####### Parse command-line parameters #######
parser = argparse.ArgumentParser(description=__doc__)
# Bind the project's add_arguments helper to this parser for concise arg registration.
add_arg = functools.partial(add_arguments, argparser=parser)
add_arg('class_dim', int, 121, "Class number.")
add_arg('shape', str, "3,224,224", "output image shape")
add_arg('input', str, "./input2_image/", "Input directory with images")
add_arg('output', str, "./input3_image/", "Output directory with images")
args = parser.parse_args()
# Echo the parsed arguments to stdout (project utility).
print_arguments(args)
###### Derive runtime settings from the parsed arguments ######
# "3,224,224" -> [3, 224, 224]: CHW shape fed to the network's input layer.
image_shape = [int(m) for m in args.shape.split(",")]
class_dim=args.class_dim
input_dir = args.input
output_dir = args.output
# Alternative backbone apparently tried before: Xception41
model_name="ResNet50"
# Path to the pretrained ResNet50 parameters loaded by load_persistables below.
pretrained_model="/home/aistudio/work/attack_example/attack_code/models_parameters/ResNet50"
# File inside input_dir listing "<label> <filename>" pairs (see get_original_file).
val_list = 'val_list.txt'
use_gpu=True
###### Build the attack graph ######
adv_program=fluid.Program()
# Finish initialization: define the forward network inside adv_program.
with fluid.program_guard(adv_program):
    input_layer = fluid.layers.data(name='image', shape=image_shape, dtype='float32')
    # Allow gradients to be computed w.r.t. the input image.
    input_layer.stop_gradient=False
    # model definition
    model = models.__dict__[model_name]()
    print(model_name)
    out_logits = model.net(input=input_layer, class_dim=class_dim)
    out = fluid.layers.softmax(out_logits)
place = fluid.CUDAPlace(0) if use_gpu else fluid.CPUPlace()
exe = fluid.Executor(place)
exe.run(fluid.default_startup_program())
# Load the pretrained model parameters.
fluid.io.load_persistables(exe, pretrained_model)
# Set the BN-layer state of adv_program (project helper).
init_prog(adv_program)
# Clone an evaluation-mode program used for inference/testing.
eval_program = adv_program.clone(for_test=True)
# Define the loss and the gradient of the loss w.r.t. the input image.
with fluid.program_guard(adv_program):
    label = fluid.layers.data(name="label", shape=[1] ,dtype='int64')
    loss = fluid.layers.cross_entropy(input=out, label=label)
    gradients = fluid.backward.gradients(targets=loss, inputs=[input_layer])[0]
######Inference
def inference(img):
    """Run one forward pass of the evaluation program.

    Args:
        img: preprocessed image tensor fed to the 'image' input layer.

    Returns:
        (pred_label, pred_score): argmax class index and its softmax score.
    """
    fetch_list = [out.name]
    result = exe.run(eval_program,
                     fetch_list=fetch_list,
                     feed={ 'image':img })
    result = result[0][0]
    pred_label = np.argmax(result)
    pred_score = result[pred_label].copy()
    return pred_label, pred_score
######FGSM attack
# untargeted attack
def attack_nontarget_by_FGSM(img, src_label):
    """Untargeted FGSM attack: perturb img until the predicted label changes.

    The step size doubles after every failed attempt; the loop gives up once
    the step exceeds the epsilon budget (values expressed in 1/64 pixel units).

    Returns:
        The (last) adversarial candidate converted back to an image array.
    """
    pred_label = src_label
    # step = 4.0/512.0
    step = 8.0/64.0
    eps = 32.0/64.0
    while pred_label == src_label:
        # generate an adversarial candidate with the current step size
        adv=FGSM(adv_program=adv_program,eval_program=eval_program,gradients=gradients,o=img,
                 input_layer=input_layer,output_layer=out,step_size=step,epsilon=eps,
                 isTarget=False,target_label=0,use_gpu=use_gpu)
        pred_label, pred_score = inference(adv)
        step *= 2
        if step > eps:
            # epsilon budget exhausted; return the last candidate anyway
            break
    print("Test-score: {0}, class {1}".format(pred_score, pred_label))
    adv_img=tensor2img(adv)
    return adv_img
def attack_nontarget_by_FGSM_test(img, src_label):
    """Debug helper: print the source label, then the model's prediction for img."""
    pred_label = src_label
    print("---------------AAAA-------------------Test-score: {0}, class {1}".format(pred_label, pred_label))
    pred_label, pred_score = inference(img)
    print("---------------BBBB-------------------Test-score: {0}, class {1}".format(pred_score, pred_label))
####### Main #######
def get_original_file(filepath):
    """Parse a validation-list file into [file_name, label] pairs.

    Each non-empty line must look like "<int_label> <file_name>".

    Args:
        filepath: path of the list file to read.

    Returns:
        list of [file_name (str), label (int)] pairs, in file order.
    """
    with open(filepath, 'r') as cfile:
        # the with-block closes the file; the old explicit close() was redundant
        full_lines = [line.strip() for line in cfile]
    original_files = []
    for line in full_lines:
        if not line:
            # tolerate blank/trailing lines instead of crashing on unpack
            continue
        label, file_name = line.split()
        original_files.append([file_name, int(label)])
    return original_files
def gen_adv():
    """Generate an adversarial image for every entry of the validation list.

    Images the model already misclassifies are copied through unchanged.
    Prints the per-image MSE and the average MSE over all files.
    """
    mse = 0
    original_files = get_original_file(input_dir + val_list)
    for filename, label in original_files:
        img_path = input_dir + filename
        print("Image: {0} ".format(img_path))
        img=process_img(img_path)
        # attack_nontarget_by_FGSM_test(img, label)
        prelabel, xxxx = inference(img)
        if label == prelabel:
            # model is correct on the clean image -> attack it
            adv_img = attack_nontarget_by_FGSM(img, label)
        else:
            # already misclassified: keep the original image unmodified
            adv_img = tensor2img(img)
        image_name, image_ext = filename.split('.')
        ## Save adversarial image (.jpg)
        save_adv_image(adv_img, output_dir+image_name+'.jpg')
        # attack_nontarget_by_FGSM_test(img, label)
        org_img = tensor2img(img)
        score = calc_mse(org_img, adv_img)
        print(score)
        mse += score
    print("ADV {} files, AVG MSE: {} ".format(len(original_files), mse/len(original_files)))
def main():
    """Entry point: run adversarial-example generation over the whole list."""
    gen_adv()
if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
6509d45fac1e61e8e1adb7641a828be45e3c4bd6 | f1d2a86b7dd93f4ddafa8961780775a28e7b4508 | /LeetCodePractice/657. Judge Route Circle.py | c460223a113be445b4b0e16c29382cf9793142c3 | [] | no_license | deepika087/CompetitiveProgramming | 76f8c1451fce1a8e3c94656f81a5b04363987dc6 | d40c24736a6fee43b56aa1c80150c5f14be4ff22 | refs/heads/master | 2021-06-12T02:26:22.374506 | 2021-02-20T19:27:57 | 2021-02-20T19:27:57 | 70,208,474 | 10 | 6 | null | null | null | null | UTF-8 | Python | false | false | 540 | py | __author__ = 'deepika'
class Solution(object):
    def judgeCircle(self, moves):
        """Return True if the move sequence ends back at the origin.

        'U'/'D' shift the y coordinate, 'R' shifts x right, and any other
        character shifts x left (mirrors the original branch structure).

        :type moves: str
        :rtype: bool
        """
        dx = 0
        dy = 0
        for move in moves:
            if move == 'U':
                dy += 1
            elif move == 'D':
                dy -= 1
            elif move == 'R':
                dx += 1
            else:
                dx -= 1
        return dx == 0 and dy == 0
s=Solution()
assert s.judgeCircle("UD") == True
assert s.judgeCircle("LL") == False | [
"[email protected]"
] | |
c669a2a88652de7ad32f758264e4aebdb6518c22 | 461d7bf019b9c7a90d15b3de05891291539933c9 | /bip_utils/addr/egld_addr.py | 5ce39f3efdc68e319a149a4f7e603112e1d6e4b2 | [
"MIT"
] | permissive | renauddahou/bip_utils | 5c21503c82644b57ddf56735841a21b6306a95fc | b04f9ef493a5b57983412c0ce460a9ca05ee1f50 | refs/heads/master | 2023-07-16T05:08:45.042084 | 2021-08-19T09:33:03 | 2021-08-19T09:33:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,965 | py | # Copyright (c) 2021 Emanuele Bellocchia
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Imports
from typing import Union
from bip_utils.addr.utils import AddrUtils
from bip_utils.bech32 import Bech32Encoder
from bip_utils.conf import Bip44Elrond
from bip_utils.ecc import IPublicKey
class EgldAddr:
    """ Elrond address class. It allows the Elrond address generation. """

    @staticmethod
    def EncodeKey(pub_key: Union[bytes, IPublicKey]) -> str:
        """ Encode a public key as an Elrond (bech32) address.

        Args:
            pub_key (bytes or IPublicKey): Public key bytes or object

        Returns:
            str: Bech32-encoded address string

        Raises:
            ValueError: If the public key is not valid
            TypeError: If the public key is not ed25519
        """
        validated_key = AddrUtils.ValidateAndGetEd25519Key(pub_key)
        # drop the single-byte compression prefix of the raw compressed key
        raw_key = validated_key.RawCompressed().ToBytes()[1:]
        hrp = Bip44Elrond.AddrConfKey("hrp")
        return Bech32Encoder.Encode(hrp, raw_key)
| [
"[email protected]"
] | |
8c30f409fcf6bfd0fc431ba46999e04e306eb0a3 | c36679186f669c6e3bd1c106c96d4a17be1f5ab1 | /Practice_Telusko/18.py | 09fd1764f0a745f0ee53343d99e9499adde34fde | [] | no_license | touhiduzzaman-tuhin/python-code-university-life | 60a3d671b200a6f5222c6d176c13c5f20f013509 | 6d2e3d90d430faa5c83fe79e7fb1ebe516994762 | refs/heads/master | 2023-03-22T15:18:10.636203 | 2021-03-06T18:52:04 | 2021-03-06T18:52:04 | 332,467,190 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 91 | py | li = [1, 2, 3, 4, 5]
print(li[-1])
print(li[-2])
print(li[-3])
print(li[-4])
print(li[-5]) | [
"[email protected]"
] | |
c38c36d6ffc05d42fe02639a242917e1d5d32199 | fd54c0886b81b49a55c31eb8c5254ce83df78785 | /Source_Code/madagascar/appussd/utilities/data_ucip/tests.py | 67147cdba1ee753b31be73451e555d3f685e8c22 | [] | no_license | santsy03/RADIX | 7854896651833b1be6e3279be409db59a71c76e4 | da8f2535692697b80a6dc543b9eb270fe3d5e4d3 | refs/heads/master | 2021-01-12T09:48:32.085432 | 2016-12-13T06:01:41 | 2016-12-13T06:01:41 | 76,260,115 | 0 | 0 | null | 2016-12-13T06:01:41 | 2016-12-12T13:46:31 | null | UTF-8 | Python | false | false | 984 | py | def test_bill_subscriber():
'''tests the bill_subscriber function'''
from utilities.ucip.core import bill_subscriber
resources = {}
parameters = {}
parameters['msisdn'] = '254735267974'
parameters['transactionId'] = 1
parameters['externalData1'] = 'test'
parameters['externalData2'] = 'test'
parameters['price'] = '1'
resources['parameters'] = parameters
resp = bill_subscriber(resources)
print resp
def test_get_balance():
    '''tests the get_balance function'''
    # imported inside the function so this module loads without the dependency
    from utilities.ucip.core import get_balance
    resources = {}
    parameters = {}
    # test MSISDN (Kenyan number) and dummy transaction metadata
    parameters['msisdn'] = '254735267974'
    parameters['transactionId'] = 1
    parameters['externalData1'] = 'test'
    parameters['externalData2'] = 'test'
    # NOTE(review): 'price' looks unused for a balance query -- confirm
    parameters['price'] = '1'
    resources['parameters'] = parameters
    resp = get_balance(resources)
    print resp
if __name__ == '__main__':
    # both manual tests are disabled by default; uncomment to run one
    pass
    #test_bill_subscriber()
    #test_get_balance()
| [
"[email protected]"
] | |
2e58446c7b4fbaa9e6612720013bf890545919e6 | a6bc66b6c1561fc465d7c321d2584c1c7d6bd792 | /sqliteschema/_text_extractor.py | ec137f4f88eedc36c5d605868782f47158484aff | [
"MIT"
] | permissive | dirkakrid/sqliteschema | defffda63d5922ec29d4f04cbe3895dfdca8a3e2 | 70aae30bd6e1eba0e959476246a2a6907b8f4b2d | refs/heads/master | 2021-01-22T21:27:54.759880 | 2017-02-26T13:24:58 | 2017-02-26T13:24:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,261 | py | #!/usr/bin/env python
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from __future__ import absolute_import
from __future__ import unicode_literals
from collections import OrderedDict
import six
import typepy
from ._error import DataNotFoundError
from ._interface import AbstractSqliteSchemaExtractor
class SqliteSchemaTextExtractorV0(AbstractSqliteSchemaExtractor):
    """Lowest-verbosity extractor: emits table names only."""

    @property
    def verbosity_level(self):
        return 0

    def get_table_schema(self, table_name):
        # no attribute information at this verbosity level
        return []

    def get_table_schema_text(self, table_name):
        self._validate_table_existence(table_name)
        return "{:s}\n".format(table_name)

    def _write_database_schema(self):
        # sqlite_sequence is SQLite's internal AUTOINCREMENT bookkeeping table
        user_tables = (
            name for name in self.get_table_name_list()
            if name != "sqlite_sequence"
        )
        for name in user_tables:
            self._stream.write(self.get_table_schema_text(name))
class SqliteSchemaTextExtractorV1(SqliteSchemaTextExtractorV0):
    """Verbosity 1: table name plus bare attribute (column) names."""

    @property
    def verbosity_level(self):
        return 1

    def get_table_schema(self, table_name):
        # the first whitespace-separated token of each attribute line is its name
        names = []
        for attr in self._get_attr_schema(table_name, "table"):
            names.append(attr.split()[0])
        return names

    def get_table_schema_text(self, table_name):
        attr_csv = ", ".join(self.get_table_schema(table_name))
        return "{:s} ({:s})\n".format(table_name, attr_csv)
class SqliteSchemaTextExtractorV2(AbstractSqliteSchemaExtractor):
    """Verbosity 2: one line per table as "<table> (<col> <type>, ...)"."""
    @property
    def verbosity_level(self):
        return 2
    def get_table_schema(self, table_name):
        # first two whitespace-separated tokens of each attribute line:
        # presumably the column name and its declared type -- TODO confirm
        return OrderedDict([
            attr.split()[:2]
            for attr in self._get_attr_schema(table_name, "table")
        ])
    def get_table_schema_text(self, table_name):
        attr_list = []
        for key, value in six.iteritems(self.get_table_schema(table_name)):
            attr_list.append("{:s} {:s}".format(key, value))
        return "{:s} ({:s})\n".format(table_name, ", ".join(attr_list))
    def _write_table_schema(self, table_name):
        self._stream.write(self.get_table_schema_text(table_name))
    def _write_database_schema(self):
        # unlike V0/V1, sqlite_sequence is not filtered out here
        for table_name in self.get_table_name_list():
            self._write_table_schema(table_name)
class SqliteSchemaTextExtractorV3(SqliteSchemaTextExtractorV2):
    """Verbosity 3: column name mapped to its full declaration string."""

    @property
    def verbosity_level(self):
        return 3

    def get_table_schema(self, table_name):
        # map each column name to the rest of its declaration
        # (type plus any constraints), preserving column order
        schema = OrderedDict()
        for attr in self._get_attr_schema(table_name, "table"):
            tokens = attr.split()
            schema[tokens[0]] = " ".join(tokens[1:])
        return schema
class SqliteSchemaTextExtractorV4(SqliteSchemaTextExtractorV3):
    """Verbosity 4: multi-line, indented, CREATE-TABLE-like rendering."""
    @property
    def verbosity_level(self):
        return 4
    def get_table_schema_text(self, table_name):
        # renders:
        #   <table> (
        #       <col> <declaration>,
        #       ...
        #   )
        attr_list = []
        for key, value in six.iteritems(self.get_table_schema(table_name)):
            attr_list.append("{:s} {:s}".format(key, value))
        return "\n".join([
            "{:s} (".format(table_name),
        ] + [
            ",\n".join([
                "    {:s}".format(attr)
                for attr in attr_list
            ])
        ] + [
            ")\n",
        ])
    def _write_table_schema(self, table_name):
        # write the table block, then a blank separator line
        super(SqliteSchemaTextExtractorV4, self)._write_table_schema(
            table_name)
        self._stream.write("\n")
class SqliteSchemaTextExtractorV5(SqliteSchemaTextExtractorV4):
    """Verbosity 5: V4 output plus the table's index schemas, when any exist."""
    __ENTRY_TYPE_LIST = ["table", "index"]
    @property
    def verbosity_level(self):
        return 5
    def get_table_schema_text(self, table_name):
        schema_text = super(
            SqliteSchemaTextExtractorV5,
            self
        ).get_table_schema_text(table_name)
        try:
            index_schema = self._get_index_schema(table_name)
        except DataNotFoundError:
            # table has no index entries; fall back to the V4 text
            return schema_text
        # keep only non-empty index entries
        index_schema_list = [
            "{}".format(index_entry)
            for index_entry in index_schema
            if typepy.is_not_null_string(index_entry)
        ]
        if typepy.is_empty_sequence(index_schema_list):
            return schema_text
        return "{:s}{:s}\n".format(schema_text, "\n".join(index_schema_list))
| [
"[email protected]"
] | |
4bd6b7505d8e2a7353534586bcc8c68933891220 | c5b4d174ace61dd5914ca99fb0f2c710d0182324 | /pypes/fmri/rest.py | 3e34ff1e4c8139ca7c77288a6ee1befd637781ba | [
"Apache-2.0"
] | permissive | erramuzpe/pypes | 636c6b31023747a571af90390fd85b2dd6806dea | 3922d3162dc633b30961c036efdeb5d221ab1bfb | refs/heads/master | 2020-12-24T06:43:15.063955 | 2017-04-05T19:51:05 | 2017-04-05T19:51:05 | 73,461,509 | 0 | 0 | null | 2016-11-11T08:54:15 | 2016-11-11T08:54:14 | null | UTF-8 | Python | false | false | 2,291 | py | # -*- coding: utf-8 -*-
"""
Nipype workflows to process resting-state functional MRI.
"""
from .grouptemplate import attach_spm_fmri_grouptemplate_wf
from .clean import attach_fmri_cleanup_wf
from .warp import attach_spm_warp_fmri_wf
def _attach_rest_preprocessing(main_wf, registration_wf_name="spm_warp_fmri", do_group_template=False):
    """ Attach the resting-state MRI pre-processing workflow to the `main_wf`.
    Parameters
    ----------
    main_wf: nipype Workflow
    registration_wf_name: str
        Name of the registration workflow.
    do_group_template: bool
        If True will attach the group template creation and pre-processing pipeline.
    Nipype Inputs for `main_wf`
    ---------------------------
    Note: The `main_wf` workflow is expected to have an `input_files` and a `datasink` nodes.
    input_files.select.anat: input node
    datasink: nipype Node
    Returns
    -------
    main_wf: nipype Workflow
    """
    # nuisance cleanup first, then subject-space warping of the cleaned fMRI
    main_wf = attach_fmri_cleanup_wf(main_wf)
    main_wf = attach_spm_warp_fmri_wf(main_wf,
                                      registration_wf_name=registration_wf_name,
                                      do_group_template=False)
    if do_group_template:
        # build the group EPI template, then a second warp pipeline onto it
        main_wf = attach_spm_fmri_grouptemplate_wf(main_wf, wf_name="spm_fmri_grptemplate")
        main_wf = attach_spm_warp_fmri_wf(main_wf,
                                          registration_wf_name=registration_wf_name,
                                          do_group_template=True)
        reg_wf = main_wf.get_node("{}_{}".format(registration_wf_name, 'grptemplate'))
        grp_template = main_wf.get_node("group_template")
        # feed the created group template into the template-registration workflow
        main_wf.connect([(grp_template, reg_wf, [("fmri_template", "wfmri_input.epi_template")]),])
    return main_wf
def attach_rest_preprocessing(main_wf, wf_name="spm_warp_fmri"):
    """Attach the subject-space rs-fMRI pipeline (no group template)."""
    return _attach_rest_preprocessing(main_wf,
                                      registration_wf_name=wf_name,
                                      do_group_template=False)
def attach_rest_grptemplate_preprocessing(main_wf, wf_name="spm_warp_fmri"):
    """Attach the rs-fMRI pipeline including group-template creation/registration."""
    return _attach_rest_preprocessing(main_wf,
                                      registration_wf_name=wf_name,
                                      do_group_template=True)
| [
"[email protected]"
] | |
7d48674929e67d0b4ff1c26a9fdc2fdda1b32b00 | 83cf642504313b6ef6527dda52158a6698c24efe | /scripts/addons/remote_debugger.py | 50b2e8e90a16e7b474042e3fa8c4903af0829552 | [] | no_license | PyrokinesisStudio/Fluid-Designer-Scripts | a4c40b871e8d27b0d76a8025c804d5a41d09128f | 23f6fca7123df545f0c91bf4617f4de7d9c12e6b | refs/heads/master | 2021-06-07T15:11:27.144473 | 2016-11-08T03:02:37 | 2016-11-08T03:02:37 | 113,630,627 | 1 | 0 | null | 2017-12-09T00:55:58 | 2017-12-09T00:55:58 | null | UTF-8 | Python | false | false | 2,146 | py | bl_info = {
'name' : 'Remote Debugger',
'author' : '',
'version' : (0, 2),
'blender' : (2, 75, 0),
'location' : '',
'category' : 'Development'}
import bpy
import os.path
from bpy.types import AddonPreferences
from bpy.props import StringProperty
class DebuggerAddonPreferences(AddonPreferences):
    """Add-on preferences: location of the PyCharm remote-debug egg."""
    bl_idname = __name__
    # path to pycharm-debug-py3k.egg (the Python 3 variant is required)
    eggpath = StringProperty(name="Path of PyCharm egg file",
                             description="make sure to select the py3k egg",
                             subtype="FILE_PATH",
                             default="pycharm-debug-py3k.egg"
                             )
    def draw(self, context):
        """Draw the preferences UI: the egg path field plus a reminder label."""
        layout = self.layout
        layout.prop(self, 'eggpath')
        layout.label(text="Make sure to select the egg for Python 3.x: pycharm-debug-py3k.egg")
class DEBUG_OT_connect_debugger(bpy.types.Operator):
    """Operator that connects Blender to a PyCharm remote debugger."""
    bl_idname = "debug.connect_debugger"
    bl_label = "Connect to remote Python debugger"
    bl_description = "Connects to a PyCharm debugger on localhost:1090"

    def execute(self, context):
        """Add the PyCharm debug egg to sys.path and call pydevd.settrace().

        Returns:
            {'FINISHED'} on success, {'CANCELLED'} if the egg is missing.
        """
        import sys
        user_preferences = context.user_preferences
        addon_prefs = user_preferences.addons[__name__].preferences
        eggpath = os.path.abspath(addon_prefs.eggpath)
        if not os.path.exists(eggpath):
            # BUG FIX: the message contained a bare %r placeholder that was
            # never substituted; interpolate the path so the user can see it.
            self.report({'ERROR'},
                        "Unable to find debug egg at %r. Configure the addon properties" % eggpath)
            return {'CANCELLED'}
        # only extend sys.path once, even across repeated invocations
        if not any("pycharm-debug" in p for p in sys.path):
            sys.path.append(eggpath)
        import pydevd
        pydevd.settrace('localhost', port=1090, stdoutToServer=True, stderrToServer=True)
        return {'FINISHED'}
def register():
    """Register the operator and the preferences panel with Blender."""
    bpy.utils.register_class(DEBUG_OT_connect_debugger)
    bpy.utils.register_class(DebuggerAddonPreferences)
def unregister():
    """Remove the add-on's classes from Blender."""
    bpy.utils.unregister_class(DEBUG_OT_connect_debugger)
    bpy.utils.unregister_class(DebuggerAddonPreferences)
if __name__ == '__main__':
    register()
| [
"[email protected]"
] | |
2dab992e4eb273021e8b75698a6f71b4a65cd9bb | 66973fe6468e1b1f9cd7df765819ba0cb89f6ff8 | /bin/gitstatus | cdb9115b3dd0b17ae88a8f7b6f75db6f5111949c | [
"MIT"
] | permissive | dsavransky/miscpy | 90fee44f245c5eca89897e6536dae702f8836484 | 2aab0e3d089a678ee7fee18be47f2b16591c150a | refs/heads/main | 2022-06-15T21:54:07.285900 | 2022-05-31T23:25:27 | 2022-05-31T23:25:27 | 39,569,507 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,712 | #!/usr/bin/env python
import os
import subprocess
import glob
import argparse
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Check status on all git repos, fetching (or pulling) from remotes."
    )
    parser.add_argument(
        "--pull",
        action="store_true",
        help="Do a pull (rather than default fetch on remotes)",
    )
    parser.add_argument(
        "--noremote",
        action="store_true",
        help="Don't contact any remotes.",
    )
    args = parser.parse_args()
    # define all relevant dirs: fixed Documents subfolders plus everything
    # cloned under Documents/gitrepos
    basedir = os.path.join(os.environ['HOME'], 'Documents')
    gitdirs = ['Proposals', 'Talks', 'Teaching', 'TeXStuff', 'MATLAB', 'Notes',
               'BragSheets', 'Reviews', 'Letters']
    gitdirs = [os.path.join(basedir, d) for d in gitdirs]
    gitdirs += glob.glob(os.path.join(basedir, 'gitrepos/*'))
    for d in gitdirs:
        if "gitrepos" in d:
            # only consider cloned repos whose remotes mention the owner's account
            tmp = subprocess.run(["git", "remote", "-v"], cwd=d,
                                 capture_output=True).stdout.decode()
            if "dsavransky" not in tmp:
                continue
        if not(args.noremote):
            # refresh remote state: pull merges, fetch only updates refs
            if args.pull:
                res0 = subprocess.run(["git", "pull"], cwd=d, capture_output=True)
            else:
                res0 = subprocess.run(["git", "fetch"], cwd=d, capture_output=True)
        res = subprocess.run(["git", "status"], cwd=d, capture_output=True).stdout.decode()
        # skip repos that are clean and in sync; print status for the rest
        if ("Your branch is up to date" in res) and ("nothing to commit" in res) and\
                ("working tree clean" in res):
            continue
        print("{}\n".format(d))
        print(res)
        print("\n")
"[email protected]"
] | ||
0b3988687c2fcec1c85d3999fa91dd1ed46daa05 | d9a912d066b8a10f2611ed80af8e720522da7a51 | /tf.py | 54cb79555f0d631b74adee30da18cc24db3adfbc | [] | no_license | Sanny26/information_retrieval | 871bba71d1e8d93329e24fc052d738d03241875f | 4fb29c9da28fa1937f43c5b36fc470b0420f2ba0 | refs/heads/master | 2021-08-26T08:39:04.094595 | 2017-11-22T17:14:51 | 2017-11-22T17:14:51 | 111,576,998 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,603 | py | """Main code for finding TF-IDF scores."""
from collections import Counter
from math import log
from utils import preprocess_file
import os
import numpy as np
import pickle
def get_tf_idf_weights(path):
    """Compute log-scaled term-frequency (TF) weights for all documents.

    Walks every file under the sub-directories of `path`, tokenizes it with
    `preprocess_file`, and builds a term x document weight matrix where the
    weight is 1 + log10(raw frequency).

    Returns:
        matrix: numpy array of shape (n_terms, n_docs + 1); document columns
            are 1-based (column 0 is unused, kept for backward compatibility).
        doc_names: dict mapping 0-based document counters to file paths.
            NOTE(review): these keys are offset by one from the matrix
            columns (doc_names[0] corresponds to matrix column 1).
        term_list: terms in first-seen order, matching the matrix rows.
    """
    doc_no = 0
    doc_names = dict()
    tf_list = dict()  # term -> list of (doc_no, 1 + log10(frequency))
    sub_dirs = os.listdir(path)
    term_list = []
    for dr in sub_dirs:
        dr_path = path + dr + "/"
        files = os.listdir(dr_path)
        for f in files:
            f_path = dr_path + f
            doc_names[doc_no] = f_path
            doc_no += 1
            print(doc_no)
            processed_text = preprocess_file(f_path)
            tf = Counter(processed_text)
            for term, frequency in dict(tf).items():
                if term not in tf_list:
                    tf_list[term] = []
                    term_list.append(term)
                tf_list[term].append((doc_no, 1 + log(frequency, 10)))
    # term_list preserves first-seen order, so rows align with it directly
    matrix = np.zeros((len(tf_list), doc_no + 1), dtype=float)
    for i, term in enumerate(term_list):
        for doc_id, freq in tf_list[term]:
            matrix[i, doc_id] = freq
    return matrix, doc_names, term_list
def main():
    """Entry point: build the TF model for test/ and pickle the results."""
    path = "test/"
    weights, doc_names, terms = get_tf_idf_weights(path)
    # persist the matrix, the doc-id -> path map, and the row-order term list
    pickle.dump(weights, open("pickles/tf.p", "wb"))
    pickle.dump(doc_names, open("pickles/tf-file-names.p", "wb"))
    pickle.dump(terms, open("pickles/tf-terms.p", "wb"))
if __name__ == "__main__":
    main()
| [
"[email protected]"
] | |
bac5086f28d452d811d08211fd9648f3fd55c034 | 41bea39563c74621924d79723f8ba84889958365 | /nkamg_pcap/server/antimal/misc/trails/feeds/proxyspy.py | 1ac62a432b900d21ea14ee85e56f7891e002c3b8 | [
"MIT"
] | permissive | NKQiuKF/pcap_update | abee0c13cb583fddb89eb9e86a487279bdc18f1d | 679e3f116367394a5f58eb4f95b5318e80fee331 | refs/heads/master | 2022-10-21T17:49:30.706480 | 2019-09-02T09:22:06 | 2019-09-02T09:22:06 | 205,816,421 | 1 | 0 | null | 2022-10-06T18:33:32 | 2019-09-02T08:55:55 | JavaScript | UTF-8 | Python | false | false | 717 | py | #!/usr/bin/env python
"""
Copyright (c) 2014-2016 Miroslav Stampar (@stamparm)
See the file 'LICENSE' for copying permission
"""
from core.common import retrieve_content
__url__ = "https://raw.githubusercontent.com/firehol/blocklist-ipsets/master/proxyspy_1d.ipset"
__check__ = "proxyspy_1d"
__info__ = "proxy (suspicious)"
__reference__ = "spys.ru"
def fetch():
    """Return {ip: (info, reference)} for every address listed in the feed."""
    entries = {}
    content = retrieve_content(__url__)
    if __check__ not in content:
        # sanity marker missing -> feed unreachable or malformed; return nothing
        return entries
    for raw_line in content.split('\n'):
        candidate = raw_line.strip()
        # skip blanks, comment lines, and anything without a dot (not an IP)
        if candidate and not candidate.startswith('#') and '.' in candidate:
            entries[candidate] = (__info__, __reference__)
    return entries
| [
"[email protected]"
] | |
d00bc0b7e81a3e19920c70b70559eaeea7761f16 | 42e4cc242a2be105dae0288d02a08fbd95bb476a | /deepspeed/elasticity/constants.py | 03cba725fa87d66ef5008c333c05243c149fe043 | [
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | afiaka87/DeepSpeed | a49ca48a410190b631a78c392c25359ed4478577 | 83ff549aa365d4fee744074ac0a64f27571ecbc8 | refs/heads/main | 2023-04-14T16:22:37.595209 | 2021-04-12T09:20:06 | 2021-04-12T09:20:06 | 356,466,212 | 2 | 0 | MIT | 2021-04-12T09:20:07 | 2021-04-10T04:09:31 | Python | UTF-8 | Python | false | false | 2,262 | py | """
Copyright 2020 The Microsoft DeepSpeed Team
"""
#########################################
# Elasticity
#########################################
''' Elasticity Utility in DeepSpeed can be used to create highly elastic jobs compatible
with a large number of GPUs. For elastic jobs, DeepSpeed will provide a batch size that
can support a large number of GPUs based on the user specified parameters
'''
FORMAT = '''
Elasticity should be enabled as:
"elasticity": {
"enabled": true,
"max_train_batch_size": 2000,
"micro_batch_sizes": [2,4,6],
"min_gpus": 1,
"max_gpus" : 10000
"min_time": 20,
"prefer_larger_batch": true,
"ignore_non_elastic_batch_info": false,
"version": 0.1
}
'''
ELASTICITY = 'elasticity'
# Current elasticity version
LATEST_ELASTICITY_VERSION = 0.1
ENABLED = 'enabled'
ENABLED_DEFAULT = False
# Max acceptable train_batch_size
MAX_ACCEPTABLE_BATCH_SIZE = 'max_train_batch_size'
MAX_ACCEPTABLE_BATCH_SIZE_DEFAULT = 2000
# Acceptable micro batch sizes, same as train_micro_batch_size_per_gpu
MICRO_BATCHES = 'micro_batch_sizes'
MICRO_BATCHES_DEFAULT = [2, 4, 6]
# Min/max of GPUs to search over
MIN_GPUS = 'min_gpus'
MIN_GPUS_DEFAULT = 1
MAX_GPUS = 'max_gpus'
MAX_GPUS_DEFAULT = 10000
# Minimum running time (minutes) before the scheduler will scale us, 0 implies it's unknown
MIN_TIME = "min_time"
MIN_TIME_DEFAULT = 0
# When finding a suitable batch size, attempt to find one that is closest
# to the max train batch size given.
PREFER_LARGER_BATCH = 'prefer_larger_batch'
PREFER_LARGER_BATCH_DEFAULT = True
# In order to reduce confusion, if elastic mode is enabled we
# require (via assert) that no batch info is set outside of the
# elastic config. You can turn off this assert via this config
# but keep in mind that all batch info defined outside the
# elastic mode *will be ignored*.
IGNORE_NON_ELASTIC_BATCH_INFO = 'ignore_non_elastic_batch_info'
IGNORE_NON_ELASTIC_BATCH_INFO_DEFAULT = False
# Version of elastic logic to use
VERSION = "version"
VERSION_DEFAULT = LATEST_ELASTICITY_VERSION
# Minimum deepspeed version to use elasticity
MINIMUM_DEEPSPEED_VERSION = "0.3.8"
# Environment variable storing elastic config from resource scheduler
DEEPSPEED_ELASTICITY_CONFIG = "DEEPSPEED_ELASTICITY_CONFIG"
| [
"[email protected]"
] | |
f8114ec28447eee38a12cf5ac1de1c2782d617a8 | b34463870a1754e5f60029a77621c72f32b29a64 | /08_cpp-overload-eigen/test.py | 1d53e9920cbff02aaa4110da2d6b1f686d5c2d89 | [
"MIT"
] | permissive | strint/pybind11_examples | 63d59c064a0e9a6d4ddd64413c3955df53a84969 | f74b3cda2b9d39728923b9b155920e828b7a29f7 | refs/heads/master | 2022-12-02T12:56:52.824416 | 2020-08-13T07:41:10 | 2020-08-13T07:41:10 | 285,498,754 | 0 | 0 | MIT | 2020-08-06T07:04:18 | 2020-08-06T07:04:17 | null | UTF-8 | Python | false | false | 210 | py | import numpy as np
import example

# 3x3 integer test matrix and a scalar multiplier for the overloaded binding.
A = np.array([[1, 2, 1],
              [2, 1, 0],
              [-1, 1, 2]])
B = 10
# np.int and np.float were deprecated aliases of the builtins and were
# removed in NumPy 1.24; use the builtin types directly.
print(example.mul(A.astype(int), int(B)))
print(example.mul(A.astype(float), float(B)))
| [
"[email protected]"
] | |
315603494a810e20704e702766b8df35b57a18c2 | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /ACL_PyTorch/contrib/nlp/Rosetta_Resnet34_vd/Rosetta_Resnet34_vd_postprocess.py | 6e17c6800c641359f12e3e433a4c33587569cab5 | [
"GPL-1.0-or-later",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 2,248 | py | # Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
__dir__ = os.path.dirname(os.path.abspath(__file__))
sys.path.append(__dir__)
sys.path.append(os.path.abspath(os.path.join(__dir__, 'PaddleOCR')))
import paddle
import numpy as np
import tools.program as program
from tqdm import tqdm
from ppocr.data import build_dataloader
from ppocr.metrics import build_metric
from ppocr.postprocess import build_post_process
def main(config, device, logger, vdl_writer):
    """Evaluate pre-computed inference outputs against the eval dataset.

    Reads one raw logits file per sample from config['results']
    (named img_<idx>_0.bin), decodes it with the configured post-process,
    and accumulates the configured metric, printing the final result.
    """
    valid_dataloader = build_dataloader(config, 'Eval', device, logger)
    eval_class = build_metric(config['Metric'])
    global_config = config['Global']
    post_process_class = build_post_process(config['PostProcess'], global_config)
    pbar = tqdm(
        total=len(valid_dataloader),
        desc='Postprocessing',
        position=0,
        leave=True)
    for idx, batch in enumerate(valid_dataloader):
        result_name = 'img_{}_0.bin'.format(idx)
        result = os.path.join(config['results'], result_name)
        # assumes each .bin holds float32 logits of shape (1, 25, 37) -- TODO confirm
        preds = paddle.to_tensor(np.fromfile(result, dtype=np.float32).reshape(1, 25, 37))
        batch = [item.numpy() for item in batch]
        # batch[1] presumably carries the ground-truth labels -- verify upstream
        post_result = post_process_class(preds, batch[1])
        eval_class(post_result, batch)
        pbar.update(1)
    pbar.close()
    metric = eval_class.get_metric()
    print(metric)
if __name__ == "__main__":
    config, device, logger, vdl_writer = program.preprocess()
    main(config, device, logger, vdl_writer)
| [
"[email protected]"
] | |
115cd19c16bf70d8b6fb245210f8ee067431cb67 | adb6755eb1a3d91375e6b4e9b8c1afd07f85313b | /venv/Lib/site-packages/pandas/tests/tseries/offsets/test_offsets_properties.py | afb0549a23c3d833a3cad9b8f36e3d974929567b | [] | no_license | Atwinenickson/Socialmediaclassifier- | af54b559569e80004c441fc90dc44b0ee945555d | 05c5abbaba8694d9bf95d745ffca75c17ac69621 | refs/heads/master | 2022-12-15T01:33:18.073259 | 2019-06-07T15:38:18 | 2019-06-07T15:38:18 | 190,616,071 | 1 | 0 | null | 2022-12-08T05:13:29 | 2019-06-06T16:41:17 | Python | UTF-8 | Python | false | false | 3,941 | py | # -*- coding: utf-8 -*-
"""
Behavioral based tests for offsets and date_range.
This file is adapted from https://github.com/pandas-dev/pandas/pull/18761 -
which was more ambitious but less idiomatic in its use of Hypothesis.
You may wish to consult the previous version for inspiration on further
tests, or when trying to pin down the bugs exposed by the tests below.
"""
import warnings
from hypothesis import assume, given, strategies as st
from hypothesis.extra.dateutil import timezones as dateutil_timezones
from hypothesis.extra.pytz import timezones as pytz_timezones
import pytest
import pandas as pd
from pandas.tseries.offsets import (
BMonthBegin, BMonthEnd, BQuarterBegin, BQuarterEnd, BYearBegin, BYearEnd,
MonthBegin, MonthEnd, QuarterBegin, QuarterEnd, YearBegin, YearEnd)
# ----------------------------------------------------------------
# Helpers for generating random data
with warnings.catch_warnings():
    warnings.simplefilter('ignore')
    # BUG FIX: these lines previously ended with stray trailing commas, which
    # bound 1-tuples instead of datetimes and broke st.datetimes(min_value=...).
    # Bind plain datetimes and use the intended 1900-2100 window (matching
    # the bounds used by gen_date_range below).
    min_dt = pd.Timestamp(1900, 1, 1).to_pydatetime()
    max_dt = pd.Timestamp(2100, 1, 1).to_pydatetime()
# strategy producing pandas DatetimeIndex values via pd.date_range
gen_date_range = st.builds(
    pd.date_range,
    start=st.datetimes(
        # TODO: Choose the min/max values more systematically
        min_value=pd.Timestamp(1900, 1, 1).to_pydatetime(),
        max_value=pd.Timestamp(2100, 1, 1).to_pydatetime()
    ),
    periods=st.integers(min_value=2, max_value=100),
    freq=st.sampled_from('Y Q M D H T s ms us ns'.split()),
    tz=st.one_of(st.none(), dateutil_timezones(), pytz_timezones()),
)
# NOTE(review): st.datetimes() requires plain datetime bounds; confirm that
# min_dt/max_dt defined above are datetimes, not tuples.
gen_random_datetime = st.datetimes(
    min_value=min_dt,
    max_value=max_dt,
    timezones=st.one_of(st.none(), dateutil_timezones(), pytz_timezones())
)
# The strategy for each type is registered in conftest.py, as they don't carry
# enough runtime information (e.g. type hints) to infer how to build them.
gen_yqm_offset = st.one_of(*map(st.from_type, [
MonthBegin, MonthEnd, BMonthBegin, BMonthEnd,
QuarterBegin, QuarterEnd, BQuarterBegin, BQuarterEnd,
YearBegin, YearEnd, BYearBegin, BYearEnd
]))
# ----------------------------------------------------------------
# Offset-specific behaviour tests
# Based on CI runs: Always passes on OSX, fails on Linux, sometimes on Windows
@pytest.mark.xfail(strict=False, reason='inconsistent between OSs, Pythons')
@given(gen_random_datetime, gen_yqm_offset)
def test_on_offset_implementations(dt, offset):
    """Property test: onOffset(dt) must agree with (dt + offset) - offset == dt."""
    # normalize=True truncates times, which breaks the round-trip identity below
    assume(not offset.normalize)
    # check that the class-specific implementations of onOffset match
    # the general case definition:
    #   (dt + offset) - offset == dt
    compare = (dt + offset) - offset
    assert offset.onOffset(dt) == (compare == dt)
@pytest.mark.xfail
@given(gen_yqm_offset, gen_date_range)
def test_apply_index_implementations(offset, rng):
    """Property test: vectorized apply_index matches element-wise addition."""
    # offset.apply_index(dti)[i] should match dti[i] + offset
    assume(offset.n != 0)  # TODO: test for that case separately
    # rng = pd.date_range(start='1/1/2000', periods=100000, freq='T')
    ser = pd.Series(rng)
    res = rng + offset
    res_v2 = offset.apply_index(rng)
    assert (res == res_v2).all()
    # endpoints must also match scalar addition
    assert res[0] == rng[0] + offset
    assert res[-1] == rng[-1] + offset
    res2 = ser + offset
    # apply_index is only for indexes, not series, so no res2_v2
    assert res2.iloc[0] == ser.iloc[0] + offset
    assert res2.iloc[-1] == ser.iloc[-1] + offset
    # TODO: Check randomly assorted entries, not just first/last
@pytest.mark.xfail
@given(gen_yqm_offset)
def test_shift_across_dst(offset):
    """Property test: adding an offset across a DST transition keeps the hour."""
    # GH#18319 check that 1) timezone is correctly normalized and
    # 2) that hour is not incorrectly changed by this normalization
    # Note that dti includes a transition across DST boundary
    dti = pd.date_range(start='2017-10-30 12:00:00', end='2017-11-06',
                        freq='D', tz='US/Eastern')
    assert (dti.hour == 12).all()  # we haven't screwed up yet
    res = dti + offset
    assert (res.hour == 12).all()
"[email protected]"
] | |
32dbcf217e368ae2de5dac068c7d6e37cadb9bce | 6536946f7997b3eccda846505f1e30edd3af99d5 | /mycroft_jarbas_utils/hivemind/clients/standalone_voice_client.py | b5c077751ba1fd4724ddc518a4974b33e3151e67 | [] | no_license | JarbasAl/ZZZ_mycroft_jarbas_utils | 2a6e4d032675fc340c3ccec5eabcf94bacf06460 | f05f99a19e88aa7d7d778e30058e759fedacbb25 | refs/heads/master | 2022-03-24T19:37:56.187643 | 2019-12-13T02:57:42 | 2019-12-13T02:57:42 | 122,380,983 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 7,114 | py | from twisted.internet import reactor, ssl
from autobahn.twisted.websocket import WebSocketClientFactory, \
WebSocketClientProtocol
from twisted.internet.protocol import ReconnectingClientFactory
from mycroft_jarbas_utils.hivemind.clients.speech.listener import RecognizerLoop
from threading import Thread
# Default runtime configuration for the standalone client: microphone/listener
# parameters, pocketsphinx hotword definitions, and STT backend endpoints.
# Overridden when start_voice_client() is called with a `config` argument.
conf = {
    "listener": {
        "sample_rate": 16000,
        "channels": 1,
        "record_wake_words": False,
        "record_utterances": False,
        "phoneme_duration": 120,
        "multiplier": 1.0,
        "energy_ratio": 1.5,
        "wake_word": "hey mycroft",
        "stand_up_word": "wake up"
    },
    "hotwords": {
        "hey mycroft": {
            "module": "pocketsphinx",
            "phonemes": "HH EY . M AY K R AO F T",
            "threshold": 1e-90,
            "lang": "en-us"
        },
        "thank you": {
            "module": "pocketsphinx",
            "phonemes": "TH AE NG K . Y UW .",
            "threshold": 1e-1,
            "listen": False,
            "utterance": "thank you",
            "active": True,
            "sound": "",
            "lang": "en-us"
        },
        "wake up": {
            "module": "pocketsphinx",
            "phonemes": "W EY K . AH P",
            "threshold": 1e-20,
            "lang": "en-us"
        }
    },
    "stt": {
        "deepspeech_server": {
            "uri": "http://localhost:8080/stt"
        },
        "kaldi": {
            "uri": "http://localhost:8080/client/dynamic/recognize"
        }
    }
}
import json
import sys
import logging
import base64
# module-level logger, echoed to stdout
logger = logging.getLogger("Standalone_Mycroft_Client")
logger.addHandler(logging.StreamHandler(sys.stdout))
logger.setLevel("INFO")
# client identifier sent as the WebSocket user-agent and in message context
platform = "JarbasVoiceClientv0.1"
class JarbasVoiceClientProtocol(WebSocketClientProtocol):
    """WebSocket protocol that bridges a local Mycroft RecognizerLoop to a
    remote HiveMind server: recognized utterances are forwarded as
    ``recognizer_loop:utterance`` messages; ``speak`` messages from the
    server are logged.
    """

    def onConnect(self, response):
        logger.info("Server connected: {0}".format(response.peer))

    def onOpen(self):
        # start the speech recognizer loop on a daemon thread so the
        # twisted reactor thread is not blocked
        logger.info("WebSocket connection open. ")
        self.loop = RecognizerLoop(conf)
        self.listen = Thread(target=self.start_listening)
        self.listen.setDaemon(True)
        self.listen.start()

    def handle_record_begin(self):
        logger.info("Begin Recording...")

    def handle_record_end(self):
        logger.info("End Recording...")

    def handle_awoken(self):
        """Log that the listener left sleep mode."""
        logger.info("Listener is now Awake: ")

    def handle_wakeword(self, event):
        logger.info("Wakeword Detected: " + event['utterance'])

    def handle_utterance(self, event):
        # forward the recognized utterance to the server, tagging the
        # message with this client's identity and source
        context = {'client_name': platform, "source": self.peer + ":speech",
                   'destinatary': "https_server"}
        msg = {"data": {"utterances": event['utterances'], "lang": "en-us"},
               "type": "recognizer_loop:utterance",
               "context": context}
        self.send(msg)

    def handle_unknown(self):
        logger.info('mycroft.speech.recognition.unknown')

    def handle_hotword(self, event):
        # only log hotwords that are neither the wake word nor the
        # stand-up word (those have dedicated handlers)
        config = conf.get("listener", {})
        ww = config.get("wake_word", "hey mycroft")
        suw = config.get("stand_up_word", "wake up")
        if event["hotword"] != ww and event["hotword"] != suw:
            logger.info("Hotword Detected: " + event['hotword'])

    def handle_sleep(self):
        self.loop.sleep()

    def handle_wake_up(self, event):
        self.loop.awaken()

    def handle_mic_mute(self, event):
        self.loop.mute()

    def handle_mic_unmute(self, event):
        self.loop.unmute()

    def handle_audio_start(self, event):
        """
        Mute recognizer loop
        """
        self.loop.mute()

    def handle_audio_end(self, event):
        """
        Request unmute, if more sources has requested the mic to be muted
        it will remain muted.
        """
        self.loop.unmute()  # restore

    def handle_stop(self, event):
        """
        Handler for mycroft.stop, i.e. button press
        """
        self.loop.force_unmute()

    def start_listening(self):
        # wire recognizer-loop events to the handlers above, then block in
        # the loop (this runs on the daemon thread started in onOpen)
        self.loop.on('recognizer_loop:utterance', self.handle_utterance)
        self.loop.on('recognizer_loop:record_begin', self.handle_record_begin)
        self.loop.on('recognizer_loop:awoken', self.handle_awoken)
        self.loop.on('recognizer_loop:wakeword', self.handle_wakeword)
        self.loop.on('recognizer_loop:hotword', self.handle_hotword)
        self.loop.on('recognizer_loop:record_end', self.handle_record_end)
        self.loop.run()

    def stop_listening(self):
        # detach all handlers and join the listener thread (timeout 0:
        # does not actually wait for the thread to finish)
        self.loop.remove_listener('recognizer_loop:utterance', self.handle_utterance)
        self.loop.remove_listener('recognizer_loop:record_begin', self.handle_record_begin)
        self.loop.remove_listener('recognizer_loop:awoken', self.handle_awoken)
        self.loop.remove_listener('recognizer_loop:wakeword', self.handle_wakeword)
        self.loop.remove_listener('recognizer_loop:hotword', self.handle_hotword)
        self.loop.remove_listener('recognizer_loop:record_end', self.handle_record_end)
        self.listen.join(0)

    def onMessage(self, payload, isBinary):
        # only text frames are handled; "speak" messages are logged as output
        if not isBinary:
            msg = json.loads(payload)
            if msg.get("type", "") == "speak":
                utterance = msg["data"]["utterance"]
                logger.info("Output: " + utterance)
        else:
            pass

    def send(self, msg):
        # serialize and send as a text frame
        # NOTE(review): autobahn's sendMessage expects bytes on Python 3 --
        # confirm whether this payload should be encoded first
        msg = json.dumps(msg)
        self.sendMessage(msg, False)

    def onClose(self, wasClean, code, reason):
        logger.info("WebSocket connection closed: {0}".format(reason))
        self.stop_listening()
class JarbasVoiceClientFactory(WebSocketClientFactory, ReconnectingClientFactory):
    """Factory for JarbasVoiceClientProtocol that reconnects with backoff
    whenever the connection fails or is lost.
    """

    protocol = JarbasVoiceClientProtocol

    def __init__(self, *args, **kwargs):
        super(JarbasVoiceClientFactory, self).__init__(*args, **kwargs)
        # connection-state bookkeeping (read by external code)
        self.status = "disconnected"
        self.client = None

    # websocket handlers
    def clientConnectionFailed(self, connector, reason):
        logger.info(
            "Client connection failed: " + str(reason) + " .. retrying ..")
        self.status = "disconnected"
        self.retry(connector)

    def clientConnectionLost(self, connector, reason):
        logger.info(
            "Client connection lost: " + str(reason) + " .. retrying ..")
        self.status = "disconnected"
        self.retry(connector)
def start_voice_client(host="0.0.0.0", port=5678, name="standalone voice client", api="test_key", config=None):
    """Connect to a HiveMind server over secure WebSocket and run until the
    reactor stops.

    Args:
        host: server hostname or IP.
        port: server port.
        name: client name, used as the basic-auth "username".
        api: API key, used as the basic-auth "password".
        config: optional recognizer configuration; replaces the module-level
            default ``conf`` when provided.

    This call blocks in ``reactor.run()``.
    """
    global conf
    conf = config or conf
    authorization = name + ":" + api
    # BUGFIX: base64.b64encode requires bytes on Python 3; encode the
    # credentials and decode the result back to text for the header value.
    api = base64.b64encode(authorization.encode("utf-8")).decode("utf-8")
    headers = {'authorization': api}
    adress = u"wss://" + host + u":" + str(port)
    factory = JarbasVoiceClientFactory(adress, headers=headers,
                                       useragent=platform)
    factory.protocol = JarbasVoiceClientProtocol
    contextFactory = ssl.ClientContextFactory()
    reactor.connectSSL(host, port, factory, contextFactory)
    reactor.run()
if __name__ == '__main__':
    # run with all defaults (wss://0.0.0.0:5678, test_key); blocks forever
    start_voice_client()
| [
"[email protected]"
] | |
61c7e594c9592fed0407aa4a923ed6db00bd2fae | 0e477667e6ce65e0799a5605b8f7f9cd778ff81f | /P2/dino_game.py | 2da57b2b18949ee9d6558768c7a7d094a4753f69 | [] | no_license | rohitaswchoudhary/py_projects | 2e79eb88e8b2d504e9f2c86e5cdedf6ba1470cb9 | adde531b1311a1f4e4dd1d5bc57f5f8b743e4f7f | refs/heads/main | 2023-04-16T08:40:05.614405 | 2021-04-29T07:43:21 | 2021-04-29T07:43:21 | 337,432,934 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 791 | py | import pyautogui
import time
from PIL import Image, ImageGrab
def hit(key):
    # send a key-down event for `key` (no matching key-up is issued here)
    pyautogui.keyDown(key)
def is_collide(data):
    """Scan two fixed regions of the grayscale pixel grid for obstacles.

    A pixel value below 40 counts as an obstacle.  A dark pixel in the near
    region (columns 410-559) triggers a duck ("down"); one in the far region
    (columns 563-649) triggers a jump ("up").  Returns True when a key was
    pressed, False otherwise.
    """
    near = ((row, col) for row in range(300, 415) for col in range(410, 560))
    if any(data[row, col] < 40 for row, col in near):
        hit("down")
        return True
    far = ((row, col) for row in range(300, 415) for col in range(563, 650))
    if any(data[row, col] < 40 for row, col in far):
        hit('up')
        return True
    return False
if __name__ == "__main__":
    # give the user 10s to focus the browser window with the dino game
    time.sleep(10)
    hit('up')  # start the game
    while True:
        # grab the screen as a grayscale image and check for obstacles
        image= ImageGrab.grab().convert('L')
        data = image.load()
        is_collide(data)
        # print(asarray(image))
        # paint the near detection region mid-gray for visual debugging
        for i in range (300,415):
            for j in range(410,563):
                data[i,j] = 171
        image.show()
        break  # NOTE: loop exits after one iteration (debug leftover?)
break | [
"[email protected]"
] | |
38a04f01a5160d296ff561122a00bddcde966b66 | 5240574e3e89cfd8393141e08479aac6f314acc8 | /L2/helloCont.sh | bfb2b4bdad7774191024f82f87f35458d3afc6a6 | [] | no_license | linnil1/2018_LAB_Tutorial | 1d1fdce3c37e1881715de0c92ea7ad7a66e49e2f | 2d7d9418f29915c828e4f2561709bd731dd0cab8 | refs/heads/master | 2020-03-23T02:08:03.535006 | 2018-07-22T06:21:04 | 2018-07-22T06:21:04 | 140,957,748 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 268 | sh | #!/usr/bin/python3
import time
import sys
num = 0
while True:
if num % 3:
print("hello -- " + str(num))
sys.stdout.flush()
else:
print("hello -- " + str(num), file=sys.stderr)
sys.stderr.flush()
num += 1
time.sleep(0.1)
| [
"[email protected]"
] | |
6f6b240798e75a5c8450064de2832f08eb972382 | 3c000380cbb7e8deb6abf9c6f3e29e8e89784830 | /venv/Lib/site-packages/cobra/modelimpl/reln/releasereftask.py | 50743f3c071a1e3b7e6d5798bcaf007822e36006 | [] | no_license | bkhoward/aciDOM | 91b0406f00da7aac413a81c8db2129b4bfc5497b | f2674456ecb19cf7299ef0c5a0887560b8b315d0 | refs/heads/master | 2023-03-27T23:37:02.836904 | 2021-03-26T22:07:54 | 2021-03-26T22:07:54 | 351,855,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,748 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2020 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class ReleaseRefTask(Mo):
"""
This is generated and used only by internal processes.
"""
meta = ClassMeta("cobra.model.reln.ReleaseRefTask")
meta.moClassName = "relnReleaseRefTask"
meta.rnFormat = "relnReleaseRefTask-%(id)s"
meta.category = MoCategory.TASK
meta.label = "None"
meta.writeAccessMask = 0x1
meta.readAccessMask = 0x1
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = False
meta.parentClasses.add("cobra.model.action.LicensemgrSubj")
meta.parentClasses.add("cobra.model.action.StreamelemSubj")
meta.parentClasses.add("cobra.model.action.ObserverSubj")
meta.parentClasses.add("cobra.model.action.SnmpdSubj")
meta.parentClasses.add("cobra.model.action.VmmmgrSubj")
meta.parentClasses.add("cobra.model.action.AnalyticsSubj")
meta.parentClasses.add("cobra.model.action.ScripthandlerSubj")
meta.parentClasses.add("cobra.model.action.ConfelemSubj")
meta.parentClasses.add("cobra.model.action.GoobserverelemSubj")
meta.parentClasses.add("cobra.model.action.EventmgrSubj")
meta.parentClasses.add("cobra.model.action.OspaelemSubj")
meta.parentClasses.add("cobra.model.action.VtapSubj")
meta.parentClasses.add("cobra.model.action.GohealthelemSubj")
meta.parentClasses.add("cobra.model.action.OshSubj")
meta.parentClasses.add("cobra.model.action.DhcpdSubj")
meta.parentClasses.add("cobra.model.action.OpflexelemSubj")
meta.parentClasses.add("cobra.model.action.DomainmgrSubj")
meta.parentClasses.add("cobra.model.action.DbgrelemSubj")
meta.parentClasses.add("cobra.model.action.CloudpeSubj")
meta.parentClasses.add("cobra.model.action.PlgnhandlerSubj")
meta.parentClasses.add("cobra.model.action.TopomgrSubj")
meta.parentClasses.add("cobra.model.action.VleafelemSubj")
meta.parentClasses.add("cobra.model.action.NxosmockSubj")
meta.parentClasses.add("cobra.model.action.DbgrSubj")
meta.parentClasses.add("cobra.model.action.PlatformmgrSubj")
meta.parentClasses.add("cobra.model.action.AppliancedirectorSubj")
meta.parentClasses.add("cobra.model.action.OpflexpSubj")
meta.parentClasses.add("cobra.model.action.BootmgrSubj")
meta.parentClasses.add("cobra.model.action.AeSubj")
meta.parentClasses.add("cobra.model.action.GoeventelemSubj")
meta.parentClasses.add("cobra.model.action.GoconnectorSubj")
meta.parentClasses.add("cobra.model.action.PolicymgrSubj")
meta.parentClasses.add("cobra.model.action.ExtXMLApiSubj")
meta.parentClasses.add("cobra.model.action.ObserverelemSubj")
meta.parentClasses.add("cobra.model.action.PolicyelemSubj")
meta.parentClasses.add("cobra.model.action.PolicydistSubj")
meta.parentClasses.add("cobra.model.action.IdmgrSubj")
meta.parentClasses.add("cobra.model.action.EdmgrSubj")
meta.superClasses.add("cobra.model.action.RInst")
meta.superClasses.add("cobra.model.pol.ComplElem")
meta.superClasses.add("cobra.model.task.Inst")
meta.superClasses.add("cobra.model.action.Inst")
meta.rnPrefixes = [
('relnReleaseRefTask-', True),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "data", "data", 52, PropCategory.REGULAR)
prop.label = "Data"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 512)]
meta.props.add("data", prop)
prop = PropMeta("str", "descr", "descr", 33, PropCategory.REGULAR)
prop.label = "Description"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("descr", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "endTs", "endTs", 15575, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("endTs", prop)
prop = PropMeta("str", "fail", "fail", 46, PropCategory.REGULAR)
prop.label = "Fail"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("fail", prop)
prop = PropMeta("str", "flags", "flags", 30392, PropCategory.REGULAR)
prop.label = "Flags"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("flags", prop)
prop = PropMeta("str", "id", "id", 5638, PropCategory.REGULAR)
prop.label = "ID"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
prop.defaultValue = 0
prop.defaultValueStr = "none"
prop._addConstant("none", "none", 0)
prop._addConstant("releaseOnBehalf", "releaseonbehalf", 16)
meta.props.add("id", prop)
prop = PropMeta("str", "invErrCode", "invErrCode", 49, PropCategory.REGULAR)
prop.label = "Remote Error Code"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("ERR-FILTER-illegal-format", None, 1140)
prop._addConstant("ERR-FSM-no-such-state", None, 1160)
prop._addConstant("ERR-HTTP-set-error", None, 1551)
prop._addConstant("ERR-HTTPS-set-error", None, 1552)
prop._addConstant("ERR-MO-CONFIG-child-object-cant-be-configured", None, 1130)
prop._addConstant("ERR-MO-META-no-such-object-class", None, 1122)
prop._addConstant("ERR-MO-PROPERTY-no-such-property", None, 1121)
prop._addConstant("ERR-MO-PROPERTY-value-out-of-range", None, 1120)
prop._addConstant("ERR-MO-access-denied", None, 1170)
prop._addConstant("ERR-MO-deletion-rule-violation", None, 1107)
prop._addConstant("ERR-MO-duplicate-object", None, 1103)
prop._addConstant("ERR-MO-illegal-containment", None, 1106)
prop._addConstant("ERR-MO-illegal-creation", None, 1105)
prop._addConstant("ERR-MO-illegal-iterator-state", None, 1100)
prop._addConstant("ERR-MO-illegal-object-lifecycle-transition", None, 1101)
prop._addConstant("ERR-MO-naming-rule-violation", None, 1104)
prop._addConstant("ERR-MO-object-not-found", None, 1102)
prop._addConstant("ERR-MO-resource-allocation", None, 1150)
prop._addConstant("ERR-aaa-config-modify-error", None, 1520)
prop._addConstant("ERR-acct-realm-set-error", None, 1513)
prop._addConstant("ERR-add-ctrlr", None, 1574)
prop._addConstant("ERR-admin-passwd-set", None, 1522)
prop._addConstant("ERR-api", None, 1571)
prop._addConstant("ERR-auth-issue", None, 1548)
prop._addConstant("ERR-auth-realm-set-error", None, 1514)
prop._addConstant("ERR-authentication", None, 1534)
prop._addConstant("ERR-authorization-required", None, 1535)
prop._addConstant("ERR-connect", None, 1572)
prop._addConstant("ERR-create-domain", None, 1562)
prop._addConstant("ERR-create-keyring", None, 1560)
prop._addConstant("ERR-create-role", None, 1526)
prop._addConstant("ERR-create-user", None, 1524)
prop._addConstant("ERR-delete-domain", None, 1564)
prop._addConstant("ERR-delete-role", None, 1528)
prop._addConstant("ERR-delete-user", None, 1523)
prop._addConstant("ERR-domain-set-error", None, 1561)
prop._addConstant("ERR-http-initializing", None, 1549)
prop._addConstant("ERR-incompat-ctrlr-version", None, 1568)
prop._addConstant("ERR-internal-error", None, 1540)
prop._addConstant("ERR-invalid-args", None, 1569)
prop._addConstant("ERR-invalid-delimiter", None, 1589)
prop._addConstant("ERR-invalid-domain", None, 1588)
prop._addConstant("ERR-invalid-domain-name", None, 1582)
prop._addConstant("ERR-ldap-delete-error", None, 1510)
prop._addConstant("ERR-ldap-get-error", None, 1509)
prop._addConstant("ERR-ldap-group-modify-error", None, 1518)
prop._addConstant("ERR-ldap-group-set-error", None, 1502)
prop._addConstant("ERR-ldap-set-error", None, 1511)
prop._addConstant("ERR-missing-method", None, 1546)
prop._addConstant("ERR-modify-ctrlr-access", None, 1567)
prop._addConstant("ERR-modify-ctrlr-dvs-version", None, 1576)
prop._addConstant("ERR-modify-ctrlr-rootcont", None, 1575)
prop._addConstant("ERR-modify-ctrlr-scope", None, 1573)
prop._addConstant("ERR-modify-ctrlr-trig-inventory", None, 1577)
prop._addConstant("ERR-modify-domain", None, 1563)
prop._addConstant("ERR-modify-domain-encapmode", None, 1581)
prop._addConstant("ERR-modify-domain-enfpref", None, 1578)
prop._addConstant("ERR-modify-domain-mcastpool", None, 1579)
prop._addConstant("ERR-modify-domain-mode", None, 1580)
prop._addConstant("ERR-modify-domain-prefencapmode", None, 1584)
prop._addConstant("ERR-modify-role", None, 1527)
prop._addConstant("ERR-modify-user", None, 1525)
prop._addConstant("ERR-modify-user-domain", None, 1565)
prop._addConstant("ERR-modify-user-role", None, 1532)
prop._addConstant("ERR-no-buf", None, 1570)
prop._addConstant("ERR-passwd-set-failure", None, 1566)
prop._addConstant("ERR-provider-group-modify-error", None, 1519)
prop._addConstant("ERR-provider-group-set-error", None, 1512)
prop._addConstant("ERR-radius-global-set-error", None, 1505)
prop._addConstant("ERR-radius-group-set-error", None, 1501)
prop._addConstant("ERR-radius-set-error", None, 1504)
prop._addConstant("ERR-request-timeout", None, 1545)
prop._addConstant("ERR-role-set-error", None, 1515)
prop._addConstant("ERR-rsa-global-set-error", None, 1587)
prop._addConstant("ERR-rsa-group-set-error", None, 1585)
prop._addConstant("ERR-rsa-set-error", None, 1586)
prop._addConstant("ERR-secondary-node", None, 1550)
prop._addConstant("ERR-service-not-ready", None, 1539)
prop._addConstant("ERR-set-password-strength-check", None, 1543)
prop._addConstant("ERR-store-pre-login-banner-msg", None, 1521)
prop._addConstant("ERR-tacacs-enable-error", None, 1508)
prop._addConstant("ERR-tacacs-global-set-error", None, 1507)
prop._addConstant("ERR-tacacs-group-set-error", None, 1503)
prop._addConstant("ERR-tacacs-set-error", None, 1506)
prop._addConstant("ERR-user-account-expired", None, 1536)
prop._addConstant("ERR-user-set-error", None, 1517)
prop._addConstant("ERR-xml-parse-error", None, 1547)
prop._addConstant("communication-error", "communication-error", 1)
prop._addConstant("none", "none", 0)
meta.props.add("invErrCode", prop)
prop = PropMeta("str", "invErrDescr", "invErrDescr", 50, PropCategory.REGULAR)
prop.label = "Remote Error Description"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("invErrDescr", prop)
prop = PropMeta("str", "invRslt", "invRslt", 48, PropCategory.REGULAR)
prop.label = "Remote Result"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "not-applicable"
prop._addConstant("capability-not-implemented-failure", "capability-not-implemented-failure", 16384)
prop._addConstant("capability-not-implemented-ignore", "capability-not-implemented-ignore", 8192)
prop._addConstant("capability-not-supported", "capability-not-supported", 32768)
prop._addConstant("capability-unavailable", "capability-unavailable", 65536)
prop._addConstant("end-point-failed", "end-point-failed", 32)
prop._addConstant("end-point-protocol-error", "end-point-protocol-error", 64)
prop._addConstant("end-point-unavailable", "end-point-unavailable", 16)
prop._addConstant("extend-timeout", "extend-timeout", 134217728)
prop._addConstant("failure", "failure", 1)
prop._addConstant("fru-identity-indeterminate", "fru-identity-indeterminate", 4194304)
prop._addConstant("fru-info-malformed", "fru-info-malformed", 8388608)
prop._addConstant("fru-not-ready", "fru-not-ready", 67108864)
prop._addConstant("fru-not-supported", "fru-not-supported", 536870912)
prop._addConstant("fru-state-indeterminate", "fru-state-indeterminate", 33554432)
prop._addConstant("fw-defect", "fw-defect", 256)
prop._addConstant("hw-defect", "hw-defect", 512)
prop._addConstant("illegal-fru", "illegal-fru", 16777216)
prop._addConstant("intermittent-error", "intermittent-error", 1073741824)
prop._addConstant("internal-error", "internal-error", 4)
prop._addConstant("not-applicable", "not-applicable", 0)
prop._addConstant("resource-capacity-exceeded", "resource-capacity-exceeded", 2048)
prop._addConstant("resource-dependency", "resource-dependency", 4096)
prop._addConstant("resource-unavailable", "resource-unavailable", 1024)
prop._addConstant("service-not-implemented-fail", "service-not-implemented-fail", 262144)
prop._addConstant("service-not-implemented-ignore", "service-not-implemented-ignore", 131072)
prop._addConstant("service-not-supported", "service-not-supported", 524288)
prop._addConstant("service-protocol-error", "service-protocol-error", 2097152)
prop._addConstant("service-unavailable", "service-unavailable", 1048576)
prop._addConstant("sw-defect", "sw-defect", 128)
prop._addConstant("task-reset", "task-reset", 268435456)
prop._addConstant("timeout", "timeout", 8)
prop._addConstant("unidentified-fail", "unidentified-fail", 2)
meta.props.add("invRslt", prop)
prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "local"
prop._addConstant("implicit", "implicit", 4)
prop._addConstant("local", "local", 0)
prop._addConstant("policy", "policy", 1)
prop._addConstant("replica", "replica", 2)
prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
meta.props.add("lcOwn", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "oDn", "oDn", 51, PropCategory.REGULAR)
prop.label = "Subject DN"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("oDn", prop)
prop = PropMeta("str", "operSt", "operSt", 15674, PropCategory.REGULAR)
prop.label = "Completion"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "scheduled"
prop._addConstant("cancelled", "cancelled", 3)
prop._addConstant("completed", "completed", 2)
prop._addConstant("crashsuspect", "crash-suspect", 7)
prop._addConstant("failed", "failed", 4)
prop._addConstant("indeterminate", "indeterminate", 5)
prop._addConstant("processing", "processing", 1)
prop._addConstant("ready", "ready", 8)
prop._addConstant("scheduled", "scheduled", 0)
prop._addConstant("suspended", "suspended", 6)
meta.props.add("operSt", prop)
prop = PropMeta("str", "originMinority", "originMinority", 54, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = False
prop.defaultValueStr = "no"
prop._addConstant("no", None, False)
prop._addConstant("yes", None, True)
meta.props.add("originMinority", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "runId", "runId", 45, PropCategory.REGULAR)
prop.label = "ID"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("runId", prop)
prop = PropMeta("str", "startTs", "startTs", 36, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("startTs", prop)
prop = PropMeta("str", "startTx", "startTx", 36895, PropCategory.REGULAR)
prop.label = "startTxId"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "none"
prop._addConstant("none", "none", 0)
meta.props.add("startTx", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
prop = PropMeta("str", "try", "try", 15574, PropCategory.REGULAR)
prop.label = "Try"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("try", prop)
prop = PropMeta("str", "ts", "ts", 47, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("ts", prop)
meta.namingProps.append(getattr(meta.props, "id"))
def __init__(self, parentMoOrDn, id, markDirty=True, **creationProps):
namingVals = [id]
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
] | |
f4283609b0b15e9b8ea4c1d8aee1778707b75c26 | 34599a66861f7d95a5740eeb5329ea77014e18d4 | /problems_solving/project-euler/pro043_sum_pandigital.py | 311f0e0f4f790452348cb363b5436bcaf2be503e | [] | no_license | laolee010126/algorithm-with-python | f0f5f1bc3cbe374ccbb59e10ac639674c44ae743 | 89ff0c47a6d8b0cd5b31a25bb3981b8e90971f19 | refs/heads/master | 2022-04-01T17:38:36.199309 | 2020-01-14T01:54:22 | 2020-01-14T01:54:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 979 | py | def get_digit(n, start, length):
n = str(n)
return n[start-1:start-1+length]
def sum_pandigial():
    """Return all 0-to-9 pandigital numbers (as ints, ascending) whose
    three-digit substrings d2d3d4 .. d8d9d10 are divisible by
    2, 3, 5, 7, 11, 13, 17 respectively (Project Euler problem 43).

    Improvement over the original: instead of materializing all 10!
    permutations and filtering afterwards, partial numbers are pruned as
    soon as a completed 3-digit window fails its divisibility test, so only
    a tiny fraction of the search space is explored.
    """
    # divider for the 3-digit window that ends at this (1-based) prefix length
    dividers = {4: 2, 5: 3, 6: 5, 7: 7, 8: 11, 9: 13, 10: 17}
    ans = []

    def extend(prefix, used):
        pos = len(prefix)
        # as soon as positions (pos-2)..pos form a complete window, check it
        if pos >= 4 and int(prefix[pos - 3:]) % dividers[pos] != 0:
            return  # prune: no extension can repair a failed window
        if pos == 10:
            ans.append(int(prefix))
            return
        for d in '0123456789':  # ascending digits keep results sorted
            if d not in used:
                extend(prefix + d, used | {d})

    extend('', frozenset())
    return ans
if __name__ == '__main__':
    # print the list of qualifying pandigital numbers (PE 43 asks for their sum)
    ans = sum_pandigial()
    print(ans)
| [
"[email protected]"
] | |
51f9546b94c3bcb4cd440a1de34e1b7e0459997f | 4631798b64f2118b7d8e64483a14d7485163358b | /8.6.1_making_pizzas.py | 03606d0835e6cdc7d73d325e5776c21e070d9050 | [] | no_license | royrowe/python | 288680aba27b8c2d46368250b45fb1672427fe6a | dc7cebd56aa1bee7b2afd91e3a2a4b03f1775ba5 | refs/heads/master | 2020-04-15T20:05:02.587794 | 2019-01-10T02:53:08 | 2019-01-10T02:53:08 | 164,978,105 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 229 | py | #!/usr/bin/env python
'''
@File :8.6.1_making_pizzas.py
@Copyright :luoming
@Date :
@Desc :
'''
import pizzaim
# order two pizzas through the imported module (chapter 8.6 import example)
pizzaim.make_pizza(16, 'pepperoni')
pizzaim.make_pizza(12, 'mushrooms', 'green pepers','extra cheese')
"your email"
] | your email |
4a15925f7f82e3ae829257f3fb188b69d1c18d48 | 3df98e7d4551220e3b09d122e8d6897ca572d9f3 | /basic programs/10.py | f86b5bacd38650a07e43778f169c04f12db69575 | [] | no_license | Madhav2108/Python- | 2e4bf778348786d8dd082e3e1cdd7acb41d9f559 | fdfdf944a96d83352979bc23c3b65aac7bd41d26 | refs/heads/master | 2023-03-30T21:37:26.326980 | 2021-03-31T17:40:49 | 2021-03-31T17:40:49 | 273,668,175 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 181 | py | U=int(input("Enter the Units"))
if U<100:
print("Charges are Rs.1perday")
elif U<200:
print("Charges are Rs.2perday")
else:
print("Charges are Rs.4perday")
| [
"[email protected]"
] | |
f5f512c9f8f86db91f8603d6019f03c368f21a93 | 60969b1074b333554d519eb2bffac9651ef7a6de | /build/my_stage/catkin_generated/pkg.develspace.context.pc.py | d10e621a6b583859a842cd3ad6656c58b3fe8641 | [] | no_license | p870668723/rosWS | 123823611459227ea6f2ade1c2dd88425b596a44 | ba6c6dbecf5e755b65fa999116bc26a5abe71020 | refs/heads/master | 2021-01-11T01:46:05.522396 | 2017-01-21T07:37:17 | 2017-01-21T07:37:17 | 70,671,021 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 362 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "my_stage"
PROJECT_SPACE_DIR = "/home/pf/rosWS/devel"
PROJECT_VERSION = "0.0.0"
| [
"="
] | = |
a94799987815dbae47be87fd753fc0b8e50c3e3e | 1d5b2b72d322dd154a8efb547290ad5abb1fd098 | /work_dir/autoencoder_test/prepare_nn.py | f10b3551f9bc5475708ae0a016220f1b6df6eaa6 | [] | no_license | hxzwd/drafts | 6b593b50cae309c02495a8aff28719f7b636962d | 478f4a4c399ab0c7c3f8f6e22d13131488716e4d | refs/heads/master | 2020-04-28T01:42:58.998610 | 2019-05-05T17:49:48 | 2019-05-05T17:49:48 | 174,868,171 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 689 | py |
from keras.layers import Input, Dense, Flatten, Reshape
from keras.models import Model
def create_dense_autoencoder():
    """Build a single-dense-layer autoencoder for 28x28x1 images.

    Returns:
        (encoder, decoder, autoencoder) Keras models, where the encoder maps
        a flattened image to a 49-dimensional code and the decoder maps the
        code back to a 28x28x1 image.
    """
    latent_dim = 49

    # encoder: image -> flat vector -> latent code
    image = Input(shape=(28, 28, 1))
    code = Dense(latent_dim, activation="relu")(Flatten()(image))

    # decoder: latent code -> flat pixels -> image
    latent = Input(shape=(latent_dim,))
    reconstruction = Reshape((28, 28, 1))(
        Dense(28 * 28, activation="sigmoid")(latent))

    encoder = Model(image, code, name="encoder")
    decoder = Model(latent, reconstruction, name="decoder")
    autoencoder = Model(image, decoder(encoder(image)), name="autoencoder")
    return encoder, decoder, autoencoder
| [
"="
] | = |
5e28913dc9429bda9278535096ad84a6da40a243 | 315006dde839c66dab61757f5073466ef63883b6 | /imagepy/core/manager/languagemanager.py | 1b9b348f99ed51e5ab4b8044a585b22061c156df | [
"BSD-2-Clause"
] | permissive | clickinfinite/imagepy | 0777b819e95840a6e41dafc623643e22dfc44adf | 5e6425a08ce3a0d9c2ab1b6e749b02cb9362e7f4 | refs/heads/master | 2021-07-25T20:29:26.767885 | 2017-11-07T16:33:00 | 2017-11-07T16:33:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,709 | py | import os
from ... import root_dir
from glob import glob
class LanguageManager:
    """Registry of UI translation dictionaries loaded from data/language/*.dic.

    Each ``.dic`` file holds ``title:translation`` lines mapping an English
    plugin title to its translation ("--" marks an untranslated entry).
    All state lives in class attributes, so the manager is used without
    instantiation.
    """

    # plugin titles registered via add()
    plgs = []
    # language name -> {english title: translation}
    langs = {}
    # the active translation dict, or None for English
    cur = None
    filename = os.path.join(root_dir, 'data/language/*.dic')

    @classmethod
    def set(cls, cur):
        """Select the active language and persist the choice to cur.txt."""
        cls.cur = None if cur == 'English' else cls.langs[cur]
        # `with` guarantees the handle is closed even on write errors
        with open(os.path.join(root_dir, 'data/language/cur.txt'), 'w',
                  encoding='utf-8') as curfile:
            curfile.write(cur)

    @classmethod
    def read(cls):
        """Load every *.dic file, then restore the last selected language."""
        path = os.path.join(root_dir, 'data/language/*.dic')
        for name in glob(path):
            fp, fn = os.path.split(name)
            fn, fe = os.path.splitext(fn)
            cls.langs[fn] = {}
            with open(name, 'r', encoding='utf-8') as pkl_file:
                for line in pkl_file.readlines():
                    k, v = line.replace('\n', '').split(':')
                    cls.langs[fn][k] = v
        curfile = os.path.join(root_dir, 'data/language/cur.txt')
        if not os.path.exists(curfile): return
        with open(curfile, 'r', encoding='utf-8') as f:
            cur = f.read()
        if cur in cls.langs: cls.cur = cls.langs[cur]

    @classmethod
    def write(cls):
        """Dump every language dictionary back to its .dic file, keys sorted."""
        for key in cls.langs:
            dic = cls.langs[key]
            titles = sorted(dic.keys())
            with open(os.path.join(root_dir, 'data/language/%s.dic' % key),
                      'w', encoding='utf-8') as pkl_file:
                for i in titles:
                    pkl_file.write('%s:%s\n' % (i, dic[i]))

    @classmethod
    def add(cls, key=None):
        """Register a plugin title, or (with no key) sync all dictionaries.

        With a plain key: remember the title (keys containing ':' are menu
        paths and are skipped).  Without a key: ensure every registered title
        exists in every language dictionary, then persist them.
        """
        if not key is None and not ':' in key:
            if not key in cls.plgs: cls.plgs.append(key)
            return
        titles = cls.plgs
        for key in cls.langs:
            dic = cls.langs[key]
            for i in titles:
                if not ':' in i and not i in dic: dic[i] = '--'
        cls.write()

    @classmethod
    def rm(cls):
        """Drop dictionary entries whose title is no longer registered."""
        titles = cls.plgs
        for key in cls.langs:
            dic = cls.langs[key]
            # BUGFIX: iterate over a snapshot of the keys -- deleting from a
            # dict while iterating it raises RuntimeError on Python 3
            for i in list(dic):
                if not i in titles: del dic[i]
        cls.write()

    @classmethod
    def newdic(cls, key):
        """Create a dictionary for a new language, all titles untranslated."""
        cls.langs[key] = {}
        for i in cls.plgs:
            if not ':' in i: cls.langs[key][i] = '--'

    @classmethod
    def get(cls, key):
        """Translate key via the active language; fall back to key itself."""
        if not cls.cur is None and key in cls.cur:
            if cls.cur[key] != '--':
                return cls.cur[key]
        return key
LanguageManager.read()
if __name__ == '__main__':
#ShotcutManager.set('c',[1,2,3])
ShotcutManager.rm('c')
print(ShotcutManager.shotcuts)
ShotcutManager.write()
| [
"[email protected]"
] | |
973212974c4e2f21d28c3c8e897643227ad9a0af | 3a527d62947ad6d01ebfc8932958636270bc055a | /contact/validations.py | 4b8254ea28a6c080b75e8528388aa8b575f5bce0 | [] | no_license | Iamprakashkhatri/contactcrud | 5bc9209dc104914608c1c9f86604c7bfadbe0fa6 | ffd2d18cccde5c7dd9c2dd67382564847c4f6eff | refs/heads/master | 2020-09-03T20:03:00.759839 | 2019-11-06T05:08:32 | 2019-11-06T05:08:32 | 219,555,483 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 447 | py | from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
def validate_domainonly_email(value):
if not "[email protected]" in value:
raise ValidationError(_("Sorry,the email submitted is invalid"))
return value
Blacklisted=['abc','new']
def validate_blacklisted(value):
if value in Blacklisted:
raise ValidationError(_("Sorry,the value is not valid."))
return value
| [
"prakash"
] | prakash |
4d767e75c1a3e17efffcd6541128012239313ac2 | 108fc2873b5c07e4ad9515adc16bc8e9fdf7d021 | /smorest_sfs/utils/sqla.py | e650ad9a22a4a200e49a5264a8c8d62e89b89d0c | [
"Apache-2.0"
] | permissive | ssfdust/yt-media | 4ac5eba6a25830268f42b951e8307bb57e7baeeb | 36c3d1977df5851d8df54846f0bc84be2b86e962 | refs/heads/master | 2021-08-08T09:40:31.241228 | 2020-05-11T03:11:20 | 2020-05-11T03:11:20 | 175,938,603 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,269 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from collections import namedtuple
from typing import List, Optional
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import Table, UniqueConstraint, inspect
from smorest_sfs.extensions.sqla import Model
RelateTableArgs = namedtuple(
"RelateTableArgs", ["tablename", "related_key", "the_ohter_related_key"]
)
class AttrHistory:
def __init__(self, added: Optional[List[Model]], deleted: Optional[List[Model]]):
self.added: List[Model] = added or []
self.deleted: List[Model] = deleted or []
def create_relation_table(db: SQLAlchemy, table_args: RelateTableArgs) -> Table:
return db.Table(
table_args.tablename,
db.Column(table_args.related_key, db.Integer(), nullable=False),
db.Column(table_args.the_ohter_related_key, db.Integer(), nullable=False),
UniqueConstraint(table_args.related_key, table_args.the_ohter_related_key),
)
def get_histroy(model: Model, attr: str) -> AttrHistory:
model_state = inspect(model)
attr_state = getattr(model_state.attrs, attr)
attr_hist = attr_state.history
if not attr_hist.has_changes():
raise ValueError("No changes found")
return AttrHistory(attr_hist.added, attr_hist.deleted)
| [
"[email protected]"
] | |
c08c2580a68df779f963d6ec34f0eaa6d259e563 | e57d7785276053332c633b57f6925c90ad660580 | /sdk/eventgrid/azure-eventgrid/tests/test_eg_publisher_client_async.py | b2ec715a6a65a09d86a2cdbdef21bcfb94147d91 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | adriananeci/azure-sdk-for-python | 0d560308497616a563b6afecbb494a88535da4c5 | b2bdfe659210998d6d479e73b133b6c51eb2c009 | refs/heads/main | 2023-08-18T11:12:21.271042 | 2021-09-10T18:48:44 | 2021-09-10T18:48:44 | 405,684,423 | 1 | 0 | MIT | 2021-09-12T15:51:51 | 2021-09-12T15:51:50 | null | UTF-8 | Python | false | false | 17,050 | py | #-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import logging
import asyncio
import sys
import os
import json
import pytest
from datetime import timedelta
from msrest.serialization import UTC
from urllib.parse import urlparse
import datetime as dt
from devtools_testutils import AzureMgmtTestCase, CachedResourceGroupPreparer
from azure_devtools.scenario_tests import ReplayableTest
from azure.core.credentials import AzureKeyCredential, AzureSasCredential
from azure.core.messaging import CloudEvent
from azure.core.serialization import NULL
from azure.eventgrid import EventGridEvent, generate_sas
from azure.eventgrid.aio import EventGridPublisherClient
from azure.eventgrid._helpers import _cloud_event_to_generated
from eventgrid_preparer import (
CachedEventGridTopicPreparer
)
class EventGridPublisherClientTests(AzureMgmtTestCase):
FILTER_HEADERS = ReplayableTest.FILTER_HEADERS + ['aeg-sas-key', 'aeg-sas-token']
@CachedResourceGroupPreparer(name_prefix='eventgridtest')
@CachedEventGridTopicPreparer(name_prefix='eventgridtest')
@pytest.mark.asyncio
async def test_send_event_grid_event_data_dict(self, resource_group, eventgrid_topic, eventgrid_topic_primary_key, eventgrid_topic_endpoint):
akc_credential = AzureKeyCredential(eventgrid_topic_primary_key)
client = EventGridPublisherClient(eventgrid_topic_endpoint, akc_credential)
eg_event = EventGridEvent(
subject="sample",
data={"sample": "eventgridevent"},
event_type="Sample.EventGrid.Event",
data_version="2.0"
)
await client.send(eg_event)
@CachedResourceGroupPreparer(name_prefix='eventgridtest')
@CachedEventGridTopicPreparer(name_prefix='eventgridtest')
@pytest.mark.asyncio
async def test_send_event_grid_event_data_as_list(self, resource_group, eventgrid_topic, eventgrid_topic_primary_key, eventgrid_topic_endpoint):
akc_credential = AzureKeyCredential(eventgrid_topic_primary_key)
client = EventGridPublisherClient(eventgrid_topic_endpoint, akc_credential)
eg_event1 = EventGridEvent(
subject="sample",
data="eventgridevent",
event_type="Sample.EventGrid.Event",
data_version="2.0"
)
eg_event2 = EventGridEvent(
subject="sample2",
data="eventgridevent2",
event_type="Sample.EventGrid.Event",
data_version="2.0"
)
await client.send([eg_event1, eg_event2])
@CachedResourceGroupPreparer(name_prefix='eventgridtest')
@CachedEventGridTopicPreparer(name_prefix='eventgridtest')
@pytest.mark.asyncio
async def test_send_event_grid_event_fails_without_full_url(self, resource_group, eventgrid_topic, eventgrid_topic_primary_key, eventgrid_topic_endpoint):
akc_credential = AzureKeyCredential(eventgrid_topic_primary_key)
parsed_url = urlparse(eventgrid_topic_endpoint)
client = EventGridPublisherClient(parsed_url.netloc, akc_credential)
eg_event = EventGridEvent(
subject="sample",
data={"sample": "eventgridevent"},
event_type="Sample.EventGrid.Event",
data_version="2.0"
)
with pytest.raises(ValueError):
await client.send(eg_event)
@CachedResourceGroupPreparer(name_prefix='eventgridtest')
@CachedEventGridTopicPreparer(name_prefix='eventgridtest')
@pytest.mark.asyncio
async def test_send_event_grid_event_data_str(self, resource_group, eventgrid_topic, eventgrid_topic_primary_key, eventgrid_topic_endpoint):
akc_credential = AzureKeyCredential(eventgrid_topic_primary_key)
client = EventGridPublisherClient(eventgrid_topic_endpoint, akc_credential)
eg_event = EventGridEvent(
subject="sample",
data="eventgridevent",
event_type="Sample.EventGrid.Event",
data_version="2.0"
)
await client.send(eg_event)
@CachedResourceGroupPreparer(name_prefix='eventgridtest')
@CachedEventGridTopicPreparer(name_prefix='eventgridtest')
@pytest.mark.asyncio
async def test_send_event_grid_event_data_bytes(self, resource_group, eventgrid_topic, eventgrid_topic_primary_key, eventgrid_topic_endpoint):
akc_credential = AzureKeyCredential(eventgrid_topic_primary_key)
client = EventGridPublisherClient(eventgrid_topic_endpoint, akc_credential)
eg_event = EventGridEvent(
subject="sample",
data=b"eventgridevent",
event_type="Sample.EventGrid.Event",
data_version="2.0"
)
with pytest.raises(TypeError, match="Data in EventGridEvent cannot be bytes*"):
await client.send(eg_event)
@CachedResourceGroupPreparer(name_prefix='eventgridtest')
@CachedEventGridTopicPreparer(name_prefix='eventgridtest')
@pytest.mark.asyncio
async def test_send_event_grid_event_dict_data_bytes(self, resource_group, eventgrid_topic, eventgrid_topic_primary_key, eventgrid_topic_endpoint):
akc_credential = AzureKeyCredential(eventgrid_topic_primary_key)
client = EventGridPublisherClient(eventgrid_topic_endpoint, akc_credential)
eg_event = {
"subject":"sample",
"data":b"eventgridevent",
"eventType":"Sample.EventGrid.Event",
"dataVersion":"2.0",
"id": "123-ddf-133-324255ffd",
"eventTime": dt.datetime.utcnow()
}
with pytest.raises(TypeError, match="Data in EventGridEvent cannot be bytes*"):
await client.send(eg_event)
@CachedResourceGroupPreparer(name_prefix='eventgridtest')
@CachedEventGridTopicPreparer(name_prefix='cloudeventgridtest')
@pytest.mark.asyncio
async def test_send_cloud_event_data_dict(self, resource_group, eventgrid_topic, eventgrid_topic_primary_key, eventgrid_topic_endpoint):
akc_credential = AzureKeyCredential(eventgrid_topic_primary_key)
client = EventGridPublisherClient(eventgrid_topic_endpoint, akc_credential)
cloud_event = CloudEvent(
source = "http://samplesource.dev",
data = {"sample": "cloudevent"},
type="Sample.Cloud.Event"
)
await client.send(cloud_event)
@CachedResourceGroupPreparer(name_prefix='eventgridtest')
@CachedEventGridTopicPreparer(name_prefix='cloudeventgridtest')
@pytest.mark.asyncio
async def test_send_cloud_event_data_str(self, resource_group, eventgrid_topic, eventgrid_topic_primary_key, eventgrid_topic_endpoint):
akc_credential = AzureKeyCredential(eventgrid_topic_primary_key)
client = EventGridPublisherClient(eventgrid_topic_endpoint, akc_credential)
cloud_event = CloudEvent(
source = "http://samplesource.dev",
data = "cloudevent",
type="Sample.Cloud.Event"
)
await client.send(cloud_event)
@CachedResourceGroupPreparer(name_prefix='eventgridtest')
@CachedEventGridTopicPreparer(name_prefix='cloudeventgridtest')
@pytest.mark.asyncio
async def test_send_cloud_event_data_bytes(self, resource_group, eventgrid_topic, eventgrid_topic_primary_key, eventgrid_topic_endpoint):
akc_credential = AzureKeyCredential(eventgrid_topic_primary_key)
client = EventGridPublisherClient(eventgrid_topic_endpoint, akc_credential)
cloud_event = CloudEvent(
source = "http://samplesource.dev",
data = b"cloudevent",
type="Sample.Cloud.Event"
)
await client.send(cloud_event)
@CachedResourceGroupPreparer(name_prefix='eventgridtest')
@CachedEventGridTopicPreparer(name_prefix='cloudeventgridtest')
@pytest.mark.asyncio
async def test_send_cloud_event_data_as_list(self, resource_group, eventgrid_topic, eventgrid_topic_primary_key, eventgrid_topic_endpoint):
akc_credential = AzureKeyCredential(eventgrid_topic_primary_key)
client = EventGridPublisherClient(eventgrid_topic_endpoint, akc_credential)
cloud_event = CloudEvent(
source = "http://samplesource.dev",
data = "cloudevent",
type="Sample.Cloud.Event"
)
await client.send([cloud_event])
@CachedResourceGroupPreparer(name_prefix='eventgridtest')
@CachedEventGridTopicPreparer(name_prefix='cloudeventgridtest')
@pytest.mark.asyncio
async def test_send_cloud_event_data_with_extensions(self, resource_group, eventgrid_topic, eventgrid_topic_primary_key, eventgrid_topic_endpoint):
akc_credential = AzureKeyCredential(eventgrid_topic_primary_key)
client = EventGridPublisherClient(eventgrid_topic_endpoint, akc_credential)
cloud_event = CloudEvent(
source = "http://samplesource.dev",
data = "cloudevent",
type="Sample.Cloud.Event",
extensions={
'reasoncode':204,
'extension':'hello'
}
)
await client.send([cloud_event])
internal = _cloud_event_to_generated(cloud_event).serialize()
assert 'reasoncode' in internal
assert 'extension' in internal
assert internal['reasoncode'] == 204
@CachedResourceGroupPreparer(name_prefix='eventgridtest')
@CachedEventGridTopicPreparer(name_prefix='cloudeventgridtest')
@pytest.mark.asyncio
async def test_send_cloud_event_dict(self, resource_group, eventgrid_topic, eventgrid_topic_primary_key, eventgrid_topic_endpoint):
akc_credential = AzureKeyCredential(eventgrid_topic_primary_key)
client = EventGridPublisherClient(eventgrid_topic_endpoint, akc_credential)
cloud_event1 = {
"id": "1234",
"source": "http://samplesource.dev",
"specversion": "1.0",
"data": "cloudevent",
"type": "Sample.Cloud.Event"
}
await client.send(cloud_event1)
@CachedResourceGroupPreparer(name_prefix='eventgridtest')
@CachedEventGridTopicPreparer(name_prefix='cloudeventgridtest')
@pytest.mark.asyncio
async def test_send_cloud_event_data_none(self, resource_group, eventgrid_topic, eventgrid_topic_primary_key, eventgrid_topic_endpoint):
akc_credential = AzureKeyCredential(eventgrid_topic_primary_key)
client = EventGridPublisherClient(eventgrid_topic_endpoint, akc_credential)
cloud_event = CloudEvent(
source = "http://samplesource.dev",
data = None,
type="Sample.Cloud.Event"
)
await client.send(cloud_event)
@pytest.mark.skip("https://github.com/Azure/azure-sdk-for-python/issues/16993")
@CachedResourceGroupPreparer(name_prefix='eventgridtest')
@CachedEventGridTopicPreparer(name_prefix='cloudeventgridtest')
@pytest.mark.asyncio
async def test_send_cloud_event_data_NULL(self, resource_group, eventgrid_topic, eventgrid_topic_primary_key, eventgrid_topic_endpoint):
akc_credential = AzureKeyCredential(eventgrid_topic_primary_key)
client = EventGridPublisherClient(eventgrid_topic_endpoint, akc_credential)
cloud_event = CloudEvent(
source = "http://samplesource.dev",
data = NULL,
type="Sample.Cloud.Event"
)
def callback(request):
req = json.loads(request.http_request.body)
assert req[0].get("data") is None
await client.send(cloud_event, raw_request_hook=callback)
@CachedResourceGroupPreparer(name_prefix='eventgridtest')
@CachedEventGridTopicPreparer(name_prefix='eventgridtest')
@pytest.mark.asyncio
async def test_send_signature_credential(self, resource_group, eventgrid_topic, eventgrid_topic_primary_key, eventgrid_topic_endpoint):
expiration_date_utc = dt.datetime.now(UTC()) + timedelta(hours=1)
signature = generate_sas(eventgrid_topic_endpoint, eventgrid_topic_primary_key, expiration_date_utc)
credential = AzureSasCredential(signature)
client = EventGridPublisherClient(eventgrid_topic_endpoint, credential)
eg_event = EventGridEvent(
subject="sample",
data={"sample": "eventgridevent"},
event_type="Sample.EventGrid.Event",
data_version="2.0"
)
await client.send(eg_event)
@CachedResourceGroupPreparer(name_prefix='eventgridtest')
@CachedEventGridTopicPreparer(name_prefix='customeventgridtest')
@pytest.mark.asyncio
async def test_send_custom_schema_event(self, resource_group, eventgrid_topic, eventgrid_topic_primary_key, eventgrid_topic_endpoint):
akc_credential = AzureKeyCredential(eventgrid_topic_primary_key)
client = EventGridPublisherClient(eventgrid_topic_endpoint, akc_credential)
custom_event = {
"customSubject": "sample",
"customEventType": "sample.event",
"customDataVersion": "2.0",
"customId": "1234",
"customEventTime": dt.datetime.now(UTC()).isoformat(),
"customData": "sample data"
}
await client.send(custom_event)
@CachedResourceGroupPreparer(name_prefix='eventgridtest')
@CachedEventGridTopicPreparer(name_prefix='customeventgridtest')
@pytest.mark.asyncio
async def test_send_custom_schema_event_as_list(self, resource_group, eventgrid_topic, eventgrid_topic_primary_key, eventgrid_topic_endpoint):
akc_credential = AzureKeyCredential(eventgrid_topic_primary_key)
client = EventGridPublisherClient(eventgrid_topic_endpoint, akc_credential)
custom_event1 = {
"customSubject": "sample",
"customEventType": "sample.event",
"customDataVersion": "2.0",
"customId": "1234",
"customEventTime": dt.datetime.now(UTC()).isoformat(),
"customData": "sample data"
}
custom_event2 = {
"customSubject": "sample2",
"customEventType": "sample.event",
"customDataVersion": "2.0",
"customId": "12345",
"customEventTime": dt.datetime.now(UTC()).isoformat(),
"customData": "sample data 2"
}
await client.send([custom_event1, custom_event2])
@CachedResourceGroupPreparer(name_prefix='eventgridtest')
@CachedEventGridTopicPreparer(name_prefix='cloudeventgridtest')
@pytest.mark.asyncio
async def test_send_and_close_async_session(self, resource_group, eventgrid_topic, eventgrid_topic_primary_key, eventgrid_topic_endpoint):
akc_credential = AzureKeyCredential(eventgrid_topic_primary_key)
client = EventGridPublisherClient(eventgrid_topic_endpoint, akc_credential)
async with client: # this throws if client can't close
cloud_event = CloudEvent(
source = "http://samplesource.dev",
data = "cloudevent",
type="Sample.Cloud.Event"
)
await client.send(cloud_event)
@CachedResourceGroupPreparer(name_prefix='eventgridtest')
@CachedEventGridTopicPreparer(name_prefix='cloudeventgridtest')
@pytest.mark.asyncio
def test_send_NONE_credential_async(self, resource_group, eventgrid_topic, eventgrid_topic_primary_key, eventgrid_topic_endpoint):
with pytest.raises(ValueError, match="Parameter 'self._credential' must not be None."):
client = EventGridPublisherClient(eventgrid_topic_endpoint, None)
@pytest.mark.live_test_only
@CachedResourceGroupPreparer(name_prefix='eventgridtest')
@CachedEventGridTopicPreparer(name_prefix='eventgridtest')
@pytest.mark.asyncio
async def test_send_token_credential(self, resource_group, eventgrid_topic, eventgrid_topic_primary_key, eventgrid_topic_endpoint):
credential = self.get_credential(EventGridPublisherClient)
client = EventGridPublisherClient(eventgrid_topic_endpoint, credential)
eg_event = EventGridEvent(
subject="sample",
data={"sample": "eventgridevent"},
event_type="Sample.EventGrid.Event",
data_version="2.0"
)
await client.send(eg_event) | [
"[email protected]"
] | |
0623a8380a36e5f62792222f8e00a5d1dfb009bf | 1e89a90ac62db38a2a565ec11ac005e6a394c152 | /corehq/apps/reports/standard/cases/basic.py | 5d4fef720c29eae647a56d9008f1a9cbff334ebd | [] | no_license | gmimano/commcaretest | ec762f5632193a29ccb2f026152e7c89880f61ed | 9fefc664c99d2fa2ab602d626aa395dd3c960a6a | refs/heads/master | 2016-09-05T20:48:12.442519 | 2014-02-16T21:20:40 | 2014-02-16T21:20:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,772 | py | import logging
from couchdbkit import RequestFailed
from django.utils.translation import ugettext_noop, ugettext
from django.utils.translation import ugettext as _
import simplejson
from corehq.apps.api.es import CaseES
from corehq.apps.groups.models import Group
from corehq.apps.reports.api import ReportDataSource
from corehq.apps.reports.datatables import DataTablesHeader, DataTablesColumn
from corehq.apps.reports.fields import SelectMobileWorkerField, SelectOpenCloseField
from corehq.apps.reports.filters.search import SearchFilter
from corehq.apps.reports.generic import ElasticProjectInspectionReport
from corehq.apps.reports.models import HQUserType
from corehq.apps.reports.standard import ProjectReportParametersMixin
from corehq.apps.reports.standard.cases.data_sources import CaseInfo, CaseDisplay
from corehq.apps.reports.standard.inspect import ProjectInspectionReport
from corehq.apps.users.models import CommCareUser
from dimagi.utils.decorators.memoized import memoized
class CaseListMixin(ElasticProjectInspectionReport, ProjectReportParametersMixin):
fields = [
'corehq.apps.reports.fields.FilterUsersField',
'corehq.apps.reports.fields.SelectCaseOwnerField',
'corehq.apps.reports.fields.CaseTypeField',
'corehq.apps.reports.fields.SelectOpenCloseField',
'corehq.apps.reports.standard.cases.filters.CaseSearchFilter',
]
case_filter = {}
ajax_pagination = True
asynchronous = True
@property
@memoized
def case_es(self):
return CaseES(self.domain)
def build_query(self, case_type=None, filter=None, status=None, owner_ids=None, search_string=None):
# there's no point doing filters that are like owner_id:(x1 OR x2 OR ... OR x612)
# so past a certain number just exclude
owner_ids = owner_ids or []
MAX_IDS = 50
def _filter_gen(key, flist):
if flist and len(flist) < MAX_IDS:
yield {"terms": {
key: [item.lower() if item else "" for item in flist]
}}
# demo user hack
elif flist and "demo_user" not in flist:
yield {"not": {"term": {key: "demo_user"}}}
def _domain_term():
return {"term": {"domain.exact": self.domain}}
subterms = [_domain_term(), filter] if filter else [_domain_term()]
if case_type:
subterms.append({"term": {"type.exact": case_type}})
if status:
subterms.append({"term": {"closed": (status == 'closed')}})
user_filters = list(_filter_gen('owner_id', owner_ids))
if user_filters:
subterms.append({'or': user_filters})
if search_string:
query_block = {
"query_string": {"query": search_string}} # todo, make sure this doesn't suck
else:
query_block = {"match_all": {}}
and_block = {'and': subterms} if subterms else {}
es_query = {
'query': {
'filtered': {
'query': query_block,
'filter': and_block
}
},
'sort': self.get_sorting_block(),
'from': self.pagination.start,
'size': self.pagination.count,
}
return es_query
@property
@memoized
def es_results(self):
case_es = self.case_es
query = self.build_query(case_type=self.case_type, filter=self.case_filter,
status=self.case_status, owner_ids=self.case_owners,
search_string=SearchFilter.get_value(self.request, self.domain))
query_results = case_es.run_query(query)
if query_results is None or 'hits' not in query_results:
logging.error("CaseListMixin query error: %s, urlpath: %s, params: %s, user: %s yielded a result indicating a query error: %s, results: %s" % (
self.__class__.__name__,
self.request.path,
self.request.GET.urlencode(),
self.request.couch_user.username,
simplejson.dumps(query),
simplejson.dumps(query_results)
))
raise RequestFailed
return query_results
@property
@memoized
def case_owners(self):
if self.individual:
group_owners_raw = self.case_sharing_groups
else:
group_owners_raw = Group.get_case_sharing_groups(self.domain)
group_owners = [group._id for group in group_owners_raw]
ret = [user.get('user_id') for user in self.users]
if len(self.request.GET.getlist('ufilter')) == 1 and str(HQUserType.UNKNOWN) in self.request.GET.getlist('ufilter'):
#not applying group filter
pass
else:
ret += group_owners
return ret
@property
@memoized
def case_sharing_groups(self):
try:
user = CommCareUser.get_by_user_id(self.individual)
user = user if user.username_in_report else None
return user.get_case_sharing_groups()
except Exception:
try:
group = Group.get(self.individual)
assert(group.doc_type == 'Group')
return [group]
except Exception:
return []
def get_case(self, row):
if '_source' in row:
case_dict = row['_source']
else:
raise ValueError("Case object is not in search result %s" % row)
if case_dict['domain'] != self.domain:
raise Exception("case.domain != self.domain; %r and %r, respectively" % (case_dict['domain'], self.domain))
return case_dict
@property
def shared_pagination_GET_params(self):
shared_params = super(CaseListMixin, self).shared_pagination_GET_params
shared_params.append(dict(
name=SelectOpenCloseField.slug,
value=self.request.GET.get(SelectOpenCloseField.slug, '')
))
return shared_params
class CaseListReport(CaseListMixin, ProjectInspectionReport, ReportDataSource):
# note that this class is not true to the spirit of ReportDataSource; the whole
# point is the decouple generating the raw report data from the report view/django
# request. but currently these are too tightly bound to decouple
name = ugettext_noop('Case List')
slug = 'case_list'
@property
def user_filter(self):
return super(CaseListReport, self).user_filter
@property
@memoized
def rendered_report_title(self):
if not self.individual:
self.name = _("%(report_name)s for %(worker_type)s") % {
"report_name": _(self.name),
"worker_type": _(SelectMobileWorkerField.get_default_text(self.user_filter))
}
return self.name
def slugs(self):
return [
'_case',
'case_id',
'case_name',
'case_type',
'detail_url',
'is_open',
'opened_on',
'modified_on',
'closed_on',
'creator_id',
'creator_name',
'owner_type',
'owner_id',
'owner_name',
'external_id',
]
def get_data(self, slugs=None):
for row in self.es_results['hits'].get('hits', []):
case = self.get_case(row)
ci = CaseInfo(self, case)
data = {
'_case': case,
'detail_url': ci.case_detail_url,
}
data.update((prop, getattr(ci, prop)) for prop in (
'case_type', 'case_name', 'case_id', 'external_id',
'is_closed', 'opened_on', 'modified_on', 'closed_on',
))
creator = ci.creating_user or {}
data.update({
'creator_id': creator.get('id'),
'creator_name': creator.get('name'),
})
owner = ci.owner
data.update({
'owner_type': owner[0],
'owner_id': owner[1]['id'],
'owner_name': owner[1]['name'],
})
yield data
@property
def headers(self):
headers = DataTablesHeader(
DataTablesColumn(_("Case Type"), prop_name="type.exact"),
DataTablesColumn(_("Name"), prop_name="name.exact"),
DataTablesColumn(_("Owner"), prop_name="owner_display", sortable=False),
DataTablesColumn(_("Created Date"), prop_name="opened_on"),
DataTablesColumn(_("Created By"), prop_name="opened_by_display", sortable=False),
DataTablesColumn(_("Modified Date"), prop_name="modified_on"),
DataTablesColumn(_("Status"), prop_name="get_status_display", sortable=False)
)
headers.custom_sort = [[5, 'desc']]
return headers
@property
def rows(self):
for data in self.get_data():
display = CaseDisplay(self, data['_case'])
yield [
display.case_type,
display.case_link,
display.owner_display,
display.opened_on,
display.creating_user,
display.modified_on,
display.closed_display
]
def date_to_json(self, date):
if date:
return date.strftime('%Y-%m-%d %H:%M:%S')
# temporary band aid solution for http://manage.dimagi.com/default.asp?80262
# return tz_utils.adjust_datetime_to_timezone(
# date, pytz.utc.zone, self.timezone.zone
# ).strftime('%Y-%m-%d %H:%M:%S')
else:
return '' | [
"[email protected]"
] | |
f8311679b31b7b468a2b34ecffa528474554e3af | e3e5efe47f51f71b28fa6d7d5b2311f25d64b580 | /analytics/migrations/0004_auto_20191002_1350.py | 189d799a1f81daee8ef86dcdbb73ce505ba25707 | [
"BSD-3-Clause"
] | permissive | boxed/analytics | be9169584459434dd2b0099d8ff2ff0755086a95 | b1237de9d13ee3509d524d06c561342071090331 | refs/heads/master | 2023-04-15T01:38:03.702986 | 2023-03-22T05:38:22 | 2023-03-22T05:38:22 | 210,958,980 | 1 | 0 | BSD-3-Clause | 2023-03-22T05:38:24 | 2019-09-25T23:30:59 | Python | UTF-8 | Python | false | false | 529 | py | # Generated by Django 2.2.5 on 2019-10-02 13:50
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('analytics', '0003_auto_20190929_1620'),
]
operations = [
migrations.AlterField(
model_name='referrers',
name='page_url',
field=models.URLField(db_index=True),
),
migrations.AlterUniqueTogether(
name='referrers',
unique_together={('page_url', 'referrer')},
),
]
| [
"[email protected]"
] | |
3752bd2b4cb84fb188177ba0d85a16a73e44cd8c | 8ea6b3429ec95420029a188e898cc63d6acac871 | /fnb/views.py | 44d1330b9653e6cb948596d5c33847e862a19beb | [] | no_license | chidimo/fnb | 131cbbcb44778fae607929e55515907bd15125ba | 1ecbed6ce55f7ce368f8909975c2ba28e908d1c3 | refs/heads/master | 2023-04-10T03:37:40.886743 | 2021-04-23T18:04:26 | 2021-04-23T18:04:26 | 359,479,691 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 223 | py | from django.shortcuts import render
from django.contrib.auth.decorators import login_required
# @login_required
def home(request):
template = "home.html"
context = {}
return render(request, template, context)
| [
"[email protected]"
] | |
6c2bbef21302fc2cebc465ed246a30dbf26672cb | a66460a46611483dfbdc94c7996893f427e60d97 | /ansible/my_env/lib/python2.7/site-packages/ansible/modules/cloud/vmware/vmware_datastore_cluster.py | 4e635ef511387fe7f9fee40439d8fbc1498d0585 | [
"MIT",
"GPL-3.0-or-later"
] | permissive | otus-devops-2019-02/yyashkin_infra | 06b57807dde26f94f501828c07503d6bf1d70816 | 0cd0c003884155ac922e3e301305ac202de7028c | refs/heads/master | 2020-04-29T02:42:22.056724 | 2019-05-15T16:24:35 | 2019-05-15T16:24:35 | 175,780,718 | 0 | 0 | MIT | 2019-05-15T16:24:36 | 2019-03-15T08:37:35 | HCL | UTF-8 | Python | false | false | 5,606 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2018, Ansible Project
# Copyright (c) 2018, Abhijeet Kasurde <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: vmware_datastore_cluster
short_description: Manage VMware vSphere datastore clusters
description:
- This module can be used to add and delete datastore cluster in given VMware environment.
- All parameters and VMware object values are case sensitive.
version_added: 2.6
author:
- Abhijeet Kasurde (@Akasurde)
notes:
- Tested on vSphere 6.0, 6.5
requirements:
- "python >= 2.6"
- PyVmomi
options:
datacenter_name:
description:
- The name of the datacenter.
required: True
datastore_cluster_name:
description:
- The name of the datastore cluster.
required: True
state:
description:
- If the datastore cluster should be present or absent.
choices: [ present, absent ]
default: present
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
- name: Create datastore cluster
vmware_datastore_cluster:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
datacenter_name: '{{ datacenter_name }}'
datastore_cluster_name: '{{ datastore_cluster_name }}'
state: present
delegate_to: localhost
- name: Delete datastore cluster
vmware_datastore_cluster:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
datacenter_name: '{{ datacenter_name }}'
datastore_cluster_name: '{{ datastore_cluster_name }}'
state: absent
delegate_to: localhost
'''
RETURN = """
result:
description: information about datastore cluster operation
returned: always
type: string
sample: "Datastore cluster 'DSC2' created successfully."
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import PyVmomi, vmware_argument_spec, wait_for_task
from ansible.module_utils._text import to_native
class VMwareDatastoreClusterManager(PyVmomi):
def __init__(self, module):
super(VMwareDatastoreClusterManager, self).__init__(module)
datacenter_name = self.params.get('datacenter_name')
self.datacenter_obj = self.find_datacenter_by_name(datacenter_name)
if not self.datacenter_obj:
self.module.fail_json(msg="Failed to find datacenter '%s' required"
" for managing datastore cluster." % datacenter_name)
self.datastore_cluster_name = self.params.get('datastore_cluster_name')
self.datastore_cluster_obj = self.find_datastore_cluster_by_name(self.datastore_cluster_name)
def ensure(self):
"""
Function to manage internal state of datastore cluster
"""
results = dict(changed=False, result='')
state = self.module.params.get('state')
if self.datastore_cluster_obj:
if state == 'present':
results['result'] = "Datastore cluster '%s' already available." % self.datastore_cluster_name
elif state == 'absent':
# Delete datastore cluster
if not self.module.check_mode:
task = self.datastore_cluster_obj.Destroy_Task()
changed, result = wait_for_task(task)
else:
changed = True
if changed:
results['result'] = "Datastore cluster '%s' deleted successfully." % self.datastore_cluster_name
results['changed'] = changed
else:
self.module.fail_json(msg="Failed to delete datastore cluster '%s'." % self.datastore_cluster_name)
else:
if state == 'present':
# Create datastore cluster
if not self.module.check_mode:
try:
self.datacenter_obj.datastoreFolder.CreateStoragePod(name=self.datastore_cluster_name)
except Exception as generic_exc:
self.module.fail_json(msg="Failed to create datstore cluster"
" '%s' due to %s" % (self.datastore_cluster_name,
to_native(generic_exc)))
results['changed'] = True
results['result'] = "Datastore cluster '%s' created successfully." % self.datastore_cluster_name
elif state == 'absent':
results['result'] = "Datastore cluster '%s' not available or already deleted." % self.datastore_cluster_name
self.module.exit_json(**results)
def main():
    """Ansible entry point: build the argument spec and run the manager."""
    argument_spec = vmware_argument_spec()
    argument_spec.update(
        datacenter_name=dict(type='str', required=True),
        datastore_cluster_name=dict(type='str', required=True),
        state=dict(type='str', default='present', choices=['present', 'absent']),
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )
    # Delegate all state handling to the manager; it exits the module itself.
    VMwareDatastoreClusterManager(module).ensure()


if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
e40ab14da91a94eef3ca5f0162f555fd7f3063e2 | 1f689e448d8b510ea6575590cb6920048b4e9aea | /leetcode/237_delete_node_in_a_linked_list.py | 18f860fadd7f55ef02c8c54a766f4dd7909583d7 | [] | no_license | lijenpan/python | 52c6061ff90c611efd039b1858339edbefdb5ad0 | 7f67045a83bd2592ccc399420194094fb78404b8 | refs/heads/master | 2020-05-30T10:53:15.634090 | 2016-12-02T20:50:28 | 2016-12-02T20:50:28 | 7,646,477 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 706 | py | """
Write a function to delete a node (except the tail) in a singly linked list, given only access to that node.
Suppose the linked list is 1 -> 2 -> 3 -> 4 and you are given the third node with value 3, the linked list
should become 1 -> 2 -> 4 after calling your function.
==============================
I was confused about what the question was asking without giving the root. Then I realized the question
was simply asking to point the given node to the next one. Thus delete the given node. Simple.
"""
def deleteNode(node):
    """Remove *node* from its singly linked list by absorbing its successor.

    :type node: ListNode
    :rtype: void Do not return anything, modify node in-place instead.
    """
    successor = node.next
    node.val = successor.val
    node.next = successor.next
| [
"[email protected]"
] | |
6b1995c24faa13aa921aa46b8fc645f211e7c15b | 350796fa13c98af7a5a2e8873e5cb74e4f29043c | /redis_test/__init__.py | eb36980e51a8b42aa8c379c220023807fec5d382 | [] | no_license | dajun928/Python36 | 3babdb47124cace844bf8d7b8054c1c6181a0a9f | 8974fc9c9c808e10fef02ed4c061bfbac5a0961f | refs/heads/master | 2022-12-12T17:47:47.724277 | 2021-01-10T03:29:45 | 2021-01-10T03:29:45 | 174,575,956 | 0 | 0 | null | 2021-06-01T23:57:50 | 2019-03-08T16:59:19 | HTML | UTF-8 | Python | false | false | 174 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""Package init for redis_test.

@version :
@file    : __init__.py
@time    : 2019/07/13 23:14:22
@func    : report which Python interpreter version is running
"""
import platform

# Side effect on import: print the interpreter version.
print(platform.python_version())
| [
"[email protected]"
] | |
36d31948ef1c79f995c44675dea5764a3d0967f1 | 7684ffabb75ed2d6396d3a720c56ed0ee09ee77d | /crack_detection/gcloud/google-cloud-sdk/lib/googlecloudsdk/third_party/apis/dlp/v2/dlp_v2_client.py | fe9d6a7d787f294c78f57e6a585b33e7fa8db543 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | bopopescu/CrackPropAPI | da7cc7a1ef046d20992423f7c7a148e390bb70e7 | 24c0cfd1f258eeaa7e5e953253b5d778f2fbecb5 | refs/heads/master | 2022-11-09T07:15:41.142453 | 2020-07-02T14:34:15 | 2020-07-02T14:34:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 130,297 | py | """Generated client library for dlp version v2."""
# NOTE: This file is autogenerated and should not be edited by hand.
from apitools.base.py import base_api
from googlecloudsdk.third_party.apis.dlp.v2 import dlp_v2_messages as messages
class DlpV2(base_api.BaseApiClient):
"""Generated client library for service dlp version v2."""
# Client configuration constants baked in by the apitools code generator.
MESSAGES_MODULE = messages
BASE_URL = u'https://dlp.googleapis.com/'
MTLS_BASE_URL = u'https://dlp.mtls.googleapis.com/'
_PACKAGE = u'dlp'
_SCOPES = [u'https://www.googleapis.com/auth/cloud-platform']
_VERSION = u'v2'
_CLIENT_ID = '1042881264118.apps.googleusercontent.com'
_CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b'
_USER_AGENT = u'google-cloud-sdk'
_CLIENT_CLASS_NAME = u'DlpV2'
_URL_VERSION = u'v2'
_API_KEY = None
def __init__(self, url='', credentials=None,
             get_credentials=True, http=None, model=None,
             log_request=False, log_response=False,
             credentials_args=None, default_global_params=None,
             additional_http_headers=None, response_encoding=None):
  """Create a new dlp handle."""
  url = url or self.BASE_URL
  super(DlpV2, self).__init__(
      url, credentials=credentials,
      get_credentials=get_credentials, http=http, model=model,
      log_request=log_request, log_response=log_response,
      credentials_args=credentials_args,
      default_global_params=default_global_params,
      additional_http_headers=additional_http_headers,
      response_encoding=response_encoding)
  # One generated service wrapper per API collection; each exposes that
  # collection's REST methods (create/get/list/patch/delete, etc.).
  self.infoTypes = self.InfoTypesService(self)
  self.locations_infoTypes = self.LocationsInfoTypesService(self)
  self.locations = self.LocationsService(self)
  self.organizations_deidentifyTemplates = self.OrganizationsDeidentifyTemplatesService(self)
  self.organizations_inspectTemplates = self.OrganizationsInspectTemplatesService(self)
  self.organizations_locations_deidentifyTemplates = self.OrganizationsLocationsDeidentifyTemplatesService(self)
  self.organizations_locations_inspectTemplates = self.OrganizationsLocationsInspectTemplatesService(self)
  self.organizations_locations_storedInfoTypes = self.OrganizationsLocationsStoredInfoTypesService(self)
  self.organizations_locations = self.OrganizationsLocationsService(self)
  self.organizations_storedInfoTypes = self.OrganizationsStoredInfoTypesService(self)
  self.organizations = self.OrganizationsService(self)
  self.projects_content = self.ProjectsContentService(self)
  self.projects_deidentifyTemplates = self.ProjectsDeidentifyTemplatesService(self)
  self.projects_dlpJobs = self.ProjectsDlpJobsService(self)
  self.projects_image = self.ProjectsImageService(self)
  self.projects_inspectTemplates = self.ProjectsInspectTemplatesService(self)
  self.projects_jobTriggers = self.ProjectsJobTriggersService(self)
  self.projects_locations_content = self.ProjectsLocationsContentService(self)
  self.projects_locations_deidentifyTemplates = self.ProjectsLocationsDeidentifyTemplatesService(self)
  self.projects_locations_dlpJobs = self.ProjectsLocationsDlpJobsService(self)
  self.projects_locations_image = self.ProjectsLocationsImageService(self)
  self.projects_locations_inspectTemplates = self.ProjectsLocationsInspectTemplatesService(self)
  self.projects_locations_jobTriggers = self.ProjectsLocationsJobTriggersService(self)
  self.projects_locations_storedInfoTypes = self.ProjectsLocationsStoredInfoTypesService(self)
  self.projects_locations = self.ProjectsLocationsService(self)
  self.projects_storedInfoTypes = self.ProjectsStoredInfoTypesService(self)
  self.projects = self.ProjectsService(self)
class InfoTypesService(base_api.BaseApiService):
  """Service class for the infoTypes resource."""

  _NAME = u'infoTypes'

  def __init__(self, client):
    super(DlpV2.InfoTypesService, self).__init__(client)
    self._upload_configs = {
        }

  def List(self, request, global_params=None):
    r"""Returns a list of the sensitive information types that the DLP API.
    supports. See https://cloud.google.com/dlp/docs/infotypes-reference to
    learn more.

    Args:
      request: (DlpInfoTypesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GooglePrivacyDlpV2ListInfoTypesResponse) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Static REST mapping consumed by the base client when dispatching List.
  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'dlp.infoTypes.list',
      ordered_params=[],
      path_params=[],
      query_params=[u'filter', u'languageCode', u'locationId'],
      relative_path=u'v2/infoTypes',
      request_field='',
      request_type_name=u'DlpInfoTypesListRequest',
      response_type_name=u'GooglePrivacyDlpV2ListInfoTypesResponse',
      supports_download=False,
  )
class LocationsInfoTypesService(base_api.BaseApiService):
  """Service class for the locations_infoTypes resource."""

  _NAME = u'locations_infoTypes'

  def __init__(self, client):
    super(DlpV2.LocationsInfoTypesService, self).__init__(client)
    self._upload_configs = {
        }

  def List(self, request, global_params=None):
    r"""Returns a list of the sensitive information types that the DLP API.
    supports. See https://cloud.google.com/dlp/docs/infotypes-reference to
    learn more.

    Args:
      request: (DlpLocationsInfoTypesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GooglePrivacyDlpV2ListInfoTypesResponse) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Static REST mapping consumed by the base client when dispatching List.
  List.method_config = lambda: base_api.ApiMethodInfo(
      http_method=u'GET',
      method_id=u'dlp.locations.infoTypes.list',
      ordered_params=[u'locationId'],
      path_params=[u'locationId'],
      query_params=[u'filter', u'languageCode'],
      relative_path=u'v2/locations/{locationId}/infoTypes',
      request_field='',
      request_type_name=u'DlpLocationsInfoTypesListRequest',
      response_type_name=u'GooglePrivacyDlpV2ListInfoTypesResponse',
      supports_download=False,
  )
class LocationsService(base_api.BaseApiService):
  """Service class for the locations resource."""

  _NAME = u'locations'

  def __init__(self, client):
    super(DlpV2.LocationsService, self).__init__(client)
    # No resumable-upload methods on this collection.
    self._upload_configs = {
        }
class OrganizationsDeidentifyTemplatesService(base_api.BaseApiService):
  """Service class for the organizations_deidentifyTemplates resource."""

  _NAME = u'organizations_deidentifyTemplates'

  def __init__(self, client):
    super(DlpV2.OrganizationsDeidentifyTemplatesService, self).__init__(client)
    self._upload_configs = {
        }

  def Create(self, request, global_params=None):
    r"""Creates a DeidentifyTemplate for re-using frequently used configuration.
    for de-identifying content, images, and storage.
    See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
    more.

    Args:
      request: (DlpOrganizationsDeidentifyTemplatesCreateRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GooglePrivacyDlpV2DeidentifyTemplate) The response message.
    """
    config = self.GetMethodConfig('Create')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Static REST mappings consumed by the base client for each method below.
  Create.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/deidentifyTemplates',
      http_method=u'POST',
      method_id=u'dlp.organizations.deidentifyTemplates.create',
      ordered_params=[u'parent'],
      path_params=[u'parent'],
      query_params=[],
      relative_path=u'v2/{+parent}/deidentifyTemplates',
      request_field=u'googlePrivacyDlpV2CreateDeidentifyTemplateRequest',
      request_type_name=u'DlpOrganizationsDeidentifyTemplatesCreateRequest',
      response_type_name=u'GooglePrivacyDlpV2DeidentifyTemplate',
      supports_download=False,
  )

  def Delete(self, request, global_params=None):
    r"""Deletes a DeidentifyTemplate.
    See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
    more.

    Args:
      request: (DlpOrganizationsDeidentifyTemplatesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GoogleProtobufEmpty) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/deidentifyTemplates/{deidentifyTemplatesId}',
      http_method=u'DELETE',
      method_id=u'dlp.organizations.deidentifyTemplates.delete',
      ordered_params=[u'name'],
      path_params=[u'name'],
      query_params=[],
      relative_path=u'v2/{+name}',
      request_field='',
      request_type_name=u'DlpOrganizationsDeidentifyTemplatesDeleteRequest',
      response_type_name=u'GoogleProtobufEmpty',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    r"""Gets a DeidentifyTemplate.
    See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
    more.

    Args:
      request: (DlpOrganizationsDeidentifyTemplatesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GooglePrivacyDlpV2DeidentifyTemplate) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/deidentifyTemplates/{deidentifyTemplatesId}',
      http_method=u'GET',
      method_id=u'dlp.organizations.deidentifyTemplates.get',
      ordered_params=[u'name'],
      path_params=[u'name'],
      query_params=[],
      relative_path=u'v2/{+name}',
      request_field='',
      request_type_name=u'DlpOrganizationsDeidentifyTemplatesGetRequest',
      response_type_name=u'GooglePrivacyDlpV2DeidentifyTemplate',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    r"""Lists DeidentifyTemplates.
    See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
    more.

    Args:
      request: (DlpOrganizationsDeidentifyTemplatesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GooglePrivacyDlpV2ListDeidentifyTemplatesResponse) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/deidentifyTemplates',
      http_method=u'GET',
      method_id=u'dlp.organizations.deidentifyTemplates.list',
      ordered_params=[u'parent'],
      path_params=[u'parent'],
      query_params=[u'locationId', u'orderBy', u'pageSize', u'pageToken'],
      relative_path=u'v2/{+parent}/deidentifyTemplates',
      request_field='',
      request_type_name=u'DlpOrganizationsDeidentifyTemplatesListRequest',
      response_type_name=u'GooglePrivacyDlpV2ListDeidentifyTemplatesResponse',
      supports_download=False,
  )

  def Patch(self, request, global_params=None):
    r"""Updates the DeidentifyTemplate.
    See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
    more.

    Args:
      request: (DlpOrganizationsDeidentifyTemplatesPatchRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GooglePrivacyDlpV2DeidentifyTemplate) The response message.
    """
    config = self.GetMethodConfig('Patch')
    return self._RunMethod(
        config, request, global_params=global_params)

  Patch.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/deidentifyTemplates/{deidentifyTemplatesId}',
      http_method=u'PATCH',
      method_id=u'dlp.organizations.deidentifyTemplates.patch',
      ordered_params=[u'name'],
      path_params=[u'name'],
      query_params=[],
      relative_path=u'v2/{+name}',
      request_field=u'googlePrivacyDlpV2UpdateDeidentifyTemplateRequest',
      request_type_name=u'DlpOrganizationsDeidentifyTemplatesPatchRequest',
      response_type_name=u'GooglePrivacyDlpV2DeidentifyTemplate',
      supports_download=False,
  )
class OrganizationsInspectTemplatesService(base_api.BaseApiService):
  """Service class for the organizations_inspectTemplates resource."""

  _NAME = u'organizations_inspectTemplates'

  def __init__(self, client):
    super(DlpV2.OrganizationsInspectTemplatesService, self).__init__(client)
    self._upload_configs = {
        }

  def Create(self, request, global_params=None):
    r"""Creates an InspectTemplate for re-using frequently used configuration.
    for inspecting content, images, and storage.
    See https://cloud.google.com/dlp/docs/creating-templates to learn more.

    Args:
      request: (DlpOrganizationsInspectTemplatesCreateRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GooglePrivacyDlpV2InspectTemplate) The response message.
    """
    config = self.GetMethodConfig('Create')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Static REST mappings consumed by the base client for each method below.
  Create.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/inspectTemplates',
      http_method=u'POST',
      method_id=u'dlp.organizations.inspectTemplates.create',
      ordered_params=[u'parent'],
      path_params=[u'parent'],
      query_params=[],
      relative_path=u'v2/{+parent}/inspectTemplates',
      request_field=u'googlePrivacyDlpV2CreateInspectTemplateRequest',
      request_type_name=u'DlpOrganizationsInspectTemplatesCreateRequest',
      response_type_name=u'GooglePrivacyDlpV2InspectTemplate',
      supports_download=False,
  )

  def Delete(self, request, global_params=None):
    r"""Deletes an InspectTemplate.
    See https://cloud.google.com/dlp/docs/creating-templates to learn more.

    Args:
      request: (DlpOrganizationsInspectTemplatesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GoogleProtobufEmpty) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/inspectTemplates/{inspectTemplatesId}',
      http_method=u'DELETE',
      method_id=u'dlp.organizations.inspectTemplates.delete',
      ordered_params=[u'name'],
      path_params=[u'name'],
      query_params=[],
      relative_path=u'v2/{+name}',
      request_field='',
      request_type_name=u'DlpOrganizationsInspectTemplatesDeleteRequest',
      response_type_name=u'GoogleProtobufEmpty',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    r"""Gets an InspectTemplate.
    See https://cloud.google.com/dlp/docs/creating-templates to learn more.

    Args:
      request: (DlpOrganizationsInspectTemplatesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GooglePrivacyDlpV2InspectTemplate) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/inspectTemplates/{inspectTemplatesId}',
      http_method=u'GET',
      method_id=u'dlp.organizations.inspectTemplates.get',
      ordered_params=[u'name'],
      path_params=[u'name'],
      query_params=[],
      relative_path=u'v2/{+name}',
      request_field='',
      request_type_name=u'DlpOrganizationsInspectTemplatesGetRequest',
      response_type_name=u'GooglePrivacyDlpV2InspectTemplate',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    r"""Lists InspectTemplates.
    See https://cloud.google.com/dlp/docs/creating-templates to learn more.

    Args:
      request: (DlpOrganizationsInspectTemplatesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GooglePrivacyDlpV2ListInspectTemplatesResponse) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/inspectTemplates',
      http_method=u'GET',
      method_id=u'dlp.organizations.inspectTemplates.list',
      ordered_params=[u'parent'],
      path_params=[u'parent'],
      query_params=[u'locationId', u'orderBy', u'pageSize', u'pageToken'],
      relative_path=u'v2/{+parent}/inspectTemplates',
      request_field='',
      request_type_name=u'DlpOrganizationsInspectTemplatesListRequest',
      response_type_name=u'GooglePrivacyDlpV2ListInspectTemplatesResponse',
      supports_download=False,
  )

  def Patch(self, request, global_params=None):
    r"""Updates the InspectTemplate.
    See https://cloud.google.com/dlp/docs/creating-templates to learn more.

    Args:
      request: (DlpOrganizationsInspectTemplatesPatchRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GooglePrivacyDlpV2InspectTemplate) The response message.
    """
    config = self.GetMethodConfig('Patch')
    return self._RunMethod(
        config, request, global_params=global_params)

  Patch.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/inspectTemplates/{inspectTemplatesId}',
      http_method=u'PATCH',
      method_id=u'dlp.organizations.inspectTemplates.patch',
      ordered_params=[u'name'],
      path_params=[u'name'],
      query_params=[],
      relative_path=u'v2/{+name}',
      request_field=u'googlePrivacyDlpV2UpdateInspectTemplateRequest',
      request_type_name=u'DlpOrganizationsInspectTemplatesPatchRequest',
      response_type_name=u'GooglePrivacyDlpV2InspectTemplate',
      supports_download=False,
  )
class OrganizationsLocationsDeidentifyTemplatesService(base_api.BaseApiService):
  """Service class for the organizations_locations_deidentifyTemplates resource."""

  _NAME = u'organizations_locations_deidentifyTemplates'

  def __init__(self, client):
    super(DlpV2.OrganizationsLocationsDeidentifyTemplatesService, self).__init__(client)
    self._upload_configs = {
        }

  def Create(self, request, global_params=None):
    r"""Creates a DeidentifyTemplate for re-using frequently used configuration.
    for de-identifying content, images, and storage.
    See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
    more.

    Args:
      request: (DlpOrganizationsLocationsDeidentifyTemplatesCreateRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GooglePrivacyDlpV2DeidentifyTemplate) The response message.
    """
    config = self.GetMethodConfig('Create')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Static REST mappings consumed by the base client for each method below.
  Create.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/locations/{locationId}/deidentifyTemplates',
      http_method=u'POST',
      method_id=u'dlp.organizations.locations.deidentifyTemplates.create',
      ordered_params=[u'parent', u'locationId'],
      path_params=[u'locationId', u'parent'],
      query_params=[],
      relative_path=u'v2/{+parent}/locations/{locationId}/deidentifyTemplates',
      request_field=u'googlePrivacyDlpV2CreateDeidentifyTemplateRequest',
      request_type_name=u'DlpOrganizationsLocationsDeidentifyTemplatesCreateRequest',
      response_type_name=u'GooglePrivacyDlpV2DeidentifyTemplate',
      supports_download=False,
  )

  def Delete(self, request, global_params=None):
    r"""Deletes a DeidentifyTemplate.
    See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
    more.

    Args:
      request: (DlpOrganizationsLocationsDeidentifyTemplatesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GoogleProtobufEmpty) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/locations/{locationsId}/deidentifyTemplates/{deidentifyTemplatesId}',
      http_method=u'DELETE',
      method_id=u'dlp.organizations.locations.deidentifyTemplates.delete',
      ordered_params=[u'name'],
      path_params=[u'name'],
      query_params=[],
      relative_path=u'v2/{+name}',
      request_field='',
      request_type_name=u'DlpOrganizationsLocationsDeidentifyTemplatesDeleteRequest',
      response_type_name=u'GoogleProtobufEmpty',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    r"""Gets a DeidentifyTemplate.
    See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
    more.

    Args:
      request: (DlpOrganizationsLocationsDeidentifyTemplatesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GooglePrivacyDlpV2DeidentifyTemplate) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/locations/{locationsId}/deidentifyTemplates/{deidentifyTemplatesId}',
      http_method=u'GET',
      method_id=u'dlp.organizations.locations.deidentifyTemplates.get',
      ordered_params=[u'name'],
      path_params=[u'name'],
      query_params=[],
      relative_path=u'v2/{+name}',
      request_field='',
      request_type_name=u'DlpOrganizationsLocationsDeidentifyTemplatesGetRequest',
      response_type_name=u'GooglePrivacyDlpV2DeidentifyTemplate',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    r"""Lists DeidentifyTemplates.
    See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
    more.

    Args:
      request: (DlpOrganizationsLocationsDeidentifyTemplatesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GooglePrivacyDlpV2ListDeidentifyTemplatesResponse) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/locations/{locationId}/deidentifyTemplates',
      http_method=u'GET',
      method_id=u'dlp.organizations.locations.deidentifyTemplates.list',
      ordered_params=[u'parent', u'locationId'],
      path_params=[u'locationId', u'parent'],
      query_params=[u'orderBy', u'pageSize', u'pageToken'],
      relative_path=u'v2/{+parent}/locations/{locationId}/deidentifyTemplates',
      request_field='',
      request_type_name=u'DlpOrganizationsLocationsDeidentifyTemplatesListRequest',
      response_type_name=u'GooglePrivacyDlpV2ListDeidentifyTemplatesResponse',
      supports_download=False,
  )

  def Patch(self, request, global_params=None):
    r"""Updates the DeidentifyTemplate.
    See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
    more.

    Args:
      request: (DlpOrganizationsLocationsDeidentifyTemplatesPatchRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GooglePrivacyDlpV2DeidentifyTemplate) The response message.
    """
    config = self.GetMethodConfig('Patch')
    return self._RunMethod(
        config, request, global_params=global_params)

  Patch.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/locations/{locationsId}/deidentifyTemplates/{deidentifyTemplatesId}',
      http_method=u'PATCH',
      method_id=u'dlp.organizations.locations.deidentifyTemplates.patch',
      ordered_params=[u'name'],
      path_params=[u'name'],
      query_params=[],
      relative_path=u'v2/{+name}',
      request_field=u'googlePrivacyDlpV2UpdateDeidentifyTemplateRequest',
      request_type_name=u'DlpOrganizationsLocationsDeidentifyTemplatesPatchRequest',
      response_type_name=u'GooglePrivacyDlpV2DeidentifyTemplate',
      supports_download=False,
  )
class OrganizationsLocationsInspectTemplatesService(base_api.BaseApiService):
  """Service class for the organizations_locations_inspectTemplates resource."""

  _NAME = u'organizations_locations_inspectTemplates'

  def __init__(self, client):
    super(DlpV2.OrganizationsLocationsInspectTemplatesService, self).__init__(client)
    self._upload_configs = {
        }

  def Create(self, request, global_params=None):
    r"""Creates an InspectTemplate for re-using frequently used configuration.
    for inspecting content, images, and storage.
    See https://cloud.google.com/dlp/docs/creating-templates to learn more.

    Args:
      request: (DlpOrganizationsLocationsInspectTemplatesCreateRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GooglePrivacyDlpV2InspectTemplate) The response message.
    """
    config = self.GetMethodConfig('Create')
    return self._RunMethod(
        config, request, global_params=global_params)

  # Static REST mappings consumed by the base client for each method below.
  Create.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/locations/{locationId}/inspectTemplates',
      http_method=u'POST',
      method_id=u'dlp.organizations.locations.inspectTemplates.create',
      ordered_params=[u'parent', u'locationId'],
      path_params=[u'locationId', u'parent'],
      query_params=[],
      relative_path=u'v2/{+parent}/locations/{locationId}/inspectTemplates',
      request_field=u'googlePrivacyDlpV2CreateInspectTemplateRequest',
      request_type_name=u'DlpOrganizationsLocationsInspectTemplatesCreateRequest',
      response_type_name=u'GooglePrivacyDlpV2InspectTemplate',
      supports_download=False,
  )

  def Delete(self, request, global_params=None):
    r"""Deletes an InspectTemplate.
    See https://cloud.google.com/dlp/docs/creating-templates to learn more.

    Args:
      request: (DlpOrganizationsLocationsInspectTemplatesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GoogleProtobufEmpty) The response message.
    """
    config = self.GetMethodConfig('Delete')
    return self._RunMethod(
        config, request, global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/locations/{locationsId}/inspectTemplates/{inspectTemplatesId}',
      http_method=u'DELETE',
      method_id=u'dlp.organizations.locations.inspectTemplates.delete',
      ordered_params=[u'name'],
      path_params=[u'name'],
      query_params=[],
      relative_path=u'v2/{+name}',
      request_field='',
      request_type_name=u'DlpOrganizationsLocationsInspectTemplatesDeleteRequest',
      response_type_name=u'GoogleProtobufEmpty',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    r"""Gets an InspectTemplate.
    See https://cloud.google.com/dlp/docs/creating-templates to learn more.

    Args:
      request: (DlpOrganizationsLocationsInspectTemplatesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GooglePrivacyDlpV2InspectTemplate) The response message.
    """
    config = self.GetMethodConfig('Get')
    return self._RunMethod(
        config, request, global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/locations/{locationsId}/inspectTemplates/{inspectTemplatesId}',
      http_method=u'GET',
      method_id=u'dlp.organizations.locations.inspectTemplates.get',
      ordered_params=[u'name'],
      path_params=[u'name'],
      query_params=[],
      relative_path=u'v2/{+name}',
      request_field='',
      request_type_name=u'DlpOrganizationsLocationsInspectTemplatesGetRequest',
      response_type_name=u'GooglePrivacyDlpV2InspectTemplate',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    r"""Lists InspectTemplates.
    See https://cloud.google.com/dlp/docs/creating-templates to learn more.

    Args:
      request: (DlpOrganizationsLocationsInspectTemplatesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GooglePrivacyDlpV2ListInspectTemplatesResponse) The response message.
    """
    config = self.GetMethodConfig('List')
    return self._RunMethod(
        config, request, global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/locations/{locationId}/inspectTemplates',
      http_method=u'GET',
      method_id=u'dlp.organizations.locations.inspectTemplates.list',
      ordered_params=[u'parent', u'locationId'],
      path_params=[u'locationId', u'parent'],
      query_params=[u'orderBy', u'pageSize', u'pageToken'],
      relative_path=u'v2/{+parent}/locations/{locationId}/inspectTemplates',
      request_field='',
      request_type_name=u'DlpOrganizationsLocationsInspectTemplatesListRequest',
      response_type_name=u'GooglePrivacyDlpV2ListInspectTemplatesResponse',
      supports_download=False,
  )

  def Patch(self, request, global_params=None):
    r"""Updates the InspectTemplate.
    See https://cloud.google.com/dlp/docs/creating-templates to learn more.

    Args:
      request: (DlpOrganizationsLocationsInspectTemplatesPatchRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GooglePrivacyDlpV2InspectTemplate) The response message.
    """
    config = self.GetMethodConfig('Patch')
    return self._RunMethod(
        config, request, global_params=global_params)

  Patch.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/locations/{locationsId}/inspectTemplates/{inspectTemplatesId}',
      http_method=u'PATCH',
      method_id=u'dlp.organizations.locations.inspectTemplates.patch',
      ordered_params=[u'name'],
      path_params=[u'name'],
      query_params=[],
      relative_path=u'v2/{+name}',
      request_field=u'googlePrivacyDlpV2UpdateInspectTemplateRequest',
      request_type_name=u'DlpOrganizationsLocationsInspectTemplatesPatchRequest',
      response_type_name=u'GooglePrivacyDlpV2InspectTemplate',
      supports_download=False,
  )
class OrganizationsLocationsStoredInfoTypesService(base_api.BaseApiService):
"""Service class for the organizations_locations_storedInfoTypes resource."""
_NAME = u'organizations_locations_storedInfoTypes'
def __init__(self, client):
  super(DlpV2.OrganizationsLocationsStoredInfoTypesService, self).__init__(client)
  # No resumable-upload methods on this collection.
  self._upload_configs = {
      }
def Create(self, request, global_params=None):
r"""Creates a pre-built stored infoType to be used for inspection.
See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
learn more.
Args:
request: (DlpOrganizationsLocationsStoredInfoTypesCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GooglePrivacyDlpV2StoredInfoType) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path=u'v2/organizations/{organizationsId}/locations/{locationId}/storedInfoTypes',
http_method=u'POST',
method_id=u'dlp.organizations.locations.storedInfoTypes.create',
ordered_params=[u'parent', u'locationId'],
path_params=[u'locationId', u'parent'],
query_params=[],
relative_path=u'v2/{+parent}/locations/{locationId}/storedInfoTypes',
request_field=u'googlePrivacyDlpV2CreateStoredInfoTypeRequest',
request_type_name=u'DlpOrganizationsLocationsStoredInfoTypesCreateRequest',
response_type_name=u'GooglePrivacyDlpV2StoredInfoType',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a stored infoType.
See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
learn more.
Args:
request: (DlpOrganizationsLocationsStoredInfoTypesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path=u'v2/organizations/{organizationsId}/locations/{locationsId}/storedInfoTypes/{storedInfoTypesId}',
http_method=u'DELETE',
method_id=u'dlp.organizations.locations.storedInfoTypes.delete',
ordered_params=[u'name'],
path_params=[u'name'],
query_params=[],
relative_path=u'v2/{+name}',
request_field='',
request_type_name=u'DlpOrganizationsLocationsStoredInfoTypesDeleteRequest',
response_type_name=u'GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets a stored infoType.
See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
learn more.
Args:
request: (DlpOrganizationsLocationsStoredInfoTypesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GooglePrivacyDlpV2StoredInfoType) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path=u'v2/organizations/{organizationsId}/locations/{locationsId}/storedInfoTypes/{storedInfoTypesId}',
http_method=u'GET',
method_id=u'dlp.organizations.locations.storedInfoTypes.get',
ordered_params=[u'name'],
path_params=[u'name'],
query_params=[],
relative_path=u'v2/{+name}',
request_field='',
request_type_name=u'DlpOrganizationsLocationsStoredInfoTypesGetRequest',
response_type_name=u'GooglePrivacyDlpV2StoredInfoType',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists stored infoTypes.
See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
learn more.
Args:
request: (DlpOrganizationsLocationsStoredInfoTypesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GooglePrivacyDlpV2ListStoredInfoTypesResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path=u'v2/organizations/{organizationsId}/locations/{locationId}/storedInfoTypes',
http_method=u'GET',
method_id=u'dlp.organizations.locations.storedInfoTypes.list',
ordered_params=[u'parent', u'locationId'],
path_params=[u'locationId', u'parent'],
query_params=[u'orderBy', u'pageSize', u'pageToken'],
relative_path=u'v2/{+parent}/locations/{locationId}/storedInfoTypes',
request_field='',
request_type_name=u'DlpOrganizationsLocationsStoredInfoTypesListRequest',
response_type_name=u'GooglePrivacyDlpV2ListStoredInfoTypesResponse',
supports_download=False,
)
def Patch(self, request, global_params=None):
r"""Updates the stored infoType by creating a new version. The existing version.
will continue to be used until the new version is ready.
See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
learn more.
Args:
request: (DlpOrganizationsLocationsStoredInfoTypesPatchRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GooglePrivacyDlpV2StoredInfoType) The response message.
"""
config = self.GetMethodConfig('Patch')
return self._RunMethod(
config, request, global_params=global_params)
Patch.method_config = lambda: base_api.ApiMethodInfo(
flat_path=u'v2/organizations/{organizationsId}/locations/{locationsId}/storedInfoTypes/{storedInfoTypesId}',
http_method=u'PATCH',
method_id=u'dlp.organizations.locations.storedInfoTypes.patch',
ordered_params=[u'name'],
path_params=[u'name'],
query_params=[],
relative_path=u'v2/{+name}',
request_field=u'googlePrivacyDlpV2UpdateStoredInfoTypeRequest',
request_type_name=u'DlpOrganizationsLocationsStoredInfoTypesPatchRequest',
response_type_name=u'GooglePrivacyDlpV2StoredInfoType',
supports_download=False,
)
class OrganizationsLocationsService(base_api.BaseApiService):
  """Wrapper for the organizations_locations resource.

  This resource currently exposes no methods of its own; it exists to
  anchor the nested per-location services.
  """

  _NAME = u'organizations_locations'

  def __init__(self, client):
    super(DlpV2.OrganizationsLocationsService, self).__init__(client)
    # No media-upload endpoints on this resource.
    self._upload_configs = {}
class OrganizationsStoredInfoTypesService(base_api.BaseApiService):
  """Wrapper for the organizations_storedInfoTypes resource.

  Each public method forwards a typed request message through the shared
  apitools machinery; the REST wire details are held in the attached
  ``method_config`` callables.
  """

  _NAME = u'organizations_storedInfoTypes'

  def __init__(self, client):
    super(DlpV2.OrganizationsStoredInfoTypesService, self).__init__(client)
    # No media-upload endpoints on this resource.
    self._upload_configs = {}

  def Create(self, request, global_params=None):
    r"""Create a pre-built stored infoType to be used for inspection.

    See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
    learn more.

    Args:
      request: (DlpOrganizationsStoredInfoTypesCreateRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (GooglePrivacyDlpV2StoredInfoType) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Create'), request,
        global_params=global_params)

  Create.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/storedInfoTypes',
      http_method=u'POST',
      method_id=u'dlp.organizations.storedInfoTypes.create',
      ordered_params=[u'parent'],
      path_params=[u'parent'],
      query_params=[],
      relative_path=u'v2/{+parent}/storedInfoTypes',
      request_field=u'googlePrivacyDlpV2CreateStoredInfoTypeRequest',
      request_type_name=u'DlpOrganizationsStoredInfoTypesCreateRequest',
      response_type_name=u'GooglePrivacyDlpV2StoredInfoType',
      supports_download=False,
  )

  def Delete(self, request, global_params=None):
    r"""Delete a stored infoType.

    See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
    learn more.

    Args:
      request: (DlpOrganizationsStoredInfoTypesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (GoogleProtobufEmpty) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Delete'), request,
        global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/storedInfoTypes/{storedInfoTypesId}',
      http_method=u'DELETE',
      method_id=u'dlp.organizations.storedInfoTypes.delete',
      ordered_params=[u'name'],
      path_params=[u'name'],
      query_params=[],
      relative_path=u'v2/{+name}',
      request_field='',
      request_type_name=u'DlpOrganizationsStoredInfoTypesDeleteRequest',
      response_type_name=u'GoogleProtobufEmpty',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    r"""Fetch a single stored infoType by resource name.

    See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
    learn more.

    Args:
      request: (DlpOrganizationsStoredInfoTypesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (GooglePrivacyDlpV2StoredInfoType) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Get'), request,
        global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/storedInfoTypes/{storedInfoTypesId}',
      http_method=u'GET',
      method_id=u'dlp.organizations.storedInfoTypes.get',
      ordered_params=[u'name'],
      path_params=[u'name'],
      query_params=[],
      relative_path=u'v2/{+name}',
      request_field='',
      request_type_name=u'DlpOrganizationsStoredInfoTypesGetRequest',
      response_type_name=u'GooglePrivacyDlpV2StoredInfoType',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    r"""List the stored infoTypes under a parent, one page at a time.

    See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
    learn more.

    Args:
      request: (DlpOrganizationsStoredInfoTypesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (GooglePrivacyDlpV2ListStoredInfoTypesResponse) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('List'), request,
        global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/storedInfoTypes',
      http_method=u'GET',
      method_id=u'dlp.organizations.storedInfoTypes.list',
      ordered_params=[u'parent'],
      path_params=[u'parent'],
      query_params=[u'locationId', u'orderBy', u'pageSize', u'pageToken'],
      relative_path=u'v2/{+parent}/storedInfoTypes',
      request_field='',
      request_type_name=u'DlpOrganizationsStoredInfoTypesListRequest',
      response_type_name=u'GooglePrivacyDlpV2ListStoredInfoTypesResponse',
      supports_download=False,
  )

  def Patch(self, request, global_params=None):
    r"""Update a stored infoType by creating a new version.

    The existing version will continue to be used until the new version
    is ready.
    See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
    learn more.

    Args:
      request: (DlpOrganizationsStoredInfoTypesPatchRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (GooglePrivacyDlpV2StoredInfoType) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Patch'), request,
        global_params=global_params)

  Patch.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/organizations/{organizationsId}/storedInfoTypes/{storedInfoTypesId}',
      http_method=u'PATCH',
      method_id=u'dlp.organizations.storedInfoTypes.patch',
      ordered_params=[u'name'],
      path_params=[u'name'],
      query_params=[],
      relative_path=u'v2/{+name}',
      request_field=u'googlePrivacyDlpV2UpdateStoredInfoTypeRequest',
      request_type_name=u'DlpOrganizationsStoredInfoTypesPatchRequest',
      response_type_name=u'GooglePrivacyDlpV2StoredInfoType',
      supports_download=False,
  )
class OrganizationsService(base_api.BaseApiService):
  """Wrapper for the organizations resource.

  This resource currently exposes no methods of its own; it exists to
  anchor the nested organization-scoped services.
  """

  _NAME = u'organizations'

  def __init__(self, client):
    super(DlpV2.OrganizationsService, self).__init__(client)
    # No media-upload endpoints on this resource.
    self._upload_configs = {}
class ProjectsContentService(base_api.BaseApiService):
  """Wrapper for the projects_content resource.

  Each public method forwards a typed request message through the shared
  apitools machinery; the REST wire details are held in the attached
  ``method_config`` callables.
  """

  _NAME = u'projects_content'

  def __init__(self, client):
    super(DlpV2.ProjectsContentService, self).__init__(client)
    # No media-upload endpoints on this resource.
    self._upload_configs = {}

  def Deidentify(self, request, global_params=None):
    r"""De-identify potentially sensitive info from a ContentItem.

    This method has limits on input size and output size.
    See https://cloud.google.com/dlp/docs/deidentify-sensitive-data to
    learn more.

    When no InfoTypes or CustomInfoTypes are specified in this request, the
    system will automatically choose what detectors to run. By default this
    may be all types, but may change over time as detectors are updated.

    Args:
      request: (DlpProjectsContentDeidentifyRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (GooglePrivacyDlpV2DeidentifyContentResponse) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Deidentify'), request,
        global_params=global_params)

  Deidentify.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/projects/{projectsId}/content:deidentify',
      http_method=u'POST',
      method_id=u'dlp.projects.content.deidentify',
      ordered_params=[u'parent'],
      path_params=[u'parent'],
      query_params=[],
      relative_path=u'v2/{+parent}/content:deidentify',
      request_field=u'googlePrivacyDlpV2DeidentifyContentRequest',
      request_type_name=u'DlpProjectsContentDeidentifyRequest',
      response_type_name=u'GooglePrivacyDlpV2DeidentifyContentResponse',
      supports_download=False,
  )

  def Inspect(self, request, global_params=None):
    r"""Find potentially sensitive info in content.

    This method has limits on input size, processing time, and output size.

    When no InfoTypes or CustomInfoTypes are specified in this request, the
    system will automatically choose what detectors to run. By default this
    may be all types, but may change over time as detectors are updated.

    For how to guides, see https://cloud.google.com/dlp/docs/inspecting-images
    and https://cloud.google.com/dlp/docs/inspecting-text,

    Args:
      request: (DlpProjectsContentInspectRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (GooglePrivacyDlpV2InspectContentResponse) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Inspect'), request,
        global_params=global_params)

  Inspect.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/projects/{projectsId}/content:inspect',
      http_method=u'POST',
      method_id=u'dlp.projects.content.inspect',
      ordered_params=[u'parent'],
      path_params=[u'parent'],
      query_params=[],
      relative_path=u'v2/{+parent}/content:inspect',
      request_field=u'googlePrivacyDlpV2InspectContentRequest',
      request_type_name=u'DlpProjectsContentInspectRequest',
      response_type_name=u'GooglePrivacyDlpV2InspectContentResponse',
      supports_download=False,
  )

  def Reidentify(self, request, global_params=None):
    r"""Re-identify content that has been de-identified.

    See
    https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example
    to learn more.

    Args:
      request: (DlpProjectsContentReidentifyRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (GooglePrivacyDlpV2ReidentifyContentResponse) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Reidentify'), request,
        global_params=global_params)

  Reidentify.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/projects/{projectsId}/content:reidentify',
      http_method=u'POST',
      method_id=u'dlp.projects.content.reidentify',
      ordered_params=[u'parent'],
      path_params=[u'parent'],
      query_params=[],
      relative_path=u'v2/{+parent}/content:reidentify',
      request_field=u'googlePrivacyDlpV2ReidentifyContentRequest',
      request_type_name=u'DlpProjectsContentReidentifyRequest',
      response_type_name=u'GooglePrivacyDlpV2ReidentifyContentResponse',
      supports_download=False,
  )
class ProjectsDeidentifyTemplatesService(base_api.BaseApiService):
  """Wrapper for the projects_deidentifyTemplates resource.

  Each public method forwards a typed request message through the shared
  apitools machinery; the REST wire details are held in the attached
  ``method_config`` callables.
  """

  _NAME = u'projects_deidentifyTemplates'

  def __init__(self, client):
    super(DlpV2.ProjectsDeidentifyTemplatesService, self).__init__(client)
    # No media-upload endpoints on this resource.
    self._upload_configs = {}

  def Create(self, request, global_params=None):
    r"""Create a DeidentifyTemplate for re-using frequently used configuration.

    Templates apply to de-identifying content, images, and storage.
    See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
    more.

    Args:
      request: (DlpProjectsDeidentifyTemplatesCreateRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (GooglePrivacyDlpV2DeidentifyTemplate) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Create'), request,
        global_params=global_params)

  Create.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/projects/{projectsId}/deidentifyTemplates',
      http_method=u'POST',
      method_id=u'dlp.projects.deidentifyTemplates.create',
      ordered_params=[u'parent'],
      path_params=[u'parent'],
      query_params=[],
      relative_path=u'v2/{+parent}/deidentifyTemplates',
      request_field=u'googlePrivacyDlpV2CreateDeidentifyTemplateRequest',
      request_type_name=u'DlpProjectsDeidentifyTemplatesCreateRequest',
      response_type_name=u'GooglePrivacyDlpV2DeidentifyTemplate',
      supports_download=False,
  )

  def Delete(self, request, global_params=None):
    r"""Delete a DeidentifyTemplate.

    See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
    more.

    Args:
      request: (DlpProjectsDeidentifyTemplatesDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (GoogleProtobufEmpty) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Delete'), request,
        global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/projects/{projectsId}/deidentifyTemplates/{deidentifyTemplatesId}',
      http_method=u'DELETE',
      method_id=u'dlp.projects.deidentifyTemplates.delete',
      ordered_params=[u'name'],
      path_params=[u'name'],
      query_params=[],
      relative_path=u'v2/{+name}',
      request_field='',
      request_type_name=u'DlpProjectsDeidentifyTemplatesDeleteRequest',
      response_type_name=u'GoogleProtobufEmpty',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    r"""Fetch a single DeidentifyTemplate by resource name.

    See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
    more.

    Args:
      request: (DlpProjectsDeidentifyTemplatesGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (GooglePrivacyDlpV2DeidentifyTemplate) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Get'), request,
        global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/projects/{projectsId}/deidentifyTemplates/{deidentifyTemplatesId}',
      http_method=u'GET',
      method_id=u'dlp.projects.deidentifyTemplates.get',
      ordered_params=[u'name'],
      path_params=[u'name'],
      query_params=[],
      relative_path=u'v2/{+name}',
      request_field='',
      request_type_name=u'DlpProjectsDeidentifyTemplatesGetRequest',
      response_type_name=u'GooglePrivacyDlpV2DeidentifyTemplate',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    r"""List DeidentifyTemplates under a parent, one page at a time.

    See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
    more.

    Args:
      request: (DlpProjectsDeidentifyTemplatesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (GooglePrivacyDlpV2ListDeidentifyTemplatesResponse) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('List'), request,
        global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/projects/{projectsId}/deidentifyTemplates',
      http_method=u'GET',
      method_id=u'dlp.projects.deidentifyTemplates.list',
      ordered_params=[u'parent'],
      path_params=[u'parent'],
      query_params=[u'locationId', u'orderBy', u'pageSize', u'pageToken'],
      relative_path=u'v2/{+parent}/deidentifyTemplates',
      request_field='',
      request_type_name=u'DlpProjectsDeidentifyTemplatesListRequest',
      response_type_name=u'GooglePrivacyDlpV2ListDeidentifyTemplatesResponse',
      supports_download=False,
  )

  def Patch(self, request, global_params=None):
    r"""Update an existing DeidentifyTemplate.

    See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
    more.

    Args:
      request: (DlpProjectsDeidentifyTemplatesPatchRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (GooglePrivacyDlpV2DeidentifyTemplate) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Patch'), request,
        global_params=global_params)

  Patch.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/projects/{projectsId}/deidentifyTemplates/{deidentifyTemplatesId}',
      http_method=u'PATCH',
      method_id=u'dlp.projects.deidentifyTemplates.patch',
      ordered_params=[u'name'],
      path_params=[u'name'],
      query_params=[],
      relative_path=u'v2/{+name}',
      request_field=u'googlePrivacyDlpV2UpdateDeidentifyTemplateRequest',
      request_type_name=u'DlpProjectsDeidentifyTemplatesPatchRequest',
      response_type_name=u'GooglePrivacyDlpV2DeidentifyTemplate',
      supports_download=False,
  )
class ProjectsDlpJobsService(base_api.BaseApiService):
  """Wrapper for the projects_dlpJobs resource.

  Each public method forwards a typed request message through the shared
  apitools machinery; the REST wire details are held in the attached
  ``method_config`` callables.
  """

  _NAME = u'projects_dlpJobs'

  def __init__(self, client):
    super(DlpV2.ProjectsDlpJobsService, self).__init__(client)
    # No media-upload endpoints on this resource.
    self._upload_configs = {}

  def Cancel(self, request, global_params=None):
    r"""Start asynchronous cancellation on a long-running DlpJob.

    The server makes a best effort to cancel the DlpJob, but success is not
    guaranteed.
    See https://cloud.google.com/dlp/docs/inspecting-storage and
    https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more.

    Args:
      request: (DlpProjectsDlpJobsCancelRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (GoogleProtobufEmpty) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Cancel'), request,
        global_params=global_params)

  Cancel.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/projects/{projectsId}/dlpJobs/{dlpJobsId}:cancel',
      http_method=u'POST',
      method_id=u'dlp.projects.dlpJobs.cancel',
      ordered_params=[u'name'],
      path_params=[u'name'],
      query_params=[],
      relative_path=u'v2/{+name}:cancel',
      request_field=u'googlePrivacyDlpV2CancelDlpJobRequest',
      request_type_name=u'DlpProjectsDlpJobsCancelRequest',
      response_type_name=u'GoogleProtobufEmpty',
      supports_download=False,
  )

  def Create(self, request, global_params=None):
    r"""Create a new job to inspect storage or calculate risk metrics.

    See https://cloud.google.com/dlp/docs/inspecting-storage and
    https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more.

    When no InfoTypes or CustomInfoTypes are specified in inspect jobs, the
    system will automatically choose what detectors to run. By default this
    may be all types, but may change over time as detectors are updated.

    Args:
      request: (DlpProjectsDlpJobsCreateRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (GooglePrivacyDlpV2DlpJob) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Create'), request,
        global_params=global_params)

  Create.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/projects/{projectsId}/dlpJobs',
      http_method=u'POST',
      method_id=u'dlp.projects.dlpJobs.create',
      ordered_params=[u'parent'],
      path_params=[u'parent'],
      query_params=[],
      relative_path=u'v2/{+parent}/dlpJobs',
      request_field=u'googlePrivacyDlpV2CreateDlpJobRequest',
      request_type_name=u'DlpProjectsDlpJobsCreateRequest',
      response_type_name=u'GooglePrivacyDlpV2DlpJob',
      supports_download=False,
  )

  def Delete(self, request, global_params=None):
    r"""Delete a long-running DlpJob.

    This method indicates that the client is no longer interested in the
    DlpJob result. The job will be cancelled if possible.
    See https://cloud.google.com/dlp/docs/inspecting-storage and
    https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more.

    Args:
      request: (DlpProjectsDlpJobsDeleteRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (GoogleProtobufEmpty) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Delete'), request,
        global_params=global_params)

  Delete.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/projects/{projectsId}/dlpJobs/{dlpJobsId}',
      http_method=u'DELETE',
      method_id=u'dlp.projects.dlpJobs.delete',
      ordered_params=[u'name'],
      path_params=[u'name'],
      query_params=[],
      relative_path=u'v2/{+name}',
      request_field='',
      request_type_name=u'DlpProjectsDlpJobsDeleteRequest',
      response_type_name=u'GoogleProtobufEmpty',
      supports_download=False,
  )

  def Get(self, request, global_params=None):
    r"""Fetch the latest state of a long-running DlpJob.

    See https://cloud.google.com/dlp/docs/inspecting-storage and
    https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more.

    Args:
      request: (DlpProjectsDlpJobsGetRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (GooglePrivacyDlpV2DlpJob) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Get'), request,
        global_params=global_params)

  Get.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/projects/{projectsId}/dlpJobs/{dlpJobsId}',
      http_method=u'GET',
      method_id=u'dlp.projects.dlpJobs.get',
      ordered_params=[u'name'],
      path_params=[u'name'],
      query_params=[],
      relative_path=u'v2/{+name}',
      request_field='',
      request_type_name=u'DlpProjectsDlpJobsGetRequest',
      response_type_name=u'GooglePrivacyDlpV2DlpJob',
      supports_download=False,
  )

  def List(self, request, global_params=None):
    r"""List DlpJobs that match the specified filter in the request.

    See https://cloud.google.com/dlp/docs/inspecting-storage and
    https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more.

    Args:
      request: (DlpProjectsDlpJobsListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (GooglePrivacyDlpV2ListDlpJobsResponse) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('List'), request,
        global_params=global_params)

  List.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/projects/{projectsId}/dlpJobs',
      http_method=u'GET',
      method_id=u'dlp.projects.dlpJobs.list',
      ordered_params=[u'parent'],
      path_params=[u'parent'],
      query_params=[u'filter', u'locationId', u'orderBy', u'pageSize', u'pageToken', u'type'],
      relative_path=u'v2/{+parent}/dlpJobs',
      request_field='',
      request_type_name=u'DlpProjectsDlpJobsListRequest',
      response_type_name=u'GooglePrivacyDlpV2ListDlpJobsResponse',
      supports_download=False,
  )
class ProjectsImageService(base_api.BaseApiService):
  """Wrapper for the projects_image resource.

  The single Redact method forwards a typed request message through the
  shared apitools machinery; the REST wire details are held in the
  attached ``method_config`` callable.
  """

  _NAME = u'projects_image'

  def __init__(self, client):
    super(DlpV2.ProjectsImageService, self).__init__(client)
    # No media-upload endpoints on this resource.
    self._upload_configs = {}

  def Redact(self, request, global_params=None):
    r"""Redact potentially sensitive info from an image.

    This method has limits on input size, processing time, and output size.
    See https://cloud.google.com/dlp/docs/redacting-sensitive-data-images to
    learn more.

    When no InfoTypes or CustomInfoTypes are specified in this request, the
    system will automatically choose what detectors to run. By default this
    may be all types, but may change over time as detectors are updated.

    Args:
      request: (DlpProjectsImageRedactRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (GooglePrivacyDlpV2RedactImageResponse) The response message.
    """
    return self._RunMethod(
        self.GetMethodConfig('Redact'), request,
        global_params=global_params)

  Redact.method_config = lambda: base_api.ApiMethodInfo(
      flat_path=u'v2/projects/{projectsId}/image:redact',
      http_method=u'POST',
      method_id=u'dlp.projects.image.redact',
      ordered_params=[u'parent'],
      path_params=[u'parent'],
      query_params=[],
      relative_path=u'v2/{+parent}/image:redact',
      request_field=u'googlePrivacyDlpV2RedactImageRequest',
      request_type_name=u'DlpProjectsImageRedactRequest',
      response_type_name=u'GooglePrivacyDlpV2RedactImageResponse',
      supports_download=False,
  )
class ProjectsInspectTemplatesService(base_api.BaseApiService):
"""Service class for the projects_inspectTemplates resource."""
_NAME = u'projects_inspectTemplates'
def __init__(self, client):
super(DlpV2.ProjectsInspectTemplatesService, self).__init__(client)
self._upload_configs = {
}
def Create(self, request, global_params=None):
r"""Creates an InspectTemplate for re-using frequently used configuration.
for inspecting content, images, and storage.
See https://cloud.google.com/dlp/docs/creating-templates to learn more.
Args:
request: (DlpProjectsInspectTemplatesCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GooglePrivacyDlpV2InspectTemplate) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path=u'v2/projects/{projectsId}/inspectTemplates',
http_method=u'POST',
method_id=u'dlp.projects.inspectTemplates.create',
ordered_params=[u'parent'],
path_params=[u'parent'],
query_params=[],
relative_path=u'v2/{+parent}/inspectTemplates',
request_field=u'googlePrivacyDlpV2CreateInspectTemplateRequest',
request_type_name=u'DlpProjectsInspectTemplatesCreateRequest',
response_type_name=u'GooglePrivacyDlpV2InspectTemplate',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes an InspectTemplate.
See https://cloud.google.com/dlp/docs/creating-templates to learn more.
Args:
request: (DlpProjectsInspectTemplatesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path=u'v2/projects/{projectsId}/inspectTemplates/{inspectTemplatesId}',
http_method=u'DELETE',
method_id=u'dlp.projects.inspectTemplates.delete',
ordered_params=[u'name'],
path_params=[u'name'],
query_params=[],
relative_path=u'v2/{+name}',
request_field='',
request_type_name=u'DlpProjectsInspectTemplatesDeleteRequest',
response_type_name=u'GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets an InspectTemplate.
See https://cloud.google.com/dlp/docs/creating-templates to learn more.
Args:
request: (DlpProjectsInspectTemplatesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GooglePrivacyDlpV2InspectTemplate) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path=u'v2/projects/{projectsId}/inspectTemplates/{inspectTemplatesId}',
http_method=u'GET',
method_id=u'dlp.projects.inspectTemplates.get',
ordered_params=[u'name'],
path_params=[u'name'],
query_params=[],
relative_path=u'v2/{+name}',
request_field='',
request_type_name=u'DlpProjectsInspectTemplatesGetRequest',
response_type_name=u'GooglePrivacyDlpV2InspectTemplate',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists InspectTemplates.
See https://cloud.google.com/dlp/docs/creating-templates to learn more.
Args:
request: (DlpProjectsInspectTemplatesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GooglePrivacyDlpV2ListInspectTemplatesResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path=u'v2/projects/{projectsId}/inspectTemplates',
http_method=u'GET',
method_id=u'dlp.projects.inspectTemplates.list',
ordered_params=[u'parent'],
path_params=[u'parent'],
query_params=[u'locationId', u'orderBy', u'pageSize', u'pageToken'],
relative_path=u'v2/{+parent}/inspectTemplates',
request_field='',
request_type_name=u'DlpProjectsInspectTemplatesListRequest',
response_type_name=u'GooglePrivacyDlpV2ListInspectTemplatesResponse',
supports_download=False,
)
    def Patch(self, request, global_params=None):
      r"""Updates the InspectTemplate.

      See https://cloud.google.com/dlp/docs/creating-templates to learn more.

      Args:
        request: (DlpProjectsInspectTemplatesPatchRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2InspectTemplate) The response message.
      """
      config = self.GetMethodConfig('Patch')
      return self._RunMethod(
          config, request, global_params=global_params)

    # PATCH body is carried in the request's
    # googlePrivacyDlpV2UpdateInspectTemplateRequest field.
    Patch.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/inspectTemplates/{inspectTemplatesId}',
        http_method=u'PATCH',
        method_id=u'dlp.projects.inspectTemplates.patch',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field=u'googlePrivacyDlpV2UpdateInspectTemplateRequest',
        request_type_name=u'DlpProjectsInspectTemplatesPatchRequest',
        response_type_name=u'GooglePrivacyDlpV2InspectTemplate',
        supports_download=False,
    )
  # NOTE(review): apitools-generated service stub for the
  # projects.jobTriggers REST resource — do not hand-edit the config values.
  class ProjectsJobTriggersService(base_api.BaseApiService):
    """Service class for the projects_jobTriggers resource."""

    _NAME = u'projects_jobTriggers'

    def __init__(self, client):
      super(DlpV2.ProjectsJobTriggersService, self).__init__(client)
      self._upload_configs = {
          }

    def Activate(self, request, global_params=None):
      r"""Activate a job trigger.

      Causes the immediate execute of a trigger instead of waiting on the
      trigger event to occur.

      Args:
        request: (DlpProjectsJobTriggersActivateRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2DlpJob) The response message.
      """
      config = self.GetMethodConfig('Activate')
      return self._RunMethod(
          config, request, global_params=global_params)

    Activate.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/jobTriggers/{jobTriggersId}:activate',
        http_method=u'POST',
        method_id=u'dlp.projects.jobTriggers.activate',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}:activate',
        request_field=u'googlePrivacyDlpV2ActivateJobTriggerRequest',
        request_type_name=u'DlpProjectsJobTriggersActivateRequest',
        response_type_name=u'GooglePrivacyDlpV2DlpJob',
        supports_download=False,
    )

    def Create(self, request, global_params=None):
      r"""Creates a job trigger to run DLP actions such as scanning storage for sensitive information on a set schedule.

      See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more.

      Args:
        request: (DlpProjectsJobTriggersCreateRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2JobTrigger) The response message.
      """
      config = self.GetMethodConfig('Create')
      return self._RunMethod(
          config, request, global_params=global_params)

    Create.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/jobTriggers',
        http_method=u'POST',
        method_id=u'dlp.projects.jobTriggers.create',
        ordered_params=[u'parent'],
        path_params=[u'parent'],
        query_params=[],
        relative_path=u'v2/{+parent}/jobTriggers',
        request_field=u'googlePrivacyDlpV2CreateJobTriggerRequest',
        request_type_name=u'DlpProjectsJobTriggersCreateRequest',
        response_type_name=u'GooglePrivacyDlpV2JobTrigger',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
      r"""Deletes a job trigger.

      See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more.

      Args:
        request: (DlpProjectsJobTriggersDeleteRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GoogleProtobufEmpty) The response message.
      """
      config = self.GetMethodConfig('Delete')
      return self._RunMethod(
          config, request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/jobTriggers/{jobTriggersId}',
        http_method=u'DELETE',
        method_id=u'dlp.projects.jobTriggers.delete',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field='',
        request_type_name=u'DlpProjectsJobTriggersDeleteRequest',
        response_type_name=u'GoogleProtobufEmpty',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
      r"""Gets a job trigger.

      See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more.

      Args:
        request: (DlpProjectsJobTriggersGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2JobTrigger) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/jobTriggers/{jobTriggersId}',
        http_method=u'GET',
        method_id=u'dlp.projects.jobTriggers.get',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field='',
        request_type_name=u'DlpProjectsJobTriggersGetRequest',
        response_type_name=u'GooglePrivacyDlpV2JobTrigger',
        supports_download=False,
    )

    def List(self, request, global_params=None):
      r"""Lists job triggers.

      See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more.

      Args:
        request: (DlpProjectsJobTriggersListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2ListJobTriggersResponse) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/jobTriggers',
        http_method=u'GET',
        method_id=u'dlp.projects.jobTriggers.list',
        ordered_params=[u'parent'],
        path_params=[u'parent'],
        query_params=[u'filter', u'locationId', u'orderBy', u'pageSize', u'pageToken'],
        relative_path=u'v2/{+parent}/jobTriggers',
        request_field='',
        request_type_name=u'DlpProjectsJobTriggersListRequest',
        response_type_name=u'GooglePrivacyDlpV2ListJobTriggersResponse',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
      r"""Updates a job trigger.

      See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more.

      Args:
        request: (DlpProjectsJobTriggersPatchRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2JobTrigger) The response message.
      """
      config = self.GetMethodConfig('Patch')
      return self._RunMethod(
          config, request, global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/jobTriggers/{jobTriggersId}',
        http_method=u'PATCH',
        method_id=u'dlp.projects.jobTriggers.patch',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field=u'googlePrivacyDlpV2UpdateJobTriggerRequest',
        request_type_name=u'DlpProjectsJobTriggersPatchRequest',
        response_type_name=u'GooglePrivacyDlpV2JobTrigger',
        supports_download=False,
    )
  # NOTE(review): apitools-generated service stub for the
  # projects.locations.content REST resource — do not hand-edit the config
  # values.
  class ProjectsLocationsContentService(base_api.BaseApiService):
    """Service class for the projects_locations_content resource."""

    _NAME = u'projects_locations_content'

    def __init__(self, client):
      super(DlpV2.ProjectsLocationsContentService, self).__init__(client)
      self._upload_configs = {
          }

    def Deidentify(self, request, global_params=None):
      r"""De-identifies potentially sensitive info from a ContentItem.

      This method has limits on input size and output size.
      See https://cloud.google.com/dlp/docs/deidentify-sensitive-data to
      learn more.

      When no InfoTypes or CustomInfoTypes are specified in this request, the
      system will automatically choose what detectors to run. By default this may
      be all types, but may change over time as detectors are updated.

      Args:
        request: (DlpProjectsLocationsContentDeidentifyRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2DeidentifyContentResponse) The response message.
      """
      config = self.GetMethodConfig('Deidentify')
      return self._RunMethod(
          config, request, global_params=global_params)

    # Regional endpoint variant: locationId is a second ordered path param
    # alongside parent.
    Deidentify.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationId}/content:deidentify',
        http_method=u'POST',
        method_id=u'dlp.projects.locations.content.deidentify',
        ordered_params=[u'parent', u'locationId'],
        path_params=[u'locationId', u'parent'],
        query_params=[],
        relative_path=u'v2/{+parent}/locations/{locationId}/content:deidentify',
        request_field=u'googlePrivacyDlpV2DeidentifyContentRequest',
        request_type_name=u'DlpProjectsLocationsContentDeidentifyRequest',
        response_type_name=u'GooglePrivacyDlpV2DeidentifyContentResponse',
        supports_download=False,
    )

    def Inspect(self, request, global_params=None):
      r"""Finds potentially sensitive info in content.

      This method has limits on input size, processing time, and output size.

      When no InfoTypes or CustomInfoTypes are specified in this request, the
      system will automatically choose what detectors to run. By default this may
      be all types, but may change over time as detectors are updated.

      For how to guides, see https://cloud.google.com/dlp/docs/inspecting-images
      and https://cloud.google.com/dlp/docs/inspecting-text,

      Args:
        request: (DlpProjectsLocationsContentInspectRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2InspectContentResponse) The response message.
      """
      config = self.GetMethodConfig('Inspect')
      return self._RunMethod(
          config, request, global_params=global_params)

    Inspect.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationId}/content:inspect',
        http_method=u'POST',
        method_id=u'dlp.projects.locations.content.inspect',
        ordered_params=[u'parent', u'locationId'],
        path_params=[u'locationId', u'parent'],
        query_params=[],
        relative_path=u'v2/{+parent}/locations/{locationId}/content:inspect',
        request_field=u'googlePrivacyDlpV2InspectContentRequest',
        request_type_name=u'DlpProjectsLocationsContentInspectRequest',
        response_type_name=u'GooglePrivacyDlpV2InspectContentResponse',
        supports_download=False,
    )

    def Reidentify(self, request, global_params=None):
      r"""Re-identifies content that has been de-identified.

      See
      https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example
      to learn more.

      Args:
        request: (DlpProjectsLocationsContentReidentifyRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2ReidentifyContentResponse) The response message.
      """
      config = self.GetMethodConfig('Reidentify')
      return self._RunMethod(
          config, request, global_params=global_params)

    Reidentify.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationId}/content:reidentify',
        http_method=u'POST',
        method_id=u'dlp.projects.locations.content.reidentify',
        ordered_params=[u'parent', u'locationId'],
        path_params=[u'locationId', u'parent'],
        query_params=[],
        relative_path=u'v2/{+parent}/locations/{locationId}/content:reidentify',
        request_field=u'googlePrivacyDlpV2ReidentifyContentRequest',
        request_type_name=u'DlpProjectsLocationsContentReidentifyRequest',
        response_type_name=u'GooglePrivacyDlpV2ReidentifyContentResponse',
        supports_download=False,
    )
  # NOTE(review): apitools-generated service stub for the
  # projects.locations.deidentifyTemplates REST resource — do not hand-edit
  # the config values.
  class ProjectsLocationsDeidentifyTemplatesService(base_api.BaseApiService):
    """Service class for the projects_locations_deidentifyTemplates resource."""

    _NAME = u'projects_locations_deidentifyTemplates'

    def __init__(self, client):
      super(DlpV2.ProjectsLocationsDeidentifyTemplatesService, self).__init__(client)
      self._upload_configs = {
          }

    def Create(self, request, global_params=None):
      r"""Creates a DeidentifyTemplate for re-using frequently used configuration for de-identifying content, images, and storage.

      See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
      more.

      Args:
        request: (DlpProjectsLocationsDeidentifyTemplatesCreateRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2DeidentifyTemplate) The response message.
      """
      config = self.GetMethodConfig('Create')
      return self._RunMethod(
          config, request, global_params=global_params)

    # Create/List address the collection via parent + locationId; the
    # per-template Delete/Get/Patch below address a template by full name.
    Create.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationId}/deidentifyTemplates',
        http_method=u'POST',
        method_id=u'dlp.projects.locations.deidentifyTemplates.create',
        ordered_params=[u'parent', u'locationId'],
        path_params=[u'locationId', u'parent'],
        query_params=[],
        relative_path=u'v2/{+parent}/locations/{locationId}/deidentifyTemplates',
        request_field=u'googlePrivacyDlpV2CreateDeidentifyTemplateRequest',
        request_type_name=u'DlpProjectsLocationsDeidentifyTemplatesCreateRequest',
        response_type_name=u'GooglePrivacyDlpV2DeidentifyTemplate',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
      r"""Deletes a DeidentifyTemplate.

      See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
      more.

      Args:
        request: (DlpProjectsLocationsDeidentifyTemplatesDeleteRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GoogleProtobufEmpty) The response message.
      """
      config = self.GetMethodConfig('Delete')
      return self._RunMethod(
          config, request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationsId}/deidentifyTemplates/{deidentifyTemplatesId}',
        http_method=u'DELETE',
        method_id=u'dlp.projects.locations.deidentifyTemplates.delete',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field='',
        request_type_name=u'DlpProjectsLocationsDeidentifyTemplatesDeleteRequest',
        response_type_name=u'GoogleProtobufEmpty',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
      r"""Gets a DeidentifyTemplate.

      See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
      more.

      Args:
        request: (DlpProjectsLocationsDeidentifyTemplatesGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2DeidentifyTemplate) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationsId}/deidentifyTemplates/{deidentifyTemplatesId}',
        http_method=u'GET',
        method_id=u'dlp.projects.locations.deidentifyTemplates.get',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field='',
        request_type_name=u'DlpProjectsLocationsDeidentifyTemplatesGetRequest',
        response_type_name=u'GooglePrivacyDlpV2DeidentifyTemplate',
        supports_download=False,
    )

    def List(self, request, global_params=None):
      r"""Lists DeidentifyTemplates.

      See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
      more.

      Args:
        request: (DlpProjectsLocationsDeidentifyTemplatesListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2ListDeidentifyTemplatesResponse) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationId}/deidentifyTemplates',
        http_method=u'GET',
        method_id=u'dlp.projects.locations.deidentifyTemplates.list',
        ordered_params=[u'parent', u'locationId'],
        path_params=[u'locationId', u'parent'],
        query_params=[u'orderBy', u'pageSize', u'pageToken'],
        relative_path=u'v2/{+parent}/locations/{locationId}/deidentifyTemplates',
        request_field='',
        request_type_name=u'DlpProjectsLocationsDeidentifyTemplatesListRequest',
        response_type_name=u'GooglePrivacyDlpV2ListDeidentifyTemplatesResponse',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
      r"""Updates the DeidentifyTemplate.

      See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
      more.

      Args:
        request: (DlpProjectsLocationsDeidentifyTemplatesPatchRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2DeidentifyTemplate) The response message.
      """
      config = self.GetMethodConfig('Patch')
      return self._RunMethod(
          config, request, global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationsId}/deidentifyTemplates/{deidentifyTemplatesId}',
        http_method=u'PATCH',
        method_id=u'dlp.projects.locations.deidentifyTemplates.patch',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field=u'googlePrivacyDlpV2UpdateDeidentifyTemplateRequest',
        request_type_name=u'DlpProjectsLocationsDeidentifyTemplatesPatchRequest',
        response_type_name=u'GooglePrivacyDlpV2DeidentifyTemplate',
        supports_download=False,
    )
  # NOTE(review): apitools-generated service stub for the
  # projects.locations.dlpJobs REST resource — do not hand-edit the config
  # values.
  class ProjectsLocationsDlpJobsService(base_api.BaseApiService):
    """Service class for the projects_locations_dlpJobs resource."""

    _NAME = u'projects_locations_dlpJobs'

    def __init__(self, client):
      super(DlpV2.ProjectsLocationsDlpJobsService, self).__init__(client)
      self._upload_configs = {
          }

    def Cancel(self, request, global_params=None):
      r"""Starts asynchronous cancellation on a long-running DlpJob.

      The server makes a best effort to cancel the DlpJob, but success is not
      guaranteed.
      See https://cloud.google.com/dlp/docs/inspecting-storage and
      https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more.

      Args:
        request: (DlpProjectsLocationsDlpJobsCancelRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GoogleProtobufEmpty) The response message.
      """
      config = self.GetMethodConfig('Cancel')
      return self._RunMethod(
          config, request, global_params=global_params)

    Cancel.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationsId}/dlpJobs/{dlpJobsId}:cancel',
        http_method=u'POST',
        method_id=u'dlp.projects.locations.dlpJobs.cancel',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}:cancel',
        request_field=u'googlePrivacyDlpV2CancelDlpJobRequest',
        request_type_name=u'DlpProjectsLocationsDlpJobsCancelRequest',
        response_type_name=u'GoogleProtobufEmpty',
        supports_download=False,
    )

    def Create(self, request, global_params=None):
      r"""Creates a new job to inspect storage or calculate risk metrics.

      See https://cloud.google.com/dlp/docs/inspecting-storage and
      https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more.

      When no InfoTypes or CustomInfoTypes are specified in inspect jobs, the
      system will automatically choose what detectors to run. By default this may
      be all types, but may change over time as detectors are updated.

      Args:
        request: (DlpProjectsLocationsDlpJobsCreateRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2DlpJob) The response message.
      """
      config = self.GetMethodConfig('Create')
      return self._RunMethod(
          config, request, global_params=global_params)

    Create.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationId}/dlpJobs',
        http_method=u'POST',
        method_id=u'dlp.projects.locations.dlpJobs.create',
        ordered_params=[u'parent', u'locationId'],
        path_params=[u'locationId', u'parent'],
        query_params=[],
        relative_path=u'v2/{+parent}/locations/{locationId}/dlpJobs',
        request_field=u'googlePrivacyDlpV2CreateDlpJobRequest',
        request_type_name=u'DlpProjectsLocationsDlpJobsCreateRequest',
        response_type_name=u'GooglePrivacyDlpV2DlpJob',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
      r"""Deletes a long-running DlpJob.

      This method indicates that the client is no longer interested in the
      DlpJob result. The job will be cancelled if possible.
      See https://cloud.google.com/dlp/docs/inspecting-storage and
      https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more.

      Args:
        request: (DlpProjectsLocationsDlpJobsDeleteRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GoogleProtobufEmpty) The response message.
      """
      config = self.GetMethodConfig('Delete')
      return self._RunMethod(
          config, request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationsId}/dlpJobs/{dlpJobsId}',
        http_method=u'DELETE',
        method_id=u'dlp.projects.locations.dlpJobs.delete',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field='',
        request_type_name=u'DlpProjectsLocationsDlpJobsDeleteRequest',
        response_type_name=u'GoogleProtobufEmpty',
        supports_download=False,
    )

    def Finish(self, request, global_params=None):
      r"""Finish a running hybrid DlpJob.

      Triggers the finalization steps and running of any enabled actions that
      have not yet run.
      Early access feature is in a pre-release state and might change or have
      limited support. For more information, see
      https://cloud.google.com/products#product-launch-stages.

      Args:
        request: (DlpProjectsLocationsDlpJobsFinishRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GoogleProtobufEmpty) The response message.
      """
      config = self.GetMethodConfig('Finish')
      return self._RunMethod(
          config, request, global_params=global_params)

    Finish.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationsId}/dlpJobs/{dlpJobsId}:finish',
        http_method=u'POST',
        method_id=u'dlp.projects.locations.dlpJobs.finish',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}:finish',
        request_field=u'googlePrivacyDlpV2FinishDlpJobRequest',
        request_type_name=u'DlpProjectsLocationsDlpJobsFinishRequest',
        response_type_name=u'GoogleProtobufEmpty',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
      r"""Gets the latest state of a long-running DlpJob.

      See https://cloud.google.com/dlp/docs/inspecting-storage and
      https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more.

      Args:
        request: (DlpProjectsLocationsDlpJobsGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2DlpJob) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationsId}/dlpJobs/{dlpJobsId}',
        http_method=u'GET',
        method_id=u'dlp.projects.locations.dlpJobs.get',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field='',
        request_type_name=u'DlpProjectsLocationsDlpJobsGetRequest',
        response_type_name=u'GooglePrivacyDlpV2DlpJob',
        supports_download=False,
    )

    def HybridInspect(self, request, global_params=None):
      r"""Inspect hybrid content and store findings to a job.

      To review the findings inspect the job. Inspection will occur
      asynchronously.
      Early access feature is in a pre-release state and might change or have
      limited support. For more information, see
      https://cloud.google.com/products#product-launch-stages.

      Args:
        request: (DlpProjectsLocationsDlpJobsHybridInspectRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2HybridInspectResponse) The response message.
      """
      config = self.GetMethodConfig('HybridInspect')
      return self._RunMethod(
          config, request, global_params=global_params)

    HybridInspect.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationsId}/dlpJobs/{dlpJobsId}:hybridInspect',
        http_method=u'POST',
        method_id=u'dlp.projects.locations.dlpJobs.hybridInspect',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}:hybridInspect',
        request_field=u'googlePrivacyDlpV2HybridInspectDlpJobRequest',
        request_type_name=u'DlpProjectsLocationsDlpJobsHybridInspectRequest',
        response_type_name=u'GooglePrivacyDlpV2HybridInspectResponse',
        supports_download=False,
    )

    def List(self, request, global_params=None):
      r"""Lists DlpJobs that match the specified filter in the request.

      See https://cloud.google.com/dlp/docs/inspecting-storage and
      https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more.

      Args:
        request: (DlpProjectsLocationsDlpJobsListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2ListDlpJobsResponse) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationId}/dlpJobs',
        http_method=u'GET',
        method_id=u'dlp.projects.locations.dlpJobs.list',
        ordered_params=[u'parent', u'locationId'],
        path_params=[u'locationId', u'parent'],
        query_params=[u'filter', u'orderBy', u'pageSize', u'pageToken', u'type'],
        relative_path=u'v2/{+parent}/locations/{locationId}/dlpJobs',
        request_field='',
        request_type_name=u'DlpProjectsLocationsDlpJobsListRequest',
        response_type_name=u'GooglePrivacyDlpV2ListDlpJobsResponse',
        supports_download=False,
    )
  # NOTE(review): apitools-generated service stub for the
  # projects.locations.image REST resource — do not hand-edit the config
  # values.
  class ProjectsLocationsImageService(base_api.BaseApiService):
    """Service class for the projects_locations_image resource."""

    _NAME = u'projects_locations_image'

    def __init__(self, client):
      super(DlpV2.ProjectsLocationsImageService, self).__init__(client)
      self._upload_configs = {
          }

    def Redact(self, request, global_params=None):
      r"""Redacts potentially sensitive info from an image.

      This method has limits on input size, processing time, and output size.
      See https://cloud.google.com/dlp/docs/redacting-sensitive-data-images to
      learn more.

      When no InfoTypes or CustomInfoTypes are specified in this request, the
      system will automatically choose what detectors to run. By default this may
      be all types, but may change over time as detectors are updated.

      Args:
        request: (DlpProjectsLocationsImageRedactRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2RedactImageResponse) The response message.
      """
      config = self.GetMethodConfig('Redact')
      return self._RunMethod(
          config, request, global_params=global_params)

    Redact.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationId}/image:redact',
        http_method=u'POST',
        method_id=u'dlp.projects.locations.image.redact',
        ordered_params=[u'parent', u'locationId'],
        path_params=[u'locationId', u'parent'],
        query_params=[],
        relative_path=u'v2/{+parent}/locations/{locationId}/image:redact',
        request_field=u'googlePrivacyDlpV2RedactImageRequest',
        request_type_name=u'DlpProjectsLocationsImageRedactRequest',
        response_type_name=u'GooglePrivacyDlpV2RedactImageResponse',
        supports_download=False,
    )
class ProjectsLocationsInspectTemplatesService(base_api.BaseApiService):
"""Service class for the projects_locations_inspectTemplates resource."""
_NAME = u'projects_locations_inspectTemplates'
def __init__(self, client):
super(DlpV2.ProjectsLocationsInspectTemplatesService, self).__init__(client)
self._upload_configs = {
}
def Create(self, request, global_params=None):
r"""Creates an InspectTemplate for re-using frequently used configuration.
for inspecting content, images, and storage.
See https://cloud.google.com/dlp/docs/creating-templates to learn more.
Args:
request: (DlpProjectsLocationsInspectTemplatesCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GooglePrivacyDlpV2InspectTemplate) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path=u'v2/projects/{projectsId}/locations/{locationId}/inspectTemplates',
http_method=u'POST',
method_id=u'dlp.projects.locations.inspectTemplates.create',
ordered_params=[u'parent', u'locationId'],
path_params=[u'locationId', u'parent'],
query_params=[],
relative_path=u'v2/{+parent}/locations/{locationId}/inspectTemplates',
request_field=u'googlePrivacyDlpV2CreateInspectTemplateRequest',
request_type_name=u'DlpProjectsLocationsInspectTemplatesCreateRequest',
response_type_name=u'GooglePrivacyDlpV2InspectTemplate',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes an InspectTemplate.
See https://cloud.google.com/dlp/docs/creating-templates to learn more.
Args:
request: (DlpProjectsLocationsInspectTemplatesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path=u'v2/projects/{projectsId}/locations/{locationsId}/inspectTemplates/{inspectTemplatesId}',
http_method=u'DELETE',
method_id=u'dlp.projects.locations.inspectTemplates.delete',
ordered_params=[u'name'],
path_params=[u'name'],
query_params=[],
relative_path=u'v2/{+name}',
request_field='',
request_type_name=u'DlpProjectsLocationsInspectTemplatesDeleteRequest',
response_type_name=u'GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets an InspectTemplate.
See https://cloud.google.com/dlp/docs/creating-templates to learn more.
Args:
request: (DlpProjectsLocationsInspectTemplatesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GooglePrivacyDlpV2InspectTemplate) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path=u'v2/projects/{projectsId}/locations/{locationsId}/inspectTemplates/{inspectTemplatesId}',
http_method=u'GET',
method_id=u'dlp.projects.locations.inspectTemplates.get',
ordered_params=[u'name'],
path_params=[u'name'],
query_params=[],
relative_path=u'v2/{+name}',
request_field='',
request_type_name=u'DlpProjectsLocationsInspectTemplatesGetRequest',
response_type_name=u'GooglePrivacyDlpV2InspectTemplate',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists InspectTemplates.
See https://cloud.google.com/dlp/docs/creating-templates to learn more.
Args:
request: (DlpProjectsLocationsInspectTemplatesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GooglePrivacyDlpV2ListInspectTemplatesResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path=u'v2/projects/{projectsId}/locations/{locationId}/inspectTemplates',
http_method=u'GET',
method_id=u'dlp.projects.locations.inspectTemplates.list',
ordered_params=[u'parent', u'locationId'],
path_params=[u'locationId', u'parent'],
query_params=[u'orderBy', u'pageSize', u'pageToken'],
relative_path=u'v2/{+parent}/locations/{locationId}/inspectTemplates',
request_field='',
request_type_name=u'DlpProjectsLocationsInspectTemplatesListRequest',
response_type_name=u'GooglePrivacyDlpV2ListInspectTemplatesResponse',
supports_download=False,
)
def Patch(self, request, global_params=None):
    r"""Updates the InspectTemplate.

    See https://cloud.google.com/dlp/docs/creating-templates to learn more.

    Args:
      request: (DlpProjectsLocationsInspectTemplatesPatchRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (GooglePrivacyDlpV2InspectTemplate) The response message.
    """
    # Generated-style wrapper: look up the declarative config and delegate.
    config = self.GetMethodConfig('Patch')
    return self._RunMethod(
        config, request, global_params=global_params)

# Static HTTP routing/serialization metadata consumed by _RunMethod.
Patch.method_config = lambda: base_api.ApiMethodInfo(
    flat_path=u'v2/projects/{projectsId}/locations/{locationsId}/inspectTemplates/{inspectTemplatesId}',
    http_method=u'PATCH',
    method_id=u'dlp.projects.locations.inspectTemplates.patch',
    ordered_params=[u'name'],
    path_params=[u'name'],
    query_params=[],
    relative_path=u'v2/{+name}',
    request_field=u'googlePrivacyDlpV2UpdateInspectTemplateRequest',
    request_type_name=u'DlpProjectsLocationsInspectTemplatesPatchRequest',
    response_type_name=u'GooglePrivacyDlpV2InspectTemplate',
    supports_download=False,
)
class ProjectsLocationsJobTriggersService(base_api.BaseApiService):
    """Service class for the projects_locations_jobTriggers resource.

    NOTE(review): apitools-style generated wrapper. Every method only looks
    up its declarative ``method_config`` and delegates to
    ``BaseApiService._RunMethod``; do not hand-edit the metadata.
    """

    _NAME = u'projects_locations_jobTriggers'

    def __init__(self, client):
        super(DlpV2.ProjectsLocationsJobTriggersService, self).__init__(client)
        # No media-upload configurations are defined for this resource.
        self._upload_configs = {
            }

    def Activate(self, request, global_params=None):
        r"""Activate a job trigger. Causes the immediate execute of a trigger.
        instead of waiting on the trigger event to occur.

        Args:
          request: (DlpProjectsLocationsJobTriggersActivateRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GooglePrivacyDlpV2DlpJob) The response message.
        """
        config = self.GetMethodConfig('Activate')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Static HTTP routing/serialization metadata consumed by _RunMethod.
    Activate.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationsId}/jobTriggers/{jobTriggersId}:activate',
        http_method=u'POST',
        method_id=u'dlp.projects.locations.jobTriggers.activate',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}:activate',
        request_field=u'googlePrivacyDlpV2ActivateJobTriggerRequest',
        request_type_name=u'DlpProjectsLocationsJobTriggersActivateRequest',
        response_type_name=u'GooglePrivacyDlpV2DlpJob',
        supports_download=False,
    )

    def Create(self, request, global_params=None):
        r"""Creates a job trigger to run DLP actions such as scanning storage for.
        sensitive information on a set schedule.
        See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more.

        Args:
          request: (DlpProjectsLocationsJobTriggersCreateRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GooglePrivacyDlpV2JobTrigger) The response message.
        """
        config = self.GetMethodConfig('Create')
        return self._RunMethod(
            config, request, global_params=global_params)

    Create.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationId}/jobTriggers',
        http_method=u'POST',
        method_id=u'dlp.projects.locations.jobTriggers.create',
        ordered_params=[u'parent', u'locationId'],
        path_params=[u'locationId', u'parent'],
        query_params=[],
        relative_path=u'v2/{+parent}/locations/{locationId}/jobTriggers',
        request_field=u'googlePrivacyDlpV2CreateJobTriggerRequest',
        request_type_name=u'DlpProjectsLocationsJobTriggersCreateRequest',
        response_type_name=u'GooglePrivacyDlpV2JobTrigger',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
        r"""Deletes a job trigger.
        See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more.

        Args:
          request: (DlpProjectsLocationsJobTriggersDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GoogleProtobufEmpty) The response message.
        """
        config = self.GetMethodConfig('Delete')
        return self._RunMethod(
            config, request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationsId}/jobTriggers/{jobTriggersId}',
        http_method=u'DELETE',
        method_id=u'dlp.projects.locations.jobTriggers.delete',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field='',
        request_type_name=u'DlpProjectsLocationsJobTriggersDeleteRequest',
        response_type_name=u'GoogleProtobufEmpty',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        r"""Gets a job trigger.
        See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more.

        Args:
          request: (DlpProjectsLocationsJobTriggersGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GooglePrivacyDlpV2JobTrigger) The response message.
        """
        config = self.GetMethodConfig('Get')
        return self._RunMethod(
            config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationsId}/jobTriggers/{jobTriggersId}',
        http_method=u'GET',
        method_id=u'dlp.projects.locations.jobTriggers.get',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field='',
        request_type_name=u'DlpProjectsLocationsJobTriggersGetRequest',
        response_type_name=u'GooglePrivacyDlpV2JobTrigger',
        supports_download=False,
    )

    def HybridInspect(self, request, global_params=None):
        r"""Inspect hybrid content and store findings to a trigger. The inspection.
        will be processed asynchronously. To review the findings monitor the
        jobs within the trigger.
        Early access feature is in a pre-release state and might change or have
        limited support. For more information, see
        https://cloud.google.com/products#product-launch-stages.

        Args:
          request: (DlpProjectsLocationsJobTriggersHybridInspectRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GooglePrivacyDlpV2HybridInspectResponse) The response message.
        """
        config = self.GetMethodConfig('HybridInspect')
        return self._RunMethod(
            config, request, global_params=global_params)

    HybridInspect.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationsId}/jobTriggers/{jobTriggersId}:hybridInspect',
        http_method=u'POST',
        method_id=u'dlp.projects.locations.jobTriggers.hybridInspect',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}:hybridInspect',
        request_field=u'googlePrivacyDlpV2HybridInspectJobTriggerRequest',
        request_type_name=u'DlpProjectsLocationsJobTriggersHybridInspectRequest',
        response_type_name=u'GooglePrivacyDlpV2HybridInspectResponse',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        r"""Lists job triggers.
        See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more.

        Args:
          request: (DlpProjectsLocationsJobTriggersListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GooglePrivacyDlpV2ListJobTriggersResponse) The response message.
        """
        config = self.GetMethodConfig('List')
        return self._RunMethod(
            config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationId}/jobTriggers',
        http_method=u'GET',
        method_id=u'dlp.projects.locations.jobTriggers.list',
        ordered_params=[u'parent', u'locationId'],
        path_params=[u'locationId', u'parent'],
        query_params=[u'filter', u'orderBy', u'pageSize', u'pageToken'],
        relative_path=u'v2/{+parent}/locations/{locationId}/jobTriggers',
        request_field='',
        request_type_name=u'DlpProjectsLocationsJobTriggersListRequest',
        response_type_name=u'GooglePrivacyDlpV2ListJobTriggersResponse',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
        r"""Updates a job trigger.
        See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more.

        Args:
          request: (DlpProjectsLocationsJobTriggersPatchRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GooglePrivacyDlpV2JobTrigger) The response message.
        """
        config = self.GetMethodConfig('Patch')
        return self._RunMethod(
            config, request, global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationsId}/jobTriggers/{jobTriggersId}',
        http_method=u'PATCH',
        method_id=u'dlp.projects.locations.jobTriggers.patch',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field=u'googlePrivacyDlpV2UpdateJobTriggerRequest',
        request_type_name=u'DlpProjectsLocationsJobTriggersPatchRequest',
        response_type_name=u'GooglePrivacyDlpV2JobTrigger',
        supports_download=False,
    )
class ProjectsLocationsStoredInfoTypesService(base_api.BaseApiService):
    """Service class for the projects_locations_storedInfoTypes resource.

    NOTE(review): apitools-style generated wrapper. Every method only looks
    up its declarative ``method_config`` and delegates to
    ``BaseApiService._RunMethod``; do not hand-edit the metadata.
    """

    _NAME = u'projects_locations_storedInfoTypes'

    def __init__(self, client):
        super(DlpV2.ProjectsLocationsStoredInfoTypesService, self).__init__(client)
        # No media-upload configurations are defined for this resource.
        self._upload_configs = {
            }

    def Create(self, request, global_params=None):
        r"""Creates a pre-built stored infoType to be used for inspection.
        See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
        learn more.

        Args:
          request: (DlpProjectsLocationsStoredInfoTypesCreateRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GooglePrivacyDlpV2StoredInfoType) The response message.
        """
        config = self.GetMethodConfig('Create')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Static HTTP routing/serialization metadata consumed by _RunMethod.
    Create.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationId}/storedInfoTypes',
        http_method=u'POST',
        method_id=u'dlp.projects.locations.storedInfoTypes.create',
        ordered_params=[u'parent', u'locationId'],
        path_params=[u'locationId', u'parent'],
        query_params=[],
        relative_path=u'v2/{+parent}/locations/{locationId}/storedInfoTypes',
        request_field=u'googlePrivacyDlpV2CreateStoredInfoTypeRequest',
        request_type_name=u'DlpProjectsLocationsStoredInfoTypesCreateRequest',
        response_type_name=u'GooglePrivacyDlpV2StoredInfoType',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
        r"""Deletes a stored infoType.
        See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
        learn more.

        Args:
          request: (DlpProjectsLocationsStoredInfoTypesDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GoogleProtobufEmpty) The response message.
        """
        config = self.GetMethodConfig('Delete')
        return self._RunMethod(
            config, request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationsId}/storedInfoTypes/{storedInfoTypesId}',
        http_method=u'DELETE',
        method_id=u'dlp.projects.locations.storedInfoTypes.delete',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field='',
        request_type_name=u'DlpProjectsLocationsStoredInfoTypesDeleteRequest',
        response_type_name=u'GoogleProtobufEmpty',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        r"""Gets a stored infoType.
        See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
        learn more.

        Args:
          request: (DlpProjectsLocationsStoredInfoTypesGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GooglePrivacyDlpV2StoredInfoType) The response message.
        """
        config = self.GetMethodConfig('Get')
        return self._RunMethod(
            config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationsId}/storedInfoTypes/{storedInfoTypesId}',
        http_method=u'GET',
        method_id=u'dlp.projects.locations.storedInfoTypes.get',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field='',
        request_type_name=u'DlpProjectsLocationsStoredInfoTypesGetRequest',
        response_type_name=u'GooglePrivacyDlpV2StoredInfoType',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        r"""Lists stored infoTypes.
        See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
        learn more.

        Args:
          request: (DlpProjectsLocationsStoredInfoTypesListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GooglePrivacyDlpV2ListStoredInfoTypesResponse) The response message.
        """
        config = self.GetMethodConfig('List')
        return self._RunMethod(
            config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationId}/storedInfoTypes',
        http_method=u'GET',
        method_id=u'dlp.projects.locations.storedInfoTypes.list',
        ordered_params=[u'parent', u'locationId'],
        path_params=[u'locationId', u'parent'],
        query_params=[u'orderBy', u'pageSize', u'pageToken'],
        relative_path=u'v2/{+parent}/locations/{locationId}/storedInfoTypes',
        request_field='',
        request_type_name=u'DlpProjectsLocationsStoredInfoTypesListRequest',
        response_type_name=u'GooglePrivacyDlpV2ListStoredInfoTypesResponse',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
        r"""Updates the stored infoType by creating a new version. The existing version.
        will continue to be used until the new version is ready.
        See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
        learn more.

        Args:
          request: (DlpProjectsLocationsStoredInfoTypesPatchRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GooglePrivacyDlpV2StoredInfoType) The response message.
        """
        config = self.GetMethodConfig('Patch')
        return self._RunMethod(
            config, request, global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/locations/{locationsId}/storedInfoTypes/{storedInfoTypesId}',
        http_method=u'PATCH',
        method_id=u'dlp.projects.locations.storedInfoTypes.patch',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field=u'googlePrivacyDlpV2UpdateStoredInfoTypeRequest',
        request_type_name=u'DlpProjectsLocationsStoredInfoTypesPatchRequest',
        response_type_name=u'GooglePrivacyDlpV2StoredInfoType',
        supports_download=False,
    )
class ProjectsLocationsService(base_api.BaseApiService):
    """Service class for the projects_locations resource.

    Container resource: exposes no API methods of its own in this surface.
    """

    _NAME = u'projects_locations'

    def __init__(self, client):
        super(DlpV2.ProjectsLocationsService, self).__init__(client)
        # No media-upload configurations are defined for this resource.
        self._upload_configs = {
            }
class ProjectsStoredInfoTypesService(base_api.BaseApiService):
    """Service class for the projects_storedInfoTypes resource.

    NOTE(review): apitools-style generated wrapper (non-location variant of
    the storedInfoTypes service). Every method only looks up its declarative
    ``method_config`` and delegates to ``BaseApiService._RunMethod``.
    """

    _NAME = u'projects_storedInfoTypes'

    def __init__(self, client):
        super(DlpV2.ProjectsStoredInfoTypesService, self).__init__(client)
        # No media-upload configurations are defined for this resource.
        self._upload_configs = {
            }

    def Create(self, request, global_params=None):
        r"""Creates a pre-built stored infoType to be used for inspection.
        See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
        learn more.

        Args:
          request: (DlpProjectsStoredInfoTypesCreateRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GooglePrivacyDlpV2StoredInfoType) The response message.
        """
        config = self.GetMethodConfig('Create')
        return self._RunMethod(
            config, request, global_params=global_params)

    # Static HTTP routing/serialization metadata consumed by _RunMethod.
    Create.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/storedInfoTypes',
        http_method=u'POST',
        method_id=u'dlp.projects.storedInfoTypes.create',
        ordered_params=[u'parent'],
        path_params=[u'parent'],
        query_params=[],
        relative_path=u'v2/{+parent}/storedInfoTypes',
        request_field=u'googlePrivacyDlpV2CreateStoredInfoTypeRequest',
        request_type_name=u'DlpProjectsStoredInfoTypesCreateRequest',
        response_type_name=u'GooglePrivacyDlpV2StoredInfoType',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
        r"""Deletes a stored infoType.
        See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
        learn more.

        Args:
          request: (DlpProjectsStoredInfoTypesDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GoogleProtobufEmpty) The response message.
        """
        config = self.GetMethodConfig('Delete')
        return self._RunMethod(
            config, request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/storedInfoTypes/{storedInfoTypesId}',
        http_method=u'DELETE',
        method_id=u'dlp.projects.storedInfoTypes.delete',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field='',
        request_type_name=u'DlpProjectsStoredInfoTypesDeleteRequest',
        response_type_name=u'GoogleProtobufEmpty',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        r"""Gets a stored infoType.
        See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
        learn more.

        Args:
          request: (DlpProjectsStoredInfoTypesGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GooglePrivacyDlpV2StoredInfoType) The response message.
        """
        config = self.GetMethodConfig('Get')
        return self._RunMethod(
            config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/storedInfoTypes/{storedInfoTypesId}',
        http_method=u'GET',
        method_id=u'dlp.projects.storedInfoTypes.get',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field='',
        request_type_name=u'DlpProjectsStoredInfoTypesGetRequest',
        response_type_name=u'GooglePrivacyDlpV2StoredInfoType',
        supports_download=False,
    )

    def List(self, request, global_params=None):
        r"""Lists stored infoTypes.
        See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
        learn more.

        Args:
          request: (DlpProjectsStoredInfoTypesListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GooglePrivacyDlpV2ListStoredInfoTypesResponse) The response message.
        """
        config = self.GetMethodConfig('List')
        return self._RunMethod(
            config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/storedInfoTypes',
        http_method=u'GET',
        method_id=u'dlp.projects.storedInfoTypes.list',
        ordered_params=[u'parent'],
        path_params=[u'parent'],
        query_params=[u'locationId', u'orderBy', u'pageSize', u'pageToken'],
        relative_path=u'v2/{+parent}/storedInfoTypes',
        request_field='',
        request_type_name=u'DlpProjectsStoredInfoTypesListRequest',
        response_type_name=u'GooglePrivacyDlpV2ListStoredInfoTypesResponse',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
        r"""Updates the stored infoType by creating a new version. The existing version.
        will continue to be used until the new version is ready.
        See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
        learn more.

        Args:
          request: (DlpProjectsStoredInfoTypesPatchRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments
        Returns:
          (GooglePrivacyDlpV2StoredInfoType) The response message.
        """
        config = self.GetMethodConfig('Patch')
        return self._RunMethod(
            config, request, global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/storedInfoTypes/{storedInfoTypesId}',
        http_method=u'PATCH',
        method_id=u'dlp.projects.storedInfoTypes.patch',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field=u'googlePrivacyDlpV2UpdateStoredInfoTypeRequest',
        request_type_name=u'DlpProjectsStoredInfoTypesPatchRequest',
        response_type_name=u'GooglePrivacyDlpV2StoredInfoType',
        supports_download=False,
    )
class ProjectsService(base_api.BaseApiService):
    """Service class for the projects resource.

    Container resource: exposes no API methods of its own in this surface.
    """

    _NAME = u'projects'

    def __init__(self, client):
        super(DlpV2.ProjectsService, self).__init__(client)
        # No media-upload configurations are defined for this resource.
        self._upload_configs = {
            }
| [
"[email protected]"
] | |
1b747ea8a43a236bb90f881c0f94541e88f10226 | ce76b3ef70b885d7c354b6ddb8447d111548e0f1 | /time/great_company/person/think_thing/large_person.py | 404255285e129d19a24dfea618205b314abd8f64 | [] | no_license | JingkaiTang/github-play | 9bdca4115eee94a7b5e4ae9d3d6052514729ff21 | 51b550425a91a97480714fe9bc63cb5112f6f729 | refs/heads/master | 2021-01-20T20:18:21.249162 | 2016-08-19T07:20:12 | 2016-08-19T07:20:12 | 60,834,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 280 | py |
#! /usr/bin/env python
def different_point_and_important_person(str_arg):
    """Print *str_arg* on one line (via own_life), then the fixed line 'small_government'."""
    own_life(str_arg)
    print('small_government')
def own_life(str_arg):
    """Print *str_arg* to stdout on its own line."""
    print(str_arg)
if __name__ == '__main__':
    # Demo entry point: prints the argument string, then 'small_government'.
    different_point_and_important_person('tell_high_fact_about_next_government')
| [
"[email protected]"
] | |
773f91abb27727fe00fca57a9a0057b794f7b0a2 | 26f6313772161851b3b28b32a4f8d255499b3974 | /Python/HowManyApplesCanYouPutintotheBasket.py | b7e35d3681a27ba6f75f3b9c535237f76e621a1e | [] | no_license | here0009/LeetCode | 693e634a3096d929e5c842c5c5b989fa388e0fcd | f96a2273c6831a8035e1adacfa452f73c599ae16 | refs/heads/master | 2023-06-30T19:07:23.645941 | 2021-07-31T03:38:51 | 2021-07-31T03:38:51 | 266,287,834 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,021 | py | """
You have some apples, where arr[i] is the weight of the i-th apple. You also have a basket that can carry up to 5000 units of weight.
Return the maximum number of apples you can put in the basket.
Example 1:
Input: arr = [100,200,150,1000]
Output: 4
Explanation: All 4 apples can be carried by the basket since their sum of weights is 1450.
Example 2:
Input: arr = [900,950,800,1000,700,800]
Output: 5
Explanation: The sum of weights of the 6 apples exceeds 5000 so we choose any 5 of them.
Constraints:
1 <= arr.length <= 10^3
1 <= arr[i] <= 10^3
"""
class Solution:
    def maxNumberOfApples(self, arr):
        """Return how many apples fit in a basket holding at most 5000 units.

        Greedy: take the lightest apples first (sort ascending) and stop as
        soon as the next apple would exceed the remaining capacity.
        """
        remaining = 5000
        taken = 0
        for weight in sorted(arr):
            if weight > remaining:
                break
            remaining -= weight
            taken += 1
        return taken


# Quick demo when the module is executed/imported directly.
solver = Solution()
print(solver.maxNumberOfApples([100, 200, 150, 1000]))
print(solver.maxNumberOfApples([900, 950, 800, 1000, 700, 800]))
"[email protected]"
] | |
08cd8c874f133dbc2800ce6ab8f767de9c81419b | e22e03d9761f5c6d581b5af2e77343e8ee4b201d | /edk2/BaseTools/Source/Python/CommonDataClass/DataClass.py | 32196c5e672752fcf319d543203b6ceeae23fc0b | [
"OpenSSL",
"BSD-2-Clause"
] | permissive | SamuelTulach/SecureFakePkg | 759975fcc84d62f05ac577da48353752e5334878 | f34080a6c0efb6ca3dd755365778d0bcdca6b991 | refs/heads/main | 2023-08-17T07:51:22.175924 | 2021-10-01T10:46:14 | 2021-10-01T10:46:14 | 410,938,306 | 94 | 14 | null | null | null | null | UTF-8 | Python | false | false | 16,686 | py | ## @file
# This file is used to define class for data structure used in ECC
#
# Copyright (c) 2008 - 2014, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
##
# Import Modules
#
import Common.EdkLogger as EdkLogger
##
# Static values for data models
#

# Fallback id for anything not classified below.
MODEL_UNKNOWN = 0

# File-kind models (1xxx): one id per recognized source/metadata file type.
MODEL_FILE_C = 1001
MODEL_FILE_H = 1002
MODEL_FILE_ASM = 1003
MODEL_FILE_INF = 1011
MODEL_FILE_DEC = 1012
MODEL_FILE_DSC = 1013
MODEL_FILE_FDF = 1014
MODEL_FILE_INC = 1015
MODEL_FILE_CIF = 1016
MODEL_FILE_UNI = 1017
MODEL_FILE_OTHERS = 1099

# C-language identifier models (2xxx): constructs found while parsing sources.
MODEL_IDENTIFIER_FILE_HEADER = 2001
MODEL_IDENTIFIER_FUNCTION_HEADER = 2002
MODEL_IDENTIFIER_COMMENT = 2003
MODEL_IDENTIFIER_PARAMETER = 2004
MODEL_IDENTIFIER_STRUCTURE = 2005
MODEL_IDENTIFIER_VARIABLE = 2006
MODEL_IDENTIFIER_INCLUDE = 2007
MODEL_IDENTIFIER_PREDICATE_EXPRESSION = 2008
MODEL_IDENTIFIER_ENUMERATE = 2009
MODEL_IDENTIFIER_PCD = 2010
MODEL_IDENTIFIER_UNION = 2011
MODEL_IDENTIFIER_MACRO_IFDEF = 2012
MODEL_IDENTIFIER_MACRO_IFNDEF = 2013
MODEL_IDENTIFIER_MACRO_DEFINE = 2014
MODEL_IDENTIFIER_MACRO_ENDIF = 2015
MODEL_IDENTIFIER_MACRO_PROGMA = 2016
# NOTE(review): 2018 is declared before 2017 in the original; statement order
# is preserved here (values, not order, are what matter).
MODEL_IDENTIFIER_FUNCTION_CALLING = 2018
MODEL_IDENTIFIER_TYPEDEF = 2017
MODEL_IDENTIFIER_FUNCTION_DECLARATION = 2019
MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION = 2020

# EFI metadata item models (3xxx): entries declared in INF/DEC/DSC files.
MODEL_EFI_PROTOCOL = 3001
MODEL_EFI_PPI = 3002
MODEL_EFI_GUID = 3003
MODEL_EFI_LIBRARY_CLASS = 3004
MODEL_EFI_LIBRARY_INSTANCE = 3005
MODEL_EFI_PCD = 3006
MODEL_EFI_SOURCE_FILE = 3007
MODEL_EFI_BINARY_FILE = 3008
MODEL_EFI_SKU_ID = 3009
MODEL_EFI_INCLUDE = 3010
MODEL_EFI_DEPEX = 3011
MODEL_EFI_DEFAULT_STORES = 3012

# PCD type models (4xxx).
MODEL_PCD = 4000
MODEL_PCD_FIXED_AT_BUILD = 4001
MODEL_PCD_PATCHABLE_IN_MODULE = 4002
MODEL_PCD_FEATURE_FLAG = 4003
MODEL_PCD_DYNAMIC_EX = 4004
MODEL_PCD_DYNAMIC_EX_DEFAULT = 4005
MODEL_PCD_DYNAMIC_EX_VPD = 4006
MODEL_PCD_DYNAMIC_EX_HII = 4007
MODEL_PCD_DYNAMIC = 4008
MODEL_PCD_DYNAMIC_DEFAULT = 4009
MODEL_PCD_DYNAMIC_VPD = 4010
MODEL_PCD_DYNAMIC_HII = 4011

# The concrete (leaf) PCD section types; excludes the MODEL_PCD,
# MODEL_PCD_DYNAMIC and MODEL_PCD_DYNAMIC_EX umbrella ids.
MODEL_PCD_TYPE_LIST = [MODEL_PCD_FIXED_AT_BUILD,
                       MODEL_PCD_PATCHABLE_IN_MODULE,
                       MODEL_PCD_FEATURE_FLAG,
                       MODEL_PCD_DYNAMIC_DEFAULT,
                       MODEL_PCD_DYNAMIC_HII,
                       MODEL_PCD_DYNAMIC_VPD,
                       MODEL_PCD_DYNAMIC_EX_DEFAULT,
                       MODEL_PCD_DYNAMIC_EX_HII,
                       MODEL_PCD_DYNAMIC_EX_VPD
                       ]

# Meta-data-file statement models (5xxx).
MODEL_META_DATA_HEADER_COMMENT = 5000
MODEL_META_DATA_HEADER = 5001
MODEL_META_DATA_INCLUDE = 5002
MODEL_META_DATA_DEFINE = 5003
MODEL_META_DATA_CONDITIONAL_STATEMENT_IF = 5004
MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE = 5005
MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF = 5006
MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF = 5007
MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR = 5400
MODEL_META_DATA_BUILD_OPTION = 5008
MODEL_META_DATA_COMPONENT = 5009
MODEL_META_DATA_USER_EXTENSION = 5010
MODEL_META_DATA_PACKAGE = 5011
MODEL_META_DATA_NMAKE = 5012
MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSEIF = 5013
MODEL_META_DATA_CONDITIONAL_STATEMENT_ENDIF = 5014
MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH = 5015
MODEL_META_DATA_COMMENT = 5016
MODEL_META_DATA_GLOBAL_DEFINE = 5017
MODEL_META_DATA_SECTION_HEADER = 5100
MODEL_META_DATA_SUBSECTION_HEADER = 5200
MODEL_META_DATA_TAIL_COMMENT = 5300

# Marker for dependencies resolved outside the parsed workspace.
MODEL_EXTERNAL_DEPENDENCY = 10000
# (name, value) pairs for the model ids above, usable for name<->id lookups.
# NOTE(review): the list is not exhaustive (e.g. MODEL_FILE_UNI,
# MODEL_EFI_DEFAULT_STORES and most 5xxx section ids are absent) and
# ('MODEL_IDENTIFIER_UNION', ...) appears twice -- confirm whether intended.
MODEL_LIST = [('MODEL_UNKNOWN', MODEL_UNKNOWN),
              ('MODEL_FILE_C', MODEL_FILE_C),
              ('MODEL_FILE_H', MODEL_FILE_H),
              ('MODEL_FILE_ASM', MODEL_FILE_ASM),
              ('MODEL_FILE_INF', MODEL_FILE_INF),
              ('MODEL_FILE_DEC', MODEL_FILE_DEC),
              ('MODEL_FILE_DSC', MODEL_FILE_DSC),
              ('MODEL_FILE_FDF', MODEL_FILE_FDF),
              ('MODEL_FILE_INC', MODEL_FILE_INC),
              ('MODEL_FILE_CIF', MODEL_FILE_CIF),
              ('MODEL_FILE_OTHERS', MODEL_FILE_OTHERS),
              ('MODEL_IDENTIFIER_FILE_HEADER', MODEL_IDENTIFIER_FILE_HEADER),
              ('MODEL_IDENTIFIER_FUNCTION_HEADER', MODEL_IDENTIFIER_FUNCTION_HEADER),
              ('MODEL_IDENTIFIER_COMMENT', MODEL_IDENTIFIER_COMMENT),
              ('MODEL_IDENTIFIER_PARAMETER', MODEL_IDENTIFIER_PARAMETER),
              ('MODEL_IDENTIFIER_STRUCTURE', MODEL_IDENTIFIER_STRUCTURE),
              ('MODEL_IDENTIFIER_VARIABLE', MODEL_IDENTIFIER_VARIABLE),
              ('MODEL_IDENTIFIER_INCLUDE', MODEL_IDENTIFIER_INCLUDE),
              ('MODEL_IDENTIFIER_PREDICATE_EXPRESSION', MODEL_IDENTIFIER_PREDICATE_EXPRESSION),
              ('MODEL_IDENTIFIER_ENUMERATE', MODEL_IDENTIFIER_ENUMERATE),
              ('MODEL_IDENTIFIER_PCD', MODEL_IDENTIFIER_PCD),
              ('MODEL_IDENTIFIER_UNION', MODEL_IDENTIFIER_UNION),
              ('MODEL_IDENTIFIER_MACRO_IFDEF', MODEL_IDENTIFIER_MACRO_IFDEF),
              ('MODEL_IDENTIFIER_MACRO_IFNDEF', MODEL_IDENTIFIER_MACRO_IFNDEF),
              ('MODEL_IDENTIFIER_MACRO_DEFINE', MODEL_IDENTIFIER_MACRO_DEFINE),
              ('MODEL_IDENTIFIER_MACRO_ENDIF', MODEL_IDENTIFIER_MACRO_ENDIF),
              ('MODEL_IDENTIFIER_MACRO_PROGMA', MODEL_IDENTIFIER_MACRO_PROGMA),
              ('MODEL_IDENTIFIER_FUNCTION_CALLING', MODEL_IDENTIFIER_FUNCTION_CALLING),
              ('MODEL_IDENTIFIER_TYPEDEF', MODEL_IDENTIFIER_TYPEDEF),
              ('MODEL_IDENTIFIER_FUNCTION_DECLARATION', MODEL_IDENTIFIER_FUNCTION_DECLARATION),
              ('MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION', MODEL_IDENTIFIER_ASSIGNMENT_EXPRESSION),
              ('MODEL_EFI_PROTOCOL', MODEL_EFI_PROTOCOL),
              ('MODEL_EFI_PPI', MODEL_EFI_PPI),
              ('MODEL_EFI_GUID', MODEL_EFI_GUID),
              ('MODEL_EFI_LIBRARY_CLASS', MODEL_EFI_LIBRARY_CLASS),
              ('MODEL_EFI_LIBRARY_INSTANCE', MODEL_EFI_LIBRARY_INSTANCE),
              ('MODEL_EFI_PCD', MODEL_EFI_PCD),
              ('MODEL_EFI_SKU_ID', MODEL_EFI_SKU_ID),
              ('MODEL_EFI_INCLUDE', MODEL_EFI_INCLUDE),
              ('MODEL_EFI_DEPEX', MODEL_EFI_DEPEX),
              ('MODEL_IDENTIFIER_UNION', MODEL_IDENTIFIER_UNION),
              ('MODEL_EFI_SOURCE_FILE', MODEL_EFI_SOURCE_FILE),
              ('MODEL_EFI_BINARY_FILE', MODEL_EFI_BINARY_FILE),
              ('MODEL_PCD', MODEL_PCD),
              ('MODEL_PCD_FIXED_AT_BUILD', MODEL_PCD_FIXED_AT_BUILD),
              ('MODEL_PCD_PATCHABLE_IN_MODULE', MODEL_PCD_PATCHABLE_IN_MODULE),
              ('MODEL_PCD_FEATURE_FLAG', MODEL_PCD_FEATURE_FLAG),
              ('MODEL_PCD_DYNAMIC_EX', MODEL_PCD_DYNAMIC_EX),
              ('MODEL_PCD_DYNAMIC_EX_DEFAULT', MODEL_PCD_DYNAMIC_EX_DEFAULT),
              ('MODEL_PCD_DYNAMIC_EX_VPD', MODEL_PCD_DYNAMIC_EX_VPD),
              ('MODEL_PCD_DYNAMIC_EX_HII', MODEL_PCD_DYNAMIC_EX_HII),
              ('MODEL_PCD_DYNAMIC', MODEL_PCD_DYNAMIC),
              ('MODEL_PCD_DYNAMIC_DEFAULT', MODEL_PCD_DYNAMIC_DEFAULT),
              ('MODEL_PCD_DYNAMIC_VPD', MODEL_PCD_DYNAMIC_VPD),
              ('MODEL_PCD_DYNAMIC_HII', MODEL_PCD_DYNAMIC_HII),
              ("MODEL_META_DATA_HEADER", MODEL_META_DATA_HEADER),
              ("MODEL_META_DATA_INCLUDE", MODEL_META_DATA_INCLUDE),
              ("MODEL_META_DATA_DEFINE", MODEL_META_DATA_DEFINE),
              ("MODEL_META_DATA_CONDITIONAL_STATEMENT_IF", MODEL_META_DATA_CONDITIONAL_STATEMENT_IF),
              ("MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE", MODEL_META_DATA_CONDITIONAL_STATEMENT_ELSE),
              ("MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF", MODEL_META_DATA_CONDITIONAL_STATEMENT_IFDEF),
              ("MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF", MODEL_META_DATA_CONDITIONAL_STATEMENT_IFNDEF),
              ("MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR", MODEL_META_DATA_CONDITIONAL_STATEMENT_ERROR),
              ("MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH", MODEL_META_DATA_COMPONENT_SOURCE_OVERRIDE_PATH),
              ("MODEL_META_DATA_BUILD_OPTION", MODEL_META_DATA_BUILD_OPTION),
              ("MODEL_META_DATA_COMPONENT", MODEL_META_DATA_COMPONENT),
              ('MODEL_META_DATA_USER_EXTENSION', MODEL_META_DATA_USER_EXTENSION),
              ('MODEL_META_DATA_PACKAGE', MODEL_META_DATA_PACKAGE),
              ('MODEL_META_DATA_NMAKE', MODEL_META_DATA_NMAKE),
              ('MODEL_META_DATA_COMMENT', MODEL_META_DATA_COMMENT)
              ]
## FunctionClass
#
# This class defines a structure of a function
#
# @param ID: ID of a Function
# @param Header: Header of a Function
# @param Modifier: Modifier of a Function
# @param Name: Name of a Function
# @param ReturnStatement: ReturnStatement of a Function
# @param StartLine: StartLine of a Function
# @param StartColumn: StartColumn of a Function
# @param EndLine: EndLine of a Function
# @param EndColumn: EndColumn of a Function
# @param BodyStartLine: BodyStartLine of a Function Body
# @param BodyStartColumn: BodyStartColumn of a Function Body
# @param BelongsToFile: The Function belongs to which file
# @param IdentifierList: IdentifierList of a File
# @param PcdList: PcdList of a File
#
# @var ID: ID of a Function
# @var Header: Header of a Function
# @var Modifier: Modifier of a Function
# @var Name: Name of a Function
# @var ReturnStatement: ReturnStatement of a Function
# @var StartLine: StartLine of a Function
# @var StartColumn: StartColumn of a Function
# @var EndLine: EndLine of a Function
# @var EndColumn: EndColumn of a Function
# @var BodyStartLine: StartLine of a Function Body
# @var BodyStartColumn: StartColumn of a Function Body
# @var BelongsToFile: The Function belongs to which file
# @var IdentifierList: IdentifierList of a File
# @var PcdList: PcdList of a File
#
class FunctionClass(object):
    """Model of a function definition found while scanning sources for ECC.

    Attributes mirror the constructor parameters. Line/column positions are
    coordinates within the owning file; -1 means unknown/unset.
    """

    def __init__(self, ID = -1, Header = '', Modifier = '', Name = '', ReturnStatement = '', \
                 StartLine = -1, StartColumn = -1, EndLine = -1, EndColumn = -1, \
                 BodyStartLine = -1, BodyStartColumn = -1, BelongsToFile = -1, \
                 IdentifierList = None, PcdList = None, \
                 FunNameStartLine = -1, FunNameStartColumn = -1):
        self.ID = ID
        self.Header = Header
        self.Modifier = Modifier
        self.Name = Name
        self.ReturnStatement = ReturnStatement
        self.StartLine = StartLine
        self.StartColumn = StartColumn
        self.EndLine = EndLine
        self.EndColumn = EndColumn
        self.BodyStartLine = BodyStartLine
        self.BodyStartColumn = BodyStartColumn
        self.BelongsToFile = BelongsToFile
        self.FunNameStartLine = FunNameStartLine
        self.FunNameStartColumn = FunNameStartColumn
        # Fix for the original mutable-default-argument bug: the previous
        # signature used `IdentifierList = []` / `PcdList = []`, so every
        # instance constructed without these arguments shared the SAME list
        # object. Use a None sentinel and create a fresh list per instance.
        self.IdentifierList = [] if IdentifierList is None else IdentifierList
        self.PcdList = [] if PcdList is None else PcdList
## IdentifierClass
#
# This class defines a structure of a variable
#
# @param ID: ID of a Identifier
# @param Modifier: Modifier of a Identifier
# @param Type: Type of a Identifier
# @param Name: Name of a Identifier
# @param Value: Value of a Identifier
# @param Model: Model of a Identifier
# @param BelongsToFile: The Identifier belongs to which file
# @param BelongsToFunction: The Identifier belongs to which function
# @param StartLine: StartLine of a Identifier
# @param StartColumn: StartColumn of a Identifier
# @param EndLine: EndLine of a Identifier
# @param EndColumn: EndColumn of a Identifier
#
# @var ID: ID of a Identifier
# @var Modifier: Modifier of a Identifier
# @var Type: Type of a Identifier
# @var Name: Name of a Identifier
# @var Value: Value of a Identifier
# @var Model: Model of a Identifier
# @var BelongsToFile: The Identifier belongs to which file
# @var BelongsToFunction: The Identifier belongs to which function
# @var StartLine: StartLine of a Identifier
# @var StartColumn: StartColumn of a Identifier
# @var EndLine: EndLine of a Identifier
# @var EndColumn: EndColumn of a Identifier
#
class IdentifierClass(object):
    """Model of a single identifier (variable) found in a source file.

    Stores the identifier's modifier/type/name/value, the model category it
    was parsed as, the file and function it belongs to, and its source
    location (start/end line and column).
    """
    def __init__(self, ID=-1, Modifier='', Type='', Name='', Value='', Model=MODEL_UNKNOWN,
                 BelongsToFile=-1, BelongsToFunction=-1, StartLine=-1, StartColumn=-1,
                 EndLine=-1, EndColumn=-1):
        # What the identifier is.
        self.ID, self.Modifier, self.Type = ID, Modifier, Type
        self.Name, self.Value, self.Model = Name, Value, Model
        # Where it lives.
        self.BelongsToFile, self.BelongsToFunction = BelongsToFile, BelongsToFunction
        self.StartLine, self.StartColumn = StartLine, StartColumn
        self.EndLine, self.EndColumn = EndLine, EndColumn
## PcdClass
#
# This class defines a structure of a Pcd
#
# @param ID: ID of a Pcd
# @param CName: CName of a Pcd
# @param TokenSpaceGuidCName: TokenSpaceGuidCName of a Pcd
# @param Token: Token of a Pcd
# @param DatumType: DatumType of a Pcd
# @param Model: Model of a Pcd
# @param BelongsToFile: The Pcd belongs to which file
# @param BelongsToFunction: The Pcd belongs to which function
# @param StartLine: StartLine of a Pcd
# @param StartColumn: StartColumn of a Pcd
# @param EndLine: EndLine of a Pcd
# @param EndColumn: EndColumn of a Pcd
#
# @var ID: ID of a Pcd
# @var CName: CName of a Pcd
# @var TokenSpaceGuidCName: TokenSpaceGuidCName of a Pcd
# @var Token: Token of a Pcd
# @var DatumType: DatumType of a Pcd
# @var Model: Model of a Pcd
# @var BelongsToFile: The Pcd belongs to which file
# @var BelongsToFunction: The Pcd belongs to which function
# @var StartLine: StartLine of a Pcd
# @var StartColumn: StartColumn of a Pcd
# @var EndLine: EndLine of a Pcd
# @var EndColumn: EndColumn of a Pcd
#
class PcdDataClass(object):
    """Model of a PCD (Platform Configuration Database entry) reference.

    Records the PCD's C name, token-space GUID C name, token, datum type and
    model category, the file/function it belongs to, and its source location.
    """
    def __init__(self, ID=-1, CName='', TokenSpaceGuidCName='', Token='', DatumType='', Model=MODEL_UNKNOWN,
                 BelongsToFile=-1, BelongsToFunction=-1, StartLine=-1, StartColumn=-1, EndLine=-1, EndColumn=-1):
        self.ID = ID
        self.CName = CName
        self.TokenSpaceGuidCName = TokenSpaceGuidCName
        self.Token = Token
        self.DatumType = DatumType
        # Fix: the Model argument was accepted (and documented) but never
        # stored, so instances previously had no Model attribute at all.
        self.Model = Model
        self.BelongsToFile = BelongsToFile
        self.BelongsToFunction = BelongsToFunction
        self.StartLine = StartLine
        self.StartColumn = StartColumn
        self.EndLine = EndLine
        self.EndColumn = EndColumn
## FileClass
#
# This class defines a structure of a file
#
# @param ID: ID of a File
# @param Name: Name of a File
# @param ExtName: ExtName of a File
# @param Path: Path of a File
# @param FullPath: FullPath of a File
# @param Model: Model of a File
# @param TimeStamp: TimeStamp of a File
# @param FunctionList: FunctionList of a File
# @param IdentifierList: IdentifierList of a File
# @param PcdList: PcdList of a File
#
# @var ID: ID of a File
# @var Name: Name of a File
# @var ExtName: ExtName of a File
# @var Path: Path of a File
# @var FullPath: FullPath of a File
# @var Model: Model of a File
# @var TimeStamp: TimeStamp of a File
# @var FunctionList: FunctionList of a File
# @var IdentifierList: IdentifierList of a File
# @var PcdList: PcdList of a File
#
class FileClass(object):
    """Model of a parsed source file.

    Holds the file's name/extension/path information, model category and
    timestamp, plus the functions, identifiers and PCDs found inside it.
    """
    def __init__(self, ID=-1, Name='', ExtName='', Path='', FullPath='', Model=MODEL_UNKNOWN, TimeStamp='',
                 FunctionList=None, IdentifierList=None, PcdList=None):
        self.ID = ID
        self.Name = Name
        self.ExtName = ExtName
        self.Path = Path
        self.FullPath = FullPath
        self.Model = Model
        self.TimeStamp = TimeStamp
        # None sentinels: the previous FunctionList=[] / IdentifierList=[] /
        # PcdList=[] defaults were single shared lists mutated by every
        # instance constructed without explicit arguments.
        self.FunctionList = [] if FunctionList is None else FunctionList
        self.IdentifierList = [] if IdentifierList is None else IdentifierList
        self.PcdList = [] if PcdList is None else PcdList
| [
"[email protected]"
] | |
87ba885929017189ab742b8e8999ce8d820bb5f2 | 987ead1eb0877b9bdea16f3ee50bf19d5fe204bd | /matplotlib/fig_axes_customize_simple.py | 86517664a5dd13b23da4617281156274fa684c85 | [] | no_license | ZHX1996/project | da62151e32254848a02292a2f9bdb1db17850d67 | 5a57be55cf173dde7e5a135a9cf1cfbc9a63a158 | refs/heads/master | 2021-07-15T11:36:02.412231 | 2020-05-15T08:51:34 | 2020-05-15T08:51:34 | 94,512,901 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 489 | py | import numpy as np
import matplotlib.pyplot as plt
fig = plt.figure()
rect = fig.patch
rect.set_facecolor('lightgoldenrodyellow')
ax1 = fig.add_axes([0.1, 0.3, 0.4, 0.4])
rect = ax1.patch
rect.set_facecolor('lightslategray')
for label in ax1.xaxis.get_ticklabels():
label.set_color('red')
label.set_rotation(45)
label.set_fontsize(16)
for line in ax1.yaxis.get_ticklines():
line.set_color('green')
line.set_markersize(25)
line.set_markeredgewidth(3)
plt.show() | [
"[email protected]"
] | |
e6979701b78027a1810a73e1e1afa3b9d8e5a65b | d6a209a45bb14846e47b07a77641de26e073e9fb | /drill14.py | a5fe48baad84fb34879066dbc046251dce45ee47 | [] | no_license | fan-bingbing/pyexcercise | ca711212af0f5df07a57d253190f63cf4a0dd887 | ddb32bfae1891cda9f0ef0effd43a95a95e1d043 | refs/heads/master | 2021-03-13T19:49:24.042399 | 2020-03-17T04:24:21 | 2020-03-17T04:24:21 | 246,705,823 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 612 | py | from sys import argv
script, user_name = argv
# don't forget how to run this kind of file, pass argv in command line.
prompt = '>'
# a handy way of changing prompt icon
print(f"Hi {user_name}, I'm the {script} script.")
print("I'd like to ask you a few questions.")
print(f"Do you like me {user_name}?")
likes = input(prompt)
print(f"where do you live {user_name}?")
lives = input(prompt)
print("What kind of computer do you have?")
computer = input(prompt)
print(f"""
Alright, so you said {likes} about liking me.
You live in {lives}. Not sure where that is.
And you have a {computer} computer. Nice.
""")
| [
"[email protected]"
] | |
c2bb5f0ed30a64641673adc9923cb1f29d84b06d | c30906c50ea0fbcccbf080b89eca84edb9f04673 | /DaVinci_scripts/MC/twoBody/KK/job_2016_down.py | 766fe4d29a71ab572fa7135bf3f63551a126e3ce | [] | no_license | hpullen/DPhil_B02DKstar_analysis | 543661c4c2e978fb7f60a1d81f27bc660710994d | 651b3f333d3959e78512fc294afa334e3ea26fd9 | refs/heads/master | 2023-07-15T17:38:53.009366 | 2021-08-25T19:40:42 | 2021-08-25T19:40:42 | 107,555,335 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 501 | py | j = Job(name='KK_2016_down',
backend=Dirac(),
application=DaVinci(version = "v41r3"),
splitter=SplitByFiles(filesPerJob = 10, maxFiles = -1)
)
j.application.optsfile = '2016_KK.py'
BK = BKQuery(path=('//MC/2016/Beam6500GeV-2016-MagDown-Nu1.6-25ns-Pythia8/Sim09b/'
'Trig0x6138160F/Reco16/Turbo03/Stripping26NoPrescalingFlagged/'
'11164021/ALLSTREAMS.MDST'))
j.inputdata = BK.getDataset()
j.comment = '2016 MC KK down'
j.submit()
| [
"[email protected]"
] | |
8a0ce920566673a8897bd95bdffab20b9ca62d2e | 5496b9682dec06925f3572e64d7f1eb48d78ebe1 | /src/visualization/FragmentationKaandorpPartial/FragmentationKaandorpPartial_timeseries.py | e0c5e0fe4cbbd37de19ad52a5dd362f293e1991e | [] | no_license | VictorOnink/Lagrangian-Transport-Scenarios | 64bec8b992e2909a05b0258524dbae25f967ea29 | 586bcecc42d6a7f4f299507da8f0cb29c8d71a2e | refs/heads/master | 2023-04-14T12:22:29.309172 | 2022-07-11T18:46:38 | 2022-07-11T18:46:38 | 297,894,637 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,986 | py | import settings
import utils
import visualization.visualization_utils as vUtils
import matplotlib.pyplot as plt
import string
from datetime import datetime, timedelta
class FragmentationKaandorpPartial_timeseries:
def __init__(self, scenario, figure_direc, shore_time, lambda_frag, rho, simulation_length, weight,
input='LebretonDivision'):
# Simulation parameters
self.scenario = scenario
self.shore_time = shore_time
self.lambda_frag = lambda_frag
self.rho = rho
self.simulation_length = simulation_length
self.class_num = settings.SIZE_CLASS_NUMBER
self.weight = weight
self.input = input
# Data parameters
self.output_direc = figure_direc + 'timeseries/'
self.data_direc = settings.DATA_OUTPUT_DIREC + 'timeseries/FragmentationKaandorpPartial/'
utils.check_direc_exist(self.output_direc)
self.prefix = 'timeseries'
self.beach_state_list = ['beach', 'adrift', 'total']
# Figure parameters
self.figure_size = (20, 14)
self.figure_shape = (self.beach_state_list.__len__(), 1)
self.ax_label_size = 16
self.ax_ticklabel_size = 14
self.legend_size = 14
self.y_label = 'Mass'
self.x_label = 'Time'
self.xmin, self.xmax = datetime(settings.STARTYEAR, 1, 1), \
datetime(settings.STARTYEAR + self.simulation_length, 1, 1)
self.ymin, self.ymax = 1e-3, 6500
self.ax_range = self.xmax, self.xmin, self.ymax, self.ymin
self.number_of_plots = self.beach_state_list.__len__()
def plot(self):
# Loading data
timeseries_dict = {}
for size_class in range(self.class_num):
timeseries_dict[size_class] = {}
data_dict = vUtils.FragmentationKaandorpPartial_load_data(scenario=self.scenario, prefix=self.prefix,
data_direc=self.data_direc,
shore_time=self.shore_time,
lambda_frag=self.lambda_frag, rho=self.rho,
postprocess=True, input=self.input)
for beach_state in self.beach_state_list:
timeseries_dict[size_class][beach_state] = data_dict[beach_state][size_class][self.weight]
# creating a time axis
time_list = []
for time in data_dict['time']:
time_list.append(datetime(settings.STARTYEAR, 1, 1, 12) + timedelta(seconds=time))
# Creating figure
ax = vUtils.base_figure(fig_size=self.figure_size, ax_range=self.ax_range, y_label=self.y_label,
x_label=self.x_label, ax_label_size=self.ax_label_size,
ax_ticklabel_size=self.ax_ticklabel_size, shape=self.figure_shape,
plot_num=self.number_of_plots, legend_axis=True, log_yscale=True, x_time_axis=True,
width_ratios=[1, 0.3], all_x_labels=True)
# Setting the subfigure titles
for ax_index in range(self.number_of_plots):
ax[ax_index].set_title(subfigure_title(ax_index, self.beach_state_list[ax_index]),
fontsize=self.ax_label_size)
# Creating a legend
size_colors = [plt.plot([], [], c=vUtils.discrete_color_from_cmap(sc, subdivisions=self.class_num),
label=size_label(sc), linestyle='-')[0] for sc in range(self.class_num)]
ax[-1].legend(handles=size_colors, fontsize=self.legend_size, loc='upper right')
# Plotting the various fractions
for size_class in range(self.class_num):
for beach_index, beach_state in enumerate(self.beach_state_list):
color_size = vUtils.discrete_color_from_cmap(size_class, subdivisions=self.class_num)
ax[beach_index].plot(time_list, timeseries_dict[size_class][beach_state], linestyle='-',
c=color_size)
# Saving the figure
plt.savefig(file_name(self.input, self.output_direc, self.shore_time, self.lambda_frag), bbox_inches='tight')
def file_name(input, output_direc, shore_time, lambda_frag):
    """Build the output figure path for the beach-state timeseries plot.

    The parameter order matches the only call site,
    ``file_name(self.input, self.output_direc, self.shore_time, self.lambda_frag)``
    in FragmentationKaandorpPartial_timeseries.plot().  The previous
    signature took ``output_direc`` first, so every positional call
    scrambled the arguments and produced a garbage path.
    (``input`` shadows the builtin but is kept for interface compatibility.)
    """
    str_format = input, shore_time, lambda_frag
    return output_direc + 'FragmentationKaandorpPartial_beach_state_timeseries-{}_ST={}_lamf={}.png'.format(*str_format)
def subfigure_title(index, beach_state):
    """Return the panel label for subplot *index*, e.g. '(a) beach'."""
    return '({}) {}'.format(string.ascii_lowercase[index], beach_state)
def size_label(size_class):
    """Legend label for a size class: its index and particle diameter in mm.

    Each fragmentation step shrinks the initial size by a factor P_FRAG.
    """
    diameter_m = settings.INIT_SIZE * settings.P_FRAG ** size_class
    return f'Size class {size_class}, d = {diameter_m * 1e3:.2f} mm'
"[email protected]"
] | |
88bda27ed804dfe8b840c9d8fa91efb4676af7f7 | bd172241a0c0f2817ecd348593f3b4076f233e48 | /train_lorenz_gan.py | 3d5c2af1378cce7e00c3f20405ceeabbe1131ee4 | [] | no_license | josteinstraume/lorenz_gan | 2ccb1f6c5413fc00e4fdb5ceaf278e46eee122bc | 4b7495c00e28574d4cf5faeee0d93d7f0582a4b1 | refs/heads/master | 2022-04-01T20:32:25.970642 | 2020-02-12T05:16:30 | 2020-02-12T05:16:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,158 | py | import tensorflow as tf
tf.compat.v1.disable_eager_execution()
from lorenz_gan.lorenz import run_lorenz96_truth, process_lorenz_data, save_lorenz_output
from lorenz_gan.gan import generator_conv, generator_dense, discriminator_conv, discriminator_dense
from lorenz_gan.gan import predict_stochastic, generator_dense_stoch, discriminator_conv_concrete, generator_dense_auto_stoch
from lorenz_gan.gan import train_gan, initialize_gan, normalize_data, generator_conv_concrete, unnormalize_data
from lorenz_gan.submodels import AR1RandomUpdater, SubModelHist, SubModelPoly, SubModelPolyAdd, SubModelANNRes, SubModelANN
import xarray as xr
import tensorflow.compat.v1.keras.backend as K
from tensorflow.keras.optimizers import Adam
import numpy as np
import pickle
import pandas as pd
import yaml
import argparse
from os.path import exists, join
from os import mkdir
def main():
    """
    This script runs the Lorenz '96 model and then trains a generative adversarial network
    to parameterize the unresolved Y values. The script requires a config file as input.
    The config file is formatted in the yaml format with the following information included.

    lorenz: # The Lorenz model subsection
        K: 8 # number of X variables
        J: 32 # number of Y variables per X variable
        h: 1 # coupling constant
        b: 10 # spatial-scale ratio
        c: 10 # time scale ratio
        F: 30 # forcing term
        time_step: 0.001 # time step of Lorenz truth model in MTU
        num_steps: 1000000 # number of integration time steps
        skip: 5 # number of steps to skip when saving out the model
        burn_in: 2000 # number of steps to remove from the beginning of the integration
    gan: # The GAN subsection
        structure: conv # type of GAN neural network, options are conv or dense
        t_skip: 10 # number of time steps to skip when saving data for training
        x_skip: 1 # number of X variables to skip
        output: sample # Train the neural network to output a "sample" of Ys or the "mean" of the Ys
        generator:
            num_cond_inputs: 3 # number of conditional X values
            num_random_inputs: 13 # number of random values
            num_outputs: 32 # number of output variables (should match J)
            activation: relu # activation function
            min_conv_filters: 32 # number of convolution filters in the last layer of the generator
            min_data_width: 4 # width of the data array after the dense layer in the generator
            filter_width: 4 # Size of the convolution filters
        discriminator:
            num_cond_inputs: 3 # number of conditional X values
            num_sample_inputs: 32 # number of Y values
            activation: relu # Activation function
            min_conv_filters: 32 # number of convolution filters in the first layer of the discriminator
            min_data_width: 4 # width of the data array before the dense layer in the discriminator
            filter_width: 4 # width of the convolution filters
        gan_path: ./exp # path where GAN files are saved
        batch_size: 64 # Number of examples per training batch
        gan_index: 0 # GAN configuration number
        loss: binary_crossentropy # Loss function for the GAN
        num_epochs: [1, 5, 10] # Epochs after which the GAN model is saved
        metrics: ["accuracy"] # Metrics to calculate along with the loss
    output_nc_file: ./exp/lorenz_output.nc # Where Lorenz 96 data is output
    output_csv_file: ./exp/lorenz_combined_output.csv # Where flat file formatted data is saved

    Returns:

    """
    parser = argparse.ArgumentParser()
    parser.add_argument("config", default="lorenz.yaml", help="Config yaml file")
    parser.add_argument("-r", "--reload", action="store_true", default=False, help="Reload netCDF and csv files")
    parser.add_argument("-g", "--gan", action="store_true", default=False, help="Train GAN")
    args = parser.parse_args()
    config_file = args.config
    with open(config_file) as config_obj:
        # NOTE(review): yaml.load without an explicit Loader is unsafe on
        # untrusted input and raises TypeError on PyYAML >= 6.  Consider
        # yaml.safe_load(config_obj) -- confirm no custom tags are used.
        config = yaml.load(config_obj)
    if not exists(config["gan"]["gan_path"]):
        mkdir(config["gan"]["gan_path"])
    # Coupling scale factor h * c / b applied to the unresolved forcing.
    u_scale = config["lorenz"]["h"] * config["lorenz"]["c"] / config["lorenz"]["b"]
    # Number of model states actually written out, and the train/test split index.
    saved_steps = (config["lorenz"]["num_steps"] - config["lorenz"]["burn_in"]) // config["lorenz"]["skip"]
    split_step = int(config["lorenz"]["train_test_split"] * saved_steps)
    #val_split_step = int(config["lorenz"]["val_split"] * saved_steps)
    if args.reload:
        # Reuse the previously generated truth run and training table.
        print("Reloading csv data")
        combined_data = pd.read_csv(config["output_csv_file"])
        lorenz_output = xr.open_dataset(config["output_nc_file"])
        X_out = lorenz_output["lorenz_x"].values
    else:
        # Integrate the truth model, derive train/test tables, save them,
        # and fit the baseline submodels (AR1, histogram, polynomial).
        X_out, Y_out, times, steps = generate_lorenz_data(config["lorenz"])
        print(X_out.shape, Y_out.shape, saved_steps, split_step)
        combined_data = process_lorenz_data(X_out[:split_step], times[:split_step],
                                            steps[:split_step],
                                            config["lorenz"]["J"], config["lorenz"]["F"],
                                            config["lorenz"]["time_step"] * config["lorenz"]["skip"],
                                            config["gan"]["x_skip"],
                                            config["gan"]["t_skip"], u_scale)
        combined_test_data = process_lorenz_data(X_out[split_step:], times[split_step:],
                                                 steps[split_step:],
                                                 config["lorenz"]["J"], config["lorenz"]["F"],
                                                 config["lorenz"]["time_step"] * config["lorenz"]["skip"],
                                                 config["gan"]["x_skip"],
                                                 config["gan"]["t_skip"], u_scale)
        save_lorenz_output(X_out, Y_out, times, steps, config["lorenz"], config["output_nc_file"])
        combined_data.to_csv(config["output_csv_file"], index=False)
        combined_test_data.to_csv(str(config["output_csv_file"]).replace(".csv", "_test.csv"))
        train_random_updater(X_out[:, 1], config["random_updater"]["out_file"])
        u_vals = combined_data["u_scale"] * combined_data["Ux_t+1"]
        train_histogram(combined_data["X_t"].values,
                        u_vals, **config["histogram"])
        train_poly(combined_data["X_t"].values, u_vals, **config["poly"])
    # Diagnose the implied unresolved forcing on X_0: the single-column L96
    # tendency minus the finite-difference time derivative of X_0.
    x_time_series = X_out[:split_step-1, 0:1]
    u_time_series = (-X_out[:split_step-1, -1] * (X_out[:split_step-1, -2] - X_out[:split_step-1, 1])
                     - X_out[:split_step-1, 0] + config["lorenz"]["F"]) \
                    - (X_out[1:split_step, 0] - X_out[:split_step-1, 0]) / config["lorenz"]["time_step"] / config["lorenz"]["skip"]
    #x_val_time_series = X_out[split_step:val_split_step - 1, 0:1]
    #u_val_time_series = (-X_out[split_step:val_split_step - 1, -1] * (X_out[split_step:val_split_step - 1, -2] - X_out[split_step:val_split_step - 1, 1])
    #                 - X_out[split_step:val_split_step - 1, 0] + config["lorenz"]["F"]) \
    #                - (X_out[split_step + 1:val_split_step, 0] - X_out[split_step:val_split_step - 1, 0]) / config["lorenz"]["time_step"] / \
    #                  config["lorenz"]["skip"]
    # Lagged table: current X, current forcing and next-step forcing.
    combined_time_series = pd.DataFrame({"X_t": x_time_series[1:].ravel(), "Ux_t": u_time_series[:-1],
                                         "Ux_t+1": u_time_series[1:]}, columns=["X_t", "Ux_t", "Ux_t+1"])
    print(u_time_series.min(), u_time_series.max(), u_time_series.mean())
    combined_time_series.to_csv(config["output_csv_file"].replace(".csv", "_ts_val.csv"))
    # Optional additional submodels, trained only when configured.
    if "poly_add" in config.keys():
        train_poly_add(x_time_series,
                       u_time_series,
                       **config["poly_add"])
    if "ann" in config.keys():
        train_ann(combined_data["X_t"].values.reshape(-1, 1),
                  combined_data["Ux_t+1"].values.reshape(-1, 1),
                  config["ann"])
    if "ann_res" in config.keys():
        print("X in", x_time_series.min(), x_time_series.max())
        print("U out", u_time_series.min(), u_time_series.max())
        train_ann_res(x_time_series,
                      u_time_series,
                      config["ann_res"])
    if args.gan:
        train_lorenz_gan(config, combined_data, combined_time_series)
    return
def generate_lorenz_data(config):
    """Integrate the Lorenz '96 truth model described by *config*.

    Args:
        config: dict with keys K, J, h, F, b, c, time_step, num_steps,
            burn_in and skip (the "lorenz" section of the yaml file).

    Returns:
        Tuple (x_out, y_out, times, steps) from run_lorenz96_truth.
    """
    # Start from rest except for a unit perturbation in the first X and Y.
    x_init = np.zeros(config["K"], dtype=np.float32)
    y_init = np.zeros(config["J"] * config["K"], dtype=np.float32)
    x_init[0] = 1
    y_init[0] = 1
    return run_lorenz96_truth(x_init, y_init, config["h"], config["F"],
                              config["b"], config["c"], config["time_step"],
                              config["num_steps"], config["burn_in"],
                              config["skip"])
def train_lorenz_gan(config, combined_data, combined_time_series):
    """
    Train a GAN to emulate the unresolved Lorenz '96 forcing.

    Normalizes the conditional inputs and outputs, builds the generator and
    discriminator specified by config["gan"]["structure"], trains them, then
    fits an AR(1) model to the generator's residuals on the held-out time
    series and pickles it.

    Args:
        config: full parsed yaml config; only the "gan", "num_procs" and
            "random_updater" entries are used here.
        combined_data: DataFrame of training samples (columns named by
            config["gan"]["cond_inputs"] and config["gan"]["output_cols"]).
        combined_time_series: DataFrame with the lagged X/U time series used
            to compute generator residuals.

    Returns:
        None (models and scaling values are written to gan_path).
    """
    if "num_procs" in config.keys():
        num_procs = config["num_procs"]
    else:
        num_procs = 1
    # Pin the TF1-compat session thread counts before any Keras model is built.
    sess = tf.compat.v1.Session(config=tf.compat.v1.ConfigProto(intra_op_parallelism_threads=num_procs,
                                                     inter_op_parallelism_threads=1))
    K.set_session(sess)
    x_cols = config["gan"]["cond_inputs"]
    y_cols = config["gan"]["output_cols"]
    X_series = combined_data[x_cols].values
    Y_series = combined_data[y_cols].values
    X_norm, X_scaling_values = normalize_data(X_series)
    # "mean" mode trains on the mean of the Y columns; otherwise on full samples.
    if config["gan"]["output"].lower() == "mean":
        Y_norm, Y_scaling_values = normalize_data(np.expand_dims(Y_series.mean(axis=1), axis=-1))
    else:
        Y_norm, Y_scaling_values = normalize_data(Y_series)
    # Persist the scaling values so the trained GAN can be applied later.
    X_scaling_values.to_csv(join(config["gan"]["gan_path"],
                                 "gan_X_scaling_values_{0:04d}.csv".format(config["gan"]["gan_index"])),
                            index_label="Channel")
    Y_scaling_values.to_csv(join(config["gan"]["gan_path"],
                                 "gan_Y_scaling_values_{0:04d}.csv".format(config["gan"]["gan_index"])),
                            index_label="Channel")
    # Samples that do not fill a whole batch are trimmed off below.
    trim = X_norm.shape[0] % config["gan"]["batch_size"]
    # Build generator/discriminator for the requested architecture; the
    # stochastic variants need a longer random input vector (extra noise for
    # the hidden layers and conditional inputs).
    if config["gan"]["structure"] == "dense":
        gen_model = generator_dense(**config["gan"]["generator"])
        disc_model = discriminator_dense(**config["gan"]["discriminator"])
        rand_vec_length = config["gan"]["generator"]["num_random_inputs"]
    elif config["gan"]["structure"] == "specified_random":
        gen_model = generator_dense_stoch(**config["gan"]["generator"])
        disc_model = discriminator_dense(**config["gan"]["discriminator"])
        rand_vec_length = config["gan"]["generator"]["num_random_inputs"] + \
                          2 * config["gan"]["generator"]["num_hidden_neurons"] + \
                          config["gan"]["generator"]["num_cond_inputs"]
    elif config["gan"]["structure"] == "auto_stoch":
        gen_model = generator_dense_auto_stoch(**config["gan"]["generator"])
        disc_model = discriminator_dense(**config["gan"]["discriminator"])
        rand_vec_length = config["gan"]["generator"]["num_random_inputs"] + \
                          2 * config["gan"]["generator"]["num_hidden_neurons"] + \
                          config["gan"]["generator"]["num_cond_inputs"]
    elif config["gan"]["structure"] == "concrete":
        gen_model = generator_conv_concrete(**config["gan"]["generator"])
        disc_model = discriminator_conv_concrete(**config["gan"]["discriminator"])
        rand_vec_length = config["gan"]["generator"]["num_random_inputs"]
    else:
        # Default: convolutional generator/discriminator.
        gen_model = generator_conv(**config["gan"]["generator"])
        disc_model = discriminator_conv(**config["gan"]["discriminator"])
        rand_vec_length = config["gan"]["generator"]["num_random_inputs"]
    optimizer = Adam(lr=config["gan"]["learning_rate"], beta_1=0.5, beta_2=0.9)
    loss = config["gan"]["loss"]
    gen_disc = initialize_gan(gen_model, disc_model, loss, optimizer, config["gan"]["metrics"])
    if trim > 0:
        Y_norm = Y_norm[:-trim]
        X_norm = X_norm[:-trim]
    train_gan(np.expand_dims(Y_norm, -1), X_norm, gen_model, disc_model, gen_disc, config["gan"]["batch_size"],
              rand_vec_length, config["gan"]["gan_path"],
              config["gan"]["gan_index"], config["gan"]["num_epochs"], config["gan"]["metrics"])
    # Deterministic generator predictions (zero noise, test phase) on the
    # time series, used to fit an AR(1) model to the residuals.
    gen_pred_func = predict_stochastic(gen_model)
    x_ts_norm, _ = normalize_data(combined_time_series[x_cols].values,
                                  scaling_values=X_scaling_values)
    gen_ts_pred_norm = gen_pred_func([x_ts_norm,
                                     np.zeros((x_ts_norm.shape[0], rand_vec_length)), 0])[0]
    print(gen_ts_pred_norm.shape)
    gen_ts_preds = unnormalize_data(gen_ts_pred_norm, scaling_values=Y_scaling_values)
    gen_ts_residuals = combined_time_series[y_cols].values.ravel() - gen_ts_preds.ravel()
    train_random_updater(gen_ts_residuals,
                         config["random_updater"]["out_file"].replace(".pkl",
                                                                      "_{0:04d}.pkl".format(config["gan"]["gan_index"])))
def train_random_updater(data, out_file):
    """Fit an AR(1) random updater to *data* and pickle it to *out_file*."""
    updater = AR1RandomUpdater()
    updater.fit(data)
    print("AR1 Corr:", updater.corr)
    print("AR1 Noise SD:", updater.noise_sd)
    with open(out_file, "wb") as pickle_file:
        pickle.dump(updater, pickle_file, pickle.HIGHEST_PROTOCOL)
def train_histogram(x_data, u_data, num_x_bins=10, num_u_bins=10, out_file="./histogram.pkl"):
    """Fit a 2D histogram submodel of U conditioned on X and pickle it."""
    model = SubModelHist(num_x_bins, num_u_bins)
    model.fit(x_data, u_data)
    with open(out_file, "wb") as pickle_file:
        pickle.dump(model, pickle_file, pickle.HIGHEST_PROTOCOL)
def train_poly(x_data, u_data, num_terms=3, noise_type="additive", out_file="./poly.pkl"):
    """Fit a polynomial submodel with the given noise type and pickle it."""
    model = SubModelPoly(num_terms=num_terms, noise_type=noise_type)
    model.fit(x_data, u_data)
    with open(out_file, "wb") as pickle_file:
        pickle.dump(model, pickle_file, pickle.HIGHEST_PROTOCOL)
def train_poly_add(x_data, u_data, num_terms=3, out_file="./poly_add.pkl"):
    """Fit an additive polynomial submodel and pickle it."""
    model = SubModelPolyAdd(num_terms=num_terms)
    model.fit(x_data, u_data)
    with open(out_file, "wb") as pickle_file:
        pickle.dump(model, pickle_file, pickle.HIGHEST_PROTOCOL)
def train_ann(x_data, u_data, config):
    """Fit the ANN submodel and save it to config["out_path"]."""
    print("ANN Input shapes", x_data.shape, u_data.shape)
    model = SubModelANN(**config)
    model.fit(x_data, u_data)
    model.save_model(config["out_path"])
def train_ann_res(x_data, u_data, config):
    """Fit the residual ANN submodel and save it to config["out_path"]."""
    model = SubModelANNRes(**config)
    model.fit(x_data, u_data)
    model.save_model(config["out_path"])
# Script entry point: parse the config and run data generation/training.
if __name__ == "__main__":
    main()
| [
"[email protected]"
] | |
e8c85a198d85379bb175f16c9e3150e47000350b | f03e771eb4c1f300ae819179090efc388bcc6d32 | /src/pymine/tile/Tile.py | 3547ab54aab3f2536690989bfb2649112dcd016b | [] | no_license | lacthan28/PyMine | d8d2365b0aabefcb056754260f67095dbcbe62ff | e7d4778f01181d45551c02fa0cef151327fa240a | refs/heads/master | 2021-01-21T19:50:48.417635 | 2017-06-30T05:38:46 | 2017-06-30T05:38:46 | 92,161,042 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,618 | py | # -*- coding: utf-8 -*-
import inspect
from abc import *
from pymine.event.Timings import Timings
from pymine.level.Level import Level
from pymine.level.Position import Position
from pymine.nbt.tag.CompoundTag import CompoundTag
from spl.stubs.Core import isset, microtime
class Tile(Position, metaclass=ABCMeta):
    """Abstract base class for block entities ("tiles") such as chests,
    furnaces and signs, positioned inside a level.

    Fix: the previous header ``class Tile(metaclass = ABCMeta, Position)``
    was a SyntaxError -- keyword arguments must follow positional bases.
    """
    # Save-id names for each known tile type.
    BREWING_STAND = "BrewingStand"
    CHEST = "Chest"
    ENCHANT_TABLE = "EnchantTable"
    FLOWER_POT = "FlowerPot"
    FURNACE = "Furnace"
    ITEM_FRAME = "ItemFrame"
    MOB_SPAWNER = "MobSpawner"
    SIGN = "Sign"
    SKULL = "Skull"
    DISPENSER = "Dispenser"
    DROPPER = "Dropper"
    CAULDRON = "Cauldron"
    HOPPER = "Hopper"
    BEACON = "Beacon"
    ENDER_CHEST = "EnderChest"

    # Shared counter used to hand out tile ids.
    # NOTE(review): presumably meant "tileCount"; name kept for compatibility.
    titleCount = 1
    # Registries: save-id name -> tile class and the reverse mapping.
    knownTiles = { }
    shortNames = { }
    # Per-instance state, initialized in __init__.
    chunk = None
    name = None
    id = None
    attach = None
    metadata = None
    closed = False
    namedtag = None
    lastUpdate = None
    server = None
    timings = None
    tickTimer = None
    def init(self):
        """Register every built-in tile type with the class registry.

        NOTE(review): Beacon, Chest, EnchantTable, etc. are not imported in
        this module's visible imports -- confirm they are provided elsewhere.
        This also only touches class-level state, so it presumably should be
        a @staticmethod.
        """
        Tile.registerTile(Beacon)
        Tile.registerTile(Chest)
        Tile.registerTile(EnchantTable)
        Tile.registerTile(FlowerPot)
        Tile.registerTile(Furnace)
        Tile.registerTile(ItemFrame)
        Tile.registerTile(Sign)
        Tile.registerTile(Skull)
        Tile.registerTile(Cauldron)
        Tile.registerTile(Hopper)
        Tile.registerTile(EnderChest)
    @staticmethod
    def createTile(type, level: Level, nbt: CompoundTag, *args):
        """
        Instantiate the registered tile class for *type*, or return None.

        :param str type: save-id name of the tile (note: shadows the builtin)
        :param Level level: level the tile is created in
        :param CompoundTag nbt: tile data passed to the constructor
        :param args: extra positional arguments forwarded to the constructor
        :rtype: Tile
        :return: new tile instance, or None when *type* is not registered
        """
        # isset() is the PHP-style helper from spl.stubs.
        # NOTE(review): Tile.knownTiles[type] is evaluated before isset() can
        # check it -- confirm this does not raise KeyError for unknown types.
        if isset(Tile.knownTiles[type]):
            cls = Tile.knownTiles[type]
            return cls(level, nbt, *args)
        return None
    @classmethod
    def registerTile(cls):
        """
        Register *cls* in the known-tiles maps.

        :rtype: bool
        :return: True when the class was registered, False otherwise

        NOTE(review): this looks like an incomplete port of the PHP original.
        It takes no class argument yet is called as
        ``Tile.registerTile(Beacon)`` in init(), which binds cls to Tile and
        passes Beacon as an unexpected extra positional argument.  ``cls()``
        also instantiates with no arguments although __init__ requires
        (level, nbt), and ``isinstance(cls, Tile)`` tests the class object,
        not an instance.  Confirm intended behavior before relying on this.
        """
        classs = cls()
        if isinstance(cls, Tile) and not inspect.isabstract(classs):
            Tile.knownTiles[type(classs).__name__] = cls
            Tile.shortNames[cls] = type(classs).__name__
            return True
        return False
    def getSaveId(self):
        """Return the registered short (save-id) name for this tile class.

        NOTE(review): relies on shortNames already containing this class
        (see registerTile); ``__class__`` resolves via the implicit closure
        cell to the class this method is defined in.
        """
        return Tile.shortNames[__class__]
def __init__(self, level: Level, nbt: CompoundTag):
self.timings = Timings.getTileEntityTimings(self)
self.namedtag = nbt
self.server = level.getServer()
self.setLevel(level)
self.chunk = level.getChunk(self.namedtag['x'] >> 4, self.namedtag['z'] >> 4, False)
assert self.chunk is not None
self.name = ""
self.lastUpdate = microtime(True)
self.id = Tile.titleCount + 1
self.x = int(self.namedtag['x'])
self.y = int(self.namedtag['y'])
self.z = int(self.namedtag['z'])
self.chunk.addTile(self)
self.getLevel().addTile(self)
self.tickTimer = Timings.getTileEntityTimings(self)
    def getId(self):
        """Return this tile's unique numeric id (assigned in __init__)."""
        return self.id
def saveNBT(self):
self.namedtag.id = StringTag()
| [
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.