Dataset schema, one record per Python function:
  body_hash: string, 64 chars
  body: string, 23 to 109k chars
  docstring: string, 1 to 57k chars
  path: string, 4 to 198 chars
  name: string, 1 to 115 chars
  repository_name: string, 7 to 111 chars
  repository_stars: float64, 0 to 191k
  lang: 1 class (value: python)
  body_without_docstring: string, 14 to 108k chars
  unified: string, 45 to 133k chars

Each record below is shown as a metadata line (body_hash | path | name | repository_name | repository_stars | lang) followed by the function body with its docstring; the body_without_docstring and unified fields repeat the body verbatim.
1024fabdd769a3c2995a69b114aeae9e160aa2259286f48ec4dd92e47543d768 | arbitrage/arbitrer.py | arbitrage_opportunity | acontry/altcoin-arbitrage | 7 | python

def arbitrage_opportunity(self, kask, ask, kbid, bid):
    """Calculates arbitrage opportunity for specified bid and ask, and
    presents this opportunity to all observers.

    Keyword arguments:
    kask -- market in depths that contains the relevant asks
    ask -- lowest ask (in dict form along with amount)
    kbid -- market in depths that contains the relevant bids
    bid -- highest bid (in dict form along with amount)
    """
    profit, volume, buyprice, sellprice, weighted_buyprice, weighted_sellprice = \
        self.check_opportunity(kask, kbid)
    if volume == 0 or buyprice == 0:
        return
    perc = profit / (volume * buyprice) * 100
    for observer in self.observers:
        observer.opportunity(profit, volume, buyprice, kask, sellprice, kbid,
                             perc, weighted_buyprice, weighted_sellprice)
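A minimal sketch of the observer side of this call: the class name and print format below are assumptions, only the opportunity() signature is taken from the loop above.

class LoggingObserver:
    """Hypothetical observer: receives each arbitrage opportunity found above."""

    def opportunity(self, profit, volume, buyprice, kask, sellprice, kbid,
                    perc, weighted_buyprice, weighted_sellprice):
        # Report the trade: buy `volume` at `buyprice` on market `kask`,
        # sell at `sellprice` on market `kbid`, for `perc` percent profit.
        print('profit: %f, volume: %f, buy %f @ %s, sell %f @ %s (%.2f%%)'
              % (profit, volume, buyprice, kask, sellprice, kbid, perc))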
fcdac7b25a237348cd78a7d9b5c6d73561abbb80ca328081956cea281ddcf15d | arbitrage/arbitrer.py | tickers | acontry/altcoin-arbitrage | 7 | python

def tickers(self):
    """Update markets and print tickers to verbose log."""
    for market_name, market in self.markets.items():
        logging.verbose('ticker: ' + market.name + ' - ' + str(market.get_ticker()))
3393551721c0daba841faeef2001d3da9f2ea2a7d9139b78c2ddf0c7f65359a1 | arbitrage/arbitrer.py | loop | acontry/altcoin-arbitrage | 7 | python

def loop(self):
    """Main loop."""
    while True:
        self.update_depths()
        self.tickers()
        self.tick()
        time.sleep(config.refresh_rate)
e7d87cfe6e2f1525cfa2cd24954fd3d797ff7374f2ffc766f903a01f1ffa2562 | diskimgr/configure.py | parseCommandLine | bitsgalore/diskimgr | 6 | python

def parseCommandLine(parser):
    """Parse command line"""
    parser.add_argument('--remove', '-r',
                        action='store_true',
                        dest='removeFlag',
                        default=False,
                        help='remove all diskimgr configuration files')
    args = parser.parse_args()
    return args
e8f46d09c2e2ad91e64859a206eac5cf592edecc16874fc2e2945b39088ad7c5 | diskimgr/configure.py | errorExit | bitsgalore/diskimgr | 6 | python

def errorExit(msg):
    """Print error to stderr and exit"""
    msgString = 'ERROR: ' + msg + '\n'
    sys.stderr.write(msgString)
    sys.exit(1)
56c3f7079f23b449849ecd5e7191b8702b96a3541d74767286a9e48a6993b440 | diskimgr/configure.py | infoMessage | bitsgalore/diskimgr | 6 | python

def infoMessage(msg):
    """Print message to stderr"""
    msgString = 'INFO: ' + msg + '\n'
    sys.stderr.write(msgString)
43e0d77fadc206079f1b259169d6827605dd97ea062010e9e879a141d5f4e1b2 | diskimgr/configure.py | writeConfigFile | bitsgalore/diskimgr | 6 | python

def writeConfigFile(configRootDir, removeFlag):
    """Create configuration file"""
    configDir = os.path.join(configRootDir, 'diskimgr')
    if not removeFlag:
        if not os.path.isdir(configDir):
            os.mkdir(configDir)
    fConfig = os.path.join(configDir, 'diskimgr.json')
    configSettings = {}
    configSettings['retries'] = '4'
    configSettings['checksumFileName'] = 'checksums.sha512'
    configSettings['logFileName'] = 'diskimgr.log'
    configSettings['metadataFileName'] = 'metadata.json'
    configSettings['blockSize'] = '512'
    configSettings['prefix'] = 'disc'
    configSettings['extension'] = 'img'
    configSettings['rescueDirectDiscMode'] = 'False'
    configSettings['autoRetry'] = 'False'
    configSettings['timeZone'] = 'Europe/Amsterdam'
    configSettings['defaultDir'] = ''
    if not removeFlag:
        infoMessage('writing configuration file ' + fConfig)
        with io.open(fConfig, 'w', encoding='utf-8') as f:
            json.dump(configSettings, f, indent=4, sort_keys=True)
    else:
        if os.path.isfile(fConfig):
            infoMessage('removing configuration file ' + fConfig)
            os.remove(fConfig)
        if os.path.isdir(configDir):
            infoMessage('removing configuration directory ' + configDir)
            os.rmdir(configDir)
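A small round-trip sketch (the read_config helper is hypothetical, not part of diskimgr) showing how the settings written above can be loaded back:

import io
import json
import os

def read_config(configRootDir):
    # Load the settings written by writeConfigFile; assumes the file exists.
    fConfig = os.path.join(configRootDir, 'diskimgr', 'diskimgr.json')
    with io.open(fConfig, 'r', encoding='utf-8') as f:
        return json.load(f)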
0c3b3137a09a340151d47a0337b5b0e1d2bca1a55878a6ee763e50d42b6123ab | diskimgr/configure.py | writeDesktopFiles | bitsgalore/diskimgr | 6 | python

def writeDesktopFiles(packageDir, applicationsDir, desktopDir, removeFlag):
    """Creates desktop files in /usr/share/applications and on desktop"""
    sudoUID = os.environ.get('SUDO_UID')
    sudoGID = os.environ.get('SUDO_GID')
    pathName = os.path.abspath(os.path.dirname(sys.argv[0]))
    iconFile = os.path.join(packageDir, 'icons', 'diskimgr.png')
    if not os.path.isfile(iconFile):
        msg = 'cannot find icon file'
        errorExit(msg)
    fApplications = os.path.join(applicationsDir, 'diskimgr.desktop')
    desktopList = []
    desktopList.append('[Desktop Entry]')
    desktopList.append('Type=Application')
    desktopList.append('Encoding=UTF-8')
    desktopList.append('Name=diskimgr')
    desktopList.append('Comment=Simple optical media imaging and extraction tool')
    desktopList.append('Exec=' + os.path.join(pathName, 'diskimgr'))
    desktopList.append('Icon=' + iconFile)
    desktopList.append('Terminal=false')
    desktopList.append('Categories=Utility;System;GTK')
    if not removeFlag:
        try:
            infoMessage('creating desktop file ' + fApplications)
            with io.open(fApplications, 'w', encoding='utf-8') as fA:
                for line in desktopList:
                    fA.write(line + '\n')
        except Exception:
            msg = 'Failed to create ' + fApplications
            errorExit(msg)
    elif os.path.isfile(fApplications):
        infoMessage('removing desktop file ' + fApplications)
        os.remove(fApplications)
8c53525ccdde6480e557df8b299376fb64864d790b30b6ac97e8b02b417007d9 | diskimgr/configure.py | main | bitsgalore/diskimgr | 6 | python

def main():
    """
    Creates the following items:
    - configuration directory diskimgr in ~/.config/ or /etc/
    - configuration file in configuration directory
    - desktop file in ~/.local/share/applications/ or /usr/share/applications
    If the --remove / -r switch is given the above items
    are removed (if they exist)
    """
    parser = argparse.ArgumentParser(description='diskimgr configuration tool')
    args = parseCommandLine(parser)
    removeFlag = args.removeFlag
    sudoUser = os.environ.get('SUDO_USER')
    packageDir = os.path.dirname(os.path.abspath(__file__))
    try:
        homeDir = os.path.normpath('/home/' + sudoUser)
    except TypeError:
        homeDir = os.path.normpath(os.path.expanduser('~'))
    if packageDir.startswith(homeDir):
        globalInstall = False
        configRootDir = os.path.join(homeDir, '.config/')
        applicationsDir = os.path.join(homeDir, '.local/share/applications/')
    else:
        globalInstall = True
        configRootDir = os.path.normpath('/etc/')
        applicationsDir = os.path.normpath('/usr/share/applications')
    desktopDir = os.path.join(homeDir, 'Desktop/')
    if globalInstall and sudoUser is None:
        msg = 'this script must be run as root for a global installation'
        errorExit(msg)
    if not os.access(configRootDir, os.W_OK | os.X_OK):
        msg = 'cannot write to ' + configRootDir
        errorExit(msg)
    if not os.access(applicationsDir, os.W_OK | os.X_OK):
        msg = 'cannot write to ' + applicationsDir
        errorExit(msg)
    if not os.access(desktopDir, os.W_OK | os.X_OK):
        msg = 'cannot write to ' + desktopDir
        errorExit(msg)
    writeConfigFile(configRootDir, removeFlag)
    writeDesktopFiles(packageDir, applicationsDir, desktopDir, removeFlag)
    infoMessage('diskimgr configuration completed successfully!')
88ad0d71f15a2a8881930991b096aabac8f1eb9b5d77ac8d6e515f0cd746ba33 | leetcode.com/python/132_Palindrome_Partitioning_II.py | minCut | its-sushant/coding-interview-gym | 713 | python

def minCut(self, s):
    """
    :type s: str
    :rtype: int
    """
    # Brute-force recursion over all palindromic prefixes
    # (the helper is defined elsewhere in the file).
    return self.minCutHelper(s, 0, len(s) - 1)
d627d03482604cd2c91a331b47c07c8b06477d572e4d2ef3f7ab954b03d110eb | leetcode.com/python/132_Palindrome_Partitioning_II.py | minCut | its-sushant/coding-interview-gym | 713 | python

def minCut(self, s):
    """
    :type s: str
    :rtype: int
    """
    # Top-down variant with memo tables for minimum cuts and palindrome
    # checks (the recursive helper is defined elsewhere in the file).
    strLen = len(s)
    dpMinCuts = [[-1 for _ in range(strLen)] for _ in range(strLen)]
    dpIsPalindrome = [[-1 for _ in range(strLen)] for _ in range(strLen)]
    return self.minCutHelper(s, 0, strLen - 1, dpMinCuts, dpIsPalindrome)
4896fb10d6949b832eb7ca86005bf63c95c00b156b2db194ca6aeab39932738e | leetcode.com/python/132_Palindrome_Partitioning_II.py | minCut | its-sushant/coding-interview-gym | 713 | python

def minCut(self, s):
    """
    :type s: str
    :rtype: int
    """
    strLen = len(s)
    # dpIsPalindrome[i][j] is True when s[i..j] is a palindrome.
    dpIsPalindrome = [[False for _ in range(strLen)] for _ in range(strLen)]
    for i in range(strLen):
        dpIsPalindrome[i][i] = True
    for strIdx in range(strLen - 1, -1, -1):
        for endIdx in range(strIdx + 1, strLen):
            if s[strIdx] == s[endIdx]:
                if endIdx - strIdx == 1 or dpIsPalindrome[strIdx + 1][endIdx - 1]:
                    dpIsPalindrome[strIdx][endIdx] = True
    # dpMinCuts[i] is the minimum number of cuts needed for the suffix s[i:].
    dpMinCuts = [0 for _ in range(strLen)]
    for strIdx in range(strLen - 1, -1, -1):
        minCuts = strLen
        for endIdx in range(strLen - 1, strIdx - 1, -1):
            if dpIsPalindrome[strIdx][endIdx]:
                minCuts = 0 if endIdx == strLen - 1 else min(minCuts, 1 + dpMinCuts[endIdx + 1])
        dpMinCuts[strIdx] = minCuts
    return dpMinCuts[0]
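Assuming the usual LeetCode wrapper (a Solution class holding the method above, an assumption of this note), a quick check of the bottom-up version:

solution = Solution()
print(solution.minCut('aab'))    # 1: split as "aa" | "b"
print(solution.minCut('abcba'))  # 0: the whole string is already a palindrome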
ebb4a1428fe96fc782194d848c4d679b692eff05c0972bfe43cb032b1f9c7edd | src/double_dqn.py | train_network | yanyongyu/FlappyBird | 14 | python

def train_network(self):
    """Train the network"""
    # Periodically copy the evaluation network weights into the target network.
    if self.timeStep % 500 == 0:
        self.sess.run(self.target_replace_op)
    # Sample a random minibatch of transitions from replay memory.
    minibatch = random.sample(self.replayMemory, BATCH)
    state_batch = [data[0] for data in minibatch]
    action_batch = [data[1] for data in minibatch]
    reward_batch = [data[2] for data in minibatch]
    next_state_batch = [data[3] for data in minibatch]
    q_target = []
    # Double DQN: the evaluation network selects the next action and the
    # target network evaluates it.
    readout_j1_batch = self.readout_t.eval(feed_dict={self.target_net_input: next_state_batch})
    readout_j1_batch_for_action = self.readout_e.eval(feed_dict={self.eval_net_input: next_state_batch})
    max_act4next = np.argmax(readout_j1_batch_for_action, axis=1)
    selected_q_next = readout_j1_batch[range(len(max_act4next)), max_act4next]
    for i in range(BATCH):
        terminal = minibatch[i][4]
        if terminal:
            q_target.append(reward_batch[i])
        else:
            q_target.append(reward_batch[i] + GAMMA * selected_q_next[i])
    _, result = self.sess.run([self.train_step, self.merged],
                              feed_dict={self.q_target: q_target,
                                         self.action_input: action_batch,
                                         self.eval_net_input: state_batch})
    if (self.timeStep + 1) % 1000 == 0:
        self.writer.add_summary(result, global_step=self.timeStep + 1)
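The target_replace_op run every 500 steps is not part of this record; a common TF1 construction is sketched below, where the variable-scope names 'eval_net' and 'target_net' are assumptions rather than names taken from the repository:

import tensorflow as tf

# Collect the variables of both networks by scope and build assign ops that
# hard-copy the evaluation weights into the target network when run.
t_params = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='target_net')
e_params = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='eval_net')
target_replace_op = [tf.assign(t, e) for t, e in zip(t_params, e_params)]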
c7aadd5f387e76fc6c3be541b36a5ee7e80fa7e787bf940783c486e81786cf0e | lenstronomy/Util/kernel_util.py | de_shift_kernel | guoxiaowhu/lenstronomy | 1 | python

def de_shift_kernel(kernel, shift_x, shift_y, iterations=20):
    """
    de-shifts a shifted kernel to the center of a pixel. This is performed iteratively.

    The input kernel is the solution of a linear interpolated shift of a sharper kernel centered in the middle of
    the pixel. To find the de-shifted kernel, we perform an iterative correction of proposed de-shifted kernels and
    compare their shifted version with the input kernel.

    :param kernel: (shifted) kernel, e.g. a star in an image that is not centered in the pixel grid
    :param shift_x: x-offset relative to the center of the pixel (sub-pixel shift)
    :param shift_y: y-offset relative to the center of the pixel (sub-pixel shift)
    :return: de-shifted kernel such that the interpolated shift by (shift_x, shift_y) results in the input kernel
    """
    nx, ny = np.shape(kernel)
    # Pad by one pixel on each side, filled with the mean of the corner values.
    kernel_new = np.zeros((nx + 2, ny + 2)) + (kernel[0, 0] + kernel[0, -1] + kernel[-1, 0] + kernel[-1, -1]) / 4.
    kernel_new[1:-1, 1:-1] = kernel
    # Split the shift into an integer part (applied once) and a fractional part (iterated on).
    int_shift_x = int(round(shift_x))
    frac_x_shift = shift_x - int_shift_x
    int_shift_y = int(round(shift_y))
    frac_y_shift = shift_y - int_shift_y
    kernel_init = copy.deepcopy(kernel_new)
    kernel_init_shifted = copy.deepcopy(interp.shift(kernel_init, [int_shift_y, int_shift_x], order=1))
    kernel_new = interp.shift(kernel_new, [int_shift_y, int_shift_x], order=1)
    norm = np.sum(kernel_init_shifted)
    for i in range(iterations):
        # Shift the current proposal back and correct it by the residual.
        kernel_shifted_inv = interp.shift(kernel_new, [-frac_y_shift, -frac_x_shift], order=1)
        delta = kernel_init_shifted - kernel_norm(kernel_shifted_inv) * norm
        kernel_new += delta
        kernel_new = kernel_norm(kernel_new) * norm
    return kernel_new[1:-1, 1:-1]
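A round-trip sketch (the Gaussian test kernel and the sign convention of the forward shift are assumptions based on the fixed point of the iteration above):

import numpy as np
import scipy.ndimage.interpolation as interp

# Build a centered 9x9 Gaussian kernel, shift it off-center by a sub-pixel
# offset with the same linear interpolation, then recover the centered kernel.
x, y = np.meshgrid(np.arange(9) - 4, np.arange(9) - 4)
kernel_true = np.exp(-(x ** 2 + y ** 2) / 2.)
kernel_true /= np.sum(kernel_true)
shift_x, shift_y = 0.3, 0.2
kernel_shifted = interp.shift(kernel_true, [-shift_y, -shift_x], order=1)
kernel_recovered = de_shift_kernel(kernel_shifted, shift_x, shift_y, iterations=50)
# kernel_recovered should approximate kernel_true up to interpolation error.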
eeb6a1fe9a3e00428ed638761096ef75d4a255d60ea0c22eb94300c62a642556 | lenstronomy/Util/kernel_util.py | center_kernel | guoxiaowhu/lenstronomy | 1 | python

def center_kernel(kernel, iterations=20):
    """
    given a kernel that might not be perfectly centered, this routine computes its light weighted center and then
    moves the center in an iterative process such that it is centered

    :param kernel: 2d array (odd numbers)
    :param iterations: int, number of iterations
    :return: centered kernel
    """
    kernel = kernel_norm(kernel)
    nx, ny = np.shape(kernel)
    if nx % 2 == 0:
        raise ValueError('kernel needs odd number of pixels')
    # Light-weighted centroid of the kernel.
    x_grid, y_grid = util.make_grid(nx, deltapix=1, left_lower=False)
    x_w = np.sum(kernel * util.array2image(x_grid))
    y_w = np.sum(kernel * util.array2image(y_grid))
    # Shift the kernel back by the measured centroid offset.
    kernel_centered = de_shift_kernel(kernel, shift_x=-x_w, shift_y=-y_w, iterations=iterations)
    return kernel_norm(kernel_centered)
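A quick sketch of the expected behaviour (the single-pixel test kernel is an assumption):

import numpy as np

# A delta-like kernel whose peak is displaced from the central pixel (3, 3)
# should come back with its light-weighted centroid on the central pixel.
kernel = np.zeros((7, 7))
kernel[2, 4] = 1.
kernel_centered = center_kernel(kernel, iterations=20)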
b953adbe20505399a69f94688022d7446512d0236d7aeb5b074c9c87016a8ba8 | lenstronomy/Util/kernel_util.py | kernel_norm | guoxiaowhu/lenstronomy | 1 | python

def kernel_norm(kernel):
    """
    :param kernel: 2d array of the PSF kernel
    :return: normalisation of the psf kernel
    """
    norm = np.sum(np.array(kernel))
    # note: for numpy float arrays this also normalizes the input in place
    kernel /= norm
    return kernel
789fb386ee387bf5f28adcbf17afe838715d86f0e28bc05baa84a06b7a36cecc | lenstronomy/Util/kernel_util.py | subgrid_kernel | guoxiaowhu/lenstronomy | 1 | python

def subgrid_kernel(kernel, subgrid_res, odd=False, num_iter=10):
    """
    creates a higher resolution kernel with subgrid resolution as an interpolation of the original kernel in an
    iterative approach

    :param kernel: initial kernel
    :param subgrid_res: subgrid resolution required
    :param odd: bool, if True, forces an odd number of pixels in the output
    :param num_iter: int, number of iterative corrections
    :return: kernel with higher resolution (larger)
    """
    subgrid_res = int(subgrid_res)
    if subgrid_res == 1:
        return kernel
    nx, ny = np.shape(kernel)
    # Pixel-center coordinates of the input grid, normalized to [0, 1].
    d_x = 1. / nx
    x_in = np.linspace(d_x / 2, 1 - d_x / 2, nx)
    d_y = 1. / ny
    y_in = np.linspace(d_y / 2, 1 - d_y / 2, ny)
    nx_new = nx * subgrid_res
    ny_new = ny * subgrid_res
    if odd is True:
        if nx_new % 2 == 0:
            nx_new -= 1
        if ny_new % 2 == 0:
            ny_new -= 1
    d_x_new = 1. / nx_new
    d_y_new = 1. / ny_new
    x_out = np.linspace(d_x_new / 2., 1 - d_x_new / 2., nx_new)
    y_out = np.linspace(d_y_new / 2., 1 - d_y_new / 2., ny_new)
    kernel_input = copy.deepcopy(kernel)
    kernel_subgrid = image_util.re_size_array(x_in, y_in, kernel_input, x_out, y_out)
    norm_subgrid = np.sum(kernel_subgrid)
    kernel_subgrid = kernel_norm(kernel_subgrid)
    for i in range(max(num_iter, 1)):
        # Degrade the current subgrid proposal back to the pixel grid and
        # correct it by the interpolated residual against the input kernel.
        if subgrid_res % 2 == 0:
            kernel_pixel = averaging_odd_kernel(kernel_subgrid, subgrid_res)
        else:
            kernel_pixel = util.averaging(kernel_subgrid, numGrid=nx_new, numPix=nx)
        kernel_pixel = kernel_norm(kernel_pixel)
        delta = kernel - kernel_pixel
        delta_subgrid = image_util.re_size_array(x_in, y_in, delta, x_out, y_out) / norm_subgrid
        kernel_subgrid += delta_subgrid
        kernel_subgrid = kernel_norm(kernel_subgrid)
    return kernel_subgrid
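A usage sketch (the spiked test kernel is an assumption):

import numpy as np

# Interpolate a 5x5 kernel onto a 3x finer grid; with subgrid_res=3 and
# odd=True the output is 15x15 and normalized to unit sum.
kernel = np.ones((5, 5))
kernel[2, 2] = 5.
kernel = kernel_norm(kernel)
kernel_fine = subgrid_kernel(kernel, subgrid_res=3, odd=True, num_iter=10)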
fcd0eac39d508c055b0b375f304730740ea79d0604d13e4593f1f038df0ded68 | lenstronomy/Util/kernel_util.py | averaging_odd_kernel | guoxiaowhu/lenstronomy | 1 | python

def averaging_odd_kernel(kernel_high_res, subgrid_res):
    """
    makes a lower resolution kernel based on the kernel_high_res (odd numbers) and the subgrid_res (even number),
    both meant to be centered.

    :param kernel_high_res: high-resolution kernel with an odd number of pixels
    :param subgrid_res: subgrid resolution (even number)
    :return: lower-resolution kernel
    """
    n_high = len(kernel_high_res)
    n_low = int((n_high + 1) / subgrid_res)
    kernel_low_res = np.zeros((n_low, n_low))
    # Interior sub-pixels contribute fully to one low-resolution pixel.
    for i in range(subgrid_res - 1):
        for j in range(subgrid_res - 1):
            kernel_low_res += kernel_high_res[i::subgrid_res, j::subgrid_res]
    # Sub-pixels on a boundary row are split between the two adjacent pixels.
    i = subgrid_res - 1
    for j in range(subgrid_res - 1):
        kernel_low_res[1:, :] += kernel_high_res[i::subgrid_res, j::subgrid_res] / 2
        kernel_low_res[:-1, :] += kernel_high_res[i::subgrid_res, j::subgrid_res] / 2
    # Same for sub-pixels on a boundary column.
    j = subgrid_res - 1
    for i in range(subgrid_res - 1):
        kernel_low_res[:, 1:] += kernel_high_res[i::subgrid_res, j::subgrid_res] / 2
        kernel_low_res[:, :-1] += kernel_high_res[i::subgrid_res, j::subgrid_res] / 2
    # Corner sub-pixels are split between the four adjacent pixels.
    i = subgrid_res - 1
    j = subgrid_res - 1
    kernel_edge = kernel_high_res[i::subgrid_res, j::subgrid_res]
    kernel_low_res[1:, 1:] += kernel_edge / 4
    kernel_low_res[:-1, 1:] += kernel_edge / 4
    kernel_low_res[1:, :-1] += kernel_edge / 4
    kernel_low_res[:-1, :-1] += kernel_edge / 4
    return kernel_low_res
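A shape and flux check (sketch; the random test kernel is an assumption):

import numpy as np

# An odd 9x9 high-resolution kernel with subgrid_res=2 maps to
# n_low = (9 + 1) / 2 = 5 pixels per side, and the total flux is conserved.
kernel_high = np.random.rand(9, 9)
kernel_low = averaging_odd_kernel(kernel_high, subgrid_res=2)
assert kernel_low.shape == (5, 5)
assert np.isclose(np.sum(kernel_low), np.sum(kernel_high))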
13fd8b799117b27c4ae4923f7b641864648f7feb82fae2de0e3a66d6ed3a188a | lenstronomy/Util/kernel_util.py | kernel_pixelsize_change | guoxiaowhu/lenstronomy | 1 | python

def kernel_pixelsize_change(kernel, deltaPix_in, deltaPix_out):
    """
    change the pixel size of a given kernel

    :param kernel: 2d array, PSF kernel
    :param deltaPix_in: input pixel size
    :param deltaPix_out: output pixel size
    :return: re-sampled and re-normalized kernel
    """
    numPix = len(kernel)
    # Number of output pixels covering the same angular extent, forced odd.
    numPix_new = int(round(numPix * deltaPix_in / deltaPix_out))
    if numPix_new % 2 == 0:
        numPix_new -= 1
    x_in = np.linspace(-(numPix - 1) / 2 * deltaPix_in, (numPix - 1) / 2 * deltaPix_in, numPix)
    x_out = np.linspace(-(numPix_new - 1) / 2 * deltaPix_out, (numPix_new - 1) / 2 * deltaPix_out, numPix_new)
    kernel_out = image_util.re_size_array(x_in, x_in, kernel, x_out, x_out)
    kernel_out = kernel_norm(kernel_out)
    return kernel_out
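A sizing sketch (the delta-function kernel is an assumption):

import numpy as np

# Resample a 25x25 kernel from 0.1"/pixel to 0.05"/pixel: the output covers
# the same extent with round(25 * 0.1 / 0.05) = 50 pixels, reduced to the odd 49.
kernel_in = np.zeros((25, 25))
kernel_in[12, 12] = 1.
kernel_out = kernel_pixelsize_change(kernel_in, deltaPix_in=0.1, deltaPix_out=0.05)
assert len(kernel_out) == 49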
87e05f05b85ebe507521c4f42c0321d0acb26a4794d00628ae2fd55055e6a785 | lenstronomy/Util/kernel_util.py | cut_psf | guoxiaowhu/lenstronomy | 1 | python

def cut_psf(psf_data, psf_size):
    """
    cut the psf properly

    :param psf_data: image of PSF
    :param psf_size: size of psf
    :return: re-sized and re-normalized PSF
    """
    kernel = image_util.cut_edges(psf_data, psf_size)
    kernel = kernel_norm(kernel)
    return kernel
29e71d125dc4e0b48123fd40e74f05d4a3c8484d25425e8750b7eddd2b720aa5 | lenstronomy/Util/kernel_util.py | pixel_kernel | guoxiaowhu/lenstronomy | 1 | python

def pixel_kernel(point_source_kernel, subgrid_res=7):
    """
    converts a pixelised kernel of a point source to a kernel representing a uniform extended pixel

    :param point_source_kernel: PSF kernel of a point source
    :param subgrid_res: subgrid resolution
    :return: convolution kernel for an extended pixel
    """
    kernel_subgrid = subgrid_kernel(point_source_kernel, subgrid_res)
    kernel_size = len(point_source_kernel)
    kernel_pixel = np.zeros((kernel_size * subgrid_res, kernel_size * subgrid_res))
    # Stack one sub-pixel-shifted copy of the supersampled PSF per subgrid position.
    for i in range(subgrid_res):
        k_x = int((kernel_size - 1) / 2 * subgrid_res + i)
        for j in range(subgrid_res):
            k_y = int((kernel_size - 1) / 2 * subgrid_res + j)
            kernel_pixel = image_util.add_layer2image(kernel_pixel, k_x, k_y, kernel_subgrid)
    # Average the stack back down to the pixel resolution.
    kernel_pixel = util.averaging(kernel_pixel, numGrid=kernel_size * subgrid_res, numPix=kernel_size)
    return kernel_norm(kernel_pixel)
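A usage sketch (the delta-function PSF is an assumption):

import numpy as np

# A point-source PSF concentrated in one pixel spreads into the effective
# response of a uniformly illuminated pixel; the output keeps the 9x9 size
# and unit normalization.
kernel_point = np.zeros((9, 9))
kernel_point[4, 4] = 1.
kernel_extended = pixel_kernel(kernel_point, subgrid_res=7)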
f6708a193ab7f3763ed5907c46cc197cc60a297728356d19c151922c4b9c8c5f | def split_kernel(kernel, kernel_subgrid, subsampling_size, subgrid_res):
'\n pixel kernel and subsampling kernel such that the convolution of both applied on an image can be\n performed, i.e. smaller subsampling PSF and hole in larger PSF\n\n :param kernel: PSF kernel of the size of the pixel\n :param kernel_subgrid: subsampled kernel\n :param subsampling_size: size of subsampling PSF in units of image pixels\n :return: pixel and subsampling kernel\n '
n = len(kernel)
n_sub = len(kernel_subgrid)
if ((subsampling_size % 2) == 0):
subsampling_size += 1
if (subsampling_size > n):
subsampling_size = n
kernel_hole = copy.deepcopy(kernel)
n_min = int((((n - 1) / 2) - ((subsampling_size - 1) / 2)))
n_max = int(((((n - 1) / 2) + ((subsampling_size - 1) / 2)) + 1))
kernel_hole[(n_min:n_max, n_min:n_max)] = 0
n_min_sub = int((((n_sub - 1) / 2) - (((subsampling_size * subgrid_res) - 1) / 2)))
n_max_sub = int(((((n_sub - 1) / 2) + (((subsampling_size * subgrid_res) - 1) / 2)) + 1))
kernel_subgrid_cut = kernel_subgrid[(n_min_sub:n_max_sub, n_min_sub:n_max_sub)]
flux_subsampled = np.sum(kernel_subgrid_cut)
flux_hole = np.sum(kernel_hole)
if (flux_hole > 0):
kernel_hole *= ((1.0 - flux_subsampled) / np.sum(kernel_hole))
else:
kernel_subgrid_cut /= np.sum(kernel_subgrid_cut)
return (kernel_hole, kernel_subgrid_cut) | pixel kernel and subsampling kernel such that the convolution of both applied on an image can be
performed, i.e. smaller subsampling PSF and hole in larger PSF
:param kernel: PSF kernel of the size of the pixel
:param kernel_subgrid: subsampled kernel
:param subsampling_size: size of subsampling PSF in units of image pixels
:return: pixel and subsampling kernel | lenstronomy/Util/kernel_util.py | split_kernel | guoxiaowhu/lenstronomy | 1 | python | def split_kernel(kernel, kernel_subgrid, subsampling_size, subgrid_res):
'\n pixel kernel and subsampling kernel such that the convolution of both applied on an image can be\n performed, i.e. smaller subsampling PSF and hole in larger PSF\n\n :param kernel: PSF kernel of the size of the pixel\n :param kernel_subgrid: subsampled kernel\n :param subsampling_size: size of subsampling PSF in units of image pixels\n :return: pixel and subsampling kernel\n '
n = len(kernel)
n_sub = len(kernel_subgrid)
if ((subsampling_size % 2) == 0):
subsampling_size += 1
if (subsampling_size > n):
subsampling_size = n
kernel_hole = copy.deepcopy(kernel)
n_min = int((((n - 1) / 2) - ((subsampling_size - 1) / 2)))
n_max = int(((((n - 1) / 2) + ((subsampling_size - 1) / 2)) + 1))
kernel_hole[(n_min:n_max, n_min:n_max)] = 0
n_min_sub = int((((n_sub - 1) / 2) - (((subsampling_size * subgrid_res) - 1) / 2)))
n_max_sub = int(((((n_sub - 1) / 2) + (((subsampling_size * subgrid_res) - 1) / 2)) + 1))
kernel_subgrid_cut = kernel_subgrid[(n_min_sub:n_max_sub, n_min_sub:n_max_sub)]
flux_subsampled = np.sum(kernel_subgrid_cut)
flux_hole = np.sum(kernel_hole)
if (flux_hole > 0):
kernel_hole *= ((1.0 - flux_subsampled) / np.sum(kernel_hole))
else:
kernel_subgrid_cut /= np.sum(kernel_subgrid_cut)
return (kernel_hole, kernel_subgrid_cut) | def split_kernel(kernel, kernel_subgrid, subsampling_size, subgrid_res):
'\n pixel kernel and subsampling kernel such that the convolution of both applied on an image can be\n performed, i.e. smaller subsampling PSF and hole in larger PSF\n\n :param kernel: PSF kernel of the size of the pixel\n :param kernel_subgrid: subsampled kernel\n :param subsampling_size: size of subsampling PSF in units of image pixels\n :return: pixel and subsampling kernel\n '
n = len(kernel)
n_sub = len(kernel_subgrid)
if ((subsampling_size % 2) == 0):
subsampling_size += 1
if (subsampling_size > n):
subsampling_size = n
kernel_hole = copy.deepcopy(kernel)
n_min = int((((n - 1) / 2) - ((subsampling_size - 1) / 2)))
n_max = int(((((n - 1) / 2) + ((subsampling_size - 1) / 2)) + 1))
kernel_hole[(n_min:n_max, n_min:n_max)] = 0
n_min_sub = int((((n_sub - 1) / 2) - (((subsampling_size * subgrid_res) - 1) / 2)))
n_max_sub = int(((((n_sub - 1) / 2) + (((subsampling_size * subgrid_res) - 1) / 2)) + 1))
kernel_subgrid_cut = kernel_subgrid[(n_min_sub:n_max_sub, n_min_sub:n_max_sub)]
flux_subsampled = np.sum(kernel_subgrid_cut)
flux_hole = np.sum(kernel_hole)
if (flux_hole > 0):
kernel_hole *= ((1.0 - flux_subsampled) / np.sum(kernel_hole))
else:
kernel_subgrid_cut /= np.sum(kernel_subgrid_cut)
return (kernel_hole, kernel_subgrid_cut)<|docstring|>pixel kernel and subsampling kernel such that the convolution of both applied on an image can be
performed, i.e. smaller subsampling PSF and hole in larger PSF
:param kernel: PSF kernel of the size of the pixel
:param kernel_subgrid: subsampled kernel
:param subsampling_size: size of subsampling PSF in units of image pixels
:return: pixel and subsampling kernel<|endoftext|> |
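A hedged sketch for split_kernel; the subsampled kernel is built with subgrid_kernel, which the pixel_kernel record above already calls with two arguments:

import numpy as np
from lenstronomy.Util import kernel_util

x, y = np.meshgrid(np.arange(-5, 6), np.arange(-5, 6))
psf = np.exp(-(x ** 2 + y ** 2) / (2 * 2.0 ** 2))
psf /= np.sum(psf)

psf_sub = kernel_util.subgrid_kernel(psf, 3)  # 3x supersampled copy
hole, core = kernel_util.split_kernel(psf, psf_sub, subsampling_size=5, subgrid_res=3)
# when the hole retains positive flux, both pieces are rescaled so that
# their fluxes add up to one
print(np.sum(hole) + np.sum(core))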
ba29416de77c09d0342d62ba1500f2df1f73404555934f754ca01974d2bf04e2 | def cutout_source(x_pos, y_pos, image, kernelsize, shift=True):
'\n cuts out point source (e.g. PSF estimate) out of image and shift it to the center of a pixel\n :param x_pos:\n :param y_pos:\n :param image:\n :param kernelsize:\n :return:\n '
if ((kernelsize % 2) == 0):
raise ValueError('even pixel number kernel size not supported!')
x_int = int(round(x_pos))
y_int = int(round(y_pos))
n = len(image)
d = ((kernelsize - 1) / 2)
x_max = int(np.minimum(((x_int + d) + 1), n))
x_min = int(np.maximum((x_int - d), 0))
y_max = int(np.minimum(((y_int + d) + 1), n))
y_min = int(np.maximum((y_int - d), 0))
image_cut = copy.deepcopy(image[(y_min:y_max, x_min:x_max)])
shift_x = (x_int - x_pos)
shift_y = (y_int - y_pos)
if (shift is True):
kernel_shift = de_shift_kernel(image_cut, shift_x, shift_y, iterations=50)
else:
kernel_shift = image_cut
kernel_final = np.zeros((kernelsize, kernelsize))
k_l2_x = int(((kernelsize - 1) / 2))
k_l2_y = int(((kernelsize - 1) / 2))
xk_min = np.maximum(0, ((- x_int) + k_l2_x))
yk_min = np.maximum(0, ((- y_int) + k_l2_y))
xk_max = np.minimum(kernelsize, (((- x_int) + k_l2_x) + n))
yk_max = np.minimum(kernelsize, (((- y_int) + k_l2_y) + n))
kernel_final[(yk_min:yk_max, xk_min:xk_max)] = kernel_shift
return kernel_final | cuts out point source (e.g. PSF estimate) out of image and shift it to the center of a pixel
:param x_pos:
:param y_pos:
:param image:
:param kernelsize:
:return: | lenstronomy/Util/kernel_util.py | cutout_source | guoxiaowhu/lenstronomy | 1 | python | def cutout_source(x_pos, y_pos, image, kernelsize, shift=True):
'\n cuts out point source (e.g. PSF estimate) out of image and shift it to the center of a pixel\n :param x_pos:\n :param y_pos:\n :param image:\n :param kernelsize:\n :return:\n '
if ((kernelsize % 2) == 0):
raise ValueError('even pixel number kernel size not supported!')
x_int = int(round(x_pos))
y_int = int(round(y_pos))
n = len(image)
d = ((kernelsize - 1) / 2)
x_max = int(np.minimum(((x_int + d) + 1), n))
x_min = int(np.maximum((x_int - d), 0))
y_max = int(np.minimum(((y_int + d) + 1), n))
y_min = int(np.maximum((y_int - d), 0))
image_cut = copy.deepcopy(image[(y_min:y_max, x_min:x_max)])
shift_x = (x_int - x_pos)
shift_y = (y_int - y_pos)
if (shift is True):
kernel_shift = de_shift_kernel(image_cut, shift_x, shift_y, iterations=50)
else:
kernel_shift = image_cut
kernel_final = np.zeros((kernelsize, kernelsize))
k_l2_x = int(((kernelsize - 1) / 2))
k_l2_y = int(((kernelsize - 1) / 2))
xk_min = np.maximum(0, ((- x_int) + k_l2_x))
yk_min = np.maximum(0, ((- y_int) + k_l2_y))
xk_max = np.minimum(kernelsize, (((- x_int) + k_l2_x) + n))
yk_max = np.minimum(kernelsize, (((- y_int) + k_l2_y) + n))
kernel_final[(yk_min:yk_max, xk_min:xk_max)] = kernel_shift
return kernel_final | def cutout_source(x_pos, y_pos, image, kernelsize, shift=True):
'\n cuts out point source (e.g. PSF estimate) out of image and shift it to the center of a pixel\n :param x_pos:\n :param y_pos:\n :param image:\n :param kernelsize:\n :return:\n '
if ((kernelsize % 2) == 0):
raise ValueError('even pixel number kernel size not supported!')
x_int = int(round(x_pos))
y_int = int(round(y_pos))
n = len(image)
d = ((kernelsize - 1) / 2)
x_max = int(np.minimum(((x_int + d) + 1), n))
x_min = int(np.maximum((x_int - d), 0))
y_max = int(np.minimum(((y_int + d) + 1), n))
y_min = int(np.maximum((y_int - d), 0))
image_cut = copy.deepcopy(image[(y_min:y_max, x_min:x_max)])
shift_x = (x_int - x_pos)
shift_y = (y_int - y_pos)
if (shift is True):
kernel_shift = de_shift_kernel(image_cut, shift_x, shift_y, iterations=50)
else:
kernel_shift = image_cut
kernel_final = np.zeros((kernelsize, kernelsize))
k_l2_x = int(((kernelsize - 1) / 2))
k_l2_y = int(((kernelsize - 1) / 2))
xk_min = np.maximum(0, ((- x_int) + k_l2_x))
yk_min = np.maximum(0, ((- y_int) + k_l2_y))
xk_max = np.minimum(kernelsize, (((- x_int) + k_l2_x) + n))
yk_max = np.minimum(kernelsize, (((- y_int) + k_l2_y) + n))
kernel_final[(yk_min:yk_max, xk_min:xk_max)] = kernel_shift
return kernel_final<|docstring|>cuts out point source (e.g. PSF estimate) out of image and shift it to the center of a pixel
:param x_pos:
:param y_pos:
:param image:
:param kernelsize:
:return:<|endoftext|> |
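A minimal sketch for cutout_source (kernelsize must be odd, per the ValueError above); lenstronomy availability is again assumed:

import numpy as np
from lenstronomy.Util import kernel_util

image = np.zeros((50, 50))
image[20, 31] = 1.0  # point source at x=31, y=20 (row index is y)
stamp = kernel_util.cutout_source(x_pos=31.3, y_pos=20.2, image=image, kernelsize=9)
print(stamp.shape)  # (9, 9); the flux is de-shifted toward the stamp center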
442a2bbe22ae7048f2ee11aab1b3ef9459c2e66c8fbd54bcbe9e2e59e3e13ce8 | def fwhm_kernel(kernel):
'\n computes the full width at half maximum of a (PSF) kernel\n :param kernel: (psf) kernel, 2d numpy array\n :return: fwhm in units of pixels\n '
n = len(kernel)
if ((n % 2) == 0):
raise ValueError('only works with odd number of pixels in kernel!')
max_flux = kernel[(int(((n - 1) / 2)), int(((n - 1) / 2)))]
I_2 = (max_flux / 2.0)
I_r = kernel[(int(((n - 1) / 2)), int(((n - 1) / 2)):)]
r = np.linspace(0, ((n - 1) / 2), int(((n + 1) / 2)))
for i in range(1, len(r)):
if (I_r[i] < I_2):
fwhm_2 = (((I_2 - I_r[(i - 1)]) / (I_r[i] - I_r[(i - 1)])) + r[(i - 1)])
return (fwhm_2 * 2)
raise ValueError('The kernel did not drop to half the max value - fwhm not determined!') | computes the full width at half maximum of a (PSF) kernel
:param kernel: (psf) kernel, 2d numpy array
:return: fwhm in units of pixels | lenstronomy/Util/kernel_util.py | fwhm_kernel | guoxiaowhu/lenstronomy | 1 | python | def fwhm_kernel(kernel):
'\n computes the full width at half maximum of a (PSF) kernel\n :param kernel: (psf) kernel, 2d numpy array\n :return: fwhm in units of pixels\n '
n = len(kernel)
if ((n % 2) == 0):
raise ValueError('only works with odd number of pixels in kernel!')
max_flux = kernel[(int(((n - 1) / 2)), int(((n - 1) / 2)))]
I_2 = (max_flux / 2.0)
I_r = kernel[(int(((n - 1) / 2)), int(((n - 1) / 2)):)]
r = np.linspace(0, ((n - 1) / 2), int(((n + 1) / 2)))
for i in range(1, len(r)):
if (I_r[i] < I_2):
fwhm_2 = (((I_2 - I_r[(i - 1)]) / (I_r[i] - I_r[(i - 1)])) + r[(i - 1)])
return (fwhm_2 * 2)
raise ValueError('The kernel did not drop to half the max value - fwhm not determined!') | def fwhm_kernel(kernel):
'\n computes the full width at half maximum of a (PSF) kernel\n :param kernel: (psf) kernel, 2d numpy array\n :return: fwhm in units of pixels\n '
n = len(kernel)
if ((n % 2) == 0):
raise ValueError('only works with odd number of pixels in kernel!')
max_flux = kernel[(int(((n - 1) / 2)), int(((n - 1) / 2)))]
I_2 = (max_flux / 2.0)
I_r = kernel[(int(((n - 1) / 2)), int(((n - 1) / 2)):)]
r = np.linspace(0, ((n - 1) / 2), int(((n + 1) / 2)))
for i in range(1, len(r)):
if (I_r[i] < I_2):
fwhm_2 = (((I_2 - I_r[(i - 1)]) / (I_r[i] - I_r[(i - 1)])) + r[(i - 1)])
return (fwhm_2 * 2)
raise ValueError('The kernel did not drop to half the max value - fwhm not determined!')<|docstring|>computes the full width at half maximum of a (PSF) kernel
:param kernel: (psf) kernel, 2d numpy array
:return: fwhm in units of pixels<|endoftext|> |
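A quick numerical check: for a Gaussian kernel the returned FWHM should approach 2*sqrt(2*ln 2)*sigma, about 2.3548*sigma in pixel units:

import numpy as np
from lenstronomy.Util import kernel_util

sigma = 3.0
x, y = np.meshgrid(np.arange(-15, 16), np.arange(-15, 16))
psf = np.exp(-(x ** 2 + y ** 2) / (2 * sigma ** 2))  # 31x31, odd-sized
print(kernel_util.fwhm_kernel(psf))  # close to 2.3548 * sigma ~ 7.06 pixels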
aa3d1585a794a89d75a5d71fbbc211816a2ee2db07bdb7694f1671f04f49555d | def estimate_amp(data, x_pos, y_pos, psf_kernel):
'\n estimates the amplitude of a point source located at x_pos, y_pos\n :param data:\n :param x_pos:\n :param y_pos:\n :param deltaPix:\n :return:\n '
(numPix_x, numPix_y) = np.shape(data)
x_int = int(round((x_pos - 0.49999)))
y_int = int(round((y_pos - 0.49999)))
if ((x_int > 2) and (x_int < (numPix_x - 2)) and (y_int > 2) and (y_int < (numPix_y - 2))):
mean_image = max(np.sum(data[((y_int - 2):(y_int + 3), (x_int - 2):(x_int + 3))]), 0)
num = len(psf_kernel)
center = int(((num - 0.5) / 2))
mean_kernel = np.sum(psf_kernel[((center - 2):(center + 3), (center - 2):(center + 3))])
amp_estimated = (mean_image / mean_kernel)
else:
amp_estimated = 0
return amp_estimated | estimates the amplitude of a point source located at x_pos, y_pos
:param data:
:param x_pos:
:param y_pos:
:param deltaPix:
:return: | lenstronomy/Util/kernel_util.py | estimate_amp | guoxiaowhu/lenstronomy | 1 | python | def estimate_amp(data, x_pos, y_pos, psf_kernel):
'\n estimates the amplitude of a point source located at x_pos, y_pos\n :param data:\n :param x_pos:\n :param y_pos:\n :param deltaPix:\n :return:\n '
(numPix_x, numPix_y) = np.shape(data)
x_int = int(round((x_pos - 0.49999)))
y_int = int(round((y_pos - 0.49999)))
if ((x_int > 2) and (x_int < (numPix_x - 2)) and (y_int > 2) and (y_int < (numPix_y - 2))):
mean_image = max(np.sum(data[((y_int - 2):(y_int + 3), (x_int - 2):(x_int + 3))]), 0)
num = len(psf_kernel)
center = int(((num - 0.5) / 2))
mean_kernel = np.sum(psf_kernel[((center - 2):(center + 3), (center - 2):(center + 3))])
amp_estimated = (mean_image / mean_kernel)
else:
amp_estimated = 0
return amp_estimated | def estimate_amp(data, x_pos, y_pos, psf_kernel):
'\n estimates the amplitude of a point source located at x_pos, y_pos\n :param data:\n :param x_pos:\n :param y_pos:\n :param deltaPix:\n :return:\n '
(numPix_x, numPix_y) = np.shape(data)
x_int = int(round((x_pos - 0.49999)))
y_int = int(round((y_pos - 0.49999)))
if ((x_int > 2) and (x_int < (numPix_x - 2)) and (y_int > 2) and (y_int < (numPix_y - 2))):
mean_image = max(np.sum(data[((y_int - 2):(y_int + 3), (x_int - 2):(x_int + 3))]), 0)
num = len(psf_kernel)
center = int(((num - 0.5) / 2))
mean_kernel = np.sum(psf_kernel[((center - 2):(center + 3), (center - 2):(center + 3))])
amp_estimated = (mean_image / mean_kernel)
else:
amp_estimated = 0
return amp_estimated<|docstring|>estimates the amplitude of a point source located at x_pos, y_pos
:param data:
:param x_pos:
:param y_pos:
:param deltaPix:
:return:<|endoftext|> |
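A toy check of the linear amplitude estimate (a 5x5 aperture around the source divided by the matching 5x5 PSF core):

import numpy as np
from lenstronomy.Util import kernel_util

psf = np.ones((11, 11)) / 121.0  # flat toy PSF summing to 1
data = np.zeros((31, 31))
data[10:21, 10:21] = 7.5 * psf   # source of amplitude 7.5 centered at (15, 15)
print(kernel_util.estimate_amp(data, x_pos=15, y_pos=15, psf_kernel=psf))  # ~7.5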
9630780d1404f8f51343fdbc64ff99c8993150f48c0370e2adfff6c881d20df7 | @classmethod
def canonicalize(cls):
'\n Transforms the values in `city_name` and `county_name`\n to their canonicalized (uppercase, regularly spaced) form.\n '
for obj in cls.objects.all():
obj.city_name = canonicalize_string(obj.city_name)
obj.county_name = canonicalize_string(obj.county_name)
obj.save() | Transforms the values in `city_name` and `county_name`
to their canonicalized (uppercase, regularly spaced) form. | usaspending_api/references/models/ref_city_county_code.py | canonicalize | mikiec84/usaspending-api | 0 | python | @classmethod
def canonicalize(cls):
'\n Transforms the values in `city_name` and `county_name`\n to their canonicalized (uppercase, regularly spaced) form.\n '
for obj in cls.objects.all():
obj.city_name = canonicalize_string(obj.city_name)
obj.county_name = canonicalize_string(obj.county_name)
obj.save() | @classmethod
def canonicalize(cls):
'\n Transforms the values in `city_name` and `county_name`\n to their canonicalized (uppercase, regularly spaced) form.\n '
for obj in cls.objects.all():
obj.city_name = canonicalize_string(obj.city_name)
obj.county_name = canonicalize_string(obj.county_name)
obj.save()<|docstring|>Transforms the values in `city_name` and `county_name`
to their canonicalized (uppercase, regularly spaced) form.<|endoftext|>
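canonicalize_string is imported from elsewhere in usaspending-api and is not part of this record; a minimal stand-in consistent with the docstring (uppercase, regularized whitespace) could look like the following, purely as an illustration:

import re

def canonicalize_string(value):  # hypothetical helper, not the project's actual code
    if value is None:
        return value
    return re.sub(r'\s+', ' ', value).strip().upper()

print(canonicalize_string('  St.  Louis '))  # 'ST. LOUIS'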
88cdbbc3cf067af41c151990a7795b3370ab4d7c3bbfdd83c2dcc7f8c3561297 | def upper_repl(match):
' Convert mask-special tokens to real special tokens '
return ((' [' + match.group(1).upper().replace('-', '_')) + '] ') | Convert mask-special tokens to real special tokens | app/utils/preprocessing.py | upper_repl | m3hrdadfi/wiki-summary | 16 | python | def upper_repl(match):
' '
return ((' [' + match.group(1).upper().replace('-', '_')) + '] ') | def upper_repl(match):
' '
return ((' [' + match.group(1).upper().replace('-', '_')) + '] ')<|docstring|>Convert mask-special tokens to real special tokens<|endoftext|> |
307696b89b216b2ccaf2eaf670591d2274c26767383c1564aafd44cf0785d8b6 | def convert_emoji_to_text(text, delimiters=('[', ']')):
' Convert emojis to something readable by the vocab and model '
text = emoji.demojize(text, delimiters=delimiters)
return text | Convert emojis to something readable by the vocab and model | app/utils/preprocessing.py | convert_emoji_to_text | m3hrdadfi/wiki-summary | 16 | python | def convert_emoji_to_text(text, delimiters=('[', ']')):
' '
text = emoji.demojize(text, delimiters=delimiters)
return text | def convert_emoji_to_text(text, delimiters=('[', ']')):
' '
text = emoji.demojize(text, delimiters=delimiters)
return text<|docstring|>Convert emojis to something readable by the vocab and model<|endoftext|> |
ca216aeba88275fa66919caa305af2cd26f33120314e5b523cb950dcd1b34981 | def clean_html(raw_html):
' Remove all html tags '
cleaner = re.compile('<.*?>')
cleaned = re.sub(cleaner, '', raw_html)
return cleaned | Remove all html tags | app/utils/preprocessing.py | clean_html | m3hrdadfi/wiki-summary | 16 | python | def clean_html(raw_html):
' '
cleaner = re.compile('<.*?>')
cleaned = re.sub(cleaner, '', raw_html)
return cleaned | def clean_html(raw_html):
' '
cleaner = re.compile('<.*?>')
cleaned = re.sub(cleaner, '', raw_html)
return cleaned<|docstring|>Remove all html tags<|endoftext|> |
042169fa14586b4d8873102a97a4646176940f1fead2c26b6c5e10d6a8958c5e | def clean_text(raw_text, fix_unicode=True, to_ascii=False, lower=True, no_line_breaks=True, no_urls=True, no_emails=True, no_phone_numbers=True, no_numbers=False, no_digits=False, no_currency_symbols=True, no_punct=False, replace_with_url='', replace_with_email='', replace_with_phone_number='', replace_with_number='', replace_with_digit='0', replace_with_currency_symbol=''):
' Preprocessing and normalization of the text at a low level '
cleaned = clean(raw_text, fix_unicode=fix_unicode, to_ascii=to_ascii, lower=lower, no_line_breaks=no_line_breaks, no_urls=no_urls, no_emails=no_emails, no_phone_numbers=no_phone_numbers, no_numbers=no_numbers, no_digits=no_digits, no_currency_symbols=no_currency_symbols, no_punct=no_punct, replace_with_url=replace_with_url, replace_with_email=replace_with_email, replace_with_phone_number=replace_with_phone_number, replace_with_number=replace_with_number, replace_with_digit=replace_with_digit, replace_with_currency_symbol=replace_with_currency_symbol)
return cleaned | Preprocessing and normalization of the text at a low level | app/utils/preprocessing.py | clean_text | m3hrdadfi/wiki-summary | 16 | python | def clean_text(raw_text, fix_unicode=True, to_ascii=False, lower=True, no_line_breaks=True, no_urls=True, no_emails=True, no_phone_numbers=True, no_numbers=False, no_digits=False, no_currency_symbols=True, no_punct=False, replace_with_url='', replace_with_email='', replace_with_phone_number='', replace_with_number='', replace_with_digit='0', replace_with_currency_symbol=''):
' '
cleaned = clean(raw_text, fix_unicode=fix_unicode, to_ascii=to_ascii, lower=lower, no_line_breaks=no_line_breaks, no_urls=no_urls, no_emails=no_emails, no_phone_numbers=no_phone_numbers, no_numbers=no_numbers, no_digits=no_digits, no_currency_symbols=no_currency_symbols, no_punct=no_punct, replace_with_url=replace_with_url, replace_with_email=replace_with_email, replace_with_phone_number=replace_with_phone_number, replace_with_number=replace_with_number, replace_with_digit=replace_with_digit, replace_with_currency_symbol=replace_with_currency_symbol)
return cleaned | def clean_text(raw_text, fix_unicode=True, to_ascii=False, lower=True, no_line_breaks=True, no_urls=True, no_emails=True, no_phone_numbers=True, no_numbers=False, no_digits=False, no_currency_symbols=True, no_punct=False, replace_with_url='', replace_with_email='', replace_with_phone_number='', replace_with_number='', replace_with_digit='0', replace_with_currency_symbol=''):
' '
cleaned = clean(raw_text, fix_unicode=fix_unicode, to_ascii=to_ascii, lower=lower, no_line_breaks=no_line_breaks, no_urls=no_urls, no_emails=no_emails, no_phone_numbers=no_phone_numbers, no_numbers=no_numbers, no_digits=no_digits, no_currency_symbols=no_currency_symbols, no_punct=no_punct, replace_with_url=replace_with_url, replace_with_email=replace_with_email, replace_with_phone_number=replace_with_phone_number, replace_with_number=replace_with_number, replace_with_digit=replace_with_digit, replace_with_currency_symbol=replace_with_currency_symbol)
return cleaned<|docstring|>Preprocessing and normalization of the text at a low level<|endoftext|>
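The clean() call above comes from the third-party clean-text package (pip install clean-text), whose keyword set matches this signature; a hedged direct use:

from cleantext import clean

sample = 'Visit https://example.com or write to me@example.com\nPrice: $5'
print(clean(sample, no_urls=True, no_emails=True, no_currency_symbols=True,
            replace_with_url='', replace_with_email='', replace_with_currency_symbol=''))
# URLs, emails and currency symbols are dropped; text is lower-cased by default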
7c79a80b040434f9b8cf073b342aca639616346087e30ab2a73ab139e80c0f84 | def cleaning(text, default_cleaning=True, normalize_cleaning=True, half_space_cleaning=True, html_cleaning=True, emoji_convert=False, username_cleaning=True, hashtag_cleaning=True, fix_unicode=True, to_ascii=False, lower=True, no_line_breaks=True, no_urls=True, no_emails=True, no_phone_numbers=True, no_numbers=False, no_digits=False, no_currency_symbols=True, no_punct=False, replace_with_url='', replace_with_email='', replace_with_phone_number='', replace_with_number='', replace_with_digit='0', replace_with_currency_symbol=''):
' A hierarchy of normalization and preprocessing '
text = text.strip()
if username_cleaning:
text = re.sub('\\@[\\w.-_]+', ' ', text)
if hashtag_cleaning:
text = text.replace('#', ' ')
text = text.replace('_', ' ')
if emoji_convert:
text = emoji.emojize(text)
text = convert_emoji_to_text(text)
if default_cleaning:
text = clean_text(text, fix_unicode, to_ascii, lower, no_line_breaks, no_urls, no_emails, no_phone_numbers, no_numbers, no_digits, no_currency_symbols, no_punct, replace_with_url, replace_with_email, replace_with_phone_number, replace_with_number, replace_with_digit, replace_with_currency_symbol)
if html_cleaning:
text = clean_html(text)
if normalize_cleaning:
text = normalizer.normalize(text)
weird_pattern = re.compile('[😀-🙏🌀-🗿🚀-\U0001f6ff\U0001f1e0-🇿✂-➰Ⓜ-🉑🤦-🤷𐀀-\U0010ffff\u200d♀-♂☀-⭕⏏⏩⌚〰️\u2069\u2066–\u2068\u2067]+', flags=re.UNICODE)
text = weird_pattern.sub('', text)
text = re.sub('#', '', text)
if emoji_convert:
text = re.sub('\\[(\\w.+)\\]', upper_repl, text)
if half_space_cleaning:
text = text.replace('\u200c', ' ')
return text | A hierarchy of normalization and preprocessing | app/utils/preprocessing.py | cleaning | m3hrdadfi/wiki-summary | 16 | python | def cleaning(text, default_cleaning=True, normalize_cleaning=True, half_space_cleaning=True, html_cleaning=True, emoji_convert=False, username_cleaning=True, hashtag_cleaning=True, fix_unicode=True, to_ascii=False, lower=True, no_line_breaks=True, no_urls=True, no_emails=True, no_phone_numbers=True, no_numbers=False, no_digits=False, no_currency_symbols=True, no_punct=False, replace_with_url='', replace_with_email='', replace_with_phone_number='', replace_with_number='', replace_with_digit='0', replace_with_currency_symbol=''):
' '
text = text.strip()
if username_cleaning:
text = re.sub('\\@[\\w.-_]+', ' ', text)
if hashtag_cleaning:
text = text.replace('#', ' ')
text = text.replace('_', ' ')
if emoji_convert:
text = emoji.emojize(text)
text = convert_emoji_to_text(text)
if default_cleaning:
text = clean_text(text, fix_unicode, to_ascii, lower, no_line_breaks, no_urls, no_emails, no_phone_numbers, no_numbers, no_digits, no_currency_symbols, no_punct, replace_with_url, replace_with_email, replace_with_phone_number, replace_with_number, replace_with_digit, replace_with_currency_symbol)
if html_cleaning:
text = clean_html(text)
if normalize_cleaning:
text = normalizer.normalize(text)
weird_pattern = re.compile('[😀-🙏🌀-🗿🚀-\U0001f6ff\U0001f1e0-🇿✂-➰Ⓜ-🉑🤦-🤷𐀀-\U0010ffff\u200d♀-♂☀-⭕⏏⏩⌚〰️\u2069\u2066–\u2068\u2067]+', flags=re.UNICODE)
text = weird_pattern.sub('', text)
text = re.sub('#', '', text)
if emoji_convert:
text = re.sub('\\[(\\w.+)\\]', upper_repl, text)
if half_space_cleaning:
text = text.replace('\u200c', ' ')
return text | def cleaning(text, default_cleaning=True, normalize_cleaning=True, half_space_cleaning=True, html_cleaning=True, emoji_convert=False, username_cleaning=True, hashtag_cleaning=True, fix_unicode=True, to_ascii=False, lower=True, no_line_breaks=True, no_urls=True, no_emails=True, no_phone_numbers=True, no_numbers=False, no_digits=False, no_currency_symbols=True, no_punct=False, replace_with_url='', replace_with_email='', replace_with_phone_number='', replace_with_number='', replace_with_digit='0', replace_with_currency_symbol=''):
' '
text = text.strip()
if username_cleaning:
text = re.sub('\\@[\\w.-_]+', ' ', text)
if hashtag_cleaning:
text = text.replace('#', ' ')
text = text.replace('_', ' ')
if emoji_convert:
text = emoji.emojize(text)
text = convert_emoji_to_text(text)
if default_cleaning:
text = clean_text(text, fix_unicode, to_ascii, lower, no_line_breaks, no_urls, no_emails, no_phone_numbers, no_numbers, no_digits, no_currency_symbols, no_punct, replace_with_url, replace_with_email, replace_with_phone_number, replace_with_number, replace_with_digit, replace_with_currency_symbol)
if html_cleaning:
text = clean_html(text)
if normalize_cleaning:
text = normalizer.normalize(text)
weird_pattern = re.compile('[😀-🙏🌀-🗿🚀-\U0001f6ff\U0001f1e0-🇿✂-➰Ⓜ-🉑🤦-🤷𐀀-\U0010ffff\u200d♀-♂☀-⭕⏏⏩⌚〰️\u2069\u2066–\u2068\u2067]+', flags=re.UNICODE)
text = weird_pattern.sub('', text)
text = re.sub('#', '', text)
if emoji_convert:
text = re.sub('\\[(\\w.+)\\]', upper_repl, text)
if half_space_cleaning:
text = text.replace('\u200c', ' ')
return text<|docstring|>A hierarchy of normalization and preprocessing<|endoftext|> |
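An end-to-end sketch of the pipeline above; it assumes the module-level normalizer object and the emoji and clean-text dependencies are importable, with the module path taken from the record metadata:

from app.utils.preprocessing import cleaning

text = '@user check #this_tag out 😀 <b>bold</b> http://example.com'
print(cleaning(text, emoji_convert=True))
# the @username and hashtag markers are removed, the HTML tag and URL are
# stripped, and the emoji becomes an upper-cased [GRINNING_FACE]-style token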
4c994bec01de6f9f7cf07bc87bc03c403f1b3a4eed1124c8353c84697bd672ce | def setUp(self):
'Runs before each test.'
pass | Runs before each test. | test/test_resources.py | setUp | mrmap-community/gprlp_metadata_search | 3 | python | def setUp(self):
pass | def setUp(self):
pass<|docstring|>Runs before each test.<|endoftext|> |
a6c0a5f5395acf2fd08845f919fae36649d7d2db8b1f4d0e0889ae34f5ace4ea | def tearDown(self):
'Runs after each test.'
pass | Runs after each test. | test/test_resources.py | tearDown | mrmap-community/gprlp_metadata_search | 3 | python | def tearDown(self):
pass | def tearDown(self):
pass<|docstring|>Runs after each test.<|endoftext|> |
bc3032a3600ca28158e8d61c541593467359434fb835b68523e409f8a8fc4ec2 | def test_icon_png(self):
'Test we can click OK.'
path = ':/plugins/GeoportalRlpMetadataSearch/icon.png'
icon = QIcon(path)
self.assertFalse(icon.isNull()) | Test we can click OK. | test/test_resources.py | test_icon_png | mrmap-community/gprlp_metadata_search | 3 | python | def test_icon_png(self):
path = ':/plugins/GeoportalRlpMetadataSearch/icon.png'
icon = QIcon(path)
self.assertFalse(icon.isNull()) | def test_icon_png(self):
path = ':/plugins/GeoportalRlpMetadataSearch/icon.png'
icon = QIcon(path)
self.assertFalse(icon.isNull())<|docstring|>Test we can click OK.<|endoftext|> |
43efa472e6db54f91a05760642ebe0c3c4ca5ccf4481fdc9b782ec62bb5a5782 | def parse_search_url(url):
'Parses a search URL.'
config = {}
url = urlparse.urlparse(url)
path = url.path[1:]
path = path.split('?', 2)[0]
if (url.scheme in SEARCH_SCHEMES):
config['ENGINE'] = SEARCH_SCHEMES[url.scheme]
if (url.scheme in USES_URL):
config['URL'] = urlparse.urlunparse((('http',) + url[1:]))
if (url.scheme in USES_INDEX):
if path.endswith('/'):
path = path[:(- 1)]
split = path.rsplit('/', 1)
if (len(split) > 1):
path = split[:(- 1)]
index = split[(- 1)]
else:
path = ''
index = split[0]
config.update({'URL': urlparse.urlunparse((((('http',) + url[1:2]) + (path,)) + url[3:])), 'INDEX_NAME': index})
if (url.scheme in USES_PATH):
config.update({'PATH': path})
return config | Parses a search URL. | confy/search.py | parse_search_url | MechanisM/django-confy | 28 | python | def parse_search_url(url):
config = {}
url = urlparse.urlparse(url)
path = url.path[1:]
path = path.split('?', 2)[0]
if (url.scheme in SEARCH_SCHEMES):
config['ENGINE'] = SEARCH_SCHEMES[url.scheme]
if (url.scheme in USES_URL):
config['URL'] = urlparse.urlunparse((('http',) + url[1:]))
if (url.scheme in USES_INDEX):
if path.endswith('/'):
path = path[:(- 1)]
split = path.rsplit('/', 1)
if (len(split) > 1):
path = split[:(- 1)]
index = split[(- 1)]
else:
path =
index = split[0]
config.update({'URL': urlparse.urlunparse((((('http',) + url[1:2]) + (path,)) + url[3:])), 'INDEX_NAME': index})
if (url.scheme in USES_PATH):
config.update({'PATH': path})
return config | def parse_search_url(url):
config = {}
url = urlparse.urlparse(url)
path = url.path[1:]
path = path.split('?', 2)[0]
if (url.scheme in SEARCH_SCHEMES):
config['ENGINE'] = SEARCH_SCHEMES[url.scheme]
if (url.scheme in USES_URL):
config['URL'] = urlparse.urlunparse((('http',) + url[1:]))
if (url.scheme in USES_INDEX):
if path.endswith('/'):
path = path[:(- 1)]
split = path.rsplit('/', 1)
if (len(split) > 1):
path = split[:(- 1)]
index = split[(- 1)]
else:
path =
index = split[0]
config.update({'URL': urlparse.urlunparse((((('http',) + url[1:2]) + (path,)) + url[3:])), 'INDEX_NAME': index})
if (url.scheme in USES_PATH):
config.update({'PATH': path})
return config<|docstring|>Parses a search URL.<|endoftext|> |
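A hedged example with an Elasticsearch-style URL; the exact ENGINE string depends on the module-level SEARCH_SCHEMES table, which this record does not show:

from confy.search import parse_search_url

cfg = parse_search_url('elasticsearch://127.0.0.1:9200/my-index')
print(cfg.get('URL'))         # 'http://127.0.0.1:9200'
print(cfg.get('INDEX_NAME'))  # 'my-index'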
6c38c99eeb41669a50a2c12b7cedd4cc6dc3be6ca67438acff9b9235a42ecef3 | def config(name='SEARCH_URL', default='simple://'):
'Returns configured SEARCH dictionary from SEARCH_URL'
config = {}
s = env(name, default)
if s:
config = parse_search_url(s)
return config | Returns configured SEARCH dictionary from SEARCH_URL | confy/search.py | config | MechanisM/django-confy | 28 | python | def config(name='SEARCH_URL', default='simple://'):
config = {}
s = env(name, default)
if s:
config = parse_search_url(s)
return config | def config(name='SEARCH_URL', default='simple://'):
config = {}
s = env(name, default)
if s:
config = parse_search_url(s)
return config<|docstring|>Returns configured SEARCH dictionary from SEARCH_URL<|endoftext|> |
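Typical settings usage in the dj-database-url style this helper mirrors (the supported schemes again depend on the module tables):

import os
os.environ.setdefault('SEARCH_URL', 'elasticsearch://127.0.0.1:9200/my-index')

from confy import search
HAYSTACK_CONNECTIONS = {'default': search.config()}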
f7132e980acf5c82c91c523c5d6f379ece85043671589b20e0165735ed6bd081 | def init():
'parse arguments, validate input, set variables'
parser = argparse.ArgumentParser(prog='python3 report.py')
list_of_output_formats = ['csv', 'json']
parser.add_argument('-o', '--output_format', required=True, help='Output format', default='csv', choices=list_of_output_formats)
parser.add_argument('-f', '--file', required=False, help='Output file name')
parser.add_argument('-a', '--account_id', required=True, help='IBM Cloud account id')
parser.add_argument('-d', '--billing_date', required=True, help='Billing date in format yyyy-MM')
parser.add_argument('-t', '--report_type', required=True, help='Report type')
parser.add_argument('-g', '--group_by', required=True, help='Group by field')
args = parser.parse_args()
global OUTPUT_FORMAT
OUTPUT_FORMAT = args.output_format
global ACCOUNT_ID
ACCOUNT_ID = args.account_id
global BILLING_DATE
BILLING_DATE = args.billing_date
global REPORT_TYPE
REPORT_TYPE = args.report_type
global GROUP_BY
GROUP_BY = args.group_by
global OUTPUT_FILE_NAME
now = datetime.now()
date_time = now.strftime('%Y%m%dT%H%M%S%f')
OUTPUT_FILE_NAME = 'cost-report_{}_by-{}-{}.{}'.format(BILLING_DATE, GROUP_BY, date_time, OUTPUT_FORMAT)
print('Running Cost Report with Output format: {}, Output file: {}, Account id: {}, Billing date: {}, Report type: {}, Group by: {}'.format(OUTPUT_FORMAT, OUTPUT_FILE_NAME, ACCOUNT_ID, BILLING_DATE, REPORT_TYPE, GROUP_BY)) | parse arguments, validate input, set variables | billing/report.py | init | remkohdev/ibm_cloud_automation | 0 | python | def init():
parser = argparse.ArgumentParser(prog='python3 report.py')
list_of_output_formats = ['csv', 'json']
parser.add_argument('-o', '--output_format', required=True, help='Output format', default='csv', choices=list_of_output_formats)
parser.add_argument('-f', '--file', required=False, help='Output file name')
parser.add_argument('-a', '--account_id', required=True, help='IBM Cloud account id')
parser.add_argument('-d', '--billing_date', required=True, help='Billing date in format yyyy-MM')
parser.add_argument('-t', '--report_type', required=True, help='Report type')
parser.add_argument('-g', '--group_by', required=True, help='Group by field')
args = parser.parse_args()
global OUTPUT_FORMAT
OUTPUT_FORMAT = args.output_format
global ACCOUNT_ID
ACCOUNT_ID = args.account_id
global BILLING_DATE
BILLING_DATE = args.billing_date
global REPORT_TYPE
REPORT_TYPE = args.report_type
global GROUP_BY
GROUP_BY = args.group_by
global OUTPUT_FILE_NAME
now = datetime.now()
date_time = now.strftime('%Y%m%dT%H%M%S%f')
OUTPUT_FILE_NAME = 'cost-report_{}_by-{}-{}.{}'.format(BILLING_DATE, GROUP_BY, date_time, OUTPUT_FORMAT)
print('Running Cost Report with Output format: {}, Output file: {}, Account id: {}, Billing date: {}, Report type: {}, Group by: {}'.format(OUTPUT_FORMAT, OUTPUT_FILE_NAME, ACCOUNT_ID, BILLING_DATE, REPORT_TYPE, GROUP_BY)) | def init():
parser = argparse.ArgumentParser(prog='python3 report.py')
list_of_output_formats = ['csv', 'json']
parser.add_argument('-o', '--output_format', required=True, help='Output format', default='csv', choices=list_of_output_formats)
parser.add_argument('-f', '--file', required=False, help='Output file name')
parser.add_argument('-a', '--account_id', required=True, help='IBM Cloud account id')
parser.add_argument('-d', '--billing_date', required=True, help='Billing date in format yyyy-MM')
parser.add_argument('-t', '--report_type', required=True, help='Report type')
parser.add_argument('-g', '--group_by', required=True, help='Group by field')
args = parser.parse_args()
global OUTPUT_FORMAT
OUTPUT_FORMAT = args.output_format
global ACCOUNT_ID
ACCOUNT_ID = args.account_id
global BILLING_DATE
BILLING_DATE = args.billing_date
global REPORT_TYPE
REPORT_TYPE = args.report_type
global GROUP_BY
GROUP_BY = args.group_by
global OUTPUT_FILE_NAME
now = datetime.now()
date_time = now.strftime('%Y%m%dT%H%M%S%f')
OUTPUT_FILE_NAME = 'cost-report_{}_by-{}-{}.{}'.format(BILLING_DATE, GROUP_BY, date_time, OUTPUT_FORMAT)
print('Running Cost Report with Output format: {}, Output file: {}, Account id: {}, Billing date: {}, Report type: {}, Group by: {}'.format(OUTPUT_FORMAT, OUTPUT_FILE_NAME, ACCOUNT_ID, BILLING_DATE, REPORT_TYPE, GROUP_BY))<|docstring|>parse arguments, validate input, set variables<|endoftext|> |
8158eaea87b4f8aecdd38ddc0feae1acbe9e606b9820ed64ca0e77f00994f596 | def get_access_token():
'GET /access_token'
url = 'https://iam.cloud.ibm.com/identity/token'
payload = {'apikey': IBM_CLOUD_APIKEY, 'response_type': 'cloud_iam', 'grant_type': 'urn:ibm:params:oauth:grant-type:apikey'}
headers = {'Accept': 'application/json', 'Content-Type': 'application/x-www-form-urlencoded'}
response = requests.request('POST', url, headers=headers, data=payload)
access_token = response.json()['access_token']
return access_token | GET /access_token | billing/report.py | get_access_token | remkohdev/ibm_cloud_automation | 0 | python | def get_access_token():
url = 'https://iam.cloud.ibm.com/identity/token'
payload = {'apikey': IBM_CLOUD_APIKEY, 'response_type': 'cloud_iam', 'grant_type': 'urn:ibm:params:oauth:grant-type:apikey'}
headers = {'Accept': 'application/json', 'Content-Type': 'application/x-www-form-urlencoded'}
response = requests.request('POST', url, headers=headers, data=payload)
access_token = response.json()['access_token']
return access_token | def get_access_token():
url = 'https://iam.cloud.ibm.com/identity/token'
payload = {'apikey': IBM_CLOUD_APIKEY, 'response_type': 'cloud_iam', 'grant_type': 'urn:ibm:params:oauth:grant-type:apikey'}
headers = {'Accept': 'application/json', 'Content-Type': 'application/x-www-form-urlencoded'}
response = requests.request('POST', url, headers=headers, data=payload)
access_token = response.json()['access_token']
return access_token<|docstring|>GET /access_token<|endoftext|> |
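The request above is the standard IBM Cloud IAM apikey grant; a standalone equivalent that avoids the module-level IBM_CLOUD_APIKEY global might look like this (note that later records pass the token either raw or prefixed with 'Bearer'):

import requests

def iam_access_token(apikey):
    response = requests.post(
        'https://iam.cloud.ibm.com/identity/token',
        headers={'Accept': 'application/json',
                 'Content-Type': 'application/x-www-form-urlencoded'},
        data={'grant_type': 'urn:ibm:params:oauth:grant-type:apikey',
              'apikey': apikey})
    response.raise_for_status()
    return response.json()['access_token']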
ccdbe331f027a110186cd5b982de8648cae82be8ddb620549ad117b395bc4605 | def create_report_resources():
'Cost per Resource'
report_lines = []
report_header = ['resource_id', 'resource_name', 'resource_billable_cost']
report_lines.append(report_header)
resources = get_resources()
nr_of_resources = len(resources)
for resource in resources:
r_id = resource['resource_id']
r_name = resource['resource_name']
r_billable_cost = resource['billable_cost']
cost_line_item = [r_id, r_name, r_billable_cost]
report_lines.append(cost_line_item)
return report_lines | Cost per Resource | billing/report.py | create_report_resources | remkohdev/ibm_cloud_automation | 0 | python | def create_report_resources():
report_lines = []
report_header = ['resource_id', 'resource_name', 'resource_billable_cost']
report_lines.append(report_header)
resources = get_resources()
nr_of_resources = len(resources)
for resource in resources:
r_id = resource['resource_id']
r_name = resource['resource_name']
r_billable_cost = resource['billable_cost']
cost_line_item = [r_id, r_name, r_billable_cost]
report_lines.append(cost_line_item)
return report_lines | def create_report_resources():
report_lines = []
report_header = ['resource_id', 'resource_name', 'resource_billable_cost']
report_lines.append(report_header)
resources = get_resources()
nr_of_resources = len(resources)
for resource in resources:
r_id = resource['resource_id']
r_name = resource['resource_name']
r_billable_cost = resource['billable_cost']
cost_line_item = [r_id, r_name, r_billable_cost]
report_lines.append(cost_line_item)
return report_lines<|docstring|>Cost per Resource<|endoftext|> |
377781162e8bf90ba09f5b5963c2c767765fa1a0b3ed7fbcf751414a4be2b3d3 | def create_report_groupby_resourcegroup():
'Cost per ResourceGroup'
report_lines = []
report_header = ['rg_id', 'rg_name', 'nr_of_resources', 'total_cost']
report_lines.append(report_header)
resourcegroups = get_resourcegroups()
for resourcegroup in resourcegroups:
rg_name = resourcegroup['name']
rg_id = resourcegroup['id']
resources = get_resources_for_resourcegroup(rg_id)
nr_of_resources = len(resources)
total_cost = summarize_costs_for_resources(rg_id, rg_name, resources)
cost_line_item = [rg_id, rg_name, nr_of_resources, total_cost]
report_lines.append(cost_line_item)
return report_lines | Cost per ResourceGroup | billing/report.py | create_report_groupby_resourcegroup | remkohdev/ibm_cloud_automation | 0 | python | def create_report_groupby_resourcegroup():
report_lines = []
report_header = ['rg_id', 'rg_name', 'nr_of_resources', 'total_cost']
report_lines.append(report_header)
resourcegroups = get_resourcegroups()
for resourcegroup in resourcegroups:
rg_name = resourcegroup['name']
rg_id = resourcegroup['id']
resources = get_resources_for_resourcegroup(rg_id)
nr_of_resources = len(resources)
total_cost = summarize_costs_for_resources(rg_id, rg_name, resources)
cost_line_item = [rg_id, rg_name, nr_of_resources, total_cost]
report_lines.append(cost_line_item)
return report_lines | def create_report_groupby_resourcegroup():
report_lines = []
report_header = ['rg_id', 'rg_name', 'nr_of_resources', 'total_cost']
report_lines.append(report_header)
resourcegroups = get_resourcegroups()
for resourcegroup in resourcegroups:
rg_name = resourcegroup['name']
rg_id = resourcegroup['id']
resources = get_resources_for_resourcegroup(rg_id)
nr_of_resources = len(resources)
total_cost = summarize_costs_for_resources(rg_id, rg_name, resources)
cost_line_item = [rg_id, rg_name, nr_of_resources, total_cost]
report_lines.append(cost_line_item)
return report_lines<|docstring|>Cost per ResourceGroup<|endoftext|> |
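Chained with the helpers that follow, a per-resource-group run reduces to the sketch below (globals such as ACCOUNT_ID and BILLING_DATE are set by init() from the CLI flags):

report = create_report_groupby_resourcegroup()
write_to_file(report)
# report[0] is the header ['rg_id', 'rg_name', 'nr_of_resources', 'total_cost'];
# every following row describes one resource group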
23c9972d83833bca9469aea890c5189d5fb666c8e3accced16a21661b3d11de7 | def get_resources():
'Get usage for all the resources and plans in an account for a given month'
access_token = get_access_token()
url1 = 'https://billing.cloud.ibm.com/v4/accounts/%s/usage/%s?_names=1'
url2 = (url1 % (ACCOUNT_ID, BILLING_DATE))
headers1 = {'Authorization': access_token, 'Accept': 'application/json', 'Content-Type': 'application/x-www-form-urlencoded'}
payload = {'grant_type': 'urn:ibm:params:oauth:grant-type:apikey', 'apikey': 'iam_apikey'}
response = requests.get(url=url2, headers=headers1, data=payload)
resources = response.json()['resources']
return resources | Get usage for all the resources and plans in an account for a given month | billing/report.py | get_resources | remkohdev/ibm_cloud_automation | 0 | python | def get_resources():
access_token = get_access_token()
url1 = 'https://billing.cloud.ibm.com/v4/accounts/%s/usage/%s?_names=1'
url2 = (url1 % (ACCOUNT_ID, BILLING_DATE))
headers1 = {'Authorization': access_token, 'Accept': 'application/json', 'Content-Type': 'application/x-www-form-urlencoded'}
payload = {'grant_type': 'urn:ibm:params:oauth:grant-type:apikey', 'apikey': 'iam_apikey'}
response = requests.get(url=url2, headers=headers1, data=payload)
resources = response.json()['resources']
return resources | def get_resources():
access_token = get_access_token()
url1 = 'https://billing.cloud.ibm.com/v4/accounts/%s/usage/%s?_names=1'
url2 = (url1 % (ACCOUNT_ID, BILLING_DATE))
headers1 = {'Authorization': access_token, 'Accept': 'application/json', 'Content-Type': 'application/x-www-form-urlencoded'}
payload = {'grant_type': 'urn:ibm:params:oauth:grant-type:apikey', 'apikey': 'iam_apikey'}
response = requests.get(url=url2, headers=headers1, data=payload)
resources = response.json()['resources']
return resources<|docstring|>Get usage for all the resources and plans in an account for a given month<|endoftext|> |
a92c9197acbab641024b9bea7021a85951fa994897a7616bbc4055bb3dbb10bc | def get_resources_for_resourcegroup(rg_id):
'Get resource instance usage in a resource group'
access_token = get_access_token()
RESOURCE_GROUP_ID = rg_id
url1 = 'https://billing.cloud.ibm.com/v4/accounts/%s/resource_groups/%s/resource_instances/usage/%s?_names=1'
url2 = (url1 % (ACCOUNT_ID, RESOURCE_GROUP_ID, BILLING_DATE))
headers1 = {'Authorization': access_token, 'Accept': 'application/json', 'Content-Type': 'application/x-www-form-urlencoded'}
payload = {'grant_type': 'urn:ibm:params:oauth:grant-type:apikey', 'apikey': 'iam_apikey'}
response = requests.get(url=url2, headers=headers1, data=payload)
resources = response.json()['resources']
return resources | Get resource instance usage in a resource group | billing/report.py | get_resources_for_resourcegroup | remkohdev/ibm_cloud_automation | 0 | python | def get_resources_for_resourcegroup(rg_id):
access_token = get_access_token()
RESOURCE_GROUP_ID = rg_id
url1 = 'https://billing.cloud.ibm.com/v4/accounts/%s/resource_groups/%s/resource_instances/usage/%s?_names=1'
url2 = (url1 % (ACCOUNT_ID, RESOURCE_GROUP_ID, BILLING_DATE))
headers1 = {'Authorization': access_token, 'Accept': 'application/json', 'Content-Type': 'application/x-www-form-urlencoded'}
payload = {'grant_type': 'urn:ibm:params:oauth:grant-type:apikey', 'apikey': 'iam_apikey'}
response = requests.get(url=url2, headers=headers1, data=payload)
resources = response.json()['resources']
return resources | def get_resources_for_resourcegroup(rg_id):
access_token = get_access_token()
RESOURCE_GROUP_ID = rg_id
url1 = 'https://billing.cloud.ibm.com/v4/accounts/%s/resource_groups/%s/resource_instances/usage/%s?_names=1'
url2 = (url1 % (ACCOUNT_ID, RESOURCE_GROUP_ID, BILLING_DATE))
headers1 = {'Authorization': access_token, 'Accept': 'application/json', 'Content-Type': 'application/x-www-form-urlencoded'}
payload = {'grant_type': 'urn:ibm:params:oauth:grant-type:apikey', 'apikey': 'iam_apikey'}
response = requests.get(url=url2, headers=headers1, data=payload)
resources = response.json()['resources']
return resources<|docstring|>Get resource instance usage in a resource group<|endoftext|> |
4de2ce18fd0d7d062a33c8b8e15e090af9f6e9f4d63b458a4fd8cadbdebba90a | def get_resourcegroups():
'GET /resourcegroups'
access_token = get_access_token()
authorization_header = ('Bearer %s' % access_token)
url1 = 'https://resource-controller.cloud.ibm.com/v2/resource_groups/'
headers1 = {'Authorization': authorization_header, 'Accept': 'application/json', 'Content-Type': 'application/x-www-form-urlencoded'}
payload = {'grant_type': 'urn:ibm:params:oauth:grant-type:apikey', 'apikey': 'iam_apikey'}
response = requests.get(url=url1, headers=headers1, data=payload)
resource_groups = response.json()['resources']
return resource_groups | GET /resourcegroups | billing/report.py | get_resourcegroups | remkohdev/ibm_cloud_automation | 0 | python | def get_resourcegroups():
access_token = get_access_token()
authorization_header = ('Bearer %s' % access_token)
url1 = 'https://resource-controller.cloud.ibm.com/v2/resource_groups/'
headers1 = {'Authorization': authorization_header, 'Accept': 'application/json', 'Content-Type': 'application/x-www-form-urlencoded'}
payload = {'grant_type': 'urn:ibm:params:oauth:grant-type:apikey', 'apikey': 'iam_apikey'}
response = requests.get(url=url1, headers=headers1, data=payload)
resource_groups = response.json()['resources']
return resource_groups | def get_resourcegroups():
access_token = get_access_token()
authorization_header = ('Bearer %s' % access_token)
url1 = 'https://resource-controller.cloud.ibm.com/v2/resource_groups/'
headers1 = {'Authorization': authorization_header, 'Accept': 'application/json', 'Content-Type': 'application/x-www-form-urlencoded'}
payload = {'grant_type': 'urn:ibm:params:oauth:grant-type:apikey', 'apikey': 'iam_apikey'}
response = requests.get(url=url1, headers=headers1, data=payload)
resource_groups = response.json()['resources']
return resource_groups<|docstring|>GET /resourcegroups<|endoftext|> |
eae7b97946fce64e781ac3607bf715f2aa5fcfa89d32379ea849213daea48872 | def summarize_costs_for_resources(rg_id, rg_name, resources):
'summarize costs'
total_cost = float(0)
for resource in resources:
for use in resource['usage']:
cost = float(use['cost'])
total_cost += cost
return total_cost | summarize costs | billing/report.py | summarize_costs_for_resources | remkohdev/ibm_cloud_automation | 0 | python | def summarize_costs_for_resources(rg_id, rg_name, resources):
total_cost = float(0)
for resource in resources:
for use in resource['usage']:
cost = float(use['cost'])
total_cost += cost
return total_cost | def summarize_costs_for_resources(rg_id, rg_name, resources):
total_cost = float(0)
for resource in resources:
for use in resource['usage']:
cost = float(use['cost'])
total_cost += cost
return total_cost<|docstring|>summarize costs<|endoftext|> |
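A toy check with the usage shape the billing API returns; note that rg_id and rg_name are accepted but unused in the body above:

resources = [{'usage': [{'cost': '1.25'}, {'cost': '0.75'}]},
             {'usage': [{'cost': 3}]}]
print(summarize_costs_for_resources('rg-1', 'dev', resources))  # 5.0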
bd10359fdd05d8feb34b7cf480360c6b9ff3ae6fc580823a34cda768c2b3f64e | def write_to_file(report_lines):
'write to file'
print('----->write to file')
if (OUTPUT_FORMAT == 'json'):
with open(OUTPUT_FILE_NAME, 'w') as fp:
json.dump(report_lines, fp)
elif (OUTPUT_FORMAT == 'csv'):
df = pd.read_json(json.dumps(report_lines))
df.to_csv(OUTPUT_FILE_NAME)
else:
with open(OUTPUT_FILE_NAME, 'w') as fp:
json.dump(report_lines, fp) | write to file | billing/report.py | write_to_file | remkohdev/ibm_cloud_automation | 0 | python | def write_to_file(report_lines):
print('----->write to file')
if (OUTPUT_FORMAT == 'json'):
with open(OUTPUT_FILE_NAME, 'w') as fp:
json.dump(report_lines, fp)
elif (OUTPUT_FORMAT == 'csv'):
df = pd.read_json(json.dumps(report_lines))
df.to_csv(OUTPUT_FILE_NAME)
else:
with open(OUTPUT_FILE_NAME, 'w') as fp:
json.dump(report_lines, fp) | def write_to_file(report_lines):
print('----->write to file')
if (OUTPUT_FORMAT == 'json'):
with open(OUTPUT_FILE_NAME, 'w') as fp:
json.dump(report_lines, fp)
elif (OUTPUT_FORMAT == 'csv'):
df = pd.read_json(json.dumps(report_lines))
df.to_csv(OUTPUT_FILE_NAME)
else:
with open(OUTPUT_FILE_NAME, 'w') as fp:
json.dump(report_lines, fp)<|docstring|>write to file<|endoftext|> |
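In the csv branch the report is a list of rows whose first entry is the header, so pandas treats it as a plain 2-D table; a header-aware alternative (illustrative only) is the standard csv module:

import csv

def write_csv(report_lines, path):
    with open(path, 'w', newline='') as fp:
        csv.writer(fp).writerows(report_lines)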
9e40b74e4ff828ceeb3c1cc572dd6521ed17887054639705fda546fdc9416d6b | @abstractmethod
async def upload_tails_file(self, context: InjectionContext, revo_reg_def_id: str, tails_file_path: str) -> (bool, str):
'Upload tails file to tails server.\n\n Args:\n revo_reg_def_id: The Revocation registry definition ID\n tails_file: The path to the tails file to upload\n ' | Upload tails file to tails server.
Args:
revo_reg_def_id: The Revocation registry definition ID
tails_file: The path to the tails file to upload | aries_cloudagent/tails/base.py | upload_tails_file | baegjae/aries-cloudagent-python | 1 | python | @abstractmethod
async def upload_tails_file(self, context: InjectionContext, revo_reg_def_id: str, tails_file_path: str) -> (bool, str):
'Upload tails file to tails server.\n\n Args:\n revo_reg_def_id: The Revocation registry definition ID\n tails_file: The path to the tails file to upload\n ' | @abstractmethod
async def upload_tails_file(self, context: InjectionContext, revo_reg_def_id: str, tails_file_path: str) -> (bool, str):
'Upload tails file to tails server.\n\n Args:\n revo_reg_def_id: The Revocation registry definition ID\n tails_file: The path to the tails file to upload\n '<|docstring|>Upload tails file to tails server.
Args:
revo_reg_def_id: The Revocation registry definition ID
tails_file: The path to the tails file to upload<|endoftext|> |
1540c15cd42cbd1792ed7b693f2ee7abb3285f5d1bb9921138cd7f038469ff1d | @raises(ValueError)
def test_dense_transitive_closure_faults_nonzerodiagonal():
' Test Closure: Dense for non-zero diagonal '
Dtmp = D.copy()
Dtmp[0][0] = 1
transitive_closure(Dtmp, kind='metric', verbose=True) | Test Closure: Dense for non-zero diagonal | tests/test_closure.py | test_dense_transitive_closure_faults_nonzerodiagonal | rionbr/distanceclosure | 9 | python | @raises(ValueError)
def test_dense_transitive_closure_faults_nonzerodiagonal():
' '
Dtmp = D.copy()
Dtmp[0][0] = 1
transitive_closure(Dtmp, kind='metric', verbose=True) | @raises(ValueError)
def test_dense_transitive_closure_faults_nonzerodiagonal():
' '
Dtmp = D.copy()
Dtmp[0][0] = 1
transitive_closure(Dtmp, kind='metric', verbose=True)<|docstring|>Test Closure: Dense for non-zero diagonal<|endoftext|> |
0a3da7b606a3f2b060a595d5dd84d181b48c9cbeef2a58f2c6701aef37a555b5 | def test_dense_transitive_closure_metric():
' Test Closure: Dense Transitive Closure (Metric) '
Cm = transitive_closure(D, kind='metric', algorithm='dense', verbose=True)
assert np.isclose(Cm, Cm_true).all() | Test Closure: Dense Transitive Closure (Metric) | tests/test_closure.py | test_dense_transitive_closure_metric | rionbr/distanceclosure | 9 | python | def test_dense_transitive_closure_metric():
' '
Cm = transitive_closure(D, kind='metric', algorithm='dense', verbose=True)
assert np.isclose(Cm, Cm_true).all() | def test_dense_transitive_closure_metric():
' '
Cm = transitive_closure(D, kind='metric', algorithm='dense', verbose=True)
assert np.isclose(Cm, Cm_true).all()<|docstring|>Test Closure: Dense Transitive Closure (Metric)<|endoftext|> |
76f5953d0accdc2c38e74fe7066dc894e112b1edb6623bbc82a0e3284127ab71 | def test_dense_transitive_closure_ultrametric():
' Test Closure: Dense Transitive Closure (Ultrametric) '
Cu = transitive_closure(D, kind='ultrametric', algorithm='dense')
assert np.isclose(Cu, Cu_true).all() | Test Closure: Dense Transitive Closure (Ultrametric) | tests/test_closure.py | test_dense_transitive_closure_ultrametric | rionbr/distanceclosure | 9 | python | def test_dense_transitive_closure_ultrametric():
' '
Cu = transitive_closure(D, kind='ultrametric', algorithm='dense')
assert np.isclose(Cu, Cu_true).all() | def test_dense_transitive_closure_ultrametric():
' '
Cu = transitive_closure(D, kind='ultrametric', algorithm='dense')
assert np.isclose(Cu, Cu_true).all()<|docstring|>Test Closure: Dense Transitive Closure (Ultrametric)<|endoftext|> |
017c1702694c2e03e767812c0f9f5d4363c4e92f2879f423274b910cff414998 | def test_dense_backbone():
' Test Closure: Dense Backbone return '
Cm = transitive_closure(D, kind='metric', algorithm='dense')
Bm = backbone(D, Cm)
assert np.isclose(Bm, Bm_true).all() | Test Closure: Dense Backbone return | tests/test_closure.py | test_dense_backbone | rionbr/distanceclosure | 9 | python | def test_dense_backbone():
' '
Cm = transitive_closure(D, kind='metric', algorithm='dense')
Bm = backbone(D, Cm)
assert np.isclose(Bm, Bm_true).all() | def test_dense_backbone():
' '
Cm = transitive_closure(D, kind='metric', algorithm='dense')
Bm = backbone(D, Cm)
assert np.isclose(Bm, Bm_true).all()<|docstring|>Test Closure: Dense Backbone return<|endoftext|> |
ea606b9c4b4b63571e12a16509635ebcea34dece59c8db18384935b050f962ad | def test_dijkstra_vs_dense_transitive_closure_metric():
' Test Closure: Dijkstra vs Dense metric comparison '
C_Dense_um = transitive_closure(D, kind='metric', algorithm='dense')
C_Djisktra_um = transitive_closure(D, kind='metric', algorithm='dijkstra')
assert (C_Dense_um == C_Djisktra_um.A).all() | Test Closure: Dijkstra vs Dense metric comparison | tests/test_closure.py | test_dijkstra_vs_dense_transitive_closure_metric | rionbr/distanceclosure | 9 | python | def test_dijkstra_vs_dense_transitive_closure_metric():
' '
C_Dense_um = transitive_closure(D, kind='metric', algorithm='dense')
C_Djisktra_um = transitive_closure(D, kind='metric', algorithm='dijkstra')
assert (C_Dense_um == C_Djisktra_um.A).all() | def test_dijkstra_vs_dense_transitive_closure_metric():
' '
C_Dense_um = transitive_closure(D, kind='metric', algorithm='dense')
C_Djisktra_um = transitive_closure(D, kind='metric', algorithm='dijkstra')
assert (C_Dense_um == C_Djisktra_um.A).all()<|docstring|>Test Closure: Dijkstra vs Dense metric comparison<|endoftext|> |
5c60d131555e4897a358d1ce742c9246ddf3b7e5a8b023373bd64523a6e53cfa | def test_dijkstra_vs_dense_transitive_closure_ultrametric():
' Test Closure: Dijkstra vs Dense ultrametric comparison '
C_Dense_um = transitive_closure(D, kind='ultrametric', algorithm='dense')
C_Djisktra_um = transitive_closure(D_sparse, kind='ultrametric', algorithm='dijkstra')
assert (C_Dense_um == C_Djisktra_um.A).all() | Test Closure: Dijkstra vs Dense ultrametric comparison | tests/test_closure.py | test_dijkstra_vs_dense_transitive_closure_ultrametric | rionbr/distanceclosure | 9 | python | def test_dijkstra_vs_dense_transitive_closure_ultrametric():
' '
C_Dense_um = transitive_closure(D, kind='ultrametric', algorithm='dense')
C_Djisktra_um = transitive_closure(D_sparse, kind='ultrametric', algorithm='dijkstra')
assert (C_Dense_um == C_Djisktra_um.A).all() | def test_dijkstra_vs_dense_transitive_closure_ultrametric():
' '
C_Dense_um = transitive_closure(D, kind='ultrametric', algorithm='dense')
C_Djisktra_um = transitive_closure(D_sparse, kind='ultrametric', algorithm='dijkstra')
assert (C_Dense_um == C_Djisktra_um.A).all()<|docstring|>Test Closure: Dijkstra vs Dense ultrametric comparison<|endoftext|> |
60538ee0538a257c3c4a4a26478c57a2e6924aa23be229952e95f4849729e98b | def test_dijkstra_vs_dense_backbone():
' Test Closure: Dijkstra vs Dense backbone return '
C_Dense_m = transitive_closure(D, kind='metric', algorithm='dense')
B_Dense_m = backbone(D, C_Dense_m)
C_Djisktra_m = transitive_closure(D_sparse, kind='metric', algorithm='dijkstra')
B_Djisktra_m = backbone(D_sparse, C_Djisktra_m)
B_Djisktra_m = B_Djisktra_m.A
np.fill_diagonal(B_Djisktra_m, (- 1))
assert (B_Dense_m == B_Djisktra_m).all() | Test Closure: Dijkstra vs Dense backbone return | tests/test_closure.py | test_dijkstra_vs_dense_backbone | rionbr/distanceclosure | 9 | python | def test_dijkstra_vs_dense_backbone():
' '
C_Dense_m = transitive_closure(D, kind='metric', algorithm='dense')
B_Dense_m = backbone(D, C_Dense_m)
C_Djisktra_m = transitive_closure(D_sparse, kind='metric', algorithm='dijkstra')
B_Djisktra_m = backbone(D_sparse, C_Djisktra_m)
B_Djisktra_m = B_Djisktra_m.A
np.fill_diagonal(B_Djisktra_m, (- 1))
assert (B_Dense_m == B_Djisktra_m).all() | def test_dijkstra_vs_dense_backbone():
' '
C_Dense_m = transitive_closure(D, kind='metric', algorithm='dense')
B_Dense_m = backbone(D, C_Dense_m)
C_Djisktra_m = transitive_closure(D_sparse, kind='metric', algorithm='dijkstra')
B_Djisktra_m = backbone(D_sparse, C_Djisktra_m)
B_Djisktra_m = B_Djisktra_m.A
np.fill_diagonal(B_Djisktra_m, (- 1))
assert (B_Dense_m == B_Djisktra_m).all()<|docstring|>Test Closure: Dijkstra vs Dense backbone return<|endoftext|> |
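A hedged end-to-end sketch with the package these tests exercise (the import path follows the 0.x-era distanceclosure layout and may differ in newer releases):

import numpy as np
from distanceclosure.closure import transitive_closure

D = np.array([[0., 1., 4.],
              [1., 0., 2.],
              [4., 2., 0.]])
Cm = transitive_closure(D, kind='metric', algorithm='dense')
print(Cm[0, 2])  # 3.0: the indirect path 0-1-2 beats the direct distance 4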
4a6c0319ca15c6daef592f377dbea52c680fee199fa1bfa230be81a28c8fdd1d | def load_dataset(name):
"\n Loads a multi-label classification dataset.\n\n Parameters\n ----------\n name : string\n Name of the dataset. Currently only 'yeast' is available.\n "
if (name == 'yeast'):
data = fetch_mldata('yeast')
X = data.data
y = data.target.toarray().astype(np.int).T
return (X, y)
else:
raise Exception('No such dataset') | Loads a multi-label classification dataset.
Parameters
----------
name : string
Name of the dataset. Currently only 'yeast' is available. | skml/datasets/load_datasets.py | load_dataset | ChristianSch/skml | 5 | python | def load_dataset(name):
"\n Loads a multi-label classification dataset.\n\n Parameters\n ----------\n name : string\n Name of the dataset. Currently only 'yeast' is available.\n "
if (name == 'yeast'):
data = fetch_mldata('yeast')
X = data.data
y = data.target.toarray().astype(np.int).T
return (X, y)
else:
raise Exception('No such dataset') | def load_dataset(name):
"\n Loads a multi-label classification dataset.\n\n Parameters\n ----------\n name : string\n Name of the dataset. Currently only 'yeast' is available.\n "
if (name == 'yeast'):
data = fetch_mldata('yeast')
X = data.data
y = data.target.toarray().astype(np.int).T
return (X, y)
else:
raise Exception('No such dataset')<|docstring|>Loads a multi-label classification dataset.
Parameters
----------
name : string
Name of the dataset. Currently only 'yeast' is available.<|endoftext|> |
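Usage per the docstring; fetch_mldata was removed from scikit-learn after 0.20, so this record only runs against older scikit-learn releases with an mldata.org mirror available:

from skml.datasets import load_dataset  # module path per the record metadata

X, y = load_dataset('yeast')
print(X.shape, y.shape)  # (n_samples, n_features) and (n_samples, n_labels)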
00505ffd06f4613e4e8072d9220674d17e5f3dd2819e78d0d91e982111d384a4 | def interactions_getter(manager_router):
'\n Gets the slash command processor using `Client.slasher` of an ``_EventHandlerManagerRouter``.\n \n Parameters\n ----------\n manager_router : ``_EventHandlerManagerRouter``\n The caller manager router.\n \n Returns\n -------\n handlers : `list` of ``Slasher``\n '
handlers = []
for client in manager_router.parent.clients:
manager = getattr(client, 'interactions', None)
if (manager is None):
continue
handler = manager.parent
if isinstance(handler, Slasher):
handlers.append(handler)
return handlers | Gets the slash command processor using `Client.slasher` of an ``_EventHandlerManagerRouter``.
Parameters
----------
manager_router : ``_EventHandlerManagerRouter``
The caller manager router.
Returns
-------
handlers : `list` of ``Slasher`` | hata/ext/slash/client_wrapper_extension.py | interactions_getter | WizzyBots/hata | 0 | python | def interactions_getter(manager_router):
'\n Gets the slash command processor using `Client.slasher` of an ``_EventHandlerManagerRouter``.\n \n Parameters\n ----------\n manager_router : ``_EventHandlerManagerRouter``\n The caller manager router.\n \n Returns\n -------\n handlers : `list` of ``Slasher``\n '
handlers = []
for client in manager_router.parent.clients:
manager = getattr(client, 'interactions', None)
if (manager is None):
continue
handler = manager.parent
if isinstance(handler, Slasher):
handlers.append(handler)
return handlers | def interactions_getter(manager_router):
'\n Gets the slash command processor using `Client.slasher` of an ``_EventHandlerManagerRouter``.\n \n Parameters\n ----------\n manager_router : ``_EventHandlerManagerRouter``\n The caller manager router.\n \n Returns\n -------\n handlers : `list` of ``Slasher``\n '
handlers = []
for client in manager_router.parent.clients:
manager = getattr(client, 'interactions', None)
if (manager is None):
continue
handler = manager.parent
if isinstance(handler, Slasher):
handlers.append(handler)
return handlers<|docstring|>Gets the slash command processor using `Client.slasher` of an ``_EventHandlerManagerRouter``.
Parameters
----------
manager_router : ``_EventHandlerManagerRouter``
The caller manager router.
Returns
-------
handlers : `list` of ``Slasher``<|endoftext|> |
cd9e714c5619662fa356a92a0f13f1fa5694b8b05d7b2d3758a5a96a75de7d0f | def from_class_constructor(klass):
'\n Creates a slash command from the given class.\n \n Parameters\n ----------\n klass : `type`\n The class to create a slash command from.\n \n Returns\n -------\n self : ``SlasherApplicationCommand``, ``Router``\n \n Raises\n ------\n BaseException\n Any exception raised by the respective ``SlasherApplicationCommand`` constructor.\n '
return SlasherApplicationCommand.from_class(klass) | Creates a slash command from the given class.
Parameters
----------
klass : `type`
The class to create a slash command from.
Returns
-------
self : ``SlasherApplicationCommand``, ``Router``
Raises
------
BaseException
Any exception raised by the respective ``SlasherApplicationCommand`` constructor. | hata/ext/slash/client_wrapper_extension.py | from_class_constructor | WizzyBots/hata | 0 | python | def from_class_constructor(klass):
'\n Creates a slash command from the given class.\n \n Parameters\n ----------\n klass : `type`\n The class to create a slash command from.\n \n Returns\n -------\n self : ``SlasherApplicationCommand``, ``Router``\n \n Raises\n ------\n BaseException\n Any exception raised by the respective ``SlasherApplicationCommand`` constructor.\n '
return SlasherApplicationCommand.from_class(klass) | def from_class_constructor(klass):
'\n Creates a slash command from the given class.\n \n Parameters\n ----------\n klass : `type`\n The class to create a slash command from.\n \n Returns\n -------\n self : ``SlasherApplicationCommand``, ``Router``\n \n Raises\n ------\n BaseException\n Any exception raised by the respective ``SlasherApplicationCommand`` constructor.\n '
return SlasherApplicationCommand.from_class(klass)<|docstring|>Creates a slash command from the given class.
Parameters
----------
klass : `type`
The class to create a slash command from.
Returns
-------
self : ``SlasherApplicationCommand``, ``Router``
Raises
------
BaseException
Any exception raised by the respective ``SlasherApplicationCommand`` constructor.<|endoftext|> |
264f5e49ca7071f299d6171280b0117b4829322fd342a14db4cace846d01bde6 | @property
def interactions(self):
'\n Returns a ``_EventHandlerManagerRouter``, with what slash commands can be added to more clients at the\n same time.\n \n Returns\n -------\n event_handler_manager_router : ``_EventHandlerManagerRouter``\n '
return _EventHandlerManagerRouter(self, interactions_getter, from_class_constructor) | Returns a ``_EventHandlerManagerRouter``, with what slash commands can be added to more clients at the
same time.
Returns
-------
event_handler_manager_router : ``_EventHandlerManagerRouter`` | hata/ext/slash/client_wrapper_extension.py | interactions | WizzyBots/hata | 0 | python | @property
def interactions(self):
'\n Returns a ``_EventHandlerManagerRouter``, with what slash commands can be added to more clients at the\n same time.\n \n Returns\n -------\n event_handler_manager_router : ``_EventHandlerManagerRouter``\n '
return _EventHandlerManagerRouter(self, interactions_getter, from_class_constructor) | @property
def interactions(self):
'\n Returns a ``_EventHandlerManagerRouter``, with what slash commands can be added to more clients at the\n same time.\n \n Returns\n -------\n event_handler_manager_router : ``_EventHandlerManagerRouter``\n '
return _EventHandlerManagerRouter(self, interactions_getter, from_class_constructor)<|docstring|>Returns a ``_EventHandlerManagerRouter``, with what slash commands can be added to more clients at the
same time.
Returns
-------
event_handler_manager_router : ``_EventHandlerManagerRouter``<|endoftext|> |
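The three hata records above are one mechanism seen from three sides: a getter that collects each client's `Slasher`, a constructor that turns a class into a command, and a property that hands both to a router. A schematic sketch of that wiring with toy stand-ins (none of these names are hata API):

```python
# Toy stand-ins for the router pattern above; not hata's real classes.
class ToyRouter:
    def __init__(self, parent, getter, constructor):
        self.parent = parent
        self.getter = getter
        self.constructor = constructor

    def __call__(self, klass):
        command = self.constructor(klass)   # build the command once
        for handler in self.getter(self):   # fan it out to every client's handler
            handler.append(command)
        return command
```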
4cad09131b7a57108b58ee18b049363aeffb446187ab172c7df624c9bb713e8a | @defer.inlineCallbacks
def filter_events_for_clients(self, user_tuples, events, event_id_to_state):
' Returns dict of user_id -> list of events that user is allowed to\n see.\n\n :param (str, bool) user_tuples: (user id, is_peeking) for each\n user to be checked. is_peeking should be true if:\n * the user is not currently a member of the room, and:\n * the user has not been a member of the room since the given\n events\n '
forgotten = (yield defer.gatherResults([self.store.who_forgot_in_room(room_id) for room_id in frozenset((e.room_id for e in events))], consumeErrors=True))
event_id_forgotten = frozenset((row['event_id'] for rows in forgotten for row in rows))
def allowed(event, user_id, is_peeking):
state = event_id_to_state[event.event_id]
visibility_event = state.get((EventTypes.RoomHistoryVisibility, ''), None)
if visibility_event:
visibility = visibility_event.content.get('history_visibility', 'shared')
else:
visibility = 'shared'
if (visibility not in VISIBILITY_PRIORITY):
visibility = 'shared'
if (visibility == 'world_readable'):
return True
if (event.type == EventTypes.RoomHistoryVisibility):
prev_content = event.unsigned.get('prev_content', {})
prev_visibility = prev_content.get('history_visibility', None)
if (prev_visibility not in VISIBILITY_PRIORITY):
prev_visibility = 'shared'
new_priority = VISIBILITY_PRIORITY.index(visibility)
old_priority = VISIBILITY_PRIORITY.index(prev_visibility)
if (old_priority < new_priority):
visibility = prev_visibility
membership_event = state.get((EventTypes.Member, user_id), None)
if membership_event:
if (membership_event.event_id in event_id_forgotten):
membership = None
else:
membership = membership_event.membership
else:
membership = None
if (membership == Membership.JOIN):
return True
if (visibility == 'joined'):
return False
elif (visibility == 'invited'):
return (membership == Membership.INVITE)
else:
return (not is_peeking)
defer.returnValue({user_id: [event for event in events if allowed(event, user_id, is_peeking)] for (user_id, is_peeking) in user_tuples}) | Returns dict of user_id -> list of events that user is allowed to
see.
:param (str, bool) user_tuples: (user id, is_peeking) for each
user to be checked. is_peeking should be true if:
* the user is not currently a member of the room, and:
* the user has not been a member of the room since the given
events | synapse/handlers/_base.py | filter_events_for_clients | negzi/synapse | 0 | python | @defer.inlineCallbacks
def filter_events_for_clients(self, user_tuples, events, event_id_to_state):
' Returns dict of user_id -> list of events that user is allowed to\n see.\n\n :param (str, bool) user_tuples: (user id, is_peeking) for each\n user to be checked. is_peeking should be true if:\n * the user is not currently a member of the room, and:\n * the user has not been a member of the room since the given\n events\n '
forgotten = (yield defer.gatherResults([self.store.who_forgot_in_room(room_id) for room_id in frozenset((e.room_id for e in events))], consumeErrors=True))
event_id_forgotten = frozenset((row['event_id'] for rows in forgotten for row in rows))
def allowed(event, user_id, is_peeking):
state = event_id_to_state[event.event_id]
        visibility_event = state.get((EventTypes.RoomHistoryVisibility, ''), None)
if visibility_event:
visibility = visibility_event.content.get('history_visibility', 'shared')
else:
visibility = 'shared'
if (visibility not in VISIBILITY_PRIORITY):
visibility = 'shared'
if (visibility == 'world_readable'):
return True
if (event.type == EventTypes.RoomHistoryVisibility):
prev_content = event.unsigned.get('prev_content', {})
prev_visibility = prev_content.get('history_visibility', None)
if (prev_visibility not in VISIBILITY_PRIORITY):
prev_visibility = 'shared'
new_priority = VISIBILITY_PRIORITY.index(visibility)
old_priority = VISIBILITY_PRIORITY.index(prev_visibility)
if (old_priority < new_priority):
visibility = prev_visibility
membership_event = state.get((EventTypes.Member, user_id), None)
if membership_event:
if (membership_event.event_id in event_id_forgotten):
membership = None
else:
membership = membership_event.membership
else:
membership = None
if (membership == Membership.JOIN):
return True
if (visibility == 'joined'):
return False
elif (visibility == 'invited'):
return (membership == Membership.INVITE)
else:
return (not is_peeking)
defer.returnValue({user_id: [event for event in events if allowed(event, user_id, is_peeking)] for (user_id, is_peeking) in user_tuples}) | @defer.inlineCallbacks
def filter_events_for_clients(self, user_tuples, events, event_id_to_state):
' Returns dict of user_id -> list of events that user is allowed to\n see.\n\n :param (str, bool) user_tuples: (user id, is_peeking) for each\n user to be checked. is_peeking should be true if:\n * the user is not currently a member of the room, and:\n * the user has not been a member of the room since the given\n events\n '
forgotten = (yield defer.gatherResults([self.store.who_forgot_in_room(room_id) for room_id in frozenset((e.room_id for e in events))], consumeErrors=True))
event_id_forgotten = frozenset((row['event_id'] for rows in forgotten for row in rows))
def allowed(event, user_id, is_peeking):
state = event_id_to_state[event.event_id]
        visibility_event = state.get((EventTypes.RoomHistoryVisibility, ''), None)
if visibility_event:
visibility = visibility_event.content.get('history_visibility', 'shared')
else:
visibility = 'shared'
if (visibility not in VISIBILITY_PRIORITY):
visibility = 'shared'
if (visibility == 'world_readable'):
return True
if (event.type == EventTypes.RoomHistoryVisibility):
prev_content = event.unsigned.get('prev_content', {})
prev_visibility = prev_content.get('history_visibility', None)
if (prev_visibility not in VISIBILITY_PRIORITY):
prev_visibility = 'shared'
new_priority = VISIBILITY_PRIORITY.index(visibility)
old_priority = VISIBILITY_PRIORITY.index(prev_visibility)
if (old_priority < new_priority):
visibility = prev_visibility
membership_event = state.get((EventTypes.Member, user_id), None)
if membership_event:
if (membership_event.event_id in event_id_forgotten):
membership = None
else:
membership = membership_event.membership
else:
membership = None
if (membership == Membership.JOIN):
return True
if (visibility == 'joined'):
return False
elif (visibility == 'invited'):
return (membership == Membership.INVITE)
else:
return (not is_peeking)
defer.returnValue({user_id: [event for event in events if allowed(event, user_id, is_peeking)] for (user_id, is_peeking) in user_tuples})<|docstring|>Returns dict of user_id -> list of events that user is allowed to
see.
:param (str, bool) user_tuples: (user id, is_peeking) for each
user to be checked. is_peeking should be true if:
* the user is not currently a member of the room, and:
* the user has not been a member of the room since the given
events<|endoftext|> |
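The downgrade logic inside `allowed` hinges on index comparison in `VISIBILITY_PRIORITY`. A standalone rendering of just that rule, with the tuple ordering assumed from context (least to most restrictive):

```python
# Assumed ordering, least restrictive first; mirrors the comparison in allowed().
VISIBILITY_PRIORITY = ('world_readable', 'shared', 'invited', 'joined')

def visibility_for_event(new, prev):
    # A visibility change itself is filtered under the less restrictive
    # of the old and new settings.
    if VISIBILITY_PRIORITY.index(prev) < VISIBILITY_PRIORITY.index(new):
        return prev
    return new

assert visibility_for_event('joined', 'shared') == 'shared'
assert visibility_for_event('shared', 'joined') == 'shared'
```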
b72a4ab27b9654994dd560226ac6a5f7ed91a4acfd3c6590be63440138442912 | @defer.inlineCallbacks
def _filter_events_for_client(self, user_id, events, is_peeking=False):
'\n Check which events a user is allowed to see\n\n :param str user_id: user id to be checked\n :param [synapse.events.EventBase] events: list of events to be checked\n :param bool is_peeking should be True if:\n * the user is not currently a member of the room, and:\n * the user has not been a member of the room since the given\n events\n :rtype [synapse.events.EventBase]\n '
types = ((EventTypes.RoomHistoryVisibility, ''), (EventTypes.Member, user_id))
event_id_to_state = (yield self.store.get_state_for_events(frozenset((e.event_id for e in events)), types=types))
res = (yield self.filter_events_for_clients([(user_id, is_peeking)], events, event_id_to_state))
defer.returnValue(res.get(user_id, [])) | Check which events a user is allowed to see
:param str user_id: user id to be checked
:param [synapse.events.EventBase] events: list of events to be checked
:param bool is_peeking should be True if:
* the user is not currently a member of the room, and:
* the user has not been a member of the room since the given
events
:rtype [synapse.events.EventBase] | synapse/handlers/_base.py | _filter_events_for_client | negzi/synapse | 0 | python | @defer.inlineCallbacks
def _filter_events_for_client(self, user_id, events, is_peeking=False):
'\n Check which events a user is allowed to see\n\n :param str user_id: user id to be checked\n :param [synapse.events.EventBase] events: list of events to be checked\n :param bool is_peeking should be True if:\n * the user is not currently a member of the room, and:\n * the user has not been a member of the room since the given\n events\n :rtype [synapse.events.EventBase]\n '
    types = ((EventTypes.RoomHistoryVisibility, ''), (EventTypes.Member, user_id))
event_id_to_state = (yield self.store.get_state_for_events(frozenset((e.event_id for e in events)), types=types))
res = (yield self.filter_events_for_clients([(user_id, is_peeking)], events, event_id_to_state))
defer.returnValue(res.get(user_id, [])) | @defer.inlineCallbacks
def _filter_events_for_client(self, user_id, events, is_peeking=False):
'\n Check which events a user is allowed to see\n\n :param str user_id: user id to be checked\n :param [synapse.events.EventBase] events: list of events to be checked\n :param bool is_peeking should be True if:\n * the user is not currently a member of the room, and:\n * the user has not been a member of the room since the given\n events\n :rtype [synapse.events.EventBase]\n '
    types = ((EventTypes.RoomHistoryVisibility, ''), (EventTypes.Member, user_id))
event_id_to_state = (yield self.store.get_state_for_events(frozenset((e.event_id for e in events)), types=types))
res = (yield self.filter_events_for_clients([(user_id, is_peeking)], events, event_id_to_state))
defer.returnValue(res.get(user_id, []))<|docstring|>Check which events a user is allowed to see
:param str user_id: user id to be checked
:param [synapse.events.EventBase] events: list of events to be checked
:param bool is_peeking should be True if:
* the user is not currently a member of the room, and:
* the user has not been a member of the room since the given
events
:rtype [synapse.events.EventBase]<|endoftext|> |
4a78f1032e9214a15ae63bc862097094949c30e4281782a5163784070f5338a5 | def run(self):
' Main loop through all subjects '
...
explored_subjects = self._load()
explored_adj = len(explored_subjects)
for (idx, subject) in enumerate(filter((lambda x: (x not in explored_subjects)), all_subjects)):
...
course_info = cfg.GENERAL.CL_COLUMNS.copy()
...
logger.info(f"Selected Subject {course_info['Department_Abbreviation']} - {int((((idx + explored_adj) / len(all_subjects)) * 100))}% complete")
self.process_subject(course_info)
...
if self.args.save:
explored_subjects = np.append(explored_subjects, code)
self._save(dict_list=self.subject_course_info, explored_subjects=explored_subjects)
return explored_subjects | Main loop through all subjects | code/WEBSCRAPER PYTHON/courseScrapers/template.py | run | chidinzerem/chidinzerem.github.io | 0 | python | def run(self):
' '
...
explored_subjects = self._load()
explored_adj = len(explored_subjects)
for (idx, subject) in enumerate(filter((lambda x: (x not in explored_subjects)), all_subjects)):
...
course_info = cfg.GENERAL.CL_COLUMNS.copy()
...
logger.info(f"Selected Subject {course_info['Department_Abbreviation']} - {int((((idx + explored_adj) / len(all_subjects)) * 100))}% complete")
self.process_subject(course_info)
...
if self.args.save:
explored_subjects = np.append(explored_subjects, code)
self._save(dict_list=self.subject_course_info, explored_subjects=explored_subjects)
return explored_subjects | def run(self):
' '
...
explored_subjects = self._load()
explored_adj = len(explored_subjects)
for (idx, subject) in enumerate(filter((lambda x: (x not in explored_subjects)), all_subjects)):
...
course_info = cfg.GENERAL.CL_COLUMNS.copy()
...
logger.info(f"Selected Subject {course_info['Department_Abbreviation']} - {int((((idx + explored_adj) / len(all_subjects)) * 100))}% complete")
self.process_subject(course_info)
...
if self.args.save:
explored_subjects = np.append(explored_subjects, code)
self._save(dict_list=self.subject_course_info, explored_subjects=explored_subjects)
return explored_subjects<|docstring|>Main loop through all subjects<|endoftext|> |
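The `run` loop above implements a resumable scrape: previously explored subjects are filtered out and the progress percentage is offset by how many were already done. The same pattern in isolation, with illustrative data:

```python
# Resumable-loop pattern from run(), reduced to its core.
all_subjects = ['MATH', 'PHYS', 'CHEM', 'BIO']
explored = ['MATH']              # restored from a previous save
offset = len(explored)

for idx, subject in enumerate(filter(lambda s: s not in explored, all_subjects)):
    pct = int((idx + offset) / len(all_subjects) * 100)
    print(f'Selected Subject {subject} - {pct}% complete')
```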
5a6f851bae70eee626a56b50520cee31963f60af4318cfef456ad11ab3c5af1c | def process_subject(self, template_course_info, *args, **kwargs):
' Secondary loop through all courses in subject '
all_courses = self.locate()
for course in all_courses:
...
course_info = template_course_info.copy()
...
self.process_course(course_info) | Secondary loop through all courses in subject | code/WEBSCRAPER PYTHON/courseScrapers/template.py | process_subject | chidinzerem/chidinzerem.github.io | 0 | python | def process_subject(self, template_course_info, *args, **kwargs):
' '
all_courses = self.locate()
for course in all_courses:
...
course_info = template_course_info.copy()
...
self.process_course(course_info) | def process_subject(self, template_course_info, *args, **kwargs):
' '
all_courses = self.locate()
for course in all_courses:
...
course_info = template_course_info.copy()
...
self.process_course(course_info)<|docstring|>Secondary loop through all courses in subject<|endoftext|> |
aed87c30693741a59619fc9583ff8a9e4a5768511395b289ef6e653f235e5ab3 | def process_course(self, template_course_info, *args, **kwargs):
' Third loop through all sections in course '
all_sections = self.locate()
for section in all_sections:
...
course_info = template_course_info.copy()
...
self.subject_course_info.append(course_info) | Third loop through all sections in course | code/WEBSCRAPER PYTHON/courseScrapers/template.py | process_course | chidinzerem/chidinzerem.github.io | 0 | python | def process_course(self, template_course_info, *args, **kwargs):
' '
all_sections = self.locate()
for section in all_sections:
...
course_info = template_course_info.copy()
...
self.subject_course_info.append(course_info) | def process_course(self, template_course_info, *args, **kwargs):
' '
all_sections = self.locate()
for section in all_sections:
...
course_info = template_course_info.copy()
...
self.subject_course_info.append(course_info)<|docstring|>Third loop through all sections in course<|endoftext|> |
d1e8d3167de081da87783ec9f19cb5ea24be2710a9b6de3ccac1aa4587d0b7e3 | def run_with_dependencies(global_context, cmd_name, cmd_argv, run_node, top_node, package):
'Run the given command, including its dependencies as defined in the\n global_context.'
deps = global_context.retrieve_dependencies(cmd_name)
for dep_cmd_name in deps:
dep_cmd_argv = global_context.retrieve_command_argv(dep_cmd_name)
resolve_and_run_command(global_context, dep_cmd_name, dep_cmd_argv, run_node, package)
resolve_and_run_command(global_context, cmd_name, cmd_argv, run_node, package) | Run the given command, including its dependencies as defined in the
global_context. | bento/commands/wrapper_utils.py | run_with_dependencies | cournape/Bento | 55 | python | def run_with_dependencies(global_context, cmd_name, cmd_argv, run_node, top_node, package):
'Run the given command, including its dependencies as defined in the\n global_context.'
deps = global_context.retrieve_dependencies(cmd_name)
for dep_cmd_name in deps:
dep_cmd_argv = global_context.retrieve_command_argv(dep_cmd_name)
resolve_and_run_command(global_context, dep_cmd_name, dep_cmd_argv, run_node, package)
resolve_and_run_command(global_context, cmd_name, cmd_argv, run_node, package) | def run_with_dependencies(global_context, cmd_name, cmd_argv, run_node, top_node, package):
'Run the given command, including its dependencies as defined in the\n global_context.'
deps = global_context.retrieve_dependencies(cmd_name)
for dep_cmd_name in deps:
dep_cmd_argv = global_context.retrieve_command_argv(dep_cmd_name)
resolve_and_run_command(global_context, dep_cmd_name, dep_cmd_argv, run_node, package)
resolve_and_run_command(global_context, cmd_name, cmd_argv, run_node, package)<|docstring|>Run the given command, including its dependencies as defined in the
global_context.<|endoftext|> |
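A toy rendering of the walk in `run_with_dependencies`: look up the command's prerequisites, run each with its stored argv, then run the command itself. The two dicts stand in for bento's real global context.

```python
# Toy stand-in for bento's global context: one level of prerequisites.
DEPS = {'build': ['configure'], 'configure': []}
ARGV = {'build': ['-j4'], 'configure': []}

def run_command(name):
    print('running', name, ARGV[name])

def run_with_deps(name):
    for dep in DEPS[name]:
        run_command(dep)     # dependencies first, as in the record above
    run_command(name)

run_with_deps('build')       # -> configure, then build
```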
1cc70c42c0e58a30060803630874011130bcb25fdefe82c1c629368dab43a4b3 | def resolve_and_run_command(global_context, cmd_name, cmd_argv, run_node, package):
'Run the given Command instance inside its context, including any hook\n and/or override.'
cmd = global_context.retrieve_command(cmd_name)
context_klass = global_context.retrieve_command_context(cmd_name)
options_context = global_context.retrieve_options_context(cmd_name)
context = context_klass(global_context, cmd_argv, options_context, package, run_node)
pre_hooks = global_context.retrieve_pre_hooks(cmd_name)
post_hooks = global_context.retrieve_post_hooks(cmd_name)
run_command_in_context(context, cmd, pre_hooks, post_hooks)
return (cmd, context) | Run the given Command instance inside its context, including any hook
and/or override. | bento/commands/wrapper_utils.py | resolve_and_run_command | cournape/Bento | 55 | python | def resolve_and_run_command(global_context, cmd_name, cmd_argv, run_node, package):
'Run the given Command instance inside its context, including any hook\n and/or override.'
cmd = global_context.retrieve_command(cmd_name)
context_klass = global_context.retrieve_command_context(cmd_name)
options_context = global_context.retrieve_options_context(cmd_name)
context = context_klass(global_context, cmd_argv, options_context, package, run_node)
pre_hooks = global_context.retrieve_pre_hooks(cmd_name)
post_hooks = global_context.retrieve_post_hooks(cmd_name)
run_command_in_context(context, cmd, pre_hooks, post_hooks)
return (cmd, context) | def resolve_and_run_command(global_context, cmd_name, cmd_argv, run_node, package):
'Run the given Command instance inside its context, including any hook\n and/or override.'
cmd = global_context.retrieve_command(cmd_name)
context_klass = global_context.retrieve_command_context(cmd_name)
options_context = global_context.retrieve_options_context(cmd_name)
context = context_klass(global_context, cmd_argv, options_context, package, run_node)
pre_hooks = global_context.retrieve_pre_hooks(cmd_name)
post_hooks = global_context.retrieve_post_hooks(cmd_name)
run_command_in_context(context, cmd, pre_hooks, post_hooks)
return (cmd, context)<|docstring|>Run the given Command instance inside its context, including any hook
and/or override.<|endoftext|> |
6b21f746219919821640c4bd26986ba9064be23958748c18f596f6df07449d72 | def run_command_in_context(context, cmd, pre_hooks=None, post_hooks=None):
'Run the given command instance with the hooks within its context. '
if (pre_hooks is None):
pre_hooks = []
if (post_hooks is None):
post_hooks = []
top_node = context.top_node
cmd_funcs = [(cmd.run, top_node.abspath())]
def _run_hooks(hooks):
for hook in hooks:
local_node = top_node.find_dir(relpath(hook.local_dir, top_node.abspath()))
context.pre_recurse(local_node)
try:
hook(context)
finally:
context.post_recurse()
context.init()
try:
cmd.init(context)
_run_hooks(pre_hooks)
context.configure()
while cmd_funcs:
(cmd_func, local_dir) = cmd_funcs.pop(0)
local_node = top_node.find_dir(relpath(local_dir, top_node.abspath()))
context.pre_recurse(local_node)
try:
cmd_func(context)
finally:
context.post_recurse()
_run_hooks(post_hooks)
cmd.finish(context)
finally:
context.finish()
return (cmd, context) | Run the given command instance with the hooks within its context. | bento/commands/wrapper_utils.py | run_command_in_context | cournape/Bento | 55 | python | def run_command_in_context(context, cmd, pre_hooks=None, post_hooks=None):
' '
if (pre_hooks is None):
pre_hooks = []
if (post_hooks is None):
post_hooks = []
top_node = context.top_node
cmd_funcs = [(cmd.run, top_node.abspath())]
def _run_hooks(hooks):
for hook in hooks:
local_node = top_node.find_dir(relpath(hook.local_dir, top_node.abspath()))
context.pre_recurse(local_node)
try:
hook(context)
finally:
context.post_recurse()
context.init()
try:
cmd.init(context)
_run_hooks(pre_hooks)
context.configure()
while cmd_funcs:
(cmd_func, local_dir) = cmd_funcs.pop(0)
local_node = top_node.find_dir(relpath(local_dir, top_node.abspath()))
context.pre_recurse(local_node)
try:
cmd_func(context)
finally:
context.post_recurse()
_run_hooks(post_hooks)
cmd.finish(context)
finally:
context.finish()
return (cmd, context) | def run_command_in_context(context, cmd, pre_hooks=None, post_hooks=None):
' '
if (pre_hooks is None):
pre_hooks = []
if (post_hooks is None):
post_hooks = []
top_node = context.top_node
cmd_funcs = [(cmd.run, top_node.abspath())]
def _run_hooks(hooks):
for hook in hooks:
local_node = top_node.find_dir(relpath(hook.local_dir, top_node.abspath()))
context.pre_recurse(local_node)
try:
hook(context)
finally:
context.post_recurse()
context.init()
try:
cmd.init(context)
_run_hooks(pre_hooks)
context.configure()
while cmd_funcs:
(cmd_func, local_dir) = cmd_funcs.pop(0)
local_node = top_node.find_dir(relpath(local_dir, top_node.abspath()))
context.pre_recurse(local_node)
try:
cmd_func(context)
finally:
context.post_recurse()
_run_hooks(post_hooks)
cmd.finish(context)
finally:
context.finish()
return (cmd, context)<|docstring|>Run the given command instance with the hooks within its context.<|endoftext|> |
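Stripped of bento's node bookkeeping, `run_command_in_context` is an init/try/finally sandwich: pre-hooks and post-hooks bracket the command body, and `context.finish()` runs even on failure. A placeholder-named skeleton:

```python
# Skeleton of the hook sandwich above; all names are placeholders.
def run_in_context(context, cmd, pre_hooks=(), post_hooks=()):
    context.init()
    try:
        cmd.init(context)
        for hook in pre_hooks:
            hook(context)
        context.configure()
        cmd.run(context)
        for hook in post_hooks:
            hook(context)
        cmd.finish(context)
    finally:
        context.finish()   # cleanup runs even if a hook or the command raises
```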
d17a5c14489389616d51b9b494e23c0bbf32ad1f7dbe16c2f35f3d7522a9f85f | def netmiko_commit(task: Task, **kwargs: Any) -> Result:
'\n Execute Netmiko commit method\n\n Arguments:\n kwargs: Additional arguments to pass to method.\n\n Returns:\n :obj: `nornir.core.task.Result`:\n * result (``str``): String showing the CLI output from the commit operation\n '
conn = task.host.get_connection('netmiko', task.nornir.config)
result = conn.commit(**kwargs)
return Result(host=task.host, result=result, changed=True) | Execute Netmiko commit method
Arguments:
kwargs: Additional arguments to pass to method.
Returns:
:obj: `nornir.core.task.Result`:
* result (``str``): String showing the CLI output from the commit operation | venv/Lib/site-packages/nornir/plugins/tasks/networking/netmiko_commit.py | netmiko_commit | melihteke/ebook_study | 1 | python | def netmiko_commit(task: Task, **kwargs: Any) -> Result:
'\n Execute Netmiko commit method\n\n Arguments:\n kwargs: Additional arguments to pass to method.\n\n Returns:\n :obj: `nornir.core.task.Result`:\n * result (``str``): String showing the CLI output from the commit operation\n '
conn = task.host.get_connection('netmiko', task.nornir.config)
result = conn.commit(**kwargs)
return Result(host=task.host, result=result, changed=True) | def netmiko_commit(task: Task, **kwargs: Any) -> Result:
'\n Execute Netmiko commit method\n\n Arguments:\n kwargs: Additional arguments to pass to method.\n\n Returns:\n :obj: `nornir.core.task.Result`:\n * result (``str``): String showing the CLI output from the commit operation\n '
conn = task.host.get_connection('netmiko', task.nornir.config)
result = conn.commit(**kwargs)
return Result(host=task.host, result=result, changed=True)<|docstring|>Execute Netmiko commit method
Arguments:
kwargs: Additional arguments to pass to method.
Returns:
:obj: `nornir.core.task.Result`:
* result (``str``): String showing the CLI output from the commit operation<|endoftext|> |
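A hedged sketch of driving `netmiko_commit` from an inventory; the config file name is a placeholder, and the import path matches the nornir 2.x layout this record was vendored from.

```python
# Sketch: commit candidate configs across an inventory (nornir 2.x layout).
from nornir import InitNornir
from nornir.plugins.tasks.networking import netmiko_commit

nr = InitNornir(config_file='config.yaml')   # placeholder inventory
result = nr.run(task=netmiko_commit)
for host, task_results in result.items():
    print(host, task_results[0].result)      # CLI output of each commit
```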
c3a3fbee82f0b03450d2579930aca93c680ffee613e19fcdc6fdd384ff95334e | def test_create_file(self):
'Test the creation of a simple XlsxWriter file.'
filename = self.got_filename
workbook = Workbook(filename)
worksheet = workbook.add_worksheet()
format = workbook.add_format({'align': 'center'})
worksheet.set_selection('A4')
worksheet.merge_range('A1:A2', 'col1', format)
worksheet.merge_range('B1:B2', 'col2', format)
worksheet.merge_range('C1:C2', 'col3', format)
worksheet.merge_range('D1:D2', 'col4', format)
workbook.close()
(got, exp) = _compare_xlsx_files(self.got_filename, self.exp_filename, self.ignore_files, self.ignore_elements)
self.assertEqual(got, exp) | Test the creation of a simple XlsxWriter file. | xlsxwriter/test/comparison/test_merge_cells01.py | test_create_file | sontek/XlsxWriter | 1 | python | def test_create_file(self):
filename = self.got_filename
workbook = Workbook(filename)
worksheet = workbook.add_worksheet()
format = workbook.add_format({'align': 'center'})
worksheet.set_selection('A4')
worksheet.merge_range('A1:A2', 'col1', format)
worksheet.merge_range('B1:B2', 'col2', format)
worksheet.merge_range('C1:C2', 'col3', format)
worksheet.merge_range('D1:D2', 'col4', format)
workbook.close()
(got, exp) = _compare_xlsx_files(self.got_filename, self.exp_filename, self.ignore_files, self.ignore_elements)
self.assertEqual(got, exp) | def test_create_file(self):
filename = self.got_filename
workbook = Workbook(filename)
worksheet = workbook.add_worksheet()
format = workbook.add_format({'align': 'center'})
worksheet.set_selection('A4')
worksheet.merge_range('A1:A2', 'col1', format)
worksheet.merge_range('B1:B2', 'col2', format)
worksheet.merge_range('C1:C2', 'col3', format)
worksheet.merge_range('D1:D2', 'col4', format)
workbook.close()
(got, exp) = _compare_xlsx_files(self.got_filename, self.exp_filename, self.ignore_files, self.ignore_elements)
self.assertEqual(got, exp)<|docstring|>Test the creation of a simple XlsxWriter file.<|endoftext|> |
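The test above drives the public XlsxWriter API; the same calls work on their own, outside the comparison harness:

```python
# Standalone version of the merge calls the test verifies.
from xlsxwriter import Workbook

workbook = Workbook('merged.xlsx')
worksheet = workbook.add_worksheet()
centered = workbook.add_format({'align': 'center'})

# Each header occupies a two-row merged cell, A1:A2 through D1:D2.
for col, title in zip('ABCD', ('col1', 'col2', 'col3', 'col4')):
    worksheet.merge_range(f'{col}1:{col}2', title, centered)

workbook.close()
```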
d501474b75da844cf76d30a60fb4e1dc0cc728d9e706c924a278fb5f9acac689 | def init(args):
" Initialize wit work folders.\n Args: None\n Raises: OSError in case folder creation is failing.\n Return: None\n Assumption - if folders exists don't do anything.\n "
wit = WitRepo(os.getcwd())
wit_dir = wit.wit_dir
subfolders = ('images', 'staging_area')
make_folders(wit_dir, subfolders)
wit.create_active_branch_file() | Initialize wit work folders.
Args: None
Raises: OSError in case folder creation is failing.
Return: None
Assumption - if folders exist don't do anything. | wit.py | init | ron-huberfeld/wit | 0 | python | def init(args):
    " Initialize wit work folders.\n    Args: None\n    Raises: OSError in case folder creation is failing.\n    Return: None\n    Assumption - if folders exist don't do anything.\n    "
wit = WitRepo(os.getcwd())
wit_dir = wit.wit_dir
subfolders = ('images', 'staging_area')
make_folders(wit_dir, subfolders)
wit.create_active_branch_file() | def init(args):
" Initialize wit work folders.\n Args: None\n Raises: OSError in case folder creation is failing.\n Return: None\n Assumption - if folders exists don't do anything.\n "
wit = WitRepo(os.getcwd())
wit_dir = wit.wit_dir
subfolders = ('images', 'staging_area')
make_folders(wit_dir, subfolders)
wit.create_active_branch_file()<|docstring|>Initialize wit work folders.
Args: None
Raises: OSError in case folder creation is failing.
Return: None
Assumption - if folders exist don't do anything.<|endoftext|>
b6dd4e950857c2d5d798574b36cdb0fb23876f90197ee4da8417ac3d5bd3d9d0 | def merge_override_tree(sourceRoot, destRoot):
' Updates destination and override existing files.\n Args:\n sourceRoot: source root folder of files to copy\n destRoot: Destination root folder for files to be created\n '
for (path, _, files) in os.walk(sourceRoot):
relPath = os.path.relpath(path, sourceRoot)
destPath = os.path.join(destRoot, relPath)
if (not os.path.exists(destPath)):
try:
os.makedirs(destPath)
except OSError as err:
raise err
for file in files:
destFile = os.path.join(destPath, file)
srcFile = os.path.join(path, file)
shutil.copy(srcFile, destFile) | Updates destination and override existing files.
Args:
sourceRoot: source root folder of files to copy
destRoot: Destination root folder for files to be created | wit.py | merge_override_tree | ron-huberfeld/wit | 0 | python | def merge_override_tree(sourceRoot, destRoot):
' Updates destination and override existing files.\n Args:\n sourceRoot: source root folder of files to copy\n destRoot: Destination root folder for files to be created\n '
for (path, _, files) in os.walk(sourceRoot):
relPath = os.path.relpath(path, sourceRoot)
destPath = os.path.join(destRoot, relPath)
if (not os.path.exists(destPath)):
try:
os.makedirs(destPath)
except OSError as err:
raise err
for file in files:
destFile = os.path.join(destPath, file)
srcFile = os.path.join(path, file)
shutil.copy(srcFile, destFile) | def merge_override_tree(sourceRoot, destRoot):
' Updates destination and override existing files.\n Args:\n sourceRoot: source root folder of files to copy\n destRoot: Destination root folder for files to be created\n '
for (path, _, files) in os.walk(sourceRoot):
relPath = os.path.relpath(path, sourceRoot)
destPath = os.path.join(destRoot, relPath)
if (not os.path.exists(destPath)):
try:
os.makedirs(destPath)
except OSError as err:
raise err
for file in files:
destFile = os.path.join(destPath, file)
srcFile = os.path.join(path, file)
shutil.copy(srcFile, destFile)<|docstring|>Updates destination and override existing files.
Args:
sourceRoot: source root folder of files to copy
destRoot: Destination root folder for files to be created<|endoftext|> |
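A self-contained demonstration of the overwrite semantics, calling the `merge_override_tree` defined in the record above inside a temp-directory sandbox:

```python
# Sandbox demo: colliding files in the destination are overwritten.
import os
import tempfile

src, dst = tempfile.mkdtemp(), tempfile.mkdtemp()
with open(os.path.join(src, 'a.txt'), 'w') as fh:
    fh.write('new')
with open(os.path.join(dst, 'a.txt'), 'w') as fh:
    fh.write('old')

merge_override_tree(src, dst)
with open(os.path.join(dst, 'a.txt')) as fh:
    assert fh.read() == 'new'
```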
c95c328b5642d04279f1037c48967b022aeeacdd4c16a35c1eb0bc0345e8e63a | def get_actual_commit_id_from_input(self, checkout_input):
" Parsing checkout input to actual commit_id\n Args: checkout_input - could be either branch name (including 'master' branch) or commit id\n Raises: WitException if references files does not exist or if commit id folder was not found\n Return: valid commit id\n "
if (checkout_input in self.get_branches()):
if (not os.path.exists(self.wit_references_file)):
raise WitException('Cannot read reference file.')
actual_commit_id = self.get_references_file_data().get(checkout_input)
self.create_active_branch_file(checkout_input)
else:
actual_commit_id = checkout_input
self.create_active_branch_file('')
logging.warning('==> checkout {}'.format(actual_commit_id))
if (not self.is_commit_id_exist(actual_commit_id)):
raise WitException('Commit ID was not found: {}'.format(actual_commit_id))
return actual_commit_id | Parsing checkout input to actual commit_id
Args: checkout_input - could be either branch name (including 'master' branch) or commit id
Raises: WitException if references files does not exist or if commit id folder was not found
Return: valid commit id | wit.py | get_actual_commit_id_from_input | ron-huberfeld/wit | 0 | python | def get_actual_commit_id_from_input(self, checkout_input):
" Parsing checkout input to actual commit_id\n Args: checkout_input - could be either branch name (including 'master' branch) or commit id\n Raises: WitException if references files does not exist or if commit id folder was not found\n Return: valid commit id\n "
if (checkout_input in self.get_branches()):
if (not os.path.exists(self.wit_references_file)):
raise WitException('Cannot read reference file.')
actual_commit_id = self.get_references_file_data().get(checkout_input)
self.create_active_branch_file(checkout_input)
else:
actual_commit_id = checkout_input
        self.create_active_branch_file('')
logging.warning('==> checkout {}'.format(actual_commit_id))
if (not self.is_commit_id_exist(actual_commit_id)):
raise WitException('Commit ID was not found: {}'.format(actual_commit_id))
return actual_commit_id | def get_actual_commit_id_from_input(self, checkout_input):
" Parsing checkout input to actual commit_id\n Args: checkout_input - could be either branch name (including 'master' branch) or commit id\n Raises: WitException if references files does not exist or if commit id folder was not found\n Return: valid commit id\n "
if (checkout_input in self.get_branches()):
if (not os.path.exists(self.wit_references_file)):
raise WitException('Cannot read reference file.')
actual_commit_id = self.get_references_file_data().get(checkout_input)
self.create_active_branch_file(checkout_input)
else:
actual_commit_id = checkout_input
        self.create_active_branch_file('')
logging.warning('==> checkout {}'.format(actual_commit_id))
if (not self.is_commit_id_exist(actual_commit_id)):
raise WitException('Commit ID was not found: {}'.format(actual_commit_id))
return actual_commit_id<|docstring|>Parsing checkout input to actual commit_id
Args: checkout_input - could be either branch name (including 'master' branch) or commit id
Raises: WitException if references files does not exist or if commit id folder was not found
Return: valid commit id<|endoftext|> |
832df081d6cf985b6f051a1ba0192cb4a013c8455153830e790de1b4b97c5c8a | def recipe_image_file_path(instance, filename):
'Generate file path for new recipe image'
ext = filename.split('.')[(- 1)]
filename = f'{uuid.uuid4()}.{ext}'
return os.path.join('uploads/recipe/', filename) | Generate file path for new recipe image | app/core/models.py | recipe_image_file_path | ProgramSKAN/Django-Recipe-RESTAPI | 0 | python | def recipe_image_file_path(instance, filename):
ext = filename.split('.')[(- 1)]
filename = f'{uuid.uuid4()}.{ext}'
return os.path.join('uploads/recipe/', filename) | def recipe_image_file_path(instance, filename):
ext = filename.split('.')[(- 1)]
filename = f'{uuid.uuid4()}.{ext}'
return os.path.join('uploads/recipe/', filename)<|docstring|>Generate file path for new recipe image<|endoftext|> |
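Outside Django, the helper's behaviour reduces to a rename that preserves only the extension; a standalone equivalent:

```python
# Standalone equivalent of recipe_image_file_path's rename logic.
import os
import uuid

def image_file_path(filename):
    ext = filename.split('.')[-1]
    return os.path.join('uploads/recipe/', f'{uuid.uuid4()}.{ext}')

print(image_file_path('dinner.jpg'))  # uploads/recipe/<random-uuid>.jpg
```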
7a3bb410433bdf91b91207ef9cdd28b0bf0669a1ae70130ce836098b9f445bd7 | def create_user(self, email, password=None, **extra_fields):
'Creates and saves a new user'
if (not email):
raise ValueError('Users must have an email address')
user = self.model(email=self.normalize_email(email), **extra_fields)
user.set_password(password)
user.save(using=self._db)
return user | Creates and saves a new user | app/core/models.py | create_user | ProgramSKAN/Django-Recipe-RESTAPI | 0 | python | def create_user(self, email, password=None, **extra_fields):
if (not email):
raise ValueError('Users must have an email address')
user = self.model(email=self.normalize_email(email), **extra_fields)
user.set_password(password)
user.save(using=self._db)
return user | def create_user(self, email, password=None, **extra_fields):
if (not email):
raise ValueError('Users must have an email address')
user = self.model(email=self.normalize_email(email), **extra_fields)
user.set_password(password)
user.save(using=self._db)
return user<|docstring|>Creates and saves a new user<|endoftext|> |
14eb8cceff0e27f3f873699ca769b5c26b4f0fe95657ab4d52479db1c4df573c | def create_superuser(self, email, password):
'creates and saves as super user'
user = self.create_user(email, password)
user.is_staff = True
user.is_superuser = True
user.save(using=self._db)
return user | creates and saves as super user | app/core/models.py | create_superuser | ProgramSKAN/Django-Recipe-RESTAPI | 0 | python | def create_superuser(self, email, password):
user = self.create_user(email, password)
user.is_staff = True
user.is_superuser = True
user.save(using=self._db)
return user | def create_superuser(self, email, password):
user = self.create_user(email, password)
user.is_staff = True
user.is_superuser = True
user.save(using=self._db)
return user<|docstring|>creates and saves as super user<|endoftext|> |
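Assuming the manager above is attached to the custom user model as `objects` (the conventional Django wiring, not shown in these records), calls would look like this sketch:

```python
# Hedged sketch; assumes the manager is wired up as User.objects.
user = User.objects.create_user(email='test@example.com', password='testpass123')
admin = User.objects.create_superuser('admin@example.com', 'adminpass123')
assert admin.is_staff and admin.is_superuser
```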
c381dd26d1ca4dbad5c4d63c0296209b8bda3c296833a157a609f80194d98c75 | def source_saved_handler(sender, instance, **kwargs):
'\n Post save signal handler for Source model.\n It sends new Source data to kafka\n '
message = {'source_id': instance.source_id, 'name': instance.name, 'profile': {'code': instance.profile.code, 'version': instance.profile.version, 'payload': instance.profile.payload}}
sender = create_sender(create_broker_parameters_from_settings())
if sender.send(KAFKA_SOURCE_NOTIFICATION_TOPIC, message):
        logger.info('Source notified correctly') | Post save signal handler for Source model.
It sends new Source data to kafka | hgw_backend/hgw_backend/signals.py | source_saved_handler | crs4/health-gateway | 5 | python | def source_saved_handler(sender, instance, **kwargs):
'\n Post save signal handler for Source model.\n It sends new Source data to kafka\n '
message = {'source_id': instance.source_id, 'name': instance.name, 'profile': {'code': instance.profile.code, 'version': instance.profile.version, 'payload': instance.profile.payload}}
sender = create_sender(create_broker_parameters_from_settings())
if sender.send(KAFKA_SOURCE_NOTIFICATION_TOPIC, message):
        logger.info('Source notified correctly') | def source_saved_handler(sender, instance, **kwargs):
'\n Post save signal handler for Source model.\n It sends new Source data to kafka\n '
message = {'source_id': instance.source_id, 'name': instance.name, 'profile': {'code': instance.profile.code, 'version': instance.profile.version, 'payload': instance.profile.payload}}
sender = create_sender(create_broker_parameters_from_settings())
if sender.send(KAFKA_SOURCE_NOTIFICATION_TOPIC, message):
        logger.info('Source notified correctly')<|docstring|>Post save signal handler for Source model.
It sends new Source data to kafka<|endoftext|> |
9d6506853ca9e057aa3398758d1d195f499bf86ecf8ffdea4c5497e4f9a706fc | def connector_created_handler(connector, **kwargs):
'\n Handler for signal create_connector. It notifies the correct operation\n '
message = {'channel_id': connector['channel_id']}
sender = create_sender(create_broker_parameters_from_settings())
if sender.send(KAFKA_CONNECTOR_NOTIFICATION_TOPIC, message):
logger.info('Connector notified correctly') | Handler for signal create_connector. It notifies the correct operation | hgw_backend/hgw_backend/signals.py | connector_created_handler | crs4/health-gateway | 5 | python | def connector_created_handler(connector, **kwargs):
'\n \n '
message = {'channel_id': connector['channel_id']}
sender = create_sender(create_broker_parameters_from_settings())
if sender.send(KAFKA_CONNECTOR_NOTIFICATION_TOPIC, message):
logger.info('Connector notified correctly') | def connector_created_handler(connector, **kwargs):
'\n \n '
message = {'channel_id': connector['channel_id']}
sender = create_sender(create_broker_parameters_from_settings())
if sender.send(KAFKA_CONNECTOR_NOTIFICATION_TOPIC, message):
logger.info('Connector notified correctly')<|docstring|>Handler for signal create_connector. It notifies the correct operation<|endoftext|> |
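Both handlers share one shape: build a plain dict, push it to a Kafka topic through a freshly created sender, log on success. The Django side that would trigger `source_saved_handler` is the usual signal connection, sketched here with an assumed `Source` model not shown in these records:

```python
# Sketch of the assumed Django wiring; Source is this app's model (assumption).
from django.db.models.signals import post_save

post_save.connect(source_saved_handler, sender=Source)
```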
ca04e8823cd2e589d2aabbe244a9e86e69bda017947c9a67ec8d13db616d1a12 | def get_default_hyperparameters_by_modeltype(model_type):
'\n Select the default parameters for each model\n\n Args:\n model_type (str): Model identifier.\n\n Returns:\n dict: Default hyper parameters for model.\n\n '
model_dict = {'mlp_eg': DEFAULT_HYPER_PARAM_ENERGY_GRADS, 'mlp_e': DEFAULT_HYPER_PARAM_ENERGY, 'mlp_g2': DEFAULT_HYPER_PARAM_GRADS2, 'mlp_nac': DEFAULT_HYPER_PARAM_NAC, 'mlp_nac2': DEFAULT_HYPER_PARAM_NAC}
return model_dict[model_type] | Select the default parameters for each model
Args:
model_type (str): Model identifier.
Returns:
dict: Default hyper parameters for model. | PyRAI2MD/Machine_Learning/pyNNsMD/nn_pes_src/selection.py | get_default_hyperparameters_by_modeltype | lopez-lab/PyRAI2MD | 12 | python | def get_default_hyperparameters_by_modeltype(model_type):
'\n Select the default parameters for each model\n\n Args:\n model_type (str): Model identifier.\n\n Returns:\n dict: Default hyper parameters for model.\n\n '
model_dict = {'mlp_eg': DEFAULT_HYPER_PARAM_ENERGY_GRADS, 'mlp_e': DEFAULT_HYPER_PARAM_ENERGY, 'mlp_g2': DEFAULT_HYPER_PARAM_GRADS2, 'mlp_nac': DEFAULT_HYPER_PARAM_NAC, 'mlp_nac2': DEFAULT_HYPER_PARAM_NAC}
return model_dict[model_type] | def get_default_hyperparameters_by_modeltype(model_type):
'\n Select the default parameters for each model\n\n Args:\n model_type (str): Model identifier.\n\n Returns:\n dict: Default hyper parameters for model.\n\n '
model_dict = {'mlp_eg': DEFAULT_HYPER_PARAM_ENERGY_GRADS, 'mlp_e': DEFAULT_HYPER_PARAM_ENERGY, 'mlp_g2': DEFAULT_HYPER_PARAM_GRADS2, 'mlp_nac': DEFAULT_HYPER_PARAM_NAC, 'mlp_nac2': DEFAULT_HYPER_PARAM_NAC}
return model_dict[model_type]<|docstring|>Select the default parameters for each model
Args:
model_type (str): Model identifier.
Returns:
dict: Default hyper parameters for model.<|endoftext|> |
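The selector is a plain dictionary dispatch. A stripped-down equivalent with dummy payloads, showing the implicit failure mode (an unknown key raises `KeyError`):

```python
# Dictionary dispatch reduced to its core; payloads are dummies.
DEFAULTS = {'mlp_e': {'lr': 1e-3}, 'mlp_eg': {'lr': 5e-4}}

def defaults_for(model_type):
    return DEFAULTS[model_type]   # unknown model types raise KeyError

print(defaults_for('mlp_eg'))     # {'lr': 0.0005}
```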
2a72705791c3d80304bc0a3eab50ad677f4a865a8201d60759559af3963bbe45 | def get_path_for_fit_script(model_type):
'\n Interface to find the path of training scripts.\n\n For now they are expected to be in the same folder-system as calling .py script.\n\n Args:\n model_type (str): Name of the model.\n\n Returns:\n filepath (str): Filepath pointing to training scripts.\n\n '
filepath = os.path.abspath(os.path.dirname(__file__))
fit_script = {'mlp_eg': 'training_mlp_eg.py', 'mlp_nac': 'training_mlp_nac.py', 'mlp_nac2': 'training_mlp_nac2.py', 'mlp_e': 'training_mlp_e.py', 'mlp_g2': 'training_mlp_g2.py'}
outpath = os.path.join(filepath, 'training', fit_script[model_type])
return outpath | Interface to find the path of training scripts.
For now they are expected to be in the same folder-system as calling .py script.
Args:
model_type (str): Name of the model.
Returns:
filepath (str): Filepath pointing to training scripts. | PyRAI2MD/Machine_Learning/pyNNsMD/nn_pes_src/selection.py | get_path_for_fit_script | lopez-lab/PyRAI2MD | 12 | python | def get_path_for_fit_script(model_type):
'\n Interface to find the path of training scripts.\n\n For now they are expected to be in the same folder-system as calling .py script.\n\n Args:\n model_type (str): Name of the model.\n\n Returns:\n filepath (str): Filepath pointing to training scripts.\n\n '
filepath = os.path.abspath(os.path.dirname(__file__))
fit_script = {'mlp_eg': 'training_mlp_eg.py', 'mlp_nac': 'training_mlp_nac.py', 'mlp_nac2': 'training_mlp_nac2.py', 'mlp_e': 'training_mlp_e.py', 'mlp_g2': 'training_mlp_g2.py'}
outpath = os.path.join(filepath, 'training', fit_script[model_type])
return outpath | def get_path_for_fit_script(model_type):
'\n Interface to find the path of training scripts.\n\n For now they are expected to be in the same folder-system as calling .py script.\n\n Args:\n model_type (str): Name of the model.\n\n Returns:\n filepath (str): Filepath pointing to training scripts.\n\n '
filepath = os.path.abspath(os.path.dirname(__file__))
fit_script = {'mlp_eg': 'training_mlp_eg.py', 'mlp_nac': 'training_mlp_nac.py', 'mlp_nac2': 'training_mlp_nac2.py', 'mlp_e': 'training_mlp_e.py', 'mlp_g2': 'training_mlp_g2.py'}
outpath = os.path.join(filepath, 'training', fit_script[model_type])
return outpath<|docstring|>Interface to find the path of training scripts.
For now they are expected to be in the same folder-system as calling .py script.
Args:
model_type (str): Name of the model.
Returns:
filepath (str): Filepath pointing to training scripts.<|endoftext|> |
591619f4218b7c94914140ac1ebb2f1ed2fdf6d33255e570112e876054358b1f | def get_default_scaler(model_type):
'\n Get default values for scaler in and output for each model.\n\n Args:\n model_type (str): Model identifier.\n\n Returns:\n Dict: Scaling dictionary.\n\n '
if (model_type == 'mlp_e'):
return EnergyStandardScaler()
elif (model_type == 'mlp_eg'):
return EnergyGradientStandardScaler()
elif ((model_type == 'mlp_nac') or (model_type == 'mlp_nac2')):
return NACStandardScaler()
elif (model_type == 'mlp_g2'):
return GradientStandardScaler()
else:
print('Error: Unknown model type', model_type)
raise TypeError(f'Error: Unknown model type for default scaler {model_type}') | Get default values for scaler in and output for each model.
Args:
model_type (str): Model identifier.
Returns:
Dict: Scaling dictionary. | PyRAI2MD/Machine_Learning/pyNNsMD/nn_pes_src/selection.py | get_default_scaler | lopez-lab/PyRAI2MD | 12 | python | def get_default_scaler(model_type):
'\n Get default values for scaler in and output for each model.\n\n Args:\n model_type (str): Model identifier.\n\n Returns:\n Dict: Scaling dictionary.\n\n '
if (model_type == 'mlp_e'):
return EnergyStandardScaler()
elif (model_type == 'mlp_eg'):
return EnergyGradientStandardScaler()
elif ((model_type == 'mlp_nac') or (model_type == 'mlp_nac2')):
return NACStandardScaler()
elif (model_type == 'mlp_g2'):
return GradientStandardScaler()
else:
print('Error: Unknown model type', model_type)
raise TypeError(f'Error: Unknown model type for default scaler {model_type}') | def get_default_scaler(model_type):
'\n Get default values for scaler in and output for each model.\n\n Args:\n model_type (str): Model identifier.\n\n Returns:\n Dict: Scaling dictionary.\n\n '
if (model_type == 'mlp_e'):
return EnergyStandardScaler()
elif (model_type == 'mlp_eg'):
return EnergyGradientStandardScaler()
elif ((model_type == 'mlp_nac') or (model_type == 'mlp_nac2')):
return NACStandardScaler()
elif (model_type == 'mlp_g2'):
return GradientStandardScaler()
else:
print('Error: Unknown model type', model_type)
raise TypeError(f'Error: Unknown model type for default scaler {model_type}')<|docstring|>Get default values for scaler in and output for each model.
Args:
model_type (str): Model identifier.
Returns:
Dict: Scaling dictionary.<|endoftext|> |
e40b1139e3458582392f73dd317f5a2bce628540bfc8aff7d4bf72bb270b7cf2 | def get_model_by_type(model_type, hyper):
    '\n    Find the implemented model by its string identifier.\n\n    Args:\n        model_type (str): Model type.\n        hyper (dict): Dict with hyper parameters.\n\n    Returns:\n        tf.keras.model: Default initialized tf.keras.model.\n\n    '
if (model_type == 'mlp_nac'):
return NACModel(**hyper)
elif (model_type == 'mlp_nac2'):
return NACModel2(**hyper)
elif (model_type == 'mlp_eg'):
return EnergyGradientModel(**hyper)
elif (model_type == 'mlp_e'):
return EnergyModel(**hyper)
elif (model_type == 'mlp_g2'):
return GradientModel2(**hyper)
else:
print('Error: Unknown model type', model_type)
        raise TypeError(f'Error: Unknown model type for {model_type}') | Find the implemented model by its string identifier.
Args:
model_type (str): Model type.
hyper (dict): Dict with hyper parameters.
Returns:
    tf.keras.model: Default initialized tf.keras.model. | PyRAI2MD/Machine_Learning/pyNNsMD/nn_pes_src/selection.py | get_model_by_type | lopez-lab/PyRAI2MD | 12 | python | def get_model_by_type(model_type, hyper):
    '\n    Find the implemented model by its string identifier.\n\n    Args:\n        model_type (str): Model type.\n        hyper (dict): Dict with hyper parameters.\n\n    Returns:\n        tf.keras.model: Default initialized tf.keras.model.\n\n    '
if (model_type == 'mlp_nac'):
return NACModel(**hyper)
elif (model_type == 'mlp_nac2'):
return NACModel2(**hyper)
elif (model_type == 'mlp_eg'):
return EnergyGradientModel(**hyper)
elif (model_type == 'mlp_e'):
return EnergyModel(**hyper)
elif (model_type == 'mlp_g2'):
return GradientModel2(**hyper)
else:
print('Error: Unknown model type', model_type)
        raise TypeError(f'Error: Unknown model type for {model_type}') | def get_model_by_type(model_type, hyper):
    '\n    Find the implemented model by its string identifier.\n\n    Args:\n        model_type (str): Model type.\n        hyper (dict): Dict with hyper parameters.\n\n    Returns:\n        tf.keras.model: Default initialized tf.keras.model.\n\n    '
if (model_type == 'mlp_nac'):
return NACModel(**hyper)
elif (model_type == 'mlp_nac2'):
return NACModel2(**hyper)
elif (model_type == 'mlp_eg'):
return EnergyGradientModel(**hyper)
elif (model_type == 'mlp_e'):
return EnergyModel(**hyper)
elif (model_type == 'mlp_g2'):
return GradientModel2(**hyper)
else:
print('Error: Unknown model type', model_type)
        raise TypeError(f'Error: Unknown model type for {model_type}')<|docstring|>Find the implemented model by its string identifier.
Args:
model_type (str): Model type.
hyper (dict): Dict with hyper parameters.
Returns:
    tf.keras.model: Default initialized tf.keras.model.<|endoftext|>
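Calling the factory mirrors the docstring: choose a model family by key and forward the hyper-parameter dict to its constructor. A hedged call sketch; the hyper keys below are placeholders, not pyNNsMD's real schema:

```python
# Hedged call sketch; hyper keys are illustrative placeholders.
hyper = {'atoms': 12, 'states': 2}
model = get_model_by_type('mlp_eg', hyper)   # -> EnergyGradientModel(**hyper)
```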
fbe59713998f41552fad1fae03fcee40546afe1d33f6c0f989736a393ecc6a63 | def create_account(u_name, password, account_details):
'\n function to create new user account\n '
new_user = User(u_name, password, account_details)
return new_user | function to create new user account | run.py | create_account | Ko3ch/pass-locker | 0 | python | def create_account(u_name, password, account_details):
'\n \n '
new_user = User(u_name, password, account_details)
return new_user | def create_account(u_name, password, account_details):
'\n \n '
new_user = User(u_name, password, account_details)
return new_user<|docstring|>function to create new user account<|endoftext|> |
60e16635ee1eb60c7136946674d6e02881f808966c4905884e7762d12683b0fd | def save_account(user):
'\n function to save users\n '
user.add_user() | function to save users | run.py | save_account | Ko3ch/pass-locker | 0 | python | def save_account(user):
'\n \n '
user.add_user() | def save_account(user):
'\n \n '
user.add_user()<|docstring|>function to save users<|endoftext|> |
1d22d47f7cf437fa24f015935e5d7188ce7ffa0e7d1132cf064150fa0c089a08 | def check_account(user_name):
'\n function to check if account exists\n '
return User.user_exists(user_name) | function to check if account exists | run.py | check_account | Ko3ch/pass-locker | 0 | python | def check_account(user_name):
'\n \n '
return User.user_exists(user_name) | def check_account(user_name):
'\n \n '
return User.user_exists(user_name)<|docstring|>function to check if account exists<|endoftext|> |
ba47c7c3e4d07921ebd1cdb89cdc1e20d482034b0003f379e1b2aa9fd56bf173 | def find_account(acc_name):
'\n find account by user_name\n '
return Credentials.find_account_by_acc_name(acc_name) | find account by user_name | run.py | find_account | Ko3ch/pass-locker | 0 | python | def find_account(acc_name):
'\n \n '
return Credentials.find_account_by_acc_name(acc_name) | def find_account(acc_name):
'\n \n '
return Credentials.find_account_by_acc_name(acc_name)<|docstring|>find account by user_name<|endoftext|> |
de45296b469bc09de71cd90664858f4a3c92574dc56c4dd515bc07e48d8a11db | def all_users():
'\n function to get all users\n '
return User.display_users() | function to get all users | run.py | all_users | Ko3ch/pass-locker | 0 | python | def all_users():
'\n \n '
return User.display_users() | def all_users():
'\n \n '
return User.display_users()<|docstring|>function to get all users<|endoftext|> |
103b222a6089fedc5f8dfada7a4f99349fc4d07efe4cc7cf9b256b681794c8e1 | def create_user_credentials(account_name, account_password):
'\n function to create new credentials for a user\n '
new_user_credentials = Credentials(account_name, account_password)
return new_user_credentials | function to create new credentials for a user | run.py | create_user_credentials | Ko3ch/pass-locker | 0 | python | def create_user_credentials(account_name, account_password):
'\n \n '
new_user_credentials = Credentials(account_name, account_password)
return new_user_credentials | def create_user_credentials(account_name, account_password):
'\n \n '
new_user_credentials = Credentials(account_name, account_password)
return new_user_credentials<|docstring|>function to create new credentials for a user<|endoftext|> |
ee1f1293d7fd28754ae17061bc4f3a7366ba3ed24ac590ea6436a59ec5a00558 | def save_account_credentials(user, credentials):
'\n function to add user accounts details\n '
credentials.save_acc_details()
user.add_user_credentials() | function to add user accounts details | run.py | save_account_credentials | Ko3ch/pass-locker | 0 | python | def save_account_credentials(user, credentials):
'\n \n '
credentials.save_acc_details()
user.add_user_credentials() | def save_account_credentials(user, credentials):
'\n \n '
credentials.save_acc_details()
user.add_user_credentials()<|docstring|>function to add user accounts details<|endoftext|> |
1ed4a5ce75de7121dbd904ad16a0688b0124aeb8196c6de1d442bfcc24a19e6f | def delete_credentials(credentials):
'\n function to delete a users account credentials\n '
credentials.delete_acc_details() | function to delete a users account credentials | run.py | delete_credentials | Ko3ch/pass-locker | 0 | python | def delete_credentials(credentials):
'\n \n '
credentials.delete_acc_details() | def delete_credentials(credentials):
'\n \n '
credentials.delete_acc_details()<|docstring|>function to delete a users account credentials<|endoftext|> |
91b63c666545ab22d7df8c9290d180ae92fa8b03681dc8caec440cf3f966b7c3 | def display_credentials():
'\n function that returns all accounts saved by a user\n '
return Credentials.view_accounts() | function that returns all accounts saved by a user | run.py | display_credentials | Ko3ch/pass-locker | 0 | python | def display_credentials():
'\n \n '
return Credentials.view_accounts() | def display_credentials():
'\n \n '
return Credentials.view_accounts()<|docstring|>function that returns all accounts saved by a user<|endoftext|>
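The run.py helpers above form a thin facade over the project's User and Credentials classes. A minimal sketch of how they might be chained; the User constructor signature and the account values are assumptions, not taken from the source:

# Hypothetical end-to-end use of the pass-locker helpers above.
new_user = User('koech', 'safe-pass')            # constructor signature assumed
save_account(new_user)
if check_account('koech'):
    creds = create_user_credentials('twitter', 'tw-secret')
    save_account_credentials(new_user, creds)
    print(display_credentials())                 # every saved account
    delete_credentials(creds)                    # remove the entry again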
8fd6f84b6a3666dd20561cccbed2cd317346961240aac75c3feb656bf4144f36 | def create_uci_labels():
"\n Creates the labels for the universal chess interface into an array and returns them\n This returns all the possible 'Queen moves' and 'Knight move' for each square plus the promotion\n of a pawn to either a rook, knight, bishop or queen from rank 7 or higher\n :return:\n "
labels_array = []
letters = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h']
numbers = ['1', '2', '3', '4', '5', '6', '7', '8']
promoted_to = ['q', 'r', 'b', 'n']
for l1 in range(8):
for n1 in range(8):
destinations = (((([(t, n1) for t in range(8)] + [(l1, t) for t in range(8)]) + [((l1 + t), (n1 + t)) for t in range((- 7), 8)]) + [((l1 + t), (n1 - t)) for t in range((- 7), 8)]) + [((l1 + a), (n1 + b)) for (a, b) in [((- 2), (- 1)), ((- 1), (- 2)), ((- 2), 1), (1, (- 2)), (2, (- 1)), ((- 1), 2), (2, 1), (1, 2)]])
for (l2, n2) in destinations:
if (((l1, n1) != (l2, n2)) and (l2 in range(8)) and (n2 in range(8))):
move = (((letters[l1] + numbers[n1]) + letters[l2]) + numbers[n2])
labels_array.append(move)
for l1 in range(8):
l = letters[l1]
for p in promoted_to:
labels_array.append(((((l + '2') + l) + '1') + p))
labels_array.append(((((l + '7') + l) + '8') + p))
if (l1 > 0):
l_l = letters[(l1 - 1)]
labels_array.append(((((l + '2') + l_l) + '1') + p))
labels_array.append(((((l + '7') + l_l) + '8') + p))
if (l1 < 7):
l_r = letters[(l1 + 1)]
labels_array.append(((((l + '2') + l_r) + '1') + p))
labels_array.append(((((l + '7') + l_r) + '8') + p))
return labels_array | Creates the labels for the universal chess interface into an array and returns them
This returns all the possible 'Queen moves' and 'Knight moves' for each square, plus the promotion
of a pawn to either a rook, knight, bishop or queen from rank 2 to rank 1 or from rank 7 to rank 8
:return: | enas/src/chess/pgn_tensors_utils.py | create_uci_labels | LothairKizardjian/EfficientNeuralSearch | 0 | python | def create_uci_labels():
"\n Creates the labels for the universal chess interface into an array and returns them\n This returns all the possible 'Queen moves' and 'Knight move' for each square plus the promotion\n of a pawn to either a rook, knight, bishop or queen from rank 7 or higher\n :return:\n "
labels_array = []
letters = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h']
numbers = ['1', '2', '3', '4', '5', '6', '7', '8']
promoted_to = ['q', 'r', 'b', 'n']
for l1 in range(8):
for n1 in range(8):
destinations = (((([(t, n1) for t in range(8)] + [(l1, t) for t in range(8)]) + [((l1 + t), (n1 + t)) for t in range((- 7), 8)]) + [((l1 + t), (n1 - t)) for t in range((- 7), 8)]) + [((l1 + a), (n1 + b)) for (a, b) in [((- 2), (- 1)), ((- 1), (- 2)), ((- 2), 1), (1, (- 2)), (2, (- 1)), ((- 1), 2), (2, 1), (1, 2)]])
for (l2, n2) in destinations:
if (((l1, n1) != (l2, n2)) and (l2 in range(8)) and (n2 in range(8))):
move = (((letters[l1] + numbers[n1]) + letters[l2]) + numbers[n2])
labels_array.append(move)
for l1 in range(8):
l = letters[l1]
for p in promoted_to:
labels_array.append(((((l + '2') + l) + '1') + p))
labels_array.append(((((l + '7') + l) + '8') + p))
if (l1 > 0):
l_l = letters[(l1 - 1)]
labels_array.append(((((l + '2') + l_l) + '1') + p))
labels_array.append(((((l + '7') + l_l) + '8') + p))
if (l1 < 7):
l_r = letters[(l1 + 1)]
labels_array.append(((((l + '2') + l_r) + '1') + p))
labels_array.append(((((l + '7') + l_r) + '8') + p))
return labels_array | def create_uci_labels():
"\n Creates the labels for the universal chess interface into an array and returns them\n This returns all the possible 'Queen moves' and 'Knight move' for each square plus the promotion\n of a pawn to either a rook, knight, bishop or queen from rank 7 or higher\n :return:\n "
labels_array = []
letters = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h']
numbers = ['1', '2', '3', '4', '5', '6', '7', '8']
promoted_to = ['q', 'r', 'b', 'n']
for l1 in range(8):
for n1 in range(8):
destinations = (((([(t, n1) for t in range(8)] + [(l1, t) for t in range(8)]) + [((l1 + t), (n1 + t)) for t in range((- 7), 8)]) + [((l1 + t), (n1 - t)) for t in range((- 7), 8)]) + [((l1 + a), (n1 + b)) for (a, b) in [((- 2), (- 1)), ((- 1), (- 2)), ((- 2), 1), (1, (- 2)), (2, (- 1)), ((- 1), 2), (2, 1), (1, 2)]])
for (l2, n2) in destinations:
if (((l1, n1) != (l2, n2)) and (l2 in range(8)) and (n2 in range(8))):
move = (((letters[l1] + numbers[n1]) + letters[l2]) + numbers[n2])
labels_array.append(move)
for l1 in range(8):
l = letters[l1]
for p in promoted_to:
labels_array.append(((((l + '2') + l) + '1') + p))
labels_array.append(((((l + '7') + l) + '8') + p))
if (l1 > 0):
l_l = letters[(l1 - 1)]
labels_array.append(((((l + '2') + l_l) + '1') + p))
labels_array.append(((((l + '7') + l_l) + '8') + p))
if (l1 < 7):
l_r = letters[(l1 + 1)]
labels_array.append(((((l + '2') + l_r) + '1') + p))
labels_array.append(((((l + '7') + l_r) + '8') + p))
return labels_array<|docstring|>Creates the labels for the universal chess interface into an array and returns them
This returns all the possible 'Queen moves' and 'Knight moves' for each square, plus the promotion
of a pawn to either a rook, knight, bishop or queen from rank 2 to rank 1 or from rank 7 to rank 8
:return:<|endoftext|> |
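As a quick sanity check of the label space described above: the queen- and knight-style moves contribute 1792 labels and the promotion variants another 176, so the list should hold 1968 unique entries. A hedged snippet, assuming create_uci_labels is in scope:

labels = create_uci_labels()
print(len(labels))            # expected: 1968 (1792 queen/knight moves + 176 promotions)
print(labels[:3])             # first moves from square a1: ['a1b1', 'a1c1', 'a1d1']
assert len(labels) == len(set(labels))   # no duplicate labels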
1ceedb42b47c0d46a655c661fe52a2733af6ca6059f67c1ac3367caf8c7359e0 | def flipped_uci_labels():
'\n Flips each UCI label vertically by replacing every rank digit a with 9 - a, putting\n the transformed labels into a returned list.\n :return:\n '
def repl(x):
return ''.join([(str((9 - int(a))) if a.isdigit() else a) for a in x])
return [repl(x) for x in create_uci_labels()] | Flips each UCI label vertically by replacing every rank digit a with 9 - a, putting
the transformed labels into a returned list.
:return: | enas/src/chess/pgn_tensors_utils.py | flipped_uci_labels | LothairKizardjian/EfficientNeuralSearch | 0 | python | def flipped_uci_labels():
'\n Flips each UCI label vertically by replacing every rank digit a with 9 - a, putting\n the transformed labels into a returned list.\n :return:\n '
def repl(x):
return ''.join([(str((9 - int(a))) if a.isdigit() else a) for a in x])
return [repl(x) for x in create_uci_labels()] | def flipped_uci_labels():
'\n Flips each UCI label vertically by replacing every rank digit a with 9 - a, putting\n the transformed labels into a returned list.\n :return:\n '
def repl(x):
return ''.join([(str((9 - int(a))) if a.isdigit() else a) for a in x])
return [repl(x) for x in create_uci_labels()]<|docstring|>Flips each UCI label vertically by replacing every rank digit a with 9 - a, putting
the transformed labels into a returned list.
:return:<|endoftext|> |
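Because repl maps every rank digit a to 9 - a, the flipped list stays aligned index-for-index with the original, which is what allows one policy head to serve both colours. A small check, assuming both functions are in scope:

labels = create_uci_labels()
flipped = flipped_uci_labels()
idx = labels.index('a2a1q')   # a promotion towards rank 1
print(flipped[idx])           # 'a7a8q': the vertically mirrored promotion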
fa0e244ecde759eaad820e00de505d00dd2e4e6437cb06c339f4af4cdd3e35b5 | def get_screenshot_as_file(self):
'\n Take a screenshot and save it\n :return: the screenshot path\n '
pic_name = ((str.split(str(time.time()), '.')[0] + str.split(str(time.time()), '.')[1]) + '.png')
screent_path = os.path.join(SCREENSHOTDIR, pic_name)
self.driver.get_screenshot_as_file(screent_path)
return screent_path | Take a screenshot and save it
:return: the screenshot path | base/factory/webdriveroperator.py | get_screenshot_as_file | handerbaby/AutoRunWebUI | 1 | python | def get_screenshot_as_file(self):
'\n Take a screenshot and save it\n :return: the screenshot path\n '
pic_name = ((str.split(str(time.time()), '.')[0] + str.split(str(time.time()), '.')[1]) + '.png')
screent_path = os.path.join(SCREENSHOTDIR, pic_name)
self.driver.get_screenshot_as_file(screent_path)
return screent_path | def get_screenshot_as_file(self):
'\n Take a screenshot and save it\n :return: the screenshot path\n '
pic_name = ((str.split(str(time.time()), '.')[0] + str.split(str(time.time()), '.')[1]) + '.png')
screent_path = os.path.join(SCREENSHOTDIR, pic_name)
self.driver.get_screenshot_as_file(screent_path)
return screent_path<|docstring|>Take a screenshot and save it
:return: the screenshot path<|endoftext|>
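The screenshot name above is built by splitting time.time() on its decimal point. A sketch of a more conventional timestamped filename, assuming the same SCREENSHOTDIR constant is available:

import os
from datetime import datetime

def make_screenshot_path():
    # e.g. '20240101_120000_123456.png'; SCREENSHOTDIR is assumed to be defined
    stamp = datetime.now().strftime('%Y%m%d_%H%M%S_%f')
    return os.path.join(SCREENSHOTDIR, stamp + '.png')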
2f826602c7b7fddf8ab6dbdfc96515c6ccd4199bdd70da13d235d0ba770b3471 | def web_implicitly_wait(self, **kwargs):
'\n Set an implicit wait\n :param kwargs:\n :return:\n '
try:
s = kwargs['time']
except KeyError:
s = 10
try:
self.driver.implicitly_wait(s)
except NoSuchElementException:
return (False, '隐式等待设置失败')
return (True, '隐式等待设置成功') | Set an implicit wait
:param kwargs:
:return: | base/factory/webdriveroperator.py | web_implicitly_wait | handerbaby/AutoRunWebUI | 1 | python | def web_implicitly_wait(self, **kwargs):
'\n Set an implicit wait\n :param kwargs:\n :return:\n '
try:
s = kwargs['time']
except KeyError:
s = 10
try:
self.driver.implicitly_wait(s)
except NoSuchElementException:
return (False, '隐式等待设置失败')
return (True, '隐式等待设置成功') | def web_implicitly_wait(self, **kwargs):
'\n Set an implicit wait\n :param kwargs:\n :return:\n '
try:
s = kwargs['time']
except KeyError:
s = 10
try:
self.driver.implicitly_wait(s)
except NoSuchElementException:
return (False, '隐式等待设置失败')
return (True, '隐式等待设置成功')<|docstring|>Set an implicit wait
:param kwargs:
:return:<|endoftext|> |
d98d2f6c837d64965c21ab58f524f452b628ea9ef4ae0c604a276e540bbf5d81 | def web_element_wait(self, **kwargs):
'\n Wait for an element to become visible\n :param kwargs:\n :return:\n '
try:
type = kwargs['type']
locator = kwargs['locator']
except KeyError:
return (False, '未传需要等待元素的定位参数')
try:
s = kwargs['time']
if (s is None):
s = 30
except KeyError:
s = 30
try:
if (type == 'id'):
WebDriverWait(self.driver, s, 0.5).until(EC.visibility_of_element_located((By.ID, locator)))
elif (type == 'name'):
WebDriverWait(self.driver, s, 0.5).until(EC.visibility_of_element_located((By.NAME, locator)))
elif (type == 'class'):
WebDriverWait(self.driver, s, 0.5).until(EC.visibility_of_element_located((By.CLASS_NAME, locator)))
elif (type == 'xpath'):
WebDriverWait(self.driver, s, 0.5).until(EC.visibility_of_element_located((By.XPATH, locator)))
elif (type == 'css'):
WebDriverWait(self.driver, s, 0.5).until(EC.visibility_of_element_located((By.CSS_SELECTOR, locator)))
else:
return (False, (('不能识别元素元素[' + type) + ']'))
except TimeoutException:
screenshot_path = self.get_screenshot_as_file()
return (False, (((('元素[' + locator) + ']等待出现失败,已截图[') + screenshot_path) + '].'))
return (True, (('元素[' + locator) + ']等待出现成功')) | Wait for an element to become visible
:param kwargs:
:return: | base/factory/webdriveroperator.py | web_element_wait | handerbaby/AutoRunWebUI | 1 | python | def web_element_wait(self, **kwargs):
'\n Wait for an element to become visible\n :param kwargs:\n :return:\n '
try:
type = kwargs['type']
locator = kwargs['locator']
except KeyError:
return (False, '未传需要等待元素的定位参数')
try:
s = kwargs['time']
if (s is None):
s = 30
except KeyError:
s = 30
try:
if (type == 'id'):
WebDriverWait(self.driver, s, 0.5).until(EC.visibility_of_element_located((By.ID, locator)))
elif (type == 'name'):
WebDriverWait(self.driver, s, 0.5).until(EC.visibility_of_element_located((By.NAME, locator)))
elif (type == 'class'):
WebDriverWait(self.driver, s, 0.5).until(EC.visibility_of_element_located((By.CLASS_NAME, locator)))
elif (type == 'xpath'):
WebDriverWait(self.driver, s, 0.5).until(EC.visibility_of_element_located((By.XPATH, locator)))
elif (type == 'css'):
WebDriverWait(self.driver, s, 0.5).until(EC.visibility_of_element_located((By.CSS_SELECTOR, locator)))
else:
return (False, (('不能识别元素元素[' + type) + ']'))
except TimeoutException:
screenshot_path = self.get_screenshot_as_file()
return (False, (((('元素[' + locator) + ']等待出现失败,已截图[') + screenshot_path) + '].'))
return (True, (('元素[' + locator) + ']等待出现成功')) | def web_element_wait(self, **kwargs):
'\n Wait for an element to become visible\n :param kwargs:\n :return:\n '
try:
type = kwargs['type']
locator = kwargs['locator']
except KeyError:
return (False, '未传需要等待元素的定位参数')
try:
s = kwargs['time']
if (s is None):
s = 30
except KeyError:
s = 30
try:
if (type == 'id'):
WebDriverWait(self.driver, s, 0.5).until(EC.visibility_of_element_located((By.ID, locator)))
elif (type == 'name'):
WebDriverWait(self.driver, s, 0.5).until(EC.visibility_of_element_located((By.NAME, locator)))
elif (type == 'class'):
WebDriverWait(self.driver, s, 0.5).until(EC.visibility_of_element_located((By.CLASS_NAME, locator)))
elif (type == 'xpath'):
WebDriverWait(self.driver, s, 0.5).until(EC.visibility_of_element_located((By.XPATH, locator)))
elif (type == 'css'):
WebDriverWait(self.driver, s, 0.5).until(EC.visibility_of_element_located((By.CSS_SELECTOR, locator)))
else:
return (False, (('不能识别元素元素[' + type) + ']'))
except TimeoutException:
screenshot_path = self.get_screenshot_as_file()
return (False, (((('元素[' + locator) + ']等待出现失败,已截图[') + screenshot_path) + '].'))
return (True, (('元素[' + locator) + ']等待出现成功'))<|docstring|>Wait for an element to become visible
:param kwargs:
:return:<|endoftext|> |
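The wrapper above centralises Selenium's explicit-wait pattern. For an id locator, the call it issues is equivalent to this raw Selenium snippet (the 'kw' locator is a placeholder and driver is an active WebDriver):

from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait

# Polls every 0.5 seconds for up to 30 seconds, then raises TimeoutException.
WebDriverWait(driver, 30, 0.5).until(
    EC.visibility_of_element_located((By.ID, 'kw')))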
a2eff4ebe12af714256fa1aa3242f1bfc682cda9465cfac3d0574f56989130e5 | def element_input(self, **kwargs):
'\n\n :param kwargs:\n :return:\n '
try:
type = kwargs['type']
locator = kwargs['locator']
text = kwargs['input']
except KeyError:
return (False, '缺少参数')
try:
index = kwargs['index']
except KeyError:
index = 0
(isOk, result) = self.element_find(type, locator, index)
if (not isOk):
return (isOk, result)
elem = result
try:
elem.send_keys(text)
except Exception:
screenshot_path = self.get_screenshot_as_file()
return (False, (((((('元素[' + locator) + ']输入[') + text) + ']失败,已截图[') + screenshot_path) + '].'))
return (True, (((('元素[' + locator) + ']输入[') + text) + ']成功')) | :param kwargs:
:return: | base/factory/webdriveroperator.py | element_input | handerbaby/AutoRunWebUI | 1 | python | def element_input(self, **kwargs):
'\n\n :param kwargs:\n :return:\n '
try:
type = kwargs['type']
locator = kwargs['locator']
text = kwargs['input']
except KeyError:
return (False, '缺少参数')
try:
index = kwargs['index']
except KeyError:
index = 0
(isOk, result) = self.element_find(type, locator, index)
if (not isOk):
return (isOk, result)
elem = result
try:
elem.send_keys(text)
except Exception:
screenshot_path = self.get_screenshot_as_file()
return (False, (((((('元素[' + locator) + ']输入[') + text) + ']失败,已截图[') + screenshot_path) + '].'))
return (True, (((('元素[' + locator) + ']输入[') + text) + ']成功')) | def element_input(self, **kwargs):
'\n\n :param kwargs:\n :return:\n '
try:
type = kwargs['type']
locator = kwargs['locator']
text = kwargs['input']
except KeyError:
return (False, '缺少参数')
try:
index = kwargs['index']
except KeyError:
index = 0
(isOk, result) = self.element_find(type, locator, index)
if (not isOk):
return (isOk, result)
elem = result
try:
elem.send_keys(text)
except Exception:
screenshot_path = self.get_screenshot_as_file()
return (False, (((((('元素[' + locator) + ']输入[') + text) + ']失败,已截图[') + screenshot_path) + '].'))
return (True, (((('元素[' + locator) + ']输入[') + text) + ']成功'))<|docstring|>:param kwargs:
:return:<|endoftext|> |
1919b6f81ce7f95bc0ed427681960a5ef057e8d2ebb590d05829ecba45f989ed | def element_find(self, type, locator, index=None):
'\n Locate an element\n :param type:\n :param locator:\n :param index:\n :return:\n '
time.sleep(1)
if (index is None):
index = 0
type = str.lower(type)
try:
if (type == 'id'):
elem = self.driver.find_elements(by=By.ID, value=locator)[index]
elif (type == 'name'):
elem = self.driver.find_elements(by=By.NAME, value=locator)[index]
elif (type == 'class'):
elem = self.driver.find_elements(by=By.CLASS_NAME, value=locator)[index]
elif (type == 'xpath'):
elem = self.driver.find_elements(by=By.XPATH, value=locator)[index]
elif (type == 'css'):
elem = self.driver.find_elements(by=By.CSS_SELECTOR, value=locator)[index]
else:
return (False, (('不能识别元素元素[' + type) + ']'))
except Exception as e:
screenshot_path = self.get_screenshot_as_file()
return (False, (((('获取元素[' + type) + ']失败,已截图[') + screenshot_path) + '].'))
return (True, elem) | Locate an element
:param type:
:param locator:
:param index:
:return: | base/factory/webdriveroperator.py | element_find | handerbaby/AutoRunWebUI | 1 | python | def element_find(self, type, locator, index=None):
'\n Locate an element\n :param type:\n :param locator:\n :param index:\n :return:\n '
time.sleep(1)
if (index is None):
index = 0
type = str.lower(type)
try:
if (type == 'id'):
elem = self.driver.find_elements(by=By.ID, value=locator)[index]
elif (type == 'name'):
elem = self.driver.find_elements(by=By.NAME, value=locator)[index]
elif (type == 'class'):
elem = self.driver.find_elements(by=By.CLASS_NAME, value=locator)[index]
elif (type == 'xpath'):
elem = self.driver.find_elements(by=By.XPATH, value=locator)[index]
elif (type == 'css'):
elem = self.driver.find_elements(by=By.CSS_SELECTOR, value=locator)[index]
else:
return (False, (('不能识别元素元素[' + type) + ']'))
except Exception as e:
screenshot_path = self.get_screenshot_as_file()
return (False, (((('获取元素[' + type) + ']失败,已截图[') + screenshot_path) + '].'))
return (True, elem) | def element_find(self, type, locator, index=None):
'\n Locate an element\n :param type:\n :param locator:\n :param index:\n :return:\n '
time.sleep(1)
if (index is None):
index = 0
type = str.lower(type)
try:
if (type == 'id'):
elem = self.driver.find_elements(by=By.ID, value=locator)[index]
elif (type == 'name'):
elem = self.driver.find_elements(by=By.NAME, value=locator)[index]
elif (type == 'class'):
elem = self.driver.find_elements(by=By.CLASS_NAME, value=locator)[index]
elif (type == 'xpath'):
elem = self.driver.find_elements(by=By.XPATH, value=locator)[index]
elif (type == 'css'):
elem = self.driver.find_elements(by=By.CSS_SELECTOR, value=locator)[index]
else:
return (False, (('不能识别元素元素[' + type) + ']'))
except Exception as e:
screenshot_path = self.get_screenshot_as_file()
return (False, (((('获取元素[' + type) + ']失败,已截图[') + screenshot_path) + '].'))
return (True, elem)<|docstring|>Locate an element
:param type:
:param locator:
:param index:
:return:<|endoftext|> |
5fabc3f93ea1e9da2b7a965e4e33eb5167dcdac1d1de999d99ec85170af694b7 | def element_click(self, **kwargs):
'\n Click an element\n :param kwargs:\n :return:\n '
try:
type = kwargs['type']
locator = kwargs['locator']
except KeyError:
return (False, '缺少参数')
try:
index = kwargs['index']
except KeyError:
index = 0
(isOk, result) = self.element_find(type, locator, index)
if (not isOk):
return (isOk, result)
elem = result
try:
elem.click()
except Exception:
screenshot_path = self.get_screenshot_as_file()
return (False, (((('元素[' + locator) + ']点击失败,已截图[') + screenshot_path) + '].'))
return (True, (('元素[' + locator) + ']点击成功')) | Click an element
:param kwargs:
:return: | base/factory/webdriveroperator.py | element_click | handerbaby/AutoRunWebUI | 1 | python | def element_click(self, **kwargs):
'\n Click an element\n :param kwargs:\n :return:\n '
try:
type = kwargs['type']
locator = kwargs['locator']
except KeyError:
return (False, '缺少参数')
try:
index = kwargs['index']
except KeyError:
index = 0
(isOk, result) = self.element_find(type, locator, index)
if (not isOk):
return (isOk, result)
elem = result
try:
elem.click()
except Exception:
screenshot_path = self.get_screenshot_as_file()
return (False, (((('元素[' + locator) + ']点击失败,已截图[') + screenshot_path) + '].'))
return (True, (('元素[' + locator) + ']点击成功')) | def element_click(self, **kwargs):
'\n Click an element\n :param kwargs:\n :return:\n '
try:
type = kwargs['type']
locator = kwargs['locator']
except KeyError:
return (False, '缺少参数')
try:
index = kwargs['index']
except KeyError:
index = 0
(isOk, result) = self.element_find(type, locator, index)
if (not isOk):
return (isOk, result)
elem = result
try:
elem.click()
except Exception:
screenshot_path = self.get_screenshot_as_file()
return (False, (((('元素[' + locator) + ']点击失败,已截图[') + screenshot_path) + '].'))
return (True, (('元素[' + locator) + ']点击成功'))<|docstring|>Click an element
:param kwargs:
:return:<|endoftext|> |
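Taken together, the webdriveroperator methods suggest a wait-then-interact flow. A hedged usage sketch; op stands for an instance of the wrapper class, and the locators are illustrative only:

ok, msg = op.web_element_wait(type='id', locator='kw', time=10)
if ok:
    ok, msg = op.element_input(type='id', locator='kw', input='selenium')
    ok, msg = op.element_click(type='id', locator='su')
print(msg)   # each call reports success, or a failure message with a screenshot path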
dbbd14fd86fb254b5c44befa42144903800cbb9525cf18eef28431c3017ad937 | def parse_arguments(argv):
'Parse command line arguments\n Args:\n argv (list): list of command line arguments including program name\n Returns:\n The parsed arguments as returned by argparse.ArgumentParser\n '
parser = argparse.ArgumentParser(description='Preprocessing')
parser.add_argument('--bucket', type=str, help='GCS bucket where preprocessed data is saved', default='<your-bucket-name>')
parser.add_argument('--cutoff_year', type=str, help='Cutoff year for the stock data', default='2010')
parser.add_argument('--kfp', dest='kfp', action='store_true', help='Kubeflow pipelines flag')
(args, _) = parser.parse_known_args(args=argv[1:])
return args | Parse command line arguments
Args:
argv (list): list of command line arguments including program name
Returns:
The parsed arguments as returned by argparse.ArgumentParser | financial_time_series/tensorflow_model/run_preprocess.py | parse_arguments | eedorenko/examples | 1,165 | python | def parse_arguments(argv):
'Parse command line arguments\n Args:\n argv (list): list of command line arguments including program name\n Returns:\n The parsed arguments as returned by argparse.ArgumentParser\n '
parser = argparse.ArgumentParser(description='Preprocessing')
parser.add_argument('--bucket', type=str, help='GCS bucket where preprocessed data is saved', default='<your-bucket-name>')
parser.add_argument('--cutoff_year', type=str, help='Cutoff year for the stock data', default='2010')
parser.add_argument('--kfp', dest='kfp', action='store_true', help='Kubeflow pipelines flag')
(args, _) = parser.parse_known_args(args=argv[1:])
return args | def parse_arguments(argv):
'Parse command line arguments\n Args:\n argv (list): list of command line arguments including program name\n Returns:\n The parsed arguments as returned by argparse.ArgumentParser\n '
parser = argparse.ArgumentParser(description='Preprocessing')
parser.add_argument('--bucket', type=str, help='GCS bucket where preprocessed data is saved', default='<your-bucket-name>')
parser.add_argument('--cutoff_year', type=str, help='Cutoff year for the stock data', default='2010')
parser.add_argument('--kfp', dest='kfp', action='store_true', help='Kubeflow pipelines flag')
(args, _) = parser.parse_known_args(args=argv[1:])
return args<|docstring|>Parse command line arguments
Args:
argv (list): list of command line arguments including program name
Returns:
The parsed arguments as returned by argparse.ArgumentParser<|endoftext|> |
3231c390f9477dc078a236cccca155ae7f6566cc2d6ffc0e9af3e607f36eb3a6 | def run_preprocess(argv=None):
'Runs the retrieval and preprocessing of the data.\n\n Args:\n argv: list of command line arguments including program name\n\n Returns:\n\n '
logging.info('starting preprocessing of data..')
args = parse_arguments((sys.argv if (argv is None) else argv))
tickers = ['snp', 'nyse', 'djia', 'nikkei', 'hangseng', 'ftse', 'dax', 'aord']
closing_data = preprocess.load_data(tickers, args.cutoff_year)
time_series = preprocess.preprocess_data(closing_data)
logging.info('preprocessing of data complete..')
logging.info('starting uploading of the preprocessed data on GCS..')
temp_folder = 'data'
if (not os.path.exists(temp_folder)):
os.mkdir(temp_folder)
file_path = os.path.join(temp_folder, 'data_{}.csv'.format(args.cutoff_year))
time_series.to_csv(file_path, index=False)
storage_helper.upload_to_storage(args.bucket, temp_folder)
shutil.rmtree(temp_folder)
if args.kfp:
with open('/blob_path.txt', 'w') as output_file:
output_file.write(file_path)
logging.info('upload of the preprocessed data on GCS completed..') | Runs the retrieval and preprocessing of the data.
Args:
argv: list of command line arguments including program name
Returns: | financial_time_series/tensorflow_model/run_preprocess.py | run_preprocess | eedorenko/examples | 1,165 | python | def run_preprocess(argv=None):
'Runs the retrieval and preprocessing of the data.\n\n Args:\n argv: list of command line arguments including program name\n\n Returns:\n\n '
logging.info('starting preprocessing of data..')
args = parse_arguments((sys.argv if (argv is None) else argv))
tickers = ['snp', 'nyse', 'djia', 'nikkei', 'hangseng', 'ftse', 'dax', 'aord']
closing_data = preprocess.load_data(tickers, args.cutoff_year)
time_series = preprocess.preprocess_data(closing_data)
logging.info('preprocessing of data complete..')
logging.info('starting uploading of the preprocessed data on GCS..')
temp_folder = 'data'
if (not os.path.exists(temp_folder)):
os.mkdir(temp_folder)
file_path = os.path.join(temp_folder, 'data_{}.csv'.format(args.cutoff_year))
time_series.to_csv(file_path, index=False)
storage_helper.upload_to_storage(args.bucket, temp_folder)
shutil.rmtree(temp_folder)
if args.kfp:
with open('/blob_path.txt', 'w') as output_file:
output_file.write(file_path)
logging.info('upload of the preprocessed data on GCS completed..') | def run_preprocess(argv=None):
'Runs the retrieval and preprocessing of the data.\n\n Args:\n argv: list of command line arguments including program name\n\n Returns:\n\n '
logging.info('starting preprocessing of data..')
args = parse_arguments((sys.argv if (argv is None) else argv))
tickers = ['snp', 'nyse', 'djia', 'nikkei', 'hangseng', 'ftse', 'dax', 'aord']
closing_data = preprocess.load_data(tickers, args.cutoff_year)
time_series = preprocess.preprocess_data(closing_data)
logging.info('preprocessing of data complete..')
logging.info('starting uploading of the preprocessed data on GCS..')
temp_folder = 'data'
if (not os.path.exists(temp_folder)):
os.mkdir(temp_folder)
file_path = os.path.join(temp_folder, 'data_{}.csv'.format(args.cutoff_year))
time_series.to_csv(file_path, index=False)
storage_helper.upload_to_storage(args.bucket, temp_folder)
shutil.rmtree(temp_folder)
if args.kfp:
with open('/blob_path.txt', 'w') as output_file:
output_file.write(file_path)
logging.info('upload of the preprocessed data on GCS completed..')<|docstring|>Runs the retrieval and preprocessing of the data.
Args:
argv: list of command line arguments including program name
Returns:<|endoftext|> |
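Given the flags defined in parse_arguments above, a preprocessing run might be invoked like this; the bucket name and cutoff year are placeholders:

# Programmatic equivalent of:
#   python run_preprocess.py --bucket my-gcs-bucket --cutoff_year 2012 --kfp
run_preprocess(['run_preprocess.py', '--bucket', 'my-gcs-bucket',
                '--cutoff_year', '2012', '--kfp'])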
d2287833f8ec04e30ea3101c530d5dd1b0001f5a6f4e885883d902bd56c698b3 | def __init__(__self__, *, vpc_peering_connection_id: pulumi.Input[str], accepter: Optional[pulumi.Input['VpcPeeringConnectionAccepterAccepterArgs']]=None, auto_accept: Optional[pulumi.Input[bool]]=None, requester: Optional[pulumi.Input['VpcPeeringConnectionAccepterRequesterArgs']]=None, tags: Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]=None, tags_all: Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]=None):
"\n The set of arguments for constructing a VpcPeeringConnectionAccepter resource.\n :param pulumi.Input[str] vpc_peering_connection_id: The VPC Peering Connection ID to manage.\n :param pulumi.Input['VpcPeeringConnectionAccepterAccepterArgs'] accepter: A configuration block that describes [VPC Peering Connection]\n (https://docs.aws.amazon.com/vpc/latest/peering/what-is-vpc-peering.html) options set for the accepter VPC.\n :param pulumi.Input[bool] auto_accept: Whether or not to accept the peering request. Defaults to `false`.\n :param pulumi.Input['VpcPeeringConnectionAccepterRequesterArgs'] requester: A configuration block that describes [VPC Peering Connection]\n (https://docs.aws.amazon.com/vpc/latest/peering/what-is-vpc-peering.html) options set for the requester VPC.\n :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource. .If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.\n :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider .\n "
pulumi.set(__self__, 'vpc_peering_connection_id', vpc_peering_connection_id)
if (accepter is not None):
pulumi.set(__self__, 'accepter', accepter)
if (auto_accept is not None):
pulumi.set(__self__, 'auto_accept', auto_accept)
if (requester is not None):
pulumi.set(__self__, 'requester', requester)
if (tags is not None):
pulumi.set(__self__, 'tags', tags)
if (tags_all is not None):
pulumi.set(__self__, 'tags_all', tags_all) | The set of arguments for constructing a VpcPeeringConnectionAccepter resource.
:param pulumi.Input[str] vpc_peering_connection_id: The VPC Peering Connection ID to manage.
:param pulumi.Input['VpcPeeringConnectionAccepterAccepterArgs'] accepter: A configuration block that describes [VPC Peering Connection]
(https://docs.aws.amazon.com/vpc/latest/peering/what-is-vpc-peering.html) options set for the accepter VPC.
:param pulumi.Input[bool] auto_accept: Whether or not to accept the peering request. Defaults to `false`.
:param pulumi.Input['VpcPeeringConnectionAccepterRequesterArgs'] requester: A configuration block that describes [VPC Peering Connection]
(https://docs.aws.amazon.com/vpc/latest/peering/what-is-vpc-peering.html) options set for the requester VPC.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider. | sdk/python/pulumi_aws/ec2/vpc_peering_connection_accepter.py | __init__ | rapzo/pulumi-aws | 260 | python | def __init__(__self__, *, vpc_peering_connection_id: pulumi.Input[str], accepter: Optional[pulumi.Input['VpcPeeringConnectionAccepterAccepterArgs']]=None, auto_accept: Optional[pulumi.Input[bool]]=None, requester: Optional[pulumi.Input['VpcPeeringConnectionAccepterRequesterArgs']]=None, tags: Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]=None, tags_all: Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]=None):
"\n The set of arguments for constructing a VpcPeeringConnectionAccepter resource.\n :param pulumi.Input[str] vpc_peering_connection_id: The VPC Peering Connection ID to manage.\n :param pulumi.Input['VpcPeeringConnectionAccepterAccepterArgs'] accepter: A configuration block that describes [VPC Peering Connection]\n (https://docs.aws.amazon.com/vpc/latest/peering/what-is-vpc-peering.html) options set for the accepter VPC.\n :param pulumi.Input[bool] auto_accept: Whether or not to accept the peering request. Defaults to `false`.\n :param pulumi.Input['VpcPeeringConnectionAccepterRequesterArgs'] requester: A configuration block that describes [VPC Peering Connection]\n (https://docs.aws.amazon.com/vpc/latest/peering/what-is-vpc-peering.html) options set for the requester VPC.\n :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource. .If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.\n :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider .\n "
pulumi.set(__self__, 'vpc_peering_connection_id', vpc_peering_connection_id)
if (accepter is not None):
pulumi.set(__self__, 'accepter', accepter)
if (auto_accept is not None):
pulumi.set(__self__, 'auto_accept', auto_accept)
if (requester is not None):
pulumi.set(__self__, 'requester', requester)
if (tags is not None):
pulumi.set(__self__, 'tags', tags)
if (tags_all is not None):
pulumi.set(__self__, 'tags_all', tags_all) | def __init__(__self__, *, vpc_peering_connection_id: pulumi.Input[str], accepter: Optional[pulumi.Input['VpcPeeringConnectionAccepterAccepterArgs']]=None, auto_accept: Optional[pulumi.Input[bool]]=None, requester: Optional[pulumi.Input['VpcPeeringConnectionAccepterRequesterArgs']]=None, tags: Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]=None, tags_all: Optional[pulumi.Input[Mapping[(str, pulumi.Input[str])]]]=None):
"\n The set of arguments for constructing a VpcPeeringConnectionAccepter resource.\n :param pulumi.Input[str] vpc_peering_connection_id: The VPC Peering Connection ID to manage.\n :param pulumi.Input['VpcPeeringConnectionAccepterAccepterArgs'] accepter: A configuration block that describes [VPC Peering Connection]\n (https://docs.aws.amazon.com/vpc/latest/peering/what-is-vpc-peering.html) options set for the accepter VPC.\n :param pulumi.Input[bool] auto_accept: Whether or not to accept the peering request. Defaults to `false`.\n :param pulumi.Input['VpcPeeringConnectionAccepterRequesterArgs'] requester: A configuration block that describes [VPC Peering Connection]\n (https://docs.aws.amazon.com/vpc/latest/peering/what-is-vpc-peering.html) options set for the requester VPC.\n :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource. .If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.\n :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider .\n "
pulumi.set(__self__, 'vpc_peering_connection_id', vpc_peering_connection_id)
if (accepter is not None):
pulumi.set(__self__, 'accepter', accepter)
if (auto_accept is not None):
pulumi.set(__self__, 'auto_accept', auto_accept)
if (requester is not None):
pulumi.set(__self__, 'requester', requester)
if (tags is not None):
pulumi.set(__self__, 'tags', tags)
if (tags_all is not None):
pulumi.set(__self__, 'tags_all', tags_all)<|docstring|>The set of arguments for constructing a VpcPeeringConnectionAccepter resource.
:param pulumi.Input[str] vpc_peering_connection_id: The VPC Peering Connection ID to manage.
:param pulumi.Input['VpcPeeringConnectionAccepterAccepterArgs'] accepter: A configuration block that describes [VPC Peering Connection]
(https://docs.aws.amazon.com/vpc/latest/peering/what-is-vpc-peering.html) options set for the accepter VPC.
:param pulumi.Input[bool] auto_accept: Whether or not to accept the peering request. Defaults to `false`.
:param pulumi.Input['VpcPeeringConnectionAccepterRequesterArgs'] requester: A configuration block that describes [VPC Peering Connection]
(https://docs.aws.amazon.com/vpc/latest/peering/what-is-vpc-peering.html) options set for the requester VPC.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A map of tags to assign to the resource. If configured with a provider `default_tags` configuration block present, tags with matching keys will overwrite those defined at the provider-level.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags_all: A map of tags assigned to the resource, including those inherited from the provider.<|endoftext|>
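These args back the VpcPeeringConnectionAccepter resource. A minimal usage sketch in a Pulumi program; the peering connection object is assumed to be created elsewhere in the stack:

import pulumi_aws as aws

# 'peer' is assumed to be an aws.ec2.VpcPeeringConnection defined elsewhere.
accepter = aws.ec2.VpcPeeringConnectionAccepter(
    'peer-accepter',
    vpc_peering_connection_id=peer.id,
    auto_accept=True,
    tags={'Side': 'Accepter'})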
2bd9e4aa1afa40896e005c92e946a5de37557d6c5acb4a9cbe1a032b47003cff | @property
@pulumi.getter(name='vpcPeeringConnectionId')
def vpc_peering_connection_id(self) -> pulumi.Input[str]:
'\n The VPC Peering Connection ID to manage.\n '
return pulumi.get(self, 'vpc_peering_connection_id') | The VPC Peering Connection ID to manage. | sdk/python/pulumi_aws/ec2/vpc_peering_connection_accepter.py | vpc_peering_connection_id | rapzo/pulumi-aws | 260 | python | @property
@pulumi.getter(name='vpcPeeringConnectionId')
def vpc_peering_connection_id(self) -> pulumi.Input[str]:
'\n \n '
return pulumi.get(self, 'vpc_peering_connection_id') | @property
@pulumi.getter(name='vpcPeeringConnectionId')
def vpc_peering_connection_id(self) -> pulumi.Input[str]:
'\n \n '
return pulumi.get(self, 'vpc_peering_connection_id')<|docstring|>The VPC Peering Connection ID to manage.<|endoftext|> |