id (string, lengths 1–265) | text (string, lengths 6–5.19M) | dataset_id (string, 7 classes)
---|---|---|
134355 | <gh_stars>100-1000
# This file is meant to be run inside lldb as a command after
# the attach_linux.dylib dll has already been loaded to settrace for all threads.
def __lldb_init_module(debugger, internal_dict):
# Command Initialization code goes here
# print('Startup LLDB in Python!')
import lldb
try:
show_debug_info = 1
is_debug = 0
options = lldb.SBExpressionOptions()
options.SetFetchDynamicValue()
options.SetTryAllThreads(run_others=False)
options.SetTimeoutInMicroSeconds(timeout=10000000)
target = debugger.GetSelectedTarget()
if target:
process = target.GetProcess()
if process:
for thread in process:
# Get the first frame
# print('Thread %s, suspended %s\n'%(thread, thread.IsStopped()))
if internal_dict.get('_thread_%d' % thread.GetThreadID(), False):
process.SetSelectedThread(thread)
if not thread.IsStopped():
# thread.Suspend()
error = process.Stop()
frame = thread.GetSelectedFrame()
if frame.GetFunctionName() == '__select':
# print('We are in __select')
# Step over select, otherwise evaluating expression there can terminate thread
thread.StepOver()
frame = thread.GetSelectedFrame()
print('Will settrace in: %s' % (frame,))
for f in thread:
print(f)
res = frame.EvaluateExpression("(int) SetSysTraceFunc(%s, %s)" % (
show_debug_info, is_debug), options)
error = res.GetError()
if error:
print(error)
thread.Resume()
    except Exception:
        import traceback
        traceback.print_exc()
| StarcoderdataPython |
1740622 | <gh_stars>0
# Create a SQLAlchemy session maker to be used
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
SQLALCHEMY_DATABASE_URL = "postgresql+psycopg2://admin:admin@localhost/pht_conductor"
engine = create_engine(
SQLALCHEMY_DATABASE_URL, # connect_args={"check_same_thread": False} For sqlite db
)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
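# Hypothetical usage sketch (the `User` model is illustrative, not defined here):
# db = SessionLocal()
# try:
#     users = db.query(User).all()
# finally:
#     db.close()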
| StarcoderdataPython |
1712329 | <filename>test/tutorial/scripts/api/download_data_metadata.py
from dbio.dss import DSSClient
dss = DSSClient()
UUID = "ffffaf55-f19c-40e3-aa81-a6c69d357265"
VERSION = "ffffaf55-f19c-40e3-aa81-a6c69d357265"
# Download the metadata only
dss.download(
bundle_uuid=UUID,
version=VERSION,
replica="aws",
download_dir=".dbio_metadata_only"
)
# Download the data only
dss.download(
bundle_uuid=UUID,
version=VERSION,
replica="aws",
download_dir=".dbio_data_only"
)
| StarcoderdataPython |
3356028 | <reponame>m-star18/atcoder<filename>submissions/abc101/b.py
import math
n = int(input())
n_check = n
num = []
for i in range(int(math.log10(n)) + 1):
    num.append(n % 10)
    n //= 10
if n_check % sum(num) == 0:
ans = 'Yes'
else:
ans = 'No'
print(ans)
| StarcoderdataPython |
3306972 | <filename>website/views.py<gh_stars>10-100
"""
This module registers flask app views.
"""
from flask import Blueprint, render_template
views = Blueprint('views', __name__)
def base() -> str:
"""
Loads the main html template.
---
Args: None
Returns:
base.html: base html template.
"""
return render_template("base.html")
@views.route('/', methods=['GET'])
@views.route('/home', methods=['GET'])
def home() -> str:
"""
Loads the home html template.
---
Args: None
Returns:
home.html: home html template.
"""
return render_template("home.html")
@views.route('/support', methods=['GET'])
def about() -> str:
"""
Loads the support html template.
---
Args: None
Returns:
support.html: support html template
"""
return render_template("support.html")
| StarcoderdataPython |
4814902 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Access Missouri model managers for legislative tasks.
"""
from billmanager import BillManager
| StarcoderdataPython |
1795851 | <reponame>kykosic/pycats
"""
Pipe type class.
It can be used to fluently sequence computations on any object,
but is especially useful to transform lazily evaluated data, such
as Python generators.
Users of R will find it similar to the dplyr `%>%` operator.
"""
from abc import ABC, abstractmethod
from pycats.core import typeclass
@typeclass
class Pipe(ABC):
""" Provides `pipe` method.
Signatures for `Pipe[A]`:
- `A.pipe(func: A -> B) -> B`
"""
@abstractmethod
def pipe(self, func):
pass
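# A minimal sketch of the `pipe` contract on a plain wrapper type; the real
# instances are registered through the `typeclass` machinery above, so this
# stand-in class is illustrative only.
class _Boxed:
    def __init__(self, value):
        self.value = value

    def pipe(self, func):
        # A.pipe(func: A -> B) -> B
        return func(self.value)

assert _Boxed(3).pipe(lambda x: x + 1) == 4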
| StarcoderdataPython |
1750544 | <filename>gym_PBN/envs/bittner/base.py<gh_stars>0
"""
This file contains arcane magics.
"""
import copy
import itertools
import pickle
import random
import time
from os import path
import networkx as nx
import numpy as np
from scipy.special import smirnov
class Node:
def __init__(self, index, bittnerIndex, name, ID, LUTflag=False):
self.index = index
self.bittnerIndex = bittnerIndex
self.name = name
self.ID = ID
self.LUTflag = LUTflag
self.CODsum = 0
self.value = None
self.predictors = []
        self.inputNodes = []
def addPredictors(self, predictors):
IDstoPrint = []
for COD, A, inputIDs in predictors.T:
            if COD is None:
pass
else:
self.CODsum += COD
if len(self.predictors) == 0:
self.predictors += [(inputIDs, A, COD)]
else:
_, _, prevCOD = self.predictors[-1]
currCOD = prevCOD + COD
self.predictors += [(inputIDs, A, currCOD)]
for inID in list(inputIDs):
if not inID in IDstoPrint:
IDstoPrint = IDstoPrint + [inID]
def addInputNode(self, inputNode):
self.inputNodes += [inputNode]
def addInputNodes(self, inputNodes):
self.inputNodes = np.append(self.inputNodes, inputNodes)
def getInputNodes(self):
inputList = []
for pred in self.predictors:
for ID in pred[0]:
if not ID in inputList:
inputList.append(ID)
return inputList
def setValue(self, value):
self.value = value
def addLUT(self, LUT, inputIDs):
self.LUT = LUT
self.inputIDs = inputIDs
def getStateProbs(self, state):
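        # Blend each predictor's deterministic 0/1 vote into a two-entry
        # distribution, weighting every predictor by its share of the total COD.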
probs = [0] * 2
prevCOD = 0
for IDs, A, COD in self.predictors:
X = np.ones((len(IDs) + 1, 1))
for j in range(len(IDs)):
ID = IDs[j]
x = state[ID]
X[j] = x
currCOD = COD - prevCOD
prevCOD = COD
Ypred = np.matmul(X.T, A)
if Ypred < 0.5:
Y = 0
else:
Y = 1
probs[Y] += currCOD / self.CODsum
return probs
def Predstep(self, state):
        if self.value is None:
raise Exception("Forgot to initialise the states")
r = random.random() * self.CODsum
for i in range(len(self.predictors)):
IDs, A, COD = self.predictors[i]
if COD > r:
break
X = np.ones((len(IDs) + 1, 1))
for j in range(len(IDs)):
ID = IDs[j]
x = state[ID]
X[j] = x
Ypred = np.matmul(X.T, A)
if Ypred < 0.5:
Y = 0
else:
Y = 1
return Y
def LUTstep(self, state):
X = np.empty((len(self.inputIDs), 1))
for j in range(len(self.inputIDs)):
ID = self.inputIDs[j]
x = state[ID]
X[j] = x
inputInt = int(integerize(X)[0])
LUTrow = self.LUT[inputInt, :]
r = random.random()
if r < LUTrow[0]:
Y = 0
else:
Y = 1
return Y
def step(self, state, verbose=False):
if self.LUTflag:
Y = self.LUTstep(state)
else:
Y = self.Predstep(state)
self.value = Y
return Y
def genBoolList(n, length, b):
output = np.zeros((length), dtype=int)
for i in range(length):
output[i] = n % b
n = n // b
return output
def integerize(state):
output = 0
for i in range(len(state)):
output += state[i] * (2**i)
return output
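# Round-trip sanity sketch for the two helpers above (digits are little-endian):
# genBoolList(6, 4, 2) -> [0, 1, 1, 0] and integerize([0, 1, 1, 0]) -> 6.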
def KSstatistic(states1, states2, nMax):
# Oh boy let's go
M = states1.size
maxDist = 0
for x in range(nMax):
dist = abs(states1[x] - states2[x])
if dist > maxDist:
maxDist = dist
D = maxDist
signif = smirnov(M, D)
print("D: {0}".format(D))
# print("Alpha: {0}".format(signif))
return D, signif
def indicatorF(inp, x):
if inp <= x:
return 1
else:
return 0
# Object representing the Graph of the Probabilistic/random boolean network
class Graph:
def __init__(self, base):
self.nodes = []
self.edges = [] # Indexes in the form of tuples (from->to)
self.base = base
self.perturbations = False
self.p = 0.001
@property
def N(self):
return len(self.nodes)
def genSTG(self, savepath):
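        # Enumerates all 2^N network states to build the state-transition graph,
        # so cost grows exponentially with node count; results are pickled to
        # `savepath` and reloaded on later calls.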
        if savepath is not None:
if path.exists(savepath):
return pickle.load((open(savepath, "rb")))
a = [[0, 1]] * len(self.nodes)
possibleStates = list(itertools.product(*a))
graphNodes = {}
for possibleState in possibleStates:
# print("Going through: {0}".format(possibleState))
starttime = time.time()
# graphNodes[list(possibleState)] = ()
self.setState(list(possibleState))
nextStates = self.getNextStates()
# adding on the inputs for the state
for (
ns
) in nextStates: # Going through each of next states from current state
state = ns
prob = nextStates[
state
] # Probability of getting to the next state given current state
if (
state in graphNodes.keys()
): # Going through each possible each state. Goal is to add self to their inputs.
oldEntry = graphNodes[state] # Entry for that next state
inOld = oldEntry[0] # Ond inputs for the next state
outOld = oldEntry[1] # old outputs for the next state
inOld[
possibleState
] = prob # Adding self to the inputs with the probability
graphNodes[state] = (inOld, outOld)
else:
inNew = {possibleState: prob}
graphNodes[state] = (inNew, None)
# Adding on the outputs for the state
if possibleState in graphNodes.keys(): # If it's already in
oldEntry = graphNodes[possibleState] # Copy the old entry
inOld = oldEntry[0]
graphNodes[possibleState] = (
inOld,
nextStates,
) # Return the old entry with the outputs appended.
# the next state will not be overridden as it only gets added on at this step.
else: # If it's new, then creating an entry
graphNodes[possibleState] = ({}, nextStates)
endtime = time.time()
# print("Time elapsed: {0}".format(endtime-starttime))
        if savepath is not None:
pickle.dump(graphNodes, open(savepath, "wb"))
return graphNodes
def getNextStates(self):
probs = []
for node in self.nodes:
prob = node.getStateProbs(self.getState())
probs = probs + [prob]
a = [[0, 1]] * len(self.nodes)
possibleStates = list(itertools.product(*a))
nextStates = {}
for state in possibleStates:
p = 1
for i in range(len(state)):
p *= probs[i][state[i]]
if p > 0:
nextStates[state] = p
return nextStates
def addNodes(self, nodeList):
self.nodes = nodeList
def addCon(self, conn):
k = conn.shape[0] - 1
self.k = k
for i in range(conn.shape[1]):
targID = conn[k, i]
targNode = self.getNodeByID(targID)
for j in range(k):
predID = conn[j, i]
predNode = self.getNodeByID(predID)
self.edges = self.edges + [(predNode.index, targNode.index)]
targNode.addInputNode(predNode)
def addEdge(self, startIndex, endIndex):
self.nodes[endIndex].addInputNode(self.nodes[startIndex])
self.edges = self.edges + [(self.nodes[startIndex], self.nodes[endIndex])]
def flipNode(self, index):
if index < len(self.nodes):
self.nodes[index].setValue(self.nodes[index].value ^ True)
def step(self):
# self.perturbations = True
if self.perturbations:
pertFlag = np.random.rand(len(self.nodes)) < self.p
if pertFlag.any():
oldState = self.getState()
for i in range(len(oldState.keys())):
if pertFlag[i]:
flipid = list(oldState.keys())[i]
oldState[flipid] = oldState[flipid] ^ 1
self.setState(list(oldState.values()))
else:
oldState = self.getState()
outputState = {}
for i in range(0, len(self.nodes)):
self.nodes[i].step(oldState)
outputState = self.getState()
else:
oldState = self.getState()
outputState = {}
for i in range(0, len(self.nodes)):
self.nodes[i].step(oldState)
outputState = self.getState()
def getState(self):
outputState = {}
for node in self.nodes:
outputState[int(node.ID)] = node.value
return outputState
def getNames(self):
names = []
for i in range(0, len(self.nodes)):
names.append([self.nodes[i].name])
return names
def getIDs(self):
IDs = [node.ID[0] for node in self.nodes]
return IDs
def getNodeByID(self, ID):
for node in self.nodes:
if node.ID == ID:
return node
print("Node with ID {0} not found.".format(ID))
return None
def printGraph(self, path, dist=10, charLim=10):
self.G = nx.DiGraph()
for node in self.nodes:
self.G.add_node(str(node.name[0][:charLim]))
inputIDs = node.getInputNodes()
print(inputIDs)
for ID in inputIDs:
inNode = self.getNodeByID(ID)
self.G.add_edge(
str(inNode.name[0][:charLim]), str(node.name[0][:charLim])
)
# pos = nx.spring_layout(self.G, k=dist)
# nx.draw_networkx_nodes(self.G, pos, node_size=500)
# nx.draw_networkx_edges(self.G, pos)
labels = {}
for node in self.G.nodes():
labels[node] = node
# nx.draw_networkx_labels(self.G, pos, labels, font_size=8)
# plt.savefig(path)
return self.G
def setState(self, state):
for x in range(0, len(self.nodes)):
self.nodes[x].value = int(state[x])
def genRandState(self):
for x in range(0, len(self.nodes)):
self.nodes[x].value = int(random.randint(0, self.base - 1))
def findAttractors(STG):
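    # Attractor search by node elimination: repeatedly pick an unclassified STG
    # node, rewire its inputs to its outputs, and re-classify until every node
    # is a guaranteed attractor (GA) or guaranteed non-attractor (NGA).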
STG = stripSTG(STG)
unknowns = {}
GA = {}
NGA = {}
for key in list(STG.keys()):
unknowns[key] = [key] # (Status, tags)
unknowns, GA, NGA = identify(unknowns, GA, NGA, STG)
while len(unknowns) > 0:
print("Unknowns to clean up left: {0}".format(len(unknowns)))
toRemove = list(unknowns.keys())[0]
unknowns, GA, NGA, STG = removeNode(unknowns, GA, NGA, toRemove, STG)
unknowns, GA, NGA = identify(unknowns, GA, NGA, STG)
print("GA: {0}".format(GA))
return GA
def stripSTG(STG):
for node in list(STG.keys()):
inputs, outputs = STG[node]
newInputs = list(inputs.keys())
newOutputs = list(outputs.keys())
STG[node] = (newInputs, newOutputs)
return STG
def removeNode(unknowns, GA, NGA, toRemove, STG):
inNodes = STG[toRemove][0]
outNodes = STG[toRemove][1]
print("n In: {0}".format(len(inNodes)))
print("n Out: {0}".format(len(outNodes)))
for inn in inNodes:
if inn != toRemove:
# Upstream passing tags
if inn in GA.keys():
oldTags = GA[inn]
newTags = oldTags + unknowns[toRemove]
GA[inn] = newTags
if inn in NGA.keys():
oldTags = NGA[inn]
newTags = oldTags + unknowns[toRemove]
NGA[inn] = newTags
if inn in unknowns.keys():
oldTags = unknowns[inn]
newTags = oldTags + unknowns[toRemove]
unknowns[inn] = newTags
# Tags passed, happy days.
inNodeIn, inNodeOut = STG[
inn
] # Outputs of This particular input (looping over). Need to add outputs to it
for out in outNodes: # My outputs
if (
not out in inNodeOut
): # If this output is also an output of the input node
inNodeOut = inNodeOut + [out]
# By now all my outputs should be added to the outputs of the input.
inNodeOut.remove(toRemove) # Remove self from the outputs of the input.
STG[inn] = (inNodeIn, inNodeOut) # Push it in to the STG!
# Adding my inputs to the inputs of my outputs. Also removing self.
for out in outNodes: # Going through my outputs
if out != toRemove:
outNodesIn, outNodesOut = STG[
out
] # The input of the outputs being considered.
for inn in inNodes: # Going through my inputs
if not inn in outNodesIn: # If my input isn't already there
outNodesIn = outNodesIn + [inn] # Add the input
outNodesIn.remove(toRemove) # Removing self aswell
STG[out] = (outNodesIn, outNodesOut) # Finalize.
del STG[toRemove] # Removing self from STG.
del unknowns[toRemove]
return unknowns, GA, NGA, STG
def identify(
unknowns, GA, NGA, STG
):  # Identifies guaranteed attractors, guaranteed non-attractors, and unknown nodes.
for unTuple in list(unknowns.keys()):
inNodes, outNodes = STG[unTuple]
if len(inNodes) == 0:
NGA[unTuple] = unknowns[unTuple]
del unknowns[unTuple]
        elif len(outNodes) == 1 and list(outNodes)[0] == unTuple:
GA[unTuple] = unknowns[unTuple]
del unknowns[unTuple]
return unknowns, GA, NGA
def permutationWrapper(possibleStates):
acc = []
stateSoFar = []
prob = 1
acc = nodePermutations(stateSoFar, possibleStates, acc, prob)
return acc
# Recursively does some jank
def nodePermutations(stateSoFar, possibleStates, acc, prob):
if len(possibleStates) == 0:
return acc + [(stateSoFar, prob)]
else:
for i in range(0, len(possibleStates[0])):
# Tag along the state currently being explored
tempSSF = stateSoFar + possibleStates[0][i][0]
# Run the same function again, without the first possible state which is being explored
acc = nodePermutations(
tempSSF, possibleStates[1:], acc, prob * possibleStates[0][i][1]
)
return acc
# Graph that represents the State Transition Graph of a Boolean Network.
class StateGraph:
def __init__(self):
self.nodes = []
self.edges = []
self.G = nx.DiGraph()
def addState(self, node):
if node not in self.nodes:
self.nodes += [node]
self.G.add_node(str(node))
def addEdge(self, edge):
self.edges += [edge]
self.G.add_edge(str(edge[0]), str(edge[1]))
def printStateGraph(self):
pos = nx.spring_layout(self.G, k=0.9)
# nx.draw_networkx_nodes(self.G, pos, node_size=500)
# nx.draw_networkx_edges(self.G, pos)
labels = {}
for node in self.G.nodes():
labels[node] = node
# nx.draw_networkx_labels(self.G, pos, labels, font_size=8)
# plt.savefig("ST_graph.png")
return self.G
def getAttractors(self, verbal=False):
attractors = attractorSetFinder(self.nodes, self.edges, verbal)
return attractors
def attractorSetFinder(nodes, edges, verbal):
attractors = list()
n_nodes = len(nodes)
# row-from node, col-to Node
transMatrix = np.zeros((n_nodes, n_nodes), dtype=float)
i = 0 if verbal else False
for edge in edges:
if verbal:
print(str(i) + " out of " + str(len(edges)), end="\r")
i += 1
inNode = binListToInt(edge[0])
outNode = binListToInt(edge[1])
transMatrix[inNode][outNode] = edge[2]
if verbal:
print()
adjMatrix = transMatrix > 0
flags = computeFlags(adjMatrix)
tags = copy.deepcopy(nodes)
for i in range(0, len(nodes)):
tags[i] = [nodes[i]]
simplified = checkSimplified(flags)
while not simplified:
# Picking a node index
i = pickUnconfirmedNode(flags)
# Removing the node
nodesIn = adjMatrix.T[i]
nodesOut = adjMatrix[i]
for j in range(0, len(nodesIn)):
# If the considered node is an input node
if nodesIn[j] == True and not tags[j] == None and not j == i:
# Add the current tag to the input node's tags
tags[j] = joinTags(tags[j], tags[i])
# Connect the input node's outputs to current nodes outputs
inNodeIndex = binListToInt(tags[j][0])
adjMatrix[inNodeIndex] = np.logical_or(adjMatrix[inNodeIndex], nodesOut)
adjMatrix[inNodeIndex][i] = False
for j in range(0, len(nodesOut)):
if nodesOut[j] == True and not tags[j] == None and not j == i:
adjMatrix[i][j] = False
adjMatrix[i][i] = False
tags[i] = None
flags = computeFlags(adjMatrix)
simplified = checkSimplified(flags)
if verbal:
n_sorted = countSimplified(flags)
print(str(n_sorted) + " out of " + str(flags.shape[0]), end="\r")
for i in range(0, len(flags)):
if flags[i][0] and not flags[i][1]:
attractors.append(tags[i])
if verbal:
print("")
return attractors
def computeFlags(adjMatrix):
# Flags
    # First column true means that it has no outputs besides itself;
    # second flag means that it has no inputs besides itself, and that it has outputs besides itself
flags = np.ones((adjMatrix.shape[0], 2), dtype=bool)
# Iterate over each row of the matrix, which means iterating over each of
# the output vector for each node.
for i in range(len(adjMatrix)):
for j in range(len(adjMatrix[i])):
# Found an output node which is not itself
if adjMatrix[i][j] == True and not i == j:
flags[i][0] = False
    # Same, but with columns, which shows the inputs for node i
for i in range(len(adjMatrix)):
# print(adjMatrix.T[i].astype(int))
for j in range(len(adjMatrix.T[i])):
if adjMatrix.T[i][j] == True and not i == j:
# Found an input node which is not itself
flags[i][1] = False
return flags
def pickUnconfirmedNode(flags):
for i in range(0, len(flags)):
if not flags[i][0] and not flags[i][1]:
return i
return None
def checkSimplified(flags):
nodeSorted = np.logical_or(flags.T[0], flags.T[1]).T
allSorted = True
for node in nodeSorted:
allSorted = allSorted and node
return allSorted
def countSimplified(flags):
nodeSorted = np.logical_or(flags.T[0], flags.T[1]).T
n_sorted = 0
for node in nodeSorted:
if node:
n_sorted += 1
return n_sorted
def joinTags(firstTags, secondTags):
for tag in secondTags:
if tag not in firstTags:
firstTags = firstTags + [tag]
return firstTags
def binListToInt(inList, b=2):
    # Digits are stored backwards (most significant values on the right),
    # so place values grow with the index.
out = 0
for i in range(len(inList)):
out += (b**i) * inList[i]
return out
def runAllFunctions(self):
"""
Having the current state as input, computes the output for this node under each of the functions,
and then adds up the probabilities for ending up at that node state
"""
    if self.functions is not None:
outputValues = [([0], 0), ([1], 0)]
inputValues = []
for i in range(0, len(self.inputNodes)):
inputValues += [int(self.inputNodes[i].value)]
for function in self.functions:
currentOutput = int(function[1](inputValues))
if currentOutput not in outputValues:
outputValues[currentOutput] = (
[currentOutput],
function[0] + outputValues[currentOutput][1],
)
return outputValues
else:
return [([0], 0.5), ([1], 0.5)]
| StarcoderdataPython |
178843 | import requests
import re
from bs4 import BeautifulSoup
from tika import parser
import json
# Subroutine for webpages
# Proven that it works for facebook (in current state)
def webpage(url):
r = requests.get(url)
# Parse through the text to find "terms of usage"
    try:
        location = [m.start() for m in re.finditer('terms of service', r.text.lower())]
        soup = r.text[location[0]:]
    except IndexError:
        return 2
location = [m.start() for m in re.finditer('<script>', soup.lower())]
soup_stripped = soup[:location[0]]
soup_stripped = BeautifulSoup(soup_stripped, "html.parser").get_text()
# Prints our desired piece of code!!!
print(soup_stripped)
# All text.
return soup_stripped
def pdf(file_location):
raw = parser.from_file(file_location)
print(raw['content'])
return raw['content']
def tester():
data = {}
data['key'] = []
data['key'].append({
'rating': '6.44',
'potential_options':[{
'wikipedia': '10',
'cbc': '9.23',
'tvo': '9.22'
}]
})
    return data
| StarcoderdataPython |
3367097 | <gh_stars>0
# -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""'logging buckets create' command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.logging import util
from googlecloudsdk.calliope import base
class Create(base.CreateCommand):
"""Creates a bucket.
"""
@staticmethod
def Args(parser):
"""Register flags for this command."""
parser.add_argument(
'BUCKET_ID', help='ID of the bucket to create.')
parser.add_argument(
'--display-name',
help='A textual name to display for the bucket.')
parser.add_argument(
'--description',
help='A textual description for the bucket.')
parser.add_argument(
'--retention-days', type=int,
help='The period logs will be retained, after which logs will '
'automatically be deleted. The default is 30 days.')
util.AddBucketLocationArg(
parser, True,
'Location in which to create the bucket. Once the bucket is created, '
'the location cannot be changed.')
def Run(self, args):
"""This is what gets called when the user runs this command.
Args:
args: an argparse namespace. All the arguments that were provided to this
command invocation.
Returns:
The created bucket.
"""
bucket_data = {}
if args.IsSpecified('retention_days'):
bucket_data['retentionDays'] = args.retention_days
if args.IsSpecified('display_name'):
bucket_data['displayName'] = args.display_name
if args.IsSpecified('description'):
bucket_data['description'] = args.description
return util.GetClient().projects_locations_buckets.Create(
util.GetMessages().LoggingProjectsLocationsBucketsCreateRequest(
bucketId=args.BUCKET_ID,
parent=util.CreateResourceName(
util.GetProjectResource(args.project).RelativeName(),
'locations',
args.location),
logBucket=util.GetMessages().LogBucket(**bucket_data)))
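# Example invocation sketch (flags inferred from Args above; the bucket ID and
# values are illustrative):
#   gcloud logging buckets create my-bucket --location=global --retention-days=30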
| StarcoderdataPython |
1791914 | <gh_stars>1-10
from torch.utils.data import Dataset
class HumanPoseEstimationDataset(Dataset):
"""
HumanPoseEstimationDataset class.
Generic class for HPE datasets.
"""
def __init__(self):
pass
def __len__(self):
pass
def __getitem__(self, item):
pass
def evaluate_accuracy(self, output, target, params=None):
pass
| StarcoderdataPython |
81355 | # -*- encoding: utf-8 -*-
"""
Query the S3 bucket containing Sierra progress reports, and log
a report in Slack
"""
import datetime as dt
import itertools
import json
import os
import boto3
import requests
from interval_arithmetic import combine_overlapping_intervals, get_intervals
def get_matching_s3_keys(s3_client, bucket, prefix):
"""
Generate the keys in an S3 bucket that match a given prefix.
"""
paginator = s3_client.get_paginator("list_objects")
for page in paginator.paginate(Bucket=bucket, Prefix=prefix):
print("Got page of %d objects from S3…" % len(page["Contents"]))
for s3_object in page["Contents"]:
yield s3_object["Key"]
def build_report(s3_client, bucket, resource_type):
"""
Generate a complete set of covering windows for a resource type.
"""
keys = get_matching_s3_keys(
s3_client=s3_client, bucket=bucket, prefix=f"windows_{resource_type}_complete"
)
yield from get_intervals(keys=keys)
def chunks(iterable, chunk_size):
return (iterable[i : i + chunk_size] for i in range(0, len(iterable), chunk_size))
class IncompleteReportError(Exception):
pass
def process_report(s3_client, bucket, resource_type):
# Start by consolidating all the windows as best we can. This is an
# incremental consolidation: it consolidates the first 1000 keys, then
# the second 1000 keys, and so on.
#
# If there are more keys than it can consolidate in a single invocation,
# it will at least have made progress and be less likely to time out
# next time.
consolidate_windows(s3_client=s3_client, bucket=bucket, resource_type=resource_type)
for iv in build_report(s3_client, bucket, resource_type):
# If the first gap is more than 6 hours old, we might have a
# bug in the Sierra reader. Raise an exception.
hours = (dt.datetime.now() - iv.end).total_seconds() / 3600
if hours > 6:
raise IncompleteReportError(resource_type)
# https://stackoverflow.com/q/6822725/1558022
def window(seq, n=2):
"""
Returns a sliding window (of width n) over data from the iterable
s -> (s0,s1,...s[n-1]), (s1,s2,...,sn),
"""
it = iter(seq)
result = tuple(itertools.islice(it, n))
if len(result) == n:
yield result
for elem in it:
result = result[1:] + (elem,)
yield result
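# For example, list(window([1, 2, 3, 4])) == [(1, 2), (2, 3), (3, 4)].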
def prepare_missing_report(s3_client, bucket, resource_type):
"""
Generate a report for windows that are missing.
"""
yield ""
yield f"*missing {resource_type} windows*"
for iv1, iv2 in window(build_report(s3_client, bucket, resource_type)):
missing_start = iv1.end
missing_end = iv2.start
if missing_start.date() == missing_end.date():
yield f"{missing_start.date()}: {missing_start.strftime('%H:%M:%S')} — {missing_end.strftime('%H:%M:%S')}"
else:
yield f"{missing_start.strftime('%Y-%m-%d %H:%M:%S')} — {missing_end.strftime('%Y-%m-%d %H:%M:%S')}"
yield ""
def consolidate_windows(s3_client, bucket, resource_type):
paginator = s3_client.get_paginator("list_objects")
prefix = f"windows_{resource_type}_complete"
for page in paginator.paginate(Bucket=bucket, Prefix=prefix):
keys = [s3_obj["Key"] for s3_obj in page["Contents"]]
intervals = get_intervals(keys=keys)
for iv, running in combine_overlapping_intervals(intervals):
if len(running) > 1:
# Create a consolidated marker that represents the entire
# interval. The back-history of Sierra includes >100k windows,
# so combining them makes reporting faster on subsequent runs.
start_str = iv.start.strftime("%Y-%m-%dT%H-%M-%S.%f+00-00")
end_str = iv.end.strftime("%Y-%m-%dT%H-%M-%S.%f+00-00")
consolidated_key = (
f"windows_{resource_type}_complete/{start_str}__{end_str}"
)
s3_client.put_object(Bucket=bucket, Key=consolidated_key, Body=b"")
# Then clean up the individual intervals that made up the set.
# We sacrifice granularity for performance.
for sub_ivs in chunks(running, chunk_size=1000):
keys = [s.key for s in sub_ivs if s.key != consolidated_key]
s3_client.delete_objects(
Bucket=bucket, Delete={"Objects": [{"Key": k} for k in keys]}
)
def main(event=None, _ctxt=None):
s3_client = boto3.client("s3")
bucket = os.environ["BUCKET"]
slack_webhook = os.environ["SLACK_WEBHOOK"]
errors = []
error_lines = []
for resource_type in ("bibs", "items", "holdings", "orders"):
try:
process_report(
s3_client=s3_client, bucket=bucket, resource_type=resource_type
)
except IncompleteReportError:
error_lines.extend(
prepare_missing_report(
s3_client=s3_client, bucket=bucket, resource_type=resource_type
)
)
errors.append(resource_type)
if errors:
error_lines.insert(0, "There are gaps in the %s data." % "/".join(errors))
error_lines.append(
"You can fix this by running `$ python sierra_adapter/build_missing_windows.py` in the root of the catalogue repo."
)
slack_data = {
"username": "sierra-reader",
"icon_emoji": ":sierra:",
"attachments": [
{
"color": "#8B4F30",
"text": "\n".join(error_lines).strip(),
"mrkdwn_in": ["text"],
}
],
}
resp = requests.post(
slack_webhook,
data=json.dumps(slack_data),
headers={"Content-Type": "application/json"},
)
resp.raise_for_status()
| StarcoderdataPython |
3377855 | <reponame>Krish-sysadmin/DjangoPollsApp
from django.apps import AppConfig
class StartingpageConfig(AppConfig):
name = 'startingpage'
| StarcoderdataPython |
3396827 | <filename>registry/donor/models.py<gh_stars>0
from registry.extensions import db
from registry.list.models import DonationCenter, Medals
class Batch(db.Model):
__tablename__ = "batches"
id = db.Column(db.Integer, primary_key=True)
donation_center = db.Column(db.ForeignKey(DonationCenter.id))
imported_at = db.Column(db.DateTime, nullable=False)
def __repr__(self):
return f"<Batch({self.id}) from {self.imported_at}>"
class Record(db.Model):
__tablename__ = "records"
id = db.Column(db.Integer, primary_key=True)
batch = db.Column(db.ForeignKey(Batch.id), nullable=False)
rodne_cislo = db.Column(db.String(10), index=True, nullable=False)
first_name = db.Column(db.String, nullable=False)
last_name = db.Column(db.String, nullable=False)
address = db.Column(db.String, nullable=False)
city = db.Column(db.String, nullable=False)
postal_code = db.Column(db.String(5), nullable=False)
kod_pojistovny = db.Column(db.String(3), nullable=False)
donation_count = db.Column(db.Integer, nullable=False)
def __repr__(self):
return f"<Record({self.id}) {self.rodne_cislo} from Batch {self.batch}>"
@classmethod
    def from_list(cls, row):
        return cls(
            batch=row[0],
            rodne_cislo=row[1],
            first_name=row[2],
            last_name=row[3],
            address=row[4],
            city=row[5],
            postal_code=row[6],
            kod_pojistovny=row[7],
            donation_count=row[8],
        )
class AwardedMedals(db.Model):
__tablename__ = "awarded_medals"
rodne_cislo = db.Column(db.String(10), index=True, nullable=False)
medal = db.Column(db.ForeignKey(Medals.id))
    __table_args__ = (db.PrimaryKeyConstraint(rodne_cislo, medal),)
class DonorsOverview(db.Model):
__tablename__ = "donors_overview"
rodne_cislo = db.Column(db.String(10), primary_key=True)
first_name = db.Column(db.String, nullable=False)
last_name = db.Column(db.String, nullable=False)
address = db.Column(db.String, nullable=False)
city = db.Column(db.String, nullable=False)
postal_code = db.Column(db.String(5), nullable=False)
kod_pojistovny = db.Column(db.String(3), nullable=False)
donation_count_fm = db.Column(db.Integer, nullable=False)
donation_count_fm_bubenik = db.Column(db.Integer, nullable=False)
donation_count_trinec = db.Column(db.Integer, nullable=False)
donation_count_manual = db.Column(db.Integer, nullable=False)
donation_count_total = db.Column(db.Integer, nullable=False)
awarded_medal_br = db.Column(db.Boolean, nullable=False)
awarded_medal_st = db.Column(db.Boolean, nullable=False)
awarded_medal_zl = db.Column(db.Boolean, nullable=False)
awarded_medal_kr3 = db.Column(db.Boolean, nullable=False)
awarded_medal_kr2 = db.Column(db.Boolean, nullable=False)
awarded_medal_kr1 = db.Column(db.Boolean, nullable=False)
def __repr__(self):
return f"<DonorsOverview ({self.rodne_cislo})>"
| StarcoderdataPython |
3386864 | <filename>tests/compatability/testng/before_and_after/src/python/run_tests.py
import BeforeAndAfter
from proboscis import TestProgram
if __name__ == '__main__':
# Run Proboscis and exit.
TestProgram().run_and_exit()
| StarcoderdataPython |
4801098 | # TG-UserBot - A modular Telegram UserBot script for Python.
# Copyright (C) 2019 Kandarp <https://github.com/kandnub>
#
# TG-UserBot is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# TG-UserBot is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with TG-UserBot. If not, see <https://www.gnu.org/licenses/>.
# This is based on the parser of https://github.com/mojurasu/kantek/
import re
from typing import Dict, List, Tuple, Union
KWARGS = re.compile(
r'(?<!\S)' # Make sure the key starts after a whitespace
r'(?:(?P<q>\'|\")?)(?P<key>(?(q).+?|(?!\d)\w+?))(?(q)(?P=q))'
r'(?::(?!//)|=)\s?'
r'(?P<val>\[.+?\]|(?P<q1>\'|\").+?(?P=q1)|\S+)')
ARGS = re.compile(r'(?:(?P<q>\'|\"))(.+?)(?:(?P=q))')
BOOL_MAP = {
'false': False,
'true': True,
}
Value = Union[int, str, float, list]
KeywordArgument = Union[Value, range, List[Value]]
async def _parse_arg(val: str) -> Union[int, str, float]:
val = val.strip()
if re.match(r'^-?\d+$', val):
return int(val)
try:
return float(val)
except ValueError:
pass
if isinstance(val, str):
if re.search(r'^\[.*\]$', val):
val = re.sub(r'[\[\]]', '', val).split(',')
val = [await _parse_arg(v.strip()) for v in val]
else:
val = BOOL_MAP.get(val.lower(), val)
if isinstance(val, str):
val = re.sub(r'(?<!\\), ?$', '', val)
return val
async def parse_arguments(
self,
arguments: str) -> Tuple[List[Value], Dict[str, KeywordArgument]]:
keyword_args = {}
args = []
for match in KWARGS.finditer(arguments):
key = match.group('key')
val = await _parse_arg(re.sub(r'[\'\"]', '', match.group('val')))
keyword_args.update({key: val})
arguments = KWARGS.sub('', arguments)
for val in ARGS.finditer(arguments):
args.append(await _parse_arg(val.group(2)))
arguments = ARGS.sub('', arguments)
for val in re.findall(r'([^\r\n\t\f\v ,]+|\[.*\])', arguments):
parsed = await _parse_arg(val)
if parsed:
args.append(parsed)
return args, keyword_args
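# Illustrative parse (input and names are hypothetical): the argument string
# 'limit=5 "hello world" tags:[a, b]' yields args == ['hello world']
# and keyword_args == {'limit': 5, 'tags': ['a', 'b']}.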
| StarcoderdataPython |
119091 | import unittest
class TestCanary(unittest.TestCase):
def test_add_one_two(self):
self.assertEqual(3, 1 + 2)
| StarcoderdataPython |
4827481 | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from paddle.distributed.fleet.utils.fs import HDFSClient
import time
import paddle.distributed.fleet as fleet
import socket
import sys
import hashlib
from .barrier_server_impl import BarrierServer
from .barrier_client_impl import BarrierClient
from .env import is_first_worker, get_node_info
import sysconfig
import multiprocessing
import yaml
import os
def net_is_used(port, ip='127.0.0.1'):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.connect((ip, port))
s.shutdown(2)
return True
except:
return False
def get_md5(file_path):
hash_md5 = hashlib.md5()
with open(file_path, "rb") as f:
for chunk in iter(lambda: f.read(4096), b""):
hash_md5.update(chunk)
return hash_md5.hexdigest()
def check_exists(filelist, local_path):
with open("{}/filelist.txt".format(local_path), 'r') as fin:
for line in fin:
current_file = line.split(' ')[0]
current_md5 = line.split(' ')[1].strip()
if current_file in filelist:
if (not os.path.exists("{}/{}".format(
local_path, current_file))) or get_md5("{}/{}".format(
local_path, current_file)) != current_md5:
return True
return False
def get_file_shard(node_id, node_num, local_path):
while not os.path.exists('{}/filelist.txt'.format(local_path)):
time.sleep(3)
full_list = []
with open("{}/filelist.txt".format(local_path), 'r') as fin:
for line in fin:
full_list.append(line.split(' ')[0])
return full_list[node_id::node_num]
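# Round-robin sharding: with 10 files and node_num=3, node 0 gets files
# 0, 3, 6, 9; node 1 gets 1, 4, 7; node 2 gets 2, 5, 8.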
class Downloader(object):
def __init__(self):
self.need_barrier = False
def download_from_hdfs(self,
fs_yaml=None,
local_path="./",
shard_num=-1,
shard_id=-1,
process_num=10):
"""
Download from HDFS.
Connection and path settings are read from the fs_yaml file.
TODO: add example and yaml argument fields introduction
"""
def multi_download(client,
hdfs_path,
local_path,
filelist,
process_num=process_num):
def _subprocess_download(files):
for ff in files:
client.download('{}/{}'.format(hdfs_path, ff),
'{}/{}'.format(local_path, ff))
cmd = "tar -xf {}/{} -C {}".format(local_path, ff,
local_path)
os.system(cmd)
dir_per_process = len(filelist) / process_num
procs = []
for i in range(process_num):
process_filelist = filelist[i::process_num]
p = multiprocessing.Process(
target=_subprocess_download, args=(process_filelist, ))
procs.append(p)
p.start()
for proc in procs:
proc.join()
if os.environ.get('PADDLE_TRAINER_ENDPOINTS') is not None:
endpoints = os.environ.get('PADDLE_TRAINER_ENDPOINTS').split(",")
current_endpoint = os.environ.get('PADDLE_CURRENT_ENDPOINT')
self.server_endpoint = endpoints[0]
self.server_port = self.server_endpoint.split(":")[1]
self.barrier_server = BarrierServer()
if current_endpoint == self.server_endpoint:
while net_is_used(self.server_port):
time.sleep(3)
self.barrier_server.start_server_in_background(
endpoint=self.server_endpoint, worker_endpoints=endpoints)
self.need_barrier = True
if is_first_worker():
if not os.path.exists(local_path):
os.system('mkdir {}'.format(local_path))
_, ext = os.path.splitext(fs_yaml)
assert ext in ['.yml', '.yaml'], "only support yaml files for now"
with open(fs_yaml) as f:
cfg = yaml.load(f, Loader=yaml.Loader)
if "hadoop_home" in cfg:
self.hadoop_home = cfg["hadoop_home"]
elif "HADOOP_HOME" in os.environ:
self.hadoop_home = os.environ['HADOOP_HOME']
elif os.system('which hadoop') == 0:
path = os.popen("which hadoop").readlines()[0].rstrip()
self.hadoop_home = os.path.dirname(os.path.dirname(path))
if self.hadoop_home:
print("HADOOP_HOME: " + self.hadoop_home)
if "fs.default.name" in cfg and "hadoop.job.ugi" in cfg:
self.hdfs_configs = {
"fs.default.name": cfg["fs.default.name"],
"hadoop.job.ugi": cfg["hadoop.job.ugi"]
}
java_home = ''
if "java_home" in cfg:
java_home = cfg['java_home']
os.environ['JAVA_HOME'] = java_home
if "data_path" in cfg:
hdfs_path = cfg["data_path"]
else:
raise Exception("ERROR: Please figure your data path in AFS.")
client = HDFSClient(self.hadoop_home, self.hdfs_configs)
if is_first_worker():
if not (client.is_exist('{}/meta.txt'.format(hdfs_path)) and
client.is_exist('{}/filelist.txt'.format(hdfs_path))):
raise Exception(
"ERROR: Your data dir should include filelist.txt and meta.txt"
)
if not os.path.exists('{}/filelist.txt'.format(local_path)):
client.download('{}/filelist.txt'.format(hdfs_path),
'{}/filelist.txt'.format(local_path))
if not os.path.exists('{}/meta.txt'.format(local_path)):
client.download('{}/meta.txt'.format(hdfs_path),
'{}/meta.txt'.format(local_path))
with open('{}/meta.txt'.format(local_path), 'r') as fin:
for line in fin:
current_file = line.strip()
if not os.path.exists('{}/{}'.format(local_path,
current_file)):
client.download(
'{}/{}'.format(hdfs_path, current_file),
'{}/{}'.format(local_path, current_file))
if shard_num > 0:
            assert shard_id >= 0, (
                "Please provide worker index by fleet.worker_index() if you "
                "want to download sharded data on each machine"
            )
self.filelist = get_file_shard(shard_id, shard_num, local_path)
need_download = check_exists(self.filelist, local_path)
if need_download:
multi_download(client, hdfs_path, local_path, self.filelist)
else:
if is_first_worker():
self.filelist = get_file_shard(0, 1, local_path)
need_download = check_exists(self.filelist, local_path)
if need_download:
multi_download(client, hdfs_path, local_path,
self.filelist)
if self.need_barrier:
client = BarrierClient()
client.server_endpoint = self.server_endpoint
client.my_endpoint = os.environ.get('PADDLE_CURRENT_ENDPOINT')
client.connect()
client.barrier()
if client.my_endpoint == self.server_endpoint:
time.sleep(10)
self.barrier_server.close_server()
return local_path
def download_from_bos(self,
fs_yaml=None,
local_path="./",
shard_num=-1,
shard_id=-1,
process_num=10):
def multi_download(bos_path,
local_path,
filelist,
process_num=process_num):
def _subprocess_download(files):
for ff in files:
os.system("wget -q -P {} --no-check-certificate {}/{}".
format(local_path, bos_path, ff))
cmd = "tar -xf {}/{} -C {}".format(local_path, ff,
local_path)
os.system(cmd)
dir_per_process = len(filelist) / process_num
procs = []
for i in range(process_num):
process_filelist = filelist[i::process_num]
p = multiprocessing.Process(
target=_subprocess_download, args=(process_filelist, ))
procs.append(p)
p.start()
for proc in procs:
proc.join()
if os.environ.get('PADDLE_TRAINER_ENDPOINTS') is not None:
endpoints = os.environ.get('PADDLE_TRAINER_ENDPOINTS').split(",")
current_endpoint = os.environ.get('PADDLE_CURRENT_ENDPOINT')
self.server_endpoint = endpoints[0]
self.server_port = self.server_endpoint.split(":")[1]
self.barrier_server = BarrierServer()
if current_endpoint == self.server_endpoint:
while net_is_used(self.server_port):
time.sleep(3)
self.barrier_server.start_server_in_background(
endpoint=self.server_endpoint, worker_endpoints=endpoints)
self.need_barrier = True
if is_first_worker():
if not os.path.exists(local_path):
os.system('mkdir {}'.format(local_path))
        if fs_yaml is None:
            raise Exception(
                "Error: you should provide a yaml to download data from bos, you can find yaml examples in the following links:"
            )
        yaml_file = fs_yaml.split('/')[-1]
        if not os.path.exists(yaml_file):
if is_first_worker():
os.system("wget -q --no-check-certificate {}".format(fs_yaml))
if not os.path.exists(yaml_file):
raise Exception(
"Error: If you provide a url, please check if your url is valid and is able to access; otherwise, please check if the yaml file is exists in your local path."
)
else:
while not os.path.exists(yaml_file):
time.sleep(1)
_, ext = os.path.splitext(fs_yaml)
assert ext in ['.yml', '.yaml'], "only support yaml files for now"
with open(yaml_file) as f:
cfg = yaml.load(f, Loader=yaml.Loader)
if 'bos_path' in cfg:
bos_path = cfg["bos_path"]
else:
raise Exception("ERROR: Please figure your data path in BOS.")
if is_first_worker():
try:
os.system(
"wget -q -P {} --no-check-certificate {}/filelist.txt".
format(local_path, bos_path))
os.system("wget -q -P {} --no-check-certificate {}/meta.txt".
format(local_path, bos_path))
except:
raise Exception(
"ERROR: Your data dir should include filelist.txt and meta.txt"
)
with open('{}/meta.txt'.format(local_path), 'r') as fin:
for line in fin:
current_file = line[:-1]
os.system("wget -q -P {} --no-check-certificate {}/{}".
format(local_path, bos_path, current_file))
if shard_num > 0:
            assert shard_id >= 0, (
                "Please provide worker index by fleet.worker_index() if you "
                "want to download sharded data on each machine"
            )
self.filelist = get_file_shard(shard_id, shard_num, local_path)
need_download = check_exists(self.filelist, local_path)
if need_download:
multi_download(bos_path, local_path, self.filelist)
else:
if is_first_worker():
self.filelist = get_file_shard(0, 1, local_path)
need_download = check_exists(self.filelist, local_path)
if need_download:
multi_download(bos_path, local_path, self.filelist)
if self.need_barrier:
client = BarrierClient()
client.server_endpoint = self.server_endpoint
client.my_endpoint = os.environ.get('PADDLE_CURRENT_ENDPOINT')
client.connect()
client.barrier()
if client.my_endpoint == self.server_endpoint:
time.sleep(10)
self.barrier_server.close_server()
return local_path
| StarcoderdataPython |
495 | <gh_stars>10-100
#!/usr/bin/env python
from __future__ import unicode_literals
# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from haruhi_dl.aes import aes_decrypt, aes_encrypt, aes_cbc_decrypt, aes_cbc_encrypt, aes_decrypt_text
from haruhi_dl.utils import bytes_to_intlist, intlist_to_bytes
import base64
# the encrypted data can be generate with 'devscripts/generate_aes_testdata.py'
class TestAES(unittest.TestCase):
def setUp(self):
self.key = self.iv = [0x20, 0x15] + 14 * [0]
self.secret_msg = b'Secret message goes here'
def test_encrypt(self):
msg = b'message'
key = list(range(16))
encrypted = aes_encrypt(bytes_to_intlist(msg), key)
decrypted = intlist_to_bytes(aes_decrypt(encrypted, key))
self.assertEqual(decrypted, msg)
def test_cbc_decrypt(self):
data = bytes_to_intlist(
b"\x97\x92+\xe5\x0b\xc3\x18\x91ky9m&\xb3\xb5@\xe6'\xc2\x96.\xc8u\x88\xab9-[\x9e|\xf1\xcd"
)
decrypted = intlist_to_bytes(aes_cbc_decrypt(data, self.key, self.iv))
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
def test_cbc_encrypt(self):
data = bytes_to_intlist(self.secret_msg)
encrypted = intlist_to_bytes(aes_cbc_encrypt(data, self.key, self.iv))
self.assertEqual(
encrypted,
b"\x97\x92+\xe5\x0b\xc3\x18\x91ky9m&\xb3\xb5@\xe6'\xc2\x96.\xc8u\x88\xab9-[\x9e|\xf1\xcd")
def test_decrypt_text(self):
password = intlist_to_bytes(self.key).decode('utf-8')
encrypted = base64.b64encode(
intlist_to_bytes(self.iv[:8])
+ b'\x17\x15\x93\xab\x8d\x80V\xcdV\xe0\t\xcdo\xc2\xa5\xd8ksM\r\xe27N\xae'
).decode('utf-8')
decrypted = (aes_decrypt_text(encrypted, password, 16))
self.assertEqual(decrypted, self.secret_msg)
password = intlist_to_bytes(self.key).decode('utf-8')
encrypted = base64.b64encode(
intlist_to_bytes(self.iv[:8])
+ b'\x0b\xe6\xa4\xd9z\x0e\xb8\xb9\xd0\xd4i_\x85\x1d\x99\x98_\xe5\x80\xe7.\xbf\xa5\x83'
).decode('utf-8')
decrypted = (aes_decrypt_text(encrypted, password, 32))
self.assertEqual(decrypted, self.secret_msg)
if __name__ == '__main__':
unittest.main()
| StarcoderdataPython |
85663 | """Mark as module for PyTest."""
def left(string, seq=(' ', '\t', '\r', '\n')):
res = ""
for c in string:
if c in seq:
res += c
else:
break
return res
def right(string, seq=(' ', '\t', '\r', '\n')):
return left(reversed(string), seq)
import xml.etree.ElementTree as et
def isSVG(bdata):
tag = None
try:
parser = et.XMLPullParser(["start"])
parser.feed(bdata)
for event, el in parser.read_events():
tag = el.tag
break
except et.ParseError:
pass
return tag == '{http://www.w3.org/2000/svg}svg'
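# For example, isSVG(b'<svg xmlns="http://www.w3.org/2000/svg"/>') returns True.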
import os
def tabPathnames(names: list, skip=('undefined',)):
splitnames = [name.split(os.path.sep) for name in names]
mx = max([len(name) for name in splitnames])
tabs = {}
for s in skip:
i = -1
for o in range(names.count(s)):
i = names.index(s, i+1)
tabs[i] = s
for i in range(mx):
newnames = [os.path.sep.join(name[-i-1:]) for name in splitnames]
for n1 in range(len(newnames)):
if n1 in tabs:
continue
fnd = False
for n2 in range(len(newnames)):
if n1 == n2 or n2 in tabs:
continue
if newnames[n1] == newnames[n2]:
fnd = True
break
if not fnd:
tabs[n1] = newnames[n1]
nlist = names[:]
for idx in tabs:
nlist[idx] = tabs[idx]
return nlist
| StarcoderdataPython |
13155 | <reponame>jschmidtnj/cs584
#!/usr/bin/env python3
"""
decoder file
decoder class
"""
import tensorflow as tf
class BahdanauAttention(tf.keras.layers.Layer):
def __init__(self, units):
"""
attention layer from Bahdanau paper
"""
super().__init__()
self.w1 = tf.keras.layers.Dense(units)
self.w2 = tf.keras.layers.Dense(units)
self.vector = tf.keras.layers.Dense(1)
def call(self, query, values):
"""
get context and weights given query and values
"""
query_with_time_axis = tf.expand_dims(query, 1)
score = self.vector(tf.nn.tanh(
self.w1(query_with_time_axis) + self.w2(values)))
attention_weights = tf.nn.softmax(score, axis=1)
context_vector = tf.reduce_sum(attention_weights * values, axis=1)
return context_vector, attention_weights
class Decoder(tf.keras.Model):
def __init__(self, vocab_size, embedding_dimension, decoding_units, batch_size, gru: bool = True):
"""
decoder for attention model
"""
super().__init__()
self.batch_size = batch_size
self.decoding_units = decoding_units
self.embedding = tf.keras.layers.Embedding(
vocab_size, embedding_dimension)
if gru:
self.layer = tf.keras.layers.GRU(self.decoding_units,
return_sequences=True,
return_state=True,
recurrent_initializer='glorot_uniform')
else:
self.layer = tf.keras.layers.LSTM(self.decoding_units,
return_sequences=True,
return_state=True)
self.dense_layer = tf.keras.layers.Dense(vocab_size)
self.attention = BahdanauAttention(self.decoding_units)
def call(self, x, hidden, enc_output):
"""
given vector, hidden, and encoding, return new vector, state, and weights
"""
context_vector, attention_weights = self.attention(hidden, enc_output)
x = self.embedding(x)
x = tf.concat([tf.expand_dims(context_vector, 1), x], -1)
        # GRU returns (output, state); LSTM returns (output, h, c), so take the
        # hidden state generically from the second element.
        layer_out = self.layer(x)
        output, state = layer_out[0], layer_out[1]
output = tf.reshape(output, (-1, output.shape[2]))
x = self.dense_layer(output)
return x, state, attention_weights
| StarcoderdataPython |
1602501 | # coding=utf-8
from __future__ import absolute_import, division, print_function
import io
import sys
import os
import struct
from math import floor, log10
from datetime import datetime, timedelta
from collections import OrderedDict
import re
import errno
import dbf
HEADER_SIZE = 0x29
FOOTER_ENTRY_SIZE = 0x19
ENCRYPTED_IDENTIFIER = b'\xfe\xf2\xee'
IDENTIFIER = b'\xfe\xf2\xff'
def checksum_calc(string):
chunk = string[0]
chunka = (chunk & 0xF0) >> 4
chunkb = (chunk & 0x0F) >> 0
chunk = string[1]
chunkc = (chunk & 0xF0) >> 4
chunkd = (chunk & 0x0F) >> 0
for chunk in string:
chunka ^= (chunk & 0xF0) >> 4
chunkb ^= (chunk & 0x0F) >> 0
chunk = (chunka << 9) + ((chunka ^ chunkb) << 5)
chunka, chunkb, chunkc, chunkd = (
chunka ^ chunkb ^ chunkc ^ ((chunk & 0xF000) >> 12),
chunkd ^ ((chunk & 0x0F00) >> 8),
chunka ^ ((chunk & 0x00F0) >> 4),
chunka ^ chunkb ^ ((chunk & 0x000F) >> 0),
)
return (chunka << 12) + (chunkb << 8) + (chunkc << 4) + (chunkd << 0)
class BinaryFix(object):
def __init__(self, name, open_params):
self.fid = io.open(name, open_params)
def read(self, length=None):
return bytearray(self.fid.read(length))
def write(self, string):
self.fid.write(string)
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.fid.close()
def seek(self, pos, start_pos=0):
self.fid.seek(pos, start_pos)
def tell(self):
return self.fid.tell()
def open(name, open_params):
return BinaryFix(name, open_params)
class FXPName(object):
def __init__(self, name):
self.name = str(name)
def __repr__(self):
return self.name
class FXPAlias(FXPName):
def __repr__(self):
return self.name + '.'
class FXPNumber(object):
def __init__(self, number, digits, dec_digits=0):
self.number = number
digits -= dec_digits
digits = max(digits, 0)
self.format_string = '{{:0{}.{}f}}'.format(digits, dec_digits)
def __repr__(self):
return self.format_string.format(self.number)
class FXPCurrency(object):
def __init__(self, number, digits, dec_digits=0):
self.number = number
self.dec_digits = dec_digits
digits -= dec_digits
digits = max(digits, 0)
self.digits = digits
def __repr__(self):
base = 10**self.dec_digits
return '${}.{}'.format(self.number // base, self.number % base)
def round_sig(x, sig):
if x == 0:
return 0.
return round(x, sig-int(floor(log10(abs(x))))-1)
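# For example, round_sig(0.0012345, 3) -> 0.00123.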
def read_string(fid, *args):
return fid.read(read_ushort(fid)).decode('ISO-8859-1')
def read_single_quoted_string(fid, *args):
return FXPName("'{}'".format(read_string(fid)))
def read_double_quoted_string(fid, *args):
return FXPName('"{}"'.format(read_string(fid)))
def read_int8(fid, *args):
digits = fid.read(1)[0]
return FXPNumber(fid.read(1)[0], digits)
def read_int16(fid, *args):
digits = fid.read(1)[0]
return FXPNumber(read_short(fid), digits)
def read_int32(fid, *args):
digits = fid.read(1)[0]
return FXPNumber(read_int(fid), digits)
def read_double(fid, *args):
digits = fid.read(1)[0] - 1
dec_digits = fid.read(1)[0]
return FXPNumber(struct.unpack('<d', fid.read(8))[0], digits, dec_digits)
def read_float(fid, *args):
return FXPNumber(struct.unpack('<i', fid.read(4))[0] / 65536., 10, 4)
def read_currency(fid, *args):
digits = fid.read(1)[0] - 1
dec_digits = fid.read(1)[0]
return FXPCurrency(struct.unpack('<q', fid.read(8))[0], digits, dec_digits)
def read_datetimeexpr(fid):
days = struct.unpack('<d', fid.read(8))[0]
if not days:
return None
days = timedelta(days - 2440588)
    days = timedelta(seconds=round(days.total_seconds()))
return datetime.utcfromtimestamp(0) + days
def read_datetime(fid, *args):
dt = read_datetimeexpr(fid)
if not dt:
return '{ / / }'
return '{{^{}}}'.format(dt)
def read_date(fid, *args):
dt = read_datetimeexpr(fid)
if not dt:
return '{ / / }'
return '{{^{}}}'.format(dt.date())
def read_alias(fid, names, *args):
return FXPAlias(names[read_ushort(fid)])
def read_special_alias(fid, *args):
return FXPAlias(SPECIAL_NAMES[fid.read(1)[0]])
def read_system_alias(fid, *args):
return FXPAlias(SYSTEM_NAMES[fid.read(1)[0]])
def read_system_name(fid, *args):
return FXPName(SYSTEM_NAMES[fid.read(1)[0]])
def read_menu_system_name(fid, *args):
return FXPName(MENU_SYSTEM_NAMES[fid.read(1)[0]])
def read_name(fid, names):
return FXPName(names[read_ushort(fid)])
def read_raw(fid, length):
return ' '.join('{:02x}'.format(d) for d in fid.read(length))
def read_next_code(fid, names, expr=None):
if expr is None:
expr = []
codeval = fid.read(1)[0]
if codeval == END_EXPR:
return codeval
if codeval == PARAMETER_MARK:
code = codeval
elif codeval == SQL_SUBQUERY:
length = read_ushort(fid)
final = fid.tell() + length
fid.read(1)
code = FXPName('(SELECT {})'.format(parse_subline(fid, length, final, names, [])))
elif codeval in FUNCTIONS:
code = FUNCTIONS[codeval]
if codeval in (0xE5, 0xF6):
code = code(fid, names)
while expr and type(expr[-1]) is FXPAlias:
code = FXPName(repr(expr.pop()) + repr(code))
code = repr(code)
elif callable(code):
code = code(fid)
parameters = []
while expr:
parameter = expr.pop()
if parameter == PARAMETER_MARK:
break
parameters.insert(0, parameter)
code += '({})'.format(', '.join(repr(p) for p in parameters))
if codeval == 0xE5:
code = FXPAlias(code)
else:
code = FXPName(code)
elif codeval in OPERATORS:
code = OPERATORS[codeval]
if code[1] == 0:
return read_next_code(fid, names, expr)
elif code[1] > 0:
parameters = [p for p in reversed([expr.pop() for i in range(code[1])])]
if len(parameters) == 1:
code = FXPName('({} {})'.format(code[0], repr(parameters[0])))
else:
code = FXPName('({})'.format((' ' + code[0] + ' ').join(repr(p) for p in parameters)))
else:
code = code[0]
elif codeval in VALUES:
code = VALUES[codeval]
if callable(code):
code = code(fid, names)
if codeval in (0xF0, 0xF1):
return read_next_code(fid, names, expr)
if type(code) is FXPName:
while expr and type(expr[-1]) is FXPAlias:
code = FXPName(repr(expr.pop()) + repr(code))
elif type(code) is FXPAlias:
pass
elif type(code) in (float, int):
code = FXPName(repr(code))
else:
code = FXPName(code)
else:
raise KeyError(hex(codeval))
return code
def read_expr(fid, names, *args):
expr = []
while True:
code = read_next_code(fid, names, expr)
if code == END_EXPR:
break
if code:
expr.append(code)
if len(expr) == 1:
return expr[0]
return expr
def read_setcode(fid, length):
return ['SET ' + SETCODES[fid.read(1)[0]]]
def read_text(fid, length):
length = read_ushort(fid)
return [''.join(chr(x) for x in fid.read(length))]
class Token(object):
def __init__(self, tokenstr, tokenval):
self.str = tokenstr
self.val = tokenval
def __repr__(self):
return repr('{}({})'.format(self.str, self.val))
END_EXPR = 0xFD
PARAMETER_MARK = 0x43
SQL_SUBQUERY = 0xE8
SPECIAL_NAMES = {
0x0D: 'M',
}
SYSTEM_NAMES = {
0x00: '_ALIGNMENT',
0x05: '_PAGENO',
0x19: '_DBLCLICK',
0x1A: '_CALCVALUE',
0x1B: '_CALCMEM',
0x1C: '_DIARYDATE',
0x1D: '_CLIPTEXT',
0x1E: '_TEXT',
0x1F: '_PRETEXT',
0x20: '_TALLY',
0x21: '_CUROBJ',
0x22: '_MLINE',
0x23: '_THROTTLE',
0x24: '_GENMENU',
0x2F: '_STARTUP',
0x30: '_TRANSPORT',
0x31: '_BEAUTIFY',
0x32: '_DOS',
0x33: '_MAC',
0x34: '_UNIX',
0x35: '_WINDOWS',
0x36: '_SPELLCHK',
0x37: '_SHELL',
0x39: '_SCREEN',
0x3A: '_BUILDER',
0x3B: '_CONVERTER',
0x3C: '_WIZARD',
0x3D: '_TRIGGERLEVEL',
0x3E: '_ASCIICOLS',
0x3F: '_ASCIIROWS',
0x40: '_BROWSER',
0x41: '_SCCTEXT',
0x42: '_COVERAGE',
0x43: '_VFP',
0x44: '_GALLERY',
0x45: '_GETEXPR',
0x46: '_INCLUDE',
0x47: '_GENHTML',
0x49: '_SAMPLES',
0x4A: '_TASKLIST',
0x4B: '_ObjectBrowser',
0x4D: '_FOXCODE',
0x4E: '_FoxTask',
0x4F: '_CODESENSE',
0x50: '_INCSEEK',
0x51: '_PAGETOTAL',
0x52: '_FOXREF',
0x53: '_TOOLBOX',
0x54: '_TASKPANE',
0x55: '_REPORTBUILDER',
0x56: '_REPORTPREVIEW',
0x57: '_REPORTOUTPUT',
}
MENU_SYSTEM_NAMES = {
0x02: '_MSYSMENU',
0x03: '_MSM_SYSTM',
0x04: '_MSM_FILE',
0x05: '_MSM_EDIT',
0x08: '_MSM_PROG',
0x09: '_MSM_WINDO',
0x0A: '_MSM_VIEW',
0x0B: '_MSM_TOOLS',
0x0D: '_MSYSTEM',
0x10: '_MST_HPSCH',
0x12: '_MST_MACRO',
0x13: '_MST_SP100',
0x1B: '_MST_SP200',
0x1E: '_MST_TECHS',
0x1F: '_MST_ABOUT',
0x22: '_MST_VFPWEB',
0x23: '_MFILE',
0x24: '_MFI_NEW',
0x25: '_MFI_OPEN',
0x26: '_MFI_CLOSE',
0x27: '_MFI_CLALL',
0x28: '_MFI_SP100',
0x29: '_MFI_SAVE',
0x2A: '_MFI_SAVAS',
0x2B: '_MFI_REVRT',
0x2C: '_MFI_SP200',
0x2F: '_MFI_SYSPRINT',
0x31: '_MFI_SP300',
0x32: '_MFI_QUIT',
0x33: '_MFI_PREVU',
0x34: '_MFI_PGSET',
0x35: '_MFI_IMPORT',
0x36: '_MFI_EXPORT',
0x37: '_MFI_SP400',
0x38: '_MFI_SEND',
0x39: '_MEDIT',
0x3A: '_MED_UNDO',
0x3B: '_MED_REDO',
0x3C: '_MED_SP100',
0x3D: '_MED_CUT',
0x3E: '_MED_COPY',
0x3F: '_MED_PASTE',
0x40: '_MED_PSTLK',
0x41: '_MED_CLEAR',
0x42: '_MED_SP200',
0x43: '_MED_INSOB',
0x44: '_MED_OBJ',
0x45: '_MED_LINK',
0x47: '_MED_SP300',
0x48: '_MED_SLCTA',
0x49: '_MED_SP400',
0x4A: '_MED_GOTO',
0x4B: '_MED_FIND',
0x4D: '_MED_REPL',
0x4F: '_MED_SP500',
0x50: '_MED_BEAUT',
0x51: '_MED_PREF',
0x70: '_MPROG',
0x71: '_MPR_DO',
0x72: '_MPR_SP100',
0x73: '_MPR_CANCL',
0x74: '_MPR_RESUM',
0x76: '_MPR_COMPL',
0x7C: '_MPR_SUSPEND',
0x7D: '_MWINDOW',
0x7E: '_MWI_ARRAN',
0x7F: '_MWI_HIDE',
0x82: '_MWI_CLEAR',
0x83: '_MWI_SP100',
0x88: '_MWI_ROTAT',
0x8A: '_MWI_SP200',
0x8B: '_MWI_CMD',
0x8C: '_MWI_VIEW',
0x8D: '_MVI_TOOLB',
0x8E: '_MVIEW',
0x90: '_MTOOLS',
0x91: '_MTL_WZRDS',
0x92: '_MTL_SP100',
0x93: '_MTL_SP200',
0x94: '_MTL_SP300',
0x95: '_MTL_SP400',
0x96: '_MTL_OPTNS',
0x97: '_MTL_BROWSER',
0x98: '_MTI_FOXCODE',
0x99: '_MTL_DEBUGGER',
0x9A: '_MTI_TRACE',
0x9E: '_MTI_LOCALS',
0x9F: '_MTI_DBGOUT',
0xA0: '_MTI_CALLSTACK',
0xC4: '_MSM_TEXT',
0xE6: '_MST_MSDNS',
0xED: '_MTL_GALLERY',
0xEE: '_MTL_COVERAGE',
0xEF: '_MTI_TASKLIST',
0xF1: '_MTI_DOCVIEW',
0xF2: '_MTI_BREAKPOINT',
0xF4: '_MED_LISTMEMBERS',
0xF5: '_MED_QUICKINFO',
0xF6: '_MED_BKMKS',
0xFD: '_MWI_CASCADE',
0xFE: '_MWI_DOCKABLE',
}
COMMANDS = {
#Commands are identified by a single byte as shown in the following list:
0x00: 'SELECT',
0x01: lambda fid, length: ['RAW CODE: ' + fid.read(length).decode('ISO-8859-1')],
0x02: '?',
0x03: '??',
0x04: '@',
0x05: 'ACCEPT',
0x06: 'APPEND',
0x07: 'ASSIST',
0x08: 'AVERAGE',
0x09: 'BROWSE',
0x0A: 'CALL',
0x0B: 'CANCEL',
0x0C: 'CASE',
0x0D: 'CHANGE',
0x0E: 'CLEAR',
0x0F: 'CLOSE',
0x10: 'CONTINUE',
0x11: 'COPY',
0x12: 'COUNT',
0x13: 'CREATE',
0x14: 'DELETE',
0x15: 'DIMENSION',
0x16: 'DIR',
0x17: 'DISPLAY',
0x18: 'DO',
0x19: 'EDIT',
0x1A: 'EJECT',
0x1B: 'ELSE',
0x1C: 'ENDCASE',
0x1D: 'ENDDO',
0x1E: 'ENDIF',
0x1F: 'ENDTEXT',
0x20: 'ERASE',
0x21: 'EXIT',
0x22: 'FIND',
0x23: 'GO',
0x24: 'HELP',
0x25: 'IF',
0x26: 'INDEX',
0x27: 'INPUT',
0x28: 'INSERT',
0x29: 'JOIN',
0x2A: 'LABEL',
0x2B: 'LIST',
0x2C: 'LOAD',
0x2D: 'LOCATE',
0x2E: 'LOOP',
0x2F: 'MODIFY',
0x30: lambda fid, length: ['NOTE'] + [fid.read(length).decode('ISO-8859-1')],#'NOTE',
0x31: 'ON',
0x32: 'OTHERWISE',
0x33: 'PACK',
0x34: 'PARAMETERS',
0x35: 'PRIVATE',
0x36: 'PROCEDURE',
0x37: 'PUBLIC',
0x38: 'QUIT',
0x39: 'READ',
0x3A: 'RECALL',
0x3B: 'REINDEX',
0x3C: 'RELEASE',
0x3D: 'RENAME',
0x3E: 'REPLACE',
0x3F: 'REPORT',
0x40: 'RESTORE',
0x41: 'RESUME',
0x42: 'RETURN',
0x43: 'RUN',
0x44: 'SAVE',
0x45: 'SEEK',
0x46: 'SELECT',
0x47: read_setcode,
0x48: 'SKIP',
0x49: 'SORT',
0x4A: 'STORE',
0x4B: 'SUM',
0x4C: 'SUSPEND',
0x4D: 'TEXT',
0x4E: 'TOTAL',
0x4F: 'TYPE',
0x50: 'UPDATE',
0x51: 'USE',
0x52: 'WAIT',
0x53: 'ZAP',
0x54: lambda fid, length: [], #variable assignment
0x55: 'ENDPROC\n',
0x56: 'EXPORT',
0x57: 'IMPORT',
0x58: 'RETRY',
0x5A: 'UNLOCK',
0x5B: 'FLUSH',
0x5C: 'KEYBOARD',
0x5D: 'MENU',
0x5E: 'SCATTER',
0x5F: 'GATHER',
0x60: 'SCROLL',
0x68: 'CREATE',
0x69: 'ALTER',
0x6A: 'DROP',
0x6F: 'SELECT',
0x70: 'UPDATE',
0x71: 'DELETE',
0x72: 'INSERT',
0x73: 'DEFINE',
0x74: 'ACTIVATE',
0x75: 'DEACTIVATE',
0x76: 'PRINTJOB',
0x77: 'ENDPRINTJOB',
0x79: '???',
0x7A: 'MOVE',
0x7B: 'ON', #CLEAR ON event
0x7C: 'DECLARE',
0x7D: 'CALCULATE',
0x7E: 'SCAN',
0x7F: 'ENDSCAN',
0x80: 'SHOW',
0x81: 'PLAY',
0x82: 'GETEXPR',
0x83: 'COMPILE',
0x84: 'FOR',
0x85: 'ENDFOR',
0x86: '=', #expression,
0x87: 'HIDE',
0x89: 'SIZE',
0x8A: 'PUSH',
0x8B: 'POP',
0x8C: 'ZOOM',
0x8D: '\\',
0x8E: '\\\\',
0x8F: 'BUILD',
0x90: 'EXTERNAL',
0x93: 'BLANK',
0x95: 'OPEN',
0x96: 'ADD',
0x97: 'REMOVE',
0x99: lambda fid, length: [], #function call
0x9B: 'BEGIN',
0x9C: 'ROLLBACK',
0x9D: 'END',
0x9E: 'add hidden method',
0x9F: 'HIDDEN',
0xA0: 'VALIDATE',
0xA1: 'PROTECTED',
0xA2: 'add method',
0xA3: 'add protected method',
0xA6: 'WITH',
0xA7: 'ENDWITH',
0xA8: 'ERROR',
0xA9: 'ASSERT',
0xAA: 'DEBUGOUT',
0xAB: 'FREE',
0xAC: 'NODEFAULT',
0xAD: 'MOUSE',
0xAE: 'LOCAL',
0xAF: 'LPARAMETERS',
0xB0: 'CD',
0xB1: 'MKDIR',
0xB2: 'RMDIR',
0xB3: 'DEBUG',
0xB4: lambda fid, length: ['BAD CODE: ' + fid.read(length).decode('ISO-8859-1')],
0xB5: 'FOR EACH',
0xB6: 'ENDFOREACH',
0xB7: 'DOEVENTS',
0xB9: 'IMPLEMENTS',
0xBA: 'TRY',
0xBB: 'CATCH',
0xBC: 'FINALLY',
0xBD: 'THROW',
0xBE: 'ENDTRY',
0xFB: read_text,
}
SETCODES = {
0x01: 'ALTERNATE',
0x02: 'BELL',
0x03: 'CARRY',
0x05: 'CENTURY',
0x07: 'COLOR',
0x09: 'CONFIRM',
0x0A: 'CONSOLE',
0x0B: 'DATE',
0x0C: 'DEBUG',
0x0E: 'DEFAULT',
0x0D: 'DECIMALS',
0x0F: 'DELETED',
0x15: 'ESCAPE',
0x16: 'EXACT',
0x17: 'EXCLUSIVE',
0x18: 'FIELDS',
0x1A: 'FILTER',
0x1B: 'FIXED',
0x1C: 'FORMAT',
0x1D: 'FUNCTION',
0x1E: 'HEADINGS',
0x1F: 'HELP',
0x21: 'INDEX',
0x23: 'MARGIN',
0x24: 'MEMOWIDTH',
0x26: 'MESSAGE',
0x27: 'ODOMETER',
0x28: 'ORDER',
0x29: 'PATH',
0x2A: 'PRINTER',
0x2B: 'PROCEDURE',
0x2D: 'RELATION',
0x2E: 'SAFETY',
0x30: 'STATUS',
0x31: 'STEP',
0x32: 'TALK',
0x35: 'TYPEAHEAD',
0x36: 'UNIQUE',
0x37: 'VIEW',
0x39: 'HOURS',
0x3A: 'MARK',
0x3B: 'POINT',
    0x3C: 'SEPARATOR',
0x3D: 'BORDER',
0x3E: 'CLOCK',
0x40: 'SPACE',
0x41: 'COMPATIBLE',
0x42: 'AUTOSAVE',
0x43: 'BLOCKSIZE',
0x45: 'DEVELOPMENT',
0x46: 'NEAR',
0x48: 'REFRESH',
0x49: 'LOCK',
0x4C: 'WINDOW',
0x4D: 'REPROCESS',
0x4E: 'SKIP',
0x51: 'FULLPATH',
0x54: 'RESOURCE',
0x55: 'TOPIC',
0x57: 'LOGERRORS',
0x59: 'SYSMENU',
0x5A: 'NOTIFY',
0x5C: 'MACKEY',
0x5D: 'CURSOR',
0x5E: 'UDFPARMS',
0x5F: 'MULTILOCKS',
0x60: 'TEXTMERGE',
0x61: 'OPTIMIZE',
0x62: 'LIBRARY',
0x64: 'ANSI',
0x65: 'TRBETWEEN',
0x66: 'PDSETUP',
0x68: 'KEYCOMP',
0x69: 'PALETTE',
0x6A: 'READBORDER',
0x6B: 'COLLATE',
0x6D: 'NOCPTRANS',
0x77: 'NULL',
0x79: 'TAG',
0x7B: 'CPDIALOG',
0x7C: 'CPCOMPILE',
0x7D: 'SECONDS',
0x7E: 'CLASSLIB',
0x7F: 'DATABASE',
0x80: 'DATASESSION',
0x81: 'FDOW',
0x82: 'FWEEK',
0x83: 'SYSFORMATS',
0x84: 'OLEOBJECT',
0x85: 'ASSERTS',
0x86: 'COVERAGE',
0x87: 'EVENTTRACKING',
0x88: 'NULLDISPLAY',
0x89: 'EVENTLIST',
0x8D: 'BROWSEIME',
0x8E: 'STRICTDATE',
0x8F: 'AUTOINCERROR',
0x90: 'ENGINEBEHAVIOR',
0x91: 'TABLEVALIDATE',
0x92: 'SQLBUFFERING',
0x94: 'TABLEPROMPT',
0xFE: '\n',
}
CLAUSES = {
0x01: 'ADDITIVE',
0x02: 'ALIAS',
0x03: 'ALL',
0x04: 'ARRAY',
0x05: 'AT',
0x06: 'BAR',
0x07: ',',
0x08: 'BLANK',
0x0C: 'CLEAR',
0x0D: 'COLOR',
0x0E: 'DEFAULT',
0x0F: 'DOUBLE',
0x11: 'FIELDS',
0x12: 'FILE',
0x13: 'FOR',
0x14: 'FORM',
0x15: 'FROM',
0x16: 'IN',
0x17: 'KEY',
0x18: '(@ or LIKE)',
0x1A: 'MACROS',
0x1B: 'MEMO',
0x1C: 'MENU',
0x1D: 'MESSAGE',
0x1E: 'NEXT',
0x1F: 'OFF',
0x20: 'ON',
0x21: 'PRINTER',
0x22: 'PROMPT',
0x23: 'RECORD',
0x24: 'REST',
0x25: 'SAVE',
0x26: 'SCREEN',
0x27: 'TITLE',
0x28: 'TO',
0x29: 'TOP',
0x30: 'NOOPTIMIZE',
0x2B: 'WHILE',
0x2C: 'WINDOW',
0x2E: 'OBJECT',
0x31: 'TABLE',
0x32: 'LABEL',
0x33: 'REPORT',
0x36: 'BOTTOM',
0x38: 'BY',
0x39: 'NOCONSOLE',
0x3A: 'NOWAIT',
0x3B: 'PLAIN',
0x3C: 'DESCENDING',
0x3D: 'NOWINDOW',
0x40: 'FONT',
0x41: 'STYLE',
0x42: 'RGB',
0x48: 'CASE',
0x49: 'ID',
0x4A: 'NAME',
0x4B: 'PROGRAM',
0x4C: 'QUERY',
0x4E: 'SCHEME',
0x4F: 'CLASS',
0x51: 'AS',
0x52: 'CLASSLIB',
0x56: 'DLLS',
0x57: 'SHORT',
0x58: 'LEFT',
0x59: 'RIGHT',
0x5B: 'RTLJUSTIFY',
0x5C: 'LTRJUSTIFY',
0x5F: 'PICTRES',
0xE5: read_name, #user defined function alias
0xF6: read_name, #user defined function
0xFC: read_expr,
END_EXPR: 'END EXPR', #0xFD
0xFE: '\n'
}
MULTICLAUSES = {
0x10: ('=', 'ERROR'),
0xBB: ('XL5', 'WITH BUFFERING='),
0xBC: ('INTO', 'ACTIVATE', 'COMMAND', 'PAD'),
0xBD: ('CENTER', 'CURSOR', 'TRANSACTION', 'APP', 'MINIMIZE', 'ESCAPE'),
0xBE: ('PROCEDURE', 'DELIMITED', 'EXE', 'DISTINCT', 'DOS', 'PAGE'),
0xBF: ('UNKNOWN', 'MIN', 'FLOAT'),
0xC0: ('HAVING', 'FREE', 'LOCAL', 'FOOTER'),
0xC1: ('LINE', 'TRIGGER', 'GLOBAL', 'GET'),
0xC2: ('SHARED', 'DATABASE', 'DROP', 'GETS', 'NOINIT'),
0xC3: ('ORDER BY', 'OF', 'REPLACE', 'NOCLOSE'),
0xC4: ('SAY', 'VIEWS'),
0xC5: ('VALUES', 'DBLCLICK'),
0xC6: ('POPUP', 'WHERE', 'DLL', 'DRAG', 'EXCLUDE'),
0xC7: ('*', 'STEP', 'XLS', 'MARK'),
0xC8: ('READ', 'MARGIN', 'RPD', 'READERROR'),
0xCA: ('TAG', 'SET', 'FORCE', 'NOMINIMIZE', 'EXIT'),
0xCB: ('STATUS', 'RECOMPILE', 'PRIMARY', 'WRK'),
0xCC: ('STRUCTURE', 'RELATIVE', 'FOREIGN', 'PROTECTED'),
0xCD: ('SHUTDOWN', 'NOFILTER'),
0xCE: ('TIMEOUT', 'UPDATE'),
0xCF: ('SHADOW',),
0xD0: ('NOCLEAR', 'SELECTION'),
0xD1: ('WITH', 'INTEGER', 'CONNECTION'),
0xD2: ('NOMARGIN',),
0xD3: ('UNIQUE', 'SIZE'),
0xD4: ('TYPE', 'LONG', 'SYSTEM'),
0xD5: ('EVENTS', 'CSV', 'COLUMN'),
0xD6: ('STRING', 'SHEET', 'NORM'),
0xD7: ('READWRITE',),
}
VALUES = {
0x2D: '.F.',
0x61: '.T.',
0xE4: '.NULL.',
0xD9: read_double_quoted_string,
0xDB: '',
0xDE: read_currency,
0xDF: '::',
0xE0: read_name,
0xE1: read_system_alias,
0xE2: '.',
0xE3: read_alias,
0xE6: read_datetime,
0xE7: read_float,
SQL_SUBQUERY: 'SQL SUBQUERY', #0xE8
0xE9: read_int32,
0xEB: read_next_code,
0xEC: read_menu_system_name,
0xED: read_system_name,
0xEE: read_date,
0xF0: lambda fid, *args: '(SHORT CIRCUIT AND IN {})'.format(read_ushort(fid)),
0xF1: lambda fid, *args: '(SHORT CIRCUIT OR IN {})'.format(read_ushort(fid)),
0xF2: lambda fid, *args: '(SHORT CIRCUIT IIF IN {})'.format(read_ushort(fid)),
0xF3: lambda fid, *args: '(SHORT CIRCUIT IIF IN {})'.format(read_ushort(fid)),
0xF4: read_alias,
0xF5: read_special_alias,
0xF7: read_name,
0xF8: read_int8,
0xF9: read_int16,
0xFA: read_double,
0xFB: read_single_quoted_string,
0xFC: read_expr,
0xFF: lambda fid, *args: 'ff ' + read_raw(fid, 2),
}
OPERATORS = {
0x00: ('NOP', 0),
0x01: ('$', 2),
0x03: ('END PAREN', 0),
0x04: ('*', 2),
0x05: ('^', 2),
0x06: ('+', 2),
0x07: (',', 2),
0x08: ('-', 2),
0x09: ('AND', 2),
0x0A: ('NOT', 1),
0x0B: ('OR', 2),
0x0C: ('/', 2),
0x0D: ('<', 2),
0x0E: ('<=', 2),
0x0F: ('!=', 2),
0x10: ('=', 2),
0x11: ('>', 2),
0x12: ('>=', 2),
0x14: ('==', 2),
0x18: ('@', -1),
0xCC: ('', 1),
}
FUNCTIONS = {
0x19: 'ABS',
0x1A: lambda fid: EXTENDED1[fid.read(1)[0]],
0x1B: 'ALIAS',
0x1C: 'ASC',
0x1D: 'AT',
0x1E: 'BOF',
0x1F: 'CDOW',
0x20: 'CHR',
0x21: 'CMONTH',
0x22: 'COL',
0x23: 'CTOD',
0x24: 'DATE',
0x25: 'DAY',
0x26: 'DBF',
0x27: 'DELETED',
0x28: 'DISKSPACE',
0x29: 'DOW',
0x2A: 'DTOC',
0x2B: 'EOF',
0x2C: 'ERROR',
0x2E: 'FCOUNT',
0x2F: 'FIELD',
0x30: 'FILE',
0x31: 'FKLABEL',
0x32: 'FKMAX',
0x33: 'FLOCK',
0x34: 'FOUND',
0x35: 'GETENV',
0x36: 'IIF',
0x37: 'INKEY',
0x38: 'INT',
0x39: 'ISALPHA',
0x3A: 'ISCOLOR',
0x3B: 'ISLOWER',
0x3C: 'ISUPPER',
0x3D: 'LEFT',
0x3E: 'LEN',
0x3F: 'LOCK',
0x40: 'LOWER',
0x41: 'LTRIM',
0x42: 'LUPDATE',
PARAMETER_MARK: 'MARK PARAMETERS', #0x43
0x44: 'MAX',
0x45: 'MESSAGE',
0x46: 'MIN',
0x47: 'MOD',
0x48: 'MONTH',
0x49: 'NDX',
0x4A: 'OS',
0x4B: 'PCOL',
0x4C: 'PROW',
0x4D: 'READKEY',
0x4E: 'RECCOUNT',
0x4F: 'RECNO',
0x50: 'RECSIZE',
0x51: 'REPLICATE',
0x52: 'RIGHT',
0x53: 'RLOCK',
0x54: 'ROUND',
0x55: 'ROWS',
0x56: 'RTRIM',
0x57: 'SELECT',
0x58: 'SPACE',
0x59: 'SQRT',
0x5A: 'STR',
0x5B: 'STUFF',
0x5C: 'SUBSTR',
0x5D: 'SYS',
0x5E: 'TIME',
0x5F: 'TRANSFORM',
0x60: 'TRIM',
0x62: 'TYPE',
0x64: 'UPDATED',
0x66: 'UPPER',
0x67: 'VAL',
0x68: 'VERSION',
0x69: 'YEAR',
0x6A: 'DMY',
0x6B: 'MDY',
0x6C: 'BAR',
0x6D: 'KEY',
0x6F: 'MEMORY',
0x70: 'MENU',
0x72: 'PAD',
0x73: 'POPUP',
0x74: 'PROGRAM',
0x75: 'PV',
0x76: 'SET',
0x77: 'CEILING',
0x7A: 'FLOOR',
0x7B: 'FV',
0x7C: 'LASTKEY',
0x7D: 'LIKE',
0x7E: 'LOOKUP',
0x7F: 'CDX',
0x80: 'MEMLINES',
0x81: 'MLINE',
0x82: 'ORDER',
0x83: 'PAYMENT',
0x84: 'PRINTSTATUS',
0x85: 'PROMPT',
0x86: 'RAND',
0x87: 'VARREAD',
0x89: 'RTOD',
0x8A: 'SEEK',
0x8B: 'SIGN',
0x8C: 'TAG',
0x8D: 'DTOR',
0x8E: 'DTOS',
0x8F: 'SCHEME',
0x90: 'FOPEN',
0x91: 'FCLOSE',
0x92: 'FREAD',
0x93: 'FWRITE',
0x94: 'FERROR',
0x95: 'FCREATE',
0x96: 'FSEEK',
0x97: 'FGETS',
0x98: 'FFLUSH',
0x99: 'FPUTS',
0x9B: 'ALLTRIM',
0x9C: 'ATLINE',
0x9D: 'CHRTRAN',
0x9E: 'FILTER',
0x9F: 'RELATION',
0xA0: 'TARGET',
0xA1: 'EMPTY',
0xA2: 'FEOF',
0xA3: 'HEADER',
0xA5: 'RAT',
0xA6: 'RATC',
0xA7: 'SECONDS',
0xA8: 'STRTRAN',
0xAA: 'USED',
0xAB: 'BETWEEN',
0xAC: 'CHRSAW',
0xAD: 'INLIST',
0xAE: 'ISDIGIT',
0xAF: 'OCCURS',
0xB0: 'PADC',
0xB1: 'PADL',
0xB2: 'PADR',
0xB3: 'FSIZE',
0xB4: 'SROWS',
0xB5: 'SCOLS',
0xB6: 'WCOLS',
0xB7: 'WROWS',
0xB8: 'ATC',
0xB9: 'ATCLINE',
0xBA: 'CURDIR',
0xBC: 'PROPER',
0xBB: 'FULLPATH',
0xBD: 'WEXISTS',
0xBE: 'WONTOP',
0xBF: 'WOUTPUT',
0xC0: 'WVISIBLE',
0xC1: 'GETFILE',
0xC2: 'PUTFILE',
0xC3: 'GOMONTH',
0xC4: 'PARAMETERS',
0xC5: 'MCOL',
0xC6: 'MDOWN',
0xC7: 'MROW',
0xC8: 'WLCOL',
0xC9: 'WLROW',
0xCB: 'FCHSIZE',
0xCD: 'ALEN',
0xCE: 'EVALUATE',
0xD1: 'ISNULL',
0xD2: 'NVL',
0xE5: read_name, #user defined function alias
0xEA: lambda fid: EXTENDED2[fid.read(1)[0]],
0xF6: read_name, #user defined function
}
EXTENDED1 = {
0x00: 'DISPLAYPATH',
0x01: 'CURSORTOXML',
0x02: 'XMLTOCURSOR',
0x03: 'GETINTERFACE',
0x04: 'BINDEVENT',
0x05: 'RAISEEVENT',
0x06: 'ADOCKSTATE',
0x07: 'GETCURSORADAPTER',
0x08: 'UNBINDEVENTS',
0x09: 'AEVENTS',
0x0A: 'ADDPROPERTY',
0x0E: 'ICASE',
0x0C: 'EVL',
0x0F: 'CAST',
0x10: 'ASQLHANDLES',
0x11: 'SETRESULTSET',
0x12: 'GETRESULTSET',
0x13: 'CLEARRESULTSET',
0x14: 'SQLIDLEDISCONNECT',
0x15: 'ISMEMOFETCHED',
0x16: 'GETAUTOINCVALUE',
0x17: 'MAKETRANSACTABLE',
0x18: 'ISTRANSACTABLE',
0x19: 'ISPEN',
}
EXTENDED2 = {
#This list contains all those functions that are available through the 0xEA (extended function) code:
0x00: 'PRMPAD',
0x01: 'PRMBAR',
0x02: 'MRKPAD',
0x03: 'MRKBAR',
0x04: 'CNTPAD',
0x05: 'CNTBAR',
0x06: 'GETPAD',
0x07: 'GETBAR',
0x08: 'MWINDOW',
0x09: 'OBJNUM',
0x0A: 'WPARENT',
0x0B: 'WCHILD',
0x0C: 'RDLEVEL',
0x0D: 'ACOPY',
0x0E: 'AINS',
0x0F: 'ADEL',
0x10: 'ASORT',
0x11: 'ASCAN',
0x12: 'AELEMENT',
0x13: 'ASUBSCRIPT',
0x14: 'AFIELDS',
0x15: 'ADIR',
0x16: 'LOCFILE',
0x17: 'WBORDER',
0x18: 'ON',
0x19: '', #Some sort of array indicator?
0x1A: 'WLAST',
0x1B: 'SKPBAR',
0x1C: 'SKPPAD',
0x1D: 'WMAXIMUM',
0x1E: 'WMINIMUM',
0x1F: 'WREAD',
0x20: 'WTITLE',
0x21: 'GETPEM',
0x23: 'TXTWIDTH',
0x24: 'FONTMETRIC',
0x25: 'SYSMETRIC',
0x26: 'WFONT',
0x27: 'GETFONT',
0x28: 'AFONT',
0x29: 'DDEADVISE',
0x2A: 'DDEENABLED',
0x2B: 'DDEEXECUTE',
0x2C: 'DDEINITIATE',
0x2D: 'DDEPOKE',
0x2E: 'DDEREQUEST',
0x2F: 'DDESETSERVICE',
0x30: 'DDESETTOPIC',
0x31: 'DDETERMINATE',
0x32: 'DDELASTERROR',
0x33: 'GETDIR',
0x34: 'DDEABORTTRANS',
0x35: 'DDESETOPTION',
0x36: 'OEMTOANSI',
0x38: 'RGBSCHEME',
0x39: 'CPCONVERT',
0x3A: 'CPCURRENT',
0x3B: 'CPDBF',
0x3C: 'IDXCOLLATE',
0x3E: 'CAPSLOCK',
0x3F: 'NUMLOCK',
0x40: 'INSMODE',
0x41: 'SOUNDEX',
0x42: 'DIFFERENCE',
0x43: 'COS',
0x44: 'SIN',
0x45: 'TAN',
0x46: 'ACOS',
0x47: 'ASIN',
0x48: 'ATAN',
0x49: 'ATN2',
0x4A: 'LOG',
0x4B: 'LOG10',
0x4C: 'EXP',
0x4D: 'PI',
0x4E: 'CREATEOBJECT',
0x4F: 'BARPROMPT',
0x51: 'HOME',
0x53: 'FOR',
0x54: 'UNIQUE',
0x55: 'DESCENDING',
0x56: 'TAGCOUNT',
0x57: 'TAGNO',
0x58: 'FDATE',
0x59: 'FTIME',
0x5A: 'ISBLANK',
0x5B: 'ISMOUSE',
0x5C: 'GETOBJECT',
0x5D: 'OBJTOCLIENT',
0x5E: 'RGB',
0x5F: 'OLDVAL',
0x60: 'ASELOBJ',
0x61: 'ACLASS',
0x62: 'AMEMBERS',
0x63: 'COMPOBJ',
0x64: 'SQLCANCEL',
0x65: 'SQLCOLUMNS',
0x66: 'SQLCONNECT',
0x67: 'SQLDISCONNECT',
0x68: 'PEMSTATUS',
0x69: 'SQLEXEC',
0x6A: 'SQLGETPROP',
0x6B: 'SQLMORERESULTS',
0x6C: 'SQLSETPROP',
0x6D: 'SQLTABLES',
0x6E: 'FLDLIST',
0x6F: 'PRTINFO',
0x70: 'KEYMATCH',
0x71: 'OBJVAR',
0x72: 'NORMALIZE',
0x73: 'ISREADONLY',
0x74: 'PCOUNT',
0x78: 'MESSAGEBOX',
0x79: 'AUSED',
0x7A: 'AERROR',
0x7B: 'SQLCOMMIT',
0x7C: 'SQLROLLBACK',
0x7D: 'MTON',
0x7E: 'NTOM',
0x7F: 'DTOT',
0x80: 'TTOD',
0x81: 'TTOC',
0x82: 'CTOT',
0x83: 'HOUR',
0x84: 'MINUTE',
0x85: 'SEC',
0x86: 'DATETIME',
0x87: 'REQUERY',
0x88: 'CURSORSETPROP',
0x89: 'CURSORGETPROP',
0x8A: 'DBSETPROP',
0x8B: 'DBGETPROP',
0x8C: 'GETCOLOR',
0x8D: 'PRIMARY',
0x8E: 'CANDIDATE',
0x8F: 'CURVAL',
0x90: 'GETFLDSTATE',
0x91: 'SETFLDSTATE',
0x92: 'GETNEXTMODIFIED',
0x93: 'TABLEUPDATE',
0x94: 'TABLEREVERT',
0x95: 'ADATABASES',
0x96: 'DBC',
0x98: 'ADBOBJECTS',
0x99: 'APRINTERS',
0x9A: 'GETPRINTER',
0x9B: 'GETPICT',
0x9C: 'WEEK',
0x9D: 'REFRESH',
0x9E: 'GETCP',
0x9F: 'SQLSTRINGCONNECT',
0xA0: 'CREATEBINARY',
0xA1: 'DODEFAULT',
0xA2: 'ISEXCLUSIVE',
0xA3: 'TXNLEVEL',
0xA4: 'DBUSED',
0xA5: 'AINSTANCE',
0xA6: 'INDBC',
0xA7: 'BITLSHIFT',
0xA8: 'BITRSHIFT',
0xA9: 'BITAND',
0xAA: 'BITOR',
0xAB: 'BITNOT',
0xAC: 'BITXOR',
0xAD: 'BITSET',
0xAE: 'BITTEST',
0xAF: 'BITCLEAR',
0xB0: 'AT_C',
0xB1: 'ATCC',
0xB2: 'RATLINE',
0xB3: 'LEFTC',
0xB4: 'RIGHTC',
0xB5: 'SUBSTRC',
0xB6: 'STUFFC',
0xB7: 'LENC',
0xB8: 'CHRTRANC',
0xBA: 'LIKEC',
0xBC: 'IMESTATUS',
0xBD: 'ISLEADBYTE',
0xBE: 'STRCONV',
0xBF: 'BINTOC',
0xC0: 'CTOBIN',
0xC1: 'ISFLOCKED',
0xC2: 'ISRLOCKED',
0xC3: 'LOADPICTURE',
0xC4: 'SAVEPICTURE',
0xC5: 'SQLPREPARE',
0xC6: 'DIRECTORY',
0xC7: 'CREATEOFFLINE',
0xC8: 'DROPOFFLINE',
0xC9: 'AGETCLASS',
0xCA: 'AVCXCLASSES',
0xCB: 'STRTOFILE',
0xCC: 'FILETOSTR',
0xCD: 'ADDBS',
0xCE: 'DEFAULTEXT',
0xCF: 'DRIVETYPE',
0xD0: 'FORCEEXT',
0xD1: 'FORCEPATH',
0xD2: 'JUSTDRIVE',
0xD3: 'JUSTEXT',
0xD4: 'JUSTFNAME',
0xD5: 'JUSTPATH',
0xD6: 'JUSTSTEM',
0xD7: 'INDEXSEEK',
0xD8: 'COMRETURNERROR',
0xD9: 'VARTYPE',
0xDA: 'ALINES',
0xDB: 'NEWOBJECT',
0xDC: 'AMOUSEOBJ',
0xDD: 'COMCLASSINFO',
0xE0: 'ANETRESOURCES',
0xE1: 'AGETFILEVERSION',
0xE2: 'CREATEOBJECTEX',
0xE3: 'COMARRAY',
0xE4: 'EXECSCRIPT',
0xE5: 'XMLUPDATEGRAM',
0xE6: 'COMPROP',
0xE7: 'ATAGINFO',
0xE8: 'ASTACKINFO',
0xE9: 'EVENTHANDLER',
0xEA: 'EDITSOURCE',
0xEB: 'ADLLS',
0xEC: 'QUARTER',
0xED: 'GETWORDCOUNT',
0xEE: 'GETWORDNUM',
0xEF: 'ALANGUAGE',
0xF0: 'STREXTRACT',
0xF1: 'INPUTBOX',
0xF2: 'APROCINFO',
0xF3: 'WDOCKABLE',
0xF4: 'ASESSIONS',
0xF5: 'TEXTMERGE',
0xFD: 'MIN',
}
CLAUSES.update(VALUES)
def read_short(fid):
return struct.unpack('<h', fid.read(2))[0]
def read_ushort(fid):
return struct.unpack('<H', fid.read(2))[0]
def read_int(fid):
return struct.unpack('<i', fid.read(4))[0]
def read_uint(fid):
return struct.unpack('<I', fid.read(4))[0]
def parse_subline(fid, length, final, names, line):
while fid.tell() < final:
clauseval = fid.read(1)[0]
if clauseval in CLAUSES:
clause = CLAUSES[clauseval]
if clauseval in MULTICLAUSES:
clause = ' or '.join(MULTICLAUSES[clauseval])
if clauseval == 0xF6 or clauseval == 0xF7:
clause = clause(fid, names)
while line and type(line[-1]) is FXPAlias:
clause = FXPName(repr(line.pop()) + repr(clause))
elif callable(clause):
clause = clause(fid, names)
else:
clause = FXPName(clause)
line.append(clause)
if len(line) > 1 and isinstance(line[-2], int):
line.pop(-2)
    # position the stream at the end of this line record before returning
    fid.seek(final)
    try:
        line = [' '.join(repr(l) for l in line)]
    except Exception:
        pass
return line[0]
def parse_line(fid, length, names):
final = fid.tell() + length
line = []
command = COMMANDS[fid.read(1)[0]]
if callable(command):
line += [FXPName(c) for c in command(fid, length-1)]
else:
line.append(FXPName(command))
return parse_subline(fid, length, final, names, line)
def read_code_line_area(fid, names, final_fpos):
d = []
while fid.tell() < final_fpos:
try:
start_pos = fid.tell()
length = read_ushort(fid)
d.append(parse_line(fid, length-2, names))
except:
import traceback
traceback.print_exc()
fid.seek(start_pos)
length = read_ushort(fid)
line = read_raw(fid, length-2)
print(line, file=sys.stderr)
d.append(line)
return ''.join(d)
def read_code_name_list(fid):
num_entries = read_ushort(fid)
return [read_string(fid) for i in range(num_entries)]
def change_named_value(expr, names):
expr = expr.split()[1]
if expr.endswith('.'):
return names[int(expr[:-1])] + '.'
else:
return names[int(expr)]
def concatenate_aliases(codes, names):
codes = codes[:]
new_codes = []
while codes:
code = codes.pop(0)
if isinstance(code, str) and code.startswith('NAME '):
code = change_named_value(code, names)
while code.endswith('.'):
code += change_named_value(codes.pop(0), names)
if isinstance(code, str) and code.startswith('SPECIAL_NAME '):
code = change_named_value(code, SPECIAL_NAMES)
while code.endswith('.'):
code += change_named_value(codes.pop(0), names)
new_codes.append(code)
return new_codes
def read_code_block(fid):
tot_length = read_ushort(fid)
if tot_length == 0:
tot_length = read_uint(fid)
start_pos = fid.tell()
fid.seek(fid.tell() + tot_length)
names = read_code_name_list(fid)
fid.seek(start_pos)
return read_code_line_area(fid, names, start_pos+tot_length)
def convert_date(date_bits):
year = ((date_bits & 0xfe000000) >> 25) + 1980
month = (date_bits & 0x1e00000) >> 21
day = (date_bits & 0x1f0000) >> 16
hour = (date_bits & 0xf800) >> 11
minute = (date_bits & 0x7e0) >> 5
second = (date_bits & 0x1f) << 1
return datetime(year, month, day, hour, minute, second)
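
# A hedged, illustrative sketch: the packed timestamp is a DOS-style bitfield,
# decoded by convert_date() above. The sample value here is fabricated for
# demonstration and does not come from a real FXP file.
def _convert_date_example():
    # 7-bit year offset from 1980 | 4-bit month | 5-bit day
    # | 5-bit hour | 6-bit minute | 5-bit half-seconds
    bits = (19 << 25) | (6 << 21) | (15 << 16) | (13 << 11) | (30 << 5) | 21
    return convert_date(bits)  # -> datetime(1999, 6, 15, 13, 30, 42)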
def read_procedure_header(fid):
return OrderedDict((
('name', read_string(fid)),
('pos', read_uint(fid)),
('class_flag', read_short(fid)),
('class', read_short(fid)),
))
def read_class_header(fid):
return {
'name': read_string(fid),
'parent': read_string(fid),
'pos': read_uint(fid),
'reserved': fid.read(2),
}
def read_line_info(fid):
return read_raw(fid, 2)
def read_source_info(fid):
    unknown1, unknown2, unknown3, line_num_start = struct.unpack('<IIII', fid.read(16))
return line_num_start, unknown1, unknown2, unknown3
def read_until_null(fid):
string = fid.read(1)
while string[-1] != 0:
string += fid.read(1)
if string:
string = string[:-1]
return string.decode('ISO-8859-1')
def read_fxp_file_block(fid, start_pos, name_pos):
fid.seek(start_pos)
fmt = '<hhiiiiiiIIB'
    (num_procedures, num_classes, main_codepos, procedure_pos, class_pos,
     source_info_pos, num_code_lines, code_lines_pos, date, original_name_pos,
     codepage) = struct.unpack(fmt, fid.read(struct.calcsize(fmt)))
procedure_pos += start_pos
class_pos += start_pos
code_lines_pos += start_pos
source_info_pos += start_pos
date = convert_date(date)
codepage = dbf.code_pages[255 - codepage]
fid.seek(name_pos + original_name_pos)
original_name = read_until_null(fid)
for item in ('num_procedures', 'num_classes', 'main_codepos', 'procedure_pos', 'class_pos', 'source_info_pos', 'num_code_lines', 'code_lines_pos', 'date', 'original_name', 'codepage'):
print('{} = {!r}'.format(item, eval(item)))
fid.seek(procedure_pos)
procedures = [OrderedDict((key, val) for key, val in zip(('name', 'pos', 'class_flag', 'class'), ('', main_codepos, 0, -1)))]
procedures += [read_procedure_header(fid) for i in range(num_procedures)]
fid.seek(class_pos, 0)
classes = [read_class_header(fid) for i in range(num_classes)]
fid.seek(code_lines_pos)
line_info = [read_line_info(fid) for i in range(num_code_lines)]
fid.seek(source_info_pos)
source_info = [read_source_info(fid) for i in range(num_procedures + num_classes + 1)]
for proc_or_cls in procedures + classes:
fid.seek(proc_or_cls['pos'] + start_pos)
proc_or_cls['code'] = read_code_block(fid)
proc_or_cls.pop('pos')
return procedures, classes
def fxp_read(fxp_file, output_dir=None):
with open(fxp_file, 'rb') as fid:
header_bytes = fid.read(HEADER_SIZE)
if len(header_bytes) < HEADER_SIZE:
raise Exception('File header too short')
        (identifier, head, num_files, main_file, footer_pos, name_pos,
         name_len, reserved, checksum) = struct.unpack('<3s2sHHIII18sH', header_bytes)
if identifier == ENCRYPTED_IDENTIFIER:
print(repr(header_bytes))
raise Exception('Encrypted file')
if identifier != IDENTIFIER:
print('bad header')
raise Exception('bad header: {!r}'.format(identifier))
if checksum != checksum_calc(header_bytes[:-4]):
raise Exception('bad checksum')
for item in ('head', 'num_files', 'main_file', 'footer_pos', 'name_pos', 'name_len', 'reserved', 'checksum'):
print('{} = {!r}'.format(item, eval(item)))
print()
if output_dir:
try:
os.makedirs(output_dir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
fid.seek(footer_pos)
footer_entries = [struct.unpack('<BIIII8s', fid.read(FOOTER_ENTRY_SIZE)) for i in range(num_files)]
output = OrderedDict()
for i, (file_type, file_start, file_stop, dirname_start, filename_start, reserved) in enumerate(footer_entries):
if i == main_file:
print('MAIN')
fid.seek(name_pos + dirname_start)
dirname = read_until_null(fid)
fid.seek(name_pos + filename_start)
filename = read_until_null(fid)
for item in ('file_type', 'file_start', 'file_stop', 'dirname_start', 'filename_start', 'reserved', 'dirname', 'filename'):
print('{} = {!r}'.format(item, eval(item)))
if file_type == 0:
try:
output[filename] = read_fxp_file_block(fid, file_start, name_pos)
except:
pass
if output_dir:
with open(os.path.join(output_dir, filename), 'wb') as outfid:
fid.seek(file_start)
blocklen = file_stop - file_start
filename_blocklen = len(dirname) + len(filename) + 3
new_footer_pos = HEADER_SIZE + blocklen + filename_blocklen
new_name_pos = HEADER_SIZE + blocklen
header_data = bytearray(struct.pack('<5sHHIII', IDENTIFIER, 1, 0, new_footer_pos, new_name_pos, filename_blocklen))
header_data += b'\x00' * 16
outfid.write(header_data)
outfid.write(b'\x00' * 2)
outfid.write(struct.pack('<H', checksum_calc(header_data)))
file_start = outfid.tell()
outfid.write(fid.read(blocklen))
file_stop = outfid.tell()
outfid.write(b'\x00')
outfid.write((dirname + '\x00').encode('ISO-8859-1'))
outfid.write((filename + '\x00').encode('ISO-8859-1'))
outfid.write(struct.pack('<BIIII', file_type, file_start, file_stop, 1, 1 + len(dirname) + 1))
outfid.write(b'\x00' * 8)
else:
if output_dir:
with open(os.path.join(output_dir, filename), 'wb') as outfid:
fid.seek(file_start)
outfid.write(fid.read(file_stop - file_start))
print()
if output_dir:
for filename in output:
with open(os.path.join(output_dir, os.path.splitext(filename)[0]) + '.prg', 'wb') as outfid:
procedures, classes = output[filename]
for proc in procedures:
if not proc['class_flag']:
if proc['name']:
outfid.write('PROCEDURE {}\n'.format(proc['name']).encode('ISO-8859-1'))
outfid.write(proc['code'].encode('ISO-8859-1'))
for cls in classes:
outfid.write('DEFINE CLASS {} AS {}\n'.format(cls['name'], cls['parent']).encode('ISO-8859-1'))
for line in cls['code'].splitlines(True):
match = re.match(r'add (hidden |protected |)?method ([0-9]*)', line)
if match:
qualifier = match.groups()[0]
proc = procedures[int(match.groups()[1])]
outfid.write('{}PROCEDURE {}\n'.format(qualifier, proc['name']).encode('ISO-8859-1'))
outfid.write(proc['code'].encode('ISO-8859-1'))
else:
                                outfid.write(line.encode('ISO-8859-1'))
for filename in output:
import pprint
printer = pprint.PrettyPrinter(depth=10, indent=4)
procedures, classes = output[filename]
printer.pprint(filename)
printer.pprint(procedures)
printer.pprint(classes)
if __name__ == '__main__':
fxp_read(sys.argv[1], sys.argv[2] if len(sys.argv) > 2 else None)
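
# Usage, per the argv handling above: <script> path/to/app.fxp [output_dir]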
| StarcoderdataPython |
157789 | from gtts import gTTS
import os
tts = gTTS(text='temperatura a 30 grados', lang='es')
tts.save('apagado.mp3')
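# os appears to be imported for playing the saved file afterwards; one possible
# (platform-dependent, assumed) usage would be: os.system('mpg123 apagado.mp3')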
| StarcoderdataPython |
1730481 | '''
单词接龙
字典 wordList 中从单词 beginWord 和 endWord 的 转换序列 是一个按下述规格形成的序列:
序列中第一个单词是 beginWord 。
序列中最后一个单词是 endWord 。
每次转换只能改变一个字母。
转换过程中的中间单词必须是字典 wordList 中的单词。
给你两个单词 beginWord 和 endWord 和一个字典 wordList ,找到从 beginWord 到 endWord 的 最短转换序列 中的 单词数目 。
如果不存在这样的转换序列,返回 0。
'''
from typing import List
class Graph:
def __init__(self):
self.nodes = {}
def addNode(self, s: str):
self.nodes[s] = Node(s)
def addEdge(self, s1: str, s2: str):
self.nodes[s1].edges.append(self.nodes[s2])
self.nodes[s2].edges.append(self.nodes[s1])
def isNode(self, s: str):
return s in self.nodes
    # compute the length of the shortest path
def shortestPathLen(self, start: str, end: str):
q = []
startNode, endNode = self.nodes[start], self.nodes[end]
q.append(startNode)
        q.append(None)  # sentinel marking the end of the current BFS layer
pathLen = 1
while q:
node = q.pop(0)
if node is None:
pathLen += 1
if q:
                    q.append(None)  # this layer is fully traversed; add the sentinel for the next layer
continue
if node == endNode:
return pathLen
node.traversed = True
            q.extend(filter(lambda n: not n.traversed, node.edges))
return 0
class Node:
def __init__(self, s: str):
self.s = s
self.edges = []
self.traversed = False
'''
Approach: shortest path in a graph.
Treat every word as a node, with an edge between any two words that differ in
exactly one letter; the problem then asks for the length of the shortest path.
1. Convert the input beginWord and wordList into an unweighted undirected graph.
   To test whether two words are one edit apart, delete each of the m character
   positions of a word in turn to form m new "subwords"; two words that share a
   subword are convertible into each other.
2. Search for the shortest path from beginWord to endWord with breadth-first search.
Time complexity: O(m*m*n) - building the graph cuts each of the n words into m
subwords of length m, costing O(m*m*n); the shortest-path search itself is O(n).
Space complexity: O(m*m*n) for the auxiliary subword tables, plus O(n) for the graph.
'''
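# For example, cutting each position from "hit" gives the subwords "it", "ht",
# and "hi"; "hot" also yields "ht", so an edge links "hit" and "hot".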
class Solution:
def ladderLength(self, beginWord: str, endWord: str, wordList: List[str]) -> int:
graph = Graph()
m = len(beginWord)
        # 1. build the undirected graph
        subwords = [{} for i in range(m)]  # subwords formed by deleting each character position
def cutWord(word: str):
for i in range(m):
subwordMap = subwords[i]
subword = word[:i] + word[i + 1:]
if subword not in subwordMap:
subwordMap[subword] = []
else:
                    for other in subwordMap[subword]:  # words sharing a subword are connected by an edge
graph.addEdge(word, other)
subwordMap[subword].append(word)
graph.addNode(beginWord)
cutWord(beginWord)
for word in wordList:
graph.addNode(word)
cutWord(word)
        # check that endWord appears in the word list
if not graph.isNode(endWord):
return 0
        # breadth-first search for the shortest path length
return graph.shortestPathLen(beginWord, endWord)
s = Solution()
print(s.ladderLength(beginWord="hit", endWord="cog", wordList=["hot", "dot", "dog", "lot", "log", "cog"]))
print(s.ladderLength(beginWord="hit", endWord="cog", wordList=["hot", "dot", "dog", "lot", "log"]))
| StarcoderdataPython |
4822550 | from pprint import pprint
import textfsm
template_file = "ex7_show_int_status.template"
template = open(template_file)
with open("ex7_show_int_status.txt") as f:
raw_text_data = f.read()
re_table = textfsm.TextFSM(template)
data = re_table.ParseText(raw_text_data)
template.close()
print()
keys = re_table.header
values = data
# pair each parsed row with the template's column headers
new_list = [dict(zip(keys, row)) for row in values]
print(new_list)
| StarcoderdataPython |
43374 | <gh_stars>0
# This software was developed by employees of the National Institute of
# Standards and Technology (NIST), an agency of the Federal Government.
# Pursuant to title 17 United States Code Section 105, works of NIST employees
# are not subject to copyright protection in the United States and are
# considered to be in the public domain. Permission to freely use, copy,
# modify, and distribute this software and its documentation without fee is
# hereby granted, provided that this notice and disclaimer of warranty appears
# in all copies.
#
# THE SOFTWARE IS PROVIDED 'AS IS' WITHOUT ANY WARRANTY OF ANY KIND, EITHER
# EXPRESSED, IMPLIED, OR STATUTORY, INCLUDING, BUT NOT LIMITED TO, ANY WARRANTY
# THAT THE SOFTWARE WILL CONFORM TO SPECIFICATIONS, ANY IMPLIED WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND FREEDOM FROM
# INFRINGEMENT, AND ANY WARRANTY THAT THE DOCUMENTATION WILL CONFORM TO THE
# SOFTWARE, OR ANY WARRANTY THAT THE SOFTWARE WILL BE ERROR FREE. IN NO EVENT
# SHALL NIST BE LIABLE FOR ANY DAMAGES, INCLUDING, BUT NOT LIMITED TO, DIRECT,
# INDIRECT, SPECIAL OR CONSEQUENTIAL DAMAGES, ARISING OUT OF, RESULTING FROM,
# OR IN ANY WAY CONNECTED WITH THIS SOFTWARE, WHETHER OR NOT BASED UPON
# WARRANTY, CONTRACT, TORT, OR OTHERWISE, WHETHER OR NOT INJURY WAS SUSTAINED
# BY PERSONS OR PROPERTY OR OTHERWISE, AND WHETHER OR NOT LOSS WAS SUSTAINED
# FROM, OR AROSE OUT OF THE RESULTS OF, OR USE OF, THE SOFTWARE OR SERVICES
# PROVIDED HEREUNDER. Distributions of NIST software should also include
# copyright and licensing statements of any third-party software that are
# legally bundled with the code in compliance with the conditions of those
# licenses.
"""
This implementation is deeply intertwined obscure details of the python object
model. Consider starting with a close read of the documentation and exploring
the objects in an interpreter instead of reverse-engineering this code.
"""
from . import util
import typing
from warnings import warn
from functools import wraps
import validators as _val
from contextlib import contextmanager
from inspect import isclass
import inspect
import numbers
import re
# for common types
from pathlib import Path
import numpy as np
import pandas as pd
Undefined = inspect.Parameter.empty
T = typing.TypeVar("T")
class ThisType(typing.Generic[T]):
pass
class HasTraitsMeta(type):
__cls_namespace__ = []
@classmethod
def __prepare__(cls, names, bases, **kws):
"""Prepare copies of cls._traits, to ensure that any traits defined
        in the subclass don't clobber the parents' traits.
"""
ns = dict()
if len(bases) >= 1:
if hasattr(bases, "__children__"):
ns["__children__"] = {}
traits = {k: v.copy() for k, v in bases[0]._traits.items()}
ns.update(traits)
ns["_traits"] = traits
HasTraitsMeta.__cls_namespace__.append(traits)
return ns
else:
HasTraitsMeta.__cls_namespace__.append({})
return dict(_traits=HasTraitsMeta.__cls_namespace__[-1])
class Trait:
"""base class for typed descriptors in Device classes. These
implement type checking, casting, decorators, and callbacks.
A Device instance supports two types of Traits:
* A _value trait_ acts as an attribute variable in instantiated
classes
* A _property trait_ exposes set and get operations for a
parameter in the API wrapped by the owning Device class.
The trait behavior is determined by whether its owner is a Device or HasSettings
instance.
Arguments:
default: the default value of the trait (value traits only)
key: some types of Device take this input to determine automation behavior
help: the Trait docstring
label: a label for the quantity, such as units
Arguments:
sets: True if the trait supports writes
gets: True if the trait supports reads
cache: if True, interact with the device only once, then return copies (state traits only)
only: value allowlist; others raise ValueError
Arguments:
allow_none: permit None values in addition to the specified type
        remap: a lookup table that maps the python type (keys) to potentially different backend values (values),
            in place of the to_pythonic and from_pythonic methods (property traits only)
"""
ROLE_VALUE = "value"
ROLE_PROPERTY = "property"
ROLE_DATARETURN = "return"
ROLE_UNSET = "unset"
type = None
role = ROLE_UNSET
# keyword argument types and default values
default: ThisType = Undefined
key: Undefined = Undefined
func: typing.Callable = None
# role: str = ROLE_UNSET
help: str = ""
label: str = ""
sets: bool = True
gets: bool = True
cache: bool = False
only: tuple = tuple()
allow_none: bool = False
remap: dict = {}
# If the trait is used for a state, it can operate as a decorator to
# implement communication with a device
_setter = None
_getter = None
_returner = None
_decorated_funcs = []
# __decorator_action__ = None
def __init__(self, *args, **kws):
if len(args) >= 1:
if len(args) == 1 and self.role == self.ROLE_VALUE:
if "default" in kws:
raise ValueError(f"duplicate 'default' argument in {self}")
kws["default"] = args[0]
elif len(args) == 1 and self.role == self.ROLE_DATARETURN:
if "func" in kws:
raise ValueError("duplicate 'func' argument")
kws["func"] = args[0]
else:
raise ValueError(f"no positional arguments supported")
self.kws = dict(kws)
self.metadata = {}
self._decorated_funcs = []
cls_defaults = {k: getattr(self, k) for k in self.__annotations__.keys()}
if "default" in cls_defaults:
cls_defaults["default"] = Undefined
kws = dict(cls_defaults, **kws)
# check role and related parameter dependencies
if self.role == self.ROLE_VALUE:
invalid_args = ("remap", "key", "func")
elif self.role == self.ROLE_PROPERTY:
invalid_args = ("default", "func")
elif self.role == self.ROLE_DATARETURN:
invalid_args = "default", "key", "sets", "gets"
else:
clsname = self.__class__.__qualname__
raise ValueError(
f"{clsname}.role must be one of {(self.ROLE_PROPERTY, self.ROLE_DATARETURN, self.ROLE_VALUE)}, not {repr(self.role)}"
)
for k in invalid_args:
if k in cls_defaults and cls_defaults[k] != kws[k]:
raise AttributeError(
f"keyword argument '{k}' is not allowed with {self.role}"
)
if self.role == self.ROLE_VALUE and kws["default"] is Undefined:
# always go with None when this value is allowed, fallback to self.default
kws["default"] = self.type()
if self.role == self.ROLE_DATARETURN:
if kws["func"] is not None:
# apply a decorator
self(kws["func"])
if self.role in (self.ROLE_DATARETURN, self.ROLE_PROPERTY):
# default Undefined so that cache will fill them in
self.default = Undefined
# Replace self.from_pythonic and self.to_pythonic with lookups in self.remap (if defined)
try:
if len(kws["remap"]) > 0:
self.remap_inbound = {v: k for k, v in kws["remap"].items()}
else:
self.remap_inbound = {}
except KeyError:
raise
if len(kws["remap"]) != len(self.remap_inbound):
raise ValueError(f"'remap' has duplicate values")
# set value traits
for k, v in kws.items():
setattr(self, k, v)
@classmethod
def __init_subclass__(cls, type=Undefined):
"""python triggers this call immediately after a Trait subclass
is defined, allowing us to automatically customize its implementation.
Arguments:
type: the python type represented by the trait
"""
if type is not Undefined:
cls.type = type
# complete the annotation dictionary with the parent
annots = dict(
getattr(cls.__mro__[1], "__annotations__", {}),
**getattr(cls, "__annotations__", {}),
)
cls.__annotations__ = dict(annots)
# # apply an explicit signature to cls.__init__
# annots = {k: cls.type if v is ThisType else (k, v) \
# for k, v in annots.items()}
# cls._arg_defaults = {k: getattr(cls, k)
# for k in annots if hasattr(cls, k)}
# if 'default' in cls._arg_defaults:
# cls._arg_defaults['default'] = Undefined
# TODO: remove this
# util.wrap_attribute(cls, '__init__', __init__, tuple(annots.keys()), cls._arg_defaults, 1, annots)
# Help to reduce memory use by __slots__ definition (instead of __dict__)
cls.__slots__ = [n for n in dir(cls) if not n.startswith("_")] + [
"metadata",
"kind",
"name",
]
def copy(self, new_type=None, **update_kws):
if new_type is None:
new_type = type(self)
obj = new_type(**dict(self.kws, **update_kws))
obj._getter = self._getter
obj._setter = self._setter
obj._returner = self._returner
return obj
### Descriptor methods (called automatically by the owning class or instance)
def __set_name__(self, owner_cls, name):
"""Immediately after an owner class is instantiated, it calls this
method for each of its attributes that implements this method.
Trait takes advantage of this to remember the owning class for debug
messages and to register with the owner class.
"""
# other owning objects may unintentionally become owners; this causes problems
# if they do not implement the HasTraits object protocol
if issubclass(owner_cls, HasTraits):
# inspect module expects this name - don't play with it
self.__objclass__ = owner_cls
        # Take the given name, unless we've been tagged with a different one
self.name = name
owner_cls._traits[name] = self
def __init_owner_subclass__(self, owner_cls):
"""The owner calls this in each of its traits at the end of defining the subclass
(near the end of __init_subclass__).
has been called. Now it is time to ensure properties are compatible with the owner class.
This is here --- not in __set_name__ --- because python
obfuscates exceptions raised in __set_name__.
This is also where we finalize selecting decorator behavior; is it a property or a method?
"""
if self.role == self.ROLE_VALUE and len(self._decorated_funcs) > 0:
raise AttributeError(
f"tried to combine a default value and a decorator implementation in {self}"
)
elif self.role == self.ROLE_DATARETURN and len(self._decorated_funcs) == 0:
raise AttributeError(f"decorate a method to tag its return data")
elif len(self._decorated_funcs) == 0:
return
positional_argcounts = [
f.__code__.co_argcount - len(f.__defaults__ or tuple())
for f in self._decorated_funcs
]
if self.role == self.ROLE_DATARETURN:
for func, argcount in zip(self._decorated_funcs, positional_argcounts):
if len(self.help.rstrip().strip()) == 0:
# take func docstring as default self.help
self.help = (func.__doc__ or "").rstrip().strip()
self._returner = func
elif self.role == self.ROLE_PROPERTY:
if set(positional_argcounts) not in ({1}, {1, 2}, {2}):
raise AttributeError(
f"a decorator implementation with @{self} must apply to a getter "
f"(above `def func(self)`) and/or setter (above `def func(self, value):`)"
)
for func, argcount in zip(self._decorated_funcs, positional_argcounts):
doc = (func.__doc__ or "").strip().rstrip()
if len(doc) > 0:
# take func docstring as default self.help
self.help = self.kws["help"] = doc
if argcount == 1:
self._getter = func
else:
self._setter = func
def __init_owner_instance__(self, owner):
# called by owner.__init__
pass
@util.hide_in_traceback
def __set__(self, owner, value):
# First, validate the pythonic types
if not self.sets:
raise AttributeError(f"{self.__str__()} cannot be set")
# Validate the pythonic value
if value is not None:
# cast to self.type and validate
value = Trait.to_pythonic(self, value)
value = self.validate(value, owner)
if len(self.only) > 0 and not self.contains(self.only, value):
raise ValueError(
f"value '{value}' is not among the allowed values {repr(self.only)}"
)
elif self.allow_none:
value = None
else:
raise ValueError(f"None value not allowed for trait '{repr(self)}'")
try:
value = self.from_pythonic(value)
except BaseException as e:
name = owner.__class__.__qualname__ + "." + self.name
e.args = (e.args[0] + f" in attempt to set '{name}'",) + e.args[1:]
raise e
if self.role == self.ROLE_VALUE:
# apply as a value trait
owner.__set_value__(self.name, value)
elif self.role == self.ROLE_PROPERTY:
# convert to the outbound representation
if len(self.remap) > 0:
value = self.remap.get(value, value)
# send to the device
if self._setter is not None:
# from the function decorated by this trait
self._setter(owner, value)
elif self.key is not None:
# otherwise, use the owner's set_key
owner.set_key(self.key, value, self.name)
else:
objname = owner.__class__.__qualname__ + "." + self.name
raise AttributeError(
f"cannot set {objname}: no @{self.__repr__(owner_inst=owner)}."
f"setter and no key argument"
)
else:
raise AttributeError(f"data return traits cannot be set")
owner.__notify__(self.name, value, "set", cache=self.cache)
@util.hide_in_traceback
def __get__(self, owner, owner_cls=None):
"""Called by the class instance that owns this attribute to
retreive its value. This, in turn, decides whether to call a wrapped
decorator function or the owner's get_key method to retrieve
the result.
Returns:
retreived value
"""
# only continue to get the value if the __get__ was called for an owning
# instance, and owning class is a match for what we were told in __set_name__.
# otherwise, someone else is trying to access `self` and we
# shouldn't get in their way.
if owner is None or owner_cls.__dict__.get(
self.name, None
) is not self.__objclass__.__dict__.get(self.name):
# the __dict__ acrobatics avoids a recursive __get__ loop
return self
elif self.role == self.ROLE_DATARETURN:
# inject the labbench Trait hooks into the return value
@wraps(self._returner)
def method(*args, **kws):
value = self._returner(owner, *args, **kws)
return self.__cast_get__(owner, value)
return method
elif not self.gets:
# stop now if this is not a gets Trait
raise AttributeError(f"{self.__repr__(owner_inst=owner)} is not gets")
elif self.role == self.ROLE_VALUE:
return owner.__get_value__(self.name)
# from here on, operate as a property getter
if self.cache and self.name in owner.__cache__:
# return the cached value if applicable
return owner.__cache__[self.name]
elif self._getter is not None:
# get value with the decorator implementation, if available
value = self._getter(owner)
else:
# otherwise, get with owner.get_key, if available
if self.key is None:
                # no key and no decorated getter: the property cannot be read
                objname = owner.__class__.__qualname__
                raise AttributeError(
                    f"to get the property {self.name}, decorate a method in {objname} or pass the key argument"
)
value = owner.get_key(self.key, self.name)
# apply remapping as appropriate for the trait
if len(self.remap_inbound) > 0:
value = self.remap_inbound.get(value, value)
return self.__cast_get__(owner, value, strict=False)
@util.hide_in_traceback
def __cast_get__(self, owner, value, strict=False):
"""Examine value and either return a valid pythonic value or raise an exception if it cannot be cast.
Arguments:
owner: the class that owns the trait
value: the value we need to validate and notify
:return:
"""
if self.allow_none and value is None:
pass
else:
# skip validation if None and None values are allowed
try:
value = self.to_pythonic(value)
except BaseException as e:
# name = owner.__class__.__qualname__ + '.' + self.name
e.args = (
e.args[0]
+ f" in attempt to get '{self.__repr__(owner_inst=owner)}'",
) + e.args[1:]
raise e
# Once we have a python value, give warnings (not errors) if the device value fails further validation
if hasattr(owner, "_logger"):
log = owner._logger.warning
else:
log = warn
# TODO: This broke array-like data. Was it ever necessary?
# if value != self.validate(value, owner):
# raise ValueError
# except ValueError:
# log(f"'{self.__repr__(owner_inst=owner)}' {self.role} received the value {repr(value)}, " \
# f"which fails {repr(self)}.validate()")
if value is None and not self.allow_none:
log(
f"'{self.__repr__(owner_inst=owner)}' {self.role} received value None, which"
f"is not allowed for {repr(self)}"
)
if len(self.only) > 0 and not self.contains(self.only, value):
log(
f"'{self.__repr__(owner_inst=owner)}' {self.role} received {repr(value)}, which"
f"is not in the valid value list {repr(self.only)}"
)
owner.__notify__(
self.name, value, "get", cache=self.cache or (self.role == self.ROLE_VALUE)
)
return value
@util.hide_in_traceback
def to_pythonic(self, value):
"""Convert a value from an unknown type to self.type."""
return self.type(value)
@util.hide_in_traceback
def from_pythonic(self, value):
"""convert from a python type representation to the format needed to communicate with the device"""
return value
@util.hide_in_traceback
def validate(self, value, owner=None):
"""This is the default validator, which requires that trait values have the same type as self.type.
A ValueError is raised for other types.
        Arguments:
            value: value to check
Returns:
a valid value
"""
if not isinstance(value, self.type):
typename = self.type.__qualname__
valuetypename = type(value).__qualname__
raise ValueError(
f"{repr(self)} type must be '{typename}', not '{valuetypename}'"
)
return value
def contains(self, iterable, value):
return value in iterable
### Decorator methods
@util.hide_in_traceback
def __call__(self, func):
"""use the Trait as a decorator, which ties this Trait instance to evaluate a property or method in the
owning class. you can specify
"""
# only decorate functions.
if not callable(func):
raise Exception(
f"object of type '{func.__class__.__qualname__}' must be callable"
)
self._decorated_funcs.append(func)
# Register in the list of decorators, in case we are overwritten by an
# overloading function
if getattr(self, "name", None) is None:
self.name = func.__name__
if len(HasTraitsMeta.__cls_namespace__) > 0:
HasTraitsMeta.__cls_namespace__[-1][func.__name__] = self
# return self to ensure `self` is the value assigned in the class definition
return self
### introspection
###
def doc(self):
params = self.doc_params(omit=["help", "default"])
typename = "Any" if self.type is None else self.type.__qualname__
doc = f"{self.name} ({typename}): {self.help}"
if len(params) > 0:
doc += f" ({params})"
return doc
def doc_params(self, omit=["help"]):
pairs = []
for name in self.__annotations__.keys():
default = getattr(type(self), name)
v = getattr(self, name)
# skip uninformative debug info
if name.startswith("_") or name in omit:
continue
# only show non-defaults
v = getattr(self, name)
if v == default:
continue
pairs.append(f"{name}={repr(v)}")
return ",".join(pairs)
def __repr__(self, omit=["help"], owner_inst=None):
declaration = f"{self.role}.{type(self).__qualname__}({self.doc_params(omit)})"
if owner_inst is None:
return declaration
else:
return f"<{declaration} as {owner_inst}.{self.name}>"
__str__ = __repr__
def _owned_name(self, owner):
if owner._owned_name is None:
return type(owner).__qualname__ + "." + self.name
else:
return owner._owned_name + "." + self.name
def update(self, obj=None, **attrs):
""" returns `self` or (if `obj` is None) or `other`, after updating its keyword
parameters with `attrs`
"""
if obj is None:
obj = self
invalid_params = set(attrs).difference(obj.__dict__)
if len(invalid_params) > 0:
raise AttributeError(
f"{obj} does not have the parameter(s) {invalid_params}"
)
obj.__dict__.update(attrs)
return obj
Trait.__init_subclass__()
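
# Python only fires __init_subclass__ automatically for subclasses, so the base
# class calls it once by hand here to finalize its own annotations and __slots__.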
@contextmanager
def hold_trait_notifications(owner):
def skip_notify(name, value, type, cache):
        # update the cache directly, without dispatching to notify handlers
        owner.__cache__[name] = value
original, owner.__notify__ = owner.__notify__, skip_notify
yield
owner.__notify__ = original
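
# A rough usage sketch (assuming `device` is a HasTraits instance defined
# elsewhere): prime cached trait values without firing observer callbacks.
#
#     with hold_trait_notifications(device):
#         device.some_value_trait = 0.0  # cached, but handlers stay silent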
class HasTraits(metaclass=HasTraitsMeta):
__notify_list__ = {}
__cls_namespace__ = {}
def __init__(self, **values):
# who is informed on new get or set values
self.__notify_list__ = {}
# for cached properties and values in this instance
self.__cache__ = {}
self._calibrations = {}
for name, trait in self._traits.items():
trait.__init_owner_instance__(self)
if trait.default is not Undefined:
self.__cache__[name] = trait.default
@util.hide_in_traceback
def __init_subclass__(cls):
cls._traits = dict(getattr(cls, "_traits", {}))
cls._property_attrs = []
cls._value_attrs = []
cls._datareturn_attrs = []
parent_traits = getattr(cls.__mro__[1], "_traits", {})
# annotations = getattr(cls, '__annotations__', {})
for name, trait in dict(cls._traits).items():
# Apply the trait decorator to the object if it is "part 2" of a decorator
obj = getattr(cls, name)
if not isinstance(obj, Trait):
if trait.role in (
Trait.ROLE_PROPERTY,
Trait.ROLE_DATARETURN,
) and callable(obj):
# if it's a method, decorate it
cls._traits[name] = trait(obj)
else:
# if not decorating, clear from the traits dict, and emit a warning at runtime
thisclsname = cls.__qualname__
parentclsname = cls.__mro__[1].__qualname__
warn(
f"'{name}' in {thisclsname} is not a trait, but replaces one in parent class {parentclsname}"
)
del cls._traits[name]
continue
setattr(cls, name, cls._traits[name])
if cls._traits in HasTraitsMeta.__cls_namespace__:
HasTraitsMeta.__cls_namespace__.remove(cls._traits)
# finalize trait setup
for name, trait in dict(cls._traits).items():
if not hasattr(trait, "__objclass__"):
trait.__set_name__(cls, name)
trait.__init_owner_subclass__(cls)
if trait.role == Trait.ROLE_VALUE:
cls._value_attrs.append(name)
elif trait.role == Trait.ROLE_DATARETURN:
cls._datareturn_attrs.append(name)
elif trait.role == Trait.ROLE_PROPERTY:
cls._property_attrs.append(name)
@util.hide_in_traceback
def __notify__(self, name, value, type, cache):
old = self.__cache__.setdefault(name, Undefined)
msg = dict(new=value, old=old, owner=self, name=name, type=type, cache=cache)
for handler in self.__notify_list__.values():
handler(dict(msg))
self.__cache__[name] = value
def set_key(self, key, value, name=None):
"""implement this in subclasses to use `key` to set a parameter value from the
Device with self.backend.
property traits defined with "key=" call this to set values
in the backend.
"""
clsname = self.__class__.__qualname__
raise NotImplementedError(
f"implement {clsname}.get_key for access to key/value parameters on the device"
)
def get_key(self, key, name=None):
"""implement this in subclasses to use `key` to retreive a parameter value from the
Device with self.backend.
property traits defined with "key=" call this to retrieve values
from the backend.
"""
clsname = self.__class__.__qualname__
raise NotImplementedError(
f"implement {clsname}.get_key for access key/value parameters on the device"
)
@util.hide_in_traceback
def __get_value__(self, name):
"""Get value of a trait for this value traits instance
Arguments:
name: Name of the trait
Returns:
cached value, or the trait default if it has not yet been set
"""
return self.__cache__[name]
@util.hide_in_traceback
def __set_value__(self, name, value):
"""Set value of a trait for this value traits instance
Arguments:
name: Name of the trait
value: value to assign
Returns:
None
"""
# assignment to to self.__cache__ here would corrupt 'old' message key in __notify__
pass
class Any(Trait, type=None):
"""allows any value"""
@util.hide_in_traceback
def validate(self, value, owner=None):
return value
@util.hide_in_traceback
def to_pythonic(self, value):
return value
Trait.__annotations__["key"] = Any
def observe(obj, handler, name=Any, type_=("get", "set")):
"""Register a handler function to be called whenever a trait changes.
The handler function takes a single message argument. This
dictionary message has the keys
* `new`: the updated value
* `old`: the previous value
* `owner`: the object that owns the trait
* `name`: the name of the trait
    * `type`: 'set' or 'get'

    Arguments:
        obj: the object with traits to observe
        handler: the handler function to call when the value changes
        name: notify only changes to these trait names (Any to disable filtering)
        type_: notify only these operation types ('get' and/or 'set')
"""
def validate_name(n):
attr = getattr(type(obj), n, Undefined)
if attr is Undefined:
raise TypeError(f'there is no attribute "{n}" to observe in "{obj}"')
elif not isinstance(attr, Trait):
raise TypeError(f"cannot observe {obj}.{n} because it is not a trait")
if not callable(handler):
raise ValueError(
f"argument 'handler' is {repr(handler)}, which is not a callable"
)
if isinstance(name, str):
validate_name(name)
name = (name,)
elif isinstance(name, (tuple, list)):
for n in name:
validate_name(n)
elif name is not Any:
raise ValueError(
f"name argument {name} has invalid type - must be one of (str, tuple, list), or the value Any"
)
    if isinstance(type_, str):
type_ = (type_,)
def wrapped(msg):
# filter according to name and type
if name is not Any and msg["name"] not in name:
return
elif msg["type"] not in type_:
return
elif isinstance(msg["new"], Trait):
raise TypeError(f"Trait instance returned as a callback value")
handler(msg)
if isinstance(obj, HasTraits):
obj.__notify_list__[handler] = wrapped
else:
raise TypeError("object to observe must be an instance of Device")
def unobserve(obj, handler):
"""Unregister a handler function from notifications in obj."""
if isinstance(obj, HasTraits):
try:
del obj.__notify_list__[handler]
except KeyError as e:
ex = e
else:
ex = None
if ex:
raise ValueError(f"{handler} was not registered to observe {obj}")
else:
raise TypeError("object to unobserve must be an instance of Device")
def find_trait_in_mro(cls):
if issubclass(cls, DependentTrait):
return find_trait_in_mro(type(cls._trait_dependencies["base"]))
else:
return cls
class DependentTrait(Trait):
_trait_dependencies = set()
def __set_name__(self, owner_cls, name):
super().__set_name__(owner_cls, name)
# propagate ownership of dependent traits, if available
if isinstance(owner_cls, HasTraits):
objclass = owner_cls
elif hasattr(self, "__objclass__"):
objclass = self.__objclass__
else:
return
for trait in self._trait_dependencies.values():
trait.__objclass__ = objclass
def _validate_trait_dependencies(self, owner, allow_none: bool, operation="access"):
if allow_none:
return
none_names = [
f"{owner}.{trait.name}"
for trait in self._trait_dependencies.values()
if getattr(owner, trait.name) is None
]
if len(none_names) == 1:
raise ValueError(
f"cannot {operation} {owner}.{self.name} while {none_names[0]} is None"
)
elif len(none_names) > 1:
raise ValueError(
f"cannot {operation} {owner}.{self.name} while {tuple(none_names)} are None"
)
@classmethod
def derive(mixin_cls, template_trait, dependent_traits={}, *init_args, **init_kws):
name = template_trait.__class__.__name__
name = ("" if name.startswith("dependent_") else "dependent_") + name
dependent_traits["base"] = template_trait
traits_dict = {}
for c in mixin_cls.__mro__:
if issubclass(c, DependentTrait):
traits_dict.update(c._trait_dependencies)
traits_dict.update(dependent_traits)
ns = dict(_trait_dependencies=traits_dict, **dependent_traits)
ttype = type(name, (mixin_cls, find_trait_in_mro(type(template_trait))), ns)
obj = ttype(*init_args, **init_kws)
return obj
class RemappingCorrectionMixIn(DependentTrait):
"""act as another BoundedNumber trait calibrated with a mapping"""
mapping: Any = None # really a pandas Series
EMPTY_STORE = dict(by_cal=None, by_uncal=None)
def _min(self, owner):
by_uncal = owner._calibrations.get(self.name, {}).get("by_uncal", None)
if by_uncal is None:
return None
else:
return by_uncal.min()
def _max(self, owner):
by_uncal = owner._calibrations.get(self.name, {}).get("by_uncal", None)
if by_uncal is None:
return None
else:
return by_uncal.max()
def __init_owner_instance__(self, owner):
self.set_mapping(self.mapping, owner=owner)
observe(
owner,
self._on_base_trait_change,
name=self._trait_dependencies["base"].name,
)
def _on_base_trait_change(self, msg):
owner = msg["owner"]
owner.__notify__(
self.name,
self.lookup_cal(msg["new"], owner),
msg["type"],
cache=msg["cache"],
)
def lookup_cal(self, uncal, owner):
"""look up and return the calibrated value, given the uncalibrated value"""
owner_cal = owner._calibrations.get(self.name, self.EMPTY_STORE)
if owner_cal.get("by_uncal", None) is None:
return None
try:
return owner_cal["by_uncal"].loc[uncal]
except KeyError:
# spare us pandas details in the traceback
util.logger.warning(
f"{self.__repr__(owner_inst=owner)} has no entry at {repr(uncal)} {self.label}"
)
return None
def find_uncal(self, cal, owner):
"""look up the calibrated value for the given uncalibrated value. In the event of a lookup
error, then if `self.allow_none` evaluates as True, triggers return of None, or if
`self.allow_none` evaluates False, ValueError is raised.
"""
owner_cal = owner._calibrations.get(self.name, self.EMPTY_STORE)
if owner_cal["by_uncal"] is None:
return None
i = owner_cal["by_cal"].index.get_loc(cal, method="nearest")
return owner_cal["by_cal"].iloc[i]
def set_mapping(self, series_or_uncal, cal=None, owner=None):
"""set the lookup mapping as `set_mapping(series)`, where `series` is a pandas Series (uncalibrated
values in the index), or `set_mapping(uncal_vector, cal=cal_vector)`, where both vectors have 1
dimension of the same length.
"""
if owner is None:
raise ValueError(f"must pass owner to set_mapping")
import pandas as pd
if isinstance(series_or_uncal, pd.Series):
by_uncal = pd.Series(series_or_uncal).copy()
elif cal is not None:
by_uncal = pd.Series(cal, index=series_or_uncal)
elif series_or_uncal is None:
return
else:
raise ValueError(
f"must call set_mapping with None, a Series, or a pair of vector "
f"arguments, not {series_or_uncal}"
)
by_uncal = by_uncal[~by_uncal.index.duplicated(keep="first")].sort_index()
by_uncal.index.name = "uncal"
by_uncal.name = "cal"
by_cal = pd.Series(by_uncal.index, index=by_uncal.values, name="uncal")
by_cal = by_cal[~by_cal.index.duplicated(keep="first")].sort_index()
by_cal.index.name = "cal"
owner._calibrations.setdefault(self.name, {}).update(
by_cal=by_cal, by_uncal=by_uncal
)
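# Both calling conventions sketched (owner and data values hypothetical):
#
#     import pandas as pd
#     # a Series with uncalibrated values in the index
#     trait.set_mapping(pd.Series([0.0, 1.0, 2.0], index=[10.0, 20.0, 30.0]), owner=dev)
#     # or paired vectors: uncalibrated values first, calibrated via the `cal` keyword
#     trait.set_mapping([10.0, 20.0, 30.0], cal=[0.0, 1.0, 2.0], owner=dev)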
@util.hide_in_traceback
def __get__(self, owner, owner_cls=None):
if owner is None or owner_cls is not self.__objclass__:
return self
# by_cal, by_uncal = owner._calibrations.get(self.name, (None, None))
self._validate_trait_dependencies(owner, self.allow_none, "get")
uncal = self._trait_dependencies["base"].__get__(owner, owner_cls)
cal = self.lookup_cal(uncal, owner)
if cal is None:
ret = uncal
else:
ret = cal
if hasattr(self, "name"):
owner.__notify__(
self.name,
ret,
"get",
cache=self.cache or (self.role == self.ROLE_VALUE),
)
return ret
@util.hide_in_traceback
def __set__(self, owner, cal):
owner_cal = owner._calibrations.get(self.name, self.EMPTY_STORE)
self._validate_trait_dependencies(owner, False, "set")
# start with type conversion and validation on the requested calibrated value
cal = self._trait_dependencies["base"].to_pythonic(cal)
# lookup the uncalibrated value that results in the nearest calibrated result
uncal = self.find_uncal(cal, owner)
if uncal is None:
self._trait_dependencies["base"].__set__(owner, cal)
elif uncal != type(self._trait_dependencies["base"]).validate(
self, uncal, owner
):
# raise an exception if the calibration table contains invalid
# values, instead
raise ValueError(
f"calibration lookup in {self.__repr__(owner_inst=owner)} produced invalid value {repr(uncal)}"
)
else:
# execute the set
self._trait_dependencies["base"].__set__(owner, uncal)
if hasattr(self, "name"):
owner.__notify__(
self.name,
cal,
"set",
cache=self.cache or (self.role == self.ROLE_VALUE),
)
class TableCorrectionMixIn(RemappingCorrectionMixIn):
_CAL_TABLE_KEY = "table"
path_trait = None # a dependent Unicode trait
index_lookup_trait = None # a dependent trait
table_index_column: str = None
def __init_owner_instance__(self, owner):
super().__init_owner_instance__(owner)
observe(
owner,
self._on_cal_update_event,
name=[self.path_trait.name, self.index_lookup_trait.name],
type_="set",
)
def _on_cal_update_event(self, msg):
owner = msg["owner"]
if msg["name"] == self.path_trait.name:
# if msg['new'] == msg['old']:
# return
path = msg["new"]
index = getattr(owner, self.index_lookup_trait.name)
ret = self._load_calibration_table(owner, path)
self._update_index_value(owner, index)
return ret
elif msg["name"] == self.index_lookup_trait.name:
# if msg['new'] == msg['old']:
# return
path = getattr(owner, self.path_trait.name)
index = msg["new"]
if self._CAL_TABLE_KEY not in owner._calibrations.get(self.name, {}):
self._load_calibration_table(owner, path)
ret = self._update_index_value(owner, index)
return ret
else:
raise KeyError(f"unsupported trait name {msg['name']}")
# return self._update_index_value(msg["owner"], msg["new"])
def _load_calibration_table(self, owner, path):
""" stash the calibration table from disk
"""
import pandas as pd
from pathlib import Path
def read(path):
# quick read
cal = pd.read_csv(str(path), index_col=self.table_index_column, dtype=float)
cal.columns = cal.columns.astype(float)
if self.index_lookup_trait.max in cal.index:
cal.drop(self.index_lookup_trait.max, axis=0, inplace=True)
# self._cal_offset.values[:] = self._cal_offset.values-self._cal_offset.columns.values[np.newaxis,:]
owner._calibrations.setdefault(self.name, {}).update(
{self._CAL_TABLE_KEY: cal}
)
owner._logger.debug(f"calibration data read from {path}")
if path is None:
if not self.allow_none:
raise ValueError(
f"{self} defined with allow_none=False; path_trait must not be None"
)
else:
return None
read(path)
def _touch_table(self, owner):
# make sure that calibrations have been initialized
table = owner._calibrations.get(self.name, {}).get(self._CAL_TABLE_KEY, None)
if table is None:
path = getattr(owner, self.path_trait.name)
index = getattr(owner, self.index_lookup_trait.name)
if None not in (path, index):
setattr(owner, self.path_trait.name, path)
setattr(owner, self.index_lookup_trait.name, index)
def _update_index_value(self, owner, index_value):
"""update the calibration on change of index_value"""
cal = owner._calibrations.get(self.name, {}).get(self._CAL_TABLE_KEY, None)
if cal is None:
txt = f"index_value change has no effect because calibration_data has not been set"
elif index_value is None:
cal = None
txt = f"set {owner}.{self.index_lookup_trait.name} to enable calibration"
else:
# pull in the calibration mapping specific to this index_value
i_freq = cal.index.get_loc(index_value, "nearest")
cal = cal.iloc[i_freq]
txt = f"calibrated at {index_value/1e6:0.3f} MHz"
self.set_mapping(cal, owner=owner)
@util.hide_in_traceback
def __get__(self, owner, owner_cls=None):
if owner is None or owner_cls is not self.__objclass__:
return self
self._touch_table(owner)
return super().__get__(owner, owner_cls)
@util.hide_in_traceback
def __set__(self, owner, cal):
self._touch_table(owner)
super().__set__(owner, cal)
class TransformMixIn(DependentTrait):
"""act as an arbitrarily-defined (but reversible) transformation of another BoundedNumber trait"""
_forward: Any = lambda x, y: x
_reverse: Any = lambda x, y: x
def __init_owner_instance__(self, owner):
super().__init_owner_instance__(owner)
observe(owner, self.__owner_event__)
def __owner_event__(self, msg):
# pass on a corresponding notification when self._trait_dependencies['base'] changes
base_trait = self._trait_dependencies["base"]
if msg["name"] != getattr(base_trait, "name", None) or not hasattr(
base_trait, "__objclass__"
):
return
owner = msg["owner"]
owner.__notify__(self.name, msg["new"], msg["type"], cache=msg["cache"])
def _transformed_extrema(self, owner):
base_trait = self._trait_dependencies["base"]
base_bounds = [base_trait._min(owner), base_trait._max(owner)]
other_trait = self._trait_dependencies.get("other", None)
if other_trait is None:
trial_bounds = [
self._forward(base_bounds[0]),
self._forward(base_bounds[1]),
]
else:
other_value = getattr(owner, other_trait.name)
# other_bounds = [
# other_trait._min(owner),
# other_trait._max(owner),
# ]
# trial_bounds = [
# self._forward(base_bounds[0], other_bounds[0]),
# self._forward(base_bounds[0], other_bounds[1]),
# self._forward(base_bounds[1], other_bounds[0]),
# self._forward(base_bounds[1], other_bounds[1]),
# ]
trial_bounds = [
self._forward(base_bounds[0], other_value),
self._forward(base_bounds[1], other_value),
]
if None in trial_bounds:
return None, None
return min(trial_bounds), max(trial_bounds)
def _min(self, owner):
# TODO: ensure this works properly for any reversible self._forward()?
lo, hi = self._transformed_extrema(owner)
if lo is None:
return None
else:
return min(lo, hi)
def _max(self, owner):
# TODO: ensure this works properly for any reversible self._forward()?
lo, hi = self._transformed_extrema(owner)
if hi is None:
return None
else:
return max(lo, hi)
def __get__(self, owner, owner_cls=None):
if owner is None or owner_cls is not self.__objclass__:
return self
base_value = self._trait_dependencies["base"].__get__(owner, owner_cls)
if "other" in self._trait_dependencies:
other_value = self._trait_dependencies["other"].__get__(owner, owner_cls)
ret = self._forward(base_value, other_value)
else:
ret = self._forward(base_value)
if hasattr(self, "name"):
owner.__notify__(
self.name,
ret,
"get",
cache=self.cache or (self.role == self.ROLE_VALUE),
)
return ret
def __set__(self, owner, value_request):
# use the base trait to cast the value into the proper format and validate it
base_trait = self._trait_dependencies["base"]
value = base_trait.to_pythonic(value_request)
# now reverse the transformation
if "other" in self._trait_dependencies:
other_trait = self._trait_dependencies["other"]
other_value = other_trait.__get__(owner, other_trait.__objclass__)
base_value = self._reverse(value, other_value)
else:
base_value = self._reverse(value)
# set the value of the base trait with the reverse-transformed value
base_trait.__set__(owner, base_value)
if hasattr(self, "name"):
owner.__notify__(
self.name,
value,
"set",
cache=self.cache or (self.role == self.ROLE_VALUE),
)
class BoundedNumber(Trait):
"""accepts numerical, str, or bytes values, following normal python casting procedures (with bounds checking)"""
default: ThisType = None
allow_none: bool = True
min: ThisType = None
max: ThisType = None
@util.hide_in_traceback
def validate(self, value, owner=None):
if not isinstance(value, (bytes, str, bool, numbers.Number)):
raise ValueError(
f"a '{type(self).__qualname__}' trait value must be a numerical, str, or bytes instance"
)
# Check bounds once it's a numerical type
min = self._min(owner)
max = self._max(owner)
if max is not None and value > max:
raise ValueError(
f"{value} is greater than the max limit {max} of {self._owned_name(owner)}"
)
if min is not None and value < min:
raise ValueError(
f"{value} is less than the min limit {min} of {self._owned_name(owner)}"
)
return value
def _max(self, owner):
"""overload this to dynamically compute max"""
return self.max
def _min(self, owner):
"""overload this to dynamically compute max"""
return self.min
path_trait: Any = None # TODO: should be a Unicode string trait
index_lookup_trait: Any = None # TODO: this is a trait that should almost certainly be a BoundedNumber
table_index_column: str = None
def calibrate_from_table(
self,
path_trait,
index_lookup_trait,
*,
table_index_column: str = None,
help="",
label=Undefined,
allow_none=False,
):
"""generate a new Trait with value dependent on another trait. their configuration
comes from a trait in the owner.
Arguments:
offset_name: the name of a value trait in the owner containing a numerical offset
lookup1d: a table containing calibration data, or None to configure later
"""
if label is Undefined:
label = self.label
ret = TableCorrectionMixIn.derive(
self,
dict(path_trait=path_trait, index_lookup_trait=index_lookup_trait,),
help=help,
label=self.label if label is Undefined else label,
sets=self.sets,
gets=self.gets,
allow_none=allow_none,
table_index_column=table_index_column,
)
return ret
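# Wiring sketch (trait and column names hypothetical): the calibrated trait
# reads a CSV indexed by frequency, with one column per uncalibrated setting:
#
#     calibration_path = Unicode(allow_none=True)
#     frequency = Float(min=0)
#     power = Float().calibrate_from_table(
#         path_trait=calibration_path,
#         index_lookup_trait=frequency,
#         table_index_column="frequency",
#     )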
def calibrate_from_expression(
self,
trait_expression,
help: str = "",
label: str = Undefined,
allow_none: bool = False,
):
if isinstance(self, DependentTrait):
# This a little unsatisfying, but the alternative would mean
# solving the trait_expression for the trait `self`
obj = trait_expression
while isinstance(obj, DependentTrait):
obj = obj._trait_dependencies["base"]
if obj == self:
break
else:
raise TypeError(
f"the trait being calibrated must also be the first trait in the calibration expression"
)
return self.update(
trait_expression, help=help, label=label, allow_none=allow_none
)
# def calibrate(
# self,
# offset=Undefined,
# mapping=Undefined,
# table=Undefined,
# help="",
# label=Undefined,
# allow_none=False,
# ):
# """generate a new Trait with value dependent on another trait. their configuration
# comes from a trait in the owner.
# Arguments:
# offset_name: the name of a value trait in the owner containing a numerical offset
# lookup1d: a table containing calibration data, or None to configure later
# """
# params = {}
# if mapping is not Undefined:
# mixin = RemappingCorrectionMixIn
# params["mapping"] = mapping
# elif offset is not Undefined:
# mixin = OffsetCorrectionMixIn
# params["offset"] = offset
# if label is Undefined:
# label = self.label
# if len(params) != 1:
# raise ValueError(f"must set exactly one of `offset`, `lookup1d`, and `lookup2d`")
# return mixin.derive(
# self,
# help=help,
# label=self.label,
# sets=self.sets,
# gets=self.gets,
# allow_none=allow_none,
# **params,
# )
def transform(
self,
other_trait: Trait,
forward: callable,
reverse: callable,
help="",
allow_none=False,
):
"""generate a new Trait subclass that adjusts values in other traits.
Arguments:
forward: implementation of the forward transformation
reverse: implementation of the reverse transformation
"""
obj = TransformMixIn.derive(
self,
dependent_traits={} if other_trait is None else dict(other=other_trait),
help=help,
label=self.label,
sets=self.sets,
gets=self.gets,
allow_none=allow_none,
_forward=forward,
_reverse=reverse,
)
return obj
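# Sketch: expose a dBm-valued trait as linear milliwatts (names hypothetical):
#
#     import math
#     power_dBm = Float(min=-30, max=20)
#     power_mW = power_dBm.transform(
#         None,
#         forward=lambda x: 10 ** (x / 10),
#         reverse=lambda x: 10 * math.log10(x),
#         help="power in mW",
#     )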
def __neg__(self):
def neg(x, y=None):
return None if x is None else -x
return self.transform(
None, neg, neg, allow_none=self.allow_none, help=f"-1*({self.help})"
)
def __add__(self, other):
def add(x, y):
return None if None in (x, y) else x + y
def sub(x, y):
return None if None in (x, y) else x - y
return self.transform(
other, add, sub, allow_none=self.allow_none, help=f"({self.help}) + {other}"
)
__radd__ = __add__
def __sub__(self, other):
def add(x, y):
return None if None in (x, y) else x + y
def sub(x, y):
return None if None in (x, y) else x - y
return self.transform(
other, sub, add, allow_none=self.allow_none, help=f"({self.help}) - {other}"
)
def __rsub__(self, other):
def add(x, y):
return None if None in (x, y) else y + x
def sub(x, y):
return None if None in (x, y) else y - x
return self.transform(
other, sub, add, allow_none=self.allow_none, help=f"{other} - ({self.help})"
)
def __mul__(self, other):
def mul(x, y):
return None if None in (x, y) else x * y
def div(x, y):
return None if None in (x, y) else x / y
return self.transform(
other, mul, div, allow_none=self.allow_none, help=f"({self.help}) * {other}"
)
__rmul__ = __mul__
def __truediv__(self, other):
def mul(x, y):
return None if None in (x, y) else x * y
def div(x, y):
return None if None in (x, y) else x / y
return self.transform(
other, div, mul, allow_none=self.allow_none, help=f"({self.help}) / {other}"
)
def __rdiv__(self, other):
def mul(x, y):
return None if None in (x, y) else y * x
def div(x, y):
return None if None in (x, y) else y / x
return self.transform(
other, div, mul, allow_none=self.allow_none, help=f"{other} / ({self.help})"
)
class NonScalar(Any):
"""generically non-scalar data, such as a list, array, but not including a string or bytes"""
@util.hide_in_traceback
def validate(self, value, owner=None):
if isinstance(value, (bytes, str)):
raise ValueError(f"given text data but expected a non-scalar data")
if not hasattr(value, "__iter__") and not hasattr(value, "__len__"):
raise ValueError(f"expected non-scalar data but given a non-iterable")
return value
class Int(BoundedNumber, type=int):
"""accepts numerical, str, or bytes values, following normal python casting procedures (with bounds checking)"""
class Float(BoundedNumber, type=float):
"""accepts numerical, str, or bytes values, following normal python casting procedures (with bounds checking)"""
step: ThisType = None
@util.hide_in_traceback
def validate(self, value, owner=None):
value = super().validate(value, owner)
if self.step is not None:
mod = value % self.step
if mod < self.step / 2:
return value - mod
else:
return value - (mod - self.step)
return value
class Complex(Trait, type=complex):
"""accepts numerical or str values, following normal python casting procedures (with bounds checking)"""
allow_none: bool = False
class Bool(Trait, type=bool):
"""accepts boolean or numeric values, or a case-insensitive match to one of ('true',b'true','false',b'false')"""
allow_none: bool = False
@util.hide_in_traceback
def validate(self, value, owner=None):
if isinstance(value, (bool, numbers.Number)):
return value
elif isinstance(value, (str, bytes)):
lvalue = value.lower()
if lvalue in ("true", b"true"):
return True
elif lvalue in ("false", b"false"):
return False
raise ValueError(
f"'{self.__repr__(owner_inst=owner)}' accepts only boolean, numerical values,"
"or one of ('true',b'true','false',b'false'), case-insensitive"
)
class String(Trait):
"""base class for string types, which adds support for case sensitivity arguments"""
case: bool = True
# allow_none: bool = True # let's not override this default
@util.hide_in_traceback
def contains(self, iterable, value):
if not self.case:
iterable = [v.lower() for v in iterable]
value = value.lower()
return value in iterable
class Unicode(String, type=str):
"""accepts strings or numeric values only; convert others explicitly before assignment"""
default: ThisType = ""
@util.hide_in_traceback
def validate(self, value, owner=None):
if not isinstance(value, (str, numbers.Number)):
raise ValueError(
f"'{type(self).__qualname__}' traits accept values of str or numerical type, not {type(value).__name__}"
)
return value
class Bytes(String, type=bytes):
"""accepts bytes objects only - encode str (unicode) explicitly before assignment"""
default: ThisType = b""
class Iterable(Trait):
"""accepts any iterable"""
@util.hide_in_traceback
def validate(self, value, owner=None):
if not hasattr(value, "__iter__"):
raise ValueError(
f"'{type(self).__qualname__}' traits accept only iterable values"
)
return value
class Dict(Iterable, type=dict):
"""accepts any type of iterable value accepted by python `dict()`"""
class List(Iterable, type=list):
"""accepts any type of iterable value accepted by python `list()`"""
class Tuple(Iterable, type=tuple):
"""accepts any type of iterable value accepted by python `tuple()`"""
sets: bool = False
class Path(Trait, type=Path):
must_exist: bool = False
""" does the path need to exist when set? """
@util.hide_in_traceback
def validate(self, value, owner=None):
path = self.type(value)
if self.must_exist and not path.exists():
raise IOError(f"the path {path} does not exist")
return path
class PandasDataFrame(NonScalar, type=pd.DataFrame):
pass
class PandasSeries(NonScalar, type=pd.Series):
pass
class NumpyArray(NonScalar, type=np.ndarray):
pass
class NetworkAddress(Unicode):
"""a IDN-compatible network address string, such as an IP address or DNS hostname"""
accept_port: bool = True
@util.hide_in_traceback
def validate(self, value, owner=None):
"""Rough IDN compatible domain validator"""
host, *extra = value.split(":", 1)
if len(extra) > 0:
port = extra[0]
try:
int(port)
except ValueError:
raise ValueError(f'port {port} in "{value}" is invalid')
if not self.accept_port:
raise ValueError(
f"{self} does not accept a port number (accept_port=False)"
)
for validate in _val.ipv4, _val.ipv6, _val.domain, _val.slug:
if validate(host):
break
else:
raise ValueError("invalid host address")
return value
VALID_TRAIT_ROLES = Trait.ROLE_VALUE, Trait.ROLE_PROPERTY, Trait.ROLE_DATARETURN
def subclass_namespace_traits(namespace_dict, role, omit_trait_attrs):
for name, attr in dict(namespace_dict).items():
if isclass(attr) and issubclass(attr, Trait):
# subclass our traits with the given role
new_trait = type(name, (attr,), dict(role=role))
new_trait.role = role
# clean out annotations for stub generation
new_trait.__annotations__ = dict(new_trait.__annotations__)
for drop_attr in omit_trait_attrs:
new_trait.__annotations__.pop(drop_attr)
new_trait.__module__ = namespace_dict["__name__"]
namespace_dict[name] = new_trait
import numpy as np
def calculate_matrix(Ptran, states, number_processes):
"""Extends a HMM, corresponding to a binary Markov Process,
(i.e. 0 or 1 open channels) to model up until K open channels
by assuming K independent binary Markov processes."""
# Fill in diagonals such that each row sums to 1
for i in range(Ptran.shape[0]):
Ptran[i, i] = 1 - np.sum(Ptran[i, :])
n0 = len(states)
new = Ptran.copy()
new_states = [(x,) for x in range(n0)]
for process in range(1, number_processes):
# We expand our current transition matrix (that models up to
# `process` number of separate processes) its' dimensions by n0. We
# basically add another possible state transition for a new process.
nc = new.shape[0]
Ptran_temp = np.zeros((n0*nc, n0*nc))
temp_states = []
for i in range(n0):
temp_states.extend([s + (i,) for s in new_states])
for j in range(n0):
# We add i -> j as our final transition
Ptran_temp[i*nc:(i+1)*nc, j*nc:(j+1)*nc] = Ptran[i][j] * new
# We now group similar processes together to reduce our matrix.
# E.g. (1, 2, 3) is the same as (2, 3, 1)
new_states = sorted(list(set([tuple(sorted(x)) for x in temp_states])))
new = np.zeros((len(new_states), len(new_states)))
for i in range(len(new_states)):
ix_i = [k for k, x in enumerate(temp_states)
if tuple(sorted(x)) == new_states[i]]
for j in range(len(new_states)):
ix_j = [k for k, x in enumerate(temp_states)
if tuple(sorted(x)) == new_states[j]]
new[i, j] = np.sum(Ptran_temp[ix_i, :][:, ix_j])
new[i, j] /= len(ix_i)
new_channels = []
for s in new_states:
new_channels.append(sum([states[x] for x in s]))
new_channels = np.array(new_channels)
return new, new_channels
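# Example (hypothetical rates): extend a 2-state open/closed channel model to
# 3 independent channels; grouping leaves 4 states (0..3 open channels):
#
#     Ptran = np.array([[0.0, 0.1],
#                       [0.2, 0.0]])
#     Ptran3, channels3 = calculate_matrix(Ptran, states=[0, 1], number_processes=3)
#     # Ptran3.shape == (4, 4); channels3 == array([0, 1, 2, 3])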
def get_Psig(signal, States, kexp):
"""The provided States correspond to numbers of open channels,
this calculates the PDF of a Gaussian, with the exception of some
constants, assuming means equal to the open channels and a tunable
variance (kexp)"""
Psig = np.zeros((len(signal), len(States)))
for i in range(len(Psig)):
Psig[i] = np.exp((-(signal[i] - States)**2)/(kexp))
return Psig
def forward(Psig, Ptran, etat_in=None, coef=1, normalize=True):
"""Custom forward-backward algorithm. This function is also used for the
backward pass by reversing Psig and transposing Ptran. This custom
function is both faster and slightly more accurate than, for example,
hmmlearn. It introduces memory (defined by coef), by taking in to account
the calculated probabilities from a previous pass (etat_in)."""
if etat_in is None: etat_in = np.ones(Psig.shape)/Psig.shape[1]
alpha = np.zeros(Psig.shape) # len(sig) x n_state
etat = np.zeros(Psig.shape) # len(sig) x n_state
etat[0] = etat_in[0]
alpha[0] = etat_in[0]
if normalize:
alpha[0] = etat_in[0]*Psig[0]
alpha[0] /= alpha[0].sum()
for j in range(1, Psig.shape[0]):
etat[j] = alpha[j-1]@Ptran
if normalize: etat[j] /= etat[j].sum()
etat[j] = (etat[j]**coef) * ((etat_in[j])**(1-coef))
if normalize: etat[j] /= etat[j].sum()
alpha[j] = etat[j] * Psig[j]
alpha[j] /= alpha[j].sum()
return alpha, etat
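# Usage sketch: one forward pass, then a backward pass over the reversed signal
# that reuses the forward state estimates as a prior (values hypothetical):
#
#     Psig = get_Psig(signal, channels3, kexp=0.5)
#     alpha_f, etat_f = forward(Psig, Ptran3)
#     alpha_b, etat_b = forward(Psig[::-1], Ptran3.T, etat_in=etat_f[::-1], coef=0.5)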
statementArr = [
'startSwitch("variableName")',
'endSwitch()',
'''getComment("This is a comment")''',
"puts('Something to print')",
"getClassBeginning('sampleClass')",
"getClassEnding()",
'setVar(valueToGet="1", valueToChange="x")',
'startCase("x")',
'endCase()',
'startDefault()',
'endDefault()',
"equals(thing1 = 'x', thing2 = 'y', theType='int')",
'''Or([compare(thing1 = "1", thing2 = "1", theType = "Int"), compare(thing1 = '"lolz"', thing2 = "lolz", theType = "String")])''',
'''And(["x", "y"])''',
'''Not("x")''',
'greaterThan("thing1", "thing2")',
'lessThan("thing1", "thing2")',
'Eval("Evaluate this string.")',
'concatenateStrings(["String 1", "String 2"])',
"endConditionalBlock()",
"startConditionalBlock()",
'startIf(condition="x==2")',
"endIf()",
'initializeVar(initialValue="1", variableName = "x", variableType = "String", arrayDimensions=None)',
'getFileExtension()',
'startForEach(variableName="varInArr", array="theArr", typeInArray="String")',
'startElse()',
'startWhile("x==1")',
'getReturnStatement("toReturn")',
'compare(theType="String", thing1="str1", thing2="str2")',
"endElse()",
"endWhile()",
"endForEach()",
"endMethod(methodName='methodName')",
'endElseIf()',
'arrayContains(valueToCheck="1", array="arrayOfIntegers")',
'this("variableName")',
'''function(functionName="startForEach", returnType="String", isStatic=True, parameterNames=["lang", "array", "variableName", "typeInArray"], parameterTypes = ["String", "String", "String", "String"], body=[])''',
'startMain()',
'endMain()',
'subString(start="1", end="5", theString="strName")',
'charAt(theString="stringName", index="1")',
'index(indexList = ["1"], theType="String[]", theObject="ArrayName")',
'stringLength("stringName")',
'split(string = "string,test,1", separator = ",")',
'add(["1","2","3"])',
'concatenateArrays(["array1", "array2"])',
'endConstructor()',
'divide(20, 5)',
'toString(objectToConvert=1, convertFrom="Int")',
'startMethod(name="exampleMethod", returnType="int", parameterNames=["param1", "param2"], parameterTypes=["int", "int"], isStatic=True, requiresTheFunctions=False, isDefined=False)',
'typeOf(1)',
'callFunctionWithNamedArgs()',
'getVariableName(True)',
'getVariableName(False)',
"lessThanOrEqual('a', 'b')",
"greaterThanOrEqual('a', 'b')",
"getArrayInitializer([[1,2], [3,4]])",
"concatenateStrings(['a', 'b'])",
'def callFunction("functionName", "fromClass", ["parameter1", "parameter2"]):',
'semicolon("a statement")',
'getCorrespondingTypeWithoutBrackets("int")',
'getCorrespondingTypeWithoutBrackets("string")',
'getCorrespondingTypeWithoutBrackets("boolean")',
'startElseIf("condition")',
'include("fileToInclude")',
'Error("There is an error!")',
'args()',
'seriesOfStatements[1,2,3,4]',
'compare("String1", "String2", "String")'
]
#!/usr/bin/python
#-*- encoding: utf-8 -*-
"""
A Docutils Publisher script for the Legal Resource Registry
"""
import re,os,os.path,sys
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except:
pass
pth = os.path.split(sys.argv[0])[0]
pth = os.path.join(pth,"..")
pth = os.path.abspath(pth)
from docutils.core import publish_cmdline, default_description
from docutils.parsers.rst import directives, roles, states
from docutils import nodes, statemachine
from LRR import *
directives.register_directive("bubble", BubbleDirective)
directives.register_directive("variation", VariationDirective)
directives.register_directive("reporter", ReporterDirective)
directives.register_directive("fields", FieldsDirective)
directives.register_directive("jurisdiction", JurisdictionDirective)
directives.register_directive("court", CourtDirective)
directives.register_directive("breadcrumb", BreadcrumbDirective)
directives.register_directive("tab", TabDirective)
directives.register_directive("floater", FloaterDirective)
roles.register_local_role("trans", role_trans)
"""Project metadata
Information describing the project.
"""
# The package name, which is also the so-called "UNIX name" for the project.
package = 'ecs'
project = "Entity-Component-System"
project_no_spaces = project.replace(' ', '')
version = '0.1'
description = 'An entity/component system library for games'
authors = ['<NAME>', '<NAME>']
authors_string = ', '.join(authors)
emails = ['<EMAIL>', '<EMAIL>']
license = 'MIT'
copyright = '2013 ' + authors_string
url = 'https://github.com/seanfisk/ecs'
# catana/services/email.py
"""Email service"""
import smtplib
from email.mime.text import MIMEText
from catana.core.config import EMAIL, EMAIL_HOST, EMAIL_PASSWORD, EMIAL_HOST_PORT
class Email:
"""Class to send emails to users"""
smtp: smtplib.SMTP
def __init__(self, auth=False):
"""Connect to SMTP server"""
self.smtp = smtplib.SMTP(host=EMAIL_HOST, port=EMIAL_HOST_PORT)
self.smtp.ehlo()
if auth:
self.smtp.login(EMAIL, EMAIL_PASSWORD)
def send(self, email: str, user_email: str, subject: str, content: str):
"""Send example email"""
msg = MIMEText(content)
msg["Subject"] = subject
msg["From"] = email
msg["To"] = user_email
self.smtp.send_message(msg)
def close_connection(self):
"""Close connection"""
self.smtp.close()
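# Usage sketch (host and credentials come from catana.core.config; the
# recipient address is hypothetical):
#
#     mailer = Email(auth=True)
#     mailer.send(EMAIL, "user@example.com", "Welcome", "Hello from catana!")
#     mailer.close_connection()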
import sqlite3
from flask import current_app, g # g is a namespace object that can store data during an application context.
def get_db():
if 'db' not in g: # if the object g does not have database, then create a new connection with it
g.db = sqlite3.connect(
current_app.config['DATABASE'], # the path of database is assigned in the __init__.py file
detect_types=sqlite3.PARSE_DECLTYPES
)
g.db.row_factory = sqlite3.Row # this tells the connection to return rows behave like dicts. Allows accesssing the columns by name
return g.db
def close_db(e=None): # This function is responsible for closing the connection when the request is done.
db = g.pop('db', None)
if db is not None:
db.close()
def init_app(app):
app.teardown_appcontext(close_db) # This tells the application to close the connection after returning the response
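# Typical wiring in the application factory (sketch; the DATABASE path is
# hypothetical):
#
#     from flask import Flask
#     app = Flask(__name__)
#     app.config['DATABASE'] = 'instance/app.sqlite'
#     init_app(app)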
"""
Executor class.
"""
from __future__ import unicode_literals
import yaml
import subprocess
from voluptuous import Schema
from contextlib import closing
from functools import partial
from six import PY2
from locale import getpreferredencoding
class BaseExecutor(object):
"""
A generic executor class.
"""
options_schema = Schema({})
def __init__(self, options=None):
self.options = self.options_schema(options or {})
@property
def full_name(self):
"""
Get the full name of the executor.
:returns: The full dotted-name representation of the current instance
class, including its definition module.
"""
return '{module}.{name}'.format(
module=self.__module__,
name=self.__class__.__name__,
)
def execute(self, environment, commands, display):
"""
Execute the specified commands.
:param environment: The environment variables dictionary.
:param commands: A list of commands to execute.
:param display: The display to use for command output and report.
:returns: True if the execution went fine, False otherwise.
"""
display.set_context(commands=commands)
for index, command in enumerate(commands):
with display.command(
index,
command,
) as result:
result.returncode = self.execute_one(
environment=environment,
command=command,
output=partial(display.command_output, index),
)
if result.returncode is None:
raise RuntimeError(
"No returncode specified for command execution "
"({})".format(
command,
),
)
elif result.returncode != 0:
return False
return True
def executor_representer(dumper, executor):
if executor.options:
return dumper.represent_mapping(
'tag:yaml.org,2002:map',
{
'name': executor.full_name,
'options': executor.options,
},
)
else:
return dumper.represent_scalar(
'tag:yaml.org,2002:str',
executor.full_name,
)
yaml.add_multi_representer(BaseExecutor, executor_representer)
class ShellExecutor(BaseExecutor):
"""
An executor that execute commands through the system shell.
"""
def execute_one(self, environment, command, output):
# Python 2 subprocess doesn't deal well with unicode commands.
command = (
command.encode(getpreferredencoding())
if PY2
else command
)
process = subprocess.Popen(
command,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
shell=True,
env=environment,
)
with closing(process.stdout):
while True:
data = process.stdout.read(4096)
if data:
output(data)
else:
break
process.wait()
return process.returncode
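# Usage sketch (the display object is assumed to provide the interface used by
# BaseExecutor.execute: set_context, command, and command_output):
#
#     import os
#     executor = ShellExecutor()
#     ok = executor.execute(environment=dict(os.environ),
#                           commands=['echo hello'],
#                           display=display)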
# tests/__init__.py
"""Unit test package for itpminer."""
from operator import getitem
from pendulum import period
def auto_none_days(days, points):
"""Autoincrement don't works days yet with None."""
return points + [None for _ in range(len(days) - len(points))]
def guide(total_points, graph_period):
"""Gerenate guide line with dayoffs."""
def weekday(day):
return not day.weekday() in (5, 6)
days = [(day, weekday(day)) for day in graph_period]
n_days = len(list(filter(lambda x: getitem(x, 1), days)))
median_points = total_points / (n_days - 1)
last_value = total_points
out = []
for n, (date, valid) in enumerate(days):
if valid and n != 0:
last_value = last_value - median_points
out.append(last_value)
else:
out.append(last_value)
return out
def burn_down(sprint, initial_data, final_data, total_points, sprint_data):
graph_period = list(period(initial_data, final_data).range('days'))
y = auto_none_days(graph_period, sprint_data)
return {
'data': [
{ # Guide
'x': graph_period,
'y': guide(total_points, graph_period),
'name': 'Guides',
'type': 'scatter',
'mode': 'lines',
'line': {'color': 'grey', 'width': 3},
},
{ # Sprint_data
'x': graph_period,
'y': y,
'type': 'scatter',
'name': 'Points',
'line': {'color': 'red', 'width': 3},
},
],
'layout': {
'title': {
'text': 'Burn Down',
'x': 0.05,
'xanchor': 'left',
}
}
}
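# Call sketch (dates and point values hypothetical):
#
#     import pendulum
#     fig = burn_down('Sprint 1',
#                     pendulum.date(2020, 1, 6), pendulum.date(2020, 1, 17),
#                     total_points=40, sprint_data=[40, 36, 30])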
# libartipy/geometry/__init__.py
from .quaternion import Quaternion
from .pose import Pose
from .coordinate_system import CoordinateSystem, CoordinateSystemConverter
from .camera import Camera
__author__ = 'n3k'
import os
from FakeTLSServer import WebServerSetup
from Configuration import Configuration
class TestControllerException(Exception):
pass
class TestController(object):
"""
This class is holds a dictionary with singletons per "client_address:hostname"
that holds the tracking for all the testcases for domains under evaluation
"""
def __init__(self, hostname, port, testcase_list):
self.hostname = hostname
self.port = port
self.testcase_iterator = iter(testcase_list)
self.fake_server = None
self.current_testcase = None
self.remove_filename_list = []
self.crt_filename = None
self.key_filename = None
def __exit__(self, exc_type, exc_value, traceback):
self.cleanup()
def notification(self):
"""
A callback defining the action to perform after a client connects to our web server
:return:
"""
pass
def create_certificate(self):
# Get the next TC of the List
self.current_testcase = self.get_next_testcase()
if self.current_testcase == None:
return (None,), False
# Make an instance
self.current_testcase = self.current_testcase(self.hostname, self.port)
cert_folder = Configuration().get_temp_certificate_folder()
# Generate the Certificate
crt, key = self.current_testcase.create_testing_certificate()
crt_filename = os.path.join(cert_folder, crt)
key_filename = os.path.join(cert_folder, key)
self.remove_filename_list.append(crt_filename)
self.remove_filename_list.append(key_filename)
return (crt_filename, key_filename), True
def get_next_testcase(self):
pass
def register_test_result(self, actual_status):
pass
def configure_web_server(self):
"""
Setup the internal web server that will get the request instead of the target domain
"""
# First check for current_testcase, if is not None we know
# the client did not connect to our fakeserver
if self.current_testcase is not None:
self.register_test_result("Certificate Rejected")
certificate, status = self.create_certificate()
if not status:
return None
self.crt_filename, self.key_filename = certificate
server_address = Configuration().fake_server_address
print "+ Setting up WebServer with Test: %s" % self.current_testcase
self.fake_server = WebServerSetup(keyfile=self.key_filename,
certfile=self.crt_filename,
server_address=server_address,
callback=self.notification)
# Return the actual address binded
server_address = self.fake_server.start()
return server_address
def cleanup(self):
"""Kill web server and erase cert,key pair"""
self.kill_web_server()
for filename in self.remove_filename_list:
try:
os.unlink(filename)
except:
pass
def kill_web_server(self):
if self.fake_server:
self.fake_server.kill()
self.fake_server = None # Don't call this twice!
ID = "channels"
permission = 3
privmsgEnabled = True
def execute(self, name, params, channel, userdata, rank, chan):
channels = ", ".join(self.channelData.keys())
self.sendNotice(name, "I'm currently connected to the following channels: {0}".format(channels))
# modules/hub/hub/features/donations/views.py
from datetime import datetime
from dataclasses import dataclass
from typing import List
import string
import requests
from flask import Blueprint, Response, request, url_for, redirect
from flask.views import MethodView
from flask_babelplus import gettext as _
from flask_login import current_user, login_required
from flask_sqlalchemy import Pagination
from flaskbb.display.navigation import NavigationLink
from flaskbb.utils.helpers import register_view, render_template
from hub.utils import configs_path
from hub.models import Player, PointsTransaction, MoneyTransaction, DonationType
donations = Blueprint("donations", __name__, template_folder="templates")
class UserDonationsView(MethodView):
def __get_actions(self):
actions = []
actions.append(
NavigationLink(
endpoint="donations.info",
name=_("✨ Donate"),
))
actions.append(
NavigationLink(
endpoint="donations.points_transactions",
name=_("🔆 Opyxes Transactions"),
))
actions.append(
NavigationLink(
endpoint="donations.money_transactions",
name=_("💵 Donations History"),
))
return actions
def get_args(self):
return {
"actions": self.__get_actions()
}
def get(self):
return redirect(url_for("donations.info"))
class DonationsInfoView(UserDonationsView):
decorators = [login_required]
def get(self):
content = ""
with open(configs_path + "/donations_info.html", "r") as content_html:
content = content_html.read()
return render_template("features/donations/info.html", **self.get_args(), content=content)
class UserPointsTransactionsView(UserDonationsView):
decorators = [login_required]
def get(self):
page = request.args.get('page', 1, type=int)
query = PointsTransaction.query\
.join(Player)\
.filter(Player.discord_user_id == current_user.discord)\
.order_by(PointsTransaction.datetime.desc())
pagination: Pagination = query.paginate(page, 20)
transactions = pagination.items
@dataclass
class TransactionData:
datetime: datetime
change: string
comment: string
data = []
for transaction in transactions:
data.append(TransactionData(
datetime=transaction.datetime,
change="{:+2}".format(transaction.change).rstrip('0').rstrip('.') + " 🔆",
comment=transaction.comment
))
return render_template(
"features/donations/points_transactions.html",
**self.get_args(),
transactions=data,
pagination=pagination)
class UserMoneyTransactionsView(UserDonationsView):
decorators = [login_required]
def get(self):
page = request.args.get('page', 1, type=int)
query = MoneyTransaction.query\
.join(Player)\
.join(DonationType)\
.filter(Player.discord_user_id == current_user.discord)\
.order_by(MoneyTransaction.datetime.desc())
pagination: Pagination = query.paginate(page, 20)
transactions: List[MoneyTransaction] = pagination.items
@dataclass
class TransactionData:
datetime: datetime
change: string
comment: string
data = []
for transaction in transactions:
type_str = transaction.donation_type.type
if type_str == "qiwi":
type_str = "Пожертвование через QIWI"
elif type_str == "patreon":
type_str = "Подписка Patreon"
data.append(TransactionData(
datetime=transaction.datetime,
change="{:+2}".format(transaction.change).rstrip('0').rstrip('.') + " ₽",
comment=type_str
))
return render_template(
"features/donations/money_transactions.html",
**self.get_args(),
transactions=data,
pagination=pagination)
def parse_datetime(qiwi_format: str) -> datetime:
return datetime.fromisoformat(qiwi_format)
class QiwiHook(MethodView):
def post(self):
content = request.get_json()
print("----")
print("Qiwi hook:")
print("Request: " + str(request.__dict__))
print("Json: " + str(content))
print("----")
if content['payment']['type'] != 'IN' or content['payment']['status'] != 'SUCCESS':
print("Skip hook: Not suitable hook")
return Response(status=200)
if content['payment']['sum']['currency'] != 643: # ruble
print("Skip hook: Unknown currency")
return Response(status=200)
dt = parse_datetime(content['payment']['date'])
ckey = content['payment']['comment'].split(' ')[0].lower().strip(string.punctuation)
amount = content['payment']['sum']['amount']
print("New donation from " + ckey + ". Amount: " + str(amount) + ". Datetime: " + dt.isoformat())
return Response(status=200)
def register_webhooks_service(app):
if "QIWI_TOKEN" not in app.config:
print("Error: QIWI_TOKEN isn't specified")
return
headers = {
"Authorization": "Bearer " + app.config["QIWI_TOKEN"],
"Accept": "application/json"
}
res = requests.get("https://edge.qiwi.com/person-profile/v1/profile/current", headers=headers)
print("QIWI Test:")
print(res.__dict__)
if not app.config["QIWI_HOOKS"]:
print("QIWI Webhooks registration skipped")
return
params = {
"hookType": 1,
"param": url_for("donations.qiwi_hook", _external=True),
"txnType": 0
}
headers = {
"Authorization": "Bearer " + app.config["QIWI_TOKEN"],
"Accept": "application/json"
}
res = requests.put("https://edge.qiwi.com/payment-notifier/v1/hooks", params=params, headers=headers)
print("QIWI Webhooks registration result:")
print("Request: " + str(res.request.__dict__))
print("Res: " + str(res.__dict__))
register_view(
donations,
routes=["/"],
view_func=UserDonationsView.as_view("index"),
)
register_view(
donations,
routes=["/info"],
view_func=DonationsInfoView.as_view("info")
)
register_view(
donations,
routes=["/points_transactions"],
view_func=UserPointsTransactionsView.as_view("points_transactions")
)
register_view(
donations,
routes=["/money_transactions"],
view_func=UserMoneyTransactionsView.as_view("money_transactions")
)
register_view(
donations,
routes=['/qiwi_hook'],
view_func=QiwiHook.as_view('qiwi_hook')
)
from words import is_clean
def test_clean():
assert is_clean("snowdrift") is True
assert is_clean("snowdrift's") is False
assert is_clean("Englishes") is False
assert is_clean("steve") is False
assert is_clean("conglomerated") is False
# -*- coding: utf-8 -*-
# Copyright (c) 2021. Distributed under the terms of the MIT License.
from phonopy.interface.calculator import read_crystal_structure
from phonopy.structure.atoms import PhonopyAtoms
from vise.util.phonopy.phonopy_input import structure_to_phonopy_atoms
import numpy as np
def assert_same_phonopy_atoms(actual: PhonopyAtoms,
expected: PhonopyAtoms):
assert (actual.get_cell() == expected.get_cell()).all()
assert (actual.get_scaled_positions()
== expected.get_scaled_positions()).all()
assert actual.symbols == expected.symbols
def test_phonopy_atoms_behavior(sc_structure, tmpdir):
print(tmpdir)
tmpdir.chdir()
# actual = structure_to_phonopy_atoms(sc_structure)
sc_structure.to(fmt="poscar", filename="POSCAR")
a, _ = read_crystal_structure("POSCAR")
b = PhonopyAtoms(atoms=a)
print(type(a.get_cell()))
print(a.get_atomic_numbers())
assert_same_phonopy_atoms(a, b)
def test_structure_to_phonopy_atoms(sc_structure):
actual = structure_to_phonopy_atoms(sc_structure)
expected = PhonopyAtoms(symbols=["H"],
cell=np.array([[1.0, 0.0, 0.0],
[0.0, 1.0, 0.0],
[0.0, 0.0, 1.0]]),
scaled_positions=np.array([[0.0, 0.0, 0.0]]))
assert_same_phonopy_atoms(actual, expected)
#
# def test_make_phonopy_input(mc_structure, mc_structure_conv):
# actual = make_phonopy_input(unitcell=mc_structure,
# supercell_matrix=np.eye(3).tolist(),
# conventional_base=True)
# supercell_matrix = [[ 1., 1., 0.],
# [-1., 1., 0.],
# [ 0., 0., 1.]]
# supercell = mc_structure * supercell_matrix
# expected = PhonopyInput(unitcell=mc_structure,
# supercell=supercell,
# supercell_matrix=supercell_matrix)
# assert actual == expected
#
#
# def test_make_phonopy_input_default(mc_structure, mc_structure_conv):
# actual = make_phonopy_input(unitcell=mc_structure)
# supercell_matrix = [[ 2., 2., 0.],
# [-2., 2., 0.],
# [ 0., 0., 2.]]
# supercell = mc_structure * supercell_matrix
# expected = PhonopyInput(unitcell=mc_structure,
# supercell=supercell,
# supercell_matrix=supercell_matrix)
# assert actual == expected
#
#
# def test_make_phonopy_input_default_hexa():
# structure = Structure(Lattice.hexagonal(1.0, 2.0), species=["H"],
# coords=[[0.0]*3])
# actual = make_phonopy_input(unitcell=structure)
# supercell_matrix = [[2, -1, 0], [2, 1, 0], [0, 0, 2]]
# supercell = structure * supercell_matrix
# expected = PhonopyInput(unitcell=structure,
# supercell=supercell,
# supercell_matrix=supercell_matrix)
# assert actual == expected
# -*- coding: utf-8 -*-
import numpy as np
def eval_onevsall(distmat, q_pids, max_rank=50):
"""Evaluation with one vs all on query set."""
num_q = distmat.shape[0]
if num_q < max_rank:
max_rank = num_q
print('Note: number of query samples is quite small, got {}'.format(num_q))
indices = np.argsort(distmat, axis=1)
# print('indices\n', indices)
matches = (q_pids[indices] == q_pids[:, np.newaxis]).astype(np.int32)
# print('matches\n', matches)
# compute cmc curve for each query
all_cmc = []
all_AP = []
num_valid_q = 0.0 # number of valid query
for q_idx in range(num_q):
# remove the query itself
order = indices[q_idx]
keep = order != q_idx
# compute cmc curve
raw_cmc = matches[q_idx][
keep
] # binary vector, positions with value 1 are correct matches
if not np.any(raw_cmc):
# this condition is true when query identity has only one example
# => cannot evaluate retrieval
continue
cmc = raw_cmc.cumsum()
cmc[cmc > 1] = 1
all_cmc.append(cmc[:max_rank])
num_valid_q += 1.0
# compute average precision
# reference: https://en.wikipedia.org/wiki/Evaluation_measures_(information_retrieval)#Average_precision
num_rel = raw_cmc.sum()
tmp_cmc = raw_cmc.cumsum()
tmp_cmc = [x / (i + 1.0) for i, x in enumerate(tmp_cmc)]
tmp_cmc = np.asarray(tmp_cmc) * raw_cmc
AP = tmp_cmc.sum() / num_rel
all_AP.append(AP)
print('Computed metrics on {} examples'.format(len(all_cmc)))
assert num_valid_q > 0, 'Error: all query identities have one example'
all_cmc = np.asarray(all_cmc).astype(np.float32)
all_cmc = all_cmc.sum(0) / num_valid_q
mAP = np.mean(all_AP)
return all_cmc, mAP
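# Toy usage sketch (random distances and hypothetical identity labels):
#
#     distmat = np.random.rand(6, 6)
#     q_pids = np.array([0, 0, 1, 1, 2, 2])
#     cmc, mAP = eval_onevsall(distmat, q_pids, max_rank=5)
#     print('Rank-1: {:.2%}  mAP: {:.2%}'.format(cmc[0], mAP))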
from qtpy import QtWidgets
class LabelQListWidget(QtWidgets.QListWidget):
def __init__(self, *args, **kwargs):
super(LabelQListWidget, self).__init__(*args, **kwargs)
self.canvas = None
self.itemsToShapes = []
self.setSelectionMode(QtWidgets.QAbstractItemView.ExtendedSelection)
def get_shape_from_item(self, item):
for index, (item_, shape) in enumerate(self.itemsToShapes):
if item_ is item:
return shape
def get_item_from_shape(self, shape):
for index, (item, shape_) in enumerate(self.itemsToShapes):
if shape_ is shape:
return item
def clear(self):
super(LabelQListWidget, self).clear()
self.itemsToShapes = []
def setParent(self, parent):
self.parent = parent
def dropEvent(self, event):
shapes = self.shapes
super(LabelQListWidget, self).dropEvent(event)
if self.shapes == shapes:
return
if self.canvas is None:
raise RuntimeError('self.canvas must be set beforehand.')
self.parent.setDirty()
self.canvas.loadShapes(self.shapes)
@property
def shapes(self):
shapes = []
for i in range(self.count()):
item = self.item(i)
shape = self.get_shape_from_item(item)
shapes.append(shape)
return shapes
from functools import reduce
n = int(input())
a = [int(m) for m in input().split()]
def gcd(a,b):
if b == 0:
return a
return gcd(b, a % b)
def gcd_list(numbers):
return reduce(gcd, numbers)
a.sort()
new = [a[0]]
for i in range(1, n):
new.append(a[i] % a[0])
new.sort()
k = gcd_list(new)
for i in range(n):
if new[i] % k != 0:
print(1)
exit()
print(gcd_list(new))
# coding=utf-8
# Copyright 2019-present, the HuggingFace Inc. team and Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" The distiller to distil the student.
Adapted in part from Facebook, Inc XLM model (https://github.com/facebookresearch/XLM)
"""
import math
import os
import time
import psutil
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.optim import AdamW
from torch.utils.data import BatchSampler, DataLoader, RandomSampler
from torch.utils.data.distributed import DistributedSampler
from tqdm import tqdm
import json
from .grouped_batch_sampler import GroupedBatchSampler, create_lengths_groups
from transformers import get_linear_schedule_with_warmup
from oscar.run_captioning import (
CaptionTSVDataset, CaptionTensorizer, evaluate, build_dataset,
compute_score_with_logits)
from .utils import logger
try:
from torch.utils.tensorboard import SummaryWriter
except ImportError:
from tensorboardX import SummaryWriter
class Distiller:
def __init__(
self, params: dict, dataset: CaptionTSVDataset, student: nn.Module, teacher: nn.Module,
val_dataset, tokenizer
):
logger.info("Initializing Distiller")
self.params = params
self.dump_path = params.output_dir
self.multi_gpu = params.multi_gpu
self.fp16 = params.fp16
self.student = student
self.teacher = teacher
self.student_config = student.config
self.vocab_size = student.config.vocab_size
if params.n_gpu <= 1:
sampler = RandomSampler(dataset)
else:
sampler = DistributedSampler(dataset)
# if params.group_by_size:
# groups = create_lengths_groups(lengths=dataset.lengths, k=params.max_model_input_size)
# sampler = GroupedBatchSampler(sampler=sampler, group_ids=groups, batch_size=params.batch_size)
# else:
# sampler = BatchSampler(sampler=sampler, batch_size=params.batch_size, drop_last=False)
sampler = BatchSampler(sampler=sampler, batch_size=params.batch_size, drop_last=False)
self.dataloader = DataLoader(dataset=dataset, batch_sampler=sampler)
self.val_dataset = val_dataset
self.tokenizer = tokenizer
self.eval_log = []
self.temperature = params.temperature
assert self.temperature > 0.0
self.alpha_ce = params.alpha_ce
self.alpha_mse = params.alpha_mse
self.alpha_cos = params.alpha_cos
# self.mlm = params.mlm
# if self.mlm:
# logger.info("Using MLM loss for LM step.")
# self.mlm_mask_prop = params.mlm_mask_prop
# assert 0.0 <= self.mlm_mask_prop <= 1.0
# assert params.word_mask + params.word_keep + params.word_rand == 1.0
# self.pred_probs = torch.FloatTensor([params.word_mask, params.word_keep, params.word_rand])
# self.pred_probs = self.pred_probs.to(f"cuda:{params.local_rank}") if params.n_gpu > 0 else self.pred_probs
# self.token_probs = token_probs.to(f"cuda:{params.local_rank}") if params.n_gpu > 0 else token_probs
# if self.fp16:
# self.pred_probs = self.pred_probs.half()
# self.token_probs = self.token_probs.half()
# else:
# logger.info("Using CLM loss for LM step.")
self.epoch = 0
self.n_iter = 0
self.n_total_iter = 0
self.n_sequences_epoch = 0
self.total_loss_epoch = 0
self.last_loss = 0
self.last_loss_ce = 0
if self.alpha_mse > 0.0:
self.last_loss_mse = 0
if self.alpha_cos > 0.0:
self.last_loss_cos = 0
self.last_log = 0
self.ce_loss_fct = nn.KLDivLoss(reduction="batchmean")
if self.alpha_mse > 0.0:
self.mse_loss_fct = nn.MSELoss(reduction="sum")
if self.alpha_cos > 0.0:
self.cosine_loss_fct = nn.CosineEmbeddingLoss(reduction="mean")
logger.info("--- Initializing model optimizer")
assert params.gradient_accumulation_steps >= 1
self.num_steps_epoch = len(self.dataloader)
num_train_optimization_steps = (
int(self.num_steps_epoch / params.gradient_accumulation_steps * params.n_epoch) + 1
)
no_decay = ["bias", "LayerNorm.weight"]
optimizer_grouped_parameters = [
{
"params": [
p for n, p in student.named_parameters() if not any(nd in n for nd in no_decay) and p.requires_grad
],
"weight_decay": params.weight_decay,
},
{
"params": [
p for n, p in student.named_parameters() if any(nd in n for nd in no_decay) and p.requires_grad
],
"weight_decay": 0.0,
},
]
logger.info(
"------ Number of trainable parameters (student): %i"
% sum([p.numel() for p in self.student.parameters() if p.requires_grad])
)
logger.info("------ Number of parameters (student): %i" % sum([p.numel() for p in self.student.parameters()]))
self.optimizer = AdamW(
optimizer_grouped_parameters, lr=params.learning_rate, eps=params.adam_epsilon, betas=(0.9, 0.98)
)
warmup_steps = math.ceil(num_train_optimization_steps * params.warmup_prop)
self.scheduler = get_linear_schedule_with_warmup(
self.optimizer, num_warmup_steps=warmup_steps, num_training_steps=num_train_optimization_steps
)
if self.fp16:
try:
from apex import amp
except ImportError:
raise ImportError("Please install apex from https://www.github.com/nvidia/apex to use fp16 training.")
logger.info(f"Using fp16 training: {self.params.fp16_opt_level} level")
self.student, self.optimizer = amp.initialize(
self.student, self.optimizer, opt_level=self.params.fp16_opt_level
)
self.teacher = self.teacher.half()
if self.multi_gpu:
if self.fp16:
from apex.parallel import DistributedDataParallel
logger.info("Using apex.parallel.DistributedDataParallel for distributed training.")
self.student = DistributedDataParallel(self.student)
else:
from torch.nn.parallel import DistributedDataParallel
logger.info("Using nn.parallel.DistributedDataParallel for distributed training.")
self.student = DistributedDataParallel(
self.student,
device_ids=[params.local_rank],
output_device=params.local_rank,
find_unused_parameters=True,
)
# self.is_master = params.is_master
# if self.is_master:
logger.info("--- Initializing Tensorboard")
self.tensorboard = SummaryWriter(log_dir=os.path.join(self.dump_path, "log", "train"))
self.tensorboard.add_text(tag="config/training", text_string=str(self.params), global_step=0)
self.tensorboard.add_text(tag="config/student", text_string=str(self.student_config), global_step=0)
def train(self):
"""
The real training loop.
"""
logger.info("Starting training")
self.last_log = time.time()
self.student.train()
self.teacher.eval()
for _ in range(self.params.n_epoch):
logger.info(f"--- Starting epoch {self.epoch}/{self.params.n_epoch-1}")
if self.multi_gpu:
torch.distributed.barrier()
iter_bar = tqdm(self.dataloader, desc="-Iter", disable=self.params.local_rank not in [-1, 0])
for batch in iter_bar:
if self.params.n_gpu > 0:
img_key, example = batch
# img_key = img_key.to(f"cuda:{self.params.local_rank}")
example = tuple(t.to(f"cuda:{self.params.local_rank}") for t in example)
'''CaptionTSVDataset:
def __getitem__(self, idx):
img_idx = self.get_image_index(idx)
img_key = self.image_keys[img_idx]
features = self.get_image_features(img_idx)
caption = self.get_caption(idx)
od_labels = self.get_od_labels(img_idx)
example = self.tensorizer.tensorize_example(caption, features, text_b=od_labels)
return img_key, example
'''
# example: (input_ids, attention_mask, segment_ids, img_feat, masked_pos, masked_ids)
inputs = {'input_ids': example[0], 'attention_mask': example[1],
'token_type_ids': example[2], 'img_feats': example[3],
'masked_pos': example[4], 'masked_ids': example[5]
}
outputs = self.step(**inputs)
iter_bar.update()
iter_bar.set_postfix(
{"Last_loss": f"{self.last_loss:.2f}", "Avg_cum_loss": f"{self.total_loss_epoch/self.n_iter:.2f}"}
)
iter_bar.close()
logger.info(f"--- Ending epoch {self.epoch}/{self.params.n_epoch-1}")
self.end_epoch()
logger.info("Save very last checkpoint as `pytorch_model.bin`.")
self.save_checkpoint(checkpoint_name="pytorch_model.bin")
logger.info("Training is finished")
def step(self, input_ids: torch.tensor, attention_mask: torch.tensor, token_type_ids: torch.tensor,
img_feats: torch.tensor, masked_pos: torch.tensor, masked_ids: torch.tensor):
"""
One optimization step: forward of student AND teacher, backward on the loss (for gradient accumulation),
and possibly a parameter update (depending on the gradient accumulation).
Input:
------
input_ids: `torch.tensor(bs, seq_length)` - The token ids.
attention_mask: `torch.tensor(bs, seq_length)` - The attention mask for self attention.
        masked_ids: `torch.tensor(bs, seq_length)` - The masked language modeling labels.
"""
s_logits, s_hidden_states = self.student(
input_ids=input_ids, attention_mask=attention_mask, img_feats=img_feats,
masked_pos=masked_pos, masked_ids=masked_ids, token_type_ids=token_type_ids
) # (bs, seq_length, voc_size)
with torch.no_grad():
t_output = self.teacher(
input_ids=input_ids, attention_mask=attention_mask, img_feats=img_feats,
masked_pos=masked_pos, masked_ids=masked_ids, token_type_ids=token_type_ids
) # (bs, seq_length, voc_size)
_, t_logits, t_hidden_states = t_output
# output shape (num_blanks, voc_size)
# mask = attention_mask.unsqueeze(-1).expand_as(s_logits) # (bs, seq_length, voc_size)
# s_logits_slct = torch.masked_select(s_logits, mask) # (bs * seq_length * voc_size) modulo the 1s in mask
# s_logits_slct = s_logits_slct.view(-1, s_logits.size(-1)) # (bs * seq_length, voc_size) modulo the 1s in mask
# t_logits_slct = torch.masked_select(t_logits, mask) # (bs * seq_length * voc_size) modulo the 1s in mask
# t_logits_slct = t_logits_slct.view(-1, s_logits.size(-1)) # (bs * seq_length, voc_size) modulo the 1s in mask
s_logits_slct = s_logits
t_logits_slct = t_logits
assert t_logits_slct.size() == s_logits_slct.size()
loss_ce = (
self.ce_loss_fct(
F.log_softmax(s_logits_slct / self.temperature, dim=-1),
F.softmax(t_logits_slct / self.temperature, dim=-1),
)
* (self.temperature) ** 2
)
loss = self.alpha_ce * loss_ce
if self.alpha_mse > 0.0:
loss_mse = self.mse_loss_fct(s_logits_slct, t_logits_slct) / s_logits_slct.size(
0
) # Reproducing batchmean reduction
loss += self.alpha_mse * loss_mse
if self.alpha_cos > 0.0:
s_hidden_states = s_hidden_states[-1] # (bs, seq_length, dim)
t_hidden_states = t_hidden_states[-1] # (bs, seq_length, dim)
# mask = attention_mask.unsqueeze(-1).expand_as(s_hidden_states) # (bs, seq_length, dim)
# assert s_hidden_states.size() == t_hidden_states.size()
# dim = s_hidden_states.size(-1)
# s_hidden_states_slct = torch.masked_select(s_hidden_states, mask) # (bs * seq_length * dim)
# s_hidden_states_slct = s_hidden_states_slct.view(-1, dim) # (bs * seq_length, dim)
# t_hidden_states_slct = torch.masked_select(t_hidden_states, mask) # (bs * seq_length * dim)
# t_hidden_states_slct = t_hidden_states_slct.view(-1, dim) # (bs * seq_length, dim)
s_hidden_states_slct = s_hidden_states.reshape(1,-1)
t_hidden_states_slct = t_hidden_states.reshape(1,-1)
target = torch.ones(s_hidden_states_slct.shape).to(s_hidden_states_slct.device) # (bs * seq_length,)
loss_cos = self.cosine_loss_fct(s_hidden_states_slct, t_hidden_states_slct, target)
loss += self.alpha_cos * loss_cos
self.total_loss_epoch += loss.item()
self.last_loss = loss.item()
self.last_loss_ce = loss_ce.item()
if self.alpha_mse > 0.0:
self.last_loss_mse = loss_mse.item()
if self.alpha_cos > 0.0:
self.last_loss_cos = loss_cos.item()
self.optimize(loss)
self.n_sequences_epoch += input_ids.size(0)
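    # ------------------------------------------------------------------
    # Illustrative sketch (not part of the original file): the soft-target
    # distillation loss computed in step() above, shown on toy tensors.
    # It assumes ce_loss_fct is nn.KLDivLoss(reduction="batchmean"), as in
    # DistilBERT's distiller; shapes and the temperature are hypothetical.
    #
    #   import torch
    #   import torch.nn.functional as F
    #
    #   T = 2.0                                  # distillation temperature
    #   s_logits = torch.randn(4, 16, 30522)     # student (bs, seq, vocab)
    #   t_logits = torch.randn(4, 16, 30522)     # teacher (bs, seq, vocab)
    #   kl = torch.nn.KLDivLoss(reduction="batchmean")
    #   loss_ce = kl(F.log_softmax(s_logits / T, dim=-1),
    #                F.softmax(t_logits / T, dim=-1)) * T ** 2
    # ------------------------------------------------------------------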
def optimize(self, loss):
"""
Normalization on the loss (gradient accumulation or distributed training), followed by
backward pass on the loss, possibly followed by a parameter update (depending on the gradient accumulation).
Also update the metrics for tensorboard.
"""
# Check for NaN
if (loss != loss).data.any():
logger.error("NaN detected")
exit()
if self.multi_gpu:
loss = loss.mean()
if self.params.gradient_accumulation_steps > 1:
loss = loss / self.params.gradient_accumulation_steps
if self.fp16:
from apex import amp
with amp.scale_loss(loss, self.optimizer) as scaled_loss:
scaled_loss.backward()
else:
loss.backward()
self.iter()
if self.n_iter % self.params.gradient_accumulation_steps == 0:
if self.fp16:
torch.nn.utils.clip_grad_norm_(amp.master_params(self.optimizer), self.params.max_grad_norm)
else:
torch.nn.utils.clip_grad_norm_(self.student.parameters(), self.params.max_grad_norm)
self.optimizer.step()
self.optimizer.zero_grad()
self.scheduler.step()
def iter(self):
"""
Update global counts, write to tensorboard and save checkpoint.
"""
self.n_iter += 1
self.n_total_iter += 1
if self.n_total_iter % self.params.log_interval == 0:
self.log_tensorboard()
self.last_log = time.time()
if self.n_total_iter % self.params.checkpoint_interval == 0:
self.save_checkpoint()
logger.info("Perform evaluation at step: %d" % (self.n_total_iter))
            try:
                evaluate_file = evaluate(self.params, self.val_dataset, self.student, self.tokenizer,
                                         self.dump_path)
                with open(evaluate_file, 'r') as f:
                    res = json.load(f)
                # Keep the best CIDEr seen so far on the instance so it
                # persists across evaluation calls.
                self.best_score = max(getattr(self, 'best_score', 0.0), res['CIDEr'])
                res['epoch'] = self.epoch
                res['global_step'] = self.n_total_iter
                res['best_CIDEr'] = self.best_score
                self.eval_log.append(res)
                with open(self.dump_path + '/eval_logs.json', 'w') as f:
                    json.dump(self.eval_log, f)
            except Exception:
                logger.warning("An exception occurred during the evaluation process.")
def log_tensorboard(self):
"""
Log into tensorboard. Only by the master process.
"""
# if not self.is_master:
# return
for param_name, param in self.student.named_parameters():
self.tensorboard.add_scalar(
tag="parameter_mean/" + param_name, scalar_value=param.data.mean(), global_step=self.n_total_iter
)
self.tensorboard.add_scalar(
tag="parameter_std/" + param_name, scalar_value=param.data.std(), global_step=self.n_total_iter
)
if param.grad is None:
continue
self.tensorboard.add_scalar(
tag="grad_mean/" + param_name, scalar_value=param.grad.data.mean(), global_step=self.n_total_iter
)
self.tensorboard.add_scalar(
tag="grad_std/" + param_name, scalar_value=param.grad.data.std(), global_step=self.n_total_iter
)
self.tensorboard.add_scalar(
tag="losses/cum_avg_loss_epoch",
scalar_value=self.total_loss_epoch / self.n_iter,
global_step=self.n_total_iter,
)
self.tensorboard.add_scalar(tag="losses/loss", scalar_value=self.last_loss, global_step=self.n_total_iter)
self.tensorboard.add_scalar(
tag="losses/loss_ce", scalar_value=self.last_loss_ce, global_step=self.n_total_iter
)
if self.alpha_mse > 0.0:
self.tensorboard.add_scalar(
tag="losses/loss_mse", scalar_value=self.last_loss_mse, global_step=self.n_total_iter
)
if self.alpha_cos > 0.0:
self.tensorboard.add_scalar(
tag="losses/loss_cos", scalar_value=self.last_loss_cos, global_step=self.n_total_iter
)
self.tensorboard.add_scalar(
tag="learning_rate/lr", scalar_value=self.scheduler.get_lr()[0], global_step=self.n_total_iter
)
self.tensorboard.add_scalar(
tag="global/memory_usage",
scalar_value=psutil.virtual_memory()._asdict()["used"] / 1_000_000,
global_step=self.n_total_iter,
)
self.tensorboard.add_scalar(
tag="global/speed", scalar_value=time.time() - self.last_log, global_step=self.n_total_iter
)
def end_epoch(self):
"""
Finally arrived at the end of epoch (full pass on dataset).
Do some tensorboard logging and checkpoint saving.
"""
logger.info(f"{self.n_sequences_epoch} sequences have been trained during this epoch.")
self.save_checkpoint(checkpoint_name=f"model_epoch_{self.epoch}.pth")
self.tensorboard.add_scalar(
tag="epoch/loss", scalar_value=self.total_loss_epoch / self.n_iter, global_step=self.epoch
)
self.epoch += 1
self.n_sequences_epoch = 0
self.n_iter = 0
self.total_loss_epoch = 0
def save_checkpoint(self, checkpoint_name: str = "checkpoint.pth"):
"""
Save the current state. Only by the master process.
"""
# if not self.is_master:
# return
mdl_to_save = self.student.module if hasattr(self.student, "module") else self.student
mdl_to_save.config.save_pretrained(self.dump_path)
state_dict = mdl_to_save.state_dict()
torch.save(state_dict, os.path.join(self.dump_path, checkpoint_name))
| StarcoderdataPython |
4826614 | '''
Module to perform linear systems calculations
Uses polynomials form numpy:
http://docs.scipy.org/doc/numpy/reference/routines.polynomials.package.html
http://docs.scipy.org/doc/numpy/reference/routines.polynomials.classes.html
History:
12/04/2016 : First version
16/04/2016 : Add of linear frequency response. Some plot corrections.
19/04/2016 : Add of 3D s-plane graphs
29/03/2017 : Add of help system
12/03/2018 : Added Python 2.7 and 3.x compatibility
Improvement of plotting on Colaboratory
Added version string
Help comments are now Python help compatible
15/03/2018 : Add of substraction overload operator
Add autorange for the frequencies of bode
16/03/2018 : Corrections on plotSplane
Added tResponse member function
22/03/2018 : Added addBodeFromComplex and drawBodeFromComplex
'''
# Python 2.7 compatibility
from __future__ import print_function
from __future__ import division
try:
input = raw_input
except NameError:
pass
version = '22/03/2018'
"""
@root
This is the main page of the Linear module help
List of topics:
linblk : Linear block class
plot : Plot functions
util : Utility functions
You can also input the name of a particular command
@plot
Plot functions topics:
Linear frequency:
showFreqMag
showFreqComplex
Log frequency:
showBodeMag
showBodePhase
drawBodePlot
addBodePlot
Pole/Zero:
addPoleZeroPlot
drawPoleZeroPlot
showPlot
@util
Utility functions topics:
frange : Generates log range
f2w : Hz to rad/s
w2f : rad/s to Hz
dB : Linear to dB
damping : Damping from poles
q : Q from poles
poleZeroPolar : Create pole or zero pair
"""
import numpy as np # Import numpy for numeric calculations
import pylab as pl # Import pylab
import matplotlib.pyplot as plt
from matplotlib import cm # Colormaps
from mpl_toolkits.mplot3d import Axes3D # For 3D graphs
from numpy.polynomial import polynomial as P # Polinomial functions
# External files
HELP_FILE = "Linear_Help.dat"
# Define normal mode outside colaboratory
colaboratory = False
# Exception code
class LinearEx(Exception):
def __init__(self, msg=""):
print('** ' + msg)
print("\n")
def __str__(self):
return repr(self.code)
#################### HELP CODE ###########################
def help(topic="root"):
"""
Gives help information
Parameters:
topic : Text to give information about
Defaults to root
    Exits with a message if the help file is not found
"""
while (True):
print()
ftopic = "@"+topic
topic_found = False
try:
with open(HELP_FILE, 'r') as hfile:
for line in hfile:
if line.startswith("#"):
continue
if not topic_found:
if line.startswith("@#"):
print( "'" + topic + "' topic not found")
break
elif line.upper().startswith(ftopic.upper()):
topic_found = True
else:
if line.startswith("@"):
break
print(line[0:-1])
except:
print('Help file ',HELP_FILE,' is not available')
return
print()
print("root topic goes to main page")
print("Just hit return to exit help")
print()
topic = input("Help topic : ")
if topic == "":
print()
return
##################### FREQUENCY HELPER FUNCTIONS #############################
def frange(start,end=0,ndec=0,ppd=20):
"""
@frange
frange(start,end,ndec,ppd)
Generates a logarithmic range
Required parameters:
start : start value
end : end value
ndec : number of decades
ppd : points per decade (defaults to 20)
Either end or ndec must be provided
Returns a vector with the frequencies
Examples
>> f = frange(fstart,fend) # Range with default 20 ppd
    >> f = frange(fstart,fend,ppd=10)   # Range with 10 ppd
    >> f = frange(fstart,ndec=4)        # 4 decades from fstart with default 20 ppd
    >> f = frange(fstart,ndec=4,ppd=10) # 4 decades with custom ppd
"""
stlog = np.log10(start)
# We don't provide end
if end == 0:
if ndec == 0:
raise LinearEx('Need to provide end or decades')
return 10**np.arange(stlog,stlog+ndec,1.0/ppd)
# We provide end
endlog = np.log10(end)
return 10**np.arange(stlog,endlog,1.0/ppd)
def f2w(f):
"""
@f2w
f2w(f)
Converts frequency from Hz to rad/s
Returns frequency in rad/s
"""
return f*2*np.pi
def w2f(w):
""""
@w2f
w2f(w)
Converts frequency from rad/s to Hz
Returns frequency in Hz
"""
return w/(2*np.pi)
# COLABORATORY FLAG FOR PLOTTING #####################################################
def setColaboratory(flag=True):
"""
@setColaboratory
setColaboratory(flag=True)
Indicates that we are in Colaboartory
Don't return anything
"""
global colaboratory
colaboratory = flag
# Internal plot functions ############################################################
def _plotStart():
""""
_plotStart
(Internal use function)
Starts a new plot
Returns:
fig : Figure object
"""
if colaboratory:
fig=plt.figure()
return fig
# Outside colaboratory
fig=plt.figure(facecolor="white") # White border
return fig
def _subplotStart(fig,n,title="",xt="",yt="",grid=True):
"""
_subplotStart
(Internal use function)
Starts a new subplot
    Parameters:
fig : Figure to add the subplot
        title : Title of the subplot (defaults to none)
xt : x label of the subplot (defaults to none)
yt : y label of the subplot (defaults to none)
grid : Determines if there is grid (defaults to True)
Returns:
ax : Axes object
"""
# If we are inside colaboratory
if colaboratory:
ax = fig.add_subplot(n)
ax.set_facecolor("white")
ax.set_title(title)
ax.set_xlabel(xt)
ax.set_ylabel(yt)
if (grid):
plt.grid(True,color="lightgrey",linestyle='--')
return ax
# Outside colaboratory
ax = fig.add_subplot(n)
ax.set_title(title)
ax.set_xlabel(xt)
ax.set_ylabel(yt)
if grid:
pl.grid()
return ax
def _subplotEnd(ax,labels=[],location='best'):
"""
_subplotEnd
(Internal use function)
Ends a subplot
Parameters:
ax : Subplot axes
labels : List of label names (Default to no labels)
location : Location for labels (Default to 'best')
Returns nothing
"""
if colaboratory:
if not labels == []:
pl.legend(loc=location)
xmin, xmax = plt.xlim()
ymin, ymax = plt.ylim()
ax.axvline(x=xmin,linewidth=2, color='black')
ax.axvline(x=xmax,linewidth=2, color='black')
ax.axhline(y=ymin,linewidth=2, color='black')
ax.axhline(y=ymax,linewidth=2, color='black')
# Outside colaboratory
if not labels == []:
pl.legend(loc=location)
def _plotEnd():
"""
_plotEnd
Ends a previously started plot
Takes no parameters
Returns nothing
"""
if colaboratory:
plt.show()
return
# Outside colaboratory
pl.show()
pl.close()
################## LINEAR FREQUENCY PLOT HELPER FUNCTIONS ##################
def showFreqMag(f,mag,title='Magnitude Frequency Plot',ylabel='Magnitude'):
"""
@showFreqMag
showFreqMag(f,mag,title,ylabel)
Linear frequency magnitude plot
Required parameters:
f : Frequency vector (Hz)
mag : Magnitude vector (in linear units)
Optional parameters:
title : Plot title
ylabel : Y axis label
Returns nothing
"""
fig=_plotStart()
ax = _subplotStart(fig,111,title,'Frequency (Hz)',ylabel)
pl.semilogx(f, mag)
_subplotEnd(ax)
_plotEnd()
def showFreqComplex(f,vector,title='Magnitude/Phase Frequency Plot'):
"""
@showFreqComplex
showFreqComplex(f,vector,title)
Linear frequency magnitude and phase plot
Required parameters:
f : Frequency vector (Hz)
vector : Complex vector
Optional parameters:
title : Plot title
Returns nothing
"""
fig=_plotStart()
ax = _subplotStart(fig,211,title,'','Magnitude')
mag = np.absolute(vector)
pl.semilogx(f, mag)
_subplotEnd(ax)
ax = _subplotStart(fig,212,'','Frequency (Hz)','Phase')
phase = np.angle(vector,deg=True)
pl.semilogx(f, phase)
_subplotEnd(ax)
_plotEnd()
######################## BODE HELPER FUNCTIONS ############################
def dB(gain):
"""
@dB
dB(gain)
Converts linear gain in dB
Returns value in dB
"""
return 20*np.log10(gain)
def showBodeMag(f,mag,title='Magnitude Bode Plot'):
"""
@showBodeMag
Show Bode magnitude plot
Required parameters:
f : Frequency vector (Hz)
mag : Magnitude vector (dB)
Optional parameter:
title : Plot title
Returns nothing
"""
fig=_plotStart()
ax=_subplotStart(fig,111,title,'Frequency (Hz)','Magnitude (dB)')
pl.semilogx(f, mag)
_subplotEnd(ax)
_plotEnd()
def showBodePhase(f,phase,title='Phase Bode Plot'):
"""
@showBodePhase
showBodePhase(f,phase,title)
Show Bode phase plot
Required parameters:
f : Frequency vector (Hz)
phase : Phase vector (deg)
Optional parameter:
title : Plot title
Returns nothing
"""
fig=_plotStart()
ax=_subplotStart(fig,111,title,'Frequency (Hz)','Phase (deg)')
pl.semilogx(f, phase)
_subplotEnd(ax)
_plotEnd()
# Information about bodes
bodeLabels = []
bodeFrequencies = []
bodeMagnitudes = []
bodePhases = []
def addBodePlot(f,mag,phase,label=''):
"""
@addBodePlot
addBodePlot(f,mag,phase,label)
Adds a new bode plot
Useful to show different Bode curves together
Required parameters:
f : Frequency vector (Hz)
mag : Magnitude vector(dB)
phase : Phase vector (deg)
Optional parameters:
label : Label for the curve (Defaults to no label)
Returns nothing
See also showBodePlot
"""
global bodeFrequencies,bodeMagnitudes,bodePhases,bodeLabels
bodeFrequencies.append(f)
bodeMagnitudes.append(mag)
bodePhases.append(phase)
bodeLabels.append(label)
def addBodeFromComplex(f,vector,label=''):
"""
@addBodeFromComplex
addBodeFromComplex(f,vector,label)
Adds a new bode plot
Useful to show different Bode curves together
Required parameters:
f : Frequency vector (Hz)
v : Complex vector
Optional parameters:
label : Label for the curve (Defaults to no label)
Returns nothing
See also showBodePlot
"""
addBodePlot(f,dB(np.abs(vector)),np.angle(vector,deg=True),label)
def showBodePlot(title='Bode Plot',location='best'):
""""
@showPlot
showPlot()
Shows a multigraph plot
Optional parameters:
title : Title for the plot
location : Location for the labels (Defaults to 'best')
Returns nothing
"""
global bodeFrequencies,bodeMagnitudes,bodePhases,bodeLabels
fig=_plotStart()
ax = _subplotStart(fig,211,title,'','Magnitude (dB)')
for element in zip(bodeFrequencies,bodeMagnitudes,bodeLabels):
if len(bodeFrequencies) == 1:
pl.semilogx(element[0],element[1],label='Nada')
else:
pl.semilogx(element[0],element[1],label=element[2])
# Legend only in phase plot
#if len(bodeFrequencies) > 1:
# pl.legend(loc=location)
_subplotEnd(ax)
ax = _subplotStart(fig,212,'','Frequency (Hz)','Phase (deg)')
for element in zip(bodeFrequencies,bodePhases,bodeLabels):
if len(bodeFrequencies) == 1:
pl.semilogx(element[0],element[1],label='Nada')
else:
pl.semilogx(element[0],element[1],label=element[2])
if len(bodeFrequencies) > 1:
pl.legend(loc=location)
_subplotEnd(ax)
_plotEnd()
# Reset bode plots
bodeFrequencies = []
bodeLabels = []
bodeMagnitudes = []
bodePhases = []
def drawBodePlot(f,mag,phase,title='Bode Plot'):
"""
@drawBodePlot
drawBodePlot(f,mag,phase,title)
Draws a bode plot
Required parameters:
f : Frequency vector (Hz)
mag : Magnitude vector(dB)
phase : Phase vector (deg)
Optional parameters:
title : Plot title
Returns nothing
"""
addBodePlot(f,mag,phase)
showBodePlot(title)
def drawBodeFromComplex(f,v,title='Bode Plot'):
"""
@drawBodeFromComplex
drawBodeFromComplex(f,v,title)
Draws a bode plot
Required parameters:
f : Frequency vector (Hz)
v : Complex vector
Optional parameters:
title : Plot title
Returns nothing
"""
addBodeFromComplex(f,v)
showBodePlot(title)
#################### S PLOT HELPER FUNCTIONS ######################
# Global variables
pzPlotPoles = []
pzPlotZeros = []
pzPlotLabels = []
pzPlotColors = []
def addPoleZeroPlot(poles=[],zeros=[],label=None,color='blue'):
"""
@addPoleZeroPlot
addPoleZeroPlot(poles,zeros,title,color)
Adds poles to the current plot
Parameters:
poles : List of poles
zeros : List of zeros
label : Label (optional)
color : Color of symbols (defaults to 'blue')
Returns nothing
See also showPoleZeroPlot
"""
global pzPlotPoles,pzPlotZeros,pzPlotLabels,pzPlotColors
pzPlotPoles.append(poles)
pzPlotZeros.append(zeros)
pzPlotLabels.append(label)
pzPlotColors.append(color)
def showPoleZeroPlot(title='Pole(x) / Zero(o) plot',location='best'):
"""
@showPoleZeroPlot
showPoleZeroPlot(title,location)
Draws a pole-zero plot after calls to addPoleZeroPlot
Optional parameters:
title : Title for the plot
location : Location for the legend
"""
global pzPlotPoles,pzPlotZeros,pzPlotLabels,pzPlotColors
labelBox = False
fig=_plotStart()
ax=_subplotStart(fig,111,title,'Real axis','Imaginary axis')
    for poles, zeros, label, color in zip(pzPlotPoles, pzPlotZeros, pzPlotLabels, pzPlotColors):
showLabel = (label != None)
if len(poles):
re = np.real(poles)
im = np.imag(poles)
if showLabel:
pl.scatter(re,im,marker='x',label=label,color=color)
labelBox=True
showLabel = False
else:
pl.scatter(re,im,marker='x',color=color)
if len(zeros):
re = np.real(zeros)
im = np.imag(zeros)
if showLabel:
                pl.scatter(re,im,marker='o',label=label,color=color)
labelBox=True
else:
pl.scatter(re,im,marker='o',color=color)
# Zero lines
ax.axvline(x=0,linewidth=1, color='black', linestyle='--')
ax.axhline(y=0,linewidth=1, color='black', linestyle='--')
if labelBox == True:
pl.legend(loc=location)
_subplotEnd(ax)
_plotEnd()
# Reset lists
pzPlotPoles = []
pzPlotZeros = []
pzPlotLabels = []
pzPlotColors = []
def drawPoleZeroPlot(poles=[],zeros=[]
,title='Pole(x) & Zero(o) plot'
,color='blue'):
"""
@drawPoleZeroPlot
drawPoleZeroPlot(poles,zeros,title,color)
Draw a poles-zero plot
Parameters:
poles : List of poles
zeros : List of zeros
title : Graph title (optional)
color : Color of symbols (optional)
Returns nothing
"""
addPoleZeroPlot(poles,zeros,color=color)
showPoleZeroPlot()
def damping(pole):
"""
@damping
damping(pole)
Returns the damping associated to a single pole
The results make no sense for real poles
0 : Undamped (Oscillator)
<1 : Underdamped (Decaying oscillations)
1 : Critically damped or Overdamped (No oscillations)
"""
return -np.real(pole)/np.absolute(pole)
def q(pole):
"""
@q
q(pole)
Returns the Q factor associated to a single pole
The result make no sense for real poles
"""
damp = damping(pole)
return 1.0/(2.0*damp)
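# Worked example (illustrative, not part of the original module): for the
# complex pole p = -1 + 2j we have |p| = sqrt(5) ~= 2.236, so
#
#   >>> p = -1 + 2j
#   >>> damping(p)        # -Re(p)/|p| = 1/sqrt(5)
#   0.4472...
#   >>> q(p)              # 1/(2*damping) = sqrt(5)/2
#   1.1180...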
######################### LINBLK CLASS ############################
"""
@linblk
class linblk
Linear block class
A new object can be created with:
>> l1 = linblk() # H(s) = 1
>> l2 = linblk([1],[1,1/p1]) # H(s) = 1 / ( 1 + s/p1 )
Or you can also use linFromPZ or lin1
Additional topics:
linFromPZ, lin1, operators, methods
@operators
Operators available on the linblk class:
str : Shows numerator/denominator
* : Cascade of two systems
/ : Cascade with second system pole <-> zero
+ : System output addition for same input
- : Negation operator
- : System substraction
@methods
Methods available on the linblk class:
    They are searchable as help topics
nf : Negative feedback nf()
pf : Positive feedback pf
eval : Evaluation in s plane
weval : Evaluation at jw
    bode        : Bode plot
freqR : Frequency response
showBode : Bode plot
addBode : Add Bode plot
poles : List of poles
zeros : List of zeros
gain : System gain
addPZplot : Add PZ plot
showPZplot : PZ plot
printPZ : Print PZ list
clean : PZ cancelation
pzRange : Range for all PZ
plotSplane : Linear 3D Magnitude "s" plot
bode3Dmag : Log 3D Magnitude "s" plot
bode3Dphase : Log 3D Phase "s" plot
@nf
L1.nf(L2)
L2 gives negative feedback on L1
    Returns composite system
    @pf
    L1.pf(L2)
    L2 gives positive feedback on L1
    Returns composite system
@eval
L1.eval(x)
Evaluate the system on a point of the s plane
x : Complex value
Returns a complex value
@weval
L1.weval(w)
Evaluate the system on a point on the imaginary axis
w : Value on the j axis (Real value)
Returns a complex value
@bode
L1.bode(f)
Generates the bode plot vector results
f : Frequency vector
Returns:
mag : Magnitude vector (dB)
phase : Phase vector (deg)
@freqR
L1.freqR(f):
Generates the frequency response as a vector
f : Frequency vector
    Returns frequency response (complex)
@showBode
L1.showBode(f,title):
Shows the bode plot of the system
f : Frequency vector
title : Plot title (optional)
Returns nothing
@addBode
L1.addBode(f,title,label)
Add the bode plot to the current image
f : Frequency vector
title : Plot title (optional)
label : Plot label (optional)
Use showPlot() to see the final image
Returns noting
@poles
L1.poles()
Gives the list of poles of the system
@zeros
L1.zeros()
Gives the list of zeros of the system
@gain
gain()
Gives the gain of the system
    We define gain as the quotient of the first coef (in increasing order)
of num and den that is not zero
@addPZplot
L1.addPZplot(title,color)
Add the pole-zero plot to the current image
title : Plot title (optional)
color : Color used (optional)
Use showPlot() to see the final image
Returns nothing
@showPZplot
L1.showPZplot(title,color):
    Show the pole-zero plot of the system
title : Plot title (optional)
color : Color used (optional)
Returns nothing
@printPZ
L1.printPZ()
Show poles and zeros on screen
Returns nothing
@clean
L1.clean(ratio)
Eliminates poles and zeros that cancel each other
A pole and a zero are considered equal if their distance
is lower than 1/ratio its magnitude
ratio : Ratio to cancel PZ (default = 1000)
Returns a new object
@pzRange
L1.pzRange()
Returns in a tuple the range in the complex domain
that includes all the poles and zeros
@plotSplane
L1.plotSplane(zmax)
Plots the magnitude of the evaluation of the
system inside the s plane in dB(magnitude)
zmax : Maximum in Z axis (dB) (Optional)
Returns nothing
@bode3Dmag
L1.bode3Dmag(sfmax,zmax)
Plots the magnitude of the evaluation of the
system inside the s plane in dB(magnitude)
The plot uses log10 of frequency in the axes
fmax : Maximum frequency (optional)
zmax : Maximum in Z axis (dB) (optional)
Returns nothing
@bode3Dphase
L1.bode3Dphase(fmax)
Plots the phase of the evaluation of the
system inside the s plane in dB(magnitude)
The plot uses log10 of frequency in the axes
fmax : Maximum frequency
Returns nothing
"""
class linblk():
def __init__(self,num=[1.0],den=[1.0]):
"""
linblk Class constructor
A new object can be created with:
>> l1 = linblk() # H(s) = 1 block
>> l2 = linblk([1],[1,1/p1]) # H(s) = 1 / ( 1 + s/p1 )
"""
self.num = P.Polynomial(num)
self.den = P.Polynomial(den)
def __str__(self):
"""
Converts a linblk object to string
Shows the numerator and denominator
"""
st = str(self.num.coef) + ' / ' + str(self.den.coef)
return st
def __mul__(self,other):
"""
Multiplication operator (*)
Returns a cascade of two systems
"""
obj = linblk()
obj.num = self.num * other.num
obj.den = self.den * other.den
return obj
def __div__(self,other):
"""
        Division operator (/ under Python 2)
Returns a cascade of the first system with
the second one changing poles to zeros
"""
obj = linblk()
obj.num = self.num * other.den
obj.den = self.den * other.num
return obj
def __truediv__(self,other):
"""
True Division operator (/)
Returns a cascade of the first system with
the second one changing poles to zeros
"""
obj = linblk()
obj.num = self.num * other.den
obj.den = self.den * other.num
return obj
def __add__(self,other):
"""
Addition operator (+)
        Returns a system whose output is the sum of
two systems with the same input
"""
obj = linblk()
obj.num = (self.num * other.den) + (self.den*other.num)
obj.den = self.den * other.den
return obj
def __sub__(self,other):
"""
        Subtraction operator (-)
        Returns a system whose output is the subtraction of
two systems with the same input
"""
obj = linblk()
obj.num = (self.num * other.den) - (self.den*other.num)
obj.den = self.den * other.den
return obj
def __neg__(self):
"""
Negation operator (-)
Returns a system with sign change
"""
obj = linblk()
obj.num = -self.num
obj.den = self.den
return obj
def nf(self,other):
"""
Negative feedback
Use other system to give negative feedback
"""
obj = linblk()
obj.num = self.num * other.den
obj.den = (self.den * other.den) + (self.num * other.num)
return obj
def pf(self,other):
"""
Positive feedback
Use other system to give positive feedback
"""
obj = linblk()
obj.num = self.num * other.den
obj.den = (self.den * other.den) - (self.num * other.num)
return obj
def eval(self,x):
"""
Evaluate the system on a point of the s plane
x : Complex value
"""
y = self.num(x)/self.den(x)
return y
# Evaluation at jw
def weval(self,w):
"""
Evaluate the system on a point on the imaginary axis
w : Value on the j axis (Real value)
"""
x = w*1j
y = self.num(x)/self.den(x)
return y
def bode(self,f):
"""
Generates the bode plot vector results
f : Frequency vector
Returns:
mag : Magnitude vector (dB)
phase : Phase vector (deg)
"""
w = f2w(f)
res = self.weval(w)
mag = dB(np.absolute(res))
phase = np.angle(res,deg=True)
return mag, phase
def freqR(self,f):
"""
Generates the frequency response vector results
f : Frequency vector
Returns:
res : Freuency response (complex)
"""
w = f2w(f)
res = self.weval(w)
return res
def autoRange(self):
"""
Creates a frequency vector that includes all poles and zeros
Returns the frequency vector
"""
min,max = self.wRange()
min = (min/10)/(2*np.pi)
max = (max*10)/(2*np.pi)
fv = frange(min,max)
return fv
def showBode(self,f=None,title='Bode Plot'):
"""
Shows the bode plot of the system
f : Frequency vector
"""
if f is None: f=self.autoRange()
mag, phase = self.bode(f)
drawBodePlot(f,mag,phase,title)
def addBode(self,f=None,label=None):
"""
Add the bode plot to the current image
f : Frequency vector
Use showBodePlot() to see the final image
"""
if f is None: f=self.autoRange()
mag, phase = self.bode(f)
addBodePlot(f,mag,phase,label=label)
def poles(self):
"""
Get the list of poles of the system
"""
return self.den.roots()
def zeros(self):
"""
Get the list of zeros of the system
"""
return self.num.roots()
def gain(self):
"""
Get the gain of the system
        We define gain as the quotient of the first coef (in increasing order)
of num and den that is not zero
"""
for c in self.num.coef:
if c!= 0.0:
cnum = c
break
for c in self.den.coef:
if c!= 0.0:
cden = c
break
gain = cnum/cden
return gain
def addPZplot(self,label=None,color='blue'):
"""
Add the pole-zero plot to the current image
Use showPoleZeroPlot() to see the final image
label : Label for the set (optional)
color : Color for the set (Defaults to 'blue')
"""
poles = self.poles()
zeros = self.zeros()
addPoleZeroPlot(poles,zeros,label=label,color=color)
def showPZplot(self,title='',color='blue'):
"""
        Show the pole-zero plot of the system
        title : Plot title (optional)
        color : Symbol colors (Defaults to 'blue')
"""
self.addPZplot(color=color)
showPoleZeroPlot(title)
def printPZ(self):
"""
Show poles and zeros on screen
"""
poles = self.poles()
zeros = self.zeros()
gain = self.gain()
print('Poles : ' + str(poles))
print('Zeros : ' + str(zeros))
print('Gain : ' + str(gain))
def clean(self,ratio=1000.0):
"""
Eliminates poles and zeros that cancel each other
A pole and a zero are considered equal if their distance
is lower than 1/ratio its magnitude
ratio : Ratio to cancel PZ (default = 1000)
Return a new object
"""
gain = self.gain()
poles = self.poles()
zeros = self.zeros()
        # Nothing to cancel if there are no poles or no zeros
        if len(poles) == 0 or len(zeros) == 0:
            return self
outPoles=[] # Empty pole list
for pole in poles:
outZeros=[] # Empty zero list
found = False
for zero in zeros:
if not found:
distance = np.absolute(pole-zero)
threshold = ( np.absolute(pole) + np.absolute(zero) ) / (2.0*ratio)
if distance > threshold:
outZeros.append(zero)
else:
found = True
else:
outZeros.append(zero)
if not found:
outPoles.append(pole)
zeros = outZeros
poles = outPoles
s = linblk()
s.den=P.Polynomial(P.polyfromroots(poles))
s.num=P.Polynomial(P.polyfromroots(zeros))
# Add gain
# curr = s.num.coef[0]/s.den.coef[0]
curr = s.gain()
s.num = s.num * gain / curr
return s
def pzRange(self):
"""
Returns the range in the complex domain that includes
all the poles and zeros
"""
li = np.array(list(self.poles()[:]) + list(self.zeros()[:]))
ReMin = np.amin(np.real(li))
ReMax = np.amax(np.real(li))
ImMin = np.amin(np.imag(li))
ImMax = np.amax(np.imag(li))
return ReMin + ImMin*1j , ReMax + ImMax*1j
def wRange(self):
"""
        Returns the angular frequency range that includes all poles and zeros
that are not zero
"""
li = np.array(list(self.poles()[:]) + list(self.zeros()[:]))
if len(li) == 0: return None
li = np.abs(li)
li = [x for x in li if x!=0.0]
return min(li),max(li)
def plotSplane(self,zmax=100.0):
"""
Plots the magnitude of the evaluation of the
system inside the s plane in dB(magnitude)
Optional parameter:
zmax : Maximum in Z axis (dB) (Defaults to 100 dB)
"""
min,max = self.pzRange()
fig = _plotStart()
ax = fig.gca(projection='3d')
X = np.linspace(2.0*np.real(min),0.0,100)
Y = np.linspace(2.0*np.imag(min),2.0*np.imag(max),100)
X, Y = np.meshgrid(X, Y)
Z = np.clip(dB(np.absolute(self.eval(X + 1j*Y))),0.0,zmax)
surf = ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap=cm.coolwarm,
linewidth=0, antialiased=False)
if colaboratory:# For colaboratory
ax.set_facecolor("white")
ax.xaxis.pane.set_edgecolor('black')
ax.yaxis.pane.set_edgecolor('black')
ax.zaxis.pane.set_edgecolor('black')
ax.xaxis.pane.fill = True
ax.yaxis.pane.fill = True
ax.zaxis.pane.fill = True
# Set pane colors
ax.xaxis.set_pane_color((0.8, 0.9, 0.9, 1.0))
ax.yaxis.set_pane_color((0.9, 0.8, 0.9, 1.0))
ax.zaxis.set_pane_color((0.9, 0.9, 0.8, 1.0))
# Improve ticks and axes legend
[t.set_va('center') for t in ax.get_yticklabels()]
[t.set_ha('left') for t in ax.get_yticklabels()]
[t.set_va('center') for t in ax.get_xticklabels()]
[t.set_ha('right') for t in ax.get_xticklabels()]
[t.set_va('center') for t in ax.get_zticklabels()]
[t.set_ha('left') for t in ax.get_zticklabels()]
ax.contour(X, Y, Z)
ax.view_init(30, 30)
ax.set_xlabel('Real')
ax.set_ylabel('Imaginary')
ax.set_zlabel('dB')
_subplotEnd(ax)
_plotEnd()
def tResponse(self,vt,ts=None,fs=None):
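        """
        Computes the time response of the system to the input signal vt.
        Works through the FFT: transforms vt to the frequency domain,
        multiplies by the system's frequency response and transforms back.
          vt : Input signal vector (time domain)
          ts : Sampling period (provide either ts or fs)
          fs : Sampling frequency
        Returns the real part of the output signal (time domain)
        """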
if fs == None:
if ts == None:
raise LinearEx('ts or fs must be provided')
else:
fs = 1/ts
# Convert to frequency domain
data = np.fft.fft(vt)
# Create frequency vector
ldata = int(len(data)/2)
wv = np.pi*fs*np.array(list(range(0,ldata)) + [x - ldata for x in range(0,ldata)])/ldata
# Calculate response
resp = self.weval(wv)
data = data * resp
# Return to time domain
result = np.real(np.fft.ifft(data))
return result
'''
    The bode3Dmag and bode3Dphase are currently deprecated as they are not very useful
def bode3Dmag(self,fmax=None,zmax=100.0):
"""
Plots the magnitude of the evaluation of the
system inside the s plane in dB(magnitude)
The plot uses log10 of frequency in the axes
fmax : Maximum frequency
zmax : Maximum in Z axis (dB)
"""
if fmax is None:
min,max = self.pzRange()
fmax = np.max([np.absolute(np.real(min))
,np.absolute(np.real(max))
,np.absolute(np.imag(min))
,np.absolute(np.real(max))])
max = np.log10(2.0*fmax)
fig = plt.figure(facecolor="white") # White border
ax = fig.gca(projection='3d')
X = np.linspace(-max,0.0,100)
Y = np.linspace(-max,max,100)
X, Y = np.meshgrid(X, Y)
Z = np.clip(dB(np.absolute(self.eval(np.sign(X)*10.0**np.absolute(X)
+ 1j*np.sign(Y)*10.0**np.absolute(Y)))),0.0,zmax)
surf = ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap=cm.coolwarm,
linewidth=0, antialiased=False)
ax.contour(X, Y, Z)
ax.set_xlabel('Real (decade)')
ax.set_ylabel('Imaginary (decade)')
ax.set_zlabel('dB')
ax.view_init(30, 30)
plt.show()
def bode3Dphase(self,fmax=None):
"""
Plots the phase of the evaluation of the
system inside the s plane in dB(magnitude)
The plot uses log10 of frequency in the axes
fmax : Maximum frequency
zmax : Maximum in Z axis (dB)
"""
if fmax is None:
min,max = self.pzRange()
fmax = np.max([np.absolute(np.real(min))
,np.absolute(np.real(max))
,np.absolute(np.imag(min))
,np.absolute(np.real(max))])
max = np.log10(2.0*fmax)
fig = plt.figure(facecolor="white") # White border
ax = fig.gca(projection='3d')
X = np.linspace(-max,0.0,100)
Y = np.linspace(-max,max,100)
X, Y = np.meshgrid(X, Y)
Z = np.clip(np.angle(self.eval(np.sign(X)*10.0**np.absolute(X)
+ 1j*np.sign(Y)*10.0**np.absolute(Y)))*180.0/np.pi,-180.0,180.0)
surf = ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap=cm.coolwarm,
linewidth=0, antialiased=False)
ax.contour(X, Y, Z)
ax.set_xlabel('Real (decade)')
ax.set_ylabel('Imaginary (decade)')
ax.set_zlabel('Phase')
ax.view_init(30, 30)
plt.show()
'''
################# HELPER SYSTEM FUNCTIONS ########################
def linFromPZ(poles=[],zeros=[],gain=1.0,wgain=0,ingain=None):
"""
@linFromPZ
linFromPZ(poles,zeros,gain,ingain)
Creates a system from the list of poles and zeros
Parameters:
poles : List of poles
zeros : List of zeros
Gain can be defined as:
gain : Gain defined as the quotient of first num/den coef.
wgain : Frequency where gain is defined
igain : Gain defined at infinite freq. in high pass
Returns a linblk object
"""
# Create new block
s = linblk()
s.den=P.Polynomial(P.polyfromroots(poles))
s.num=P.Polynomial(P.polyfromroots(zeros))
# Add gain
if ingain == None:
#curr = s.gain()
curr=np.abs(s.eval(1j*wgain))
s.num = s.num * gain / curr
else:
curr = s.num.coef[-1] / s.den.coef[-1]
s.num = s.num * gain /curr
return s
def poleZeroPolar(mag,angle):
"""
@poleZeroPolar
poleZeroPolar(mag,angle)
Generates a list of two poles or zeros from
their magnitude and angle on the s plane
Required parameters:
mag : magnitude
angle : angle of one pole or zero (0 to 90)
Returns a list of two poles or zeros
"""
radians = angle * np.pi / 180.0
p1 = -mag*np.cos(radians) + 1j*mag*np.sin(radians)
p2 = -mag*np.cos(radians) - 1j*mag*np.sin(radians)
return [p1,p2]
####################### PREDEFINED SYSTEMS ###################
"""
@lin1
lin1
Identity system H(s)=1
"""
# Linear indentiy system H(s) = 1
lin1 = linblk()
| StarcoderdataPython |
3376925 | import json
from jsonschema import ValidationError, exceptions
from jsonschema.validators import Draft3Validator
from functools import wraps
from flask import _request_ctx_stack, request, jsonify
def _validate(schema, data):
reqv = Draft3Validator(schema)
errors = []
for e in reqv.iter_errors(data):
        # e.path can be empty when the error applies to the whole document
        field = e.path[0] if len(e.path) else None
        errors.append(dict(name=field, reason=e.validator))
return errors
def validate(method, schema):
def decorator(f):
@wraps(f)
def decorated_func(*args, **kws):
ctype = request.headers.get("Content-Type")
method_ = request.headers.get("X-HTTP-Method-Override", request.method)
if method_.lower() == method.lower() and "json" in ctype:
data = json.loads(request.data)
errors = _validate(schema, data)
if len(errors) > 0:
resp = jsonify(result="failure", reason="invalid json", errors=errors)
resp.status_code = 400
return resp
return f(*args, **kws)
return decorated_func
return decorator
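# Example usage (hypothetical sketch, not part of the original file):
#
#   from flask import Flask
#
#   app = Flask(__name__)
#   user_schema = {
#       "type": "object",
#       "properties": {
#           # Draft 3 puts "required" inside the property definition
#           "name": {"type": "string", "required": True},
#       },
#   }
#
#   @app.route("/users", methods=["POST"])
#   @validate("POST", user_schema)
#   def create_user():
#       return jsonify(result="ok")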
| StarcoderdataPython |
1711377 | # Copyright (c) 2019, VMRaid Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import vmraid
from vmraid import _
from functools import wraps
from vmraid.utils import add_to_date, cint, get_link_to_form
from vmraid.modules.import_file import import_file_by_path
import os
from os.path import join
def cache_source(function):
@wraps(function)
def wrapper(*args, **kwargs):
if kwargs.get("chart_name"):
chart = vmraid.get_doc('Dashboard Chart', kwargs.get("chart_name"))
else:
chart = kwargs.get("chart")
no_cache = kwargs.get("no_cache")
if no_cache:
return function(chart = chart, no_cache = no_cache)
chart_name = vmraid.parse_json(chart).name
cache_key = "chart-data:{}".format(chart_name)
if int(kwargs.get("refresh") or 0):
results = generate_and_cache_results(kwargs, function, cache_key, chart)
else:
cached_results = vmraid.cache().get_value(cache_key)
if cached_results:
results = vmraid.parse_json(vmraid.safe_decode(cached_results))
else:
results = generate_and_cache_results(kwargs, function, cache_key, chart)
return results
return wrapper
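# Example usage (hypothetical sketch, not part of the original file):
# cache_source is meant to wrap a whitelisted chart-source endpoint whose
# keyword arguments match the ones read in the wrapper above.
#
#   @vmraid.whitelist()
#   @cache_source
#   def get(chart_name=None, chart=None, no_cache=None, filters=None,
#           from_date=None, to_date=None, timespan=None,
#           time_interval=None, heatmap_year=None, refresh=None):
#       ...  # compute and return the chart data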
def generate_and_cache_results(args, function, cache_key, chart):
try:
args = vmraid._dict(args)
results = function(
chart_name = args.chart_name,
filters = args.filters or None,
from_date = args.from_date or None,
to_date = args.to_date or None,
time_interval = args.time_interval or None,
timespan = args.timespan or None,
heatmap_year = args.heatmap_year or None
)
except TypeError as e:
if str(e) == "'NoneType' object is not iterable":
# Probably because of invalid link filter
#
# Note: Do not try to find the right way of doing this because
# it results in an inelegant & inefficient solution
# ref: https://github.com/vmraid/vmraid/pull/9403
vmraid.throw(_('Please check the filter values set for Dashboard Chart: {}').format(
get_link_to_form(chart.doctype, chart.name)), title=_('Invalid Filter Value'))
return
else:
raise
vmraid.db.set_value("Dashboard Chart", args.chart_name, "last_synced_on", vmraid.utils.now(), update_modified = False)
return results
def get_dashboards_with_link(docname, doctype):
dashboards = []
links = []
if doctype == 'Dashboard Chart':
links = vmraid.get_all('Dashboard Chart Link',
fields = ['parent'],
filters = {
'chart': docname
})
elif doctype == 'Number Card':
links = vmraid.get_all('Number Card Link',
fields = ['parent'],
filters = {
'card': docname
})
dashboards = [link.parent for link in links]
return dashboards
def sync_dashboards(app=None):
"""Import, overwrite fixtures from `[app]/fixtures`"""
if not cint(vmraid.db.get_single_value('System Settings', 'setup_complete')):
return
if app:
apps = [app]
else:
apps = vmraid.get_installed_apps()
for app_name in apps:
print("Updating Dashboard for {app}".format(app=app_name))
for module_name in vmraid.local.app_modules.get(app_name) or []:
vmraid.flags.in_import = True
make_records_in_module(app_name, module_name)
vmraid.flags.in_import = False
def make_records_in_module(app, module):
dashboards_path = vmraid.get_module_path(module, "{module}_dashboard".format(module=module))
charts_path = vmraid.get_module_path(module, "dashboard chart")
cards_path = vmraid.get_module_path(module, "number card")
paths = [dashboards_path, charts_path, cards_path]
for path in paths:
make_records(path)
def make_records(path, filters=None):
if os.path.isdir(path):
for fname in os.listdir(path):
if os.path.isdir(join(path, fname)):
if fname == '__pycache__':
continue
import_file_by_path("{path}/{fname}/{fname}.json".format(path=path, fname=fname))
| StarcoderdataPython |
164588 | from openpyxl import load_workbook
xlsx_file = "E:\\hello-git-sourcetree\\R_GO\\Python_RPA\\"
xlsx = load_workbook(xlsx_file+"result.xlsx", read_only =True)
sheet=xlsx.active
print(sheet['A25'].value)
print(sheet['B1'].value)
row = sheet['1']
for data in row:
print(data.value)
xlsx=load_workbook(xlsx_file+"result.xlsx")
sheet=xlsx.active
col = sheet['A']
'''
for data in col:
print(data.value)
'''
print('-'*10,'multi row_data call','-'*10)
rows = sheet['1:2']
for row in rows:
for rowdata in row:
print(rowdata.value)
print('Reading cell data from part of the sheet')
rows = sheet['A3:B5']
for row in rows:
for cel in row:
print(cel.value)
from openpyxl import Workbook
xlsx=Workbook()
sheet=xlsx.active
sheet['A1'] = 'my input data'
#xlsx.save('other.xlsx')
sheet.append(['A1-data','B1-data','C1-data'])
sheet.append(['A2-data','B2-data','C2-data'])
xlsx.save('other2.xlsx')
sheet = xlsx.create_sheet('new sheet')
sheet['A2'] = 'AIRIM'
xlsx.save('new_xlsx.xlsx')
| StarcoderdataPython |
3258185 | from human_services.organizations import models
from common.testhelpers.random_test_values import a_string, a_website_address, an_email_address
class OrganizationBuilder:
def __init__(self):
self.organization_id = a_string()
self.name = a_string()
self.description = a_string()
self.website = a_website_address()
self.email = an_email_address()
def with_id(self, organization_id):
self.organization_id = organization_id
return self
def with_name(self, name):
self.name = name
return self
def with_description(self, description):
self.description = description
return self
def with_website(self, website):
self.website = website
return self
def with_email(self, email):
self.email = email
return self
def build(self):
result = models.Organization()
result.id = self.organization_id
result.name = self.name
result.description = self.description
result.website = self.website
result.email = self.email
return result
def create(self):
result = self.build()
result.save()
return result
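# Example usage (illustrative, not part of the original file):
#
#   org = (OrganizationBuilder()
#          .with_name('Example Org')
#          .with_email('org@example.org')
#          .build())        # use .create() instead to also save() it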
| StarcoderdataPython |
123995 | # -*- coding: utf-8 -*-
"""
Copyright [2009-2019] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import tempfile
from contextlib import contextmanager
import pytest
from rnacentral_pipeline.databases import data
from rnacentral_pipeline.databases.helpers import publications as pub
from rnacentral_pipeline.databases.genecards_suite import genecards as gene
from rnacentral_pipeline.databases.genecards_suite.core import lookup
@contextmanager
def known(handle):
with tempfile.NamedTemporaryFile() as tmp:
lookup.write(handle, os.environ["PGDATABASE"], gene.CONTEXT.urs_field, tmp)
tmp.seek(0)
yield tmp
@pytest.fixture(scope="module")
def simple_data():
with open("data/genecards/data.tsv", "r") as raw:
with known(raw) as indexed:
raw.seek(0)
entries = {}
for entry in gene.parse(raw, indexed):
assert entry.primary_id not in entries
entries[entry.primary_id] = entry
return entries
def test_can_parse_all_entries(simple_data):
assert len(simple_data) == 100
def test_can_create_unique_primary_ids(simple_data):
data = [d.primary_id for d in simple_data.values()]
assert len(data) == 100
def test_can_create_unique_accessions(simple_data):
data = [d.accession for d in simple_data.values()]
assert len(data) == 100
def test_can_create_correct_data(simple_data):
assert simple_data["GENECARDS:1A9N_Q-015:URS00001EE9F1_9606"] == data.Entry(
primary_id="GENECARDS:1A9N_Q-015:URS00001EE9F1_9606",
accession="GENECARDS:1A9N_Q-015:URS00001EE9F1_9606",
ncbi_tax_id=9606,
database="GENECARDS",
sequence="ATTGCAGTACCTCCAGGAACGGTGCAC",
regions=[],
rna_type="misc_RNA",
url="https://www.genecards.org/cgi-bin/carddisp.pl?gene=1A9N_Q-015",
seq_version=1,
gene="1A9N_Q-015",
description="Homo sapiens (human) miscellaneous RNA",
species="Homo sapiens",
common_name="human",
lineage=(
"Eukaryota; Metazoa; Chordata; Craniata; Vertebrata; "
"Euteleostomi; Mammalia; Eutheria; Euarchontoglires; "
"Primates; Haplorrhini; Catarrhini; Hominidae; Homo; "
"Homo sapiens"
),
references=[pub.reference(27322403)],
)
| StarcoderdataPython |
1768738 | <gh_stars>0
grau=int(input())
minuto=int(input())
segundo=int(input())
grausdecimais= grau+(minuto/60)+(segundo/3600)
print(f'degrees = {grausdecimais:.4f}')
| StarcoderdataPython |
48898 | # Author : <NAME>
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Sequence Ordering of Alignments
"""
import os
import numpy as np
import pandas as pd
from constants import (
folder,
alignment_file,
recipe_folder_name,
)
from utils import (
fetch_parsed_recipe,
fetch_action_ids,
)
import warnings
warnings.simplefilter(action='ignore', category=FutureWarning)
class SequenceModel:
def test_sequence_model(self):
dish_list = os.listdir(folder)
dish_list = [dish for dish in dish_list if not dish.startswith(".")]
dish_list.sort()
correct_predictions = 0
num_actions = 0
for dish in dish_list:
data_folder = os.path.join(folder, dish) # dish folder
recipe_folder = os.path.join(data_folder, recipe_folder_name) # recipe folder
alignment_file_path = os.path.join(
data_folder, alignment_file
) # alignment file
# Gold Standard Alignments between all recipes for dish
alignments = pd.read_csv(
alignment_file_path, sep="\t", header=0, skiprows=0, encoding="utf-8"
)
# Group by Recipe pairs
group_alignments = alignments.groupby(["file1", "file2"])
dish_correct_predictions = 0
dish_num_actions = 0
for key in group_alignments.groups.keys():
#print('Recipe Pair: ')
#print(key)
recipe1_filename = os.path.join(recipe_folder, key[0] + ".conllu")
recipe2_filename = os.path.join(recipe_folder, key[1] + ".conllu")
parsed_recipe1 = fetch_parsed_recipe(recipe1_filename)
parsed_recipe2 = fetch_parsed_recipe(recipe2_filename)
action_ids1 = fetch_action_ids(parsed_recipe1)
#print('Actions in Recipe 1: ')
#print(action_ids1)
action_ids2 = fetch_action_ids(parsed_recipe2)
#print('Actions in Recipe 2: ')
#print(action_ids2)
if len(action_ids1) < len(action_ids2):
predictions = action_ids2[:len(action_ids1)]
else:
predictions = action_ids2
predictions.extend([0] * (len(action_ids1) - len(action_ids2)))
predictions = np.array(predictions)
#print('Predictions: ')
#print(predictions)
recipe_pair_alignment = group_alignments.get_group(key)
true_labels = list()
for i in action_ids1:
# True Action Id
action_line = recipe_pair_alignment.loc[
recipe_pair_alignment["token1"] == i
]
if not action_line.empty:
label = action_line["token2"].item()
true_labels.append(label)
else:
true_labels.append(0)
true_labels = np.array(true_labels)
#print('True Labels:')
#print(true_labels)
score = [predictions == true_labels]
dish_correct_predictions += np.sum(score)
dish_num_actions += len(action_ids1)
dish_accuracy = dish_correct_predictions * 100 / dish_num_actions
correct_predictions += dish_correct_predictions
num_actions += dish_num_actions
print("Accuracy of Dish {} : {:.2f}".format(dish, dish_accuracy))
model_accuracy = correct_predictions * 100 / num_actions
print("Model Accuracy: {:.2f}".format(model_accuracy))
| StarcoderdataPython |
97158 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class SyncFullSchemaTableColumn(Model):
"""Properties of the column in the table of database full schema.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar data_size: Data size of the column.
:vartype data_size: str
:ivar data_type: Data type of the column.
:vartype data_type: str
:ivar error_id: Error id of the column.
:vartype error_id: str
:ivar has_error: If there is error in the table.
:vartype has_error: bool
:ivar is_primary_key: If it is the primary key of the table.
:vartype is_primary_key: bool
:ivar name: Name of the column.
:vartype name: str
:ivar quoted_name: Quoted name of the column.
:vartype quoted_name: str
"""
_validation = {
'data_size': {'readonly': True},
'data_type': {'readonly': True},
'error_id': {'readonly': True},
'has_error': {'readonly': True},
'is_primary_key': {'readonly': True},
'name': {'readonly': True},
'quoted_name': {'readonly': True},
}
_attribute_map = {
'data_size': {'key': 'dataSize', 'type': 'str'},
'data_type': {'key': 'dataType', 'type': 'str'},
'error_id': {'key': 'errorId', 'type': 'str'},
'has_error': {'key': 'hasError', 'type': 'bool'},
'is_primary_key': {'key': 'isPrimaryKey', 'type': 'bool'},
'name': {'key': 'name', 'type': 'str'},
'quoted_name': {'key': 'quotedName', 'type': 'str'},
}
def __init__(self, **kwargs) -> None:
super(SyncFullSchemaTableColumn, self).__init__(**kwargs)
self.data_size = None
self.data_type = None
self.error_id = None
self.has_error = None
self.is_primary_key = None
self.name = None
self.quoted_name = None
| StarcoderdataPython |
3234417 | from model import Model
from model import Player
import view as v
import controller as cont
def create_game(options = 'terminal'):
m = create_bare_game()
# Initial positions
pos = create_initial_positions()
m.set_placement(pos)
# Create view
view = create_view(options)
m.set_view(view)
# Create controller
controller = create_controller(options)
m.set_controller(controller)
# Create players
p1 = Player(view, controller, '1', lambda x: x > 0)
p2 = Player(view, controller, '2', lambda x: x < 0)
m.set_players([p1, p2])
return m
def create_controller(options = 'terminal'):
return cont.Terminal()
def create_view(options = 'terminal'):
return v.Terminal()
def create_bare_game():
m = Model()
return m
def create_initial_positions():
'''
Could be moved into Game class
'''
start_position_a = [
(6, 5),
(8, 3),
(13, 5),
(24, 2)
]
start_position_b = [(25-i, -val) for (i,val) in start_position_a]
return start_position_a + start_position_b
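# Example usage (illustrative sketch, not part of the original file):
#
#   m = create_game('terminal')   # Model wired to terminal view/controller
#   # How the game loop is started depends on the Model class (not shown
#   # here); e.g. a hypothetical m.run() or m.play() entry point.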
| StarcoderdataPython |
148936 | import requests
import json
import csv
import os
import LastRead
def api_call(payload):
auth = requests.post("https://api.mangadex.org/auth/login", json=payload)
token = auth.json()["token"]["session"]
bearer = {"Authorization": f"Bearer {token}"}
offset = 0
follow_list = []
initial = {"limit": 100}
initial_query = requests.get(
"https://api.mangadex.org/user/follows/manga", headers=bearer, params=initial
).json()
for i in range(0, initial_query["total"], 100):
        # Use the page start directly as the offset; accumulating it with
        # "offset += i" would skip pages (0, 100, 300, 600, ...)
        offset = i
body = {"limit": 100, "offset": offset}
r = requests.get(
"https://api.mangadex.org/user/follows/manga", headers=bearer, params=body
).json()
follow_list.append(r)
return follow_list
def file_handler(follow_list):
try:
with open("follow_list.csv", "w", newline="", encoding="utf-8") as f:
writer = csv.writer(f)
writer.writerow(["Title", "id"])
for j in range(0, len(follow_list)):
for i in range(len(follow_list[j]["results"])):
writer.writerow(
[
follow_list[j]["results"][i]["data"]["attributes"]["title"][
"en"
],
follow_list[j]["results"][i]["data"]["id"],
]
)
except PermissionError:
print("Please close the csv file made before (if its open in an editor) when trying to make a new one.")
input("Press Enter to end")
user = input("Enter Your Username: ")
password = input("Enter Your Password: ")
payload = {"username": user, "password": password}
choice = input(
"Do you also want your last read chapters? (may take a while and a seperate list will be created)\nType Y/N?: ")
try:
file_handler(api_call(payload))
print(
f"Your follow list has been created at this directory:- {os.getcwd()}")
input("Press Enter to end")
if choice == 'Y':
LastRead.lastread(payload)
except:
print("Wrong username or password, please try again or process ended by user")
input("Press Enter to end")
| StarcoderdataPython |
137763 | <reponame>jiskra/openmv
# ADC Internal Channels Example
#
# This example shows how to read internal ADC channels.
import time, pyb
adc = pyb.ADCAll(12)
print("VREF = %.1fv VREF = %.1fv Temp = %d" % (adc.read_core_vref(), adc.read_core_vbat(), adc.read_core_temp()))
| StarcoderdataPython |
130269 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def add_91_room_functional_names(apps, schema_editor):
map = {
'Hibiscus': 'Breakout 3',
'South Pacific 2': 'Meeting Room #6',
'South Pacific 1': 'Terminal Room',
'Coral 1': 'Breakout 4',
'Coral 2': 'Breakout 5',
'Coral 5': 'Breakout 6',
'Coral 4': 'Breakout 7',
'Coral 3': 'Breakout 8',
'Great Lawn': 'Welcome Reception',
'Rainbow Suite': 'Not Used',
'Lehua Suite': 'Breakout 1',
'Kahili': 'Breakout 2',
'Rainbow Suite 1/2': 'Meeting Room #2 (IESG Meeting Room)',
'Village Green': 'Meet and Greet',
'South Pacific 3': 'Meeting Room #4 (IAOC/IAD Office)',
'Rainbow Suite 3': 'Meeting Room #7',
'Rainbow Suite 2/3': 'ISOC Dinner',
'South Pacific 3/4': 'ISOC AC Meeting',
'Iolani 6/7': 'Meeting Room #5 (NomCom Office)',
'Sea Pearl 1/2': 'Reception',
'Sea Pearl 2': 'Meeting Room #1 (IAB Meeting Room)',
'Coral Lounge': 'Registration Area and Breaks',
'Tiare Suite': 'Meeting Room #8 (RFC Office)',
}
Room = apps.get_model('meeting', 'Room')
for name,functional_name in map.items():
Room.objects.filter(meeting__number=91,name=name).update(functional_name=functional_name)
class Migration(migrations.Migration):
dependencies = [
('meeting', '0009_room_functional_name'),
]
operations = [
migrations.RunPython(add_91_room_functional_names),
]
| StarcoderdataPython |
3292664 | <reponame>RichMooreNR/newrelic-lambda-cli
# -*- coding: utf-8 -*-
import click
from newrelic_lambda_cli import utils
from newrelic_lambda_cli.cli import functions, integrations, layers, subscriptions
@click.group()
@click.option("--verbose", "-v", help="Increase verbosity", is_flag=True)
@click.pass_context
def cli(ctx, verbose):
ctx.ensure_object(dict)
ctx.obj["VERBOSE"] = verbose
def register_groups(group):
functions.register(group)
integrations.register(group)
layers.register(group)
subscriptions.register(group)
@utils.catch_boto_errors
def main():
register_groups(cli)
cli()
| StarcoderdataPython |
4812775 | <gh_stars>1-10
celsius = float(input("Enter the temperature in Celsius: \n"))
fahrenheit = ((1.8 * celsius) + 32)
print("{}ºC corresponds to {:.1f}ºF.".format(celsius, fahrenheit))
| StarcoderdataPython |
1771482 | <reponame>tosinolawore/py_everything
import sphinx
import python_docs_theme
project = 'py_everything'
copyright = '2021, PyBash'
author = 'PyBash'
release = '2.0.0'
extensions = ['sphinx.ext.autodoc', 'python_docs_theme']
templates_path = ['_templates']
exclude_patterns = []
html_theme = 'python_docs_theme'
html_static_path = ['_static']
html_logo = '..\\..\\extra\\logo.png'
html_favicon = '..\\..\\extra\\logo.png'
| StarcoderdataPython |
1745531 | <filename>src/LeucipPy/__tests6.py
import WilliamsDivergenceMaker as wdm
import BioPythonMaker as bpm
import GeometryMaker as dfm
import HtmlReportMaker as hrm
import DsspMaker as dm
strucs = bpm.loadPdbStructures([],'Data/',extension='ent',prefix='pdb',log=2)
geo = dfm.GeometryMaker(strucs,log=2)
data = geo.calculateGeometry(['N:CA'])
dssp = dm.DsspMaker([],'Data/',extension='ent',prefix='pdb',log=2)
data = dssp.addDsspColumn(data)
print(data)
df = geo.calculateGeometry(['FE:{O}','FE:{O@2}'],log=2)
print(df)
#df = geo.calculateGeometry(['FE:{O,N,NE2}','FE:{O,N@2}'])
#print(df)
#df = geo.calculateGeometry(['FE:{O,N}+1','FE:{O,N,NE2@2}+1'])
#print(df)
#df = geo.calculateGeometry(['FE:{O,N,FE}+1','FE:{O,N,FE@2}+1'],log=0)
#print(df)
| StarcoderdataPython |
90420 | <filename>bage_utils/ssh_util.py
import traceback
import warnings
import paramiko
warnings.filterwarnings("ignore")
class SshUtil(object):
"""
- Connect remote by SSH and run specific command.
- See also `bage_util.SellUtil`
"""
def __init__(self, hostname, username=None, password=<PASSWORD>, port=22, log=None):
self.client = None
self.log = log
try:
self.client = paramiko.SSHClient()
self.client.set_missing_host_key_policy(paramiko.WarningPolicy())
# self.client.load_system_host_keys()
# keys = paramiko.util.load_host_keys(os.path.expanduser('~/.ssh/known_hosts'))
# key = keys[hostname]
# print('keys:', keys
# self.client.get_host_keys().add(hostname, 'ssh-rsa', key)
self.client.connect(hostname, port, username, password)
except Exception:
self.close()
if self.log:
self.log.error(traceback.format_exc())
else:
traceback.print_exc()
def close(self):
try:
if self.client:
self.client.close()
except Exception:
if self.log:
                self.log.error(traceback.format_exc())
else:
traceback.print_exc()
def execute_n_print(self, command):
_stdin, stdout, _stderr = self.client.exec_command(command)
for line in stdout:
print(line.strip('\n'))
def execute(self, command):
_stdin, stdout, _stderr = self.client.exec_command(command)
return stdout
def transfer(self, from_site, to_site):
pass
if __name__ == '__main__':
import os
ssh = SshUtil('gpu4')
ssh.execute_n_print('ls -l')
| StarcoderdataPython |
163908 | <reponame>quantmind/lux<filename>lux/core/commands/clear_cache.py<gh_stars>10-100
from lux.core import LuxCommand, Setting
class Command(LuxCommand):
help = "Clear Cache"
option_list = (
Setting('prefix',
nargs='?',
desc=('Optional cache prefix. If omitted the default '
'application prefix is used (APP_NAME)')),
)
def run(self, options, **params):
cache = self.app.cache_server
result = cache.clear(options.prefix)
self.write('Clear %d keys' % result)
return result
| StarcoderdataPython |
4800365 | <gh_stars>1-10
#!/usr/bin/env python
# coding: utf-8
__author__ = 'whoami'
"""
@version: 1.0
@author: whoami
@license: Apache Licence 2.0
@contact: <EMAIL>
@site: http://www.itweet.cn
@software: PyCharm Community Edition
@file: cpu.py
@time: 2015-11-28 下午1:51
"""
import time
def round_percentage(number, ndigits):
    return round(number * 100, ndigits)
def monitor(first_invoke=2):
    interval = first_invoke
f = open('/proc/stat')
cpu_t1 = f.readline().split()
f.close()
time.sleep(interval)
f = open('/proc/stat')
cpu_t2 = f.readline().split()
f.close()
cpu_total_t1 = float(eval(('+'.join(cpu_t1)).split('cpu+')[1]))
cpu_idle_t1 = float(cpu_t1[4])
cpu_total_t2 = float(eval(('+'.join(cpu_t2)).split('cpu+')[1]))
cpu_idle_t2 = float(cpu_t2[4])
cpu_idle = cpu_idle_t2-cpu_idle_t1
cpu_total = cpu_total_t2-cpu_total_t1
cpu_percent = (cpu_total-cpu_idle)/cpu_total
    cpu_total_tmp = (cpu_total_t1 + cpu_total_t2) / 2
cpu_user_tmp = (int(cpu_t1[1])+int(cpu_t2[1]))/2
cpu_nice_tmp = (int(cpu_t1[2])+int(cpu_t2[2]))/2
cpu_system_tmp = (int(cpu_t1[3])+int(cpu_t2[3]))/2
cpu_idle_tmp = (int(cpu_t1[4])+int(cpu_t2[4]))/2
cpu_iowait_tmp = (int(cpu_t1[5])+int(cpu_t2[5]))/2
cpu_irq_tmp = (int(cpu_t1[6])+int(cpu_t2[6]))/2
cpu_softirq_tmp = (int(cpu_t1[7])+int(cpu_t2[7]))/2
cpu_steal_tmp = (int(cpu_t1[8])+int(cpu_t2[8]))/2
cpu_guest_tmp = (int(cpu_t1[9])+int(cpu_t2[9]))/2
value_dic = {
'cpu_user': round_percentage(cpu_user_tmp/cpu_total_tmp,2),
'cpu_nice': round_percentage(cpu_nice_tmp/cpu_total_tmp,2),
'cpu_system': round_percentage(cpu_system_tmp/cpu_total_tmp,2),
'cpu_idle': round_percentage(cpu_idle_tmp/cpu_total_tmp,2),
'cpu_iowait': round_percentage(cpu_iowait_tmp/cpu_total_tmp,2),
'cpu_irq': round_percentage(cpu_irq_tmp/cpu_total_tmp,2),
'cpu_softirq': round_percentage(cpu_softirq_tmp/cpu_total_tmp,2),
'cpu_steal': round_percentage(cpu_steal_tmp/cpu_total_tmp,2),
'cpu_guest': round_percentage(cpu_guest_tmp/cpu_total_tmp,2),
'cpu_percent': round_percentage(cpu_percent,2),
}
return value_dic
if __name__ == '__main__':
print monitor() | StarcoderdataPython |
1668649 | <gh_stars>1-10
from email.utils import formatdate
from datetime import datetime, timedelta
from time import mktime
from django.shortcuts import get_object_or_404
from django.http import HttpResponse, Http404
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import NullBreadcrumb
from models import ExternalImageSized
class IndexView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context):
raise Http404
class ExternalImageView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context, slug):
eis = get_object_or_404(ExternalImageSized, slug=slug)
try:
            with open(eis.get_filename(), 'rb') as image_file:
                response = HttpResponse(image_file.read(),
                                        mimetype=eis.content_type.encode('ascii'))
except IOError:
eis.delete()
raise Http404()
response['ETag'] = slug
response['Expires'] = formatdate(mktime((datetime.now() + timedelta(days=7)).timetuple()))
response['Last-Modified'] = formatdate(mktime(eis.external_image.last_updated.timetuple()))
return response
| StarcoderdataPython |
1642586 | import socket
import time
import threading
UDP_IP = "192.168.0.120"
UDP_PORT = 7191
spray_off_msg = b"0-Off"
spray_on_msg = b"1-On"
lighting_msg = "L="
thread_run = True
def listen_for_udp(sock):
global thread_run
sock.connect((UDP_IP, UDP_PORT))
sock.settimeout(2)
while thread_run:
try:
data, addr = sock.recvfrom(1024)
print("received message:", data, addr)
except socket.timeout:
pass
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
listen_UDP = threading.Thread(target=listen_for_udp, args=(sock,))
listen_UDP.start()
for i in range(2):
if i % 2 == 0:
sock.sendto(spray_off_msg, (UDP_IP, UDP_PORT))
print("UDP {}:{} {}".format(UDP_IP, UDP_PORT, spray_off_msg))
else:
sock.sendto(spray_on_msg, (UDP_IP, UDP_PORT))
print("UDP {}:{} {}".format(UDP_IP, UDP_PORT, spray_on_msg))
light_lev_msg = bytes(lighting_msg + str(i*10), 'utf-8')
sock.sendto(light_lev_msg, (UDP_IP, UDP_PORT))
print("UDP {}:{} {}".format(UDP_IP, UDP_PORT, light_lev_msg))
time.sleep(5.0)
thread_run = False
time.sleep(5.0)
| StarcoderdataPython |
68472 | <reponame>nouranHnouh/FormusWorkshop-<gh_stars>0
import models
members_data=[models.Member("Nancy",20),
models.Member("Narmdha",27),
models.Member("Mark",33),
models.Member("George",40)]
post_data=[models.Post("python","python is an interpreted high-level programming language",members_data[0].id),
           models.Post("Telecommunication network","is an electronic system of switches and nodes that allows for data transfer and exchange between multiple users",members_data[3].id),
           models.Post("Artificial intelligence","is intelligence demonstrated by machines, such as self-driving cars",members_data[0].id),
           models.Post("what is HTML?","HTML is the standard markup language for creating web pages",members_data[1].id),
           models.Post("Electrical Engineering","is the field that deals with the study and application of electricity and electronics",members_data[2].id),
           models.Post("circuit components","Resistors, capacitors, and inductors are examples of circuit components",members_data[1].id),
           models.Post("circuit simulation","matlab can be used to simulate circuits",members_data[2].id)]
def test(member_store,post_store):
for member in members_data:
member_store.add(member)
for post in post_data:
post_store.add(post)
| StarcoderdataPython |
1651687 | <gh_stars>10-100
# -*- coding: utf-8 -*-
import os
import shutil
import sys
import time
import optparse
import lib.config as config
import lib.vcrparser as vcrparser
# Dynamics load of the workload manager library depending on 'config.mode' value set in lib/config.py
if config.mode == "LSF":
import lib.sys_LSF as manager
elif config.mode == "LOCAL":
import lib.sys_single as manager
else:
import lib.sys_OTHER as manager
##########################################################
## Parsing command line arguments
##########################################################
desc = "aRNApipe: RNA-seq framework"
parser = optparse.OptionParser(description = desc)
parser.add_option("-m", "--mode", dest = "m", default = "", help = "[Required] Write mode: 'new'/'update'/'kill'/'progress'/'skeleton'/'genomes'. 'new' all the libraries will be processed and previously processed results, if exist, will be removed; 'update' only those libraries that have not been previously processed will be analyzed; 'kill' stops a current execution of the given project by killing all its processes; 'progress' shows the progress of a running execution; 'skeleton' creates a skeleton for a project using the absolute path to the project folder given by -p; 'genomes' displays the genome versions currently available.")
parser.add_option("-p", "--project_folder", dest = "folder", default = "", help = "[Required] Absolute path to the project folder. This folder will be used to store the results and must contain the files 'samples.list' and 'config.txt'.")
parser.add_option("-w", "--walltime", dest = "wt", default = "200:00", help = "Optional: Wall time for the main job (defaults to 200:00).")
parser.add_option("-q", "--queue", dest = "q", default = "priority", help = "Optional: Queue that will be used by the main job (defaults to 'priority').")
(opt, args) = parser.parse_args()
path_aRNApipe = os.path.dirname(sys.argv[0])
##########################################################
## Genomes mode: Shows available genome builds and exits
##########################################################
if opt.m == "genomes":
db = path_aRNApipe+ "/../genomes_processed/installed_genomes.txt"
if os.path.exists(db):
f = open(db, 'r')
print "Available genome builds:"
for i in f:
i = i.strip("\n").split("\t")
if len(i) >= 2:
print "- Key: " + i[0] + " (installed " + i[1] +"): " + "/".join(i[2:])
f.close()
exit("All Done!")
else:
exit("Database file not found.")
#############################################################
## Skeleton mode: Builds skeleton for a new project and exits
#############################################################
if opt.m == "skeleton":
if not os.path.exists(opt.folder):
try:
os.mkdir(opt.folder)
shutil.copy(path_aRNApipe + "/template/config.txt", opt.folder + "/config.txt")
shutil.copy(path_aRNApipe + "/template/samples.list", opt.folder + "/samples.list")
print "Project skeleton created."
except:
exit("Unspecified error during skeleton generation.")
else:
print "Project folder already exists"
exit("All Done!")
##########################################################
## Checking required arguments (path and mode)
##########################################################
if (opt.folder == "") or (opt.m == ""):
exit("Parameters not provided. Check --help.")
if not opt.folder.startswith("/"):
exit("An absolute path must be provided (started with '/').")
if opt.folder.endswith("/"):
opt.folder = opt.folder[0:-1]
opt.folder = opt.folder.split("/")
opt.path_base = "/".join(opt.folder[0:-1])
opt.folder = opt.folder[-1]
complete_path = opt.path_base + "/" + opt.folder
##########################################################
## Check for user confirmation in update, kill or new modes
##########################################################
if opt.m == "kill":
t = raw_input("Kill the current analysis (y/n)?")
if t != "y":
exit("--> Exit")
elif opt.m == "update":
print "When running 'update' be sure that no previuos processes on the same project are running."
t = raw_input("Continue (y/n)?")
if t != "y":
exit("--> Exit")
elif opt.m == "new":
print "Mode 'new'. All the results data in the project folder, if exists, will be removed."
t = raw_input("Continue (y/n)?")
if t != "y":
exit("--> Exit")
#######################################################################
## Removing previous results, temp folder and log files if mode = 'new'
#######################################################################
if opt.m == "new":
n = os.listdir(complete_path)
for i in n:
if i.startswith("results_"):
nom = complete_path + "/" + i
os.system("rm -r " + nom.replace("//", "/"))
elif i.startswith("aRNApipe_"):
nom = complete_path + "/" + i
os.system("rm " + nom.replace("//", "/"))
elif i=="temp":
nom = complete_path + "/" + i
os.system("rm -r " + nom.replace("//", "/"))
elif i=="pid.txt":
nom = complete_path + "/" + i
os.system("rm " + nom.replace("//", "/"))
elif i=="logs":
nom = complete_path + "/" + i
os.system("rm -r " + nom.replace("//", "/"))
elif opt.m == "update":
n = os.listdir(complete_path)
for i in n:
if i.startswith("results_"):
if os.path.exists(complete_path + "/" + i + "/samples_ko.txt"):
os.remove(complete_path + "/" + i + "/samples_ko.txt")
##########################################################
## Submiting the main process if mode 'new' or 'update'
##########################################################
print "aRNApipe:"
print "- Input: " + complete_path
print "- Mode: " + opt.m
## Kill current analysis if mode 'kill'
if opt.m == "kill":
n = vcrparser.project_kill(opt.path_base, opt.folder)
## Check progress if mode 'progress'
elif opt.m == "progress":
n = vcrparser.project_process(opt.path_base, opt.folder)
## Submiting the main process if mode 'new' or 'update'
elif opt.m in ["update", "new"]:
timestamp = time.strftime("%y%m%d_%H%M%S")
if not os.path.exists(complete_path):
os.mkdir(complete_path)
if not os.path.exists(complete_path + "/logs"):
os.mkdir(complete_path + "/logs")
vcr_args = " -f " + opt.folder + " -b " + opt.path_base + " -m " + opt.m + " -t " + timestamp
if opt.m == "update":
out = open(complete_path + "/logs/aRNApipe.log", 'a')
print >> out, "###########################################"
print >> out, "## Update: " + timestamp
else:
out = open(complete_path + "/logs/aRNApipe.log", 'w')
print >> out, "###########################################"
print >> out, "## New: " + timestamp
print >> out, "###########################################"
out.close()
if not os.path.exists(complete_path + "/temp"):
os.mkdir(complete_path + "/temp")
ce = vcrparser.change_environment(config.environment)
uds = manager.submit_job(opt.wt, "1", opt.q, complete_path + "/logs/aRNApipe_cluster_" + timestamp + ".log", opt.folder, "python " + path_aRNApipe + "/lib/wr_aRNApipe.py" + vcr_args + " >> " + complete_path + "/logs/aRNApipe.log", 0, complete_path, "")
print "Main process submitted (jid=" + uds + ")"
out = open(complete_path + "/pid.txt", 'w')
print >> out, "aRNApipe\t" + uds + "\t" + timestamp
out.close()
else:
exit("Selected mode not valid.") | StarcoderdataPython |
1616076 | <filename>src/secondaires/peche/commandes/appater/__init__.py<gh_stars>0
# -*-coding:Utf-8 -*
# Copyright (c) 2012 <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant la commande 'appâter'."""
from primaires.interpreteur.commande.commande import Commande
class CmdAppater(Commande):
"""Commande 'appâter'"""
def __init__(self):
"""Constructeur de la commande"""
Commande.__init__(self, "appâter", "bait")
self.schema = "<nom_objet>"
        self.aide_courte = "appâte un hameçon"
self.aide_longue = \
"Cette commande permet d'appâter un hameçon. Vous devez " \
"tenir une canne à pêche et préciser le nom de l'appât. " \
"Certains appâts, plus durs à trouver que d'autres, " \
"attirent aussi plus le poisson et peuvent augmenter la " \
"chance de pêcher une prise rare. Dès lors que vous avez " \
"pêché quelque chose, vous devez appâter de nouveau " \
"l'hameçon de la canne avant de la relancer."
def ajouter(self):
"""Méthode appelée lors de l'ajout de la commande à l'interpréteur"""
nom_objet = self.noeud.get_masque("nom_objet")
nom_objet.proprietes["conteneurs"] = \
"(personnage.equipement.inventaire_simple.iter_objets_qtt(" \
"True), )"
nom_objet.proprietes["quantite"] = "True"
nom_objet.proprietes["conteneur"] = "True"
def interpreter(self, personnage, dic_masques):
"""Interprétation de la commande."""
appat = dic_masques["nom_objet"].objet
if not appat.est_de_type("appât"):
personnage << "|err|Vous ne pouvez appâter {}.|ff|".format(
appat.get_nom())
return
canne = None
for membre in personnage.equipement.membres:
for objet in membre.equipe:
if objet.est_de_type("canne à pêche"):
canne = objet
break
if canne is None:
personnage << "|err|Vous n'équipez aucune canne à pêche.|ff|"
return
if (getattr(canne, "appat", None) is not None
and appat.prototype == canne.appat.prototype):
personnage << "|err|Vous avez déjà appâté {} avec {}.|ff|".format(
canne.get_nom(), appat.get_nom())
return
personnage.agir("appater")
appat.contenu.retirer(appat)
a_appat = canne.appat
if a_appat:
importeur.objet.supprimer_objet(a_appat.identifiant)
canne.appat = appat
personnage << "Vous appâtez {} avec {}.".format(
canne.get_nom(), appat.get_nom())
personnage.salle.envoyer("{{}} appâte {} avec {}.".format(
canne.get_nom(), appat.get_nom()), personnage)
| StarcoderdataPython |
from dataclasses import dataclass
from cart.services.inventory_services import InventoryService
@dataclass
class CartItem:
    product_id: int
    quantity: int
class ShoppingCart:
id: int
voucher: str
discount_ratio: float
@property
def items(self):
return tuple(self._items)
def __init__(self):
self._items = []
self.inventory = InventoryService()
self.voucher = None
self.discount_ratio = 0
def get_item(self, product_id):
if not self.has_item(product_id):
raise RuntimeError
        return next(x for x in self.items if x.product_id == product_id)
def has_item(self, product_id):
return product_id in map(lambda x: x.product_id, self.items)
def add(self, item: CartItem):
if self.inventory.remaining(item.product_id) < item.quantity:
raise RuntimeError("Not enough inventory")
self._items.append(item)
def remove(self, item):
self._items.remove(item)
def clear_items(self):
        self._items.clear()
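# Hypothetical usage sketch: InventoryService behavior is assumed from the
# calls above (remaining(product_id) -> int) and would need a stocked backend.
#
#   cart = ShoppingCart()
#   item = CartItem(product_id=42, quantity=2)
#   cart.add(item)            # raises RuntimeError if fewer than 2 in stock
#   assert cart.has_item(42)
#   cart.remove(item)
#   cart.clear_items()
 | StarcoderdataPython |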
3363790 | # Copyright 2020 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functionality for testing preconditions and assertions.
"""
import functools
import sys
def loaded_module(modules):
"""Function decorator that tests whether module(s) have already been loaded.
Parameters
----------
modules: :class:`str` or sequence of :class:`str`, required
Names of the modules that must already be loaded for the wrapped
function to execute.
Raises
------
:class:`RuntimeError`
If any module in `modules` isn't already loaded.
"""
if isinstance(modules, str):
modules = (modules,) # pragma: no cover
def implementation(f):
@functools.wraps(f)
def implementation(*args, **kwargs):
for module in modules:
if module not in sys.modules:
raise RuntimeError(f"Module {module} could not be found.") # pragma: no cover
return f(*args, **kwargs)
return implementation
return implementation
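if __name__ == "__main__":
    # Minimal demo sketch: `functools` is imported by this module, so the
    # precondition passes; naming a module that was never imported would
    # raise RuntimeError at call time.
    @loaded_module("functools")
    def greet(name):
        return f"hello, {name}"
    print(greet("world"))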
| StarcoderdataPython |
4818096 | import random
class Solution:
def __init__(self, nums):
"""
:type nums: List[int]
"""
self.org = nums
self.aux = nums[:]
def reset(self):
"""
Resets the array to its original configuration and return it.
:rtype: List[int]
"""
return self.aux
def shuffle(self):
"""
Returns a random shuffling of the array.
:rtype: List[int]
"""
x = len(self.org)
result = self.org[:]
var = x
for i in range(x):
id = random.randrange(0, var)
result[id], result[var - 1] = result[var - 1], result[id]
var -= 1
return result
# Your Solution object will be instantiated and called as such:
# obj = Solution(nums)
# param_1 = obj.reset()
# param_2 = obj.shuffle()
| StarcoderdataPython |
3289220 | <filename>qiskit/pulse/pulse_lib/samplers/decorators.py
# -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2019.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
# pylint: disable=missing-return-doc
"""Sampler decorator module for sampling of continuous pulses to discrete pulses to be
exposed to the user.
Some atypical boilerplate has been added to solve the problem of decorators not preserving
their wrapped function signatures. Below we explain the problem that samplers solve and how
we implement this.
A sampler is a function that takes a continuous pulse function with signature:
```python
def f(times: np.ndarray, *args, **kwargs) -> np.ndarray:
...
```
and returns a new function:
def f(duration: int, *args, **kwargs) -> SamplePulse:
...
Samplers are used to build up pulse commands from continuous pulse functions.
In Python the creation of a dynamic function that wraps another function will cause
the underlying signature and documentation of the underlying function to be overwritten.
In order to circumvent this issue the Python standard library provides the decorator
`functools.wraps` which allows the programmer to expose the names and signature of the
wrapped function as those of the dynamic function.
Samplers are implemented by creating a function with signature
@sampler
def left(continuous_pulse: Callable, duration: int, *args, **kwargs)
...
This will create a sampler function for `left`. Since it is a dynamic function it would not
have the docstring of `left` available to `help`. This could be fixed by wrapping with
`functools.wraps` in the `sampler`, but this would then cause the signature to be that of the
sampler function which is called on the continuous pulse, below:
`(continuous_pulse: Callable, duration: int, *args, **kwargs)`
This is not correct for the sampler as the output sampled functions accept only a function.
For the standard sampler we get around this by not using `functools.wraps` and
explicitly defining our samplers such as `left`, `right` and `midpoint` and
calling `sampler` internally on the function that implements the sampling schemes such as
`left_sample`, `right_sample` and `midpoint_sample` respectively. See `left` for an example of this.
In this way our standard samplers will expose the proper help signature, but a user can
still create their own sampler with
@sampler
def custom_sampler(time, *args, **kwargs):
...
However, in this case it will be missing documentation of the underlying sampling methods.
We believe that the definition of custom samplers will be rather infrequent.
However, users will frequently apply sampler instances to continuous pulses. Therefore, a different
approach was required for sampled continuous functions (the output of a continuous pulse function
decorated by a sampler instance).
A sampler instance is a decorator that may be used to wrap continuous pulse functions such as
linear below:
```python
@left
def linear(times: np.ndarray, m: float, b: float) -> np.ndarray:
```Linear test function
Args:
times: Input times.
m: Slope.
b: Intercept
Returns:
np.ndarray
```
return m*times+b
```
Which after decoration may be called with a duration rather than an array of times
```python
duration = 10
pulse_command = linear(10, 0.1, 0.1)
```
If one calls help on `linear` they will find
```
linear(duration:int, *args, **kwargs) -> numpy.ndarray
Discretized continuous pulse function: `linear` using
sampler: `_left`.
The first argument (time) of the continuous pulse function has been replaced with
a discretized `duration` of type (int).
Args:
duration (int)
*args: Remaining arguments of continuous pulse function.
See continuous pulse function documentation below.
**kwargs: Remaining kwargs of continuous pulse function.
See continuous pulse function documentation below.
Sampled continuous function:
function linear in module test.python.pulse.test_samplers
linear(x:numpy.ndarray, m:float, b:float) -> numpy.ndarray
Linear test function
Args:
x: Input times.
m: Slope.
b: Intercept
Returns:
np.ndarray
```
This is partly because `functools.wraps` has been used on the underlying function.
This in itself is not sufficient as the signature of the sampled function has
`duration`, whereas the signature of the continuous function is `time`.
This is achieved by removing `__wrapped__` set by `functools.wraps` in order to preserve
the correct signature and also applying `_update_annotations` and `_update_docstring`
to the generated function which corrects the function annotations and adds an informative
docstring respectively.
The user therefore has access to the correct sampled function docstring in its entirety, while
still seeing the signature for the continuous pulse function and all of its arguments.
"""
import functools
from typing import Callable
import textwrap
import pydoc
import numpy as np
import qiskit.pulse.commands as commands
from . import strategies
def _update_annotations(discretized_pulse: Callable) -> Callable:
"""Update annotations of discretized continuous pulse function with duration.
Args:
discretized_pulse: Discretized decorated continuous pulse.
"""
undecorated_annotations = list(discretized_pulse.__annotations__.items())
decorated_annotations = undecorated_annotations[1:]
decorated_annotations.insert(0, ('duration', int))
discretized_pulse.__annotations__ = dict(decorated_annotations)
return discretized_pulse
def _update_docstring(discretized_pulse: Callable, sampler_inst: Callable) -> Callable:
"""Update annotations of discretized continuous pulse function.
Args:
discretized_pulse: Discretized decorated continuous pulse.
sampler_inst: Applied sampler.
"""
wrapped_docstring = pydoc.render_doc(discretized_pulse, '%s')
header, body = wrapped_docstring.split('\n', 1)
body = textwrap.indent(body, ' ')
wrapped_docstring = header+body
updated_ds = """
Discretized continuous pulse function: `{continuous_name}` using
sampler: `{sampler_name}`.
The first argument (time) of the continuous pulse function has been replaced with
a discretized `duration` of type (int).
Args:
duration (int)
*args: Remaining arguments of continuous pulse function.
See continuous pulse function documentation below.
**kwargs: Remaining kwargs of continuous pulse function.
See continuous pulse function documentation below.
Sampled continuous function:
{continuous_doc}
""".format(continuous_name=discretized_pulse.__name__,
sampler_name=sampler_inst.__name__,
continuous_doc=wrapped_docstring)
discretized_pulse.__doc__ = updated_ds
return discretized_pulse
def sampler(sample_function: Callable) -> Callable:
"""Sampler decorator base method.
Samplers are used for converting an continuous function to a discretized pulse.
They operate on a function with the signature:
`def f(times: np.ndarray, *args, **kwargs) -> np.ndarray`
Where `times` is a numpy array of floats with length n_times and the output array
is a complex numpy array with length n_times. The output of the decorator is an
instance of `FunctionalPulse` with signature:
`def g(duration: int, *args, **kwargs) -> SamplePulse`
Note if your continuous pulse function outputs a `complex` scalar rather than a
`np.ndarray`, you should first vectorize it before applying a sampler.
This class implements the sampler boilerplate for the sampler.
Args:
sample_function: A sampler function to be decorated.
"""
def generate_sampler(continuous_pulse: Callable) -> Callable:
"""Return a decorated sampler function."""
@functools.wraps(continuous_pulse)
def call_sampler(duration: int, *args, **kwargs) -> commands.SamplePulse:
"""Replace the call to the continuous function with a call to the sampler applied
to the analytic pulse function."""
sampled_pulse = sample_function(continuous_pulse, duration, *args, **kwargs)
return np.asarray(sampled_pulse, dtype=np.complex_)
# Update type annotations for wrapped continuous function to be discrete
call_sampler = _update_annotations(call_sampler)
# Update docstring with that of the sampler and include sampled function documentation.
call_sampler = _update_docstring(call_sampler, sample_function)
# Unset wrapped to return base sampler signature
# but still get rest of benefits of wraps
# such as __name__, __qualname__
call_sampler.__dict__.pop('__wrapped__')
# wrap with functional pulse
return commands.functional_pulse(call_sampler)
return generate_sampler
def left(continuous_pulse: Callable) -> Callable:
r"""Left sampling strategy decorator.
See `pulse.samplers.sampler` for more information.
For `duration`, return:
$$\{f(t) \in \mathbb{C} | t \in \mathbb{Z} \wedge 0<=t<\texttt{duration}\}$$
Args:
continuous_pulse: To sample.
"""
return sampler(strategies.left_sample)(continuous_pulse)
def right(continuous_pulse: Callable) -> Callable:
r"""Right sampling strategy decorator.
See `pulse.samplers.sampler` for more information.
For `duration`, return:
$$\{f(t) \in \mathbb{C} | t \in \mathbb{Z} \wedge 0<t<=\texttt{duration}\}$$
Args:
continuous_pulse: To sample.
"""
return sampler(strategies.right_sample)(continuous_pulse)
def midpoint(continuous_pulse: Callable) -> Callable:
r"""Midpoint sampling strategy decorator.
See `pulse.samplers.sampler` for more information.
For `duration`, return:
$$\{f(t+0.5) \in \mathbb{C} | t \in \mathbb{Z} \wedge 0<=t<\texttt{duration}\}$$
Args:
continuous_pulse: To sample.
"""
return sampler(strategies.midpoint_sample)(continuous_pulse)
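
# A minimal usage sketch mirroring the module docstring's `linear` example,
# here with the `midpoint` sampler (the pulse body is illustrative only, not
# from the library):
#
#   @midpoint
#   def constant(times: np.ndarray, amp: complex) -> np.ndarray:
#       return np.full(len(times), amp)
#
#   pulse_command = constant(10, 0.5 + 0j)  # duration replaces the time axis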
| StarcoderdataPython |
4806218 | '''
------------------------------------------------------------------------
This file sets parameters for the OG-USA model run.
This module calls the following other module(s):
demographics.py
income.py
txfunc.py
elliptical_u_est.py
This module defines the following function(s):
read_parameter_metadata()
read_tax_func_estimate()
get_parameters_from_file()
get_parameters()
get_reduced_parameters()
get_full_parameters()
------------------------------------------------------------------------
'''
'''
------------------------------------------------------------------------
Import Packages
------------------------------------------------------------------------
'''
import os
import json
import numpy as np
import scipy.interpolate as si
import demographics as dem
import income as inc
import pickle
import txfunc
import elliptical_u_est as ellip
import matplotlib.pyplot as plt
'''
------------------------------------------------------------------------
Set paths, define user modifiable parameters
------------------------------------------------------------------------
'''
PARAMS_FILE = os.path.join(os.path.dirname(__file__), 'default_full_parameters.json')
PARAMS_FILE_METADATA_NAME = 'parameters_metadata.json'
PARAMS_FILE_METADATA_PATH = os.path.join(os.path.dirname(__file__), PARAMS_FILE_METADATA_NAME)
TAX_ESTIMATE_PATH = os.environ.get("TAX_ESTIMATE_PATH", ".")
USER_MODIFIABLE_PARAMS = ['g_y_annual', 'frisch']
def read_parameter_metadata():
'''
--------------------------------------------------------------------
This function reads in parameter metadata
--------------------------------------------------------------------
INPUTS: None
OTHER FUNCTIONS AND FILES CALLED BY THIS FUNCTION:
/PARAMS_FILE_METADATA_PATH/ = json file with metadata
OBJECTS CREATED WITHIN FUNCTION:
params_dict = dictionary of metadata
RETURNS: params_dict
--------------------------------------------------------------------
'''
if os.path.exists(PARAMS_FILE_METADATA_PATH):
with open(PARAMS_FILE_METADATA_PATH) as pfile:
params_dict = json.load(pfile)
else:
from pkg_resources import resource_stream, Requirement
path_in_egg = os.path.join('ogusa', PARAMS_FILE_METADATA_NAME)
buf = resource_stream(Requirement.parse('ogusa'), path_in_egg)
as_bytes = buf.read()
as_string = as_bytes.decode("utf-8")
params_dict = json.loads(as_string)
return params_dict
def read_tax_func_estimate(pickle_path, pickle_file):
'''
--------------------------------------------------------------------
This function reads in tax function parameters
--------------------------------------------------------------------
INPUTS:
pickle_path = string, path to pickle with tax function parameter estimates
    pickle_file = string, name of pickle file with tax function parameter estimates
OTHER FUNCTIONS AND FILES CALLED BY THIS FUNCTION:
/picklepath/ = pickle file with dictionary of tax function estimated parameters
OBJECTS CREATED WITHIN FUNCTION:
dict_params = dictionary, contains numpy arrays of tax function estimates
RETURNS: dict_params
--------------------------------------------------------------------
'''
if os.path.exists(pickle_path):
print 'pickle path exists'
with open(pickle_path) as pfile:
dict_params = pickle.load(pfile)
else:
from pkg_resources import resource_stream, Requirement
path_in_egg = pickle_file
buf = resource_stream(Requirement.parse('ogusa'), path_in_egg)
as_bytes = buf.read()
as_string = as_bytes.decode("utf-8")
dict_params = pickle.loads(as_string)
return dict_params
def get_parameters_from_file():
'''
--------------------------------------------------------------------
This function loads the json file with model parameters
--------------------------------------------------------------------
INPUTS: None
OTHER FUNCTIONS AND FILES CALLED BY THIS FUNCTION:
/PARAMS_FILE/ = json file with model parameters
OBJECTS CREATED WITHIN FUNCTION:
RETURNS: j
--------------------------------------------------------------------
'''
with open(PARAMS_FILE,'r') as f:
j = json.load(f)
for key in j:
if isinstance(j[key], list):
j[key] = np.array(j[key])
return j
def get_parameters(test=False, baseline=False, guid='', user_modifiable=False, metadata=False):
'''
--------------------------------------------------------------------
This function returns the model parameters.
--------------------------------------------------------------------
INPUTS:
test = boolean, =True if run test version with smaller state space
baseline = boolean, =True if baseline tax policy, =False if reform
guid = string, id for reform run
user_modifiable = boolean, =True if allow user modifiable parameters
metadata = boolean, =True if use metadata file for parameter
values (rather than what is entered in parameters below)
OTHER FUNCTIONS AND FILES CALLED BY THIS FUNCTION:
read_tax_func_estimate()
ellip.estimation()
read_parameter_metadata()
OBJECTS CREATED WITHIN FUNCTION:
See parameters defined above
allvars = dictionary, dictionary with all parameters defined in this function
RETURNS: allvars, dictionary with model parameters
--------------------------------------------------------------------
'''
'''
------------------------------------------------------------------------
Parameters
------------------------------------------------------------------------
Model Parameters:
------------------------------------------------------------------------
S = integer, number of economically active periods an individual lives
J = integer, number of different ability groups
T = integer, number of time periods until steady state is reached
BW = integer, number of time periods in the budget window
lambdas = [J,] vector, percentiles for ability groups
imm_rates = [J,T+S] array, immigration rates by age and year
starting_age = integer, age agents enter population
ending age = integer, maximum age agents can live until
E = integer, age agents become economically active
beta_annual = scalar, discount factor as an annual rate
beta = scalar, discount factor for model period
sigma = scalar, coefficient of relative risk aversion
alpha = scalar, capital share of income
Z = scalar, total factor productivity parameter in firms' production
function
delta_annual = scalar, depreciation rate as an annual rate
delta = scalar, depreciation rate for model period
ltilde = scalar, measure of time each individual is endowed with each
period
g_y_annual = scalar, annual growth rate of technology
g_y = scalar, growth rate of technology for a model period
frisch = scalar, Frisch elasticity that is used to fit ellipitcal utility
to constant Frisch elasticity function
b_ellipse = scalar, value of b for elliptical fit of utility function
k_ellipse = scalar, value of k for elliptical fit of utility function
upsilon = scalar, value of omega for elliptical fit of utility function
------------------------------------------------------------------------
Small Open Economy Parameters:
------------------------------------------------------------------------
ss_firm_r = scalar, world interest rate available to firms in the steady state
ss_hh_r = scalar, world interest rate available to households in the steady state
tpi_firm_r = [T+S,] vector, world interest rate (firm). Must be ss_firm_r in last period.
tpi_hh_r = [T+S,] vector, world interest rate (household). Must be ss_firm_r in last period.
------------------------------------------------------------------------
Fiscal imbalance Parameters:
------------------------------------------------------------------------
alpha_T = scalar, share of GDP that goes to transfers.
alpha_G = scalar, share of GDP that goes to gov't spending in early years.
tG1 = scalar < t_G2, period at which change government spending rule from alpha_G*Y to glide toward SS debt ratio
tG2 = scalar < T, period at which change gov't spending rule with final discrete jump to achieve SS debt ratio
debt_ratio_ss = scalar, steady state debt/GDP.
------------------------------------------------------------------------
Tax Parameters:
------------------------------------------------------------------------
mean_income_data = scalar, mean income from IRS data file used to calibrate income tax
etr_params = [S,BW,#tax params] array, parameters for effective tax rate function
mtrx_params = [S,BW,#tax params] array, parameters for marginal tax rate on
labor income function
mtry_params = [S,BW,#tax params] array, parameters for marginal tax rate on
capital income function
h_wealth = scalar, wealth tax parameter h (scalar)
m_wealth = scalar, wealth tax parameter m (scalar)
p_wealth = scalar, wealth tax parameter p (scalar)
tau_bq = [J,] vector, bequest tax
tau_payroll = scalar, payroll tax rate
retire = integer, age at which individuals eligible for retirement benefits
------------------------------------------------------------------------
Simulation Parameters:
------------------------------------------------------------------------
MINIMIZER_TOL = scalar, tolerance level for the minimizer in the calibration of chi parameters
MINIMIZER_OPTIONS = dictionary, dictionary for options to put into the minimizer, usually
to set a max iteration
PLOT_TPI = boolean, =Ture if plot the path of K as TPI iterates (for debugging purposes)
maxiter = integer, maximum number of iterations that SS and TPI solution methods will undergo
mindist_SS = scalar, tolerance for SS solution
mindist_TPI = scalar, tolerance for TPI solution
nu = scalar, contraction parameter in SS and TPI iteration process
representing the weight on the new distribution
flag_graphs = boolean, =True if produce graphs in demographic, income,
wealth, and labor files (True=graph)
chi_b_guess = [J,] vector, initial guess of \chi^{b}_{j} parameters
(if no calibration occurs, these are the values that will be used for \chi^{b}_{j})
chi_n_guess_80 = (80,) vector, initial guess of chi_{n,s} parameters for
80 one-year-period ages from 21 to 100
chi_n_guess = (S,) vector, interpolated initial guess of chi^{n,s}
parameters (if no calibration occurs, these are the
values that will be used
age_midp_80 = (80,) vector, midpoints of age bins for 80 one-year-
period ages from 21 to 100 for interpolation
chi_n_interp = function, interpolation function for chi_n_guess
newstep = scalar > 1, duration in years of each life period
age_midp_S = (S,) vector, midpoints of age bins for S one-year-
period ages from 21 to 100 for interpolation
------------------------------------------------------------------------
Demographics and Ability variables:
------------------------------------------------------------------------
omega = [T+S,S] array, time path of stationary distribution of economically active population by age
g_n_ss = scalar, steady state population growth rate
omega_SS = [S,] vector, stationary steady state population distribution
surv_rate = [S,] vector, survival rates by age
rho = [S,] vector, mortality rates by age
g_n_vector = [T+S,] vector, growth rate in economically active pop for each period in transition path
e = [S,J] array, normalized effective labor units by age and ability type
------------------------------------------------------------------------
'''
# Model Parameters
if test:
# size of state space
S = int(40)
lambdas = np.array([0.6,0.4])
J = lambdas.shape[0]
# Simulation Parameters
MINIMIZER_TOL = 1e-6
MINIMIZER_OPTIONS = {'maxiter': 1}
PLOT_TPI = False
maxiter = 35
mindist_SS = 1e-6
mindist_TPI = 1e-3
nu = .4
flag_graphs = False
else:
S = int(80)
lambdas = np.array([0.25, 0.25, 0.2, 0.1, 0.1, 0.09, 0.01])
J = lambdas.shape[0]
# Simulation Parameters
MINIMIZER_TOL = 1e-14
MINIMIZER_OPTIONS = None
PLOT_TPI = False
maxiter = 250
mindist_SS = 1e-9
mindist_TPI = 1e-5#1e-9
nu = .4
flag_graphs = False
# Time parameters
T = int(4 * S)
BW = int(10)
start_year = 2016
starting_age = 20
ending_age = 100
E = int(starting_age * (S / float(ending_age - starting_age)))
beta_annual = .96 # Carroll (JME, 2009)
beta = beta_annual ** (float(ending_age - starting_age) / S)
sigma = 1.5 # value from Attanasio, Banks, Meghir and Weber (JEBS, 1999)
alpha = .35
    gamma = 0.35 # many use 0.33, but many find that capital's share is increasing (e.g. Elsby, Hobijn, and Sahin (BPEA, 2013))
    epsilon = 1.0 # 0.6 # Note: if not =1, then be careful with calibration
Z = 1.0
delta_annual = 0.05 # approximately the value from Kehoe calibration exercise: http://www.econ.umn.edu/~tkehoe/classes/calibration-04.pdf
delta = 1 - ((1 - delta_annual) ** (float(ending_age - starting_age) / S))
ltilde = 1.0
g_y_annual = 0.03
g_y = (1 + g_y_annual)**(float(ending_age - starting_age) / S) - 1
# Ellipse parameters
frisch = 0.4 # Frisch elasticity consistent with Altonji (JPE, 1996) and Peterman (Econ Inquiry, 2016)
b_ellipse, upsilon = ellip.estimation(frisch,ltilde)
    k_ellipse = 0 # this parameter is just a level shifter in utility - irrelevant for analysis
# Small Open Economy parameters. Currently these are placeholders. Can introduce a
# borrow/lend spread and a time path from t=0 to t=T-1. However, from periods T through
# T+S, the steady state rate should hold.
ss_firm_r_annual = 0.04
ss_hh_r_annual = 0.04
ss_firm_r = (1 + ss_firm_r_annual) ** (float(ending_age - starting_age) / S) - 1
ss_hh_r = (1 + ss_hh_r_annual) ** (float(ending_age - starting_age) / S) - 1
tpi_firm_r = np.ones(T+S)*ss_firm_r
tpi_hh_r = np.ones(T+S)*ss_hh_r
# Fiscal imbalance parameters. These allow government deficits, debt, and savings.
tG1 = 20#int(T/4) # change government spending rule from alpha_G*Y to glide toward SS debt ratio
tG2 = int(T*0.8) # change gov't spending rule with final discrete jump to achieve SS debt ratio
alpha_T = 0.09 # share of GDP that goes to transfers each period. This ratio will hold in later baseline periods & SS.
alpha_G = 0.05 # share of GDP of government spending for periods t<tG1
ALPHA_T = np.ones(T+S)*alpha_T # Periods can be assigned different %-of-GDP rates for the baseline. Assignment after tG1 is not recommended.
ALPHA_G = np.ones(T)*alpha_G # Early periods (up to tG1) can be assigned different %-of-GDP rates for the baseline
# Assign any deviations from constant share of GDP in pre-tG1 ALPHA_T and ALPHA_G in the user dashboard of run_ogusa_serial.
rho_G = 0.1 # 0 < rho_G < 1 is transition speed for periods [tG1, tG2-1]. Lower rho_G => slower convergence.
debt_ratio_ss = 0.4 # assumed steady-state debt/GDP ratio. Savings would be a negative number.
initial_debt = 0.59 # first-period debt/GDP ratio. Savings would be a negative number.
# Business tax parameters
tau_b = 0.20 # business income tax rate
delta_tau_annual = .027# from B-Tax
    delta_tau = 1 - ((1 - delta_tau_annual) ** (float(ending_age - starting_age) / S))
if tG1 > tG2:
print 'The first government spending rule change date, (', tG1, ') is after the second one (', tG2, ').'
err = "Gov't spending rule dates are inconsistent"
raise RuntimeError(err)
if tG2 > T:
print 'The second government spending rule change date, (', tG2, ') is after time T (', T, ').'
err = "Gov't spending rule dates are inconsistent"
raise RuntimeError(err)
# Tax parameters:
# Income Tax Parameters
# will call tax function estimation function here...
# do output such that each parameters is in a separate SxBW array
# read in estimated parameters
#print 'baseline is:', baseline
if baseline:
baseline_pckl = "TxFuncEst_baseline{}.pkl".format(guid)
estimate_file = os.path.join(TAX_ESTIMATE_PATH,
baseline_pckl)
print 'using baseline tax parameters'
dict_params = read_tax_func_estimate(estimate_file, baseline_pckl)
else:
policy_pckl = "TxFuncEst_policy{}.pkl".format(guid)
estimate_file = os.path.join(TAX_ESTIMATE_PATH,
policy_pckl)
print 'using policy tax parameters'
dict_params = read_tax_func_estimate(estimate_file, policy_pckl)
mean_income_data = dict_params['tfunc_avginc'][0]
etr_params = dict_params['tfunc_etr_params_S'][:S,:BW,:]
mtrx_params = dict_params['tfunc_mtrx_params_S'][:S,:BW,:]
mtry_params = dict_params['tfunc_mtry_params_S'][:S,:BW,:]
# # Make all ETRs equal the average
etr_params = np.zeros(etr_params.shape)
etr_params[:, :, 10] = dict_params['tfunc_avg_etr'] # set shift to average rate
# # Make all MTRx equal the average
mtrx_params = np.zeros(mtrx_params.shape)
mtrx_params[:, :, 10] = dict_params['tfunc_avg_mtrx'] # set shift to average rate
# # Make all MTRy equal the average
mtry_params = np.zeros(mtry_params.shape)
mtry_params[:, :, 10] = dict_params['tfunc_avg_mtry'] # set shift to average rate
# # Make MTRx depend only on labor income
# mtrx_params[:, :, 11] = 1.0 # set share parameter to 1
# # Make MTRy depend only on capital income
# mtry_params[:, :, 11] = 0.0 # set share parameter to 0
# # set all MTRx parameters equal to the 43-yr-old values from 2016
# mtrx_params = np.tile(mtrx_params[11, 0, :], (S, 10, 1))
# Wealth tax params
# These are non-calibrated values, h and m just need
# need to be nonzero to avoid errors. When p_wealth
# is zero, there is no wealth tax.
h_wealth = 0.1
m_wealth = 1.0
p_wealth = 0.0
# Bequest and Payroll Taxes
tau_bq = np.zeros(J)
    tau_payroll = 0.0 #0.15 # we are including payroll taxes in tax functions for now
retire = np.int(np.round(9.0 * S / 16.0) - 1)
# Calibration parameters
# These guesses are close to the calibrated values
chi_b_guess = np.ones((J,)) * 80.0
#chi_b_guess = np.array([0.7, 0.7, 1.0, 1.2, 1.2, 1.2, 1.4])
#chi_b_guess = np.array([1.0, 1.0, 1.0, 1.0, 1.0, 4.0, 10.0])
#chi_b_guess = np.array([5, 10, 90, 250, 250, 250, 250])
#chi_b_guess = np.array([2, 10, 90, 350, 1700, 22000, 120000])
chi_n_guess_80 = np.array(
[38.12000874, 33.22762421, 25.3484224, 26.67954008, 24.41097278,
23.15059004, 22.46771332, 21.85495452, 21.46242013, 22.00364263,
21.57322063, 21.53371545, 21.29828515, 21.10144524, 20.8617942,
20.57282, 20.47473172, 20.31111347, 19.04137299, 18.92616951,
20.58517969, 20.48761429, 20.21744847, 19.9577682, 19.66931057,
19.6878927, 19.63107201, 19.63390543, 19.5901486, 19.58143606,
19.58005578, 19.59073213, 19.60190899, 19.60001831, 21.67763741,
21.70451784, 21.85430468, 21.97291208, 21.97017228, 22.25518398,
22.43969757, 23.21870602, 24.18334822, 24.97772026, 26.37663164,
29.65075992, 30.46944758, 31.51634777, 33.13353793, 32.89186997,
38.07083882, 39.2992811, 40.07987878, 35.19951571, 35.97943562,
37.05601334, 37.42979341, 37.91576867, 38.62775142, 39.4885405,
37.10609921, 40.03988031, 40.86564363, 41.73645892, 42.6208256,
43.37786072, 45.38166073, 46.22395387, 50.21419653, 51.05246704,
53.86896121, 53.90029708, 61.83586775, 64.87563699, 66.91207845,
68.07449767, 71.27919965, 73.57195873, 74.95045988, 76.6230815])
# Generate Income and Demographic parameters
(omega, g_n_ss, omega_SS, surv_rate, rho, g_n_vector, imm_rates,
omega_S_preTP) = dem.get_pop_objs(E, S, T, 1, 100, start_year,
flag_graphs)
# Interpolate chi_n_guesses and create omega_SS_80 if necessary
if S == 80:
chi_n_guess = chi_n_guess_80.copy()
omega_SS_80 = omega_SS.copy()
elif S < 80:
age_midp_80 = np.linspace(20.5, 99.5, 80)
chi_n_interp = si.interp1d(age_midp_80, chi_n_guess_80,
kind='cubic')
newstep = 80.0 / S
age_midp_S = np.linspace(20 + 0.5 * newstep,
100 - 0.5 * newstep, S)
chi_n_guess = chi_n_interp(age_midp_S)
(_, _, omega_SS_80, _, _, _, _,_) = dem.get_pop_objs(20, 80,
320, 1, 100, start_year, False)
## To shut off demographics, uncomment the following 9 lines of code
# g_n_ss = 0.0
# surv_rate1 = np.ones((S,))# prob start at age S
# surv_rate1[1:] = np.cumprod(surv_rate[:-1], dtype=float)
# omega_SS = np.ones(S)*surv_rate1# number of each age alive at any time
# omega_SS = omega_SS/omega_SS.sum()
# imm_rates = np.zeros((T+S,S))
# omega = np.tile(np.reshape(omega_SS,(1,S)),(T+S,1))
# omega_S_preTP = omega_SS
# g_n_vector = np.tile(g_n_ss,(T+S,))
e = inc.get_e_interp(S, omega_SS, omega_SS_80, lambdas, plot=False)
# e_hetero = get_e(S, J, starting_age, ending_age, lambdas, omega_SS, flag_graphs)
# e = np.tile(((e_hetero*lambdas).sum(axis=1)).reshape(S,1),(1,J))
# e = np.tile(e[:,0].reshape(S,1),(1,J))
# e /= (e * omega_SS.reshape(S, 1)* lambdas.reshape(1, J)).sum()
# print 'g_y: ', g_y
# print 'e: ', e
# print 'chi_n_guess: ', chi_n_guess
# print 'chi_b_guess: ', chi_b_guess
# print 'delta, beta: ', delta, beta
# quit()
allvars = dict(locals())
if user_modifiable:
allvars = {k:allvars[k] for k in USER_MODIFIABLE_PARAMS}
if metadata:
params_meta = read_parameter_metadata()
for k,v in allvars.iteritems():
params_meta[k]["value"] = v
allvars = params_meta
return allvars
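# Hypothetical call sketch (Python 2, matching the module; the TxFuncEst
# pickle for the chosen baseline/guid must already exist on TAX_ESTIMATE_PATH):
#
#   params = get_parameters(test=True, baseline=True, guid='')
#   print params['S'], params['J'], params['T']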
| StarcoderdataPython |
116895 | # coding: utf-8
"""
Consolidate Services
Description of all APIs # noqa: E501
The version of the OpenAPI document: version not set
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from argocd_client.configuration import Configuration
class V1alpha1ResourceDiff(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'diff': 'str',
'group': 'str',
'hook': 'bool',
'kind': 'str',
'live_state': 'str',
'name': 'str',
'namespace': 'str',
'normalized_live_state': 'str',
'predicted_live_state': 'str',
'target_state': 'str'
}
attribute_map = {
'diff': 'diff',
'group': 'group',
'hook': 'hook',
'kind': 'kind',
'live_state': 'liveState',
'name': 'name',
'namespace': 'namespace',
'normalized_live_state': 'normalizedLiveState',
'predicted_live_state': 'predictedLiveState',
'target_state': 'targetState'
}
def __init__(self, diff=None, group=None, hook=None, kind=None, live_state=None, name=None, namespace=None, normalized_live_state=None, predicted_live_state=None, target_state=None, local_vars_configuration=None): # noqa: E501
"""V1alpha1ResourceDiff - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._diff = None
self._group = None
self._hook = None
self._kind = None
self._live_state = None
self._name = None
self._namespace = None
self._normalized_live_state = None
self._predicted_live_state = None
self._target_state = None
self.discriminator = None
if diff is not None:
self.diff = diff
if group is not None:
self.group = group
if hook is not None:
self.hook = hook
if kind is not None:
self.kind = kind
if live_state is not None:
self.live_state = live_state
if name is not None:
self.name = name
if namespace is not None:
self.namespace = namespace
if normalized_live_state is not None:
self.normalized_live_state = normalized_live_state
if predicted_live_state is not None:
self.predicted_live_state = predicted_live_state
if target_state is not None:
self.target_state = target_state
@property
def diff(self):
"""Gets the diff of this V1alpha1ResourceDiff. # noqa: E501
:return: The diff of this V1alpha1ResourceDiff. # noqa: E501
:rtype: str
"""
return self._diff
@diff.setter
def diff(self, diff):
"""Sets the diff of this V1alpha1ResourceDiff.
:param diff: The diff of this V1alpha1ResourceDiff. # noqa: E501
:type: str
"""
self._diff = diff
@property
def group(self):
"""Gets the group of this V1alpha1ResourceDiff. # noqa: E501
:return: The group of this V1alpha1ResourceDiff. # noqa: E501
:rtype: str
"""
return self._group
@group.setter
def group(self, group):
"""Sets the group of this V1alpha1ResourceDiff.
:param group: The group of this V1alpha1ResourceDiff. # noqa: E501
:type: str
"""
self._group = group
@property
def hook(self):
"""Gets the hook of this V1alpha1ResourceDiff. # noqa: E501
:return: The hook of this V1alpha1ResourceDiff. # noqa: E501
:rtype: bool
"""
return self._hook
@hook.setter
def hook(self, hook):
"""Sets the hook of this V1alpha1ResourceDiff.
:param hook: The hook of this V1alpha1ResourceDiff. # noqa: E501
:type: bool
"""
self._hook = hook
@property
def kind(self):
"""Gets the kind of this V1alpha1ResourceDiff. # noqa: E501
:return: The kind of this V1alpha1ResourceDiff. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""Sets the kind of this V1alpha1ResourceDiff.
:param kind: The kind of this V1alpha1ResourceDiff. # noqa: E501
:type: str
"""
self._kind = kind
@property
def live_state(self):
"""Gets the live_state of this V1alpha1ResourceDiff. # noqa: E501
:return: The live_state of this V1alpha1ResourceDiff. # noqa: E501
:rtype: str
"""
return self._live_state
@live_state.setter
def live_state(self, live_state):
"""Sets the live_state of this V1alpha1ResourceDiff.
:param live_state: The live_state of this V1alpha1ResourceDiff. # noqa: E501
:type: str
"""
self._live_state = live_state
@property
def name(self):
"""Gets the name of this V1alpha1ResourceDiff. # noqa: E501
:return: The name of this V1alpha1ResourceDiff. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this V1alpha1ResourceDiff.
:param name: The name of this V1alpha1ResourceDiff. # noqa: E501
:type: str
"""
self._name = name
@property
def namespace(self):
"""Gets the namespace of this V1alpha1ResourceDiff. # noqa: E501
:return: The namespace of this V1alpha1ResourceDiff. # noqa: E501
:rtype: str
"""
return self._namespace
@namespace.setter
def namespace(self, namespace):
"""Sets the namespace of this V1alpha1ResourceDiff.
:param namespace: The namespace of this V1alpha1ResourceDiff. # noqa: E501
:type: str
"""
self._namespace = namespace
@property
def normalized_live_state(self):
"""Gets the normalized_live_state of this V1alpha1ResourceDiff. # noqa: E501
:return: The normalized_live_state of this V1alpha1ResourceDiff. # noqa: E501
:rtype: str
"""
return self._normalized_live_state
@normalized_live_state.setter
def normalized_live_state(self, normalized_live_state):
"""Sets the normalized_live_state of this V1alpha1ResourceDiff.
:param normalized_live_state: The normalized_live_state of this V1alpha1ResourceDiff. # noqa: E501
:type: str
"""
self._normalized_live_state = normalized_live_state
@property
def predicted_live_state(self):
"""Gets the predicted_live_state of this V1alpha1ResourceDiff. # noqa: E501
:return: The predicted_live_state of this V1alpha1ResourceDiff. # noqa: E501
:rtype: str
"""
return self._predicted_live_state
@predicted_live_state.setter
def predicted_live_state(self, predicted_live_state):
"""Sets the predicted_live_state of this V1alpha1ResourceDiff.
:param predicted_live_state: The predicted_live_state of this V1alpha1ResourceDiff. # noqa: E501
:type: str
"""
self._predicted_live_state = predicted_live_state
@property
def target_state(self):
"""Gets the target_state of this V1alpha1ResourceDiff. # noqa: E501
:return: The target_state of this V1alpha1ResourceDiff. # noqa: E501
:rtype: str
"""
return self._target_state
@target_state.setter
def target_state(self, target_state):
"""Sets the target_state of this V1alpha1ResourceDiff.
:param target_state: The target_state of this V1alpha1ResourceDiff. # noqa: E501
:type: str
"""
self._target_state = target_state
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1alpha1ResourceDiff):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1alpha1ResourceDiff):
return True
return self.to_dict() != other.to_dict()
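# Illustrative round-trip sketch (values are hypothetical):
#   d = V1alpha1ResourceDiff(kind="Deployment", name="guestbook", namespace="default")
#   assert V1alpha1ResourceDiff(**d.to_dict()) == d  # __eq__ compares to_dict() output
#   print(d)                                         # pretty-printed via to_str()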
| StarcoderdataPython |
68598 | # -*- coding: utf-8 -*-
from os import popen
class UpdtPyLibs(object):
"""
升级所有第三方有更新的python库到最新版本
"""
@staticmethod
def exec_cmd(cmmd):
"""
执行命令
:param cmmd: str/命令内容
:return: tuple/(boolean, result)
"""
return popen(cmmd).read()
@staticmethod
def get_lib_name(libs_str):
"""
获得需要升级的pylibs名称列表
:param libs_str: str
:return: list/[pylib_name1, pylib_name2, ..., pylibn]
"""
res = []
lines = libs_str.split('\n')[2:]
lines = lines if lines[-1] else lines[:-1]
for line in lines:
res.append(line.split()[0])
return res
def main():
failed = 0
cmd_ls = 'pip list --outdated'
cmd_updt = "pip install -U {}"
print("Searching libs ...")
libs_str = UpdtPyLibs.exec_cmd(cmd_ls)
print("Searching done.")
libs_name = UpdtPyLibs.get_lib_name(libs_str)
lib_len = len(libs_name)
for count, lib in enumerate(libs_name, 1):
res = 'Succeed'
cmmd = cmd_updt.format(lib)
try:
UpdtPyLibs.exec_cmd(cmmd)
except Exception as e:
failed += 1
res = 'Failed'
finally:
report = "[ Name: {}\t| res: {}\t| {}/{}\t| {:.2%} ]".format(lib, res, count, lib_len, count / lib_len)
print(report)
    final_report = "[UPDATED: {} | FAILED: {} ]".format(lib_len - failed, failed)
    print(final_report)
if __name__ == '__main__':
main()
| StarcoderdataPython |
3291927 | """
Repetition Code Encoder Classes
"""
from typing import Dict, List, Tuple, Optional, Type
from qiskit import QuantumRegister, QuantumCircuit, ClassicalRegister
from qiskit.circuit import Qubit
from qtcodes.circuits.base import (
_Stabilizer,
_TopologicalLattice,
TopologicalQubit,
)
TQubit = Tuple[float, float, float]
class _Parity(_Stabilizer):
"""
Parity syndrome measurement for Repetition code.
"""
def entangle(self) -> None:
"""
Parity measurement on nearby data qubits.
"""
syndrome = self.qubit_indices[0]
left = self.qubit_indices[1]
right = self.qubit_indices[2]
self.circ.cx(left, syndrome)
self.circ.cx(right, syndrome)
class _RepetitionLattice(_TopologicalLattice):
"""
This class contains all the lattice geometry specifications regarding the Repetition Code.
E.g.
X
|
X
|
X
|
X
|
X
d=5 Rep Code
"""
def __init__(self, params: Dict[str, float], name: str, circ: QuantumCircuit):
"""
Initializes this Topological Lattice class.
Args:
params (Dict[str,int]):
Contains params such as d, where d is the number of
physical "data" qubits lining a row or column of the lattice.
name (str):
Useful when combining multiple TopologicalQubits together.
Prepended to all registers.
circ (QuantumCircuit):
QuantumCircuit on top of which the topological qubit is built.
This is often shared amongst multiple TQubits.
"""
self.geometry: Dict[str, List[List[int]]] = {}
super().__init__(params, name, circ)
def _params_validate_and_generate(self) -> None:
"""
Validate and generate params.
E.g.
self.params["num_syn"] = params["d"] - 1
"""
# default params
if "d" not in self.params:
self.params["d"] = 3
# calculated params
self.params["T"] = -1 # -1 until a stabilizer round is added!
self.params["num_readout"] = -1 # -1 until a logical readout is performed!
self.params[
"num_lattice_readout"
] = -1 # -1 until a lattice readout is performed!
self.params["num_data"] = self.params["d"]
self.params["num_syn"] = self.params["d"] - 1
def _gen_registers(self) -> None:
"""
Implement this method to create quantum and classical registers.
E.g.
qregisters["data"] = QuantumRegister(params["num_data"], name=name + "_data")
"""
self.qregisters["data"] = QuantumRegister(
self.params["num_data"], name=self.name + "_data"
)
self.qregisters["mz"] = QuantumRegister(
self.params["num_syn"], name=self.name + "_mp"
)
self.qregisters["ancilla"] = QuantumRegister(1, name=self.name + "_ancilla")
def _set_geometry(self):
"""
Construct the lattice geometry for reuse across this class.
Returns:
geometry (Dict[str, List[List[int]]]):
key: syndrome/plaquette type
value: List of lists of qubit indices comprising one plaquette.
"""
geometry = {"mz": []}
for i in range(int(self.params["num_syn"])):
syn = i
left = i
right = i + 1
geometry["mz"].append([syn, left, right])
self.geometry = geometry
def _gen_qubit_indices_and_stabilizers(
self,
) -> Tuple[List[List[Qubit]], List[Type[_Parity]]]:
"""
Generates lattice blueprint for rep code with our
chosen layout and numbering.
Returns:
qubit_indices (List[List[Qubit]]):
List of lists of Qubits that comprise each plaquette.
stabilizers (List[_Stabilizer]):
List of stabilizers for each plaquette.
"""
self._set_geometry()
qubit_indices = []
stabilizers = []
for stabilizer, idx_lists in self.geometry.items():
stabilizer_cls = _Parity
for idx_list in idx_lists:
syn = self.qregisters[stabilizer][idx_list[0]]
plaquette = [
self.qregisters["data"][idx] if idx is not None else None
for idx in idx_list[1:]
]
plaquette = [syn,] + plaquette
qubit_indices.append(plaquette)
stabilizers.append(stabilizer_cls)
return qubit_indices, stabilizers
def extract_final_stabilizer_and_logical_readout_z(
self, final_readout_string: str
) -> Tuple[int, str]:
"""
Extract final Parity syndrome measurements and logical Z readout from
lattice readout along the Parity syndrome graph.
Args:
final_readout_string (str):
readout string of length equal to the number of data qubits
contains the readout values of each data qubit measured along
axes specified by the Z syndrome graph
Returns:
logical_readout (int):
logical readout value
stabilizer_str (str):
returns a string of the form
"Z_{N}Z_{N-1}...Z_{0}"
"""
readout_values = [int(q) for q in final_readout_string]
readout_values = readout_values[::-1] # [q_0,q_1,...,q_24]
z_stabilizer = "" # "Z_{N}Z_{N-1}..Z_{0}"
for idx_list in self.geometry["mz"]:
stabilizer_val = (
sum(
[
readout_values[idx] if idx is not None else 0
for idx in idx_list[1:]
]
) # [syn, left, right]
% 2
)
z_stabilizer = str(stabilizer_val) + z_stabilizer
logical_readout = readout_values[0] # first row qubit
return logical_readout, z_stabilizer
def reset_x(self) -> None:
"""
Initialize/reset to a logical |x+> state.
Create a GHZ state: |+_L> := |0_L> + |1_L> = |00..0> + |11..1>
"""
self.circ.reset(self.qregisters["data"])
self.circ.h(self.qregisters["data"][0])
for i in range(len(self.qregisters["data"]) - 1):
self.circ.cx(self.qregisters["data"][i], self.qregisters["data"][i + 1])
self.circ.barrier()
def reset_z(self) -> None:
"""
Initialize/reset to a logical |z+> state.
"""
self.circ.reset(self.qregisters["data"])
self.circ.barrier()
def x(self) -> None:
"""
Logical X operator on the topological qubit.
Defined as the left-most column on the X Syndrome Graph.
"""
self.circ.x(self.qregisters["data"])
self.circ.barrier()
def z(self) -> None:
"""
Logical Z operator on the topological qubit.
Defined as the top-most row on the Z Syndrome Graph.
"""
self.circ.z(self.qregisters["data"][0])
def x_c_if(self, classical: ClassicalRegister, val: int) -> None:
"""
Classically conditioned logical X operator on the topological qubit.
"""
self.circ.x(self.qregisters["data"]).c_if(classical, val)
self.circ.barrier()
def z_c_if(self, classical: ClassicalRegister, val: int) -> None:
"""
Classically conditioned logical Z operator on the topological qubit.
"""
self.circ.z(self.qregisters["data"][0]).c_if(classical, val)
def cx(self, control: Optional[Qubit] = None, target: Optional[Qubit] = None):
"""
Logical CX Gate
Args:
control (Optional[Qubit]): If provided, then this gate will implement
a logical x gate on this tqubit conditioned on source
target (Optional[Qubit]): If provided, then this gate will implement
a logical x gate on target conditioned on this tqubit
"""
if control:
self.circ.cx(control, self.qregisters["data"])
self.circ.barrier()
elif target:
self.circ.cx(self.qregisters["data"][0], target)
def _readout_x_into_ancilla(self) -> None:
"""
Convenience method to read-out the
logical-X projection into an ancilla qubit.
Uses the left-most column.
"""
self.circ.reset(self.qregisters["ancilla"])
# X Readout
self.circ.h(self.qregisters["ancilla"])
self.circ.cx(self.qregisters["ancilla"], self.qregisters["data"])
self.circ.h(self.qregisters["ancilla"])
def readout_x(self, readout_creg: Optional[ClassicalRegister] = None) -> None:
"""
Convenience method to read-out the logical-X projection.
"""
if not readout_creg:
self.params["num_readout"] += 1
creg_name = self.name + "_readout_" + str(self.params["num_readout"])
readout = ClassicalRegister(1, name=creg_name)
self.circ.add_register(readout)
self.cregisters[creg_name] = readout
readout_creg = self.cregisters[creg_name]
self._readout_x_into_ancilla()
self.circ.measure(self.qregisters["ancilla"], readout_creg)
self.circ.barrier()
def readout_z(self, readout_creg: Optional[ClassicalRegister] = None) -> None:
"""
Convenience method to read-out the logical-Z projection.
Uses the top-most row (in this case just the first qubit).
"""
if not readout_creg:
self.params["num_readout"] += 1
creg_name = self.name + "_readout_" + str(self.params["num_readout"])
readout = ClassicalRegister(1, name=creg_name)
self.circ.add_register(readout)
self.cregisters[creg_name] = readout
readout_creg = self.cregisters[creg_name]
self.circ.reset(self.qregisters["ancilla"])
self.circ.measure(self.qregisters["data"][0], readout_creg)
self.circ.barrier()
def lattice_readout_x(self) -> None:
"""
Not applicable/relevant to the Rep Code.
"""
self.params["num_lattice_readout"] += 1
creg_name = (
self.name + "_lattice_readout_" + str(self.params["num_lattice_readout"])
)
readout = ClassicalRegister(self.params["num_data"], name=creg_name,)
self.circ.add_register(readout)
self.cregisters[creg_name] = readout
# measure along X
self.circ.h(self.qregisters["data"])
self.circ.measure(self.qregisters["data"], self.cregisters[creg_name])
self.circ.barrier()
def lattice_readout_z(self) -> None:
"""
Readout all data qubits that constitute the lattice.
        This readout can be used to extract a final round of Parity stabilizer measurements,
as well as a logical Z readout.
"""
self.params["num_lattice_readout"] += 1
creg_name = (
self.name + "_lattice_readout_" + str(self.params["num_lattice_readout"])
)
readout = ClassicalRegister(self.params["num_data"], name=creg_name,)
self.circ.add_register(readout)
self.cregisters[creg_name] = readout
self.circ.measure(self.qregisters["data"], self.cregisters[creg_name])
self.circ.barrier()
def parse_readout(
self, readout_string: str, readout_type: Optional[str] = None
) -> Tuple[int, Dict[str, List[TQubit]]]:
"""
Helper method to turn a result string (e.g. 0000 000 000) into an
appropriate logical readout value and XOR-ed syndrome locations
according to our grid coordinate convention.
Args:
readout_string (str):
Readout like "0 000 000 000" (logical_readout syndrome_2 syndrome_1 syndrome_0)
or of the form "0000 000 000" (lattice_readout syndrome_1 syndrome_0)
A syndrome_i measurement "00..0" is of the form Z_{N}Z_{N-1}...Z_{0}
readout_type (Optional[str]):
"X" or "Z" needed to accurately parse a lattice readout to extract a final round of
syndrome measurements and logical readout.
Returns:
logical_readout (int):
logical readout value
syndromes (Dict[str, List[TQubit]]]):
key: syndrome type
value: (time, row, col) of parsed syndrome hits (changes between consecutive rounds)
"""
chunks = readout_string.split(" ")
if len(chunks[0]) > 1: # this is true when all data qubits are readout
assert (
readout_type == "Z"
), "Rep code currently only supports Z lattice readout."
(
logical_readout,
final_stabilizer,
) = self.extract_final_stabilizer_and_logical_readout_z(chunks[0])
chunks = [final_stabilizer,] + chunks[1:]
else:
logical_readout = int(chunks[0])
chunks = chunks[1:]
int_syndromes = [int(x, base=2) for x in chunks[::-1]]
z_syndromes = [a ^ b for (a, b) in zip(int_syndromes, int_syndromes[1:])]
Z = []
for T, syndrome in enumerate(z_syndromes):
for loc in range(int(self.params["num_syn"])):
if syndrome & 1 << loc:
Z.append((float(T), 0.5 + loc, 0.0))
return (
logical_readout,
{"Z": Z},
)
class RepetitionQubit(TopologicalQubit):
"""
A single logical repetition code qubit. At the physical level, this wraps a
circuit, so we chose to subclass and extend TopologicalQubit which extends QuantumCircuit.
"""
lattice_type = _RepetitionLattice
def stabilize(self) -> None:
"""
Run a single round of stabilization (entangle and measure).
"""
self.lattice.params["T"] += 1
syndrome_readouts = ClassicalRegister(
self.lattice.params["num_syn"],
name=self.name + "_c{}".format(self.lattice.params["T"]),
)
self.lattice.cregisters[
"syndrome{}".format(self.lattice.params["T"])
] = syndrome_readouts
self.circ.add_register(syndrome_readouts)
self.lattice.entangle()
# measure syndromes
self.circ.measure(
self.lattice.qregisters["mz"], syndrome_readouts,
)
self.circ.reset(self.lattice.qregisters["mz"])
self.circ.barrier()
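# Minimal end-to-end sketch (illustrative; assumes the qtcodes-style constructor
# signature (params, name, circ) inherited from TopologicalQubit):
#   qubit = RepetitionQubit({"d": 3})
#   qubit.reset_z()       # prepare logical |0>
#   qubit.stabilize()     # one round of parity measurements
#   qubit.x()             # logical X flip
#   qubit.stabilize()
#   qubit.readout_z()     # logical Z readout, expect 1
#   print(qubit.circ)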
| StarcoderdataPython |
92546 | <gh_stars>0
#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import absolute_import
import six
from .common import ObjectMeta
from ..base import Model
from ..fields import Field, ListField
class LabelSelectorRequirement(Model):
key = Field(six.text_type)
operator = Field(six.text_type)
values = ListField(six.text_type)
class LabelSelector(Model):
    matchExpressions = ListField(LabelSelectorRequirement)  # matchExpressions is an array in the k8s API
matchLabels = Field(dict)
class PodDisruptionBudgetSpec(Model):
minAvailable = Field(six.text_type, alt_type=int)
maxUnavailable = Field(six.text_type, alt_type=int)
selector = Field(LabelSelector)
class PodDisruptionBudget(Model):
class Meta:
list_url = "/apis/policy/v1beta1/poddisruptionbudgets"
url_template = "/apis/policy/v1beta1/namespaces/{namespace}/poddisruptionbudgets/{name}"
metadata = Field(ObjectMeta)
spec = Field(PodDisruptionBudgetSpec)
| StarcoderdataPython |
1683440 | import os
from . import constants
_basedir = os.path.abspath(os.path.dirname(__file__)) # Get this file's directory rather than pwd
for f in os.listdir("{}/scripts".format(_basedir)): # Loop through the scripts folder
filename, file_extension = os.path.splitext(f)
if file_extension and file_extension == ".js": # Any JS files added to the JS_SCRIPTS dictionary in constants.py
with open("{}/scripts/{}".format(_basedir, f), "r") as open_file:
constants.JS_SCRIPTS[filename] = open_file.read()
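# e.g. a file scripts/highlight.js (hypothetical name) becomes
# constants.JS_SCRIPTS["highlight"] = "<contents of highlight.js>"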
| StarcoderdataPython |
3209377 | <filename>htag/runners/devapp.py
# -*- coding: utf-8 -*-
# #############################################################################
# Copyright (C) 2022 <NAME>[at]gmail(dot)com
#
# MIT licence
#
# https://github.com/manatlan/htag
# #############################################################################
from .. import Tag
from ..render import HRenderer
import threading
import os,json
from starlette.applications import Starlette
from starlette.responses import HTMLResponse
from starlette.routing import Route,WebSocketRoute
from starlette.endpoints import WebSocketEndpoint
import socket
def isFree(ip, port):
    # Probe (ip, port) and close the socket afterwards; True if nothing is listening.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.settimeout(1)
        return s.connect_ex((ip, port)) != 0
class DevApp(Starlette):
""" DEV APP, Runner specialized for development process. Features :
* autoreload on file changes
* refresh UI/HTML/client part, after server autoreloaded
* console.log/info in devtools, for all exchanges
* uvicorn debug
* js error() method auto implemented (popup with skip/refresh)
Simple ASync Web Server (with starlette) with WebSocket interactions with HTag.
Open the rendering in a browser tab.
The instance is an ASGI htag app
"""
def __init__(self,tagClass:type):
assert issubclass(tagClass,Tag)
#/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\
# add a Static Template, for displaying beautiful full error on UI ;-)
#/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\ #TODO: perhaps something integrated in hrenderer
t=Tag.H.div( _style="z-index:10000000000;position:fixed;top:10px;left:10px;background:#F00;padding:8px;border:1px solid yellow" )
t <= Tag.H.a("X",_href="#",_onclick="this.parentNode.remove()",_style="color:yellow;text-decoration:none",_title="Forget error (skip)")
t <= " "
t <= Tag.H.a("REFRESH",_href="#",_onclick="window.location.reload()",_style="color:yellow;text-decoration:none",_title="Restart the UI part by refreshing it")
t <= Tag.H.pre()
template = Tag.H.template(t,_id="DevAppError")
#/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\/\
js = """
window.error=function(txt) {
var clone = document.importNode(document.querySelector("#DevAppError").content, true);
clone.querySelector("pre").innerHTML = txt
document.body.appendChild(clone)
}
async function interact( o ) {
let packet = JSON.stringify(o)
console.info("[htag interact]",packet.length,o)
ws.send( packet );
}
var ws = new WebSocket("ws://"+document.location.host+"/ws");
ws.onopen = function() {console.info("[htag start]");start()};
ws.onclose = function() {document.body.innerHTML="Refreshing";window.location.reload()}
ws.onmessage = function(e) {
let data = JSON.parse(e.data);
console.info("[htag action]",e.data.length,data)
action( data );
};
"""
self.renderer=HRenderer(tagClass, js, lambda: os._exit(0), fullerror=True, statics=[template,])
class WsInteract(WebSocketEndpoint):
encoding = "json"
async def on_receive(this, websocket, data):
actions = await self.renderer.interact(data["id"],data["method"],data["args"],data["kargs"])
await websocket.send_text( json.dumps(actions) )
Starlette.__init__(self,debug=True, routes=[
Route('/', self.GET, methods=["GET"]),
WebSocketRoute("/ws", WsInteract),
])
async def GET(self,request):
return HTMLResponse( str(self.renderer) )
def run(self, host="127.0.0.1", port=8000, openBrowser=True): # localhost, by default !!
""" example `app.run(__name__)` """
import uvicorn,webbrowser
import inspect,sys
from pathlib import Path
try:
fi= inspect.getframeinfo(sys._getframe(1))
stem = Path(fi.filename).stem
instanceName = fi.code_context[0].strip().split(".")[0]
except Exception as e:
print("Can't run DevApp :",e)
sys.exit(-1)
fileapp = stem+":"+instanceName
url = f"http://{host}:{port}"
print("="*79)
print(f"Start Uvicorn Reloader for '{fileapp}' ({url})")
print("="*79)
if openBrowser:
webbrowser.open_new_tab(url)
uvicorn.run(fileapp,host=host,port=port,reload=True,debug=True)
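# Minimal usage sketch (illustrative; assumes the htag Tag API imported above):
#   class App(Tag.body):
#       def init(self):
#           self <= "hello from DevApp"
#   app = DevApp(App)
#   if __name__ == "__main__":
#       app.run()  # autoreloads on file changes and refreshes the browser tab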
| StarcoderdataPython |
1693427 | #import copy
#import re, sys
from collections import defaultdict
#from Queue import Queue
from data_structures import CanonicalDerivation, Edge, RuleInstance
class CanonicalParser(object):
def __init__(self,s):
"""
Takes a sentence and learns a canonical derivation according to the simple grammar defined below.
"""
derivs_cur = set()
derivs_fail = set()
self.derivs_done = set()
derivs_all = set()
self.s = s
# Add the full AMR to start with
derivs_cur.add(CanonicalDerivation([s['mrt']]))
while len(derivs_cur) > 0:
derivation = derivs_cur.pop()
if len(derivation.get_triples()) == 1 and derivation.get_triples()[0][1].isNonterminal():
self.derivs_done.add(derivation)
derivs_all.add(derivation)
else:
deriv = False
for rule in [self.applyDelex,self.applySL,self.applySW, \
self.applySO,self.applyCircle,self.applyJointHit,self.applyElongate]:
deriv = rule(derivation)
if deriv: break
# If we don't learn anything, add this derivation to the failures
if not deriv:
derivs_fail.add(derivation)
else:
# If we've seen this derivation before, don't go there again
if deriv not in derivs_all:
derivs_cur.add(deriv)
derivs_all.add(deriv)
self.derivs_done = list(self.derivs_done)
self.derivs_fail = list(derivs_fail)
#print "Failed derivations: ", len(derivs_fail)
print "Complete derivations: ", len(self.derivs_done)
"""
# Print the failed derivations to see what went wrong
for d in self.derivs_fail:
print "Failed derivation: "
print d.get_triples()
"""
def applyDelex(self,d):
triples = d.get_triples()
for i in xrange(len(triples)):
(a,b,c) = triples[i]
if b.isTerminal():
ntLabel,tmp = b[0].split(":",1)
nrf = (a,Edge(ntLabel,d.count),c)
nrt = [triples[i]]
new_mrt = list(triples)
new_mrt[i] = nrf # replace triple with new triple
new_rule = RuleInstance(nrf,nrt,'DL')
return CanonicalDerivation.derive(d,new_mrt,new_rule)
return False
def applySL(self,d):
"""
        Search for any node with exactly one occurrence as a source and one as a target.
        Combine the two edges by removing that node and merging them.
"""
triples = d.get_triples()
ANodes = defaultdict(int)
BNodes = defaultdict(int)
for (a,b,c) in triples:
ANodes[a] += 1
BNodes[c] += 1
for a in ANodes.keys():
if ANodes[a] == 1 and BNodes[a] == 1:
# we have an edge that we can shorten: remove (x,X,a) and (a,X,z) for (x,Y,z)
nrf = [None,Edge('*',d.count),None] # new rule from
nrt = [0,0] # new rule to
new_amr = list(triples)
for i in xrange(len(triples)):
at = triples[i]
if at[0] == a and at[2] != a:
nrf[2] = at[2]
nrt[1] = at
elif at[2] == a and at[0] != a:
nrf[0] = at[0]
nrf[1][0] = at[1][0]
nrt[0] = at
index = i
if nrt[0][1].isNonterminal() and nrt[1][1].isNonterminal():
new_amr[index] = tuple(nrf)
new_amr.remove(nrt[1])
new_rule = RuleInstance(tuple(nrf),nrt,'SL')
return CanonicalDerivation.derive(d,new_amr,new_rule)
return False
def applySW(self,d):
"""
Search for any multiple edges (a-X-b) and merge two of these
"""
triples = d.get_triples()
Nodes = defaultdict(int)
for (a,b,c) in triples:
Nodes[(a,c)] += 1
for (a,c) in Nodes.keys():
if Nodes[(a,c)] > 1:
# We have one edge that we can remove: remove (a,X,b) and (a,Y,b) for (a,Y,b)
# If more than two, we can remove any one of these, given any other one of these
for i in xrange(len(triples)):
candidate = triples[i]
(x,y,z) = candidate
if x == a and z == c and y.isNonterminal():
for j in xrange(i+1,len(triples)):
candidate2 = triples[j]
(k,l,m) = candidate2
if k == x and m == z and l.isNonterminal() and candidate != candidate2:
nrf = (k,Edge(y[0],d.count),m)
nrt = [candidate,candidate2]
new_amr = list(triples)
new_amr[i] = nrf
del new_amr[j]
new_rule = RuleInstance(nrf,nrt,'SW')
return CanonicalDerivation.derive(d,new_amr,new_rule)
return False
def applySO(self,d):
"""
Search for any split a-X-b,a-Y-c where c is a leaf node
Remove a-Y-c and let it be generated by a-X-b
"""
triples = d.get_triples()
Leaves = defaultdict(int)
Branches = defaultdict(int)
for (a,b,c) in triples:
Leaves[c] += 1
Branches[a] += 1
# If leaves[b] == 1 and branches[a] > 1 we can remove the (a,X,b) edge using SO
for i in xrange(len(triples)):
candidate = triples[i]
(a,b,c) = candidate
if Leaves[c] == 1 and Branches[a] > 1 and Branches[c] == 0 and b.isNonterminal():
for j in xrange(len(triples)):
candidate2 = triples[j]
(x,y,z) = candidate2
if x == a and z != c and y.isNonterminal():
# Depending on the grammar it would make sense to install a clause here
# which determines the 'surviving' edge based on some implicit ordering
nrf = (x,Edge(y[0],d.count),z)
nrt = [candidate2,candidate]
rulename = 'OL' # short for open-left
new_amr = list(triples)
new_amr[j] = nrf
del new_amr[i]
new_rule = RuleInstance(nrf,nrt,rulename)
return CanonicalDerivation.derive(d,new_amr,new_rule)
return False
def applyJointHit(self,d):
"""
edge A-B becomes edges A-C and B-C in reverse
"""
child = defaultdict(set)
parent = defaultdict(set)
triples = d.get_triples()
for trip in triples:
(a,b,c) = trip
child[a].add(trip)
parent[c].add(trip)
for i in xrange(len(triples)):
candidate1 = triples[i]
(a,x,c) = candidate1
if len(child[c]) == 0 and len(parent[c]) == 2 and x.isNonterminal():
for candidate2 in parent[c]:
(b,y,tmp) = candidate2
if y.isNonterminal() and b != a: # we know that c == tmp
wrongWay = False
for check in child[b]:
# optional (attempts to avoid generating looped structures)
(k,l,m) = check
if m == a: wrongWay = True
if not wrongWay:
# We found a candidate to remove (a,x,c) (b,y,c) down to (a,?,b)
# Now, let's iterate so that we can find the suitable edges (with labels)
nrf = (a,Edge('*',d.count),b)
nrt = [candidate1,candidate2]
new_amr = list(triples)
new_amr[i] = nrf
new_amr.remove(candidate2)
new_rule = RuleInstance(nrf,nrt,'JH')
return CanonicalDerivation.derive(d,new_amr,new_rule)
return False
def applyElongate(self,d):
"""
A->B becomes A->B->C in reverse
"""
child = defaultdict(set)
parent = defaultdict(set)
triples = d.get_triples()
for trip in triples:
(a,b,c) = trip
child[a].add(trip)
parent[c].add(trip)
for i in xrange(len(triples)):
candidate1 = triples[i]
(b,x,c) = candidate1
if len(child[c]) == 0 and len(parent[c]) == 1 and x.isNonterminal():
for candidate2 in parent[b]:
(a,y,tmp) = candidate2
if y.isNonterminal(): # we already know tmp == b
# We found a candidate to remove (a,y,b,x,c) down to (a,y,b)
nrf = (a,Edge(y[0],d.count),b)
nrt = [candidate2,candidate1]
new_amr = list(triples)
new_amr[i] = nrf
new_amr.remove(candidate2)
new_rule = RuleInstance(nrf,nrt,'LL')
return CanonicalDerivation.derive(d,new_amr,new_rule)
return False
def applyCircle(self,d):
"""
A->B becomes A->B->B (circle) in reverse
"""
parent = defaultdict(set)
triples = d.get_triples()
for i in xrange(len(triples)):
(a,b,c) = triples[i]
parent[c].add((i,triples[i]))
for i in xrange(len(triples)):
candidate1 = triples[i]
(a,b,c) = candidate1
if a == c and b.isNonterminal():
for index,candidate2 in parent[c]:
(x,y,z) = candidate2
if y.isNonterminal():
# We found a candidate to remove (x,y,a,b,a) down to (x,y,a)
nrf = (x,Edge(y[0],d.count),z)
nrt = [candidate2,candidate1]
new_amr = list(triples)
new_amr[index] = nrf
del new_amr[i]
new_rule = RuleInstance(nrf,nrt,'CC')
return CanonicalDerivation.derive(d,new_amr,new_rule)
return False
| StarcoderdataPython |
57551 | <reponame>apie/countries_visited
from flask_restless import ProcessingException
from flask import redirect, url_for, request
from flask_login import current_user
from flask_security import Security, auth_required
from visited import app, user_datastore, Visit
security = Security(app, user_datastore)
@security.unauthn_handler
def r(*args, headers):
if 'api/' in request.url:
#Needed for auth_func because it needs to raise something. Return values dont matter.
raise ProcessingException(description='Not Authorized', code=401)
#Otherwise, normal behavior: redirect to login page
return redirect(url_for('security.login'))
@auth_required()
def auth_func_single(instance_id=None, **kwargs):
if ('data' in kwargs and kwargs['data']['username'] != str(current_user.email)) or (Visit.query.filter_by(username=current_user.email, id=instance_id).first() is None):
# Changed the username OR instance_id was not found for the current user
raise ProcessingException(description='Not Authorized', code=401)
@auth_required()
def auth_func_many(search_params=None, **kwargs):
if 'filters' not in search_params:
search_params['filters'] = []
# Always filter on current logged in user
search_params['filters'].append(
dict(name='username', op='eq', val=current_user.email)
)
def deny(*args, **kwargs):
raise ProcessingException(description='Forbidden', code=403)
def auth_func_post(data, **kwargs):
if data['username'] != str(current_user.email):
# Can only post for the current user
raise ProcessingException(description='Not Authorized', code=401)
check_user=dict(
GET_SINGLE=[auth_func_single],
GET_MANY=[auth_func_many],
POST=[auth_func_post],
PUT_SINGLE=[auth_func_single],
PUT_MANY=[deny],
DELETE_SINGLE=[auth_func_single],
DELETE_MANY=[deny],
)
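# Illustrative wiring (the flask_restless APIManager instance name is hypothetical):
#   manager.create_api(Visit, methods=['GET', 'POST', 'PUT', 'DELETE'],
#                      preprocessors=check_user)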
| StarcoderdataPython |
1771301 | <gh_stars>10-100
from __future__ import print_function, division, absolute_import
from pymel.core import curve, delete, revolve
from . import _build_util as util
@util.commonArgs
def build():
p = [ [0, -0.49, 0],
[-0.49, -0.49, 0.49],
[-0.49, 0.49, 0.49],
[0, 0.49, 0] ]
temp = curve( p=p, d=1 )
ctrl = revolve( temp, ssw=0, esw=360, d=1, ax=[0, 1, 0], s=4 )[0]
points = [ [-0.5, 0.5, 0.5],
[-0.5, -0.5, 0.5],
[-0.5, -0.5, -0.5],
[-0.5, 0.5, -0.5],
[-0.5, 0.5, 0.5],
[0.5, 0.5, 0.5],
[0.5, 0.5, -0.5],
[-0.5, 0.5, -0.5],
[-0.5, -0.5, -0.5],
[0.5, -0.5, -0.5],
[0.5, 0.5, -0.5],
[0.5, -0.5, -0.5],
[0.5, -0.5, 0.5],
[0.5, 0.5, 0.5],
[0.5, -0.5, 0.5],
[-0.5, -0.5, 0.5] ]
line = curve(p=points, d=1)
line.rename('outline')
line.getShape().setParent( ctrl, add=True, shape=True )
delete(line, temp)
return ctrl | StarcoderdataPython |
1753649 | from . import users
from .change_password import ChangePasswordForm
from .users import UserForm
from .balance import BalanceForm | StarcoderdataPython |
4801711 | <reponame>shubhamkanungoo007/competitive-programming-solutions<gh_stars>0
ls = [2, 33, 4, 2, 1, 2]
ls1 = [77, 66, 55, 44]
s = ls + ls1
s.sort(reverse=True)
print(s)
118087 | # THIS FILE IS AUTO-GENERATED. DO NOT EDIT
from verta._swagger.base_type import BaseType
class UacAction(BaseType):
def __init__(self, service=None, role_service_action=None, authz_service_action=None, modeldb_service_action=None):
required = {
"service": False,
"role_service_action": False,
"authz_service_action": False,
"modeldb_service_action": False,
}
self.service = service
self.role_service_action = role_service_action
self.authz_service_action = authz_service_action
self.modeldb_service_action = modeldb_service_action
for k, v in required.items():
if self[k] is None and v:
raise ValueError('attribute {} is required'.format(k))
@staticmethod
def from_json(d):
from .ServiceEnumService import ServiceEnumService
from .RoleActionEnumRoleServiceActions import RoleActionEnumRoleServiceActions
from .AuthzActionEnumAuthzServiceActions import AuthzActionEnumAuthzServiceActions
from .ModelDBActionEnumModelDBServiceActions import ModelDBActionEnumModelDBServiceActions
tmp = d.get('service', None)
if tmp is not None:
d['service'] = ServiceEnumService.from_json(tmp)
tmp = d.get('role_service_action', None)
if tmp is not None:
d['role_service_action'] = RoleActionEnumRoleServiceActions.from_json(tmp)
tmp = d.get('authz_service_action', None)
if tmp is not None:
d['authz_service_action'] = AuthzActionEnumAuthzServiceActions.from_json(tmp)
tmp = d.get('modeldb_service_action', None)
if tmp is not None:
d['modeldb_service_action'] = ModelDBActionEnumModelDBServiceActions.from_json(tmp)
return UacAction(**d)
| StarcoderdataPython |
4822404 | ################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
from pandas.util.testing import assert_frame_equal
from pyflink.common import Row
from pyflink.table import expressions as expr
from pyflink.table.types import DataTypes
from pyflink.table.udf import udf, udtf, udaf, AggregateFunction
from pyflink.testing import source_sink_utils
from pyflink.testing.test_case_utils import PyFlinkBlinkBatchTableTestCase, \
PyFlinkBlinkStreamTableTestCase
class RowBasedOperationTests(object):
def test_map(self):
t = self.t_env.from_elements(
[(1, 2, 3), (2, 1, 3), (1, 5, 4), (1, 8, 6), (2, 3, 4)],
DataTypes.ROW(
[DataTypes.FIELD("a", DataTypes.TINYINT()),
DataTypes.FIELD("b", DataTypes.SMALLINT()),
DataTypes.FIELD("c", DataTypes.INT())]))
table_sink = source_sink_utils.TestAppendSink(
['a', 'b'],
[DataTypes.BIGINT(), DataTypes.BIGINT()])
self.t_env.register_table_sink("Results", table_sink)
func = udf(lambda x: Row(x + 1, x * x), result_type=DataTypes.ROW(
[DataTypes.FIELD("a", DataTypes.BIGINT()),
DataTypes.FIELD("b", DataTypes.BIGINT())]))
t.map(func(t.b)).alias("a", "b") \
.map(func(t.a)).alias("a", "b") \
.execute_insert("Results") \
.wait()
actual = source_sink_utils.results()
self.assert_equals(actual, ["4,9", "3,4", "7,36", "10,81", "5,16"])
def test_map_with_pandas_udf(self):
t = self.t_env.from_elements(
[(1, Row(2, 3)), (2, Row(1, 3)), (1, Row(5, 4)), (1, Row(8, 6)), (2, Row(3, 4))],
DataTypes.ROW(
[DataTypes.FIELD("a", DataTypes.TINYINT()),
DataTypes.FIELD("b",
DataTypes.ROW([DataTypes.FIELD("a", DataTypes.INT()),
DataTypes.FIELD("b", DataTypes.INT())]))]))
table_sink = source_sink_utils.TestAppendSink(
['a', 'b'],
[DataTypes.BIGINT(), DataTypes.BIGINT()])
self.t_env.register_table_sink("Results", table_sink)
def func(x, y):
import pandas as pd
a = (x * 2).rename('b')
res = pd.concat([a, x], axis=1) + y
return res
pandas_udf = udf(func,
result_type=DataTypes.ROW(
[DataTypes.FIELD("c", DataTypes.BIGINT()),
DataTypes.FIELD("d", DataTypes.BIGINT())]),
func_type='pandas')
t.map(pandas_udf(t.a, t.b)).execute_insert("Results").wait()
actual = source_sink_utils.results()
self.assert_equals(actual, ["3,5", "3,7", "6,6", "9,8", "5,8"])
def test_flat_map(self):
t = self.t_env.from_elements(
[(1, "2,3", 3), (2, "1", 3), (1, "5,6,7", 4)],
DataTypes.ROW(
[DataTypes.FIELD("a", DataTypes.TINYINT()),
DataTypes.FIELD("b", DataTypes.STRING()),
DataTypes.FIELD("c", DataTypes.INT())]))
table_sink = source_sink_utils.TestAppendSink(
['a', 'b'],
[DataTypes.BIGINT(), DataTypes.STRING()])
self.t_env.register_table_sink("Results", table_sink)
@udtf(result_types=[DataTypes.INT(), DataTypes.STRING()])
def split(x, string):
for s in string.split(","):
yield x, s
t.flat_map(split(t.a, t.b)) \
.alias("a, b") \
.flat_map(split(t.a, t.b)) \
.execute_insert("Results") \
.wait()
actual = source_sink_utils.results()
self.assert_equals(actual, ["1,2", "1,3", "2,1", "1,5", "1,6", "1,7"])
class BatchRowBasedOperationITTests(RowBasedOperationTests, PyFlinkBlinkBatchTableTestCase):
def test_aggregate_with_pandas_udaf(self):
t = self.t_env.from_elements(
[(1, 2, 3), (2, 1, 3), (1, 5, 4), (1, 8, 6), (2, 3, 4)],
DataTypes.ROW(
[DataTypes.FIELD("a", DataTypes.TINYINT()),
DataTypes.FIELD("b", DataTypes.SMALLINT()),
DataTypes.FIELD("c", DataTypes.INT())]))
table_sink = source_sink_utils.TestAppendSink(
['a', 'b', 'c'],
[DataTypes.TINYINT(), DataTypes.FLOAT(), DataTypes.INT()])
self.t_env.register_table_sink("Results", table_sink)
pandas_udaf = udaf(lambda a: (a.mean(), a.max()),
result_type=DataTypes.ROW(
[DataTypes.FIELD("a", DataTypes.FLOAT()),
DataTypes.FIELD("b", DataTypes.INT())]),
func_type="pandas")
t.group_by(t.a) \
.aggregate(pandas_udaf(t.b).alias("c", "d")) \
.select("a, c, d").execute_insert("Results") \
.wait()
actual = source_sink_utils.results()
self.assert_equals(actual, ["1,5.0,8", "2,2.0,3"])
def test_aggregate_with_pandas_udaf_without_keys(self):
t = self.t_env.from_elements(
[(1, 2, 3), (2, 1, 3), (1, 5, 4), (1, 8, 6), (2, 3, 4)],
DataTypes.ROW(
[DataTypes.FIELD("a", DataTypes.TINYINT()),
DataTypes.FIELD("b", DataTypes.SMALLINT()),
DataTypes.FIELD("c", DataTypes.INT())]))
table_sink = source_sink_utils.TestAppendSink(
['a', 'b'],
[DataTypes.FLOAT(), DataTypes.INT()])
self.t_env.register_table_sink("Results", table_sink)
pandas_udaf = udaf(lambda a: Row(a.mean(), a.max()),
result_type=DataTypes.ROW(
[DataTypes.FIELD("a", DataTypes.FLOAT()),
DataTypes.FIELD("b", DataTypes.INT())]),
func_type="pandas")
t.aggregate(pandas_udaf(t.b).alias("c", "d")) \
.select("c, d").execute_insert("Results") \
.wait()
actual = source_sink_utils.results()
self.assert_equals(actual, ["3.8,8"])
def test_window_aggregate_with_pandas_udaf(self):
import datetime
from pyflink.table.window import Tumble
t = self.t_env.from_elements(
[
(1, 2, 3, datetime.datetime(2018, 3, 11, 3, 10, 0, 0)),
(3, 2, 4, datetime.datetime(2018, 3, 11, 3, 10, 0, 0)),
(2, 1, 2, datetime.datetime(2018, 3, 11, 3, 10, 0, 0)),
(1, 3, 1, datetime.datetime(2018, 3, 11, 3, 40, 0, 0)),
(1, 8, 5, datetime.datetime(2018, 3, 11, 4, 20, 0, 0)),
(2, 3, 6, datetime.datetime(2018, 3, 11, 3, 30, 0, 0))
],
DataTypes.ROW(
[DataTypes.FIELD("a", DataTypes.TINYINT()),
DataTypes.FIELD("b", DataTypes.SMALLINT()),
DataTypes.FIELD("c", DataTypes.INT()),
DataTypes.FIELD("rowtime", DataTypes.TIMESTAMP(3))]))
table_sink = source_sink_utils.TestAppendSink(
['a', 'b', 'c'],
[
DataTypes.TIMESTAMP(3),
DataTypes.FLOAT(),
DataTypes.INT()
])
self.t_env.register_table_sink("Results", table_sink)
pandas_udaf = udaf(lambda a: (a.mean(), a.max()),
result_type=DataTypes.ROW(
[DataTypes.FIELD("a", DataTypes.FLOAT()),
DataTypes.FIELD("b", DataTypes.INT())]),
func_type="pandas")
tumble_window = Tumble.over(expr.lit(1).hours) \
.on(expr.col("rowtime")) \
.alias("w")
t.window(tumble_window) \
.group_by("w") \
.aggregate(pandas_udaf(t.b).alias("d", "e")) \
.select("w.rowtime, d, e") \
.execute_insert("Results") \
.wait()
actual = source_sink_utils.results()
self.assert_equals(actual,
["2018-03-11 03:59:59.999,2.2,3",
"2018-03-11 04:59:59.999,8.0,8"])
class StreamRowBasedOperationITTests(RowBasedOperationTests, PyFlinkBlinkStreamTableTestCase):
def test_aggregate(self):
import pandas as pd
t = self.t_env.from_elements(
[(1, 2, 3), (2, 1, 3), (1, 5, 4), (1, 8, 6), (2, 3, 4)],
DataTypes.ROW(
[DataTypes.FIELD("a", DataTypes.BIGINT()),
DataTypes.FIELD("b", DataTypes.SMALLINT()),
DataTypes.FIELD("c", DataTypes.INT())]))
function = CountAndSumAggregateFunction()
agg = udaf(function,
result_type=function.get_result_type(),
accumulator_type=function.get_accumulator_type(),
name=str(function.__class__.__name__))
result = t.group_by(t.a) \
.aggregate(agg(t.b).alias("c", "d")) \
.select("a, c, d") \
.to_pandas()
assert_frame_equal(result, pd.DataFrame([[1, 3, 15], [2, 2, 4]], columns=['a', 'c', 'd']))
class CountAndSumAggregateFunction(AggregateFunction):
def get_value(self, accumulator):
from pyflink.common import Row
return Row(accumulator[0], accumulator[1])
def create_accumulator(self):
from pyflink.common import Row
return Row(0, 0)
def accumulate(self, accumulator, *args):
accumulator[0] += 1
accumulator[1] += args[0]
def retract(self, accumulator, *args):
accumulator[0] -= 1
accumulator[1] -= args[0]
def merge(self, accumulator, accumulators):
for other_acc in accumulators:
accumulator[0] += other_acc[0]
accumulator[1] += other_acc[1]
def get_accumulator_type(self):
return DataTypes.ROW(
[DataTypes.FIELD("a", DataTypes.BIGINT()),
DataTypes.FIELD("b", DataTypes.BIGINT())])
def get_result_type(self):
return DataTypes.ROW(
[DataTypes.FIELD("a", DataTypes.BIGINT()),
DataTypes.FIELD("b", DataTypes.BIGINT())])
if __name__ == '__main__':
import unittest
try:
import xmlrunner
testRunner = xmlrunner.XMLTestRunner(output='target/test-reports')
except ImportError:
testRunner = None
unittest.main(testRunner=testRunner, verbosity=2)
| StarcoderdataPython |
65840 | <reponame>jonasht/pythonEstudos
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
import sys
class Window(QWindow):
def __init__(self):
QWindow.__init__(self)
self.setTitle('janela')
self.resize(400,300)
app = QApplication(sys.argv)
tela = Window()
tela.show()
sys.exit(app.exec_())
| StarcoderdataPython |
85231 | # Generated by Django 2.1.3 on 2018-11-21 01:37
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('product', '0002_auto_20181121_0740'),
]
operations = [
migrations.CreateModel(
name='Apistep',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('apiname', models.CharField(max_length=100, verbose_name='接口名称')),
('apiurl', models.CharField(max_length=200, verbose_name='url地址')),
('apistep', models.CharField(max_length=100, null=True, verbose_name='测试步骤')),
('apiparamvalue', models.CharField(max_length=800, verbose_name='请求参数和值')),
],
),
migrations.CreateModel(
name='Apitest',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('apitestname', models.CharField(max_length=64, verbose_name='流程接口名称')),
('apitestdesc', models.CharField(max_length=64, null=True, verbose_name='描述')),
('apitester', models.CharField(max_length=16, verbose_name='测试负责人')),
('apitestresult', models.BooleanField(verbose_name='测试结果')),
('create_time', models.DateField(auto_now=True, verbose_name='创建时间')),
('Product', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='product.Product')),
],
options={
'verbose_name': '流程场景接口',
'verbose_name_plural': '流程场景接口',
},
),
migrations.AddField(
model_name='apistep',
name='Apitest',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='product.Apitest'),
),
]
| StarcoderdataPython |
140085 | <gh_stars>0
# Copyright 2015 IBM All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The v1 Language Translation service
(http://www.ibm.com/smarterplanet/us/en/ibmwatson/developercloud/language-translation.html)
"""
from .watson_developer_cloud_service import WatsonDeveloperCloudService
from .watson_developer_cloud_service import WatsonInvalidArgument
class LanguageTranslationV2(WatsonDeveloperCloudService):
default_url = "https://gateway.watsonplatform.net/language-translation/api"
def __init__(self, url=default_url, **kwargs):
WatsonDeveloperCloudService.__init__(
self, 'language_translation', url, **kwargs)
def identify(self, text):
"""
Identifies the language of given source text
"""
return self.request(method='POST', url='/v2/identify', data=text, headers={'content-type': 'text/plain'},
accept_json=True)
def get_models(self, default=None, source=None, target=None):
"""
Get the available models for translation
"""
params = {'default': default, 'source': source, 'target': target}
return self.request(method='GET', url='/v2/models', params=params, accept_json=True)
def translate(self, text, source=None, target=None, model=None):
"""
Translates text from a source language to a target language
"""
if model is None and (source is None or target is None):
raise WatsonInvalidArgument('Either model or source and target must be specified')
data = {'text': text, 'source': source, 'target': target, 'model': model}
# data=data or json=data
return self.request(method='POST', url='/v2/translate', json=data).text
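# Illustrative usage (credentials are placeholders; requires a provisioned service):
#   lt = LanguageTranslationV2(username='...', password='...')
#   print(lt.identify('Hello world'))
#   print(lt.translate('Hello', source='en', target='es'))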
| StarcoderdataPython |
137251 | """Test cases for IR generation."""
import os.path
from mypy.test.config import test_temp_dir
from mypy.test.data import DataDrivenTestCase
from mypy.errors import CompileError
from mypyc.common import TOP_LEVEL_NAME
from mypyc.ir.func_ir import format_func
from mypyc.test.testutil import (
ICODE_GEN_BUILTINS, use_custom_builtins, MypycDataSuite, build_ir_for_single_file,
assert_test_output, remove_comment_lines, replace_native_int
)
from mypyc.options import CompilerOptions
files = [
'irbuild-basic.test',
'irbuild-lists.test',
'irbuild-dict.test',
'irbuild-statements.test',
'irbuild-nested.test',
'irbuild-classes.test',
'irbuild-optional.test',
'irbuild-tuple.test',
'irbuild-any.test',
'irbuild-generics.test',
'irbuild-try.test',
'irbuild-set.test',
'irbuild-str.test',
'irbuild-strip-asserts.test',
'irbuild-int.test',
]
class TestGenOps(MypycDataSuite):
files = files
base_path = test_temp_dir
optional_out = True
def run_case(self, testcase: DataDrivenTestCase) -> None:
        """Perform a runtime checking transformation test case."""
        # Kind of hacky. Not sure if we need more structure here.
        options = CompilerOptions(strip_asserts='StripAssert' in testcase.name)
with use_custom_builtins(os.path.join(self.data_prefix, ICODE_GEN_BUILTINS), testcase):
expected_output = remove_comment_lines(testcase.output)
expected_output = replace_native_int(expected_output)
try:
ir = build_ir_for_single_file(testcase.input, options)
except CompileError as e:
actual = e.messages
else:
actual = []
for fn in ir:
if (fn.name == TOP_LEVEL_NAME
and not testcase.name.endswith('_toplevel')):
continue
actual.extend(format_func(fn))
assert_test_output(testcase, actual, 'Invalid source code output',
expected_output)
| StarcoderdataPython |
161782 | from . import db
from werkzeug.security import generate_password_hash,check_password_hash
from flask_login import UserMixin
from . import login_manager
from sqlalchemy.sql import func
@login_manager.user_loader
def load_user(user_id):
return User.query.get(int(user_id))
class User(UserMixin,db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer,primary_key = True)
username = db.Column(db.String(255),index = True)
email = db.Column(db.String(255),unique = True,index = True)
# role_id = db.Column(db.Integer,db.ForeignKey('roles.id'))
bio = db.Column(db.String(255))
profile_pic_path = db.Column(db.String())
password_secure = db.Column(db.String(255))
blogs = db.relationship('Blog')
@property
def password(self):
raise AttributeError('You cannnot read the password attribute')
@password.setter
def password(self, password):
self.password_secure = generate_password_hash(password)
def verify_password(self,password):
return check_password_hash(self.password_secure,password)
def __repr__(self):
return f'User {self.username}'
#class Role(db.Model):
# __tablename__ = 'roles'
#
# id = db.Column(db.Integer,primary_key = True)
# name = db.Column(db.String(255))
# users = db.relationship('User',backref = 'role',lazy="dynamic")
#
#
#
# def __repr__(self):
# return f'User {self.username}'
class Blog(db.Model):
__tablename__ = 'blogs'
id = db.Column(db.Integer,primary_key = True)
topic = db.Column(db.String(200))
data = db.Column(db.String())
date = db.Column(db.DateTime(timezone = True), default = func.now(), index = True)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
comments = db.relationship('Comment')
class Comment(db.Model):
__tablename__ = 'comments'
id = db.Column(db.Integer,primary_key = True)
comment = db.Column(db.String(500))
date = db.Column(db.DateTime(timezone = True), default = func.now(), index = True)
blog_id = db.Column(db.Integer, db.ForeignKey('blogs.id'))
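# Illustrative session sketch (assumes an app context and the `db` instance above):
#   user = User(username='jane', email='jane@example.com', password='s3cret')  # hashed by the setter
#   db.session.add(user)
#   db.session.commit()
#   blog = Blog(topic='Hello', data='First post', user_id=user.id)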
| StarcoderdataPython |
3290923 | from telas.atualizar import Atualizar
from tkinter import Tk
from tkinter import PhotoImage
from telas.design import Design
from telas.bug import Bug
from telas.splash import Splash
from time import sleep
import util.funcoes as funcoes
import tkinter.font as tkFont
from tkinter import Button
def atualizar():
master = Tk()
design = Design()
design.update_design_dic()
    # IDE settings
arquivo_configuracoes = funcoes.carregar_json("configuracoes/configuracoes.json")
    # Language Safira is configured to use
idioma = arquivo_configuracoes['idioma']
interface_idioma = funcoes.carregar_json("configuracoes/interface.json")
icon = PhotoImage(file='imagens/icone.png')
atualizar = Atualizar(master, design, idioma, interface_idioma, icon)
    # When Safira starts,
    # check whether this is the first run.
    # On the first run no error message is shown,
    # nor a message when everything is already up to date.
#atualizar.verificar_versao(primeira_vez=True)
    # When the user tries to check for updates
atualizar.verificar_versao()
#atualizar.aviso_aguarde_instalando('0.25')
master.mainloop()
def bug():
master = Tk()
design = Design()
design.update_design_dic()
    # IDE settings
arquivo_configuracoes = funcoes.carregar_json("configuracoes/configuracoes.json")
    # Language Safira is configured to use
idioma = arquivo_configuracoes['idioma']
interface_idioma = funcoes.carregar_json("configuracoes/interface.json")
icon = PhotoImage(file='imagens/icone.png')
bug = Bug(master, design, idioma, interface_idioma, icon)
bug.interface()
master.mainloop()
def splash():
master = Tk()
# Obter o Design de interfaces
design = Design()
design.update_design_dic()
sp = Splash(design)
sleep(5)
sp.splash_fim()
master.mainloop()
root = Tk()
fonts=list(tkFont.families())
for fonte in sorted(fonts):
if 'mono' in fonte.lower():
print(fonte)
| StarcoderdataPython |
3351864 | """
wav_prints.py
~~~~~~~~~~
A common collection of print statements for
various file types
"""
import os
import datetime
import getpass
import re
#from reportlab.lib.enums import TA_JUSTIFY
#from reportlab.lib.pagesizes import letter
#from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Image, Table, TableStyle, PageBreak, Flowable, PageTemplate, Frame
#from reportlab.platypus.tableofcontents import TableOfContents
#from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
#from reportlab.lib.units import inch, mm, cm
#from reportlab.lib import colors
#from reportlab.pdfgen import canvas
#from reportlab.lib.utils import ImageReader
#
#class WavPrint():
#
# ################################################
# def __init__(self):
# pass
#
################################################
def created_line(start=''):
"""Gets the username and date for creation info. 'start' is used to
add any sort of comment string to the beginning of each line"""
yr = datetime.date.today().strftime("%Y")
day = datetime.date.today().strftime("%d")
mon = datetime.date.today().strftime("%B")
t = datetime.datetime.now().strftime("%H:%M:%S")
user = getpass.getuser()
cstring = start+'\n'
#cstring = cstring+start+' Copyright (C) Wavious {0} - All Rights Reserved'.format(yr)+'\n'+start+'\n'
#cstring = cstring+start+' Unauthorized copying of this file, via any medium is strictly prohibited\n'+start+'\n'
cstring = cstring+start+' Created by {0}'.format(str(user))+' on {0}/{1}/{2} at {3}'.format(mon,day,yr,t)+'\n'
cstring = cstring+start+'\n'
return cstring
################################################
def print_verilog_c_script_header(extra=None):
"""Returns a verilog/C formatted 'header' for top of file descriptions
User can pass 'extra' string if there is something they want to include
in the header"""
hstring = ''
com = '//'
dottedline = com+'===================================================================\n'
hstring = dottedline
create = created_line(start=com)
hstring = hstring+create
if extra:
hstring = hstring+com+' '+extra+'\n'+com+'\n'
hstring = hstring+dottedline
hstring = hstring+'\n\n\n'
return hstring
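# Illustrative use (the file name and extra text are hypothetical):
#   with open("wav_top.v", "w") as fh:
#       fh.write(print_verilog_c_script_header(extra="wav_top -- generated wrapper"))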
#################################################
## ___ ___ ___
## | _ \ | \ | __|
## | _/ | |) | | _|
## |_| |___/ |_|
##
#################################################
#class WavCanvas(canvas.Canvas):
# """
# Adapted from http://code.activestate.com/recipes/576832/
# """
# def __init__(self, *args, **kwargs):
# canvas.Canvas.__init__(self, *args, **kwargs)
# self._saved_page_states = []
#
# def showPage(self):
# self._saved_page_states.append(dict(self.__dict__))
# self._startPage()
#
# def drawPageNumber(self, page_count):
# self.setFont('Helvetica', 8)
# self.drawRightString(21 * cm, 1 * cm,
# 'Page %s / %s' % (self._pageNumber, page_count))
# def save(self):
# num_pages = len(self._saved_page_states)
# for state in self._saved_page_states:
# self.__dict__.update(state)
# self.drawPageNumber(num_pages)
# canvas.Canvas.showPage(self)
# canvas.Canvas.save(self)
#
#################################################
#class WavPDFLine(Flowable):
# """Flowable class for drawing lines onto a PDF"""
# def __init__(self, width=500, height=0):
# Flowable.__init__(self)
# self.width = width
# self.height = height
#
# def __repr__(self):
# return "Line(w=%s)" % self.width
#
# def draw(self):
# self.canv.line(0, self.height, self.width, self.height)
#
#
#################################################
#class WavDocTemplate(SimpleDocTemplate):
# """Doc template we extend from to performa any special tasks, such as TOC"""
#
# def __init__(self, filename, **kw):
# self.allowSplitting = 0
# SimpleDocTemplate.__init__(self, filename, **kw)
#
# def afterFlowable(self, flowable):
# "Registers TOC entries."
# if flowable.__class__.__name__ == 'Paragraph':
# text = flowable.getPlainText()
# style = flowable.style.name
# if style == 'Heading1':
# key = 'h1-%s' % self.seq.nextf('heading1')
# self.canv.bookmarkPage(key)
# text = re.sub('System:\s*', '', text)
# self.notify('TOCEntry', (0, text, self.page, key))
#
# if style == 'Heading2':
# key = 'h2-%s' % self.seq.nextf('heading2')
# self.canv.bookmarkPage(key)
# text = re.sub('Register Block:\s*', '', text)
# self.notify('TOCEntry', (1, text, self.page, key))
#
# if style == 'Heading3':
# key = 'h3-%s' % self.seq.nextf('heading3')
# self.canv.bookmarkPage(key)
# self.notify('TOCEntry', (2, text, self.page, key))
#
#################################################
#class WavPDF:
# """Class for PDF generation. Mainly used so we can do footers and headers a little bit
# easier than ReportLab normally allows"""
#
# ##############################
# def __init__(self, filename, *args, **kwargs):
# self.filename = filename
# self.doc = WavDocTemplate(self.filename, pagesize=letter,
# rightMargin=30,leftMargin=30,
# topMargin=30,bottomMargin=30)
#
# styles=getSampleStyleSheet()
#
#
# if 'footer' in kwargs:
# self.footer = kwargs['footer']
# else:
# yr = datetime.date.today().strftime("%Y")
# self.footer = ' Copyright (C) Wavious {0} - All Rights Reserved'.format(yr)
#
# if 'header' in kwargs:
# self.header = kwargs['header']
# else:
# self.header = ''
#
# if 'title' in kwargs:
# self.title = kwargs['title']
# else:
# self.title = None
#
#
# # Coordinates for headers/footer
# self.center_x = (self.doc.width + self.doc.leftMargin + self.doc.rightMargin) / 2
# self.top_y = self.doc.height + self.doc.topMargin
# self.bottom_y = self.doc.bottomMargin - 2
#
# # Headings for TableOfContents
# self.toc = TableOfContents()
# self.head1 = ParagraphStyle(name='Heading1', fontSize=14, leading=16)
# self.head2 = ParagraphStyle(name='Heading2', fontSize=12, leading=14)
# self.head3 = ParagraphStyle(name='Heading3', fontSize=12, leading=12)
# #self.toc.levelStyles= [self.head1, self.head2, self.head3]
# self.toc.levelStyles= [
# ParagraphStyle(fontSize=14, name='TOCHeading1', leftIndent=10, firstLineIndent=-20, spaceBefore=0, leading=14),
# ParagraphStyle(fontSize=12, name='TOCHeading2', leftIndent=20, firstLineIndent=-20, spaceBefore=0, leading=12),
# ParagraphStyle(fontSize=6, name='TOCHeading3', leftIndent=40, firstLineIndent=-20, spaceBefore=0, leading=2),
# ]
#
# self.Story = []
#
# if self.title:
# self.Story.append(Spacer(1, 150))
# self.Story.append(Paragraph("<font size=14><b><i>{0}</i></b></font>".format(self.title), styles["Normal"]))
# self.Story.append(PageBreak())
#
# self.Story.append(self.toc)
# self.Story.append(PageBreak())
#
# ##############################
# # First Page Headers/Footers
# ##############################
# def onMyFirstPage(self, canvas, doc):
# canvas.saveState()
# canvas.setFont('Helvetica', 8)
# canvas.drawString(5*mm, self.bottom_y, "Confidential")
#
# if self.footer is not None:
# canvas.drawCentredString(self.center_x, self.bottom_y, self.footer)
#
# #logo = ImageReader("/home/sbridges/wavious_logo.png")
# #canvas.drawImage(logo, 3, self.top_y - 8, width=20*mm, height=10*mm, mask='auto')
# canvas.restoreState()
#
# ##############################
# # Remainder Page Headers/Footers
# ##############################
# def onMyLaterPages(self, canvas, doc):
# canvas.saveState()
# canvas.setFont('Helvetica', 8)
# canvas.drawString(5*mm, self.bottom_y, "Confidential")
#
# if self.footer is not None:
# canvas.drawCentredString(self.center_x, self.bottom_y, self.footer)
# if self.header is not None:
# w, h = (doc.width, doc.height)
# canvas.drawCentredString(self.center_x,self.top_y, self.header)
#
# #logo = ImageReader("/home/sbridges/wavious_logo.png")
# #canvas.drawImage(logo, 3, self.top_y - 8, width=20*mm, height=10*mm, mask='auto')
# canvas.restoreState()
#
# ##############################
# def who_made_me(self):
# styles=getSampleStyleSheet()
# user = getpass.getuser()
# yr = datetime.date.today().strftime("%Y")
# day = datetime.date.today().strftime("%d")
# mon = datetime.date.today().strftime("%B")
# t = datetime.datetime.now().strftime("%H:%M:%S")
# when = '{0}/{1}/{2} at {3}'.format(mon,day,yr,t)
#
# s = "<font size=14>Filename: {0}\nGenerated By: {1}\nGenerated On: {2}</font>".format(self.filename, user, when)
#
# self.Story.append(PageBreak())
# self.Story.append(Spacer(1, 28))
# self.Story.append(Paragraph("<font size=14>Filename: <b>{0}</b></font>".format(self.filename), styles["Normal"]))
# self.Story.append(Paragraph("<font size=14>Generated By: <b>{0}</b></font>".format(user), styles["Normal"]))
# self.Story.append(Paragraph("<font size=14>Generated On: <b>{0}</b></font>".format(when), styles["Normal"]))
#
#
# ##############################
# def gen_pdf(self):
# self.who_made_me()
# #self.doc.multiBuild(self.Story, canvasmaker=WavCanvas, onFirstPage=self.onMyFirstPage, onLaterPages=self.onMyLaterPages)
# self.doc.multiBuild(self.Story, onFirstPage=self.onMyFirstPage, onLaterPages=self.onMyLaterPages)
#
#
#
#################################################
#def wrap_font(s, size):
# return "<font size={0}>{1}</font>".format(size, s)
#
#################################################
#def wrap_bold(s):
# return "<b>{0}</b>".format(s)
| StarcoderdataPython |
1666194 | # Import default libraries
import pandas as pd
import numpy as np
import os
import json
import logging
import argparse
# Import custom libraries
from modules.feature_extraction import *
from modules.feature_preprocessing import *
from modules.pipelines import *
# Set logging level (INFO)
logging.basicConfig(level=logging.INFO)
# Initialize configuration dictionary
config = {
# Default configuration file
'config_file': './config.json'
}
# Load configuration settings from configuration file
with open(config.get('config_file')) as config_file:
config = json.load(config_file)
# Parse arguments
parser = argparse.ArgumentParser(description='Train a given model to predict the LIP flag over amino acid sequences', prog='LIP learner')
# Define the input LIP annotation file
parser.add_argument('lip_file', help='File which contains the LIP/non-LIP tagged amino acid sequence', type=str, action='store')
# Define random seed
# parser.add_argument('-rs', '--random_seed', help='Random seed used in training', type=int, action='store')
# Define PDB ids to be excluded by computations
parser.add_argument('-e', '--exclude', help='PDB to be excluded from training', action='append')
# Define window size
parser.add_argument('-ws', '--window_size', help='Size of the window used to average residues features', type=int, action='store')
# Define a flag which forces the program to download RING files again
# (default=None so that an unset flag is dropped by the None filter below)
parser.add_argument('-rf', '--ring_force', help='Forces the program to download the RING file relative to the given PDB instance again', action='store_true', default=None)
# Define ring files download directory
parser.add_argument('-rd', '--ring_dir', help='Folder where RING files will be downloaded', type=str, action='store')
# Define pdb files download directory
parser.add_argument('-pd', '--pdb_dir', help='Folder where PDB files will be downloaded', type=str, action='store')
# Define configuration file to overwrite momentaniously overwrite the default one
parser.add_argument('-cf', '--config_file', help='Define a custom configuration file, overwrites original parameters', type=str, action='store')
# Parse arguments
args = vars(parser.parse_args())
# Delete arguments which are None
args = {k: v for k, v in args.items() if v is not None}
# Check if there is a configuration file specified
new_config_file = args.get('config_file')
# Import config file
if new_config_file:
    # Open the custom configuration file
    with open(new_config_file) as config_fp:
        # Get the custom configuration file content
        new_config = json.load(config_fp)
# Merge content into the default configuration file
config = {**config, **new_config}
# Merge command line arguments into the config dictionary (they have highest priority)
config = {**config, **args}
# Debug
logging.debug(config)
# Train the model
train_pipeline(config)
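
# Example invocation (hypothetical script and file names; the flags map to
# the arguments defined above):
#
#   python lip_learner.py lips_dataset.txt -ws 5 -e 1abc -e 2xyz \
#       -cf ./my_config.json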
| StarcoderdataPython |
4804398 | # Copyright 2020 MERA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing
# permissions and limitations under the License.
from deep_pavlov_wrapper import BertModel
from flask import Flask, request, jsonify
from flask_cors import CORS
from flask_json_schema import JsonSchema, JsonValidationError
app = Flask(__name__)
schema = JsonSchema(app)
CORS(app)
embedding_schema = {
'type': 'object',
'properties': {
'text': {'type': 'string'}
},
'required': ['text'],
}
_model = None
def get_model():
"""Lazy loading of the model."""
global _model
if _model is None:
_model = BertModel()
return _model
@app.errorhandler(JsonValidationError)
def validation_error(e):
return jsonify({
'error': e.message,
        'errors': [err.message for err in e.errors]
}), 400
@app.route("/embedding_generator", methods=["POST"])
@schema.validate(embedding_schema)
def embedding_generator():
"""Trainsform text to embedding."""
request_data = request.get_json()
text = request_data['text']
emb = get_model().get_embeddings([text])
for k, v in emb.items():
emb[k] = v[0].tolist()
return jsonify(emb), 200
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5000)
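
# Example request (assuming the server is running on localhost:5000):
#
#   curl -X POST http://localhost:5000/embedding_generator \
#        -H 'Content-Type: application/json' \
#        -d '{"text": "The quick brown fox."}'
#
# The response is a JSON object mapping each embedding name returned by
# BertModel.get_embeddings to a list of floats.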
| StarcoderdataPython |
70533 | <gh_stars>10-100
#!/usr/bin/env python2.7
# Copyright 2016 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Script to check C and C++ file header guards.
This script accepts a list of file or directory arguments. If a given
path is a file, it runs the checker on it. If the path is a directory,
it runs the checker on all files in that directory.
In addition, this script checks for potential header guard
collisions. This is useful since we munge / to _, and so
lib/abc/xyz/xyz.h
and
lib/abc_xyz/xyz.h
both want to use LIB_ABC_XYZ_XYZ_H_ as a header guard.
"""
import argparse
import collections
import fileinput
import os.path
import re
import string
import sys
FUCHSIA_ROOT = os.path.dirname( # $root
os.path.dirname( # scripts
os.path.dirname( # style
os.path.realpath(
os.path.abspath(__file__)))))
SYSROOT_PREFIXES = [
'ZIRCON_SYSTEM_PUBLIC',
'ZIRCON_THIRD_PARTY_ULIB_MUSL_INCLUDE',
]
sysroot_prefix = re.compile('^(' + string.join(SYSROOT_PREFIXES, '|') + ')_')
PUBLIC_PREFIXES = [
'ZIRCON_SYSTEM_ULIB_.*_INCLUDE',
'GARNET_PUBLIC',
'PERIDOT_PUBLIC',
'TOPAZ_PUBLIC',
'SDK'
]
public_prefix = re.compile('^(' + string.join(PUBLIC_PREFIXES, '|') + ')_')
all_header_guards = collections.defaultdict(list)
pragma_once = re.compile('^#pragma once$')
disallowed_header_characters = re.compile('[^a-zA-Z0-9_]')
def adjust_for_location(header_guard):
"""Remove internal location prefix from public headers if applicable."""
# Remove public prefixes
header_guard = public_prefix.sub('', header_guard, 1)
# Replace sysroot prefixes
header_guard = sysroot_prefix.sub('SYSROOT_', header_guard, 1)
return header_guard
def header_guard_from_path(path):
"""Compute the header guard from the path"""
assert(path.startswith(FUCHSIA_ROOT))
relative_path = path[len(FUCHSIA_ROOT):].strip('/')
upper_path = relative_path.upper()
header_guard = re.sub(disallowed_header_characters, '_', upper_path) + '_'
header_guard = adjust_for_location(header_guard)
return header_guard
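# Example mappings (illustrative paths under FUCHSIA_ROOT):
#   garnet/public/lib/abc/xyz.h -> LIB_ABC_XYZ_H_  (public prefix stripped)
#   zircon/system/public/foo.h  -> SYSROOT_FOO_H_  (sysroot prefix replaced)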
def check_file(path, fix_guards=False):
"""Check whether the file has a correct header guard.
A header guard can either be a #pragma once, or else a matching set of
#ifndef PATH_TO_FILE_
#define PATH_TO_FILE_
...
#endif // PATH_TO_FILE_
preprocessor directives, where both '.' and '/' in the path are
mapped to '_', and a trailing '_' is appended.
In either the #pragma once case or the header guard case, it is
assumed that there is no trailing or leading whitespace.
"""
# Only check .h files
if path[-2:] != '.h':
return True
header_guard = header_guard_from_path(path)
all_header_guards[header_guard].append(path)
ifndef = re.compile('^#ifndef %s$' % header_guard)
define = re.compile('^#define %s$' % header_guard)
endif = re.compile('^#endif +// *%s$' % header_guard)
found_pragma_once = False
found_ifndef = False
found_define = False
found_endif = False
with open(path, 'r') as f:
for line in f.readlines():
match = pragma_once.match(line)
if match:
if found_pragma_once:
print('%s contains multiple #pragma once' % path)
return False
found_pragma_once = True
match = ifndef.match(line)
if match:
if found_ifndef:
print('%s contains multiple ifndef header guards' % path)
return False
found_ifndef = True
match = define.match(line)
if match:
if found_define:
print('%s contains multiple define header guards' % path)
return False
found_define = True
match = endif.match(line)
if match:
if found_endif:
print('%s contains multiple endif header guards' % path)
return False
found_endif = True
if found_pragma_once:
if found_ifndef or found_define or found_endif:
print('%s contains both #pragma once and header guards' % path)
return False
if not fix_guards:
return True
if found_ifndef and found_define and found_endif:
return True
    if found_pragma_once:
        # Only reachable when fix_guards is set (see the early return above).
        print('%s contained #pragma once instead of a header guard' % path)
    elif found_ifndef or found_define or found_endif:
        if not found_ifndef:
            print('%s did not contain ifndef part of its header guard' % path)
        elif not found_define:
            print('%s did not contain define part of its header guard' % path)
        else:
            print('%s did not contain endif part of its header guard' % path)
    elif fix_guards:
        print('%s did not contain a header guard or the header guard did '
              'not match the file path' % path)
    else:
        print('%s contained neither a proper header guard nor #pragma once' %
              path)
header_guards_fixed = False
if fix_guards:
header_guards_fixed = fix_header_guard(path, header_guard)
if not header_guards_fixed:
        print('Allowable header guard values are %s' % all_header_guards.keys())
return False
def fix_header_guard(path, header_guard):
"""Attempt to fix the header guard in the given file."""
    ifndef = re.compile(r'^#ifndef [^\s]+_H_$')
    define = re.compile(r'^#define [^\s]+_H_$')
    endif = re.compile(r'^#endif +// *[^\s]+_H_$')
fixed_ifndef = False
fixed_define = False
fixed_endif = False
fixed_pragma_once = False
for line in fileinput.input(path, inplace=1):
(new_line, changes) = re.subn(ifndef,
'#ifndef %s' % header_guard,
line)
if changes:
fixed_ifndef = True
sys.stdout.write(new_line)
continue
(new_line, changes) = re.subn(define,
'#define %s' % header_guard,
line)
if changes:
fixed_define = True
sys.stdout.write(new_line)
continue
(new_line, changes) = re.subn(endif,
'#endif // %s' % header_guard,
line)
if changes:
fixed_endif = True
sys.stdout.write(new_line)
continue
if pragma_once.match(line):
fixed_pragma_once = True
sys.stdout.write('#ifndef %s\n' % header_guard)
sys.stdout.write('#define %s\n' % header_guard)
continue
sys.stdout.write(line)
if fixed_pragma_once:
with open(path, 'a') as file:
file.write('\n')
file.write('#endif // %s\n' % header_guard)
if (fixed_ifndef and fixed_define and fixed_endif) or fixed_pragma_once:
print('Fixed!')
return True
print('Not fixed...')
return False
def check_dir(p, fix_guards=False):
"""Walk recursively over a directory checking .h files"""
def prune(d):
if d[0] == '.' or d == 'third_party':
return True
return False
for root, dirs, paths in os.walk(p):
# Prune dot directories like .git
        for d in list(dirs):
            if prune(d):
                dirs.remove(d)
for path in paths:
check_file(os.path.join(root, path), fix_guards=fix_guards)
def check_collisions():
for header_guard, paths in all_header_guards.iteritems():
if len(paths) == 1:
continue
print('Multiple files could use %s as a header guard:' % header_guard)
for path in paths:
print(' %s' % path)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--fix',
help='Correct wrong header guards',
action='store_true')
(arg_results, other_args) = parser.parse_known_args()
fix_guards = arg_results.fix
for p in other_args:
p = os.path.realpath(os.path.abspath(p))
if os.path.isdir(p):
check_dir(p, fix_guards=fix_guards)
else:
check_file(p, fix_guards=fix_guards)
check_collisions()
if __name__ == "__main__":
sys.exit(main())
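
# Example usage (hypothetical script name; directory and file arguments may be
# mixed, and --fix rewrites incorrect guards in place):
#   ./check_header_guards.py --fix garnet/lib/foo src/some_file.h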
| StarcoderdataPython |
51677 | <reponame>canyon289/Theano-PyMC
import numpy as np
import theano
import theano.tensor as tt
import theano.typed_list
from tests.tensor.utils import rand_ranged
from theano import In
from theano.typed_list.basic import Append, Extend, Insert, Remove, Reverse
from theano.typed_list.type import TypedListType
class TestInplace:
def test_reverse_inplace(self):
mySymbolicMatricesList = TypedListType(
tt.TensorType(theano.config.floatX, (False, False))
)()
z = Reverse()(mySymbolicMatricesList)
m = theano.compile.mode.get_default_mode().including("typed_list_inplace_opt")
f = theano.function(
[In(mySymbolicMatricesList, borrow=True, mutable=True)],
z,
accept_inplace=True,
mode=m,
)
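        # The 'typed_list_inplace_opt' rewrite requested via the mode above
        # should replace Reverse with its in-place variant, hence the inplace
        # flag check on the first node of the compiled graph.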
assert f.maker.fgraph.toposort()[0].op.inplace
x = rand_ranged(-1000, 1000, [100, 101])
y = rand_ranged(-1000, 1000, [100, 101])
assert np.array_equal(f([x, y]), [y, x])
def test_append_inplace(self):
mySymbolicMatricesList = TypedListType(
tt.TensorType(theano.config.floatX, (False, False))
)()
mySymbolicMatrix = tt.matrix()
z = Append()(mySymbolicMatricesList, mySymbolicMatrix)
m = theano.compile.mode.get_default_mode().including("typed_list_inplace_opt")
f = theano.function(
[
In(mySymbolicMatricesList, borrow=True, mutable=True),
In(mySymbolicMatrix, borrow=True, mutable=True),
],
z,
accept_inplace=True,
mode=m,
)
assert f.maker.fgraph.toposort()[0].op.inplace
x = rand_ranged(-1000, 1000, [100, 101])
y = rand_ranged(-1000, 1000, [100, 101])
assert np.array_equal(f([x], y), [x, y])
def test_extend_inplace(self):
mySymbolicMatricesList1 = TypedListType(
tt.TensorType(theano.config.floatX, (False, False))
)()
mySymbolicMatricesList2 = TypedListType(
tt.TensorType(theano.config.floatX, (False, False))
)()
z = Extend()(mySymbolicMatricesList1, mySymbolicMatricesList2)
m = theano.compile.mode.get_default_mode().including("typed_list_inplace_opt")
f = theano.function(
[
In(mySymbolicMatricesList1, borrow=True, mutable=True),
mySymbolicMatricesList2,
],
z,
mode=m,
)
assert f.maker.fgraph.toposort()[0].op.inplace
x = rand_ranged(-1000, 1000, [100, 101])
y = rand_ranged(-1000, 1000, [100, 101])
assert np.array_equal(f([x], [y]), [x, y])
def test_insert_inplace(self):
mySymbolicMatricesList = TypedListType(
tt.TensorType(theano.config.floatX, (False, False))
)()
mySymbolicIndex = tt.scalar(dtype="int64")
mySymbolicMatrix = tt.matrix()
z = Insert()(mySymbolicMatricesList, mySymbolicIndex, mySymbolicMatrix)
m = theano.compile.mode.get_default_mode().including("typed_list_inplace_opt")
f = theano.function(
[
In(mySymbolicMatricesList, borrow=True, mutable=True),
mySymbolicIndex,
mySymbolicMatrix,
],
z,
accept_inplace=True,
mode=m,
)
assert f.maker.fgraph.toposort()[0].op.inplace
x = rand_ranged(-1000, 1000, [100, 101])
y = rand_ranged(-1000, 1000, [100, 101])
assert np.array_equal(f([x], np.asarray(1, dtype="int64"), y), [x, y])
def test_remove_inplace(self):
mySymbolicMatricesList = TypedListType(
tt.TensorType(theano.config.floatX, (False, False))
)()
mySymbolicMatrix = tt.matrix()
z = Remove()(mySymbolicMatricesList, mySymbolicMatrix)
m = theano.compile.mode.get_default_mode().including("typed_list_inplace_opt")
f = theano.function(
[
In(mySymbolicMatricesList, borrow=True, mutable=True),
In(mySymbolicMatrix, borrow=True, mutable=True),
],
z,
accept_inplace=True,
mode=m,
)
assert f.maker.fgraph.toposort()[0].op.inplace
x = rand_ranged(-1000, 1000, [100, 101])
y = rand_ranged(-1000, 1000, [100, 101])
assert np.array_equal(f([x, y], y), [x])
def test_constant_folding():
m = tt.ones((1,), dtype="int8")
l = theano.typed_list.make_list([m, m])
f = theano.function([], l)
topo = f.maker.fgraph.toposort()
assert len(topo)
assert isinstance(topo[0].op, theano.compile.ops.DeepCopyOp)
assert f() == [1, 1]
| StarcoderdataPython |
48913 | <filename>pipy/tests/test_utils.py
import pandas as pd
from pipy.pipeline.utils import combine_series
def test_combine_series():
s1 = pd.Series(dict(zip("AB", (1, 2))))
s2 = pd.Series(dict(zip("BC", (20, 30))))
s3 = combine_series(s1, s2)
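    # For the overlapping index 'B' the combined result keeps the value from
    # the second series (20 rather than 2); that precedence is exactly what
    # the assertion below pins down.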
pd.testing.assert_series_equal(s3, pd.Series({"A": 1, "B": 20, "C": 30}))
| StarcoderdataPython |