content | fixed_cases
---|---|
def main(x):
max_test = 2000
is_negative = False
if (x < 0):
is_negative = True
x = abs(x)
x = round(x, 16)
test = int(x - 1)
for i in range (test, max_test):
for j in range (1, max_test):
if (x == i/j):
if is_negative:
print('-', end='')
print(str(i)+'/' + str(j))
return
print("no solution found with max_test = " + str(max_test))
|
def main(x):
max_test = 2000
is_negative = False
if x < 0:
is_negative = True
x = abs(x)
x = round(x, 16)
test = int(x - 1)
for i in range(test, max_test):
for j in range(1, max_test):
if x == i / j:
if is_negative:
print('-', end='')
print(str(i) + '/' + str(j))
return
print('no solution found with max_test = ' + str(max_test))
|
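A quick illustrative check of the routine above (these calls are not part of the original snippet): the search starts at int(x - 1) and returns the first exact floating-point match.
main(0.75)   # prints: 3/4
main(-0.2)   # prints: -1/5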
#!/home/jepoy/anaconda3/bin/python
def main():
f = open('lines.txt', 'r')  # 'r' read; 'w' write (overwrites the file); 'a' append (adds to the end)
for line in f:
print(line.rstrip())
f.close()
if __name__ == '__main__':
main()
|
def main():
f = open('lines.txt', 'r')
for line in f:
print(line.rstrip())
f.close()
if __name__ == '__main__':
main()
|
class TrackingFieldsMixin:
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._old_fields = {}
self._set_old_fields()
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
result = super().save(force_insert, force_update, using, update_fields)
self._set_old_fields()
return result
def _set_old_fields(self):
for field in self._meta.fields:
attname, column = field.get_attname_column()
self._old_fields[attname] = getattr(self, attname)
def get_old_fields(self):
return self._old_fields
# Returns the fields name that have been modified since they are loaded or saved most recently.
def get_dirty_fields(self):
dirty_fields = []
for field in self._old_fields:
if self._old_fields[field] != getattr(self, field):
dirty_fields.append(field)
return dirty_fields
def get_old_field(self, field, default=None):
if field in self._old_fields:
return self._old_fields[field]
return default
def set_old_field(self, field, value):
self._old_fields[field] = value
|
class Trackingfieldsmixin:
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._old_fields = {}
self._set_old_fields()
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
result = super().save(force_insert, force_update, using, update_fields)
self._set_old_fields()
return result
def _set_old_fields(self):
for field in self._meta.fields:
(attname, column) = field.get_attname_column()
self._old_fields[attname] = getattr(self, attname)
def get_old_fields(self):
return self._old_fields
def get_dirty_fields(self):
dirty_fields = []
for field in self._old_fields:
if self._old_fields[field] != getattr(self, field):
dirty_fields.append(field)
return dirty_fields
def get_old_field(self, field, default=None):
if field in self._old_fields:
return self._old_fields[field]
return default
def set_old_field(self, field, value):
self._old_fields[field] = value
|
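The mixin above relies on Django model internals (self._meta.fields and the Model.save signature), so it is meant to be mixed into a model class. A minimal usage sketch, assuming a configured Django project with an installed app; the Book model and its fields are hypothetical, not part of the original code:
from django.db import models

class Book(TrackingFieldsMixin, models.Model):
    title = models.CharField(max_length=200)
    price = models.DecimalField(max_digits=6, decimal_places=2)

# book = Book.objects.get(pk=1)
# book.title = 'New title'
# book.get_dirty_fields()      # ['title'] until save() refreshes the snapshot
# book.get_old_field('title')  # the value loaded from the database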
_PAD = "_PAD"
_GO = "_GO"
_EOS = "_EOS"
_UNK = "_UNK"
_START_VOCAB = [_PAD, _GO, _EOS, _UNK]
PAD_ID = 0
GO_ID = 1
EOS_ID = 2
UNK_ID = 3
OP_DICT_IDS = [PAD_ID, GO_ID, EOS_ID, UNK_ID]
|
_pad = '_PAD'
_go = '_GO'
_eos = '_EOS'
_unk = '_UNK'
_start_vocab = [_pad, _go, _eos, _unk]
pad_id = 0
go_id = 1
eos_id = 2
unk_id = 3
op_dict_ids = [pad_id, go_id, eos_id, unk_id]
|
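The ID constants mirror the token positions in the start vocabulary, so a word vocabulary is typically appended after the four special tokens. A small sketch, with a made-up word list:
vocab = {token: idx for idx, token in enumerate(_START_VOCAB)}
for word in ['hello', 'world']:
    vocab.setdefault(word, len(vocab))
print(vocab)  # {'_PAD': 0, '_GO': 1, '_EOS': 2, '_UNK': 3, 'hello': 4, 'world': 5}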
fp = open('greetings.txt','w')
fp.write("Hello, World!\n")
fp.close()
|
fp = open('greetings.txt', 'w')
fp.write('Hello, World!\n')
fp.close()
|
def str_without_separators(sentence):
#separators = ",.?;: "
#str1 = "".join(char if char not in separators else "" for char in sentence)
str1 = "".join(char if char.isalnum() else "" for char in sentence)
return str1
def is_palindrome(sentence):
str1 = str_without_separators(sentence)
return str1[::-1].casefold() == str1.casefold()
print(is_palindrome("Was it a car, or a cat, I saw?"))
|
def str_without_separators(sentence):
str1 = ''.join((char if char.isalnum() else '' for char in sentence))
return str1
def is_palindrome(sentence):
str1 = str_without_separators(sentence)
return str1[::-1].casefold() == str1.casefold()
print(is_palindrome('Was it a car, or a cat, I saw?'))
|
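A couple of extra checks of the same function, with inputs chosen for illustration:
print(is_palindrome('No lemon, no melon'))  # True
print(is_palindrome('Hello, World!'))       # False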
c = int(input('\nHow many rows do you want? '))
print()
a = [[1]]
for i in range(c):
b = [1]
for j in range(len(a[-1]) - 1):
b.append(a[-1][j] + a[-1][j + 1])
b.append(1)
a.append(b)
for i in range(len(a)):
for j in range(len(a[i])):
a[i][j] = str(a[i][j])
d = ' '.join(a[i])
for i in range(len(a)):
f = ' '.join(a[i])
e = (len(d) - len(f)) // 2
print(e * ' ' + f + e * ' ')
print()
|
c = int(input('\nHow many rows do you want? '))
print()
a = [[1]]
for i in range(c):
b = [1]
for j in range(len(a[-1]) - 1):
b.append(a[-1][j] + a[-1][j + 1])
b.append(1)
a.append(b)
for i in range(len(a)):
for j in range(len(a[i])):
a[i][j] = str(a[i][j])
d = ' '.join(a[i])
for i in range(len(a)):
f = ' '.join(a[i])
e = (len(d) - len(f)) // 2
print(e * ' ' + f + e * ' ')
print()
|
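For example, entering 4 at the prompt yields five centred rows (trailing padding spaces omitted here):
    1
   1 1
  1 2 1
 1 3 3 1
1 4 6 4 1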
name = "pymum"
version = "3"
requires = ["pydad-3"]
|
name = 'pymum'
version = '3'
requires = ['pydad-3']
|
def proportion(a,b,c):
try:
a = int(a)
b = int(b)
c = int(c)
ratio = a/b
propor = c/ratio
return propor
except ZeroDivisionError:
print("Error: Dividing by Zero is not valid!!")
except ValueError:
print ("Error: Only Numeric Values are valid!!")
|
def proportion(a, b, c):
try:
a = int(a)
b = int(b)
c = int(c)
ratio = a / b
propor = c / ratio
return propor
except ZeroDivisionError:
print('Error: Dividing by Zero is not valid!!')
except ValueError:
print('Error: Only Numeric Values are valid!!')
|
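A worked call for illustration: with a = 2 and b = 4 the ratio is 0.5, so the function returns c divided by that ratio; with b = 0 the ZeroDivisionError branch prints the message and the function returns None.
print(proportion(2, 4, 10))   # 10 / (2 / 4) = 20.0
print(proportion(2, 0, 10))   # prints the error message, then None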
datasets={'U1001': {'135058': 1,'135038': 3,'135032': 3,'135084': 2,'135076':2},
'U1002': {'135058': 2,'135038': 2,'135032': 1,'135084': 1,'135076':3},
'U1003': {'135058': 2,'135038': 1,'135032': 2,'135084': 3,'135076':3},
'U1004': {'135058': 1,'135038': 3,'135032': 3,'135084': 3,'135076':3},
'U1005': {'135058': 1,'135038': 2,'135032': 2,'135084': 2,'135076':3},
'U1006': {'135058': 1,'135038': 3,'135032': 3,'135084': 2,'135076':1},
'U1007': {'135058': 1,'135038': 1,'135032': 3,'135084': 2,'135076':2},
'U1008': {'135058': 3,'135038': 3,'135032': 3,'135084': 2,'135076':3},
'U1009': {'135058': 1,'135038': 3,'135032': 3,'135084': 1,'135076':1},
'U1010': {'135058': 3,'135038': 3,'135032': 3,'135084': 3,'135076':3},
'U1011': {'135015': 2,'135018': 2,'135060': 3},
'U1012': {'135015': 3,'135018': 1,'135060': 2},
'U1013': {'135015': 2,'135018': 2,'135060': 2},
'U1014': {'135015': 2,'135018': 3,'135060': 3},
'U1015': {'135015': 2,'135018': 2,'135060': 1},
'U1016': {'135062': 2},
'U1017': {'134983': 0},
'U1018': {'135108': 1},
'U1019': {'135086': 0},
'U1020': {'135109': 2},
'U1021': {'132715': 2},
'U1022': {'135063': 2},
'U1023': {'132733': 2},
'U1024': {'135058': 2,'135038': 3,'135032': 2,'135084': 1,'135030':3},
'U1025': {'132830': 2},
'U1026': {'132665': 2},
'U1027': {'135062': 1},
'U1028': {'132613': 2},
'U1029': {'132922': 1},
'U1030': {'135058': 1,'135038': 2,'135032': 3,'132668': 3,'134996':3},
'U1031': {'132668': 0},
'U1032': {'135058': 3,'135038': 2,'135032': 2,'135084': 3,'132668':2},
'U1033': {'135030': 2},
'U1034': {'135035': 1},
'U1035': {'134986': 2},
'U1036': {'135045': 2},
'U1037': {'132854': 1},
'U1038': {'132862': 2},
'U1039': {'132665': 1},
'U1040': {'135019': 1},
'U1041': {'135109': 1},
'U1042': {'134992': 1},
'U1043': {'132630': 1},
'U1044': {'132768': 2},
'U1045': {'135041': 1},
'U1046': {'132861': 1},
'U1047': {'132884': 0},
'U1048': {'132723': 2},
'U1049': {'135052': 0},
'U1050': {'132584': 0},
'U1051': {'134996': 1},
'U1052': {'132870': 1},
'U1053': {'135047': 2},
'U1054': {'135045': 2},
'U1055': {'132825': 2},
'U1056': {'135051': 2},
'U1057': {'132834': 1},
'U1058': {'135055': 2},
'U1059': {'132754': 2},
'U1060': {'132740': 1},
'U1061': {'132954': 2},
'U1062': {'132834': 0},
'U1063': {'132667': 1},
'U1064': {'135052': 2},
'U1065': {'132654': 1},
'U1066': {'135013': 1},
'U1067': {'132560': 1},
'U1068': {'132660': 0},
'U1069': {'132847': 0},
'U1070': {'132613': 1},
'U1071': {'135030': 2},
'U1072': {'135000': 0},
'U1073': {'132862': 0},
'U1074': {'134976': 2},
'U1075': {'135041': 1},
'U1076': {'135073': 2},
'U1077': {'135027': 0},
'U1078': {'135052': 2},
'U1079': {'132766': 1},
'U1080': {'132715': 1},
'U1081': {'135027': 0},
'U1082': {'132733': 0},
'U1083': {'135044': 1},
'U1084': {'132723': 1},
'U1085': {'132825': 2},
'U1086': {'132951': 1},
'U1087': {'132663': 1},
'U1088': {'135051': 2},
'U1089': {'135079': 0},
'U1090': {'132925': 2},
'U1091': {'135035': 1},
'U1092': {'132723': 1},
'U1093': {'135011': 1},
'U1094': {'135069': 0},
'U1095': {'135072': 2},
'U1096': {'135075': 1},
'U1097': {'132861': 2},
'U1098': {'132921': 1},
'U1099': {'135042': 1},
'U1100': {'134976': 2},
'U1101': {'135041': 0},
'U1102': {'132847': 2},
'U1103': {'132733': 2},
'U1104': {'135041': 1},
'U1105': {'135052': 0},
'U1106': {'135064': 2},
'U1107': {'132733': 2},
'U1108': {'135058': 1},
'U1109': {'132872': 1},
'U1110': {'134999': 2},
'U1111': {'135082': 1},
'U1112': {'132862': 1},
'U1113': {'132854': 0},
'U1114': {'132755': 1},
'U1115': {'135071': 2},
'U1116': {'132834': 2},
'U1117': {'135000': 1},
'U1118': {'134992': 0},
'U1119': {'132768': 2},
'U1120': {'132847': 2},
'U1121': {'134999': 2},
'U1122': {'135053': 2},
'U1123': {'132594': 1},
'U1124': {'135050': 0},
'U1125': {'135062': 1},
'U1126': {'135108': 2},
'U1127': {'134996': 2},
'U1128': {'132951': 0},
'U1129': {'132665': 0},
'U1130': {'132706': 1},
'U1131': {'132870': 0},
'U1132': {'135027': 2},
'U1133': {'135019': 1},
'U1134': {'135074': 2},
'U1135': {'135060': 0},
'U1136': {'135028': 2},
'U1137': {'135075': 2},
'U1138': {'132925': 1}}
|
datasets = {'U1001': {'135058': 1, '135038': 3, '135032': 3, '135084': 2, '135076': 2}, 'U1002': {'135058': 2, '135038': 2, '135032': 1, '135084': 1, '135076': 3}, 'U1003': {'135058': 2, '135038': 1, '135032': 2, '135084': 3, '135076': 3}, 'U1004': {'135058': 1, '135038': 3, '135032': 3, '135084': 3, '135076': 3}, 'U1005': {'135058': 1, '135038': 2, '135032': 2, '135084': 2, '135076': 3}, 'U1006': {'135058': 1, '135038': 3, '135032': 3, '135084': 2, '135076': 1}, 'U1007': {'135058': 1, '135038': 1, '135032': 3, '135084': 2, '135076': 2}, 'U1008': {'135058': 3, '135038': 3, '135032': 3, '135084': 2, '135076': 3}, 'U1009': {'135058': 1, '135038': 3, '135032': 3, '135084': 1, '135076': 1}, 'U1010': {'135058': 3, '135038': 3, '135032': 3, '135084': 3, '135076': 3}, 'U1011': {'135015': 2, '135018': 2, '135060': 3}, 'U1012': {'135015': 3, '135018': 1, '135060': 2}, 'U1013': {'135015': 2, '135018': 2, '135060': 2}, 'U1014': {'135015': 2, '135018': 3, '135060': 3}, 'U1015': {'135015': 2, '135018': 2, '135060': 1}, 'U1016': {'135062': 2}, 'U1017': {'134983': 0}, 'U1018': {'135108': 1}, 'U1019': {'135086': 0}, 'U1020': {'135109': 2}, 'U1021': {'132715': 2}, 'U1022': {'135063': 2}, 'U1023': {'132733': 2}, 'U1024': {'135058': 2, '135038': 3, '135032': 2, '135084': 1, '135030': 3}, 'U1025': {'132830': 2}, 'U1026': {'132665': 2}, 'U1027': {'135062': 1}, 'U1028': {'132613': 2}, 'U1029': {'132922': 1}, 'U1030': {'135058': 1, '135038': 2, '135032': 3, '132668': 3, '134996': 3}, 'U1031': {'132668': 0}, 'U1032': {'135058': 3, '135038': 2, '135032': 2, '135084': 3, '132668': 2}, 'U1033': {'135030': 2}, 'U1034': {'135035': 1}, 'U1035': {'134986': 2}, 'U1036': {'135045': 2}, 'U1037': {'132854': 1}, 'U1038': {'132862': 2}, 'U1039': {'132665': 1}, 'U1040': {'135019': 1}, 'U1041': {'135109': 1}, 'U1042': {'134992': 1}, 'U1043': {'132630': 1}, 'U1044': {'132768': 2}, 'U1045': {'135041': 1}, 'U1046': {'132861': 1}, 'U1047': {'132884': 0}, 'U1048': {'132723': 2}, 'U1049': {'135052': 0}, 'U1050': {'132584': 0}, 'U1051': {'134996': 1}, 'U1052': {'132870': 1}, 'U1053': {'135047': 2}, 'U1054': {'135045': 2}, 'U1055': {'132825': 2}, 'U1056': {'135051': 2}, 'U1057': {'132834': 1}, 'U1058': {'135055': 2}, 'U1059': {'132754': 2}, 'U1060': {'132740': 1}, 'U1061': {'132954': 2}, 'U1062': {'132834': 0}, 'U1063': {'132667': 1}, 'U1064': {'135052': 2}, 'U1065': {'132654': 1}, 'U1066': {'135013': 1}, 'U1067': {'132560': 1}, 'U1068': {'132660': 0}, 'U1069': {'132847': 0}, 'U1070': {'132613': 1}, 'U1071': {'135030': 2}, 'U1072': {'135000': 0}, 'U1073': {'132862': 0}, 'U1074': {'134976': 2}, 'U1075': {'135041': 1}, 'U1076': {'135073': 2}, 'U1077': {'135027': 0}, 'U1078': {'135052': 2}, 'U1079': {'132766': 1}, 'U1080': {'132715': 1}, 'U1081': {'135027': 0}, 'U1082': {'132733': 0}, 'U1083': {'135044': 1}, 'U1084': {'132723': 1}, 'U1085': {'132825': 2}, 'U1086': {'132951': 1}, 'U1087': {'132663': 1}, 'U1088': {'135051': 2}, 'U1089': {'135079': 0}, 'U1090': {'132925': 2}, 'U1091': {'135035': 1}, 'U1092': {'132723': 1}, 'U1093': {'135011': 1}, 'U1094': {'135069': 0}, 'U1095': {'135072': 2}, 'U1096': {'135075': 1}, 'U1097': {'132861': 2}, 'U1098': {'132921': 1}, 'U1099': {'135042': 1}, 'U1100': {'134976': 2}, 'U1101': {'135041': 0}, 'U1102': {'132847': 2}, 'U1103': {'132733': 2}, 'U1104': {'135041': 1}, 'U1105': {'135052': 0}, 'U1106': {'135064': 2}, 'U1107': {'132733': 2}, 'U1108': {'135058': 1}, 'U1109': {'132872': 1}, 'U1110': {'134999': 2}, 'U1111': {'135082': 1}, 'U1112': {'132862': 1}, 'U1113': {'132854': 0}, 'U1114': {'132755': 1}, 
'U1115': {'135071': 2}, 'U1116': {'132834': 2}, 'U1117': {'135000': 1}, 'U1118': {'134992': 0}, 'U1119': {'132768': 2}, 'U1120': {'132847': 2}, 'U1121': {'134999': 2}, 'U1122': {'135053': 2}, 'U1123': {'132594': 1}, 'U1124': {'135050': 0}, 'U1125': {'135062': 1}, 'U1126': {'135108': 2}, 'U1127': {'134996': 2}, 'U1128': {'132951': 0}, 'U1129': {'132665': 0}, 'U1130': {'132706': 1}, 'U1131': {'132870': 0}, 'U1132': {'135027': 2}, 'U1133': {'135019': 1}, 'U1134': {'135074': 2}, 'U1135': {'135060': 0}, 'U1136': {'135028': 2}, 'U1137': {'135075': 2}, 'U1138': {'132925': 1}}
|
def isolateData(selector,channel,labels,data):
selected=[]
for i in range(len(labels)):
if labels[i]==selector:
selected.append(data[str(i)+'c'+str(channel)])  # epochs with class AGMSY5
return selected
|
def isolate_data(selector, channel, labels, data):
selected = []
for i in range(len(labels)):
if labels[i] == selector:
selected.append(data[str(i) + 'c' + str(channel)])
return selected
|
# ,---------------------------------------------------------------------------,
# | This module is part of the krangpower electrical distribution simulation |
# | suit by Federico Rosato <[email protected]> et al. |
# | Please refer to the license file published together with this code. |
# | All rights not explicitly granted by the license are reserved. |
# '---------------------------------------------------------------------------'
class AssociationError(Exception):
def __init__(self,
association_target_type,
association_target_name,
association_subject_type,
association_subject_name,
msg=None):
if msg is None:
msg = 'krangpower does not know how to associate a {0}({1}) to a {2}({3})'\
.format(association_target_type,
association_target_name,
association_subject_type,
association_subject_name)
super().__init__(msg)
self.association_target_type = association_target_type
self.association_subject_type = association_subject_type
self.association_target_name = association_target_name
self.association_subject_name = association_subject_name
class TypeRecoveryError(Exception):
pass
class TypeUnrecoverableError(TypeRecoveryError):
def __init__(self, original_type, msg=None):
if msg is None:
msg = 'krangpower has no options to recover a type {}'.format(str(original_type))
super().__init__(msg)
self.unrecoverable_type = original_type
class RecoveryTargetError(TypeRecoveryError):
def __init__(self, original_type, target_type, msg=None):
if msg is None:
msg = 'krangpower does not know how to convert type {}---->{}'\
.format(str(original_type), str(target_type))
super().__init__(msg)
self.original_type = original_type
self.invalid_target_type = target_type
class KrangInstancingError(Exception):
def __init__(self, already_existing_krang_name, msg=None):
if msg is None:
msg = 'Cannot create a new Krang - A Krang ({0}) already exists. '\
'Delete every reference to it if you want to instantiate another.'\
.format(already_existing_krang_name)
super().__init__(msg)
class KrangObjAdditionError(Exception):
def __init__(self, object, msg=None):
if msg is None:
msg = 'There was a problem in adding object {} to Krang'.format(str(object))
super().__init__(msg)
class ClearingAttemptError(Exception):
def __init__(self, msg=None):
if msg is None:
msg = 'A "clear" command was passed to the text command interface. ' \
'If you wish a new circuit, delete the existing Krang.'
super().__init__(msg)
class UnsolvedCircuitError(Exception):
def __init__(self, property_stack: str, msg=None):
if msg is None:
msg = 'An attempt to access the calculated property {} was made before solving the circuit.'\
.format(property_stack)
super().__init__(msg)
|
class Associationerror(Exception):
def __init__(self, association_target_type, association_target_name, association_subject_type, association_subject_name, msg=None):
if msg is None:
msg = 'krangpower does not know how to associate a {0}({1}) to a {2}({3})'.format(association_target_type, association_target_name, association_subject_type, association_subject_name)
super().__init__(msg)
self.association_target_type = association_target_type
self.association_subject_type = association_subject_type
self.association_target_name = association_target_name
self.association_subject_name = association_subject_name
class Typerecoveryerror(Exception):
pass
class Typeunrecoverableerror(Typerecoveryerror):
def __init__(self, original_type, msg=None):
if msg is None:
msg = 'krangpower has no options to recover a type {}'.format(str(original_type))
super().__init__(msg)
self.unrecoverable_type = original_type
class Recoverytargeterror(Typerecoveryerror):
def __init__(self, original_type, target_type, msg=None):
if msg is None:
msg = 'krangpower does not know how to convert type {}---->{}'.format(str(original_type), str(target_type))
super().__init__(msg)
self.original_type = original_type
self.invalid_target_type = target_type
class Kranginstancingerror(Exception):
def __init__(self, already_existing_krang_name, msg=None):
if msg is None:
msg = 'Cannot create a new Krang - A Krang ({0}) already exists. Delete every reference to it if you want to instantiate another.'.format(already_existing_krang_name)
super().__init__(msg)
class Krangobjadditionerror(Exception):
def __init__(self, object, msg=None):
if msg is None:
msg = 'There was a problem in adding object {} to Krang'.format(str(object))
super().__init__(msg)
class Clearingattempterror(Exception):
def __init__(self, msg=None):
if msg is None:
msg = 'A "clear" command was passed to the text command interface. If you wish a new circuit, delete the existing Krang.'
super().__init__(msg)
class Unsolvedcircuiterror(Exception):
def __init__(self, property_stack: str, msg=None):
if msg is None:
msg = 'An attempt to access the calculated property {} was made before solving the circuit.'.format(property_stack)
super().__init__(msg)
|
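An illustration of how these exception classes carry their context; this standalone sketch is not taken from the krangpower code base:
try:
    raise TypeUnrecoverableError(complex)
except TypeRecoveryError as exc:
    print(exc)                     # krangpower has no options to recover a type <class 'complex'>
    print(exc.unrecoverable_type)  # <class 'complex'>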
factors_avro = {
'namespace': 'com.gilt.cerebro.job',
'type': 'record',
'name': 'AvroFactors',
'fields': [
{'name': 'id', 'type': 'string'},
{'name': 'factors', 'type': {'type': 'array',
'items': 'float'}},
{'name': 'bias', 'type': 'float'},
],
}
|
factors_avro = {'namespace': 'com.gilt.cerebro.job', 'type': 'record', 'name': 'AvroFactors', 'fields': [{'name': 'id', 'type': 'string'}, {'name': 'factors', 'type': {'type': 'array', 'items': 'float'}}, {'name': 'bias', 'type': 'float'}]}
|
def fibonacci_number(num):
f = 0
s = 1
for i in range(num + 1):
if i <= 1:
nxt = i
else:
nxt = f +s
f = s
s = nxt
print (nxt)
print(fibonacci_number(int(input("Enter the number:"))))
|
def fibonacci_number(num):
f = 0
s = 1
for i in range(num + 1):
if i <= 1:
nxt = i
else:
nxt = f + s
f = s
s = nxt
print(nxt)
print(fibonacci_number(int(input('Enter the number:'))))
|
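Note that fibonacci_number prints each term itself and has no return statement, so the outer print adds a trailing None. Entering 5, for example, gives:
0
1
1
2
3
5
None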
{
"cells": [
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [
{
"ename": "ValueError",
"evalue": "operands could not be broadcast together with shapes (4,) (100,) (4,) ",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-13-5f28594dbd6a>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 38\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 39\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 40\u001b[0;31m \u001b[0mmaximo\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msigma\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmaximo_sigma\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mprior\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mposterior\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmu\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 41\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 42\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m<ipython-input-13-5f28594dbd6a>\u001b[0m in \u001b[0;36mposterior\u001b[0;34m(H, secuencia)\u001b[0m\n\u001b[1;32m 21\u001b[0m \u001b[0mPosterior\u001b[0m \u001b[0mcalculado\u001b[0m \u001b[0mcon\u001b[0m \u001b[0mla\u001b[0m \u001b[0mnormalizacion\u001b[0m \u001b[0madecuada\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 22\u001b[0m \"\"\"\n\u001b[0;32m---> 23\u001b[0;31m \u001b[0mpost\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mlike\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msigma\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mmu\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlog\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mprior\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mmu\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 24\u001b[0m \u001b[0mevidencia\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mamax\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpost\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 25\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexp\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpost\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0mevidencia\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m/\u001b[0m\u001b[0mtrapz\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexp\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mpost\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0mevidencia\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mmu\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m<ipython-input-13-5f28594dbd6a>\u001b[0m in \u001b[0;36mlike\u001b[0;34m(secuencia, sigma, mu)\u001b[0m\n\u001b[1;32m 14\u001b[0m \u001b[0mL\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mzeros\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 15\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 16\u001b[0;31m \u001b[0mL\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlog\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m1.\u001b[0m\u001b[0;34m/\u001b[0m\u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msqrt\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m2.0\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpi\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0msigma\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexp\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m0.5\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msecuencia\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0mmu\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m/\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msigma\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m**\u001b[0m\u001b[0;36m2\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 17\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mL\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 18\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mValueError\u001b[0m: operands could not be broadcast together with shapes (4,) (100,) (4,) "
]
}
],
"source": [
"\n",
"import numpy as np\n",
"import matplotlib.pyplot as plt\n",
"x =[4.6, 6.0, 2.0, 5.8] \n",
"x=np.array(x)\n",
"sigma =[2.0, 1.5, 5.0, 1.0]\n",
"mu=np.array(mu)\n",
"mu=np.linspace(10,-10,100)\n",
"\n",
"def prior(a):\n",
" p=np.ones(len(a))\n",
" return p\n",
"\n",
"def like(secuencia, sigma,mu):\n",
" L=np.zeros(len(x))\n",
" for i in range(len(x)):\n",
" L += np.log(1./np.sqrt(2.0*np.pi*sigma[i]**2))*np.exp(-0.5*(secuencia[i]-mu)**2/(sigma[i]**2))\n",
" return L\n",
"\n",
"def posterior(H, secuencia):\n",
" \"\"\"\n",
" Posterior calculado con la normalizacion adecuada\n",
" \"\"\"\n",
" post = like(x, sigma,mu) + np.log(prior(mu))\n",
" evidencia = np.amax(post)\n",
" return np.exp(post-evidencia)/trapz(np.exp(post-evidencia),mu)\n",
" \n",
"\n",
"def maximo_sigma(x, y):\n",
" deltax = x[1] - x[0]\n",
"\n",
" ii = np.argmax(y)\n",
"\n",
" # segunda derivada\n",
" d = (y[ii+1] - 2*y[ii] + y[ii-1]) / (deltax**2)\n",
"\n",
" return x[ii], 1.0/np.sqrt(-d)\n",
" \n",
"\n",
"\n",
"maximo, sigma = maximo_sigma(prior(x), posterior(mu,x))\n",
"\n",
"\n",
"\n",
"plt.figure()\n",
"plt.plot(H, post, label='datos={}'.format(secuencia))\n",
"plt.plot(H, gauss, ':', label='Aproximacion Gaussiana')\n",
"plt.title('H= {:.2f} $\\pm$ {:.2f}'.format(max, sigma))\n",
"plt.xlabel('H')\n",
"plt.ylabel('prob(H|datos)')\n",
"plt.legend()\n",
"plt.savefig('coins')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.6"
}
},
"nbformat": 4,
"nbformat_minor": 4
}
|
{'cells': [{'cell_type': 'code', 'execution_count': 13, 'metadata': {}, 'outputs': [{'ename': 'ValueError', 'evalue': 'operands could not be broadcast together with shapes (4,) (100,) (4,) ', 'output_type': 'error', 'traceback': ['\x1b[0;31m---------------------------------------------------------------------------\x1b[0m', '\x1b[0;31mValueError\x1b[0m Traceback (most recent call last)', '\x1b[0;32m<ipython-input-13-5f28594dbd6a>\x1b[0m in \x1b[0;36m<module>\x1b[0;34m\x1b[0m\n\x1b[1;32m 38\x1b[0m \x1b[0;34m\x1b[0m\x1b[0m\n\x1b[1;32m 39\x1b[0m \x1b[0;34m\x1b[0m\x1b[0m\n\x1b[0;32m---> 40\x1b[0;31m \x1b[0mmaximo\x1b[0m\x1b[0;34m,\x1b[0m \x1b[0msigma\x1b[0m \x1b[0;34m=\x1b[0m \x1b[0mmaximo_sigma\x1b[0m\x1b[0;34m(\x1b[0m\x1b[0mprior\x1b[0m\x1b[0;34m(\x1b[0m\x1b[0mx\x1b[0m\x1b[0;34m)\x1b[0m\x1b[0;34m,\x1b[0m \x1b[0mposterior\x1b[0m\x1b[0;34m(\x1b[0m\x1b[0mmu\x1b[0m\x1b[0;34m,\x1b[0m\x1b[0mx\x1b[0m\x1b[0;34m)\x1b[0m\x1b[0;34m)\x1b[0m\x1b[0;34m\x1b[0m\x1b[0;34m\x1b[0m\x1b[0m\n\x1b[0m\x1b[1;32m 41\x1b[0m \x1b[0;34m\x1b[0m\x1b[0m\n\x1b[1;32m 42\x1b[0m \x1b[0;34m\x1b[0m\x1b[0m\n', '\x1b[0;32m<ipython-input-13-5f28594dbd6a>\x1b[0m in \x1b[0;36mposterior\x1b[0;34m(H, secuencia)\x1b[0m\n\x1b[1;32m 21\x1b[0m \x1b[0mPosterior\x1b[0m \x1b[0mcalculado\x1b[0m \x1b[0mcon\x1b[0m \x1b[0mla\x1b[0m \x1b[0mnormalizacion\x1b[0m \x1b[0madecuada\x1b[0m\x1b[0;34m\x1b[0m\x1b[0;34m\x1b[0m\x1b[0m\n\x1b[1;32m 22\x1b[0m """\n\x1b[0;32m---> 23\x1b[0;31m \x1b[0mpost\x1b[0m \x1b[0;34m=\x1b[0m \x1b[0mlike\x1b[0m\x1b[0;34m(\x1b[0m\x1b[0mx\x1b[0m\x1b[0;34m,\x1b[0m \x1b[0msigma\x1b[0m\x1b[0;34m,\x1b[0m\x1b[0mmu\x1b[0m\x1b[0;34m)\x1b[0m \x1b[0;34m+\x1b[0m \x1b[0mnp\x1b[0m\x1b[0;34m.\x1b[0m\x1b[0mlog\x1b[0m\x1b[0;34m(\x1b[0m\x1b[0mprior\x1b[0m\x1b[0;34m(\x1b[0m\x1b[0mmu\x1b[0m\x1b[0;34m)\x1b[0m\x1b[0;34m)\x1b[0m\x1b[0;34m\x1b[0m\x1b[0;34m\x1b[0m\x1b[0m\n\x1b[0m\x1b[1;32m 24\x1b[0m \x1b[0mevidencia\x1b[0m \x1b[0;34m=\x1b[0m \x1b[0mnp\x1b[0m\x1b[0;34m.\x1b[0m\x1b[0mamax\x1b[0m\x1b[0;34m(\x1b[0m\x1b[0mpost\x1b[0m\x1b[0;34m)\x1b[0m\x1b[0;34m\x1b[0m\x1b[0;34m\x1b[0m\x1b[0m\n\x1b[1;32m 25\x1b[0m \x1b[0;32mreturn\x1b[0m \x1b[0mnp\x1b[0m\x1b[0;34m.\x1b[0m\x1b[0mexp\x1b[0m\x1b[0;34m(\x1b[0m\x1b[0mpost\x1b[0m\x1b[0;34m-\x1b[0m\x1b[0mevidencia\x1b[0m\x1b[0;34m)\x1b[0m\x1b[0;34m/\x1b[0m\x1b[0mtrapz\x1b[0m\x1b[0;34m(\x1b[0m\x1b[0mnp\x1b[0m\x1b[0;34m.\x1b[0m\x1b[0mexp\x1b[0m\x1b[0;34m(\x1b[0m\x1b[0mpost\x1b[0m\x1b[0;34m-\x1b[0m\x1b[0mevidencia\x1b[0m\x1b[0;34m)\x1b[0m\x1b[0;34m,\x1b[0m\x1b[0mmu\x1b[0m\x1b[0;34m)\x1b[0m\x1b[0;34m\x1b[0m\x1b[0;34m\x1b[0m\x1b[0m\n', '\x1b[0;32m<ipython-input-13-5f28594dbd6a>\x1b[0m in \x1b[0;36mlike\x1b[0;34m(secuencia, sigma, mu)\x1b[0m\n\x1b[1;32m 14\x1b[0m \x1b[0mL\x1b[0m\x1b[0;34m=\x1b[0m\x1b[0mnp\x1b[0m\x1b[0;34m.\x1b[0m\x1b[0mzeros\x1b[0m\x1b[0;34m(\x1b[0m\x1b[0mlen\x1b[0m\x1b[0;34m(\x1b[0m\x1b[0mx\x1b[0m\x1b[0;34m)\x1b[0m\x1b[0;34m)\x1b[0m\x1b[0;34m\x1b[0m\x1b[0;34m\x1b[0m\x1b[0m\n\x1b[1;32m 15\x1b[0m \x1b[0;32mfor\x1b[0m \x1b[0mi\x1b[0m \x1b[0;32min\x1b[0m \x1b[0mrange\x1b[0m\x1b[0;34m(\x1b[0m\x1b[0mlen\x1b[0m\x1b[0;34m(\x1b[0m\x1b[0mx\x1b[0m\x1b[0;34m)\x1b[0m\x1b[0;34m)\x1b[0m\x1b[0;34m:\x1b[0m\x1b[0;34m\x1b[0m\x1b[0;34m\x1b[0m\x1b[0m\n\x1b[0;32m---> 16\x1b[0;31m \x1b[0mL\x1b[0m \x1b[0;34m+=\x1b[0m 
\x1b[0mnp\x1b[0m\x1b[0;34m.\x1b[0m\x1b[0mlog\x1b[0m\x1b[0;34m(\x1b[0m\x1b[0;36m1.\x1b[0m\x1b[0;34m/\x1b[0m\x1b[0mnp\x1b[0m\x1b[0;34m.\x1b[0m\x1b[0msqrt\x1b[0m\x1b[0;34m(\x1b[0m\x1b[0;36m2.0\x1b[0m\x1b[0;34m*\x1b[0m\x1b[0mnp\x1b[0m\x1b[0;34m.\x1b[0m\x1b[0mpi\x1b[0m\x1b[0;34m*\x1b[0m\x1b[0msigma\x1b[0m\x1b[0;34m[\x1b[0m\x1b[0mi\x1b[0m\x1b[0;34m]\x1b[0m\x1b[0;34m**\x1b[0m\x1b[0;36m2\x1b[0m\x1b[0;34m)\x1b[0m\x1b[0;34m)\x1b[0m\x1b[0;34m*\x1b[0m\x1b[0mnp\x1b[0m\x1b[0;34m.\x1b[0m\x1b[0mexp\x1b[0m\x1b[0;34m(\x1b[0m\x1b[0;34m-\x1b[0m\x1b[0;36m0.5\x1b[0m\x1b[0;34m*\x1b[0m\x1b[0;34m(\x1b[0m\x1b[0msecuencia\x1b[0m\x1b[0;34m[\x1b[0m\x1b[0mi\x1b[0m\x1b[0;34m]\x1b[0m\x1b[0;34m-\x1b[0m\x1b[0mmu\x1b[0m\x1b[0;34m)\x1b[0m\x1b[0;34m**\x1b[0m\x1b[0;36m2\x1b[0m\x1b[0;34m/\x1b[0m\x1b[0;34m(\x1b[0m\x1b[0msigma\x1b[0m\x1b[0;34m[\x1b[0m\x1b[0mi\x1b[0m\x1b[0;34m]\x1b[0m\x1b[0;34m**\x1b[0m\x1b[0;36m2\x1b[0m\x1b[0;34m)\x1b[0m\x1b[0;34m)\x1b[0m\x1b[0;34m\x1b[0m\x1b[0;34m\x1b[0m\x1b[0m\n\x1b[0m\x1b[1;32m 17\x1b[0m \x1b[0;32mreturn\x1b[0m \x1b[0mL\x1b[0m\x1b[0;34m\x1b[0m\x1b[0;34m\x1b[0m\x1b[0m\n\x1b[1;32m 18\x1b[0m \x1b[0;34m\x1b[0m\x1b[0m\n', '\x1b[0;31mValueError\x1b[0m: operands could not be broadcast together with shapes (4,) (100,) (4,) ']}], 'source': ['\n', 'import numpy as np\n', 'import matplotlib.pyplot as plt\n', 'x =[4.6, 6.0, 2.0, 5.8] \n', 'x=np.array(x)\n', 'sigma =[2.0, 1.5, 5.0, 1.0]\n', 'mu=np.array(mu)\n', 'mu=np.linspace(10,-10,100)\n', '\n', 'def prior(a):\n', ' p=np.ones(len(a))\n', ' return p\n', '\n', 'def like(secuencia, sigma,mu):\n', ' L=np.zeros(len(x))\n', ' for i in range(len(x)):\n', ' L += np.log(1./np.sqrt(2.0*np.pi*sigma[i]**2))*np.exp(-0.5*(secuencia[i]-mu)**2/(sigma[i]**2))\n', ' return L\n', '\n', 'def posterior(H, secuencia):\n', ' """\n', ' Posterior calculado con la normalizacion adecuada\n', ' """\n', ' post = like(x, sigma,mu) + np.log(prior(mu))\n', ' evidencia = np.amax(post)\n', ' return np.exp(post-evidencia)/trapz(np.exp(post-evidencia),mu)\n', ' \n', '\n', 'def maximo_sigma(x, y):\n', ' deltax = x[1] - x[0]\n', '\n', ' ii = np.argmax(y)\n', '\n', ' # segunda derivada\n', ' d = (y[ii+1] - 2*y[ii] + y[ii-1]) / (deltax**2)\n', '\n', ' return x[ii], 1.0/np.sqrt(-d)\n', ' \n', '\n', '\n', 'maximo, sigma = maximo_sigma(prior(x), posterior(mu,x))\n', '\n', '\n', '\n', 'plt.figure()\n', "plt.plot(H, post, label='datos={}'.format(secuencia))\n", "plt.plot(H, gauss, ':', label='Aproximacion Gaussiana')\n", "plt.title('H= {:.2f} $\\pm$ {:.2f}'.format(max, sigma))\n", "plt.xlabel('H')\n", "plt.ylabel('prob(H|datos)')\n", 'plt.legend()\n', "plt.savefig('coins')"]}, {'cell_type': 'code', 'execution_count': null, 'metadata': {}, 'outputs': [], 'source': []}], 'metadata': {'kernelspec': {'display_name': 'Python 3', 'language': 'python', 'name': 'python3'}, 'language_info': {'codemirror_mode': {'name': 'ipython', 'version': 3}, 'file_extension': '.py', 'mimetype': 'text/x-python', 'name': 'python', 'nbconvert_exporter': 'python', 'pygments_lexer': 'ipython3', 'version': '3.7.6'}}, 'nbformat': 4, 'nbformat_minor': 4}
|
with open("source.txt") as filehandle:
lines = filehandle.readlines()
with open("source.txt", 'w') as filehandle:
lines = filter(lambda x: x.strip(), lines)
filehandle.writelines(lines)
|
with open('source.txt') as filehandle:
lines = filehandle.readlines()
with open('source.txt', 'w') as filehandle:
lines = filter(lambda x: x.strip(), lines)
filehandle.writelines(lines)
|
#break.py
for s in 'python' :
if s == 't' :
continue
print(s,end=" ")
print("over")
|
for s in 'python':
if s == 't':
continue
print(s, end=' ')
print('over')
|
arr = list(range(8))
def func(x):
return x*2
print(list(map(func, arr)))
print(list(map(lambda x: x**3, arr)))
|
arr = list(range(8))
def func(x):
return x * 2
print(list(map(func, arr)))
print(list(map(lambda x: x ** 3, arr)))
|
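For reference, with arr = list(range(8)) the two map calls above print:
[0, 2, 4, 6, 8, 10, 12, 14]
[0, 1, 8, 27, 64, 125, 216, 343]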
list1=list(map(int,input().rstrip().split()))
N=list1[0]
list2=list1[2:]
res=[]
for j in list2:
if j not in res:
res.append(j)
for i in range(len(list2)):
if list2[i] in res:
res.remove(list2[i])
print(*res)
|
list1 = list(map(int, input().rstrip().split()))
n = list1[0]
list2 = list1[2:]
res = []
for j in list2:
if j not in res:
res.append(j)
for i in range(len(list2)):
if list2[i] in res:
res.remove(list2[i])
print(*res)
|
#!/bin/env python3
def puzzle1():
tree = {}
acceptedBags = ['shiny gold']
foundNew = True
with open('input.txt', 'r') as input:
for line in input:
if line[-1:] == "\n":
line = line[:-1]
bags = line.split(',')
partName = bags[0].split(' ')
name = partName[0] + ' ' + partName[1]
tree[name] = {}
if partName[4] == 'no':
continue
else:
tree[name][partName[5] + ' ' + partName[6]] = int(partName[4])
if len(bags) > 1:
# print(bags)
for bag in bags[1:]:
bag = bag[1:].split(' ')
tree[name][bag[1] + ' ' + bag[2]] = int(bag[0])
while foundNew == True:
foundNew = False
for rootBag in tree:
for bag in tree[rootBag]:
if bag in acceptedBags and rootBag not in acceptedBags:
acceptedBags.append(rootBag)
foundNew = True
print(tree)
print(acceptedBags[1:])
print(len(acceptedBags[1:]))
if __name__ == "__main__":
puzzle1()
|
def puzzle1():
tree = {}
accepted_bags = ['shiny gold']
found_new = True
with open('input.txt', 'r') as input:
for line in input:
if line[-1:] == '\n':
line = line[:-1]
bags = line.split(',')
part_name = bags[0].split(' ')
name = part_name[0] + ' ' + part_name[1]
tree[name] = {}
if part_name[4] == 'no':
continue
else:
tree[name][part_name[5] + ' ' + part_name[6]] = int(part_name[4])
if len(bags) > 1:
for bag in bags[1:]:
bag = bag[1:].split(' ')
tree[name][bag[1] + ' ' + bag[2]] = int(bag[0])
while found_new:
found_new = False
for root_bag in tree:
for bag in tree[root_bag]:
if bag in accepted_bags and root_bag not in accepted_bags:
accepted_bags.append(root_bag)
found_new = True
print(tree)
print(accepted_bags[1:])
print(len(accepted_bags[1:]))
if __name__ == '__main__':
puzzle1()
|
LinearRegression_Params = [
{"name": "fit_intercept", "type": "select", "values": [True, False], "dtype": "boolean", "accept_none": False},
{"name": "positive", "type": "select", "values": [False, True], "dtype": "boolean", "accept_none": False}
]
Ridge_Params = [
{"name": "alpha", "type": "input", "values": 1.0, "dtype": "float", "accept_none": False},
{"name": "fit_intercept", "type": "select", "values": [True, False], "dtype": "boolean", "accept_none": False},
{"name": "copy_X", "type": "select", "values": [True, False], "dtype": "boolean", "accept_none": False},
{"name": "max_iter", "type": "input", "values": "", "dtype": "int", "accept_none": True},
{"name": "tol", "type": "input", "values": 0.001, "dtype": "float", "accept_none": False},
{"name": "solver", "type": "select",
"values": ["auto", "svd", "cholesky", "lsqr", "sparse_cg", "sag", "saga", "lbfgs"], "dtype": "string",
"accept_none": False},
{"name": "random_state", "type": "input", "values": "", "dtype": "int", "accept_none": True}
]
Lasso_Params = [
{"name": "alpha", "type": "input", "values": 1.0, "dtype": "float", "accept_none": False},
{"name": "fit_intercept", "type": "select", "values": [True, False], "dtype": "boolean", "accept_none": False},
{"name": "precompute", "type": "select", "values": [False, True], "dtype": "boolean", "accept_none": False},
{"name": "copy_X", "type": "select", "values": [True, False], "dtype": "boolean", "accept_none": False},
{"name": "max_iter", "type": "input", "values": 1000, "dtype": "int", "accept_none": False},
{"name": "tol", "type": "input", "values": 0.0001, "dtype": "float", "accept_none": False},
{"name": "warm_start", "type": "select", "values": [False, True], "dtype": "boolean", "accept_none": False},
{"name": "random_state", "type": "input", "values": 1, "dtype": "int", "accept_none": True},
{"name": "selection", "type": "select", "values": ["cyclic", "random", "auto"], "dtype": "string",
"accept_none": False}]
ElasticNet_Params = [
{"name": "alpha", "type": "input", "values": 1.0, "dtype": "float", "accept_none": False},
{"name": "l1_ratio", "type": "input", "values": 0.5, "dtype": "float", "accept_none": False},
{"name": "fit_intercept", "type": "select", "values": [True, False], "dtype": "boolean", "accept_none": False},
{"name": "precompute", "type": "select", "values": [False, True], "dtype": "boolean", "accept_none": False},
{"name": "max_iter", "type": "input", "values": 1000, "dtype": "int", "accept_none": False},
{"name": "copy_X", "type": "select", "values": [True, False], "dtype": "boolean", "accept_none": False},
{"name": "tol", "type": "input", "values": 0.0001, "dtype": "float", "accept_none": False},
{"name": "warm_start", "type": "select", "values": [False, True], "dtype": "boolean", "accept_none": False},
{"name": "random_state", "type": "input", "values": 1, "dtype": "int", "accept_none": True},
{"name": "selection", "type": "select", "values": ["cyclic", "random"], "dtype": "string", "accept_none": False}]
DecisionTreeRegressor_Params = [
{"name": "criterion", "type": "select", "values": ["squared_error", "friedman_mse", "absolute_error", "poisson"],
"dtype": "string", "accept_none": False},
{"name": "splitter", "type": "select", "values": ["best", "random"], "dtype": "string", "accept_none": False},
{"name": "max_depth", "type": "input", "values": "", "dtype": "int", "accept_none": True},
{"name": "min_samples_split", "type": "input", "values": 2, "dtype": "int", "accept_none": False},
{"name": "min_samples_leaf", "type": "input", "values": 1, "dtype": "int", "accept_none": False},
{"name": "min_weight_fraction_leaf", "type": "input", "values": 0.0, "dtype": "float", "accept_none": False},
{"name": "max_features", "type": "select", "values": ["auto", "sqrt", "log2"], "dtype": "string",
"accept_none": False},
{"name": "max_leaf_nodes", "type": "input", "values": "", "dtype": "int", "accept_none": True},
{"name": "min_impurity_decrease", "type": "input", "values": 0.0, "dtype": "float", "accept_none": False},
{"name": "ccp_alpha", "type": "input", "values": 0.0, "dtype": "float", "accept_none": True}
]
RandomForestRegressor_Params = [
{"name": "n_estimators", "type": "input", "values": 100, "dtype": "int", "accept_none": False},
{"name": "criterion", "type": "select", "values": ["squared_error", "absolute_error", "poisson"], "dtype": "string",
"accept_none": False},
{"name": "max_depth", "type": "input", "values": 5, "dtype": "int", "accept_none": True},
{"name": "min_samples_split", "type": "input", "values": 2, "dtype": "int", "accept_none": False},
{"name": "min_samples_leaf", "type": "input", "values": 0.1, "dtype": "float", "accept_none": False},
{"name": "min_weight_fraction_leaf", "type": "input", "values": 0.0, "dtype": "float", "accept_none": False},
{"name": "max_features", "type": "select", "values": ["auto", "sqrt", "log2"], "dtype": "string",
"accept_none": False},
{"name": "max_leaf_nodes", "type": "input", "values": 4, "dtype": "int", "accept_none": True},
{"name": "min_impurity_decrease", "type": "input", "values": 0.0, "dtype": "float", "accept_none": False},
{"name": "bootstrap", "type": "select", "values": [True, False], "dtype": "boolean", "accept_none": False},
{"name": "oob_score", "type": "select", "values": [False, True], "dtype": "boolean", "accept_none": False},
{"name": "n_jobs", "type": "input", "values": -1, "dtype": "int", "accept_none": True},
{"name": "random_state", "type": "input", "values": 1, "dtype": "int", "accept_none": True},
{"name": "verbose", "type": "input", "values": 0, "dtype": "int", "accept_none": False},
{"name": "warm_start", "type": "select", "values": [False, True], "dtype": "boolean", "accept_none": False},
{"name": "ccp_alpha", "type": "input", "values": 0.0, "dtype": "float", "accept_none": False},
{"name": "max_samples", "type": "input", "values": 1, "dtype": "float", "accept_none": True}]
SVR_params = [{"name": "kernel", "type": "select", "values": ["rbf", "linear", "poly", "sigmoid", "precomputed"],
"dtype": "string", "accept_none": False},
{"name": "degree", "type": "input", "values": 3, "dtype": "int", "accept_none": False},
{"name": "gamma", "type": "select", "values": ["scale", "auto"], "dtype": "string", "accept_none": False},
{"name": "coef0", "type": "input", "values": 0.0, "dtype": "float", "accept_none": False},
{"name": "tol", "type": "input", "values": 0.001, "dtype": "float", "accept_none": False},
{"name": "C", "type": "input", "values": 1.0, "dtype": "float", "accept_none": False},
{"name": "epsilon", "type": "input", "values": 0.1, "dtype": "float", "accept_none": False},
{"name": "shrinking", "type": "select", "values": [True, False], "dtype": "boolean",
"accept_none": False},
{"name": "cache_size", "type": "input", "values": 200, "dtype": "float", "accept_none": False},
{"name": "verbose", "type": "select", "values": [False, True], "dtype": "boolean", "accept_none": False},
{"name": "max_iter", "type": "input", "values": -1, "dtype": "int", "accept_none": False}]
AdabootRegressor_Params = [
{"name": "base_estimator", "type": "input", "values": None, "dtype": "object", "accept_none": True},
{"name": "n_estimators", "type": "input", "values": 50, "dtype": "int", "accept_none": False},
{"name": "learning_rate", "type": "input", "values": 1.0, "dtype": "float", "accept_none": False},
{"name": "loss", "type": "select", "values": ['linear', 'square', 'exponential'], "dtype": "string",
"accept_none": False},
{"name": "random_state", "type": "input", "values": 1, "dtype": "int", "accept_none": True}]
GradientBoostRegressor_Params = [
{"name": "loss", "type": "select", "values": ['squared_error', 'absolute_error', 'huber', 'quantile'],
"dtype": "string", "accept_none": False},
{"name": "learning_rate", "type": "input", "values": 0.1, "dtype": "float", "accept_none": False},
{"name": "n_estimators", "type": "input", "values": 100, "dtype": "int", "accept_none": False},
{"name": "subsample", "type": "input", "values": 1.0, "dtype": "float", "accept_none": False},
{"name": "criterion", "type": "select", "values": ['friedman_mse', 'squared_error', 'mae', 'mse'],
"dtype": "string", "accept_none": False},
{"name": "min_samples_split", "type": "input", "values": 2, "dtype": "int", "accept_none": False},
{"name": "min_samples_leaf", "type": "input", "values": 1, "dtype": "int", "accept_none": False},
{"name": "min_weight_fraction_leaf", "type": "input", "values": 0.0, "dtype": "float", "accept_none": False},
{"name": "max_depth", "type": "input", "values": 3, "dtype": "int", "accept_none": False},
{"name": "min_impurity_decrease", "type": "input", "values": 0.0, "dtype": "float", "accept_none": False},
{"name": "init", "type": "input", "values": "zero", "dtype": "string", "accept_none": True},
{"name": "random_state", "type": "input", "values": 1, "dtype": "int", "accept_none": True},
{"name": "max_features", "type": "select", "values": ['auto', 'sqrt', 'log2'], "dtype": "string",
"accept_none": False},
{"name": "alpha", "type": "input", "values": 0.9, "dtype": "float", "accept_none": False},
{"name": "verbose", "type": "input", "values": 0, "dtype": "int", "accept_none": False},
{"name": "max_leaf_nodes", "type": "input", "values": 4, "dtype": "int", "accept_none": True},
{"name": "warm_start", "type": "select", "values": [False, True], "dtype": "boolean", "accept_none": False},
{"name": "validation_fraction", "type": "input", "values": 0.1, "dtype": "float", "accept_none": False},
{"name": "n_iter_no_change", "type": "input", "values": 95, "dtype": "int", "accept_none": True},
{"name": "tol", "type": "input", "values": 0.0001, "dtype": "float", "accept_none": False},
{"name": "ccp_alpha", "type": "input", "values": 0.0, "dtype": "float", "accept_none": False}]
# -----------------------------------------------------------------------------------------------------------
# CLASSIFICATION -------------
LogisticRegression_Params = [
{"name": "penalty", "type": "select", "values": ['l2', 'l1', 'elasticnet', 'None'], "dtype": "string",
"accept_none": True},
{"name": "dual", "type": "select", "values": [False, True], "dtype": "boolean", "accept_none": False},
{"name": "tol", "type": "input", "values": 0.0001, "dtype": "float", "accept_none": False},
{"name": "C", "type": "input", "values": 1.0, "dtype": "float", "accept_none": False},
{"name": "fit_intercept", "type": "select", "values": [True, False], "dtype": "boolean", "accept_none": False},
{"name": "intercept_scaling", "type": "input", "values": 1.0, "dtype": "float", "accept_none": False},
{"name": "class_weight", "type": "select", "values": ["", 'balanced'], "dtype": "string", "accept_none": True},
{"name": "random_state", "type": "input", "values": 101, "dtype": "int", "accept_none": True},
{"name": "solver", "type": "select", "values": ["lbfgs", "newton-cg", "liblinear", "sag", "saga"],
"dtype": "string", "accept_none": False},
{"name": "max_iter", "type": "input", "values": 100, "dtype": "int", "accept_none": False},
{"name": "multi_class", "type": "select", "values": ["auto", "ovr", "multinomial"], "dtype": "string",
"accept_none": False},
{"name": "verbose", "type": "input", "values": 0, "dtype": "int", "accept_none": False},
{"name": "warm_start", "type": "select", "values": [False, True], "dtype": "boolean", "accept_none": False},
{"name": "n_jobs", "type": "input", "values": -1, "dtype": "int", "accept_none": True},
{"name": "l1_ratio", "type": "input", "values": 0.5, "dtype": "float", "accept_none": True}]
SVC_Params = [
{"name": "C", "type": "input", "values": 1.0, "dtype": "float", "accept_none": False},
{"name": "kernel", "type": "select", "values": ['rbf', 'poly', 'sigmoid', 'linear', 'precomputed'],
"dtype": "string", "accept_none": False},
{"name": "degree", "type": "input", "values": 3, "dtype": "int", "accept_none": False},
{"name": "gamma", "type": "select", "values": ["scale", "auto"], "dtype": "string", "accept_none": False},
{"name": "coef0", "type": "input", "values": 0.0, "dtype": "float", "accept_none": False},
{"name": "shrinking", "type": "select", "values": [True, False], "dtype": "boolean", "accept_none": False},
{"name": "probability", "type": "select", "values": [False, True], "dtype": "boolean", "accept_none": False},
{"name": "tol", "type": "input", "values": 0.001, "dtype": "float", "accept_none": False},
{"name": "cache_size", "type": "input", "values": 200, "dtype": "float", "accept_none": False},
{"name": "class_weight", "type": "select", "values": ['balanced'], "dtype": "string", "accept_none": True},
{"name": "verbose", "type": "select", "values": [False, True], "dtype": "boolean", "accept_none": False},
{"name": "max_iter", "type": "input", "values": -1, "dtype": "int", "accept_none": False},
{"name": "break_ties", "type": "select", "values": [False, True], "dtype": "boolean", "accept_none": False},
{"name": "random_state", "type": "input", "values": 101, "dtype": "int", "accept_none": True}]
KNeighborsClassifier_Params = [
{"name": "n_neighbors", "type": "input", "values": 5, "dtype": "int", "accept_none": False},
{"name": "weights", "type": "select", "values": ['uniform', 'distance'], "dtype": "string", "accept_none": False},
{"name": "algorithm", "type": "select", "values": ["auto", "ball_tree", "kd_tree", "brute"], "dtype": "string",
"accept_none": False},
{"name": "leaf_size", "type": "input", "values": 30, "dtype": "int", "accept_none": False},
{"name": "p", "type": "input", "values": 2, "dtype": "int", "accept_none": True},
{"name": "metric", "type": "select", "values": ['minkowski', 'euclidean', 'manhattan', 'chebyshev', 'mahalanobis'],
"dtype": "string", "accept_none": False},
{"name": "n_jobs", "type": "input", "values": -1, "dtype": "int", "accept_none": True}
]
DecisionTreeClassifier_Params = [
{"name": "criterion", "type": "select", "values": ['gini', 'entropy'], "dtype": "string", "accept_none": False},
{"name": "splitter", "type": "select", "values": ['best', 'random'], "dtype": "string", "accept_none": False},
{"name": "max_depth", "type": "input", "values": 5, "dtype": "int", "accept_none": False},
{"name": "min_samples_split", "type": "input", "values": 2, "dtype": "int", "accept_none": False},
{"name": "min_samples_leaf", "type": "input", "values": 1, "dtype": "int", "accept_none": False},
{"name": "min_weight_fraction_leaf", "type": "input", "values": 0.0, "dtype": "float", "accept_none": False},
{"name": "max_features", "type": "select", "values": ["auto", "sqrt", "log2"], "dtype": "string",
"accept_none": True},
{"name": "random_state", "type": "input", "values": 101, "dtype": "int", "accept_none": True},
{"name": "max_leaf_nodes", "type": "input", "values": 5, "dtype": "int", "accept_none": True},
{"name": "min_impurity_decrease", "type": "input", "values": 0.0, "dtype": "float", "accept_none": True},
{"name": "class_weight", "type": "select", "values": ["balanced"], "dtype": "string", "accept_none": True},
{"name": "ccp_alpha", "type": "input", "values": 0.0, "dtype": "float", "accept_none": True}]
RandomForestClassifier_Params = [
{"name": "n_estimators", "type": "input", "values": 100, "dtype": "int", "accept_none": False},
{"name": "criterion", "type": "select", "values": ["gini", "entropy"], "dtype": "string", "accept_none": False},
{"name": "max_depth", "type": "input", "values": 5, "dtype": "int", "accept_none": True},
{"name": "min_samples_split", "type": "input", "values": 2, "dtype": "int", "accept_none": False},
{"name": "min_samples_leaf", "type": "input", "values": 1, "dtype": "int", "accept_none": False},
{"name": "min_weight_fraction_leaf", "type": "input", "values": 0.0, "dtype": "float", "accept_none": False},
{"name": "max_features", "type": "select", "values": ["auto", "sqrt", "log2"], "dtype": "string",
"accept_none": True},
{"name": "max_leaf_nodes", "type": "input", "values": 5, "dtype": "int", "accept_none": True},
{"name": "min_impurity_decrease", "type": "input", "values": 0.0, "dtype": "float", "accept_none": True},
{"name": "bootstrap", "type": "select", "values": [True, False], "dtype": "boolean", "accept_none": False},
{"name": "oob_score", "type": "select", "values": [False, True], "dtype": "boolean", "accept_none": False},
{"name": "n_jobs", "type": "input", "values": -1, "dtype": "int", "accept_none": True},
{"name": "random_state", "type": "input", "values": 101, "dtype": "int", "accept_none": True},
{"name": "verbose", "type": "input", "values": 0, "dtype": "int", "accept_none": False},
{"name": "warm_start", "type": "select", "values": [False, True], "dtype": "boolean", "accept_none": False},
{"name": "class_weight", "type": "select", "values": ["balanced", "balanced_subsample"], "dtype": "string",
"accept_none": True},
{"name": "ccp_alpha", "type": "input", "values": 0.0, "dtype": "float", "accept_none": True},
{"name": "max_samples", "type": "input", "values": "", "dtype": "int", "accept_none": True}]
GradientBoostingClassifier_Params = [
{"name": "loss", "type": "select", "values": ["deviance", "exponential"], "dtype": "string", "accept_none": False},
{"name": "learning_rate", "type": "input", "values": 0.1, "dtype": "float", "accept_none": False},
{"name": "n_estimators", "type": "input", "values": 100, "dtype": "int", "accept_none": False},
{"name": "subsample", "type": "input", "values": 1.0, "dtype": "float", "accept_none": False},
{"name": "criterion", "type": "select", "values": ["friedman_mse", "squared_error", "mae"], "dtype": "string",
"accept_none": False},
{"name": "min_samples_split", "type": "input", "values": 2, "dtype": "int", "accept_none": False},
{"name": "min_samples_leaf", "type": "input", "values": 1, "dtype": "int", "accept_none": False},
{"name": "min_weight_fraction_leaf", "type": "input", "values": 0.0, "dtype": "float", "accept_none": False},
{"name": "max_depth", "type": "input", "values": 3, "dtype": "int", "accept_none": False},
{"name": "min_impurity_decrease", "type": "input", "values": 0.0, "dtype": "float", "accept_none": False},
{"name": "random_state", "type": "input", "values": 100, "dtype": "int", "accept_none": True},
{"name": "max_features", "type": "select", "values": ["auto", "sqrt", "log2"], "dtype": "string",
"accept_none": True},
{"name": "verbose", "type": "input", "values": 0, "dtype": "int", "accept_none": False},
{"name": "max_leaf_nodes", "type": "input", "values": 5, "dtype": "int", "accept_none": True},
{"name": "warm_start", "type": "select", "values": [False, True], "dtype": "boolean", "accept_none": False},
{"name": "validation_fraction", "type": "input", "values": 0.1, "dtype": "float", "accept_none": False},
{"name": "n_iter_no_change", "type": "input", "values": 5, "dtype": "int", "accept_none": True},
{"name": "tol", "type": "input", "values": 0.0001, "dtype": "float", "accept_none": False},
{"name": "ccp_alpha", "type": "input", "values": 0.0, "dtype": "float", "accept_none": False}]
AdaBoostClassifier_Params = [
{"name": "base_estimator", "type": "input", "values": None, "dtype": "object", "accept_none": True},
{"name": "n_estimators", "type": "input", "values": 50, "dtype": "int", "accept_none": False},
{"name": "learning_rate", "type": "input", "values": 1.0, "dtype": "float", "accept_none": False},
{"name": "algorithm", "type": "select", "values": ["SAMME.R", "SAMME"], "dtype": "string", "accept_none": False},
{"name": "random_state", "type": "input", "values": 1, "dtype": "int", "accept_none": True}]
# -----------------------------------------------------------------------------------------------------------
KmeansClustering_Params = [
{"name": "n_clusters", "type": "input", "values": 8, "dtype": "int", "accept_none": False},
{"name": "init", "type": "select", "values": ["k-means++", "random"], "dtype": "string", "accept_none": False},
{"name": "n_init", "type": "input", "values": 10, "dtype": "int", "accept_none": False},
{"name": "max_iter", "type": "input", "values": 300, "dtype": "int", "accept_none": False},
{"name": "tol", "type": "float", "values": 0.0001, "dtype": "float", "accept_none": False},
{"name": "verbose", "type": "input", "values": 0, "dtype": "int", "accept_none": False},
{"name": "random_state", "type": "input", "values": 1, "dtype": "int", "accept_none": True},
{"name": "copy_x", "type": "select", "values": [True, False], "dtype": "boolean", "accept_none": False},
{"name": "algorithm", "type": "select", "values": ["auto", "full", "elkan"], "dtype": "string",
"accept_none": False}]
DbscanClustering_Params = [
{"name": "eps", "type": "float", "values": 0.5, "dtype": "float", "accept_none": False},
{"name": "min_samples", "type": "input", "values": 5, "dtype": "int", "accept_none": False},
{"name": "metric", "type": "select", "values": ['euclidean', 'cityblock', 'cosine', 'l1', 'l2', 'manhattan'],
"dtype": "string", "accept_none": False},
{"name": "algorithm", "type": "select", "values": ["auto", "ball_tree", "kd_tree", "brute"], "dtype": "string",
"accept_none": False},
{"name": "leaf_size", "type": "input", "values": 30, "dtype": "int", "accept_none": False},
{"name": "n_jobs", "type": "input", "values": -1, "dtype": "int", "accept_none": True}]
AgglomerativeClustering_Params = [
{"name": "n_clusters", "type": "input", "values": 2, "dtype": "int", "accept_none": True},
{"name": "affinity", "type": "select", "values": ["euclidean"], "dtype": "string", "accept_none": False},
{"name": "compute_full_tree", "type": "select", "values": ["auto"], "dtype": "string", "accept_none": False},
{"name": "linkage", "type": "select", "values": ["ward", "complete", "average", "single"], "dtype": "string",
"accept_none": False},
{"name": "compute_distances", "type": "select", "values": [False, True], "dtype": "boolean", "accept_none": False}]
Params_Mappings = {
"true": True,
"false": False
}
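# Illustrative sketch (not part of the original code): one way such a schema could
# be turned into estimator keyword arguments. The helper name and the coercion
# rules are assumptions, not an API defined above.
def params_to_kwargs(schema, user_values):
    casters = {"int": int, "float": float, "string": str, "boolean": bool, "object": lambda v: v}
    kwargs = {}
    for field in schema:
        raw = user_values.get(field["name"], field["values"])
        if isinstance(raw, str) and raw.lower() in Params_Mappings:
            raw = Params_Mappings[raw.lower()]  # map "true"/"false" strings to booleans
        if isinstance(raw, list):
            raw = raw[0]                        # "select" fields default to their first option
        if raw in (None, "") and field["accept_none"]:
            kwargs[field["name"]] = None
        else:
            kwargs[field["name"]] = casters[field["dtype"]](raw)
    return kwargs

# Hypothetical usage with the schema above:
# from sklearn.ensemble import AdaBoostClassifier
# clf = AdaBoostClassifier(**params_to_kwargs(AdaBoostClassifier_Params, {"n_estimators": 200}))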
|
linear_regression__params = [{'name': 'fit_intercept', 'type': 'select', 'values': [True, False], 'dtype': 'boolean', 'accept_none': False}, {'name': 'positive', 'type': 'select', 'values': [False, True], 'dtype': 'boolean', 'accept_none': False}]
ridge__params = [{'name': 'alpha', 'type': 'input', 'values': 1.0, 'dtype': 'float', 'accept_none': False}, {'name': 'fit_intercept', 'type': 'select', 'values': [True, False], 'dtype': 'boolean', 'accept_none': False}, {'name': 'copy_X', 'type': 'select', 'values': [True, False], 'dtype': 'boolean', 'accept_none': False}, {'name': 'max_iter', 'type': 'input', 'values': '', 'dtype': 'int', 'accept_none': True}, {'name': 'tol', 'type': 'input', 'values': 0.001, 'dtype': 'float', 'accept_none': False}, {'name': 'solver', 'type': 'select', 'values': ['auto', 'svd', 'cholesky', 'lsqr', 'sparse_cg', 'sag', 'saga', 'lbfgs'], 'dtype': 'string', 'accept_none': False}, {'name': 'random_state', 'type': 'input', 'values': '', 'dtype': 'int', 'accept_none': True}]
lasso__params = [{'name': 'alpha', 'type': 'input', 'values': 1.0, 'dtype': 'float', 'accept_none': False}, {'name': 'fit_intercept', 'type': 'select', 'values': [True, False], 'dtype': 'boolean', 'accept_none': False}, {'name': 'precompute', 'type': 'select', 'values': [False, True], 'dtype': 'boolean', 'accept_none': False}, {'name': 'copy_X', 'type': 'select', 'values': [True, False], 'dtype': 'boolean', 'accept_none': False}, {'name': 'max_iter', 'type': 'input', 'values': 1000, 'dtype': 'int', 'accept_none': False}, {'name': 'tol', 'type': 'input', 'values': 0.0001, 'dtype': 'float', 'accept_none': False}, {'name': 'warm_start', 'type': 'select', 'values': [False, True], 'dtype': 'boolean', 'accept_none': False}, {'name': 'random_state', 'type': 'input', 'values': 1, 'dtype': 'int', 'accept_none': True}, {'name': 'selection', 'type': 'select', 'values': ['cyclic', 'random', 'auto'], 'dtype': 'string', 'accept_none': False}]
elastic_net__params = [{'name': 'alpha', 'type': 'input', 'values': 1.0, 'dtype': 'float', 'accept_none': False}, {'name': 'l1_ratio', 'type': 'input', 'values': 0.5, 'dtype': 'float', 'accept_none': False}, {'name': 'fit_intercept', 'type': 'select', 'values': [True, False], 'dtype': 'boolean', 'accept_none': False}, {'name': 'precompute', 'type': 'select', 'values': [False, True], 'dtype': 'boolean', 'accept_none': False}, {'name': 'max_iter', 'type': 'input', 'values': 1000, 'dtype': 'int', 'accept_none': False}, {'name': 'copy_X', 'type': 'select', 'values': [True, False], 'dtype': 'boolean', 'accept_none': False}, {'name': 'tol', 'type': 'input', 'values': 0.0001, 'dtype': 'float', 'accept_none': False}, {'name': 'warm_start', 'type': 'select', 'values': [False, True], 'dtype': 'boolean', 'accept_none': False}, {'name': 'random_state', 'type': 'input', 'values': 1, 'dtype': 'int', 'accept_none': True}, {'name': 'selection', 'type': 'select', 'values': ['cyclic', 'random'], 'dtype': 'string', 'accept_none': False}]
decision_tree_regressor__params = [{'name': 'criterion', 'type': 'select', 'values': ['squared_error', 'friedman_mse', 'absolute_error', 'poisson'], 'dtype': 'string', 'accept_none': False}, {'name': 'splitter', 'type': 'select', 'values': ['best', 'random'], 'dtype': 'string', 'accept_none': False}, {'name': 'max_depth', 'type': 'input', 'values': '', 'dtype': 'int', 'accept_none': True}, {'name': 'min_samples_split', 'type': 'input', 'values': 2, 'dtype': 'int', 'accept_none': False}, {'name': 'min_samples_leaf', 'type': 'input', 'values': 1, 'dtype': 'int', 'accept_none': False}, {'name': 'min_weight_fraction_leaf', 'type': 'input', 'values': 0.0, 'dtype': 'float', 'accept_none': False}, {'name': 'max_features', 'type': 'select', 'values': ['auto', 'sqrt', 'log2'], 'dtype': 'string', 'accept_none': False}, {'name': 'max_leaf_nodes', 'type': 'input', 'values': '', 'dtype': 'int', 'accept_none': True}, {'name': 'min_impurity_decrease', 'type': 'input', 'values': 0.0, 'dtype': 'float', 'accept_none': False}, {'name': 'ccp_alpha', 'type': 'input', 'values': 0.0, 'dtype': 'float', 'accept_none': True}]
random_forest_regressor__params = [{'name': 'n_estimators', 'type': 'input', 'values': 100, 'dtype': 'int', 'accept_none': False}, {'name': 'criterion', 'type': 'select', 'values': ['squared_error', 'absolute_error', 'poisson'], 'dtype': 'string', 'accept_none': False}, {'name': 'max_depth', 'type': 'input', 'values': 5, 'dtype': 'int', 'accept_none': True}, {'name': 'min_samples_split', 'type': 'input', 'values': 2, 'dtype': 'int', 'accept_none': False}, {'name': 'min_samples_leaf', 'type': 'input', 'values': 0.1, 'dtype': 'float', 'accept_none': False}, {'name': 'min_weight_fraction_leaf', 'type': 'input', 'values': 0.0, 'dtype': 'float', 'accept_none': False}, {'name': 'max_features', 'type': 'select', 'values': ['auto', 'sqrt', 'log2'], 'dtype': 'string', 'accept_none': False}, {'name': 'max_leaf_nodes', 'type': 'input', 'values': 4, 'dtype': 'int', 'accept_none': True}, {'name': 'min_impurity_decrease', 'type': 'input', 'values': 0.0, 'dtype': 'float', 'accept_none': False}, {'name': 'bootstrap', 'type': 'select', 'values': [True, False], 'dtype': 'boolean', 'accept_none': False}, {'name': 'oob_score', 'type': 'select', 'values': [False, True], 'dtype': 'boolean', 'accept_none': False}, {'name': 'n_jobs', 'type': 'input', 'values': -1, 'dtype': 'int', 'accept_none': True}, {'name': 'random_state', 'type': 'input', 'values': 1, 'dtype': 'int', 'accept_none': True}, {'name': 'verbose', 'type': 'input', 'values': 0, 'dtype': 'int', 'accept_none': False}, {'name': 'warm_start', 'type': 'select', 'values': [False, True], 'dtype': 'boolean', 'accept_none': False}, {'name': 'ccp_alpha', 'type': 'input', 'values': 0.0, 'dtype': 'float', 'accept_none': False}, {'name': 'max_samples', 'type': 'input', 'values': 1, 'dtype': 'float', 'accept_none': True}]
svr_params = [{'name': 'kernel', 'type': 'select', 'values': ['rbf', 'linear', 'poly', 'sigmoid', 'precomputed'], 'dtype': 'string', 'accept_none': False}, {'name': 'degree', 'type': 'input', 'values': 3, 'dtype': 'int', 'accept_none': False}, {'name': 'gamma', 'type': 'select', 'values': ['scale', 'auto'], 'dtype': 'string', 'accept_none': False}, {'name': 'coef0', 'type': 'input', 'values': 0.0, 'dtype': 'float', 'accept_none': False}, {'name': 'tol', 'type': 'input', 'values': 0.001, 'dtype': 'float', 'accept_none': False}, {'name': 'C', 'type': 'input', 'values': 1.0, 'dtype': 'float', 'accept_none': False}, {'name': 'epsilon', 'type': 'input', 'values': 0.1, 'dtype': 'float', 'accept_none': False}, {'name': 'shrinking', 'type': 'select', 'values': [True, False], 'dtype': 'boolean', 'accept_none': False}, {'name': 'cache_size', 'type': 'input', 'values': 200, 'dtype': 'float', 'accept_none': False}, {'name': 'verbose', 'type': 'select', 'values': [False, True], 'dtype': 'boolean', 'accept_none': False}, {'name': 'max_iter', 'type': 'input', 'values': -1, 'dtype': 'int', 'accept_none': False}]
adaboot_regressor__params = [{'name': 'base_estimator', 'type': 'input', 'values': None, 'dtype': 'object', 'accept_none': True}, {'name': 'n_estimators', 'type': 'input', 'values': 50, 'dtype': 'int', 'accept_none': False}, {'name': 'learning_rate', 'type': 'input', 'values': 1.0, 'dtype': 'float', 'accept_none': False}, {'name': 'loss', 'type': 'select', 'values': ['linear', 'square', 'exponential'], 'dtype': 'string', 'accept_none': False}, {'name': 'random_state', 'type': 'input', 'values': 1, 'dtype': 'int', 'accept_none': True}]
gradient_boost_regressor__params = [{'name': 'loss', 'type': 'select', 'values': ['squared_error', 'absolute_error', 'huber', 'quantile'], 'dtype': 'string', 'accept_none': False}, {'name': 'learning_rate', 'type': 'input', 'values': 0.1, 'dtype': 'float', 'accept_none': False}, {'name': 'n_estimators', 'type': 'input', 'values': 100, 'dtype': 'int', 'accept_none': False}, {'name': 'subsample', 'type': 'input', 'values': 1.0, 'dtype': 'float', 'accept_none': False}, {'name': 'criterion', 'type': 'select', 'values': ['friedman_mse', 'squared_error', 'mae', 'mse'], 'dtype': 'string', 'accept_none': False}, {'name': 'min_samples_split', 'type': 'input', 'values': 2, 'dtype': 'int', 'accept_none': False}, {'name': 'min_samples_leaf', 'type': 'input', 'values': 1, 'dtype': 'int', 'accept_none': False}, {'name': 'min_weight_fraction_leaf', 'type': 'input', 'values': 0.0, 'dtype': 'float', 'accept_none': False}, {'name': 'max_depth', 'type': 'input', 'values': 3, 'dtype': 'int', 'accept_none': False}, {'name': 'min_impurity_decrease', 'type': 'input', 'values': 0.0, 'dtype': 'float', 'accept_none': False}, {'name': 'init', 'type': 'input', 'values': 'zero', 'dtype': 'string', 'accept_none': True}, {'name': 'random_state', 'type': 'input', 'values': 1, 'dtype': 'int', 'accept_none': True}, {'name': 'max_features', 'type': 'select', 'values': ['auto', 'sqrt', 'log2'], 'dtype': 'string', 'accept_none': False}, {'name': 'alpha', 'type': 'input', 'values': 0.9, 'dtype': 'float', 'accept_none': False}, {'name': 'verbose', 'type': 'input', 'values': 0, 'dtype': 'int', 'accept_none': False}, {'name': 'max_leaf_nodes', 'type': 'input', 'values': 4, 'dtype': 'int', 'accept_none': True}, {'name': 'warm_start', 'type': 'select', 'values': [False, True], 'dtype': 'boolean', 'accept_none': False}, {'name': 'validation_fraction', 'type': 'input', 'values': 0.1, 'dtype': 'float', 'accept_none': False}, {'name': 'n_iter_no_change', 'type': 'input', 'values': 95, 'dtype': 'int', 'accept_none': True}, {'name': 'tol', 'type': 'input', 'values': 0.0001, 'dtype': 'float', 'accept_none': False}, {'name': 'ccp_alpha', 'type': 'input', 'values': 0.0, 'dtype': 'float', 'accept_none': False}]
logistic_regression__params = [{'name': 'penalty', 'type': 'select', 'values': ['l2', 'l1', 'elasticnet', 'None'], 'dtype': 'string', 'accept_none': True}, {'name': 'dual', 'type': 'select', 'values': [False, True], 'dtype': 'boolean', 'accept_none': False}, {'name': 'tol', 'type': 'input', 'values': 0.0001, 'dtype': 'float', 'accept_none': False}, {'name': 'C', 'type': 'input', 'values': 1.0, 'dtype': 'float', 'accept_none': False}, {'name': 'fit_intercept', 'type': 'select', 'values': [True, False], 'dtype': 'boolean', 'accept_none': False}, {'name': 'intercept_scaling', 'type': 'input', 'values': 1.0, 'dtype': 'float', 'accept_none': False}, {'name': 'class_weight', 'type': 'select', 'values': ['', 'balanced'], 'dtype': 'string', 'accept_none': True}, {'name': 'random_state', 'type': 'input', 'values': 101, 'dtype': 'int', 'accept_none': True}, {'name': 'solver', 'type': 'select', 'values': ['lbfgs', 'newton-cg', 'liblinear', 'sag', 'saga'], 'dtype': 'string', 'accept_none': False}, {'name': 'max_iter', 'type': 'input', 'values': 100, 'dtype': 'int', 'accept_none': False}, {'name': 'multi_class', 'type': 'select', 'values': ['auto', 'ovr', 'multinomial'], 'dtype': 'string', 'accept_none': False}, {'name': 'verbose', 'type': 'input', 'values': 0, 'dtype': 'int', 'accept_none': False}, {'name': 'warm_start', 'type': 'select', 'values': [False, True], 'dtype': 'boolean', 'accept_none': False}, {'name': 'n_jobs', 'type': 'input', 'values': -1, 'dtype': 'int', 'accept_none': True}, {'name': 'l1_ratio', 'type': 'input', 'values': 0.5, 'dtype': 'float', 'accept_none': True}]
svc__params = [{'name': 'C', 'type': 'input', 'values': 1.0, 'dtype': 'float', 'accept_none': False}, {'name': 'kernel', 'type': 'select', 'values': ['rbf', 'poly', 'sigmoid', 'linear', 'precomputed'], 'dtype': 'string', 'accept_none': False}, {'name': 'degree', 'type': 'input', 'values': 3, 'dtype': 'int', 'accept_none': False}, {'name': 'gamma', 'type': 'select', 'values': ['scale', 'auto'], 'dtype': 'string', 'accept_none': False}, {'name': 'coef0', 'type': 'input', 'values': 0.0, 'dtype': 'float', 'accept_none': False}, {'name': 'shrinking', 'type': 'select', 'values': [True, False], 'dtype': 'boolean', 'accept_none': False}, {'name': 'probability', 'type': 'select', 'values': [False, True], 'dtype': 'boolean', 'accept_none': False}, {'name': 'tol', 'type': 'input', 'values': 0.001, 'dtype': 'float', 'accept_none': False}, {'name': 'cache_size', 'type': 'input', 'values': 200, 'dtype': 'float', 'accept_none': False}, {'name': 'class_weight', 'type': 'select', 'values': ['balanced'], 'dtype': 'string', 'accept_none': True}, {'name': 'verbose', 'type': 'select', 'values': [False, True], 'dtype': 'boolean', 'accept_none': False}, {'name': 'max_iter', 'type': 'input', 'values': -1, 'dtype': 'int', 'accept_none': False}, {'name': 'break_ties', 'type': 'select', 'values': [False, True], 'dtype': 'boolean', 'accept_none': False}, {'name': 'random_state', 'type': 'input', 'values': 101, 'dtype': 'int', 'accept_none': True}]
k_neighbors_classifier__params = [{'name': 'n_neighbors', 'type': 'input', 'values': 5, 'dtype': 'int', 'accept_none': False}, {'name': 'weights', 'type': 'select', 'values': ['uniform', 'distance'], 'dtype': 'string', 'accept_none': False}, {'name': 'algorithm', 'type': 'select', 'values': ['auto', 'ball_tree', 'kd_tree', 'brute'], 'dtype': 'string', 'accept_none': False}, {'name': 'leaf_size', 'type': 'input', 'values': 30, 'dtype': 'int', 'accept_none': False}, {'name': 'p', 'type': 'input', 'values': 2, 'dtype': 'int', 'accept_none': True}, {'name': 'metric', 'type': 'select', 'values': ['minkowski', 'euclidean', 'manhattan', 'chebyshev', 'mahalanobis'], 'dtype': 'string', 'accept_none': False}, {'name': 'n_jobs', 'type': 'input', 'values': -1, 'dtype': 'int', 'accept_none': True}]
decision_tree_classifier__params = [{'name': 'criterion', 'type': 'select', 'values': ['gini', 'entropy'], 'dtype': 'string', 'accept_none': False}, {'name': 'splitter', 'type': 'select', 'values': ['best', 'random'], 'dtype': 'string', 'accept_none': False}, {'name': 'max_depth', 'type': 'input', 'values': 5, 'dtype': 'int', 'accept_none': False}, {'name': 'min_samples_split', 'type': 'input', 'values': 2, 'dtype': 'int', 'accept_none': False}, {'name': 'min_samples_leaf', 'type': 'input', 'values': 1, 'dtype': 'int', 'accept_none': False}, {'name': 'min_weight_fraction_leaf', 'type': 'input', 'values': 0.0, 'dtype': 'float', 'accept_none': False}, {'name': 'max_features', 'type': 'select', 'values': ['auto', 'sqrt', 'log2'], 'dtype': 'string', 'accept_none': True}, {'name': 'random_state', 'type': 'input', 'values': 101, 'dtype': 'int', 'accept_none': True}, {'name': 'max_leaf_nodes', 'type': 'input', 'values': 5, 'dtype': 'int', 'accept_none': True}, {'name': 'min_impurity_decrease', 'type': 'input', 'values': 0.0, 'dtype': 'float', 'accept_none': True}, {'name': 'class_weight', 'type': 'select', 'values': ['balanced'], 'dtype': 'string', 'accept_none': True}, {'name': 'ccp_alpha', 'type': 'input', 'values': 0.0, 'dtype': 'float', 'accept_none': True}]
random_forest_classifier__params = [{'name': 'n_estimators', 'type': 'input', 'values': 100, 'dtype': 'int', 'accept_none': False}, {'name': 'criterion', 'type': 'select', 'values': ['gini', 'entropy'], 'dtype': 'string', 'accept_none': False}, {'name': 'max_depth', 'type': 'input', 'values': 5, 'dtype': 'int', 'accept_none': True}, {'name': 'min_samples_split', 'type': 'input', 'values': 2, 'dtype': 'int', 'accept_none': False}, {'name': 'min_samples_leaf', 'type': 'input', 'values': 1, 'dtype': 'int', 'accept_none': False}, {'name': 'min_weight_fraction_leaf', 'type': 'input', 'values': 0.0, 'dtype': 'float', 'accept_none': False}, {'name': 'max_features', 'type': 'select', 'values': ['auto', 'sqrt', 'log2'], 'dtype': 'string', 'accept_none': True}, {'name': 'max_leaf_nodes', 'type': 'input', 'values': 5, 'dtype': 'int', 'accept_none': True}, {'name': 'min_impurity_decrease', 'type': 'input', 'values': 0.0, 'dtype': 'float', 'accept_none': True}, {'name': 'bootstrap', 'type': 'select', 'values': [True, False], 'dtype': 'boolean', 'accept_none': False}, {'name': 'oob_score', 'type': 'select', 'values': [False, True], 'dtype': 'boolean', 'accept_none': False}, {'name': 'n_jobs', 'type': 'input', 'values': -1, 'dtype': 'int', 'accept_none': True}, {'name': 'random_state', 'type': 'input', 'values': 101, 'dtype': 'int', 'accept_none': True}, {'name': 'verbose', 'type': 'input', 'values': 0, 'dtype': 'int', 'accept_none': False}, {'name': 'warm_start', 'type': 'select', 'values': [False, True], 'dtype': 'boolean', 'accept_none': False}, {'name': 'class_weight', 'type': 'select', 'values': ['balanced', 'balanced_subsample'], 'dtype': 'string', 'accept_none': True}, {'name': 'ccp_alpha', 'type': 'input', 'values': 0.0, 'dtype': 'float', 'accept_none': True}, {'name': 'max_samples', 'type': 'input', 'values': '', 'dtype': 'int', 'accept_none': True}]
gradient_boosting_classifier__params = [{'name': 'loss', 'type': 'select', 'values': ['deviance', 'exponential'], 'dtype': 'string', 'accept_none': False}, {'name': 'learning_rate', 'type': 'input', 'values': 0.1, 'dtype': 'float', 'accept_none': False}, {'name': 'n_estimators', 'type': 'input', 'values': 100, 'dtype': 'int', 'accept_none': False}, {'name': 'subsample', 'type': 'input', 'values': 1.0, 'dtype': 'float', 'accept_none': False}, {'name': 'criterion', 'type': 'select', 'values': ['friedman_mse', 'squared_error', 'mae'], 'dtype': 'string', 'accept_none': False}, {'name': 'min_samples_split', 'type': 'input', 'values': 2, 'dtype': 'int', 'accept_none': False}, {'name': 'min_samples_leaf', 'type': 'input', 'values': 1, 'dtype': 'int', 'accept_none': False}, {'name': 'min_weight_fraction_leaf', 'type': 'input', 'values': 0.0, 'dtype': 'float', 'accept_none': False}, {'name': 'max_depth', 'type': 'input', 'values': 3, 'dtype': 'int', 'accept_none': False}, {'name': 'min_impurity_decrease', 'type': 'input', 'values': 0.0, 'dtype': 'float', 'accept_none': False}, {'name': 'random_state', 'type': 'input', 'values': 100, 'dtype': 'int', 'accept_none': True}, {'name': 'max_features', 'type': 'select', 'values': ['auto', 'sqrt', 'log2'], 'dtype': 'string', 'accept_none': True}, {'name': 'verbose', 'type': 'input', 'values': 0, 'dtype': 'int', 'accept_none': False}, {'name': 'max_leaf_nodes', 'type': 'input', 'values': 5, 'dtype': 'int', 'accept_none': True}, {'name': 'warm_start', 'type': 'select', 'values': [False, True], 'dtype': 'boolean', 'accept_none': False}, {'name': 'validation_fraction', 'type': 'input', 'values': 0.1, 'dtype': 'float', 'accept_none': False}, {'name': 'n_iter_no_change', 'type': 'input', 'values': 5, 'dtype': 'int', 'accept_none': True}, {'name': 'tol', 'type': 'input', 'values': 0.0001, 'dtype': 'float', 'accept_none': False}, {'name': 'ccp_alpha', 'type': 'input', 'values': 0.0, 'dtype': 'float', 'accept_none': False}]
ada_boost_classifier__params = [{'name': 'base_estimator', 'type': 'input', 'values': None, 'dtype': 'object', 'accept_none': True}, {'name': 'n_estimators', 'type': 'input', 'values': 50, 'dtype': 'int', 'accept_none': False}, {'name': 'learning_rate', 'type': 'input', 'values': 1.0, 'dtype': 'float', 'accept_none': False}, {'name': 'algorithm', 'type': 'select', 'values': ['SAMME.R', 'SAMME'], 'dtype': 'string', 'accept_none': False}, {'name': 'random_state', 'type': 'input', 'values': 1, 'dtype': 'int', 'accept_none': True}]
kmeans_clustering__params = [{'name': 'n_clusters', 'type': 'input', 'values': 8, 'dtype': 'int', 'accept_none': False}, {'name': 'init', 'type': 'select', 'values': ['k-means++', 'random'], 'dtype': 'string', 'accept_none': False}, {'name': 'n_init', 'type': 'input', 'values': 10, 'dtype': 'int', 'accept_none': False}, {'name': 'max_iter', 'type': 'input', 'values': 300, 'dtype': 'int', 'accept_none': False}, {'name': 'tol', 'type': 'float', 'values': 0.0001, 'dtype': 'float', 'accept_none': False}, {'name': 'verbose', 'type': 'input', 'values': 0, 'dtype': 'int', 'accept_none': False}, {'name': 'random_state', 'type': 'input', 'values': 1, 'dtype': 'int', 'accept_none': True}, {'name': 'copy_x', 'type': 'select', 'values': [True, False], 'dtype': 'boolean', 'accept_none': False}, {'name': 'algorithm', 'type': 'select', 'values': ['auto', 'full', 'elkan'], 'dtype': 'string', 'accept_none': False}]
dbscan_clustering__params = [{'name': 'eps', 'type': 'float', 'values': 0.5, 'dtype': 'float', 'accept_none': False}, {'name': 'min_samples', 'type': 'input', 'values': 5, 'dtype': 'int', 'accept_none': False}, {'name': 'metric', 'type': 'select', 'values': ['euclidean', 'cityblock', 'cosine', 'l1', 'l2', 'manhattan'], 'dtype': 'string', 'accept_none': False}, {'name': 'algorithm', 'type': 'select', 'values': ['auto', 'ball_tree', 'kd_tree', 'brute'], 'dtype': 'string', 'accept_none': False}, {'name': 'leaf_size', 'type': 'input', 'values': 30, 'dtype': 'int', 'accept_none': False}, {'name': 'n_jobs', 'type': 'input', 'values': -1, 'dtype': 'int', 'accept_none': True}]
agglomerative_clustering__params = [{'name': 'n_clusters', 'type': 'input', 'values': 2, 'dtype': 'int', 'accept_none': True}, {'name': 'affinity', 'type': 'select', 'values': ['euclidean'], 'dtype': 'string', 'accept_none': False}, {'name': 'compute_full_tree', 'type': 'select', 'values': ['auto'], 'dtype': 'string', 'accept_none': False}, {'name': 'linkage', 'type': 'select', 'values': ['ward', 'complete', 'average', 'single'], 'dtype': 'string', 'accept_none': False}, {'name': 'compute_distances', 'type': 'select', 'values': [False, True], 'dtype': 'boolean', 'accept_none': False}]
params__mappings = {'true': True, 'false': False}
|
class Person:
__key = None
__cipher_algorithm = None
def get_key(self):
return self.__key
def set_key(self, new_key):
self.__key = new_key
def operate_cipher(self, encrypted_text):
pass
def set_cipher_algorithm(self, cipher_algorithm):
self.__cipher_algorithm = cipher_algorithm
def get_cipher_algorithm(self):
return self.__cipher_algorithm
|
class Person:
__key = None
__cipher_algorithm = None
def get_key(self):
return self.__key
def set_key(self, new_key):
self.__key = new_key
def operate_cipher(self, encrypted_text):
pass
def set_cipher_algorithm(self, cipher_algorithm):
self.__cipher_algorithm = cipher_algorithm
def get_cipher_algorithm(self):
return self.__cipher_algorithm
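# Illustrative sketch (assumption, not part of the original code): Person reads as
# a base class for cipher participants, with operate_cipher left as a stub to be
# overridden. A minimal lowercase-Caesar subclass might look like this.
class CaesarReceiver(Person):
    def operate_cipher(self, encrypted_text):
        shift = self.get_key()
        return ''.join(chr((ord(c) - ord('a') - shift) % 26 + ord('a')) for c in encrypted_text)

receiver = CaesarReceiver()
receiver.set_key(3)
print(receiver.operate_cipher('khoor'))  # -> 'hello'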
|
def sum_list_values(list_values):
    return sum(list_values)
def symbolic_to_octal(perm_string):
    # Octal weight of each permission character.
    perms = {"r": 4, "w": 2, "x": 1, "-": 0}
    symb_to_octal = []
    # Score the owner, group and other triplets separately.
    slicing_values = {"0": perm_string[:3], "1": perm_string[3:6], "2": perm_string[6:9]}
    for perms_key, triplet in slicing_values.items():
        triplet_values = [perms[v] for v in triplet]
        symb_to_octal.append(sum_list_values(triplet_values))
    # Join the three digits into one number, e.g. [7, 5, 2] -> 752.
    return int("".join(str(digit) for digit in symb_to_octal))
assert symbolic_to_octal('rwxr-x-w-') == 752
print(symbolic_to_octal('rwxr-x-w-'))
|
def sum_list_values(list_values):
    return sum(list_values)
def symbolic_to_octal(perm_string):
    # Octal weight of each permission character.
    perms = {'r': 4, 'w': 2, 'x': 1, '-': 0}
    symb_to_octal = []
    # Score the owner, group and other triplets separately.
    slicing_values = {'0': perm_string[:3], '1': perm_string[3:6], '2': perm_string[6:9]}
    for (perms_key, triplet) in slicing_values.items():
        triplet_values = [perms[v] for v in triplet]
        symb_to_octal.append(sum_list_values(triplet_values))
    # Join the three digits into one number, e.g. [7, 5, 2] -> 752.
    return int(''.join(str(digit) for digit in symb_to_octal))
print(symbolic_to_octal('rwxr-x-w-'))
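# A couple of extra worked examples for the corrected conversion above
# (illustrative only): 'rw-r--r--' scores 6/4/4 and 'rwxrwxrwx' scores 7/7/7.
assert symbolic_to_octal('rw-r--r--') == 644
assert symbolic_to_octal('rwxrwxrwx') == 777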
|
print(str(b'ABC'.count(b'A')))
print(str(b'ABC'.count(b'AB')))
print(str(b'ABC'.count(b'AC')))
print(str(b'AbcA'.count(b'A')))
print(str(b'AbcAbcAbc'.count(b'A', 3)))
print(str(b'AbcAbcAbc'.count(b'A', 3, 5)))
print()
print(str(bytearray(b'ABC').count(b'A')))
print(str(bytearray(b'ABC').count(b'AB')))
print(str(bytearray(b'ABC').count(b'AC')))
print(str(bytearray(b'AbcA').count(b'A')))
print(str(bytearray(b'AbcAbcAbc').count(b'A', 3)))
print(str(bytearray(b'AbcAbcAbc').count(b'A', 3, 5)))
print()
print(str(bytearray(b'ABC').count(bytearray(b'A'))))
print(str(bytearray(b'ABC').count(bytearray(b'AB'))))
print(str(bytearray(b'ABC').count(bytearray(b'AC'))))
print(str(bytearray(b'AbcA').count(bytearray(b'A'))))
print(str(bytearray(b'AbcAbcAbc').count(bytearray(b'A'), 3)))
print(str(bytearray(b'AbcAbcAbc').count(bytearray(b'A'), 3, 5)))
|
print(str(b'ABC'.count(b'A')))
print(str(b'ABC'.count(b'AB')))
print(str(b'ABC'.count(b'AC')))
print(str(b'AbcA'.count(b'A')))
print(str(b'AbcAbcAbc'.count(b'A', 3)))
print(str(b'AbcAbcAbc'.count(b'A', 3, 5)))
print()
print(str(bytearray(b'ABC').count(b'A')))
print(str(bytearray(b'ABC').count(b'AB')))
print(str(bytearray(b'ABC').count(b'AC')))
print(str(bytearray(b'AbcA').count(b'A')))
print(str(bytearray(b'AbcAbcAbc').count(b'A', 3)))
print(str(bytearray(b'AbcAbcAbc').count(b'A', 3, 5)))
print()
print(str(bytearray(b'ABC').count(bytearray(b'A'))))
print(str(bytearray(b'ABC').count(bytearray(b'AB'))))
print(str(bytearray(b'ABC').count(bytearray(b'AC'))))
print(str(bytearray(b'AbcA').count(bytearray(b'A'))))
print(str(bytearray(b'AbcAbcAbc').count(bytearray(b'A'), 3)))
print(str(bytearray(b'AbcAbcAbc').count(bytearray(b'A'), 3, 5)))
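# The calls above exercise bytes.count and bytearray.count, including the optional
# start/end offsets. A few of the printed results captured as assertions (illustrative):
assert b'ABC'.count(b'A') == 1
assert b'AbcA'.count(b'A') == 2
assert b'AbcAbcAbc'.count(b'A', 3) == 2     # search starts at index 3
assert b'AbcAbcAbc'.count(b'A', 3, 5) == 1  # only indices 3..4 are searched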
|
config ={
'CONTEXT' : 'We are in DEV context',
'Log_bucket' : 'gc://bucketname_great',
'versionNR' : 'v12.236',
'zone' : 'europe-west1-d',
}
|
config = {'CONTEXT': 'We are in DEV context', 'Log_bucket': 'gc://bucketname_great', 'versionNR': 'v12.236', 'zone': 'europe-west1-d'}
|
def classify(number):
if number < 1:
raise ValueError("Value too small")
aliquot = 0
for i in range(number-1):
if number % (i+1) == 0:
aliquot += i+1
return "perfect" if aliquot == number else "abundant" if aliquot > number else "deficient"
|
def classify(number):
if number < 1:
        raise ValueError('Value too small')
aliquot = 0
for i in range(number - 1):
if number % (i + 1) == 0:
aliquot += i + 1
return 'perfect' if aliquot == number else 'abundant' if aliquot > number else 'deficient'
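# Quick worked examples for classify() (illustrative): 6 = 1 + 2 + 3 is perfect,
# 12 < 1 + 2 + 3 + 4 + 6 = 16 is abundant, and 8 > 1 + 2 + 4 = 7 is deficient.
assert classify(6) == 'perfect'
assert classify(12) == 'abundant'
assert classify(8) == 'deficient'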
|
# File: taniumrest_consts.py
# Copyright (c) 2019-2021 Splunk Inc.
#
# Licensed under Apache 2.0 (https://www.apache.org/licenses/LICENSE-2.0.txt)
SESSION_URL = "/api/v2/session/login"
TANIUMREST_GET_SAVED_QUESTIONS = "/api/v2/saved_questions"
TANIUMREST_GET_QUESTIONS = "/api/v2/questions"
TANIUMREST_GET_QUESTION_RESULTS = "/api/v2/result_data/question/{question_id}"
TANIUMREST_PARSE_QUESTION = "/api/v2/parse_question"
TANIUMREST_EXECUTE_ACTION = "/api/v2/saved_actions"
TANIUMREST_GET_ACTION_GROUP = "/api/v2/action_groups/by-name/{action_group}"
TANIUMREST_GET_GROUP = "/api/v2/groups/by-name/{group_name}"
TANIUMREST_GET_PACKAGE = "/api/v2/packages/by-name/{package}"
TANIUMREST_GET_SAVED_QUESTION = "/api/v2/saved_questions/by-name/{saved_question}"
TANIUMREST_GET_SENSOR_BY_NAME = "/api/v2/sensors/by-name/{sensor_name}"
TANIUMREST_GET_SAVED_QUESTION_RESULT = "/api/v2/result_data/saved_question/{saved_question_id}"
WAIT_SECONDS = 5
TANIUMREST_RESULTS_UNAVAILABLE = ["[current results unavailable]", "[current result unavailable]", "[results currently unavailable]"]
# Constants relating to 'get_error_message_from_exception'
ERR_CODE_MSG = "Error code unavailable"
ERR_MSG_UNAVAILABLE = "Error message unavailable. Please check the asset configuration and|or action parameters"
TYPE_ERR_MSG = "Error occurred while connecting to the Tanium Server. Please check the asset configuration and|or action parameters"
# Constants relating to 'validate_integer'
INVALID_INT_ERR_MSG = "Please provide a valid integer value in the {}"
INVALID_NON_NEG_INT_ERR_MSG = "Please provide a valid non-negative integer value in the {}"
INVALID_NON_NEG_NON_ZERO_ERR_MSG = "Please provide a valid non-zero non-negative integer value in the {}"
EXPIRE_SECONDS_KEY = "'expire_seconds' action parameter"
DISTRIBUTE_SECONDS_KEY = "'distribute_seconds' action parameter"
ISSUE_SECONDS_KEY = "'issue_seconds' action parameter"
TIMEOUT_SECONDS_KEY = "'timeout_seconds' action parameter"
RETURN_WHEN_N_RESULTS_AVAILABLE_KEY = "'return_when_n_results_available' action parameter"
WAIT_FOR_N_RESULTS_AVAILABLE_KEY = "'wait_for_n_results_available' action parameter"
RESULTS_PERCENTAGE_KEY = "'Consider question results complete at' configuration parameter"
QUESTION_ID_KEY = "'question_id' action parameter"
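# Illustrative usage sketch (an assumption, not part of the original module): the
# templated endpoints above are presumably filled in with str.format before a
# request is made, and the *_KEY strings plug into the validate_integer messages.
question_id = 42  # hypothetical value
print(TANIUMREST_GET_QUESTION_RESULTS.format(question_id=question_id))
# -> /api/v2/result_data/question/42
print(INVALID_INT_ERR_MSG.format(QUESTION_ID_KEY))
# -> Please provide a valid integer value in the 'question_id' action parameter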
|
session_url = '/api/v2/session/login'
taniumrest_get_saved_questions = '/api/v2/saved_questions'
taniumrest_get_questions = '/api/v2/questions'
taniumrest_get_question_results = '/api/v2/result_data/question/{question_id}'
taniumrest_parse_question = '/api/v2/parse_question'
taniumrest_execute_action = '/api/v2/saved_actions'
taniumrest_get_action_group = '/api/v2/action_groups/by-name/{action_group}'
taniumrest_get_group = '/api/v2/groups/by-name/{group_name}'
taniumrest_get_package = '/api/v2/packages/by-name/{package}'
taniumrest_get_saved_question = '/api/v2/saved_questions/by-name/{saved_question}'
taniumrest_get_sensor_by_name = '/api/v2/sensors/by-name/{sensor_name}'
taniumrest_get_saved_question_result = '/api/v2/result_data/saved_question/{saved_question_id}'
wait_seconds = 5
taniumrest_results_unavailable = ['[current results unavailable]', '[current result unavailable]', '[results currently unavailable]']
err_code_msg = 'Error code unavailable'
err_msg_unavailable = 'Error message unavailable. Please check the asset configuration and|or action parameters'
type_err_msg = 'Error occurred while connecting to the Tanium Server. Please check the asset configuration and|or action parameters'
invalid_int_err_msg = 'Please provide a valid integer value in the {}'
invalid_non_neg_int_err_msg = 'Please provide a valid non-negative integer value in the {}'
invalid_non_neg_non_zero_err_msg = 'Please provide a valid non-zero non-negative integer value in the {}'
expire_seconds_key = "'expire_seconds' action parameter"
distribute_seconds_key = "'distribute_seconds' action parameter"
issue_seconds_key = "'issue_seconds' action parameter"
timeout_seconds_key = "'timeout_seconds' action parameter"
return_when_n_results_available_key = "'return_when_n_results_available' action parameter"
wait_for_n_results_available_key = "'wait_for_n_results_available' action parameter"
results_percentage_key = "'Consider question results complete at' configuration parameter"
question_id_key = "'question_id' action parameter"
|
Text = 'text'
Audio = 'audio'
Document = 'document'
Animation = 'animation'
Game = 'game'
Photo = 'photo'
Sticker = 'sticker'
Video = 'video'
Voice = 'voice'
VideoNote = 'video_note'
Contact = 'contact'
Dice = 'dice'
Location = 'location'
Venue = 'venue'
Poll = 'poll'
NewChatMembers = 'new_chat_members'
LeftChatMember = 'left_chat_member'
NewChatTitle = 'new_chat_title'
NewChatPhoto = 'new_chat_photo'
DeleteChatPhoto = 'delete_chat_photo'
GroupChatCreated = 'group_chat_created'
SupergroupChatCreated = 'supergroup_chat_created'
ChannelChatCreated = 'channel_chat_created'
MigrateToChatId = 'migrate_to_chat_id'
MigrateFromChatId = 'migrate_from_chat_id'
PinnedMessage = 'pinned_message'
Invoice = 'invoice'
SuccessfulPayment = 'successful_payment'
PassportData = 'passport_data'
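# Illustrative sketch (assumption, not part of the original code): the constants
# mirror Telegram message field names, so a handler could detect the content type
# of an update by checking which key is present. 'message' here is a hypothetical
# dict shaped like a Telegram Message object.
CONTENT_TYPES = (Text, Audio, Document, Animation, Game, Photo, Sticker, Video,
                 Voice, VideoNote, Contact, Dice, Location, Venue, Poll)

def detect_content_type(message):
    for content_type in CONTENT_TYPES:
        if content_type in message:
            return content_type
    return None

print(detect_content_type({'text': 'hello'}))  # -> 'text'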
|
text = 'text'
audio = 'audio'
document = 'document'
animation = 'animation'
game = 'game'
photo = 'photo'
sticker = 'sticker'
video = 'video'
voice = 'voice'
video_note = 'video_note'
contact = 'contact'
dice = 'dice'
location = 'location'
venue = 'venue'
poll = 'poll'
new_chat_members = 'new_chat_members'
left_chat_member = 'left_chat_member'
new_chat_title = 'new_chat_title'
new_chat_photo = 'new_chat_photo'
delete_chat_photo = 'delete_chat_photo'
group_chat_created = 'group_chat_created'
supergroup_chat_created = 'supergroup_chat_created'
channel_chat_created = 'channel_chat_created'
migrate_to_chat_id = 'migrate_to_chat_id'
migrate_from_chat_id = 'migrate_from_chat_id'
pinned_message = 'pinned_message'
invoice = 'invoice'
successful_payment = 'successful_payment'
passport_data = 'passport_data'
|
class A:
def met(self):
print("this is a method from class A")
class B(A):
def met(self):
print("this is a method from class B")
class C(A):
def met(self):
print("this is a method from class C")
class D(C,B):
def met(self):
print("this is a method from class D")
a = A()
b = B()
c = C()
d = D()
d.met()
|
class A:
def met(self):
print('this is a method from class A')
class B(A):
def met(self):
print('this is a method from class B')
class C(A):
def met(self):
print('this is a method from class C')
class D(C, B):
def met(self):
print('this is a method from class D')
a = A()
b = B()
c = C()
d = D()
d.met()
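# The D(C, B) diamond above is a method-resolution-order (MRO) demo: d.met() resolves
# to D.met, and Python's C3 linearisation visits D, then C, then B, then A (a quick
# illustrative check, not part of the original snippet).
print(D.__mro__)  # -> D, C, B, A, object (module-qualified in the real output)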
|
# -*- coding: utf-8 -*-
class Solution:
def nthPersonGetsNthSeat(self, n):
return 1 if n == 1 else 0.5
if __name__ == '__main__':
solution = Solution()
assert 1 == solution.nthPersonGetsNthSeat(1)
assert 0.5 == solution.nthPersonGetsNthSeat(2)
assert 0.5 == solution.nthPersonGetsNthSeat(3)
|
class Solution:
def nth_person_gets_nth_seat(self, n):
return 1 if n == 1 else 0.5
if __name__ == '__main__':
    solution = Solution()
    assert 1 == solution.nth_person_gets_nth_seat(1)
    assert 0.5 == solution.nth_person_gets_nth_seat(2)
    assert 0.5 == solution.nth_person_gets_nth_seat(3)
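# Illustrative sanity check (not part of the original solution): the 1-or-0.5 answer
# is the classic "lost boarding pass" result, and a quick Monte Carlo run should land
# near 0.5 for any n >= 2.
import random

def simulate(n, trials=20000):
    hits = 0
    for _ in range(trials):
        seats = list(range(n))
        taken = {random.choice(seats)}          # passenger 0 sits anywhere
        for p in range(1, n - 1):               # passengers 1 .. n-2
            if p not in taken:
                taken.add(p)                    # own seat is free
            else:
                taken.add(random.choice([s for s in seats if s not in taken]))
        hits += (n - 1) not in taken            # last passenger finds their own seat
    return hits / trials

print(simulate(10))  # expected to hover around 0.5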
|
# Create a function that takes a number num and returns its length.
def number_length(num):
    if num is not None:
count = 1
val = num
while(val // 10 != 0):
count += 1
val = val // 10
return count
print(number_length(392))
|
def number_length(num):
    if num is not None:
count = 1
val = num
while val // 10 != 0:
count += 1
val = val // 10
return count
print(number_length(392))
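# Note (illustrative): the loop above assumes a non-negative integer; a negative
# value never reaches 0 under floor division, so the loop would not terminate.
# An equivalent one-liner that also copes with negatives:
print(len(str(abs(int(-392)))))  # -> 3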
|
class Node:
def __init__(self, name):
self.data = name
self.nextnode = None
def remove(self, data, previous):
if self.data == data:
previous.nextnode = self.nextnode
del self.data
else:
if self.nextnode is not None:
self.nextnode.remove(data, self)
|
class Node:
def __init__(self, name):
self.data = name
self.nextnode = None
def remove(self, data, previous):
if self.data == data:
previous.nextnode = self.nextnode
del self.data
elif self.nextnode is not None:
self.nextnode.remove(data, self)
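# Illustrative usage sketch (not part of the original code): remove() walks the
# chain recursively, and 'previous' is only consulted once the matching node is
# found, so the head can serve as the initial placeholder as long as it is not
# the node being removed.
head = Node('a')
head.nextnode = Node('b')
head.nextnode.nextnode = Node('c')
head.remove('b', head)
print(head.nextnode.data)  # -> 'c'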
|
expected_output = {
'vrf':
{'VRF1':
{'address_family':
{'ipv6': {}}},
'blue':
{'address_family':
{'ipv6':
{'multicast_group':
{'ff30::/12':
{'source_address':
{'*':
{'flags': 'ipv6 pim6',
'incoming_interface_list':
{'Null':
{'rpf_nbr': '0::'}},
'oil_count': '0',
'uptime': '10w5d'}}}}}}},
'default':
{'address_family':
{'ipv6':
{'multicast_group':
{'ff03:3::/64':
{'source_address':
{'*':
{'bidir': True,
'flags': 'pim6',
'incoming_interface_list':
{'Null':
{'rpf_nbr': '0::'}},
'oil_count': '0',
'uptime': '10w5d'}}},
'ff30::/12':
{'source_address':
{'*':
{'flags': 'ipv6 pim6',
'incoming_interface_list':
{'Null':
{'rpf_nbr': '0::'}},
'oil_count': '0',
'uptime': '10w5d'}}}}}}}}}
|
expected_output = {'vrf': {'VRF1': {'address_family': {'ipv6': {}}}, 'blue': {'address_family': {'ipv6': {'multicast_group': {'ff30::/12': {'source_address': {'*': {'flags': 'ipv6 pim6', 'incoming_interface_list': {'Null': {'rpf_nbr': '0::'}}, 'oil_count': '0', 'uptime': '10w5d'}}}}}}}, 'default': {'address_family': {'ipv6': {'multicast_group': {'ff03:3::/64': {'source_address': {'*': {'bidir': True, 'flags': 'pim6', 'incoming_interface_list': {'Null': {'rpf_nbr': '0::'}}, 'oil_count': '0', 'uptime': '10w5d'}}}, 'ff30::/12': {'source_address': {'*': {'flags': 'ipv6 pim6', 'incoming_interface_list': {'Null': {'rpf_nbr': '0::'}}, 'oil_count': '0', 'uptime': '10w5d'}}}}}}}}}
|
# The URL we will use when accessing a gulag API instance.
api_url: str = "cmyui.codes"
# When set to True it will allow us to make unverified HTTPS requests. (Good for testing.)
unsafe_request: bool = False
|
api_url: str = 'cmyui.codes'
unsafe_request: bool = False
|
{
'target_defaults': {
'cflags': [
'-Wunused',
'-Wshadow',
'-Wextra',
],
},
'targets': [
# D-Bus code generator.
{
'target_name': 'dbus_code_generator',
'type': 'none',
'variables': {
'dbus_service_config': 'dbus_bindings/dbus-service-config.json',
'dbus_adaptors_out_dir': 'include/authpolicy',
},
'sources': [
'dbus_bindings/org.chromium.AuthPolicy.xml',
],
'includes': ['../common-mk/generate-dbus-adaptors.gypi'],
},
# Container protos
{
'target_name': 'container-protos',
'type': 'static_library',
'variables': {
'proto_in_dir': 'proto',
'proto_out_dir': 'include/bindings',
},
'sources': [
'<(proto_in_dir)/authpolicy_containers.proto',
],
'includes': ['../common-mk/protoc.gypi'],
},
# Autogenerated policy sources
{
'target_name': 'policy_code_generator',
'type': 'none',
'hard_dependency': 1,
'variables': {
'policy_tools_dir': '<(sysroot)/usr/share/policy_tools',
'policy_resources_dir': '<(sysroot)/usr/share/policy_resources',
'out_dir': '<(SHARED_INTERMEDIATE_DIR)/include/bindings',
},
'actions': [{
'action_name': 'run_generate_script',
'inputs': [
'<(policy_tools_dir)/generate_policy_source.py',
'<(policy_resources_dir)/policy_templates.json',
'<(policy_resources_dir)/VERSION',
],
'outputs': [
'<(out_dir)/policy_constants.h',
'<(out_dir)/policy_constants.cc',
],
'action': [
'python', '<(policy_tools_dir)/generate_policy_source.py',
'--cros-policy-constants-header=<(out_dir)/policy_constants.h',
'--cros-policy-constants-source=<(out_dir)/policy_constants.cc',
'<(policy_resources_dir)/VERSION',
'<(OS)',
'1', # chromeos-flag
'<(policy_resources_dir)/policy_templates.json',
],
}],
},
# Authpolicy library.
{
'target_name': 'libauthpolicy',
'type': 'static_library',
'dependencies': [
'../common-mk/external_dependencies.gyp:policy-protos',
'../common-mk/external_dependencies.gyp:user_policy-protos',
'container-protos',
'dbus_code_generator',
'policy_code_generator',
],
'variables': {
'gen_src_in_dir': '<(SHARED_INTERMEDIATE_DIR)/include/bindings',
'deps': [
'dbus-1',
'libbrillo-<(libbase_ver)',
'libchrome-<(libbase_ver)',
],
},
'sources': [
'<(gen_src_in_dir)/policy_constants.cc',
'authpolicy.cc',
'authpolicy_metrics.cc',
'constants.cc',
'jail_helper.cc',
'path_service.cc',
'platform_helper.cc',
'policy/device_policy_encoder.cc',
'policy/policy_encoder_helper.cc',
'policy/preg_policy_encoder.cc',
'policy/user_policy_encoder.cc',
'process_executor.cc',
'samba_helper.cc',
'samba_interface.cc',
'tgt_manager.cc',
],
},
# Parser tool.
{
'target_name': 'authpolicy_parser',
'type': 'executable',
'dependencies': ['libauthpolicy'],
'variables': {
'deps': [
'libbrillo-<(libbase_ver)',
'libcap',
'libchrome-<(libbase_ver)',
'libmetrics-<(libbase_ver)',
'libminijail',
'protobuf-lite',
],
},
'sources': [
'authpolicy_parser_main.cc',
],
},
# Authpolicy daemon executable.
{
'target_name': 'authpolicyd',
'type': 'executable',
'dependencies': [
'libauthpolicy',
'authpolicy_parser',
],
'variables': {
'deps': [
'libbrillo-<(libbase_ver)',
'libcap',
'libchrome-<(libbase_ver)',
'libmetrics-<(libbase_ver)',
'libminijail',
# system_api depends on protobuf (or protobuf-lite). It must appear
# before protobuf or the linker flags won't be in the right order.
'system_api',
'protobuf-lite',
],
},
'sources': ['authpolicy_main.cc'],
'link_settings': {
'libraries': [
'-linstallattributes-<(libbase_ver)',
],
},
},
],
# Unit tests.
'conditions': [
['USE_test == 1', {
'targets': [
{
'target_name': 'authpolicy_test',
'type': 'executable',
'includes': ['../common-mk/common_test.gypi'],
'defines': ['UNIT_TEST'],
'dependencies': [
'libauthpolicy',
'stub_common',
],
'variables': {
'deps': [
'libbrillo-<(libbase_ver)',
'libcap',
'libchrome-<(libbase_ver)',
'libchrome-test-<(libbase_ver)',
'libmetrics-<(libbase_ver)',
'libminijail',
# system_api depends on protobuf (or protobuf-lite). It must
# appear before protobuf or the linker flags won't be in the right
# order.
'system_api',
'protobuf-lite',
],
},
'sources': [
'authpolicy_testrunner.cc',
'authpolicy_unittest.cc',
'policy/device_policy_encoder_unittest.cc',
'policy/user_policy_encoder_unittest.cc',
'process_executor_unittest.cc',
'samba_helper_unittest.cc',
],
},
{
'target_name': 'stub_common',
'type': 'static_library',
'variables': {
'deps': [
'libchrome-<(libbase_ver)',
],
},
'sources': ['stub_common.cc'],
},
{
'target_name': 'stub_net',
'type': 'executable',
'dependencies': [
'libauthpolicy',
'stub_common',
],
'variables': {
'deps': [
'libcap',
'libchrome-<(libbase_ver)',
],
},
'sources': ['stub_net_main.cc'],
},
{
'target_name': 'stub_kinit',
'type': 'executable',
'dependencies': [
'libauthpolicy',
'stub_common',
],
'variables': {
'deps': [
'libcap',
'libchrome-<(libbase_ver)',
],
},
'sources': ['stub_kinit_main.cc'],
},
{
'target_name': 'stub_klist',
'type': 'executable',
'dependencies': [
'libauthpolicy',
'stub_common',
],
'variables': {
'deps': ['libchrome-<(libbase_ver)'],
},
'sources': ['stub_klist_main.cc'],
},
{
'target_name': 'stub_smbclient',
'type': 'executable',
'dependencies': [
'libauthpolicy',
'stub_common',
],
'variables': {
'deps': ['libchrome-<(libbase_ver)'],
},
'sources': ['stub_smbclient_main.cc'],
},
],
}],
],
}
|
{'target_defaults': {'cflags': ['-Wunused', '-Wshadow', '-Wextra']}, 'targets': [{'target_name': 'dbus_code_generator', 'type': 'none', 'variables': {'dbus_service_config': 'dbus_bindings/dbus-service-config.json', 'dbus_adaptors_out_dir': 'include/authpolicy'}, 'sources': ['dbus_bindings/org.chromium.AuthPolicy.xml'], 'includes': ['../common-mk/generate-dbus-adaptors.gypi']}, {'target_name': 'container-protos', 'type': 'static_library', 'variables': {'proto_in_dir': 'proto', 'proto_out_dir': 'include/bindings'}, 'sources': ['<(proto_in_dir)/authpolicy_containers.proto'], 'includes': ['../common-mk/protoc.gypi']}, {'target_name': 'policy_code_generator', 'type': 'none', 'hard_dependency': 1, 'variables': {'policy_tools_dir': '<(sysroot)/usr/share/policy_tools', 'policy_resources_dir': '<(sysroot)/usr/share/policy_resources', 'out_dir': '<(SHARED_INTERMEDIATE_DIR)/include/bindings'}, 'actions': [{'action_name': 'run_generate_script', 'inputs': ['<(policy_tools_dir)/generate_policy_source.py', '<(policy_resources_dir)/policy_templates.json', '<(policy_resources_dir)/VERSION'], 'outputs': ['<(out_dir)/policy_constants.h', '<(out_dir)/policy_constants.cc'], 'action': ['python', '<(policy_tools_dir)/generate_policy_source.py', '--cros-policy-constants-header=<(out_dir)/policy_constants.h', '--cros-policy-constants-source=<(out_dir)/policy_constants.cc', '<(policy_resources_dir)/VERSION', '<(OS)', '1', '<(policy_resources_dir)/policy_templates.json']}]}, {'target_name': 'libauthpolicy', 'type': 'static_library', 'dependencies': ['../common-mk/external_dependencies.gyp:policy-protos', '../common-mk/external_dependencies.gyp:user_policy-protos', 'container-protos', 'dbus_code_generator', 'policy_code_generator'], 'variables': {'gen_src_in_dir': '<(SHARED_INTERMEDIATE_DIR)/include/bindings', 'deps': ['dbus-1', 'libbrillo-<(libbase_ver)', 'libchrome-<(libbase_ver)']}, 'sources': ['<(gen_src_in_dir)/policy_constants.cc', 'authpolicy.cc', 'authpolicy_metrics.cc', 'constants.cc', 'jail_helper.cc', 'path_service.cc', 'platform_helper.cc', 'policy/device_policy_encoder.cc', 'policy/policy_encoder_helper.cc', 'policy/preg_policy_encoder.cc', 'policy/user_policy_encoder.cc', 'process_executor.cc', 'samba_helper.cc', 'samba_interface.cc', 'tgt_manager.cc']}, {'target_name': 'authpolicy_parser', 'type': 'executable', 'dependencies': ['libauthpolicy'], 'variables': {'deps': ['libbrillo-<(libbase_ver)', 'libcap', 'libchrome-<(libbase_ver)', 'libmetrics-<(libbase_ver)', 'libminijail', 'protobuf-lite']}, 'sources': ['authpolicy_parser_main.cc']}, {'target_name': 'authpolicyd', 'type': 'executable', 'dependencies': ['libauthpolicy', 'authpolicy_parser'], 'variables': {'deps': ['libbrillo-<(libbase_ver)', 'libcap', 'libchrome-<(libbase_ver)', 'libmetrics-<(libbase_ver)', 'libminijail', 'system_api', 'protobuf-lite']}, 'sources': ['authpolicy_main.cc'], 'link_settings': {'libraries': ['-linstallattributes-<(libbase_ver)']}}], 'conditions': [['USE_test == 1', {'targets': [{'target_name': 'authpolicy_test', 'type': 'executable', 'includes': ['../common-mk/common_test.gypi'], 'defines': ['UNIT_TEST'], 'dependencies': ['libauthpolicy', 'stub_common'], 'variables': {'deps': ['libbrillo-<(libbase_ver)', 'libcap', 'libchrome-<(libbase_ver)', 'libchrome-test-<(libbase_ver)', 'libmetrics-<(libbase_ver)', 'libminijail', 'system_api', 'protobuf-lite']}, 'sources': ['authpolicy_testrunner.cc', 'authpolicy_unittest.cc', 'policy/device_policy_encoder_unittest.cc', 'policy/user_policy_encoder_unittest.cc', 
'process_executor_unittest.cc', 'samba_helper_unittest.cc']}, {'target_name': 'stub_common', 'type': 'static_library', 'variables': {'deps': ['libchrome-<(libbase_ver)']}, 'sources': ['stub_common.cc']}, {'target_name': 'stub_net', 'type': 'executable', 'dependencies': ['libauthpolicy', 'stub_common'], 'variables': {'deps': ['libcap', 'libchrome-<(libbase_ver)']}, 'sources': ['stub_net_main.cc']}, {'target_name': 'stub_kinit', 'type': 'executable', 'dependencies': ['libauthpolicy', 'stub_common'], 'variables': {'deps': ['libcap', 'libchrome-<(libbase_ver)']}, 'sources': ['stub_kinit_main.cc']}, {'target_name': 'stub_klist', 'type': 'executable', 'dependencies': ['libauthpolicy', 'stub_common'], 'variables': {'deps': ['libchrome-<(libbase_ver)']}, 'sources': ['stub_klist_main.cc']}, {'target_name': 'stub_smbclient', 'type': 'executable', 'dependencies': ['libauthpolicy', 'stub_common'], 'variables': {'deps': ['libchrome-<(libbase_ver)']}, 'sources': ['stub_smbclient_main.cc']}]}]]}
|
class AbstractTransitionSystem:
def __init__(self, num_labels):
self.num_labels = num_labels
def num_actions(self):
raise NotImplementedError()
def state(self, num_tokens):
raise NotImplementedError()
def is_final(self, state):
raise NotImplementedError()
def extract_parse(self, state):
raise NotImplementedError()
def allowed(self, state):
raise NotImplementedError()
def reference_policy(self):
raise NotImplementedError()
def action_name(self, action_index):
return 'Action=' + str(action_index)
def describe_action(self, state, action_index):
        return self.action_name(action_index) + ' at ' + str(state)
def perform(self, state, action_index):
raise NotImplementedError()
class AbstractReferencePolicy:
def is_optimal(self):
raise NotImplementedError()
|
class Abstracttransitionsystem:
    def __init__(self, num_labels):
        self.num_labels = num_labels
    def num_actions(self):
        raise NotImplementedError()
    def state(self, num_tokens):
        raise NotImplementedError()
    def is_final(self, state):
        raise NotImplementedError()
    def extract_parse(self, state):
        raise NotImplementedError()
    def allowed(self, state):
        raise NotImplementedError()
    def reference_policy(self):
        raise NotImplementedError()
    def action_name(self, action_index):
        return 'Action=' + str(action_index)
    def describe_action(self, state, action_index):
        return self.action_name(action_index) + ' at ' + str(state)
    def perform(self, state, action_index):
        raise NotImplementedError()
class Abstractreferencepolicy:
    def is_optimal(self):
        raise NotImplementedError()
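# Illustrative sketch (assumption, not part of the original code): a minimal
# concrete subclass showing how the abstract interface above is meant to be filled
# in. The left-to-right "tagger" semantics are invented purely for the example.
class LeftToRightTagger(Abstracttransitionsystem):
    def num_actions(self):
        return self.num_labels
    def state(self, num_tokens):
        return {'num_tokens': num_tokens, 'tags': []}
    def is_final(self, state):
        return len(state['tags']) >= state['num_tokens']
    def extract_parse(self, state):
        return list(state['tags'])
    def allowed(self, state):
        return list(range(self.num_labels))
    def perform(self, state, action_index):
        state['tags'].append(action_index)
        return state

tagger = LeftToRightTagger(num_labels=3)
s = tagger.state(num_tokens=2)
s = tagger.perform(s, 1)
s = tagger.perform(s, 0)
print(tagger.is_final(s), tagger.extract_parse(s))  # -> True [1, 0]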
|
Import('defenv')
### Configuration options
cfg = Variables()
cfg.Add(
(
'NSIS_MAX_STRLEN',
'defines the maximum string length for internal variables and stack entries. 1024 should be plenty, but if you are doing crazy registry stuff, you might want to bump it up. Generally it adds about 16-32x the memory, so setting this to 4096 from 1024 will add around 64k of memory usage (not really a big deal, but not usually needed).',
1024
)
)
cfg.Add(
(
'NSIS_MAX_INST_TYPES',
'defines the maximum install types. Note that this should not exceed 32, ever.',
32
)
)
cfg.Add(
(
'NSIS_DEFAULT_LANG',
'defines the default language id NSIS will use if nothing else is defined in the script. Default value is 1033 which is English.',
1033
)
)
cfg.Add(
(
'NSIS_VARS_SECTION',
'defines the name of the PE section containing the runtime variables',
'.ndata'
)
)
cfg.Add(
BoolVariable(
'NSIS_CONFIG_UNINSTALL_SUPPORT',
"enables the uninstaller support. Turn it off if your installers don't need uninstallers. Adds less than 1kb.",
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_CONFIG_LICENSEPAGE',
'enables support for the installer to present a license page.',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_CONFIG_COMPONENTPAGE',
'enables support for the installer to present a page where you can select what sections are installed. with this disabled, all sections are installed by default',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_CONFIG_COMPONENTPAGE_ALTERNATIVE',
'enables an alternative components page behavior. Checkboxes will only be toggled when clicking on the checkbox itself and not on its label. .onMouseOverSection will only be called when the user selects the component and not when moving the mouse pointer over it.',
'no'
)
)
cfg.Add(
BoolVariable(
'NSIS_CONFIG_SILENT_SUPPORT',
'enables support for making installers that are completely silent.',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_CONFIG_VISIBLE_SUPPORT',
'enables support for making installers that are visible.',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_CONFIG_ENHANCEDUI_SUPPORT',
'enables support for CreateFont, SetCtlColors (used by some UIs), SetBrandingImage, .onGUIInit, etc.',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_CONFIG_COMPRESSION_SUPPORT',
'enables support for making installers that use compression (recommended).',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_COMPRESS_BZIP2_SMALLMODE',
"if defined, bzip2's decompressor uses bzip2's alternative decompression method that uses less runtime memory, at the expense of speed (and executable size). not recommended.",
'no'
)
)
cfg.Add(
(
'NSIS_COMPRESS_BZIP2_LEVEL',
'bzip2 compression window size. 1-9 is valid. 9 uses the most memory, but typically compresses best (recommended). 1 uses the least memory, but typically compresses the worst.',
9
)
)
cfg.Add(
BoolVariable(
'NSIS_CONFIG_CRC_SUPPORT',
'enables support for installer verification. HIGHLY recommended.',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_CONFIG_CRC_ANAL',
'makes the CRC verification extremely careful, meaning extra bytes on the end of file, or the first 512 bytes changing, will give error. Enable this if you are paranoid, otherwise leaving it off seems safe (and is less prone to reporting virii). If you will be digitally signing your installers, leave this off.',
'no'
)
)
cfg.Add(
BoolVariable(
'NSIS_CONFIG_LOG',
'enables the logging facility. turning this on (by uncommenting it) adds about 4kb, but can be useful in debugging your installers.',
'no'
)
)
cfg.Add(
BoolVariable(
'NSIS_CONFIG_LOG_ODS',
'makes the logging facility use OutputDebugString instead of a file.',
'no'
)
)
cfg.Add(
BoolVariable(
'NSIS_CONFIG_LOG_STDOUT',
'makes the logging facility use stdout instead of a file.',
'no'
)
)
cfg.Add(
BoolVariable(
'NSIS_CONFIG_LOG_TIMESTAMP',
'adds a timestamp to each log line.',
'no'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_BGBG',
'enables support for the blue (well, whatever color you want) gradient background window.',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_CODECALLBACKS',
'enables support for installer code callbacks. recommended, as it uses a minimum of space and allows for neat functionality.',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_MOVEONREBOOT',
        'enables support for uninstallers that automatically delete themselves from the temp directory, as well as the reboot moving/deleting modes of Delete and Rename. Adds about 512 bytes.',
'yes'
)
)
### Instruction enabling configuration
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_ACTIVEXREG',
'enables activeX plug-in registration and deregistration, as well as CallInstDLL',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_INTOPTS',
'enables support for IntCmp, IntCmpU, IntOp, and IntFmt.',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_STROPTS',
'enables support for StrCmp, StrCpy, and StrLen, as well as Get*Local.',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_STACK',
'enables support for the stack (Push, Pop, Exch)',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_FILEFUNCTIONS',
'enables support for FileOpen,FileClose, FileSeek, FileRead, and FileWrite.',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_FINDFIRST',
'enables support for FindFirst, FindNext, and FindClose.',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_CREATESHORTCUT',
'enables support for CreateShortCut.',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_INIFILES',
'enables support for ReadINIStr and WriteINIStr.',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_REGISTRYFUNCTIONS',
'enables support for ReadRegStr, ReadRegDWORD, WriteRegStr, etc etc etc.',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_COPYFILES',
'enables support for CopyFiles',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_REBOOT',
'enables support for Reboot, IfRebootFlag, SetRebootFlag',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_FNUTIL',
'enables support for GetFullPathName, GetTempFileName, and SearchPath',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_EXECUTE',
'enables support for Exec and ExecWait',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_SHELLEXECUTE',
'enables support for ExecShell',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_GETDLLVERSION',
'enables support for GetDLLVersion',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_GETFILETIME',
'enables support for GetFileTime',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_GETFONTVERSION',
'enables support for GetFontversion',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_GETFONTNAME',
'enables support for GetFontName',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_HWNDS',
'enables support for FindWindow, SendMessage, and IsWindow',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_ENVIRONMENT',
'enables support for ReadEnvStr and ExpandEnvStrings',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_RMDIR',
'enables support for RMDir',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_FILE',
'enables support for File (extracting files)',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_DELETE',
'enables support for Delete (delete files)',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_RENAME',
'enables support for Rename (rename files)',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_MESSAGEBOX',
'enables support for MessageBox',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_VERSION_INFO',
'enables support for version information in the installer',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_FIX_DEFINES_IN_STRINGS',
'fixes defines inside defines and handles chars $ perfectly',
'no'
)
)
cfg.Add(
BoolVariable(
'NSIS_SUPPORT_STANDARD_PREDEFINES',
'enables standard predefines - __FILE__, __LINE__, __DATE__, __TIME__ and __TIMESTAMP__',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_LOCKWINDOW_SUPPORT',
'enables the LockWindow command',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_CONFIG_PLUGIN_SUPPORT',
'enables installer plug-ins support',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_FIX_COMMENT_HANDLING',
'fixes comment handling',
'yes'
)
)
cfg.Add(
BoolVariable(
'NSIS_CONFIG_CONST_DATA_PATH',
'determines if plugins, includes, stubs etc. are located in a constant path set at build-time',
defenv['PLATFORM'] != 'win32'
)
)
### Generate help
Help(cfg.GenerateHelpText(defenv))
### Apply configuration
env = Environment()
cfg.Update(env)
def AddValuedDefine(define):
defenv.Append(NSIS_CPPDEFINES = [(define, env[define])])
def AddBoolDefine(define):
if env[define]:
defenv.Append(NSIS_CPPDEFINES = [define])
def AddStringDefine(define):
defenv.Append(NSIS_CPPDEFINES = [(define, '"%s"' % env[define])])
AddValuedDefine('NSIS_MAX_STRLEN')
AddValuedDefine('NSIS_MAX_INST_TYPES')
AddValuedDefine('NSIS_DEFAULT_LANG')
AddBoolDefine('NSIS_CONFIG_UNINSTALL_SUPPORT')
AddBoolDefine('NSIS_CONFIG_LICENSEPAGE')
AddBoolDefine('NSIS_CONFIG_COMPONENTPAGE')
AddBoolDefine('NSIS_CONFIG_COMPONENTPAGE_ALTERNATIVE')
AddBoolDefine('NSIS_CONFIG_SILENT_SUPPORT')
AddBoolDefine('NSIS_CONFIG_VISIBLE_SUPPORT')
AddBoolDefine('NSIS_CONFIG_ENHANCEDUI_SUPPORT')
AddBoolDefine('NSIS_CONFIG_COMPRESSION_SUPPORT')
AddBoolDefine('NSIS_COMPRESS_BZIP2_SMALLMODE')
AddValuedDefine('NSIS_COMPRESS_BZIP2_LEVEL')
AddBoolDefine('NSIS_CONFIG_CRC_SUPPORT')
AddBoolDefine('NSIS_CONFIG_CRC_ANAL')
AddBoolDefine('NSIS_CONFIG_LOG')
AddBoolDefine('NSIS_CONFIG_LOG_ODS')
AddBoolDefine('NSIS_CONFIG_LOG_STDOUT')
AddBoolDefine('NSIS_CONFIG_LOG_TIMESTAMP')
AddBoolDefine('NSIS_SUPPORT_BGBG')
AddBoolDefine('NSIS_SUPPORT_CODECALLBACKS')
AddBoolDefine('NSIS_SUPPORT_MOVEONREBOOT')
AddBoolDefine('NSIS_SUPPORT_ACTIVEXREG')
AddBoolDefine('NSIS_SUPPORT_INTOPTS')
AddBoolDefine('NSIS_SUPPORT_STROPTS')
AddBoolDefine('NSIS_SUPPORT_STACK')
AddBoolDefine('NSIS_SUPPORT_FILEFUNCTIONS')
AddBoolDefine('NSIS_SUPPORT_FINDFIRST')
AddBoolDefine('NSIS_SUPPORT_CREATESHORTCUT')
AddBoolDefine('NSIS_SUPPORT_INIFILES')
AddBoolDefine('NSIS_SUPPORT_REGISTRYFUNCTIONS')
AddBoolDefine('NSIS_SUPPORT_COPYFILES')
AddBoolDefine('NSIS_SUPPORT_REBOOT')
AddBoolDefine('NSIS_SUPPORT_FNUTIL')
AddBoolDefine('NSIS_SUPPORT_EXECUTE')
AddBoolDefine('NSIS_SUPPORT_SHELLEXECUTE')
AddBoolDefine('NSIS_SUPPORT_GETDLLVERSION')
AddBoolDefine('NSIS_SUPPORT_GETFILETIME')
AddBoolDefine('NSIS_SUPPORT_GETFONTVERSION')
AddBoolDefine('NSIS_SUPPORT_GETFONTNAME')
AddBoolDefine('NSIS_SUPPORT_HWNDS')
AddBoolDefine('NSIS_SUPPORT_ENVIRONMENT')
AddBoolDefine('NSIS_SUPPORT_RMDIR')
AddBoolDefine('NSIS_SUPPORT_FILE')
AddBoolDefine('NSIS_SUPPORT_DELETE')
AddBoolDefine('NSIS_SUPPORT_RENAME')
AddBoolDefine('NSIS_SUPPORT_MESSAGEBOX')
AddBoolDefine('NSIS_SUPPORT_VERSION_INFO')
AddBoolDefine('NSIS_FIX_DEFINES_IN_STRINGS')
AddBoolDefine('NSIS_SUPPORT_STANDARD_PREDEFINES')
AddBoolDefine('NSIS_LOCKWINDOW_SUPPORT')
AddBoolDefine('NSIS_CONFIG_PLUGIN_SUPPORT')
AddBoolDefine('NSIS_FIX_COMMENT_HANDLING')
AddBoolDefine('NSIS_CONFIG_CONST_DATA_PATH')
AddStringDefine('NSIS_VARS_SECTION')
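# Worked example of the helper functions above (a hand-traced illustration, not extra
# build logic): with the defaults NSIS_MAX_STRLEN=1024 and NSIS_CONFIG_LOG='no',
# AddValuedDefine('NSIS_MAX_STRLEN') appends ('NSIS_MAX_STRLEN', 1024) to NSIS_CPPDEFINES,
# AddBoolDefine('NSIS_CONFIG_LOG') appends nothing because env['NSIS_CONFIG_LOG'] is false,
# and AddStringDefine('NSIS_VARS_SECTION') appends ('NSIS_VARS_SECTION', '".ndata"').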
|
import('defenv')
cfg = variables()
cfg.Add(('NSIS_MAX_STRLEN', 'defines the maximum string length for internal variables and stack entries. 1024 should be plenty, but if you are doing crazy registry stuff, you might want to bump it up. Generally it adds about 16-32x the memory, so setting this to 4096 from 1024 will add around 64k of memory usage (not really a big deal, but not usually needed).', 1024))
cfg.Add(('NSIS_MAX_INST_TYPES', 'defines the maximum install types. Note that this should not exceed 32, ever.', 32))
cfg.Add(('NSIS_DEFAULT_LANG', 'defines the default language id NSIS will use if nothing else is defined in the script. Default value is 1033 which is English.', 1033))
cfg.Add(('NSIS_VARS_SECTION', 'defines the name of the PE section containing the runtime variables', '.ndata'))
cfg.Add(bool_variable('NSIS_CONFIG_UNINSTALL_SUPPORT', "enables the uninstaller support. Turn it off if your installers don't need uninstallers. Adds less than 1kb.", 'yes'))
cfg.Add(bool_variable('NSIS_CONFIG_LICENSEPAGE', 'enables support for the installer to present a license page.', 'yes'))
cfg.Add(bool_variable('NSIS_CONFIG_COMPONENTPAGE', 'enables support for the installer to present a page where you can select what sections are installed. with this disabled, all sections are installed by default', 'yes'))
cfg.Add(bool_variable('NSIS_CONFIG_COMPONENTPAGE_ALTERNATIVE', 'enables an alternative components page behavior. Checkboxes will only be toggled when clicking on the checkbox itself and not on its label. .onMouseOverSection will only be called when the user selects the component and not when moving the mouse pointer over it.', 'no'))
cfg.Add(bool_variable('NSIS_CONFIG_SILENT_SUPPORT', 'enables support for making installers that are completely silent.', 'yes'))
cfg.Add(bool_variable('NSIS_CONFIG_VISIBLE_SUPPORT', 'enables support for making installers that are visible.', 'yes'))
cfg.Add(bool_variable('NSIS_CONFIG_ENHANCEDUI_SUPPORT', 'enables support for CreateFont, SetCtlColors (used by some UIs), SetBrandingImage, .onGUIInit, etc.', 'yes'))
cfg.Add(bool_variable('NSIS_CONFIG_COMPRESSION_SUPPORT', 'enables support for making installers that use compression (recommended).', 'yes'))
cfg.Add(bool_variable('NSIS_COMPRESS_BZIP2_SMALLMODE', "if defined, bzip2's decompressor uses bzip2's alternative decompression method that uses less runtime memory, at the expense of speed (and executable size). not recommended.", 'no'))
cfg.Add(('NSIS_COMPRESS_BZIP2_LEVEL', 'bzip2 compression window size. 1-9 is valid. 9 uses the most memory, but typically compresses best (recommended). 1 uses the least memory, but typically compresses the worst.', 9))
cfg.Add(bool_variable('NSIS_CONFIG_CRC_SUPPORT', 'enables support for installer verification. HIGHLY recommended.', 'yes'))
cfg.Add(bool_variable('NSIS_CONFIG_CRC_ANAL', 'makes the CRC verification extremely careful, meaning extra bytes on the end of file, or the first 512 bytes changing, will give error. Enable this if you are paranoid, otherwise leaving it off seems safe (and is less prone to reporting virii). If you will be digitally signing your installers, leave this off.', 'no'))
cfg.Add(bool_variable('NSIS_CONFIG_LOG', 'enables the logging facility. turning this on (by uncommenting it) adds about 4kb, but can be useful in debugging your installers.', 'no'))
cfg.Add(bool_variable('NSIS_CONFIG_LOG_ODS', 'makes the logging facility use OutputDebugString instead of a file.', 'no'))
cfg.Add(bool_variable('NSIS_CONFIG_LOG_STDOUT', 'makes the logging facility use stdout instead of a file.', 'no'))
cfg.Add(bool_variable('NSIS_CONFIG_LOG_TIMESTAMP', 'adds a timestamp to each log line.', 'no'))
cfg.Add(bool_variable('NSIS_SUPPORT_BGBG', 'enables support for the blue (well, whatever color you want) gradient background window.', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_CODECALLBACKS', 'enables support for installer code callbacks. recommended, as it uses a minimum of space and allows for neat functionality.', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_MOVEONREBOOT', 'enables support for uninstallers that automatically delete themselves from the temp directory, as well as the reboot moving/deleting modes of Delete and Rename. Adds about 512 gay bytes..', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_ACTIVEXREG', 'enables activeX plug-in registration and deregistration, as well as CallInstDLL', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_INTOPTS', 'enables support for IntCmp, IntCmpU, IntOp, and IntFmt.', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_STROPTS', 'enables support for StrCmp, StrCpy, and StrLen, as well as Get*Local.', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_STACK', 'enables support for the stack (Push, Pop, Exch)', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_FILEFUNCTIONS', 'enables support for FileOpen,FileClose, FileSeek, FileRead, and FileWrite.', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_FINDFIRST', 'enables support for FindFirst, FindNext, and FindClose.', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_CREATESHORTCUT', 'enables support for CreateShortCut.', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_INIFILES', 'enables support for ReadINIStr and WriteINIStr.', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_REGISTRYFUNCTIONS', 'enables support for ReadRegStr, ReadRegDWORD, WriteRegStr, etc etc etc.', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_COPYFILES', 'enables support for CopyFiles', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_REBOOT', 'enables support for Reboot, IfRebootFlag, SetRebootFlag', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_FNUTIL', 'enables support for GetFullPathName, GetTempFileName, and SearchPath', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_EXECUTE', 'enables support for Exec and ExecWait', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_SHELLEXECUTE', 'enables support for ExecShell', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_GETDLLVERSION', 'enables support for GetDLLVersion', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_GETFILETIME', 'enables support for GetFileTime', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_GETFONTVERSION', 'enables support for GetFontversion', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_GETFONTNAME', 'enables support for GetFontName', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_HWNDS', 'enables support for FindWindow, SendMessage, and IsWindow', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_ENVIRONMENT', 'enables support for ReadEnvStr and ExpandEnvStrings', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_RMDIR', 'enables support for RMDir', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_FILE', 'enables support for File (extracting files)', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_DELETE', 'enables support for Delete (delete files)', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_RENAME', 'enables support for Rename (rename files)', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_MESSAGEBOX', 'enables support for MessageBox', 'yes'))
cfg.Add(bool_variable('NSIS_SUPPORT_VERSION_INFO', 'enables support for version information in the installer', 'yes'))
cfg.Add(bool_variable('NSIS_FIX_DEFINES_IN_STRINGS', 'fixes defines inside defines and handles chars $ perfectly', 'no'))
cfg.Add(bool_variable('NSIS_SUPPORT_STANDARD_PREDEFINES', 'enables standard predefines - __FILE__, __LINE__, __DATE__, __TIME__ and __TIMESTAMP__', 'yes'))
cfg.Add(bool_variable('NSIS_LOCKWINDOW_SUPPORT', 'enables the LockWindow command', 'yes'))
cfg.Add(bool_variable('NSIS_CONFIG_PLUGIN_SUPPORT', 'enables installer plug-ins support', 'yes'))
cfg.Add(bool_variable('NSIS_FIX_COMMENT_HANDLING', 'fixes comment handling', 'yes'))
cfg.Add(bool_variable('NSIS_CONFIG_CONST_DATA_PATH', 'determines if plugins, includes, stubs etc. are located in a constant path set at build-time', defenv['PLATFORM'] != 'win32'))
help(cfg.GenerateHelpText(defenv))
env = environment()
cfg.Update(env)
def add_valued_define(define):
defenv.Append(NSIS_CPPDEFINES=[(define, env[define])])
def add_bool_define(define):
if env[define]:
defenv.Append(NSIS_CPPDEFINES=[define])
def add_string_define(define):
defenv.Append(NSIS_CPPDEFINES=[(define, '"%s"' % env[define])])
add_valued_define('NSIS_MAX_STRLEN')
add_valued_define('NSIS_MAX_INST_TYPES')
add_valued_define('NSIS_DEFAULT_LANG')
add_bool_define('NSIS_CONFIG_UNINSTALL_SUPPORT')
add_bool_define('NSIS_CONFIG_LICENSEPAGE')
add_bool_define('NSIS_CONFIG_COMPONENTPAGE')
add_bool_define('NSIS_CONFIG_COMPONENTPAGE_ALTERNATIVE')
add_bool_define('NSIS_CONFIG_SILENT_SUPPORT')
add_bool_define('NSIS_CONFIG_VISIBLE_SUPPORT')
add_bool_define('NSIS_CONFIG_ENHANCEDUI_SUPPORT')
add_bool_define('NSIS_CONFIG_COMPRESSION_SUPPORT')
add_bool_define('NSIS_COMPRESS_BZIP2_SMALLMODE')
add_valued_define('NSIS_COMPRESS_BZIP2_LEVEL')
add_bool_define('NSIS_CONFIG_CRC_SUPPORT')
add_bool_define('NSIS_CONFIG_CRC_ANAL')
add_bool_define('NSIS_CONFIG_LOG')
add_bool_define('NSIS_CONFIG_LOG_ODS')
add_bool_define('NSIS_CONFIG_LOG_STDOUT')
add_bool_define('NSIS_CONFIG_LOG_TIMESTAMP')
add_bool_define('NSIS_SUPPORT_BGBG')
add_bool_define('NSIS_SUPPORT_CODECALLBACKS')
add_bool_define('NSIS_SUPPORT_MOVEONREBOOT')
add_bool_define('NSIS_SUPPORT_ACTIVEXREG')
add_bool_define('NSIS_SUPPORT_INTOPTS')
add_bool_define('NSIS_SUPPORT_STROPTS')
add_bool_define('NSIS_SUPPORT_STACK')
add_bool_define('NSIS_SUPPORT_FILEFUNCTIONS')
add_bool_define('NSIS_SUPPORT_FINDFIRST')
add_bool_define('NSIS_SUPPORT_CREATESHORTCUT')
add_bool_define('NSIS_SUPPORT_INIFILES')
add_bool_define('NSIS_SUPPORT_REGISTRYFUNCTIONS')
add_bool_define('NSIS_SUPPORT_COPYFILES')
add_bool_define('NSIS_SUPPORT_REBOOT')
add_bool_define('NSIS_SUPPORT_FNUTIL')
add_bool_define('NSIS_SUPPORT_EXECUTE')
add_bool_define('NSIS_SUPPORT_SHELLEXECUTE')
add_bool_define('NSIS_SUPPORT_GETDLLVERSION')
add_bool_define('NSIS_SUPPORT_GETFILETIME')
add_bool_define('NSIS_SUPPORT_GETFONTVERSION')
add_bool_define('NSIS_SUPPORT_GETFONTNAME')
add_bool_define('NSIS_SUPPORT_HWNDS')
add_bool_define('NSIS_SUPPORT_ENVIRONMENT')
add_bool_define('NSIS_SUPPORT_RMDIR')
add_bool_define('NSIS_SUPPORT_FILE')
add_bool_define('NSIS_SUPPORT_DELETE')
add_bool_define('NSIS_SUPPORT_RENAME')
add_bool_define('NSIS_SUPPORT_MESSAGEBOX')
add_bool_define('NSIS_SUPPORT_VERSION_INFO')
add_bool_define('NSIS_FIX_DEFINES_IN_STRINGS')
add_bool_define('NSIS_SUPPORT_STANDARD_PREDEFINES')
add_bool_define('NSIS_LOCKWINDOW_SUPPORT')
add_bool_define('NSIS_CONFIG_PLUGIN_SUPPORT')
add_bool_define('NSIS_FIX_COMMENT_HANDLING')
add_bool_define('NSIS_CONFIG_CONST_DATA_PATH')
add_string_define('NSIS_VARS_SECTION')
|
def search(text, pat):
n = len(text)
m = len(pat)
skip = 0
right = {}
for c in text:
right[c] = -1
for j in range(0, m):
right[pat[j]] = j
i = 0
    # check every alignment (i can reach n - m) and compare the whole pattern (j down to 0)
    while i <= n-m:
        skip = 0
        for j in range(m-1, -1, -1):
if pat[j] != text[i+j]:
skip = max(1, j - right[text[i+j]])
break
if skip == 0:
return i
i += skip
return n
|
def search(text, pat):
n = len(text)
m = len(pat)
skip = 0
right = {}
for c in text:
right[c] = -1
for j in range(0, m):
right[pat[j]] = j
i = 0
    # check every alignment (i can reach n - m) and compare the whole pattern (j down to 0)
    while i <= n - m:
        skip = 0
        for j in range(m - 1, -1, -1):
if pat[j] != text[i + j]:
skip = max(1, j - right[text[i + j]])
break
if skip == 0:
return i
i += skip
return n
|
class PhysicsForce :
class PhysicsGG :
pass
def W(self, Force, Distance) :
usaha = Force * Distance
return usaha
class PhysicsRotation :
def W(self, frequency) :
omega = 2 * 3.15 * frequency
return omega
Rinta = PhysicsForce()
Usaha = Rinta.W(3,2)
print(Usaha)
Marsa = PhysicsRotation()
Omega = Marsa.W(4)
print(Omega)
# class Parrot:
# def fly(self):
# print("Parrot can fly")
# def swim(self):
# print("Parrot can't swim")
# class Penguin:
# def fly(self):
# print("Penguin can't fly")
# def swim(self):
# print("Penguin can swim")
# # common interface
# def flying_test(bird):
# bird.fly()
# #instantiate objects
# blu = Parrot()
# peggy = Penguin()
# # passing the object
# flying_test(blu)
# flying_test(peggy)
|
class Physicsforce:
class Physicsgg:
pass
def w(self, Force, Distance):
usaha = Force * Distance
return usaha
class Physicsrotation:
def w(self, frequency):
omega = 2 * 3.15 * frequency
return omega
rinta = Physicsforce()
usaha = rinta.w(3, 2)
print(usaha)
marsa = Physicsrotation()
omega = marsa.w(4)
print(omega)
|
# gmail credentials
gmail = dict(
username='username',
password='password'
)
# number of centimeters considered to be acceptable
trigger_distance = 10
# number of seconds spent below trigger distance before sending email
alert_after = 20
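# A rough sketch of how the settings above might be consumed; read_distance_cm() and
# send_alert_email() are hypothetical placeholders for this illustration, not part of
# the project the snippet comes from.
import time

def monitor(read_distance_cm, send_alert_email):
    below_since = None
    while True:
        if read_distance_cm() < trigger_distance:
            # start (or keep) the timer while the reading stays too close
            if below_since is None:
                below_since = time.time()
            if time.time() - below_since >= alert_after:
                send_alert_email(gmail['username'], gmail['password'])
                below_since = None
        else:
            below_since = None
        time.sleep(1)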
|
gmail = dict(username='username', password='password')
trigger_distance = 10
alert_after = 20
|
def is_palindrome_permutation(string):
char_set = [0] * 26
total_letter = 0
total_odd = 0
for char in string:
        if char >= 'A' and char <= 'Z':
            index = ord(char) - ord('A')
        elif char >= 'a' and char <= 'z':
            index = ord(char)-ord('a')
        else:
            # ignore spaces and any other non-letter characters
            continue
        total_letter += 1
        char_set[index] += 1
if total_letter % 2 == 0:
for i in range(26):
            if char_set[i] % 2:
return False
elif total_letter % 2 == 1:
for i in range(26):
if char_set[i] % 2 == 1:
total_odd += 1
if total_odd > 1:
return False
return True
print(is_palindrome_permutation('sskdfjs'))
print(is_palindrome_permutation('sas'))
print(is_palindrome_permutation('ssaa'))
|
def is_palindrome_permutation(string):
char_set = [0] * 26
total_letter = 0
total_odd = 0
for char in string:
        if char >= 'A' and char <= 'Z':
            index = ord(char) - ord('A')
        elif char >= 'a' and char <= 'z':
            index = ord(char) - ord('a')
        else:
            # ignore spaces and any other non-letter characters
            continue
        total_letter += 1
        char_set[index] += 1
if total_letter % 2 == 0:
for i in range(26):
            if char_set[i] % 2:
return False
elif total_letter % 2 == 1:
for i in range(26):
if char_set[i] % 2 == 1:
total_odd += 1
if total_odd > 1:
return False
return True
print(is_palindrome_permutation('sskdfjs'))
print(is_palindrome_permutation('sas'))
print(is_palindrome_permutation('ssaa'))
|
a = 1
b = 2
def index():
return 'hello world'
def hello():
return 'hello 2018'
def detail():
return 'detail info'
c = 3
d = 4
|
a = 1
b = 2
def index():
return 'hello world'
def hello():
return 'hello 2018'
def detail():
return 'detail info'
c = 3
d = 4
|
class TrackingMode(object):
PRINTING, LOGGING = range(0, 2)
TRACKING = True
TRACKING_MODE = TrackingMode.PRINTING
class TimyConfig(object):
DEFAULT_IDENT = 'Timy'
def __init__(self, tracking=TRACKING, tracking_mode=TRACKING_MODE):
self.tracking = tracking
self.tracking_mode = tracking_mode
timy_config = TimyConfig()
|
class Trackingmode(object):
(printing, logging) = range(0, 2)
tracking = True
tracking_mode = Trackingmode.printing
class Timyconfig(object):
default_ident = 'Timy'
    def __init__(self, tracking=tracking, tracking_mode=tracking_mode):
self.tracking = tracking
self.tracking_mode = tracking_mode
timy_config = Timyconfig()
|
# two float values
val1 = 100.99
val2 = 76.15
# Adding the two given numbers
sum = float(val1) + float(val2)
# Displaying the addition result
print("The sum of given numbers is: ", sum)
|
val1 = 100.99
val2 = 76.15
sum = float(val1) + float(val2)
print('The sum of given numbers is: ', sum)
|
with open("dane/dane.txt") as f:
lines = []
for line in f:
sline = line.strip()
lines.append(sline)
count = 0
for line in lines:
if line[0] == line[-1]:
count += 1
print(f"{count=}")
|
with open('dane/dane.txt') as f:
lines = []
for line in f:
sline = line.strip()
lines.append(sline)
count = 0
for line in lines:
if line[0] == line[-1]:
count += 1
print(f'count={count!r}')
|
class Customer:
def __init__(self, client):
self.client = client
self.logger = client.logger
self.endpoint_base = '/data/v2/projects/{}/customers'.format(client.project_token)
def get_customer(self, ids):
path = '{}/export-one'.format(self.endpoint_base)
payload = {'customer_ids': ids}
response = self.client.post(path, payload)
if response is None:
return None
return {
'ids': response['ids'],
'properties': response['properties'],
'events': response['events']
}
def get_customer_consents(self, ids, consents):
path = '{}/attributes'.format(self.endpoint_base)
payload = {'customer_ids': ids,
'attributes': [{'type': 'consent', 'category': consent_type} for consent_type in consents]}
response = self.client.post(path, payload)
if response is None:
return None
result = {}
for index, consent_type in enumerate(consents):
# Check if user has permission to request data_type
if not response['results'][index]['success']:
self.logger.warning('No permission to retrieve consent {}'.format(consent_type))
result[consent_type] = None
continue
result[consent_type] = response['results'][index]['value']
return result
def get_customer_attributes(self, customer_ids, properties=[], segmentations=[], ids=[], expressions=[],
aggregations=[], predictions=[]):
path = '{}/attributes'.format(self.endpoint_base)
payload = {
'customer_ids': customer_ids,
'attributes':
[{'type': 'property', 'property': customer_property} for customer_property in properties] +
[{'type': 'segmentation', 'id': segmentation} for segmentation in segmentations] +
[{'type': 'id', 'id': _id} for _id in ids] +
[{'type': 'expression', 'id': expression} for expression in expressions] +
[{'type': 'aggregate', 'id': aggregate} for aggregate in aggregations] +
[{'type': 'prediction', 'id': prediction} for prediction in predictions]
}
response = self.client.post(path, payload)
if response is None:
return None
result = {}
attributes_retrieved = 0
for attribute_type in [('properties', properties), ('segmentations', segmentations), ('ids', ids),
('expressions', expressions), ('aggregations', aggregations),
('predictions', predictions)]:
attribute_type_name = attribute_type[0]
attribute_type_ids = attribute_type[1]
if len(attribute_type_ids) == 0:
continue
result[attribute_type_name] = {}
for _id in attribute_type_ids:
# Check if user has permission to request attribute_type
if not response['results'][attributes_retrieved]['success']:
self.logger.warning('No permission to retrieve %s %s', attribute_type_name, _id)
result[attribute_type_name][_id] = None
attributes_retrieved += 1
continue
result[attribute_type_name][_id] = response['results'][attributes_retrieved]['value']
attributes_retrieved += 1
return result
def get_customers(self):
path = '{}/export'.format(self.endpoint_base)
payload = {'format': 'native_json'}
response = self.client.post(path, payload)
if response is None:
return None
users = []
ids = [field['id'] for field in filter(lambda x: x['type'] == 'id', response['fields'])]
properties = [field['property'] for field in filter(lambda x: x['type'] == 'property', response['fields'])]
for row in response['data']:
user = {'ids': {}, 'properties': {}}
for index, attribute in enumerate(row):
if index < len(ids):
user['ids'][ids[index]] = attribute
else:
user['properties'][properties[index - len(ids)]] = attribute
users.append(user)
return users
def get_events(self, customer_ids, event_types):
path = '{}/events'.format(self.endpoint_base)
payload = {'customer_ids': customer_ids, 'event_types': event_types}
response = self.client.post(path, payload)
return None if response is None else response['data']
def anonymize_customer(self, customer_ids):
path = '{}/anonymize'.format(self.endpoint_base)
payload = {'customer_ids': customer_ids}
response = self.client.post(path, payload)
return None if response is None else response['success']
|
class Customer:
def __init__(self, client):
self.client = client
self.logger = client.logger
self.endpoint_base = '/data/v2/projects/{}/customers'.format(client.project_token)
def get_customer(self, ids):
path = '{}/export-one'.format(self.endpoint_base)
payload = {'customer_ids': ids}
response = self.client.post(path, payload)
if response is None:
return None
return {'ids': response['ids'], 'properties': response['properties'], 'events': response['events']}
def get_customer_consents(self, ids, consents):
path = '{}/attributes'.format(self.endpoint_base)
payload = {'customer_ids': ids, 'attributes': [{'type': 'consent', 'category': consent_type} for consent_type in consents]}
response = self.client.post(path, payload)
if response is None:
return None
result = {}
for (index, consent_type) in enumerate(consents):
if not response['results'][index]['success']:
self.logger.warning('No permission to retrieve consent {}'.format(consent_type))
result[consent_type] = None
continue
result[consent_type] = response['results'][index]['value']
return result
def get_customer_attributes(self, customer_ids, properties=[], segmentations=[], ids=[], expressions=[], aggregations=[], predictions=[]):
path = '{}/attributes'.format(self.endpoint_base)
payload = {'customer_ids': customer_ids, 'attributes': [{'type': 'property', 'property': customer_property} for customer_property in properties] + [{'type': 'segmentation', 'id': segmentation} for segmentation in segmentations] + [{'type': 'id', 'id': _id} for _id in ids] + [{'type': 'expression', 'id': expression} for expression in expressions] + [{'type': 'aggregate', 'id': aggregate} for aggregate in aggregations] + [{'type': 'prediction', 'id': prediction} for prediction in predictions]}
response = self.client.post(path, payload)
if response is None:
return None
result = {}
attributes_retrieved = 0
for attribute_type in [('properties', properties), ('segmentations', segmentations), ('ids', ids), ('expressions', expressions), ('aggregations', aggregations), ('predictions', predictions)]:
attribute_type_name = attribute_type[0]
attribute_type_ids = attribute_type[1]
if len(attribute_type_ids) == 0:
continue
result[attribute_type_name] = {}
for _id in attribute_type_ids:
if not response['results'][attributes_retrieved]['success']:
self.logger.warning('No permission to retrieve %s %s', attribute_type_name, _id)
result[attribute_type_name][_id] = None
attributes_retrieved += 1
continue
result[attribute_type_name][_id] = response['results'][attributes_retrieved]['value']
attributes_retrieved += 1
return result
def get_customers(self):
path = '{}/export'.format(self.endpoint_base)
payload = {'format': 'native_json'}
response = self.client.post(path, payload)
if response is None:
return None
users = []
ids = [field['id'] for field in filter(lambda x: x['type'] == 'id', response['fields'])]
properties = [field['property'] for field in filter(lambda x: x['type'] == 'property', response['fields'])]
for row in response['data']:
user = {'ids': {}, 'properties': {}}
for (index, attribute) in enumerate(row):
if index < len(ids):
user['ids'][ids[index]] = attribute
else:
user['properties'][properties[index - len(ids)]] = attribute
users.append(user)
return users
def get_events(self, customer_ids, event_types):
path = '{}/events'.format(self.endpoint_base)
payload = {'customer_ids': customer_ids, 'event_types': event_types}
response = self.client.post(path, payload)
return None if response is None else response['data']
def anonymize_customer(self, customer_ids):
path = '{}/anonymize'.format(self.endpoint_base)
payload = {'customer_ids': customer_ids}
response = self.client.post(path, payload)
return None if response is None else response['success']
|
__author__ = 'wektor'
class GenericBackend(object):
def set(self, key, value):
raise NotImplemented
def get(self, key):
raise NotImplemented
def delete(self, key):
raise NotImplemented
|
__author__ = 'wektor'
class Genericbackend(object):
def set(self, key, value):
raise NotImplemented
def get(self, key):
raise NotImplemented
def delete(self, key):
raise NotImplemented
|
#
# PySNMP MIB module H3C-UNICAST-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/H3C-UNICAST-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:24:11 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsUnion, SingleValueConstraint, ConstraintsIntersection, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsUnion", "SingleValueConstraint", "ConstraintsIntersection", "ValueRangeConstraint")
h3cCommon, = mibBuilder.importSymbols("HUAWEI-3COM-OID-MIB", "h3cCommon")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Counter32, Integer32, iso, TimeTicks, NotificationType, Unsigned32, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, ObjectIdentity, Bits, ModuleIdentity, IpAddress, Gauge32, Counter64 = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "Integer32", "iso", "TimeTicks", "NotificationType", "Unsigned32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "ObjectIdentity", "Bits", "ModuleIdentity", "IpAddress", "Gauge32", "Counter64")
TextualConvention, DisplayString, TruthValue = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString", "TruthValue")
h3cUnicast = ModuleIdentity((1, 3, 6, 1, 4, 1, 2011, 10, 2, 44))
h3cUnicast.setRevisions(('2005-03-24 14:54',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: h3cUnicast.setRevisionsDescriptions((' Revisions made by Hangzhou MIB team.',))
if mibBuilder.loadTexts: h3cUnicast.setLastUpdated('200501311454Z')
if mibBuilder.loadTexts: h3cUnicast.setOrganization('Huawei 3com Technologies Co.,Ltd')
if mibBuilder.loadTexts: h3cUnicast.setContactInfo('Platform Team Hangzhou Institute Huawei-3Com Tech, Inc.')
if mibBuilder.loadTexts: h3cUnicast.setDescription(' This MIB is a framework MIB for unicast related features.')
h3cURPFTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 10, 2, 44, 1), )
if mibBuilder.loadTexts: h3cURPFTable.setStatus('current')
if mibBuilder.loadTexts: h3cURPFTable.setDescription(' Unicast Reverse Path Forwarding (URPF) is used to prevent the network attacks caused by source address spoofing. This table is used to configure URPF on specific interfaces.')
h3cURPFEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 10, 2, 44, 1, 1), ).setIndexNames((0, "H3C-UNICAST-MIB", "h3cURPFIfIndex"))
if mibBuilder.loadTexts: h3cURPFEntry.setStatus('current')
if mibBuilder.loadTexts: h3cURPFEntry.setDescription(' The entry of h3cURPFTable, indexed by vlan interface index.')
h3cURPFIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 44, 1, 1, 1), Integer32())
if mibBuilder.loadTexts: h3cURPFIfIndex.setStatus('current')
if mibBuilder.loadTexts: h3cURPFIfIndex.setDescription(' The ifIndex of vlan interface.')
h3cURPFEnabled = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 44, 1, 1, 2), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cURPFEnabled.setStatus('current')
if mibBuilder.loadTexts: h3cURPFEnabled.setDescription(' This object is used to enable or disable URPF on certain vlan interfaces.')
h3cURPFSlotID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 44, 1, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cURPFSlotID.setStatus('current')
if mibBuilder.loadTexts: h3cURPFSlotID.setDescription(' This object specifies to which slot packets are redirected in order to perform URPF check.')
h3cURPFTotalReceivedPacket = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 44, 1, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cURPFTotalReceivedPacket.setStatus('current')
if mibBuilder.loadTexts: h3cURPFTotalReceivedPacket.setDescription(' This object provides total received packets number.')
h3cURPFDroppedPacket = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 44, 1, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: h3cURPFDroppedPacket.setStatus('current')
if mibBuilder.loadTexts: h3cURPFDroppedPacket.setDescription(' This object provides total dropped invalid packets number.')
h3cURPFClearStat = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 10, 2, 44, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("reserved", 0), ("reset", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: h3cURPFClearStat.setStatus('current')
if mibBuilder.loadTexts: h3cURPFClearStat.setDescription(' This object is used to clear the URPF statistics on certain vlan interfaces. This object is actually a write-only object. When read, it always returns 0. When set to 1, the objects h3cURPFTotalReceivedPacket and h3cURPFDroppedPacket are reset to 0.')
mibBuilder.exportSymbols("H3C-UNICAST-MIB", h3cURPFSlotID=h3cURPFSlotID, h3cURPFEnabled=h3cURPFEnabled, h3cURPFClearStat=h3cURPFClearStat, h3cURPFTable=h3cURPFTable, h3cURPFEntry=h3cURPFEntry, h3cURPFTotalReceivedPacket=h3cURPFTotalReceivedPacket, h3cUnicast=h3cUnicast, h3cURPFIfIndex=h3cURPFIfIndex, h3cURPFDroppedPacket=h3cURPFDroppedPacket, PYSNMP_MODULE_ID=h3cUnicast)
|
(octet_string, object_identifier, integer) = mibBuilder.importSymbols('ASN1', 'OctetString', 'ObjectIdentifier', 'Integer')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(value_size_constraint, constraints_union, single_value_constraint, constraints_intersection, value_range_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ValueSizeConstraint', 'ConstraintsUnion', 'SingleValueConstraint', 'ConstraintsIntersection', 'ValueRangeConstraint')
(h3c_common,) = mibBuilder.importSymbols('HUAWEI-3COM-OID-MIB', 'h3cCommon')
(module_compliance, notification_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'ModuleCompliance', 'NotificationGroup')
(counter32, integer32, iso, time_ticks, notification_type, unsigned32, mib_scalar, mib_table, mib_table_row, mib_table_column, mib_identifier, object_identity, bits, module_identity, ip_address, gauge32, counter64) = mibBuilder.importSymbols('SNMPv2-SMI', 'Counter32', 'Integer32', 'iso', 'TimeTicks', 'NotificationType', 'Unsigned32', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'MibIdentifier', 'ObjectIdentity', 'Bits', 'ModuleIdentity', 'IpAddress', 'Gauge32', 'Counter64')
(textual_convention, display_string, truth_value) = mibBuilder.importSymbols('SNMPv2-TC', 'TextualConvention', 'DisplayString', 'TruthValue')
h3c_unicast = module_identity((1, 3, 6, 1, 4, 1, 2011, 10, 2, 44))
h3cUnicast.setRevisions(('2005-03-24 14:54',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts:
h3cUnicast.setRevisionsDescriptions((' Revisions made by Hangzhou MIB team.',))
if mibBuilder.loadTexts:
h3cUnicast.setLastUpdated('200501311454Z')
if mibBuilder.loadTexts:
h3cUnicast.setOrganization('Huawei 3com Technologies Co.,Ltd')
if mibBuilder.loadTexts:
h3cUnicast.setContactInfo('Platform Team Hangzhou Institute Huawei-3Com Tech, Inc.')
if mibBuilder.loadTexts:
h3cUnicast.setDescription(' This MIB is a framework MIB for unicast related features.')
h3c_urpf_table = mib_table((1, 3, 6, 1, 4, 1, 2011, 10, 2, 44, 1))
if mibBuilder.loadTexts:
h3cURPFTable.setStatus('current')
if mibBuilder.loadTexts:
h3cURPFTable.setDescription(' Unicast Reverse Path Forwarding (URPF) is used to prevent the network attacks caused by source address spoofing. This table is used to configure URPF on specific interfaces.')
h3c_urpf_entry = mib_table_row((1, 3, 6, 1, 4, 1, 2011, 10, 2, 44, 1, 1)).setIndexNames((0, 'H3C-UNICAST-MIB', 'h3cURPFIfIndex'))
if mibBuilder.loadTexts:
h3cURPFEntry.setStatus('current')
if mibBuilder.loadTexts:
h3cURPFEntry.setDescription(' The entry of h3cURPFTable, indexed by vlan interface index.')
h3c_urpf_if_index = mib_table_column((1, 3, 6, 1, 4, 1, 2011, 10, 2, 44, 1, 1, 1), integer32())
if mibBuilder.loadTexts:
h3cURPFIfIndex.setStatus('current')
if mibBuilder.loadTexts:
h3cURPFIfIndex.setDescription(' The ifIndex of vlan interface.')
h3c_urpf_enabled = mib_table_column((1, 3, 6, 1, 4, 1, 2011, 10, 2, 44, 1, 1, 2), truth_value().clone('false')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
h3cURPFEnabled.setStatus('current')
if mibBuilder.loadTexts:
h3cURPFEnabled.setDescription(' This object is used to enable or disable URPF on certain vlan interfaces.')
h3c_urpf_slot_id = mib_table_column((1, 3, 6, 1, 4, 1, 2011, 10, 2, 44, 1, 1, 3), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
h3cURPFSlotID.setStatus('current')
if mibBuilder.loadTexts:
h3cURPFSlotID.setDescription(' This object specifies to which slot packets are redirected in order to perform URPF check.')
h3c_urpf_total_received_packet = mib_table_column((1, 3, 6, 1, 4, 1, 2011, 10, 2, 44, 1, 1, 4), counter64()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
h3cURPFTotalReceivedPacket.setStatus('current')
if mibBuilder.loadTexts:
h3cURPFTotalReceivedPacket.setDescription(' This object provides total received packets number.')
h3c_urpf_dropped_packet = mib_table_column((1, 3, 6, 1, 4, 1, 2011, 10, 2, 44, 1, 1, 5), counter64()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
h3cURPFDroppedPacket.setStatus('current')
if mibBuilder.loadTexts:
h3cURPFDroppedPacket.setDescription(' This object provides total dropped invalid packets number.')
h3c_urpf_clear_stat = mib_table_column((1, 3, 6, 1, 4, 1, 2011, 10, 2, 44, 1, 1, 6), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1))).clone(namedValues=named_values(('reserved', 0), ('reset', 1)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
h3cURPFClearStat.setStatus('current')
if mibBuilder.loadTexts:
h3cURPFClearStat.setDescription(' This object is used to clear the URPF statistics on certain vlan interfaces. This object is actually a write-only object. When read, it always returns 0. When set to 1, the objects h3cURPFTotalReceivedPacket and h3cURPFDroppedPacket are reset to 0.')
mibBuilder.exportSymbols('H3C-UNICAST-MIB', h3cURPFSlotID=h3cURPFSlotID, h3cURPFEnabled=h3cURPFEnabled, h3cURPFClearStat=h3cURPFClearStat, h3cURPFTable=h3cURPFTable, h3cURPFEntry=h3cURPFEntry, h3cURPFTotalReceivedPacket=h3cURPFTotalReceivedPacket, h3cUnicast=h3cUnicast, h3cURPFIfIndex=h3cURPFIfIndex, h3cURPFDroppedPacket=h3cURPFDroppedPacket, PYSNMP_MODULE_ID=h3cUnicast)
|
'''
nums: [2, 3, -2, 4]
max: [2, 6, -2, 4]
min: [2, 3, -12, -48]
max: [2, 6, 6, 6]
'''
|
"""
nums: [2, 3, -2, 4]
max: [2, 6, -2, 4]
min: [2, 3, -12, -48]
max: [2, 6, 6, 6]
"""
|
def gmt2json(pathx,hasDescColumn=True,isFuzzy=False):
wordsAll = []
# secondColumn = []
with open(pathx,'r') as gf:
for line in gf:
line = line.strip('\r\n\t')
# if not a empty line
if line:
words = []
i = 0
for item in line.split('\t'):
if i==0:
words.append(item)
else:
# a gene symbol cannot be a string of numbers.
if item!="" and not isNumStr(item):
                            words.append(item)
                    i += 1
wordsAll.append(words)
# secondColumn.append(words[1])
gmtName = getfilename(pathx)
print(gmtName)
gmt = []
if not isFuzzy and hasDescColumn:
for words in wordsAll:
gmt.append({'gmt':gmtName,'desc':words[1],
'term':words[0],'items':words[2:]})
return gmt
def getBaseDir():
currentPath = os.getcwd()
while currentPath != '/':
if os.path.isdir(currentPath+'/.git'):
break
currentPath = getParDir(currentPath)
if currentPath == '/':
raise Exception('Base dir not found because .git directory is not present')
return currentPath
|
def gmt2json(pathx, hasDescColumn=True, isFuzzy=False):
words_all = []
with open(pathx, 'r') as gf:
for line in gf:
line = line.strip('\r\n\t')
if line:
words = []
i = 0
for item in line.split('\t'):
if i == 0:
words.append(item)
elif item != '' and (not is_num_str(item)):
words.append(item)
                    i += 1
                words_all.append(words)
gmt_name = getfilename(pathx)
    print(gmt_name)
gmt = []
if not isFuzzy and hasDescColumn:
        for words in words_all:
            gmt.append({'gmt': gmt_name, 'desc': words[1], 'term': words[0], 'items': words[2:]})
return gmt
def get_base_dir():
current_path = os.getcwd()
    while current_path != '/':
        if os.path.isdir(current_path + '/.git'):
            break
        current_path = get_par_dir(current_path)
    if current_path == '/':
        raise Exception('Base dir not found because .git directory is not present')
    return current_path
|
OCM_SIZE = 2 ** 8
READ_MODE = 0
WRITE_MODE = 1
DATA_BITWIDTH = 32
WORD_SIZE = DATA_BITWIDTH / 8
instream = CoramInStream(0, datawidth=DATA_BITWIDTH, size=64)
outstream = CoramOutStream(0, datawidth=DATA_BITWIDTH, size=64)
channel = CoramChannel(idx=0, datawidth=32)
DOWN_LEFT = 0
DOWN_PARENT = 1
DOWN_RIGHT = 2
UP_PARENT = 1
UP_CHILD = 0
offset = 0
num_entries = 0
def downheap():
if num_entries == 0:
return
if num_entries + 1 >= OCM_SIZE:
instream.write_nonblocking(offset + num_entries * WORD_SIZE + WORD_SIZE, 1)
index = 1
while True:
if index * 2 > num_entries:
outstream.read_nonblocking(index * WORD_SIZE + offset, 1)
break
if (index * 2) >= OCM_SIZE:
instream.write_nonblocking(index * WORD_SIZE * 2 + offset, 2)
elif (index * 2) + 1 >= OCM_SIZE:
instream.write_nonblocking(index * WORD_SIZE * 2 + offset + WORD_SIZE, 1)
select = channel.read()
outstream.read_nonblocking(index * WORD_SIZE + offset, 1)
if select == DOWN_LEFT:
index = index * 2
elif select == DOWN_RIGHT:
index = index * 2 + 1
else:
break
def upheap():
index = num_entries
while index > 1:
if (index / 2) >= OCM_SIZE:
instream.write_nonblocking((index / 2) * WORD_SIZE + offset, 1)
select = channel.read()
outstream.read_nonblocking(index * WORD_SIZE + offset, 1)
index = index / 2
if select == UP_PARENT: break
outstream.read_nonblocking(index * WORD_SIZE + offset, 1)
def heap():
global num_entries
mode = channel.read()
if mode == 1:
num_entries -= 1
downheap()
else:
num_entries += 1
upheap()
while True:
heap()
|
ocm_size = 2 ** 8
read_mode = 0
write_mode = 1
data_bitwidth = 32
word_size = DATA_BITWIDTH / 8
instream = coram_in_stream(0, datawidth=DATA_BITWIDTH, size=64)
outstream = coram_out_stream(0, datawidth=DATA_BITWIDTH, size=64)
channel = coram_channel(idx=0, datawidth=32)
down_left = 0
down_parent = 1
down_right = 2
up_parent = 1
up_child = 0
offset = 0
num_entries = 0
def downheap():
if num_entries == 0:
return
if num_entries + 1 >= OCM_SIZE:
instream.write_nonblocking(offset + num_entries * WORD_SIZE + WORD_SIZE, 1)
index = 1
while True:
if index * 2 > num_entries:
outstream.read_nonblocking(index * WORD_SIZE + offset, 1)
break
if index * 2 >= OCM_SIZE:
instream.write_nonblocking(index * WORD_SIZE * 2 + offset, 2)
elif index * 2 + 1 >= OCM_SIZE:
instream.write_nonblocking(index * WORD_SIZE * 2 + offset + WORD_SIZE, 1)
select = channel.read()
outstream.read_nonblocking(index * WORD_SIZE + offset, 1)
if select == DOWN_LEFT:
index = index * 2
elif select == DOWN_RIGHT:
index = index * 2 + 1
else:
break
def upheap():
index = num_entries
while index > 1:
if index / 2 >= OCM_SIZE:
instream.write_nonblocking(index / 2 * WORD_SIZE + offset, 1)
select = channel.read()
outstream.read_nonblocking(index * WORD_SIZE + offset, 1)
index = index / 2
if select == UP_PARENT:
break
outstream.read_nonblocking(index * WORD_SIZE + offset, 1)
def heap():
global num_entries
mode = channel.read()
if mode == 1:
num_entries -= 1
downheap()
else:
num_entries += 1
upheap()
while True:
heap()
|
'''
Endpoints are collected from the Market Data Endpoints api section under the official binance api docs:
https://binance-docs.github.io/apidocs/spot/en/#market-data-endpoints
'''
# Test Connectivity:
class test_ping:
params = None
method = 'GET'
endpoint = '/api/v3/ping'
security_type = 'None'
# Check Server Time:
class get_serverTime:
params = None
method = 'GET'
endpoint = '/api/v3/time'
security_type = 'None'
# Exchange Information:
class get_exchangeInfo:
params = None
method = 'GET'
endpoint = '/api/v3/exchangeInfo'
security_type = 'None'
# Order Book:
class get_orderBook:
params = {'R':['symbol'],
'O':['limit']}
method = 'GET'
endpoint = '/api/v3/depth'
security_type = 'None'
# Recent Trades List:
class get_recentTrades:
params = {'R':['symbol'],
'O':['limit']}
method = 'GET'
endpoint = '/api/v3/trades'
security_type = 'None'
# Old Trade Lookup:
class get_oldTrades:
params = {'R':['symbol'],
'O':['limit', 'fromId']}
method = 'GET'
endpoint = '/api/v3/historicalTrades'
security_type = 'None'
# Compressed/Aggregate Trades List:
class get_aggTradeList:
params = {'R':['symbol'],
'O':['limit', 'fromId', 'startTime', 'endTime', 'limit']}
method = 'GET'
endpoint = '/api/v3/aggTrades'
security_type = 'None'
# Kline/Candlestick Data:
class get_candles:
params = {'R':['symbol', 'interval'],
'O':['startTime', 'endTime', 'limit']}
method = 'GET'
endpoint = '/api/v3/klines'
security_type = 'None'
# Current Average Price:
class get_averagePrice:
params = {'R':['symbol']}
method = 'GET'
endpoint = '/api/v3/avgPrice'
security_type = 'None'
# 24hr Ticker Price Change Statistics:
class get_24hTicker:
params = {'O':['symbol']}
method = 'GET'
endpoint = '/api/v3/ticker/24hr'
security_type = 'None'
# Symbol Price Ticker:
class get_priceTicker:
params = {'O':['symbol']}
method = 'GET'
endpoint = '/api/v3/ticker/price'
security_type = 'None'
# Symbol Order Book Ticker:
class get_orderbookTicker:
params = {'O':['symbol']}
method = 'GET'
endpoint = '/api/v3/ticker/bookTicker'
security_type = 'None'
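# A minimal sketch of how these endpoint descriptors could be consumed; the base URL
# and the use of the `requests` package are assumptions made for illustration, and the
# 'R'/'O' keys are read here as required/optional parameter names.
import requests

BASE_URL = 'https://api.binance.com'  # assumed spot API base URL

def call_endpoint(spec, **params):
    required = (spec.params or {}).get('R', [])
    missing = [name for name in required if name not in params]
    if missing:
        raise ValueError('missing required params: {}'.format(missing))
    response = requests.request(spec.method, BASE_URL + spec.endpoint, params=params)
    response.raise_for_status()
    return response.json()
# Example usage (untested sketch): call_endpoint(get_orderBook, symbol='BTCUSDT', limit=5)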
|
"""
Endpoints are collected from the Market Data Endpoints api section under the official binance api docs:
https://binance-docs.github.io/apidocs/spot/en/#market-data-endpoints
"""
class Test_Ping:
params = None
method = 'GET'
endpoint = '/api/v3/ping'
security_type = 'None'
class Get_Servertime:
params = None
method = 'GET'
endpoint = '/api/v3/time'
security_type = 'None'
class Get_Exchangeinfo:
params = None
method = 'GET'
endpoint = '/api/v3/exchangeInfo'
security_type = 'None'
class Get_Orderbook:
params = {'R': ['symbol'], 'O': ['limit']}
method = 'GET'
endpoint = '/api/v3/depth'
security_type = 'None'
class Get_Recenttrades:
params = {'R': ['symbol'], 'O': ['limit']}
method = 'GET'
endpoint = '/api/v3/trades'
security_type = 'None'
class Get_Oldtrades:
params = {'R': ['symbol'], 'O': ['limit', 'fromId']}
method = 'GET'
endpoint = '/api/v3/historicalTrades'
security_type = 'None'
class Get_Aggtradelist:
params = {'R': ['symbol'], 'O': ['limit', 'fromId', 'startTime', 'endTime', 'limit']}
method = 'GET'
endpoint = '/api/v3/aggTrades'
security_type = 'None'
class Get_Candles:
params = {'R': ['symbol', 'interval'], 'O': ['startTime', 'endTime', 'limit']}
method = 'GET'
endpoint = '/api/v3/klines'
security_type = 'None'
class Get_Averageprice:
params = {'R': ['symbol']}
method = 'GET'
endpoint = '/api/v3/avgPrice'
security_type = 'None'
class Get_24Hticker:
params = {'O': ['symbol']}
method = 'GET'
endpoint = '/api/v3/ticker/24hr'
security_type = 'None'
class Get_Priceticker:
params = {'O': ['symbol']}
method = 'GET'
endpoint = '/api/v3/ticker/price'
security_type = 'None'
class Get_Orderbookticker:
params = {'O': ['symbol']}
method = 'GET'
endpoint = '/api/v3/ticker/bookTicker'
security_type = 'None'
|
## Oscillating Lambda Man
##
## Directions:
### 0: top
### 1: right
### 2: bottom
### 3: left
def main(world, _ghosts):
return (strategy_state(), step)
def step(state, world):
return (update_state(state), deduce_direction(state, world))
# Oscilation strategy state
def strategy_state():
#returns (frequency, count)
return (4, 0)
# Increase count in oscilation
def update_state(state):
return (state[0], state[1:]+1)
# Deduce direction based on oscillation parameter
def deduce_direction(state, _world):
# if self.cnt % (2 * self.frequency) < self.frequency:
if state[0] > modulo(state[1:], (2 * state[0])):
return 3
else:
return 1
# x % y
def modulo(x, y):
return ( x - ( y * (x / y) ) )
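# Worked example of the oscillation above (hand-traced; the tuple slicing here models
# the contest runtime's cons cells rather than plain Python tuples): with frequency = 4,
# modulo(count, 8) is 0..3 for the first four steps, so deduce_direction returns 3 (left);
# for counts 4..7 the check fails and it returns 1 (right), repeating every 8 steps.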
|
def main(world, _ghosts):
return (strategy_state(), step)
def step(state, world):
return (update_state(state), deduce_direction(state, world))
def strategy_state():
return (4, 0)
def update_state(state):
return (state[0], state[1:] + 1)
def deduce_direction(state, _world):
if state[0] > modulo(state[1:], 2 * state[0]):
return 3
else:
return 1
def modulo(x, y):
return x - y * (x / y)
|
url= 'http://ww.sougou.com/s?'
def sougou(nets):
count = 1
for net in nets:
rest1 = 'res%d.txt' %count
with open(rest1,'w',encoding='utf8') as f:
f.write(net)
print(net)
count +=1
if __name__ == '__main__':
nets = ('one','two','pr')
sougou(nets)
|
url = 'http://ww.sougou.com/s?'
def sougou(nets):
count = 1
for net in nets:
rest1 = 'res%d.txt' % count
with open(rest1, 'w', encoding='utf8') as f:
f.write(net)
print(net)
count += 1
if __name__ == '__main__':
nets = ('one', 'two', 'pr')
sougou(nets)
|
class Task:
def name():
raise NotImplementedError
def description():
raise NotImplementedError
def inputs():
raise NotImplementedError
def run(inputs):
raise NotImplementedError
|
class Task:
def name():
raise NotImplementedError
def description():
raise NotImplementedError
def inputs():
raise NotImplementedError
def run(inputs):
raise NotImplementedError
|
# --------------
# Code starts here
class_1 = ['Geoffrey Hinton','Andrew Ng','Sebastian Raschka','Yoshua Bengio']
class_2 = ['Hilary Mason','Carla Gentry','Corinna Cortes']
new_class = class_1 + class_2
print(new_class)
new_class.append('Peter Warden')
print(new_class)
new_class.remove('Carla Gentry')
print(new_class)
# Code ends here
# --------------
# Code starts here
courses= {'Math':65,'English':70,'History':80,'French':70,'Science':60}
total = courses['Math']+courses['English']+courses['History']+courses['French']+courses['Science']
print(total)
percentage = (total/500)*100
print(percentage)
# Code ends here
# --------------
# Code starts here
mathematics = {
'Geoffrey Hinton':78,
'Andrew Ng':95,
'Sebastian Raschka':65,
'Yoshua Benjio':50,
'Hilary Mason':70,
'Corinna Cortes':66,
'Peter Warden':75
}
topper = max(mathematics,key=mathematics.get)
print(topper)
# Code ends here
# --------------
# Given string
topper = 'andrew ng'
first_name,last_name= topper.split(" ")
full_name = last_name+" "+first_name
# Code starts here
certificate_name= full_name.upper()
print(certificate_name)
# Code ends here
|
class_1 = ['Geoffrey Hinton', 'Andrew Ng', 'Sebastian Raschka', 'Yoshua Bengio']
class_2 = ['Hilary Mason', 'Carla Gentry', 'Corinna Cortes']
new_class = class_1 + class_2
print(new_class)
new_class.append('Peter Warden')
print(new_class)
new_class.remove('Carla Gentry')
print(new_class)
courses = {'Math': 65, 'English': 70, 'History': 80, 'French': 70, 'Science': 60}
total = courses['Math'] + courses['English'] + courses['History'] + courses['French'] + courses['Science']
print(total)
percentage = total / 500 * 100
print(percentage)
mathematics = {'Geoffrey Hinton': 78, 'Andrew Ng': 95, 'Sebastian Raschka': 65, 'Yoshua Benjio': 50, 'Hilary Mason': 70, 'Corinna Cortes': 66, 'Peter Warden': 75}
topper = max(mathematics, key=mathematics.get)
print(topper)
topper = 'andrew ng'
(first_name, last_name) = topper.split(' ')
full_name = last_name + ' ' + first_name
certificate_name = full_name.upper()
print(certificate_name)
|
#Updating menu to include a save option
students= []
def displayMenu():
print("what would you like to do?")
print("\t(a) Add new student")
print("\t(v) View students")
print("\t(s) Save students")
print("\t(q) Quit")
choice = input("type one letter (a/v/s/q):").strip()
return choice
def doAdd():
# you have code here to add
print("in adding")
def doView():
# you have code here to view
print("in viewing")
def doSave():
#you will put the call to save dict here
print("in save")
#main program
choice = displayMenu()
while(choice != 'q'):
if choice == 'a':
doAdd()
elif choice == 'v':
doView()
elif choice == 's':
doSave()
elif choice !='q':
print("\n\nPlease select either a,v,s or q")
choice=displayMenu()
|
students = []
def display_menu():
print('what would you like to do?')
print('\t(a) Add new student')
print('\t(v) View students')
print('\t(s) Save students')
print('\t(q) Quit')
choice = input('type one letter (a/v/s/q):').strip()
return choice
def do_add():
print('in adding')
def do_view():
print('in viewing')
def do_save():
print('in save')
choice = display_menu()
while choice != 'q':
if choice == 'a':
do_add()
elif choice == 'v':
do_view()
elif choice == 's':
do_save()
elif choice != 'q':
print('\n\nPlease select either a,v,s or q')
choice = display_menu()
|
# You need the Elemental codex 1+ to cast "Haste"
# You need unique hero to perform resetCooldown action
# You need the Emperor's gloves to cast "Chain Lightning"
hero.cast("haste", hero)
hero.moveDown()
hero.moveRight()
hero.moveDown(0.5)
enemy = hero.findNearestEnemy()
hero.cast("chain-lightning", enemy)
hero.resetCooldown("chain-lightning")
hero.cast("chain-lightning", enemy)
|
hero.cast('haste', hero)
hero.moveDown()
hero.moveRight()
hero.moveDown(0.5)
enemy = hero.findNearestEnemy()
hero.cast('chain-lightning', enemy)
hero.resetCooldown('chain-lightning')
hero.cast('chain-lightning', enemy)
|
class FiniteAutomata:
def __init__(self):
Q = [] # finite set of states
E = [] # finite alphabet
D = {} # transition function
q0 = '' # initial state
F = [] # set of final states
self.clear_values()
def clear_values(self):
self.Q = []
self.E = []
self.D = {}
self.q0 = ''
self.F = []
def read(self, file_name):
with open(file_name) as file:
line = file.readline().strip()
while line != '':
if line == 'Q':
for state in file.readline().strip().split(' '):
self.Q.append(state)
if line == 'E':
for state in file.readline().strip().split(' '):
self.E.append(state)
if line == 'D':
line = file.readline()
while line[0] == '(':
trans_from_a, tras_from_b = line.strip().split('=')[0].replace('(', '').replace(')', '').split(',')
trans_to = line.strip().split('=')[1]
if (trans_from_a, tras_from_b) not in self.D.keys():
self.D[(trans_from_a, tras_from_b)] = trans_to
else:
self.D[(trans_from_a, tras_from_b)].append(trans_to)
line = file.readline()
if line == 'q0':
self.q0 = file.readline()
if line == 'F':
                    for state in file.readline().strip().split(' '):
self.F.append(state)
line = file.readline().strip()
def checkDFA(self):
for trans in self.D.values():
if len(trans) >= 2:
return False
return True
def acceptedSequence(self, input_sequence):
if self.checkDFA():
state = self.q0
for s in input_sequence:
if (state, s) in self.D.keys():
state = self.D[(state, s)][0]
else:
return False
if state in self.F:
return True
else:
return False
def getData(self):
items = {}
items['states'] = self.Q
items['alphabet'] = self.E
items['transitions'] = self.D
items['final states'] = self.F
return items
|
class Finiteautomata:
def __init__(self):
q = []
e = []
d = {}
q0 = ''
f = []
self.clear_values()
def clear_values(self):
self.Q = []
self.E = []
self.D = {}
self.q0 = ''
self.F = []
def read(self, file_name):
with open(file_name) as file:
line = file.readline().strip()
while line != '':
if line == 'Q':
for state in file.readline().strip().split(' '):
self.Q.append(state)
if line == 'E':
for state in file.readline().strip().split(' '):
self.E.append(state)
if line == 'D':
line = file.readline()
while line[0] == '(':
(trans_from_a, tras_from_b) = line.strip().split('=')[0].replace('(', '').replace(')', '').split(',')
trans_to = line.strip().split('=')[1]
if (trans_from_a, tras_from_b) not in self.D.keys():
self.D[trans_from_a, tras_from_b] = trans_to
else:
self.D[trans_from_a, tras_from_b].append(trans_to)
line = file.readline()
if line == 'q0':
self.q0 = file.readline()
if line == 'F':
                    for state in file.readline().strip().split(' '):
self.F.append(state)
line = file.readline().strip()
def check_dfa(self):
for trans in self.D.values():
if len(trans) >= 2:
return False
return True
def accepted_sequence(self, input_sequence):
        if self.check_dfa():
state = self.q0
for s in input_sequence:
if (state, s) in self.D.keys():
state = self.D[state, s][0]
else:
return False
if state in self.F:
return True
else:
return False
def get_data(self):
items = {}
items['states'] = self.Q
items['alphabet'] = self.E
items['transitions'] = self.D
items['final states'] = self.F
return items
|
tup=tuple(input("Enter the tuple").split(","))
st=tuple(input("Enter the another tuple").split(","))
tup1=tup+st
print(tup1)
|
tup = tuple(input('Enter the tuple').split(','))
st = tuple(input('Enter the another tuple').split(','))
tup1 = tup + st
print(tup1)
|
# -*- coding: utf-8 -*-
# DATA STRUCTURES
cats = [
{"name": "tom", "age": 1, "size": "small"},
{"name": "ash", "age": 2, "size": "medium"},
{"name": "hurley", "age": 5, "size": "large"},
]
print(cats)
|
cats = [{'name': 'tom', 'age': 1, 'size': 'small'}, {'name': 'ash', 'age': 2, 'size': 'medium'}, {'name': 'hurley', 'age': 5, 'size': 'large'}]
print(cats)
|
# coding=utf-8
class JavaHeap:
def __init__(self):
pass
|
class Javaheap:
def __init__(self):
pass
|
# Generated by h2py from /usr/include/netinet/in.h
# Included from net/nh.h
# Included from sys/machine.h
LITTLE_ENDIAN = 1234
BIG_ENDIAN = 4321
PDP_ENDIAN = 3412
BYTE_ORDER = BIG_ENDIAN
DEFAULT_GPR = 0xDEADBEEF
MSR_EE = 0x8000
MSR_PR = 0x4000
MSR_FP = 0x2000
MSR_ME = 0x1000
MSR_FE = 0x0800
MSR_FE0 = 0x0800
MSR_SE = 0x0400
MSR_BE = 0x0200
MSR_IE = 0x0100
MSR_FE1 = 0x0100
MSR_AL = 0x0080
MSR_IP = 0x0040
MSR_IR = 0x0020
MSR_DR = 0x0010
MSR_PM = 0x0004
DEFAULT_MSR = (MSR_EE | MSR_ME | MSR_AL | MSR_IR | MSR_DR)
DEFAULT_USER_MSR = (DEFAULT_MSR | MSR_PR)
CR_LT = 0x80000000
CR_GT = 0x40000000
CR_EQ = 0x20000000
CR_SO = 0x10000000
CR_FX = 0x08000000
CR_FEX = 0x04000000
CR_VX = 0x02000000
CR_OX = 0x01000000
XER_SO = 0x80000000
XER_OV = 0x40000000
XER_CA = 0x20000000
def XER_COMP_BYTE(xer): return ((xer >> 8) & 0x000000FF)
def XER_LENGTH(xer): return (xer & 0x0000007F)
DSISR_IO = 0x80000000
DSISR_PFT = 0x40000000
DSISR_LOCK = 0x20000000
DSISR_FPIO = 0x10000000
DSISR_PROT = 0x08000000
DSISR_LOOP = 0x04000000
DSISR_DRST = 0x04000000
DSISR_ST = 0x02000000
DSISR_SEGB = 0x01000000
DSISR_DABR = 0x00400000
DSISR_EAR = 0x00100000
SRR_IS_PFT = 0x40000000
SRR_IS_ISPEC = 0x20000000
SRR_IS_IIO = 0x10000000
SRR_IS_PROT = 0x08000000
SRR_IS_LOOP = 0x04000000
SRR_PR_FPEN = 0x00100000
SRR_PR_INVAL = 0x00080000
SRR_PR_PRIV = 0x00040000
SRR_PR_TRAP = 0x00020000
SRR_PR_IMPRE = 0x00010000
def ntohl(x): return (x)
def ntohs(x): return (x)
def htonl(x): return (x)
def htons(x): return (x)
IPPROTO_IP = 0
IPPROTO_ICMP = 1
IPPROTO_GGP = 3
IPPROTO_TCP = 6
IPPROTO_EGP = 8
IPPROTO_PUP = 12
IPPROTO_UDP = 17
IPPROTO_IDP = 22
IPPROTO_TP = 29
IPPROTO_LOCAL = 63
IPPROTO_EON = 80
IPPROTO_BIP = 0x53
IPPROTO_RAW = 255
IPPROTO_MAX = 256
IPPORT_RESERVED = 1024
IPPORT_USERRESERVED = 5000
IPPORT_TIMESERVER = 37
def IN_CLASSA(i): return (((long)(i) & 0x80000000) == 0)
IN_CLASSA_NET = 0xff000000
IN_CLASSA_NSHIFT = 24
IN_CLASSA_HOST = 0x00ffffff
IN_CLASSA_MAX = 128
def IN_CLASSB(i): return (((long)(i) & 0xc0000000) == 0x80000000)
IN_CLASSB_NET = 0xffff0000
IN_CLASSB_NSHIFT = 16
IN_CLASSB_HOST = 0x0000ffff
IN_CLASSB_MAX = 65536
def IN_CLASSC(i): return (((long)(i) & 0xe0000000) == 0xc0000000)
IN_CLASSC_NET = 0xffffff00
IN_CLASSC_NSHIFT = 8
IN_CLASSC_HOST = 0x000000ff
def IN_CLASSD(i): return (((long)(i) & 0xf0000000) == 0xe0000000)
def IN_MULTICAST(i): return IN_CLASSD(i)
def IN_EXPERIMENTAL(i): return (((long)(i) & 0xe0000000) == 0xe0000000)
def IN_BADCLASS(i): return (((long)(i) & 0xf0000000) == 0xf0000000)
INADDR_ANY = 0x00000000
INADDR_LOOPBACK = 0x7f000001
INADDR_BROADCAST = 0xffffffff
INADDR_NONE = 0xffffffff
IN_LOOPBACKNET = 127
IP_OPTIONS = 1
IP_HDRINCL = 2
IP_TOS = 3
IP_TTL = 4
IP_RECVOPTS = 5
IP_RECVRETOPTS = 6
IP_RECVDSTADDR = 7
IP_RETOPTS = 8
|
little_endian = 1234
big_endian = 4321
pdp_endian = 3412
byte_order = big_endian
default_gpr = 3735928559
msr_ee = 32768
msr_pr = 16384
msr_fp = 8192
msr_me = 4096
msr_fe = 2048
msr_fe0 = 2048
msr_se = 1024
msr_be = 512
msr_ie = 256
msr_fe1 = 256
msr_al = 128
msr_ip = 64
msr_ir = 32
msr_dr = 16
msr_pm = 4
default_msr = msr_ee | msr_me | msr_al | msr_ir | msr_dr
default_user_msr = default_msr | msr_pr
cr_lt = 2147483648
cr_gt = 1073741824
cr_eq = 536870912
cr_so = 268435456
cr_fx = 134217728
cr_fex = 67108864
cr_vx = 33554432
cr_ox = 16777216
xer_so = 2147483648
xer_ov = 1073741824
xer_ca = 536870912
def xer_comp_byte(xer):
return xer >> 8 & 255
def xer_length(xer):
return xer & 127
dsisr_io = 2147483648
dsisr_pft = 1073741824
dsisr_lock = 536870912
dsisr_fpio = 268435456
dsisr_prot = 134217728
dsisr_loop = 67108864
dsisr_drst = 67108864
dsisr_st = 33554432
dsisr_segb = 16777216
dsisr_dabr = 4194304
dsisr_ear = 1048576
srr_is_pft = 1073741824
srr_is_ispec = 536870912
srr_is_iio = 268435456
srr_is_prot = 134217728
srr_is_loop = 67108864
srr_pr_fpen = 1048576
srr_pr_inval = 524288
srr_pr_priv = 262144
srr_pr_trap = 131072
srr_pr_impre = 65536
def ntohl(x):
return x
def ntohs(x):
return x
def htonl(x):
return x
def htons(x):
return x
ipproto_ip = 0
ipproto_icmp = 1
ipproto_ggp = 3
ipproto_tcp = 6
ipproto_egp = 8
ipproto_pup = 12
ipproto_udp = 17
ipproto_idp = 22
ipproto_tp = 29
ipproto_local = 63
ipproto_eon = 80
ipproto_bip = 83
ipproto_raw = 255
ipproto_max = 256
ipport_reserved = 1024
ipport_userreserved = 5000
ipport_timeserver = 37
def in_classa(i):
    return int(i) & 2147483648 == 0
in_classa_net = 4278190080
in_classa_nshift = 24
in_classa_host = 16777215
in_classa_max = 128
def in_classb(i):
    return int(i) & 3221225472 == 2147483648
in_classb_net = 4294901760
in_classb_nshift = 16
in_classb_host = 65535
in_classb_max = 65536
def in_classc(i):
    return int(i) & 3758096384 == 3221225472
in_classc_net = 4294967040
in_classc_nshift = 8
in_classc_host = 255
def in_classd(i):
    return int(i) & 4026531840 == 3758096384
def in_multicast(i):
return in_classd(i)
def in_experimental(i):
    return int(i) & 3758096384 == 3758096384
def in_badclass(i):
    return int(i) & 4026531840 == 4026531840
inaddr_any = 0
inaddr_loopback = 2130706433
inaddr_broadcast = 4294967295
inaddr_none = 4294967295
in_loopbacknet = 127
ip_options = 1
ip_hdrincl = 2
ip_tos = 3
ip_ttl = 4
ip_recvopts = 5
ip_recvretopts = 6
ip_recvdstaddr = 7
ip_retopts = 8
|
#Inputing Age
age = int(input("Enter Age : "))
# condition to check if the person is an adult or a teenager or a kid
if age>=18:
status="Not a teenager. You are an adult"
elif age>=13:
status="Teenager"
elif age<=12:
status="You are a kid"
print("You are ",status,)# Printing the result after inputing the age of the kid
|
age = int(input('Enter Age : '))
if age >= 18:
status = 'Not a teenager. You are an adult'
elif age >= 13:
status = 'Teenager'
elif age <= 12:
status = 'You are a kid'
print('You are ', status)
|
__author__ = 'chira'
# "def" as defining mathematical functions
# 18-Unpacking_args gives an alternate way to pass arguments
def f(x): # function name is "f". It has ONE argument
y = 2*x + 3
print("f(%d) = %d" %(x,y))
def g(x): # function name is "g". It has ONE argument
y = pow(x,2)
print("g(%d) = %d" %(x,y))
def h(x,y): # function name is "h". It has TWO arguments
z = pow(x,2) + 3*y;
print("h(%d,%d) = %d" %(x,y,z))
f(1)# call (by value)
f(3)
g(5)
h(2,3)
# for doing a function composition we need the notion of "return" values
|
__author__ = 'chira'
def f(x):
y = 2 * x + 3
print('f(%d) = %d' % (x, y))
def g(x):
y = pow(x, 2)
print('g(%d) = %d' % (x, y))
def h(x, y):
z = pow(x, 2) + 3 * y
print('h(%d,%d) = %d' % (x, y, z))
f(1)
f(3)
g(5)
h(2, 3)
|
def list_reverse(list1):
new_list = []
for i in range(len(list1)-1, -1, -1):
new_list.append(list1[i])
return new_list
test = [1, 2, 3, 4, 5, 6]
print(test)
print(list_reverse(test))
|
def list_reverse(list1):
new_list = []
for i in range(len(list1) - 1, -1, -1):
new_list.append(list1[i])
return new_list
test = [1, 2, 3, 4, 5, 6]
print(test)
print(list_reverse(test))
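# A shorter equivalent sketch: slicing with a negative step (or reversed())
# reverses a list without an explicit index loop.
def list_reverse_builtin(list1):
    # list1[::-1] copies the list in reverse order
    return list1[::-1]
print(list_reverse_builtin([1, 2, 3, 4, 5, 6]))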
|
#
# @lc app=leetcode id=205 lang=python3
#
# [205] Isomorphic Strings
#
# https://leetcode.com/problems/isomorphic-strings/description/
#
# algorithms
# Easy (40.89%)
# Likes: 2445
# Dislikes: 520
# Total Accepted: 402.9K
# Total Submissions: 974.8K
# Testcase Example: '"egg"\n"add"'
#
# Given two strings s and t, determine if they are isomorphic.
#
# Two strings s and t are isomorphic if the characters in s can be replaced to
# get t.
#
# All occurrences of a character must be replaced with another character while
# preserving the order of characters. No two characters may map to the same
# character, but a character may map to itself.
#
#
# Example 1:
# Input: s = "egg", t = "add"
# Output: true
# Example 2:
# Input: s = "foo", t = "bar"
# Output: false
# Example 3:
# Input: s = "paper", t = "title"
# Output: true
#
#
# Constraints:
#
#
# 1 <= s.length <= 5 * 10^4
# t.length == s.length
# s and t consist of any valid ascii character.
#
#
#
# @lc code=start
class Solution:
def isIsomorphic(self, s: str, t: str) -> bool:
if not s or not t or len(s) == 0 or len(t) == 0 or len(s) != len(t):
return False
s2t, t2s = {}, {}
n = len(s)
for i in range(n):
if s[i] not in s2t:
if t[i] in t2s and t2s[t[i]] != s[i]:
return False
s2t[s[i]] = t[i]
t2s[t[i]] = s[i]
else:
# mapping contains s[i]
if t[i] != s2t[s[i]]:
return False
return True
# @lc code=end
|
class Solution:
def is_isomorphic(self, s: str, t: str) -> bool:
if not s or not t or len(s) == 0 or (len(t) == 0) or (len(s) != len(t)):
return False
(s2t, t2s) = ({}, {})
n = len(s)
for i in range(n):
if s[i] not in s2t:
if t[i] in t2s and t2s[t[i]] != s[i]:
return False
s2t[s[i]] = t[i]
t2s[t[i]] = s[i]
elif t[i] != s2t[s[i]]:
return False
return True
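# An alternative sketch of the same check: two equal-length strings are
# isomorphic exactly when the counts of distinct characters in s, in t,
# and of distinct (s, t) character pairs all agree.
def is_isomorphic_sets(s: str, t: str) -> bool:
    return len(s) == len(t) and len(set(s)) == len(set(t)) == len(set(zip(s, t)))
print(is_isomorphic_sets('egg', 'add'))
print(is_isomorphic_sets('foo', 'bar'))
print(is_isomorphic_sets('paper', 'title'))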
|
class Solution:
def maxAreaOfIsland(self, grid: List[List[int]]) -> int:
grid = [[0,0,1,0,0,0,0,1,0,0,0,0,0],[0,0,0,0,0,0,0,1,1,1,0,0,0],[0,1,1,0,1,0,0,0,0,0,0,0,0],[0,1,0,0,1,1,0,0,1,0,1,0,0],[0,1,0,0,1,1,0,0,1,1,1,0,0],[0,0,0,0,0,0,0,0,0,0,1,0,0],[0,0,0,0,0,0,0,1,1,1,0,0,0],[0,0,0,0,0,0,0,1,1,0,0,0,0]]
area_list = []
image_y = len(grid)
image_x = len(grid[0])
initial_x = [0] * image_x
visit = []
# initial_y, visit, current, neighbor, image = [], [], [], [], []
for i in range(image_y):
# initial_y.append(initial_x.copy())
# current.append(initial_x.copy())
visit.append(initial_x.copy())
# neighbor.append(initial_x.copy())
for p in range(image_y):
for q in range(image_x):
if grid[p][q] == 1 and visit[p][q] != 1:
# initial_x = [0] * image_x
current, neighbor, image = [], [], []
for i in range(image_y):
current.append(initial_x.copy())
image.append(initial_x.copy())
neighbor.append(initial_x.copy())
neighbor[p][q] = 1
while 1:
current = neighbor.copy()
neighbor_list = []
for i in range(image_y):
for j in range(image_x):
if current[i][j] == 1:
if i - 1 >= 0:
if visit[i - 1][j] != 1:
neighbor_list.append([i - 1, j])
visit[i - 1][j] = 1
if i + 1 <= image_y - 1:
if visit[i + 1][j] != 1:
neighbor_list.append([i + 1, j])
visit[i + 1][j] = 1
if j - 1 >= 0:
if visit[i][j - 1] != 1:
neighbor_list.append([i, j - 1])
visit[i][j - 1] = 1
if j + 1 <= image_x - 1:
if visit[i][j + 1] != 1:
neighbor_list.append([i, j + 1])
visit[i][j + 1] = 1
if neighbor_list == []:
area = 0
for i in image:
area += sum(i)
# area = sum(image)
break
neighbor = []
for i in range(image_y):
neighbor.append(initial_x.copy())
for i, j in enumerate(neighbor_list):
r = j[0]
c = j[1]
if grid[r][c] == 1:
image[r][c] = 1
neighbor[r][c] = 1
area_list.append(area)
|
class Solution:
def max_area_of_island(self, grid: List[List[int]]) -> int:
grid = [[0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0], [0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0], [0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 0], [0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0], [0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0]]
area_list = []
image_y = len(grid)
image_x = len(grid[0])
initial_x = [0] * image_x
visit = []
for i in range(image_y):
visit.append(initial_x.copy())
for p in range(image_y):
for q in range(image_x):
if grid[p][q] == 1 and visit[p][q] != 1:
(current, neighbor, image) = ([], [], [])
for i in range(image_y):
current.append(initial_x.copy())
image.append(initial_x.copy())
neighbor.append(initial_x.copy())
neighbor[p][q] = 1
while 1:
current = neighbor.copy()
neighbor_list = []
for i in range(image_y):
for j in range(image_x):
if current[i][j] == 1:
if i - 1 >= 0:
if visit[i - 1][j] != 1:
neighbor_list.append([i - 1, j])
visit[i - 1][j] = 1
if i + 1 <= image_y - 1:
if visit[i + 1][j] != 1:
neighbor_list.append([i + 1, j])
visit[i + 1][j] = 1
if j - 1 >= 0:
if visit[i][j - 1] != 1:
neighbor_list.append([i, j - 1])
visit[i][j - 1] = 1
if j + 1 <= image_x - 1:
if visit[i][j + 1] != 1:
neighbor_list.append([i, j + 1])
visit[i][j + 1] = 1
if neighbor_list == []:
area = 0
for i in image:
area += sum(i)
break
neighbor = []
for i in range(image_y):
neighbor.append(initial_x.copy())
for (i, j) in enumerate(neighbor_list):
r = j[0]
c = j[1]
if grid[r][c] == 1:
image[r][c] = 1
neighbor[r][c] = 1
area_list.append(area)
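# The version above works on a hard-coded grid and collects results in
# area_list without returning them; below is a minimal, self-contained sketch
# of the usual recursive flood fill that returns the largest island area for
# whatever non-empty rectangular 0/1 grid it is given.
def max_area_of_island_dfs(grid):
    rows, cols = len(grid), len(grid[0])
    def fill(r, c):
        # out of bounds or water contributes nothing
        if r < 0 or r >= rows or c < 0 or c >= cols or grid[r][c] != 1:
            return 0
        grid[r][c] = 0  # sink the cell so it is not counted twice
        return 1 + fill(r + 1, c) + fill(r - 1, c) + fill(r, c + 1) + fill(r, c - 1)
    return max((fill(r, c) for r in range(rows) for c in range(cols)), default=0)
example = [[0, 1, 1, 0], [0, 1, 0, 0], [0, 0, 0, 1]]
print(max_area_of_island_dfs(example))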
|
# event manager permissions are set to expire this many days after event ends
CBAC_VALID_AFTER_EVENT_DAYS = 180
# when a superuser overrides permissions, this is how many minutes the temporary permissions last
CBAC_SUDO_VALID_MINUTES = 20
# these claims are used, if present, when sudoing. Note that sudo cannot give you a {} permission
CBAC_SUDO_CLAIMS = ['organization', 'event', 'app']
|
cbac_valid_after_event_days = 180
cbac_sudo_valid_minutes = 20
cbac_sudo_claims = ['organization', 'event', 'app']
|
def is_isogram(string):
found = []
for letter in string.lower():
if letter in found:
return False
if letter.isalpha():
found.append(letter)
return True
|
def is_isogram(string):
found = []
for letter in string.lower():
if letter in found:
return False
if letter.isalpha():
found.append(letter)
return True
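# The same isogram test phrased with a set, which states the intent
# (no repeated letters) directly; a small sketch:
def is_isogram_set(string):
    letters = [c for c in string.lower() if c.isalpha()]
    return len(letters) == len(set(letters))
print(is_isogram_set('lumberjacks'))
print(is_isogram_set('isograms'))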
|
class speedadjustclass():
def __init__(self):
self.speedadjust = 1.0
return
def speedincrease(self):
self.speedadjust = round(min(3.0, self.speedadjust + 0.05), 2)
print("In speedincrease",self.speedadjust)
def speeddecrease(self):
self.speedadjust = round(max(0.5, self.speedadjust - 0.05), 2)
print("In speeddecrease",self.speedadjust)
def run(self):
return self.speedadjust
|
class Speedadjustclass:
def __init__(self):
self.speedadjust = 1.0
return
def speedincrease(self):
self.speedadjust = round(min(3.0, self.speedadjust + 0.05), 2)
print('In speedincrease', self.speedadjust)
def speeddecrease(self):
self.speedadjust = round(max(0.5, self.speedadjust - 0.05), 2)
print('In speeddecrease', self.speedadjust)
def run(self):
return self.speedadjust
|
# getting input from user and pars it to the integer
your_weight = input("Enter your Weight in kg: ")
print(type(your_weight))
# to parse value of variable, we have to put it in seperate line or put it equal new variable
int_weight_parser = int(your_weight)
print(type(int_weight_parser))
# formatted String
first_name = "pooya"
last_name = "panahandeh"
message = f'mr. {first_name} {last_name}, welcome to the python world.'
print(message)
# print the lenght of the string
print(len(message))
# find special element in string
print(message.find('p'))
# replace string with another one
print(message.replace('python', 'your python'))
# boolean string function to check our string for specific value, the result will be true or false.
print('python' in message) # the result will be true.
|
your_weight = input('Enter your Weight in kg: ')
print(type(your_weight))
int_weight_parser = int(your_weight)
print(type(int_weight_parser))
first_name = 'pooya'
last_name = 'panahandeh'
message = f'mr. {first_name} {last_name}, welcome to the python world.'
print(message)
print(len(message))
print(message.find('p'))
print(message.replace('python', 'your python'))
print('python' in message)
|
a = 200
b = 33
c = 500
if a > b and c > a:
print("Both conditions are True")
|
a = 200
b = 33
c = 500
if a > b and c > a:
print('Both conditions are True')
|
#print is function when we want to print something on output
print("My name is Dhruv")
#You will notice something strange if you try to print any directory
#print("C:\Users\dhruv\Desktop\dhruv.github.io")
#Yes unicodeescape error
# Remember i told about escape character on previous tutorial
# yes it causing problems
# now place "r" in starting of sentence
print(r"C:\Users\dhruv\Desktop\dhruv.github.io")
#yes it is printed
# what what r means ? r means Rush string
# it means that " take the string as it , take no special meaning in this perticular STRING "
# One amazing thing you can do is , string can be store in variables
#You can also Add and Multiply strings
myname = "Dhruv "
myname + "Patel"
myname * 5
# now press run
# Do check my shell file for refrence
|
print('My name is Dhruv')
print('C:\\Users\\dhruv\\Desktop\\dhruv.github.io')
myname = 'Dhruv '
myname + 'Patel'
myname * 5
|
SECRET_KEY = '-dummy-key-'
INSTALLED_APPS = [
'pgcomments',
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
},
}
|
secret_key = '-dummy-key-'
installed_apps = ['pgcomments']
databases = {'default': {'ENGINE': 'django.db.backends.postgresql_psycopg2'}}
|
names = ["libquadmath0", "libssl1.0.0"]
status = {"libquadmath0":
{"Name": "libquadmath0",
"Dependencies": ["gcc-5-base", "libc6"],
"Description": "GCC Quad-Precision Math Library<br/> A library, which provides quad-precision mathematical functions on targets<br/> supporting the __float128 datatype. The library is used to provide on such<br/> targets the REAL(16) type in the GNU Fortran compiler.<br/>",
"Need me": ["libssl1.0.0"]},
"libssl1.0.0":
{"Name": "libssl1.0.0",
"Dependencies": ["libc6", "zlib1g", "libquadmath0"],
"Description": "SSL shared libraries<br/> libssl and libcrypto shared libraries needed by programs like<br/> apache-ssl, telnet-ssl and openssh.<br/> .<br/> It is part of the OpenSSL implementation of SSL.<br/>",
"Alternatives": ["debconf"]}
}
unsure = {"libssl1.0.0": [" debconf (>= 0.5) ", " libquadmath0\n"]}
before_alt = {"libquadmath0":
{"Name": "libquadmath0",
"Dependencies": ["gcc-5-base", "libc6"],
"Description": "GCC Quad-Precision Math Library<br/> A library, which provides quad-precision mathematical functions on targets<br/> supporting the __float128 datatype. The library is used to provide on such<br/> targets the REAL(16) type in the GNU Fortran compiler.<br/>"},
"libssl1.0.0":
{"Name": "libssl1.0.0",
"Dependencies": ["libc6", "zlib1g"],
"Description": "SSL shared libraries<br/> libssl and libcrypto shared libraries needed by programs like<br/> apache-ssl, telnet-ssl and openssh.<br/> .<br/> It is part of the OpenSSL implementation of SSL.<br/>"}
}
before_need = {"libquadmath0":
{"Name": "libquadmath0",
"Dependencies": ["gcc-5-base", "libc6"],
"Description": "GCC Quad-Precision Math Library<br/> A library, which provides quad-precision mathematical functions on targets<br/> supporting the __float128 datatype. The library is used to provide on such<br/> targets the REAL(16) type in the GNU Fortran compiler.<br/>"},
"libssl1.0.0":
{"Name": "libssl1.0.0",
"Dependencies": ["libc6", "zlib1g", "libquadmath0"],
"Description": "SSL shared libraries<br/> libssl and libcrypto shared libraries needed by programs like<br/> apache-ssl, telnet-ssl and openssh.<br/> .<br/> It is part of the OpenSSL implementation of SSL.<br/>"}
}
lines = ["Package: libquadmath0\n",
"Status: install ok installed\n",
"Priority: optional\n", "Section: libs\n",
"Installed-Size: 265\n",
"Maintainer: Ubuntu Core developers <[email protected]>\n",
"Architecture: amd64\n", "Multi-Arch: same\n",
"Source: gcc-5\n", "Version: 5.4.0-6ubuntu1~16.04.12\n",
"Depends: gcc-5-base (= 5.4.0-6ubuntu1~16.04.12), libc6 (>= 2.23)\n",
"Description: GCC Quad-Precision Math Library\n",
" A library, which provides quad-precision mathematical functions on targets\n",
" supporting the __float128 datatype. The library is used to provide on such\n",
" targets the REAL(16) type in the GNU Fortran compiler.\n",
"Homepage: http://gcc.gnu.org/\n",
"Original-Maintainer: Debian GCC Maintainers <[email protected]>\n",
"\n",
"Package: libssl1.0.0\n",
"Status: install ok installed\n",
"Multi-Arch: same\n",
"Priority: important\n",
"Section: libs\n",
"Installed-Size: 2836\n",
"Maintainer: Ubuntu Developers <[email protected]>\n",
"Architecture: amd64\n",
"Source: openssl\n",
"Version: 1.0.1-4ubuntu5.5\n",
"Depends: libc6 (>= 2.14), zlib1g (>= 1:1.1.4), debconf (>= 0.5) | libquadmath0\n",
"Pre-Depends: multiarch-support\n",
"Breaks: openssh-client (<< 1:5.9p1-4), openssh-server (<< 1:5.9p1-4)\n",
"Description: SSL shared libraries\n",
" libssl and libcrypto shared libraries needed by programs like\n",
" apache-ssl, telnet-ssl and openssh.\n",
" .\n",
" It is part of the OpenSSL implementation of SSL.\n",
"Original-Maintainer: Debian OpenSSL Team <[email protected]>\n",
"\n"]
|
names = ['libquadmath0', 'libssl1.0.0']
status = {'libquadmath0': {'Name': 'libquadmath0', 'Dependencies': ['gcc-5-base', 'libc6'], 'Description': 'GCC Quad-Precision Math Library<br/> A library, which provides quad-precision mathematical functions on targets<br/> supporting the __float128 datatype. The library is used to provide on such<br/> targets the REAL(16) type in the GNU Fortran compiler.<br/>', 'Need me': ['libssl1.0.0']}, 'libssl1.0.0': {'Name': 'libssl1.0.0', 'Dependencies': ['libc6', 'zlib1g', 'libquadmath0'], 'Description': 'SSL shared libraries<br/> libssl and libcrypto shared libraries needed by programs like<br/> apache-ssl, telnet-ssl and openssh.<br/> .<br/> It is part of the OpenSSL implementation of SSL.<br/>', 'Alternatives': ['debconf']}}
unsure = {'libssl1.0.0': [' debconf (>= 0.5) ', ' libquadmath0\n']}
before_alt = {'libquadmath0': {'Name': 'libquadmath0', 'Dependencies': ['gcc-5-base', 'libc6'], 'Description': 'GCC Quad-Precision Math Library<br/> A library, which provides quad-precision mathematical functions on targets<br/> supporting the __float128 datatype. The library is used to provide on such<br/> targets the REAL(16) type in the GNU Fortran compiler.<br/>'}, 'libssl1.0.0': {'Name': 'libssl1.0.0', 'Dependencies': ['libc6', 'zlib1g'], 'Description': 'SSL shared libraries<br/> libssl and libcrypto shared libraries needed by programs like<br/> apache-ssl, telnet-ssl and openssh.<br/> .<br/> It is part of the OpenSSL implementation of SSL.<br/>'}}
before_need = {'libquadmath0': {'Name': 'libquadmath0', 'Dependencies': ['gcc-5-base', 'libc6'], 'Description': 'GCC Quad-Precision Math Library<br/> A library, which provides quad-precision mathematical functions on targets<br/> supporting the __float128 datatype. The library is used to provide on such<br/> targets the REAL(16) type in the GNU Fortran compiler.<br/>'}, 'libssl1.0.0': {'Name': 'libssl1.0.0', 'Dependencies': ['libc6', 'zlib1g', 'libquadmath0'], 'Description': 'SSL shared libraries<br/> libssl and libcrypto shared libraries needed by programs like<br/> apache-ssl, telnet-ssl and openssh.<br/> .<br/> It is part of the OpenSSL implementation of SSL.<br/>'}}
lines = ['Package: libquadmath0\n', 'Status: install ok installed\n', 'Priority: optional\n', 'Section: libs\n', 'Installed-Size: 265\n', 'Maintainer: Ubuntu Core developers <[email protected]>\n', 'Architecture: amd64\n', 'Multi-Arch: same\n', 'Source: gcc-5\n', 'Version: 5.4.0-6ubuntu1~16.04.12\n', 'Depends: gcc-5-base (= 5.4.0-6ubuntu1~16.04.12), libc6 (>= 2.23)\n', 'Description: GCC Quad-Precision Math Library\n', ' A library, which provides quad-precision mathematical functions on targets\n', ' supporting the __float128 datatype. The library is used to provide on such\n', ' targets the REAL(16) type in the GNU Fortran compiler.\n', 'Homepage: http://gcc.gnu.org/\n', 'Original-Maintainer: Debian GCC Maintainers <[email protected]>\n', '\n', 'Package: libssl1.0.0\n', 'Status: install ok installed\n', 'Multi-Arch: same\n', 'Priority: important\n', 'Section: libs\n', 'Installed-Size: 2836\n', 'Maintainer: Ubuntu Developers <[email protected]>\n', 'Architecture: amd64\n', 'Source: openssl\n', 'Version: 1.0.1-4ubuntu5.5\n', 'Depends: libc6 (>= 2.14), zlib1g (>= 1:1.1.4), debconf (>= 0.5) | libquadmath0\n', 'Pre-Depends: multiarch-support\n', 'Breaks: openssh-client (<< 1:5.9p1-4), openssh-server (<< 1:5.9p1-4)\n', 'Description: SSL shared libraries\n', ' libssl and libcrypto shared libraries needed by programs like\n', ' apache-ssl, telnet-ssl and openssh.\n', ' .\n', ' It is part of the OpenSSL implementation of SSL.\n', 'Original-Maintainer: Debian OpenSSL Team <[email protected]>\n', '\n']
|
S = input()
scale_list = ["Do", "", "Re", "", "Mi", "Fa", "", "So", "", "La", "", "Si"]
order = "WBWBWWBWBWBW" * 3
print(scale_list[order.find(S)])
|
s = input()
scale_list = ['Do', '', 'Re', '', 'Mi', 'Fa', '', 'So', '', 'La', '', 'Si']
order = 'WBWBWWBWBWBW' * 3
print(scale_list[order.find(s)])
|
class Solution:
def solve(self, nums):
uniques = set()
j = 0
ans = 0
for i in range(len(nums)):
while j < len(nums) and nums[j] not in uniques:
uniques.add(nums[j])
j += 1
ans = max(ans, len(uniques))
uniques.remove(nums[i])
return ans
|
class Solution:
def solve(self, nums):
uniques = set()
j = 0
ans = 0
for i in range(len(nums)):
while j < len(nums) and nums[j] not in uniques:
uniques.add(nums[j])
j += 1
ans = max(ans, len(uniques))
uniques.remove(nums[i])
return ans
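# Quick usage check for the sliding-window solve() above: the longest run of
# distinct values in this list is [2, 3, 4, 5], so the expected answer is 4.
print(Solution().solve([1, 2, 2, 3, 4, 5, 3]))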
|
MUSHISHI_ID = 457
FULLMETAL_ID = 25
GINKO_ID = 425
KANA_HANAZAWA_ID = 185
YEAR = 2018
SEASON = "winter"
DAY = "monday"
TYPE = "anime"
SUBTYPE = "tv"
GENRE = 1
PRODUCER = 37
MAGAZINE = 83
USERNAME = "Nekomata1037"
CLUB_ID = 379
|
mushishi_id = 457
fullmetal_id = 25
ginko_id = 425
kana_hanazawa_id = 185
year = 2018
season = 'winter'
day = 'monday'
type = 'anime'
subtype = 'tv'
genre = 1
producer = 37
magazine = 83
username = 'Nekomata1037'
club_id = 379
|
def setup():
size(500,500)
smooth()
background(50)
strokeWeight(5)
stroke(250)
noLoop()
cx=250
cy=250
cR=200
i=0
def draw():
global cx,cy, cR, i
while i < 2*PI:
i +=PI/6
x1 = cos(i)*cR+cx
y1 = sin(i)*cR+cy
line(x1,y1,x1,y1)
line(cx,cy,cx,cy)
def keyPressed():
if key =="s":
saveFrame("Photo")
|
def setup():
size(500, 500)
smooth()
background(50)
stroke_weight(5)
stroke(250)
no_loop()
cx = 250
cy = 250
c_r = 200
i = 0
def draw():
    global cx, cy, c_r, i
    while i < 2 * PI:
        i += PI / 6
        x1 = cos(i) * c_r + cx
        y1 = sin(i) * c_r + cy
line(x1, y1, x1, y1)
line(cx, cy, cx, cy)
def key_pressed():
if key == 's':
save_frame('Photo')
|
# Here is the code from the generators2.py file
# From the demo
# Can you refactor any or all of it to use comprehensions?
# More chaining
# Courtesy of my friend Jim Prior
def gen_fibonacci():
a, b = 0, 1
while True:
a, b = b, a + b
yield b
def gen_even(gen):
return (number for number in gen if number % 2 == 0)
def error():
raise StopIteration
def gen_lte(gen, max):
return (error() if number > max else number for number in gen)
# Now it's easy to combine generators in different ways.
for num in gen_lte(gen_even(gen_fibonacci()), 20):
print(num)
print("")
for num in gen_lte(gen_fibonacci(), 1000):
print(num)
|
def gen_fibonacci():
(a, b) = (0, 1)
while True:
(a, b) = (b, a + b)
yield b
def gen_even(gen):
return (number for number in gen if number % 2 == 0)
def error():
raise StopIteration
def gen_lte(gen, max):
return (error() if number > max else number for number in gen)
for num in gen_lte(gen_even(gen_fibonacci()), 20):
print(num)
print('')
for num in gen_lte(gen_fibonacci(), 1000):
print(num)
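# One caveat with the error() trick in gen_lte(): since PEP 479 (Python 3.7),
# a StopIteration raised inside a generator expression is converted into a
# RuntimeError, so the pipeline no longer stops cleanly at the cut-off.
# A sketch of the same cut-off using itertools.takewhile instead:
from itertools import takewhile
def gen_lte_takewhile(gen, max_value):
    # yield values from gen only while they stay <= max_value
    return takewhile(lambda number: number <= max_value, gen)
for num in gen_lte_takewhile(gen_even(gen_fibonacci()), 20):
    print(num)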
|
def main():
value = 1
if value == 0:
print("False")
elif value == 1:
print("True")
else:
print("Undefined")
if __name__ == "__main__":
main()
|
def main():
value = 1
if value == 0:
print('False')
elif value == 1:
print('True')
else:
print('Undefined')
if __name__ == '__main__':
main()
|
# -*- coding: UTF-8 -*-
logger.info("Loading 16 objects to table ledger_matchrule...")
# fields: id, account, journal
loader.save(create_ledger_matchrule(1,2,1))
loader.save(create_ledger_matchrule(2,2,2))
loader.save(create_ledger_matchrule(3,4,3))
loader.save(create_ledger_matchrule(4,2,4))
loader.save(create_ledger_matchrule(5,4,4))
loader.save(create_ledger_matchrule(6,17,4))
loader.save(create_ledger_matchrule(7,2,5))
loader.save(create_ledger_matchrule(8,4,5))
loader.save(create_ledger_matchrule(9,17,5))
loader.save(create_ledger_matchrule(10,2,6))
loader.save(create_ledger_matchrule(11,4,6))
loader.save(create_ledger_matchrule(12,17,6))
loader.save(create_ledger_matchrule(13,2,7))
loader.save(create_ledger_matchrule(14,4,7))
loader.save(create_ledger_matchrule(15,17,7))
loader.save(create_ledger_matchrule(16,6,8))
loader.flush_deferred_objects()
|
logger.info('Loading 16 objects to table ledger_matchrule...')
loader.save(create_ledger_matchrule(1, 2, 1))
loader.save(create_ledger_matchrule(2, 2, 2))
loader.save(create_ledger_matchrule(3, 4, 3))
loader.save(create_ledger_matchrule(4, 2, 4))
loader.save(create_ledger_matchrule(5, 4, 4))
loader.save(create_ledger_matchrule(6, 17, 4))
loader.save(create_ledger_matchrule(7, 2, 5))
loader.save(create_ledger_matchrule(8, 4, 5))
loader.save(create_ledger_matchrule(9, 17, 5))
loader.save(create_ledger_matchrule(10, 2, 6))
loader.save(create_ledger_matchrule(11, 4, 6))
loader.save(create_ledger_matchrule(12, 17, 6))
loader.save(create_ledger_matchrule(13, 2, 7))
loader.save(create_ledger_matchrule(14, 4, 7))
loader.save(create_ledger_matchrule(15, 17, 7))
loader.save(create_ledger_matchrule(16, 6, 8))
loader.flush_deferred_objects()
|
#!/bin/python3
# https://www.hackerrank.com/challenges/py-check-subset/problem
# Author : Sagar Malik ([email protected])
n = int(input())
for _ in range(n):
K = int(input())
first = set(input().split())
t = int(input())
second = set(input().split())
print(len(first-second) == 0)
|
n = int(input())
for _ in range(n):
k = int(input())
first = set(input().split())
t = int(input())
second = set(input().split())
print(len(first - second) == 0)
|
class Pipelines(object):
def __init__(self, client):
self._client = client
def get_pipeline(self, pipeline_id, **kwargs):
url = 'pipelines/{}'.format(pipeline_id)
return self._client._get(self._client.BASE_URL + url, **kwargs)
def get_all_pipelines(self, **kwargs):
url = 'pipelines'
return self._client._get(self._client.BASE_URL + url, **kwargs)
def get_pipeline_deals(self, pipeline_id, **kwargs):
url = 'pipelines/{}/deals'.format(pipeline_id)
return self._client._get(self._client.BASE_URL + url, **kwargs)
|
class Pipelines(object):
def __init__(self, client):
self._client = client
def get_pipeline(self, pipeline_id, **kwargs):
url = 'pipelines/{}'.format(pipeline_id)
return self._client._get(self._client.BASE_URL + url, **kwargs)
def get_all_pipelines(self, **kwargs):
url = 'pipelines'
return self._client._get(self._client.BASE_URL + url, **kwargs)
def get_pipeline_deals(self, pipeline_id, **kwargs):
url = 'pipelines/{}/deals'.format(pipeline_id)
return self._client._get(self._client.BASE_URL + url, **kwargs)
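# Rough usage sketch for the wrapper above. FakeClient is hypothetical; it only
# mimics the two things Pipelines relies on, a BASE_URL attribute and a
# _get(url, **kwargs) method, so the real client class may look different.
class FakeClient:
    BASE_URL = 'https://example.invalid/v1/'
    def _get(self, url, **kwargs):
        # a real client would perform an HTTP GET; here we just echo the URL
        return {'requested': url, 'params': kwargs}
pipelines = Pipelines(FakeClient())
print(pipelines.get_pipeline(42))
print(pipelines.get_pipeline_deals(42, status='open'))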
|
def grow_plants(db, messenger, object):
#
# grow plant
db.increment_property_of_component('plant', object['entity'], 'growth', object['growth_rate'])
return []
def ripen_fruit(db, messenger, object):
db.increment_property_of_component('plant', object['entity'], 'fruit_growth', object['fruit_growth_rate'])
return []
|
def grow_plants(db, messenger, object):
db.increment_property_of_component('plant', object['entity'], 'growth', object['growth_rate'])
return []
def ripen_fruit(db, messenger, object):
db.increment_property_of_component('plant', object['entity'], 'fruit_growth', object['fruit_growth_rate'])
return []
|
for c in range(1,50):
if c%2==0:
print('.',end='')
print(c,end=' ')
|
for c in range(1, 50):
if c % 2 == 0:
print('.', end='')
print(c, end=' ')
|
# input sell price
a = input("Input Final Sale Price")
# input P&P cost
b = input("Input P&P Costs")
# add a & b together to get total
# fees = total * 0.128 + 0.3 //12.8% + 30p
# total - fees = profit
# output total
# output fees
# output profit
# output description explaining forumla
# output note explaining that fees are charged on P&P as well as sale price.
|
a = input('Input Final Sale Price')
b = input('Input P&P Costs')
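# The first version above lays out the fee arithmetic in comments but never
# implements it; a minimal sketch of that formula (12.8% of sale price plus
# P&P, plus a 30p fixed fee; the figures come from those comments, not from
# current eBay pricing).
sale_price = float(input('Input Final Sale Price'))
postage = float(input('Input P&P Costs'))
total = sale_price + postage
fees = total * 0.128 + 0.30  # fees are charged on P&P as well as the sale price
profit = total - fees
print('Total:', total)
print('Fees:', fees)
print('Profit:', profit)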
|
class Solution:
def isPalindrome(self, x: int) -> bool:
if x < 0 or (not x % 10 and x):
return False
rev = 0
while x > rev:
rev = rev * 10 + x % 10
x //= 10
return rev == x or rev//10 == x
|
class Solution:
def is_palindrome(self, x: int) -> bool:
if x < 0 or (not x % 10 and x):
return False
rev = 0
while x > rev:
rev = rev * 10 + x % 10
x //= 10
return rev == x or rev // 10 == x
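# A few spot checks for the reverse-half approach above: odd-length palindromes
# hit the rev // 10 == x branch, while negatives and trailing zeros are
# rejected before the loop.
checker = Solution()
print(checker.is_palindrome(121))
print(checker.is_palindrome(1221))
print(checker.is_palindrome(-121))
print(checker.is_palindrome(10))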
|
valorc = float(input('Qual o valor da Casa? R$ '))
salario = float(input('Qual o valor do salario? R$'))
anos = int(input('Em quantos anos deseja pagar? '))
prest = valorc / (anos * 12)
if prest > (salario * (30/100)):
print('Fincanciamento Negado')
else:
print('Financiamento Autorizado')
|
valorc = float(input('Qual o valor da Casa? R$ '))
salario = float(input('Qual o valor do salario? R$'))
anos = int(input('Em quantos anos deseja pagar? '))
prest = valorc / (anos * 12)
if prest > salario * (30 / 100):
print('Fincanciamento Negado')
else:
print('Financiamento Autorizado')
|
'''
Unit tests module for PaPaS module
'''
__all__ = []
|
"""
Unit tests module for PaPaS module
"""
__all__ = []
|
N = int(input())
for i in range(N):
n, k = map(int, input().split())
ranges = {n: 1}
max_range = 0
while k > 0:
max_range, count_range = max(ranges.items())
if k > count_range:
k -= count_range
del ranges[max_range]
range_1, range_2 = (max_range - 1)//2, max_range//2
ranges[range_1] = ranges.get(range_1, 0) + count_range
ranges[range_2] = ranges.get(range_2, 0) + count_range
else:
print('Case #{}: {} {}'.format(i + 1, max_range//2, (max_range - 1)//2, ))
break
|
n = int(input())
for i in range(n):
(n, k) = map(int, input().split())
ranges = {n: 1}
max_range = 0
while k > 0:
(max_range, count_range) = max(ranges.items())
if k > count_range:
k -= count_range
del ranges[max_range]
(range_1, range_2) = ((max_range - 1) // 2, max_range // 2)
ranges[range_1] = ranges.get(range_1, 0) + count_range
ranges[range_2] = ranges.get(range_2, 0) + count_range
else:
print('Case #{}: {} {}'.format(i + 1, max_range // 2, (max_range - 1) // 2))
break
|
alpha_num_dict = {
'a':1,
'b':2,
'c':3
}
|
alpha_num_dict = {'a': 1, 'b': 2, 'c': 3}
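# If the mapping is meant to cover the whole alphabet (the snippet only defines
# a-c), a comprehension over string.ascii_lowercase can generate it; one sketch:
import string
alpha_num_dict_full = {letter: index for index, letter in enumerate(string.ascii_lowercase, start=1)}
print(alpha_num_dict_full['c'])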
|
# Creating an empty Tuple
Tuple1 = ()
print("Initial empty Tuple: ")
print(Tuple1)
A=(1,2,3,4)
B=('a','b','c')
C=(5,6,7,8)
#second tuple
print(A,'length= ',len(A))
print(B,'length= ',len(B))
print(A<C)
print(A+C)
print(max(A))
print(min(B))
tuple('hey')
'good'*3
|
tuple1 = ()
print('Initial empty Tuple: ')
print(tuple1)
a = (1, 2, 3, 4)
b = ('a', 'b', 'c')
c = (5, 6, 7, 8)
print(a, 'length= ', len(a))
print(b, 'length= ', len(b))
print(a < c)
print(a + c)
print(max(a))
print(min(b))
tuple('hey')
'good' * 3
|
#
# PySNMP MIB module DSA-MIB (http://pysnmp.sf.net)
# ASN.1 source http://mibs.snmplabs.com:80/asn1/DSA-MIB
# Produced by pysmi-0.0.7 at Sun Feb 14 00:11:07 2016
# On host bldfarm platform Linux version 4.1.13-100.fc21.x86_64 by user goose
# Using Python version 3.5.0 (default, Jan 5 2016, 17:11:52)
#
( Integer, ObjectIdentifier, OctetString, ) = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
( NamedValues, ) = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
( SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint, ConstraintsIntersection, ConstraintsUnion, ) = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsIntersection", "ConstraintsUnion")
( DistinguishedName, applIndex, ) = mibBuilder.importSymbols("NETWORK-SERVICES-MIB", "DistinguishedName", "applIndex")
( NotificationGroup, ModuleCompliance, ObjectGroup, ) = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance", "ObjectGroup")
( MibScalar, MibTable, MibTableRow, MibTableColumn, Unsigned32, Gauge32, iso, NotificationType, Bits, Counter32, mib_2, ModuleIdentity, Integer32, ObjectIdentity, IpAddress, TimeTicks, MibIdentifier, Counter64, ) = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Unsigned32", "Gauge32", "iso", "NotificationType", "Bits", "Counter32", "mib-2", "ModuleIdentity", "Integer32", "ObjectIdentity", "IpAddress", "TimeTicks", "MibIdentifier", "Counter64")
( DisplayString, TimeStamp, TextualConvention, ) = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TimeStamp", "TextualConvention")
dsaMIB = ModuleIdentity((1, 3, 6, 1, 2, 1, 29))
if mibBuilder.loadTexts: dsaMIB.setLastUpdated('9311250000Z')
if mibBuilder.loadTexts: dsaMIB.setOrganization('IETF Mail and Directory Management Working\n Group')
if mibBuilder.loadTexts: dsaMIB.setContactInfo(' Glenn Mansfield\n\n Postal: AIC Systems Laboratory\n 6-6-3, Minami Yoshinari\n Aoba-ku, Sendai, 989-32\n JP\n\n Tel: +81 22 279 3310\n Fax: +81 22 279 3640\n E-Mail: [email protected]')
if mibBuilder.loadTexts: dsaMIB.setDescription(' The MIB module for monitoring Directory System Agents.')
dsaOpsTable = MibTable((1, 3, 6, 1, 2, 1, 29, 1), )
if mibBuilder.loadTexts: dsaOpsTable.setDescription(' The table holding information related to the\n DSA operations.')
dsaOpsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 29, 1, 1), ).setIndexNames((0, "NETWORK-SERVICES-MIB", "applIndex"))
if mibBuilder.loadTexts: dsaOpsEntry.setDescription(' Entry containing operations related statistics\n for a DSA.')
dsaAnonymousBinds = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaAnonymousBinds.setDescription(' Number of anonymous binds to this DSA from DUAs\n since application start.')
dsaUnauthBinds = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaUnauthBinds.setDescription(' Number of un-authenticated binds to this\n DSA since application start.')
dsaSimpleAuthBinds = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaSimpleAuthBinds.setDescription(' Number of binds to this DSA that were authenticated\n using simple authentication procedures since\n application start.')
dsaStrongAuthBinds = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaStrongAuthBinds.setDescription(' Number of binds to this DSA that were authenticated\n using the strong authentication procedures since\n application start. This includes the binds that were\n authenticated using external authentication procedures.')
dsaBindSecurityErrors = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaBindSecurityErrors.setDescription(' Number of bind operations that have been rejected\n by this DSA due to inappropriateAuthentication or\n invalidCredentials.')
dsaInOps = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaInOps.setDescription(' Number of operations forwarded to this DSA\n from DUAs or other DSAs since application\n start up.')
dsaReadOps = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaReadOps.setDescription(' Number of read operations serviced by\n this DSA since application startup.')
dsaCompareOps = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaCompareOps.setDescription(' Number of compare operations serviced by\n this DSA since application startup.')
dsaAddEntryOps = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaAddEntryOps.setDescription(' Number of addEntry operations serviced by\n this DSA since application startup.')
dsaRemoveEntryOps = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaRemoveEntryOps.setDescription(' Number of removeEntry operations serviced by\n this DSA since application startup.')
dsaModifyEntryOps = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaModifyEntryOps.setDescription(' Number of modifyEntry operations serviced by\n this DSA since application startup.')
dsaModifyRDNOps = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaModifyRDNOps.setDescription(' Number of modifyRDN operations serviced by\n this DSA since application startup.')
dsaListOps = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaListOps.setDescription(' Number of list operations serviced by\n this DSA since application startup.')
dsaSearchOps = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaSearchOps.setDescription(' Number of search operations- baseObjectSearches,\n oneLevelSearches and subTreeSearches, serviced\n by this DSA since application startup.')
dsaOneLevelSearchOps = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaOneLevelSearchOps.setDescription(' Number of oneLevelSearch operations serviced\n by this DSA since application startup.')
dsaWholeTreeSearchOps = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaWholeTreeSearchOps.setDescription(' Number of wholeTreeSearch operations serviced\n by this DSA since application startup.')
dsaReferrals = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaReferrals.setDescription(' Number of referrals returned by this DSA in response\n to requests for operations since application startup.')
dsaChainings = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaChainings.setDescription(' Number of operations forwarded by this DSA\n to other DSAs since application startup.')
dsaSecurityErrors = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaSecurityErrors.setDescription(' Number of operations forwarded to this DSA\n which did not meet the security requirements. ')
dsaErrors = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 1, 1, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaErrors.setDescription(' Number of operations that could not be serviced\n due to errors other than security errors, and\n referrals.\n A partially serviced operation will not be counted\n as an error.\n The errors include NameErrors, UpdateErrors, Attribute\n errors and ServiceErrors.')
dsaEntriesTable = MibTable((1, 3, 6, 1, 2, 1, 29, 2), )
if mibBuilder.loadTexts: dsaEntriesTable.setDescription(' The table holding information related to the\n\n entry statistics and cache performance of the DSAs.')
dsaEntriesEntry = MibTableRow((1, 3, 6, 1, 2, 1, 29, 2, 1), ).setIndexNames((0, "NETWORK-SERVICES-MIB", "applIndex"))
if mibBuilder.loadTexts: dsaEntriesEntry.setDescription(' Entry containing statistics pertaining to entries\n held by a DSA.')
dsaMasterEntries = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 2, 1, 1), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaMasterEntries.setDescription(' Number of entries mastered in the DSA.')
dsaCopyEntries = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 2, 1, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaCopyEntries.setDescription(' Number of entries for which systematic (slave)\n copies are maintained in the DSA.')
dsaCacheEntries = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 2, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaCacheEntries.setDescription(' Number of entries cached (non-systematic copies) in\n the DSA. This will include the entries that are\n cached partially. The negative cache is not counted.')
dsaCacheHits = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 2, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaCacheHits.setDescription(' Number of operations that were serviced from\n the locally held cache since application\n startup.')
dsaSlaveHits = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 2, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaSlaveHits.setDescription(' Number of operations that were serviced from\n the locally held object replications [ shadow\n entries] since application startup.')
dsaIntTable = MibTable((1, 3, 6, 1, 2, 1, 29, 3), )
if mibBuilder.loadTexts: dsaIntTable.setDescription(' Each row of this table contains some details\n related to the history of the interaction\n of the monitored DSAs with their respective\n peer DSAs.')
dsaIntEntry = MibTableRow((1, 3, 6, 1, 2, 1, 29, 3, 1), ).setIndexNames((0, "NETWORK-SERVICES-MIB", "applIndex"), (0, "DSA-MIB", "dsaIntIndex"))
if mibBuilder.loadTexts: dsaIntEntry.setDescription(' Entry containing interaction details of a DSA\n with a peer DSA.')
dsaIntIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1,2147483647)))
if mibBuilder.loadTexts: dsaIntIndex.setDescription(' Together with applIndex it forms the unique key to\n identify the conceptual row which contains useful info\n on the (attempted) interaction between the DSA (referred\n to by applIndex) and a peer DSA.')
dsaName = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 3, 1, 2), DistinguishedName()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaName.setDescription(' Distinguished Name of the peer DSA to which this\n entry pertains.')
dsaTimeOfCreation = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 3, 1, 3), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaTimeOfCreation.setDescription(' The value of sysUpTime when this row was created.\n If the entry was created before the network management\n subsystem was initialized, this object will contain\n a value of zero.')
dsaTimeOfLastAttempt = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 3, 1, 4), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaTimeOfLastAttempt.setDescription(' The value of sysUpTime when the last attempt was made\n to contact this DSA. If the last attempt was made before\n the network management subsystem was initialized, this\n object will contain a value of zero.')
dsaTimeOfLastSuccess = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 3, 1, 5), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaTimeOfLastSuccess.setDescription(' The value of sysUpTime when the last attempt made to\n contact this DSA was successful. If there have\n been no successful attempts this entry will have a value\n of zero. If the last successful attempt was made before\n the network management subsystem was initialized, this\n object will contain a value of zero.')
dsaFailuresSinceLastSuccess = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 3, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaFailuresSinceLastSuccess.setDescription(' The number of failures since the last time an\n attempt to contact this DSA was successful. If\n there has been no successful attempts, this counter\n will contain the number of failures since this entry\n was created.')
dsaFailures = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 3, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaFailures.setDescription(' Cumulative failures since the creation of\n this entry.')
dsaSuccesses = MibTableColumn((1, 3, 6, 1, 2, 1, 29, 3, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dsaSuccesses.setDescription(' Cumulative successes since the creation of\n this entry.')
dsaConformance = MibIdentifier((1, 3, 6, 1, 2, 1, 29, 4))
dsaGroups = MibIdentifier((1, 3, 6, 1, 2, 1, 29, 4, 1))
dsaCompliances = MibIdentifier((1, 3, 6, 1, 2, 1, 29, 4, 2))
dsaOpsCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 29, 4, 2, 1)).setObjects(*(("DSA-MIB", "dsaOpsGroup"),))
if mibBuilder.loadTexts: dsaOpsCompliance.setDescription('The compliance statement for SNMPv2 entities\n which implement the DSA-MIB for monitoring\n DSA operations.')
dsaEntryCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 29, 4, 2, 2)).setObjects(*(("DSA-MIB", "dsaOpsGroup"), ("DSA-MIB", "dsaEntryGroup"),))
if mibBuilder.loadTexts: dsaEntryCompliance.setDescription('The compliance statement for SNMPv2 entities\n which implement the DSA-MIB for monitoring\n DSA operations, entry statistics and cache\n performance.')
dsaIntCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 29, 4, 2, 3)).setObjects(*(("DSA-MIB", "dsaOpsGroup"), ("DSA-MIB", "dsaIntGroup"),))
if mibBuilder.loadTexts: dsaIntCompliance.setDescription(' The compliance statement for SNMPv2 entities\n which implement the DSA-MIB for monitoring DSA\n operations and the interaction of the DSA with\n peer DSAs.')
dsaOpsGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 29, 4, 1, 1)).setObjects(*(("DSA-MIB", "dsaAnonymousBinds"), ("DSA-MIB", "dsaUnauthBinds"), ("DSA-MIB", "dsaSimpleAuthBinds"), ("DSA-MIB", "dsaStrongAuthBinds"), ("DSA-MIB", "dsaBindSecurityErrors"), ("DSA-MIB", "dsaInOps"), ("DSA-MIB", "dsaReadOps"), ("DSA-MIB", "dsaCompareOps"), ("DSA-MIB", "dsaAddEntryOps"), ("DSA-MIB", "dsaRemoveEntryOps"), ("DSA-MIB", "dsaModifyEntryOps"), ("DSA-MIB", "dsaModifyRDNOps"), ("DSA-MIB", "dsaListOps"), ("DSA-MIB", "dsaSearchOps"), ("DSA-MIB", "dsaOneLevelSearchOps"), ("DSA-MIB", "dsaWholeTreeSearchOps"), ("DSA-MIB", "dsaReferrals"), ("DSA-MIB", "dsaChainings"), ("DSA-MIB", "dsaSecurityErrors"), ("DSA-MIB", "dsaErrors"),))
if mibBuilder.loadTexts: dsaOpsGroup.setDescription(' A collection of objects for monitoring the DSA\n operations.')
dsaEntryGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 29, 4, 1, 2)).setObjects(*(("DSA-MIB", "dsaMasterEntries"), ("DSA-MIB", "dsaCopyEntries"), ("DSA-MIB", "dsaCacheEntries"), ("DSA-MIB", "dsaCacheHits"), ("DSA-MIB", "dsaSlaveHits"),))
if mibBuilder.loadTexts: dsaEntryGroup.setDescription(' A collection of objects for monitoring the DSA\n entry statistics and cache performance.')
dsaIntGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 29, 4, 1, 3)).setObjects(*(("DSA-MIB", "dsaName"), ("DSA-MIB", "dsaTimeOfCreation"), ("DSA-MIB", "dsaTimeOfLastAttempt"), ("DSA-MIB", "dsaTimeOfLastSuccess"), ("DSA-MIB", "dsaFailuresSinceLastSuccess"), ("DSA-MIB", "dsaFailures"), ("DSA-MIB", "dsaSuccesses"),))
if mibBuilder.loadTexts: dsaIntGroup.setDescription(" A collection of objects for monitoring the DSA's\n interaction with peer DSAs.")
mibBuilder.exportSymbols("DSA-MIB", dsaErrors=dsaErrors, dsaOpsGroup=dsaOpsGroup, dsaTimeOfLastSuccess=dsaTimeOfLastSuccess, dsaGroups=dsaGroups, dsaWholeTreeSearchOps=dsaWholeTreeSearchOps, dsaConformance=dsaConformance, dsaOneLevelSearchOps=dsaOneLevelSearchOps, dsaBindSecurityErrors=dsaBindSecurityErrors, dsaOpsEntry=dsaOpsEntry, dsaSuccesses=dsaSuccesses, dsaOpsCompliance=dsaOpsCompliance, dsaSearchOps=dsaSearchOps, dsaMasterEntries=dsaMasterEntries, dsaTimeOfLastAttempt=dsaTimeOfLastAttempt, dsaUnauthBinds=dsaUnauthBinds, dsaEntryCompliance=dsaEntryCompliance, dsaFailuresSinceLastSuccess=dsaFailuresSinceLastSuccess, dsaMIB=dsaMIB, dsaSecurityErrors=dsaSecurityErrors, dsaModifyEntryOps=dsaModifyEntryOps, dsaIntCompliance=dsaIntCompliance, dsaName=dsaName, dsaOpsTable=dsaOpsTable, dsaIntIndex=dsaIntIndex, dsaTimeOfCreation=dsaTimeOfCreation, dsaChainings=dsaChainings, dsaInOps=dsaInOps, dsaCacheEntries=dsaCacheEntries, dsaEntryGroup=dsaEntryGroup, dsaEntriesEntry=dsaEntriesEntry, dsaStrongAuthBinds=dsaStrongAuthBinds, dsaIntEntry=dsaIntEntry, dsaSimpleAuthBinds=dsaSimpleAuthBinds, dsaReadOps=dsaReadOps, dsaRemoveEntryOps=dsaRemoveEntryOps, dsaModifyRDNOps=dsaModifyRDNOps, dsaFailures=dsaFailures, dsaListOps=dsaListOps, dsaCacheHits=dsaCacheHits, dsaIntTable=dsaIntTable, dsaEntriesTable=dsaEntriesTable, PYSNMP_MODULE_ID=dsaMIB, dsaCompliances=dsaCompliances, dsaCompareOps=dsaCompareOps, dsaCopyEntries=dsaCopyEntries, dsaSlaveHits=dsaSlaveHits, dsaAnonymousBinds=dsaAnonymousBinds, dsaIntGroup=dsaIntGroup, dsaReferrals=dsaReferrals, dsaAddEntryOps=dsaAddEntryOps)
|
(integer, object_identifier, octet_string) = mibBuilder.importSymbols('ASN1', 'Integer', 'ObjectIdentifier', 'OctetString')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(single_value_constraint, value_size_constraint, value_range_constraint, constraints_intersection, constraints_union) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'SingleValueConstraint', 'ValueSizeConstraint', 'ValueRangeConstraint', 'ConstraintsIntersection', 'ConstraintsUnion')
(distinguished_name, appl_index) = mibBuilder.importSymbols('NETWORK-SERVICES-MIB', 'DistinguishedName', 'applIndex')
(notification_group, module_compliance, object_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'NotificationGroup', 'ModuleCompliance', 'ObjectGroup')
(mib_scalar, mib_table, mib_table_row, mib_table_column, unsigned32, gauge32, iso, notification_type, bits, counter32, mib_2, module_identity, integer32, object_identity, ip_address, time_ticks, mib_identifier, counter64) = mibBuilder.importSymbols('SNMPv2-SMI', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'Unsigned32', 'Gauge32', 'iso', 'NotificationType', 'Bits', 'Counter32', 'mib-2', 'ModuleIdentity', 'Integer32', 'ObjectIdentity', 'IpAddress', 'TimeTicks', 'MibIdentifier', 'Counter64')
(display_string, time_stamp, textual_convention) = mibBuilder.importSymbols('SNMPv2-TC', 'DisplayString', 'TimeStamp', 'TextualConvention')
dsa_mib = module_identity((1, 3, 6, 1, 2, 1, 29))
if mibBuilder.loadTexts:
dsaMIB.setLastUpdated('9311250000Z')
if mibBuilder.loadTexts:
dsaMIB.setOrganization('IETF Mail and Directory Management Working\n Group')
if mibBuilder.loadTexts:
dsaMIB.setContactInfo(' Glenn Mansfield\n\n Postal: AIC Systems Laboratory\n 6-6-3, Minami Yoshinari\n Aoba-ku, Sendai, 989-32\n JP\n\n Tel: +81 22 279 3310\n Fax: +81 22 279 3640\n E-Mail: [email protected]')
if mibBuilder.loadTexts:
dsaMIB.setDescription(' The MIB module for monitoring Directory System Agents.')
dsa_ops_table = mib_table((1, 3, 6, 1, 2, 1, 29, 1))
if mibBuilder.loadTexts:
dsaOpsTable.setDescription(' The table holding information related to the\n DSA operations.')
dsa_ops_entry = mib_table_row((1, 3, 6, 1, 2, 1, 29, 1, 1)).setIndexNames((0, 'NETWORK-SERVICES-MIB', 'applIndex'))
if mibBuilder.loadTexts:
dsaOpsEntry.setDescription(' Entry containing operations related statistics\n for a DSA.')
dsa_anonymous_binds = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 1), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaAnonymousBinds.setDescription(' Number of anonymous binds to this DSA from DUAs\n since application start.')
dsa_unauth_binds = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 2), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaUnauthBinds.setDescription(' Number of un-authenticated binds to this\n DSA since application start.')
dsa_simple_auth_binds = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 3), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaSimpleAuthBinds.setDescription(' Number of binds to this DSA that were authenticated\n using simple authentication procedures since\n application start.')
dsa_strong_auth_binds = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 4), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaStrongAuthBinds.setDescription(' Number of binds to this DSA that were authenticated\n using the strong authentication procedures since\n application start. This includes the binds that were\n authenticated using external authentication procedures.')
dsa_bind_security_errors = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 5), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaBindSecurityErrors.setDescription(' Number of bind operations that have been rejected\n by this DSA due to inappropriateAuthentication or\n invalidCredentials.')
dsa_in_ops = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 6), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaInOps.setDescription(' Number of operations forwarded to this DSA\n from DUAs or other DSAs since application\n start up.')
dsa_read_ops = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 7), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaReadOps.setDescription(' Number of read operations serviced by\n this DSA since application startup.')
dsa_compare_ops = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 8), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaCompareOps.setDescription(' Number of compare operations serviced by\n this DSA since application startup.')
dsa_add_entry_ops = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 9), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaAddEntryOps.setDescription(' Number of addEntry operations serviced by\n this DSA since application startup.')
dsa_remove_entry_ops = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 10), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaRemoveEntryOps.setDescription(' Number of removeEntry operations serviced by\n this DSA since application startup.')
dsa_modify_entry_ops = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 11), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaModifyEntryOps.setDescription(' Number of modifyEntry operations serviced by\n this DSA since application startup.')
dsa_modify_rdn_ops = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 12), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaModifyRDNOps.setDescription(' Number of modifyRDN operations serviced by\n this DSA since application startup.')
dsa_list_ops = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 13), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaListOps.setDescription(' Number of list operations serviced by\n this DSA since application startup.')
dsa_search_ops = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 14), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaSearchOps.setDescription(' Number of search operations- baseObjectSearches,\n oneLevelSearches and subTreeSearches, serviced\n by this DSA since application startup.')
dsa_one_level_search_ops = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 15), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaOneLevelSearchOps.setDescription(' Number of oneLevelSearch operations serviced\n by this DSA since application startup.')
dsa_whole_tree_search_ops = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 16), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaWholeTreeSearchOps.setDescription(' Number of wholeTreeSearch operations serviced\n by this DSA since application startup.')
dsa_referrals = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 17), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaReferrals.setDescription(' Number of referrals returned by this DSA in response\n to requests for operations since application startup.')
dsa_chainings = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 18), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaChainings.setDescription(' Number of operations forwarded by this DSA\n to other DSAs since application startup.')
dsa_security_errors = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 19), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaSecurityErrors.setDescription(' Number of operations forwarded to this DSA\n which did not meet the security requirements. ')
dsa_errors = mib_table_column((1, 3, 6, 1, 2, 1, 29, 1, 1, 20), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaErrors.setDescription(' Number of operations that could not be serviced\n due to errors other than security errors, and\n referrals.\n A partially serviced operation will not be counted\n as an error.\n The errors include NameErrors, UpdateErrors, Attribute\n errors and ServiceErrors.')
dsa_entries_table = mib_table((1, 3, 6, 1, 2, 1, 29, 2))
if mibBuilder.loadTexts:
dsaEntriesTable.setDescription(' The table holding information related to the\n\n entry statistics and cache performance of the DSAs.')
dsa_entries_entry = mib_table_row((1, 3, 6, 1, 2, 1, 29, 2, 1)).setIndexNames((0, 'NETWORK-SERVICES-MIB', 'applIndex'))
if mibBuilder.loadTexts:
dsaEntriesEntry.setDescription(' Entry containing statistics pertaining to entries\n held by a DSA.')
dsa_master_entries = mib_table_column((1, 3, 6, 1, 2, 1, 29, 2, 1, 1), gauge32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaMasterEntries.setDescription(' Number of entries mastered in the DSA.')
dsa_copy_entries = mib_table_column((1, 3, 6, 1, 2, 1, 29, 2, 1, 2), gauge32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaCopyEntries.setDescription(' Number of entries for which systematic (slave)\n copies are maintained in the DSA.')
dsa_cache_entries = mib_table_column((1, 3, 6, 1, 2, 1, 29, 2, 1, 3), gauge32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaCacheEntries.setDescription(' Number of entries cached (non-systematic copies) in\n the DSA. This will include the entries that are\n cached partially. The negative cache is not counted.')
dsa_cache_hits = mib_table_column((1, 3, 6, 1, 2, 1, 29, 2, 1, 4), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaCacheHits.setDescription(' Number of operations that were serviced from\n the locally held cache since application\n startup.')
dsa_slave_hits = mib_table_column((1, 3, 6, 1, 2, 1, 29, 2, 1, 5), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaSlaveHits.setDescription(' Number of operations that were serviced from\n the locally held object replications [ shadow\n entries] since application startup.')
dsa_int_table = mib_table((1, 3, 6, 1, 2, 1, 29, 3))
if mibBuilder.loadTexts:
dsaIntTable.setDescription(' Each row of this table contains some details\n related to the history of the interaction\n of the monitored DSAs with their respective\n peer DSAs.')
dsa_int_entry = mib_table_row((1, 3, 6, 1, 2, 1, 29, 3, 1)).setIndexNames((0, 'NETWORK-SERVICES-MIB', 'applIndex'), (0, 'DSA-MIB', 'dsaIntIndex'))
if mibBuilder.loadTexts:
dsaIntEntry.setDescription(' Entry containing interaction details of a DSA\n with a peer DSA.')
dsa_int_index = mib_table_column((1, 3, 6, 1, 2, 1, 29, 3, 1, 1), integer32().subtype(subtypeSpec=value_range_constraint(1, 2147483647)))
if mibBuilder.loadTexts:
dsaIntIndex.setDescription(' Together with applIndex it forms the unique key to\n identify the conceptual row which contains useful info\n on the (attempted) interaction between the DSA (referred\n to by applIndex) and a peer DSA.')
dsa_name = mib_table_column((1, 3, 6, 1, 2, 1, 29, 3, 1, 2), distinguished_name()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaName.setDescription(' Distinguished Name of the peer DSA to which this\n entry pertains.')
dsa_time_of_creation = mib_table_column((1, 3, 6, 1, 2, 1, 29, 3, 1, 3), time_stamp()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaTimeOfCreation.setDescription(' The value of sysUpTime when this row was created.\n If the entry was created before the network management\n subsystem was initialized, this object will contain\n a value of zero.')
dsa_time_of_last_attempt = mib_table_column((1, 3, 6, 1, 2, 1, 29, 3, 1, 4), time_stamp()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaTimeOfLastAttempt.setDescription(' The value of sysUpTime when the last attempt was made\n to contact this DSA. If the last attempt was made before\n the network management subsystem was initialized, this\n object will contain a value of zero.')
dsa_time_of_last_success = mib_table_column((1, 3, 6, 1, 2, 1, 29, 3, 1, 5), time_stamp()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaTimeOfLastSuccess.setDescription(' The value of sysUpTime when the last attempt made to\n contact this DSA was successful. If there have\n been no successful attempts this entry will have a value\n of zero. If the last successful attempt was made before\n the network management subsystem was initialized, this\n object will contain a value of zero.')
dsa_failures_since_last_success = mib_table_column((1, 3, 6, 1, 2, 1, 29, 3, 1, 6), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaFailuresSinceLastSuccess.setDescription(' The number of failures since the last time an\n attempt to contact this DSA was successful. If\n there has been no successful attempts, this counter\n will contain the number of failures since this entry\n was created.')
dsa_failures = mib_table_column((1, 3, 6, 1, 2, 1, 29, 3, 1, 7), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaFailures.setDescription(' Cumulative failures since the creation of\n this entry.')
dsa_successes = mib_table_column((1, 3, 6, 1, 2, 1, 29, 3, 1, 8), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
dsaSuccesses.setDescription(' Cumulative successes since the creation of\n this entry.')
dsa_conformance = mib_identifier((1, 3, 6, 1, 2, 1, 29, 4))
dsa_groups = mib_identifier((1, 3, 6, 1, 2, 1, 29, 4, 1))
dsa_compliances = mib_identifier((1, 3, 6, 1, 2, 1, 29, 4, 2))
dsa_ops_compliance = module_compliance((1, 3, 6, 1, 2, 1, 29, 4, 2, 1)).setObjects(*(('DSA-MIB', 'dsaOpsGroup'),))
if mibBuilder.loadTexts:
dsaOpsCompliance.setDescription('The compliance statement for SNMPv2 entities\n which implement the DSA-MIB for monitoring\n DSA operations.')
dsa_entry_compliance = module_compliance((1, 3, 6, 1, 2, 1, 29, 4, 2, 2)).setObjects(*(('DSA-MIB', 'dsaOpsGroup'), ('DSA-MIB', 'dsaEntryGroup')))
if mibBuilder.loadTexts:
dsaEntryCompliance.setDescription('The compliance statement for SNMPv2 entities\n which implement the DSA-MIB for monitoring\n DSA operations, entry statistics and cache\n performance.')
dsa_int_compliance = module_compliance((1, 3, 6, 1, 2, 1, 29, 4, 2, 3)).setObjects(*(('DSA-MIB', 'dsaOpsGroup'), ('DSA-MIB', 'dsaIntGroup')))
if mibBuilder.loadTexts:
dsaIntCompliance.setDescription(' The compliance statement for SNMPv2 entities\n which implement the DSA-MIB for monitoring DSA\n operations and the interaction of the DSA with\n peer DSAs.')
dsa_ops_group = object_group((1, 3, 6, 1, 2, 1, 29, 4, 1, 1)).setObjects(*(('DSA-MIB', 'dsaAnonymousBinds'), ('DSA-MIB', 'dsaUnauthBinds'), ('DSA-MIB', 'dsaSimpleAuthBinds'), ('DSA-MIB', 'dsaStrongAuthBinds'), ('DSA-MIB', 'dsaBindSecurityErrors'), ('DSA-MIB', 'dsaInOps'), ('DSA-MIB', 'dsaReadOps'), ('DSA-MIB', 'dsaCompareOps'), ('DSA-MIB', 'dsaAddEntryOps'), ('DSA-MIB', 'dsaRemoveEntryOps'), ('DSA-MIB', 'dsaModifyEntryOps'), ('DSA-MIB', 'dsaModifyRDNOps'), ('DSA-MIB', 'dsaListOps'), ('DSA-MIB', 'dsaSearchOps'), ('DSA-MIB', 'dsaOneLevelSearchOps'), ('DSA-MIB', 'dsaWholeTreeSearchOps'), ('DSA-MIB', 'dsaReferrals'), ('DSA-MIB', 'dsaChainings'), ('DSA-MIB', 'dsaSecurityErrors'), ('DSA-MIB', 'dsaErrors')))
if mibBuilder.loadTexts:
dsaOpsGroup.setDescription(' A collection of objects for monitoring the DSA\n operations.')
dsa_entry_group = object_group((1, 3, 6, 1, 2, 1, 29, 4, 1, 2)).setObjects(*(('DSA-MIB', 'dsaMasterEntries'), ('DSA-MIB', 'dsaCopyEntries'), ('DSA-MIB', 'dsaCacheEntries'), ('DSA-MIB', 'dsaCacheHits'), ('DSA-MIB', 'dsaSlaveHits')))
if mibBuilder.loadTexts:
dsaEntryGroup.setDescription(' A collection of objects for monitoring the DSA\n entry statistics and cache performance.')
dsa_int_group = object_group((1, 3, 6, 1, 2, 1, 29, 4, 1, 3)).setObjects(*(('DSA-MIB', 'dsaName'), ('DSA-MIB', 'dsaTimeOfCreation'), ('DSA-MIB', 'dsaTimeOfLastAttempt'), ('DSA-MIB', 'dsaTimeOfLastSuccess'), ('DSA-MIB', 'dsaFailuresSinceLastSuccess'), ('DSA-MIB', 'dsaFailures'), ('DSA-MIB', 'dsaSuccesses')))
if mibBuilder.loadTexts:
dsaIntGroup.setDescription(" A collection of objects for monitoring the DSA's\n interaction with peer DSAs.")
mibBuilder.exportSymbols('DSA-MIB', dsaErrors=dsaErrors, dsaOpsGroup=dsaOpsGroup, dsaTimeOfLastSuccess=dsaTimeOfLastSuccess, dsaGroups=dsaGroups, dsaWholeTreeSearchOps=dsaWholeTreeSearchOps, dsaConformance=dsaConformance, dsaOneLevelSearchOps=dsaOneLevelSearchOps, dsaBindSecurityErrors=dsaBindSecurityErrors, dsaOpsEntry=dsaOpsEntry, dsaSuccesses=dsaSuccesses, dsaOpsCompliance=dsaOpsCompliance, dsaSearchOps=dsaSearchOps, dsaMasterEntries=dsaMasterEntries, dsaTimeOfLastAttempt=dsaTimeOfLastAttempt, dsaUnauthBinds=dsaUnauthBinds, dsaEntryCompliance=dsaEntryCompliance, dsaFailuresSinceLastSuccess=dsaFailuresSinceLastSuccess, dsaMIB=dsaMIB, dsaSecurityErrors=dsaSecurityErrors, dsaModifyEntryOps=dsaModifyEntryOps, dsaIntCompliance=dsaIntCompliance, dsaName=dsaName, dsaOpsTable=dsaOpsTable, dsaIntIndex=dsaIntIndex, dsaTimeOfCreation=dsaTimeOfCreation, dsaChainings=dsaChainings, dsaInOps=dsaInOps, dsaCacheEntries=dsaCacheEntries, dsaEntryGroup=dsaEntryGroup, dsaEntriesEntry=dsaEntriesEntry, dsaStrongAuthBinds=dsaStrongAuthBinds, dsaIntEntry=dsaIntEntry, dsaSimpleAuthBinds=dsaSimpleAuthBinds, dsaReadOps=dsaReadOps, dsaRemoveEntryOps=dsaRemoveEntryOps, dsaModifyRDNOps=dsaModifyRDNOps, dsaFailures=dsaFailures, dsaListOps=dsaListOps, dsaCacheHits=dsaCacheHits, dsaIntTable=dsaIntTable, dsaEntriesTable=dsaEntriesTable, PYSNMP_MODULE_ID=dsaMIB, dsaCompliances=dsaCompliances, dsaCompareOps=dsaCompareOps, dsaCopyEntries=dsaCopyEntries, dsaSlaveHits=dsaSlaveHits, dsaAnonymousBinds=dsaAnonymousBinds, dsaIntGroup=dsaIntGroup, dsaReferrals=dsaReferrals, dsaAddEntryOps=dsaAddEntryOps)
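# Hedged sketch (not part of the generated module above): the exported symbols
# only become usable once a compiled 'DSA-MIB' module sits on a pysnmp MIB
# search path; whether that holds here is an assumption about the environment,
# and the variable names below are purely illustrative.
from pysnmp.smi import builder

mib_builder = builder.MibBuilder()
# mib_builder.addMibSources(builder.DirMibSource('/path/to/compiled/mibs'))  # optional custom path
mib_builder.loadModules('DSA-MIB')

# Retrieve two of the exported table objects and print their OIDs.
dsa_ops_table, dsa_entries_table = mib_builder.importSymbols(
    'DSA-MIB', 'dsaOpsTable', 'dsaEntriesTable')
print(dsa_ops_table.getName())      # (1, 3, 6, 1, 2, 1, 29, 1)
print(dsa_entries_table.getName())  # (1, 3, 6, 1, 2, 1, 29, 2)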
|
# Basic configuration for data and training (model-specific configuration is declared in the notebook)
args = {
"batch_size":128,
"lr":1e-3,
"epochs":10,
}
|
args = {'batch_size': 128, 'lr': 0.001, 'epochs': 10}
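# Hedged sketch (not from the original notebook): one common way to consume
# such a hyperparameter dict is to mirror its keys as argparse flags so the
# values can be overridden from the command line; the flag names are assumptions.
import argparse

parser = argparse.ArgumentParser(description='training configuration')
parser.add_argument('--batch_size', type=int, default=args['batch_size'])
parser.add_argument('--lr', type=float, default=args['lr'])
parser.add_argument('--epochs', type=int, default=args['epochs'])
cli_args = vars(parser.parse_args([]))  # empty list keeps this runnable inside a notebook
print(cli_args)  # {'batch_size': 128, 'lr': 0.001, 'epochs': 10}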
|
marks = [[1,2,3],[4,5,6],[7,8,9]]
rotate = [[False for i in range(len(marks[0]))] for j in range(len(marks))]
for row, items in enumerate(marks):
for col, val in enumerate(items):
rotate[col][row] = val
for row in marks:
print(row)
for row in rotate:
print(row)
|
marks = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
rotate = [[False for i in range(len(marks[0]))] for j in range(len(marks))]
for (row, items) in enumerate(marks):
for (col, val) in enumerate(items):
rotate[col][row] = val
for row in marks:
print(row)
for row in rotate:
print(row)
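# Equivalent sketch (an observation, not part of the original snippet): the
# nested loops above compute the transpose of `marks`, which zip(*...) yields
# directly.
rotate_alt = [list(col) for col in zip(*marks)]
assert rotate_alt == rotate  # [[1, 4, 7], [2, 5, 8], [3, 6, 9]]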
|
# exercise power operations (two-argument pow and **) on an NA object
x = 1 / 0
assert type(x) is NA
assert type(pow(x, 2)) is NA
assert type(pow(2, x)) is NA
assert type(x ** 2) is NA
assert type(2 ** x) is NA
|
x = 1 / 0
assert type(x) is NA
assert type(pow(x, 2)) is NA
assert type(pow(2, x)) is NA
assert type(x ** 2) is NA
assert type(2 ** x) is NA
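# Hedged sketch: `NA` is never defined in the snippet above, and with plain
# ints `1 / 0` raises ZeroDivisionError, so the assertions only hold inside a
# framework whose division can return an NA singleton. A hypothetical minimal
# version of such a type could look like this.
class NA:
    _instance = None

    def __new__(cls):
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def __repr__(self):
        return 'NA'

    # Power operations involving NA propagate NA.
    def __pow__(self, other):
        return self

    def __rpow__(self, other):
        return self

na = NA()
assert type(na ** 2) is NA
assert type(2 ** na) is NA
assert type(pow(na, 2)) is NA
assert type(pow(2, na)) is NA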
|
# first line: 10
@memory.cache
def read_wav():
wav = dl.data.get_smashing_baby()
return wavfile.read(wav)
|
@memory.cache
def read_wav():
wav = dl.data.get_smashing_baby()
return wavfile.read(wav)
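# Hedged setup sketch: `memory`, `dl` and `wavfile` are not defined in the
# snippet above, so imports along these lines are assumed (joblib for the
# cache, scipy for WAV reading, dautil for the sample file). The cache
# directory name is arbitrary, and these lines would have to run before the
# decorated definition; after that, the first call to read_wav() hits the disk
# and later calls are served from the on-disk cache.
from joblib import Memory
from scipy.io import wavfile
import dautil as dl

memory = Memory(location='.joblib_cache', verbose=0)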
|
class Config:
BASE_DIR = "/usr/local/lib/python3.9/site-packages"
FACEBOOK_PACKAGE = "facebook_business"
ADOBJECT_DIR = "adobjects"
# https://github.com/facebook/facebook-python-business-sdk/tree/master/facebook_business/adobjects
FULL_PATH = f"{BASE_DIR}/{FACEBOOK_PACKAGE}/{ADOBJECT_DIR}"
NEO4J_HOST = "bolt://service-neo4j:7687"
EXCLUSION_LIST = ["__init__.py", "abstractobject.py", "abstractcrudobject.py"]
|
class Config:
base_dir = '/usr/local/lib/python3.9/site-packages'
facebook_package = 'facebook_business'
adobject_dir = 'adobjects'
    full_path = f'{base_dir}/{facebook_package}/{adobject_dir}'
neo4_j_host = 'bolt://service-neo4j:7687'
exclusion_list = ['__init__.py', 'abstractobject.py', 'abstractcrudobject.py']
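# Hedged usage sketch for the class above (the paths and package layout are
# assumptions carried over from it): list the ad-object modules under the SDK
# package, skipping the files named in exclusion_list. Standard library only.
import os

def list_adobject_modules(cfg=Config):
    try:
        names = sorted(os.listdir(cfg.full_path))
    except FileNotFoundError:
        return []  # the SDK is not installed at the assumed base_dir
    return [n for n in names if n.endswith('.py') and n not in cfg.exclusion_list]

print(list_adobject_modules()[:5])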
|