blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 5
283
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
41
| license_type
stringclasses 2
values | repo_name
stringlengths 7
96
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 58
values | visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 12.7k
662M
⌀ | star_events_count
int64 0
35.5k
| fork_events_count
int64 0
20.6k
| gha_license_id
stringclasses 11
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 43
values | src_encoding
stringclasses 9
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 7
5.88M
| extension
stringclasses 30
values | content
stringlengths 7
5.88M
| authors
sequencelengths 1
1
| author
stringlengths 0
73
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
457741e02d727ee709a6616e161dfb501b4258f7 | 5229c7fc87e3fddd7aee34284eeebf08fc84f4df | /bgp_configuration/bgp.py | 685b806c9aa8a8fdea9d2da8230e720031ab0fdc | [] | no_license | SahanaSatya/automation_inside_an_autonomous_system | 51144e198a4eb65da6bb07f1fd330945b3509e55 | 1e9bd7dfbd706e03f789a68dae6c4295c67afc0f | refs/heads/master | 2020-07-03T06:57:27.569945 | 2019-08-12T00:28:28 | 2019-08-12T00:28:28 | 201,829,420 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,932 | py | from netmiko import ConnectHandler
import threading
import os
import time
def get_commands(conf_file, ip_addr):
    """Build the IOS command list for the BGP section of *conf_file*.

    The file is scanned for a line containing "For <ip_addr>"; the four
    lines that follow it supply, in order: local AS number, neighbor IP,
    neighbor remote AS, and a ';'-separated list of "network,mask" pairs
    (each value taken from the text after the first ':').

    Returns the list of configuration commands, or the sentinel string
    "No file exits for command" when *conf_file* does not exist.
    NOTE(review): callers compare against that exact sentinel, so its
    spelling (including the typo) must not change.
    """
    if not os.path.isfile(conf_file):
        return "No file exits for command"

    # Small state machine: state N means "the next matching branch below
    # consumes this line as field N".  Branches are checked from 4 down
    # to 1 so that a state bump never consumes two fields in one line.
    state = 0
    local_as = ""
    neighbor_ip = ""
    remote_as = ""
    networks = ""
    with open(conf_file, 'r') as handle:
        for line in handle:
            if state == 4:
                raw = line.split(':')[1].split('\n')[0]
                networks = raw.split(';')
                break
            if state == 3:
                remote_as = line.split(':')[1].split('\n')[0]
                state = 4
            if state == 2:
                neighbor_ip = line.split(':')[1].split('\n')[0]
                state = 3
            if state == 1:
                local_as = line.split(':')[1].split('\n')[0]
                state = 2
            if "For " + ip_addr in line:
                state = 1

    commands = ["router bgp " + local_as,
                "neighbor " + neighbor_ip + " remote-as " + remote_as]
    for entry in networks:
        parts = entry.split(',')
        commands.append("network " + parts[0] + " mask " + parts[1])
    return commands
def config(*device):
    """Deploy BGP on one device over SSH, verify the neighbor, back up config.

    Parameters
    ----------
    *device : device[0] is a netmiko connection dict (passed whole to
        ConnectHandler); only the 'ip' key is read directly here.

    Side effects: prints status to stdout and writes
    "conf_<ip>_backup.txt" in the current directory.  Designed to run as
    a thread target (see config_devices), so errors are printed, not raised.
    """
    dev_con = ConnectHandler(**device[0])
    ip = device[0]['ip']
    try:
        dev_con.enable()
        cmd = get_commands("bgp.conf", ip)
        # get_commands signals a missing bgp.conf with this exact sentinel
        # string (typo and all) -- do not "fix" one side without the other.
        if cmd == "No file exits for command":
            print("could not deploy as " + str(cmd))
            return
        try:
            output = dev_con.send_config_set(cmd)
            # send_config_set normally echoes ~4 framing lines plus one line
            # per command; extra lines mean the device reported an error.
            if len(output.split('\n')) > 4 + len(cmd):
                raise Exception(cmd)
        except Exception as e:
            print("For the device with IP:" + ip + ", BGP deployment is not properly done")
            print("Reason: Error in the following commands")
            print(e.args)
            _backup_running_config(dev_con, ip)
            return
        time.sleep(2)  # give the BGP session a moment before querying state
        output = dev_con.send_command("sh ip bgp neighbor")
        # Parse the first three lines of "sh ip bgp neighbor":
        # line 0 holds "BGP neighbor is <ip>, remote AS <as>, ...",
        # line 2 holds "BGP state = <state>, ..." -- TODO confirm this
        # layout holds across the IOS versions in use.
        tab = output.split('\n')[0:3]
        li = tab[0].split(',')
        neighIP = li[0].split()[-1]
        remoteAS = li[1].split()[-1]
        state = tab[2].split(",")[0].split()[-1]
        print("For device with IP:" + ip)
        print("BGP Neighbor IP".ljust(20) + "BGP Neighbor AS".ljust(20) + "BGP Neighbor State".ljust(20))
        # Fix: str(x.encode("utf-8")) printed a b'...' repr under Python 3;
        # the values are already text, so print them directly.
        print(neighIP.ljust(20) + remoteAS.ljust(20) + state.ljust(20))
        _backup_running_config(dev_con, ip)
    finally:
        # Fix: the original leaked the SSH session on the error and
        # missing-config paths; always release it.
        dev_con.disconnect()


def _backup_running_config(dev_con, ip):
    """Save the device's "sh run" output to a local per-device backup file."""
    output = dev_con.send_command("sh run")
    file_save = "conf_" + ip + "_backup.txt"
    with open(file_save, 'w') as fh:
        fh.write(output)
    print("Backed-up the running config for device with IP:" + ip + " locally in file: " + file_save)
def config_devices(devices):
    """Kick off one worker thread per device, each running config().

    Threads are started but not joined, so this returns immediately
    while the deployments proceed in parallel.
    """
    workers = []
    for dev in devices:
        worker = threading.Thread(target=config, args=(dev,))
        workers.append(worker)
        worker.start()
| [
"[email protected]"
] | |
59db6733062d74c3c4eaa39d9a84f6e0b05261d5 | dc7465b43e49267ba6b1c08ec4d15b1613bbd14a | /python/caffe/proto/caffe_pb2.py | 06f3bfb49487ea8b2d1816542b32bc04c9c58452 | [
"LicenseRef-scancode-generic-cla",
"BSD-2-Clause"
] | permissive | peterWon/pva_textboxpp_merged_caffe | 870d361262078480488663ad4fb988666d0807ff | 9b7cfa28d5335b6b67d70761910d213e9d92f20c | refs/heads/master | 2020-03-10T01:07:28.730457 | 2018-04-11T13:50:48 | 2018-04-11T13:50:48 | 129,101,137 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | true | 377,389 | py | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: caffe.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='caffe.proto',
package='caffe',
serialized_pb=_b('\n\x0b\x63\x61\x66\x66\x65.proto\x12\x05\x63\x61\x66\x66\x65\"\x1c\n\tBlobShape\x12\x0f\n\x03\x64im\x18\x01 \x03(\x03\x42\x02\x10\x01\"\xcc\x01\n\tBlobProto\x12\x1f\n\x05shape\x18\x07 \x01(\x0b\x32\x10.caffe.BlobShape\x12\x10\n\x04\x64\x61ta\x18\x05 \x03(\x02\x42\x02\x10\x01\x12\x10\n\x04\x64iff\x18\x06 \x03(\x02\x42\x02\x10\x01\x12\x17\n\x0b\x64ouble_data\x18\x08 \x03(\x01\x42\x02\x10\x01\x12\x17\n\x0b\x64ouble_diff\x18\t \x03(\x01\x42\x02\x10\x01\x12\x0e\n\x03num\x18\x01 \x01(\x05:\x01\x30\x12\x13\n\x08\x63hannels\x18\x02 \x01(\x05:\x01\x30\x12\x11\n\x06height\x18\x03 \x01(\x05:\x01\x30\x12\x10\n\x05width\x18\x04 \x01(\x05:\x01\x30\"2\n\x0f\x42lobProtoVector\x12\x1f\n\x05\x62lobs\x18\x01 \x03(\x0b\x32\x10.caffe.BlobProto\"\x81\x01\n\x05\x44\x61tum\x12\x10\n\x08\x63hannels\x18\x01 \x01(\x05\x12\x0e\n\x06height\x18\x02 \x01(\x05\x12\r\n\x05width\x18\x03 \x01(\x05\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\x0c\x12\r\n\x05label\x18\x05 \x01(\x05\x12\x12\n\nfloat_data\x18\x06 \x03(\x02\x12\x16\n\x07\x65ncoded\x18\x07 \x01(\x08:\x05\x66\x61lse\"A\n\x0cLabelMapItem\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05label\x18\x02 \x01(\x05\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\"-\n\x08LabelMap\x12!\n\x04item\x18\x01 \x03(\x0b\x32\x13.caffe.LabelMapItem\"o\n\x07Sampler\x12\x14\n\tmin_scale\x18\x01 \x01(\x02:\x01\x31\x12\x14\n\tmax_scale\x18\x02 \x01(\x02:\x01\x31\x12\x1b\n\x10min_aspect_ratio\x18\x03 \x01(\x02:\x01\x31\x12\x1b\n\x10max_aspect_ratio\x18\x04 \x01(\x02:\x01\x31\"\xc0\x01\n\x10SampleConstraint\x12\x1b\n\x13min_jaccard_overlap\x18\x01 \x01(\x02\x12\x1b\n\x13max_jaccard_overlap\x18\x02 \x01(\x02\x12\x1b\n\x13min_sample_coverage\x18\x03 \x01(\x02\x12\x1b\n\x13max_sample_coverage\x18\x04 \x01(\x02\x12\x1b\n\x13min_object_coverage\x18\x05 \x01(\x02\x12\x1b\n\x13max_object_coverage\x18\x06 \x01(\x02\"\xb2\x01\n\x0c\x42\x61tchSampler\x12 \n\x12use_original_image\x18\x01 \x01(\x08:\x04true\x12\x1f\n\x07sampler\x18\x02 
\x01(\x0b\x32\x0e.caffe.Sampler\x12\x32\n\x11sample_constraint\x18\x03 \x01(\x0b\x32\x17.caffe.SampleConstraint\x12\x12\n\nmax_sample\x18\x04 \x01(\r\x12\x17\n\nmax_trials\x18\x05 \x01(\r:\x03\x31\x30\x30\"\x8a\x01\n\x0e\x45mitConstraint\x12\x39\n\temit_type\x18\x01 \x01(\x0e\x32\x1e.caffe.EmitConstraint.EmitType:\x06\x43\x45NTER\x12\x14\n\x0c\x65mit_overlap\x18\x02 \x01(\x02\"\'\n\x08\x45mitType\x12\n\n\x06\x43\x45NTER\x10\x00\x12\x0f\n\x0bMIN_OVERLAP\x10\x01\"\x87\x01\n\x0eNormalizedBBox\x12\x0c\n\x04xmin\x18\x01 \x01(\x02\x12\x0c\n\x04ymin\x18\x02 \x01(\x02\x12\x0c\n\x04xmax\x18\x03 \x01(\x02\x12\x0c\n\x04ymax\x18\x04 \x01(\x02\x12\r\n\x05label\x18\x05 \x01(\x05\x12\x11\n\tdifficult\x18\x06 \x01(\x08\x12\r\n\x05score\x18\x07 \x01(\x02\x12\x0c\n\x04size\x18\x08 \x01(\x02\"{\n\x0eNormalizedRBox\x12\n\n\x02x1\x18\x01 \x01(\x02\x12\n\n\x02y1\x18\x02 \x01(\x02\x12\n\n\x02x2\x18\x03 \x01(\x02\x12\n\n\x02y2\x18\x04 \x01(\x02\x12\t\n\x01h\x18\x05 \x01(\x02\x12\x11\n\tdifficult\x18\x06 \x01(\x08\x12\r\n\x05score\x18\x07 \x01(\x02\x12\x0c\n\x04size\x18\x08 \x01(\x02\"\xa3\x01\n\x11NormalizedPolygon\x12\n\n\x02x1\x18\x01 \x01(\x02\x12\n\n\x02y1\x18\x02 \x01(\x02\x12\n\n\x02x2\x18\x03 \x01(\x02\x12\n\n\x02y2\x18\x04 \x01(\x02\x12\n\n\x02x3\x18\x05 \x01(\x02\x12\n\n\x02y3\x18\x06 \x01(\x02\x12\n\n\x02x4\x18\x07 \x01(\x02\x12\n\n\x02y4\x18\x08 \x01(\x02\x12\x11\n\tdifficult\x18\t \x01(\x08\x12\r\n\x05score\x18\n \x01(\x02\x12\x0c\n\x04size\x18\x0b \x01(\x02\"\x99\x01\n\nAnnotation\x12\x16\n\x0binstance_id\x18\x01 \x01(\x05:\x01\x30\x12#\n\x04\x62\x62ox\x18\x02 \x01(\x0b\x32\x15.caffe.NormalizedBBox\x12#\n\x04rbox\x18\x03 \x01(\x0b\x32\x15.caffe.NormalizedRBox\x12)\n\x07polygon\x18\x04 \x01(\x0b\x32\x18.caffe.NormalizedPolygon\"M\n\x0f\x41nnotationGroup\x12\x13\n\x0bgroup_label\x18\x01 \x01(\x05\x12%\n\nannotation\x18\x02 \x03(\x0b\x32\x11.caffe.Annotation\"\xaf\x01\n\x0e\x41nnotatedDatum\x12\x1b\n\x05\x64\x61tum\x18\x01 \x01(\x0b\x32\x0c.caffe.Datum\x12\x32\n\x04type\x18\x02 
\x01(\x0e\x32$.caffe.AnnotatedDatum.AnnotationType\x12\x30\n\x10\x61nnotation_group\x18\x03 \x03(\x0b\x32\x16.caffe.AnnotationGroup\"\x1a\n\x0e\x41nnotationType\x12\x08\n\x04\x42\x42OX\x10\x00\"\x8a\x02\n\x0f\x46illerParameter\x12\x16\n\x04type\x18\x01 \x01(\t:\x08\x63onstant\x12\x10\n\x05value\x18\x02 \x01(\x02:\x01\x30\x12\x0e\n\x03min\x18\x03 \x01(\x02:\x01\x30\x12\x0e\n\x03max\x18\x04 \x01(\x02:\x01\x31\x12\x0f\n\x04mean\x18\x05 \x01(\x02:\x01\x30\x12\x0e\n\x03std\x18\x06 \x01(\x02:\x01\x31\x12\x12\n\x06sparse\x18\x07 \x01(\x05:\x02-1\x12\x42\n\rvariance_norm\x18\x08 \x01(\x0e\x32#.caffe.FillerParameter.VarianceNorm:\x06\x46\x41N_IN\"4\n\x0cVarianceNorm\x12\n\n\x06\x46\x41N_IN\x10\x00\x12\x0b\n\x07\x46\x41N_OUT\x10\x01\x12\x0b\n\x07\x41VERAGE\x10\x02\"\x8e\x02\n\x0cNetParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05input\x18\x03 \x03(\t\x12%\n\x0binput_shape\x18\x08 \x03(\x0b\x32\x10.caffe.BlobShape\x12\x11\n\tinput_dim\x18\x04 \x03(\x05\x12\x1d\n\x0e\x66orce_backward\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x1e\n\x05state\x18\x06 \x01(\x0b\x32\x0f.caffe.NetState\x12\x19\n\ndebug_info\x18\x07 \x01(\x08:\x05\x66\x61lse\x12$\n\x05layer\x18\x64 \x03(\x0b\x32\x15.caffe.LayerParameter\x12\'\n\x06layers\x18\x02 \x03(\x0b\x32\x17.caffe.V1LayerParameter\"\xfc\n\n\x0fSolverParameter\x12\x0b\n\x03net\x18\x18 \x01(\t\x12&\n\tnet_param\x18\x19 \x01(\x0b\x32\x13.caffe.NetParameter\x12\x11\n\ttrain_net\x18\x01 \x01(\t\x12\x10\n\x08test_net\x18\x02 \x03(\t\x12,\n\x0ftrain_net_param\x18\x15 \x01(\x0b\x32\x13.caffe.NetParameter\x12+\n\x0etest_net_param\x18\x16 \x03(\x0b\x32\x13.caffe.NetParameter\x12$\n\x0btrain_state\x18\x1a \x01(\x0b\x32\x0f.caffe.NetState\x12#\n\ntest_state\x18\x1b \x03(\x0b\x32\x0f.caffe.NetState\x12!\n\teval_type\x18) \x01(\t:\x0e\x63lassification\x12\x1c\n\nap_version\x18* \x01(\t:\x08Integral\x12\x11\n\ttest_iter\x18\x03 \x03(\x05\x12\x18\n\rtest_interval\x18\x04 \x01(\x05:\x01\x30\x12 \n\x11test_compute_loss\x18\x13 
\x01(\x08:\x05\x66\x61lse\x12!\n\x13test_initialization\x18 \x01(\x08:\x04true\x12\x0f\n\x07\x62\x61se_lr\x18\x05 \x01(\x02\x12\x0f\n\x07\x64isplay\x18\x06 \x01(\x05\x12\x17\n\x0c\x61verage_loss\x18! \x01(\x05:\x01\x31\x12\x10\n\x08max_iter\x18\x07 \x01(\x05\x12\x14\n\titer_size\x18$ \x01(\x05:\x01\x31\x12\x11\n\tlr_policy\x18\x08 \x01(\t\x12\r\n\x05gamma\x18\t \x01(\x02\x12\r\n\x05power\x18\n \x01(\x02\x12\x10\n\x08momentum\x18\x0b \x01(\x02\x12\x14\n\x0cweight_decay\x18\x0c \x01(\x02\x12\x1f\n\x13regularization_type\x18\x1d \x01(\t:\x02L2\x12\x10\n\x08stepsize\x18\r \x01(\x05\x12\x11\n\tstepvalue\x18\" \x03(\x05\x12\x17\n\x0fplateau_winsize\x18+ \x03(\x05\x12\x1a\n\x0e\x63lip_gradients\x18# \x01(\x02:\x02-1\x12\x13\n\x08snapshot\x18\x0e \x01(\x05:\x01\x30\x12\x17\n\x0fsnapshot_prefix\x18\x0f \x01(\t\x12\x1c\n\rsnapshot_diff\x18\x10 \x01(\x08:\x05\x66\x61lse\x12K\n\x0fsnapshot_format\x18% \x01(\x0e\x32%.caffe.SolverParameter.SnapshotFormat:\x0b\x42INARYPROTO\x12;\n\x0bsolver_mode\x18\x11 \x01(\x0e\x32!.caffe.SolverParameter.SolverMode:\x03GPU\x12\x14\n\tdevice_id\x18\x12 \x01(\x05:\x01\x30\x12\x17\n\x0brandom_seed\x18\x14 \x01(\x03:\x02-1\x12\x11\n\x04type\x18( \x01(\t:\x03SGD\x12\x14\n\x05\x64\x65lta\x18\x1f \x01(\x02:\x05\x31\x65-08\x12\x18\n\tmomentum2\x18\' \x01(\x02:\x05\x30.999\x12\x17\n\trms_decay\x18& \x01(\x02:\x04\x30.99\x12\x19\n\ndebug_info\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\"\n\x14snapshot_after_train\x18\x1c \x01(\x08:\x04true\x12;\n\x0bsolver_type\x18\x1e \x01(\x0e\x32!.caffe.SolverParameter.SolverType:\x03SGD\"+\n\x0eSnapshotFormat\x12\x08\n\x04HDF5\x10\x00\x12\x0f\n\x0b\x42INARYPROTO\x10\x01\"\x1e\n\nSolverMode\x12\x07\n\x03\x43PU\x10\x00\x12\x07\n\x03GPU\x10\x01\"U\n\nSolverType\x12\x07\n\x03SGD\x10\x00\x12\x0c\n\x08NESTEROV\x10\x01\x12\x0b\n\x07\x41\x44\x41GRAD\x10\x02\x12\x0b\n\x07RMSPROP\x10\x03\x12\x0c\n\x08\x41\x44\x41\x44\x45LTA\x10\x04\x12\x08\n\x04\x41\x44\x41M\x10\x05\"\xa5\x01\n\x0bSolverState\x12\x0c\n\x04iter\x18\x01 
\x01(\x05\x12\x13\n\x0blearned_net\x18\x02 \x01(\t\x12!\n\x07history\x18\x03 \x03(\x0b\x32\x10.caffe.BlobProto\x12\x17\n\x0c\x63urrent_step\x18\x04 \x01(\x05:\x01\x30\x12\x1b\n\x0cminimum_loss\x18\x05 \x01(\x02:\x05\x31\x65+38\x12\x1a\n\x0fiter_last_event\x18\x06 \x01(\x05:\x01\x30\"N\n\x08NetState\x12!\n\x05phase\x18\x01 \x01(\x0e\x32\x0c.caffe.Phase:\x04TEST\x12\x10\n\x05level\x18\x02 \x01(\x05:\x01\x30\x12\r\n\x05stage\x18\x03 \x03(\t\"s\n\x0cNetStateRule\x12\x1b\n\x05phase\x18\x01 \x01(\x0e\x32\x0c.caffe.Phase\x12\x11\n\tmin_level\x18\x02 \x01(\x05\x12\x11\n\tmax_level\x18\x03 \x01(\x05\x12\r\n\x05stage\x18\x04 \x03(\t\x12\x11\n\tnot_stage\x18\x05 \x03(\t\"\xa3\x01\n\tParamSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\nshare_mode\x18\x02 \x01(\x0e\x32\x1d.caffe.ParamSpec.DimCheckMode\x12\x12\n\x07lr_mult\x18\x03 \x01(\x02:\x01\x31\x12\x15\n\ndecay_mult\x18\x04 \x01(\x02:\x01\x31\"*\n\x0c\x44imCheckMode\x12\n\n\x06STRICT\x10\x00\x12\x0e\n\nPERMISSIVE\x10\x01\"\x96\x1a\n\x0eLayerParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x0e\n\x06\x62ottom\x18\x03 \x03(\t\x12\x0b\n\x03top\x18\x04 \x03(\t\x12\x1b\n\x05phase\x18\n \x01(\x0e\x32\x0c.caffe.Phase\x12\x13\n\x0bloss_weight\x18\x05 \x03(\x02\x12\x1f\n\x05param\x18\x06 \x03(\x0b\x32\x10.caffe.ParamSpec\x12\x1f\n\x05\x62lobs\x18\x07 \x03(\x0b\x32\x10.caffe.BlobProto\x12\x16\n\x0epropagate_down\x18\x0b \x03(\x08\x12$\n\x07include\x18\x08 \x03(\x0b\x32\x13.caffe.NetStateRule\x12$\n\x07\x65xclude\x18\t \x03(\x0b\x32\x13.caffe.NetStateRule\x12\x37\n\x0ftransform_param\x18\x64 \x01(\x0b\x32\x1e.caffe.TransformationParameter\x12(\n\nloss_param\x18\x65 \x01(\x0b\x32\x14.caffe.LossParameter\x12\x30\n\x0e\x61\x63\x63uracy_param\x18\x66 \x01(\x0b\x32\x18.caffe.AccuracyParameter\x12<\n\x14\x61nnotated_data_param\x18\xc8\x01 \x01(\x0b\x32\x1d.caffe.AnnotatedDataParameter\x12,\n\x0c\x61rgmax_param\x18g \x01(\x0b\x32\x16.caffe.ArgMaxParameter\x12\x34\n\x10\x62\x61tch_norm_param\x18\x8b\x01 
\x01(\x0b\x32\x19.caffe.BatchNormParameter\x12)\n\nbias_param\x18\x8d\x01 \x01(\x0b\x32\x14.caffe.BiasParameter\x12,\n\x0c\x63oncat_param\x18h \x01(\x0b\x32\x16.caffe.ConcatParameter\x12?\n\x16\x63ontrastive_loss_param\x18i \x01(\x0b\x32\x1f.caffe.ContrastiveLossParameter\x12\x36\n\x11\x63onvolution_param\x18j \x01(\x0b\x32\x1b.caffe.ConvolutionParameter\x12)\n\ncrop_param\x18\x90\x01 \x01(\x0b\x32\x14.caffe.CropParameter\x12\x36\n\x11\x63tc_decoder_param\x18\x95\x01 \x01(\x0b\x32\x1a.caffe.CTCDecoderParameter\x12\x30\n\x0e\x63tc_loss_param\x18\x94\x01 \x01(\x0b\x32\x17.caffe.CTCLossParameter\x12(\n\ndata_param\x18k \x01(\x0b\x32\x14.caffe.DataParameter\x12\x44\n\x18\x64\x65tection_evaluate_param\x18\xcd\x01 \x01(\x0b\x32!.caffe.DetectionEvaluateParameter\x12@\n\x16\x64\x65tection_output_param\x18\xcc\x01 \x01(\x0b\x32\x1f.caffe.DetectionOutputParameter\x12.\n\rdropout_param\x18l \x01(\x0b\x32\x17.caffe.DropoutParameter\x12\x33\n\x10\x64ummy_data_param\x18m \x01(\x0b\x32\x19.caffe.DummyDataParameter\x12.\n\reltwise_param\x18n \x01(\x0b\x32\x17.caffe.EltwiseParameter\x12\'\n\telu_param\x18\x8c\x01 \x01(\x0b\x32\x13.caffe.ELUParameter\x12+\n\x0b\x65mbed_param\x18\x89\x01 \x01(\x0b\x32\x15.caffe.EmbedParameter\x12&\n\texp_param\x18o \x01(\x0b\x32\x13.caffe.ExpParameter\x12/\n\rflatten_param\x18\x87\x01 \x01(\x0b\x32\x17.caffe.FlattenParameter\x12\x31\n\x0fhdf5_data_param\x18p \x01(\x0b\x32\x18.caffe.HDF5DataParameter\x12\x35\n\x11hdf5_output_param\x18q \x01(\x0b\x32\x1a.caffe.HDF5OutputParameter\x12\x33\n\x10hinge_loss_param\x18r \x01(\x0b\x32\x19.caffe.HingeLossParameter\x12\x33\n\x10image_data_param\x18s \x01(\x0b\x32\x19.caffe.ImageDataParameter\x12\x39\n\x13infogain_loss_param\x18t \x01(\x0b\x32\x1c.caffe.InfogainLossParameter\x12\x39\n\x13inner_product_param\x18u \x01(\x0b\x32\x1c.caffe.InnerProductParameter\x12+\n\x0binput_param\x18\x8f\x01 \x01(\x0b\x32\x15.caffe.InputParameter\x12\'\n\tlog_param\x18\x86\x01 
\x01(\x0b\x32\x13.caffe.LogParameter\x12&\n\tlrn_param\x18v \x01(\x0b\x32\x13.caffe.LRNParameter\x12\x35\n\x11memory_data_param\x18w \x01(\x0b\x32\x1a.caffe.MemoryDataParameter\x12:\n\x13multibox_loss_param\x18\xc9\x01 \x01(\x0b\x32\x1c.caffe.MultiBoxLossParameter\x12&\n\tmvn_param\x18x \x01(\x0b\x32\x13.caffe.MVNParameter\x12.\n\nnorm_param\x18\xce\x01 \x01(\x0b\x32\x19.caffe.NormalizeParameter\x12\x33\n\x0fparameter_param\x18\x91\x01 \x01(\x0b\x32\x19.caffe.ParameterParameter\x12/\n\rpermute_param\x18\xca\x01 \x01(\x0b\x32\x17.caffe.PermuteParameter\x12.\n\rpooling_param\x18y \x01(\x0b\x32\x17.caffe.PoolingParameter\x12*\n\x0bpower_param\x18z \x01(\x0b\x32\x15.caffe.PowerParameter\x12+\n\x0bprelu_param\x18\x83\x01 \x01(\x0b\x32\x15.caffe.PReLUParameter\x12\x32\n\x0fprior_box_param\x18\xcb\x01 \x01(\x0b\x32\x18.caffe.PriorBoxParameter\x12-\n\x0cpython_param\x18\x82\x01 \x01(\x0b\x32\x16.caffe.PythonParameter\x12\x33\n\x0frecurrent_param\x18\x92\x01 \x01(\x0b\x32\x19.caffe.RecurrentParameter\x12\x33\n\x0freduction_param\x18\x88\x01 \x01(\x0b\x32\x19.caffe.ReductionParameter\x12(\n\nrelu_param\x18{ \x01(\x0b\x32\x14.caffe.ReLUParameter\x12/\n\rreshape_param\x18\x85\x01 \x01(\x0b\x32\x17.caffe.ReshapeParameter\x12\x36\n\x11roi_pooling_param\x18\x96\x01 \x01(\x0b\x32\x1a.caffe.ROIPoolingParameter\x12/\n\rreverse_param\x18\x93\x01 \x01(\x0b\x32\x17.caffe.ReverseParameter\x12+\n\x0bscale_param\x18\x8e\x01 \x01(\x0b\x32\x15.caffe.ScaleParameter\x12.\n\rsigmoid_param\x18| \x01(\x0b\x32\x17.caffe.SigmoidParameter\x12.\n\rsoftmax_param\x18} \x01(\x0b\x32\x17.caffe.SoftmaxParameter\x12\'\n\tspp_param\x18\x84\x01 \x01(\x0b\x32\x13.caffe.SPPParameter\x12*\n\x0bslice_param\x18~ \x01(\x0b\x32\x15.caffe.SliceParameter\x12(\n\ntanh_param\x18\x7f \x01(\x0b\x32\x14.caffe.TanHParameter\x12\x33\n\x0fthreshold_param\x18\x80\x01 \x01(\x0b\x32\x19.caffe.ThresholdParameter\x12)\n\ntile_param\x18\x8a\x01 \x01(\x0b\x32\x14.caffe.TileParameter\x12\x34\n\x10video_data_param\x18\xcf\x01 
\x01(\x0b\x32\x19.caffe.VideoDataParameter\x12\x36\n\x11window_data_param\x18\x81\x01 \x01(\x0b\x32\x1a.caffe.WindowDataParameter\x12=\n\x14smooth_l1_loss_param\x18\xd8\xc7\xf8\x03 \x01(\x0b\x32\x1c.caffe.SmoothL1LossParameter\x12\x33\n\x0eproposal_param\x18\xd9\xc7\xf8\x03 \x01(\x0b\x32\x18.caffe.ProposalParameter\"\xc8\x01\n\x11ProposalParameter\x12\x17\n\x0b\x66\x65\x61t_stride\x18\x01 \x01(\r:\x02\x31\x36\x12\x15\n\tbase_size\x18\x02 \x01(\r:\x02\x31\x36\x12\x14\n\x08min_size\x18\x03 \x01(\r:\x02\x31\x36\x12\r\n\x05ratio\x18\x04 \x03(\x02\x12\r\n\x05scale\x18\x05 \x03(\x02\x12\x1a\n\x0cpre_nms_topn\x18\x06 \x01(\r:\x04\x36\x30\x30\x30\x12\x1a\n\rpost_nms_topn\x18\x07 \x01(\r:\x03\x33\x30\x30\x12\x17\n\nnms_thresh\x18\x08 \x01(\x02:\x03\x30.7\")\n\x15SmoothL1LossParameter\x12\x10\n\x05sigma\x18\x01 \x01(\x02:\x01\x31\"\xca\x03\n\x17TransformationParameter\x12\x10\n\x05scale\x18\x01 \x01(\x02:\x01\x31\x12\x15\n\x06mirror\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x14\n\tcrop_size\x18\x03 \x01(\r:\x01\x30\x12\x11\n\x06\x63rop_h\x18\x0b \x01(\r:\x01\x30\x12\x11\n\x06\x63rop_w\x18\x0c \x01(\r:\x01\x30\x12\x11\n\tmean_file\x18\x04 \x01(\t\x12\x12\n\nmean_value\x18\x05 \x03(\x02\x12\x1a\n\x0b\x66orce_color\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x19\n\nforce_gray\x18\x07 \x01(\x08:\x05\x66\x61lse\x12,\n\x0cresize_param\x18\x08 \x01(\x0b\x32\x16.caffe.ResizeParameter\x12*\n\x0bnoise_param\x18\t \x01(\x0b\x32\x15.caffe.NoiseParameter\x12\x31\n\rdistort_param\x18\r \x01(\x0b\x32\x1a.caffe.DistortionParameter\x12/\n\x0c\x65xpand_param\x18\x0e \x01(\x0b\x32\x19.caffe.ExpansionParameter\x12.\n\x0f\x65mit_constraint\x18\n \x01(\x0b\x32\x15.caffe.EmitConstraint\"\x90\x04\n\x0fResizeParameter\x12\x0f\n\x04prob\x18\x01 \x01(\x02:\x01\x31\x12=\n\x0bresize_mode\x18\x02 \x01(\x0e\x32\".caffe.ResizeParameter.Resize_mode:\x04WARP\x12\x11\n\x06height\x18\x03 \x01(\r:\x01\x30\x12\x10\n\x05width\x18\x04 \x01(\r:\x01\x30\x12\x17\n\x0cheight_scale\x18\x08 
\x01(\r:\x01\x30\x12\x16\n\x0bwidth_scale\x18\t \x01(\r:\x01\x30\x12;\n\x08pad_mode\x18\x05 \x01(\x0e\x32\x1f.caffe.ResizeParameter.Pad_mode:\x08\x43ONSTANT\x12\x11\n\tpad_value\x18\x06 \x03(\x02\x12\x37\n\x0binterp_mode\x18\x07 \x03(\x0e\x32\".caffe.ResizeParameter.Interp_mode\"G\n\x0bResize_mode\x12\x08\n\x04WARP\x10\x01\x12\x12\n\x0e\x46IT_SMALL_SIZE\x10\x02\x12\x1a\n\x16\x46IT_LARGE_SIZE_AND_PAD\x10\x03\":\n\x08Pad_mode\x12\x0c\n\x08\x43ONSTANT\x10\x01\x12\x0c\n\x08MIRRORED\x10\x02\x12\x12\n\x0eREPEAT_NEAREST\x10\x03\"I\n\x0bInterp_mode\x12\n\n\x06LINEAR\x10\x01\x12\x08\n\x04\x41REA\x10\x02\x12\x0b\n\x07NEAREST\x10\x03\x12\t\n\x05\x43UBIC\x10\x04\x12\x0c\n\x08LANCZOS4\x10\x05\"9\n\x13SaltPepperParameter\x12\x13\n\x08\x66raction\x18\x01 \x01(\x02:\x01\x30\x12\r\n\x05value\x18\x02 \x03(\x02\"\xee\x02\n\x0eNoiseParameter\x12\x0f\n\x04prob\x18\x01 \x01(\x02:\x01\x30\x12\x16\n\x07hist_eq\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x16\n\x07inverse\x18\x03 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ndecolorize\x18\x04 \x01(\x08:\x05\x66\x61lse\x12\x19\n\ngauss_blur\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x10\n\x04jpeg\x18\x06 \x01(\x02:\x02-1\x12\x18\n\tposterize\x18\x07 \x01(\x08:\x05\x66\x61lse\x12\x14\n\x05\x65rode\x18\x08 \x01(\x08:\x05\x66\x61lse\x12\x19\n\nsaltpepper\x18\t \x01(\x08:\x05\x66\x61lse\x12\x34\n\x10saltpepper_param\x18\n \x01(\x0b\x32\x1a.caffe.SaltPepperParameter\x12\x14\n\x05\x63lahe\x18\x0b \x01(\x08:\x05\x66\x61lse\x12\x1d\n\x0e\x63onvert_to_hsv\x18\x0c \x01(\x08:\x05\x66\x61lse\x12\x1d\n\x0e\x63onvert_to_lab\x18\r \x01(\x08:\x05\x66\x61lse\"\xbd\x02\n\x13\x44istortionParameter\x12\x1a\n\x0f\x62rightness_prob\x18\x01 \x01(\x02:\x01\x30\x12\x1b\n\x10\x62rightness_delta\x18\x02 \x01(\x02:\x01\x30\x12\x18\n\rcontrast_prob\x18\x03 \x01(\x02:\x01\x30\x12\x19\n\x0e\x63ontrast_lower\x18\x04 \x01(\x02:\x01\x30\x12\x19\n\x0e\x63ontrast_upper\x18\x05 \x01(\x02:\x01\x30\x12\x13\n\x08hue_prob\x18\x06 \x01(\x02:\x01\x30\x12\x14\n\thue_delta\x18\x07 
\x01(\x02:\x01\x30\x12\x1a\n\x0fsaturation_prob\x18\x08 \x01(\x02:\x01\x30\x12\x1b\n\x10saturation_lower\x18\t \x01(\x02:\x01\x30\x12\x1b\n\x10saturation_upper\x18\n \x01(\x02:\x01\x30\x12\x1c\n\x11random_order_prob\x18\x0b \x01(\x02:\x01\x30\"B\n\x12\x45xpansionParameter\x12\x0f\n\x04prob\x18\x01 \x01(\x02:\x01\x31\x12\x1b\n\x10max_expand_ratio\x18\x02 \x01(\x02:\x01\x31\"\xc2\x01\n\rLossParameter\x12\x14\n\x0cignore_label\x18\x01 \x01(\x05\x12\x44\n\rnormalization\x18\x03 \x01(\x0e\x32&.caffe.LossParameter.NormalizationMode:\x05VALID\x12\x11\n\tnormalize\x18\x02 \x01(\x08\"B\n\x11NormalizationMode\x12\x08\n\x04\x46ULL\x10\x00\x12\t\n\x05VALID\x10\x01\x12\x0e\n\nBATCH_SIZE\x10\x02\x12\x08\n\x04NONE\x10\x03\"L\n\x11\x41\x63\x63uracyParameter\x12\x10\n\x05top_k\x18\x01 \x01(\r:\x01\x31\x12\x0f\n\x04\x61xis\x18\x02 \x01(\x05:\x01\x31\x12\x14\n\x0cignore_label\x18\x03 \x01(\x05\"\x95\x01\n\x16\x41nnotatedDataParameter\x12*\n\rbatch_sampler\x18\x01 \x03(\x0b\x32\x13.caffe.BatchSampler\x12\x16\n\x0elabel_map_file\x18\x02 \x01(\t\x12\x37\n\tanno_type\x18\x03 \x01(\x0e\x32$.caffe.AnnotatedDatum.AnnotationType\"M\n\x0f\x41rgMaxParameter\x12\x1a\n\x0bout_max_val\x18\x01 \x01(\x08:\x05\x66\x61lse\x12\x10\n\x05top_k\x18\x02 \x01(\r:\x01\x31\x12\x0c\n\x04\x61xis\x18\x03 \x01(\x05\"9\n\x0f\x43oncatParameter\x12\x0f\n\x04\x61xis\x18\x02 \x01(\x05:\x01\x31\x12\x15\n\nconcat_dim\x18\x01 \x01(\r:\x01\x31\"j\n\x12\x42\x61tchNormParameter\x12\x18\n\x10use_global_stats\x18\x01 \x01(\x08\x12&\n\x17moving_average_fraction\x18\x02 \x01(\x02:\x05\x30.999\x12\x12\n\x03\x65ps\x18\x03 \x01(\x02:\x05\x31\x65-05\"]\n\rBiasParameter\x12\x0f\n\x04\x61xis\x18\x01 \x01(\x05:\x01\x31\x12\x13\n\x08num_axes\x18\x02 \x01(\x05:\x01\x31\x12&\n\x06\x66iller\x18\x03 \x01(\x0b\x32\x16.caffe.FillerParameter\"L\n\x18\x43ontrastiveLossParameter\x12\x11\n\x06margin\x18\x01 \x01(\x02:\x01\x31\x12\x1d\n\x0elegacy_version\x18\x02 
\x01(\x08:\x05\x66\x61lse\"\xfc\x03\n\x14\x43onvolutionParameter\x12\x12\n\nnum_output\x18\x01 \x01(\r\x12\x17\n\tbias_term\x18\x02 \x01(\x08:\x04true\x12\x0b\n\x03pad\x18\x03 \x03(\r\x12\x13\n\x0bkernel_size\x18\x04 \x03(\r\x12\x0e\n\x06stride\x18\x06 \x03(\r\x12\x10\n\x08\x64ilation\x18\x12 \x03(\r\x12\x10\n\x05pad_h\x18\t \x01(\r:\x01\x30\x12\x10\n\x05pad_w\x18\n \x01(\r:\x01\x30\x12\x10\n\x08kernel_h\x18\x0b \x01(\r\x12\x10\n\x08kernel_w\x18\x0c \x01(\r\x12\x10\n\x08stride_h\x18\r \x01(\r\x12\x10\n\x08stride_w\x18\x0e \x01(\r\x12\x10\n\x05group\x18\x05 \x01(\r:\x01\x31\x12-\n\rweight_filler\x18\x07 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x08 \x01(\x0b\x32\x16.caffe.FillerParameter\x12;\n\x06\x65ngine\x18\x0f \x01(\x0e\x32\".caffe.ConvolutionParameter.Engine:\x07\x44\x45\x46\x41ULT\x12\x0f\n\x04\x61xis\x18\x10 \x01(\x05:\x01\x31\x12\x1e\n\x0f\x66orce_nd_im2col\x18\x11 \x01(\x08:\x05\x66\x61lse\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"0\n\rCropParameter\x12\x0f\n\x04\x61xis\x18\x01 \x01(\x05:\x01\x32\x12\x0e\n\x06offset\x18\x02 \x03(\r\"P\n\x13\x43TCDecoderParameter\x12\x17\n\x0b\x62lank_index\x18\x01 \x01(\x05:\x02-1\x12 \n\x12\x63tc_merge_repeated\x18\x02 \x01(\x08:\x04true\"\xb2\x01\n\x10\x43TCLossParameter\x12\x17\n\x0coutput_delay\x18\x01 \x01(\x05:\x01\x30\x12\x17\n\x0b\x62lank_index\x18\x02 \x01(\x05:\x02-1\x12+\n\x1cpreprocess_collapse_repeated\x18\x03 \x01(\x08:\x05\x66\x61lse\x12 \n\x12\x63tc_merge_repeated\x18\x04 \x01(\x08:\x04true\x12\x1d\n\x12loss_calculation_t\x18\x05 \x01(\x05:\x01\x30\"\xa4\x02\n\rDataParameter\x12\x0e\n\x06source\x18\x01 \x01(\t\x12\x12\n\nbatch_size\x18\x04 \x01(\r\x12\x14\n\trand_skip\x18\x07 \x01(\r:\x01\x30\x12\x31\n\x07\x62\x61\x63kend\x18\x08 \x01(\x0e\x32\x17.caffe.DataParameter.DB:\x07LEVELDB\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x11\n\tmean_file\x18\x03 \x01(\t\x12\x14\n\tcrop_size\x18\x05 
\x01(\r:\x01\x30\x12\x15\n\x06mirror\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\"\n\x13\x66orce_encoded_color\x18\t \x01(\x08:\x05\x66\x61lse\x12\x13\n\x08prefetch\x18\n \x01(\r:\x01\x34\"\x1b\n\x02\x44\x42\x12\x0b\n\x07LEVELDB\x10\x00\x12\x08\n\x04LMDB\x10\x01\"\xf7\x01\n\x1a\x44\x65tectionEvaluateParameter\x12\x13\n\x0bnum_classes\x18\x01 \x01(\r\x12\x1e\n\x13\x62\x61\x63kground_label_id\x18\x02 \x01(\r:\x01\x30\x12\x1e\n\x11overlap_threshold\x18\x03 \x01(\x02:\x03\x30.5\x12#\n\x15\x65valuate_difficult_gt\x18\x04 \x01(\x08:\x04true\x12\x16\n\x0ename_size_file\x18\x05 \x01(\t\x12,\n\x0cresize_param\x18\x06 \x01(\x0b\x32\x16.caffe.ResizeParameter\x12\x19\n\x0buse_polygon\x18\x07 \x01(\x08:\x04true\"[\n\x1eNonMaximumSuppressionParameter\x12\x1a\n\rnms_threshold\x18\x01 \x01(\x02:\x03\x30.3\x12\r\n\x05top_k\x18\x02 \x01(\x05\x12\x0e\n\x03\x65ta\x18\x03 \x01(\x02:\x01\x31\"\xd8\x01\n\x13SaveOutputParameter\x12\x18\n\x10output_directory\x18\x01 \x01(\t\x12\x1a\n\x12output_name_prefix\x18\x02 \x01(\t\x12\x15\n\routput_format\x18\x03 \x01(\t\x12\x16\n\x0elabel_map_file\x18\x04 \x01(\t\x12\x16\n\x0ename_size_file\x18\x05 \x01(\t\x12\x16\n\x0enum_test_image\x18\x06 \x01(\r\x12,\n\x0cresize_param\x18\x07 \x01(\x0b\x32\x16.caffe.ResizeParameter\"\xe2\x03\n\x18\x44\x65tectionOutputParameter\x12\x13\n\x0bnum_classes\x18\x01 \x01(\r\x12\x1c\n\x0eshare_location\x18\x02 \x01(\x08:\x04true\x12\x1e\n\x13\x62\x61\x63kground_label_id\x18\x03 \x01(\x05:\x01\x30\x12\x38\n\tnms_param\x18\x04 \x01(\x0b\x32%.caffe.NonMaximumSuppressionParameter\x12\x35\n\x11save_output_param\x18\x05 \x01(\x0b\x32\x1a.caffe.SaveOutputParameter\x12<\n\tcode_type\x18\x06 \x01(\x0e\x32!.caffe.PriorBoxParameter.CodeType:\x06\x43ORNER\x12)\n\x1avariance_encoded_in_target\x18\x08 \x01(\x08:\x05\x66\x61lse\x12\x16\n\nkeep_top_k\x18\x07 \x01(\x05:\x02-1\x12\x1c\n\x14\x63onfidence_threshold\x18\t \x01(\x02\x12\x18\n\tvisualize\x18\n \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x13visualize_threshold\x18\x0b 
\x01(\x02\x12\x11\n\tsave_file\x18\x0c \x01(\t\x12\x19\n\x0buse_polygon\x18\r \x01(\x08:\x04true\"I\n\x10\x44ropoutParameter\x12\x1a\n\rdropout_ratio\x18\x01 \x01(\x02:\x03\x30.5\x12\x19\n\x0bscale_train\x18\x02 \x01(\x08:\x04true\"\xa0\x01\n\x12\x44ummyDataParameter\x12+\n\x0b\x64\x61ta_filler\x18\x01 \x03(\x0b\x32\x16.caffe.FillerParameter\x12\x1f\n\x05shape\x18\x06 \x03(\x0b\x32\x10.caffe.BlobShape\x12\x0b\n\x03num\x18\x02 \x03(\r\x12\x10\n\x08\x63hannels\x18\x03 \x03(\r\x12\x0e\n\x06height\x18\x04 \x03(\r\x12\r\n\x05width\x18\x05 \x03(\r\"\xa5\x01\n\x10\x45ltwiseParameter\x12\x39\n\toperation\x18\x01 \x01(\x0e\x32!.caffe.EltwiseParameter.EltwiseOp:\x03SUM\x12\r\n\x05\x63oeff\x18\x02 \x03(\x02\x12\x1e\n\x10stable_prod_grad\x18\x03 \x01(\x08:\x04true\"\'\n\tEltwiseOp\x12\x08\n\x04PROD\x10\x00\x12\x07\n\x03SUM\x10\x01\x12\x07\n\x03MAX\x10\x02\" \n\x0c\x45LUParameter\x12\x10\n\x05\x61lpha\x18\x01 \x01(\x02:\x01\x31\"\xac\x01\n\x0e\x45mbedParameter\x12\x12\n\nnum_output\x18\x01 \x01(\r\x12\x11\n\tinput_dim\x18\x02 \x01(\r\x12\x17\n\tbias_term\x18\x03 \x01(\x08:\x04true\x12-\n\rweight_filler\x18\x04 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x05 \x01(\x0b\x32\x16.caffe.FillerParameter\"D\n\x0c\x45xpParameter\x12\x10\n\x04\x62\x61se\x18\x01 \x01(\x02:\x02-1\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x10\n\x05shift\x18\x03 \x01(\x02:\x01\x30\"9\n\x10\x46lattenParameter\x12\x0f\n\x04\x61xis\x18\x01 \x01(\x05:\x01\x31\x12\x14\n\x08\x65nd_axis\x18\x02 \x01(\x05:\x02-1\"O\n\x11HDF5DataParameter\x12\x0e\n\x06source\x18\x01 \x01(\t\x12\x12\n\nbatch_size\x18\x02 \x01(\r\x12\x16\n\x07shuffle\x18\x03 \x01(\x08:\x05\x66\x61lse\"(\n\x13HDF5OutputParameter\x12\x11\n\tfile_name\x18\x01 \x01(\t\"^\n\x12HingeLossParameter\x12\x30\n\x04norm\x18\x01 \x01(\x0e\x32\x1e.caffe.HingeLossParameter.Norm:\x02L1\"\x16\n\x04Norm\x12\x06\n\x02L1\x10\x01\x12\x06\n\x02L2\x10\x02\"\x97\x02\n\x12ImageDataParameter\x12\x0e\n\x06source\x18\x01 
\x01(\t\x12\x15\n\nbatch_size\x18\x04 \x01(\r:\x01\x31\x12\x14\n\trand_skip\x18\x07 \x01(\r:\x01\x30\x12\x16\n\x07shuffle\x18\x08 \x01(\x08:\x05\x66\x61lse\x12\x15\n\nnew_height\x18\t \x01(\r:\x01\x30\x12\x14\n\tnew_width\x18\n \x01(\r:\x01\x30\x12\x16\n\x08is_color\x18\x0b \x01(\x08:\x04true\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x11\n\tmean_file\x18\x03 \x01(\t\x12\x14\n\tcrop_size\x18\x05 \x01(\r:\x01\x30\x12\x15\n\x06mirror\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x15\n\x0broot_folder\x18\x0c \x01(\t:\x00\"\'\n\x15InfogainLossParameter\x12\x0e\n\x06source\x18\x01 \x01(\t\"\xcb\x01\n\x15InnerProductParameter\x12\x12\n\nnum_output\x18\x01 \x01(\r\x12\x17\n\tbias_term\x18\x02 \x01(\x08:\x04true\x12-\n\rweight_filler\x18\x03 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x04 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x0f\n\x04\x61xis\x18\x05 \x01(\x05:\x01\x31\x12\x18\n\ttranspose\x18\x06 \x01(\x08:\x05\x66\x61lse\"1\n\x0eInputParameter\x12\x1f\n\x05shape\x18\x01 \x03(\x0b\x32\x10.caffe.BlobShape\"D\n\x0cLogParameter\x12\x10\n\x04\x62\x61se\x18\x01 \x01(\x02:\x02-1\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x10\n\x05shift\x18\x03 \x01(\x02:\x01\x30\"\xb8\x02\n\x0cLRNParameter\x12\x15\n\nlocal_size\x18\x01 \x01(\r:\x01\x35\x12\x10\n\x05\x61lpha\x18\x02 \x01(\x02:\x01\x31\x12\x12\n\x04\x62\x65ta\x18\x03 \x01(\x02:\x04\x30.75\x12\x44\n\x0bnorm_region\x18\x04 \x01(\x0e\x32\x1e.caffe.LRNParameter.NormRegion:\x0f\x41\x43ROSS_CHANNELS\x12\x0c\n\x01k\x18\x05 \x01(\x02:\x01\x31\x12\x33\n\x06\x65ngine\x18\x06 \x01(\x0e\x32\x1a.caffe.LRNParameter.Engine:\x07\x44\x45\x46\x41ULT\"5\n\nNormRegion\x12\x13\n\x0f\x41\x43ROSS_CHANNELS\x10\x00\x12\x12\n\x0eWITHIN_CHANNEL\x10\x01\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"Z\n\x13MemoryDataParameter\x12\x12\n\nbatch_size\x18\x01 \x01(\r\x12\x10\n\x08\x63hannels\x18\x02 \x01(\r\x12\x0e\n\x06height\x18\x03 
\x01(\r\x12\r\n\x05width\x18\x04 \x01(\r\"\x83\t\n\x15MultiBoxLossParameter\x12J\n\rloc_loss_type\x18\x01 \x01(\x0e\x32(.caffe.MultiBoxLossParameter.LocLossType:\tSMOOTH_L1\x12J\n\x0e\x63onf_loss_type\x18\x02 \x01(\x0e\x32).caffe.MultiBoxLossParameter.ConfLossType:\x07SOFTMAX\x12\x15\n\nloc_weight\x18\x03 \x01(\x02:\x01\x31\x12\x13\n\x0bnum_classes\x18\x04 \x01(\r\x12\x1c\n\x0eshare_location\x18\x05 \x01(\x08:\x04true\x12J\n\nmatch_type\x18\x06 \x01(\x0e\x32&.caffe.MultiBoxLossParameter.MatchType:\x0ePER_PREDICTION\x12\x1e\n\x11overlap_threshold\x18\x07 \x01(\x02:\x03\x30.5\x12$\n\x16use_prior_for_matching\x18\x08 \x01(\x08:\x04true\x12\x1e\n\x13\x62\x61\x63kground_label_id\x18\t \x01(\r:\x01\x30\x12\x1e\n\x10use_difficult_gt\x18\n \x01(\x08:\x04true\x12\x15\n\rdo_neg_mining\x18\x0b \x01(\x08\x12\x18\n\rneg_pos_ratio\x18\x0c \x01(\x02:\x01\x33\x12\x18\n\x0bneg_overlap\x18\r \x01(\x02:\x03\x30.5\x12<\n\tcode_type\x18\x0e \x01(\x0e\x32!.caffe.PriorBoxParameter.CodeType:\x06\x43ORNER\x12(\n\x19\x65ncode_variance_in_target\x18\x10 \x01(\x08:\x05\x66\x61lse\x12%\n\x16map_object_to_agnostic\x18\x11 \x01(\x08:\x05\x66\x61lse\x12)\n\x1aignore_cross_boundary_bbox\x18\x12 \x01(\x08:\x05\x66\x61lse\x12\x18\n\tbp_inside\x18\x13 \x01(\x08:\x05\x66\x61lse\x12J\n\x0bmining_type\x18\x14 \x01(\x0e\x32\'.caffe.MultiBoxLossParameter.MiningType:\x0cMAX_NEGATIVE\x12\x38\n\tnms_param\x18\x15 \x01(\x0b\x32%.caffe.NonMaximumSuppressionParameter\x12\x17\n\x0bsample_size\x18\x16 \x01(\x05:\x02\x36\x34\x12 \n\x11use_prior_for_nms\x18\x17 \x01(\x08:\x05\x66\x61lse\x12\x19\n\x0buse_polygon\x18\x18 \x01(\x08:\x04true\"$\n\x0bLocLossType\x12\x06\n\x02L2\x10\x00\x12\r\n\tSMOOTH_L1\x10\x01\")\n\x0c\x43onfLossType\x12\x0b\n\x07SOFTMAX\x10\x00\x12\x0c\n\x08LOGISTIC\x10\x01\".\n\tMatchType\x12\r\n\tBIPARTITE\x10\x00\x12\x12\n\x0ePER_PREDICTION\x10\x01\":\n\nMiningType\x12\x08\n\x04NONE\x10\x00\x12\x10\n\x0cMAX_NEGATIVE\x10\x01\x12\x10\n\x0cHARD_EXAMPLE\x10\x02\"d\n\x0cMVNParameter\x12 
\n\x12normalize_variance\x18\x01 \x01(\x08:\x04true\x12\x1e\n\x0f\x61\x63ross_channels\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x12\n\x03\x65ps\x18\x03 \x01(\x02:\x05\x31\x65-09\"\x92\x01\n\x12NormalizeParameter\x12\x1c\n\x0e\x61\x63ross_spatial\x18\x01 \x01(\x08:\x04true\x12,\n\x0cscale_filler\x18\x02 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x1c\n\x0e\x63hannel_shared\x18\x03 \x01(\x08:\x04true\x12\x12\n\x03\x65ps\x18\x04 \x01(\x02:\x05\x31\x65-10\"5\n\x12ParameterParameter\x12\x1f\n\x05shape\x18\x01 \x01(\x0b\x32\x10.caffe.BlobShape\"!\n\x10PermuteParameter\x12\r\n\x05order\x18\x01 \x03(\r\"\xa2\x03\n\x10PoolingParameter\x12\x35\n\x04pool\x18\x01 \x01(\x0e\x32\".caffe.PoolingParameter.PoolMethod:\x03MAX\x12\x0e\n\x03pad\x18\x04 \x01(\r:\x01\x30\x12\x10\n\x05pad_h\x18\t \x01(\r:\x01\x30\x12\x10\n\x05pad_w\x18\n \x01(\r:\x01\x30\x12\x13\n\x0bkernel_size\x18\x02 \x01(\r\x12\x10\n\x08kernel_h\x18\x05 \x01(\r\x12\x10\n\x08kernel_w\x18\x06 \x01(\r\x12\x11\n\x06stride\x18\x03 \x01(\r:\x01\x31\x12\x10\n\x08stride_h\x18\x07 \x01(\r\x12\x10\n\x08stride_w\x18\x08 \x01(\r\x12\x37\n\x06\x65ngine\x18\x0b \x01(\x0e\x32\x1e.caffe.PoolingParameter.Engine:\x07\x44\x45\x46\x41ULT\x12\x1d\n\x0eglobal_pooling\x18\x0c \x01(\x08:\x05\x66\x61lse\".\n\nPoolMethod\x12\x07\n\x03MAX\x10\x00\x12\x07\n\x03\x41VE\x10\x01\x12\x0e\n\nSTOCHASTIC\x10\x02\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"F\n\x0ePowerParameter\x12\x10\n\x05power\x18\x01 \x01(\x02:\x01\x31\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x10\n\x05shift\x18\x03 \x01(\x02:\x01\x30\"\xd8\x02\n\x11PriorBoxParameter\x12\x10\n\x08min_size\x18\x01 \x03(\x02\x12\x10\n\x08max_size\x18\x02 \x03(\x02\x12\x14\n\x0c\x61spect_ratio\x18\x03 \x03(\x02\x12\x12\n\x04\x66lip\x18\x04 \x01(\x08:\x04true\x12\x13\n\x04\x63lip\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x10\n\x08variance\x18\x06 \x03(\x02\x12\x10\n\x08img_size\x18\x07 \x01(\r\x12\r\n\x05img_h\x18\x08 
\x01(\r\x12\r\n\x05img_w\x18\t \x01(\r\x12\x0c\n\x04step\x18\n \x01(\x02\x12\x0e\n\x06step_h\x18\x0b \x01(\x02\x12\x0e\n\x06step_w\x18\x0c \x01(\x02\x12\x13\n\x06offset\x18\r \x01(\x02:\x03\x30.5\x12!\n\x12\x64\x65nser_prior_boxes\x18\x0e \x01(\x08:\x05\x66\x61lse\"8\n\x08\x43odeType\x12\n\n\x06\x43ORNER\x10\x01\x12\x0f\n\x0b\x43\x45NTER_SIZE\x10\x02\x12\x0f\n\x0b\x43ORNER_SIZE\x10\x03\"g\n\x0fPythonParameter\x12\x0e\n\x06module\x18\x01 \x01(\t\x12\r\n\x05layer\x18\x02 \x01(\t\x12\x13\n\tparam_str\x18\x03 \x01(\t:\x00\x12 \n\x11share_in_parallel\x18\x04 \x01(\x08:\x05\x66\x61lse\"\xc0\x01\n\x12RecurrentParameter\x12\x15\n\nnum_output\x18\x01 \x01(\r:\x01\x30\x12-\n\rweight_filler\x18\x02 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x03 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x19\n\ndebug_info\x18\x04 \x01(\x08:\x05\x66\x61lse\x12\x1c\n\rexpose_hidden\x18\x05 \x01(\x08:\x05\x66\x61lse\"\xad\x01\n\x12ReductionParameter\x12=\n\toperation\x18\x01 \x01(\x0e\x32%.caffe.ReductionParameter.ReductionOp:\x03SUM\x12\x0f\n\x04\x61xis\x18\x02 \x01(\x05:\x01\x30\x12\x10\n\x05\x63oeff\x18\x03 \x01(\x02:\x01\x31\"5\n\x0bReductionOp\x12\x07\n\x03SUM\x10\x01\x12\x08\n\x04\x41SUM\x10\x02\x12\t\n\x05SUMSQ\x10\x03\x12\x08\n\x04MEAN\x10\x04\"\x8d\x01\n\rReLUParameter\x12\x19\n\x0enegative_slope\x18\x01 \x01(\x02:\x01\x30\x12\x34\n\x06\x65ngine\x18\x02 \x01(\x0e\x32\x1b.caffe.ReLUParameter.Engine:\x07\x44\x45\x46\x41ULT\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"Z\n\x10ReshapeParameter\x12\x1f\n\x05shape\x18\x01 \x01(\x0b\x32\x10.caffe.BlobShape\x12\x0f\n\x04\x61xis\x18\x02 \x01(\x05:\x01\x30\x12\x14\n\x08num_axes\x18\x03 \x01(\x05:\x02-1\"#\n\x10ReverseParameter\x12\x0f\n\x04\x61xis\x18\x01 \x01(\x05:\x01\x30\"Y\n\x13ROIPoolingParameter\x12\x13\n\x08pooled_h\x18\x01 \x01(\r:\x01\x30\x12\x13\n\x08pooled_w\x18\x02 \x01(\r:\x01\x30\x12\x18\n\rspatial_scale\x18\x03 
\x01(\x02:\x01\x31\"\xa5\x01\n\x0eScaleParameter\x12\x0f\n\x04\x61xis\x18\x01 \x01(\x05:\x01\x31\x12\x13\n\x08num_axes\x18\x02 \x01(\x05:\x01\x31\x12&\n\x06\x66iller\x18\x03 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x18\n\tbias_term\x18\x04 \x01(\x08:\x05\x66\x61lse\x12+\n\x0b\x62ias_filler\x18\x05 \x01(\x0b\x32\x16.caffe.FillerParameter\"x\n\x10SigmoidParameter\x12\x37\n\x06\x65ngine\x18\x01 \x01(\x0e\x32\x1e.caffe.SigmoidParameter.Engine:\x07\x44\x45\x46\x41ULT\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"L\n\x0eSliceParameter\x12\x0f\n\x04\x61xis\x18\x03 \x01(\x05:\x01\x31\x12\x13\n\x0bslice_point\x18\x02 \x03(\r\x12\x14\n\tslice_dim\x18\x01 \x01(\r:\x01\x31\"\x89\x01\n\x10SoftmaxParameter\x12\x37\n\x06\x65ngine\x18\x01 \x01(\x0e\x32\x1e.caffe.SoftmaxParameter.Engine:\x07\x44\x45\x46\x41ULT\x12\x0f\n\x04\x61xis\x18\x02 \x01(\x05:\x01\x31\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"r\n\rTanHParameter\x12\x34\n\x06\x65ngine\x18\x01 \x01(\x0e\x32\x1b.caffe.TanHParameter.Engine:\x07\x44\x45\x46\x41ULT\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"/\n\rTileParameter\x12\x0f\n\x04\x61xis\x18\x01 \x01(\x05:\x01\x31\x12\r\n\x05tiles\x18\x02 \x01(\x05\"*\n\x12ThresholdParameter\x12\x14\n\tthreshold\x18\x01 \x01(\x02:\x01\x30\"\xbb\x01\n\x12VideoDataParameter\x12?\n\nvideo_type\x18\x01 \x01(\x0e\x32#.caffe.VideoDataParameter.VideoType:\x06WEBCAM\x12\x14\n\tdevice_id\x18\x02 \x01(\x05:\x01\x30\x12\x12\n\nvideo_file\x18\x03 \x01(\t\x12\x16\n\x0bskip_frames\x18\x04 \x01(\r:\x01\x30\"\"\n\tVideoType\x12\n\n\x06WEBCAM\x10\x00\x12\t\n\x05VIDEO\x10\x01\"\xc1\x02\n\x13WindowDataParameter\x12\x0e\n\x06source\x18\x01 \x01(\t\x12\x10\n\x05scale\x18\x02 \x01(\x02:\x01\x31\x12\x11\n\tmean_file\x18\x03 \x01(\t\x12\x12\n\nbatch_size\x18\x04 
\x01(\r\x12\x14\n\tcrop_size\x18\x05 \x01(\r:\x01\x30\x12\x15\n\x06mirror\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x19\n\x0c\x66g_threshold\x18\x07 \x01(\x02:\x03\x30.5\x12\x19\n\x0c\x62g_threshold\x18\x08 \x01(\x02:\x03\x30.5\x12\x19\n\x0b\x66g_fraction\x18\t \x01(\x02:\x04\x30.25\x12\x16\n\x0b\x63ontext_pad\x18\n \x01(\r:\x01\x30\x12\x17\n\tcrop_mode\x18\x0b \x01(\t:\x04warp\x12\x1b\n\x0c\x63\x61\x63he_images\x18\x0c \x01(\x08:\x05\x66\x61lse\x12\x15\n\x0broot_folder\x18\r \x01(\t:\x00\"\xeb\x01\n\x0cSPPParameter\x12\x16\n\x0epyramid_height\x18\x01 \x01(\r\x12\x31\n\x04pool\x18\x02 \x01(\x0e\x32\x1e.caffe.SPPParameter.PoolMethod:\x03MAX\x12\x33\n\x06\x65ngine\x18\x06 \x01(\x0e\x32\x1a.caffe.SPPParameter.Engine:\x07\x44\x45\x46\x41ULT\".\n\nPoolMethod\x12\x07\n\x03MAX\x10\x00\x12\x07\n\x03\x41VE\x10\x01\x12\x0e\n\nSTOCHASTIC\x10\x02\"+\n\x06\x45ngine\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x43\x41\x46\x46\x45\x10\x01\x12\t\n\x05\x43UDNN\x10\x02\"\xe0\x13\n\x10V1LayerParameter\x12\x0e\n\x06\x62ottom\x18\x02 \x03(\t\x12\x0b\n\x03top\x18\x03 \x03(\t\x12\x0c\n\x04name\x18\x04 \x01(\t\x12$\n\x07include\x18 \x03(\x0b\x32\x13.caffe.NetStateRule\x12$\n\x07\x65xclude\x18! 
\x03(\x0b\x32\x13.caffe.NetStateRule\x12/\n\x04type\x18\x05 \x01(\x0e\x32!.caffe.V1LayerParameter.LayerType\x12\x1f\n\x05\x62lobs\x18\x06 \x03(\x0b\x32\x10.caffe.BlobProto\x12\x0e\n\x05param\x18\xe9\x07 \x03(\t\x12>\n\x0f\x62lob_share_mode\x18\xea\x07 \x03(\x0e\x32$.caffe.V1LayerParameter.DimCheckMode\x12\x10\n\x08\x62lobs_lr\x18\x07 \x03(\x02\x12\x14\n\x0cweight_decay\x18\x08 \x03(\x02\x12\x13\n\x0bloss_weight\x18# \x03(\x02\x12\x30\n\x0e\x61\x63\x63uracy_param\x18\x1b \x01(\x0b\x32\x18.caffe.AccuracyParameter\x12,\n\x0c\x61rgmax_param\x18\x17 \x01(\x0b\x32\x16.caffe.ArgMaxParameter\x12,\n\x0c\x63oncat_param\x18\t \x01(\x0b\x32\x16.caffe.ConcatParameter\x12?\n\x16\x63ontrastive_loss_param\x18( \x01(\x0b\x32\x1f.caffe.ContrastiveLossParameter\x12\x36\n\x11\x63onvolution_param\x18\n \x01(\x0b\x32\x1b.caffe.ConvolutionParameter\x12(\n\ndata_param\x18\x0b \x01(\x0b\x32\x14.caffe.DataParameter\x12.\n\rdropout_param\x18\x0c \x01(\x0b\x32\x17.caffe.DropoutParameter\x12\x33\n\x10\x64ummy_data_param\x18\x1a \x01(\x0b\x32\x19.caffe.DummyDataParameter\x12.\n\reltwise_param\x18\x18 \x01(\x0b\x32\x17.caffe.EltwiseParameter\x12&\n\texp_param\x18) \x01(\x0b\x32\x13.caffe.ExpParameter\x12\x31\n\x0fhdf5_data_param\x18\r \x01(\x0b\x32\x18.caffe.HDF5DataParameter\x12\x35\n\x11hdf5_output_param\x18\x0e \x01(\x0b\x32\x1a.caffe.HDF5OutputParameter\x12\x33\n\x10hinge_loss_param\x18\x1d \x01(\x0b\x32\x19.caffe.HingeLossParameter\x12\x33\n\x10image_data_param\x18\x0f \x01(\x0b\x32\x19.caffe.ImageDataParameter\x12\x39\n\x13infogain_loss_param\x18\x10 \x01(\x0b\x32\x1c.caffe.InfogainLossParameter\x12\x39\n\x13inner_product_param\x18\x11 \x01(\x0b\x32\x1c.caffe.InnerProductParameter\x12&\n\tlrn_param\x18\x12 \x01(\x0b\x32\x13.caffe.LRNParameter\x12\x35\n\x11memory_data_param\x18\x16 \x01(\x0b\x32\x1a.caffe.MemoryDataParameter\x12&\n\tmvn_param\x18\" \x01(\x0b\x32\x13.caffe.MVNParameter\x12.\n\rpooling_param\x18\x13 \x01(\x0b\x32\x17.caffe.PoolingParameter\x12*\n\x0bpower_param\x18\x15 
\x01(\x0b\x32\x15.caffe.PowerParameter\x12(\n\nrelu_param\x18\x1e \x01(\x0b\x32\x14.caffe.ReLUParameter\x12.\n\rsigmoid_param\x18& \x01(\x0b\x32\x17.caffe.SigmoidParameter\x12.\n\rsoftmax_param\x18\' \x01(\x0b\x32\x17.caffe.SoftmaxParameter\x12*\n\x0bslice_param\x18\x1f \x01(\x0b\x32\x15.caffe.SliceParameter\x12(\n\ntanh_param\x18% \x01(\x0b\x32\x14.caffe.TanHParameter\x12\x32\n\x0fthreshold_param\x18\x19 \x01(\x0b\x32\x19.caffe.ThresholdParameter\x12\x35\n\x11window_data_param\x18\x14 \x01(\x0b\x32\x1a.caffe.WindowDataParameter\x12\x37\n\x0ftransform_param\x18$ \x01(\x0b\x32\x1e.caffe.TransformationParameter\x12(\n\nloss_param\x18* \x01(\x0b\x32\x14.caffe.LossParameter\x12&\n\x05layer\x18\x01 \x01(\x0b\x32\x17.caffe.V0LayerParameter\"\xd8\x04\n\tLayerType\x12\x08\n\x04NONE\x10\x00\x12\n\n\x06\x41\x42SVAL\x10#\x12\x0c\n\x08\x41\x43\x43URACY\x10\x01\x12\n\n\x06\x41RGMAX\x10\x1e\x12\x08\n\x04\x42NLL\x10\x02\x12\n\n\x06\x43ONCAT\x10\x03\x12\x14\n\x10\x43ONTRASTIVE_LOSS\x10%\x12\x0f\n\x0b\x43ONVOLUTION\x10\x04\x12\x08\n\x04\x44\x41TA\x10\x05\x12\x11\n\rDECONVOLUTION\x10\'\x12\x0b\n\x07\x44ROPOUT\x10\x06\x12\x0e\n\nDUMMY_DATA\x10 
\x12\x12\n\x0e\x45UCLIDEAN_LOSS\x10\x07\x12\x0b\n\x07\x45LTWISE\x10\x19\x12\x07\n\x03\x45XP\x10&\x12\x0b\n\x07\x46LATTEN\x10\x08\x12\r\n\tHDF5_DATA\x10\t\x12\x0f\n\x0bHDF5_OUTPUT\x10\n\x12\x0e\n\nHINGE_LOSS\x10\x1c\x12\n\n\x06IM2COL\x10\x0b\x12\x0e\n\nIMAGE_DATA\x10\x0c\x12\x11\n\rINFOGAIN_LOSS\x10\r\x12\x11\n\rINNER_PRODUCT\x10\x0e\x12\x07\n\x03LRN\x10\x0f\x12\x0f\n\x0bMEMORY_DATA\x10\x1d\x12\x1d\n\x19MULTINOMIAL_LOGISTIC_LOSS\x10\x10\x12\x07\n\x03MVN\x10\"\x12\x0b\n\x07POOLING\x10\x11\x12\t\n\x05POWER\x10\x1a\x12\x08\n\x04RELU\x10\x12\x12\x0b\n\x07SIGMOID\x10\x13\x12\x1e\n\x1aSIGMOID_CROSS_ENTROPY_LOSS\x10\x1b\x12\x0b\n\x07SILENCE\x10$\x12\x0b\n\x07SOFTMAX\x10\x14\x12\x10\n\x0cSOFTMAX_LOSS\x10\x15\x12\t\n\x05SPLIT\x10\x16\x12\t\n\x05SLICE\x10!\x12\x08\n\x04TANH\x10\x17\x12\x0f\n\x0bWINDOW_DATA\x10\x18\x12\r\n\tTHRESHOLD\x10\x1f\"*\n\x0c\x44imCheckMode\x12\n\n\x06STRICT\x10\x00\x12\x0e\n\nPERMISSIVE\x10\x01\"\xfd\x07\n\x10V0LayerParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x12\n\nnum_output\x18\x03 \x01(\r\x12\x16\n\x08\x62iasterm\x18\x04 \x01(\x08:\x04true\x12-\n\rweight_filler\x18\x05 \x01(\x0b\x32\x16.caffe.FillerParameter\x12+\n\x0b\x62ias_filler\x18\x06 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x0e\n\x03pad\x18\x07 \x01(\r:\x01\x30\x12\x12\n\nkernelsize\x18\x08 \x01(\r\x12\x10\n\x05group\x18\t \x01(\r:\x01\x31\x12\x11\n\x06stride\x18\n \x01(\r:\x01\x31\x12\x35\n\x04pool\x18\x0b \x01(\x0e\x32\".caffe.V0LayerParameter.PoolMethod:\x03MAX\x12\x1a\n\rdropout_ratio\x18\x0c \x01(\x02:\x03\x30.5\x12\x15\n\nlocal_size\x18\r \x01(\r:\x01\x35\x12\x10\n\x05\x61lpha\x18\x0e \x01(\x02:\x01\x31\x12\x12\n\x04\x62\x65ta\x18\x0f \x01(\x02:\x04\x30.75\x12\x0c\n\x01k\x18\x16 \x01(\x02:\x01\x31\x12\x0e\n\x06source\x18\x10 \x01(\t\x12\x10\n\x05scale\x18\x11 \x01(\x02:\x01\x31\x12\x10\n\x08meanfile\x18\x12 \x01(\t\x12\x11\n\tbatchsize\x18\x13 \x01(\r\x12\x13\n\x08\x63ropsize\x18\x14 \x01(\r:\x01\x30\x12\x15\n\x06mirror\x18\x15 
\x01(\x08:\x05\x66\x61lse\x12\x1f\n\x05\x62lobs\x18\x32 \x03(\x0b\x32\x10.caffe.BlobProto\x12\x10\n\x08\x62lobs_lr\x18\x33 \x03(\x02\x12\x14\n\x0cweight_decay\x18\x34 \x03(\x02\x12\x14\n\trand_skip\x18\x35 \x01(\r:\x01\x30\x12\x1d\n\x10\x64\x65t_fg_threshold\x18\x36 \x01(\x02:\x03\x30.5\x12\x1d\n\x10\x64\x65t_bg_threshold\x18\x37 \x01(\x02:\x03\x30.5\x12\x1d\n\x0f\x64\x65t_fg_fraction\x18\x38 \x01(\x02:\x04\x30.25\x12\x1a\n\x0f\x64\x65t_context_pad\x18: \x01(\r:\x01\x30\x12\x1b\n\rdet_crop_mode\x18; \x01(\t:\x04warp\x12\x12\n\x07new_num\x18< \x01(\x05:\x01\x30\x12\x17\n\x0cnew_channels\x18= \x01(\x05:\x01\x30\x12\x15\n\nnew_height\x18> \x01(\x05:\x01\x30\x12\x14\n\tnew_width\x18? \x01(\x05:\x01\x30\x12\x1d\n\x0eshuffle_images\x18@ \x01(\x08:\x05\x66\x61lse\x12\x15\n\nconcat_dim\x18\x41 \x01(\r:\x01\x31\x12\x36\n\x11hdf5_output_param\x18\xe9\x07 \x01(\x0b\x32\x1a.caffe.HDF5OutputParameter\".\n\nPoolMethod\x12\x07\n\x03MAX\x10\x00\x12\x07\n\x03\x41VE\x10\x01\x12\x0e\n\nSTOCHASTIC\x10\x02\"W\n\x0ePReLUParameter\x12&\n\x06\x66iller\x18\x01 \x01(\x0b\x32\x16.caffe.FillerParameter\x12\x1d\n\x0e\x63hannel_shared\x18\x02 \x01(\x08:\x05\x66\x61lse*\x1c\n\x05Phase\x12\t\n\x05TRAIN\x10\x00\x12\x08\n\x04TEST\x10\x01')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_PHASE = _descriptor.EnumDescriptor(
name='Phase',
full_name='caffe.Phase',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='TRAIN', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='TEST', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=23240,
serialized_end=23268,
)
_sym_db.RegisterEnumDescriptor(_PHASE)
Phase = enum_type_wrapper.EnumTypeWrapper(_PHASE)
TRAIN = 0
TEST = 1
_EMITCONSTRAINT_EMITTYPE = _descriptor.EnumDescriptor(
name='EmitType',
full_name='caffe.EmitConstraint.EmitType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='CENTER', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MIN_OVERLAP', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=1146,
serialized_end=1185,
)
_sym_db.RegisterEnumDescriptor(_EMITCONSTRAINT_EMITTYPE)
_ANNOTATEDDATUM_ANNOTATIONTYPE = _descriptor.EnumDescriptor(
name='AnnotationType',
full_name='caffe.AnnotatedDatum.AnnotationType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='BBOX', index=0, number=0,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=2001,
serialized_end=2027,
)
_sym_db.RegisterEnumDescriptor(_ANNOTATEDDATUM_ANNOTATIONTYPE)
_FILLERPARAMETER_VARIANCENORM = _descriptor.EnumDescriptor(
name='VarianceNorm',
full_name='caffe.FillerParameter.VarianceNorm',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='FAN_IN', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FAN_OUT', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='AVERAGE', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=2244,
serialized_end=2296,
)
_sym_db.RegisterEnumDescriptor(_FILLERPARAMETER_VARIANCENORM)
_SOLVERPARAMETER_SNAPSHOTFORMAT = _descriptor.EnumDescriptor(
name='SnapshotFormat',
full_name='caffe.SolverParameter.SnapshotFormat',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='HDF5', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BINARYPROTO', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=3814,
serialized_end=3857,
)
_sym_db.RegisterEnumDescriptor(_SOLVERPARAMETER_SNAPSHOTFORMAT)
_SOLVERPARAMETER_SOLVERMODE = _descriptor.EnumDescriptor(
name='SolverMode',
full_name='caffe.SolverParameter.SolverMode',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='CPU', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='GPU', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=3859,
serialized_end=3889,
)
_sym_db.RegisterEnumDescriptor(_SOLVERPARAMETER_SOLVERMODE)
_SOLVERPARAMETER_SOLVERTYPE = _descriptor.EnumDescriptor(
name='SolverType',
full_name='caffe.SolverParameter.SolverType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='SGD', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NESTEROV', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ADAGRAD', index=2, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='RMSPROP', index=3, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ADADELTA', index=4, number=4,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ADAM', index=5, number=5,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=3891,
serialized_end=3976,
)
_sym_db.RegisterEnumDescriptor(_SOLVERPARAMETER_SOLVERTYPE)
_PARAMSPEC_DIMCHECKMODE = _descriptor.EnumDescriptor(
name='DimCheckMode',
full_name='caffe.ParamSpec.DimCheckMode',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='STRICT', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PERMISSIVE', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=4465,
serialized_end=4507,
)
_sym_db.RegisterEnumDescriptor(_PARAMSPEC_DIMCHECKMODE)
_RESIZEPARAMETER_RESIZE_MODE = _descriptor.EnumDescriptor(
name='Resize_mode',
full_name='caffe.ResizeParameter.Resize_mode',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='WARP', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FIT_SMALL_SIZE', index=1, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FIT_LARGE_SIZE_AND_PAD', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=8892,
serialized_end=8963,
)
_sym_db.RegisterEnumDescriptor(_RESIZEPARAMETER_RESIZE_MODE)
_RESIZEPARAMETER_PAD_MODE = _descriptor.EnumDescriptor(
name='Pad_mode',
full_name='caffe.ResizeParameter.Pad_mode',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='CONSTANT', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MIRRORED', index=1, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='REPEAT_NEAREST', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=8965,
serialized_end=9023,
)
_sym_db.RegisterEnumDescriptor(_RESIZEPARAMETER_PAD_MODE)
_RESIZEPARAMETER_INTERP_MODE = _descriptor.EnumDescriptor(
name='Interp_mode',
full_name='caffe.ResizeParameter.Interp_mode',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='LINEAR', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='AREA', index=1, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NEAREST', index=2, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CUBIC', index=3, number=4,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LANCZOS4', index=4, number=5,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=9025,
serialized_end=9098,
)
_sym_db.RegisterEnumDescriptor(_RESIZEPARAMETER_INTERP_MODE)
_LOSSPARAMETER_NORMALIZATIONMODE = _descriptor.EnumDescriptor(
name='NormalizationMode',
full_name='caffe.LossParameter.NormalizationMode',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='FULL', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='VALID', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BATCH_SIZE', index=2, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NONE', index=3, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=10045,
serialized_end=10111,
)
_sym_db.RegisterEnumDescriptor(_LOSSPARAMETER_NORMALIZATIONMODE)
_CONVOLUTIONPARAMETER_ENGINE = _descriptor.EnumDescriptor(
name='Engine',
full_name='caffe.ConvolutionParameter.Engine',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='DEFAULT', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAFFE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CUDNN', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11228,
serialized_end=11271,
)
_sym_db.RegisterEnumDescriptor(_CONVOLUTIONPARAMETER_ENGINE)
_DATAPARAMETER_DB = _descriptor.EnumDescriptor(
name='DB',
full_name='caffe.DataParameter.DB',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='LEVELDB', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LMDB', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11852,
serialized_end=11879,
)
_sym_db.RegisterEnumDescriptor(_DATAPARAMETER_DB)
_ELTWISEPARAMETER_ELTWISEOP = _descriptor.EnumDescriptor(
name='EltwiseOp',
full_name='caffe.EltwiseParameter.EltwiseOp',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='PROD', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SUM', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MAX', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=13293,
serialized_end=13332,
)
_sym_db.RegisterEnumDescriptor(_ELTWISEPARAMETER_ELTWISEOP)
_HINGELOSSPARAMETER_NORM = _descriptor.EnumDescriptor(
name='Norm',
full_name='caffe.HingeLossParameter.Norm',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='L1', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='L2', index=1, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=13867,
serialized_end=13889,
)
_sym_db.RegisterEnumDescriptor(_HINGELOSSPARAMETER_NORM)
_LRNPARAMETER_NORMREGION = _descriptor.EnumDescriptor(
name='NormRegion',
full_name='caffe.LRNParameter.NormRegion',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='ACROSS_CHANNELS', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='WITHIN_CHANNEL', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=14756,
serialized_end=14809,
)
_sym_db.RegisterEnumDescriptor(_LRNPARAMETER_NORMREGION)
_LRNPARAMETER_ENGINE = _descriptor.EnumDescriptor(
name='Engine',
full_name='caffe.LRNParameter.Engine',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='DEFAULT', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAFFE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CUDNN', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11228,
serialized_end=11271,
)
_sym_db.RegisterEnumDescriptor(_LRNPARAMETER_ENGINE)
_MULTIBOXLOSSPARAMETER_LOCLOSSTYPE = _descriptor.EnumDescriptor(
name='LocLossType',
full_name='caffe.MultiBoxLossParameter.LocLossType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='L2', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SMOOTH_L1', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=15917,
serialized_end=15953,
)
_sym_db.RegisterEnumDescriptor(_MULTIBOXLOSSPARAMETER_LOCLOSSTYPE)
_MULTIBOXLOSSPARAMETER_CONFLOSSTYPE = _descriptor.EnumDescriptor(
name='ConfLossType',
full_name='caffe.MultiBoxLossParameter.ConfLossType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='SOFTMAX', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LOGISTIC', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=15955,
serialized_end=15996,
)
_sym_db.RegisterEnumDescriptor(_MULTIBOXLOSSPARAMETER_CONFLOSSTYPE)
_MULTIBOXLOSSPARAMETER_MATCHTYPE = _descriptor.EnumDescriptor(
name='MatchType',
full_name='caffe.MultiBoxLossParameter.MatchType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='BIPARTITE', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PER_PREDICTION', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=15998,
serialized_end=16044,
)
_sym_db.RegisterEnumDescriptor(_MULTIBOXLOSSPARAMETER_MATCHTYPE)
_MULTIBOXLOSSPARAMETER_MININGTYPE = _descriptor.EnumDescriptor(
name='MiningType',
full_name='caffe.MultiBoxLossParameter.MiningType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='NONE', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MAX_NEGATIVE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='HARD_EXAMPLE', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=16046,
serialized_end=16104,
)
_sym_db.RegisterEnumDescriptor(_MULTIBOXLOSSPARAMETER_MININGTYPE)
_POOLINGPARAMETER_POOLMETHOD = _descriptor.EnumDescriptor(
name='PoolMethod',
full_name='caffe.PoolingParameter.PoolMethod',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='MAX', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='AVE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='STOCHASTIC', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=16775,
serialized_end=16821,
)
_sym_db.RegisterEnumDescriptor(_POOLINGPARAMETER_POOLMETHOD)
_POOLINGPARAMETER_ENGINE = _descriptor.EnumDescriptor(
name='Engine',
full_name='caffe.PoolingParameter.Engine',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='DEFAULT', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAFFE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CUDNN', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11228,
serialized_end=11271,
)
_sym_db.RegisterEnumDescriptor(_POOLINGPARAMETER_ENGINE)
_PRIORBOXPARAMETER_CODETYPE = _descriptor.EnumDescriptor(
name='CodeType',
full_name='caffe.PriorBoxParameter.CodeType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='CORNER', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CENTER_SIZE', index=1, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CORNER_SIZE', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=17229,
serialized_end=17285,
)
_sym_db.RegisterEnumDescriptor(_PRIORBOXPARAMETER_CODETYPE)
_REDUCTIONPARAMETER_REDUCTIONOP = _descriptor.EnumDescriptor(
name='ReductionOp',
full_name='caffe.ReductionParameter.ReductionOp',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='SUM', index=0, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ASUM', index=1, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SUMSQ', index=2, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MEAN', index=3, number=4,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=17708,
serialized_end=17761,
)
_sym_db.RegisterEnumDescriptor(_REDUCTIONPARAMETER_REDUCTIONOP)
_RELUPARAMETER_ENGINE = _descriptor.EnumDescriptor(
name='Engine',
full_name='caffe.ReLUParameter.Engine',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='DEFAULT', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAFFE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CUDNN', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11228,
serialized_end=11271,
)
_sym_db.RegisterEnumDescriptor(_RELUPARAMETER_ENGINE)
_SIGMOIDPARAMETER_ENGINE = _descriptor.EnumDescriptor(
name='Engine',
full_name='caffe.SigmoidParameter.Engine',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='DEFAULT', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAFFE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CUDNN', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11228,
serialized_end=11271,
)
_sym_db.RegisterEnumDescriptor(_SIGMOIDPARAMETER_ENGINE)
_SOFTMAXPARAMETER_ENGINE = _descriptor.EnumDescriptor(
name='Engine',
full_name='caffe.SoftmaxParameter.Engine',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='DEFAULT', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAFFE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CUDNN', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11228,
serialized_end=11271,
)
_sym_db.RegisterEnumDescriptor(_SOFTMAXPARAMETER_ENGINE)
_TANHPARAMETER_ENGINE = _descriptor.EnumDescriptor(
name='Engine',
full_name='caffe.TanHParameter.Engine',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='DEFAULT', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAFFE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CUDNN', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11228,
serialized_end=11271,
)
_sym_db.RegisterEnumDescriptor(_TANHPARAMETER_ENGINE)
_VIDEODATAPARAMETER_VIDEOTYPE = _descriptor.EnumDescriptor(
name='VideoType',
full_name='caffe.VideoDataParameter.VideoType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='WEBCAM', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='VIDEO', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=18998,
serialized_end=19032,
)
_sym_db.RegisterEnumDescriptor(_VIDEODATAPARAMETER_VIDEOTYPE)
_SPPPARAMETER_POOLMETHOD = _descriptor.EnumDescriptor(
name='PoolMethod',
full_name='caffe.SPPParameter.PoolMethod',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='MAX', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='AVE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='STOCHASTIC', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=16775,
serialized_end=16821,
)
_sym_db.RegisterEnumDescriptor(_SPPPARAMETER_POOLMETHOD)
_SPPPARAMETER_ENGINE = _descriptor.EnumDescriptor(
name='Engine',
full_name='caffe.SPPParameter.Engine',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='DEFAULT', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CAFFE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CUDNN', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11228,
serialized_end=11271,
)
_sym_db.RegisterEnumDescriptor(_SPPPARAMETER_ENGINE)
_V1LAYERPARAMETER_LAYERTYPE = _descriptor.EnumDescriptor(
name='LayerType',
full_name='caffe.V1LayerParameter.LayerType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='NONE', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ABSVAL', index=1, number=35,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ACCURACY', index=2, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ARGMAX', index=3, number=30,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BNLL', index=4, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CONCAT', index=5, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CONTRASTIVE_LOSS', index=6, number=37,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CONVOLUTION', index=7, number=4,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DATA', index=8, number=5,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DECONVOLUTION', index=9, number=39,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DROPOUT', index=10, number=6,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DUMMY_DATA', index=11, number=32,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EUCLIDEAN_LOSS', index=12, number=7,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='ELTWISE', index=13, number=25,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='EXP', index=14, number=38,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='FLATTEN', index=15, number=8,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='HDF5_DATA', index=16, number=9,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='HDF5_OUTPUT', index=17, number=10,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='HINGE_LOSS', index=18, number=28,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='IM2COL', index=19, number=11,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='IMAGE_DATA', index=20, number=12,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INFOGAIN_LOSS', index=21, number=13,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INNER_PRODUCT', index=22, number=14,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LRN', index=23, number=15,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MEMORY_DATA', index=24, number=29,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MULTINOMIAL_LOGISTIC_LOSS', index=25, number=16,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MVN', index=26, number=34,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='POOLING', index=27, number=17,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='POWER', index=28, number=26,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='RELU', index=29, number=18,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SIGMOID', index=30, number=19,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SIGMOID_CROSS_ENTROPY_LOSS', index=31, number=27,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SILENCE', index=32, number=36,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SOFTMAX', index=33, number=20,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SOFTMAX_LOSS', index=34, number=21,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SPLIT', index=35, number=22,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SLICE', index=36, number=33,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='TANH', index=37, number=23,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='WINDOW_DATA', index=38, number=24,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='THRESHOLD', index=39, number=31,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=21481,
serialized_end=22081,
)
_sym_db.RegisterEnumDescriptor(_V1LAYERPARAMETER_LAYERTYPE)
_V1LAYERPARAMETER_DIMCHECKMODE = _descriptor.EnumDescriptor(
name='DimCheckMode',
full_name='caffe.V1LayerParameter.DimCheckMode',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='STRICT', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PERMISSIVE', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=4465,
serialized_end=4507,
)
_sym_db.RegisterEnumDescriptor(_V1LAYERPARAMETER_DIMCHECKMODE)
_V0LAYERPARAMETER_POOLMETHOD = _descriptor.EnumDescriptor(
name='PoolMethod',
full_name='caffe.V0LayerParameter.PoolMethod',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='MAX', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='AVE', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='STOCHASTIC', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=16775,
serialized_end=16821,
)
_sym_db.RegisterEnumDescriptor(_V0LAYERPARAMETER_POOLMETHOD)
_BLOBSHAPE = _descriptor.Descriptor(
name='BlobShape',
full_name='caffe.BlobShape',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dim', full_name='caffe.BlobShape.dim', index=0,
number=1, type=3, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=22,
serialized_end=50,
)
_BLOBPROTO = _descriptor.Descriptor(
name='BlobProto',
full_name='caffe.BlobProto',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='shape', full_name='caffe.BlobProto.shape', index=0,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='data', full_name='caffe.BlobProto.data', index=1,
number=5, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
_descriptor.FieldDescriptor(
name='diff', full_name='caffe.BlobProto.diff', index=2,
number=6, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
_descriptor.FieldDescriptor(
name='double_data', full_name='caffe.BlobProto.double_data', index=3,
number=8, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
_descriptor.FieldDescriptor(
name='double_diff', full_name='caffe.BlobProto.double_diff', index=4,
number=9, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
_descriptor.FieldDescriptor(
name='num', full_name='caffe.BlobProto.num', index=5,
number=1, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='channels', full_name='caffe.BlobProto.channels', index=6,
number=2, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='height', full_name='caffe.BlobProto.height', index=7,
number=3, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='width', full_name='caffe.BlobProto.width', index=8,
number=4, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=53,
serialized_end=257,
)
_BLOBPROTOVECTOR = _descriptor.Descriptor(
name='BlobProtoVector',
full_name='caffe.BlobProtoVector',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='blobs', full_name='caffe.BlobProtoVector.blobs', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=259,
serialized_end=309,
)
_DATUM = _descriptor.Descriptor(
name='Datum',
full_name='caffe.Datum',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='channels', full_name='caffe.Datum.channels', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='height', full_name='caffe.Datum.height', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='width', full_name='caffe.Datum.width', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='data', full_name='caffe.Datum.data', index=3,
number=4, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='label', full_name='caffe.Datum.label', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='float_data', full_name='caffe.Datum.float_data', index=5,
number=6, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='encoded', full_name='caffe.Datum.encoded', index=6,
number=7, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=312,
serialized_end=441,
)
_LABELMAPITEM = _descriptor.Descriptor(
name='LabelMapItem',
full_name='caffe.LabelMapItem',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='caffe.LabelMapItem.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='label', full_name='caffe.LabelMapItem.label', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='display_name', full_name='caffe.LabelMapItem.display_name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=443,
serialized_end=508,
)
_LABELMAP = _descriptor.Descriptor(
name='LabelMap',
full_name='caffe.LabelMap',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='item', full_name='caffe.LabelMap.item', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=510,
serialized_end=555,
)
_SAMPLER = _descriptor.Descriptor(
name='Sampler',
full_name='caffe.Sampler',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='min_scale', full_name='caffe.Sampler.min_scale', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_scale', full_name='caffe.Sampler.max_scale', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='min_aspect_ratio', full_name='caffe.Sampler.min_aspect_ratio', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_aspect_ratio', full_name='caffe.Sampler.max_aspect_ratio', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=557,
serialized_end=668,
)
_SAMPLECONSTRAINT = _descriptor.Descriptor(
name='SampleConstraint',
full_name='caffe.SampleConstraint',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='min_jaccard_overlap', full_name='caffe.SampleConstraint.min_jaccard_overlap', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_jaccard_overlap', full_name='caffe.SampleConstraint.max_jaccard_overlap', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='min_sample_coverage', full_name='caffe.SampleConstraint.min_sample_coverage', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_sample_coverage', full_name='caffe.SampleConstraint.max_sample_coverage', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='min_object_coverage', full_name='caffe.SampleConstraint.min_object_coverage', index=4,
number=5, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_object_coverage', full_name='caffe.SampleConstraint.max_object_coverage', index=5,
number=6, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=671,
serialized_end=863,
)
_BATCHSAMPLER = _descriptor.Descriptor(
name='BatchSampler',
full_name='caffe.BatchSampler',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='use_original_image', full_name='caffe.BatchSampler.use_original_image', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='sampler', full_name='caffe.BatchSampler.sampler', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='sample_constraint', full_name='caffe.BatchSampler.sample_constraint', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_sample', full_name='caffe.BatchSampler.max_sample', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max_trials', full_name='caffe.BatchSampler.max_trials', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=100,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=866,
serialized_end=1044,
)
_EMITCONSTRAINT = _descriptor.Descriptor(
name='EmitConstraint',
full_name='caffe.EmitConstraint',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='emit_type', full_name='caffe.EmitConstraint.emit_type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='emit_overlap', full_name='caffe.EmitConstraint.emit_overlap', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_EMITCONSTRAINT_EMITTYPE,
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=1047,
serialized_end=1185,
)
_NORMALIZEDBBOX = _descriptor.Descriptor(
name='NormalizedBBox',
full_name='caffe.NormalizedBBox',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='xmin', full_name='caffe.NormalizedBBox.xmin', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ymin', full_name='caffe.NormalizedBBox.ymin', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='xmax', full_name='caffe.NormalizedBBox.xmax', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ymax', full_name='caffe.NormalizedBBox.ymax', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='label', full_name='caffe.NormalizedBBox.label', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='difficult', full_name='caffe.NormalizedBBox.difficult', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='score', full_name='caffe.NormalizedBBox.score', index=6,
number=7, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='size', full_name='caffe.NormalizedBBox.size', index=7,
number=8, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=1188,
serialized_end=1323,
)
_NORMALIZEDRBOX = _descriptor.Descriptor(
name='NormalizedRBox',
full_name='caffe.NormalizedRBox',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='x1', full_name='caffe.NormalizedRBox.x1', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='y1', full_name='caffe.NormalizedRBox.y1', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='x2', full_name='caffe.NormalizedRBox.x2', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='y2', full_name='caffe.NormalizedRBox.y2', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='h', full_name='caffe.NormalizedRBox.h', index=4,
number=5, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='difficult', full_name='caffe.NormalizedRBox.difficult', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='score', full_name='caffe.NormalizedRBox.score', index=6,
number=7, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='size', full_name='caffe.NormalizedRBox.size', index=7,
number=8, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=1325,
serialized_end=1448,
)
_NORMALIZEDPOLYGON = _descriptor.Descriptor(
name='NormalizedPolygon',
full_name='caffe.NormalizedPolygon',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='x1', full_name='caffe.NormalizedPolygon.x1', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='y1', full_name='caffe.NormalizedPolygon.y1', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='x2', full_name='caffe.NormalizedPolygon.x2', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='y2', full_name='caffe.NormalizedPolygon.y2', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='x3', full_name='caffe.NormalizedPolygon.x3', index=4,
number=5, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='y3', full_name='caffe.NormalizedPolygon.y3', index=5,
number=6, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='x4', full_name='caffe.NormalizedPolygon.x4', index=6,
number=7, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='y4', full_name='caffe.NormalizedPolygon.y4', index=7,
number=8, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='difficult', full_name='caffe.NormalizedPolygon.difficult', index=8,
number=9, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='score', full_name='caffe.NormalizedPolygon.score', index=9,
number=10, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='size', full_name='caffe.NormalizedPolygon.size', index=10,
number=11, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=1451,
serialized_end=1614,
)
_ANNOTATION = _descriptor.Descriptor(
name='Annotation',
full_name='caffe.Annotation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='instance_id', full_name='caffe.Annotation.instance_id', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='bbox', full_name='caffe.Annotation.bbox', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='rbox', full_name='caffe.Annotation.rbox', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='polygon', full_name='caffe.Annotation.polygon', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=1617,
serialized_end=1770,
)
_ANNOTATIONGROUP = _descriptor.Descriptor(
name='AnnotationGroup',
full_name='caffe.AnnotationGroup',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='group_label', full_name='caffe.AnnotationGroup.group_label', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='annotation', full_name='caffe.AnnotationGroup.annotation', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=1772,
serialized_end=1849,
)
_ANNOTATEDDATUM = _descriptor.Descriptor(
name='AnnotatedDatum',
full_name='caffe.AnnotatedDatum',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='datum', full_name='caffe.AnnotatedDatum.datum', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='type', full_name='caffe.AnnotatedDatum.type', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='annotation_group', full_name='caffe.AnnotatedDatum.annotation_group', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_ANNOTATEDDATUM_ANNOTATIONTYPE,
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=1852,
serialized_end=2027,
)
_FILLERPARAMETER = _descriptor.Descriptor(
name='FillerParameter',
full_name='caffe.FillerParameter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='type', full_name='caffe.FillerParameter.type', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=_b("constant").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='value', full_name='caffe.FillerParameter.value', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='min', full_name='caffe.FillerParameter.min', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='max', full_name='caffe.FillerParameter.max', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mean', full_name='caffe.FillerParameter.mean', index=4,
number=5, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='std', full_name='caffe.FillerParameter.std', index=5,
number=6, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='sparse', full_name='caffe.FillerParameter.sparse', index=6,
number=7, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=-1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='variance_norm', full_name='caffe.FillerParameter.variance_norm', index=7,
number=8, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_FILLERPARAMETER_VARIANCENORM,
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=2030,
serialized_end=2296,
)
# Generated protobuf metadata: Descriptor for the caffe.NetParameter message.
# Emitted by protoc from the caffe .proto file -- do not edit by hand; the
# field numbers, wire types and serialized offsets must match the compiled
# descriptor embedded in this module's DESCRIPTOR.
_NETPARAMETER = _descriptor.Descriptor(
  name='NetParameter',
  full_name='caffe.NetParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='caffe.NetParameter.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='input', full_name='caffe.NetParameter.input', index=1,
      number=3, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='input_shape', full_name='caffe.NetParameter.input_shape', index=2,
      number=8, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='input_dim', full_name='caffe.NetParameter.input_dim', index=3,
      number=4, type=5, cpp_type=1, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='force_backward', full_name='caffe.NetParameter.force_backward', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='state', full_name='caffe.NetParameter.state', index=5,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='debug_info', full_name='caffe.NetParameter.debug_info', index=6,
      number=7, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='layer', full_name='caffe.NetParameter.layer', index=7,
      number=100, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='layers', full_name='caffe.NetParameter.layers', index=8,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  # Byte span of this message's definition within the file's serialized
  # FileDescriptorProto.
  serialized_start=2299,
  serialized_end=2569,
)
# Generated protobuf metadata: Descriptor for the caffe.SolverParameter message
# (training/solver configuration fields). Emitted by protoc -- do not edit by
# hand; field numbers, types and serialized offsets must match the compiled
# descriptor embedded in this module's DESCRIPTOR.
_SOLVERPARAMETER = _descriptor.Descriptor(
  name='SolverParameter',
  full_name='caffe.SolverParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='net', full_name='caffe.SolverParameter.net', index=0,
      number=24, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='net_param', full_name='caffe.SolverParameter.net_param', index=1,
      number=25, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='train_net', full_name='caffe.SolverParameter.train_net', index=2,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='test_net', full_name='caffe.SolverParameter.test_net', index=3,
      number=2, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='train_net_param', full_name='caffe.SolverParameter.train_net_param', index=4,
      number=21, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='test_net_param', full_name='caffe.SolverParameter.test_net_param', index=5,
      number=22, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='train_state', full_name='caffe.SolverParameter.train_state', index=6,
      number=26, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='test_state', full_name='caffe.SolverParameter.test_state', index=7,
      number=27, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='eval_type', full_name='caffe.SolverParameter.eval_type', index=8,
      number=41, type=9, cpp_type=9, label=1,
      has_default_value=True, default_value=_b("classification").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ap_version', full_name='caffe.SolverParameter.ap_version', index=9,
      number=42, type=9, cpp_type=9, label=1,
      has_default_value=True, default_value=_b("Integral").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='test_iter', full_name='caffe.SolverParameter.test_iter', index=10,
      number=3, type=5, cpp_type=1, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='test_interval', full_name='caffe.SolverParameter.test_interval', index=11,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='test_compute_loss', full_name='caffe.SolverParameter.test_compute_loss', index=12,
      number=19, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='test_initialization', full_name='caffe.SolverParameter.test_initialization', index=13,
      number=32, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='base_lr', full_name='caffe.SolverParameter.base_lr', index=14,
      number=5, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='display', full_name='caffe.SolverParameter.display', index=15,
      number=6, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='average_loss', full_name='caffe.SolverParameter.average_loss', index=16,
      number=33, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='max_iter', full_name='caffe.SolverParameter.max_iter', index=17,
      number=7, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='iter_size', full_name='caffe.SolverParameter.iter_size', index=18,
      number=36, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='lr_policy', full_name='caffe.SolverParameter.lr_policy', index=19,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='gamma', full_name='caffe.SolverParameter.gamma', index=20,
      number=9, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='power', full_name='caffe.SolverParameter.power', index=21,
      number=10, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='momentum', full_name='caffe.SolverParameter.momentum', index=22,
      number=11, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='weight_decay', full_name='caffe.SolverParameter.weight_decay', index=23,
      number=12, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='regularization_type', full_name='caffe.SolverParameter.regularization_type', index=24,
      number=29, type=9, cpp_type=9, label=1,
      has_default_value=True, default_value=_b("L2").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stepsize', full_name='caffe.SolverParameter.stepsize', index=25,
      number=13, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stepvalue', full_name='caffe.SolverParameter.stepvalue', index=26,
      number=34, type=5, cpp_type=1, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='plateau_winsize', full_name='caffe.SolverParameter.plateau_winsize', index=27,
      number=43, type=5, cpp_type=1, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='clip_gradients', full_name='caffe.SolverParameter.clip_gradients', index=28,
      number=35, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='snapshot', full_name='caffe.SolverParameter.snapshot', index=29,
      number=14, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='snapshot_prefix', full_name='caffe.SolverParameter.snapshot_prefix', index=30,
      number=15, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='snapshot_diff', full_name='caffe.SolverParameter.snapshot_diff', index=31,
      number=16, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='snapshot_format', full_name='caffe.SolverParameter.snapshot_format', index=32,
      number=37, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='solver_mode', full_name='caffe.SolverParameter.solver_mode', index=33,
      number=17, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='device_id', full_name='caffe.SolverParameter.device_id', index=34,
      number=18, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='random_seed', full_name='caffe.SolverParameter.random_seed', index=35,
      number=20, type=3, cpp_type=2, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='type', full_name='caffe.SolverParameter.type', index=36,
      number=40, type=9, cpp_type=9, label=1,
      has_default_value=True, default_value=_b("SGD").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='delta', full_name='caffe.SolverParameter.delta', index=37,
      number=31, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1e-08,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='momentum2', full_name='caffe.SolverParameter.momentum2', index=38,
      number=39, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0.999,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='rms_decay', full_name='caffe.SolverParameter.rms_decay', index=39,
      number=38, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0.99,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='debug_info', full_name='caffe.SolverParameter.debug_info', index=40,
      number=23, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='snapshot_after_train', full_name='caffe.SolverParameter.snapshot_after_train', index=41,
      number=28, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='solver_type', full_name='caffe.SolverParameter.solver_type', index=42,
      number=30, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  # Nested enum descriptors declared inside SolverParameter (defined earlier
  # in this generated module).
  enum_types=[
    _SOLVERPARAMETER_SNAPSHOTFORMAT,
    _SOLVERPARAMETER_SOLVERMODE,
    _SOLVERPARAMETER_SOLVERTYPE,
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  # Byte span of this message's definition within the file's serialized
  # FileDescriptorProto.
  serialized_start=2572,
  serialized_end=3976,
)
# Generated protobuf metadata: Descriptor for the caffe.SolverState message
# (solver checkpoint state: iteration counters, history blobs, loss tracking).
# Emitted by protoc -- do not edit by hand.
_SOLVERSTATE = _descriptor.Descriptor(
  name='SolverState',
  full_name='caffe.SolverState',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='iter', full_name='caffe.SolverState.iter', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='learned_net', full_name='caffe.SolverState.learned_net', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='history', full_name='caffe.SolverState.history', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='current_step', full_name='caffe.SolverState.current_step', index=3,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='minimum_loss', full_name='caffe.SolverState.minimum_loss', index=4,
      number=5, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1e+38,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='iter_last_event', full_name='caffe.SolverState.iter_last_event', index=5,
      number=6, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  # Byte span of this message's definition within the file's serialized
  # FileDescriptorProto.
  serialized_start=3979,
  serialized_end=4144,
)
# Generated protobuf metadata: Descriptor for the caffe.NetState message
# (phase/level/stage tuple used to select layers at runtime).
# Emitted by protoc -- do not edit by hand.
_NETSTATE = _descriptor.Descriptor(
  name='NetState',
  full_name='caffe.NetState',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='phase', full_name='caffe.NetState.phase', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='level', full_name='caffe.NetState.level', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stage', full_name='caffe.NetState.stage', index=2,
      number=3, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  # Byte span of this message's definition within the file's serialized
  # FileDescriptorProto.
  serialized_start=4146,
  serialized_end=4224,
)
# Generated protobuf metadata: Descriptor for the caffe.NetStateRule message
# (inclusion/exclusion rule matched against a NetState).
# Emitted by protoc -- do not edit by hand.
_NETSTATERULE = _descriptor.Descriptor(
  name='NetStateRule',
  full_name='caffe.NetStateRule',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='phase', full_name='caffe.NetStateRule.phase', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='min_level', full_name='caffe.NetStateRule.min_level', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='max_level', full_name='caffe.NetStateRule.max_level', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stage', full_name='caffe.NetStateRule.stage', index=3,
      number=4, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='not_stage', full_name='caffe.NetStateRule.not_stage', index=4,
      number=5, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  # Byte span of this message's definition within the file's serialized
  # FileDescriptorProto.
  serialized_start=4226,
  serialized_end=4341,
)
# Generated protobuf metadata: Descriptor for the caffe.ParamSpec message
# (per-parameter training spec: sharing, learning-rate and decay multipliers).
# Emitted by protoc -- do not edit by hand.
_PARAMSPEC = _descriptor.Descriptor(
  name='ParamSpec',
  full_name='caffe.ParamSpec',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='caffe.ParamSpec.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='share_mode', full_name='caffe.ParamSpec.share_mode', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='lr_mult', full_name='caffe.ParamSpec.lr_mult', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='decay_mult', full_name='caffe.ParamSpec.decay_mult', index=3,
      number=4, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  # Nested enum descriptor declared inside ParamSpec (defined earlier in this
  # generated module).
  enum_types=[
    _PARAMSPEC_DIMCHECKMODE,
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  # Byte span of this message's definition within the file's serialized
  # FileDescriptorProto.
  serialized_start=4344,
  serialized_end=4507,
)
_LAYERPARAMETER = _descriptor.Descriptor(
name='LayerParameter',
full_name='caffe.LayerParameter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='caffe.LayerParameter.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='type', full_name='caffe.LayerParameter.type', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='bottom', full_name='caffe.LayerParameter.bottom', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='top', full_name='caffe.LayerParameter.top', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='phase', full_name='caffe.LayerParameter.phase', index=4,
number=10, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='loss_weight', full_name='caffe.LayerParameter.loss_weight', index=5,
number=5, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='param', full_name='caffe.LayerParameter.param', index=6,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='blobs', full_name='caffe.LayerParameter.blobs', index=7,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='propagate_down', full_name='caffe.LayerParameter.propagate_down', index=8,
number=11, type=8, cpp_type=7, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='include', full_name='caffe.LayerParameter.include', index=9,
number=8, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='exclude', full_name='caffe.LayerParameter.exclude', index=10,
number=9, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='transform_param', full_name='caffe.LayerParameter.transform_param', index=11,
number=100, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='loss_param', full_name='caffe.LayerParameter.loss_param', index=12,
number=101, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='accuracy_param', full_name='caffe.LayerParameter.accuracy_param', index=13,
number=102, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='annotated_data_param', full_name='caffe.LayerParameter.annotated_data_param', index=14,
number=200, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='argmax_param', full_name='caffe.LayerParameter.argmax_param', index=15,
number=103, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='batch_norm_param', full_name='caffe.LayerParameter.batch_norm_param', index=16,
number=139, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='bias_param', full_name='caffe.LayerParameter.bias_param', index=17,
number=141, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='concat_param', full_name='caffe.LayerParameter.concat_param', index=18,
number=104, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='contrastive_loss_param', full_name='caffe.LayerParameter.contrastive_loss_param', index=19,
number=105, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='convolution_param', full_name='caffe.LayerParameter.convolution_param', index=20,
number=106, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='crop_param', full_name='caffe.LayerParameter.crop_param', index=21,
number=144, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ctc_decoder_param', full_name='caffe.LayerParameter.ctc_decoder_param', index=22,
number=149, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ctc_loss_param', full_name='caffe.LayerParameter.ctc_loss_param', index=23,
number=148, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='data_param', full_name='caffe.LayerParameter.data_param', index=24,
number=107, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='detection_evaluate_param', full_name='caffe.LayerParameter.detection_evaluate_param', index=25,
number=205, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='detection_output_param', full_name='caffe.LayerParameter.detection_output_param', index=26,
number=204, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='dropout_param', full_name='caffe.LayerParameter.dropout_param', index=27,
number=108, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='dummy_data_param', full_name='caffe.LayerParameter.dummy_data_param', index=28,
number=109, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='eltwise_param', full_name='caffe.LayerParameter.eltwise_param', index=29,
number=110, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='elu_param', full_name='caffe.LayerParameter.elu_param', index=30,
number=140, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='embed_param', full_name='caffe.LayerParameter.embed_param', index=31,
number=137, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='exp_param', full_name='caffe.LayerParameter.exp_param', index=32,
number=111, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='flatten_param', full_name='caffe.LayerParameter.flatten_param', index=33,
number=135, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hdf5_data_param', full_name='caffe.LayerParameter.hdf5_data_param', index=34,
number=112, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hdf5_output_param', full_name='caffe.LayerParameter.hdf5_output_param', index=35,
number=113, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hinge_loss_param', full_name='caffe.LayerParameter.hinge_loss_param', index=36,
number=114, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='image_data_param', full_name='caffe.LayerParameter.image_data_param', index=37,
number=115, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='infogain_loss_param', full_name='caffe.LayerParameter.infogain_loss_param', index=38,
number=116, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='inner_product_param', full_name='caffe.LayerParameter.inner_product_param', index=39,
number=117, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='input_param', full_name='caffe.LayerParameter.input_param', index=40,
number=143, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='log_param', full_name='caffe.LayerParameter.log_param', index=41,
number=134, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='lrn_param', full_name='caffe.LayerParameter.lrn_param', index=42,
number=118, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='memory_data_param', full_name='caffe.LayerParameter.memory_data_param', index=43,
number=119, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='multibox_loss_param', full_name='caffe.LayerParameter.multibox_loss_param', index=44,
number=201, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mvn_param', full_name='caffe.LayerParameter.mvn_param', index=45,
number=120, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='norm_param', full_name='caffe.LayerParameter.norm_param', index=46,
number=206, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='parameter_param', full_name='caffe.LayerParameter.parameter_param', index=47,
number=145, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='permute_param', full_name='caffe.LayerParameter.permute_param', index=48,
number=202, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='pooling_param', full_name='caffe.LayerParameter.pooling_param', index=49,
number=121, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='power_param', full_name='caffe.LayerParameter.power_param', index=50,
number=122, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='prelu_param', full_name='caffe.LayerParameter.prelu_param', index=51,
number=131, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='prior_box_param', full_name='caffe.LayerParameter.prior_box_param', index=52,
number=203, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='python_param', full_name='caffe.LayerParameter.python_param', index=53,
number=130, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='recurrent_param', full_name='caffe.LayerParameter.recurrent_param', index=54,
number=146, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='reduction_param', full_name='caffe.LayerParameter.reduction_param', index=55,
number=136, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='relu_param', full_name='caffe.LayerParameter.relu_param', index=56,
number=123, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='reshape_param', full_name='caffe.LayerParameter.reshape_param', index=57,
number=133, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='roi_pooling_param', full_name='caffe.LayerParameter.roi_pooling_param', index=58,
number=150, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='reverse_param', full_name='caffe.LayerParameter.reverse_param', index=59,
number=147, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='scale_param', full_name='caffe.LayerParameter.scale_param', index=60,
number=142, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='sigmoid_param', full_name='caffe.LayerParameter.sigmoid_param', index=61,
number=124, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='softmax_param', full_name='caffe.LayerParameter.softmax_param', index=62,
number=125, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='spp_param', full_name='caffe.LayerParameter.spp_param', index=63,
number=132, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='slice_param', full_name='caffe.LayerParameter.slice_param', index=64,
number=126, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='tanh_param', full_name='caffe.LayerParameter.tanh_param', index=65,
number=127, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='threshold_param', full_name='caffe.LayerParameter.threshold_param', index=66,
number=128, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='tile_param', full_name='caffe.LayerParameter.tile_param', index=67,
number=138, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='video_data_param', full_name='caffe.LayerParameter.video_data_param', index=68,
number=207, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='window_data_param', full_name='caffe.LayerParameter.window_data_param', index=69,
number=129, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='smooth_l1_loss_param', full_name='caffe.LayerParameter.smooth_l1_loss_param', index=70,
number=8266712, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='proposal_param', full_name='caffe.LayerParameter.proposal_param', index=71,
number=8266713, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=4510,
serialized_end=7860,
)
# Descriptor table for the caffe.ProposalParameter message (RPN-style region
# proposal settings): feat_stride/base_size/min_size (uint32, default 16),
# repeated float anchor 'ratio' and 'scale' lists, pre_nms_topn (default 6000),
# post_nms_topn (default 300) and nms_thresh (default 0.7).
# NOTE(review): this module appears to be protoc-generated (descriptor tables
# with hard-coded serialized_start/serialized_end offsets) — prefer
# regenerating from the .proto source over hand-editing these values.
_PROPOSALPARAMETER = _descriptor.Descriptor(
  name='ProposalParameter',
  full_name='caffe.ProposalParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='feat_stride', full_name='caffe.ProposalParameter.feat_stride', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=16,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='base_size', full_name='caffe.ProposalParameter.base_size', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=16,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='min_size', full_name='caffe.ProposalParameter.min_size', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=16,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ratio', full_name='caffe.ProposalParameter.ratio', index=3,
      number=4, type=2, cpp_type=6, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='scale', full_name='caffe.ProposalParameter.scale', index=4,
      number=5, type=2, cpp_type=6, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pre_nms_topn', full_name='caffe.ProposalParameter.pre_nms_topn', index=5,
      number=6, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=6000,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='post_nms_topn', full_name='caffe.ProposalParameter.post_nms_topn', index=6,
      number=7, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=300,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='nms_thresh', full_name='caffe.ProposalParameter.nms_thresh', index=7,
      number=8, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0.7,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7863,
  serialized_end=8063,
)
# Descriptor table for the caffe.SmoothL1LossParameter message: a single
# optional float field 'sigma' (default 1). Generated protobuf metadata —
# do not hand-edit field numbers, types, or serialized offsets.
_SMOOTHL1LOSSPARAMETER = _descriptor.Descriptor(
  name='SmoothL1LossParameter',
  full_name='caffe.SmoothL1LossParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='sigma', full_name='caffe.SmoothL1LossParameter.sigma', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=8065,
  serialized_end=8106,
)
# Descriptor table for the caffe.TransformationParameter message: data
# pre-processing/augmentation options — pixel 'scale' (default 1), 'mirror',
# square 'crop_size' plus explicit 'crop_h'/'crop_w', mean subtraction via
# 'mean_file' or repeated 'mean_value', force_color/force_gray flags, and
# nested sub-messages for resize, noise, distortion, expansion and an emit
# constraint. The message_type slots are None here and are wired up later by
# the generated cross-reference section (standard protoc layout).
_TRANSFORMATIONPARAMETER = _descriptor.Descriptor(
  name='TransformationParameter',
  full_name='caffe.TransformationParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='scale', full_name='caffe.TransformationParameter.scale', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='mirror', full_name='caffe.TransformationParameter.mirror', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='crop_size', full_name='caffe.TransformationParameter.crop_size', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='crop_h', full_name='caffe.TransformationParameter.crop_h', index=3,
      number=11, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='crop_w', full_name='caffe.TransformationParameter.crop_w', index=4,
      number=12, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='mean_file', full_name='caffe.TransformationParameter.mean_file', index=5,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='mean_value', full_name='caffe.TransformationParameter.mean_value', index=6,
      number=5, type=2, cpp_type=6, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='force_color', full_name='caffe.TransformationParameter.force_color', index=7,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='force_gray', full_name='caffe.TransformationParameter.force_gray', index=8,
      number=7, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='resize_param', full_name='caffe.TransformationParameter.resize_param', index=9,
      number=8, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='noise_param', full_name='caffe.TransformationParameter.noise_param', index=10,
      number=9, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='distort_param', full_name='caffe.TransformationParameter.distort_param', index=11,
      number=13, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='expand_param', full_name='caffe.TransformationParameter.expand_param', index=12,
      number=14, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='emit_constraint', full_name='caffe.TransformationParameter.emit_constraint', index=13,
      number=10, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=8109,
  serialized_end=8567,
)
# Descriptor table for the caffe.ResizeParameter message: resize probability
# 'prob' (default 1), enum-valued 'resize_mode' and 'pad_mode' (defaults 1),
# absolute 'height'/'width' targets plus 'height_scale'/'width_scale',
# repeated 'pad_value' floats, and a repeated enum 'interp_mode'. The three
# enum descriptors referenced in enum_types are defined earlier in this
# generated module.
_RESIZEPARAMETER = _descriptor.Descriptor(
  name='ResizeParameter',
  full_name='caffe.ResizeParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='prob', full_name='caffe.ResizeParameter.prob', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='resize_mode', full_name='caffe.ResizeParameter.resize_mode', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='height', full_name='caffe.ResizeParameter.height', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='width', full_name='caffe.ResizeParameter.width', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='height_scale', full_name='caffe.ResizeParameter.height_scale', index=4,
      number=8, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='width_scale', full_name='caffe.ResizeParameter.width_scale', index=5,
      number=9, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pad_mode', full_name='caffe.ResizeParameter.pad_mode', index=6,
      number=5, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pad_value', full_name='caffe.ResizeParameter.pad_value', index=7,
      number=6, type=2, cpp_type=6, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='interp_mode', full_name='caffe.ResizeParameter.interp_mode', index=8,
      number=7, type=14, cpp_type=8, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _RESIZEPARAMETER_RESIZE_MODE,
    _RESIZEPARAMETER_PAD_MODE,
    _RESIZEPARAMETER_INTERP_MODE,
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=8570,
  serialized_end=9098,
)
# Descriptor table for the caffe.SaltPepperParameter message: noise
# 'fraction' (float, default 0) and a repeated float 'value' list.
# Generated protobuf metadata — do not hand-edit.
_SALTPEPPERPARAMETER = _descriptor.Descriptor(
  name='SaltPepperParameter',
  full_name='caffe.SaltPepperParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='fraction', full_name='caffe.SaltPepperParameter.fraction', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='caffe.SaltPepperParameter.value', index=1,
      number=2, type=2, cpp_type=6, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9100,
  serialized_end=9157,
)
# Descriptor table for the caffe.NoiseParameter message: overall noise
# probability 'prob' plus per-effect boolean flags (hist_eq, inverse,
# decolorize, gauss_blur, posterize, erode, saltpepper, clahe,
# convert_to_hsv, convert_to_lab), a float 'jpeg' quality (default -1),
# and a nested 'saltpepper_param' message field.
_NOISEPARAMETER = _descriptor.Descriptor(
  name='NoiseParameter',
  full_name='caffe.NoiseParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='prob', full_name='caffe.NoiseParameter.prob', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='hist_eq', full_name='caffe.NoiseParameter.hist_eq', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='inverse', full_name='caffe.NoiseParameter.inverse', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='decolorize', full_name='caffe.NoiseParameter.decolorize', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='gauss_blur', full_name='caffe.NoiseParameter.gauss_blur', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='jpeg', full_name='caffe.NoiseParameter.jpeg', index=5,
      number=6, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='posterize', full_name='caffe.NoiseParameter.posterize', index=6,
      number=7, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='erode', full_name='caffe.NoiseParameter.erode', index=7,
      number=8, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='saltpepper', full_name='caffe.NoiseParameter.saltpepper', index=8,
      number=9, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='saltpepper_param', full_name='caffe.NoiseParameter.saltpepper_param', index=9,
      number=10, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='clahe', full_name='caffe.NoiseParameter.clahe', index=10,
      number=11, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='convert_to_hsv', full_name='caffe.NoiseParameter.convert_to_hsv', index=11,
      number=12, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='convert_to_lab', full_name='caffe.NoiseParameter.convert_to_lab', index=12,
      number=13, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9160,
  serialized_end=9526,
)
# Descriptor table for the caffe.DistortionParameter message: photometric
# distortion controls — per-effect probabilities and ranges for brightness
# (prob/delta), contrast (prob/lower/upper), hue (prob/delta), saturation
# (prob/lower/upper), plus 'random_order_prob'. All fields are optional
# floats defaulting to 0.
_DISTORTIONPARAMETER = _descriptor.Descriptor(
  name='DistortionParameter',
  full_name='caffe.DistortionParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='brightness_prob', full_name='caffe.DistortionParameter.brightness_prob', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='brightness_delta', full_name='caffe.DistortionParameter.brightness_delta', index=1,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='contrast_prob', full_name='caffe.DistortionParameter.contrast_prob', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='contrast_lower', full_name='caffe.DistortionParameter.contrast_lower', index=3,
      number=4, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='contrast_upper', full_name='caffe.DistortionParameter.contrast_upper', index=4,
      number=5, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='hue_prob', full_name='caffe.DistortionParameter.hue_prob', index=5,
      number=6, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='hue_delta', full_name='caffe.DistortionParameter.hue_delta', index=6,
      number=7, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='saturation_prob', full_name='caffe.DistortionParameter.saturation_prob', index=7,
      number=8, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='saturation_lower', full_name='caffe.DistortionParameter.saturation_lower', index=8,
      number=9, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='saturation_upper', full_name='caffe.DistortionParameter.saturation_upper', index=9,
      number=10, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='random_order_prob', full_name='caffe.DistortionParameter.random_order_prob', index=10,
      number=11, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9529,
  serialized_end=9846,
)
# Descriptor for the caffe.ExpansionParameter message (fields: prob, max_expand_ratio).
# NOTE(review): protoc-generated table — edit caffe.proto and regenerate; the
# serialized_start/end offsets index into DESCRIPTOR's embedded serialized proto.
_EXPANSIONPARAMETER = _descriptor.Descriptor(
  name='ExpansionParameter',
  full_name='caffe.ExpansionParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='prob', full_name='caffe.ExpansionParameter.prob', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='max_expand_ratio', full_name='caffe.ExpansionParameter.max_expand_ratio', index=1,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9848,
  serialized_end=9914,
)
# Descriptor for the caffe.LossParameter message (ignore_label, normalization enum,
# legacy bool 'normalize'). NOTE(review): protoc-generated — do not hand-edit.
_LOSSPARAMETER = _descriptor.Descriptor(
  name='LossParameter',
  full_name='caffe.LossParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='ignore_label', full_name='caffe.LossParameter.ignore_label', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='normalization', full_name='caffe.LossParameter.normalization', index=1,
      number=3, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='normalize', full_name='caffe.LossParameter.normalize', index=2,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _LOSSPARAMETER_NORMALIZATIONMODE,
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=9917,
  serialized_end=10111,
)
# Descriptor for the caffe.AccuracyParameter message (top_k, axis, ignore_label).
# NOTE(review): protoc-generated — regenerate from caffe.proto instead of editing.
_ACCURACYPARAMETER = _descriptor.Descriptor(
  name='AccuracyParameter',
  full_name='caffe.AccuracyParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='top_k', full_name='caffe.AccuracyParameter.top_k', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.AccuracyParameter.axis', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ignore_label', full_name='caffe.AccuracyParameter.ignore_label', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10113,
  serialized_end=10189,
)
# Descriptor for the caffe.AnnotatedDataParameter message (repeated batch_sampler,
# label_map_file, anno_type enum). NOTE(review): protoc-generated — do not hand-edit.
_ANNOTATEDDATAPARAMETER = _descriptor.Descriptor(
  name='AnnotatedDataParameter',
  full_name='caffe.AnnotatedDataParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='batch_sampler', full_name='caffe.AnnotatedDataParameter.batch_sampler', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='label_map_file', full_name='caffe.AnnotatedDataParameter.label_map_file', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='anno_type', full_name='caffe.AnnotatedDataParameter.anno_type', index=2,
      number=3, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10192,
  serialized_end=10341,
)
# Descriptor for the caffe.ArgMaxParameter message (out_max_val, top_k, axis).
# NOTE(review): protoc-generated — regenerate from caffe.proto instead of editing.
_ARGMAXPARAMETER = _descriptor.Descriptor(
  name='ArgMaxParameter',
  full_name='caffe.ArgMaxParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='out_max_val', full_name='caffe.ArgMaxParameter.out_max_val', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='top_k', full_name='caffe.ArgMaxParameter.top_k', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.ArgMaxParameter.axis', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10343,
  serialized_end=10420,
)
# Descriptor for the caffe.ConcatParameter message (axis, legacy concat_dim).
# NOTE(review): protoc-generated — do not hand-edit.
_CONCATPARAMETER = _descriptor.Descriptor(
  name='ConcatParameter',
  full_name='caffe.ConcatParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.ConcatParameter.axis', index=0,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='concat_dim', full_name='caffe.ConcatParameter.concat_dim', index=1,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10422,
  serialized_end=10479,
)
# Descriptor for the caffe.BatchNormParameter message (use_global_stats,
# moving_average_fraction=0.999, eps=1e-05). NOTE(review): protoc-generated — do not hand-edit.
_BATCHNORMPARAMETER = _descriptor.Descriptor(
  name='BatchNormParameter',
  full_name='caffe.BatchNormParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='use_global_stats', full_name='caffe.BatchNormParameter.use_global_stats', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='moving_average_fraction', full_name='caffe.BatchNormParameter.moving_average_fraction', index=1,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0.999,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='eps', full_name='caffe.BatchNormParameter.eps', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1e-05,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10481,
  serialized_end=10587,
)
# Descriptor for the caffe.BiasParameter message (axis, num_axes, filler submessage).
# NOTE(review): protoc-generated — regenerate from caffe.proto instead of editing.
_BIASPARAMETER = _descriptor.Descriptor(
  name='BiasParameter',
  full_name='caffe.BiasParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.BiasParameter.axis', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='num_axes', full_name='caffe.BiasParameter.num_axes', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='filler', full_name='caffe.BiasParameter.filler', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10589,
  serialized_end=10682,
)
# Descriptor for the caffe.ContrastiveLossParameter message (margin, legacy_version).
# NOTE(review): protoc-generated — do not hand-edit.
_CONTRASTIVELOSSPARAMETER = _descriptor.Descriptor(
  name='ContrastiveLossParameter',
  full_name='caffe.ContrastiveLossParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='margin', full_name='caffe.ContrastiveLossParameter.margin', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='legacy_version', full_name='caffe.ContrastiveLossParameter.legacy_version', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10684,
  serialized_end=10760,
)
# Descriptor for the caffe.ConvolutionParameter message: num_output, bias_term,
# repeated pad/kernel_size/stride/dilation plus the legacy per-dimension
# pad_h/pad_w/kernel_h/kernel_w/stride_h/stride_w scalars, group, fillers,
# engine enum, axis, force_nd_im2col.
# NOTE(review): protoc-generated — regenerate from caffe.proto instead of editing.
_CONVOLUTIONPARAMETER = _descriptor.Descriptor(
  name='ConvolutionParameter',
  full_name='caffe.ConvolutionParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='num_output', full_name='caffe.ConvolutionParameter.num_output', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='bias_term', full_name='caffe.ConvolutionParameter.bias_term', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pad', full_name='caffe.ConvolutionParameter.pad', index=2,
      number=3, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='kernel_size', full_name='caffe.ConvolutionParameter.kernel_size', index=3,
      number=4, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stride', full_name='caffe.ConvolutionParameter.stride', index=4,
      number=6, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='dilation', full_name='caffe.ConvolutionParameter.dilation', index=5,
      number=18, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pad_h', full_name='caffe.ConvolutionParameter.pad_h', index=6,
      number=9, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pad_w', full_name='caffe.ConvolutionParameter.pad_w', index=7,
      number=10, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='kernel_h', full_name='caffe.ConvolutionParameter.kernel_h', index=8,
      number=11, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='kernel_w', full_name='caffe.ConvolutionParameter.kernel_w', index=9,
      number=12, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stride_h', full_name='caffe.ConvolutionParameter.stride_h', index=10,
      number=13, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stride_w', full_name='caffe.ConvolutionParameter.stride_w', index=11,
      number=14, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='group', full_name='caffe.ConvolutionParameter.group', index=12,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='weight_filler', full_name='caffe.ConvolutionParameter.weight_filler', index=13,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='bias_filler', full_name='caffe.ConvolutionParameter.bias_filler', index=14,
      number=8, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='engine', full_name='caffe.ConvolutionParameter.engine', index=15,
      number=15, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.ConvolutionParameter.axis', index=16,
      number=16, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='force_nd_im2col', full_name='caffe.ConvolutionParameter.force_nd_im2col', index=17,
      number=17, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _CONVOLUTIONPARAMETER_ENGINE,
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=10763,
  serialized_end=11271,
)
# Descriptor for the caffe.CropParameter message (axis default 2, repeated offset).
# NOTE(review): protoc-generated — do not hand-edit.
_CROPPARAMETER = _descriptor.Descriptor(
  name='CropParameter',
  full_name='caffe.CropParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.CropParameter.axis', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=2,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='offset', full_name='caffe.CropParameter.offset', index=1,
      number=2, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11273,
  serialized_end=11321,
)
# Descriptor for the caffe.CTCDecoderParameter message (blank_index default -1,
# ctc_merge_repeated default True). NOTE(review): protoc-generated — do not hand-edit.
_CTCDECODERPARAMETER = _descriptor.Descriptor(
  name='CTCDecoderParameter',
  full_name='caffe.CTCDecoderParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='blank_index', full_name='caffe.CTCDecoderParameter.blank_index', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ctc_merge_repeated', full_name='caffe.CTCDecoderParameter.ctc_merge_repeated', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11323,
  serialized_end=11403,
)
# Descriptor for the caffe.CTCLossParameter message (output_delay, blank_index,
# preprocess_collapse_repeated, ctc_merge_repeated, loss_calculation_t).
# NOTE(review): protoc-generated — regenerate from caffe.proto instead of editing.
_CTCLOSSPARAMETER = _descriptor.Descriptor(
  name='CTCLossParameter',
  full_name='caffe.CTCLossParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='output_delay', full_name='caffe.CTCLossParameter.output_delay', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='blank_index', full_name='caffe.CTCLossParameter.blank_index', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='preprocess_collapse_repeated', full_name='caffe.CTCLossParameter.preprocess_collapse_repeated', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='ctc_merge_repeated', full_name='caffe.CTCLossParameter.ctc_merge_repeated', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='loss_calculation_t', full_name='caffe.CTCLossParameter.loss_calculation_t', index=4,
      number=5, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11406,
  serialized_end=11584,
)
# Descriptor for the caffe.DataParameter message: source, batch_size, rand_skip,
# backend enum (DB), scale, mean_file, crop_size, mirror, force_encoded_color,
# prefetch (default 4). NOTE(review): protoc-generated — do not hand-edit.
_DATAPARAMETER = _descriptor.Descriptor(
  name='DataParameter',
  full_name='caffe.DataParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='source', full_name='caffe.DataParameter.source', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='batch_size', full_name='caffe.DataParameter.batch_size', index=1,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='rand_skip', full_name='caffe.DataParameter.rand_skip', index=2,
      number=7, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='backend', full_name='caffe.DataParameter.backend', index=3,
      number=8, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='scale', full_name='caffe.DataParameter.scale', index=4,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='mean_file', full_name='caffe.DataParameter.mean_file', index=5,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='crop_size', full_name='caffe.DataParameter.crop_size', index=6,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='mirror', full_name='caffe.DataParameter.mirror', index=7,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='force_encoded_color', full_name='caffe.DataParameter.force_encoded_color', index=8,
      number=9, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='prefetch', full_name='caffe.DataParameter.prefetch', index=9,
      number=10, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=4,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _DATAPARAMETER_DB,
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11587,
  serialized_end=11879,
)
# Descriptor for the caffe.DetectionEvaluateParameter message: num_classes,
# background_label_id, overlap_threshold (0.5), evaluate_difficult_gt,
# name_size_file, resize_param submessage, use_polygon.
# NOTE(review): protoc-generated — do not hand-edit.
_DETECTIONEVALUATEPARAMETER = _descriptor.Descriptor(
  name='DetectionEvaluateParameter',
  full_name='caffe.DetectionEvaluateParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='num_classes', full_name='caffe.DetectionEvaluateParameter.num_classes', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='background_label_id', full_name='caffe.DetectionEvaluateParameter.background_label_id', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='overlap_threshold', full_name='caffe.DetectionEvaluateParameter.overlap_threshold', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0.5,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='evaluate_difficult_gt', full_name='caffe.DetectionEvaluateParameter.evaluate_difficult_gt', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='name_size_file', full_name='caffe.DetectionEvaluateParameter.name_size_file', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='resize_param', full_name='caffe.DetectionEvaluateParameter.resize_param', index=5,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='use_polygon', full_name='caffe.DetectionEvaluateParameter.use_polygon', index=6,
      number=7, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=11882,
  serialized_end=12129,
)
# Descriptor for the caffe.NonMaximumSuppressionParameter message
# (nms_threshold default 0.3, top_k, eta default 1).
# NOTE(review): protoc-generated — do not hand-edit.
_NONMAXIMUMSUPPRESSIONPARAMETER = _descriptor.Descriptor(
  name='NonMaximumSuppressionParameter',
  full_name='caffe.NonMaximumSuppressionParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='nms_threshold', full_name='caffe.NonMaximumSuppressionParameter.nms_threshold', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0.3,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='top_k', full_name='caffe.NonMaximumSuppressionParameter.top_k', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='eta', full_name='caffe.NonMaximumSuppressionParameter.eta', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12131,
  serialized_end=12222,
)
# Descriptor for the caffe.SaveOutputParameter message: output_directory,
# output_name_prefix, output_format, label_map_file, name_size_file,
# num_test_image, resize_param submessage.
# NOTE(review): protoc-generated — regenerate from caffe.proto instead of editing.
_SAVEOUTPUTPARAMETER = _descriptor.Descriptor(
  name='SaveOutputParameter',
  full_name='caffe.SaveOutputParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='output_directory', full_name='caffe.SaveOutputParameter.output_directory', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='output_name_prefix', full_name='caffe.SaveOutputParameter.output_name_prefix', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='output_format', full_name='caffe.SaveOutputParameter.output_format', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='label_map_file', full_name='caffe.SaveOutputParameter.label_map_file', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='name_size_file', full_name='caffe.SaveOutputParameter.name_size_file', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='num_test_image', full_name='caffe.SaveOutputParameter.num_test_image', index=5,
      number=6, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='resize_param', full_name='caffe.SaveOutputParameter.resize_param', index=6,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12225,
  serialized_end=12441,
)
# ---------------------------------------------------------------------------
# Machine-generated protobuf Descriptor objects (produced by protoc from
# caffe.proto).  Each `_NAME = _descriptor.Descriptor(...)` assignment mirrors
# one `message` in the .proto file; the numeric arguments (field `number`,
# wire `type`, C++ `cpp_type`, `label`, and the `serialized_start`/`end` byte
# offsets into the serialized file descriptor) must stay exactly as generated.
# Do not edit these values by hand — regenerate from the .proto instead.
# ---------------------------------------------------------------------------

# Descriptor for caffe.DetectionOutputParameter — options for the SSD-style
# detection output layer (NMS, output saving, box code type, thresholds).
_DETECTIONOUTPUTPARAMETER = _descriptor.Descriptor(
  name='DetectionOutputParameter',
  full_name='caffe.DetectionOutputParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='num_classes', full_name='caffe.DetectionOutputParameter.num_classes', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='share_location', full_name='caffe.DetectionOutputParameter.share_location', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='background_label_id', full_name='caffe.DetectionOutputParameter.background_label_id', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='nms_param', full_name='caffe.DetectionOutputParameter.nms_param', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='save_output_param', full_name='caffe.DetectionOutputParameter.save_output_param', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='code_type', full_name='caffe.DetectionOutputParameter.code_type', index=5,
      number=6, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='variance_encoded_in_target', full_name='caffe.DetectionOutputParameter.variance_encoded_in_target', index=6,
      number=8, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='keep_top_k', full_name='caffe.DetectionOutputParameter.keep_top_k', index=7,
      number=7, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='confidence_threshold', full_name='caffe.DetectionOutputParameter.confidence_threshold', index=8,
      number=9, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='visualize', full_name='caffe.DetectionOutputParameter.visualize', index=9,
      number=10, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='visualize_threshold', full_name='caffe.DetectionOutputParameter.visualize_threshold', index=10,
      number=11, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='save_file', full_name='caffe.DetectionOutputParameter.save_file', index=11,
      number=12, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='use_polygon', full_name='caffe.DetectionOutputParameter.use_polygon', index=12,
      number=13, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12444,
  serialized_end=12926,
)

# Descriptor for caffe.DropoutParameter — dropout layer options
# (drop ratio, whether scaling is applied at train time).
_DROPOUTPARAMETER = _descriptor.Descriptor(
  name='DropoutParameter',
  full_name='caffe.DropoutParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='dropout_ratio', full_name='caffe.DropoutParameter.dropout_ratio', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0.5,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='scale_train', full_name='caffe.DropoutParameter.scale_train', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=12928,
  serialized_end=13001,
)

# Descriptor for caffe.DummyDataParameter — synthetic-data layer options
# (fillers plus repeated shape / legacy num-channels-height-width fields).
_DUMMYDATAPARAMETER = _descriptor.Descriptor(
  name='DummyDataParameter',
  full_name='caffe.DummyDataParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='data_filler', full_name='caffe.DummyDataParameter.data_filler', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='shape', full_name='caffe.DummyDataParameter.shape', index=1,
      number=6, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='num', full_name='caffe.DummyDataParameter.num', index=2,
      number=2, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='channels', full_name='caffe.DummyDataParameter.channels', index=3,
      number=3, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='height', full_name='caffe.DummyDataParameter.height', index=4,
      number=4, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='width', full_name='caffe.DummyDataParameter.width', index=5,
      number=5, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13004,
  serialized_end=13164,
)

# Descriptor for caffe.EltwiseParameter — element-wise layer options
# (operation enum, per-blob coefficients, stable product gradient flag).
_ELTWISEPARAMETER = _descriptor.Descriptor(
  name='EltwiseParameter',
  full_name='caffe.EltwiseParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='operation', full_name='caffe.EltwiseParameter.operation', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='coeff', full_name='caffe.EltwiseParameter.coeff', index=1,
      number=2, type=2, cpp_type=6, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stable_prod_grad', full_name='caffe.EltwiseParameter.stable_prod_grad', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _ELTWISEPARAMETER_ELTWISEOP,
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13167,
  serialized_end=13332,
)

# Descriptor for caffe.ELUParameter — exponential-linear-unit alpha.
_ELUPARAMETER = _descriptor.Descriptor(
  name='ELUParameter',
  full_name='caffe.ELUParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='alpha', full_name='caffe.ELUParameter.alpha', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13334,
  serialized_end=13366,
)

# Descriptor for caffe.EmbedParameter — embedding (lookup-table) layer options.
_EMBEDPARAMETER = _descriptor.Descriptor(
  name='EmbedParameter',
  full_name='caffe.EmbedParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='num_output', full_name='caffe.EmbedParameter.num_output', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='input_dim', full_name='caffe.EmbedParameter.input_dim', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='bias_term', full_name='caffe.EmbedParameter.bias_term', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='weight_filler', full_name='caffe.EmbedParameter.weight_filler', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='bias_filler', full_name='caffe.EmbedParameter.bias_filler', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13369,
  serialized_end=13541,
)

# Descriptor for caffe.ExpParameter — y = base^(scale*x + shift) options.
_EXPPARAMETER = _descriptor.Descriptor(
  name='ExpParameter',
  full_name='caffe.ExpParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='base', full_name='caffe.ExpParameter.base', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='scale', full_name='caffe.ExpParameter.scale', index=1,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='shift', full_name='caffe.ExpParameter.shift', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13543,
  serialized_end=13611,
)

# Descriptor for caffe.FlattenParameter — axis range to collapse.
_FLATTENPARAMETER = _descriptor.Descriptor(
  name='FlattenParameter',
  full_name='caffe.FlattenParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.FlattenParameter.axis', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='end_axis', full_name='caffe.FlattenParameter.end_axis', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13613,
  serialized_end=13670,
)

# Descriptor for caffe.HDF5DataParameter — HDF5 input layer options.
_HDF5DATAPARAMETER = _descriptor.Descriptor(
  name='HDF5DataParameter',
  full_name='caffe.HDF5DataParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='source', full_name='caffe.HDF5DataParameter.source', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='batch_size', full_name='caffe.HDF5DataParameter.batch_size', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='shuffle', full_name='caffe.HDF5DataParameter.shuffle', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13672,
  serialized_end=13751,
)

# Descriptor for caffe.HDF5OutputParameter — HDF5 output file name.
_HDF5OUTPUTPARAMETER = _descriptor.Descriptor(
  name='HDF5OutputParameter',
  full_name='caffe.HDF5OutputParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='file_name', full_name='caffe.HDF5OutputParameter.file_name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13753,
  serialized_end=13793,
)

# Descriptor for caffe.HingeLossParameter — hinge-loss norm (L1/L2 enum).
_HINGELOSSPARAMETER = _descriptor.Descriptor(
  name='HingeLossParameter',
  full_name='caffe.HingeLossParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='norm', full_name='caffe.HingeLossParameter.norm', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _HINGELOSSPARAMETER_NORM,
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13795,
  serialized_end=13889,
)

# Descriptor for caffe.ImageDataParameter — image-list input layer options
# (source list file, batching, resize, color, legacy transform fields).
_IMAGEDATAPARAMETER = _descriptor.Descriptor(
  name='ImageDataParameter',
  full_name='caffe.ImageDataParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='source', full_name='caffe.ImageDataParameter.source', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='batch_size', full_name='caffe.ImageDataParameter.batch_size', index=1,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='rand_skip', full_name='caffe.ImageDataParameter.rand_skip', index=2,
      number=7, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='shuffle', full_name='caffe.ImageDataParameter.shuffle', index=3,
      number=8, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='new_height', full_name='caffe.ImageDataParameter.new_height', index=4,
      number=9, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='new_width', full_name='caffe.ImageDataParameter.new_width', index=5,
      number=10, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='is_color', full_name='caffe.ImageDataParameter.is_color', index=6,
      number=11, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='scale', full_name='caffe.ImageDataParameter.scale', index=7,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='mean_file', full_name='caffe.ImageDataParameter.mean_file', index=8,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='crop_size', full_name='caffe.ImageDataParameter.crop_size', index=9,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='mirror', full_name='caffe.ImageDataParameter.mirror', index=10,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='root_folder', full_name='caffe.ImageDataParameter.root_folder', index=11,
      number=12, type=9, cpp_type=9, label=1,
      has_default_value=True, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=13892,
  serialized_end=14171,
)

# Descriptor for caffe.InfogainLossParameter — infogain matrix source file.
_INFOGAINLOSSPARAMETER = _descriptor.Descriptor(
  name='InfogainLossParameter',
  full_name='caffe.InfogainLossParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='source', full_name='caffe.InfogainLossParameter.source', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14173,
  serialized_end=14212,
)

# Descriptor for caffe.InnerProductParameter — fully-connected layer options.
_INNERPRODUCTPARAMETER = _descriptor.Descriptor(
  name='InnerProductParameter',
  full_name='caffe.InnerProductParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='num_output', full_name='caffe.InnerProductParameter.num_output', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='bias_term', full_name='caffe.InnerProductParameter.bias_term', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='weight_filler', full_name='caffe.InnerProductParameter.weight_filler', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='bias_filler', full_name='caffe.InnerProductParameter.bias_filler', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.InnerProductParameter.axis', index=4,
      number=5, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='transpose', full_name='caffe.InnerProductParameter.transpose', index=5,
      number=6, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14215,
  serialized_end=14418,
)

# Descriptor for caffe.InputParameter — explicit input blob shapes.
_INPUTPARAMETER = _descriptor.Descriptor(
  name='InputParameter',
  full_name='caffe.InputParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='shape', full_name='caffe.InputParameter.shape', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14420,
  serialized_end=14469,
)

# Descriptor for caffe.LogParameter — y = log_base(scale*x + shift) options.
_LOGPARAMETER = _descriptor.Descriptor(
  name='LogParameter',
  full_name='caffe.LogParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='base', full_name='caffe.LogParameter.base', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='scale', full_name='caffe.LogParameter.scale', index=1,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='shift', full_name='caffe.LogParameter.shift', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14471,
  serialized_end=14539,
)

# Descriptor for caffe.LRNParameter — local response normalization options
# (window size, alpha/beta/k, norm-region enum, engine enum).
_LRNPARAMETER = _descriptor.Descriptor(
  name='LRNParameter',
  full_name='caffe.LRNParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='local_size', full_name='caffe.LRNParameter.local_size', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=5,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='alpha', full_name='caffe.LRNParameter.alpha', index=1,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='beta', full_name='caffe.LRNParameter.beta', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0.75,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='norm_region', full_name='caffe.LRNParameter.norm_region', index=3,
      number=4, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='k', full_name='caffe.LRNParameter.k', index=4,
      number=5, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='engine', full_name='caffe.LRNParameter.engine', index=5,
      number=6, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _LRNPARAMETER_NORMREGION,
    _LRNPARAMETER_ENGINE,
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14542,
  serialized_end=14854,
)

# Descriptor for caffe.MemoryDataParameter — in-memory data layer dimensions.
_MEMORYDATAPARAMETER = _descriptor.Descriptor(
  name='MemoryDataParameter',
  full_name='caffe.MemoryDataParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='batch_size', full_name='caffe.MemoryDataParameter.batch_size', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='channels', full_name='caffe.MemoryDataParameter.channels', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='height', full_name='caffe.MemoryDataParameter.height', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='width', full_name='caffe.MemoryDataParameter.width', index=3,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=14856,
  serialized_end=14946,
)
_MULTIBOXLOSSPARAMETER = _descriptor.Descriptor(
name='MultiBoxLossParameter',
full_name='caffe.MultiBoxLossParameter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='loc_loss_type', full_name='caffe.MultiBoxLossParameter.loc_loss_type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='conf_loss_type', full_name='caffe.MultiBoxLossParameter.conf_loss_type', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='loc_weight', full_name='caffe.MultiBoxLossParameter.loc_weight', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='num_classes', full_name='caffe.MultiBoxLossParameter.num_classes', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='share_location', full_name='caffe.MultiBoxLossParameter.share_location', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='match_type', full_name='caffe.MultiBoxLossParameter.match_type', index=5,
number=6, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='overlap_threshold', full_name='caffe.MultiBoxLossParameter.overlap_threshold', index=6,
number=7, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=0.5,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='use_prior_for_matching', full_name='caffe.MultiBoxLossParameter.use_prior_for_matching', index=7,
number=8, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='background_label_id', full_name='caffe.MultiBoxLossParameter.background_label_id', index=8,
number=9, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='use_difficult_gt', full_name='caffe.MultiBoxLossParameter.use_difficult_gt', index=9,
number=10, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='do_neg_mining', full_name='caffe.MultiBoxLossParameter.do_neg_mining', index=10,
number=11, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='neg_pos_ratio', full_name='caffe.MultiBoxLossParameter.neg_pos_ratio', index=11,
number=12, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=3,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='neg_overlap', full_name='caffe.MultiBoxLossParameter.neg_overlap', index=12,
number=13, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=0.5,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='code_type', full_name='caffe.MultiBoxLossParameter.code_type', index=13,
number=14, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='encode_variance_in_target', full_name='caffe.MultiBoxLossParameter.encode_variance_in_target', index=14,
number=16, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='map_object_to_agnostic', full_name='caffe.MultiBoxLossParameter.map_object_to_agnostic', index=15,
number=17, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='ignore_cross_boundary_bbox', full_name='caffe.MultiBoxLossParameter.ignore_cross_boundary_bbox', index=16,
number=18, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='bp_inside', full_name='caffe.MultiBoxLossParameter.bp_inside', index=17,
number=19, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mining_type', full_name='caffe.MultiBoxLossParameter.mining_type', index=18,
number=20, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='nms_param', full_name='caffe.MultiBoxLossParameter.nms_param', index=19,
number=21, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='sample_size', full_name='caffe.MultiBoxLossParameter.sample_size', index=20,
number=22, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=64,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='use_prior_for_nms', full_name='caffe.MultiBoxLossParameter.use_prior_for_nms', index=21,
number=23, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='use_polygon', full_name='caffe.MultiBoxLossParameter.use_polygon', index=22,
number=24, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_MULTIBOXLOSSPARAMETER_LOCLOSSTYPE,
_MULTIBOXLOSSPARAMETER_CONFLOSSTYPE,
_MULTIBOXLOSSPARAMETER_MATCHTYPE,
_MULTIBOXLOSSPARAMETER_MININGTYPE,
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=14949,
serialized_end=16104,
)
# Auto-generated protobuf Descriptor for the caffe.MVNParameter message
# (mean-variance normalization layer). Fields: normalize_variance (bool,
# default True), across_channels (bool, default False), eps (float,
# default 1e-09). Generated by protoc — edit the .proto, not this file;
# the serialized_start/end offsets must match the serialized descriptor.
_MVNPARAMETER = _descriptor.Descriptor(
  name='MVNParameter',
  full_name='caffe.MVNParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='normalize_variance', full_name='caffe.MVNParameter.normalize_variance', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='across_channels', full_name='caffe.MVNParameter.across_channels', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='eps', full_name='caffe.MVNParameter.eps', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1e-09,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=16106,
  serialized_end=16206,
)
# Auto-generated protobuf Descriptor for the caffe.NormalizeParameter message.
# Fields: across_spatial (bool, default True), scale_filler (message),
# channel_shared (bool, default True), eps (float, default 1e-10).
# protoc output — do not edit by hand.
_NORMALIZEPARAMETER = _descriptor.Descriptor(
  name='NormalizeParameter',
  full_name='caffe.NormalizeParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='across_spatial', full_name='caffe.NormalizeParameter.across_spatial', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='scale_filler', full_name='caffe.NormalizeParameter.scale_filler', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='channel_shared', full_name='caffe.NormalizeParameter.channel_shared', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='eps', full_name='caffe.NormalizeParameter.eps', index=3,
      number=4, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1e-10,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=16209,
  serialized_end=16355,
)
# Auto-generated protobuf Descriptor for the caffe.ParameterParameter message.
# Single field: shape (message type). protoc output — do not edit by hand.
_PARAMETERPARAMETER = _descriptor.Descriptor(
  name='ParameterParameter',
  full_name='caffe.ParameterParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='shape', full_name='caffe.ParameterParameter.shape', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=16357,
  serialized_end=16410,
)
# Auto-generated protobuf Descriptor for the caffe.PermuteParameter message.
# Single repeated field (label=3): order (uint32). protoc output — do not
# edit by hand.
_PERMUTEPARAMETER = _descriptor.Descriptor(
  name='PermuteParameter',
  full_name='caffe.PermuteParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='order', full_name='caffe.PermuteParameter.order', index=0,
      number=1, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=16412,
  serialized_end=16445,
)
# Auto-generated protobuf Descriptor for the caffe.PoolingParameter message.
# Covers pooling method (enum), pad/kernel/stride in both combined and
# per-dimension (_h/_w) forms, engine (enum), and global_pooling (bool).
# Note the field `number` values do not follow index order (e.g. pad is
# number=4 at index 1) — they mirror the .proto tag numbers. protoc
# output — do not edit by hand.
_POOLINGPARAMETER = _descriptor.Descriptor(
  name='PoolingParameter',
  full_name='caffe.PoolingParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='pool', full_name='caffe.PoolingParameter.pool', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pad', full_name='caffe.PoolingParameter.pad', index=1,
      number=4, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pad_h', full_name='caffe.PoolingParameter.pad_h', index=2,
      number=9, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pad_w', full_name='caffe.PoolingParameter.pad_w', index=3,
      number=10, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='kernel_size', full_name='caffe.PoolingParameter.kernel_size', index=4,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='kernel_h', full_name='caffe.PoolingParameter.kernel_h', index=5,
      number=5, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='kernel_w', full_name='caffe.PoolingParameter.kernel_w', index=6,
      number=6, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stride', full_name='caffe.PoolingParameter.stride', index=7,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stride_h', full_name='caffe.PoolingParameter.stride_h', index=8,
      number=7, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='stride_w', full_name='caffe.PoolingParameter.stride_w', index=9,
      number=8, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='engine', full_name='caffe.PoolingParameter.engine', index=10,
      number=11, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='global_pooling', full_name='caffe.PoolingParameter.global_pooling', index=11,
      number=12, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _POOLINGPARAMETER_POOLMETHOD,
    _POOLINGPARAMETER_ENGINE,
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=16448,
  serialized_end=16866,
)
# Auto-generated protobuf Descriptor for the caffe.PowerParameter message.
# Fields: power (float, default 1), scale (float, default 1), shift (float,
# default 0). protoc output — do not edit by hand.
_POWERPARAMETER = _descriptor.Descriptor(
  name='PowerParameter',
  full_name='caffe.PowerParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='power', full_name='caffe.PowerParameter.power', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='scale', full_name='caffe.PowerParameter.scale', index=1,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='shift', full_name='caffe.PowerParameter.shift', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=16868,
  serialized_end=16938,
)
# Auto-generated protobuf Descriptor for the caffe.PriorBoxParameter message
# (SSD-style prior/anchor box generation). Repeated float fields (label=3):
# min_size, max_size, aspect_ratio, variance; scalar fields: flip (default
# True), clip (default False), img_size/img_h/img_w, step/step_h/step_w,
# offset (default 0.5), denser_prior_boxes (default False). Also declares the
# nested CodeType enum. protoc output — do not edit by hand.
_PRIORBOXPARAMETER = _descriptor.Descriptor(
  name='PriorBoxParameter',
  full_name='caffe.PriorBoxParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='min_size', full_name='caffe.PriorBoxParameter.min_size', index=0,
      number=1, type=2, cpp_type=6, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='max_size', full_name='caffe.PriorBoxParameter.max_size', index=1,
      number=2, type=2, cpp_type=6, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='aspect_ratio', full_name='caffe.PriorBoxParameter.aspect_ratio', index=2,
      number=3, type=2, cpp_type=6, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='flip', full_name='caffe.PriorBoxParameter.flip', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=True,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='clip', full_name='caffe.PriorBoxParameter.clip', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='variance', full_name='caffe.PriorBoxParameter.variance', index=5,
      number=6, type=2, cpp_type=6, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='img_size', full_name='caffe.PriorBoxParameter.img_size', index=6,
      number=7, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='img_h', full_name='caffe.PriorBoxParameter.img_h', index=7,
      number=8, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='img_w', full_name='caffe.PriorBoxParameter.img_w', index=8,
      number=9, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='step', full_name='caffe.PriorBoxParameter.step', index=9,
      number=10, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='step_h', full_name='caffe.PriorBoxParameter.step_h', index=10,
      number=11, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='step_w', full_name='caffe.PriorBoxParameter.step_w', index=11,
      number=12, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='offset', full_name='caffe.PriorBoxParameter.offset', index=12,
      number=13, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0.5,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='denser_prior_boxes', full_name='caffe.PriorBoxParameter.denser_prior_boxes', index=13,
      number=14, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _PRIORBOXPARAMETER_CODETYPE,
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=16941,
  serialized_end=17285,
)
# Auto-generated protobuf Descriptor for the caffe.PythonParameter message
# (configuration for a Python-implemented layer). Fields: module (string),
# layer (string), param_str (string, default ""), share_in_parallel (bool,
# default False). protoc output — do not edit by hand.
_PYTHONPARAMETER = _descriptor.Descriptor(
  name='PythonParameter',
  full_name='caffe.PythonParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='module', full_name='caffe.PythonParameter.module', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='layer', full_name='caffe.PythonParameter.layer', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='param_str', full_name='caffe.PythonParameter.param_str', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=True, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='share_in_parallel', full_name='caffe.PythonParameter.share_in_parallel', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=17287,
  serialized_end=17390,
)
# Auto-generated protobuf Descriptor for the caffe.RecurrentParameter message.
# Fields: num_output (uint32, default 0), weight_filler / bias_filler
# (messages), debug_info (bool, default False), expose_hidden (bool,
# default False). protoc output — do not edit by hand.
_RECURRENTPARAMETER = _descriptor.Descriptor(
  name='RecurrentParameter',
  full_name='caffe.RecurrentParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='num_output', full_name='caffe.RecurrentParameter.num_output', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='weight_filler', full_name='caffe.RecurrentParameter.weight_filler', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='bias_filler', full_name='caffe.RecurrentParameter.bias_filler', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='debug_info', full_name='caffe.RecurrentParameter.debug_info', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='expose_hidden', full_name='caffe.RecurrentParameter.expose_hidden', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=17393,
  serialized_end=17585,
)
# Auto-generated protobuf Descriptor for the caffe.ReductionParameter message.
# Fields: operation (enum, default 1), axis (int32, default 0), coeff (float,
# default 1). Declares the nested ReductionOp enum. protoc output — do not
# edit by hand.
_REDUCTIONPARAMETER = _descriptor.Descriptor(
  name='ReductionParameter',
  full_name='caffe.ReductionParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='operation', full_name='caffe.ReductionParameter.operation', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.ReductionParameter.axis', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='coeff', full_name='caffe.ReductionParameter.coeff', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _REDUCTIONPARAMETER_REDUCTIONOP,
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=17588,
  serialized_end=17761,
)
# Auto-generated protobuf Descriptor for the caffe.ReLUParameter message.
# Fields: negative_slope (float, default 0 — 0 gives standard ReLU, nonzero
# gives leaky ReLU per the .proto; verify against the layer implementation),
# engine (enum, default 0). protoc output — do not edit by hand.
_RELUPARAMETER = _descriptor.Descriptor(
  name='ReLUParameter',
  full_name='caffe.ReLUParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='negative_slope', full_name='caffe.ReLUParameter.negative_slope', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='engine', full_name='caffe.ReLUParameter.engine', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _RELUPARAMETER_ENGINE,
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=17764,
  serialized_end=17905,
)
# Auto-generated protobuf Descriptor for the caffe.ReshapeParameter message.
# Fields: shape (message), axis (int32, default 0), num_axes (int32,
# default -1). protoc output — do not edit by hand.
_RESHAPEPARAMETER = _descriptor.Descriptor(
  name='ReshapeParameter',
  full_name='caffe.ReshapeParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='shape', full_name='caffe.ReshapeParameter.shape', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.ReshapeParameter.axis', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='num_axes', full_name='caffe.ReshapeParameter.num_axes', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=17907,
  serialized_end=17997,
)
# Auto-generated protobuf Descriptor for the caffe.ReverseParameter message.
# Single field: axis (int32, default 0). protoc output — do not edit by hand.
_REVERSEPARAMETER = _descriptor.Descriptor(
  name='ReverseParameter',
  full_name='caffe.ReverseParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.ReverseParameter.axis', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=17999,
  serialized_end=18034,
)
# Auto-generated protobuf Descriptor for the caffe.ROIPoolingParameter message.
# Fields: pooled_h (uint32, default 0), pooled_w (uint32, default 0),
# spatial_scale (float, default 1). protoc output — do not edit by hand.
_ROIPOOLINGPARAMETER = _descriptor.Descriptor(
  name='ROIPoolingParameter',
  full_name='caffe.ROIPoolingParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='pooled_h', full_name='caffe.ROIPoolingParameter.pooled_h', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pooled_w', full_name='caffe.ROIPoolingParameter.pooled_w', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='spatial_scale', full_name='caffe.ROIPoolingParameter.spatial_scale', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=18036,
  serialized_end=18125,
)
# Auto-generated protobuf Descriptor for the caffe.ScaleParameter message.
# Fields: axis (int32, default 1), num_axes (int32, default 1), filler
# (message), bias_term (bool, default False), bias_filler (message).
# protoc output — do not edit by hand.
_SCALEPARAMETER = _descriptor.Descriptor(
  name='ScaleParameter',
  full_name='caffe.ScaleParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.ScaleParameter.axis', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='num_axes', full_name='caffe.ScaleParameter.num_axes', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='filler', full_name='caffe.ScaleParameter.filler', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='bias_term', full_name='caffe.ScaleParameter.bias_term', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='bias_filler', full_name='caffe.ScaleParameter.bias_filler', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=18128,
  serialized_end=18293,
)
# Auto-generated protobuf Descriptor for the caffe.SigmoidParameter message.
# Single field: engine (enum, default 0); declares the nested Engine enum.
# protoc output — do not edit by hand.
_SIGMOIDPARAMETER = _descriptor.Descriptor(
  name='SigmoidParameter',
  full_name='caffe.SigmoidParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='engine', full_name='caffe.SigmoidParameter.engine', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _SIGMOIDPARAMETER_ENGINE,
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=18295,
  serialized_end=18415,
)
# Auto-generated protobuf Descriptor for the caffe.SliceParameter message.
# Fields: axis (int32, tag 3, default 1), slice_point (repeated uint32,
# tag 2), slice_dim (uint32, tag 1, default 1). Note tag numbers differ
# from index order — they mirror the .proto. protoc output — do not edit
# by hand.
_SLICEPARAMETER = _descriptor.Descriptor(
  name='SliceParameter',
  full_name='caffe.SliceParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.SliceParameter.axis', index=0,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='slice_point', full_name='caffe.SliceParameter.slice_point', index=1,
      number=2, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='slice_dim', full_name='caffe.SliceParameter.slice_dim', index=2,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=18417,
  serialized_end=18493,
)
# Auto-generated protobuf Descriptor for the caffe.SoftmaxParameter message.
# Fields: engine (enum, default 0), axis (int32, default 1); declares the
# nested Engine enum. protoc output — do not edit by hand.
_SOFTMAXPARAMETER = _descriptor.Descriptor(
  name='SoftmaxParameter',
  full_name='caffe.SoftmaxParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='engine', full_name='caffe.SoftmaxParameter.engine', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.SoftmaxParameter.axis', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _SOFTMAXPARAMETER_ENGINE,
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=18496,
  serialized_end=18633,
)
# Auto-generated protobuf Descriptor for the caffe.TanHParameter message.
# Single field: engine (enum, default 0); declares the nested Engine enum.
# protoc output — do not edit by hand.
_TANHPARAMETER = _descriptor.Descriptor(
  name='TanHParameter',
  full_name='caffe.TanHParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='engine', full_name='caffe.TanHParameter.engine', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _TANHPARAMETER_ENGINE,
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=18635,
  serialized_end=18749,
)
# Auto-generated protobuf Descriptor for the caffe.TileParameter message.
# Fields: axis (int32, default 1), tiles (int32, no default). protoc
# output — do not edit by hand.
_TILEPARAMETER = _descriptor.Descriptor(
  name='TileParameter',
  full_name='caffe.TileParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='axis', full_name='caffe.TileParameter.axis', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='tiles', full_name='caffe.TileParameter.tiles', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=18751,
  serialized_end=18798,
)
# Auto-generated protobuf Descriptor for the caffe.ThresholdParameter message.
# Single field: threshold (float, default 0). protoc output — do not edit
# by hand.
_THRESHOLDPARAMETER = _descriptor.Descriptor(
  name='ThresholdParameter',
  full_name='caffe.ThresholdParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='threshold', full_name='caffe.ThresholdParameter.threshold', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=18800,
  serialized_end=18842,
)
_VIDEODATAPARAMETER = _descriptor.Descriptor(
name='VideoDataParameter',
full_name='caffe.VideoDataParameter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='video_type', full_name='caffe.VideoDataParameter.video_type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='device_id', full_name='caffe.VideoDataParameter.device_id', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='video_file', full_name='caffe.VideoDataParameter.video_file', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='skip_frames', full_name='caffe.VideoDataParameter.skip_frames', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_VIDEODATAPARAMETER_VIDEOTYPE,
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=18845,
serialized_end=19032,
)
_WINDOWDATAPARAMETER = _descriptor.Descriptor(
name='WindowDataParameter',
full_name='caffe.WindowDataParameter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='source', full_name='caffe.WindowDataParameter.source', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='scale', full_name='caffe.WindowDataParameter.scale', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mean_file', full_name='caffe.WindowDataParameter.mean_file', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='batch_size', full_name='caffe.WindowDataParameter.batch_size', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='crop_size', full_name='caffe.WindowDataParameter.crop_size', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mirror', full_name='caffe.WindowDataParameter.mirror', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='fg_threshold', full_name='caffe.WindowDataParameter.fg_threshold', index=6,
number=7, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=0.5,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='bg_threshold', full_name='caffe.WindowDataParameter.bg_threshold', index=7,
number=8, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=0.5,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='fg_fraction', full_name='caffe.WindowDataParameter.fg_fraction', index=8,
number=9, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=0.25,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='context_pad', full_name='caffe.WindowDataParameter.context_pad', index=9,
number=10, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='crop_mode', full_name='caffe.WindowDataParameter.crop_mode', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=_b("warp").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='cache_images', full_name='caffe.WindowDataParameter.cache_images', index=11,
number=12, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='root_folder', full_name='caffe.WindowDataParameter.root_folder', index=12,
number=13, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=19035,
serialized_end=19356,
)
_SPPPARAMETER = _descriptor.Descriptor(
name='SPPParameter',
full_name='caffe.SPPParameter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='pyramid_height', full_name='caffe.SPPParameter.pyramid_height', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='pool', full_name='caffe.SPPParameter.pool', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='engine', full_name='caffe.SPPParameter.engine', index=2,
number=6, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_SPPPARAMETER_POOLMETHOD,
_SPPPARAMETER_ENGINE,
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=19359,
serialized_end=19594,
)
_V1LAYERPARAMETER = _descriptor.Descriptor(
name='V1LayerParameter',
full_name='caffe.V1LayerParameter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='bottom', full_name='caffe.V1LayerParameter.bottom', index=0,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='top', full_name='caffe.V1LayerParameter.top', index=1,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='name', full_name='caffe.V1LayerParameter.name', index=2,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='include', full_name='caffe.V1LayerParameter.include', index=3,
number=32, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='exclude', full_name='caffe.V1LayerParameter.exclude', index=4,
number=33, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='type', full_name='caffe.V1LayerParameter.type', index=5,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='blobs', full_name='caffe.V1LayerParameter.blobs', index=6,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='param', full_name='caffe.V1LayerParameter.param', index=7,
number=1001, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='blob_share_mode', full_name='caffe.V1LayerParameter.blob_share_mode', index=8,
number=1002, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='blobs_lr', full_name='caffe.V1LayerParameter.blobs_lr', index=9,
number=7, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='weight_decay', full_name='caffe.V1LayerParameter.weight_decay', index=10,
number=8, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='loss_weight', full_name='caffe.V1LayerParameter.loss_weight', index=11,
number=35, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='accuracy_param', full_name='caffe.V1LayerParameter.accuracy_param', index=12,
number=27, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='argmax_param', full_name='caffe.V1LayerParameter.argmax_param', index=13,
number=23, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='concat_param', full_name='caffe.V1LayerParameter.concat_param', index=14,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='contrastive_loss_param', full_name='caffe.V1LayerParameter.contrastive_loss_param', index=15,
number=40, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='convolution_param', full_name='caffe.V1LayerParameter.convolution_param', index=16,
number=10, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='data_param', full_name='caffe.V1LayerParameter.data_param', index=17,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='dropout_param', full_name='caffe.V1LayerParameter.dropout_param', index=18,
number=12, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='dummy_data_param', full_name='caffe.V1LayerParameter.dummy_data_param', index=19,
number=26, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='eltwise_param', full_name='caffe.V1LayerParameter.eltwise_param', index=20,
number=24, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='exp_param', full_name='caffe.V1LayerParameter.exp_param', index=21,
number=41, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hdf5_data_param', full_name='caffe.V1LayerParameter.hdf5_data_param', index=22,
number=13, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hdf5_output_param', full_name='caffe.V1LayerParameter.hdf5_output_param', index=23,
number=14, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hinge_loss_param', full_name='caffe.V1LayerParameter.hinge_loss_param', index=24,
number=29, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='image_data_param', full_name='caffe.V1LayerParameter.image_data_param', index=25,
number=15, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='infogain_loss_param', full_name='caffe.V1LayerParameter.infogain_loss_param', index=26,
number=16, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='inner_product_param', full_name='caffe.V1LayerParameter.inner_product_param', index=27,
number=17, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='lrn_param', full_name='caffe.V1LayerParameter.lrn_param', index=28,
number=18, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='memory_data_param', full_name='caffe.V1LayerParameter.memory_data_param', index=29,
number=22, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mvn_param', full_name='caffe.V1LayerParameter.mvn_param', index=30,
number=34, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='pooling_param', full_name='caffe.V1LayerParameter.pooling_param', index=31,
number=19, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='power_param', full_name='caffe.V1LayerParameter.power_param', index=32,
number=21, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='relu_param', full_name='caffe.V1LayerParameter.relu_param', index=33,
number=30, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='sigmoid_param', full_name='caffe.V1LayerParameter.sigmoid_param', index=34,
number=38, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='softmax_param', full_name='caffe.V1LayerParameter.softmax_param', index=35,
number=39, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='slice_param', full_name='caffe.V1LayerParameter.slice_param', index=36,
number=31, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='tanh_param', full_name='caffe.V1LayerParameter.tanh_param', index=37,
number=37, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='threshold_param', full_name='caffe.V1LayerParameter.threshold_param', index=38,
number=25, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='window_data_param', full_name='caffe.V1LayerParameter.window_data_param', index=39,
number=20, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='transform_param', full_name='caffe.V1LayerParameter.transform_param', index=40,
number=36, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='loss_param', full_name='caffe.V1LayerParameter.loss_param', index=41,
number=42, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='layer', full_name='caffe.V1LayerParameter.layer', index=42,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_V1LAYERPARAMETER_LAYERTYPE,
_V1LAYERPARAMETER_DIMCHECKMODE,
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=19597,
serialized_end=22125,
)
_V0LAYERPARAMETER = _descriptor.Descriptor(
name='V0LayerParameter',
full_name='caffe.V0LayerParameter',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='caffe.V0LayerParameter.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='type', full_name='caffe.V0LayerParameter.type', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='num_output', full_name='caffe.V0LayerParameter.num_output', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='biasterm', full_name='caffe.V0LayerParameter.biasterm', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='weight_filler', full_name='caffe.V0LayerParameter.weight_filler', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='bias_filler', full_name='caffe.V0LayerParameter.bias_filler', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='pad', full_name='caffe.V0LayerParameter.pad', index=6,
number=7, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='kernelsize', full_name='caffe.V0LayerParameter.kernelsize', index=7,
number=8, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='group', full_name='caffe.V0LayerParameter.group', index=8,
number=9, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='stride', full_name='caffe.V0LayerParameter.stride', index=9,
number=10, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='pool', full_name='caffe.V0LayerParameter.pool', index=10,
number=11, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='dropout_ratio', full_name='caffe.V0LayerParameter.dropout_ratio', index=11,
number=12, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=0.5,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='local_size', full_name='caffe.V0LayerParameter.local_size', index=12,
number=13, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=5,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='alpha', full_name='caffe.V0LayerParameter.alpha', index=13,
number=14, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='beta', full_name='caffe.V0LayerParameter.beta', index=14,
number=15, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=0.75,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='k', full_name='caffe.V0LayerParameter.k', index=15,
number=22, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='source', full_name='caffe.V0LayerParameter.source', index=16,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='scale', full_name='caffe.V0LayerParameter.scale', index=17,
number=17, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='meanfile', full_name='caffe.V0LayerParameter.meanfile', index=18,
number=18, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='batchsize', full_name='caffe.V0LayerParameter.batchsize', index=19,
number=19, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='cropsize', full_name='caffe.V0LayerParameter.cropsize', index=20,
number=20, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mirror', full_name='caffe.V0LayerParameter.mirror', index=21,
number=21, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='blobs', full_name='caffe.V0LayerParameter.blobs', index=22,
number=50, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='blobs_lr', full_name='caffe.V0LayerParameter.blobs_lr', index=23,
number=51, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='weight_decay', full_name='caffe.V0LayerParameter.weight_decay', index=24,
number=52, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='rand_skip', full_name='caffe.V0LayerParameter.rand_skip', index=25,
number=53, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='det_fg_threshold', full_name='caffe.V0LayerParameter.det_fg_threshold', index=26,
number=54, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=0.5,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='det_bg_threshold', full_name='caffe.V0LayerParameter.det_bg_threshold', index=27,
number=55, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=0.5,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='det_fg_fraction', full_name='caffe.V0LayerParameter.det_fg_fraction', index=28,
number=56, type=2, cpp_type=6, label=1,
has_default_value=True, default_value=0.25,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='det_context_pad', full_name='caffe.V0LayerParameter.det_context_pad', index=29,
number=58, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='det_crop_mode', full_name='caffe.V0LayerParameter.det_crop_mode', index=30,
number=59, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=_b("warp").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='new_num', full_name='caffe.V0LayerParameter.new_num', index=31,
number=60, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='new_channels', full_name='caffe.V0LayerParameter.new_channels', index=32,
number=61, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='new_height', full_name='caffe.V0LayerParameter.new_height', index=33,
number=62, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='new_width', full_name='caffe.V0LayerParameter.new_width', index=34,
number=63, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='shuffle_images', full_name='caffe.V0LayerParameter.shuffle_images', index=35,
number=64, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='concat_dim', full_name='caffe.V0LayerParameter.concat_dim', index=36,
number=65, type=13, cpp_type=3, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='hdf5_output_param', full_name='caffe.V0LayerParameter.hdf5_output_param', index=37,
number=1001, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_V0LAYERPARAMETER_POOLMETHOD,
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=22128,
serialized_end=23149,
)
# Descriptor for the caffe.PReLUParameter message.
# NOTE(review): this file is generated by the protocol buffer compiler from
# caffe.proto — do not edit by hand; regenerate with protoc instead.
# Fields:
#   filler (field number 1, message type) — weight filler; its message_type is
#     linked to _FILLERPARAMETER further down in this module.
#   channel_shared (field number 2, bool, default False) — whether the slope
#     parameter is shared across channels.
_PRELUPARAMETER = _descriptor.Descriptor(
  name='PReLUParameter',
  full_name='caffe.PReLUParameter',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='filler', full_name='caffe.PReLUParameter.filler', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='channel_shared', full_name='caffe.PReLUParameter.channel_shared', index=1,
      number=2, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message definition inside the serialized FileDescriptorProto.
  serialized_start=23151,
  serialized_end=23238,
)
# --- Descriptor cross-linking (generated by protoc; do not edit by hand) ---
# The Descriptor objects above are created with message_type/enum_type set to
# None; the statements below patch in the actual references so that protobuf
# reflection can resolve nested message and enum fields, and set each nested
# enum's containing_type back-reference.
_BLOBPROTO.fields_by_name['shape'].message_type = _BLOBSHAPE
_BLOBPROTOVECTOR.fields_by_name['blobs'].message_type = _BLOBPROTO
_LABELMAP.fields_by_name['item'].message_type = _LABELMAPITEM
_BATCHSAMPLER.fields_by_name['sampler'].message_type = _SAMPLER
_BATCHSAMPLER.fields_by_name['sample_constraint'].message_type = _SAMPLECONSTRAINT
_EMITCONSTRAINT.fields_by_name['emit_type'].enum_type = _EMITCONSTRAINT_EMITTYPE
_EMITCONSTRAINT_EMITTYPE.containing_type = _EMITCONSTRAINT
_ANNOTATION.fields_by_name['bbox'].message_type = _NORMALIZEDBBOX
_ANNOTATION.fields_by_name['rbox'].message_type = _NORMALIZEDRBOX
_ANNOTATION.fields_by_name['polygon'].message_type = _NORMALIZEDPOLYGON
_ANNOTATIONGROUP.fields_by_name['annotation'].message_type = _ANNOTATION
_ANNOTATEDDATUM.fields_by_name['datum'].message_type = _DATUM
_ANNOTATEDDATUM.fields_by_name['type'].enum_type = _ANNOTATEDDATUM_ANNOTATIONTYPE
_ANNOTATEDDATUM.fields_by_name['annotation_group'].message_type = _ANNOTATIONGROUP
_ANNOTATEDDATUM_ANNOTATIONTYPE.containing_type = _ANNOTATEDDATUM
_FILLERPARAMETER.fields_by_name['variance_norm'].enum_type = _FILLERPARAMETER_VARIANCENORM
_FILLERPARAMETER_VARIANCENORM.containing_type = _FILLERPARAMETER
# Net / solver level messages.
_NETPARAMETER.fields_by_name['input_shape'].message_type = _BLOBSHAPE
_NETPARAMETER.fields_by_name['state'].message_type = _NETSTATE
_NETPARAMETER.fields_by_name['layer'].message_type = _LAYERPARAMETER
_NETPARAMETER.fields_by_name['layers'].message_type = _V1LAYERPARAMETER
_SOLVERPARAMETER.fields_by_name['net_param'].message_type = _NETPARAMETER
_SOLVERPARAMETER.fields_by_name['train_net_param'].message_type = _NETPARAMETER
_SOLVERPARAMETER.fields_by_name['test_net_param'].message_type = _NETPARAMETER
_SOLVERPARAMETER.fields_by_name['train_state'].message_type = _NETSTATE
_SOLVERPARAMETER.fields_by_name['test_state'].message_type = _NETSTATE
_SOLVERPARAMETER.fields_by_name['snapshot_format'].enum_type = _SOLVERPARAMETER_SNAPSHOTFORMAT
_SOLVERPARAMETER.fields_by_name['solver_mode'].enum_type = _SOLVERPARAMETER_SOLVERMODE
_SOLVERPARAMETER.fields_by_name['solver_type'].enum_type = _SOLVERPARAMETER_SOLVERTYPE
_SOLVERPARAMETER_SNAPSHOTFORMAT.containing_type = _SOLVERPARAMETER
_SOLVERPARAMETER_SOLVERMODE.containing_type = _SOLVERPARAMETER
_SOLVERPARAMETER_SOLVERTYPE.containing_type = _SOLVERPARAMETER
_SOLVERSTATE.fields_by_name['history'].message_type = _BLOBPROTO
_NETSTATE.fields_by_name['phase'].enum_type = _PHASE
_NETSTATERULE.fields_by_name['phase'].enum_type = _PHASE
_PARAMSPEC.fields_by_name['share_mode'].enum_type = _PARAMSPEC_DIMCHECKMODE
_PARAMSPEC_DIMCHECKMODE.containing_type = _PARAMSPEC
# LayerParameter: one sub-message field per layer-type-specific parameter set.
_LAYERPARAMETER.fields_by_name['phase'].enum_type = _PHASE
_LAYERPARAMETER.fields_by_name['param'].message_type = _PARAMSPEC
_LAYERPARAMETER.fields_by_name['blobs'].message_type = _BLOBPROTO
_LAYERPARAMETER.fields_by_name['include'].message_type = _NETSTATERULE
_LAYERPARAMETER.fields_by_name['exclude'].message_type = _NETSTATERULE
_LAYERPARAMETER.fields_by_name['transform_param'].message_type = _TRANSFORMATIONPARAMETER
_LAYERPARAMETER.fields_by_name['loss_param'].message_type = _LOSSPARAMETER
_LAYERPARAMETER.fields_by_name['accuracy_param'].message_type = _ACCURACYPARAMETER
_LAYERPARAMETER.fields_by_name['annotated_data_param'].message_type = _ANNOTATEDDATAPARAMETER
_LAYERPARAMETER.fields_by_name['argmax_param'].message_type = _ARGMAXPARAMETER
_LAYERPARAMETER.fields_by_name['batch_norm_param'].message_type = _BATCHNORMPARAMETER
_LAYERPARAMETER.fields_by_name['bias_param'].message_type = _BIASPARAMETER
_LAYERPARAMETER.fields_by_name['concat_param'].message_type = _CONCATPARAMETER
_LAYERPARAMETER.fields_by_name['contrastive_loss_param'].message_type = _CONTRASTIVELOSSPARAMETER
_LAYERPARAMETER.fields_by_name['convolution_param'].message_type = _CONVOLUTIONPARAMETER
_LAYERPARAMETER.fields_by_name['crop_param'].message_type = _CROPPARAMETER
_LAYERPARAMETER.fields_by_name['ctc_decoder_param'].message_type = _CTCDECODERPARAMETER
_LAYERPARAMETER.fields_by_name['ctc_loss_param'].message_type = _CTCLOSSPARAMETER
_LAYERPARAMETER.fields_by_name['data_param'].message_type = _DATAPARAMETER
_LAYERPARAMETER.fields_by_name['detection_evaluate_param'].message_type = _DETECTIONEVALUATEPARAMETER
_LAYERPARAMETER.fields_by_name['detection_output_param'].message_type = _DETECTIONOUTPUTPARAMETER
_LAYERPARAMETER.fields_by_name['dropout_param'].message_type = _DROPOUTPARAMETER
_LAYERPARAMETER.fields_by_name['dummy_data_param'].message_type = _DUMMYDATAPARAMETER
_LAYERPARAMETER.fields_by_name['eltwise_param'].message_type = _ELTWISEPARAMETER
_LAYERPARAMETER.fields_by_name['elu_param'].message_type = _ELUPARAMETER
_LAYERPARAMETER.fields_by_name['embed_param'].message_type = _EMBEDPARAMETER
_LAYERPARAMETER.fields_by_name['exp_param'].message_type = _EXPPARAMETER
_LAYERPARAMETER.fields_by_name['flatten_param'].message_type = _FLATTENPARAMETER
_LAYERPARAMETER.fields_by_name['hdf5_data_param'].message_type = _HDF5DATAPARAMETER
_LAYERPARAMETER.fields_by_name['hdf5_output_param'].message_type = _HDF5OUTPUTPARAMETER
_LAYERPARAMETER.fields_by_name['hinge_loss_param'].message_type = _HINGELOSSPARAMETER
_LAYERPARAMETER.fields_by_name['image_data_param'].message_type = _IMAGEDATAPARAMETER
_LAYERPARAMETER.fields_by_name['infogain_loss_param'].message_type = _INFOGAINLOSSPARAMETER
_LAYERPARAMETER.fields_by_name['inner_product_param'].message_type = _INNERPRODUCTPARAMETER
_LAYERPARAMETER.fields_by_name['input_param'].message_type = _INPUTPARAMETER
_LAYERPARAMETER.fields_by_name['log_param'].message_type = _LOGPARAMETER
_LAYERPARAMETER.fields_by_name['lrn_param'].message_type = _LRNPARAMETER
_LAYERPARAMETER.fields_by_name['memory_data_param'].message_type = _MEMORYDATAPARAMETER
_LAYERPARAMETER.fields_by_name['multibox_loss_param'].message_type = _MULTIBOXLOSSPARAMETER
_LAYERPARAMETER.fields_by_name['mvn_param'].message_type = _MVNPARAMETER
_LAYERPARAMETER.fields_by_name['norm_param'].message_type = _NORMALIZEPARAMETER
_LAYERPARAMETER.fields_by_name['parameter_param'].message_type = _PARAMETERPARAMETER
_LAYERPARAMETER.fields_by_name['permute_param'].message_type = _PERMUTEPARAMETER
_LAYERPARAMETER.fields_by_name['pooling_param'].message_type = _POOLINGPARAMETER
_LAYERPARAMETER.fields_by_name['power_param'].message_type = _POWERPARAMETER
_LAYERPARAMETER.fields_by_name['prelu_param'].message_type = _PRELUPARAMETER
_LAYERPARAMETER.fields_by_name['prior_box_param'].message_type = _PRIORBOXPARAMETER
_LAYERPARAMETER.fields_by_name['python_param'].message_type = _PYTHONPARAMETER
_LAYERPARAMETER.fields_by_name['recurrent_param'].message_type = _RECURRENTPARAMETER
_LAYERPARAMETER.fields_by_name['reduction_param'].message_type = _REDUCTIONPARAMETER
_LAYERPARAMETER.fields_by_name['relu_param'].message_type = _RELUPARAMETER
_LAYERPARAMETER.fields_by_name['reshape_param'].message_type = _RESHAPEPARAMETER
_LAYERPARAMETER.fields_by_name['roi_pooling_param'].message_type = _ROIPOOLINGPARAMETER
_LAYERPARAMETER.fields_by_name['reverse_param'].message_type = _REVERSEPARAMETER
_LAYERPARAMETER.fields_by_name['scale_param'].message_type = _SCALEPARAMETER
_LAYERPARAMETER.fields_by_name['sigmoid_param'].message_type = _SIGMOIDPARAMETER
_LAYERPARAMETER.fields_by_name['softmax_param'].message_type = _SOFTMAXPARAMETER
_LAYERPARAMETER.fields_by_name['spp_param'].message_type = _SPPPARAMETER
_LAYERPARAMETER.fields_by_name['slice_param'].message_type = _SLICEPARAMETER
_LAYERPARAMETER.fields_by_name['tanh_param'].message_type = _TANHPARAMETER
_LAYERPARAMETER.fields_by_name['threshold_param'].message_type = _THRESHOLDPARAMETER
_LAYERPARAMETER.fields_by_name['tile_param'].message_type = _TILEPARAMETER
_LAYERPARAMETER.fields_by_name['video_data_param'].message_type = _VIDEODATAPARAMETER
_LAYERPARAMETER.fields_by_name['window_data_param'].message_type = _WINDOWDATAPARAMETER
_LAYERPARAMETER.fields_by_name['smooth_l1_loss_param'].message_type = _SMOOTHL1LOSSPARAMETER
_LAYERPARAMETER.fields_by_name['proposal_param'].message_type = _PROPOSALPARAMETER
# Data transformation / augmentation messages.
_TRANSFORMATIONPARAMETER.fields_by_name['resize_param'].message_type = _RESIZEPARAMETER
_TRANSFORMATIONPARAMETER.fields_by_name['noise_param'].message_type = _NOISEPARAMETER
_TRANSFORMATIONPARAMETER.fields_by_name['distort_param'].message_type = _DISTORTIONPARAMETER
_TRANSFORMATIONPARAMETER.fields_by_name['expand_param'].message_type = _EXPANSIONPARAMETER
_TRANSFORMATIONPARAMETER.fields_by_name['emit_constraint'].message_type = _EMITCONSTRAINT
_RESIZEPARAMETER.fields_by_name['resize_mode'].enum_type = _RESIZEPARAMETER_RESIZE_MODE
_RESIZEPARAMETER.fields_by_name['pad_mode'].enum_type = _RESIZEPARAMETER_PAD_MODE
_RESIZEPARAMETER.fields_by_name['interp_mode'].enum_type = _RESIZEPARAMETER_INTERP_MODE
_RESIZEPARAMETER_RESIZE_MODE.containing_type = _RESIZEPARAMETER
_RESIZEPARAMETER_PAD_MODE.containing_type = _RESIZEPARAMETER
_RESIZEPARAMETER_INTERP_MODE.containing_type = _RESIZEPARAMETER
_NOISEPARAMETER.fields_by_name['saltpepper_param'].message_type = _SALTPEPPERPARAMETER
_LOSSPARAMETER.fields_by_name['normalization'].enum_type = _LOSSPARAMETER_NORMALIZATIONMODE
_LOSSPARAMETER_NORMALIZATIONMODE.containing_type = _LOSSPARAMETER
_ANNOTATEDDATAPARAMETER.fields_by_name['batch_sampler'].message_type = _BATCHSAMPLER
_ANNOTATEDDATAPARAMETER.fields_by_name['anno_type'].enum_type = _ANNOTATEDDATUM_ANNOTATIONTYPE
_BIASPARAMETER.fields_by_name['filler'].message_type = _FILLERPARAMETER
_CONVOLUTIONPARAMETER.fields_by_name['weight_filler'].message_type = _FILLERPARAMETER
_CONVOLUTIONPARAMETER.fields_by_name['bias_filler'].message_type = _FILLERPARAMETER
_CONVOLUTIONPARAMETER.fields_by_name['engine'].enum_type = _CONVOLUTIONPARAMETER_ENGINE
_CONVOLUTIONPARAMETER_ENGINE.containing_type = _CONVOLUTIONPARAMETER
_DATAPARAMETER.fields_by_name['backend'].enum_type = _DATAPARAMETER_DB
_DATAPARAMETER_DB.containing_type = _DATAPARAMETER
_DETECTIONEVALUATEPARAMETER.fields_by_name['resize_param'].message_type = _RESIZEPARAMETER
_SAVEOUTPUTPARAMETER.fields_by_name['resize_param'].message_type = _RESIZEPARAMETER
_DETECTIONOUTPUTPARAMETER.fields_by_name['nms_param'].message_type = _NONMAXIMUMSUPPRESSIONPARAMETER
_DETECTIONOUTPUTPARAMETER.fields_by_name['save_output_param'].message_type = _SAVEOUTPUTPARAMETER
_DETECTIONOUTPUTPARAMETER.fields_by_name['code_type'].enum_type = _PRIORBOXPARAMETER_CODETYPE
_DUMMYDATAPARAMETER.fields_by_name['data_filler'].message_type = _FILLERPARAMETER
_DUMMYDATAPARAMETER.fields_by_name['shape'].message_type = _BLOBSHAPE
_ELTWISEPARAMETER.fields_by_name['operation'].enum_type = _ELTWISEPARAMETER_ELTWISEOP
_ELTWISEPARAMETER_ELTWISEOP.containing_type = _ELTWISEPARAMETER
_EMBEDPARAMETER.fields_by_name['weight_filler'].message_type = _FILLERPARAMETER
_EMBEDPARAMETER.fields_by_name['bias_filler'].message_type = _FILLERPARAMETER
_HINGELOSSPARAMETER.fields_by_name['norm'].enum_type = _HINGELOSSPARAMETER_NORM
_HINGELOSSPARAMETER_NORM.containing_type = _HINGELOSSPARAMETER
_INNERPRODUCTPARAMETER.fields_by_name['weight_filler'].message_type = _FILLERPARAMETER
_INNERPRODUCTPARAMETER.fields_by_name['bias_filler'].message_type = _FILLERPARAMETER
_INPUTPARAMETER.fields_by_name['shape'].message_type = _BLOBSHAPE
_LRNPARAMETER.fields_by_name['norm_region'].enum_type = _LRNPARAMETER_NORMREGION
_LRNPARAMETER.fields_by_name['engine'].enum_type = _LRNPARAMETER_ENGINE
_LRNPARAMETER_NORMREGION.containing_type = _LRNPARAMETER
_LRNPARAMETER_ENGINE.containing_type = _LRNPARAMETER
_MULTIBOXLOSSPARAMETER.fields_by_name['loc_loss_type'].enum_type = _MULTIBOXLOSSPARAMETER_LOCLOSSTYPE
_MULTIBOXLOSSPARAMETER.fields_by_name['conf_loss_type'].enum_type = _MULTIBOXLOSSPARAMETER_CONFLOSSTYPE
_MULTIBOXLOSSPARAMETER.fields_by_name['match_type'].enum_type = _MULTIBOXLOSSPARAMETER_MATCHTYPE
_MULTIBOXLOSSPARAMETER.fields_by_name['code_type'].enum_type = _PRIORBOXPARAMETER_CODETYPE
_MULTIBOXLOSSPARAMETER.fields_by_name['mining_type'].enum_type = _MULTIBOXLOSSPARAMETER_MININGTYPE
_MULTIBOXLOSSPARAMETER.fields_by_name['nms_param'].message_type = _NONMAXIMUMSUPPRESSIONPARAMETER
_MULTIBOXLOSSPARAMETER_LOCLOSSTYPE.containing_type = _MULTIBOXLOSSPARAMETER
_MULTIBOXLOSSPARAMETER_CONFLOSSTYPE.containing_type = _MULTIBOXLOSSPARAMETER
_MULTIBOXLOSSPARAMETER_MATCHTYPE.containing_type = _MULTIBOXLOSSPARAMETER
_MULTIBOXLOSSPARAMETER_MININGTYPE.containing_type = _MULTIBOXLOSSPARAMETER
_NORMALIZEPARAMETER.fields_by_name['scale_filler'].message_type = _FILLERPARAMETER
_PARAMETERPARAMETER.fields_by_name['shape'].message_type = _BLOBSHAPE
_POOLINGPARAMETER.fields_by_name['pool'].enum_type = _POOLINGPARAMETER_POOLMETHOD
_POOLINGPARAMETER.fields_by_name['engine'].enum_type = _POOLINGPARAMETER_ENGINE
_POOLINGPARAMETER_POOLMETHOD.containing_type = _POOLINGPARAMETER
_POOLINGPARAMETER_ENGINE.containing_type = _POOLINGPARAMETER
_PRIORBOXPARAMETER_CODETYPE.containing_type = _PRIORBOXPARAMETER
_RECURRENTPARAMETER.fields_by_name['weight_filler'].message_type = _FILLERPARAMETER
_RECURRENTPARAMETER.fields_by_name['bias_filler'].message_type = _FILLERPARAMETER
_REDUCTIONPARAMETER.fields_by_name['operation'].enum_type = _REDUCTIONPARAMETER_REDUCTIONOP
_REDUCTIONPARAMETER_REDUCTIONOP.containing_type = _REDUCTIONPARAMETER
_RELUPARAMETER.fields_by_name['engine'].enum_type = _RELUPARAMETER_ENGINE
_RELUPARAMETER_ENGINE.containing_type = _RELUPARAMETER
_RESHAPEPARAMETER.fields_by_name['shape'].message_type = _BLOBSHAPE
_SCALEPARAMETER.fields_by_name['filler'].message_type = _FILLERPARAMETER
_SCALEPARAMETER.fields_by_name['bias_filler'].message_type = _FILLERPARAMETER
_SIGMOIDPARAMETER.fields_by_name['engine'].enum_type = _SIGMOIDPARAMETER_ENGINE
_SIGMOIDPARAMETER_ENGINE.containing_type = _SIGMOIDPARAMETER
_SOFTMAXPARAMETER.fields_by_name['engine'].enum_type = _SOFTMAXPARAMETER_ENGINE
_SOFTMAXPARAMETER_ENGINE.containing_type = _SOFTMAXPARAMETER
_TANHPARAMETER.fields_by_name['engine'].enum_type = _TANHPARAMETER_ENGINE
_TANHPARAMETER_ENGINE.containing_type = _TANHPARAMETER
_VIDEODATAPARAMETER.fields_by_name['video_type'].enum_type = _VIDEODATAPARAMETER_VIDEOTYPE
_VIDEODATAPARAMETER_VIDEOTYPE.containing_type = _VIDEODATAPARAMETER
_SPPPARAMETER.fields_by_name['pool'].enum_type = _SPPPARAMETER_POOLMETHOD
_SPPPARAMETER.fields_by_name['engine'].enum_type = _SPPPARAMETER_ENGINE
_SPPPARAMETER_POOLMETHOD.containing_type = _SPPPARAMETER
_SPPPARAMETER_ENGINE.containing_type = _SPPPARAMETER
# Legacy (deprecated) V1/V0 layer formats, kept for upgrading old nets.
_V1LAYERPARAMETER.fields_by_name['include'].message_type = _NETSTATERULE
_V1LAYERPARAMETER.fields_by_name['exclude'].message_type = _NETSTATERULE
_V1LAYERPARAMETER.fields_by_name['type'].enum_type = _V1LAYERPARAMETER_LAYERTYPE
_V1LAYERPARAMETER.fields_by_name['blobs'].message_type = _BLOBPROTO
_V1LAYERPARAMETER.fields_by_name['blob_share_mode'].enum_type = _V1LAYERPARAMETER_DIMCHECKMODE
_V1LAYERPARAMETER.fields_by_name['accuracy_param'].message_type = _ACCURACYPARAMETER
_V1LAYERPARAMETER.fields_by_name['argmax_param'].message_type = _ARGMAXPARAMETER
_V1LAYERPARAMETER.fields_by_name['concat_param'].message_type = _CONCATPARAMETER
_V1LAYERPARAMETER.fields_by_name['contrastive_loss_param'].message_type = _CONTRASTIVELOSSPARAMETER
_V1LAYERPARAMETER.fields_by_name['convolution_param'].message_type = _CONVOLUTIONPARAMETER
_V1LAYERPARAMETER.fields_by_name['data_param'].message_type = _DATAPARAMETER
_V1LAYERPARAMETER.fields_by_name['dropout_param'].message_type = _DROPOUTPARAMETER
_V1LAYERPARAMETER.fields_by_name['dummy_data_param'].message_type = _DUMMYDATAPARAMETER
_V1LAYERPARAMETER.fields_by_name['eltwise_param'].message_type = _ELTWISEPARAMETER
_V1LAYERPARAMETER.fields_by_name['exp_param'].message_type = _EXPPARAMETER
_V1LAYERPARAMETER.fields_by_name['hdf5_data_param'].message_type = _HDF5DATAPARAMETER
_V1LAYERPARAMETER.fields_by_name['hdf5_output_param'].message_type = _HDF5OUTPUTPARAMETER
_V1LAYERPARAMETER.fields_by_name['hinge_loss_param'].message_type = _HINGELOSSPARAMETER
_V1LAYERPARAMETER.fields_by_name['image_data_param'].message_type = _IMAGEDATAPARAMETER
_V1LAYERPARAMETER.fields_by_name['infogain_loss_param'].message_type = _INFOGAINLOSSPARAMETER
_V1LAYERPARAMETER.fields_by_name['inner_product_param'].message_type = _INNERPRODUCTPARAMETER
_V1LAYERPARAMETER.fields_by_name['lrn_param'].message_type = _LRNPARAMETER
_V1LAYERPARAMETER.fields_by_name['memory_data_param'].message_type = _MEMORYDATAPARAMETER
_V1LAYERPARAMETER.fields_by_name['mvn_param'].message_type = _MVNPARAMETER
_V1LAYERPARAMETER.fields_by_name['pooling_param'].message_type = _POOLINGPARAMETER
_V1LAYERPARAMETER.fields_by_name['power_param'].message_type = _POWERPARAMETER
_V1LAYERPARAMETER.fields_by_name['relu_param'].message_type = _RELUPARAMETER
_V1LAYERPARAMETER.fields_by_name['sigmoid_param'].message_type = _SIGMOIDPARAMETER
_V1LAYERPARAMETER.fields_by_name['softmax_param'].message_type = _SOFTMAXPARAMETER
_V1LAYERPARAMETER.fields_by_name['slice_param'].message_type = _SLICEPARAMETER
_V1LAYERPARAMETER.fields_by_name['tanh_param'].message_type = _TANHPARAMETER
_V1LAYERPARAMETER.fields_by_name['threshold_param'].message_type = _THRESHOLDPARAMETER
_V1LAYERPARAMETER.fields_by_name['window_data_param'].message_type = _WINDOWDATAPARAMETER
_V1LAYERPARAMETER.fields_by_name['transform_param'].message_type = _TRANSFORMATIONPARAMETER
_V1LAYERPARAMETER.fields_by_name['loss_param'].message_type = _LOSSPARAMETER
_V1LAYERPARAMETER.fields_by_name['layer'].message_type = _V0LAYERPARAMETER
_V1LAYERPARAMETER_LAYERTYPE.containing_type = _V1LAYERPARAMETER
_V1LAYERPARAMETER_DIMCHECKMODE.containing_type = _V1LAYERPARAMETER
_V0LAYERPARAMETER.fields_by_name['weight_filler'].message_type = _FILLERPARAMETER
_V0LAYERPARAMETER.fields_by_name['bias_filler'].message_type = _FILLERPARAMETER
_V0LAYERPARAMETER.fields_by_name['pool'].enum_type = _V0LAYERPARAMETER_POOLMETHOD
_V0LAYERPARAMETER.fields_by_name['blobs'].message_type = _BLOBPROTO
_V0LAYERPARAMETER.fields_by_name['hdf5_output_param'].message_type = _HDF5OUTPUTPARAMETER
_V0LAYERPARAMETER_POOLMETHOD.containing_type = _V0LAYERPARAMETER
_PRELUPARAMETER.fields_by_name['filler'].message_type = _FILLERPARAMETER
# --- File-level registration (generated by protoc; do not edit by hand) ---
# Register every top-level message descriptor (and the file-level Phase enum)
# on the FileDescriptor so they are discoverable by name via reflection.
DESCRIPTOR.message_types_by_name['BlobShape'] = _BLOBSHAPE
DESCRIPTOR.message_types_by_name['BlobProto'] = _BLOBPROTO
DESCRIPTOR.message_types_by_name['BlobProtoVector'] = _BLOBPROTOVECTOR
DESCRIPTOR.message_types_by_name['Datum'] = _DATUM
DESCRIPTOR.message_types_by_name['LabelMapItem'] = _LABELMAPITEM
DESCRIPTOR.message_types_by_name['LabelMap'] = _LABELMAP
DESCRIPTOR.message_types_by_name['Sampler'] = _SAMPLER
DESCRIPTOR.message_types_by_name['SampleConstraint'] = _SAMPLECONSTRAINT
DESCRIPTOR.message_types_by_name['BatchSampler'] = _BATCHSAMPLER
DESCRIPTOR.message_types_by_name['EmitConstraint'] = _EMITCONSTRAINT
DESCRIPTOR.message_types_by_name['NormalizedBBox'] = _NORMALIZEDBBOX
DESCRIPTOR.message_types_by_name['NormalizedRBox'] = _NORMALIZEDRBOX
DESCRIPTOR.message_types_by_name['NormalizedPolygon'] = _NORMALIZEDPOLYGON
DESCRIPTOR.message_types_by_name['Annotation'] = _ANNOTATION
DESCRIPTOR.message_types_by_name['AnnotationGroup'] = _ANNOTATIONGROUP
DESCRIPTOR.message_types_by_name['AnnotatedDatum'] = _ANNOTATEDDATUM
DESCRIPTOR.message_types_by_name['FillerParameter'] = _FILLERPARAMETER
DESCRIPTOR.message_types_by_name['NetParameter'] = _NETPARAMETER
DESCRIPTOR.message_types_by_name['SolverParameter'] = _SOLVERPARAMETER
DESCRIPTOR.message_types_by_name['SolverState'] = _SOLVERSTATE
DESCRIPTOR.message_types_by_name['NetState'] = _NETSTATE
DESCRIPTOR.message_types_by_name['NetStateRule'] = _NETSTATERULE
DESCRIPTOR.message_types_by_name['ParamSpec'] = _PARAMSPEC
DESCRIPTOR.message_types_by_name['LayerParameter'] = _LAYERPARAMETER
DESCRIPTOR.message_types_by_name['ProposalParameter'] = _PROPOSALPARAMETER
DESCRIPTOR.message_types_by_name['SmoothL1LossParameter'] = _SMOOTHL1LOSSPARAMETER
DESCRIPTOR.message_types_by_name['TransformationParameter'] = _TRANSFORMATIONPARAMETER
DESCRIPTOR.message_types_by_name['ResizeParameter'] = _RESIZEPARAMETER
DESCRIPTOR.message_types_by_name['SaltPepperParameter'] = _SALTPEPPERPARAMETER
DESCRIPTOR.message_types_by_name['NoiseParameter'] = _NOISEPARAMETER
DESCRIPTOR.message_types_by_name['DistortionParameter'] = _DISTORTIONPARAMETER
DESCRIPTOR.message_types_by_name['ExpansionParameter'] = _EXPANSIONPARAMETER
DESCRIPTOR.message_types_by_name['LossParameter'] = _LOSSPARAMETER
DESCRIPTOR.message_types_by_name['AccuracyParameter'] = _ACCURACYPARAMETER
DESCRIPTOR.message_types_by_name['AnnotatedDataParameter'] = _ANNOTATEDDATAPARAMETER
DESCRIPTOR.message_types_by_name['ArgMaxParameter'] = _ARGMAXPARAMETER
DESCRIPTOR.message_types_by_name['ConcatParameter'] = _CONCATPARAMETER
DESCRIPTOR.message_types_by_name['BatchNormParameter'] = _BATCHNORMPARAMETER
DESCRIPTOR.message_types_by_name['BiasParameter'] = _BIASPARAMETER
DESCRIPTOR.message_types_by_name['ContrastiveLossParameter'] = _CONTRASTIVELOSSPARAMETER
DESCRIPTOR.message_types_by_name['ConvolutionParameter'] = _CONVOLUTIONPARAMETER
DESCRIPTOR.message_types_by_name['CropParameter'] = _CROPPARAMETER
DESCRIPTOR.message_types_by_name['CTCDecoderParameter'] = _CTCDECODERPARAMETER
DESCRIPTOR.message_types_by_name['CTCLossParameter'] = _CTCLOSSPARAMETER
DESCRIPTOR.message_types_by_name['DataParameter'] = _DATAPARAMETER
DESCRIPTOR.message_types_by_name['DetectionEvaluateParameter'] = _DETECTIONEVALUATEPARAMETER
DESCRIPTOR.message_types_by_name['NonMaximumSuppressionParameter'] = _NONMAXIMUMSUPPRESSIONPARAMETER
DESCRIPTOR.message_types_by_name['SaveOutputParameter'] = _SAVEOUTPUTPARAMETER
DESCRIPTOR.message_types_by_name['DetectionOutputParameter'] = _DETECTIONOUTPUTPARAMETER
DESCRIPTOR.message_types_by_name['DropoutParameter'] = _DROPOUTPARAMETER
DESCRIPTOR.message_types_by_name['DummyDataParameter'] = _DUMMYDATAPARAMETER
DESCRIPTOR.message_types_by_name['EltwiseParameter'] = _ELTWISEPARAMETER
DESCRIPTOR.message_types_by_name['ELUParameter'] = _ELUPARAMETER
DESCRIPTOR.message_types_by_name['EmbedParameter'] = _EMBEDPARAMETER
DESCRIPTOR.message_types_by_name['ExpParameter'] = _EXPPARAMETER
DESCRIPTOR.message_types_by_name['FlattenParameter'] = _FLATTENPARAMETER
DESCRIPTOR.message_types_by_name['HDF5DataParameter'] = _HDF5DATAPARAMETER
DESCRIPTOR.message_types_by_name['HDF5OutputParameter'] = _HDF5OUTPUTPARAMETER
DESCRIPTOR.message_types_by_name['HingeLossParameter'] = _HINGELOSSPARAMETER
DESCRIPTOR.message_types_by_name['ImageDataParameter'] = _IMAGEDATAPARAMETER
DESCRIPTOR.message_types_by_name['InfogainLossParameter'] = _INFOGAINLOSSPARAMETER
DESCRIPTOR.message_types_by_name['InnerProductParameter'] = _INNERPRODUCTPARAMETER
DESCRIPTOR.message_types_by_name['InputParameter'] = _INPUTPARAMETER
DESCRIPTOR.message_types_by_name['LogParameter'] = _LOGPARAMETER
DESCRIPTOR.message_types_by_name['LRNParameter'] = _LRNPARAMETER
DESCRIPTOR.message_types_by_name['MemoryDataParameter'] = _MEMORYDATAPARAMETER
DESCRIPTOR.message_types_by_name['MultiBoxLossParameter'] = _MULTIBOXLOSSPARAMETER
DESCRIPTOR.message_types_by_name['MVNParameter'] = _MVNPARAMETER
DESCRIPTOR.message_types_by_name['NormalizeParameter'] = _NORMALIZEPARAMETER
DESCRIPTOR.message_types_by_name['ParameterParameter'] = _PARAMETERPARAMETER
DESCRIPTOR.message_types_by_name['PermuteParameter'] = _PERMUTEPARAMETER
DESCRIPTOR.message_types_by_name['PoolingParameter'] = _POOLINGPARAMETER
DESCRIPTOR.message_types_by_name['PowerParameter'] = _POWERPARAMETER
DESCRIPTOR.message_types_by_name['PriorBoxParameter'] = _PRIORBOXPARAMETER
DESCRIPTOR.message_types_by_name['PythonParameter'] = _PYTHONPARAMETER
DESCRIPTOR.message_types_by_name['RecurrentParameter'] = _RECURRENTPARAMETER
DESCRIPTOR.message_types_by_name['ReductionParameter'] = _REDUCTIONPARAMETER
DESCRIPTOR.message_types_by_name['ReLUParameter'] = _RELUPARAMETER
DESCRIPTOR.message_types_by_name['ReshapeParameter'] = _RESHAPEPARAMETER
DESCRIPTOR.message_types_by_name['ReverseParameter'] = _REVERSEPARAMETER
DESCRIPTOR.message_types_by_name['ROIPoolingParameter'] = _ROIPOOLINGPARAMETER
DESCRIPTOR.message_types_by_name['ScaleParameter'] = _SCALEPARAMETER
DESCRIPTOR.message_types_by_name['SigmoidParameter'] = _SIGMOIDPARAMETER
DESCRIPTOR.message_types_by_name['SliceParameter'] = _SLICEPARAMETER
DESCRIPTOR.message_types_by_name['SoftmaxParameter'] = _SOFTMAXPARAMETER
DESCRIPTOR.message_types_by_name['TanHParameter'] = _TANHPARAMETER
DESCRIPTOR.message_types_by_name['TileParameter'] = _TILEPARAMETER
DESCRIPTOR.message_types_by_name['ThresholdParameter'] = _THRESHOLDPARAMETER
DESCRIPTOR.message_types_by_name['VideoDataParameter'] = _VIDEODATAPARAMETER
DESCRIPTOR.message_types_by_name['WindowDataParameter'] = _WINDOWDATAPARAMETER
DESCRIPTOR.message_types_by_name['SPPParameter'] = _SPPPARAMETER
DESCRIPTOR.message_types_by_name['V1LayerParameter'] = _V1LAYERPARAMETER
DESCRIPTOR.message_types_by_name['V0LayerParameter'] = _V0LAYERPARAMETER
DESCRIPTOR.message_types_by_name['PReLUParameter'] = _PRELUPARAMETER
DESCRIPTOR.enum_types_by_name['Phase'] = _PHASE
# --- Concrete message classes (generated by protoc; do not edit by hand) ---
# For each descriptor above, build the public Python message class via the
# protobuf reflection metaclass and register it with the default symbol
# database so it can be looked up by full name (e.g. 'caffe.BlobShape').
BlobShape = _reflection.GeneratedProtocolMessageType('BlobShape', (_message.Message,), dict(
  DESCRIPTOR = _BLOBSHAPE,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.BlobShape)
  ))
_sym_db.RegisterMessage(BlobShape)
BlobProto = _reflection.GeneratedProtocolMessageType('BlobProto', (_message.Message,), dict(
  DESCRIPTOR = _BLOBPROTO,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.BlobProto)
  ))
_sym_db.RegisterMessage(BlobProto)
BlobProtoVector = _reflection.GeneratedProtocolMessageType('BlobProtoVector', (_message.Message,), dict(
  DESCRIPTOR = _BLOBPROTOVECTOR,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.BlobProtoVector)
  ))
_sym_db.RegisterMessage(BlobProtoVector)
Datum = _reflection.GeneratedProtocolMessageType('Datum', (_message.Message,), dict(
  DESCRIPTOR = _DATUM,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.Datum)
  ))
_sym_db.RegisterMessage(Datum)
LabelMapItem = _reflection.GeneratedProtocolMessageType('LabelMapItem', (_message.Message,), dict(
  DESCRIPTOR = _LABELMAPITEM,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.LabelMapItem)
  ))
_sym_db.RegisterMessage(LabelMapItem)
LabelMap = _reflection.GeneratedProtocolMessageType('LabelMap', (_message.Message,), dict(
  DESCRIPTOR = _LABELMAP,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.LabelMap)
  ))
_sym_db.RegisterMessage(LabelMap)
Sampler = _reflection.GeneratedProtocolMessageType('Sampler', (_message.Message,), dict(
  DESCRIPTOR = _SAMPLER,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.Sampler)
  ))
_sym_db.RegisterMessage(Sampler)
SampleConstraint = _reflection.GeneratedProtocolMessageType('SampleConstraint', (_message.Message,), dict(
  DESCRIPTOR = _SAMPLECONSTRAINT,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.SampleConstraint)
  ))
_sym_db.RegisterMessage(SampleConstraint)
BatchSampler = _reflection.GeneratedProtocolMessageType('BatchSampler', (_message.Message,), dict(
  DESCRIPTOR = _BATCHSAMPLER,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.BatchSampler)
  ))
_sym_db.RegisterMessage(BatchSampler)
EmitConstraint = _reflection.GeneratedProtocolMessageType('EmitConstraint', (_message.Message,), dict(
  DESCRIPTOR = _EMITCONSTRAINT,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.EmitConstraint)
  ))
_sym_db.RegisterMessage(EmitConstraint)
NormalizedBBox = _reflection.GeneratedProtocolMessageType('NormalizedBBox', (_message.Message,), dict(
  DESCRIPTOR = _NORMALIZEDBBOX,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.NormalizedBBox)
  ))
_sym_db.RegisterMessage(NormalizedBBox)
NormalizedRBox = _reflection.GeneratedProtocolMessageType('NormalizedRBox', (_message.Message,), dict(
  DESCRIPTOR = _NORMALIZEDRBOX,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.NormalizedRBox)
  ))
_sym_db.RegisterMessage(NormalizedRBox)
NormalizedPolygon = _reflection.GeneratedProtocolMessageType('NormalizedPolygon', (_message.Message,), dict(
  DESCRIPTOR = _NORMALIZEDPOLYGON,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.NormalizedPolygon)
  ))
_sym_db.RegisterMessage(NormalizedPolygon)
Annotation = _reflection.GeneratedProtocolMessageType('Annotation', (_message.Message,), dict(
  DESCRIPTOR = _ANNOTATION,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.Annotation)
  ))
_sym_db.RegisterMessage(Annotation)
AnnotationGroup = _reflection.GeneratedProtocolMessageType('AnnotationGroup', (_message.Message,), dict(
  DESCRIPTOR = _ANNOTATIONGROUP,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.AnnotationGroup)
  ))
_sym_db.RegisterMessage(AnnotationGroup)
AnnotatedDatum = _reflection.GeneratedProtocolMessageType('AnnotatedDatum', (_message.Message,), dict(
  DESCRIPTOR = _ANNOTATEDDATUM,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.AnnotatedDatum)
  ))
_sym_db.RegisterMessage(AnnotatedDatum)
FillerParameter = _reflection.GeneratedProtocolMessageType('FillerParameter', (_message.Message,), dict(
  DESCRIPTOR = _FILLERPARAMETER,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.FillerParameter)
  ))
_sym_db.RegisterMessage(FillerParameter)
NetParameter = _reflection.GeneratedProtocolMessageType('NetParameter', (_message.Message,), dict(
  DESCRIPTOR = _NETPARAMETER,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.NetParameter)
  ))
_sym_db.RegisterMessage(NetParameter)
SolverParameter = _reflection.GeneratedProtocolMessageType('SolverParameter', (_message.Message,), dict(
  DESCRIPTOR = _SOLVERPARAMETER,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.SolverParameter)
  ))
_sym_db.RegisterMessage(SolverParameter)
SolverState = _reflection.GeneratedProtocolMessageType('SolverState', (_message.Message,), dict(
  DESCRIPTOR = _SOLVERSTATE,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.SolverState)
  ))
_sym_db.RegisterMessage(SolverState)
NetState = _reflection.GeneratedProtocolMessageType('NetState', (_message.Message,), dict(
  DESCRIPTOR = _NETSTATE,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.NetState)
  ))
_sym_db.RegisterMessage(NetState)
NetStateRule = _reflection.GeneratedProtocolMessageType('NetStateRule', (_message.Message,), dict(
  DESCRIPTOR = _NETSTATERULE,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.NetStateRule)
  ))
_sym_db.RegisterMessage(NetStateRule)
ParamSpec = _reflection.GeneratedProtocolMessageType('ParamSpec', (_message.Message,), dict(
  DESCRIPTOR = _PARAMSPEC,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.ParamSpec)
  ))
_sym_db.RegisterMessage(ParamSpec)
LayerParameter = _reflection.GeneratedProtocolMessageType('LayerParameter', (_message.Message,), dict(
  DESCRIPTOR = _LAYERPARAMETER,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.LayerParameter)
  ))
_sym_db.RegisterMessage(LayerParameter)
ProposalParameter = _reflection.GeneratedProtocolMessageType('ProposalParameter', (_message.Message,), dict(
  DESCRIPTOR = _PROPOSALPARAMETER,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.ProposalParameter)
  ))
_sym_db.RegisterMessage(ProposalParameter)
SmoothL1LossParameter = _reflection.GeneratedProtocolMessageType('SmoothL1LossParameter', (_message.Message,), dict(
  DESCRIPTOR = _SMOOTHL1LOSSPARAMETER,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.SmoothL1LossParameter)
  ))
_sym_db.RegisterMessage(SmoothL1LossParameter)
TransformationParameter = _reflection.GeneratedProtocolMessageType('TransformationParameter', (_message.Message,), dict(
  DESCRIPTOR = _TRANSFORMATIONPARAMETER,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.TransformationParameter)
  ))
_sym_db.RegisterMessage(TransformationParameter)
ResizeParameter = _reflection.GeneratedProtocolMessageType('ResizeParameter', (_message.Message,), dict(
  DESCRIPTOR = _RESIZEPARAMETER,
  __module__ = 'caffe_pb2'
  # @@protoc_insertion_point(class_scope:caffe.ResizeParameter)
  ))
_sym_db.RegisterMessage(ResizeParameter)
SaltPepperParameter = _reflection.GeneratedProtocolMessageType('SaltPepperParameter', (_message.Message,), dict(
DESCRIPTOR = _SALTPEPPERPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.SaltPepperParameter)
))
_sym_db.RegisterMessage(SaltPepperParameter)
NoiseParameter = _reflection.GeneratedProtocolMessageType('NoiseParameter', (_message.Message,), dict(
DESCRIPTOR = _NOISEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.NoiseParameter)
))
_sym_db.RegisterMessage(NoiseParameter)
DistortionParameter = _reflection.GeneratedProtocolMessageType('DistortionParameter', (_message.Message,), dict(
DESCRIPTOR = _DISTORTIONPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.DistortionParameter)
))
_sym_db.RegisterMessage(DistortionParameter)
ExpansionParameter = _reflection.GeneratedProtocolMessageType('ExpansionParameter', (_message.Message,), dict(
DESCRIPTOR = _EXPANSIONPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ExpansionParameter)
))
_sym_db.RegisterMessage(ExpansionParameter)
LossParameter = _reflection.GeneratedProtocolMessageType('LossParameter', (_message.Message,), dict(
DESCRIPTOR = _LOSSPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.LossParameter)
))
_sym_db.RegisterMessage(LossParameter)
AccuracyParameter = _reflection.GeneratedProtocolMessageType('AccuracyParameter', (_message.Message,), dict(
DESCRIPTOR = _ACCURACYPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.AccuracyParameter)
))
_sym_db.RegisterMessage(AccuracyParameter)
AnnotatedDataParameter = _reflection.GeneratedProtocolMessageType('AnnotatedDataParameter', (_message.Message,), dict(
DESCRIPTOR = _ANNOTATEDDATAPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.AnnotatedDataParameter)
))
_sym_db.RegisterMessage(AnnotatedDataParameter)
ArgMaxParameter = _reflection.GeneratedProtocolMessageType('ArgMaxParameter', (_message.Message,), dict(
DESCRIPTOR = _ARGMAXPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ArgMaxParameter)
))
_sym_db.RegisterMessage(ArgMaxParameter)
ConcatParameter = _reflection.GeneratedProtocolMessageType('ConcatParameter', (_message.Message,), dict(
DESCRIPTOR = _CONCATPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ConcatParameter)
))
_sym_db.RegisterMessage(ConcatParameter)
BatchNormParameter = _reflection.GeneratedProtocolMessageType('BatchNormParameter', (_message.Message,), dict(
DESCRIPTOR = _BATCHNORMPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.BatchNormParameter)
))
_sym_db.RegisterMessage(BatchNormParameter)
BiasParameter = _reflection.GeneratedProtocolMessageType('BiasParameter', (_message.Message,), dict(
DESCRIPTOR = _BIASPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.BiasParameter)
))
_sym_db.RegisterMessage(BiasParameter)
ContrastiveLossParameter = _reflection.GeneratedProtocolMessageType('ContrastiveLossParameter', (_message.Message,), dict(
DESCRIPTOR = _CONTRASTIVELOSSPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ContrastiveLossParameter)
))
_sym_db.RegisterMessage(ContrastiveLossParameter)
ConvolutionParameter = _reflection.GeneratedProtocolMessageType('ConvolutionParameter', (_message.Message,), dict(
DESCRIPTOR = _CONVOLUTIONPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ConvolutionParameter)
))
_sym_db.RegisterMessage(ConvolutionParameter)
CropParameter = _reflection.GeneratedProtocolMessageType('CropParameter', (_message.Message,), dict(
DESCRIPTOR = _CROPPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.CropParameter)
))
_sym_db.RegisterMessage(CropParameter)
CTCDecoderParameter = _reflection.GeneratedProtocolMessageType('CTCDecoderParameter', (_message.Message,), dict(
DESCRIPTOR = _CTCDECODERPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.CTCDecoderParameter)
))
_sym_db.RegisterMessage(CTCDecoderParameter)
CTCLossParameter = _reflection.GeneratedProtocolMessageType('CTCLossParameter', (_message.Message,), dict(
DESCRIPTOR = _CTCLOSSPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.CTCLossParameter)
))
_sym_db.RegisterMessage(CTCLossParameter)
DataParameter = _reflection.GeneratedProtocolMessageType('DataParameter', (_message.Message,), dict(
DESCRIPTOR = _DATAPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.DataParameter)
))
_sym_db.RegisterMessage(DataParameter)
DetectionEvaluateParameter = _reflection.GeneratedProtocolMessageType('DetectionEvaluateParameter', (_message.Message,), dict(
DESCRIPTOR = _DETECTIONEVALUATEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.DetectionEvaluateParameter)
))
_sym_db.RegisterMessage(DetectionEvaluateParameter)
NonMaximumSuppressionParameter = _reflection.GeneratedProtocolMessageType('NonMaximumSuppressionParameter', (_message.Message,), dict(
DESCRIPTOR = _NONMAXIMUMSUPPRESSIONPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.NonMaximumSuppressionParameter)
))
_sym_db.RegisterMessage(NonMaximumSuppressionParameter)
SaveOutputParameter = _reflection.GeneratedProtocolMessageType('SaveOutputParameter', (_message.Message,), dict(
DESCRIPTOR = _SAVEOUTPUTPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.SaveOutputParameter)
))
_sym_db.RegisterMessage(SaveOutputParameter)
DetectionOutputParameter = _reflection.GeneratedProtocolMessageType('DetectionOutputParameter', (_message.Message,), dict(
DESCRIPTOR = _DETECTIONOUTPUTPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.DetectionOutputParameter)
))
_sym_db.RegisterMessage(DetectionOutputParameter)
DropoutParameter = _reflection.GeneratedProtocolMessageType('DropoutParameter', (_message.Message,), dict(
DESCRIPTOR = _DROPOUTPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.DropoutParameter)
))
_sym_db.RegisterMessage(DropoutParameter)
DummyDataParameter = _reflection.GeneratedProtocolMessageType('DummyDataParameter', (_message.Message,), dict(
DESCRIPTOR = _DUMMYDATAPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.DummyDataParameter)
))
_sym_db.RegisterMessage(DummyDataParameter)
EltwiseParameter = _reflection.GeneratedProtocolMessageType('EltwiseParameter', (_message.Message,), dict(
DESCRIPTOR = _ELTWISEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.EltwiseParameter)
))
_sym_db.RegisterMessage(EltwiseParameter)
ELUParameter = _reflection.GeneratedProtocolMessageType('ELUParameter', (_message.Message,), dict(
DESCRIPTOR = _ELUPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ELUParameter)
))
_sym_db.RegisterMessage(ELUParameter)
EmbedParameter = _reflection.GeneratedProtocolMessageType('EmbedParameter', (_message.Message,), dict(
DESCRIPTOR = _EMBEDPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.EmbedParameter)
))
_sym_db.RegisterMessage(EmbedParameter)
ExpParameter = _reflection.GeneratedProtocolMessageType('ExpParameter', (_message.Message,), dict(
DESCRIPTOR = _EXPPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ExpParameter)
))
_sym_db.RegisterMessage(ExpParameter)
FlattenParameter = _reflection.GeneratedProtocolMessageType('FlattenParameter', (_message.Message,), dict(
DESCRIPTOR = _FLATTENPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.FlattenParameter)
))
_sym_db.RegisterMessage(FlattenParameter)
HDF5DataParameter = _reflection.GeneratedProtocolMessageType('HDF5DataParameter', (_message.Message,), dict(
DESCRIPTOR = _HDF5DATAPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.HDF5DataParameter)
))
_sym_db.RegisterMessage(HDF5DataParameter)
HDF5OutputParameter = _reflection.GeneratedProtocolMessageType('HDF5OutputParameter', (_message.Message,), dict(
DESCRIPTOR = _HDF5OUTPUTPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.HDF5OutputParameter)
))
_sym_db.RegisterMessage(HDF5OutputParameter)
HingeLossParameter = _reflection.GeneratedProtocolMessageType('HingeLossParameter', (_message.Message,), dict(
DESCRIPTOR = _HINGELOSSPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.HingeLossParameter)
))
_sym_db.RegisterMessage(HingeLossParameter)
ImageDataParameter = _reflection.GeneratedProtocolMessageType('ImageDataParameter', (_message.Message,), dict(
DESCRIPTOR = _IMAGEDATAPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ImageDataParameter)
))
_sym_db.RegisterMessage(ImageDataParameter)
InfogainLossParameter = _reflection.GeneratedProtocolMessageType('InfogainLossParameter', (_message.Message,), dict(
DESCRIPTOR = _INFOGAINLOSSPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.InfogainLossParameter)
))
_sym_db.RegisterMessage(InfogainLossParameter)
InnerProductParameter = _reflection.GeneratedProtocolMessageType('InnerProductParameter', (_message.Message,), dict(
DESCRIPTOR = _INNERPRODUCTPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.InnerProductParameter)
))
_sym_db.RegisterMessage(InnerProductParameter)
InputParameter = _reflection.GeneratedProtocolMessageType('InputParameter', (_message.Message,), dict(
DESCRIPTOR = _INPUTPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.InputParameter)
))
_sym_db.RegisterMessage(InputParameter)
LogParameter = _reflection.GeneratedProtocolMessageType('LogParameter', (_message.Message,), dict(
DESCRIPTOR = _LOGPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.LogParameter)
))
_sym_db.RegisterMessage(LogParameter)
LRNParameter = _reflection.GeneratedProtocolMessageType('LRNParameter', (_message.Message,), dict(
DESCRIPTOR = _LRNPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.LRNParameter)
))
_sym_db.RegisterMessage(LRNParameter)
MemoryDataParameter = _reflection.GeneratedProtocolMessageType('MemoryDataParameter', (_message.Message,), dict(
DESCRIPTOR = _MEMORYDATAPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.MemoryDataParameter)
))
_sym_db.RegisterMessage(MemoryDataParameter)
MultiBoxLossParameter = _reflection.GeneratedProtocolMessageType('MultiBoxLossParameter', (_message.Message,), dict(
DESCRIPTOR = _MULTIBOXLOSSPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.MultiBoxLossParameter)
))
_sym_db.RegisterMessage(MultiBoxLossParameter)
MVNParameter = _reflection.GeneratedProtocolMessageType('MVNParameter', (_message.Message,), dict(
DESCRIPTOR = _MVNPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.MVNParameter)
))
_sym_db.RegisterMessage(MVNParameter)
NormalizeParameter = _reflection.GeneratedProtocolMessageType('NormalizeParameter', (_message.Message,), dict(
DESCRIPTOR = _NORMALIZEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.NormalizeParameter)
))
_sym_db.RegisterMessage(NormalizeParameter)
ParameterParameter = _reflection.GeneratedProtocolMessageType('ParameterParameter', (_message.Message,), dict(
DESCRIPTOR = _PARAMETERPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ParameterParameter)
))
_sym_db.RegisterMessage(ParameterParameter)
PermuteParameter = _reflection.GeneratedProtocolMessageType('PermuteParameter', (_message.Message,), dict(
DESCRIPTOR = _PERMUTEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.PermuteParameter)
))
_sym_db.RegisterMessage(PermuteParameter)
PoolingParameter = _reflection.GeneratedProtocolMessageType('PoolingParameter', (_message.Message,), dict(
DESCRIPTOR = _POOLINGPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.PoolingParameter)
))
_sym_db.RegisterMessage(PoolingParameter)
PowerParameter = _reflection.GeneratedProtocolMessageType('PowerParameter', (_message.Message,), dict(
DESCRIPTOR = _POWERPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.PowerParameter)
))
_sym_db.RegisterMessage(PowerParameter)
PriorBoxParameter = _reflection.GeneratedProtocolMessageType('PriorBoxParameter', (_message.Message,), dict(
DESCRIPTOR = _PRIORBOXPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.PriorBoxParameter)
))
_sym_db.RegisterMessage(PriorBoxParameter)
PythonParameter = _reflection.GeneratedProtocolMessageType('PythonParameter', (_message.Message,), dict(
DESCRIPTOR = _PYTHONPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.PythonParameter)
))
_sym_db.RegisterMessage(PythonParameter)
RecurrentParameter = _reflection.GeneratedProtocolMessageType('RecurrentParameter', (_message.Message,), dict(
DESCRIPTOR = _RECURRENTPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.RecurrentParameter)
))
_sym_db.RegisterMessage(RecurrentParameter)
ReductionParameter = _reflection.GeneratedProtocolMessageType('ReductionParameter', (_message.Message,), dict(
DESCRIPTOR = _REDUCTIONPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ReductionParameter)
))
_sym_db.RegisterMessage(ReductionParameter)
ReLUParameter = _reflection.GeneratedProtocolMessageType('ReLUParameter', (_message.Message,), dict(
DESCRIPTOR = _RELUPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ReLUParameter)
))
_sym_db.RegisterMessage(ReLUParameter)
ReshapeParameter = _reflection.GeneratedProtocolMessageType('ReshapeParameter', (_message.Message,), dict(
DESCRIPTOR = _RESHAPEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ReshapeParameter)
))
_sym_db.RegisterMessage(ReshapeParameter)
ReverseParameter = _reflection.GeneratedProtocolMessageType('ReverseParameter', (_message.Message,), dict(
DESCRIPTOR = _REVERSEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ReverseParameter)
))
_sym_db.RegisterMessage(ReverseParameter)
ROIPoolingParameter = _reflection.GeneratedProtocolMessageType('ROIPoolingParameter', (_message.Message,), dict(
DESCRIPTOR = _ROIPOOLINGPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ROIPoolingParameter)
))
_sym_db.RegisterMessage(ROIPoolingParameter)
ScaleParameter = _reflection.GeneratedProtocolMessageType('ScaleParameter', (_message.Message,), dict(
DESCRIPTOR = _SCALEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ScaleParameter)
))
_sym_db.RegisterMessage(ScaleParameter)
SigmoidParameter = _reflection.GeneratedProtocolMessageType('SigmoidParameter', (_message.Message,), dict(
DESCRIPTOR = _SIGMOIDPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.SigmoidParameter)
))
_sym_db.RegisterMessage(SigmoidParameter)
SliceParameter = _reflection.GeneratedProtocolMessageType('SliceParameter', (_message.Message,), dict(
DESCRIPTOR = _SLICEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.SliceParameter)
))
_sym_db.RegisterMessage(SliceParameter)
SoftmaxParameter = _reflection.GeneratedProtocolMessageType('SoftmaxParameter', (_message.Message,), dict(
DESCRIPTOR = _SOFTMAXPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.SoftmaxParameter)
))
_sym_db.RegisterMessage(SoftmaxParameter)
TanHParameter = _reflection.GeneratedProtocolMessageType('TanHParameter', (_message.Message,), dict(
DESCRIPTOR = _TANHPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.TanHParameter)
))
_sym_db.RegisterMessage(TanHParameter)
TileParameter = _reflection.GeneratedProtocolMessageType('TileParameter', (_message.Message,), dict(
DESCRIPTOR = _TILEPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.TileParameter)
))
_sym_db.RegisterMessage(TileParameter)
ThresholdParameter = _reflection.GeneratedProtocolMessageType('ThresholdParameter', (_message.Message,), dict(
DESCRIPTOR = _THRESHOLDPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.ThresholdParameter)
))
_sym_db.RegisterMessage(ThresholdParameter)
VideoDataParameter = _reflection.GeneratedProtocolMessageType('VideoDataParameter', (_message.Message,), dict(
DESCRIPTOR = _VIDEODATAPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.VideoDataParameter)
))
_sym_db.RegisterMessage(VideoDataParameter)
WindowDataParameter = _reflection.GeneratedProtocolMessageType('WindowDataParameter', (_message.Message,), dict(
DESCRIPTOR = _WINDOWDATAPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.WindowDataParameter)
))
_sym_db.RegisterMessage(WindowDataParameter)
SPPParameter = _reflection.GeneratedProtocolMessageType('SPPParameter', (_message.Message,), dict(
DESCRIPTOR = _SPPPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.SPPParameter)
))
_sym_db.RegisterMessage(SPPParameter)
V1LayerParameter = _reflection.GeneratedProtocolMessageType('V1LayerParameter', (_message.Message,), dict(
DESCRIPTOR = _V1LAYERPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.V1LayerParameter)
))
_sym_db.RegisterMessage(V1LayerParameter)
V0LayerParameter = _reflection.GeneratedProtocolMessageType('V0LayerParameter', (_message.Message,), dict(
DESCRIPTOR = _V0LAYERPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.V0LayerParameter)
))
_sym_db.RegisterMessage(V0LayerParameter)
PReLUParameter = _reflection.GeneratedProtocolMessageType('PReLUParameter', (_message.Message,), dict(
DESCRIPTOR = _PRELUPARAMETER,
__module__ = 'caffe_pb2'
# @@protoc_insertion_point(class_scope:caffe.PReLUParameter)
))
_sym_db.RegisterMessage(PReLUParameter)
_BLOBSHAPE.fields_by_name['dim'].has_options = True
_BLOBSHAPE.fields_by_name['dim']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
_BLOBPROTO.fields_by_name['data'].has_options = True
_BLOBPROTO.fields_by_name['data']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
_BLOBPROTO.fields_by_name['diff'].has_options = True
_BLOBPROTO.fields_by_name['diff']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
_BLOBPROTO.fields_by_name['double_data'].has_options = True
_BLOBPROTO.fields_by_name['double_data']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
_BLOBPROTO.fields_by_name['double_diff'].has_options = True
_BLOBPROTO.fields_by_name['double_diff']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
] | |
ef6c06660d86b62672a536b74a43851456d18b17 | eba25f559725d80eaeb3f3c6d71e3f28880f2716 | /Final Project/starter_code/part_b/partb.py | 53a9d054774d7363641d169e765150950a86954e | [
"MIT"
] | permissive | Catherine1124k/CSC311_Fall2020 | da72f6ce05e6589e1e0abd1b25e805187e197271 | 8ba16154982fe9975431d895e4c3bff537055312 | refs/heads/master | 2023-02-07T13:27:52.180786 | 2020-12-21T20:23:21 | 2020-12-21T20:23:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,135 | py | from utils import *
from part_a.item_response import *
def load_question_meta(root_dir="../data"):
    """Load question metadata mapping each question to its subjects.

    Reads ``question_meta.csv`` where each row holds a question id and a
    bracketed, comma-separated list of one-based subject ids such as
    ``"[12, 34]"``.

    :param root_dir: directory containing ``question_meta.csv``.  Defaults
        to ``"../data"`` so existing zero-argument calls keep working.
    :return: dict {"question_id": list of int,
                   "subject_id": list of list of int}; subject ids are
        shifted to be zero-based so they can index the subject matrix.
    :raises Exception: if the CSV file does not exist.
    """
    path = os.path.join(root_dir, "question_meta.csv")
    if not os.path.exists(path):
        raise Exception("The specified path {} does not exist.".format(path))
    # Initialize the data.
    data = {
        "question_id": [],
        "subject_id": []
    }
    # Iterate over the rows to fill in the data.
    with open(path, "r") as csv_file:
        reader = csv.reader(csv_file)
        for row in reader:
            try:
                # Parse *both* fields before appending so the two parallel
                # lists can never get out of sync.  (The original appended
                # question_id first, so a malformed subject cell left a
                # dangling question id with no subject entry.)
                question_id = int(row[0])
                raw_subjects = str(row[1])[1:-1]  # strip surrounding "[ ]"
                # Shift the one-based subject ids to zero-based indices.
                subjects = [int(x) - 1 for x in raw_subjects.split(", ")]
            except (ValueError, IndexError):
                # Header row or incomplete row: skip it.
                continue
            data["question_id"].append(question_id)
            data["subject_id"].append(subjects)
    return data
def meta_to_matrix(data, num_questions=1774, num_subjects=387):
    """Transform the question_meta data into a 0/1 indicator matrix.

    Entry ``(q, s)`` is 1 when question ``q`` is tagged with subject ``s``,
    otherwise 0.

    :param data: dict {"question_id": list of int,
                       "subject_id": list of list of int (zero-based)}
    :param num_questions: number of rows; defaults to 1774, the size of
        the course dataset, to stay backward compatible.
    :param num_subjects: number of columns; defaults to 387 for the same
        reason.
    :return: numpy array of shape (num_questions, num_subjects)
    """
    result = np.zeros((num_questions, num_subjects))
    for question_id, subjects in zip(data["question_id"], data["subject_id"]):
        # Fancy indexing sets every tagged subject of this question at once.
        result[question_id, subjects] = 1
    return result
def neg_log_likelihood_b(data, subject, theta, beta, gamma):
    """ Compute the negative log-likelihood of the modified IRT model.

    :param data: A dictionary {user_id: list, question_id: list,
    is_correct: list}
    :param subject: Matrix (question-by-subject 0/1 indicators)
    :param theta: Vector of per-student abilities
    :param beta: Vector of per-question difficulties
    :param gamma: Vector of per-subject difficulties
    :return: float
    """
    users = np.asarray(data["user_id"])
    questions = np.asarray(data["question_id"])
    correct = np.asarray(data["is_correct"])
    # Per-observation logit: ability minus question difficulty minus the
    # summed subject-difficulty contribution of that question's subjects.
    logits = theta[users] - beta[questions] - subject[questions].dot(gamma)
    prob = sigmoid(logits)
    log_likelihood = np.sum(correct * np.log(prob)
                            + (1 - correct) * np.log(1 - prob))
    return -log_likelihood
def update_b(data, subject, lr, theta, beta, gamma):
    """ Perform one round of alternating gradient updates on theta, gamma
    and beta (in that order), each step using the parameters produced by
    the previous one.

    :param data: A dictionary {user_id: list, question_id: list,
    is_correct: list}
    :param subject: Matrix (question-by-subject 0/1 indicators)
    :param lr: float, learning rate
    :param theta: Vector of per-student abilities
    :param beta: Vector of per-question difficulties
    :param gamma: Vector of per-subject difficulties
    :return: tuple of vectors (theta, beta, gamma)
    """
    usr = np.array(data["user_id"])
    q = np.array(data["question_id"])
    c = np.array(data["is_correct"])
    # Residual c - sigmoid(logit) is the per-observation derivative of the
    # log-likelihood w.r.t. the logit; csc_matrix scatters it into a dense
    # (num_users x num_questions) grid so row/column sums give gradients.
    para = theta[usr] - beta[q] - subject[q].dot(gamma)
    values = c - sigmoid(para)
    sparse = csc_matrix((values, (usr, q)), shape=(len(theta), len(beta))).toarray()
    # Ascend the log-likelihood in theta (row sums = per-user residual).
    theta = theta + np.sum(sparse, axis=1) * lr
    # Recompute residuals with the updated theta before touching gamma.
    para = theta[usr] - beta[q] - subject[q].dot(gamma)
    values = c - sigmoid(para)
    sparse = csc_matrix((values, (usr, q)), shape=(len(theta), len(beta))).toarray()
    for i in range(len(gamma)):
        # Project residuals onto the questions tagged with subject i.
        grad = np.sum(sparse.dot(subject[:, i]))
        # The extra 0.012 factor damps the gamma step relative to lr --
        # presumably tuned empirically for this dataset (TODO confirm).
        gamma[i] = gamma[i] - grad * lr * 0.012
    # Recompute residuals again with the updated gamma before updating beta.
    para = theta[usr] - beta[q] - subject[q].dot(gamma)
    values = c - sigmoid(para)
    sparse = csc_matrix((values, (usr, q)), shape=(len(theta), len(beta))).toarray()
    # beta enters the logit with a negative sign, hence the minus here
    # (column sums = per-question residual).
    beta = beta - np.sum(sparse, axis=0) * lr
    return theta, beta, gamma
def irt_b(data, subject, val_data, lr, iterations):
    """ Train the modified IRT model (with per-subject difficulties).

    :param data: A dictionary {user_id: list, question_id: list,
    is_correct: list}
    :param subject: Matrix (question-by-subject 0/1 indicators)
    :param val_data: A dictionary {user_id: list, question_id: list,
    is_correct: list}
    :param lr: float
    :param iterations: int
    :return: (theta, beta, gamma, val_acc_lst, train_loglik, valid_loglik)
    """
    # Parameter sizes match the course dataset: 542 students, 1774
    # questions, 387 subjects.
    theta = np.zeros(542)
    beta = np.zeros(1774)
    gamma = np.zeros(387)

    val_acc_lst = []
    train_loglik = []
    valid_loglik = []

    for _ in range(iterations):
        # Record log-likelihoods and validation accuracy *before* the
        # update so curves start at the initial parameters.
        train_nll = neg_log_likelihood_b(data, subject, theta, beta, gamma)
        valid_nll = neg_log_likelihood_b(val_data, subject, theta, beta, gamma)
        train_loglik.append(-train_nll)
        valid_loglik.append(-valid_nll)
        score = evaluate_b(val_data, subject, theta, beta, gamma)
        val_acc_lst.append(score)
        print("NLLK: {} \t Score: {}".format(train_nll, score))
        theta, beta, gamma = update_b(data, subject, lr, theta, beta, gamma)

    return theta, beta, gamma, val_acc_lst, train_loglik, valid_loglik
def evaluate_b(data, subject, theta, beta, gamma):
    """ Evaluate the modified model on *data* and return the accuracy.

    A response is predicted correct when the modeled probability of a
    correct answer is at least 0.5.

    :param data: A dictionary {user_id: list, question_id: list,
    is_correct: list}
    :param subject: Matrix (question-by-subject 0/1 indicators)
    :param theta: Vector
    :param beta: Vector
    :param gamma: Vector
    :return: float in [0, 1]
    """
    predictions = []
    for user_id, question_id in zip(data["user_id"], data["question_id"]):
        logit = (theta[user_id] - beta[question_id]
                 - subject[question_id].dot(gamma)).sum()
        predictions.append(sigmoid(logit) >= 0.5)
    # Elementwise comparison against the numpy array yields a boolean
    # vector; its sum counts the matches.
    matches = data["is_correct"] == np.array(predictions)
    return np.sum(matches) / len(data["is_correct"])
if __name__ == '__main__':
    # Build the question-by-subject indicator matrix used by the modified
    # IRT model.
    subject_meta = load_question_meta()
    subject_matrix = meta_to_matrix(subject_meta)
    # Standard train/validation/test splits (loaders come from utils).
    train_data = load_train_csv("../data")
    val_data = load_valid_csv("../data")
    test_data = load_public_test_csv("../data")
    print("Train baseline model")
    # Baseline one-parameter IRT (from part_a.item_response): lr=0.01,
    # 70 iterations.
    theta, beta, val_acc, train_ll, valid_ll \
        = irt(train_data, val_data, 0.01, 70)
    print("Train modified model")
    # Modified model adds a per-subject difficulty vector gamma.
    theta_b, beta_b, gamma_b, val_acc_b, train_ll_b, valid_ll_b \
        = irt_b(train_data, subject_matrix, val_data, 0.01, 70)
    # Final accuracies of the baseline model.
    validation_accuracy = evaluate(val_data, theta, beta)
    test_accuracy = evaluate(test_data, theta, beta)
    print("Baseline validation accuracy is: " + str(validation_accuracy))
    print("Baseline test accuracy is: " + str(test_accuracy))
    # Final accuracies of the modified model.
    validation_accuracy = evaluate_b(val_data, subject_matrix, theta_b, beta_b, gamma_b)
    test_accuracy = evaluate_b(test_data, subject_matrix, theta_b, beta_b, gamma_b)
    print("Modified validation accuracy is: " + str(validation_accuracy))
    print("Modified test accuracy is: " + str(test_accuracy))
    # Plot 1: training log-likelihood curves, baseline vs modified.
    plt.plot(train_ll, label="Baseline Training Log-likelihood")
    plt.plot(train_ll_b, label="Modified training Log-likelihood")
    plt.xlabel("Num of Iteration")
    plt.ylabel("Log-likelihood")
    plt.title("Log-likelihood Comparison")
    plt.legend()
    plt.show()
    # Plot 2: validation log-likelihood curves.
    plt.plot(valid_ll, label="Baseline Validation Log-likelihood")
    plt.plot(valid_ll_b, label="Modified validation Log-likelihood")
    plt.xlabel("Num of Iteration")
    plt.ylabel("Log-likelihood")
    plt.title("Log-likelihood Comparison")
    plt.legend()
    plt.show()
    # Plot 3: validation accuracy curves.
    plt.plot(val_acc, label="Baseline Validation Accuracy")
    plt.plot(val_acc_b, label="Modified Validation Accuracy")
    plt.xlabel("Num of Iteration")
    plt.ylabel("Validation Accuracy")
    plt.title("Accuracy Comparison")
    plt.legend()
    plt.show()
| [
"[email protected]"
] | |
387d64c44a25a19d0b26c959227376443767d02a | 1159b2200134468b2ea27c720a12b95b59be1bb0 | /lib/parsers2/toml/toml.py | 9af589d0d131d91ad806ce71c1b334aab3a98386 | [] | no_license | YULIYA2001/Parser | a3ac8e418010159810416a5996d1ff9dd05b18e4 | 23d940bc7f46f99a24cfe7af92de4073854c9454 | refs/heads/main | 2023-06-11T18:23:47.850168 | 2021-06-24T08:06:45 | 2021-06-24T08:06:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,000 | py | import re
from .resources import *
def dumps(obj):
    """Serialize a (possibly nested) dict into a TOML-like string.

    Scalar and array values are emitted as ``key = value`` lines; nested
    dicts become ``[dotted.table]`` sections listed after the scalars of
    their parent table.  Strings are single-quoted, booleans become
    ``true``/``false`` and ``None`` is written as ``null`` (a non-standard
    extension kept so that :func:`loads` can round-trip it).

    :param obj: dictionary to serialize
    :return: TOML-formatted string
    """

    def dumps_simple(seq):
        # Render list/tuple elements as "a, b, c" (no surrounding brackets;
        # the caller adds them).
        parts = []
        for item in seq:
            if isinstance(item, str):
                parts.append("'" + item + "'")
            elif isinstance(item, dict):
                # NOTE: kept from the original implementation -- nested
                # dicts inside arrays are emitted as key = value lines,
                # not as TOML inline tables.
                parts.append(dumps_complex(item))
            elif isinstance(item, (list, tuple)):
                parts.append('[' + dumps_simple(item) + ']')
            elif isinstance(item, bool):
                parts.append('true' if item else 'false')
            elif item is None:
                parts.append('null')
            else:
                parts.append(str(item))
        return ', '.join(parts)

    def dumps_complex(mapping, primary_key=''):
        ans = ''
        # Emit scalar/array entries first so they belong to the current
        # table rather than to a nested [section] emitted later.
        for key, item in mapping.items():
            if isinstance(item, dict):
                continue
            ans += key + ' = '
            if isinstance(item, (list, tuple)):
                ans += '[' + dumps_simple(item) + ']\n'
            elif isinstance(item, str):
                ans += "'" + item + "'\n"
            elif isinstance(item, bool):
                ans += ('true' if item else 'false') + '\n'
            elif item is None:
                ans += 'null\n'
            else:
                ans += str(item) + '\n'
        # Nested dicts become dotted [parent.child] tables.
        for key, item in mapping.items():
            if isinstance(item, dict):
                # BUG FIX: build the child key in a local variable.  The
                # original mutated primary_key and "restored" it with
                # primary_key[:primary_key.rfind('.')], which for a
                # multi-character top-level key (rfind returns -1) chopped
                # off its last character and corrupted the headers of
                # sibling tables (e.g. "[firs.second]").
                child_key = key if primary_key == '' else primary_key + '.' + key
                ans += '\n[' + child_key + ']\n'
                ans += dumps_complex(item, child_key)
        return ans

    return dumps_complex(obj)
def dump(obj, fp):
    """Serialize *obj* as TOML and write it to the file-like object *fp*."""
    fp.write(dumps(obj))
def loads(temp_str):
def find_last_index(str_obj, i, key):
temp_counter = i
brackets = 0
temp_key = str()
while temp_counter < len(str_obj):
while str_obj[temp_counter] != '[':
temp_counter += 1
if not (temp_counter < len(str_obj)):
return len(str_obj)
if str_obj[temp_counter - 1] != '\n':
temp_counter += 1
continue
temp_position = temp_counter
while str_obj[temp_counter] != ']':
temp_counter += 1
temp_key = str_obj[temp_position + 1:temp_counter]
if not temp_key.startswith(key):
return temp_position - 1
else:
temp_counter += 1
continue
return len(str_obj)
def loads_obj(str_obj, prev_key=''):
obj = dict()
brackets = 0
quotes = 0
is_key = True
definition = ""
key = ""
i = 0
temp_i = 0
while i < len(str_obj):
if str_obj[i] == ' ':
i += 1
elif str_obj[i] == '=':
i += 1
is_key = False
elif str_obj[i] == '\'':
i += 1
temp_i = i
while str_obj[temp_i] != '\'':
temp_i += 1
if temp_i >= len(str_obj):
raise ValueError()
obj[key] = str_obj[i:temp_i]
i = temp_i + 2
is_key = True
key = ''
definition = ''
elif str_obj[i] == '[':
brackets = 1
i += 1
temp_i = i
while brackets:
if str_obj[temp_i] == '[':
brackets += 1
elif str_obj[temp_i] == ']':
brackets -= 1
temp_i += 1
if temp_i > len(str_obj):
raise ValueError()
obj[key] = loads_arr(str_obj[i:temp_i - 1])
i = temp_i + 1
is_key = True
key = ''
definition = ''
elif str_obj[i] == '\n':
if str_obj[i - 1] != '\n':
if is_key:
raise ValueError()
if definition == 'true':
obj[key] = True
elif definition == 'false':
obj[key] = False
elif definition == 'null':
obj[key] = None
elif re.fullmatch(FLOAT_REGEX, definition):
obj[key] = float(definition)
elif re.fullmatch(INT_REGEX, definition):
obj[key] = int(definition)
else:
raise ValueError()
is_key = True
key = ''
definition = ''
i += 1
else:
i += 2
if i == len(str_obj):
break
if str_obj[i - 1] != '[':
raise KeyError()
while str_obj[i] != ']':
if str_obj[i] == '[':
raise KeyError()
key += str_obj[i]
i += 1
if i > len(str_obj):
raise ValueError()
i += 1
temp_i = find_last_index(str_obj, i, key)
key = key[key.rfind('.') + 1:]
obj[key] = loads_obj(str_obj[i + 1: temp_i], key)
i = temp_i
key = ''
definition = ''
is_key = True
else:
if is_key:
key += str_obj[i]
else:
definition += str_obj[i]
i += 1
return obj
def loads_arr(str_obj):
    """Parse the comma-separated interior of a TOML-like array.

    Handles nested arrays (recursively), single-quoted strings and the
    scalars true/false/null/float/int.

    NOTE(review): after a quoted string is appended, the following ','
    is processed with an empty ``definition`` and would raise ValueError —
    string elements look supported only as the final element; verify.
    NOTE(review): ``FLOAT_REGEX`` and ``INT_REGEX`` come from the enclosing
    scope.
    """
    obj = list()
    brackets = 0   # nesting depth while locating a sub-array's closing ']'
    definition = ""
    i = 0
    temp_i = 0
    while i < len(str_obj):
        if str_obj[i] != ' ':
            if str_obj[i] == '[':
                # Nested array: find the matching ']' and recurse on the body.
                brackets = 1
                i += 1
                temp_i = i
                while brackets:
                    if str_obj[temp_i] == '[':
                        brackets += 1
                    elif str_obj[temp_i] == ']':
                        brackets -= 1
                    temp_i += 1
                    if temp_i > len(str_obj):
                        raise ValueError()
                obj.append(loads_arr(str_obj[i:temp_i - 1]))
                i = temp_i + 1
                definition = ''
            elif str_obj[i] == '\'':
                # Quoted string element, copied verbatim.
                i += 1
                temp_i = i
                while str_obj[temp_i] != '\'':
                    temp_i += 1
                    if temp_i > len(str_obj):
                        raise ValueError()
                obj.append(str(str_obj[i:temp_i]))
                i = temp_i + 1
            elif str_obj[i] == ',':
                # Element separator: coerce the accumulated scalar text.
                if re.fullmatch(FLOAT_REGEX, definition):
                    obj.append(float(definition))
                elif definition == 'true':
                    obj.append(True)
                elif definition == 'false':
                    obj.append(False)
                elif definition == 'null':
                    obj.append(None)
                elif re.fullmatch(INT_REGEX, definition):
                    obj.append(int(definition))
                else:
                    raise ValueError()
                definition = ''
                i += 1
            else:
                definition += str_obj[i]
                i += 1
        else:
            i += 1
    # Trailing element (no ',' after the last item).
    if definition != '':
        if re.fullmatch(FLOAT_REGEX, definition):
            obj.append(float(definition))
        elif definition == 'true':
            obj.append(True)
        elif definition == 'false':
            obj.append(False)
        elif definition == 'null':
            obj.append(None)
        elif re.fullmatch(INT_REGEX, definition):
            obj.append(int(definition))
        else:
            raise ValueError()
    return obj
return loads_obj(temp_str)
def load(fp):
    """Read all text from the open file object *fp* and parse it with loads()."""
    content = fp.read()
    return loads(content)
| [
"[email protected]"
] | |
10697df0fbb303ec76e0a8afc5734c074597401c | 9c2f620c8827f1e1e5e74505dbbbde8563136ac9 | /_tests/modules/test_rabbitmq.py | b57c84910d8345fdd9f7f5802356da1522676b5a | [
"BSD-2-Clause"
] | permissive | nasqueron/operations | af8c1f33edeec15d2fa798a464e54b405a149c12 | f75aaf610ace599ef163821561078a5f474dcda1 | refs/heads/main | 2023-08-05T09:14:55.241997 | 2023-07-24T20:11:33 | 2023-07-24T20:31:17 | 27,307,453 | 18 | 1 | null | 2023-02-05T20:16:11 | 2014-11-29T16:16:08 | SaltStack | UTF-8 | Python | false | false | 558 | py | #!/usr/bin/env python3
from importlib.machinery import SourceFileLoader
import unittest
salt_test_case = SourceFileLoader("salt_test_case", "salt_test_case.py").load_module()
rabbitmq = SourceFileLoader("rabbitmq", "../_modules/rabbitmq.py").load_module()
class Testinstance(unittest.TestCase, salt_test_case.SaltTestCase):
    """Unit tests for the rabbitmq execution module."""

    def test_compute_password_hash_with_salt(self):
        # Known-answer test: a fixed salt and password must hash reproducibly.
        expected = "kI3GCqW5JLMJa4iX1lo7X4D6XbYqlLgxIs30+P6tENUV2POR"
        actual = rabbitmq._compute_password_hash_with_salt(0x908DC60A, "test12")
        self.assertEqual(expected, actual)
| [
"[email protected]"
] | |
b906b0551f7988f9c076fd83d07000b804048027 | eaa30db47aa017f5f951ba5ddc5be55a205d1aa5 | /serviceAnalysis/migrations/0002_customerservicehistory_miles.py | 2af8bd0fcaf655e8bc5fd095cc4878988633dc90 | [] | no_license | cyobero/websites | 8cc7d6754dc528d7b154daee7368f7c0da41c589 | 5112e3feca4d4650eb9b22b0fe883daf3f56d1e4 | refs/heads/master | 2021-09-02T12:18:04.177603 | 2018-01-02T14:39:24 | 2018-01-02T14:39:24 | 115,207,609 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 472 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-12-23 03:46
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds a nullable integer ``miles`` column to CustomerServiceHistory.

    dependencies = [
        ('serviceAnalysis', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='customerservicehistory',
            name='miles',
            field=models.IntegerField(blank=True, null=True),
        ),
    ]
| [
"[email protected]"
] | |
2d38dcb91332ff3a7c9d232d62866608fb719f06 | ec0b8bfe19b03e9c3bb13d9cfa9bd328fb9ca3f1 | /res/packages/scripts/scripts/client/bwobsolete_helpers/PyGUI/PyGUIBase.py | 834a250a485148a54b8d4bd40344fe93be77ec21 | [] | no_license | webiumsk/WOT-0.9.20.0 | de3d7441c5d442f085c47a89fa58a83f1cd783f2 | 811cb4e1bca271372a1d837a268b6e0e915368bc | refs/heads/master | 2021-01-20T22:11:45.505844 | 2017-08-29T20:11:38 | 2017-08-29T20:11:38 | 101,803,045 | 0 | 1 | null | null | null | null | WINDOWS-1250 | Python | false | false | 5,952 | py | # 2017.08.29 21:44:03 Střední Evropa (letní čas)
# Embedded file name: scripts/client/bwobsolete_helpers/PyGUI/PyGUIBase.py
import BigWorld, GUI
import weakref
from bwdebug import *
from functools import partial
from Listener import Listenable
class PyGUIBase(object, Listenable):
    """Base script class attached to BigWorld PyGUI components.

    Manages the component's active (shown) state, its parent linkage,
    tooltip info, default no-op input handlers, and automatic wiring of
    methods tagged by the PyGUIEvent decorator (via ``_bindEvents``).
    """

    def __init__(self, component=None):
        Listenable.__init__(self)
        self.component = component
        self.eventHandler = None
        self._parent = None
        self.isActive = False

    def active(self, state):
        """Show or hide the component, registering it as a GUI root (no
        parent) or as a child of the parent, and update input focus flags."""
        if state == self.isActive:
            return
        if not self.component:
            return
        self.isActive = state
        if state:
            if not self._parent:
                GUI.addRoot(self.component)
            else:
                self._parent.addChild(self.component)
            self.component.mouseButtonFocus = True
            self.component.moveFocus = True
            self.component.crossFocus = True
        else:
            if not self._parent:
                GUI.delRoot(self.component)
            else:
                self._parent.delChild(self.component)
            self.component.mouseButtonFocus = False
            self.component.moveFocus = False
            self.component.crossFocus = False
        self.listeners.activated(state)

    def _setparent(self, parent):
        # Detach from the old parent (or the root set) while active, switch
        # the weak parent reference, then reattach under the new parent.
        if self.isActive:
            if not self._parent:
                GUI.delRoot(self.component)
            else:
                self._parent.delChild(self.component)
        if parent:
            # weakref.proxy avoids a child->parent reference cycle.
            self._parent = weakref.proxy(parent)
        else:
            self._parent = parent
        if self.isActive:
            if not self._parent:
                GUI.addRoot(self.component)
            else:
                self._parent.addChild(self.component)

    def _getparent(self):
        return self._parent

    parent = property(_getparent, _setparent)

    def getWindow(self):
        """Return the enclosing Window script, walking up the component
        hierarchy; None if this script is not inside a Window."""
        import Window
        if isinstance(self, Window.Window):
            return self
        elif self.component.parent and self.component.parent.script:
            return self.component.parent.script.getWindow()
        else:
            return None

    def toggleActive(self):
        self.active(not self.isActive)

    def setEventHandler(self, eh):
        self.eventHandler = eh

    def doLayout(self, parent):
        # Recursively lay out all child component scripts.
        for name, child in self.component.children:
            child.script.doLayout(self)

    def setToolTipInfo(self, toolTipInfo):
        self.toolTipInfo = toolTipInfo

    def removeToolTipInfo(self):
        # BUGFIX: the original (decompiled) code called
        # ``hasattr(self, toolTipInfo)`` with an unquoted name, which raised
        # NameError at runtime; the attribute name must be a string.
        if hasattr(self, 'toolTipInfo'):
            del self.toolTipInfo

    # --- default no-op input handlers; subclasses override as needed -----

    def focus(self, state):
        pass

    def mouseButtonFocus(self, state):
        pass

    def handleInputLangChangeEvent(self):
        return False

    def handleKeyEvent(self, event):
        return False

    def handleMouseEvent(self, comp, event):
        return False

    def handleMouseButtonEvent(self, comp, event):
        # Notify the enclosing window's listeners, but do not consume the event.
        window = self.getWindow()
        if window:
            window.listeners.windowClicked()
        return False

    def handleMouseClickEvent(self, component):
        return False

    def handleMouseEnterEvent(self, comp):
        # Arm the tooltip for this component if tooltip info was configured.
        if getattr(self, 'toolTipInfo', None):
            import ToolTip
            ToolTip.ToolTipManager.instance.setupToolTip(self.component, self.toolTipInfo)
        return False

    def handleMouseLeaveEvent(self, comp):
        return False

    def handleAxisEvent(self, event):
        return False

    def handleDragStartEvent(self, comp):
        return False

    def handleDragStopEvent(self, comp):
        return False

    def handleDragEnterEvent(self, comp, dragged):
        return False

    def handleDragLeaveEvent(self, comp, dragged):
        return False

    def handleDropEvent(self, comp, dropped):
        return False

    def handleIMEEvent(self, event):
        return False

    def onLoad(self, dataSection):
        # Restore tooltip configuration persisted by onSave.
        if dataSection.has_key('toolTipInfo'):
            import ToolTip
            self.toolTipInfo = ToolTip.ToolTipInfo()
            self.toolTipInfo.onLoad(dataSection._toolTipInfo)

    def onSave(self, dataSection):
        if hasattr(self, 'toolTipInfo') and self.toolTipInfo is not None:
            toolTipInfoSection = dataSection.createSection('toolTipInfo')
            self.toolTipInfo.onSave(toolTipInfoSection)

    def onBound(self):
        """Called once the component hierarchy is bound: make sure every
        child has a PyGUIBase script, then wire decorated event handlers."""
        for name, child in self.component.children:
            if not child.script:
                child.script = PyGUIBase(child)
            # Cleaned decompiler artifact: was
            # ``raise isinstance(child.script, PyGUIBase) or AssertionError``.
            assert isinstance(child.script, PyGUIBase)
        self._bindEvents(self.__class__)

    def _bindEvents(self, cls):
        """Bind methods tagged with ``_PyGUIEventHandler`` metadata to the
        named sub-components, recursing through base classes.

        BUGFIX: the decompiled original executed ``continue``
        unconditionally after the error log, so the actual
        ``setattr(component.script, ...)`` binding was unreachable; the
        intended "log and skip only when the component is missing" flow is
        restored here.
        """
        for name, function in cls.__dict__.iteritems():
            if hasattr(function, '_PyGUIEventHandler'):
                for componentName, eventName, args, kargs in function._PyGUIEventHandler:
                    assert callable(function)
                    # Resolve a dotted component path relative to self.component.
                    component = self.component
                    for part in componentName.split('.'):
                        component = getattr(component, part, None)
                        if component is None:
                            break
                    if component is None:
                        ERROR_MSG("PyGUIEvent: '%s' has no component named '%s'." % (str(self), componentName))
                        continue
                    boundMethod = getattr(self, function.__name__)
                    setattr(component.script, eventName, partial(boundMethod, *args, **kargs))
        for base in cls.__bases__:
            self._bindEvents(base)
# okay decompyling c:\Users\PC\wotmods\files\originals\res\packages\scripts\scripts\client\bwobsolete_helpers\PyGUI\PyGUIBase.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2017.08.29 21:44:03 Střední Evropa (letní čas)
| [
"[email protected]"
] | |
4c6f30773778755b28abf02e091617900053df6f | 29cc3c7008d6bd1ca6cd7e9b377ab569a31994cc | /PythonAulas/aula11.py | 9a38db459f8305e273a431c297b32407b1a68f73 | [] | no_license | leclm/CeV-Python | d84a5b4f17c05480c62debca0aaee511ec1c77f4 | ac37b8700d82ba9a61108fb5908e7e79a07565d2 | refs/heads/master | 2021-05-20T08:13:11.406957 | 2020-04-08T13:57:31 | 2020-04-08T13:57:31 | 252,187,554 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,071 | py | print('\033[32mOlá mundo!') # vai ficar com a letra verde
print('\033[36;41mOlá mundo!') # vai ficar com a letra ciano e o fundo vermelho até o fim da linha
print('\033[1;34;43mOlá mundo!\033[m') # vai ficar sublinhado, com a letra azul e o fundo amarelo até o !
print('\033[4;30;46mOlá mundo!\033[m') # vai ficar em negrito, com a letra roxa e o fundo ciano até o !
print('\033[0;33;44mOlá Mundo!\033[m') # vai ficar com a letra amarela e o fundo azul
print('\033[7;33;44mOlá Mundo!\033[m') # vai ficar com a letra azul e o fundo amarelo, inverteu por causa do 7
print('Os valores são \033[33m3\033[m e \033[35m5\033[m!!')
nome = 'Letícia'
print('Muito prazer em te conhecer {}{}{}!!'.format('\033[4;35m', nome, '\033[m'))
name = 'Eduardo'
cores = {'limpa': '\033[m',
'amareloesub': '\033[4;33m',
'azulenegrito': '\033[1;34m'}
print('Muito prazer em te conhecer {}{}{}!'.format(cores['amareloesub'], name, cores['limpa']))
print('Muito prazer em te conhecer {}{}{}!'.format(cores['azulenegrito'], nome, cores['limpa']))
| [
"[email protected]"
] | |
24e24c1bb50cbbd0c3f4af14a06c6dcf353f6fe4 | 425db5a849281d333e68c26a26678e7c8ce11b66 | /LeetCodeSolutions/LeetCode_0252.py | ccf1daf59fe8c44bc1f9575209b20c8851cafb90 | [
"MIT"
] | permissive | lih627/python-algorithm-templates | e8092b327a02506086414df41bbfb2af5d6b06dc | a61fd583e33a769b44ab758990625d3381793768 | refs/heads/master | 2021-07-23T17:10:43.814639 | 2021-01-21T17:14:55 | 2021-01-21T17:14:55 | 238,456,498 | 29 | 8 | null | null | null | null | UTF-8 | Python | false | false | 361 | py | from typing import List
class Solution:
    def canAttendMeetings(self, intervals: List[List[int]]) -> bool:
        """Return True iff no two meetings in *intervals* overlap.

        Sorts *intervals* in place by start time; afterwards it suffices to
        check each adjacent pair, since sorted non-overlapping intervals have
        monotonically increasing end times.
        """
        if not intervals:
            return True
        intervals.sort()
        return all(earlier[1] <= later[0] for earlier, later in zip(intervals, intervals[1:]))
| [
"[email protected]"
] | |
9c006a8120bb60c9b28d807c9abeaa3c113ba668 | b7795d5154005cd12afaac9d099be43a21b2e143 | /tests/ignite/engine/test_deterministic.py | 9e567d1633052d799c86a0ae4fd75af867eb60c0 | [
"BSD-3-Clause"
] | permissive | jonrbates/ignite | 60361646ecbcf8659ced60460fe3ff2cb035ff74 | 15eeb8791a2e0c2f55265e1f6b91f91dc35286c5 | refs/heads/master | 2022-06-16T14:58:25.608252 | 2020-05-10T13:43:58 | 2020-05-10T13:43:58 | 262,943,590 | 0 | 0 | BSD-3-Clause | 2020-05-11T05:04:56 | 2020-05-11T05:04:56 | null | UTF-8 | Python | false | false | 29,013 | py | import os
import random
from unittest.mock import patch
import numpy as np
import pytest
import torch
import torch.nn as nn
from ignite.engine import Events
from ignite.engine.deterministic import (
DeterministicEngine,
ReproducibleBatchSampler,
keep_random_state,
update_dataloader,
)
from ignite.utils import manual_seed
from tests.ignite.engine import BatchChecker, setup_sampler
def test_update_dataloader():
    """update_dataloader + ReproducibleBatchSampler must replay exactly the
    batch sequence an untouched DataLoader produces under the same seed."""

    def _test(sampler_type=None):
        num_epochs = 3
        batch_size = 4
        num_iters = 17
        data = torch.randint(0, 1000, size=(num_iters * batch_size,))
        num_workers = 4

        # Reference run with a plain DataLoader.
        sampler = setup_sampler(sampler_type, num_iters, batch_size)
        dataloader = torch.utils.data.DataLoader(
            data,
            batch_size=batch_size,
            num_workers=num_workers,
            pin_memory=False,
            sampler=sampler,
            drop_last=True,
            shuffle=sampler is None,
        )
        torch.manual_seed(12)
        seen_batches = []
        for i in range(num_epochs):
            t = []
            if sampler_type == "distributed":
                sampler.set_epoch(i)
            for b in dataloader:
                t.append(b)
            seen_batches.append(t)

        # Same setup wrapped with ReproducibleBatchSampler.
        sampler = setup_sampler(sampler_type, num_iters, batch_size)
        dataloader = torch.utils.data.DataLoader(
            data,
            batch_size=batch_size,
            num_workers=num_workers,
            pin_memory=False,
            sampler=sampler,
            drop_last=True,
            shuffle=sampler is None,
        )
        batch_sampler = dataloader.batch_sampler
        new_dataloader = update_dataloader(dataloader, ReproducibleBatchSampler(batch_sampler))

        torch.manual_seed(12)
        new_batches = []
        for i in range(num_epochs):
            t = []
            if sampler_type == "distributed":
                sampler.set_epoch(i)
            for b in new_dataloader:
                t.append(b)
            new_batches.append(t)

        # Epoch-by-epoch, batch-by-batch identity.
        for i in range(num_epochs):
            assert all([(b1 == b2).all() for b1, b2 in zip(seen_batches[i], new_batches[i])])

    _test()
    _test("weighted")
    _test("distributed")
def test_reproducible_batch_sampler_wrong_input():
    """A non-BatchSampler argument must be rejected with a TypeError."""
    expected_msg = r"Argument batch_sampler should be torch.utils.data.sampler.BatchSampler"
    with pytest.raises(TypeError, match=expected_msg):
        ReproducibleBatchSampler("abc")
def test_reproducible_batch_sampler():
    """Batches differ across epochs (different seeds), yet each epoch can be
    replayed exactly by re-seeding and rebuilding the wrapped dataloader."""
    import torch
    from torch.utils.data import DataLoader

    data = list(range(100))
    dataloader = DataLoader(data, batch_size=12, num_workers=0, shuffle=True, drop_last=True)

    torch.manual_seed(12 + 0)
    dataloader_ = update_dataloader(dataloader, ReproducibleBatchSampler(dataloader.batch_sampler))

    seen_batches = []
    num_epochs = 3
    for i in range(num_epochs):
        t = []
        for b in dataloader_:
            t.append(b)
        seen_batches.append(t)
        # Reseed for the next epoch so every epoch gets a distinct shuffle.
        torch.manual_seed(12 + i + 1)

    # No two epochs may produce an identical batch sequence.
    for i in range(num_epochs - 1):
        for j in range(i + 1, num_epochs):
            assert not all([(b1 == b2).all() for b1, b2 in zip(seen_batches[i], seen_batches[j])])

    # Replaying any epoch under its seed reproduces its batches exactly.
    for resume_epoch in range(num_epochs):
        torch.manual_seed(12 + resume_epoch)
        dataloader_ = update_dataloader(dataloader, ReproducibleBatchSampler(dataloader.batch_sampler))
        resumed_seen_batches = []
        for b in dataloader_:
            resumed_seen_batches.append(b)

        assert all([(b1 == b2).all() for b1, b2 in zip(seen_batches[resume_epoch], resumed_seen_batches)])
def _test_keep_random_state(with_numpy):
    """Helper: a @keep_random_state-wrapped handler must leave all RNG
    streams (random, torch and optionally numpy) untouched for the caller."""
    manual_seed(54)
    true_values = []
    for _ in range(5):
        t = [
            torch.tensor([random.random()]),
            torch.rand(2),
        ]
        if with_numpy:
            t.append(torch.from_numpy(np.random.rand(2)))
        true_values.append(t)

    @keep_random_state
    def user_handler():
        # Deliberately reseeds and draws from every RNG; the decorator must
        # restore the previous state afterwards.
        manual_seed(22)
        _ = [
            random.random(),
            torch.rand(2),
        ]
        if with_numpy:
            _ = np.random.rand(2)

    manual_seed(54)
    res_values = []
    for _ in range(5):
        r = [
            torch.tensor([random.random()]),
            torch.rand(2),
        ]
        if with_numpy:
            r.append(torch.from_numpy(np.random.rand(2)))
        res_values.append(r)
        user_handler()

    # Interleaving the handler must not change the drawn sequences.
    for a, b in zip(true_values, res_values):
        for i, j in zip(a, b):
            assert (i == j).all()
def test_keep_random_state():
    """RNG state preservation with numpy available."""
    numpy_enabled = True
    _test_keep_random_state(with_numpy=numpy_enabled)
def test_keep_random_state_without_numpy():
    """Same RNG-state check with the numpy import blocked via sys.modules."""
    hidden_modules = {"numpy": None}
    with patch.dict("sys.modules", hidden_modules):
        _test_keep_random_state(with_numpy=False)
def test_strict_resume_from_iter():
    """Resuming a DeterministicEngine from an arbitrary iteration must replay
    exactly the remaining batches of a fixed (non-random) data sequence."""

    def _test(epoch_length=None):
        max_epochs = 5
        num_iters = 21
        torch.manual_seed(0)
        data = torch.randint(0, 1000, size=(num_iters,))

        if epoch_length is None:
            epoch_length = num_iters

        for resume_iteration in range(2, min(num_iters * max_epochs, epoch_length * max_epochs), 4):
            print("\n----", resume_iteration, epoch_length)

            # BatchChecker starts counting at the resume point.
            batch_checker = BatchChecker(data, init_counter=resume_iteration)

            def update_fn(_, batch):
                assert batch_checker.check(batch), "{} | {}: {} vs {}".format(
                    resume_iteration, batch_checker.counter, batch_checker.true_batch, batch
                )

            engine = DeterministicEngine(update_fn)

            @engine.on(Events.EPOCH_COMPLETED)
            def check_iteration(_):
                assert engine.state.iteration == batch_checker.counter

            resume_state_dict = dict(
                iteration=resume_iteration, max_epochs=max_epochs, epoch_length=epoch_length, rng_states=None
            )
            engine.load_state_dict(resume_state_dict)
            engine.run(data)
            assert engine.state.epoch == max_epochs
            assert engine.state.iteration == epoch_length * max_epochs

    _test()
    _test(60)  # epoch_length larger than the dataset
    _test(15)  # epoch_length smaller than the dataset
def test_strict_resume_from_epoch():
    """Resuming a DeterministicEngine from an epoch boundary must replay
    exactly the remaining batches of a fixed (non-random) data sequence."""

    def _test(epoch_length=None):
        max_epochs = 10
        num_iters = 21
        torch.manual_seed(0)
        data = torch.randint(0, 1000, size=(num_iters,))

        if epoch_length is None:
            epoch_length = num_iters

        for resume_epoch in range(1, max_epochs):
            # Counter starts at the first iteration of the resumed epoch.
            batch_checker = BatchChecker(data, init_counter=resume_epoch * epoch_length)

            def update_fn(_, batch):
                assert batch_checker.check(batch), "{} | {}: {} vs {}".format(
                    resume_epoch, batch_checker.counter, batch_checker.true_batch, batch
                )

            engine = DeterministicEngine(update_fn)
            resume_state_dict = dict(
                epoch=resume_epoch, max_epochs=max_epochs, epoch_length=epoch_length, rng_states=None
            )
            engine.load_state_dict(resume_state_dict)
            engine.run(data)
            assert engine.state.epoch == max_epochs
            assert engine.state.iteration == epoch_length * max_epochs

    _test()
    _test(60)
    _test(15)
def _test_resume_random_dataloader_from_epoch(device, _setup_sampler, sampler_type=None):
    """Helper: record batches from a full seeded run over a random DataLoader,
    then resume from each epoch boundary and check the replay matches."""

    def _test(epoch_length=None):
        max_epochs = 5
        batch_size = 4
        num_iters = 21
        torch.manual_seed(0)
        data = torch.randint(0, 1000, size=(num_iters * batch_size,))

        if epoch_length is None:
            epoch_length = num_iters

        for resume_epoch in range(1, max_epochs):
            for num_workers in [0, 4]:
                # Reference run: collect every batch seen.
                sampler = _setup_sampler(sampler_type, num_iters, batch_size)
                orig_dataloader = torch.utils.data.DataLoader(
                    data,
                    batch_size=batch_size,
                    num_workers=num_workers,
                    pin_memory="cuda" in device,
                    sampler=sampler,
                    drop_last=True,
                    shuffle=sampler is None,
                )

                seen_batchs = []

                def update_fn(_, batch):
                    batch_to_device = batch.to(device)
                    seen_batchs.append(batch)

                engine = DeterministicEngine(update_fn)

                if sampler_type == "distributed":

                    @engine.on(Events.EPOCH_STARTED)
                    def _(engine):
                        sampler.set_epoch(engine.state.epoch - 1)

                torch.manual_seed(87)
                engine.run(
                    orig_dataloader, max_epochs=max_epochs, epoch_length=epoch_length,
                )

                # Resumed run: must reproduce the recorded batches.
                batch_checker = BatchChecker(seen_batchs, init_counter=resume_epoch * epoch_length)

                sampler = _setup_sampler(sampler_type, num_iters, batch_size)
                resume_dataloader = torch.utils.data.DataLoader(
                    data,
                    batch_size=batch_size,
                    num_workers=num_workers,
                    pin_memory="cuda" in device,
                    sampler=sampler,
                    drop_last=True,
                    shuffle=sampler is None,
                )

                def update_fn(_, batch):
                    batch_to_device = batch.to(device)
                    assert batch_checker.check(batch), "{} {} | {}: {} vs {}".format(
                        num_workers, resume_epoch, batch_checker.counter, batch_checker.true_batch, batch
                    )

                engine = DeterministicEngine(update_fn)

                if sampler_type == "distributed":

                    @engine.on(Events.EPOCH_STARTED)
                    def _(engine):
                        sampler.set_epoch(engine.state.epoch - 1)

                resume_state_dict = dict(
                    epoch=resume_epoch, max_epochs=max_epochs, epoch_length=epoch_length, rng_states=None
                )
                engine.load_state_dict(resume_state_dict)
                torch.manual_seed(87)
                engine.run(resume_dataloader)
                assert engine.state.epoch == max_epochs
                assert engine.state.iteration == epoch_length * max_epochs

    _test()
    if sampler_type != "distributed":
        _test(60)
        _test(15)
def test_resume_random_dataloader_from_epoch():
    """Run the epoch-resume dataloader scenario for every sampler type."""
    for kind in (None, "weighted", "distributed"):
        _test_resume_random_dataloader_from_epoch("cpu", setup_sampler, sampler_type=kind)
class AugmentedData:
    """Dataset wrapper that optionally perturbs each item with random noise.

    With ``enabled`` the sample is shifted by ``randint(-100, 100) * 0.01``
    (i.e. less than +/-1.0); otherwise items pass through unchanged.
    """

    def __init__(self, data, enabled=True):
        self.data = data
        self.enabled = enabled

    def __getitem__(self, i):
        sample = self.data[i]
        if self.enabled:
            noise = torch.randint_like(sample, -100, 100)
        else:
            noise = 0.0
        return sample + noise * 0.01

    def __len__(self):
        return len(self.data)
def _test_resume_random_dataloader_from_iter(device, _setup_sampler, sampler_type=None):
    """Helper: record batches from a full seeded run over a random DataLoader,
    then resume from arbitrary iterations and check the replay matches."""

    def _test(epoch_length=None):
        max_epochs = 3
        batch_size = 4
        num_iters = 17
        torch.manual_seed(0)
        data = torch.randint(0, 1000, size=(num_iters * batch_size,))

        if epoch_length is None:
            epoch_length = num_iters

        for resume_iteration in range(2, min(num_iters * max_epochs, epoch_length * max_epochs), 7):
            for num_workers in [0, 4]:
                # Reference run: collect every batch seen.
                sampler = _setup_sampler(sampler_type, num_iters, batch_size)
                orig_dataloader = torch.utils.data.DataLoader(
                    data,
                    batch_size=batch_size,
                    num_workers=num_workers,
                    pin_memory="cuda" in device,
                    sampler=sampler,
                    drop_last=True,
                    shuffle=sampler is None,
                )

                seen_batchs = []

                def update_fn(_, batch):
                    batch_to_device = batch.to(device)
                    seen_batchs.append(batch)

                engine = DeterministicEngine(update_fn)

                if sampler_type == "distributed":

                    @engine.on(Events.EPOCH_STARTED)
                    def _(engine):
                        sampler.set_epoch(engine.state.epoch)

                torch.manual_seed(12)
                engine.run(
                    orig_dataloader, max_epochs=max_epochs, epoch_length=epoch_length,
                )

                # Resumed run: must reproduce the recorded batches.
                batch_checker = BatchChecker(seen_batchs, init_counter=resume_iteration)

                sampler = _setup_sampler(sampler_type, num_iters, batch_size)
                resume_dataloader = torch.utils.data.DataLoader(
                    data,
                    batch_size=batch_size,
                    num_workers=num_workers,
                    pin_memory="cuda" in device,
                    sampler=sampler,
                    drop_last=True,
                    shuffle=sampler is None,
                )

                def update_fn(_, batch):
                    batch_to_device = batch.to(device)
                    assert batch_checker.check(batch), "{} {} | {}: {} vs {}".format(
                        num_workers, resume_iteration, batch_checker.counter, batch_checker.true_batch, batch
                    )

                engine = DeterministicEngine(update_fn)

                if sampler_type == "distributed":

                    @engine.on(Events.EPOCH_STARTED)
                    def _(engine):
                        sampler.set_epoch(engine.state.epoch)

                resume_state_dict = dict(
                    iteration=resume_iteration, max_epochs=max_epochs, epoch_length=epoch_length, rng_states=None
                )
                engine.load_state_dict(resume_state_dict)
                torch.manual_seed(12)
                engine.run(resume_dataloader)
                assert engine.state.epoch == max_epochs
                assert engine.state.iteration == epoch_length * max_epochs, "{}, {} | {} vs {}".format(
                    num_workers, resume_iteration, engine.state.iteration, epoch_length * max_epochs
                )

    _test()
    if sampler_type != "distributed":
        _test(40)
        _test(11)
def test_resume_random_dataloader_from_iter():
    """Run the iteration-resume dataloader scenario for every sampler type."""
    for kind in (None, "weighted", "distributed"):
        _test_resume_random_dataloader_from_iter("cpu", setup_sampler, sampler_type=kind)
def _test_resume_random_data_iterator_from_epoch(device):
    """Helper: with a random infinite iterator, resuming from an epoch
    boundary under the same seed must replay the recorded batches."""

    def _test(epoch_length=None):
        max_epochs = 5
        batch_size = 4
        num_iters = 21

        def infinite_data_iterator():
            while True:
                for _ in range(num_iters):
                    data = torch.randint(0, 1000, size=(batch_size,), device=device)
                    yield data

        if epoch_length is None:
            epoch_length = num_iters

        for resume_epoch in range(1, max_epochs):
            seen_batchs = []

            def update_fn(_, batch):
                # if there is a random op when using data batch etc, we can not resume correctly
                # torch.rand(1)
                seen_batchs.append(batch)

            engine = DeterministicEngine(update_fn)
            torch.manual_seed(121)
            engine.run(
                infinite_data_iterator(), max_epochs=max_epochs, epoch_length=epoch_length,
            )

            batch_checker = BatchChecker(seen_batchs, init_counter=resume_epoch * epoch_length)

            def update_fn(_, batch):
                assert batch_checker.check(batch), "{} | {}: {} vs {}".format(
                    resume_epoch, batch_checker.counter, batch_checker.true_batch, batch
                )

            engine = DeterministicEngine(update_fn)
            resume_state_dict = dict(
                epoch=resume_epoch, max_epochs=max_epochs, epoch_length=epoch_length, rng_states=None
            )
            engine.load_state_dict(resume_state_dict)
            torch.manual_seed(121)
            engine.run(infinite_data_iterator())
            assert engine.state.epoch == max_epochs
            assert engine.state.iteration == epoch_length * max_epochs

    _test()
    _test(60)
    _test(15)
def test_resume_random_data_iterator_from_epoch():
    """CPU variant of the random-iterator epoch-resume scenario."""
    device = "cpu"
    _test_resume_random_data_iterator_from_epoch(device)
def _test_resume_random_data_iterator_from_iter(device):
    """Helper: with a random infinite iterator, resuming from an arbitrary
    iteration under the same seed must replay the recorded batches."""

    def _test(epoch_length=None):
        max_epochs = 3
        batch_size = 4
        num_iters = 17

        def infinite_data_iterator():
            while True:
                for _ in range(num_iters):
                    data = torch.randint(0, 1000, size=(batch_size,), device=device)
                    yield data

        if epoch_length is None:
            epoch_length = num_iters

        for resume_iteration in range(1, min(num_iters * max_epochs, epoch_length * max_epochs), 7):
            seen_batchs = []

            def update_fn(_, batch):
                seen_batchs.append(batch)

            engine = DeterministicEngine(update_fn)
            torch.manual_seed(24)
            engine.run(
                infinite_data_iterator(), max_epochs=max_epochs, epoch_length=epoch_length,
            )

            batch_checker = BatchChecker(seen_batchs, init_counter=resume_iteration)

            def update_fn(_, batch):
                assert batch_checker.check(batch), "{} | {}: {} vs {}".format(
                    resume_iteration, batch_checker.counter, batch_checker.true_batch, batch
                )

            engine = DeterministicEngine(update_fn)
            resume_state_dict = dict(
                iteration=resume_iteration, max_epochs=max_epochs, epoch_length=epoch_length, rng_states=None
            )
            engine.load_state_dict(resume_state_dict)
            torch.manual_seed(24)
            engine.run(infinite_data_iterator())
            assert engine.state.epoch == max_epochs
            assert engine.state.iteration == epoch_length * max_epochs, "{} | {} vs {}".format(
                resume_iteration, engine.state.iteration, epoch_length * max_epochs
            )

    _test()
    _test(50)
    _test(11)
def test_resume_random_data_iterator_from_iter():
    """CPU variant of the random-iterator iteration-resume scenario."""
    device = "cpu"
    _test_resume_random_data_iterator_from_iter(device)
@pytest.mark.distributed
@pytest.mark.skipif(torch.cuda.device_count() < 1, reason="Skip if no GPU")
def test_distrib_gpu(distributed_context_single_node_nccl):
    """All resume scenarios on a single-node NCCL distributed setup (GPU)."""
    device = "cuda:{}".format(distributed_context_single_node_nccl["local_rank"])
    _test_resume_random_data_iterator_from_iter(device)
    _test_resume_random_data_iterator_from_epoch(device)
    _test_resume_random_dataloader_from_iter(device, setup_sampler)
    _test_resume_random_dataloader_from_epoch(device, setup_sampler)
@pytest.mark.distributed
def test_distrib_cpu(distributed_context_single_node_gloo):
    """All resume scenarios on a single-node gloo distributed setup (CPU)."""
    device = "cpu"
    _test_resume_random_data_iterator_from_iter(device)
    _test_resume_random_data_iterator_from_epoch(device)
    _test_resume_random_dataloader_from_iter(device, setup_sampler)
    _test_resume_random_dataloader_from_epoch(device, setup_sampler)
@pytest.mark.multinode_distributed
@pytest.mark.skipif("MULTINODE_DISTRIB" not in os.environ, reason="Skip if not multi-node distributed")
def test_multinode_distrib_cpu(distributed_context_multi_node_gloo):
    """All resume scenarios on a multi-node gloo distributed setup (CPU)."""
    device = "cpu"
    _test_resume_random_data_iterator_from_iter(device)
    _test_resume_random_data_iterator_from_epoch(device)
    _test_resume_random_dataloader_from_iter(device, setup_sampler)
    _test_resume_random_dataloader_from_epoch(device, setup_sampler)
@pytest.mark.multinode_distributed
@pytest.mark.skipif("GPU_MULTINODE_DISTRIB" not in os.environ, reason="Skip if not multi-node distributed")
def test_multinode_distrib_gpu(distributed_context_multi_node_nccl):
    """All resume scenarios on a multi-node NCCL distributed setup (GPU)."""
    device = "cuda:{}".format(distributed_context_multi_node_nccl["local_rank"])
    _test_resume_random_data_iterator_from_iter(device)
    _test_resume_random_data_iterator_from_epoch(device)
    _test_resume_random_dataloader_from_iter(device, setup_sampler)
    _test_resume_random_dataloader_from_epoch(device, setup_sampler)
def test_concepts_snippet_resume():
    """Documentation snippet: resuming from epoch 1 under the same seed
    reproduces the second epoch's batches of the original run."""
    import torch
    from torch.utils.data import DataLoader
    from ignite.engine import DeterministicEngine, Events
    from ignite.utils import manual_seed

    seen_batches = []
    manual_seed(seed=15)

    def random_train_data_loader(size):
        data = torch.arange(0, size)
        return DataLoader(data, batch_size=4, shuffle=True)

    def print_train_data(engine, batch):
        i = engine.state.iteration
        e = engine.state.epoch
        print("train", e, i, batch.tolist())
        seen_batches.append(batch)

    trainer = DeterministicEngine(print_train_data)

    print("Original Run")
    manual_seed(56)
    trainer.run(random_train_data_loader(40), max_epochs=2, epoch_length=5)

    original_batches = list(seen_batches)
    seen_batches = []

    print("Resumed Run")
    trainer.load_state_dict({"epoch": 1, "epoch_length": 5, "max_epochs": 2, "rng_states": None})
    manual_seed(56)
    trainer.run(random_train_data_loader(40))

    resumed_batches = list(seen_batches)
    seen_batches = []
    # Resumed batches must equal the original run's second epoch (index 5+).
    for b1, b2 in zip(original_batches[5:], resumed_batches):
        assert (b1 == b2).all()
def test_concepts_snippet_warning():
    """Documentation snippet: a handler that reseeds the global RNG mid-run
    (the documented anti-pattern a user might hit)."""

    def random_train_data_generator():
        while True:
            yield torch.randint(0, 100, size=(1,))

    def print_train_data(engine, batch):
        i = engine.state.iteration
        e = engine.state.epoch
        print("train", e, i, batch.tolist())

    trainer = DeterministicEngine(print_train_data)

    @trainer.on(Events.ITERATION_COMPLETED(every=3))
    def user_handler(_):
        # handler synchronizes the random state
        torch.manual_seed(12)
        a = torch.rand(1)

    trainer.run(random_train_data_generator(), max_epochs=3, epoch_length=5)
def _test_gradients_on_resume(
    dirname, device, with_dropout=True, with_dataaugs=True, data_size=24, batch_size=4, save_iter=None, save_epoch=None
):
    """Helper: train a small CNN, checkpoint at ``save_iter``/``save_epoch``,
    resume from the checkpoint, and assert that input statistics, gradient
    norms and weight norms around the save point match the original run."""
    debug = True

    from torch.utils.data import DataLoader
    from torch.optim import SGD

    def random_train_data_loader(size):
        d = AugmentedData(torch.rand(size, 3, 32, 32), enabled=with_dataaugs)
        return DataLoader(d, batch_size=batch_size, shuffle=True, num_workers=4)

    def _train(save_iter=None, save_epoch=None, sd=None):
        w_norms = []
        grad_norms = []
        data = []
        chkpt = []

        manual_seed(12)
        arch = [
            nn.Conv2d(3, 10, 3),
            nn.ReLU(),
            nn.Conv2d(10, 10, 3),
            nn.ReLU(),
            nn.AdaptiveAvgPool2d(1),
            nn.Flatten(),
            nn.Linear(10, 5),
            nn.ReLU(),
            nn.Linear(5, 2),
        ]
        if with_dropout:
            # Dropout layers exercise additional RNG consumption per step.
            arch.insert(2, nn.Dropout2d())
            arch.insert(-2, nn.Dropout())

        model = nn.Sequential(*arch).to(device)
        opt = SGD(model.parameters(), lr=0.001)

        def proc_fn(e, b):
            from ignite.engine.deterministic import _repr_rng_state, _get_rng_states

            s = _repr_rng_state(_get_rng_states())
            model.train()
            opt.zero_grad()
            y = model(b.to(device))
            y.sum().backward()
            opt.step()
            if debug:
                print(
                    trainer.state.iteration, trainer.state.epoch, "proc_fn - b.shape", b.shape, torch.norm(y).item(), s
                )

        trainer = DeterministicEngine(proc_fn)

        if save_iter is not None:
            ev = Events.ITERATION_COMPLETED(once=save_iter)
        elif save_epoch is not None:
            ev = Events.EPOCH_COMPLETED(once=save_epoch)
            save_iter = save_epoch * (data_size // batch_size)

        @trainer.on(ev)
        def save_chkpt(_):
            if debug:
                print(trainer.state.iteration, "save_chkpt")
            fp = os.path.join(dirname, "test.pt")
            from ignite.engine.deterministic import _repr_rng_state

            tsd = trainer.state_dict()
            if debug:
                print("->", _repr_rng_state(tsd["rng_states"]))
            torch.save([model.state_dict(), opt.state_dict(), tsd], fp)
            chkpt.append(fp)

        def log_event_filter(_, event):
            # Log the 5 iterations immediately after the save point.
            if (event // save_iter == 1) and 1 <= (event % save_iter) <= 5:
                return True
            return False

        @trainer.on(Events.ITERATION_COMPLETED(event_filter=log_event_filter))
        def write_data_grads_weights(e):
            x = e.state.batch
            i = e.state.iteration
            data.append([i, x.mean().item(), x.std().item()])

            total = [0.0, 0.0]
            out1 = []
            out2 = []
            for p in model.parameters():
                n1 = torch.norm(p).item()
                n2 = torch.norm(p.grad).item()
                out1.append(n1)
                out2.append(n2)
                total[0] += n1
                total[1] += n2
            w_norms.append([i, total[0]] + out1)
            grad_norms.append([i, total[1]] + out2)

        if sd is not None:
            # Restore model/optimizer/trainer state from a checkpoint path.
            sd = torch.load(sd)
            model.load_state_dict(sd[0])
            opt.load_state_dict(sd[1])
            from ignite.engine.deterministic import _repr_rng_state

            if debug:
                print("-->", _repr_rng_state(sd[2]["rng_states"]))
            trainer.load_state_dict(sd[2])

        manual_seed(32)
        trainer.run(random_train_data_loader(size=data_size), max_epochs=5)
        return {"sd": chkpt, "data": data, "grads": grad_norms, "weights": w_norms}

    out_original = _train(save_iter=save_iter, save_epoch=save_epoch)
    assert len(out_original["sd"]) > 0

    out_resumed = _train(save_iter=save_iter, save_epoch=save_epoch, sd=out_original["sd"][0])

    if debug:
        print("Original:")
        print(" data:", out_original["data"])
        print("grads:", out_original["grads"])
        print(" W:", out_original["weights"])
        print("Resume:")
        print(" data:", out_resumed["data"])
        print("grads:", out_resumed["grads"])
        print(" W:", out_resumed["weights"])

    # check data:
    for d1, d2 in zip(out_original["data"], out_resumed["data"]):
        assert d1 == d2

    # check grads:
    for d1, d2 in zip(out_original["grads"], out_resumed["grads"]):
        assert d1 == d2

    # check weights:
    for d1, d2 in zip(out_original["weights"], out_resumed["weights"]):
        assert d1 == d2
def test_gradients_on_resume_cpu(dirname):
    """CPU gradient-resume checks; with data augmentation the replay is
    expected to diverge (AssertionError), without it it must match."""
    with pytest.raises(AssertionError):
        _test_gradients_on_resume(dirname, "cpu", with_dataaugs=True, save_iter=25)
    _test_gradients_on_resume(dirname, "cpu", with_dataaugs=False, save_iter=25)
    # resume from epoch
    _test_gradients_on_resume(dirname, "cpu", with_dataaugs=True, save_epoch=3)
    _test_gradients_on_resume(dirname, "cpu", with_dataaugs=False, save_epoch=3)
@pytest.mark.skipif(not torch.cuda.is_available(), reason="Skip if no GPU")
def test_gradients_on_resume_gpu(dirname):
    # With data augmentation, resuming with default settings must diverge.
    with pytest.raises(AssertionError):
        _test_gradients_on_resume(dirname, "cuda", with_dataaugs=True)
    _test_gradients_on_resume(dirname, "cuda", with_dataaugs=False)
    # resume from an iteration checkpoint (save_iter=30), with and without augs
    _test_gradients_on_resume(dirname, "cuda", with_dataaugs=True, save_iter=30)
    _test_gradients_on_resume(dirname, "cuda", with_dataaugs=False, save_iter=30)
def test_engine_with_dataloader_no_auto_batching():
    # tests https://github.com/pytorch/ignite/issues/941
    # DataLoader with batch_size=None disables auto-batching; the sampler
    # itself yields batches of indices.
    from torch.utils.data import DataLoader, BatchSampler, RandomSampler

    data = torch.rand(64, 4, 10)
    # Use the locally imported DataLoader (the original re-spelled the full
    # torch.utils.data.DataLoader path, leaving the import partially unused).
    data_loader = DataLoader(
        data, batch_size=None, sampler=BatchSampler(RandomSampler(data), batch_size=8, drop_last=True)
    )

    counter = [0]

    def foo(e, b):
        print("{}-{}: {}".format(e.state.epoch, e.state.iteration, b))
        counter[0] += 1

    engine = DeterministicEngine(foo)
    engine.run(data_loader, epoch_length=10, max_epochs=5)

    # 10 iterations per epoch * 5 epochs
    assert counter[0] == 50
def test_run_finite_iterator_no_epoch_length():
    # FR: https://github.com/pytorch/ignite/issues/871
    # The engine must be able to run over a finite iterator of unknown
    # length for several epochs, provided the iterator is re-armed.
    unknown_size = 11

    def finite_unk_size_data_iter():
        for i in range(unknown_size):
            yield i

    bc = BatchChecker(data=list(range(unknown_size)))

    engine = DeterministicEngine(lambda e, b: bc.check(b))

    @engine.on(Events.DATALOADER_STOP_ITERATION)
    def restart_iter():
        # Re-arm the exhausted iterator so the run continues for all epochs.
        engine.state.dataloader = finite_unk_size_data_iter()

    data_iter = finite_unk_size_data_iter()
    engine.run(data_iter, max_epochs=5)

    assert engine.state.epoch == 5
    assert engine.state.iteration == unknown_size * 5
| [
"[email protected]"
] | |
5ef8b237e55d37ff2e070de67953ad80ebf813f1 | df07bfa1e4ff4330177b38e3d99c0bc2e77e25bb | /demo/pdf.py | 67ca0d507c70d01b02b55e9bd8859dcdf2503b08 | [] | no_license | alexviome/vetafi | df3ddfba8a251a0043ecf89445ab9c549b9d1eee | f599d6a9d94c988c38291dfd633f7f985d77f74b | refs/heads/master | 2020-09-20T15:56:23.757630 | 2017-08-17T06:09:36 | 2017-08-17T06:09:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,103 | py | import pprint
import sys
from pdfminer.pdfparser import PDFParser
from pdfminer.pdfdocument import PDFDocument
from pdfminer.pdftypes import resolve1
def load_form(filename):
    """Load pdf form contents into a nested list of name/value tuples"""
    # Walk the AcroForm field list of the document catalog and yield every
    # field name found (recursion into /Kids is handled by load_fields).
    with open(filename, 'rb') as file:
        parser = PDFParser(file)
        doc = PDFDocument(parser)
        for f in resolve1(doc.catalog['AcroForm'])['Fields']:
            for field_name in load_fields(resolve1(f)):
                yield field_name
def load_fields(field):
    """Recursively yield the names (/T entries) of a PDF form field tree.

    Container fields recurse into their /Kids; leaf fields yield their
    name decoded from UTF-16. If the name is missing, already a str, or
    not valid UTF-16, it is yielded as-is.
    """
    kids = field.get('Kids', None)
    if kids:
        for kid in kids:
            for field_name in load_fields(resolve1(kid)):
                yield field_name
    else:
        name = field.get('T')
        try:
            yield name.decode('utf-16')
        except (AttributeError, UnicodeError):
            # AttributeError: name is None or already decoded;
            # UnicodeError: bytes that are not UTF-16.
            # (The original bare `except:` also hid unrelated bugs.)
            yield name
if __name__ == '__main__':
    # Usage: pdf.py <form.pdf> -- pretty-print all form field names.
    pprint.pprint([f for f in load_form(sys.argv[1])])
| [
"[email protected]"
] | |
e9d21ef02220e169f6df0bb222eb80eb3d958818 | 518b946eed96800708386bcc4972a77312953eea | /lib/models/comment_converter.py | d5c57e464666003756949fded9212df86eb0858e | [
"MIT"
] | permissive | aoisupersix/git2bit | e6acfc7c8926327bed83e88b8121506154610a3a | d7d333bc6b2b29f11652ec9e8ac1c5dda554a21f | refs/heads/master | 2023-08-07T20:24:19.979680 | 2020-10-24T14:52:58 | 2020-10-24T14:52:58 | 218,724,072 | 0 | 0 | MIT | 2023-07-07T01:14:48 | 2019-10-31T08:58:54 | Python | UTF-8 | Python | false | false | 763 | py | from lib.models import GitbucketComment
from lib.models import IdConverter
def convert(gitbucketComment: GitbucketComment, idConverter: IdConverter) -> dict:
    """
    Translate a Gitbucket comment into the dict shape expected by the
    Bitbucket issue importer.
    """
    payload = gitbucketComment.payload
    # Map the Gitbucket login to the corresponding Bitbucket account id.
    bb_user = idConverter.convertToBitbucketId(payload['user'].get('login'))
    return {
        'content': payload.get('body'),
        'created_on': payload.get('created_at'),
        'id': payload.get('id'),
        'issue': gitbucketComment.issueNo,
        'updated_on': payload.get('updated_at'),
        'user': {
            'display_name': bb_user,
            'account_id': bb_user,
        },
    }
| [
"[email protected]"
] | |
7b98acc53d76f81399ffb120b7e715a6c5608d0a | 00c9701cfc7b1b0bff6a72319d02cd59dc1eca9c | /ros_ws/src/regulation_imugps/src/regulation_from_err_alpha_dist.py | 146f95c8f23cd620b7aa61a5194cd0db3ac032a3 | [] | no_license | EnstaBretagneClubRobo/GuerledanDamScanning | ae80340556898ec6a39395e11975e21272c16c31 | 4309412f0dc883db3e5e4415539f38b5baaa762d | refs/heads/master | 2021-06-14T16:11:16.907465 | 2017-03-03T14:10:51 | 2017-03-03T14:10:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,085 | py | #!/usr/bin/env python
"""
This regulateur is just a template and publish a forward command only
"""
import rospy
from geometry_msgs.msg import Twist
from std_msgs.msg import Float32
from math import atan, pi, tan
def update_err_d(msg):
global eD
eD = msg.data
def update_err_cap(msg):
global ecap
ecap = msg.data
rospy.init_node('regulation_cap')
cmd_pub = rospy.Publisher('cmd_vel', Twist, queue_size=1)
imu_sub = rospy.Subscriber('err_d', Float32, update_err_d)
gps_sub = rospy.Subscriber('err_cap', Float32, update_err_cap)
# erreur en cap et en distance
ecap, eD = 0, 0
K = -3 / pi # rad/s
radius = 5 # largeur d'effet du suivi de ligne
v = -5.0 # todo trouver pourquoi
cmd = Twist()
rate = rospy.Rate(20) # il faut avoir une bonne frequence
while not rospy.is_shutdown():
# error = cap(/mur) - cap_desire
err = ecap - atan(eD / radius)
err = err / 2 # pour ramener de [-pi,pi] a [-pi/2,pi/2]
cmd.angular.z = K * atan(tan((err)))
print ecap, atan(eD)
cmd.linear.x = v
cmd_pub.publish(cmd)
rate.sleep()
| [
"[email protected]"
] | |
a9ec1c6208ab0e57d7b0ab0699b53c48862f92c5 | 951552085dc24d864b35c53300f81cdd9c02e738 | /chandra_models/chandra_models/__init__.py | fc88c9d8dc4af3931b5bf1cadb5202eda33c3462 | [] | no_license | matthewdahmer/chandra_models | 06d65b467b8f43201d59bc7f2f6cefa70ec1f0a7 | 654eff027a0a57f3617c76f65e4bada2d357b7d4 | refs/heads/master | 2021-03-12T22:55:29.980321 | 2013-07-10T19:16:20 | 2013-07-10T19:16:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 63 | py | from .get_model_spec import *
from .version import __version__
| [
"[email protected]"
] | |
b3a177cdd830bf2a2b57fc5cb16d754555abd759 | 9f7a9f268abfc168e408e36b513132402fdd353c | /micro_detect/out1.2.py | 0f6038c2781d0daf80f281d60b7d92ce7d906c17 | [] | no_license | 863752027z/lab_server | fe602bf0a588989b0a7ae171454eba67fa6907ca | 65eeaf94712afd96363d449376a291918156354f | refs/heads/master | 2020-08-10T12:40:30.803168 | 2019-10-11T04:51:14 | 2019-10-11T04:51:14 | 214,344,734 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,556 | py | import cv2
import os
import numpy as np
import pandas as pd
import datetime
import matplotlib.pyplot as plt
import torch
import torchvision
import torch.nn as nn
import torch.utils.data as Data
from torchvision import transforms, datasets
from collections import OrderedDict
os.environ["CUDA_VISIBLE_DEVICES"] = "6"
device = torch.device('cuda:0')
class Encoder(nn.Module):
    """Convolutional encoder: 3x256x256 image -> 256x1x1 code.

    Seven (Conv -> BatchNorm -> LeakyReLU) stages each halve the spatial
    size, followed by a final bare Conv down to 1x1.
    """

    def __init__(self):
        super(Encoder, self).__init__()
        # Channel progression for the eight stride-2 convolutions.
        channels = [3, 32, 64, 128, 256, 256, 256, 256]
        layers = []
        for i in range(1, 8):
            layers.append(('Con{}'.format(i),
                           nn.Conv2d(channels[i - 1], channels[i], 4, stride=2, padding=1)))
            layers.append(('BatchNorm{}'.format(i), nn.BatchNorm2d(channels[i])))
            layers.append(('LeakyReLU{}'.format(i), nn.LeakyReLU(0.2, True)))
        # The last stage has no normalisation or activation.
        layers.append(('Con8', nn.Conv2d(256, 256, 4, stride=2, padding=1)))
        self.Encoder = nn.Sequential(OrderedDict(layers))

    def forward(self, x):
        return self.Encoder(x)
class Decoder(nn.Module):
    """Upsampling decoder: 512x1x1 feature -> 3x256x256 image in [-1, 1].

    Eight (Upsample -> Conv -> BatchNorm -> ReLU) stages double the
    spatial size from 1x1 up to 256x256; the final stage ends in Tanh
    instead of BatchNorm/ReLU.

    Fix: the original OrderedDict reused the keys 'Con5'/'BatchNorm5' for
    stage 6 (duplicate dict keys overwrite), which silently dropped one
    Conv+BatchNorm pair and left Upsample6 without a convolution. Stage 6
    now has its own uniquely named modules.
    """

    def __init__(self):
        super(Decoder, self).__init__()
        decoder_layer = OrderedDict([
            ('Upsample1', nn.Upsample(scale_factor=2, mode='bilinear', align_corners=False)),
            ('Con1', nn.Conv2d(512, 32, 3, stride=1, padding=1)),
            ('BatchNorm1', nn.BatchNorm2d(32)),
            ('ReLU1', nn.ReLU()),
            ('Upsample2', nn.Upsample(scale_factor=2, mode='bilinear', align_corners=False)),
            ('Con2', nn.Conv2d(32, 64, 3, stride=1, padding=1)),
            ('BatchNorm2', nn.BatchNorm2d(64)),
            ('ReLU2', nn.ReLU()),
            ('Upsample3', nn.Upsample(scale_factor=2, mode='bilinear', align_corners=False)),
            ('Con3', nn.Conv2d(64, 128, 3, stride=1, padding=1)),
            ('BatchNorm3', nn.BatchNorm2d(128)),
            ('ReLU3', nn.ReLU()),
            ('Upsample4', nn.Upsample(scale_factor=2, mode='bilinear', align_corners=False)),
            ('Con4', nn.Conv2d(128, 256, 3, stride=1, padding=1)),
            ('BatchNorm4', nn.BatchNorm2d(256)),
            ('ReLU4', nn.ReLU()),
            ('Upsample5', nn.Upsample(scale_factor=2, mode='bilinear', align_corners=False)),
            ('Con5', nn.Conv2d(256, 256, 3, stride=1, padding=1)),
            ('BatchNorm5', nn.BatchNorm2d(256)),
            ('ReLU5', nn.ReLU()),
            ('Upsample6', nn.Upsample(scale_factor=2, mode='bilinear', align_corners=False)),
            ('Con6', nn.Conv2d(256, 256, 3, stride=1, padding=1)),
            ('BatchNorm6', nn.BatchNorm2d(256)),
            ('ReLU6', nn.ReLU()),
            ('Upsample7', nn.Upsample(scale_factor=2, mode='bilinear', align_corners=False)),
            ('Con7', nn.Conv2d(256, 256, 3, stride=1, padding=1)),
            ('BatchNorm7', nn.BatchNorm2d(256)),
            ('ReLU7', nn.ReLU()),
            ('Upsample8', nn.Upsample(scale_factor=2, mode='bilinear', align_corners=False)),
            ('Con8', nn.Conv2d(256, 3, 3, stride=1, padding=1)),
            ('Tanh', nn.Tanh())
        ])
        self.Decoder = nn.Sequential(decoder_layer)

    def forward(self, x):
        x = self.Decoder(x)
        return x
class LstmCell(nn.Module):
    """Single-step LSTM cell over 256-dimensional features."""

    def __init__(self):
        super(LstmCell, self).__init__()
        self.LstmCell = nn.LSTMCell(input_size=256, hidden_size=256)

    def forward(self, xt, h, c):
        """Advance the cell by one step; returns the new (h, c) pair."""
        new_h, new_c = self.LstmCell(xt, (h, c))
        return new_h, new_c
def printGPU():
    """Print the index and name of every visible CUDA device.

    Fixes the original bug that printed device 0's name for every index
    (`get_device_name(0)` inside the loop instead of `get_device_name(i)`).
    """
    for i in range(torch.cuda.device_count()):
        print(i, torch.cuda.get_device_name(i))
def draw(loss_list):
    """Plot the recorded loss values against their index.

    Blocks until the matplotlib window is closed (plt.show()).
    """
    x = range(0, len(loss_list))
    y = loss_list
    # Top half of a 2x1 figure grid; the second subplot is unused.
    plt.subplot(2, 1, 1)
    plt.plot(x, y, 'r-')
    plt.xlabel('batch_num')
    plt.ylabel('loss')
    plt.show()
def save_data_to_excel(data, path):
    """Write a 2-D array to an Excel file (sheet 'page_1'), logging progress."""
    print(datetime.datetime.now())
    print('generating:', path)
    print(data.shape)
    writer = pd.ExcelWriter(path)
    # float_format controls the precision of the stored values
    pd.DataFrame(data).to_excel(writer, 'page_1', float_format='%.5f')
    writer.save()
    print('done')
def read_data_from_excel(path):
    """Read sheet 'page_1' back into a numpy array, dropping the index column
    that save_data_to_excel wrote as column 0."""
    sheet = pd.read_excel(path, 'page_1')
    return np.delete(np.array(sheet), 0, axis=1)
def get_path(base_path):
    """Return base_path + '/' + name for every immediate subdirectory.

    Only the first level is inspected; an empty list is returned when
    base_path does not exist or has no subdirectories.
    """
    for _, dirs, _ in os.walk(base_path):
        # First os.walk tuple lists the direct children; stop there.
        return [base_path + '/' + d for d in dirs]
    return []
def trainLoader(file_path, batch_size, shuffle, num_workers):
    """Build a DataLoader over an ImageFolder, normalising images to [-1, 1]."""
    pipeline = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize(mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)),
    ])
    folder = datasets.ImageFolder(file_path, transform=pipeline)
    return Data.DataLoader(dataset=folder,
                           batch_size=batch_size,
                           shuffle=shuffle,
                           num_workers=num_workers)
def testLoader(file_path, batch_size, shuffle, num_workers):
    """Build a DataLoader over an ImageFolder, normalising images to [-1, 1].

    Identical to trainLoader; kept separate to preserve the public API.
    """
    pipeline = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize(mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)),
    ])
    folder = datasets.ImageFolder(file_path, transform=pipeline)
    return Data.DataLoader(dataset=folder,
                           batch_size=batch_size,
                           shuffle=shuffle,
                           num_workers=num_workers)
def train(loader_list, learning_rate, num_epochs, seq):
    """Jointly train Encoder/LstmCell/Decoder to reconstruct the first frame
    of each length-`seq` batch from the remaining seq-1 frames.

    Returns (loss_list, encoder_model, cell_model); the decoder is discarded.
    NOTE(review): the `.view(3, ...)` below hard-codes seq-1 == 3, so this
    assumes seq (== batch_size at the call site) is 4 -- confirm before reuse.
    Batches shorter than `seq` are skipped entirely.
    """
    cell_model = LstmCell().to(device)
    encoder_model = Encoder().to(device)
    decoder_model = Decoder().to(device)
    criterion = nn.MSELoss().to(device)
    # One SGD optimizer over all three sub-models.
    optimizer = torch.optim.SGD([
        {'params': encoder_model.parameters()},
        {'params': cell_model.parameters()},
        {'params': decoder_model.parameters()}
    ], lr=learning_rate, momentum=0.9)
    loss_list = []
    for epoch in range(num_epochs):
        for i in range(len(loader_list)):
            train_loader = loader_list[i]
            for idx, (data, label) in enumerate(train_loader):
                # Skip incomplete tail batches.
                if data.shape[0] < seq:
                    break
                h = torch.zeros(seq-1, 256).to(device)
                c = torch.zeros(seq-1, 256).to(device)
                data = data.to(device) #4*3*256*256
                # =========forward===========
                encoder_output = encoder_model(data)
                encoder_output = encoder_output.view((encoder_output.shape[0], encoder_output.shape[1])) #4*256
                # Frame 0 is the reconstruction target; frames 1..seq-1 feed the LSTM.
                temp_target = encoder_output[0].view(1, encoder_output.shape[1], 1, 1) #1*256*1*1
                temp_source = encoder_output[1:].view(3, encoder_output.shape[1]) #3*256
                h, c = cell_model(temp_source, h, c)
                cell_output = h[-1].view(1, 256, 1, 1)
                decoder_input = torch.cat((temp_target, cell_output), 1) #1*512*1*1
                decoder_output = decoder_model(decoder_input) #1*3*256*256
                target_img = data[0:1, :, :, :] #1*3*256*256
                loss = criterion(decoder_output, target_img)
                # =========backward=========
                optimizer.zero_grad()
                loss.backward()
                optimizer.step()
                # ============log===========
                print('epoch [{}/{}], batch [{}], loader [{}] loss:{:.4f}'
                      .format(epoch + 1, num_epochs, idx, i, loss.item()))
        # Record the last seen loss every other epoch.
        if epoch % 2 == 0:
            loss_list.append(loss.item())
    return loss_list, encoder_model, cell_model
def test(encoder_moudle, cell_moudle, loader, seq):
    """Slide a length-`seq` window over the frames yielded by `loader` and
    return one 512-d feature per window (target code + LSTM summary) as a
    numpy array of shape (num_windows, 512).

    NOTE(review): `loader` must yield single frames (batch_size=1) in
    temporal order -- shuffle must be False for the windowing to make sense.
    """
    with torch.no_grad():
        for idx, (data, label) in enumerate(loader):
            data = data.to(device)
            if idx <= seq - 2:
                # Still accumulating frames for the first window.
                if idx == 0:
                    Quad = data
                else:
                    Quad = torch.cat((Quad, data), 0)
            if idx == seq - 1:
                # First complete window: encode it and build the first row.
                Quad = torch.cat((Quad, data), 0)
                h = torch.zeros(seq - 1, 256).to(device)
                c = torch.zeros(seq - 1, 256).to(device)
                encoder_out = encoder_moudle(Quad)
                temp_target = encoder_out[0:1, :, :, :]
                temp_source = encoder_out[1:, :, :, :].view(seq - 1, 256)
                h, c = cell_moudle(temp_source, h, c)
                cell_out = h[-1].view(1, 256, 1, 1)
                feature = torch.cat((temp_target, cell_out), 1).to(device)
            if idx >= seq:
                # Later windows: drop the oldest frame, append the newest,
                # and re-run encoder + LSTM with freshly zeroed state.
                h = torch.zeros(seq - 1, 256).to(device)
                c = torch.zeros(seq - 1, 256).to(device)
                Quad = Quad[1:, :, :, :]
                Quad = torch.cat((Quad, data), 0)
                encoder_out = encoder_moudle(Quad)
                temp_target = encoder_out[0:1, :, :, :]
                temp_source = encoder_out[1:, :, :, :].view(seq - 1, 256)
                h, c = cell_moudle(temp_source, h, c)
                cell_out = h[-1].view(1, 256, 1, 1)
                curr_feature = torch.cat((temp_target, cell_out), 1)
                feature = torch.cat((feature, curr_feature), 0)
        # Flatten the trailing 1x1 spatial dims and move to CPU numpy.
        feature = feature.cpu().detach().view(feature.shape[0], feature.shape[1]).numpy()
        return feature
def gen_train_feature(encoder_moudle, cell_moudle, path_list, save_path, seq):
    """Extract per-window features for every sequence directory in
    `path_list` and save each as <save_path>/<dirname>.xlsx.

    The output file name now uses os.path.basename instead of the original
    hard-coded `[29:]` slice, which only worked for one specific base path.
    """
    for seq_dir in path_list:
        curr_path = save_path + '/' + os.path.basename(seq_dir) + '.xlsx'
        # Frames must be loaded one at a time, in order (no shuffling).
        temp_loader = testLoader(seq_dir, batch_size=1, shuffle=False, num_workers=8)
        feature = test(encoder_moudle, cell_moudle, temp_loader, seq)
        print('generating ' + curr_path)
        save_data_to_excel(feature, curr_path)
# Script entry: train the encoder/LSTM on every sequence directory under
# base_path and persist the two trained sub-models as pickled modules.
printGPU()
base_path = '/home/zlw/dataset/SAMM/train'
encoder_moudle_path = '/home/zlw/dataset/SAMM/moudle/encoder_moudle_40.pkl'
cell_moudle_path = '/home/zlw/dataset/SAMM/moudle/cell_moudle_40.pkl'
save_path = '/home/zlw/dataset/SAMM/train_feature'
learning_rate = 1e-4
batch_size = 4
num_workers = 8
num_epochs = 40
path_list = get_path(base_path)
loader_list = []
# One unshuffled loader per sequence directory (temporal order matters).
for i in range(len(path_list)):
    temp_loader = trainLoader(path_list[i], batch_size, False, num_workers)
    loader_list.append(temp_loader)
# NOTE(review): batch_size doubles as the sequence length `seq` in train().
loss_list, encoder_moudle, cell_moudle = train(loader_list, learning_rate, num_epochs, batch_size)
torch.save(encoder_moudle, encoder_moudle_path)
torch.save(cell_moudle, cell_moudle_path)
print(str(datetime.datetime.now()) + ' moudle save successfully\n')
draw(loss_list)
| [
"[email protected]"
] | |
3ce5b745b7bb73991b75f239120a3a8be10b9ca4 | 47dd0f0fe0b5c49c39af5800196ebca6b31a3483 | /algorithm/raw_problem/RemoveDuplicatesfromSortedArray.py | 26769d76836c559dd7ade7099ef633afe95ddc57 | [] | no_license | gift9527/leetcode | e33eac7e2ce88cc72fce8d9f0271074c86f750b6 | 70e7f24dff27e1e7ac4b53f57a91a46bc6b38b31 | refs/heads/master | 2022-03-26T06:47:58.332265 | 2019-12-31T08:07:04 | 2019-12-31T08:07:04 | 111,184,352 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 396 | py | class Solution(object):
def removeDuplicatesfromSortedArray(self,nums):
length = 1
tmp = nums[0]
for i in nums:
if i == tmp:
continue
else:
tmp = i
length += 1
return length
if __name__ == "__main__":
a = Solution()
f = a.removeDuplicatesfromSortedArray([1,1,2])
print (f)
| [
"[email protected]"
] | |
38c933659b02a22a20f6d083599f99fcb5084d21 | a80e59f13ca24f9033944e509841d61c50ad3e48 | /german_words.py | 8cf50ccc1a73953f59afcd50cc3a92afc2d408a8 | [
"Apache-2.0"
] | permissive | thomi137/Python-Samples | a23125c3235912b8e81285fb19a2ca428894df37 | 7c8cc5eae94a4737ef9116617389ca78a25434f3 | refs/heads/master | 2021-01-22T17:57:39.452837 | 2013-11-09T16:10:34 | 2013-11-09T16:10:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,032 | py | #!/usr/bin/python
# -* coding: utf-8 -*-
#########################################################################################
#
# Fun with german words... Oh yeah and Python classes, albeit a complete overkill
#
# Copyright 2013 by Thomas Prosser, [email protected]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#########################################################################################
import urllib2
'''
Top 10000 words in the german language
Nasty iso something encoding. Should be able to strip that in the future
'''
URL_DE = 'http://wortschatz.uni-leipzig.de/Papers/top10000de.txt'
class German_words:
def __init__(self, max_num = 10000):
if max_num > 10000:
print 'Truncating, maximum wordcount is 10000'
max_num = 10000
self.max_num = max_num
self.page = urllib2.urlopen(URL_DE)
self.word_list = [item.decode('latin-1').replace('\n','')
for item in self.page][:max_num]
def is_in_list(self, word):
return word.decode('utf-8') in self.word_list
#########################################################################################
# Test code follows here
#########################################################################################
def get_test(max_num = 10000):
gw = German_words(max_num)
print gw.word_list
def is_in_test(string, max_num = 10000):
gw = German_words(max_num)
print gw.is_in_list(string)
if __name__ == '__main__':
get_test(1)
get_test(2)
get_test(4)
is_in_test('über')
is_in_test('Elefant')
| [
"[email protected]"
] | |
6f12aa49c1b58757880e07d40286744fc1c5e54e | 13e012bd1fe359bb000309bc0fbc677d523e0ceb | /app.py | eadaa1d38507cf6e90e6b0371caaf643d786f4fd | [] | no_license | catboytao/sports | bca6245f5dcdb37025ef0220faffe58c35587c68 | 42e54c5ce19306cf81868c83d857bea987f405d3 | refs/heads/master | 2023-02-09T10:32:55.122199 | 2020-04-09T12:10:14 | 2020-04-09T12:10:14 | 248,934,195 | 0 | 0 | null | 2023-02-02T06:17:41 | 2020-03-21T08:18:44 | Python | UTF-8 | Python | false | false | 2,760 | py | import logging
from flask import Flask, jsonify,abort,request,g,url_for
from flask_httpauth import HTTPBasicAuth
from passlib.apps import custom_app_context as pwd_context
from datetime import datetime
# 输出时间
from spider.crawler import Crawler
from spider.load_data import Loader
from factory import create_app
from models.model import User,Sports
from utils.core import db
# Application bootstrap: build the Flask app, push an app context so the
# SQLAlchemy models can be used at import time, and create all tables.
app = create_app(config_name="DEVELOPMENT")
app.app_context().push()
auth = HTTPBasicAuth()
db.create_all()
# celery -A app:celery_app worker -l info -P gevent
logging.basicConfig(
    level=logging.INFO,
    filename="logs/log.txt",
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
@app.route('/')
def hello_world():
    """Health-check root route."""
    return 'Hello World!'
@auth.verify_password
def verify_password(username_or_token, password):
    """Authenticate with either a token or a username/password pair.

    On success the user object is stashed on flask.g for the view function.
    """
    # Token first; fall back to username/password if it does not verify.
    user = User.verify_auth_token(username_or_token, app)
    if not user:
        user = User.query.filter_by(username=username_or_token).first()
        if not user or not user.verify_password(password):
            return False
    g.user = user
    return True
@app.route('/api/add_user/', methods=['POST'])
def new_user():
    """Register a new user from a JSON body {username, password}.

    Responds 400 on missing fields or duplicate username, otherwise 201
    with a Location header pointing at the created user.
    """
    username = request.json.get('username')
    password = request.json.get('password')
    if username is None or password is None:
        abort(400)  # missing arguments
    if User.query.filter_by(username=username).first() is not None:
        abort(400)  # existing user
    user = User(username=username)
    user.hash_password(password)
    db.session.add(user)
    db.session.commit()
    location = url_for('get_user', id=user.id, _external=True)
    return jsonify({'username': user.username}), 201, {'Location': location}
@app.route('/api/users/<int:id>')
def get_user(id):
    """Return the username for a user id; 400 if the id is unknown."""
    user = User.query.get(id)
    if not user:
        abort(400)
    return jsonify({'username': user.username})
@app.route('/api/token')
@auth.login_required
def get_auth_token():
    # Issue a short-lived (600 s) token for the already-authenticated user.
    token = g.user.generate_auth_token(app,600)
    return jsonify({'token': token.decode('ascii'), 'duration': 600})
@app.route('/api/resource')
@auth.login_required
def get_resource():
    # Simple protected endpoint that greets the authenticated user.
    return jsonify({'data': 'Hello, %s!' % g.user.username})
@app.route('/api/getSports')
@auth.login_required
def get_sports():
    """Return every Sports row serialised to a dict, with a row count.

    On any failure the response carries msg='Fail' and code=-1 instead of
    raising, matching the envelope-style API used elsewhere.
    """
    resp = {'data': None, 'msg': 'Success', 'code': 0}
    try:
        rows = Sports.query.all()
        serialised = [Sports.user_to_dict(row) for row in rows]
        resp['data'] = serialised
        resp['count'] = len(serialised)
    except Exception as e:
        print(e)
        resp['msg'] = 'Fail'
        resp['code'] = -1
    return jsonify(resp)
#api.add_resource(SportsResource,'/getSports')
if __name__ == '__main__':
    # Run the Flask development server when executed directly.
    app.run()
| [
"[email protected]"
] | |
264248272a1c358a4acd5d74b1c03580e66eaedb | 7807d8d9d109a3e272fffed91bf841201da39256 | /trans_ITP1_8_A/tsuru_aji_ITP1_8_A_kotonoha.py | 235487016630d8bb7d2384be3761ff1a3e9e983b | [] | no_license | y-akinobu/AOJ_to_Kotonoha | 0e8df43393964fcdd5df06c75545091bd6c0c2e2 | 5a694a55a3d85e3fbc4a07b57edc4374556db9a1 | refs/heads/main | 2023-02-05T15:33:16.581177 | 2020-12-30T16:14:44 | 2020-12-30T16:14:44 | 325,524,216 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 160 | py | # strと入力された文字列の英大文字を英小文字、英小文字を英大文字に変換した文字列を出力する
print(str.swapcase(input())) | [
"[email protected]"
] | |
e69f606bfb4db52a51edf5b7a7469866ce20c8ca | 4f12d74448bd835bd222504e660672dbe0159e68 | /2.py | c6f07a7419faa91e33f04eecdc354f5633247a27 | [] | no_license | clara51/DataStructure | 2d82c25d17a8402daca02d93d208269f8fa9577e | 52d14fc64beaa6646443e2309e37d9a9d83fe6ba | refs/heads/master | 2020-04-12T10:39:52.202546 | 2018-12-19T12:46:48 | 2018-12-19T12:46:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,632 | py | '''
买卖股票的最佳时机 II
给定一个数组,它的第 i 个元素是一支给定股票第 i 天的价格。
设计一个算法来计算你所能获取的最大利润。你可以尽可能地完成更多的交易(多次买卖一支股票)。
注意:你不能同时参与多笔交易(你必须在再次购买前出售掉之前的股票)。
示例 1:
输入: [7,1,5,3,6,4]
输出: 7
解释: 在第 2 天(股票价格 = 1)的时候买入,在第 3 天(股票价格 = 5)的时候卖出, 这笔交易所能获得利润 = 5-1 = 4 。
随后,在第 4 天(股票价格 = 3)的时候买入,在第 5 天(股票价格 = 6)的时候卖出, 这笔交易所能获得利润 = 6-3 = 3 。
示例 2:
输入: [1,2,3,4,5]
输出: 4
解释: 在第 1 天(股票价格 = 1)的时候买入,在第 5 天 (股票价格 = 5)的时候卖出, 这笔交易所能获得利润 = 5-1 = 4 。
注意你不能在第 1 天和第 2 天接连购买股票,之后再将它们卖出。
因为这样属于同时参与了多笔交易,你必须在再次购买前出售掉之前的股票。
示例 3:
输入: [7,6,4,3,1]
输出: 0
解释: 在这种情况下, 没有交易完成, 所以最大利润为 0。
'''
# Sample price series used by the demo call at the bottom of the file.
prices = [7, 1, 5, 3, 6, 4]
class Solution:
    def maxProfit(self, prices):
        """
        Greedy: with unlimited transactions, the optimum equals the sum of
        every positive day-over-day price increase.
        :type prices: List[int]
        :rtype: int
        """
        profit = sum(nxt - cur
                     for cur, nxt in zip(prices, prices[1:])
                     if nxt > cur)
        print(profit)
        return profit
# Demo: prints (and discards) the max profit for the sample series.
a = Solution()
a.maxProfit(prices)
"[email protected]"
] | |
319634b2638f825383e2e84f42c7b1f36523f596 | 367a0ad6b268c0dfe34841173a9da71c5517b798 | /Gen-Key.py | a03cb66ff43d5e027acdfb2dfee8841cbd9d03c1 | [] | no_license | BakedBinJuice/file-encryption | ed159f7378ce97a7edfae91f8573d58b0d568bcd | c3ce19fef25230b472a87f9dedf82045c149e3ad | refs/heads/master | 2022-12-02T10:45:51.033322 | 2020-08-14T02:49:14 | 2020-08-14T02:49:14 | 287,428,197 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 243 | py | #!/usr/bin/env python3
from cryptography.fernet import Fernet
def gen_key():
    # Generate a fresh Fernet key and publish it through the module-level
    # `key` global (consumed by write_key at the bottom of the script).
    global key
    key = Fernet.generate_key()
def write_key(key):
    """Persist *key* (bytes) to 'key.key' in the current directory.

    Fix: the original evaluated ``file.close`` without calling it, so the
    handle was never explicitly closed; a context manager guarantees it.
    """
    with open('key.key', 'wb') as fh:
        fh.write(key)
# Script entry: create a key and persist it to key.key.
gen_key()
write_key(key)
| [
"[email protected]"
] | |
b952733ad1f26d285dcea235356195d9224c9350 | 17e295e1fe88b66546cba50ae1d314aa14b6a2d4 | /iRobot_control/venv/Scripts/miniterm.py | f82cba11ffee2277b46238bcdbdf1670943787e9 | [] | no_license | JaheimMao/iRobot-Create-2 | e2f99cfe50f7686bbfe1dc43bebcbfed2620bed6 | 873efcff47f18e7f3b39badba1a3a0aa4d910e46 | refs/heads/main | 2023-03-17T22:42:11.057683 | 2021-03-08T12:23:43 | 2021-03-08T12:23:43 | 344,784,885 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 35,148 | py | #!D:\lenovo\Documents\PycharmProjects\iRobot\venv\Scripts\python.exe
#
# Very simple serial terminal
#
# This file is part of pySerial. https://github.com/pyserial/pyserial
# (C)2002-2015 Chris Liechti <[email protected]>
#
# SPDX-License-Identifier: BSD-3-Clause
import codecs
import os
import sys
import threading
import serial
from serial.tools.list_ports import comports
from serial.tools import hexlify_codec
# pylint: disable=wrong-import-order,wrong-import-position
codecs.register(lambda c: hexlify_codec.getregentry() if c == 'hexlify' else None)
# Python 2/3 compatibility: on Python 3, alias raw_input/unichr to their
# renamed builtins so the rest of the module can use one spelling.
try:
    raw_input
except NameError:
    # pylint: disable=redefined-builtin,invalid-name
    raw_input = input # in python3 it's "raw"
    unichr = chr
def key_description(character):
    """Generate a readable description for a key."""
    code = ord(character)
    if code >= 32:
        return repr(character)
    # Control characters: 0x01 -> 'Ctrl+A', 0x02 -> 'Ctrl+B', ...
    return 'Ctrl+{:c}'.format(ord('@') + code)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
class ConsoleBase(object):
    """OS abstraction for console (input/output codec, no echo)."""

    def __init__(self):
        # Python 3 exposes the raw binary stream as sys.stdout.buffer.
        py3 = sys.version_info >= (3, 0)
        self.byte_output = sys.stdout.buffer if py3 else sys.stdout
        self.output = sys.stdout

    def setup(self):
        """Set console to read single characters, no echo."""

    def cleanup(self):
        """Restore default console settings."""

    def getkey(self):
        """Read a single key from the console (None in the base class)."""
        return None

    def write_bytes(self, byte_string):
        """Write already-encoded bytes and flush immediately."""
        self.byte_output.write(byte_string)
        self.byte_output.flush()

    def write(self, text):
        """Write a text string and flush immediately."""
        self.output.write(text)
        self.output.flush()

    def cancel(self):
        """Cancel a pending getkey operation (no-op in the base class)."""

    # Context-manager protocol: temporarily drop back to the normal
    # console mode (e.g. to prompt the user), restoring raw mode on exit.
    def __enter__(self):
        self.cleanup()
        return self

    def __exit__(self, *args, **kwargs):
        self.setup()
if os.name == 'nt': # noqa
    import msvcrt
    import ctypes

    class Out(object):
        """file-like wrapper that uses os.write"""

        def __init__(self, fd):
            self.fd = fd

        def flush(self):
            pass

        def write(self, s):
            os.write(self.fd, s)

    class Console(ConsoleBase):
        # Windows console: switch code pages to UTF-8 and read keys via msvcrt.
        def __init__(self):
            super(Console, self).__init__()
            # Remember the original code pages so __del__ can restore them.
            self._saved_ocp = ctypes.windll.kernel32.GetConsoleOutputCP()
            self._saved_icp = ctypes.windll.kernel32.GetConsoleCP()
            ctypes.windll.kernel32.SetConsoleOutputCP(65001)
            ctypes.windll.kernel32.SetConsoleCP(65001)
            self.output = codecs.getwriter('UTF-8')(Out(sys.stdout.fileno()), 'replace')
            # the change of the code page is not propagated to Python, manually fix it
            sys.stderr = codecs.getwriter('UTF-8')(Out(sys.stderr.fileno()), 'replace')
            sys.stdout = self.output
            self.output.encoding = 'UTF-8' # needed for input

        def __del__(self):
            ctypes.windll.kernel32.SetConsoleOutputCP(self._saved_ocp)
            ctypes.windll.kernel32.SetConsoleCP(self._saved_icp)

        def getkey(self):
            while True:
                z = msvcrt.getwch()
                if z == unichr(13):
                    # Normalise ENTER (CR) to LF.
                    return unichr(10)
                elif z in (unichr(0), unichr(0x0e)): # functions keys, ignore
                    msvcrt.getwch()
                else:
                    return z

        def cancel(self):
            # CancelIo, CancelSynchronousIo do not seem to work when using
            # getwch, so instead, send a key to the window with the console
            hwnd = ctypes.windll.kernel32.GetConsoleWindow()
            ctypes.windll.user32.PostMessageA(hwnd, 0x100, 0x0d, 0)

elif os.name == 'posix':
    import atexit
    import termios
    import fcntl

    class Console(ConsoleBase):
        # POSIX console: raw-ish tty mode via termios.
        def __init__(self):
            super(Console, self).__init__()
            self.fd = sys.stdin.fileno()
            # Save the original tty attributes; restored by cleanup() at exit.
            self.old = termios.tcgetattr(self.fd)
            atexit.register(self.cleanup)
            if sys.version_info < (3, 0):
                self.enc_stdin = codecs.getreader(sys.stdin.encoding)(sys.stdin)
            else:
                self.enc_stdin = sys.stdin

        def setup(self):
            # No canonical line buffering, no echo, no signal generation.
            new = termios.tcgetattr(self.fd)
            new[3] = new[3] & ~termios.ICANON & ~termios.ECHO & ~termios.ISIG
            new[6][termios.VMIN] = 1
            new[6][termios.VTIME] = 0
            termios.tcsetattr(self.fd, termios.TCSANOW, new)

        def getkey(self):
            c = self.enc_stdin.read(1)
            if c == unichr(0x7f):
                c = unichr(8) # map the BS key (which yields DEL) to backspace
            return c

        def cancel(self):
            # Inject a NUL byte into the tty input queue to unblock read().
            fcntl.ioctl(self.fd, termios.TIOCSTI, b'\0')

        def cleanup(self):
            termios.tcsetattr(self.fd, termios.TCSAFLUSH, self.old)

else:
    raise NotImplementedError(
        'Sorry no implementation for your platform ({}) available.'.format(sys.platform))
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
class Transform(object):
    """do-nothing: forward all data unchanged"""
    # Base class of the rx/tx/echo filter chain; subclasses override
    # only the directions they need to change.

    def rx(self, text):
        """text received from serial port"""
        return text

    def tx(self, text):
        """text to be sent to serial port"""
        return text

    def echo(self, text):
        """text to be sent but displayed on console"""
        return text
class CRLF(Transform):
    """ENTER sends CR+LF"""

    def tx(self, text):
        # Expand every LF typed on the console to CR+LF on the wire.
        return text.replace('\n', '\r\n')
class CR(Transform):
    """ENTER sends CR"""

    def rx(self, text):
        # Display CRs coming from the device as newlines on the console.
        return text.replace('\r', '\n')

    def tx(self, text):
        return text.replace('\n', '\r')
class LF(Transform):
    """ENTER sends LF"""
    # Inherits the identity rx/tx/echo from Transform: LF passes through.
class NoTerminal(Transform):
    """remove typical terminal control codes from input"""

    # Map C0 control characters (except CR/LF/BS/TAB) to their Unicode
    # "control pictures" (U+2400 block) so they become visible glyphs.
    REPLACEMENT_MAP = dict((x, 0x2400 + x) for x in range(32) if unichr(x) not in '\r\n\b\t')
    REPLACEMENT_MAP.update(
        {
            0x7F: 0x2421, # DEL
            0x9B: 0x2425, # CSI
        })

    def rx(self, text):
        return text.translate(self.REPLACEMENT_MAP)

    echo = rx
class NoControls(NoTerminal):
    """Remove all control codes, incl. CR+LF"""

    # Unlike NoTerminal, every C0 control (including CR/LF) becomes a
    # visible glyph; space is shown as the open-box symbol.
    REPLACEMENT_MAP = dict((x, 0x2400 + x) for x in range(32))
    REPLACEMENT_MAP.update(
        {
            0x20: 0x2423, # visual space
            0x7F: 0x2421, # DEL
            0x9B: 0x2425, # CSI
        })
class Printable(Transform):
    """Show decimal code for all non-ASCII characters and replace most control codes"""

    def rx(self, text):
        def render(ch):
            if ' ' <= ch < '\x7f' or ch in '\r\n\b\t':
                # Printable ASCII and common whitespace pass through.
                return ch
            if ch < ' ':
                # Other control characters -> Unicode control pictures.
                return unichr(0x2400 + ord(ch))
            # Non-ASCII: subscript digits of the decimal code point + space.
            return ''.join(unichr(0x2080 + ord(d) - 48)
                           for d in '{:d}'.format(ord(ch))) + ' '
        return ''.join(render(ch) for ch in text)

    echo = rx
class Colorize(Transform):
    """Apply different colors for received and echo"""

    def __init__(self):
        # XXX make it configurable, use colorama?
        # ANSI SGR escapes: white for received data, red for local echo.
        self.input_color = '\x1b[37m'
        self.echo_color = '\x1b[31m'

    def rx(self, text):
        return self.input_color + text

    def echo(self, text):
        return self.echo_color + text
class DebugIO(Transform):
    """Print what is sent and received"""

    def rx(self, text):
        # Trace received data on stderr without altering it.
        sys.stderr.write(' [RX:{}] '.format(repr(text)))
        sys.stderr.flush()
        return text

    def tx(self, text):
        # Trace transmitted data on stderr without altering it.
        sys.stderr.write(' [TX:{}] '.format(repr(text)))
        sys.stderr.flush()
        return text
# other ideas:
# - add date/time for each newline
# - insert newline after: a) timeout b) packet end character
# Registry of ENTER-key translations, selected by the --eol option.
EOL_TRANSFORMATIONS = {
    'crlf': CRLF,
    'cr': CR,
    'lf': LF,
}

# Registry of display/encode filters, selected by the --filter option.
TRANSFORMATIONS = {
    'direct': Transform, # no transformation
    'default': NoTerminal,
    'nocontrol': NoControls,
    'printable': Printable,
    'colorize': Colorize,
    'debug': DebugIO,
}
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def ask_for_port():
    """\
    Show a list of ports and ask the user for a choice. To make selection
    easier on systems with long device names, also allow the input of an
    index.
    """
    sys.stderr.write('\n--- Available ports:\n')
    ports = []
    # Ports are listed 1-based for the user.
    for n, (port, desc, hwid) in enumerate(sorted(comports()), 1):
        sys.stderr.write('--- {:2}: {:20} {!r}\n'.format(n, port, desc))
        ports.append(port)
    while True:
        port = raw_input('--- Enter port index or full name: ')
        try:
            index = int(port) - 1
            if not 0 <= index < len(ports):
                sys.stderr.write('--- Invalid index!\n')
                continue
        except ValueError:
            # Non-numeric input: treat it as a literal port name.
            pass
        else:
            port = ports[index]
        return port
class Miniterm(object):
    """\
    Terminal application. Copy data from serial port to console and vice versa.
    Handle special keys from the console to show menu etc.
    """

    def __init__(self, serial_instance, echo=False, eol='crlf', filters=()):
        self.console = Console()
        self.serial = serial_instance
        self.echo = echo                    # local echo of typed characters
        self.raw = False                    # when True, RX bypasses decoding/filters
        self.input_encoding = 'UTF-8'
        self.output_encoding = 'UTF-8'
        self.eol = eol                      # key into EOL_TRANSFORMATIONS
        self.filters = filters              # keys into TRANSFORMATIONS
        self.update_transformations()
        # These are integer code points here; main() replaces them with
        # one-character strings via unichr() before starting the loops.
        self.exit_character = 0x1d  # GS/CTRL+]
        self.menu_character = 0x14  # Menu: CTRL+T
        self.alive = None
        self._reader_alive = None
        self.receiver_thread = None
        self.rx_decoder = None
        # NOTE(review): never read anywhere; set_tx_encoding() assigns
        # "tx_encoder" instead — presumably a leftover typo.
        self.tx_decoder = None

    def _start_reader(self):
        """Start reader thread"""
        self._reader_alive = True
        # start serial->console thread
        self.receiver_thread = threading.Thread(target=self.reader, name='rx')
        self.receiver_thread.daemon = True
        self.receiver_thread.start()

    def _stop_reader(self):
        """Stop reader thread only, wait for clean exit of thread"""
        self._reader_alive = False
        if hasattr(self.serial, 'cancel_read'):
            # Abort a blocking read so the thread can notice the flag.
            self.serial.cancel_read()
        self.receiver_thread.join()

    def start(self):
        """start worker threads"""
        self.alive = True
        self._start_reader()
        # enter console->serial loop
        self.transmitter_thread = threading.Thread(target=self.writer, name='tx')
        self.transmitter_thread.daemon = True
        self.transmitter_thread.start()
        self.console.setup()

    def stop(self):
        """set flag to stop worker threads"""
        self.alive = False

    def join(self, transmit_only=False):
        """wait for worker threads to terminate"""
        self.transmitter_thread.join()
        if not transmit_only:
            if hasattr(self.serial, 'cancel_read'):
                self.serial.cancel_read()
            self.receiver_thread.join()

    def close(self):
        # Release the serial port.
        self.serial.close()

    def update_transformations(self):
        """take list of transformation classes and instantiate them for rx and tx"""
        transformations = [EOL_TRANSFORMATIONS[self.eol]] + [TRANSFORMATIONS[f]
                                                             for f in self.filters]
        self.tx_transformations = [t() for t in transformations]
        # RX applies the same filters in reverse order.
        self.rx_transformations = list(reversed(self.tx_transformations))

    def set_rx_encoding(self, encoding, errors='replace'):
        """set encoding for received data"""
        self.input_encoding = encoding
        self.rx_decoder = codecs.getincrementaldecoder(encoding)(errors)

    def set_tx_encoding(self, encoding, errors='replace'):
        """set encoding for transmitted data"""
        self.output_encoding = encoding
        self.tx_encoder = codecs.getincrementalencoder(encoding)(errors)

    def dump_port_settings(self):
        """Write current settings to sys.stderr"""
        sys.stderr.write("\n--- Settings: {p.name} {p.baudrate},{p.bytesize},{p.parity},{p.stopbits}\n".format(
            p=self.serial))
        sys.stderr.write('--- RTS: {:8} DTR: {:8} BREAK: {:8}\n'.format(
            ('active' if self.serial.rts else 'inactive'),
            ('active' if self.serial.dtr else 'inactive'),
            ('active' if self.serial.break_condition else 'inactive')))
        try:
            sys.stderr.write('--- CTS: {:8} DSR: {:8} RI: {:8} CD: {:8}\n'.format(
                ('active' if self.serial.cts else 'inactive'),
                ('active' if self.serial.dsr else 'inactive'),
                ('active' if self.serial.ri else 'inactive'),
                ('active' if self.serial.cd else 'inactive')))
        except serial.SerialException:
            # on RFC 2217 ports, it can happen if no modem state notification was
            # yet received. ignore this error.
            pass
        sys.stderr.write('--- software flow control: {}\n'.format('active' if self.serial.xonxoff else 'inactive'))
        sys.stderr.write('--- hardware flow control: {}\n'.format('active' if self.serial.rtscts else 'inactive'))
        sys.stderr.write('--- serial input encoding: {}\n'.format(self.input_encoding))
        sys.stderr.write('--- serial output encoding: {}\n'.format(self.output_encoding))
        sys.stderr.write('--- EOL: {}\n'.format(self.eol.upper()))
        sys.stderr.write('--- filters: {}\n'.format(' '.join(self.filters)))

    def reader(self):
        """loop and copy serial->console"""
        try:
            while self.alive and self._reader_alive:
                # read all that is there or wait for one byte
                data = self.serial.read(self.serial.in_waiting or 1)
                if data:
                    if self.raw:
                        self.console.write_bytes(data)
                    else:
                        # Decode incrementally, then run all RX filters.
                        text = self.rx_decoder.decode(data)
                        for transformation in self.rx_transformations:
                            text = transformation.rx(text)
                        self.console.write(text)
        except serial.SerialException:
            self.alive = False
            self.console.cancel()
            raise       # XXX handle instead of re-raise?

    def writer(self):
        """\
        Loop and copy console->serial until self.exit_character character is
        found. When self.menu_character is found, interpret the next key
        locally.
        """
        menu_active = False
        try:
            while self.alive:
                try:
                    c = self.console.getkey()
                except KeyboardInterrupt:
                    # Treat CTRL+C as a literal ETX character.
                    c = '\x03'
                if not self.alive:
                    break
                if menu_active:
                    self.handle_menu_key(c)
                    menu_active = False
                elif c == self.menu_character:
                    menu_active = True      # next char will be for menu
                elif c == self.exit_character:
                    self.stop()             # exit app
                    break
                else:
                    #~ if self.raw:
                    text = c
                    for transformation in self.tx_transformations:
                        text = transformation.tx(text)
                    self.serial.write(self.tx_encoder.encode(text))
                    if self.echo:
                        echo_text = c
                        for transformation in self.tx_transformations:
                            echo_text = transformation.echo(echo_text)
                        self.console.write(echo_text)
        except:
            self.alive = False
            raise

    def handle_menu_key(self, c):
        """Implement a simple menu / settings"""
        if c == self.menu_character or c == self.exit_character:
            # Menu/exit character again -> send itself
            self.serial.write(self.tx_encoder.encode(c))
            if self.echo:
                self.console.write(c)
        elif c == '\x15':                       # CTRL+U -> upload file
            self.upload_file()
        elif c in '\x08hH?':                    # CTRL+H, h, H, ? -> Show help
            sys.stderr.write(self.get_help_text())
        elif c == '\x12':                       # CTRL+R -> Toggle RTS
            self.serial.rts = not self.serial.rts
            sys.stderr.write('--- RTS {} ---\n'.format('active' if self.serial.rts else 'inactive'))
        elif c == '\x04':                       # CTRL+D -> Toggle DTR
            self.serial.dtr = not self.serial.dtr
            sys.stderr.write('--- DTR {} ---\n'.format('active' if self.serial.dtr else 'inactive'))
        elif c == '\x02':                       # CTRL+B -> toggle BREAK condition
            self.serial.break_condition = not self.serial.break_condition
            sys.stderr.write('--- BREAK {} ---\n'.format('active' if self.serial.break_condition else 'inactive'))
        elif c == '\x05':                       # CTRL+E -> toggle local echo
            self.echo = not self.echo
            sys.stderr.write('--- local echo {} ---\n'.format('active' if self.echo else 'inactive'))
        elif c == '\x06':                       # CTRL+F -> edit filters
            self.change_filter()
        elif c == '\x0c':                       # CTRL+L -> cycle to the next EOL mode
            modes = list(EOL_TRANSFORMATIONS)   # keys
            eol = modes.index(self.eol) + 1
            if eol >= len(modes):
                eol = 0
            self.eol = modes[eol]
            sys.stderr.write('--- EOL: {} ---\n'.format(self.eol.upper()))
            self.update_transformations()
        elif c == '\x01':                       # CTRL+A -> set encoding
            self.change_encoding()
        elif c == '\x09':                       # CTRL+I -> info
            self.dump_port_settings()
        #~ elif c == '\x01':                       # CTRL+A -> cycle escape mode
        #~ elif c == '\x0c':                       # CTRL+L -> cycle linefeed mode
        elif c in 'pP':                         # P -> change port
            self.change_port()
        elif c in 'sS':                         # S -> suspend / open port temporarily
            self.suspend_port()
        elif c in 'bB':                         # B -> change baudrate
            self.change_baudrate()
        elif c == '8':                          # 8 -> change to 8 bits
            self.serial.bytesize = serial.EIGHTBITS
            self.dump_port_settings()
        elif c == '7':                          # 7 -> change to 7 bits
            self.serial.bytesize = serial.SEVENBITS
            self.dump_port_settings()
        elif c in 'eE':                         # E -> change to even parity
            self.serial.parity = serial.PARITY_EVEN
            self.dump_port_settings()
        elif c in 'oO':                         # O -> change to odd parity
            self.serial.parity = serial.PARITY_ODD
            self.dump_port_settings()
        elif c in 'mM':                         # M -> change to mark parity
            self.serial.parity = serial.PARITY_MARK
            self.dump_port_settings()
        # NOTE(review): unreachable — 'sS' is already consumed above by the
        # suspend_port() branch, so space parity can never be selected here.
        elif c in 'sS':                         # S -> change to space parity
            self.serial.parity = serial.PARITY_SPACE
            self.dump_port_settings()
        elif c in 'nN':                         # N -> change to no parity
            self.serial.parity = serial.PARITY_NONE
            self.dump_port_settings()
        elif c == '1':                          # 1 -> change to 1 stop bits
            self.serial.stopbits = serial.STOPBITS_ONE
            self.dump_port_settings()
        elif c == '2':                          # 2 -> change to 2 stop bits
            self.serial.stopbits = serial.STOPBITS_TWO
            self.dump_port_settings()
        elif c == '3':                          # 3 -> change to 1.5 stop bits
            self.serial.stopbits = serial.STOPBITS_ONE_POINT_FIVE
            self.dump_port_settings()
        elif c in 'xX':                         # X -> change software flow control
            self.serial.xonxoff = (c == 'X')
            self.dump_port_settings()
        elif c in 'rR':                         # R -> change hardware flow control
            self.serial.rtscts = (c == 'R')
            self.dump_port_settings()
        else:
            sys.stderr.write('--- unknown menu character {} --\n'.format(key_description(c)))

    def upload_file(self):
        """Ask user for filenname and send its contents"""
        sys.stderr.write('\n--- File to upload: ')
        sys.stderr.flush()
        with self.console:
            filename = sys.stdin.readline().rstrip('\r\n')
            if filename:
                try:
                    # Send the file in 1 KiB chunks, draining after each one.
                    with open(filename, 'rb') as f:
                        sys.stderr.write('--- Sending file {} ---\n'.format(filename))
                        while True:
                            block = f.read(1024)
                            if not block:
                                break
                            self.serial.write(block)
                            # Wait for output buffer to drain.
                            self.serial.flush()
                            sys.stderr.write('.')   # Progress indicator.
                    sys.stderr.write('\n--- File {} sent ---\n'.format(filename))
                except IOError as e:
                    sys.stderr.write('--- ERROR opening file {}: {} ---\n'.format(filename, e))

    def change_filter(self):
        """change the i/o transformations"""
        sys.stderr.write('\n--- Available Filters:\n')
        sys.stderr.write('\n'.join(
            '--- {:<10} = {.__doc__}'.format(k, v)
            for k, v in sorted(TRANSFORMATIONS.items())))
        sys.stderr.write('\n--- Enter new filter name(s) [{}]: '.format(' '.join(self.filters)))
        with self.console:
            new_filters = sys.stdin.readline().lower().split()
        if new_filters:
            # Validate every requested filter before applying any of them.
            for f in new_filters:
                if f not in TRANSFORMATIONS:
                    sys.stderr.write('--- unknown filter: {}\n'.format(repr(f)))
                    break
            else:
                self.filters = new_filters
                self.update_transformations()
        sys.stderr.write('--- filters: {}\n'.format(' '.join(self.filters)))

    def change_encoding(self):
        """change encoding on the serial port"""
        sys.stderr.write('\n--- Enter new encoding name [{}]: '.format(self.input_encoding))
        with self.console:
            new_encoding = sys.stdin.readline().strip()
        if new_encoding:
            try:
                # Reject unknown codec names before switching.
                codecs.lookup(new_encoding)
            except LookupError:
                sys.stderr.write('--- invalid encoding name: {}\n'.format(new_encoding))
            else:
                self.set_rx_encoding(new_encoding)
                self.set_tx_encoding(new_encoding)
        sys.stderr.write('--- serial input encoding: {}\n'.format(self.input_encoding))
        sys.stderr.write('--- serial output encoding: {}\n'.format(self.output_encoding))

    def change_baudrate(self):
        """change the baudrate"""
        sys.stderr.write('\n--- Baudrate: ')
        sys.stderr.flush()
        with self.console:
            # Keep the old rate around so an invalid entry can be rolled back.
            backup = self.serial.baudrate
            try:
                self.serial.baudrate = int(sys.stdin.readline().strip())
            except ValueError as e:
                sys.stderr.write('--- ERROR setting baudrate: {} ---\n'.format(e))
                self.serial.baudrate = backup
            else:
                self.dump_port_settings()

    def change_port(self):
        """Have a conversation with the user to change the serial port"""
        with self.console:
            try:
                port = ask_for_port()
            except KeyboardInterrupt:
                port = None
        if port and port != self.serial.port:
            # reader thread needs to be shut down
            self._stop_reader()
            # save settings
            settings = self.serial.getSettingsDict()
            try:
                new_serial = serial.serial_for_url(port, do_not_open=True)
                # restore settings and open
                new_serial.applySettingsDict(settings)
                new_serial.rts = self.serial.rts
                new_serial.dtr = self.serial.dtr
                new_serial.open()
                new_serial.break_condition = self.serial.break_condition
            except Exception as e:
                sys.stderr.write('--- ERROR opening new port: {} ---\n'.format(e))
                new_serial.close()
            else:
                # Only swap to the new port once it opened successfully.
                self.serial.close()
                self.serial = new_serial
                sys.stderr.write('--- Port changed to: {} ---\n'.format(self.serial.port))
            # and restart the reader thread
            self._start_reader()

    def suspend_port(self):
        """\
        open port temporarily, allow reconnect, exit and port change to get
        out of the loop
        """
        # reader thread needs to be shut down
        self._stop_reader()
        self.serial.close()
        sys.stderr.write('\n--- Port closed: {} ---\n'.format(self.serial.port))
        do_change_port = False
        while not self.serial.is_open:
            sys.stderr.write('--- Quit: {exit} | p: port change | any other key to reconnect ---\n'.format(
                exit=key_description(self.exit_character)))
            k = self.console.getkey()
            if k == self.exit_character:
                self.stop()             # exit app
                break
            elif k in 'pP':
                do_change_port = True
                break
            try:
                self.serial.open()
            except Exception as e:
                sys.stderr.write('--- ERROR opening port: {} ---\n'.format(e))
        if do_change_port:
            self.change_port()
        else:
            # and restart the reader thread
            self._start_reader()
            sys.stderr.write('--- Port opened: {} ---\n'.format(self.serial.port))

    def get_help_text(self):
        """return the help text"""
        # help text, starts with blank line!
        return """
--- pySerial ({version}) - miniterm - help
---
--- {exit:8} Exit program
--- {menu:8} Menu escape key, followed by:
--- Menu keys:
--- {menu:7} Send the menu character itself to remote
--- {exit:7} Send the exit character itself to remote
--- {info:7} Show info
--- {upload:7} Upload file (prompt will be shown)
--- {repr:7} encoding
--- {filter:7} edit filters
--- Toggles:
--- {rts:7} RTS {dtr:7} DTR {brk:7} BREAK
--- {echo:7} echo {eol:7} EOL
---
--- Port settings ({menu} followed by the following):
--- p change port
--- 7 8 set data bits
--- N E O S M change parity (None, Even, Odd, Space, Mark)
--- 1 2 3 set stop bits (1, 2, 1.5)
--- b change baud rate
--- x X disable/enable software flow control
--- r R disable/enable hardware flow control
""".format(version=getattr(serial, 'VERSION', 'unknown version'),
           exit=key_description(self.exit_character),
           menu=key_description(self.menu_character),
           rts=key_description('\x12'),
           dtr=key_description('\x04'),
           brk=key_description('\x02'),
           echo=key_description('\x05'),
           info=key_description('\x09'),
           upload=key_description('\x15'),
           repr=key_description('\x01'),
           filter=key_description('\x06'),
           eol=key_description('\x0c'))
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# default args can be used to override when calling main() from an other script
# e.g to create a miniterm-my-device.py
def main(default_port=None, default_baudrate=9600, default_rts=None, default_dtr=None):
    """Command line tool, entry point"""
    import argparse

    parser = argparse.ArgumentParser(
        description="Miniterm - A simple terminal program for the serial port.")

    parser.add_argument(
        "port",
        nargs='?',
        help="serial port name ('-' to show port list)",
        default=default_port)

    parser.add_argument(
        "baudrate",
        nargs='?',
        type=int,
        help="set baud rate, default: %(default)s",
        default=default_baudrate)

    group = parser.add_argument_group("port settings")

    group.add_argument(
        "--parity",
        choices=['N', 'E', 'O', 'S', 'M'],
        type=lambda c: c.upper(),
        help="set parity, one of {N E O S M}, default: N",
        default='N')

    group.add_argument(
        "--rtscts",
        action="store_true",
        help="enable RTS/CTS flow control (default off)",
        default=False)

    group.add_argument(
        "--xonxoff",
        action="store_true",
        help="enable software flow control (default off)",
        default=False)

    group.add_argument(
        "--rts",
        type=int,
        help="set initial RTS line state (possible values: 0, 1)",
        default=default_rts)

    group.add_argument(
        "--dtr",
        type=int,
        help="set initial DTR line state (possible values: 0, 1)",
        default=default_dtr)

    group.add_argument(
        "--ask",
        action="store_true",
        help="ask again for port when open fails",
        default=False)

    group = parser.add_argument_group("data handling")

    group.add_argument(
        "-e", "--echo",
        action="store_true",
        help="enable local echo (default off)",
        default=False)

    group.add_argument(
        "--encoding",
        dest="serial_port_encoding",
        metavar="CODEC",
        help="set the encoding for the serial port (e.g. hexlify, Latin1, UTF-8), default: %(default)s",
        default='UTF-8')

    group.add_argument(
        "-f", "--filter",
        action="append",
        metavar="NAME",
        help="add text transformation",
        default=[])

    group.add_argument(
        "--eol",
        choices=['CR', 'LF', 'CRLF'],
        type=lambda c: c.upper(),
        help="end of line mode",
        default='CRLF')

    group.add_argument(
        "--raw",
        action="store_true",
        help="Do no apply any encodings/transformations",
        default=False)

    group = parser.add_argument_group("hotkeys")

    group.add_argument(
        "--exit-char",
        type=int,
        metavar='NUM',
        help="Unicode of special character that is used to exit the application, default: %(default)s",
        default=0x1d)  # GS/CTRL+]

    group.add_argument(
        "--menu-char",
        type=int,
        metavar='NUM',
        help="Unicode code of special character that is used to control miniterm (menu), default: %(default)s",
        default=0x14)  # Menu: CTRL+T

    group = parser.add_argument_group("diagnostics")

    group.add_argument(
        "-q", "--quiet",
        action="store_true",
        help="suppress non-error messages",
        default=False)

    group.add_argument(
        "--develop",
        action="store_true",
        help="show Python traceback on error",
        default=False)

    args = parser.parse_args()

    # Menu and exit key must differ, otherwise the menu is unreachable.
    if args.menu_char == args.exit_char:
        parser.error('--exit-char can not be the same as --menu-char')

    if args.filter:
        if 'help' in args.filter:
            # "-f help" lists the available filters and exits.
            sys.stderr.write('Available filters:\n')
            sys.stderr.write('\n'.join(
                '{:<10} = {.__doc__}'.format(k, v)
                for k, v in sorted(TRANSFORMATIONS.items())))
            sys.stderr.write('\n')
            sys.exit(1)
        filters = args.filter
    else:
        filters = ['default']

    # Retry loop: with --ask, a failed open re-prompts for a port.
    while True:
        # no port given on command line -> ask user now
        if args.port is None or args.port == '-':
            try:
                args.port = ask_for_port()
            except KeyboardInterrupt:
                sys.stderr.write('\n')
                parser.error('user aborted and port is not given')
        else:
            if not args.port:
                parser.error('port is not given')
        try:
            serial_instance = serial.serial_for_url(
                args.port,
                args.baudrate,
                parity=args.parity,
                rtscts=args.rtscts,
                xonxoff=args.xonxoff,
                do_not_open=True)

            if not hasattr(serial_instance, 'cancel_read'):
                # enable timeout for alive flag polling if cancel_read is not available
                serial_instance.timeout = 1

            if args.dtr is not None:
                if not args.quiet:
                    sys.stderr.write('--- forcing DTR {}\n'.format('active' if args.dtr else 'inactive'))
                serial_instance.dtr = args.dtr
            if args.rts is not None:
                if not args.quiet:
                    sys.stderr.write('--- forcing RTS {}\n'.format('active' if args.rts else 'inactive'))
                serial_instance.rts = args.rts

            serial_instance.open()
        except serial.SerialException as e:
            sys.stderr.write('could not open port {}: {}\n'.format(repr(args.port), e))
            if args.develop:
                raise
            if not args.ask:
                sys.exit(1)
            else:
                # Forget the failed port name and prompt again.
                args.port = '-'
        else:
            break

    miniterm = Miniterm(
        serial_instance,
        echo=args.echo,
        eol=args.eol.lower(),
        filters=filters)
    # Convert the numeric hotkey options into one-character strings.
    miniterm.exit_character = unichr(args.exit_char)
    miniterm.menu_character = unichr(args.menu_char)
    miniterm.raw = args.raw
    miniterm.set_rx_encoding(args.serial_port_encoding)
    miniterm.set_tx_encoding(args.serial_port_encoding)

    if not args.quiet:
        sys.stderr.write('--- Miniterm on {p.name} {p.baudrate},{p.bytesize},{p.parity},{p.stopbits} ---\n'.format(
            p=miniterm.serial))
        sys.stderr.write('--- Quit: {} | Menu: {} | Help: {} followed by {} ---\n'.format(
            key_description(miniterm.exit_character),
            key_description(miniterm.menu_character),
            key_description(miniterm.menu_character),
            key_description('\x08')))

    miniterm.start()
    try:
        # Wait for the transmitter first; CTRL+C just stops waiting.
        miniterm.join(True)
    except KeyboardInterrupt:
        pass
    if not args.quiet:
        sys.stderr.write("\n--- exit ---\n")
    miniterm.join()
    miniterm.close()
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Run the terminal application when this module is executed as a script.
if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
c0925dc7a9640a76a8d62fa371ea8e94e76af828 | 535a174b976ec82d54c742cb1fc75687168144fb | /commands/__init__.py | f97228cca1a67516639aee507d235d64f7b778fc | [] | no_license | lalacat/crawler | 11fa9bf56920dcc7348a5d912a9eac91c9453efd | 9c1236a410fa339fe998418cbfdd4c0ba7eb7d27 | refs/heads/master | 2021-06-04T00:58:27.492919 | 2020-04-01T14:03:49 | 2020-04-01T14:03:49 | 130,938,082 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,967 | py | import argparse
class BaseCommand(object):
    """
    Base class for commands; subclasses briefly describe the usage of each
    command via short_desc()/long_desc().
    """

    def __init__(self,setting = None):
        # Settings container used by procss_option(); expected to expose
        # set(name, value, priority) and an "attributes" mapping.
        self.setting = setting

    def short_desc(self):
        # Subclasses override this with a one-line description of the command.
        pass

    def long_desc(self):
        return "Test for Long desc"

    # Callback that prints the program version.
    # NOTE(review): this has an optparse-style (option, opt_str, value, parser)
    # signature, but the module imports argparse — confirm whether it is still
    # wired up anywhere. The message also says "vision" where "version" is
    # presumably meant.
    def _print_vision(self,option,opt_str,value,parser):
        print("This vision is 0.0.0")

    def add_options(self,parser):
        """
        Register the basic (global) command line options on *parser*.
        """
        group = parser.add_argument_group( "Global Options")
        group.add_argument("--logfile", metavar="FILE",
                           help="log file. if omitted stderr will be used")
        group.add_argument("--nolog", action="store_true",
                           help="disable logging completely")
        group.add_argument("--profile", metavar="FILE", default=None,
                           help="write python cProfile stats to FILE")
        group.add_argument("--pidfile", metavar="FILE",
                           help="write process ID to FILE")
        group.add_argument("--version", action="version", version="version 0.0",help="command vision")
        # When handling this option, the collected values still need a helper
        # that turns each '-s key=value' pair into a dict entry.
        group.add_argument("-s", "--set", action="append", default=[], metavar="NAME=VALUE",
                           help="set/override setting (may be repeated)")
        print("global options")

    # Copy the parsed command line options into the settings object with
    # "cmdline" priority.
    # NOTE(review): name looks like a typo for "process_option"; kept as-is
    # because callers may rely on it.
    def procss_option(self, arg):
        if arg.logfile is not None:
            self.setting.set("logfile", arg.logfile, "cmdline")
            print(self.setting.attributes["logfile"].priority)
        if arg.nolog:
            self.setting.set("nolog", True, "cmdline")
        if arg.pidfile is not None:
            self.setting.set("pidfile", arg.pidfile, "cmdline")
        if arg.profile is not None:
            self.setting.set("profile", arg.profile, "cmdline")
"scott.si@hotmailcom"
] | scott.si@hotmailcom |
de5bfd5baf736fa68ca8e5884c4e9094199c87ff | abda61b9cde643ba2e076386701490531e909697 | /Final_Algorithm/final_mod.py | 1e677fb8b5e2398248df1311bd781cff6a150769 | [] | no_license | anassaeed72/Topics | d1e6d05e809d9eba858343ee04efcc78f9c083f5 | 0b37751ca6cffe9dc2ce052eb9f3d1d9e7e64af5 | refs/heads/master | 2021-01-10T13:48:54.621750 | 2016-05-12T06:35:30 | 2016-05-12T06:35:30 | 53,721,813 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,307 | py | from proximity import ProximitySearch
import random
import os
from history import*
from scan import*
import sys
from relays import get_relays
from geoip import geolite2
import stem.control
import ipgetter
from midpoint import midpointCalculator
import shutil
from UserURLHistory import getFetechableURLsFromPage
from test2 import totalDistance
# Connect to the local Tor control port; "controller" is used module-wide.
# NOTE(review): the "with" block exits immediately, and stem closes the
# control connection on __exit__ — yet "controller" keeps being used below
# (e.g. controller.authenticate() in main()). Confirm this actually works.
with stem.control.Controller.from_port() as controller:
    pass

# Get the complete page including all src urls embedded in the page
num_relays = 5
# ====================================================================================
# Get every content of a page and savee in a folder with name of
# website (eg. yahoo, facebook). If folder already exists it'll be deleted
# ====================================================================================
def get_page(url, controller, circuit, results, distance):
    """Download *url* through *circuit* and time the full page fetch.

    A directory named after the second dot-separated label of the URL
    (e.g. "yahoo" for "https://www.yahoo.com") is recreated from scratch,
    the page body saved there, and every embedded src URL is fetched too.
    Returns the page download time plus the time to fetch the embedded
    resources in parallel.

    NOTE(review): *results* and *distance* are accepted but never used in
    this function; the two file handles are never closed; and os.chdir()
    permanently changes the process working directory — confirm intended.
    """
    hostname = url.split(".")[1]
    path = os.path.join(os.getcwd(), hostname)
    # Wipe any previous capture directory for this host, then recreate it.
    if (os.path.exists(path)):
        shutil.rmtree(path)
    os.mkdir(path)
    os.chdir(path)
    fd = open(hostname + ".html", "w")
    fd_read = open(hostname + ".html", "r")
    time_taken = scan(controller, circuit, url, fd)
    fetchable = getFetechableURLsFromPage(fd_read.read())
    # De-duplicate the discovered resource URLs.
    fetchable = list(set(fetchable))
    urls = map(convert_src_to_url, fetchable)
    time = query_parallel(urls) + time_taken
    return time
# convert from for "src="xyz"" to xyz
def convert_src_to_url(str):
    """Return the bare URL from a 'src="..."' attribute string."""
    # Drop the leading 'src="' (5 characters) and the trailing quote.
    return str[5:-1]
# ====================================================================================
# This function tests n random circuits from the results of proximity search
# and returns the best circuit as measured by taking average of getting the head
# ====================================================================================
def get_best_circuit(url, controller, entry, middle, exit, n):
    """Probe up to *n* random circuits and return the fastest path found.

    entry, middle and exit are lists of candidate pairs whose first element
    is a relay fingerprint (passed to controller.new_circuit) and whose
    second element is its location.

    Returns the fastest path as a list of those pairs, -1 when a HEAD probe
    reports a hard failure, or None when no circuit could be measured.
    """
    best_path = None
    best_time = 1000000  # sentinel, larger than any expected fetch time
    count = 0
    for x in range(0, n):
        # Two successfully measured circuits are enough; stop early.
        if count == 2:
            break
        entry_relay = entry[random.randint(0, len(entry) - 1)]
        exit_relay = exit[random.randint(0, len(exit) - 1)]
        middle_relay = middle[random.randint(0, len(middle) - 1)]
        path = [entry_relay[0], middle_relay[0], exit_relay[0]]
        path_with_locations = [entry_relay, middle_relay, exit_relay]
        try:
            circuit_id = controller.new_circuit(path, await_build=True)
            circuit = controller.get_circuit(circuit_id)
            print("Accessing Head")
            time = scan_head(controller, circuit, url)
            if time == -1:
                # Hard failure from the probe; propagate it to the caller.
                # (The original also had an unreachable `continue` here.)
                return -1
            if time < best_time:
                # BUGFIX: record the new best time. Previously best_time was
                # never updated, so every successful circuit overwrote
                # best_path and the *last* one always "won".
                best_time = time
                best_path = path_with_locations
            count = count + 1
            controller.close_circuit(circuit_id)
        except stem.CircuitExtensionFailed:
            # Circuit failed to build; try the next random combination.
            continue
    return best_path
def readinFile():
    """Read the domain list file and return its lines (without newlines)."""
    handle = open("ListOfDomains2.csv")
    try:
        return handle.read().splitlines()
    finally:
        handle.close()
def main():
    """Authenticate to the Tor controller and run both experiments.

    NOTE(review): experiment_smartor/experiment_tor currently have no
    explicit return value, so the comparison below operates on None —
    confirm they are meant to return a timing.
    """
    # history = get_top_visited(get_history(), 10)
    # history = ["ask.com", "tumblr.com"]
    controller.authenticate()
    # experiment_smartor(history)
    history = ["yahoo.com"]
    time_1 = experiment_smartor(history)
    time_2 = experiment_tor(history)
    if time_1 > time_2:
        # BUGFIX: the original swap assigned time_2 = time_1 *after*
        # overwriting time_1 (leaving temp unused), so both variables ended
        # up holding the same value. Use a proper tuple swap.
        time_1, time_2 = time_2, time_1
    print("Smartor: " + str(time_1))
    print("Tor: " + str(time_2))
# =====================================================================================
# Run the experiment using our algorithm.
# =====================================================================================
def experiment_smartor(history):
    """Run the proximity-guided circuit experiment for each URL in *history*.

    For every site: look up our own and the destination's geolocation, pick
    candidate entry/middle/exit relays near the relevant points, probe random
    circuits with get_best_circuit(), then fetch the full page over the best
    circuit and log via get_page().

    NOTE(review): this function returns None, but main() assigns its result
    to a timing variable — confirm it should return a time.
    """
    # Results file is opened in append mode and never closed.
    results_smartor = open("results_smartor.txt", "a")
    relays = get_relays(controller)
    entry = relays[0];
    middle = relays[1];
    exit = relays[2];
    myIP = ipgetter.myip();
    my_Address = geolite2.lookup(socket.gethostbyname(myIP))
    for url in history:
        dest_Address = geolite2.lookup(socket.gethostbyname(url))
        if (dest_Address == None):
            print("Couldn't get location of ", url)
            continue
        # Get list of fingerprints for exit nodes
        exit_nodes = get_relays_fingerprint(num_relays, exit, dest_Address.location)
        entry_nodes = get_relays_fingerprint(num_relays, entry, my_Address.location)
        middleLocation = midpointCalculator(dest_Address.location, my_Address.location)
        # NOTE(review): middleLocation is computed but never used — the middle
        # relays are searched around my_Address.location instead; this looks
        # like it should pass middleLocation. Confirm.
        middle_nodes = get_relays_fingerprint(num_relays, middle, my_Address.location)
        url = 'https://www.' + url
        path_with_locations = get_best_circuit(url, controller, entry_nodes, middle_nodes, exit_nodes, 10)
        if path_with_locations == -1:
            continue
        # Total great-circle distance of the full client->relays->server path.
        locations = [my_Address.location] + [x[1] for x in path_with_locations] + [dest_Address.location]
        distance = totalDistance(locations)
        best_path = [x[0] for x in path_with_locations]
        print("best path ", best_path)
        circuit_id = controller.new_circuit(best_path, await_build = True)
        test = controller.get_circuit(circuit_id)
        print 'Accessing url: ' + url
        get_page(url, controller, test, results_smartor, distance)
# =====================================================================================
# Run the experiment using tor
# =====================================================================================
def experiment_tor(history):
    """Baseline experiment: fetch each URL over an existing vanilla Tor circuit.

    Uses the first already-built circuit with more than two hops, computes
    the total path distance from geolocated relay IPs, and appends
    "distance,time" lines to results_tor.txt.

    NOTE(review): returns None, but main() treats the result as a timing;
    also, if no circuit has len(path) > 2, "path"/"circ" are unbound and the
    code below raises NameError. Confirm both.
    """
    # Results file is opened in append mode and never closed.
    results_tor = open("results_tor.txt", "a")
    myIP = ipgetter.myip();
    my_Address = geolite2.lookup(socket.gethostbyname(myIP))
    for url in history:
        dest_Address = geolite2.lookup(socket.gethostbyname(url))
        if (dest_Address == None):
            print("Couldn't get location of ", url)
            continue
        url = 'https://www.' + url
        test = controller.get_circuits()
        # Pick the first established circuit with a full 3-hop path.
        for circuit in test:
            if (len(circuit.path) > 2):
                path = circuit.path
                circ = circuit
                break
        print path
        # test = path
        res_list = [controller.get_network_status(x[0]).address for x in path] # Get ip addresses from fingerprints
        # print res_list
        locations_relay = [geolite2.lookup(x).location for x in res_list] # Do lookups
        # print locations_relay
        locations = [my_Address.location] + locations_relay + [dest_Address.location]
        distance = totalDistance(locations)
        time = get_page(url, controller, circ, results_tor, distance)
        if (time != -1):
            results_tor.write(str(distance) + "," + str(time) + "\n")
# =====================================================================================
# Given a dictionary of relays and a location, get n closest
# relays(fingerprints) in a list
# =====================================================================================
def get_relays_fingerprint(n, relays, location):
    """Find the *n* relays nearest to *location*.

    Returns a list of (relays[key], key) pairs, one per nearby key reported
    by the proximity search over the *relays* mapping.
    """
    proximity = ProximitySearch(relays)
    nearby_keys = proximity.get_points_nearby(location, n)
    return [(relays[key], key) for key in nearby_keys]
# Entry point when run as a script.
if __name__ == "__main__":
    main()
| [
"[email protected]"
] | |
35cefe7148d9cc9fafa089ca70fc3dfe6504e2be | 4afb696bc7bee257a57e468d67f153767e3f67c2 | /StockAnalysis.py | bc7ed441c076bc42d59d0cc8b2356f556cbe0929 | [] | no_license | MuzzzammilMia/StockAnalysis | 0c5ed15af77293a9fd5e51e255d12dd226be2c6d | e719258e2bfcb59a70afd2140ba03cae9509c6a3 | refs/heads/master | 2022-11-13T16:46:01.973754 | 2020-06-28T16:29:10 | 2020-06-28T16:29:10 | 265,611,223 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,912 | py | #!/usr/bin/env python
# coding: utf-8
# In[28]:
import pandas as pd
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
print('Pandas version:',pd.__version__)
print('Matplotlib version:',mpl.__version__)
print('Numpy version:',np.__version__)
# In[22]:

# Load the ticker lookup table and normalise company names so user input
# can be matched case- and punctuation-insensitively.
CompanyCheck = pd.read_csv("ticker_list.csv")
CompanyCheck['Name'] = CompanyCheck['Name'].str.replace('.', '')
CompanyCheck['Name'] = CompanyCheck['Name'].str.replace(' ', '')
CompanyCheck['Name'] = CompanyCheck['Name'].str.lower()
CompanyCheck.head()

# In[23]:

# Normalise the user's company name the same way as the table above.
ChosenCompany = input("Please enter a company name:")
ChosenCompany = ChosenCompany.lower()
ChosenCompany = ChosenCompany.replace('.', '')
ChosenCompany = ChosenCompany.replace(' ', '')
print(ChosenCompany)

# In[24]:

# Look up the ticker symbol(s) for the chosen company.
TickValue = CompanyCheck[CompanyCheck['Name'] == ChosenCompany]["Ticker"].values
if len(TickValue) == 0:
    # Robustness fix: an unknown company previously crashed later with an
    # IndexError on TickValue[0]; fail early with a clear message instead.
    raise SystemExit("No ticker found for company: " + ChosenCompany)
print(TickValue)

# In[33]:

# Acceptable time params in order for the API to receive information
Intervals = ["1min", "5min", "15min", "30min", "45min", "1h", "2h", "4h", "1day", "1week", "1month"]
Time = input("Please enter a timeframe: ")
NumCalls = input("Please choose the number of calls:")

# In[37]:

# API
# NOTE(review): TDClient is never imported in this file — running it needs
# "from twelvedata import TDClient"; confirm against the original notebook.
td = TDClient(apikey="password")  # Removed the API key
# BUGFIX: the interval argument referenced an undefined lowercase name
# "time"; the user's chosen timeframe is stored in "Time".
ts = td.time_series(symbol=TickValue[0], interval=Time, outputsize=NumCalls, timezone="America/New_York",)
# Returning a dataFrame with the time series data
Stock1 = ts.as_pandas()

# In[35]:

# Plotting high/low values
plt.figure()
hl = Stock1.loc[:, "high":"low"]
hl.plot(linewidth=1)
# Formatting the graph
plt.xlabel('Time')
plt.grid(True)
plt.ylabel('Cost measured in dollars ($)')
plt.title('High/Low values : {}'.format(ChosenCompany))
| [
"[email protected]"
] | |
43866c23e7957b764f0b579688d0275579b2fd44 | ef2e2a40c9e03173ee936f6672a90a794db5b2a0 | /app/search.py | 5dbfba175888cd77005d66737abc91a5e3083ee9 | [] | no_license | crazynayan/flask-tutorial | fc2fbc3bd7e7f30d48dd2abce5ea05ef3168fc6b | 6e51323bf086cadd39a4860388e07b047b8c6fbe | refs/heads/master | 2022-12-13T23:13:08.832155 | 2019-10-30T12:16:54 | 2019-10-30T12:16:54 | 182,255,340 | 0 | 0 | null | 2022-12-08T05:01:38 | 2019-04-19T11:36:10 | Python | UTF-8 | Python | false | false | 969 | py | from flask import current_app
def add_to_index(index, model):
    """Index *model*'s searchable fields in the given Elasticsearch index.

    A no-op when the application has no Elasticsearch client configured.
    """
    if not current_app.elasticsearch:
        return
    payload = {field: getattr(model, field) for field in model.__searchable__}
    current_app.elasticsearch.index(index=index, id=model.id, body=payload)
def remove_from_index(index, model):
    """Delete *model*'s document from the given Elasticsearch index.

    A no-op when the application has no Elasticsearch client configured.
    """
    if not current_app.elasticsearch:
        return
    # BUGFIX: the attribute was misspelled "elaseticsearch", which raised
    # AttributeError whenever a document was deleted with search enabled.
    current_app.elasticsearch.delete(index=index, id=model.id)
def query_index(index, query, page, per_page):
    """Full-text search *index* for *query* and return (ids, total).

    ids is the list of matching object ids for the requested page; total is
    the overall hit count.
    """
    if not current_app.elasticsearch:
        # Bug fix: a bare ``return`` made callers that unpack the result
        # (``ids, total = query_index(...)``) crash with a TypeError when
        # Elasticsearch is not configured; return an empty result instead.
        return [], 0
    query_body = {
        'query': {
            'multi_match': {
                'query': query,
                'fields': ['*'],
            },
        },
        # Elasticsearch paginates with from/size offsets.
        'from': (page - 1) * per_page,
        'size': per_page,
    }
    search = current_app.elasticsearch.search(index=index, body=query_body)
    ids = [int(hit['_id']) for hit in search['hits']['hits']]
    return ids, search['hits']['total']['value']
"[email protected]"
] | |
3a2127cf485882ad716605f78202ae8536f46498 | f453897fccafc2278f959010c6bad52c7802a2fe | /sidebarUpdate.py | ec7becd648760176a127d1c08e6db75bb5c76b28 | [] | no_license | ColinHaley/Python | 4977c325c13652251386e5a5e3f65d55a3f13a07 | bbef9fc8c4e1d31fe5e1142cf7506fc4738295dd | refs/heads/master | 2021-01-25T08:28:17.231365 | 2018-05-09T21:46:32 | 2018-05-09T21:46:32 | 42,951,804 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,866 | py | """
__author__ = 'Colin Haley, aka Kazra'
__purpose__ = 'Update the /r/asov sidebar with online players from asov Vanilla'
Steps:
1. Create upload variables: [string]CSS, [string]Sidebar
2. Get current players
a. If 0:
i. Clear Sidebar Playerheads
ii. Set to "No Players Online."
ii. Exit()
b. If >= 1:
i. For each player online:
- If their img exists in /data && newer than GETDATE()-3:
1. Add Strings to CSS and Sidebar variables.
- If not:
1. If older than GETDATE()-7, delete old playerhead icon.
2. wget or python equivalent to ~/srv/_config/data/ their player head icon
3. rename from 32.png to playername.png
4. Upload image
- Update Users table with:
1. UPDATE Users set Timestamp = NOW() WHERE Username = 'playername'
# Other Resources
http://cravatar.us/head/__playername__/32.png
Even unclaimed names return a 'Steve' head, no error handling needed? Dangerzone
https://www.reddit.com/dev/api
#POST_api_upload_sr_img
#POST_api_delete_sr_img
https://github.com/reddit/reddit/wiki/OAuth2
# Mandatory External Libraries
Praw: https://gist.github.com/shrayasr/100005943
Mcstatus: https://github.com/Dinnerbone/mcstatus
"""
# Imports
import praw
import time
import datetime
from mcstatus import MinecraftServer
import urllib
#Static Variables
__clientID__ = 'redditClientID'
__secretkey__ = 'redditSecretKey'
__subreddit__ = 'subredditName'
__username__ = 'redditUsername'
__password__ = 'redditPassword'
__serveraddress__ = 'minecraftAddress'
# Bug fix: the value was missing entirely ("__serverport__ = " followed by a
# comment), which is a SyntaxError. Default to the standard Minecraft port;
# replace with your server's RCON/query port.
__serverport__ = 25565
__datadirectory__ = '/dir/to/location/to/store/playerheads'
# Section to display playerheads within on the sidebar on reddit.
__sidebarheader__ = '[](/STARTONLINEPLAYERS)'
__sidebarfooter__ = '[](/ENDONLINEPLAYERS)'
# Header for CSS to update playerheads online.
__cssheader__ = '/* END ONLINE PLAYER HEADS DO NOT DELETE OR MOVE FROM HEADER POSITION */'
def generate_css(playerName):
    """Return the CSS rule that shows *playerName*'s head after their link.

    Format: a[href="/playername"]:after { content: url(%%playername%%) }
    """
    # Bug fix: the closing double quote of the href attribute was missing,
    # producing the invalid selector a[href="/name]:after.
    return 'a[href="/' + playerName + '"]:after { content: url(%%' + playerName + '%%) }'
def generate_sidebar(playerName):
    """Return the sidebar markdown token "[](/playername)" for one player."""
    return "[](/{})".format(playerName)
def clear_sidebar():
    """Remove everything between the sidebar header and footer markers.

    NOTE(review): assumes the global praw session ``r`` exists and that
    get_settings() exposes the sidebar text under 'Description' — confirm the
    key casing against the installed praw version.
    """
    sidebar = r.get_settings(__subreddit__)['Description']
    # Bug fix: the original slice expression had unbalanced brackets and a
    # missing '+' (SyntaxError). Keep the text up to the end of the header
    # marker, then everything from the footer marker onward.
    start = sidebar.index(__sidebarheader__) + len(__sidebarheader__)
    end = sidebar.index(__sidebarfooter__)
    clearString = sidebar[:start] + sidebar[end:]
    r.update_settings(r.get_subreddit(__subreddit__), description=clearString)
def get_css():
    """Return the subreddit's current stylesheet via the global praw session."""
    return r.get_stylesheet(__subreddit__)
def clear_css():
    """Drop the generated playerhead CSS, keeping the stylesheet from the
    marker comment onward.

    Bug fix: ``r.set_stylesheet(__subreddit__, [__header__:])`` was a
    SyntaxError and referenced the undefined name ``__header__``.
    NOTE(review): this assumes the generated rules are inserted *before*
    __cssheader__ — confirm against upload_css_to_reddit's insertion point.
    """
    subCSS = get_css()
    r.set_stylesheet(__subreddit__, subCSS[subCSS.index(__cssheader__):])
def upload_css_to_reddit(stringCSS):
    # takes .join() list of generateCSS(playername) as a string for upload
    # NOTE(review): set_stylesheet replaces the entire stylesheet, so the
    # caller must pass the full stylesheet text, not just the generated rules.
    r.set_stylesheet(__subreddit__, stringCSS)
def upload_sidebar_to_reddit(stringSidebar):
    # takes .join() list of generateSidebar(playername) as a string for upload
    # Bug fix: the original body contained only this comment, which is an
    # IndentationError. Insert the generated links right after the sidebar
    # header marker.
    # NOTE(review): the 'Description' settings key mirrors clear_sidebar;
    # confirm the casing against the installed praw version.
    sidebar = r.get_settings(__subreddit__)['Description']
    insert_at = sidebar.index(__sidebarheader__) + len(__sidebarheader__)
    updated = sidebar[:insert_at] + stringSidebar + sidebar[insert_at:]
    r.update_settings(r.get_subreddit(__subreddit__), description=updated)
def getCurrentPlayers():
    """Query the Minecraft server; return {'Count': online, 'Players': names}.

    Exits the process if the query fails.
    """
    server = MinecraftServer(__serveraddress__, __serverport__)
    try:
        result = server.query()
        players = result.players
        return {'Count': players.online, 'Players': players.names}
    except:
        exit()
def download_playerhead(playername):
    # grabs a player head from cravatar to the data folder.
    # Bug fix: the variable was defined as ``savepath`` but used as
    # ``savePath``, raising NameError on every call.
    # NOTE(review): urllib.urlretrieve is the Python 2 API (matching the rest
    # of this script); on Python 3 it lives in urllib.request.
    downloadPath = 'http://cravatar.eu/head/' + playername + '/32.png'
    savePath = __datadirectory__ + playername + '.png'
    urllib.urlretrieve(downloadPath, savePath)
def upload_image_to_reddit(playername):
    # Upload the cached head image for *playername* as a subreddit image named
    # after the player (consumed by the %%playername%% token in the CSS).
    __imagedir__ = __datadirectory__ + playername + '.png'
    r.upload_image(__subreddit__, __imagedir__, playername)
def delete_image_from_reddit(playername):
    # Remove the subreddit image previously uploaded for *playername*.
    r.delete_image(__subreddit__, name=playername, header=False)
def parse_players_from_sidebar():
    """Query the server; return the online player names, or clear the sidebar
    and CSS (and return []) when nobody is online. Exits on query failure.

    Bug fix: the original ``def`` line and the ``else`` were missing their
    colons (SyntaxError) and the if-branch body was empty.
    """
    # Get the players online from the server via RCON
    # if unsure of the address use MinecraftServer.lookup()
    server = MinecraftServer(__serveraddress__, __serverport__)
    try:
        query = server.query()
        if query.players.online > 0:
            return query.players.names
        else:
            # Nobody online: drop the generated playerheads from the sidebar
            # and the stylesheet.
            clear_css()
            clear_sidebar()
            return []
    except:
        exit()
#Define the Praw useragent
# Bug fix: every function above uses a praw session ``r`` that was never
# created, so the whole module raised NameError at runtime. Instantiate and
# authenticate it here.
# NOTE(review): written against the legacy praw (<4) API that the rest of the
# script targets (get_settings / set_stylesheet / login) — confirm the
# installed praw version.
r = praw.Reddit(user_agent='subreddit sidebar updater')
r.login(__username__, __password__)
settings = r.get_settings(__subreddit__)
| [
"[email protected]"
] | |
ebbdd594ec1e0b143441c4a911fcf81481ed0acf | 4ae1879c21a4193da3df6ae740674ee0655a8beb | /drawDeviation.py | a8b9efe078feb123768f809991f2275a25cac77e | [] | no_license | cynerelee/collision-avoidance | 68bccce1a54009ce7b3bee1bf2adc571b6cde956 | c269b7040b68b91eb5e7e1134feb8363da1091f0 | refs/heads/master | 2023-07-09T02:40:23.760176 | 2023-06-24T03:44:02 | 2023-06-24T03:44:02 | 281,842,101 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,147 | py | import matplotlib.pyplot as plt
import matplotlib
import numpy as np
import xlrd #读取excel的库
# Plot deviation-vs-time curves: a baseline series from deviation_k1.xlsx and
# four K2 sweeps from deviation_k3.xlsx, sampled every 0.01 s over [0, 2] s.
x=np.arange(0, 2.01,0.01)
#print(x)
#print(x.shape)
# Baseline curve: first column of the first sheet of deviation_k1.xlsx.
data1 = xlrd.open_workbook("deviation_k1.xlsx")
table1 = data1.sheet_by_index(0)
line=table1.col_values(0)
base=np.array(line)
base=base.T
resArray=[] # start with an empty list
data = xlrd.open_workbook("deviation_k3.xlsx") # open the workbook
table = data.sheet_by_index(0) # get the worksheet by index; 0 is sheet 1
for i in range(table.nrows): # table.nrows is the total row count
    line=table.row_values(i) # read each row into line (a list)
    resArray.append(line) # append line to resArray (a 2-D list)
resArray=np.array(resArray) # convert the 2-D list into an ndarray
# Font settings for axis labels (font1) and the legend (font2).
font1 = {'family' : 'Times New Roman',
'weight' : 'normal',
'size':15,
}
font2 = {'family' : 'Times New Roman',
'weight' : 'normal',
'size':10,
}
# Colorblind-friendly palette; one color per curve.
color=['#377eb8', '#ff7f00', '#4daf4a','#f781bf', '#a65628', '#984ea3','#999999', '#e41a1c']
alpha=0.6
figure, ax = plt.subplots()
# Configure matplotlib to render CJK text and minus signs correctly.
matplotlib.rcParams['font.sans-serif']=['SimHei'] # use SimHei for CJK glyphs
matplotlib.rcParams['axes.unicode_minus']=False # render minus signs normally
# x-axis label
plt.xlabel("Time(s)",font1)
# y-axis label
plt.ylabel("Deviation(cm)",font1)
plt.axis([0, 2, 0, 6])
plt.tick_params(labelsize=15)
plt.xticks([0,0.2,0.4,0.6,0.8,1,1.2,1.4,1.6,1.8,2])
plt.yticks([0,1,2,3,4,5,6])
labels = ax.get_xticklabels() + ax.get_yticklabels()
[label.set_fontname('Times New Roman') for label in labels]
# figure title (disabled)
#plt.title("频数/频率分布直方图")
#plt.legend(loc = 'upper right',prop=font2)
# NOTE(review): assumes x (201 samples) matches the row counts read from the
# spreadsheets — confirm the workbook dimensions.
plt.plot(x, base,alpha=0.6,label='Baseline',color=color[0],linewidth=2)
plt.plot(x, resArray[:,1],alpha=0.6,label='K2=0.1',color=color[1],linewidth=2)
plt.plot(x, resArray[:,2],alpha=0.6,label='K2=1',color=color[2],linewidth=2)
plt.plot(x, resArray[:,3],alpha=0.6,label='K2=5',color=color[3],linewidth=2)
plt.plot(x, resArray[:,4],alpha=0.6,label='K2=10',color=color[4],linewidth=2)
plt.legend(loc = 0,prop=font2)
plt.savefig('./Deviation_k3.png')
plt.show()
"l"
] | l |
b94636c5ce40cd95b97b2b35ed36b2306822ab9e | 6ab977ddb640969e208abdfb8870f2e0736deafc | /advent2017/day17.py | b779114f0752af2f0cbed65df1af4c11ea3d7662 | [] | no_license | nessalc/AdventOfCode | c78fa81dc360d9538a211eaddef6cee39a9dce49 | f71ca4810d536b1a2025b20c34afb6f99155ba85 | refs/heads/master | 2022-12-18T19:08:07.679129 | 2022-12-16T22:44:05 | 2022-12-16T22:44:05 | 75,259,238 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 510 | py | #Advent of Code 2017
#Day 17: Spinlock
from collections import deque
def fill_buffer(steps, iterations):
    """Run the AoC 2017 day-17 "spinlock" and return the final buffer.

    Starting from deque([0]), perform *iterations* insertions: rotate the
    buffer left by *steps*, then append the next value — so the value just
    inserted always sits at the right end, and buffer[0] is the value that
    follows it in the circular buffer. Prints a progress dot every million
    insertions.
    """
    # Fix (idiom): dropped the unused local ``idx`` from the original.
    buffer = deque([0])
    for i in range(1, iterations + 1):
        buffer.rotate(-steps)
        buffer.append(i)
        if i % 1000000 == 0:
            print('.', end='')
    return buffer
if __name__=='__main__':
    # test17 is the example step size (unused below); input17 is the puzzle
    # input step size.
    test17=3
    input17=370
    # Part 1: the answer is the value immediately after the last inserted
    # value (2017), i.e. buffer[0] since the rotation keeps the just-inserted
    # value at the right end.
    b=fill_buffer(input17,2017)
    print('Part 1: {}'.format(b[0]))
    # Part 2: the value immediately after 0 in the 50M-element buffer. Safe:
    # the last element is always the most recent insertion (>= 1), so 0 is
    # never at the right end and index(0)+1 stays in range.
    b=fill_buffer(input17,50000000)
    print('Part 2: {}'.format(b[b.index(0)+1]))
| [
"[email protected]"
] | |
cfc06a8572219937335727054137cb9dc4acbf0c | f3fd05416adb2932222d4e5b4fea42d57eb6f6d0 | /.ipynb_checkpoints/interactive-checkpoint.py | 4f1903e9761bb71761402bc894609edaa5ab3534 | [] | no_license | anurag-ux/covid-19-analysis | 4accaf356299d65512b039d2c4a3f37e0cd7f0ca | a8141923adb1c63e42f411050336f450e3f8c8d5 | refs/heads/master | 2022-04-23T21:32:49.251502 | 2020-04-27T09:32:06 | 2020-04-27T09:32:06 | 259,219,828 | 0 | 0 | null | 2020-04-27T06:19:19 | 2020-04-27T05:59:53 | Jupyter Notebook | UTF-8 | Python | false | false | 1,311 | py | import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import plotly.graph_objects as go
def setup():
    """Load covid_19_india.csv and build per-state time series.

    Populates module globals used by show_graph(): df (the full DataFrame
    with parsed dates), st_x (one Date series per state, in the order of
    df['State/UnionTerritory'].unique()) and conf_y (matching Confirmed
    series).
    """
    global df
    global st_x
    global conf_y
    df=pd.read_csv('covid_19_india.csv')
    # Dates in the CSV are day-first two-digit-year strings.
    df['Date']=pd.to_datetime(df['Date'],format="%d/%m/%y")
    st_x=[]
    conf_y=[]
    for state in df['State/UnionTerritory'].unique():
        st_x.append(df[df['State/UnionTerritory']==state]['Date'])
        conf_y.append(df[df['State/UnionTerritory']==state]['Confirmed'])
def show_graph(state):
    """Plot confirmed-case curves with plotly.

    *state* is either 'all' or a space-separated list of state names; relies
    on the module globals df/st_x/conf_y populated by setup().
    """
    fig1 = go.Figure()
    fig1.update_layout(title="Confirmed Cases",xaxis_title="Date",yaxis_title="Number of Cases",font=dict( family="Courier New, monospace",size=18,color="#096291" ))
    if(state!='all'):
        st=state.split()
        for s in st:
            # Index of this state within the unique-state ordering that
            # setup() used to build st_x/conf_y.
            i=np.where(df['State/UnionTerritory'].unique()==s)[0][0]
            fig1.add_trace(go.Scatter(x=st_x[i],y=conf_y[i],mode='lines+markers',name=df['State/UnionTerritory'].unique()[i]))
    else:
        # NOTE(review): hard-codes 35 states/UTs; len(st_x) would stay in
        # sync with the dataset automatically.
        for i in range(35):
            fig1.add_trace(go.Scatter(x=st_x[i],y=conf_y[i],mode='lines+markers',name=df['State/UnionTerritory'].unique()[i]))
    fig1.show()
if __name__ == "__main__":
    # Prompt for one or more space-separated state names (or "all"), load the
    # dataset, then plot.
    state=input('Enter State name(s) or type all to view every state ')
    setup()
    show_graph(state)
| [
"[email protected]"
] | |
e6c3c35d2126d046113a15b927b79a265d45938f | d0d9cfbdb391e7ff3bb7cb4ae3d34c80f46b35bf | /ghaaspy/postgres.py | 05367ca39a51d8de024c5ef3d00f28529b1ef52e | [] | no_license | dvignoles/ghaaspy | ed977cd752d9d1c982d57be993d5ffe7e30c60bc | af97d99224e1be11d8228af1b2768a1e363c41a9 | refs/heads/master | 2023-06-15T12:26:44.371930 | 2021-06-29T16:05:48 | 2021-06-29T16:05:48 | 345,821,745 | 0 | 0 | null | 2021-06-29T16:05:49 | 2021-03-08T23:15:09 | Python | UTF-8 | Python | false | false | 3,248 | py | from pathlib import Path
from psycopg2 import sql, connect
class PostgresDB:
def __init__(self, database=None, user='postgres', password='admin', host='localhost', port=5432, verify=True):
self.database = database
self.host = host
self.port = int(port)
self.user = user
self.password = password
if verify:
try:
self.conn = connect(
dbname=self.database,
user=self.user,
host=self.host,
port=self.port,
password=self.password
)
except Exception as err:
print("psycopg2 connect() ERROR:", err)
self.conn = None
@classmethod
def from_pgpass(cls, idsubstring, pgpass=Path.home().joinpath('.pgpass').resolve(), verify=True):
"""Use postgres password file as source of postgres connection. See https://www.postgresql.org/docs/current/libpq-pgpass.html.
Entries are of form hostname:port:database:username:password
Args:
idsubstring (str): identifying substring ie database, hostname:port:database, hostname:port:databse:username
pgpass (Path, optional): Path of .pgpass file. Defaults to Path.home().joinpath('.pgpass').resolve().
Raises:
FileNotFoundError: if pgpass not valid
Returns:
PostgresDB: class instance
"""
if not pgpass.exists():
raise FileNotFoundError(".pgpass file not found at {}".format(pgpass))
with open(pgpass, 'r') as f:
credentials = f.read().splitlines()
matches = []
for c in credentials:
if idsubstring in c:
matches.append(c)
assert(len(matches) == 1)
host, port, db, user, password = matches[0].split(':')
return cls(database=db, user=user, password=password, host=host, port=port, verify=verify)
@classmethod
def from_gdal_string(cls, gdal_pg, verify=True):
"""Get PostgresDB instance form gdal driver style string.
Args:
gdal_pg (str): postgres connection str https://gdal.org/drivers/vector/pg.html
verify (bool, optional): Throw error if not valid connection. Defaults to True.
Returns:
PostgresDB : class instance
"""
db = {part.split('=')[0]:part.split('=')[1] for part in gdal_pg.split()}
assert(
('dbname' in db) &
('host' in db) &
('port' in db) &
('user' in db) &
('password' in db)
)
return cls(database=db['dbname'], user=db['user'], password=db['password'], host=db['host'], port=db['port'], verify=verify)
def get_gdal_string(self):
"""Return gdal driver format postgres connection string
https://gdal.org/drivers/vector/pg.html
PG:"dbname='databasename' host='addr' port='5432' user='x' password='y'"
Returns:
[type]: [description]
"""
return "dbname={} host={} port={} user={} password={}".format(self.database, self.host, self.port, self.user, self.password) | [
"[email protected]"
] | |
cf01fa55f942fa2fcd66150ed980df7a693d2f4f | 1923b16ad09b44272b330598d10ab444b5834773 | /Basic_test/pa.py | 820bc320af8641353d8d9499984d3786b0ec256b | [] | no_license | akhilakr06/pythonluminar | 4b9503312cfd09ef8609c12e4052143ed00fb51c | 72aa56eee078d7e5929dea0e74f7b9b01c6fef17 | refs/heads/master | 2023-08-17T11:02:13.228181 | 2021-09-20T05:49:17 | 2021-09-20T05:49:17 | 402,320,777 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 538 | py | # row=5
# for i in range(row+1):
# for j in range(i):
# print(i,end=" ")
# print('')
# for i in range(row+1):
# for j in range(i):
# print(i,end=" ")
# print('')
#
a=int(input("initial value"))
b=int(input("final value"))
# Number of rows in each printed triangle.
r=5
# For every number i in [a, b): even i prints a decreasing triangle (rows of
# length r down to 1), odd i an increasing one (rows of length 1 up to r),
# using i itself as the fill value.
for i in range(a,b):
    if(i%2==0):
        for k in range(r,0,-1):
            for j in range(0,k):
                print(i,end=" ")
            print()
    else:
        for l in range(r):
            for m in range(0,l+1):
                print(i,end=" ")
            print()
"[email protected]"
] | |
41f76bde5c8c6d1b8115d6a7b484d3d798e330a9 | 9870351dff92683882eb6f4e8e27edf29f8e2560 | /bookmarks/common/decorators.py | 56a1de4d1522053090a5b6d329941e20a3ba543b | [] | no_license | peterniyon/paithoni | cf86878ad8dc12c6e20189ef036d6ede888381df | 97fb26b8cf8d6f747ef4420f2e6a0fb9e52a4aab | refs/heads/master | 2022-07-09T03:13:17.545373 | 2020-05-19T14:19:27 | 2020-05-19T14:19:27 | 265,268,723 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 309 | py | from django.http import HttpResponseBadRequest
def ajax_required(f):
def wrap(request, *args, **kwargs):
if not request.is_ajax():
return HttpResponseBadRequest()
return f(request, *args, **kwargs)
wrap.__doc__=f.__doc__
wrap.__name__=f.__name__
return wrap
| [
"[email protected]"
] | |
bb6a962fee8f976bdd835956ef211574d904f51f | 6c1b604de2a212c148149d9011855c19d2dfd63d | /jaCloud.py | 3c5a7f1b03abc4cef7c9b891dec1ad655780d16e | [] | no_license | ryoheimatsumo/slack_bot | 48d38278ea4533bc942575b58ec04afcadee6190 | 337aa0151ac3062eded178210eb0e2e220c5c665 | refs/heads/master | 2022-07-16T14:48:20.855342 | 2020-05-13T12:08:11 | 2020-05-13T12:08:11 | 263,618,877 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,817 | py | from MeCab import Tagger
import matplotlib.pyplot as plt
from wordcloud import WordCloud
t = Tagger()
text = """
Wherever you are
作詞:Taka 作曲:ONE OK ROCK
ONE OK ROCK - Wherever you are
I'm telling you, oh yeah
I softly whisper
Tonight tonight
You are my angel
愛してるよ
2人は一つに
Tonight tonight
I just say…
Wherever you are, I always make you smile
Wherever you are, I'm always by your side
Whatever you say, 君を思う氣持ち
I promise you「forever」right now
I don't need a reason, oh yeah
I just want you baby
Alright alright
Day after day
この先長いことずっと uh yeah
どうかこんな僕とずっと
死ぬまで Stay with me
We carry on…
Wherever you are, I always make you smile
Wherever you are, I'm always by your side
Whatever you say, 君を思う氣持ち
I promise you「forever」right now
Wherever you are, I never make you cry
Wherever you are, I never say goodbye
Whatever you say, 君を思う氣持ち
I promise you「forever」right now
僕らが出逢った日は2人にとって
一番目の記念すべき日だね
そして今日という日は2人にとって
二番目の記念すべき日だね
心から愛せる人
心から愛しい人
この僕の愛の真ん中には
いつも心(きみ)がいるから
Wherever you are, I always make you smile
Wherever you are, I'm always by your side
Whatever you say, 君を思う氣持ち
I promise you「forever」right now
Wherever you are, wherever you are
Wherever you are
"""
# Tokenise the lyrics with MeCab, keeping only each token's surface form
# (first tab-separated field); the trailing EOS line is dropped.
# NOTE(review): ``splitted`` is never used below — WordCloud is fed the raw
# ``text``, which its regexp tokeniser cannot split for Japanese; this was
# presumably meant to be wc.generate(splitted). Confirm intent.
splitted = " ".join([x.split("\t")[0] for x in t.parse(text).splitlines()[:-1]])
text2="この僕の愛の真ん中には"
# Debug output: MeCab's analysis of a single sample line.
print(t.parse(text2))
# Build the word cloud with a CJK-capable font so Japanese glyphs render.
wc = WordCloud(font_path="/Users/matsumotoryouhei/Downloads/Noto-unhinted/NotoSansCJKjp-Regular.otf", regexp="[\w']+")
wc.generate(text)
plt.imshow(wc)
plt.show()
| [
"[email protected]"
] | |
e6fc3f88aad6cd1b16f5989145c60723173c18f8 | cedc585c5fba9b3f09d41ec959eb512edb978089 | /IndependentAllels.py | ee163e7b497a6c2d3f0f770af3dcc82edb55e393 | [] | no_license | chernovsergey/rosalind | 4024f863fb3d642d81df2b82c072856842c26166 | 193a26c7a383895afb373e14c44000d0dfd6ba09 | refs/heads/master | 2021-01-01T17:28:05.386689 | 2015-02-27T18:49:47 | 2015-02-27T18:49:47 | 28,854,692 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 434 | py | from scipy.special._ufuncs import binom
__author__ = 'sergey'
def P(n, k):
    """Probability that exactly n of the 2**k generation-k organisms are Aa Bb.

    Each offspring is Aa Bb with probability 1/4, independently of the others
    (Rosalind "Independent Alleles").
    """
    population = 2 ** k
    return binom(population, n) * (0.25 ** n) * (0.75 ** (population - n))
def Solve(n, k):
    """Return P(at least n Aa Bb organisms among the 2**k in generation k).

    Bug fix: the sum previously iterated ``range(N)`` — a module-level global
    that also shadowed the parameter ``n`` inside the comprehension — so the
    function only worked when invoked exactly as Solve(N, k) from __main__.
    It now depends only on its arguments (same result for that call).
    """
    return 1 - sum(P(i, k) for i in range(n))
if __name__ == '__main__':
    # Rosalind LIA driver: read k (generations) and N (minimum count) from the
    # dataset file, then print the probability rounded to 3 decimals.
    data = 0
    k = 0
    N = 0
    with open('IndependentAlleles.txt') as f:
        data = f.read().strip().split()
    k, N = map(int, data)
    # Python 2 print statements — this script targets Python 2.
    print k, N
    print round(Solve(N, k), 3)
"[email protected]"
] | |
4a2d723ff34579a40e5a5ed814bf5a4a854501cd | 2eb9c98a99f74ef0e03260609406d3cd644620e9 | /test.py | a3bcb2d2b63366fda1812be23044888cbaa3a651 | [] | no_license | DHdroid/HearMe | ec1326f23cc728fe167ce352cbed78b0d5d90f1a | 9e579e59c09fd4e146c5e6c226a9668dab24e7b5 | refs/heads/main | 2023-03-19T18:50:55.717852 | 2021-03-09T07:08:05 | 2021-03-09T07:08:05 | 346,954,512 | 1 | 0 | null | 2021-03-12T05:45:19 | 2021-03-12T05:45:18 | null | UTF-8 | Python | false | false | 3,067 | py | Future<dynamic> speechToText(File file) async {
final bytes = file.readAsBytesSync();
var uri = Uri.parse("https://westus.stt.speech.microsoft.com/speech/recognition/conversation/cognitiveservices/v1?language=en-US");
var request = new http.Request("POST", uri)
..headers['Ocp-Apim-Subscription-Key'] = "d10dd8eff0e145eead43c5a63b808d1e"
..headers['Content-Type'] = "audio/wav"
..bodyBytes = bytes;
var response = await request.send();
print(request);
print(response.statusCode);
response.stream.transform(utf8.decoder).listen((value) {
print(value);
});
return text;
}
// Create a new text-independent speaker-identification profile via the Azure
// Speaker Recognition REST API.
// NOTE(review): the subscription key is hard-coded — move it to configuration.
// NOTE(review): the `file` parameter is never used here.
// NOTE(review): `..bodyFields['locale'] = 'en-us'` mutates the map returned by
// the bodyFields getter on a JSON-typed Request; confirm this actually sets
// the body against package:http's Request API (it likely throws at runtime).
// NOTE(review): `profileid` is not declared in this scope — this function does
// not compile as written.
Future<dynamic> addProfile(File file) async {
  var uri = Uri.parse("https://westus.api.cognitive.microsoft.com/sts/v1.0/issuetoken/speaker/identification/v2.0/text-independent/profiles");
  var request = new http.Request("POST", uri)
    ..headers['Ocp-Apim-Subscription-Key'] = "d10dd8eff0e145eead43c5a63b808d1e"
    ..headers['Content-Type'] = "application/json"
    ..bodyFields['locale'] = 'en-us';
  var response = await request.send();
  print(request);
  print(response.statusCode);
  response.stream.transform(utf8.decoder).listen((value) {
    print(value);
  });
  return profileid;
}
// Upload a WAV enrollment sample for an existing speaker profile.
// NOTE(review): '{profileid}' inside the URL string is *literal* text — Dart
// interpolation requires `$profileid`; as written the request targets a
// profile literally named "{profileid}".
// NOTE(review): the subscription key is hard-coded — move it to configuration.
Future<dynamic> enrollProfile(File file, profileid) async {
  final bytes = file.readAsBytesSync();
  var uri = Uri.parse('https://westus.api.cognitive.microsoft.com/sts/v1.0/issuetoken/speaker/identification/v2.0/text-independent/profiles/{profileid}/enrollments');
  var request = new http.Request("POST", uri)
    ..headers['Ocp-Apim-Subscription-Key'] = "d10dd8eff0e145eead43c5a63b808d1e"
    ..headers['Content-Type'] = "audio/wav"
    ..bodyBytes = bytes;
  var response = await request.send();
  print(request);
  print(response.statusCode);
  response.stream.transform(utf8.decoder).listen((value) {
    print(value);
  });
}
// Identify the speaker in a WAV sample against a set of enrolled profiles.
// NOTE(review): '{profileid}' in the query string is literal text — Dart
// interpolation requires `$profileid`.
// NOTE(review): `return profileid, score;` is not valid Dart (no comma
// expressions) and neither name is declared here — this does not compile as
// written; the identification result should instead be parsed from the
// response stream.
// NOTE(review): the subscription key is hard-coded — move it to configuration.
Future<dynamic> identifyProfile(File file) async {
  final bytes = file.readAsBytesSync();
  var uri = Uri.parse('https://westus.api.cognitive.microsoft.com/sts/v1.0/issuetoken/speaker/identification/v2.0/text-independent/profiles/identifySingleSpeaker?profileIds={profileid}');
  var request = new http.Request("POST", uri)
    ..headers['Ocp-Apim-Subscription-Key'] = "d10dd8eff0e145eead43c5a63b808d1e"
    ..headers['Content-Type'] = "audio/wav"
    ..bodyBytes = bytes;
  var response = await request.send();
  print(request);
  print(response.statusCode);
  response.stream.transform(utf8.decoder).listen((value) {
    print(value);
  });
  return profileid, score;
}
// Delete a speaker-identification profile via the Azure Speaker Recognition
// REST API.
// Fixes: the function name contained a space ("delete Profile" — a syntax
// error), the header cascade was missing its terminating ';', `bytes` was
// read from an undefined `file` and never used, and profile deletion uses the
// HTTP DELETE method rather than POST (per the Azure Speaker Recognition API).
// NOTE(review): the subscription key is hard-coded — move it to configuration
// — and INSERT_PROFILE_ID_HERE must be replaced with the target profile id.
Future<dynamic> deleteProfile() async {
  var uri = Uri.parse('https://westus.api.cognitive.microsoft.com/sts/v1.0/issuetoken/speaker/identification/v2.0/text-independent/profiles/INSERT_PROFILE_ID_HERE');
  var request = new http.Request("DELETE", uri)
    ..headers['Ocp-Apim-Subscription-Key'] = "d10dd8eff0e145eead43c5a63b808d1e";
  var response = await request.send();
  print(request);
  print(response.statusCode);
  response.stream.transform(utf8.decoder).listen((value) {
    print(value);
  });
}
"[email protected]"
] | |
be10b6bc0c9150c1fba18f808a0eefbde924ab5c | 734bccdcbaaef2ca12e6ff3526aa054d5dbcb9ef | /XSum-Topic-ConvS2S/fairseq/modules/__init__.py | cf36d19bfabc7885cc2179464045d778afa200aa | [
"MIT",
"BSD-3-Clause"
] | permissive | artidoro/XSum | 6215340c36013c4568e42f23132a6173e3c57912 | 29730d742914111175cebd0c769115e1b10f1b85 | refs/heads/master | 2020-09-24T01:10:34.930958 | 2019-12-03T21:32:40 | 2019-12-03T21:32:40 | 225,626,229 | 1 | 0 | MIT | 2019-12-03T13:26:30 | 2019-12-03T13:26:30 | null | UTF-8 | Python | false | false | 650 | py | # Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the LICENSE file in
# the root directory of this source tree. An additional grant of patent rights
# can be found in the PATENTS file in the same directory.
from .beamable_mm import BeamableMM
from .conv_tbc import ConvTBC
from .grad_multiply import GradMultiply
from .learned_positional_embedding import LearnedPositionalEmbedding
from .linearized_convolution import LinearizedConvolution
__all__ = [
'BeamableMM',
'ConvTBC',
'GradMultiply',
'LearnedPositionalEmbedding',
'LinearizedConvolution',
]
| [
"[email protected]"
] | |
f68f506b70b8c396f6fb4f61e09bdc790912ba44 | c7a332a0e3b0e31e7369922e4e2dc052e21f2c0e | /backend/venv/bin/easy_install | f2bacc67a305de92b1b0b32c2e5d3dda6c2d474a | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | JonathanLimZS/JPMorgan-Code-For-Good-2019-Team2 | 7cd014db755636588d6060ddf6e956d1b6f50a42 | cfe179eca15ec8cb6b5f772a97ee719aedc04093 | refs/heads/master | 2020-09-05T12:34:00.066531 | 2019-10-29T18:57:42 | 2019-10-29T18:57:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 277 | #!/Users/wayne/Documents/GitHub/team-2/backend/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    # setuptools-generated console-script shim: strip the "-script.py(w)" /
    # ".exe" suffix from argv[0] so easy_install sees its canonical program
    # name, then delegate to the setuptools entry point and propagate its
    # exit status.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"[email protected]"
] | ||
d0c7e8f9d398295ea760aa1b3cc7b658fb54a81b | 36cd6cd6b6f4fb984e00774f54f5f65e3e12a943 | /Siamese_Loader.py | b134a69bc2ecc2fd4cbf88fdd985efe9f80775ca | [] | no_license | deyachatterjee/KagglePersonalizedMedicineText | e72964d02a056771f6a16d71d9ad4db22bcdb0c7 | a92ba6ba4dd4de94f14c60be5e962c07a79b20b2 | refs/heads/master | 2020-04-22T23:07:56.633710 | 2018-03-04T01:12:03 | 2018-03-04T01:12:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,598 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Sep 9 11:44:42 2017
@author: suresh
"""
import numpy as np
import numpy.random as rng
from sklearn.utils import shuffle
class Siamese_Loader:
    """Serves pair batches and N-way one-shot tasks for a Siamese network.

    Xtrain/Xval are pandas DataFrames (one row per example, one column per
    feature); Ytrain/Yval are integer class labels in [0, n_classes).
    """

    def __init__(self, Xtrain, Xval, Ytrain, Yval, n_classes):
        self.data = {}
        self.categories = {}
        self.data["train"] = np.array(Xtrain)
        self.categories["train"] = Ytrain
        self.data["val"] = np.array(Xval)
        self.categories["val"] = Yval
        self.n_classes = n_classes
        self.ndim = len(Xtrain.columns)
        # Per-split examples-per-class counts, used to draw valid row indices.
        # Bug fix: the original set self.YtrainClassBins = np.bincount(Yval),
        # i.e. *validation* counts were used to sample *training* rows, which
        # can index out of range when the split sizes differ.
        self.classBins = {"train": np.bincount(Ytrain),
                          "val": np.bincount(Yval)}
        # Kept for backward compatibility with external readers; now actually
        # holds the train counts its name promises.
        self.YtrainClassBins = self.classBins["train"]

    def get_batch(self, n, s="train"):
        """Create a batch of n pairs: first half different-class (target 0),
        second half same-class (target 1). Returns ([left, right], targets)."""
        X = self.data[s]
        Y = self.categories[s]
        bins = self.classBins[s]
        categories = rng.choice(self.n_classes, size=(n,), replace=True)
        pairs = [np.zeros((n, self.ndim)) for _ in range(2)]
        targets = np.zeros((n,))
        targets[n // 2:] = 1
        for i in range(n):
            category = categories[i]
            # Random row drawn from this category's examples.
            idx_1 = rng.randint(0, bins[category])
            pairs[0][i, :] = X[Y == category][idx_1]
            # Same class for the second half of the batch; a different class
            # (offset by 1..n_classes-1, mod n_classes) for the first half.
            category_2 = category if i >= n // 2 else \
                (category + rng.randint(1, self.n_classes)) % self.n_classes
            idx_2 = rng.randint(0, bins[category_2])
            pairs[1][i, :] = X[Y == category_2][idx_2]
        return pairs, targets

    def make_oneshot_task(self, N, s="val"):
        """Create one N-way one-shot task: the query example repeated N times
        paired with a support set containing exactly one same-class example."""
        X = self.data[s]
        Y = self.categories[s]
        true_category = rng.randint(0, self.n_classes)
        # Two distinct rows of the true class: one as the query, one planted
        # in the support set.
        ex1, ex2 = rng.choice(X[Y == true_category].shape[0], replace=False,
                              size=(2,))
        test_image = np.vstack([X[Y == true_category][ex1]] * N)
        indices = rng.randint(0, len(X[Y != true_category]), size=(N,))
        support_set = X[Y != true_category][indices, :]
        support_set[0, :] = X[Y == true_category][ex2, :]
        targets = np.zeros((N,))
        targets[0] = 1
        # Shuffle targets/query/support with one shared permutation
        # (equivalent to sklearn.utils.shuffle, without the dependency).
        perm = rng.permutation(N)
        targets = targets[perm]
        test_image = test_image[perm]
        support_set = support_set[perm]
        pairs = [test_image, support_set]
        return pairs, targets

    def test_oneshot(self, model, N, k, s="val", verbose=0):
        """Return the percentage of k N-way one-shot tasks the model solves."""
        n_correct = 0
        if verbose:
            print("Evaluating model on {} unique {} way one-shot learning tasks ...".format(k, N))
        for i in range(k):
            inputs, targets = self.make_oneshot_task(N, s)
            probs = model.predict(inputs)
            if np.argmax(probs) == np.argmax(targets):
                n_correct += 1
        percent_correct = (100.0 * n_correct / k)
        if verbose:
            print("Got an average of {}% {} way one-shot learning accuracy".format(percent_correct, N))
        return percent_correct
"[email protected]"
] | |
a3b305ed929f1b6f60c1ce7b611b5ddeaa5aba79 | 822f34c3d908fae26ea7f08a3d557f1e40e0a57c | /6/main.py | 76804bb8389e8db88b386f0774c019bd00a43c90 | [] | no_license | astory-vik/lab6 | ee84e9ac5873c04e073c7c144914d8b0a0ca9feb | 309edae3efa8a8a84a4d021a1962758aaebc026f | refs/heads/master | 2023-01-22T16:20:36.747057 | 2020-11-20T13:58:00 | 2020-11-20T13:58:00 | 314,569,020 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 789 | py | import requests
from bs4 import BeautifulSoup
url = "https://news.liga.net/"
def main():
    """Fetch the liga.net front page and print simple content statistics:
    unique-word count per headline, total link count and image count."""
    html = GetHtml(url)
    soup = BeautifulSoup(html, "html.parser")
    link = []
    news = []
    # Headline containers on the front page.
    link = soup.find_all("div", class_="news-nth-title")
    for i in link:
        news.append(i.find('a').text)
    for k in news:
        # "Number of words in the news item" — count of *unique* words.
        print("Количество слов в новосте " + str(len(set(k.split()))))
    numInt = []
    # "Number of links" / "Number of images" on the page.
    numInt = soup.find_all("a")
    print("Количество ссылок " + str(len(numInt)))
    print("Количество изображений " + str(len(soup.find_all("img"))))
def GetHtml(url):
    """Fetch *url*; return the response body on HTTP 200, otherwise print
    "Fail" and return None."""
    response = requests.get(url)
    if response.status_code != 200:
        print("Fail")
        return None
    return response.text
if __name__ == '__main__':
main() | [
"[email protected]"
] | |
d9e0b891cffbffce226b0db23a07df61215be4a1 | e9656d837dea040cd2bfdbba3b541fe94800315c | /pyautobuild_slidev/main.py | d0175c5b04e24404d11eef3530ce45ba0d3f0eb7 | [
"MIT"
] | permissive | mcoops/container | 7aba77c92d9a9719339548581a059590ec76359c | 95174aecf0ed5ac051f319e87c86c7bf9714e8f0 | refs/heads/main | 2023-07-05T19:50:40.128232 | 2021-08-12T16:01:05 | 2021-08-12T16:01:05 | 390,958,429 | 0 | 0 | MIT | 2021-07-30T06:35:26 | 2021-07-30T06:35:25 | null | UTF-8 | Python | false | false | 1,933 | py | #!/usr/bin/env python3
import requests as curl
import subprocess
def getreleasegh():
    """Return the newest slidev tag from the GitHub API as a zero-stripped
    digit string (e.g. 'v0.21.2' -> '212'); implicitly returns None on a
    non-200 response."""
    owner = 'slidevjs/'
    repo = 'slidev/'
    search = 'tags'
    url = "https://api.github.com/repos/" + owner + repo + search
    with curl.get(url) as r:
        if r.status_code == 200:
            j = r.json()
            # Tags are returned newest-first; take the first entry.
            release = str(j[0]['name'])
            release = release.replace("v", "").replace(".", "").lstrip('0')
            return release
def getactualimage():
    """Return the newest real stig124/slidev Docker Hub tag as a zero-stripped
    digit string, skipping 'latest' and '-buster' variants; implicitly
    returns None on a non-200 response or when no candidate is found in the
    first 10 tags."""
    owner = 'stig124/'
    repo = 'slidev/'
    search = 'tags'
    url = 'https://registry.hub.docker.com/v2/repositories/' + owner + repo + search
    with curl.get(url) as r:
        if r.status_code == 200:
            j = r.json()
            # Only the first page (10 tags) is inspected.
            for i in range(10):
                image = str(j['results'][i]['name'])
                if image != 'latest' and '-buster' not in image:
                    image = image.replace(".", "").lstrip('0')
                    return image
def checknpm():
    """Search npms.io for slidev and return (zero-stripped digits, dotted
    version), e.g. ('212', '0.21.2'); implicitly returns None on a non-200
    response or no match in the first 5 results."""
    base = 'https://api.npms.io/v2/search?q='
    package = 'slidev'
    url = base + package
    with curl.get(url) as r:
        if r.status_code == 200:
            j = r.json()
            for i in range(5):
                # NOTE(review): matches 'slidev' against the package *scope*
                # field — confirm this shouldn't be the package name instead.
                if package in str(j['results'][i]['package']['scope']):
                    npm = str(j['results'][i]['package']['version'])
                    npm2 = npm.replace(".", "").lstrip('0')
                    return npm2, npm
def process(imv, ghv, npv, rv):
    """Decide whether the Docker image needs rebuilding and act on it.

    imv/ghv/npv are zero-stripped digit strings produced by getactualimage /
    getreleasegh / checknpm; rv is the dotted npm version passed to the build
    script. Exits 0 when up to date, 4 on build failure, 6 while waiting for
    npm to publish the release.

    Bug fix: versions were compared as *strings*, so e.g. "31" < "2110"
    evaluated False (lexicographic ordering); compare numerically instead.
    """
    if int(imv) == int(ghv):
        print("Nothing to do")
        exit(0)
    elif int(imv) < int(ghv):
        if int(ghv) == int(npv):
            print("Build")
            cmd = "build_slidev " + rv
            try:
                subprocess.check_call(cmd, shell=True)
            except subprocess.CalledProcessError:
                print("Script failure")
                exit(4)
        else:
            print("Wating for NPM to catch up")
            exit(6)
    # NOTE(review): an image *newer* than the GitHub release falls through
    # and returns None silently, as in the original.
if __name__ == "__main__":
    # Compare the Docker Hub image tag, the GitHub release and the npm
    # version, then build the image if it is stale.
    imv = getactualimage()
    ghv = getreleasegh()
    npv, rv = checknpm()
    process(imv, ghv, npv, rv)
| [
"[email protected]"
] | |
f24942104a030a0925c4947eaf99b2672eadd724 | bf8c8f718e1025bd86e3e0a5716e63d8b9c532ed | /bot/cogs/personal.py | e27c0e9e9c540477cef7a53d1a3e0538537b78d7 | [
"MIT"
] | permissive | iGaming2/rammus-discord-bot | 816c569ba47c8c14a1d911dd7c23653f503e9051 | 04d5ff4141ccccfeccdbb91fd1a4d72496e43e13 | refs/heads/master | 2020-04-21T14:01:23.548765 | 2019-01-31T05:02:30 | 2019-01-31T05:02:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,078 | py | import random
import discord
from discord.ext import commands
import bot.checks
from bot.resources import PACER_TEST
class Personal:
    def __init__(self, bot):
        # Bot instance and the playful suffix appended to every reply.
        self.bot = bot
        self.append = " `:^)`"
    async def msg(self, ctx, message):
        # Send *message* to the invoking channel with the cog's suffix
        # appended.
        await ctx.send(message + self.append)
# ace
@commands.command(hidden=True)
@bot.checks.is_member(155625382748356608)
async def ace(self, ctx):
await self.msg(ctx, "nabei look what look")
# akey
@commands.command(hidden=True)
@commands.bot_has_permissions(manage_nicknames=True)
@bot.checks.is_member(474170410213048331)
async def akey(self, ctx):
if ctx.author.display_name != "ASIAN":
await ctx.author.edit(nick="ASIAN")
await self.msg(ctx, ":white_check_mark: Successfully changed this "
"Asian's name")
else:
await self.msg(ctx, ":x: No need to change this Asian's name" +
self.append)
# archy
@commands.command(hidden=True)
@bot.checks.is_member(205107664533848065)
async def archy(self, ctx):
options = [
f"{ctx.author.mention} is a lesbian",
PACER_TEST
]
option = random.choice(options)
await self.msg(ctx, option)
# astaris
@commands.command(hidden=True)
@bot.checks.is_member(192974987513036800)
async def astaris(self, ctx):
options = [
"Astaris is big bolly today",
"Astaris isn't a big bolly today"
]
option = random.choice(options)
await self.msg(ctx, option)
# azey
@commands.command(hidden=True)
@bot.checks.is_member(239276819918880769)
async def azey(self, ctx):
options = [
"Yes I’m aze pls don’t touch",
"Archy abuses me"
]
option = random.choice(options)
await self.msg(ctx, option)
# beem
@commands.command(hidden=True)
@commands.bot_has_permissions(manage_nicknames=True)
@bot.checks.is_member(336336895711772693)
async def beem(self, ctx):
if ctx.author.display_name != "Baam":
await ctx.author.edit(nick="Baam")
await self.msg(ctx, "Changed stupid Baam's name")
else:
await self.msg(ctx, "No need to change stupid Baam's name" +
self.append)
# cat
@commands.command(hidden=True)
@bot.checks.is_member(440802535301709827)
async def cat(self, ctx):
options = [
"meow",
"wat",
"noni",
"send help"
]
option = random.choice(options)
await self.msg(ctx, option)
# catsis
@commands.command(hidden=True)
@bot.checks.is_member(440802535301709827)
async def catsis(self, ctx):
options = [
"You got no jams",
"Infires",
"Jjang jjang man bbong bbong",
"Kkaepjang",
]
option = random.choice(options)
await self.msg(ctx, option)
# char
@commands.command(hidden=True)
@bot.checks.is_member(473457198207467522)
async def char(self, ctx):
await self.msg(ctx, "Char is a lolicon")
# chun
@commands.command(hidden=True)
@bot.checks.is_member(202373732067442690)
async def chun(self, ctx):
await self.msg(ctx, "2D girls are better than 3D")
# fcb
@commands.command(hidden=True)
@commands.bot_has_permissions(manage_nicknames=True)
@bot.checks.is_member(283204260781490176)
async def fcb(self, ctx):
if ctx.author.display_name != ctx.author.name:
try:
await ctx.author.edit(nick=None)
except discord.errors.Forbidden:
pass
await self.msg(ctx, "FCB is h0t")
# hunter
@commands.command(hidden=True)
@bot.checks.is_member(285908956570976259)
async def hunter(self, ctx):
await self.msg(ctx, "hunter is gay lol")
# jackie
@commands.command(hidden=True)
@bot.checks.is_member(293025979880833024)
async def jackie(self, ctx):
options = [
"Handsome as **FUCK!**",
"Jackie is {:,} pounds today."
]
rint = random.randint
weight = round(rint(1, 100) * rint(1, 100) / (rint(1, 100) /
rint(1, 100)), 2)
option = random.choice(options).format(weight)
await self.msg(ctx, option)
# kroy
@commands.command(hidden=True)
@commands.bot_has_permissions(manage_nicknames=True)
@bot.checks.is_member(346115225625296897)
async def kroy(self, ctx):
if ctx.author.display_name != ctx.author.name:
try:
await ctx.author.edit(nick=ctx.author.name)
except discord.errors.Forbidden:
pass
await self.msg(ctx, "Changed Kroyburger's name")
else:
await self.msg(ctx, "No need to change Kroyburger's name")
# menmis
@commands.command(hidden=True)
@bot.checks.is_member(286573603368206347)
async def menmis(self, ctx):
options = [
"Menmis is a good mod",
"Menmis is getting demoted"
]
option = random.choice(options)
await self.msg(ctx, option + "")
# orcles
@commands.command(hidden=True)
@commands.bot_has_permissions(manage_nicknames=True)
@bot.checks.is_member(301638410815406081)
async def orcles(self, ctx):
if ctx.author.display_name != ctx.author.name:
await ctx.author.edit(nick=None)
await self.msg(ctx, "Changed obnoxious Orcles's stupid name" +
self.append)
else:
await self.msg(ctx, "Can't ~~ stand ~~ change Orcles's name." +
self.append)
# Rage
@commands.command(hidden=True)
@bot.checks.is_member(447187805106339864)
async def Rage(self, ctx):
await self.msg(ctx, "Rage dies faster than light")
# rory
@commands.command(hidden=True)
@commands.bot_has_permissions(manage_nicknames=True)
@bot.checks.is_member(353180156883632128)
async def rory(self, ctx):
options = [
"rory",
"dinorory rex"
]
option = random.choice(options)
if ctx.author.display_name != option:
await ctx.author.edit(nick=option)
await self.msg(ctx, f":white_check_mark: Successfully changed fat "
f"rory's name to \"**{option}**\"")
else:
await self.msg(ctx, f":x: No need to change fat rory's name to "
f"\"**{option}**\"")
# sharky
# sh4rky
@commands.command(hidden=True)
@bot.checks.is_member(254759884367724554)
async def sh4rky(self, ctx):
await self.msg(ctx, "Below gay")
# traf
@commands.command(hidden=True)
@bot.checks.is_member(311514087639089162)
async def traf(self, ctx):
options = [
"**TRAF IS A MONKEY** :monkey_face::monkey::banana: ooh ooh ooh "
"ah ah ah!!",
"**TRAF IS THE OPEST**"
]
option = random.choice(options)
await self.msg(ctx, option + "")
# xero
@commands.command(hidden=True)
@commands.bot_has_permissions(manage_nicknames=True)
@bot.checks.is_member(257239037721444353)
async def xero(self, ctx):
if ctx.author.display_name != ctx.author.name:
await ctx.author.edit(nick=None)
await self.msg(ctx, "Changed noob Xero's name")
else:
await self.msg(ctx, "No need to change *this* loser's name" +
self.append)
# zogic
@commands.command(hidden=True)
@commands.bot_has_permissions(manage_nicknames=True)
@bot.checks.is_member(397628415085379584)
async def zogic(self, ctx):
await ctx.author.edit(nick=None)
await self.msg(ctx, "Don't call me zoggy")
def setup(bot):
    """discord.py extension entry point: register the Personal cog."""
    bot.add_cog(Personal(bot))
| [
"[email protected]"
] | |
3df3fbea6d84f8960b962c2bbd112a115aaafa12 | 8ceceaf6f029e4c20af35c686cb3cf908d73f6e5 | /account/urls.py | 5093bbc1aa70fca4eaf0113212cac8946cba98d7 | [] | no_license | Tekkieware/CodeConfab | df2f01081e53f68c041124dfbcc9f13c1311f95b | 43652396112addcbb33ce24f3413aca79c3be5ab | refs/heads/master | 2022-04-14T03:19:24.011771 | 2022-03-08T08:38:03 | 2022-03-08T08:38:03 | 250,670,137 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,313 | py | from . import views
from django.urls import path, include
from django.conf.urls.static import static
from django.conf import settings
from django.contrib.auth.views import PasswordResetView, PasswordResetDoneView
# URL namespace; reverse these routes as 'account:<name>'.
app_name = 'account'

urlpatterns = [
    # Authentication
    path('registration/' , views.register.as_view(), name = 'register'),
    path('logout/', views.Logout.as_view(), name = 'logout'),
    path('login/' , views.login.as_view(), name = 'login'),
    # Password change / reset flow
    path('password/change/',views.PasswordChange.as_view(), name = 'password_change'),
    path('password/change/done/',views.PasswordChangDone.as_view(), name = 'password_change_done'),
    path('reset-password', views.passwordreset.as_view(), name = 'reset_password'),
    path('password/reset/done', views.passwordresetdone.as_view(), name = 'password_reset_done'),
    path('password-reset/confirm/<uidb64>/<token>', views.confirmpasswordreset.as_view(), name = 'password_reset_confirm'),
    path('password-reset/complete', views.passwordresetcomplete.as_view(), name = 'password_reset_complete'),
    # Profile pages and per-section edit endpoints
    path('profile/',views.profileview, name = 'profile'),
    path('<str:user>/profile/public',views.Publicprofile, name = 'pub_profile'),
    path('profile/work/information/edit', views.UpdateWorkInfo, name = "work_edit"),
    path('profile/personal_information/edit', views.UpdatePersonalinfo, name = 'edit_pinfo'),
    path('profile/contact_information/edit', views.UpdateContatctinfo, name = 'edit_cinfo'),
    path('profile/acheivements/edit', views.UpdatAcheiveInfo, name = 'edit_ainfo'),
    path('profile/other_information/edit', views.UpdateOtherInfo, name = 'edit_oinfo'),
    path('profile/education_information/edit', views.UpdateEducationInfo, name = 'edu_edit'),
    path('profile/user/story/edit', views.UpdateAboutInfo, name = 'about_edit'),
    path('profile/user/edit/language/add', views.AddLanguages, name = 'lang_add'),
    path('profile/user/edit/language/remove', views.RemoveLanguages, name = 'lang_remove'),
    # User resources and profile picture
    path('user/resources/add', views.ResourceAdd , name = 'add_resource'),
    path('user/resources/<int:resourceid>/delete', views.ResourceDelete , name = 'rem_resource'),
    path('user/profile/picture/add', views.UploadProfilepic , name = 'add_pic')
]+ static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| [
"[email protected]"
] | |
4a59a6d730c7d42759eeb4c97d075bd0b74a5420 | 3c000380cbb7e8deb6abf9c6f3e29e8e89784830 | /venv/Lib/site-packages/cobra/modelimpl/vns/rsvdevdomainrefconttodomainref.py | 6f6631bb9d8ebd61481610df7c86e13fd1a69120 | [] | no_license | bkhoward/aciDOM | 91b0406f00da7aac413a81c8db2129b4bfc5497b | f2674456ecb19cf7299ef0c5a0887560b8b315d0 | refs/heads/master | 2023-03-27T23:37:02.836904 | 2021-03-26T22:07:54 | 2021-03-26T22:07:54 | 351,855,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,979 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2020 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class RsVDevDomainRefContToDomainRef(Mo):
    """Generated cobra managed-object class (do not edit by hand).

    Named source relation from a vns VDevDomainRefCont to an AAA
    DomainRef; the target is resolved by name via ``tnAaaDomainRefName``.
    """
    # Class-level relationship metadata: N-to-1, read-only, implicit MO.
    meta = NamedSourceRelationMeta("cobra.model.vns.RsVDevDomainRefContToDomainRef", "cobra.model.aaa.DomainRef")

    meta.targetNameProps["name"] = "tnAaaDomainRefName"
    meta.cardinality = SourceRelationMeta.N_TO_ONE

    meta.moClassName = "vnsRsVDevDomainRefContToDomainRef"
    meta.rnFormat = "rsVDevDomainRefContToDomainRef"
    meta.category = MoCategory.RELATIONSHIP_TO_LOCAL
    meta.label = "Relation from VDev DomainRef Container To AAA Domain Ref"
    meta.writeAccessMask = 0x6000000000000001
    meta.readAccessMask = 0x6000000000000001
    meta.isDomainable = False
    meta.isReadOnly = True
    meta.isConfigurable = False
    meta.isDeletable = False
    meta.isContextRoot = False

    # Containment hierarchy: allowed children / parents / superclasses.
    meta.childClasses.add("cobra.model.fault.Inst")

    meta.childClasses.add("cobra.model.fault.Counts")

    meta.childClasses.add("cobra.model.health.Inst")

    meta.childNamesAndRnPrefix.append(("cobra.model.fault.Counts", "fltCnts"))

    meta.childNamesAndRnPrefix.append(("cobra.model.fault.Inst", "fault-"))

    meta.childNamesAndRnPrefix.append(("cobra.model.health.Inst", "health"))

    meta.parentClasses.add("cobra.model.vns.VDevDomainRefCont")

    meta.superClasses.add("cobra.model.reln.Inst")

    meta.superClasses.add("cobra.model.reln.To")

    meta.superClasses.add("cobra.model.pol.NToRef")

    meta.rnPrefixes = [
        ('rsVDevDomainRefContToDomainRef', False),
    ]

    # Property metadata (one PropMeta per attribute of the MO).
    prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("deleteAll", "deleteall", 16384)
    prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
    prop._addConstant("ignore", "ignore", 4096)
    meta.props.add("childAction", prop)

    prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
    prop.label = "None"
    prop.isDn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("dn", prop)

    prop = PropMeta("str", "forceResolve", "forceResolve", 107, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = True
    prop.defaultValueStr = "yes"
    prop._addConstant("no", None, False)
    prop._addConstant("yes", None, True)
    meta.props.add("forceResolve", prop)

    prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "local"
    prop._addConstant("implicit", "implicit", 4)
    prop._addConstant("local", "local", 0)
    prop._addConstant("policy", "policy", 1)
    prop._addConstant("replica", "replica", 2)
    prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
    meta.props.add("lcOwn", prop)

    prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "never"
    prop._addConstant("never", "never", 0)
    meta.props.add("modTs", prop)

    prop = PropMeta("str", "monPolDn", "monPolDn", 18098, PropCategory.REGULAR)
    prop.label = "Monitoring policy attached to this observable object"
    prop.isImplicit = True
    prop.isAdmin = True
    meta.props.add("monPolDn", prop)

    prop = PropMeta("str", "rType", "rType", 106, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 1
    prop.defaultValueStr = "mo"
    prop._addConstant("local", "local", 3)
    prop._addConstant("mo", "mo", 1)
    prop._addConstant("service", "service", 2)
    meta.props.add("rType", prop)

    prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
    prop.label = "None"
    prop.isRn = True
    prop.isImplicit = True
    prop.isAdmin = True
    prop.isCreateOnly = True
    meta.props.add("rn", prop)

    prop = PropMeta("str", "state", "state", 103, PropCategory.REGULAR)
    prop.label = "State"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "unformed"
    prop._addConstant("cardinality-violation", "cardinality-violation", 5)
    prop._addConstant("formed", "formed", 1)
    prop._addConstant("invalid-target", "invalid-target", 4)
    prop._addConstant("missing-target", "missing-target", 2)
    prop._addConstant("unformed", "unformed", 0)
    meta.props.add("state", prop)

    prop = PropMeta("str", "stateQual", "stateQual", 104, PropCategory.REGULAR)
    prop.label = "State Qualifier"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "none"
    prop._addConstant("default-target", "default-target", 2)
    prop._addConstant("mismatch-target", "mismatch-target", 1)
    prop._addConstant("none", "none", 0)
    meta.props.add("stateQual", prop)

    prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop._addConstant("created", "created", 2)
    prop._addConstant("deleted", "deleted", 8)
    prop._addConstant("modified", "modified", 4)
    meta.props.add("status", prop)

    prop = PropMeta("str", "tCl", "tCl", 18094, PropCategory.REGULAR)
    prop.label = "Target-class"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 1562
    prop.defaultValueStr = "aaaDomainRef"
    prop._addConstant("aaaDomainRef", None, 1562)
    prop._addConstant("unspecified", "unspecified", 0)
    meta.props.add("tCl", prop)

    prop = PropMeta("str", "tContextDn", "tContextDn", 4990, PropCategory.REGULAR)
    prop.label = "Target-context"
    prop.isImplicit = True
    prop.isAdmin = True
    meta.props.add("tContextDn", prop)

    prop = PropMeta("str", "tDn", "tDn", 100, PropCategory.REGULAR)
    prop.label = "Target-dn"
    prop.isImplicit = True
    prop.isAdmin = True
    meta.props.add("tDn", prop)

    prop = PropMeta("str", "tRn", "tRn", 4989, PropCategory.REGULAR)
    prop.label = "Target-rn"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.range = [(0, 512)]
    meta.props.add("tRn", prop)

    prop = PropMeta("str", "tType", "tType", 4988, PropCategory.REGULAR)
    prop.label = "None"
    prop.isImplicit = True
    prop.isAdmin = True
    prop.defaultValue = 0
    prop.defaultValueStr = "name"
    prop._addConstant("all", "all", 2)
    prop._addConstant("mo", "mo", 1)
    prop._addConstant("name", "name", 0)
    meta.props.add("tType", prop)

    prop = PropMeta("str", "tnAaaDomainRefName", "tnAaaDomainRefName", 18093, PropCategory.REGULAR)
    prop.label = "Name"
    prop.isConfig = True
    prop.isAdmin = True
    prop.range = [(0, 64)]
    prop.regex = ['[a-zA-Z0-9_.:-]+']
    meta.props.add("tnAaaDomainRefName", prop)

    def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
        # No naming properties: this relation has a fixed RN.
        namingVals = []
        Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
] | |
2c827b70acdad62ca67fd30e1824c1fba685a3ec | 492c1e1dabb84ec4efb874b3d9228d31a675a38f | /121.py | bd46672c3c29a00f05e67a8d9d5a65edbc8accd8 | [] | no_license | ksnt/leet | 65f3c36c8a524e1cc1a5d00bb7a840222ecc9dfe | 6680ff978b88d3c44e538b4d5f0e6805ed85f9cf | refs/heads/master | 2022-09-24T10:59:18.740314 | 2022-09-01T19:06:12 | 2022-09-01T19:06:12 | 136,970,152 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 494 | py | import sys
class Solution:
    def maxProfit(self, prices):
        """Return the maximum profit from one buy-then-sell transaction.

        Single pass: track the lowest price seen so far and the best
        profit achievable by selling at the current price.

        :type prices: List[int]
        :rtype: int  (0 if no profitable transaction exists)
        """
        # Handle empty input explicitly (truthiness instead of len() == 0).
        if not prices:
            return 0
        min_price = sys.maxsize
        max_profit = 0
        # Iterate values directly instead of range(len(prices)).
        for price in prices:
            if price < min_price:
                min_price = price
            elif price - min_price > max_profit:
                max_profit = price - min_price
        return max_profit
"[email protected]"
] | |
a82c891c8c753024768d78e5716329e714114205 | cf5b2850dc9794eb0fc11826da4fd3ea6c22e9b1 | /xlsxwriter/test/comparison/test_chart_drop_lines01.py | 6e303f1bb4c31e9ce82494adcc98a6d81795dacb | [
"BSD-2-Clause"
] | permissive | glasah/XlsxWriter | bcf74b43b9c114e45e1a3dd679b5ab49ee20a0ec | 1e8aaeb03000dc2f294ccb89b33806ac40dabc13 | refs/heads/main | 2023-09-05T03:03:53.857387 | 2021-11-01T07:35:46 | 2021-11-01T07:35:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,470 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (c), 2013-2021, John McNamara, [email protected]
#
from ..excel_comparison_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
    """
    Compare an XlsxWriter-generated workbook against the Excel reference.
    """

    def setUp(self):
        self.set_filename('chart_drop_lines01.xlsx')

    def test_create_file(self):
        """Test the creation of an XlsxWriter file with drop down lines."""
        workbook = Workbook(self.got_filename)
        worksheet = workbook.add_worksheet()

        chart = workbook.add_chart({'type': 'line'})
        chart.axis_ids = [48034944, 48036864]

        # Three data columns written to A1:C5.
        columns = [
            [1, 2, 3, 4, 5],
            [2, 4, 6, 8, 10],
            [3, 6, 9, 12, 15],
        ]
        for cell, values in zip(('A1', 'B1', 'C1'), columns):
            worksheet.write_column(cell, values)

        chart.set_drop_lines()

        # Both series share the same category range.
        for value_range in ('=Sheet1!$B$1:$B$5', '=Sheet1!$C$1:$C$5'):
            chart.add_series({
                'categories': '=Sheet1!$A$1:$A$5',
                'values': value_range,
            })

        worksheet.insert_chart('E9', chart)

        workbook.close()

        self.assertExcelEqual()
| [
"[email protected]"
] | |
a8ac8ed1fc1f33027e25548e0effc34a0d1d0e87 | d530e02257918ce734ed964a0a101c6d9cebee41 | /test_saved_model.py | 0f546d655d83babc0ab379725cef7d3017244131 | [] | no_license | KtRamsay/4I15_RL_Project | eb888571ec4e93b54ad6427b8c56158f1582a59b | 5aa9e8c46a25f1c9f24dc0478c968930b5fddb9d | refs/heads/main | 2023-04-12T21:44:57.204093 | 2021-04-26T15:23:22 | 2021-04-26T15:23:22 | 361,708,307 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,588 | py | from collections import namedtuple
import numpy as np
#from tensorboardX import SummaryWriter
from tqdm import tqdm
import matplotlib.pyplot as plt
import time
import copy
import random
from map_generation import resetMap
from observation_functions import getObsSpaceRepresentation
from plot_functions import plotLifespanBar, plotMap, plotObservationInput, plotSuccess, plotSuccessReward
import torch
import torch.nn as nn
import torch.nn.functional as F
#Define actor network
class Actor(nn.Module):
    """Policy network mapping (observation image, extra state) -> actions.

    NOTE(review): reads the module-level global ``useObservationSpace`` at
    both construction and forward time; both code paths assume it does not
    change in between — confirm before reusing this class elsewhere.
    All layers are cast to float64 via ``.double()``.
    """
    def __init__(self,obSize,hiddenSize, hiddenSize2, numActions):
        # obSize, hiddenSize and hiddenSize2 are accepted but unused here;
        # layer widths are hard-coded below.
        super(Actor,self).__init__()
        if useObservationSpace:
            # Small CNN over the single-channel observation image.
            self.conv1 = nn.Conv2d(1,5,5,1).double()
            self.conv2 = nn.Conv2d(5,18,3).double()
            self.conv3 = nn.Conv2d(18,3,3).double()
            self.flatten = nn.Flatten().double()
            #self.fc1 = nn.Linear(86,80).double()
            # 78 = flattened conv output (75, assuming a 40x40 input —
            # TODO confirm) + 3 extra state features.
            self.fc1 = nn.Linear(78,80).double()
        else:
            #self.fc1 = nn.Linear(11,80).double()
            # Extra-state-only variant: 3 input features.
            self.fc1 = nn.Linear(3,80).double()
        self.fc2 = nn.Linear(80,80).double()
        # Tanh squashes each action component into [-1, 1].
        self.fc3 = nn.Linear(80,numActions).double()
        self.tanh = nn.Tanh()

    def forward(self,spaceMatrix, additionalData):
        # spaceMatrix: assumed (batch, H, H) square observation image —
        # TODO confirm; additionalData: (batch, 3) extra state features.
        if useObservationSpace:
            # Add the channel dimension: (batch, 1, H, H).
            spaceMatrix = spaceMatrix.view((spaceMatrix.shape[0], 1, spaceMatrix.shape[1], spaceMatrix.shape[1]))
            spaceMatrix = F.relu(self.conv1(spaceMatrix))
            spaceMatrix = F.avg_pool2d(spaceMatrix,2,2)
            spaceMatrix = F.relu(self.conv2(spaceMatrix))
            spaceMatrix = F.avg_pool2d(spaceMatrix,2,2)
            spaceMatrix = F.relu(self.conv3(spaceMatrix))
            spaceMatrix = F.avg_pool2d(spaceMatrix,2,1)
            observation = self.flatten(spaceMatrix)
            # Concatenate CNN features with the extra state along dim 1.
            state = torch.cat((observation, additionalData), 1)
        else :
            state = additionalData
        state = F.relu(self.fc1(state))
        state = F.relu(self.fc2(state))
        state = self.tanh(self.fc3(state))
        return state
#Define learning model settings
HIDDEN_SIZE = 128
HIDDEN_SIZE2 = 100
BATCH_SIZE = 64
TARGET_UPDATE = 10
MAX_EPISODE_ITERS = 150
EPISODES = 500
# NOTE(review): episode runs 1..EPISODES, so episode % (EPISODES + 1) is
# never 0 — live plotting below is effectively disabled by this value.
# (Name is a typo for SEE_EPISODE; kept because it is used throughout.)
SEE_EPIDODE = EPISODES + 1

#Set if to load a model
setName = "ObservationSpaceTest"
#saveNum = "1619187260"
#saveNum = "1619186452"
#saveNum = "1619203557"
saveNum = "1619265915"
savedActorPath = "Model_Saves/" + setName + "/Actor_" + saveNum

#Define map settings
totBlocks = 60
mapWidth = 15
mapHeight = 15
waypointDist = 10
observationDist = 2 #Must be divisible by 2
obsPixleDensity = 10 #Number of pixles per unit cell of map at highest resolution
turnMemorySize = 6
allowedPositionError = 0.2
allowedBearingError = 15 #In degrees

showSuccessfull = True

# Reward shaping constants.
stepReward = -1
spinReward = 0
collisionReward = -10
perfectWaypointPositionReward = 0
perfectWaypointBearingReward = 0
wayPointPositionReward = 10

plotEvery = 1

obsSize = (obsPixleDensity*observationDist*2)**2 + 2
# Convert the bearing tolerance from degrees to radians.
allowedBearingError = allowedBearingError *np.pi/180
nActions = 2
maxSpeed = 0.1
maxTurnRate = np.pi/6
plotBestPath = False
plotObsRange = False
smoothInput = True
circleMap = True
allowClipping = False
useObservationSpace = True
requireBearing = False

device = torch.device("cuda" if torch.cuda.is_available() else "cpu") # Will only be faster with larger observation space or batch size
print("Running program jobs on {}".format(device))

#Set up the display figure
fig = plt.figure()
axs = []
axs.append(plt.subplot2grid((3,5), (0, 0), colspan=3, rowspan=3))
axs.append(plt.subplot2grid((3,5), (0, 3), colspan=2, rowspan=2))
axs.append(plt.subplot2grid((3,5), (2, 3), colspan=2))
plt.ion()
plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=0)

#Construct networks
#Actor
# Load the saved actor weights and switch to inference mode.
actorNetwork = Actor(obsSize,HIDDEN_SIZE, HIDDEN_SIZE2, nActions).to(device)
actorNetwork.load_state_dict(torch.load(savedActorPath))
actorNetwork.eval()
#Run training
# (Evaluation loop: the loaded actor is run greedily; no learning occurs.)
episodeRewards = []
sucessfullEpisodes = 0
reachedOneWaypoint = 0
successfullMemories = []
for episode in tqdm(range(1, EPISODES + 1), ascii=True, unit='episodes'):
    #Reset environment
    episodeReward = 0.0
    previousPositions = []
    turnRateLog = [0, 0, 0, 0, 0, 0]
    waypointTarget = 0
    isDone = False
    mapObstacles, robot, goal, bestPath, waypoints, mapMatrix = resetMap(mapWidth, mapHeight, totBlocks, waypointDist, obsPixleDensity, circleMap)
    episodeIteration = 0
    episodeReachedWaypoint = False
    episodeMemory = []
    episodeRewardHistory = [0]

    #Generate the first state
    spaceMatrix = getObsSpaceRepresentation(mapMatrix, robot, (mapWidth, mapHeight), smoothInput, obsPixleDensity, observationDist)
    state = torch.DoubleTensor(spaceMatrix).unsqueeze(0).to(device)
    #additionalData = np.concatenate((np.array([waypoints[waypointTarget].position[0] - robot.position[0], waypoints[waypointTarget].position[1] - robot.position[1], waypoints[waypointTarget].yaw, robot.yaw, robot.getYawToPoint(waypoints[waypointTarget].position)]), np.array(turnRateLog)))
    # Extra state: distance, bearing error and yaw-to-waypoint (rounded).
    additionalData = np.array([np.round(np.absolute(np.linalg.norm(waypoints[waypointTarget].position - robot.position)), 2), np.round(waypoints[waypointTarget].yaw - robot.yaw, 2), np.round(robot.getYawToPoint(waypoints[waypointTarget].position), 2)])
    additionalState = torch.DoubleTensor(additionalData).unsqueeze(0).to(device)

    while episodeIteration < MAX_EPISODE_ITERS:
        episodeIteration += 1
        #Get the next action
        action = actorNetwork(state, additionalState)
        action = action.detach().cpu().numpy()[0]
        """
        if random.random() < 0.1:
            #Do a random action
            u = random.uniform(0,1)
            w = random.uniform(-1,1)

            #update the action data for memory push
            action = np.array([u, w])
        else: """
        # Clip network outputs, then scale to physical speed / turn rate.
        u = action[0]
        w = action[1]
        u = np.clip(u, 0., 1.)
        u = u*maxSpeed
        w = np.clip(w, -1., 1.)
        w = w*maxTurnRate

        # Keep only the most recent turnMemorySize turn rates.
        turnRateLog.append(w)
        if len(turnRateLog) > turnMemorySize:
            turnRateLog = turnRateLog[1:turnMemorySize+1]

        #Compute the reward for the motion
        reward = robot.move(u, w, mapObstacles, mapWidth, mapHeight, stepReward, collisionReward, circleMap, allowClipping)

        #Check for waypoint reward
        reward = robot.getWaypointProximityReward(reward, waypoints[waypointTarget], wayPointPositionReward)

        #Check if the rolling average turn rate is too high (robot is spinning)
        if np.absolute(np.mean(turnRateLog)) > 0.9*maxTurnRate:
            #Robot is spinning
            reward += spinReward

        #Add some reward for pointing in the right direction
        reward -= np.absolute(robot.getYawToPoint(waypoints[waypointTarget].position))/(2*np.pi)*5

        # Snapshot everything needed to replay this frame later.
        episodeMemory.append((mapObstacles, mapWidth, mapHeight, copy.copy(robot.position), copy.copy(robot.yaw), robot.radius, goal, copy.deepcopy(previousPositions), bestPath, plotBestPath, copy.deepcopy(waypoints), observationDist, plotObsRange, copy.copy(waypointTarget), copy.copy(episodeIteration), MAX_EPISODE_ITERS, copy.copy(episodeRewardHistory)))

        #Check if waypoint is reached
        if robot.hasReachedWaypointPosition(waypoints[waypointTarget], allowedPositionError):
            reward += perfectWaypointPositionReward
            reward += 10*(0.1**((np.absolute(robot.getYawToPoint(waypoints[waypointTarget].position))/(2*np.pi))))

            if robot.hasReachedWaypointBearing(waypoints[waypointTarget], allowedBearingError) or not requireBearing:
                reward += perfectWaypointBearingReward
                waypoints[waypointTarget].reached = True
                waypointTarget += 1
                # Reset the per-waypoint step budget.
                episodeIteration = 0

                if not episodeReachedWaypoint:
                    reachedOneWaypoint += 1
                    episodeReachedWaypoint = True

                if waypointTarget == len(waypoints):
                    #Final goal has been reached
                    sucessfullEpisodes += 1
                    successfullMemories.append(episodeMemory)
                    isDone = True
                    waypointTarget -= 1 #Prevent fail on newAdditionalState creation

                    if episode%SEE_EPIDODE == 0:
                        print("\nWatched Success")

        #Get the new state with chosen action
        spaceMatrix = getObsSpaceRepresentation(mapMatrix, robot, (mapWidth, mapHeight), smoothInput, obsPixleDensity, observationDist)
        state = torch.DoubleTensor(spaceMatrix).unsqueeze(0).to(device)
        #newAdditionalData = np.concatenate((np.array([waypoints[waypointTarget].position[0] - robot.position[0], waypoints[waypointTarget].position[1] - robot.position[1], waypoints[waypointTarget].yaw, robot.yaw, robot.getYawToPoint(waypoints[waypointTarget].position)]), np.array(turnRateLog)))
        newAdditionalData = np.array([np.round(np.absolute(np.linalg.norm(waypoints[waypointTarget].position - robot.position)), 2), np.round(waypoints[waypointTarget].yaw - robot.yaw, 2), np.round(robot.getYawToPoint(waypoints[waypointTarget].position), 2)])
        additionalState = torch.DoubleTensor(newAdditionalData).unsqueeze(0).to(device)

        #Update the episode information
        episodeReward += reward
        episodeRewardHistory.append(episodeReward)

        if episode%SEE_EPIDODE == 0:
            #The last run of the batch is being computed
            if episodeIteration%10 == 0:
                #Save every 10 iterations to plot
                previousPositions.append(np.copy(robot.position))

            if episodeIteration%plotEvery == 0:
                #Plot enviromnent
                plotMap(axs[0], mapObstacles, mapWidth, mapHeight, robot, goal, previousPositions, bestPath, plotBestPath, waypoints, observationDist, plotObsRange, waypointTarget, circleMap)
                plotObservationInput(axs[1], spaceMatrix, obsPixleDensity, robot)
                plotLifespanBar(axs[2], mapWidth, episodeIteration, MAX_EPISODE_ITERS)
                plt.draw()
                plt.pause(0.0008)
                plt.show()

        if isDone:
            #Episode is complete
            episodeRewards.append(episodeReward)
            if episode%SEE_EPIDODE == 0:
                print("\nWatched reward: {}".format(episodeReward))
            break

    if not isDone:
        # Episode timed out before reaching the final waypoint.
        episodeRewards.append(episodeReward)
        if episode%SEE_EPIDODE == 0:
            print("\nWatched reward: {}".format(episodeReward))

# Summary statistics over all episodes.
print("###########################")
print("Model run complete")
print("###########################")
print("Episodes fully completed: {} of {}".format(sucessfullEpisodes, EPISODES))
print("Success rate: {}%".format(round(100*sucessfullEpisodes/EPISODES, 2)))
print("At least 1 waypoint reached: {} of {}".format(reachedOneWaypoint, EPISODES))
print("Success rate: {}%".format(round(100*reachedOneWaypoint/EPISODES, 2)))

# Replay the recorded frames of every successful episode.
if showSuccessfull:
    for successfullMemory in successfullMemories:
        for memoryFrame in successfullMemory:
            plotSuccess(axs[0], memoryFrame[0], memoryFrame[1], memoryFrame[2], memoryFrame[3], memoryFrame[4], memoryFrame[5], memoryFrame[6], memoryFrame[7], memoryFrame[8], memoryFrame[9], memoryFrame[10], memoryFrame[11], memoryFrame[12], memoryFrame[13], circleMap)
            plotSuccessReward(axs[1], memoryFrame[16])
            plotLifespanBar(axs[2], memoryFrame[1], memoryFrame[14], memoryFrame[15])
            plt.draw()
            plt.pause(0.1)
            plt.show()
| [
"[email protected]"
] | |
38457dc838816aa418c8908fcbb7b3aa0e3c8dd8 | 19937697667261b0c180faddf7b75e767d9fc2cf | /app/tools/engineio/packet.py | a4f40e97e515c7d5e431fca73d9baf7ad5dc3460 | [] | no_license | 413180794/aliPay | 197647cd3389e2b8236602b5bc3d36213b146d96 | 1c71e631a3730490f5794f1a69adaa0ff76f46fc | refs/heads/master | 2020-06-18T07:50:25.238424 | 2019-07-19T13:02:11 | 2019-07-19T13:02:11 | 196,221,150 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,514 | py | import base64
import json as _json
import six
# Engine.IO packet type codes and their names (indexed by type code).
(OPEN, CLOSE, PING, PONG, MESSAGE, UPGRADE, NOOP) = (0, 1, 2, 3, 4, 5, 6)
packet_names = ['OPEN', 'CLOSE', 'PING', 'PONG', 'MESSAGE', 'UPGRADE', 'NOOP']

# Types treated as raw binary payloads.
binary_types = (six.binary_type, bytearray)
class EngineIoPacket(object):
    """Engine.IO packet.

    Wire format: one leading packet-type character/byte followed by the
    payload. Binary packets may be base64-encoded with a ``b`` prefix.
    """
    # JSON codec; kept as a class attribute so it can be swapped out.
    json = _json

    def __init__(self, packet_type=NOOP, data=None, binary=None,
                 encoded_packet=None):
        self.packet_type = packet_type
        self.data = data
        # If `binary` is not given explicitly, infer it from the data type.
        if binary is not None:
            self.binary = binary
        elif isinstance(data, six.text_type):
            self.binary = False
        elif isinstance(data, binary_types):
            self.binary = True
        else:
            self.binary = False
        # An encoded packet, when provided, overrides the other arguments.
        if encoded_packet:
            self.decode(encoded_packet)

    def encode(self, b64=False, always_bytes=True):
        """Encode the packet for transmission.

        With ``b64=True`` a binary payload is base64-encoded and the
        packet is prefixed with ``b``. ``always_bytes`` forces the
        result to UTF-8 bytes.
        """
        # Binary-over-binary transport: type goes out as a raw byte;
        # otherwise as its ASCII digit.
        if self.binary and not b64:
            encoded_packet = six.int2byte(self.packet_type)
        else:
            encoded_packet = six.text_type(self.packet_type)
        if self.binary and b64:
            encoded_packet = 'b' + encoded_packet
        if self.binary:
            if b64:
                encoded_packet += base64.b64encode(self.data).decode('utf-8')
            else:
                encoded_packet += self.data
        elif isinstance(self.data, six.string_types):
            encoded_packet += self.data
        elif isinstance(self.data, dict) or isinstance(self.data, list):
            encoded_packet += self.json.dumps(self.data,
                                              separators=(',', ':'))
        elif self.data is not None:
            encoded_packet += str(self.data)
        if always_bytes and not isinstance(encoded_packet, binary_types):
            encoded_packet = encoded_packet.encode('utf-8')
        return encoded_packet

    def decode(self, encoded_packet):
        """Decode a transmitted package."""
        b64 = False
        # Normalize the input to bytes before inspecting the first byte.
        if not isinstance(encoded_packet, binary_types):
            encoded_packet = encoded_packet.encode('utf-8')
        elif not isinstance(encoded_packet, bytes):
            encoded_packet = bytes(encoded_packet)
        self.packet_type = six.byte2int(encoded_packet[0:1])
        if self.packet_type == 98:  # 'b' --> binary base64 encoded packet
            self.binary = True
            encoded_packet = encoded_packet[1:]
            self.packet_type = six.byte2int(encoded_packet[0:1])
            # ASCII digit -> integer packet type (ord('0') == 48).
            self.packet_type -= 48
            b64 = True
        elif self.packet_type >= 48:
            self.packet_type -= 48
            self.binary = False
        else:
            # First byte below '0': the type was sent as a raw byte, so the
            # payload is binary.
            self.binary = True
        self.data = None
        if len(encoded_packet) > 1:
            if self.binary:
                if b64:
                    self.data = base64.b64decode(encoded_packet[1:])
                else:
                    self.data = encoded_packet[1:]
            else:
                try:
                    self.data = self.json.loads(
                        encoded_packet[1:].decode('utf-8'))
                    if isinstance(self.data, int):
                        # do not allow integer payloads, see
                        # github.com/miguelgrinberg/python-engineio/issues/75
                        # for background on this decision
                        raise ValueError
                except ValueError:
                    # Not JSON (or an integer): keep the payload as text.
                    self.data = encoded_packet[1:].decode('utf-8')
| [
"w123456256456"
] | w123456256456 |
5431d40c72d373dfc4b8862e7524c47fceb70a16 | 0c56f110c09743bbf951d681731df04f88bf99a3 | /venv/bin/dicom2nifti | 2801f7bfb5002ab39368ba8b3759565cbf6853f2 | [] | no_license | wildwolf1994411/mri_project | dc50dfeffc02cc629421703648022674e568a9a1 | 2d2e2038cc52d5483308f70374133fea226f5269 | refs/heads/master | 2020-04-24T12:58:29.006155 | 2019-02-22T23:48:04 | 2019-02-22T23:48:04 | 171,972,252 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,636 | #!/home/shihong/Desktop/Qi_Chen/mri-project/venv/bin/python
"""
This script is the standalone/script version of dicom2nifti
@author: abrys
"""
from __future__ import print_function
import argparse
import os
import logging
import dicom2nifti.convert_dir as convert_directory
import dicom2nifti.settings as settings
import sys
# Setup the logger correctly
import logging
import sys
logger = logging.getLogger(__name__)
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
logger.addHandler(handler)
logger.setLevel(logging.WARNING)
def main(args):
    """Entry point: convert a directory of dicom files into nifti format.

    Args:
        args: list of command line arguments (excluding the program name).

    Returns:
        2 if the input or output directory is invalid, otherwise None after
        the conversion has been attempted.
    """
    parser = argparse.ArgumentParser(description='dicom2nifti, convert dicom files into nifti format.')

    parser.add_argument('input_directory', type=str,
                        help='directory containing dicom files, can be nested')
    parser.add_argument('output_directory', type=str,
                        help='directory to store the nifti files')
    parser.add_argument('-G', '--allow-gantry-tilting', action='store_true',
                        help='allow the conversion of gantry tilted data (this will be reflected in the affine matrix only)')
    parser.add_argument('-r', '--resample-gantry-tilting', action='store_true',
                        help='resample gantry tilted data to an orthogonal image')
    parser.add_argument('-o', '--resample-order', type=int,
                        help='order of the spline interpolation used during the resampling (0 -> 5) [0 = NN, 1 = LIN, ....]')
    parser.add_argument('-p', '--resample-padding', type=int,
                        help='padding value to used during resampling to use as fill value')
    parser.add_argument('-M', '--allow-multiframe-implicit', action='store_true',
                        help='allow the conversion of multiframe data with implicit vr transfer syntax (this is not guaranteed to work)')
    parser.add_argument('-C', '--no-compression', action='store_true',
                        help='disable gzip compression and write .nii files instead of .nii.gz')
    parser.add_argument('-R', '--no-reorientation', action='store_true',
                        help='disable image reorientation (default: images are reoriented to LAS orientation)')
    args = parser.parse_args(args)

    if not os.path.isdir(args.input_directory):
        # Log at ERROR level: the module logger is configured at WARNING, so
        # the previous logging.info() calls were filtered out and the user
        # never saw why the program exited.
        logging.error('ERROR: \'input_directory\' should be a valid path')
        logging.error('----------------------------------------------------\n')
        parser.print_help()
        return 2
    elif not os.path.isdir(args.output_directory):
        logging.error('ERROR: \'output_directory\' should be a valid path')
        logging.error('----------------------------------------------------\n')
        parser.print_help()
        return 2
    else:
        # Apply optional conversion settings before running the conversion.
        if args.allow_gantry_tilting:
            settings.disable_validate_orthogonal()
        if args.allow_multiframe_implicit:
            settings.disable_validate_multiframe_implicit()
        if args.resample_gantry_tilting:
            settings.enable_resampling()
        if args.resample_order:
            settings.set_resample_spline_interpolation_order(args.resample_order)
        if args.resample_padding:
            settings.set_resample_padding(args.resample_padding)
        convert_directory.convert_directory(args.input_directory,
                                            args.output_directory,
                                            not args.no_compression,
                                            not args.no_reorientation)
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
| [
"[email protected]"
] | ||
78449bf47c907409436262751fab4a0327e9bb74 | ad5d38fce4785037c108186f17eb1c64380355ef | /sddsd/google-cloud-sdk.staging/lib/googlecloudsdk/api_lib/cloudbuild/cloudbuild_util.py | c5720e7008ffdafaf648390fa1b04db8874cdcd5 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | saranraju90/multik8s | 75864b605a139ddb7947ed4de4ae8466bdd49acb | 428576dedef7bb9cd6516e2c1ab2714581e1137c | refs/heads/master | 2023-03-03T21:56:14.383571 | 2021-02-20T14:56:42 | 2021-02-20T14:56:42 | 339,665,231 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,887 | py | # -*- coding: utf-8 -*- #
# Copyright 2016 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for the cloudbuild API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import re
from apitools.base.protorpclite import messages as proto_messages
from apitools.base.py import encoding as apitools_encoding
from googlecloudsdk.api_lib.util import apis
from googlecloudsdk.calliope import base
from googlecloudsdk.core import exceptions
from googlecloudsdk.core import yaml
from googlecloudsdk.core.resource import resource_property
from googlecloudsdk.core.util import files
import six
_API_NAME = 'cloudbuild'
_GA_API_VERSION = 'v1'
_BETA_API_VERSION = 'v1beta1'
RELEASE_TRACK_TO_API_VERSION = {
base.ReleaseTrack.GA: _GA_API_VERSION,
base.ReleaseTrack.BETA: _BETA_API_VERSION,
base.ReleaseTrack.ALPHA: _BETA_API_VERSION,
}
REGIONAL_WORKERPOOL_NAME_MATCHER = r'projects/.*/locations/.*/workerPools/.*'
REGIONAL_WORKERPOOL_NAME_SELECTOR = r'projects/.*/locations/.*/workerPools/(.*)'
REGIONAL_WORKERPOOL_REGION_SELECTOR = r'projects/.*/locations/(.*)/workerPools/.*'
# Default for optionally-regional requests when the user does not specify.
DEFAULT_REGION = 'global'
def GetMessagesModule(release_track=base.ReleaseTrack.GA):
  """Return the Cloud Build messages module for a release track.

  Args:
    release_track: googlecloudsdk.calliope.base.ReleaseTrack enum value.

  Returns:
    Module containing the definitions of messages for Cloud Build.
  """
  api_version = RELEASE_TRACK_TO_API_VERSION[release_track]
  return apis.GetMessagesModule(_API_NAME, api_version)
def GetClientClass(release_track=base.ReleaseTrack.GA):
  """Return the Cloud Build client class for a release track.

  Args:
    release_track: googlecloudsdk.calliope.base.ReleaseTrack enum value.

  Returns:
    base_api.BaseApiClient subclass for the Cloud Build API.
  """
  api_version = RELEASE_TRACK_TO_API_VERSION[release_track]
  return apis.GetClientClass(_API_NAME, api_version)
def GetClientInstance(release_track=base.ReleaseTrack.GA, use_http=True):
  """Return an instance of the Cloud Build client.

  Args:
    release_track: googlecloudsdk.calliope.base.ReleaseTrack enum value.
    use_http: bool, True to create an http object for this client.

  Returns:
    base_api.BaseApiClient, an instance of the Cloud Build client.
  """
  api_version = RELEASE_TRACK_TO_API_VERSION[release_track]
  return apis.GetClientInstance(_API_NAME, api_version,
                                no_http=not use_http)
def EncodeSubstitutions(substitutions, messages):
  """Encode a substitutions dict as a Build.SubstitutionsValue message.

  Args:
    substitutions: dict or None, user-supplied substitution mappings.
    messages: Cloud Build messages module.

  Returns:
    messages.Build.SubstitutionsValue with one AdditionalProperty per entry
    (sorted by key for deterministic output), or None if there is nothing
    to encode.
  """
  if not substitutions:
    return None
  prop_cls = messages.Build.SubstitutionsValue.AdditionalProperty
  # TODO(b/35470611): Use map encoder function instead when implemented.
  # Sorted so that output ordering is stable (tests rely on this).
  props = [prop_cls(key=key, value=value)
           for key, value in sorted(substitutions.items())]
  return messages.Build.SubstitutionsValue(additionalProperties=props)
def EncodeTriggerSubstitutions(substitutions, messages):
  """Encode a substitutions dict as a BuildTrigger.SubstitutionsValue message.

  Args:
    substitutions: dict or None, user-supplied substitution mappings.
    messages: Cloud Build messages module.

  Returns:
    messages.BuildTrigger.SubstitutionsValue (entries sorted by key), or
    None if there is nothing to encode.
  """
  if not substitutions:
    return None
  prop_cls = messages.BuildTrigger.SubstitutionsValue.AdditionalProperty
  # Sorted so that output ordering is stable (tests rely on this).
  props = [prop_cls(key=key, value=value)
           for key, value in sorted(substitutions.items())]
  return messages.BuildTrigger.SubstitutionsValue(additionalProperties=props)
class ParserError(exceptions.Error):
  """Error parsing YAML into a dictionary."""

  def __init__(self, path, msg):
    # Prefix the underlying parse error with the offending path.
    super(ParserError, self).__init__(
        'parsing {path}: {msg}'.format(path=path, msg=msg))
class ParseProtoException(exceptions.Error):
  """Error interpreting a dictionary as a specific proto message."""

  def __init__(self, path, proto_name, msg):
    # Include both the source path and the target message type in the error.
    super(ParseProtoException, self).__init__(
        'interpreting {path} as {proto_name}: {msg}'.format(
            path=path, proto_name=proto_name, msg=msg))
def SnakeToCamelString(snake):
  """Convert a snake_case string into a camelCase string.

  Leading underscores are preserved (e.g. '_foo_bar' -> '_fooBar'), and a
  string consisting only of underscores is returned unchanged.

  Args:
    snake: str, the string to be transformed.

  Returns:
    str, the transformed string.
  """
  pieces = snake.split('_')
  if not pieces:
    return snake
  # Leading underscores produce empty pieces at the front; count them so
  # they can be re-attached to the first real piece.
  n_leading = 0
  for piece in pieces:
    if piece:
      break
    n_leading += 1
  if n_leading:
    pieces = pieces[n_leading:]
    if not pieces:
      # The string was nothing but underscores; split() over-counted by one.
      return '_' * (n_leading - 1)
    pieces[0] = '_' * n_leading + pieces[0]
  head, tail = pieces[0], pieces[1:]
  return head + ''.join(word.capitalize() for word in tail)
def SnakeToCamel(msg, skip=None):
  """Recursively transform all keys and values from snake_case to camelCase.

  If a key is in skip, then its value is left alone.

  Args:
    msg: dict, list, or other. If 'other', the function returns immediately.
    skip: contains dict keys whose values should not have camel case applied.

  Returns:
    Same type as msg, with all snake_case strings converted to camelCase,
    except for the values of dict keys contained in skip.
  """
  if skip is None:
    skip = []
  if isinstance(msg, dict):
    converted = {}
    for key, value in six.iteritems(msg):
      # Keys are always converted; values are only recursed into when the
      # original key is not in the skip list.
      converted[SnakeToCamelString(key)] = (
          value if key in skip else SnakeToCamel(value, skip))
    return converted
  if isinstance(msg, list):
    return [SnakeToCamel(item, skip) for item in msg]
  return msg
def MessageToFieldPaths(msg):
  """Produce field paths from a message object.

  The result is used to create a FieldMask proto message that contains all
  field paths presented in the object.
  https://github.com/protocolbuffers/protobuf/blob/master/src/google/protobuf/field_mask.proto

  Args:
    msg: A user defined message object that extends the messages.Message
      class.
      https://github.com/google/apitools/blob/master/apitools/base/protorpclite/messages.py

  Returns:
    The list of field paths.
  """
  paths = []
  for field in msg.all_fields():
    value = msg.get_assigned_value(field.name)
    if field.repeated and not value:
      # An unset repeated field is initialized as an empty list; skip it.
      continue
    if value is None:
      continue
    name = resource_property.ConvertToSnakeCase(field.name)
    if hasattr(value, 'all_fields'):
      # Nested message: recurse and prefix each sub-path with this field.
      paths.extend('{}.{}'.format(name, sub)
                   for sub in MessageToFieldPaths(value))
    else:
      paths.append(name)
  return paths
def _UnpackCheckUnused(obj, msg_type):
  """Stuff a dict into a proto message, and fail if there are unused values.

  Args:
    obj: dict(), The structured data to be reflected into the message type.
    msg_type: type, The proto message type.

  Raises:
    ValueError: If there is an unused value in obj.

  Returns:
    Proto message, The message that was created from obj.
  """
  # apitools records any dict keys it could not map onto message fields as
  # "unrecognized fields" instead of raising; we walk the result to surface
  # them as errors with a full dotted path.
  msg = apitools_encoding.DictToMessage(obj, msg_type)
  def _CheckForUnusedFields(obj):
    """Check for any unused fields in nested messages or lists."""
    if isinstance(obj, proto_messages.Message):
      unused_fields = obj.all_unrecognized_fields()
      if unused_fields:
        if len(unused_fields) > 1:
          # Because this message shows up in a dotted path, use braces.
          # eg .foo.bar.{x,y,z}
          unused_msg = '{%s}' % ','.join(sorted(unused_fields))
        else:
          # For single items, omit the braces.
          # eg .foo.bar.x
          unused_msg = unused_fields[0]
        raise ValueError('.%s: unused' % unused_msg)
      # Recurse into every assigned field; each level re-raises with its own
      # field name prepended, building the dotted path bottom-up.
      for used_field in obj.all_fields():
        try:
          field = getattr(obj, used_field.name)
          _CheckForUnusedFields(field)
        except ValueError as e:
          raise ValueError('.%s%s' % (used_field.name, e))
    if isinstance(obj, list):
      # List elements are reported with an index, eg .foo[2].bar: unused
      for i, item in enumerate(obj):
        try:
          _CheckForUnusedFields(item)
        except ValueError as e:
          raise ValueError('[%d]%s' % (i, e))
  _CheckForUnusedFields(msg)
  return msg
def LoadMessageFromStream(stream,
                          msg_type,
                          msg_friendly_name,
                          skip_camel_case=None,
                          path=None):
  """Load a proto message from a stream of JSON or YAML text.

  Args:
    stream: file-like object containing the JSON or YAML data to be decoded.
    msg_type: The protobuf message type to create.
    msg_friendly_name: A readable name for the message type, for use in
      error messages.
    skip_camel_case: Contains proto field names or map keys whose values
      should not have camel case applied.
    path: str or None. Optional path to be used in error messages.

  Raises:
    ParserError: If there was a problem parsing the stream as a dict.
    ParseProtoException: If there was a problem interpreting the stream as
      the given message type.

  Returns:
    Proto message, The message that got decoded.
  """
  skip = [] if skip_camel_case is None else skip_camel_case
  # First parse the raw text into a dictionary.
  try:
    parsed = yaml.load(stream, file_hint=path)
  except yaml.Error as e:
    raise ParserError(path, e.inner_error)
  if not isinstance(parsed, dict):
    raise ParserError(path, 'Could not parse as a dictionary.')
  return _YamlToMessage(parsed, msg_type, msg_friendly_name, skip, path)
def LoadMessagesFromStream(stream,
                           msg_type,
                           msg_friendly_name,
                           skip_camel_case=None,
                           path=None):
  """Load multiple proto messages from a stream of JSON or YAML text.

  Args:
    stream: file-like object containing the JSON or YAML data to be decoded.
    msg_type: The protobuf message type to create.
    msg_friendly_name: A readable name for the message type, for use in
      error messages.
    skip_camel_case: Contains proto field names or map keys whose values
      should not have camel case applied.
    path: str or None. Optional path to be used in error messages.

  Raises:
    ParserError: If there was a problem parsing the stream.
    ParseProtoException: If there was a problem interpreting the stream as
      the given message type.

  Returns:
    Proto message list of the messages that got decoded.
  """
  skip = [] if skip_camel_case is None else skip_camel_case
  # Parse the stream as a sequence of YAML documents.
  try:
    documents = yaml.load_all(stream, file_hint=path)
  except yaml.Error as e:
    raise ParserError(path, e.inner_error)
  decoded = []
  for document in documents:
    decoded.append(
        _YamlToMessage(document, msg_type, msg_friendly_name, skip, path))
  return decoded
def _YamlToMessage(structured_data,
                   msg_type,
                   msg_friendly_name,
                   skip_camel_case=None,
                   path=None):
  """Interpret a decoded YAML dictionary as a specific proto message.

  Args:
    structured_data: Dict containing the decoded YAML data.
    msg_type: The protobuf message type to create.
    msg_friendly_name: A readable name for the message type, for use in
      error messages.
    skip_camel_case: Contains proto field names or map keys whose values
      should not have camel case applied.
    path: str or None. Optional path to be used in error messages.

  Raises:
    ParseProtoException: If there was a problem interpreting the data as the
      given message type.

  Returns:
    Proto message, The message that got decoded.
  """
  # Proto fields are camelCase while the YAML convention is snake_case.
  camelized = SnakeToCamel(structured_data, skip_camel_case)
  try:
    return _UnpackCheckUnused(camelized, msg_type)
  except Exception as e:  # pylint: disable=broad-except
    # A valid YAML document is not necessarily a valid message, so any error
    # raised during the dict-to-message conversion is reported uniformly.
    raise ParseProtoException(path, msg_friendly_name, '%s' % e)
def LoadMessageFromPath(path,
                        msg_type,
                        msg_friendly_name,
                        skip_camel_case=None):
  """Load a proto message from a file containing JSON or YAML text.

  Args:
    path: The path to a file containing the JSON or YAML data to be decoded.
    msg_type: The protobuf message type to create.
    msg_friendly_name: A readable name for the message type, for use in
      error messages.
    skip_camel_case: Contains proto field names or map keys whose values
      should not have camel case applied.

  Raises:
    files.MissingFileError: If the file does not exist.
    ParserError: If there was a problem parsing the file as a dict.
    ParseProtoException: If there was a problem interpreting the file as the
      given message type.

  Returns:
    Proto message, The message that got decoded.
  """
  # files.FileReader produces user-friendly errors for missing files.
  with files.FileReader(path) as stream:
    return LoadMessageFromStream(stream, msg_type, msg_friendly_name,
                                 skip_camel_case, path)
def LoadMessagesFromPath(path,
                         msg_type,
                         msg_friendly_name,
                         skip_camel_case=None):
  """Load multiple proto messages from a file containing JSON or YAML text.

  Args:
    path: The path to a file containing the JSON or YAML data to be decoded.
    msg_type: The protobuf message type to create.
    msg_friendly_name: A readable name for the message type, for use in
      error messages.
    skip_camel_case: Contains proto field names or map keys whose values
      should not have camel case applied.

  Raises:
    files.MissingFileError: If the file does not exist.
    ParseProtoException: If there was a problem interpreting the file as the
      given message type.

  Returns:
    Proto message list of the messages that got decoded.
  """
  # files.FileReader produces user-friendly errors for missing files.
  with files.FileReader(path) as stream:
    return LoadMessagesFromStream(stream, msg_type, msg_friendly_name,
                                  skip_camel_case, path)
def IsRegionalWorkerPool(resource_name):
  """Determine if the provided full resource name is a regional worker pool.

  Args:
    resource_name: str, The string to test.

  Returns:
    bool, True if the string is a regional worker pool's full resource name.
  """
  return re.match(REGIONAL_WORKERPOOL_NAME_MATCHER, resource_name) is not None
def RegionalWorkerPoolShortName(resource_name):
  """Get the name part of a regional worker pool's full resource name.

  For example, "projects/abc/locations/def/workerPools/ghi" returns "ghi".

  Args:
    resource_name: A regional worker pool's full resource name.

  Raises:
    ValueError: If the full resource name was not well-formatted.

  Returns:
    The worker pool's short name.
  """
  match = re.search(REGIONAL_WORKERPOOL_NAME_SELECTOR, resource_name)
  if not match:
    raise ValueError('The worker pool resource name must match "%s"' %
                     (REGIONAL_WORKERPOOL_NAME_MATCHER,))
  return match.group(1)
def RegionalWorkerPoolRegion(resource_name):
  """Get the region part of a regional worker pool's full resource name.

  For example, "projects/abc/locations/def/workerPools/ghi" returns "def".

  Args:
    resource_name: str, A regional worker pool's full resource name.

  Raises:
    ValueError: If the full resource name was not well-formatted.

  Returns:
    str, The worker pool's region string.
  """
  match = re.search(REGIONAL_WORKERPOOL_REGION_SELECTOR, resource_name)
  if not match:
    raise ValueError('The worker pool resource name must match "%s"' %
                     (REGIONAL_WORKERPOOL_NAME_MATCHER,))
  return match.group(1)
def GitHubEnterpriseConfigFromArgs(args, update=False):
  """Construct the GitHubEnterpriseConfig resource from the command line args.

  Args:
    args: an argparse namespace. All the arguments that were provided to this
      command invocation.
    update: bool, if the args are for an update.

  Returns:
    A populated GitHubEnterpriseConfig message.
  """
  messages = GetMessagesModule()
  ghe = messages.GitHubEnterpriseConfig()
  ghe.hostUrl = args.host_uri
  ghe.appId = args.app_id
  if args.webhook_key is not None:
    ghe.webhookKey = args.webhook_key
  # The peered network is only applied on create, never on update.
  if not update and args.peered_network is not None:
    ghe.peeredNetwork = args.peered_network
  if args.gcs_bucket is not None:
    # App config supplied as a JSON object stored in a GCS bucket.
    gcs_location = messages.GCSLocation()
    gcs_location.bucket = args.gcs_bucket
    gcs_location.object = args.gcs_object
    if args.generation is not None:
      gcs_location.generation = args.generation
    ghe.appConfigJson = gcs_location
  else:
    # Otherwise the app credentials are referenced by named secrets.
    secret_location = messages.GitHubEnterpriseSecrets()
    secret_location.privateKeyName = args.private_key_name
    secret_location.webhookSecretName = args.webhook_secret_name
    secret_location.oauthSecretName = args.oauth_secret_name
    secret_location.oauthClientIdName = args.oauth_client_id_name
    ghe.secrets = secret_location
  return ghe
| [
"[email protected]"
] | |
33bd9813fab74f630b0d6986aa9f4747cd2d0f9b | 18f2d1458103e1aacaaa14d9ff52654da0154dc8 | /src/layers/cnn.py | a65eefba9fdcd3fd3a51a8020d43ef2cd3f172b7 | [] | no_license | yamad07/IADA | 4fbda5b2e7cdb5efd83f2bd2960bfb8dcfd0d455 | 7dbda1eb336f44e57567f4541e14b31304a4e381 | refs/heads/master | 2020-04-10T23:18:01.809883 | 2019-01-30T16:05:21 | 2019-01-30T16:05:21 | 161,347,800 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 598 | py | import torch.nn as nn
def conv_layer(in_dim, out_dim, kernel_size):
    """Build a conv block: three 'same'-padded convolutions each followed by
    ELU, then BatchNorm and a 2x2 average pooling that halves the spatial size.
    """
    pad = int((kernel_size - 1) / 2)
    layers = [
        nn.Conv2d(in_dim, out_dim, kernel_size=kernel_size, padding=pad),
        nn.ELU(inplace=True),
        nn.Conv2d(out_dim, out_dim, kernel_size=kernel_size, padding=pad),
        nn.ELU(inplace=True),
        nn.Conv2d(out_dim, out_dim, kernel_size=kernel_size, padding=pad),
        nn.ELU(inplace=True),
        nn.BatchNorm2d(out_dim),
        nn.AvgPool2d(kernel_size=2, stride=2),
    ]
    return nn.Sequential(*layers)
| [
"[email protected]"
] | |
f01a7bc2ce9074bc9789a6850d69bb287d4328b0 | 48732e80f8bbb7707ccbe2f864d63e1b120502e1 | /graduate/Lab-1/approx_errors_erk.py | fbbc5b2c09861355d89d3d558bc1e4b38a54c2e1 | [] | no_license | ChristopherShort/computational-econ-labs | a0d9023cbc704d15ff7f9076a8b4fdb27f030136 | c9dac8e9b9bbf36e7e969a1c8014b3e525104b0a | refs/heads/master | 2021-01-25T01:21:13.565627 | 2013-10-14T12:50:45 | 2013-10-14T12:50:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,773 | py | import numpy as np
import matplotlib.pyplot as plt
from pyeconomics.models import growth
def cobb_douglas_output(t, k, params):
    """Cobb-Douglas production function.

    Arguments:

        t:      (array-like) Time (unused; kept for ODE-solver signature).
        k:      (array-like) Capital (per person/effective person).
        params: (dict) Dictionary of parameter values; must contain 'alpha'.

    Returns:

        y: (array-like) Output (per person/effective person).

    """
    return k**params['alpha']
def marginal_product_capital(t, k, params):
    """Marginal product of capital with Cobb-Douglas production technology.

    Arguments:

        t:      (array-like) Time (unused; kept for ODE-solver signature).
        k:      (array-like) Capital (per person/effective person).
        params: (dict) Dictionary of parameter values; must contain 'alpha'.

    Returns:

        y_k: (array-like) Derivative of output with respect to capital, k.

    """
    alpha = params['alpha']
    # dy/dk for y = k**alpha
    return alpha * k**(alpha - 1)
def analytic_k_star(params):
    """The steady-state level of capital stock per effective worker, k_bar,
    in the Solow model is a function of the 5 exogenous parameters!

    """
    # steady state: k* = (s / (n + g + delta))**(1 / (1 - alpha))
    ratio = params['s'] / (params['n'] + params['g'] + params['delta'])
    exponent = 1 / (1 - params['alpha'])
    return ratio**exponent
def solow_analytic_solution(k0, t, params):
    """Computes the analytic solution for the Solow model with Cobb-Douglas
    production technology.

    Arguments:

        k0:     (float) Initial value for capital (per person/effective
                person).
        t:      (array-like) (T,) array of points at which the solution is
                desired.
        params: (dict) Dictionary of parameter values.

    Returns:

        analytic_traj: (array-like) (T,2) array of (t, k(t)) pairs along the
        analytic solution trajectory.

    """
    n, g = params['n'], params['g']
    s = params['s']
    alpha, delta = params['alpha'], params['delta']

    # lambda governs the speed of convergence
    lmbda = (n + g + delta) * (1 - alpha)
    decay = np.exp(-lmbda * t)

    # closed-form solution for k(t) given k(0) = k0
    k_t = ((s / (n + g + delta)) * (1 - decay) +
           k0**(1 - alpha) * decay)**(1 / (1 - alpha))

    # stack time and capital into a (T, 2) array
    return np.hstack((t[:, np.newaxis], k_t[:, np.newaxis]))
# create a new model object
params = {'s':0.1, 'n':0.02, 'g':0.02, 'delta':0.1, 'alpha':0.33}
model = growth.SolowModel(cobb_douglas_output, marginal_product_capital, params)
# create a dictionary of steady state expressions
steady_state_funcs = {'k_star':analytic_k_star}
# pass it as an argument to the set_steady_state_functions method
model.steady_state.set_functions(steady_state_funcs)
model.steady_state.set_values()
# solve the model using various methods
k0 = 6
h = 1.0
T = 200
forward_euler_traj = model.integrate(0, k0, T, h, 'forward_euler')
erk2_traj = model.integrate(0, k0, T, h, 'erk2')
erk3_traj = model.integrate(0, k0, T, h, 'erk3')
erk4_traj = model.integrate(0, k0, T, h, 'erk4')
grid = erk2_traj[:,0]
analytic_trajectory = solow_analytic_solution(k0, grid, model.args)
##### Approximation errors for RK methods #####
fig = plt.figure(figsize=(8,6))
# plot the forward Euler approximation error
benchmark_error = model.plot_approximation_error(forward_euler_traj,
analytic_trajectory,
log=True)[1]
benchmark_error.set_label('Forward Euler')
benchmark_error.set_marker('o')
benchmark_error.set_linestyle('none')
# plot the ERK2 approximation error
traj_error = model.plot_approximation_error(erk2_traj,
analytic_trajectory,
log=True)[1]
traj_error.set_label('ERK2')
#traj_error.set_color('r')
traj_error.set_marker('o')
traj_error.set_linestyle('none')
# plot the backward Euler approximation error
traj_error2 = model.plot_approximation_error(erk3_traj,
analytic_trajectory,
log=True)[1]
traj_error2.set_label('ERK3')
#traj_error2.set_color('r')
traj_error2.set_marker('o')
traj_error2.set_linestyle('none')
# plot the trapezoidal rule approximation error
traj_error3 = model.plot_approximation_error(erk4_traj,
analytic_trajectory,
log=True)[1]
traj_error3.set_label('ERK4')
#traj_error3.set_color('r')
traj_error3.set_marker('o')
traj_error3.set_linestyle('none')
# demarcate machine eps
plt.axhline(np.finfo('float').eps, color='k', ls='--',
label=r'Machine-$\epsilon$')
# Change the title and add a legend
plt.title('Approximation errors for explicit RK methods',
fontsize=20, family='serif')
plt.legend(loc='best', frameon=False, prop={'family':'serif'})
plt.savefig('graphics/solow-approximation-error-erk.png')
plt.savefig('graphics/solow-approximation-error-erk.pdf')
plt.show()
##### Compare convergence of RK4 with forward Euler #####
# solve the model using various methods
k0 = 6
h = 1.0
T = 200
forward_euler_traj = model.integrate(0, k0, T, h, 'forward_euler')
erk4_traj = model.integrate(0, k0, T, h, 'erk4')
grid = erk4_traj[:,0]
analytic_trajectory = solow_analytic_solution(k0, grid, model.args)
h = 0.1
forward_euler_traj_2 = model.integrate(0, k0, T, h, 'forward_euler')
erk4_traj_2 = model.integrate(0, k0, T, h, 'erk4')
grid = erk4_traj_2[:,0]
analytic_trajectory_2 = solow_analytic_solution(k0, grid, model.args)
fig = plt.figure(figsize=(8,6))
# plot the forward Euler approximation error
benchmark_error = model.plot_approximation_error(forward_euler_traj,
analytic_trajectory,
log=True)[1]
benchmark_error.set_label('Forward Euler, h=1.0')
benchmark_error.set_marker('o')
benchmark_error.set_linestyle('none')
benchmark_error2 = model.plot_approximation_error(forward_euler_traj_2,
analytic_trajectory_2,
log=True)[1]
benchmark_error2.set_label('Forward Euler, h=0.1')
benchmark_error2.set_color('b')
benchmark_error2.set_marker('^')
benchmark_error2.set_linestyle('none')
# plot the ERK4 approximation error
traj_error = model.plot_approximation_error(erk4_traj,
analytic_trajectory,
log=True)[1]
traj_error.set_label('ERK4, h=1.0')
traj_error.set_color('c')
traj_error.set_marker('o')
traj_error.set_linestyle('none')
traj_error2 = model.plot_approximation_error(erk4_traj_2,
analytic_trajectory_2,
log=True)[1]
traj_error2.set_label('ERK4, h=0.1')
traj_error.set_color('c')
traj_error2.set_marker('^')
traj_error2.set_linestyle('none')
# demarcate machine eps
plt.axhline(np.finfo('float').eps, color='k', ls='--',
label=r'Machine-$\epsilon$')
# Change the title and add a legend
plt.title(r'The difference between $\mathcal{O}(h)$ and $\mathcal{O}(h^4)$',
fontsize=20, family='serif')
plt.legend(loc='upper right', frameon=False, prop={'family':'serif'})
#bbox_to_anchor=(1.45, 1.0))
plt.savefig('graphics/solow-convergence-erk4.png')#, bbox_inches='tight')
plt.savefig('graphics/solow-convergence-erk4.pdf')#, bbox_inches='tight')
plt.show()
##### Compare Forward Euler, RK5, and dopri5 #####
# solve the model using various methods
k0 = 6
h = 1.0
T = 200
forward_euler_traj = model.integrate(0, k0, T, h, 'forward_euler')
erk5_traj = model.integrate(0, k0, T, h, 'erk5')
dopri5_traj = model.integrate(0, k0, T, h, 'dopri5')
grid = erk5_traj[:,0]
analytic_trajectory = solow_analytic_solution(k0, grid, model.args)
fig = plt.figure(figsize=(8,6))
# plot the forward Euler approximation error
benchmark_error = model.plot_approximation_error(forward_euler_traj,
analytic_trajectory,
log=True)[1]
benchmark_error.set_label('Forward Euler')
benchmark_error.set_marker('o')
benchmark_error.set_linestyle('none')
# plot the ERK4 approximation error
traj_error = model.plot_approximation_error(erk5_traj,
analytic_trajectory,
log=True)[1]
traj_error.set_label('ERK5')
traj_error.set_color('c')
traj_error.set_marker('o')
traj_error.set_linestyle('none')
# plot the dopri5 approximation error
traj_error2 = model.plot_approximation_error(dopri5_traj,
analytic_trajectory,
log=True)[1]
traj_error2.set_label('dopri5')
traj_error.set_color('m')
traj_error2.set_marker('o')
traj_error2.set_linestyle('none')
# demarcate machine eps
plt.axhline(np.finfo('float').eps, color='k', ls='--',
label=r'Machine-$\epsilon$')
# Change the title and add a legend
plt.title(r'The benefits of adaptive step size',
fontsize=20, family='serif')
plt.legend(loc='best', frameon=False, prop={'family':'serif'})
plt.savefig('graphics/solow-erk5-dopri5.png', bbox_inches='tight')
plt.savefig('graphics/solow-erk5-dopri5.pdf', bbox_inches='tight')
plt.show()
| [
"[email protected]"
] | |
da79b470a5b4630a6673a89f07ae0c4dfb16071e | f4bb7a7e3dc8c37f7df66f1e4b207160b8091d97 | /TeesUni/circlearea.py | dec8dd45e2fee8bb9126672786ed6fd45535e5ac | [] | no_license | davidwilliamwillis/TeesSession1 | 58fb7ebb8ef9aa2e93a0cfa3b2a8d5bb3b834923 | db265986674e167ee54034c3fd3227fc964d54d4 | refs/heads/master | 2020-06-01T18:58:39.094395 | 2019-06-23T14:44:56 | 2019-06-23T14:44:56 | 190,892,398 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 251 | py | import math
radius = int (input("Please enter the radius:"))
radiusSquared = radius**2
Area = math.pi * radiusSquared
print("Your Circle Area is ", Area)
circumference = math.pi * radius * 2
print ("Your circle's circumference is: ", circumference)
| [
"[email protected]"
] | |
541ecd685d223e738f5743b51455b88c1cfda397 | 666592be9b8f88105bb8ad4ff7727124aed26aaa | /app/api_1_0/api_auth.py | b192ddcfc600e16c27f853e6a50a9894ecb03c94 | [] | no_license | StevyZheng/rks | f952f24e780ef0e1575874e1ea25c64f4b37ae9f | b23d5b228042a0bed9975f163255bf6f5fb0a672 | refs/heads/master | 2020-04-29T12:02:39.180093 | 2019-04-15T16:34:36 | 2019-04-15T16:34:36 | 176,123,506 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,856 | py | #!/usr/bin/env python
# _*_ coding:utf-8 _*_
# @Author : Stevy
from flask_httpauth import HTTPBasicAuth
from flask import jsonify, app
from itsdangerous import SignatureExpired, BadSignature
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from config import Config
from app.models.user import User
auth = HTTPBasicAuth()
# Returned when the username/password passed with -u on an API request is invalid.
@auth.error_handler
def unauthorized():
    """Return a JSON 403 response when HTTP basic auth fails.

    Registered as the flask_httpauth error handler, so it runs whenever the
    supplied credentials are rejected.
    """
    # The original wrapped this constant in a no-op '{}'.format(...) call.
    error_info = "Invalid credentials"
    print(error_info)
    response = jsonify({'error': error_info})
    response.status_code = 403
    return response
# Helper only: given a username and password, look up the matching database
# record and return True when the user exists and the password is correct.
def verify_password_for_token(username, password):
    """Return True iff a user with this username exists and the password matches."""
    user = User.query.filter_by(username=username).first()
    return bool(user) and bool(user.verify_password(password))
# Verifies either a token or a username/password pair.
# Credentials are supplied as e.g. "-u username:password" (for user liuxin
# with password 123456: -u liuxin:123456).
# The username field is first treated as a token and decrypted; if it decodes
# to an existing user, authentication succeeds. A plain username will not
# decode, so `user` comes back as None and we fall through to the
# username/password check.
# Note: a token is passed the same way as username:password -- don't forget
# the trailing colon.
# Endpoints decorated with @auth.login_required invoke this function.
@auth.verify_password
def verify_password(username_or_token, password):
    """Authenticate with either a token (in the username field) or username/password."""
    # First try to interpret the credential as a token.
    if verify_auth_token(username_or_token) is not None:
        return True
    # Not a valid token: fall back to username/password authentication.
    return verify_password_for_token(username=username_or_token,
                                     password=password)
# Helper that generates an auth token.
def generate_auth_token(expiration=36000):
    """Create a signed, time-limited token for the user with id 1.

    Note: Serializer is imported as
    `from itsdangerous import TimedJSONWebSignatureSerializer as Serializer`.
    """
    serializer = Serializer(secret_key=Config.SECRET_KEY,
                            expires_in=expiration)
    # The token payload is hard-coded to the user with database id 1.
    return serializer.dumps({'id': 1})
# Decrypt the token. Since tokens are generated for the user with id 1,
# decoding yields data == {'id': 1} and therefore the user with id 1.
def verify_auth_token(token):
    """Decode *token* and return the matching User, or None if it is bad.

    Expired signatures and malformed tokens are treated identically: no user.
    """
    serializer = Serializer(Config.SECRET_KEY)
    try:
        payload = serializer.loads(token)
    except (SignatureExpired, BadSignature):
        return None
    # The token was minted with {'id': 1}, so this looks up that user row.
    return User.query.get(payload['id'])
| [
"[email protected]"
] | |
209980e269323975daadb6f92996d0f260698963 | e53b067f6a41f076588efda85a2dd1616b8a6858 | /remoteroadrunner/plat.py | 054e492806d00c752e57a89d5bb78ddaf8a203d5 | [] | no_license | mbatc/EGB220-Robot | 4dd9b52c3c80861ab0a0b7d9714beb80736f46ac | 118d3bc4f2cb78279c2cec1d99d84e5168958f0f | refs/heads/master | 2023-05-21T21:49:20.211478 | 2021-06-03T12:10:21 | 2021-06-03T12:10:21 | 344,645,882 | 0 | 1 | null | 2021-05-30T08:16:15 | 2021-03-05T00:18:18 | C++ | UTF-8 | Python | false | false | 2,050 | py | from sdl2 import *
import ctypes
class Window:
    """Thin wrapper around an SDL2 window carrying an OpenGL 4.1 core context.

    Relies on ``from sdl2 import *`` at module level for every ``SDL_*`` name
    (``c_int`` presumably comes from the same star import — TODO confirm).
    Any initialisation failure prints the SDL error and terminates the process.
    """
    def __init__(self, width, height, name):
        """Initialise SDL, create the window and its GL context; exit(1) on failure."""
        if SDL_Init(SDL_INIT_EVERYTHING) < 0:
            print("Error: SDL could not initialize! SDL Error: " + SDL_GetError().decode("utf-8"))
            exit(1)
        # GL attributes are set before the window is created.
        SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1)
        SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24)
        SDL_GL_SetAttribute(SDL_GL_STENCIL_SIZE, 8)
        SDL_GL_SetAttribute(SDL_GL_ACCELERATED_VISUAL, 1)
        # 16-sample multisampling (MSAA).
        SDL_GL_SetAttribute(SDL_GL_MULTISAMPLEBUFFERS, 1)
        SDL_GL_SetAttribute(SDL_GL_MULTISAMPLESAMPLES, 16)
        # Forward-compatible OpenGL 4.1 core profile.
        SDL_GL_SetAttribute(SDL_GL_CONTEXT_FLAGS, SDL_GL_CONTEXT_FORWARD_COMPATIBLE_FLAG)
        SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 4)
        SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 1)
        SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE)
        # macOS-oriented hints: ctrl-click acts as right-click, HiDPI scaling off.
        SDL_SetHint(SDL_HINT_MAC_CTRL_CLICK_EMULATE_RIGHT_CLICK, b"1")
        SDL_SetHint(SDL_HINT_VIDEO_HIGHDPI_DISABLED, b"1")
        self.sdl_window = SDL_CreateWindow(
            name.encode('utf-8'),
            SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED,
            width, height,
            SDL_WINDOW_OPENGL|SDL_WINDOW_RESIZABLE
        )
        self.gl_context = SDL_GL_CreateContext(self.sdl_window)
        if self.gl_context is None:
            print("Error: Cannot create OpenGL Context! SDL Error: " + SDL_GetError().decode("utf-8"))
            exit(1)
        SDL_GL_MakeCurrent(self.sdl_window, self.gl_context)
        # NOTE(review): the message says "Warning" yet the process still exits — confirm intended.
        if SDL_GL_SetSwapInterval(1) < 0:
            print("Warning: Unable to set VSync! SDL Error: " + SDL_GetError().decode("utf-8"))
            exit(1)
    def title(self):
        # Not implemented yet.
        pass
    def width(self):
        """Return the current window width in pixels."""
        w = c_int()
        SDL_GetWindowSize(self.sdl_window, ctypes.byref(w), None)
        return w.value
    def height(self):
        """Return the current window height in pixels."""
        h = c_int()
        SDL_GetWindowSize(self.sdl_window, None, ctypes.byref(h))
        return h.value
    def x(self):
        """Return the window's x position on the desktop."""
        x = c_int()
        SDL_GetWindowPosition(self.sdl_window, ctypes.byref(x), None)
        return x.value
    def y(self):
        """Return the window's y position on the desktop."""
        y = c_int()
        SDL_GetWindowPosition(self.sdl_window, None, ctypes.byref(y))
        return y.value
| [
"[email protected]"
] | |
454b3f6fcc8d3a395b5b82e4188f13105894c960 | aae216eb4688b37fc8b96fc3900dfcb5d8a3ca16 | /midterm-c.py | 3a0583911d0a89006df1e7789cdfe85c804e54b4 | [] | no_license | omerfarukkutlu/python-midterm | d7461d0bc6a76b65cc0f92b15353c72ac944bf87 | 75a7c7a531f9b7da13fd65477cfcc8c475554133 | refs/heads/master | 2020-05-18T14:15:51.375587 | 2019-05-01T21:27:50 | 2019-05-01T21:27:50 | 184,462,382 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,596 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Apr 20 20:56:08 2019
@author: farukkutlu
"""
import numpy as np
import matplotlib.pyplot as plt
with open('airfoils/eh2012.dat', 'r') as file:
header = file.readline()
x, y = np.loadtxt(file, dtype=float, unpack=True)
class Panel:
def __init__(self, xa, ya, xb, yb):
self.xa, self.ya = xa, ya # panel starting-point
self.xb, self.yb = xb, yb # panel ending-point
self.xc, self.yc = (xa + xb) / 2, (ya + yb) / 2 # panel center
self.length = np.sqrt((xb - xa)**2 + (yb - ya)**2) # panel length
# orientation of panel (angle between x-axis and panel's normal)
if xb - xa <= 0.0:
self.beta = np.arccos((yb - ya) / self.length)
elif xb - xa > 0.0:
self.beta = np.pi + np.arccos(-(yb - ya) / self.length)
def define_panels(x, y, N):
R = 0.5*(x.max() - x.min()) # radius of the circle
x_c = (x.max() + x.min()) / 2.0 # x-coordinate of circle center
theta = np.linspace(0.0, 2.0*np.pi, N+1) # array of angles
x_circle = x_c + R*np.cos(theta) # x-coordinates of circle
x_last = np.copy(x_circle) # x-coordinate of tr. edge
y_last = np.empty_like(x_last) # y-coordinate of tr. edge
# to close the trailing edge gap
x, y = np.append(x, x[0]), np.append(y, y[0])
# calculating the y-points of the panels
j = 0
for i in range(N):
while j < len(x)-1:
if (x[j] <= x_last[i] <= x[j+1]) or (x[j+1] <= x_last[i] <= x[j]):
break
else:
j += 1
a = (y[j + 1] - y[j])/(x[j + 1] - x[j])
b = y[j + 1] - a*x[j + 1]
y_last[i] = a*x_last[i] + b
y_last[N] = y_last[0]
# creating panels
panels = np.empty(N, dtype=object)
for i in range(N):
panels[i] = Panel(x_last[i], y_last[i], x_last[i + 1], y_last[i + 1])
return panels
def foil_normals(x, y):
x_c = (x[1:] + x[:-1])/2 # center of x-points
y_c = (y[1:] + y[:-1])/2 # center of y-points
d_x = x[1:] - x[:-1] # distance in x of two points.
d_y = y[1:] - y[:-1] # distance in y of two points.
l = (d_x**2+d_y**2)**0.5 # distance between two points. (length)
dx = d_y/l # unit vector in x.
dy = -d_x/l # unit vector in y.
return x_c, y_c, dx, dy
def cusp(x,y):
vx1, vy1, vx2, vy2 = x[0]-x[1], y[0]-y[1], x[-1]-x[-2], y[-1]-y[-2]
l1 = (vx1**2+vy1**2)**0.5
l2 = (vx2**2+vy2**2)**0.5
theta = np.arccos((vx1*vx2+vy1*vy2)/(l1*l2))
if 2.5 <= np.rad2deg(theta) <= 5.0:
cusp_ = 'almost cusped'
elif 0.0 <= np.rad2deg(theta) <= 2.5:
cusp_ = 'cusped'
else:
cusp_ = 'pointed'
return cusp_, vx1, vy1, vx2, vy2, np.rad2deg(theta)
def camberline(x,y):
x, y = x.tolist(), y.tolist()
if y[0] == y[-1]:
if len(x)%2 != 0:
mid = x.index(min(x))
x1, x2, y1, y2 = x[:mid+1], x[mid:], y[:mid+1], y[mid:]
meanx, meany = x2 , []
else:
mid = x.index(min(x))
x1, x2, y1, y2 = x[:mid], x[mid+1:], y[:mid], y[mid+1:]
x1.reverse(), x1.pop()
meanx, meany = [min(x)] + (np.array(np.array(x1)+np.array(x2))/2).tolist() + [x[-1]], [y[x.index(min(x))]]
else:
if len(x)%2 != 0:
mid = x.index(min(x))
x1, x2, y1, y2 = x[:mid], x[mid+1:], y[:mid], y[mid+1:]
meanx, meany = [min(x)] + x2 , [y[mid]]
else:
mid = x.index(min(x))
x1, x2, y1, y2 = x[:mid+1], x[mid+1:], y[:mid+1], y[mid+1:]
meanx, meany = [min(x)] + x2 , [y[x.index(min(x))]]
max_t, t, t_x, t_y = 0, 0, 0, 0
y1.reverse()
for ty1, ty2 in zip(y1, y2):
meany.append((ty1+ty2)/2)
t = ty1 - ty2
if t > max_t:
max_t = t
t_x, t_y = [x[y.index(ty1)], x[y.index(ty1)]], [ty1, ty2]
if y[0] == y[-1]:
if len(x)%2 == 0:
meany.append(y[-1])
return meanx, meany, t_x, t_y
def plot(header, x, y):
x_l = x.tolist()
camberx, cambery, tx, ty = camberline(x, y)
x,y = np.append(x, x[0]), np.append(y, y[0])
min_ = x_l.index(min(x_l))
chordx, chordy = [ min(x), max(x) ], [ y[min_], (y[0]+y[-2])*0.5 ]
plt.figure(figsize=(15, 15))
plt.plot(chordx, chordy, color='deepskyblue', linestyle='-', label='Chord Line') # Chord Line
plt.plot(camberx, cambery, 'k-.', label = 'Mean Camberline', linewidth=2) # Mean Camberline
plt.plot(tx, ty, color='mediumseagreen', linestyle='-', linewidth=3,
label='Max Thickness at '+str(round(tx[0],3))+'c') # Max thickness
plt.title(header, loc='center', fontsize=16) # header
plt.plot(x, y, color='k', linestyle='-', linewidth=4, alpha=1) # plot of airfoil
plt.axes().set_aspect('equal') # aspect ratio
plt.xlim(-0.05, 1.05) # x-limit
plt.ylim(min(y) - 0.05, max(y) + 0.075) # y-limit
plt.xlabel('x', fontsize=16)
plt.ylabel('y', fontsize=16)
plt.legend()
def plot_panels(header, x, y):
plot(header, x, y)
cusp_, vx1, vy1, vx2, vy2, theta = cusp(x,y)
panels = define_panels(x, y, N=20)
# plot paneled geometry
plt.plot(np.append([panel.xa for panel in panels], panels[0].xa),
np.append([panel.ya for panel in panels], panels[0].ya),
linestyle='-', linewidth=2, marker='o', markersize=6,
color='red', label='Panel Lines', alpha=1)
plt.quiver([panel.xc for panel in panels],[panel.yc for panel in panels],
np.cos([panel.beta for panel in panels]),
np.sin([panel.beta for panel in panels]),
alpha=0.8, scale=20, width=0.004)
plt.quiver(x[0], y[0], vx1, vy1, width = 0.003, color='crimson')
plt.quiver(x[-2], y[-2], vx2, vy2, width = 0.003, color='crimson')
t = plt.annotate(str(round(theta,2))+'\u00b0,'+' '+cusp_,
xy=(1.01,-0.01), xycoords='data', xytext=(-100,-60),
textcoords='offset points', arrowprops=dict(arrowstyle='fancy',
fc='0.6', connectionstyle="angle3,angleA=0,angleB=-40"))
t.set_bbox(dict(facecolor='crimson', alpha=.9, edgecolor='red'))
plt.ylim(min(y) - 0.075, max(y) + 0.15) # y-limit
plt.legend()
plot_panels(header, x, y)
| [
"[email protected]"
] | |
d9c526969dc748e9e84b094f1266d0535a2a6f15 | 20ef681ddfb4de241f77660698a1f05bbe928abd | /ansible/dynamic_vars/testing.py | 19e28595e78ebac720ba83286b1ca495d1ffcf26 | [] | no_license | burnyd/arista_automation_events | 78e9591b92e83e27adc40facfe8d80b5fa14dffc | 73c7b733500fc5b26b6bb4d8a056a2ab9830d38d | refs/heads/master | 2021-07-12T18:19:27.524401 | 2021-04-02T18:28:13 | 2021-04-02T18:28:13 | 123,217,062 | 8 | 0 | null | 2021-04-02T18:28:14 | 2018-02-28T02:26:02 | Python | UTF-8 | Python | false | false | 1,130 | py | #!/usr/bin/python
#Change the common structure so it breaks out leafs/spines as well as all devices.
import json
import requests
import os
import glob
common_url = "http://flaskapi:5000/api/static/common.json"
headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
requests.packages.urllib3.disable_warnings()
common_result = requests.get(common_url, headers=headers, verify=False)
common = json.loads(common_result.content.decode('utf-8'))
def get_leafdevices():
    """Return the leaf device names from the module-level ``common`` document."""
    return [device for device in common['leafs']]
def get_spinedevices():
    """Return the spine device names from the module-level ``common`` document."""
    return list(common['spines'])
def get_all_configs():
    """Fetch a device config document from the flask API.

    NOTE(review): the ``return`` sits inside the ``for`` loop, so only the
    FIRST entry of ``../../flask/static`` is ever fetched despite the name —
    confirm whether iterating all entries was intended.  The visible caller
    (``testing['hostname']`` below) does rely on a single-config dict.
    """
    dir_list = os.listdir("../../flask/static")
    for name in dir_list:
        url_config = "http://flaskapi:5000/api/static/%s" % (name)
        url_result = requests.get(url_config, headers=headers, verify=False)
        url_json = json.loads(url_result.content.decode('utf-8'))
        return url_json
testing = get_all_configs()
print(testing['hostname']) | [
"[email protected]"
] | |
440d85991f4a5c63c993bfa5575e75c0fe80b2eb | f281d0d6431c1b45c6e5ebfff5856c374af4b130 | /DAY001~099/DAY25-BOJ1068-트리/shinjam.py | 7db78b4398a5df90c58f272225b3fb2e50d4feb0 | [] | no_license | tachyon83/code-rhino | ec802dc91dce20980fac401b26165a487494adb4 | b1af000f5798cd12ecdab36aeb9c7a36f91c1101 | refs/heads/master | 2022-08-13T09:10:16.369287 | 2022-07-30T11:27:34 | 2022-07-30T11:27:34 | 292,142,812 | 5 | 6 | null | null | null | null | UTF-8 | Python | false | false | 612 | py | from collections import defaultdict
# BOJ 1068: count the leaf nodes of a tree after deleting one node.
# stdin: node count N; the parent of each node (-1 marks the root); the node to delete.
N = int(input())
input_nodes = map(int, input().split())
del_node = int(input())
nodes = defaultdict(list)  # undirected adjacency list (parent <-> child)
stack = []                 # DFS stack, seeded with the root (if it survives deletion)
visited = [0] * N
# Build the adjacency list, dropping every edge that touches the deleted node.
for idx, val in enumerate(input_nodes):
    if del_node in [idx, val]:
        continue
    if val == -1:
        stack.append(idx)
        continue
    nodes[idx].append(val)
    nodes[val].append(idx)
# Iterative DFS: a node with no unvisited neighbour is a leaf of the pruned tree.
ret = 0
while stack:
    node = stack.pop()
    visited[node] = 1
    leaf = True
    for n in nodes[node]:
        if not visited[n]:
            stack.append(n)
            leaf = False
    if leaf:
        ret += 1
print(ret)
| [
"[email protected]"
] | |
737d0d14cbf7617d263689780445a33d38fb5afa | 8c466de1fb9de881718b6f59a71e02f54963ea96 | /DJCelery/urls.py | 5905e385ad9cfc4de49fb3867d7d810cf38778ea | [] | no_license | IsaacNewLee/DjCeelry | d17837b50cda8f4a2d96b1f36d953956b54fa7ad | 06f5c6cdddeb4bbf130fe52bde5c32922cd66c5e | refs/heads/master | 2022-02-18T20:31:47.997005 | 2019-09-13T14:18:47 | 2019-09-13T14:18:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 814 | py | """DJCelery URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from celeryapp.views import course
urlpatterns = [
path('admin/', admin.site.urls),
path('course/', course),
]
| [
"[email protected]"
] | |
6911bfdb1062917403ac7d66e2e5833799fd3992 | 8f38dc6bcdf7a260b875bad6f68187a6ecde00b5 | /tests/test_dialects/test_db/test_parser.py | 2426e5ef2ed03b98f600a27dcdf0272c162c7816 | [] | no_license | gavel-tool/python-gavel-db | b2d630f8e3732b1965ad5f66ab1becbf43764bb7 | cc82381569f8e102abbc08e3c1729307da2b6e4a | refs/heads/master | 2023-02-13T22:06:00.728622 | 2021-01-09T16:49:18 | 2021-01-09T16:49:18 | 309,721,938 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 843 | py | from unittest import TestCase
import gavel_db.dialects.db.structures as fol_db
from gavel.dialects.tptp.compiler import TPTPCompiler
from gavel.dialects.tptp.parser import TPTPParser
class TestProcessor(TPTPParser):
compiler = TPTPCompiler()
def parse(self, tree, *args, **kwargs):
original = tree.getText()
internal = self.visitor.visit(tree)
if internal.logic != "thf":
reconstructed = self.compiler.visit(internal)
assert original.replace("(", "").replace(")", "") == reconstructed.replace(
"(", ""
).replace(")", ""), (original, reconstructed)
print(reconstructed)
return internal
axioms = ["GRP001-0.ax"]
problems = ["ALG/ALG001-1.p", "NUN/NUN030^1.p"]
class TestParser(TestCase):
def test_imports(self):
pass
| [
"[email protected]"
] | |
7c1c9eef31590352927dadcd54597dbf6987e241 | f9d9a49a4033f7f081ce6d44c39dbe581b3f1e6e | /WebCrawler/ShockingBox/ShockingBox/production_settings.py | 54b29c28eb77f63eeaffa42425685d0fb6a89b88 | [] | no_license | commonlife/SmartPiggyBank | 455c14580cb5fca1115356335999151291d4bbf8 | d62a55b080d3901f78cdea5015f2e33936facc4f | refs/heads/master | 2021-01-12T04:52:25.220840 | 2017-01-02T05:28:41 | 2017-01-02T05:28:41 | 77,805,654 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,316 | py | """
Django settings for ShockingBox project.
Generated by 'django-admin startproject' using Django 1.10.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '^k6*+7f%u%hh=g6w!_aeck##wjqzxbv(0mq$5e#q#e0*_w&yyj'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'Crawler11st',
'ApiGateway',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'ShockingBox.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'ShockingBox.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static')]
| [
"[email protected]"
] | |
ba32974d230cd953717d1b398d47c36bbdaa86d9 | 55c7f7e2e7d366e81b8438865792664a7aa8eebb | /services/users/tests/test_drf_urls.py | 857b32e65ba2f676541b49dc45a08a6863ce705a | [
"MIT"
] | permissive | elmakhandaker/Services | 78de4655d7d2f8f33e16b9db800a76c0539f6d32 | 36790d1b58398408468fcad907ca5678be45316c | refs/heads/master | 2022-12-24T06:14:20.895633 | 2020-10-08T15:37:44 | 2020-10-08T15:37:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 661 | py | import pytest
from django.urls import resolve, reverse
from services.users.models import User
pytestmark = pytest.mark.django_db
def test_user_detail(user: User):
    """The user-detail route round-trips between URL name and path."""
    expected = f"/api/users/{user.username}/"
    assert reverse("api:user-detail", kwargs={"username": user.username}) == expected
    assert resolve(expected).view_name == "api:user-detail"
def test_user_list():
    """The user-list route round-trips between URL name and path."""
    url = "/api/users/"
    assert reverse("api:user-list") == url
    assert resolve(url).view_name == "api:user-list"
def test_user_me():
    """The user-me route round-trips between URL name and path."""
    url = "/api/users/me/"
    assert reverse("api:user-me") == url
    assert resolve(url).view_name == "api:user-me"
| [
"[email protected]"
] | |
dd8a4afbe6dd3252e432ddcf32a9f74e9ffe44f4 | 45394e169c45c71eb17948a91c6dd8d707676236 | /Data analysis and selection/spanselector_zoom.py | a39693d2bcfce43e2a77e574b11c35eeddea9cc3 | [] | no_license | jmajorNRELgit/Random-code_bits | 235f4b6ebfdf8ed079db98c8601728180a0336e6 | f656f558f9afd0f97d560c3df12df06e26c3985a | refs/heads/master | 2020-04-03T12:17:03.386939 | 2019-03-27T13:59:49 | 2019-03-27T13:59:49 | 155,247,253 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,293 | py | # -*- coding: utf-8 -*-
"""
Created on Fri Mar 15 08:48:30 2019
@author: jmajor
"""
import matplotlib.pyplot as plt
from matplotlib.widgets import SpanSelector
import pandas as pd
import numpy as np
from scipy import signal
#calibration
file = 'C:/Users/jmajor/Desktop/Fast_charge_part_2/Condensed data.csv'
calibration_slope = 7.89768885939907
calibration_intercept = -0.0005410827748427023
df = pd.read_csv(file)
#data from calibration
TEG1 = df['TEG1']
TEG2 = df['TEG2']
current = df['Current']
supply_voltage = df['Supply_voltage']
cell_voltage = df['Cell_voltage']
time = df['Time']
TEG_sum = []
for i in range(len(TEG1)):
TEG_sum.append(TEG1[i] + TEG2[i])
TEG_fitted = [(i*calibration_slope+calibration_intercept) for i in TEG_sum]
power = []
for i in range(len(supply_voltage)):
power.append(supply_voltage[i] * current[i])
x = time
TEG_fitted = [i*1000 for i in TEG_fitted]
power = [i*1000 for i in power]
fig, (ax1, ax2) = plt.subplots(2, figsize=(8, 6))
#ax1.set(facecolor='#FFFFCC')
ax1.plot(x, TEG_fitted, label = 'Fitted TEG data')
ax1.legend(loc='center left', bbox_to_anchor=(1.1, 0.5))
ax1.set_title('Press left mouse button and drag to test')
ax1.plot(x, power, label = 'Supply Power')
ax1.legend(loc='center left', bbox_to_anchor=(1.1, 0.5))
ax1.set_xlabel('Time (s)')
ax1.set_ylabel('Power (mWatts)')
ax2.plot(x, TEG_fitted, label = 'Fitted TEG data')
ax2.legend(loc='center left', bbox_to_anchor=(1.1, 0.5))
ax2.set_title('Press left mouse button and drag to test')
ax2.plot(x, power, label = 'Supply Power')
ax2.legend(loc='center left', bbox_to_anchor=(1.1, 0.5))
ax2.set_xlabel('Time (s)')
ax2.set_ylabel('Power (mWatts)')
'''change TEG data to charge power data'''
#TEG_fitted = power
def onselect(xmin, xmax):
    """SpanSelector callback for ax1: re-plot the selected x-range on ax2.

    ``xmin``/``xmax`` arrive in data coordinates and are mapped to indices
    into ``x`` (np.searchsorted requires ``x`` to be sorted ascending).
    """
    indmin, indmax = np.searchsorted(x, (xmin, xmax))
    indmax = min(len(x) - 1, indmax)  # clamp to the last valid index
    ax2.clear()
    ax2.plot(x[indmin:indmax], TEG_fitted[indmin:indmax], label = 'Fitted TEG data')
    ax2.legend(loc='center left', bbox_to_anchor=(1.1, 0.5))
    ax2.set_title('Press left mouse button and drag to test')
    ax2.plot(x[indmin:indmax], power[indmin:indmax], label = 'Supply Power')
    ax2.legend(loc='center left', bbox_to_anchor=(1.1, 0.5))
    fig.canvas.draw()
# Set useblit=True on most backends for enhanced performance.
span = SpanSelector(ax1, onselect, 'horizontal', useblit=True,
rectprops=dict(alpha=0.5, facecolor='red'))
#data collected from the plot
data_lists = []
def onselect2(xmin, xmax):
    """SpanSelector callback for ax2: record the selected slice for later integration.

    Appends ``(x slice, TEG slice, time slice)`` to the module-level
    ``data_lists``; those tuples are trapezoid-integrated after the window closes.
    """
    global data_dictionaries  # NOTE(review): never assigned anywhere in view — looks vestigial
    x_data = None
    indmin, indmax = np.searchsorted(x, (xmin, xmax))
    indmax = min(len(x) - 1, indmax)  # clamp to the last valid index
    x_data = x[indmin:indmax]
    TEG_data_to_integrate = TEG_fitted[indmin:indmax]
    integration_time = time[indmin:indmax]
    data_lists.append((x_data, TEG_data_to_integrate, integration_time))
# Set useblit=True on most backends for enhanced performance.
span2 = SpanSelector(ax2, onselect2, 'horizontal', useblit=True,
rectprops=dict(alpha=0.5, facecolor='red'))
plt.show(block = True)
for i in range(len(data_lists)):
plt.fill_between(data_lists[i][2],[0]*len(data_lists[i][0]), data_lists[i][1])
for i in range(len(data_lists)):
print('{}'.format( np.abs(np.trapz(data_lists[i][1])))) | [
"[email protected]"
] | |
c27db6a1a5fe6540f5fe1c700d2b2ee27a972c38 | 21b39d50e4df56ea01453001845d1580729af1df | /jdcloud_sdk/services/waf/models/WafConf.py | 27441dad49c7751c59eb9cce3518e52ea22c2365 | [
"Apache-2.0"
] | permissive | Tanc009/jdcloud-sdk-python | ef46eac7731aa8a1839b1fc1efd93249b7a977f0 | 8b045c99bc5b73ca7348e950b6f01e03a27982f5 | refs/heads/master | 2021-08-09T14:49:16.177709 | 2021-06-25T02:38:41 | 2021-06-25T02:38:41 | 141,714,695 | 0 | 0 | Apache-2.0 | 2018-07-20T13:21:17 | 2018-07-20T13:21:16 | null | UTF-8 | Python | false | false | 1,201 | py | # coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
class WafConf(object):
    """WAF configuration payload for the JDCloud API (plain data holder)."""

    def __init__(self, enable=None, wafMode=None, wafLevel=None, redirection=None):
        """
        :param enable: (Optional) whether the WAF is enabled; 0 means disabled
        :param wafMode: (Optional) 0 = protect, 1 = alert-only
        :param wafLevel: (Optional) 0 = loose, 1 = normal, 2 = strict
        :param redirection: (Optional) name of the custom response page
        """
        # Store every wire field verbatim on the instance.
        for attr, value in (('enable', enable),
                            ('wafMode', wafMode),
                            ('wafLevel', wafLevel),
                            ('redirection', redirection)):
            setattr(self, attr, value)
| [
"[email protected]"
] | |
9fe0052ed77b41b803970201931a83a8834c5944 | b71eb888bf324bfe19c58f060f8d04371ff26bed | /venv/Scripts/easy_install-3.7-script.py | 7360180eeb5f2b49c3c8421e8bee51d3cf3723b9 | [] | no_license | victorllcrc/Test-Django-NGINX-Gunicorn | 0481d50dd4dbe58260e466541b258087d03fa89f | 068b0a5200554e32a17a19fb5bed955437f43eb2 | refs/heads/master | 2020-05-01T13:17:22.555924 | 2019-06-11T17:07:47 | 2019-06-11T17:07:47 | 177,488,094 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 468 | py | #!C:\Users\VICTOR\Desktop\tutorial\Scripts\yout111\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install-3.7'
__requires__ = 'setuptools==39.1.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install-3.7')()
)
| [
"[email protected]"
] | |
4b42a8e4857642f190b7bb3d89cf6b567ce773da | 39306599d4302204da535a5be16c738220348e50 | /tancrend/MateAndrea_tancrend.py | c14ad61f4f94bfe25648bd59aa54539ee6a2c463 | [] | no_license | mt-andrea/py | c0ab7bf4e87b0f02d7c0bf30cbe2a38541828847 | e96f9919d73d72873c3280cccc1a9c4cc13e6cbd | refs/heads/master | 2023-06-12T08:46:31.355362 | 2021-06-23T07:06:50 | 2021-06-23T07:06:50 | 379,511,171 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,917 | py | rec=[]
def f1(label):
    """Task 1: load tancrend.txt into the global ``rec`` list, echo first/last dance.

    Each line is expected to be ``dance;girl;boy`` — TODO confirm column order.
    """
    print(label)
    f=open("tancrend.txt","r")
    # Strip the trailing newline (absent on the final line) before splitting.
    for sor in f:
        if sor[-1]=="\n":
            sor=sor[:-1].split(";")
        else:
            sor=sor.split(";")
        rec.append([sor[0],sor[1],sor[2]])
    txt="A fájl beolvasása...kész!"
    print("\t"+txt)
    txt=rec[0][0]   # dance of the first record
    print("\t"+txt)
    txt=rec[-1][0]  # dance of the last record
    print("\t"+txt)
def f2(label):
    """Task 2: list every couple that performed the samba, then print the count."""
    print(label)
    count = 0
    for dance, girl, boy in rec:
        if dance == "samba":
            count += 1
            print("\t" + girl + ", " + boy)
    print("\t" + str(count) + " pár mutatta be a sambát.")
def f3(label):
    """Task 3: print every dance in which Vilma took part."""
    print(label)
    for row in rec:
        if row[1] == "Vilma":
            print("\t" + row[0])
def f4(label):
    """Task 4: prompt for a dance name and report Vilma's partner in it, if any."""
    print(label)
    txt="Kérek egy táncot (cha-cha, salsa, rumba, samba, jive, tango, bachata): "
    tanc=input(txt)
    par=[]
    for i in range(len(rec)):
        if rec[i][0]==tanc and rec[i][1]=="Vilma":
            par.append(rec[i][2])
            txt="A "+tanc+" bemutatóján Vilma párja "+par[0]+" volt."
        elif par==[]:
            # Keep overwriting the "did not dance" message until a match appears.
            txt="Vilma nem táncolt "+tanc+"-t."
    # NOTE(review): with an empty ``rec`` the loop never runs and the input
    # prompt string itself is printed here — confirm acceptable.
    print("\t"+txt)
def f5(label):
    """Task 5: write the deduplicated, sorted girl and boy names to szereplok.txt."""
    print(label)
    girls=[]
    boys=[]
    nevek=[]
    f=open("szereplok.txt","w")
    # Girl names come from column 1 of every record.
    for i in range(len(rec)):
        nevek.append(rec[i][1])
    girls=sorted(set(nevek))
    txt="Lányok: "
    for _ in range(len(girls)):
        txt=txt+girls[_]+", "
    print("\t"+txt[:-2])  # [:-2] trims the trailing ", "
    f.write(txt[:-2]+"\n")
    nevek=[]
    # Boy names come from column 2.
    for i in range(len(rec)):
        nevek.append(rec[i][2])
    boys=sorted(set(nevek))
    txt="Fiúk: "
    for _ in range(len(boys)):
        txt=txt+boys[_]+", "
    print("\t"+txt[:-2])
    f.write(txt[:-2])
    # NOTE(review): the file handle is never closed — relies on interpreter exit to flush.
    txt="A szereplok.txt fájl kiírása...kész!"
    print("\t"+txt)
f1("1. feladat")
f2("2. feladat")
f3("3. feladat")
f4("4. feladat")
f5("5. feladat") | [
"[email protected]"
] | |
5f54bda94951ffce22f6ec5b88699d5c5256cc36 | 291695dfdf7b5c203f9642c0ad99fad662fd3b69 | /main.py | c21052305c870e8d5fc306a9c627e6219dfa6dc9 | [] | no_license | ayush1420/bot-in-class | e7f66bee80365d4ba5e37d428083bd3374d35f7d | 0cb54f9804e24aa85482345f7980ae9e7a4ba8e3 | refs/heads/master | 2023-08-18T12:16:21.849641 | 2021-10-01T05:56:54 | 2021-10-01T05:56:54 | 412,329,480 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,471 | py | from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import datetime
import time
import os
import keyboard
class meet_bot:
    """Selenium bot that signs into a Google account and joins a Google Meet call.

    Depends on the module-level ``webdriver`` (selenium), ``time`` and
    ``keyboard`` imports.  The URL and XPaths below are snapshots of Google's
    markup at the time of writing and will break when the DOM changes.
    """
    def __init__(self):
        #enter driver location here
        self.bot = webdriver.Chrome(r"C:\Users\Ayush\Desktop\New folder\chromedriver.exe")
    def login(self,email,pas):
        """Log into Google with *email*/*pas* via the Meet-branded sign-in page."""
        bot = self.bot
        bot.get("https://accounts.google.com/signin/v2/identifier?ltmpl=meet&continue=https%3A%2F%2Fmeet.google.com%3Fhs%3D193&&o_ref=https%3A%2F%2Fmeet.google.com%2F_meet%2Fwhoops%3Fsc%3D232%26alias%3Dmymeetingraheel&_ga=2.262670348.1240836039.1604695943-1869502693.1604695943&flowName=GlifWebSignIn&flowEntry=ServiceLogin")
        time.sleep(2)  # fixed waits instead of WebDriverWait throughout
        email_in = bot.find_element_by_xpath("/html/body/div[1]/div[1]/div[2]/div/div[2]/div/div/div[2]/div/div[1]/div/form/span/section/div/div/div[1]/div/div[1]/div/div[1]/input")
        email_in.send_keys(email)
        next_btn = bot.find_element_by_xpath("/html/body/div[1]/div[1]/div[2]/div/div[2]/div/div/div[2]/div/div[2]/div/div[1]/div/div/button/div[2]")
        next_btn.click()
        time.sleep(4)  # allow the password page to load
        pas_in = bot.find_element_by_xpath("/html/body/div[1]/div[1]/div[2]/div/div[2]/div/div/div[2]/div/div[1]/div/form/span/section/div/div/div[1]/div[1]/div/div/div/div/div[1]/div/div[1]/input")
        pas_in.send_keys(pas)
        next1_btn = bot.find_element_by_xpath("/html/body/div[1]/div[1]/div[2]/div/div[2]/div/div/div[2]/div/div[2]/div/div[1]/div/div/button/div[2]")
        next1_btn.click()
        time.sleep(2)
    def join(self,meeting_link):
        """Open *meeting_link*, mute mic/camera via keyboard tabbing, then join."""
        bot = self.bot
        bot.get(meeting_link)
        time.sleep(1)
        diss_btn = bot.find_element_by_xpath("/html/body/div/div[3]/div/div[2]/div[3]/div/span/span")
        diss_btn.click()
        # Tab to the first media toggle and activate it via OS-level key events.
        keyboard.send("tab", do_press=True, do_release=True)
        keyboard.send("tab", do_press=True, do_release=True)
        keyboard.send("tab", do_press=True, do_release=True)
        keyboard.send("enter", do_press=True, do_release=True)
        time.sleep(2)
        # Same tab sequence for the second media toggle.
        keyboard.send("tab", do_press=True, do_release=True)
        keyboard.send("tab", do_press=True, do_release=True)
        keyboard.send("tab", do_press=True, do_release=True)
        keyboard.send("enter", do_press=True, do_release=True)
        time.sleep(2)
        join_btn = bot.find_element_by_xpath("/html/body/div[1]/c-wiz/div/div/div[9]/div[3]/div/div/div[2]/div/div[1]/div[2]/div/div[2]/div/div[1]/div[1]/span/span")
        join_btn.click()
link =r'https://meet.google.com/pxm-pxda-pkq'
youremail='___'
yourpassword='*****'
obj = meet_bot()
obj.login(youremail,yourpassword)
obj.join(link) | [
"[email protected]"
] | |
493f7fc5468420d339d65ff3e7a6cc763c3735b9 | 26f867cab34a6d3cd127faa15606b3b90cce846f | /Shrooms Class/Shrooms_knn.py | 17ace1b7703f15741e4a63da05f6ad6bbb5ccb6c | [] | no_license | TheFloatingString/Mushroom-Classifier | 38e7be78c3a3ce5d4f09c085335ad90061f0a38c | 457d01e05733a9d0948085d79093df823f06059e | refs/heads/master | 2020-03-19T05:26:15.940502 | 2018-06-08T00:40:20 | 2018-06-08T00:40:20 | 135,930,693 | 0 | 0 | null | 2018-06-08T00:40:22 | 2018-06-03T18:41:01 | HTML | UTF-8 | Python | false | false | 581 | py | # import modules
import numpy as np
from sklearn.neighbors import KNeighborsClassifier
#from sklearn.svm import SVC
# Load whitespace-delimited feature matrices and label vectors produced by
# the preprocessing step (one row per mushroom sample).
train_data = np.loadtxt('M_Train_data.txt')
train_labels = np.loadtxt('M_Train_labels.txt')
test_data = np.loadtxt('M_Test_data.txt')
test_labels = np.loadtxt('M_Test_labels.txt')
# Fit a 1-nearest-neighbour classifier (swap in the commented SVC lines to
# compare models).
model = KNeighborsClassifier(n_neighbors=1)
# model = SVC()
model.fit(train_data, train_labels)
# Report mean accuracy on the held-out test set.
print("TRAINING DONE!")
print(model.score(test_data, test_labels))
# Predict new data with: model.predict(array)
"[email protected]"
] | |
44bf8f5d04ab2ef20b3544249cd1b6392eb19290 | 1e9c9f2a9639db7cdb032aae69cb4d99aef1d3a5 | /w3schools/python/reference/builtInFunctions.py | b9e411f63673bbb33d19faf1d68a200cdb99c7a9 | [
"MIT"
] | permissive | sagarnikam123/learnNPractice | f0da3f8acf653e56c591353ab342765a6831698c | 1b3b0cb2cff2f478006626a4c37a99102acbb628 | refs/heads/master | 2023-02-04T11:21:18.211654 | 2023-01-24T14:47:52 | 2023-01-24T14:47:52 | 61,184,927 | 2 | 1 | MIT | 2022-03-06T11:07:18 | 2016-06-15T06:57:19 | Python | UTF-8 | Python | false | false | 3,948 | py | # Built in Functions
# abs()-Returns the absolute value of a number
print(abs(-7.52))
print(abs(3+5j))
# all()-Returns True if all items in an iterable object are true
mylist = [True, True, True]
print(all(mylist)) # True
print(all([1, 1, 1])) # True
print(all([0, 1, 1])) # False
print(all([])) # True
print(all((0, True, False))) # False
# any()-Returns True if any item in an iterable object is true
"""ascii()-Returns a readable version of an object.
Replaces none-ascii characters with escape character"""
# bin()-Returns the binary version of a number
# bool()-Returns the boolean value of the specified object
# bytearray()-Returns an array of bytes
# bytes()-Returns a bytes object
# callable()-Returns True if the specified object is callable, otherwise False
# chr()-Returns a character from the specified Unicode code.
# classmethod()-Converts a method into a class method
# compile()-Returns the specified source as an object, ready to be executed
# complex()-Returns a complex number
"""
delattr()-Deletes the specified attribute
(property or method) from the specified object
"""
# dict()-Returns a dictionary (Array)
# dir()-Returns a list of the specified object's properties and methods
"""
divmod()-Returns the quotient and the remainder
when argument1 is divided by argument2
"""
"""
enumerate()-Takes a collection (e.g. a tuple)
and returns it as an enumerate object
"""
# eval()-Evaluates and executes an expression
# exec()-Executes the specified code (or object)
# filter()-Use a filter function to exclude items in an iterable object
# float()-Returns a floating point number
# format()-Formats a specified value
# frozenset()-Returns a frozenset object
# getattr()-Returns the value of the specified attribute (property or method)
# globals()-Returns the current global symbol table as a dictionary
"""hasattr()-Returns True if the specified object
has the specified attribute (property/method)"""
# hash()-Returns the hash value of a specified object
# help()-Executes the built-in help system
# hex()-Converts a number into a hexadecimal value
# id()-Returns the id of an object
# input()-Allowing user input
# int()-Returns an integer number
"""isinstance()-Returns True if a specified object
is an instance of a specified object"""
"""issubclass()-Returns True if a specified class is
a subclass of a specified object"""
# iter()-Returns an iterator object
# len()-Returns the length of an object
# list()-Returns a list
# locals()-Returns an updated dictionary of the current local symbol table
"""map()-Returns the specified iterator with
the specified function applied to each item"""
# max()-Returns the largest item in an iterable
# memoryview()-Returns a memory view object
# min()-Returns the smallest item in an iterable
# next()-Returns the next item in an iterable
# object()-Returns a new object
# oct()-Converts a number into an octal
# open()-Opens a file and returns a file object
# ord()-Returns an integer representing the Unicode code point of the specified character
# pow()-Returns the value of x to the power of y
# print()-Prints to the standard output device
# property()-Gets, sets, deletes a property
"""range()-Returns a sequence of numbers,
starting from 0 and increments by 1 (by default)"""
# repr()-Returns a readable version of an object
# reversed()-Returns a reversed iterator
# round()-Rounds a number
# set()-Returns a new set object
# setattr()-Sets an attribute (property/method) of an object
# slice()-Returns a slice object
# sorted()-Returns a sorted list
# staticmethod()-Converts a method into a static method
# str()-Returns a string object
# sum()-Sums the items of an iterator
# super()-Returns an object that represents the parent class
# tuple()-Returns a tuple
# type()-Returns the type of an object
# vars()-Returns the __dict__ property of an object
# zip()-Returns an iterator, from two or more iterators
| [
"[email protected]"
] | |
23784e10aecbb68bd613b6e2d347dc67a80b58bf | b87769ad82c2cb893bdef590efabaade163cbec7 | /0004_product.py | 67318dcb56e518b7b7d23dde3efc703c421073f6 | [] | no_license | prashantavhad/canteen-automation-system | 6f80a727f3f9c11037c3e065590d8806b1a63c60 | 88c6ed66b99beed0f32f76f6a5583902f92395c3 | refs/heads/master | 2022-11-06T23:14:13.170968 | 2020-07-01T12:55:02 | 2020-07-01T12:55:02 | 276,369,887 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 757 | py | # Generated by Django 3.0.3 on 2020-04-10 09:32
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: introduces the Product model
    # (name, optional image, integer cost, availability flag).

    dependencies = [
        ('MyApp', '0003_auto_20200409_1821'),
    ]

    operations = [
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('product_name', models.CharField(max_length=256)),
                ('product_image', models.ImageField(blank=True, upload_to='product_images')),
                ('product_cost', models.IntegerField()),
                ('product_available', models.BooleanField()),
            ],
        ),
    ]
| [
"[email protected]"
] | |
46348f23567e5333fb55bf3a28a7c85d888f4703 | b7b52e51c7be931d4d1176f3f1150de980ad21c1 | /robot_tracker/__init__.py | 021c08a6dfd433fad616aa6b87a5999ff677f2a4 | [] | no_license | amezcua/Django-Robot-Tracker | d704eb982193d96386c62149f43d8df5cbc32aa6 | 6371af2ee9a3b6d91de7e71218d69c6ab21a7caa | refs/heads/master | 2021-01-01T19:15:05.257564 | 2012-10-12T08:31:57 | 2012-10-12T08:31:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 355 | py | # Robot-tracket app.
# App que mira para cada solicitud si es de un robot. Inicialmente se parte de una lista vacia y se van almacenando los
# useragents de todas las solicitudes que intentan acceder a robots.txt en el directorio raiz en una lista
# para cada solicitud se mira la lista en un middleware que establece si la solicitud es de un robot o no.
| [
"[email protected]"
] | |
ae18e15d31cb04495f56ec8136afcdb8cbf22861 | 6ecf8227cc63ea5c8f05fdd6a7d28b3167119367 | /blueking_forum/wsgi.py | 9b85fd8c45ff19aed7455d4ee3ba00e35d2a3b0a | [] | no_license | doraemonext/blueking_forum | 5ad0f46780e785a5af4db6f171654e351f509aa1 | f5737dcdeaef15c37b37a0988aa1be98f6283834 | refs/heads/master | 2020-12-28T21:29:19.982785 | 2015-11-04T04:15:20 | 2015-11-04T04:15:20 | 44,859,369 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | """
WSGI config for blueking_forum project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "blueking_forum.settings")
application = get_wsgi_application()
| [
"[email protected]"
] | |
d5ad6d7c22d647be13c4d019c1289512ae3c728a | ddb02b6a5e73709502971f1b98aba2143abdf61a | /OR157.LRU Cache.py | d13e4fca3fe7d406241fdb26c838bfc83e9945fd | [] | no_license | MingYanWoo/Leetcode | 0463b7e4f4a931c177f528333e5b039bb5913fcf | 4ba35ea2a3c3c57c490a65f26bc6723abdbe104b | refs/heads/master | 2022-01-12T05:44:04.705616 | 2021-12-22T18:14:38 | 2021-12-22T18:14:38 | 125,158,961 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,777 | py | # OR157.LRU Cache
# 题目描述
# 设计一个数据结构,实现LRU Cache的功能(Least Recently Used – 最近最少使用缓存)。它支持如下2个操作: get 和 put。
# int get(int key) – 如果key已存在,则返回key对应的值value(始终大于0);如果key不存在,则返回-1。
# void put(int key, int value) – 如果key不存在,将value插入;如果key已存在,则使用value替换原先已经存在的值。如果容量达到了限制,LRU Cache需要在插入新元素之前,将最近最少使用的元素删除。
# 请特别注意“使用”的定义:新插入或获取key视为被使用一次;而将已经存在的值替换更新,不算被使用。
# 限制:请在O(1)的时间复杂度内完成上述2个操作。
# 输入描述:
# 第一行读入一个整数n,表示LRU Cache的容量限制。 从第二行开始一直到文件末尾,每1行代表1个操作。
# 如果每行的第1个字符是p,则该字符后面会跟随2个整数,表示put操作的key和value。
# 如果每行的第1个字符是g,则该字符后面会跟随1个整数,表示get操作的key。
# 输出描述:
# 按照输入中get操作出现的顺序,按行输出get操作的返回结果。
# 示例1
# 输入
# 复制
# 2
# p 1 1
# p 2 2
# g 1
# p 2 102
# p 3 3
# g 1
# g 2
# g 3
# 输出
# 复制
# 1
# 1
# -1
# 3
# 说明
# 2 //Cache容量为2
# p 1 1 //put(1, 1)
# p 2 2 //put(2, 2)
# g 1 //get(1), 返回1
# p 2 102 //put(2, 102),更新已存在的key,不算被使用
# p 3 3 //put(3, 3),容量超过限制,将最近最少使用的key=2清除
# g 1 //get(1), 返回1
# g 2 //get(2), 返回-1
# g 3 //get(3), 返回3
class ListNode:
    """Doubly linked list node holding one cached key/value pair."""
    def __init__(self, key, value):
        self.key = key
        self.value = value
        self.next = None
        self.pre = None

class LRU_Cache:
    """LRU cache backed by a dict plus a doubly linked list with sentinels.

    Most-recently-used entries sit right after ``head``; the entry just
    before ``tail`` is the eviction candidate.  Per the problem statement,
    only ``get`` and inserting a *new* key count as "use"; overwriting an
    existing key does not refresh its position.
    """
    def __init__(self, cap):
        self.cap = cap
        # Sentinel nodes so splicing never has to special-case the ends.
        self.head = ListNode(None, None)
        self.tail = ListNode(None, None)
        self.head.next = self.tail
        self.tail.pre = self.head
        self.hashMap = {}

    def put(self, key, value):
        """Insert key -> value, evicting the LRU entry when at capacity."""
        if key in self.hashMap:
            # Overwrite in place; deliberately NOT promoted to the front.
            self.hashMap[key].value = value
            return
        if self.cap == 0:
            return
        if len(self.hashMap) >= self.cap:
            # Evict the least recently used entry (the one before tail).
            victim = self.tail.pre
            self._unlink(victim)
            del self.hashMap[victim.key]
        self.hashMap[key] = ListNode(key, value)
        self.insert_to_head(key)

    def get(self, key):
        """Return the value for key (refreshing it), or -1 if absent."""
        if key not in self.hashMap:
            return -1
        self.move_to_head(key)
        return self.hashMap[key].value

    def move_to_head(self, key):
        """Detach an existing node and re-insert it right after head."""
        self._unlink(self.hashMap[key])
        self.insert_to_head(key)

    def insert_to_head(self, key):
        """Splice the node for key in directly after the head sentinel."""
        node = self.hashMap[key]
        first = self.head.next
        self.head.next = node
        node.pre = self.head
        node.next = first
        first.pre = node

    @staticmethod
    def _unlink(node):
        """Remove node from the list without touching the dict."""
        node.pre.next = node.next
        node.next.pre = node.pre
if __name__ == '__main__':
    # First line of stdin is the cache capacity.
    n = int(input())
    lru = LRU_Cache(n)
    # Each following line is either "p <key> <value>" or "g <key>".
    while True:
        try:
            row = input().split(' ')
            op = row[0]
            #print(lru.hashMap)
            if op == 'p':
                lru.put(int(row[1]), int(row[2]))
            else:
                # Any non-'p' opcode is treated as a get.
                print(lru.get(int(row[1])))
        except:
            # Bare except: input() raises EOFError at end of input; any
            # malformed line also ends the loop (contest-style idiom).
            break
"[email protected]"
] | |
d524057902de1c41b9ee766e42eaa71198651fb0 | eaeb7f30a4cd72710c545409f7c5ed847794e1a7 | /Dirbtinis intelektas/Uzduotys/mano/FCBC/batch.py | 15ae5790d145cfa6e9239ab45f29ae730081f43b | [] | no_license | lbstore/MIF_Informatics_Semester7 | 63d6edfbe2a728ad5e67d639bbf02ce64d20940c | 90ed517416b2a0a558daec35af94b01b3d8dd414 | refs/heads/master | 2022-01-18T10:25:43.032096 | 2022-01-05T16:47:48 | 2022-01-05T16:47:48 | 127,039,807 | 0 | 7 | null | 2022-01-05T16:47:48 | 2018-03-27T20:04:37 | JavaScript | UTF-8 | Python | false | false | 180 | py | import os
if __name__ == "__main__":
    # Shell out to the FCBC jar in mode "BC" for each of the ten input files
    # testas1.txt..testas10.txt, writing results to resBC<i>.txt.
    for i in range(1,11):
        n = str(i)
        m = "BC"
        os.system("java -jar FCBC.jar "+m +" testas"+n+".txt "+"res"+m+n+".txt")
| [
"[email protected]"
] | |
2599f43c702b477847beae310b71941347de3dfc | d5492bcc77824e29669400622fd89b1349c90caf | /python网络数据采集/my_爬虫_进阶_之路/scrapy框架/my_spiders/电商项目/阿里1688_淘宝_天猫_京东_折800_卷皮_拼多多/my_flask_server/tools/时间戳_to_时间.py | bb9790a02ba469733ed07993cf5d5bc247faef0e | [] | no_license | XCodeAny/python | d88980682ba4db839911a5de8c073fa33a63da80 | 35991daf6c7eff4197662b9d07cb9fcdee6a0c02 | refs/heads/master | 2021-08-30T20:00:14.231120 | 2017-12-19T07:55:15 | 2017-12-19T07:55:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,009 | py | # coding:utf-8
'''
@author = super_fazai
@File : 时间戳_to_时间.py
@Time : 2017/11/15 17:13
@connect : [email protected]
'''
import time
def timestamp_to_regulartime(timestamp):
    """Convert a Unix timestamp to a 'YYYY-MM-DD HH:MM:SS' local-time string.

    *timestamp* may be an int or a numeric string; it is coerced with int().
    """
    struct = time.localtime(int(timestamp))
    return time.strftime("%Y-%m-%d %H:%M:%S", struct)
# Demo: 1511625600 is 2017-11-25 16:00:00 UTC; the printed value depends on
# the local timezone.
timestamp = 1511625600
dt = timestamp_to_regulartime(timestamp)
print(dt)
def is_recent_time(timestamp):
    """Return True when *timestamp* (a Unix epoch value) falls in the current
    year, on/after the current month and day-of-month, between 08:00 and
    16:59 local time; otherwise print a (Chinese) diagnostic and return False.

    NOTE(review): the month/day checks are purely field-wise — a date in a
    *later* month with a smaller day-of-month is rejected.  Confirm this
    matches the intended "flash sale window" semantics.
    """
    time_1 = int(timestamp)
    time_2 = time.time()  # current timestamp
    time_1 = time.localtime(time_1)
    time_2 = time.localtime(time_2)
    if time_1.tm_year == time_2.tm_year:
        if time_1.tm_mon >= time_2.tm_mon:  # target month is the current month or a later month this year
            if time_1.tm_mday >= time_2.tm_mday:
                if time_1.tm_hour >= 8 and time_1.tm_hour <= 16:
                    print('合法时间')
                    # diff_days = abs(time_1.tm_mday - time_2.tm_mday)
                    return True
                else:
                    print('该小时在8点到16点以外,此处不处理跳过')
                    return False
            else:
                print('该日时间已过期, 此处跳过')
                return False
        else:  # month already in the past
            print('该月份时间已过期,此处跳过')
            return False
    else:
        print('非本年度的限时秒杀时间,此处跳过')
        return False
# while True:
# timestamp = input('请输入要判断的时间戳: ')
# print(is_recent_time(timestamp)) | [
"[email protected]"
] | |
23c12ad1df97afd41a3e333439b9970dafdda74b | ff9e1536f9ec1097cee573dcf1c4cf19b1bd6a6e | /donation/admin.py | e7ecef462ac9e74baf83b8032a25ccca193bf795 | [] | no_license | bedant/Codefundo-Hackathon-2018 | e4e22c9df0772d50aafc34773e2e3c8666ac5c6d | 23527ed1967b3e577d9b6ee1578a0f70ce6112b4 | refs/heads/master | 2020-04-23T19:31:32.990141 | 2019-02-19T05:02:36 | 2019-02-19T05:02:36 | 171,407,299 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 90 | py | from django.contrib import admin
from .models import Donator

# Expose the Donator model in the Django admin interface.
admin.site.register(Donator)
"[email protected]"
] | |
830a665eb52f1f43f7ad9fd6d9b4f83e8aad4eb7 | 188a0d6f2dca86d4b6d7665af6e05b5420365051 | /week1/week1-ex6.py | 1eed8311783d61f5175493810e3733bb2167290d | [] | no_license | JuliaGNH/PythonAutomation032018 | 11ec6989cf6bd18caed9dc18c60b199be56f25ce | e68c097684999b7990f701f7e3b2b9bb951606e6 | refs/heads/master | 2020-03-10T20:14:10.022940 | 2018-04-15T19:01:50 | 2018-04-15T19:01:50 | 129,565,990 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 352 | py | import yaml
import json

# Sample structures for exercise 6: a dict nested inside a list.
# NOTE(review): the MAC address contains a 'k', which is not a valid hex
# digit — fine for a serialization exercise, but not a real address.
week1_dict = {
    'ip_addr': '10.10.10.1',
    'mac_addr': '0a:fc:b4:7k:2d:1c'
}
week1_list = [
    'exercise_6',
    'list_example',
    week1_dict
]

# Serialize the same list to YAML (block style) and to JSON.
with open("week1_ex6.yml", "w") as f:
    f.write(yaml.dump(week1_list, default_flow_style=False))
with open("week1_ex6.json", "w") as k:
    json.dump(week1_list, k)
| [
"[email protected]"
] | |
eee26574d21382a1d2154b82d943b7157c19278b | ac288e3a2f78d3992a4f28c0f1695470a6162ea8 | /p_library/migrations/0003_auto_20200825_0707.py | 348f65e5b56e21aac0f863cea7911cbd41cb7477 | [] | no_license | PavelGvozdev/D6 | 0f992cf4ed9a692db2596ecaf6d0d5c11507e043 | 6a3c1b0ad55dbd4b2a6a18a34416ef3b1c9bdde1 | refs/heads/master | 2022-12-14T05:30:39.914651 | 2020-09-08T12:59:54 | 2020-09-08T12:59:54 | 293,764,980 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | # Generated by Django 2.2.6 on 2020-08-25 07:07
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: narrows Book.price to a
    # DecimalField with 5 total digits, 2 after the decimal point.

    dependencies = [
        ('p_library', '0002_auto_20200825_0704'),
    ]

    operations = [
        migrations.AlterField(
            model_name='book',
            name='price',
            field=models.DecimalField(decimal_places=2, max_digits=5),
        ),
    ]
| [
"[email protected]"
] | |
4d80575643129294e7e323eb5fffde77bb3373ea | 99ea511bf62e2b09225663ff687823da1de3dd48 | /第4次作业 project3/codes/loss_visualization.py | 98c76d9effdc0fdd7b63ac7d1d28ef36ba3680b0 | [] | no_license | Schuture/DL-course | cf5af26a8df5dd8fbcb22de77ef75840506f3690 | fabbe74640a3b2f327cca69d5a789ef321ad775e | refs/heads/master | 2022-11-07T04:44:54.814972 | 2020-06-26T03:16:37 | 2020-06-26T03:16:37 | 260,418,153 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,942 | py | import matplotlib.pyplot as plt
# Folder holding the training-log text files (the filename encodes the run
# timestamp and the final accuracy).
FILE_PATH = 'D:/学习/课程/大数据/深度学习和神经网络/作业/第4次作业 project3/project3/codes/'

# Parse the log: each header line ("Training loss", "Training acc",
# "Validation loss", "Validation acc") is followed by one line holding a
# Python-style list; [1:-2] strips the leading '[' and the trailing ']\n'.
with open(FILE_PATH+'2020_06_22_14_11_00_88.09%'+'.txt') as f:
    line = f.readline()
    while True:
        if not line:
            break
        elif line.startswith('Training loss'):
            training_loss = f.readline()
            training_loss = [float(loss) for loss in training_loss[1:-2].split(',')]
        elif line.startswith('Training acc'):
            training_acc = f.readline()
            training_acc = [float(acc) for acc in training_acc[1:-2].split(',')]
        elif line.startswith('Validation loss'):
            validation_loss = f.readline()
            validation_loss = [float(loss) for loss in validation_loss[1:-2].split(',')]
        elif line.startswith('Validation acc'):
            validation_acc = f.readline()
            validation_acc = [float(acc) for acc in validation_acc[1:-2].split(',')]
        line = f.readline()

# A single epoch may log several training acc/loss values; only one per epoch
# is needed for visualization (uncomment to subsample every 9th entry).
#training_loss = training_loss[::9]
#training_acc = training_acc[::9]

n = len(training_loss)
# Top panel: training vs validation loss per epoch.
plt.figure(figsize=(16, 16))
plt.subplot(211)
m1 = plt.plot(list(range(1, n+1)), training_loss)
m2 = plt.plot(list(range(1, n+1)), validation_loss)
plt.title('Loss vs time', fontsize=24)
plt.xlabel('Epoch', fontsize=20)
plt.ylabel('loss', fontsize=20)
plt.tick_params(labelsize=13)
plt.legend(["Training loss", "Validation loss"], loc='upper right', fontsize=20)
# Bottom panel: training vs validation accuracy per epoch.
plt.subplot(212)
m3 = plt.plot(list(range(1, n+1)), training_acc)
m4 = plt.plot(list(range(1, n+1)), validation_acc)
plt.title('Accuracy vs time', fontsize=24)
plt.xlabel('Epoch', fontsize=20)
plt.ylabel('acc', fontsize=20)
plt.tick_params(labelsize=13)
plt.legend(["Training accuracy", "Validation accuracy"], loc='lower right', fontsize=20)
plt.show()
| [
"[email protected]"
] | |
e4af87b7e9aadfd959ec0d159db3a94fc89c7bfb | 51ca1a8b4d1d46450a9265a38be3b750043fb771 | /ejemplocrud/settings.py | 2e2aa881170e11d53a4072fe6d850642dc222ce6 | [] | no_license | inova-team/ejemplocrud | c5419d14bd67b355a0cb5b43bd62cf7cc648feb4 | 004cdb3dd54fd2176065850409dae5c6f96a3aef | refs/heads/master | 2023-04-08T20:23:22.844452 | 2021-04-03T02:27:47 | 2021-04-03T02:27:47 | 354,164,358 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,520 | py | """
Django settings for ejemplocrud project.
Generated by 'django-admin startproject' using Django 3.1.7.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '@1vg1-3h_z9@rsd7*cd8+r*h0xhle*shpq(goh&u=!hoiw+6en'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'ejemplocrud.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [BASE_DIR / 'templates']
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'ejemplocrud.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': str(os.path.join(BASE_DIR, "db.sqlite3"))
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
# this defines the url for static files
# eg: base-url.com/static/your-js-file.js
STATIC_URL = '/static/'
# this is directory name where collectstatic files command will put your app level static files
STATIC_ROOT = 'staticfiles'
# this is directory paths where you have to put your project level static files
# you can put multiple folders here
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
| [
"[email protected]"
] | |
0d62d9c5d3258d0ca419c6fc1e58216b43f3ba26 | 110d4d8944760d53cddee7fea2ab25e51bd32c22 | /video/management/commands/flickr.py | e84ad6fa9496b7d1d2aff62ce7e844aacd0a4f47 | [] | no_license | lancha90/wetravel | 1993d52603f1c74c272f1df15a4b38bccd731dba | bb077330067129d1842d646c5955692563ff6c29 | refs/heads/master | 2021-01-02T22:45:25.566673 | 2015-03-23T16:15:58 | 2015-03-23T16:15:58 | 23,551,256 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,331 | py | # python manage.py flickr -a start
# heroku run python flickr.py migration -a start
from django.utils.encoding import smart_str, smart_unicode
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
import urllib2
import json
from video.models import *
KEY='55b11c4fca4f090083ecfc1811ddc32c'
URL='https://api.flickr.com/services/rest/?method=flickr.groups.pools.getPhotos&api_key=%s&group_id=%s&format=json&nojsoncallback=1&page=%s'
# Class MUST be named 'Command'
class Command(BaseCommand):
    """Management command that imports photos from the Flickr group-pools
    API into the local Photo table.

    NOTE(review): this is Python 2 code (print statements, urllib2,
    ``e.message``, optparse-style ``option_list``).
    """
    # Displayed from 'manage.py help mycommand'
    help = "That's Your help message"

    option_list = BaseCommand.option_list + (
        make_option(
            "-a",
            "--action",
            dest = "action",
            help = "specify the option { start | update }",
            metavar = "FILE"
        ),
    )

    def handle(self, *app_labels, **options):
        """
        app_labels - app labels (eg. myapp in "manage.py reset myapp")
        options - configurable command line options
        """

        def get_image(_group,_page):
            # Fetch one page of the group's photo pool and store any photos
            # not yet in the database.
            current_url=URL % (KEY,_group.group_id,_page)
            handler=urllib2.urlopen(current_url)
            data=json.loads(handler.read())
            photos=data.get('photos')
            current_page=photos['pages']
            for item in photos['photo']:
                if len(Photo.objects.filter(photo_id=item['id'])) > 0:
                    # Already imported: set _page to the last page so the
                    # recursion below stops (presumably the feed is
                    # newest-first, so everything beyond this point is
                    # already stored — confirm).
                    _page=current_page
                else:
                    photo=Photo(photo_id=item['id'],owner=item['owner'],secret=item['secret'],server=item['server'],farm=item['farm'],title=item['title'],ownername=item['ownername'],dateadded=item['dateadded'],group=_group)
                    try:
                        photo.save()
                    except Exception as e:
                        # NOTE(review): `e.message` is deprecated; str(e)
                        # would be safer here.
                        print 'URL: %s >>> %s (%s)' % (current_url,e.message, type(e))
            # Recurse until the page just handled is the last one.
            if(current_page!=_page):
                get_image(_group,_page+1)

        if options['action'] == 'start':
            # Import (or incrementally sync) every configured Flickr group.
            groups = Group.objects.all()
            for group in groups:
                print 'Loading group: %s' % (group.name)
                get_image(group,1)
        elif options['action'] == 'update':
            print 'update'
        else:
            print 'No command'
"[email protected]"
] | |
05bf10e915b53d57bb3f0174801892d61daffed8 | f4434c85e3814b6347f8f8099c081ed4af5678a5 | /sdk/search/azure-search-documents/azure/search/documents/_internal/_generated/aio/__init__.py | fa69578ea7f244621643bd7e1b4c113301d9ff0d | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | yunhaoling/azure-sdk-for-python | 5da12a174a37672ac6ed8e3c1f863cb77010a506 | c4eb0ca1aadb76ad892114230473034830116362 | refs/heads/master | 2022-06-11T01:17:39.636461 | 2020-12-08T17:42:08 | 2020-12-08T17:42:08 | 177,675,796 | 1 | 0 | MIT | 2020-03-31T20:35:17 | 2019-03-25T22:43:40 | Python | UTF-8 | Python | false | false | 552 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from ._search_index_client import SearchIndexClient
__all__ = ['SearchIndexClient']
| [
"[email protected]"
] | |
3117132a22995f2dafdf31bca233b20cc3d8e947 | 20fb326e74c60f23886ec14a41d56452e6584181 | /task12.py | 8ac6d7eb5c49932d0f794e9c6572bed8fa464991 | [] | no_license | Argen-Aman/chapter2task12 | 9554b16f32c0f448465a2e4c55e7628b674c2c06 | 38a7c1adf66a2c15dade1eef3d6cf74e662b4a1a | refs/heads/master | 2022-06-03T13:03:00.978752 | 2020-05-01T19:20:39 | 2020-05-01T19:20:39 | 260,540,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 552 | py | Convert = input("To convert the temperature in Celcius to Fahrenheit, press 'f'. To convert the temperature in Fahrenheit to Celcius, press 'c'. In order ro quit - any other key: ")
if Convert == 'f':
    # Celsius -> Fahrenheit branch.  The user-facing prompt strings keep the
    # original "Celcius"/"ro" typos because they are runtime text.
    def Convert_C_to_F (C):
        Fahrenheit=(C*9/5)+32
        print(Fahrenheit)
    C=float(input("Enter a temperature in Celcius: "))
    Convert_C_to_F (C)
elif Convert == 'c':
    # Fahrenheit -> Celsius branch; any other key falls through and quits.
    def Convert_F_to_C (F):
        Celcius=(F-32)*5/9
        print(Celcius)
    F=float(input("Enter a temperature in Fahrenheit: "))
    Convert_F_to_C (F)
| [
"[email protected]"
] | |
f4031575da6f062f26daf888cb9649299d518f04 | 04d34f0267dabf84608f547266cc321e1cebb634 | /DuckDuckGo/test.py | 88f25123d75cdcebd27edd78abd744390e6aaf34 | [] | no_license | Hoaas/Supybot-plugins | f909d8ca588087e4a9113d1ca0939fe7176c1124 | e34548cd4bb5c4edef24be04727dec997e69b10d | refs/heads/master | 2022-02-21T07:53:28.833639 | 2022-01-29T21:41:13 | 2022-01-29T21:41:13 | 4,174,832 | 4 | 2 | null | 2018-06-03T18:32:02 | 2012-04-29T14:21:04 | Python | UTF-8 | Python | false | false | 1,739 | py | ###
# Copyright (c) 2010, Terje Hoaas
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from supybot.test import *
class DuckDuckGoTestCase(PluginTestCase):
    """Supybot plugin test case: loads the DuckDuckGo plugin into the test
    harness (no individual test methods defined yet)."""
    plugins = ('DuckDuckGo',)
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
| [
"[email protected]"
] | |
3b4e85f84c636c12a3726f0382d64893d0c648e6 | 0635cbe903fc77b6436f07580434262ba3825de1 | /backtrader_ib_api/test/test_wrapper.py | e41f20b5344128436ed073a09a07a3ec965e6a94 | [
"MIT"
] | permissive | webclinic017/backtrader-ib-api | 853d945c5e61ecab7df28f9939454ca7200224b5 | 66997c2be388f63bfb3a3387642be5fa73d32095 | refs/heads/main | 2023-06-04T10:23:03.769342 | 2021-05-28T03:05:33 | 2021-05-28T03:05:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,556 | py | from backtrader_ib_api.wrapper.wrapper import RequestWrapper
def test_stock_details(wrapper: RequestWrapper):
details = wrapper.request_stock_details("AAPL")
print(details)
def test_stock_historical_trades(wrapper: RequestWrapper):
history = wrapper.request_stock_trades_history("AAPL")
print(history)
def test_option_lookup(wrapper: RequestWrapper):
    """End-to-end option lookup against a live IB gateway (prints only, no
    asserts): find the front-expiration, closest-OTM AAPL call and fetch
    its trade history."""
    history = wrapper.request_stock_trades_history("AAPL")
    latest_price = history.close.iloc[-1]
    print(f"Latest price: {latest_price}")
    # contract ID can be requested via request_stock_details
    option_params = wrapper.request_option_params("AAPL", 265598)
    print(f"Found {len(option_params)} option param results")
    # try to auto-select SMART exchange
    preferred_contracts = option_params[option_params.exchange == "SMART"]
    option_contract = preferred_contracts.iloc[0]
    # Earliest available expiration = the "front" contract.
    front_expiration = min(option_contract.expirations)
    print(f"Front Expiration: {front_expiration}")
    option_chain = wrapper.request_option_chain("AAPL", option_contract.exchange, front_expiration)
    # First strike strictly above the last trade price (closest OTM call).
    front_strike = option_chain.strike[option_chain.strike > latest_price].iloc[0]
    print(f"Closest OTM Call Strike: {front_strike}")
    option_price_history = wrapper.request_option_trades_history("AAPL",
                                                                 front_expiration,
                                                                 front_strike,
                                                                 "C")
    print(option_price_history)
| [
"[email protected]"
] | |
0b469fb413cbb6ceffb8894953d834ffb9317edf | 72e57463384261722aa3ba909700eec5cc72c703 | /c2rust-analyze/rename_nll_facts.py | 33744000874629b448913b4b30529375aaa78180 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | immunant/c2rust | 79c1c158252075bfd33870677a4a9d0c7fc168e1 | f22c6923668f4fa1fe962a02c62ec6d0597dd794 | refs/heads/master | 2023-08-31T07:13:03.567937 | 2023-08-28T21:32:20 | 2023-08-28T21:32:20 | 130,285,553 | 3,467 | 221 | NOASSERTION | 2023-09-08T18:08:32 | 2018-04-20T00:05:50 | Rust | UTF-8 | Python | false | false | 4,514 | py | '''
Usage: `python3 rename_nll_facts.py src ref dest`
Renames atoms in `src/*.facts` to match the names used in `ref/*.facts`, then
writes the renamed facts to `dest/`.
'''
import ast
from collections import defaultdict
import os
import sys
src_dir, ref_dir, dest_dir = sys.argv[1:]
# Bookkeeping for the renaming: `name_map` maps each `src` loan/origin/path
# atom to its `ref` counterpart, and `ref_names_seen` records every `ref`
# atom already claimed as a value.  A single shared namespace suffices
# because the three atom kinds never collide.
name_map = {}
ref_names_seen = set()

def match_name(src_name, ref_name):
    """Record that `src_name` corresponds to `ref_name`, printing an error
    (without raising or overwriting) on any conflicting or duplicate
    assignment."""
    if src_name in name_map:
        existing = name_map[src_name]
        # Re-asserting the same mapping is fine; a different target is not.
        if ref_name != existing:
            print('error: %r matches both %r and %r' % (
                src_name, existing, ref_name))
        return
    if ref_name in ref_names_seen:
        print('error: %r matches %r, but %r is already used' % (
            src_name, ref_name, ref_name))
        return
    name_map[src_name] = ref_name
    ref_names_seen.add(ref_name)
# Typed wrappers around `match_name`: loans, origins, and paths share a
# single namespace, so these exist purely to make call sites self-documenting.
def match_loan(src_name, ref_name):
    match_name(src_name, ref_name)

def match_origin(src_name, ref_name):
    match_name(src_name, ref_name)

def match_path(src_name, ref_name):
    match_name(src_name, ref_name)
def load(name):
    """Load `<name>.facts` from both the src and ref directories.

    Each row is a tab-separated sequence of quoted atoms; ast.literal_eval
    strips the quoting, so the result is two lists of string lists.
    """
    with open(os.path.join(src_dir, name + '.facts')) as f:
        src_rows = [[ast.literal_eval(s) for s in line.strip().split('\t')]
                    for line in f]
    with open(os.path.join(ref_dir, name + '.facts')) as f:
        ref_rows = [[ast.literal_eval(s) for s in line.strip().split('\t')]
                    for line in f]
    return src_rows, ref_rows
# Match up paths using `path_is_var` and `path_assigned_at_base`.
def match_path_is_var():
    # Rows are (path, var).  Vars are used as the join key (assumed to agree
    # between src and ref), so a shared var pins src path to ref path.
    src, ref = load('path_is_var')
    ref_dct = {var: path for path, var in ref}
    for path, var in src:
        if var not in ref_dct:
            continue
        match_path(path, ref_dct[var])

match_path_is_var()

def match_path_assigned_at_base():
    # Rows are (path, point).  Points serve as the join key here.
    src, ref = load('path_assigned_at_base')
    ref_dct = {point: path for path, point in ref}
    for path, point in src:
        if point not in ref_dct:
            continue
        match_path(path, ref_dct[point])

match_path_assigned_at_base()
# Match up origins and loans using `loan_issued_at`
def match_loan_issued_at():
    """Match origins and loans via `loan_issued_at` rows keyed by point.

    Each row is (origin, loan, point); rows sharing a point are paired up
    field-by-field.
    """
    src, ref = load('loan_issued_at')
    ref_dct = {point: (origin, loan) for origin, loan, point in ref}
    for origin, loan, point in src:
        if point not in ref_dct:
            continue
        match_origin(origin, ref_dct[point][0])
        # Fix: the second field is a loan, so route it through match_loan.
        # The original called match_origin here, leaving match_loan unused;
        # the effect is identical (both delegate to match_name) but the
        # intent is now explicit.
        match_loan(loan, ref_dct[point][1])
match_loan_issued_at()
# Match up origins using `use_of_var_derefs_origin`
def match_use_of_var_derefs_origin():
    """Match origins positionally between src and ref, grouped per variable.

    Only variables present on both sides with the same number of origins
    are matched; a count mismatch is reported and skipped.
    """
    src, ref = load('use_of_var_derefs_origin')

    def group_by_var(rows):
        grouped = defaultdict(list)
        for var, origin in rows:
            grouped[var].append(origin)
        return grouped

    src_groups = group_by_var(src)
    ref_groups = group_by_var(ref)
    for var in set(src_groups.keys()) & set(ref_groups.keys()):
        src_origins = src_groups[var]
        ref_origins = ref_groups[var]
        if len(src_origins) != len(ref_origins):
            print('error: var %r has %d origins in src but %d in ref' % (
                var, len(src_origins), len(ref_origins)))
            continue
        for one_src, one_ref in zip(src_origins, ref_origins):
            match_origin(one_src, one_ref)
match_use_of_var_derefs_origin()
# Rewrite `src` using the collected name mappings.
os.makedirs(dest_dir, exist_ok=True)
for name in os.listdir(src_dir):
    # Only process visible `.facts` files.
    if name.startswith('.') or not name.endswith('.facts'):
        continue
    with open(os.path.join(src_dir, name)) as src, \
            open(os.path.join(dest_dir, name), 'w') as dest:
        for line in src:
            src_parts = [ast.literal_eval(s) for s in line.strip().split('\t')]
            dest_parts = []
            for part in src_parts:
                # Atoms starting with '_', 'Start', or 'Mid' are passed
                # through unchanged (not subject to renaming).
                if part.startswith('_') or part.startswith('Start') or part.startswith('Mid'):
                    dest_parts.append(part)
                    continue
                dest_part = name_map.get(part)
                if dest_part is None:
                    # Unmapped atoms are kept, prefixed so they are easy to
                    # spot in the output.
                    print('error: no mapping for %r (used in %s: %r)' % (
                        part, name, src_parts))
                    dest_part = 'OLD:' + part
                dest_parts.append(dest_part)
            # Re-quote every atom and emit a tab-separated line.
            dest.write('\t'.join('"%s"' % part for part in dest_parts) + '\n')
| [
"[email protected]"
] | |
15a1cfdd93d41a4625fcfc638ea6440557a275d2 | 45826bdfebbd1d7638ab607906ac480031d6118b | /lib/metrics/F1_running_score.py | 38f3b58177a69c05b0d47ee4c5cd0b3de7c3e2b9 | [
"MIT"
] | permissive | openseg-group/openseg.pytorch | b75cec5c95b6ff71707d8daf7806001bab89ecb3 | aefc75517b09068d7131a69420bc5f66cb41f0ee | refs/heads/master | 2023-09-06T10:19:57.749113 | 2022-08-07T09:10:20 | 2022-08-07T09:10:20 | 166,743,301 | 1,227 | 159 | MIT | 2021-07-14T06:10:44 | 2019-01-21T03:34:59 | Python | UTF-8 | Python | false | false | 7,826 | py | ##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
## Created by: JingyiXie, RainbowSecret
## Microsoft Research
## [email protected]
## Copyright (c) 2019
##
## Code adapted from:
## https://github.com/nv-tlabs/GSCNN/blob/master/utils/f_boundary.py
##
## This source code is licensed under the MIT-style license found in the
## LICENSE file in the root directory of this source tree
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import pdb
import numpy as np
import torch
from multiprocessing.pool import Pool
class F1RunningScore(object):
    """Running boundary-F1 score accumulator.

    Batches (prediction, ground-truth) map pairs and evaluates them in a
    multiprocessing pool (`db_eval_boundary`), accumulating per-class
    F-measure sums in `_Fpc` and valid-sample counts in `_Fc`.
    """
    def __init__(self, configer=None, num_classes=None, boundary_threshold=0.00088, num_proc=15):
        # Exactly one source for the class count must be provided.
        assert configer is not None or num_classes is not None
        self.configer = configer
        if configer is not None:
            self.n_classes = self.configer.get('data', 'num_classes')
        else:
            self.n_classes = num_classes
        # Label value treated as "ignore" in the ground truth.
        self.ignore_index = -1
        self.boundary_threshold = boundary_threshold
        self.pool = Pool(processes=num_proc)
        self.num_proc = num_proc
        # Per-class running sums: F-measure total and valid-sample count.
        self._Fpc = 0
        self._Fc = 0
        self.seg_map_cache = []
        self.gt_map_cache = []
    def _update_cache(self, seg_map, gt_map):
        """
        Append inputs to `seg_map_cache` and `gt_map_cache`.
        Returns whether the length reached our pool size.
        """
        self.seg_map_cache.extend(seg_map)
        self.gt_map_cache.extend(gt_map)
        return len(self.gt_map_cache) >= self.num_proc
    def _get_from_cache(self):
        """Pop the first `num_proc` cached pairs and return them."""
        n = self.num_proc
        seg_map, self.seg_map_cache = self.seg_map_cache[:n], self.seg_map_cache[n:]
        gt_map, self.gt_map_cache = self.gt_map_cache[:n], self.gt_map_cache[n:]
        return seg_map, gt_map
    def update(self, seg_map, gt_map):
        """Buffer a batch; score it once a full pool-sized batch is cached."""
        if self._update_cache(seg_map, gt_map):
            seg_map, gt_map = self._get_from_cache()
            self._update_scores(seg_map, gt_map)
        else:
            return
    def _update_scores(self, seg_map, gt_map):
        """Evaluate a batch per class in the worker pool and accumulate."""
        batch_size = len(seg_map)
        if batch_size == 0:
            return
        Fpc = np.zeros(self.n_classes)
        Fc = np.zeros(self.n_classes)
        for class_id in range(self.n_classes):
            args = []
            for i in range(batch_size):
                # A prediction whose leading dim equals n_classes is taken
                # as per-class probability maps (thresholded at 0.5 and
                # already boundary-shaped); otherwise it is a label map.
                if seg_map[i].shape[0] == self.n_classes:
                    pred_i = seg_map[i][class_id] > 0.5
                    pred_is_boundary = True
                else:
                    pred_i = seg_map[i] == class_id
                    pred_is_boundary = False
                args.append([
                    (pred_i).astype(np.uint8),
                    (gt_map[i] == class_id).astype(np.uint8),
                    (gt_map[i] == -1),
                    self.boundary_threshold,
                    class_id,
                    pred_is_boundary
                ])
            results = self.pool.map(db_eval_boundary, args)
            results = np.array(results)
            # Column 0 of each result is the F-measure; NaN marks samples
            # where the class was absent from both maps.
            Fs = results[:, 0]
            _valid = ~np.isnan(Fs)
            Fc[class_id] = np.sum(_valid)
            Fs[np.isnan(Fs)] = 0
            Fpc[class_id] = sum(Fs)
        self._Fc = self._Fc + Fc
        self._Fpc = self._Fpc + Fpc
    def get_scores(self):
        """Flush the remaining cache and return (mean F1, per-class F1)."""
        # NOTE(review): this check is dead — seg_map_cache is initialised to
        # a list and never set to None, so an instance with no updates falls
        # through to a 0/0 division below. Verify intended behaviour.
        if self.seg_map_cache is None:
            return 0, 0
        self._update_scores(self.seg_map_cache, self.gt_map_cache)
        F_score = np.sum(self._Fpc / self._Fc) / self.n_classes
        F_score_classwise = self._Fpc / self._Fc
        return F_score, F_score_classwise
    def reset(self):
        # NOTE(review): only the accumulators are cleared; any still-cached
        # seg/gt maps survive a reset — confirm this is intentional.
        self._Fpc = self._Fc = 0
def db_eval_boundary(args):
    """
    Compute the boundary F-measure for one (prediction, ground-truth) pair.
    Calculates precision/recall for boundaries between foreground_mask and
    gt_mask using morphological operators to speed it up.
    Arguments (packed in `args` for multiprocessing Pool.map):
        foreground_mask (ndarray): binary segmentation image, or an already
            boundary-shaped map when pred_is_boundary is True.
        gt_mask (ndarray): binary annotated image.
        ignore_mask (ndarray): boolean mask of pixels excluded from scoring.
        bound_th (float): tolerance; absolute pixels when >= 1, otherwise a
            fraction of the image diagonal.
        class_id (int): class index (carried along, unused here).
        pred_is_boundary (bool): skip boundary extraction for the prediction.
    Returns:
        (F, precision): boundary F-measure and precision.
        NOTE(review): recall is computed but not returned, unlike the
        historical (F, P, R) contract this docstring descends from.
    """
    foreground_mask, gt_mask, ignore_mask, bound_th, class_id, pred_is_boundary = args
    assert np.atleast_3d(foreground_mask).shape[2] == 1
    # Tolerance in pixels: literal when >= 1, else a fraction of the diagonal.
    bound_pix = bound_th if bound_th >= 1 else \
        np.ceil(bound_th*np.linalg.norm(foreground_mask.shape))
    # Ignored pixels contribute to neither mask.
    foreground_mask[ignore_mask] = 0
    gt_mask[ignore_mask] = 0
    # Get the pixel boundaries of both masks
    if pred_is_boundary:
        fg_boundary = foreground_mask
    else:
        fg_boundary = seg2bmap(foreground_mask)
    gt_boundary = seg2bmap(gt_mask)
    from skimage.morphology import disk
    from cv2 import dilate
    # Fix: `np.bool` was a deprecated alias removed in NumPy 1.24; the
    # builtin `bool` is the supported, behaviourally identical spelling.
    def binary_dilation(x, d): return dilate(
        x.astype(np.uint8), d).astype(bool)
    fg_dil = binary_dilation(fg_boundary, disk(bound_pix))
    gt_dil = binary_dilation(gt_boundary, disk(bound_pix))
    # Get the intersection
    gt_match = gt_boundary * fg_dil
    fg_match = fg_boundary * gt_dil
    # Area of the intersection
    n_fg = np.sum(fg_boundary)
    n_gt = np.sum(gt_boundary)
    # % Compute precision and recall (degenerate cases first)
    if n_fg == 0 and n_gt > 0:
        precision = 1
        recall = 0
    elif n_fg > 0 and n_gt == 0:
        precision = 0
        recall = 1
    elif n_fg == 0 and n_gt == 0:
        precision = 1
        recall = 1
    else:
        precision = np.sum(fg_match) / float(n_fg)
        recall = np.sum(gt_match) / float(n_gt)
    # Compute F measure
    if precision + recall == 0:
        F = 0
    else:
        F = 2 * precision * recall / (precision + recall)
    return F, precision
def seg2bmap(seg, width=None, height=None):
    """
    From a segmentation, compute a binary boundary map with 1 pixel wide
    boundaries. The boundary pixels are offset by 1/2 pixel towards the
    origin from the actual segment boundary.
    Arguments:
        seg : Segments labeled from 1..k.
        width : Width of desired bmap <= seg.shape[1]
        height : Height of desired bmap <= seg.shape[0]
    Returns:
        bmap (ndarray): Binary boundary map.
    David Martin <[email protected]>
    January 2003
    """
    # Fix: `np.bool` alias was removed in NumPy 1.24; builtin `bool` is
    # equivalent. (The following assignment is a no-op on a bool array but
    # is kept from the original.)
    seg = seg.astype(bool)
    seg[seg > 0] = 1
    assert np.atleast_3d(seg).shape[2] == 1
    width = seg.shape[1] if width is None else width
    height = seg.shape[0] if height is None else height
    h, w = seg.shape[:2]
    ar1 = float(width) / float(height)
    ar2 = float(w) / float(h)
    # Fix: the original used bitwise `|`, which binds tighter than `>` and
    # turned this into a chained comparison over `w | height` etc., silently
    # mis-evaluating the sanity check. Boolean `or` expresses the intent.
    assert not (width > w or height > h or abs(ar1 - ar2) > 0.01),\
        'Can''t convert %dx%d seg to %dx%d bmap.' % (w, h, width, height)
    e = np.zeros_like(seg)
    s = np.zeros_like(seg)
    se = np.zeros_like(seg)
    # Shifted copies: east, south, and south-east neighbours.
    e[:, :-1] = seg[:, 1:]
    s[:-1, :] = seg[1:, :]
    se[:-1, :-1] = seg[1:, 1:]
    # A pixel is boundary if it differs from any of those neighbours.
    b = seg ^ e | seg ^ s | seg ^ se
    b[-1, :] = seg[-1, :] ^ e[-1, :]
    b[:, -1] = seg[:, -1] ^ s[:, -1]
    b[-1, -1] = 0
    if w == width and h == height:
        bmap = b
    else:
        bmap = np.zeros((height, width))
        for x in range(w):
            for y in range(h):
                if b[y, x]:
                    # Fix: the original `floor((y - 1) + height / h)` raised
                    # NameError (`floor` never imported) and mistranslated
                    # the MATLAB source `1+floor((y-1)*height/h)`; likewise
                    # `width / h` should be `width / w`.
                    j = 1 + int(np.floor((y - 1) * height / h))
                    i = 1 + int(np.floor((x - 1) * width / w))
                    bmap[j, i] = 1
    return bmap
"[email protected]"
] | |
9d8eef47748cb50afa81f15fa27c8d75bfaca146 | 08351ac650385e2ee0f4fc08ab8ef0978bc5bf3c | /Module2_HTTP/Request_response/Request.py | 981163757b7ae56b101453c505885d2f3f2dcdcd | [] | no_license | tertiarycourses/PythonNetworkingTraining | d3c02488e91d318874558130a89fb112a2c95d55 | 9c5f223a4b83d21a791ac0d322306c3a78c4122f | refs/heads/master | 2019-07-13T07:59:49.241235 | 2017-05-11T14:48:19 | 2017-05-11T14:48:19 | 83,748,786 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,087 | py | #Requests with urllib
# from urllib.request import urlopen
# from urllib.request import Request
# response = urlopen('http://www.debian.org')
# print(response)
# print(response.readline())
# ##response object
# print(response.url)
# print(response.status)
# print(response.headers['content-type'])
#response = urlopen('http://www.debian.org')
#print(response.read(50))
#response = urlopen('http://www.debian.org')
#print(response.read())
##print(response.read())
##Status Code
#print(response.status)
#-------------------------------------
#custom request
#req = Request('http://www.debian.org')
#req.add_header('Accept-Language', 'sv')
#response = urlopen(req)
#print(response.readlines()[:5])
#----------------------------------------
#Content Compression
#with decompression cannot see data
#from urllib.request import Request
#from urllib.request import urlopen
#req = Request('http://www.debian.org')
#req.add_header('Accept-Encoding', 'gzip')
#response = urlopen(req)
#print(response.getheader('Content-Encoding'))
#print(response.read())
#With Decompression can view data
#from urllib.request import Request
#from urllib.request import urlopen
#import gzip
#req = Request('http://www.debian.org')
#req.add_header('Accept-Encoding', 'gzip')
#response = urlopen(req)
#content = gzip.decompress(response.read())
#result=content.splitlines()[:5]
#print(result)
#--------------------------------------
#Content Negotiation
#from urllib.request import urlopen
#import gzip
#req = Request('http://www.debian.org')
#req.add_header('Accept-Content-Type', 'text/plain')
#response = urlopen(req)
#content = response.read()
#result=content.splitlines()[:5]
#print(result)
#-------------------------------------------
#User Agent
#from urllib.request import Request
#from urllib.request import urlopen
#req = Request('http://www.debian.org')
#req.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64;rv:24.0) Gecko/20140722 Firefox/24.0 Iceweasel/24.7.0')
#response = urlopen(req)
#print(response.readline())
#---------------------------------------------
#Cookie
#from http.cookiejar import CookieJar
#cookie_jar = CookieJar()
#from urllib.request import build_opener, HTTPCookieProcessor
#opener = build_opener(HTTPCookieProcessor(cookie_jar))
#opener.open('http://www.github.com')
#print(len(cookie_jar))
#cookies = list(cookie_jar)
#print(cookies)
#---------------------------------------------\
#Redirect
#from urllib.request import Request
#from urllib.request import urlopen
#req = Request('http://www.gmail.com')
#response = urlopen(req)
#print(response.url)
#print(req.redirect_dict)
#---------------------------------------
#HTTP Methods
#GET
import requests
# Simple GET demo with the `requests` library: `content` is the raw body
# bytes, `status_code` the numeric HTTP status (e.g. 200).
response = requests.get('http://www.debian.org')
print(response.content)
print(response.status_code)
#POST
# import requests
# r = requests.post("http://bugs.python.org", data={'number': 12524, 'type': 'issue', 'action': 'show'})
# print(r.status_code, r.reason)
# print(r.text)
| [
"[email protected]"
] | |
e223b08659d04f02b9ff57fd9cc627a0bfbc4420 | 63ba933a294865f65409635f62e0f1d59f725f37 | /src/arrays/bagOfTokensScore.py | 86ce1032d9eb0987f1da6b22e658f67679b0f34d | [
"CC0-1.0"
] | permissive | way2arun/datastructures_algorithms | fc4302bdbb923ef8912a4acf75a286f2b695de2a | 4ea4c1579c28308455be4dfa02bd45ebd88b2d0a | refs/heads/master | 2021-12-07T04:34:35.732026 | 2021-09-30T12:11:32 | 2021-09-30T12:11:32 | 203,658,808 | 1 | 0 | null | 2020-08-08T15:55:09 | 2019-08-21T20:23:46 | Python | UTF-8 | Python | false | false | 2,716 | py | """
Bag of Tokens
You have an initial power of P, an initial score of 0, and a bag of tokens where tokens[i] is the value of the ith token (0-indexed).
Your goal is to maximize your total score by potentially playing each token in one of two ways:
If your current power is at least tokens[i], you may play the ith token face up, losing tokens[i] power and gaining 1 score.
If your current score is at least 1, you may play the ith token face down, gaining tokens[i] power and losing 1 score.
Each token may be played at most once and in any order. You do not have to play all the tokens.
Return the largest possible score you can achieve after playing any number of tokens.
Example 1:
Input: tokens = [100], P = 50
Output: 0
Explanation: Playing the only token in the bag is impossible because you either have too little power or too little score.
Example 2:
Input: tokens = [100,200], P = 150
Output: 1
Explanation: Play the 0th token (100) face up, your power becomes 50 and score becomes 1.
There is no need to play the 1st token since you cannot play it face up to add to your score.
Example 3:
Input: tokens = [100,200,300,400], P = 200
Output: 2
Explanation: Play the tokens in this order to get a score of 2:
1. Play the 0th token (100) face up, your power becomes 100 and score becomes 1.
2. Play the 3rd token (400) face down, your power becomes 500 and score becomes 0.
3. Play the 1st token (200) face up, your power becomes 300 and score becomes 1.
4. Play the 2nd token (300) face up, your power becomes 0 and score becomes 2.
Constraints:
0 <= tokens.length <= 1000
0 <= tokens[i], P < 104
"""
from collections import deque
from typing import List
class Solution:
    """Greedy two-pointer solution for "Bag of Tokens" (LeetCode 948)."""

    def bagOfTokensScore(self, tokens: List[int], P: int) -> int:
        """Return the maximum score reachable from power ``P``.

        Strategy on the sorted tokens: always buy score with the cheapest
        remaining token; when power runs out, sell the most expensive
        remaining token for power — but only while more than two tokens
        remain, since trading one of the last two can never raise the score.
        O(n log n) time for the sort, O(1) extra space.

        Note: a dead commented-out deque variant previously sat in the
        docstring position; it has been removed in favour of real docs.
        """
        tokens.sort()
        # No tokens, or cannot afford even the cheapest one: score stays 0.
        if not tokens or P < tokens[0]:
            return 0
        score = 0
        left, right = 0, len(tokens) - 1
        while left <= right:
            if P >= tokens[left]:
                # Play the cheapest token face up: spend power, gain score.
                P -= tokens[left]
                left += 1
                score += 1
            elif right - left > 1:
                # Play the priciest token face down: gain power, lose score.
                P += tokens[right]
                right -= 1
                score -= 1
            else:
                # Trading with <= 2 tokens left cannot improve the score.
                break
        return score
# Main Call
# Ad-hoc smoke test: expected output for this input is 1 (example 2).
tokens = [100, 200]
P = 150
solution = Solution()
print(solution.bagOfTokensScore(tokens, P))
| [
"[email protected]"
] | |
929c1957a029eacd49d34f3759ed03fa3205602b | 391e0515bbbcfaaba5c2375c17fa8f11c46a0f73 | /anb/config/urls.py | b40a9b6d2e9f95a7ad0be8e9e359495a54c5a912 | [] | no_license | ggnight82/DRF-GraphQL | 9ea26458833757ebdcd96f2e1dcdc48bf6fbc2a7 | ee54921a82b6f926254a67c923c946f5062ddb44 | refs/heads/master | 2023-04-09T03:41:44.871431 | 2021-04-18T09:47:05 | 2021-04-18T09:47:05 | 350,238,210 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 402 | py | from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
# URL routing: admin site plus versioned API prefixes delegated to the
# `rooms` and `users` apps.
urlpatterns = [
    path("admin/", admin.site.urls),
    path("api/v1/rooms/",include("rooms.urls")),
    path("api/v1/users/",include("users.urls")),
]
# Serve uploaded media through Django itself, but only in DEBUG mode.
if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
"[email protected]"
] | |
a8b32038a3ade070c8f67b3eed0e66408c072e48 | 25d4c31d5ebe470118b14beb84f3cd1e53d99c15 | /01_Tutorials/PyQt5_GUI_Tutorial/09_2_Tutorial_Progressbar_Button.py | 195496bbd802cc5cf6756f04db46337e8a71d385 | [] | no_license | daltdoerfer/Python_Templates-1 | ea4b59489feb7b7617e81b7c94d4375dbf25def3 | c2471cebeaf20bbfdfd3fd263d458e5a67ad8d1e | refs/heads/master | 2023-05-10T15:07:10.109280 | 2021-06-08T06:45:53 | 2021-06-08T06:45:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,012 | py | # Dieses Tutorial beinhaltet das einfügen von:
# Progressbar mit ButtonS und (Multi-)Threading (Programm muss weiterlaufen und lagert andere Prozesse aus)
# https://riptutorial.com/pyqt5/example/29500/basic-pyqt-progress-bar
import sys
import time
from PyQt5 import *
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5.QtCore import *
TIME_LIMIT = 100 # Ausgelagertes TIME Limit, da mehrere Klassen darauf zugreifen
class External(QThread):
    """
    Runs a counter thread.

    Emits `countChanged` with the values 1..TIME_LIMIT, roughly once per
    second, so the GUI thread can update the progress bar without blocking.
    """
    countChanged = pyqtSignal(int)
    def run(self):
        count = 0
        while count < TIME_LIMIT:
            count += 1
            time.sleep(1)
            self.countChanged.emit(count)
class Fenster(QDialog):  # Inherit QDialog (use QMainWindow for status/menu bars)
    """Main window: a progress bar driven by a worker thread."""
    def __init__(self):
        super().__init__()
        self.initMe()
    def initMe(self):
        #################################
        # Progress bar
        #################################
        self.pb1 = QProgressBar(self)
        self.pb1.setGeometry(0, 0, 300, 25)
        self.pb1.move(50, 50)
        self.pb1.setMaximum(100)
        self.bt1 = QPushButton("Start", self)
        self.bt1.move(50, 75)
        self.bt1.clicked.connect(self.onButtonClick)
        #################################
        # General window config (main window)
        #################################
        self.setGeometry(50, 50, 1000, 500)
        self.setWindowTitle("My First GUI")
        self.setWindowIcon(QIcon("icon.png"))
        self.show()
    def onButtonClick(self):
        # Keep a reference on self so the thread is not garbage-collected.
        self.calc = External()
        self.calc.countChanged.connect(self.onCountChanged)
        self.calc.start()
    def onCountChanged(self, value):
        # Slot: runs on the GUI thread via the queued signal.
        self.pb1.setValue(value)
self.pb1.setValue(value)
if __name__ == "__main__":
    app = QApplication(sys.argv)  # create the default application
    w = Fenster()  # build a simple window -> new instance w
    sys.exit(app.exec_())  # exit the script when the window is closed
"[email protected]"
] | |
d5061520a0c93bc4dc2f06ffebf65a6b28ccfdcc | 5b6b1e410cceead0bab46109c482eaf9ddb3ffb1 | /rbti_app/migrations/0003_auto_20200919_1632.py | 27fef89e08d1d54cd9e9f51bd3db50b885c73d8e | [] | no_license | ikalkali/rbti-app | f5ea2d01d41dccde820ed630c529138ed9562766 | c5773b8ae73fafd5da5d6ad2ffe8344f6ab36cf4 | refs/heads/master | 2023-07-18T22:28:32.155706 | 2021-09-23T02:23:51 | 2021-09-23T02:23:51 | 331,884,375 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 676 | py | # Generated by Django 3.1.1 on 2020-09-19 09:32
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rbti_app', '0002_auto_20200919_1513'),
]
operations = [
migrations.RemoveField(
model_name='buku',
name='kategori',
),
migrations.AlterField(
model_name='buku',
name='id_buku',
field=models.CharField(max_length=255, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='buku',
name='tahun',
field=models.CharField(max_length=255),
),
]
| [
"[email protected]"
] | |
968aa7f632f718fc709de6f060dd463d3bf62c56 | 269e5a222eade2acd2f732ca6d8ec5753405dd5d | /assignment 7.py | afd668cf069cf299ada01f0dd7ca839e88b772b3 | [] | no_license | naveen0845/Assignments | 25894c23e03a8956ce83656c040af3ea13a9628e | f8184bda66139e0ccaac8758703890bcd7b66f36 | refs/heads/master | 2022-11-11T14:11:42.467483 | 2020-07-03T05:56:26 | 2020-07-03T05:56:26 | 273,655,478 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,780 | py | #adding n natural numbers
sum=0
i=0
n=int(input("enter the number"))
while(i<=n):
sum=sum+i
i=i+1
print(sum)
#counting even and odd
numlist=[]
even_count=0
odd_count=0
n=int(input("enter no of elements"))
for i in range(0,n):
v=int(input("enter the values"))
numlist.append(v)
for j in range(n):
if(numlist[j]%2==0):
even_count=even_count+1
else:
odd_count=odd_count+1
print("no of even",even_count)
print("no of odd",odd_count)
# Exercise: print 0 to 6, skipping 3 and 6.
# Fix: the original `range(0, 6)` stopped at 5 (6 was never considered) and
# the `i % 3 != 0` test also dropped 0; skip exactly 3 and 6 instead.
for i in range(0, 7):
    if i != 3 and i != 6:
        print(i)
# Exercise: print the square of each number in the list.
for i in [1,2,3,4,5]:
    square=i*i
    print(square)
# Exercise: sum and average of 0..n (average taken over n).
i=0
sum=0
n=int(input("enter n "))
while(i<=n):
    sum=sum+i
    i=i+1
ave=sum/n
print("sum of given number=",sum)
print("average=",ave)
# Exercise: reverse the digits of a number.
number=int(input("enter the number"))
reverse=0
while(number>0):
    x=number%10
    reverse=(reverse*10)+x
    number=number//10
print("reverse of entered number is ",reverse)
# Exercise: print the odd numbers in 0..9.
for i in range(10):
    if(i%2!=0):
        print(i)
# Exercise: count the digits of a number.
n=int(input("enter the digits"))
count =0
while(n>0):
    n=n//10
    count=count+1
print(count)
# Exercise: palindrome check by comparing a number with its digit reversal.
n=int(input("enter the number"))
reverse=0
temp=n
while(n>0):
    dig=n%10
    reverse=(reverse*10)+dig
    n=n//10
if(temp==reverse):
    print("palindrome")
else:
    print("not palindrome")
# Exercise: print an n-by-n identity matrix.
n=int(input("enter n"))
for i in range(0,n):
    for j in range(0,n):
        if i==j:
            print("1",sep=" ",end=" ")
        else:
            print("0",sep=" ",end=" ")
    print()
print()
# Exercise: perfect-number check (sum of proper divisors equals n).
# NOTE(review): `sum` again shadows the builtin.
n=int(input("enter the number "))
sum=0
for i in range(1,n):
    if(n%i==0):
        sum+=i
if(sum==n):
    print("entered number",n,"is perfect")
else:
    print("entered number",n,"is not perfect")
| [
"[email protected]"
] | |
98a753ab6a07f9b5d2e4c3f7490787d85a4f4119 | 975b2d421d3661e6770b601929d5f11d981d8985 | /msgraph/generated/role_management/entitlement_management/role_eligibility_schedule_instances/item/directory_scope/directory_scope_request_builder.py | a136ee58b839e8a93ac974456db629b983364510 | [
"MIT"
] | permissive | microsoftgraph/msgraph-sdk-python | a7c551b85daadeebf76ec4ae12668664ea639b42 | 27de7ccbe688d7614b2f6bde0fdbcda4bc5cc949 | refs/heads/main | 2023-09-03T21:45:27.989672 | 2023-08-31T06:22:18 | 2023-08-31T06:22:18 | 534,665,999 | 135 | 18 | MIT | 2023-09-14T11:04:11 | 2022-09-09T14:00:17 | Python | UTF-8 | Python | false | false | 5,208 | py | from __future__ import annotations
from dataclasses import dataclass, field
from kiota_abstractions.base_request_builder import BaseRequestBuilder
from kiota_abstractions.get_path_parameters import get_path_parameters
from kiota_abstractions.method import Method
from kiota_abstractions.request_adapter import RequestAdapter
from kiota_abstractions.request_information import RequestInformation
from kiota_abstractions.request_option import RequestOption
from kiota_abstractions.serialization import Parsable, ParsableFactory
from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union
if TYPE_CHECKING:
from ......models.directory_object import DirectoryObject
from ......models.o_data_errors.o_data_error import ODataError
# NOTE(review): Kiota-style generated Microsoft Graph request builder —
# prefer regenerating the SDK over hand-editing this class.
class DirectoryScopeRequestBuilder(BaseRequestBuilder):
    """
    Provides operations to manage the directoryScope property of the microsoft.graph.unifiedRoleScheduleInstanceBase entity.
    """
    def __init__(self,request_adapter: RequestAdapter, path_parameters: Optional[Union[Dict[str, Any], str]] = None) -> None:
        """
        Instantiates a new DirectoryScopeRequestBuilder and sets the default values.
        Args:
            path_parameters: The raw url or the Url template parameters for the request.
            request_adapter: The request adapter to use to execute the requests.
        """
        super().__init__(request_adapter, "{+baseurl}/roleManagement/entitlementManagement/roleEligibilityScheduleInstances/{unifiedRoleEligibilityScheduleInstance%2Did}/directoryScope{?%24select,%24expand}", path_parameters)
    async def get(self,request_configuration: Optional[DirectoryScopeRequestBuilderGetRequestConfiguration] = None) -> Optional[DirectoryObject]:
        """
        The directory object that is the scope of the assignment or role eligibility. Read-only.
        Args:
            request_configuration: Configuration for the request such as headers, query parameters, and middleware options.
        Returns: Optional[DirectoryObject]
        """
        request_info = self.to_get_request_information(
            request_configuration
        )
        # 4XX/5XX responses are deserialized into ODataError and raised.
        from ......models.o_data_errors.o_data_error import ODataError
        error_mapping: Dict[str, ParsableFactory] = {
            "4XX": ODataError,
            "5XX": ODataError,
        }
        if not self.request_adapter:
            raise Exception("Http core is null")
        from ......models.directory_object import DirectoryObject
        return await self.request_adapter.send_async(request_info, DirectoryObject, error_mapping)
    def to_get_request_information(self,request_configuration: Optional[DirectoryScopeRequestBuilderGetRequestConfiguration] = None) -> RequestInformation:
        """
        The directory object that is the scope of the assignment or role eligibility. Read-only.
        Args:
            request_configuration: Configuration for the request such as headers, query parameters, and middleware options.
        Returns: RequestInformation
        """
        request_info = RequestInformation()
        request_info.url_template = self.url_template
        request_info.path_parameters = self.path_parameters
        request_info.http_method = Method.GET
        request_info.headers["Accept"] = ["application/json"]
        if request_configuration:
            request_info.add_request_headers(request_configuration.headers)
            request_info.set_query_string_parameters_from_raw_object(request_configuration.query_parameters)
            request_info.add_request_options(request_configuration.options)
        return request_info
    @dataclass
    class DirectoryScopeRequestBuilderGetQueryParameters():
        """
        The directory object that is the scope of the assignment or role eligibility. Read-only.
        """
        def get_query_parameter(self,original_name: Optional[str] = None) -> str:
            """
            Maps the query parameters names to their encoded names for the URI template parsing.
            Args:
                original_name: The original query parameter name in the class.
            Returns: str
            """
            if not original_name:
                raise TypeError("original_name cannot be null.")
            if original_name == "expand":
                return "%24expand"
            if original_name == "select":
                return "%24select"
            return original_name
        # Expand related entities
        expand: Optional[List[str]] = None
        # Select properties to be returned
        select: Optional[List[str]] = None
    from kiota_abstractions.base_request_configuration import BaseRequestConfiguration
    @dataclass
    class DirectoryScopeRequestBuilderGetRequestConfiguration(BaseRequestConfiguration):
        from kiota_abstractions.base_request_configuration import BaseRequestConfiguration
        """
        Configuration for the request such as headers, query parameters, and middleware options.
        """
        # Request query parameters
        query_parameters: Optional[DirectoryScopeRequestBuilder.DirectoryScopeRequestBuilderGetQueryParameters] = None
| [
"[email protected]"
] | |
4308427144a103deb0b3a10389cb9ac3ce571b5a | 8187981f1c3e5bdef3e1fe2812093b6a04566b54 | /utils/plotting.py | 01d2034c073c2c2849d6d083d5c2a6d56ed8de28 | [] | no_license | jerrychen109/CLIP-fewshot | f50dcf4bcecb94776a2ea202ce221ce658a05a3d | 9eed396748631f24b993891036adbb739f225c7e | refs/heads/master | 2023-05-16T23:44:04.125227 | 2021-06-03T01:27:17 | 2021-06-03T01:27:17 | 361,955,934 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 898 | py | import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
def plot_run_accuracies(accuracies, xtick_interval=10):
    """Bar-plot the results of a fewshot experiment across values of k.

    Input:
    - accuracies: a list of (k, trial_accuracies) tuples, where
      trial_accuracies holds the accuracy of every trial run at that k.
    - xtick_interval: keep only every xtick_interval-th x tick label.
    """
    frame = pd.DataFrame.from_records(np.array(accuracies), columns=['k', 'acc'])
    frame = frame.explode('acc')
    frame['acc'] = frame['acc'].astype(float)
    axis = sns.barplot(x='k', y='acc', data=frame)
    axis.yaxis.set_major_locator(plt.MaxNLocator(20))
    axis.set(ylim=(0, 1))
    # Thin out the x tick labels, keeping every xtick_interval-th one.
    for index, tick in enumerate(axis.xaxis.get_major_ticks()):
        if (index + 1) % xtick_interval != 0:
            tick.set_visible(False)
    plt.show()
| [
"[email protected]"
] | |
ccf86f17aa2024ee4e9dcc06b7ad6c121a091fee | 77f49d6fd287da24980f8e2bca9f7aa4a9fd26dc | /spas_functions.py | b8cff0d608072a346cb2d70dee7679844656fefb | [] | no_license | Raj067/SolorPropriatorshipAccountingSoftware | f7f894abaf4ebafde57ee8f2dee0615b10527e30 | 7982b7d9926b1cb450511a5541df10dbcb5dcb54 | refs/heads/main | 2023-07-18T17:15:55.506550 | 2021-09-14T13:22:43 | 2021-09-14T13:22:43 | 406,370,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,876 | py | import datetime
import sqlite3
from PyQt5 import QtWidgets, uic
from PyQt5.QtGui import QDoubleValidator
from SqliteHelper import database, transactions
def first_run():
    """Create the database tables on first launch; log (don't raise) errors."""
    try:
        database.create_table()
    except sqlite3.Error as err:
        # Best-effort: a failure (e.g. tables already exist) is only printed.
        print(err)
def initial_adjustment():
    """Open the opening-balance dialog so the user can set an account's
    starting balance when the application runs for the first time."""
    ui = uic.loadUi('init_adjustment.ui')
    # Populate the account combobox; each item carries the account uid as
    # its item data.
    query = '''SELECT [accounts].[name], [accounts].[address], [accounts].[uid]
    FROM [accounts] ORDER BY [accounts].[name]'''
    data = database.select(query)
    for out in data:
        # out[0]=name, out[1]= address, out[2]= uid
        name, address, uid = out
        ui.comboBox.addItem(name + ',' + address, uid)
    def confirmed():
        # Slot for the dialog's accepted signal: write the entered balance
        # for the selected account.
        amount = ui.le_amount.text()
        name = ui.comboBox.currentText()
        uid = ui.comboBox.itemData(ui.comboBox.currentIndex())
        param = (amount, uid)
        if not amount == '':
            if database.update_balance(param):
                # Bengali: "<name> updated. ৳<amount>"
                msg = str(name) + " এর হালনাগাদ সম্পন্ন। ৳" + amount
                ui.label_msg.setText(msg)
        else:
            # Bengali: "Please fill in the form properly."
            msg = "ফর্মটি যথাযথভাবে পূরন করুন।"
            ui.label_msg.setText(msg)
        if ui.rb_add_more.isChecked():
            # "Add more" selected: clear the amount and keep the dialog open.
            ui.le_amount.setText('')
            ui.show()
            ui.comboBox.setFocus()
        else:
            ui.close()
    ui.show()
    ui.accepted.connect(confirmed)
def ledger_transactions(uid=''):
    """Fill the ledger table widget with the transactions for `uid`."""
    ledger = uic.loadUi('ledger.ui')
    # load Journal UI
    # NOTE(review): the string below is a no-op expression (not a docstring,
    # since it is not the first statement); kept byte-identical here.
    """Ledger Widget table contents"""
    data = transactions(uid)
    for row_number, row_data in enumerate(data):
        ledger.tableWidget.insertRow(row_number)
        for column_number, column_data in enumerate(row_data):
            cell = QtWidgets.QTableWidgetItem(str(column_data))
            ledger.tableWidget.setItem(row_number, column_number, cell)
def fetch_accounts(table):
    """Repopulate the given accounts table widget from the database.

    Clears the table, then inserts one row per account with the columns
    (name, address, mobile). The table is made read-only afterwards.
    """
    print("Fetching accounts table items")
    table.setRowCount(0)
    query = '''SELECT
    [accounts].[name],
    [accounts].[address],
    [accounts].[mobile]
    FROM [accounts] ORDER BY [accounts].[name];'''
    try:
        data = database.select(query)
        # accounts_window.tableWidget.setColumnCount(2)
        for row_number, row_data in enumerate(data):
            table.insertRow(row_number)
            for column_number, info in enumerate(row_data):
                celldata = QtWidgets.QTableWidgetItem(str(info))
                table.setItem(
                    row_number, column_number, celldata)
    except sqlite3.Error as err:
        # Best-effort: database errors are only printed.
        print('Error', err)
    # Disable in-place editing of the populated cells.
    table.setEditTriggers(
        QtWidgets.QTreeView.NoEditTriggers)
def init_add_account(table):
    """Open the "new account" dialog and wire up its confirm/cancel buttons.

    On confirm, inserts the entered account and refreshes `table` (the
    accounts table widget) via fetch_accounts.
    """
    add_account_ui = uic.loadUi('ui/diagNewAccount.ui')
    add_account_ui.show()
    # todo get items from settings configuration
    add_account_ui.comboGroup.addItem("Payable/Receivable", "PR")
    def confirmed():
        name = add_account_ui.ld_name.text()
        address = add_account_ui.ld_address.text()
        mobile = add_account_ui.ld_mobile.text()
        group = add_account_ui.comboGroup.currentData()
        if name != '' and address != '': # eliminates empty data
            data = (name, address, mobile, group)
            try:
                database.insert_account(data)
                msg = name + "- Added successfully."
                add_account_ui.label_msg.setText(msg)
                # after insert reset fields
                add_account_ui.ld_name.setText('')
                add_account_ui.ld_address.setText('')
                add_account_ui.ld_mobile.setText('')
                add_account_ui.ld_name.setFocus()
                fetch_accounts(table)
            except sqlite3.Error as err:
                # Fix: QLabel.setText requires a str; passing the exception
                # object raised TypeError and masked the real error message.
                add_account_ui.label_msg.setText(str(err))
        else:
            msg = 'Required fields are empty.'
            add_account_ui.show()
            add_account_ui.label_msg.setText(msg)
    add_account_ui.pb_confirm.clicked.connect(confirmed)
    add_account_ui.pb_cancel.clicked.connect(lambda: add_account_ui.close())
# get uid for specific Name supplied
def get_uid_for(name=None):
    """Return the uid of the account whose name matches, or None if absent.

    `name` is forwarded as the parameter sequence for the `?` placeholder,
    so callers pass it as a one-element sequence, e.g. get_uid_for(["Cash"]).
    """
    query = '''SELECT [accounts].[uid] FROM [accounts] WHERE [name] = ?'''
    data = database.select(query, name)
    # Fix: initialise uid so an empty result set returns None instead of
    # raising UnboundLocalError at the `return` below.
    uid = None
    # select uid only as int (last matching row wins, as before)
    for row in data:
        for value in row:
            uid = value
    return uid
# trx_type == "IN" --> Cash In Flow
# else Cash Out Flow
def init_cash_transaction(trx_tag=None):
    """Pop the cash-transaction dialog for trx_tag "CASH_IN" or "CASH_OUT".

    Loads ui/diagNewCashTrx.ui, fills the counterparty dropdown with all
    "PR" (payable/receivable) accounts, and on confirm inserts one row into
    [transactions] debiting/crediting the internal "Cash" account.
    """
    add_trx = uic.loadUi('ui/diagNewCashTrx.ui')
    add_trx.show()
    if trx_tag == "CASH_IN":
        add_trx.setWindowTitle("নগদ জমা")
    elif trx_tag == "CASH_OUT":
        add_trx.setWindowTitle("নগদ পরিশোধ")
    # Default the date field to today (ISO format).
    add_trx.lddate.setText(str(datetime.date.today()))
    # fetch names from db to populate dropdown list
    query = '''SELECT [accounts].[name], [accounts].[address], [accounts].[uid]
    FROM [accounts] WHERE [accounts].[group] = "PR" ORDER BY [accounts].[name];'''
    data = database.select(query)
    for out in data:
        name, address, uid = out
        # Display "name,address"; stash the uid in the item's userData role.
        add_trx.comboBox.addItem(name + ',' + address, uid)
    # declaring zero values before processing
    # todo check if cash_uid not found
    cash_uid = get_uid_for(["Cash"])
    p_id = 0  # not for cash trx
    p_lott = 0  # not for cash trx
    quantity = 0  # not for cash trx
    cgs = 0  # not for cash trx
    def cash_transaction_confirmed():
        # CASH_IN debits the Cash account and credits the selected
        # counterparty; CASH_OUT is the mirror image.
        if trx_tag == "CASH_IN":
            add_trx.setWindowTitle("নগদ জমা")
            dr_uid = cash_uid
            cr_uid = add_trx.comboBox.itemData(
                add_trx.comboBox.currentIndex())
        else:
            add_trx.setWindowTitle("নগদ পরিশোধ")
            cr_uid = cash_uid
            dr_uid = add_trx.comboBox.itemData(add_trx.comboBox.currentIndex())
        amount = add_trx.ldamount.text()
        trx_date = add_trx.lddate.text()
        description = add_trx.tddesc.toPlainText()
        if description == '':
            description = "N/A"
        if dr_uid != '' and cr_uid != '' and trx_date != '' and amount != '':
            # ^^^ eliminates empty data
            # insert transaction
            values = (trx_date, trx_tag, dr_uid, cr_uid, description,
                      amount, p_id, p_lott, quantity, cgs)
            if database.insert_transaction(values):
                msg = "সংযোজিত হয়েছে।"
                add_trx.label_msg.setText(msg)
            else:
                # NOTE(review): failure is only printed to the console, not
                # shown in the dialog — confirm this is intentional.
                print("ব্যর্থ।")
        else:
            msg = "ফর্মটি যথাযথভাবে পূরণ করুন"
            add_trx.label_msg.setText(msg)
            print('Input required Transaction in DATA')
        # "Add more" keeps the dialog open for the next entry; otherwise
        # close (this runs even when the insert above failed).
        if add_trx.rb_add_more.isChecked():
            # reset amount field
            add_trx.ldamount.setText('')
            add_trx.show()
            add_trx.comboBox.setFocus()
        else:
            add_trx.close()
    add_trx.pb_ok.clicked.connect(cash_transaction_confirmed)
    add_trx.pb_cancel.clicked.connect(lambda: add_trx.close())
def cash_flow_in_table(self):
    """Populate *self* (a QTableWidget) with all CASH_IN transactions.

    Columns: date, counterparty name, amount — newest first.
    """
    # BUG FIX: init_cash_transaction() records CASH_IN with dr_uid = the
    # internal "Cash" account and cr_uid = the counterparty, so joining on
    # dr_uid made every row display the name "Cash". Join on cr_uid to show
    # the actual counterparty.
    query = '''SELECT
    [transactions].[date],
    [accounts].[name],
    [transactions].[amount]
    FROM [transactions]
    INNER JOIN [accounts] ON [accounts].[uid] = [transactions].[cr_uid]
    WHERE [transactions].[trx_tag] = 'CASH_IN' ORDER BY [transactions].[date] DESC;'''
    param = ''
    self.setRowCount(0)
    out = database.select(query, param)
    for row_number, row_data in enumerate(out):
        self.insertRow(row_number)
        for column_number, column_data in enumerate(row_data):
            cell = QtWidgets.QTableWidgetItem(str(column_data))
            self.setItem(row_number, column_number, cell)
def cash_flow_out_table(self):
    """Fill *self* (a QTableWidget) with every CASH_OUT transaction.

    Rows are ordered newest first; columns are date, payee name, amount.
    For CASH_OUT the debited account (dr_uid) is the counterparty, which is
    why the join below is on dr_uid.
    """
    query = '''SELECT
    [transactions].[date],
    [accounts].[name],
    [transactions].[amount]
    FROM [transactions]
    INNER JOIN [accounts] ON [accounts].[uid] = [transactions].[dr_uid]
    WHERE [transactions].[trx_tag] = 'CASH_OUT' ORDER BY [transactions].[date] DESC;'''
    # Clear any previous contents before repopulating.
    self.setRowCount(0)
    for row_idx, record in enumerate(database.select(query, '')):
        self.insertRow(row_idx)
        for col_idx, value in enumerate(record):
            self.setItem(row_idx, col_idx,
                         QtWidgets.QTableWidgetItem(str(value)))
def init_inv_transaction(trx_tag):
    """Pop the inventory buy/sale dialog for trx_tag "BUY" or "SALE".

    Loads ui/diagNewInvTrx.ui, fills the account and product dropdowns from
    the database, keeps the cost-of-goods-sold (CGS) and profit fields live
    while the user types, and on confirm inserts one [transactions] row.
    """
    def update_cgs():
        # CGS only applies when goods leave inventory (SALE); a BUY has no
        # cost-of-goods-sold, so show 0.
        if not trx_tag == "BUY":
            product_id = ui.comboProducts.itemData(ui.comboProducts.currentIndex())
            quantity = ui.ld_quantity.text()
            cgs = get_cgs(product_id, quantity)
            ui.ld_cgs.setText(str(cgs))
        else:
            ui.ld_cgs.setText(str(0))
    def count_rate():
        # amount / quantity
        amount = ui.ld_amount.text()
        quantity = ui.ld_quantity.text()
        if amount and quantity:
            rate = float(amount) / float(quantity)
            ui.ld_rate.setText(str(rate))
    def count_amount():
        # quantity * rate = total
        # total / quantity = rate
        quantity = ui.ld_quantity.text()
        rate = ui.ld_rate.text()
        if quantity and rate:
            amount = float(quantity) * float(rate)
            ui.ld_amount.setText(str(amount))
    def count_profit():
        # NOTE(review): assumes ld_amount and ld_cgs always parse as floats;
        # an empty field would raise ValueError here — confirm the float
        # validators guarantee this.
        amount = ui.ld_amount.text()
        cgs = ui.ld_cgs.text()
        diff = float(amount) - float(cgs)
        if diff > 0:
            profit = float(diff)
            ui.ld_profit.setText(str(profit))
        elif diff < 0:
            loss = diff
            ui.ld_profit.setText(str(loss))
        else:
            balanced = diff
            ui.ld_profit.setText(str(balanced))
    def inv_trx_confirmed():
        # Gather the form values for a single transaction row.
        trx_date = ui.ld_date.text()
        quantity = ui.ld_quantity.text()
        amount = ui.ld_amount.text()
        description = ui.ld_desc.text()
        p_id = ui.comboProducts.itemData(ui.comboProducts.currentIndex())
        p_lott = 'NA'
        cgs = ui.ld_cgs.text()
        # BUY debits the product (stock in) and credits the supplier;
        # SALE debits the customer and credits the product (stock out).
        if trx_tag == "BUY":
            debit_uid = ui.comboProducts.itemData(ui.comboProducts.currentIndex())
            credit_uid = ui.comboNames.itemData(ui.comboNames.currentIndex())
        else:
            debit_uid = ui.comboNames.itemData(ui.comboNames.currentIndex())
            credit_uid = ui.comboProducts.itemData(ui.comboProducts.currentIndex())
        # eliminate empty values
        if debit_uid != '' and credit_uid != '' and amount != '' and quantity != '':
            param = trx_date, trx_tag, debit_uid, credit_uid, description, amount, p_id, p_lott, quantity, cgs
            print(str(param))
            # store in database
            if database.insert_transaction(param):
                inserted = True
                msg = "Transaction added", quantity, "KG", amount, "Tk"
                ui.label_msg.setText(str(msg))
                if ui.rb_add_more.isChecked() and inserted:
                    # todo reset fields for another trx
                    ui.ld_quantity.setText('')
                    ui.ld_rate.setText('')
                    ui.ld_amount.setText('')
                    ui.ld_desc.setText('')
                    print("triggered", inserted)
            else:
                # NOTE(review): `inserted` is dead in this branch (never read
                # afterwards); the dialog stays open on failure.
                inserted = False
                msg = "Could not insert into database."
                ui.label_msg.setText(str(msg))
        else:
            msg = "Insert required values."
            ui.label_msg.setText(str(msg))
    ui = uic.loadUi('ui/diagNewInvTrx.ui')
    # set float validators
    double_validator = QDoubleValidator(0.0, 9.9, 2)
    ui.ld_quantity.setValidator(double_validator)
    ui.ld_amount.setValidator(double_validator)
    ui.ld_rate.setValidator(double_validator)
    # set window title
    if trx_tag == "BUY":
        print("Buy")
        ui.setWindowTitle("Buy Form")
    elif trx_tag == "SALE":
        print("Sale")
        ui.setWindowTitle("Sale Form")
    # set to today's date
    ui.ld_date.setText(str(datetime.date.today()))
    # fetch accounts
    query = '''SELECT [accounts].[name], [accounts].[address], [accounts].[uid]
    FROM [accounts] WHERE [accounts].[group] = "PR" ORDER BY [accounts].[name];'''
    data = database.select(query)
    # fill comboNames with accounts
    for out in data:
        name, address, uid = out
        ui.comboNames.addItem(name + ',' + address, uid)
    # fill comboProducts
    query = '''SELECT
    [inventory].[product_id],
    [inventory].[product_name]
    FROM [inventory];'''
    data = database.select(query)
    for out in data:
        p_id, p_name = out
        ui.comboProducts.addItem(p_name, p_id)
    # show ui
    ui.show()
    # fill ld_cgs on comboProduct changeSignal
    ui.comboProducts.currentIndexChanged.connect(lambda: update_cgs())
    # update cgs on load
    update_cgs()
    ui.ld_quantity.textChanged.connect(update_cgs)
    ui.pb_count.clicked.connect(count_amount)
    ui.ld_amount.textChanged.connect(count_rate)
    # Profit preview only makes sense for sales.
    if not trx_tag == "BUY":
        ui.ld_amount.textChanged.connect(count_profit)
    ui.pb_ok.clicked.connect(inv_trx_confirmed)
    ui.pb_cancel.clicked.connect(lambda: ui.close())
def get_cgs(p_id, quantity):
    """Return the cost-of-goods-sold for *quantity* units of product *p_id*.

    CGS is prorated from the inventory row's aggregate cost:
    cgs = gross_cgs / gross_quantity * quantity.

    Returns 0.0 when the product is unknown or its stock quantity is zero.
    (The original raised NameError for an unknown product and
    ZeroDivisionError for empty stock.)
    """
    if quantity == '':
        quantity = 0
    # get product quantity and amount
    query = '''SELECT [inventory].[product_quantity], [inventory].[cgs]
    FROM [inventory] WHERE [inventory].[product_id] = ?;'''
    param = str(p_id)
    print("P_ID = ", p_id)
    data = database.select(query, param)
    cgs = 0.0
    for gross_quantity, gross_cgs in data:
        if float(gross_quantity) == 0:
            # No stock on hand — avoid dividing by zero.
            continue
        cgs = float(gross_cgs) / float(gross_quantity) * float(quantity)
        print("CGS: ", cgs)
    return cgs
def inventory_buy_table():
    # TODO: populate an inventory "buy" history table (not implemented yet).
    pass
def inventory_sale_table():
    # TODO: populate an inventory "sale" history table (not implemented yet).
    pass
| [
"[email protected]"
] | |
e89f211cd0002a9e34709ee2502e4c94d5a4389e | c813a613abc05bb845b67dbf912e0f1e165851fd | /m2.py | a1e038300cd043f84e44c30f07cd65d895d99ec7 | [] | no_license | kdshop/pythonPodstawy2019 | df3f4b5668ee480cd295e5e5b34d213c00987836 | c183cfe728c25f262651c0bb0461df1999a3e654 | refs/heads/master | 2020-04-26T12:34:31.147828 | 2019-06-02T08:09:58 | 2019-06-02T08:09:58 | 173,554,206 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 96 | py | def czysabokamitrojkata(a, b, c):
return True if a < b+c and b < a+c and c < a+b else False
| [
"[email protected]"
] | |
657970fa3b03a8c1275c36d93fc9114f57307ff0 | 6b9ee3c44e0af8a58e6cb498f30f6345fdeb2ef6 | /final/src/R-net/test.py | 00f31e603a1edb4db27b3a4755c18195fbe68722 | [
"MIT"
] | permissive | Cooper111/ML2017FALL | b5bd23eb0c4052dc6de02e75c62c7cf4f6ca836c | c6ab78435a907329d244dcf1dd41e4286516c777 | refs/heads/master | 2020-04-02T16:47:55.554482 | 2018-01-27T07:06:56 | 2018-01-27T07:06:56 | 154,629,101 | 1 | 0 | null | 2018-10-25T07:35:39 | 2018-10-25T07:35:36 | Python | UTF-8 | Python | false | false | 6,040 | py | import os
import json
import pickle
import pandas as pd
import torch
from gensim.models.word2vec import Word2Vec
from tester import Tester
from utils.utils import prepare_data, get_args, read_embedding
# TODO: read vocab into a cpu embedding layer
def read_vocab(vocab_config):
    """Build vocabulary tables from the pretrained word2vec model on disk.

    :param vocab_config: dict with at least a 'specials' list of reserved
        tokens that are placed at the front of the vocabulary.
    :return: (itos, stoi, vectors) — index-to-string list, string-to-index
        dict, and a FloatTensor of embedding weights aligned with itos.
        Rows for special tokens are left as zeros.
    """
    wv = Word2Vec.load('data/zh.bin')
    wv_size = 300
    embed_size = 300
    # Map each word2vec token to its position and collect its vector.
    wv_dict = {}
    wv_vectors = []
    for position, token in enumerate(wv.wv.vocab):
        wv_dict[token] = position
        wv_vectors.append(wv[token])
    print("word embedding size: %d" % embed_size)
    # Specials first, then every word2vec token in its original order.
    itos = list(vocab_config['specials'])
    itos.extend(w for w, _ in sorted(wv_dict.items(), key=lambda item: item[1]))
    stoi = {token: index for index, token in enumerate(itos)}
    # Copy pretrained vectors into the weight matrix, skipping specials.
    vectors = torch.zeros([len(itos), embed_size])
    for token, index in stoi.items():
        if token in wv_dict and token not in vocab_config['specials']:
            vectors[index, :wv_size].copy_(torch.FloatTensor(wv_vectors[wv_dict[token]]))
    return itos, stoi, vectors
def main():
    """Build vocabularies, load the dev dataset, run the R-net tester, and
    write predictions to prediction.json / prediction.csv."""
    args = get_args()
    prepare_data()
    # Word-level embedding configuration; the numeric values double as the
    # indices of the special tokens in the vocabulary.
    word_vocab_config = {
        "<UNK>": 0,
        "<PAD>": 1,
        "<start>": 2,
        "<end>": 3,
        "insert_start": "<SOS>",
        "insert_end": "<EOS>",
        "tokenization": "nltk",
        "specials": ["<UNK>", "<PAD>", "<SOS>", "<EOS>"],
        "embedding_root": os.path.join(args.app_path, "data", "embedding", "word"),
        "embedding_type": "glove.840B",
        "embedding_dim": 300
    }
    print("Reading Vocab", flush=True)
    # Char-level config shares everything except the embedding source.
    char_vocab_config = word_vocab_config.copy()
    char_vocab_config["embedding_root"] = os.path.join(args.app_path, "data", "embedding", "char")
    char_vocab_config["embedding_type"] = "glove_char.840B"
    # TODO: build vocab out of dataset
    # build vocab
    itos, stoi, wv_vec = read_vocab(word_vocab_config)
    itoc, ctoi, cv_vec = read_vocab(char_vocab_config)
    # Per-module hyperparameter bundles handed to Tester below.
    char_embedding_config = {"embedding_weights": cv_vec,
                             "padding_idx": word_vocab_config["<UNK>"],
                             "update": args.update_char_embedding,
                             "bidirectional": args.bidirectional,
                             "cell_type": "gru", "output_dim": 300}
    word_embedding_config = {"embedding_weights": wv_vec,
                             "padding_idx": word_vocab_config["<UNK>"],
                             "update": args.update_word_embedding}
    sentence_encoding_config = {"hidden_size": args.hidden_size,
                                "num_layers": args.num_layers,
                                "bidirectional": True,
                                "dropout": args.dropout, }
    pair_encoding_config = {"hidden_size": args.hidden_size,
                            "num_layers": args.num_layers,
                            "bidirectional": args.bidirectional,
                            "dropout": args.dropout,
                            "gated": True, "mode": "GRU",
                            "rnn_cell": torch.nn.GRUCell,
                            "attn_size": args.attention_size,
                            "residual": args.residual}
    self_matching_config = {"hidden_size": args.hidden_size,
                            "num_layers": args.num_layers,
                            "bidirectional": args.bidirectional,
                            "dropout": args.dropout,
                            "gated": True, "mode": "GRU",
                            "rnn_cell": torch.nn.GRUCell,
                            "attn_size": args.attention_size,
                            "residual": args.residual}
    pointer_config = {"hidden_size": args.hidden_size,
                      "num_layers": args.num_layers,
                      "dropout": args.dropout,
                      "residual": args.residual,
                      "rnn_cell": torch.nn.GRUCell}
    print("DEBUG Mode is ", "On" if args.debug else "Off", flush=True)
    dev_cache = "./data/cache/SQuAD_dev%s.pkl" % ("_debug" if args.debug else "")
    test_json = args.test_json
    test = read_dataset(test_json, itos, stoi, itoc, ctoi, dev_cache, args.debug, split="dev")
    test_dataloader = test.get_dataloader(args.batch_size_dev)
    tester = Tester(args, test_dataloader, char_embedding_config, word_embedding_config,
                    sentence_encoding_config, pair_encoding_config,
                    self_matching_config, pointer_config)
    result = tester.test()
    # NOTE(review): the file handle from open() is never closed explicitly,
    # and `id` in the comprehension below shadows the builtin.
    json.dump(result, open('prediction.json', 'w'))
    # Each answer span (start, end) is expanded to a space-separated list of
    # token indices for the CSV submission format.
    pd.DataFrame([[id, ' '.join([str(j) for j in range(ans[0], ans[1])])] for id, ans in result.items()],
                 columns=['id', 'answer']).to_csv('prediction.csv', index=False)
def read_dataset(json_file, itos, stoi, itoc, ctoi, cache_file, is_debug=False, split="train"):
    """Build a SQuAD dataset object for the given split.

    Caching to/from ``cache_file`` is currently disabled (the dataset is
    always rebuilt); the parameter is kept so existing callers keep working.

    :param json_file: path to the SQuAD-format JSON file.
    :param itos: word index-to-string list.
    :param stoi: word string-to-index dict.
    :param itoc: char index-to-string list.
    :param ctoi: char string-to-index dict.
    :param cache_file: unused — retained for interface compatibility.
    :param is_debug: build a reduced dataset for quick runs.
    :param split: dataset split name, e.g. "train" or "dev".
    :return: a utils.dataset.SQuAD instance.
    """
    # Import locally to avoid the heavy dataset dependency at module load.
    from utils.dataset import SQuAD
    dataset = SQuAD(json_file, itos, stoi, itoc, ctoi, debug_mode=is_debug, split=split)
    return dataset
if __name__ == "__main__":
main() | [
"[email protected]"
] | |
629527dd4f990bcc460edb29a2c0b6f2d87784ea | f6d7ed50c7747e4d064c5b2ed02429c3b0452957 | /official/recommendation/neumf_model.py | 45715478bf5fd63991775893478d42ff58eca460 | [
"MIT"
] | permissive | deephdc/retinopathy_test | eabbba5399a1c62bbe72e66762cf3e43ec18f3ce | 5e87be2a67bbbc0b82f6ca258324e80068ef9407 | refs/heads/master | 2021-07-16T20:35:05.415170 | 2020-05-30T23:06:41 | 2020-05-30T23:06:41 | 159,072,604 | 1 | 1 | MIT | 2020-05-25T08:00:39 | 2018-11-25T20:40:39 | Python | UTF-8 | Python | false | false | 16,010 | py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Defines NeuMF model for NCF framework.
Some abbreviations used in the code base:
NeuMF: Neural Matrix Factorization
NCF: Neural Collaborative Filtering
GMF: Generalized Matrix Factorization
MLP: Multi-Layer Perceptron
GMF applies a linear kernel to model the latent feature interactions, and MLP
uses a nonlinear kernel to learn the interaction function from data. NeuMF model
is a fused model of GMF and MLP to better model the complex user-item
interactions, and unifies the strengths of linearity of MF and non-linearity of
MLP for modeling the user-item latent structures.
In NeuMF model, it allows GMF and MLP to learn separate embeddings, and combine
the two models by concatenating their last hidden layer.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import typing
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
from official.datasets import movielens # pylint: disable=g-bad-import-order
from official.recommendation import constants as rconst
from official.recommendation import stat_utils
def _sparse_to_dense_grads(grads_and_vars):
  """Densify all sparse gradients in a list of (gradient, variable) pairs.

  Sparse gradients, represented as tf.IndexedSlices, are materialized as
  dense Tensors; gradients that are already Tensors pass through unchanged.
  For the small embeddings used by this model, applying dense gradients
  with the AdamOptimizer is faster than applying sparse gradients.

  Args:
    grads_and_vars: A list of (gradient, variable) tuples, where each
      gradient is either a Tensor or a tf.IndexedSlices.

  Returns:
    The same list of (gradient, variable) pairs, with every IndexedSlices
    gradient converted to a dense Tensor.
  """
  dense_pairs = []
  for gradient, variable in grads_and_vars:
    # convert_to_tensor is a no-op for Tensors and densifies IndexedSlices.
    dense_pairs.append((tf.convert_to_tensor(gradient), variable))
  return dense_pairs
def neumf_model_fn(features, labels, mode, params):
  """Model Function for NeuMF estimator.

  Args:
    features: Dict of input tensors; must contain the user and item id
      columns, and the duplicate mask during evaluation.
    labels: Binary interaction labels (used in TRAIN mode only).
    mode: A tf.estimator.ModeKeys value (TRAIN, EVAL or PREDICT).
    params: Dict of hyperparameters (see construct_model) plus run options
      such as "use_tpu", "use_seed", "num_neg", "match_mlperf" and the Adam
      optimizer settings.

  Returns:
    A tf.estimator.EstimatorSpec (or tf.contrib.tpu.TPUEstimatorSpec when
    params["use_tpu"] is set) for the requested mode.

  Raises:
    NotImplementedError: if `mode` is not PREDICT, EVAL or TRAIN.
  """
  if params.get("use_seed"):
    tf.set_random_seed(stat_utils.random_int32())
  users = features[movielens.USER_COLUMN]
  items = tf.cast(features[movielens.ITEM_COLUMN], tf.int32)
  logits = construct_model(users=users, items=items, params=params)
  # Softmax with the first column of zeros is equivalent to sigmoid.
  softmax_logits = tf.concat([tf.zeros(logits.shape, dtype=logits.dtype),
                              logits], axis=1)
  if mode == tf.estimator.ModeKeys.PREDICT:
    predictions = {
        movielens.ITEM_COLUMN: items,
        movielens.RATING_COLUMN: logits,
    }
    if params["use_tpu"]:
      return tf.contrib.tpu.TPUEstimatorSpec(mode=mode, predictions=predictions)
    return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions)
  elif mode == tf.estimator.ModeKeys.EVAL:
    duplicate_mask = tf.cast(features[rconst.DUPLICATE_MASK], tf.float32)
    return compute_eval_loss_and_metrics(
        logits, softmax_logits, duplicate_mask, params["num_neg"],
        params["match_mlperf"], params["use_tpu"])
  elif mode == tf.estimator.ModeKeys.TRAIN:
    labels = tf.cast(labels, tf.int32)
    optimizer = tf.train.AdamOptimizer(
        learning_rate=params["learning_rate"], beta1=params["beta1"],
        beta2=params["beta2"], epsilon=params["epsilon"])
    if params["use_tpu"]:
      optimizer = tf.contrib.tpu.CrossShardOptimizer(optimizer)
    loss = tf.losses.sparse_softmax_cross_entropy(
        labels=labels,
        logits=softmax_logits
    )
    # This tensor is used by logging hooks.
    tf.identity(loss, name="cross_entropy")
    global_step = tf.train.get_global_step()
    tvars = tf.trainable_variables()
    gradients = optimizer.compute_gradients(
        loss, tvars, colocate_gradients_with_ops=True)
    # Dense gradients apply faster than sparse for these small embeddings.
    gradients = _sparse_to_dense_grads(gradients)
    minimize_op = optimizer.apply_gradients(
        gradients, global_step=global_step, name="train")
    # Run batch-norm style update ops (if any) together with the step.
    update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
    train_op = tf.group(minimize_op, update_ops)
    if params["use_tpu"]:
      return tf.contrib.tpu.TPUEstimatorSpec(
          mode=mode, loss=loss, train_op=train_op)
    return tf.estimator.EstimatorSpec(mode=mode, loss=loss, train_op=train_op)
  else:
    raise NotImplementedError
def construct_model(users, items, params):
  # type: (tf.Tensor, tf.Tensor, dict) -> tf.Tensor
  """Initialize NeuMF model.

  Args:
    users: Tensor of user ids.
    items: Tensor of item ids.
    params: Dict of hyperparameters. Must contain "num_users", "num_items",
      "model_layers", "mf_regularization", "mlp_reg_layers" and "mf_dim".

  Returns:
    A rank-2 Tensor of prediction logits, one row per (user, item) pair.

  Raises:
    ValueError: if the first model layer is not even.
  """
  num_users = params["num_users"]
  num_items = params["num_items"]
  model_layers = params["model_layers"]
  mf_regularization = params["mf_regularization"]
  mlp_reg_layers = params["mlp_reg_layers"]
  mf_dim = params["mf_dim"]
  # The first MLP layer consumes the concatenation of two equally sized
  # embeddings, hence it must be divisible by 2.
  if model_layers[0] % 2 != 0:
    raise ValueError("The first layer size should be multiple of 2!")
  # Input variables
  user_input = tf.keras.layers.Input(tensor=users)
  item_input = tf.keras.layers.Input(tensor=items)
  # Initializer for embedding layers
  embedding_initializer = "glorot_uniform"
  # Embedding layers of GMF and MLP
  mf_embedding_user = tf.keras.layers.Embedding(
      num_users,
      mf_dim,
      embeddings_initializer=embedding_initializer,
      embeddings_regularizer=tf.keras.regularizers.l2(mf_regularization),
      input_length=1)
  mf_embedding_item = tf.keras.layers.Embedding(
      num_items,
      mf_dim,
      embeddings_initializer=embedding_initializer,
      embeddings_regularizer=tf.keras.regularizers.l2(mf_regularization),
      input_length=1)
  mlp_embedding_user = tf.keras.layers.Embedding(
      num_users,
      model_layers[0]//2,
      embeddings_initializer=embedding_initializer,
      embeddings_regularizer=tf.keras.regularizers.l2(mlp_reg_layers[0]),
      input_length=1)
  mlp_embedding_item = tf.keras.layers.Embedding(
      num_items,
      model_layers[0]//2,
      embeddings_initializer=embedding_initializer,
      embeddings_regularizer=tf.keras.regularizers.l2(mlp_reg_layers[0]),
      input_length=1)
  # GMF part
  mf_user_latent = mf_embedding_user(user_input)
  mf_item_latent = mf_embedding_item(item_input)
  # Element-wise multiply
  mf_vector = tf.keras.layers.multiply([mf_user_latent, mf_item_latent])
  # MLP part
  mlp_user_latent = mlp_embedding_user(user_input)
  mlp_item_latent = mlp_embedding_item(item_input)
  # Concatenation of two latent features
  mlp_vector = tf.keras.layers.concatenate([mlp_user_latent, mlp_item_latent])
  num_layer = len(model_layers)  # Number of layers in the MLP
  for layer in xrange(1, num_layer):
    model_layer = tf.keras.layers.Dense(
        model_layers[layer],
        kernel_regularizer=tf.keras.regularizers.l2(mlp_reg_layers[layer]),
        activation="relu")
    mlp_vector = model_layer(mlp_vector)
  # Concatenate GMF and MLP parts
  predict_vector = tf.keras.layers.concatenate([mf_vector, mlp_vector])
  # Final prediction layer
  logits = tf.keras.layers.Dense(
      1, activation=None, kernel_initializer="lecun_uniform",
      name=movielens.RATING_COLUMN)(predict_vector)
  # Print model topology.
  tf.keras.models.Model([user_input, item_input], logits).summary()
  sys.stdout.flush()
  return logits
def compute_eval_loss_and_metrics(logits,              # type: tf.Tensor
                                  softmax_logits,      # type: tf.Tensor
                                  duplicate_mask,      # type: tf.Tensor
                                  num_training_neg,    # type: int
                                  match_mlperf=False,  # type: bool
                                  use_tpu=False        # type: bool
                                 ):
  # type: (...) -> tf.estimator.EstimatorSpec
  """Model evaluation with HR and NDCG metrics.
  The evaluation protocol is to rank the test interacted item (truth items)
  among the randomly chosen 999 items that are not interacted by the user.
  The performance of the ranked list is judged by Hit Ratio (HR) and Normalized
  Discounted Cumulative Gain (NDCG).
  For evaluation, the ranked list is truncated at 10 for both metrics. As such,
  the HR intuitively measures whether the test item is present on the top-10
  list, and the NDCG accounts for the position of the hit by assigning higher
  scores to hits at top ranks. Both metrics are calculated for each test user,
  and the average scores are reported.
  If `match_mlperf` is True, then the HR and NDCG computations are done in a
  slightly unusual way to match the MLPerf reference implementation.
  Specifically, if the evaluation negatives contain duplicate items, it will be
  treated as if the item only appeared once. Effectively, for duplicate items in
  a row, the predicted score for all but one of the items will be set to
  -infinity
  For example, suppose we have that following inputs:
  logits_by_user:     [[ 2,  3,  3],
                       [ 5,  4,  4]]
  items_by_user:     [[10, 20, 20],
                      [30, 40, 40]]
  # Note: items_by_user is not explicitly present. Instead the relevant \
    information is contained within `duplicate_mask`
  top_k: 2
  Then with match_mlperf=True, the HR would be 2/2 = 1.0. With
  match_mlperf=False, the HR would be 1/2 = 0.5. This is because each user has
  predicted scores for only 2 unique items: 10 and 20 for the first user, and 30
  and 40 for the second. Therefore, with match_mlperf=True, it's guaranteed the
  first item's score is in the top 2. With match_mlperf=False, this function
  would compute the first user's first item is not in the top 2, because item 20
  has a higher score, and item 20 occurs twice.
  Args:
    logits: A tensor containing the predicted logits for each user. The shape
      of logits is (num_users_per_batch * (1 + NUM_EVAL_NEGATIVES),) Logits
      for a user are grouped, and the first element of the group is the true
      element.
    softmax_logits: The same tensor, but with zeros left-appended.
    duplicate_mask: A vector with the same shape as logits, with a value of 1
      if the item corresponding to the logit at that position has already
      appeared for that user.
    num_training_neg: The number of negatives per positive during training.
    match_mlperf: Use the MLPerf reference convention for computing rank.
    use_tpu: Should the evaluation be performed on a TPU.
  Returns:
    An EstimatorSpec for evaluation.
  """
  in_top_k, ndcg, metric_weights, logits_by_user = compute_top_k_and_ndcg(
      logits, duplicate_mask, match_mlperf)
  # Examples are provided by the eval Dataset in a structured format, so eval
  # labels can be reconstructed on the fly. The true item is always the first
  # element of each per-user group, hence the one-hot at position 0.
  eval_labels = tf.reshape(tf.one_hot(
      tf.zeros(shape=(logits_by_user.shape[0],), dtype=tf.int32),
      logits_by_user.shape[1], dtype=tf.int32), (-1,))
  eval_labels_float = tf.cast(eval_labels, tf.float32)
  # During evaluation, the ratio of negatives to positives is much higher
  # than during training. (Typically 999 to 1 vs. 4 to 1) By adjusting the
  # weights for the negative examples we compute a loss which is consistent with
  # the training data. (And provides apples-to-apples comparison)
  negative_scale_factor = num_training_neg / rconst.NUM_EVAL_NEGATIVES
  example_weights = (
      (eval_labels_float + (1 - eval_labels_float) * negative_scale_factor) *
      (1 + rconst.NUM_EVAL_NEGATIVES) / (1 + num_training_neg))
  # Tile metric weights back to logit dimensions
  expanded_metric_weights = tf.reshape(tf.tile(
      metric_weights[:, tf.newaxis], (1, rconst.NUM_EVAL_NEGATIVES + 1)), (-1,))
  # ignore padded examples
  example_weights *= tf.cast(expanded_metric_weights, tf.float32)
  cross_entropy = tf.losses.sparse_softmax_cross_entropy(
      logits=softmax_logits, labels=eval_labels, weights=example_weights)
  # Wrapped in a function so the same metric definition can be passed to a
  # TPUEstimatorSpec (which requires a metric_fn) or used directly.
  def metric_fn(top_k_tensor, ndcg_tensor, weight_tensor):
    return {
        rconst.HR_KEY: tf.metrics.mean(top_k_tensor, weights=weight_tensor),
        rconst.NDCG_KEY: tf.metrics.mean(ndcg_tensor, weights=weight_tensor),
    }
  if use_tpu:
    return tf.contrib.tpu.TPUEstimatorSpec(
        mode=tf.estimator.ModeKeys.EVAL, loss=cross_entropy,
        eval_metrics=(metric_fn, [in_top_k, ndcg, metric_weights]))
  return tf.estimator.EstimatorSpec(
      mode=tf.estimator.ModeKeys.EVAL,
      loss=cross_entropy,
      eval_metric_ops=metric_fn(in_top_k, ndcg, metric_weights)
  )
def compute_top_k_and_ndcg(logits,              # type: tf.Tensor
                           duplicate_mask,      # type: tf.Tensor
                           match_mlperf=False   # type: bool
                          ):
  """Compute inputs of metric calculation.
  Args:
    logits: A tensor containing the predicted logits for each user. The shape
      of logits is (num_users_per_batch * (1 + NUM_EVAL_NEGATIVES),) Logits
      for a user are grouped, and the first element of the group is the true
      element.
    duplicate_mask: A vector with the same shape as logits, with a value of 1
      if the item corresponding to the logit at that position has already
      appeared for that user.
    match_mlperf: Use the MLPerf reference convention for computing rank.
  Returns:
    is_top_k, ndcg and weights, all of which has size (num_users_in_batch,), and
    logits_by_user which has size
    (num_users_in_batch, (rconst.NUM_EVAL_NEGATIVES + 1)).
  """
  # Regroup the flat vectors so each row holds one user's logits; the true
  # item occupies column 0 of every row.
  logits_by_user = tf.reshape(logits, (-1, rconst.NUM_EVAL_NEGATIVES + 1))
  duplicate_mask_by_user = tf.reshape(duplicate_mask,
                                      (-1, rconst.NUM_EVAL_NEGATIVES + 1))
  if match_mlperf:
    # Set duplicate logits to the min value for that dtype. The MLPerf
    # reference dedupes during evaluation.
    logits_by_user *= (1 - duplicate_mask_by_user)
    logits_by_user += duplicate_mask_by_user * logits_by_user.dtype.min
  # Determine the location of the first element in each row after the elements
  # are sorted.
  sort_indices = tf.contrib.framework.argsort(
      logits_by_user, axis=1, direction="DESCENDING")
  # Use matrix multiplication to extract the position of the true item from the
  # tensor of sorted indices. This approach is chosen because both GPUs and TPUs
  # perform matrix multiplications very quickly. This is similar to np.argwhere.
  # However this is a special case because the target will only appear in
  # sort_indices once.
  one_hot_position = tf.cast(tf.equal(sort_indices, 0), tf.int32)
  sparse_positions = tf.multiply(
      one_hot_position, tf.range(logits_by_user.shape[1])[tf.newaxis, :])
  position_vector = tf.reduce_sum(sparse_positions, axis=1)
  in_top_k = tf.cast(tf.less(position_vector, rconst.TOP_K), tf.float32)
  # NDCG with a single relevant item reduces to log(2) / log(rank + 1),
  # zeroed out when the item missed the top-k cutoff.
  ndcg = tf.log(2.) / tf.log(tf.cast(position_vector, tf.float32) + 2)
  ndcg *= in_top_k
  # If a row is a padded row, all but the first element will be a duplicate.
  metric_weights = tf.not_equal(tf.reduce_sum(duplicate_mask_by_user, axis=1),
                                rconst.NUM_EVAL_NEGATIVES)
  return in_top_k, ndcg, metric_weights, logits_by_user
| [
"[email protected]"
] | |
75b7140688bd7f5663275f7481f344ba0990f781 | 4e04f819e376c3fba7b6a57c228c289b2c3dde12 | /compass/ocean/tests/global_ocean/mesh/so12to60/dynamic_adjustment/__init__.py | c183fae208713987c10bf3bf3c959e87c5ac2da9 | [
"LicenseRef-scancode-warranty-disclaimer",
"BSD-2-Clause"
] | permissive | Rihui-L/compass | 65e88253f24240a4376a9f04c047c2756848a45a | 4446f76222be26996fc44569a2047bdfb22e33ff | refs/heads/master | 2023-06-19T12:45:30.190857 | 2021-07-20T19:48:43 | 2021-07-20T19:48:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,066 | py | from compass.ocean.tests.global_ocean.dynamic_adjustment import \
DynamicAdjustment
from compass.ocean.tests.global_ocean.forward import ForwardStep
class SO12to60DynamicAdjustment(DynamicAdjustment):
    """
    A test case performing dynamic adjustment (dissipating fast-moving waves)
    from an initial condition on the SO12to60 MPAS-Ocean mesh.

    The adjustment runs three "damped" forward steps with progressively
    weaker Rayleigh friction and longer time steps, followed by a final
    undamped simulation step, each restarting from the previous step's
    restart file.

    Attributes
    ----------
    restart_filenames : list of str
        A list of restart files from each dynamic-adjustment step
    """
    def __init__(self, test_group, mesh, init, time_integrator):
        """
        Create the test case
        Parameters
        ----------
        test_group : compass.ocean.tests.global_ocean.GlobalOcean
            The global ocean test group that this test case belongs to
        mesh : compass.ocean.tests.global_ocean.mesh.Mesh
            The test case that produces the mesh for this run
        init : compass.ocean.tests.global_ocean.init.Init
            The test case that produces the initial condition for this run
        time_integrator : {'split_explicit', 'RK4'}
            The time integrator to use for the forward run
        """
        if time_integrator != 'split_explicit':
            raise ValueError('{} dynamic adjustment not defined for {}'.format(
                mesh.mesh_name, time_integrator))
        # End times of the four steps; each step restarts from the file
        # written at the previous time.
        restart_times = ['0001-01-03_00:00:00', '0001-01-07_00:00:00',
                         '0001-01-11_00:00:00', '0001-01-21_00:00:00']
        restart_filenames = [
            'restarts/rst.{}.nc'.format(restart_time.replace(':', '.'))
            for restart_time in restart_times]
        super().__init__(test_group=test_group, mesh=mesh, init=init,
                         time_integrator=time_integrator,
                         restart_filenames=restart_filenames)
        module = self.__module__
        # first step: 2 days, dt=5 min, strongest Rayleigh damping (1.0e-4)
        step_name = 'damped_adjustment_1'
        step = ForwardStep(test_case=self, mesh=mesh, init=init,
                           time_integrator=time_integrator, name=step_name,
                           subdir=step_name)
        namelist_options = {
            'config_run_duration': "'00-00-02_00:00:00'",
            'config_dt': "'00:05:00'",
            'config_btr_dt': "'00:00:20'",
            'config_Rayleigh_friction': '.true.',
            'config_Rayleigh_damping_coeff': '1.0e-4'}
        step.add_namelist_options(namelist_options)
        stream_replacements = {
            'output_interval': '00-00-10_00:00:00',
            'restart_interval': '00-00-02_00:00:00'}
        step.add_streams_file(module, 'streams.template',
                              template_replacements=stream_replacements)
        step.add_output_file(filename='../{}'.format(restart_filenames[0]))
        self.add_step(step)
        # second step: 4 days, dt=7.5 min, damping reduced to 4.0e-5
        step_name = 'damped_adjustment_2'
        step = ForwardStep(test_case=self, mesh=mesh, init=init,
                           time_integrator=time_integrator, name=step_name,
                           subdir=step_name)
        namelist_options = {
            'config_run_duration': "'00-00-04_00:00:00'",
            'config_dt': "'00:07:30'",
            'config_btr_dt': "'00:00:20'",
            'config_Rayleigh_friction': '.true.',
            'config_Rayleigh_damping_coeff': '4.0e-5',
            'config_do_restart': '.true.',
            'config_start_time': "'{}'".format(restart_times[0])}
        step.add_namelist_options(namelist_options)
        stream_replacements = {
            'output_interval': '00-00-10_00:00:00',
            'restart_interval': '00-00-02_00:00:00'}
        step.add_streams_file(module, 'streams.template',
                              template_replacements=stream_replacements)
        step.add_input_file(filename='../{}'.format(restart_filenames[0]))
        step.add_output_file(filename='../{}'.format(restart_filenames[1]))
        self.add_step(step)
        # third step: 4 days, dt=10 min, damping reduced to 1.0e-5
        step_name = 'damped_adjustment_3'
        step = ForwardStep(test_case=self, mesh=mesh, init=init,
                           time_integrator=time_integrator, name=step_name,
                           subdir=step_name)
        namelist_options = {
            'config_run_duration': "'00-00-04_00:00:00'",
            'config_dt': "'00:10:00'",
            'config_btr_dt': "'00:00:20'",
            'config_Rayleigh_friction': '.true.',
            'config_Rayleigh_damping_coeff': '1.0e-5',
            'config_do_restart': '.true.',
            'config_start_time': "'{}'".format(restart_times[1])}
        step.add_namelist_options(namelist_options)
        stream_replacements = {
            'output_interval': '00-00-10_00:00:00',
            'restart_interval': '00-00-02_00:00:00'}
        step.add_streams_file(module, 'streams.template',
                              template_replacements=stream_replacements)
        step.add_input_file(filename='../{}'.format(restart_filenames[1]))
        step.add_output_file(filename='../{}'.format(restart_filenames[2]))
        self.add_step(step)
        # final step: 10 days without Rayleigh damping
        step_name = 'simulation'
        step = ForwardStep(test_case=self, mesh=mesh, init=init,
                           time_integrator=time_integrator, name=step_name,
                           subdir=step_name)
        namelist_options = {
            'config_run_duration': "'00-00-10_00:00:00'",
            'config_do_restart': '.true.',
            'config_start_time': "'{}'".format(restart_times[2])}
        step.add_namelist_options(namelist_options)
        stream_replacements = {
            'output_interval': '00-00-10_00:00:00',
            'restart_interval': '00-00-10_00:00:00'}
        step.add_streams_file(module, 'streams.template',
                              template_replacements=stream_replacements)
        step.add_input_file(filename='../{}'.format(restart_filenames[2]))
        step.add_output_file(filename='../{}'.format(restart_filenames[3]))
        self.add_step(step)
        self.restart_filenames = restart_filenames
| [
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.